_regex_core.py 137 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
7427842794280428142824283428442854286428742884289429042914292429342944295429642974298429943004301430243034304430543064307430843094310431143124313431443154316431743184319432043214322432343244325432643274328432943304331433243334334433543364337433843394340434143424343434443454346434743484349435043514352435343544355435643574358435943604361436243634364436543664367436843694370437143724373437443754376437743784379438043814382438343844385438643874388438943904391439243934394439543964397439843994400440144024403440444054406440744084409441044114412441344144415441644174418441944204421442244234424442544264427442844294430443144324433443444354436443744384439444044414442444344444445444644474448444944504451445244534454445544564457445844594460446144624463
  1. #
  2. # Secret Labs' Regular Expression Engine core module
  3. #
  4. # Copyright (c) 1998-2001 by Secret Labs AB. All rights reserved.
  5. #
  6. # This version of the SRE library can be redistributed under CNRI's
  7. # Python 1.6 license. For any other use, please contact Secret Labs
  8. # AB (info@pythonware.com).
  9. #
  10. # Portions of this engine have been developed in cooperation with
  11. # CNRI. Hewlett-Packard provided funding for 1.6 integration and
  12. # other compatibility work.
  13. #
  14. # 2010-01-16 mrab Python front-end re-written and extended
  15. import string
  16. import sys
  17. import unicodedata
  18. from collections import defaultdict
  19. import regex._regex as _regex
# The public API: single-letter and long-form flag names, the error class and
# the Scanner.
__all__ = ["A", "ASCII", "B", "BESTMATCH", "D", "DEBUG", "E", "ENHANCEMATCH",
    "F", "FULLCASE", "I", "IGNORECASE", "L", "LOCALE", "M", "MULTILINE", "P",
    "POSIX", "R", "REVERSE", "S", "DOTALL", "T", "TEMPLATE", "U", "UNICODE",
    "V0", "VERSION0", "V1", "VERSION1", "W", "WORD", "X", "VERBOSE", "error",
    "Scanner"]
  25. # The regex exception.
  26. class error(Exception):
  27. """Exception raised for invalid regular expressions.
  28. Attributes:
  29. msg: The unformatted error message
  30. pattern: The regular expression pattern
  31. pos: The position in the pattern where compilation failed, or None
  32. lineno: The line number where compilation failed, unless pos is None
  33. colno: The column number where compilation failed, unless pos is None
  34. """
  35. def __init__(self, message, pattern=None, pos=None):
  36. newline = '\n' if isinstance(pattern, str) else b'\n'
  37. self.msg = message
  38. self.pattern = pattern
  39. self.pos = pos
  40. if pattern is not None and pos is not None:
  41. self.lineno = pattern.count(newline, 0, pos) + 1
  42. self.colno = pos - pattern.rfind(newline, 0, pos)
  43. message = "{} at position {}".format(message, pos)
  44. if newline in pattern:
  45. message += " (line {}, column {})".format(self.lineno,
  46. self.colno)
  47. Exception.__init__(self, message)
# The exception for when a positional flag has been turned on in the old
# behaviour.
class _UnscopedFlagSet(Exception):
    """Internal: a positional (global) flag was set under the old behaviour."""
    pass
# The exception for when parsing fails and we want to try something else.
class ParseError(Exception):
    """Internal: the current parse attempt failed; try an alternative."""
    pass
# The exception for when there isn't a valid first set.
class _FirstSetError(Exception):
    """Internal: a valid firstset could not be built for the pattern."""
    pass
# Flags.
A = ASCII = 0x80          # Assume ASCII locale.
B = BESTMATCH = 0x1000    # Best fuzzy match.
D = DEBUG = 0x200         # Print parsed pattern.
E = ENHANCEMATCH = 0x8000 # Attempt to improve the fit after finding the first
                          # fuzzy match.
F = FULLCASE = 0x4000     # Unicode full case-folding.
I = IGNORECASE = 0x2      # Ignore case.
L = LOCALE = 0x4          # Assume current 8-bit locale.
M = MULTILINE = 0x8       # Make anchors look for newline.
P = POSIX = 0x10000       # POSIX-style matching (leftmost longest).
R = REVERSE = 0x400       # Search backwards.
S = DOTALL = 0x10         # Make dot match newline.
U = UNICODE = 0x20        # Assume Unicode locale.
V0 = VERSION0 = 0x2000    # Old legacy behaviour.
V1 = VERSION1 = 0x100     # New enhanced behaviour.
W = WORD = 0x800          # Default Unicode word breaks.
X = VERBOSE = 0x40        # Ignore whitespace and comments.
T = TEMPLATE = 0x1        # Template (present because re module has it).

DEFAULT_VERSION = VERSION1

_ALL_VERSIONS = VERSION0 | VERSION1
_ALL_ENCODINGS = ASCII | LOCALE | UNICODE

# The default flags for the various versions.
DEFAULT_FLAGS = {VERSION0: 0, VERSION1: FULLCASE}

# The mask for the flags.  GLOBAL_FLAGS apply to the whole pattern;
# SCOPED_FLAGS can be turned on and off within it.
GLOBAL_FLAGS = (_ALL_ENCODINGS | _ALL_VERSIONS | BESTMATCH | DEBUG |
    ENHANCEMATCH | POSIX | REVERSE)
SCOPED_FLAGS = FULLCASE | IGNORECASE | MULTILINE | DOTALL | WORD | VERBOSE

# Character classes used while parsing a pattern.
ALPHA = frozenset(string.ascii_letters)
DIGITS = frozenset(string.digits)
ALNUM = ALPHA | DIGITS
OCT_DIGITS = frozenset(string.octdigits)
HEX_DIGITS = frozenset(string.hexdigits)
# "" is included so that the end of the pattern (source.get() returning "")
# is handled by the special-character branch of the parser.
SPECIAL_CHARS = frozenset("()|?*+{^$.[\\#") | frozenset([""])
NAMED_CHAR_PART = ALNUM | frozenset(" -")
PROPERTY_NAME_PART = ALNUM | frozenset(" &_-.")
SET_OPS = ("||", "~~", "&&", "--")

# The width of the code words inside the regex engine.
BYTES_PER_CODE = _regex.get_code_size()
BITS_PER_CODE = BYTES_PER_CODE * 8

# The repeat count which represents infinity.
UNLIMITED = (1 << BITS_PER_CODE) - 1

# The regular expression flags, keyed by their inline-flag letters.
REGEX_FLAGS = {"a": ASCII, "b": BESTMATCH, "e": ENHANCEMATCH, "f": FULLCASE,
    "i": IGNORECASE, "L": LOCALE, "m": MULTILINE, "p": POSIX, "r": REVERSE,
    "s": DOTALL, "u": UNICODE, "V0": VERSION0, "V1": VERSION1, "w": WORD, "x":
    VERBOSE}

# The case flags.
CASE_FLAGS = FULLCASE | IGNORECASE
NOCASE = 0
FULLIGNORECASE = FULLCASE | IGNORECASE

FULL_CASE_FOLDING = UNICODE | FULLIGNORECASE

# FULLCASE alone (without IGNORECASE) has no effect.
CASE_FLAGS_COMBINATIONS = {0: 0, FULLCASE: 0, IGNORECASE: IGNORECASE,
    FULLIGNORECASE: FULLIGNORECASE}

# The number of digits in hexadecimal escapes.
HEX_ESCAPES = {"x": 2, "u": 4, "U": 8}
# The names of the opcodes.
OPCODES = """
FAILURE
SUCCESS
ANY
ANY_ALL
ANY_ALL_REV
ANY_REV
ANY_U
ANY_U_REV
ATOMIC
BOUNDARY
BRANCH
CALL_REF
CHARACTER
CHARACTER_IGN
CHARACTER_IGN_REV
CHARACTER_REV
CONDITIONAL
DEFAULT_BOUNDARY
DEFAULT_END_OF_WORD
DEFAULT_START_OF_WORD
END
END_OF_LINE
END_OF_LINE_U
END_OF_STRING
END_OF_STRING_LINE
END_OF_STRING_LINE_U
END_OF_WORD
FUZZY
GRAPHEME_BOUNDARY
GREEDY_REPEAT
GROUP
GROUP_CALL
GROUP_EXISTS
KEEP
LAZY_REPEAT
LOOKAROUND
NEXT
PROPERTY
PROPERTY_IGN
PROPERTY_IGN_REV
PROPERTY_REV
PRUNE
RANGE
RANGE_IGN
RANGE_IGN_REV
RANGE_REV
REF_GROUP
REF_GROUP_FLD
REF_GROUP_FLD_REV
REF_GROUP_IGN
REF_GROUP_IGN_REV
REF_GROUP_REV
SEARCH_ANCHOR
SET_DIFF
SET_DIFF_IGN
SET_DIFF_IGN_REV
SET_DIFF_REV
SET_INTER
SET_INTER_IGN
SET_INTER_IGN_REV
SET_INTER_REV
SET_SYM_DIFF
SET_SYM_DIFF_IGN
SET_SYM_DIFF_IGN_REV
SET_SYM_DIFF_REV
SET_UNION
SET_UNION_IGN
SET_UNION_IGN_REV
SET_UNION_REV
SKIP
START_OF_LINE
START_OF_LINE_U
START_OF_STRING
START_OF_WORD
STRING
STRING_FLD
STRING_FLD_REV
STRING_IGN
STRING_IGN_REV
STRING_REV
FUZZY_EXT
"""

# Define the opcodes in a namespace.
class Namespace:
    # A bare attribute container for the opcode numbers.
    pass

OP = Namespace()
# Number the opcodes sequentially in listing order, eg. OP.FAILURE == 0,
# OP.SUCCESS == 1, ...
for i, op in enumerate(OPCODES.split()):
    setattr(OP, op, i)
  204. def _shrink_cache(cache_dict, args_dict, locale_sensitive, max_length, divisor=5):
  205. """Make room in the given cache.
  206. Args:
  207. cache_dict: The cache dictionary to modify.
  208. args_dict: The dictionary of named list args used by patterns.
  209. max_length: Maximum # of entries in cache_dict before it is shrunk.
  210. divisor: Cache will shrink to max_length - 1/divisor*max_length items.
  211. """
  212. # Toss out a fraction of the entries at random to make room for new ones.
  213. # A random algorithm was chosen as opposed to simply cache_dict.popitem()
  214. # as popitem could penalize the same regular expression repeatedly based
  215. # on its internal hash value. Being random should spread the cache miss
  216. # love around.
  217. cache_keys = tuple(cache_dict.keys())
  218. overage = len(cache_keys) - max_length
  219. if overage < 0:
  220. # Cache is already within limits. Normally this should not happen
  221. # but it could due to multithreading.
  222. return
  223. number_to_toss = max_length // divisor + overage
  224. # The import is done here to avoid a circular dependency.
  225. import random
  226. if not hasattr(random, 'sample'):
  227. # Do nothing while resolving the circular dependency:
  228. # re->random->warnings->tokenize->string->re
  229. return
  230. for doomed_key in random.sample(cache_keys, number_to_toss):
  231. try:
  232. del cache_dict[doomed_key]
  233. except KeyError:
  234. # Ignore problems if the cache changed from another thread.
  235. pass
  236. # Rebuild the arguments and locale-sensitivity dictionaries.
  237. args_dict.clear()
  238. sensitivity_dict = {}
  239. for pattern, pattern_type, flags, args, default_version, locale in tuple(cache_dict):
  240. args_dict[pattern, pattern_type, flags, default_version, locale] = args
  241. try:
  242. sensitivity_dict[pattern_type, pattern] = locale_sensitive[pattern_type, pattern]
  243. except KeyError:
  244. pass
  245. locale_sensitive.clear()
  246. locale_sensitive.update(sensitivity_dict)
  247. def _fold_case(info, string):
  248. "Folds the case of a string."
  249. flags = info.flags
  250. if (flags & _ALL_ENCODINGS) == 0:
  251. flags |= info.guess_encoding
  252. return _regex.fold_case(flags, string)
  253. def is_cased_i(info, char):
  254. "Checks whether a character is cased."
  255. return len(_regex.get_all_cases(info.flags, char)) > 1
  256. def is_cased_f(flags, char):
  257. "Checks whether a character is cased."
  258. return len(_regex.get_all_cases(flags, char)) > 1
  259. def _compile_firstset(info, fs):
  260. "Compiles the firstset for the pattern."
  261. reverse = bool(info.flags & REVERSE)
  262. fs = _check_firstset(info, reverse, fs)
  263. if not fs:
  264. return []
  265. # Compile the firstset.
  266. return fs.compile(reverse)
  267. def _check_firstset(info, reverse, fs):
  268. "Checks the firstset for the pattern."
  269. if not fs or None in fs:
  270. return None
  271. # If we ignore the case, for simplicity we won't build a firstset.
  272. members = set()
  273. case_flags = NOCASE
  274. for i in fs:
  275. if isinstance(i, Character) and not i.positive:
  276. return None
  277. # if i.case_flags:
  278. # if isinstance(i, Character):
  279. # if is_cased_i(info, i.value):
  280. # return []
  281. # elif isinstance(i, SetBase):
  282. # return []
  283. case_flags |= i.case_flags
  284. members.add(i.with_flags(case_flags=NOCASE))
  285. if case_flags == (FULLCASE | IGNORECASE):
  286. return None
  287. # Build the firstset.
  288. fs = SetUnion(info, list(members), case_flags=case_flags & ~FULLCASE,
  289. zerowidth=True)
  290. fs = fs.optimise(info, reverse, in_set=True)
  291. return fs
  292. def _flatten_code(code):
  293. "Flattens the code from a list of tuples."
  294. flat_code = []
  295. for c in code:
  296. flat_code.extend(c)
  297. return flat_code
  298. def make_case_flags(info):
  299. "Makes the case flags."
  300. flags = info.flags & CASE_FLAGS
  301. # Turn off FULLCASE if ASCII is turned on.
  302. if info.flags & ASCII:
  303. flags &= ~FULLCASE
  304. return flags
def make_character(info, value, in_set=False):
    "Makes a character literal."
    if in_set:
        # A character set is built case-sensitively.
        return Character(value)
    # Outside a set, the current scoped case flags apply.
    return Character(value, case_flags=make_case_flags(info))
  311. def make_ref_group(info, name, position):
  312. "Makes a group reference."
  313. return RefGroup(info, name, position, case_flags=make_case_flags(info))
  314. def make_string_set(info, name):
  315. "Makes a string set."
  316. return StringSet(info, name, case_flags=make_case_flags(info))
  317. def make_property(info, prop, in_set):
  318. "Makes a property."
  319. if in_set:
  320. return prop
  321. return prop.with_flags(case_flags=make_case_flags(info))
  322. def _parse_pattern(source, info):
  323. "Parses a pattern, eg. 'a|b|c'."
  324. branches = [parse_sequence(source, info)]
  325. while source.match("|"):
  326. branches.append(parse_sequence(source, info))
  327. if len(branches) == 1:
  328. return branches[0]
  329. return Branch(branches)
def parse_sequence(source, info):
    "Parses a sequence, eg. 'abc'."
    # `sequence` uses None entries as sentinels: one is placed at the start,
    # and another after each applied quantifier/constraint, so that
    # apply_quantifier can distinguish "nothing to repeat" (sentinel popped
    # with an empty sequence) from "multiple repeat" (sentinel popped with
    # earlier elements present).  The sentinels are filtered out at the end.
    sequence = [None]
    case_flags = make_case_flags(info)
    while True:
        saved_pos = source.pos
        ch = source.get()
        if ch in SPECIAL_CHARS:
            if ch in ")|":
                # The end of a sequence. At the end of the pattern ch is "".
                source.pos = saved_pos
                break
            elif ch == "\\":
                # An escape sequence outside a set.
                sequence.append(parse_escape(source, info, False))
            elif ch == "(":
                # A parenthesised subpattern or a flag.
                element = parse_paren(source, info)
                if element is None:
                    # No element was produced (the parens changed the flags),
                    # so refresh the scoped case flags.
                    case_flags = make_case_flags(info)
                else:
                    sequence.append(element)
            elif ch == ".":
                # Any character.
                if info.flags & DOTALL:
                    sequence.append(AnyAll())
                elif info.flags & WORD:
                    sequence.append(AnyU())
                else:
                    sequence.append(Any())
            elif ch == "[":
                # A character set.
                sequence.append(parse_set(source, info))
            elif ch == "^":
                # The start of a line or the string.
                if info.flags & MULTILINE:
                    if info.flags & WORD:
                        sequence.append(StartOfLineU())
                    else:
                        sequence.append(StartOfLine())
                else:
                    sequence.append(StartOfString())
            elif ch == "$":
                # The end of a line or the string.
                if info.flags & MULTILINE:
                    if info.flags & WORD:
                        sequence.append(EndOfLineU())
                    else:
                        sequence.append(EndOfLine())
                else:
                    if info.flags & WORD:
                        sequence.append(EndOfStringLineU())
                    else:
                        sequence.append(EndOfStringLine())
            elif ch in "?*+{":
                # Looks like a quantifier.
                counts = parse_quantifier(source, info, ch)
                if counts:
                    # It _is_ a quantifier.
                    apply_quantifier(source, info, counts, case_flags, ch,
                        saved_pos, sequence)
                    sequence.append(None)
                else:
                    # It's not a quantifier. Maybe it's a fuzzy constraint.
                    constraints = parse_fuzzy(source, info, ch)
                    if constraints:
                        # It _is_ a fuzzy constraint.
                        apply_constraint(source, info, constraints, case_flags,
                            saved_pos, sequence)
                        sequence.append(None)
                    else:
                        # The element was just a literal.
                        sequence.append(Character(ord(ch),
                            case_flags=case_flags))
            else:
                # A literal.
                sequence.append(Character(ord(ch), case_flags=case_flags))
        else:
            # A literal.
            sequence.append(Character(ord(ch), case_flags=case_flags))
    # Drop the sentinels before building the result.
    sequence = [item for item in sequence if item is not None]
    return Sequence(sequence)
def apply_quantifier(source, info, counts, case_flags, ch, saved_pos,
    sequence):
    "Applies quantifier counts to the last element of the sequence."
    element = sequence.pop()
    if element is None:
        # We popped a sentinel: there's no element to repeat.  If other
        # elements precede it, the sentinel came from an earlier quantifier.
        if sequence:
            raise error("multiple repeat", source.string, saved_pos)
        raise error("nothing to repeat", source.string, saved_pos)
    if isinstance(element, (GreedyRepeat, LazyRepeat, PossessiveRepeat)):
        raise error("multiple repeat", source.string, saved_pos)
    min_count, max_count = counts
    saved_pos = source.pos
    ch = source.get()
    if ch == "?":
        # The "?" suffix that means it's a lazy repeat.
        repeated = LazyRepeat
    elif ch == "+":
        # The "+" suffix that means it's a possessive repeat.
        repeated = PossessiveRepeat
    else:
        # No suffix means that it's a greedy repeat.
        source.pos = saved_pos
        repeated = GreedyRepeat
    # Ignore the quantifier if it applies to a zero-width item or the number of
    # repeats is fixed at 1.
    if not element.is_empty() and (min_count != 1 or max_count != 1):
        element = repeated(element, min_count, max_count)
    sequence.append(element)
  439. def apply_constraint(source, info, constraints, case_flags, saved_pos,
  440. sequence):
  441. element = sequence.pop()
  442. if element is None:
  443. raise error("nothing for fuzzy constraint", source.string, saved_pos)
  444. # If a group is marked as fuzzy then put all of the fuzzy part in the
  445. # group.
  446. if isinstance(element, Group):
  447. element.subpattern = Fuzzy(element.subpattern, constraints)
  448. sequence.append(element)
  449. else:
  450. sequence.append(Fuzzy(element, constraints))
  451. _QUANTIFIERS = {"?": (0, 1), "*": (0, None), "+": (1, None)}
  452. def parse_quantifier(source, info, ch):
  453. "Parses a quantifier."
  454. q = _QUANTIFIERS.get(ch)
  455. if q:
  456. # It's a quantifier.
  457. return q
  458. if ch == "{":
  459. # Looks like a limited repeated element, eg. 'a{2,3}'.
  460. counts = parse_limited_quantifier(source)
  461. if counts:
  462. return counts
  463. return None
  464. def is_above_limit(count):
  465. "Checks whether a count is above the maximum."
  466. return count is not None and count >= UNLIMITED
  467. def parse_limited_quantifier(source):
  468. "Parses a limited quantifier."
  469. saved_pos = source.pos
  470. min_count = parse_count(source)
  471. if source.match(","):
  472. max_count = parse_count(source)
  473. # No minimum means 0 and no maximum means unlimited.
  474. min_count = int(min_count or 0)
  475. max_count = int(max_count) if max_count else None
  476. else:
  477. if not min_count:
  478. source.pos = saved_pos
  479. return None
  480. min_count = max_count = int(min_count)
  481. if not source.match ("}"):
  482. source.pos = saved_pos
  483. return None
  484. if is_above_limit(min_count) or is_above_limit(max_count):
  485. raise error("repeat count too big", source.string, saved_pos)
  486. if max_count is not None and min_count > max_count:
  487. raise error("min repeat greater than max repeat", source.string,
  488. saved_pos)
  489. return min_count, max_count
  490. def parse_fuzzy(source, info, ch):
  491. "Parses a fuzzy setting, if present."
  492. saved_pos = source.pos
  493. if ch != "{":
  494. return None
  495. constraints = {}
  496. try:
  497. parse_fuzzy_item(source, constraints)
  498. while source.match(","):
  499. parse_fuzzy_item(source, constraints)
  500. except ParseError:
  501. source.pos = saved_pos
  502. return None
  503. if source.match(":"):
  504. constraints["test"] = parse_fuzzy_test(source, info)
  505. if not source.match("}"):
  506. raise error("expected }", source.string, source.pos)
  507. return constraints
def parse_fuzzy_item(source, constraints):
    "Parses a fuzzy setting item."
    saved_pos = source.pos
    try:
        parse_cost_constraint(source, constraints)
    except ParseError:
        # Not a simple cost constraint; rewind and retry the item as a cost
        # equation instead.
        source.pos = saved_pos
        parse_cost_equation(source, constraints)
def parse_cost_constraint(source, constraints):
    # Parses a cost constraint and stores it into 'constraints' as a
    # (min_cost, max_cost) pair keyed by the constraint letter.
    "Parses a cost constraint."
    saved_pos = source.pos
    ch = source.get()
    if ch in ALPHA:
        # Syntax: constraint [("<=" | "<") cost]
        constraint = parse_constraint(source, constraints, ch)
        max_inc = parse_fuzzy_compare(source)
        if max_inc is None:
            # No maximum cost.
            constraints[constraint] = 0, None
        else:
            # There's a maximum cost.
            cost_pos = source.pos
            max_cost = parse_cost_limit(source)
            # Inclusive or exclusive limit?
            if not max_inc:
                max_cost -= 1
            if max_cost < 0:
                raise error("bad fuzzy cost limit", source.string, cost_pos)
            constraints[constraint] = 0, max_cost
    elif ch in DIGITS:
        # Syntax: cost ("<=" | "<") constraint ("<=" | "<") cost
        source.pos = saved_pos
        # Minimum cost.
        cost_pos = source.pos
        min_cost = parse_cost_limit(source)
        min_inc = parse_fuzzy_compare(source)
        if min_inc is None:
            raise ParseError()
        constraint = parse_constraint(source, constraints, source.get())
        max_inc = parse_fuzzy_compare(source)
        if max_inc is None:
            raise ParseError()
        # Maximum cost.
        cost_pos = source.pos
        max_cost = parse_cost_limit(source)
        # Inclusive or exclusive limits? Exclusive bounds are converted to
        # the equivalent inclusive ones.
        if not min_inc:
            min_cost += 1
        if not max_inc:
            max_cost -= 1
        if not 0 <= min_cost <= max_cost:
            raise error("bad fuzzy cost limit", source.string, cost_pos)
        constraints[constraint] = min_cost, max_cost
    else:
        raise ParseError()
  563. def parse_cost_limit(source):
  564. "Parses a cost limit."
  565. cost_pos = source.pos
  566. digits = parse_count(source)
  567. try:
  568. return int(digits)
  569. except ValueError:
  570. pass
  571. raise error("bad fuzzy cost limit", source.string, cost_pos)
  572. def parse_constraint(source, constraints, ch):
  573. "Parses a constraint."
  574. if ch not in "deis":
  575. raise ParseError()
  576. if ch in constraints:
  577. raise ParseError()
  578. return ch
  579. def parse_fuzzy_compare(source):
  580. "Parses a cost comparator."
  581. if source.match("<="):
  582. return True
  583. elif source.match("<"):
  584. return False
  585. else:
  586. return None
  587. def parse_cost_equation(source, constraints):
  588. "Parses a cost equation."
  589. if "cost" in constraints:
  590. raise error("more than one cost equation", source.string, source.pos)
  591. cost = {}
  592. parse_cost_term(source, cost)
  593. while source.match("+"):
  594. parse_cost_term(source, cost)
  595. max_inc = parse_fuzzy_compare(source)
  596. if max_inc is None:
  597. raise ParseError()
  598. max_cost = int(parse_count(source))
  599. if not max_inc:
  600. max_cost -= 1
  601. if max_cost < 0:
  602. raise error("bad fuzzy cost limit", source.string, source.pos)
  603. cost["max"] = max_cost
  604. constraints["cost"] = cost
  605. def parse_cost_term(source, cost):
  606. "Parses a cost equation term."
  607. coeff = parse_count(source)
  608. ch = source.get()
  609. if ch not in "dis":
  610. raise ParseError()
  611. if ch in cost:
  612. raise error("repeated fuzzy cost", source.string, source.pos)
  613. cost[ch] = int(coeff or 1)
  614. def parse_fuzzy_test(source, info):
  615. saved_pos = source.pos
  616. ch = source.get()
  617. if ch in SPECIAL_CHARS:
  618. if ch == "\\":
  619. # An escape sequence outside a set.
  620. return parse_escape(source, info, False)
  621. elif ch == ".":
  622. # Any character.
  623. if info.flags & DOTALL:
  624. return AnyAll()
  625. elif info.flags & WORD:
  626. return AnyU()
  627. else:
  628. return Any()
  629. elif ch == "[":
  630. # A character set.
  631. return parse_set(source, info)
  632. else:
  633. raise error("expected character set", source.string, saved_pos)
  634. elif ch:
  635. # A literal.
  636. return Character(ord(ch), case_flags=case_flags)
  637. else:
  638. raise error("expected character set", source.string, saved_pos)
  639. def parse_count(source):
  640. "Parses a quantifier's count, which can be empty."
  641. return source.get_while(DIGITS)
def parse_paren(source, info):
    """Parses a parenthesised subpattern or a flag. Returns FLAGS if it's an
    inline flag.
    """
    saved_pos = source.pos
    ch = source.get()
    if ch == "?":
        # (?...
        saved_pos_2 = source.pos
        ch = source.get()
        if ch == "<":
            # (?<...
            saved_pos_3 = source.pos
            ch = source.get()
            if ch in ("=", "!"):
                # (?<=... or (?<!...: lookbehind.
                return parse_lookaround(source, info, True, ch == "=")
            # (?<...: a named capture group.
            source.pos = saved_pos_3
            name = parse_name(source)
            group = info.open_group(name)
            source.expect(">")
            saved_flags = info.flags
            try:
                subpattern = _parse_pattern(source, info)
                source.expect(")")
            finally:
                # Restore scoped flags; ignore_space depends on VERBOSE so it
                # must be recomputed after the flags are restored.
                info.flags = saved_flags
                source.ignore_space = bool(info.flags & VERBOSE)
            info.close_group()
            return Group(info, group, subpattern)
        if ch in ("=", "!"):
            # (?=... or (?!...: lookahead.
            return parse_lookaround(source, info, False, ch == "=")
        if ch == "P":
            # (?P...: a Python extension.
            return parse_extension(source, info)
        if ch == "#":
            # (?#...: a comment.
            return parse_comment(source)
        if ch == "(":
            # (?(...: a conditional subpattern.
            return parse_conditional(source, info)
        if ch == ">":
            # (?>...: an atomic subpattern.
            return parse_atomic(source, info)
        if ch == "|":
            # (?|...: a common/reset groups branch.
            return parse_common(source, info)
        if ch == "R" or "0" <= ch <= "9":
            # (?R...: probably a call to a group.
            return parse_call_group(source, info, ch, saved_pos_2)
        if ch == "&":
            # (?&...: a call to a named group.
            return parse_call_named_group(source, info, saved_pos_2)
        # (?...: probably a flags subpattern.
        source.pos = saved_pos_2
        return parse_flags_subpattern(source, info)
    if ch == "*":
        # (*...
        saved_pos_2 = source.pos
        word = source.get_while(set(")>"), include=False)
        if word[ : 1].isalpha():
            verb = VERBS.get(word)
            if not verb:
                raise error("unknown verb", source.string, saved_pos_2)
            source.expect(")")
            return verb
    # (...: an unnamed capture group.
    source.pos = saved_pos
    group = info.open_group()
    saved_flags = info.flags
    try:
        subpattern = _parse_pattern(source, info)
        source.expect(")")
    finally:
        info.flags = saved_flags
        source.ignore_space = bool(info.flags & VERBOSE)
    info.close_group()
    return Group(info, group, subpattern)
def parse_extension(source, info):
    "Parses a Python extension."
    saved_pos = source.pos
    ch = source.get()
    if ch == "<":
        # (?P<...: a named capture group.
        name = parse_name(source)
        group = info.open_group(name)
        source.expect(">")
        saved_flags = info.flags
        try:
            subpattern = _parse_pattern(source, info)
            source.expect(")")
        finally:
            # Restore scoped flags; ignore_space depends on VERBOSE so it
            # must be recomputed after the flags are restored.
            info.flags = saved_flags
            source.ignore_space = bool(info.flags & VERBOSE)
        info.close_group()
        return Group(info, group, subpattern)
    if ch == "=":
        # (?P=...: a named group reference.
        name = parse_name(source, allow_numeric=True)
        source.expect(")")
        if info.is_open_group(name):
            raise error("cannot refer to an open group", source.string,
              saved_pos)
        return make_ref_group(info, name, saved_pos)
    if ch == ">" or ch == "&":
        # (?P>...: a call to a group.
        return parse_call_named_group(source, info, saved_pos)
    source.pos = saved_pos
    raise error("unknown extension", source.string, saved_pos)
  753. def parse_comment(source):
  754. "Parses a comment."
  755. while True:
  756. saved_pos = source.pos
  757. c = source.get()
  758. if not c or c == ")":
  759. break
  760. if c == "\\":
  761. c = source.get()
  762. source.pos = saved_pos
  763. source.expect(")")
  764. return None
def parse_lookaround(source, info, behind, positive):
    # 'behind' selects lookbehind vs lookahead; 'positive' selects the
    # assertion polarity.
    "Parses a lookaround."
    saved_flags = info.flags
    try:
        subpattern = _parse_pattern(source, info)
        source.expect(")")
    finally:
        # Restore scoped flags; ignore_space depends on VERBOSE so it must
        # be recomputed after the flags are restored.
        info.flags = saved_flags
        source.ignore_space = bool(info.flags & VERBOSE)
    return LookAround(behind, positive, subpattern)
def parse_conditional(source, info):
    "Parses a conditional subpattern."
    saved_flags = info.flags
    saved_pos = source.pos
    ch = source.get()
    if ch == "?":
        # (?(?...
        ch = source.get()
        if ch in ("=", "!"):
            # (?(?=... or (?(?!...: lookahead conditional.
            return parse_lookaround_conditional(source, info, False, ch == "=")
        if ch == "<":
            # (?(?<...
            ch = source.get()
            if ch in ("=", "!"):
                # (?(?<=... or (?(?<!...: lookbehind conditional.
                return parse_lookaround_conditional(source, info, True, ch ==
                  "=")
        source.pos = saved_pos
        raise error("expected lookaround conditional", source.string,
          source.pos)
    # Not a lookaround conditional: a group-reference conditional.
    source.pos = saved_pos
    try:
        group = parse_name(source, True)
        source.expect(")")
        yes_branch = parse_sequence(source, info)
        if source.match("|"):
            no_branch = parse_sequence(source, info)
        else:
            no_branch = Sequence()
        source.expect(")")
    finally:
        # Restore scoped flags; ignore_space depends on VERBOSE so it must
        # be recomputed after the flags are restored.
        info.flags = saved_flags
        source.ignore_space = bool(info.flags & VERBOSE)
    if yes_branch.is_empty() and no_branch.is_empty():
        return Sequence()
    return Conditional(info, group, yes_branch, no_branch, saved_pos)
def parse_lookaround_conditional(source, info, behind, positive):
    "Parses a conditional subpattern whose test is a lookaround."
    saved_flags = info.flags
    try:
        subpattern = _parse_pattern(source, info)
        source.expect(")")
    finally:
        # Restore scoped flags; ignore_space depends on VERBOSE so it must
        # be recomputed after the flags are restored.
        info.flags = saved_flags
        source.ignore_space = bool(info.flags & VERBOSE)
    yes_branch = parse_sequence(source, info)
    if source.match("|"):
        no_branch = parse_sequence(source, info)
    else:
        no_branch = Sequence()
    source.expect(")")
    return LookAroundConditional(behind, positive, subpattern, yes_branch,
      no_branch)
def parse_atomic(source, info):
    "Parses an atomic subpattern."
    saved_flags = info.flags
    try:
        subpattern = _parse_pattern(source, info)
        source.expect(")")
    finally:
        # Restore scoped flags; ignore_space depends on VERBOSE so it must
        # be recomputed after the flags are restored.
        info.flags = saved_flags
        source.ignore_space = bool(info.flags & VERBOSE)
    return Atomic(subpattern)
def parse_common(source, info):
    "Parses a common groups branch."
    # Capture group numbers in different branches can reuse the group numbers.
    initial_group_count = info.group_count
    branches = [parse_sequence(source, info)]
    final_group_count = info.group_count
    while source.match("|"):
        # Each branch restarts numbering from the count before the first
        # branch; the overall count is the maximum over all branches.
        info.group_count = initial_group_count
        branches.append(parse_sequence(source, info))
        final_group_count = max(final_group_count, info.group_count)
    info.group_count = final_group_count
    source.expect(")")
    if len(branches) == 1:
        return branches[0]
    return Branch(branches)
  853. def parse_call_group(source, info, ch, pos):
  854. "Parses a call to a group."
  855. if ch == "R":
  856. group = "0"
  857. else:
  858. group = ch + source.get_while(DIGITS)
  859. source.expect(")")
  860. return CallGroup(info, group, pos)
  861. def parse_call_named_group(source, info, pos):
  862. "Parses a call to a named group."
  863. group = parse_name(source)
  864. source.expect(")")
  865. return CallGroup(info, group, pos)
def parse_flag_set(source):
    "Parses a set of inline flags."
    flags = 0
    try:
        while True:
            saved_pos = source.pos
            ch = source.get()
            if ch == "V":
                # Version flags are two characters, eg. "V0" or "V1".
                ch += source.get()
            flags |= REGEX_FLAGS[ch]
    except KeyError:
        # Not a flag character: rewind past it; the flags collected so far
        # stand.
        source.pos = saved_pos
    return flags
def parse_flags(source, info):
    "Parses flags being turned on/off."
    flags_on = parse_flag_set(source)
    if source.match("-"):
        flags_off = parse_flag_set(source)
        if not flags_off:
            raise error("bad inline flags: no flags after '-'", source.string,
              source.pos)
    else:
        flags_off = 0
    if flags_on & LOCALE:
        # Remember that this pattern has an inline locale flag.
        info.inline_locale = True
    return flags_on, flags_off
def parse_subpattern(source, info, flags_on, flags_off):
    "Parses a subpattern with scoped flags."
    saved_flags = info.flags
    info.flags = (info.flags | flags_on) & ~flags_off
    source.ignore_space = bool(info.flags & VERBOSE)
    try:
        subpattern = _parse_pattern(source, info)
        source.expect(")")
    finally:
        # Restore the outer flags; ignore_space depends on VERBOSE so it
        # must be recomputed after the flags are restored.
        info.flags = saved_flags
        source.ignore_space = bool(info.flags & VERBOSE)
    return subpattern
def parse_flags_subpattern(source, info):
    """Parses a flags subpattern. It could be inline flags or a subpattern
    possibly with local flags. If it's a subpattern, then that's returned;
    if it's a inline flags, then None is returned.
    """
    flags_on, flags_off = parse_flags(source, info)
    if flags_off & GLOBAL_FLAGS:
        raise error("bad inline flags: cannot turn off global flag",
          source.string, source.pos)
    if flags_on & flags_off:
        raise error("bad inline flags: flag turned on and off", source.string,
          source.pos)
    # Handle flags which are global in all regex behaviours.
    new_global_flags = (flags_on & ~info.global_flags) & GLOBAL_FLAGS
    if new_global_flags:
        info.global_flags |= new_global_flags
        # A global has been turned on, so reparse the pattern.
        raise _UnscopedFlagSet(info.global_flags)
    # Ensure that from now on we have only scoped flags.
    flags_on &= ~GLOBAL_FLAGS
    if source.match(":"):
        return parse_subpattern(source, info, flags_on, flags_off)
    if source.match(")"):
        parse_positional_flags(source, info, flags_on, flags_off)
        return None
    raise error("unknown extension", source.string, source.pos)
def parse_positional_flags(source, info, flags_on, flags_off):
    "Parses positional flags."
    version = (info.flags & _ALL_VERSIONS) or DEFAULT_VERSION
    if version == VERSION0:
        # Positional flags are global and can only be turned on.
        if flags_off:
            raise error("bad inline flags: cannot turn flags off",
              source.string, source.pos)
        new_global_flags = flags_on & ~info.global_flags
        if new_global_flags:
            info.global_flags |= new_global_flags
            # A global has been turned on, so reparse the pattern.
            raise _UnscopedFlagSet(info.global_flags)
    else:
        # VERSION1: the flags apply from this point onwards.
        info.flags = (info.flags | flags_on) & ~flags_off
    source.ignore_space = bool(info.flags & VERBOSE)
  947. def parse_name(source, allow_numeric=False, allow_group_0=False):
  948. "Parses a name."
  949. name = source.get_while(set(")>"), include=False)
  950. if not name:
  951. raise error("missing group name", source.string, source.pos)
  952. if name.isdigit():
  953. min_group = 0 if allow_group_0 else 1
  954. if not allow_numeric or int(name) < min_group:
  955. raise error("bad character in group name", source.string,
  956. source.pos)
  957. else:
  958. if not name.isidentifier():
  959. raise error("bad character in group name", source.string,
  960. source.pos)
  961. return name
  962. def is_octal(string):
  963. "Checks whether a string is octal."
  964. return all(ch in OCT_DIGITS for ch in string)
  965. def is_decimal(string):
  966. "Checks whether a string is decimal."
  967. return all(ch in DIGITS for ch in string)
  968. def is_hexadecimal(string):
  969. "Checks whether a string is hexadecimal."
  970. return all(ch in HEX_DIGITS for ch in string)
def parse_escape(source, info, in_set):
    "Parses an escape sequence."
    # The character after the backslash must be read verbatim, even in
    # verbose mode.
    saved_ignore = source.ignore_space
    source.ignore_space = False
    ch = source.get()
    source.ignore_space = saved_ignore
    if not ch:
        # A backslash at the end of the pattern.
        raise error("bad escape (end of pattern)", source.string, source.pos)
    if ch in HEX_ESCAPES:
        # A hexadecimal escape sequence.
        return parse_hex_escape(source, info, ch, HEX_ESCAPES[ch], in_set, ch)
    elif ch == "g" and not in_set:
        # A group reference.
        saved_pos = source.pos
        try:
            return parse_group_ref(source, info)
        except error:
            # Invalid as a group reference, so assume it's a literal.
            source.pos = saved_pos
        return make_character(info, ord(ch), in_set)
    elif ch == "G" and not in_set:
        # A search anchor.
        return SearchAnchor()
    elif ch == "L" and not in_set:
        # A string set.
        return parse_string_set(source, info)
    elif ch == "N":
        # A named codepoint.
        return parse_named_char(source, info, in_set)
    elif ch in "pP":
        # A Unicode property, positive or negative.
        return parse_property(source, info, ch == "p", in_set)
    elif ch == "X" and not in_set:
        # A grapheme cluster.
        return Grapheme()
    elif ch in ALPHA:
        # An alphabetic escape sequence.
        # Positional escapes aren't allowed inside a character set.
        if not in_set:
            if info.flags & WORD:
                value = WORD_POSITION_ESCAPES.get(ch)
            else:
                value = POSITION_ESCAPES.get(ch)
            if value:
                return value
        value = CHARSET_ESCAPES.get(ch)
        if value:
            return value
        value = CHARACTER_ESCAPES.get(ch)
        if value:
            return Character(ord(value))
        # Unknown alphabetic escapes are treated as literals.
        return make_character(info, ord(ch), in_set)
    elif ch in DIGITS:
        # A numeric escape sequence.
        return parse_numeric_escape(source, info, ch, in_set)
    else:
        # A literal.
        return make_character(info, ord(ch), in_set)
def parse_numeric_escape(source, info, ch, in_set):
    "Parses a numeric escape sequence."
    if in_set or ch == "0":
        # Octal escape sequence, max 3 digits.
        return parse_octal_escape(source, info, [ch], in_set)
    # At least 1 digit, so either octal escape or group.
    digits = ch
    saved_pos = source.pos
    ch = source.get()
    if ch in DIGITS:
        # At least 2 digits, so either octal escape or group.
        digits += ch
        saved_pos = source.pos
        ch = source.get()
        if is_octal(digits) and ch in OCT_DIGITS:
            # 3 octal digits, so octal escape sequence.
            encoding = info.flags & _ALL_ENCODINGS
            if encoding == ASCII or encoding == LOCALE:
                # Byte-oriented semantics: values wrap at 0xFF.
                octal_mask = 0xFF
            else:
                octal_mask = 0x1FF
            value = int(digits + ch, 8) & octal_mask
            return make_character(info, value)
    # Group reference.
    source.pos = saved_pos
    if info.is_open_group(digits):
        raise error("cannot refer to an open group", source.string, source.pos)
    return make_ref_group(info, digits, source.pos)
def parse_octal_escape(source, info, digits, in_set):
    # 'digits' arrives with the first digit already consumed by the caller.
    "Parses an octal escape sequence."
    saved_pos = source.pos
    ch = source.get()
    while len(digits) < 3 and ch in OCT_DIGITS:
        digits.append(ch)
        saved_pos = source.pos
        ch = source.get()
    # Push back the first non-octal character.
    source.pos = saved_pos
    try:
        value = int("".join(digits), 8)
        return make_character(info, value, in_set)
    except ValueError:
        if digits[0] in OCT_DIGITS:
            raise error("incomplete escape \\%s" % ''.join(digits),
              source.string, source.pos)
        else:
            raise error("bad escape \\%s" % digits[0], source.string,
              source.pos)
  1077. def parse_hex_escape(source, info, esc, expected_len, in_set, type):
  1078. "Parses a hex escape sequence."
  1079. saved_pos = source.pos
  1080. digits = []
  1081. for i in range(expected_len):
  1082. ch = source.get()
  1083. if ch not in HEX_DIGITS:
  1084. raise error("incomplete escape \\%s%s" % (type, ''.join(digits)),
  1085. source.string, saved_pos)
  1086. digits.append(ch)
  1087. try:
  1088. value = int("".join(digits), 16)
  1089. except ValueError:
  1090. pass
  1091. else:
  1092. if value < 0x110000:
  1093. return make_character(info, value, in_set)
  1094. # Bad hex escape.
  1095. raise error("bad hex escape \\%s%s" % (esc, ''.join(digits)),
  1096. source.string, saved_pos)
  1097. def parse_group_ref(source, info):
  1098. "Parses a group reference."
  1099. source.expect("<")
  1100. saved_pos = source.pos
  1101. name = parse_name(source, True)
  1102. source.expect(">")
  1103. if info.is_open_group(name):
  1104. raise error("cannot refer to an open group", source.string, source.pos)
  1105. return make_ref_group(info, name, saved_pos)
  1106. def parse_string_set(source, info):
  1107. "Parses a string set reference."
  1108. source.expect("<")
  1109. name = parse_name(source, True)
  1110. source.expect(">")
  1111. if name is None or name not in info.kwargs:
  1112. raise error("undefined named list", source.string, source.pos)
  1113. return make_string_set(info, name)
def parse_named_char(source, info, in_set):
    "Parses a named character."
    saved_pos = source.pos
    if source.match("{"):
        name = source.get_while(NAMED_CHAR_PART)
        if source.match("}"):
            try:
                value = unicodedata.lookup(name)
                return make_character(info, ord(value), in_set)
            except KeyError:
                # unicodedata.lookup raises KeyError for unknown names.
                raise error("undefined character name", source.string,
                  source.pos)
    # Not a well-formed \N{...}: treat the "N" as a literal.
    source.pos = saved_pos
    return make_character(info, ord("N"), in_set)
def parse_property(source, info, positive, in_set):
    "Parses a Unicode property."
    saved_pos = source.pos
    ch = source.get()
    if ch == "{":
        negate = source.match("^")
        prop_name, name = parse_property_name(source)
        if source.match("}"):
            # It's correctly delimited.
            prop = lookup_property(prop_name, name, positive != negate, source)
            return make_property(info, prop, in_set)
        # No closing "}": fall through to the literal case below.
    elif ch and ch in "CLMNPSZ":
        # An abbreviated property, eg \pL.
        prop = lookup_property(None, ch, positive, source)
        return make_property(info, prop, in_set)
    # Not a property, so treat as a literal "p" or "P".
    source.pos = saved_pos
    ch = "p" if positive else "P"
    return make_character(info, ord(ch), in_set)
def parse_property_name(source):
    # Returns a (prop_name, name) pair; prop_name is None for an
    # unqualified name.
    "Parses a property name, which may be qualified."
    name = source.get_while(PROPERTY_NAME_PART)
    saved_pos = source.pos
    ch = source.get()
    if ch and ch in ":=":
        prop_name = name
        name = source.get_while(ALNUM | set(" &_-./")).strip()
        if name:
            # Name after the ":" or "=", so it's a qualified name.
            saved_pos = source.pos
        else:
            # No name after the ":" or "=", so assume it's an unqualified name.
            prop_name, name = None, prop_name
    else:
        prop_name = None
    # Rewind to just after the last character consumed as part of the name.
    source.pos = saved_pos
    return prop_name, name
def parse_set(source, info):
    "Parses a character set."
    version = (info.flags & _ALL_VERSIONS) or DEFAULT_VERSION
    # Whitespace is significant inside a set, even in verbose mode.
    saved_ignore = source.ignore_space
    source.ignore_space = False
    # Negative set?
    negate = source.match("^")
    try:
        if version == VERSION0:
            item = parse_set_imp_union(source, info)
        else:
            item = parse_set_union(source, info)
        if not source.match("]"):
            raise error("missing ]", source.string, source.pos)
    finally:
        source.ignore_space = saved_ignore
    if negate:
        item = item.with_flags(positive=not item.positive)
    item = item.with_flags(case_flags=make_case_flags(info))
    return item
  1185. def parse_set_union(source, info):
  1186. "Parses a set union ([x||y])."
  1187. items = [parse_set_symm_diff(source, info)]
  1188. while source.match("||"):
  1189. items.append(parse_set_symm_diff(source, info))
  1190. if len(items) == 1:
  1191. return items[0]
  1192. return SetUnion(info, items)
  1193. def parse_set_symm_diff(source, info):
  1194. "Parses a set symmetric difference ([x~~y])."
  1195. items = [parse_set_inter(source, info)]
  1196. while source.match("~~"):
  1197. items.append(parse_set_inter(source, info))
  1198. if len(items) == 1:
  1199. return items[0]
  1200. return SetSymDiff(info, items)
  1201. def parse_set_inter(source, info):
  1202. "Parses a set intersection ([x&&y])."
  1203. items = [parse_set_diff(source, info)]
  1204. while source.match("&&"):
  1205. items.append(parse_set_diff(source, info))
  1206. if len(items) == 1:
  1207. return items[0]
  1208. return SetInter(info, items)
  1209. def parse_set_diff(source, info):
  1210. "Parses a set difference ([x--y])."
  1211. items = [parse_set_imp_union(source, info)]
  1212. while source.match("--"):
  1213. items.append(parse_set_imp_union(source, info))
  1214. if len(items) == 1:
  1215. return items[0]
  1216. return SetDiff(info, items)
def parse_set_imp_union(source, info):
    "Parses a set implicit union ([xy])."
    version = (info.flags & _ALL_VERSIONS) or DEFAULT_VERSION
    items = [parse_set_member(source, info)]
    while True:
        saved_pos = source.pos
        if source.match("]"):
            # End of the set.
            source.pos = saved_pos
            break
        if version == VERSION1 and any(source.match(op) for op in SET_OPS):
            # The new behaviour has set operators.
            source.pos = saved_pos
            break
        items.append(parse_set_member(source, info))
    if len(items) == 1:
        return items[0]
    return SetUnion(info, items)
def parse_set_member(source, info):
    # A member is either a single item or a range "a-b".
    "Parses a member in a character set."
    # Parse a set item.
    start = parse_set_item(source, info)
    saved_pos1 = source.pos
    if (not isinstance(start, Character) or not start.positive or not
      source.match("-")):
        # It's not the start of a range.
        return start
    version = (info.flags & _ALL_VERSIONS) or DEFAULT_VERSION
    # It looks like the start of a range of characters.
    saved_pos2 = source.pos
    if version == VERSION1 and source.match("-"):
        # It's actually the set difference operator '--', so return the
        # character.
        source.pos = saved_pos1
        return start
    if source.match("]"):
        # We've reached the end of the set, so return both the character and
        # hyphen.
        source.pos = saved_pos2
        return SetUnion(info, [start, Character(ord("-"))])
    # Parse a set item.
    end = parse_set_item(source, info)
    if not isinstance(end, Character) or not end.positive:
        # It's not a range, so return the character, hyphen and property.
        return SetUnion(info, [start, Character(ord("-")), end])
    # It _is_ a range.
    if start.value > end.value:
        raise error("bad character range", source.string, source.pos)
    if start.value == end.value:
        # A degenerate range like "a-a" is just the character.
        return start
    return Range(start.value, end.value)
def parse_set_item(source, info):
    "Parses an item in a character set."
    version = (info.flags & _ALL_VERSIONS) or DEFAULT_VERSION
    if source.match("\\"):
        # An escape sequence in a set.
        return parse_escape(source, info, True)
    saved_pos = source.pos
    if source.match("[:"):
        # Looks like a POSIX character class.
        try:
            return parse_posix_class(source, info)
        except ParseError:
            # Not a POSIX character class.
            source.pos = saved_pos
    if version == VERSION1 and source.match("["):
        # It's the start of a nested set.
        # Negative set?
        negate = source.match("^")
        item = parse_set_union(source, info)
        if not source.match("]"):
            raise error("missing ]", source.string, source.pos)
        if negate:
            item = item.with_flags(positive=not item.positive)
        return item
    ch = source.get()
    if not ch:
        raise error("unterminated character set", source.string, source.pos)
    return Character(ord(ch))
  1296. def parse_posix_class(source, info):
  1297. "Parses a POSIX character class."
  1298. negate = source.match("^")
  1299. prop_name, name = parse_property_name(source)
  1300. if not source.match(":]"):
  1301. raise ParseError()
  1302. return lookup_property(prop_name, name, not negate, source, posix=True)
  1303. def float_to_rational(flt):
  1304. "Converts a float to a rational pair."
  1305. int_part = int(flt)
  1306. error = flt - int_part
  1307. if abs(error) < 0.0001:
  1308. return int_part, 1
  1309. den, num = float_to_rational(1.0 / error)
  1310. return int_part * den + num, den
  1311. def numeric_to_rational(numeric):
  1312. "Converts a numeric string to a rational string, if possible."
  1313. if numeric[ : 1] == "-":
  1314. sign, numeric = numeric[0], numeric[1 : ]
  1315. else:
  1316. sign = ""
  1317. parts = numeric.split("/")
  1318. if len(parts) == 2:
  1319. num, den = float_to_rational(float(parts[0]) / float(parts[1]))
  1320. elif len(parts) == 1:
  1321. num, den = float_to_rational(float(parts[0]))
  1322. else:
  1323. raise ValueError()
  1324. result = "{}{}/{}".format(sign, num, den)
  1325. if result.endswith("/1"):
  1326. return result[ : -2]
  1327. return result
  1328. def standardise_name(name):
  1329. "Standardises a property or value name."
  1330. try:
  1331. return numeric_to_rational("".join(name))
  1332. except (ValueError, ZeroDivisionError):
  1333. return "".join(ch for ch in name if ch not in "_- ").upper()
  1334. _POSIX_CLASSES = set('ALNUM DIGIT PUNCT XDIGIT'.split())
  1335. _BINARY_VALUES = set('YES Y NO N TRUE T FALSE F'.split())
def lookup_property(property, value, positive, source=None, posix=False):
    """Looks up a property, returning a Property node.

    property may be None, in which case value is tried in turn as a GC,
    script or block value, a binary property name, or an 'Is'/'In'
    prefixed name. Raises error when nothing matches.
    """
    # Normalise the names (which may still be lists).
    property = standardise_name(property) if property else None
    value = standardise_name(value)
    if (property, value) == ("GENERALCATEGORY", "ASSIGNED"):
        # 'Assigned' is handled as the inverse of 'Unassigned'.
        property, value, positive = "GENERALCATEGORY", "UNASSIGNED", not positive
    if posix and not property and value.upper() in _POSIX_CLASSES:
        # In POSIX mode these names refer to the POSIX classes.
        value = 'POSIX' + value
    if property:
        # Both the property and the value are provided.
        prop = PROPERTIES.get(property)
        if not prop:
            if not source:
                raise error("unknown property")
            raise error("unknown property", source.string, source.pos)
        prop_id, value_dict = prop
        val_id = value_dict.get(value)
        if val_id is None:
            if not source:
                raise error("unknown property value")
            raise error("unknown property value", source.string, source.pos)
        # Property codes pack the property id in the high 16 bits and the
        # value id in the low 16 bits.
        return Property((prop_id << 16) | val_id, positive)
    # Only the value is provided.
    # It might be the name of a GC, script or block value.
    for property in ("GC", "SCRIPT", "BLOCK"):
        prop_id, value_dict = PROPERTIES.get(property)
        val_id = value_dict.get(value)
        if val_id is not None:
            return Property((prop_id << 16) | val_id, positive)
    # It might be the name of a binary property.
    prop = PROPERTIES.get(value)
    if prop:
        prop_id, value_dict = prop
        if set(value_dict) == _BINARY_VALUES:
            # A bare binary property name means 'property = True'.
            return Property((prop_id << 16) | 1, positive)
        return Property(prop_id << 16, not positive)
    # It might be the name of a binary property starting with a prefix.
    if value.startswith("IS"):
        prop = PROPERTIES.get(value[2 : ])
        if prop:
            prop_id, value_dict = prop
            if "YES" in value_dict:
                return Property((prop_id << 16) | 1, positive)
    # It might be the name of a script or block starting with a prefix.
    for prefix, property in (("IS", "SCRIPT"), ("IN", "BLOCK")):
        if value.startswith(prefix):
            # Both prefixes are 2 characters long, hence value[2 : ].
            prop_id, value_dict = PROPERTIES.get(property)
            val_id = value_dict.get(value[2 : ])
            if val_id is not None:
                return Property((prop_id << 16) | val_id, positive)
    # Unknown property.
    if not source:
        raise error("unknown property")
    raise error("unknown property", source.string, source.pos)
def _compile_replacement(source, pattern, is_unicode):
    """Compiles a replacement template escape sequence.

    Returns (is_group, data): is_group is True when data holds a group
    number, False when it holds literal codepoints.
    """
    ch = source.get()
    if ch in ALPHA:
        # An alphabetic escape sequence.
        value = CHARACTER_ESCAPES.get(ch)
        if value:
            return False, [ord(value)]
        if ch in HEX_ESCAPES and (ch == "x" or is_unicode):
            # A hexadecimal escape sequence.
            return False, [parse_repl_hex_escape(source, HEX_ESCAPES[ch], ch)]
        if ch == "g":
            # A group preference.
            return True, [compile_repl_group(source, pattern)]
        if ch == "N" and is_unicode:
            # A named character.
            value = parse_repl_named_char(source)
            if value is not None:
                return False, [value]
        # Unknown alphabetic escape: keep the backslash and the character.
        return False, [ord("\\"), ord(ch)]
    if isinstance(source.sep, bytes):
        # Octal values are masked to a byte for bytes templates...
        octal_mask = 0xFF
    else:
        # ...and to 9 bits for str templates.
        octal_mask = 0x1FF
    if ch == "0":
        # An octal escape sequence: up to 3 octal digits.
        digits = ch
        while len(digits) < 3:
            saved_pos = source.pos
            ch = source.get()
            if ch not in OCT_DIGITS:
                source.pos = saved_pos
                break
            digits += ch
        return False, [int(digits, 8) & octal_mask]
    if ch in DIGITS:
        # Either an octal escape sequence (3 digits) or a group reference (max
        # 2 digits).
        digits = ch
        saved_pos = source.pos
        ch = source.get()
        if ch in DIGITS:
            digits += ch
            saved_pos = source.pos
            ch = source.get()
            if ch and is_octal(digits + ch):
                # An octal escape sequence.
                return False, [int(digits + ch, 8) & octal_mask]
        # A group reference.
        source.pos = saved_pos
        return True, [int(digits)]
    if ch == "\\":
        # An escaped backslash is a backslash.
        return False, [ord("\\")]
    if not ch:
        # A trailing backslash.
        raise error("bad escape (end of pattern)", source.string, source.pos)
    # An escaped non-backslash is a backslash followed by the literal.
    return False, [ord("\\"), ord(ch)]
  1450. def parse_repl_hex_escape(source, expected_len, type):
  1451. "Parses a hex escape sequence in a replacement string."
  1452. digits = []
  1453. for i in range(expected_len):
  1454. ch = source.get()
  1455. if ch not in HEX_DIGITS:
  1456. raise error("incomplete escape \\%s%s" % (type, ''.join(digits)),
  1457. source.string, source.pos)
  1458. digits.append(ch)
  1459. return int("".join(digits), 16)
  1460. def parse_repl_named_char(source):
  1461. "Parses a named character in a replacement string."
  1462. saved_pos = source.pos
  1463. if source.match("{"):
  1464. name = source.get_while(ALPHA | set(" "))
  1465. if source.match("}"):
  1466. try:
  1467. value = unicodedata.lookup(name)
  1468. return ord(value)
  1469. except KeyError:
  1470. raise error("undefined character name", source.string,
  1471. source.pos)
  1472. source.pos = saved_pos
  1473. return None
  1474. def compile_repl_group(source, pattern):
  1475. "Compiles a replacement template group reference."
  1476. source.expect("<")
  1477. name = parse_name(source, True, True)
  1478. source.expect(">")
  1479. if name.isdigit():
  1480. index = int(name)
  1481. if not 0 <= index <= pattern.groups:
  1482. raise error("invalid group reference", source.string, source.pos)
  1483. return index
  1484. try:
  1485. return pattern.groupindex[name]
  1486. except KeyError:
  1487. raise IndexError("unknown group")
# The regular expression is parsed into a syntax tree. The different types of
# node are defined below.

# Indentation unit used by the nodes' dump() methods.
INDENT = " "

# Flag bits attached to opcodes in the compiled code.
POSITIVE_OP = 0x1
ZEROWIDTH_OP = 0x2
FUZZY_OP = 0x4
REVERSE_OP = 0x8
REQUIRED_OP = 0x10

# Text fragments used by dump() for polarity and case sensitivity.
POS_TEXT = {False: "NON-MATCH", True: "MATCH"}
CASE_TEXT = {NOCASE: "", IGNORECASE: " SIMPLE_IGNORE_CASE", FULLCASE: "",
  FULLIGNORECASE: " FULL_IGNORE_CASE"}
  1499. def make_sequence(items):
  1500. if len(items) == 1:
  1501. return items[0]
  1502. return Sequence(items)
  1503. # Common base class for all nodes.
  1504. class RegexBase:
  1505. def __init__(self):
  1506. self._key = self.__class__
  1507. def with_flags(self, positive=None, case_flags=None, zerowidth=None):
  1508. if positive is None:
  1509. positive = self.positive
  1510. else:
  1511. positive = bool(positive)
  1512. if case_flags is None:
  1513. case_flags = self.case_flags
  1514. else:
  1515. case_flags = CASE_FLAGS_COMBINATIONS[case_flags & CASE_FLAGS]
  1516. if zerowidth is None:
  1517. zerowidth = self.zerowidth
  1518. else:
  1519. zerowidth = bool(zerowidth)
  1520. if (positive == self.positive and case_flags == self.case_flags and
  1521. zerowidth == self.zerowidth):
  1522. return self
  1523. return self.rebuild(positive, case_flags, zerowidth)
  1524. def fix_groups(self, pattern, reverse, fuzzy):
  1525. pass
  1526. def optimise(self, info, reverse):
  1527. return self
  1528. def pack_characters(self, info):
  1529. return self
  1530. def remove_captures(self):
  1531. return self
  1532. def is_atomic(self):
  1533. return True
  1534. def can_be_affix(self):
  1535. return True
  1536. def contains_group(self):
  1537. return False
  1538. def get_firstset(self, reverse):
  1539. raise _FirstSetError()
  1540. def has_simple_start(self):
  1541. return False
  1542. def compile(self, reverse=False, fuzzy=False):
  1543. return self._compile(reverse, fuzzy)
  1544. def is_empty(self):
  1545. return False
  1546. def __hash__(self):
  1547. return hash(self._key)
  1548. def __eq__(self, other):
  1549. return type(self) is type(other) and self._key == other._key
  1550. def __ne__(self, other):
  1551. return not self.__eq__(other)
  1552. def get_required_string(self, reverse):
  1553. return self.max_width(), None
  1554. # Base class for zero-width nodes.
  1555. class ZeroWidthBase(RegexBase):
  1556. def __init__(self, positive=True):
  1557. RegexBase.__init__(self)
  1558. self.positive = bool(positive)
  1559. self._key = self.__class__, self.positive
  1560. def get_firstset(self, reverse):
  1561. return set([None])
  1562. def _compile(self, reverse, fuzzy):
  1563. flags = 0
  1564. if self.positive:
  1565. flags |= POSITIVE_OP
  1566. if fuzzy:
  1567. flags |= FUZZY_OP
  1568. if reverse:
  1569. flags |= REVERSE_OP
  1570. return [(self._opcode, flags)]
  1571. def dump(self, indent, reverse):
  1572. print("{}{} {}".format(INDENT * indent, self._op_name,
  1573. POS_TEXT[self.positive]))
  1574. def max_width(self):
  1575. return 0
  1576. class Any(RegexBase):
  1577. _opcode = {False: OP.ANY, True: OP.ANY_REV}
  1578. _op_name = "ANY"
  1579. def has_simple_start(self):
  1580. return True
  1581. def _compile(self, reverse, fuzzy):
  1582. flags = 0
  1583. if fuzzy:
  1584. flags |= FUZZY_OP
  1585. return [(self._opcode[reverse], flags)]
  1586. def dump(self, indent, reverse):
  1587. print("{}{}".format(INDENT * indent, self._op_name))
  1588. def max_width(self):
  1589. return 1
class AnyAll(Any):
    """Variant of Any compiled to the ANY_ALL opcodes."""
    _opcode = {False: OP.ANY_ALL, True: OP.ANY_ALL_REV}
    _op_name = "ANY_ALL"
class AnyU(Any):
    """Variant of Any compiled to the ANY_U opcodes."""
    _opcode = {False: OP.ANY_U, True: OP.ANY_U_REV}
    _op_name = "ANY_U"
  1596. class Atomic(RegexBase):
  1597. def __init__(self, subpattern):
  1598. RegexBase.__init__(self)
  1599. self.subpattern = subpattern
  1600. def fix_groups(self, pattern, reverse, fuzzy):
  1601. self.subpattern.fix_groups(pattern, reverse, fuzzy)
  1602. def optimise(self, info, reverse):
  1603. self.subpattern = self.subpattern.optimise(info, reverse)
  1604. if self.subpattern.is_empty():
  1605. return self.subpattern
  1606. return self
  1607. def pack_characters(self, info):
  1608. self.subpattern = self.subpattern.pack_characters(info)
  1609. return self
  1610. def remove_captures(self):
  1611. self.subpattern = self.subpattern.remove_captures()
  1612. return self
  1613. def can_be_affix(self):
  1614. return self.subpattern.can_be_affix()
  1615. def contains_group(self):
  1616. return self.subpattern.contains_group()
  1617. def get_firstset(self, reverse):
  1618. return self.subpattern.get_firstset(reverse)
  1619. def has_simple_start(self):
  1620. return self.subpattern.has_simple_start()
  1621. def _compile(self, reverse, fuzzy):
  1622. return ([(OP.ATOMIC, )] + self.subpattern.compile(reverse, fuzzy) +
  1623. [(OP.END, )])
  1624. def dump(self, indent, reverse):
  1625. print("{}ATOMIC".format(INDENT * indent))
  1626. self.subpattern.dump(indent + 1, reverse)
  1627. def is_empty(self):
  1628. return self.subpattern.is_empty()
  1629. def __eq__(self, other):
  1630. return (type(self) is type(other) and self.subpattern ==
  1631. other.subpattern)
  1632. def max_width(self):
  1633. return self.subpattern.max_width()
  1634. def get_required_string(self, reverse):
  1635. return self.subpattern.get_required_string(reverse)
class Boundary(ZeroWidthBase):
    """Zero-width word-boundary node (OP.BOUNDARY)."""
    _opcode = OP.BOUNDARY
    _op_name = "BOUNDARY"
  1639. class Branch(RegexBase):
  1640. def __init__(self, branches):
  1641. RegexBase.__init__(self)
  1642. self.branches = branches
  1643. def fix_groups(self, pattern, reverse, fuzzy):
  1644. for b in self.branches:
  1645. b.fix_groups(pattern, reverse, fuzzy)
  1646. def optimise(self, info, reverse):
  1647. if not self.branches:
  1648. return Sequence([])
  1649. # Flatten branches within branches.
  1650. branches = Branch._flatten_branches(info, reverse, self.branches)
  1651. # Move any common prefix or suffix out of the branches.
  1652. if reverse:
  1653. suffix, branches = Branch._split_common_suffix(info, branches)
  1654. prefix = []
  1655. else:
  1656. prefix, branches = Branch._split_common_prefix(info, branches)
  1657. suffix = []
  1658. # Try to reduce adjacent single-character branches to sets.
  1659. branches = Branch._reduce_to_set(info, reverse, branches)
  1660. if len(branches) > 1:
  1661. sequence = [Branch(branches)]
  1662. if not prefix or not suffix:
  1663. # We might be able to add a quick precheck before the branches.
  1664. firstset = self._add_precheck(info, reverse, branches)
  1665. if firstset:
  1666. if reverse:
  1667. sequence.append(firstset)
  1668. else:
  1669. sequence.insert(0, firstset)
  1670. else:
  1671. sequence = branches
  1672. return make_sequence(prefix + sequence + suffix)
  1673. def _add_precheck(self, info, reverse, branches):
  1674. charset = set()
  1675. pos = -1 if reverse else 0
  1676. for branch in branches:
  1677. if type(branch) is Literal and branch.case_flags == NOCASE:
  1678. charset.add(branch.characters[pos])
  1679. else:
  1680. return
  1681. if not charset:
  1682. return None
  1683. return _check_firstset(info, reverse, [Character(c) for c in charset])
  1684. def pack_characters(self, info):
  1685. self.branches = [b.pack_characters(info) for b in self.branches]
  1686. return self
  1687. def remove_captures(self):
  1688. self.branches = [b.remove_captures() for b in self.branches]
  1689. return self
  1690. def is_atomic(self):
  1691. return all(b.is_atomic() for b in self.branches)
  1692. def can_be_affix(self):
  1693. return all(b.can_be_affix() for b in self.branches)
  1694. def contains_group(self):
  1695. return any(b.contains_group() for b in self.branches)
  1696. def get_firstset(self, reverse):
  1697. fs = set()
  1698. for b in self.branches:
  1699. fs |= b.get_firstset(reverse)
  1700. return fs or set([None])
  1701. def _compile(self, reverse, fuzzy):
  1702. code = [(OP.BRANCH, )]
  1703. for b in self.branches:
  1704. code.extend(b.compile(reverse, fuzzy))
  1705. code.append((OP.NEXT, ))
  1706. code[-1] = (OP.END, )
  1707. return code
  1708. def dump(self, indent, reverse):
  1709. print("{}BRANCH".format(INDENT * indent))
  1710. self.branches[0].dump(indent + 1, reverse)
  1711. for b in self.branches[1 : ]:
  1712. print("{}OR".format(INDENT * indent))
  1713. b.dump(indent + 1, reverse)
  1714. @staticmethod
  1715. def _flatten_branches(info, reverse, branches):
  1716. # Flatten the branches so that there aren't branches of branches.
  1717. new_branches = []
  1718. for b in branches:
  1719. b = b.optimise(info, reverse)
  1720. if isinstance(b, Branch):
  1721. new_branches.extend(b.branches)
  1722. else:
  1723. new_branches.append(b)
  1724. return new_branches
  1725. @staticmethod
  1726. def _split_common_prefix(info, branches):
  1727. # Common leading items can be moved out of the branches.
  1728. # Get the items in the branches.
  1729. alternatives = []
  1730. for b in branches:
  1731. if isinstance(b, Sequence):
  1732. alternatives.append(b.items)
  1733. else:
  1734. alternatives.append([b])
  1735. # What is the maximum possible length of the prefix?
  1736. max_count = min(len(a) for a in alternatives)
  1737. # What is the longest common prefix?
  1738. prefix = alternatives[0]
  1739. pos = 0
  1740. end_pos = max_count
  1741. while pos < end_pos and prefix[pos].can_be_affix() and all(a[pos] ==
  1742. prefix[pos] for a in alternatives):
  1743. pos += 1
  1744. count = pos
  1745. if info.flags & UNICODE:
  1746. # We need to check that we're not splitting a sequence of
  1747. # characters which could form part of full case-folding.
  1748. count = pos
  1749. while count > 0 and not all(Branch._can_split(a, count) for a in
  1750. alternatives):
  1751. count -= 1
  1752. # No common prefix is possible.
  1753. if count == 0:
  1754. return [], branches
  1755. # Rebuild the branches.
  1756. new_branches = []
  1757. for a in alternatives:
  1758. new_branches.append(make_sequence(a[count : ]))
  1759. return prefix[ : count], new_branches
  1760. @staticmethod
  1761. def _split_common_suffix(info, branches):
  1762. # Common trailing items can be moved out of the branches.
  1763. # Get the items in the branches.
  1764. alternatives = []
  1765. for b in branches:
  1766. if isinstance(b, Sequence):
  1767. alternatives.append(b.items)
  1768. else:
  1769. alternatives.append([b])
  1770. # What is the maximum possible length of the suffix?
  1771. max_count = min(len(a) for a in alternatives)
  1772. # What is the longest common suffix?
  1773. suffix = alternatives[0]
  1774. pos = -1
  1775. end_pos = -1 - max_count
  1776. while pos > end_pos and suffix[pos].can_be_affix() and all(a[pos] ==
  1777. suffix[pos] for a in alternatives):
  1778. pos -= 1
  1779. count = -1 - pos
  1780. if info.flags & UNICODE:
  1781. # We need to check that we're not splitting a sequence of
  1782. # characters which could form part of full case-folding.
  1783. while count > 0 and not all(Branch._can_split_rev(a, count) for a
  1784. in alternatives):
  1785. count -= 1
  1786. # No common suffix is possible.
  1787. if count == 0:
  1788. return [], branches
  1789. # Rebuild the branches.
  1790. new_branches = []
  1791. for a in alternatives:
  1792. new_branches.append(make_sequence(a[ : -count]))
  1793. return suffix[-count : ], new_branches
  1794. @staticmethod
  1795. def _can_split(items, count):
  1796. # Check the characters either side of the proposed split.
  1797. if not Branch._is_full_case(items, count - 1):
  1798. return True
  1799. if not Branch._is_full_case(items, count):
  1800. return True
  1801. # Check whether a 1-1 split would be OK.
  1802. if Branch._is_folded(items[count - 1 : count + 1]):
  1803. return False
  1804. # Check whether a 1-2 split would be OK.
  1805. if (Branch._is_full_case(items, count + 2) and
  1806. Branch._is_folded(items[count - 1 : count + 2])):
  1807. return False
  1808. # Check whether a 2-1 split would be OK.
  1809. if (Branch._is_full_case(items, count - 2) and
  1810. Branch._is_folded(items[count - 2 : count + 1])):
  1811. return False
  1812. return True
  1813. @staticmethod
  1814. def _can_split_rev(items, count):
  1815. end = len(items)
  1816. # Check the characters either side of the proposed split.
  1817. if not Branch._is_full_case(items, end - count):
  1818. return True
  1819. if not Branch._is_full_case(items, end - count - 1):
  1820. return True
  1821. # Check whether a 1-1 split would be OK.
  1822. if Branch._is_folded(items[end - count - 1 : end - count + 1]):
  1823. return False
  1824. # Check whether a 1-2 split would be OK.
  1825. if (Branch._is_full_case(items, end - count + 2) and
  1826. Branch._is_folded(items[end - count - 1 : end - count + 2])):
  1827. return False
  1828. # Check whether a 2-1 split would be OK.
  1829. if (Branch._is_full_case(items, end - count - 2) and
  1830. Branch._is_folded(items[end - count - 2 : end - count + 1])):
  1831. return False
  1832. return True
  1833. @staticmethod
  1834. def _merge_common_prefixes(info, reverse, branches):
  1835. # Branches with the same case-sensitive character prefix can be grouped
  1836. # together if they are separated only by other branches with a
  1837. # character prefix.
  1838. prefixed = defaultdict(list)
  1839. order = {}
  1840. new_branches = []
  1841. for b in branches:
  1842. if Branch._is_simple_character(b):
  1843. # Branch starts with a simple character.
  1844. prefixed[b.value].append([b])
  1845. order.setdefault(b.value, len(order))
  1846. elif (isinstance(b, Sequence) and b.items and
  1847. Branch._is_simple_character(b.items[0])):
  1848. # Branch starts with a simple character.
  1849. prefixed[b.items[0].value].append(b.items)
  1850. order.setdefault(b.items[0].value, len(order))
  1851. else:
  1852. Branch._flush_char_prefix(info, reverse, prefixed, order,
  1853. new_branches)
  1854. new_branches.append(b)
  1855. Branch._flush_char_prefix(info, prefixed, order, new_branches)
  1856. return new_branches
  1857. @staticmethod
  1858. def _is_simple_character(c):
  1859. return isinstance(c, Character) and c.positive and not c.case_flags
  1860. @staticmethod
  1861. def _reduce_to_set(info, reverse, branches):
  1862. # Can the branches be reduced to a set?
  1863. new_branches = []
  1864. items = set()
  1865. case_flags = NOCASE
  1866. for b in branches:
  1867. if isinstance(b, (Character, Property, SetBase)):
  1868. # Branch starts with a single character.
  1869. if b.case_flags != case_flags:
  1870. # Different case sensitivity, so flush.
  1871. Branch._flush_set_members(info, reverse, items, case_flags,
  1872. new_branches)
  1873. case_flags = b.case_flags
  1874. items.add(b.with_flags(case_flags=NOCASE))
  1875. else:
  1876. Branch._flush_set_members(info, reverse, items, case_flags,
  1877. new_branches)
  1878. new_branches.append(b)
  1879. Branch._flush_set_members(info, reverse, items, case_flags,
  1880. new_branches)
  1881. return new_branches
  1882. @staticmethod
  1883. def _flush_char_prefix(info, reverse, prefixed, order, new_branches):
  1884. # Flush the prefixed branches.
  1885. if not prefixed:
  1886. return
  1887. for value, branches in sorted(prefixed.items(), key=lambda pair:
  1888. order[pair[0]]):
  1889. if len(branches) == 1:
  1890. new_branches.append(make_sequence(branches[0]))
  1891. else:
  1892. subbranches = []
  1893. optional = False
  1894. for b in branches:
  1895. if len(b) > 1:
  1896. subbranches.append(make_sequence(b[1 : ]))
  1897. elif not optional:
  1898. subbranches.append(Sequence())
  1899. optional = True
  1900. sequence = Sequence([Character(value), Branch(subbranches)])
  1901. new_branches.append(sequence.optimise(info, reverse))
  1902. prefixed.clear()
  1903. order.clear()
  1904. @staticmethod
  1905. def _flush_set_members(info, reverse, items, case_flags, new_branches):
  1906. # Flush the set members.
  1907. if not items:
  1908. return
  1909. if len(items) == 1:
  1910. item = list(items)[0]
  1911. else:
  1912. item = SetUnion(info, list(items)).optimise(info, reverse)
  1913. new_branches.append(item.with_flags(case_flags=case_flags))
  1914. items.clear()
  1915. @staticmethod
  1916. def _is_full_case(items, i):
  1917. if not 0 <= i < len(items):
  1918. return False
  1919. item = items[i]
  1920. return (isinstance(item, Character) and item.positive and
  1921. (item.case_flags & FULLIGNORECASE) == FULLIGNORECASE)
  1922. @staticmethod
  1923. def _is_folded(items):
  1924. if len(items) < 2:
  1925. return False
  1926. for i in items:
  1927. if (not isinstance(i, Character) or not i.positive or not
  1928. i.case_flags):
  1929. return False
  1930. folded = "".join(chr(i.value) for i in items)
  1931. folded = _regex.fold_case(FULL_CASE_FOLDING, folded)
  1932. # Get the characters which expand to multiple codepoints on folding.
  1933. expanding_chars = _regex.get_expand_on_folding()
  1934. for c in expanding_chars:
  1935. if folded == _regex.fold_case(FULL_CASE_FOLDING, c):
  1936. return True
  1937. return False
  1938. def is_empty(self):
  1939. return all(b.is_empty() for b in self.branches)
  1940. def __eq__(self, other):
  1941. return type(self) is type(other) and self.branches == other.branches
  1942. def max_width(self):
  1943. return max(b.max_width() for b in self.branches)
  1944. class CallGroup(RegexBase):
  1945. def __init__(self, info, group, position):
  1946. RegexBase.__init__(self)
  1947. self.info = info
  1948. self.group = group
  1949. self.position = position
  1950. self._key = self.__class__, self.group
  1951. def fix_groups(self, pattern, reverse, fuzzy):
  1952. try:
  1953. self.group = int(self.group)
  1954. except ValueError:
  1955. try:
  1956. self.group = self.info.group_index[self.group]
  1957. except KeyError:
  1958. raise error("invalid group reference", pattern, self.position)
  1959. if not 0 <= self.group <= self.info.group_count:
  1960. raise error("unknown group", pattern, self.position)
  1961. if self.group > 0 and self.info.open_group_count[self.group] > 1:
  1962. raise error("ambiguous group reference", pattern, self.position)
  1963. self.info.group_calls.append((self, reverse, fuzzy))
  1964. self._key = self.__class__, self.group
  1965. def remove_captures(self):
  1966. raise error("group reference not allowed", pattern, self.position)
  1967. def _compile(self, reverse, fuzzy):
  1968. return [(OP.GROUP_CALL, self.call_ref)]
  1969. def dump(self, indent, reverse):
  1970. print("{}GROUP_CALL {}".format(INDENT * indent, self.group))
  1971. def __eq__(self, other):
  1972. return type(self) is type(other) and self.group == other.group
  1973. def max_width(self):
  1974. return UNLIMITED
  1975. def __del__(self):
  1976. self.info = None
  1977. class CallRef(RegexBase):
  1978. def __init__(self, ref, parsed):
  1979. self.ref = ref
  1980. self.parsed = parsed
  1981. def _compile(self, reverse, fuzzy):
  1982. return ([(OP.CALL_REF, self.ref)] + self.parsed._compile(reverse,
  1983. fuzzy) + [(OP.END, )])
class Character(RegexBase):
    """A node matching a single codepoint."""
    # Opcode selected by (case_flags, reverse); full-case flags map to the
    # same opcodes as their simple counterparts here (full-case expansion is
    # handled in _compile).
    _opcode = {(NOCASE, False): OP.CHARACTER, (IGNORECASE, False):
      OP.CHARACTER_IGN, (FULLCASE, False): OP.CHARACTER, (FULLIGNORECASE,
      False): OP.CHARACTER_IGN, (NOCASE, True): OP.CHARACTER_REV, (IGNORECASE,
      True): OP.CHARACTER_IGN_REV, (FULLCASE, True): OP.CHARACTER_REV,
      (FULLIGNORECASE, True): OP.CHARACTER_IGN_REV}

    def __init__(self, value, positive=True, case_flags=NOCASE,
      zerowidth=False):
        """value is the codepoint; positive is False for a negated match."""
        RegexBase.__init__(self)
        self.value = value
        self.positive = bool(positive)
        self.case_flags = CASE_FLAGS_COMBINATIONS[case_flags]
        self.zerowidth = bool(zerowidth)

        if (self.positive and (self.case_flags & FULLIGNORECASE) ==
          FULLIGNORECASE):
            # The full case-folded form, which may be longer than one
            # character.
            self.folded = _regex.fold_case(FULL_CASE_FOLDING, chr(self.value))
        else:
            self.folded = chr(self.value)

        self._key = (self.__class__, self.value, self.positive,
          self.case_flags, self.zerowidth)

    def rebuild(self, positive, case_flags, zerowidth):
        return Character(self.value, positive, case_flags, zerowidth)

    def optimise(self, info, reverse, in_set=False):
        return self

    def get_firstset(self, reverse):
        return set([self])

    def has_simple_start(self):
        return True

    def _compile(self, reverse, fuzzy):
        flags = 0
        if self.positive:
            flags |= POSITIVE_OP
        if self.zerowidth:
            flags |= ZEROWIDTH_OP
        if fuzzy:
            flags |= FUZZY_OP

        code = PrecompiledCode([self._opcode[self.case_flags, reverse], flags,
          self.value])

        if len(self.folded) > 1:
            # The character expands on full case-folding, so also try to
            # match its folded string form.
            code = Branch([code, String([ord(c) for c in self.folded],
              case_flags=self.case_flags)])

        return code.compile(reverse, fuzzy)

    def dump(self, indent, reverse):
        display = ascii(chr(self.value)).lstrip("bu")
        print("{}CHARACTER {} {}{}".format(INDENT * indent,
          POS_TEXT[self.positive], display, CASE_TEXT[self.case_flags]))

    def matches(self, ch):
        return (ch == self.value) == self.positive

    def max_width(self):
        return len(self.folded)

    def get_required_string(self, reverse):
        if not self.positive:
            return 1, None

        self.folded_characters = tuple(ord(c) for c in self.folded)

        return 0, self
class Conditional(RegexBase):
    """A conditional node: matches yes_item if `group` has matched,
    otherwise no_item (compiled with the GROUP_EXISTS opcode).
    """
    def __init__(self, info, group, yes_item, no_item, position):
        RegexBase.__init__(self)
        self.info = info
        self.group = group
        self.yes_item = yes_item
        self.no_item = no_item
        # Position in the pattern string, for error reporting.
        self.position = position
    def fix_groups(self, pattern, reverse, fuzzy):
        # Resolve a group name to a group number, then validate it.
        try:
            self.group = int(self.group)
        except ValueError:
            try:
                self.group = self.info.group_index[self.group]
            except KeyError:
                if self.group == 'DEFINE':
                    # 'DEFINE' is a special name unless there's a group with
                    # that name.
                    self.group = 0
                else:
                    raise error("unknown group", pattern, self.position)
        if not 0 <= self.group <= self.info.group_count:
            raise error("invalid group reference", pattern, self.position)
        self.yes_item.fix_groups(pattern, reverse, fuzzy)
        self.no_item.fix_groups(pattern, reverse, fuzzy)
    def optimise(self, info, reverse):
        yes_item = self.yes_item.optimise(info, reverse)
        no_item = self.no_item.optimise(info, reverse)
        return Conditional(info, self.group, yes_item, no_item, self.position)
    def pack_characters(self, info):
        self.yes_item = self.yes_item.pack_characters(info)
        self.no_item = self.no_item.pack_characters(info)
        return self
    def remove_captures(self):
        self.yes_item = self.yes_item.remove_captures()
        self.no_item = self.no_item.remove_captures()
    def is_atomic(self):
        return self.yes_item.is_atomic() and self.no_item.is_atomic()
    def can_be_affix(self):
        return self.yes_item.can_be_affix() and self.no_item.can_be_affix()
    def contains_group(self):
        return self.yes_item.contains_group() or self.no_item.contains_group()
    def get_firstset(self, reverse):
        # Either branch may start the match.
        return (self.yes_item.get_firstset(reverse) |
          self.no_item.get_firstset(reverse))
    def _compile(self, reverse, fuzzy):
        code = [(OP.GROUP_EXISTS, self.group)]
        code.extend(self.yes_item.compile(reverse, fuzzy))
        add_code = self.no_item.compile(reverse, fuzzy)
        if add_code:
            # Only emit the alternative branch if it's non-empty.
            code.append((OP.NEXT, ))
            code.extend(add_code)
        code.append((OP.END, ))
        return code
    def dump(self, indent, reverse):
        print("{}GROUP_EXISTS {}".format(INDENT * indent, self.group))
        self.yes_item.dump(indent + 1, reverse)
        if not self.no_item.is_empty():
            print("{}OR".format(INDENT * indent))
            self.no_item.dump(indent + 1, reverse)
    def is_empty(self):
        return self.yes_item.is_empty() and self.no_item.is_empty()
    def __eq__(self, other):
        return type(self) is type(other) and (self.group, self.yes_item,
          self.no_item) == (other.group, other.yes_item, other.no_item)
    def max_width(self):
        return max(self.yes_item.max_width(), self.no_item.max_width())
    def __del__(self):
        # Drop the info reference to break the reference cycle.
        self.info = None
class DefaultBoundary(ZeroWidthBase):
    """Zero-width assertion compiling to OP.DEFAULT_BOUNDARY."""
    _opcode = OP.DEFAULT_BOUNDARY
    _op_name = "DEFAULT_BOUNDARY"
class DefaultEndOfWord(ZeroWidthBase):
    """Zero-width assertion compiling to OP.DEFAULT_END_OF_WORD."""
    _opcode = OP.DEFAULT_END_OF_WORD
    _op_name = "DEFAULT_END_OF_WORD"
class DefaultStartOfWord(ZeroWidthBase):
    """Zero-width assertion compiling to OP.DEFAULT_START_OF_WORD."""
    _opcode = OP.DEFAULT_START_OF_WORD
    _op_name = "DEFAULT_START_OF_WORD"
class EndOfLine(ZeroWidthBase):
    """Zero-width assertion compiling to OP.END_OF_LINE."""
    _opcode = OP.END_OF_LINE
    _op_name = "END_OF_LINE"
class EndOfLineU(EndOfLine):
    """EndOfLine variant compiling to the _U (Unicode line-separator) opcode."""
    _opcode = OP.END_OF_LINE_U
    _op_name = "END_OF_LINE_U"
class EndOfString(ZeroWidthBase):
    """Zero-width assertion compiling to OP.END_OF_STRING."""
    _opcode = OP.END_OF_STRING
    _op_name = "END_OF_STRING"
class EndOfStringLine(ZeroWidthBase):
    """Zero-width assertion compiling to OP.END_OF_STRING_LINE."""
    _opcode = OP.END_OF_STRING_LINE
    _op_name = "END_OF_STRING_LINE"
class EndOfStringLineU(EndOfStringLine):
    """EndOfStringLine variant compiling to the _U (Unicode) opcode."""
    _opcode = OP.END_OF_STRING_LINE_U
    _op_name = "END_OF_STRING_LINE_U"
class EndOfWord(ZeroWidthBase):
    """Zero-width assertion compiling to OP.END_OF_WORD."""
    _opcode = OP.END_OF_WORD
    _op_name = "END_OF_WORD"
class Failure(ZeroWidthBase):
    """Zero-width node that compiles to the FAILURE opcode."""
    _op_name = "FAILURE"
    def _compile(self, reverse, fuzzy):
        # Direction and fuzziness are irrelevant: FAILURE takes no operands.
        return [(OP.FAILURE, )]
class Fuzzy(RegexBase):
    """A fuzzy (approximate) match of a subpattern.

    `constraints` maps error-type keys ("d", "i", "s" and the generic
    "e") to (min, max) pairs, where max of None means unlimited, plus an
    optional "cost" equation (dict of per-type coefficients and "max")
    and an optional "test" subpattern.  __init__ fills in defaults for
    any missing entries; the defaulting order below is significant.
    """
    def __init__(self, subpattern, constraints=None):
        RegexBase.__init__(self)
        if constraints is None:
            constraints = {}
        self.subpattern = subpattern
        self.constraints = constraints
        # If an error type is mentioned in the cost equation, then its maximum
        # defaults to unlimited.
        if "cost" in constraints:
            for e in "dis":
                if e in constraints["cost"]:
                    constraints.setdefault(e, (0, None))
        # If any error type is mentioned, then all the error maxima default to
        # 0, otherwise they default to unlimited.
        if set(constraints) & set("dis"):
            for e in "dis":
                constraints.setdefault(e, (0, 0))
        else:
            for e in "dis":
                constraints.setdefault(e, (0, None))
        # The maximum of the generic error type defaults to unlimited.
        constraints.setdefault("e", (0, None))
        # The cost equation defaults to equal costs. Also, the cost of any
        # error type not mentioned in the cost equation defaults to 0.
        if "cost" in constraints:
            for e in "dis":
                constraints["cost"].setdefault(e, 0)
        else:
            constraints["cost"] = {"d": 1, "i": 1, "s": 1, "max":
              constraints["e"][1]}
    def fix_groups(self, pattern, reverse, fuzzy):
        # Everything inside a fuzzy region is treated as fuzzy.
        self.subpattern.fix_groups(pattern, reverse, True)
    def pack_characters(self, info):
        self.subpattern = self.subpattern.pack_characters(info)
        return self
    def remove_captures(self):
        self.subpattern = self.subpattern.remove_captures()
        return self
    def is_atomic(self):
        return self.subpattern.is_atomic()
    def contains_group(self):
        return self.subpattern.contains_group()
    def _compile(self, reverse, fuzzy):
        # The individual limits.
        arguments = []
        for e in "dise":
            v = self.constraints[e]
            arguments.append(v[0])
            arguments.append(UNLIMITED if v[1] is None else v[1])
        # The coeffs of the cost equation.
        for e in "dis":
            arguments.append(self.constraints["cost"][e])
        # The maximum of the cost equation.
        v = self.constraints["cost"]["max"]
        arguments.append(UNLIMITED if v is None else v)
        flags = 0
        if reverse:
            flags |= REVERSE_OP
        test = self.constraints.get("test")
        if test:
            # FUZZY_EXT carries a test subpattern before the main one.
            return ([(OP.FUZZY_EXT, flags) + tuple(arguments)] +
              test.compile(reverse, True) + [(OP.NEXT,)] +
              self.subpattern.compile(reverse, True) + [(OP.END,)])
        return ([(OP.FUZZY, flags) + tuple(arguments)] +
          self.subpattern.compile(reverse, True) + [(OP.END,)])
    def dump(self, indent, reverse):
        constraints = self._constraints_to_string()
        if constraints:
            constraints = " " + constraints
        print("{}FUZZY{}".format(INDENT * indent, constraints))
        self.subpattern.dump(indent + 1, reverse)
    def is_empty(self):
        return self.subpattern.is_empty()
    def __eq__(self, other):
        return (type(self) is type(other) and self.subpattern ==
          other.subpattern and self.constraints == other.constraints)
    def max_width(self):
        # No useful width bound for a fuzzy match.
        return UNLIMITED
    def _constraints_to_string(self):
        """Render the constraints as text for dump()."""
        constraints = []
        for name in "ids":
            min, max = self.constraints[name]
            if max == 0:
                continue
            con = ""
            if min > 0:
                con = "{}<=".format(min)
            con += name
            if max is not None:
                con += "<={}".format(max)
            constraints.append(con)
        cost = []
        for name in "ids":
            coeff = self.constraints["cost"][name]
            if coeff > 0:
                cost.append("{}{}".format(coeff, name))
        limit = self.constraints["cost"]["max"]
        if limit is not None and limit > 0:
            cost = "{}<={}".format("+".join(cost), limit)
            constraints.append(cost)
        return ",".join(constraints)
class Grapheme(RegexBase):
    """Matches a whole grapheme cluster as a unit."""
    def _compile(self, reverse, fuzzy):
        # Match at least 1 character until a grapheme boundary is reached. Note
        # that this is the same whether matching forwards or backwards.
        grapheme_matcher = Atomic(Sequence([LazyRepeat(AnyAll(), 1, None),
          GraphemeBoundary()]))
        return grapheme_matcher.compile(reverse, fuzzy)
    def dump(self, indent, reverse):
        print("{}GRAPHEME".format(INDENT * indent))
    def max_width(self):
        # A grapheme cluster can contain any number of codepoints.
        return UNLIMITED
class GraphemeBoundary:
    """Zero-width grapheme-cluster boundary; note: not a RegexBase subclass,
    it only provides compile() for use inside Grapheme._compile."""
    def compile(self, reverse, fuzzy):
        return [(OP.GRAPHEME_BOUNDARY, 1)]
class GreedyRepeat(RegexBase):
    """A greedy repeat of a subpattern, min_count..max_count times.

    max_count of None means unbounded.  Subclasses override _opcode to
    get lazy/possessive behaviour with the same structure.
    """
    _opcode = OP.GREEDY_REPEAT
    _op_name = "GREEDY_REPEAT"
    def __init__(self, subpattern, min_count, max_count):
        RegexBase.__init__(self)
        self.subpattern = subpattern
        self.min_count = min_count
        # None means no upper bound.
        self.max_count = max_count
    def fix_groups(self, pattern, reverse, fuzzy):
        self.subpattern.fix_groups(pattern, reverse, fuzzy)
    def optimise(self, info, reverse):
        subpattern = self.subpattern.optimise(info, reverse)
        # type(self) preserves the subclass (lazy/possessive) in the copy.
        return type(self)(subpattern, self.min_count, self.max_count)
    def pack_characters(self, info):
        self.subpattern = self.subpattern.pack_characters(info)
        return self
    def remove_captures(self):
        self.subpattern = self.subpattern.remove_captures()
        return self
    def is_atomic(self):
        # Only a fixed-count repeat of an atomic item is atomic.
        return self.min_count == self.max_count and self.subpattern.is_atomic()
    def can_be_affix(self):
        return False
    def contains_group(self):
        return self.subpattern.contains_group()
    def get_firstset(self, reverse):
        fs = self.subpattern.get_firstset(reverse)
        if self.min_count == 0:
            # The repeat may match nothing at all.
            fs.add(None)
        return fs
    def _compile(self, reverse, fuzzy):
        repeat = [self._opcode, self.min_count]
        if self.max_count is None:
            repeat.append(UNLIMITED)
        else:
            repeat.append(self.max_count)
        subpattern = self.subpattern.compile(reverse, fuzzy)
        if not subpattern:
            # Nothing to repeat; emit nothing.
            return []
        return ([tuple(repeat)] + subpattern + [(OP.END, )])
    def dump(self, indent, reverse):
        if self.max_count is None:
            limit = "INF"
        else:
            limit = self.max_count
        print("{}{} {} {}".format(INDENT * indent, self._op_name,
          self.min_count, limit))
        self.subpattern.dump(indent + 1, reverse)
    def is_empty(self):
        return self.subpattern.is_empty()
    def __eq__(self, other):
        return type(self) is type(other) and (self.subpattern, self.min_count,
          self.max_count) == (other.subpattern, other.min_count,
          other.max_count)
    def max_width(self):
        if self.max_count is None:
            return UNLIMITED
        return self.subpattern.max_width() * self.max_count
    def get_required_string(self, reverse):
        max_count = UNLIMITED if self.max_count is None else self.max_count
        if self.min_count == 0:
            # Optional, so nothing is required; report only the width.
            w = self.subpattern.max_width() * max_count
            return min(w, UNLIMITED), None
        ofs, req = self.subpattern.get_required_string(reverse)
        if req:
            return ofs, req
        w = self.subpattern.max_width() * max_count
        return min(w, UNLIMITED), None
class PossessiveRepeat(GreedyRepeat):
    """A repeat wrapped in ATOMIC, so it never backtracks into what it
    has already matched."""
    def is_atomic(self):
        return True
    def _compile(self, reverse, fuzzy):
        subpattern = self.subpattern.compile(reverse, fuzzy)
        if not subpattern:
            # Nothing to repeat; emit nothing.
            return []
        repeat = [self._opcode, self.min_count]
        if self.max_count is None:
            repeat.append(UNLIMITED)
        else:
            repeat.append(self.max_count)
        # ATOMIC ... END wraps the whole repeat; the inner END closes the
        # repeat itself.
        return ([(OP.ATOMIC, ), tuple(repeat)] + subpattern + [(OP.END, ),
          (OP.END, )])
    def dump(self, indent, reverse):
        print("{}ATOMIC".format(INDENT * indent))
        if self.max_count is None:
            limit = "INF"
        else:
            limit = self.max_count
        print("{}{} {} {}".format(INDENT * (indent + 1), self._op_name,
          self.min_count, limit))
        self.subpattern.dump(indent + 2, reverse)
  2347. class Group(RegexBase):
  2348. def __init__(self, info, group, subpattern):
  2349. RegexBase.__init__(self)
  2350. self.info = info
  2351. self.group = group
  2352. self.subpattern = subpattern
  2353. self.call_ref = None
  2354. def fix_groups(self, pattern, reverse, fuzzy):
  2355. self.info.defined_groups[self.group] = (self, reverse, fuzzy)
  2356. self.subpattern.fix_groups(pattern, reverse, fuzzy)
  2357. def optimise(self, info, reverse):
  2358. subpattern = self.subpattern.optimise(info, reverse)
  2359. return Group(self.info, self.group, subpattern)
  2360. def pack_characters(self, info):
  2361. self.subpattern = self.subpattern.pack_characters(info)
  2362. return self
  2363. def remove_captures(self):
  2364. return self.subpattern.remove_captures()
  2365. def is_atomic(self):
  2366. return self.subpattern.is_atomic()
  2367. def can_be_affix(self):
  2368. return False
  2369. def contains_group(self):
  2370. return True
  2371. def get_firstset(self, reverse):
  2372. return self.subpattern.get_firstset(reverse)
  2373. def has_simple_start(self):
  2374. return self.subpattern.has_simple_start()
  2375. def _compile(self, reverse, fuzzy):
  2376. code = []
  2377. key = self.group, reverse, fuzzy
  2378. ref = self.info.call_refs.get(key)
  2379. if ref is not None:
  2380. code += [(OP.CALL_REF, ref)]
  2381. public_group = private_group = self.group
  2382. if private_group < 0:
  2383. public_group = self.info.private_groups[private_group]
  2384. private_group = self.info.group_count - private_group
  2385. code += ([(OP.GROUP, int(not reverse), private_group, public_group)] +
  2386. self.subpattern.compile(reverse, fuzzy) + [(OP.END, )])
  2387. if ref is not None:
  2388. code += [(OP.END, )]
  2389. return code
  2390. def dump(self, indent, reverse):
  2391. group = self.group
  2392. if group < 0:
  2393. group = private_groups[group]
  2394. print("{}GROUP {}".format(INDENT * indent, group))
  2395. self.subpattern.dump(indent + 1, reverse)
  2396. def __eq__(self, other):
  2397. return (type(self) is type(other) and (self.group, self.subpattern) ==
  2398. (other.group, other.subpattern))
  2399. def max_width(self):
  2400. return self.subpattern.max_width()
  2401. def get_required_string(self, reverse):
  2402. return self.subpattern.get_required_string(reverse)
  2403. def __del__(self):
  2404. self.info = None
class Keep(ZeroWidthBase):
    """Zero-width node compiling to OP.KEEP."""
    _opcode = OP.KEEP
    _op_name = "KEEP"
class LazyRepeat(GreedyRepeat):
    """A lazy (minimal) repeat; same structure as GreedyRepeat but
    compiled with the LAZY_REPEAT opcode."""
    _opcode = OP.LAZY_REPEAT
    _op_name = "LAZY_REPEAT"
class LookAround(RegexBase):
    """A lookaround assertion: lookahead (behind=False) or lookbehind
    (behind=True), positive or negative."""
    _dir_text = {False: "AHEAD", True: "BEHIND"}
    def __init__(self, behind, positive, subpattern):
        RegexBase.__init__(self)
        self.behind = bool(behind)
        self.positive = bool(positive)
        self.subpattern = subpattern
    def fix_groups(self, pattern, reverse, fuzzy):
        # The subpattern is processed in the lookaround's own direction,
        # not the enclosing pattern's.
        self.subpattern.fix_groups(pattern, self.behind, fuzzy)
    def optimise(self, info, reverse):
        subpattern = self.subpattern.optimise(info, self.behind)
        if self.positive and subpattern.is_empty():
            # An empty positive lookaround always succeeds; drop the wrapper.
            return subpattern
        return LookAround(self.behind, self.positive, subpattern)
    def pack_characters(self, info):
        self.subpattern = self.subpattern.pack_characters(info)
        return self
    def remove_captures(self):
        return self.subpattern.remove_captures()
    def is_atomic(self):
        return self.subpattern.is_atomic()
    def can_be_affix(self):
        return self.subpattern.can_be_affix()
    def contains_group(self):
        return self.subpattern.contains_group()
    def get_firstset(self, reverse):
        # Only a positive lookaround scanning in the same direction as the
        # match contributes a first-set.
        if self.positive and self.behind == reverse:
            return self.subpattern.get_firstset(reverse)
        return set([None])
    def _compile(self, reverse, fuzzy):
        flags = 0
        if self.positive:
            flags |= POSITIVE_OP
        if fuzzy:
            flags |= FUZZY_OP
        if reverse:
            flags |= REVERSE_OP
        # NOTE(review): the subpattern is compiled with only the direction
        # argument; fuzziness relies on compile()'s default — confirm intended.
        return ([(OP.LOOKAROUND, flags, int(not self.behind))] +
          self.subpattern.compile(self.behind) + [(OP.END, )])
    def dump(self, indent, reverse):
        print("{}LOOK{} {}".format(INDENT * indent,
          self._dir_text[self.behind], POS_TEXT[self.positive]))
        self.subpattern.dump(indent + 1, self.behind)
    def is_empty(self):
        return self.positive and self.subpattern.is_empty()
    def __eq__(self, other):
        return type(self) is type(other) and (self.behind, self.positive,
          self.subpattern) == (other.behind, other.positive, other.subpattern)
    def max_width(self):
        # Assertions consume no characters.
        return 0
class LookAroundConditional(RegexBase):
    """A conditional whose test is a lookaround: matches yes_item if the
    lookaround subpattern succeeds, otherwise no_item."""
    _dir_text = {False: "AHEAD", True: "BEHIND"}
    def __init__(self, behind, positive, subpattern, yes_item, no_item):
        RegexBase.__init__(self)
        self.behind = bool(behind)
        self.positive = bool(positive)
        self.subpattern = subpattern
        self.yes_item = yes_item
        self.no_item = no_item
    def fix_groups(self, pattern, reverse, fuzzy):
        self.subpattern.fix_groups(pattern, reverse, fuzzy)
        self.yes_item.fix_groups(pattern, reverse, fuzzy)
        self.no_item.fix_groups(pattern, reverse, fuzzy)
    def optimise(self, info, reverse):
        subpattern = self.subpattern.optimise(info, self.behind)
        yes_item = self.yes_item.optimise(info, self.behind)
        no_item = self.no_item.optimise(info, self.behind)
        return LookAroundConditional(self.behind, self.positive, subpattern,
          yes_item, no_item)
    def pack_characters(self, info):
        self.subpattern = self.subpattern.pack_characters(info)
        self.yes_item = self.yes_item.pack_characters(info)
        self.no_item = self.no_item.pack_characters(info)
        return self
    def remove_captures(self):
        self.subpattern = self.subpattern.remove_captures()
        self.yes_item = self.yes_item.remove_captures()
        self.no_item = self.no_item.remove_captures()
    def is_atomic(self):
        return (self.subpattern.is_atomic() and self.yes_item.is_atomic() and
          self.no_item.is_atomic())
    def can_be_affix(self):
        return (self.subpattern.can_be_affix() and self.yes_item.can_be_affix()
          and self.no_item.can_be_affix())
    def contains_group(self):
        return (self.subpattern.contains_group() or
          self.yes_item.contains_group() or self.no_item.contains_group())
    def _compile(self, reverse, fuzzy):
        # CONDITIONAL test NEXT yes-branch [NEXT no-branch] END.
        code = [(OP.CONDITIONAL, int(self.positive), int(not self.behind))]
        code.extend(self.subpattern.compile(self.behind, fuzzy))
        code.append((OP.NEXT, ))
        code.extend(self.yes_item.compile(reverse, fuzzy))
        add_code = self.no_item.compile(reverse, fuzzy)
        if add_code:
            code.append((OP.NEXT, ))
            code.extend(add_code)
        code.append((OP.END, ))
        return code
    def dump(self, indent, reverse):
        print("{}CONDITIONAL {} {}".format(INDENT * indent,
          self._dir_text[self.behind], POS_TEXT[self.positive]))
        self.subpattern.dump(indent + 1, self.behind)
        print("{}EITHER".format(INDENT * indent))
        self.yes_item.dump(indent + 1, reverse)
        if not self.no_item.is_empty():
            print("{}OR".format(INDENT * indent))
            self.no_item.dump(indent + 1, reverse)
    def is_empty(self):
        # NOTE(review): this parses as (subpattern and yes_item) or no_item
        # due to operator precedence — confirm that grouping is intended.
        return (self.subpattern.is_empty() and self.yes_item.is_empty() or
          self.no_item.is_empty())
    def __eq__(self, other):
        return type(self) is type(other) and (self.subpattern, self.yes_item,
          self.no_item) == (other.subpattern, other.yes_item, other.no_item)
    def max_width(self):
        return max(self.yes_item.max_width(), self.no_item.max_width())
    def get_required_string(self, reverse):
        return self.max_width(), None
class PrecompiledCode(RegexBase):
    """Wraps an already-compiled code sequence, emitted verbatim."""
    def __init__(self, code):
        # NOTE(review): unlike sibling nodes, RegexBase.__init__ is not
        # called here — confirm whether that is deliberate.
        self.code = code
    def _compile(self, reverse, fuzzy):
        return [tuple(self.code)]
class Property(RegexBase):
    """A character-property test node."""
    _opcode = {(NOCASE, False): OP.PROPERTY, (IGNORECASE, False):
      OP.PROPERTY_IGN, (FULLCASE, False): OP.PROPERTY, (FULLIGNORECASE, False):
      OP.PROPERTY_IGN, (NOCASE, True): OP.PROPERTY_REV, (IGNORECASE, True):
      OP.PROPERTY_IGN_REV, (FULLCASE, True): OP.PROPERTY_REV, (FULLIGNORECASE,
      True): OP.PROPERTY_IGN_REV}
    def __init__(self, value, positive=True, case_flags=NOCASE,
      zerowidth=False):
        RegexBase.__init__(self)
        # value packs the property id in the high 16 bits and the property
        # value in the low 16 bits (see dump()).
        self.value = value
        self.positive = bool(positive)
        self.case_flags = CASE_FLAGS_COMBINATIONS[case_flags]
        self.zerowidth = bool(zerowidth)
        self._key = (self.__class__, self.value, self.positive,
          self.case_flags, self.zerowidth)
    def rebuild(self, positive, case_flags, zerowidth):
        """Return a copy of this node with the given flags."""
        return Property(self.value, positive, case_flags, zerowidth)
    def optimise(self, info, reverse, in_set=False):
        return self
    def get_firstset(self, reverse):
        return set([self])
    def has_simple_start(self):
        return True
    def _compile(self, reverse, fuzzy):
        flags = 0
        if self.positive:
            flags |= POSITIVE_OP
        if self.zerowidth:
            flags |= ZEROWIDTH_OP
        if fuzzy:
            flags |= FUZZY_OP
        return [(self._opcode[self.case_flags, reverse], flags, self.value)]
    def dump(self, indent, reverse):
        prop = PROPERTY_NAMES[self.value >> 16]
        name, value = prop[0], prop[1][self.value & 0xFFFF]
        print("{}PROPERTY {} {}:{}{}".format(INDENT * indent,
          POS_TEXT[self.positive], name, value, CASE_TEXT[self.case_flags]))
    def matches(self, ch):
        return _regex.has_property_value(self.value, ch) == self.positive
    def max_width(self):
        # A property matches exactly one character.
        return 1
class Prune(ZeroWidthBase):
    """Zero-width node compiling to OP.PRUNE."""
    _op_name = "PRUNE"
    def _compile(self, reverse, fuzzy):
        # PRUNE takes no operands; direction and fuzziness are irrelevant.
        return [(OP.PRUNE, )]
class Range(RegexBase):
    """An inclusive range of codepoints [lower, upper]."""
    _opcode = {(NOCASE, False): OP.RANGE, (IGNORECASE, False): OP.RANGE_IGN,
      (FULLCASE, False): OP.RANGE, (FULLIGNORECASE, False): OP.RANGE_IGN,
      (NOCASE, True): OP.RANGE_REV, (IGNORECASE, True): OP.RANGE_IGN_REV,
      (FULLCASE, True): OP.RANGE_REV, (FULLIGNORECASE, True): OP.RANGE_IGN_REV}
    _op_name = "RANGE"
    def __init__(self, lower, upper, positive=True, case_flags=NOCASE,
      zerowidth=False):
        RegexBase.__init__(self)
        self.lower = lower
        self.upper = upper
        self.positive = bool(positive)
        self.case_flags = CASE_FLAGS_COMBINATIONS[case_flags]
        self.zerowidth = bool(zerowidth)
        self._key = (self.__class__, self.lower, self.upper, self.positive,
          self.case_flags, self.zerowidth)
    def rebuild(self, positive, case_flags, zerowidth):
        """Return a copy of this range with the given flags."""
        return Range(self.lower, self.upper, positive, case_flags, zerowidth)
    def optimise(self, info, reverse, in_set=False):
        """Expand into a Branch when full case-folding applies to members."""
        # Is the range case-sensitive?
        if not self.positive or not (self.case_flags & IGNORECASE) or in_set:
            return self
        # Is full case-folding possible?
        if (not (info.flags & UNICODE) or (self.case_flags & FULLIGNORECASE) !=
          FULLIGNORECASE):
            return self
        # Get the characters which expand to multiple codepoints on folding.
        expanding_chars = _regex.get_expand_on_folding()
        # Get the folded characters in the range.
        items = []
        for ch in expanding_chars:
            if self.lower <= ord(ch) <= self.upper:
                folded = _regex.fold_case(FULL_CASE_FOLDING, ch)
                items.append(String([ord(c) for c in folded],
                  case_flags=self.case_flags))
        if not items:
            # We can fall back to simple case-folding.
            return self
        if len(items) < self.upper - self.lower + 1:
            # Not all the characters are covered by the full case-folding.
            items.insert(0, self)
        return Branch(items)
    def _compile(self, reverse, fuzzy):
        flags = 0
        if self.positive:
            flags |= POSITIVE_OP
        if self.zerowidth:
            flags |= ZEROWIDTH_OP
        if fuzzy:
            flags |= FUZZY_OP
        return [(self._opcode[self.case_flags, reverse], flags, self.lower,
          self.upper)]
    def dump(self, indent, reverse):
        display_lower = ascii(chr(self.lower)).lstrip("bu")
        display_upper = ascii(chr(self.upper)).lstrip("bu")
        print("{}RANGE {} {} {}{}".format(INDENT * indent,
          POS_TEXT[self.positive], display_lower, display_upper,
          CASE_TEXT[self.case_flags]))
    def matches(self, ch):
        return (self.lower <= ch <= self.upper) == self.positive
    def max_width(self):
        # A range matches exactly one character.
        return 1
  2640. class RefGroup(RegexBase):
  2641. _opcode = {(NOCASE, False): OP.REF_GROUP, (IGNORECASE, False):
  2642. OP.REF_GROUP_IGN, (FULLCASE, False): OP.REF_GROUP, (FULLIGNORECASE,
  2643. False): OP.REF_GROUP_FLD, (NOCASE, True): OP.REF_GROUP_REV, (IGNORECASE,
  2644. True): OP.REF_GROUP_IGN_REV, (FULLCASE, True): OP.REF_GROUP_REV,
  2645. (FULLIGNORECASE, True): OP.REF_GROUP_FLD_REV}
  2646. def __init__(self, info, group, position, case_flags=NOCASE):
  2647. RegexBase.__init__(self)
  2648. self.info = info
  2649. self.group = group
  2650. self.position = position
  2651. self.case_flags = CASE_FLAGS_COMBINATIONS[case_flags]
  2652. self._key = self.__class__, self.group, self.case_flags
  2653. def fix_groups(self, pattern, reverse, fuzzy):
  2654. try:
  2655. self.group = int(self.group)
  2656. except ValueError:
  2657. try:
  2658. self.group = self.info.group_index[self.group]
  2659. except KeyError:
  2660. raise error("unknown group", pattern, self.position)
  2661. if not 1 <= self.group <= self.info.group_count:
  2662. raise error("invalid group reference", pattern, self.position)
  2663. self._key = self.__class__, self.group, self.case_flags
  2664. def remove_captures(self):
  2665. raise error("group reference not allowed", pattern, self.position)
  2666. def _compile(self, reverse, fuzzy):
  2667. flags = 0
  2668. if fuzzy:
  2669. flags |= FUZZY_OP
  2670. return [(self._opcode[self.case_flags, reverse], flags, self.group)]
  2671. def dump(self, indent, reverse):
  2672. print("{}REF_GROUP {}{}".format(INDENT * indent, self.group,
  2673. CASE_TEXT[self.case_flags]))
  2674. def max_width(self):
  2675. return UNLIMITED
  2676. def __del__(self):
  2677. self.info = None
class SearchAnchor(ZeroWidthBase):
    """Zero-width assertion compiling to OP.SEARCH_ANCHOR."""
    _opcode = OP.SEARCH_ANCHOR
    _op_name = "SEARCH_ANCHOR"
class Sequence(RegexBase):
    """A concatenation of pattern nodes, matched one after another."""
    def __init__(self, items=None):
        RegexBase.__init__(self)
        if items is None:
            items = []
        self.items = items
    def fix_groups(self, pattern, reverse, fuzzy):
        for s in self.items:
            s.fix_groups(pattern, reverse, fuzzy)
    def optimise(self, info, reverse):
        # Flatten the sequences.
        items = []
        for s in self.items:
            s = s.optimise(info, reverse)
            if isinstance(s, Sequence):
                items.extend(s.items)
            else:
                items.append(s)
        return make_sequence(items)
    def pack_characters(self, info):
        """Pack runs of single-character nodes into String nodes, flushing
        whenever the case sensitivity changes or a non-character node is
        seen."""
        items = []
        characters = []
        case_flags = NOCASE
        for s in self.items:
            if type(s) is Character and s.positive and not s.zerowidth:
                if s.case_flags != case_flags:
                    # Different case sensitivity, so flush, unless neither the
                    # previous nor the new character are cased.
                    if s.case_flags or is_cased_i(info, s.value):
                        Sequence._flush_characters(info, characters,
                          case_flags, items)
                    case_flags = s.case_flags
                characters.append(s.value)
            elif type(s) is String or type(s) is Literal:
                if s.case_flags != case_flags:
                    # Different case sensitivity, so flush, unless neither the
                    # previous nor the new string are cased.
                    if s.case_flags or any(is_cased_i(info, c) for c in
                      characters):
                        Sequence._flush_characters(info, characters,
                          case_flags, items)
                    case_flags = s.case_flags
                characters.extend(s.characters)
            else:
                Sequence._flush_characters(info, characters, case_flags, items)
                items.append(s.pack_characters(info))
        Sequence._flush_characters(info, characters, case_flags, items)
        return make_sequence(items)
    def remove_captures(self):
        self.items = [s.remove_captures() for s in self.items]
        return self
    def is_atomic(self):
        return all(s.is_atomic() for s in self.items)
    def can_be_affix(self):
        return False
    def contains_group(self):
        return any(s.contains_group() for s in self.items)
    def get_firstset(self, reverse):
        fs = set()
        items = self.items
        if reverse:
            # NOTE(review): 'items' aliases self.items, so this reverses the
            # sequence in place — confirm that is intended/harmless here.
            items.reverse()
        for s in items:
            fs |= s.get_firstset(reverse)
            if None not in fs:
                # Every path must consume a character; stop accumulating.
                return fs
            fs.discard(None)
        # Some path may match nothing at all.
        return fs | set([None])
    def has_simple_start(self):
        return bool(self.items) and self.items[0].has_simple_start()
    def _compile(self, reverse, fuzzy):
        seq = self.items
        if reverse:
            # Compile the items in reverse order when matching backwards.
            seq = seq[::-1]
        code = []
        for s in seq:
            code.extend(s.compile(reverse, fuzzy))
        return code
    def dump(self, indent, reverse):
        for s in self.items:
            s.dump(indent, reverse)
    @staticmethod
    def _flush_characters(info, characters, case_flags, items):
        """Turn the pending `characters` into Character/String nodes on
        `items`, then clear the pending list."""
        if not characters:
            return
        # Disregard case_flags if all of the characters are case-less.
        if case_flags & IGNORECASE:
            if not any(is_cased_i(info, c) for c in characters):
                case_flags = NOCASE
        if (case_flags & FULLIGNORECASE) == FULLIGNORECASE:
            literals = Sequence._fix_full_casefold(characters)
            for item in literals:
                chars = item.characters
                if len(chars) == 1:
                    items.append(Character(chars[0], case_flags=item.case_flags))
                else:
                    items.append(String(chars, case_flags=item.case_flags))
        else:
            if len(characters) == 1:
                items.append(Character(characters[0], case_flags=case_flags))
            else:
                items.append(String(characters, case_flags=case_flags))
        characters[:] = []
    @staticmethod
    def _fix_full_casefold(characters):
        # Split a literal needing full case-folding into chunks that need it
        # and chunks that can use simple case-folding, which is faster.
        expanded = [_regex.fold_case(FULL_CASE_FOLDING, c) for c in
          _regex.get_expand_on_folding()]
        string = _regex.fold_case(FULL_CASE_FOLDING, ''.join(chr(c)
          for c in characters)).lower()
        chunks = []
        for e in expanded:
            found = string.find(e)
            while found >= 0:
                chunks.append((found, found + len(e)))
                found = string.find(e, found + 1)
        pos = 0
        literals = []
        for start, end in Sequence._merge_chunks(chunks):
            if pos < start:
                literals.append(Literal(characters[pos : start],
                  case_flags=IGNORECASE))
            literals.append(Literal(characters[start : end],
              case_flags=FULLIGNORECASE))
            pos = end
        if pos < len(characters):
            literals.append(Literal(characters[pos : ], case_flags=IGNORECASE))
        return literals
    @staticmethod
    def _merge_chunks(chunks):
        """Merge overlapping/adjacent (start, end) chunks into maximal
        non-overlapping spans, sorted by position."""
        if len(chunks) < 2:
            return chunks
        chunks.sort()
        start, end = chunks[0]
        new_chunks = []
        for s, e in chunks[1 : ]:
            if s <= end:
                end = max(end, e)
            else:
                new_chunks.append((start, end))
                start, end = s, e
        new_chunks.append((start, end))
        return new_chunks
    def is_empty(self):
        return all(i.is_empty() for i in self.items)
    def __eq__(self, other):
        return type(self) is type(other) and self.items == other.items
    def max_width(self):
        return sum(s.max_width() for s in self.items)
    def get_required_string(self, reverse):
        seq = self.items
        if reverse:
            seq = seq[::-1]
        offset = 0
        for s in seq:
            ofs, req = s.get_required_string(reverse)
            offset += ofs
            if req:
                return offset, req
        return offset, None
class SetBase(RegexBase):
    """Base class for character-set nodes; subclasses supply _opcode and
    _op_name (class continues below)."""
    def __init__(self, info, items, positive=True, case_flags=NOCASE,
      zerowidth=False):
        RegexBase.__init__(self)
        self.info = info
        self.items = tuple(items)
        self.positive = bool(positive)
        self.case_flags = CASE_FLAGS_COMBINATIONS[case_flags]
        self.zerowidth = bool(zerowidth)
        # A set matches exactly one character.
        self.char_width = 1
        self._key = (self.__class__, self.items, self.positive,
          self.case_flags, self.zerowidth)
  2855. def rebuild(self, positive, case_flags, zerowidth):
  2856. return type(self)(self.info, self.items, positive, case_flags,
  2857. zerowidth).optimise(self.info, False)
  2858. def get_firstset(self, reverse):
  2859. return set([self])
  2860. def has_simple_start(self):
  2861. return True
  2862. def _compile(self, reverse, fuzzy):
  2863. flags = 0
  2864. if self.positive:
  2865. flags |= POSITIVE_OP
  2866. if self.zerowidth:
  2867. flags |= ZEROWIDTH_OP
  2868. if fuzzy:
  2869. flags |= FUZZY_OP
  2870. code = [(self._opcode[self.case_flags, reverse], flags)]
  2871. for m in self.items:
  2872. code.extend(m.compile())
  2873. code.append((OP.END, ))
  2874. return code
  2875. def dump(self, indent, reverse):
  2876. print("{}{} {}{}".format(INDENT * indent, self._op_name,
  2877. POS_TEXT[self.positive], CASE_TEXT[self.case_flags]))
  2878. for i in self.items:
  2879. i.dump(indent + 1, reverse)
  2880. def _handle_case_folding(self, info, in_set):
  2881. # Is the set case-sensitive?
  2882. if not self.positive or not (self.case_flags & IGNORECASE) or in_set:
  2883. return self
  2884. # Is full case-folding possible?
  2885. if (not (self.info.flags & UNICODE) or (self.case_flags &
  2886. FULLIGNORECASE) != FULLIGNORECASE):
  2887. return self
  2888. # Get the characters which expand to multiple codepoints on folding.
  2889. expanding_chars = _regex.get_expand_on_folding()
  2890. # Get the folded characters in the set.
  2891. items = []
  2892. seen = set()
  2893. for ch in expanding_chars:
  2894. if self.matches(ord(ch)):
  2895. folded = _regex.fold_case(FULL_CASE_FOLDING, ch)
  2896. if folded not in seen:
  2897. items.append(String([ord(c) for c in folded],
  2898. case_flags=self.case_flags))
  2899. seen.add(folded)
  2900. if not items:
  2901. # We can fall back to simple case-folding.
  2902. return self
  2903. return Branch([self] + items)
  2904. def max_width(self):
  2905. # Is the set case-sensitive?
  2906. if not self.positive or not (self.case_flags & IGNORECASE):
  2907. return 1
  2908. # Is full case-folding possible?
  2909. if (not (self.info.flags & UNICODE) or (self.case_flags &
  2910. FULLIGNORECASE) != FULLIGNORECASE):
  2911. return 1
  2912. # Get the characters which expand to multiple codepoints on folding.
  2913. expanding_chars = _regex.get_expand_on_folding()
  2914. # Get the folded characters in the set.
  2915. seen = set()
  2916. for ch in expanding_chars:
  2917. if self.matches(ord(ch)):
  2918. folded = _regex.fold_case(FULL_CASE_FOLDING, ch)
  2919. seen.add(folded)
  2920. if not seen:
  2921. return 1
  2922. return max(len(folded) for folded in seen)
  2923. def __del__(self):
  2924. self.info = None
  2925. class SetDiff(SetBase):
  2926. _opcode = {(NOCASE, False): OP.SET_DIFF, (IGNORECASE, False):
  2927. OP.SET_DIFF_IGN, (FULLCASE, False): OP.SET_DIFF, (FULLIGNORECASE, False):
  2928. OP.SET_DIFF_IGN, (NOCASE, True): OP.SET_DIFF_REV, (IGNORECASE, True):
  2929. OP.SET_DIFF_IGN_REV, (FULLCASE, True): OP.SET_DIFF_REV, (FULLIGNORECASE,
  2930. True): OP.SET_DIFF_IGN_REV}
  2931. _op_name = "SET_DIFF"
  2932. def optimise(self, info, reverse, in_set=False):
  2933. items = self.items
  2934. if len(items) > 2:
  2935. items = [items[0], SetUnion(info, items[1 : ])]
  2936. if len(items) == 1:
  2937. return items[0].with_flags(case_flags=self.case_flags,
  2938. zerowidth=self.zerowidth).optimise(info, reverse, in_set)
  2939. self.items = tuple(m.optimise(info, reverse, in_set=True) for m in
  2940. items)
  2941. return self._handle_case_folding(info, in_set)
  2942. def matches(self, ch):
  2943. m = self.items[0].matches(ch) and not self.items[1].matches(ch)
  2944. return m == self.positive
  2945. class SetInter(SetBase):
  2946. _opcode = {(NOCASE, False): OP.SET_INTER, (IGNORECASE, False):
  2947. OP.SET_INTER_IGN, (FULLCASE, False): OP.SET_INTER, (FULLIGNORECASE,
  2948. False): OP.SET_INTER_IGN, (NOCASE, True): OP.SET_INTER_REV, (IGNORECASE,
  2949. True): OP.SET_INTER_IGN_REV, (FULLCASE, True): OP.SET_INTER_REV,
  2950. (FULLIGNORECASE, True): OP.SET_INTER_IGN_REV}
  2951. _op_name = "SET_INTER"
  2952. def optimise(self, info, reverse, in_set=False):
  2953. items = []
  2954. for m in self.items:
  2955. m = m.optimise(info, reverse, in_set=True)
  2956. if isinstance(m, SetInter) and m.positive:
  2957. # Intersection in intersection.
  2958. items.extend(m.items)
  2959. else:
  2960. items.append(m)
  2961. if len(items) == 1:
  2962. return items[0].with_flags(case_flags=self.case_flags,
  2963. zerowidth=self.zerowidth).optimise(info, reverse, in_set)
  2964. self.items = tuple(items)
  2965. return self._handle_case_folding(info, in_set)
  2966. def matches(self, ch):
  2967. m = all(i.matches(ch) for i in self.items)
  2968. return m == self.positive
  2969. class SetSymDiff(SetBase):
  2970. _opcode = {(NOCASE, False): OP.SET_SYM_DIFF, (IGNORECASE, False):
  2971. OP.SET_SYM_DIFF_IGN, (FULLCASE, False): OP.SET_SYM_DIFF, (FULLIGNORECASE,
  2972. False): OP.SET_SYM_DIFF_IGN, (NOCASE, True): OP.SET_SYM_DIFF_REV,
  2973. (IGNORECASE, True): OP.SET_SYM_DIFF_IGN_REV, (FULLCASE, True):
  2974. OP.SET_SYM_DIFF_REV, (FULLIGNORECASE, True): OP.SET_SYM_DIFF_IGN_REV}
  2975. _op_name = "SET_SYM_DIFF"
  2976. def optimise(self, info, reverse, in_set=False):
  2977. items = []
  2978. for m in self.items:
  2979. m = m.optimise(info, reverse, in_set=True)
  2980. if isinstance(m, SetSymDiff) and m.positive:
  2981. # Symmetric difference in symmetric difference.
  2982. items.extend(m.items)
  2983. else:
  2984. items.append(m)
  2985. if len(items) == 1:
  2986. return items[0].with_flags(case_flags=self.case_flags,
  2987. zerowidth=self.zerowidth).optimise(info, reverse, in_set)
  2988. self.items = tuple(items)
  2989. return self._handle_case_folding(info, in_set)
  2990. def matches(self, ch):
  2991. m = False
  2992. for i in self.items:
  2993. m = m != i.matches(ch)
  2994. return m == self.positive
  2995. class SetUnion(SetBase):
  2996. _opcode = {(NOCASE, False): OP.SET_UNION, (IGNORECASE, False):
  2997. OP.SET_UNION_IGN, (FULLCASE, False): OP.SET_UNION, (FULLIGNORECASE,
  2998. False): OP.SET_UNION_IGN, (NOCASE, True): OP.SET_UNION_REV, (IGNORECASE,
  2999. True): OP.SET_UNION_IGN_REV, (FULLCASE, True): OP.SET_UNION_REV,
  3000. (FULLIGNORECASE, True): OP.SET_UNION_IGN_REV}
  3001. _op_name = "SET_UNION"
  3002. def optimise(self, info, reverse, in_set=False):
  3003. items = []
  3004. for m in self.items:
  3005. m = m.optimise(info, reverse, in_set=True)
  3006. if isinstance(m, SetUnion) and m.positive:
  3007. # Union in union.
  3008. items.extend(m.items)
  3009. else:
  3010. items.append(m)
  3011. if len(items) == 1:
  3012. i = items[0]
  3013. return i.with_flags(positive=i.positive == self.positive,
  3014. case_flags=self.case_flags,
  3015. zerowidth=self.zerowidth).optimise(info, reverse, in_set)
  3016. self.items = tuple(items)
  3017. return self._handle_case_folding(info, in_set)
  3018. def _compile(self, reverse, fuzzy):
  3019. flags = 0
  3020. if self.positive:
  3021. flags |= POSITIVE_OP
  3022. if self.zerowidth:
  3023. flags |= ZEROWIDTH_OP
  3024. if fuzzy:
  3025. flags |= FUZZY_OP
  3026. characters, others = defaultdict(list), []
  3027. for m in self.items:
  3028. if isinstance(m, Character):
  3029. characters[m.positive].append(m.value)
  3030. else:
  3031. others.append(m)
  3032. code = [(self._opcode[self.case_flags, reverse], flags)]
  3033. for positive, values in characters.items():
  3034. flags = 0
  3035. if positive:
  3036. flags |= POSITIVE_OP
  3037. if len(values) == 1:
  3038. code.append((OP.CHARACTER, flags, values[0]))
  3039. else:
  3040. code.append((OP.STRING, flags, len(values)) + tuple(values))
  3041. for m in others:
  3042. code.extend(m.compile())
  3043. code.append((OP.END, ))
  3044. return code
  3045. def matches(self, ch):
  3046. m = any(i.matches(ch) for i in self.items)
  3047. return m == self.positive
  3048. class Skip(ZeroWidthBase):
  3049. _op_name = "SKIP"
  3050. _opcode = OP.SKIP
  3051. class StartOfLine(ZeroWidthBase):
  3052. _opcode = OP.START_OF_LINE
  3053. _op_name = "START_OF_LINE"
  3054. class StartOfLineU(StartOfLine):
  3055. _opcode = OP.START_OF_LINE_U
  3056. _op_name = "START_OF_LINE_U"
  3057. class StartOfString(ZeroWidthBase):
  3058. _opcode = OP.START_OF_STRING
  3059. _op_name = "START_OF_STRING"
  3060. class StartOfWord(ZeroWidthBase):
  3061. _opcode = OP.START_OF_WORD
  3062. _op_name = "START_OF_WORD"
  3063. class String(RegexBase):
  3064. _opcode = {(NOCASE, False): OP.STRING, (IGNORECASE, False): OP.STRING_IGN,
  3065. (FULLCASE, False): OP.STRING, (FULLIGNORECASE, False): OP.STRING_FLD,
  3066. (NOCASE, True): OP.STRING_REV, (IGNORECASE, True): OP.STRING_IGN_REV,
  3067. (FULLCASE, True): OP.STRING_REV, (FULLIGNORECASE, True):
  3068. OP.STRING_FLD_REV}
  3069. def __init__(self, characters, case_flags=NOCASE):
  3070. self.characters = tuple(characters)
  3071. self.case_flags = CASE_FLAGS_COMBINATIONS[case_flags]
  3072. if (self.case_flags & FULLIGNORECASE) == FULLIGNORECASE:
  3073. folded_characters = []
  3074. for char in self.characters:
  3075. folded = _regex.fold_case(FULL_CASE_FOLDING, chr(char))
  3076. folded_characters.extend(ord(c) for c in folded)
  3077. else:
  3078. folded_characters = self.characters
  3079. self.folded_characters = tuple(folded_characters)
  3080. self.required = False
  3081. self._key = self.__class__, self.characters, self.case_flags
  3082. def get_firstset(self, reverse):
  3083. if reverse:
  3084. pos = -1
  3085. else:
  3086. pos = 0
  3087. return set([Character(self.characters[pos],
  3088. case_flags=self.case_flags)])
  3089. def has_simple_start(self):
  3090. return True
  3091. def _compile(self, reverse, fuzzy):
  3092. flags = 0
  3093. if fuzzy:
  3094. flags |= FUZZY_OP
  3095. if self.required:
  3096. flags |= REQUIRED_OP
  3097. return [(self._opcode[self.case_flags, reverse], flags,
  3098. len(self.folded_characters)) + self.folded_characters]
  3099. def dump(self, indent, reverse):
  3100. display = ascii("".join(chr(c) for c in self.characters)).lstrip("bu")
  3101. print("{}STRING {}{}".format(INDENT * indent, display,
  3102. CASE_TEXT[self.case_flags]))
  3103. def max_width(self):
  3104. return len(self.folded_characters)
  3105. def get_required_string(self, reverse):
  3106. return 0, self
  3107. class Literal(String):
  3108. def dump(self, indent, reverse):
  3109. literal = ''.join(chr(c) for c in self.characters)
  3110. display = ascii(literal).lstrip("bu")
  3111. print("{}LITERAL MATCH {}{}".format(INDENT * indent, display,
  3112. CASE_TEXT[self.case_flags]))
class StringSet(Branch):
    "A named list of strings, matched as a branch of alternative sequences."
    def __init__(self, info, name, case_flags=NOCASE):
        self.info = info
        self.name = name
        self.case_flags = CASE_FLAGS_COMBINATIONS[case_flags]
        self._key = self.__class__, self.name, self.case_flags
        self.set_key = (name, self.case_flags)
        # Register the named list so the compiled pattern can refer to it.
        if self.set_key not in info.named_lists_used:
            info.named_lists_used[self.set_key] = len(info.named_lists_used)
        index = self.info.named_lists_used[self.set_key]
        # The strings come from the keyword argument with this set's name.
        items = self.info.kwargs[self.name]
        case_flags = self.case_flags
        encoding = self.info.flags & _ALL_ENCODINGS
        fold_flags = encoding | case_flags
        # NOTE(review): 'index' and 'fold_flags' appear unused below —
        # possibly leftovers; confirm before removing.
        choices = []
        for string in items:
            if isinstance(string, str):
                string = [ord(c) for c in string]
            choices.append([Character(c, case_flags=case_flags) for c in
              string])
        # Sort from longest to shortest.
        choices.sort(key=len, reverse=True)
        self.branches = [Sequence(choice) for choice in choices]
    def dump(self, indent, reverse):
        print("{}STRING_SET {}{}".format(INDENT * indent, self.name,
          CASE_TEXT[self.case_flags]))
    def __del__(self):
        # Drop the reference to the Info object to help break cycles.
        self.info = None
class Source:
    "Scanner for the regular expression source string."
    def __init__(self, string):
        # Accept both str and bytes patterns; a bytes pattern is handled as
        # latin-1 text, with char_type converting a code back to bytes.
        if isinstance(string, str):
            self.string = string
            self.char_type = chr
        else:
            self.string = string.decode("latin-1")
            self.char_type = lambda c: bytes([c])
        self.pos = 0
        # When True (VERBOSE mode), whitespace and '#' comments are skipped.
        self.ignore_space = False
        # An empty string/bytes of the same type as the original pattern.
        self.sep = string[ : 0]
    def get(self):
        "Return the next character, or an empty string at the end."
        string = self.string
        pos = self.pos
        try:
            if self.ignore_space:
                while True:
                    if string[pos].isspace():
                        # Skip over the whitespace.
                        pos += 1
                    elif string[pos] == "#":
                        # Skip over the comment to the end of the line.
                        pos = string.index("\n", pos)
                    else:
                        break
            ch = string[pos]
            self.pos = pos + 1
            return ch
        except IndexError:
            # We've reached the end of the string.
            self.pos = pos
            return string[ : 0]
        except ValueError:
            # The comment extended to the end of the string.
            self.pos = len(string)
            return string[ : 0]
    def get_many(self, count=1):
        "Return up to *count* characters, fewer if the string ends first."
        string = self.string
        pos = self.pos
        try:
            if self.ignore_space:
                substring = []
                while len(substring) < count:
                    while True:
                        if string[pos].isspace():
                            # Skip over the whitespace.
                            pos += 1
                        elif string[pos] == "#":
                            # Skip over the comment to the end of the line.
                            pos = string.index("\n", pos)
                        else:
                            break
                    substring.append(string[pos])
                    pos += 1
                substring = "".join(substring)
            else:
                substring = string[pos : pos + count]
                pos += len(substring)
            self.pos = pos
            return substring
        except IndexError:
            # We've reached the end of the string.
            self.pos = len(string)
            return "".join(substring)
        except ValueError:
            # The comment extended to the end of the string.
            self.pos = len(string)
            return "".join(substring)
    def get_while(self, test_set, include=True):
        """Return the run of characters whose membership in *test_set*
        equals *include*, advancing past them."""
        string = self.string
        pos = self.pos
        if self.ignore_space:
            try:
                substring = []
                while True:
                    if string[pos].isspace():
                        # Skip over the whitespace.
                        pos += 1
                    elif string[pos] == "#":
                        # Skip over the comment to the end of the line.
                        pos = string.index("\n", pos)
                    elif (string[pos] in test_set) == include:
                        substring.append(string[pos])
                        pos += 1
                    else:
                        break
                self.pos = pos
            except IndexError:
                # We've reached the end of the string.
                self.pos = len(string)
            except ValueError:
                # The comment extended to the end of the string.
                self.pos = len(string)
            return "".join(substring)
        else:
            try:
                while (string[pos] in test_set) == include:
                    pos += 1
                substring = string[self.pos : pos]
                self.pos = pos
                return substring
            except IndexError:
                # We've reached the end of the string.
                substring = string[self.pos : pos]
                self.pos = pos
                return substring
    def skip_while(self, test_set, include=True):
        """Advance past the run of characters whose membership in *test_set*
        equals *include*, returning nothing."""
        string = self.string
        pos = self.pos
        try:
            if self.ignore_space:
                while True:
                    if string[pos].isspace():
                        # Skip over the whitespace.
                        pos += 1
                    elif string[pos] == "#":
                        # Skip over the comment to the end of the line.
                        pos = string.index("\n", pos)
                    elif (string[pos] in test_set) == include:
                        pos += 1
                    else:
                        break
            else:
                while (string[pos] in test_set) == include:
                    pos += 1
            self.pos = pos
        except IndexError:
            # We've reached the end of the string.
            self.pos = len(string)
        except ValueError:
            # The comment extended to the end of the string.
            self.pos = len(string)
    def match(self, substring):
        """If *substring* comes next (ignoring skippable text when in
        VERBOSE mode), consume it and return True; otherwise leave the
        position unchanged and return False."""
        string = self.string
        pos = self.pos
        if self.ignore_space:
            try:
                for c in substring:
                    while True:
                        if string[pos].isspace():
                            # Skip over the whitespace.
                            pos += 1
                        elif string[pos] == "#":
                            # Skip over the comment to the end of the line.
                            pos = string.index("\n", pos)
                        else:
                            break
                    if string[pos] != c:
                        return False
                    pos += 1
                self.pos = pos
                return True
            except IndexError:
                # We've reached the end of the string.
                return False
            except ValueError:
                # The comment extended to the end of the string.
                return False
        else:
            if not string.startswith(substring, pos):
                return False
            self.pos = pos + len(substring)
            return True
    def expect(self, substring):
        "Consume *substring* or raise a parse error."
        if not self.match(substring):
            raise error("missing {}".format(substring), self.string, self.pos)
    def at_end(self):
        "Return True if only ignorable text (if any) remains."
        string = self.string
        pos = self.pos
        try:
            if self.ignore_space:
                while True:
                    if string[pos].isspace():
                        pos += 1
                    elif string[pos] == "#":
                        pos = string.index("\n", pos)
                    else:
                        break
            return pos >= len(string)
        except IndexError:
            # We've reached the end of the string.
            return True
        except ValueError:
            # The comment extended to the end of the string.
            return True
  3327. class Info:
  3328. "Info about the regular expression."
  3329. def __init__(self, flags=0, char_type=None, kwargs={}):
  3330. flags |= DEFAULT_FLAGS[(flags & _ALL_VERSIONS) or DEFAULT_VERSION]
  3331. self.flags = flags
  3332. self.global_flags = flags
  3333. self.inline_locale = False
  3334. self.kwargs = kwargs
  3335. self.group_count = 0
  3336. self.group_index = {}
  3337. self.group_name = {}
  3338. self.char_type = char_type
  3339. self.named_lists_used = {}
  3340. self.open_groups = []
  3341. self.open_group_count = {}
  3342. self.defined_groups = {}
  3343. self.group_calls = []
  3344. self.private_groups = {}
  3345. def open_group(self, name=None):
  3346. group = self.group_index.get(name)
  3347. if group is None:
  3348. while True:
  3349. self.group_count += 1
  3350. if name is None or self.group_count not in self.group_name:
  3351. break
  3352. group = self.group_count
  3353. if name:
  3354. self.group_index[name] = group
  3355. self.group_name[group] = name
  3356. if group in self.open_groups:
  3357. # We have a nested named group. We'll assign it a private group
  3358. # number, initially negative until we can assign a proper
  3359. # (positive) number.
  3360. group_alias = -(len(self.private_groups) + 1)
  3361. self.private_groups[group_alias] = group
  3362. group = group_alias
  3363. self.open_groups.append(group)
  3364. self.open_group_count[group] = self.open_group_count.get(group, 0) + 1
  3365. return group
  3366. def close_group(self):
  3367. self.open_groups.pop()
  3368. def is_open_group(self, name):
  3369. # In version 1, a group reference can refer to an open group. We'll
  3370. # just pretend the group isn't open.
  3371. version = (self.flags & _ALL_VERSIONS) or DEFAULT_VERSION
  3372. if version == VERSION1:
  3373. return False
  3374. if name.isdigit():
  3375. group = int(name)
  3376. else:
  3377. group = self.group_index.get(name)
  3378. return group in self.open_groups
def _check_group_features(info, parsed):
    """Checks whether the reverse and fuzzy features of the group calls match
    the groups which they call.

    Stores the resulting call-reference table on info.call_refs and any
    feature-mismatched copies to be compiled on info.additional_groups.
    """
    call_refs = {}
    additional_groups = []
    for call, reverse, fuzzy in info.group_calls:
        # Look up the reference of this group call.
        key = (call.group, reverse, fuzzy)
        ref = call_refs.get(key)
        if ref is None:
            # This group doesn't have a reference yet, so look up its features.
            if call.group == 0:
                # Calling the pattern as a whole.
                rev = bool(info.flags & REVERSE)
                fuz = isinstance(parsed, Fuzzy)
                if (rev, fuz) != (reverse, fuzzy):
                    # The pattern as a whole doesn't have the features we want,
                    # so we'll need to make a copy of it with the desired
                    # features.
                    additional_groups.append((CallRef(len(call_refs), parsed),
                      reverse, fuzzy))
            else:
                # Calling a capture group.
                def_info = info.defined_groups[call.group]
                group = def_info[0]
                if def_info[1 : ] != (reverse, fuzzy):
                    # The group doesn't have the features we want, so we'll
                    # need to make a copy of it with the desired features.
                    additional_groups.append((group, reverse, fuzzy))
            ref = len(call_refs)
            call_refs[key] = ref
        # Tag the call node with its reference index.
        call.call_ref = ref
    info.call_refs = call_refs
    info.additional_groups = additional_groups
  3414. def _get_required_string(parsed, flags):
  3415. "Gets the required string and related info of a parsed pattern."
  3416. req_offset, required = parsed.get_required_string(bool(flags & REVERSE))
  3417. if required:
  3418. required.required = True
  3419. if req_offset >= UNLIMITED:
  3420. req_offset = -1
  3421. req_flags = required.case_flags
  3422. if not (flags & UNICODE):
  3423. req_flags &= ~UNICODE
  3424. req_chars = required.folded_characters
  3425. else:
  3426. req_offset = 0
  3427. req_chars = ()
  3428. req_flags = 0
  3429. return req_offset, req_chars, req_flags
class Scanner:
    """A simple lexical scanner: compiles a lexicon of (phrase, action)
    pairs into one alternation and applies the actions to matches."""
    def __init__(self, lexicon, flags=0):
        self.lexicon = lexicon
        # Combine phrases into a compound pattern.
        patterns = []
        for phrase, action in lexicon:
            # Parse the regular expression.
            source = Source(phrase)
            info = Info(flags, source.char_type)
            source.ignore_space = bool(info.flags & VERBOSE)
            parsed = _parse_pattern(source, info)
            if not source.at_end():
                raise error("unbalanced parenthesis", source.string,
                  source.pos)
            # We want to forbid capture groups within each phrase.
            patterns.append(parsed.remove_captures())
        # Combine all the subpatterns into one pattern.
        info = Info(flags)
        # Each phrase gets its own group so lastindex identifies the action.
        patterns = [Group(info, g + 1, p) for g, p in enumerate(patterns)]
        parsed = Branch(patterns)
        # Optimise the compound pattern.
        reverse = bool(info.flags & REVERSE)
        parsed = parsed.optimise(info, reverse)
        parsed = parsed.pack_characters(info)
        # Get the required string.
        req_offset, req_chars, req_flags = _get_required_string(parsed,
          info.flags)
        # Check the features of the groups.
        _check_group_features(info, parsed)
        # Complain if there are any group calls. They are not supported by the
        # Scanner class.
        if info.call_refs:
            raise error("recursive regex not supported by Scanner",
              source.string, source.pos)
        reverse = bool(info.flags & REVERSE)
        # Compile the compound pattern. The result is a list of tuples.
        code = parsed.compile(reverse) + [(OP.SUCCESS, )]
        # Flatten the code into a list of ints.
        code = _flatten_code(code)
        if not parsed.has_simple_start():
            # Get the first set, if possible.
            try:
                fs_code = _compile_firstset(info, parsed.get_firstset(reverse))
                fs_code = _flatten_code(fs_code)
                code = fs_code + code
            except _FirstSetError:
                pass
        # Check the global flags for conflicts.
        version = (info.flags & _ALL_VERSIONS) or DEFAULT_VERSION
        if version not in (0, VERSION0, VERSION1):
            raise ValueError("VERSION0 and VERSION1 flags are mutually incompatible")
        # Create the PatternObject.
        #
        # Local flags like IGNORECASE affect the code generation, but aren't
        # needed by the PatternObject itself. Conversely, global flags like
        # LOCALE _don't_ affect the code generation but _are_ needed by the
        # PatternObject.
        self.scanner = _regex.compile(None, (flags & GLOBAL_FLAGS) | version,
          code, {}, {}, {}, [], req_offset, req_chars, req_flags,
          len(patterns))
    def scan(self, string):
        """Scan *string*, collecting the action results for each match;
        returns (results, unmatched_remainder)."""
        result = []
        append = result.append
        match = self.scanner.scanner(string).match
        i = 0
        while True:
            m = match()
            if not m:
                break
            j = m.end()
            if i == j:
                # A zero-width match would loop forever; stop scanning.
                break
            # lastindex identifies which phrase matched; pick its action.
            action = self.lexicon[m.lastindex - 1][1]
            if hasattr(action, '__call__'):
                self.match = m
                action = action(self, m.group())
            if action is not None:
                append(action)
            i = j
        return result, string[i : ]
# Get the known properties dict.
PROPERTIES = _regex.get_properties()

# Build the inverse of the properties dict: map each property id to its
# longest-known name and each value id to its longest-known value name.
PROPERTY_NAMES = {}
for prop_name, (prop_id, values) in PROPERTIES.items():
    name, prop_values = PROPERTY_NAMES.get(prop_id, ("", {}))
    name = max(name, prop_name, key=len)
    PROPERTY_NAMES[prop_id] = name, prop_values
    for val_name, val_id in values.items():
        prop_values[val_id] = max(prop_values.get(val_id, ""), val_name,
          key=len)

# Character escape sequences.
CHARACTER_ESCAPES = {
    "a": "\a",
    "b": "\b",
    "f": "\f",
    "n": "\n",
    "r": "\r",
    "t": "\t",
    "v": "\v",
}

# Predefined character set escape sequences.
CHARSET_ESCAPES = {
    "d": lookup_property(None, "Digit", True),
    "D": lookup_property(None, "Digit", False),
    "h": lookup_property(None, "Blank", True),
    "s": lookup_property(None, "Space", True),
    "S": lookup_property(None, "Space", False),
    "w": lookup_property(None, "Word", True),
    "W": lookup_property(None, "Word", False),
}

# Positional escape sequences.
POSITION_ESCAPES = {
    "A": StartOfString(),
    "b": Boundary(),
    "B": Boundary(False),
    "K": Keep(),
    "m": StartOfWord(),
    "M": EndOfWord(),
    "Z": EndOfString(),
}

# Positional escape sequences when WORD flag set: \b, \B, \m and \M use the
# default (Unicode word) boundary rules instead.
WORD_POSITION_ESCAPES = dict(POSITION_ESCAPES)
WORD_POSITION_ESCAPES.update({
    "b": DefaultBoundary(),
    "B": DefaultBoundary(False),
    "m": DefaultStartOfWord(),
    "M": DefaultEndOfWord(),
})

# Regex control verbs.
VERBS = {
    "FAIL": Failure(),
    "F": Failure(),
    "PRUNE": Prune(),
    "SKIP": Skip(),
}