 else:  # pragma: <3.12 cover
     FSTRING_START = FSTRING_MIDDLE = FSTRING_END = -1
 
+if sys.version_info >= (3, 14):  # pragma: >=3.14 cover
+    TSTRING_START = tokenize.TSTRING_START
+    TSTRING_MIDDLE = tokenize.TSTRING_MIDDLE
+    TSTRING_END = tokenize.TSTRING_END
+else:  # pragma: <3.14 cover
+    TSTRING_START = TSTRING_MIDDLE = TSTRING_END = -1
+
 _checks = {'physical_line': {}, 'logical_line': {}, 'tree': {}}
 
 
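Note (illustration, not part of the diff): a minimal sketch of what the new sentinels buy us. On 3.14 the names come from tokenize; on older interpreters they stay -1, which never equals a real token type, so every TSTRING_* comparison in the checks below is simply false.

    import io
    import sys
    import tokenize

    if sys.version_info >= (3, 14):
        # Assuming 3.14 exposes the TSTRING_* token types used above, a
        # t-string tokenizes as TSTRING_START / TSTRING_MIDDLE / TSTRING_END,
        # mirroring the 3.12 f-string token stream.
        src = 't"hello {name}"\n'
        for tok in tokenize.generate_tokens(io.StringIO(src).readline):
            print(tokenize.tok_name[tok.type], repr(tok.string))
    else:
        # The -1 fallback can never match tok.type, so nothing fires here.
        print('no TSTRING tokens before 3.14')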
@@ -697,7 +704,12 @@ def continued_indentation(logical_line, tokens, indent_level, hang_closing,
             if verbose >= 4:
                 print(f"bracket depth {depth} indent to {start[1]}")
         # deal with implicit string concatenation
-        elif token_type in (tokenize.STRING, tokenize.COMMENT, FSTRING_START):
+        elif token_type in {
+            tokenize.STRING,
+            tokenize.COMMENT,
+            FSTRING_START,
+            TSTRING_START
+        }:
             indent_chances[start[1]] = str
         # visual indent after assert/raise/with
         elif not row and not depth and text in ["assert", "raise", "with"]:
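Note (illustration): recording an indent chance at TSTRING_START lets a continuation line align under the opening quote of a t-string, just as it already can under plain strings, comments, and f-strings. A hypothetical 3.14 input the updated check is meant to accept without an E12x error:

    msg = (t"first piece {value} "
           t"second piece aligned under the opening t-string")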
@@ -873,13 +885,17 @@ def missing_whitespace(logical_line, tokens):
             brace_stack.append(text)
         elif token_type == FSTRING_START:  # pragma: >=3.12 cover
             brace_stack.append('f')
+        elif token_type == TSTRING_START:  # pragma: >=3.14 cover
+            brace_stack.append('t')
         elif token_type == tokenize.NAME and text == 'lambda':
             brace_stack.append('l')
         elif brace_stack:
             if token_type == tokenize.OP and text in {']', ')', '}'}:
                 brace_stack.pop()
             elif token_type == FSTRING_END:  # pragma: >=3.12 cover
                 brace_stack.pop()
+            elif token_type == TSTRING_END:  # pragma: >=3.14 cover
+                brace_stack.pop()
             elif (
                     brace_stack[-1] == 'l' and
                     token_type == tokenize.OP and
@@ -899,6 +915,9 @@ def missing_whitespace(logical_line, tokens):
                 # 3.12+ fstring format specifier
                 elif text == ':' and brace_stack[-2:] == ['f', '{']:  # pragma: >=3.12 cover  # noqa: E501
                     pass
+                # 3.14+ tstring format specifier
+                elif text == ':' and brace_stack[-2:] == ['t', '{']:  # pragma: >=3.14 cover  # noqa: E501
+                    pass
                 # tuple (and list for some reason?)
                 elif text == ',' and next_char in ')]':
                     pass
@@ -948,7 +967,9 @@ def missing_whitespace(logical_line, tokens):
                     # allow keyword args or defaults: foo(bar=None).
                     brace_stack[-1:] == ['('] or
                     # allow python 3.8 fstring repr specifier
-                    brace_stack[-2:] == ['f', '{']
+                    brace_stack[-2:] == ['f', '{'] or
+                    # allow python 3.14 tstring repr specifier
+                    brace_stack[-2:] == ['t', '{']
                 )
             ):
                 pass
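Note (illustration): pushing 't' onto brace_stack lets the ':' and '=' exemptions tell format-spec punctuation inside a t-string interpolation apart from ordinary code. Hedged examples of the intended outcomes (3.14 syntax, names are made up):

    s = t"{value:>10}"   # ':' starts a format spec -> no E231 expected
    s = t"{value=}"      # '=' repr specifier -> no E225 expected
    s = t"{f(a,b)}"      # ',' inside the interpolation -> E231 still expected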
@@ -1639,11 +1660,11 @@ def python_3000_invalid_escape_sequence(logical_line, tokens, noqa):
 
     prefixes = []
     for token_type, text, start, _, _ in tokens:
-        if token_type in {tokenize.STRING, FSTRING_START}:
+        if token_type in {tokenize.STRING, FSTRING_START, TSTRING_START}:
             # Extract string modifiers (e.g. u or r)
             prefixes.append(text[:text.index(text[-1])].lower())
 
-        if token_type in {tokenize.STRING, FSTRING_MIDDLE}:
+        if token_type in {tokenize.STRING, FSTRING_MIDDLE, TSTRING_MIDDLE}:
             if 'r' not in prefixes[-1]:
                 start_line, start_col = start
                 pos = text.find('\\')
@@ -1661,7 +1682,7 @@ def python_3000_invalid_escape_sequence(logical_line, tokens, noqa):
                     )
                     pos = text.find('\\', pos + 1)
 
-        if token_type in {tokenize.STRING, FSTRING_END}:
+        if token_type in {tokenize.STRING, FSTRING_END, TSTRING_END}:
             prefixes.pop()
 
 
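Note (illustration): with TSTRING_START feeding prefixes and TSTRING_MIDDLE being scanned, W605 should now fire for unknown escapes in t-strings unless a raw prefix is present (assuming raw t-strings spell their prefix rt/tr, as raw f-strings do):

    p = t"\d+"     # W605 expected: invalid escape sequence '\d'
    p = rt"\d+"    # raw prefix recorded at TSTRING_START -> no W605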
@@ -1859,7 +1880,7 @@ def __init__(self, filename=None, lines=None,
         self.max_line_length = options.max_line_length
         self.max_doc_length = options.max_doc_length
         self.indent_size = options.indent_size
-        self.fstring_start = 0
+        self.fstring_start = self.tstring_start = 0
         self.multiline = False  # in a multiline string?
         self.hang_closing = options.hang_closing
         self.indent_size = options.indent_size
@@ -1954,7 +1975,7 @@ def build_tokens_line(self):
                 continue
             if token_type == tokenize.STRING:
                 text = mute_string(text)
-            elif token_type == FSTRING_MIDDLE:  # pragma: >=3.12 cover
+            elif token_type in {FSTRING_MIDDLE, TSTRING_MIDDLE}:  # pragma: >=3.12 cover  # noqa: E501
                 # fstring tokens are "unescaped" braces -- re-escape!
                 brace_count = text.count('{') + text.count('}')
                 text = 'x' * (len(text) + brace_count)
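Note (illustration): the padding matters because a FSTRING_MIDDLE/TSTRING_MIDDLE token carries literal braces un-doubled, so its text is shorter than the source span; padding by the brace count keeps the logical-to-physical column mapping aligned. A small standalone example of the arithmetic:

    text = 'a { b'   # hypothetical middle token: the source had a doubled '{{'
    brace_count = text.count('{') + text.count('}')
    print('x' * (len(text) + brace_count))   # 'xxxxxx', 6 chars, matching the source span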
@@ -2046,6 +2067,8 @@ def maybe_check_physical(self, token, prev_physical):
 
         if token.type == FSTRING_START:  # pragma: >=3.12 cover
             self.fstring_start = token.start[0]
+        elif token.type == TSTRING_START:  # pragma: >=3.14 cover
+            self.tstring_start = token.start[0]
         # a newline token ends a single physical line.
         elif _is_eol_token(token):
             # if the file does not end with a newline, the NEWLINE
@@ -2057,7 +2080,8 @@ def maybe_check_physical(self, token, prev_physical):
                 self.check_physical(token.line)
         elif (
             token.type == tokenize.STRING and '\n' in token.string or
-            token.type == FSTRING_END
+            token.type == FSTRING_END or
+            token.type == TSTRING_END
         ):
             # Less obviously, a string that contains newlines is a
             # multiline string, either triple-quoted or with internal
@@ -2078,6 +2102,8 @@ def maybe_check_physical(self, token, prev_physical):
                 return
             if token.type == FSTRING_END:  # pragma: >=3.12 cover
                 start = self.fstring_start
+            elif token.type == TSTRING_END:  # pragma: >=3.14 cover
+                start = self.tstring_start
             else:
                 start = token.start[0]
             end = token.end[0]
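Note (illustration): self.tstring_start matters because the TSTRING_END token of a multiline t-string starts on its closing line; the checker then replays physical lines from the recorded start line through that end line, exactly as it already does for f-strings. A hypothetical multiline t-string that exercises this path (an over-long physical line inside it can still be reported):

    report = t"""summary: {summary}
    a second physical line that is checked on its own by check_physical
    """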