Skip to content

Commit 3169b93

Browse files
committed
gh-47798: Prefer double quotes in run_pipeline code, tests, and doc examples
Style-only pass over the pipeline-related code, tests, and doc examples. Single quotes kept where the literal contains a " (to avoid escaping) and in repr-output examples (Python's repr uses ').
1 parent 6dd8dad commit 3169b93

3 files changed

Lines changed: 329 additions & 329 deletions

File tree

Doc/library/subprocess.rst

Lines changed: 15 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -334,8 +334,8 @@ underlying :class:`Popen` interface can be used directly.
334334

335335
* In text mode the interleaving can split multi-byte characters across
336336
writes from different processes. If that is a concern, capture in
337-
binary mode and decode yourself, or pass ``errors='replace'`` or
338-
``errors='backslashreplace'``.
337+
binary mode and decode yourself, or pass ``errors="replace"`` or
338+
``errors="backslashreplace"``.
339339

340340
* If any child spawns a grandchild process that keeps the inherited
341341
stderr file descriptor open after the child itself exits, the
@@ -354,8 +354,8 @@ underlying :class:`Popen` interface can be used directly.
354354
>>> import subprocess
355355
>>> # Equivalent to: echo "hello world" | tr a-z A-Z
356356
>>> result = subprocess.run_pipeline(
357-
... ['echo', 'hello world'],
358-
... ['tr', 'a-z', 'A-Z'],
357+
... ["echo", "hello world"],
358+
... ["tr", "a-z", "A-Z"],
359359
... capture_output=True, text=True
360360
... )
361361
>>> result.stdout
@@ -365,28 +365,28 @@ underlying :class:`Popen` interface can be used directly.
365365

366366
>>> # Pipeline with three commands
367367
>>> result = subprocess.run_pipeline(
368-
... ['echo', 'one\ntwo\nthree'],
369-
... ['sort'],
370-
... ['head', '-n', '2'],
368+
... ["echo", "one\ntwo\nthree"],
369+
... ["sort"],
370+
... ["head", "-n", "2"],
371371
... capture_output=True, text=True
372372
... )
373373
>>> result.stdout
374374
'one\nthree\n'
375375

376376
>>> # Using input parameter
377377
>>> result = subprocess.run_pipeline(
378-
... ['cat'],
379-
... ['wc', '-l'],
380-
... input='line1\nline2\nline3\n',
378+
... ["cat"],
379+
... ["wc", "-l"],
380+
... input="line1\nline2\nline3\n",
381381
... capture_output=True, text=True
382382
... )
383383
>>> result.stdout.strip()
384384
'3'
385385

386386
>>> # Error handling with check=True
387387
>>> subprocess.run_pipeline(
388-
... ['echo', 'hello'],
389-
... ['false'], # exits with status 1
388+
... ["echo", "hello"],
389+
... ["false"], # exits with status 1
390390
... check=True
391391
... )
392392
Traceback (most recent call last):
@@ -468,10 +468,10 @@ underlying :class:`Popen` interface can be used directly.
468468
keep the pipeline's stderr handling::
469469

470470
>>> from subprocess import run_pipeline, PipelineCommand, DEVNULL
471-
>>> with open('out.gz', 'wb') as f:
471+
>>> with open("out.gz", "wb") as f:
472472
... result = run_pipeline(
473-
... PipelineCommand(['dd', 'if=infile', 'bs=1M'], stderr=DEVNULL),
474-
... ['pigz'],
473+
... PipelineCommand(["dd", "if=infile", "bs=1M"], stderr=DEVNULL),
474+
... ["pigz"],
475475
... stdout=f, check=True,
476476
... )
477477

Lib/subprocess.py

Lines changed: 61 additions & 61 deletions
Original file line numberDiff line numberDiff line change
@@ -215,7 +215,7 @@ def __init__(self, commands, returncodes, stdout=None, stderr=None):
215215
commands = tuple(commands)
216216
returncodes = tuple(returncodes)
217217
assert len(commands) == len(returncodes), (
218-
f'{len(commands)=} != {len(returncodes)=}')
218+
f"{len(commands)=} != {len(returncodes)=}")
219219
super().__init__(commands, returncodes)
220220
self.commands = commands
221221
self.returncodes = returncodes
@@ -877,21 +877,21 @@ class PipelineCommand:
877877
argv sequence.
878878
"""
879879

880-
__slots__ = ('args', 'stderr', 'env', 'cwd', 'shell')
880+
__slots__ = ("args", "stderr", "env", "cwd", "shell")
881881

882882
def __init__(self, args, /, *, stderr=None, env=None, cwd=None,
883883
shell=False):
884884
if stderr not in (None, STDOUT, DEVNULL):
885885
raise ValueError(
886-
'PipelineCommand stderr must be None, STDOUT, or DEVNULL')
886+
"PipelineCommand stderr must be None, STDOUT, or DEVNULL")
887887
if shell:
888888
if not isinstance(args, str):
889889
raise TypeError(
890-
'PipelineCommand with shell=True requires a str command')
890+
"PipelineCommand with shell=True requires a str command")
891891
elif isinstance(args, str):
892892
raise TypeError(
893-
'PipelineCommand args must be a sequence of program '
894-
'arguments, not a str (use shell=True for a shell command)')
893+
"PipelineCommand args must be a sequence of program "
894+
"arguments, not a str (use shell=True for a shell command)")
895895
self.args = args
896896
self.stderr = stderr
897897
self.env = env
@@ -904,23 +904,23 @@ def _has_overrides(self):
904904
or self.cwd is not None or self.shell)
905905

906906
def __repr__(self):
907-
parts = [f'{self.args!r}']
907+
parts = [f"{self.args!r}"]
908908
if self.stderr is STDOUT:
909-
parts.append('stderr=STDOUT')
909+
parts.append("stderr=STDOUT")
910910
elif self.stderr is DEVNULL:
911-
parts.append('stderr=DEVNULL')
911+
parts.append("stderr=DEVNULL")
912912
if self.env is not None:
913913
# env is commonly large and may contain credentials; don't
914914
# dump its contents into tracebacks via PipelineError.__str__.
915915
try:
916916
n = len(self.env)
917917
except TypeError:
918-
n = '?'
919-
parts.append(f'env=<{n} entries>')
918+
n = "?"
919+
parts.append(f"env=<{n} entries>")
920920
if self.cwd is not None:
921-
parts.append(f'cwd={self.cwd!r}')
921+
parts.append(f"cwd={self.cwd!r}")
922922
if self.shell:
923-
parts.append('shell=True')
923+
parts.append("shell=True")
924924
return f"{type(self).__name__}({', '.join(parts)})"
925925

926926

@@ -942,12 +942,12 @@ def __init__(self, commands, returncodes, stdout=None, stderr=None):
942942
self.stderr = stderr
943943

944944
def __repr__(self):
945-
args = [f'commands={self.commands!r}',
946-
f'returncodes={self.returncodes!r}']
945+
args = [f"commands={self.commands!r}",
946+
f"returncodes={self.returncodes!r}"]
947947
if self.stdout is not None:
948-
args.append(f'stdout={self.stdout!r}')
948+
args.append(f"stdout={self.stdout!r}")
949949
if self.stderr is not None:
950-
args.append(f'stderr={self.stderr!r}')
950+
args.append(f"stderr={self.stderr!r}")
951951
return f"{type(self).__name__}({', '.join(args)})"
952952

953953
__class_getitem__ = classmethod(types.GenericAlias)
@@ -1072,79 +1072,79 @@ def run_pipeline(*commands, input=None, capture_output=False, timeout=None,
10721072
Example:
10731073
# Equivalent to: cat file.txt | grep pattern | wc -l
10741074
result = run_pipeline(
1075-
['cat', 'file.txt'],
1076-
['grep', 'pattern'],
1077-
['wc', '-l'],
1075+
["cat", "file.txt"],
1076+
["grep", "pattern"],
1077+
["wc", "-l"],
10781078
capture_output=True, text=True
10791079
)
10801080
print(result.stdout) # "42\\n"
10811081
print(result.returncodes) # (0, 0, 0)
10821082
"""
10831083
if len(commands) < 2:
1084-
raise ValueError('run_pipeline requires at least 2 commands')
1084+
raise ValueError("run_pipeline requires at least 2 commands")
10851085

1086-
if input is not None and kwargs.get('stdin') is not None:
1087-
raise ValueError('stdin and input arguments may not both be used.')
1088-
if kwargs.get('stdin') is PIPE:
1089-
raise ValueError('stdin=PIPE is not supported by run_pipeline; '
1090-
'pass input= instead, or provide a file/fd')
1086+
if input is not None and kwargs.get("stdin") is not None:
1087+
raise ValueError("stdin and input arguments may not both be used.")
1088+
if kwargs.get("stdin") is PIPE:
1089+
raise ValueError("stdin=PIPE is not supported by run_pipeline; "
1090+
"pass input= instead, or provide a file/fd")
10911091

10921092
if capture_output:
1093-
if kwargs.get('stdout') is not None or kwargs.get('stderr') is not None:
1094-
raise ValueError('stdout and stderr arguments may not be used '
1095-
'with capture_output.')
1093+
if kwargs.get("stdout") is not None or kwargs.get("stderr") is not None:
1094+
raise ValueError("stdout and stderr arguments may not be used "
1095+
"with capture_output.")
10961096

1097-
if kwargs.get('close_fds') is False:
1097+
if kwargs.get("close_fds") is False:
10981098
raise ValueError(
1099-
'close_fds=False is not supported by run_pipeline; '
1100-
'inherited pipe ends would prevent EOF signaling between commands')
1099+
"close_fds=False is not supported by run_pipeline; "
1100+
"inherited pipe ends would prevent EOF signaling between commands")
11011101

1102-
if kwargs.get('shell'):
1102+
if kwargs.get("shell"):
11031103
raise ValueError(
1104-
'shell=True is not supported by run_pipeline; the pipeline itself '
1105-
'replaces the shell. Use PipelineCommand(cmd, shell=True) for a '
1106-
'single command that needs shell interpretation.')
1107-
if kwargs.get('executable') is not None:
1104+
"shell=True is not supported by run_pipeline; the pipeline itself "
1105+
"replaces the shell. Use PipelineCommand(cmd, shell=True) for a "
1106+
"single command that needs shell interpretation.")
1107+
if kwargs.get("executable") is not None:
11081108
raise ValueError(
1109-
'executable= is not supported by run_pipeline')
1109+
"executable= is not supported by run_pipeline")
11101110

1111-
if kwargs.get('stderr') is STDOUT:
1111+
if kwargs.get("stderr") is STDOUT:
11121112
raise ValueError(
1113-
'stderr=STDOUT at the run_pipeline level would merge each '
1113+
"stderr=STDOUT at the run_pipeline level would merge each "
11141114
"non-final command's stderr into the next command's stdin. "
1115-
'Use PipelineCommand(cmd, stderr=STDOUT) for a single command, '
1116-
'or capture_output=True to capture stderr from every command.')
1115+
"Use PipelineCommand(cmd, stderr=STDOUT) for a single command, "
1116+
"or capture_output=True to capture stderr from every command.")
11171117

1118-
if kwargs.get('start_new_session') or kwargs.get('process_group') is not None:
1118+
if kwargs.get("start_new_session") or kwargs.get("process_group") is not None:
11191119
# run_pipeline spawns each command as a sibling child of this
11201120
# process, so a per-command session/group does not give the shell
11211121
# "one process group per pipeline" semantic that callers passing
11221122
# these almost certainly want. Reject for now; a feature that
11231123
# places every command in a single new group is a possible
11241124
# follow-on.
11251125
raise ValueError(
1126-
'start_new_session and process_group are not supported by '
1127-
'run_pipeline; each command is spawned as a sibling child, '
1128-
'so a per-command session or group does not yield a single '
1129-
'process group for the pipeline')
1126+
"start_new_session and process_group are not supported by "
1127+
"run_pipeline; each command is spawned as a sibling child, "
1128+
"so a per-command session or group does not yield a single "
1129+
"process group for the pipeline")
11301130

11311131
commands = tuple(c if isinstance(c, PipelineCommand) else PipelineCommand(c)
11321132
for c in commands)
11331133

1134-
stderr_arg = kwargs.pop('stderr', None)
1134+
stderr_arg = kwargs.pop("stderr", None)
11351135
capture_stderr = capture_output or (stderr_arg is PIPE)
11361136

1137-
stdin_arg = kwargs.pop('stdin', None)
1138-
stdout_arg = kwargs.pop('stdout', None)
1137+
stdin_arg = kwargs.pop("stdin", None)
1138+
stdout_arg = kwargs.pop("stdout", None)
11391139

11401140
# Load-bearing: pop text=/universal_newlines=/encoding=/errors= so each
11411141
# Popen keeps its parent-side pipes binary. _communicate_streams_* relies
11421142
# on a bytes-in/bytes-out contract; leaving these in kwargs would wrap the
11431143
# pipes in TextIOWrapper and break the threaded Windows backend.
1144-
text = kwargs.pop('text', None)
1145-
universal_newlines = kwargs.pop('universal_newlines', None)
1146-
encoding = kwargs.pop('encoding', None)
1147-
errors_param = kwargs.pop('errors', None)
1144+
text = kwargs.pop("text", None)
1145+
universal_newlines = kwargs.pop("universal_newlines", None)
1146+
encoding = kwargs.pop("encoding", None)
1147+
errors_param = kwargs.pop("errors", None)
11481148
text_mode = bool(text or universal_newlines or encoding or errors_param)
11491149
if text_mode and encoding is None:
11501150
encoding = locale.getencoding()
@@ -1193,19 +1193,19 @@ def run_pipeline(*commands, input=None, capture_output=False, timeout=None,
11931193
if cmd.env is not None or cmd.cwd is not None or cmd.shell:
11941194
cmd_kwargs = dict(kwargs)
11951195
if cmd.env is not None:
1196-
cmd_kwargs['env'] = cmd.env
1196+
cmd_kwargs["env"] = cmd.env
11971197
if cmd.cwd is not None:
1198-
cmd_kwargs['cwd'] = cmd.cwd
1198+
cmd_kwargs["cwd"] = cmd.cwd
11991199
if cmd.shell:
1200-
cmd_kwargs['shell'] = True
1200+
cmd_kwargs["shell"] = True
12011201

12021202
try:
12031203
proc = Popen(cmd.args, stdin=proc_stdin, stdout=proc_stdout,
12041204
stderr=proc_stderr, **cmd_kwargs)
12051205
except OSError as e:
12061206
e.add_note(
1207-
f'raised while starting {cmd!r} '
1208-
f'(run_pipeline commands[{i}])')
1207+
f"raised while starting {cmd!r} "
1208+
f"(run_pipeline commands[{i}])")
12091209
raise
12101210
processes.append(proc)
12111211

@@ -1231,7 +1231,7 @@ def run_pipeline(*commands, input=None, capture_output=False, timeout=None,
12311231

12321232
input_data = input
12331233
if input_data is not None and text_mode:
1234-
input_data = input_data.encode(encoding, errors_param or 'strict')
1234+
input_data = input_data.encode(encoding, errors_param or "strict")
12351235

12361236
read_streams = []
12371237
if last_proc.stdout is not None:
@@ -1265,7 +1265,7 @@ def run_pipeline(*commands, input=None, capture_output=False, timeout=None,
12651265
stdout = results.get(last_proc.stdout)
12661266
stderr = results.get(stderr_reader)
12671267

1268-
decode_errors = errors_param or 'strict'
1268+
decode_errors = errors_param or "strict"
12691269
if text_mode and stdout is not None:
12701270
stdout = _translate_newlines(stdout, encoding, decode_errors)
12711271
if text_mode and stderr is not None:

0 commit comments

Comments (0)