Run --sort internally (via sorted()) instead of piping through csvsort; refactor table output into a separate method
This commit is contained in:
parent
3a8c61ff59
commit
c412af3de0
@ -98,22 +98,49 @@ class _Context:
|
||||
try:
|
||||
if self.arguments.table:
|
||||
add_filter(["csvlook"])
|
||||
if self.arguments.sort:
|
||||
add_filter(["csvsort", "--blanks"])
|
||||
self._run_search(search_iterator, stream=output)
|
||||
if self.arguments.csv:
|
||||
self._table_output(search_iterator, stream=output)
|
||||
else:
|
||||
self._ldif_or_json_output(search_iterator, stream=output)
|
||||
finally:
|
||||
if procs:
|
||||
output.close()
|
||||
for proc in reversed(procs):
|
||||
proc.wait()
|
||||
|
||||
def _run_search(self, search_iterator: typing.Iterable[Result], *, stream: typing.IO[str]) -> None:
|
||||
def _to_table_lines(self, search_iterator: typing.Iterable[Result]) -> typing.Iterable[tuple[str, ...]]:
    """Yield one tuple of column values per normal search entry.

    Search references (results whose ``dn`` is ``None``) are skipped.
    Values come from the "human" (json) representation, which keys its
    data by lower-cased attribute name; columns missing from an entry
    yield "".  A SizeLimitExceeded from the server is converted into a
    SystemExit with a readable message.
    """
    decoder = decode.Decoder(arguments=self.arguments)
    # "human" (json) dicts contain data by lower case key:
    wanted = tuple(column.lower() for column in self.arguments.columns)
    try:
        for dn, entry in search_iterator:
            if dn is None:
                # search reference, not a normal entry
                continue
            assert not isinstance(entry, list)
            human = decoder.human(dn=dn, entry=decoder.read(dn=dn, entry=entry))
            yield tuple(human.get(name, "") for name in wanted)
    except SizeLimitExceeded as exc:
        raise SystemExit(f"Error: {exc}")
|
||||
|
||||
def _table_output(self, search_iterator: typing.Iterable[Result], *, stream: typing.IO[str]) -> None:
|
||||
line_iterator = self._to_table_lines(search_iterator)
|
||||
if self.arguments.sort:
|
||||
line_iterator = sorted(line_iterator)
|
||||
|
||||
csv_out = csv.writer(stream, lineterminator="\n")
|
||||
csv_out.writerow(self.arguments.columns)
|
||||
|
||||
for line in line_iterator:
|
||||
csv_out.writerow(line)
|
||||
|
||||
def _ldif_or_json_output(self, search_iterator: typing.Iterable[Result], *, stream: typing.IO[str]) -> None:
|
||||
decoder = decode.Decoder(arguments=self.arguments)
|
||||
|
||||
num_responses = 0
|
||||
num_entries = 0
|
||||
|
||||
ldif_output = not (self.arguments.csv or self.arguments.json or self.arguments.human)
|
||||
ldif_output = not (self.arguments.json or self.arguments.human)
|
||||
|
||||
if ldif_output:
|
||||
print("# extended LDIF")
|
||||
@ -128,22 +155,11 @@ class _Context:
|
||||
print("#")
|
||||
print()
|
||||
|
||||
if self.arguments.csv:
|
||||
csv_out = csv.DictWriter(
|
||||
stream,
|
||||
fieldnames=self.arguments.columns,
|
||||
lineterminator="\n",
|
||||
extrasaction="ignore",
|
||||
)
|
||||
csv_out.writeheader()
|
||||
# dicts contain data by lower case key
|
||||
csv_out.fieldnames = [col.lower() for col in self.arguments.columns]
|
||||
|
||||
try:
|
||||
for dn, entry in search_iterator:
|
||||
num_responses += 1
|
||||
if dn is None:
|
||||
if not self.arguments.csv:
|
||||
if ldif_output:
|
||||
print("# search reference")
|
||||
for ref in entry:
|
||||
assert isinstance(ref, str)
|
||||
@ -154,9 +170,6 @@ class _Context:
|
||||
assert not isinstance(entry, list)
|
||||
num_entries += 1
|
||||
obj = decoder.read(dn=dn, entry=entry)
|
||||
if self.arguments.csv:
|
||||
csv_out.writerow(decoder.human(dn=dn, entry=obj))
|
||||
else:
|
||||
decoder.emit(dn=dn, entry=obj)
|
||||
except SizeLimitExceeded as e:
|
||||
raise SystemExit(f"Error: {e}")
|
||||
|
Loading…
Reference in New Issue
Block a user