2 Commits

10 changed files with 28 additions and 172 deletions


@@ -9,7 +9,7 @@ CLI tool to query LDAP/AD servers
* Integration with password managers
* Various output formats
* Classic LDIF
* JSON stream (with detailed or simplified attribute values)
* JSON stream (with simplified or detailed attribute values)
* CSV
* Markdown table with stretched columns (for viewing in CLI/for monospace fonts); requires csvlook from [csvkit](https://csvkit.readthedocs.io/)
* HTML
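
The "JSON stream" bullet corresponds to the `--json` / `--full-json` flags reworked later in this diff. A rough, hypothetical sketch of the two record shapes: the simplified shape follows `Decoder.human()` shown further down; the keys inside the detailed per-value dicts come from `attr.to_json()` and are not visible in this diff, so they are placeholders only.

```python
import json

# Hypothetical sample records; the tool emits one JSON object per line.
# Simplified (--json): dn as a string, each attribute mapped to a list of
# human-readable strings (mirrors Decoder.human() further down in this diff).
simplified = {"dn": "cn=Example User,dc=example,dc=org", "cn": ["Example User"]}

# Detailed (--full-json): each attribute value as a dict with several
# representations; the keys below are placeholders, not ldaptool's real keys.
detailed = {
    "dn": "cn=Example User,dc=example,dc=org",
    "cn": [{"human": "Example User", "raw": "RXhhbXBsZSBVc2Vy"}],
}

for record in (simplified, detailed):
    print(json.dumps(record, ensure_ascii=False))
```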

debian/changelog vendored

@@ -1,58 +0,0 @@
ldaptool (0.5-1) unstable; urgency=medium

  [ Daniel Dizdarevic ]
  * :Fix version requirement for python3.10

  [ Stefan Bühler ]
  * handle missing KeePass entry

  [ Daniel Dizdarevic ]
  * Catch invalid passwords in keepass
  * Catch CTRL+C and CTRL+D in password prompts

  [ Stefan Bühler ]
  * improve some error messages
  * improve config loading: don't modify dicts to allow yaml repeated nodes
  * add argument to postprocess steps and support index/slicing in DN-related hooks; document them
  * decode securityIdentifier attribute as SID

 -- Stefan Bühler <stefan.buehler@tik.uni-stuttgart.de>  Wed, 10 May 2023 19:53:51 +0200

ldaptool (0.4-1) unstable; urgency=medium

  * move argument/column handling to decoder (prepare for more post-processing in decoder)
  * move json output format handling to main tool from decoder
  * support attribute post-processing; :<len>, and DN :domain, :path, :fullpath
  * use Enum instead of StrEnum for python3.10

 -- Stefan Bühler <stefan.buehler@tik.uni-stuttgart.de>  Tue, 02 May 2023 16:54:00 +0200

ldaptool (0.3-1) unstable; urgency=medium

  * ldaptool: move output arguments from search to main
  * run sort internally, refactor table output into separate method
  * refactor table variant handling
  * add html output format
  * README.md: document csvkit dependency
  * debian: require csvkit (markdown table is an essential feature)

 -- Stefan Bühler <stefan.buehler@tik.uni-stuttgart.de>  Fri, 28 Apr 2023 19:31:37 +0200

ldaptool (0.2-1) unstable; urgency=medium

  * README.md: fix typo
  * enable tls unless kerberos is used (SASL GSS-API doesn't seem to work over TLS)

 -- Stefan Bühler <stefan.buehler@tik.uni-stuttgart.de>  Fri, 28 Apr 2023 17:21:35 +0200

ldaptool (0.1-1) unstable; urgency=medium

  * Initial release.

 -- Stefan Bühler <stefan.buehler@tik.uni-stuttgart.de>  Fri, 28 Apr 2023 12:09:30 +0200

ldaptool (0.1-0) unstable; urgency=medium

  * Stub ITP lintian.

 -- Stefan Bühler <stefan.buehler@tik.uni-stuttgart.de>  Fri, 28 Apr 2023 12:09:29 +0200

debian/control vendored

@@ -1,43 +0,0 @@
Source: ldaptool
Section: net
Priority: optional
Maintainer: Stefan Bühler <stefan.buehler@tik.uni-stuttgart.de>
Rules-Requires-Root: no
Build-Depends:
 debhelper-compat (= 13),
 pybuild-plugin-pyproject,
 flit,
 dh-sequence-python3,
 python3,
 python3-ldap,
 python3-yaml,
 python3-pykeepass,
#Testsuite: autopkgtest-pkg-python
Standards-Version: 4.6.2
Homepage: https://git-nks-public.tik.uni-stuttgart.de/net/ldaptool

Package: python3-ldaptool
Architecture: all
Depends:
 ${python3:Depends},
 ${misc:Depends},
Recommends:
 python3-pykeepass,
Description: CLI tool to run ldap queries
 CLI tool to query LDAP/AD servers, featuring various output formats
 and a configuration for different realms.
 .
 This package installs the library for Python 3.

Package: ldaptool
Architecture: all
Depends:
 python3-ldaptool (=${binary:Version}),
 ${python3:Depends},
 ${misc:Depends},
 csvkit,
Description: CLI tool to run ldap queries
 CLI tool to query LDAP/AD servers, featuring various output formats
 and a configuration for different realms.
 .
 This package installs the script.

debian/copyright vendored

@@ -1,27 +0,0 @@
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
Source: <https://git-nks-public.tik.uni-stuttgart.de/net/ldaptool>
Upstream-Name: ldaptool

Files:
 *
Copyright:
 2023 Stefan Bühler <stefan.buehler@tik.uni-stuttgart.de>
 2023 Daniel Dizdarevic <daniel.dizdarevic@tik.uni-stuttgart.de>
License: MIT
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
 in the Software without restriction, including without limitation the rights
 to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 copies of the Software, and to permit persons to whom the Software is
 furnished to do so, subject to the following conditions:
 .
 The above copyright notice and this permission notice shall be included in
 all copies or substantial portions of the Software.
 .
 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 THE SOFTWARE.

debian/gbp.conf vendored

@@ -1,5 +0,0 @@
[DEFAULT]
pristine-tar = False
upstream-branch = main
debian-branch = debian
upstream-tag = ldaptool-%(version)s

debian/rules vendored

@@ -1,13 +0,0 @@
#!/usr/bin/make -f

export PYBUILD_NAME=ldaptool

%:
	dh $@ --buildsystem=pybuild

# we want /usr/bin/ldaptool in a separate package
override_dh_auto_install:
	dh_auto_install
	mkdir -p debian/ldaptool/usr
	mv debian/python3-ldaptool/usr/bin debian/ldaptool/usr/


@@ -1 +0,0 @@
3.0 (quilt)


@@ -1 +0,0 @@
extend-diff-ignore = "^[^/]*[.]egg-info/|^[.]vscode|/__pycache__/|^venv/|^.mypy_cache/"


@@ -55,13 +55,17 @@ class Arguments(search.Arguments):
help="Sorted table output - defaults to markdown --table unless --csv is given",
),
)
full_json: bool = dataclasses.field(
default=False,
metadata=argclasses.arg(
help="Use full json output (dn as str, attributes as list of dicts containing various represenatations)",
),
)
json: bool = dataclasses.field(
default=False,
metadata=argclasses.arg(help="Use full json output"),
)
human: bool = dataclasses.field(
default=False,
metadata=argclasses.arg(help="Use simple json output (join multiple values of one attribute)"),
metadata=argclasses.arg(
help="Use simple json output (dn as str, attributes map to list of human-readable strings)",
),
)
def __post_init__(self) -> None:
@@ -85,15 +89,15 @@ class Arguments(search.Arguments):
if self.table_output:
if not self.columns:
raise SystemExit("Table output requires attributes")
if self.json:
if self.full_json:
raise SystemExit("Can't use both table output and --json")
if self.human:
if self.json:
raise SystemExit("Can't use both table output and --human")
if self.raw:
if self.table_output:
raise SystemExit("Table output requires decode; --raw not allowed")
if self.json or self.human:
if self.full_json or self.json:
raise SystemExit("Decode options require decode; --raw not allowed")
@@ -183,7 +187,7 @@ class _Context:
num_responses = 0
num_entries = 0
ldif_output = not (self.arguments.json or self.arguments.human)
ldif_output = not (self.arguments.full_json or self.arguments.json)
if ldif_output:
print("# extended LDIF")
@@ -214,11 +218,11 @@ class _Context:
num_entries += 1
if ldif_output:
decoder.read_and_emit_ldif(dn=dn, entry=entry, file=stream)
elif self.arguments.human:
decoder.read_and_emit_human(dn=dn, entry=entry, file=stream)
elif self.arguments.json:
decoder.read_and_emit_simple_json(dn=dn, entry=entry, file=stream)
else:
assert self.arguments.json
decoder.read_and_emit_json(dn=dn, entry=entry, file=stream)
assert self.arguments.full_json
decoder.read_and_emit_full_json(dn=dn, entry=entry, file=stream)
except SizeLimitExceeded as e:
raise SystemExit(f"Error: {e}")


@@ -192,30 +192,30 @@ class Decoder:
def human(self, *, dn: str, obj: TDecoded) -> dict[str, str]:
emit: dict[str, typing.Any] = dict(dn=dn)
for name, attrs in obj.items():
emit[name] = self.arguments.human_separator.join(attr.human() for attr in attrs)
emit[name] = [attr.human() for attr in attrs]
return emit
def emit_human(self, *, dn: str, obj: TDecoded, file: typing.IO[str] = sys.stdout) -> None:
def emit_simple_json(self, *, dn: str, obj: TDecoded, file: typing.IO[str] = sys.stdout) -> None:
emit = self.human(dn=dn, obj=obj)
json.dump(emit, file, ensure_ascii=False)
print(file=file) # terminate output dicts by newline
def read_and_emit_human(self, *, dn: str, entry: TEntry, file: typing.IO[str] = sys.stdout) -> None:
self.emit_human(dn=dn, obj=self.read(dn=dn, entry=entry), file=file)
def read_and_emit_simple_json(self, *, dn: str, entry: TEntry, file: typing.IO[str] = sys.stdout) -> None:
self.emit_simple_json(dn=dn, obj=self.read(dn=dn, entry=entry), file=file)
def json(self, *, dn: str, obj: TDecoded) -> dict[str, str]:
def full_json(self, *, dn: str, obj: TDecoded) -> dict[str, str]:
emit: dict[str, typing.Any] = dict(dn=dn)
for name, attrs in obj.items():
emit[name] = [attr.to_json() for attr in attrs]
return emit
def emit_json(self, *, dn: str, obj: TDecoded, file: typing.IO[str] = sys.stdout) -> None:
emit = self.json(dn=dn, obj=obj)
def emit_full_json(self, *, dn: str, obj: TDecoded, file: typing.IO[str] = sys.stdout) -> None:
emit = self.full_json(dn=dn, obj=obj)
json.dump(emit, file, ensure_ascii=False)
print(file=file) # terminate output dicts by newline
def read_and_emit_json(self, *, dn: str, entry: TEntry, file: typing.IO[str] = sys.stdout) -> None:
self.emit_json(dn=dn, obj=self.read(dn=dn, entry=entry), file=file)
def read_and_emit_full_json(self, *, dn: str, entry: TEntry, file: typing.IO[str] = sys.stdout) -> None:
self.emit_full_json(dn=dn, obj=self.read(dn=dn, entry=entry), file=file)
def emit_ldif(self, *, dn: str, obj: TDecoded, file: typing.IO[str] = sys.stdout) -> None:
print(f"dn: {dn}", file=file)