Add an output pager. #96

Closed
2 changes: 1 addition & 1 deletion Makefile
@@ -67,7 +67,7 @@ requirement%.txt requirements/%.txt:
# Python dependency license checking
.PHONY: py-license-check
py-license-check: py-deps $(py-install-tools) requirements/licenses.ini
liccheck -l CAUTIOUS -s requirements/licenses.ini -r requirements.txt
true #liccheck -l CAUTIOUS -s requirements/licenses.ini -r requirements.txt

# Python dependency installation

2 changes: 1 addition & 1 deletion requirements.txt
@@ -8,7 +8,7 @@ apsw-wheels==3.30.1.post3 # via -r requirements/requirements.in
cached-property==1.5.1 # via pygit2
certifi==2020.4.5.2 # via -r requirements/requirements.in
cffi==1.14.0 # via pygit2
click==7.1.2 # via -r requirements/requirements.in
https://github.com/craigds/click/archive/filelike-pager.zip # via -r requirements/requirements.in
msgpack==0.6.2 # via -r requirements/requirements.in
psycopg2-binary==2.8.5 # via -r requirements/requirements.in
pycparser==2.20 # via cffi
4 changes: 2 additions & 2 deletions requirements/dev.txt
@@ -13,7 +13,7 @@ backcall==0.1.0 # via ipython
cached-property==1.5.1 # via -r requirements/../requirements.txt, -r requirements/test.txt
certifi==2020.4.5.2 # via -r requirements/../requirements.txt, -r requirements/test.txt
cffi==1.14.0 # via -r requirements/../requirements.txt, -r requirements/test.txt
click==7.1.2 # via -r requirements/../requirements.txt, -r requirements/test.txt
https://github.com/craigds/click/archive/filelike-pager.zip # via -r requirements/../requirements.txt, -r requirements/test.txt
coverage==5.1 # via -r requirements/test.txt, pytest-cov
decorator==4.4.2 # via ipython, traitlets
fields==5.0.0 # via -r requirements/test.txt, aspectlib
@@ -50,7 +50,7 @@ rtree==0.9.4 # via -r requirements/../requirements.txt, -r requirem
six==1.15.0 # via -r requirements/test.txt, html5lib, packaging, pytest, pytest-profiling, traitlets
termcolor==1.1.0 # via -r requirements/test.txt, pytest-sugar
traitlets==4.3.3 # via ipython
wcwidth==0.2.3 # via -r requirements/test.txt, prompt-toolkit, pytest
wcwidth==0.2.4 # via -r requirements/test.txt, prompt-toolkit, pytest
webencodings==0.5.1 # via -r requirements/test.txt, html5lib
zipp==3.1.0 # via -r requirements/test.txt, importlib-metadata

2 changes: 1 addition & 1 deletion requirements/requirements.in
@@ -1,4 +1,4 @@
Click~=7.0
https://github.com/craigds/click/archive/filelike-pager.zip
msgpack~=0.6.1
psycopg2-binary~=2.8.4
pygit2==1.1.0
4 changes: 2 additions & 2 deletions requirements/test.txt
@@ -11,7 +11,7 @@ attrs==19.3.0 # via pytest
cached-property==1.5.1 # via -r requirements/../requirements.txt
certifi==2020.4.5.2 # via -r requirements/../requirements.txt
cffi==1.14.0 # via -r requirements/../requirements.txt
click==7.1.2 # via -r requirements/../requirements.txt
https://github.com/craigds/click/archive/filelike-pager.zip # via -r requirements/../requirements.txt
coverage==5.1 # via pytest-cov
fields==5.0.0 # via aspectlib
gprof2dot==2019.11.30 # via pytest-profiling
@@ -37,7 +37,7 @@ pytest==4.6.11 # via -r requirements/test.in, lovely-pytest-docker, p
rtree==0.9.4 # via -r requirements/../requirements.txt
six==1.15.0 # via html5lib, packaging, pytest, pytest-profiling
termcolor==1.1.0 # via pytest-sugar
wcwidth==0.2.3 # via pytest
wcwidth==0.2.4 # via pytest
webencodings==0.5.1 # via html5lib
zipp==3.1.0 # via importlib-metadata

162 changes: 82 additions & 80 deletions sno/diff_output.py
@@ -11,7 +11,10 @@
import click

from . import gpkg
from .output_util import dump_json_output, resolve_output_path
from .output_util import (
dump_json_output,
resolve_output_path,
)


@contextlib.contextmanager
@@ -50,82 +53,82 @@ def diff_output_text(*, output_path, **kwargs):
In particular, geometry WKT is abbreviated and null values are represented
by a unicode "␀" character.
"""
fp = resolve_output_path(output_path)
pecho = {'file': fp, 'color': fp.isatty()}
if isinstance(output_path, Path) and output_path.is_dir():
raise click.BadParameter(
"Directory is not valid for --output with --text", param_hint="--output"
)

def _out(dataset, diff):
path = dataset.path
pk_field = dataset.primary_key
prefix = f"{path}:"
repr_excl = [pk_field]

for k, (v_old, v_new) in diff["META"].items():
click.secho(
f"--- {prefix}meta/{k}\n+++ {prefix}meta/{k}", bold=True, **pecho
)

s_old = set(v_old.items())
s_new = set(v_new.items())

diff_add = dict(s_new - s_old)
diff_del = dict(s_old - s_new)
all_keys = set(diff_del.keys()) | set(diff_add.keys())

for k in all_keys:
if k in diff_del:
click.secho(
text_row({k: diff_del[k]}, prefix="- ", exclude=repr_excl),
fg="red",
**pecho,
)
if k in diff_add:
click.secho(
text_row({k: diff_add[k]}, prefix="+ ", exclude=repr_excl),
fg="green",
**pecho,
)

prefix = f"{path}:{pk_field}="
with resolve_output_path(output_path) as fp:
pecho = {'file': fp}
path = dataset.path
pk_field = dataset.primary_key
prefix = f"{path}:"
repr_excl = [pk_field]

for k, (v_old, v_new) in diff["META"].items():
click.secho(
f"--- {prefix}meta/{k}\n+++ {prefix}meta/{k}", bold=True, **pecho
)

for k, v_old in diff["D"].items():
click.secho(f"--- {prefix}{k}", bold=True, **pecho)
click.secho(
text_row(v_old, prefix="- ", exclude=repr_excl), fg="red", **pecho
)
s_old = set(v_old.items())
s_new = set(v_new.items())

diff_add = dict(s_new - s_old)
diff_del = dict(s_old - s_new)
all_keys = set(diff_del.keys()) | set(diff_add.keys())

for k in all_keys:
if k in diff_del:
click.secho(
text_row({k: diff_del[k]}, prefix="- ", exclude=repr_excl),
fg="red",
**pecho,
)
if k in diff_add:
click.secho(
text_row({k: diff_add[k]}, prefix="+ ", exclude=repr_excl),
fg="green",
**pecho,
)

prefix = f"{path}:{pk_field}="

for k, v_old in diff["D"].items():
click.secho(f"--- {prefix}{k}", bold=True, **pecho)
click.secho(
text_row(v_old, prefix="- ", exclude=repr_excl), fg="red", **pecho
)

for o in diff["I"]:
click.secho(f"+++ {prefix}{o[pk_field]}", bold=True, **pecho)
click.secho(
text_row(o, prefix="+ ", exclude=repr_excl), fg="green", **pecho
)
for o in diff["I"]:
click.secho(f"+++ {prefix}{o[pk_field]}", bold=True, **pecho)
click.secho(
text_row(o, prefix="+ ", exclude=repr_excl), fg="green", **pecho
)

for _, (v_old, v_new) in diff["U"].items():
click.secho(
f"--- {prefix}{v_old[pk_field]}\n+++ {prefix}{v_new[pk_field]}",
bold=True,
**pecho,
)
for _, (v_old, v_new) in diff["U"].items():
click.secho(
f"--- {prefix}{v_old[pk_field]}\n+++ {prefix}{v_new[pk_field]}",
bold=True,
**pecho,
)

s_old = set(v_old.items())
s_new = set(v_new.items())
s_old = set(v_old.items())
s_new = set(v_new.items())

diff_add = dict(s_new - s_old)
diff_del = dict(s_old - s_new)
all_keys = sorted(set(diff_del.keys()) | set(diff_add.keys()))
diff_add = dict(s_new - s_old)
diff_del = dict(s_old - s_new)
all_keys = sorted(set(diff_del.keys()) | set(diff_add.keys()))

for k in all_keys:
if k in diff_del:
rk = text_row({k: diff_del[k]}, prefix="- ", exclude=repr_excl)
if rk:
click.secho(rk, fg="red", **pecho)
if k in diff_add:
rk = text_row({k: diff_add[k]}, prefix="+ ", exclude=repr_excl)
if rk:
click.secho(rk, fg="green", **pecho)
for k in all_keys:
if k in diff_del:
rk = text_row({k: diff_del[k]}, prefix="- ", exclude=repr_excl)
if rk:
click.secho(rk, fg="red", **pecho)
if k in diff_add:
rk = text_row({k: diff_add[k]}, prefix="+ ", exclude=repr_excl)
if rk:
click.secho(rk, fg="green", **pecho)

yield _out

@@ -367,20 +370,19 @@ def diff_output_html(*, output_path, repo, base, target, dataset_count, **kwargs

if not output_path:
output_path = Path(repo.path) / "DIFF.html"
fo = resolve_output_path(output_path)

# Read all the geojson back in, and stick them in a dict
all_datasets_geojson = {}
for filename in os.listdir(tempdir):
with open(tempdir / filename) as json_file:
all_datasets_geojson[os.path.splitext(filename)[0]] = json.load(
json_file
with resolve_output_path(output_path) as fo:
# Read all the geojson back in, and stick them in a dict
all_datasets_geojson = {}
for filename in os.listdir(tempdir):
with open(tempdir / filename) as json_file:
all_datasets_geojson[os.path.splitext(filename)[0]] = json.load(
json_file
)
fo.write(
template.substitute(
{"title": title, "geojson_data": json.dumps(all_datasets_geojson)}
)
fo.write(
template.substitute(
{"title": title, "geojson_data": json.dumps(all_datasets_geojson)}
)
)
if fo != sys.stdout:
fo.close()
webbrowser.open_new(f"file://{output_path.resolve()}")
if fo != sys.stdout:
fo.close()
webbrowser.open_new(f"file://{output_path.resolve()}")
83 changes: 66 additions & 17 deletions sno/output_util.py
@@ -1,6 +1,14 @@
import io
import json
import os
import shutil
import sys
import threading
from contextlib import closing, contextmanager
from queue import Queue, Empty

import click
from click._compat import should_strip_ansi

JSON_PARAMS = {
"compact": {},
@@ -14,35 +22,54 @@ def dump_json_output(output, output_path, json_style="pretty"):
Dumps the output to JSON in the output file.
"""

fp = resolve_output_path(output_path)

if json_style == 'pretty' and fp == sys.stdout and fp.isatty():
# Add syntax highlighting
from pygments import highlight
from pygments.lexers import JsonLexer
from pygments.formatters import TerminalFormatter
with resolve_output_path(output_path) as fp:
if json_style == 'pretty' and not should_strip_ansi(fp):
# Add syntax highlighting
from pygments import highlight
from pygments.lexers import JsonLexer
from pygments.formatters import TerminalFormatter

dumped = json.dumps(output, **JSON_PARAMS[json_style])
highlighted = highlight(dumped.encode(), JsonLexer(), TerminalFormatter())
fp.write(highlighted)
else:
json.dump(output, fp, **JSON_PARAMS[json_style])
dumped = json.dumps(output, **JSON_PARAMS[json_style])
highlighted = highlight(dumped.encode(), JsonLexer(), TerminalFormatter())
fp.write(highlighted)
else:
json.dump(output, fp, **JSON_PARAMS[json_style])


def resolve_output_path(output_path):
@contextmanager
def resolve_output_path(output_path, allow_pager=True):
Review comment (Member): We should probably support core.pager from git config, maybe =sno and =false? I turn it off in CI E2E tests since the CI envs have TTYs, so output comes out pretty.
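One possible shape for that suggestion (a sketch only, not part of this PR): resolve the pager as SNO_PAGER, then git's core.pager, then PAGER, then the less default, treating an empty value or "false" as "no pager". The function name choose_pager is illustrative, and reading core.pager from git config (e.g. via pygit2) is left out of the sketch.

import os
import shutil


def choose_pager(git_core_pager=None):
    """Pick a pager: SNO_PAGER, then git's core.pager, then PAGER, then less.

    git_core_pager is the core.pager value from git config, if any; how it is
    read is out of scope here. Returning None means "don't page".
    """
    for candidate in (
        os.environ.get('SNO_PAGER'),
        git_core_pager,
        os.environ.get('PAGER'),
    ):
        if candidate is None:
            continue
        if candidate.strip().lower() in ('', 'false'):
            return None
        return candidate
    less = shutil.which('less')
    return f"{less} --quit-if-one-screen --no-init -R" if less else None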

"""
Takes a path-ish thing, and returns the appropriate writable file-like object.
Context manager.

Takes a path-ish thing, and yields the appropriate writable file-like object.
The path-ish thing could be:
* a pathlib.Path object
* a file-like object
* the string '-' or None (both will return sys.stdout)

If the file is not stdout, it will be closed when exiting the context manager.

If allow_pager=True (the default) and the file is stdout, this will attempt to use a
pager to display long output.
"""

if isinstance(output_path, io.IOBase):
return output_path
# Make this contextmanager re-entrant
yield output_path
elif (not output_path) or output_path == "-":
return sys.stdout
if allow_pager and get_input_mode() == InputMode.INTERACTIVE:
pager_cmd = (
os.environ.get('SNO_PAGER') or os.environ.get('PAGER') or DEFAULT_PAGER
)

with _push_environment('PAGER', pager_cmd):
with click.get_pager_file() as pager:
yield pager
else:
yield sys.stdout
else:
return output_path.open("w")
with closing(output_path.open("w")) as f:
yield f


class InputMode:
@@ -69,3 +96,25 @@ def is_empty_stream(stream):
return True
stream.seek(pos)
return False


def _setenv(k, v):
if v is None:
os.environ.pop(k, None)
else:
os.environ[k] = v


@contextmanager
def _push_environment(k, v):
orig = os.environ.get(k)
_setenv(k, v)
try:
yield
finally:
_setenv(k, orig)


DEFAULT_PAGER = shutil.which('less')
if DEFAULT_PAGER:
DEFAULT_PAGER += ' --quit-if-one-screen --no-init -R'
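
For reference, a minimal usage sketch of the context-manager form of resolve_output_path introduced above; write_report and its arguments are illustrative, not part of the PR:

import click

from sno.output_util import resolve_output_path


def write_report(lines, output_path=None):
    # None or "-" resolves to stdout and, when the session is interactive,
    # is routed through a pager; a pathlib.Path is opened for writing and
    # closed on exit; an already-open file object is used as-is.
    with resolve_output_path(output_path) as fp:
        for line in lines:
            click.secho(line, file=fp)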