diff --git a/mosscli.py b/mosscli.py
new file mode 100644
index 0000000..7ee7b11
--- /dev/null
+++ b/mosscli.py
@@ -0,0 +1,49 @@
+import click
+import logging
+import mosspy
+
+from configparser import ConfigParser
+
+@click.command()
+@click.option("--base", type=click.Path(exists=True, readable=True), multiple=True)
+@click.option("--report", type=click.Path(exists=False, writable=True))
+@click.option("--download", type=click.Path(exists=False, writable=True))
+@click.argument("language", type=click.Choice(list(mosspy.Moss.languages)))
+@click.argument("files", nargs=-1)
+def moss(base, report, download, language, files):
+    config_ini = click.get_app_dir("moss.ini")
+    parser = ConfigParser()
+    parser.read([config_ini])
+    if "SERVER" not in parser or "userid" not in parser["SERVER"]:
+        click.echo(click.style(f"missing userid SERVER section in {config_ini}", fg='red'))
+        exit(2)
+
+    m = mosspy.Moss(parser["SERVER"]["userid"], language)
+    for file in files:
+        m.addFilesByWildcard(file)
+
+    for b in base:
+        m.addBaseFile(b)
+
+    with click.progressbar(length=len(m.files), label="uploading") as bar:
+        url = m.send(lambda path, name: bar.update(1, name))
+
+    print(url)
+
+    if report:
+        m.saveWebPage(url, report)
+
+    if download:
+        with click.progressbar(length=10101010101010101010, label="downloading") as bar:
+            left_max = 10
+            def update_bar(left):
+                nonlocal left_max
+                if left > left_max:
+                    left_max = left
+                bar.length = left_max
+                bar.update(1)
+            mosspy.download_report(url, download, connections=8, log_level=logging.INFO,
+                on_read2=lambda u, left: update_bar(left))
+
+if __name__ == '__main__':
+    moss()
diff --git a/mosspy/download_report.py b/mosspy/download_report.py
index 5040ee1..5fd5451 100644
--- a/mosspy/download_report.py
+++ b/mosspy/download_report.py
@@ -6,13 +6,14 @@
 except ImportError:
     from urllib2 import urlopen
 
-def process_url(url, urls, base_url, path, on_read):
+def process_url(url, urls, base_url, path, on_read, on_read2):
     from bs4 import BeautifulSoup # Backward compability, don't break Moss when bs4 not available.
     logging.debug ("Processing URL: " + url)
     response = urlopen(url)
     html = response.read()
     on_read(url)
+    on_read2(url, len(urls))
 
     soup = BeautifulSoup(html, 'lxml')
     file_name = os.path.basename(url)
 
@@ -50,7 +51,7 @@
         f.write(soup.encode(soup.original_encoding))
         f.close()
 
-def download_report(url, path, connections = 4, log_level=logging.DEBUG, on_read=lambda url: None):
+def download_report(url, path, connections = 4, log_level=logging.DEBUG, on_read=lambda url: None, on_read2=lambda url, left: None):
     logging.basicConfig(level=log_level)
 
     if len(url) == 0:
@@ -69,7 +70,7 @@
 
     # Handling thread
     for url in urls:
-        t = Thread(target=process_url, args=[url, urls, base_url, path, on_read])
+        t = Thread(target=process_url, args=[url, urls, base_url, path, on_read, on_read2])
         t.start()
         threads.append(t)
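
Two usage notes on the diff above. First, mosscli.py reads the MOSS user ID from a moss.ini located via click.get_app_dir("moss.ini") (for example ~/.config/moss.ini on Linux; the exact path is platform-dependent), and it expects a [SERVER] section with a userid key. A minimal sketch of such a file with a placeholder user ID, plus one possible invocation (the file paths and chosen language are illustrative):

    [SERVER]
    userid = 123456789

    python mosscli.py --base starter.py --report report.html --download report python "submissions/*.py"

Second, the on_read2 hook added to download_report is called with the URL just fetched and the current length of the urls list inside process_url; the CLI above uses that value as a growing upper bound for its download progress bar. A short standalone sketch of the same hook, assuming a placeholder report URL:

    import logging
    import mosspy

    # Placeholder report URL; a real one comes from Moss.send().
    report_url = "http://moss.stanford.edu/results/123456789/example"

    def on_read2(url, left):
        # `left` is len(urls) at the moment the page was read, i.e. how many
        # report pages the downloader currently knows about.
        print(f"fetched {url}; {left} pages known so far")

    mosspy.download_report(report_url, "report", connections=8,
                           log_level=logging.INFO, on_read2=on_read2)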