From 12d2b5626c596fa9b391c654e1b8e5ce5aae4e73 Mon Sep 17 00:00:00 2001
From: Justin Ross
Date: Wed, 14 Feb 2024 13:27:51 -0500
Subject: [PATCH] Skewer update (missing pieces)

---
 .../skewer/example/python/plano/__init__.py | 24 +
 .../python/plano/_testproject/.plano.py | 112 ++
 .../_testproject/src/chucker/__init__.py | 0
 .../_testproject/src/chucker/moretests.py | 24 +
 .../plano/_testproject/src/chucker/tests.py | 70 +
 .../skewer/example/python/plano/_tests.py | 1338 +++++++++++++
 .../skewer/example/python/plano/command.py | 511 +++++
 .../skewer/example/python/plano/github.py | 80 +
 external/skewer/example/python/plano/main.py | 1772 +++++++++++++++++
 external/skewer/example/python/plano/test.py | 428 ++++
 .../skewer/example/python/skewer/__init__.py | 20 +
 external/skewer/example/python/skewer/main.py | 731 +++++++
 .../example/python/skewer/planocommands.py | 91 +
 .../example/python/skewer/standardsteps.yaml | 295 +++
 .../example/python/skewer/standardtext.yaml | 49 +
 .../skewer/example/python/skewer/tests.py | 67 +
 external/skewer/python/plano/__init__.py | 24 +
 .../python/plano/_testproject/.plano.py | 112 ++
 .../_testproject/src/chucker/__init__.py | 0
 .../_testproject/src/chucker/moretests.py | 24 +
 .../plano/_testproject/src/chucker/tests.py | 70 +
 external/skewer/python/plano/_tests.py | 1338 +++++++++++++
 external/skewer/python/plano/command.py | 511 +++++
 external/skewer/python/plano/github.py | 80 +
 external/skewer/python/plano/main.py | 1772 +++++++++++++++++
 external/skewer/python/plano/test.py | 428 ++++
 26 files changed, 9971 insertions(+)
 create mode 100644 external/skewer/example/python/plano/__init__.py
 create mode 100644 external/skewer/example/python/plano/_testproject/.plano.py
 create mode 100644 external/skewer/example/python/plano/_testproject/src/chucker/__init__.py
 create mode 100644 external/skewer/example/python/plano/_testproject/src/chucker/moretests.py
 create mode 100644 external/skewer/example/python/plano/_testproject/src/chucker/tests.py
 create mode 100644 external/skewer/example/python/plano/_tests.py
 create mode 100644 external/skewer/example/python/plano/command.py
 create mode 100644 external/skewer/example/python/plano/github.py
 create mode 100644 external/skewer/example/python/plano/main.py
 create mode 100644 external/skewer/example/python/plano/test.py
 create mode 100644 external/skewer/example/python/skewer/__init__.py
 create mode 100644 external/skewer/example/python/skewer/main.py
 create mode 100644 external/skewer/example/python/skewer/planocommands.py
 create mode 100644 external/skewer/example/python/skewer/standardsteps.yaml
 create mode 100644 external/skewer/example/python/skewer/standardtext.yaml
 create mode 100644 external/skewer/example/python/skewer/tests.py
 create mode 100644 external/skewer/python/plano/__init__.py
 create mode 100644 external/skewer/python/plano/_testproject/.plano.py
 create mode 100644 external/skewer/python/plano/_testproject/src/chucker/__init__.py
 create mode 100644 external/skewer/python/plano/_testproject/src/chucker/moretests.py
 create mode 100644 external/skewer/python/plano/_testproject/src/chucker/tests.py
 create mode 100644 external/skewer/python/plano/_tests.py
 create mode 100644 external/skewer/python/plano/command.py
 create mode 100644 external/skewer/python/plano/github.py
 create mode 100644 external/skewer/python/plano/main.py
 create mode 100644 external/skewer/python/plano/test.py

diff --git a/external/skewer/example/python/plano/__init__.py b/external/skewer/example/python/plano/__init__.py
new file mode
100644 index 0000000..3218323 --- /dev/null +++ b/external/skewer/example/python/plano/__init__.py @@ -0,0 +1,24 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +from .main import * +from .main import _default_sigterm_handler + +from .command import * +from .test import * diff --git a/external/skewer/example/python/plano/_testproject/.plano.py b/external/skewer/example/python/plano/_testproject/.plano.py new file mode 100644 index 0000000..8cda2e7 --- /dev/null +++ b/external/skewer/example/python/plano/_testproject/.plano.py @@ -0,0 +1,112 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +from plano import * + +@command +def base_command(alpha, beta, omega="x"): + """ + Base command help + """ + + print("base", alpha, beta, omega) + +@command(name="extended-command", parent=base_command) +def extended_command(alpha, beta, omega="y"): + print("extended", alpha, omega) + parent(alpha, beta, omega) + +@command(parameters=[CommandParameter("message_", help="The message to print", display_name="message"), + CommandParameter("count", help="Print the message COUNT times"), + CommandParameter("extra", default=1, short_option="e")]) +def echo(message_, count=1, extra=None, trouble=False, verbose=False): + """ + Print a message to the console + """ + + print("Echoing (message={}, count={})".format(message_, count)) + + if trouble: + raise Exception("Trouble") + + for i in range(count): + print(message_) + +@command +def echoecho(message): + echo(message) + +@command +def haberdash(first, *middle, last="bowler"): + """ + Habberdash command help + """ + + data = [first, *middle, last] + write_json("haberdash.json", data) + +@command(parameters=[CommandParameter("optional", positional=True)]) +def balderdash(required, optional="malarkey", other="rubbish", **extra_kwargs): + """ + Balderdash command help + """ + + data = [required, optional, other] + write_json("balderdash.json", data) + +@command +def splasher(): + write_json("splasher.json", [1]) + +@command +def dasher(alpha, beta=123): + pass + +@command(passthrough=True) +def dancer(gamma, omega="abc", passthrough_args=[]): + write_json("dancer.json", passthrough_args) + +# Vixen's parent calls prancer. We are testing to ensure the extended +# prancer (below) is executed. + +from plano._tests import prancer, vixen + +@command(parent=prancer) +def prancer(): + parent() + + notice("Extended prancer") + + write_json("prancer.json", True) + +@command(parent=vixen) +def vixen(): + parent() + +@command +def no_parent(): + parent() + +@command(parameters=[CommandParameter("spinach")]) +def feta(*args, **kwargs): + write_json("feta.json", kwargs["spinach"]) + +@command(hidden=True) +def invisible(something="nothing"): + write_json("invisible.json", something) diff --git a/external/skewer/example/python/plano/_testproject/src/chucker/__init__.py b/external/skewer/example/python/plano/_testproject/src/chucker/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/external/skewer/example/python/plano/_testproject/src/chucker/moretests.py b/external/skewer/example/python/plano/_testproject/src/chucker/moretests.py new file mode 100644 index 0000000..2607880 --- /dev/null +++ b/external/skewer/example/python/plano/_testproject/src/chucker/moretests.py @@ -0,0 +1,24 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +from plano import * + +@test +def hello_again(): + print("Hello again") diff --git a/external/skewer/example/python/plano/_testproject/src/chucker/tests.py b/external/skewer/example/python/plano/_testproject/src/chucker/tests.py new file mode 100644 index 0000000..4e0cec1 --- /dev/null +++ b/external/skewer/example/python/plano/_testproject/src/chucker/tests.py @@ -0,0 +1,70 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +from plano import * + +@test +def hello(): + print("Hello") + +@test +async def hello_async(): + print("Hello") + +@test +def goodbye(): + print("Goodbye") + +@test(disabled=True) +def badbye(): + print("Badbye") + assert False + +@test(disabled=True) +def skipped(): + skip_test("Skipped") + assert False + +@test(disabled=True) +def keyboard_interrupt(): + raise KeyboardInterrupt() + +@test(disabled=True, timeout=0.05) +def timeout(): + sleep(10, quiet=True) + assert False + +@test(disabled=True) +def process_error(): + run("expr 1 / 0") + +@test(disabled=True) +def system_exit_(): + exit(1) + +def test_widget(message): + print(message) + +for message in "hi", "lo", "in between": + add_test(f"message-{message}", test_widget, message) + +@test(disabled=True) +def badbye2(): + print("Badbye 2") + assert False diff --git a/external/skewer/example/python/plano/_tests.py b/external/skewer/example/python/plano/_tests.py new file mode 100644 index 0000000..159c739 --- /dev/null +++ b/external/skewer/example/python/plano/_tests.py @@ -0,0 +1,1338 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +import datetime as _datetime +import getpass as _getpass +import os as _os +import signal as _signal +import socket as _socket +import sys as _sys +import threading as _threading + +from .github import * + +try: + import http.server as _http +except ImportError: # pragma: nocover + import BaseHTTPServer as _http + +from .test import * + +test_project_dir = join(get_parent_dir(__file__), "_testproject") + +class test_project(working_dir): + def __enter__(self): + dir = super(test_project, self).__enter__() + copy(test_project_dir, ".", inside=False) + return dir + +TINY_INTERVAL = 0.05 + +@test +def archive_operations(): + with working_dir(): + make_dir("some-dir") + touch("some-dir/some-file") + + make_archive("some-dir") + assert is_file("some-dir.tar.gz"), list_dir() + + extract_archive("some-dir.tar.gz", output_dir="some-subdir") + assert is_dir("some-subdir/some-dir"), list_dir("some-subdir") + assert is_file("some-subdir/some-dir/some-file"), list_dir("some-subdir/some-dir") + + rename_archive("some-dir.tar.gz", "something-else") + assert is_file("something-else.tar.gz"), list_dir() + + extract_archive("something-else.tar.gz") + assert is_dir("something-else"), list_dir() + assert is_file("something-else/some-file"), list_dir("something-else") + +@test +def command_operations(): + class SomeCommand(BaseCommand): + def __init__(self): + super().__init__() + + self.parser = BaseArgumentParser() + self.parser.add_argument("--interrupt", action="store_true") + self.parser.add_argument("--explode", action="store_true") + self.parser.add_argument("--verbose", action="store_true") + self.parser.add_argument("--quiet", action="store_true") + + def parse_args(self, args): + return self.parser.parse_args(args) + + def init(self, args): + self.interrupt = args.interrupt + self.explode = args.explode + self.verbose = args.verbose + self.quiet = args.quiet + + def run(self): + if self.interrupt: + raise KeyboardInterrupt() + + if self.explode: + raise PlanoError("Exploded") + + if self.verbose: + print("Hello") + + SomeCommand().main([]) + SomeCommand().main(["--verbose"]) + SomeCommand().main(["--interrupt"]) + + with expect_system_exit(): + SomeCommand().main(["--verbose", "--explode"]) + +@test +def console_operations(): + eprint("Here's a story") + eprint("About a", "man named Brady") + + pprint(list_dir()) + pprint(PlanoProcess, 1, "abc", end="\n\n") + + flush() + + with console_color("red"): + print("ALERT") + + print(cformat("AMBER ALERT", color="yellow")) + print(cformat("NO ALERT")) + + cprint("CRITICAL ALERT", color="red", bright=True) + +@test +def dir_operations(): + with working_dir(): + test_dir = make_dir("some-dir") + test_file_1 = touch(join(test_dir, "some-file-1")) + test_file_2 = touch(join(test_dir, "some-file-2")) + + result = list_dir(test_dir) + assert join(test_dir, result[0]) == test_file_1, (join(test_dir, result[0]), test_file_1) + + result = list_dir(test_dir, "*-file-1") + assert result == ["some-file-1"], (result, ["some-file-1"]) + + result = list_dir(test_dir, exclude="*-file-1") + assert result == ["some-file-2"], (result, ["some-file-2"]) + + result = list_dir("some-dir", "*.not-there") + assert result == [], result + + with working_dir(): + result = list_dir() + assert result == [], result + + result = find(test_dir) + assert result == [test_file_1, test_file_2], (result, [test_file_1, test_file_2]) + + result = find(test_dir, include="*-file-1") + assert result == [test_file_1], (result, [test_file_1]) + + result = find(test_dir, exclude="*-file-1") + 
assert result == [test_file_2], (result, [test_file_2]) + + with working_dir(): + result = find() + assert result == [], result + + make_dir("subdir") + + result = find("./subdir") + assert result == [], result + + with working_dir(): + with working_dir("a-dir", quiet=True): + touch("a-file") + + curr_dir = get_current_dir() + prev_dir = change_dir("a-dir") + new_curr_dir = get_current_dir() + new_prev_dir = change_dir(curr_dir) + + assert curr_dir == prev_dir, (curr_dir, prev_dir) + assert new_curr_dir == new_prev_dir, (new_curr_dir, new_prev_dir) + +@test +def env_operations(): + result = join_path_var("a", "b", "c", "a") + assert result == _os.pathsep.join(("a", "b", "c")), result + + curr_dir = get_current_dir() + + with working_dir("."): + assert get_current_dir() == curr_dir, (get_current_dir(), curr_dir) + + result = get_home_dir() + assert result == _os.path.expanduser("~"), (result, _os.path.expanduser("~")) + + result = get_home_dir("alice") + assert result.endswith("alice"), result + + user = _getpass.getuser() + result = get_user() + assert result == user, (result, user) + + result = get_hostname() + assert result, result + + result = get_program_name() + assert result, result + + result = get_program_name("alpha beta") + assert result == "alpha", result + + result = get_program_name("X=Y alpha beta") + assert result == "alpha", result + + result = which("echo") + assert result, result + + with working_env(YES_I_AM_SET=1): + check_env("YES_I_AM_SET") + + with expect_error(): + check_env("NO_I_AM_NOT") + + with working_env(I_AM_SET_NOW=1, amend=False): + check_env("I_AM_SET_NOW") + assert "YES_I_AM_SET" not in ENV, ENV + + with working_env(SOME_VAR=1): + assert ENV["SOME_VAR"] == "1", ENV.get("SOME_VAR") + + with working_env(SOME_VAR=2): + assert ENV["SOME_VAR"] == "2", ENV.get("SOME_VAR") + + with expect_error(): + check_program("not-there") + + with expect_error(): + check_module("not_there") + + with expect_output(contains="ARGS:") as out: + with open(out, "w") as f: + print_env(file=f) + + print_stack() + +@test +def file_operations(): + with working_dir(): + alpha_dir = make_dir("alpha-dir") + alpha_file = touch(join(alpha_dir, "alpha-file")) + alpha_link = make_link(join(alpha_dir, "alpha-file-link"), "alpha-file") + alpha_broken_link = make_link(join(alpha_dir, "broken-link"), "no-such-file") + + beta_dir = make_dir("beta-dir") + beta_file = touch(join(beta_dir, "beta-file")) + beta_link = make_link(join(beta_dir, "beta-file-link"), "beta-file") + beta_broken_link = make_link(join(beta_dir, "broken-link"), join("..", alpha_dir, "no-such-file")) + beta_another_link = make_link(join(beta_dir, "broken-link"), join("..", alpha_dir, "alpha-file-link")) + + assert exists(beta_link) + assert exists(beta_file) + + with working_dir("beta-dir"): + assert is_file(read_link("beta-file-link")) + + copied_file = copy(alpha_file, beta_dir) + assert copied_file == join(beta_dir, "alpha-file"), copied_file + assert is_file(copied_file), list_dir(beta_dir) + + copied_link = copy(beta_link, join(beta_dir, "beta-file-link-copy")) + assert copied_link == join(beta_dir, "beta-file-link-copy"), copied_link + assert is_link(copied_link), list_dir(beta_dir) + + copied_dir = copy(alpha_dir, beta_dir) + assert copied_dir == join(beta_dir, "alpha-dir"), copied_dir + assert is_link(join(copied_dir, "alpha-file-link")) + + moved_file = move(beta_file, alpha_dir) + assert moved_file == join(alpha_dir, "beta-file"), moved_file + assert is_file(moved_file), list_dir(alpha_dir) + assert not 
exists(beta_file), list_dir(beta_dir) + + moved_dir = move(beta_dir, alpha_dir) + assert moved_dir == join(alpha_dir, "beta-dir"), moved_dir + assert is_dir(moved_dir), list_dir(alpha_dir) + assert not exists(beta_dir) + + gamma_dir = make_dir("gamma-dir") + gamma_file = touch(join(gamma_dir, "gamma-file")) + + delta_dir = make_dir("delta-dir") + delta_file = touch(join(delta_dir, "delta-file")) + + copy(gamma_dir, delta_dir, inside=False) + assert is_file(join("delta-dir", "gamma-file")) + + move(gamma_dir, delta_dir, inside=False) + assert is_file(join("delta-dir", "gamma-file")) + assert not exists(gamma_dir) + + epsilon_dir = make_dir("epsilon-dir") + epsilon_file_1 = touch(join(epsilon_dir, "epsilon-file-1")) + epsilon_file_2 = touch(join(epsilon_dir, "epsilon-file-2")) + epsilon_file_3 = touch(join(epsilon_dir, "epsilon-file-3")) + epsilon_file_4 = touch(join(epsilon_dir, "epsilon-file-4")) + + remove("not-there") + + remove(epsilon_file_2) + assert not exists(epsilon_file_2) + + remove(epsilon_dir) + assert not exists(epsilon_file_1) + assert not exists(epsilon_dir) + + remove([epsilon_file_3, epsilon_file_4]) + assert not exists(epsilon_file_3) + assert not exists(epsilon_file_4) + + file = write("xes", "x" * 10) + result = get_file_size(file) + assert result == 10, result + + zeta_dir = make_dir("zeta-dir") + zeta_file = touch(join(zeta_dir, "zeta-file")) + + eta_dir = make_dir("eta-dir") + eta_file = touch(join(eta_dir, "eta-file")) + + replace(zeta_dir, eta_dir) + assert not exists(zeta_file) + assert exists(zeta_dir) + assert is_file(join(zeta_dir, "eta-file")) + + with expect_exception(): + replace(zeta_dir, "not-there") + + assert exists(zeta_dir) + assert is_file(join(zeta_dir, "eta-file")) + + theta_file = write("theta-file", "theta") + iota_file = write("iota-file", "iota") + + replace(theta_file, iota_file) + assert not exists(iota_file) + assert read(theta_file) == "iota" + +@test +def github_operations(): + result = convert_github_markdown("# Hello, Fritz") + assert "Hello, Fritz" in result, result + + with working_dir(): + update_external_from_github("temp", "ssorj", "plano") + assert is_file("temp/Makefile"), list_dir("temp") + +@test +def http_operations(): + class Handler(_http.BaseHTTPRequestHandler): + def do_GET(self): + if not self.path.startswith("/api"): + self.send_response(404) + self.end_headers() + return + + self.send_response(200) + self.end_headers() + self.wfile.write(b"[1]") + + def do_POST(self): + length = int(self.headers["content-length"]) + content = self.rfile.read(length) + + self.send_response(200) + self.end_headers() + self.wfile.write(content) + + def do_PUT(self): + length = int(self.headers["content-length"]) + content = self.rfile.read(length) + + self.send_response(200) + self.end_headers() + + class ServerThread(_threading.Thread): + def __init__(self, server): + _threading.Thread.__init__(self) + self.server = server + + def run(self): + self.server.serve_forever() + + host, port = "localhost", get_random_port() + url = "http://{}:{}/api".format(host, port) + missing_url = "http://{}:{}/nono".format(host, port) + + try: + server = _http.HTTPServer((host, port), Handler) + except (OSError, PermissionError): # pragma: nocover + # Try one more time + port = get_random_port() + server = _http.HTTPServer((host, port), Handler) + + server_thread = ServerThread(server) + server_thread.start() + + try: + with working_dir(): + result = http_get(url) + assert result == "[1]", result + + with expect_error(): + http_get(missing_url) + + result = 
http_get(url, insecure=True) + assert result == "[1]", result + + result = http_get(url, user="fritz", password="secret") + assert result == "[1]", result + + result = http_get(url, output_file="a") + output = read("a") + assert result is None, result + assert output == "[1]", output + + result = http_get_json(url) + assert result == [1], result + + file_b = write("b", "[2]") + + result = http_post(url, read(file_b), insecure=True) + assert result == "[2]", result + + result = http_post(url, read(file_b), output_file="x") + output = read("x") + assert result is None, result + assert output == "[2]", output + + result = http_post_file(url, file_b) + assert result == "[2]", result + + result = http_post_json(url, parse_json(read(file_b))) + assert result == [2], result + + file_c = write("c", "[3]") + + result = http_put(url, read(file_c), insecure=True) + assert result is None, result + + result = http_put_file(url, file_c) + assert result is None, result + + result = http_put_json(url, parse_json(read(file_c))) + assert result is None, result + finally: + server.shutdown() + server.server_close() + server_thread.join() + +@test +def io_operations(): + with working_dir(): + input_ = "some-text\n" + file_a = write("a", input_) + output = read(file_a) + + assert input_ == output, (input_, output) + + pre_input = "pre-some-text\n" + post_input = "post-some-text\n" + + prepend(file_a, pre_input) + append(file_a, post_input) + + output = tail(file_a, 100) + tailed = tail(file_a, 1) + + assert output.startswith(pre_input), (output, pre_input) + assert output.endswith(post_input), (output, post_input) + assert tailed == post_input, (tailed, post_input) + + input_lines = [ + "alpha\n", + "beta\n", + "gamma\n", + "chi\n", + "psi\n", + "omega\n", + ] + + file_b = write_lines("b", input_lines) + output_lines = read_lines(file_b) + + assert input_lines == output_lines, (input_lines, output_lines) + + pre_lines = ["pre-alpha\n"] + post_lines = ["post-omega\n"] + + prepend_lines(file_b, pre_lines) + append_lines(file_b, post_lines) + + output_lines = tail_lines(file_b, 100) + tailed_lines = tail_lines(file_b, 1) + + assert output_lines[0] == pre_lines[0], (output_lines[0], pre_lines[0]) + assert output_lines[-1] == post_lines[0], (output_lines[-1], post_lines[0]) + assert tailed_lines[0] == post_lines[0], (tailed_lines[0], post_lines[0]) + + file_c = touch("c") + assert is_file(file_c), file_c + + file_d = write("d", "front@middle@@middle@back") + path = string_replace_file(file_d, "@middle@", "M", count=1) + result = read(path) + assert result == "frontM@middle@back", result + + file_e = write("e", "123") + file_f = write("f", "456") + path = concatenate("g", (file_e, "not-there", file_f)) + result = read(path) + assert result == "123456", result + +@test +def iterable_operations(): + result = unique([1, 1, 1, 2, 2, 3]) + assert result == [1, 2, 3], result + + result = skip([1, "", 2, None, 3]) + assert result == [1, 2, 3], result + + result = skip([1, "", 2, None, 3], 2) + assert result == [1, "", None, 3], result + +@test +def json_operations(): + with working_dir(): + input_data = { + "alpha": [1, 2, 3], + } + + file_a = write_json("a", input_data) + output_data = read_json(file_a) + + assert input_data == output_data, (input_data, output_data) + + json = read(file_a) + parsed_data = parse_json(json) + emitted_json = emit_json(input_data) + + assert input_data == parsed_data, (input_data, parsed_data) + assert json == emitted_json, (json, emitted_json) + + with expect_output(equals=emitted_json) as 
out: + with open(out, "w") as f: + print_json(input_data, file=f, end="") + +@test +def link_operations(): + with working_dir(): + make_dir("some-dir") + path = get_absolute_path(touch("some-dir/some-file")) + + with working_dir("another-dir"): + link = make_link("a-link", path) + linked_path = read_link(link) + assert linked_path.endswith(path), (linked_path, path) + +@test +def logging_operations(): + error("Error!") + warning("Warning!") + notice("Take a look!") + notice(123) + debug("By the way") + debug("abc{}{}{}", 1, 2, 3) + + with expect_exception(RuntimeError): + fail(RuntimeError("Error!")) + + with expect_error(): + fail("Error!") + + with expect_error(): + fail("Error! {}", "Let me elaborate") + + for level in ("debug", "notice", "warning", "error"): + with expect_output(contains="Hello") as out: + with logging_disabled(): + with logging_enabled(level=level, output=out): + log(level, "hello") + + with expect_output(equals="") as out: + with logging_enabled(output=out): + with logging_disabled(): + error("Yikes") + + with expect_output(contains="flipper") as out: + with logging_enabled(output=out): + with logging_context("flipper"): + notice("Whhat") + + with logging_context("bip"): + with logging_context("boop"): + error("It's alarming!") + +@test +def path_operations(): + abspath = _os.path.abspath + normpath = _os.path.normpath + + with working_dir("/"): + result = get_current_dir() + expect = abspath(_os.sep) + assert result == expect, (result, expect) + + path = "a/b/c" + result = get_absolute_path(path) + expect = join(get_current_dir(), path) + assert result == expect, (result, expect) + + path = "/x/y/z" + result = get_absolute_path(path) + expect = abspath(path) + assert result == expect, (result, expect) + + path = "/x/y/z" + assert is_absolute(path) + + path = "x/y/z" + assert not is_absolute(path) + + path = "a//b/../c/" + result = normalize_path(path) + expect = normpath("a/c") + assert result == expect, (result, expect) + + path = "/a/../c" + result = get_real_path(path) + expect = abspath("/c") + assert result == expect, (result, expect) + + path = abspath("/a/b") + result = get_relative_path(path, "/a/c") + expect = normpath("../b") + assert result == expect, (result, expect) + + path = abspath("/a/b") + result = get_file_url(path) + expect = "file:{}".format(path) + assert result == expect, (result, expect) + + with working_dir(): + result = get_file_url("afile") + expect = join(get_file_url(get_current_dir()), "afile") + assert result == expect, (result, expect) + + path = "/alpha/beta.ext" + path_split = "/alpha", "beta.ext" + path_split_extension = "/alpha/beta", ".ext" + name_split_extension = "beta", ".ext" + + result = join(*path_split) + expect = normpath(path) + assert result == expect, (result, expect) + + result = split(path) + expect = normpath(path_split[0]), normpath(path_split[1]) + assert result == expect, (result, expect) + + result = split_extension(path) + expect = normpath(path_split_extension[0]), normpath(path_split_extension[1]) + assert result == expect, (result, expect) + + result = get_parent_dir(path) + expect = normpath(path_split[0]) + assert result == expect, (result, expect) + + result = get_base_name(path) + expect = normpath(path_split[1]) + assert result == expect, (result, expect) + + result = get_name_stem(path) + expect = normpath(name_split_extension[0]) + assert result == expect, (result, expect) + + result = get_name_stem("alpha.tar.gz") + expect = "alpha" + assert result == expect, (result, expect) + + result = 
get_name_extension(path) + expect = normpath(name_split_extension[1]) + assert result == expect, (result, expect) + + with working_dir(): + touch("adir/afile") + + check_exists("adir") + check_exists("adir/afile") + check_dir("adir") + check_file("adir/afile") + + with expect_error(): + check_exists("adir/notafile") + + with expect_error(): + check_file("adir/notafile") + + with expect_error(): + check_file("adir") + + with expect_error(): + check_dir("not-there") + + with expect_error(): + check_dir("adir/afile") + + await_exists("adir/afile") + + if not WINDOWS: + with expect_timeout(): + await_exists("adir/notafile", timeout=TINY_INTERVAL) + +@test +def port_operations(): + result = get_random_port() + assert result >= 49152 and result <= 65535, result + + server_port = get_random_port() + server_socket = _socket.socket(_socket.AF_INET, _socket.SOCK_STREAM) + + try: + try: + server_socket.bind(("localhost", server_port)) + except (OSError, PermissionError): # pragma: nocover + # Try one more time + server_port = get_random_port() + server_socket.bind(("localhost", server_port)) + + server_socket.listen(5) + + await_port(server_port) + await_port(str(server_port)) + + check_port(server_port) + + # Non-Linux platforms don't seem to produce the expected + # error. + if LINUX: + with expect_error(): + get_random_port(min=server_port, max=server_port) + finally: + server_socket.close() + + if not WINDOWS: + with expect_timeout(): + await_port(get_random_port(), timeout=TINY_INTERVAL) + +@test +def process_operations(): + result = get_process_id() + assert result, result + + proc = run("date") + assert proc is not None, proc + + print(repr(proc)) + + run("date", stash=True) + + run(["echo", 1, 2, 3]) + run(["echo", 1, 2, 3], shell=True) + + proc = run(["echo", "hello"], check=False) + assert proc.exit_code == 0, proc.exit_code + + proc = run("cat /uh/uh", check=False) + assert proc.exit_code > 0, proc.exit_code + + with expect_output() as out: + run("date", output=out) + + run("date", output=DEVNULL) + run("date", stdin=DEVNULL) + run("date", stdout=DEVNULL) + run("date", stderr=DEVNULL) + + run("echo hello", quiet=True) + run("echo hello | cat", shell=True) + run(["echo", "hello"], shell=True) + + with expect_error(): + run("/not/there") + + with expect_error(): + run("cat /whoa/not/really", stash=True) + + result = call("echo hello").strip() + expect = "hello" + assert result == expect, (result, expect) + + result = call("echo hello | cat", shell=True).strip() + expect = "hello" + assert result == expect, (result, expect) + + with expect_error(): + call("cat /whoa/not/really") + + proc = start("sleep 10") + + if not WINDOWS: + with expect_timeout(): + wait(proc, timeout=TINY_INTERVAL) + + proc = start("echo hello") + sleep(TINY_INTERVAL) + stop(proc) + + proc = start("sleep 10") + stop(proc) + + proc = start("sleep 10") + kill(proc) + sleep(TINY_INTERVAL) + stop(proc) + + proc = start("date --not-there") + sleep(TINY_INTERVAL) + stop(proc) + + with start("sleep 10"): + sleep(TINY_INTERVAL) + + with working_dir(): + touch("i") + + with start("date", stdin="i", stdout="o", stderr="e"): + pass + + with expect_system_exit(): + exit() + + with expect_system_exit(): + exit(verbose=True) + + with expect_system_exit(): + exit("abc") + + with expect_system_exit(): + exit("abc", verbose=True) + + with expect_system_exit(): + exit(Exception()) + + with expect_system_exit(): + exit(Exception(), verbose=True) + + with expect_system_exit(): + exit(123) + + with expect_system_exit(): + exit(123, 
verbose=True) + + with expect_system_exit(): + exit(-123) + + with expect_exception(PlanoException): + exit(object()) + +@test +def string_operations(): + result = string_replace("ab", "a", "b") + assert result == "bb", result + + result = string_replace("aba", "a", "b", count=1) + assert result == "bba", result + + result = remove_prefix(None, "xxx") + assert result == "", result + + result = remove_prefix("anterior", "ant") + assert result == "erior", result + + result = remove_prefix("anterior", "ext") + assert result == "anterior", result + + result = remove_suffix(None, "xxx") + assert result == "", result + + result = remove_suffix("exterior", "ior") + assert result == "exter", result + + result = remove_suffix("exterior", "nal") + assert result == "exterior" + + result = shorten("abc", 2) + assert result == "ab", result + + result = shorten("abc", None) + assert result == "abc", result + + result = shorten("abc", 10) + assert result == "abc", result + + result = shorten("ellipsis", 6, ellipsis="...") + assert result == "ell...", result + + result = shorten(None, 6) + assert result == "", result + + result = plural(None) + assert result == "", result + + result = plural("") + assert result == "", result + + result = plural("test") + assert result == "tests", result + + result = plural("test", 1) + assert result == "test", result + + result = plural("bus") + assert result == "busses", result + + result = plural("bus", 1) + assert result == "bus", result + + result = plural("terminus", 2, "termini") + assert result == "termini", result + + result = capitalize(None) + assert result == "", result + + result = capitalize("") + assert result == "", result + + result = capitalize("hello, Frank") + assert result == "Hello, Frank", result + + encoded_result = base64_encode(b"abc") + decoded_result = base64_decode(encoded_result) + assert decoded_result == b"abc", decoded_result + + encoded_result = url_encode("abc=123&yeah!") + decoded_result = url_decode(encoded_result) + assert decoded_result == "abc=123&yeah!", decoded_result + + result = parse_url("http://example.net/index.html") + assert result.hostname == "example.net" + +@test +def temp_operations(): + system_temp_dir = get_system_temp_dir() + + result = make_temp_file() + assert result.startswith(system_temp_dir), result + + result = make_temp_file(suffix=".txt") + assert result.endswith(".txt"), result + + result = make_temp_dir() + assert result.startswith(system_temp_dir), result + + with temp_dir() as d: + assert is_dir(d), d + list_dir(d) + + with temp_file() as f: + assert is_file(f), f + write(f, "test") + + with working_dir() as d: + assert is_dir(d), d + list_dir(d) + + user_temp_dir = get_user_temp_dir() + assert user_temp_dir, user_temp_dir + + ENV.pop("XDG_RUNTIME_DIR", None) + + user_temp_dir = get_user_temp_dir() + assert user_temp_dir, user_temp_dir + +@test +def test_operations(): + with test_project(): + with working_module_path("src"): + import chucker + import chucker.tests + import chucker.moretests + + print_tests(chucker.tests) + + for verbose in (False, True): + # Module 'chucker' has no tests + with expect_error(): + run_tests(chucker, verbose=verbose) + + run_tests(chucker.tests, verbose=verbose) + run_tests(chucker.tests, exclude="*hello*", verbose=verbose) + run_tests(chucker.tests, enable="skipped", verbose=verbose) + + with expect_error(): + run_tests(chucker.tests, enable="skipped", unskip="*skipped*", verbose=verbose) + + with expect_error(): + run_tests(chucker.tests, enable="*badbye*", verbose=verbose) 
+ + with expect_error(): + run_tests(chucker.tests, enable="*badbye*", fail_fast=True, verbose=verbose) + + with expect_error(): + run_tests([chucker.tests, chucker.moretests], enable="*badbye2*", fail_fast=True, verbose=verbose) + + with expect_exception(KeyboardInterrupt): + run_tests(chucker.tests, enable="keyboard-interrupt", verbose=verbose) + + with expect_error(): + run_tests(chucker.tests, enable="timeout", verbose=verbose) + + with expect_error(): + run_tests(chucker.tests, enable="process-error", verbose=verbose) + + with expect_error(): + run_tests(chucker.tests, enable="system-exit", verbose=verbose) + + with expect_system_exit(): + PlanoTestCommand().main(["--module", "nosuchmodule"]) + + def run_command(*args): + PlanoTestCommand(chucker.tests).main(args) + + run_command("--verbose") + run_command("--quiet") + run_command("--list") + + with expect_system_exit(): + run_command("--enable", "*badbye*") + + with expect_system_exit(): + run_command("--enable", "*badbye*", "--verbose") + + try: + with expect_exception(): + pass + raise Exception() # pragma: nocover + except AssertionError: + pass + + with expect_output(equals="abc123", contains="bc12", startswith="abc", endswith="123") as out: + write(out, "abc123") + +@test +def time_operations(): + start_time = get_time() + + sleep(TINY_INTERVAL) + + assert get_time() - start_time > TINY_INTERVAL + + start_datetime = get_datetime() + + sleep(TINY_INTERVAL) + + assert get_datetime() - start_datetime > _datetime.timedelta(seconds=TINY_INTERVAL) + + timestamp = format_timestamp() + result = parse_timestamp(timestamp) + assert format_timestamp(result) == timestamp + + result = parse_timestamp(None) + assert result is None + + earlier = get_datetime() + result = format_date() + later = _datetime.datetime.strptime(result, "%d %B %Y") + later = later.replace(tzinfo=_datetime.timezone.utc) + assert later - earlier < _datetime.timedelta(days=1) + + now = get_datetime() + result = format_date(now) + assert result == f"{now.day} {now.strftime('%B')} {now.strftime('%Y')}" + + now = get_datetime() + result = format_time() + later = _datetime.datetime.strptime(result, "%H:%M:%S") + later = later.replace(tzinfo=_datetime.timezone.utc) + assert later - earlier < _datetime.timedelta(seconds=1) + + now = get_datetime() + result = format_time(now) + assert result == f"{now.hour}:{now.strftime('%M')}:{now.strftime('%S')}" + + now = get_datetime() + result = format_time(now, precision="minute") + assert result == f"{now.hour}:{now.strftime('%M')}" + + result = format_duration(0.1) + assert result == "0.1s", result + + result = format_duration(1) + assert result == "1s", result + + result = format_duration(1, align=True) + assert result == "1.0s", result + + result = format_duration(60) + assert result == "60s", result + + result = format_duration(3600) + assert result == "1h", result + + with expect_system_exit(): + with start("sleep 10"): + from plano import _default_sigterm_handler + _default_sigterm_handler(_signal.SIGTERM, None) + + with Timer() as timer: + sleep(TINY_INTERVAL) + assert timer.elapsed_time > TINY_INTERVAL + + assert timer.elapsed_time > TINY_INTERVAL + + if not WINDOWS: + with expect_timeout(): + with Timer(timeout=TINY_INTERVAL) as timer: + sleep(10) + +@test +def unique_id_operations(): + id1 = get_unique_id() + id2 = get_unique_id() + + assert id1 != id2, (id1, id2) + + result = get_unique_id(1) + assert len(result) == 2 + + result = get_unique_id(16) + assert len(result) == 32 + +@test +def value_operations(): + result = 
nvl(None, "a") + assert result == "a", result + + result = nvl("b", "a") + assert result == "b", result + + assert is_string("a") + assert not is_string(1) + + for value in (None, "", (), [], {}): + assert is_empty(value), value + + for value in (object(), " ", (1,), [1], {"a": 1}): + assert not is_empty(value), value + + result = pformat({"z": 1, "a": 2}) + assert result == "{'a': 2, 'z': 1}", result + + result = format_empty((), "[nothing]") + assert result == "[nothing]", result + + result = format_empty((1,), "[nothing]") + assert result == (1,), result + + result = format_not_empty("abc", "[{}]") + assert result == "[abc]", result + + result = format_not_empty({}, "[{}]") + assert result == {}, result + + result = format_repr(Namespace(a=1, b=2), limit=1) + assert result == "Namespace(a=1)", result + + result = Namespace(a=1, b=2) + assert result.a == 1, result + assert result.b == 2, result + assert "a" in result, result + assert "c" not in result, result + repr(result) + + other = Namespace(a=1, b=2, c=3) + assert result != other, (result, other) + +@test +def yaml_operations(): + try: + import yaml as _yaml + except ImportError: # pragma: nocover + raise PlanoTestSkipped("PyYAML is not available") + + with working_dir(): + input_data = { + "alpha": [1, 2, 3], + } + + file_a = write_yaml("a", input_data) + output_data = read_yaml(file_a) + + assert input_data == output_data, (input_data, output_data) + + yaml = read(file_a) + parsed_data = parse_yaml(yaml) + emitted_yaml = emit_yaml(input_data) + + assert input_data == parsed_data, (input_data, parsed_data) + assert yaml == emitted_yaml, (yaml, emitted_yaml) + + with expect_output(equals=emitted_yaml) as out: + with open(out, "w") as f: + print_yaml(input_data, file=f, end="") + +@command +def prancer(): + notice("Base prancer") + +@command +def vixen(): + prancer() + +@test +def plano_command(): + with working_dir(): + PlanoCommand().main([]) + + PlanoCommand(_sys.modules[__name__]).main([]) + + PlanoCommand().main(["-m", "plano.test"]) + + with expect_system_exit(): + PlanoCommand().main(["-m", "nosuchmodule"]) + + with working_dir(): + write(".plano.py", "garbage") + + with expect_system_exit(): + PlanoCommand().main([]) + + with expect_system_exit(): + PlanoCommand().main(["-f", "no-such-file"]) + + def run_command(*args): + PlanoCommand().main(["-f", test_project_dir] + list(args)) + + with test_project(): + run_command() + run_command("--help") + + with expect_system_exit(): + run_command("no-such-command") + + with expect_system_exit(): + run_command("no-such-command", "--help") + + with expect_system_exit(): + run_command("--help", "no-such-command") + + run_command("extended-command", "a", "b", "--omega", "z") + run_command("extended-command", "a", "b", "--omega", "z", "--verbose") + run_command("extended-command", "a", "b", "--omega", "z", "--quiet") + + with expect_system_exit(): + run_command("echo") + + with expect_exception(contains="Trouble"): + run_command("echo", "Hello", "--trouble") + + run_command("echo", "Hello", "--count", "5") + + run_command("echoecho", "Greetings") + + with expect_system_exit(): + run_command("echo", "Hello", "--count", "not-an-int") + + run_command("haberdash", "ballcap", "fedora", "hardhat", "--last", "turban") + result = read_json("haberdash.json") + assert result == ["ballcap", "fedora", "hardhat", "turban"], result + + run_command("haberdash", "ballcap", "--last", "turban") + result = read_json("haberdash.json") + assert result == ["ballcap", "turban"], result + + 
run_command("haberdash", "ballcap") + result = read_json("haberdash.json") + assert result == ["ballcap", "bowler"], result + + run_command("balderdash", "bunk", "poppycock") + result = read_json("balderdash.json") + assert result == ["bunk", "poppycock", "rubbish"], result + + run_command("balderdash", "bunk") + result = read_json("balderdash.json") + assert result == ["bunk", "malarkey", "rubbish"], result + + run_command("balderdash", "bunk", "--other", "bollocks") + result = read_json("balderdash.json") + assert result == ["bunk", "malarkey", "bollocks"], result + + run_command("splasher,balderdash", "claptrap") + result = read_json("splasher.json") + assert result == [1], result + result = read_json("balderdash.json") + assert result == ["claptrap", "malarkey", "rubbish"], result + + with expect_system_exit(): + run_command("no-such-command,splasher") + + with expect_system_exit(): + run_command("splasher,no-such-command-nope") + + run_command("dasher", "alpha", "--beta", "123") + + # Gamma is an unexpected arg + with expect_system_exit(): + run_command("dasher", "alpha", "--gamma", "123") + + # Args after "xyz" are extra passthrough args + run_command("dancer", "gamma", "--omega", "xyz", "extra1", "--extra2", "extra3") + result = read_json("dancer.json") + assert result == ["extra1", "--extra2", "extra3"], result + + # Ensure indirect calls (through parent commands) are specialized + run_command("vixen") + assert exists("prancer.json") + + with expect_system_exit(): + run_command("no-parent") + + run_command("feta", "--spinach", "oregano") + result = read_json("feta.json") + assert result == "oregano" + + run_command("invisible") + result = read_json("invisible.json") + assert result == "nothing" + + + +def main(): + PlanoTestCommand(_sys.modules[__name__]).main() + +if __name__ == "__main__": # pragma: nocover + main() diff --git a/external/skewer/example/python/plano/command.py b/external/skewer/example/python/plano/command.py new file mode 100644 index 0000000..219f964 --- /dev/null +++ b/external/skewer/example/python/plano/command.py @@ -0,0 +1,511 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +from .main import * + +import argparse as _argparse +import importlib as _importlib +import inspect as _inspect +import os as _os +import sys as _sys +import traceback as _traceback + +class BaseCommand: + def parse_args(self, args): # pragma: nocover + raise NotImplementedError() + + def configure_logging(self, args): + return "warning", None + + def init(self, args): # pragma: nocover + raise NotImplementedError() + + def run(self): # pragma: nocover + raise NotImplementedError() + + def main(self, args=None): + if args is None: + args = ARGS[1:] + + args = self.parse_args(args) + + assert isinstance(args, _argparse.Namespace), args + + level, output = self.configure_logging(args) + + with logging_enabled(level=level, output=output): + try: + self.init(args) + self.run() + except KeyboardInterrupt: + pass + except PlanoError as e: + if PLANO_DEBUG: # pragma: nocover + error(e) + else: + error(str(e)) + + exit(1) + +class BaseArgumentParser(_argparse.ArgumentParser): + def __init__(self, **kwargs): + super().__init__(**kwargs) + + self.allow_abbrev = False + self.formatter_class = _argparse.RawDescriptionHelpFormatter + + _capitalize_help(self) + +_plano_command = None + +class PlanoCommand(BaseCommand): + def __init__(self, module=None, description="Run commands defined as Python functions", epilog=None): + self.module = module + self.bound_commands = dict() + self.running_commands = list() + self.passthrough_args = None + self.verbose = False + self.quiet = False + + assert self.module is None or _inspect.ismodule(self.module), self.module + + self.pre_parser = BaseArgumentParser(description=description, add_help=False) + self.pre_parser.add_argument("-h", "--help", action="store_true", + help="Show this help message and exit") + + if self.module is None: + self.pre_parser.add_argument("-f", "--file", help="Load commands from FILE (default '.plano.py')") + self.pre_parser.add_argument("-m", "--module", help="Load commands from MODULE") + + self.parser = _argparse.ArgumentParser(parents=(self.pre_parser,), + description=description, epilog=epilog, + add_help=False, allow_abbrev=False) + + # This is intentionally added after self.pre_parser is passed + # as parent to self.parser, since it is used only in the + # preliminary parsing. 
+ self.pre_parser.add_argument("command", nargs="?", help=_argparse.SUPPRESS) + + global _plano_command + _plano_command = self + + def parse_args(self, args): + pre_args, _ = self.pre_parser.parse_known_args(args) + + if self.module is None: + if pre_args.module is None: + self.module = self._load_file(pre_args.file) + else: + self.module = self._load_module(pre_args.module) + + if self.module is not None: + self._bind_commands(self.module) + + self._process_commands() + + self.preceding_commands = list() + + if pre_args.command is not None and "," in pre_args.command: + names = pre_args.command.split(",") + + for name in names[:-1]: + try: + self.preceding_commands.append(self.bound_commands[name]) + except KeyError: + self.parser.error(f"Command '{name}' is unknown") + + args[args.index(pre_args.command)] = names[-1] + + args, self.passthrough_args = self.parser.parse_known_args(args) + + return args + + def configure_logging(self, args): + if args.command is not None and not self.bound_commands[args.command].passthrough: + if args.verbose: + return "debug", None + + if args.quiet: + return "warning", None + + return "notice", None + + def init(self, args): + self.help = args.help + + self.selected_command = None + self.command_args = list() + self.command_kwargs = dict() + + if args.command is not None: + for command in self.preceding_commands: + command() + + self.selected_command = self.bound_commands[args.command] + + if not self.selected_command.passthrough and self.passthrough_args: + self.parser.error(f"unrecognized arguments: {' '.join(self.passthrough_args)}") + + for param in self.selected_command.parameters.values(): + if param.name == "passthrough_args": + continue + + if param.positional: + if param.multiple: + self.command_args.extend(getattr(args, param.name)) + else: + self.command_args.append(getattr(args, param.name)) + else: + self.command_kwargs[param.name] = getattr(args, param.name) + + if self.selected_command.passthrough: + self.command_kwargs["passthrough_args"] = self.passthrough_args + + def run(self): + if self.help or self.module is None or self.selected_command is None: + self.parser.print_help() + return + + with Timer() as timer: + self.selected_command(*self.command_args, **self.command_kwargs) + + if not self.quiet: + cprint("OK", color="green", file=_sys.stderr, end="") + cprint(" ({})".format(format_duration(timer.elapsed_time)), color="magenta", file=_sys.stderr) + + def _load_module(self, name): + try: + return _importlib.import_module(name) + except ImportError: + exit("Module '{}' not found", name) + + def _load_file(self, path): + if path is not None and is_dir(path): + path = self._find_file(path) + + if path is not None and not is_file(path): + exit("File '{}' not found", path) + + if path is None: + path = self._find_file(get_current_dir()) + + if path is None: + return + + debug("Loading '{}'", path) + + _sys.path.insert(0, join(get_parent_dir(path), "python")) + + spec = _importlib.util.spec_from_file_location("_plano", path) + module = _importlib.util.module_from_spec(spec) + _sys.modules["_plano"] = module + + try: + spec.loader.exec_module(module) + except Exception as e: + error(e) + exit("Failure loading {}: {}", path, str(e)) + + return module + + def _find_file(self, dir): + # Planofile and .planofile remain temporarily for backward compatibility + for name in (".plano.py", "Planofile", ".planofile"): + path = join(dir, name) + + if is_file(path): + return path + + def _bind_commands(self, module): + for var in vars(module).values(): 
+ if callable(var) and var.__class__.__name__ == "Command": + self.bound_commands[var.name] = var + + def _process_commands(self): + subparsers = self.parser.add_subparsers(title="commands", dest="command", metavar="{command}") + + for command in self.bound_commands.values(): + # This doesn't work yet, but in the future it might. + # https://bugs.python.org/issue22848 + # + # help = _argparse.SUPPRESS if command.hidden else command.help + + help = "[internal]" if command.hidden else command.help + add_help = False if command.passthrough else True + description = nvl(command.description, command.help) + + subparser = subparsers.add_parser(command.name, help=help, add_help=add_help, description=description, + formatter_class=_argparse.RawDescriptionHelpFormatter) + + if not command.passthrough: + subparser.add_argument("--verbose", action="store_true", + help="Print detailed logging to the console") + subparser.add_argument("--quiet", action="store_true", + help="Print no logging to the console") + + for param in command.parameters.values(): + if not command.passthrough and param.name in ("verbose", "quiet"): + continue + + if param.positional: + if param.multiple: + subparser.add_argument(param.name, metavar=param.metavar, type=param.type, help=param.help, + nargs="*") + elif param.optional: + subparser.add_argument(param.name, metavar=param.metavar, type=param.type, help=param.help, + nargs="?", default=param.default) + else: + subparser.add_argument(param.name, metavar=param.metavar, type=param.type, help=param.help) + else: + flag_args = list() + + if param.short_option is not None: + flag_args.append("-{}".format(param.short_option)) + + flag_args.append("--{}".format(param.display_name)) + + help = param.help + + if param.default not in (None, False): + if help is None: + help = "Default value is {}".format(repr(param.default)) + else: + help += " (default {})".format(repr(param.default)) + + if param.default is False: + subparser.add_argument(*flag_args, dest=param.name, default=param.default, action="store_true", + help=help) + else: + subparser.add_argument(*flag_args, dest=param.name, default=param.default, + metavar=param.metavar, type=param.type, help=help) + + _capitalize_help(subparser) + +_command_help = { + "build": "Build artifacts from source", + "clean": "Clean up the source tree", + "dist": "Generate distribution artifacts", + "install": "Install the built artifacts on your system", + "test": "Run the tests", + "coverage": "Run the tests and measure code coverage", +} + +def command(_function=None, name=None, parameters=None, parent=None, passthrough=False, hidden=False): + class Command: + def __init__(self, function): + self.function = function + self.module = _inspect.getmodule(self.function) + + self.name = name + self.parent = parent + + if self.parent is None: + # Strip leading and trailing underscores and convert + # remaining underscores to hyphens + default = self.function.__name__.strip("_").replace("_", "-") + + self.name = nvl(self.name, default) + self.parameters = self._process_parameters(parameters) + self.passthrough = passthrough + else: + assert parameters is None + + self.name = nvl(self.name, self.parent.name) + self.parameters = self.parent.parameters + self.passthrough = self.parent.passthrough + + doc = _inspect.getdoc(self.function) + + if doc is None: + self.help = _command_help.get(self.name) + self.description = self.help + else: + self.help = doc.split("\n")[0] + self.description = doc + + if self.parent is not None: + self.help = 
nvl(self.help, self.parent.help) + self.description = nvl(self.description, self.parent.description) + + self.hidden = hidden + + debug("Defining {}", self) + + for param in self.parameters.values(): + debug(" {}", str(param).capitalize()) + + def __repr__(self): + return "command '{}:{}'".format(self.module.__name__, self.name) + + def _process_parameters(self, cparams): + # CommandParameter objects from the @command decorator + cparams_in = {x.name: x for x in nvl(cparams, ())} + cparams_out = dict() + + # Parameter objects from the function signature + sig = _inspect.signature(self.function) + sparams = list(sig.parameters.values()) + + if len(sparams) == 2 and sparams[0].name == "args" and sparams[1].name == "kwargs": + # Don't try to derive command parameters from *args and **kwargs + return cparams_in + + for sparam in sparams: + try: + cparam = cparams_in[sparam.name] + except KeyError: + cparam = CommandParameter(sparam.name) + + if sparam.kind is sparam.POSITIONAL_ONLY: # pragma: nocover + if sparam.positional is None: + cparam.positional = True + elif sparam.kind is sparam.POSITIONAL_OR_KEYWORD and sparam.default is sparam.empty: + if cparam.positional is None: + cparam.positional = True + elif sparam.kind is sparam.POSITIONAL_OR_KEYWORD and sparam.default is not sparam.empty: + cparam.optional = True + cparam.default = sparam.default + elif sparam.kind is sparam.VAR_POSITIONAL: + if cparam.positional is None: + cparam.positional = True + cparam.multiple = True + elif sparam.kind is sparam.VAR_KEYWORD: + continue + elif sparam.kind is sparam.KEYWORD_ONLY: + cparam.optional = True + cparam.default = sparam.default + else: # pragma: nocover + raise NotImplementedError(sparam.kind) + + if cparam.type is None and cparam.default not in (None, False): # XXX why false? + cparam.type = type(cparam.default) + + cparams_out[cparam.name] = cparam + + return cparams_out + + def __call__(self, *args, **kwargs): + from .command import _plano_command, PlanoCommand + assert isinstance(_plano_command, PlanoCommand), _plano_command + + app = _plano_command + command = app.bound_commands[self.name] + + if command is not self: + # The command bound to this name has been overridden. + # This happens when a parent command invokes a peer + # command that is overridden. 
+ + command(*args, **kwargs) + + return + + debug("Running {} {} {}".format(self, args, kwargs)) + + app.running_commands.append(self) + + if not app.quiet: + dashes = "--- " * (len(app.running_commands) - 1) + display_args = list(self._get_display_args(args, kwargs)) + + with console_color("magenta", file=_sys.stderr): + eprint("{}--> {}".format(dashes, self.name), end="") + + if display_args: + eprint(" ({})".format(", ".join(display_args)), end="") + + eprint() + + self.function(*args, **kwargs) + + if not app.quiet: + cprint("{}<-- {}".format(dashes, self.name), color="magenta", file=_sys.stderr) + + app.running_commands.pop() + + def _get_display_args(self, args, kwargs): + for i, param in enumerate(self.parameters.values()): + if param.positional: + if param.multiple: + for va in args[i:]: + yield repr(va) + elif param.optional: + value = args[i] + + if value == param.default: + continue + + yield repr(value) + else: + yield repr(args[i]) + else: + value = kwargs.get(param.name, param.default) + + if value == param.default: + continue + + if value in (True, False): + value = str(value).lower() + else: + value = repr(value) + + yield "{}={}".format(param.display_name, value) + + if _function is None: + return Command + else: + return Command(_function) + +def parent(*args, **kwargs): + try: + f_locals = _inspect.stack()[2].frame.f_locals + parent_fn = f_locals["self"].parent.function + except: + fail("Missing parent command") + + parent_fn(*args, **kwargs) + +class CommandParameter: + def __init__(self, name, display_name=None, type=None, metavar=None, help=None, short_option=None, default=None, positional=None): + self.name = name + self.display_name = nvl(display_name, self.name.replace("_", "-")) + self.type = type + self.metavar = nvl(metavar, self.display_name.upper()) + self.help = help + self.short_option = short_option + self.default = default + self.positional = positional + + self.optional = False + self.multiple = False + + def __repr__(self): + return "parameter '{}' (default {})".format(self.name, repr(self.default)) + +# Patch the default help text +def _capitalize_help(parser): + try: + for action in parser._actions: + if action.help and action.help is not _argparse.SUPPRESS: + action.help = capitalize(action.help) + except: # pragma: nocover + pass + +def _main(): # pragma: nocover + PlanoCommand().main() diff --git a/external/skewer/example/python/plano/github.py b/external/skewer/example/python/plano/github.py new file mode 100644 index 0000000..e1714b5 --- /dev/null +++ b/external/skewer/example/python/plano/github.py @@ -0,0 +1,80 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +from .main import * + +_html_template = """ + + + + + + +
+ +@content@ + +
+ + +""".strip() + +def convert_github_markdown(markdown): + json = emit_json({"text": markdown}) + content = http_post("https://github.com/gitapi/markdown", json, content_type="application/json") + + # Remove the "user-content-" prefix from internal anchors + content = content.replace("id=\"user-content-", "id=\"") + + return _html_template.replace("@content@", content) + +def update_external_from_github(dir, owner, repo, ref="main"): + dir = get_absolute_path(dir) + make_parent_dir(dir) + + url = f"https://github.com/{owner}/{repo}/archive/{ref}.tar.gz" + + with temp_file() as temp: + assert exists(temp) + + http_get(url, output_file=temp) + + with working_dir(quiet=True): + extract_archive(temp) + + extracted_dir = list_dir()[0] + assert is_dir(extracted_dir) + + replace(dir, extracted_dir) diff --git a/external/skewer/example/python/plano/main.py b/external/skewer/example/python/plano/main.py new file mode 100644 index 0000000..903f654 --- /dev/null +++ b/external/skewer/example/python/plano/main.py @@ -0,0 +1,1772 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +import base64 as _base64 +import binascii as _binascii +import code as _code +import datetime as _datetime +import fnmatch as _fnmatch +import getpass as _getpass +import json as _json +import os as _os +import pprint as _pprint +import pkgutil as _pkgutil +import random as _random +import re as _re +import shlex as _shlex +import shutil as _shutil +import signal as _signal +import socket as _socket +import subprocess as _subprocess +import sys as _sys +import tempfile as _tempfile +import time as _time +import traceback as _traceback +import urllib as _urllib +import urllib.parse as _urllib_parse +import uuid as _uuid + +_max = max + +## Exceptions + +class PlanoException(Exception): + pass + +class PlanoError(PlanoException): + pass + +class PlanoTimeout(PlanoException): + pass + +## Global variables + +ENV = _os.environ +ARGS = _sys.argv + +STDIN = _sys.stdin +STDOUT = _sys.stdout +STDERR = _sys.stderr +DEVNULL = _os.devnull + +LINUX = _sys.platform == "linux" +WINDOWS = _sys.platform in ("win32", "cygwin") + +PLANO_DEBUG = "PLANO_DEBUG" in ENV +PLANO_COLOR = "PLANO_COLOR" in ENV + +## Archive operations + +def make_archive(input_dir, output_file=None, quiet=False): + check_program("tar") + + archive_stem = get_base_name(input_dir) + + if output_file is None: + # tar on Windows needs this + base = join(get_current_dir(), archive_stem) + base = base.replace("\\", "/") + + output_file = f"{base}.tar.gz" + + _notice(quiet, "Making archive {} from directory {}", repr(output_file), repr(input_dir)) + + with working_dir(get_parent_dir(input_dir), quiet=True): + run(f"tar -czf {output_file} {archive_stem}", quiet=True) + + return output_file + +def extract_archive(input_file, output_dir=None, quiet=False): + check_program("tar") + + if output_dir is None: + output_dir = get_current_dir() + + _notice(quiet, "Extracting archive {} to directory {}", repr(input_file), repr(output_dir)) + + input_file = get_absolute_path(input_file) + + # tar on Windows needs this + input_file = input_file.replace("\\", "/") + + with working_dir(output_dir, quiet=True): + run(f"tar -xf {input_file}", quiet=True) + + return output_dir + +def rename_archive(input_file, new_archive_stem, quiet=False): + _notice(quiet, "Renaming archive {} with stem {}", repr(input_file), repr(new_archive_stem)) + + output_dir = get_absolute_path(get_parent_dir(input_file)) + output_file = "{}.tar.gz".format(join(output_dir, new_archive_stem)) + + # tar on Windows needs this + output_file = output_file.replace("\\", "/") + + input_file = get_absolute_path(input_file) + + with working_dir(quiet=True): + extract_archive(input_file, quiet=True) + + input_name = list_dir()[0] + input_dir = move(input_name, new_archive_stem, quiet=True) + + make_archive(input_dir, output_file=output_file, quiet=True) + + remove(input_file, quiet=True) + + return output_file + +## Console operations + +def flush(): + _sys.stdout.flush() + _sys.stderr.flush() + +def eprint(*args, **kwargs): + print(*args, file=_sys.stderr, **kwargs) + +def pprint(*args, **kwargs): + args = [pformat(x) for x in args] + print(*args, **kwargs) + +_color_codes = { + "black": "\u001b[30", + "red": "\u001b[31", + "green": "\u001b[32", + "yellow": "\u001b[33", + "blue": "\u001b[34", + "magenta": "\u001b[35", + "cyan": "\u001b[36", + "white": "\u001b[37", + "gray": "\u001b[90", +} + +_color_reset = "\u001b[0m" + +def _get_color_code(color, bright): + elems = [_color_codes[color]] + + if bright: + elems.append(";1") + + elems.append("m") + + return "".join(elems) + +def 
_is_color_enabled(file): + return PLANO_COLOR or hasattr(file, "isatty") and file.isatty() + +class console_color: + def __init__(self, color=None, bright=False, file=_sys.stdout): + self.file = file + self.color_code = None + + if (color, bright) != (None, False): + self.color_code = _get_color_code(color, bright) + + self.enabled = self.color_code is not None and _is_color_enabled(self.file) + + def __enter__(self): + if self.enabled: + print(self.color_code, file=self.file, end="", flush=True) + + def __exit__(self, exc_type, exc_value, traceback): + if self.enabled: + print(_color_reset, file=self.file, end="", flush=True) + +def cformat(value, color=None, bright=False, file=_sys.stdout): + if (color, bright) != (None, False) and _is_color_enabled(file): + return "".join((_get_color_code(color, bright), value, _color_reset)) + else: + return value + +def cprint(*args, **kwargs): + color = kwargs.pop("color", "white") + bright = kwargs.pop("bright", False) + file = kwargs.get("file", _sys.stdout) + + with console_color(color, bright=bright, file=file): + print(*args, **kwargs) + +class output_redirected: + def __init__(self, output, quiet=False): + self.output = output + self.quiet = quiet + + def __enter__(self): + flush() + + _notice(self.quiet, "Redirecting output to file {}", repr(self.output)) + + if is_string(self.output): + output = open(self.output, "w") + + self.prev_stdout, self.prev_stderr = _sys.stdout, _sys.stderr + _sys.stdout, _sys.stderr = output, output + + def __exit__(self, exc_type, exc_value, traceback): + flush() + + _sys.stdout, _sys.stderr = self.prev_stdout, self.prev_stderr + +try: + breakpoint +except NameError: # pragma: nocover + def breakpoint(): + import pdb + pdb.set_trace() + +def repl(locals): # pragma: nocover + _code.InteractiveConsole(locals=locals).interact() + +def print_properties(props, file=None): + size = max([len(x[0]) for x in props]) + + for prop in props: + name = "{}:".format(prop[0]) + template = "{{:<{}}} ".format(size + 1) + + print(template.format(name), prop[1], end="", file=file) + + for value in prop[2:]: + print(" {}".format(value), end="", file=file) + + print(file=file) + +## Directory operations + +def find(dirs=None, include="*", exclude=()): + if dirs is None: + dirs = "." 
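+    # dirs, include, and exclude each accept either a single string or a
+    # sequence of glob patterns; the checks below normalize them to tuples.
+    # A minimal illustrative call (the "src" path is assumed, not taken from
+    # this patch):
+    #
+    #   find("src", include="*.py", exclude="*_test.py")
+    #
+    # returns a sorted list of the matching paths found under "src".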
+ + if is_string(dirs): + dirs = (dirs,) + + if is_string(include): + include = (include,) + + if is_string(exclude): + exclude = (exclude,) + + found = set() + + for dir in dirs: + for root, dir_names, file_names in _os.walk(dir, followlinks=True): + names = dir_names + file_names + + for include_pattern in include: + names = _fnmatch.filter(names, include_pattern) + + for exclude_pattern in exclude: + for name in _fnmatch.filter(names, exclude_pattern): + names.remove(name) + + if root.startswith("./"): + root = remove_prefix(root, "./") + elif root == ".": + root = "" + + found.update([join(root, x) for x in names]) + + return sorted(found) + +def make_dir(dir, quiet=False): + if dir == "": + return dir + + if not exists(dir): + _notice(quiet, "Making directory '{}'", dir) + _os.makedirs(dir) + + return dir + +def make_parent_dir(path, quiet=False): + return make_dir(get_parent_dir(path), quiet=quiet) + +# Returns the current working directory so you can change it back +def change_dir(dir, quiet=False): + _debug(quiet, "Changing directory to {}", repr(dir)) + + prev_dir = get_current_dir() + + if not dir: + return prev_dir + + _os.chdir(dir) + + return prev_dir + +def list_dir(dir=None, include="*", exclude=()): + if dir is None: + dir = get_current_dir() + else: + dir = expand(dir) + + assert is_dir(dir), dir + + if is_string(include): + include = (include,) + + if is_string(exclude): + exclude = (exclude,) + + names = _os.listdir(dir) + + for include_pattern in include: + names = _fnmatch.filter(names, include_pattern) + + for exclude_pattern in exclude: + for name in _fnmatch.filter(names, exclude_pattern): + names.remove(name) + + return sorted(names) + +# No args constructor gets a temp dir +class working_dir: + def __init__(self, dir=None, quiet=False): + self.dir = dir + self.prev_dir = None + self.remove = False + self.quiet = quiet + + if self.dir is None: + self.dir = make_temp_dir() + self.remove = True + else: + self.dir = expand(self.dir) + + def __enter__(self): + if self.dir == ".": + return + + _notice(self.quiet, "Entering directory {}", repr(get_absolute_path(self.dir))) + + make_dir(self.dir, quiet=True) + + self.prev_dir = change_dir(self.dir, quiet=True) + + return self.dir + + def __exit__(self, exc_type, exc_value, traceback): + if self.dir == ".": + return + + _debug(self.quiet, "Returning to directory {}", repr(get_absolute_path(self.prev_dir))) + + change_dir(self.prev_dir, quiet=True) + + if self.remove: + remove(self.dir, quiet=True) + +## Environment operations + +def join_path_var(*paths): + return _os.pathsep.join(unique(skip(paths))) + +def get_current_dir(): + return _os.getcwd() + +def get_home_dir(user=None): + return _os.path.expanduser("~{}".format(user or "")) + +def get_user(): + return _getpass.getuser() + +def get_hostname(): + return _socket.gethostname() + +def get_program_name(command=None): + if command is None: + args = ARGS + else: + args = command.split() + + for arg in args: + if "=" not in arg: + return get_base_name(arg) + +def which(program_name): + return _shutil.which(program_name) + +def check_env(var, message=None): + if var not in _os.environ: + if message is None: + message = "Environment variable {} is not set".format(repr(var)) + + raise PlanoError(message) + +def check_module(module, message=None): + if _pkgutil.find_loader(module) is None: + if message is None: + message = "Python module {} is not found".format(repr(module)) + + raise PlanoError(message) + +def check_program(program, message=None): + if which(program) is 
None: + if message is None: + message = "Program {} is not found".format(repr(program)) + + raise PlanoError(message) + +class working_env: + def __init__(self, **vars): + self.amend = vars.pop("amend", True) + self.vars = vars + + def __enter__(self): + self.prev_vars = dict(_os.environ) + + if not self.amend: + for name, value in list(_os.environ.items()): + if name not in self.vars: + del _os.environ[name] + + for name, value in self.vars.items(): + _os.environ[name] = str(value) + + def __exit__(self, exc_type, exc_value, traceback): + for name, value in self.prev_vars.items(): + _os.environ[name] = value + + for name, value in self.vars.items(): + if name not in self.prev_vars: + del _os.environ[name] + +class working_module_path: + def __init__(self, path, amend=True): + if is_string(path): + if not is_absolute(path): + path = get_absolute_path(path) + + path = [path] + + if amend: + path = path + _sys.path + + self.path = path + + def __enter__(self): + self.prev_path = _sys.path + _sys.path = self.path + + def __exit__(self, exc_type, exc_value, traceback): + _sys.path = self.prev_path + +def print_env(file=None): + props = ( + ("ARGS", ARGS), + ("ENV['PATH']", ENV.get("PATH")), + ("ENV['PYTHONPATH']", ENV.get("PYTHONPATH")), + ("sys.executable", _sys.executable), + ("sys.path", _sys.path), + ("sys.version", _sys.version.replace("\n", "")), + ("get_current_dir()", get_current_dir()), + ("get_home_dir()", get_home_dir()), + ("get_hostname()", get_hostname()), + ("get_program_name()", get_program_name()), + ("get_user()", get_user()), + ("plano.__file__", __file__), + ("which('plano')", which("plano")), + ) + + print_properties(props, file=file) + +def print_stack(file=None): + _traceback.print_stack(file=file) + +## File operations + +def touch(file, quiet=False): + file = expand(file) + + _notice(quiet, "Touching {}", repr(file)) + + try: + _os.utime(file, None) + except OSError: + append(file, "") + + return file + +# symlinks=True - Preserve symlinks +# inside=True - Place from_path inside to_path if to_path is a directory +def copy(from_path, to_path, symlinks=True, inside=True, quiet=False): + from_path = expand(from_path) + to_path = expand(to_path) + + _notice(quiet, "Copying {} to {}", repr(from_path), repr(to_path)) + + if is_dir(to_path) and inside: + to_path = join(to_path, get_base_name(from_path)) + else: + make_parent_dir(to_path, quiet=True) + + if is_dir(from_path): + for name in list_dir(from_path): + copy(join(from_path, name), join(to_path, name), symlinks=symlinks, inside=False, quiet=True) + + _shutil.copystat(from_path, to_path) + elif is_link(from_path) and symlinks: + make_link(to_path, read_link(from_path), quiet=True) + else: + _shutil.copy2(from_path, to_path) + + return to_path + +# inside=True - Place from_path inside to_path if to_path is a directory +def move(from_path, to_path, inside=True, quiet=False): + from_path = expand(from_path) + to_path = expand(to_path) + + _notice(quiet, "Moving {} to {}", repr(from_path), repr(to_path)) + + to_path = copy(from_path, to_path, inside=inside, quiet=True) + remove(from_path, quiet=True) + + return to_path + +def replace(path, replacement, quiet=False): + path = expand(path) + replacement = expand(replacement) + + _notice(quiet, "Replacing {} with {}", repr(path), repr(replacement)) + + with temp_dir() as backup_dir: + backup = join(backup_dir, "backup") + backup_created = False + + if exists(path): + move(path, backup, quiet=True) + backup_created = True + + try: + move(replacement, path, quiet=True) + except 
OSError: + notice("Removing") + remove(path, quiet=True) + + if backup_created: + move(backup, path, quiet=True) + + raise + + assert not exists(replacement), replacement + assert exists(path), path + + return path + +def remove(paths, quiet=False): + if is_string(paths): + paths = (paths,) + + for path in paths: + path = expand(path) + + if not exists(path): + continue + + _debug(quiet, "Removing {}", repr(path)) + + if is_dir(path): + _shutil.rmtree(path, ignore_errors=True) + else: + _os.remove(path) + +def get_file_size(file): + file = expand(file) + return _os.path.getsize(file) + +## IO operations + +def read(file): + file = expand(file) + + with open(file) as f: + return f.read() + +def write(file, string): + file = expand(file) + + make_parent_dir(file, quiet=True) + + with open(file, "w") as f: + f.write(string) + + return file + +def append(file, string): + file = expand(file) + + make_parent_dir(file, quiet=True) + + with open(file, "a") as f: + f.write(string) + + return file + +def prepend(file, string): + file = expand(file) + + orig = read(file) + + return write(file, string + orig) + +def tail(file, count): + file = expand(file) + return "".join(tail_lines(file, count)) + +def read_lines(file): + file = expand(file) + + with open(file) as f: + return f.readlines() + +def write_lines(file, lines): + file = expand(file) + + make_parent_dir(file, quiet=True) + + with open(file, "w") as f: + f.writelines(lines) + + return file + +def append_lines(file, lines): + file = expand(file) + + make_parent_dir(file, quiet=True) + + with open(file, "a") as f: + f.writelines(lines) + + return file + +def prepend_lines(file, lines): + file = expand(file) + + orig_lines = read_lines(file) + + make_parent_dir(file, quiet=True) + + with open(file, "w") as f: + f.writelines(lines) + f.writelines(orig_lines) + + return file + +def tail_lines(file, count): + assert count >= 0, count + + lines = read_lines(file) + + return lines[-count:] + +def string_replace_file(file, expr, replacement, count=0): + file = expand(file) + return write(file, string_replace(read(file), expr, replacement, count=count)) + +def concatenate(file, input_files): + file = expand(file) + + assert file not in input_files + + make_parent_dir(file, quiet=True) + + with open(file, "wb") as f: + for input_file in input_files: + if not exists(input_file): + continue + + with open(input_file, "rb") as inf: + _shutil.copyfileobj(inf, f) + + return file + +## Iterable operations + +def unique(iterable): + return list(dict.fromkeys(iterable).keys()) + +def skip(iterable, values=(None, "", (), [], {})): + if is_scalar(values): + values = (values,) + + items = list() + + for item in iterable: + if item not in values: + items.append(item) + + return items + +## JSON operations + +def read_json(file): + file = expand(file) + + with open(file) as f: + return _json.load(f) + +def write_json(file, data): + file = expand(file) + + make_parent_dir(file, quiet=True) + + with open(file, "w") as f: + _json.dump(data, f, indent=4, separators=(",", ": "), sort_keys=True) + + return file + +def parse_json(json): + return _json.loads(json) + +def emit_json(data): + return _json.dumps(data, indent=4, separators=(",", ": "), sort_keys=True) + +def print_json(data, **kwargs): + print(emit_json(data), **kwargs) + +## HTTP operations + +def _run_curl(method, url, content=None, content_file=None, content_type=None, output_file=None, insecure=False, + user=None, password=None, quiet=False): + check_program("curl") + + _notice(quiet, f"Sending {method} 
request to '{url}'") + + args = ["curl", "-sfL"] + + if method != "GET": + args.extend(["-X", method]) + + if content is not None: + assert content_file is None + args.extend(["-H", "Expect:", "-d", "@-"]) + + if content_file is not None: + assert content is None, content + args.extend(["-H", "Expect:", "-d", f"@{content_file}"]) + + if content_type is not None: + args.extend(["-H", f"'Content-Type: {content_type}'"]) + + if output_file is not None: + args.extend(["-o", output_file]) + + if insecure: + args.append("--insecure") + + if user is not None: + assert password is not None + args.extend(["--user", f"{user}:{password}"]) + + args.append(url) + + if output_file is not None: + make_parent_dir(output_file, quiet=True) + + proc = run(args, stdin=_subprocess.PIPE, stdout=_subprocess.PIPE, stderr=_subprocess.PIPE, + input=content, check=False, quiet=True) + + if proc.exit_code > 0: + raise PlanoProcessError(proc) + + if output_file is None: + return proc.stdout_result + +def http_get(url, output_file=None, insecure=False, user=None, password=None, quiet=False): + return _run_curl("GET", url, output_file=output_file, insecure=insecure, user=user, password=password, quiet=quiet) + +def http_get_json(url, insecure=False, user=None, password=None, quiet=False): + return parse_json(http_get(url, insecure=insecure, user=user, password=password, quiet=quiet)) + +def http_put(url, content, content_type=None, insecure=False, user=None, password=None, quiet=False): + _run_curl("PUT", url, content=content, content_type=content_type, insecure=insecure, user=user, password=password, + quiet=quiet) + +def http_put_file(url, content_file, content_type=None, insecure=False, user=None, password=None, quiet=False): + _run_curl("PUT", url, content_file=content_file, content_type=content_type, insecure=insecure, user=user, + password=password, quiet=quiet) + +def http_put_json(url, data, insecure=False, user=None, password=None, quiet=False): + http_put(url, emit_json(data), content_type="application/json", insecure=insecure, user=user, password=password, + quiet=quiet) + +def http_post(url, content, content_type=None, output_file=None, insecure=False, user=None, password=None, + quiet=False): + return _run_curl("POST", url, content=content, content_type=content_type, output_file=output_file, + insecure=insecure, user=user, password=password, quiet=quiet) + +def http_post_file(url, content_file, content_type=None, output_file=None, insecure=False, user=None, password=None, + quiet=False): + return _run_curl("POST", url, content_file=content_file, content_type=content_type, output_file=output_file, + insecure=insecure, user=user, password=password, quiet=quiet) + +def http_post_json(url, data, insecure=False, user=None, password=None, quiet=False): + return parse_json(http_post(url, emit_json(data), content_type="application/json", insecure=insecure, user=user, + password=password, quiet=quiet)) + +## Link operations + +def make_link(path: str, linked_path: str, quiet=False) -> str: + _notice(quiet, "Making symlink {} to {}", repr(path), repr(linked_path)) + + make_parent_dir(path, quiet=True) + remove(path, quiet=True) + + _os.symlink(linked_path, path) + + return path + +def read_link(path): + return _os.readlink(path) + +## Logging operations + +_logging_levels = ( + "debug", + "notice", + "warning", + "error", + "disabled", +) + +_DEBUG = _logging_levels.index("debug") +_NOTICE = _logging_levels.index("notice") +_WARNING = _logging_levels.index("warning") +_ERROR = _logging_levels.index("error") 
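+# _DISABLED sorts above every real level, so setting the threshold to it
+# suppresses all log output; enable_logging() and disable_logging() below
+# adjust _logging_threshold against these indexes.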
+_DISABLED = _logging_levels.index("disabled") + +_logging_output = None +_logging_threshold = _NOTICE +_logging_contexts = list() + +def enable_logging(level="notice", output=None, quiet=False): + assert level in _logging_levels, level + + _notice(quiet, "Enabling logging (level={}, output={})", repr(level), repr(nvl(output, "stderr"))) + + global _logging_threshold + _logging_threshold = _logging_levels.index(level) + + if is_string(output): + output = open(output, "w") + + global _logging_output + _logging_output = output + +def disable_logging(quiet=False): + _notice(quiet, "Disabling logging") + + global _logging_threshold + _logging_threshold = _DISABLED + +class logging_enabled: + def __init__(self, level="notice", output=None): + self.level = level + self.output = output + + def __enter__(self): + self.prev_level = _logging_levels[_logging_threshold] + self.prev_output = _logging_output + + if self.level == "disabled": + disable_logging(quiet=True) + else: + enable_logging(level=self.level, output=self.output, quiet=True) + + def __exit__(self, exc_type, exc_value, traceback): + if self.prev_level == "disabled": + disable_logging(quiet=True) + else: + enable_logging(level=self.prev_level, output=self.prev_output, quiet=True) + +class logging_disabled(logging_enabled): + def __init__(self): + super().__init__(level="disabled") + +class logging_context: + def __init__(self, name): + self.name = name + + def __enter__(self): + _logging_contexts.append(self.name) + + def __exit__(self, exc_type, exc_value, traceback): + _logging_contexts.pop() + +def fail(message, *args): + if isinstance(message, BaseException): + if not isinstance(message, PlanoError): + error(message) + + raise message + + if args: + message = message.format(*args) + + raise PlanoError(message) + +def error(message, *args): + log(_ERROR, message, *args) + +def warning(message, *args): + log(_WARNING, message, *args) + +def notice(message, *args): + log(_NOTICE, message, *args) + +def debug(message, *args): + log(_DEBUG, message, *args) + +def log(level, message, *args): + if is_string(level): + level = _logging_levels.index(level) + + if _logging_threshold <= level: + _print_message(level, message, args) + +def _print_message(level, message, args): + line = list() + out = nvl(_logging_output, _sys.stderr) + + program_text = "{}:".format(get_program_name()) + + line.append(cformat(program_text, color="gray")) + + level_text = "{}:".format(_logging_levels[level]) + level_color = ("white", "cyan", "yellow", "red", None)[level] + level_bright = (False, False, False, True, False)[level] + + line.append(cformat(level_text, color=level_color, bright=level_bright)) + + for name in _logging_contexts: + line.append(cformat("{}:".format(name), color="yellow")) + + if isinstance(message, BaseException): + exception = message + + line.append(str(exception)) + + print(" ".join(line), file=out) + + if hasattr(exception, "__traceback__"): + _traceback.print_exception(type(exception), exception, exception.__traceback__, file=out) + else: + message = str(message) + + if args: + message = message.format(*args) + + line.append(capitalize(message)) + + print(" ".join(line), file=out) + + out.flush() + +def _notice(quiet, message, *args): + if quiet: + debug(message, *args) + else: + notice(message, *args) + +def _debug(quiet, message, *args): + if not quiet: + debug(message, *args) + +## Path operations + +def expand(path): + path = _os.path.expanduser(path) + path = _os.path.expandvars(path) + + return path + +def 
get_absolute_path(path): + path = expand(path) + return _os.path.abspath(path) + +def normalize_path(path): + path = expand(path) + return _os.path.normpath(path) + +def get_real_path(path): + path = expand(path) + return _os.path.realpath(path) + +def get_relative_path(path, start=None): + path = expand(path) + return _os.path.relpath(path, start=start) + +def get_file_url(path): + path = expand(path) + return "file:{}".format(get_absolute_path(path)) + +def exists(path): + path = expand(path) + return _os.path.lexists(path) + +def is_absolute(path): + path = expand(path) + return _os.path.isabs(path) + +def is_dir(path): + path = expand(path) + return _os.path.isdir(path) + +def is_file(path): + path = expand(path) + return _os.path.isfile(path) + +def is_link(path): + path = expand(path) + return _os.path.islink(path) + +def join(*paths): + paths = [expand(x) for x in paths] + + path = _os.path.join(*paths) + path = normalize_path(path) + + return path + +def split(path): + path = expand(path) + path = normalize_path(path) + parent, child = _os.path.split(path) + + return parent, child + +def split_extension(path): + path = expand(path) + path = normalize_path(path) + root, ext = _os.path.splitext(path) + + return root, ext + +def get_parent_dir(path): + path = expand(path) + path = normalize_path(path) + parent, child = split(path) + + return parent + +def get_base_name(path): + path = expand(path) + path = normalize_path(path) + parent, name = split(path) + + return name + +def get_name_stem(file): + file = expand(file) + name = get_base_name(file) + + if name.endswith(".tar.gz"): + name = name[:-3] + + stem, ext = split_extension(name) + + return stem + +def get_name_extension(file): + file = expand(file) + name = get_base_name(file) + stem, ext = split_extension(name) + + return ext + +def _check_path(path, test_func, message): + path = expand(path) + + if not test_func(path): + parent_dir = get_parent_dir(path) + + if is_dir(parent_dir): + found_paths = ", ".join([repr(x) for x in list_dir(parent_dir)]) + message = "{}. 
The parent directory contains: {}".format(message.format(repr(path)), found_paths) + else: + message = "{}".format(message.format(repr(path))) + + raise PlanoError(message) + +def check_exists(path): + path = expand(path) + _check_path(path, exists, "File or directory {} not found") + +def check_file(path): + path = expand(path) + _check_path(path, is_file, "File {} not found") + +def check_dir(path): + path = expand(path) + _check_path(path, is_dir, "Directory {} not found") + +def await_exists(path, timeout=30, quiet=False): + path = expand(path) + + _notice(quiet, "Waiting for path {} to exist", repr(path)) + + timeout_message = "Timed out waiting for path {} to exist".format(path) + period = 0.03125 + + with Timer(timeout=timeout, timeout_message=timeout_message) as timer: + while True: + try: + check_exists(path) + except PlanoError: + sleep(period, quiet=True) + period = min(1, period * 2) + else: + return + +## Port operations + +def get_random_port(min=49152, max=65535): + ports = [_random.randint(min, max) for _ in range(3)] + + for port in ports: + try: + check_port(port) + except PlanoError: + return port + + raise PlanoError("Random ports unavailable") + +def check_port(port, host="localhost"): + sock = _socket.socket(_socket.AF_INET, _socket.SOCK_STREAM) + sock.setsockopt(_socket.SOL_SOCKET, _socket.SO_REUSEADDR, 1) + + if sock.connect_ex((host, port)) != 0: + raise PlanoError("Port {} (host {}) is not reachable".format(repr(port), repr(host))) + +def await_port(port, host="localhost", timeout=30, quiet=False): + _notice(quiet, "Waiting for port {}", port) + + if is_string(port): + port = int(port) + + timeout_message = "Timed out waiting for port {} to open".format(port) + period = 0.03125 + + with Timer(timeout=timeout, timeout_message=timeout_message) as timer: + while True: + try: + check_port(port, host=host) + except PlanoError: + sleep(period, quiet=True) + period = min(1, period * 2) + else: + return + +## Process operations + +def get_process_id(): + return _os.getpid() + +def _format_command(command, represent=True): + if is_string(command): + args = _shlex.split(command) + else: + args = command + + args = [expand(str(x)) for x in args] + command = " ".join(args) + + if represent: + return repr(command) + else: + return command + +# quiet=False - Don't log at notice level +# stash=False - No output unless there is an error +# output= - Send stdout and stderr to a file +# stdin= - XXX +# stdout= - Send stdout to a file +# stderr= - Send stderr to a file +# shell=False - XXX +def start(command, stdin=None, stdout=None, stderr=None, output=None, shell=False, stash=False, quiet=False): + _notice(quiet, "Starting a new process (command {})", _format_command(command)) + + if output is not None: + stdout, stderr = output, output + + if is_string(stdin): + stdin = expand(stdin) + stdin = open(stdin, "r") + + if is_string(stdout): + stdout = expand(stdout) + stdout = open(stdout, "w") + + if is_string(stderr): + stderr = expand(stderr) + stderr = open(stderr, "w") + + if stdin is None: + stdin = _sys.stdin + + if stdout is None: + stdout = _sys.stdout + + if stderr is None: + stderr = _sys.stderr + + stash_file = None + + if stash: + stash_file = make_temp_file() + out = open(stash_file, "w") + stdout = out + stderr = out + + if shell: + if is_string(command): + args = command + else: + args = " ".join(map(str, command)) + else: + if is_string(command): + args = _shlex.split(command) + else: + args = command + + args = [expand(str(x)) for x in args] + + try: + proc = 
PlanoProcess(args, stdin=stdin, stdout=stdout, stderr=stderr, shell=shell, close_fds=True, stash_file=stash_file) + except OSError as e: + raise PlanoError("Command {}: {}".format(_format_command(command), str(e))) + + _notice(quiet, "{} started", proc) + + return proc + +def stop(proc, timeout=None, quiet=False): + _notice(quiet, "Stopping {}", proc) + + if proc.poll() is not None: + if proc.exit_code == 0: + debug("{} already exited normally", proc) + elif proc.exit_code == -(_signal.SIGTERM): + debug("{} was already terminated", proc) + else: + debug("{} already exited with code {}", proc, proc.exit_code) + + return proc + + kill(proc, quiet=True) + + return wait(proc, timeout=timeout, quiet=True) + +def kill(proc, quiet=False): + _notice(quiet, "Killing {}", proc) + + proc.terminate() + +def wait(proc, timeout=None, check=False, quiet=False): + _notice(quiet, "Waiting for {} to exit", proc) + + try: + proc.wait(timeout=timeout) + except _subprocess.TimeoutExpired: + error("{} timed out after {} seconds", proc, timeout) + raise PlanoTimeout() + + if proc.exit_code == 0: + debug("{} exited normally", proc) + elif proc.exit_code < 0: + debug("{} was terminated by signal {}", proc, abs(proc.exit_code)) + else: + if check: + error("{} exited with code {}", proc, proc.exit_code) + else: + debug("{} exited with code {}", proc, proc.exit_code) + + if proc.stash_file is not None: + if proc.exit_code > 0: + eprint(read(proc.stash_file), end="") + + if not WINDOWS: + remove(proc.stash_file, quiet=True) + + if check and proc.exit_code > 0: + raise PlanoProcessError(proc) + + return proc + +# input= - Pipe to the process +def run(command, stdin=None, stdout=None, stderr=None, input=None, output=None, + stash=False, shell=False, check=True, quiet=False): + _notice(quiet, "Running command {}", _format_command(command)) + + if input is not None: + assert stdin in (None, _subprocess.PIPE), stdin + + input = input.encode("utf-8") + stdin = _subprocess.PIPE + + proc = start(command, stdin=stdin, stdout=stdout, stderr=stderr, output=output, + stash=stash, shell=shell, quiet=True) + + proc.stdout_result, proc.stderr_result = proc.communicate(input=input) + + if proc.stdout_result is not None: + proc.stdout_result = proc.stdout_result.decode("utf-8") + + if proc.stderr_result is not None: + proc.stderr_result = proc.stderr_result.decode("utf-8") + + return wait(proc, check=check, quiet=True) + +# input= - Pipe the given input into the process +def call(command, input=None, shell=False, quiet=False): + _notice(quiet, "Calling {}", _format_command(command)) + + proc = run(command, stdin=_subprocess.PIPE, stdout=_subprocess.PIPE, stderr=_subprocess.PIPE, + input=input, shell=shell, check=True, quiet=True) + + return proc.stdout_result + +def exit(arg=None, *args, **kwargs): + verbose = kwargs.get("verbose", False) + + if arg in (0, None): + if verbose: + notice("Exiting normally") + + _sys.exit() + + if is_string(arg): + if args: + arg = arg.format(*args) + + if verbose: + error(arg) + + _sys.exit(arg) + + if isinstance(arg, BaseException): + if verbose: + error(arg) + + _sys.exit(str(arg)) + + if isinstance(arg, int): + _sys.exit(arg) + + raise PlanoException("Illegal argument") + +_child_processes = list() + +class PlanoProcess(_subprocess.Popen): + def __init__(self, args, **options): + self.stash_file = options.pop("stash_file", None) + + super().__init__(args, **options) + + self.args = args + self.stdout_result = None + self.stderr_result = None + + _child_processes.append(self) + + @property + def 
exit_code(self): + return self.returncode + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + stop(self) + + def __repr__(self): + return "process {} (command {})".format(self.pid, _format_command(self.args)) + +class PlanoProcessError(_subprocess.CalledProcessError, PlanoError): + def __init__(self, proc): + super().__init__(proc.exit_code, _format_command(proc.args, represent=False)) + +def _default_sigterm_handler(signum, frame): + for proc in _child_processes: + if proc.poll() is None: + kill(proc, quiet=True) + + exit(-(_signal.SIGTERM)) + +_signal.signal(_signal.SIGTERM, _default_sigterm_handler) + +## String operations + +def string_replace(string, expr, replacement, count=0): + return _re.sub(expr, replacement, string, count) + +def remove_prefix(string, prefix): + if string is None: + return "" + + if prefix and string.startswith(prefix): + string = string[len(prefix):] + + return string + +def remove_suffix(string, suffix): + if string is None: + return "" + + if suffix and string.endswith(suffix): + string = string[:-len(suffix)] + + return string + +def shorten(string, max, ellipsis=None): + assert max is None or isinstance(max, int) + + if string is None: + return "" + + if max is None or len(string) < max: + return string + else: + if ellipsis is not None: + string = string + ellipsis + end = _max(0, max - len(ellipsis)) + return string[0:end] + ellipsis + else: + return string[0:max] + +def plural(noun, count=0, plural=None): + if noun in (None, ""): + return "" + + if count == 1: + return noun + + if plural is None: + if noun.endswith("s"): + plural = "{}ses".format(noun) + else: + plural = "{}s".format(noun) + + return plural + +def capitalize(string): + if not string: + return "" + + return string[0].upper() + string[1:] + +def base64_encode(string): + return _base64.b64encode(string) + +def base64_decode(string): + return _base64.b64decode(string) + +def url_encode(string): + return _urllib_parse.quote_plus(string) + +def url_decode(string): + return _urllib_parse.unquote_plus(string) + +def parse_url(url): + return _urllib_parse.urlparse(url) + +## Temp operations + +def get_system_temp_dir(): + return _tempfile.gettempdir() + +def get_user_temp_dir(): + try: + return _os.environ["XDG_RUNTIME_DIR"] + except KeyError: + return join(get_system_temp_dir(), get_user()) + +def make_temp_file(prefix="plano-", suffix="", dir=None): + if dir is None: + dir = get_system_temp_dir() + + return _tempfile.mkstemp(prefix=prefix, suffix=suffix, dir=dir)[1] + +def make_temp_dir(prefix="plano-", suffix="", dir=None): + if dir is None: + dir = get_system_temp_dir() + + return _tempfile.mkdtemp(prefix=prefix, suffix=suffix, dir=dir) + +class temp_file: + def __init__(self, prefix="plano-", suffix="", dir=None): + if dir is None: + dir = get_system_temp_dir() + + self.fd, self.file = _tempfile.mkstemp(prefix=prefix, suffix=suffix, dir=dir) + + def __enter__(self): + return self.file + + def __exit__(self, exc_type, exc_value, traceback): + _os.close(self.fd) + + if not WINDOWS: # XXX + remove(self.file, quiet=True) + +class temp_dir: + def __init__(self, prefix="plano-", suffix="", dir=None): + self.dir = make_temp_dir(prefix=prefix, suffix=suffix, dir=dir) + + def __enter__(self): + return self.dir + + def __exit__(self, exc_type, exc_value, traceback): + remove(self.dir, quiet=True) + +## Time operations + +# Unix time +def get_time(): + return _time.time() + +# Python UTC time +def get_datetime(): + return 
_datetime.datetime.now(tz=_datetime.timezone.utc) + +def parse_timestamp(timestamp, format="%Y-%m-%dT%H:%M:%SZ"): + if timestamp is None: + return None + + datetime = _datetime.datetime.strptime(timestamp, format) + datetime = datetime.replace(tzinfo=_datetime.timezone.utc) + + return datetime + +def format_timestamp(datetime=None, format="%Y-%m-%dT%H:%M:%SZ"): + if datetime is None: + datetime = get_datetime() + + return datetime.strftime(format) + +def format_date(datetime=None): + if datetime is None: + datetime = get_datetime() + + day = datetime.day + month = datetime.strftime("%B") + year = datetime.strftime("%Y") + + return f"{day} {month} {year}" + +def format_time(datetime=None, precision="second"): + if datetime is None: + datetime = get_datetime() + + assert precision in ("minute", "second"), "Illegal precision value" + + hour = datetime.hour + minute = datetime.strftime("%M") + second = datetime.strftime("%S") + + if precision == "second": + return f"{hour}:{minute}:{second}" + else: + return f"{hour}:{minute}" + +def format_duration(seconds, align=False): + assert seconds >= 0 + + if seconds >= 3600: + value = seconds / 3600 + unit = "h" + elif seconds >= 5 * 60: + value = seconds / 60 + unit = "m" + else: + value = seconds + unit = "s" + + if align: + return "{:.1f}{}".format(value, unit) + elif value > 10: + return "{:.0f}{}".format(value, unit) + else: + return remove_suffix("{:.1f}".format(value), ".0") + unit + +def sleep(seconds, quiet=False): + _notice(quiet, "Sleeping for {} {}", seconds, plural("second", seconds)) + + _time.sleep(seconds) + +class Timer: + def __init__(self, timeout=None, timeout_message=None): + self.timeout = timeout + self.timeout_message = timeout_message + + if self.timeout is not None and not hasattr(_signal, "SIGALRM"): # pragma: nocover + self.timeout = None + + self.start_time = None + self.stop_time = None + + def start(self): + self.start_time = get_time() + + if self.timeout is not None: + self.prev_handler = _signal.signal(_signal.SIGALRM, self.raise_timeout) + self.prev_timeout, prev_interval = _signal.setitimer(_signal.ITIMER_REAL, self.timeout) + self.prev_timer_suspend_time = get_time() + + assert prev_interval == 0.0, "This case is not yet handled" + + def stop(self): + self.stop_time = get_time() + + if self.timeout is not None: + assert get_time() - self.prev_timer_suspend_time > 0, "This case is not yet handled" + + _signal.signal(_signal.SIGALRM, self.prev_handler) + _signal.setitimer(_signal.ITIMER_REAL, self.prev_timeout) + + def __enter__(self): + self.start() + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.stop() + + @property + def elapsed_time(self): + assert self.start_time is not None + + if self.stop_time is None: + return get_time() - self.start_time + else: + return self.stop_time - self.start_time + + def raise_timeout(self, *args): + raise PlanoTimeout(self.timeout_message) + +## Unique ID operations + +# Length in bytes, renders twice as long in hex +def get_unique_id(bytes=16): + assert bytes >= 1 + assert bytes <= 16 + + uuid_bytes = _uuid.uuid4().bytes + uuid_bytes = uuid_bytes[:bytes] + + return _binascii.hexlify(uuid_bytes).decode("utf-8") + +## Value operations + +def nvl(value, replacement): + if value is None: + return replacement + + return value + +def is_string(value): + return isinstance(value, str) + +def is_scalar(value): + return value is None or isinstance(value, (str, int, float, complex, bool)) + +def is_empty(value): + return value in (None, "", (), [], {}) + +def 
pformat(value): + return _pprint.pformat(value, width=120) + +def format_empty(value, replacement): + if is_empty(value): + value = replacement + + return value + +def format_not_empty(value, template=None): + if not is_empty(value) and template is not None: + value = template.format(value) + + return value + +def format_repr(obj, limit=None): + attrs = ["{}={}".format(k, repr(v)) for k, v in obj.__dict__.items()] + return "{}({})".format(obj.__class__.__name__, ", ".join(attrs[:limit])) + +class Namespace: + def __init__(self, **kwargs): + for name in kwargs: + setattr(self, name, kwargs[name]) + + def __eq__(self, other): + return vars(self) == vars(other) + + def __contains__(self, key): + return key in self.__dict__ + + def __repr__(self): + return format_repr(self) + +## YAML operations + +def read_yaml(file): + check_module("yaml", "Python module 'yaml' is not found. To install it, run 'pip install pyyaml'.") + + import yaml as _yaml + + file = expand(file) + + with open(file) as f: + return _yaml.safe_load(f) + +def write_yaml(file, data): + check_module("yaml", "Python module 'yaml' is not found. To install it, run 'pip install pyyaml'.") + + import yaml as _yaml + + file = expand(file) + + make_parent_dir(file, quiet=True) + + with open(file, "w") as f: + _yaml.safe_dump(data, f) + + return file + +def parse_yaml(yaml): + check_module("yaml", "Python module 'yaml' is not found. To install it, run 'pip install pyyaml'.") + + import yaml as _yaml + + return _yaml.safe_load(yaml) + +def emit_yaml(data): + check_module("yaml", "Python module 'yaml' is not found. To install it, run 'pip install pyyaml'.") + + import yaml as _yaml + + return _yaml.safe_dump(data) + +def print_yaml(data, **kwargs): + print(emit_yaml(data), **kwargs) + +if PLANO_DEBUG: # pragma: nocover + enable_logging(level="debug") diff --git a/external/skewer/example/python/plano/test.py b/external/skewer/example/python/plano/test.py new file mode 100644 index 0000000..fb87d8d --- /dev/null +++ b/external/skewer/example/python/plano/test.py @@ -0,0 +1,428 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +from .main import * +from .command import * + +import argparse as _argparse +import asyncio as _asyncio +import fnmatch as _fnmatch +import functools as _functools +import importlib as _importlib +import inspect as _inspect +import sys as _sys +import traceback as _traceback + +class PlanoTestCommand(BaseCommand): + def __init__(self, test_modules=[]): + self.test_modules = test_modules + + if _inspect.ismodule(self.test_modules): + self.test_modules = [self.test_modules] + + self.parser = BaseArgumentParser() + self.parser.add_argument("include", metavar="PATTERN", nargs="*", default=["*"], + help="Run tests with names matching PATTERN (default '*', all tests)") + self.parser.add_argument("-e", "--exclude", metavar="PATTERN", action="append", default=[], + help="Do not run tests with names matching PATTERN (repeatable)") + self.parser.add_argument("-m", "--module", action="append", default=[], + help="Collect tests from MODULE (repeatable)") + self.parser.add_argument("-l", "--list", action="store_true", + help="Print the test names and exit") + self.parser.add_argument("--enable", metavar="PATTERN", action="append", default=[], + help=_argparse.SUPPRESS) + self.parser.add_argument("--unskip", metavar="PATTERN", action="append", default=[], + help="Run skipped tests matching PATTERN (repeatable)") + self.parser.add_argument("--timeout", metavar="SECONDS", type=int, default=300, + help="Fail any test running longer than SECONDS (default 300)") + self.parser.add_argument("--fail-fast", action="store_true", + help="Exit on the first failure encountered in a test run") + self.parser.add_argument("--iterations", metavar="COUNT", type=int, default=1, + help="Run the tests COUNT times (default 1)") + self.parser.add_argument("--verbose", action="store_true", + help="Print detailed logging to the console") + self.parser.add_argument("--quiet", action="store_true", + help="Print no logging to the console") + + def parse_args(self, args): + return self.parser.parse_args(args) + + def configure_logging(self, args): + if args.verbose: + return "notice", None + + if args.quiet: + return "error", None + + return "warning", None + + def init(self, args): + self.list_only = args.list + self.include_patterns = args.include + self.exclude_patterns = args.exclude + self.enable_patterns = args.enable + self.unskip_patterns = args.unskip + self.timeout = args.timeout + self.fail_fast = args.fail_fast + self.iterations = args.iterations + self.verbose = args.verbose + self.quiet = args.quiet + + try: + for name in args.module: + self.test_modules.append(_importlib.import_module(name)) + except ImportError as e: + raise PlanoError(e) + + def run(self): + if self.list_only: + print_tests(self.test_modules) + return + + for i in range(self.iterations): + run_tests(self.test_modules, include=self.include_patterns, + exclude=self.exclude_patterns, + enable=self.enable_patterns, unskip=self.unskip_patterns, + test_timeout=self.timeout, fail_fast=self.fail_fast, + verbose=self.verbose, quiet=self.quiet) + +class PlanoTestSkipped(Exception): + pass + +def test(_function=None, name=None, module=None, timeout=None, disabled=False): + class Test: + def __init__(self, function): + self.function = function + self.name = name + self.module = module + self.timeout = timeout + self.disabled = disabled + + if self.name is None: + self.name = self.function.__name__.strip("_").replace("_", "-") + + if self.module is None: + self.module = _inspect.getmodule(self.function) + + if not hasattr(self.module, "_plano_tests"): + 
self.module._plano_tests = list() + + self.module._plano_tests.append(self) + + def __call__(self, test_run, unskipped): + try: + ret = self.function() + + if _inspect.iscoroutine(ret): + _asyncio.run(ret) + except SystemExit as e: + error(e) + raise PlanoError("System exit with code {}".format(e)) + + def __repr__(self): + return "test '{}:{}'".format(self.module.__name__, self.name) + + if _function is None: + return Test + else: + return Test(_function) + +def add_test(name, func, *args, **kwargs): + test(_functools.partial(func, *args, **kwargs), name=name, module=_inspect.getmodule(func)) + +def skip_test(reason=None): + if _inspect.stack()[2].frame.f_locals["unskipped"]: + return + + raise PlanoTestSkipped(reason) + +class expect_exception: + def __init__(self, exception_type=Exception, contains=None): + self.exception_type = exception_type + self.contains = contains + + def __enter__(self): + pass + + def __exit__(self, exc_type, exc_value, traceback): + if exc_value is None: + assert False, "Never encountered expected exception {}".format(self.exception_type.__name__) + + if self.contains is None: + return isinstance(exc_value, self.exception_type) + else: + return isinstance(exc_value, self.exception_type) and self.contains in str(exc_value) + +class expect_error(expect_exception): + def __init__(self, contains=None): + super().__init__(PlanoError, contains=contains) + +class expect_timeout(expect_exception): + def __init__(self, contains=None): + super().__init__(PlanoTimeout, contains=contains) + +class expect_system_exit(expect_exception): + def __init__(self, contains=None): + super().__init__(SystemExit, contains=contains) + +class expect_output(temp_file): + def __init__(self, equals=None, contains=None, startswith=None, endswith=None): + super().__init__() + self.equals = equals + self.contains = contains + self.startswith = startswith + self.endswith = endswith + + def __exit__(self, exc_type, exc_value, traceback): + result = read(self.file) + + if self.equals is None: + assert len(result) > 0, result + else: + assert result == self.equals, result + + if self.contains is not None: + assert self.contains in result, result + + if self.startswith is not None: + assert result.startswith(self.startswith), result + + if self.endswith is not None: + assert result.endswith(self.endswith), result + + super().__exit__(exc_type, exc_value, traceback) + +def print_tests(modules): + if _inspect.ismodule(modules): + modules = (modules,) + + for module in modules: + for test in module._plano_tests: + flags = "(disabled)" if test.disabled else "" + print(" ".join((str(test), flags)).strip()) + +def run_tests(modules, include="*", exclude=(), enable=(), unskip=(), test_timeout=300, + fail_fast=False, verbose=False, quiet=False): + if _inspect.ismodule(modules): + modules = (modules,) + + if is_string(include): + include = (include,) + + if is_string(exclude): + exclude = (exclude,) + + if is_string(enable): + enable = (enable,) + + if is_string(unskip): + enable = (unskip,) + + test_run = TestRun(test_timeout=test_timeout, fail_fast=fail_fast, verbose=verbose, quiet=quiet) + + if verbose: + notice("Starting {}", test_run) + elif not quiet: + cprint("=== Configuration ===", color="cyan") + + props = ( + ("Modules", format_empty(", ".join([x.__name__ for x in modules]), "[none]")), + ("Test timeout", format_duration(test_timeout)), + ("Fail fast", fail_fast), + ) + + print_properties(props) + print() + + stop = False + + for module in modules: + if stop: + break + + if verbose: + 
notice("Running tests from module {} (file {})", repr(module.__name__), repr(module.__file__)) + elif not quiet: + cprint("=== Module {} ===".format(repr(module.__name__)), color="cyan") + + if not hasattr(module, "_plano_tests"): + warning("Module {} has no tests", repr(module.__name__)) + continue + + for test in module._plano_tests: + if stop: + break + + if test.disabled and not any([_fnmatch.fnmatchcase(test.name, x) for x in enable]): + continue + + included = any([_fnmatch.fnmatchcase(test.name, x) for x in include]) + excluded = any([_fnmatch.fnmatchcase(test.name, x) for x in exclude]) + unskipped = any([_fnmatch.fnmatchcase(test.name, x) for x in unskip]) + + if included and not excluded: + test_run.tests.append(test) + stop = _run_test(test_run, test, unskipped) + + if not verbose and not quiet: + print() + + total = len(test_run.tests) + skipped = len(test_run.skipped_tests) + failed = len(test_run.failed_tests) + + if total == 0: + raise PlanoError("No tests ran") + + notes = "" + + if skipped != 0: + notes = "({} skipped)".format(skipped) + + if failed == 0: + result_message = "All tests passed {}".format(notes).strip() + else: + result_message = "{} {} failed {}".format(failed, plural("test", failed), notes).strip() + + if verbose: + if failed == 0: + notice(result_message) + else: + error(result_message) + elif not quiet: + cprint("=== Summary ===", color="cyan") + + props = ( + ("Total", total), + ("Skipped", skipped, format_not_empty(", ".join([x.name for x in test_run.skipped_tests]), "({})")), + ("Failed", failed, format_not_empty(", ".join([x.name for x in test_run.failed_tests]), "({})")), + ) + + print_properties(props) + print() + + cprint("=== RESULT ===", color="cyan") + + if failed == 0: + cprint(result_message, color="green") + else: + cprint(result_message, color="red", bright="True") + + print() + + if failed != 0: + raise PlanoError(result_message) + +def _run_test(test_run, test, unskipped): + if test_run.verbose: + notice("Running {}", test) + elif not test_run.quiet: + print("{:.<65} ".format(test.name + " "), end="") + + timeout = nvl(test.timeout, test_run.test_timeout) + + with temp_file() as output_file: + try: + with Timer(timeout=timeout) as timer: + if test_run.verbose: + test(test_run, unskipped) + else: + with output_redirected(output_file, quiet=True): + test(test_run, unskipped) + except KeyboardInterrupt: + raise + except PlanoTestSkipped as e: + test_run.skipped_tests.append(test) + + if test_run.verbose: + notice("{} SKIPPED ({})", test, format_duration(timer.elapsed_time)) + elif not test_run.quiet: + _print_test_result("SKIPPED", timer, "yellow") + print("Reason: {}".format(str(e))) + except Exception as e: + test_run.failed_tests.append(test) + + if test_run.verbose: + _traceback.print_exc() + + if isinstance(e, PlanoTimeout): + error("{} **FAILED** (TIMEOUT) ({})", test, format_duration(timer.elapsed_time)) + else: + error("{} **FAILED** ({})", test, format_duration(timer.elapsed_time)) + elif not test_run.quiet: + if isinstance(e, PlanoTimeout): + _print_test_result("**FAILED** (TIMEOUT)", timer, color="red", bright=True) + else: + _print_test_result("**FAILED**", timer, color="red", bright=True) + + _print_test_error(e) + _print_test_output(output_file) + + if test_run.fail_fast: + return True + else: + test_run.passed_tests.append(test) + + if test_run.verbose: + notice("{} PASSED ({})", test, format_duration(timer.elapsed_time)) + elif not test_run.quiet: + _print_test_result("PASSED", timer) + +def _print_test_result(status, timer, 
color="white", bright=False): + cprint("{:<7}".format(status), color=color, bright=bright, end="") + print("{:>6}".format(format_duration(timer.elapsed_time, align=True))) + +def _print_test_error(e): + cprint("--- Error ---", color="yellow") + + if isinstance(e, PlanoProcessError): + print("> {}".format(str(e))) + else: + lines = _traceback.format_exc().rstrip().split("\n") + lines = ["> {}".format(x) for x in lines] + + print("\n".join(lines)) + +def _print_test_output(output_file): + if get_file_size(output_file) == 0: + return + + cprint("--- Output ---", color="yellow") + + with open(output_file, "r") as out: + for line in out: + print("> {}".format(line), end="") + +class TestRun: + def __init__(self, test_timeout=None, fail_fast=False, verbose=False, quiet=False): + self.test_timeout = test_timeout + self.fail_fast = fail_fast + self.verbose = verbose + self.quiet = quiet + + self.tests = list() + self.skipped_tests = list() + self.failed_tests = list() + self.passed_tests = list() + + def __repr__(self): + return format_repr(self) + +def _main(): # pragma: nocover + PlanoTestCommand().main() diff --git a/external/skewer/example/python/skewer/__init__.py b/external/skewer/example/python/skewer/__init__.py new file mode 100644 index 0000000..3324b21 --- /dev/null +++ b/external/skewer/example/python/skewer/__init__.py @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +from .main import * diff --git a/external/skewer/example/python/skewer/main.py b/external/skewer/example/python/skewer/main.py new file mode 100644 index 0000000..8db876f --- /dev/null +++ b/external/skewer/example/python/skewer/main.py @@ -0,0 +1,731 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +import inspect + +from plano import * + +__all__ = [ + "generate_readme", "run_steps", "Minikube", +] + +standard_text = read_yaml(join(get_parent_dir(__file__), "standardtext.yaml")) +standard_steps = read_yaml(join(get_parent_dir(__file__), "standardsteps.yaml")) + +def check_environment(): + check_program("base64") + check_program("curl") + check_program("kubectl") + check_program("skupper") + +def resource_exists(resource): + return run(f"kubectl get {resource}", output=DEVNULL, check=False, quiet=True).exit_code == 0 + +def get_resource_json(resource, jsonpath=""): + return call(f"kubectl get {resource} -o jsonpath='{{{jsonpath}}}'", quiet=True) + +def await_resource(resource, timeout=300): + assert "/" in resource, resource + + start_time = get_time() + + while True: + notice(f"Waiting for {resource} to become available") + + if resource_exists(resource): + break + + if get_time() - start_time > timeout: + fail(f"Timed out waiting for {resource}") + + sleep(5, quiet=True) + + if resource.startswith("deployment/"): + try: + run(f"kubectl wait --for condition=available --timeout {timeout}s {resource}", quiet=True, stash=True) + except: + run(f"kubectl logs {resource}") + raise + +def await_ingress(service, timeout=300): + assert service.startswith("service/"), service + + start_time = get_time() + + await_resource(service, timeout=timeout) + + while True: + notice(f"Waiting for hostname or IP from {service} to become available") + + json = get_resource_json(service, ".status.loadBalancer.ingress") + + if json != "": + break + + if get_time() - start_time > timeout: + fail(f"Timed out waiting for hostname or external IP for {service}") + + sleep(5, quiet=True) + + data = parse_json(json) + + if len(data): + if "hostname" in data[0]: + return data[0]["hostname"] + + if "ip" in data[0]: + return data[0]["ip"] + + fail(f"Failed to get hostname or IP from {service}") + +def await_http_ok(service, url_template, user=None, password=None, timeout=300): + assert service.startswith("service/"), service + + start_time = get_time() + + ip = await_ingress(service, timeout=timeout) + + url = url_template.format(ip) + insecure = url.startswith("https") + + while True: + notice(f"Waiting for HTTP OK from {url}") + + try: + http_get(url, insecure=insecure, user=user, password=password, quiet=True) + except PlanoError: + if get_time() - start_time > timeout: + fail(f"Timed out waiting for HTTP OK from {url}") + + sleep(5, quiet=True) + else: + break + +def await_console_ok(): + await_resource("secret/skupper-console-users") + + password = get_resource_json("secret/skupper-console-users", ".data.admin") + password = base64_decode(password) + + await_http_ok("service/skupper", "https://{}:8010/", user="admin", password=password) + +def run_steps(skewer_file, kubeconfigs=[], work_dir=None, debug=False): + notice(f"Running steps (skewer_file='{skewer_file}')") + + check_environment() + + model = Model(skewer_file, kubeconfigs) + model.check() + + if work_dir is None: + work_dir = join(get_user_temp_dir(), "skewer") + remove(work_dir, quiet=True) + make_dir(work_dir, quiet=True) + + try: + for step in model.steps: + if step.name == "cleaning_up": + continue + + run_step(model, step, work_dir) + + if "SKEWER_DEMO" in ENV: + pause_for_demo(model) + except: + if debug: + print_debug_output(model) + + raise + finally: + for step in model.steps: + if step.name == "cleaning_up": + run_step(model, step, work_dir, check=False) + break + +def run_step(model, step, work_dir, check=True): + if not step.commands:
+ return + + notice(f"Running {step}") + + for site_name, commands in step.commands: + with dict(model.sites)[site_name] as site: + if site.platform == "kubernetes": + run(f"kubectl config set-context --current --namespace {site.namespace}", stdout=DEVNULL, quiet=True) + + for command in commands: + if command.apply == "readme": + continue + + if command.await_resource: + await_resource(command.await_resource) + + if command.await_ingress: + await_ingress(command.await_ingress) + + if command.await_http_ok: + await_http_ok(*command.await_http_ok) + + if command.await_console_ok: + await_console_ok() + + if command.run: + run(command.run.replace("~", work_dir), shell=True, check=check) + +def pause_for_demo(model): + notice("Pausing for demo time") + + first_site = [x for _, x in model.sites][0] + console_url = None + password = None + frontend_url = None + + if first_site.platform == "kubernetes": + with first_site: + if resource_exists("service/frontend"): + if get_resource_json("service/frontend", ".spec.type") == "LoadBalancer": + frontend_host = await_ingress("service/frontend") + frontend_url = f"http://{frontend_host}:8080/" + + if resource_exists("secret/skupper-console-users"): + console_host = await_ingress("service/skupper") + console_url = f"https://{console_host}:8010/" + + await_resource("secret/skupper-console-users") + password = get_resource_json("secret/skupper-console-users", ".data.admin") + password = base64_decode(password).decode("ascii") + + print() + print("Demo time!") + print() + print("Sites:") + print() + + for _, site in model.sites: + if site.platform == "kubernetes": + kubeconfig = site.env["KUBECONFIG"] + print(f" {site.name}: export KUBECONFIG={kubeconfig}") + elif site.platform == "podman": + print(f" {site.name}: export SKUPPER_PLATFORM=podman") + + print() + + if frontend_url: + print(f"Frontend URL: {frontend_url}") + print() + + if console_url: + print(f"Console URL: {console_url}") + print( "Console user: admin") + print(f"Console password: {password}") + print() + + if "SKEWER_DEMO_NO_WAIT" not in ENV: + while input("Are you done (yes)? 
") != "yes": # pragma: nocover + pass + +def print_debug_output(model): + print("TROUBLE!") + print("-- Start of debug output") + + for _, site in model.sites: + print(f"---- Debug output for site '{site.name}'") + + with site: + if site.platform == "kubernetes": + run("kubectl get services", check=False) + run("kubectl get deployments", check=False) + run("kubectl get statefulsets", check=False) + run("kubectl get pods", check=False) + run("kubectl get events", check=False) + + run("skupper version", check=False) + run("skupper status", check=False) + run("skupper link status", check=False) + run("skupper service status", check=False) + run("skupper network status", check=False) + run("skupper debug events", check=False) + + if site.platform == "kubernetes": + run("kubectl logs deployment/skupper-router", check=False) + run("kubectl logs deployment/skupper-service-controller", check=False) + + print("-- End of debug output") + +def generate_readme(skewer_file, output_file): + notice(f"Generating the readme (skewer_file='{skewer_file}', output_file='{output_file}')") + + model = Model(skewer_file) + model.check() + + out = list() + + def generate_workflow_url(workflow): + result = parse_url(workflow) + + if result.scheme: + return workflow + + owner, repo = get_github_owner_repo() + + return f"https://github.com/{owner}/{repo}/actions/workflows/{workflow}" + + def generate_step_heading(step): + if step.numbered: + return f"Step {step.number}: {step.title}" + else: + return step.title + + def append_toc_entry(heading, condition=True): + if not condition: + return + + fragment = string_replace(heading, r"[ -]", "_") + fragment = string_replace(fragment, r"[\W]", "") + fragment = string_replace(fragment, "_", "-") + fragment = fragment.lower() + + out.append(f"* [{heading}](#{fragment})") + + def append_section(heading, text): + if not text: + return + + out.append(f"## {heading}") + out.append("") + out.append(text.strip()) + out.append("") + + out.append(f"# {model.title}") + out.append("") + + if model.workflow: + url = generate_workflow_url(model.workflow) + out.append(f"[![main]({url}/badge.svg)]({url})") + out.append("") + + if model.subtitle: + out.append(f"#### {model.subtitle}") + out.append("") + + out.append(standard_text["example_suite"].strip()) + out.append("") + out.append("#### Contents") + out.append("") + + append_toc_entry("Overview", model.overview) + append_toc_entry("Prerequisites") + + for step in model.steps: + append_toc_entry(generate_step_heading(step)) + + append_toc_entry("Summary") + append_toc_entry("Next steps") + append_toc_entry("About this example") + + out.append("") + + append_section("Overview", model.overview) + append_section("Prerequisites", model.prerequisites) + + for step in model.steps: + heading = generate_step_heading(step) + text = generate_readme_step(model, step) + + append_section(heading, text) + + append_section("Summary", model.summary) + append_section("Next steps", model.next_steps) + append_section("About this example", standard_text["about_this_example"].strip()) + + write(output_file, "\n".join(out).strip() + "\n") + +def generate_readme_step(model, step): + notice(f"Generating {step}") + + out = list() + + if step.preamble: + out.append(step.preamble.strip()) + out.append("") + + for site_name, commands in step.commands: + site = dict(model.sites)[site_name] + outputs = list() + + out.append(f"_**{site.title}:**_") + out.append("") + out.append("~~~ shell") + + for command in commands: + if command.apply == "test": + continue + + 
if command.run: + out.append(command.run) + + if command.output: + assert command.run + + outputs.append((command.run, command.output)) + + out.append("~~~") + out.append("") + + if outputs: + out.append("_Sample output:_") + out.append("") + out.append("~~~ console") + out.append("\n\n".join((f"$ {run}\n{output.strip()}" for run, output in outputs))) + out.append("~~~") + out.append("") + + if step.postamble: + out.append(step.postamble.strip()) + + return "\n".join(out).strip() + +def apply_kubeconfigs(model, kubeconfigs): + kube_sites = [x for _, x in model.sites if x.platform == "kubernetes"] + + if kubeconfigs and len(kubeconfigs) < len(kube_sites): + fail("The provided kubeconfigs are fewer than the number of Kubernetes sites") + + for site, kubeconfig in zip(kube_sites, kubeconfigs): + site.env["KUBECONFIG"] = kubeconfig + +def apply_standard_steps(model): + notice("Applying standard steps") + + for step in model.steps: + if "standard" not in step.data: + continue + + standard_step_name = step.data["standard"] + + try: + standard_step_data = standard_steps[standard_step_name] + except KeyError: + fail(f"Standard step '{standard_step_name}' not found") + + del step.data["standard"] + + def apply_attribute(name, default=None): + if name not in step.data: + value = standard_step_data.get(name, default) + + if value and name in ("title", "preamble", "postamble"): + for i, site in enumerate([x for _, x in model.sites]): + value = value.replace(f"@site{i}@", site.title) + + if site.namespace: + value = value.replace(f"@namespace{i}@", site.namespace) + + step.data[name] = value + + apply_attribute("name") + apply_attribute("title") + apply_attribute("numbered", True) + apply_attribute("preamble") + apply_attribute("postamble") + + platform = standard_step_data.get("platform") + + if "commands" not in step.data and "commands" in standard_step_data: + step.data["commands"] = dict() + + for i, item in enumerate(dict(model.sites).items()): + site_name, site = item + + if platform and site.platform != platform: + continue + + if str(i) in standard_step_data["commands"]: + # Is a specific index in the standard commands? + commands = standard_step_data["commands"][str(i)] + step.data["commands"][site_name] = resolve_command_variables(commands, site) + elif "*" in standard_step_data["commands"]: + # Is "*" in the standard commands? 
+ commands = standard_step_data["commands"]["*"] + step.data["commands"][site_name] = resolve_command_variables(commands, site) + else: + # Otherwise, omit commands for this site + continue + +def resolve_command_variables(commands, site): + resolved_commands = list() + + for command in commands: + resolved_command = dict(command) + + if "run" in command: + resolved_command["run"] = command["run"] + + if site.platform == "kubernetes": + resolved_command["run"] = resolved_command["run"].replace("@kubeconfig@", site.env["KUBECONFIG"]) + resolved_command["run"] = resolved_command["run"].replace("@namespace@", site.namespace) + + if "output" in command: + resolved_command["output"] = command["output"] + + if site.platform == "kubernetes": + resolved_command["output"] = resolved_command["output"].replace("@kubeconfig@", site.env["KUBECONFIG"]) + resolved_command["output"] = resolved_command["output"].replace("@namespace@", site.namespace) + + resolved_commands.append(resolved_command) + + return resolved_commands + +def get_github_owner_repo(): + check_program("git") + + url = call("git remote get-url origin", quiet=True) + result = parse_url(url) + + if result.scheme == "" and result.path.startswith("git@github.com:"): + path = remove_prefix(result.path, "git@github.com:") + path = remove_suffix(path, ".git") + + return path.split("/", 1) + + if result.scheme in ("http", "https") and result.netloc == "github.com": + path = remove_prefix(result.path, "/") + + return path.split("/", 1) + + fail("Unknown origin URL format") + +def object_property(name, default=None): + def get(obj): + return obj.data.get(name, default) + + return property(get) + +def check_required_attributes(obj, *names): + for name in names: + if name not in obj.data: + fail(f"{obj} is missing required attribute '{name}'") + +def check_unknown_attributes(obj): + known_attributes = dict(inspect.getmembers(obj.__class__, lambda x: isinstance(x, property))) + + for name in obj.data: + if name not in known_attributes: + fail(f"{obj} has unknown attribute '{name}'") + +class Model: + title = object_property("title") + subtitle = object_property("subtitle") + workflow = object_property("workflow", "main.yaml") + overview = object_property("overview") + prerequisites = object_property("prerequisites", standard_text["prerequisites"].strip()) + summary = object_property("summary") + next_steps = object_property("next_steps", standard_text["next_steps"].strip()) + + def __init__(self, skewer_file, kubeconfigs=[]): + self.skewer_file = skewer_file + self.data = read_yaml(self.skewer_file) + + apply_kubeconfigs(self, kubeconfigs) + apply_standard_steps(self) + + def __repr__(self): + return f"model '{self.skewer_file}'" + + def check(self): + check_required_attributes(self, "title", "sites", "steps") + check_unknown_attributes(self) + + for _, site in self.sites: + site.check() + + for step in self.steps: + step.check() + + @property + def sites(self): + for name, data in self.data["sites"].items(): + yield name, Site(self, data, name) + + @property + def steps(self): + for data in self.data["steps"]: + yield Step(self, data) + +class Site: + platform = object_property("platform") + namespace = object_property("namespace") + env = object_property("env", dict()) + + def __init__(self, model, data, name): + assert name is not None + + self.model = model + self.data = data + self.name = name + + def __repr__(self): + return f"site '{self.name}'" + + def __enter__(self): + self._logging_context = logging_context(self.name) + self._working_env 
= working_env(**self.env) + + self._logging_context.__enter__() + self._working_env.__enter__() + + return self + + def __exit__(self, exc_type, exc_value, traceback): + self._working_env.__exit__(exc_type, exc_value, traceback) + self._logging_context.__exit__(exc_type, exc_value, traceback) + + def check(self): + check_required_attributes(self, "platform") + check_unknown_attributes(self) + + if self.platform not in ("kubernetes", "podman"): + fail(f"{self} attribute 'platform' has an illegal value: {self.platform}") + + if self.platform == "kubernetes": + check_required_attributes(self, "namespace") + + if "KUBECONFIG" not in self.env: + fail(f"Kubernetes {self} has no KUBECONFIG environment variable") + + if self.platform == "podman": + if "SKUPPER_PLATFORM" not in self.env: + fail(f"Podman {self} has no SKUPPER_PLATFORM environment variable") + + platform = self.env["SKUPPER_PLATFORM"] + + if platform != "podman": + fail(f"Podman {self} environment variable SKUPPER_PLATFORM has an illegal value: {platform}") + + @property + def title(self): + return self.data.get("title", capitalize(self.name)) + +class Step: + numbered = object_property("numbered", True) + name = object_property("name") + title = object_property("title") + preamble = object_property("preamble") + postamble = object_property("postamble") + + def __init__(self, model, data): + self.model = model + self.data = data + + def __repr__(self): + return f"step {self.number} '{self.title}'" + + def check(self): + check_required_attributes(self, "title") + check_unknown_attributes(self) + + site_names = [x.name for _, x in self.model.sites] + + for site_name, commands in self.commands: + if site_name not in site_names: + fail(f"Unknown site name '{site_name}' in commands for {self}") + + for command in commands: + command.check() + + @property + def number(self): + return self.model.data["steps"].index(self.data) + 1 + + @property + def commands(self): + for site_name, commands in self.data.get("commands", dict()).items(): + yield site_name, [Command(self.model, data) for data in commands] + +class Command: + run = object_property("run") + apply = object_property("apply") + output = object_property("output") + await_resource = object_property("await_resource") + await_ingress = object_property("await_ingress") + await_http_ok = object_property("await_http_ok") + await_console_ok = object_property("await_console_ok") + + def __init__(self, model, data): + self.model = model + self.data = data + + def __repr__(self): + if self.run: + return f"command '{self.run.splitlines()[0]}'" + + return "command" + + def check(self): + check_unknown_attributes(self) + +class Minikube: + def __init__(self, skewer_file): + self.skewer_file = skewer_file + self.kubeconfigs = list() + self.work_dir = join(get_user_temp_dir(), "skewer") + + def __enter__(self): + notice("Starting Minikube") + + check_environment() + check_program("minikube") + + profile_data = parse_json(call("minikube profile list --output json", quiet=True)) + + for profile in profile_data.get("valid", []): + if profile["Name"] == "skewer": + fail("A Minikube profile 'skewer' already exists. 
Delete it using 'minikube delete -p skewer'.") + + remove(self.work_dir, quiet=True) + make_dir(self.work_dir, quiet=True) + + run("minikube start -p skewer --auto-update-drivers false") + + tunnel_output_file = open(f"{self.work_dir}/minikube-tunnel-output", "w") + self.tunnel = start("minikube tunnel -p skewer", output=tunnel_output_file) + + model = Model(self.skewer_file) + model.check() + + kube_sites = [x for _, x in model.sites if x.platform == "kubernetes"] + + for site in kube_sites: + kubeconfig = site.env["KUBECONFIG"].replace("~", self.work_dir) + site.env["KUBECONFIG"] = kubeconfig + + self.kubeconfigs.append(kubeconfig) + + with site: + run("minikube update-context -p skewer") + check_file(ENV["KUBECONFIG"]) + + return self + + def __exit__(self, exc_type, exc_value, traceback): + notice("Stopping Minikube") + + stop(self.tunnel) + + run("minikube delete -p skewer") diff --git a/external/skewer/example/python/skewer/planocommands.py b/external/skewer/example/python/skewer/planocommands.py new file mode 100644 index 0000000..754fb1e --- /dev/null +++ b/external/skewer/example/python/skewer/planocommands.py @@ -0,0 +1,91 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +from plano import * +from plano.github import * +from skewer import * + +_debug_param = CommandParameter("debug", help="Produce extra debug output on failure") + +@command +def generate(output="README.md"): + """ + Generate README.md from the data in skewer.yaml + """ + generate_readme("skewer.yaml", output) + +@command +def render(quiet=False): + """ + Render README.html from README.md + """ + generate() + + markdown = read("README.md") + html = convert_github_markdown(markdown) + + write("README.html", html) + + if not quiet: + print(f"file:{get_real_path('README.html')}") + +@command +def clean(): + remove(find(".", "__pycache__")) + remove("README.html") + +@command(parameters=[_debug_param]) +def run_(*kubeconfigs, debug=False): + """ + Run the example steps + + If no kubeconfigs are provided, Skewer starts a local Minikube + instance and runs the steps using it. 
+ """ + if not kubeconfigs: + with Minikube("skewer.yaml") as mk: + run_steps("skewer.yaml", kubeconfigs=mk.kubeconfigs, work_dir=mk.work_dir, debug=debug) + else: + run_steps("skewer.yaml", kubeconfigs=kubeconfigs, debug=debug) + +@command(parameters=[_debug_param]) +def demo(*kubeconfigs, debug=False): + """ + Run the example steps and pause for a demo before cleaning up + """ + with working_env(SKEWER_DEMO=1): + run_(*kubeconfigs, debug=debug) + +@command(parameters=[_debug_param]) +def test_(debug=False): + """ + Test README generation and run the steps on Minikube + """ + generate(output=make_temp_file()) + run_(debug=debug) + +@command +def update_skewer(): + """ + Update the embedded Skewer repo and GitHub workflow + + This results in local changes to review and commit. + """ + update_external_from_github("external/skewer", "skupperproject", "skewer") + copy("external/skewer/config/.github/workflows/main.yaml", ".github/workflows/main.yaml") diff --git a/external/skewer/example/python/skewer/standardsteps.yaml b/external/skewer/example/python/skewer/standardsteps.yaml new file mode 100644 index 0000000..8a57de3 --- /dev/null +++ b/external/skewer/example/python/skewer/standardsteps.yaml @@ -0,0 +1,295 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +install_the_skupper_command_line_tool: + title: Install the Skupper command-line tool + preamble: | + This example uses the Skupper command-line tool to deploy Skupper. + You need to install the `skupper` command only once for each + development environment. + + On Linux or Mac, you can use the install script (inspect it + [here][install-script]) to download and extract the command: + + ~~~ shell + curl https://skupper.io/install.sh | sh + ~~~ + + The script installs the command under your home directory. It + prompts you to add the command to your path if necessary. + + For Windows and other installation options, see [Installing + Skupper][install-docs]. + + [install-script]: https://github.com/skupperproject/skupper-website/blob/main/input/install.sh + [install-docs]: https://skupper.io/install/ +kubernetes/set_up_your_namespaces: + title: Set up your namespaces + platform: kubernetes + preamble: | + Skupper is designed for use with multiple Kubernetes namespaces, + usually on different clusters. The `skupper` and `kubectl` + commands use your [kubeconfig][kubeconfig] and current context to + select the namespace where they operate. + + [kubeconfig]: https://kubernetes.io/docs/concepts/configuration/organize-cluster-access-kubeconfig/ + + Your kubeconfig is stored in a file in your home directory. The + `skupper` and `kubectl` commands use the `KUBECONFIG` environment + variable to locate it. + + A single kubeconfig supports only one active context per user. 
+ Since you will be using multiple contexts at once in this + exercise, you need to create distinct kubeconfigs. + + For each namespace, open a new terminal window. In each terminal, + set the `KUBECONFIG` environment variable to a different path and + log in to your cluster. Then create the namespace you wish to use + and set the namespace on your current context. + + **Note:** The login procedure varies by provider. See the + documentation for yours: + + * [Minikube](https://skupper.io/start/minikube.html#cluster-access) + * [Amazon Elastic Kubernetes Service (EKS)](https://skupper.io/start/eks.html#cluster-access) + * [Azure Kubernetes Service (AKS)](https://skupper.io/start/aks.html#cluster-access) + * [Google Kubernetes Engine (GKE)](https://skupper.io/start/gke.html#cluster-access) + * [IBM Kubernetes Service](https://skupper.io/start/ibmks.html#cluster-access) + * [OpenShift](https://skupper.io/start/openshift.html#cluster-access) + commands: + "*": + - run: export KUBECONFIG=@kubeconfig@ + - run: "# Enter your provider-specific login command" + - run: kubectl create namespace @namespace@ + apply: readme + - run: kubectl create namespace @namespace@ --dry-run=client -o yaml | kubectl apply -f - + apply: test + - run: kubectl config set-context --current --namespace @namespace@ +kubernetes/set_up_your_kubernetes_namespace: + title: Set up your Kubernetes namespace + platform: kubernetes + preamble: | + Open a new terminal window and log in to your cluster. Then + create the namespace you wish to use and set the namespace on your + current context. + + **Note:** The login procedure varies by provider. See the + documentation for your chosen providers: + + * [Minikube](https://skupper.io/start/minikube.html#cluster-access) + * [Amazon Elastic Kubernetes Service (EKS)](https://skupper.io/start/eks.html#cluster-access) + * [Azure Kubernetes Service (AKS)](https://skupper.io/start/aks.html#cluster-access) + * [Google Kubernetes Engine (GKE)](https://skupper.io/start/gke.html#cluster-access) + * [IBM Kubernetes Service](https://skupper.io/start/ibmks.html#cluster-access) + * [OpenShift](https://skupper.io/start/openshift.html#cluster-access) + commands: + "*": + - run: "# Enter your provider-specific login command" + - run: kubectl create namespace @namespace@ + - run: kubectl config set-context --current --namespace @namespace@ +kubernetes/create_your_sites: + title: Create your sites + platform: kubernetes + preamble: | + A Skupper _site_ is a location where components of your + application are running. Sites are linked together to form a + network for your application. In Kubernetes, a site is associated + with a namespace. + + For each namespace, use `skupper init` to create a site. This + deploys the Skupper router and controller. Then use `skupper + status` to see the outcome. + + **Note:** If you are using Minikube, you need to [start minikube + tunnel][minikube-tunnel] before you run `skupper init`. + + [minikube-tunnel]: https://skupper.io/start/minikube.html#running-minikube-tunnel + commands: + "0": + - run: skupper init + output: | + Waiting for LoadBalancer IP or hostname... + Waiting for status... + Skupper is now installed in namespace 'west'. Use 'skupper status' to get more information. + - run: skupper status + output: | + Skupper is enabled for namespace "west". It is not connected to any other sites. It has no exposed services. + "*": + - run: skupper init + output: | + Waiting for LoadBalancer IP or hostname... + Waiting for status... 
+ Skupper is now installed in namespace 'east'. Use 'skupper status' to get more information. + - run: skupper status + output: | + Skupper is enabled for namespace "east". It is not connected to any other sites. It has no exposed services. + postamble: | + As you move through the steps below, you can use `skupper status` at + any time to check your progress. +podman/set_up_your_podman_network: + title: Set up your Podman network + platform: podman + preamble: | + Open a new terminal window and set the `SKUPPER_PLATFORM` + environment variable to `podman`. This sets the Skupper platform + to Podman for this terminal session. + + Use `podman network create` to create the Podman network that + Skupper will use. + + Use `systemctl` to enable the Podman API service. + commands: + "*": + - run: export SKUPPER_PLATFORM=podman + - run: podman network create skupper + apply: readme + - run: if ! podman network exists skupper; then podman network create skupper; fi + apply: test + - run: systemctl --user enable --now podman.socket + postamble: | + If the `systemctl` command doesn't work, you can try the `podman + system service` command instead: + + ~~~ + podman system service --time=0 unix://$XDG_RUNTIME_DIR/podman/podman.sock & + ~~~ +link_your_sites: + title: Link your sites + preamble: | + Creating a link requires use of two `skupper` commands in + conjunction, `skupper token create` and `skupper link create`. + + The `skupper token create` command generates a secret token that + signifies permission to create a link. The token also carries the + link details. Then, in a remote site, the `skupper link + create` command uses the token to create a link to the site + that generated it. + + **Note:** The link token is truly a *secret*. Anyone who has the + token can link to your site. Make sure that only those you trust + have access to it. + + First, use `skupper token create` in site @site0@ to generate the + token. Then, use `skupper link create` in site @site1@ to link + the sites. + commands: + "0": + - run: skupper token create ~/secret.token + output: Token written to ~/secret.token + "1": + - run: skupper link create ~/secret.token + output: | + Site configured to link to https://10.105.193.154:8081/ed9c37f6-d78a-11ec-a8c7-04421a4c5042 (name=link1) + Check the status of the link using 'skupper link status'. + - run: skupper link status --wait 60 + apply: test + postamble: | + If your terminal sessions are on different machines, you may need + to use `scp` or a similar tool to transfer the token securely. By + default, tokens expire after a single use or 15 minutes after + creation. +cleaning_up: + name: cleaning_up + title: Cleaning up + numbered: false + preamble: | + To remove Skupper and the other resources from this exercise, use + the following commands. + commands: + "*": + - run: skupper delete +hello_world/deploy_the_frontend_and_backend: + title: Deploy the frontend and backend + preamble: | + This example runs the frontend and the backend in separate + Kubernetes namespaces, on different clusters. + + Use `kubectl create deployment` to deploy the frontend in + namespace `@namespace0@` and the backend in namespace + `@namespace1@`.
+ commands: + "0": + - run: kubectl create deployment frontend --image quay.io/skupper/hello-world-frontend + "1": + - run: kubectl create deployment backend --image quay.io/skupper/hello-world-backend --replicas 3 +hello_world/expose_the_backend: + title: Expose the backend + preamble: | + We now have our sites linked to form a Skupper network, but no + services are exposed on it. Skupper uses the `skupper expose` + command to select a service from one site for exposure in all the + linked sites. + + Use `skupper expose` to expose the backend service in @site1@ to + the frontend in @site0@. + commands: + "1": + - await_resource: deployment/backend + - run: skupper expose deployment/backend --port 8080 + output: deployment backend exposed as backend +hello_world/access_the_frontend: + title: Access the frontend + preamble: | + In order to use and test the application, we need external access + to the frontend. + + Use `kubectl expose` with `--type LoadBalancer` to open network + access to the frontend service. + + Once the frontend is exposed, use `kubectl get service/frontend` + to look up the external IP of the frontend service. If the + external IP is `<pending>`, try again after a moment. + + Once you have the external IP, use `curl` or a similar tool to + request the `/api/health` endpoint at that address. + + **Note:** The `<external-ip>` field in the following commands is a + placeholder. The actual value is an IP address. + commands: + "0": + - run: kubectl expose deployment/frontend --port 8080 --type LoadBalancer + output: service/frontend exposed + - await_resource: service/frontend + - run: kubectl get service/frontend + apply: readme + output: | + NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE + frontend LoadBalancer 10.103.232.28 <external-ip> 8080:30407/TCP 15s + - run: curl http://<external-ip>:8080/api/health + apply: readme + output: OK + - await_http_ok: [service/frontend, "http://{}:8080/api/health"] + postamble: | + If everything is in order, you can now access the web interface by + navigating to `http://<external-ip>:8080/` in your browser. +hello_world/cleaning_up: + name: cleaning_up + title: Cleaning up + numbered: false + preamble: | + To remove Skupper and the other resources from this exercise, use + the following commands: + commands: + "0": + - run: skupper delete + - run: kubectl delete service/frontend + - run: kubectl delete deployment/frontend + "1": + - run: skupper delete + - run: kubectl delete deployment/backend diff --git a/external/skewer/example/python/skewer/standardtext.yaml b/external/skewer/example/python/skewer/standardtext.yaml new file mode 100644 index 0000000..add76a2 --- /dev/null +++ b/external/skewer/example/python/skewer/standardtext.yaml @@ -0,0 +1,49 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License.
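The standard steps above are pulled into an example by name. As a rough illustration of the data that apply_standard_steps() in skewer/main.py operates on, a minimal skewer.yaml might parse to something like the structure below. This sketch is assumed rather than taken from the patch; the title, site name, and kubeconfig path are hypothetical.

~~~ python
# Rough sketch (not from the patch) of the parsed skewer.yaml data that
# Model stores in self.data. Steps carrying a "standard" key are expanded by
# apply_standard_steps() using the entries in standardsteps.yaml above.
data = {
    "title": "Skupper Hello World",                     # required by Model.check()
    "sites": {
        "west": {                                       # hypothetical site name
            "platform": "kubernetes",                   # required for every site
            "namespace": "west",                        # required for kubernetes sites
            "env": {"KUBECONFIG": "~/kubeconfig-west"}, # "~" is replaced with the work dir by Minikube
        },
    },
    "steps": [
        {"standard": "install_the_skupper_command_line_tool"},
        {"standard": "kubernetes/set_up_your_namespaces"},
        {"standard": "cleaning_up"},
    ],
}
~~~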
+# + +example_suite: | + This example is part of a [suite of examples][examples] showing the + different ways you can use [Skupper][website] to connect services + across cloud providers, data centers, and edge sites. + + [website]: https://skupper.io/ + [examples]: https://skupper.io/examples/index.html +prerequisites: | + * The `kubectl` command-line tool, version 1.15 or later + ([installation guide][install-kubectl]) + + * Access to at least one Kubernetes cluster, from [any provider you + choose][kube-providers] + + [install-kubectl]: https://kubernetes.io/docs/tasks/tools/install-kubectl/ + [kube-providers]: https://skupper.io/start/kubernetes.html +next_steps: | + Check out the other [examples][examples] on the Skupper website. +about_this_example: | + This example was produced using [Skewer][skewer], a library for + documenting and testing Skupper examples. + + [skewer]: https://github.com/skupperproject/skewer + + Skewer provides utility functions for generating the README and + running the example steps. Use the `./plano` command in the project + root to see what is available. + + To quickly stand up the example using Minikube, try the `./plano demo` + command. diff --git a/external/skewer/example/python/skewer/tests.py b/external/skewer/example/python/skewer/tests.py new file mode 100644 index 0000000..7fa00b6 --- /dev/null +++ b/external/skewer/example/python/skewer/tests.py @@ -0,0 +1,67 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +from plano import * +from skewer import * + +@test +def plano_commands(): + with working_dir("example"): + run("./plano") + run("./plano generate") + run("./plano render") + run("./plano clean") + +@test +def config_files(): + check_file("config/.github/workflows/main.yaml") + check_file("config/.gitignore") + check_file("config/.plano.py") + + parse_yaml(read("config/.github/workflows/main.yaml")) + +@test +def generate_readme_(): + with working_dir("example"): + generate_readme("skewer.yaml", "README.md") + check_file("README.md") + +@test +def run_steps_(): + with working_dir("example"): + with Minikube("skewer.yaml") as mk: + run_steps("skewer.yaml", kubeconfigs=mk.kubeconfigs, work_dir=mk.work_dir, debug=True) + +@test +def run_steps_demo(): + with working_dir("example"): + with Minikube("skewer.yaml") as mk: + with working_env(SKEWER_DEMO=1, SKEWER_DEMO_NO_WAIT=1): + run_steps("skewer.yaml", kubeconfigs=mk.kubeconfigs, work_dir=mk.work_dir, debug=True) + +@test +def run_steps_debug(): + with working_dir("example"): + with expect_error(): + with working_env(SKEWER_FAIL=1): + with Minikube("skewer.yaml") as mk: + run_steps("skewer.yaml", kubeconfigs=mk.kubeconfigs, work_dir=mk.work_dir, debug=True) + +if __name__ == "__main__": + import sys + run_tests(sys.modules[__name__]) diff --git a/external/skewer/python/plano/__init__.py b/external/skewer/python/plano/__init__.py new file mode 100644 index 0000000..3218323 --- /dev/null +++ b/external/skewer/python/plano/__init__.py @@ -0,0 +1,24 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +from .main import * +from .main import _default_sigterm_handler + +from .command import * +from .test import * diff --git a/external/skewer/python/plano/_testproject/.plano.py b/external/skewer/python/plano/_testproject/.plano.py new file mode 100644 index 0000000..8cda2e7 --- /dev/null +++ b/external/skewer/python/plano/_testproject/.plano.py @@ -0,0 +1,112 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License.
+# + +from plano import * + +@command +def base_command(alpha, beta, omega="x"): + """ + Base command help + """ + + print("base", alpha, beta, omega) + +@command(name="extended-command", parent=base_command) +def extended_command(alpha, beta, omega="y"): + print("extended", alpha, omega) + parent(alpha, beta, omega) + +@command(parameters=[CommandParameter("message_", help="The message to print", display_name="message"), + CommandParameter("count", help="Print the message COUNT times"), + CommandParameter("extra", default=1, short_option="e")]) +def echo(message_, count=1, extra=None, trouble=False, verbose=False): + """ + Print a message to the console + """ + + print("Echoing (message={}, count={})".format(message_, count)) + + if trouble: + raise Exception("Trouble") + + for i in range(count): + print(message_) + +@command +def echoecho(message): + echo(message) + +@command +def haberdash(first, *middle, last="bowler"): + """ + Habberdash command help + """ + + data = [first, *middle, last] + write_json("haberdash.json", data) + +@command(parameters=[CommandParameter("optional", positional=True)]) +def balderdash(required, optional="malarkey", other="rubbish", **extra_kwargs): + """ + Balderdash command help + """ + + data = [required, optional, other] + write_json("balderdash.json", data) + +@command +def splasher(): + write_json("splasher.json", [1]) + +@command +def dasher(alpha, beta=123): + pass + +@command(passthrough=True) +def dancer(gamma, omega="abc", passthrough_args=[]): + write_json("dancer.json", passthrough_args) + +# Vixen's parent calls prancer. We are testing to ensure the extended +# prancer (below) is executed. + +from plano._tests import prancer, vixen + +@command(parent=prancer) +def prancer(): + parent() + + notice("Extended prancer") + + write_json("prancer.json", True) + +@command(parent=vixen) +def vixen(): + parent() + +@command +def no_parent(): + parent() + +@command(parameters=[CommandParameter("spinach")]) +def feta(*args, **kwargs): + write_json("feta.json", kwargs["spinach"]) + +@command(hidden=True) +def invisible(something="nothing"): + write_json("invisible.json", something) diff --git a/external/skewer/python/plano/_testproject/src/chucker/__init__.py b/external/skewer/python/plano/_testproject/src/chucker/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/external/skewer/python/plano/_testproject/src/chucker/moretests.py b/external/skewer/python/plano/_testproject/src/chucker/moretests.py new file mode 100644 index 0000000..2607880 --- /dev/null +++ b/external/skewer/python/plano/_testproject/src/chucker/moretests.py @@ -0,0 +1,24 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
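One point worth noting about the test-project commands above: functions wrapped with @command remain ordinary callables, which is how echoecho() reuses echo() there and how demo() in skewer/planocommands.py reuses run_(). A small sketch of that pattern, with made-up command names, follows; it is illustrative only and not part of the patch.

~~~ python
# Hypothetical .plano.py fragment: one command reusing another by calling it
# directly, the same pattern used by echoecho() above and demo() in
# skewer/planocommands.py.
from plano import *

@command
def greet(name, count=1):
    for _ in range(count):
        print(f"Hello, {name}")

@command
def greet_twice(name):
    greet(name, count=2)    # a plain function call, no CLI dispatch involved
~~~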
+# + +from plano import * + +@test +def hello_again(): + print("Hello again") diff --git a/external/skewer/python/plano/_testproject/src/chucker/tests.py b/external/skewer/python/plano/_testproject/src/chucker/tests.py new file mode 100644 index 0000000..4e0cec1 --- /dev/null +++ b/external/skewer/python/plano/_testproject/src/chucker/tests.py @@ -0,0 +1,70 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +from plano import * + +@test +def hello(): + print("Hello") + +@test +async def hello_async(): + print("Hello") + +@test +def goodbye(): + print("Goodbye") + +@test(disabled=True) +def badbye(): + print("Badbye") + assert False + +@test(disabled=True) +def skipped(): + skip_test("Skipped") + assert False + +@test(disabled=True) +def keyboard_interrupt(): + raise KeyboardInterrupt() + +@test(disabled=True, timeout=0.05) +def timeout(): + sleep(10, quiet=True) + assert False + +@test(disabled=True) +def process_error(): + run("expr 1 / 0") + +@test(disabled=True) +def system_exit_(): + exit(1) + +def test_widget(message): + print(message) + +for message in "hi", "lo", "in between": + add_test(f"message-{message}", test_widget, message) + +@test(disabled=True) +def badbye2(): + print("Badbye 2") + assert False diff --git a/external/skewer/python/plano/_tests.py b/external/skewer/python/plano/_tests.py new file mode 100644 index 0000000..159c739 --- /dev/null +++ b/external/skewer/python/plano/_tests.py @@ -0,0 +1,1338 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
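The add_test() loop at the end of the chucker tests above is the idiom for parameterized tests. Because add_test() binds arguments with functools.partial (see its definition in plano/test.py), keyword arguments are forwarded along with positional ones. A brief sketch with hypothetical names, not part of the patch:

~~~ python
# Hypothetical parameterized tests built with add_test(); positional and
# keyword arguments are both bound via functools.partial inside add_test().
from plano import add_test

def check_message(message, shout=False):
    print(message.upper() if shout else message)

for message in ("hi", "lo"):
    add_test(f"check-{message}", check_message, message, shout=True)
~~~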
+# + +import datetime as _datetime +import getpass as _getpass +import os as _os +import signal as _signal +import socket as _socket +import sys as _sys +import threading as _threading + +from .github import * + +try: + import http.server as _http +except ImportError: # pragma: nocover + import BaseHTTPServer as _http + +from .test import * + +test_project_dir = join(get_parent_dir(__file__), "_testproject") + +class test_project(working_dir): + def __enter__(self): + dir = super(test_project, self).__enter__() + copy(test_project_dir, ".", inside=False) + return dir + +TINY_INTERVAL = 0.05 + +@test +def archive_operations(): + with working_dir(): + make_dir("some-dir") + touch("some-dir/some-file") + + make_archive("some-dir") + assert is_file("some-dir.tar.gz"), list_dir() + + extract_archive("some-dir.tar.gz", output_dir="some-subdir") + assert is_dir("some-subdir/some-dir"), list_dir("some-subdir") + assert is_file("some-subdir/some-dir/some-file"), list_dir("some-subdir/some-dir") + + rename_archive("some-dir.tar.gz", "something-else") + assert is_file("something-else.tar.gz"), list_dir() + + extract_archive("something-else.tar.gz") + assert is_dir("something-else"), list_dir() + assert is_file("something-else/some-file"), list_dir("something-else") + +@test +def command_operations(): + class SomeCommand(BaseCommand): + def __init__(self): + super().__init__() + + self.parser = BaseArgumentParser() + self.parser.add_argument("--interrupt", action="store_true") + self.parser.add_argument("--explode", action="store_true") + self.parser.add_argument("--verbose", action="store_true") + self.parser.add_argument("--quiet", action="store_true") + + def parse_args(self, args): + return self.parser.parse_args(args) + + def init(self, args): + self.interrupt = args.interrupt + self.explode = args.explode + self.verbose = args.verbose + self.quiet = args.quiet + + def run(self): + if self.interrupt: + raise KeyboardInterrupt() + + if self.explode: + raise PlanoError("Exploded") + + if self.verbose: + print("Hello") + + SomeCommand().main([]) + SomeCommand().main(["--verbose"]) + SomeCommand().main(["--interrupt"]) + + with expect_system_exit(): + SomeCommand().main(["--verbose", "--explode"]) + +@test +def console_operations(): + eprint("Here's a story") + eprint("About a", "man named Brady") + + pprint(list_dir()) + pprint(PlanoProcess, 1, "abc", end="\n\n") + + flush() + + with console_color("red"): + print("ALERT") + + print(cformat("AMBER ALERT", color="yellow")) + print(cformat("NO ALERT")) + + cprint("CRITICAL ALERT", color="red", bright=True) + +@test +def dir_operations(): + with working_dir(): + test_dir = make_dir("some-dir") + test_file_1 = touch(join(test_dir, "some-file-1")) + test_file_2 = touch(join(test_dir, "some-file-2")) + + result = list_dir(test_dir) + assert join(test_dir, result[0]) == test_file_1, (join(test_dir, result[0]), test_file_1) + + result = list_dir(test_dir, "*-file-1") + assert result == ["some-file-1"], (result, ["some-file-1"]) + + result = list_dir(test_dir, exclude="*-file-1") + assert result == ["some-file-2"], (result, ["some-file-2"]) + + result = list_dir("some-dir", "*.not-there") + assert result == [], result + + with working_dir(): + result = list_dir() + assert result == [], result + + result = find(test_dir) + assert result == [test_file_1, test_file_2], (result, [test_file_1, test_file_2]) + + result = find(test_dir, include="*-file-1") + assert result == [test_file_1], (result, [test_file_1]) + + result = find(test_dir, exclude="*-file-1") + 
assert result == [test_file_2], (result, [test_file_2]) + + with working_dir(): + result = find() + assert result == [], result + + make_dir("subdir") + + result = find("./subdir") + assert result == [], result + + with working_dir(): + with working_dir("a-dir", quiet=True): + touch("a-file") + + curr_dir = get_current_dir() + prev_dir = change_dir("a-dir") + new_curr_dir = get_current_dir() + new_prev_dir = change_dir(curr_dir) + + assert curr_dir == prev_dir, (curr_dir, prev_dir) + assert new_curr_dir == new_prev_dir, (new_curr_dir, new_prev_dir) + +@test +def env_operations(): + result = join_path_var("a", "b", "c", "a") + assert result == _os.pathsep.join(("a", "b", "c")), result + + curr_dir = get_current_dir() + + with working_dir("."): + assert get_current_dir() == curr_dir, (get_current_dir(), curr_dir) + + result = get_home_dir() + assert result == _os.path.expanduser("~"), (result, _os.path.expanduser("~")) + + result = get_home_dir("alice") + assert result.endswith("alice"), result + + user = _getpass.getuser() + result = get_user() + assert result == user, (result, user) + + result = get_hostname() + assert result, result + + result = get_program_name() + assert result, result + + result = get_program_name("alpha beta") + assert result == "alpha", result + + result = get_program_name("X=Y alpha beta") + assert result == "alpha", result + + result = which("echo") + assert result, result + + with working_env(YES_I_AM_SET=1): + check_env("YES_I_AM_SET") + + with expect_error(): + check_env("NO_I_AM_NOT") + + with working_env(I_AM_SET_NOW=1, amend=False): + check_env("I_AM_SET_NOW") + assert "YES_I_AM_SET" not in ENV, ENV + + with working_env(SOME_VAR=1): + assert ENV["SOME_VAR"] == "1", ENV.get("SOME_VAR") + + with working_env(SOME_VAR=2): + assert ENV["SOME_VAR"] == "2", ENV.get("SOME_VAR") + + with expect_error(): + check_program("not-there") + + with expect_error(): + check_module("not_there") + + with expect_output(contains="ARGS:") as out: + with open(out, "w") as f: + print_env(file=f) + + print_stack() + +@test +def file_operations(): + with working_dir(): + alpha_dir = make_dir("alpha-dir") + alpha_file = touch(join(alpha_dir, "alpha-file")) + alpha_link = make_link(join(alpha_dir, "alpha-file-link"), "alpha-file") + alpha_broken_link = make_link(join(alpha_dir, "broken-link"), "no-such-file") + + beta_dir = make_dir("beta-dir") + beta_file = touch(join(beta_dir, "beta-file")) + beta_link = make_link(join(beta_dir, "beta-file-link"), "beta-file") + beta_broken_link = make_link(join(beta_dir, "broken-link"), join("..", alpha_dir, "no-such-file")) + beta_another_link = make_link(join(beta_dir, "broken-link"), join("..", alpha_dir, "alpha-file-link")) + + assert exists(beta_link) + assert exists(beta_file) + + with working_dir("beta-dir"): + assert is_file(read_link("beta-file-link")) + + copied_file = copy(alpha_file, beta_dir) + assert copied_file == join(beta_dir, "alpha-file"), copied_file + assert is_file(copied_file), list_dir(beta_dir) + + copied_link = copy(beta_link, join(beta_dir, "beta-file-link-copy")) + assert copied_link == join(beta_dir, "beta-file-link-copy"), copied_link + assert is_link(copied_link), list_dir(beta_dir) + + copied_dir = copy(alpha_dir, beta_dir) + assert copied_dir == join(beta_dir, "alpha-dir"), copied_dir + assert is_link(join(copied_dir, "alpha-file-link")) + + moved_file = move(beta_file, alpha_dir) + assert moved_file == join(alpha_dir, "beta-file"), moved_file + assert is_file(moved_file), list_dir(alpha_dir) + assert not 
exists(beta_file), list_dir(beta_dir) + + moved_dir = move(beta_dir, alpha_dir) + assert moved_dir == join(alpha_dir, "beta-dir"), moved_dir + assert is_dir(moved_dir), list_dir(alpha_dir) + assert not exists(beta_dir) + + gamma_dir = make_dir("gamma-dir") + gamma_file = touch(join(gamma_dir, "gamma-file")) + + delta_dir = make_dir("delta-dir") + delta_file = touch(join(delta_dir, "delta-file")) + + copy(gamma_dir, delta_dir, inside=False) + assert is_file(join("delta-dir", "gamma-file")) + + move(gamma_dir, delta_dir, inside=False) + assert is_file(join("delta-dir", "gamma-file")) + assert not exists(gamma_dir) + + epsilon_dir = make_dir("epsilon-dir") + epsilon_file_1 = touch(join(epsilon_dir, "epsilon-file-1")) + epsilon_file_2 = touch(join(epsilon_dir, "epsilon-file-2")) + epsilon_file_3 = touch(join(epsilon_dir, "epsilon-file-3")) + epsilon_file_4 = touch(join(epsilon_dir, "epsilon-file-4")) + + remove("not-there") + + remove(epsilon_file_2) + assert not exists(epsilon_file_2) + + remove(epsilon_dir) + assert not exists(epsilon_file_1) + assert not exists(epsilon_dir) + + remove([epsilon_file_3, epsilon_file_4]) + assert not exists(epsilon_file_3) + assert not exists(epsilon_file_4) + + file = write("xes", "x" * 10) + result = get_file_size(file) + assert result == 10, result + + zeta_dir = make_dir("zeta-dir") + zeta_file = touch(join(zeta_dir, "zeta-file")) + + eta_dir = make_dir("eta-dir") + eta_file = touch(join(eta_dir, "eta-file")) + + replace(zeta_dir, eta_dir) + assert not exists(zeta_file) + assert exists(zeta_dir) + assert is_file(join(zeta_dir, "eta-file")) + + with expect_exception(): + replace(zeta_dir, "not-there") + + assert exists(zeta_dir) + assert is_file(join(zeta_dir, "eta-file")) + + theta_file = write("theta-file", "theta") + iota_file = write("iota-file", "iota") + + replace(theta_file, iota_file) + assert not exists(iota_file) + assert read(theta_file) == "iota" + +@test +def github_operations(): + result = convert_github_markdown("# Hello, Fritz") + assert "Hello, Fritz" in result, result + + with working_dir(): + update_external_from_github("temp", "ssorj", "plano") + assert is_file("temp/Makefile"), list_dir("temp") + +@test +def http_operations(): + class Handler(_http.BaseHTTPRequestHandler): + def do_GET(self): + if not self.path.startswith("/api"): + self.send_response(404) + self.end_headers() + return + + self.send_response(200) + self.end_headers() + self.wfile.write(b"[1]") + + def do_POST(self): + length = int(self.headers["content-length"]) + content = self.rfile.read(length) + + self.send_response(200) + self.end_headers() + self.wfile.write(content) + + def do_PUT(self): + length = int(self.headers["content-length"]) + content = self.rfile.read(length) + + self.send_response(200) + self.end_headers() + + class ServerThread(_threading.Thread): + def __init__(self, server): + _threading.Thread.__init__(self) + self.server = server + + def run(self): + self.server.serve_forever() + + host, port = "localhost", get_random_port() + url = "http://{}:{}/api".format(host, port) + missing_url = "http://{}:{}/nono".format(host, port) + + try: + server = _http.HTTPServer((host, port), Handler) + except (OSError, PermissionError): # pragma: nocover + # Try one more time + port = get_random_port() + server = _http.HTTPServer((host, port), Handler) + + server_thread = ServerThread(server) + server_thread.start() + + try: + with working_dir(): + result = http_get(url) + assert result == "[1]", result + + with expect_error(): + http_get(missing_url) + + result = 
http_get(url, insecure=True) + assert result == "[1]", result + + result = http_get(url, user="fritz", password="secret") + assert result == "[1]", result + + result = http_get(url, output_file="a") + output = read("a") + assert result is None, result + assert output == "[1]", output + + result = http_get_json(url) + assert result == [1], result + + file_b = write("b", "[2]") + + result = http_post(url, read(file_b), insecure=True) + assert result == "[2]", result + + result = http_post(url, read(file_b), output_file="x") + output = read("x") + assert result is None, result + assert output == "[2]", output + + result = http_post_file(url, file_b) + assert result == "[2]", result + + result = http_post_json(url, parse_json(read(file_b))) + assert result == [2], result + + file_c = write("c", "[3]") + + result = http_put(url, read(file_c), insecure=True) + assert result is None, result + + result = http_put_file(url, file_c) + assert result is None, result + + result = http_put_json(url, parse_json(read(file_c))) + assert result is None, result + finally: + server.shutdown() + server.server_close() + server_thread.join() + +@test +def io_operations(): + with working_dir(): + input_ = "some-text\n" + file_a = write("a", input_) + output = read(file_a) + + assert input_ == output, (input_, output) + + pre_input = "pre-some-text\n" + post_input = "post-some-text\n" + + prepend(file_a, pre_input) + append(file_a, post_input) + + output = tail(file_a, 100) + tailed = tail(file_a, 1) + + assert output.startswith(pre_input), (output, pre_input) + assert output.endswith(post_input), (output, post_input) + assert tailed == post_input, (tailed, post_input) + + input_lines = [ + "alpha\n", + "beta\n", + "gamma\n", + "chi\n", + "psi\n", + "omega\n", + ] + + file_b = write_lines("b", input_lines) + output_lines = read_lines(file_b) + + assert input_lines == output_lines, (input_lines, output_lines) + + pre_lines = ["pre-alpha\n"] + post_lines = ["post-omega\n"] + + prepend_lines(file_b, pre_lines) + append_lines(file_b, post_lines) + + output_lines = tail_lines(file_b, 100) + tailed_lines = tail_lines(file_b, 1) + + assert output_lines[0] == pre_lines[0], (output_lines[0], pre_lines[0]) + assert output_lines[-1] == post_lines[0], (output_lines[-1], post_lines[0]) + assert tailed_lines[0] == post_lines[0], (tailed_lines[0], post_lines[0]) + + file_c = touch("c") + assert is_file(file_c), file_c + + file_d = write("d", "front@middle@@middle@back") + path = string_replace_file(file_d, "@middle@", "M", count=1) + result = read(path) + assert result == "frontM@middle@back", result + + file_e = write("e", "123") + file_f = write("f", "456") + path = concatenate("g", (file_e, "not-there", file_f)) + result = read(path) + assert result == "123456", result + +@test +def iterable_operations(): + result = unique([1, 1, 1, 2, 2, 3]) + assert result == [1, 2, 3], result + + result = skip([1, "", 2, None, 3]) + assert result == [1, 2, 3], result + + result = skip([1, "", 2, None, 3], 2) + assert result == [1, "", None, 3], result + +@test +def json_operations(): + with working_dir(): + input_data = { + "alpha": [1, 2, 3], + } + + file_a = write_json("a", input_data) + output_data = read_json(file_a) + + assert input_data == output_data, (input_data, output_data) + + json = read(file_a) + parsed_data = parse_json(json) + emitted_json = emit_json(input_data) + + assert input_data == parsed_data, (input_data, parsed_data) + assert json == emitted_json, (json, emitted_json) + + with expect_output(equals=emitted_json) as 
out: + with open(out, "w") as f: + print_json(input_data, file=f, end="") + +@test +def link_operations(): + with working_dir(): + make_dir("some-dir") + path = get_absolute_path(touch("some-dir/some-file")) + + with working_dir("another-dir"): + link = make_link("a-link", path) + linked_path = read_link(link) + assert linked_path.endswith(path), (linked_path, path) + +@test +def logging_operations(): + error("Error!") + warning("Warning!") + notice("Take a look!") + notice(123) + debug("By the way") + debug("abc{}{}{}", 1, 2, 3) + + with expect_exception(RuntimeError): + fail(RuntimeError("Error!")) + + with expect_error(): + fail("Error!") + + with expect_error(): + fail("Error! {}", "Let me elaborate") + + for level in ("debug", "notice", "warning", "error"): + with expect_output(contains="Hello") as out: + with logging_disabled(): + with logging_enabled(level=level, output=out): + log(level, "hello") + + with expect_output(equals="") as out: + with logging_enabled(output=out): + with logging_disabled(): + error("Yikes") + + with expect_output(contains="flipper") as out: + with logging_enabled(output=out): + with logging_context("flipper"): + notice("Whhat") + + with logging_context("bip"): + with logging_context("boop"): + error("It's alarming!") + +@test +def path_operations(): + abspath = _os.path.abspath + normpath = _os.path.normpath + + with working_dir("/"): + result = get_current_dir() + expect = abspath(_os.sep) + assert result == expect, (result, expect) + + path = "a/b/c" + result = get_absolute_path(path) + expect = join(get_current_dir(), path) + assert result == expect, (result, expect) + + path = "/x/y/z" + result = get_absolute_path(path) + expect = abspath(path) + assert result == expect, (result, expect) + + path = "/x/y/z" + assert is_absolute(path) + + path = "x/y/z" + assert not is_absolute(path) + + path = "a//b/../c/" + result = normalize_path(path) + expect = normpath("a/c") + assert result == expect, (result, expect) + + path = "/a/../c" + result = get_real_path(path) + expect = abspath("/c") + assert result == expect, (result, expect) + + path = abspath("/a/b") + result = get_relative_path(path, "/a/c") + expect = normpath("../b") + assert result == expect, (result, expect) + + path = abspath("/a/b") + result = get_file_url(path) + expect = "file:{}".format(path) + assert result == expect, (result, expect) + + with working_dir(): + result = get_file_url("afile") + expect = join(get_file_url(get_current_dir()), "afile") + assert result == expect, (result, expect) + + path = "/alpha/beta.ext" + path_split = "/alpha", "beta.ext" + path_split_extension = "/alpha/beta", ".ext" + name_split_extension = "beta", ".ext" + + result = join(*path_split) + expect = normpath(path) + assert result == expect, (result, expect) + + result = split(path) + expect = normpath(path_split[0]), normpath(path_split[1]) + assert result == expect, (result, expect) + + result = split_extension(path) + expect = normpath(path_split_extension[0]), normpath(path_split_extension[1]) + assert result == expect, (result, expect) + + result = get_parent_dir(path) + expect = normpath(path_split[0]) + assert result == expect, (result, expect) + + result = get_base_name(path) + expect = normpath(path_split[1]) + assert result == expect, (result, expect) + + result = get_name_stem(path) + expect = normpath(name_split_extension[0]) + assert result == expect, (result, expect) + + result = get_name_stem("alpha.tar.gz") + expect = "alpha" + assert result == expect, (result, expect) + + result = 
get_name_extension(path) + expect = normpath(name_split_extension[1]) + assert result == expect, (result, expect) + + with working_dir(): + touch("adir/afile") + + check_exists("adir") + check_exists("adir/afile") + check_dir("adir") + check_file("adir/afile") + + with expect_error(): + check_exists("adir/notafile") + + with expect_error(): + check_file("adir/notafile") + + with expect_error(): + check_file("adir") + + with expect_error(): + check_dir("not-there") + + with expect_error(): + check_dir("adir/afile") + + await_exists("adir/afile") + + if not WINDOWS: + with expect_timeout(): + await_exists("adir/notafile", timeout=TINY_INTERVAL) + +@test +def port_operations(): + result = get_random_port() + assert result >= 49152 and result <= 65535, result + + server_port = get_random_port() + server_socket = _socket.socket(_socket.AF_INET, _socket.SOCK_STREAM) + + try: + try: + server_socket.bind(("localhost", server_port)) + except (OSError, PermissionError): # pragma: nocover + # Try one more time + server_port = get_random_port() + server_socket.bind(("localhost", server_port)) + + server_socket.listen(5) + + await_port(server_port) + await_port(str(server_port)) + + check_port(server_port) + + # Non-Linux platforms don't seem to produce the expected + # error. + if LINUX: + with expect_error(): + get_random_port(min=server_port, max=server_port) + finally: + server_socket.close() + + if not WINDOWS: + with expect_timeout(): + await_port(get_random_port(), timeout=TINY_INTERVAL) + +@test +def process_operations(): + result = get_process_id() + assert result, result + + proc = run("date") + assert proc is not None, proc + + print(repr(proc)) + + run("date", stash=True) + + run(["echo", 1, 2, 3]) + run(["echo", 1, 2, 3], shell=True) + + proc = run(["echo", "hello"], check=False) + assert proc.exit_code == 0, proc.exit_code + + proc = run("cat /uh/uh", check=False) + assert proc.exit_code > 0, proc.exit_code + + with expect_output() as out: + run("date", output=out) + + run("date", output=DEVNULL) + run("date", stdin=DEVNULL) + run("date", stdout=DEVNULL) + run("date", stderr=DEVNULL) + + run("echo hello", quiet=True) + run("echo hello | cat", shell=True) + run(["echo", "hello"], shell=True) + + with expect_error(): + run("/not/there") + + with expect_error(): + run("cat /whoa/not/really", stash=True) + + result = call("echo hello").strip() + expect = "hello" + assert result == expect, (result, expect) + + result = call("echo hello | cat", shell=True).strip() + expect = "hello" + assert result == expect, (result, expect) + + with expect_error(): + call("cat /whoa/not/really") + + proc = start("sleep 10") + + if not WINDOWS: + with expect_timeout(): + wait(proc, timeout=TINY_INTERVAL) + + proc = start("echo hello") + sleep(TINY_INTERVAL) + stop(proc) + + proc = start("sleep 10") + stop(proc) + + proc = start("sleep 10") + kill(proc) + sleep(TINY_INTERVAL) + stop(proc) + + proc = start("date --not-there") + sleep(TINY_INTERVAL) + stop(proc) + + with start("sleep 10"): + sleep(TINY_INTERVAL) + + with working_dir(): + touch("i") + + with start("date", stdin="i", stdout="o", stderr="e"): + pass + + with expect_system_exit(): + exit() + + with expect_system_exit(): + exit(verbose=True) + + with expect_system_exit(): + exit("abc") + + with expect_system_exit(): + exit("abc", verbose=True) + + with expect_system_exit(): + exit(Exception()) + + with expect_system_exit(): + exit(Exception(), verbose=True) + + with expect_system_exit(): + exit(123) + + with expect_system_exit(): + exit(123, 
verbose=True) + + with expect_system_exit(): + exit(-123) + + with expect_exception(PlanoException): + exit(object()) + +@test +def string_operations(): + result = string_replace("ab", "a", "b") + assert result == "bb", result + + result = string_replace("aba", "a", "b", count=1) + assert result == "bba", result + + result = remove_prefix(None, "xxx") + assert result == "", result + + result = remove_prefix("anterior", "ant") + assert result == "erior", result + + result = remove_prefix("anterior", "ext") + assert result == "anterior", result + + result = remove_suffix(None, "xxx") + assert result == "", result + + result = remove_suffix("exterior", "ior") + assert result == "exter", result + + result = remove_suffix("exterior", "nal") + assert result == "exterior" + + result = shorten("abc", 2) + assert result == "ab", result + + result = shorten("abc", None) + assert result == "abc", result + + result = shorten("abc", 10) + assert result == "abc", result + + result = shorten("ellipsis", 6, ellipsis="...") + assert result == "ell...", result + + result = shorten(None, 6) + assert result == "", result + + result = plural(None) + assert result == "", result + + result = plural("") + assert result == "", result + + result = plural("test") + assert result == "tests", result + + result = plural("test", 1) + assert result == "test", result + + result = plural("bus") + assert result == "busses", result + + result = plural("bus", 1) + assert result == "bus", result + + result = plural("terminus", 2, "termini") + assert result == "termini", result + + result = capitalize(None) + assert result == "", result + + result = capitalize("") + assert result == "", result + + result = capitalize("hello, Frank") + assert result == "Hello, Frank", result + + encoded_result = base64_encode(b"abc") + decoded_result = base64_decode(encoded_result) + assert decoded_result == b"abc", decoded_result + + encoded_result = url_encode("abc=123&yeah!") + decoded_result = url_decode(encoded_result) + assert decoded_result == "abc=123&yeah!", decoded_result + + result = parse_url("http://example.net/index.html") + assert result.hostname == "example.net" + +@test +def temp_operations(): + system_temp_dir = get_system_temp_dir() + + result = make_temp_file() + assert result.startswith(system_temp_dir), result + + result = make_temp_file(suffix=".txt") + assert result.endswith(".txt"), result + + result = make_temp_dir() + assert result.startswith(system_temp_dir), result + + with temp_dir() as d: + assert is_dir(d), d + list_dir(d) + + with temp_file() as f: + assert is_file(f), f + write(f, "test") + + with working_dir() as d: + assert is_dir(d), d + list_dir(d) + + user_temp_dir = get_user_temp_dir() + assert user_temp_dir, user_temp_dir + + ENV.pop("XDG_RUNTIME_DIR", None) + + user_temp_dir = get_user_temp_dir() + assert user_temp_dir, user_temp_dir + +@test +def test_operations(): + with test_project(): + with working_module_path("src"): + import chucker + import chucker.tests + import chucker.moretests + + print_tests(chucker.tests) + + for verbose in (False, True): + # Module 'chucker' has no tests + with expect_error(): + run_tests(chucker, verbose=verbose) + + run_tests(chucker.tests, verbose=verbose) + run_tests(chucker.tests, exclude="*hello*", verbose=verbose) + run_tests(chucker.tests, enable="skipped", verbose=verbose) + + with expect_error(): + run_tests(chucker.tests, enable="skipped", unskip="*skipped*", verbose=verbose) + + with expect_error(): + run_tests(chucker.tests, enable="*badbye*", verbose=verbose) 
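+ # The enabled "badbye" tests fail by design; fail_fast is expected to stop the run at the first failure while still reporting an error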
+ + with expect_error(): + run_tests(chucker.tests, enable="*badbye*", fail_fast=True, verbose=verbose) + + with expect_error(): + run_tests([chucker.tests, chucker.moretests], enable="*badbye2*", fail_fast=True, verbose=verbose) + + with expect_exception(KeyboardInterrupt): + run_tests(chucker.tests, enable="keyboard-interrupt", verbose=verbose) + + with expect_error(): + run_tests(chucker.tests, enable="timeout", verbose=verbose) + + with expect_error(): + run_tests(chucker.tests, enable="process-error", verbose=verbose) + + with expect_error(): + run_tests(chucker.tests, enable="system-exit", verbose=verbose) + + with expect_system_exit(): + PlanoTestCommand().main(["--module", "nosuchmodule"]) + + def run_command(*args): + PlanoTestCommand(chucker.tests).main(args) + + run_command("--verbose") + run_command("--quiet") + run_command("--list") + + with expect_system_exit(): + run_command("--enable", "*badbye*") + + with expect_system_exit(): + run_command("--enable", "*badbye*", "--verbose") + + try: + with expect_exception(): + pass + raise Exception() # pragma: nocover + except AssertionError: + pass + + with expect_output(equals="abc123", contains="bc12", startswith="abc", endswith="123") as out: + write(out, "abc123") + +@test +def time_operations(): + start_time = get_time() + + sleep(TINY_INTERVAL) + + assert get_time() - start_time > TINY_INTERVAL + + start_datetime = get_datetime() + + sleep(TINY_INTERVAL) + + assert get_datetime() - start_datetime > _datetime.timedelta(seconds=TINY_INTERVAL) + + timestamp = format_timestamp() + result = parse_timestamp(timestamp) + assert format_timestamp(result) == timestamp + + result = parse_timestamp(None) + assert result is None + + earlier = get_datetime() + result = format_date() + later = _datetime.datetime.strptime(result, "%d %B %Y") + later = later.replace(tzinfo=_datetime.timezone.utc) + assert later - earlier < _datetime.timedelta(days=1) + + now = get_datetime() + result = format_date(now) + assert result == f"{now.day} {now.strftime('%B')} {now.strftime('%Y')}" + + now = get_datetime() + result = format_time() + later = _datetime.datetime.strptime(result, "%H:%M:%S") + later = later.replace(tzinfo=_datetime.timezone.utc) + assert later - earlier < _datetime.timedelta(seconds=1) + + now = get_datetime() + result = format_time(now) + assert result == f"{now.hour}:{now.strftime('%M')}:{now.strftime('%S')}" + + now = get_datetime() + result = format_time(now, precision="minute") + assert result == f"{now.hour}:{now.strftime('%M')}" + + result = format_duration(0.1) + assert result == "0.1s", result + + result = format_duration(1) + assert result == "1s", result + + result = format_duration(1, align=True) + assert result == "1.0s", result + + result = format_duration(60) + assert result == "60s", result + + result = format_duration(3600) + assert result == "1h", result + + with expect_system_exit(): + with start("sleep 10"): + from plano import _default_sigterm_handler + _default_sigterm_handler(_signal.SIGTERM, None) + + with Timer() as timer: + sleep(TINY_INTERVAL) + assert timer.elapsed_time > TINY_INTERVAL + + assert timer.elapsed_time > TINY_INTERVAL + + if not WINDOWS: + with expect_timeout(): + with Timer(timeout=TINY_INTERVAL) as timer: + sleep(10) + +@test +def unique_id_operations(): + id1 = get_unique_id() + id2 = get_unique_id() + + assert id1 != id2, (id1, id2) + + result = get_unique_id(1) + assert len(result) == 2 + + result = get_unique_id(16) + assert len(result) == 32 + +@test +def value_operations(): + result = 
nvl(None, "a") + assert result == "a", result + + result = nvl("b", "a") + assert result == "b", result + + assert is_string("a") + assert not is_string(1) + + for value in (None, "", (), [], {}): + assert is_empty(value), value + + for value in (object(), " ", (1,), [1], {"a": 1}): + assert not is_empty(value), value + + result = pformat({"z": 1, "a": 2}) + assert result == "{'a': 2, 'z': 1}", result + + result = format_empty((), "[nothing]") + assert result == "[nothing]", result + + result = format_empty((1,), "[nothing]") + assert result == (1,), result + + result = format_not_empty("abc", "[{}]") + assert result == "[abc]", result + + result = format_not_empty({}, "[{}]") + assert result == {}, result + + result = format_repr(Namespace(a=1, b=2), limit=1) + assert result == "Namespace(a=1)", result + + result = Namespace(a=1, b=2) + assert result.a == 1, result + assert result.b == 2, result + assert "a" in result, result + assert "c" not in result, result + repr(result) + + other = Namespace(a=1, b=2, c=3) + assert result != other, (result, other) + +@test +def yaml_operations(): + try: + import yaml as _yaml + except ImportError: # pragma: nocover + raise PlanoTestSkipped("PyYAML is not available") + + with working_dir(): + input_data = { + "alpha": [1, 2, 3], + } + + file_a = write_yaml("a", input_data) + output_data = read_yaml(file_a) + + assert input_data == output_data, (input_data, output_data) + + yaml = read(file_a) + parsed_data = parse_yaml(yaml) + emitted_yaml = emit_yaml(input_data) + + assert input_data == parsed_data, (input_data, parsed_data) + assert yaml == emitted_yaml, (yaml, emitted_yaml) + + with expect_output(equals=emitted_yaml) as out: + with open(out, "w") as f: + print_yaml(input_data, file=f, end="") + +@command +def prancer(): + notice("Base prancer") + +@command +def vixen(): + prancer() + +@test +def plano_command(): + with working_dir(): + PlanoCommand().main([]) + + PlanoCommand(_sys.modules[__name__]).main([]) + + PlanoCommand().main(["-m", "plano.test"]) + + with expect_system_exit(): + PlanoCommand().main(["-m", "nosuchmodule"]) + + with working_dir(): + write(".plano.py", "garbage") + + with expect_system_exit(): + PlanoCommand().main([]) + + with expect_system_exit(): + PlanoCommand().main(["-f", "no-such-file"]) + + def run_command(*args): + PlanoCommand().main(["-f", test_project_dir] + list(args)) + + with test_project(): + run_command() + run_command("--help") + + with expect_system_exit(): + run_command("no-such-command") + + with expect_system_exit(): + run_command("no-such-command", "--help") + + with expect_system_exit(): + run_command("--help", "no-such-command") + + run_command("extended-command", "a", "b", "--omega", "z") + run_command("extended-command", "a", "b", "--omega", "z", "--verbose") + run_command("extended-command", "a", "b", "--omega", "z", "--quiet") + + with expect_system_exit(): + run_command("echo") + + with expect_exception(contains="Trouble"): + run_command("echo", "Hello", "--trouble") + + run_command("echo", "Hello", "--count", "5") + + run_command("echoecho", "Greetings") + + with expect_system_exit(): + run_command("echo", "Hello", "--count", "not-an-int") + + run_command("haberdash", "ballcap", "fedora", "hardhat", "--last", "turban") + result = read_json("haberdash.json") + assert result == ["ballcap", "fedora", "hardhat", "turban"], result + + run_command("haberdash", "ballcap", "--last", "turban") + result = read_json("haberdash.json") + assert result == ["ballcap", "turban"], result + + 
run_command("haberdash", "ballcap") + result = read_json("haberdash.json") + assert result == ["ballcap", "bowler"], result + + run_command("balderdash", "bunk", "poppycock") + result = read_json("balderdash.json") + assert result == ["bunk", "poppycock", "rubbish"], result + + run_command("balderdash", "bunk") + result = read_json("balderdash.json") + assert result == ["bunk", "malarkey", "rubbish"], result + + run_command("balderdash", "bunk", "--other", "bollocks") + result = read_json("balderdash.json") + assert result == ["bunk", "malarkey", "bollocks"], result + + run_command("splasher,balderdash", "claptrap") + result = read_json("splasher.json") + assert result == [1], result + result = read_json("balderdash.json") + assert result == ["claptrap", "malarkey", "rubbish"], result + + with expect_system_exit(): + run_command("no-such-command,splasher") + + with expect_system_exit(): + run_command("splasher,no-such-command-nope") + + run_command("dasher", "alpha", "--beta", "123") + + # Gamma is an unexpected arg + with expect_system_exit(): + run_command("dasher", "alpha", "--gamma", "123") + + # Args after "xyz" are extra passthrough args + run_command("dancer", "gamma", "--omega", "xyz", "extra1", "--extra2", "extra3") + result = read_json("dancer.json") + assert result == ["extra1", "--extra2", "extra3"], result + + # Ensure indirect calls (through parent commands) are specialized + run_command("vixen") + assert exists("prancer.json") + + with expect_system_exit(): + run_command("no-parent") + + run_command("feta", "--spinach", "oregano") + result = read_json("feta.json") + assert result == "oregano" + + run_command("invisible") + result = read_json("invisible.json") + assert result == "nothing" + + + +def main(): + PlanoTestCommand(_sys.modules[__name__]).main() + +if __name__ == "__main__": # pragma: nocover + main() diff --git a/external/skewer/python/plano/command.py b/external/skewer/python/plano/command.py new file mode 100644 index 0000000..219f964 --- /dev/null +++ b/external/skewer/python/plano/command.py @@ -0,0 +1,511 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +from .main import * + +import argparse as _argparse +import importlib as _importlib +import inspect as _inspect +import os as _os +import sys as _sys +import traceback as _traceback + +class BaseCommand: + def parse_args(self, args): # pragma: nocover + raise NotImplementedError() + + def configure_logging(self, args): + return "warning", None + + def init(self, args): # pragma: nocover + raise NotImplementedError() + + def run(self): # pragma: nocover + raise NotImplementedError() + + def main(self, args=None): + if args is None: + args = ARGS[1:] + + args = self.parse_args(args) + + assert isinstance(args, _argparse.Namespace), args + + level, output = self.configure_logging(args) + + with logging_enabled(level=level, output=output): + try: + self.init(args) + self.run() + except KeyboardInterrupt: + pass + except PlanoError as e: + if PLANO_DEBUG: # pragma: nocover + error(e) + else: + error(str(e)) + + exit(1) + +class BaseArgumentParser(_argparse.ArgumentParser): + def __init__(self, **kwargs): + super().__init__(**kwargs) + + self.allow_abbrev = False + self.formatter_class = _argparse.RawDescriptionHelpFormatter + + _capitalize_help(self) + +_plano_command = None + +class PlanoCommand(BaseCommand): + def __init__(self, module=None, description="Run commands defined as Python functions", epilog=None): + self.module = module + self.bound_commands = dict() + self.running_commands = list() + self.passthrough_args = None + self.verbose = False + self.quiet = False + + assert self.module is None or _inspect.ismodule(self.module), self.module + + self.pre_parser = BaseArgumentParser(description=description, add_help=False) + self.pre_parser.add_argument("-h", "--help", action="store_true", + help="Show this help message and exit") + + if self.module is None: + self.pre_parser.add_argument("-f", "--file", help="Load commands from FILE (default '.plano.py')") + self.pre_parser.add_argument("-m", "--module", help="Load commands from MODULE") + + self.parser = _argparse.ArgumentParser(parents=(self.pre_parser,), + description=description, epilog=epilog, + add_help=False, allow_abbrev=False) + + # This is intentionally added after self.pre_parser is passed + # as parent to self.parser, since it is used only in the + # preliminary parsing. 
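+ # The full parser receives its "command" value from the subparsers added later in _process_commands().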
+ self.pre_parser.add_argument("command", nargs="?", help=_argparse.SUPPRESS) + + global _plano_command + _plano_command = self + + def parse_args(self, args): + pre_args, _ = self.pre_parser.parse_known_args(args) + + if self.module is None: + if pre_args.module is None: + self.module = self._load_file(pre_args.file) + else: + self.module = self._load_module(pre_args.module) + + if self.module is not None: + self._bind_commands(self.module) + + self._process_commands() + + self.preceding_commands = list() + + if pre_args.command is not None and "," in pre_args.command: + names = pre_args.command.split(",") + + for name in names[:-1]: + try: + self.preceding_commands.append(self.bound_commands[name]) + except KeyError: + self.parser.error(f"Command '{name}' is unknown") + + args[args.index(pre_args.command)] = names[-1] + + args, self.passthrough_args = self.parser.parse_known_args(args) + + return args + + def configure_logging(self, args): + if args.command is not None and not self.bound_commands[args.command].passthrough: + if args.verbose: + return "debug", None + + if args.quiet: + return "warning", None + + return "notice", None + + def init(self, args): + self.help = args.help + + self.selected_command = None + self.command_args = list() + self.command_kwargs = dict() + + if args.command is not None: + for command in self.preceding_commands: + command() + + self.selected_command = self.bound_commands[args.command] + + if not self.selected_command.passthrough and self.passthrough_args: + self.parser.error(f"unrecognized arguments: {' '.join(self.passthrough_args)}") + + for param in self.selected_command.parameters.values(): + if param.name == "passthrough_args": + continue + + if param.positional: + if param.multiple: + self.command_args.extend(getattr(args, param.name)) + else: + self.command_args.append(getattr(args, param.name)) + else: + self.command_kwargs[param.name] = getattr(args, param.name) + + if self.selected_command.passthrough: + self.command_kwargs["passthrough_args"] = self.passthrough_args + + def run(self): + if self.help or self.module is None or self.selected_command is None: + self.parser.print_help() + return + + with Timer() as timer: + self.selected_command(*self.command_args, **self.command_kwargs) + + if not self.quiet: + cprint("OK", color="green", file=_sys.stderr, end="") + cprint(" ({})".format(format_duration(timer.elapsed_time)), color="magenta", file=_sys.stderr) + + def _load_module(self, name): + try: + return _importlib.import_module(name) + except ImportError: + exit("Module '{}' not found", name) + + def _load_file(self, path): + if path is not None and is_dir(path): + path = self._find_file(path) + + if path is not None and not is_file(path): + exit("File '{}' not found", path) + + if path is None: + path = self._find_file(get_current_dir()) + + if path is None: + return + + debug("Loading '{}'", path) + + _sys.path.insert(0, join(get_parent_dir(path), "python")) + + spec = _importlib.util.spec_from_file_location("_plano", path) + module = _importlib.util.module_from_spec(spec) + _sys.modules["_plano"] = module + + try: + spec.loader.exec_module(module) + except Exception as e: + error(e) + exit("Failure loading {}: {}", path, str(e)) + + return module + + def _find_file(self, dir): + # Planofile and .planofile remain temporarily for backward compatibility + for name in (".plano.py", "Planofile", ".planofile"): + path = join(dir, name) + + if is_file(path): + return path + + def _bind_commands(self, module): + for var in vars(module).values(): 
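+ # Command objects are matched by class name here; the Command class is defined inside the command() decorator rather than at module level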
+ if callable(var) and var.__class__.__name__ == "Command": + self.bound_commands[var.name] = var + + def _process_commands(self): + subparsers = self.parser.add_subparsers(title="commands", dest="command", metavar="{command}") + + for command in self.bound_commands.values(): + # This doesn't work yet, but in the future it might. + # https://bugs.python.org/issue22848 + # + # help = _argparse.SUPPRESS if command.hidden else command.help + + help = "[internal]" if command.hidden else command.help + add_help = False if command.passthrough else True + description = nvl(command.description, command.help) + + subparser = subparsers.add_parser(command.name, help=help, add_help=add_help, description=description, + formatter_class=_argparse.RawDescriptionHelpFormatter) + + if not command.passthrough: + subparser.add_argument("--verbose", action="store_true", + help="Print detailed logging to the console") + subparser.add_argument("--quiet", action="store_true", + help="Print no logging to the console") + + for param in command.parameters.values(): + if not command.passthrough and param.name in ("verbose", "quiet"): + continue + + if param.positional: + if param.multiple: + subparser.add_argument(param.name, metavar=param.metavar, type=param.type, help=param.help, + nargs="*") + elif param.optional: + subparser.add_argument(param.name, metavar=param.metavar, type=param.type, help=param.help, + nargs="?", default=param.default) + else: + subparser.add_argument(param.name, metavar=param.metavar, type=param.type, help=param.help) + else: + flag_args = list() + + if param.short_option is not None: + flag_args.append("-{}".format(param.short_option)) + + flag_args.append("--{}".format(param.display_name)) + + help = param.help + + if param.default not in (None, False): + if help is None: + help = "Default value is {}".format(repr(param.default)) + else: + help += " (default {})".format(repr(param.default)) + + if param.default is False: + subparser.add_argument(*flag_args, dest=param.name, default=param.default, action="store_true", + help=help) + else: + subparser.add_argument(*flag_args, dest=param.name, default=param.default, + metavar=param.metavar, type=param.type, help=help) + + _capitalize_help(subparser) + +_command_help = { + "build": "Build artifacts from source", + "clean": "Clean up the source tree", + "dist": "Generate distribution artifacts", + "install": "Install the built artifacts on your system", + "test": "Run the tests", + "coverage": "Run the tests and measure code coverage", +} + +def command(_function=None, name=None, parameters=None, parent=None, passthrough=False, hidden=False): + class Command: + def __init__(self, function): + self.function = function + self.module = _inspect.getmodule(self.function) + + self.name = name + self.parent = parent + + if self.parent is None: + # Strip leading and trailing underscores and convert + # remaining underscores to hyphens + default = self.function.__name__.strip("_").replace("_", "-") + + self.name = nvl(self.name, default) + self.parameters = self._process_parameters(parameters) + self.passthrough = passthrough + else: + assert parameters is None + + self.name = nvl(self.name, self.parent.name) + self.parameters = self.parent.parameters + self.passthrough = self.parent.passthrough + + doc = _inspect.getdoc(self.function) + + if doc is None: + self.help = _command_help.get(self.name) + self.description = self.help + else: + self.help = doc.split("\n")[0] + self.description = doc + + if self.parent is not None: + self.help = 
nvl(self.help, self.parent.help) + self.description = nvl(self.description, self.parent.description) + + self.hidden = hidden + + debug("Defining {}", self) + + for param in self.parameters.values(): + debug(" {}", str(param).capitalize()) + + def __repr__(self): + return "command '{}:{}'".format(self.module.__name__, self.name) + + def _process_parameters(self, cparams): + # CommandParameter objects from the @command decorator + cparams_in = {x.name: x for x in nvl(cparams, ())} + cparams_out = dict() + + # Parameter objects from the function signature + sig = _inspect.signature(self.function) + sparams = list(sig.parameters.values()) + + if len(sparams) == 2 and sparams[0].name == "args" and sparams[1].name == "kwargs": + # Don't try to derive command parameters from *args and **kwargs + return cparams_in + + for sparam in sparams: + try: + cparam = cparams_in[sparam.name] + except KeyError: + cparam = CommandParameter(sparam.name) + + if sparam.kind is sparam.POSITIONAL_ONLY: # pragma: nocover + if sparam.positional is None: + cparam.positional = True + elif sparam.kind is sparam.POSITIONAL_OR_KEYWORD and sparam.default is sparam.empty: + if cparam.positional is None: + cparam.positional = True + elif sparam.kind is sparam.POSITIONAL_OR_KEYWORD and sparam.default is not sparam.empty: + cparam.optional = True + cparam.default = sparam.default + elif sparam.kind is sparam.VAR_POSITIONAL: + if cparam.positional is None: + cparam.positional = True + cparam.multiple = True + elif sparam.kind is sparam.VAR_KEYWORD: + continue + elif sparam.kind is sparam.KEYWORD_ONLY: + cparam.optional = True + cparam.default = sparam.default + else: # pragma: nocover + raise NotImplementedError(sparam.kind) + + if cparam.type is None and cparam.default not in (None, False): # XXX why false? + cparam.type = type(cparam.default) + + cparams_out[cparam.name] = cparam + + return cparams_out + + def __call__(self, *args, **kwargs): + from .command import _plano_command, PlanoCommand + assert isinstance(_plano_command, PlanoCommand), _plano_command + + app = _plano_command + command = app.bound_commands[self.name] + + if command is not self: + # The command bound to this name has been overridden. + # This happens when a parent command invokes a peer + # command that is overridden. 
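+ # In that case, delegate to the command currently bound to the name rather than running this one.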
+ + command(*args, **kwargs) + + return + + debug("Running {} {} {}".format(self, args, kwargs)) + + app.running_commands.append(self) + + if not app.quiet: + dashes = "--- " * (len(app.running_commands) - 1) + display_args = list(self._get_display_args(args, kwargs)) + + with console_color("magenta", file=_sys.stderr): + eprint("{}--> {}".format(dashes, self.name), end="") + + if display_args: + eprint(" ({})".format(", ".join(display_args)), end="") + + eprint() + + self.function(*args, **kwargs) + + if not app.quiet: + cprint("{}<-- {}".format(dashes, self.name), color="magenta", file=_sys.stderr) + + app.running_commands.pop() + + def _get_display_args(self, args, kwargs): + for i, param in enumerate(self.parameters.values()): + if param.positional: + if param.multiple: + for va in args[i:]: + yield repr(va) + elif param.optional: + value = args[i] + + if value == param.default: + continue + + yield repr(value) + else: + yield repr(args[i]) + else: + value = kwargs.get(param.name, param.default) + + if value == param.default: + continue + + if value in (True, False): + value = str(value).lower() + else: + value = repr(value) + + yield "{}={}".format(param.display_name, value) + + if _function is None: + return Command + else: + return Command(_function) + +def parent(*args, **kwargs): + try: + f_locals = _inspect.stack()[2].frame.f_locals + parent_fn = f_locals["self"].parent.function + except: + fail("Missing parent command") + + parent_fn(*args, **kwargs) + +class CommandParameter: + def __init__(self, name, display_name=None, type=None, metavar=None, help=None, short_option=None, default=None, positional=None): + self.name = name + self.display_name = nvl(display_name, self.name.replace("_", "-")) + self.type = type + self.metavar = nvl(metavar, self.display_name.upper()) + self.help = help + self.short_option = short_option + self.default = default + self.positional = positional + + self.optional = False + self.multiple = False + + def __repr__(self): + return "parameter '{}' (default {})".format(self.name, repr(self.default)) + +# Patch the default help text +def _capitalize_help(parser): + try: + for action in parser._actions: + if action.help and action.help is not _argparse.SUPPRESS: + action.help = capitalize(action.help) + except: # pragma: nocover + pass + +def _main(): # pragma: nocover + PlanoCommand().main() diff --git a/external/skewer/python/plano/github.py b/external/skewer/python/plano/github.py new file mode 100644 index 0000000..e1714b5 --- /dev/null +++ b/external/skewer/python/plano/github.py @@ -0,0 +1,80 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +from .main import * + +_html_template = """ + + + + + + +
+ +@content@ + +
+ + +""".strip() + +def convert_github_markdown(markdown): + json = emit_json({"text": markdown}) + content = http_post("https://github.com/gitapi/markdown", json, content_type="application/json") + + # Remove the "user-content-" prefix from internal anchors + content = content.replace("id=\"user-content-", "id=\"") + + return _html_template.replace("@content@", content) + +def update_external_from_github(dir, owner, repo, ref="main"): + dir = get_absolute_path(dir) + make_parent_dir(dir) + + url = f"https://github.com/{owner}/{repo}/archive/{ref}.tar.gz" + + with temp_file() as temp: + assert exists(temp) + + http_get(url, output_file=temp) + + with working_dir(quiet=True): + extract_archive(temp) + + extracted_dir = list_dir()[0] + assert is_dir(extracted_dir) + + replace(dir, extracted_dir) diff --git a/external/skewer/python/plano/main.py b/external/skewer/python/plano/main.py new file mode 100644 index 0000000..903f654 --- /dev/null +++ b/external/skewer/python/plano/main.py @@ -0,0 +1,1772 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +import base64 as _base64 +import binascii as _binascii +import code as _code +import datetime as _datetime +import fnmatch as _fnmatch +import getpass as _getpass +import json as _json +import os as _os +import pprint as _pprint +import pkgutil as _pkgutil +import random as _random +import re as _re +import shlex as _shlex +import shutil as _shutil +import signal as _signal +import socket as _socket +import subprocess as _subprocess +import sys as _sys +import tempfile as _tempfile +import time as _time +import traceback as _traceback +import urllib as _urllib +import urllib.parse as _urllib_parse +import uuid as _uuid + +_max = max + +## Exceptions + +class PlanoException(Exception): + pass + +class PlanoError(PlanoException): + pass + +class PlanoTimeout(PlanoException): + pass + +## Global variables + +ENV = _os.environ +ARGS = _sys.argv + +STDIN = _sys.stdin +STDOUT = _sys.stdout +STDERR = _sys.stderr +DEVNULL = _os.devnull + +LINUX = _sys.platform == "linux" +WINDOWS = _sys.platform in ("win32", "cygwin") + +PLANO_DEBUG = "PLANO_DEBUG" in ENV +PLANO_COLOR = "PLANO_COLOR" in ENV + +## Archive operations + +def make_archive(input_dir, output_file=None, quiet=False): + check_program("tar") + + archive_stem = get_base_name(input_dir) + + if output_file is None: + # tar on Windows needs this + base = join(get_current_dir(), archive_stem) + base = base.replace("\\", "/") + + output_file = f"{base}.tar.gz" + + _notice(quiet, "Making archive {} from directory {}", repr(output_file), repr(input_dir)) + + with working_dir(get_parent_dir(input_dir), quiet=True): + run(f"tar -czf {output_file} {archive_stem}", quiet=True) + + return output_file + +def extract_archive(input_file, output_dir=None, quiet=False): + check_program("tar") + + if output_dir is None: + output_dir = get_current_dir() + + _notice(quiet, "Extracting archive {} to directory {}", repr(input_file), repr(output_dir)) + + input_file = get_absolute_path(input_file) + + # tar on Windows needs this + input_file = input_file.replace("\\", "/") + + with working_dir(output_dir, quiet=True): + run(f"tar -xf {input_file}", quiet=True) + + return output_dir + +def rename_archive(input_file, new_archive_stem, quiet=False): + _notice(quiet, "Renaming archive {} with stem {}", repr(input_file), repr(new_archive_stem)) + + output_dir = get_absolute_path(get_parent_dir(input_file)) + output_file = "{}.tar.gz".format(join(output_dir, new_archive_stem)) + + # tar on Windows needs this + output_file = output_file.replace("\\", "/") + + input_file = get_absolute_path(input_file) + + with working_dir(quiet=True): + extract_archive(input_file, quiet=True) + + input_name = list_dir()[0] + input_dir = move(input_name, new_archive_stem, quiet=True) + + make_archive(input_dir, output_file=output_file, quiet=True) + + remove(input_file, quiet=True) + + return output_file + +## Console operations + +def flush(): + _sys.stdout.flush() + _sys.stderr.flush() + +def eprint(*args, **kwargs): + print(*args, file=_sys.stderr, **kwargs) + +def pprint(*args, **kwargs): + args = [pformat(x) for x in args] + print(*args, **kwargs) + +_color_codes = { + "black": "\u001b[30", + "red": "\u001b[31", + "green": "\u001b[32", + "yellow": "\u001b[33", + "blue": "\u001b[34", + "magenta": "\u001b[35", + "cyan": "\u001b[36", + "white": "\u001b[37", + "gray": "\u001b[90", +} + +_color_reset = "\u001b[0m" + +def _get_color_code(color, bright): + elems = [_color_codes[color]] + + if bright: + elems.append(";1") + + elems.append("m") + + return "".join(elems) + +def 
_is_color_enabled(file): + return PLANO_COLOR or hasattr(file, "isatty") and file.isatty() + +class console_color: + def __init__(self, color=None, bright=False, file=_sys.stdout): + self.file = file + self.color_code = None + + if (color, bright) != (None, False): + self.color_code = _get_color_code(color, bright) + + self.enabled = self.color_code is not None and _is_color_enabled(self.file) + + def __enter__(self): + if self.enabled: + print(self.color_code, file=self.file, end="", flush=True) + + def __exit__(self, exc_type, exc_value, traceback): + if self.enabled: + print(_color_reset, file=self.file, end="", flush=True) + +def cformat(value, color=None, bright=False, file=_sys.stdout): + if (color, bright) != (None, False) and _is_color_enabled(file): + return "".join((_get_color_code(color, bright), value, _color_reset)) + else: + return value + +def cprint(*args, **kwargs): + color = kwargs.pop("color", "white") + bright = kwargs.pop("bright", False) + file = kwargs.get("file", _sys.stdout) + + with console_color(color, bright=bright, file=file): + print(*args, **kwargs) + +class output_redirected: + def __init__(self, output, quiet=False): + self.output = output + self.quiet = quiet + + def __enter__(self): + flush() + + _notice(self.quiet, "Redirecting output to file {}", repr(self.output)) + + if is_string(self.output): + output = open(self.output, "w") + + self.prev_stdout, self.prev_stderr = _sys.stdout, _sys.stderr + _sys.stdout, _sys.stderr = output, output + + def __exit__(self, exc_type, exc_value, traceback): + flush() + + _sys.stdout, _sys.stderr = self.prev_stdout, self.prev_stderr + +try: + breakpoint +except NameError: # pragma: nocover + def breakpoint(): + import pdb + pdb.set_trace() + +def repl(locals): # pragma: nocover + _code.InteractiveConsole(locals=locals).interact() + +def print_properties(props, file=None): + size = max([len(x[0]) for x in props]) + + for prop in props: + name = "{}:".format(prop[0]) + template = "{{:<{}}} ".format(size + 1) + + print(template.format(name), prop[1], end="", file=file) + + for value in prop[2:]: + print(" {}".format(value), end="", file=file) + + print(file=file) + +## Directory operations + +def find(dirs=None, include="*", exclude=()): + if dirs is None: + dirs = "." 
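+ # Normalize dirs, include, and exclude so a single string or a sequence of strings both work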
+ + if is_string(dirs): + dirs = (dirs,) + + if is_string(include): + include = (include,) + + if is_string(exclude): + exclude = (exclude,) + + found = set() + + for dir in dirs: + for root, dir_names, file_names in _os.walk(dir, followlinks=True): + names = dir_names + file_names + + for include_pattern in include: + names = _fnmatch.filter(names, include_pattern) + + for exclude_pattern in exclude: + for name in _fnmatch.filter(names, exclude_pattern): + names.remove(name) + + if root.startswith("./"): + root = remove_prefix(root, "./") + elif root == ".": + root = "" + + found.update([join(root, x) for x in names]) + + return sorted(found) + +def make_dir(dir, quiet=False): + if dir == "": + return dir + + if not exists(dir): + _notice(quiet, "Making directory '{}'", dir) + _os.makedirs(dir) + + return dir + +def make_parent_dir(path, quiet=False): + return make_dir(get_parent_dir(path), quiet=quiet) + +# Returns the current working directory so you can change it back +def change_dir(dir, quiet=False): + _debug(quiet, "Changing directory to {}", repr(dir)) + + prev_dir = get_current_dir() + + if not dir: + return prev_dir + + _os.chdir(dir) + + return prev_dir + +def list_dir(dir=None, include="*", exclude=()): + if dir is None: + dir = get_current_dir() + else: + dir = expand(dir) + + assert is_dir(dir), dir + + if is_string(include): + include = (include,) + + if is_string(exclude): + exclude = (exclude,) + + names = _os.listdir(dir) + + for include_pattern in include: + names = _fnmatch.filter(names, include_pattern) + + for exclude_pattern in exclude: + for name in _fnmatch.filter(names, exclude_pattern): + names.remove(name) + + return sorted(names) + +# No args constructor gets a temp dir +class working_dir: + def __init__(self, dir=None, quiet=False): + self.dir = dir + self.prev_dir = None + self.remove = False + self.quiet = quiet + + if self.dir is None: + self.dir = make_temp_dir() + self.remove = True + else: + self.dir = expand(self.dir) + + def __enter__(self): + if self.dir == ".": + return + + _notice(self.quiet, "Entering directory {}", repr(get_absolute_path(self.dir))) + + make_dir(self.dir, quiet=True) + + self.prev_dir = change_dir(self.dir, quiet=True) + + return self.dir + + def __exit__(self, exc_type, exc_value, traceback): + if self.dir == ".": + return + + _debug(self.quiet, "Returning to directory {}", repr(get_absolute_path(self.prev_dir))) + + change_dir(self.prev_dir, quiet=True) + + if self.remove: + remove(self.dir, quiet=True) + +## Environment operations + +def join_path_var(*paths): + return _os.pathsep.join(unique(skip(paths))) + +def get_current_dir(): + return _os.getcwd() + +def get_home_dir(user=None): + return _os.path.expanduser("~{}".format(user or "")) + +def get_user(): + return _getpass.getuser() + +def get_hostname(): + return _socket.gethostname() + +def get_program_name(command=None): + if command is None: + args = ARGS + else: + args = command.split() + + for arg in args: + if "=" not in arg: + return get_base_name(arg) + +def which(program_name): + return _shutil.which(program_name) + +def check_env(var, message=None): + if var not in _os.environ: + if message is None: + message = "Environment variable {} is not set".format(repr(var)) + + raise PlanoError(message) + +def check_module(module, message=None): + if _pkgutil.find_loader(module) is None: + if message is None: + message = "Python module {} is not found".format(repr(module)) + + raise PlanoError(message) + +def check_program(program, message=None): + if which(program) is 
None: + if message is None: + message = "Program {} is not found".format(repr(program)) + + raise PlanoError(message) + +class working_env: + def __init__(self, **vars): + self.amend = vars.pop("amend", True) + self.vars = vars + + def __enter__(self): + self.prev_vars = dict(_os.environ) + + if not self.amend: + for name, value in list(_os.environ.items()): + if name not in self.vars: + del _os.environ[name] + + for name, value in self.vars.items(): + _os.environ[name] = str(value) + + def __exit__(self, exc_type, exc_value, traceback): + for name, value in self.prev_vars.items(): + _os.environ[name] = value + + for name, value in self.vars.items(): + if name not in self.prev_vars: + del _os.environ[name] + +class working_module_path: + def __init__(self, path, amend=True): + if is_string(path): + if not is_absolute(path): + path = get_absolute_path(path) + + path = [path] + + if amend: + path = path + _sys.path + + self.path = path + + def __enter__(self): + self.prev_path = _sys.path + _sys.path = self.path + + def __exit__(self, exc_type, exc_value, traceback): + _sys.path = self.prev_path + +def print_env(file=None): + props = ( + ("ARGS", ARGS), + ("ENV['PATH']", ENV.get("PATH")), + ("ENV['PYTHONPATH']", ENV.get("PYTHONPATH")), + ("sys.executable", _sys.executable), + ("sys.path", _sys.path), + ("sys.version", _sys.version.replace("\n", "")), + ("get_current_dir()", get_current_dir()), + ("get_home_dir()", get_home_dir()), + ("get_hostname()", get_hostname()), + ("get_program_name()", get_program_name()), + ("get_user()", get_user()), + ("plano.__file__", __file__), + ("which('plano')", which("plano")), + ) + + print_properties(props, file=file) + +def print_stack(file=None): + _traceback.print_stack(file=file) + +## File operations + +def touch(file, quiet=False): + file = expand(file) + + _notice(quiet, "Touching {}", repr(file)) + + try: + _os.utime(file, None) + except OSError: + append(file, "") + + return file + +# symlinks=True - Preserve symlinks +# inside=True - Place from_path inside to_path if to_path is a directory +def copy(from_path, to_path, symlinks=True, inside=True, quiet=False): + from_path = expand(from_path) + to_path = expand(to_path) + + _notice(quiet, "Copying {} to {}", repr(from_path), repr(to_path)) + + if is_dir(to_path) and inside: + to_path = join(to_path, get_base_name(from_path)) + else: + make_parent_dir(to_path, quiet=True) + + if is_dir(from_path): + for name in list_dir(from_path): + copy(join(from_path, name), join(to_path, name), symlinks=symlinks, inside=False, quiet=True) + + _shutil.copystat(from_path, to_path) + elif is_link(from_path) and symlinks: + make_link(to_path, read_link(from_path), quiet=True) + else: + _shutil.copy2(from_path, to_path) + + return to_path + +# inside=True - Place from_path inside to_path if to_path is a directory +def move(from_path, to_path, inside=True, quiet=False): + from_path = expand(from_path) + to_path = expand(to_path) + + _notice(quiet, "Moving {} to {}", repr(from_path), repr(to_path)) + + to_path = copy(from_path, to_path, inside=inside, quiet=True) + remove(from_path, quiet=True) + + return to_path + +def replace(path, replacement, quiet=False): + path = expand(path) + replacement = expand(replacement) + + _notice(quiet, "Replacing {} with {}", repr(path), repr(replacement)) + + with temp_dir() as backup_dir: + backup = join(backup_dir, "backup") + backup_created = False + + if exists(path): + move(path, backup, quiet=True) + backup_created = True + + try: + move(replacement, path, quiet=True) + except 
OSError: + notice("Removing") + remove(path, quiet=True) + + if backup_created: + move(backup, path, quiet=True) + + raise + + assert not exists(replacement), replacement + assert exists(path), path + + return path + +def remove(paths, quiet=False): + if is_string(paths): + paths = (paths,) + + for path in paths: + path = expand(path) + + if not exists(path): + continue + + _debug(quiet, "Removing {}", repr(path)) + + if is_dir(path): + _shutil.rmtree(path, ignore_errors=True) + else: + _os.remove(path) + +def get_file_size(file): + file = expand(file) + return _os.path.getsize(file) + +## IO operations + +def read(file): + file = expand(file) + + with open(file) as f: + return f.read() + +def write(file, string): + file = expand(file) + + make_parent_dir(file, quiet=True) + + with open(file, "w") as f: + f.write(string) + + return file + +def append(file, string): + file = expand(file) + + make_parent_dir(file, quiet=True) + + with open(file, "a") as f: + f.write(string) + + return file + +def prepend(file, string): + file = expand(file) + + orig = read(file) + + return write(file, string + orig) + +def tail(file, count): + file = expand(file) + return "".join(tail_lines(file, count)) + +def read_lines(file): + file = expand(file) + + with open(file) as f: + return f.readlines() + +def write_lines(file, lines): + file = expand(file) + + make_parent_dir(file, quiet=True) + + with open(file, "w") as f: + f.writelines(lines) + + return file + +def append_lines(file, lines): + file = expand(file) + + make_parent_dir(file, quiet=True) + + with open(file, "a") as f: + f.writelines(lines) + + return file + +def prepend_lines(file, lines): + file = expand(file) + + orig_lines = read_lines(file) + + make_parent_dir(file, quiet=True) + + with open(file, "w") as f: + f.writelines(lines) + f.writelines(orig_lines) + + return file + +def tail_lines(file, count): + assert count >= 0, count + + lines = read_lines(file) + + return lines[-count:] + +def string_replace_file(file, expr, replacement, count=0): + file = expand(file) + return write(file, string_replace(read(file), expr, replacement, count=count)) + +def concatenate(file, input_files): + file = expand(file) + + assert file not in input_files + + make_parent_dir(file, quiet=True) + + with open(file, "wb") as f: + for input_file in input_files: + if not exists(input_file): + continue + + with open(input_file, "rb") as inf: + _shutil.copyfileobj(inf, f) + + return file + +## Iterable operations + +def unique(iterable): + return list(dict.fromkeys(iterable).keys()) + +def skip(iterable, values=(None, "", (), [], {})): + if is_scalar(values): + values = (values,) + + items = list() + + for item in iterable: + if item not in values: + items.append(item) + + return items + +## JSON operations + +def read_json(file): + file = expand(file) + + with open(file) as f: + return _json.load(f) + +def write_json(file, data): + file = expand(file) + + make_parent_dir(file, quiet=True) + + with open(file, "w") as f: + _json.dump(data, f, indent=4, separators=(",", ": "), sort_keys=True) + + return file + +def parse_json(json): + return _json.loads(json) + +def emit_json(data): + return _json.dumps(data, indent=4, separators=(",", ": "), sort_keys=True) + +def print_json(data, **kwargs): + print(emit_json(data), **kwargs) + +## HTTP operations + +def _run_curl(method, url, content=None, content_file=None, content_type=None, output_file=None, insecure=False, + user=None, password=None, quiet=False): + check_program("curl") + + _notice(quiet, f"Sending {method} 
request to '{url}'") + + args = ["curl", "-sfL"] + + if method != "GET": + args.extend(["-X", method]) + + if content is not None: + assert content_file is None + args.extend(["-H", "Expect:", "-d", "@-"]) + + if content_file is not None: + assert content is None, content + args.extend(["-H", "Expect:", "-d", f"@{content_file}"]) + + if content_type is not None: + args.extend(["-H", f"'Content-Type: {content_type}'"]) + + if output_file is not None: + args.extend(["-o", output_file]) + + if insecure: + args.append("--insecure") + + if user is not None: + assert password is not None + args.extend(["--user", f"{user}:{password}"]) + + args.append(url) + + if output_file is not None: + make_parent_dir(output_file, quiet=True) + + proc = run(args, stdin=_subprocess.PIPE, stdout=_subprocess.PIPE, stderr=_subprocess.PIPE, + input=content, check=False, quiet=True) + + if proc.exit_code > 0: + raise PlanoProcessError(proc) + + if output_file is None: + return proc.stdout_result + +def http_get(url, output_file=None, insecure=False, user=None, password=None, quiet=False): + return _run_curl("GET", url, output_file=output_file, insecure=insecure, user=user, password=password, quiet=quiet) + +def http_get_json(url, insecure=False, user=None, password=None, quiet=False): + return parse_json(http_get(url, insecure=insecure, user=user, password=password, quiet=quiet)) + +def http_put(url, content, content_type=None, insecure=False, user=None, password=None, quiet=False): + _run_curl("PUT", url, content=content, content_type=content_type, insecure=insecure, user=user, password=password, + quiet=quiet) + +def http_put_file(url, content_file, content_type=None, insecure=False, user=None, password=None, quiet=False): + _run_curl("PUT", url, content_file=content_file, content_type=content_type, insecure=insecure, user=user, + password=password, quiet=quiet) + +def http_put_json(url, data, insecure=False, user=None, password=None, quiet=False): + http_put(url, emit_json(data), content_type="application/json", insecure=insecure, user=user, password=password, + quiet=quiet) + +def http_post(url, content, content_type=None, output_file=None, insecure=False, user=None, password=None, + quiet=False): + return _run_curl("POST", url, content=content, content_type=content_type, output_file=output_file, + insecure=insecure, user=user, password=password, quiet=quiet) + +def http_post_file(url, content_file, content_type=None, output_file=None, insecure=False, user=None, password=None, + quiet=False): + return _run_curl("POST", url, content_file=content_file, content_type=content_type, output_file=output_file, + insecure=insecure, user=user, password=password, quiet=quiet) + +def http_post_json(url, data, insecure=False, user=None, password=None, quiet=False): + return parse_json(http_post(url, emit_json(data), content_type="application/json", insecure=insecure, user=user, + password=password, quiet=quiet)) + +## Link operations + +def make_link(path: str, linked_path: str, quiet=False) -> str: + _notice(quiet, "Making symlink {} to {}", repr(path), repr(linked_path)) + + make_parent_dir(path, quiet=True) + remove(path, quiet=True) + + _os.symlink(linked_path, path) + + return path + +def read_link(path): + return _os.readlink(path) + +## Logging operations + +_logging_levels = ( + "debug", + "notice", + "warning", + "error", + "disabled", +) + +_DEBUG = _logging_levels.index("debug") +_NOTICE = _logging_levels.index("notice") +_WARNING = _logging_levels.index("warning") +_ERROR = _logging_levels.index("error") 
+_DISABLED = _logging_levels.index("disabled") + +_logging_output = None +_logging_threshold = _NOTICE +_logging_contexts = list() + +def enable_logging(level="notice", output=None, quiet=False): + assert level in _logging_levels, level + + _notice(quiet, "Enabling logging (level={}, output={})", repr(level), repr(nvl(output, "stderr"))) + + global _logging_threshold + _logging_threshold = _logging_levels.index(level) + + if is_string(output): + output = open(output, "w") + + global _logging_output + _logging_output = output + +def disable_logging(quiet=False): + _notice(quiet, "Disabling logging") + + global _logging_threshold + _logging_threshold = _DISABLED + +class logging_enabled: + def __init__(self, level="notice", output=None): + self.level = level + self.output = output + + def __enter__(self): + self.prev_level = _logging_levels[_logging_threshold] + self.prev_output = _logging_output + + if self.level == "disabled": + disable_logging(quiet=True) + else: + enable_logging(level=self.level, output=self.output, quiet=True) + + def __exit__(self, exc_type, exc_value, traceback): + if self.prev_level == "disabled": + disable_logging(quiet=True) + else: + enable_logging(level=self.prev_level, output=self.prev_output, quiet=True) + +class logging_disabled(logging_enabled): + def __init__(self): + super().__init__(level="disabled") + +class logging_context: + def __init__(self, name): + self.name = name + + def __enter__(self): + _logging_contexts.append(self.name) + + def __exit__(self, exc_type, exc_value, traceback): + _logging_contexts.pop() + +def fail(message, *args): + if isinstance(message, BaseException): + if not isinstance(message, PlanoError): + error(message) + + raise message + + if args: + message = message.format(*args) + + raise PlanoError(message) + +def error(message, *args): + log(_ERROR, message, *args) + +def warning(message, *args): + log(_WARNING, message, *args) + +def notice(message, *args): + log(_NOTICE, message, *args) + +def debug(message, *args): + log(_DEBUG, message, *args) + +def log(level, message, *args): + if is_string(level): + level = _logging_levels.index(level) + + if _logging_threshold <= level: + _print_message(level, message, args) + +def _print_message(level, message, args): + line = list() + out = nvl(_logging_output, _sys.stderr) + + program_text = "{}:".format(get_program_name()) + + line.append(cformat(program_text, color="gray")) + + level_text = "{}:".format(_logging_levels[level]) + level_color = ("white", "cyan", "yellow", "red", None)[level] + level_bright = (False, False, False, True, False)[level] + + line.append(cformat(level_text, color=level_color, bright=level_bright)) + + for name in _logging_contexts: + line.append(cformat("{}:".format(name), color="yellow")) + + if isinstance(message, BaseException): + exception = message + + line.append(str(exception)) + + print(" ".join(line), file=out) + + if hasattr(exception, "__traceback__"): + _traceback.print_exception(type(exception), exception, exception.__traceback__, file=out) + else: + message = str(message) + + if args: + message = message.format(*args) + + line.append(capitalize(message)) + + print(" ".join(line), file=out) + + out.flush() + +def _notice(quiet, message, *args): + if quiet: + debug(message, *args) + else: + notice(message, *args) + +def _debug(quiet, message, *args): + if not quiet: + debug(message, *args) + +## Path operations + +def expand(path): + path = _os.path.expanduser(path) + path = _os.path.expandvars(path) + + return path + +def 
get_absolute_path(path): + path = expand(path) + return _os.path.abspath(path) + +def normalize_path(path): + path = expand(path) + return _os.path.normpath(path) + +def get_real_path(path): + path = expand(path) + return _os.path.realpath(path) + +def get_relative_path(path, start=None): + path = expand(path) + return _os.path.relpath(path, start=start) + +def get_file_url(path): + path = expand(path) + return "file:{}".format(get_absolute_path(path)) + +def exists(path): + path = expand(path) + return _os.path.lexists(path) + +def is_absolute(path): + path = expand(path) + return _os.path.isabs(path) + +def is_dir(path): + path = expand(path) + return _os.path.isdir(path) + +def is_file(path): + path = expand(path) + return _os.path.isfile(path) + +def is_link(path): + path = expand(path) + return _os.path.islink(path) + +def join(*paths): + paths = [expand(x) for x in paths] + + path = _os.path.join(*paths) + path = normalize_path(path) + + return path + +def split(path): + path = expand(path) + path = normalize_path(path) + parent, child = _os.path.split(path) + + return parent, child + +def split_extension(path): + path = expand(path) + path = normalize_path(path) + root, ext = _os.path.splitext(path) + + return root, ext + +def get_parent_dir(path): + path = expand(path) + path = normalize_path(path) + parent, child = split(path) + + return parent + +def get_base_name(path): + path = expand(path) + path = normalize_path(path) + parent, name = split(path) + + return name + +def get_name_stem(file): + file = expand(file) + name = get_base_name(file) + + if name.endswith(".tar.gz"): + name = name[:-3] + + stem, ext = split_extension(name) + + return stem + +def get_name_extension(file): + file = expand(file) + name = get_base_name(file) + stem, ext = split_extension(name) + + return ext + +def _check_path(path, test_func, message): + path = expand(path) + + if not test_func(path): + parent_dir = get_parent_dir(path) + + if is_dir(parent_dir): + found_paths = ", ".join([repr(x) for x in list_dir(parent_dir)]) + message = "{}. 
The parent directory contains: {}".format(message.format(repr(path)), found_paths) + else: + message = "{}".format(message.format(repr(path))) + + raise PlanoError(message) + +def check_exists(path): + path = expand(path) + _check_path(path, exists, "File or directory {} not found") + +def check_file(path): + path = expand(path) + _check_path(path, is_file, "File {} not found") + +def check_dir(path): + path = expand(path) + _check_path(path, is_dir, "Directory {} not found") + +def await_exists(path, timeout=30, quiet=False): + path = expand(path) + + _notice(quiet, "Waiting for path {} to exist", repr(path)) + + timeout_message = "Timed out waiting for path {} to exist".format(path) + period = 0.03125 + + with Timer(timeout=timeout, timeout_message=timeout_message) as timer: + while True: + try: + check_exists(path) + except PlanoError: + sleep(period, quiet=True) + period = min(1, period * 2) + else: + return + +## Port operations + +def get_random_port(min=49152, max=65535): + ports = [_random.randint(min, max) for _ in range(3)] + + for port in ports: + try: + check_port(port) + except PlanoError: + return port + + raise PlanoError("Random ports unavailable") + +def check_port(port, host="localhost"): + sock = _socket.socket(_socket.AF_INET, _socket.SOCK_STREAM) + sock.setsockopt(_socket.SOL_SOCKET, _socket.SO_REUSEADDR, 1) + + if sock.connect_ex((host, port)) != 0: + raise PlanoError("Port {} (host {}) is not reachable".format(repr(port), repr(host))) + +def await_port(port, host="localhost", timeout=30, quiet=False): + _notice(quiet, "Waiting for port {}", port) + + if is_string(port): + port = int(port) + + timeout_message = "Timed out waiting for port {} to open".format(port) + period = 0.03125 + + with Timer(timeout=timeout, timeout_message=timeout_message) as timer: + while True: + try: + check_port(port, host=host) + except PlanoError: + sleep(period, quiet=True) + period = min(1, period * 2) + else: + return + +## Process operations + +def get_process_id(): + return _os.getpid() + +def _format_command(command, represent=True): + if is_string(command): + args = _shlex.split(command) + else: + args = command + + args = [expand(str(x)) for x in args] + command = " ".join(args) + + if represent: + return repr(command) + else: + return command + +# quiet=False - Don't log at notice level +# stash=False - No output unless there is an error +# output= - Send stdout and stderr to a file +# stdin= - XXX +# stdout= - Send stdout to a file +# stderr= - Send stderr to a file +# shell=False - XXX +def start(command, stdin=None, stdout=None, stderr=None, output=None, shell=False, stash=False, quiet=False): + _notice(quiet, "Starting a new process (command {})", _format_command(command)) + + if output is not None: + stdout, stderr = output, output + + if is_string(stdin): + stdin = expand(stdin) + stdin = open(stdin, "r") + + if is_string(stdout): + stdout = expand(stdout) + stdout = open(stdout, "w") + + if is_string(stderr): + stderr = expand(stderr) + stderr = open(stderr, "w") + + if stdin is None: + stdin = _sys.stdin + + if stdout is None: + stdout = _sys.stdout + + if stderr is None: + stderr = _sys.stderr + + stash_file = None + + if stash: + stash_file = make_temp_file() + out = open(stash_file, "w") + stdout = out + stderr = out + + if shell: + if is_string(command): + args = command + else: + args = " ".join(map(str, command)) + else: + if is_string(command): + args = _shlex.split(command) + else: + args = command + + args = [expand(str(x)) for x in args] + + try: + proc = 
PlanoProcess(args, stdin=stdin, stdout=stdout, stderr=stderr, shell=shell, close_fds=True, stash_file=stash_file) + except OSError as e: + raise PlanoError("Command {}: {}".format(_format_command(command), str(e))) + + _notice(quiet, "{} started", proc) + + return proc + +def stop(proc, timeout=None, quiet=False): + _notice(quiet, "Stopping {}", proc) + + if proc.poll() is not None: + if proc.exit_code == 0: + debug("{} already exited normally", proc) + elif proc.exit_code == -(_signal.SIGTERM): + debug("{} was already terminated", proc) + else: + debug("{} already exited with code {}", proc, proc.exit_code) + + return proc + + kill(proc, quiet=True) + + return wait(proc, timeout=timeout, quiet=True) + +def kill(proc, quiet=False): + _notice(quiet, "Killing {}", proc) + + proc.terminate() + +def wait(proc, timeout=None, check=False, quiet=False): + _notice(quiet, "Waiting for {} to exit", proc) + + try: + proc.wait(timeout=timeout) + except _subprocess.TimeoutExpired: + error("{} timed out after {} seconds", proc, timeout) + raise PlanoTimeout() + + if proc.exit_code == 0: + debug("{} exited normally", proc) + elif proc.exit_code < 0: + debug("{} was terminated by signal {}", proc, abs(proc.exit_code)) + else: + if check: + error("{} exited with code {}", proc, proc.exit_code) + else: + debug("{} exited with code {}", proc, proc.exit_code) + + if proc.stash_file is not None: + if proc.exit_code > 0: + eprint(read(proc.stash_file), end="") + + if not WINDOWS: + remove(proc.stash_file, quiet=True) + + if check and proc.exit_code > 0: + raise PlanoProcessError(proc) + + return proc + +# input= - Pipe to the process +def run(command, stdin=None, stdout=None, stderr=None, input=None, output=None, + stash=False, shell=False, check=True, quiet=False): + _notice(quiet, "Running command {}", _format_command(command)) + + if input is not None: + assert stdin in (None, _subprocess.PIPE), stdin + + input = input.encode("utf-8") + stdin = _subprocess.PIPE + + proc = start(command, stdin=stdin, stdout=stdout, stderr=stderr, output=output, + stash=stash, shell=shell, quiet=True) + + proc.stdout_result, proc.stderr_result = proc.communicate(input=input) + + if proc.stdout_result is not None: + proc.stdout_result = proc.stdout_result.decode("utf-8") + + if proc.stderr_result is not None: + proc.stderr_result = proc.stderr_result.decode("utf-8") + + return wait(proc, check=check, quiet=True) + +# input= - Pipe the given input into the process +def call(command, input=None, shell=False, quiet=False): + _notice(quiet, "Calling {}", _format_command(command)) + + proc = run(command, stdin=_subprocess.PIPE, stdout=_subprocess.PIPE, stderr=_subprocess.PIPE, + input=input, shell=shell, check=True, quiet=True) + + return proc.stdout_result + +def exit(arg=None, *args, **kwargs): + verbose = kwargs.get("verbose", False) + + if arg in (0, None): + if verbose: + notice("Exiting normally") + + _sys.exit() + + if is_string(arg): + if args: + arg = arg.format(*args) + + if verbose: + error(arg) + + _sys.exit(arg) + + if isinstance(arg, BaseException): + if verbose: + error(arg) + + _sys.exit(str(arg)) + + if isinstance(arg, int): + _sys.exit(arg) + + raise PlanoException("Illegal argument") + +_child_processes = list() + +class PlanoProcess(_subprocess.Popen): + def __init__(self, args, **options): + self.stash_file = options.pop("stash_file", None) + + super().__init__(args, **options) + + self.args = args + self.stdout_result = None + self.stderr_result = None + + _child_processes.append(self) + + @property + def 
exit_code(self): + return self.returncode + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + stop(self) + + def __repr__(self): + return "process {} (command {})".format(self.pid, _format_command(self.args)) + +class PlanoProcessError(_subprocess.CalledProcessError, PlanoError): + def __init__(self, proc): + super().__init__(proc.exit_code, _format_command(proc.args, represent=False)) + +def _default_sigterm_handler(signum, frame): + for proc in _child_processes: + if proc.poll() is None: + kill(proc, quiet=True) + + exit(-(_signal.SIGTERM)) + +_signal.signal(_signal.SIGTERM, _default_sigterm_handler) + +## String operations + +def string_replace(string, expr, replacement, count=0): + return _re.sub(expr, replacement, string, count) + +def remove_prefix(string, prefix): + if string is None: + return "" + + if prefix and string.startswith(prefix): + string = string[len(prefix):] + + return string + +def remove_suffix(string, suffix): + if string is None: + return "" + + if suffix and string.endswith(suffix): + string = string[:-len(suffix)] + + return string + +def shorten(string, max, ellipsis=None): + assert max is None or isinstance(max, int) + + if string is None: + return "" + + if max is None or len(string) < max: + return string + else: + if ellipsis is not None: + string = string + ellipsis + end = _max(0, max - len(ellipsis)) + return string[0:end] + ellipsis + else: + return string[0:max] + +def plural(noun, count=0, plural=None): + if noun in (None, ""): + return "" + + if count == 1: + return noun + + if plural is None: + if noun.endswith("s"): + plural = "{}ses".format(noun) + else: + plural = "{}s".format(noun) + + return plural + +def capitalize(string): + if not string: + return "" + + return string[0].upper() + string[1:] + +def base64_encode(string): + return _base64.b64encode(string) + +def base64_decode(string): + return _base64.b64decode(string) + +def url_encode(string): + return _urllib_parse.quote_plus(string) + +def url_decode(string): + return _urllib_parse.unquote_plus(string) + +def parse_url(url): + return _urllib_parse.urlparse(url) + +## Temp operations + +def get_system_temp_dir(): + return _tempfile.gettempdir() + +def get_user_temp_dir(): + try: + return _os.environ["XDG_RUNTIME_DIR"] + except KeyError: + return join(get_system_temp_dir(), get_user()) + +def make_temp_file(prefix="plano-", suffix="", dir=None): + if dir is None: + dir = get_system_temp_dir() + + return _tempfile.mkstemp(prefix=prefix, suffix=suffix, dir=dir)[1] + +def make_temp_dir(prefix="plano-", suffix="", dir=None): + if dir is None: + dir = get_system_temp_dir() + + return _tempfile.mkdtemp(prefix=prefix, suffix=suffix, dir=dir) + +class temp_file: + def __init__(self, prefix="plano-", suffix="", dir=None): + if dir is None: + dir = get_system_temp_dir() + + self.fd, self.file = _tempfile.mkstemp(prefix=prefix, suffix=suffix, dir=dir) + + def __enter__(self): + return self.file + + def __exit__(self, exc_type, exc_value, traceback): + _os.close(self.fd) + + if not WINDOWS: # XXX + remove(self.file, quiet=True) + +class temp_dir: + def __init__(self, prefix="plano-", suffix="", dir=None): + self.dir = make_temp_dir(prefix=prefix, suffix=suffix, dir=dir) + + def __enter__(self): + return self.dir + + def __exit__(self, exc_type, exc_value, traceback): + remove(self.dir, quiet=True) + +## Time operations + +# Unix time +def get_time(): + return _time.time() + +# Python UTC time +def get_datetime(): + return 
_datetime.datetime.now(tz=_datetime.timezone.utc) + +def parse_timestamp(timestamp, format="%Y-%m-%dT%H:%M:%SZ"): + if timestamp is None: + return None + + datetime = _datetime.datetime.strptime(timestamp, format) + datetime = datetime.replace(tzinfo=_datetime.timezone.utc) + + return datetime + +def format_timestamp(datetime=None, format="%Y-%m-%dT%H:%M:%SZ"): + if datetime is None: + datetime = get_datetime() + + return datetime.strftime(format) + +def format_date(datetime=None): + if datetime is None: + datetime = get_datetime() + + day = datetime.day + month = datetime.strftime("%B") + year = datetime.strftime("%Y") + + return f"{day} {month} {year}" + +def format_time(datetime=None, precision="second"): + if datetime is None: + datetime = get_datetime() + + assert precision in ("minute", "second"), "Illegal precision value" + + hour = datetime.hour + minute = datetime.strftime("%M") + second = datetime.strftime("%S") + + if precision == "second": + return f"{hour}:{minute}:{second}" + else: + return f"{hour}:{minute}" + +def format_duration(seconds, align=False): + assert seconds >= 0 + + if seconds >= 3600: + value = seconds / 3600 + unit = "h" + elif seconds >= 5 * 60: + value = seconds / 60 + unit = "m" + else: + value = seconds + unit = "s" + + if align: + return "{:.1f}{}".format(value, unit) + elif value > 10: + return "{:.0f}{}".format(value, unit) + else: + return remove_suffix("{:.1f}".format(value), ".0") + unit + +def sleep(seconds, quiet=False): + _notice(quiet, "Sleeping for {} {}", seconds, plural("second", seconds)) + + _time.sleep(seconds) + +class Timer: + def __init__(self, timeout=None, timeout_message=None): + self.timeout = timeout + self.timeout_message = timeout_message + + if self.timeout is not None and not hasattr(_signal, "SIGALRM"): # pragma: nocover + self.timeout = None + + self.start_time = None + self.stop_time = None + + def start(self): + self.start_time = get_time() + + if self.timeout is not None: + self.prev_handler = _signal.signal(_signal.SIGALRM, self.raise_timeout) + self.prev_timeout, prev_interval = _signal.setitimer(_signal.ITIMER_REAL, self.timeout) + self.prev_timer_suspend_time = get_time() + + assert prev_interval == 0.0, "This case is not yet handled" + + def stop(self): + self.stop_time = get_time() + + if self.timeout is not None: + assert get_time() - self.prev_timer_suspend_time > 0, "This case is not yet handled" + + _signal.signal(_signal.SIGALRM, self.prev_handler) + _signal.setitimer(_signal.ITIMER_REAL, self.prev_timeout) + + def __enter__(self): + self.start() + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.stop() + + @property + def elapsed_time(self): + assert self.start_time is not None + + if self.stop_time is None: + return get_time() - self.start_time + else: + return self.stop_time - self.start_time + + def raise_timeout(self, *args): + raise PlanoTimeout(self.timeout_message) + +## Unique ID operations + +# Length in bytes, renders twice as long in hex +def get_unique_id(bytes=16): + assert bytes >= 1 + assert bytes <= 16 + + uuid_bytes = _uuid.uuid4().bytes + uuid_bytes = uuid_bytes[:bytes] + + return _binascii.hexlify(uuid_bytes).decode("utf-8") + +## Value operations + +def nvl(value, replacement): + if value is None: + return replacement + + return value + +def is_string(value): + return isinstance(value, str) + +def is_scalar(value): + return value is None or isinstance(value, (str, int, float, complex, bool)) + +def is_empty(value): + return value in (None, "", (), [], {}) + +def 
pformat(value): + return _pprint.pformat(value, width=120) + +def format_empty(value, replacement): + if is_empty(value): + value = replacement + + return value + +def format_not_empty(value, template=None): + if not is_empty(value) and template is not None: + value = template.format(value) + + return value + +def format_repr(obj, limit=None): + attrs = ["{}={}".format(k, repr(v)) for k, v in obj.__dict__.items()] + return "{}({})".format(obj.__class__.__name__, ", ".join(attrs[:limit])) + +class Namespace: + def __init__(self, **kwargs): + for name in kwargs: + setattr(self, name, kwargs[name]) + + def __eq__(self, other): + return vars(self) == vars(other) + + def __contains__(self, key): + return key in self.__dict__ + + def __repr__(self): + return format_repr(self) + +## YAML operations + +def read_yaml(file): + check_module("yaml", "Python module 'yaml' is not found. To install it, run 'pip install pyyaml'.") + + import yaml as _yaml + + file = expand(file) + + with open(file) as f: + return _yaml.safe_load(f) + +def write_yaml(file, data): + check_module("yaml", "Python module 'yaml' is not found. To install it, run 'pip install pyyaml'.") + + import yaml as _yaml + + file = expand(file) + + make_parent_dir(file, quiet=True) + + with open(file, "w") as f: + _yaml.safe_dump(data, f) + + return file + +def parse_yaml(yaml): + check_module("yaml", "Python module 'yaml' is not found. To install it, run 'pip install pyyaml'.") + + import yaml as _yaml + + return _yaml.safe_load(yaml) + +def emit_yaml(data): + check_module("yaml", "Python module 'yaml' is not found. To install it, run 'pip install pyyaml'.") + + import yaml as _yaml + + return _yaml.safe_dump(data) + +def print_yaml(data, **kwargs): + print(emit_yaml(data), **kwargs) + +if PLANO_DEBUG: # pragma: nocover + enable_logging(level="debug") diff --git a/external/skewer/python/plano/test.py b/external/skewer/python/plano/test.py new file mode 100644 index 0000000..fb87d8d --- /dev/null +++ b/external/skewer/python/plano/test.py @@ -0,0 +1,428 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +from .main import * +from .command import * + +import argparse as _argparse +import asyncio as _asyncio +import fnmatch as _fnmatch +import functools as _functools +import importlib as _importlib +import inspect as _inspect +import sys as _sys +import traceback as _traceback + +class PlanoTestCommand(BaseCommand): + def __init__(self, test_modules=[]): + self.test_modules = test_modules + + if _inspect.ismodule(self.test_modules): + self.test_modules = [self.test_modules] + + self.parser = BaseArgumentParser() + self.parser.add_argument("include", metavar="PATTERN", nargs="*", default=["*"], + help="Run tests with names matching PATTERN (default '*', all tests)") + self.parser.add_argument("-e", "--exclude", metavar="PATTERN", action="append", default=[], + help="Do not run tests with names matching PATTERN (repeatable)") + self.parser.add_argument("-m", "--module", action="append", default=[], + help="Collect tests from MODULE (repeatable)") + self.parser.add_argument("-l", "--list", action="store_true", + help="Print the test names and exit") + self.parser.add_argument("--enable", metavar="PATTERN", action="append", default=[], + help=_argparse.SUPPRESS) + self.parser.add_argument("--unskip", metavar="PATTERN", action="append", default=[], + help="Run skipped tests matching PATTERN (repeatable)") + self.parser.add_argument("--timeout", metavar="SECONDS", type=int, default=300, + help="Fail any test running longer than SECONDS (default 300)") + self.parser.add_argument("--fail-fast", action="store_true", + help="Exit on the first failure encountered in a test run") + self.parser.add_argument("--iterations", metavar="COUNT", type=int, default=1, + help="Run the tests COUNT times (default 1)") + self.parser.add_argument("--verbose", action="store_true", + help="Print detailed logging to the console") + self.parser.add_argument("--quiet", action="store_true", + help="Print no logging to the console") + + def parse_args(self, args): + return self.parser.parse_args(args) + + def configure_logging(self, args): + if args.verbose: + return "notice", None + + if args.quiet: + return "error", None + + return "warning", None + + def init(self, args): + self.list_only = args.list + self.include_patterns = args.include + self.exclude_patterns = args.exclude + self.enable_patterns = args.enable + self.unskip_patterns = args.unskip + self.timeout = args.timeout + self.fail_fast = args.fail_fast + self.iterations = args.iterations + self.verbose = args.verbose + self.quiet = args.quiet + + try: + for name in args.module: + self.test_modules.append(_importlib.import_module(name)) + except ImportError as e: + raise PlanoError(e) + + def run(self): + if self.list_only: + print_tests(self.test_modules) + return + + for i in range(self.iterations): + run_tests(self.test_modules, include=self.include_patterns, + exclude=self.exclude_patterns, + enable=self.enable_patterns, unskip=self.unskip_patterns, + test_timeout=self.timeout, fail_fast=self.fail_fast, + verbose=self.verbose, quiet=self.quiet) + +class PlanoTestSkipped(Exception): + pass + +def test(_function=None, name=None, module=None, timeout=None, disabled=False): + class Test: + def __init__(self, function): + self.function = function + self.name = name + self.module = module + self.timeout = timeout + self.disabled = disabled + + if self.name is None: + self.name = self.function.__name__.strip("_").replace("_", "-") + + if self.module is None: + self.module = _inspect.getmodule(self.function) + + if not hasattr(self.module, "_plano_tests"): + 
self.module._plano_tests = list() + + self.module._plano_tests.append(self) + + def __call__(self, test_run, unskipped): + try: + ret = self.function() + + if _inspect.iscoroutine(ret): + _asyncio.run(ret) + except SystemExit as e: + error(e) + raise PlanoError("System exit with code {}".format(e)) + + def __repr__(self): + return "test '{}:{}'".format(self.module.__name__, self.name) + + if _function is None: + return Test + else: + return Test(_function) + +def add_test(name, func, *args, **kwargs): + test(_functools.partial(func, *args, **kwargs), name=name, module=_inspect.getmodule(func)) + +def skip_test(reason=None): + if _inspect.stack()[2].frame.f_locals["unskipped"]: + return + + raise PlanoTestSkipped(reason) + +class expect_exception: + def __init__(self, exception_type=Exception, contains=None): + self.exception_type = exception_type + self.contains = contains + + def __enter__(self): + pass + + def __exit__(self, exc_type, exc_value, traceback): + if exc_value is None: + assert False, "Never encountered expected exception {}".format(self.exception_type.__name__) + + if self.contains is None: + return isinstance(exc_value, self.exception_type) + else: + return isinstance(exc_value, self.exception_type) and self.contains in str(exc_value) + +class expect_error(expect_exception): + def __init__(self, contains=None): + super().__init__(PlanoError, contains=contains) + +class expect_timeout(expect_exception): + def __init__(self, contains=None): + super().__init__(PlanoTimeout, contains=contains) + +class expect_system_exit(expect_exception): + def __init__(self, contains=None): + super().__init__(SystemExit, contains=contains) + +class expect_output(temp_file): + def __init__(self, equals=None, contains=None, startswith=None, endswith=None): + super().__init__() + self.equals = equals + self.contains = contains + self.startswith = startswith + self.endswith = endswith + + def __exit__(self, exc_type, exc_value, traceback): + result = read(self.file) + + if self.equals is None: + assert len(result) > 0, result + else: + assert result == self.equals, result + + if self.contains is not None: + assert self.contains in result, result + + if self.startswith is not None: + assert result.startswith(self.startswith), result + + if self.endswith is not None: + assert result.endswith(self.endswith), result + + super().__exit__(exc_type, exc_value, traceback) + +def print_tests(modules): + if _inspect.ismodule(modules): + modules = (modules,) + + for module in modules: + for test in module._plano_tests: + flags = "(disabled)" if test.disabled else "" + print(" ".join((str(test), flags)).strip()) + +def run_tests(modules, include="*", exclude=(), enable=(), unskip=(), test_timeout=300, + fail_fast=False, verbose=False, quiet=False): + if _inspect.ismodule(modules): + modules = (modules,) + + if is_string(include): + include = (include,) + + if is_string(exclude): + exclude = (exclude,) + + if is_string(enable): + enable = (enable,) + + if is_string(unskip): + unskip = (unskip,) + + test_run = TestRun(test_timeout=test_timeout, fail_fast=fail_fast, verbose=verbose, quiet=quiet) + + if verbose: + notice("Starting {}", test_run) + elif not quiet: + cprint("=== Configuration ===", color="cyan") + + props = ( + ("Modules", format_empty(", ".join([x.__name__ for x in modules]), "[none]")), + ("Test timeout", format_duration(test_timeout)), + ("Fail fast", fail_fast), + ) + + print_properties(props) + print() + + stop = False + + for module in modules: + if stop: + break + + if verbose: 
notice("Running tests from module {} (file {})", repr(module.__name__), repr(module.__file__)) + elif not quiet: + cprint("=== Module {} ===".format(repr(module.__name__)), color="cyan") + + if not hasattr(module, "_plano_tests"): + warning("Module {} has no tests", repr(module.__name__)) + continue + + for test in module._plano_tests: + if stop: + break + + if test.disabled and not any([_fnmatch.fnmatchcase(test.name, x) for x in enable]): + continue + + included = any([_fnmatch.fnmatchcase(test.name, x) for x in include]) + excluded = any([_fnmatch.fnmatchcase(test.name, x) for x in exclude]) + unskipped = any([_fnmatch.fnmatchcase(test.name, x) for x in unskip]) + + if included and not excluded: + test_run.tests.append(test) + stop = _run_test(test_run, test, unskipped) + + if not verbose and not quiet: + print() + + total = len(test_run.tests) + skipped = len(test_run.skipped_tests) + failed = len(test_run.failed_tests) + + if total == 0: + raise PlanoError("No tests ran") + + notes = "" + + if skipped != 0: + notes = "({} skipped)".format(skipped) + + if failed == 0: + result_message = "All tests passed {}".format(notes).strip() + else: + result_message = "{} {} failed {}".format(failed, plural("test", failed), notes).strip() + + if verbose: + if failed == 0: + notice(result_message) + else: + error(result_message) + elif not quiet: + cprint("=== Summary ===", color="cyan") + + props = ( + ("Total", total), + ("Skipped", skipped, format_not_empty(", ".join([x.name for x in test_run.skipped_tests]), "({})")), + ("Failed", failed, format_not_empty(", ".join([x.name for x in test_run.failed_tests]), "({})")), + ) + + print_properties(props) + print() + + cprint("=== RESULT ===", color="cyan") + + if failed == 0: + cprint(result_message, color="green") + else: + cprint(result_message, color="red", bright="True") + + print() + + if failed != 0: + raise PlanoError(result_message) + +def _run_test(test_run, test, unskipped): + if test_run.verbose: + notice("Running {}", test) + elif not test_run.quiet: + print("{:.<65} ".format(test.name + " "), end="") + + timeout = nvl(test.timeout, test_run.test_timeout) + + with temp_file() as output_file: + try: + with Timer(timeout=timeout) as timer: + if test_run.verbose: + test(test_run, unskipped) + else: + with output_redirected(output_file, quiet=True): + test(test_run, unskipped) + except KeyboardInterrupt: + raise + except PlanoTestSkipped as e: + test_run.skipped_tests.append(test) + + if test_run.verbose: + notice("{} SKIPPED ({})", test, format_duration(timer.elapsed_time)) + elif not test_run.quiet: + _print_test_result("SKIPPED", timer, "yellow") + print("Reason: {}".format(str(e))) + except Exception as e: + test_run.failed_tests.append(test) + + if test_run.verbose: + _traceback.print_exc() + + if isinstance(e, PlanoTimeout): + error("{} **FAILED** (TIMEOUT) ({})", test, format_duration(timer.elapsed_time)) + else: + error("{} **FAILED** ({})", test, format_duration(timer.elapsed_time)) + elif not test_run.quiet: + if isinstance(e, PlanoTimeout): + _print_test_result("**FAILED** (TIMEOUT)", timer, color="red", bright=True) + else: + _print_test_result("**FAILED**", timer, color="red", bright=True) + + _print_test_error(e) + _print_test_output(output_file) + + if test_run.fail_fast: + return True + else: + test_run.passed_tests.append(test) + + if test_run.verbose: + notice("{} PASSED ({})", test, format_duration(timer.elapsed_time)) + elif not test_run.quiet: + _print_test_result("PASSED", timer) + +def _print_test_result(status, timer, 
color="white", bright=False): + cprint("{:<7}".format(status), color=color, bright=bright, end="") + print("{:>6}".format(format_duration(timer.elapsed_time, align=True))) + +def _print_test_error(e): + cprint("--- Error ---", color="yellow") + + if isinstance(e, PlanoProcessError): + print("> {}".format(str(e))) + else: + lines = _traceback.format_exc().rstrip().split("\n") + lines = ["> {}".format(x) for x in lines] + + print("\n".join(lines)) + +def _print_test_output(output_file): + if get_file_size(output_file) == 0: + return + + cprint("--- Output ---", color="yellow") + + with open(output_file, "r") as out: + for line in out: + print("> {}".format(line), end="") + +class TestRun: + def __init__(self, test_timeout=None, fail_fast=False, verbose=False, quiet=False): + self.test_timeout = test_timeout + self.fail_fast = fail_fast + self.verbose = verbose + self.quiet = quiet + + self.tests = list() + self.skipped_tests = list() + self.failed_tests = list() + self.passed_tests = list() + + def __repr__(self): + return format_repr(self) + +def _main(): # pragma: nocover + PlanoTestCommand().main()