mirror of https://github.com/jupyter/docker-stacks.git
synced 2025-10-07 18:14:05 +00:00

commit fe3968efe0: "pre-commit: run black autoformatter on all files"
committed by Erik Sundell
parent commit: a99a182940
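The change is mechanical: black rewrites every touched file the same way (quote normalization, long calls exploded across lines). A minimal sketch of that rewrite, assuming only that the black package is importable; the repository's actual pre-commit hook configuration is not part of this diff:

import black  # assumption: black is installed in the local environment

# One of the lines changed in this commit, passed through black's string API:
# single quotes become double quotes, layout is normalized.
src = "c.NotebookApp.ip = '0.0.0.0'\n"
print(black.format_str(src, mode=black.FileMode()), end="")
# prints: c.NotebookApp.ip = "0.0.0.0"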
@@ -22,10 +22,12 @@ def test_nbconvert(container, test_file):
    output_dir = "/tmp"
    timeout_ms = 600
    LOGGER.info(f"Test that {test_file} notebook can be executed ...")
-    command = "jupyter nbconvert --to markdown " + \
-        f"--ExecutePreprocessor.timeout={timeout_ms} " + \
-        f"--output-dir {output_dir} " + \
-        f"--execute {cont_data_dir}/{test_file}.ipynb"
+    command = (
+        "jupyter nbconvert --to markdown "
+        + f"--ExecutePreprocessor.timeout={timeout_ms} "
+        + f"--output-dir {output_dir} "
+        + f"--execute {cont_data_dir}/{test_file}.ipynb"
+    )
    c = container.run(
        volumes={host_data_dir: {"bind": cont_data_dir, "mode": "ro"}},
        tty=True,
@@ -8,7 +8,7 @@ import errno
import stat

c = get_config()  # noqa: F821
-c.NotebookApp.ip = '0.0.0.0'
+c.NotebookApp.ip = "0.0.0.0"
c.NotebookApp.port = 8888
c.NotebookApp.open_browser = False

@@ -16,9 +16,9 @@ c.NotebookApp.open_browser = False
c.FileContentsManager.delete_to_trash = False

# Generate a self-signed certificate
-if 'GEN_CERT' in os.environ:
+if "GEN_CERT" in os.environ:
    dir_name = jupyter_data_dir()
-    pem_file = os.path.join(dir_name, 'notebook.pem')
+    pem_file = os.path.join(dir_name, "notebook.pem")
    try:
        os.makedirs(dir_name)
    except OSError as exc:  # Python >2.5
@@ -28,28 +28,42 @@ if 'GEN_CERT' in os.environ:
            raise

    # Generate an openssl.cnf file to set the distinguished name
-    cnf_file = os.path.join(os.getenv('CONDA_DIR', '/usr/lib'), 'ssl', 'openssl.cnf')
+    cnf_file = os.path.join(os.getenv("CONDA_DIR", "/usr/lib"), "ssl", "openssl.cnf")
    if not os.path.isfile(cnf_file):
-        with open(cnf_file, 'w') as fh:
-            fh.write('''\
+        with open(cnf_file, "w") as fh:
+            fh.write(
+                """\
[req]
distinguished_name = req_distinguished_name
[req_distinguished_name]
-''')
+"""
+            )

    # Generate a certificate if one doesn't exist on disk
-    subprocess.check_call(['openssl', 'req', '-new',
-        '-newkey', 'rsa:2048',
-        '-days', '365',
-        '-nodes', '-x509',
-        '-subj', '/C=XX/ST=XX/L=XX/O=generated/CN=generated',
-        '-keyout', pem_file,
-        '-out', pem_file])
+    subprocess.check_call(
+        [
+            "openssl",
+            "req",
+            "-new",
+            "-newkey",
+            "rsa:2048",
+            "-days",
+            "365",
+            "-nodes",
+            "-x509",
+            "-subj",
+            "/C=XX/ST=XX/L=XX/O=generated/CN=generated",
+            "-keyout",
+            pem_file,
+            "-out",
+            pem_file,
+        ]
+    )
    # Restrict access to the file
    os.chmod(pem_file, stat.S_IRUSR | stat.S_IWUSR)
    c.NotebookApp.certfile = pem_file

# Change default umask for all subprocesses of the notebook server if set in
# the environment
-if 'NB_UMASK' in os.environ:
-    os.umask(int(os.environ['NB_UMASK'], 8))
+if "NB_UMASK" in os.environ:
+    os.umask(int(os.environ["NB_UMASK"], 8))
@@ -11,59 +11,52 @@ LOGGER = logging.getLogger(__name__)
def test_cli_args(container, http_client):
    """Container should respect notebook server command line args
    (e.g., disabling token security)"""
-    c = container.run(
-        command=["start-notebook.sh", "--NotebookApp.token=''"]
-    )
-    resp = http_client.get('http://localhost:8888')
+    c = container.run(command=["start-notebook.sh", "--NotebookApp.token=''"])
+    resp = http_client.get("http://localhost:8888")
    resp.raise_for_status()
-    logs = c.logs(stdout=True).decode('utf-8')
+    logs = c.logs(stdout=True).decode("utf-8")
    LOGGER.debug(logs)
-    assert 'login_submit' not in resp.text
+    assert "login_submit" not in resp.text


-@pytest.mark.filterwarnings('ignore:Unverified HTTPS request')
+@pytest.mark.filterwarnings("ignore:Unverified HTTPS request")
def test_unsigned_ssl(container, http_client):
    """Container should generate a self-signed SSL certificate
    and notebook server should use it to enable HTTPS.
    """
-    container.run(
-        environment=['GEN_CERT=yes']
-    )
+    container.run(environment=["GEN_CERT=yes"])
    # NOTE: The requests.Session backing the http_client fixture does not retry
    # properly while the server is booting up. An SSL handshake error seems to
    # abort the retry logic. Forcing a long sleep for the moment until I have
    # time to dig more.
    time.sleep(5)
-    resp = http_client.get('https://localhost:8888', verify=False)
+    resp = http_client.get("https://localhost:8888", verify=False)
    resp.raise_for_status()
-    assert 'login_submit' in resp.text
+    assert "login_submit" in resp.text


def test_uid_change(container):
    """Container should change the UID of the default user."""
    c = container.run(
        tty=True,
-        user='root',
-        environment=['NB_UID=1010'],
-        command=['start.sh', 'bash', '-c', 'id && touch /opt/conda/test-file']
+        user="root",
+        environment=["NB_UID=1010"],
+        command=["start.sh", "bash", "-c", "id && touch /opt/conda/test-file"],
    )
    # usermod is slow so give it some time
    c.wait(timeout=120)
-    assert 'uid=1010(jovyan)' in c.logs(stdout=True).decode('utf-8')
+    assert "uid=1010(jovyan)" in c.logs(stdout=True).decode("utf-8")


def test_gid_change(container):
    """Container should change the GID of the default user."""
    c = container.run(
-        tty=True,
-        user='root',
-        environment=['NB_GID=110'],
-        command=['start.sh', 'id']
+        tty=True, user="root", environment=["NB_GID=110"], command=["start.sh", "id"]
    )
    c.wait(timeout=10)
-    logs = c.logs(stdout=True).decode('utf-8')
-    assert 'gid=110(jovyan)' in logs
-    assert 'groups=110(jovyan),100(users)' in logs
+    logs = c.logs(stdout=True).decode("utf-8")
+    assert "gid=110(jovyan)" in logs
+    assert "groups=110(jovyan),100(users)" in logs


def test_nb_user_change(container):
@@ -72,11 +65,8 @@ def test_nb_user_change(container):
    running_container = container.run(
        tty=True,
        user="root",
-        environment=[
-            f"NB_USER={nb_user}",
-            "CHOWN_HOME=yes"
-        ],
-        command=['start.sh', 'bash', '-c', 'sleep infinity']
+        environment=[f"NB_USER={nb_user}", "CHOWN_HOME=yes"],
+        command=["start.sh", "bash", "-c", "sleep infinity"],
    )

    # Give the chown time to complete. Use sleep, not wait, because the
@@ -98,25 +88,27 @@ def test_nb_user_change(container):
    expected_output = f"{nb_user} users"
    cmd = running_container.exec_run(command, workdir=f"/home/{nb_user}")
    output = cmd.output.decode("utf-8").strip("\n")
-    assert output == expected_output, f"Bad owner for the {nb_user} home folder {output}, expected {expected_output}"
+    assert (
+        output == expected_output
+    ), f"Bad owner for the {nb_user} home folder {output}, expected {expected_output}"


def test_chown_extra(container):
    """Container should change the UID/GID of CHOWN_EXTRA."""
    c = container.run(
        tty=True,
-        user='root',
+        user="root",
        environment=[
-            'NB_UID=1010',
-            'NB_GID=101',
-            'CHOWN_EXTRA=/opt/conda',
-            'CHOWN_EXTRA_OPTS=-R'
+            "NB_UID=1010",
+            "NB_GID=101",
+            "CHOWN_EXTRA=/opt/conda",
+            "CHOWN_EXTRA_OPTS=-R",
        ],
-        command=['start.sh', 'bash', '-c', 'stat -c \'%n:%u:%g\' /opt/conda/LICENSE.txt']
+        command=["start.sh", "bash", "-c", "stat -c '%n:%u:%g' /opt/conda/LICENSE.txt"],
    )
    # chown is slow so give it some time
    c.wait(timeout=120)
-    assert '/opt/conda/LICENSE.txt:1010:101' in c.logs(stdout=True).decode('utf-8')
+    assert "/opt/conda/LICENSE.txt:1010:101" in c.logs(stdout=True).decode("utf-8")


def test_chown_home(container):
@@ -124,64 +116,66 @@ def test_chown_home(container):
    group to the current value of NB_UID and NB_GID."""
    c = container.run(
        tty=True,
-        user='root',
-        environment=[
-            'CHOWN_HOME=yes',
-            'CHOWN_HOME_OPTS=-R'
-        command=['start.sh', 'bash', '-c', 'chown root:root /home/jovyan && ls -alsh /home']
+        user="root",
+        environment=["CHOWN_HOME=yes", "CHOWN_HOME_OPTS=-R"],
+        command=[
+            "start.sh",
+            "bash",
+            "-c",
+            "chown root:root /home/jovyan && ls -alsh /home",
+        ],
    )
    c.wait(timeout=120)
-    assert "Changing ownership of /home/jovyan to 1000:100 with options '-R'" in c.logs(stdout=True).decode('utf-8')
+    assert "Changing ownership of /home/jovyan to 1000:100 with options '-R'" in c.logs(
+        stdout=True
+    ).decode("utf-8")


def test_sudo(container):
    """Container should grant passwordless sudo to the default user."""
    c = container.run(
        tty=True,
-        user='root',
-        environment=['GRANT_SUDO=yes'],
-        command=['start.sh', 'sudo', 'id']
+        user="root",
+        environment=["GRANT_SUDO=yes"],
+        command=["start.sh", "sudo", "id"],
    )
    rv = c.wait(timeout=10)
    assert rv == 0 or rv["StatusCode"] == 0
-    assert 'uid=0(root)' in c.logs(stdout=True).decode('utf-8')
+    assert "uid=0(root)" in c.logs(stdout=True).decode("utf-8")


def test_sudo_path(container):
    """Container should include /opt/conda/bin in the sudo secure_path."""
    c = container.run(
        tty=True,
-        user='root',
-        environment=['GRANT_SUDO=yes'],
-        command=['start.sh', 'sudo', 'which', 'jupyter']
+        user="root",
+        environment=["GRANT_SUDO=yes"],
+        command=["start.sh", "sudo", "which", "jupyter"],
    )
    rv = c.wait(timeout=10)
    assert rv == 0 or rv["StatusCode"] == 0
-    assert c.logs(stdout=True).decode('utf-8').rstrip().endswith('/opt/conda/bin/jupyter')
+    assert (
+        c.logs(stdout=True).decode("utf-8").rstrip().endswith("/opt/conda/bin/jupyter")
+    )


def test_sudo_path_without_grant(container):
    """Container should include /opt/conda/bin in the sudo secure_path."""
-    c = container.run(
-        tty=True,
-        user='root',
-        command=['start.sh', 'which', 'jupyter']
-    )
+    c = container.run(tty=True, user="root", command=["start.sh", "which", "jupyter"])
    rv = c.wait(timeout=10)
    assert rv == 0 or rv["StatusCode"] == 0
-    assert c.logs(stdout=True).decode('utf-8').rstrip().endswith('/opt/conda/bin/jupyter')
+    assert (
+        c.logs(stdout=True).decode("utf-8").rstrip().endswith("/opt/conda/bin/jupyter")
+    )


def test_group_add(container, tmpdir):
    """Container should run with the specified uid, gid, and secondary
    group.
    """
-    c = container.run(
-        user='1010:1010',
-        group_add=['users'],
-        command=['start.sh', 'id']
-    )
+    c = container.run(user="1010:1010", group_add=["users"], command=["start.sh", "id"])
    rv = c.wait(timeout=5)
    assert rv == 0 or rv["StatusCode"] == 0
-    assert 'uid=1010 gid=1010 groups=1010,100(users)' in c.logs(stdout=True).decode('utf-8')
+    assert "uid=1010 gid=1010 groups=1010,100(users)" in c.logs(stdout=True).decode(
+        "utf-8"
+    )
@@ -23,8 +23,7 @@ def test_package_manager(container, package_manager, version_arg):
        f"Test that the package manager {package_manager} is working properly ..."
    )
    c = container.run(
-        tty=True,
-        command=["start.sh", "bash", "-c", f"{package_manager} {version_arg}"]
+        tty=True, command=["start.sh", "bash", "-c", f"{package_manager} {version_arg}"]
    )
    rv = c.wait(timeout=5)
    logs = c.logs(stdout=True).decode("utf-8")
@@ -9,8 +9,7 @@ LOGGER = logging.getLogger(__name__)
def test_pandoc(container):
    """Pandoc shall be able to convert MD to HTML."""
    c = container.run(
-        tty=True,
-        command=["start.sh", "bash", "-c", 'echo "**BOLD**" | pandoc']
+        tty=True, command=["start.sh", "bash", "-c", 'echo "**BOLD**" | pandoc']
    )
    c.wait(timeout=10)
    logs = c.logs(stdout=True).decode("utf-8")
conftest.py (25 changed lines)
@@ -14,26 +14,26 @@ from requests.adapters import HTTPAdapter
LOGGER = logging.getLogger(__name__)


-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
def http_client():
    """Requests session with retries and backoff."""
    s = requests.Session()
    retries = Retry(total=5, backoff_factor=1)
-    s.mount('http://', HTTPAdapter(max_retries=retries))
-    s.mount('https://', HTTPAdapter(max_retries=retries))
+    s.mount("http://", HTTPAdapter(max_retries=retries))
+    s.mount("https://", HTTPAdapter(max_retries=retries))
    return s


-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
def docker_client():
    """Docker client configured based on the host environment"""
    return docker.from_env()


-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
def image_name():
    """Image name to test"""
-    return os.getenv('TEST_IMAGE')
+    return os.getenv("TEST_IMAGE")


class TrackedContainer:
@@ -78,7 +78,9 @@ class TrackedContainer:
        all_kwargs.update(self.kwargs)
        all_kwargs.update(kwargs)
        LOGGER.info(f"Running {self.image_name} with args {all_kwargs} ...")
-        self.container = self.docker_client.containers.run(self.image_name, **all_kwargs)
+        self.container = self.docker_client.containers.run(
+            self.image_name, **all_kwargs
+        )
        return self.container

    def remove(self):
@@ -87,7 +89,7 @@ class TrackedContainer:
        self.container.remove(force=True)


-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
def container(docker_client, image_name):
    """Notebook container with initial configuration appropriate for testing
    (e.g., HTTP port exposed to the host for HTTP calls).
@@ -95,12 +97,7 @@ def container(docker_client, image_name):
    Yields the container instance and kills it when the caller is done with it.
    """
    container = TrackedContainer(
-        docker_client,
-        image_name,
-        detach=True,
-        ports={
-            '8888/tcp': 8888
-        }
+        docker_client, image_name, detach=True, ports={"8888/tcp": 8888}
    )
    yield container
    container.remove()
@@ -9,8 +9,7 @@ def test_julia(container):
    """Basic julia test"""
    LOGGER.info("Test that julia is correctly installed ...")
    running_container = container.run(
-        tty=True,
-        command=["start.sh", "bash", "-c", "sleep infinity"]
+        tty=True, command=["start.sh", "bash", "-c", "sleep infinity"]
    )
    command = "julia --version"
    cmd = running_container.exec_run(command)
docs/conf.py (92 changed lines)
@@ -25,40 +25,37 @@

# If your documentation needs a minimal Sphinx version, state it here.
#
-needs_sphinx = '2.1'
+needs_sphinx = "2.1"

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
-extensions = [
-    'myst_parser',
-    'sphinx_copybutton'
-]
+extensions = ["myst_parser", "sphinx_copybutton"]

# Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = ["_templates"]

source_suffix = {
-    '.rst': 'restructuredtext',
-    '.md': 'markdown',
+    ".rst": "restructuredtext",
+    ".md": "markdown",
}

# The master toctree document.
-master_doc = 'index'
+master_doc = "index"

# General information about the project.
-project = 'docker-stacks'
-copyright = '2018- Project Jupyter'
-author = 'Project Jupyter'
+project = "docker-stacks"
+copyright = "2018- Project Jupyter"
+author = "Project Jupyter"

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
-version = 'latest'
+version = "latest"
# The full version, including alpha/beta/rc tags.
-release = 'latest'
+release = "latest"

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
@@ -70,10 +67,10 @@ language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
-exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
+exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]

# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
+pygments_style = "sphinx"

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
@@ -84,27 +81,27 @@ todo_include_todos = False
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
-html_theme = 'alabaster'
+html_theme = "alabaster"

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
-    'description': "Jupyter Docker Stacks",
-    'fixed_sidebar': False,
-    'show_relbars': True,
-    'github_user': 'jupyter',
-    'github_repo': 'docker-stacks',
-    'github_type': 'star',
-    'logo': 'jupyter-logo.svg',
-    'logo_text_align': 'left'
+    "description": "Jupyter Docker Stacks",
+    "fixed_sidebar": False,
+    "show_relbars": True,
+    "github_user": "jupyter",
+    "github_repo": "docker-stacks",
+    "github_type": "star",
+    "logo": "jupyter-logo.svg",
+    "logo_text_align": "left",
}

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
+html_static_path = ["_static"]

# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
@@ -115,18 +112,18 @@ html_static_path = ['_static']
# 'searchbox.html']``.
#
html_sidebars = {
-    '**': [
-        'about.html',
-        'navigation.html',
-        'relations.html',
-        'searchbox.html',
+    "**": [
+        "about.html",
+        "navigation.html",
+        "relations.html",
+        "searchbox.html",
    ]
}

# -- Options for HTMLHelp output ------------------------------------------

# Output file base name for HTML help builder.
-htmlhelp_basename = 'docker-stacksdoc'
+htmlhelp_basename = "docker-stacksdoc"


# -- Options for LaTeX output ---------------------------------------------
@@ -135,15 +132,12 @@ latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',

# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',

# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',

# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
@@ -153,8 +147,13 @@ latex_elements = {
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
-    (master_doc, 'docker-stacks.tex', 'docker-stacks Documentation',
-     'Project Jupyter', 'manual'),
+    (
+        master_doc,
+        "docker-stacks.tex",
+        "docker-stacks Documentation",
+        "Project Jupyter",
+        "manual",
+    ),
]


@@ -162,10 +161,7 @@ latex_documents = [

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
-man_pages = [
-    (master_doc, 'docker-stacks', 'docker-stacks Documentation',
-     [author], 1)
-]
+man_pages = [(master_doc, "docker-stacks", "docker-stacks Documentation", [author], 1)]


# -- Options for Texinfo output -------------------------------------------
@@ -174,9 +170,15 @@ man_pages = [
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
-    (master_doc, 'docker-stacks', 'docker-stacks Documentation',
-     author, 'docker-stacks', 'One line description of project.',
-     'Miscellaneous'),
+    (
+        master_doc,
+        "docker-stacks",
+        "docker-stacks Documentation",
+        author,
+        "docker-stacks",
+        "One line description of project.",
+        "Miscellaneous",
+    ),
]

# -- Extension configuration ----------------------------------------------
@@ -187,4 +189,4 @@ linkcheck_anchors = False
# -- Translation ----------------------------------------------------------

gettext_uuid = True
-locale_dirs = ['locale/']
+locale_dirs = ["locale/"]
@@ -10,8 +10,7 @@ def test_inkscape(container):
    """Inkscape shall be installed to be able to convert SVG files."""
    LOGGER.info("Test that inkscape is working by printing its version ...")
    c = container.run(
-        tty=True,
-        command=["start.sh", "bash", "-c", "inkscape --version"]
+        tty=True, command=["start.sh", "bash", "-c", "inkscape --version"]
    )
    c.wait(timeout=10)
    logs = c.logs(stdout=True).decode("utf-8")
@@ -24,7 +24,9 @@ def test_nbconvert(container, test_file, output_format):
    host_data_dir = os.path.join(THIS_DIR, "data")
    cont_data_dir = "/home/jovyan/data"
    output_dir = "/tmp"
-    LOGGER.info(f"Test that the example notebook {test_file} can be converted to {output_format} ...")
+    LOGGER.info(
+        f"Test that the example notebook {test_file} can be converted to {output_format} ..."
+    )
    command = f"jupyter nbconvert {cont_data_dir}/{test_file}.ipynb --output-dir {output_dir} --to {output_format}"
    c = container.run(
        volumes={host_data_dir: {"bind": cont_data_dir, "mode": "ro"}},
@@ -9,10 +9,9 @@ LOGGER = logging.getLogger(__name__)
def test_spark_shell(container):
    """Checking if Spark (spark-shell) is running properly"""
    c = container.run(
-        tty=True,
-        command=['start.sh', 'bash', '-c', 'spark-shell <<< "1+1"']
+        tty=True, command=["start.sh", "bash", "-c", 'spark-shell <<< "1+1"']
    )
    c.wait(timeout=60)
-    logs = c.logs(stdout=True).decode('utf-8')
+    logs = c.logs(stdout=True).decode("utf-8")
    LOGGER.debug(logs)
-    assert 'res0: Int = 2' in logs, "spark-shell does not work"
+    assert "res0: Int = 2" in logs, "spark-shell does not work"
@@ -14,8 +14,9 @@ s = 1 + np.sin(2 * np.pi * t)
fig, ax = plt.subplots()
ax.plot(t, s)

-ax.set(xlabel='time (s)', ylabel='voltage (mV)',
-       title='About as simple as it gets, folks')
+ax.set(
+    xlabel="time (s)", ylabel="voltage (mV)", title="About as simple as it gets, folks"
+)
ax.grid()
# Note that the test can be run headless by checking if an image is produced
file_path = os.path.join("/tmp", "test.png")
@@ -3,16 +3,22 @@ import matplotlib
import matplotlib.pyplot as plt
import os

-matplotlib.rcParams['pgf.texsystem'] = 'pdflatex'
-matplotlib.rcParams.update({'font.family': 'serif', 'font.size': 18,
-                            'axes.labelsize': 20, 'axes.titlesize': 24,
-                            'figure.titlesize': 28})
-matplotlib.rcParams['text.usetex'] = True
+matplotlib.rcParams["pgf.texsystem"] = "pdflatex"
+matplotlib.rcParams.update(
+    {
+        "font.family": "serif",
+        "font.size": 18,
+        "axes.labelsize": 20,
+        "axes.titlesize": 24,
+        "figure.titlesize": 28,
+    }
+)
+matplotlib.rcParams["text.usetex"] = True

fig, ax = plt.subplots(1, 1)
x = [1, 2]
y = [1, 2]
-ax.plot(x, y, label='a label')
+ax.plot(x, y, label="a label")
ax.legend(fontsize=15)

file_path = os.path.join("/tmp", "test_fonts.png")
@@ -26,8 +26,7 @@ def test_check_extension(container, extension):
    """
    LOGGER.info(f"Checking the extension: {extension} ...")
    c = container.run(
-        tty=True,
-        command=["start.sh", "jupyter", "labextension", "check", extension]
+        tty=True, command=["start.sh", "jupyter", "labextension", "check", extension]
    )
    rv = c.wait(timeout=10)
    logs = c.logs(stdout=True).decode("utf-8")
@@ -13,9 +13,17 @@ THIS_DIR = os.path.dirname(os.path.realpath(__file__))
@pytest.mark.parametrize(
    "test_file,expected_file,description",
    [
-        ("matplotlib_1.py", "test.png", "Test that matplotlib is able to plot a graph and write it as an image ..."),
-        ("matplotlib_fonts_1.py", "test_fonts.png", "Test cm-super latex labels in matplotlib ...")
-    ]
+        (
+            "matplotlib_1.py",
+            "test.png",
+            "Test that matplotlib is able to plot a graph and write it as an image ...",
+        ),
+        (
+            "matplotlib_fonts_1.py",
+            "test_fonts.png",
+            "Test cm-super latex labels in matplotlib ...",
+        ),
+    ],
)
def test_matplotlib(container, test_file, expected_file, description):
    """Various tests performed on matplotlib
@@ -19,7 +19,9 @@ BUILD_TIMESTAMP = datetime.datetime.utcnow().isoformat()[:-7] + "Z"
MARKDOWN_LINE_BREAK = "<br />"


-def append_build_history_line(short_image_name: str, owner: str, wiki_path: str, all_tags: List[str]) -> None:
+def append_build_history_line(
+    short_image_name: str, owner: str, wiki_path: str, all_tags: List[str]
+) -> None:
    logger.info("Appending build history line")

    date_column = f"`{BUILD_TIMESTAMP}`"
@@ -28,11 +30,13 @@ def append_build_history_line(short_image_name: str, owner: str, wiki_path: str,
    )
    commit_hash = GitHelper.commit_hash()
    commit_hash_tag = GitHelper.commit_hash_tag()
-    links_column = MARKDOWN_LINE_BREAK.join([
-        f"[Git diff](https://github.com/jupyter/docker-stacks/commit/{commit_hash})",
-        f"[Dockerfile](https://github.com/jupyter/docker-stacks/blob/{commit_hash}/{short_image_name}/Dockerfile)",
-        f"[Build manifest](./{short_image_name}-{commit_hash_tag})"
-    ])
+    links_column = MARKDOWN_LINE_BREAK.join(
+        [
+            f"[Git diff](https://github.com/jupyter/docker-stacks/commit/{commit_hash})",
+            f"[Dockerfile](https://github.com/jupyter/docker-stacks/blob/{commit_hash}/{short_image_name}/Dockerfile)",
+            f"[Build manifest](./{short_image_name}-{commit_hash_tag})",
+        ]
+    )
    build_history_line = "|".join([date_column, image_column, links_column]) + "|"

    home_wiki_file = os.path.join(wiki_path, "Home.md")
@@ -49,16 +53,19 @@ def create_manifest_file(
    owner: str,
    wiki_path: str,
    manifests: List[ManifestInterface],
-    container
+    container,
) -> None:
    manifest_names = [manifest.__name__ for manifest in manifests]
    logger.info(f"Using manifests: {manifest_names}")

    commit_hash_tag = GitHelper.commit_hash_tag()
-    manifest_file = os.path.join(wiki_path, f"manifests/{short_image_name}-{commit_hash_tag}.md")
+    manifest_file = os.path.join(
+        wiki_path, f"manifests/{short_image_name}-{commit_hash_tag}.md"
+    )

-    markdown_pieces = [ManifestHeader.create_header(short_image_name, owner, BUILD_TIMESTAMP)] + \
-        [manifest.markdown_piece(container) for manifest in manifests]
+    markdown_pieces = [
+        ManifestHeader.create_header(short_image_name, owner, BUILD_TIMESTAMP)
+    ] + [manifest.markdown_piece(container) for manifest in manifests]
    markdown_content = "\n\n".join(markdown_pieces) + "\n"

    with open(manifest_file, "w") as f:
@@ -81,7 +88,9 @@ if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    arg_parser = argparse.ArgumentParser()
-    arg_parser.add_argument("--short-image-name", required=True, help="Short image name to apply tags for")
+    arg_parser.add_argument(
+        "--short-image-name", required=True, help="Short image name to apply tags for"
+    )
    arg_parser.add_argument("--owner", required=True, help="Owner of the image")
    arg_parser.add_argument("--wiki-path", required=True, help="Path to the wiki pages")
    args = arg_parser.parse_args()
@@ -8,7 +8,12 @@ logger = logging.getLogger(__name__)


class DockerRunner:
-    def __init__(self, image_name: str, docker_client=docker.from_env(), command: str = "sleep infinity"):
+    def __init__(
+        self,
+        image_name: str,
+        docker_client=docker.from_env(),
+        command: str = "sleep infinity",
+    ):
        self.container = None
        self.image_name = image_name
        self.command = command
@@ -17,7 +22,9 @@ class DockerRunner:
    def __enter__(self):
        logger.info(f"Creating container for image {self.image_name} ...")
        self.container = self.docker_client.containers.run(
-            image=self.image_name, command=self.command, detach=True,
+            image=self.image_name,
+            command=self.command,
+            detach=True,
        )
        logger.info(f"Container {self.container.name} created")
        return self.container
@@ -6,7 +6,9 @@ from .manifests import ManifestInterface
from .taggers import TaggerInterface


-def get_taggers_and_manifests(short_image_name: str) -> Tuple[List[TaggerInterface], List[ManifestInterface]]:
+def get_taggers_and_manifests(
+    short_image_name: str,
+) -> Tuple[List[TaggerInterface], List[ManifestInterface]]:
    taggers: List[TaggerInterface] = []
    manifests: List[ManifestInterface] = []
    while short_image_name is not None:
@@ -2,15 +2,29 @@
# Distributed under the terms of the Modified BSD License.
from dataclasses import dataclass, field
from typing import Optional, List
-from .taggers import TaggerInterface, \
-    SHATagger, \
-    UbuntuVersionTagger, PythonVersionTagger, \
-    JupyterNotebookVersionTagger, JupyterLabVersionTagger, JupyterHubVersionTagger, \
-    RVersionTagger, TensorflowVersionTagger, JuliaVersionTagger, \
-    SparkVersionTagger, HadoopVersionTagger, JavaVersionTagger
-from .manifests import ManifestInterface, \
-    CondaEnvironmentManifest, AptPackagesManifest, \
-    RPackagesManifest, JuliaPackagesManifest, SparkInfoManifest
+from .taggers import (
+    TaggerInterface,
+    SHATagger,
+    UbuntuVersionTagger,
+    PythonVersionTagger,
+    JupyterNotebookVersionTagger,
+    JupyterLabVersionTagger,
+    JupyterHubVersionTagger,
+    RVersionTagger,
+    TensorflowVersionTagger,
+    JuliaVersionTagger,
+    SparkVersionTagger,
+    HadoopVersionTagger,
+    JavaVersionTagger,
+)
+from .manifests import (
+    ManifestInterface,
+    CondaEnvironmentManifest,
+    AptPackagesManifest,
+    RPackagesManifest,
+    JuliaPackagesManifest,
+    SparkInfoManifest,
+)


@dataclass
@@ -25,41 +39,37 @@ ALL_IMAGES = {
        parent_image=None,
        taggers=[
            SHATagger,
-            UbuntuVersionTagger, PythonVersionTagger,
-            JupyterNotebookVersionTagger, JupyterLabVersionTagger, JupyterHubVersionTagger
+            UbuntuVersionTagger,
+            PythonVersionTagger,
+            JupyterNotebookVersionTagger,
+            JupyterLabVersionTagger,
+            JupyterHubVersionTagger,
        ],
-        manifests=[
-            CondaEnvironmentManifest, AptPackagesManifest
-        ]
-    ),
-    "minimal-notebook": ImageDescription(
-        parent_image="base-notebook"
-    ),
-    "scipy-notebook": ImageDescription(
-        parent_image="minimal-notebook"
+        manifests=[CondaEnvironmentManifest, AptPackagesManifest],
+    ),
+    "minimal-notebook": ImageDescription(parent_image="base-notebook"),
+    "scipy-notebook": ImageDescription(parent_image="minimal-notebook"),
    "r-notebook": ImageDescription(
        parent_image="minimal-notebook",
        taggers=[RVersionTagger],
-        manifests=[RPackagesManifest]
+        manifests=[RPackagesManifest],
    ),
    "tensorflow-notebook": ImageDescription(
-        parent_image="scipy-notebook",
-        taggers=[TensorflowVersionTagger]
+        parent_image="scipy-notebook", taggers=[TensorflowVersionTagger]
    ),
    "datascience-notebook": ImageDescription(
        parent_image="scipy-notebook",
        taggers=[RVersionTagger, JuliaVersionTagger],
-        manifests=[RPackagesManifest, JuliaPackagesManifest]
+        manifests=[RPackagesManifest, JuliaPackagesManifest],
    ),
    "pyspark-notebook": ImageDescription(
        parent_image="scipy-notebook",
        taggers=[SparkVersionTagger, HadoopVersionTagger, JavaVersionTagger],
-        manifests=[SparkInfoManifest]
+        manifests=[SparkInfoManifest],
    ),
    "all-spark-notebook": ImageDescription(
        parent_image="pyspark-notebook",
        taggers=[RVersionTagger],
-        manifests=[RPackagesManifest]
-    )
+        manifests=[RPackagesManifest],
+    ),
}
@@ -10,41 +10,49 @@ logger = logging.getLogger(__name__)


def quoted_output(container, cmd: str) -> str:
-    return "\n".join([
-        "```",
-        DockerRunner.run_simple_command(container, cmd, print_result=False),
-        "```"
-    ])
+    return "\n".join(
+        [
+            "```",
+            DockerRunner.run_simple_command(container, cmd, print_result=False),
+            "```",
+        ]
+    )


class ManifestHeader:
    """ManifestHeader doesn't fall under common interface and we run it separately"""

    @staticmethod
    def create_header(short_image_name: str, owner: str, build_timestamp: str) -> str:
        commit_hash = GitHelper.commit_hash()
        commit_hash_tag = GitHelper.commit_hash_tag()
        commit_message = GitHelper.commit_message()

-        image_size = docker["images", f"{owner}/{short_image_name}:latest", "--format", "{{.Size}}"]().rstrip()
+        image_size = docker[
+            "images", f"{owner}/{short_image_name}:latest", "--format", "{{.Size}}"
+        ]().rstrip()

-        return "\n".join([
-            f"# Build manifest for image: {short_image_name}:{commit_hash_tag}",
-            "",
-            "## Build Info",
-            "",
-            f"* Build datetime: {build_timestamp}",
-            f"* Docker image: {owner}/{short_image_name}:{commit_hash_tag}",
-            f"* Docker image size: {image_size}",
-            f"* Git commit SHA: [{commit_hash}](https://github.com/jupyter/docker-stacks/commit/{commit_hash})",
-            "* Git commit message:",
-            "```",
-            f"{commit_message}",
-            "```"
-        ])
+        return "\n".join(
+            [
+                f"# Build manifest for image: {short_image_name}:{commit_hash_tag}",
+                "",
+                "## Build Info",
+                "",
+                f"* Build datetime: {build_timestamp}",
+                f"* Docker image: {owner}/{short_image_name}:{commit_hash_tag}",
+                f"* Docker image size: {image_size}",
+                f"* Git commit SHA: [{commit_hash}](https://github.com/jupyter/docker-stacks/commit/{commit_hash})",
+                "* Git commit message:",
+                "```",
+                f"{commit_message}",
+                "```",
+            ]
+        )


class ManifestInterface:
    """Common interface for all manifests"""

    @staticmethod
    def markdown_piece(container) -> str:
        raise NotImplementedError
@@ -53,56 +61,66 @@ class ManifestInterface:
class CondaEnvironmentManifest(ManifestInterface):
    @staticmethod
    def markdown_piece(container) -> str:
-        return "\n".join([
-            "## Python Packages",
-            "",
-            quoted_output(container, "python --version"),
-            "",
-            quoted_output(container, "conda info"),
-            "",
-            quoted_output(container, "conda list")
-        ])
+        return "\n".join(
+            [
+                "## Python Packages",
+                "",
+                quoted_output(container, "python --version"),
+                "",
+                quoted_output(container, "conda info"),
+                "",
+                quoted_output(container, "conda list"),
+            ]
+        )


class AptPackagesManifest(ManifestInterface):
    @staticmethod
    def markdown_piece(container) -> str:
-        return "\n".join([
-            "## Apt Packages",
-            "",
-            quoted_output(container, "apt list --installed")
-        ])
+        return "\n".join(
+            ["## Apt Packages", "", quoted_output(container, "apt list --installed")]
+        )


class RPackagesManifest(ManifestInterface):
    @staticmethod
    def markdown_piece(container) -> str:
-        return "\n".join([
-            "## R Packages",
-            "",
-            quoted_output(container, "R --version"),
-            "",
-            quoted_output(container, "R --silent -e 'installed.packages(.Library)[, c(1,3)]'")
-        ])
+        return "\n".join(
+            [
+                "## R Packages",
+                "",
+                quoted_output(container, "R --version"),
+                "",
+                quoted_output(
+                    container, "R --silent -e 'installed.packages(.Library)[, c(1,3)]'"
+                ),
+            ]
+        )


class JuliaPackagesManifest(ManifestInterface):
    @staticmethod
    def markdown_piece(container) -> str:
-        return "\n".join([
-            "## Julia Packages",
-            "",
-            quoted_output(container, "julia -E 'using InteractiveUtils; versioninfo()'"),
-            "",
-            quoted_output(container, "julia -E 'import Pkg; Pkg.status()'")
-        ])
+        return "\n".join(
+            [
+                "## Julia Packages",
+                "",
+                quoted_output(
+                    container, "julia -E 'using InteractiveUtils; versioninfo()'"
+                ),
+                "",
+                quoted_output(container, "julia -E 'import Pkg; Pkg.status()'"),
+            ]
+        )


class SparkInfoManifest(ManifestInterface):
    @staticmethod
    def markdown_piece(container) -> str:
-        return "\n".join([
-            "## Apache Spark",
-            "",
-            quoted_output(container, "/usr/local/spark/bin/spark-submit --version"),
-        ])
+        return "\n".join(
+            [
+                "## Apache Spark",
+                "",
+                quoted_output(container, "/usr/local/spark/bin/spark-submit --version"),
+            ]
+        )
@@ -21,7 +21,9 @@ def tag_image(short_image_name: str, owner: str) -> None:
    for tagger in taggers:
        tagger_name = tagger.__name__
        tag_value = tagger.tag_value(container)
-        logger.info(f"Applying tag tagger_name: {tagger_name} tag_value: {tag_value}")
+        logger.info(
+            f"Applying tag tagger_name: {tagger_name} tag_value: {tag_value}"
+        )
        docker["tag", image, f"{owner}/{short_image_name}:{tag_value}"]()


@@ -29,7 +31,9 @@ if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    arg_parser = argparse.ArgumentParser()
-    arg_parser.add_argument("--short-image-name", required=True, help="Short image name to apply tags for")
+    arg_parser.add_argument(
+        "--short-image-name", required=True, help="Short image name to apply tags for"
+    )
    arg_parser.add_argument("--owner", required=True, help="Owner of the image")
    args = arg_parser.parse_args()
@@ -14,30 +14,27 @@ def _get_program_version(container, program: str) -> str:

def _get_env_variable(container, variable: str) -> str:
    env = DockerRunner.run_simple_command(
-        container,
-        cmd="env",
-        print_result=False
+        container, cmd="env", print_result=False
    ).split()
    for env_entry in env:
        if env_entry.startswith(variable):
-            return env_entry[len(variable) + 1:]
+            return env_entry[len(variable) + 1 :]
    raise KeyError(variable)


def _get_pip_package_version(container, package: str) -> str:
    VERSION_PREFIX = "Version: "
    package_info = DockerRunner.run_simple_command(
-        container,
-        cmd=f"pip show {package}",
-        print_result=False
+        container, cmd=f"pip show {package}", print_result=False
    )
    version_line = package_info.split("\n")[1]
    assert version_line.startswith(VERSION_PREFIX)
-    return version_line[len(VERSION_PREFIX):]
+    return version_line[len(VERSION_PREFIX) :]


class TaggerInterface:
    """Common interface for all taggers"""

    @staticmethod
    def tag_value(container) -> str:
        raise NotImplementedError
@@ -52,7 +49,9 @@ class SHATagger(TaggerInterface):
class UbuntuVersionTagger(TaggerInterface):
    @staticmethod
    def tag_value(container) -> str:
-        os_release = DockerRunner.run_simple_command(container, "cat /etc/os-release").split("\n")
+        os_release = DockerRunner.run_simple_command(
+            container, "cat /etc/os-release"
+        ).split("\n")
        for line in os_release:
            if line.startswith("VERSION_ID"):
                return "ubuntu-" + line.split("=")[1].strip('"')
@@ -3,5 +3,5 @@
import tensorflow as tf


-print(tf.constant('Hello, TensorFlow'))
+print(tf.constant("Hello, TensorFlow"))
print(tf.reduce_sum(tf.random.normal([1000, 1000])))
@@ -34,8 +34,7 @@ LOGGER = logging.getLogger(__name__)


class CondaPackageHelper:
-    """Conda package helper permitting to get information about packages
-    """
+    """Conda package helper permitting to get information about packages"""

    def __init__(self, container):
        # if isinstance(container, TrackedContainer):
@@ -50,8 +49,7 @@ class CondaPackageHelper:
        """Start the TrackedContainer and return an instance of a running container"""
        LOGGER.info(f"Starting container {container.image_name} ...")
        return container.run(
-            tty=True,
-            command=["start.sh", "bash", "-c", "sleep infinity"]
+            tty=True, command=["start.sh", "bash", "-c", "sleep infinity"]
        )

    @staticmethod
@@ -76,7 +74,9 @@ class CondaPackageHelper:
        if self.specs is None:
            LOGGER.info("Grabing the list of specifications ...")
            self.specs = CondaPackageHelper._packages_from_json(
-                self._execute_command(CondaPackageHelper._conda_export_command(from_history=True))
+                self._execute_command(
+                    CondaPackageHelper._conda_export_command(from_history=True)
+                )
            )
        return self.specs

@@ -112,9 +112,7 @@ class CondaPackageHelper:
    def available_packages(self):
        """Return the available packages"""
        if self.available is None:
-            LOGGER.info(
-                "Grabing the list of available packages (can take a while) ..."
-            )
+            LOGGER.info("Grabing the list of available packages (can take a while) ...")
            # Keeping command line output since `conda search --outdated --json` is way too long ...
            self.available = CondaPackageHelper._extract_available(
                self._execute_command(["conda", "search", "--outdated"])
@@ -146,10 +144,9 @@ class CondaPackageHelper:
            current = min(inst_vs, key=CondaPackageHelper.semantic_cmp)
            newest = avail_vs[-1]
            if avail_vs and current != newest:
-                if (
-                    CondaPackageHelper.semantic_cmp(current) <
-                    CondaPackageHelper.semantic_cmp(newest)
-                ):
+                if CondaPackageHelper.semantic_cmp(
+                    current
+                ) < CondaPackageHelper.semantic_cmp(newest):
                    self.comparison.append(
                        {"Package": pkg, "Current": current, "Newest": newest}
                    )
@@ -162,6 +159,7 @@ class CondaPackageHelper:
        def mysplit(string):
            def version_substrs(x):
                return re.findall(r"([A-z]+|\d+)", x)

            return list(chain(map(version_substrs, string.split("."))))

        def str_ord(string):
@@ -68,7 +68,7 @@ EXCLUDED_PACKAGES = [
    "protobuf",
    "r-irkernel",
    "unixodbc",
-    "bzip2"
+    "bzip2",
]

@@ -133,8 +133,9 @@ def _import_packages(package_helper, filtered_packages, check_function, max_fail
    for package in filtered_packages:
        LOGGER.info(f"Trying to import {package}")
        try:
-            assert check_function(package_helper, package) == 0, \
-                f"Package [{package}] import failed"
+            assert (
+                check_function(package_helper, package) == 0
+            ), f"Package [{package}] import failed"
        except AssertionError as err:
            failures[package] = err
            if len(failures) > max_failures:
@@ -12,7 +12,7 @@ def test_units(container):
    """Various units tests
    Add a py file in the {image}/test/units dir and it will be automatically tested
    """
-    short_image_name = container.image_name[container.image_name.rfind('/') + 1:]
+    short_image_name = container.image_name[container.image_name.rfind("/") + 1 :]
    host_data_dir = os.path.join(THIS_DIR, f"../{short_image_name}/test/units")
    LOGGER.info(f"Searching for units tests in {host_data_dir}")
    cont_data_dir = "/home/jovyan/data"
@@ -27,9 +27,9 @@ def test_units(container):
        c = container.run(
            volumes={host_data_dir: {"bind": cont_data_dir, "mode": "ro"}},
            tty=True,
-            command=['start.sh', 'python', f'{cont_data_dir}/{test_file}']
+            command=["start.sh", "python", f"{cont_data_dir}/{test_file}"],
        )
        rv = c.wait(timeout=30)
-        logs = c.logs(stdout=True).decode('utf-8')
+        logs = c.logs(stdout=True).decode("utf-8")
        LOGGER.debug(logs)
        assert rv == 0 or rv["StatusCode"] == 0