pre-commit: run black autoformatter on all files

Author:       Erik Sundell
Date:         2021-06-28 12:32:21 +02:00
Committed by: Erik Sundell
Parent:       a99a182940
Commit:       fe3968efe0
26 changed files with 359 additions and 291 deletions
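This commit is the mechanical output of running the black autoformatter over the whole repository; the reformatted files are shown in the diffs below. The repository's pre-commit configuration itself is not part of this diff, but the kind of rewrite black applies can be sketched with its Python API. The snippet below is a minimal illustration, assuming the black package is installed; the sample source string is hypothetical and only mimics the single-quoted, backslash-continued style seen in the old code.

import black

# A hypothetical snippet written in the pre-black style: single quotes and a
# backslash line continuation, similar to the code changed in this commit.
src = (
    "command = 'jupyter nbconvert --to markdown ' + \\\n"
    "    f'--output-dir {output_dir}'\n"
)

# format_str() reformats a source string using black's default style
# (double quotes preferred, 88-character line length) and returns it.
formatted = black.format_str(src, mode=black.Mode())
print(formatted)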

View File

@@ -22,10 +22,12 @@ def test_nbconvert(container, test_file):
     output_dir = "/tmp"
     timeout_ms = 600
     LOGGER.info(f"Test that {test_file} notebook can be executed ...")
-    command = "jupyter nbconvert --to markdown " + \
-        f"--ExecutePreprocessor.timeout={timeout_ms} " + \
-        f"--output-dir {output_dir} " + \
-        f"--execute {cont_data_dir}/{test_file}.ipynb"
+    command = (
+        "jupyter nbconvert --to markdown "
+        + f"--ExecutePreprocessor.timeout={timeout_ms} "
+        + f"--output-dir {output_dir} "
+        + f"--execute {cont_data_dir}/{test_file}.ipynb"
+    )
     c = container.run(
         volumes={host_data_dir: {"bind": cont_data_dir, "mode": "ro"}},
         tty=True,

View File

@@ -8,7 +8,7 @@ import errno
 import stat

 c = get_config()  # noqa: F821
-c.NotebookApp.ip = '0.0.0.0'
+c.NotebookApp.ip = "0.0.0.0"
 c.NotebookApp.port = 8888
 c.NotebookApp.open_browser = False

@@ -16,9 +16,9 @@ c.NotebookApp.open_browser = False
 c.FileContentsManager.delete_to_trash = False

 # Generate a self-signed certificate
-if 'GEN_CERT' in os.environ:
+if "GEN_CERT" in os.environ:
     dir_name = jupyter_data_dir()
-    pem_file = os.path.join(dir_name, 'notebook.pem')
+    pem_file = os.path.join(dir_name, "notebook.pem")
     try:
         os.makedirs(dir_name)
     except OSError as exc:  # Python >2.5
@@ -28,28 +28,42 @@ if 'GEN_CERT' in os.environ:
             raise

     # Generate an openssl.cnf file to set the distinguished name
-    cnf_file = os.path.join(os.getenv('CONDA_DIR', '/usr/lib'), 'ssl', 'openssl.cnf')
+    cnf_file = os.path.join(os.getenv("CONDA_DIR", "/usr/lib"), "ssl", "openssl.cnf")
     if not os.path.isfile(cnf_file):
-        with open(cnf_file, 'w') as fh:
-            fh.write('''\
+        with open(cnf_file, "w") as fh:
+            fh.write(
+                """\
 [req]
 distinguished_name = req_distinguished_name
 [req_distinguished_name]
-''')
+"""
+            )

     # Generate a certificate if one doesn't exist on disk
-    subprocess.check_call(['openssl', 'req', '-new',
-                           '-newkey', 'rsa:2048',
-                           '-days', '365',
-                           '-nodes', '-x509',
-                           '-subj', '/C=XX/ST=XX/L=XX/O=generated/CN=generated',
-                           '-keyout', pem_file,
-                           '-out', pem_file])
+    subprocess.check_call(
+        [
+            "openssl",
+            "req",
+            "-new",
+            "-newkey",
+            "rsa:2048",
+            "-days",
+            "365",
+            "-nodes",
+            "-x509",
+            "-subj",
+            "/C=XX/ST=XX/L=XX/O=generated/CN=generated",
+            "-keyout",
+            pem_file,
+            "-out",
+            pem_file,
+        ]
+    )
     # Restrict access to the file
     os.chmod(pem_file, stat.S_IRUSR | stat.S_IWUSR)
     c.NotebookApp.certfile = pem_file

 # Change default umask for all subprocesses of the notebook server if set in
 # the environment
-if 'NB_UMASK' in os.environ:
-    os.umask(int(os.environ['NB_UMASK'], 8))
+if "NB_UMASK" in os.environ:
+    os.umask(int(os.environ["NB_UMASK"], 8))

View File

@@ -11,59 +11,52 @@ LOGGER = logging.getLogger(__name__)
 def test_cli_args(container, http_client):
     """Container should respect notebook server command line args
     (e.g., disabling token security)"""
-    c = container.run(
-        command=["start-notebook.sh", "--NotebookApp.token=''"]
-    )
-    resp = http_client.get('http://localhost:8888')
+    c = container.run(command=["start-notebook.sh", "--NotebookApp.token=''"])
+    resp = http_client.get("http://localhost:8888")
     resp.raise_for_status()
-    logs = c.logs(stdout=True).decode('utf-8')
+    logs = c.logs(stdout=True).decode("utf-8")
     LOGGER.debug(logs)
-    assert 'login_submit' not in resp.text
+    assert "login_submit" not in resp.text


-@pytest.mark.filterwarnings('ignore:Unverified HTTPS request')
+@pytest.mark.filterwarnings("ignore:Unverified HTTPS request")
 def test_unsigned_ssl(container, http_client):
     """Container should generate a self-signed SSL certificate
     and notebook server should use it to enable HTTPS.
     """
-    container.run(
-        environment=['GEN_CERT=yes']
-    )
+    container.run(environment=["GEN_CERT=yes"])
     # NOTE: The requests.Session backing the http_client fixture does not retry
     # properly while the server is booting up. An SSL handshake error seems to
     # abort the retry logic. Forcing a long sleep for the moment until I have
     # time to dig more.
     time.sleep(5)
-    resp = http_client.get('https://localhost:8888', verify=False)
+    resp = http_client.get("https://localhost:8888", verify=False)
     resp.raise_for_status()
-    assert 'login_submit' in resp.text
+    assert "login_submit" in resp.text


 def test_uid_change(container):
     """Container should change the UID of the default user."""
     c = container.run(
         tty=True,
-        user='root',
-        environment=['NB_UID=1010'],
-        command=['start.sh', 'bash', '-c', 'id && touch /opt/conda/test-file']
+        user="root",
+        environment=["NB_UID=1010"],
+        command=["start.sh", "bash", "-c", "id && touch /opt/conda/test-file"],
     )
     # usermod is slow so give it some time
     c.wait(timeout=120)
-    assert 'uid=1010(jovyan)' in c.logs(stdout=True).decode('utf-8')
+    assert "uid=1010(jovyan)" in c.logs(stdout=True).decode("utf-8")


 def test_gid_change(container):
     """Container should change the GID of the default user."""
     c = container.run(
-        tty=True,
-        user='root',
-        environment=['NB_GID=110'],
-        command=['start.sh', 'id']
+        tty=True, user="root", environment=["NB_GID=110"], command=["start.sh", "id"]
     )
     c.wait(timeout=10)
-    logs = c.logs(stdout=True).decode('utf-8')
-    assert 'gid=110(jovyan)' in logs
-    assert 'groups=110(jovyan),100(users)' in logs
+    logs = c.logs(stdout=True).decode("utf-8")
+    assert "gid=110(jovyan)" in logs
+    assert "groups=110(jovyan),100(users)" in logs


 def test_nb_user_change(container):
@@ -72,11 +65,8 @@ def test_nb_user_change(container):
     running_container = container.run(
         tty=True,
         user="root",
-        environment=[
-            f"NB_USER={nb_user}",
-            "CHOWN_HOME=yes"
-        ],
-        command=['start.sh', 'bash', '-c', 'sleep infinity']
+        environment=[f"NB_USER={nb_user}", "CHOWN_HOME=yes"],
+        command=["start.sh", "bash", "-c", "sleep infinity"],
     )

     # Give the chown time to complete. Use sleep, not wait, because the
@@ -98,25 +88,27 @@ def test_nb_user_change(container):
     expected_output = f"{nb_user} users"
     cmd = running_container.exec_run(command, workdir=f"/home/{nb_user}")
     output = cmd.output.decode("utf-8").strip("\n")
-    assert output == expected_output, f"Bad owner for the {nb_user} home folder {output}, expected {expected_output}"
+    assert (
+        output == expected_output
+    ), f"Bad owner for the {nb_user} home folder {output}, expected {expected_output}"


 def test_chown_extra(container):
     """Container should change the UID/GID of CHOWN_EXTRA."""
     c = container.run(
         tty=True,
-        user='root',
+        user="root",
         environment=[
-            'NB_UID=1010',
-            'NB_GID=101',
-            'CHOWN_EXTRA=/opt/conda',
-            'CHOWN_EXTRA_OPTS=-R'
+            "NB_UID=1010",
+            "NB_GID=101",
+            "CHOWN_EXTRA=/opt/conda",
+            "CHOWN_EXTRA_OPTS=-R",
         ],
-        command=['start.sh', 'bash', '-c', 'stat -c \'%n:%u:%g\' /opt/conda/LICENSE.txt']
+        command=["start.sh", "bash", "-c", "stat -c '%n:%u:%g' /opt/conda/LICENSE.txt"],
     )
     # chown is slow so give it some time
     c.wait(timeout=120)
-    assert '/opt/conda/LICENSE.txt:1010:101' in c.logs(stdout=True).decode('utf-8')
+    assert "/opt/conda/LICENSE.txt:1010:101" in c.logs(stdout=True).decode("utf-8")


 def test_chown_home(container):
@@ -124,64 +116,66 @@ def test_chown_home(container):
     group to the current value of NB_UID and NB_GID."""
     c = container.run(
         tty=True,
-        user='root',
-        environment=[
-            'CHOWN_HOME=yes',
-            'CHOWN_HOME_OPTS=-R'
+        user="root",
+        environment=["CHOWN_HOME=yes", "CHOWN_HOME_OPTS=-R"],
+        command=[
+            "start.sh",
+            "bash",
+            "-c",
+            "chown root:root /home/jovyan && ls -alsh /home",
         ],
-        command=['start.sh', 'bash', '-c', 'chown root:root /home/jovyan && ls -alsh /home']
     )
     c.wait(timeout=120)
-    assert "Changing ownership of /home/jovyan to 1000:100 with options '-R'" in c.logs(stdout=True).decode('utf-8')
+    assert "Changing ownership of /home/jovyan to 1000:100 with options '-R'" in c.logs(
+        stdout=True
+    ).decode("utf-8")


 def test_sudo(container):
     """Container should grant passwordless sudo to the default user."""
     c = container.run(
         tty=True,
-        user='root',
-        environment=['GRANT_SUDO=yes'],
-        command=['start.sh', 'sudo', 'id']
+        user="root",
+        environment=["GRANT_SUDO=yes"],
+        command=["start.sh", "sudo", "id"],
     )
     rv = c.wait(timeout=10)
     assert rv == 0 or rv["StatusCode"] == 0
-    assert 'uid=0(root)' in c.logs(stdout=True).decode('utf-8')
+    assert "uid=0(root)" in c.logs(stdout=True).decode("utf-8")


 def test_sudo_path(container):
     """Container should include /opt/conda/bin in the sudo secure_path."""
     c = container.run(
         tty=True,
-        user='root',
-        environment=['GRANT_SUDO=yes'],
-        command=['start.sh', 'sudo', 'which', 'jupyter']
+        user="root",
+        environment=["GRANT_SUDO=yes"],
+        command=["start.sh", "sudo", "which", "jupyter"],
     )
     rv = c.wait(timeout=10)
     assert rv == 0 or rv["StatusCode"] == 0
-    assert c.logs(stdout=True).decode('utf-8').rstrip().endswith('/opt/conda/bin/jupyter')
+    assert (
+        c.logs(stdout=True).decode("utf-8").rstrip().endswith("/opt/conda/bin/jupyter")
+    )


 def test_sudo_path_without_grant(container):
     """Container should include /opt/conda/bin in the sudo secure_path."""
-    c = container.run(
-        tty=True,
-        user='root',
-        command=['start.sh', 'which', 'jupyter']
-    )
+    c = container.run(tty=True, user="root", command=["start.sh", "which", "jupyter"])
     rv = c.wait(timeout=10)
     assert rv == 0 or rv["StatusCode"] == 0
-    assert c.logs(stdout=True).decode('utf-8').rstrip().endswith('/opt/conda/bin/jupyter')
+    assert (
+        c.logs(stdout=True).decode("utf-8").rstrip().endswith("/opt/conda/bin/jupyter")
+    )


 def test_group_add(container, tmpdir):
     """Container should run with the specified uid, gid, and secondary
     group.
     """
-    c = container.run(
-        user='1010:1010',
-        group_add=['users'],
-        command=['start.sh', 'id']
-    )
+    c = container.run(user="1010:1010", group_add=["users"], command=["start.sh", "id"])
     rv = c.wait(timeout=5)
     assert rv == 0 or rv["StatusCode"] == 0
-    assert 'uid=1010 gid=1010 groups=1010,100(users)' in c.logs(stdout=True).decode('utf-8')
+    assert "uid=1010 gid=1010 groups=1010,100(users)" in c.logs(stdout=True).decode(
+        "utf-8"
+    )

View File

@@ -23,8 +23,7 @@ def test_package_manager(container, package_manager, version_arg):
         f"Test that the package manager {package_manager} is working properly ..."
     )
     c = container.run(
-        tty=True,
-        command=["start.sh", "bash", "-c", f"{package_manager} {version_arg}"]
+        tty=True, command=["start.sh", "bash", "-c", f"{package_manager} {version_arg}"]
     )
     rv = c.wait(timeout=5)
     logs = c.logs(stdout=True).decode("utf-8")

View File

@@ -9,8 +9,7 @@ LOGGER = logging.getLogger(__name__)
 def test_pandoc(container):
     """Pandoc shall be able to convert MD to HTML."""
     c = container.run(
-        tty=True,
-        command=["start.sh", "bash", "-c", 'echo "**BOLD**" | pandoc']
+        tty=True, command=["start.sh", "bash", "-c", 'echo "**BOLD**" | pandoc']
     )
     c.wait(timeout=10)
     logs = c.logs(stdout=True).decode("utf-8")

View File

@@ -14,26 +14,26 @@ from requests.adapters import HTTPAdapter
 LOGGER = logging.getLogger(__name__)


-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def http_client():
     """Requests session with retries and backoff."""
     s = requests.Session()
     retries = Retry(total=5, backoff_factor=1)
-    s.mount('http://', HTTPAdapter(max_retries=retries))
-    s.mount('https://', HTTPAdapter(max_retries=retries))
+    s.mount("http://", HTTPAdapter(max_retries=retries))
+    s.mount("https://", HTTPAdapter(max_retries=retries))
     return s


-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def docker_client():
     """Docker client configured based on the host environment"""
     return docker.from_env()


-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def image_name():
     """Image name to test"""
-    return os.getenv('TEST_IMAGE')
+    return os.getenv("TEST_IMAGE")


 class TrackedContainer:
@@ -78,7 +78,9 @@ class TrackedContainer:
         all_kwargs.update(self.kwargs)
         all_kwargs.update(kwargs)
         LOGGER.info(f"Running {self.image_name} with args {all_kwargs} ...")
-        self.container = self.docker_client.containers.run(self.image_name, **all_kwargs)
+        self.container = self.docker_client.containers.run(
+            self.image_name, **all_kwargs
+        )
         return self.container

     def remove(self):
@@ -87,7 +89,7 @@ class TrackedContainer:
             self.container.remove(force=True)


-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
 def container(docker_client, image_name):
     """Notebook container with initial configuration appropriate for testing
     (e.g., HTTP port exposed to the host for HTTP calls).
@@ -95,12 +97,7 @@ def container(docker_client, image_name):
     Yields the container instance and kills it when the caller is done with it.
     """
     container = TrackedContainer(
-        docker_client,
-        image_name,
-        detach=True,
-        ports={
-            '8888/tcp': 8888
-        }
+        docker_client, image_name, detach=True, ports={"8888/tcp": 8888}
     )
     yield container
     container.remove()

View File

@@ -9,8 +9,7 @@ def test_julia(container):
     """Basic julia test"""
     LOGGER.info("Test that julia is correctly installed ...")
     running_container = container.run(
-        tty=True,
-        command=["start.sh", "bash", "-c", "sleep infinity"]
+        tty=True, command=["start.sh", "bash", "-c", "sleep infinity"]
     )
     command = "julia --version"
     cmd = running_container.exec_run(command)

View File

@@ -25,40 +25,37 @@
 # If your documentation needs a minimal Sphinx version, state it here.
 #
-needs_sphinx = '2.1'
+needs_sphinx = "2.1"

 # Add any Sphinx extension module names here, as strings. They can be
 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
 # ones.
-extensions = [
-    'myst_parser',
-    'sphinx_copybutton'
-]
+extensions = ["myst_parser", "sphinx_copybutton"]

 # Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = ["_templates"]

 source_suffix = {
-    '.rst': 'restructuredtext',
-    '.md': 'markdown',
+    ".rst": "restructuredtext",
+    ".md": "markdown",
 }

 # The master toctree document.
-master_doc = 'index'
+master_doc = "index"

 # General information about the project.
-project = 'docker-stacks'
-copyright = '2018- Project Jupyter'
-author = 'Project Jupyter'
+project = "docker-stacks"
+copyright = "2018- Project Jupyter"
+author = "Project Jupyter"

 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
 # built documents.
 #
 # The short X.Y version.
-version = 'latest'
+version = "latest"
 # The full version, including alpha/beta/rc tags.
-release = 'latest'
+release = "latest"

 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
@@ -70,10 +67,10 @@ language = None
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
 # This patterns also effect to html_static_path and html_extra_path
-exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
+exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]

 # The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
+pygments_style = "sphinx"

 # If true, `todo` and `todoList` produce output, else they produce nothing.
 todo_include_todos = False
@@ -84,27 +81,27 @@ todo_include_todos = False
 # The theme to use for HTML and HTML Help pages. See the documentation for
 # a list of builtin themes.
 #
-html_theme = 'alabaster'
+html_theme = "alabaster"

 # Theme options are theme-specific and customize the look and feel of a theme
 # further. For a list of options available for each theme, see the
 # documentation.
 #
 html_theme_options = {
-    'description': "Jupyter Docker Stacks",
-    'fixed_sidebar': False,
-    'show_relbars': True,
-    'github_user': 'jupyter',
-    'github_repo': 'docker-stacks',
-    'github_type': 'star',
-    'logo': 'jupyter-logo.svg',
-    'logo_text_align': 'left'
+    "description": "Jupyter Docker Stacks",
+    "fixed_sidebar": False,
+    "show_relbars": True,
+    "github_user": "jupyter",
+    "github_repo": "docker-stacks",
+    "github_type": "star",
+    "logo": "jupyter-logo.svg",
+    "logo_text_align": "left",
 }

 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
+html_static_path = ["_static"]

 # Custom sidebar templates, must be a dictionary that maps document names
 # to template names.
@@ -115,18 +112,18 @@ html_static_path = ['_static']
 # 'searchbox.html']``.
 #
 html_sidebars = {
-    '**': [
-        'about.html',
-        'navigation.html',
-        'relations.html',
-        'searchbox.html',
+    "**": [
+        "about.html",
+        "navigation.html",
+        "relations.html",
+        "searchbox.html",
     ]
 }

 # -- Options for HTMLHelp output ------------------------------------------

 # Output file base name for HTML help builder.
-htmlhelp_basename = 'docker-stacksdoc'
+htmlhelp_basename = "docker-stacksdoc"


 # -- Options for LaTeX output ---------------------------------------------
@@ -135,15 +132,12 @@ latex_elements = {
     # The paper size ('letterpaper' or 'a4paper').
     #
     # 'papersize': 'letterpaper',
-
     # The font size ('10pt', '11pt' or '12pt').
     #
     # 'pointsize': '10pt',
-
     # Additional stuff for the LaTeX preamble.
     #
     # 'preamble': '',
-
     # Latex figure (float) alignment
     #
     # 'figure_align': 'htbp',
@@ -153,8 +147,13 @@ latex_elements = {
 # (source start file, target name, title,
 # author, documentclass [howto, manual, or own class]).
 latex_documents = [
-    (master_doc, 'docker-stacks.tex', 'docker-stacks Documentation',
-     'Project Jupyter', 'manual'),
+    (
+        master_doc,
+        "docker-stacks.tex",
+        "docker-stacks Documentation",
+        "Project Jupyter",
+        "manual",
+    ),
 ]
@@ -162,10 +161,7 @@ latex_documents = [
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
-man_pages = [
-    (master_doc, 'docker-stacks', 'docker-stacks Documentation',
-     [author], 1)
-]
+man_pages = [(master_doc, "docker-stacks", "docker-stacks Documentation", [author], 1)]


 # -- Options for Texinfo output -------------------------------------------
@@ -174,9 +170,15 @@ man_pages = [
 # (source start file, target name, title, author,
 # dir menu entry, description, category)
 texinfo_documents = [
-    (master_doc, 'docker-stacks', 'docker-stacks Documentation',
-     author, 'docker-stacks', 'One line description of project.',
-     'Miscellaneous'),
+    (
+        master_doc,
+        "docker-stacks",
+        "docker-stacks Documentation",
+        author,
+        "docker-stacks",
+        "One line description of project.",
+        "Miscellaneous",
+    ),
 ]


 # -- Extension configuration ----------------------------------------------
@@ -187,4 +189,4 @@ linkcheck_anchors = False
 # -- Translation ----------------------------------------------------------

 gettext_uuid = True
-locale_dirs = ['locale/']
+locale_dirs = ["locale/"]

View File

@@ -10,8 +10,7 @@ def test_inkscape(container):
     """Inkscape shall be installed to be able to convert SVG files."""
     LOGGER.info("Test that inkscape is working by printing its version ...")
     c = container.run(
-        tty=True,
-        command=["start.sh", "bash", "-c", "inkscape --version"]
+        tty=True, command=["start.sh", "bash", "-c", "inkscape --version"]
     )
     c.wait(timeout=10)
     logs = c.logs(stdout=True).decode("utf-8")

View File

@@ -24,7 +24,9 @@ def test_nbconvert(container, test_file, output_format):
     host_data_dir = os.path.join(THIS_DIR, "data")
     cont_data_dir = "/home/jovyan/data"
     output_dir = "/tmp"
-    LOGGER.info(f"Test that the example notebook {test_file} can be converted to {output_format} ...")
+    LOGGER.info(
+        f"Test that the example notebook {test_file} can be converted to {output_format} ..."
+    )
     command = f"jupyter nbconvert {cont_data_dir}/{test_file}.ipynb --output-dir {output_dir} --to {output_format}"
     c = container.run(
         volumes={host_data_dir: {"bind": cont_data_dir, "mode": "ro"}},

View File

@@ -9,10 +9,9 @@ LOGGER = logging.getLogger(__name__)
 def test_spark_shell(container):
     """Checking if Spark (spark-shell) is running properly"""
     c = container.run(
-        tty=True,
-        command=['start.sh', 'bash', '-c', 'spark-shell <<< "1+1"']
+        tty=True, command=["start.sh", "bash", "-c", 'spark-shell <<< "1+1"']
     )
     c.wait(timeout=60)
-    logs = c.logs(stdout=True).decode('utf-8')
+    logs = c.logs(stdout=True).decode("utf-8")
     LOGGER.debug(logs)
-    assert 'res0: Int = 2' in logs, "spark-shell does not work"
+    assert "res0: Int = 2" in logs, "spark-shell does not work"

View File

@@ -14,8 +14,9 @@ s = 1 + np.sin(2 * np.pi * t)
 fig, ax = plt.subplots()
 ax.plot(t, s)
-ax.set(xlabel='time (s)', ylabel='voltage (mV)',
-       title='About as simple as it gets, folks')
+ax.set(
+    xlabel="time (s)", ylabel="voltage (mV)", title="About as simple as it gets, folks"
+)
 ax.grid()

 # Note that the test can be run headless by checking if an image is produced
 file_path = os.path.join("/tmp", "test.png")

View File

@@ -3,16 +3,22 @@ import matplotlib
 import matplotlib.pyplot as plt
 import os

-matplotlib.rcParams['pgf.texsystem'] = 'pdflatex'
-matplotlib.rcParams.update({'font.family': 'serif', 'font.size': 18,
-                            'axes.labelsize': 20, 'axes.titlesize': 24,
-                            'figure.titlesize': 28})
-matplotlib.rcParams['text.usetex'] = True
+matplotlib.rcParams["pgf.texsystem"] = "pdflatex"
+matplotlib.rcParams.update(
+    {
+        "font.family": "serif",
+        "font.size": 18,
+        "axes.labelsize": 20,
+        "axes.titlesize": 24,
+        "figure.titlesize": 28,
+    }
+)
+matplotlib.rcParams["text.usetex"] = True

 fig, ax = plt.subplots(1, 1)
 x = [1, 2]
 y = [1, 2]
-ax.plot(x, y, label='a label')
+ax.plot(x, y, label="a label")
 ax.legend(fontsize=15)

 file_path = os.path.join("/tmp", "test_fonts.png")

View File

@@ -26,8 +26,7 @@ def test_check_extension(container, extension):
     """
     LOGGER.info(f"Checking the extension: {extension} ...")
     c = container.run(
-        tty=True,
-        command=["start.sh", "jupyter", "labextension", "check", extension]
+        tty=True, command=["start.sh", "jupyter", "labextension", "check", extension]
     )
     rv = c.wait(timeout=10)
     logs = c.logs(stdout=True).decode("utf-8")

View File

@@ -13,9 +13,17 @@ THIS_DIR = os.path.dirname(os.path.realpath(__file__))
 @pytest.mark.parametrize(
     "test_file,expected_file,description",
     [
-        ("matplotlib_1.py", "test.png", "Test that matplotlib is able to plot a graph and write it as an image ..."),
-        ("matplotlib_fonts_1.py", "test_fonts.png", "Test cm-super latex labels in matplotlib ...")
-    ]
+        (
+            "matplotlib_1.py",
+            "test.png",
+            "Test that matplotlib is able to plot a graph and write it as an image ...",
+        ),
+        (
+            "matplotlib_fonts_1.py",
+            "test_fonts.png",
+            "Test cm-super latex labels in matplotlib ...",
+        ),
+    ],
 )
 def test_matplotlib(container, test_file, expected_file, description):
     """Various tests performed on matplotlib

View File

@@ -19,7 +19,9 @@ BUILD_TIMESTAMP = datetime.datetime.utcnow().isoformat()[:-7] + "Z"
 MARKDOWN_LINE_BREAK = "<br />"


-def append_build_history_line(short_image_name: str, owner: str, wiki_path: str, all_tags: List[str]) -> None:
+def append_build_history_line(
+    short_image_name: str, owner: str, wiki_path: str, all_tags: List[str]
+) -> None:
     logger.info("Appending build history line")

     date_column = f"`{BUILD_TIMESTAMP}`"
@@ -28,11 +30,13 @@ def append_build_history_line(short_image_name: str, owner: str, wiki_path: str,
     )
     commit_hash = GitHelper.commit_hash()
     commit_hash_tag = GitHelper.commit_hash_tag()
-    links_column = MARKDOWN_LINE_BREAK.join([
-        f"[Git diff](https://github.com/jupyter/docker-stacks/commit/{commit_hash})",
-        f"[Dockerfile](https://github.com/jupyter/docker-stacks/blob/{commit_hash}/{short_image_name}/Dockerfile)",
-        f"[Build manifest](./{short_image_name}-{commit_hash_tag})"
-    ])
+    links_column = MARKDOWN_LINE_BREAK.join(
+        [
+            f"[Git diff](https://github.com/jupyter/docker-stacks/commit/{commit_hash})",
+            f"[Dockerfile](https://github.com/jupyter/docker-stacks/blob/{commit_hash}/{short_image_name}/Dockerfile)",
+            f"[Build manifest](./{short_image_name}-{commit_hash_tag})",
+        ]
+    )
     build_history_line = "|".join([date_column, image_column, links_column]) + "|"

     home_wiki_file = os.path.join(wiki_path, "Home.md")
@@ -49,16 +53,19 @@ def create_manifest_file(
     owner: str,
     wiki_path: str,
     manifests: List[ManifestInterface],
-    container
+    container,
 ) -> None:
     manifest_names = [manifest.__name__ for manifest in manifests]
     logger.info(f"Using manifests: {manifest_names}")

     commit_hash_tag = GitHelper.commit_hash_tag()
-    manifest_file = os.path.join(wiki_path, f"manifests/{short_image_name}-{commit_hash_tag}.md")
+    manifest_file = os.path.join(
+        wiki_path, f"manifests/{short_image_name}-{commit_hash_tag}.md"
+    )

-    markdown_pieces = [ManifestHeader.create_header(short_image_name, owner, BUILD_TIMESTAMP)] + \
-        [manifest.markdown_piece(container) for manifest in manifests]
+    markdown_pieces = [
+        ManifestHeader.create_header(short_image_name, owner, BUILD_TIMESTAMP)
+    ] + [manifest.markdown_piece(container) for manifest in manifests]
     markdown_content = "\n\n".join(markdown_pieces) + "\n"

     with open(manifest_file, "w") as f:
@@ -81,7 +88,9 @@ if __name__ == "__main__":
     logging.basicConfig(level=logging.INFO)

     arg_parser = argparse.ArgumentParser()
-    arg_parser.add_argument("--short-image-name", required=True, help="Short image name to apply tags for")
+    arg_parser.add_argument(
+        "--short-image-name", required=True, help="Short image name to apply tags for"
+    )
     arg_parser.add_argument("--owner", required=True, help="Owner of the image")
     arg_parser.add_argument("--wiki-path", required=True, help="Path to the wiki pages")
     args = arg_parser.parse_args()

View File

@@ -8,7 +8,12 @@ logger = logging.getLogger(__name__)


 class DockerRunner:
-    def __init__(self, image_name: str, docker_client=docker.from_env(), command: str = "sleep infinity"):
+    def __init__(
+        self,
+        image_name: str,
+        docker_client=docker.from_env(),
+        command: str = "sleep infinity",
+    ):
         self.container = None
         self.image_name = image_name
         self.command = command
@@ -17,7 +22,9 @@ class DockerRunner:
     def __enter__(self):
         logger.info(f"Creating container for image {self.image_name} ...")
         self.container = self.docker_client.containers.run(
-            image=self.image_name, command=self.command, detach=True,
+            image=self.image_name,
+            command=self.command,
+            detach=True,
         )
         logger.info(f"Container {self.container.name} created")
         return self.container

View File

@@ -6,7 +6,9 @@ from .manifests import ManifestInterface
 from .taggers import TaggerInterface


-def get_taggers_and_manifests(short_image_name: str) -> Tuple[List[TaggerInterface], List[ManifestInterface]]:
+def get_taggers_and_manifests(
+    short_image_name: str,
+) -> Tuple[List[TaggerInterface], List[ManifestInterface]]:
     taggers: List[TaggerInterface] = []
     manifests: List[ManifestInterface] = []
     while short_image_name is not None:

View File

@@ -2,15 +2,29 @@
 # Distributed under the terms of the Modified BSD License.
 from dataclasses import dataclass, field
 from typing import Optional, List
-from .taggers import TaggerInterface, \
-    SHATagger, \
-    UbuntuVersionTagger, PythonVersionTagger, \
-    JupyterNotebookVersionTagger, JupyterLabVersionTagger, JupyterHubVersionTagger, \
-    RVersionTagger, TensorflowVersionTagger, JuliaVersionTagger, \
-    SparkVersionTagger, HadoopVersionTagger, JavaVersionTagger
-from .manifests import ManifestInterface, \
-    CondaEnvironmentManifest, AptPackagesManifest, \
-    RPackagesManifest, JuliaPackagesManifest, SparkInfoManifest
+from .taggers import (
+    TaggerInterface,
+    SHATagger,
+    UbuntuVersionTagger,
+    PythonVersionTagger,
+    JupyterNotebookVersionTagger,
+    JupyterLabVersionTagger,
+    JupyterHubVersionTagger,
+    RVersionTagger,
+    TensorflowVersionTagger,
+    JuliaVersionTagger,
+    SparkVersionTagger,
+    HadoopVersionTagger,
+    JavaVersionTagger,
+)
+from .manifests import (
+    ManifestInterface,
+    CondaEnvironmentManifest,
+    AptPackagesManifest,
+    RPackagesManifest,
+    JuliaPackagesManifest,
+    SparkInfoManifest,
+)


 @dataclass
@@ -25,41 +39,37 @@ ALL_IMAGES = {
         parent_image=None,
         taggers=[
             SHATagger,
-            UbuntuVersionTagger, PythonVersionTagger,
-            JupyterNotebookVersionTagger, JupyterLabVersionTagger, JupyterHubVersionTagger
+            UbuntuVersionTagger,
+            PythonVersionTagger,
+            JupyterNotebookVersionTagger,
+            JupyterLabVersionTagger,
+            JupyterHubVersionTagger,
         ],
-        manifests=[
-            CondaEnvironmentManifest, AptPackagesManifest
-        ]
-    ),
-    "minimal-notebook": ImageDescription(
-        parent_image="base-notebook"
-    ),
-    "scipy-notebook": ImageDescription(
-        parent_image="minimal-notebook"
+        manifests=[CondaEnvironmentManifest, AptPackagesManifest],
     ),
+    "minimal-notebook": ImageDescription(parent_image="base-notebook"),
+    "scipy-notebook": ImageDescription(parent_image="minimal-notebook"),
     "r-notebook": ImageDescription(
         parent_image="minimal-notebook",
         taggers=[RVersionTagger],
-        manifests=[RPackagesManifest]
+        manifests=[RPackagesManifest],
     ),
     "tensorflow-notebook": ImageDescription(
-        parent_image="scipy-notebook",
-        taggers=[TensorflowVersionTagger]
+        parent_image="scipy-notebook", taggers=[TensorflowVersionTagger]
     ),
     "datascience-notebook": ImageDescription(
         parent_image="scipy-notebook",
         taggers=[RVersionTagger, JuliaVersionTagger],
-        manifests=[RPackagesManifest, JuliaPackagesManifest]
+        manifests=[RPackagesManifest, JuliaPackagesManifest],
     ),
     "pyspark-notebook": ImageDescription(
         parent_image="scipy-notebook",
         taggers=[SparkVersionTagger, HadoopVersionTagger, JavaVersionTagger],
-        manifests=[SparkInfoManifest]
+        manifests=[SparkInfoManifest],
     ),
     "all-spark-notebook": ImageDescription(
         parent_image="pyspark-notebook",
         taggers=[RVersionTagger],
-        manifests=[RPackagesManifest]
-    )
+        manifests=[RPackagesManifest],
+    ),
 }

View File

@@ -10,41 +10,49 @@ logger = logging.getLogger(__name__)
 def quoted_output(container, cmd: str) -> str:
-    return "\n".join([
-        "```",
-        DockerRunner.run_simple_command(container, cmd, print_result=False),
-        "```"
-    ])
+    return "\n".join(
+        [
+            "```",
+            DockerRunner.run_simple_command(container, cmd, print_result=False),
+            "```",
+        ]
+    )


 class ManifestHeader:
     """ManifestHeader doesn't fall under common interface and we run it separately"""

     @staticmethod
     def create_header(short_image_name: str, owner: str, build_timestamp: str) -> str:
         commit_hash = GitHelper.commit_hash()
         commit_hash_tag = GitHelper.commit_hash_tag()
         commit_message = GitHelper.commit_message()

-        image_size = docker["images", f"{owner}/{short_image_name}:latest", "--format", "{{.Size}}"]().rstrip()
+        image_size = docker[
+            "images", f"{owner}/{short_image_name}:latest", "--format", "{{.Size}}"
+        ]().rstrip()

-        return "\n".join([
-            f"# Build manifest for image: {short_image_name}:{commit_hash_tag}",
-            "",
-            "## Build Info",
-            "",
-            f"* Build datetime: {build_timestamp}",
-            f"* Docker image: {owner}/{short_image_name}:{commit_hash_tag}",
-            f"* Docker image size: {image_size}",
-            f"* Git commit SHA: [{commit_hash}](https://github.com/jupyter/docker-stacks/commit/{commit_hash})",
-            "* Git commit message:",
-            "```",
-            f"{commit_message}",
-            "```"
-        ])
+        return "\n".join(
+            [
+                f"# Build manifest for image: {short_image_name}:{commit_hash_tag}",
+                "",
+                "## Build Info",
+                "",
+                f"* Build datetime: {build_timestamp}",
+                f"* Docker image: {owner}/{short_image_name}:{commit_hash_tag}",
+                f"* Docker image size: {image_size}",
+                f"* Git commit SHA: [{commit_hash}](https://github.com/jupyter/docker-stacks/commit/{commit_hash})",
+                "* Git commit message:",
+                "```",
+                f"{commit_message}",
+                "```",
+            ]
+        )


 class ManifestInterface:
     """Common interface for all manifests"""

     @staticmethod
     def markdown_piece(container) -> str:
         raise NotImplementedError
@@ -53,56 +61,66 @@ class ManifestInterface:
 class CondaEnvironmentManifest(ManifestInterface):
     @staticmethod
     def markdown_piece(container) -> str:
-        return "\n".join([
-            "## Python Packages",
-            "",
-            quoted_output(container, "python --version"),
-            "",
-            quoted_output(container, "conda info"),
-            "",
-            quoted_output(container, "conda list")
-        ])
+        return "\n".join(
+            [
+                "## Python Packages",
+                "",
+                quoted_output(container, "python --version"),
+                "",
+                quoted_output(container, "conda info"),
+                "",
+                quoted_output(container, "conda list"),
+            ]
+        )


 class AptPackagesManifest(ManifestInterface):
     @staticmethod
     def markdown_piece(container) -> str:
-        return "\n".join([
-            "## Apt Packages",
-            "",
-            quoted_output(container, "apt list --installed")
-        ])
+        return "\n".join(
+            ["## Apt Packages", "", quoted_output(container, "apt list --installed")]
+        )


 class RPackagesManifest(ManifestInterface):
     @staticmethod
     def markdown_piece(container) -> str:
-        return "\n".join([
-            "## R Packages",
-            "",
-            quoted_output(container, "R --version"),
-            "",
-            quoted_output(container, "R --silent -e 'installed.packages(.Library)[, c(1,3)]'")
-        ])
+        return "\n".join(
+            [
+                "## R Packages",
+                "",
+                quoted_output(container, "R --version"),
+                "",
+                quoted_output(
+                    container, "R --silent -e 'installed.packages(.Library)[, c(1,3)]'"
+                ),
+            ]
+        )


 class JuliaPackagesManifest(ManifestInterface):
     @staticmethod
     def markdown_piece(container) -> str:
-        return "\n".join([
-            "## Julia Packages",
-            "",
-            quoted_output(container, "julia -E 'using InteractiveUtils; versioninfo()'"),
-            "",
-            quoted_output(container, "julia -E 'import Pkg; Pkg.status()'")
-        ])
+        return "\n".join(
+            [
+                "## Julia Packages",
+                "",
+                quoted_output(
+                    container, "julia -E 'using InteractiveUtils; versioninfo()'"
+                ),
+                "",
+                quoted_output(container, "julia -E 'import Pkg; Pkg.status()'"),
+            ]
+        )


 class SparkInfoManifest(ManifestInterface):
     @staticmethod
     def markdown_piece(container) -> str:
-        return "\n".join([
-            "## Apache Spark",
-            "",
-            quoted_output(container, "/usr/local/spark/bin/spark-submit --version"),
-        ])
+        return "\n".join(
+            [
+                "## Apache Spark",
+                "",
+                quoted_output(container, "/usr/local/spark/bin/spark-submit --version"),
+            ]
+        )

View File

@@ -21,7 +21,9 @@ def tag_image(short_image_name: str, owner: str) -> None:
         for tagger in taggers:
             tagger_name = tagger.__name__
             tag_value = tagger.tag_value(container)
-            logger.info(f"Applying tag tagger_name: {tagger_name} tag_value: {tag_value}")
+            logger.info(
+                f"Applying tag tagger_name: {tagger_name} tag_value: {tag_value}"
+            )
             docker["tag", image, f"{owner}/{short_image_name}:{tag_value}"]()
@@ -29,7 +31,9 @@ if __name__ == "__main__":
     logging.basicConfig(level=logging.INFO)

     arg_parser = argparse.ArgumentParser()
-    arg_parser.add_argument("--short-image-name", required=True, help="Short image name to apply tags for")
+    arg_parser.add_argument(
+        "--short-image-name", required=True, help="Short image name to apply tags for"
+    )
     arg_parser.add_argument("--owner", required=True, help="Owner of the image")
     args = arg_parser.parse_args()

View File

@@ -14,30 +14,27 @@ def _get_program_version(container, program: str) -> str:
 def _get_env_variable(container, variable: str) -> str:
     env = DockerRunner.run_simple_command(
-        container,
-        cmd="env",
-        print_result=False
+        container, cmd="env", print_result=False
     ).split()
     for env_entry in env:
         if env_entry.startswith(variable):
-            return env_entry[len(variable) + 1:]
+            return env_entry[len(variable) + 1 :]
     raise KeyError(variable)


 def _get_pip_package_version(container, package: str) -> str:
     VERSION_PREFIX = "Version: "
     package_info = DockerRunner.run_simple_command(
-        container,
-        cmd=f"pip show {package}",
-        print_result=False
+        container, cmd=f"pip show {package}", print_result=False
     )
     version_line = package_info.split("\n")[1]
     assert version_line.startswith(VERSION_PREFIX)
-    return version_line[len(VERSION_PREFIX):]
+    return version_line[len(VERSION_PREFIX) :]


 class TaggerInterface:
     """Common interface for all taggers"""

     @staticmethod
     def tag_value(container) -> str:
         raise NotImplementedError
@@ -52,7 +49,9 @@ class SHATagger(TaggerInterface):
 class UbuntuVersionTagger(TaggerInterface):
     @staticmethod
     def tag_value(container) -> str:
-        os_release = DockerRunner.run_simple_command(container, "cat /etc/os-release").split("\n")
+        os_release = DockerRunner.run_simple_command(
+            container, "cat /etc/os-release"
+        ).split("\n")
         for line in os_release:
             if line.startswith("VERSION_ID"):
                 return "ubuntu-" + line.split("=")[1].strip('"')

View File

@@ -3,5 +3,5 @@
 import tensorflow as tf

-print(tf.constant('Hello, TensorFlow'))
+print(tf.constant("Hello, TensorFlow"))
 print(tf.reduce_sum(tf.random.normal([1000, 1000])))

View File

@@ -34,8 +34,7 @@ LOGGER = logging.getLogger(__name__)


 class CondaPackageHelper:
-    """Conda package helper permitting to get information about packages
-    """
+    """Conda package helper permitting to get information about packages"""

     def __init__(self, container):
         # if isinstance(container, TrackedContainer):
@@ -50,8 +49,7 @@ class CondaPackageHelper:
         """Start the TrackedContainer and return an instance of a running container"""
         LOGGER.info(f"Starting container {container.image_name} ...")
         return container.run(
-            tty=True,
-            command=["start.sh", "bash", "-c", "sleep infinity"]
+            tty=True, command=["start.sh", "bash", "-c", "sleep infinity"]
         )

     @staticmethod
@@ -76,7 +74,9 @@ class CondaPackageHelper:
         if self.specs is None:
             LOGGER.info("Grabing the list of specifications ...")
             self.specs = CondaPackageHelper._packages_from_json(
-                self._execute_command(CondaPackageHelper._conda_export_command(from_history=True))
+                self._execute_command(
+                    CondaPackageHelper._conda_export_command(from_history=True)
+                )
             )
         return self.specs
@@ -112,9 +112,7 @@ class CondaPackageHelper:
     def available_packages(self):
         """Return the available packages"""
         if self.available is None:
-            LOGGER.info(
-                "Grabing the list of available packages (can take a while) ..."
-            )
+            LOGGER.info("Grabing the list of available packages (can take a while) ...")
             # Keeping command line output since `conda search --outdated --json` is way too long ...
             self.available = CondaPackageHelper._extract_available(
                 self._execute_command(["conda", "search", "--outdated"])
@@ -146,10 +144,9 @@ class CondaPackageHelper:
             current = min(inst_vs, key=CondaPackageHelper.semantic_cmp)
             newest = avail_vs[-1]
             if avail_vs and current != newest:
-                if (
-                    CondaPackageHelper.semantic_cmp(current) <
-                    CondaPackageHelper.semantic_cmp(newest)
-                ):
+                if CondaPackageHelper.semantic_cmp(
+                    current
+                ) < CondaPackageHelper.semantic_cmp(newest):
                     self.comparison.append(
                         {"Package": pkg, "Current": current, "Newest": newest}
                     )
@@ -162,6 +159,7 @@ class CondaPackageHelper:
         def mysplit(string):
             def version_substrs(x):
                 return re.findall(r"([A-z]+|\d+)", x)
+
             return list(chain(map(version_substrs, string.split("."))))

         def str_ord(string):

View File

@@ -68,7 +68,7 @@ EXCLUDED_PACKAGES = [
     "protobuf",
     "r-irkernel",
     "unixodbc",
-    "bzip2"
+    "bzip2",
 ]
@@ -133,8 +133,9 @@ def _import_packages(package_helper, filtered_packages, check_function, max_fail
     for package in filtered_packages:
         LOGGER.info(f"Trying to import {package}")
         try:
-            assert check_function(package_helper, package) == 0, \
-                f"Package [{package}] import failed"
+            assert (
+                check_function(package_helper, package) == 0
+            ), f"Package [{package}] import failed"
         except AssertionError as err:
             failures[package] = err
             if len(failures) > max_failures:

View File

@@ -12,7 +12,7 @@ def test_units(container):
     """Various units tests
     Add a py file in the {image}/test/units dir and it will be automatically tested
    """
-    short_image_name = container.image_name[container.image_name.rfind('/') + 1:]
+    short_image_name = container.image_name[container.image_name.rfind("/") + 1 :]
     host_data_dir = os.path.join(THIS_DIR, f"../{short_image_name}/test/units")
     LOGGER.info(f"Searching for units tests in {host_data_dir}")
     cont_data_dir = "/home/jovyan/data"
@@ -27,9 +27,9 @@ def test_units(container):
         c = container.run(
             volumes={host_data_dir: {"bind": cont_data_dir, "mode": "ro"}},
             tty=True,
-            command=['start.sh', 'python', f'{cont_data_dir}/{test_file}']
+            command=["start.sh", "python", f"{cont_data_dir}/{test_file}"],
         )
         rv = c.wait(timeout=30)
-        logs = c.logs(stdout=True).decode('utf-8')
+        logs = c.logs(stdout=True).decode("utf-8")
         LOGGER.debug(logs)
         assert rv == 0 or rv["StatusCode"] == 0