Commit 981c3683 authored by Ayaz Salikhov's avatar Ayaz Salikhov

Merge branch 'master' into asalikhov/remove_py2

parents 7b434396 95ccda36
[flake8]
# W605/W503/W504: escape-sequence and line-break-around-operator warnings;
# Hxxx: OpenStack hacking-style checks not enforced in this project.
ignore = W605,W503,W504,H306,H238,H301,H202
max-line-length = 120
# Continuation values must be indented, or flake8 will not associate
# them with the per-file-ignores option.
per-file-ignores =
    test/test_packages.py:E501
---
# CI workflow: lint, build, test, and (on master) publish the Docker
# images plus the build-manifest wiki pages.
name: Build Docker Images
on:
  pull_request:
    paths-ignore:
      - "*.md"
      - "binder/**"
      - "docs/**"
      - "examples/**"
  push:
    branches:
      - master
    paths-ignore:
      - "*.md"
      - "binder/**"
      - "docs/**"
      - "examples/**"

jobs:
  build:
    name: Build Docker Images
    runs-on: ubuntu-latest
    # Skip the whole job when the commit message or PR title opts out of CI.
    if: >
      !contains(github.event.head_commit.message, 'ci skip') &&
      !contains(github.event.pull_request.title, 'ci skip')
    steps:
      - name: Clone Main Repo
        uses: actions/checkout@v2
        with:
          path: main
      - name: Clone Wiki
        uses: actions/checkout@v2
        with:
          repository: ${{github.repository}}.wiki
          path: wiki
      - name: Set Up Python
        uses: actions/setup-python@v2
        with:
          # Quoted so YAML does not read the version as a float.
          python-version: "3.8"
      - name: Install Dev Dependencies
        run: |
          python -m pip install --upgrade pip
          make -C main dev-env hadolint-install
      - name: Lint Dockerfiles
        run: make -C main hadolint-all
      - name: Run pre-commit hooks
        run: make -C main pre-commit-all
      - name: Build Docker Images
        run: make -C main build-test-all
      - name: Run Post-Build Hooks
        run: make -C main hook-all
        env:
          COMMIT_MSG: "${{github.event.head_commit.message}}"
          WIKI_PATH: ../wiki
      - name: Login to Docker Hub
        if: github.ref == 'refs/heads/master'
        run: >
          echo '${{secrets.DOCKERHUB_PASSWORD}}' | docker login --username
          '${{secrets.DOCKERHUB_USERNAME}}' --password-stdin
      - name: Push Images to DockerHub
        if: github.ref == 'refs/heads/master'
        run: make -C main push-all
      - name: Push Wiki to GitHub
        if: github.ref == 'refs/heads/master'
        # Pass GITHUB_REPOSITORY directly to avoid conflict with GitHub Actions built-in env var
        run: make -C main git-commit GITHUB_REPOSITORY='${{ github.repository }}.wiki'
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          LOCAL_PATH: ../wiki
---
# CI workflow: build the Sphinx docs; on master, also regenerate the
# English translation source strings and push them back to the repo.
name: Build Sphinx Documentation
on:
  pull_request:
    paths:
      - "docs/**"
      - ".github/workflows/sphinx.yml"
  push:
    branches:
      - master
    paths:
      - "docs/**"
      - ".github/workflows/sphinx.yml"

jobs:
  build:
    name: Build Sphinx Documentation
    runs-on: ubuntu-latest
    # Skip the whole job when the commit message or PR title opts out of CI.
    if: >
      !contains(github.event.head_commit.message, 'ci skip') &&
      !contains(github.event.pull_request.title, 'ci skip')
    steps:
      - name: Checkout Repo
        uses: actions/checkout@v2
      - name: Set Up Python
        uses: actions/setup-python@v2
        with:
          # Quoted so YAML treats the version spec as a string.
          python-version: "3.x"
      - name: Install Dev Dependencies
        run: |
          python -m pip install --upgrade pip
          make dev-env
      - name: Build Documentation
        run: make docs
      - name: Extract Source Strings
        if: github.ref == 'refs/heads/master'
        working-directory: docs
        run: |
          make gettext
          sphinx-intl update -p _build/gettext -l en
      - name: Push Strings to Master
        if: github.ref == 'refs/heads/master'
        run: make git-commit
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          GITHUB_REPOSITORY: ${{ github.repository }}
          LOCAL_PATH: ./docs/locale/en
---
# pre-commit hook configuration: YAML checks/linting, bash linting,
# and Python lint/format hooks.
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v3.1.0
    hooks:
      - id: check-yaml
        files: .*\.(yaml|yml)$
  - repo: https://github.com/adrienverge/yamllint.git
    rev: v1.23.0
    hooks:
      - id: yamllint
        args: ['-d {extends: relaxed, rules: {line-length: disable}}', '-s']
        files: \.(yaml|yml)$
  - repo: https://github.com/openstack-dev/bashate.git
    rev: 2.0.0
    hooks:
      - id: bashate
        # E006: ignore long-line check for shell scripts.
        args: ['--ignore=E006']
  - repo: https://gitlab.com/pycqa/flake8
    rev: 3.8.3
    hooks:
      - id: flake8
  - repo: https://github.com/pre-commit/mirrors-autopep8
    rev: v1.5.4
    hooks:
      - id: autopep8
---
# Travis CI configuration: three stages — a diff-scoped test for PRs,
# a translation push for master pushes, and a full build for cron runs.
dist: bionic
language: python
python:
  # Quoted so YAML does not read the version as a float.
  - "3.7"
sudo: required
services:
  - docker
jobs:
  include:
    - stage: diff-test
      install:
        - pip install --upgrade pip
        - make dev-env
        - make lint-install
      script:
        - set -e
        - if [ $(make n-docs-diff) -ne 0 ]; then make docs; fi;
        - if [ $(make n-other-diff) -ne 0 ]; then make lint-build-test-all DARGS="--build-arg TEST_ONLY_BUILD=1"; fi;
    - stage: push-tx
      install:
        - pip install --upgrade pip
        - make dev-env
      script:
        - if [ $(make n-docs-diff DIFF_RANGE=$TRAVIS_COMMIT_RANGE) -ne 0 ]; then make tx-en; fi;
    - stage: full-test
      install:
        - pip install --upgrade pip
        - make dev-env
        - make lint-install
      script:
        - set -e
        - make docs
        - make lint-build-test-all DARGS="--build-arg TEST_ONLY_BUILD=1"
stages:
  - name: diff-test
    if: type = pull_request
  - name: push-tx
    if: type = push AND branch = master
  - name: full-test
    if: type = cron AND branch = master
env:
  global:
    # yamllint disable-line
    secure: JDQKgB1laaenzmEGI9gduwK/iS0030qsl62njJg3jib0R8wxBi2OeetEdoGl9m4NFsqqnp0OO7nm4rzGfuwjL1A38flSlXOTxhjm/hvo3vmnqVu5lDcdk9+IRkafnfd3Dyd86tLxRVETOqZwCLmdNkB2fmQII8du5IIqbJuUGp8DrG7kVMf3NBr9rjkZRfByQrgG4s1NXuT61VvpWMPJAOhcrImuHBheVJDEV0U3n6Xavd7Wo+pAHrHU8exvYTZ1IzZMbHc6K0iC/NpCHcH9+9DAeLDk/q1aDNqbTExnQevOHZzNqgHC2qFOlN4jfy/TLYLpLXtUismneBBqVSK3iZso3Vqy2BRXWgouI+Tt+08ffocy9XPwEzSwkgPgDlFVUikPOy5imwjpDb13RMIyMY4CKlSOdQx2rH2kPkZ0MJJPcki3KGuGl3qRvqyblMn+lZvjAu6WVLZfo7EtcxsQ0ZZxbAbGoUVl27FHg+UvIfC0I3wEcZIp7oED47Q8s0MdCijD3AwkRPvx/iyp3J0A42su7kkOooFcmUItEIqegQJ4Aki1FBv2i5vHmBobClktytZceLsKvzCeLjMpL9HcUVfUaJDKRwtUYIozpYeBnac+E6J1s6glcqLrXIHWez8N6SzokBa6SPqdtODdzzk5OJupByub6CYWsRXvxIQ7/wI=
...@@ -4,9 +4,8 @@ ...@@ -4,9 +4,8 @@
# Use bash for inline if-statements in arch_patch target # Use bash for inline if-statements in arch_patch target
SHELL:=bash SHELL:=bash
OWNER:=jupyter
ARCH:=$(shell uname -m) ARCH:=$(shell uname -m)
DIFF_RANGE?=master...HEAD OWNER?=jupyter
# Need to list the images in build dependency order # Need to list the images in build dependency order
ifeq ($(ARCH),ppc64le) ifeq ($(ARCH),ppc64le)
...@@ -24,8 +23,9 @@ endif ...@@ -24,8 +23,9 @@ endif
ALL_IMAGES:=$(ALL_STACKS) ALL_IMAGES:=$(ALL_STACKS)
# Linter # Dockerfile Linter
HADOLINT="${HOME}/hadolint" HADOLINT="${HOME}/hadolint"
HADOLINT_VERSION="v1.18.0"
help: help:
# http://marmelab.com/blog/2016/02/29/auto-documented-makefile.html # http://marmelab.com/blog/2016/02/29/auto-documented-makefile.html
...@@ -74,24 +74,38 @@ dev/%: ## run a foreground container for a stack ...@@ -74,24 +74,38 @@ dev/%: ## run a foreground container for a stack
docker run -it --rm -p $(PORT):8888 $(DARGS) $(OWNER)/$(notdir $@) $(ARGS) docker run -it --rm -p $(PORT):8888 $(DARGS) $(OWNER)/$(notdir $@) $(ARGS)
dev-env: ## install libraries required to build docs and run tests dev-env: ## install libraries required to build docs and run tests
pip install -r requirements-dev.txt @pip install -r requirements-dev.txt
lint/%: ARGS?= docs: ## build HTML documentation
lint/%: ## lint the dockerfile(s) for a stack make -C docs html
@echo "Linting Dockerfiles in $(notdir $@)..."
@git ls-files --exclude='Dockerfile*' --ignored $(notdir $@) | grep -v ppc64 | xargs -L 1 $(HADOLINT) $(ARGS)
@echo "Linting done!"
lint-all: $(foreach I,$(ALL_IMAGES),lint/$(I) ) ## lint all stacks
lint-build-test-all: $(foreach I,$(ALL_IMAGES),lint/$(I) arch_patch/$(I) build/$(I) test/$(I) ) ## lint, build and test all stacks git-commit: LOCAL_PATH?=.
git-commit: GITHUB_SHA?=$(shell git rev-parse HEAD)
lint-install: ## install hadolint git-commit: GITHUB_REPOSITORY?=jupyter/docker-stacks
@echo "Installing hadolint at $(HADOLINT) ..." git-commit: GITHUB_TOKEN?=
@curl -sL -o $(HADOLINT) "https://github.com/hadolint/hadolint/releases/download/v1.18.0/hadolint-$(shell uname -s)-$(shell uname -m)" git-commit: ## commit outstading git changes and push to remote
@chmod 700 $(HADOLINT) @git config --global user.name "GitHub Actions"
@echo "Installation done!" @git config --global user.email "actions@users.noreply.github.com"
@$(HADOLINT) --version
@echo "Publishing outstanding changes in $(LOCAL_PATH) to $(GITHUB_REPOSITORY)"
@cd $(LOCAL_PATH) && \
git remote add publisher https://$(GITHUB_TOKEN)@github.com/$(GITHUB_REPOSITORY).git && \
git checkout master && \
git add -A -- . && \
git commit -m "[ci skip] Automated publish for $(GITHUB_SHA)" || exit 0
@cd $(LOCAL_PATH) && git push -u publisher master
hook/%: export COMMIT_MSG?=$(shell git log -1 --pretty=%B)
hook/%: export GITHUB_SHA?=$(shell git rev-parse HEAD)
hook/%: export WIKI_PATH?=../wiki
hook/%: ## run post-build hooks for an image
BUILD_TIMESTAMP="$$(date -u +%FT%TZ)" \
DOCKER_REPO="$(OWNER)/$(notdir $@)" \
IMAGE_NAME="$(OWNER)/$(notdir $@):latest" \
IMAGE_SHORT_NAME="$(notdir $@)" \
$(SHELL) $(notdir $@)/hooks/run_hook
hook-all: $(foreach I,$(ALL_IMAGES),hook/$(I) ) ## run post-build hooks for all images
img-clean: img-rm-dang img-rm ## clean dangling and jupyter images img-clean: img-rm-dang img-rm ## clean dangling and jupyter images
...@@ -107,20 +121,40 @@ img-rm-dang: ## remove dangling images (tagged None) ...@@ -107,20 +121,40 @@ img-rm-dang: ## remove dangling images (tagged None)
@echo "Removing dangling images ..." @echo "Removing dangling images ..."
-docker rmi --force $(shell docker images -f "dangling=true" -q) 2> /dev/null -docker rmi --force $(shell docker images -f "dangling=true" -q) 2> /dev/null
docs: ## build HTML documentation hadolint/%: ARGS?=
make -C docs html hadolint/%: ## lint the dockerfile(s) for a stack
@echo "Linting Dockerfiles in $(notdir $@)..."
@git ls-files --exclude='Dockerfile*' --ignored $(notdir $@) | grep -v ppc64 | xargs -L 1 $(HADOLINT) $(ARGS)
@echo "Linting done!"
hadolint-all: $(foreach I,$(ALL_IMAGES),hadolint/$(I) ) ## lint all stacks
hadolint-build-test-all: $(foreach I,$(ALL_IMAGES),hadolint/$(I) arch_patch/$(I) build/$(I) test/$(I) ) ## lint, build and test all stacks
n-docs-diff: ## number of docs/ files changed since branch from master hadolint-install: ## install hadolint
@git diff --name-only $(DIFF_RANGE) -- docs/ ':!docs/locale' | wc -l | awk '{print $$1}' @echo "Installing hadolint at $(HADOLINT) ..."
@curl -sL -o $(HADOLINT) "https://github.com/hadolint/hadolint/releases/download/$(HADOLINT_VERSION)/hadolint-$(shell uname -s)-$(shell uname -m)"
@chmod 700 $(HADOLINT)
@echo "Installation done!"
@$(HADOLINT) --version
pre-commit-all: ## run pre-commit hook on all files
@pre-commit run --all-files
n-other-diff: ## number of files outside docs/ changed since branch from master pre-commit-install: ## set up the git hook scripts
@git diff --name-only $(DIFF_RANGE) -- ':!docs/' | wc -l | awk '{print $$1}' @pre-commit --version
@pre-commit install
pull/%: DARGS?= pull/%: DARGS?=
pull/%: ## pull a jupyter image pull/%: ## pull a jupyter image
docker pull $(DARGS) $(OWNER)/$(notdir $@) docker pull $(DARGS) $(OWNER)/$(notdir $@)
push/%: DARGS?=
push/%: ## push all tags for a jupyter image
docker push $(DARGS) $(OWNER)/$(notdir $@)
push-all: $(foreach I,$(ALL_IMAGES),push/$(I) ) ## push all tagged images
run/%: DARGS?= run/%: DARGS?=
run/%: ## run a bash in interactive mode in a stack run/%: ## run a bash in interactive mode in a stack
docker run -it --rm $(DARGS) $(OWNER)/$(notdir $@) $(SHELL) docker run -it --rm $(DARGS) $(OWNER)/$(notdir $@) $(SHELL)
...@@ -129,20 +163,6 @@ run-sudo/%: DARGS?= ...@@ -129,20 +163,6 @@ run-sudo/%: DARGS?=
run-sudo/%: ## run a bash in interactive mode as root in a stack run-sudo/%: ## run a bash in interactive mode as root in a stack
docker run -it --rm -u root $(DARGS) $(OWNER)/$(notdir $@) $(SHELL) docker run -it --rm -u root $(DARGS) $(OWNER)/$(notdir $@) $(SHELL)
tx-en: ## rebuild en locale strings and push to master (req: GH_TOKEN)
@git config --global user.email "travis@travis-ci.org"
@git config --global user.name "Travis CI"
@git checkout master
@make -C docs clean gettext
@cd docs && sphinx-intl update -p _build/gettext -l en
@git add docs/locale/en
@git commit -m "[ci skip] Update en source strings (build: $$TRAVIS_JOB_NUMBER)"
@git remote add origin-tx https://$${GH_TOKEN}@github.com/jupyter/docker-stacks.git
@git push -u origin-tx master
test/%: ## run tests against a stack (only common tests or common tests + specific tests) test/%: ## run tests against a stack (only common tests or common tests + specific tests)
@if [ ! -d "$(notdir $@)/test" ]; then TEST_IMAGE="$(OWNER)/$(notdir $@)" pytest -m "not info" test; \ @if [ ! -d "$(notdir $@)/test" ]; then TEST_IMAGE="$(OWNER)/$(notdir $@)" pytest -m "not info" test; \
else TEST_IMAGE="$(OWNER)/$(notdir $@)" pytest -m "not info" test $(notdir $@)/test; fi else TEST_IMAGE="$(OWNER)/$(notdir $@)" pytest -m "not info" test $(notdir $@)/test; fi
......
...@@ -17,17 +17,17 @@ RUN apt-get update && \ ...@@ -17,17 +17,17 @@ RUN apt-get update && \
fonts-dejavu \ fonts-dejavu \
gfortran \ gfortran \
gcc && \ gcc && \
rm -rf /var/lib/apt/lists/* apt-get clean && rm -rf /var/lib/apt/lists/*
USER $NB_UID USER $NB_UID
# R packages # R packages
RUN conda install --quiet --yes \ RUN conda install --quiet --yes \
'r-base=3.6.3' \ 'r-base=4.0.3' \
'r-ggplot2=3.3*' \ 'r-ggplot2=3.3*' \
'r-irkernel=1.1*' \ 'r-irkernel=1.1*' \
'r-rcurl=1.98*' \ 'r-rcurl=1.98*' \
'r-sparklyr=1.2*' \ 'r-sparklyr=1.4*' \
&& \ && \
conda clean --all -f -y && \ conda clean --all -f -y && \
fix-permissions "${CONDA_DIR}" && \ fix-permissions "${CONDA_DIR}" && \
......
[![docker pulls](https://img.shields.io/docker/pulls/jupyter/all-spark-notebook.svg)](https://hub.docker.com/r/jupyter/all-spark-notebook/) [![docker stars](https://img.shields.io/docker/stars/jupyter/all-spark-notebook.svg)](https://hub.docker.com/r/jupyter/all-spark-notebook/) [![image metadata](https://images.microbadger.com/badges/image/jupyter/all-spark-notebook.svg)](https://microbadger.com/images/jupyter/all-spark-notebook "jupyter/all-spark-notebook image metadata") [![docker pulls](https://img.shields.io/docker/pulls/jupyter/all-spark-notebook.svg)](https://hub.docker.com/r/jupyter/all-spark-notebook/)
[![docker stars](https://img.shields.io/docker/stars/jupyter/all-spark-notebook.svg)](https://hub.docker.com/r/jupyter/all-spark-notebook/)
[![image metadata](https://images.microbadger.com/badges/image/jupyter/all-spark-notebook.svg)](https://microbadger.com/images/jupyter/all-spark-notebook "jupyter/all-spark-notebook image metadata")
# Jupyter Notebook Python, Scala, R, Spark Stack # Jupyter Notebook Python, Scala, R, Spark Stack
Please visit the documentation site for help using and contributing to this image and others. GitHub Actions in the https://github.com/jupyter/docker-stacks project builds and pushes this image
to Docker Hub.
* [Jupyter Docker Stacks on ReadTheDocs](http://jupyter-docker-stacks.readthedocs.io/en/latest/index.html) Please visit the project documentation site for help using and contributing to this image and
* [Selecting an Image :: Core Stacks :: jupyter/all-spark-notebook](http://jupyter-docker-stacks.readthedocs.io/en/latest/using/selecting.html#jupyter-all-spark-notebook) others.
* [Image Specifics :: Apache Spark](http://jupyter-docker-stacks.readthedocs.io/en/latest/using/specifics.html#apache-spark)
- [Jupyter Docker Stacks on ReadTheDocs](http://jupyter-docker-stacks.readthedocs.io/en/latest/index.html)
- [Selecting an Image :: Core Stacks :: jupyter/all-spark-notebook](http://jupyter-docker-stacks.readthedocs.io/en/latest/using/selecting.html#jupyter-all-spark-notebook)
- [Image Specifics :: Apache Spark](http://jupyter-docker-stacks.readthedocs.io/en/latest/using/specifics.html#apache-spark)
INDEX_ROW="|\`${BUILD_TIMESTAMP}\`|\`jupyter/${IMAGE_SHORT_NAME}:${GIT_SHA_TAG}\`|[Git diff](https://github.com/jupyter/docker-stacks/commit/${SOURCE_COMMIT})<br />[Dockerfile](https://github.com/jupyter/docker-stacks/blob/${SOURCE_COMMIT}/${IMAGE_SHORT_NAME}/Dockerfile)<br />[Build manifest](./${IMAGE_SHORT_NAME}-${GIT_SHA_TAG})|"
sed "/|-|/a ${INDEX_ROW}" -i "$INDEX_FILE"
\ No newline at end of file
#!/bin/bash
# Post-build hook: tag and push the freshly built image by short git SHA,
# then render and publish a build manifest page to the project wiki,
# and finally fire any downstream build triggers.
# NOTE(review): relies on env vars SOURCE_COMMIT, IMAGE_NAME, DOCKER_REPO,
# DEPLOY_KEY and NEXT_BUILD_TRIGGERS — presumably set by the CI/build
# environment; confirm against the pipeline configuration.
set -e
# Tag the latest build with the short git sha. Push the tag in addition
# to the "latest" tag already pushed.
GIT_SHA_TAG=${SOURCE_COMMIT:0:12}
docker tag $IMAGE_NAME $DOCKER_REPO:$GIT_SHA_TAG
docker push $DOCKER_REPO:$GIT_SHA_TAG
# Create a working directory.
BUILD_TIMESTAMP=$(date -u +%FT%TZ)
WORKDIR=$(mktemp -d)
GIT_URI="git@github.com:jupyter/docker-stacks.wiki.git"
GIT_SANDBOX="${WORKDIR}/docker-stacks.wiki"
IMAGE_SHORT_NAME=$(basename $DOCKER_REPO)
MANIFEST_FILE="${GIT_SANDBOX}/manifests/${IMAGE_SHORT_NAME}-${GIT_SHA_TAG}.md"
INDEX_FILE="${GIT_SANDBOX}/Home.md"
# Configure git so it can push back to GitHub.
# DEPLOY_KEY is decoded from base64 and loaded into a fresh ssh-agent.
eval $(ssh-agent -s)
ssh-add <(base64 -d <(echo "$DEPLOY_KEY"))
ssh-add -l
git config --global user.email "jupyter@googlegroups.com"
git config --global user.name "Jupyter Docker Stacks"
# Clone the GitHub project wiki.
pushd "$WORKDIR"
git clone "$GIT_URI"
popd
# Render the build manifest template.
# The sourced templates write into MANIFEST_FILE / INDEX_FILE using the
# variables exported above.
mkdir -p $(dirname "$MANIFEST_FILE")
source hooks/manifest.tmpl
source hooks/index.tmpl
# Push the wiki update back to GitHub.
pushd "$GIT_SANDBOX"
git add .
git commit -m "DOC: Build ${MANIFEST_FILE}"
git push -u origin master
popd
# Shutdown the ssh agent for good measure.
ssh-agent -k
# Invoke all downstream build triggers.
# Best-effort only: set +e keeps one failed trigger from failing the hook.
set +e
for url in $(echo $NEXT_BUILD_TRIGGERS | sed "s/,/ /g")
do
curl -X POST $url
done
\ No newline at end of file
#!/bin/bash
set -e
# Apply tags
GIT_SHA_TAG=${GITHUB_SHA:0:12}
docker tag $IMAGE_NAME "$DOCKER_REPO:$GIT_SHA_TAG"
# Update index
INDEX_ROW="|\`${BUILD_TIMESTAMP}\`|\`jupyter/${IMAGE_SHORT_NAME}:${GIT_SHA_TAG}\`|[Git diff](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})<br />[Dockerfile](https://github.com/jupyter/docker-stacks/blob/${GITHUB_SHA}/${IMAGE_SHORT_NAME}/Dockerfile)<br />[Build manifest](./${IMAGE_SHORT_NAME}-${GIT_SHA_TAG})|"
sed "/|-|/a ${INDEX_ROW}" -i "${WIKI_PATH}/Home.md"
# Build manifest
MANIFEST_FILE="${WIKI_PATH}/manifests/${IMAGE_SHORT_NAME}-${GIT_SHA_TAG}.md"
mkdir -p $(dirname "$MANIFEST_FILE")
cat << EOF > "$MANIFEST_FILE" cat << EOF > "$MANIFEST_FILE"
* Build datetime: ${BUILD_TIMESTAMP} * Build datetime: ${BUILD_TIMESTAMP}
* DockerHub build code: ${BUILD_CODE}
* Docker image: ${DOCKER_REPO}:${GIT_SHA_TAG} * Docker image: ${DOCKER_REPO}:${GIT_SHA_TAG}
* Docker image size: $(docker images ${IMAGE_NAME} --format "{{.Size}}") * Docker image size: $(docker images ${IMAGE_NAME} --format "{{.Size}}")
* Git commit SHA: [${SOURCE_COMMIT}](https://github.com/jupyter/docker-stacks/commit/${SOURCE_COMMIT}) * Git commit SHA: [${GITHUB_SHA}](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})
* Git commit message: * Git commit message:
\`\`\` \`\`\`
${COMMIT_MSG} ${COMMIT_MSG}
......
...@@ -21,7 +21,10 @@ def test_nbconvert(container, test_file): ...@@ -21,7 +21,10 @@ def test_nbconvert(container, test_file):
output_dir = "/tmp" output_dir = "/tmp"
timeout_ms = 600 timeout_ms = 600
LOGGER.info(f"Test that {test_file} notebook can be executed ...") LOGGER.info(f"Test that {test_file} notebook can be executed ...")
command = f"jupyter nbconvert --to markdown --ExecutePreprocessor.timeout={timeout_ms} --output-dir {output_dir} --execute {cont_data_dir}/{test_file}.ipynb" command = "jupyter nbconvert --to markdown " + \
f"--ExecutePreprocessor.timeout={timeout_ms} " + \
f"--output-dir {output_dir} " + \
f"--execute {cont_data_dir}/{test_file}.ipynb"
c = container.run( c = container.run(
volumes={host_data_dir: {"bind": cont_data_dir, "mode": "ro"}}, volumes={host_data_dir: {"bind": cont_data_dir, "mode": "ro"}},
tty=True, tty=True,
......
...@@ -4,7 +4,7 @@ ...@@ -4,7 +4,7 @@
# Ubuntu 20.04 (focal) # Ubuntu 20.04 (focal)
# https://hub.docker.com/_/ubuntu/?tab=tags&name=focal # https://hub.docker.com/_/ubuntu/?tab=tags&name=focal
# OS/ARCH: linux/amd64 # OS/ARCH: linux/amd64
ARG ROOT_CONTAINER=ubuntu:focal-20200703@sha256:d5a6519d9f048100123c568eb83f7ef5bfcad69b01424f420f17c932b00dea76 ARG ROOT_CONTAINER=ubuntu:focal-20200925@sha256:2e70e9c81838224b5311970dbf7ed16802fbfe19e7a70b3cbfa3d7522aa285b4
ARG BASE_CONTAINER=$ROOT_CONTAINER ARG BASE_CONTAINER=$ROOT_CONTAINER
FROM $BASE_CONTAINER FROM $BASE_CONTAINER
...@@ -19,6 +19,17 @@ SHELL ["/bin/bash", "-o", "pipefail", "-c"] ...@@ -19,6 +19,17 @@ SHELL ["/bin/bash", "-o", "pipefail", "-c"]
USER root USER root
# Miniconda installation
# Default values can be overridden at build time
# (ARGS are in lower case to distinguish them from ENV)
# Check https://repo.anaconda.com/miniconda/
# Miniconda archive to install
ARG miniconda_version="4.8.3"
# Archive MD5 checksum
ARG miniconda_checksum="d63adf39f2c220950a063e0529d4ff74"
# Conda version that can be different from the archive
ARG conda_version="4.9.0"
# Install all OS dependencies for notebook server that starts but lacks all # Install all OS dependencies for notebook server that starts but lacks all
# features (e.g., download as all possible file formats) # features (e.g., download as all possible file formats)
ENV DEBIAN_FRONTEND noninteractive ENV DEBIAN_FRONTEND noninteractive
...@@ -53,9 +64,12 @@ COPY fix-permissions /usr/local/bin/fix-permissions ...@@ -53,9 +64,12 @@ COPY fix-permissions /usr/local/bin/fix-permissions
RUN chmod a+rx /usr/local/bin/fix-permissions RUN chmod a+rx /usr/local/bin/fix-permissions
# Enable prompt color in the skeleton .bashrc before creating the default NB_USER # Enable prompt color in the skeleton .bashrc before creating the default NB_USER
RUN sed -i 's/^#force_color_prompt=yes/force_color_prompt=yes/' /etc/skel/.bashrc # hadolint ignore=SC2016
RUN sed -i 's/^#force_color_prompt=yes/force_color_prompt=yes/' /etc/skel/.bashrc && \
# Add call to conda init script see https://stackoverflow.com/a/58081608/4413446
echo 'eval "$(command conda shell.bash hook 2> /dev/null)"' >> /etc/skel/.bashrc
# Create NB_USER wtih name jovyan user with UID=1000 and in the 'users' group # Create NB_USER with name jovyan user with UID=1000 and in the 'users' group
# and make sure these dirs are writable by the `users` group. # and make sure these dirs are writable by the `users` group.
RUN echo "auth requisite pam_deny.so" >> /etc/pam.d/su && \ RUN echo "auth requisite pam_deny.so" >> /etc/pam.d/su && \
sed -i.bak -e 's/^%admin/#%admin/' /etc/sudoers && \ sed -i.bak -e 's/^%admin/#%admin/' /etc/sudoers && \
...@@ -76,15 +90,15 @@ RUN mkdir /home/$NB_USER/work && \ ...@@ -76,15 +90,15 @@ RUN mkdir /home/$NB_USER/work && \
fix-permissions /home/$NB_USER fix-permissions /home/$NB_USER
# Install conda as jovyan and check the md5 sum provided on the download site # Install conda as jovyan and check the md5 sum provided on the download site
ENV MINICONDA_VERSION=4.8.3 \ ENV MINICONDA_VERSION="${miniconda_version}" \
MINICONDA_MD5=d63adf39f2c220950a063e0529d4ff74 \ CONDA_VERSION="${conda_version}"
CONDA_VERSION=4.8.3
WORKDIR /tmp WORKDIR /tmp
RUN wget --quiet https://repo.continuum.io/miniconda/Miniconda3-py38_${MINICONDA_VERSION}-Linux-x86_64.sh && \ RUN wget --quiet https://repo.continuum.io/miniconda/Miniconda3-py38_${MINICONDA_VERSION}-Linux-x86_64.sh && \
echo "${MINICONDA_MD5} *Miniconda3-py38_${MINICONDA_VERSION}-Linux-x86_64.sh" | md5sum -c - && \ echo "${miniconda_checksum} *Miniconda3-py38_${MINICONDA_VERSION}-Linux-x86_64.sh" | md5sum -c - && \
/bin/bash Miniconda3-py38_${MINICONDA_VERSION}-Linux-x86_64.sh -f -b -p $CONDA_DIR && \ /bin/bash Miniconda3-py38_${MINICONDA_VERSION}-Linux-x86_64.sh -f -b -p $CONDA_DIR && \
rm Miniconda3-py38_${MINICONDA_VERSION}-Linux-x86_64.sh && \ rm Miniconda3-py38_${MINICONDA_VERSION}-Linux-x86_64.sh && \
# Conda configuration see https://conda.io/projects/conda/en/latest/configuration.html
echo "conda ${CONDA_VERSION}" >> $CONDA_DIR/conda-meta/pinned && \ echo "conda ${CONDA_VERSION}" >> $CONDA_DIR/conda-meta/pinned && \
conda config --system --prepend channels conda-forge && \ conda config --system --prepend channels conda-forge && \
conda config --system --set auto_update_conda false && \ conda config --system --set auto_update_conda false && \
...@@ -92,7 +106,7 @@ RUN wget --quiet https://repo.continuum.io/miniconda/Miniconda3-py38_${MINICONDA ...@@ -92,7 +106,7 @@ RUN wget --quiet https://repo.continuum.io/miniconda/Miniconda3-py38_${MINICONDA
conda config --system --set channel_priority strict && \ conda config --system --set channel_priority strict && \
if [ ! $PYTHON_VERSION = 'default' ]; then conda install --yes python=$PYTHON_VERSION; fi && \ if [ ! $PYTHON_VERSION = 'default' ]; then conda install --yes python=$PYTHON_VERSION; fi && \
conda list python | grep '^python ' | tr -s ' ' | cut -d '.' -f 1,2 | sed 's/$/.*/' >> $CONDA_DIR/conda-meta/pinned && \ conda list python | grep '^python ' | tr -s ' ' | cut -d '.' -f 1,2 | sed 's/$/.*/' >> $CONDA_DIR/conda-meta/pinned && \
conda install --quiet --yes conda && \ conda install --quiet --yes "conda=${CONDA_VERSION}" && \
conda install --quiet --yes pip && \ conda install --quiet --yes pip && \
conda update --all --quiet --yes && \ conda update --all --quiet --yes && \
conda clean --all -f -y && \ conda clean --all -f -y && \
...@@ -114,9 +128,9 @@ RUN conda install --quiet --yes 'tini=0.18.0' && \ ...@@ -114,9 +128,9 @@ RUN conda install --quiet --yes 'tini=0.18.0' && \
# Do all this in a single RUN command to avoid duplicating all of the # Do all this in a single RUN command to avoid duplicating all of the
# files across image layers when the permissions change # files across image layers when the permissions change
RUN conda install --quiet --yes \ RUN conda install --quiet --yes \
'notebook=6.0.3' \ 'notebook=6.1.4' \
'jupyterhub=1.1.0' \ 'jupyterhub=1.1.0' \
'jupyterlab=2.1.5' && \ 'jupyterlab=2.2.8' && \
conda clean --all -f -y && \ conda clean --all -f -y && \
npm cache clean --force && \ npm cache clean --force && \
jupyter notebook --generate-config && \ jupyter notebook --generate-config && \
......
...@@ -107,9 +107,9 @@ RUN conda install --quiet --yes 'tini=0.18.0' && \ ...@@ -107,9 +107,9 @@ RUN conda install --quiet --yes 'tini=0.18.0' && \
# Do all this in a single RUN command to avoid duplicating all of the # Do all this in a single RUN command to avoid duplicating all of the
# files across image layers when the permissions change # files across image layers when the permissions change
RUN conda install --quiet --yes \ RUN conda install --quiet --yes \
'notebook=6.0.3' \ 'notebook=6.1.3' \
'jupyterhub=1.1.0' \ 'jupyterhub=1.1.0' \
'jupyterlab=2.1.1' && \ 'jupyterlab=2.2.5' && \
conda clean --all -f -y && \ conda clean --all -f -y && \
npm cache clean --force && \ npm cache clean --force && \
jupyter notebook --generate-config && \ jupyter notebook --generate-config && \
......
[![docker pulls](https://img.shields.io/docker/pulls/jupyter/base-notebook.svg)](https://hub.docker.com/r/jupyter/base-notebook/) [![docker stars](https://img.shields.io/docker/stars/jupyter/base-notebook.svg)](https://hub.docker.com/r/jupyter/base-notebook/) [![image metadata](https://images.microbadger.com/badges/image/jupyter/base-notebook.svg)](https://microbadger.com/images/jupyter/base-notebook "jupyter/base-notebook image metadata") [![docker pulls](https://img.shields.io/docker/pulls/jupyter/base-notebook.svg)](https://hub.docker.com/r/jupyter/base-notebook/)
[![docker stars](https://img.shields.io/docker/stars/jupyter/base-notebook.svg)](https://hub.docker.com/r/jupyter/base-notebook/)
[![image metadata](https://images.microbadger.com/badges/image/jupyter/base-notebook.svg)](https://microbadger.com/images/jupyter/base-notebook "jupyter/base-notebook image metadata")
# Base Jupyter Notebook Stack # Base Jupyter Notebook Stack
Please visit the documentation site for help using and contributing to this image and others. GitHub Actions in the https://github.com/jupyter/docker-stacks project builds and pushes this image
to Docker Hub.
* [Jupyter Docker Stacks on ReadTheDocs](http://jupyter-docker-stacks.readthedocs.io/en/latest/index.html) Please visit the project documentation site for help using and contributing to this image and
* [Selecting an Image :: Core Stacks :: jupyter/base-notebook](http://jupyter-docker-stacks.readthedocs.io/en/latest/using/selecting.html#jupyter-base-notebook) others.
- [Jupyter Docker Stacks on ReadTheDocs](http://jupyter-docker-stacks.readthedocs.io/en/latest/index.html)
- [Selecting an Image :: Core Stacks :: jupyter/base-notebook](http://jupyter-docker-stacks.readthedocs.io/en/latest/using/selecting.html#jupyter-base-notebook)
...@@ -18,18 +18,18 @@ ...@@ -18,18 +18,18 @@
set -e set -e
for d in "$@"; do for d in "$@"; do
find "$d" \ find "$d" \
! \( \ ! \( \
-group $NB_GID \ -group $NB_GID \
-a -perm -g+rwX \ -a -perm -g+rwX \
\) \ \) \
-exec chgrp $NB_GID {} \; \ -exec chgrp $NB_GID {} \; \
-exec chmod g+rwX {} \; -exec chmod g+rwX {} \;
# setuid,setgid *on directories only* # setuid, setgid *on directories only*
find "$d" \ find "$d" \
\( \ \( \
-type d \ -type d \
-a ! -perm -6000 \ -a ! -perm -6000 \
\) \ \) \
-exec chmod +6000 {} \; -exec chmod +6000 {} \;
done done
INDEX_ROW="|\`${BUILD_TIMESTAMP}\`|\`jupyter/${IMAGE_SHORT_NAME}:${GIT_SHA_TAG}\`<br />\`jupyter/${IMAGE_SHORT_NAME}:${PY_VERSION_TAG}\`<br />\`jupyter/${IMAGE_SHORT_NAME}:${NB_VERSION_TAG}\`<br />\`jupyter/${IMAGE_SHORT_NAME}:${LAB_VERSION_TAG}\`<br />\`jupyter/${IMAGE_SHORT_NAME}:${HUB_VERSION_TAG}\`|[Git diff](https://github.com/jupyter/docker-stacks/commit/${SOURCE_COMMIT})<br />[Dockerfile](https://github.com/jupyter/docker-stacks/blob/${SOURCE_COMMIT}/${IMAGE_SHORT_NAME}/Dockerfile)<br />[Build manifest](./${IMAGE_SHORT_NAME}-${GIT_SHA_TAG})|"
sed "/|-|/a ${INDEX_ROW}" -i "$INDEX_FILE"
\ No newline at end of file
cat << EOF > "$MANIFEST_FILE"
* Build datetime: ${BUILD_TIMESTAMP}
* DockerHub build code: ${BUILD_CODE}
* Docker image: ${DOCKER_REPO}:${GIT_SHA_TAG}
* Docker image size: $(docker images ${IMAGE_NAME} --format "{{.Size}}")
* Git commit SHA: [${SOURCE_COMMIT}](https://github.com/jupyter/docker-stacks/commit/${SOURCE_COMMIT})
* Git commit message:
\`\`\`
${COMMIT_MSG}
\`\`\`
## Python Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} python --version)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda info)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda list)
\`\`\`
## Apt Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} apt list --installed)
\`\`\`
EOF
\ No newline at end of file
#!/bin/bash
# Abort the build (exit non-zero) when the commit message contains a
# CI-skip marker in either of its two accepted spellings.
case "$COMMIT_MSG" in
  *"skip ci"* | *"ci skip"*)
    exit 1
    ;;
esac
\ No newline at end of file
#!/bin/bash
# Push hook: apply all image tags and push them, then render and publish
# a build manifest page to the project wiki, and fire downstream triggers.
# NOTE(review): relies on env vars DOCKER_REPO, GIT_SHA_TAG (set by the
# sourced hooks/apply_tags), DEPLOY_KEY and NEXT_BUILD_TRIGGERS —
# presumably set by the CI/build environment; confirm against the pipeline.
set -e
# Apply and push all tags
source hooks/apply_tags
docker push $DOCKER_REPO
# Create a working directory.
BUILD_TIMESTAMP=$(date -u +%FT%TZ)
WORKDIR=$(mktemp -d)
GIT_URI="git@github.com:jupyter/docker-stacks.wiki.git"
GIT_SANDBOX="${WORKDIR}/docker-stacks.wiki"
IMAGE_SHORT_NAME=$(basename $DOCKER_REPO)
MANIFEST_FILE="${GIT_SANDBOX}/manifests/${IMAGE_SHORT_NAME}-${GIT_SHA_TAG}.md"
INDEX_FILE="${GIT_SANDBOX}/Home.md"
# Configure git so it can push back to GitHub.
# DEPLOY_KEY is decoded from base64 and loaded into a fresh ssh-agent.
eval $(ssh-agent -s)
ssh-add <(base64 -d <(echo "$DEPLOY_KEY"))
ssh-add -l
git config --global user.email "jupyter@googlegroups.com"
git config --global user.name "Jupyter Docker Stacks"
# Clone the GitHub project wiki.
pushd "$WORKDIR"
git clone "$GIT_URI"
popd
# Render the build manifest template.
# The sourced templates write into MANIFEST_FILE / INDEX_FILE using the
# variables exported above.
mkdir -p $(dirname "$MANIFEST_FILE")
source hooks/manifest.tmpl
source hooks/index.tmpl
# Push the wiki update back to GitHub.
pushd "$GIT_SANDBOX"
git add .
git commit -m "DOC: Build ${MANIFEST_FILE}"
git push -u origin master
popd
# Shutdown the ssh agent for good measure.
ssh-agent -k
# Invoke all downstream build triggers.
# Best-effort only: set +e keeps one failed trigger from failing the hook.
set +e
for url in $(echo $NEXT_BUILD_TRIGGERS | sed "s/,/ /g")
do
curl -X POST $url
done
\ No newline at end of file
#!/bin/bash #!/bin/bash
set -e set -e
# Tag the latest build with the short git sha as well as version of key runtimes # Apply tags
# and packages. GIT_SHA_TAG=${GITHUB_SHA:0:12}
GIT_SHA_TAG=${SOURCE_COMMIT:0:12}
docker tag $IMAGE_NAME "$DOCKER_REPO:$GIT_SHA_TAG" docker tag $IMAGE_NAME "$DOCKER_REPO:$GIT_SHA_TAG"
PY_VERSION_TAG="python-$(docker run --rm ${IMAGE_NAME} python --version 2>&1 | awk '{print $2}')" PY_VERSION_TAG="python-$(docker run --rm ${IMAGE_NAME} python --version 2>&1 | awk '{print $2}')"
docker tag $IMAGE_NAME "$DOCKER_REPO:$PY_VERSION_TAG" docker tag $IMAGE_NAME "$DOCKER_REPO:$PY_VERSION_TAG"
...@@ -12,4 +11,43 @@ docker tag $IMAGE_NAME "$DOCKER_REPO:${NB_VERSION_TAG%% }" ...@@ -12,4 +11,43 @@ docker tag $IMAGE_NAME "$DOCKER_REPO:${NB_VERSION_TAG%% }"
LAB_VERSION_TAG="lab-$(docker run --rm -a STDOUT ${IMAGE_NAME} jupyter-lab --version | tr -d '\r')" LAB_VERSION_TAG="lab-$(docker run --rm -a STDOUT ${IMAGE_NAME} jupyter-lab --version | tr -d '\r')"
docker tag $IMAGE_NAME "$DOCKER_REPO:${LAB_VERSION_TAG%%\r}" docker tag $IMAGE_NAME "$DOCKER_REPO:${LAB_VERSION_TAG%%\r}"
HUB_VERSION_TAG="hub-$(docker run --rm -a STDOUT ${IMAGE_NAME} jupyterhub --version | tr -d '\r')" HUB_VERSION_TAG="hub-$(docker run --rm -a STDOUT ${IMAGE_NAME} jupyterhub --version | tr -d '\r')"
docker tag $IMAGE_NAME "$DOCKER_REPO:${HUB_VERSION_TAG%%\r}" docker tag $IMAGE_NAME "$DOCKER_REPO:${HUB_VERSION_TAG%%\r}"
\ No newline at end of file
# Update index
INDEX_FILE="${WIKI_PATH}/Home.md"
INDEX_ROW="|\`${BUILD_TIMESTAMP}\`|\`jupyter/${IMAGE_SHORT_NAME}:${GIT_SHA_TAG}\`<br />\`jupyter/${IMAGE_SHORT_NAME}:${PY_VERSION_TAG}\`<br />\`jupyter/${IMAGE_SHORT_NAME}:${NB_VERSION_TAG}\`<br />\`jupyter/${IMAGE_SHORT_NAME}:${LAB_VERSION_TAG}\`<br />\`jupyter/${IMAGE_SHORT_NAME}:${HUB_VERSION_TAG}\`|[Git diff](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})<br />[Dockerfile](https://github.com/jupyter/docker-stacks/blob/${GITHUB_SHA}/${IMAGE_SHORT_NAME}/Dockerfile)<br />[Build manifest](./${IMAGE_SHORT_NAME}-${GIT_SHA_TAG})|"
sed "/|-|/a ${INDEX_ROW}" -i "$INDEX_FILE"
# Build manifest
MANIFEST_FILE="${WIKI_PATH}/manifests/${IMAGE_SHORT_NAME}-${GIT_SHA_TAG}.md"
mkdir -p $(dirname "$MANIFEST_FILE")
cat << EOF > "$MANIFEST_FILE"
* Build datetime: ${BUILD_TIMESTAMP}
* Docker image: ${DOCKER_REPO}:${GIT_SHA_TAG}
* Docker image size: $(docker images ${IMAGE_NAME} --format "{{.Size}}")
* Git commit SHA: [${GITHUB_SHA}](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})
* Git commit message:
\`\`\`
${COMMIT_MSG}
\`\`\`
## Python Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} python --version)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda info)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda list)
\`\`\`
## Apt Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} apt list --installed)
\`\`\`
EOF
...@@ -7,7 +7,7 @@ import os ...@@ -7,7 +7,7 @@ import os
import errno import errno
import stat import stat
c = get_config() c = get_config() # noqa: F821
c.NotebookApp.ip = '0.0.0.0' c.NotebookApp.ip = '0.0.0.0'
c.NotebookApp.port = 8888 c.NotebookApp.port = 8888
c.NotebookApp.open_browser = False c.NotebookApp.open_browser = False
...@@ -52,4 +52,4 @@ distinguished_name = req_distinguished_name ...@@ -52,4 +52,4 @@ distinguished_name = req_distinguished_name
# Change default umask for all subprocesses of the notebook server if set in # Change default umask for all subprocesses of the notebook server if set in
# the environment # the environment
if 'NB_UMASK' in os.environ: if 'NB_UMASK' in os.environ:
os.umask(int(os.environ['NB_UMASK'], 8)) os.umask(int(os.environ['NB_UMASK'], 8))
\ No newline at end of file
...@@ -80,7 +80,7 @@ if [ $(id -u) == 0 ] ; then ...@@ -80,7 +80,7 @@ if [ $(id -u) == 0 ] ; then
if [ "$NB_UID" != $(id -u $NB_USER) ] || [ "$NB_GID" != $(id -g $NB_USER) ]; then if [ "$NB_UID" != $(id -u $NB_USER) ] || [ "$NB_GID" != $(id -g $NB_USER) ]; then
echo "Set user $NB_USER UID:GID to: $NB_UID:$NB_GID" echo "Set user $NB_USER UID:GID to: $NB_UID:$NB_GID"
if [ "$NB_GID" != $(id -g $NB_USER) ]; then if [ "$NB_GID" != $(id -g $NB_USER) ]; then
groupadd -g $NB_GID -o ${NB_GROUP:-${NB_USER}} groupadd -f -g $NB_GID -o ${NB_GROUP:-${NB_USER}}
fi fi
userdel $NB_USER userdel $NB_USER
useradd --home /home/$NB_USER -u $NB_UID -g $NB_GID -G 100 -l $NB_USER useradd --home /home/$NB_USER -u $NB_UID -g $NB_GID -G 100 -l $NB_USER
......
...@@ -11,11 +11,13 @@ LOGGER = logging.getLogger(__name__) ...@@ -11,11 +11,13 @@ LOGGER = logging.getLogger(__name__)
def test_cli_args(container, http_client): def test_cli_args(container, http_client):
"""Container should respect notebook server command line args """Container should respect notebook server command line args
(e.g., disabling token security)""" (e.g., disabling token security)"""
container.run( c = container.run(
command=['start-notebook.sh', '--NotebookApp.token=""'] command=["start-notebook.sh", "--NotebookApp.token=''"]
) )
resp = http_client.get('http://localhost:8888') resp = http_client.get('http://localhost:8888')
resp.raise_for_status() resp.raise_for_status()
logs = c.logs(stdout=True).decode('utf-8')
LOGGER.debug(logs)
assert 'login_submit' not in resp.text assert 'login_submit' not in resp.text
...@@ -70,23 +72,28 @@ def test_nb_user_change(container): ...@@ -70,23 +72,28 @@ def test_nb_user_change(container):
running_container = container.run( running_container = container.run(
tty=True, tty=True,
user="root", user="root",
environment=[f"NB_USER={nb_user}", environment=[
"CHOWN_HOME=yes"], f"NB_USER={nb_user}",
"CHOWN_HOME=yes"
],
working_dir=f"/home/{nb_user}", working_dir=f"/home/{nb_user}",
command=['start.sh', 'bash', '-c', 'sleep infinity'] command=['start.sh', 'bash', '-c', 'sleep infinity']
) )
# Give the chown time to complete. Use sleep, not wait, because the
# container sleeps forever.
time.sleep(10)
LOGGER.info(f"Checking if the user is changed to {nb_user} by the start script ...") LOGGER.info(f"Checking if the user is changed to {nb_user} by the start script ...")
output = running_container.logs(stdout=True).decode("utf-8") output = running_container.logs(stdout=True).decode("utf-8")
assert f"Set username to: {nb_user}" in output, f"User is not changed to {nb_user}" assert f"Set username to: {nb_user}" in output, f"User is not changed to {nb_user}"
LOGGER.info(f"Checking {nb_user} id ...") LOGGER.info(f"Checking {nb_user} id ...")
command = "id" command = "id"
expected_output = f"uid=1000({nb_user}) gid=100(users) groups=100(users)" expected_output = f"uid=1000({nb_user}) gid=100(users) groups=100(users)"
cmd = running_container.exec_run(command, user=nb_user) cmd = running_container.exec_run(command, user=nb_user)
output = cmd.output.decode("utf-8").strip("\n") output = cmd.output.decode("utf-8").strip("\n")
assert output == expected_output, f"Bad user {output}, expected {expected_output}" assert output == expected_output, f"Bad user {output}, expected {expected_output}"
LOGGER.info(f"Checking if {nb_user} owns his home folder ...") LOGGER.info(f"Checking if {nb_user} owns his home folder ...")
command = f'stat -c "%U %G" /home/{nb_user}/' command = f'stat -c "%U %G" /home/{nb_user}/'
expected_output = f"{nb_user} users" expected_output = f"{nb_user} users"
...@@ -100,10 +107,11 @@ def test_chown_extra(container): ...@@ -100,10 +107,11 @@ def test_chown_extra(container):
c = container.run( c = container.run(
tty=True, tty=True,
user='root', user='root',
environment=['NB_UID=1010', environment=[
'NB_GID=101', 'NB_UID=1010',
'CHOWN_EXTRA=/opt/conda', 'NB_GID=101',
'CHOWN_EXTRA_OPTS=-R', 'CHOWN_EXTRA=/opt/conda',
'CHOWN_EXTRA_OPTS=-R'
], ],
command=['start.sh', 'bash', '-c', 'stat -c \'%n:%u:%g\' /opt/conda/LICENSE.txt'] command=['start.sh', 'bash', '-c', 'stat -c \'%n:%u:%g\' /opt/conda/LICENSE.txt']
) )
...@@ -113,16 +121,18 @@ def test_chown_extra(container): ...@@ -113,16 +121,18 @@ def test_chown_extra(container):
def test_chown_home(container): def test_chown_home(container):
"""Container should change the NB_USER home directory owner and """Container should change the NB_USER home directory owner and
group to the current value of NB_UID and NB_GID.""" group to the current value of NB_UID and NB_GID."""
c = container.run( c = container.run(
tty=True, tty=True,
user='root', user='root',
environment=['CHOWN_HOME=yes', environment=[
'CHOWN_HOME_OPTS=-R', 'CHOWN_HOME=yes',
'CHOWN_HOME_OPTS=-R'
], ],
command=['start.sh', 'bash', '-c', 'chown root:root /home/jovyan && ls -alsh /home'] command=['start.sh', 'bash', '-c', 'chown root:root /home/jovyan && ls -alsh /home']
) )
c.wait(timeout=120)
assert "Changing ownership of /home/jovyan to 1000:100 with options '-R'" in c.logs(stdout=True).decode('utf-8') assert "Changing ownership of /home/jovyan to 1000:100 with options '-R'" in c.logs(stdout=True).decode('utf-8')
......
...@@ -3,8 +3,6 @@ ...@@ -3,8 +3,6 @@
import logging import logging
import pytest
LOGGER = logging.getLogger(__name__) LOGGER = logging.getLogger(__name__)
...@@ -17,4 +15,3 @@ def test_pandoc(container): ...@@ -17,4 +15,3 @@ def test_pandoc(container):
logs = c.logs(stdout=True).decode("utf-8") logs = c.logs(stdout=True).decode("utf-8")
LOGGER.debug(logs) LOGGER.debug(logs)
assert "<p><strong>BOLD</strong></p>" in logs assert "<p><strong>BOLD</strong></p>" in logs
...@@ -13,6 +13,7 @@ from requests.adapters import HTTPAdapter ...@@ -13,6 +13,7 @@ from requests.adapters import HTTPAdapter
LOGGER = logging.getLogger(__name__) LOGGER = logging.getLogger(__name__)
@pytest.fixture(scope='session') @pytest.fixture(scope='session')
def http_client(): def http_client():
"""Requests session with retries and backoff.""" """Requests session with retries and backoff."""
...@@ -48,6 +49,7 @@ class TrackedContainer(object): ...@@ -48,6 +49,7 @@ class TrackedContainer(object):
**kwargs: dict, optional **kwargs: dict, optional
Default keyword arguments to pass to docker.DockerClient.containers.run Default keyword arguments to pass to docker.DockerClient.containers.run
""" """
def __init__(self, docker_client, image_name, **kwargs): def __init__(self, docker_client, image_name, **kwargs):
self.container = None self.container = None
self.docker_client = docker_client self.docker_client = docker_client
...@@ -78,7 +80,7 @@ class TrackedContainer(object): ...@@ -78,7 +80,7 @@ class TrackedContainer(object):
LOGGER.info(f"Running {self.image_name} with args {all_kwargs} ...") LOGGER.info(f"Running {self.image_name} with args {all_kwargs} ...")
self.container = self.docker_client.containers.run(self.image_name, **all_kwargs) self.container = self.docker_client.containers.run(self.image_name, **all_kwargs)
return self.container return self.container
def remove(self): def remove(self):
"""Kills and removes the tracked docker container.""" """Kills and removes the tracked docker container."""
if self.container: if self.container:
......
...@@ -14,26 +14,34 @@ SHELL ["/bin/bash", "-o", "pipefail", "-c"] ...@@ -14,26 +14,34 @@ SHELL ["/bin/bash", "-o", "pipefail", "-c"]
USER root USER root
# Julia installation
# Default values can be overridden at build time
# (ARGS are in lower case to distinguish them from ENV)
# Check https://julialang.org/downloads/
ARG julia_version="1.5.2"
# SHA256 checksum
ARG julia_checksum="6da704fadcefa39725503e4c7a9cfa1a570ba8a647c4bd8de69a118f43584630"
# R pre-requisites # R pre-requisites
RUN apt-get update && \ RUN apt-get update && \
apt-get install -y --no-install-recommends \ apt-get install -y --no-install-recommends \
fonts-dejavu \ fonts-dejavu \
gfortran \ gfortran \
gcc && \ gcc && \
rm -rf /var/lib/apt/lists/* apt-get clean && rm -rf /var/lib/apt/lists/*
# Julia dependencies # Julia dependencies
# install Julia packages in /opt/julia instead of $HOME # install Julia packages in /opt/julia instead of $HOME
ENV JULIA_DEPOT_PATH=/opt/julia ENV JULIA_DEPOT_PATH=/opt/julia \
ENV JULIA_PKGDIR=/opt/julia JULIA_PKGDIR=/opt/julia \
ENV JULIA_VERSION=1.4.1 JULIA_VERSION="${julia_version}"
WORKDIR /tmp WORKDIR /tmp
# hadolint ignore=SC2046 # hadolint ignore=SC2046
RUN mkdir "/opt/julia-${JULIA_VERSION}" && \ RUN mkdir "/opt/julia-${JULIA_VERSION}" && \
wget -q https://julialang-s3.julialang.org/bin/linux/x64/$(echo "${JULIA_VERSION}" | cut -d. -f 1,2)"/julia-${JULIA_VERSION}-linux-x86_64.tar.gz" && \ wget -q https://julialang-s3.julialang.org/bin/linux/x64/$(echo "${JULIA_VERSION}" | cut -d. -f 1,2)"/julia-${JULIA_VERSION}-linux-x86_64.tar.gz" && \
echo "fd6d8cadaed678174c3caefb92207a3b0e8da9f926af6703fb4d1e4e4f50610a *julia-${JULIA_VERSION}-linux-x86_64.tar.gz" | sha256sum -c - && \ echo "${julia_checksum} *julia-${JULIA_VERSION}-linux-x86_64.tar.gz" | sha256sum -c - && \
tar xzf "julia-${JULIA_VERSION}-linux-x86_64.tar.gz" -C "/opt/julia-${JULIA_VERSION}" --strip-components=1 && \ tar xzf "julia-${JULIA_VERSION}-linux-x86_64.tar.gz" -C "/opt/julia-${JULIA_VERSION}" --strip-components=1 && \
rm "/tmp/julia-${JULIA_VERSION}-linux-x86_64.tar.gz" rm "/tmp/julia-${JULIA_VERSION}-linux-x86_64.tar.gz"
RUN ln -fs /opt/julia-*/bin/julia /usr/local/bin/julia RUN ln -fs /opt/julia-*/bin/julia /usr/local/bin/julia
...@@ -50,26 +58,23 @@ USER $NB_UID ...@@ -50,26 +58,23 @@ USER $NB_UID
# R packages including IRKernel which gets installed globally. # R packages including IRKernel which gets installed globally.
RUN conda install --quiet --yes \ RUN conda install --quiet --yes \
'r-base=3.6.3' \ 'r-base=4.0.3' \
'r-caret=6.0*' \ 'r-caret=6.0*' \
'r-crayon=1.3*' \ 'r-crayon=1.3*' \
'r-devtools=2.3*' \ 'r-devtools=2.3*' \
'r-forecast=8.12*' \ 'r-forecast=8.13*' \
'r-hexbin=1.28*' \ 'r-hexbin=1.28*' \
'r-htmltools=0.4*' \ 'r-htmltools=0.5*' \
'r-htmlwidgets=1.5*' \ 'r-htmlwidgets=1.5*' \
'r-irkernel=1.1*' \ 'r-irkernel=1.1*' \
'r-nycflights13=1.0*' \ 'r-nycflights13=1.0*' \
'r-plyr=1.8*' \
'r-randomforest=4.6*' \ 'r-randomforest=4.6*' \
'r-rcurl=1.98*' \ 'r-rcurl=1.98*' \
'r-reshape2=1.4*' \ 'r-rmarkdown=2.4*' \
'r-rmarkdown=2.1*' \
'r-rsqlite=2.2*' \ 'r-rsqlite=2.2*' \
'r-shiny=1.4*' \ 'r-shiny=1.5*' \
'r-tidyverse=1.3*' \ 'r-tidyverse=1.3*' \
'rpy2=3.1*' \ 'rpy2=3.3*' && \
&& \
conda clean --all -f -y && \ conda clean --all -f -y && \
fix-permissions "${CONDA_DIR}" && \ fix-permissions "${CONDA_DIR}" && \
fix-permissions "/home/${NB_USER}" fix-permissions "/home/${NB_USER}"
......
[![docker pulls](https://img.shields.io/docker/pulls/jupyter/datascience-notebook.svg)](https://hub.docker.com/r/jupyter/datascience-notebook/) [![docker stars](https://img.shields.io/docker/stars/jupyter/datascience-notebook.svg)](https://hub.docker.com/r/jupyter/datascience-notebook/) [![image metadata](https://images.microbadger.com/badges/image/jupyter/datascience-notebook.svg)](https://microbadger.com/images/jupyter/datascience-notebook "jupyter/datascience-notebook image metadata") [![docker pulls](https://img.shields.io/docker/pulls/jupyter/datascience-notebook.svg)](https://hub.docker.com/r/jupyter/datascience-notebook/)
[![docker stars](https://img.shields.io/docker/stars/jupyter/datascience-notebook.svg)](https://hub.docker.com/r/jupyter/datascience-notebook/)
[![image metadata](https://images.microbadger.com/badges/image/jupyter/datascience-notebook.svg)](https://microbadger.com/images/jupyter/datascience-notebook "jupyter/datascience-notebook image metadata")
# Jupyter Notebook Data Science Stack # Jupyter Notebook Data Science Stack
Please visit the documentation site for help using and contributing to this image and others. GitHub Actions in the https://github.com/jupyter/docker-stacks project builds and pushes this image
to Docker Hub.
* [Jupyter Docker Stacks on ReadTheDocs](http://jupyter-docker-stacks.readthedocs.io/en/latest/index.html) Please visit the project documentation site for help using and contributing to this image and
* [Selecting an Image :: Core Stacks :: jupyter/datascience-notebook](http://jupyter-docker-stacks.readthedocs.io/en/latest/using/selecting.html#jupyter-datascience-notebook) others.
- [Jupyter Docker Stacks on ReadTheDocs](http://jupyter-docker-stacks.readthedocs.io/en/latest/index.html)
- [Selecting an Image :: Core Stacks :: jupyter/datascience-notebook](http://jupyter-docker-stacks.readthedocs.io/en/latest/using/selecting.html#jupyter-datascience-notebook)
INDEX_ROW="|\`${BUILD_TIMESTAMP}\`|\`jupyter/${IMAGE_SHORT_NAME}:${GIT_SHA_TAG}\`<br />\`jupyter/${IMAGE_SHORT_NAME}:${PY_VERSION_TAG}\`<br />\`jupyter/${IMAGE_SHORT_NAME}:${JULIA_VERSION_TAG}\`<br />\`jupyter/${IMAGE_SHORT_NAME}:${R_VERSION_TAG}\`<br />\`jupyter/${IMAGE_SHORT_NAME}:${NB_VERSION_TAG}\`<br />\`jupyter/${IMAGE_SHORT_NAME}:${LAB_VERSION_TAG}\`<br />\`jupyter/${IMAGE_SHORT_NAME}:${HUB_VERSION_TAG}\`|[Git diff](https://github.com/jupyter/docker-stacks/commit/${SOURCE_COMMIT})<br />[Dockerfile](https://github.com/jupyter/docker-stacks/blob/${SOURCE_COMMIT}/${IMAGE_SHORT_NAME}/Dockerfile)<br />[Build manifest](./${IMAGE_SHORT_NAME}-${GIT_SHA_TAG})|"
sed "/|-|/a ${INDEX_ROW}" -i "$INDEX_FILE"
\ No newline at end of file
#!/bin/bash
set -e
# Apply and push all tags
source hooks/apply_tags
docker push $DOCKER_REPO
# Tag the latest build with the short git sha. Push the tag in addition
# to the "latest" tag already pushed.
GIT_SHA_TAG=${SOURCE_COMMIT:0:12}
docker tag $IMAGE_NAME $DOCKER_REPO:$GIT_SHA_TAG
docker push $DOCKER_REPO:$GIT_SHA_TAG
# Create a working directory.
BUILD_TIMESTAMP=$(date -u +%FT%TZ)
WORKDIR=$(mktemp -d)
GIT_URI="git@github.com:jupyter/docker-stacks.wiki.git"
GIT_SANDBOX="${WORKDIR}/docker-stacks.wiki"
IMAGE_SHORT_NAME=$(basename $DOCKER_REPO)
MANIFEST_FILE="${GIT_SANDBOX}/manifests/${IMAGE_SHORT_NAME}-${GIT_SHA_TAG}.md"
INDEX_FILE="${GIT_SANDBOX}/Home.md"
# Configure git so it can push back to GitHub.
eval $(ssh-agent -s)
ssh-add <(base64 -d <(echo "$DEPLOY_KEY"))
ssh-add -l
git config --global user.email "jupyter@googlegroups.com"
git config --global user.name "Jupyter Docker Stacks"
# Glone the GitHub project wiki.
pushd "$WORKDIR"
git clone "$GIT_URI"
popd
# Render the build manifest template.
mkdir -p $(dirname "$MANIFEST_FILE")
source hooks/manifest.tmpl
source hooks/index.tmpl
# Push the wiki update back to GitHub.
pushd "$GIT_SANDBOX"
git add .
git commit -m "DOC: Build ${MANIFEST_FILE}"
git push -u origin master
popd
# Shutdown the ssh agent for good measure.
ssh-agent -k
# Invoke all downstream build triggers.
set +e
for url in $(echo $NEXT_BUILD_TRIGGERS | sed "s/,/ /g")
do
curl -X POST $url
done
#!/bin/bash #!/bin/bash
set -e set -e
# Tag the latest build with the short git sha as well as version of key runtimes # Apply tags
# and packages. GIT_SHA_TAG=${GITHUB_SHA:0:12}
GIT_SHA_TAG=${SOURCE_COMMIT:0:12}
docker tag $IMAGE_NAME "$DOCKER_REPO:$GIT_SHA_TAG" docker tag $IMAGE_NAME "$DOCKER_REPO:$GIT_SHA_TAG"
PY_VERSION_TAG="python-$(docker run --rm ${IMAGE_NAME} python --version 2>&1 | awk '{print $2}')" PY_VERSION_TAG="python-$(docker run --rm ${IMAGE_NAME} python --version 2>&1 | awk '{print $2}')"
docker tag $IMAGE_NAME "$DOCKER_REPO:$PY_VERSION_TAG" docker tag $IMAGE_NAME "$DOCKER_REPO:$PY_VERSION_TAG"
...@@ -17,3 +16,63 @@ LAB_VERSION_TAG="lab-$(docker run --rm -a STDOUT ${IMAGE_NAME} jupyter-lab --ver ...@@ -17,3 +16,63 @@ LAB_VERSION_TAG="lab-$(docker run --rm -a STDOUT ${IMAGE_NAME} jupyter-lab --ver
docker tag $IMAGE_NAME "$DOCKER_REPO:${LAB_VERSION_TAG%%\r}" docker tag $IMAGE_NAME "$DOCKER_REPO:${LAB_VERSION_TAG%%\r}"
HUB_VERSION_TAG="hub-$(docker run --rm -a STDOUT ${IMAGE_NAME} jupyterhub --version | tr -d '\r')" HUB_VERSION_TAG="hub-$(docker run --rm -a STDOUT ${IMAGE_NAME} jupyterhub --version | tr -d '\r')"
docker tag $IMAGE_NAME "$DOCKER_REPO:${HUB_VERSION_TAG%%\r}" docker tag $IMAGE_NAME "$DOCKER_REPO:${HUB_VERSION_TAG%%\r}"
# Update index
INDEX_ROW="|\`${BUILD_TIMESTAMP}\`|\`jupyter/${IMAGE_SHORT_NAME}:${GIT_SHA_TAG}\`<br />\`jupyter/${IMAGE_SHORT_NAME}:${PY_VERSION_TAG}\`<br />\`jupyter/${IMAGE_SHORT_NAME}:${JULIA_VERSION_TAG}\`<br />\`jupyter/${IMAGE_SHORT_NAME}:${R_VERSION_TAG}\`<br />\`jupyter/${IMAGE_SHORT_NAME}:${NB_VERSION_TAG}\`<br />\`jupyter/${IMAGE_SHORT_NAME}:${LAB_VERSION_TAG}\`<br />\`jupyter/${IMAGE_SHORT_NAME}:${HUB_VERSION_TAG}\`|[Git diff](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})<br />[Dockerfile](https://github.com/jupyter/docker-stacks/blob/${GITHUB_SHA}/${IMAGE_SHORT_NAME}/Dockerfile)<br />[Build manifest](./${IMAGE_SHORT_NAME}-${GIT_SHA_TAG})|"
sed "/|-|/a ${INDEX_ROW}" -i "${WIKI_PATH}/Home.md"
# Build manifest
MANIFEST_FILE="${WIKI_PATH}/manifests/${IMAGE_SHORT_NAME}-${GIT_SHA_TAG}.md"
mkdir -p $(dirname "$MANIFEST_FILE")
cat << EOF > "$MANIFEST_FILE"
* Build datetime: ${BUILD_TIMESTAMP}
* Docker image: ${DOCKER_REPO}:${GIT_SHA_TAG}
* Docker image size: $(docker images ${IMAGE_NAME} --format "{{.Size}}")
* Git commit SHA: [${GITHUB_SHA}](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})
* Git commit message:
\`\`\`
${COMMIT_MSG}
\`\`\`
## Julia Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} julia -E 'using InteractiveUtils; versioninfo()')
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} julia -E 'import Pkg; Pkg.status()')
\`\`\`
## Python Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} python --version)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda info)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda list)
\`\`\`
## R Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} R --version)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} R --silent -e 'installed.packages(.Library)[, c(1,3)]')
\`\`\`
## Apt Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} apt list --installed)
\`\`\`
EOF
...@@ -2,18 +2,16 @@ ...@@ -2,18 +2,16 @@
# Distributed under the terms of the Modified BSD License. # Distributed under the terms of the Modified BSD License.
import logging import logging
import pytest
LOGGER = logging.getLogger(__name__) LOGGER = logging.getLogger(__name__)
def test_julia(container): def test_julia(container):
"""Basic julia test""" """Basic julia test"""
LOGGER.info(f"Test that julia is correctly installed ...") LOGGER.info("Test that julia is correctly installed ...")
running_container = container.run( running_container = container.run(
tty=True, command=["start.sh", "bash", "-c", "sleep infinity"] tty=True, command=["start.sh", "bash", "-c", "sleep infinity"]
) )
command = f"julia --version" command = "julia --version"
cmd = running_container.exec_run(command) cmd = running_container.exec_run(command)
output = cmd.output.decode("utf-8") output = cmd.output.decode("utf-8")
assert cmd.exit_code == 0, f"Command {command} failed {output}" assert cmd.exit_code == 0, f"Command {command} failed {output}"
......
...@@ -21,11 +21,6 @@ ...@@ -21,11 +21,6 @@
# import sys # import sys
# sys.path.insert(0, os.path.abspath('.')) # sys.path.insert(0, os.path.abspath('.'))
# For conversion from markdown to html
import recommonmark.parser
from recommonmark.transform import AutoStructify
# -- General configuration ------------------------------------------------ # -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here. # If your documentation needs a minimal Sphinx version, state it here.
...@@ -199,4 +194,4 @@ linkcheck_anchors = False ...@@ -199,4 +194,4 @@ linkcheck_anchors = False
# -- Translation ---------------------------------------------------------- # -- Translation ----------------------------------------------------------
gettext_uuid = True gettext_uuid = True
locale_dirs = ['locale/'] locale_dirs = ['locale/']
\ No newline at end of file
# New Features # New Features
Thank you for contributing to the Jupyter Docker Stacks! We review pull requests of new features (e.g., new packages, new scripts, new flags) to balance the value of the images to the Jupyter community with the cost of maintaining the images over time. Thank you for contributing to the Jupyter Docker Stacks! We review pull requests of new features
(e.g., new packages, new scripts, new flags) to balance the value of the images to the Jupyter
community with the cost of maintaining the images over time.
## Suggesting a New Feature ## Suggesting a New Feature
Please follow the process below to suggest a new feature for inclusion in one of the core stacks: Please follow the process below to suggest a new feature for inclusion in one of the core stacks:
1. [Open a GitHub issue](https://github.com/jupyter/docker-stacks/issues) describing the feature you'd like to contribute. 1. [Open a GitHub issue](https://github.com/jupyter/docker-stacks/issues) describing the feature
2. Discuss with the maintainers whether the addition makes sense in [one of the core stacks](../using/selecting.md#Core-Stacks), as a [recipe in the documentation](recipes.md), as a [community stack](stacks.md), or as something else entirely. you'd like to contribute.
2. Discuss with the maintainers whether the addition makes sense in
[one of the core stacks](../using/selecting.md#Core-Stacks), as a
[recipe in the documentation](recipes.md), as a [community stack](stacks.md), or as something
else entirely.
## Selection Criteria ## Selection Criteria
Roughly speaking, we evaluate new features based on the following criteria: Roughly speaking, we evaluate new features based on the following criteria:
* **Usefulness to Jupyter users**: Is the feature generally applicable across domains? Does it work with Jupyter Notebook, JupyterLab, JupyterHub, etc.? - **Usefulness to Jupyter users**: Is the feature generally applicable across domains? Does it work
* **Fit with the image purpose**: Does the feature match the theme of the stack in which it will be added? Would it fit better in a new, community stack? with Jupyter Notebook, JupyterLab, JupyterHub, etc.?
* **Complexity of build / runtime configuration**: How many lines of code does the feature require in one of the Dockerfiles or startup scripts? Does it require new scripts entirely? Do users need to adjust how they use the images? - **Fit with the image purpose**: Does the feature match the theme of the stack in which it will be
* **Impact on image metrics**: How many bytes does the feature and its dependencies add to the image(s)? How many minutes do they add to the build time? added? Would it fit better in a new, community stack?
* **Ability to support the addition**: Can existing maintainers answer user questions and address future build issues? Are the contributors interested in helping with long-term maintenance? Can we write tests to ensure the feature continues to work over time? - **Complexity of build / runtime configuration**: How many lines of code does the feature require
in one of the Dockerfiles or startup scripts? Does it require new scripts entirely? Do users need
to adjust how they use the images?
- **Impact on image metrics**: How many bytes does the feature and its dependencies add to the
image(s)? How many minutes do they add to the build time?
- **Ability to support the addition**: Can existing maintainers answer user questions and address
future build issues? Are the contributors interested in helping with long-term maintenance? Can we
write tests to ensure the feature continues to work over time?
## Submitting a Pull Request ## Submitting a Pull Request
If there's agreement that the feature belongs in one or more of the core stacks: If there's agreement that the feature belongs in one or more of the core stacks:
1. Implement the feature in a local clone of the `jupyter/docker-stacks` project. 1. Implement the feature in a local clone of the `jupyter/docker-stacks` project.
2. Please build the image locally before submitting a pull request. Building the image locally shortens the debugging cycle by taking some load off [Travis CI](http://travis-ci.org/), which graciously provides free build services for open source projects like this one. If you use `make`, call: 2. Please build the image locally before submitting a pull request. Building the image locally
```bash shortens the debugging cycle by taking some load off GitHub Actions, which graciously provide
make build/somestack-notebook free build services for open source projects like this one. If you use `make`, call:
``` ```bash
3. [Submit a pull request](https://github.com/PointCloudLibrary/pcl/wiki/A-step-by-step-guide-on-preparing-and-submitting-a-pull-request) (PR) with your changes. make build/somestack-notebook
4. Watch for Travis to report a build success or failure for your PR on GitHub. ```
3. [Submit a pull request](https://github.com/PointCloudLibrary/pcl/wiki/A-step-by-step-guide-on-preparing-and-submitting-a-pull-request)
(PR) with your changes.
4. Watch for GitHub to report a build success or failure for your PR on GitHub.
5. Discuss changes with the maintainers and address any build issues. 5. Discuss changes with the maintainers and address any build issues.
# Image Lint # Lint
In order to enforce some rules **linters** are used in this project.
Linters can be run either during the **development phase** (by the developer) and during **integration phase** (by Travis).
To integrate and enforce this process in the project lifecycle we are using **git hooks** through [pre-commit][pre-commit].
## Pre-commit hook
### Installation
pre-commit is a Python package that needs to be installed.
This can be achieved by using the generic task used to install all Python development dependencies.
```sh
# Install all development dependencies for the project
$ make dev-env
# It can also be installed directly
$ pip install pre-commit
```
Then the git hooks scripts configured for the project in `.pre-commit-config.yaml` need to be installed in the local git repository.
```sh
$ make pre-commit-install
```
### Run
Now pre-commit (and so configured hooks) will run automatically on `git commit` on each changed file.
However it is also possible to trigger it against all files.
```sh
$ make pre-commit-all
```
## Image Lint
To comply with [Docker best practices][dbp], we are using the [Hadolint][hadolint] tool to analyse each `Dockerfile` . To comply with [Docker best practices][dbp], we are using the [Hadolint][hadolint] tool to analyse each `Dockerfile` .
## Installation ### Installation
There is a specific `make` target to install the linter. There is a specific `make` target to install the linter.
By default `hadolint` will be installed in `${HOME}/hadolint`. By default `hadolint` will be installed in `${HOME}/hadolint`.
```bash ```bash
$ make lint-install $ make hadolint-install
# Installing hadolint at /Users/romain/hadolint ... # Installing hadolint at /Users/romain/hadolint ...
# Installation done! # Installation done!
# Haskell Dockerfile Linter v1.17.6-0-gc918759 # Haskell Dockerfile Linter v1.17.6-0-gc918759
``` ```
## Lint ### Linting
### Per Stack #### Per Stack
The linter can be run per stack. The linter can be run per stack.
```bash ```bash
$ make lint/scipy-notebook $ make hadolint/scipy-notebook
# Linting Dockerfiles in scipy-notebook... # Linting Dockerfiles in scipy-notebook...
# scipy-notebook/Dockerfile:4 DL3006 Always tag the version of an image explicitly # scipy-notebook/Dockerfile:4 DL3006 Always tag the version of an image explicitly
...@@ -34,28 +69,28 @@ $ make lint/scipy-notebook ...@@ -34,28 +69,28 @@ $ make lint/scipy-notebook
# make: *** [lint/scipy-notebook] Error 1 # make: *** [lint/scipy-notebook] Error 1
``` ```
Optionally you can pass arguments to the linter. Optionally you can pass arguments to the hadolint.
```bash ```bash
# Use a different export format # Use a different export format
$ make lint/scipy-notebook ARGS="--format codeclimate" $ make hadolint/scipy-notebook ARGS="--format codeclimate"
``` ```
### All the Stacks #### All the Stacks
The linter can be run against all the stacks. The linter can be run against all the stacks.
```bash ```bash
$ make lint-all $ make hadolint-all
``` ```
## Ignoring Rules ### Ignoring Rules
Sometimes it is necessary to ignore [some rules][rules]. Sometimes it is necessary to ignore [some rules][rules].
The following rules are ignored by default and sor for all images in the `.hadolint.yaml` file. The following rules are ignored by default and sor for all images in the `.hadolint.yaml` file.
- [`DL3006`][DL3006]: We use a specific policy to manage image tags. - [`DL3006`][DL3006]: We use a specific policy to manage image tags.
- `base-notebook` `FROM` clause is fixed but based on an argument (`ARG`). - `base-notebook` `FROM` clause is fixed but based on an argument (`ARG`).
- Building downstream images from (`FROM`) the latest is done on purpose. - Building downstream images from (`FROM`) the latest is done on purpose.
- [`DL3008`][DL3008]: System packages are always updated (`apt-get`) to the latest version. - [`DL3008`][DL3008]: System packages are always updated (`apt-get`) to the latest version.
...@@ -75,4 +110,5 @@ RUN cd /tmp && echo "hello!" ...@@ -75,4 +110,5 @@ RUN cd /tmp && echo "hello!"
[dbp]: https://docs.docker.com/develop/develop-images/dockerfile_best-practices [dbp]: https://docs.docker.com/develop/develop-images/dockerfile_best-practices
[rules]: https://github.com/hadolint/hadolint#rules [rules]: https://github.com/hadolint/hadolint#rules
[DL3006]: https://github.com/hadolint/hadolint/wiki/DL3006 [DL3006]: https://github.com/hadolint/hadolint/wiki/DL3006
[DL3008]: https://github.com/hadolint/hadolint/wiki/DL3008 [DL3008]: https://github.com/hadolint/hadolint/wiki/DL3008
\ No newline at end of file [pre-commit]: https://pre-commit.com/
\ No newline at end of file
# Package Updates # Package Updates
We actively seek pull requests which update packages already included in the project Dockerfiles. This is a great way for first-time contributors to participate in developing the Jupyter Docker Stacks. We actively seek pull requests which update packages already included in the project Dockerfiles.
This is a great way for first-time contributors to participate in developing the Jupyter Docker
Stacks.
Please follow the process below to update a package version: Please follow the process below to update a package version:
1. Locate the Dockerfile containing the library you wish to update (e.g., [base-notebook/Dockerfile](https://github.com/jupyter/docker-stacks/blob/master/base-notebook/Dockerfile), [scipy-notebook/Dockerfile](https://github.com/jupyter/docker-stacks/blob/master/scipy-notebook/Dockerfile)) 1. Locate the Dockerfile containing the library you wish to update (e.g.,
2. Adjust the version number for the package. We prefer to pin the major and minor version number of packages so as to minimize rebuild side-effects when users submit pull requests (PRs). For example, you'll find the Jupyter Notebook package, `notebook`, installed using conda with `notebook=5.4.*`. [base-notebook/Dockerfile](https://github.com/jupyter/docker-stacks/blob/master/base-notebook/Dockerfile),
3. Please build the image locally before submitting a pull request. Building the image locally shortens the debugging cycle by taking some load off [Travis CI](http://travis-ci.org/), which graciously provides free build services for open source projects like this one. If you use `make`, call: [scipy-notebook/Dockerfile](https://github.com/jupyter/docker-stacks/blob/master/scipy-notebook/Dockerfile))
```bash 2. Adjust the version number for the package. We prefer to pin the major and minor version number of
make build/somestack-notebook packages so as to minimize rebuild side-effects when users submit pull requests (PRs). For
``` example, you'll find the Jupyter Notebook package, `notebook`, installed using conda with
4. [Submit a pull request](https://github.com/PointCloudLibrary/pcl/wiki/A-step-by-step-guide-on-preparing-and-submitting-a-pull-request) (PR) with your changes. `notebook=5.4.*`.
5. Watch for Travis to report a build success or failure for your PR on GitHub. 3. Please build the image locally before submitting a pull request. Building the image locally
6. Discuss changes with the maintainers and address any build issues. Version conflicts are the most common problem. You may need to upgrade additional packages to fix build failures. shortens the debugging cycle by taking some load off GitHub Actions, which graciously provide
free build services for open source projects like this one. If you use `make`, call:
```bash
make build/somestack-notebook
```
4. [Submit a pull request](https://github.com/PointCloudLibrary/pcl/wiki/A-step-by-step-guide-on-preparing-and-submitting-a-pull-request)
(PR) with your changes.
5. Watch for GitHub to report a build success or failure for your PR on GitHub.
6. Discuss changes with the maintainers and address any build issues. Version conflicts are the most
common problem. You may need to upgrade additional packages to fix build failures.
## Notes ## Notes
In order to help identify packages that can be updated you can use the following helper tool. In order to help identify packages that can be updated you can use the following helper tool. It
It will list all the packages installed in the `Dockerfile` that can be updated -- dependencies are filtered to focus only on requested packages. will list all the packages installed in the `Dockerfile` that can be updated -- dependencies are
filtered to focus only on requested packages.
```bash ```bash
$ make check-outdated/base-notebook $ make check-outdated/base-notebook
# INFO test_outdated:test_outdated.py:80 3/8 (38%) packages could be updated # INFO test_outdated:test_outdated.py:80 3/8 (38%) packages could be updated
# INFO test_outdated:test_outdated.py:82 # INFO test_outdated:test_outdated.py:82
# Package Current Newest # Package Current Newest
# ---------- --------- -------- # ---------- --------- --------
# conda 4.7.12 4.8.2 # conda 4.7.12 4.8.2
......
# Community Stacks # Community Stacks
We love to see the community create and share new Jupyter Docker images. We've put together a [cookiecutter project](https://github.com/jupyter/cookiecutter-docker-stacks) and the documentation below to help you get started defining, building, and sharing your Jupyter environments in Docker. Following these steps will: We love to see the community create and share new Jupyter Docker images. We've put together a
[cookiecutter project](https://github.com/jupyter/cookiecutter-docker-stacks) and the documentation
1. Setup a project on GitHub containing a Dockerfile based on either the `jupyter/base-notebook` or `jupyter/minimal-notebook` image. below to help you get started defining, building, and sharing your Jupyter environments in Docker.
2. Configure Travis CI to build and test your image when users submit pull requests to your repository. Following these steps will:
1. Set up a project on GitHub containing a Dockerfile based on either the `jupyter/base-notebook` or
`jupyter/minimal-notebook` image.
2. Configure GitHub Actions to build and test your image when users submit pull requests to your
repository.
3. Configure Docker Cloud to build and host your images for others to use. 3. Configure Docker Cloud to build and host your images for others to use.
4. Update the [list of community stacks](../using/selecting.html#community-stacks) in this documentation to include your image. 4. Update the [list of community stacks](../using/selecting.html#community-stacks) in this
documentation to include your image.
This approach mirrors how we build and share the core stack images. Feel free to follow it or pave your own path using alternative services and build tools. This approach mirrors how we build and share the core stack images. Feel free to follow it or pave
your own path using alternative services and build tools.
## Creating a Project ## Creating a Project
...@@ -17,29 +24,31 @@ First, install [cookiecutter](https://github.com/audreyr/cookiecutter) using pip ...@@ -17,29 +24,31 @@ First, install [cookiecutter](https://github.com/audreyr/cookiecutter) using pip
pip install cookiecutter # or conda install cookiecutter pip install cookiecutter # or conda install cookiecutter
``` ```
Run the cookiecutter command pointing to the [jupyter/cookiecutter-docker-stacks](https://github.com/jupyter/cookiecutter-docker-stacks) project on GitHub. Run the cookiecutter command pointing to the
[jupyter/cookiecutter-docker-stacks](https://github.com/jupyter/cookiecutter-docker-stacks) project
on GitHub.
```bash ```bash
cookiecutter https://github.com/jupyter/cookiecutter-docker-stacks.git cookiecutter https://github.com/jupyter/cookiecutter-docker-stacks.git
``` ```
Enter a name for your new stack image. This will serve as both the git repository Enter a name for your new stack image. This will serve as both the git repository name and the part
name and the part of the Docker image name after the slash. of the Docker image name after the slash.
``` ```
stack_name [my-jupyter-stack]: stack_name [my-jupyter-stack]:
``` ```
Enter the user or organization name under which this stack will reside on Enter the user or organization name under which this stack will reside on Docker Cloud / Hub. You
Docker Cloud / Hub. You must have access to manage this Docker Cloud org in must have access to manage this Docker Cloud org in order to push images here and setup automated
order to push images here and setup automated builds. builds.
``` ```
stack_org [my-project]: stack_org [my-project]:
``` ```
Select an image from the jupyter/docker-stacks project that will serve as the Select an image from the jupyter/docker-stacks project that will serve as the base for your new
base for your new image. image.
``` ```
stack_base_image [jupyter/base-notebook]: stack_base_image [jupyter/base-notebook]:
...@@ -65,47 +74,66 @@ git push -u origin master ...@@ -65,47 +74,66 @@ git push -u origin master
## Configuring Travis ## Configuring Travis
Next, link your GitHub project to Travis CI to build your Docker image whenever you or someone else submits a pull request. > NOTE: This section and the cookiecutter template should be updated to describe using GitHub
> Actions now that jupyter/docker-stacks uses that service.
1. Visit [https://docs.travis-ci.com/user/getting-started/#To-get-started-with-Travis-CI](https://docs.travis-ci.com/user/getting-started/#To-get-started-with-Travis-CI) and follow the instructions to add the Travis CI application to your GitHub account.
3. Visit [https://travis-ci.org](https://travis-ci.org). Next, link your GitHub project to Travis CI to build your Docker image whenever you or someone else
4. Click the + symbol at the top of the left sidebar. submits a pull request.
![Travis sidebar with plus button screenshot](../_static/travis-plus-repo.png)
5. Locate your project repository either in your primary user account or in one of the organizations to which you belong. 1. Visit
6. Click the toggle to enable builds for the project repository. [https://docs.travis-ci.com/user/getting-started/#To-get-started-with-Travis-CI](https://docs.travis-ci.com/user/getting-started/#To-get-started-with-Travis-CI)
7. Click the **Settings** button for that repository. and follow the instructions to add the Travis CI application to your GitHub account.
![Travis enable build toggle screenshot](../_static/travis-enable-build.png) 2. Visit [https://travis-ci.org](https://travis-ci.org).
8. Enable **Build only if .travis.yml is present** and **Build pushed pull requests**. 3. Click the + symbol at the top of the left sidebar.
![Travis build settings screenshot](../_static/travis-build-settings.png) ![Travis sidebar with plus button screenshot](../_static/travis-plus-repo.png)
9. Disable **Build pushed branches**. 4. Locate your project repository either in your primary user account or in one of the organizations
to which you belong.
5. Click the toggle to enable builds for the project repository.
6. Click the **Settings** button for that repository.
![Travis enable build toggle screenshot](../_static/travis-enable-build.png)
7. Enable **Build only if .travis.yml is present** and **Build pushed pull requests**.
![Travis build settings screenshot](../_static/travis-build-settings.png)
8. Disable **Build pushed branches**.
## Configuring Docker Cloud ## Configuring Docker Cloud
Now, configure Docker Cloud to build your stack image and push it to Docker Hub repository whenever you merge a GitHub pull request to the master branch of your project. Now, configure Docker Cloud to build your stack image and push it to Docker Hub repository whenever
you merge a GitHub pull request to the master branch of your project.
1. Visit [https://cloud.docker.com/](https://cloud.docker.com/) and login. 1. Visit [https://cloud.docker.com/](https://cloud.docker.com/) and login.
2. Select the account or organization matching the one you entered when prompted with `stack_org` by the cookiecutter. 2. Select the account or organization matching the one you entered when prompted with `stack_org` by
![Docker account selection screenshot](../_static/docker-org-select.png) the cookiecutter. ![Docker account selection screenshot](../_static/docker-org-select.png)
3. Scroll to the bottom of the page and click **Create repository**. 3. Scroll to the bottom of the page and click **Create repository**.
4. Enter the name of the image matching the one you entered when prompted with `stack_name` by the cookiecutter. 4. Enter the name of the image matching the one you entered when prompted with `stack_name` by the
![Docker image name and description screenshot](../_static/docker-repo-name.png) cookiecutter. ![Docker image name and description screenshot](../_static/docker-repo-name.png)
5. Enter a description for your image. 5. Enter a description for your image.
6. Click **GitHub** under the **Build Settings** and follow the prompts to connect your account if it is not already connected. 6. Click **GitHub** under the **Build Settings** and follow the prompts to connect your account if
7. Select the GitHub organization and repository containing your image definition from the dropdowns. it is not already connected.
![Docker from GitHub automated build screenshot](../_static/docker-github-settings.png) 7. Select the GitHub organization and repository containing your image definition from the
dropdowns.
![Docker from GitHub automated build screenshot](../_static/docker-github-settings.png)
8. Click the **Create and Build** button. 8. Click the **Create and Build** button.
## Defining Your Image ## Defining Your Image
Make edits to the Dockerfile in your project to add third-party libraries and configure Jupyter applications. Refer to the Dockerfiles for the core stacks (e.g., [jupyter/datascience-notebook](https://github.com/jupyter/docker-stacks/blob/master/datascience-notebook/Dockerfile)) to get a feel for what's possible and best practices. Make edits to the Dockerfile in your project to add third-party libraries and configure Jupyter
applications. Refer to the Dockerfiles for the core stacks (e.g.,
[jupyter/datascience-notebook](https://github.com/jupyter/docker-stacks/blob/master/datascience-notebook/Dockerfile))
to get a feel for what's possible and best practices.
[Submit pull requests](https://github.com/PointCloudLibrary/pcl/wiki/A-step-by-step-guide-on-preparing-and-submitting-a-pull-request) to your project repository on GitHub. Ensure your image builds properly on Travis before merging to master. Refer to Docker Cloud for builds of your master branch that you can `docker pull`. [Submit pull requests](https://github.com/PointCloudLibrary/pcl/wiki/A-step-by-step-guide-on-preparing-and-submitting-a-pull-request)
to your project repository on GitHub. Ensure your image builds properly on Travis before merging to
master. Refer to Docker Cloud for builds of your master branch that you can `docker pull`.
## Sharing Your Image ## Sharing Your Image
Finally, if you'd like to add a link to your project to this documentation site, please do the following: Finally, if you'd like to add a link to your project to this documentation site, please do the
following:
1. Clone this [jupyter/docker-stacks](https://github.com/jupyter/docker-stacks) GitHub repository. 1. Clone this [jupyter/docker-stacks](https://github.com/jupyter/docker-stacks) GitHub repository.
2. Open the `docs/using/selecting.md` source file and locate the **Community Stacks** section. 2. Open the `docs/using/selecting.md` source file and locate the **Community Stacks** section.
3. Add a bullet with a link to your project and a short description of what your Docker image contains. 3. Add a bullet with a link to your project and a short description of what your Docker image
4. [Submit a pull request](https://github.com/PointCloudLibrary/pcl/wiki/A-step-by-step-guide-on-preparing-and-submitting-a-pull-request) (PR) with your changes. Maintainers will respond and work with you to address any formatting or content issues. contains.
\ No newline at end of file 4. [Submit a pull request](https://github.com/PointCloudLibrary/pcl/wiki/A-step-by-step-guide-on-preparing-and-submitting-a-pull-request)
(PR) with your changes. Maintainers will respond and work with you to address any formatting or
content issues.
# Image Tests # Image Tests
We greatly appreciate pull requests that extend the automated tests that vet the basic functionality of the Docker images. We greatly appreciate pull requests that extend the automated tests that vet the basic functionality
of the Docker images.
## How the Tests Work ## How the Tests Work
Travis executes `make build-test-all` against pull requests submitted to the `jupyter/docker-stacks` repository. This `make` command builds every docker image. After building each image, the `make` command executes `pytest` to run both image-specific tests like those in [base-notebook/test/](https://github.com/jupyter/docker-stacks/tree/master/base-notebook/test) and common tests defined in [test/](https://github.com/jupyter/docker-stacks/tree/master/test). Both kinds of tests make use of global [pytest fixtures](https://docs.pytest.org/en/latest/fixture.html) defined in the [conftest.py](https://github.com/jupyter/docker-stacks/blob/master/conftest.py) file at the root of the projects. GitHub executes `make build-test-all` against pull requests submitted to the `jupyter/docker-stacks`
repository. This `make` command builds every docker image. After building each image, the `make`
command executes `pytest` to run both image-specific tests like those in
[base-notebook/test/](https://github.com/jupyter/docker-stacks/tree/master/base-notebook/test) and
common tests defined in [test/](https://github.com/jupyter/docker-stacks/tree/master/test). Both
kinds of tests make use of global [pytest fixtures](https://docs.pytest.org/en/latest/fixture.html)
defined in the [conftest.py](https://github.com/jupyter/docker-stacks/blob/master/conftest.py) file
at the root of the project.
## Contributing New Tests ## Contributing New Tests
Please follow the process below to add new tests: Please follow the process below to add new tests:
1. If the test should run against every image built, add your test code to one of the modules in [test/](https://github.com/jupyter/docker-stacks/tree/master/test) or create a new module. 1. If the test should run against every image built, add your test code to one of the modules in
2. If your test should run against a single image, add your test code to one of the modules in `some-notebook/test/` or create a new module. [test/](https://github.com/jupyter/docker-stacks/tree/master/test) or create a new module.
2. If your test should run against a single image, add your test code to one of the modules in
`some-notebook/test/` or create a new module.
3. Build one or more images you intend to test and run the tests locally. If you use `make`, call: 3. Build one or more images you intend to test and run the tests locally. If you use `make`, call:
```bash ```bash
make build/somestack-notebook make build/somestack-notebook
make test/somestack-notebook make test/somestack-notebook
``` ```
4. [Submit a pull request](https://github.com/PointCloudLibrary/pcl/wiki/A-step-by-step-guide-on-preparing-and-submitting-a-pull-request) (PR) with your changes. 4. [Submit a pull request](https://github.com/PointCloudLibrary/pcl/wiki/A-step-by-step-guide-on-preparing-and-submitting-a-pull-request)
5. Watch for Travis to report a build success or failure for your PR on GitHub. (PR) with your changes.
6. Discuss changes with the maintainers and address any issues running the tests on Travis. 5. Watch for GitHub to report a build success or failure for your PR on GitHub.
6. Discuss changes with the maintainers and address any issues running the tests on GitHub.
...@@ -9,7 +9,7 @@ msgid "" ...@@ -9,7 +9,7 @@ msgid ""
msgstr "" msgstr ""
"Project-Id-Version: docker-stacks latest\n" "Project-Id-Version: docker-stacks latest\n"
"Report-Msgid-Bugs-To: \n" "Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2020-06-13 17:24+0000\n" "POT-Creation-Date: 2020-10-12 12:11+0000\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n" "Language-Team: LANGUAGE <LL@li.org>\n"
...@@ -18,12 +18,12 @@ msgstr "" ...@@ -18,12 +18,12 @@ msgstr ""
"Content-Transfer-Encoding: 8bit\n" "Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel 2.8.0\n" "Generated-By: Babel 2.8.0\n"
#: ../../contributing/features.md:1 73568dd40cf8400f866860c92fa9a585 #: ../../contributing/features.md:1 87308dbcd6784ce9ad29d2305cea02f3
msgid "# New Features" msgid "# New Features"
msgstr "" msgstr ""
# 64c3ecc68ada47afada78f945253c9e9 # 64c3ecc68ada47afada78f945253c9e9
#: ../../contributing/features.md:3 68122b604e2e42c6ac8b339086c87100 #: ../../contributing/features.md:3 b8fb9393c2a445e5912f497253c98316
msgid "" msgid ""
"Thank you for contributing to the Jupyter Docker Stacks! We review pull " "Thank you for contributing to the Jupyter Docker Stacks! We review pull "
"requests of new features (e.g., new packages, new scripts, new flags) to " "requests of new features (e.g., new packages, new scripts, new flags) to "
...@@ -31,24 +31,24 @@ msgid "" ...@@ -31,24 +31,24 @@ msgid ""
" maintaining the images over time." " maintaining the images over time."
msgstr "" msgstr ""
#: ../../contributing/features.md:5 12333534a174461da43d636f2b91508e #: ../../contributing/features.md:7 78c4a61c35ef4459805bf39791465684
msgid "## Suggesting a New Feature" msgid "## Suggesting a New Feature"
msgstr "" msgstr ""
# c995f8cabb1d4b4fb53a9c56ae8e017b # c995f8cabb1d4b4fb53a9c56ae8e017b
#: ../../contributing/features.md:7 8cfae9246e674e638795557766880b19 #: ../../contributing/features.md:9 7b46b9181cd4467c979fd5be70706250
msgid "" msgid ""
"Please follow the process below to suggest a new feature for inclusion in" "Please follow the process below to suggest a new feature for inclusion in"
" one of the core stacks:" " one of the core stacks:"
msgstr "" msgstr ""
#: ../../contributing/features.md:9 4570b8972003443dbf43728a75f174f7 #: ../../contributing/features.md:11 00d28bc039ce4092bc6aca41a8bc553a
msgid "" msgid ""
"[Open a GitHub issue](https://github.com/jupyter/docker-stacks/issues) " "[Open a GitHub issue](https://github.com/jupyter/docker-stacks/issues) "
"describing the feature you'd like to contribute." "describing the feature you'd like to contribute."
msgstr "" msgstr ""
#: ../../contributing/features.md:10 5dc9c7f20a014c71b4ab55140024b384 #: ../../contributing/features.md:13 6acfd3ae068e40dcbc16490e90121847
msgid "" msgid ""
"Discuss with the maintainers whether the addition makes sense in [one of " "Discuss with the maintainers whether the addition makes sense in [one of "
"the core stacks](../using/selecting.md#Core-Stacks), as a [recipe in the " "the core stacks](../using/selecting.md#Core-Stacks), as a [recipe in the "
...@@ -56,32 +56,32 @@ msgid "" ...@@ -56,32 +56,32 @@ msgid ""
"something else entirely." "something else entirely."
msgstr "" msgstr ""
#: ../../contributing/features.md:12 bfafd430491f4ea4965b7ea6217b4cd9 #: ../../contributing/features.md:18 e5915de592804c548f5f1973cf6cea1d
msgid "## Selection Criteria" msgid "## Selection Criteria"
msgstr "" msgstr ""
# ca139cf0df684011bdf6f6f68e151796 # ca139cf0df684011bdf6f6f68e151796
#: ../../contributing/features.md:14 f59a6f3d31164e3d928287667e10a7fe #: ../../contributing/features.md:20 86f6a99b92284fd69be9a52cce713b4c
msgid "" msgid ""
"Roughly speaking, we evaluate new features based on the following " "Roughly speaking, we evaluate new features based on the following "
"criteria:" "criteria:"
msgstr "" msgstr ""
#: ../../contributing/features.md:16 2deadaa24a1340a3a148ca5a6f43c392 #: ../../contributing/features.md:22 8a9346f5b6884bf9b43f0bc57677f819
msgid "" msgid ""
"**Usefulness to Jupyter users**: Is the feature generally applicable " "**Usefulness to Jupyter users**: Is the feature generally applicable "
"across domains? Does it work with Jupyter Notebook, JupyterLab, " "across domains? Does it work with Jupyter Notebook, JupyterLab, "
"JupyterHub, etc.?" "JupyterHub, etc.?"
msgstr "" msgstr ""
#: ../../contributing/features.md:17 57c4f8a0e92842e291265ece9e88e447 #: ../../contributing/features.md:24 cf706a116a7f4026b9ab0b584847a8d7
msgid "" msgid ""
"**Fit with the image purpose**: Does the feature match the theme of the " "**Fit with the image purpose**: Does the feature match the theme of the "
"stack in which it will be added? Would it fit better in a new, community " "stack in which it will be added? Would it fit better in a new, community "
"stack?" "stack?"
msgstr "" msgstr ""
#: ../../contributing/features.md:18 e3819ccbbffa4fc99b3c611690473b20 #: ../../contributing/features.md:26 43fc5585256e48b7ad38dff717f12186
msgid "" msgid ""
"**Complexity of build / runtime configuration**: How many lines of code " "**Complexity of build / runtime configuration**: How many lines of code "
"does the feature require in one of the Dockerfiles or startup scripts? " "does the feature require in one of the Dockerfiles or startup scripts? "
...@@ -89,14 +89,14 @@ msgid "" ...@@ -89,14 +89,14 @@ msgid ""
"use the images?" "use the images?"
msgstr "" msgstr ""
#: ../../contributing/features.md:19 9ce572bde2f7408fb3cdb1c29b8f393f #: ../../contributing/features.md:29 7bcd2a965dcc4be7bfbfd6331d22e98f
msgid "" msgid ""
"**Impact on image metrics**: How many bytes does the feature and its " "**Impact on image metrics**: How many bytes does the feature and its "
"dependencies add to the image(s)? How many minutes do they add to the " "dependencies add to the image(s)? How many minutes do they add to the "
"build time?" "build time?"
msgstr "" msgstr ""
#: ../../contributing/features.md:20 ee75c19fd4084273bf0b05bb26da1a42 #: ../../contributing/features.md:31 5de0b8259ced455ba453570673b08494
msgid "" msgid ""
"**Ability to support the addition**: Can existing maintainers answer user" "**Ability to support the addition**: Can existing maintainers answer user"
" questions and address future build issues? Are the contributors " " questions and address future build issues? Are the contributors "
...@@ -104,71 +104,67 @@ msgid "" ...@@ -104,71 +104,67 @@ msgid ""
"ensure the feature continues to work over time?" "ensure the feature continues to work over time?"
msgstr "" msgstr ""
#: ../../contributing/features.md:22 22881b02fcdd4c43bc280f7a8214dded #: ../../contributing/features.md:35 5c052421f8df46d39c2bc72ebe8e19c5
msgid "## Submitting a Pull Request" msgid "## Submitting a Pull Request"
msgstr "" msgstr ""
# f7ca9b40be90476eb97c8fcd67205e9d # f7ca9b40be90476eb97c8fcd67205e9d
#: ../../contributing/features.md:24 ef4fabecf05c427797009ac7ecc9ffaa #: ../../contributing/features.md:37 2db28556d2cd4caba39a1ff0f849c74c
msgid "" msgid ""
"If there's agreement that the feature belongs in one or more of the core " "If there's agreement that the feature belongs in one or more of the core "
"stacks:" "stacks:"
msgstr "" msgstr ""
#: ../../contributing/features.md:26 0ae6b52d2ae94508af546696d256a045 #: ../../contributing/features.md:39 b061abb656de4eb390e1870f389d7924
msgid "" msgid ""
"Implement the feature in a local clone of the `jupyter/docker-stacks` " "Implement the feature in a local clone of the `jupyter/docker-stacks` "
"project." "project."
msgstr "" msgstr ""
#: ../../contributing/features.md:29 a043243b82c24bb3a0a6ac6fd4eb4ee3 #: ../../contributing/features.md:40 ../../contributing/packages.md:16
#: 55c69137787042128ebd124707ea616b df1ac6d2a26d406281bc9152610fe2af
msgid "" msgid ""
"Please build the image locally before submitting a pull request. Building" "Please build the image locally before submitting a pull request. Building"
" the image locally shortens the debugging cycle by taking some load off " " the image locally shortens the debugging cycle by taking some load off "
"[Travis CI](http://travis-ci.org/), which graciously provides free build " "GitHub Actions, which graciously provide free build services for open "
"services for open source projects like this one. If you use `make`, " "source projects like this one. If you use `make`, call: ```bash make "
"call:" "build/somestack-notebook ```"
msgstr "" msgstr ""
#: ../../contributing/features.md:28 ../../contributing/packages.md:10 #: ../../contributing/features.md:46 ../../contributing/packages.md:22
#: d78d28f63e474115b2f9a07c30192c50 fc7a500fd8944c3b964c5a84c816c430 #: ../../contributing/tests.md:30 363cb4db9e3e4fae9d6efd9a78ec1fcc
msgid "```bash make build/somestack-notebook ```" #: 3f7c28f5163843d780f23e813878c15e 7a5cbed2ef80436bbd09e6fb80a7639c
msgstr ""
#: ../../contributing/features.md:31 ../../contributing/packages.md:13
#: ../../contributing/tests.md:20 00212e32fbc5405cb89e2c1d64747446
#: 0cd21fddaf2744478818394f1db2fee6 200d6bfb805740ca9f26994a2629cf66
msgid "" msgid ""
"[Submit a pull request](https://github.com/PointCloudLibrary/pcl/wiki/A" "[Submit a pull request](https://github.com/PointCloudLibrary/pcl/wiki/A"
"-step-by-step-guide-on-preparing-and-submitting-a-pull-request) (PR) with" "-step-by-step-guide-on-preparing-and-submitting-a-pull-request) (PR) with"
" your changes." " your changes."
msgstr "" msgstr ""
#: ../../contributing/features.md:32 ../../contributing/packages.md:14 #: ../../contributing/features.md:48 ../../contributing/packages.md:24
#: ../../contributing/tests.md:21 01006b1634664420a55ab93140eeba1c #: ../../contributing/tests.md:32 04c2ba3dcf4345cca38686b36398b810
#: 1533f4346ec44d63aeff57798ed43311 ac34ca3def304fa1a7bc186ecb08d362 #: 07bf93330ea14e67bd97f2056875604f f0f9893eb5ce4aa88bb12d27c20f44fb
msgid "" msgid ""
"Watch for Travis to report a build success or failure for your PR on " "Watch for GitHub to report a build success or failure for your PR on "
"GitHub." "GitHub."
msgstr "" msgstr ""
#: ../../contributing/features.md:33 c1f5f6fff58944579bbac57d11eed188 #: ../../contributing/features.md:49 b51493712bb74ac2a4a30320a026a77e
msgid "Discuss changes with the maintainers and address any build issues." msgid "Discuss changes with the maintainers and address any build issues."
msgstr "" msgstr ""
#: ../../contributing/issues.md:1 90d9d26c52694bd39702dbe085548e60 #: ../../contributing/issues.md:1 c00f1dc66c21441b89d7cafd32739dde
msgid "# Project Issues" msgid "# Project Issues"
msgstr "" msgstr ""
# 9c2a6e9f67354e86aca23758676fca43 # 9c2a6e9f67354e86aca23758676fca43
#: ../../contributing/issues.md:3 ce7326457bd24c7284f84626ef7657e2 #: ../../contributing/issues.md:3 31ac1b2de5c047fba3430cfaae215291
msgid "" msgid ""
"We appreciate your taking the time to report an issue you encountered " "We appreciate your taking the time to report an issue you encountered "
"using the Jupyter Docker Stacks. Please review the following guidelines " "using the Jupyter Docker Stacks. Please review the following guidelines "
"when reporting your problem." "when reporting your problem."
msgstr "" msgstr ""
#: ../../contributing/issues.md:7 fd5aba92b5364d0e962a55331aee6659 #: ../../contributing/issues.md:7 a1c98f9ccf7142c89e08ff6dd704f140
msgid "" msgid ""
"If you believe you’ve found a security vulnerability in any of the " "If you believe you’ve found a security vulnerability in any of the "
"Jupyter projects included in Jupyter Docker Stacks images, please report " "Jupyter projects included in Jupyter Docker Stacks images, please report "
...@@ -178,7 +174,7 @@ msgid "" ...@@ -178,7 +174,7 @@ msgid ""
"notebook.readthedocs.io/en/stable/_downloads/ipython_security.asc)." "notebook.readthedocs.io/en/stable/_downloads/ipython_security.asc)."
msgstr "" msgstr ""
#: ../../contributing/issues.md:13 31028151b38b42cc9a502a2e3b899903 #: ../../contributing/issues.md:13 1d8b7ce9769548a6b938a0ad4ba5b9ba
msgid "" msgid ""
"If you think your problem is unique to the Jupyter Docker Stacks images, " "If you think your problem is unique to the Jupyter Docker Stacks images, "
"please search the [jupyter/docker-stacks issue " "please search the [jupyter/docker-stacks issue "
...@@ -189,14 +185,14 @@ msgid "" ...@@ -189,14 +185,14 @@ msgid ""
msgstr "" msgstr ""
# 69a18cc239b34b94800599bf185f58d6 # 69a18cc239b34b94800599bf185f58d6
#: ../../contributing/issues.md:19 3432baed3e664b9981402ba57e82f0d5 #: ../../contributing/issues.md:19 c8c8bdd67a004a13bd2ca6b4e307ff51
msgid "" msgid ""
"If the issue you're seeing is with one of the open source libraries " "If the issue you're seeing is with one of the open source libraries "
"included in the Docker images and is reproducible outside the images, " "included in the Docker images and is reproducible outside the images, "
"please file a bug with the appropriate open source project." "please file a bug with the appropriate open source project."
msgstr "" msgstr ""
#: ../../contributing/issues.md:22 27a1501090a044fea57a9b3d193d1ef9 #: ../../contributing/issues.md:22 974c46b5e11c4a75938e423bfcd88988
msgid "" msgid ""
"If you have a general question about how to use the Jupyter Docker Stacks" "If you have a general question about how to use the Jupyter Docker Stacks"
" in your environment, in conjunction with other tools, with " " in your environment, in conjunction with other tools, with "
...@@ -204,53 +200,109 @@ msgid "" ...@@ -204,53 +200,109 @@ msgid ""
"Discourse site](https://discourse.jupyter.org)." "Discourse site](https://discourse.jupyter.org)."
msgstr "" msgstr ""
#: ../../contributing/lint.md:1 f9f3dd16e01847edbfbfcf9ae2985042 #: ../../contributing/lint.md:1 4396b5f26aaf4ee2a706423a65b9f9ca
msgid "# Image Lint" msgid "# Lint"
msgstr "" msgstr ""
#: ../../contributing/lint.md:3 af0370f47a714f5c8c7d7209b5bb1aec #: ../../contributing/lint.md:3 07df41ff9d704181b26efbca931a2c4c
msgid "" msgid ""
"To comply with [Docker best practices][dbp], we are using the " "In order to enforce some rules **linters** are used in this project. "
"[Hadolint][hadolint] tool to analyse each `Dockerfile` ." "Linters can be run either during the **development phase** (by the "
"developer) and during **integration phase** (by Travis). To integrate and"
" enforce this process in the project lifecycle we are using **git hooks**"
" through [pre-commit][pre-commit]."
msgstr ""
#: ../../contributing/lint.md:7 e33136a6c8114c65a0a4d38180cc3fc8
msgid "## Pre-commit hook"
msgstr ""
#: ../../contributing/lint.md:9 ../../contributing/lint.md:40
#: 1b5448d363b8439b863be35b7f6183a9 1fca689d7de74f9b9457bfd13c6a9d30
msgid "### Installation"
msgstr ""
#: ../../contributing/lint.md:11 cd83a8dc7dd84758832ee0e483b3b66b
msgid ""
"pre-commit is a Python package that needs to be installed. This can be "
"achieved by using the generic task used to install all Python development"
" dependencies."
msgstr ""
#: ../../contributing/lint.md:14 9477502634654897aca51088e8d4787c
msgid ""
"```sh # Install all development dependencies for the project $ make dev-"
"env # It can also be installed directly $ pip install pre-commit ```"
msgstr ""
#: ../../contributing/lint.md:21 46be9c5e331748798064931114c4f70f
msgid ""
"Then the git hooks scripts configured for the project in `.pre-commit-"
"config.yaml` need to be installed in the local git repository."
msgstr ""
#: ../../contributing/lint.md:23 175d9f84c6744b66b3b1910be299d3fa
msgid "```sh $ make pre-commit-install ```"
msgstr ""
#: ../../contributing/lint.md:27 7f1926f94d7948e095690fefb1f93514
msgid "### Run"
msgstr ""
#: ../../contributing/lint.md:29 6ab4f30ae07142fea78d5d1b8b73899f
msgid ""
"Now pre-commit (and so configured hooks) will run automatically on `git "
"commit` on each changed file. However it is also possible to trigger it "
"against all files."
msgstr ""
#: ../../contributing/lint.md:32 bd7182f5c5754fe2b6d98c207ce5c2ef
msgid "```sh $ make pre-commit-all ```"
msgstr ""
#: ../../contributing/lint.md:36 cda6c7ee8c4446449cc3d6e5dbcf87cf
msgid "## Image Lint"
msgstr "" msgstr ""
#: ../../contributing/lint.md:5 c731a817fbe8414fb102cf3a1a25ea96 #: ../../contributing/lint.md:38 4aa54697088342ffa1b405d9ed1ec2fb
msgid "## Installation" msgid ""
"To comply with [Docker best practices][dbp], we are using the "
"[Hadolint][hadolint] tool to analyse each `Dockerfile` ."
msgstr "" msgstr ""
#: ../../contributing/lint.md:7 52bbf3443b1d429f87a3930e4ad0fbc0 #: ../../contributing/lint.md:42 6e41b84dee3347e388c030a714519d6f
msgid "" msgid ""
"There is a specific `make` target to install the linter. By default " "There is a specific `make` target to install the linter. By default "
"`hadolint` will be installed in `${HOME}/hadolint`." "`hadolint` will be installed in `${HOME}/hadolint`."
msgstr "" msgstr ""
#: ../../contributing/lint.md:10 98a0b08d5afe4338aeec1d7699f76a2d #: ../../contributing/lint.md:45 69e40b66f0de4db28efdedc5d9cf8f48
msgid "```bash $ make lint-install" msgid "```bash $ make hadolint-install"
msgstr "" msgstr ""
#: ../../contributing/lint.md:13 1d3fb52955c64b209df424d724c9679c #: ../../contributing/lint.md:48 f6007312e41146d29878e020b62c7053
msgid "" msgid ""
"# Installing hadolint at /Users/romain/hadolint ... # Installation done! " "# Installing hadolint at /Users/romain/hadolint ... # Installation done! "
"# Haskell Dockerfile Linter v1.17.6-0-gc918759 ```" "# Haskell Dockerfile Linter v1.17.6-0-gc918759 ```"
msgstr "" msgstr ""
#: ../../contributing/lint.md:18 507b0e46012d4419948aa6a603086fe3 #: ../../contributing/lint.md:53 da22db6ea37a474aaca1a00c228137e9
msgid "## Lint" msgid "### Linting"
msgstr "" msgstr ""
#: ../../contributing/lint.md:20 da32627dc3ed420bae073e85bd79c406 #: ../../contributing/lint.md:55 f44e656083764cb4af7a770562979466
msgid "### Per Stack" msgid "#### Per Stack"
msgstr "" msgstr ""
#: ../../contributing/lint.md:22 c42134dea1a44a27a508f58af72e698b #: ../../contributing/lint.md:57 c9701b319ef24841bb45cb511ea84ebc
msgid "The linter can be run per stack." msgid "The linter can be run per stack."
msgstr "" msgstr ""
#: ../../contributing/lint.md:24 3394806edf7447b88f7ebd1445e4b259 #: ../../contributing/lint.md:59 fc8640e34a2f4da49c9c64167f689a36
msgid "```bash $ make lint/scipy-notebook" msgid "```bash $ make hadolint/scipy-notebook"
msgstr "" msgstr ""
#: ../../contributing/lint.md:27 8bd8362eeb2a4d26b9f1d79da20841e9 #: ../../contributing/lint.md:62 9a28958a28484776b7134d96d162d925
msgid "" msgid ""
"# Linting Dockerfiles in scipy-notebook... # scipy-notebook/Dockerfile:4 " "# Linting Dockerfiles in scipy-notebook... # scipy-notebook/Dockerfile:4 "
"DL3006 Always tag the version of an image explicitly # scipy-" "DL3006 Always tag the version of an image explicitly # scipy-"
...@@ -264,59 +316,59 @@ msgid "" ...@@ -264,59 +316,59 @@ msgid ""
"splitting. # make: *** [lint/scipy-notebook] Error 1 ```" "splitting. # make: *** [lint/scipy-notebook] Error 1 ```"
msgstr "" msgstr ""
#: ../../contributing/lint.md:37 3274ec37f4224d2b9ab8367292c905dd #: ../../contributing/lint.md:72 50b1ceb50a6e463996fd7ed3b753e888
msgid "Optionally you can pass arguments to the linter." msgid "Optionally you can pass arguments to the hadolint."
msgstr "" msgstr ""
#: ../../contributing/lint.md:39 d4cb9163b89b493a925203d318d9d1ba #: ../../contributing/lint.md:74 082ffcf691e14c45a621507ab3e1c7ba
msgid "" msgid ""
"```bash # Use a different export format $ make lint/scipy-notebook " "```bash # Use a different export format $ make hadolint/scipy-notebook "
"ARGS=\"--format codeclimate\" ```" "ARGS=\"--format codeclimate\" ```"
msgstr "" msgstr ""
#: ../../contributing/lint.md:44 b41fe7b4867749a89539b155925dc619 #: ../../contributing/lint.md:79 8b4b3a3e00d44d4ba01dd2418f2f9bad
msgid "### All the Stacks" msgid "#### All the Stacks"
msgstr "" msgstr ""
#: ../../contributing/lint.md:46 a729e34bfd764f4694567b06dcc084aa #: ../../contributing/lint.md:81 060a24494a864096b8e104741f1f5a12
msgid "The linter can be run against all the stacks." msgid "The linter can be run against all the stacks."
msgstr "" msgstr ""
#: ../../contributing/lint.md:48 c84237491ed046fca7551f92bd147365 #: ../../contributing/lint.md:83 1e2218fbdc2b48ed8012ccfcaf8f784f
msgid "```bash $ make lint-all ```" msgid "```bash $ make hadolint-all ```"
msgstr "" msgstr ""
#: ../../contributing/lint.md:52 50166f6463634b4a90d25e7b96cc4b12 #: ../../contributing/lint.md:87 80a26f7c3dd145698319886f9a6c14a2
msgid "## Ignoring Rules" msgid "### Ignoring Rules"
msgstr "" msgstr ""
#: ../../contributing/lint.md:54 356fc7c100ef4a8fbd748eb27367d356 #: ../../contributing/lint.md:89 8a7febddacee495cb24c522ae9e9fe9e
msgid "" msgid ""
"Sometimes it is necessary to ignore [some rules][rules]. The following " "Sometimes it is necessary to ignore [some rules][rules]. The following "
"rules are ignored by default and sor for all images in the " "rules are ignored by default and sor for all images in the "
"`.hadolint.yaml` file." "`.hadolint.yaml` file."
msgstr "" msgstr ""
#: ../../contributing/lint.md:57 cf97f4a183734b8d9e57b760cfccc627 #: ../../contributing/lint.md:92 e3de5a5a054a43bbab01e7957247fa4c
msgid "" msgid ""
"[`DL3006`][DL3006]: We use a specific policy to manage image tags. - " "[`DL3006`][DL3006]: We use a specific policy to manage image tags. - "
"`base-notebook` `FROM` clause is fixed but based on an argument (`ARG`). " "`base-notebook` `FROM` clause is fixed but based on an argument (`ARG`). "
"- Building downstream images from (`FROM`) the latest is done on purpose." "- Building downstream images from (`FROM`) the latest is done on purpose."
msgstr "" msgstr ""
#: ../../contributing/lint.md:60 70076669dfb94f11b1194fbd7976a584 #: ../../contributing/lint.md:95 5848d2723079460687e8edc3370ce293
msgid "" msgid ""
"[`DL3008`][DL3008]: System packages are always updated (`apt-get`) to the" "[`DL3008`][DL3008]: System packages are always updated (`apt-get`) to the"
" latest version." " latest version."
msgstr "" msgstr ""
#: ../../contributing/lint.md:62 bdaf17200ecb4201b6d147943cf1b021 #: ../../contributing/lint.md:97 5572ba308fc84e77a1d5f86ef9ac609c
msgid "" msgid ""
"For other rules, the preferred way to do it is to flag ignored rules in " "For other rules, the preferred way to do it is to flag ignored rules in "
"the `Dockerfile`." "the `Dockerfile`."
msgstr "" msgstr ""
#: ../../contributing/lint.md:64 9965b559072043e6a66ec2fec7710ad4 #: ../../contributing/lint.md:99 cafb81e77c3642afa8857e0b62db4c39
msgid "" msgid ""
"> It is also possible to ignore rules by using a special comment directly" "> It is also possible to ignore rules by using a special comment directly"
" above the Dockerfile instruction you want to make an exception for. " " above the Dockerfile instruction you want to make an exception for. "
...@@ -324,33 +376,34 @@ msgid "" ...@@ -324,33 +376,34 @@ msgid ""
"example:" "example:"
msgstr "" msgstr ""
#: ../../contributing/lint.md:66 cb3b17c195084c669cae96359e30709f #: ../../contributing/lint.md:101 e0eb3a2b65b446128a063a8057d4157b
msgid "```dockerfile" msgid "```dockerfile"
msgstr "" msgstr ""
#: ../../contributing/lint.md:68 3ca17796e05a437d98882aecce21b7a5 #: ../../contributing/lint.md:103 6db846d111bc4d4d994cd4188e3affa5
msgid "FROM ubuntu" msgid "FROM ubuntu"
msgstr "" msgstr ""
#: ../../contributing/lint.md:70 c0214b6cb2d2487fae29e29815c68a05 #: ../../contributing/lint.md:105 0e37b9ac48b744779a21552c9d0b6738
msgid "# hadolint ignore=DL3003,SC1035 RUN cd /tmp && echo \"hello!\" ```" msgid "# hadolint ignore=DL3003,SC1035 RUN cd /tmp && echo \"hello!\" ```"
msgstr "" msgstr ""
#: ../../contributing/lint.md:74 6d86e357bd26405886819fd0f55bc87c #: ../../contributing/lint.md:109 6f67100d77094c12aac2c5a8dc27decf
msgid "" msgid ""
"[hadolint]: https://github.com/hadolint/hadolint [dbp]: " "[hadolint]: https://github.com/hadolint/hadolint [dbp]: "
"https://docs.docker.com/develop/develop-images/dockerfile_best-practices " "https://docs.docker.com/develop/develop-images/dockerfile_best-practices "
"[rules]: https://github.com/hadolint/hadolint#rules [DL3006]: " "[rules]: https://github.com/hadolint/hadolint#rules [DL3006]: "
"https://github.com/hadolint/hadolint/wiki/DL3006 [DL3008]: " "https://github.com/hadolint/hadolint/wiki/DL3006 [DL3008]: "
"https://github.com/hadolint/hadolint/wiki/DL3008" "https://github.com/hadolint/hadolint/wiki/DL3008 [pre-commit]: https"
"://pre-commit.com/"
msgstr "" msgstr ""
#: ../../contributing/packages.md:1 0e3c0f42f4484000ad6997221929c575 #: ../../contributing/packages.md:1 d68ccfd1ee584ea596fa1b5ef16c4aca
msgid "# Package Updates" msgid "# Package Updates"
msgstr "" msgstr ""
# 5f269a667f9a4c3ca342cfb49ecaefb2 # 5f269a667f9a4c3ca342cfb49ecaefb2
#: ../../contributing/packages.md:3 8686c5370f0b4a4786e58960d4c0c1f8 #: ../../contributing/packages.md:3 cd57b842483b4a9c8f1e2370b8d9f822
msgid "" msgid ""
"We actively seek pull requests which update packages already included in " "We actively seek pull requests which update packages already included in "
"the project Dockerfiles. This is a great way for first-time contributors " "the project Dockerfiles. This is a great way for first-time contributors "
...@@ -358,11 +411,11 @@ msgid "" ...@@ -358,11 +411,11 @@ msgid ""
msgstr "" msgstr ""
# 30d4a79bce8d439d97e6e3555a088548 # 30d4a79bce8d439d97e6e3555a088548
#: ../../contributing/packages.md:5 68187b524c5d418281ff4c0ba2d5ea3c #: ../../contributing/packages.md:7 4938511e6e7d470a9e22b9cfea38b6e2
msgid "Please follow the process below to update a package version:" msgid "Please follow the process below to update a package version:"
msgstr "" msgstr ""
#: ../../contributing/packages.md:7 7629d9afde3d46ca98cbe37c1e7fadad #: ../../contributing/packages.md:9 ce4ef90e4efd450494172e43a222d2a4
msgid "" msgid ""
"Locate the Dockerfile containing the library you wish to update (e.g., " "Locate the Dockerfile containing the library you wish to update (e.g., "
"[base-notebook/Dockerfile](https://github.com/jupyter/docker-" "[base-notebook/Dockerfile](https://github.com/jupyter/docker-"
...@@ -371,7 +424,7 @@ msgid "" ...@@ -371,7 +424,7 @@ msgid ""
"/scipy-notebook/Dockerfile))" "/scipy-notebook/Dockerfile))"
msgstr "" msgstr ""
#: ../../contributing/packages.md:8 209a59a32f7d416395da48ba7deb9ec4 #: ../../contributing/packages.md:12 f71ed60421f24a7db15710d6d958f43d
msgid "" msgid ""
"Adjust the version number for the package. We prefer to pin the major and" "Adjust the version number for the package. We prefer to pin the major and"
" minor version number of packages so as to minimize rebuild side-effects " " minor version number of packages so as to minimize rebuild side-effects "
...@@ -380,26 +433,18 @@ msgid "" ...@@ -380,26 +433,18 @@ msgid ""
"`notebook=5.4.*`." "`notebook=5.4.*`."
msgstr "" msgstr ""
#: ../../contributing/packages.md:11 ae6f768e7d254148afe4f65691363d4c #: ../../contributing/packages.md:25 d9de1265b7c848edacefec29bea6e9c2
msgid ""
"Please build the image locally before submitting a pull request. Building"
" the image locally shortens the debugging cycle by taking some load off "
"[Travis CI](http://travis-ci.org/), which graciously provides free build "
"services for open source projects like this one. If you use `make`, call:"
msgstr ""
#: ../../contributing/packages.md:15 33c70276f8544f4c9ce33f0fee67ae97
msgid "" msgid ""
"Discuss changes with the maintainers and address any build issues. " "Discuss changes with the maintainers and address any build issues. "
"Version conflicts are the most common problem. You may need to upgrade " "Version conflicts are the most common problem. You may need to upgrade "
"additional packages to fix build failures." "additional packages to fix build failures."
msgstr "" msgstr ""
#: ../../contributing/packages.md:17 7cf4452f20ed4bafaf16b523ef383325 #: ../../contributing/packages.md:28 b01b2c58231747018f0b6c2e9604907f
msgid "## Notes" msgid "## Notes"
msgstr "" msgstr ""
#: ../../contributing/packages.md:19 4d3fe02500b7454ab0413148de50f87e #: ../../contributing/packages.md:30 6ec6fa6d7d28432abf5c90acc9d5de93
msgid "" msgid ""
"In order to help identifying packages that can be updated you can use the" "In order to help identifying packages that can be updated you can use the"
" following helper tool. It will list all the packages installed in the " " following helper tool. It will list all the packages installed in the "
...@@ -407,11 +452,11 @@ msgid "" ...@@ -407,11 +452,11 @@ msgid ""
"only on requested packages." "only on requested packages."
msgstr "" msgstr ""
#: ../../contributing/packages.md:22 829b62bdfee54ce7912f4fd82e0811f1 #: ../../contributing/packages.md:34 2d52bd16ed7b42b5a2b4841e69e582c6
msgid "```bash $ make check-outdated/base-notebook" msgid "```bash $ make check-outdated/base-notebook"
msgstr "" msgstr ""
#: ../../contributing/packages.md:25 bc22117d06cc4f8792314762474712fa #: ../../contributing/packages.md:37 5b47002f04fa48b78c032fec4ba2554f
msgid "" msgid ""
"# INFO test_outdated:test_outdated.py:80 3/8 (38%) packages could be " "# INFO test_outdated:test_outdated.py:80 3/8 (38%) packages could be "
"updated # INFO test_outdated:test_outdated.py:82 # Package " "updated # INFO test_outdated:test_outdated.py:82 # Package "
...@@ -420,11 +465,11 @@ msgid "" ...@@ -420,11 +465,11 @@ msgid ""
"```" "```"
msgstr "" msgstr ""
#: ../../contributing/recipes.md:1 b62539e910b449cab65460c586b9460c #: ../../contributing/recipes.md:1 9d9c719b4f864b63ab2bef21848ac0e3
msgid "# New Recipes" msgid "# New Recipes"
msgstr "" msgstr ""
#: ../../contributing/recipes.md:3 dc7089fe2a494ea19925cf8af9a59e5a #: ../../contributing/recipes.md:3 9edb226afe8246ae9cae02e0e7b630b8
msgid "" msgid ""
"We welcome contributions of [recipes](../using/recipes.md), short " "We welcome contributions of [recipes](../using/recipes.md), short "
"examples of using, configuring, or extending the Docker Stacks, for " "examples of using, configuring, or extending the Docker Stacks, for "
...@@ -432,25 +477,25 @@ msgid "" ...@@ -432,25 +477,25 @@ msgid ""
"new recipe:" "new recipe:"
msgstr "" msgstr ""
#: ../../contributing/recipes.md:5 e10b70b695d147fb850d11b48beee7fc #: ../../contributing/recipes.md:5 3852ad45c4bf4f87a490e2a0878cf4dc
msgid "Open the `docs/using/recipes.md` source file." msgid "Open the `docs/using/recipes.md` source file."
msgstr "" msgstr ""
#: ../../contributing/recipes.md:6 7151c9e77ada4212adf77cae937e3cae #: ../../contributing/recipes.md:6 d7b7a4eaaa994125badb2e040bc54f36
msgid "" msgid ""
"Add a second-level Markdown heading naming your recipe at the bottom of " "Add a second-level Markdown heading naming your recipe at the bottom of "
"the file (e.g., `## Add the RISE extension`)" "the file (e.g., `## Add the RISE extension`)"
msgstr "" msgstr ""
# 8838b0ff2be24c23afaca9a6f43a9b66 # 8838b0ff2be24c23afaca9a6f43a9b66
#: ../../contributing/recipes.md:7 541d6ad94859490d98c8a7f04baa4947 #: ../../contributing/recipes.md:7 eb8e87e0c1c54104b4b219c5d0687a7f
msgid "" msgid ""
"Write the body of your recipe under the heading, including whatever " "Write the body of your recipe under the heading, including whatever "
"command line, Dockerfile, links, etc. you need." "command line, Dockerfile, links, etc. you need."
msgstr "" msgstr ""
#: ../../contributing/recipes.md:8 ../../contributing/stacks.md:111 #: ../../contributing/recipes.md:8 ../../contributing/stacks.md:137
#: 0b5ab045564f4c85a442996b93340288 8c42e042130646cb8e715440d3c88932 #: 1b072ea6b88c43ecaf29ab232059fa9a 1cc55819a5d2466ba936c6d7d51d73da
msgid "" msgid ""
"[Submit a pull request](https://github.com/PointCloudLibrary/pcl/wiki/A" "[Submit a pull request](https://github.com/PointCloudLibrary/pcl/wiki/A"
"-step-by-step-guide-on-preparing-and-submitting-a-pull-request) (PR) with" "-step-by-step-guide-on-preparing-and-submitting-a-pull-request) (PR) with"
...@@ -458,11 +503,11 @@ msgid "" ...@@ -458,11 +503,11 @@ msgid ""
"formatting or content issues." "formatting or content issues."
msgstr "" msgstr ""
#: ../../contributing/stacks.md:1 e23e06546a29406db6e337f82fabc3e6 #: ../../contributing/stacks.md:1 a2fb4c2f48d34e50afbee154fdb38484
msgid "# Community Stacks" msgid "# Community Stacks"
msgstr "" msgstr ""
#: ../../contributing/stacks.md:3 74ab09f407b744bdac1945aebbd91bcb #: ../../contributing/stacks.md:3 cd04d357da92428fa1bfd0f5bf472c41
msgid "" msgid ""
"We love to see the community create and share new Jupyter Docker images. " "We love to see the community create and share new Jupyter Docker images. "
"We've put together a [cookiecutter project](https://github.com/jupyter" "We've put together a [cookiecutter project](https://github.com/jupyter"
...@@ -471,263 +516,250 @@ msgid "" ...@@ -471,263 +516,250 @@ msgid ""
"Docker. Following these steps will:" "Docker. Following these steps will:"
msgstr "" msgstr ""
#: ../../contributing/stacks.md:5 7b86926f795a4846ab36cb755f3f1306 #: ../../contributing/stacks.md:8 85e339e52121447081aac3fb65e25318
msgid "" msgid ""
"Setup a project on GitHub containing a Dockerfile based on either the " "Setup a project on GitHub containing a Dockerfile based on either the "
"`jupyter/base-notebook` or `jupyter/minimal-notebook` image." "`jupyter/base-notebook` or `jupyter/minimal-notebook` image."
msgstr "" msgstr ""
# 8fa22b86dc9f4750b0b903371f16c1e6 #: ../../contributing/stacks.md:10 21bf55f67f17411f83687abd80d59d24
#: ../../contributing/stacks.md:6 194fc91e8b984dda94639d27941218e0
msgid "" msgid ""
"Configure Travis CI to build and test your image when users submit pull " "Configure GitHub Actions to build and test your image when users submit "
"requests to your repository." "pull requests to your repository."
msgstr "" msgstr ""
# cb04d6b8877b47e78277b7025f642ae3 # cb04d6b8877b47e78277b7025f642ae3
#: ../../contributing/stacks.md:7 3b980b7f6c1d4032b562e2f2ee6987e4 #: ../../contributing/stacks.md:12 4cfb7f9e9c324cf79514a9ae2d2ee81b
msgid "Configure Docker Cloud to build and host your images for others to use." msgid "Configure Docker Cloud to build and host your images for others to use."
msgstr "" msgstr ""
#: ../../contributing/stacks.md:8 38319010672e4fc6a302f43b8df51ffb #: ../../contributing/stacks.md:13 bc59cbb517a744b2bea901bd9a476011
msgid "" msgid ""
"Update the [list of community stacks](../using/selecting.html#community-" "Update the [list of community stacks](../using/selecting.html#community-"
"stacks) in this documentation to include your image." "stacks) in this documentation to include your image."
msgstr "" msgstr ""
# 8e0fd1dc73cc40ceab19307d0cd809c1 # 8e0fd1dc73cc40ceab19307d0cd809c1
#: ../../contributing/stacks.md:10 7718d7d6d0bc4e67bd75b64bdadba041 #: ../../contributing/stacks.md:16 1c38271a003a4718814876db567fdcb4
msgid "" msgid ""
"This approach mirrors how we build and share the core stack images. Feel " "This approach mirrors how we build and share the core stack images. Feel "
"free to follow it or pave your own path using alternative services and " "free to follow it or pave your own path using alternative services and "
"build tools." "build tools."
msgstr "" msgstr ""
#: ../../contributing/stacks.md:12 b8c907e8c118419796d1620ac9e71c8f #: ../../contributing/stacks.md:19 51aef9b1db844cb68012611a673e3436
msgid "## Creating a Project" msgid "## Creating a Project"
msgstr "" msgstr ""
#: ../../contributing/stacks.md:14 b635c7a30ced4430b88fed123c2231f7 #: ../../contributing/stacks.md:21 f182f54fc4c343a99aa9f1af271b2dc6
msgid "" msgid ""
"First, install [cookiecutter](https://github.com/audreyr/cookiecutter) " "First, install [cookiecutter](https://github.com/audreyr/cookiecutter) "
"using pip or conda:" "using pip or conda:"
msgstr "" msgstr ""
#: ../../contributing/stacks.md:16 3db9dc85806e46b791468009008aec23 #: ../../contributing/stacks.md:23 50c7c1895f4e483f85c3befa73a284bd
msgid "```bash pip install cookiecutter # or conda install cookiecutter ```" msgid "```bash pip install cookiecutter # or conda install cookiecutter ```"
msgstr "" msgstr ""
#: ../../contributing/stacks.md:20 2427280d8b0a4f178aa400c8fad76bfc #: ../../contributing/stacks.md:27 4a6d87dae24a484a9accbeacd74a7f40
msgid "" msgid ""
"Run the cookiecutter command pointing to the [jupyter/cookiecutter-" "Run the cookiecutter command pointing to the [jupyter/cookiecutter-"
"docker-stacks](https://github.com/jupyter/cookiecutter-docker-stacks) " "docker-stacks](https://github.com/jupyter/cookiecutter-docker-stacks) "
"project on GitHub." "project on GitHub."
msgstr "" msgstr ""
#: ../../contributing/stacks.md:22 32e9f27558ea4e09a52c9f1d62251fbb #: ../../contributing/stacks.md:31 6e0c2fee484a4b1ab6b5fb1f7d50a405
msgid "" msgid ""
"```bash cookiecutter https://github.com/jupyter/cookiecutter-docker-" "```bash cookiecutter https://github.com/jupyter/cookiecutter-docker-"
"stacks.git ```" "stacks.git ```"
msgstr "" msgstr ""
# 676ff068156d4ca7b1043b4a4fe2d1f1 # 676ff068156d4ca7b1043b4a4fe2d1f1
#: ../../contributing/stacks.md:26 a2566ac2324448e9af272bd9c5a1c1c1 #: ../../contributing/stacks.md:35 8159034dfc68487d816a55125703e5d2
msgid "" msgid ""
"Enter a name for your new stack image. This will serve as both the git " "Enter a name for your new stack image. This will serve as both the git "
"repository name and the part of the Docker image name after the slash." "repository name and the part of the Docker image name after the slash."
msgstr "" msgstr ""
#: ../../contributing/stacks.md:29 2dd8006f293643b1ba1ba9771a9bf992 #: ../../contributing/stacks.md:38 0c034655f6ac436eaede9d3925e65f19
msgid "``` stack_name [my-jupyter-stack]: ```" msgid "``` stack_name [my-jupyter-stack]: ```"
msgstr "" msgstr ""
# 96deffa98bab47da82e5598e549c8a39 # 96deffa98bab47da82e5598e549c8a39
#: ../../contributing/stacks.md:33 7b87469603c2457d8efbd8d38e1997a7 #: ../../contributing/stacks.md:42 a0f5a35cac1e4582a318afd73514efd5
msgid "" msgid ""
"Enter the user or organization name under which this stack will reside on" "Enter the user or organization name under which this stack will reside on"
" Docker Cloud / Hub. You must have access to manage this Docker Cloud org" " Docker Cloud / Hub. You must have access to manage this Docker Cloud org"
" in order to push images here and setup automated builds." " in order to push images here and setup automated builds."
msgstr "" msgstr ""
#: ../../contributing/stacks.md:37 de7fe5882a97405eb9c3e111f4fa3413 #: ../../contributing/stacks.md:46 c8100d324d084444bf67a58ee0ab2e09
msgid "``` stack_org [my-project]: ```" msgid "``` stack_org [my-project]: ```"
msgstr "" msgstr ""
# b796c2d7c08b4a1db5cdfd3de7d84c16 # b796c2d7c08b4a1db5cdfd3de7d84c16
#: ../../contributing/stacks.md:41 f94ec73b9c784cc1bcfb33494e5ce0df #: ../../contributing/stacks.md:50 a89e98daea0d4aad8cb7851e6d112d4c
msgid "" msgid ""
"Select an image from the jupyter/docker-stacks project that will serve as" "Select an image from the jupyter/docker-stacks project that will serve as"
" the base for your new image." " the base for your new image."
msgstr "" msgstr ""
#: ../../contributing/stacks.md:44 a64c3be460d0452aab96151c5a8bda12 #: ../../contributing/stacks.md:53 483295ba6cc4481f91e6da37606e4a38
msgid "``` stack_base_image [jupyter/base-notebook]: ```" msgid "``` stack_base_image [jupyter/base-notebook]: ```"
msgstr "" msgstr ""
# 7ef9d73286d04b12a1350e8d9565df65 # 7ef9d73286d04b12a1350e8d9565df65
#: ../../contributing/stacks.md:48 26646e2da5c2495a9d24b9d5522854e4 #: ../../contributing/stacks.md:57 138efdd072494ea8abf5c2dde20024d9
msgid "Enter a longer description of the stack for your README." msgid "Enter a longer description of the stack for your README."
msgstr "" msgstr ""
#: ../../contributing/stacks.md:50 57d24b5517b8495bbf6857ac93cbaf85 #: ../../contributing/stacks.md:59 647f97d0246943e893f84137b8226255
msgid "" msgid ""
"``` stack_description [my-jupyter-stack is a community maintained Jupyter" "``` stack_description [my-jupyter-stack is a community maintained Jupyter"
" Docker Stack image]: ```" " Docker Stack image]: ```"
msgstr "" msgstr ""
# 479d3a5c6ef9481a9dc4033224c540fa # 479d3a5c6ef9481a9dc4033224c540fa
#: ../../contributing/stacks.md:54 8c29ebcb67d94ca6b3761878e460e002 #: ../../contributing/stacks.md:63 c4a42cd3c9d94d3d980e0f2ec3118b36
msgid "Initialize your project as a Git repository and push it to GitHub." msgid "Initialize your project as a Git repository and push it to GitHub."
msgstr "" msgstr ""
#: ../../contributing/stacks.md:56 8a1307c751be41299ff0afdb2193fa5c #: ../../contributing/stacks.md:65 e0e73ae693f844609ca59b3763d72f2b
msgid "``` cd <stack_name you chose>" msgid "``` cd <stack_name you chose>"
msgstr "" msgstr ""
#: ../../contributing/stacks.md:59 6cd23d80cc3c4de1a59a3ce5451177cd #: ../../contributing/stacks.md:68 1f56621c93d44d17aea81b92a4380fc4
msgid "" msgid ""
"git init git add . git commit -m 'Seed repo' git remote add origin <url " "git init git add . git commit -m 'Seed repo' git remote add origin <url "
"from github> git push -u origin master ```" "from github> git push -u origin master ```"
msgstr "" msgstr ""
#: ../../contributing/stacks.md:66 6e38296bbde24e12898e62d2476e0889 #: ../../contributing/stacks.md:75 9c926fd26373447aaf1adb6ca61a549d
msgid "## Configuring Travis" msgid "## Configuring Travis"
msgstr "" msgstr ""
#: ../../contributing/stacks.md:77 d2901e5fe48047538a629599b53cabcf
msgid ""
"> NOTE: This section and the cookiecutter template should be updated to "
"describe using GitHub > Actions now that jupyter/docker-stacks uses that "
"service."
msgstr ""
# 38e3784d96f64d7481f0e1fd17aff9cb # 38e3784d96f64d7481f0e1fd17aff9cb
#: ../../contributing/stacks.md:68 af1ed0ba41394284a6e172594a0ced31 #: ../../contributing/stacks.md:80 6c82a90981524e9a8e72e7a35d948c35
msgid "" msgid ""
"Next, link your GitHub project to Travis CI to build your Docker image " "Next, link your GitHub project to Travis CI to build your Docker image "
"whenever you or someone else submits a pull request." "whenever you or someone else submits a pull request."
msgstr "" msgstr ""
#: ../../contributing/stacks.md:70 a2d9189c4b174051bd4f4e221c7e1fe5 #: ../../contributing/stacks.md:83 6110a2aee3e245829b443d0e7c0af93d
msgid "" msgid ""
"1. Visit [https://docs.travis-ci.com/user/getting-started/#To-get-" "Visit [https://docs.travis-ci.com/user/getting-started/#To-get-started-"
"started-with-Travis-CI](https://docs.travis-ci.com/user/getting-started" "with-Travis-CI](https://docs.travis-ci.com/user/getting-started/#To-get-"
"/#To-get-started-with-Travis-CI) and follow the instructions to add the " "started-with-Travis-CI) and follow the instructions to add the Travis CI "
"Travis CI application to your GitHub account. 3. Visit [https://travis-" "application to your GitHub account."
"ci.org](https://travis-ci.org). 4. Click the + symbol at the top of the "
"left sidebar."
msgstr "" msgstr ""
#: ../../contributing/stacks.md:73 8334004f6df740dea6a905b9f5972d4c #: ../../contributing/stacks.md:86 e2a8f63fa23549d0ba5bbd31441fffc2
msgid "Visit [https://travis-ci.org](https://travis-ci.org)."
msgstr ""
#: ../../contributing/stacks.md:87 f113948ac90c4505bf68bf6a68c52d7c
msgid "" msgid ""
"![Travis sidebar with plus button screenshot](../_static/travis-plus-" "Click the + symbol at the top of the left sidebar. ![Travis sidebar with "
"repo.png)" "plus button screenshot](../_static/travis-plus-repo.png)"
msgstr "" msgstr ""
# ac370ece6fb24becb8034cb994ad8f4b # ac370ece6fb24becb8034cb994ad8f4b
#: ../../contributing/stacks.md:74 d237527b18974c92886a87127521f56d #: ../../contributing/stacks.md:89 a26db0d349c64f799c69ffffa66c846e
msgid "" msgid ""
"Locate your project repository either in your primary user account or in " "Locate your project repository either in your primary user account or in "
"one of the organizations to which you belong." "one of the organizations to which you belong."
msgstr "" msgstr ""
# 6b6a7bab547d4e25bd930009a6a9ea44 # 6b6a7bab547d4e25bd930009a6a9ea44
#: ../../contributing/stacks.md:75 1dbc14fe092646d0b4248901235b40ed #: ../../contributing/stacks.md:91 425c5eee8ebc47d89f987a356595a4db
msgid "Click the toggle to enable builds for the project repository." msgid "Click the toggle to enable builds for the project repository."
msgstr "" msgstr ""
#: ../../contributing/stacks.md:76 a610a6a093a247eba3877322568d0bb9 #: ../../contributing/stacks.md:92 cdfa9566710f41698a1280d03890599d
msgid "Click the **Settings** button for that repository."
msgstr ""
#: ../../contributing/stacks.md:77 817ec9e36c8347d2b1f568654e28af83
msgid "" msgid ""
"![Travis enable build toggle screenshot](../_static/travis-enable-" "Click the **Settings** button for that repository. ![Travis enable build "
"build.png)" "toggle screenshot](../_static/travis-enable-build.png)"
msgstr "" msgstr ""
#: ../../contributing/stacks.md:78 84e38ed3b3b24e92a4f33db3af03723f #: ../../contributing/stacks.md:94 dd3284c38bae43e0a7f3f14ff5833b19
msgid "" msgid ""
"Enable **Build only if .travis.yml is present** and **Build pushed pull " "Enable **Build only if .travis.yml is present** and **Build pushed pull "
"requests**." "requests**. ![Travis build settings screenshot](../_static/travis-build-"
msgstr "" "settings.png)"
#: ../../contributing/stacks.md:79 b57a3c1f90a74d769abc08bbada764ac
msgid "![Travis build settings screenshot](../_static/travis-build-settings.png)"
msgstr "" msgstr ""
#: ../../contributing/stacks.md:80 577c4b978be540329cab6349be01db75 #: ../../contributing/stacks.md:96 205587c034d1423b81fc5fa4a496efc1
msgid "Disable **Build pushed branches**." msgid "Disable **Build pushed branches**."
msgstr "" msgstr ""
#: ../../contributing/stacks.md:82 9a3a675353694b418d23ce190fcde1c8 #: ../../contributing/stacks.md:98 990583b6752746af97649d1800576dd6
msgid "## Configuring Docker Cloud" msgid "## Configuring Docker Cloud"
msgstr "" msgstr ""
# f0c01a2906494d039d73324e90cbae44 # f0c01a2906494d039d73324e90cbae44
#: ../../contributing/stacks.md:84 bfd61f89b75543b3b24fbc92d3f70964 #: ../../contributing/stacks.md:100 08bda4db862d4603a9d49d3ab5351e3f
msgid "" msgid ""
"Now, configure Docker Cloud to build your stack image and push it to " "Now, configure Docker Cloud to build your stack image and push it to "
"Docker Hub repository whenever you merge a GitHub pull request to the " "Docker Hub repository whenever you merge a GitHub pull request to the "
"master branch of your project." "master branch of your project."
msgstr "" msgstr ""
#: ../../contributing/stacks.md:86 69cac7c980ab463083404e170a8f5e34 #: ../../contributing/stacks.md:103 47a00269a8ed47198bbc8dd123dba6fb
msgid "Visit [https://cloud.docker.com/](https://cloud.docker.com/) and login." msgid "Visit [https://cloud.docker.com/](https://cloud.docker.com/) and login."
msgstr "" msgstr ""
#: ../../contributing/stacks.md:87 02e0caa91e7b4069a8e07e10914445b0 #: ../../contributing/stacks.md:104 d61a554ff66e4c6dbce0fa697f1cd3de
msgid "" msgid ""
"Select the account or organization matching the one you entered when " "Select the account or organization matching the one you entered when "
"prompted with `stack_org` by the cookiecutter." "prompted with `stack_org` by the cookiecutter. ![Docker account selection"
msgstr "" " screenshot](../_static/docker-org-select.png)"
#: ../../contributing/stacks.md:88 1984a088c6af48899d61c5fa3f01fe55
msgid "![Docker account selection screenshot](../_static/docker-org-select.png)"
msgstr "" msgstr ""
#: ../../contributing/stacks.md:89 e4725346fb314d54ae78fb41c976709a #: ../../contributing/stacks.md:106 6e922ad53ece461d9dc14eceb9a3aeaf
msgid "Scroll to the bottom of the page and click **Create repository**." msgid "Scroll to the bottom of the page and click **Create repository**."
msgstr "" msgstr ""
#: ../../contributing/stacks.md:90 40088f0729d44d2cab75763351776a30 #: ../../contributing/stacks.md:107 e4405a6afcf543b5a1cc701cfa181b14
msgid "" msgid ""
"Enter the name of the image matching the one you entered when prompted " "Enter the name of the image matching the one you entered when prompted "
"with `stack_name` by the cookiecutter." "with `stack_name` by the cookiecutter. ![Docker image name and "
msgstr "" "description screenshot](../_static/docker-repo-name.png)"
#: ../../contributing/stacks.md:91 31cc5283e7424d7bb9665f599ac70b9f
msgid ""
"![Docker image name and description screenshot](../_static/docker-repo-"
"name.png)"
msgstr "" msgstr ""
# 79092e5007ba4bdead594a71e30cd58a # 79092e5007ba4bdead594a71e30cd58a
#: ../../contributing/stacks.md:92 39537f0c54a140fba52a17e74a4de963 #: ../../contributing/stacks.md:109 e05a5b44a22e40ff8a1924808ee1dc7d
msgid "Enter a description for your image." msgid "Enter a description for your image."
msgstr "" msgstr ""
#: ../../contributing/stacks.md:93 502b3baca67f4bc3bead7f72aad08505 #: ../../contributing/stacks.md:110 6cbf40533f1e4c0391ccf0e41c1978be
msgid "" msgid ""
"Click **GitHub** under the **Build Settings** and follow the prompts to " "Click **GitHub** under the **Build Settings** and follow the prompts to "
"connect your account if it is not already connected." "connect your account if it is not already connected."
msgstr "" msgstr ""
# e085cfd6d7664d04bcd14ce89f24b75a #: ../../contributing/stacks.md:112 0e50754e855540d28eff05ccd98388ef
#: ../../contributing/stacks.md:94 8cd69feca10342b082e3ffa365756531
msgid "" msgid ""
"Select the GitHub organization and repository containing your image " "Select the GitHub organization and repository containing your image "
"definition from the dropdowns." "definition from the dropdowns. ![Docker from GitHub automated build "
msgstr "" "screenshot](../_static/docker-github-settings.png)"
#: ../../contributing/stacks.md:95 36c799f13fe4421cab13f57df0a1dd3a
msgid ""
"![Docker from GitHub automated build screenshot](../_static/docker-"
"github-settings.png)"
msgstr "" msgstr ""
#: ../../contributing/stacks.md:96 6fe8225f223642748101e2a42becd21b #: ../../contributing/stacks.md:115 c30e839b855a42f1929fe6a08ba3bc84
msgid "Click the **Create and Build** button." msgid "Click the **Create and Build** button."
msgstr "" msgstr ""
#: ../../contributing/stacks.md:98 ff46d994b8094fdc945fbfa04b2ab183 #: ../../contributing/stacks.md:117 e15c750e97354edaacdc63b36fc1edc0
msgid "## Defining Your Image" msgid "## Defining Your Image"
msgstr "" msgstr ""
#: ../../contributing/stacks.md:100 34727a930d0c41969b2d2fd451295f73 #: ../../contributing/stacks.md:119 17382a87e8274b83ae1206d9f9ed6b8a
msgid "" msgid ""
"Make edits the Dockerfile in your project to add third-party libraries " "Make edits the Dockerfile in your project to add third-party libraries "
"and configure Jupyter applications. Refer to the Dockerfiles for the core" "and configure Jupyter applications. Refer to the Dockerfiles for the core"
...@@ -736,7 +768,7 @@ msgid "" ...@@ -736,7 +768,7 @@ msgid ""
"feel for what's possible and best practices." "feel for what's possible and best practices."
msgstr "" msgstr ""
#: ../../contributing/stacks.md:102 39d833a10d1441ff8ab92e2ef863698b #: ../../contributing/stacks.md:124 81684a9c7c6b4a56a511ac84cd4b74f5
msgid "" msgid ""
"[Submit pull requests](https://github.com/PointCloudLibrary/pcl/wiki/A" "[Submit pull requests](https://github.com/PointCloudLibrary/pcl/wiki/A"
"-step-by-step-guide-on-preparing-and-submitting-a-pull-request) to your " "-step-by-step-guide-on-preparing-and-submitting-a-pull-request) to your "
...@@ -745,54 +777,54 @@ msgid "" ...@@ -745,54 +777,54 @@ msgid ""
"master branch that you can `docker pull`." "master branch that you can `docker pull`."
msgstr "" msgstr ""
#: ../../contributing/stacks.md:104 29eecce312564952a12c58e6baa76bcb #: ../../contributing/stacks.md:128 1777e2d4a2cb418f80f8ace8e59deeb9
msgid "## Sharing Your Image" msgid "## Sharing Your Image"
msgstr "" msgstr ""
# d8e9f1a37f4c4a72bb630e7a3b265b92 # d8e9f1a37f4c4a72bb630e7a3b265b92
#: ../../contributing/stacks.md:106 a067a1e9d7f24067b1b71efc316c5581 #: ../../contributing/stacks.md:130 277fb84aa77f45f7a8a23ff5cc072d52
msgid "" msgid ""
"Finally, if you'd like to add a link to your project to this " "Finally, if you'd like to add a link to your project to this "
"documentation site, please do the following:" "documentation site, please do the following:"
msgstr "" msgstr ""
#: ../../contributing/stacks.md:108 e5e3359d21684f2cae5e9b8c9ed985ec #: ../../contributing/stacks.md:133 733d33f4230c4b6fb6bb8346cb14ca85
msgid "" msgid ""
"Clone ths [jupyter/docker-stacks](https://github.com/jupyter/docker-" "Clone ths [jupyter/docker-stacks](https://github.com/jupyter/docker-"
"stacks) GitHub repository." "stacks) GitHub repository."
msgstr "" msgstr ""
#: ../../contributing/stacks.md:109 c9d209061e8344c0a88a9de41f8c1ea1 #: ../../contributing/stacks.md:134 57f684437d914934836571fd79e9a06e
msgid "" msgid ""
"Open the `docs/using/selecting.md` source file and locate the **Community" "Open the `docs/using/selecting.md` source file and locate the **Community"
" Stacks** section." " Stacks** section."
msgstr "" msgstr ""
# 9d37dfec6fba48e6966c254b476e1e81 # 9d37dfec6fba48e6966c254b476e1e81
#: ../../contributing/stacks.md:110 7faf7108bf0f48e4878f50a5a2cec537 #: ../../contributing/stacks.md:135 4a3f3101813149a4ac636983bc4117c6
msgid "" msgid ""
"Add a bullet with a link to your project and a short description of what " "Add a bullet with a link to your project and a short description of what "
"your Docker image contains." "your Docker image contains."
msgstr "" msgstr ""
#: ../../contributing/tests.md:1 c45ba83d713641b89d86ffa1301769d5 #: ../../contributing/tests.md:1 50e771280fdc415aa192bd98fcfdee00
msgid "# Image Tests" msgid "# Image Tests"
msgstr "" msgstr ""
# 6dbd44985f3c4ba1a3823c90c5944ad0 # 6dbd44985f3c4ba1a3823c90c5944ad0
#: ../../contributing/tests.md:3 b729f245588842a0b0aec7495e8362d5 #: ../../contributing/tests.md:3 0e386f0f67a64e1d89fe8a312c5a887a
msgid "" msgid ""
"We greatly appreciate pull requests that extend the automated tests that " "We greatly appreciate pull requests that extend the automated tests that "
"vet the basic functionality of the Docker images." "vet the basic functionality of the Docker images."
msgstr "" msgstr ""
#: ../../contributing/tests.md:5 478d5e4939c24d1eae1c21dcf96005be #: ../../contributing/tests.md:6 82a536a5b8bd4d9e8081d44d67a60763
msgid "## How the Tests Work" msgid "## How the Tests Work"
msgstr "" msgstr ""
#: ../../contributing/tests.md:7 19e32ceec8ec4ef0b4c3a8ddfebce3dc #: ../../contributing/tests.md:8 db35e6490160442a9fb25951d2cfb3b5
msgid "" msgid ""
"Travis executes `make build-test-all` against pull requests submitted to " "GitHub executes `make build-test-all` against pull requests submitted to "
"the `jupyter/docker-stacks` repository. This `make` command builds every " "the `jupyter/docker-stacks` repository. This `make` command builds every "
"docker image. After building each image, the `make` command executes " "docker image. After building each image, the `make` command executes "
"`pytest` to run both image-specific tests like those in [base-" "`pytest` to run both image-specific tests like those in [base-"
...@@ -805,49 +837,46 @@ msgid "" ...@@ -805,49 +837,46 @@ msgid ""
"stacks/blob/master/conftest.py) file at the root of the projects." "stacks/blob/master/conftest.py) file at the root of the projects."
msgstr "" msgstr ""
#: ../../contributing/tests.md:9 fae9f419beda411db77ee0e8e99befa5 #: ../../contributing/tests.md:17 3c7d490c789048b9969f3e71855bf5a7
msgid "## Contributing New Tests" msgid "## Contributing New Tests"
msgstr "" msgstr ""
# d317e6be0fbf487e8528ff1fe0bbdb78 # d317e6be0fbf487e8528ff1fe0bbdb78
#: ../../contributing/tests.md:11 15db53d9689c434cb30c1a79ff9e78f8 #: ../../contributing/tests.md:19 85ba397ab21942c3b2370803ef9b9773
msgid "Please follow the process below to add new tests:" msgid "Please follow the process below to add new tests:"
msgstr "" msgstr ""
#: ../../contributing/tests.md:13 49c48242ed034964829c4f04357083d5 #: ../../contributing/tests.md:21 5832cd504f494785ab0f8ec0286e3094
msgid "" msgid ""
"If the test should run against every image built, add your test code to " "If the test should run against every image built, add your test code to "
"one of the modules in [test/](https://github.com/jupyter/docker-" "one of the modules in [test/](https://github.com/jupyter/docker-"
"stacks/tree/master/test) or create a new module." "stacks/tree/master/test) or create a new module."
msgstr "" msgstr ""
#: ../../contributing/tests.md:14 5224cd7b1ee243bda1f00a653e1cfd7c #: ../../contributing/tests.md:23 ef7ae1ac3dfb473380f586498048f8e7
msgid "" msgid ""
"If your test should run against a single image, add your test code to one" "If your test should run against a single image, add your test code to one"
" of the modules in `some-notebook/test/` or create a new module." " of the modules in `some-notebook/test/` or create a new module."
msgstr "" msgstr ""
#: ../../contributing/tests.md:18 6fac39aab4814bc9bc70d2a820d6125a #: ../../contributing/tests.md:25 43568c62e1724eb88ab71d9ccc8196be
msgid "" msgid ""
"Build one or more images you intend to test and run the tests locally. If" "Build one or more images you intend to test and run the tests locally. If"
" you use `make`, call:" " you use `make`, call: ```bash make build/somestack-notebook make test"
"/somestack-notebook ```"
msgstr "" msgstr ""
#: ../../contributing/tests.md:16 63873dc63c4f4ee99854f34d8a0be26f #: ../../contributing/tests.md:33 ef7e04dcc21149caafcb7a6079faaa3b
msgid "```bash make build/somestack-notebook make test/somestack-notebook ```"
msgstr ""
#: ../../contributing/tests.md:22 b3b48c0dffa248de8abbfbd25a02bba5
msgid "" msgid ""
"Discuss changes with the maintainers and address any issues running the " "Discuss changes with the maintainers and address any issues running the "
"tests on Travis." "tests on GitHub."
msgstr "" msgstr ""
#: ../../contributing/translations.md:1 7de975a1fcd2429c8b25de5780152e4a #: ../../contributing/translations.md:1 c75a0579bb1c4278be5aff8a72e141b3
msgid "# Doc Translations" msgid "# Doc Translations"
msgstr "" msgstr ""
#: ../../contributing/translations.md:3 88ccce80ee8446549f6709441fd09504 #: ../../contributing/translations.md:3 e2e2c42e5d3a411f947927eab693b079
msgid "" msgid ""
"We are delighted when members of the Jupyter community want to help " "We are delighted when members of the Jupyter community want to help "
"translate these documentation pages to other languages. If you're " "translate these documentation pages to other languages. If you're "
...@@ -856,14 +885,14 @@ msgid "" ...@@ -856,14 +885,14 @@ msgid ""
"updating translations of the Jupyter Docker Stacks documentation." "updating translations of the Jupyter Docker Stacks documentation."
msgstr "" msgstr ""
#: ../../contributing/translations.md:5 897e68f9d1b8484fa1721c6672e5ec64 #: ../../contributing/translations.md:5 99be6240a1494e02a5c0e8ce3ab8b391
msgid "" msgid ""
"Follow the steps documented on the [Getting Started as a " "Follow the steps documented on the [Getting Started as a "
"Translator](https://docs.transifex.com/getting-started-1/translators) " "Translator](https://docs.transifex.com/getting-started-1/translators) "
"page." "page."
msgstr "" msgstr ""
#: ../../contributing/translations.md:6 b929c4dfadf041d8ad27b5478e49cff0 #: ../../contributing/translations.md:6 c56ae9779af6494b80e42560e18e630e
msgid "" msgid ""
"Look for *jupyter-docker-stacks* when prompted to choose a translation " "Look for *jupyter-docker-stacks* when prompted to choose a translation "
"team. Alternatively, visit https://www.transifex.com/project-jupyter" "team. Alternatively, visit https://www.transifex.com/project-jupyter"
...@@ -871,7 +900,7 @@ msgid "" ...@@ -871,7 +900,7 @@ msgid ""
"the project." "the project."
msgstr "" msgstr ""
#: ../../contributing/translations.md:7 5f067106c7f04dfa869295f9b954df96 #: ../../contributing/translations.md:7 c8d10f6202f146f5b44fdf390289152d
msgid "" msgid ""
"See [Translating with the Web " "See [Translating with the Web "
"Editor](https://docs.transifex.com/translation/translating-with-the-web-" "Editor](https://docs.transifex.com/translation/translating-with-the-web-"
...@@ -980,9 +1009,6 @@ msgstr "" ...@@ -980,9 +1009,6 @@ msgstr ""
#~ msgid "Submit a pull request (PR) with your changes." #~ msgid "Submit a pull request (PR) with your changes."
#~ msgstr "" #~ msgstr ""
# fa69df66887f47d9a9abcd75fa8a8497
# 7083b6579dca40a487ab6bd7a21e20eb
# 2fbcbd3f9a63431294360d65dac1734a
#~ msgid "" #~ msgid ""
#~ "Watch for Travis to report a build" #~ "Watch for Travis to report a build"
#~ " success or failure for your PR " #~ " success or failure for your PR "
...@@ -1338,7 +1364,6 @@ msgstr "" ...@@ -1338,7 +1364,6 @@ msgstr ""
#~ " locally. If you use make, call:" #~ " locally. If you use make, call:"
#~ msgstr "" #~ msgstr ""
# 760abcc7be7744858eda5ee0b7be3107
#~ msgid "" #~ msgid ""
#~ "Discuss changes with the maintainers and" #~ "Discuss changes with the maintainers and"
#~ " address any issues running the tests" #~ " address any issues running the tests"
...@@ -1453,3 +1478,184 @@ msgstr "" ...@@ -1453,3 +1478,184 @@ msgstr ""
#~ "on Travis." #~ "on Travis."
#~ msgstr "" #~ msgstr ""
#~ msgid ""
#~ "Please build the image locally before"
#~ " submitting a pull request. Building "
#~ "the image locally shortens the debugging"
#~ " cycle by taking some load off "
#~ "[Travis CI](http://travis-ci.org/), which "
#~ "graciously provides free build services "
#~ "for open source projects like this "
#~ "one. If you use `make`, call:"
#~ msgstr ""
#~ msgid "```bash make build/somestack-notebook ```"
#~ msgstr ""
#~ msgid "# Image Lint"
#~ msgstr ""
#~ msgid "## Installation"
#~ msgstr ""
#~ msgid "## Lint"
#~ msgstr ""
#~ msgid "### Per Stack"
#~ msgstr ""
#~ msgid "### All the Stacks"
#~ msgstr ""
#~ msgid "## Ignoring Rules"
#~ msgstr ""
#~ msgid ""
#~ "[hadolint]: https://github.com/hadolint/hadolint [dbp]:"
#~ " https://docs.docker.com/develop/develop-images"
#~ "/dockerfile_best-practices [rules]: "
#~ "https://github.com/hadolint/hadolint#rules [DL3006]: "
#~ "https://github.com/hadolint/hadolint/wiki/DL3006 [DL3008]: "
#~ "https://github.com/hadolint/hadolint/wiki/DL3008"
#~ msgstr ""
#~ msgid ""
#~ "Please build the image locally before"
#~ " submitting a pull request. Building "
#~ "the image locally shortens the debugging"
#~ " cycle by taking some load off "
#~ "[Travis CI](http://travis-ci.org/), which "
#~ "graciously provides free build services "
#~ "for open source projects like this "
#~ "one. If you use `make`, call:"
#~ msgstr ""
# 8fa22b86dc9f4750b0b903371f16c1e6
#~ msgid ""
#~ "Configure Travis CI to build and "
#~ "test your image when users submit "
#~ "pull requests to your repository."
#~ msgstr ""
#~ msgid ""
#~ "1. Visit [https://docs.travis-ci.com/user"
#~ "/getting-started/#To-get-started-with-"
#~ "Travis-CI](https://docs.travis-ci.com/user/getting-"
#~ "started/#To-get-started-with-Travis-CI)"
#~ " and follow the instructions to add"
#~ " the Travis CI application to your"
#~ " GitHub account. 3. Visit [https"
#~ "://travis-ci.org](https://travis-ci.org). 4. "
#~ "Click the + symbol at the top "
#~ "of the left sidebar."
#~ msgstr ""
#~ msgid ""
#~ "![Travis sidebar with plus button "
#~ "screenshot](../_static/travis-plus-repo.png)"
#~ msgstr ""
#~ msgid "Click the **Settings** button for that repository."
#~ msgstr ""
#~ msgid ""
#~ "![Travis enable build toggle "
#~ "screenshot](../_static/travis-enable-build.png)"
#~ msgstr ""
#~ msgid ""
#~ "Enable **Build only if .travis.yml is"
#~ " present** and **Build pushed pull "
#~ "requests**."
#~ msgstr ""
#~ msgid ""
#~ "![Travis build settings screenshot](../_static"
#~ "/travis-build-settings.png)"
#~ msgstr ""
#~ msgid ""
#~ "Select the account or organization "
#~ "matching the one you entered when "
#~ "prompted with `stack_org` by the "
#~ "cookiecutter."
#~ msgstr ""
#~ msgid ""
#~ "![Docker account selection screenshot](../_static"
#~ "/docker-org-select.png)"
#~ msgstr ""
#~ msgid ""
#~ "Enter the name of the image "
#~ "matching the one you entered when "
#~ "prompted with `stack_name` by the "
#~ "cookiecutter."
#~ msgstr ""
#~ msgid ""
#~ "![Docker image name and description "
#~ "screenshot](../_static/docker-repo-name.png)"
#~ msgstr ""
# e085cfd6d7664d04bcd14ce89f24b75a
#~ msgid ""
#~ "Select the GitHub organization and "
#~ "repository containing your image definition"
#~ " from the dropdowns."
#~ msgstr ""
#~ msgid ""
#~ "![Docker from GitHub automated build "
#~ "screenshot](../_static/docker-github-settings.png)"
#~ msgstr ""
#~ msgid ""
#~ "Travis executes `make build-test-all`"
#~ " against pull requests submitted to "
#~ "the `jupyter/docker-stacks` repository. This"
#~ " `make` command builds every docker "
#~ "image. After building each image, the"
#~ " `make` command executes `pytest` to "
#~ "run both image-specific tests like "
#~ "those in [base-"
#~ "notebook/test/](https://github.com/jupyter/docker-"
#~ "stacks/tree/master/base-notebook/test) and common"
#~ " tests defined in "
#~ "[test/](https://github.com/jupyter/docker-"
#~ "stacks/tree/master/test). Both kinds of tests"
#~ " make use of global [pytest "
#~ "fixtures](https://docs.pytest.org/en/latest/fixture.html) "
#~ "defined in the "
#~ "[conftest.py](https://github.com/jupyter/docker-"
#~ "stacks/blob/master/conftest.py) file at the "
#~ "root of the projects."
#~ msgstr ""
#~ msgid ""
#~ "Build one or more images you "
#~ "intend to test and run the tests"
#~ " locally. If you use `make`, call:"
#~ msgstr ""
#~ msgid "```bash make build/somestack-notebook make test/somestack-notebook ```"
#~ msgstr ""
#~ msgid "```bash $ make lint-install"
#~ msgstr ""
#~ msgid "```bash $ make lint/scipy-notebook"
#~ msgstr ""
#~ msgid "Optionally you can pass arguments to the linter."
#~ msgstr ""
#~ msgid ""
#~ "```bash # Use a different export "
#~ "format $ make lint/scipy-notebook "
#~ "ARGS=\"--format codeclimate\" ```"
#~ msgstr ""
#~ msgid "```bash $ make lint-all ```"
#~ msgstr ""
...@@ -9,7 +9,7 @@ msgid "" ...@@ -9,7 +9,7 @@ msgid ""
msgstr "" msgstr ""
"Project-Id-Version: docker-stacks latest\n" "Project-Id-Version: docker-stacks latest\n"
"Report-Msgid-Bugs-To: \n" "Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2020-04-19 15:01+0000\n" "POT-Creation-Date: 2020-08-08 19:06+0000\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n" "Language-Team: LANGUAGE <LL@li.org>\n"
...@@ -18,32 +18,31 @@ msgstr "" ...@@ -18,32 +18,31 @@ msgstr ""
"Content-Transfer-Encoding: 8bit\n" "Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel 2.8.0\n" "Generated-By: Babel 2.8.0\n"
#: ../../maintaining/tasks.md:1 088ca8dad40145ad998c886c16b741af #: ../../maintaining/tasks.md:1 4b12bcf1c691475db62e872bbdca68fa
msgid "# Maintainer Playbook" msgid "# Maintainer Playbook"
msgstr "" msgstr ""
#: ../../maintaining/tasks.md:3 b231b29ae3d84d8a82614784f561140a #: ../../maintaining/tasks.md:3 f688b20624524b93a14add6065bee956
msgid "## Merging Pull Requests" msgid "## Merging Pull Requests"
msgstr "" msgstr ""
# 0a04eb6c0525428984f07f3c249f5d73 # 0a04eb6c0525428984f07f3c249f5d73
#: ../../maintaining/tasks.md:5 1baaa36062be4e57afb2f854a4391c97 #: ../../maintaining/tasks.md:5 5b916e113fc746d6bf5e1097c7dc928b
msgid "" msgid ""
"To build new images on Docker Cloud and publish them to the Docker Hub " "To build new images on Docker Cloud and publish them to the Docker Hub "
"registry, do the following:" "registry, do the following:"
msgstr "" msgstr ""
# f2710754a41e4354b51f4acd6f1f9545 #: ../../maintaining/tasks.md:7 fd849160eef9483f8205d9c6967c475e
#: ../../maintaining/tasks.md:7 fee1f50210d0497cb8281fab24cbbe4b msgid "Make sure GitHub Actions status checks pas for the PR."
msgid "Make sure Travis is green for a PR."
msgstr "" msgstr ""
# db74ca47dfde4e558a20aff52176347a # db74ca47dfde4e558a20aff52176347a
#: ../../maintaining/tasks.md:8 92d4db1ab5204a8197d69a38f3fdb879 #: ../../maintaining/tasks.md:8 80c6da1230554f92be8abdef70aa01ff
msgid "Merge the PR." msgid "Merge the PR."
msgstr "" msgstr ""
#: ../../maintaining/tasks.md:9 294b780a85e64af39c6d5aeab264913a #: ../../maintaining/tasks.md:9 2e1ced11629f4ec2a259101ac85735c7
msgid "" msgid ""
"Monitor the Docker Cloud build status for each of the stacks, starting " "Monitor the Docker Cloud build status for each of the stacks, starting "
"with [jupyter/base-" "with [jupyter/base-"
...@@ -56,14 +55,14 @@ msgid "" ...@@ -56,14 +55,14 @@ msgid ""
msgstr "" msgstr ""
# 9149cb3c2bdc48ffa9109b3b6ddcf446 # 9149cb3c2bdc48ffa9109b3b6ddcf446
#: ../../maintaining/tasks.md:10 532fe7ad94f14a649d58935fa5492b61 #: ../../maintaining/tasks.md:15 7ddbf4630fa3436e88d0ec901d7268cc
msgid "" msgid ""
"Manually click the retry button next to any build that fails to resume " "Manually click the retry button next to any build that fails to resume "
"that build and any dependent builds." "that build and any dependent builds."
msgstr "" msgstr ""
# d204ada7a14b46338be9b7afb0bc95b0 # d204ada7a14b46338be9b7afb0bc95b0
#: ../../maintaining/tasks.md:11 c804acac883745908b2c79ff1b5e5253 #: ../../maintaining/tasks.md:17 daa76f01193e48b79b1de4e0d6ec8f57
msgid "" msgid ""
"Try to avoid merging another PR to master until all outstanding builds " "Try to avoid merging another PR to master until all outstanding builds "
"complete. There's no way at present to propagate the git SHA to build " "complete. There's no way at present to propagate the git SHA to build "
...@@ -71,11 +70,11 @@ msgid "" ...@@ -71,11 +70,11 @@ msgid ""
" of master HEAD." " of master HEAD."
msgstr "" msgstr ""
#: ../../maintaining/tasks.md:13 8776ad2d8d92419bbb53c21793da6f27 #: ../../maintaining/tasks.md:21 37458f5852104d58b3f9a886f2c3cb83
msgid "## Updating the Ubuntu Base Image" msgid "## Updating the Ubuntu Base Image"
msgstr "" msgstr ""
#: ../../maintaining/tasks.md:15 bc10da8f9c3a42a095e208abf29a488c #: ../../maintaining/tasks.md:23 db19a5192fab4cccbe86f6cc73ab778f
msgid "" msgid ""
"When there's a security fix in the Ubuntu base image or after some time " "When there's a security fix in the Ubuntu base image or after some time "
"passes, it's a good idea to update the pinned SHA in the [jupyter/base-" "passes, it's a good idea to update the pinned SHA in the [jupyter/base-"
...@@ -85,18 +84,18 @@ msgid "" ...@@ -85,18 +84,18 @@ msgid ""
"layer will rebuild." "layer will rebuild."
msgstr "" msgstr ""
#: ../../maintaining/tasks.md:17 3107fa471e514777abcbcfccd30b4722 #: ../../maintaining/tasks.md:29 4ced832a1c9844c4a23c7486670a40fc
msgid "## Adding a New Core Image to Docker Cloud" msgid "## Adding a New Core Image to Docker Cloud"
msgstr "" msgstr ""
# 201f0506bbb24b47b79a5db01db86557 # 201f0506bbb24b47b79a5db01db86557
#: ../../maintaining/tasks.md:19 78296cc9864c4f5eaa232479d56ea479 #: ../../maintaining/tasks.md:31 c9553182c042418ab3aba261d6c19d3f
msgid "" msgid ""
"When there's a new stack definition, do the following before merging the " "When there's a new stack definition, do the following before merging the "
"PR with the new stack:" "PR with the new stack:"
msgstr "" msgstr ""
#: ../../maintaining/tasks.md:21 0b5c6c42d5134a6689de23bc0f333289 #: ../../maintaining/tasks.md:33 fabe64e7a1af46a7910b3cffeef5411c
msgid "" msgid ""
"Ensure the PR includes an update to the stack overview diagram [in the " "Ensure the PR includes an update to the stack overview diagram [in the "
"documentation](https://github.com/jupyter/docker-" "documentation](https://github.com/jupyter/docker-"
...@@ -105,212 +104,222 @@ msgid "" ...@@ -105,212 +104,222 @@ msgid ""
"used to create it." "used to create it."
msgstr "" msgstr ""
# 0e3d12dcfb4b42b8a3d24b9401caa757 #: ../../maintaining/tasks.md:36 cb36e81de2574d7eb3a3447dd9c40e68
#: ../../maintaining/tasks.md:22 9d653c0e44894b7e840f393c26b81544
msgid "" msgid ""
"Ensure the PR updates the Makefile which is used to build the stacks in " "Ensure the PR updates the Makefile which is used to build the stacks in "
"order on Travis CI." "order on GitHub Actions."
msgstr "" msgstr ""
#: ../../maintaining/tasks.md:23 f4694fd251cc4df38f389651a9f34164 #: ../../maintaining/tasks.md:37 a1db6dcafa40462c834b4eb78fb9f8f7
msgid "" msgid ""
"Create a new repository in the `jupyter` org on Docker Cloud named after " "Create a new repository in the `jupyter` org on Docker Cloud named after "
"the stack folder in the git repo." "the stack folder in the git repo."
msgstr "" msgstr ""
#: ../../maintaining/tasks.md:24 a088d32384274e6b96fd6b4ca9316762 #: ../../maintaining/tasks.md:39 1c547a028fbb4bd4b84066c28f5598d3
msgid "Grant the `stacks` team permission to write to the repo." msgid "Grant the `stacks` team permission to write to the repo."
msgstr "" msgstr ""
#: ../../maintaining/tasks.md:25 919261a2c2eb4fa4861e5414074d1d59 #: ../../maintaining/tasks.md:40 70b1bbb3c6d34473b9c613903831c122
msgid "Click *Builds* and then *Configure Automated Builds* for the repository." msgid "Click _Builds_ and then _Configure Automated Builds_ for the repository."
msgstr "" msgstr ""
#: ../../maintaining/tasks.md:26 56589eaa73ad4d01af35ddcaa16b89a7 #: ../../maintaining/tasks.md:41 bfad3b5b351541c88f89dfdf0a4ce909
msgid "Select `jupyter/docker-stacks` as the source repository." msgid "Select `jupyter/docker-stacks` as the source repository."
msgstr "" msgstr ""
#: ../../maintaining/tasks.md:27 fd4623531783407b80fb6f0b52a7402c #: ../../maintaining/tasks.md:42 b15d3cffa47d4b58ab2a15bdff0c4c76
msgid "" msgid ""
"Choose *Build on Docker Cloud's infrastructure using a Small node* unless" "Choose _Build on Docker Cloud's infrastructure using a Small node_ unless"
" you have reason to believe a bigger host is required." " you have reason to believe a bigger host is required."
msgstr "" msgstr ""
#: ../../maintaining/tasks.md:28 b9b619a6ecd246aea9a1df5fc6ff0419 #: ../../maintaining/tasks.md:44 0a64807f1c46424f8bffdaae134d9af9
msgid "" msgid ""
"Update the *Build Context* in the default build rule to be `/<name-of-" "Update the _Build Context_ in the default build rule to be `/<name-of-"
"the-stack>`." "the-stack>`."
msgstr "" msgstr ""
#: ../../maintaining/tasks.md:29 5a966d025bf1420990df95cb8ac2ad88 #: ../../maintaining/tasks.md:45 e372a3e606e74279a767f664773b4685
msgid "" msgid ""
"Toggle *Autobuild* to disabled unless the stack is a new root stack " "Toggle _Autobuild_ to disabled unless the stack is a new root stack "
"(e.g., like `jupyter/base-notebook`)." "(e.g., like `jupyter/base-notebook`)."
msgstr "" msgstr ""
#: ../../maintaining/tasks.md:30 f8150b6ed0284f8b8091a98b4364492e #: ../../maintaining/tasks.md:47 5dab6662eb524abb9878e0fe7c7b5e03
msgid "" msgid ""
"If the new stack depends on the build of another stack in the hierarchy: " "If the new stack depends on the build of another stack in the hierarchy: "
"1. Hit *Save* and then click *Configure Automated Builds*. 2. At the very" "1. Hit _Save_ and then click _Configure Automated Builds_. 2. At the very"
" bottom, add a build trigger named *Stack hierarchy trigger*. 3. Copy the" " bottom, add a build trigger named _Stack hierarchy trigger_. 3. Copy the"
" build trigger URL. 4. Visit the parent repository *Builds* page and " " build trigger URL. 4. Visit the parent repository _Builds_ page and "
"click *Configure Automated Builds*. 5. Add the URL you copied to the " "click _Configure Automated Builds_. 5. Add the URL you copied to the "
"*NEXT_BUILD_TRIGGERS* environment variable comma separated list of URLs, " "_NEXT_BUILD_TRIGGERS_ environment variable comma separated list"
"creating that environment variable if it does not already exist. 6. Hit " msgstr ""
"*Save*."
#: ../../maintaining/tasks.md:53 4886079321d54cf5959fc3e409140166
msgid "of URLs, creating that environment variable if it does not already exist."
msgstr "" msgstr ""
#: ../../maintaining/tasks.md:37 b814a7ef513342ddb9891749cdb49341 #: ../../maintaining/tasks.md:54 ../../maintaining/tasks.md:59
#: 7e6dec502b5841db915faf2aa3a8eb41 b2ee46f5748e4ac9a2348e81ab605a7b
msgid "Hit _Save_."
msgstr ""
#: ../../maintaining/tasks.md:55 8f2588957e2944389fcdba97ff7a5929
msgid "" msgid ""
"If the new stack should trigger other dependent builds: 1. Add an " "If the new stack should trigger other dependent builds: 1. Add an "
"environment variable named *NEXT_BUILD_TRIGGERS*. 2. Copy the build " "environment variable named _NEXT_BUILD_TRIGGERS_. 2. Copy the build "
"trigger URLs from the dependent builds into the *NEXT_BUILD_TRIGGERS* " "trigger URLs from the dependent builds into the _NEXT_BUILD_TRIGGERS_ "
"comma separated list of URLs. 3. Hit *Save*." "comma"
msgstr ""
#: ../../maintaining/tasks.md:58 6f987b32b0eb4bfb9632499dd9547cf8
msgid "separated list of URLs."
msgstr "" msgstr ""
#: ../../maintaining/tasks.md:41 276c0c0679d04b2da6680eda4d5d09ab #: ../../maintaining/tasks.md:60 c9f9d54989724b62ba46c690970ba576
msgid "" msgid ""
"Adjust other *NEXT_BUILD_TRIGGERS* values as needed so that the build " "Adjust other _NEXT_BUILD_TRIGGERS_ values as needed so that the build "
"order matches that in the stack hierarchy diagram." "order matches that in the stack hierarchy diagram."
msgstr "" msgstr ""
#: ../../maintaining/tasks.md:43 783ade249ba442f1a13574bede289441 #: ../../maintaining/tasks.md:63 8ff283d700cd4a9dad9c10a6789cc5f9
msgid "## Adding a New Maintainer Account" msgid "## Adding a New Maintainer Account"
msgstr "" msgstr ""
# e3bd3ced73994d9fad596784e1469cfc # e3bd3ced73994d9fad596784e1469cfc
#: ../../maintaining/tasks.md:45 c1982766fae547bca21ef693aec24df0 #: ../../maintaining/tasks.md:65 c0c7ecda9e6344ab9821beb463b849dc
msgid "Visit https://cloud.docker.com/app/jupyter/team/stacks/users" msgid "Visit https://cloud.docker.com/app/jupyter/team/stacks/users"
msgstr "" msgstr ""
# 51b166c70ba743e0b4d335b3471da69a # 51b166c70ba743e0b4d335b3471da69a
#: ../../maintaining/tasks.md:46 b8ac817887694fa2a6250be4eccf9865 #: ../../maintaining/tasks.md:66 85e68de60e4a4426b250f5dfb96f30c9
msgid "Add the maintainer's Docker Cloud username." msgid "Add the maintainer's Docker Cloud username."
msgstr "" msgstr ""
# 300f5dbd933f4ee6b5a550efd35f1c52 # 300f5dbd933f4ee6b5a550efd35f1c52
#: ../../maintaining/tasks.md:47 3ca59c0f57164b74aa62276d7f287105 #: ../../maintaining/tasks.md:67 eff9933085bc46ac81926d7e85e2b3f8
msgid "" msgid ""
"Visit https://github.com/orgs/jupyter/teams/docker-image-" "Visit https://github.com/orgs/jupyter/teams/docker-image-"
"maintainers/members" "maintainers/members"
msgstr "" msgstr ""
# e26ad8ffb6de489988e076e64b6a1415 # e26ad8ffb6de489988e076e64b6a1415
#: ../../maintaining/tasks.md:48 ad3194aa73f24de89b008bfff12cfe89 #: ../../maintaining/tasks.md:68 46d01c10e2c44664aee95e5360edd579
msgid "Add the maintainer's GitHub username." msgid "Add the maintainer's GitHub username."
msgstr "" msgstr ""
#: ../../maintaining/tasks.md:50 5398a7484d7146ac9d3ee0df9d0ca1e1 #: ../../maintaining/tasks.md:70 0278ae3e3f354df2accf4716aa4a619c
msgid "## Pushing a Build Manually" msgid "## Pushing a Build Manually"
msgstr "" msgstr ""
# 050b5c7a3d9d46bcbe26d54e8585ddd8 # 050b5c7a3d9d46bcbe26d54e8585ddd8
#: ../../maintaining/tasks.md:52 a0113e0c62844907b88117436ab655d5 #: ../../maintaining/tasks.md:72 b0260a73403c425da436c59fe8e78e09
msgid "" msgid ""
"If automated builds on Docker Cloud have got you down, do the following " "If automated builds on Docker Cloud have got you down, do the following "
"to push a build manually:" "to push a build manually:"
msgstr "" msgstr ""
# ead6a3a714ae4a8a9df5585c18260c16 # ead6a3a714ae4a8a9df5585c18260c16
#: ../../maintaining/tasks.md:54 8322e8f2931946cea180334142dce4d7 #: ../../maintaining/tasks.md:74 b1d36953ee174c3dbc04a153ace6d862
msgid "Clone this repository." msgid "Clone this repository."
msgstr "" msgstr ""
# 48e1d6954f414fe080d7b4afd9e0c391 # 48e1d6954f414fe080d7b4afd9e0c391
#: ../../maintaining/tasks.md:55 53dcb1a948234d96a02151bd0c702410 #: ../../maintaining/tasks.md:75 bd98e36df92b4a83b9fb5298fbabf0bf
msgid "Check out the git SHA you want to build and publish." msgid "Check out the git SHA you want to build and publish."
msgstr "" msgstr ""
#: ../../maintaining/tasks.md:56 64fa522c1c9743a9a5a94d7f1aca78c8 #: ../../maintaining/tasks.md:76 4b4af44a60f54f5db093942092362794
msgid "`docker login` with your Docker Hub/Cloud credentials." msgid "`docker login` with your Docker Hub/Cloud credentials."
msgstr "" msgstr ""
#: ../../maintaining/tasks.md:57 5561c3a444184f2f9faf3b2470a7e30d #: ../../maintaining/tasks.md:77 a60704a1dee24afeac8298732c88133c
msgid "Run `make retry/release-all`." msgid "Run `make retry/release-all`."
msgstr "" msgstr ""
#: ../../maintaining/tasks.md:59 75abab343159485b92293fb0eee5e8f3 #: ../../maintaining/tasks.md:79 1d7f037ade384524be9cf5a2da2e1732
msgid "## Enabling a New Doc Language Translation" msgid "## Enabling a New Doc Language Translation"
msgstr "" msgstr ""
# 5aafef10dc75417785a79aba203175e5 # 5aafef10dc75417785a79aba203175e5
#: ../../maintaining/tasks.md:61 05d5a331576446c8845f8db8c2564f30 #: ../../maintaining/tasks.md:81 9b565e5ad1ae4df9a168536dac18f7e2
msgid "First enable translation on Transifex:" msgid "First enable translation on Transifex:"
msgstr "" msgstr ""
# c1a249c0d0cd4e9192ed7814dfde6e34 # c1a249c0d0cd4e9192ed7814dfde6e34
#: ../../maintaining/tasks.md:63 02c24d7f64de4a40a10b34cb5c3c8b16 #: ../../maintaining/tasks.md:83 c39df2ae52ef49f8b4ff9d184abe8331
msgid "" msgid ""
"Visit https://www.transifex.com/project-jupyter/jupyter-docker-" "Visit https://www.transifex.com/project-jupyter/jupyter-docker-"
"stacks-1/languages/" "stacks-1/languages/"
msgstr "" msgstr ""
#: ../../maintaining/tasks.md:64 db93f630f8b84e418893b1c788f6cd2f #: ../../maintaining/tasks.md:84 ec2ee93b70ce405ab43f4da197674467
msgid "Click *Edit Languages* in the top right." msgid "Click _Edit Languages_ in the top right."
msgstr "" msgstr ""
# 7efe7d98a98b47bd82d697673d277cbd # 7efe7d98a98b47bd82d697673d277cbd
#: ../../maintaining/tasks.md:65 64cd60c1323f46cfb4c01a978815a4c1 #: ../../maintaining/tasks.md:85 8c9578ec646e410bb0d4891bb4f5f71e
msgid "Select the language from the dropdown." msgid "Select the language from the dropdown."
msgstr "" msgstr ""
#: ../../maintaining/tasks.md:66 d2a439e2f5014dad830ca1613b743d5c #: ../../maintaining/tasks.md:86 5699c36519a34f6b9f2a0c76cb2844a5
msgid "Click *Apply*." msgid "Click _Apply_."
msgstr "" msgstr ""
# 1e3868ee7dae469f9921516dd7973766 # 1e3868ee7dae469f9921516dd7973766
#: ../../maintaining/tasks.md:68 135f935d1f654289ba9867871210984f #: ../../maintaining/tasks.md:88 d7108eada14f448497e580d6401b7593
msgid "Then setup a subproject on ReadTheDocs for the language:" msgid "Then setup a subproject on ReadTheDocs for the language:"
msgstr "" msgstr ""
# fffa155a75674f0dbe746a15eb3be492 # fffa155a75674f0dbe746a15eb3be492
#: ../../maintaining/tasks.md:70 d4e0bd836f974d93aa511ac220b2d32a #: ../../maintaining/tasks.md:90 12b36373698643b695784e7b0a68d585
msgid "Visit https://readthedocs.org/dashboard/import/manual/" msgid "Visit https://readthedocs.org/dashboard/import/manual/"
msgstr "" msgstr ""
#: ../../maintaining/tasks.md:71 4da6f8ab4a9d4f89a09449a2154c5827 #: ../../maintaining/tasks.md:91 b488eae9bb924b3588d9d14e758ee7b6
msgid "Enter *jupyter-docker-stacks-language_abbreviation* for the project name." msgid "Enter _jupyter-docker-stacks-language_abbreviation_ for the project name."
msgstr "" msgstr ""
# 2869b2f7a89c428f903e3695dd511e9a # 2869b2f7a89c428f903e3695dd511e9a
#: ../../maintaining/tasks.md:72 0ba7634405354e90a6540f64bbb1e47e #: ../../maintaining/tasks.md:92 9ebca91beff846648bdc758fc90afec1
msgid "Enter https://github.com/jupyter/docker-stacks for the URL." msgid "Enter https://github.com/jupyter/docker-stacks for the URL."
msgstr "" msgstr ""
#: ../../maintaining/tasks.md:73 62a52d38a77c473d8160ab4310162a0a #: ../../maintaining/tasks.md:93 2d3b518fd69640169e17d44018a7a8e4
msgid "Check *Edit advanced project options*." msgid "Check _Edit advanced project options_."
msgstr "" msgstr ""
#: ../../maintaining/tasks.md:74 2bcec71b1ac142ff94707d9c889b6dc6 #: ../../maintaining/tasks.md:94 75e92ae1b3b0418f8256a68e974ae9e9
msgid "Click *Next*." msgid "Click _Next_."
msgstr "" msgstr ""
#: ../../maintaining/tasks.md:75 53574ea6cc9b48169a1e1612f9568861 #: ../../maintaining/tasks.md:95 f577fc280c154e37824461b6d8f39528
msgid "Select the *Language* from the dropdown on the next screen." msgid "Select the _Language_ from the dropdown on the next screen."
msgstr "" msgstr ""
#: ../../maintaining/tasks.md:76 9296b95300784a3094d56add04e6812f #: ../../maintaining/tasks.md:96 810b06550ec44599a81792006973d3af
msgid "Click *Finish*." msgid "Click _Finish_."
msgstr "" msgstr ""
# 529f3729d2474287adec0ff895100248 # 529f3729d2474287adec0ff895100248
#: ../../maintaining/tasks.md:78 6006f4be03a449d8a788839778a9cf70 #: ../../maintaining/tasks.md:98 08613ea94d7c4b7c99cc0c3e775670bc
msgid "Finally link the new language subproject to the top level doc project:" msgid "Finally link the new language subproject to the top level doc project:"
msgstr "" msgstr ""
# 024aaf54695141839eaa5537b4087a81 # 024aaf54695141839eaa5537b4087a81
#: ../../maintaining/tasks.md:80 97d1554572034f1a9c1e14c2de3f96da #: ../../maintaining/tasks.md:100 2a798b799c2649769bdbc0f202c49e50
msgid "" msgid ""
"Visit https://readthedocs.org/dashboard/jupyter-docker-" "Visit https://readthedocs.org/dashboard/jupyter-docker-"
"stacks/translations/" "stacks/translations/"
msgstr "" msgstr ""
#: ../../maintaining/tasks.md:81 ec8db5a40970438fb6d6641fff20c8ec #: ../../maintaining/tasks.md:101 3c1717d4974c4e4a98dbbbcf947e93ec
msgid "Select the subproject you created from the *Project* dropdown." msgid "Select the subproject you created from the _Project_ dropdown."
msgstr "" msgstr ""
#: ../../maintaining/tasks.md:82 8bc67e5bc244476e853fe4122c1e0c72 #: ../../maintaining/tasks.md:102 91e9666e3b1348e7830a077ca601add8
msgid "Click *Add*." msgid "Click _Add_."
msgstr "" msgstr ""
# a798b8e31be7408d9301187ff3e3ef69 # a798b8e31be7408d9301187ff3e3ef69
...@@ -526,3 +535,102 @@ msgstr "" ...@@ -526,3 +535,102 @@ msgstr ""
#~ msgid "Click Add." #~ msgid "Click Add."
#~ msgstr "" #~ msgstr ""
# f2710754a41e4354b51f4acd6f1f9545
#~ msgid "Make sure Travis is green for a PR."
#~ msgstr ""
# 0e3d12dcfb4b42b8a3d24b9401caa757
#~ msgid ""
#~ "Ensure the PR updates the Makefile "
#~ "which is used to build the stacks"
#~ " in order on Travis CI."
#~ msgstr ""
#~ msgid ""
#~ "Click *Builds* and then *Configure "
#~ "Automated Builds* for the repository."
#~ msgstr ""
#~ msgid ""
#~ "Choose *Build on Docker Cloud's "
#~ "infrastructure using a Small node* "
#~ "unless you have reason to believe "
#~ "a bigger host is required."
#~ msgstr ""
#~ msgid ""
#~ "Update the *Build Context* in the "
#~ "default build rule to be `/<name-"
#~ "of-the-stack>`."
#~ msgstr ""
#~ msgid ""
#~ "Toggle *Autobuild* to disabled unless "
#~ "the stack is a new root stack "
#~ "(e.g., like `jupyter/base-notebook`)."
#~ msgstr ""
#~ msgid ""
#~ "If the new stack depends on the"
#~ " build of another stack in the "
#~ "hierarchy: 1. Hit *Save* and then "
#~ "click *Configure Automated Builds*. 2. "
#~ "At the very bottom, add a build"
#~ " trigger named *Stack hierarchy trigger*."
#~ " 3. Copy the build trigger URL. "
#~ "4. Visit the parent repository *Builds*"
#~ " page and click *Configure Automated "
#~ "Builds*. 5. Add the URL you copied"
#~ " to the *NEXT_BUILD_TRIGGERS* environment "
#~ "variable comma separated list of URLs,"
#~ " creating that environment variable if "
#~ "it does not already exist. 6. Hit"
#~ " *Save*."
#~ msgstr ""
#~ msgid ""
#~ "If the new stack should trigger "
#~ "other dependent builds: 1. Add an "
#~ "environment variable named *NEXT_BUILD_TRIGGERS*."
#~ " 2. Copy the build trigger URLs "
#~ "from the dependent builds into the "
#~ "*NEXT_BUILD_TRIGGERS* comma separated list of"
#~ " URLs. 3. Hit *Save*."
#~ msgstr ""
#~ msgid ""
#~ "Adjust other *NEXT_BUILD_TRIGGERS* values as"
#~ " needed so that the build order "
#~ "matches that in the stack hierarchy "
#~ "diagram."
#~ msgstr ""
#~ msgid "Click *Edit Languages* in the top right."
#~ msgstr ""
#~ msgid "Click *Apply*."
#~ msgstr ""
#~ msgid ""
#~ "Enter *jupyter-docker-stacks-"
#~ "language_abbreviation* for the project name."
#~ msgstr ""
#~ msgid "Check *Edit advanced project options*."
#~ msgstr ""
#~ msgid "Click *Next*."
#~ msgstr ""
#~ msgid "Select the *Language* from the dropdown on the next screen."
#~ msgstr ""
#~ msgid "Click *Finish*."
#~ msgstr ""
#~ msgid "Select the subproject you created from the *Project* dropdown."
#~ msgstr ""
#~ msgid "Click *Add*."
#~ msgstr ""
This source diff could not be displayed because it is too large. You can view the blob instead.
...@@ -4,41 +4,61 @@ ...@@ -4,41 +4,61 @@
To build new images on Docker Cloud and publish them to the Docker Hub registry, do the following: To build new images on Docker Cloud and publish them to the Docker Hub registry, do the following:
1. Make sure Travis is green for a PR. 1. Make sure GitHub Actions status checks pas for the PR.
2. Merge the PR. 2. Merge the PR.
3. Monitor the Docker Cloud build status for each of the stacks, starting with [jupyter/base-notebook](https://cloud.docker.com/app/jupyter/repository/docker/jupyter/base-notebook/general) and ending with [jupyter/all-spark-notebook](https://cloud.docker.com/app/jupyter/repository/docker/jupyter/all-spark-notebook/general). See the [stack hierarchy diagram](../using/selecting.html#image-relationships) for the current, complete build order. 3. Monitor the Docker Cloud build status for each of the stacks, starting with
4. Manually click the retry button next to any build that fails to resume that build and any dependent builds. [jupyter/base-notebook](https://cloud.docker.com/app/jupyter/repository/docker/jupyter/base-notebook/general)
5. Try to avoid merging another PR to master until all outstanding builds complete. There's no way at present to propagate the git SHA to build through the Docker Cloud build trigger API. Every build trigger works off of master HEAD. and ending with
[jupyter/all-spark-notebook](https://cloud.docker.com/app/jupyter/repository/docker/jupyter/all-spark-notebook/general).
See the [stack hierarchy diagram](../using/selecting.html#image-relationships) for the current,
complete build order.
4. Manually click the retry button next to any build that fails to resume that build and any
dependent builds.
5. Try to avoid merging another PR to master until all outstanding builds complete. There's no way
at present to propagate the git SHA to build through the Docker Cloud build trigger API. Every
build trigger works off of master HEAD.
## Updating the Ubuntu Base Image ## Updating the Ubuntu Base Image
When there's a security fix in the Ubuntu base image or after some time passes, it's a good idea to update the pinned SHA in the [jupyter/base-notebook Dockerfile](https://github.com/jupyter/docker-stacks/blob/master/base-notebook/Dockerfile). Submit it as a regular PR and go through the build process. Expect the build to take a while to complete: every image layer will rebuild. When there's a security fix in the Ubuntu base image or after some time passes, it's a good idea to
update the pinned SHA in the
[jupyter/base-notebook Dockerfile](https://github.com/jupyter/docker-stacks/blob/master/base-notebook/Dockerfile).
Submit it as a regular PR and go through the build process. Expect the build to take a while to
complete: every image layer will rebuild.
## Adding a New Core Image to Docker Cloud ## Adding a New Core Image to Docker Cloud
When there's a new stack definition, do the following before merging the PR with the new stack: When there's a new stack definition, do the following before merging the PR with the new stack:
1. Ensure the PR includes an update to the stack overview diagram [in the documentation](https://github.com/jupyter/docker-stacks/blob/master/docs/using/selecting.md#image-relationships). The image links to the [blockdiag source](http://interactive.blockdiag.com/) used to create it. 1. Ensure the PR includes an update to the stack overview diagram
2. Ensure the PR updates the Makefile which is used to build the stacks in order on Travis CI. [in the documentation](https://github.com/jupyter/docker-stacks/blob/master/docs/using/selecting.md#image-relationships).
3. Create a new repository in the `jupyter` org on Docker Cloud named after the stack folder in the git repo. The image links to the [blockdiag source](http://interactive.blockdiag.com/) used to create it.
2. Ensure the PR updates the Makefile which is used to build the stacks in order on GitHub Actions.
3. Create a new repository in the `jupyter` org on Docker Cloud named after the stack folder in the
git repo.
4. Grant the `stacks` team permission to write to the repo. 4. Grant the `stacks` team permission to write to the repo.
5. Click *Builds* and then *Configure Automated Builds* for the repository. 5. Click _Builds_ and then _Configure Automated Builds_ for the repository.
6. Select `jupyter/docker-stacks` as the source repository. 6. Select `jupyter/docker-stacks` as the source repository.
7. Choose *Build on Docker Cloud's infrastructure using a Small node* unless you have reason to believe a bigger host is required. 7. Choose _Build on Docker Cloud's infrastructure using a Small node_ unless you have reason to
8. Update the *Build Context* in the default build rule to be `/<name-of-the-stack>`. believe a bigger host is required.
9. Toggle *Autobuild* to disabled unless the stack is a new root stack (e.g., like `jupyter/base-notebook`). 8. Update the _Build Context_ in the default build rule to be `/<name-of-the-stack>`.
9. Toggle _Autobuild_ to disabled unless the stack is a new root stack (e.g., like
`jupyter/base-notebook`).
10. If the new stack depends on the build of another stack in the hierarchy: 10. If the new stack depends on the build of another stack in the hierarchy:
1. Hit *Save* and then click *Configure Automated Builds*. 1. Hit _Save_ and then click _Configure Automated Builds_.
2. At the very bottom, add a build trigger named *Stack hierarchy trigger*. 2. At the very bottom, add a build trigger named _Stack hierarchy trigger_.
3. Copy the build trigger URL. 3. Copy the build trigger URL.
4. Visit the parent repository *Builds* page and click *Configure Automated Builds*. 4. Visit the parent repository _Builds_ page and click _Configure Automated Builds_.
5. Add the URL you copied to the *NEXT_BUILD_TRIGGERS* environment variable comma separated list of URLs, creating that environment variable if it does not already exist. 5. Add the URL you copied to the _NEXT_BUILD_TRIGGERS_ environment variable comma separated list
6. Hit *Save*. of URLs, creating that environment variable if it does not already exist.
6. Hit _Save_.
11. If the new stack should trigger other dependent builds: 11. If the new stack should trigger other dependent builds:
1. Add an environment variable named *NEXT_BUILD_TRIGGERS*. 1. Add an environment variable named _NEXT_BUILD_TRIGGERS_.
2. Copy the build trigger URLs from the dependent builds into the *NEXT_BUILD_TRIGGERS* comma separated list of URLs. 2. Copy the build trigger URLs from the dependent builds into the _NEXT_BUILD_TRIGGERS_ comma
3. Hit *Save*. separated list of URLs.
12. Adjust other *NEXT_BUILD_TRIGGERS* values as needed so that the build order matches that in the stack hierarchy diagram. 3. Hit _Save_.
12. Adjust other _NEXT_BUILD_TRIGGERS_ values as needed so that the build order matches that in the
stack hierarchy diagram.
## Adding a New Maintainer Account ## Adding a New Maintainer Account
...@@ -61,22 +81,22 @@ If automated builds on Docker Cloud have got you down, do the following to push ...@@ -61,22 +81,22 @@ If automated builds on Docker Cloud have got you down, do the following to push
First enable translation on Transifex: First enable translation on Transifex:
1. Visit https://www.transifex.com/project-jupyter/jupyter-docker-stacks-1/languages/ 1. Visit https://www.transifex.com/project-jupyter/jupyter-docker-stacks-1/languages/
2. Click *Edit Languages* in the top right. 2. Click _Edit Languages_ in the top right.
3. Select the language from the dropdown. 3. Select the language from the dropdown.
4. Click *Apply*. 4. Click _Apply_.
Then setup a subproject on ReadTheDocs for the language: Then setup a subproject on ReadTheDocs for the language:
1. Visit https://readthedocs.org/dashboard/import/manual/ 1. Visit https://readthedocs.org/dashboard/import/manual/
2. Enter *jupyter-docker-stacks-language_abbreviation* for the project name. 2. Enter _jupyter-docker-stacks-language_abbreviation_ for the project name.
3. Enter https://github.com/jupyter/docker-stacks for the URL. 3. Enter https://github.com/jupyter/docker-stacks for the URL.
4. Check *Edit advanced project options*. 4. Check _Edit advanced project options_.
5. Click *Next*. 5. Click _Next_.
6. Select the *Language* from the dropdown on the next screen. 6. Select the _Language_ from the dropdown on the next screen.
7. Click *Finish*. 7. Click _Finish_.
Finally link the new language subproject to the top level doc project: Finally link the new language subproject to the top level doc project:
1. Visit https://readthedocs.org/dashboard/jupyter-docker-stacks/translations/ 1. Visit https://readthedocs.org/dashboard/jupyter-docker-stacks/translations/
2. Select the subproject you created from the *Project* dropdown. 2. Select the subproject you created from the _Project_ dropdown.
3. Click *Add*. 3. Click _Add_.
...@@ -48,7 +48,7 @@ packages desired. Next, create a new Dockerfile like the one shown below. ...@@ -48,7 +48,7 @@ packages desired. Next, create a new Dockerfile like the one shown below.
# Start from a core stack version # Start from a core stack version
FROM jupyter/datascience-notebook:9f9e5ca8fe5a FROM jupyter/datascience-notebook:9f9e5ca8fe5a
# Install from requirements.txt file # Install from requirements.txt file
COPY requirements.txt /tmp/ COPY --chown=${NB_UID}:${NB_GID} requirements.txt /tmp/
RUN pip install --requirement /tmp/requirements.txt && \ RUN pip install --requirement /tmp/requirements.txt && \
fix-permissions $CONDA_DIR && \ fix-permissions $CONDA_DIR && \
fix-permissions /home/$NB_USER fix-permissions /home/$NB_USER
...@@ -60,7 +60,7 @@ For conda, the Dockerfile is similar: ...@@ -60,7 +60,7 @@ For conda, the Dockerfile is similar:
# Start from a core stack version # Start from a core stack version
FROM jupyter/datascience-notebook:9f9e5ca8fe5a FROM jupyter/datascience-notebook:9f9e5ca8fe5a
# Install from requirements.txt file # Install from requirements.txt file
COPY requirements.txt /tmp/ COPY --chown=${NB_UID}:${NB_GID} requirements.txt /tmp/
RUN conda install --yes --file /tmp/requirements.txt && \ RUN conda install --yes --file /tmp/requirements.txt && \
fix-permissions $CONDA_DIR && \ fix-permissions $CONDA_DIR && \
fix-permissions /home/$NB_USER fix-permissions /home/$NB_USER
...@@ -118,7 +118,7 @@ RUN conda create --quiet --yes -p $CONDA_DIR/envs/$conda_env python=$py_ver ipyt ...@@ -118,7 +118,7 @@ RUN conda create --quiet --yes -p $CONDA_DIR/envs/$conda_env python=$py_ver ipyt
# alternatively, you can comment out the lines above and uncomment those below # alternatively, you can comment out the lines above and uncomment those below
# if you'd prefer to use a YAML file present in the docker build context # if you'd prefer to use a YAML file present in the docker build context
# COPY environment.yml /home/$NB_USER/tmp/ # COPY --chown=${NB_UID}:${NB_GID} environment.yml /home/$NB_USER/tmp/
# RUN cd /home/$NB_USER/tmp/ && \ # RUN cd /home/$NB_USER/tmp/ && \
# conda env create -p $CONDA_DIR/envs/$conda_env -f environment.yml && \ # conda env create -p $CONDA_DIR/envs/$conda_env -f environment.yml && \
# conda clean --all -f -y # conda clean --all -f -y
......
# Selecting an Image # Selecting an Image
* [Core Stacks](#core-stacks) - [Core Stacks](#core-stacks)
* [Image Relationships](#image-relationships) - [Image Relationships](#image-relationships)
* [Community Stacks](#community-stacks) - [Community Stacks](#community-stacks)
Using one of the Jupyter Docker Stacks requires two choices: Using one of the Jupyter Docker Stacks requires two choices:
...@@ -13,193 +13,233 @@ This section provides details about the first. ...@@ -13,193 +13,233 @@ This section provides details about the first.
## Core Stacks ## Core Stacks
The Jupyter team maintains a set of Docker image definitions in the [https://github.com/jupyter/docker-stacks](https://github.com/jupyter/docker-stacks) GitHub The Jupyter team maintains a set of Docker image definitions in the
repository. The following sections describe these images including their contents, relationships, and versioning strategy. [https://github.com/jupyter/docker-stacks](https://github.com/jupyter/docker-stacks) GitHub
repository. The following sections describe these images including their contents, relationships,
and versioning strategy.
### jupyter/base-notebook ### jupyter/base-notebook
[Source on GitHub](https://github.com/jupyter/docker-stacks/tree/master/base-notebook) [Source on GitHub](https://github.com/jupyter/docker-stacks/tree/master/base-notebook) |
| [Dockerfile commit history](https://github.com/jupyter/docker-stacks/commits/master/base-notebook/Dockerfile) [Dockerfile commit history](https://github.com/jupyter/docker-stacks/commits/master/base-notebook/Dockerfile)
| [Docker Hub image tags](https://hub.docker.com/r/jupyter/base-notebook/tags/) | [Docker Hub image tags](https://hub.docker.com/r/jupyter/base-notebook/tags/)
`jupyter/base-notebook` is a small image supporting the [options common across all core stacks](common.md). It is the basis for all other stacks. `jupyter/base-notebook` is a small image supporting the
[options common across all core stacks](common.md). It is the basis for all other stacks.
* Minimally-functional Jupyter Notebook server (e.g., no LaTeX support for saving notebooks as PDFs) - Minimally-functional Jupyter Notebook server (e.g., no LaTeX support for saving notebooks as PDFs)
* [Miniconda](https://conda.io/miniconda.html) Python 3.x in `/opt/conda` - [Miniconda](https://conda.io/miniconda.html) Python 3.x in `/opt/conda`
* No preinstalled scientific computing packages - No preinstalled scientific computing packages
* Unprivileged user `jovyan` (`uid=1000`, configurable, see options) in group `users` (`gid=100`) with ownership over the `/home/jovyan` and `/opt/conda` paths - Unprivileged user `jovyan` (`uid=1000`, configurable, see options) in group `users` (`gid=100`)
* `tini` as the container entrypoint and a `start-notebook.sh` script as the default command with ownership over the `/home/jovyan` and `/opt/conda` paths
* A `start-singleuser.sh` script useful for launching containers in JupyterHub - `tini` as the container entrypoint and a `start-notebook.sh` script as the default command
* A `start.sh` script useful for running alternative commands in the container (e.g. `ipython`, `jupyter kernelgateway`, `jupyter lab`) - A `start-singleuser.sh` script useful for launching containers in JupyterHub
* Options for a self-signed HTTPS certificate and passwordless sudo - A `start.sh` script useful for running alternative commands in the container (e.g. `ipython`,
`jupyter kernelgateway`, `jupyter lab`)
- Options for a self-signed HTTPS certificate and passwordless sudo
### jupyter/minimal-notebook ### jupyter/minimal-notebook
[Source on GitHub](https://github.com/jupyter/docker-stacks/tree/master/minimal-notebook) [Source on GitHub](https://github.com/jupyter/docker-stacks/tree/master/minimal-notebook) |
| [Dockerfile commit history](https://github.com/jupyter/docker-stacks/commits/master/minimal-notebook/Dockerfile) [Dockerfile commit history](https://github.com/jupyter/docker-stacks/commits/master/minimal-notebook/Dockerfile)
| [Docker Hub image tags](https://hub.docker.com/r/jupyter/minimal-notebook/tags/) | [Docker Hub image tags](https://hub.docker.com/r/jupyter/minimal-notebook/tags/)
`jupyter/minimal-notebook` adds command line tools useful when working in Jupyter applications. `jupyter/minimal-notebook` adds command line tools useful when working in Jupyter applications.
* Everything in `jupyter/base-notebook` - Everything in `jupyter/base-notebook`
* [TeX Live](https://www.tug.org/texlive/) for notebook document conversion - [TeX Live](https://www.tug.org/texlive/) for notebook document conversion
* [git](https://git-scm.com/), [emacs](https://www.gnu.org/software/emacs/) (actually `emacs-nox`), [vi](https://vim.org/) (actually `vim-tiny`), [jed](https://www.jedsoft.org/jed/), [nano](https://www.nano-editor.org/), tzdata, and unzip - [git](https://git-scm.com/), [emacs](https://www.gnu.org/software/emacs/) (actually `emacs-nox`),
[vi](https://vim.org/) (actually `vim-tiny`), [jed](https://www.jedsoft.org/jed/),
[nano](https://www.nano-editor.org/) (actually `nano-tiny`), tzdata, and unzip
### jupyter/r-notebook ### jupyter/r-notebook
[Source on GitHub](https://github.com/jupyter/docker-stacks/tree/master/r-notebook) [Source on GitHub](https://github.com/jupyter/docker-stacks/tree/master/r-notebook) |
| [Dockerfile commit history](https://github.com/jupyter/docker-stacks/commits/master/r-notebook/Dockerfile) [Dockerfile commit history](https://github.com/jupyter/docker-stacks/commits/master/r-notebook/Dockerfile)
| [Docker Hub image tags](https://hub.docker.com/r/jupyter/r-notebook/tags/) | [Docker Hub image tags](https://hub.docker.com/r/jupyter/r-notebook/tags/)
`jupyter/r-notebook` includes popular packages from the R ecosystem. `jupyter/r-notebook` includes popular packages from the R ecosystem.
* Everything in `jupyter/minimal-notebook` and its ancestor images - Everything in `jupyter/minimal-notebook` and its ancestor images
* The [R](https://www.r-project.org/) interpreter and base environment - The [R](https://www.r-project.org/) interpreter and base environment
* [IRKernel](https://irkernel.github.io/) to support R code in Jupyter notebooks - [IRKernel](https://irkernel.github.io/) to support R code in Jupyter notebooks
* [tidyverse](https://www.tidyverse.org/) packages, including [ggplot2](http://ggplot2.org/), [dplyr](http://dplyr.tidyverse.org/), - [tidyverse](https://www.tidyverse.org/) packages from
[tidyr](http://tidyr.tidyverse.org/), [readr](http://readr.tidyverse.org/), [purrr](http://purrr.tidyverse.org/), [tibble](http://tibble.tidyverse.org/), [conda-forge](https://conda-forge.github.io/feedstocks)
[stringr](http://stringr.tidyverse.org/), [lubridate](http://lubridate.tidyverse.org/), and - [devtools](https://cran.r-project.org/web/packages/devtools/index.html),
[broom](https://cran.r-project.org/web/packages/broom/vignettes/broom.html) from [conda-forge](https://conda-forge.github.io/feedstocks) [shiny](https://shiny.rstudio.com/), [rmarkdown](http://rmarkdown.rstudio.com/),
* [devtools](https://cran.r-project.org/web/packages/devtools/index.html), [forecast](https://cran.r-project.org/web/packages/forecast/forecast.pdf),
[shiny](https://shiny.rstudio.com/), [rmarkdown](http://rmarkdown.rstudio.com/), [forecast](https://cran.r-project.org/web/packages/forecast/forecast.pdf), [rsqlite](https://cran.r-project.org/web/packages/RSQLite/index.html),
[rsqlite](https://cran.r-project.org/web/packages/RSQLite/index.html), [nycflights13](https://cran.r-project.org/web/packages/nycflights13/index.html),
[nycflights13](https://cran.r-project.org/web/packages/nycflights13/index.html), [caret](http://topepo.github.io/caret/index.html), [tidymodels](https://www.tidymodels.org/), [caret](http://topepo.github.io/caret/index.html), [tidymodels](https://www.tidymodels.org/),
[rcurl](https://cran.r-project.org/web/packages/RCurl/index.html), and [randomforest](https://cran.r-project.org/web/packages/randomForest/randomForest.pdf) [rcurl](https://cran.r-project.org/web/packages/RCurl/index.html), and
packages from [conda-forge](https://conda-forge.github.io/feedstocks) [randomforest](https://cran.r-project.org/web/packages/randomForest/randomForest.pdf) packages
from [conda-forge](https://conda-forge.github.io/feedstocks)
### jupyter/scipy-notebook ### jupyter/scipy-notebook
[Source on GitHub](https://github.com/jupyter/docker-stacks/tree/master/scipy-notebook) [Source on GitHub](https://github.com/jupyter/docker-stacks/tree/master/scipy-notebook) |
| [Dockerfile commit history](https://github.com/jupyter/docker-stacks/commits/master/scipy-notebook/Dockerfile) [Dockerfile commit history](https://github.com/jupyter/docker-stacks/commits/master/scipy-notebook/Dockerfile)
| [Docker Hub image tags](https://hub.docker.com/r/jupyter/scipy-notebook/tags/) | [Docker Hub image tags](https://hub.docker.com/r/jupyter/scipy-notebook/tags/)
`jupyter/scipy-notebook` includes popular packages from the scientific Python ecosystem. `jupyter/scipy-notebook` includes popular packages from the scientific Python ecosystem.
* Everything in `jupyter/minimal-notebook` and its ancestor images - Everything in `jupyter/minimal-notebook` and its ancestor images
* [dask](https://dask.org/), [pandas](https://pandas.pydata.org/), [numexpr](https://github.com/pydata/numexpr), [matplotlib](https://matplotlib.org/), [scipy](https://www.scipy.org/), - [dask](https://dask.org/), [pandas](https://pandas.pydata.org/),
[seaborn](https://seaborn.pydata.org/), [scikit-learn](http://scikit-learn.org/stable/), [scikit-image](http://scikit-image.org/), [numexpr](https://github.com/pydata/numexpr), [matplotlib](https://matplotlib.org/),
[sympy](http://www.sympy.org/en/index.html), [cython](http://cython.org/), [patsy](https://patsy.readthedocs.io/en/latest/), [scipy](https://www.scipy.org/), [seaborn](https://seaborn.pydata.org/),
[statsmodel](http://www.statsmodels.org/stable/index.html), [cloudpickle](https://github.com/cloudpipe/cloudpickle), [dill](https://pypi.python.org/pypi/dill), [scikit-learn](http://scikit-learn.org/stable/), [scikit-image](http://scikit-image.org/),
[numba](https://numba.pydata.org/), [bokeh](https://bokeh.pydata.org/en/latest/), [sqlalchemy](https://www.sqlalchemy.org/), [hdf5](http://www.h5py.org/), [sympy](http://www.sympy.org/en/index.html), [cython](http://cython.org/),
[vincent](http://vincent.readthedocs.io/en/latest/), [beautifulsoup](https://www.crummy.com/software/BeautifulSoup/), [patsy](https://patsy.readthedocs.io/en/latest/),
[protobuf](https://developers.google.com/protocol-buffers/docs/pythontutorial), [xlrd](http://www.python-excel.org/), [bottleneck](https://bottleneck.readthedocs.io/en/latest/), and [pytables](https://www.pytables.org/) packages [statsmodel](http://www.statsmodels.org/stable/index.html),
* [ipywidgets](https://ipywidgets.readthedocs.io/en/stable/) and [ipympl](https://github.com/matplotlib/jupyter-matplotlib) for interactive visualizations and plots in Python notebooks [cloudpickle](https://github.com/cloudpipe/cloudpickle),
* [Facets](https://github.com/PAIR-code/facets) for visualizing machine learning datasets [dill](https://pypi.python.org/pypi/dill), [numba](https://numba.pydata.org/),
[bokeh](https://bokeh.pydata.org/en/latest/), [sqlalchemy](https://www.sqlalchemy.org/),
[hdf5](http://www.h5py.org/), [vincent](http://vincent.readthedocs.io/en/latest/),
[beautifulsoup](https://www.crummy.com/software/BeautifulSoup/),
[protobuf](https://developers.google.com/protocol-buffers/docs/pythontutorial),
[xlrd](http://www.python-excel.org/), [bottleneck](https://bottleneck.readthedocs.io/en/latest/),
and [pytables](https://www.pytables.org/) packages
- [ipywidgets](https://ipywidgets.readthedocs.io/en/stable/) and
[ipympl](https://github.com/matplotlib/jupyter-matplotlib) for interactive visualizations and
plots in Python notebooks
- [Facets](https://github.com/PAIR-code/facets) for visualizing machine learning datasets
### jupyter/tensorflow-notebook ### jupyter/tensorflow-notebook
[Source on GitHub](https://github.com/jupyter/docker-stacks/tree/master/tensorflow-notebook) [Source on GitHub](https://github.com/jupyter/docker-stacks/tree/master/tensorflow-notebook) |
| [Dockerfile commit history](https://github.com/jupyter/docker-stacks/commits/master/tensorflow-notebook/Dockerfile) [Dockerfile commit history](https://github.com/jupyter/docker-stacks/commits/master/tensorflow-notebook/Dockerfile)
| [Docker Hub image tags](https://hub.docker.com/r/jupyter/tensorflow-notebook/tags/) | [Docker Hub image tags](https://hub.docker.com/r/jupyter/tensorflow-notebook/tags/)
`jupyter/tensorflow-notebook` includes popular Python deep learning libraries. `jupyter/tensorflow-notebook` includes popular Python deep learning libraries.
* Everything in `jupyter/scipy-notebook` and its ancestor images - Everything in `jupyter/scipy-notebook` and its ancestor images
* [tensorflow](https://www.tensorflow.org/) and [keras](https://keras.io/) machine learning libraries - [tensorflow](https://www.tensorflow.org/) and [keras](https://keras.io/) machine learning
libraries
### jupyter/datascience-notebook ### jupyter/datascience-notebook
[Source on GitHub](https://github.com/jupyter/docker-stacks/tree/master/datascience-notebook) [Source on GitHub](https://github.com/jupyter/docker-stacks/tree/master/datascience-notebook) |
| [Dockerfile commit history](https://github.com/jupyter/docker-stacks/commits/master/datascience-notebook/Dockerfile) [Dockerfile commit history](https://github.com/jupyter/docker-stacks/commits/master/datascience-notebook/Dockerfile)
| [Docker Hub image tags](https://hub.docker.com/r/jupyter/datascience-notebook/tags/) | [Docker Hub image tags](https://hub.docker.com/r/jupyter/datascience-notebook/tags/)
`jupyter/datascience-notebook` includes libraries for data analysis from the Julia, Python, and R communities. `jupyter/datascience-notebook` includes libraries for data analysis from the Julia, Python, and R
communities.
* Everything in the `jupyter/scipy-notebook` and `jupyter/r-notebook` images, and their ancestor images - Everything in the `jupyter/scipy-notebook` and `jupyter/r-notebook` images, and their ancestor
* The [Julia](https://julialang.org/) compiler and base environment images
* [IJulia](https://github.com/JuliaLang/IJulia.jl) to support Julia code in Jupyter notebooks - The [Julia](https://julialang.org/) compiler and base environment
* [HDF5](https://github.com/JuliaIO/HDF5.jl), [Gadfly](http://gadflyjl.org/stable/), and [RDatasets](https://github.com/johnmyleswhite/RDatasets.jl) packages - [IJulia](https://github.com/JuliaLang/IJulia.jl) to support Julia code in Jupyter notebooks
- [HDF5](https://github.com/JuliaIO/HDF5.jl), [Gadfly](http://gadflyjl.org/stable/), and
[RDatasets](https://github.com/johnmyleswhite/RDatasets.jl) packages
### jupyter/pyspark-notebook ### jupyter/pyspark-notebook
[Source on GitHub](https://github.com/jupyter/docker-stacks/tree/master/pyspark-notebook) [Source on GitHub](https://github.com/jupyter/docker-stacks/tree/master/pyspark-notebook) |
| [Dockerfile commit history](https://github.com/jupyter/docker-stacks/commits/master/pyspark-notebook/Dockerfile) [Dockerfile commit history](https://github.com/jupyter/docker-stacks/commits/master/pyspark-notebook/Dockerfile)
| [Docker Hub image tags](https://hub.docker.com/r/jupyter/pyspark-notebook/tags/) | [Docker Hub image tags](https://hub.docker.com/r/jupyter/pyspark-notebook/tags/)
`jupyter/pyspark-notebook` includes Python support for Apache Spark. `jupyter/pyspark-notebook` includes Python support for Apache Spark.
* Everything in `jupyter/scipy-notebook` and its ancestor images - Everything in `jupyter/scipy-notebook` and its ancestor images
* [Apache Spark](https://spark.apache.org/) with Hadoop binaries - [Apache Spark](https://spark.apache.org/) with Hadoop binaries
### jupyter/all-spark-notebook ### jupyter/all-spark-notebook
[Source on GitHub](https://github.com/jupyter/docker-stacks/tree/master/all-spark-notebook) [Source on GitHub](https://github.com/jupyter/docker-stacks/tree/master/all-spark-notebook) |
| [Dockerfile commit history](https://github.com/jupyter/docker-stacks/commits/master/all-spark-notebook/Dockerfile) [Dockerfile commit history](https://github.com/jupyter/docker-stacks/commits/master/all-spark-notebook/Dockerfile)
| [Docker Hub image tags](https://hub.docker.com/r/jupyter/all-spark-notebook/tags/) | [Docker Hub image tags](https://hub.docker.com/r/jupyter/all-spark-notebook/tags/)
`jupyter/all-spark-notebook` includes Python, R, and Scala support for Apache Spark. `jupyter/all-spark-notebook` includes Python, R, and Scala support for Apache Spark.
* Everything in `jupyter/pyspark-notebook` and its ancestor images - Everything in `jupyter/pyspark-notebook` and its ancestor images
* [IRKernel](https://irkernel.github.io/) to support R code in Jupyter notebooks - [IRKernel](https://irkernel.github.io/) to support R code in Jupyter notebooks
* [Apache Toree](https://toree.apache.org/) and [spylon-kernel](https://github.com/maxpoint/spylon-kernel) to support Scala code in Jupyter notebooks - [Apache Toree](https://toree.apache.org/) and
* [ggplot2](http://ggplot2.org/), [sparklyr](http://spark.rstudio.com/), and [rcurl](https://cran.r-project.org/web/packages/RCurl/index.html) packages [spylon-kernel](https://github.com/maxpoint/spylon-kernel) to support Scala code in Jupyter
notebooks
- [ggplot2](http://ggplot2.org/), [sparklyr](http://spark.rstudio.com/), and
[rcurl](https://cran.r-project.org/web/packages/RCurl/index.html) packages
### Image Relationships ### Image Relationships
The following diagram depicts the build dependency tree of the core images. (i.e., the `FROM` statements in their Dockerfiles). Any given image inherits the The following diagram depicts the build dependency tree of the core images. (i.e., the `FROM`
complete content of all ancestor images pointing to it. statements in their Dockerfiles). Any given image inherits the complete content of all ancestor
images pointing to it.
[![Image inheritance [![Image inheritance
diagram](../images/inherit.svg)](http://interactive.blockdiag.com/?compression=deflate&src=eJyFzTEPgjAQhuHdX9Gws5sQjGzujsaYKxzmQrlr2msMGv-71K0srO_3XGud9NNA8DSfgzESCFlBSdi0xkvQAKTNugw4QnL6GIU10hvX-Zh7Z24OLLq2SjaxpvP10lX35vCf6pOxELFmUbQiUz4oQhYzMc3gCrRt2cWe_FKosmSjyFHC6OS1AwdQWCtyj7sfh523_BI9hKlQ25YdOFdv5fcH0kiEMA) diagram](../images/inherit.svg)](http://interactive.blockdiag.com/?compression=deflate&src=eJyFzTEPgjAQhuHdX9Gws5sQjGzujsaYKxzmQrlr2msMGv-71K0srO_3XGud9NNA8DSfgzESCFlBSdi0xkvQAKTNugw4QnL6GIU10hvX-Zh7Z24OLLq2SjaxpvP10lX35vCf6pOxELFmUbQiUz4oQhYzMc3gCrRt2cWe_FKosmSjyFHC6OS1AwdQWCtyj7sfh523_BI9hKlQ25YdOFdv5fcH0kiEMA)
### Builds ### Builds
Pull requests to the `jupyter/docker-stacks` repository trigger builds of all images on Travis CI. These images are for testing purposes only and are not saved for Pull requests to the `jupyter/docker-stacks` repository trigger builds of all images on GitHub
use. When pull requests merge to master, all images rebuild on Docker Cloud and become available to `docker pull` from Docker Hub. Actions. These images are for testing purposes only and are not saved for use. When pull requests
merge to master, all images rebuild on Docker Cloud and become available to `docker pull` from
Docker Hub.
### Versioning ### Versioning
The `latest` tag in each Docker Hub repository tracks the master branch `HEAD` reference on GitHub. `latest` is a moving target, by definition, and will have The `latest` tag in each Docker Hub repository tracks the master branch `HEAD` reference on GitHub.
backward-incompatible changes regularly. `latest` is a moving target, by definition, and will have backward-incompatible changes regularly.
Every image on Docker Hub also receives a 12-character tag which corresponds with the git commit SHA that triggered the image build. You can inspect the state of Every image on Docker Hub also receives a 12-character tag which corresponds with the git commit SHA
the `jupyter/docker-stacks` repository for that commit to review the definition of the image (e.g., images with tag 7c45ec67c8e7 were built from that triggered the image build. You can inspect the state of the `jupyter/docker-stacks` repository
for that commit to review the definition of the image (e.g., images with tag 7c45ec67c8e7 were built
from
[https://github.com/jupyter/docker-stacks/tree/7c45ec67c8e7](https://github.com/jupyter/docker-stacks/tree/7c45ec67c8e7)). [https://github.com/jupyter/docker-stacks/tree/7c45ec67c8e7](https://github.com/jupyter/docker-stacks/tree/7c45ec67c8e7)).
You must refer to git-SHA image tags when stability and reproducibility are important in your work. (e.g. `FROM jupyter/scipy-notebook:7c45ec67c8e7`, `docker run You must refer to git-SHA image tags when stability and reproducibility are important in your work.
-it --rm jupyter/scipy-notebook:7c45ec67c8e7`). You should only use `latest` when a one-off container instance is acceptable (e.g., you want to briefly try a new (e.g. `FROM jupyter/scipy-notebook:7c45ec67c8e7`,
library in a notebook). `docker run -it --rm jupyter/scipy-notebook:7c45ec67c8e7`). You should only use `latest` when a
one-off container instance is acceptable (e.g., you want to briefly try a new library in a
notebook).
## Community Stacks ## Community Stacks
The core stacks are just a tiny sample of what's possible when combining Jupyter with other technologies. We encourage members of the Jupyter community to create The core stacks are just a tiny sample of what's possible when combining Jupyter with other
their own stacks based on the core images and link them below. technologies. We encourage members of the Jupyter community to create their own stacks based on the
core images and link them below.
* [csharp-notebook is a community Jupyter Docker Stack image. Try C# in Jupyter Notebooks](https://github.com/tlinnet/csharp-notebook). The image includes more - [csharp-notebook is a community Jupyter Docker Stack image. Try C# in Jupyter Notebooks](https://github.com/tlinnet/csharp-notebook).
than 200 Jupyter Notebooks with example C# code and can readily be tried online via mybinder.org. Click here to launch The image includes more than 200 Jupyter Notebooks with example C# code and can readily be tried
[![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/tlinnet/csharp-notebook/master). online via mybinder.org. Click here to launch
[![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/tlinnet/csharp-notebook/master).
* [education-notebook is a community Jupyter Docker Stack image](https://github.com/umsi-mads/education-notebook). The image includes nbgrader and RISE on top of - [education-notebook is a community Jupyter Docker Stack image](https://github.com/umsi-mads/education-notebook).
the datascience-notebook image. Click here to launch it on The image includes nbgrader and RISE on top of the datascience-notebook image. Click here to
[![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/umsi-mads/education-notebook/master). launch it on
[![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/umsi-mads/education-notebook/master).
* __crosscompass/ihaskell-notebook__ - **crosscompass/ihaskell-notebook**
[Source on GitHub](https://github.com/jamesdbrock/ihaskell-notebook) [Source on GitHub](https://github.com/jamesdbrock/ihaskell-notebook) |
| [Dockerfile commit history](https://github.com/jamesdbrock/ihaskell-notebook/commits/master/Dockerfile) [Dockerfile commit history](https://github.com/jamesdbrock/ihaskell-notebook/commits/master/Dockerfile)
| [Docker Hub image tags](https://hub.docker.com/r/crosscompass/ihaskell-notebook/tags) | [Docker Hub image tags](https://hub.docker.com/r/crosscompass/ihaskell-notebook/tags)
`crosscompass/ihaskell-notebook` is based on [IHaskell](https://github.com/gibiansky/IHaskell). Includes popular packages and example notebooks. `crosscompass/ihaskell-notebook` is based on [IHaskell](https://github.com/gibiansky/IHaskell).
Includes popular packages and example notebooks.
Try it on [![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/jamesdbrock/learn-you-a-haskell-notebook/master?urlpath=lab/tree/ihaskell_examples/ihaskell/IHaskell.ipynb) Try it on
[![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/jamesdbrock/learn-you-a-haskell-notebook/master?urlpath=lab/tree/ihaskell_examples/ihaskell/IHaskell.ipynb)
* [java-notebook is a community Jupyter Docker Stack image](https://github.com/jbindinga/java-notebook). The image includes - [java-notebook is a community Jupyter Docker Stack image](https://github.com/jbindinga/java-notebook).
[IJava](https://github.com/SpencerPark/IJava) kernel on top of the minimal-notebook image. Click here to launch it on The image includes [IJava](https://github.com/SpencerPark/IJava) kernel on top of the
[![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/jbindinga/java-notebook/master). minimal-notebook image. Click here to launch it on
[![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/jbindinga/java-notebook/master).
* [sage-notebook](https://github.com/sharpTrick/sage-notebook) is a community Jupyter Docker Stack image with the [sagemath](https://sagemath.org) kernel on top of - [sage-notebook](https://github.com/sharpTrick/sage-notebook) is a community Jupyter Docker Stack
the minimal-notebook image. Click here to launch it on image with the [sagemath](https://sagemath.org) kernel on top of the minimal-notebook image. Click
[![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/sharpTrick/sage-notebook/master). here to launch it on
[![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/sharpTrick/sage-notebook/master).
* [GPU-Jupyter](https://github.com/iot-salzburg/gpu-jupyter/): Leverage Jupyter Notebooks with the power of your NVIDIA GPU and perform GPU calculations using - [GPU-Jupyter](https://github.com/iot-salzburg/gpu-jupyter/): Leverage Jupyter Notebooks with the
Tensorflow and Pytorch in collaborative notebooks. power of your NVIDIA GPU and perform GPU calculations using Tensorflow and Pytorch in
This is done by generating a Dockerfile, that consists of the **nvidia/cuda** base image, collaborative notebooks. This is done by generating a Dockerfile, that consists of the
the well-maintained **docker-stacks** that is integrated as submodule **nvidia/cuda** base image, the well-maintained **docker-stacks** that is integrated as submodule
and GPU-able libraries like **Tensorflow**, **Keras** and **PyTorch** on top of it. and GPU-able libraries like **Tensorflow**, **Keras** and **PyTorch** on top of it.
* [cgspatial-notebook](https://github.com/SCiO-systems/cgspatial-notebook) is a community Jupyter Docker Stack image. The image includes major geospatial Python & - [cgspatial-notebook](https://github.com/SCiO-systems/cgspatial-notebook) is a community Jupyter
R libraries on top of the datascience-notebook image. Try it on Docker Stack image. The image includes major geospatial Python & R libraries on top of the
binder:[![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/SCiO-systems/cgspatial-notebook/master) datascience-notebook image. Try it on
binder:[![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/SCiO-systems/cgspatial-notebook/master)
See the [contributing guide](../contributing/stacks.md) for information about how to create your own
See the [contributing guide](../contributing/stacks.md) for information about how to create your own Jupyter Docker Stack. Jupyter Docker Stack.
...@@ -2,21 +2,81 @@ ...@@ -2,21 +2,81 @@
This page provides details about features specific to one or more images. This page provides details about features specific to one or more images.
## Apache Spark ## Apache Spark
**Specific Docker Image Options** ### Specific Docker Image Options
* `-p 4040:4040` - The `jupyter/pyspark-notebook` and `jupyter/all-spark-notebook` images open [SparkUI (Spark Monitoring and Instrumentation UI)](http://spark.apache.org/docs/latest/monitoring.html) at default port `4040`; this option maps port `4040` inside the docker container to port `4040` on the host machine. Note every new spark context that is created is put onto an incrementing port (i.e. 4040, 4041, 4042, etc.), and it might be necessary to open multiple ports. For example: `docker run -d -p 8888:8888 -p 4040:4040 -p 4041:4041 jupyter/pyspark-notebook`. * `-p 4040:4040` - The `jupyter/pyspark-notebook` and `jupyter/all-spark-notebook` images open [SparkUI (Spark Monitoring and Instrumentation UI)](http://spark.apache.org/docs/latest/monitoring.html) at default port `4040`; this option maps port `4040` inside the docker container to port `4040` on the host machine. Note every new spark context that is created is put onto an incrementing port (i.e. 4040, 4041, 4042, etc.), and it might be necessary to open multiple ports. For example: `docker run -d -p 8888:8888 -p 4040:4040 -p 4041:4041 jupyter/pyspark-notebook`.
**Usage Examples** ### Build an Image with a Different Version of Spark
You can build a `pyspark-notebook` image (and also the downstream `all-spark-notebook` image) with a different version of Spark by overriding the default value of the following arguments at build time.
* Spark distribution is defined by the combination of the Spark and the Hadoop version and verified by the package checksum, see [Download Apache Spark](https://spark.apache.org/downloads.html) for more information. At this time the build will only work with the set of versions available on the Apache Spark download page, so it will not work with the archived versions.
* `spark_version`: The Spark version to install (`3.0.0`).
* `hadoop_version`: The Hadoop version (`3.2`).
* `spark_checksum`: The package checksum (`BFE4540...`).
* Spark is shipped with a version of Py4J that has to be referenced in the `PYTHONPATH`.
* `py4j_version`: The Py4J version (`0.10.9`), see the tip below.
* Spark can run with different OpenJDK versions.
* `openjdk_version`: The version of the OpenJDK (JRE headless) distribution (`11`), see [Ubuntu packages](https://packages.ubuntu.com/search?keywords=openjdk).
For example here is how to build a `pyspark-notebook` image with Spark `2.4.6`, Hadoop `2.7` and OpenJDK `8`.
```bash
# From the root of the project
# Build the image with different arguments
docker build --rm --force-rm \
-t jupyter/pyspark-notebook:spark-2.4.6 ./pyspark-notebook \
--build-arg spark_version=2.4.6 \
--build-arg hadoop_version=2.7 \
--build-arg spark_checksum=3A9F401EDA9B5749CDAFD246B1D14219229C26387017791C345A23A65782FB8B25A302BF4AC1ED7C16A1FE83108E94E55DAD9639A51C751D81C8C0534A4A9641 \
--build-arg openjdk_version=8 \
--build-arg py4j_version=0.10.7
# Check the newly built image
docker images jupyter/pyspark-notebook:spark-2.4.6
# REPOSITORY TAG IMAGE ID CREATED SIZE
# jupyter/pyspark-notebook spark-2.4.6 7ad7b5a9dbcd 4 minutes ago 3.44GB
# Check the Spark version
docker run -it --rm jupyter/pyspark-notebook:spark-2.4.6 pyspark --version
# Welcome to
# ____ __
# / __/__ ___ _____/ /__
# _\ \/ _ \/ _ `/ __/ '_/
# /___/ .__/\_,_/_/ /_/\_\ version 2.4.6
# /_/
#
# Using Scala version 2.11.12, OpenJDK 64-Bit Server VM, 1.8.0_265
```
**Tip**: to get the version of Py4J shipped with Spark:
* Build a first image without changing `py4j_version` (it will not prevent the image from building, it will just prevent Python from finding the `pyspark` module),
* get the version (`ls /usr/local/spark/python/lib/`),
* set the version `--build-arg py4j_version=0.10.7`.
```bash
docker run -it --rm jupyter/pyspark-notebook:spark-2.4.6 ls /usr/local/spark/python/lib/
# py4j-0.10.7-src.zip PY4J_LICENSE.txt pyspark.zip
# You can now set the build-arg
# --build-arg py4j_version=
```
*Note: At the time of writing there is an issue preventing the use of Spark `2.4.6` with Python `3.8`, see [this answer on SO](https://stackoverflow.com/a/62173969/4413446) for more information.*
### Usage Examples
The `jupyter/pyspark-notebook` and `jupyter/all-spark-notebook` images support the use of [Apache Spark](https://spark.apache.org/) in Python, R, and Scala notebooks. The following sections provide some examples of how to get started using them. The `jupyter/pyspark-notebook` and `jupyter/all-spark-notebook` images support the use of [Apache Spark](https://spark.apache.org/) in Python, R, and Scala notebooks. The following sections provide some examples of how to get started using them.
### Using Spark Local Mode #### Using Spark Local Mode
Spark **local mode** is useful for experimentation on small data when you do not have a Spark cluster available. Spark **local mode** is useful for experimentation on small data when you do not have a Spark cluster available.
#### In Python ##### In Python
In a Python notebook. In a Python notebook.
...@@ -33,7 +93,7 @@ rdd.sum() ...@@ -33,7 +93,7 @@ rdd.sum()
# 5050 # 5050
``` ```
#### In R ##### In R
In a R notebook with [SparkR][sparkr]. In a R notebook with [SparkR][sparkr].
...@@ -71,9 +131,7 @@ sdf_len(sc, 100, repartition = 1) %>% ...@@ -71,9 +131,7 @@ sdf_len(sc, 100, repartition = 1) %>%
# 5050 # 5050
``` ```
#### In Scala ##### In Scala
##### In a Spylon Kernel
Spylon kernel instantiates a `SparkContext` for you in variable `sc` after you configure Spark Spylon kernel instantiates a `SparkContext` for you in variable `sc` after you configure Spark
options in a `%%init_spark` magic cell. options in a `%%init_spark` magic cell.
...@@ -91,18 +149,7 @@ rdd.sum() ...@@ -91,18 +149,7 @@ rdd.sum()
// 5050 // 5050
``` ```
##### In an Apache Toree Kernel #### Connecting to a Spark Cluster in Standalone Mode
Apache Toree instantiates a local `SparkContext` for you in variable `sc` when the kernel starts.
```scala
// Sum of the first 100 whole numbers
val rdd = sc.parallelize(0 to 100)
rdd.sum()
// 5050
```
### Connecting to a Spark Cluster in Standalone Mode
Connection to Spark Cluster on **[Standalone Mode](https://spark.apache.org/docs/latest/spark-standalone.html)** requires the following set of steps: Connection to Spark Cluster on **[Standalone Mode](https://spark.apache.org/docs/latest/spark-standalone.html)** requires the following set of steps:
...@@ -117,7 +164,7 @@ Connection to Spark Cluster on **[Standalone Mode](https://spark.apache.org/docs ...@@ -117,7 +164,7 @@ Connection to Spark Cluster on **[Standalone Mode](https://spark.apache.org/docs
**Note**: In the following examples we are using the Spark master URL `spark://master:7077` that shall be replaced by the URL of the Spark master. **Note**: In the following examples we are using the Spark master URL `spark://master:7077` that shall be replaced by the URL of the Spark master.
#### In Python ##### In Python
The **same Python version** needs to be used on the notebook (where the driver is located) and on the Spark workers. The **same Python version** needs to be used on the notebook (where the driver is located) and on the Spark workers.
The Python version used on the driver and worker side can be adjusted by setting the environment variables `PYSPARK_PYTHON` and / or `PYSPARK_DRIVER_PYTHON`, see [Spark Configuration][spark-conf] for more information. The Python version used on the driver and worker side can be adjusted by setting the environment variables `PYSPARK_PYTHON` and / or `PYSPARK_DRIVER_PYTHON`, see [Spark Configuration][spark-conf] for more information.
...@@ -135,7 +182,7 @@ rdd.sum() ...@@ -135,7 +182,7 @@ rdd.sum()
# 5050 # 5050
``` ```
#### In R ##### In R
In a R notebook with [SparkR][sparkr]. In a R notebook with [SparkR][sparkr].
...@@ -172,9 +219,7 @@ sdf_len(sc, 100, repartition = 1) %>% ...@@ -172,9 +219,7 @@ sdf_len(sc, 100, repartition = 1) %>%
# 5050 # 5050
``` ```
#### In Scala ##### In Scala
##### In a Spylon Kernel
Spylon kernel instantiates a `SparkContext` for you in variable `sc` after you configure Spark Spylon kernel instantiates a `SparkContext` for you in variable `sc` after you configure Spark
options in a `%%init_spark` magic cell. options in a `%%init_spark` magic cell.
...@@ -192,29 +237,6 @@ rdd.sum() ...@@ -192,29 +237,6 @@ rdd.sum()
// 5050 // 5050
``` ```
##### In an Apache Toree Scala Notebook
The Apache Toree kernel automatically creates a `SparkContext` when it starts based on configuration information from its command line arguments and environment variables. You can pass information about your cluster via the `SPARK_OPTS` environment variable when you spawn a container.
For instance, to pass information about a standalone Spark master, you could start the container like so:
```bash
docker run -d -p 8888:8888 -e SPARK_OPTS='--master=spark://master:7077' \
jupyter/all-spark-notebook
```
Note that this is the same information expressed in a notebook in the Python case above. Once the kernel spec has your cluster information, you can test your cluster in an Apache Toree notebook like so:
```scala
// should print the value of --master in the kernel spec
println(sc.master)
// Sum of the first 100 whole numbers
val rdd = sc.parallelize(0 to 100)
rdd.sum()
// 5050
```
## Tensorflow ## Tensorflow
The `jupyter/tensorflow-notebook` image supports the use of The `jupyter/tensorflow-notebook` image supports the use of
......
...@@ -4,12 +4,13 @@ ...@@ -4,12 +4,13 @@
# Pick your favorite docker-stacks image # Pick your favorite docker-stacks image
FROM jupyter/minimal-notebook:55d5ca6be183 FROM jupyter/minimal-notebook:55d5ca6be183
USER jovyan
# Add permanent pip/conda installs, data files, other user libs here
# e.g., RUN pip install jupyter_dashboards
USER root USER root
# Add permanent apt-get installs and other root commands here # Add permanent apt-get installs and other root commands here
# e.g., RUN apt-get install npm nodejs # e.g., RUN apt-get install npm nodejs
USER $NB_UID
# Switch back to jovyan to avoid accidental container runs as root
# Add permanent pip/conda installs, data files, other user libs here
# e.g., RUN pip install jupyter_dashboards
...@@ -4,12 +4,13 @@ ...@@ -4,12 +4,13 @@
# Pick your favorite docker-stacks image # Pick your favorite docker-stacks image
FROM jupyter/minimal-notebook:2d125a7161b5 FROM jupyter/minimal-notebook:2d125a7161b5
USER jovyan
# Add permanent pip/conda installs, data files, other user libs here
# e.g., RUN pip install jupyter_dashboards
USER root USER root
# Add permanent apt-get installs and other root commands here # Add permanent apt-get installs and other root commands here
# e.g., RUN apt-get install npm nodejs # e.g., RUN apt-get install npm nodejs
USER $NB_UID
# Switch back to jovyan to avoid accidental container runs as root
# Add permanent pip/conda installs, data files, other user libs here
# e.g., RUN pip install jupyter_dashboards
...@@ -24,13 +24,14 @@ RUN apt-get update && apt-get install -yq --no-install-recommends \ ...@@ -24,13 +24,14 @@ RUN apt-get update && apt-get install -yq --no-install-recommends \
texlive-xetex \ texlive-xetex \
texlive-fonts-recommended \ texlive-fonts-recommended \
texlive-plain-generic \ texlive-plain-generic \
# Optional dependency
texlive-fonts-extra \
# ---- # ----
tzdata \ tzdata \
unzip \ unzip \
nano \ nano-tiny \
&& apt-get clean && rm -rf /var/lib/apt/lists/* && apt-get clean && rm -rf /var/lib/apt/lists/*
# Create alternative for nano -> nano-tiny
RUN update-alternatives --install /usr/bin/nano nano /usr/bin/nano-tiny 10
# Switch back to jovyan to avoid accidental container runs as root # Switch back to jovyan to avoid accidental container runs as root
USER $NB_UID USER $NB_UID
[![docker pulls](https://img.shields.io/docker/pulls/jupyter/minimal-notebook.svg)](https://hub.docker.com/r/jupyter/minimal-notebook/) [![docker stars](https://img.shields.io/docker/stars/jupyter/minimal-notebook.svg)](https://hub.docker.com/r/jupyter/minimal-notebook/) [![image metadata](https://images.microbadger.com/badges/image/jupyter/minimal-notebook.svg)](https://microbadger.com/images/jupyter/minimal-notebook "jupyter/minimal-notebook image metadata") [![docker pulls](https://img.shields.io/docker/pulls/jupyter/minimal-notebook.svg)](https://hub.docker.com/r/jupyter/minimal-notebook/)
[![docker stars](https://img.shields.io/docker/stars/jupyter/minimal-notebook.svg)](https://hub.docker.com/r/jupyter/minimal-notebook/)
[![image metadata](https://images.microbadger.com/badges/image/jupyter/minimal-notebook.svg)](https://microbadger.com/images/jupyter/minimal-notebook "jupyter/minimal-notebook image metadata")
# Minimal Jupyter Notebook Stack # Minimal Jupyter Notebook Stack
Please visit the documentation site for help using and contributing to this image and others. GitHub Actions in the https://github.com/jupyter/docker-stacks project builds and pushes this image
to Docker Hub.
* [Jupyter Docker Stacks on ReadTheDocs](http://jupyter-docker-stacks.readthedocs.io/en/latest/index.html) Please visit the project documentation site for help using and contributing to this image and
* [Selecting an Image :: Core Stacks :: jupyter/minimal-notebook](http://jupyter-docker-stacks.readthedocs.io/en/latest/using/selecting.html#jupyter-minimal-notebook) others.
- [Jupyter Docker Stacks on ReadTheDocs](http://jupyter-docker-stacks.readthedocs.io/en/latest/index.html)
- [Selecting an Image :: Core Stacks :: jupyter/minimal-notebook](http://jupyter-docker-stacks.readthedocs.io/en/latest/using/selecting.html#jupyter-minimal-notebook)
# Template sourced by the post_push hook: prepends a row describing this
# build to the Markdown table in the wiki Home page, inserting it right
# after the table's |-| header-separator line. Expects BUILD_TIMESTAMP,
# IMAGE_SHORT_NAME, GIT_SHA_TAG, SOURCE_COMMIT and INDEX_FILE to be set
# by the sourcing script.
INDEX_ROW="|\`${BUILD_TIMESTAMP}\`|\`jupyter/${IMAGE_SHORT_NAME}:${GIT_SHA_TAG}\`|[Git diff](https://github.com/jupyter/docker-stacks/commit/${SOURCE_COMMIT})<br />[Dockerfile](https://github.com/jupyter/docker-stacks/blob/${SOURCE_COMMIT}/${IMAGE_SHORT_NAME}/Dockerfile)<br />[Build manifest](./${IMAGE_SHORT_NAME}-${GIT_SHA_TAG})|"
sed "/|-|/a ${INDEX_ROW}" -i "$INDEX_FILE"
\ No newline at end of file
# Template sourced by the post_push hook: renders the build manifest
# (build metadata, git info, and the image's Python/conda/apt package
# listings) into $MANIFEST_FILE. Expects BUILD_TIMESTAMP, BUILD_CODE,
# DOCKER_REPO, GIT_SHA_TAG, IMAGE_NAME, SOURCE_COMMIT, COMMIT_MSG and
# MANIFEST_FILE to be set by the sourcing script. The $(docker run ...)
# substitutions execute the image to capture its installed packages, so
# the image must already exist locally.
cat << EOF > "$MANIFEST_FILE"
* Build datetime: ${BUILD_TIMESTAMP}
* DockerHub build code: ${BUILD_CODE}
* Docker image: ${DOCKER_REPO}:${GIT_SHA_TAG}
* Docker image size: $(docker images ${IMAGE_NAME} --format "{{.Size}}")
* Git commit SHA: [${SOURCE_COMMIT}](https://github.com/jupyter/docker-stacks/commit/${SOURCE_COMMIT})
* Git commit message:
\`\`\`
${COMMIT_MSG}
\`\`\`
## Python Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} python --version)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda info)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda list)
\`\`\`
## Apt Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} apt list --installed)
\`\`\`
EOF
\ No newline at end of file
#!/bin/bash
# DockerHub post_push hook: tags the just-pushed image with the short
# git sha, updates the project wiki with a build manifest, and fires any
# downstream build triggers. Expects SOURCE_COMMIT, IMAGE_NAME,
# DOCKER_REPO, DEPLOY_KEY and NEXT_BUILD_TRIGGERS in the environment.
set -e

# Tag the latest build with the short git sha. Push the tag in addition
# to the "latest" tag already pushed.
GIT_SHA_TAG=${SOURCE_COMMIT:0:12}
docker tag "$IMAGE_NAME" "$DOCKER_REPO:$GIT_SHA_TAG"
docker push "$DOCKER_REPO:$GIT_SHA_TAG"

# Create a working directory.
BUILD_TIMESTAMP=$(date -u +%FT%TZ)
WORKDIR=$(mktemp -d)
GIT_URI="git@github.com:jupyter/docker-stacks.wiki.git"
GIT_SANDBOX="${WORKDIR}/docker-stacks.wiki"
IMAGE_SHORT_NAME=$(basename "$DOCKER_REPO")
MANIFEST_FILE="${GIT_SANDBOX}/manifests/${IMAGE_SHORT_NAME}-${GIT_SHA_TAG}.md"
INDEX_FILE="${GIT_SANDBOX}/Home.md"

# Configure git so it can push back to GitHub: load the deploy key into
# a fresh ssh agent and set the committer identity.
eval "$(ssh-agent -s)"
ssh-add <(base64 -d <(echo "$DEPLOY_KEY"))
ssh-add -l
git config --global user.email "jupyter@googlegroups.com"
git config --global user.name "Jupyter Docker Stacks"

# Clone the GitHub project wiki.
pushd "$WORKDIR"
git clone "$GIT_URI"
popd

# Render the build manifest and index-row templates.
mkdir -p "$(dirname "$MANIFEST_FILE")"
source hooks/manifest.tmpl
source hooks/index.tmpl

# Push the wiki update back to GitHub.
pushd "$GIT_SANDBOX"
git add .
git commit -m "DOC: Build ${MANIFEST_FILE}"
git push -u origin master
popd

# Shutdown the ssh agent for good measure.
ssh-agent -k

# Invoke all downstream build triggers (comma-separated URL list);
# failures here must not fail the hook, so drop errexit first.
set +e
for url in ${NEXT_BUILD_TRIGGERS//,/ }
do
    curl -X POST "$url"
done
\ No newline at end of file
#!/bin/bash
# GitHub Actions post-build hook: tags the built image with the short
# git sha and writes a build manifest into the wiki checkout. Expects
# GITHUB_SHA, IMAGE_NAME, DOCKER_REPO, IMAGE_SHORT_NAME, WIKI_PATH,
# BUILD_TIMESTAMP and COMMIT_MSG in the environment.
set -e

# Apply tags: reference this exact build by its short commit sha.
GIT_SHA_TAG=${GITHUB_SHA:0:12}
docker tag "$IMAGE_NAME" "$DOCKER_REPO:$GIT_SHA_TAG"

# Update index: insert a row for this build right after the Markdown
# table header separator (|-|) in the wiki home page.
INDEX_ROW="|\`${BUILD_TIMESTAMP}\`|\`jupyter/${IMAGE_SHORT_NAME}:${GIT_SHA_TAG}\`|[Git diff](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})<br />[Dockerfile](https://github.com/jupyter/docker-stacks/blob/${GITHUB_SHA}/${IMAGE_SHORT_NAME}/Dockerfile)<br />[Build manifest](./${IMAGE_SHORT_NAME}-${GIT_SHA_TAG})|"
sed "/|-|/a ${INDEX_ROW}" -i "${WIKI_PATH}/Home.md"

# Build manifest: render build metadata, git info, and the image's
# package listings into the wiki checkout. The $(docker run ...)
# substitutions execute the image, so it must exist locally.
MANIFEST_FILE="${WIKI_PATH}/manifests/${IMAGE_SHORT_NAME}-${GIT_SHA_TAG}.md"
mkdir -p "$(dirname "$MANIFEST_FILE")"
cat << EOF > "$MANIFEST_FILE"
* Build datetime: ${BUILD_TIMESTAMP}
* Docker image: ${DOCKER_REPO}:${GIT_SHA_TAG}
* Docker image size: $(docker images ${IMAGE_NAME} --format "{{.Size}}")
* Git commit SHA: [${GITHUB_SHA}](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})
* Git commit message:
\`\`\`
${COMMIT_MSG}
\`\`\`
## Python Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} python --version)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda info)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda list)
\`\`\`
## Apt Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} apt list --installed)
\`\`\`
EOF
<svg width="44" height="51" viewBox="0 0 44 51" version="2.0" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:figma="http://www.figma.com/figma/ns">
<title>Group.svg</title>
<desc>Created using Figma 0.90</desc>
<g id="Canvas" transform="translate(-1640 -2453)" figma:type="canvas">
<g id="Group" style="mix-blend-mode:normal;" figma:type="group">
<g id="Group" style="mix-blend-mode:normal;" figma:type="group">
<g id="Group" style="mix-blend-mode:normal;" figma:type="group">
<g id="g" style="mix-blend-mode:normal;" figma:type="group">
<g id="path" style="mix-blend-mode:normal;" figma:type="group">
<g id="path9 fill" style="mix-blend-mode:normal;" figma:type="vector">
<use xlink:href="#path0_fill" transform="translate(1640.54 2474.36)" fill="#4E4E4E" style="mix-blend-mode:normal;"/>
</g>
</g>
<g id="path" style="mix-blend-mode:normal;" figma:type="group">
<g id="path10 fill" style="mix-blend-mode:normal;" figma:type="vector">
<use xlink:href="#path1_fill" transform="translate(1645.68 2474.37)" fill="#4E4E4E" style="mix-blend-mode:normal;"/>
</g>
</g>
<g id="path" style="mix-blend-mode:normal;" figma:type="group">
<g id="path11 fill" style="mix-blend-mode:normal;" figma:type="vector">
<use xlink:href="#path2_fill" transform="translate(1653.39 2474.26)" fill="#4E4E4E" style="mix-blend-mode:normal;"/>
</g>
</g>
<g id="path" style="mix-blend-mode:normal;" figma:type="group">
<g id="path12 fill" style="mix-blend-mode:normal;" figma:type="vector">
<use xlink:href="#path3_fill" transform="translate(1660.43 2474.39)" fill="#4E4E4E" style="mix-blend-mode:normal;"/>
</g>
</g>
<g id="path" style="mix-blend-mode:normal;" figma:type="group">
<g id="path13 fill" style="mix-blend-mode:normal;" figma:type="vector">
<use xlink:href="#path4_fill" transform="translate(1667.55 2472.54)" fill="#4E4E4E" style="mix-blend-mode:normal;"/>
</g>
</g>
<g id="path" style="mix-blend-mode:normal;" figma:type="group">
<g id="path14 fill" style="mix-blend-mode:normal;" figma:type="vector">
<use xlink:href="#path5_fill" transform="translate(1672.47 2474.29)" fill="#4E4E4E" style="mix-blend-mode:normal;"/>
</g>
</g>
<g id="path" style="mix-blend-mode:normal;" figma:type="group">
<g id="path15 fill" style="mix-blend-mode:normal;" figma:type="vector">
<use xlink:href="#path6_fill" transform="translate(1679.98 2474.24)" fill="#4E4E4E" style="mix-blend-mode:normal;"/>
</g>
</g>
</g>
</g>
<g id="g" style="mix-blend-mode:normal;" figma:type="group">
<g id="path" style="mix-blend-mode:normal;" figma:type="group">
<g id="path16 fill" style="mix-blend-mode:normal;" figma:type="vector">
<use xlink:href="#path7_fill" transform="translate(1673.48 2453.69)" fill="#767677" style="mix-blend-mode:normal;"/>
</g>
</g>
<g id="path" style="mix-blend-mode:normal;" figma:type="group">
<g id="path17 fill" style="mix-blend-mode:normal;" figma:type="vector">
<use xlink:href="#path8_fill" transform="translate(1643.21 2484.27)" fill="#F37726" style="mix-blend-mode:normal;"/>
</g>
</g>
<g id="path" style="mix-blend-mode:normal;" figma:type="group">
<g id="path18 fill" style="mix-blend-mode:normal;" figma:type="vector">
<use xlink:href="#path9_fill" transform="translate(1643.21 2457.88)" fill="#F37726" style="mix-blend-mode:normal;"/>
</g>
</g>
<g id="path" style="mix-blend-mode:normal;" figma:type="group">
<g id="path19 fill" style="mix-blend-mode:normal;" figma:type="vector">
<use xlink:href="#path10_fill" transform="translate(1643.28 2496.09)" fill="#9E9E9E" style="mix-blend-mode:normal;"/>
</g>
</g>
<g id="path" style="mix-blend-mode:normal;" figma:type="group">
<g id="path20 fill" style="mix-blend-mode:normal;" figma:type="vector">
<use xlink:href="#path11_fill" transform="translate(1641.87 2458.43)" fill="#616262" style="mix-blend-mode:normal;"/>
</g>
</g>
</g>
</g>
</g>
</g>
<defs>
<path id="path0_fill" d="M 1.74498 5.47533C 1.74498 7.03335 1.62034 7.54082 1.29983 7.91474C 0.943119 8.23595 0.480024 8.41358 0 8.41331L 0.124642 9.3036C 0.86884 9.31366 1.59095 9.05078 2.15452 8.56466C 2.45775 8.19487 2.6834 7.76781 2.818 7.30893C 2.95261 6.85005 2.99341 6.36876 2.93798 5.89377L 2.93798 0L 1.74498 0L 1.74498 5.43972L 1.74498 5.47533Z"/>
<path id="path1_fill" d="M 5.50204 4.76309C 5.50204 5.43081 5.50204 6.02731 5.55545 6.54368L 4.496 6.54368L 4.42478 5.48423C 4.20318 5.85909 3.88627 6.16858 3.50628 6.38125C 3.12628 6.59392 2.69675 6.70219 2.26135 6.69503C 1.22861 6.69503 0 6.13415 0 3.84608L 0 0.0445149L 1.193 0.0445149L 1.193 3.6057C 1.193 4.84322 1.57583 5.67119 2.65309 5.67119C 2.87472 5.67358 3.09459 5.63168 3.29982 5.54796C 3.50505 5.46424 3.69149 5.34039 3.84822 5.18366C 4.00494 5.02694 4.1288 4.84049 4.21252 4.63527C 4.29623 4.43004 4.33813 4.21016 4.33575 3.98853L 4.33575 0L 5.52874 0L 5.52874 4.72748L 5.50204 4.76309Z"/>
<path id="path2_fill" d="M 0.0534178 2.27264C 0.0534178 1.44466 0.0534178 0.768036 0 0.153731L 1.06836 0.153731L 1.12177 1.2666C 1.3598 0.864535 1.70247 0.534594 2.11325 0.311954C 2.52404 0.0893145 2.98754 -0.0176786 3.45435 0.00238095C 5.03908 0.00238095 6.23208 1.32892 6.23208 3.30538C 6.23208 5.63796 4.7987 6.79535 3.24958 6.79535C 2.85309 6.81304 2.45874 6.7281 2.10469 6.54874C 1.75064 6.36937 1.44888 6.10166 1.22861 5.77151L 1.22861 5.77151L 1.22861 9.33269L 0.0534178 9.33269L 0.0534178 2.29935L 0.0534178 2.27264ZM 1.22861 4.00872C 1.23184 4.17026 1.24972 4.33117 1.28203 4.48948C 1.38304 4.88479 1.61299 5.23513 1.93548 5.48506C 2.25798 5.735 2.65461 5.87026 3.06262 5.86944C 4.31794 5.86944 5.05689 4.8456 5.05689 3.3588C 5.05689 2.05897 4.36246 0.946096 3.10714 0.946096C 2.61036 0.986777 2.14548 1.20726 1.79965 1.5662C 1.45382 1.92514 1.25079 2.3979 1.22861 2.89585L 1.22861 4.00872Z"/>
<path id="path3_fill" d="M 1.31764 0.0178059L 2.75102 3.85499C 2.90237 4.28233 3.06262 4.7987 3.16946 5.18153C 3.2941 4.7898 3.42764 4.29123 3.5879 3.82828L 4.88773 0.0178059L 6.14305 0.0178059L 4.36246 4.64735C 3.47216 6.87309 2.92908 8.02158 2.11 8.71601C 1.69745 9.09283 1.19448 9.35658 0.649917 9.48166L 0.356119 8.48453C 0.736886 8.35942 1.09038 8.16304 1.39777 7.90584C 1.8321 7.55188 2.17678 7.10044 2.4038 6.5882C 2.45239 6.49949 2.48551 6.40314 2.50173 6.3033C 2.49161 6.19586 2.46457 6.0907 2.42161 5.9917L 0 0L 1.29983 0L 1.31764 0.0178059Z"/>
<path id="path4_fill" d="M 2.19013 0L 2.19013 1.86962L 3.8995 1.86962L 3.8995 2.75992L 2.19013 2.75992L 2.19013 6.26769C 2.19013 7.06896 2.42161 7.53191 3.08043 7.53191C 3.31442 7.53574 3.54789 7.5088 3.77486 7.45179L 3.82828 8.34208C 3.48794 8.45999 3.12881 8.51431 2.76882 8.50234C 2.53042 8.51726 2.29161 8.48043 2.06878 8.39437C 1.84595 8.30831 1.64438 8.17506 1.47789 8.00377C 1.11525 7.51873 0.949826 6.91431 1.01494 6.31221L 1.01494 2.75102L 0 2.75102L 0 1.86072L 1.03274 1.86072L 1.03274 0.275992L 2.19013 0Z"/>
<path id="path5_fill" d="M 1.17716 3.57899C 1.153 3.88093 1.19468 4.18451 1.29933 4.46876C 1.40398 4.75301 1.5691 5.01114 1.78329 5.22532C 1.99747 5.43951 2.2556 5.60463 2.53985 5.70928C 2.8241 5.81393 3.12768 5.85561 3.42962 5.83145C 4.04033 5.84511 4.64706 5.72983 5.21021 5.49313L 5.41498 6.38343C 4.72393 6.66809 3.98085 6.80458 3.23375 6.78406C 2.79821 6.81388 2.36138 6.74914 1.95322 6.59427C 1.54505 6.43941 1.17522 6.19809 0.869071 5.88688C 0.562928 5.57566 0.327723 5.2019 0.179591 4.79125C 0.0314584 4.38059 -0.0260962 3.94276 0.0108748 3.50777C 0.0108748 1.54912 1.17716 0 3.0824 0C 5.21911 0 5.75329 1.86962 5.75329 3.06262C 5.76471 3.24644 5.76471 3.43079 5.75329 3.61461L 1.15046 3.61461L 1.17716 3.57899ZM 4.66713 2.6887C 4.70149 2.45067 4.68443 2.20805 4.61709 1.97718C 4.54976 1.74631 4.43372 1.53255 4.2768 1.35031C 4.11987 1.16808 3.92571 1.0216 3.70739 0.920744C 3.48907 0.81989 3.25166 0.767006 3.01118 0.765656C 2.52201 0.801064 2.06371 1.01788 1.72609 1.37362C 1.38847 1.72935 1.19588 2.19835 1.18607 2.6887L 4.66713 2.6887Z"/>
<path id="path6_fill" d="M 0.0534178 2.19228C 0.0534178 1.42663 0.0534178 0.767806 0 0.162404L 1.06836 0.162404L 1.06836 1.43553L 1.12177 1.43553C 1.23391 1.04259 1.4656 0.694314 1.78468 0.439049C 2.10376 0.183783 2.4944 0.034196 2.90237 0.0110538C 3.01466 -0.00368459 3.12839 -0.00368459 3.24068 0.0110538L 3.24068 1.12393C 3.10462 1.10817 2.9672 1.10817 2.83114 1.12393C 2.427 1.13958 2.04237 1.30182 1.7491 1.58035C 1.45583 1.85887 1.27398 2.23462 1.23751 2.63743C 1.20422 2.8196 1.18635 3.00425 1.1841 3.18941L 1.1841 6.65267L 0.00890297 6.65267L 0.00890297 2.20118L 0.0534178 2.19228Z"/>
<path id="path7_fill" d="M 6.03059 2.83565C 6.06715 3.43376 5.92485 4.02921 5.6218 4.54615C 5.31875 5.0631 4.86869 5.47813 4.32893 5.73839C 3.78917 5.99864 3.18416 6.09233 2.59097 6.00753C 1.99778 5.92272 1.44326 5.66326 0.998048 5.26219C 0.552837 4.86113 0.23709 4.33661 0.0910307 3.75546C -0.0550287 3.17431 -0.0247891 2.56283 0.177897 1.99893C 0.380583 1.43503 0.746541 0.944221 1.22915 0.589037C 1.71176 0.233853 2.28918 0.0303686 2.88784 0.00450543C 3.28035 -0.0170932 3.67326 0.0391144 4.04396 0.169896C 4.41467 0.300677 4.75587 0.503453 5.04794 0.766561C 5.34 1.02967 5.57718 1.34792 5.74582 1.70301C 5.91446 2.0581 6.01124 2.44303 6.03059 2.83565L 6.03059 2.83565Z"/>
<path id="path8_fill" d="M 18.6962 7.12238C 10.6836 7.12238 3.64131 4.24672 0 0C 1.41284 3.82041 3.96215 7.1163 7.30479 9.44404C 10.6474 11.7718 14.623 13.0196 18.6962 13.0196C 22.7695 13.0196 26.745 11.7718 30.0877 9.44404C 33.4303 7.1163 35.9796 3.82041 37.3925 4.0486e-13C 33.7601 4.24672 26.7445 7.12238 18.6962 7.12238Z"/>
<path id="path9_fill" d="M 18.6962 5.89725C 26.7089 5.89725 33.7512 8.77291 37.3925 13.0196C 35.9796 9.19922 33.4303 5.90333 30.0877 3.57559C 26.745 1.24785 22.7695 4.0486e-13 18.6962 0C 14.623 4.0486e-13 10.6474 1.24785 7.30479 3.57559C 3.96215 5.90333 1.41284 9.19922 0 13.0196C 3.64131 8.76401 10.648 5.89725 18.6962 5.89725Z"/>
<path id="path10_fill" d="M 7.59576 3.56656C 7.64276 4.31992 7.46442 5.07022 7.08347 5.72186C 6.70251 6.3735 6.13619 6.89698 5.45666 7.22561C 4.77713 7.55424 4.01515 7.67314 3.26781 7.56716C 2.52046 7.46117 1.82158 7.13511 1.26021 6.63051C 0.698839 6.12591 0.300394 5.46561 0.115637 4.73375C -0.0691191 4.00188 -0.0318219 3.23159 0.222777 2.52099C 0.477376 1.8104 0.93775 1.19169 1.54524 0.743685C 2.15274 0.295678 2.87985 0.0386595 3.63394 0.00537589C 4.12793 -0.0210471 4.62229 0.0501173 5.08878 0.214803C 5.55526 0.37949 5.98473 0.63447 6.35264 0.965179C 6.72055 1.29589 7.01971 1.69584 7.233 2.1422C 7.4463 2.58855 7.56957 3.07256 7.59576 3.56656L 7.59576 3.56656Z"/>
<path id="path11_fill" d="M 2.25061 4.37943C 1.81886 4.39135 1.39322 4.27535 1.02722 4.04602C 0.661224 3.81668 0.371206 3.48424 0.193641 3.09052C 0.0160762 2.69679 -0.0411078 2.25935 0.0292804 1.83321C 0.0996686 1.40707 0.294486 1.01125 0.589233 0.695542C 0.883981 0.37983 1.2655 0.158316 1.68581 0.0588577C 2.10611 -0.0406005 2.54644 -0.0135622 2.95143 0.136572C 3.35641 0.286707 3.70796 0.553234 3.96186 0.902636C 4.21577 1.25204 4.3607 1.66872 4.37842 2.10027C 4.39529 2.6838 4.18131 3.25044 3.78293 3.67715C 3.38455 4.10387 2.83392 4.35623 2.25061 4.37943Z"/>
</defs>
</svg>
{
"cells": [
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"data": {
"image/svg+xml": [
"<svg xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:figma=\"http://www.figma.com/figma/ns\" width=\"44\" height=\"51\" viewBox=\"0 0 44 51\" version=\"2.0\">\n",
"<title>Group.svg</title>\n",
"<desc>Created using Figma 0.90</desc>\n",
"<g id=\"Canvas\" transform=\"translate(-1640 -2453)\" figma:type=\"canvas\">\n",
"<g id=\"Group\" style=\"mix-blend-mode:normal;\" figma:type=\"group\">\n",
"<g id=\"Group\" style=\"mix-blend-mode:normal;\" figma:type=\"group\">\n",
"<g id=\"Group\" style=\"mix-blend-mode:normal;\" figma:type=\"group\">\n",
"<g id=\"g\" style=\"mix-blend-mode:normal;\" figma:type=\"group\">\n",
"<g id=\"path\" style=\"mix-blend-mode:normal;\" figma:type=\"group\">\n",
"<g id=\"path9 fill\" style=\"mix-blend-mode:normal;\" figma:type=\"vector\">\n",
"<use xlink:href=\"#path0_fill\" transform=\"translate(1640.54 2474.36)\" fill=\"#4E4E4E\" style=\"mix-blend-mode:normal;\"/>\n",
"</g>\n",
"</g>\n",
"<g id=\"path\" style=\"mix-blend-mode:normal;\" figma:type=\"group\">\n",
"<g id=\"path10 fill\" style=\"mix-blend-mode:normal;\" figma:type=\"vector\">\n",
"<use xlink:href=\"#path1_fill\" transform=\"translate(1645.68 2474.37)\" fill=\"#4E4E4E\" style=\"mix-blend-mode:normal;\"/>\n",
"</g>\n",
"</g>\n",
"<g id=\"path\" style=\"mix-blend-mode:normal;\" figma:type=\"group\">\n",
"<g id=\"path11 fill\" style=\"mix-blend-mode:normal;\" figma:type=\"vector\">\n",
"<use xlink:href=\"#path2_fill\" transform=\"translate(1653.39 2474.26)\" fill=\"#4E4E4E\" style=\"mix-blend-mode:normal;\"/>\n",
"</g>\n",
"</g>\n",
"<g id=\"path\" style=\"mix-blend-mode:normal;\" figma:type=\"group\">\n",
"<g id=\"path12 fill\" style=\"mix-blend-mode:normal;\" figma:type=\"vector\">\n",
"<use xlink:href=\"#path3_fill\" transform=\"translate(1660.43 2474.39)\" fill=\"#4E4E4E\" style=\"mix-blend-mode:normal;\"/>\n",
"</g>\n",
"</g>\n",
"<g id=\"path\" style=\"mix-blend-mode:normal;\" figma:type=\"group\">\n",
"<g id=\"path13 fill\" style=\"mix-blend-mode:normal;\" figma:type=\"vector\">\n",
"<use xlink:href=\"#path4_fill\" transform=\"translate(1667.55 2472.54)\" fill=\"#4E4E4E\" style=\"mix-blend-mode:normal;\"/>\n",
"</g>\n",
"</g>\n",
"<g id=\"path\" style=\"mix-blend-mode:normal;\" figma:type=\"group\">\n",
"<g id=\"path14 fill\" style=\"mix-blend-mode:normal;\" figma:type=\"vector\">\n",
"<use xlink:href=\"#path5_fill\" transform=\"translate(1672.47 2474.29)\" fill=\"#4E4E4E\" style=\"mix-blend-mode:normal;\"/>\n",
"</g>\n",
"</g>\n",
"<g id=\"path\" style=\"mix-blend-mode:normal;\" figma:type=\"group\">\n",
"<g id=\"path15 fill\" style=\"mix-blend-mode:normal;\" figma:type=\"vector\">\n",
"<use xlink:href=\"#path6_fill\" transform=\"translate(1679.98 2474.24)\" fill=\"#4E4E4E\" style=\"mix-blend-mode:normal;\"/>\n",
"</g>\n",
"</g>\n",
"</g>\n",
"</g>\n",
"<g id=\"g\" style=\"mix-blend-mode:normal;\" figma:type=\"group\">\n",
"<g id=\"path\" style=\"mix-blend-mode:normal;\" figma:type=\"group\">\n",
"<g id=\"path16 fill\" style=\"mix-blend-mode:normal;\" figma:type=\"vector\">\n",
"<use xlink:href=\"#path7_fill\" transform=\"translate(1673.48 2453.69)\" fill=\"#767677\" style=\"mix-blend-mode:normal;\"/>\n",
"</g>\n",
"</g>\n",
"<g id=\"path\" style=\"mix-blend-mode:normal;\" figma:type=\"group\">\n",
"<g id=\"path17 fill\" style=\"mix-blend-mode:normal;\" figma:type=\"vector\">\n",
"<use xlink:href=\"#path8_fill\" transform=\"translate(1643.21 2484.27)\" fill=\"#F37726\" style=\"mix-blend-mode:normal;\"/>\n",
"</g>\n",
"</g>\n",
"<g id=\"path\" style=\"mix-blend-mode:normal;\" figma:type=\"group\">\n",
"<g id=\"path18 fill\" style=\"mix-blend-mode:normal;\" figma:type=\"vector\">\n",
"<use xlink:href=\"#path9_fill\" transform=\"translate(1643.21 2457.88)\" fill=\"#F37726\" style=\"mix-blend-mode:normal;\"/>\n",
"</g>\n",
"</g>\n",
"<g id=\"path\" style=\"mix-blend-mode:normal;\" figma:type=\"group\">\n",
"<g id=\"path19 fill\" style=\"mix-blend-mode:normal;\" figma:type=\"vector\">\n",
"<use xlink:href=\"#path10_fill\" transform=\"translate(1643.28 2496.09)\" fill=\"#9E9E9E\" style=\"mix-blend-mode:normal;\"/>\n",
"</g>\n",
"</g>\n",
"<g id=\"path\" style=\"mix-blend-mode:normal;\" figma:type=\"group\">\n",
"<g id=\"path20 fill\" style=\"mix-blend-mode:normal;\" figma:type=\"vector\">\n",
"<use xlink:href=\"#path11_fill\" transform=\"translate(1641.87 2458.43)\" fill=\"#616262\" style=\"mix-blend-mode:normal;\"/>\n",
"</g>\n",
"</g>\n",
"</g>\n",
"</g>\n",
"</g>\n",
"</g>\n",
"<defs>\n",
"<path id=\"path0_fill\" d=\"M 1.74498 5.47533C 1.74498 7.03335 1.62034 7.54082 1.29983 7.91474C 0.943119 8.23595 0.480024 8.41358 0 8.41331L 0.124642 9.3036C 0.86884 9.31366 1.59095 9.05078 2.15452 8.56466C 2.45775 8.19487 2.6834 7.76781 2.818 7.30893C 2.95261 6.85005 2.99341 6.36876 2.93798 5.89377L 2.93798 0L 1.74498 0L 1.74498 5.43972L 1.74498 5.47533Z\"/>\n",
"<path id=\"path1_fill\" d=\"M 5.50204 4.76309C 5.50204 5.43081 5.50204 6.02731 5.55545 6.54368L 4.496 6.54368L 4.42478 5.48423C 4.20318 5.85909 3.88627 6.16858 3.50628 6.38125C 3.12628 6.59392 2.69675 6.70219 2.26135 6.69503C 1.22861 6.69503 0 6.13415 0 3.84608L 0 0.0445149L 1.193 0.0445149L 1.193 3.6057C 1.193 4.84322 1.57583 5.67119 2.65309 5.67119C 2.87472 5.67358 3.09459 5.63168 3.29982 5.54796C 3.50505 5.46424 3.69149 5.34039 3.84822 5.18366C 4.00494 5.02694 4.1288 4.84049 4.21252 4.63527C 4.29623 4.43004 4.33813 4.21016 4.33575 3.98853L 4.33575 0L 5.52874 0L 5.52874 4.72748L 5.50204 4.76309Z\"/>\n",
"<path id=\"path2_fill\" d=\"M 0.0534178 2.27264C 0.0534178 1.44466 0.0534178 0.768036 0 0.153731L 1.06836 0.153731L 1.12177 1.2666C 1.3598 0.864535 1.70247 0.534594 2.11325 0.311954C 2.52404 0.0893145 2.98754 -0.0176786 3.45435 0.00238095C 5.03908 0.00238095 6.23208 1.32892 6.23208 3.30538C 6.23208 5.63796 4.7987 6.79535 3.24958 6.79535C 2.85309 6.81304 2.45874 6.7281 2.10469 6.54874C 1.75064 6.36937 1.44888 6.10166 1.22861 5.77151L 1.22861 5.77151L 1.22861 9.33269L 0.0534178 9.33269L 0.0534178 2.29935L 0.0534178 2.27264ZM 1.22861 4.00872C 1.23184 4.17026 1.24972 4.33117 1.28203 4.48948C 1.38304 4.88479 1.61299 5.23513 1.93548 5.48506C 2.25798 5.735 2.65461 5.87026 3.06262 5.86944C 4.31794 5.86944 5.05689 4.8456 5.05689 3.3588C 5.05689 2.05897 4.36246 0.946096 3.10714 0.946096C 2.61036 0.986777 2.14548 1.20726 1.79965 1.5662C 1.45382 1.92514 1.25079 2.3979 1.22861 2.89585L 1.22861 4.00872Z\"/>\n",
"<path id=\"path3_fill\" d=\"M 1.31764 0.0178059L 2.75102 3.85499C 2.90237 4.28233 3.06262 4.7987 3.16946 5.18153C 3.2941 4.7898 3.42764 4.29123 3.5879 3.82828L 4.88773 0.0178059L 6.14305 0.0178059L 4.36246 4.64735C 3.47216 6.87309 2.92908 8.02158 2.11 8.71601C 1.69745 9.09283 1.19448 9.35658 0.649917 9.48166L 0.356119 8.48453C 0.736886 8.35942 1.09038 8.16304 1.39777 7.90584C 1.8321 7.55188 2.17678 7.10044 2.4038 6.5882C 2.45239 6.49949 2.48551 6.40314 2.50173 6.3033C 2.49161 6.19586 2.46457 6.0907 2.42161 5.9917L 0 0L 1.29983 0L 1.31764 0.0178059Z\"/>\n",
"<path id=\"path4_fill\" d=\"M 2.19013 0L 2.19013 1.86962L 3.8995 1.86962L 3.8995 2.75992L 2.19013 2.75992L 2.19013 6.26769C 2.19013 7.06896 2.42161 7.53191 3.08043 7.53191C 3.31442 7.53574 3.54789 7.5088 3.77486 7.45179L 3.82828 8.34208C 3.48794 8.45999 3.12881 8.51431 2.76882 8.50234C 2.53042 8.51726 2.29161 8.48043 2.06878 8.39437C 1.84595 8.30831 1.64438 8.17506 1.47789 8.00377C 1.11525 7.51873 0.949826 6.91431 1.01494 6.31221L 1.01494 2.75102L 0 2.75102L 0 1.86072L 1.03274 1.86072L 1.03274 0.275992L 2.19013 0Z\"/>\n",
"<path id=\"path5_fill\" d=\"M 1.17716 3.57899C 1.153 3.88093 1.19468 4.18451 1.29933 4.46876C 1.40398 4.75301 1.5691 5.01114 1.78329 5.22532C 1.99747 5.43951 2.2556 5.60463 2.53985 5.70928C 2.8241 5.81393 3.12768 5.85561 3.42962 5.83145C 4.04033 5.84511 4.64706 5.72983 5.21021 5.49313L 5.41498 6.38343C 4.72393 6.66809 3.98085 6.80458 3.23375 6.78406C 2.79821 6.81388 2.36138 6.74914 1.95322 6.59427C 1.54505 6.43941 1.17522 6.19809 0.869071 5.88688C 0.562928 5.57566 0.327723 5.2019 0.179591 4.79125C 0.0314584 4.38059 -0.0260962 3.94276 0.0108748 3.50777C 0.0108748 1.54912 1.17716 0 3.0824 0C 5.21911 0 5.75329 1.86962 5.75329 3.06262C 5.76471 3.24644 5.76471 3.43079 5.75329 3.61461L 1.15046 3.61461L 1.17716 3.57899ZM 4.66713 2.6887C 4.70149 2.45067 4.68443 2.20805 4.61709 1.97718C 4.54976 1.74631 4.43372 1.53255 4.2768 1.35031C 4.11987 1.16808 3.92571 1.0216 3.70739 0.920744C 3.48907 0.81989 3.25166 0.767006 3.01118 0.765656C 2.52201 0.801064 2.06371 1.01788 1.72609 1.37362C 1.38847 1.72935 1.19588 2.19835 1.18607 2.6887L 4.66713 2.6887Z\"/>\n",
"<path id=\"path6_fill\" d=\"M 0.0534178 2.19228C 0.0534178 1.42663 0.0534178 0.767806 0 0.162404L 1.06836 0.162404L 1.06836 1.43553L 1.12177 1.43553C 1.23391 1.04259 1.4656 0.694314 1.78468 0.439049C 2.10376 0.183783 2.4944 0.034196 2.90237 0.0110538C 3.01466 -0.00368459 3.12839 -0.00368459 3.24068 0.0110538L 3.24068 1.12393C 3.10462 1.10817 2.9672 1.10817 2.83114 1.12393C 2.427 1.13958 2.04237 1.30182 1.7491 1.58035C 1.45583 1.85887 1.27398 2.23462 1.23751 2.63743C 1.20422 2.8196 1.18635 3.00425 1.1841 3.18941L 1.1841 6.65267L 0.00890297 6.65267L 0.00890297 2.20118L 0.0534178 2.19228Z\"/>\n",
"<path id=\"path7_fill\" d=\"M 6.03059 2.83565C 6.06715 3.43376 5.92485 4.02921 5.6218 4.54615C 5.31875 5.0631 4.86869 5.47813 4.32893 5.73839C 3.78917 5.99864 3.18416 6.09233 2.59097 6.00753C 1.99778 5.92272 1.44326 5.66326 0.998048 5.26219C 0.552837 4.86113 0.23709 4.33661 0.0910307 3.75546C -0.0550287 3.17431 -0.0247891 2.56283 0.177897 1.99893C 0.380583 1.43503 0.746541 0.944221 1.22915 0.589037C 1.71176 0.233853 2.28918 0.0303686 2.88784 0.00450543C 3.28035 -0.0170932 3.67326 0.0391144 4.04396 0.169896C 4.41467 0.300677 4.75587 0.503453 5.04794 0.766561C 5.34 1.02967 5.57718 1.34792 5.74582 1.70301C 5.91446 2.0581 6.01124 2.44303 6.03059 2.83565L 6.03059 2.83565Z\"/>\n",
"<path id=\"path8_fill\" d=\"M 18.6962 7.12238C 10.6836 7.12238 3.64131 4.24672 0 0C 1.41284 3.82041 3.96215 7.1163 7.30479 9.44404C 10.6474 11.7718 14.623 13.0196 18.6962 13.0196C 22.7695 13.0196 26.745 11.7718 30.0877 9.44404C 33.4303 7.1163 35.9796 3.82041 37.3925 4.0486e-13C 33.7601 4.24672 26.7445 7.12238 18.6962 7.12238Z\"/>\n",
"<path id=\"path9_fill\" d=\"M 18.6962 5.89725C 26.7089 5.89725 33.7512 8.77291 37.3925 13.0196C 35.9796 9.19922 33.4303 5.90333 30.0877 3.57559C 26.745 1.24785 22.7695 4.0486e-13 18.6962 0C 14.623 4.0486e-13 10.6474 1.24785 7.30479 3.57559C 3.96215 5.90333 1.41284 9.19922 0 13.0196C 3.64131 8.76401 10.648 5.89725 18.6962 5.89725Z\"/>\n",
"<path id=\"path10_fill\" d=\"M 7.59576 3.56656C 7.64276 4.31992 7.46442 5.07022 7.08347 5.72186C 6.70251 6.3735 6.13619 6.89698 5.45666 7.22561C 4.77713 7.55424 4.01515 7.67314 3.26781 7.56716C 2.52046 7.46117 1.82158 7.13511 1.26021 6.63051C 0.698839 6.12591 0.300394 5.46561 0.115637 4.73375C -0.0691191 4.00188 -0.0318219 3.23159 0.222777 2.52099C 0.477376 1.8104 0.93775 1.19169 1.54524 0.743685C 2.15274 0.295678 2.87985 0.0386595 3.63394 0.00537589C 4.12793 -0.0210471 4.62229 0.0501173 5.08878 0.214803C 5.55526 0.37949 5.98473 0.63447 6.35264 0.965179C 6.72055 1.29589 7.01971 1.69584 7.233 2.1422C 7.4463 2.58855 7.56957 3.07256 7.59576 3.56656L 7.59576 3.56656Z\"/>\n",
"<path id=\"path11_fill\" d=\"M 2.25061 4.37943C 1.81886 4.39135 1.39322 4.27535 1.02722 4.04602C 0.661224 3.81668 0.371206 3.48424 0.193641 3.09052C 0.0160762 2.69679 -0.0411078 2.25935 0.0292804 1.83321C 0.0996686 1.40707 0.294486 1.01125 0.589233 0.695542C 0.883981 0.37983 1.2655 0.158316 1.68581 0.0588577C 2.10611 -0.0406005 2.54644 -0.0135622 2.95143 0.136572C 3.35641 0.286707 3.70796 0.553234 3.96186 0.902636C 4.21577 1.25204 4.3607 1.66872 4.37842 2.10027C 4.39529 2.6838 4.18131 3.25044 3.78293 3.67715C 3.38455 4.10387 2.83392 4.35623 2.25061 4.37943Z\"/>\n",
"</defs>\n",
"</svg>"
],
"text/plain": [
"<IPython.core.display.SVG object>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"from IPython.display import SVG, display\n",
"\n",
"display(SVG(filename=\"Jupyter_logo.svg\"))"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.6"
}
},
"nbformat": 4,
"nbformat_minor": 4
}
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import logging
LOGGER = logging.getLogger(__name__)
def test_inkscape(container):
    """Inkscape shall be installed to be able to convert SVG files."""
    LOGGER.info("Test that inkscape is working by printing its version ...")
    # Run the version command inside the container under test; start.sh
    # is the image's entrypoint wrapper.
    running = container.run(
        tty=True,
        command=["start.sh", "bash", "-c", "inkscape --version"],
    )
    running.wait(timeout=10)
    output = running.logs(stdout=True).decode("utf-8")
    LOGGER.debug(output)
    assert "Inkscape" in output, "Inkscape not installed or not working"
...@@ -9,15 +9,17 @@ import os ...@@ -9,15 +9,17 @@ import os
LOGGER = logging.getLogger(__name__) LOGGER = logging.getLogger(__name__)
@pytest.mark.parametrize("format", ["html", "pdf"]) @pytest.mark.parametrize("test_file, output_format,", [
def test_nbconvert(container, format): ("notebook_math", "pdf"), ("notebook_math", "html"),
("notebook_svg", "pdf"), ("notebook_svg", "html"),
])
def test_nbconvert(container, test_file, output_format):
"""Check if nbconvert is able to convert a notebook file""" """Check if nbconvert is able to convert a notebook file"""
host_data_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data") host_data_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data")
cont_data_dir = "/home/jovyan/data" cont_data_dir = "/home/jovyan/data"
test_file = "notebook1"
output_dir = "/tmp" output_dir = "/tmp"
LOGGER.info(f"Test that an example notebook can be converted to {format.upper()} ...") LOGGER.info(f"Test that the example notebook {test_file} can be converted to {output_format.upper()} ...")
command = f"jupyter nbconvert {cont_data_dir}/{test_file}.ipynb --output-dir {output_dir} --to {format}" command = f"jupyter nbconvert {cont_data_dir}/{test_file}.ipynb --output-dir {output_dir} --to {output_format}"
c = container.run( c = container.run(
volumes={host_data_dir: {"bind": cont_data_dir, "mode": "ro"}}, volumes={host_data_dir: {"bind": cont_data_dir, "mode": "ro"}},
tty=True, tty=True,
...@@ -27,5 +29,5 @@ def test_nbconvert(container, format): ...@@ -27,5 +29,5 @@ def test_nbconvert(container, format):
assert rv == 0 or rv["StatusCode"] == 0, f"Command {command} failed" assert rv == 0 or rv["StatusCode"] == 0, f"Command {command} failed"
logs = c.logs(stdout=True).decode("utf-8") logs = c.logs(stdout=True).decode("utf-8")
LOGGER.debug(logs) LOGGER.debug(logs)
expected_file = f"{output_dir}/{test_file}.{format}" expected_file = f"{output_dir}/{test_file}.{output_format}"
assert expected_file in logs, f"Expected file {expected_file} not generated" assert expected_file in logs, f"Expected file {expected_file} not generated"
...@@ -11,20 +11,30 @@ SHELL ["/bin/bash", "-o", "pipefail", "-c"] ...@@ -11,20 +11,30 @@ SHELL ["/bin/bash", "-o", "pipefail", "-c"]
USER root USER root
# Spark dependencies # Spark dependencies
ENV APACHE_SPARK_VERSION=3.0.0 \ # Default values can be overridden at build time
HADOOP_VERSION=3.2 # (ARGS are in lower case to distinguish them from ENV)
ARG spark_version="3.0.1"
ARG hadoop_version="3.2"
ARG spark_checksum="E8B47C5B658E0FBC1E57EEA06262649D8418AE2B2765E44DA53AAF50094877D17297CC5F0B9B35DF2CEEF830F19AA31D7E56EAD950BBE7F8830D6874F88CFC3C"
ARG py4j_version="0.10.9"
ARG openjdk_version="11"
ENV APACHE_SPARK_VERSION="${spark_version}" \
HADOOP_VERSION="${hadoop_version}"
RUN apt-get -y update && \ RUN apt-get -y update && \
apt-get install --no-install-recommends -y openjdk-11-jre-headless ca-certificates-java && \ apt-get install --no-install-recommends -y \
rm -rf /var/lib/apt/lists/* "openjdk-${openjdk_version}-jre-headless" \
ca-certificates-java && \
apt-get clean && rm -rf /var/lib/apt/lists/*
# Using the preferred mirror to download Spark # Spark installation
WORKDIR /tmp WORKDIR /tmp
# Using the preferred mirror to download Spark
# hadolint ignore=SC2046 # hadolint ignore=SC2046
RUN wget -q $(wget -qO- https://www.apache.org/dyn/closer.lua/spark/spark-${APACHE_SPARK_VERSION}/spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz\?as_json | \ RUN wget -q $(wget -qO- https://www.apache.org/dyn/closer.lua/spark/spark-${APACHE_SPARK_VERSION}/spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz\?as_json | \
python -c "import sys, json; content=json.load(sys.stdin); print(content['preferred']+content['path_info'])") && \ python -c "import sys, json; content=json.load(sys.stdin); print(content['preferred']+content['path_info'])") && \
echo "BFE45406C67CC4AE00411AD18CC438F51E7D4B6F14EB61E7BF6B5450897C2E8D3AB020152657C0239F253735C263512FFABF538AC5B9FFFA38B8295736A9C387 *spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz" | sha512sum -c - && \ echo "${spark_checksum} *spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz" | sha512sum -c - && \
tar xzf "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz" -C /usr/local --owner root --group root --no-same-owner && \ tar xzf "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz" -C /usr/local --owner root --group root --no-same-owner && \
rm "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz" rm "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz"
...@@ -33,16 +43,17 @@ RUN ln -s "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}" spark ...@@ -33,16 +43,17 @@ RUN ln -s "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}" spark
# Configure Spark # Configure Spark
ENV SPARK_HOME=/usr/local/spark ENV SPARK_HOME=/usr/local/spark
ENV PYTHONPATH=$SPARK_HOME/python:$SPARK_HOME/python/lib/py4j-0.10.9-src.zip \ ENV PYTHONPATH="${SPARK_HOME}/python:${SPARK_HOME}/python/lib/py4j-${py4j_version}-src.zip" \
SPARK_OPTS="--driver-java-options=-Xms1024M --driver-java-options=-Xmx4096M --driver-java-options=-Dlog4j.logLevel=info" \ SPARK_OPTS="--driver-java-options=-Xms1024M --driver-java-options=-Xmx4096M --driver-java-options=-Dlog4j.logLevel=info" \
PATH=$PATH:$SPARK_HOME/bin PATH=$PATH:$SPARK_HOME/bin
USER $NB_UID USER $NB_UID
# Install pyarrow # Install pyarrow
RUN conda install --quiet -y 'pyarrow' && \ RUN conda install --quiet --yes --satisfied-skip-solve \
'pyarrow=1.0.*' && \
conda clean --all -f -y && \ conda clean --all -f -y && \
fix-permissions "${CONDA_DIR}" && \ fix-permissions "${CONDA_DIR}" && \
fix-permissions "/home/${NB_USER}" fix-permissions "/home/${NB_USER}"
WORKDIR $HOME WORKDIR $HOME
\ No newline at end of file
[![docker pulls](https://img.shields.io/docker/pulls/jupyter/pyspark-notebook.svg)](https://hub.docker.com/r/jupyter/pyspark-notebook/) [![docker stars](https://img.shields.io/docker/stars/jupyter/pyspark-notebook.svg)](https://hub.docker.com/r/jupyter/pyspark-notebook/) [![image metadata](https://images.microbadger.com/badges/image/jupyter/pyspark-notebook.svg)](https://microbadger.com/images/jupyter/pyspark-notebook "jupyter/pyspark-notebook image metadata") [![docker pulls](https://img.shields.io/docker/pulls/jupyter/pyspark-notebook.svg)](https://hub.docker.com/r/jupyter/pyspark-notebook/)
[![docker stars](https://img.shields.io/docker/stars/jupyter/pyspark-notebook.svg)](https://hub.docker.com/r/jupyter/pyspark-notebook/)
[![image metadata](https://images.microbadger.com/badges/image/jupyter/pyspark-notebook.svg)](https://microbadger.com/images/jupyter/pyspark-notebook "jupyter/pyspark-notebook image metadata")
# Jupyter Notebook Python, Spark Stack # Jupyter Notebook Python, Spark Stack
Please visit the documentation site for help using and contributing to this image and others. GitHub Actions in the https://github.com/jupyter/docker-stacks project builds and pushes this image
to Docker Hub.
* [Jupyter Docker Stacks on ReadTheDocs](http://jupyter-docker-stacks.readthedocs.io/en/latest/index.html) Please visit the project documentation site for help using and contributing to this image and
* [Selecting an Image :: Core Stacks :: jupyter/pyspark-notebook](http://jupyter-docker-stacks.readthedocs.io/en/latest/using/selecting.html#jupyter-pyspark-notebook) others.
* [Image Specifics :: Apache Spark](http://jupyter-docker-stacks.readthedocs.io/en/latest/using/specifics.html#apache-spark)
- [Jupyter Docker Stacks on ReadTheDocs](http://jupyter-docker-stacks.readthedocs.io/en/latest/index.html)
- [Selecting an Image :: Core Stacks :: jupyter/pyspark-notebook](http://jupyter-docker-stacks.readthedocs.io/en/latest/using/selecting.html#jupyter-pyspark-notebook)
- [Image Specifics :: Apache Spark](http://jupyter-docker-stacks.readthedocs.io/en/latest/using/specifics.html#apache-spark)
INDEX_ROW="|\`${BUILD_TIMESTAMP}\`|\`jupyter/${IMAGE_SHORT_NAME}:${GIT_SHA_TAG}\`|[Git diff](https://github.com/jupyter/docker-stacks/commit/${SOURCE_COMMIT})<br />[Dockerfile](https://github.com/jupyter/docker-stacks/blob/${SOURCE_COMMIT}/${IMAGE_SHORT_NAME}/Dockerfile)<br />[Build manifest](./${IMAGE_SHORT_NAME}-${GIT_SHA_TAG})|"
sed "/|-|/a ${INDEX_ROW}" -i "$INDEX_FILE"
\ No newline at end of file
#!/bin/bash
# Docker Hub post_push hook: tag the freshly pushed image with the short git
# SHA and publish a build-manifest page to the project's GitHub wiki.
# NOTE(review): assumes Docker Hub autobuild env vars (SOURCE_COMMIT,
# IMAGE_NAME, DOCKER_REPO, DEPLOY_KEY, NEXT_BUILD_TRIGGERS) are set --
# confirm against the Docker Hub automated-build hook documentation.
set -e
# Tag the latest build with the short git sha. Push the tag in addition
# to the "latest" tag already pushed.
GIT_SHA_TAG=${SOURCE_COMMIT:0:12}
docker tag $IMAGE_NAME $DOCKER_REPO:$GIT_SHA_TAG
docker push $DOCKER_REPO:$GIT_SHA_TAG
# Create a working directory and compute the wiki sandbox file locations.
BUILD_TIMESTAMP=$(date -u +%FT%TZ)
WORKDIR=$(mktemp -d)
GIT_URI="git@github.com:jupyter/docker-stacks.wiki.git"
GIT_SANDBOX="${WORKDIR}/docker-stacks.wiki"
IMAGE_SHORT_NAME=$(basename $DOCKER_REPO)
MANIFEST_FILE="${GIT_SANDBOX}/manifests/${IMAGE_SHORT_NAME}-${GIT_SHA_TAG}.md"
INDEX_FILE="${GIT_SANDBOX}/Home.md"
# Configure git so it can push back to GitHub.
# The deploy key arrives base64-encoded in DEPLOY_KEY and is loaded into a
# throwaway ssh-agent via process substitution (never written to disk).
eval $(ssh-agent -s)
ssh-add <(base64 -d <(echo "$DEPLOY_KEY"))
ssh-add -l
git config --global user.email "jupyter@googlegroups.com"
git config --global user.name "Jupyter Docker Stacks"
# Clone the GitHub project wiki.
pushd "$WORKDIR"
git clone "$GIT_URI"
popd
# Render the build manifest template.
# The .tmpl files are sourced so their heredocs expand with the variables
# defined above; MANIFEST_FILE and INDEX_FILE are written as side effects.
mkdir -p $(dirname "$MANIFEST_FILE")
source hooks/manifest.tmpl
source hooks/index.tmpl
# Push the wiki update back to GitHub.
pushd "$GIT_SANDBOX"
git add .
git commit -m "DOC: Build ${MANIFEST_FILE}"
git push -u origin master
popd
# Shutdown the ssh agent for good measure.
ssh-agent -k
# Invoke all downstream build triggers.
# Best effort: errors are tolerated (set +e) so one dead trigger URL does
# not fail the whole hook.
set +e
for url in $(echo $NEXT_BUILD_TRIGGERS | sed "s/,/ /g")
do
curl -X POST $url
done
\ No newline at end of file
#!/bin/bash
# GitHub Actions post-build hook (prefix): tag the image with the short git
# SHA and record the build in the wiki checkout located at WIKI_PATH.
# NOTE(review): assumes GITHUB_SHA, IMAGE_NAME, DOCKER_REPO,
# IMAGE_SHORT_NAME, BUILD_TIMESTAMP and WIKI_PATH are exported by the
# caller -- confirm against the workflow/Makefile that invokes this hook.
set -e
# Apply tags
GIT_SHA_TAG=${GITHUB_SHA:0:12}
docker tag $IMAGE_NAME "$DOCKER_REPO:$GIT_SHA_TAG"
# Update index
# sed's "a" command appends the new table row right after the markdown
# table separator line (the one containing "|-|") in the wiki Home page.
INDEX_ROW="|\`${BUILD_TIMESTAMP}\`|\`jupyter/${IMAGE_SHORT_NAME}:${GIT_SHA_TAG}\`|[Git diff](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})<br />[Dockerfile](https://github.com/jupyter/docker-stacks/blob/${GITHUB_SHA}/${IMAGE_SHORT_NAME}/Dockerfile)<br />[Build manifest](./${IMAGE_SHORT_NAME}-${GIT_SHA_TAG})|"
sed "/|-|/a ${INDEX_ROW}" -i "${WIKI_PATH}/Home.md"
# Build manifest
MANIFEST_FILE="${WIKI_PATH}/manifests/${IMAGE_SHORT_NAME}-${GIT_SHA_TAG}.md"
mkdir -p $(dirname "$MANIFEST_FILE")
cat << EOF > "$MANIFEST_FILE" cat << EOF > "$MANIFEST_FILE"
* Build datetime: ${BUILD_TIMESTAMP} * Build datetime: ${BUILD_TIMESTAMP}
* DockerHub build code: ${BUILD_CODE}
* Docker image: ${DOCKER_REPO}:${GIT_SHA_TAG} * Docker image: ${DOCKER_REPO}:${GIT_SHA_TAG}
* Docker image size: $(docker images ${IMAGE_NAME} --format "{{.Size}}") * Docker image size: $(docker images ${IMAGE_NAME} --format "{{.Size}}")
* Git commit SHA: [${SOURCE_COMMIT}](https://github.com/jupyter/docker-stacks/commit/${SOURCE_COMMIT}) * Git commit SHA: [${GITHUB_SHA}](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})
* Git commit message: * Git commit message:
\`\`\` \`\`\`
${COMMIT_MSG} ${COMMIT_MSG}
......
# Copyright (c) Jupyter Development Team. # Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License. # Distributed under the terms of the Modified BSD License.
import time
import logging import logging
import pytest
LOGGER = logging.getLogger(__name__) LOGGER = logging.getLogger(__name__)
def test_spark_shell(container): def test_spark_shell(container):
"""Checking if Spark (spark-shell) is running properly""" """Checking if Spark (spark-shell) is running properly"""
c = container.run( c = container.run(
...@@ -18,6 +17,7 @@ def test_spark_shell(container): ...@@ -18,6 +17,7 @@ def test_spark_shell(container):
LOGGER.debug(logs) LOGGER.debug(logs)
assert 'res0: Int = 2' in logs assert 'res0: Int = 2' in logs
def test_pyspark(container): def test_pyspark(container):
"""PySpark should be in the Python path""" """PySpark should be in the Python path"""
c = container.run( c = container.run(
......
...@@ -16,7 +16,7 @@ RUN apt-get update && \ ...@@ -16,7 +16,7 @@ RUN apt-get update && \
r-cran-rodbc \ r-cran-rodbc \
gfortran \ gfortran \
gcc && \ gcc && \
rm -rf /var/lib/apt/lists/* apt-get clean && rm -rf /var/lib/apt/lists/*
# Fix for devtools https://github.com/conda-forge/r-devtools-feedstock/issues/4 # Fix for devtools https://github.com/conda-forge/r-devtools-feedstock/issues/4
RUN ln -s /bin/tar /bin/gtar RUN ln -s /bin/tar /bin/gtar
...@@ -25,22 +25,22 @@ USER $NB_UID ...@@ -25,22 +25,22 @@ USER $NB_UID
# R packages # R packages
RUN conda install --quiet --yes \ RUN conda install --quiet --yes \
'r-base=4.0.0' \ 'r-base=4.0.3' \
'r-caret=6.*' \ 'r-caret=6.*' \
'r-crayon=1.3*' \ 'r-crayon=1.3*' \
'r-devtools=2.3*' \ 'r-devtools=2.3*' \
'r-forecast=8.12*' \ 'r-forecast=8.13*' \
'r-hexbin=1.28*' \ 'r-hexbin=1.28*' \
'r-htmltools=0.4*' \ 'r-htmltools=0.5*' \
'r-htmlwidgets=1.5*' \ 'r-htmlwidgets=1.5*' \
'r-irkernel=1.1*' \ 'r-irkernel=1.1*' \
'r-nycflights13=1.0*' \ 'r-nycflights13=1.0*' \
'r-randomforest=4.6*' \ 'r-randomforest=4.6*' \
'r-rcurl=1.98*' \ 'r-rcurl=1.98*' \
'r-rmarkdown=2.2*' \ 'r-rmarkdown=2.4*' \
'r-rodbc=1.3*' \ 'r-rodbc=1.3*' \
'r-rsqlite=2.2*' \ 'r-rsqlite=2.2*' \
'r-shiny=1.4*' \ 'r-shiny=1.5*' \
'r-tidyverse=1.3*' \ 'r-tidyverse=1.3*' \
'unixodbc=2.3.*' \ 'unixodbc=2.3.*' \
'r-tidymodels=0.1*' \ 'r-tidymodels=0.1*' \
......
[![docker pulls](https://img.shields.io/docker/pulls/jupyter/r-notebook.svg)](https://hub.docker.com/r/jupyter/r-notebook/) [![docker stars](https://img.shields.io/docker/stars/jupyter/r-notebook.svg)](https://hub.docker.com/r/jupyter/r-notebook/) [![image metadata](https://images.microbadger.com/badges/image/jupyter/r-notebook.svg)](https://microbadger.com/images/jupyter/r-notebook "jupyter/r-notebook image metadata") [![docker pulls](https://img.shields.io/docker/pulls/jupyter/r-notebook.svg)](https://hub.docker.com/r/jupyter/r-notebook/)
[![docker stars](https://img.shields.io/docker/stars/jupyter/r-notebook.svg)](https://hub.docker.com/r/jupyter/r-notebook/)
[![image metadata](https://images.microbadger.com/badges/image/jupyter/r-notebook.svg)](https://microbadger.com/images/jupyter/r-notebook "jupyter/r-notebook image metadata")
# Jupyter Notebook R Stack # Jupyter Notebook R Stack
* [Jupyter Docker Stacks on ReadTheDocs](http://jupyter-docker-stacks.readthedocs.io/en/latest/index.html) GitHub Actions in the https://github.com/jupyter/docker-stacks project builds and pushes this image
* [Selecting an Image :: Core Stacks :: jupyter/r-notebook](http://jupyter-docker-stacks.readthedocs.io/en/latest/using/selecting.html#jupyter-r-notebook) to Docker Hub.
Please visit the project documentation site for help using and contributing to this image and
others.
- [Jupyter Docker Stacks on ReadTheDocs](http://jupyter-docker-stacks.readthedocs.io/en/latest/index.html)
- [Selecting an Image :: Core Stacks :: jupyter/r-notebook](http://jupyter-docker-stacks.readthedocs.io/en/latest/using/selecting.html#jupyter-r-notebook)
INDEX_ROW="|\`${BUILD_TIMESTAMP}\`|\`jupyter/${IMAGE_SHORT_NAME}:${GIT_SHA_TAG}\`|[Git diff](https://github.com/jupyter/docker-stacks/commit/${SOURCE_COMMIT})<br />[Dockerfile](https://github.com/jupyter/docker-stacks/blob/${SOURCE_COMMIT}/${IMAGE_SHORT_NAME}/Dockerfile)<br />[Build manifest](./${IMAGE_SHORT_NAME}-${GIT_SHA_TAG})|"
sed "/|-|/a ${INDEX_ROW}" -i "$INDEX_FILE"
\ No newline at end of file
#!/bin/bash
# Docker Hub post_push hook: tag the freshly pushed image with the short git
# SHA and publish a build-manifest page to the project's GitHub wiki.
# NOTE(review): assumes Docker Hub autobuild env vars (SOURCE_COMMIT,
# IMAGE_NAME, DOCKER_REPO, DEPLOY_KEY, NEXT_BUILD_TRIGGERS) are set --
# confirm against the Docker Hub automated-build hook documentation.
set -e
# Tag the latest build with the short git sha. Push the tag in addition
# to the "latest" tag already pushed.
GIT_SHA_TAG=${SOURCE_COMMIT:0:12}
docker tag $IMAGE_NAME $DOCKER_REPO:$GIT_SHA_TAG
docker push $DOCKER_REPO:$GIT_SHA_TAG
# Create a working directory and compute the wiki sandbox file locations.
BUILD_TIMESTAMP=$(date -u +%FT%TZ)
WORKDIR=$(mktemp -d)
GIT_URI="git@github.com:jupyter/docker-stacks.wiki.git"
GIT_SANDBOX="${WORKDIR}/docker-stacks.wiki"
IMAGE_SHORT_NAME=$(basename $DOCKER_REPO)
MANIFEST_FILE="${GIT_SANDBOX}/manifests/${IMAGE_SHORT_NAME}-${GIT_SHA_TAG}.md"
INDEX_FILE="${GIT_SANDBOX}/Home.md"
# Configure git so it can push back to GitHub.
# The deploy key arrives base64-encoded in DEPLOY_KEY and is loaded into a
# throwaway ssh-agent via process substitution (never written to disk).
eval $(ssh-agent -s)
ssh-add <(base64 -d <(echo "$DEPLOY_KEY"))
ssh-add -l
git config --global user.email "jupyter@googlegroups.com"
git config --global user.name "Jupyter Docker Stacks"
# Clone the GitHub project wiki.
pushd "$WORKDIR"
git clone "$GIT_URI"
popd
# Render the build manifest template.
# The .tmpl files are sourced so their heredocs expand with the variables
# defined above; MANIFEST_FILE and INDEX_FILE are written as side effects.
mkdir -p $(dirname "$MANIFEST_FILE")
source hooks/manifest.tmpl
source hooks/index.tmpl
# Push the wiki update back to GitHub.
pushd "$GIT_SANDBOX"
git add .
git commit -m "DOC: Build ${MANIFEST_FILE}"
git push -u origin master
popd
# Shutdown the ssh agent for good measure.
ssh-agent -k
# Invoke all downstream build triggers.
# Best effort: errors are tolerated (set +e) so one dead trigger URL does
# not fail the whole hook.
set +e
for url in $(echo $NEXT_BUILD_TRIGGERS | sed "s/,/ /g")
do
curl -X POST $url
done
\ No newline at end of file
#!/bin/bash
# GitHub Actions post-build hook (prefix): tag the image with the short git
# SHA and record the build in the wiki checkout located at WIKI_PATH.
# NOTE(review): assumes GITHUB_SHA, IMAGE_NAME, DOCKER_REPO,
# IMAGE_SHORT_NAME, BUILD_TIMESTAMP and WIKI_PATH are exported by the
# caller -- confirm against the workflow/Makefile that invokes this hook.
set -e
# Apply tags
GIT_SHA_TAG=${GITHUB_SHA:0:12}
docker tag $IMAGE_NAME "$DOCKER_REPO:$GIT_SHA_TAG"
# Update index
# sed's "a" command appends the new table row right after the markdown
# table separator line (the one containing "|-|") in the wiki Home page.
INDEX_ROW="|\`${BUILD_TIMESTAMP}\`|\`jupyter/${IMAGE_SHORT_NAME}:${GIT_SHA_TAG}\`|[Git diff](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})<br />[Dockerfile](https://github.com/jupyter/docker-stacks/blob/${GITHUB_SHA}/${IMAGE_SHORT_NAME}/Dockerfile)<br />[Build manifest](./${IMAGE_SHORT_NAME}-${GIT_SHA_TAG})|"
sed "/|-|/a ${INDEX_ROW}" -i "${WIKI_PATH}/Home.md"
# Build manifest
MANIFEST_FILE="${WIKI_PATH}/manifests/${IMAGE_SHORT_NAME}-${GIT_SHA_TAG}.md"
mkdir -p $(dirname "$MANIFEST_FILE")
cat << EOF > "$MANIFEST_FILE" cat << EOF > "$MANIFEST_FILE"
* Build datetime: ${BUILD_TIMESTAMP} * Build datetime: ${BUILD_TIMESTAMP}
* DockerHub build code: ${BUILD_CODE}
* Docker image: ${DOCKER_REPO}:${GIT_SHA_TAG} * Docker image: ${DOCKER_REPO}:${GIT_SHA_TAG}
* Docker image size: $(docker images ${IMAGE_NAME} --format "{{.Size}}") * Docker image size: $(docker images ${IMAGE_NAME} --format "{{.Size}}")
* Git commit SHA: [${SOURCE_COMMIT}](https://github.com/jupyter/docker-stacks/commit/${SOURCE_COMMIT}) * Git commit SHA: [${GITHUB_SHA}](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})
* Git commit message: * Git commit message:
\`\`\` \`\`\`
${COMMIT_MSG} ${COMMIT_MSG}
\`\`\` \`\`\`
## Julia Packages ## R Packages
\`\`\` \`\`\`
$(docker run --rm ${IMAGE_NAME} julia -E 'using InteractiveUtils; versioninfo()') $(docker run --rm ${IMAGE_NAME} R --version)
\`\`\` \`\`\`
\`\`\` \`\`\`
$(docker run --rm ${IMAGE_NAME} julia -E 'import Pkg; Pkg.status()') $(docker run --rm ${IMAGE_NAME} R --silent -e 'installed.packages(.Library)[, c(1,3)]')
\`\`\` \`\`\`
## Python Packages ## Python Packages
...@@ -33,19 +47,9 @@ $(docker run --rm ${IMAGE_NAME} conda info) ...@@ -33,19 +47,9 @@ $(docker run --rm ${IMAGE_NAME} conda info)
$(docker run --rm ${IMAGE_NAME} conda list) $(docker run --rm ${IMAGE_NAME} conda list)
\`\`\` \`\`\`
## R Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} R --version)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} R --silent -e 'installed.packages(.Library)[, c(1,3)]')
\`\`\`
## Apt Packages ## Apt Packages
\`\`\` \`\`\`
$(docker run --rm ${IMAGE_NAME} apt list --installed) $(docker run --rm ${IMAGE_NAME} apt list --installed)
\`\`\` \`\`\`
EOF EOF
\ No newline at end of file
docker docker
pre-commit
pytest pytest
recommonmark==0.5.0 recommonmark
requests requests
sphinx>=1.6 sphinx>=1.6
sphinx-intl sphinx-intl
tabulate tabulate
transifex-client transifex-client
...@@ -7,10 +7,10 @@ LABEL maintainer="Jupyter Project <jupyter@googlegroups.com>" ...@@ -7,10 +7,10 @@ LABEL maintainer="Jupyter Project <jupyter@googlegroups.com>"
USER root USER root
# ffmpeg for matplotlib anim & dvipng for latex labels # ffmpeg for matplotlib anim & dvipng+cm-super for latex labels
RUN apt-get update && \ RUN apt-get update && \
apt-get install -y --no-install-recommends ffmpeg dvipng && \ apt-get install -y --no-install-recommends ffmpeg dvipng cm-super && \
rm -rf /var/lib/apt/lists/* apt-get clean && rm -rf /var/lib/apt/lists/*
USER $NB_UID USER $NB_UID
...@@ -18,31 +18,29 @@ USER $NB_UID ...@@ -18,31 +18,29 @@ USER $NB_UID
RUN conda install --quiet --yes \ RUN conda install --quiet --yes \
'beautifulsoup4=4.9.*' \ 'beautifulsoup4=4.9.*' \
'conda-forge::blas=*=openblas' \ 'conda-forge::blas=*=openblas' \
'bokeh=2.0.*' \ 'bokeh=2.2.*' \
'bottleneck=1.3.*' \ 'bottleneck=1.3.*' \
'cloudpickle=1.4.*' \ 'cloudpickle=1.6.*' \
'cython=0.29.*' \ 'cython=0.29.*' \
'dask=2.15.*' \ 'dask=2.30.*' \
'dill=0.3.*' \ 'dill=0.3.*' \
'h5py=2.10.*' \ 'h5py=2.10.*' \
'hdf5=1.10.*' \
'ipywidgets=7.5.*' \ 'ipywidgets=7.5.*' \
'ipympl=0.5.*'\ 'ipympl=0.5.*'\
'matplotlib-base=3.2.*' \ 'matplotlib-base=3.3.*' \
# numba update to 0.49 fails resolving deps. 'numba=0.51.*' \
'numba=0.48.*' \
'numexpr=2.7.*' \ 'numexpr=2.7.*' \
'pandas=1.0.*' \ 'pandas=1.1.*' \
'patsy=0.5.*' \ 'patsy=0.5.*' \
'protobuf=3.11.*' \ 'protobuf=3.13.*' \
'pytables=3.6.*' \ 'pytables=3.6.*' \
'scikit-image=0.16.*' \ 'scikit-image=0.17.*' \
'scikit-learn=0.23.*' \ 'scikit-learn=0.23.*' \
'scipy=1.4.*' \ 'scipy=1.5.*' \
'seaborn=0.10.*' \ 'seaborn=0.11.*' \
'sqlalchemy=1.3.*' \ 'sqlalchemy=1.3.*' \
'statsmodels=0.11.*' \ 'statsmodels=0.12.*' \
'sympy=1.5.*' \ 'sympy=1.6.*' \
'vincent=0.4.*' \ 'vincent=0.4.*' \
'widgetsnbextension=3.5.*'\ 'widgetsnbextension=3.5.*'\
'xlrd=1.2.*' \ 'xlrd=1.2.*' \
......
[![docker pulls](https://img.shields.io/docker/pulls/jupyter/scipy-notebook.svg)](https://hub.docker.com/r/jupyter/scipy-notebook/) [![docker stars](https://img.shields.io/docker/stars/jupyter/scipy-notebook.svg)](https://hub.docker.com/r/jupyter/scipy-notebook/) [![image metadata](https://images.microbadger.com/badges/image/jupyter/scipy-notebook.svg)](https://microbadger.com/images/jupyter/scipy-notebook "jupyter/scipy-notebook image metadata") [![docker pulls](https://img.shields.io/docker/pulls/jupyter/scipy-notebook.svg)](https://hub.docker.com/r/jupyter/scipy-notebook/)
[![docker stars](https://img.shields.io/docker/stars/jupyter/scipy-notebook.svg)](https://hub.docker.com/r/jupyter/scipy-notebook/)
[![image metadata](https://images.microbadger.com/badges/image/jupyter/scipy-notebook.svg)](https://microbadger.com/images/jupyter/scipy-notebook "jupyter/scipy-notebook image metadata")
# Jupyter Notebook Scientific Python Stack # Jupyter Notebook Scientific Python Stack
Please visit the documentation site for help using and contributing to this image and others. GitHub Actions in the https://github.com/jupyter/docker-stacks project builds and pushes this image
to Docker Hub.
* [Jupyter Docker Stacks on ReadTheDocs](http://jupyter-docker-stacks.readthedocs.io/en/latest/index.html) Please visit the project documentation site for help using and contributing to this image and
* [Selecting an Image :: Core Stacks :: jupyter/scipy-notebook](http://jupyter-docker-stacks.readthedocs.io/en/latest/using/selecting.html#jupyter-scipy-notebook) others.
- [Jupyter Docker Stacks on ReadTheDocs](http://jupyter-docker-stacks.readthedocs.io/en/latest/index.html)
- [Selecting an Image :: Core Stacks :: jupyter/scipy-notebook](http://jupyter-docker-stacks.readthedocs.io/en/latest/using/selecting.html#jupyter-scipy-notebook)
INDEX_ROW="|\`${BUILD_TIMESTAMP}\`|\`jupyter/${IMAGE_SHORT_NAME}:${GIT_SHA_TAG}\`|[Git diff](https://github.com/jupyter/docker-stacks/commit/${SOURCE_COMMIT})<br />[Dockerfile](https://github.com/jupyter/docker-stacks/blob/${SOURCE_COMMIT}/${IMAGE_SHORT_NAME}/Dockerfile)<br />[Build manifest](./${IMAGE_SHORT_NAME}-${GIT_SHA_TAG})|"
sed "/|-|/a ${INDEX_ROW}" -i "$INDEX_FILE"
\ No newline at end of file
# Build-manifest template: sourced by the post_push hook, which defines
# MANIFEST_FILE, BUILD_TIMESTAMP, BUILD_CODE, DOCKER_REPO, GIT_SHA_TAG,
# IMAGE_NAME, SOURCE_COMMIT and COMMIT_MSG before sourcing.
# The $(docker run ...) substitutions below execute when the heredoc is
# expanded, so rendering this template runs the freshly built image.
# Backticks are escaped (\`) so they land literally in the markdown output.
cat << EOF > "$MANIFEST_FILE"
* Build datetime: ${BUILD_TIMESTAMP}
* DockerHub build code: ${BUILD_CODE}
* Docker image: ${DOCKER_REPO}:${GIT_SHA_TAG}
* Docker image size: $(docker images ${IMAGE_NAME} --format "{{.Size}}")
* Git commit SHA: [${SOURCE_COMMIT}](https://github.com/jupyter/docker-stacks/commit/${SOURCE_COMMIT})
* Git commit message:
\`\`\`
${COMMIT_MSG}
\`\`\`
## Python Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} python --version)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda info)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda list)
\`\`\`
## Apt Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} apt list --installed)
\`\`\`
EOF
\ No newline at end of file
#!/bin/bash
# Docker Hub post_push hook: tag the freshly pushed image with the short git
# SHA and publish a build-manifest page to the project's GitHub wiki.
# NOTE(review): assumes Docker Hub autobuild env vars (SOURCE_COMMIT,
# IMAGE_NAME, DOCKER_REPO, DEPLOY_KEY, NEXT_BUILD_TRIGGERS) are set --
# confirm against the Docker Hub automated-build hook documentation.
set -e
# Tag the latest build with the short git sha. Push the tag in addition
# to the "latest" tag already pushed.
GIT_SHA_TAG=${SOURCE_COMMIT:0:12}
docker tag $IMAGE_NAME $DOCKER_REPO:$GIT_SHA_TAG
docker push $DOCKER_REPO:$GIT_SHA_TAG
# Create a working directory and compute the wiki sandbox file locations.
BUILD_TIMESTAMP=$(date -u +%FT%TZ)
WORKDIR=$(mktemp -d)
GIT_URI="git@github.com:jupyter/docker-stacks.wiki.git"
GIT_SANDBOX="${WORKDIR}/docker-stacks.wiki"
IMAGE_SHORT_NAME=$(basename $DOCKER_REPO)
MANIFEST_FILE="${GIT_SANDBOX}/manifests/${IMAGE_SHORT_NAME}-${GIT_SHA_TAG}.md"
INDEX_FILE="${GIT_SANDBOX}/Home.md"
# Configure git so it can push back to GitHub.
# The deploy key arrives base64-encoded in DEPLOY_KEY and is loaded into a
# throwaway ssh-agent via process substitution (never written to disk).
eval $(ssh-agent -s)
ssh-add <(base64 -d <(echo "$DEPLOY_KEY"))
ssh-add -l
git config --global user.email "jupyter@googlegroups.com"
git config --global user.name "Jupyter Docker Stacks"
# Clone the GitHub project wiki.
pushd "$WORKDIR"
git clone "$GIT_URI"
popd
# Render the build manifest template.
# The .tmpl files are sourced so their heredocs expand with the variables
# defined above; MANIFEST_FILE and INDEX_FILE are written as side effects.
mkdir -p $(dirname "$MANIFEST_FILE")
source hooks/manifest.tmpl
source hooks/index.tmpl
# Push the wiki update back to GitHub.
pushd "$GIT_SANDBOX"
git add .
git commit -m "DOC: Build ${MANIFEST_FILE}"
git push -u origin master
popd
# Shutdown the ssh agent for good measure.
ssh-agent -k
# Invoke all downstream build triggers.
# Best effort: errors are tolerated (set +e) so one dead trigger URL does
# not fail the whole hook.
set +e
for url in $(echo $NEXT_BUILD_TRIGGERS | sed "s/,/ /g")
do
curl -X POST $url
done
\ No newline at end of file
#!/bin/bash
# GitHub Actions post-build hook (prefix): tag the image with the short git
# SHA and record the build in the wiki checkout located at WIKI_PATH.
# NOTE(review): assumes GITHUB_SHA, IMAGE_NAME, DOCKER_REPO,
# IMAGE_SHORT_NAME, BUILD_TIMESTAMP and WIKI_PATH are exported by the
# caller -- confirm against the workflow/Makefile that invokes this hook.
set -e
# Apply tags
GIT_SHA_TAG=${GITHUB_SHA:0:12}
docker tag $IMAGE_NAME "$DOCKER_REPO:$GIT_SHA_TAG"
# Update index
# sed's "a" command appends the new table row right after the markdown
# table separator line (the one containing "|-|") in the wiki Home page.
INDEX_ROW="|\`${BUILD_TIMESTAMP}\`|\`jupyter/${IMAGE_SHORT_NAME}:${GIT_SHA_TAG}\`|[Git diff](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})<br />[Dockerfile](https://github.com/jupyter/docker-stacks/blob/${GITHUB_SHA}/${IMAGE_SHORT_NAME}/Dockerfile)<br />[Build manifest](./${IMAGE_SHORT_NAME}-${GIT_SHA_TAG})|"
sed "/|-|/a ${INDEX_ROW}" -i "${WIKI_PATH}/Home.md"
# Build manifest
MANIFEST_FILE="${WIKI_PATH}/manifests/${IMAGE_SHORT_NAME}-${GIT_SHA_TAG}.md"
mkdir -p $(dirname "$MANIFEST_FILE")
cat << EOF > "$MANIFEST_FILE" cat << EOF > "$MANIFEST_FILE"
* Build datetime: ${BUILD_TIMESTAMP} * Build datetime: ${BUILD_TIMESTAMP}
* DockerHub build code: ${BUILD_CODE}
* Docker image: ${DOCKER_REPO}:${GIT_SHA_TAG} * Docker image: ${DOCKER_REPO}:${GIT_SHA_TAG}
* Docker image size: $(docker images ${IMAGE_NAME} --format "{{.Size}}") * Docker image size: $(docker images ${IMAGE_NAME} --format "{{.Size}}")
* Git commit SHA: [${SOURCE_COMMIT}](https://github.com/jupyter/docker-stacks/commit/${SOURCE_COMMIT}) * Git commit SHA: [${GITHUB_SHA}](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})
* Git commit message: * Git commit message:
\`\`\` \`\`\`
${COMMIT_MSG} ${COMMIT_MSG}
\`\`\` \`\`\`
## R Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} R --version)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} R --silent -e 'installed.packages(.Library)[, c(1,3)]')
\`\`\`
## Python Packages ## Python Packages
\`\`\` \`\`\`
......
...@@ -3,7 +3,6 @@ ...@@ -3,7 +3,6 @@
# Optional test with [Matplotlib Jupyter Integration](https://github.com/matplotlib/ipympl) # Optional test with [Matplotlib Jupyter Integration](https://github.com/matplotlib/ipympl)
# %matplotlib widget # %matplotlib widget
import matplotlib
import matplotlib.pyplot as plt import matplotlib.pyplot as plt
import numpy as np import numpy as np
import os import os
...@@ -21,4 +20,4 @@ ax.grid() ...@@ -21,4 +20,4 @@ ax.grid()
# Note that the test can be run headless by checking if an image is produced # Note that the test can be run headless by checking if an image is produced
file_path = os.path.join("/tmp", "test.png") file_path = os.path.join("/tmp", "test.png")
fig.savefig(file_path) fig.savefig(file_path)
print(f"File {file_path} saved") print(f"File {file_path} saved")
\ No newline at end of file
# Matplotlib: check that LaTeX (usetex) font rendering works in the image.
# NOTE(review): usetex rendering presumably depends on the image's LaTeX
# font packages -- confirm against the Dockerfile's apt installs.
import os

import matplotlib
import matplotlib.pyplot as plt

# Same rc settings as before, applied in a single batch.
matplotlib.rcParams.update({
    'pgf.texsystem': 'pdflatex',
    'font.family': 'serif',
    'font.size': 18,
    'axes.labelsize': 20,
    'axes.titlesize': 24,
    'figure.titlesize': 28,
    'text.usetex': True,
})

fig, ax = plt.subplots(1, 1)
points = [1, 2]
ax.plot(points, points, label='a label')
ax.legend(fontsize=15)

# Headless check: success is the PNG being written to disk.
file_path = os.path.join("/tmp", "test_fonts.png")
fig.savefig(file_path)
print(f"File {file_path} saved")
...@@ -9,13 +9,24 @@ import os ...@@ -9,13 +9,24 @@ import os
LOGGER = logging.getLogger(__name__) LOGGER = logging.getLogger(__name__)
def test_matplotlib(container): @pytest.mark.parametrize("test_file,expected_file,description",
"""Test that matplotlib is able to plot a graph and write it as an image""" [
host_data_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data") ("matplotlib_1.py", "test.png",
"Test that matplotlib is able to plot a graph and write it as an image ..."),
("matplotlib_fonts_1.py", "test_fonts.png",
"Test cm-super latex labels in matplotlib ...")
])
def test_matplotlib(container, test_file, expected_file, description):
"""Various tests performed on matplotlib
- Test that matplotlib is able to plot a graph and write it as an image
- Test matplotlib latex fonts, which depend on the cm-super package
"""
host_data_dir = os.path.join(os.path.dirname(
os.path.realpath(__file__)), "data")
cont_data_dir = "/home/jovyan/data" cont_data_dir = "/home/jovyan/data"
test_file = "matplotlib_1.py"
output_dir = "/tmp" output_dir = "/tmp"
LOGGER.info(f"Test that matplotlib is able to plot a graph and write it as an image ...") LOGGER.info(description)
command = "sleep infinity" command = "sleep infinity"
running_container = container.run( running_container = container.run(
volumes={host_data_dir: {"bind": cont_data_dir, "mode": "ro"}}, volumes={host_data_dir: {"bind": cont_data_dir, "mode": "ro"}},
...@@ -28,8 +39,7 @@ def test_matplotlib(container): ...@@ -28,8 +39,7 @@ def test_matplotlib(container):
LOGGER.debug(cmd.output.decode("utf-8")) LOGGER.debug(cmd.output.decode("utf-8"))
# Checking if the file is generated # Checking if the file is generated
# https://stackoverflow.com/a/15895594/4413446 # https://stackoverflow.com/a/15895594/4413446
expected_file = f"{output_dir}/test.png" command = f"test -s {output_dir}/{expected_file}"
command = f"test -s {expected_file}"
cmd = running_container.exec_run(command) cmd = running_container.exec_run(command)
assert cmd.exit_code == 0, f"Command {command} failed" assert cmd.exit_code == 0, f"Command {command} failed"
LOGGER.debug(cmd.output.decode("utf-8")) LOGGER.debug(cmd.output.decode("utf-8"))
...@@ -8,17 +8,23 @@ LOGGER = logging.getLogger(__name__) ...@@ -8,17 +8,23 @@ LOGGER = logging.getLogger(__name__)
@pytest.mark.parametrize( @pytest.mark.parametrize(
"name,command", "name,command_list",
[ [
( (
"Sum series", "Sum series",
"import pandas as pd; import numpy as np; np.random.seed(0); print(pd.Series(np.random.randint(0, 7, size=10)).sum())", [
"import pandas as pd",
"import numpy as np",
"np.random.seed(0)",
"print(pd.Series(np.random.randint(0, 7, size=10)).sum())"
]
), ),
], ],
) )
def test_pandas(container, name, command): def test_pandas(container, name, command_list):
"""Basic pandas tests""" """Basic pandas tests"""
LOGGER.info(f"Testing pandas: {name} ...") LOGGER.info(f"Testing pandas: {name} ...")
command = ';'.join(command_list)
c = container.run(tty=True, command=["start.sh", "python", "-c", command]) c = container.run(tty=True, command=["start.sh", "python", "-c", command])
rv = c.wait(timeout=30) rv = c.wait(timeout=30)
assert rv == 0 or rv["StatusCode"] == 0, f"Command {command} failed" assert rv == 0 or rv["StatusCode"] == 0, f"Command {command} failed"
......
...@@ -7,6 +7,6 @@ LABEL maintainer="Jupyter Project <jupyter@googlegroups.com>" ...@@ -7,6 +7,6 @@ LABEL maintainer="Jupyter Project <jupyter@googlegroups.com>"
# Install Tensorflow # Install Tensorflow
RUN pip install --quiet --no-cache-dir \ RUN pip install --quiet --no-cache-dir \
'tensorflow==2.2.0' && \ 'tensorflow==2.3.1' && \
fix-permissions "${CONDA_DIR}" && \ fix-permissions "${CONDA_DIR}" && \
fix-permissions "/home/${NB_USER}" fix-permissions "/home/${NB_USER}"
[![docker pulls](https://img.shields.io/docker/pulls/jupyter/tensorflow-notebook.svg)](https://hub.docker.com/r/jupyter/tensorflow-notebook/) [![docker stars](https://img.shields.io/docker/stars/jupyter/tensorflow-notebook.svg)](https://hub.docker.com/r/jupyter/tensorflow-notebook/) [![image metadata](https://images.microbadger.com/badges/image/jupyter/tensorflow-notebook.svg)](https://microbadger.com/images/jupyter/tensorflow-notebook "jupyter/tensorflow-notebook image metadata") [![docker pulls](https://img.shields.io/docker/pulls/jupyter/tensorflow-notebook.svg)](https://hub.docker.com/r/jupyter/tensorflow-notebook/)
[![docker stars](https://img.shields.io/docker/stars/jupyter/tensorflow-notebook.svg)](https://hub.docker.com/r/jupyter/tensorflow-notebook/)
[![image metadata](https://images.microbadger.com/badges/image/jupyter/tensorflow-notebook.svg)](https://microbadger.com/images/jupyter/tensorflow-notebook "jupyter/tensorflow-notebook image metadata")
# Jupyter Notebook Deep Learning Stack # Jupyter Notebook Deep Learning Stack
Please visit the documentation site for help using and contributing to this image and others. GitHub Actions in the https://github.com/jupyter/docker-stacks project builds and pushes this image
to Docker Hub.
* [Jupyter Docker Stacks on ReadTheDocs](http://jupyter-docker-stacks.readthedocs.io/en/latest/index.html) Please visit the project documentation site for help using and contributing to this image and
* [Selecting an Image :: Core Stacks :: jupyter/tensorflow-notebook](http://jupyter-docker-stacks.readthedocs.io/en/latest/using/selecting.html#jupyter-tensorflow-notebook) others.
* [Image Specifics :: Tensorflow](http://jupyter-docker-stacks.readthedocs.io/en/latest/using/specifics.html#tensorflow)
- [Jupyter Docker Stacks on ReadTheDocs](http://jupyter-docker-stacks.readthedocs.io/en/latest/index.html)
- [Selecting an Image :: Core Stacks :: jupyter/tensorflow-notebook](http://jupyter-docker-stacks.readthedocs.io/en/latest/using/selecting.html#jupyter-tensorflow-notebook)
- [Image Specifics :: Tensorflow](http://jupyter-docker-stacks.readthedocs.io/en/latest/using/specifics.html#tensorflow)
# index.tmpl: sourced by the post_push hook after GIT_SHA_TAG, SOURCE_COMMIT,
# IMAGE_SHORT_NAME, BUILD_TIMESTAMP and INDEX_FILE are set. Builds one markdown
# table row for this build and inserts it into the wiki index right after the
# |-| header-separator row (sed `a` appends after the matching line, in place).
INDEX_ROW="|\`${BUILD_TIMESTAMP}\`|\`jupyter/${IMAGE_SHORT_NAME}:${GIT_SHA_TAG}\`|[Git diff](https://github.com/jupyter/docker-stacks/commit/${SOURCE_COMMIT})<br />[Dockerfile](https://github.com/jupyter/docker-stacks/blob/${SOURCE_COMMIT}/${IMAGE_SHORT_NAME}/Dockerfile)<br />[Build manifest](./${IMAGE_SHORT_NAME}-${GIT_SHA_TAG})|"
sed "/|-|/a ${INDEX_ROW}" -i "$INDEX_FILE"
\ No newline at end of file
# manifest.tmpl: sourced by the post_push hook after MANIFEST_FILE, IMAGE_NAME
# and the build metadata variables are set. Renders the per-build manifest
# markdown page; all $(docker run ...) substitutions execute at render time,
# so the page captures the image's Python/conda/apt package listings verbatim.
cat << EOF > "$MANIFEST_FILE"
* Build datetime: ${BUILD_TIMESTAMP}
* DockerHub build code: ${BUILD_CODE}
* Docker image: ${DOCKER_REPO}:${GIT_SHA_TAG}
* Docker image size: $(docker images ${IMAGE_NAME} --format "{{.Size}}")
* Git commit SHA: [${SOURCE_COMMIT}](https://github.com/jupyter/docker-stacks/commit/${SOURCE_COMMIT})
* Git commit message:
\`\`\`
${COMMIT_MSG}
\`\`\`
## Python Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} python --version)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda info)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda list)
\`\`\`
## Apt Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} apt list --installed)
\`\`\`
EOF
\ No newline at end of file
#!/bin/bash
# Docker Hub post_push hook: tag the freshly built image with the short git
# SHA, push that tag, render a build manifest into the project's GitHub wiki,
# and fire any downstream build triggers.
#
# Expects from the environment: SOURCE_COMMIT, IMAGE_NAME, DOCKER_REPO,
# DEPLOY_KEY (base64-encoded ssh private key), NEXT_BUILD_TRIGGERS
# (comma-separated URLs; may be empty).
set -e

# Tag the latest build with the short git sha. Push the tag in addition
# to the "latest" tag already pushed.
GIT_SHA_TAG=${SOURCE_COMMIT:0:12}
# Quote expansions so repo/image names survive word splitting and globbing.
docker tag "$IMAGE_NAME" "$DOCKER_REPO:$GIT_SHA_TAG"
docker push "$DOCKER_REPO:$GIT_SHA_TAG"

# Create a working directory.
BUILD_TIMESTAMP=$(date -u +%FT%TZ)
WORKDIR=$(mktemp -d)
GIT_URI="git@github.com:jupyter/docker-stacks.wiki.git"
GIT_SANDBOX="${WORKDIR}/docker-stacks.wiki"
IMAGE_SHORT_NAME=$(basename "$DOCKER_REPO")
MANIFEST_FILE="${GIT_SANDBOX}/manifests/${IMAGE_SHORT_NAME}-${GIT_SHA_TAG}.md"
INDEX_FILE="${GIT_SANDBOX}/Home.md"

# Configure git so it can push back to GitHub.
# The deploy key arrives base64-encoded; feed the decoded key to ssh-agent
# via process substitution so it never touches disk.
eval $(ssh-agent -s)
ssh-add <(base64 -d <(echo "$DEPLOY_KEY"))
ssh-add -l
git config --global user.email "jupyter@googlegroups.com"
git config --global user.name "Jupyter Docker Stacks"

# Clone the GitHub project wiki.
pushd "$WORKDIR"
git clone "$GIT_URI"
popd

# Render the build manifest template.
mkdir -p "$(dirname "$MANIFEST_FILE")"
source hooks/manifest.tmpl
source hooks/index.tmpl

# Push the wiki update back to GitHub.
pushd "$GIT_SANDBOX"
git add .
git commit -m "DOC: Build ${MANIFEST_FILE}"
git push -u origin master
popd

# Shutdown the ssh agent for good measure.
ssh-agent -k

# Invoke all downstream build triggers. Trigger failures must not fail the
# build, hence set +e; the unquoted command substitution in the for-list is
# deliberate so the comma-split URL list word-splits into loop items.
set +e
for url in $(echo "$NEXT_BUILD_TRIGGERS" | sed "s/,/ /g")
do
    curl -X POST "$url"
done
\ No newline at end of file
#!/bin/bash
# GitHub Actions post-build hook: tag the image with the short git SHA, add a
# row for this build to the wiki index, and write a per-build manifest page
# into the local wiki checkout at WIKI_PATH.
#
# Expects from the environment: GITHUB_SHA, IMAGE_NAME, DOCKER_REPO,
# WIKI_PATH, COMMIT_MSG. BUILD_TIMESTAMP and IMAGE_SHORT_NAME are honored if
# already set, otherwise derived below (the Docker Hub post_push hook computes
# both the same way).
set -e

# Apply tags
GIT_SHA_TAG=${GITHUB_SHA:0:12}
# Quote expansions so repo/image names survive word splitting and globbing.
docker tag "$IMAGE_NAME" "$DOCKER_REPO:$GIT_SHA_TAG"

# Derive metadata not provided by the caller (backward compatible: values
# already present in the environment win).
BUILD_TIMESTAMP="${BUILD_TIMESTAMP:-$(date -u +%FT%TZ)}"
IMAGE_SHORT_NAME="${IMAGE_SHORT_NAME:-$(basename "$DOCKER_REPO")}"

# Update index: insert one markdown table row for this build right after the
# |-| header-separator row (sed `a` appends after the matching line, in place).
INDEX_ROW="|\`${BUILD_TIMESTAMP}\`|\`jupyter/${IMAGE_SHORT_NAME}:${GIT_SHA_TAG}\`|[Git diff](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})<br />[Dockerfile](https://github.com/jupyter/docker-stacks/blob/${GITHUB_SHA}/${IMAGE_SHORT_NAME}/Dockerfile)<br />[Build manifest](./${IMAGE_SHORT_NAME}-${GIT_SHA_TAG})|"
sed "/|-|/a ${INDEX_ROW}" -i "${WIKI_PATH}/Home.md"

# Build manifest: the $(docker run ...) substitutions execute at render time,
# capturing the image's Python/conda/apt package listings verbatim.
MANIFEST_FILE="${WIKI_PATH}/manifests/${IMAGE_SHORT_NAME}-${GIT_SHA_TAG}.md"
mkdir -p "$(dirname "$MANIFEST_FILE")"
cat << EOF > "$MANIFEST_FILE"
* Build datetime: ${BUILD_TIMESTAMP}
* Docker image: ${DOCKER_REPO}:${GIT_SHA_TAG}
* Docker image size: $(docker images ${IMAGE_NAME} --format "{{.Size}}")
* Git commit SHA: [${GITHUB_SHA}](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})
* Git commit message:
\`\`\`
${COMMIT_MSG}
\`\`\`
## Python Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} python --version)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda info)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda list)
\`\`\`
## Apt Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} apt list --installed)
\`\`\`
EOF
...@@ -64,7 +64,7 @@ class CondaPackageHelper: ...@@ -64,7 +64,7 @@ class CondaPackageHelper:
def installed_packages(self): def installed_packages(self):
"""Return the installed packages""" """Return the installed packages"""
if self.installed is None: if self.installed is None:
LOGGER.info(f"Grabing the list of installed packages ...") LOGGER.info("Grabing the list of installed packages ...")
self.installed = CondaPackageHelper._packages_from_json( self.installed = CondaPackageHelper._packages_from_json(
self._execute_command(CondaPackageHelper._conda_export_command()) self._execute_command(CondaPackageHelper._conda_export_command())
) )
...@@ -73,7 +73,7 @@ class CondaPackageHelper: ...@@ -73,7 +73,7 @@ class CondaPackageHelper:
def specified_packages(self): def specified_packages(self):
"""Return the specifications (i.e. packages installation requested)""" """Return the specifications (i.e. packages installation requested)"""
if self.specs is None: if self.specs is None:
LOGGER.info(f"Grabing the list of specifications ...") LOGGER.info("Grabing the list of specifications ...")
self.specs = CondaPackageHelper._packages_from_json( self.specs = CondaPackageHelper._packages_from_json(
self._execute_command(CondaPackageHelper._conda_export_command(True)) self._execute_command(CondaPackageHelper._conda_export_command(True))
) )
...@@ -87,11 +87,11 @@ class CondaPackageHelper: ...@@ -87,11 +87,11 @@ class CondaPackageHelper:
@staticmethod @staticmethod
def _packages_from_json(env_export): def _packages_from_json(env_export):
"""Extract packages and versions from the lines returned by the list of specifications""" """Extract packages and versions from the lines returned by the list of specifications"""
#dependencies = filter(lambda x: isinstance(x, str), json.loads(env_export).get("dependencies")) # dependencies = filter(lambda x: isinstance(x, str), json.loads(env_export).get("dependencies"))
dependencies = json.loads(env_export).get("dependencies") dependencies = json.loads(env_export).get("dependencies")
# Filtering packages installed through pip in this case it's a dict {'pip': ['toree==0.3.0']} # Filtering packages installed through pip in this case it's a dict {'pip': ['toree==0.3.0']}
# Since we only manage packages installed through conda here # Since we only manage packages installed through conda here
dependencies = filter(lambda x: isinstance(x, str), dependencies) dependencies = filter(lambda x: isinstance(x, str), dependencies)
packages_dict = dict() packages_dict = dict()
for split in map(lambda x: x.split("=", 1), dependencies): for split in map(lambda x: x.split("=", 1), dependencies):
# default values # default values
...@@ -112,7 +112,7 @@ class CondaPackageHelper: ...@@ -112,7 +112,7 @@ class CondaPackageHelper:
"""Return the available packages""" """Return the available packages"""
if self.available is None: if self.available is None:
LOGGER.info( LOGGER.info(
f"Grabing the list of available packages (can take a while) ..." "Grabing the list of available packages (can take a while) ..."
) )
# Keeping command line output since `conda search --outdated --json` is way too long ... # Keeping command line output since `conda search --outdated --json` is way too long ...
self.available = CondaPackageHelper._extract_available( self.available = CondaPackageHelper._extract_available(
...@@ -135,7 +135,7 @@ class CondaPackageHelper: ...@@ -135,7 +135,7 @@ class CondaPackageHelper:
installed = self.installed_packages() installed = self.installed_packages()
available = self.available_packages() available = self.available_packages()
self.comparison = list() self.comparison = list()
for pkg, inst_vs in self.installed.items(): for pkg, inst_vs in installed.items():
if not specifications_only or pkg in specs: if not specifications_only or pkg in specs:
avail_vs = sorted( avail_vs = sorted(
list(available[pkg]), key=CondaPackageHelper.semantic_cmp list(available[pkg]), key=CondaPackageHelper.semantic_cmp
...@@ -158,7 +158,8 @@ class CondaPackageHelper: ...@@ -158,7 +158,8 @@ class CondaPackageHelper:
"""Manage semantic versioning for comparison""" """Manage semantic versioning for comparison"""
def mysplit(string): def mysplit(string):
version_substrs = lambda x: re.findall(r"([A-z]+|\d+)", x) def version_substrs(x):
return re.findall(r"([A-z]+|\d+)", x)
return list(chain(map(version_substrs, string.split(".")))) return list(chain(map(version_substrs, string.split("."))))
def str_ord(string): def str_ord(string):
......
...@@ -131,7 +131,7 @@ def _import_packages(package_helper, filtered_packages, check_function, max_fail ...@@ -131,7 +131,7 @@ def _import_packages(package_helper, filtered_packages, check_function, max_fail
Note: using a list of packages instead of a fixture for the list of packages since pytest prevents use of multiple yields Note: using a list of packages instead of a fixture for the list of packages since pytest prevents use of multiple yields
""" """
failures = {} failures = {}
LOGGER.info(f"Testing the import of packages ...") LOGGER.info("Testing the import of packages ...")
for package in filtered_packages: for package in filtered_packages:
LOGGER.info(f"Trying to import {package}") LOGGER.info(f"Trying to import {package}")
try: try:
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment