Skip to content

Commit c213bde

Browse files
release: v1.5.1 (#180)
1 parent 0ea7f62 commit c213bde

18 files changed

+1739
-0
lines changed
Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,11 @@
# Change log: 1.5.1(cpu)

## Upgrades:

Package | Previous Version | Current Version
---|---|---
jupyterlab|4.1.0|4.1.1
amazon-sagemaker-jupyter-scheduler|3.0.6|3.0.7
ipywidgets|8.1.1|8.1.2
langchain|0.1.5|0.1.6
uvicorn|0.27.0|0.27.1
Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,11 @@
# Change log: 1.5.1(gpu)

## Upgrades:

Package | Previous Version | Current Version
---|---|---
jupyterlab|4.1.0|4.1.1
amazon-sagemaker-jupyter-scheduler|3.0.6|3.0.7
ipywidgets|8.1.1|8.1.2
langchain|0.1.5|0.1.6
uvicorn|0.27.0|0.27.1
Lines changed: 127 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,127 @@
# Base image tag is injected at build time (e.g. by the release pipeline), so the
# same Dockerfile can build against different micromamba releases.
ARG TAG_FOR_BASE_MICROMAMBA_IMAGE
FROM mambaorg/micromamba:$TAG_FOR_BASE_MICROMAMBA_IMAGE

# Empty by default (CPU build); GPU builds pass e.g. "11.8" so conda can resolve
# CUDA-constrained packages via CONDA_OVERRIDE_CUDA below.
ARG CUDA_MAJOR_MINOR_VERSION=''
# Name of the pinned environment spec copied into /tmp and fed to micromamba.
ARG ENV_IN_FILENAME
# Optional extra spec containing ${...} placeholders expanded via envsubst.
ARG ARG_BASED_ENV_IN_FILENAME

ARG AMZN_BASE="/opt/amazon/sagemaker"
# Staging area for the OS directory-tree overlay merged near the end of the build.
ARG DIRECTORY_TREE_STAGE_DIR="${AMZN_BASE}/dir-staging"

# SageMaker Studio's expected notebook user identity.
ARG NB_USER="sagemaker-user"
ARG NB_UID=1000
ARG NB_GID=100

ENV SAGEMAKER_LOGGING_DIR="/var/log/sagemaker/"
ENV STUDIO_LOGGING_DIR="/var/log/studio/"

USER root
# Rename the micromamba image's default user to the SageMaker user while keeping
# the same account, so files owned by the old user remain accessible.
RUN usermod "--login=${NB_USER}" "--home=/home/${NB_USER}" --move-home "-u ${NB_UID}" "${MAMBA_USER}" && \
    groupmod "--new-name=${NB_USER}" --non-unique "-g ${NB_GID}" "${MAMBA_USER}" && \
    # Update the expected value of MAMBA_USER for the
    # _entrypoint.sh consistency check.
    echo "${NB_USER}" > "/etc/arg_mamba_user" && \
    :
ENV MAMBA_USER=$NB_USER
ENV USER=$NB_USER

RUN apt-get update && \
    apt-get install -y --no-install-recommends sudo gettext-base wget curl unzip git rsync build-essential openssh-client nano && \
    # We just install tzdata below but leave default time zone as UTC. This helps packages like Pandas to function correctly.
    DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends tzdata krb5-user libkrb5-dev libsasl2-dev libsasl2-modules && \
    chmod g+w /etc/passwd && \
    echo "ALL ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers && \
    touch /etc/krb5.conf.lock && chown ${NB_USER}:${MAMBA_USER} /etc/krb5.conf* && \
    # Note that we do NOT run `rm -rf /var/lib/apt/lists/*` here. If we did, anyone building on top of our images will
    # not be able to run any `apt-get install` commands and that would hamper customizability of the images.
    # NOTE(review): unpinned, un-checksummed download — consider pinning an AWS CLI
    # version and verifying its published checksum for reproducible builds.
    curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" && \
    unzip awscliv2.zip && \
    # Fix: this layer already runs as root (USER root above); `sudo` here was
    # redundant (hadolint DL3004).
    ./aws/install && \
    rm -rf aws awscliv2.zip && \
    :
# Ensure login shells activate the current conda/mamba environment.
RUN echo "source /usr/local/bin/_activate_current_env.sh" | tee --append /etc/profile

USER $MAMBA_USER
COPY --chown=$MAMBA_USER:$MAMBA_USER $ENV_IN_FILENAME *.in /tmp/

# Make sure that $ENV_IN_FILENAME has a newline at the end before the `tee` command runs. Otherwise, nasty things
# will happen.
RUN if [[ -z $ARG_BASED_ENV_IN_FILENAME ]] ; \
    then echo 'No ARG_BASED_ENV_IN_FILENAME passed' ; \
    else envsubst < /tmp/$ARG_BASED_ENV_IN_FILENAME | tee --append /tmp/$ENV_IN_FILENAME ; \
    fi

# Lets conda's solver assume this CUDA driver version is present at runtime.
ARG CONDA_OVERRIDE_CUDA=$CUDA_MAJOR_MINOR_VERSION
RUN micromamba install -y --name base --file /tmp/$ENV_IN_FILENAME && \
    micromamba clean --all --yes --force-pkgs-dirs && \
    rm -rf /tmp/*.in

# Activate the base env for all subsequent RUN instructions (micromamba feature).
ARG MAMBA_DOCKERFILE_ACTIVATE=1
# Provide a `python` alias system-wide (sudo needed: we run as $MAMBA_USER here).
RUN sudo ln -s $(which python3) /usr/bin/python

# Install glue kernels, and move to shared directory
# Also patching base kernel so Studio background code doesn't start session silently
RUN install-glue-kernels && \
    SITE_PACKAGES=$(pip show aws-glue-sessions | grep Location | awk '{print $2}') && \
    jupyter-kernelspec install $SITE_PACKAGES/aws_glue_interactive_sessions_kernel/glue_pyspark --user && \
    jupyter-kernelspec install $SITE_PACKAGES/aws_glue_interactive_sessions_kernel/glue_spark --user && \
    mv /home/sagemaker-user/.local/share/jupyter/kernels/glue_pyspark /opt/conda/share/jupyter/kernels && \
    mv /home/sagemaker-user/.local/share/jupyter/kernels/glue_spark /opt/conda/share/jupyter/kernels && \
    # NOTE(review): the inserted Python lines' indentation may have been collapsed
    # by the page scrape this was recovered from — verify against the original file.
    sed -i '/if not store_history and (/i\ if "sm_analytics_runtime_check" in code:\n return await self._complete_cell()\n' \
    "$SITE_PACKAGES/aws_glue_interactive_sessions_kernel/glue_kernel_base/BaseKernel.py"

# Patch glue kernels to use kernel wrapper
COPY patch_glue_pyspark.json /opt/conda/share/jupyter/kernels/glue_pyspark/kernel.json
COPY patch_glue_spark.json /opt/conda/share/jupyter/kernels/glue_spark/kernel.json

USER root
# Generate OSS license/compliance artifacts into the user's home, then remove the
# tooling; the testOSSCompliance helper stays available on PATH.
RUN HOME_DIR="/home/${NB_USER}/licenses" \
    && mkdir -p ${HOME_DIR} \
    && curl -o ${HOME_DIR}/oss_compliance.zip https://aws-dlinfra-utilities.s3.amazonaws.com/oss_compliance.zip \
    && unzip ${HOME_DIR}/oss_compliance.zip -d ${HOME_DIR}/ \
    && cp ${HOME_DIR}/oss_compliance/test/testOSSCompliance /usr/local/bin/testOSSCompliance \
    && chmod +x /usr/local/bin/testOSSCompliance \
    && chmod +x ${HOME_DIR}/oss_compliance/generate_oss_compliance.sh \
    && ${HOME_DIR}/oss_compliance/generate_oss_compliance.sh ${HOME_DIR} python \
    && rm -rf ${HOME_DIR}/oss_compliance*

# Merge in OS directory tree contents.
RUN mkdir -p ${DIRECTORY_TREE_STAGE_DIR}
COPY dirs/ ${DIRECTORY_TREE_STAGE_DIR}/
RUN rsync -a ${DIRECTORY_TREE_STAGE_DIR}/ / && \
    rm -rf ${DIRECTORY_TREE_STAGE_DIR}

# Create logging directories for supervisor
RUN mkdir -p $SAGEMAKER_LOGGING_DIR && \
    chmod a+rw $SAGEMAKER_LOGGING_DIR && \
    mkdir -p ${STUDIO_LOGGING_DIR} && \
    chown ${NB_USER}:${MAMBA_USER} ${STUDIO_LOGGING_DIR}

# Create supervisord runtime directory
RUN mkdir -p /var/run/supervisord && \
    chmod a+rw /var/run/supervisord

# Drop back to the non-root notebook user for the runtime image.
USER $MAMBA_USER
ENV PATH="/opt/conda/bin:/opt/conda/condabin:$PATH"
WORKDIR "/home/${NB_USER}"

# Install Kerberos.
# Make sure no dependency is added/updated
RUN pip install "krb5>=0.5.1,<0.6" && \
    # Fails the build if `pip show krb5` reports any requirements, i.e. if the
    # install pulled in additional dependencies.
    pip show krb5 | grep Require | xargs -i sh -c '[ $(echo {} | cut -d: -f2 | wc -w) -eq 0 ] '

# https://stackoverflow.com/questions/122327
RUN SYSTEM_PYTHON_PATH=$(python3 -c "from __future__ import print_function;import sysconfig; print(sysconfig.get_paths().get('purelib'))") && \
    # Remove SparkRKernel as it's not supported \
    jupyter-kernelspec remove -f -y sparkrkernel && \
    # Patch Sparkmagic lib to support Custom Certificates \
    # https://github.com/jupyter-incubator/sparkmagic/pull/435/files \
    cp -a ${SYSTEM_PYTHON_PATH}/sagemaker_studio_analytics_extension/patches/configuration.py ${SYSTEM_PYTHON_PATH}/sparkmagic/utils/ && \
    cp -a ${SYSTEM_PYTHON_PATH}/sagemaker_studio_analytics_extension/patches/reliablehttpclient.py ${SYSTEM_PYTHON_PATH}/sparkmagic/livyclientlib/reliablehttpclient.py && \
    # Point the Spark kernels at the conda Python and rename them for the UI.
    sed -i 's= "python"= "/opt/conda/bin/python"=g' /opt/conda/share/jupyter/kernels/pysparkkernel/kernel.json /opt/conda/share/jupyter/kernels/sparkkernel/kernel.json && \
    sed -i 's="Spark"="SparkMagic Spark"=g' /opt/conda/share/jupyter/kernels/sparkkernel/kernel.json && \
    sed -i 's="PySpark"="SparkMagic PySpark"=g' /opt/conda/share/jupyter/kernels/pysparkkernel/kernel.json

ENV SHELL=/bin/bash
Lines changed: 53 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,53 @@
# Release notes: 1.5.1

Package | gpu| cpu
---|---|---
python|3.10.13|3.10.13
numpy|1.26.4|1.26.4
jinja2|3.1.3|3.1.3
pandas|2.1.4|2.1.4
altair|5.2.0|5.2.0
boto3|1.28.64|1.28.64
ipython|8.21.0|8.21.0
jupyter-lsp|2.2.2|2.2.2
jupyterlab|4.1.1|4.1.1
amazon-codewhisperer-jupyterlab-ext|2.0.1|2.0.1
jupyter-scheduler|2.5.0|2.5.0
amazon-sagemaker-jupyter-scheduler|3.0.7|3.0.7
scipy|1.11.4|1.11.4
scikit-learn|1.4.0|1.4.0
pip|23.3.2|23.3.2
torchvision|0.15.2|0.15.2
autogluon|0.8.2|0.8.2
ipywidgets|8.1.2|8.1.2
notebook|7.0.7|7.0.7
aws-glue-sessions|1.0.4|1.0.4
conda|23.11.0|23.11.0
fastapi|0.103.2|0.103.2
langchain|0.1.6|0.1.6
jupyter-ai|2.9.1|2.9.1
jupyter-dash|0.4.2|0.4.2
jupyter-server-proxy|4.1.0|4.1.0
jupyterlab-git|0.50.0|0.50.0
jupyterlab-lsp|5.0.3|5.0.3
keras|2.12.0|2.12.0
matplotlib|3.8.2|3.8.2
nodejs|18.18.2|18.18.2
py-xgboost-gpu|1.7.6|
thrift_sasl|0.4.3|0.4.3
pyhive|0.7.0|0.7.0
python-gssapi|1.8.3|1.8.3
python-lsp-server|1.10.0|1.10.0
pytorch-gpu|2.0.0|
sagemaker-headless-execution-driver|0.0.12|0.0.12
sagemaker-jupyterlab-emr-extension|0.1.9|0.1.9
sagemaker-jupyterlab-extension|0.2.0|0.2.0
sagemaker-kernel-wrapper|0.0.2|0.0.2
sagemaker-python-sdk|2.198.1|2.198.1
sagemaker-studio-analytics-extension|0.0.21|0.0.21
sasl|0.3.1|0.3.1
supervisor|4.2.5|4.2.5
tensorflow|2.12.1|2.12.1
uvicorn|0.27.1|0.27.1
pytorch| |2.0.0
py-xgboost-cpu| |1.7.6
Lines changed: 48 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,48 @@
# This file is auto-generated.
conda-forge::langchain[version='>=0.1.5,<0.2.0']
conda-forge::fastapi[version='>=0.103.2,<0.104.0']
conda-forge::uvicorn[version='>=0.27.0,<0.28.0']
conda-forge::pytorch[version='>=2.0.0,<2.1.0']
conda-forge::tensorflow[version='>=2.12.1,<2.13.0']
conda-forge::python[version='>=3.10.13,<3.11.0']
conda-forge::pip[version='>=23.3.2,<23.4.0']
conda-forge::torchvision[version='>=0.15.2,<0.16.0']
conda-forge::numpy[version='>=1.26.4,<1.27.0']
conda-forge::pandas[version='>=2.1.4,<2.2.0']
conda-forge::scikit-learn[version='>=1.4.0,<1.5.0']
conda-forge::jinja2[version='>=3.1.3,<3.2.0']
conda-forge::matplotlib[version='>=3.8.2,<3.9.0']
conda-forge::sagemaker-headless-execution-driver[version='>=0.0.12,<0.1.0']
conda-forge::ipython[version='>=8.21.0,<8.22.0']
conda-forge::scipy[version='>=1.11.4,<1.12.0']
conda-forge::keras[version='>=2.12.0,<2.13.0']
conda-forge::py-xgboost-cpu[version='>=1.7.6,<1.8.0']
conda-forge::jupyterlab[version='>=4.1.0,<4.2.0']
conda-forge::ipywidgets[version='>=8.1.1,<8.2.0']
conda-forge::conda[version='>=23.11.0,<23.12.0']
conda-forge::boto3[version='>=1.28.64,<1.29.0']
conda-forge::sagemaker-python-sdk[version='>=2.198.1,<2.199.0']
conda-forge::supervisor[version='>=4.2.5,<4.3.0']
conda-forge::autogluon[version='>=0.8.2,<0.9.0']
conda-forge::aws-glue-sessions[version='>=1.0.4,<1.1.0']
conda-forge::sagemaker-kernel-wrapper[version='>=0.0.2,<0.1.0']
conda-forge::jupyter-ai[version='>=2.9.1,<2.10.0']
conda-forge::jupyter-scheduler[version='>=2.5.0,<2.6.0']
conda-forge::nodejs[version='>=18.18.2,<18.19.0']
conda-forge::jupyter-lsp[version='>=2.2.2,<2.3.0']
conda-forge::jupyterlab-lsp[version='>=5.0.3,<5.1.0']
conda-forge::python-lsp-server[version='>=1.10.0,<1.11.0']
conda-forge::notebook[version='>=7.0.7,<7.1.0']
conda-forge::altair[version='>=5.2.0,<5.3.0']
conda-forge::sagemaker-studio-analytics-extension[version='>=0.0.21,<0.1.0']
conda-forge::jupyter-dash[version='>=0.4.2,<0.5.0']
conda-forge::sagemaker-jupyterlab-extension[version='>=0.2.0,<0.3.0']
conda-forge::sagemaker-jupyterlab-emr-extension[version='>=0.1.9,<0.2.0']
conda-forge::amazon-sagemaker-jupyter-scheduler[version='>=3.0.6,<3.1.0']
conda-forge::jupyter-server-proxy[version='>=4.1.0,<4.2.0']
conda-forge::amazon-codewhisperer-jupyterlab-ext[version='>=2.0.1,<2.1.0']
conda-forge::jupyterlab-git[version='>=0.50.0,<0.51.0']
conda-forge::sasl[version='>=0.3.1,<0.4.0']
conda-forge::thrift_sasl[version='>=0.4.3,<0.5.0']
conda-forge::pyhive[version='>=0.7.0,<0.8.0']
conda-forge::python-gssapi[version='>=1.8.3,<1.9.0']

0 commit comments

Comments
 (0)