Compare commits


1 commit

Author: github-actions
SHA1: 7adbbe961a
Message: Automated documentation update [skip ci]
Date: 2023-03-10 16:54:03 +00:00
184 changed files with 1342 additions and 4915 deletions


@@ -1,17 +1,18 @@
 {
-    "image": "mcr.microsoft.com/devcontainers/javascript-node:1-18",
-    "features": {
-        "ghcr.io/devcontainers/features/docker-in-docker:2": {}
-    },
-    "postCreateCommand": "npm install -g @devcontainers/cli",
-    "hostRequirements": {
-        "cpus": 4
-    },
+    "image": "mcr.microsoft.com/vscode/devcontainers/javascript-node:0-18",
+    "remoteUser": "node",
     "customizations": {
         "vscode": {
             "extensions": [
                 "mads-hartmann.bash-ide-vscode"
             ]
         }
+    },
+    "features": {
+        "ghcr.io/devcontainers/features/docker-in-docker:2": {}
+    },
+    "postCreateCommand": "npm install -g @devcontainers/cli",
+    "hostRequirements": {
+        "cpus": 4
     }
 }
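The configuration above wires the repo's own docker-in-docker Feature into its dev container and installs the devcontainer CLI through `postCreateCommand`. A minimal sketch of exercising that CLI against the same configuration from a checkout of the repo — assuming Docker and Node.js are on the host and the file sits at the conventional `.devcontainer/` location:

```bash
# Install the CLI the same way the postCreateCommand above does
npm install -g @devcontainers/cli

# Build and start the dev container described by .devcontainer/devcontainer.json
devcontainer build --workspace-folder .
devcontainer up --workspace-folder .
```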


@@ -1,37 +0,0 @@
name: "Stress test - Docker in Docker"
on:
pull_request:
paths:
- 'src/docker-in-docker/**'
workflow_dispatch:
jobs:
test:
strategy:
matrix:
test-pass: [ 1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50 ]
fail-fast: false
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: "Install latest devcontainer CLI"
run: npm install -g @devcontainers/cli
- name: "Generating tests for 'docker-in-docker' which validates if docker daemon is running"
run: devcontainer features test --skip-scenarios -f docker-in-docker -i mcr.microsoft.com/devcontainers/base:ubuntu .
test-onCreate:
strategy:
matrix:
test-pass: [ 1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50 ]
fail-fast: false
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: "Install latest devcontainer CLI"
run: npm install -g @devcontainers/cli
- name: "Generating tests for 'docker-in-docker' which validates if docker daemon is available within 'onCreateCommand'"
run: devcontainer features test -f docker-in-docker --skip-autogenerated --filter "docker_with_on_create_command"
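Both jobs in the deleted stress-test workflow drive the same devcontainer CLI test runner, repeated 50 times per matrix leg. A rough local equivalent of a few passes, using only flags that appear in the workflow (base image, feature name, and scenario filter are taken verbatim from it):

```bash
npm install -g @devcontainers/cli

# Repeat the docker-in-docker daemon check a few times, mirroring the matrix
for pass in 1 2 3; do
    echo "Test pass ${pass}"
    devcontainer features test --skip-scenarios -f docker-in-docker \
        -i mcr.microsoft.com/devcontainers/base:ubuntu .
done

# Scenario that validates the daemon inside 'onCreateCommand'
devcontainer features test -f docker-in-docker --skip-autogenerated \
    --filter "docker_with_on_create_command"
```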


@@ -44,8 +44,9 @@ jobs:
         [
           "ubuntu:focal",
           "ubuntu:jammy",
+          "ubuntu:bionic",
           "debian:11",
-          "debian:12",
+          "debian:10",
           "mcr.microsoft.com/devcontainers/base:ubuntu",
           "mcr.microsoft.com/devcontainers/base:debian",
         ]
@@ -111,4 +112,4 @@ jobs:
       run: npm install -g @devcontainers/cli
     - name: "Testing global scenarios"
       run: devcontainer features test --global-scenarios-only .


@@ -51,8 +51,9 @@ jobs:
         [
           "ubuntu:focal",
           "ubuntu:jammy",
+          "ubuntu:bionic",
           "debian:11",
-          "debian:12",
+          "debian:10",
           "mcr.microsoft.com/devcontainers/base:ubuntu",
           "mcr.microsoft.com/devcontainers/base:debian",
         ]
@@ -79,4 +80,4 @@ jobs:
       run: npm install -g @devcontainers/cli
     - name: "Testing '${{ matrix.features }}' scenarios"
       run: devcontainer features test -f ${{ matrix.features }} --skip-autogenerated .
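The two workflow hunks above adjust the same base-image matrix (adding `ubuntu:bionic`, swapping `debian:12` for `debian:10`) that feeds `devcontainer features test`. A hedged sketch of running one matrix cell by hand, with `azure-cli` standing in for the `${{ matrix.features }}` expression:

```bash
# 'azure-cli' is just one example value of the matrix.features variable
devcontainer features test -f azure-cli --skip-autogenerated .

# The companion job runs only the repo-wide scenarios
devcontainer features test --global-scenarios-only .
```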


@@ -1,52 +0,0 @@
name: "Updates vendor 'dotnet-install' script"
on:
workflow_dispatch:
schedule:
- cron: '0 0 * * 0' # Runs every Sunday at midnight UTC (adjust as needed)
jobs:
fetch-latest-dotnet-install:
runs-on: ubuntu-latest
environment: documentation # grants access to secrets.PAT, for creating pull requests
steps:
- uses: actions/checkout@v3
- name: Run fetch-latest-dotnet-install.sh
run: src/dotnet/scripts/fetch-latest-dotnet-install.sh
- name: Create a PR for dotnet-install.sh
id: push_image_info
env:
GITHUB_TOKEN: ${{ secrets.PAT }}
run: |
set -e
echo "Start."
# Configure git and Push updates
git config --global user.email github-actions@github.com
git config --global user.name github-actions
git config pull.rebase false
branch=automated-script-update-$GITHUB_RUN_ID
git checkout -b $branch
message='[Updates] Automated vendor 'dotnet-install' script'
# Add / update and commit
git add src/dotnet/scripts/vendor/dotnet-install.sh
git commit -m 'Automated dotnet-install script update' || export NO_UPDATES=true
# Bump version and push
if [ "$NO_UPDATES" != "true" ] ; then
echo "$(jq --indent 4 '.version = (.version | split(".") | map(tonumber) | .[2] += 1 | join("."))' src/dotnet/devcontainer-feature.json)" > src/dotnet/devcontainer-feature.json
git add src/dotnet/devcontainer-feature.json
git commit -m 'Bump version'
git push origin "$branch"
gh api \
--method POST \
-H "Accept: application/vnd.github+json" \
/repos/${GITHUB_REPOSITORY}/pulls \
-f title="$message" \
-f body="$message" \
-f head="$branch" \
-f base="$GITHUB_REF_NAME"
fi
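The version bump in the deleted workflow relies on a single jq filter: split the semver string, increment the patch component, and join it back. A quick illustration of that filter on a hypothetical `devcontainer-feature.json` payload (the version number here is made up):

```bash
echo '{ "version": "2.0.4" }' | \
    jq --indent 4 '.version = (.version | split(".") | map(tonumber) | .[2] += 1 | join("."))'
# bumps the patch component: "2.0.4" -> "2.0.5"
```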


@@ -1,60 +0,0 @@
**Thanks for taking the time to contribute! ❤️**
All types of contributions are encouraged and valued, no matter if it's a bug
report 🐛, a feature request 💡, or a Pull Request 🚀.
- **❓ I have a question:** Ask in [our dev container community Slack channel]
- **🐛 I found a bug:** [Open an Issue]
- **💡 I have an idea:** [Open an Issue]
- **💻 I want to code:** See below
If you like the project, but just don't have time to contribute, that's OK too!
You can also star the project ⭐, rave about it online 💬, or add a link to us
🔗 in your project's readme.
⚠️ You must never report security 🔒 related issues, vulnerabilities or bugs
including sensitive information to the issue tracker, or elsewhere in public.
Instead, please [report them to the Microsoft Security Response Center]. You can
read more about our security policy in [`SECURITY.md`].
## Contributing code
👨‍💼 This repository is a collection of dev container Features managed by dev
container spec maintainers. This repository will **only accept improvements and
bug fixes** for the [current set of maintained Features].
🚀 If you're looking to create a new Feature, then we encourage adding it to a
separate repository of your control. The [devcontainers/feature-starter]
repository has more guidance on self authoring Features.
💡 Once you've self authored the Features and if you'd like to share them with
the community, then we'd recommend [adding it to the index].
👷‍ If you've identified an issue and you want to fix it, here's how you can get
started:
1. 🔀 Fork the repo
2. 💻 Open the repo in your editor
3. 👨‍💻 Add your changes to your workspace
4. ✨ [Test your changes using `devcontainer features test`] to make sure
everything still works
5. 🆚 Bump the version of the feature you changed according to [semver]
6. 🔖 Commit & push your changes
7. 🔁 Open a PR to get your changes merged
8. 🚀 Profit!
👩‍⚖️ When contributing code to this project, you may be asked to agree to our
[Contributor License Agreement].
<!-- prettier-ignore-start -->
[our dev container community Slack channel]: https://aka.ms/devcontainer_community
[open an issue]: https://github.com/devcontainers/features/issues/new
[current set of maintained Features]: https://github.com/devcontainers/features/tree/main/src
[devcontainers/feature-starter]: https://github.com/devcontainers/feature-starter#readme
[adding it to the index]: https://github.com/devcontainers/feature-starter#adding-features-to-the-index
[report them to the Microsoft Security Response Center]: https://msrc.microsoft.com/create-report
[`SECURITY.md`]: https://github.com/devcontainers/spec/blob/main/SECURITY.md
[contributor license agreement]: https://opensource.microsoft.com/cla/
[Test your changes using `devcontainer features test`]: https://github.com/devcontainers/cli/blob/main/docs/features/test.md
[semver]: https://semver.org/
<!-- prettier-ignore-end -->
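The contribution steps in the removed CONTRIBUTING.md boil down to "test the Feature you touched, then bump its version". A hedged sketch of that loop for a hypothetical bug fix to the aws-cli Feature, reusing the test command and the jq patch-bump pattern that appear elsewhere in this diff:

```bash
# Run the Feature's tests from the repo root (devcontainer CLI required)
devcontainer features test -f aws-cli .

# For a bug fix, bump the patch version in the Feature's manifest before opening a PR
echo "$(jq --indent 4 '.version = (.version | split(".") | map(tonumber) | .[2] += 1 | join("."))' src/aws-cli/devcontainer-feature.json)" > src/aws-cli/devcontainer-feature.json
```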


@@ -109,7 +109,3 @@ We are eager to hear your feedback on self-authoring! Please provide comments a
 This repository will accept improvement and bug fix contributions related to the
 [current set of maintained Features](./src).
-
-🤝 You can read more about how to contribute in [`CONTRIBUTING.md`]. ❤️
-
-[`CONTRIBUTING.md`]: CONTRIBUTING.md


@@ -17,12 +17,6 @@ Installs the AWS CLI along with needed dependencies. Useful for base Dockerfiles
 |-----|-----|-----|-----|
 | version | Select or enter an AWS CLI version. | string | latest |
-
-## Customizations
-
-### VS Code Extensions
-
-- `AmazonWebServices.aws-toolkit-vscode`

 Available versions of the AWS CLI can be found here: https://github.com/aws/aws-cli/blob/v2/CHANGELOG.rst.

 ## OS Support


@@ -1,6 +1,6 @@
 {
     "id": "aws-cli",
-    "version": "1.0.7",
+    "version": "1.0.6",
     "name": "AWS CLI",
     "documentationURL": "https://github.com/devcontainers/features/tree/main/src/aws-cli",
     "description": "Installs the AWS CLI along with needed dependencies. Useful for base Dockerfiles that often are missing required install dependencies like gpg.",


@@ -50,6 +50,21 @@ if [ "$(id -u)" -ne 0 ]; then
     exit 1
 fi

+# Get central common setting
+get_common_setting() {
+    if [ "${common_settings_file_loaded}" != "true" ]; then
+        curl -sfL "https://aka.ms/vscode-dev-containers/script-library/settings.env" 2>/dev/null -o /tmp/vsdc-settings.env || echo "Could not download settings file. Skipping."
+        common_settings_file_loaded=true
+    fi
+    if [ -f "/tmp/vsdc-settings.env" ]; then
+        local multi_line=""
+        if [ "$2" = "true" ]; then multi_line="-z"; fi
+        local result="$(grep ${multi_line} -oP "$1=\"?\K[^\"]+" /tmp/vsdc-settings.env | tr -d '\0')"
+        if [ ! -z "${result}" ]; then declare -g $1="${result}"; fi
+    fi
+    echo "$1=${!1}"
+}
+
 apt_get_update()
 {
     echo "Running apt-get update..."
@@ -74,6 +89,9 @@ check_packages curl ca-certificates gnupg2 dirmngr unzip
 verify_aws_cli_gpg_signature() {
     local filePath=$1
     local sigFilePath=$2
+
+    get_common_setting AWSCLI_GPG_KEY
+    get_common_setting AWSCLI_GPG_KEY_MATERIAL true
     local awsGpgKeyring=aws-cli-public-key.gpg
     echo "${AWSCLI_GPG_KEY_MATERIAL}" | gpg --dearmor > "./${awsGpgKeyring}"


@@ -18,13 +18,6 @@ Installs the Azure CLI along with needed dependencies. Useful for base Dockerfil
 | version | Select or enter an Azure CLI version. (Available versions may vary by Linux distribution.) | string | latest |
 | extensions | Optional comma separated list of Azure CLI extensions to install in profile. | string | - |
 | installBicep | Optionally install Azure Bicep | boolean | false |
-| installUsingPython | Install Azure CLI using Python instead of pipx | boolean | false |
-
-## Customizations
-
-### VS Code Extensions
-
-- `ms-vscode.azurecli`


@@ -1,6 +1,6 @@
 {
     "id": "azure-cli",
-    "version": "1.2.2",
+    "version": "1.0.8",
     "name": "Azure CLI",
     "documentationURL": "https://github.com/devcontainers/features/tree/main/src/azure-cli",
     "description": "Installs the Azure CLI along with needed dependencies. Useful for base Dockerfiles that often are missing required install dependencies like gpg.",
@@ -22,11 +22,6 @@
             "type": "boolean",
             "description": "Optionally install Azure Bicep",
             "default": false
-        },
-        "installUsingPython": {
-            "type": "boolean",
-            "description": "Install Azure CLI using Python instead of pipx",
-            "default": false
         }
     },
     "customizations": {


@@ -15,7 +15,7 @@ rm -rf /var/lib/apt/lists/*
 AZ_VERSION=${VERSION:-"latest"}
 AZ_EXTENSIONS=${EXTENSIONS}
 AZ_INSTALLBICEP=${INSTALLBICEP:-false}
-INSTALL_USING_PYTHON=${INSTALL_USING_PYTHON:-true}
+
 MICROSOFT_GPG_KEYS_URI="https://packages.microsoft.com/keys/microsoft.asc"
 AZCLI_ARCHIVE_ARCHITECTURES="amd64"
 AZCLI_ARCHIVE_VERSION_CODENAMES="stretch buster bullseye bionic focal jammy"
@@ -25,12 +25,20 @@ if [ "$(id -u)" -ne 0 ]; then
     exit 1
 fi

-if [ -z "${_REMOTE_USER}" ]; then
-    echo -e 'Feature script must be executed by a tool that implements the dev container specification. See https://containers.dev/ for more information.'
-    exit 1
-fi
-
-echo "Effective REMOTE_USER: ${_REMOTE_USER}"
+# Get central common setting
+get_common_setting() {
+    if [ "${common_settings_file_loaded}" != "true" ]; then
+        curl -sfL "https://aka.ms/vscode-dev-containers/script-library/settings.env" 2>/dev/null -o /tmp/vsdc-settings.env || echo "Could not download settings file. Skipping."
+        common_settings_file_loaded=true
+    fi
+    if [ -f "/tmp/vsdc-settings.env" ]; then
+        local multi_line=""
+        if [ "$2" = "true" ]; then multi_line="-z"; fi
+        local result="$(grep ${multi_line} -oP "$1=\"?\K[^\"]+" /tmp/vsdc-settings.env | tr -d '\0')"
+        if [ ! -z "${result}" ]; then declare -g $1="${result}"; fi
+    fi
+    echo "$1=${!1}"
+}

 apt_get_update()
 {
@@ -49,6 +57,8 @@ check_packages() {
     fi
 }

+export DEBIAN_FRONTEND=noninteractive
+
 # Soft version matching that resolves a version for a given package in the *current apt-cache*
 # Return value is stored in first argument (the unprocessed version)
 apt_cache_version_soft_match() {
@@ -95,6 +105,7 @@ install_using_apt() {
     # Install dependencies
     check_packages apt-transport-https curl ca-certificates gnupg2 dirmngr
     # Import key safely (new 'signed-by' method rather than deprecated apt-key approach) and install
+    get_common_setting MICROSOFT_GPG_KEYS_URI
     curl -sSL ${MICROSOFT_GPG_KEYS_URI} | gpg --dearmor > /usr/share/keyrings/microsoft-archive-keyring.gpg
     echo "deb [arch=${architecture} signed-by=/usr/share/keyrings/microsoft-archive-keyring.gpg] https://packages.microsoft.com/repos/azure-cli/ ${VERSION_CODENAME} main" > /etc/apt/sources.list.d/azure-cli.list
     apt-get update
@@ -116,43 +127,7 @@ install_using_apt() {
     fi
 }

-install_using_pip_strategy() {
-    local ver=""
-    if [ "${AZ_VERSION}" = "latest" ] || [ "${AZ_VERSION}" = "lts" ] || [ "${AZ_VERSION}" = "stable" ]; then
-        # Empty, meaning grab the "latest" in the apt repo
-        ver=""
-    else
-        ver="==${AZ_VERSION}"
-    fi
-
-    if [ "${INSTALL_USING_PYTHON}" = "true" ]; then
-        install_with_complete_python_installation "${ver}" || install_with_pipx "${ver}" || return 1
-    else
-        install_with_pipx "${ver}" || install_with_complete_python_installation "${ver}" || return 1
-    fi
-}
-
-install_with_pipx() {
-    echo "(*) Attempting to install globally with pipx..."
-    local ver="$1"
-    export
-    local
-    if ! type pipx > /dev/null 2>&1; then
-        echo "(*) Installing pipx..."
-        check_packages pipx
-        pipx ensurepath # Ensures PIPX_BIN_DIR is on the PATH
-    fi
-    PIPX_HOME="/usr/local/pipx" \
-    PIPX_BIN_DIR=/usr/local/bin \
-        pipx install azure-cli${ver}
-    echo "(*) Finished installing globally with pipx."
-}
-
-install_with_complete_python_installation() {
-    local ver="$1"
+install_using_pip() {
     echo "(*) No pre-built binaries available in apt-cache. Installing via pip3."
     if ! dpkg -s python3-minimal python3-pip libffi-dev python3-venv > /dev/null 2>&1; then
         apt_get_update
@@ -169,20 +144,25 @@ install_with_complete_python_installation() {
         pipx_bin=/tmp/pip-tmp/bin/pipx
     fi

+    if [ "${AZ_VERSION}" = "latest" ] || [ "${AZ_VERSION}" = "lts" ] || [ "${AZ_VERSION}" = "stable" ]; then
+        # Empty, meaning grab the "latest" in the apt repo
+        ver=""
+    else
+        ver="==${AZ_VERSION}"
+    fi
+
     set +e
     ${pipx_bin} install --pip-args '--no-cache-dir --force-reinstall' -f azure-cli${ver}

     # Fail gracefully
     if [ "$?" != 0 ]; then
-        echo "Could not install azure-cli${ver} via pip3"
+        echo "Could not install azure-cli${ver} via pip"
         rm -rf /tmp/pip-tmp
         return 1
     fi
     set -e
 }

-export DEBIAN_FRONTEND=noninteractive
-
 # See if we're on x86_64 and if so, install via apt-get, otherwise use pip3
 echo "(*) Installing Azure CLI..."
 . /etc/os-release
@@ -196,7 +176,7 @@ fi
 if [ "${use_pip}" = "true" ]; then
     AZ_VERSION=${CACHED_AZURE_VERSION}
-    install_using_pip_strategy
+    install_using_pip

     if [ "$?" != 0 ]; then
         echo "Please provide a valid version for your distribution ${ID} ${VERSION_CODENAME} (${architecture})."


@@ -2,11 +2,6 @@
 This Feature should work on recent versions of Debian/Ubuntu, RedHat Enterprise Linux, Fedora, RockyLinux, and Alpine Linux.

-## Using with dev container images
-
-This Feature is used in many of the [dev container images](https://github.com/search?q=repo%3Adevcontainers%2Fimages+%22ghcr.io%2Fdevcontainers%2Ffeatures%2Fcommon-utils%22&type=code), as a result
-these images have already allocated UID & GID 1000. Attempting to add this Feature with UID 1000 and/or GID 1000 on top of such a dev container image will result in an error when building the dev container.
-
 ## Customizing the command prompt

 By default, this script provides a custom command prompt that includes information about the git repository for the current folder. However, with certain large repositories, this can result in a slow command prompt due to the performance of needed git operations.
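The note being removed above warns that the devcontainers images already allocate UID/GID 1000, which is why adding this Feature with an explicit `1000` fails on top of them. A quick way to see who already holds UID 1000 in a given image — the image tag is just one of the bases mentioned elsewhere in this diff, and the awk lookup mirrors the one used by the install script:

```bash
docker run --rm mcr.microsoft.com/devcontainers/base:ubuntu \
    awk -F ':' '$3 == 1000 { print $1 }' /etc/passwd
# prints the user that already owns UID 1000 (e.g. "vscode")
```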


@@ -18,7 +18,6 @@ Installs a set of common command line utilities, Oh My Zsh!, and sets up a non-r
 | installZsh | Install ZSH? | boolean | true |
 | configureZshAsDefaultShell | Change default shell to ZSH? | boolean | false |
 | installOhMyZsh | Install Oh My Zsh!? | boolean | true |
-| installOhMyZshConfig | Allow installing the default dev container .zshrc templates? | boolean | true |
 | upgradePackages | Upgrade OS packages? | boolean | true |
 | username | Enter name of a non-root user to configure or none to skip | string | automatic |
 | userUid | Enter UID for non-root user | string | automatic |
@@ -29,11 +28,6 @@ Installs a set of common command line utilities, Oh My Zsh!, and sets up a non-r
 This Feature should work on recent versions of Debian/Ubuntu, RedHat Enterprise Linux, Fedora, RockyLinux, and Alpine Linux.

-## Using with dev container images
-
-This Feature is used in many of the [dev container images](https://github.com/search?q=repo%3Adevcontainers%2Fimages+%22ghcr.io%2Fdevcontainers%2Ffeatures%2Fcommon-utils%22&type=code), as a result
-these images have already allocated UID & GID 1000. Attempting to add this Feature with UID 1000 and/or GID 1000 on top of such a dev container image will result in an error when building the dev container.
-
 ## Customizing the command prompt

 By default, this script provides a custom command prompt that includes information about the git repository for the current folder. However, with certain large repositories, this can result in a slow command prompt due to the performance of needed git operations.


@@ -1,6 +1,6 @@
 {
     "id": "common-utils",
-    "version": "2.3.1",
+    "version": "2.0.8",
     "name": "Common Utilities",
     "documentationURL": "https://github.com/devcontainers/features/tree/main/src/common-utils",
     "description": "Installs a set of common command line utilities, Oh My Zsh!, and sets up a non-root user.",
@@ -20,11 +20,6 @@
             "default": true,
             "description": "Install Oh My Zsh!?"
         },
-        "installOhMyZshConfig": {
-            "type": "boolean",
-            "default": true,
-            "description": "Allow installing the default dev container .zshrc templates?"
-        },
         "upgradePackages": {
             "type": "boolean",
             "default": true,
@@ -45,7 +40,7 @@
         "userUid": {
             "type": "string",
             "proposals": [
-                "1001",
+                "1000",
                 "automatic"
             ],
             "default": "automatic",
@@ -54,7 +49,7 @@
         "userGid": {
             "type": "string",
             "proposals": [
-                "1001",
+                "1000",
                 "automatic"
             ],
             "default": "automatic",


@@ -12,7 +12,6 @@ set -e
 INSTALL_ZSH="${INSTALLZSH:-"true"}"
 CONFIGURE_ZSH_AS_DEFAULT_SHELL="${CONFIGUREZSHASDEFAULTSHELL:-"false"}"
 INSTALL_OH_MY_ZSH="${INSTALLOHMYZSH:-"true"}"
-INSTALL_OH_MY_ZSH_CONFIG="${INSTALLOHMYZSHCONFIG:-"true"}"
 UPGRADE_PACKAGES="${UPGRADEPACKAGES:-"true"}"
 USERNAME="${USERNAME:-"automatic"}"
 USER_UID="${UID:-"automatic"}"


@@ -12,7 +12,6 @@ set -e
 INSTALL_ZSH="${INSTALLZSH:-"true"}"
 CONFIGURE_ZSH_AS_DEFAULT_SHELL="${CONFIGUREZSHASDEFAULTSHELL:-"false"}"
 INSTALL_OH_MY_ZSH="${INSTALLOHMYZSH:-"true"}"
-INSTALL_OH_MY_ZSH_CONFIG="${INSTALLOHMYZSHCONFIG:-"true"}"
 UPGRADE_PACKAGES="${UPGRADEPACKAGES:-"true"}"
 USERNAME="${USERNAME:-"automatic"}"
 USER_UID="${USERUID:-"automatic"}"
@@ -28,10 +27,7 @@ install_debian_packages() {
     # Ensure apt is in non-interactive to avoid prompts
     export DEBIAN_FRONTEND=noninteractive

-    local package_list=""
-    if [ "${PACKAGES_ALREADY_INSTALLED}" != "true" ]; then
-        package_list="${package_list} \
-        apt-utils \
+    local package_list="apt-utils \
         openssh-client \
         gnupg2 \
         dirmngr \
@@ -72,35 +68,7 @@ install_debian_packages() {
         manpages \
         manpages-dev \
         init-system-helpers"
-
-        # Include libssl1.1 if available
-        if [[ ! -z $(apt-cache --names-only search ^libssl1.1$) ]]; then
-            package_list="${package_list} libssl1.1"
-        fi
-
-        # Include libssl3 if available
-        if [[ ! -z $(apt-cache --names-only search ^libssl3$) ]]; then
-            package_list="${package_list} libssl3"
-        fi
-
-        # Include appropriate version of libssl1.0.x if available
-        local libssl_package=$(dpkg-query -f '${db:Status-Abbrev}\t${binary:Package}\n' -W 'libssl1\.0\.?' 2>&1 || echo '')
-        if [ "$(echo "$libssl_package" | grep -o 'libssl1\.0\.[0-9]:' | uniq | sort | wc -l)" -eq 0 ]; then
-            if [[ ! -z $(apt-cache --names-only search ^libssl1.0.2$) ]]; then
-                # Debian 9
-                package_list="${package_list} libssl1.0.2"
-            elif [[ ! -z $(apt-cache --names-only search ^libssl1.0.0$) ]]; then
-                # Ubuntu 18.04
-                package_list="${package_list} libssl1.0.0"
-            fi
-        fi
-
-        # Include git if not already installed (may be more recent than distro version)
-        if ! type git > /dev/null 2>&1; then
-            package_list="${package_list} git"
-        fi
-    fi

     # Needed for adding manpages-posix and manpages-posix-dev which are non-free packages in Debian
     if [ "${ADD_NON_FREE_PACKAGES}" = "true" ]; then
         # Bring in variables from /etc/os-release like VERSION_CODENAME
@@ -110,7 +78,7 @@ install_debian_packages() {
         sed -i -E "s/deb-src http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list
         sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
         sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
         sed -i "s/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list
         sed -i "s/deb-src http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list
         # Handle bullseye location for security https://www.debian.org/releases/bullseye/amd64/release-notes/ch-information.en.html
         sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main contrib non-free/" /etc/apt/sources.list
@@ -119,6 +87,33 @@
         package_list="${package_list} manpages-posix manpages-posix-dev"
     fi

+    # Include libssl1.1 if available
+    if [[ ! -z $(apt-cache --names-only search ^libssl1.1$) ]]; then
+        package_list="${package_list} libssl1.1"
+    fi
+
+    # Include libssl3 if available
+    if [[ ! -z $(apt-cache --names-only search ^libssl3$) ]]; then
+        package_list="${package_list} libssl3"
+    fi
+
+    # Include appropriate version of libssl1.0.x if available
+    local libssl_package=$(dpkg-query -f '${db:Status-Abbrev}\t${binary:Package}\n' -W 'libssl1\.0\.?' 2>&1 || echo '')
+    if [ "$(echo "$libssl_package" | grep -o 'libssl1\.0\.[0-9]:' | uniq | sort | wc -l)" -eq 0 ]; then
+        if [[ ! -z $(apt-cache --names-only search ^libssl1.0.2$) ]]; then
+            # Debian 9
+            package_list="${package_list} libssl1.0.2"
+        elif [[ ! -z $(apt-cache --names-only search ^libssl1.0.0$) ]]; then
+            # Ubuntu 18.04
+            package_list="${package_list} libssl1.0.0"
+        fi
+    fi
+
+    # Include git if not already installed (may be more recent than distro version)
+    if ! type git > /dev/null 2>&1; then
+        package_list="${package_list} git"
+    fi
+
     # Install the list of packages
     echo "Packages to verify are installed: ${package_list}"
     rm -rf /var/lib/apt/lists/*
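The block re-added above (it was removed from its earlier position in the same function) picks whichever libssl generation the distro actually ships by probing the apt cache. The probe is just an anchored, name-only search; a trimmed-down sketch of the same check:

```bash
# apt-cache prints a match only when the package exists in the current cache
if [[ -n $(apt-cache --names-only search '^libssl3$') ]]; then
    echo "libssl3 available"
elif [[ -n $(apt-cache --names-only search '^libssl1.1$') ]]; then
    echo "libssl1.1 available"
else
    echo "falling back to a libssl1.0.x variant"
fi
```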
@ -138,83 +133,66 @@ install_debian_packages() {
# Ensure at least the en_US.UTF-8 UTF-8 locale is available = common need for both applications and things like the agnoster ZSH theme. # Ensure at least the en_US.UTF-8 UTF-8 locale is available = common need for both applications and things like the agnoster ZSH theme.
if [ "${LOCALE_ALREADY_SET}" != "true" ] && ! grep -o -E '^\s*en_US.UTF-8\s+UTF-8' /etc/locale.gen > /dev/null; then if [ "${LOCALE_ALREADY_SET}" != "true" ] && ! grep -o -E '^\s*en_US.UTF-8\s+UTF-8' /etc/locale.gen > /dev/null; then
echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen
locale-gen locale-gen
LOCALE_ALREADY_SET="true" LOCALE_ALREADY_SET="true"
fi fi
PACKAGES_ALREADY_INSTALLED="true"
# Clean up # Clean up
apt-get -y clean apt-get -y clean
rm -rf /var/lib/apt/lists/* rm -rf /var/lib/apt/lists/*
} }
# RedHat / RockyLinux / CentOS / Fedora packages # RedHat / RockyLinux / CentOS / Fedora packages
install_redhat_packages() { install_redhat_packages() {
local package_list="" local package_list="\
local remove_epel="false" gawk \
local install_cmd=dnf openssh-clients \
if ! type dnf > /dev/null 2>&1; then gnupg2 \
install_cmd=yum iproute \
procps \
lsof \
net-tools \
psmisc \
wget \
ca-certificates \
rsync \
unzip \
zip \
nano \
vim-minimal \
less \
jq \
openssl-libs \
krb5-libs \
libicu \
zlib \
sudo \
sed \
grep \
which \
man-db \
strace"
# rockylinux:9 installs 'curl-minimal' which clashes with 'curl'
# Install 'curl' for every OS except this rockylinux:9
if [[ "${ID}" = "rocky" ]] && [[ "${VERSION}" != *"9."* ]]; then
package_list="${package_list} curl"
fi fi
if [ "${PACKAGES_ALREADY_INSTALLED}" != "true" ]; then # Install OpenSSL 1.0 compat if needed
package_list="${package_list} \ if ${install_cmd} -q list compat-openssl10 >/dev/null 2>&1; then
gawk \ package_list="${package_list} compat-openssl10"
openssh-clients \ fi
gnupg2 \
iproute \
procps \
lsof \
net-tools \
psmisc \
wget \
ca-certificates \
rsync \
unzip \
zip \
nano \
vim-minimal \
less \
jq \
openssl-libs \
krb5-libs \
libicu \
zlib \
sudo \
sed \
grep \
which \
man-db \
strace"
# rockylinux:9 installs 'curl-minimal' which clashes with 'curl' # Install lsb_release if available
# Install 'curl' for every OS except this rockylinux:9 if ${install_cmd} -q list redhat-lsb-core >/dev/null 2>&1; then
if [[ "${ID}" = "rocky" ]] && [[ "${VERSION}" != *"9."* ]]; then package_list="${package_list} redhat-lsb-core"
package_list="${package_list} curl" fi
fi
# Install OpenSSL 1.0 compat if needed # Install git if not already installed (may be more recent than distro version)
if ${install_cmd} -q list compat-openssl10 >/dev/null 2>&1; then if ! type git > /dev/null 2>&1; then
package_list="${package_list} compat-openssl10" package_list="${package_list} git"
fi
# Install lsb_release if available
if ${install_cmd} -q list redhat-lsb-core >/dev/null 2>&1; then
package_list="${package_list} redhat-lsb-core"
fi
# Install git if not already installed (may be more recent than distro version)
if ! type git > /dev/null 2>&1; then
package_list="${package_list} git"
fi
# Install EPEL repository if needed (required to install 'jq' for CentOS)
if ! ${install_cmd} -q list jq >/dev/null 2>&1; then
${install_cmd} -y install epel-release
remove_epel="true"
fi
fi fi
# Install zsh if needed # Install zsh if needed
@ -222,82 +200,73 @@ install_redhat_packages() {
package_list="${package_list} zsh" package_list="${package_list} zsh"
fi fi
if [ -n "${package_list}" ]; then local install_cmd=dnf
${install_cmd} -y install ${package_list} if ! type dnf > /dev/null 2>&1; then
install_cmd=yum
fi fi
${install_cmd} -y install ${package_list}
# Get to latest versions of all packages # Get to latest versions of all packages
if [ "${UPGRADE_PACKAGES}" = "true" ]; then if [ "${UPGRADE_PACKAGES}" = "true" ]; then
${install_cmd} upgrade -y ${install_cmd} upgrade -y
fi fi
if [[ "${remove_epel}" = "true" ]]; then
${install_cmd} -y remove epel-release
fi
PACKAGES_ALREADY_INSTALLED="true"
} }
# Alpine Linux packages # Alpine Linux packages
install_alpine_packages() { install_alpine_packages() {
apk update apk update
apk add --no-cache \
openssh-client \
gnupg \
procps \
lsof \
htop \
net-tools \
psmisc \
curl \
wget \
rsync \
ca-certificates \
unzip \
zip \
nano \
vim \
less \
jq \
libgcc \
libstdc++ \
krb5-libs \
libintl \
libssl1.1 \
lttng-ust \
tzdata \
userspace-rcu \
zlib \
sudo \
coreutils \
sed \
grep \
which \
ncdu \
shadow \
strace
if [ "${PACKAGES_ALREADY_INSTALLED}" != "true" ]; then # Install man pages - package name varies between 3.12 and earlier versions
apk add --no-cache \ if apk info man > /dev/null 2>&1; then
openssh-client \ apk add --no-cache man man-pages
gnupg \ else
procps \ apk add --no-cache mandoc man-pages
lsof \ fi
htop \
net-tools \
psmisc \
curl \
wget \
rsync \
ca-certificates \
unzip \
zip \
nano \
vim \
less \
jq \
libgcc \
libstdc++ \
krb5-libs \
libintl \
libssl1.1 \
lttng-ust \
tzdata \
userspace-rcu \
zlib \
sudo \
coreutils \
sed \
grep \
which \
ncdu \
shadow \
strace
# Install man pages - package name varies between 3.12 and earlier versions # Install git if not already installed (may be more recent than distro version)
if apk info man > /dev/null 2>&1; then if ! type git > /dev/null 2>&1; then
apk add --no-cache man man-pages apk add --no-cache git
else
apk add --no-cache mandoc man-pages
fi
# Install git if not already installed (may be more recent than distro version)
if ! type git > /dev/null 2>&1; then
apk add --no-cache git
fi
fi fi
# Install zsh if needed # Install zsh if needed
if [ "${INSTALL_ZSH}" = "true" ] && ! type zsh > /dev/null 2>&1; then if [ "${INSTALL_ZSH}" = "true" ] && ! type zsh > /dev/null 2>&1; then
apk add --no-cache zsh apk add --no-cache zsh
fi fi
PACKAGES_ALREADY_INSTALLED="true"
} }
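One way to check, inside an Alpine-based container, that the utilities installed by the function above actually landed — package names are taken from the list in the hunk, and `apk info -e` reports a package only if it is installed:

```bash
for pkg in openssh-client gnupg jq zsh; do
    if apk info -e "${pkg}" > /dev/null 2>&1; then
        echo "ok: ${pkg}"
    else
        echo "missing: ${pkg}"
    fi
done
```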
# ****************** # ******************
@@ -336,23 +305,26 @@ else
 fi

 # Install packages for appropriate OS
-case "${ADJUSTED_ID}" in
-    "debian")
-        install_debian_packages
-        ;;
-    "rhel")
-        install_redhat_packages
-        ;;
-    "alpine")
-        install_alpine_packages
-        ;;
-esac
+if [ "${PACKAGES_ALREADY_INSTALLED}" != "true" ]; then
+    case "${ADJUSTED_ID}" in
+        "debian")
+            install_debian_packages
+            ;;
+        "rhel")
+            install_redhat_packages
+            ;;
+        "alpine")
+            install_alpine_packages
+            ;;
+    esac
+    PACKAGES_ALREADY_INSTALLED="true"
+fi

 # If in automatic mode, determine if a user already exists, if not use vscode
 if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then
     if [ "${_REMOTE_USER}" != "root" ]; then
         USERNAME="${_REMOTE_USER}"
     else
         USERNAME=""
         POSSIBLE_USERS=("devcontainer" "vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)")
         for CURRENT_USER in "${POSSIBLE_USERS[@]}"; do
@@ -374,12 +346,12 @@ fi
 group_name="${USERNAME}"
 if id -u ${USERNAME} > /dev/null 2>&1; then
     # User exists, update if needed
     if [ "${USER_GID}" != "automatic" ] && [ "$USER_GID" != "$(id -g $USERNAME)" ]; then
         group_name="$(id -gn $USERNAME)"
         groupmod --gid $USER_GID ${group_name}
         usermod --gid $USER_GID $USERNAME
     fi
     if [ "${USER_UID}" != "automatic" ] && [ "$USER_UID" != "$(id -u $USERNAME)" ]; then
         usermod --uid $USER_UID $USERNAME
     fi
 else
@@ -389,7 +361,7 @@ else
     else
         groupadd --gid $USER_GID $USERNAME
     fi
     if [ "${USER_UID}" = "automatic" ]; then
         useradd -s /bin/bash --gid $USERNAME -m $USERNAME
     else
         useradd -s /bin/bash --uid $USER_UID --gid $USERNAME -m $USERNAME
@@ -407,28 +379,23 @@ fi
 # ** Shell customization section **
 # *********************************

 if [ "${USERNAME}" = "root" ]; then
-    user_home="/root"
-# Check if user already has a home directory other than /home/${USERNAME}
-elif [ "/home/${USERNAME}" != $( getent passwd $USERNAME | cut -d: -f6 ) ]; then
-    user_home=$( getent passwd $USERNAME | cut -d: -f6 )
+    user_rc_path="/root"
 else
-    user_home="/home/${USERNAME}"
-    if [ ! -d "${user_home}" ]; then
-        mkdir -p "${user_home}"
-        chown ${USERNAME}:${group_name} "${user_home}"
+    user_rc_path="/home/${USERNAME}"
+    if [ ! -d "${user_rc_path}" ]; then
+        mkdir -p "${user_rc_path}"
+        chown ${USERNAME}:${group_name} "${user_rc_path}"
     fi
 fi

 # Restore user .bashrc / .profile / .zshrc defaults from skeleton file if it doesn't exist or is empty
-possible_rc_files=( ".bashrc" ".profile" )
-[ "$INSTALL_OH_MY_ZSH_CONFIG" == "true" ] && possible_rc_files+=('.zshrc')
-[ "$INSTALL_ZSH" == "true" ] && possible_rc_files+=('.zprofile')
+possible_rc_files=( ".bashrc" ".profile" ".zshrc" )
 for rc_file in "${possible_rc_files[@]}"; do
     if [ -f "/etc/skel/${rc_file}" ]; then
-        if [ ! -e "${user_home}/${rc_file}" ] || [ ! -s "${user_home}/${rc_file}" ]; then
-            cp "/etc/skel/${rc_file}" "${user_home}/${rc_file}"
-            chown ${USERNAME}:${group_name} "${user_home}/${rc_file}"
+        if [ ! -e "${user_rc_path}/${rc_file}" ] || [ ! -s "${user_rc_path}/${rc_file}" ]; then
+            cp "/etc/skel/${rc_file}" "${user_rc_path}/${rc_file}"
+            chown ${USERNAME}:${group_name} "${user_rc_path}/${rc_file}"
         fi
     fi
 done
@ -449,96 +416,64 @@ if [ "${RC_SNIPPET_ALREADY_ADDED}" != "true" ]; then
;; ;;
esac esac
cat "${FEATURE_DIR}/scripts/rc_snippet.sh" >> ${global_rc_path} cat "${FEATURE_DIR}/scripts/rc_snippet.sh" >> ${global_rc_path}
cat "${FEATURE_DIR}/scripts/bash_theme_snippet.sh" >> "${user_home}/.bashrc" cat "${FEATURE_DIR}/scripts/bash_theme_snippet.sh" >> "${user_rc_path}/.bashrc"
if [ "${USERNAME}" != "root" ]; then if [ "${USERNAME}" != "root" ]; then
cat "${FEATURE_DIR}/scripts/bash_theme_snippet.sh" >> "/root/.bashrc" cat "${FEATURE_DIR}/scripts/bash_theme_snippet.sh" >> "/root/.bashrc"
chown ${USERNAME}:${group_name} "${user_home}/.bashrc" chown ${USERNAME}:${group_name} "${user_rc_path}/.bashrc"
fi fi
RC_SNIPPET_ALREADY_ADDED="true" RC_SNIPPET_ALREADY_ADDED="true"
fi fi
# Optionally configure zsh and Oh My Zsh! # Optionally configure zsh and Oh My Zsh!
if [ "${INSTALL_ZSH}" = "true" ]; then if [ "${INSTALL_ZSH}" = "true" ]; then
if [ ! -f "${user_home}/.zprofile" ]; then
touch "${user_home}/.zprofile"
echo 'source $HOME/.profile' >> "${user_home}/.zprofile" # TODO: Reconsider adding '.profile' to '.zprofile'
chown ${USERNAME}:${group_name} "${user_home}/.zprofile"
fi
if [ "${ZSH_ALREADY_INSTALLED}" != "true" ]; then if [ "${ZSH_ALREADY_INSTALLED}" != "true" ]; then
if [ "${ADJUSTED_ID}" = "rhel" ]; then if [ "${ADJUSTED_ID}" = "rhel" ]; then
global_rc_path="/etc/zshrc" global_rc_path="/etc/zshrc"
else else
global_rc_path="/etc/zsh/zshrc" global_rc_path="/etc/zsh/zshrc"
fi fi
cat "${FEATURE_DIR}/scripts/rc_snippet.sh" >> ${global_rc_path} cat "${FEATURE_DIR}/scripts/rc_snippet.sh" >> /etc/zshrc
ZSH_ALREADY_INSTALLED="true" ZSH_ALREADY_INSTALLED="true"
fi fi
if [ "${CONFIGURE_ZSH_AS_DEFAULT_SHELL}" == "true" ]; then if [ "${CONFIGURE_ZSH_AS_DEFAULT_SHELL}" == "true" ]; then
# Fixing chsh always asking for a password on alpine linux
# ref: https://askubuntu.com/questions/812420/chsh-always-asking-a-password-and-get-pam-authentication-failure.
if [ ! -f "/etc/pam.d/chsh" ] || ! grep -Eq '^auth(.*)pam_rootok\.so$' /etc/pam.d/chsh; then
echo "auth sufficient pam_rootok.so" >> /etc/pam.d/chsh
elif [[ -n "$(awk '/^auth(.*)pam_rootok\.so$/ && !/^auth[[:blank:]]+sufficient[[:blank:]]+pam_rootok\.so$/' /etc/pam.d/chsh)" ]]; then
awk '/^auth(.*)pam_rootok\.so$/ { $2 = "sufficient" } { print }' /etc/pam.d/chsh > /tmp/chsh.tmp && mv /tmp/chsh.tmp /etc/pam.d/chsh
fi
chsh --shell /bin/zsh ${USERNAME} chsh --shell /bin/zsh ${USERNAME}
fi fi
# Adapted, simplified inline Oh My Zsh! install steps that adds, defaults to a codespaces theme. # Adapted, simplified inline Oh My Zsh! install steps that adds, defaults to a codespaces theme.
# See https://github.com/ohmyzsh/ohmyzsh/blob/master/tools/install.sh for official script. # See https://github.com/ohmyzsh/ohmyzsh/blob/master/tools/install.sh for official script.
if [ "${INSTALL_OH_MY_ZSH}" = "true" ]; then oh_my_install_dir="${user_rc_path}/.oh-my-zsh"
user_rc_file="${user_home}/.zshrc" if [ ! -d "${oh_my_install_dir}" ] && [ "${INSTALL_OH_MY_ZSH}" = "true" ]; then
oh_my_install_dir="${user_home}/.oh-my-zsh"
template_path="${oh_my_install_dir}/templates/zshrc.zsh-template" template_path="${oh_my_install_dir}/templates/zshrc.zsh-template"
if [ ! -d "${oh_my_install_dir}" ]; then user_rc_file="${user_rc_path}/.zshrc"
umask g-w,o-w umask g-w,o-w
mkdir -p ${oh_my_install_dir} mkdir -p ${oh_my_install_dir}
git clone --depth=1 \ git clone --depth=1 \
-c core.eol=lf \ -c core.eol=lf \
-c core.autocrlf=false \ -c core.autocrlf=false \
-c fsck.zeroPaddedFilemode=ignore \ -c fsck.zeroPaddedFilemode=ignore \
-c fetch.fsck.zeroPaddedFilemode=ignore \ -c fetch.fsck.zeroPaddedFilemode=ignore \
-c receive.fsck.zeroPaddedFilemode=ignore \ -c receive.fsck.zeroPaddedFilemode=ignore \
"https://github.com/ohmyzsh/ohmyzsh" "${oh_my_install_dir}" 2>&1 "https://github.com/ohmyzsh/ohmyzsh" "${oh_my_install_dir}" 2>&1
echo -e "$(cat "${template_path}")\nDISABLE_AUTO_UPDATE=true\nDISABLE_UPDATE_PROMPT=true" > ${user_rc_file}
# Shrink git while still enabling updates sed -i -e 's/ZSH_THEME=.*/ZSH_THEME="devcontainers"/g' ${user_rc_file}
cd "${oh_my_install_dir}"
git repack -a -d -f --depth=1 --window=1
fi
# Add Dev Containers theme # Add Dev Containers theme
mkdir -p ${oh_my_install_dir}/custom/themes mkdir -p ${oh_my_install_dir}/custom/themes
cp -f "${FEATURE_DIR}/scripts/devcontainers.zsh-theme" "${oh_my_install_dir}/custom/themes/devcontainers.zsh-theme" cp -f "${FEATURE_DIR}/scripts/devcontainers.zsh-theme" "${oh_my_install_dir}/custom/themes/devcontainers.zsh-theme"
ln -sf "${oh_my_install_dir}/custom/themes/devcontainers.zsh-theme" "${oh_my_install_dir}/custom/themes/codespaces.zsh-theme" ln -s "${oh_my_install_dir}/custom/themes/devcontainers.zsh-theme" "${oh_my_install_dir}/custom/themes/codespaces.zsh-theme"
# Add devcontainer .zshrc template
if [ "$INSTALL_OH_MY_ZSH_CONFIG" = "true" ]; then
echo -e "$(cat "${template_path}")\nDISABLE_AUTO_UPDATE=true\nDISABLE_UPDATE_PROMPT=true" > ${user_rc_file}
sed -i -e 's/ZSH_THEME=.*/ZSH_THEME="devcontainers"/g' ${user_rc_file}
fi
# Shrink git while still enabling updates
cd "${oh_my_install_dir}"
git repack -a -d -f --depth=1 --window=1
# Copy to non-root user if one is specified # Copy to non-root user if one is specified
if [ "${USERNAME}" != "root" ]; then if [ "${USERNAME}" != "root" ]; then
copy_to_user_files=("${oh_my_install_dir}") cp -rf "${user_rc_file}" "${oh_my_install_dir}" /root
[ -f "$user_rc_file" ] && copy_to_user_files+=("$user_rc_file") chown -R ${USERNAME}:${group_name} "${oh_my_install_dir}" "${user_rc_file}"
cp -rf "${copy_to_user_files[@]}" /root
chown -R ${USERNAME}:${group_name} "${copy_to_user_files[@]}"
fi fi
fi fi
fi fi
# *********************************
# ** Ensure config directory **
# *********************************
user_config_dir="${user_home}/.config"
if [ ! -d "${user_config_dir}" ]; then
mkdir -p "${user_config_dir}"
chown ${USERNAME}:${group_name} "${user_config_dir}"
fi
# **************************** # ****************************
# ** Utilities and commands ** # ** Utilities and commands **
# **************************** # ****************************
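Among the changes in the zsh section of this hunk, the newer code makes `chsh --shell /bin/zsh` work non-interactively by marking `pam_rootok.so` as `sufficient` for chsh, and shrinks the Oh My Zsh clone with an aggressive repack. A condensed sketch of those two techniques, using the paths, URL, and script variables (`USERNAME`, `user_home`) that appear above:

```bash
# Let root change another user's shell without a password prompt (Alpine chsh/PAM quirk)
if [ ! -f /etc/pam.d/chsh ] || ! grep -Eq '^auth(.*)pam_rootok\.so$' /etc/pam.d/chsh; then
    echo "auth sufficient pam_rootok.so" >> /etc/pam.d/chsh
fi
chsh --shell /bin/zsh "${USERNAME}"

# Shallow-clone Oh My Zsh, then repack to keep the layer small
git clone --depth=1 https://github.com/ohmyzsh/ohmyzsh "${user_home}/.oh-my-zsh"
cd "${user_home}/.oh-my-zsh" && git repack -a -d -f --depth=1 --window=1
```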


@@ -1,6 +1,6 @@
 {
     "id": "conda",
-    "version": "1.0.9",
+    "version": "1.0.8",
     "name": "Conda",
     "description": "A cross-platform, language-agnostic binary package manager",
     "documentationURL": "https://github.com/devcontainers/features/tree/main/src/conda",


@@ -114,13 +114,8 @@ if ! conda --version &> /dev/null ; then
     find "${CONDA_DIR}" -type d -print0 | xargs -n 1 -0 chmod g+s

-    # Temporary fixes
-    # Due to https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-23491
+    # Temporary due to https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-23491
     install_user_package certifi
-    # Due to https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-0286 and https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-23931
-    install_user_package cryptography
-    # Due to https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-40897
-    install_user_package setuptools
 fi

 # Display a notice on conda when not running in GitHub Codespaces


@@ -6,16 +6,6 @@ This feature provides two ways of connecting to the desktop environment it adds.
 1. Open the ports view in your tool, select the noVNC port, and click the Globe icon.
 1. In the browser that appears, click the **Connect** button and enter the desktop password (`vscode` by default).

-To set up the `6080` port from your `devcontainer.json` file, include the following:
-
-```json
-"forwardPorts": [6080],
-"portsAttributes": {
-  "6080": {
-    "label": "desktop"
-  }
-}
-```
-
 You can also connect to the desktop using a [VNC viewer](https://www.realvnc.com/en/connect/download/viewer/). To do so:

 1. Connect to the environment from a desktop tool that supports the dev container spec (e.g., VS Code client).
@@ -25,7 +15,7 @@ You can also connect to the desktop using a [VNC viewer](https://www.realvnc.com

 ## Customizing Fluxbox

-The window manager installed is [Fluxbox](http://fluxbox.org/). **Right-click** to see the application menu. In addition, any UI-based commands you execute inside the dev container will automatically appear on the desktop.
+The window manager is installed is [Fluxbox](http://fluxbox.org/). **Right-click** to see the application menu. In addition, any UI-based commands you execute inside the dev container will automatically appear on the desktop.

 You can customize the desktop using Fluxbox configuration files. The configuration files are located in the `.fluxbox` folder of the home directory of the user you using to connect to the dev container (`$HOME/.fluxbox`).


@@ -29,16 +29,6 @@ This feature provides two ways of connecting to the desktop environment it adds.
 1. Open the ports view in your tool, select the noVNC port, and click the Globe icon.
 1. In the browser that appears, click the **Connect** button and enter the desktop password (`vscode` by default).

-To set up the `6080` port from your `devcontainer.json` file, include the following:
-
-```json
-"forwardPorts": [6080],
-"portsAttributes": {
-  "6080": {
-    "label": "desktop"
-  }
-}
-```
-
 You can also connect to the desktop using a [VNC viewer](https://www.realvnc.com/en/connect/download/viewer/). To do so:

 1. Connect to the environment from a desktop tool that supports the dev container spec (e.g., VS Code client).
@@ -48,7 +38,7 @@ You can also connect to the desktop using a [VNC viewer](https://www.realvnc.com

 ## Customizing Fluxbox

-The window manager installed is [Fluxbox](http://fluxbox.org/). **Right-click** to see the application menu. In addition, any UI-based commands you execute inside the dev container will automatically appear on the desktop.
+The window manager is installed is [Fluxbox](http://fluxbox.org/). **Right-click** to see the application menu. In addition, any UI-based commands you execute inside the dev container will automatically appear on the desktop.

 You can customize the desktop using Fluxbox configuration files. The configuration files are located in the `.fluxbox` folder of the home directory of the user you using to connect to the dev container (`$HOME/.fluxbox`).


@@ -17,17 +17,11 @@ Create child containers *inside* a container, independent from the host's docker
 |-----|-----|-----|-----|
 | version | Select or enter a Docker/Moby Engine version. (Availability can vary by OS version.) | string | latest |
 | moby | Install OSS Moby build instead of Docker CE | boolean | true |
-| dockerDashComposeVersion | Default version of Docker Compose (v1 or v2 or none) | string | v1 |
+| dockerDashComposeVersion | Default version of Docker Compose (v1 or v2) | string | v1 |
 | azureDnsAutoDetection | Allow automatically setting the dockerd DNS server when the installation script detects it is running in Azure | boolean | true |
 | dockerDefaultAddressPool | Define default address pools for Docker networks. e.g. base=192.168.0.0/16,size=24 | string | - |
 | installDockerBuildx | Install Docker Buildx | boolean | true |

-## Customizations
-
-### VS Code Extensions
-
-- `ms-azuretools.vscode-docker`
-
 ## Limitations

 This docker-in-docker Dev Container Feature is roughly based on the [official docker-in-docker wrapper script](https://github.com/moby/moby/blob/master/hack/dind) that is part of the [Moby project](https://mobyproject.org/). With this in mind:


@@ -1,6 +1,6 @@
 {
     "id": "docker-in-docker",
-    "version": "2.7.1",
+    "version": "2.1.0",
     "name": "Docker (Docker-in-Docker)",
     "documentationURL": "https://github.com/devcontainers/features/tree/main/src/docker-in-docker",
     "description": "Create child containers *inside* a container, independent from the host's docker instance. Installs Docker extension in the container along with needed CLIs.",
@@ -23,12 +23,11 @@
         "dockerDashComposeVersion": {
             "type": "string",
             "enum": [
-                "none",
                 "v1",
                 "v2"
             ],
             "default": "v1",
-            "description": "Default version of Docker Compose (v1 or v2 or none)"
+            "description": "Default version of Docker Compose (v1 or v2)"
         },
         "azureDnsAutoDetection": {
             "type": "boolean",

View file

@ -5,19 +5,19 @@
#------------------------------------------------------------------------------------------------------------- #-------------------------------------------------------------------------------------------------------------
# #
# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/docker-in-docker.md # Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/docker-in-docker.md
# Maintainer: The Dev Container spec maintainers # Maintainer: The VS Code and Codespaces Teams
DOCKER_VERSION="${VERSION:-"latest"}" # The Docker/Moby Engine + CLI should match in version DOCKER_VERSION="${VERSION:-"latest"}" # The Docker/Moby Engine + CLI should match in version
USE_MOBY="${MOBY:-"true"}" USE_MOBY="${MOBY:-"true"}"
DOCKER_DASH_COMPOSE_VERSION="${DOCKERDASHCOMPOSEVERSION:-"v1"}" # v1 or v2 or none DOCKER_DASH_COMPOSE_VERSION="${DOCKERDASHCOMPOSEVERSION:-"v1"}" # v1 or v2
AZURE_DNS_AUTO_DETECTION="${AZUREDNSAUTODETECTION:-"true"}" AZURE_DNS_AUTO_DETECTION="${AZUREDNSAUTODETECTION:-"true"}"
DOCKER_DEFAULT_ADDRESS_POOL="${DOCKERDEFAULTADDRESSPOOL}" DOCKER_DEFAULT_ADDRESS_POOL="${DOCKERDEFAULTADDRESSPOOL}"
USERNAME="${USERNAME:-"${_REMOTE_USER:-"automatic"}"}" USERNAME="${USERNAME:-"${_REMOTE_USER:-"automatic"}"}"
INSTALL_DOCKER_BUILDX="${INSTALLDOCKERBUILDX:-"true"}" INSTALL_DOCKER_BUILDX="${INSTALLDOCKERBUILDX:-"true"}"
MICROSOFT_GPG_KEYS_URI="https://packages.microsoft.com/keys/microsoft.asc" MICROSOFT_GPG_KEYS_URI="https://packages.microsoft.com/keys/microsoft.asc"
DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES="bookworm buster bullseye bionic focal jammy" DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES="buster bullseye bionic focal jammy"
DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES="bookworm buster bullseye bionic focal hirsute impish jammy" DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES="buster bullseye bionic focal hirsute impish jammy"
# Default: Exit on any failure. # Default: Exit on any failure.
set -e set -e
@ -57,6 +57,21 @@ elif [ "${USERNAME}" = "none" ] || ! id -u ${USERNAME} > /dev/null 2>&1; then
USERNAME=root USERNAME=root
fi fi
# Get central common setting
get_common_setting() {
if [ "${common_settings_file_loaded}" != "true" ]; then
curl -sfL "https://aka.ms/vscode-dev-containers/script-library/settings.env" 2>/dev/null -o /tmp/vsdc-settings.env || echo "Could not download settings file. Skipping."
common_settings_file_loaded=true
fi
if [ -f "/tmp/vsdc-settings.env" ]; then
local multi_line=""
if [ "$2" = "true" ]; then multi_line="-z"; fi
local result="$(grep ${multi_line} -oP "$1=\"?\K[^\"]+" /tmp/vsdc-settings.env | tr -d '\0')"
if [ ! -z "${result}" ]; then declare -g $1="${result}"; fi
fi
echo "$1=${!1}"
}
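In effect, `get_common_setting` lets a remotely hosted settings.env override the defaults hard-coded above. A hedged usage sketch (the variable name comes from the defaults earlier in the script; the remote file may or may not define it):

```bash
DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES="buster bullseye focal jammy"   # local default
get_common_setting DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES              # overridden if settings.env provides a value
echo "${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}"                       # value the rest of the script will use
```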
apt_get_update() apt_get_update()
{ {
if [ "$(find /var/lib/apt/lists/* | wc -l)" = "0" ]; then if [ "$(find /var/lib/apt/lists/* | wc -l)" = "0" ]; then
@ -122,6 +137,8 @@ architecture="$(dpkg --print-architecture)"
# Check if distro is supported # Check if distro is supported
if [ "${USE_MOBY}" = "true" ]; then if [ "${USE_MOBY}" = "true" ]; then
# 'get_common_setting' allows attribute to be updated remotely
get_common_setting DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES
if [[ "${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}" != *"${VERSION_CODENAME}"* ]]; then if [[ "${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}" != *"${VERSION_CODENAME}"* ]]; then
err "Unsupported distribution version '${VERSION_CODENAME}'. To resolve, either: (1) set feature option '\"moby\": false' , or (2) choose a compatible OS distribution" err "Unsupported distribution version '${VERSION_CODENAME}'. To resolve, either: (1) set feature option '\"moby\": false' , or (2) choose a compatible OS distribution"
err "Support distributions include: ${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}" err "Support distributions include: ${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}"
@ -129,6 +146,7 @@ if [ "${USE_MOBY}" = "true" ]; then
fi fi
echo "Distro codename '${VERSION_CODENAME}' matched filter '${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}'" echo "Distro codename '${VERSION_CODENAME}' matched filter '${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}'"
else else
get_common_setting DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES
if [[ "${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}" != *"${VERSION_CODENAME}"* ]]; then if [[ "${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}" != *"${VERSION_CODENAME}"* ]]; then
err "Unsupported distribution version '${VERSION_CODENAME}'. To resolve, please choose a compatible OS distribution" err "Unsupported distribution version '${VERSION_CODENAME}'. To resolve, please choose a compatible OS distribution"
err "Support distributions include: ${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}" err "Support distributions include: ${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}"
@ -159,6 +177,7 @@ if [ "${USE_MOBY}" = "true" ]; then
cli_package_name="moby-cli" cli_package_name="moby-cli"
# Import key safely and import Microsoft apt repo # Import key safely and import Microsoft apt repo
get_common_setting MICROSOFT_GPG_KEYS_URI
curl -sSL ${MICROSOFT_GPG_KEYS_URI} | gpg --dearmor > /usr/share/keyrings/microsoft-archive-keyring.gpg curl -sSL ${MICROSOFT_GPG_KEYS_URI} | gpg --dearmor > /usr/share/keyrings/microsoft-archive-keyring.gpg
echo "deb [arch=${architecture} signed-by=/usr/share/keyrings/microsoft-archive-keyring.gpg] https://packages.microsoft.com/repos/microsoft-${ID}-${VERSION_CODENAME}-prod ${VERSION_CODENAME} main" > /etc/apt/sources.list.d/microsoft.list echo "deb [arch=${architecture} signed-by=/usr/share/keyrings/microsoft-archive-keyring.gpg] https://packages.microsoft.com/repos/microsoft-${ID}-${VERSION_CODENAME}-prod ${VERSION_CODENAME} main" > /etc/apt/sources.list.d/microsoft.list
else else
@ -223,77 +242,60 @@ fi
echo "Finished installing docker / moby!" echo "Finished installing docker / moby!"
# If 'docker-compose' command is to be included # Install Docker Compose if not already installed and is on a supported architecture
if [ "${DOCKER_DASH_COMPOSE_VERSION}" != "none" ]; then if type docker-compose > /dev/null 2>&1; then
# Install Docker Compose if not already installed and is on a supported architecture echo "Docker Compose v1 already installed."
if type docker-compose > /dev/null 2>&1; then else
echo "Docker Compose v1 already installed." target_compose_arch="${architecture}"
else if [ "${target_compose_arch}" = "amd64" ]; then
target_compose_arch="${architecture}" target_compose_arch="x86_64"
if [ "${target_compose_arch}" = "amd64" ]; then fi
target_compose_arch="x86_64" if [ "${target_compose_arch}" != "x86_64" ]; then
# Use pip to get a version that runs on this architecture
check_packages python3-minimal python3-pip libffi-dev python3-venv
export PIPX_HOME=/usr/local/pipx
mkdir -p ${PIPX_HOME}
export PIPX_BIN_DIR=/usr/local/bin
export PYTHONUSERBASE=/tmp/pip-tmp
export PIP_CACHE_DIR=/tmp/pip-tmp/cache
pipx_bin=pipx
if ! type pipx > /dev/null 2>&1; then
pip3 install --disable-pip-version-check --no-cache-dir --user pipx
pipx_bin=/tmp/pip-tmp/bin/pipx
fi fi
if [ "${target_compose_arch}" != "x86_64" ]; then ${pipx_bin} install --pip-args '--no-cache-dir --force-reinstall' docker-compose
# Use pip to get a version that runs on this architecture rm -rf /tmp/pip-tmp
check_packages python3-minimal python3-pip libffi-dev python3-venv
export PIPX_HOME=/usr/local/pipx
mkdir -p ${PIPX_HOME}
export PIPX_BIN_DIR=/usr/local/bin
export PYTHONUSERBASE=/tmp/pip-tmp
export PIP_CACHE_DIR=/tmp/pip-tmp/cache
pipx_bin=pipx
if ! type pipx > /dev/null 2>&1; then
pip3 install --disable-pip-version-check --no-cache-dir --user pipx
pipx_bin=/tmp/pip-tmp/bin/pipx
fi
set +e
${pipx_bin} install --pip-args '--no-cache-dir --force-reinstall' docker-compose
exit_code=$?
set -e
if [ ${exit_code} -ne 0 ]; then
# Temporary: https://github.com/devcontainers/features/issues/616
# See https://github.com/yaml/pyyaml/issues/601
echo "(*) Failed to install docker-compose via pipx. Trying via pip3..."
export PYTHONUSERBASE=/usr/local
pip3 install --disable-pip-version-check --no-cache-dir --user "Cython<3.0" pyyaml wheel docker-compose --no-build-isolation
fi
rm -rf /tmp/pip-tmp
else
compose_v1_version="1"
find_version_from_git_tags compose_v1_version "https://github.com/docker/compose" "tags/"
echo "(*) Installing docker-compose ${compose_v1_version}..."
curl -fsSL "https://github.com/docker/compose/releases/download/${compose_v1_version}/docker-compose-Linux-x86_64" -o /usr/local/bin/docker-compose
chmod +x /usr/local/bin/docker-compose
fi
fi
# Install docker-compose switch if not already installed - https://github.com/docker/compose-switch#manual-installation
current_v1_compose_path="$(which docker-compose)"
target_v1_compose_path="$(dirname "${current_v1_compose_path}")/docker-compose-v1"
if ! type compose-switch > /dev/null 2>&1; then
echo "(*) Installing compose-switch..."
compose_switch_version="latest"
find_version_from_git_tags compose_switch_version "https://github.com/docker/compose-switch"
curl -fsSL "https://github.com/docker/compose-switch/releases/download/v${compose_switch_version}/docker-compose-linux-${architecture}" -o /usr/local/bin/compose-switch
chmod +x /usr/local/bin/compose-switch
# TODO: Verify checksum once available: https://github.com/docker/compose-switch/issues/11
# Setup v1 CLI as alternative in addition to compose-switch (which maps to v2)
mv "${current_v1_compose_path}" "${target_v1_compose_path}"
update-alternatives --install /usr/local/bin/docker-compose docker-compose /usr/local/bin/compose-switch 99
update-alternatives --install /usr/local/bin/docker-compose docker-compose "${target_v1_compose_path}" 1
fi
if [ "${DOCKER_DASH_COMPOSE_VERSION}" = "v1" ]; then
update-alternatives --set docker-compose "${target_v1_compose_path}"
else else
update-alternatives --set docker-compose /usr/local/bin/compose-switch compose_v1_version="1"
find_version_from_git_tags compose_v1_version "https://github.com/docker/compose" "tags/"
echo "(*) Installing docker-compose ${compose_v1_version}..."
curl -fsSL "https://github.com/docker/compose/releases/download/${compose_v1_version}/docker-compose-Linux-x86_64" -o /usr/local/bin/docker-compose
chmod +x /usr/local/bin/docker-compose
fi fi
fi fi
# Install docker-compose switch if not already installed - https://github.com/docker/compose-switch#manual-installation
current_v1_compose_path="$(which docker-compose)"
target_v1_compose_path="$(dirname "${current_v1_compose_path}")/docker-compose-v1"
if ! type compose-switch > /dev/null 2>&1; then
echo "(*) Installing compose-switch..."
compose_switch_version="latest"
find_version_from_git_tags compose_switch_version "https://github.com/docker/compose-switch"
curl -fsSL "https://github.com/docker/compose-switch/releases/download/v${compose_switch_version}/docker-compose-linux-${architecture}" -o /usr/local/bin/compose-switch
chmod +x /usr/local/bin/compose-switch
# TODO: Verify checksum once available: https://github.com/docker/compose-switch/issues/11
# Setup v1 CLI as alternative in addition to compose-switch (which maps to v2)
mv "${current_v1_compose_path}" "${target_v1_compose_path}"
update-alternatives --install /usr/local/bin/docker-compose docker-compose /usr/local/bin/compose-switch 99
update-alternatives --install /usr/local/bin/docker-compose docker-compose "${target_v1_compose_path}" 1
fi
if [ "${DOCKER_DASH_COMPOSE_VERSION}" = "v1" ]; then
update-alternatives --set docker-compose "${target_v1_compose_path}"
else
update-alternatives --set docker-compose /usr/local/bin/compose-switch
fi
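The net effect of the update-alternatives wiring above is that `docker-compose` resolves either to the v1 binary or to compose-switch (v2 semantics). A sketch for inspecting or flipping the selection afterwards (the paths mirror the script and are assumptions about the install location):

```bash
update-alternatives --display docker-compose                                     # show the active alternative
sudo update-alternatives --set docker-compose /usr/local/bin/compose-switch      # route to Compose v2
sudo update-alternatives --set docker-compose /usr/local/bin/docker-compose-v1   # route to Compose v1
```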
# If init file already exists, exit # If init file already exists, exit
if [ -f "/usr/local/share/docker-init.sh" ]; then if [ -f "/usr/local/share/docker-init.sh" ]; then
echo "/usr/local/share/docker-init.sh already exists, so exiting." echo "/usr/local/share/docker-init.sh already exists, so exiting."
@ -344,10 +346,11 @@ tee -a /usr/local/share/docker-init.sh > /dev/null \
<< 'EOF' << 'EOF'
dockerd_start="AZURE_DNS_AUTO_DETECTION=${AZURE_DNS_AUTO_DETECTION} DOCKER_DEFAULT_ADDRESS_POOL=${DOCKER_DEFAULT_ADDRESS_POOL} $(cat << 'INNEREOF' dockerd_start="AZURE_DNS_AUTO_DETECTION=${AZURE_DNS_AUTO_DETECTION} DOCKER_DEFAULT_ADDRESS_POOL=${DOCKER_DEFAULT_ADDRESS_POOL} $(cat << 'INNEREOF'
# explicitly remove dockerd and containerd PID file to ensure that it can start properly if it was stopped uncleanly # explicitly remove dockerd and containerd PID file to ensure that it can start properly if it was stopped uncleanly
# ie: docker kill <ID>
find /run /var/run -iname 'docker*.pid' -delete || : find /run /var/run -iname 'docker*.pid' -delete || :
find /run /var/run -iname 'container*.pid' -delete || : find /run /var/run -iname 'container*.pid' -delete || :
# -- Start: dind wrapper script -- ## Dind wrapper script from docker team, adapted to a function
# Maintained: https://github.com/moby/moby/blob/master/hack/dind # Maintained: https://github.com/moby/moby/blob/master/hack/dind
export container=docker export container=docker
@ -364,52 +367,31 @@ dockerd_start="AZURE_DNS_AUTO_DETECTION=${AZURE_DNS_AUTO_DETECTION} DOCKER_DEFAU
mount -t tmpfs none /tmp mount -t tmpfs none /tmp
fi fi
set_cgroup_nesting() # cgroup v2: enable nesting
{ if [ -f /sys/fs/cgroup/cgroup.controllers ]; then
# cgroup v2: enable nesting # move the processes from the root group to the /init group,
if [ -f /sys/fs/cgroup/cgroup.controllers ]; then # otherwise writing subtree_control fails with EBUSY.
# move the processes from the root group to the /init group, # An error during moving non-existent process (i.e., "cat") is ignored.
# otherwise writing subtree_control fails with EBUSY. mkdir -p /sys/fs/cgroup/init
# An error during moving non-existent process (i.e., "cat") is ignored. xargs -rn1 < /sys/fs/cgroup/cgroup.procs > /sys/fs/cgroup/init/cgroup.procs || :
mkdir -p /sys/fs/cgroup/init # enable controllers
xargs -rn1 < /sys/fs/cgroup/cgroup.procs > /sys/fs/cgroup/init/cgroup.procs || : sed -e 's/ / +/g' -e 's/^/+/' < /sys/fs/cgroup/cgroup.controllers \
# enable controllers > /sys/fs/cgroup/cgroup.subtree_control
sed -e 's/ / +/g' -e 's/^/+/' < /sys/fs/cgroup/cgroup.controllers \ fi
> /sys/fs/cgroup/cgroup.subtree_control ## Dind wrapper over.
fi
}
# Set cgroup nesting, retrying if necessary
retry_cgroup_nesting=0
until [ "${retry_cgroup_nesting}" -eq "5" ];
do
set +e
set_cgroup_nesting
if [ $? -ne 0 ]; then
echo "(*) cgroup v2: Failed to enable nesting, retrying..."
else
break
fi
retry_cgroup_nesting=`expr $retry_cgroup_nesting + 1`
set -e
done
# -- End: dind wrapper script --
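To sanity-check that the cgroup v2 nesting above took effect once the container is up, something like the following can be used (the `docker info` template field assumes a reasonably recent engine):

```bash
cat /sys/fs/cgroup/cgroup.subtree_control    # should list the delegated controllers on cgroup v2
docker info --format '{{.CgroupVersion}}'    # expected to print "2" on a cgroup v2 setup
```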
# Handle DNS # Handle DNS
set +e set +e
cat /etc/resolv.conf | grep -i 'internal.cloudapp.net' > /dev/null 2>&1 cat /etc/resolv.conf | grep -i 'internal.cloudapp.net'
if [ $? -eq 0 ] && [ "${AZURE_DNS_AUTO_DETECTION}" = "true" ] if [ $? -eq 0 ] && [ "${AZURE_DNS_AUTO_DETECTION}" = "true" ]
then then
echo "Setting dockerd Azure DNS." echo "Setting dockerd Azure DNS."
CUSTOMDNS="--dns 168.63.129.16" CUSTOMDNS="--dns 168.63.129.16"
else else
echo "Not setting dockerd DNS manually." echo "Not setting dockerd DNS manually."
CUSTOMDNS="" CUSTOMDNS=""
fi fi
set -e set -e
if [ -z "$DOCKER_DEFAULT_ADDRESS_POOL" ] if [ -z "$DOCKER_DEFAULT_ADDRESS_POOL" ]
@ -424,49 +406,14 @@ dockerd_start="AZURE_DNS_AUTO_DETECTION=${AZURE_DNS_AUTO_DETECTION} DOCKER_DEFAU
INNEREOF INNEREOF
)" )"
sudo_if() { # Start using sudo if not invoked as root
COMMAND="$*" if [ "$(id -u)" -ne 0 ]; then
sudo /bin/sh -c "${dockerd_start}"
else
eval "${dockerd_start}"
fi
if [ "$(id -u)" -ne 0 ]; then set +e
sudo $COMMAND
else
$COMMAND
fi
}
retry_docker_start_count=0
docker_ok="false"
until [ "${docker_ok}" = "true" ] || [ "${retry_docker_start_count}" -eq "5" ];
do
# Start using sudo if not invoked as root
if [ "$(id -u)" -ne 0 ]; then
sudo /bin/sh -c "${dockerd_start}"
else
eval "${dockerd_start}"
fi
retry_count=0
until [ "${docker_ok}" = "true" ] || [ "${retry_count}" -eq "5" ];
do
sleep 1s
set +e
docker info > /dev/null 2>&1 && docker_ok="true"
set -e
retry_count=`expr $retry_count + 1`
done
if [ "${docker_ok}" != "true" ] && [ "${retry_docker_start_count}" != "4" ]; then
echo "(*) Failed to start docker, retrying..."
set +e
sudo_if pkill dockerd
sudo_if pkill containerd
set -e
fi
retry_docker_start_count=`expr $retry_docker_start_count + 1`
done
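If the daemon still is not up after the five attempts above, a manual retry from inside the container follows the same pattern the script uses (a sketch; `sudo` is only needed for non-root users):

```bash
sudo pkill dockerd || true                   # clear any half-started daemon
sudo pkill containerd || true
sudo /usr/local/share/docker-init.sh true    # re-run the entrypoint logic
docker info                                  # confirm the daemon answers
```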
# Execute whatever commands were passed in (if any). This allows us # Execute whatever commands were passed in (if any). This allows us
# to set this script to ENTRYPOINT while still executing the default CMD. # to set this script to ENTRYPOINT while still executing the default CMD.

View file

@ -19,15 +19,9 @@ Re-use the host docker socket, adding the Docker CLI to a container. Feature inv
|-----|-----|-----|-----| |-----|-----|-----|-----|
| version | Select or enter a Docker/Moby CLI version. (Availability can vary by OS version.) | string | latest | | version | Select or enter a Docker/Moby CLI version. (Availability can vary by OS version.) | string | latest |
| moby | Install OSS Moby build instead of Docker CE | boolean | true | | moby | Install OSS Moby build instead of Docker CE | boolean | true |
| dockerDashComposeVersion | Compose version to use for docker-compose (v1 or v2 or none) | string | v2 | | dockerDashComposeVersion | Compose version to use for docker-compose (v1 or v2) | string | v1 |
| installDockerBuildx | Install Docker Buildx | boolean | true | | installDockerBuildx | Install Docker Buildx | boolean | true |
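A minimal devcontainer.json sketch for this Feature (the feature ID and values are illustrative, mirroring the option names in the table):

```json
{
    "features": {
        "ghcr.io/devcontainers/features/docker-outside-of-docker:1": {
            "moby": true,
            "dockerDashComposeVersion": "v2"
        }
    }
}
```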
## Customizations
### VS Code Extensions
- `ms-azuretools.vscode-docker`
## Limitations ## Limitations
- As the name implies, the Feature is expected to work when the host is running Docker (or the OSS Moby container engine it is built on). It may be possible to get running in other container engines, but it has not been tested with them. - As the name implies, the Feature is expected to work when the host is running Docker (or the OSS Moby container engine it is built on). It may be possible to get running in other container engines, but it has not been tested with them.

View file

@ -1,6 +1,6 @@
{ {
"id": "docker-outside-of-docker", "id": "docker-outside-of-docker",
"version": "1.3.1", "version": "1.1.0",
"name": "Docker (docker-outside-of-docker)", "name": "Docker (docker-outside-of-docker)",
"documentationURL": "https://github.com/devcontainers/features/tree/main/src/docker-outside-of-docker", "documentationURL": "https://github.com/devcontainers/features/tree/main/src/docker-outside-of-docker",
"description": "Re-use the host docker socket, adding the Docker CLI to a container. Feature invokes a script to enable using a forwarded Docker socket within a container to run Docker commands.", "description": "Re-use the host docker socket, adding the Docker CLI to a container. Feature invokes a script to enable using a forwarded Docker socket within a container to run Docker commands.",
@ -23,12 +23,11 @@
"dockerDashComposeVersion": { "dockerDashComposeVersion": {
"type": "string", "type": "string",
"enum": [ "enum": [
"none",
"v1", "v1",
"v2" "v2"
], ],
"default": "v2", "default": "v1",
"description": "Compose version to use for docker-compose (v1 or v2 or none)" "description": "Compose version to use for docker-compose (v1 or v2)"
}, },
"installDockerBuildx": { "installDockerBuildx": {
"type": "boolean", "type": "boolean",
@ -37,6 +36,9 @@
} }
}, },
"entrypoint": "/usr/local/share/docker-init.sh", "entrypoint": "/usr/local/share/docker-init.sh",
"containerEnv": {
"DOCKER_BUILDKIT": "1"
},
"customizations": { "customizations": {
"vscode": { "vscode": {
"extensions": [ "extensions": [

View file

@ -9,7 +9,7 @@
DOCKER_VERSION="${VERSION:-"latest"}" DOCKER_VERSION="${VERSION:-"latest"}"
USE_MOBY="${MOBY:-"true"}" USE_MOBY="${MOBY:-"true"}"
DOCKER_DASH_COMPOSE_VERSION="${DOCKERDASHCOMPOSEVERSION:-"v1"}" # v1 or v2 or none DOCKER_DASH_COMPOSE_VERSION="${DOCKERDASHCOMPOSEVERSION:-"v1"}" # v1 or v2
ENABLE_NONROOT_DOCKER="${ENABLE_NONROOT_DOCKER:-"true"}" ENABLE_NONROOT_DOCKER="${ENABLE_NONROOT_DOCKER:-"true"}"
SOURCE_SOCKET="${SOURCE_SOCKET:-"/var/run/docker-host.sock"}" SOURCE_SOCKET="${SOURCE_SOCKET:-"/var/run/docker-host.sock"}"
@ -18,8 +18,8 @@ USERNAME="${USERNAME:-"${_REMOTE_USER:-"automatic"}"}"
INSTALL_DOCKER_BUILDX="${INSTALLDOCKERBUILDX:-"true"}" INSTALL_DOCKER_BUILDX="${INSTALLDOCKERBUILDX:-"true"}"
MICROSOFT_GPG_KEYS_URI="https://packages.microsoft.com/keys/microsoft.asc" MICROSOFT_GPG_KEYS_URI="https://packages.microsoft.com/keys/microsoft.asc"
DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES="bookworm buster bullseye bionic focal jammy" DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES="buster bullseye bionic focal jammy"
DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES="bookworm buster bullseye bionic focal hirsute impish jammy" DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES="buster bullseye bionic focal hirsute impish jammy"
set -e set -e
@ -48,6 +48,21 @@ elif [ "${USERNAME}" = "none" ] || ! id -u ${USERNAME} > /dev/null 2>&1; then
USERNAME=root USERNAME=root
fi fi
# Get central common setting
get_common_setting() {
if [ "${common_settings_file_loaded}" != "true" ]; then
curl -sfL "https://aka.ms/vscode-dev-containers/script-library/settings.env" 2>/dev/null -o /tmp/vsdc-settings.env || echo "Could not download settings file. Skipping."
common_settings_file_loaded=true
fi
if [ -f "/tmp/vsdc-settings.env" ]; then
local multi_line=""
if [ "$2" = "true" ]; then multi_line="-z"; fi
local result="$(grep ${multi_line} -oP "$1=\"?\K[^\"]+" /tmp/vsdc-settings.env | tr -d '\0')"
if [ ! -z "${result}" ]; then declare -g $1="${result}"; fi
fi
echo "$1=${!1}"
}
apt_get_update() apt_get_update()
{ {
if [ "$(find /var/lib/apt/lists/* | wc -l)" = "0" ]; then if [ "$(find /var/lib/apt/lists/* | wc -l)" = "0" ]; then
@ -72,7 +87,7 @@ find_version_from_git_tags() {
local repository=$2 local repository=$2
local prefix=${3:-"tags/v"} local prefix=${3:-"tags/v"}
local separator=${4:-"."} local separator=${4:-"."}
local last_part_optional=${5:-"false"} local last_part_optional=${5:-"false"}
if [ "$(echo "${requested_version}" | grep -o "." | wc -l)" != "2" ]; then if [ "$(echo "${requested_version}" | grep -o "." | wc -l)" != "2" ]; then
local escaped_separator=${separator//./\\.} local escaped_separator=${separator//./\\.}
local last_part local last_part
@ -114,6 +129,8 @@ architecture="$(dpkg --print-architecture)"
# Check if distro is supported # Check if distro is supported
if [ "${USE_MOBY}" = "true" ]; then if [ "${USE_MOBY}" = "true" ]; then
# 'get_common_setting' allows attribute to be updated remotely
get_common_setting DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES
if [[ "${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}" != *"${VERSION_CODENAME}"* ]]; then if [[ "${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}" != *"${VERSION_CODENAME}"* ]]; then
err "Unsupported distribution version '${VERSION_CODENAME}'. To resolve, either: (1) set feature option '\"moby\": false' , or (2) choose a compatible OS distribution" err "Unsupported distribution version '${VERSION_CODENAME}'. To resolve, either: (1) set feature option '\"moby\": false' , or (2) choose a compatible OS distribution"
err "Support distributions include: ${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}" err "Support distributions include: ${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}"
@ -121,6 +138,7 @@ if [ "${USE_MOBY}" = "true" ]; then
fi fi
echo "Distro codename '${VERSION_CODENAME}' matched filter '${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}'" echo "Distro codename '${VERSION_CODENAME}' matched filter '${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}'"
else else
get_common_setting DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES
if [[ "${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}" != *"${VERSION_CODENAME}"* ]]; then if [[ "${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}" != *"${VERSION_CODENAME}"* ]]; then
err "Unsupported distribution version '${VERSION_CODENAME}'. To resolve, please choose a compatible OS distribution" err "Unsupported distribution version '${VERSION_CODENAME}'. To resolve, please choose a compatible OS distribution"
err "Support distributions include: ${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}" err "Support distributions include: ${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}"
@ -135,6 +153,7 @@ if [ "${USE_MOBY}" = "true" ]; then
cli_package_name="moby-cli" cli_package_name="moby-cli"
# Import key safely and import Microsoft apt repo # Import key safely and import Microsoft apt repo
get_common_setting MICROSOFT_GPG_KEYS_URI
curl -sSL ${MICROSOFT_GPG_KEYS_URI} | gpg --dearmor > /usr/share/keyrings/microsoft-archive-keyring.gpg curl -sSL ${MICROSOFT_GPG_KEYS_URI} | gpg --dearmor > /usr/share/keyrings/microsoft-archive-keyring.gpg
echo "deb [arch=${architecture} signed-by=/usr/share/keyrings/microsoft-archive-keyring.gpg] https://packages.microsoft.com/repos/microsoft-${ID}-${VERSION_CODENAME}-prod ${VERSION_CODENAME} main" > /etc/apt/sources.list.d/microsoft.list echo "deb [arch=${architecture} signed-by=/usr/share/keyrings/microsoft-archive-keyring.gpg] https://packages.microsoft.com/repos/microsoft-${ID}-${VERSION_CODENAME}-prod ${VERSION_CODENAME} main" > /etc/apt/sources.list.d/microsoft.list
else else
@ -153,7 +172,7 @@ apt-get update
if [ "${DOCKER_VERSION}" = "latest" ] || [ "${DOCKER_VERSION}" = "lts" ] || [ "${DOCKER_VERSION}" = "stable" ]; then if [ "${DOCKER_VERSION}" = "latest" ] || [ "${DOCKER_VERSION}" = "lts" ] || [ "${DOCKER_VERSION}" = "stable" ]; then
# Empty, meaning grab whatever "latest" is in apt repo # Empty, meaning grab whatever "latest" is in apt repo
cli_version_suffix="" cli_version_suffix=""
else else
# Fetch a valid version from the apt-cache (eg: the Microsoft repo appends +azure, breakfix, etc...) # Fetch a valid version from the apt-cache (eg: the Microsoft repo appends +azure, breakfix, etc...)
docker_version_dot_escaped="${DOCKER_VERSION//./\\.}" docker_version_dot_escaped="${DOCKER_VERSION//./\\.}"
docker_version_dot_plus_escaped="${docker_version_dot_escaped//+/\\+}" docker_version_dot_plus_escaped="${docker_version_dot_escaped//+/\\+}"
@ -175,86 +194,90 @@ if type docker > /dev/null 2>&1; then
echo "Docker / Moby CLI already installed." echo "Docker / Moby CLI already installed."
else else
if [ "${USE_MOBY}" = "true" ]; then if [ "${USE_MOBY}" = "true" ]; then
buildx=() apt-get -y install --no-install-recommends moby-cli${cli_version_suffix} moby-buildx
if [ "${INSTALL_DOCKER_BUILDX}" = "true" ]; then
buildx=(moby-buildx)
fi
apt-get -y install --no-install-recommends ${cli_package_name}${cli_version_suffix} "${buildx[@]}"
apt-get -y install --no-install-recommends moby-compose || echo "(*) Package moby-compose (Docker Compose v2) not available for OS ${ID} ${VERSION_CODENAME} (${architecture}). Skipping." apt-get -y install --no-install-recommends moby-compose || echo "(*) Package moby-compose (Docker Compose v2) not available for OS ${ID} ${VERSION_CODENAME} (${architecture}). Skipping."
else else
buildx=() apt-get -y install --no-install-recommends docker-ce-cli${cli_version_suffix}
if [ "${INSTALL_DOCKER_BUILDX}" = "true" ]; then
buildx=(docker-buildx-plugin)
fi
apt-get -y install --no-install-recommends ${cli_package_name}${cli_version_suffix} "${buildx[@]}" docker-compose-plugin
buildx_path="/usr/libexec/docker/cli-plugins/docker-buildx"
# Older versions of Docker CE installs buildx as part of the CLI package
if [ "${INSTALL_DOCKER_BUILDX}" = "false" ] && [ -f "${buildx_path}" ]; then
echo "(*) Removing docker-buildx installed from docker-ce-cli since installDockerBuildx is disabled..."
rm -f "${buildx_path}"
fi
fi fi
unset buildx buildx_path
fi fi
# If 'docker-compose' command is to be included # Install Docker Compose if not already installed and is on a supported architecture
if [ "${DOCKER_DASH_COMPOSE_VERSION}" != "none" ]; then if type docker-compose > /dev/null 2>&1; then
# Install Docker Compose if not already installed and is on a supported architecture echo "Docker Compose already installed."
if type docker-compose > /dev/null 2>&1; then else
echo "Docker Compose already installed." TARGET_COMPOSE_ARCH="$(uname -m)"
elif [ "${DOCKER_DASH_COMPOSE_VERSION}" = "v1" ]; then if [ "${TARGET_COMPOSE_ARCH}" = "amd64" ]; then
TARGET_COMPOSE_ARCH="$(uname -m)" TARGET_COMPOSE_ARCH="x86_64"
if [ "${TARGET_COMPOSE_ARCH}" = "amd64" ]; then
TARGET_COMPOSE_ARCH="x86_64"
fi
if [ "${TARGET_COMPOSE_ARCH}" != "x86_64" ]; then
# Use pip to get a version that runs on this architecture
check_packages python3-minimal python3-pip libffi-dev python3-venv
export PIPX_HOME=/usr/local/pipx
mkdir -p ${PIPX_HOME}
export PIPX_BIN_DIR=/usr/local/bin
export PYTHONUSERBASE=/tmp/pip-tmp
export PIP_CACHE_DIR=/tmp/pip-tmp/cache
pipx_bin=pipx
if ! type pipx > /dev/null 2>&1; then
pip3 install --disable-pip-version-check --no-cache-dir --user pipx
pipx_bin=/tmp/pip-tmp/bin/pipx
fi
${pipx_bin} install --pip-args '--no-cache-dir --force-reinstall' docker-compose
rm -rf /tmp/pip-tmp
else
compose_v1_version="1"
find_version_from_git_tags compose_v1_version "https://github.com/docker/compose" "tags/"
echo "(*) Installing docker-compose ${compose_v1_version}..."
curl -fsSL "https://github.com/docker/compose/releases/download/${compose_v1_version}/docker-compose-Linux-x86_64" -o /usr/local/bin/docker-compose
chmod +x /usr/local/bin/docker-compose
fi
else
echo "(*) Installing compose-switch as docker-compose..."
compose_switch_version="latest"
find_version_from_git_tags compose_switch_version "https://github.com/docker/compose-switch"
curl -fsSL "https://github.com/docker/compose-switch/releases/download/v${compose_switch_version}/docker-compose-linux-${architecture}" -o /usr/local/bin/docker-compose
chmod +x /usr/local/bin/docker-compose
# TODO: Verify checksum once available: https://github.com/docker/compose-switch/issues/11
fi fi
if [ "${TARGET_COMPOSE_ARCH}" != "x86_64" ]; then
# Use pip to get a version that runs on this architecture
check_packages python3-minimal python3-pip libffi-dev python3-venv
export PIPX_HOME=/usr/local/pipx
mkdir -p ${PIPX_HOME}
export PIPX_BIN_DIR=/usr/local/bin
export PYTHONUSERBASE=/tmp/pip-tmp
export PIP_CACHE_DIR=/tmp/pip-tmp/cache
pipx_bin=pipx
if ! type pipx > /dev/null 2>&1; then
pip3 install --disable-pip-version-check --no-cache-dir --user pipx
pipx_bin=/tmp/pip-tmp/bin/pipx
fi
${pipx_bin} install --pip-args '--no-cache-dir --force-reinstall' docker-compose
rm -rf /tmp/pip-tmp
else
compose_v1_version="1"
find_version_from_git_tags compose_v1_version "https://github.com/docker/compose" "tags/"
echo "(*) Installing docker-compose ${compose_v1_version}..."
curl -fsSL "https://github.com/docker/compose/releases/download/${compose_v1_version}/docker-compose-Linux-x86_64" -o /usr/local/bin/docker-compose
chmod +x /usr/local/bin/docker-compose
fi
fi
# Install docker-compose switch if not already installed - https://github.com/docker/compose-switch#manual-installation
current_v1_compose_path="$(which docker-compose)"
target_v1_compose_path="$(dirname "${current_v1_compose_path}")/docker-compose-v1"
if ! type compose-switch > /dev/null 2>&1; then
echo "(*) Installing compose-switch..."
compose_switch_version="latest"
find_version_from_git_tags compose_switch_version "https://github.com/docker/compose-switch"
curl -fsSL "https://github.com/docker/compose-switch/releases/download/v${compose_switch_version}/docker-compose-linux-${architecture}" -o /usr/local/bin/compose-switch
chmod +x /usr/local/bin/compose-switch
# TODO: Verify checksum once available: https://github.com/docker/compose-switch/issues/11
# Setup v1 CLI as alternative in addition to compose-switch (which maps to v2)
mv "${current_v1_compose_path}" "${target_v1_compose_path}"
update-alternatives --install /usr/local/bin/docker-compose docker-compose /usr/local/bin/compose-switch 99
update-alternatives --install /usr/local/bin/docker-compose docker-compose "${target_v1_compose_path}" 1
fi
if [ "${DOCKER_DASH_COMPOSE_VERSION}" = "v1" ]; then
update-alternatives --set docker-compose "${target_v1_compose_path}"
else
update-alternatives --set docker-compose /usr/local/bin/compose-switch
fi fi
# Setup a docker group in the event the docker socket's group is not root # Setup a docker group in the event the docker socket's group is not root
if ! grep -qE '^docker:' /etc/group; then if ! grep -qE '^docker:' /etc/group; then
echo "(*) Creating missing docker group..."
groupadd --system docker groupadd --system docker
fi fi
# Remarking this out to restore functionality in Azure VMs. ID 999 is a reserved group ID
# Ensure docker group gid is 999
# if [ "$(getent group docker | cut -d: -f3)" != "999" ]; then
# echo "(*) Updating docker group gid to 999..."
# groupmod -g 999 docker
# fi
usermod -aG docker "${USERNAME}" usermod -aG docker "${USERNAME}"
if [ "${INSTALL_DOCKER_BUILDX}" = "true" ]; then
buildx_version="latest"
find_version_from_git_tags buildx_version "https://github.com/docker/buildx" "refs/tags/v"
echo "(*) Installing buildx ${buildx_version}..."
buildx_file_name="buildx-v${buildx_version}.linux-${architecture}"
cd /tmp && wget "https://github.com/docker/buildx/releases/download/v${buildx_version}/${buildx_file_name}"
mkdir -p ${_REMOTE_USER_HOME}/.docker/cli-plugins
mv ${buildx_file_name} ${_REMOTE_USER_HOME}/.docker/cli-plugins/docker-buildx
chmod +x ${_REMOTE_USER_HOME}/.docker/cli-plugins/docker-buildx
chown -R "${USERNAME}:docker" "${_REMOTE_USER_HOME}/.docker"
chmod -R g+r+w "${_REMOTE_USER_HOME}/.docker"
find "${_REMOTE_USER_HOME}/.docker" -type d -print0 | xargs -n 1 -0 chmod g+s
fi
# If init file already exists, exit # If init file already exists, exit
if [ -f "/usr/local/share/docker-init.sh" ]; then if [ -f "/usr/local/share/docker-init.sh" ]; then
# Clean up # Clean up
@ -281,10 +304,10 @@ fi
DOCKER_GID="$(grep -oP '^docker:x:\K[^:]+' /etc/group)" DOCKER_GID="$(grep -oP '^docker:x:\K[^:]+' /etc/group)"
# If enabling non-root access and specified user is found, setup socat and add script # If enabling non-root access and specified user is found, setup socat and add script
chown -h "${USERNAME}":root "${TARGET_SOCKET}" chown -h "${USERNAME}":root "${TARGET_SOCKET}"
check_packages socat check_packages socat
tee /usr/local/share/docker-init.sh > /dev/null \ tee /usr/local/share/docker-init.sh > /dev/null \
<< EOF << EOF
#!/usr/bin/env bash #!/usr/bin/env bash
#------------------------------------------------------------------------------------------------------------- #-------------------------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved. # Copyright (c) Microsoft Corporation. All rights reserved.
@ -316,8 +339,8 @@ log()
echo -e "\n** \$(date) **" | sudoIf tee -a \${SOCAT_LOG} > /dev/null echo -e "\n** \$(date) **" | sudoIf tee -a \${SOCAT_LOG} > /dev/null
log "Ensuring ${USERNAME} has access to ${SOURCE_SOCKET} via ${TARGET_SOCKET}" log "Ensuring ${USERNAME} has access to ${SOURCE_SOCKET} via ${TARGET_SOCKET}"
# If enabled, try to update the docker group with the right GID. If the group is root, # If enabled, try to update the docker group with the right GID. If the group is root,
# fall back on using socat to forward the docker socket to another unix socket so # fall back on using socat to forward the docker socket to another unix socket so
# that we can set permissions on it without affecting the host. # that we can set permissions on it without affecting the host.
if [ "${ENABLE_NONROOT_DOCKER}" = "true" ] && [ "${SOURCE_SOCKET}" != "${TARGET_SOCKET}" ] && [ "${USERNAME}" != "root" ] && [ "${USERNAME}" != "0" ]; then if [ "${ENABLE_NONROOT_DOCKER}" = "true" ] && [ "${SOURCE_SOCKET}" != "${TARGET_SOCKET}" ] && [ "${USERNAME}" != "root" ] && [ "${USERNAME}" != "0" ]; then
SOCKET_GID=\$(stat -c '%g' ${SOURCE_SOCKET}) SOCKET_GID=\$(stat -c '%g' ${SOURCE_SOCKET})
@ -337,7 +360,7 @@ if [ "${ENABLE_NONROOT_DOCKER}" = "true" ] && [ "${SOURCE_SOCKET}" != "${TARGET_
log "Success" log "Success"
fi fi
# Execute whatever commands were passed in (if any). This allows us # Execute whatever commands were passed in (if any). This allows us
# to set this script to ENTRYPOINT while still executing the default CMD. # to set this script to ENTRYPOINT while still executing the default CMD.
set +e set +e
exec "\$@" exec "\$@"
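For context, the socat fallback described above boils down to relaying the bind-mounted host socket onto a new socket whose ownership can be changed freely. A standalone sketch (the socket paths and the `vscode` user are assumptions matching the script's defaults):

```bash
# Relay /var/run/docker-host.sock (forwarded from the host) to /var/run/docker.sock,
# owned by the container user so the Docker CLI works without root.
sudo socat \
    UNIX-LISTEN:/var/run/docker.sock,fork,mode=660,user=vscode,backlog=128 \
    UNIX-CONNECT:/var/run/docker-host.sock &
docker version   # the CLI now talks to the relayed socket
```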

View file

@ -1,71 +1,4 @@
## Configuration examples
Installing only the latest .NET SDK version (the default).
``` json
{
"features": {
"ghcr.io/devcontainers/features/dotnet:2": "latest" // or "" or {}
}
}
```
Installing an additional SDK version. Multiple versions can be specified as comma-separated values.
``` json
{
"features": {
"ghcr.io/devcontainers/features/dotnet:2": {
"additionalVersions": "lts"
}
}
}
```
Installing specific SDK versions.
``` json
{
"features": {
"ghcr.io/devcontainers/features/dotnet:2": {
"version": "6.0",
"additionalVersions": "7.0, 8.0"
}
}
}
```
Installing a specific SDK feature band.
``` json
{
"features": {
"ghcr.io/devcontainers/features/dotnet:2": {
"version": "6.0.4xx",
}
}
```
Installing a specific SDK patch version.
``` json
{
"features": {
"ghcr.io/devcontainers/features/dotnet:2": {
"version": "6.0.412",
}
}
```
Installing only the .NET Runtime or the ASP.NET Core Runtime. (The SDK includes all runtimes so this configuration is only useful if you need to run .NET apps without building them from source.)
``` json
{
"features": {
"ghcr.io/devcontainers/features/dotnet:2": {
"version": "none",
"dotnetRuntimeVersions": "latest, lts",
"aspnetCoreRuntimeVersions": "latest, lts",
}
}
```
## OS Support ## OS Support

View file

@ -1,13 +1,13 @@
# Dotnet CLI (dotnet) # Dotnet CLI (dotnet)
This Feature installs the latest .NET SDK, which includes the .NET CLI and the shared runtime. Options are provided to choose a different version or additional versions. Installs the .NET CLI. Provides option of installing sdk or runtime, and option of versions to install. Uses latest version of .NET sdk as defaults to install.
## Example Usage ## Example Usage
```json ```json
"features": { "features": {
"ghcr.io/devcontainers/features/dotnet:2": {} "ghcr.io/devcontainers/features/dotnet:1": {}
} }
``` ```
@ -15,85 +15,11 @@ This Feature installs the latest .NET SDK, which includes the .NET CLI and the s
| Options Id | Description | Type | Default Value | | Options Id | Description | Type | Default Value |
|-----|-----|-----|-----| |-----|-----|-----|-----|
| version | Select or enter a .NET SDK version. Use 'latest' for the latest version, 'lts' for the latest LTS version, 'X.Y' or 'X.Y.Z' for a specific version. | string | latest | | version | Select or enter a dotnet CLI version. (Available versions may vary by Linux distribution.) | string | latest |
| additionalVersions | Enter additional .NET SDK versions, separated by commas. Use 'latest' for the latest version, 'lts' for the latest LTS version, 'X.Y' or 'X.Y.Z' for a specific version. | string | - | | runtimeOnly | Install just the dotnet runtime if true, and sdk if false. | boolean | false |
| dotnetRuntimeVersions | Enter additional .NET runtime versions, separated by commas. Use 'latest' for the latest version, 'lts' for the latest LTS version, 'X.Y' or 'X.Y.Z' for a specific version. | string | - | | installUsingApt | If true, it installs using apt instead of the release URL | boolean | true |
| aspNetCoreRuntimeVersions | Enter additional ASP.NET Core runtime versions, separated by commas. Use 'latest' for the latest version, 'lts' for the latest LTS version, 'X.Y' or 'X.Y.Z' for a specific version. | string | - |
## Customizations
### VS Code Extensions
- `ms-dotnettools.csharp`
## Configuration examples
Installing only the latest .NET SDK version (the default).
``` json
{
"features": {
"ghcr.io/devcontainers/features/dotnet:2": "latest" // or "" or {}
}
}
```
Installing an additional SDK version. Multiple versions can be specified as comma-separated values.
``` json
{
"features": {
"ghcr.io/devcontainers/features/dotnet:2": {
"additionalVersions": "lts"
}
}
}
```
Installing specific SDK versions.
``` json
{
"features": {
"ghcr.io/devcontainers/features/dotnet:2": {
"version": "6.0",
"additionalVersions": "7.0, 8.0"
}
}
}
```
Installing a specific SDK feature band.
``` json
{
"features": {
"ghcr.io/devcontainers/features/dotnet:2": {
"version": "6.0.4xx",
}
}
```
Installing a specific SDK patch version.
``` json
{
"features": {
"ghcr.io/devcontainers/features/dotnet:2": {
"version": "6.0.412",
}
}
```
Installing only the .NET Runtime or the ASP.NET Core Runtime. (The SDK includes all runtimes so this configuration is only useful if you need to run .NET apps without building them from source.)
``` json
{
"features": {
"ghcr.io/devcontainers/features/dotnet:2": {
"version": "none",
"dotnetRuntimeVersions": "latest, lts",
"aspnetCoreRuntimeVersions": "latest, lts",
}
}
```
## OS Support ## OS Support

View file

@ -1,44 +1,35 @@
{ {
"id": "dotnet", "id": "dotnet",
"version": "2.0.2", "version": "1.1.3",
"name": "Dotnet CLI", "name": "Dotnet CLI",
"documentationURL": "https://github.com/devcontainers/features/tree/main/src/dotnet", "documentationURL": "https://github.com/devcontainers/features/tree/main/src/dotnet",
"description": "This Feature installs the latest .NET SDK, which includes the .NET CLI and the shared runtime. Options are provided to choose a different version or additional versions.", "description": "Installs the .NET CLI. Provides option of installing sdk or runtime, and option of versions to install. Uses latest version of .NET sdk as defaults to install.",
"options": { "options": {
"version": { "version": {
"type": "string", "type": "string",
"proposals": [ "proposals": [
"latest", "latest",
"lts", "7",
"none", "6",
"8.0", "3.1"
"7.0",
"6.0"
], ],
"default": "latest", "default": "latest",
"description": "Select or enter a .NET SDK version. Use 'latest' for the latest version, 'lts' for the latest LTS version, 'X.Y' or 'X.Y.Z' for a specific version." "description": "Select or enter a dotnet CLI version. (Available versions may vary by Linux distribution.)"
}, },
"additionalVersions": { "runtimeOnly": {
"type": "string", "type": "boolean",
"default": "", "default": false,
"description": "Enter additional .NET SDK versions, separated by commas. Use 'latest' for the latest version, 'lts' for the latest LTS version, 'X.Y' or 'X.Y.Z' for a specific version." "description": "Install just the dotnet runtime if true, and sdk if false."
}, },
"dotnetRuntimeVersions": { "installUsingApt": {
"type": "string", "type": "boolean",
"default": "", "default": true,
"description": "Enter additional .NET runtime versions, separated by commas. Use 'latest' for the latest version, 'lts' for the latest LTS version, 'X.Y' or 'X.Y.Z' for a specific version." "description": "If true, it installs using apt instead of the release URL"
},
"aspNetCoreRuntimeVersions": {
"type": "string",
"default": "",
"description": "Enter additional ASP.NET Core runtime versions, separated by commas. Use 'latest' for the latest version, 'lts' for the latest LTS version, 'X.Y' or 'X.Y.Z' for a specific version."
} }
}, },
"containerEnv": { "containerEnv": {
"DOTNET_ROOT": "/usr/share/dotnet", "DOTNET_ROOT": "/usr/local/dotnet/current",
"PATH": "$PATH:$DOTNET_ROOT:~/.dotnet/tools", "PATH": "/usr/local/dotnet/current:${PATH}"
"DOTNET_RUNNING_IN_CONTAINER": "true",
"DOTNET_USE_POLLING_FILE_WATCHER": "true"
}, },
"customizations": { "customizations": {
"vscode": { "vscode": {

View file

@ -4,21 +4,37 @@
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. # Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
#------------------------------------------------------------------------------------------------------------- #-------------------------------------------------------------------------------------------------------------
# #
# Docs: https://github.com/devcontainers/features/tree/main/src/dotnet # Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/dotnet.md
# Maintainer: The Dev Container spec maintainers # Maintainer: The VS Code and Codespaces Teams
DOTNET_VERSION="${VERSION:-"latest"}" DOTNET_VERSION="${VERSION:-"latest"}"
ADDITIONAL_VERSIONS="${ADDITIONALVERSIONS}" DOTNET_RUNTIME_ONLY="${RUNTIMEONLY:-"false"}"
DOTNET_RUNTIME_VERSIONS="${DOTNETRUNTIMEVERSIONS}" OVERRIDE_DEFAULT_VERSION="${OVERRIDEDEFAULTVERSION:-"true"}"
ASPNETCORE_RUNTIME_VERSIONS="${ASPNETCORERUNTIMEVERSIONS}" INSTALL_USING_APT="${INSTALLUSINGAPT:-"true"}"
DOTNET_LATEST="7"
DOTNET_LTS="6"
USERNAME="${USERNAME:-"${_REMOTE_USER:-"automatic"}"}"
UPDATE_RC="${UPDATE_RC:-"true"}"
TARGET_DOTNET_ROOT="${TARGET_DOTNET_ROOT:-"/usr/local/dotnet"}"
ACCESS_GROUP="${ACCESS_GROUP:-"dotnet"}"
MICROSOFT_GPG_KEYS_URI="https://packages.microsoft.com/keys/microsoft.asc"
DOTNET_ARCHIVE_ARCHITECTURES="amd64"
DOTNET_ARCHIVE_VERSION_CODENAMES="buster bullseye bionic focal hirsute jammy"
# Feed URI sourced from the official dotnet-install.sh
# https://github.com/dotnet/install-scripts/blob/1b98b94a6f6d81cc4845eb88e0195fac67caa0a6/src/dotnet-install.sh#L1342-L1343
DOTNET_CDN_FEED_URI="https://dotnetcli.azureedge.net"
# Ubuntu 22.04 and on do not ship with libssl1.1, which is required for versions of .NET < 6.0
DOTNET_VERSION_CODENAMES_REQUIRE_OLDER_LIBSSL_1="buster bullseye bionic focal hirsute"
# Comma-separated list of dotnet versions to be installed
# alongside DOTNET_VERSION, but not set as default.
ADDITIONAL_VERSIONS=${ADDITIONALVERSIONS:-""}
set -e set -e
# Import trim_whitespace and split_csv
source "scripts/string-helpers.sh"
# Import install_sdk and install_runtime
source "scripts/dotnet-helpers.sh"
# Clean up # Clean up
rm -rf /var/lib/apt/lists/* rm -rf /var/lib/apt/lists/*
@ -27,14 +43,72 @@ err() {
echo "(!) $*" >&2 echo "(!) $*" >&2
} }
apt_get_update() { # Ensure the appropriate root user is running the script.
if [ "$(id -u)" -ne 0 ]; then
err 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
exit 1
fi
# Ensure that login shells get the correct path if the user updated the PATH using ENV.
rm -f /etc/profile.d/00-restore-env.sh
echo "export PATH=${PATH//$(sh -lc 'echo $PATH')/\$PATH}" > /etc/profile.d/00-restore-env.sh
chmod +x /etc/profile.d/00-restore-env.sh
# Determine the appropriate non-root user.
if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then
USERNAME=""
POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)")
for CURRENT_USER in "${POSSIBLE_USERS[@]}"; do
if id -u "${CURRENT_USER}" > /dev/null 2>&1; then
USERNAME="${CURRENT_USER}"
break
fi
done
if [ "${USERNAME}" = "" ]; then
USERNAME=root
fi
elif [ "${USERNAME}" = "none" ] || ! id -u ${USERNAME} > /dev/null 2>&1; then
USERNAME=root
fi
###################
# Helper Functions
###################
# Cleanup temporary directory and associated files when exiting the script.
cleanup() {
EXIT_CODE=$?
set +e
if [[ -n "${TMP_DIR}" ]]; then
echo "Executing cleanup of tmp files"
rm -Rf "${TMP_DIR}"
fi
exit $EXIT_CODE
}
trap cleanup EXIT
# Add dotnet directory to PATH in bashrc/zshrc files if OVERRIDE_DEFAULT_VERSION=true.
updaterc() {
if [ "${UPDATE_RC}" = "true" ]; then
echo "Updating /etc/bash.bashrc and /etc/zsh/zshrc..."
if [[ "$(cat /etc/bash.bashrc)" != *"$1"* ]]; then
echo -e "$1" >> /etc/bash.bashrc
fi
if [ -f "/etc/zsh/zshrc" ] && [[ "$(cat /etc/zsh/zshrc)" != *"$1"* ]]; then
echo -e "$1" >> /etc/zsh/zshrc
fi
fi
}
apt_get_update()
{
if [ "$(find /var/lib/apt/lists/* | wc -l)" = "0" ]; then if [ "$(find /var/lib/apt/lists/* | wc -l)" = "0" ]; then
echo "Running apt-get update..." echo "Running apt-get update..."
apt-get update -y apt-get update -y
fi fi
} }
# Checks if packages are installed and installs them if not # Check if packages are installed and installs them if not.
check_packages() { check_packages() {
if ! dpkg -s "$@" > /dev/null 2>&1; then if ! dpkg -s "$@" > /dev/null 2>&1; then
apt_get_update apt_get_update
@ -42,77 +116,353 @@ check_packages() {
fi fi
} }
if [ "$(id -u)" -ne 0 ]; then # Get appropriate architecture name for .NET binaries for the target OS
err 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.' get_architecture_name_for_target_os() {
local architecture
architecture="$(uname -m)"
case $architecture in
x86_64) architecture="x64";;
aarch64 | armv8*) architecture="arm64";;
*) err "Architecture ${architecture} unsupported"; exit 1 ;;
esac
echo "${architecture}"
}
# Soft version matching that resolves a version for a given package in the *current apt-cache*
# Return value is stored in first argument (the unprocessed version)
apt_cache_package_and_version_soft_match() {
# Version
local version_variable_name="$1"
local requested_version=${!version_variable_name}
# Package Name
local package_variable_name="$2"
local partial_package_name=${!package_variable_name}
local package_name
# Exit on no match?
local exit_on_no_match="${3:-true}"
local major_minor_version
# Ensure we've exported useful variables
. /etc/os-release
local architecture="$(dpkg --print-architecture)"
major_minor_version="$(echo "${requested_version}" | cut -d "." --field=1,2)"
package_name="$(apt-cache search "${partial_package_name}-[0-9].[0-9]" | awk -F" - " '{print $1}' | grep -m 1 "${partial_package_name}-${major_minor_version}")"
dot_escaped="${requested_version//./\\.}"
dot_plus_escaped="${dot_escaped//+/\\+}"
# Regex needs to handle debian package version number format: https://www.systutorials.com/docs/linux/man/5-deb-version/
version_regex="^(.+:)?${dot_plus_escaped}([\\.\\+ ~:-]|$)"
set +e # Don't exit if finding version fails - handle gracefully
fuzzy_version="$(apt-cache madison ${package_name} | awk -F"|" '{print $2}' | sed -e 's/^[ \t]*//' | grep -E -m 1 "${version_regex}")"
set -e
if [ -z "${fuzzy_version}" ]; then
echo "(!) No full or partial for package \"${partial_package_name}\" (resolved: \"${package_name}\") match found in apt-cache for \"${requested_version}\" on OS ${ID} ${VERSION_CODENAME} (${architecture})."
if $exit_on_no_match; then
echo "Available versions:"
apt-cache madison ${package_name} | awk -F"|" '{print $2}' | grep -oP '^(.+:)?\K.+'
exit 1 # Fail entire script
else
echo "Continuing to fallback method if available"
return 1;
fi
fi
# Globally assign fuzzy_version to this value
# Use this value as the return value of this function
declare -g ${version_variable_name}="=${fuzzy_version}"
echo "${version_variable_name} ${!version_variable_name}"
# Globally assign package to this value
# Use this value as the return value of this function
declare -g ${package_variable_name}="${package_name}"
echo "${package_variable_name} ${!package_variable_name}"
}
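A hedged usage sketch of this soft-match helper, mirroring how the script calls it further down (the values are illustrative):

```bash
APT_DOTNET_VERSION="6.0"       # requested version, possibly partial
DOTNET_PACKAGE="dotnet-sdk"    # partial package name to resolve
if apt_cache_package_and_version_soft_match APT_DOTNET_VERSION DOTNET_PACKAGE false; then
    # On success the variables now hold e.g. "dotnet-sdk-6.0" and "=6.0.xxx-1"
    apt-get install -yq "${DOTNET_PACKAGE}${APT_DOTNET_VERSION}"
fi
```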
# Install .NET CLI using apt-get package installer
install_using_apt() {
local sdk_or_runtime="$1"
local target_dotnet_version="$2"
local use_msft_repo="$3"
if [ "${use_msft_repo}" = "true" ]; then
# Install dependencies
check_packages apt-transport-https curl ca-certificates gnupg2 dirmngr
# Import key safely and import Microsoft apt repo
curl -sSL ${MICROSOFT_GPG_KEYS_URI} | gpg --dearmor > /usr/share/keyrings/microsoft-archive-keyring.gpg
echo "deb [arch=${architecture} signed-by=/usr/share/keyrings/microsoft-archive-keyring.gpg] https://packages.microsoft.com/repos/microsoft-${ID}-${VERSION_CODENAME}-prod ${VERSION_CODENAME} main" > /etc/apt/sources.list.d/microsoft.list
apt-get update -y
fi
# .NET 7 is not a LTS version, so handle latest and LTS versions differently
export APT_DOTNET_VERSION="$target_dotnet_version"
if [ "${APT_DOTNET_VERSION}" = "latest" ]; then
APT_DOTNET_VERSION="${DOTNET_LATEST}.0"
elif [ "${APT_DOTNET_VERSION}" = "lts" ]; then
APT_DOTNET_VERSION="${DOTNET_LTS}.0"
fi
# Sets target_dotnet_version and dotnet_package if matches found.
local base_dotnet_package="dotnet-${sdk_or_runtime}"
export DOTNET_PACKAGE="${base_dotnet_package}"
apt_cache_package_and_version_soft_match APT_DOTNET_VERSION DOTNET_PACKAGE false
if [ "$?" != 0 ] || [ ${DOTNET_PACKAGE} == "${base_dotnet_package}" ]; then
echo "Failed to find requested version."
return 1
fi
if type dotnet > /dev/null 2>&1 && [[ "$(dotnet --version)" == *"${APT_DOTNET_VERSION}"* ]] ; then
echo "dotnet version ${APT_DOTNET_VERSION} is already installed"
return 1
fi
echo "Installing '${DOTNET_PACKAGE}${APT_DOTNET_VERSION}'..."
apt-get install -yq ${DOTNET_PACKAGE}${APT_DOTNET_VERSION}
if [ "$?" != 0 ]; then
echo "Failed to complete apt install of ${DOTNET_PACKAGE}${TARGET_DOTNET_VERSION}"
return 1
fi
# Add symlink for current
CURRENT_DIR="${TARGET_DOTNET_ROOT}/current"
if [[ -d "${CURRENT_DIR}" ]]; then
rm -rf "${CURRENT_DIR}"
fi
mkdir -p "${TARGET_DOTNET_ROOT}"
local dotnet_installed_version="$(dotnet --version)"
# See if its the distro version
if [[ "$(dotnet --info)" == *"Base Path: /usr/lib/dotnet/dotnet${dotnet_installed_version:0:1}-${dotnet_installed_version}"* ]]; then
ln -s "/usr/lib/dotnet/dotnet${dotnet_installed_version:0:1}" "${CURRENT_DIR}"
else
# Location used by MS repo versions
ln -s "/usr/share/dotnet" "${CURRENT_DIR}"
fi
}
# Find and extract .NET binary download details based on user-requested version
# args:
# sdk_or_runtime $1
# dotnet_version_to_download $2
# exports:
# DOTNET_DOWNLOAD_URL
# DOTNET_DOWNLOAD_HASH
# DOTNET_DOWNLOAD_NAME
get_full_version_details() {
local sdk_or_runtime="$1"
local architecture
local dotnet_channel_version
local dotnet_releases_url
local dotnet_releases_json
local dotnet_latest_version
local dotnet_download_details
export DOTNET_DOWNLOAD_VERSION="$2"
export DOTNET_DOWNLOAD_URL
export DOTNET_DOWNLOAD_HASH
export DOTNET_DOWNLOAD_NAME
# Set architecture variable to current user's architecture (x64 or ARM64).
architecture="$(get_architecture_name_for_target_os)"
# Set VERSION to empty string to ensure jq includes all .NET versions in reverse sort below
if [ "${DOTNET_DOWNLOAD_VERSION}" = "latest" ]; then
DOTNET_DOWNLOAD_VERSION=""
fi
dotnet_patchless_version="$(echo "${DOTNET_DOWNLOAD_VERSION}" | cut -d "." --field=1,2)"
set +e
dotnet_channel_version="$(curl -s "${DOTNET_CDN_FEED_URI}/dotnet/release-metadata/releases-index.json" | jq -r --arg channel_version "${dotnet_patchless_version}" '[."releases-index"[]] | sort_by(."channel-version") | reverse | map( select(."channel-version" | startswith($channel_version))) | first | ."channel-version"')"
set -e
# Construct the releases URL using the official channel-version if one was found. Otherwise make a best-effort using the user input.
if [ -n "${dotnet_channel_version}" ] && [ "${dotnet_channel_version}" != "null" ]; then
dotnet_releases_url="${DOTNET_CDN_FEED_URI}/dotnet/release-metadata/${dotnet_channel_version}/releases.json"
else
dotnet_releases_url="${DOTNET_CDN_FEED_URI}/dotnet/release-metadata/${dotnet_patchless_version}/releases.json"
fi
set +e
dotnet_releases_json="$(curl -s "${dotnet_releases_url}")"
set -e
if [ -n "${dotnet_releases_json}" ] && [[ ! "${dotnet_releases_json}" = *"Error"* ]]; then
dotnet_latest_version="$(echo "${dotnet_releases_json}" | jq -r --arg sdk_or_runtime "${sdk_or_runtime}" '."latest-\($sdk_or_runtime)"')"
# If user-specified version has 2 or more dots, use it as is. Otherwise use latest version.
if [ "$(echo "${DOTNET_DOWNLOAD_VERSION}" | grep -o "\." | wc -l)" -lt "2" ]; then
DOTNET_DOWNLOAD_VERSION="${dotnet_latest_version}"
fi
dotnet_download_details="$(echo "${dotnet_releases_json}" | jq -r --arg sdk_or_runtime "${sdk_or_runtime}" --arg dotnet_version "${DOTNET_DOWNLOAD_VERSION}" --arg arch "${architecture}" '.releases[]."\($sdk_or_runtime)" | select(.version==$dotnet_version) | .files[] | select(.name=="dotnet-\($sdk_or_runtime)-linux-\($arch).tar.gz")')"
if [ -n "${dotnet_download_details}" ]; then
echo "Found .NET binary version ${DOTNET_DOWNLOAD_VERSION}"
DOTNET_DOWNLOAD_URL="$(echo "${dotnet_download_details}" | jq -r '.url')"
DOTNET_DOWNLOAD_HASH="$(echo "${dotnet_download_details}" | jq -r '.hash')"
DOTNET_DOWNLOAD_NAME="$(echo "${dotnet_download_details}" | jq -r '.name')"
else
err "Unable to find .NET binary for version ${DOTNET_DOWNLOAD_VERSION}"
exit 1
fi
else
err "Unable to find .NET release details for version ${DOTNET_DOWNLOAD_VERSION} at ${dotnet_releases_url}"
exit 1
fi
}
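The metadata lookup above can be reproduced by hand. For example, resolving the latest SDK build for a channel uses the same feed URI and field names as the script:

```bash
channel="6.0"
curl -s "https://dotnetcli.azureedge.net/dotnet/release-metadata/${channel}/releases.json" \
    | jq -r '."latest-sdk"'
```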
# Install .NET CLI using the .NET releases url
install_using_dotnet_releases_url() {
local sdk_or_runtime="$1"
local version="$2"
# Check listed package dependencies and install them if they are not already installed.
# NOTE: icu-devtools is a small package with similar dependencies to .NET.
# It will install the appropriate dependencies based on the OS:
# - libgcc-s1 OR libgcc1 depending on OS
# - the latest libicuXX depending on OS (eg libicu57 for stretch)
# - also installs libc6 and libstdc++6 which are required by .NET
check_packages curl ca-certificates tar jq icu-devtools libgssapi-krb5-2 zlib1g
# Starting with Ubuntu 22.04 (jammy), libssl1.1 does not ship with the OS anymore.
if [[ "${DOTNET_VERSION_CODENAMES_REQUIRE_OLDER_LIBSSL_1}" = *"${VERSION_CODENAME}"* ]]; then
check_packages libssl1.1
else
check_packages libssl3
fi
get_full_version_details "${sdk_or_runtime}" "${version}"
DOTNET_INSTALL_PATH="${TARGET_DOTNET_ROOT}/${DOTNET_DOWNLOAD_VERSION}"
if [ -d "${DOTNET_INSTALL_PATH}" ]; then
echo "(!) Dotnet version ${DOTNET_DOWNLOAD_VERSION} already exists."
exit 1
fi
# exports DOTNET_DOWNLOAD_URL, DOTNET_DOWNLOAD_HASH, DOTNET_DOWNLOAD_NAME
echo "DOWNLOAD LINK: ${DOTNET_DOWNLOAD_URL}"
# Set up the access group and add the user to it.
umask 0002
if ! cat /etc/group | grep -e "^${ACCESS_GROUP}:" > /dev/null 2>&1; then
groupadd -r "${ACCESS_GROUP}"
fi
usermod -a -G "${ACCESS_GROUP}" "${USERNAME}"
# Download the .NET binaries.
echo "DOWNLOADING BINARY..."
TMP_DIR="/tmp/dotnetinstall"
mkdir -p "${TMP_DIR}"
curl -sSL "${DOTNET_DOWNLOAD_URL}" -o "${TMP_DIR}/${DOTNET_DOWNLOAD_NAME}"
# Validate the downloaded archive against the expected SHA-512 checksum
# (DOTNET_DOWNLOAD_HASH) published in the release metadata.
echo "VERIFY CHECKSUM"
cd "${TMP_DIR}"
echo "${DOTNET_DOWNLOAD_HASH} *${DOTNET_DOWNLOAD_NAME}" | sha512sum -c -
# Extract binaries and add to path.
mkdir -p "${DOTNET_INSTALL_PATH}"
echo "Extract Binary to ${DOTNET_INSTALL_PATH}"
tar -xzf "${TMP_DIR}/${DOTNET_DOWNLOAD_NAME}" -C "${DOTNET_INSTALL_PATH}" --strip-components=1
CURRENT_DIR="${TARGET_DOTNET_ROOT}/current"
if [[ ! -d "${CURRENT_DIR}" ]]; then
ln -s "${DOTNET_INSTALL_PATH}" "${CURRENT_DIR}"
fi
# Give write permissions to the user.
chown -R "${USERNAME}:${USERNAME}" "${CURRENT_DIR}"
chmod g+r+w+s "${CURRENT_DIR}"
chmod -R g+r+w "${CURRENT_DIR}"
if [[ "${OVERRIDE_DEFAULT_VERSION}" = "true" ]]; then
if [[ $(ls -l ${CURRENT_DIR}) != *"-> ${DOTNET_INSTALL_PATH}"* ]] ; then
rm "${CURRENT_DIR}"
ln -s "${DOTNET_INSTALL_PATH}" "${CURRENT_DIR}"
fi
fi
updaterc "if [[ \"\${PATH}\" != *\"${CURRENT_DIR}\"* ]]; then export PATH=${CURRENT_DIR}:\${PATH}; fi"
}
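To illustrate what this function produces, here is a rough sketch of the install root after installing a default and an additional version. Paths and version numbers are example values; the actual root comes from `TARGET_DOTNET_ROOT`:

```bash
# Illustrative only -- actual versions depend on the options passed to the Feature.
ls -l /usr/local/dotnet
# 6.0.412/                               additional version, reachable via its full path
# 7.0.306/                               default version
# current -> /usr/local/dotnet/7.0.306   symlink re-pointed when OVERRIDE_DEFAULT_VERSION=true
/usr/local/dotnet/current/dotnet --info  # what ends up on PATH via updaterc
```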
###########################
# Start .NET installation
###########################
export DEBIAN_FRONTEND=noninteractive
. /etc/os-release
architecture="$(dpkg --print-architecture)"
# Dotnet 3.1 and 5 are not supported on Ubuntu 22.04 (jammy)+,
# due to lack of libssl3.0 support.
# See: https://github.com/microsoft/vscode-dev-containers/issues/1458#issuecomment-1135077775
# NOTE: This will only guard against installation of the dotnet versions we propose via 'features'.
# The user can attempt to install any other version at their own risk.
if [[ "${DOTNET_VERSION}" = "3"* ]] || [[ "${DOTNET_VERSION}" = "5"* ]]; then
if [[ ! "${DOTNET_VERSION_CODENAMES_REQUIRE_OLDER_LIBSSL_1}" = *"${VERSION_CODENAME}"* ]]; then
err "Dotnet ${DOTNET_VERSION} is not supported on Ubuntu ${VERSION_CODENAME} due to a change in the 'libssl' dependency across distributions.\n Please upgrade your version of dotnet, or downgrade your OS version."
exit 1
fi
fi
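If you need to check a base image by hand, a quick way to see which libssl generation it can satisfy is the following (illustrative commands, not part of the script):

```bash
. /etc/os-release && echo "${VERSION_CODENAME}"
apt-get update > /dev/null
apt-cache policy libssl1.1 libssl3   # jammy and newer distributions only offer a libssl3 candidate
```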
# Determine if the user wants to download .NET Runtime only, or .NET SDK & Runtime
# and set the appropriate variables.
if [ "${DOTNET_RUNTIME_ONLY}" = "true" ]; then
DOTNET_SDK_OR_RUNTIME="runtime"
elif [ "${DOTNET_RUNTIME_ONLY}" = "false" ]; then
DOTNET_SDK_OR_RUNTIME="sdk"
else
err "Expected true for installing dotnet Runtime only or false for installing SDK and Runtime. Received ${DOTNET_RUNTIME_ONLY}."
exit 1
fi

# Install the .NET CLI
echo "(*) Installing .NET CLI..."
CHANGE_OWNERSHIP="false"
if [[ "${DOTNET_ARCHIVE_ARCHITECTURES}" = *"${architecture}"* ]] && [[ "${DOTNET_ARCHIVE_VERSION_CODENAMES}" = *"${VERSION_CODENAME}"* ]] && [[ "${INSTALL_USING_APT}" = "true" ]]; then
echo "Detected ${VERSION_CODENAME} on ${architecture}. Attempting to install dotnet from apt"
install_using_apt "${DOTNET_SDK_OR_RUNTIME}" "${DOTNET_VERSION}" false || install_using_apt "${DOTNET_SDK_OR_RUNTIME}" "${DOTNET_VERSION}" true
if [ "$?" != 0 ]; then
echo "Could not install requested version from apt on current distribution."
exit 1
fi
else
if [[ "${INSTALL_USING_APT}" = "false" ]]; then
echo "Installing dotnet from releases url"
else
echo "Attempting to install dotnet from releases url"
fi
install_using_dotnet_releases_url "${DOTNET_SDK_OR_RUNTIME}" "${DOTNET_VERSION}"
CHANGE_OWNERSHIP="true"
fi
# Additional dotnet versions to be installed but not be set as default.
if [ ! -z "${ADDITIONAL_VERSIONS}" ]; then
OLDIFS=$IFS
IFS=","
read -a additional_versions <<< "$ADDITIONAL_VERSIONS"
for version in "${additional_versions[@]}"; do
OVERRIDE_DEFAULT_VERSION="false"
install_using_dotnet_releases_url "${DOTNET_SDK_OR_RUNTIME}" "${version}"
done
IFS=$OLDIFS
fi
if [ "${CHANGE_OWNERSHIP}" = "true" ]; then
if ! cat /etc/group | grep -e "^dotnet:" > /dev/null 2>&1; then
groupadd -r dotnet
fi
usermod -a -G dotnet "${USERNAME}"
chown -R "${USERNAME}:dotnet" "${TARGET_DOTNET_ROOT}"
chmod -R g+r+w "${TARGET_DOTNET_ROOT}"
find "${TARGET_DOTNET_ROOT}" -type d -print0 | xargs -n 1 -0 chmod g+s
fi
# Clean up
rm -rf /var/lib/apt/lists/*
echo "Done!"

# For our own convenience, combine DOTNET_VERSION and ADDITIONAL_VERSIONS into a single 'versions' array
# The version can be set to 'none' for runtime-only installations, then the array will just remain empty
# Ensure there are no leading or trailing spaces that can break regex pattern matching
versions=()
if [ "$DOTNET_VERSION" != "none" ]; then
versions+=("$(trim_whitespace "$DOTNET_VERSION")")
for additional_version in $(split_csv "$ADDITIONAL_VERSIONS"); do
versions+=("$additional_version")
done
fi
dotnetRuntimeVersions=()
for dotnetRuntimeVersion in $(split_csv "$DOTNET_RUNTIME_VERSIONS"); do
dotnetRuntimeVersions+=("$dotnetRuntimeVersion")
done
aspNetCoreRuntimeVersions=()
for aspNetCoreRuntimeVersion in $(split_csv "$ASPNETCORE_RUNTIME_VERSIONS"); do
aspNetCoreRuntimeVersions+=("$aspNetCoreRuntimeVersion")
done
# Fail fast in case of bad input to avoid unnecessary work
# v1 of the .NET feature allowed specifying only a major version 'X' like '3'
# v2 removed this ability
# - because install-dotnet.sh does not support it directly
# - because the previous behavior installed an old version like '3.0.103', not the newest version '3.1.426', which was counterintuitive
for version in "${versions[@]}"; do
if [[ "$version" =~ ^[0-9]+$ ]]; then
err "Unsupported .NET SDK version '${version}'. Use 'latest' for the latest version, 'lts' for the latest LTS version, 'X.Y' or 'X.Y.Z' for a specific version."
exit 1
fi
done
for version in "${dotnetRuntimeVersions[@]}"; do
if [[ "$version" =~ ^[0-9]+$ ]]; then
err "Unsupported .NET Runtime version '${version}'. Use 'latest' for the latest version, 'lts' for the latest LTS version, 'X.Y' or 'X.Y.Z' for a specific version."
exit 1
fi
done
for version in "${aspNetCoreRuntimeVersions[@]}"; do
if [[ "$version" =~ ^[0-9]+$ ]]; then
err "Unsupported ASP.NET Core Runtime version '${version}'. Use 'latest' for the latest version, 'lts' for the latest LTS version, 'X.Y' or 'X.Y.Z' for a specific version."
exit 1
fi
done
# Install .NET versions and dependencies
# icu-devtools includes dependencies for .NET
check_packages wget ca-certificates icu-devtools
for version in "${versions[@]}"; do
install_sdk "$version"
done
for version in "${dotnetRuntimeVersions[@]}"; do
install_runtime "dotnet" "$version"
done
for version in "${aspNetCoreRuntimeVersions[@]}"; do
install_runtime "aspnetcore" "$version"
done
# Clean up
rm -rf /var/lib/apt/lists/*
rm -rf scripts
echo "Done!"
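After the Feature has run, a short smoke test inside the container verifies the result (illustrative; the versions listed depend on the options chosen):

```bash
command -v dotnet          # should resolve through the 'current' symlink added to PATH
dotnet --list-sdks         # SDKs installed under the target root
dotnet --list-runtimes     # includes Microsoft.AspNetCore.App entries when ASP.NET Core runtimes were requested
```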


@ -1,119 +0,0 @@
#!/bin/bash
#-------------------------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
#-------------------------------------------------------------------------------------------------------------
#
# Docs: https://github.com/devcontainers/features/tree/main/src/dotnet
# Maintainer: The Dev Container spec maintainers
DOTNET_SCRIPTS=$(dirname "${BASH_SOURCE[0]}")
DOTNET_INSTALL_SCRIPT="$DOTNET_SCRIPTS/vendor/dotnet-install.sh"
DOTNET_INSTALL_DIR='/usr/share/dotnet'
# Prints the latest dotnet version in the specified channel
# Usage: fetch_latest_version_in_channel <channel> [<runtime>]
# Example: fetch_latest_version_in_channel "LTS"
# Example: fetch_latest_version_in_channel "6.0" "dotnet"
# Example: fetch_latest_version_in_channel "6.0" "aspnetcore"
fetch_latest_version_in_channel() {
local channel="$1"
local runtime="$2"
if [ "$runtime" = "dotnet" ]; then
wget -qO- "https://dotnetcli.azureedge.net/dotnet/Runtime/$channel/latest.version"
elif [ "$runtime" = "aspnetcore" ]; then
wget -qO- "https://dotnetcli.azureedge.net/dotnet/aspnetcore/Runtime/$channel/latest.version"
else
wget -qO- "https://dotnetcli.azureedge.net/dotnet/Sdk/$channel/latest.version"
fi
}
# Prints the latest dotnet version
# Usage: fetch_latest_version [<runtime>]
# Example: fetch_latest_version
# Example: fetch_latest_version "dotnet"
# Example: fetch_latest_version "aspnetcore"
fetch_latest_version() {
local runtime="$1"
local sts_version
local lts_version
sts_version=$(fetch_latest_version_in_channel "STS" "$runtime")
lts_version=$(fetch_latest_version_in_channel "LTS" "$runtime")
if [[ "$sts_version" > "$lts_version" ]]; then
echo "$sts_version"
else
echo "$lts_version"
fi
}
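Hypothetical usage of the two helpers above, resolving a few channels to concrete version numbers:

```bash
latest_sdk="$(fetch_latest_version)"                              # newest of STS vs LTS
latest_lts_sdk="$(fetch_latest_version_in_channel "LTS")"
latest_aspnet_60="$(fetch_latest_version_in_channel "6.0" "aspnetcore")"
echo "sdk=${latest_sdk} lts-sdk=${latest_lts_sdk} aspnetcore-6.0=${latest_aspnet_60}"
```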
# Installs a version of the .NET SDK
# Usage: install_sdk <version>
install_sdk() {
local inputVersion="$1"
local version=""
local channel=""
if [[ "$inputVersion" == "latest" ]]; then
# Fetch the latest version manually, because dotnet-install.sh does not support it directly
version=$(fetch_latest_version)
elif [[ "$inputVersion" == "lts" ]]; then
# When user input is 'lts'
# Then version=latest, channel=LTS
version="latest"
channel="LTS"
elif [[ "$inputVersion" =~ ^[0-9]+\.[0-9]+$ ]]; then
# When user input is form 'A.B' like '3.1'
# Then version=latest, channel=3.1
version="latest"
channel="$inputVersion"
elif [[ "$inputVersion" =~ ^[0-9]+\.[0-9]+\.[0-9]xx$ ]]; then
# When user input is form 'A.B.Cxx' like '6.0.4xx'
# Then version=latest, channel=6.0.4xx
version="latest"
channel="$inputVersion"
else
# Assume version is an exact version string like '6.0.413' or '8.0.100-rc.2.23425.18'
version="$inputVersion"
fi
# Currently this script does not make it possible to qualify the version, 'GA' is always implied
echo "Executing $DOTNET_INSTALL_SCRIPT --version $version --channel $channel --install-dir $DOTNET_INSTALL_DIR --no-path"
"$DOTNET_INSTALL_SCRIPT" \
--version "$version" \
--channel "$channel" \
--install-dir "$DOTNET_INSTALL_DIR" \
--no-path
}
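For clarity, these are the argument combinations `install_sdk` ends up passing to `dotnet-install.sh` for a few representative inputs (the concrete number resolved for 'latest' is only an example):

```bash
install_sdk "latest"    # --version <resolved, e.g. 8.0.100> --channel ""
install_sdk "lts"       # --version latest --channel LTS
install_sdk "6.0"       # --version latest --channel 6.0
install_sdk "6.0.4xx"   # --version latest --channel 6.0.4xx
install_sdk "6.0.413"   # --version 6.0.413 --channel ""
```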
# Installs a version of the .NET Runtime
# Usage: install_runtime <runtime> <version>
install_runtime() {
local runtime="$1"
local inputVersion="$2"
local version=""
local channel=""
if [[ "$inputVersion" == "latest" ]]; then
# Fetch the latest version manually, because dotnet-install.sh does not support it directly
version=$(fetch_latest_version "$runtime")
elif [[ "$inputVersion" == "lts" ]]; then
# When user input is 'lts'
# Then version=latest, channel=LTS
version="latest"
channel="LTS"
elif [[ "$inputVersion" =~ ^[0-9]+\.[0-9]+$ ]]; then
# When user input is form 'A.B' like '3.1'
# Then version=latest, channel=3.1
version="latest"
channel="$inputVersion"
else
# Assume version is an exact version string like '6.0.21' or '8.0.0-preview.7.23375.6'
version="$inputVersion"
fi
echo "Executing $DOTNET_INSTALL_SCRIPT --runtime $runtime --version $version --channel $channel --install-dir $DOTNET_INSTALL_DIR --no-path"
"$DOTNET_INSTALL_SCRIPT" \
--runtime "$runtime" \
--version "$version" \
--channel "$channel" \
--install-dir "$DOTNET_INSTALL_DIR" \
--no-path
}
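And the runtime counterpart, shown with a couple of hypothetical calls:

```bash
install_runtime "dotnet" "6.0"          # latest 6.0 .NET runtime (--version latest --channel 6.0)
install_runtime "aspnetcore" "latest"   # newest ASP.NET Core runtime across STS/LTS
```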


@ -1,16 +0,0 @@
#!/bin/bash
#-------------------------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
#-------------------------------------------------------------------------------------------------------------
#
# Docs: https://github.com/devcontainers/features/tree/main/src/dotnet
# Maintainer: The Dev Container spec maintainers
#
# Run this script to replace dotnet-install.sh with the latest and greatest available version
#
DOTNET_SCRIPTS=$(dirname "${BASH_SOURCE[0]}")
DOTNET_INSTALL_SCRIPT="$DOTNET_SCRIPTS/vendor/dotnet-install.sh"
wget https://dot.net/v1/dotnet-install.sh -O "$DOTNET_INSTALL_SCRIPT"
chmod +x "$DOTNET_INSTALL_SCRIPT"


@ -1,42 +0,0 @@
#!/bin/bash
#-------------------------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
#-------------------------------------------------------------------------------------------------------------
#
# Docs: https://github.com/devcontainers/features/tree/main/src/dotnet
# Maintainer: The Dev Container spec maintainers
# Removes leading and trailing whitespace from an input string
# Usage: trim_whitespace <text>
trim_whitespace() {
text="$1"
# Remove leading spaces
while [ "${text:0:1}" == " " ]; do
text="${text:1}"
done
# Remove trailing spaces
while [ "${text: -1}" == " " ]; do
text="${text:0:-1}"
done
echo "$text"
}
# Splits comma-separated values into an array while ignoring empty entries
# Usage: split_csv <comma-separated-values>
split_csv() {
local -a values=()
while IFS="," read -ra entries; do
for entry in "${entries[@]}"; do
entry="$(trim_whitespace "$entry")"
if [ -n "$entry" ]; then
values+=("$entry")
fi
done
done <<< "$1"
echo "${values[@]}"
}
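Hypothetical calls showing how the two helpers behave together:

```bash
trim_whitespace "  7.0  "            # prints "7.0"
split_csv "6.0, 7.0,,8.0"            # prints "6.0 7.0 8.0" -- empty entries are dropped
for v in $(split_csv "6.0, 7.0"); do
    echo "requested version: ${v}"
done
```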


@ -1,27 +0,0 @@
### **IMPORTANT NOTE**
Scripts in this directory are sourced externally and not maintained by the Dev Container spec maintainers. Do not make changes directly as they might be overwritten at any moment.
## dotnet-install.sh
`dotnet-install.sh` is a copy of <https://dot.net/v1/dotnet-install.sh>. ([Script reference](https://learn.microsoft.com/en-us/dotnet/core/tools/dotnet-install-script))
Quick options reminder for `dotnet-install.sh`:
- `--version`: `"latest"` (default) or an exact version in the form A.B.C like `"6.0.413"`
- `--channel`: `"LTS"` (default), `"STS"`, a two-part version in the form A.B like `"6.0"` or three-part form A.B.Cxx like `"6.0.1xx"`
- `--quality`: `"daily"`, `"preview"` or `"GA"`
- The channel option is only used when version is 'latest' because an exact version overrides the channel option
- The quality option is only used when channel is 'A.B' or 'A.B.Cxx' because it can't be used with STS or LTS
Examples
```
dotnet-install.sh [--version latest] [--channel LTS]
dotnet-install.sh [--version latest] --channel STS
dotnet-install.sh [--version latest] --channel 6.0 [--quality GA]
dotnet-install.sh [--version latest] --channel 6.0.4xx [--quality GA]
dotnet-install.sh [--version latest] --channel 8.0 --quality preview
dotnet-install.sh [--version latest] --channel 8.0 --quality daily
dotnet-install.sh --version 6.0.413
```
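When in doubt about what a given combination resolves to, the script's dry-run mode prints the download link without installing anything (assuming the vendored copy is recent enough to support `--dry-run`):

```bash
./dotnet-install.sh --channel 6.0 --quality GA --dry-run
./dotnet-install.sh --version 6.0.413 --dry-run
```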



@ -16,7 +16,6 @@ Installs Git Large File Support (Git LFS) along with needed dependencies. Useful
| Options Id | Description | Type | Default Value |
|-----|-----|-----|-----|
| version | Select version of Git LFS to install | string | latest |
| autoPull | Automatically pull LFS files when creating the container. When false, running 'git lfs pull' in the container will have the same effect. | boolean | true |


@ -1,6 +1,6 @@
{ {
"id": "git-lfs", "id": "git-lfs",
"version": "1.1.1", "version": "1.0.7",
"name": "Git Large File Support (LFS)", "name": "Git Large File Support (LFS)",
"documentationURL": "https://github.com/devcontainers/features/tree/main/src/git-lfs", "documentationURL": "https://github.com/devcontainers/features/tree/main/src/git-lfs",
"description": "Installs Git Large File Support (Git LFS) along with needed dependencies. Useful for base Dockerfiles that often are missing required install dependencies like git and curl.", "description": "Installs Git Large File Support (Git LFS) along with needed dependencies. Useful for base Dockerfiles that often are missing required install dependencies like git and curl.",
@ -13,14 +13,8 @@
], ],
"default": "latest", "default": "latest",
"description": "Select version of Git LFS to install" "description": "Select version of Git LFS to install"
},
"autoPull": {
"type": "boolean",
"default": true,
"description": "Automatically pull LFS files when creating the container. When false, running 'git lfs pull' in the container will have the same effect."
} }
}, },
"postCreateCommand": "/usr/local/share/pull-git-lfs-artifacts.sh",
"installsAfter": [ "installsAfter": [
"ghcr.io/devcontainers/features/common-utils" "ghcr.io/devcontainers/features/common-utils"
] ]


@ -8,14 +8,12 @@
# Maintainer: The VS Code and Codespaces Teams # Maintainer: The VS Code and Codespaces Teams
GIT_LFS_VERSION=${VERSION:-"latest"} GIT_LFS_VERSION=${VERSION:-"latest"}
AUTO_PULL=${AUTOPULL:="true"}
GIT_LFS_ARCHIVE_GPG_KEY_URI="https://packagecloud.io/github/git-lfs/gpgkey" GIT_LFS_ARCHIVE_GPG_KEY_URI="https://packagecloud.io/github/git-lfs/gpgkey"
GIT_LFS_ARCHIVE_ARCHITECTURES="amd64 arm64" GIT_LFS_ARCHIVE_ARCHITECTURES="amd64 arm64"
GIT_LFS_ARCHIVE_VERSION_CODENAMES="stretch buster bullseye bionic focal jammy" GIT_LFS_ARCHIVE_VERSION_CODENAMES="stretch buster bullseye bionic focal jammy"
GIT_LFS_CHECKSUM_GPG_KEYS="0x88ace9b29196305ba9947552f1ba225c0223b187 0x86cd3297749375bcf8206715f54fe648088335a9 0xaa3b3450295830d2de6db90caba67be5a5795889" GIT_LFS_CHECKSUM_GPG_KEYS="0x88ace9b29196305ba9947552f1ba225c0223b187 0x86cd3297749375bcf8206715f54fe648088335a9 0xaa3b3450295830d2de6db90caba67be5a5795889"
GPG_KEY_SERVERS="keyserver hkp://keyserver.ubuntu.com GPG_KEY_SERVERS="keyserver hkp://keyserver.ubuntu.com
keyserver hkp://keyserver.ubuntu.com:80
keyserver hkps://keys.openpgp.org keyserver hkps://keys.openpgp.org
keyserver hkp://keyserver.pgp.com" keyserver hkp://keyserver.pgp.com"
@ -187,37 +185,6 @@ if [ "${use_github}" = "true" ]; then
install_using_github install_using_github
fi fi
# --- Generate a 'pull-git-lfs-artifacts.sh' script to be executed by the 'postCreateCommand' lifecycle hook
PULL_GIT_LFS_SCRIPT_PATH="/usr/local/share/pull-git-lfs-artifacts.sh"
tee "$PULL_GIT_LFS_SCRIPT_PATH" > /dev/null \
<< EOF
#!/bin/sh
set -e
AUTO_PULL=${AUTO_PULL}
EOF
tee -a "$PULL_GIT_LFS_SCRIPT_PATH" > /dev/null \
<< 'EOF'
echo "Fetching git lfs artifacts..."
if [ "${AUTO_PULL}" != "true" ]; then
echo "(!) Skipping 'git lfs pull' because 'autoPull' is not set to 'true'"
exit 0
fi
# Check if repo is a git lfs repo.
if ! git lfs ls-files > /dev/null 2>&1; then
echo "(!) Skipping automatic 'git lfs pull' because no git lfs files were detected"
exit 0
fi
git lfs pull
EOF
chmod 755 "$PULL_GIT_LFS_SCRIPT_PATH"
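The two-stage `tee` above relies on heredoc quoting: the first heredoc (unquoted `EOF`) expands variables while the Feature is being installed, while the second (quoted `'EOF'`) writes its text verbatim so it is evaluated only when the lifecycle hook runs. A minimal standalone illustration of the pattern:

```bash
VALUE="true"
tee /tmp/demo.sh > /dev/null << EOF
#!/bin/sh
BAKED_IN=${VALUE}
EOF
tee -a /tmp/demo.sh > /dev/null << 'EOF'
echo "BAKED_IN is ${BAKED_IN}"   # left literal here; expanded only when demo.sh runs
EOF
chmod +x /tmp/demo.sh && /tmp/demo.sh   # prints: BAKED_IN is true
```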
# Clean up # Clean up
rm -rf /var/lib/apt/lists/* rm -rf /var/lib/apt/lists/*


@ -1,6 +1,6 @@
{ {
"id": "git", "id": "git",
"version": "1.1.6", "version": "1.1.5",
"name": "Git (from source)", "name": "Git (from source)",
"documentationURL": "https://github.com/devcontainers/features/tree/main/src/git", "documentationURL": "https://github.com/devcontainers/features/tree/main/src/git",
"description": "Install an up-to-date version of Git, built from source as needed. Useful for when you want the latest and greatest features. Auto-detects latest stable version and installs needed dependencies.", "description": "Install an up-to-date version of Git, built from source as needed. Useful for when you want the latest and greatest features. Auto-detects latest stable version and installs needed dependencies.",


@ -12,7 +12,6 @@ USE_PPA_IF_AVAILABLE=${PPA}
GIT_CORE_PPA_ARCHIVE_GPG_KEY=E1DD270288B4E6030699E45FA1715D88E1DF1F24 GIT_CORE_PPA_ARCHIVE_GPG_KEY=E1DD270288B4E6030699E45FA1715D88E1DF1F24
GPG_KEY_SERVERS="keyserver hkp://keyserver.ubuntu.com GPG_KEY_SERVERS="keyserver hkp://keyserver.ubuntu.com
keyserver hkp://keyserver.ubuntu.com:80
keyserver hkps://keys.openpgp.org keyserver hkps://keys.openpgp.org
keyserver hkp://keyserver.pgp.com" keyserver hkp://keyserver.pgp.com"


@ -1,6 +1,6 @@
{ {
"id": "github-cli", "id": "github-cli",
"version": "1.0.11", "version": "1.0.10",
"name": "GitHub CLI", "name": "GitHub CLI",
"documentationURL": "https://github.com/devcontainers/features/tree/main/src/github-cli", "documentationURL": "https://github.com/devcontainers/features/tree/main/src/github-cli",
"description": "Installs the GitHub CLI. Auto-detects latest version and installs needed dependencies.", "description": "Installs the GitHub CLI. Auto-detects latest version and installs needed dependencies.",


@ -12,7 +12,6 @@ INSTALL_DIRECTLY_FROM_GITHUB_RELEASE=${INSTALLDIRECTLYFROMGITHUBRELEASE:-"true"}
GITHUB_CLI_ARCHIVE_GPG_KEY=23F3D4EA75716059 GITHUB_CLI_ARCHIVE_GPG_KEY=23F3D4EA75716059
GPG_KEY_SERVERS="keyserver hkp://keyserver.ubuntu.com GPG_KEY_SERVERS="keyserver hkp://keyserver.ubuntu.com
keyserver hkp://keyserver.ubuntu.com:80
keyserver hkps://keys.openpgp.org keyserver hkps://keys.openpgp.org
keyserver hkp://keyserver.pgp.com" keyserver hkp://keyserver.pgp.com"


@ -18,12 +18,6 @@ Installs Go and common Go utilities. Auto-detects latest version and installs ne
| version | Select or enter a Go version to install | string | latest | | version | Select or enter a Go version to install | string | latest |
| golangciLintVersion | Version of golangci-lint to install | string | latest | | golangciLintVersion | Version of golangci-lint to install | string | latest |
## Customizations
### VS Code Extensions
- `golang.Go`
## OS Support ## OS Support


@ -1,6 +1,6 @@
{ {
"id": "go", "id": "go",
"version": "1.2.2", "version": "1.1.3",
"name": "Go", "name": "Go",
"documentationURL": "https://github.com/devcontainers/features/tree/main/src/go", "documentationURL": "https://github.com/devcontainers/features/tree/main/src/go",
"description": "Installs Go and common Go utilities. Auto-detects latest version and installs needed dependencies.", "description": "Installs Go and common Go utilities. Auto-detects latest version and installs needed dependencies.",
@ -10,8 +10,8 @@
"proposals": [ "proposals": [
"latest", "latest",
"none", "none",
"1.21", "1.19",
"1.20" "1.18"
], ],
"default": "latest", "default": "latest",
"description": "Select or enter a Go version to install" "description": "Select or enter a Go version to install"


@ -84,6 +84,21 @@ find_version_from_git_tags() {
echo "${variable_name}=${!variable_name}" echo "${variable_name}=${!variable_name}"
} }
# Get central common setting
get_common_setting() {
if [ "${common_settings_file_loaded}" != "true" ]; then
curl -sfL "https://aka.ms/vscode-dev-containers/script-library/settings.env" 2>/dev/null -o /tmp/vsdc-settings.env || echo "Could not download settings file. Skipping."
common_settings_file_loaded=true
fi
if [ -f "/tmp/vsdc-settings.env" ]; then
local multi_line=""
if [ "$2" = "true" ]; then multi_line="-z"; fi
local result="$(grep ${multi_line} -oP "$1=\"?\K[^\"]+" /tmp/vsdc-settings.env | tr -d '\0')"
if [ ! -z "${result}" ]; then declare -g $1="${result}"; fi
fi
echo "$1=${!1}"
}
apt_get_update() apt_get_update()
{ {
if [ "$(find /var/lib/apt/lists/* | wc -l)" = "0" ]; then if [ "$(find /var/lib/apt/lists/* | wc -l)" = "0" ]; then
@ -133,6 +148,7 @@ if [[ "${TARGET_GO_VERSION}" != "none" ]] && [[ "$(go version)" != *"${TARGET_GO
export GNUPGHOME="/tmp/tmp-gnupg" export GNUPGHOME="/tmp/tmp-gnupg"
mkdir -p ${GNUPGHOME} mkdir -p ${GNUPGHOME}
chmod 700 ${GNUPGHOME} chmod 700 ${GNUPGHOME}
get_common_setting GO_GPG_KEY_URI
curl -sSL -o /tmp/tmp-gnupg/golang_key "${GO_GPG_KEY_URI}" curl -sSL -o /tmp/tmp-gnupg/golang_key "${GO_GPG_KEY_URI}"
gpg -q --import /tmp/tmp-gnupg/golang_key gpg -q --import /tmp/tmp-gnupg/golang_key
echo "Downloading Go ${TARGET_GO_VERSION}..." echo "Downloading Go ${TARGET_GO_VERSION}..."
@ -175,18 +191,15 @@ else
fi fi
# Install Go tools that are isImportant && !replacedByGopls based on
# https://github.com/golang/vscode-go/blob/v0.38.0/src/goToolsInformation.ts
GO_TOOLS="\
golang.org/x/tools/gopls@latest \
honnef.co/go/tools/cmd/staticcheck@latest \
golang.org/x/lint/golint@latest \
github.com/mgechev/revive@latest \
github.com/go-delve/delve/cmd/dlv@latest \
github.com/fatih/gomodifytags@latest \
github.com/haya14busa/goplay/cmd/goplay@latest \
github.com/cweill/gotests/gotests@latest \
github.com/josharian/impl@latest"

# Install Go tools that are isImportant && !replacedByGopls based on
# https://github.com/golang/vscode-go/blob/v0.31.1/src/goToolsInformation.ts
GO_TOOLS="\
golang.org/x/tools/gopls@latest \
honnef.co/go/tools/cmd/staticcheck@latest \
golang.org/x/lint/golint@latest \
github.com/mgechev/revive@latest \
github.com/uudashr/gopkgs/v2/cmd/gopkgs@latest \
github.com/ramya-rao-a/go-outline@latest \
github.com/go-delve/delve/cmd/dlv@latest"
if [ "${INSTALL_GO_TOOLS}" = "true" ]; then if [ "${INSTALL_GO_TOOLS}" = "true" ]; then
echo "Installing common Go tools..." echo "Installing common Go tools..."
export PATH=${TARGET_GOROOT}/bin:${PATH} export PATH=${TARGET_GOROOT}/bin:${PATH}


@ -24,12 +24,6 @@ Installs Java, SDKMAN! (if not installed), and needed dependencies.
| installAnt | Install Ant, a software tool for automating software build processes | boolean | false | | installAnt | Install Ant, a software tool for automating software build processes | boolean | false |
| antVersion | Select or enter an Ant version | string | latest | | antVersion | Select or enter an Ant version | string | latest |
## Customizations
### VS Code Extensions
- `vscjava.vscode-java-pack`
## License ## License
For the Java Feature from this repository, see [NOTICE.txt](https://github.com/devcontainers/features/tree/main/src/java/NOTICE.txt) for licensing information on JDK distributions. For the Java Feature from this repository, see [NOTICE.txt](https://github.com/devcontainers/features/tree/main/src/java/NOTICE.txt) for licensing information on JDK distributions.


@ -1,6 +1,6 @@
{ {
"id": "java", "id": "java",
"version": "1.2.2", "version": "1.2.1",
"name": "Java (via SDKMAN!)", "name": "Java (via SDKMAN!)",
"documentationURL": "https://github.com/devcontainers/features/tree/main/src/java", "documentationURL": "https://github.com/devcontainers/features/tree/main/src/java",
"description": "Installs Java, SDKMAN! (if not installed), and needed dependencies.", "description": "Installs Java, SDKMAN! (if not installed), and needed dependencies.",


@ -111,13 +111,13 @@ sdk_install() {
elif echo "${requested_version}" | grep -oE "${full_version_check}" > /dev/null 2>&1; then elif echo "${requested_version}" | grep -oE "${full_version_check}" > /dev/null 2>&1; then
echo "${requested_version}" echo "${requested_version}"
else else
local regex="${prefix}\\K[0-9]+\\.?[0-9]*\\.?[0-9]*${suffix}" local regex="${prefix}\\K[0-9]+\\.[0-9]+\\.[0-9]+${suffix}"
local version_list=$(su ${USERNAME} -c ". \${SDKMAN_DIR}/bin/sdkman-init.sh && sdk list ${install_type} 2>&1 | grep -oP \"${regex}\" | tr -d ' ' | sort -rV") local version_list=$(su ${USERNAME} -c ". \${SDKMAN_DIR}/bin/sdkman-init.sh && sdk list ${install_type} 2>&1 | grep -oP \"${regex}\" | tr -d ' ' | sort -rV")
if [ "${requested_version}" = "latest" ] || [ "${requested_version}" = "current" ]; then if [ "${requested_version}" = "latest" ] || [ "${requested_version}" = "current" ]; then
requested_version="$(echo "${version_list}" | head -n 1)" requested_version="$(echo "${version_list}" | head -n 1)"
else else
set +e set +e
requested_version="$(echo "${version_list}" | grep -E -m 1 "^${requested_version//./\\.}([\\.\\s]|-|$)")" requested_version="$(echo "${version_list}" | grep -E -m 1 "^${requested_version//./\\.}([\\.\\s]|$)")"
set -e set -e
fi fi
if [ -z "${requested_version}" ] || ! echo "${version_list}" | grep "^${requested_version//./\\.}$" > /dev/null 2>&1; then if [ -z "${requested_version}" ] || ! echo "${version_list}" | grep "^${requested_version//./\\.}$" > /dev/null 2>&1; then


@ -1,6 +1,6 @@
{ {
"id": "kubectl-helm-minikube", "id": "kubectl-helm-minikube",
"version": "1.1.5", "version": "1.1.4",
"name": "Kubectl, Helm, and Minikube", "name": "Kubectl, Helm, and Minikube",
"documentationURL": "https://github.com/devcontainers/features/tree/main/src/kubectl-helm-minikube", "documentationURL": "https://github.com/devcontainers/features/tree/main/src/kubectl-helm-minikube",
"description": "Installs latest version of kubectl, Helm, and optionally minikube. Auto-detects latest versions and installs needed dependencies.", "description": "Installs latest version of kubectl, Helm, and optionally minikube. Auto-detects latest versions and installs needed dependencies.",


@ -23,7 +23,6 @@ USERNAME="${USERNAME:-"${_REMOTE_USER:-"automatic"}"}"
HELM_GPG_KEYS_URI="https://raw.githubusercontent.com/helm/helm/main/KEYS" HELM_GPG_KEYS_URI="https://raw.githubusercontent.com/helm/helm/main/KEYS"
GPG_KEY_SERVERS="keyserver hkp://keyserver.ubuntu.com GPG_KEY_SERVERS="keyserver hkp://keyserver.ubuntu.com
keyserver hkp://keyserver.ubuntu.com:80
keyserver hkps://keys.openpgp.org keyserver hkps://keys.openpgp.org
keyserver hkp://keyserver.pgp.com" keyserver hkp://keyserver.pgp.com"


@ -18,13 +18,7 @@ Installs Node.js, nvm, yarn, pnpm, and needed dependencies.
| version | Select or enter a Node.js version to install | string | lts | | version | Select or enter a Node.js version to install | string | lts |
| nodeGypDependencies | Install dependencies to compile native node modules (node-gyp)? | boolean | true | | nodeGypDependencies | Install dependencies to compile native node modules (node-gyp)? | boolean | true |
| nvmInstallPath | The path where NVM will be installed. | string | /usr/local/share/nvm | | nvmInstallPath | The path where NVM will be installed. | string | /usr/local/share/nvm |
| nvmVersion | Version of NVM to install. | string | latest | | nvmVersion | Version of NVM to install. | string | 0.39.2 |
## Customizations
### VS Code Extensions
- `dbaeumer.vscode-eslint`
## Using nvm from postCreateCommand or another lifecycle command ## Using nvm from postCreateCommand or another lifecycle command


@ -1,6 +1,6 @@
{ {
"id": "node", "id": "node",
"version": "1.3.1", "version": "1.2.0",
"name": "Node.js (via nvm), yarn and pnpm", "name": "Node.js (via nvm), yarn and pnpm",
"documentationURL": "https://github.com/devcontainers/features/tree/main/src/node", "documentationURL": "https://github.com/devcontainers/features/tree/main/src/node",
"description": "Installs Node.js, nvm, yarn, pnpm, and needed dependencies.", "description": "Installs Node.js, nvm, yarn, pnpm, and needed dependencies.",
@ -30,11 +30,7 @@
}, },
"nvmVersion": { "nvmVersion": {
"type": "string", "type": "string",
"proposals": [ "default": "0.39.2",
"latest",
"0.39"
],
"default": "latest",
"description": "Version of NVM to install." "description": "Version of NVM to install."
} }
}, },


@ -8,8 +8,8 @@
# Maintainer: The Dev Container spec maintainers # Maintainer: The Dev Container spec maintainers
export NODE_VERSION="${VERSION:-"lts"}" export NODE_VERSION="${VERSION:-"lts"}"
export NVM_VERSION="${NVMVERSION:-"latest"}" export NVM_VERSION="${NVMVERSION:-"0.39.2"}"
export NVM_DIR="${NVMINSTALLPATH:-"/usr/local/share/nvm"}" export NVM_DIR=${NVMINSTALLPATH:-"/usr/local/share/nvm"}
INSTALL_TOOLS_FOR_NODE_GYP="${NODEGYPDEPENDENCIES:-true}" INSTALL_TOOLS_FOR_NODE_GYP="${NODEGYPDEPENDENCIES:-true}"
# Comma-separated list of node versions to be installed (with nvm) # Comma-separated list of node versions to be installed (with nvm)
@ -78,40 +78,6 @@ check_packages() {
fi fi
} }
# Figure out correct version of a three part version number is not passed
find_version_from_git_tags() {
local variable_name=$1
local requested_version=${!variable_name}
if [ "${requested_version}" = "none" ]; then return; fi
local repository=$2
local prefix=${3:-"tags/v"}
local separator=${4:-"."}
local last_part_optional=${5:-"false"}
if [ "$(echo "${requested_version}" | grep -o "." | wc -l)" != "2" ]; then
local escaped_separator=${separator//./\\.}
local last_part
if [ "${last_part_optional}" = "true" ]; then
last_part="(${escaped_separator}[0-9]+)?"
else
last_part="${escaped_separator}[0-9]+"
fi
local regex="${prefix}\\K[0-9]+${escaped_separator}[0-9]+${last_part}$"
local version_list="$(git ls-remote --tags ${repository} | grep -oP "${regex}" | tr -d ' ' | tr "${separator}" "." | sort -rV)"
if [ "${requested_version}" = "latest" ] || [ "${requested_version}" = "current" ] || [ "${requested_version}" = "lts" ]; then
declare -g ${variable_name}="$(echo "${version_list}" | head -n 1)"
else
set +e
declare -g ${variable_name}="$(echo "${version_list}" | grep -E -m 1 "^${requested_version//./\\.}([\\.\\s]|$)")"
set -e
fi
fi
if [ -z "${!variable_name}" ] || ! echo "${version_list}" | grep "^${!variable_name//./\\.}$" > /dev/null 2>&1; then
echo -e "Invalid ${variable_name} value: ${requested_version}\nValid values:\n${version_list}" >&2
exit 1
fi
echo "${variable_name}=${!variable_name}"
}
# Ensure apt is in non-interactive to avoid prompts # Ensure apt is in non-interactive to avoid prompts
export DEBIAN_FRONTEND=noninteractive export DEBIAN_FRONTEND=noninteractive
@ -126,10 +92,6 @@ fi
# Install dependencies # Install dependencies
check_packages apt-transport-https curl ca-certificates tar gnupg2 dirmngr check_packages apt-transport-https curl ca-certificates tar gnupg2 dirmngr
if ! type git > /dev/null 2>&1; then
check_packages git
fi
# Install yarn # Install yarn
if type yarn > /dev/null 2>&1; then if type yarn > /dev/null 2>&1; then
echo "Yarn already installed." echo "Yarn already installed."
@ -150,18 +112,16 @@ elif [ "${NODE_VERSION}" = "latest" ]; then
export NODE_VERSION="node" export NODE_VERSION="node"
fi fi
find_version_from_git_tags NVM_VERSION "https://github.com/nvm-sh/nvm"
# Install snippet that we will run as the user
nvm_install_snippet="$(cat << EOF nvm_install_snippet="$(cat << EOF
set -e set -e
umask 0002 umask 0002
# Do not update profile - we'll do this manually # Do not update profile - we'll do this manually
export PROFILE=/dev/null export PROFILE=/dev/null
curl -so- "https://raw.githubusercontent.com/nvm-sh/nvm/v${NVM_VERSION}/install.sh" | bash curl -so- https://raw.githubusercontent.com/nvm-sh/nvm/v${NVM_VERSION}/install.sh | bash
source "${NVM_DIR}/nvm.sh" source ${NVM_DIR}/nvm.sh
if [ "${NODE_VERSION}" != "" ]; then if [ "${NODE_VERSION}" != "" ]; then
nvm alias default "${NODE_VERSION}" nvm alias default ${NODE_VERSION}
fi fi
EOF EOF
)" )"
@ -189,9 +149,9 @@ usermod -a -G nvm ${USERNAME}
umask 0002 umask 0002
if [ ! -d "${NVM_DIR}" ]; then if [ ! -d "${NVM_DIR}" ]; then
# Create nvm dir, and set sticky bit # Create nvm dir, and set sticky bit
mkdir -p "${NVM_DIR}" mkdir -p ${NVM_DIR}
chown "${USERNAME}:nvm" "${NVM_DIR}" chown "${USERNAME}:nvm" ${NVM_DIR}
chmod g+rws "${NVM_DIR}" chmod g+rws ${NVM_DIR}
su ${USERNAME} -c "${nvm_install_snippet}" 2>&1 su ${USERNAME} -c "${nvm_install_snippet}" 2>&1
# Update rc files # Update rc files
if [ "${UPDATE_RC}" = "true" ]; then if [ "${UPDATE_RC}" = "true" ]; then
@ -200,11 +160,11 @@ if [ ! -d "${NVM_DIR}" ]; then
else else
echo "NVM already installed." echo "NVM already installed."
if [ "${NODE_VERSION}" != "" ]; then if [ "${NODE_VERSION}" != "" ]; then
su ${USERNAME} -c "umask 0002 && . '$NVM_DIR/nvm.sh' && nvm install '${NODE_VERSION}' && nvm alias default '${NODE_VERSION}'" su ${USERNAME} -c "umask 0002 && . $NVM_DIR/nvm.sh && nvm install ${NODE_VERSION} && nvm alias default ${NODE_VERSION}"
fi fi
fi fi
# Additional node versions to be installed but not be set as # Additional node versions to be installed but not be set as
# default we can assume the nvm is the group owner of the nvm # default we can assume the nvm is the group owner of the nvm
# directory and the sticky bit on directories so any installed # directory and the sticky bit on directories so any installed
# files will have will have the correct ownership (nvm) # files will have will have the correct ownership (nvm)
@ -213,12 +173,12 @@ if [ ! -z "${ADDITIONAL_VERSIONS}" ]; then
IFS="," IFS=","
read -a additional_versions <<< "$ADDITIONAL_VERSIONS" read -a additional_versions <<< "$ADDITIONAL_VERSIONS"
for ver in "${additional_versions[@]}"; do for ver in "${additional_versions[@]}"; do
su ${USERNAME} -c "umask 0002 && . '$NVM_DIR/nvm.sh' && nvm install '${ver}'" su ${USERNAME} -c "umask 0002 && . $NVM_DIR/nvm.sh && nvm install ${ver}"
done done
# Ensure $NODE_VERSION is on the $PATH # Ensure $NODE_VERSION is on the $PATH
if [ "${NODE_VERSION}" != "" ]; then if [ "${NODE_VERSION}" != "" ]; then
su ${USERNAME} -c "umask 0002 && . '$NVM_DIR/nvm.sh' && nvm use default" su ${USERNAME} -c "umask 0002 && . $NVM_DIR/nvm.sh && nvm use default"
fi fi
IFS=$OLDIFS IFS=$OLDIFS
fi fi
@ -228,9 +188,6 @@ if type pnpm > /dev/null 2>&1; then
echo "pnpm already installed." echo "pnpm already installed."
else else
if type npm > /dev/null 2>&1; then if type npm > /dev/null 2>&1; then
[ ! -z "$http_proxy" ] && npm set proxy="$http_proxy"
[ ! -z "$https_proxy" ] && npm set https-proxy="$https_proxy"
[ ! -z "$no_proxy" ] && npm set noproxy="$no_proxy"
npm install -g pnpm npm install -g pnpm
else else
echo "Skip installing pnpm because npm is missing" echo "Skip installing pnpm because npm is missing"
@ -261,7 +218,7 @@ fi
# Clean up # Clean up
su ${USERNAME} -c "umask 0002 && . '$NVM_DIR/nvm.sh' && nvm clear-cache" su ${USERNAME} -c "umask 0002 && . $NVM_DIR/nvm.sh && nvm clear-cache"
rm -rf /var/lib/apt/lists/* rm -rf /var/lib/apt/lists/*
# Ensure privs are correct for installed node versions. Unfortunately the # Ensure privs are correct for installed node versions. Unfortunately the


@ -10,19 +10,14 @@ Follow [NVIDIA's instructions to install the NVIDIA Container Toolkit](https://d
### Enable GPU passthrough

Enable GPU passthrough to your devcontainer by using `hostRequirements`. Here's an example of a devcontainer with this property:

```json
{
    "hostRequirements": {
        "gpu": "optional"
    }
}
```

Enable GPU passthrough to your devcontainer by adding `["--gpus", "all"]` to your devcontainer's `runArgs` property. Here's an example of a devcontainer with this property:

```json
{
    "runArgs": ["--gpus", "all"]
}
```
> Note: Setting `gpu` property's value to `true` will work with GPU machine types, but fail with CPUs. Hence, setting it to `optional` works in both cases. See [schema](https://containers.dev/implementors/json_schema/#base-schema) for more configuration details.
## OS Support ## OS Support


@ -16,9 +16,7 @@ Installs shared libraries for NVIDIA CUDA.
| Options Id | Description | Type | Default Value |
|-----|-----|-----|-----|
| installCudnn | Additionally install CUDA Deep Neural Network (cuDNN) shared library | boolean | false |
| installCudnnDev | Additionally install CUDA Deep Neural Network (cuDNN) development libraries and headers | boolean | false |
| installNvtx | Additionally install NVIDIA Tools Extension (NVTX) | boolean | false |
| installToolkit | Additionally install NVIDIA CUDA Toolkit | boolean | false |
| cudaVersion | Version of CUDA to install | string | 11.8 |
| cudnnVersion | Version of cuDNN to install | string | 8.6.0.163 |
@ -34,19 +32,14 @@ Follow [NVIDIA's instructions to install the NVIDIA Container Toolkit](https://d
### Enable GPU passthrough ### Enable GPU passthrough
### Enable GPU passthrough

Enable GPU passthrough to your devcontainer by using `hostRequirements`. Here's an example of a devcontainer with this property:

```json
{
    "hostRequirements": {
        "gpu": "optional"
    }
}
```

Enable GPU passthrough to your devcontainer by adding `["--gpus", "all"]` to your devcontainer's `runArgs` property. Here's an example of a devcontainer with this property:

```json
{
    "runArgs": ["--gpus", "all"]
}
```
## OS Support ## OS Support


@ -1,6 +1,6 @@
{ {
"id": "nvidia-cuda", "id": "nvidia-cuda",
"version": "1.1.0", "version": "1.0.7",
"name": "NVIDIA CUDA", "name": "NVIDIA CUDA",
"description": "Installs shared libraries for NVIDIA CUDA.", "description": "Installs shared libraries for NVIDIA CUDA.",
"documentationURL": "https://github.com/devcontainers/features/tree/main/src/nvidia-cuda", "documentationURL": "https://github.com/devcontainers/features/tree/main/src/nvidia-cuda",
@ -10,27 +10,14 @@
"default": false, "default": false,
"description": "Additionally install CUDA Deep Neural Network (cuDNN) shared library" "description": "Additionally install CUDA Deep Neural Network (cuDNN) shared library"
}, },
"installCudnnDev": {
"type": "boolean",
"default": false,
"description": "Additionally install CUDA Deep Neural Network (cuDNN) development libraries and headers"
},
"installNvtx": { "installNvtx": {
"type": "boolean", "type": "boolean",
"default": false, "default": false,
"description": "Additionally install NVIDIA Tools Extension (NVTX)" "description": "Additionally install NVIDIA Tools Extension (NVTX)"
}, },
"installToolkit": {
"type": "boolean",
"default": false,
"description": "Additionally install NVIDIA CUDA Toolkit"
},
"cudaVersion": { "cudaVersion": {
"type": "string", "type": "string",
"proposals": [ "proposals": [
"12.2",
"12.1",
"12.0",
"11.8", "11.8",
"11.7", "11.7",
"11.6", "11.6",
@ -45,15 +32,6 @@
"cudnnVersion": { "cudnnVersion": {
"type": "string", "type": "string",
"proposals": [ "proposals": [
"8.9.5.29",
"8.9.4.25",
"8.9.3.28",
"8.9.2.26",
"8.9.1.23",
"8.9.0.131",
"8.8.1.3",
"8.8.0.121",
"8.7.0.84",
"8.6.0.163", "8.6.0.163",
"8.5.0.96", "8.5.0.96",
"8.4.1.50", "8.4.1.50",


@ -6,9 +6,7 @@ set -e
rm -rf /var/lib/apt/lists/* rm -rf /var/lib/apt/lists/*
INSTALL_CUDNN=${INSTALLCUDNN} INSTALL_CUDNN=${INSTALLCUDNN}
INSTALL_CUDNNDEV=${INSTALLCUDNNDEV}
INSTALL_NVTX=${INSTALLNVTX} INSTALL_NVTX=${INSTALLNVTX}
INSTALL_TOOLKIT=${INSTALLTOOLKIT}
CUDA_VERSION=${CUDAVERSION} CUDA_VERSION=${CUDAVERSION}
CUDNN_VERSION=${CUDNNVERSION} CUDNN_VERSION=${CUDNNVERSION}
@ -49,7 +47,6 @@ apt-get update -yq
# Ensure that the requested version of CUDA is available # Ensure that the requested version of CUDA is available
cuda_pkg="cuda-libraries-${CUDA_VERSION/./-}" cuda_pkg="cuda-libraries-${CUDA_VERSION/./-}"
nvtx_pkg="cuda-nvtx-${CUDA_VERSION/./-}" nvtx_pkg="cuda-nvtx-${CUDA_VERSION/./-}"
toolkit_pkg="cuda-toolkit-${CUDA_VERSION/./-}"
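The package names above come from bash pattern substitution on the version string; a tiny illustration:

```bash
CUDA_VERSION="11.8"
echo "cuda-libraries-${CUDA_VERSION/./-}"   # cuda-libraries-11-8
echo "cuda-nvtx-${CUDA_VERSION/./-}"        # cuda-nvtx-11-8
echo "cuda-toolkit-${CUDA_VERSION/./-}"     # cuda-toolkit-11-8
```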
if ! apt-cache show "$cuda_pkg"; then if ! apt-cache show "$cuda_pkg"; then
echo "The requested version of CUDA is not available: CUDA $CUDA_VERSION" echo "The requested version of CUDA is not available: CUDA $CUDA_VERSION"
exit 1 exit 1
@ -70,28 +67,11 @@ if [ "$INSTALL_CUDNN" = "true" ]; then
apt-get install -yq "$cudnn_pkg_version" apt-get install -yq "$cudnn_pkg_version"
fi fi
if [ "$INSTALL_CUDNNDEV" = "true" ]; then
# Ensure that the requested version of cuDNN development package is available AND compatible
cudnn_dev_pkg_version="libcudnn8-dev=${CUDNN_VERSION}-1+cuda${CUDA_VERSION}"
if ! apt-cache show "$cudnn_dev_pkg_version"; then
echo "The requested version of cuDNN development package is not available: cuDNN $CUDNN_VERSION for CUDA $CUDA_VERSION"
exit 1
fi
echo "Installing cuDNN dev libraries..."
apt-get install -yq "$cudnn_dev_pkg_version"
fi
if [ "$INSTALL_NVTX" = "true" ]; then if [ "$INSTALL_NVTX" = "true" ]; then
echo "Installing NVTX..." echo "Installing NVTX..."
apt-get install -yq "$nvtx_pkg" apt-get install -yq "$nvtx_pkg"
fi fi
if [ "$INSTALL_TOOLKIT" = "true" ]; then
echo "Installing CUDA Toolkit..."
apt-get install -yq "$toolkit_pkg"
fi
# Clean up # Clean up
rm -rf /var/lib/apt/lists/* rm -rf /var/lib/apt/lists/*


@ -1,6 +1,6 @@
{ {
"id": "oryx", "id": "oryx",
"version": "1.1.0", "version": "1.0.13",
"name": "Oryx", "name": "Oryx",
"description": "Installs the oryx CLI", "description": "Installs the oryx CLI",
"documentationURL": "https://github.com/devcontainers/features/tree/main/src/oryx", "documentationURL": "https://github.com/devcontainers/features/tree/main/src/oryx",
@ -14,7 +14,6 @@
"PATH": "/usr/local/oryx:${PATH}" "PATH": "/usr/local/oryx:${PATH}"
}, },
"installsAfter": [ "installsAfter": [
"ghcr.io/devcontainers/features/common-utils", "ghcr.io/devcontainers/features/common-utils"
"ghcr.io/devcontainers/features/dotnet"
] ]
} }


@ -74,7 +74,7 @@ install_dotnet_using_apt() {
echo "Attempting to auto-install dotnet..." echo "Attempting to auto-install dotnet..."
install_from_microsoft_feed=false install_from_microsoft_feed=false
apt_get_update apt_get_update
DOTNET_INSTALLATION_PACKAGE="dotnet7" DOTNET_INSTALLATION_PACKAGE="dotnet6"
apt-get -yq install $DOTNET_INSTALLATION_PACKAGE || install_from_microsoft_feed="true" apt-get -yq install $DOTNET_INSTALLATION_PACKAGE || install_from_microsoft_feed="true"
if [ "${install_from_microsoft_feed}" = "true" ]; then if [ "${install_from_microsoft_feed}" = "true" ]; then
@ -82,7 +82,7 @@ install_dotnet_using_apt() {
curl -sSL ${MICROSOFT_GPG_KEYS_URI} | gpg --dearmor > /usr/share/keyrings/microsoft-archive-keyring.gpg curl -sSL ${MICROSOFT_GPG_KEYS_URI} | gpg --dearmor > /usr/share/keyrings/microsoft-archive-keyring.gpg
echo "deb [arch=${architecture} signed-by=/usr/share/keyrings/microsoft-archive-keyring.gpg] https://packages.microsoft.com/repos/microsoft-${ID}-${VERSION_CODENAME}-prod ${VERSION_CODENAME} main" > /etc/apt/sources.list.d/microsoft.list echo "deb [arch=${architecture} signed-by=/usr/share/keyrings/microsoft-archive-keyring.gpg] https://packages.microsoft.com/repos/microsoft-${ID}-${VERSION_CODENAME}-prod ${VERSION_CODENAME} main" > /etc/apt/sources.list.d/microsoft.list
apt-get update -y apt-get update -y
DOTNET_INSTALLATION_PACKAGE="dotnet-sdk-7.0" DOTNET_INSTALLATION_PACKAGE="dotnet-sdk-6.0"
DOTNET_SKIP_FIRST_TIME_EXPERIENCE="true" apt-get install -yq $DOTNET_INSTALLATION_PACKAGE DOTNET_SKIP_FIRST_TIME_EXPERIENCE="true" apt-get install -yq $DOTNET_INSTALLATION_PACKAGE
fi fi
@ -132,9 +132,9 @@ if dotnet --version > /dev/null ; then
DOTNET_BINARY=$(which dotnet) DOTNET_BINARY=$(which dotnet)
fi fi
# Oryx needs to be built with .NET 7 # Oryx needs to be built with .NET 6
if [[ "${DOTNET_BINARY}" = "" ]] || [[ "$(dotnet --version)" != *"7"* ]] ; then if [[ "${DOTNET_BINARY}" = "" ]] || [[ "$(dotnet --version)" != *"6"* ]] ; then
echo "'dotnet 7' was not detected. Attempting to install .NET 7 to build oryx." echo "'dotnet 6' was not detected. Attempting to install .NET 6 to build oryx."
install_dotnet_using_apt install_dotnet_using_apt
if ! dotnet --version > /dev/null ; then if ! dotnet --version > /dev/null ; then
@ -154,11 +154,7 @@ mkdir -p ${ORYX}
git clone --depth=1 https://github.com/microsoft/Oryx $GIT_ORYX git clone --depth=1 https://github.com/microsoft/Oryx $GIT_ORYX
SOLUTION_FILE_NAME="Oryx.sln" $GIT_ORYX/build/buildSln.sh
echo "Building solution '$SOLUTION_FILE_NAME'..."
cd $GIT_ORYX
${DOTNET_BINARY} build "$SOLUTION_FILE_NAME" -c Debug
${DOTNET_BINARY} publish -property:ValidateExecutableReferencesMatchSelfContained=false -r linux-x64 -o ${BUILD_SCRIPT_GENERATOR} -c Release $GIT_ORYX/src/BuildScriptGeneratorCli/BuildScriptGeneratorCli.csproj ${DOTNET_BINARY} publish -property:ValidateExecutableReferencesMatchSelfContained=false -r linux-x64 -o ${BUILD_SCRIPT_GENERATOR} -c Release $GIT_ORYX/src/BuildScriptGeneratorCli/BuildScriptGeneratorCli.csproj
${DOTNET_BINARY} publish -r linux-x64 -o ${BUILD_SCRIPT_GENERATOR} -c Release $GIT_ORYX/src/BuildServer/BuildServer.csproj ${DOTNET_BINARY} publish -r linux-x64 -o ${BUILD_SCRIPT_GENERATOR} -c Release $GIT_ORYX/src/BuildServer/BuildServer.csproj


@ -18,15 +18,6 @@
| version | Select or enter a PHP version | string | latest | | version | Select or enter a PHP version | string | latest |
| installComposer | Install PHP Composer? | boolean | true | | installComposer | Install PHP Composer? | boolean | true |
## Customizations
### VS Code Extensions
- `xdebug.php-debug`
- `bmewburn.vscode-intelephense-client`
- `xdebug.php-pack`
- `devsense.phptools-vscode`
## OS Support ## OS Support


@ -18,12 +18,6 @@ Installs PowerShell along with needed dependencies. Useful for base Dockerfiles
| version | Select or enter a version of PowerShell. | string | latest | | version | Select or enter a version of PowerShell. | string | latest |
| modules | Optional comma separated list of PowerShell modules to install. | string | - | | modules | Optional comma separated list of PowerShell modules to install. | string | - |
## Customizations
### VS Code Extensions
- `ms-vscode.powershell`
## OS Support ## OS Support


@ -1,6 +1,6 @@
{ {
"id": "powershell", "id": "powershell",
"version": "1.2.0", "version": "1.1.0",
"name": "PowerShell", "name": "PowerShell",
"documentationURL": "https://github.com/devcontainers/features/tree/main/src/powershell", "documentationURL": "https://github.com/devcontainers/features/tree/main/src/powershell",
"description": "Installs PowerShell along with needed dependencies. Useful for base Dockerfiles that often are missing required install dependencies like gpg.", "description": "Installs PowerShell along with needed dependencies. Useful for base Dockerfiles that often are missing required install dependencies like gpg.",
@ -19,16 +19,9 @@
"type": "string", "type": "string",
"default": "", "default": "",
"description": "Optional comma separated list of PowerShell modules to install." "description": "Optional comma separated list of PowerShell modules to install."
} }
}, },
"customizations": {
"vscode": {
"extensions": [
"ms-vscode.powershell"
]
}
},
"installsAfter": [ "installsAfter": [
"ghcr.io/devcontainers/features/common-utils" "ghcr.io/devcontainers/features/common-utils"
] ]
} }


@ -19,7 +19,6 @@ MICROSOFT_GPG_KEYS_URI="https://packages.microsoft.com/keys/microsoft.asc"
POWERSHELL_ARCHIVE_ARCHITECTURES="amd64" POWERSHELL_ARCHIVE_ARCHITECTURES="amd64"
POWERSHELL_ARCHIVE_VERSION_CODENAMES="stretch buster bionic focal bullseye jammy" POWERSHELL_ARCHIVE_VERSION_CODENAMES="stretch buster bionic focal bullseye jammy"
GPG_KEY_SERVERS="keyserver hkp://keyserver.ubuntu.com GPG_KEY_SERVERS="keyserver hkp://keyserver.ubuntu.com
keyserver hkp://keyserver.ubuntu.com:80
keyserver hkps://keys.openpgp.org keyserver hkps://keys.openpgp.org
keyserver hkp://keyserver.pgp.com" keyserver hkp://keyserver.pgp.com"


@ -21,14 +21,6 @@ Installs the provided version of Python, as well as PIPX, and other common Pytho
| installPath | The path where python will be installed. | string | /usr/local/python |
| installJupyterlab | Install JupyterLab, a web-based interactive development environment for notebooks | boolean | false |
| configureJupyterlabAllowOrigin | Configure JupyterLab to accept HTTP requests from the specified origin | string | - |
| httpProxy | Connect to GPG keyservers using a proxy for fetching source code signatures by configuring this option | string | - |
## Customizations
### VS Code Extensions
- `ms-python.python`
- `ms-python.vscode-pylance`


@ -1,6 +1,6 @@
{ {
"id": "python", "id": "python",
"version": "1.3.1", "version": "1.0.21",
"name": "Python", "name": "Python",
"documentationURL": "https://github.com/devcontainers/features/tree/main/src/python", "documentationURL": "https://github.com/devcontainers/features/tree/main/src/python",
"description": "Installs the provided version of Python, as well as PIPX, and other common Python utilities. JupyterLab is conditionally installed with the python feature. Note: May require source code compilation.", "description": "Installs the provided version of Python, as well as PIPX, and other common Python utilities. JupyterLab is conditionally installed with the python feature. Note: May require source code compilation.",
@ -11,7 +11,6 @@
"latest", "latest",
"os-provided", "os-provided",
"none", "none",
"3.12",
"3.11", "3.11",
"3.10", "3.10",
"3.9", "3.9",
@ -46,11 +45,6 @@
"type": "string", "type": "string",
"default": "", "default": "",
"description": "Configure JupyterLab to accept HTTP requests from the specified origin" "description": "Configure JupyterLab to accept HTTP requests from the specified origin"
},
"httpProxy": {
"type": "string",
"default": "",
"description": "Connect to GPG keyservers using a proxy for fetching source code signatures by configuring this option"
} }
}, },
"containerEnv": { "containerEnv": {
@ -66,7 +60,15 @@
"ms-python.vscode-pylance" "ms-python.vscode-pylance"
], ],
"settings": { "settings": {
"python.defaultInterpreterPath": "/usr/local/python/current/bin/python" "python.defaultInterpreterPath": "/usr/local/python/current/bin/python",
"python.formatting.autopep8Path": "/usr/local/py-utils/bin/autopep8",
"python.formatting.blackPath": "/usr/local/py-utils/bin/black",
"python.linting.flake8Path": "/usr/local/py-utils/bin/flake8",
"python.linting.flake8Enabled": false,
"python.linting.mypyPath": "/usr/local/py-utils/bin/mypy",
"python.linting.mypyEnabled": false,
"python.linting.pylintPath": "/usr/local/py-utils/bin/pylint",
"python.linting.pylintEnabled": false
} }
} }
}, },


@ -26,15 +26,12 @@ CONFIGURE_JUPYTERLAB_ALLOW_ORIGIN="${CONFIGUREJUPYTERLABALLOWORIGIN:-""}"
# alongside PYTHON_VERSION, but not set as default. # alongside PYTHON_VERSION, but not set as default.
ADDITIONAL_VERSIONS="${ADDITIONALVERSIONS:-""}" ADDITIONAL_VERSIONS="${ADDITIONALVERSIONS:-""}"
DEFAULT_UTILS=("pylint" "flake8" "autopep8" "black" "yapf" "mypy" "pydocstyle" "pycodestyle" "bandit" "pipenv" "virtualenv" "pytest") DEFAULT_UTILS=("pylint" "flake8" "autopep8" "black" "yapf" "mypy" "pydocstyle" "pycodestyle" "bandit" "pipenv" "virtualenv")
PYTHON_SOURCE_GPG_KEYS="64E628F8D684696D B26995E310250568 2D347EA6AA65421D FB9921286F5E1540 3A5CA953F73C700D 04C367C218ADD4FF 0EDDC5F26A45C816 6AF053F07D9DC8D2 C9BE28DEE6DF025C 126EB563A74B06BF D9866941EA5BBD71 ED9D77D5 A821E680E5FA6305" PYTHON_SOURCE_GPG_KEYS="64E628F8D684696D B26995E310250568 2D347EA6AA65421D FB9921286F5E1540 3A5CA953F73C700D 04C367C218ADD4FF 0EDDC5F26A45C816 6AF053F07D9DC8D2 C9BE28DEE6DF025C 126EB563A74B06BF D9866941EA5BBD71 ED9D77D5"
GPG_KEY_SERVERS="keyserver hkp://keyserver.ubuntu.com GPG_KEY_SERVERS="keyserver hkp://keyserver.ubuntu.com
keyserver hkp://keyserver.ubuntu.com:80
keyserver hkps://keys.openpgp.org keyserver hkps://keys.openpgp.org
keyserver hkp://keyserver.pgp.com" keyserver hkp://keyserver.pgp.com"
KEYSERVER_PROXY="${HTTPPROXY:-"${HTTP_PROXY:-""}"}"
set -e set -e
# Clean up # Clean up
@ -87,9 +84,6 @@ receive_gpg_keys() {
mkdir -p "$(dirname \"$2\")" mkdir -p "$(dirname \"$2\")"
keyring_args="--no-default-keyring --keyring $2" keyring_args="--no-default-keyring --keyring $2"
fi fi
if [ ! -z "${KEYSERVER_PROXY}" ]; then
keyring_args="${keyring_args} --keyserver-options http-proxy=${KEYSERVER_PROXY}"
fi
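Put together, the resulting key fetch looks roughly like the call below. This is a sketch only: the keyring path is a hypothetical example, and the key ID is one of the entries from PYTHON_SOURCE_GPG_KEYS above.

```bash
gpg --keyserver hkp://keyserver.ubuntu.com \
    --no-default-keyring --keyring /tmp/tmp-gnupg/python.gpg \
    --keyserver-options http-proxy="${KEYSERVER_PROXY}" \
    --recv-keys 64E628F8D684696D
```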
# Use a temporary location for gpg keys to avoid polluting image # Use a temporary location for gpg keys to avoid polluting image
export GNUPGHOME="/tmp/tmp-gnupg" export GNUPGHOME="/tmp/tmp-gnupg"
@ -305,31 +299,25 @@ sudo_if() {
if [ "$(id -u)" -eq 0 ] && [ "$USERNAME" != "root" ]; then
su - "$USERNAME" -c "$COMMAND"
else
$COMMAND
fi
}
install_user_package() {
INSTALL_UNDER_ROOT="$1"
PACKAGE="$2"
if [ "$INSTALL_UNDER_ROOT" = true ]; then
sudo_if "${PYTHON_SRC}" -m pip install --upgrade --no-cache-dir "$PACKAGE"
else
sudo_if "${PYTHON_SRC}" -m pip install --user --upgrade --no-cache-dir "$PACKAGE"
fi
}
add_user_jupyter_config() {
CONFIG_DIR="$1"
CONFIG_FILE="$2"
# Make sure the config file exists or create it with proper permissions
test -d "$CONFIG_DIR" || sudo_if mkdir "$CONFIG_DIR"
test -f "$CONFIG_FILE" || sudo_if touch "$CONFIG_FILE"
# Don't write the same config more than once
grep -q "$3" "$CONFIG_FILE" || echo "$3" >> "$CONFIG_FILE"
}

if [ "$(id -u)" -eq 0 ] && [ "$USERNAME" != "root" ]; then
su - "$USERNAME" -c "$COMMAND"
else
"$COMMAND"
fi
}
install_user_package() {
PACKAGE="$1"
sudo_if "${PYTHON_SRC}" -m pip install --user --upgrade --no-cache-dir "$PACKAGE"
}
add_user_jupyter_config() {
CONFIG_DIR="/home/$USERNAME/.jupyter"
CONFIG_FILE="$CONFIG_DIR/jupyter_server_config.py"
# Make sure the config file exists or create it with proper permissions
test -d "$CONFIG_DIR" || sudo_if mkdir "$CONFIG_DIR"
test -f "$CONFIG_FILE" || sudo_if touch "$CONFIG_FILE"
# Don't write the same config more than once
grep -q "$1" "$CONFIG_FILE" || echo "$1" >> "$CONFIG_FILE"
}
install_python() { install_python() {
@ -468,26 +456,13 @@ if [ "${INSTALL_JUPYTERLAB}" = "true" ]; then
exit 1 exit 1
fi fi
INSTALL_UNDER_ROOT=true install_user_package jupyterlab
if [ "$(id -u)" -eq 0 ] && [ "$USERNAME" != "root" ]; then install_user_package jupyterlab-git
INSTALL_UNDER_ROOT=false
fi
install_user_package $INSTALL_UNDER_ROOT jupyterlab
install_user_package $INSTALL_UNDER_ROOT jupyterlab-git
# Configure JupyterLab if needed # Configure JupyterLab if needed
if [ -n "${CONFIGURE_JUPYTERLAB_ALLOW_ORIGIN}" ]; then if [ -n "${CONFIGURE_JUPYTERLAB_ALLOW_ORIGIN}" ]; then
# Resolve config directory add_user_jupyter_config "c.ServerApp.allow_origin = '${CONFIGURE_JUPYTERLAB_ALLOW_ORIGIN}'"
CONFIG_DIR="/root/.jupyter" add_user_jupyter_config "c.NotebookApp.allow_origin = '${CONFIGURE_JUPYTERLAB_ALLOW_ORIGIN}'"
if [ "$INSTALL_UNDER_ROOT" = false ]; then
CONFIG_DIR="/home/$USERNAME/.jupyter"
fi
CONFIG_FILE="$CONFIG_DIR/jupyter_server_config.py"
add_user_jupyter_config $CONFIG_DIR $CONFIG_FILE "c.ServerApp.allow_origin = '${CONFIGURE_JUPYTERLAB_ALLOW_ORIGIN}'"
add_user_jupyter_config $CONFIG_DIR $CONFIG_FILE "c.NotebookApp.allow_origin = '${CONFIGURE_JUPYTERLAB_ALLOW_ORIGIN}'"
fi fi
fi fi
View file
@ -17,12 +17,6 @@ Installs Ruby, rvm, rbenv, common Ruby utilities, and needed dependencies.
|-----|-----|-----|-----| |-----|-----|-----|-----|
| version | Select or enter a Ruby version to install | string | latest | | version | Select or enter a Ruby version to install | string | latest |
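As a usage sketch, the `version` option above is set from a project's devcontainer.json roughly as follows (feature ID assumed from the repository's ghcr.io/devcontainers/features namespace; the pinned version is illustrative):

```jsonc
{
  "image": "mcr.microsoft.com/devcontainers/base:ubuntu",
  "features": {
    "ghcr.io/devcontainers/features/ruby:1": {
      // "latest" is the default; a specific Ruby version is shown for illustration
      "version": "3.2"
    }
  }
}
```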
## Customizations
### VS Code Extensions
- `shopify.ruby-lsp`
## OS Support ## OS Support
View file
@ -1,6 +1,6 @@
{ {
"id": "ruby", "id": "ruby",
"version": "1.2.0", "version": "1.0.9",
"name": "Ruby (via rvm)", "name": "Ruby (via rvm)",
"documentationURL": "https://github.com/devcontainers/features/tree/main/src/ruby", "documentationURL": "https://github.com/devcontainers/features/tree/main/src/ruby",
"description": "Installs Ruby, rvm, rbenv, common Ruby utilities, and needed dependencies.", "description": "Installs Ruby, rvm, rbenv, common Ruby utilities, and needed dependencies.",
@ -21,7 +21,7 @@
"customizations": { "customizations": {
"vscode": { "vscode": {
"extensions": [ "extensions": [
"shopify.ruby-lsp" "rebornix.Ruby"
] ]
} }
}, },
View file
@ -19,12 +19,10 @@ ADDITIONAL_VERSIONS="${ADDITIONALVERSIONS:-""}"
# Note: ruby-debug-ide will install the right version of debase if missing and # Note: ruby-debug-ide will install the right version of debase if missing and
# installing debase directly fails on Ruby 3.1.0 as of 1/7/2022, so omitting. # installing debase directly fails on Ruby 3.1.0 as of 1/7/2022, so omitting.
# installing ruby-debug-ide on debian fails, so omitting. DEFAULT_GEMS="rake ruby-debug-ide"
DEFAULT_GEMS="rake"
RVM_GPG_KEYS="409B6B1796C275462A1703113804BB82D39DC0E3 7D2BAF1CF37B13E2069D6956105BD0E739499BDB" RVM_GPG_KEYS="409B6B1796C275462A1703113804BB82D39DC0E3 7D2BAF1CF37B13E2069D6956105BD0E739499BDB"
GPG_KEY_SERVERS="keyserver hkp://keyserver.ubuntu.com GPG_KEY_SERVERS="keyserver hkp://keyserver.ubuntu.com
keyserver hkp://keyserver.ubuntu.com:80
keyserver hkps://keys.openpgp.org keyserver hkps://keys.openpgp.org
keyserver hkp://keyserver.pgp.com" keyserver hkp://keyserver.pgp.com"
@ -178,11 +176,9 @@ fi
find_version_from_git_tags RUBY_VERSION "https://github.com/ruby/ruby" "tags/v" "_" find_version_from_git_tags RUBY_VERSION "https://github.com/ruby/ruby" "tags/v" "_"
# Just install Ruby if RVM already installed # Just install Ruby if RVM already installed
if rvm --version > /dev/null; then if [ $(rvm --version) != "" ]; then
echo "Ruby Version Manager already exists." echo "Ruby Version Manager already exists."
if [[ "$(ruby -v)" = *"${RUBY_VERSION}"* ]]; then if [ "${RUBY_VERSION}" != "none" ]; then
echo "(!) Ruby is already installed with version ${RUBY_VERSION}. Skipping..."
elif [ "${RUBY_VERSION}" != "none" ]; then
echo "Installing specified Ruby version." echo "Installing specified Ruby version."
su ${USERNAME} -c "rvm install ruby ${RUBY_VERSION}" su ${USERNAME} -c "rvm install ruby ${RUBY_VERSION}"
fi fi
@ -194,9 +190,6 @@ else
# Determine appropriate settings for rvm installer # Determine appropriate settings for rvm installer
if [ "${RUBY_VERSION}" = "none" ]; then if [ "${RUBY_VERSION}" = "none" ]; then
RVM_INSTALL_ARGS="" RVM_INSTALL_ARGS=""
elif [[ "$(ruby -v)" = *"${RUBY_VERSION}"* ]]; then
echo "(!) Ruby is already installed with version ${RUBY_VERSION}. Skipping..."
RVM_INSTALL_ARGS=""
else else
if [ "${RUBY_VERSION}" = "latest" ] || [ "${RUBY_VERSION}" = "current" ] || [ "${RUBY_VERSION}" = "lts" ]; then if [ "${RUBY_VERSION}" = "latest" ] || [ "${RUBY_VERSION}" = "current" ] || [ "${RUBY_VERSION}" = "lts" ]; then
RVM_INSTALL_ARGS="--ruby" RVM_INSTALL_ARGS="--ruby"
@ -279,11 +272,8 @@ if [ "${SKIP_RBENV_RBUILD}" != "true" ]; then
ln -s /usr/local/share/ruby-build /home/${USERNAME}/.rbenv/plugins/ruby-build ln -s /usr/local/share/ruby-build /home/${USERNAME}/.rbenv/plugins/ruby-build
fi fi
# Oryx expects ruby to be installed in this specific path, else it breaks the oryx magic for ruby projects. ln -s /usr/local/rvm/rubies/default/bin/ruby /usr/local/rvm/gems/default/bin
if [ ! -f /usr/local/rvm/gems/default/bin/ruby ]; then
ln -s /usr/local/rvm/rubies/default/bin/ruby /usr/local/rvm/gems/default/bin
fi
chown -R "${USERNAME}:rvm" "/home/${USERNAME}/.rbenv/" chown -R "${USERNAME}:rvm" "/home/${USERNAME}/.rbenv/"
chmod -R g+r+w "/home/${USERNAME}/.rbenv" chmod -R g+r+w "/home/${USERNAME}/.rbenv"
find "/home/${USERNAME}/.rbenv" -type d | xargs -n 1 chmod g+s find "/home/${USERNAME}/.rbenv" -type d | xargs -n 1 chmod g+s
View file
@ -18,15 +18,6 @@ Installs Rust, common Rust utilities, and their required dependencies
| version | Select or enter a version of Rust to install. | string | latest | | version | Select or enter a version of Rust to install. | string | latest |
| profile | Select a rustup install profile. | string | minimal | | profile | Select a rustup install profile. | string | minimal |
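The two options above map onto a devcontainer.json like this; a minimal sketch assuming the published feature ID, with illustrative values:

```jsonc
{
  "image": "mcr.microsoft.com/devcontainers/base:debian",
  "features": {
    "ghcr.io/devcontainers/features/rust:1": {
      "version": "latest",
      // rustup profile; the Feature defaults to "minimal" per the table above
      "profile": "default"
    }
  }
}
```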
## Customizations
### VS Code Extensions
- `vadimcn.vscode-lldb`
- `rust-lang.rust-analyzer`
- `tamasfe.even-better-toml`
- `serayuzgur.crates`
## OS Support ## OS Support
View file
@ -1,6 +1,6 @@
{ {
"id": "rust", "id": "rust",
"version": "1.1.1", "version": "1.0.11",
"name": "Rust", "name": "Rust",
"documentationURL": "https://github.com/devcontainers/features/tree/main/src/rust", "documentationURL": "https://github.com/devcontainers/features/tree/main/src/rust",
"description": "Installs Rust, common Rust utilities, and their required dependencies", "description": "Installs Rust, common Rust utilities, and their required dependencies",
@ -10,13 +10,6 @@
"proposals": [ "proposals": [
"latest", "latest",
"none", "none",
"1.70",
"1.69",
"1.68",
"1.67",
"1.66",
"1.65",
"1.64",
"1.63", "1.63",
"1.62", "1.62",
"1.61" "1.61"
@ -39,6 +32,7 @@
"vscode": { "vscode": {
"extensions": [ "extensions": [
"vadimcn.vscode-lldb", "vadimcn.vscode-lldb",
"mutantdino.resourcemonitor",
"rust-lang.rust-analyzer", "rust-lang.rust-analyzer",
"tamasfe.even-better-toml", "tamasfe.even-better-toml",
"serayuzgur.crates" "serayuzgur.crates"
View file
@ -48,6 +48,21 @@ elif [ "${USERNAME}" = "none" ] || ! id -u ${USERNAME} > /dev/null 2>&1; then
USERNAME=root USERNAME=root
fi fi
# Get central common setting
get_common_setting() {
if [ "${common_settings_file_loaded}" != "true" ]; then
curl -sfL "https://aka.ms/vscode-dev-containers/script-library/settings.env" 2>/dev/null -o /tmp/vsdc-settings.env || echo "Could not download settings file. Skipping."
common_settings_file_loaded=true
fi
if [ -f "/tmp/vsdc-settings.env" ]; then
local multi_line=""
if [ "$2" = "true" ]; then multi_line="-z"; fi
local result="$(grep ${multi_line} -oP "$1=\"?\K[^\"]+" /tmp/vsdc-settings.env | tr -d '\0')"
if [ ! -z "${result}" ]; then declare -g $1="${result}"; fi
fi
echo "$1=${!1}"
}
# Figure out correct version if a three part version number is not passed # Figure out correct version if a three part version number is not passed
find_version_from_git_tags() { find_version_from_git_tags() {
local variable_name=$1 local variable_name=$1
View file
@ -1,6 +1,6 @@
{ {
"id": "sshd", "id": "sshd",
"version": "1.0.9", "version": "1.0.8",
"name": "SSH server", "name": "SSH server",
"documentationURL": "https://github.com/devcontainers/features/tree/main/src/sshd", "documentationURL": "https://github.com/devcontainers/features/tree/main/src/sshd",
"description": "Adds a SSH server into a container so that you can use an external terminal, sftp, or SSHFS to interact with it.", "description": "Adds a SSH server into a container so that you can use an external terminal, sftp, or SSHFS to interact with it.",
View file
@ -13,6 +13,7 @@ SSHD_PORT="${SSHD_PORT:-"2222"}"
USERNAME="${USERNAME:-"${_REMOTE_USER:-"automatic"}"}" USERNAME="${USERNAME:-"${_REMOTE_USER:-"automatic"}"}"
START_SSHD="${START_SSHD:-"false"}" START_SSHD="${START_SSHD:-"false"}"
NEW_PASSWORD="${NEW_PASSWORD:-"skip"}" NEW_PASSWORD="${NEW_PASSWORD:-"skip"}"
FIX_ENVIRONMENT="${FIX_ENVIRONMENT:-"true"}"
set -e set -e
@ -92,6 +93,35 @@ sed -i -E "s/#*\s*Port\s+.+/Port ${SSHD_PORT}/g" /etc/ssh/sshd_config
# Need to UsePAM so /etc/environment is processed # Need to UsePAM so /etc/environment is processed
sed -i -E "s/#?\s*UsePAM\s+.+/UsePAM yes/g" /etc/ssh/sshd_config sed -i -E "s/#?\s*UsePAM\s+.+/UsePAM yes/g" /etc/ssh/sshd_config
# Script to store variables that exist at the time the ENTRYPOINT is fired
store_env_script="$(cat << 'EOF'
# Wire in codespaces secret processing to zsh if present (since may have been added to image after script was run)
if [ -f /etc/zsh/zlogin ] && ! grep '/etc/profile.d/00-restore-secrets.sh' /etc/zsh/zlogin > /dev/null 2>&1; then
echo -e "if [ -f /etc/profile.d/00-restore-secrets.sh ]; then . /etc/profile.d/00-restore-secrets.sh; fi\n$(cat /etc/zsh/zlogin 2>/dev/null || echo '')" | sudoIf tee /etc/zsh/zlogin > /dev/null
fi
EOF
)"
# Script to ensure login shells get the latest Codespaces secrets
restore_secrets_script="$(cat << 'EOF'
#!/bin/sh
if [ "${CODESPACES}" != "true" ] || [ "${VSCDC_FIXED_SECRETS}" = "true" ] || [ ! -z "${GITHUB_CODESPACES_TOKEN}" ]; then
# Not codespaces, already run, or secrets already in environment, so return
return
fi
if [ -f /workspaces/.codespaces/shared/.env-secrets ]; then
while read line
do
key=$(echo $line | sed "s/=.*//")
value=$(echo $line | sed "s/$key=//1")
decodedValue=$(echo $value | base64 -d)
export $key="$decodedValue"
done < /workspaces/.codespaces/shared/.env-secrets
fi
export VSCDC_FIXED_SECRETS=true
EOF
)"
# Write out a script that can be referenced as an ENTRYPOINT to auto-start sshd and fix login environments # Write out a script that can be referenced as an ENTRYPOINT to auto-start sshd and fix login environments
tee /usr/local/share/ssh-init.sh > /dev/null \ tee /usr/local/share/ssh-init.sh > /dev/null \
<< 'EOF' << 'EOF'
@ -111,6 +141,15 @@ sudoIf()
} }
EOF EOF
if [ "${FIX_ENVIRONMENT}" = "true" ]; then
echo "${store_env_script}" >> /usr/local/share/ssh-init.sh
echo "${restore_secrets_script}" > /etc/profile.d/00-restore-secrets.sh
chmod +x /etc/profile.d/00-restore-secrets.sh
# Wire in zsh if present
if type zsh > /dev/null 2>&1; then
echo -e "if [ -f /etc/profile.d/00-restore-secrets.sh ]; then . /etc/profile.d/00-restore-secrets.sh; fi\n$(cat /etc/zsh/zlogin 2>/dev/null || echo '')" > /etc/zsh/zlogin
fi
fi
tee -a /usr/local/share/ssh-init.sh > /dev/null \ tee -a /usr/local/share/ssh-init.sh > /dev/null \
<< 'EOF' << 'EOF'
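The script above defaults the SSH server to port 2222 and appends the environment-fixup logic to /usr/local/share/ssh-init.sh when FIX_ENVIRONMENT is true. A minimal sketch of consuming the Feature and forwarding that port from a devcontainer.json (feature ID assumed from the repository's publishing convention):

```jsonc
{
  "image": "mcr.microsoft.com/devcontainers/base:ubuntu",
  "features": {
    "ghcr.io/devcontainers/features/sshd:1": {}
  },
  // SSHD_PORT defaults to 2222 in the install script above
  "forwardPorts": [2222]
}
```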
View file
@ -1,9 +1,5 @@
## Licensing
On August 10, 2023, HashiCorp announced a change of license for its products, including Terraform. After ~9 years of Terraform being open source under the MPL v2 license, it moved to the non-open-source BSL v1.1 license, starting with the next (1.6) version. See https://github.com/hashicorp/terraform/blob/main/LICENSE
## OS Support ## OS Support
This Feature should work on recent versions of Debian/Ubuntu-based distributions with the `apt` package manager installed. This Feature should work on recent versions of Debian/Ubuntu-based distributions with the `apt` package manager installed.
View file
@ -16,25 +16,14 @@ Installs the Terraform CLI and optionally TFLint and Terragrunt. Auto-detects la
| Options Id | Description | Type | Default Value | | Options Id | Description | Type | Default Value |
|-----|-----|-----|-----| |-----|-----|-----|-----|
| version | Terraform version | string | latest | | version | Terraform version | string | latest |
| tflint | Tflint version (https://github.com/terraform-linters/tflint/releases) | string | latest | | tflint | Tflint version | string | latest |
| terragrunt | Terragrunt version | string | latest | | terragrunt | Terragrunt version | string | latest |
| installSentinel | Install sentinel, a language and framework for policy built to be embedded in existing software to enable fine-grained, logic-based policy decisions | boolean | false | | installSentinel | Install sentinel, a language and framework for policy built to be embedded in existing software to enable fine-grained, logic-based policy decisions | boolean | false |
| installTFsec | Install tfsec, a tool to spot potential misconfigurations for your terraform code | boolean | false | | installTFsec | Install tfsec, a tool to spot potential misconfigurations for your terraform code | boolean | false |
| installTerraformDocs | Install terraform-docs, a utility to generate documentation from Terraform modules | boolean | false | | installTerraformDocs | Install terraform-docs, a utility to generate documentation from Terraform modules | boolean | false |
| httpProxy | Connect to a keyserver using a proxy by configuring this option | string | - | | httpProxy | Connect to a keyserver using a proxy by configuring this option | string | - |
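Taken together, the options above are set from a project's devcontainer.json; a hedged sketch using the published feature ID and illustrative values (the proxy URL is a placeholder):

```jsonc
{
  "image": "mcr.microsoft.com/devcontainers/base:ubuntu",
  "features": {
    "ghcr.io/devcontainers/features/terraform:1": {
      "version": "latest",
      "tflint": "latest",
      "terragrunt": "latest",
      "installTFsec": true,
      "installTerraformDocs": true,
      // Only needed when GPG keyservers must be reached through a proxy
      "httpProxy": "http://proxy.example.com:3128"
    }
  }
}
```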
## Customizations
### VS Code Extensions
- `HashiCorp.terraform`
- `ms-azuretools.vscode-azureterraform`
## Licensing
On August 10, 2023, HashiCorp announced a change of license for its products, including Terraform. After ~9 years of Terraform being open source under the MPL v2 license, it moved to the non-open-source BSL v1.1 license, starting with the next (1.6) version. See https://github.com/hashicorp/terraform/blob/main/LICENSE
## OS Support ## OS Support
View file
@ -1,6 +1,6 @@
{ {
"id": "terraform", "id": "terraform",
"version": "1.3.5", "version": "1.3.0",
"name": "Terraform, tflint, and TFGrunt", "name": "Terraform, tflint, and TFGrunt",
"documentationURL": "https://github.com/devcontainers/features/tree/main/src/terraform", "documentationURL": "https://github.com/devcontainers/features/tree/main/src/terraform",
"description": "Installs the Terraform CLI and optionally TFLint and Terragrunt. Auto-detects latest version and installs needed dependencies.", "description": "Installs the Terraform CLI and optionally TFLint and Terragrunt. Auto-detects latest version and installs needed dependencies.",
@ -20,12 +20,10 @@
"tflint": { "tflint": {
"type": "string", "type": "string",
"proposals": [ "proposals": [
"latest", "latest"
"0.47.0",
"0.46.1"
], ],
"default": "latest", "default": "latest",
"description": "Tflint version (https://github.com/terraform-linters/tflint/releases)" "description": "Tflint version"
}, },
"terragrunt": { "terragrunt": {
"type": "string", "type": "string",
@ -74,4 +72,4 @@
"installsAfter": [ "installsAfter": [
"ghcr.io/devcontainers/features/common-utils" "ghcr.io/devcontainers/features/common-utils"
] ]
} }
View file
@ -27,7 +27,7 @@ TFSEC_SHA256="${TFSEC_SHA256:-"automatic"}"
TERRAFORM_DOCS_SHA256="${TERRAFORM_DOCS_SHA256:-"automatic"}" TERRAFORM_DOCS_SHA256="${TERRAFORM_DOCS_SHA256:-"automatic"}"
TERRAFORM_GPG_KEY="72D7468F" TERRAFORM_GPG_KEY="72D7468F"
TFLINT_GPG_KEY_URI="https://raw.githubusercontent.com/terraform-linters/tflint/v0.46.1/8CE69160EB3F2FE9.key" TFLINT_GPG_KEY_URI="https://raw.githubusercontent.com/terraform-linters/tflint/master/8CE69160EB3F2FE9.key"
GPG_KEY_SERVERS="keyserver hkps://keyserver.ubuntu.com GPG_KEY_SERVERS="keyserver hkps://keyserver.ubuntu.com
keyserver hkps://keys.openpgp.org keyserver hkps://keys.openpgp.org
keyserver hkps://keyserver.pgp.com" keyserver hkps://keyserver.pgp.com"
@ -77,25 +77,6 @@ receive_gpg_keys() {
sleep 10s sleep 10s
fi fi
done done
# If all attempts fail, try getting the keyserver IP address and explicitly passing it to gpg
if [ "${gpg_ok}" = "false" ]; then
retry_count=0;
echo "(*) Resolving GPG keyserver IP address..."
local keyserver_ip_address=$( dig +short keyserver.ubuntu.com | head -n1 )
echo "(*) GPG keyserver IP address $keyserver_ip_address"
until [ "${gpg_ok}" = "true" ] || [ "${retry_count}" -eq "3" ];
do
echo "(*) Downloading GPG key..."
( echo "${keys}" | xargs -n 1 gpg -q ${keyring_args} --recv-keys --keyserver ${keyserver_ip_address}) 2>&1 && gpg_ok="true"
if [ "${gpg_ok}" != "true" ]; then
echo "(*) Failed getting key, retring in 10s..."
(( retry_count++ ))
sleep 10s
fi
done
fi
set -e set -e
if [ "${gpg_ok}" = "false" ]; then if [ "${gpg_ok}" = "false" ]; then
echo "(!) Failed to get gpg key." echo "(!) Failed to get gpg key."
@ -177,32 +158,11 @@ check_packages() {
fi fi
} }
# Install 'cosign' for validating signatures
# https://docs.sigstore.dev/cosign/overview/
ensure_cosign() {
check_packages curl ca-certificates gnupg2
if ! type cosign > /dev/null 2>&1; then
echo "Installing cosign..."
LATEST_COSIGN_VERSION="latest"
find_version_from_git_tags LATEST_COSIGN_VERSION 'https://github.com/sigstore/cosign'
curl -L "https://github.com/sigstore/cosign/releases/latest/download/cosign_${LATEST_COSIGN_VERSION}_${architecture}.deb" -o /tmp/cosign_${LATEST_COSIGN_VERSION}_${architecture}.deb
dpkg -i /tmp/cosign_${LATEST_COSIGN_VERSION}_${architecture}.deb
rm /tmp/cosign_${LATEST_COSIGN_VERSION}_${architecture}.deb
fi
if ! type cosign > /dev/null 2>&1; then
echo "(!) Failed to install cosign."
exit 1
fi
cosign version
}
# Ensure apt is in non-interactive to avoid prompts # Ensure apt is in non-interactive to avoid prompts
export DEBIAN_FRONTEND=noninteractive export DEBIAN_FRONTEND=noninteractive
# Install dependencies if missing # Install dependencies if missing
check_packages curl ca-certificates gnupg2 dirmngr coreutils unzip dnsutils check_packages curl ca-certificates gnupg2 dirmngr coreutils unzip
if ! type git > /dev/null 2>&1; then if ! type git > /dev/null 2>&1; then
check_packages git check_packages git
fi fi
@ -238,42 +198,17 @@ if [ "${TFLINT_VERSION}" != "none" ]; then
TFLINT_FILENAME="tflint_linux_${architecture}.zip" TFLINT_FILENAME="tflint_linux_${architecture}.zip"
curl -sSL -o /tmp/tf-downloads/${TFLINT_FILENAME} https://github.com/terraform-linters/tflint/releases/download/v${TFLINT_VERSION}/${TFLINT_FILENAME} curl -sSL -o /tmp/tf-downloads/${TFLINT_FILENAME} https://github.com/terraform-linters/tflint/releases/download/v${TFLINT_VERSION}/${TFLINT_FILENAME}
if [ "${TFLINT_SHA256}" != "dev-mode" ]; then if [ "${TFLINT_SHA256}" != "dev-mode" ]; then
if [ "${TFLINT_SHA256}" = "automatic" ]; then
if [ "${TFLINT_SHA256}" != "automatic" ]; then curl -sSL -o tflint_key "${TFLINT_GPG_KEY_URI}"
echo "${TFLINT_SHA256} *${TFLINT_FILENAME}" > tflint_checksums.txt gpg -q --import tflint_key
sha256sum --ignore-missing -c tflint_checksums.txt
else
curl -sSL -o tflint_checksums.txt https://github.com/terraform-linters/tflint/releases/download/v${TFLINT_VERSION}/checksums.txt curl -sSL -o tflint_checksums.txt https://github.com/terraform-linters/tflint/releases/download/v${TFLINT_VERSION}/checksums.txt
curl -sSL -o tflint_checksums.txt.sig https://github.com/terraform-linters/tflint/releases/download/v${TFLINT_VERSION}/checksums.txt.sig
set +e gpg --verify tflint_checksums.txt.sig tflint_checksums.txt
curl -sSL -o checksums.txt.keyless.sig https://github.com/terraform-linters/tflint/releases/download/v${TFLINT_VERSION}/checksums.txt.keyless.sig else
set -e echo "${TFLINT_SHA256} *${TFLINT_FILENAME}" > tflint_checksums.txt
# Check that checksums.txt.keyless.sig exists and is not empty
if [ -s checksums.txt.keyless.sig ]; then
# Validate checksums with cosign
curl -sSL -o checksums.txt.pem https://github.com/terraform-linters/tflint/releases/download/v${TFLINT_VERSION}/checksums.txt.pem
ensure_cosign
cosign verify-blob \
--certificate=/tmp/tf-downloads/checksums.txt.pem \
--signature=/tmp/tf-downloads/checksums.txt.keyless.sig \
--certificate-identity-regexp="^https://github.com/terraform-linters/tflint" \
--certificate-oidc-issuer=https://token.actions.githubusercontent.com \
/tmp/tf-downloads/tflint_checksums.txt
# Ensure that checksums.txt has $TFLINT_FILENAME
grep ${TFLINT_FILENAME} /tmp/tf-downloads/tflint_checksums.txt
# Validate downloaded file
sha256sum --ignore-missing -c tflint_checksums.txt
else
# Fallback to older, GPG-based verification (pre-0.47.0 of tflint)
curl -sSL -o tflint_checksums.txt.sig https://github.com/terraform-linters/tflint/releases/download/v${TFLINT_VERSION}/checksums.txt.sig
curl -sSL -o tflint_key "${TFLINT_GPG_KEY_URI}"
gpg -q --import tflint_key
gpg --verify tflint_checksums.txt.sig tflint_checksums.txt
fi
fi fi
sha256sum --ignore-missing -c tflint_checksums.txt
fi fi
unzip /tmp/tf-downloads/${TFLINT_FILENAME} unzip /tmp/tf-downloads/${TFLINT_FILENAME}
mv -f tflint /usr/local/bin/ mv -f tflint /usr/local/bin/
fi fi
View file
@ -8,8 +8,6 @@ source dev-container-features-test-lib
# Check to make sure the user is vscode # Check to make sure the user is vscode
check "user is vscode" whoami | grep vscode check "user is vscode" whoami | grep vscode
check "version" az --version
# Bicep-specific tests # Bicep-specific tests
check "bicep" bicep --version check "bicep" bicep --version
check "az bicep" az bicep version check "az bicep" az bicep version
View file
@ -8,8 +8,6 @@ source dev-container-features-test-lib
# Check to make sure the user is vscode # Check to make sure the user is vscode
check "user is vscode" whoami | grep vscode check "user is vscode" whoami | grep vscode
check "version" az --version
# Extension-specific tests # Extension-specific tests
check "aks-preview" az extension show --name aks-preview check "aks-preview" az extension show --name aks-preview
check "amg" az extension show --name amg check "amg" az extension show --name amg
View file
@ -1,8 +0,0 @@
#!/bin/bash
set -e
# Import test library for `check` command
source dev-container-features-test-lib
./install_extensions.sh
View file
@ -1,14 +0,0 @@
#!/bin/bash
set -e
# Import test library for `check` command
source dev-container-features-test-lib
# Check to make sure the user is vscode
check "user is vscode" whoami | grep vscode
check "version" az --version
# Report result
reportResults
View file
@ -9,16 +9,6 @@
} }
} }
}, },
"install_extensions_bookworm": {
"image": "mcr.microsoft.com/devcontainers/base:bookworm",
"user": "vscode",
"features": {
"azure-cli": {
"version": "latest",
"extensions": "aks-preview,amg,containerapp"
}
}
},
"install_bicep": { "install_bicep": {
"image": "mcr.microsoft.com/devcontainers/base:jammy", "image": "mcr.microsoft.com/devcontainers/base:jammy",
"user": "vscode", "user": "vscode",
@ -28,15 +18,5 @@
"installBicep": true "installBicep": true
} }
} }
},
"install_with_python": {
"image": "mcr.microsoft.com/devcontainers/base:jammy",
"user": "vscode",
"features": {
"azure-cli": {
"version": "latest",
"installUsingPython": true
}
}
} }
} }
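The removed scenarios exercised the Feature's `extensions`, `installUsingPython`, and `installBicep` options. For reference, a project-level devcontainer.json using the same option names (copied from the scenarios above; the rest of the snippet is an assumption) would look roughly like:

```jsonc
{
  "image": "mcr.microsoft.com/devcontainers/base:jammy",
  "features": {
    "ghcr.io/devcontainers/features/azure-cli:1": {
      "version": "latest",
      // Comma-separated az extensions, as in the removed install_extensions scenarios
      "extensions": "aks-preview,amg,containerapp",
      "installBicep": true
    }
  }
}
```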
View file
@ -1,13 +0,0 @@
#!/bin/bash
set -e
# Optional: Import test library
source dev-container-features-test-lib
# Definition specific tests
check "alpine default shell zsh" \
bash -c "getent passwd $(whoami) | awk -F : '{ print $7 }' | grep '/bin/zsh'"
# Report result
reportResults
View file
@ -9,7 +9,6 @@ source dev-container-features-test-lib
. /etc/os-release . /etc/os-release
check "non-root user" test "$(whoami)" = "devcontainer" check "non-root user" test "$(whoami)" = "devcontainer"
check "distro" test "${VERSION_ID}" = "7" check "distro" test "${VERSION_ID}" = "7"
check "jq" jq --version
# Report result # Report result
reportResults reportResults
View file
@ -1,13 +0,0 @@
#!/bin/bash
set -e
# Optional: Import test library
source dev-container-features-test-lib
# Definition specific tests
check "owned-config-sub-directory" bash -c "ls -ld ~/.config/subdirectory | awk '{print $3}' | grep 'devcontainer'"
check "owned-config-directory" bash -c "ls -ld ~/.config | awk '{print $3}' | grep 'devcontainer'"
# Report result
reportResults
View file
@ -7,11 +7,6 @@ source dev-container-features-test-lib
# Definition specific tests # Definition specific tests
check "default-shell-is-zsh" bash -c "getent passwd $(whoami) | awk -F: '{ print $7 }' | grep '/bin/zsh'" check "default-shell-is-zsh" bash -c "getent passwd $(whoami) | awk -F: '{ print $7 }' | grep '/bin/zsh'"
# check it overrides the ~/.zshrc with default dev containers template
check "default-zshrc-is-dev-container-template" bash -c "cat ~/.zshrc | grep ZSH_THEME | grep devcontainers"
check "zsh-path-contains-local-bin" zsh -l -c "echo $PATH | grep '/home/devcontainer/.local/bin'"
check "Ensure .zprofile is owned by remoteUser" bash -c "stat -c '%U' /home/devcontainer/.zprofile | grep devcontainer"
# Report result # Report result
reportResults reportResults
View file
@ -1,12 +0,0 @@
#!/bin/bash
set -e
# Optional: Import test library
source dev-container-features-test-lib
# Definition specific tests
check "default-zsh-with-no-zshrc" bash -c "[ ! -e ~/.zshrc ]"
# Report result
reportResults
View file
@ -1,14 +0,0 @@
#!/bin/bash
set -e
# Optional: Import test library
source dev-container-features-test-lib
# Definition specific tests
function file_not_overridden() {
cat $1 | grep 'alias fnomockalias=' | grep testingmock
}
check "default-zsh-with-no-zshrc" file_not_overridden /home/devcontainer/.zshrc
# Report result
reportResults
View file
@ -1,13 +0,0 @@
#!/bin/bash
set -e
# Optional: Import test library
source dev-container-features-test-lib
# Definition specific tests
check "user is customUser" grep customUser <(whoami)
check "home is /customHome" grep "/customHome" <(getent passwd customUser | cut -d: -f6)
# Report result
reportResults
View file
@ -1,4 +0,0 @@
FROM ubuntu:focal
RUN groupadd customUser -g 30000 && \
useradd customUser -u 30000 -g 30000 --create-home --home-dir /customHome
View file
@ -1,13 +0,0 @@
#!/bin/bash
set -e
# Optional: Import test library
source dev-container-features-test-lib
# Definition specific tests
check "user is customUser" grep customUser <(whoami)
check "home is /home/customUser" grep "/home/customUser" <(getent passwd customUser | cut -d: -f6)
# Report result
reportResults
View file
@ -9,7 +9,6 @@ source dev-container-features-test-lib
. /etc/os-release . /etc/os-release
check "non-root user" test "$(whoami)" = "devcontainer" check "non-root user" test "$(whoami)" = "devcontainer"
check "distro" test "${ID}" = "fedora" check "distro" test "${ID}" = "fedora"
check "jq" jq --version
# Report result # Report result
reportResults reportResults
View file
@ -9,7 +9,6 @@ source dev-container-features-test-lib
. /etc/os-release . /etc/os-release
check "non-root user" test "$(whoami)" = "devcontainer" check "non-root user" test "$(whoami)" = "devcontainer"
check "distro" test "${ID}" = "mariner" check "distro" test "${ID}" = "mariner"
check "jq" jq --version
# Report result # Report result
reportResults reportResults
View file
@ -10,7 +10,6 @@ source dev-container-features-test-lib
check "non-root user" test "$(whoami)" = "devcontainer" check "non-root user" test "$(whoami)" = "devcontainer"
check "distro" test "${PLATFORM_ID}" = "platform:el8" check "distro" test "${PLATFORM_ID}" = "platform:el8"
check "curl" curl --version check "curl" curl --version
check "jq" jq --version
# Report result # Report result
reportResults reportResults
View file
@ -10,7 +10,6 @@ source dev-container-features-test-lib
check "non-root user" test "$(whoami)" = "devcontainer" check "non-root user" test "$(whoami)" = "devcontainer"
check "distro" test "${PLATFORM_ID}" = "platform:el9" check "distro" test "${PLATFORM_ID}" = "platform:el9"
check "curl" curl --version check "curl" curl --version
check "jq" jq --version
# Report result # Report result
reportResults reportResults
View file
@ -108,46 +108,13 @@
} }
}, },
"configure_zsh_as_default_shell": { "configure_zsh_as_default_shell": {
"image": "ubuntu", "image": "mcr.microsoft.com/devcontainers/base:ubuntu",
"features": { "features": {
"common-utils": { "common-utils": {
"installZsh": true,
"configureZshAsDefaultShell": true "configureZshAsDefaultShell": true
} }
},
"remoteUser": "devcontainer"
},
"configure_zsh_no_template_second_step": {
"image": "mcr.microsoft.com/devcontainers/base:ubuntu",
"postCreateCommand": "echo alias fnomockalias=testingmock >> /home/devcontainer/.zshrc",
"remoteUser": "devcontainer",
"features": {
"common-utils": {
"installZsh": true,
"installOhMyZshConfig": false
}
} }
}, },
"configure_zsh_no_template_first_step": {
"image": "debian:bullseye",
"remoteUser": "devcontainer",
"features": {
"common-utils": {
"installZsh": true,
"installOhMyZshConfig": false
}
}
},
"config-subdirectory": {
"image": "alpine",
"remoteUser": "devcontainer",
"features": {
"common-utils": {}
},
"mounts": [
"source=${localEnv:HOME},target=/home/devcontainer/.config/subdirectory,type=bind,readonly"
]
},
"alpine-3-14": { "alpine-3-14": {
"image": "alpine:3.14", "image": "alpine:3.14",
"remoteUser": "devcontainer", "remoteUser": "devcontainer",
@ -166,7 +133,6 @@
"build": { "build": {
"dockerfile": "Dockerfile" "dockerfile": "Dockerfile"
}, },
"remoteUser": "vscode",
"features": { "features": {
"common-utils": { "common-utils": {
"username": "vscode", "username": "vscode",
@ -175,31 +141,7 @@
"upgradePackages": true, "upgradePackages": true,
"installZsh": true "installZsh": true
} }
}
},
"alpine-base-zsh-default": {
"image": "mcr.microsoft.com/devcontainers/base:alpine",
"remoteUser": "vscode",
"features": {
"common-utils": {
"configureZshAsDefaultShell": true
}
}
},
"devcontainer-custom-home": {
"build": {
"dockerfile": "Dockerfile"
}, },
"remoteUser": "customUser", "remoteUser": "vscode"
"features": {
"common-utils": {}
}
},
"devcontainer-custom-user-default-home": {
"image": "mcr.microsoft.com/devcontainers/base:alpine",
"remoteUser": "customUser",
"features": {
"common-utils": {}
}
} }
} }
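The scenarios above drive `devcontainer features test`; outside of testing, the same options appear in a project's devcontainer.json. A minimal sketch using option names taken from the removed scenarios (the unpinned feature reference resolves to the latest published version):

```jsonc
{
  "image": "mcr.microsoft.com/devcontainers/base:ubuntu",
  "features": {
    "ghcr.io/devcontainers/features/common-utils": {
      "installZsh": true,
      "configureZshAsDefaultShell": true,
      // Skip the bundled Oh My Zsh template, as in the removed "no template" scenarios
      "installOhMyZshConfig": false,
      "upgradePackages": true
    }
  },
  "remoteUser": "devcontainer"
}
```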
View file
@ -32,11 +32,5 @@ check-version-ge() {
certifiVersion=$(python -c "import certifi; print(certifi.__version__)") certifiVersion=$(python -c "import certifi; print(certifi.__version__)")
check-version-ge "certifi" "${certifiVersion}" "2022.12.07" check-version-ge "certifi" "${certifiVersion}" "2022.12.07"
cryptographyVersion=$(python -c "import cryptography; print(cryptography.__version__)")
check-version-ge "cryptography" "${cryptographyVersion}" "39.0.1"
setuptoolsVersion=$(python -c "import setuptools; print(setuptools.__version__)")
check-version-ge "setuptools" "${setuptoolsVersion}" "65.5.1"
# Report result # Report result
reportResults reportResults
Some files were not shown because too many files have changed in this diff.