diff --git a/.devcontainer/cccl-entrypoint.sh b/.devcontainer/cccl-entrypoint.sh index 36506e8ab3a..acfc6b86fea 100755 --- a/.devcontainer/cccl-entrypoint.sh +++ b/.devcontainer/cccl-entrypoint.sh @@ -5,7 +5,8 @@ set -e; if ! test -n "${DISABLE_SCCACHE:+x}" && test -n "${DEVCONTAINER_UTILS_ENABLE_SCCACHE_DIST:+x}" && ! test -n "${SCCACHE_DIST_URL:+x}"; then - export SCCACHE_DIST_URL="https://$(dpkg --print-architecture).$(uname -s | tr '[:upper:]' '[:lower:]').sccache.rapids.nvidia.com"; + SCCACHE_DIST_URL="https://$(uname -m | sed -e 's/x86_/amd/' -e 's/aarch/arm/').linux.sccache.rapids.nvidia.com"; + export SCCACHE_DIST_URL; echo "export SCCACHE_DIST_URL=$SCCACHE_DIST_URL" >> ~/.bashrc; fi diff --git a/.devcontainer/cuda12.0-gcc10/devcontainer.json b/.devcontainer/cuda12.0-gcc10/devcontainer.json index d88a4d6d1ab..cf1b32fa100 100644 --- a/.devcontainer/cuda12.0-gcc10/devcontainer.json +++ b/.devcontainer/cuda12.0-gcc10/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda12.0-gcc11/devcontainer.json b/.devcontainer/cuda12.0-gcc11/devcontainer.json index e3963d1642b..e4cf0c4b441 100644 --- a/.devcontainer/cuda12.0-gcc11/devcontainer.json +++ b/.devcontainer/cuda12.0-gcc11/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], 
"files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda12.0-gcc12/devcontainer.json b/.devcontainer/cuda12.0-gcc12/devcontainer.json index bd405957e2d..da7ed5a78f7 100644 --- a/.devcontainer/cuda12.0-gcc12/devcontainer.json +++ b/.devcontainer/cuda12.0-gcc12/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda12.0-gcc13/devcontainer.json b/.devcontainer/cuda12.0-gcc13/devcontainer.json index a191ca0549b..22068779d76 100644 --- a/.devcontainer/cuda12.0-gcc13/devcontainer.json +++ b/.devcontainer/cuda12.0-gcc13/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda12.0-gcc7/devcontainer.json b/.devcontainer/cuda12.0-gcc7/devcontainer.json index f701ad200ef..7990a990ede 100644 --- a/.devcontainer/cuda12.0-gcc7/devcontainer.json +++ b/.devcontainer/cuda12.0-gcc7/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + 
"ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda12.0-gcc8/devcontainer.json b/.devcontainer/cuda12.0-gcc8/devcontainer.json index 7b018eea978..11edc229692 100644 --- a/.devcontainer/cuda12.0-gcc8/devcontainer.json +++ b/.devcontainer/cuda12.0-gcc8/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda12.0-gcc9/devcontainer.json b/.devcontainer/cuda12.0-gcc9/devcontainer.json index 09bcf571880..6556f4eb38d 100644 --- a/.devcontainer/cuda12.0-gcc9/devcontainer.json +++ b/.devcontainer/cuda12.0-gcc9/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda12.0-llvm14/devcontainer.json b/.devcontainer/cuda12.0-llvm14/devcontainer.json index 226e3de1ad4..b7608836a97 100644 --- a/.devcontainer/cuda12.0-llvm14/devcontainer.json +++ 
b/.devcontainer/cuda12.0-llvm14/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda12.9-gcc10/devcontainer.json b/.devcontainer/cuda12.9-gcc10/devcontainer.json index 190a148dacf..f8f201a8e7a 100644 --- a/.devcontainer/cuda12.9-gcc10/devcontainer.json +++ b/.devcontainer/cuda12.9-gcc10/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda12.9-gcc11/devcontainer.json b/.devcontainer/cuda12.9-gcc11/devcontainer.json index b83a738d06e..3023919ffd4 100644 --- a/.devcontainer/cuda12.9-gcc11/devcontainer.json +++ b/.devcontainer/cuda12.9-gcc11/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git 
a/.devcontainer/cuda12.9-gcc12/devcontainer.json b/.devcontainer/cuda12.9-gcc12/devcontainer.json index 3728f62bc08..68024399cc6 100644 --- a/.devcontainer/cuda12.9-gcc12/devcontainer.json +++ b/.devcontainer/cuda12.9-gcc12/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda12.9-gcc13/devcontainer.json b/.devcontainer/cuda12.9-gcc13/devcontainer.json index 51db96863e8..8e5dad0e50f 100644 --- a/.devcontainer/cuda12.9-gcc13/devcontainer.json +++ b/.devcontainer/cuda12.9-gcc13/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda12.9-gcc14/devcontainer.json b/.devcontainer/cuda12.9-gcc14/devcontainer.json index 0e4cebdcbc0..b360ad1baee 100644 --- a/.devcontainer/cuda12.9-gcc14/devcontainer.json +++ b/.devcontainer/cuda12.9-gcc14/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ 
"--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda12.9-gcc7/devcontainer.json b/.devcontainer/cuda12.9-gcc7/devcontainer.json index a9858b271d0..28644870b1e 100644 --- a/.devcontainer/cuda12.9-gcc7/devcontainer.json +++ b/.devcontainer/cuda12.9-gcc7/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda12.9-gcc8/devcontainer.json b/.devcontainer/cuda12.9-gcc8/devcontainer.json index 61fcf440bfb..8bb1366cad9 100644 --- a/.devcontainer/cuda12.9-gcc8/devcontainer.json +++ b/.devcontainer/cuda12.9-gcc8/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda12.9-gcc9/devcontainer.json b/.devcontainer/cuda12.9-gcc9/devcontainer.json index 5cd47a2388f..117d6ea38fe 100644 --- a/.devcontainer/cuda12.9-gcc9/devcontainer.json +++ b/.devcontainer/cuda12.9-gcc9/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - 
"ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda12.9-llvm14/devcontainer.json b/.devcontainer/cuda12.9-llvm14/devcontainer.json index 8ab953abee6..519ff98f981 100644 --- a/.devcontainer/cuda12.9-llvm14/devcontainer.json +++ b/.devcontainer/cuda12.9-llvm14/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda12.9-llvm15/devcontainer.json b/.devcontainer/cuda12.9-llvm15/devcontainer.json index 8729ae3f95e..524557005a3 100644 --- a/.devcontainer/cuda12.9-llvm15/devcontainer.json +++ b/.devcontainer/cuda12.9-llvm15/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda12.9-llvm16/devcontainer.json b/.devcontainer/cuda12.9-llvm16/devcontainer.json index 6771dd82c74..5c87b618322 100644 --- 
a/.devcontainer/cuda12.9-llvm16/devcontainer.json +++ b/.devcontainer/cuda12.9-llvm16/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda12.9-llvm17/devcontainer.json b/.devcontainer/cuda12.9-llvm17/devcontainer.json index 43816532fbc..999aa8e0784 100644 --- a/.devcontainer/cuda12.9-llvm17/devcontainer.json +++ b/.devcontainer/cuda12.9-llvm17/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda12.9-llvm18/devcontainer.json b/.devcontainer/cuda12.9-llvm18/devcontainer.json index bfc232b74a2..0dcf28a1c95 100644 --- a/.devcontainer/cuda12.9-llvm18/devcontainer.json +++ b/.devcontainer/cuda12.9-llvm18/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + 
"shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda12.9-llvm19/devcontainer.json b/.devcontainer/cuda12.9-llvm19/devcontainer.json index 8b3b8ff3943..dbd3d9c7b76 100644 --- a/.devcontainer/cuda12.9-llvm19/devcontainer.json +++ b/.devcontainer/cuda12.9-llvm19/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda12.9-llvm20/devcontainer.json b/.devcontainer/cuda12.9-llvm20/devcontainer.json index 94b6ff4ae7c..c94a43899be 100644 --- a/.devcontainer/cuda12.9-llvm20/devcontainer.json +++ b/.devcontainer/cuda12.9-llvm20/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda12.9ext-gcc14/devcontainer.json b/.devcontainer/cuda12.9ext-gcc14/devcontainer.json index 53a8246ab86..2784ef4c68a 100644 --- a/.devcontainer/cuda12.9ext-gcc14/devcontainer.json +++ b/.devcontainer/cuda12.9ext-gcc14/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { 
"editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda12.9ext-llvm20/devcontainer.json b/.devcontainer/cuda12.9ext-llvm20/devcontainer.json index b163b6bfa23..c8227fbe781 100644 --- a/.devcontainer/cuda12.9ext-llvm20/devcontainer.json +++ b/.devcontainer/cuda12.9ext-llvm20/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda13.0-gcc11/devcontainer.json b/.devcontainer/cuda13.0-gcc11/devcontainer.json index eec87758816..0ea2ad8ceeb 100644 --- a/.devcontainer/cuda13.0-gcc11/devcontainer.json +++ b/.devcontainer/cuda13.0-gcc11/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda13.0-gcc12/devcontainer.json b/.devcontainer/cuda13.0-gcc12/devcontainer.json index 4114ed54917..a4ce23f1368 100644 --- a/.devcontainer/cuda13.0-gcc12/devcontainer.json +++ b/.devcontainer/cuda13.0-gcc12/devcontainer.json @@ -52,7 +52,8 @@ 
"llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda13.0-gcc13/devcontainer.json b/.devcontainer/cuda13.0-gcc13/devcontainer.json index ce166862e6d..761aa768623 100644 --- a/.devcontainer/cuda13.0-gcc13/devcontainer.json +++ b/.devcontainer/cuda13.0-gcc13/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda13.0-gcc14/devcontainer.json b/.devcontainer/cuda13.0-gcc14/devcontainer.json index a4a20b04d3f..656381e123f 100644 --- a/.devcontainer/cuda13.0-gcc14/devcontainer.json +++ b/.devcontainer/cuda13.0-gcc14/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda13.0-llvm15/devcontainer.json 
b/.devcontainer/cuda13.0-llvm15/devcontainer.json index 00042f2271f..7d3c87c0350 100644 --- a/.devcontainer/cuda13.0-llvm15/devcontainer.json +++ b/.devcontainer/cuda13.0-llvm15/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda13.0-llvm16/devcontainer.json b/.devcontainer/cuda13.0-llvm16/devcontainer.json index 9ae6160023a..23e89a7f4e0 100644 --- a/.devcontainer/cuda13.0-llvm16/devcontainer.json +++ b/.devcontainer/cuda13.0-llvm16/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda13.0-llvm17/devcontainer.json b/.devcontainer/cuda13.0-llvm17/devcontainer.json index 56ed3a2c008..5901a35a450 100644 --- a/.devcontainer/cuda13.0-llvm17/devcontainer.json +++ b/.devcontainer/cuda13.0-llvm17/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - 
"files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda13.0-llvm18/devcontainer.json b/.devcontainer/cuda13.0-llvm18/devcontainer.json index 16e94e5b10a..371dd109630 100644 --- a/.devcontainer/cuda13.0-llvm18/devcontainer.json +++ b/.devcontainer/cuda13.0-llvm18/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda13.0-llvm19/devcontainer.json b/.devcontainer/cuda13.0-llvm19/devcontainer.json index 0920ba3ce97..f2b237d4540 100644 --- a/.devcontainer/cuda13.0-llvm19/devcontainer.json +++ b/.devcontainer/cuda13.0-llvm19/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda13.0-llvm20/devcontainer.json b/.devcontainer/cuda13.0-llvm20/devcontainer.json index 11147e6ef83..fa78c857f60 100644 --- a/.devcontainer/cuda13.0-llvm20/devcontainer.json +++ b/.devcontainer/cuda13.0-llvm20/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + 
"ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda13.0-nvhpc25.11/devcontainer.json b/.devcontainer/cuda13.0-nvhpc25.11/devcontainer.json index f0b84cfa4a1..219b5993dee 100644 --- a/.devcontainer/cuda13.0-nvhpc25.11/devcontainer.json +++ b/.devcontainer/cuda13.0-nvhpc25.11/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda13.0ext-gcc14/devcontainer.json b/.devcontainer/cuda13.0ext-gcc14/devcontainer.json index 46636605575..6f2381f399a 100644 --- a/.devcontainer/cuda13.0ext-gcc14/devcontainer.json +++ b/.devcontainer/cuda13.0ext-gcc14/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda13.0ext-llvm20/devcontainer.json b/.devcontainer/cuda13.0ext-llvm20/devcontainer.json index b7670a675f4..b7afd5a7fb7 100644 --- 
a/.devcontainer/cuda13.0ext-llvm20/devcontainer.json +++ b/.devcontainer/cuda13.0ext-llvm20/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda13.1-gcc11/devcontainer.json b/.devcontainer/cuda13.1-gcc11/devcontainer.json index f07b4235601..73c76302d3e 100644 --- a/.devcontainer/cuda13.1-gcc11/devcontainer.json +++ b/.devcontainer/cuda13.1-gcc11/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda13.1-gcc12/devcontainer.json b/.devcontainer/cuda13.1-gcc12/devcontainer.json index 098e0ce627a..85dcac07ceb 100644 --- a/.devcontainer/cuda13.1-gcc12/devcontainer.json +++ b/.devcontainer/cuda13.1-gcc12/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + 
"shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda13.1-gcc13/devcontainer.json b/.devcontainer/cuda13.1-gcc13/devcontainer.json index b80404eb5e7..5d2f07f5858 100644 --- a/.devcontainer/cuda13.1-gcc13/devcontainer.json +++ b/.devcontainer/cuda13.1-gcc13/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda13.1-gcc14/devcontainer.json b/.devcontainer/cuda13.1-gcc14/devcontainer.json index 149491ec805..0d82aa5e4b9 100644 --- a/.devcontainer/cuda13.1-gcc14/devcontainer.json +++ b/.devcontainer/cuda13.1-gcc14/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda13.1-llvm15/devcontainer.json b/.devcontainer/cuda13.1-llvm15/devcontainer.json index 279e768b545..ea405d56988 100644 --- a/.devcontainer/cuda13.1-llvm15/devcontainer.json +++ b/.devcontainer/cuda13.1-llvm15/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": 
"seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda13.1-llvm16/devcontainer.json b/.devcontainer/cuda13.1-llvm16/devcontainer.json index 0aebba64a5e..a5e048f548e 100644 --- a/.devcontainer/cuda13.1-llvm16/devcontainer.json +++ b/.devcontainer/cuda13.1-llvm16/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda13.1-llvm17/devcontainer.json b/.devcontainer/cuda13.1-llvm17/devcontainer.json index 2758c694777..2a012f25bb5 100644 --- a/.devcontainer/cuda13.1-llvm17/devcontainer.json +++ b/.devcontainer/cuda13.1-llvm17/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda13.1-llvm18/devcontainer.json b/.devcontainer/cuda13.1-llvm18/devcontainer.json index 7e10039648f..2b0c2956aa7 100644 --- a/.devcontainer/cuda13.1-llvm18/devcontainer.json +++ b/.devcontainer/cuda13.1-llvm18/devcontainer.json @@ -52,7 +52,8 @@ 
"llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda13.1-llvm19/devcontainer.json b/.devcontainer/cuda13.1-llvm19/devcontainer.json index 0589c4869d2..52ccb4afcea 100644 --- a/.devcontainer/cuda13.1-llvm19/devcontainer.json +++ b/.devcontainer/cuda13.1-llvm19/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda13.1-llvm20/devcontainer.json b/.devcontainer/cuda13.1-llvm20/devcontainer.json index cb91ece7fe0..7d1590bc4ae 100644 --- a/.devcontainer/cuda13.1-llvm20/devcontainer.json +++ b/.devcontainer/cuda13.1-llvm20/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda13.1-nvhpc26.1/devcontainer.json 
b/.devcontainer/cuda13.1-nvhpc26.1/devcontainer.json index 71620a63bc5..32c71919a6b 100644 --- a/.devcontainer/cuda13.1-nvhpc26.1/devcontainer.json +++ b/.devcontainer/cuda13.1-nvhpc26.1/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda13.1ext-gcc14/devcontainer.json b/.devcontainer/cuda13.1ext-gcc14/devcontainer.json index 730dffebd85..0da5bf81d3a 100644 --- a/.devcontainer/cuda13.1ext-gcc14/devcontainer.json +++ b/.devcontainer/cuda13.1ext-gcc14/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda13.1ext-llvm20/devcontainer.json b/.devcontainer/cuda13.1ext-llvm20/devcontainer.json index af84ff0b28a..582971a2a36 100644 --- a/.devcontainer/cuda13.1ext-llvm20/devcontainer.json +++ b/.devcontainer/cuda13.1ext-llvm20/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" 
], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda99.8-gcc14/devcontainer.json b/.devcontainer/cuda99.8-gcc14/devcontainer.json index cddaa8a9778..8f03b456cba 100644 --- a/.devcontainer/cuda99.8-gcc14/devcontainer.json +++ b/.devcontainer/cuda99.8-gcc14/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda99.8-llvm20/devcontainer.json b/.devcontainer/cuda99.8-llvm20/devcontainer.json index bf920a1f96b..afe6568f37a 100644 --- a/.devcontainer/cuda99.8-llvm20/devcontainer.json +++ b/.devcontainer/cuda99.8-llvm20/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda99.9-gcc14/devcontainer.json b/.devcontainer/cuda99.9-gcc14/devcontainer.json index 169b29be7ea..177f306cd78 100644 --- a/.devcontainer/cuda99.9-gcc14/devcontainer.json +++ b/.devcontainer/cuda99.9-gcc14/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + 
"ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/cuda99.9-llvm20/devcontainer.json b/.devcontainer/cuda99.9-llvm20/devcontainer.json index f947360b228..ba85e8c6490 100644 --- a/.devcontainer/cuda99.9-llvm20/devcontainer.json +++ b/.devcontainer/cuda99.9-llvm20/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 149491ec805..0d82aa5e4b9 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -52,7 +52,8 @@ "llvm-vs-code-extensions.vscode-clangd", "seaube.clangformat", "nvidia.nsight-vscode-edition", - "ms-vscode.cmake-tools" + "ms-vscode.cmake-tools", + "timonwong.shellcheck" ], "settings": { "editor.defaultFormatter": "seaube.clangformat", @@ -63,7 +64,8 @@ "--compile-commands-dir=${workspaceFolder}" ], "files.eol": "\n", - "files.trimTrailingWhitespace": true + "files.trimTrailingWhitespace": true, + "shellcheck.useWorkspaceRootAsCwd": true } } }, diff --git a/.devcontainer/docker-entrypoint.sh b/.devcontainer/docker-entrypoint.sh index d98202841b6..d4ee559f474 100755 --- a/.devcontainer/docker-entrypoint.sh +++ b/.devcontainer/docker-entrypoint.sh @@ -8,14 +8,16 @@ : "${NEW_UID:=}"; : "${NEW_GID:=}"; -eval "$(sed -n 
"s/${REMOTE_USER}:[^:]*:\([^:]*\):\([^:]*\):[^:]*:\([^:]*\).*/OLD_UID=\1;OLD_GID=\2;HOME_FOLDER=\3/p" /etc/passwd)"; -eval "$(sed -n "s/\([^:]*\):[^:]*:${NEW_UID}:.*/EXISTING_USER=\1/p" /etc/passwd)"; -eval "$(sed -n "s/\([^:]*\):[^:]*:${NEW_GID}:.*/EXISTING_GROUP=\1/p" /etc/group)"; +OLD_UID="$(sed -n "s/${REMOTE_USER}:[^:]*:\([^:]*\):\([^:]*\):[^:]*:\([^:]*\).*/\1/p" /etc/passwd)"; +OLD_GID="$(sed -n "s/${REMOTE_USER}:[^:]*:\([^:]*\):\([^:]*\):[^:]*:\([^:]*\).*/\2/p" /etc/passwd)"; +HOME_FOLDER="$(sed -n "s/${REMOTE_USER}:[^:]*:\([^:]*\):\([^:]*\):[^:]*:\([^:]*\).*/\3/p" /etc/passwd)"; +EXISTING_USER="$(sed -n "s/\([^:]*\):[^:]*:${NEW_UID}:.*/\1/p" /etc/passwd)"; +EXISTING_GROUP="$(sed -n "s/\([^:]*\):[^:]*:${NEW_GID}:.*/\1/p" /etc/group)"; -if [ -z "$OLD_UID" ]; then +if [[ -z "$OLD_UID" ]]; then echo "Remote user not found in /etc/passwd ($REMOTE_USER)."; exec "$(pwd)/.devcontainer/cccl-entrypoint.sh" "$@"; -elif [ "$OLD_UID" = "$NEW_UID" ] && [ "$OLD_GID" = "$NEW_GID" ]; then +elif [[ "$OLD_UID" = "$NEW_UID" ]] && [[ "$OLD_GID" = "$NEW_GID" ]]; then echo "UIDs and GIDs are the same ($NEW_UID:$NEW_GID)."; # Even when IDs match, ensure we execute as the non-root REMOTE_USER so # gh and sccache use the mapped HOME (/home/coder) where ~/.aws is bind-mounted. 
@@ -27,17 +29,17 @@ elif [ "$OLD_UID" = "$NEW_UID" ] && [ "$OLD_GID" = "$NEW_GID" ]; then export XDG_STATE_HOME="$HOME_FOLDER/.local/state"; export PYTHONHISTFILE="$HOME_FOLDER/.local/state/.python_history"; exec su -p "$REMOTE_USER" -- "$(pwd)/.devcontainer/cccl-entrypoint.sh" "$@"; -elif [ "$OLD_UID" != "$NEW_UID" ] && [ -n "$EXISTING_USER" ]; then +elif [[ "$OLD_UID" != "$NEW_UID" ]] && [[ -n "$EXISTING_USER" ]]; then echo "User with UID exists ($EXISTING_USER=$NEW_UID)."; exec "$(pwd)/.devcontainer/cccl-entrypoint.sh" "$@"; else - if [ "$OLD_GID" != "$NEW_GID" ] && [ -n "$EXISTING_GROUP" ]; then + if [[ "$OLD_GID" != "$NEW_GID" ]] && [[ -n "$EXISTING_GROUP" ]]; then echo "Group with GID exists ($EXISTING_GROUP=$NEW_GID)."; NEW_GID="$OLD_GID"; fi echo "Updating UID:GID from $OLD_UID:$OLD_GID to $NEW_UID:$NEW_GID."; sed -i -e "s/\(${REMOTE_USER}:[^:]*:\)[^:]*:[^:]*/\1${NEW_UID}:${NEW_GID}/" /etc/passwd; - if [ "$OLD_GID" != "$NEW_GID" ]; then + if [[ "$OLD_GID" != "$NEW_GID" ]]; then sed -i -e "s/\([^:]*:[^:]*:\)${OLD_GID}:/\1${NEW_GID}:/" /etc/group; fi @@ -57,19 +59,5 @@ else export XDG_STATE_HOME="$HOME_FOLDER/.local/state"; export PYTHONHISTFILE="$HOME_FOLDER/.local/state/.python_history"; - if command -V module 2>&1 | grep -q function; then - # "deactivate" lmod so it will be reactivated as the non-root user - export LMOD_CMD= - export LMOD_DEFAULT_MODULEPATH= - export LMOD_DIR= - export LMOD_PKG= - export LOADEDMODULES= - export MANPATH= - export MODULEPATH_ROOT= - export MODULEPATH= - export MODULESHOME= - export -fn module - fi - exec su -p "$REMOTE_USER" -- "$(pwd)/.devcontainer/cccl-entrypoint.sh" "$@"; fi diff --git a/.devcontainer/launch.sh b/.devcontainer/launch.sh index 0faa319e35e..47865ba8648 100755 --- a/.devcontainer/launch.sh +++ b/.devcontainer/launch.sh @@ -30,9 +30,10 @@ print_help() { _upvar() { if unset -v "$1"; then if (( $# == 2 )); then - eval $1=\"\$2\"; + eval "$1"=\"\$2\"; else - eval $1=\(\"\${@:2}\"\); + # shellcheck disable=SC1083 
+ eval "$1"=\(\"\${@:2}\"\); fi; fi } @@ -47,7 +48,7 @@ parse_options() { # implementations of getopt(1), and this version if the environment variable # GETOPT_COMPATIBLE is set, will return '--' and error status 0. set +e - getopt -T 2>&1 > /dev/null + getopt -T >/dev/null 2>&1 getopt_ret=$? set -e @@ -135,6 +136,7 @@ launch_docker() { # Introduces the `DOCKER_IMAGE`, `ENTRYPOINTS`, `ENV_VARS`, `GPU_REQUEST`, # `INITIALIZE_COMMANDS`, `MOUNTS`, `REMOTE_USER`, `RUN_ARGS`, and # `WORKSPACE_FOLDER` variables + # shellcheck disable=SC2312,SC1090 source <(python3 .devcontainer/launch.py "${path}/devcontainer.json") ### @@ -142,6 +144,7 @@ launch_docker() { ### local init_cmd; + # shellcheck disable=SC2154 for init_cmd in "${INITIALIZE_COMMANDS[@]}"; do eval "${init_cmd}" done @@ -163,6 +166,7 @@ launch_docker() { RUN_ARGS+=(--gpus "${gpu_request}") else # Otherwise read and infer from hostRequirements.gpu + # shellcheck disable=SC2154,SC2153 RUN_ARGS+=("${GPU_REQUEST[@]}") fi @@ -176,6 +180,7 @@ launch_docker() { ENV_VARS+=(--env NEW_UID="$(id -u)") ENV_VARS+=(--env NEW_GID="$(id -g)") ENV_VARS+=(--env REMOTE_USER="$REMOTE_USER") + # shellcheck disable=SC2154 ENTRYPOINTS+=("${WORKSPACE_FOLDER}/.devcontainer/docker-entrypoint.sh") ;; esac @@ -201,6 +206,7 @@ launch_docker() { echo "::group::Docker run command" set -x fi + # shellcheck disable=SC2154 exec docker run \ "${RUN_ARGS[@]}" \ "${ENV_VARS[@]}" \ @@ -225,14 +231,17 @@ launch_vscode() { # and compiler environment into this temporary directory, adjusting paths to ensure the # correct workspace is loaded. A special URL is then generated to instruct VSCode to # launch the development container using this temporary configuration. 
- local workspace="$(basename "$(pwd)")" - local tmpdir="$(mktemp -d)/${workspace}" + local workspace + workspace="$(basename "$(pwd)")" + local tmpdir + tmpdir="$(mktemp -d)/${workspace}" mkdir -p "${tmpdir}" mkdir -p "${tmpdir}/.devcontainer" cp -arL "${path}/devcontainer.json" "${tmpdir}/.devcontainer" sed -i "s@\${localWorkspaceFolder}@$(pwd)@g" "${tmpdir}/.devcontainer/devcontainer.json" local path="${tmpdir}" - local hash="$(echo -n "${path}" | xxd -pu - | tr -d '[:space:]')" + local hash + hash="$(echo -n "${path}" | xxd -pu - | tr -d '[:space:]')" local url="vscode://vscode-remote/dev-container+${hash}/home/coder/cccl" local launch="" @@ -242,7 +251,7 @@ launch_vscode() { launch="xdg-open" fi - if [ -n "${launch}" ]; then + if [[ -n "${launch}" ]]; then echo "Launching VSCode Dev Container URL: ${url}" code --new-window "${tmpdir}" exec "${launch}" "${url}" >/dev/null 2>&1 diff --git a/.devcontainer/make_devcontainers.sh b/.devcontainer/make_devcontainers.sh index afc553423a7..21bad063e4d 100755 --- a/.devcontainer/make_devcontainers.sh +++ b/.devcontainer/make_devcontainers.sh @@ -39,7 +39,7 @@ update_devcontainer() { # NVHPC SDK comes with its own bundled toolkit local toolkit_name="-cuda${cuda_version}${cuda_suffix}" - if [ $compiler_name == "nvhpc" ]; then + if [[ "$compiler_name" == "nvhpc" ]]; then toolkit_name="" fi @@ -47,7 +47,7 @@ update_devcontainer() { local INTERNAL_ROOT="gitlab-master.nvidia.com:5005/cccl/cccl-devcontainers:cpp-" img=$IMAGE_ROOT - if [ "$internal" == "true" ]; then + if [[ "$internal" == "true" ]]; then img=$INTERNAL_ROOT fi; @@ -111,43 +111,55 @@ MATRIX_FILE="../ci/matrix.yaml" COMPUTE_MATRIX="../.github/actions/workflow-build/build-workflow.py" # Enable verbose mode if requested -if [ "$VERBOSE" = true ]; then +if [[ "$VERBOSE" = true ]]; then set -x - cat ${MATRIX_FILE} + cat "${MATRIX_FILE}" fi # Read matrix.yaml and convert it to json -matrix_json=$(python3 ${COMPUTE_MATRIX} ${MATRIX_FILE} --devcontainer-info) 
+matrix_json=$(python3 "${COMPUTE_MATRIX}" "${MATRIX_FILE}" --devcontainer-info) -if [ "$VERBOSE" = true ]; then +if [[ "$VERBOSE" = true ]]; then echo "$matrix_json" fi # Get the devcontainer image version and define image tag root -readonly DEVCONTAINER_VERSION=$(echo "$matrix_json" | jq -r '.devcontainer_version') +DEVCONTAINER_VERSION=$(echo "$matrix_json" | jq -r '.devcontainer_version') +readonly DEVCONTAINER_VERSION # Internal image compiler versions: -readonly CUDA99_GCC_VERSION=$( echo "$matrix_json" | jq -r '.cuda99_gcc_version') -readonly CUDA99_LLVM_VERSION=$(echo "$matrix_json" | jq -r '.cuda99_clang_version') +CUDA99_GCC_VERSION=$( echo "$matrix_json" | jq -r '.cuda99_gcc_version') +readonly CUDA99_GCC_VERSION +CUDA99_LLVM_VERSION=$(echo "$matrix_json" | jq -r '.cuda99_clang_version') +readonly CUDA99_LLVM_VERSION # Get unique combinations of cuda version, compiler name/version, and Ubuntu version -readonly combinations=$(echo "$matrix_json" | jq -c '.combinations[]') +combinations=$(echo "$matrix_json" | jq -c '.combinations[]') +readonly combinations # Update the base devcontainer with the default values # The root devcontainer.json file is used as the default container as well as a template for all # other devcontainer.json files by replacing the `image:` field with the appropriate image name readonly base_devcontainer_file="./devcontainer.json" -readonly NEWEST_GCC_CUDA_ENTRY=$(echo "$combinations" | jq -rs '[.[] | select(.compiler_name == "gcc")] | sort_by((.cuda | tonumber), (.compiler_version | tonumber)) | .[-1]') -readonly NEWEST_LLVM_CUDA_ENTRY=$(echo "$combinations" | jq -rs '[.[] | select(.compiler_name == "llvm")] | sort_by((.cuda | tonumber), (.compiler_version | tonumber)) | .[-1]') -readonly DEFAULT_CUDA=$(echo "$NEWEST_GCC_CUDA_ENTRY" | jq -r '.cuda') +NEWEST_GCC_CUDA_ENTRY=$(echo "$combinations" | jq -rs '[.[] | select(.compiler_name == "gcc")] | sort_by((.cuda | tonumber), (.compiler_version | tonumber)) | .[-1]') +readonly 
NEWEST_GCC_CUDA_ENTRY +NEWEST_LLVM_CUDA_ENTRY=$(echo "$combinations" | jq -rs '[.[] | select(.compiler_name == "llvm")] | sort_by((.cuda | tonumber), (.compiler_version | tonumber)) | .[-1]') +# shellcheck disable=SC2034 +readonly NEWEST_LLVM_CUDA_ENTRY +DEFAULT_CUDA=$(echo "$NEWEST_GCC_CUDA_ENTRY" | jq -r '.cuda') +readonly DEFAULT_CUDA readonly DEFAULT_CUDA_EXT=false -readonly DEFAULT_COMPILER_NAME=$(echo "$NEWEST_GCC_CUDA_ENTRY" | jq -r '.compiler_name') -readonly DEFAULT_COMPILER_EXE=$(echo "$NEWEST_GCC_CUDA_ENTRY" | jq -r '.compiler_exe') -readonly DEFAULT_COMPILER_VERSION=$(echo "$NEWEST_GCC_CUDA_ENTRY" | jq -r '.compiler_version') -readonly DEFAULT_NAME=$(make_name "$DEFAULT_CUDA" "$DEFAULT_CUDA_EXT" "$DEFAULT_COMPILER_NAME" "$DEFAULT_COMPILER_VERSION") - -update_devcontainer ${base_devcontainer_file} "./temp_devcontainer.json" "$DEFAULT_NAME" "$DEFAULT_CUDA" "$DEFAULT_CUDA_EXT" "$DEFAULT_COMPILER_NAME" "$DEFAULT_COMPILER_EXE" "$DEFAULT_COMPILER_VERSION" "$DEVCONTAINER_VERSION" "false" -mv "./temp_devcontainer.json" ${base_devcontainer_file} +DEFAULT_COMPILER_NAME=$(echo "$NEWEST_GCC_CUDA_ENTRY" | jq -r '.compiler_name') +readonly DEFAULT_COMPILER_NAME +DEFAULT_COMPILER_EXE=$(echo "$NEWEST_GCC_CUDA_ENTRY" | jq -r '.compiler_exe') +readonly DEFAULT_COMPILER_EXE +DEFAULT_COMPILER_VERSION=$(echo "$NEWEST_GCC_CUDA_ENTRY" | jq -r '.compiler_version') +readonly DEFAULT_COMPILER_VERSION +DEFAULT_NAME=$(make_name "$DEFAULT_CUDA" "$DEFAULT_CUDA_EXT" "$DEFAULT_COMPILER_NAME" "$DEFAULT_COMPILER_VERSION") +readonly DEFAULT_NAME + +update_devcontainer "${base_devcontainer_file}" "./temp_devcontainer.json" "$DEFAULT_NAME" "$DEFAULT_CUDA" "$DEFAULT_CUDA_EXT" "$DEFAULT_COMPILER_NAME" "$DEFAULT_COMPILER_EXE" "$DEFAULT_COMPILER_VERSION" "$DEVCONTAINER_VERSION" "false" +mv "./temp_devcontainer.json" "${base_devcontainer_file}" # Create an array to keep track of valid subdirectory names valid_subdirs=() @@ -178,10 +190,14 @@ make_compiler_entry() { }" | jq -c '.' 
} -readonly cuda99_8_gcc=$( make_compiler_entry "gcc" "$CUDA99_GCC_VERSION" "gcc" "99.8" "false" "true") -readonly cuda99_9_gcc=$( make_compiler_entry "gcc" "$CUDA99_GCC_VERSION" "gcc" "99.9" "false" "true") -readonly cuda99_8_llvm=$(make_compiler_entry "llvm" "$CUDA99_LLVM_VERSION" "clang" "99.8" "false" "true") -readonly cuda99_9_llvm=$(make_compiler_entry "llvm" "$CUDA99_LLVM_VERSION" "clang" "99.9" "false" "true") +cuda99_8_gcc=$( make_compiler_entry "gcc" "$CUDA99_GCC_VERSION" "gcc" "99.8" "false" "true") +readonly cuda99_8_gcc +cuda99_9_gcc=$( make_compiler_entry "gcc" "$CUDA99_GCC_VERSION" "gcc" "99.9" "false" "true") +readonly cuda99_9_gcc +cuda99_8_llvm=$(make_compiler_entry "llvm" "$CUDA99_LLVM_VERSION" "clang" "99.8" "false" "true") +readonly cuda99_8_llvm +cuda99_9_llvm=$(make_compiler_entry "llvm" "$CUDA99_LLVM_VERSION" "clang" "99.9" "false" "true") +readonly cuda99_9_llvm readonly all_comb="$combinations $cuda99_9_gcc $cuda99_8_gcc $cuda99_9_llvm $cuda99_8_llvm" # For each unique combination @@ -205,9 +221,9 @@ for combination in $all_comb; do done # Clean up stale subdirectories and devcontainer.json files -if [ "$CLEAN" = true ]; then +if [[ "$CLEAN" = true ]]; then for subdir in ./*; do - if [ -d "$subdir" ] && [[ ! " ${valid_subdirs[@]} " =~ " ${subdir#./} " ]]; then + if [[ -d "$subdir" ]] && [[ " ${valid_subdirs[*]} " != *" ${subdir#./} "* ]]; then echo "Removing stale subdirectory: $subdir" rm -r "$subdir" fi diff --git a/.devcontainer/verify_devcontainer.sh b/.devcontainer/verify_devcontainer.sh index e93b8c03560..26b8489d4f5 100755 --- a/.devcontainer/verify_devcontainer.sh +++ b/.devcontainer/verify_devcontainer.sh @@ -19,10 +19,13 @@ check_envvars() { } check_host_compiler_version() { - local version_output=$($CXX --version) + local version_output + # shellcheck disable=SC2154 + version_output=$("$CXX" --version) if [[ "$CXX" == "g++" ]]; then - local actual_version=$(echo "$version_output" | head -n 1 | cut -d ' ' -f 4 | cut -d '.' 
-f 1) + local actual_version + actual_version=$(echo "$version_output" | head -n 1 | cut -d ' ' -f 4 | cut -d '.' -f 1) local expected_compiler="gcc" elif [[ "$CXX" == "clang++" ]]; then if [[ $version_output =~ clang\ version\ ([0-9]+) ]]; then @@ -33,20 +36,23 @@ check_host_compiler_version() { fi expected_compiler="llvm" elif [[ "$CXX" == "icpc" ]]; then - local actual_version=$(echo "$version_output" | head -n 1 | cut -d ' ' -f 3 ) + local actual_version + actual_version=$(echo "$version_output" | head -n 1 | cut -d ' ' -f 3 ) # The icpc compiler version of oneAPI release 2023.2.0 is 2021.10.0 if [[ "$actual_version" == "2021.10.0" ]]; then actual_version="2023.2.0" fi expected_compiler="oneapi" - elif [[ "$CXX" =~ "nvc++" ]]; then - local actual_version=$(echo "$version_output" | head -n 2 | cut -d ' ' -f 2 | cut -d '-' -f 1 | tail -n 1) + elif [[ "$CXX" =~ nvc++ ]]; then + local actual_version + actual_version=$(echo "$version_output" | head -n 2 | cut -d ' ' -f 2 | cut -d '-' -f 1 | tail -n 1) local expected_compiler="nvhpc" else echo "::error:: Unexpected CXX value ($CXX)." exit 1 fi + # shellcheck disable=SC2154 if [[ "$expected_compiler" != "${CCCL_HOST_COMPILER}" || "$actual_version" != "$CCCL_HOST_COMPILER_VERSION" ]]; then echo "::error:: CXX ($CXX) version ($actual_version) does not match the expected compiler (${CCCL_HOST_COMPILER}) and version (${CCCL_HOST_COMPILER_VERSION})." exit 1 @@ -56,7 +62,8 @@ check_host_compiler_version() { } check_cuda_version() { - local cuda_version_output=$(nvcc --version) + local cuda_version_output + cuda_version_output=$(nvcc --version) if [[ $cuda_version_output =~ release\ ([0-9]+\.[0-9]+) ]]; then local actual_cuda_version=${BASH_REMATCH[1]} else @@ -64,6 +71,7 @@ check_cuda_version() { exit 1 fi + # shellcheck disable=SC2154 if [[ "$actual_cuda_version" != "$CCCL_CUDA_VERSION" ]]; then echo "::error:: CUDA version ($actual_cuda_version) does not match the expected CUDA version ($CCCL_CUDA_VERSION)." 
exit 1 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index dd3a3c48e71..f44a45e9d81 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -30,6 +30,14 @@ repos: exclude_types: [python] - id: check-yaml + - repo: https://github.com/shellcheck-py/shellcheck-py + rev: v0.11.0.1 + hooks: + - id: shellcheck + exclude: | + (?x)^( + ^.*libcudacxx/cmake/config\.guess$ + ) - repo: https://github.com/pre-commit/mirrors-clang-format rev: v20.1.7 diff --git a/.shellcheckrc b/.shellcheckrc new file mode 100644 index 00000000000..f73429efca3 --- /dev/null +++ b/.shellcheckrc @@ -0,0 +1,20 @@ +# -*- conf -*- +enable=all +color=never +external-sources=true +severity=style +source-path=SCRIPTDIR +# Consider invoking this command separately to avoid masking its return value (or use '|| +# true' to ignore). +# +# This just clutters up the code. We usually have `set -eo pipefail` enabled. +disable=SC2312 +# This function is invoked in an 'if' condition so set -e will be disabled. Invoke +# separately if failures should cause the script to exit. +# +# We don't want to exit if errors happen inside an if, that's why we have the if +# statement... +disable=SC2310 +# Don't suggest putting braces around all variable references +disable=SC2250 +shell=bash diff --git a/benchmarks/scripts/submit_benchmark_job.sh b/benchmarks/scripts/submit_benchmark_job.sh index 5d80135c637..109dc7874dc 100755 --- a/benchmarks/scripts/submit_benchmark_job.sh +++ b/benchmarks/scripts/submit_benchmark_job.sh @@ -9,7 +9,7 @@ jobtime="4:00:00" benchmark_preset="benchmark" batch_script=$scratch/batch.sh -cat << BATCH_SCRIPT > $batch_script +cat << BATCH_SCRIPT > "$batch_script" #!/bin/bash pip install --break-system-packages fpzip pandas scipy @@ -37,11 +37,11 @@ export PYTHONPATH=../benchmarks/scripts/ echo "Benchmark done. 
Results in $scratch/\$host/cccl/build_perf/cccl_meta_bench.db" BATCH_SCRIPT -chmod +x $batch_script +chmod +x "$batch_script" # schedule SLURM job echo "Scheduling script $batch_script" echo "#################################################################################" -cat $batch_script +cat "$batch_script" echo "#################################################################################" -crun -q "$node_selector" -ex -t $jobtime -img $container_image -b $batch_script +crun -q "$node_selector" -ex -t "$jobtime" -img "$container_image" -b "$batch_script" diff --git a/ci/build_cccl_c_parallel.sh b/ci/build_cccl_c_parallel.sh index 10e363fa5c0..4207d2e6a54 100755 --- a/ci/build_cccl_c_parallel.sh +++ b/ci/build_cccl_c_parallel.sh @@ -2,14 +2,15 @@ set -euo pipefail +# shellcheck source=ci/build_common.sh source "$(dirname "${BASH_SOURCE[0]}")/build_common.sh" print_environment_details PRESET="cccl-c-parallel" -CMAKE_OPTIONS="-DCMAKE_CXX_STANDARD=${CXX_STANDARD} -DCMAKE_CUDA_STANDARD=${CXX_STANDARD}" +CMAKE_OPTIONS=("-DCMAKE_CXX_STANDARD=${CXX_STANDARD}" "-DCMAKE_CUDA_STANDARD=${CXX_STANDARD}") -configure_and_build_preset "CCCL C Parallel Library" "$PRESET" "$CMAKE_OPTIONS" +configure_and_build_preset "CCCL C Parallel Library" "$PRESET" "${CMAKE_OPTIONS[@]}" print_time_summary diff --git a/ci/build_cccl_c_stf.sh b/ci/build_cccl_c_stf.sh index 9a6160ed051..178c51f5fb4 100755 --- a/ci/build_cccl_c_stf.sh +++ b/ci/build_cccl_c_stf.sh @@ -2,14 +2,15 @@ set -euo pipefail +# shellcheck source=ci/build_common.sh source "$(dirname "${BASH_SOURCE[0]}")/build_common.sh" print_environment_details PRESET="cccl-c-stf" -CMAKE_OPTIONS="-DCMAKE_CXX_STANDARD=${CXX_STANDARD} -DCMAKE_CUDA_STANDARD=${CXX_STANDARD}" +CMAKE_OPTIONS=("-DCMAKE_CXX_STANDARD=${CXX_STANDARD}" "-DCMAKE_CUDA_STANDARD=${CXX_STANDARD}") -configure_and_build_preset "CCCL C CUDASTF Library" "$PRESET" "$CMAKE_OPTIONS" +configure_and_build_preset "CCCL C CUDASTF Library" "$PRESET" "${CMAKE_OPTIONS[@]}" 
print_time_summary diff --git a/ci/build_common.sh b/ci/build_common.sh index ca268aaf7a9..2635eb60480 100755 --- a/ci/build_common.sh +++ b/ci/build_common.sh @@ -57,7 +57,7 @@ function check_required_dependencies() { command -v "$tool" &>/dev/null || missing_deps+=("$tool") done - if [ ${#missing_deps[@]} -ne 0 ]; then + if [[ ${#missing_deps[@]} -ne 0 ]]; then echo "❌ Error: Missing required dependencies:" >&2 printf " • %s\n" "${missing_deps[@]}" >&2 echo >&2 @@ -70,7 +70,7 @@ function check_required_dependencies() { # Copy the args into a temporary array, since we will modify them and # the parent script may still need them. args=("$@") -while [ "${#args[@]}" -ne 0 ]; do +while [[ "${#args[@]}" -ne 0 ]]; do case "${args[0]}" in -v | --verbose | -verbose) VERBOSE=1; args=("${args[@]:1}");; -configure) CONFIGURE_ONLY=true; args=("${args[@]:1}");; @@ -79,15 +79,18 @@ while [ "${#args[@]}" -ne 0 ]; do -cuda) CUDA_COMPILER="${args[1]}"; args=("${args[@]:2}");; -arch) CUDA_ARCHS="${args[1]}"; args=("${args[@]:2}");; -pedantic | --pedantic) PEDANTIC=1; args=("${args[@]:1}");; - -disable-benchmarks) DISABLE_CUB_BENCHMARKS=1; args=("${args[@]:1}");; + -disable-benchmarks) export DISABLE_CUB_BENCHMARKS=1; args=("${args[@]:1}");; -cmake-options) - if [ -n "${args[1]}" ]; then + if [[ -n "${args[1]}" ]]; then IFS=' ' read -ra split_args <<< "${args[1]}" GLOBAL_CMAKE_OPTIONS+=("${split_args[@]}") args=("${args[@]:2}") else echo "Error: No arguments provided for -cmake-options" usage + # usage will exit 1 for us, so below exit 1 is unreachable, but it does not + # hurt and guards against changes in usage. 
+ # shellcheck disable=SC2317 exit 1 fi ;; @@ -102,8 +105,8 @@ function validate_and_resolve_compiler() { local compiler_var="$2" local compiler_path - compiler_path=$(which "${compiler_var}" 2>/dev/null) - if [ -z "$compiler_path" ]; then + compiler_path=$(command -v "${compiler_var}" 2>/dev/null) + if [[ -z "$compiler_path" ]]; then echo "❌ Error: ${compiler_name} '${compiler_var}' not found in PATH" >&2 exit 1 fi @@ -141,7 +144,7 @@ else GLOBAL_CMAKE_OPTIONS+=("-DCCCL_ENABLE_WERROR=OFF" "-DCCCL_ENABLE_PRAGMA_SYSTEM_HEADER=ON") fi -if [ $VERBOSE ]; then +if [[ -n "$VERBOSE" ]]; then set -x fi @@ -153,7 +156,7 @@ set -u readonly PARALLEL_LEVEL=${PARALLEL_LEVEL:=$(nproc --all --ignore=1)} -if [ -z ${CCCL_BUILD_INFIX+x} ]; then +if [[ -z ${CCCL_BUILD_INFIX+x} ]]; then CCCL_BUILD_INFIX="" fi @@ -165,9 +168,9 @@ BUILD_ROOT=$(cd "../build" && pwd) BUILD_DIR="$BUILD_ROOT/$CCCL_BUILD_INFIX" # The most recent devcontainer build dir will always be symlinked to cccl/build/latest -mkdir -p $BUILD_DIR -rm -f $BUILD_ROOT/latest -ln -sf $BUILD_DIR $BUILD_ROOT/latest +mkdir -p "$BUILD_DIR" +rm -f "$BUILD_ROOT"/latest +ln -sf "$BUILD_DIR" "$BUILD_ROOT"/latest # The more recent preset build dir will always be symlinked to: # cccl/preset-latest @@ -189,6 +192,7 @@ export CUDACXX="${CUDA_COMPILER}" export CUDAHOSTCXX="${HOST_COMPILER}" export CXX_STANDARD +# shellcheck source=ci/pretty_printing.sh source ./pretty_printing.sh # Kill any build / test steps that exceed this time, otherwise CI jobs may be @@ -282,15 +286,15 @@ function print_test_time_summary() { ctest_log=${1} - if [ -f ${ctest_log} ]; then + if [[ -f "${ctest_log}" ]]; then begin_group "⏱️ Longest Test Steps" # Only print the full output in CI: - if [ -n "${GITHUB_ACTIONS:-}" ]; then - cmake -DLOGFILE=${ctest_log} -P ../cmake/PrintCTestRunTimes.cmake + if [[ -n "${GITHUB_ACTIONS:-}" ]]; then + cmake -DLOGFILE="${ctest_log}" -P ../cmake/PrintCTestRunTimes.cmake else # `|| :` to avoid `set -o pipefail` from triggering 
when `head` closes the pipe before `cmake` finishes. # Otherwise the script will exit early with status 141 (SIGPIPE). - cmake -DLOGFILE=${ctest_log} -P ../cmake/PrintCTestRunTimes.cmake | head -n 15 || : + cmake -DLOGFILE="${ctest_log}" -P ../cmake/PrintCTestRunTimes.cmake | head -n 15 || : fi end_group "⏱️ Longest Test Steps" fi @@ -301,7 +305,7 @@ function configure_preset() local BUILD_NAME=$1 local PRESET=$2 shift 2 - local CMAKE_OPTIONS=$@ + local CMAKE_OPTIONS=("$@") local GROUP_NAME="🛠️ CMake Configure ${BUILD_NAME}" @@ -310,10 +314,10 @@ function configure_preset() pushd .. > /dev/null if [[ -n "${GITHUB_ACTIONS:-}" ]]; then # Retry 5 times with 30 seconds between attempts to try to WAR network issues during CPM fetch on CI runners: - export RUN_COMMAND_RETRY_PARAMS="5 30" + export RUN_COMMAND_RETRY_PARAMS=(5 30) fi status=0 - run_command "$GROUP_NAME" cmake --preset=$PRESET --log-level=VERBOSE $CMAKE_OPTIONS "${GLOBAL_CMAKE_OPTIONS[@]}" || status=$? + run_command "$GROUP_NAME" cmake --preset="$PRESET" --log-level=VERBOSE "${CMAKE_OPTIONS[@]}" "${GLOBAL_CMAKE_OPTIONS[@]}" || status=$? 
if [[ -n "${GITHUB_ACTIONS:-}" ]]; then unset RUN_COMMAND_RETRY_PARAMS fi @@ -323,18 +327,20 @@ function configure_preset() echo "${BUILD_NAME} configuration complete:" echo " Exit code: ${status}" echo " CMake Preset: ${PRESET}" - echo " CMake Options: ${CMAKE_OPTIONS}" + echo " CMake Options: ${CMAKE_OPTIONS[*]}" echo " Build Directory: ${BUILD_DIR}/${PRESET}" - exit $status + exit "$status" fi - return $status + return "$status" } function build_preset() { local BUILD_NAME=$1 local PRESET=$2 + # shellcheck disable=SC2034 local green="1;32" + # shellcheck disable=SC2034 local red="1;31" local GROUP_NAME="🏗️ Build ${BUILD_NAME}" shift 2 @@ -355,10 +361,10 @@ function build_preset() { # Track memory usage on CI: if [[ -n "${GITHUB_ACTIONS:-}" || -n "${MEMMON:-}" ]]; then util/memmon.sh --start \ - --log-threshold ${MEMMON_LOG_THRESHOLD:-2} \ - --print-threshold ${MEMMON_PRINT_THRESHOLD:-5} \ + --log-threshold "${MEMMON_LOG_THRESHOLD:-2}" \ + --print-threshold "${MEMMON_PRINT_THRESHOLD:-5}" \ --log-file "$memmon_log" \ - --poll ${MEMMON_POLL_INTERVAL:-5} \ + --poll "${MEMMON_POLL_INTERVAL:-5}" \ || : fi @@ -373,24 +379,24 @@ function build_preset() { fi # Only print detailed stats in actions workflow - if [ -n "${GITHUB_ACTIONS:-}" ]; then + if [[ -n "${GITHUB_ACTIONS:-}" ]]; then sccache --show-adv-stats --stats-format=json > "${sccache_json}" || : run_command "📊 sccache stats" sccache --show-adv-stats || : begin_group "🥷 ninja build times" - echo "The "weighted" time is the elapsed time of each build step divided by the number + echo "The \"weighted\" time is the elapsed time of each build step divided by the number of tasks that were running in parallel. This makes it an excellent approximation - of how "important" a slow step was. A link that is entirely or mostly serialized + of how \"important\" a slow step was. A link that is entirely or mostly serialized will have a weighted time that is the same or similar to its elapsed time. 
A compile that runs in parallel with 999 other compiles will have a weighted time that is tiny." - ./ninja_summary.py -C ${BUILD_DIR}/${PRESET} || echo "Warning: ninja_summary.py failed to execute properly." + ./ninja_summary.py -C "${BUILD_DIR}"/"${PRESET}" || echo "Warning: ninja_summary.py failed to execute properly." end_group else sccache -s || : fi - return $status + return "$status" } function test_preset() @@ -419,9 +425,9 @@ function test_preset() run_ci_timed_command "$GROUP_NAME" ctest --output-log "${ctest_log}" --preset="$PRESET" || status=$? popd > /dev/null - print_test_time_summary ${ctest_log} + print_test_time_summary "${ctest_log}" - return $status + return "$status" } function configure_and_build_preset() @@ -429,9 +435,9 @@ function configure_and_build_preset() local BUILD_NAME=$1 local PRESET=$2 shift 2 - local CMAKE_OPTIONS=$@ + local CMAKE_OPTIONS=("$@") - configure_preset "$BUILD_NAME" "$PRESET" "$CMAKE_OPTIONS" + configure_preset "$BUILD_NAME" "$PRESET" "${CMAKE_OPTIONS[@]}" if ! 
$CONFIGURE_ONLY; then build_preset "$BUILD_NAME" "$PRESET" diff --git a/ci/build_cub.sh b/ci/build_cub.sh index effcf47c1c9..64f21dbd2e9 100755 --- a/ci/build_cub.sh +++ b/ci/build_cub.sh @@ -10,14 +10,15 @@ ARTIFACT_TAGS=() ci_dir=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) -new_args=$("${ci_dir}/util/extract_switches.sh" \ +new_args="$("${ci_dir}/util/extract_switches.sh" \ -no-lid \ -lid0 \ -lid1 \ -lid2 \ - -- "$@") + -- "$@")" -eval set -- ${new_args} +declare -a new_args="(${new_args})" +set -- "${new_args[@]}" while true; do case "$1" in -no-lid) @@ -51,6 +52,7 @@ while true; do esac done +# shellcheck source=ci/build_common.sh source "${ci_dir}/build_common.sh" print_environment_details @@ -87,13 +89,13 @@ elif $LID2; then fi CMAKE_OPTIONS=( - -DCMAKE_CXX_STANDARD=$CXX_STANDARD - -DCMAKE_CUDA_STANDARD=$CXX_STANDARD - -DCCCL_ENABLE_BENCHMARKS=$ENABLE_CCCL_BENCHMARKS - -DCUB_ENABLE_RDC_TESTS=$ENABLE_CUB_RDC + "-DCMAKE_CXX_STANDARD=$CXX_STANDARD" + "-DCMAKE_CUDA_STANDARD=$CXX_STANDARD" + "-DCCCL_ENABLE_BENCHMARKS=$ENABLE_CCCL_BENCHMARKS" + "-DCUB_ENABLE_RDC_TESTS=$ENABLE_CUB_RDC" ) -configure_and_build_preset "CUB" "$PRESET" "${CMAKE_OPTIONS[*]}" +configure_and_build_preset "CUB" "$PRESET" "${CMAKE_OPTIONS[@]}" # Create test artifacts: if [[ -n "${GITHUB_ACTIONS:-}" ]]; then diff --git a/ci/build_cuda_cccl_python.sh b/ci/build_cuda_cccl_python.sh index 2c1bc9c02d0..6ee64d0a625 100755 --- a/ci/build_cuda_cccl_python.sh +++ b/ci/build_cuda_cccl_python.sh @@ -5,30 +5,30 @@ ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" usage="Usage: $0 -py-version [additional options...]" +# shellcheck source=ci/util/python/common_arg_parser.sh source "$ci_dir/util/python/common_arg_parser.sh" parse_python_args "$@" # Check if py_version was provided (this script requires it) require_py_version "$usage" || exit 1 -echo "Docker socket: " $(ls /var/run/docker.sock) +echo "Docker socket: " "$(ls /var/run/docker.sock)" if [[ -n "${GITHUB_ACTIONS:-}" ]]; then # Prepare mount 
points etc for getting artifacts in/out of the container. + # shellcheck source=ci/util/artifacts/common.sh source "$ci_dir/util/artifacts/common.sh" # Note that these mounts use the runner (not the devcontainer) filesystem for # source directories because of docker-out-of-docker quirks. # The workflow-job GH actions make sure that they exist before running any # scripts. - action_mounts=$(cat </etc/profile.d/enable_devtools.sh +# shellcheck disable=SC1091 source /etc/profile.d/enable_devtools.sh # Check what's available -which gcc +command -v gcc gcc --version -which nvcc +command -v nvcc nvcc --version # Set up Python environment +# shellcheck source=ci/pyenv_helper.sh source /workspace/ci/pyenv_helper.sh +# shellcheck disable=SC2154 setup_python_env "${py_version}" -which python +command -v python python --version echo "Done setting up python env" # Figure out the version to use for the package, we need repo history -if $(git rev-parse --is-shallow-repository); then +if "$(git rev-parse --is-shallow-repository)"; then git fetch --unshallow fi export PACKAGE_VERSION_PREFIX="0.1." @@ -39,9 +42,12 @@ cuda_version=$(nvcc --version | grep -oP 'release \K[0-9]+\.[0-9]+' | cut -d. -f echo "Detected CUDA version: ${cuda_version}" # Configure compilers: -export CXX="$(which g++)" -export CUDACXX="$(which nvcc)" -export CUDAHOSTCXX="$(which g++)" +CXX="$(command -v g++)" +export CXX +CUDACXX="$(command -v nvcc)" +export CUDACXX +CUDAHOSTCXX="$(command -v g++)" +export CUDAHOSTCXX # Build the wheel python -m pip wheel --no-deps --verbose --wheel-dir dist . 
diff --git a/ci/build_cudax.sh b/ci/build_cudax.sh index 589db267426..2b067d0a542 100755 --- a/ci/build_cudax.sh +++ b/ci/build_cudax.sh @@ -2,6 +2,7 @@ set -euo pipefail +# shellcheck source=ci/build_common.sh source "$(dirname "${BASH_SOURCE[0]}")/build_common.sh" print_environment_details @@ -12,7 +13,7 @@ print_environment_details if [[ -z "${cudax_ENABLE_CUFILE:-}" ]]; then cudax_ENABLE_CUFILE="false" if [[ -n "${NVCC_VERSION:-}" ]] && [[ "$(basename "${HOST_COMPILER}")" != "nvc++" ]]; then - if util/version_compare.sh ${NVCC_VERSION} ge 12.9; then + if util/version_compare.sh "${NVCC_VERSION}" ge 12.9; then cudax_ENABLE_CUFILE="true" fi fi @@ -26,6 +27,6 @@ CMAKE_OPTIONS=( "-DCMAKE_CUDA_STANDARD=${CXX_STANDARD}" ) -configure_and_build_preset "CUDA Experimental" "$PRESET" "${CMAKE_OPTIONS[*]}" +configure_and_build_preset "CUDA Experimental" "$PRESET" "${CMAKE_OPTIONS[@]}" print_time_summary diff --git a/ci/build_libcudacxx.sh b/ci/build_libcudacxx.sh index 017fd1b10cd..94880b45da1 100755 --- a/ci/build_libcudacxx.sh +++ b/ci/build_libcudacxx.sh @@ -2,13 +2,14 @@ set -euo pipefail +# shellcheck source=ci/build_common.sh source "$(dirname "${BASH_SOURCE[0]}")/build_common.sh" print_environment_details PRESET="libcudacxx" -CMAKE_OPTIONS="-DCMAKE_CXX_STANDARD=${CXX_STANDARD} -DCMAKE_CUDA_STANDARD=${CXX_STANDARD}" +CMAKE_OPTIONS=("-DCMAKE_CXX_STANDARD=${CXX_STANDARD}" "-DCMAKE_CUDA_STANDARD=${CXX_STANDARD}") -configure_and_build_preset libcudacxx "$PRESET" "$CMAKE_OPTIONS" +configure_and_build_preset libcudacxx "$PRESET" "${CMAKE_OPTIONS[@]}" print_time_summary diff --git a/ci/build_stdpar.sh b/ci/build_stdpar.sh index 3bebd5d142d..d85c0e12317 100755 --- a/ci/build_stdpar.sh +++ b/ci/build_stdpar.sh @@ -12,7 +12,7 @@ readonly workdir="${cccl_repo}/test/stdpar" CXX_STANDARD=17 args=("$@") -while [ "${#args[@]}" -ne 0 ]; do +while [[ "${#args[@]}" -ne 0 ]]; do case "${args[0]}" in -std) CXX_STANDARD="${args[1]}"; args=("${args[@]:2}");; *) echo "Unrecognized option: 
${args[0]}"; exit 1 ;; @@ -30,4 +30,8 @@ cmake -B build -S . -G Ninja \ `# Explicitly compile for hopper since the CI machine does not have a gpu:` \ -DCMAKE_CXX_FLAGS="-gpu=cc90" +# Disabled because `cmake --build -j ""` is invalid, but so is +# `cmake --build -j8`. CMake expects a space between `-j` and +# the numeric argument, or no argument at all. +# shellcheck disable=SC2086 cmake --build build -j ${PARALLEL_LEVEL:-} diff --git a/ci/build_thrust.sh b/ci/build_thrust.sh index 0f1604440d6..fb9b1a90eb7 100755 --- a/ci/build_thrust.sh +++ b/ci/build_thrust.sh @@ -2,15 +2,16 @@ set -euo pipefail +# shellcheck source=ci/build_common.sh source "$(dirname "${BASH_SOURCE[0]}")/build_common.sh" print_environment_details PRESET="thrust" -CMAKE_OPTIONS="-DCMAKE_CXX_STANDARD=$CXX_STANDARD -DCMAKE_CUDA_STANDARD=$CXX_STANDARD" +CMAKE_OPTIONS=("-DCMAKE_CXX_STANDARD=$CXX_STANDARD" "-DCMAKE_CUDA_STANDARD=$CXX_STANDARD") -configure_and_build_preset "Thrust" "$PRESET" "$CMAKE_OPTIONS" +configure_and_build_preset "Thrust" "$PRESET" "${CMAKE_OPTIONS[@]}" # Create test artifacts: if [[ -n "${GITHUB_ACTIONS:-}" ]]; then diff --git a/ci/generate_version.sh b/ci/generate_version.sh index 00e6071ede5..b3de7886cd8 100755 --- a/ci/generate_version.sh +++ b/ci/generate_version.sh @@ -9,7 +9,7 @@ CCCL_BRANCH="${CCCL_BRANCH:-dev}" PACKAGE_VERSION_PREFIX="${PACKAGE_VERSION_PREFIX:-}" GIT_DESCRIBE_TAG=$(git describe --tags --match "v[0-9]*" --abbrev=0) -GIT_DESCRIBE_NUMBER=$(git rev-list ${GIT_DESCRIBE_TAG}..HEAD --count) +GIT_DESCRIBE_NUMBER=$(git rev-list "${GIT_DESCRIBE_TAG}"..HEAD --count) JSON_VERSION=$(jq -r .full /workspace/cccl-version.json) diff --git a/ci/install_cccl.sh b/ci/install_cccl.sh index 15666d891e8..3c587911cc5 100755 --- a/ci/install_cccl.sh +++ b/ci/install_cccl.sh @@ -33,7 +33,7 @@ while [[ "$#" -gt 0 ]]; do shift done -if [ $VERBOSE ]; then +if [[ -n "$VERBOSE" ]]; then set -x fi diff --git a/ci/matx/build_matx.sh b/ci/matx/build_matx.sh index f1c235de5e8..a2202bba927 
100755 --- a/ci/matx/build_matx.sh +++ b/ci/matx/build_matx.sh @@ -38,19 +38,23 @@ else cccl_sha="$(git -C "${cccl_repo}" rev-parse HEAD)"; fi -readonly cccl_repo_version="$(git -C "${cccl_repo}" describe ${cccl_sha}| grep -Eo '[0-9]+\.[0-9]+\.[0-9]+')" +cccl_repo_version="$(git -C "${cccl_repo}" describe "${cccl_sha}"| grep -Eo '[0-9]+\.[0-9]+\.[0-9]+')" +readonly cccl_repo_version # Define CCCL_VERSION to override the version used by rapids-cmake to patch CCCL. echo "CCCL_VERSION (override): ${CCCL_VERSION-}"; if test -n "${CCCL_VERSION-}"; then readonly cccl_rapids_cmake_version="${CCCL_VERSION}" else - readonly cccl_rapids_cmake_version="${cccl_repo_version}" + cccl_rapids_cmake_version="${cccl_repo_version}" + # shellcheck disable=SC2034 + readonly cccl_rapids_cmake_version fi # If the current version is less than 2.8.0, use 2.8.0 for the rapids-cmake version. # This is to allow rapids-cmake to correctly patch the CCCL install rules on current `main`. -readonly cccl_version=$(version_max "${cccl_repo_version}" "2.8.0") +cccl_version=$(version_max "${cccl_repo_version}" "2.8.0") +readonly cccl_version readonly workdir="${cccl_repo}/build/${CCCL_BUILD_INFIX:-}/matx" readonly version_file="${workdir}/MatX/cmake/versions.json" @@ -70,7 +74,7 @@ pip install numpy # Clone MatX rm -rf MatX -git clone ${matx_repo} -b ${matx_branch} +git clone "${matx_repo}" -b "${matx_branch}" cd MatX echo "MatX HEAD:" @@ -88,7 +92,7 @@ jq -r ".packages.CCCL *= "${version_file}" > "${version_override_file}" echo "Overriding MatX versions.json file:" -cat $version_override_file +cat "$version_override_file" # Configure and build rm -rf build @@ -102,4 +106,8 @@ cmake \ -DMATX_BUILD_BENCHMARKS=ON \ -DMATX_EN_CUTENSOR=ON +# Disabled because `cmake --build -j ""` is invalid, but so is +# `cmake --build -j8`. CMake expects a space between `-j` and +# the numeric argument, or no argument at all. 
+# shellcheck disable=SC2086 cmake --build build -j ${PARALLEL_LEVEL:-} diff --git a/ci/nvrtc_libcudacxx.sh b/ci/nvrtc_libcudacxx.sh index 5c3cc2ecbb5..855d345ec84 100755 --- a/ci/nvrtc_libcudacxx.sh +++ b/ci/nvrtc_libcudacxx.sh @@ -1,14 +1,15 @@ #!/bin/bash +# shellcheck source=ci/build_common.sh source "$(dirname "${BASH_SOURCE[0]}")/build_common.sh" print_environment_details PRESET="libcudacxx-nvrtc" -CMAKE_OPTIONS="-DCMAKE_CXX_STANDARD=${CXX_STANDARD} -DCMAKE_CUDA_STANDARD=${CXX_STANDARD}" +CMAKE_OPTIONS=("-DCMAKE_CXX_STANDARD=${CXX_STANDARD}" "-DCMAKE_CUDA_STANDARD=${CXX_STANDARD}") -configure_and_build_preset "libcudacxx NVRTC" "$PRESET" "$CMAKE_OPTIONS" +configure_and_build_preset "libcudacxx NVRTC" "$PRESET" "${CMAKE_OPTIONS[@]}" sccache -z > /dev/null || : test_preset "libcudacxx NVRTC" "${PRESET}" diff --git a/ci/pretty_printing.sh b/ci/pretty_printing.sh index a94fff9f7b2..ab0f2c8b192 100644 --- a/ci/pretty_printing.sh +++ b/ci/pretty_printing.sh @@ -4,7 +4,7 @@ ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" function print_var_values() { # Iterate through the arguments for var_name in "$@"; do - if [ -z "$var_name" ]; then + if [[ -z "$var_name" ]]; then echo "Usage: print_var_values ..." 
return 1 fi @@ -24,7 +24,7 @@ function begin_group() { local name="${1:-}" local color="${2:-$blue}" - if [ -n "${GITHUB_ACTIONS:-}" ]; then + if [[ -n "${GITHUB_ACTIONS:-}" ]]; then echo -e "::group::\e[${color}m${name}\e[0m" else echo -e "\e[${color}m================== ${name} ======================\e[0m" @@ -42,14 +42,14 @@ function end_group() { local red="31" local blue="34" - if [ -n "${GITHUB_ACTIONS:-}" ]; then + if [[ -n "${GITHUB_ACTIONS:-}" ]]; then echo "::endgroup::" - if [ "$build_status" -ne 0 ]; then + if [[ "$build_status" -ne 0 ]]; then echo -e "::error::\e[${red}m ${name} - Failed (⬆️ click above for full log ⬆️)\e[0m" fi else - if [ "$build_status" -ne 0 ]; then + if [[ "$build_status" -ne 0 ]]; then echo -e "\e[${red}m================== End ${name} - Failed${duration:+ - Duration: ${duration}s} ==================\e[0m" else echo -e "\e[${blue}m================== End ${name} - Success${duration:+ - Duration: ${duration}s} ==================\n\e[0m" @@ -71,21 +71,23 @@ function run_command() { echo "Working directory: $(pwd)" echo "Running command: ${command[*]}" set +e - local start_time=$(date +%s) + local start_time + start_time=$(date +%s) # If RUN_COMMAND_RETRY_PARAMS is set to "<retries> <delay>", use retry.sh to run the command: - if [[ -n "${RUN_COMMAND_RETRY_PARAMS:-}" ]]; then + if [[ -v RUN_COMMAND_RETRY_PARAMS && "${#RUN_COMMAND_RETRY_PARAMS[@]}" -eq 2 ]]; then status=0 - "$ci_dir/util/retry.sh" $RUN_COMMAND_RETRY_PARAMS "${command[@]}" || status=$? + "$ci_dir/util/retry.sh" "${RUN_COMMAND_RETRY_PARAMS[@]}" "${command[@]}" || status=$? else status=0 "${command[@]}" || status=$?
fi - local end_time=$(date +%s) + local end_time + end_time=$(date +%s) set -e local duration=$((end_time - start_time)) - end_group "$group_name" $status $duration + end_group "$group_name" "$status" "$duration" command_durations["$group_name"]=$duration - return $status + return "$status" } function string_width() { @@ -99,13 +101,14 @@ function print_time_summary() { # Find the longest group name for formatting for group in "${!command_durations[@]}"; do - local group_length=$(echo "$group" | awk '{print length}') - if [ "$group_length" -gt "$max_length" ]; then + local group_length + group_length=$(echo "$group" | awk '{print length}') + if [[ "$group_length" -gt "$max_length" ]]; then max_length=$group_length fi done - if [ "$max_length" -eq 0 ]; then + if [[ "$max_length" -eq 0 ]]; then return fi diff --git a/ci/pyenv_helper.sh b/ci/pyenv_helper.sh index 9d3b2a9ec12..464fb60ff2d 100644 --- a/ci/pyenv_helper.sh +++ b/ci/pyenv_helper.sh @@ -2,7 +2,9 @@ setup_python_env() { local py_version=$1 # Source pretty_printing.sh for begin_group/end_group helpers - local script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" + local script_dir + script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" + # shellcheck source=ci/pretty_printing.sh source "${script_dir}/pretty_printing.sh" begin_group "🐍 Setting up Python ${py_version} (pyenv)" @@ -14,9 +16,9 @@ setup_python_env() { fi # Install the build dependencies, check /etc/os-release to see if we are on ubuntu or rocky - if [ -f /etc/os-release ]; then + if [[ -f /etc/os-release ]]; then source /etc/os-release - if [ "$ID" = "ubuntu" ]; then + if [[ "$ID" = "ubuntu" ]]; then # Use the retry helper to mitigate issues with apt network errors: retry() { "${script_dir}/util/retry.sh" 5 30 "$@" @@ -26,7 +28,7 @@ setup_python_env() { retry sudo apt install -y make libssl-dev zlib1g-dev \ libbz2-dev libreadline-dev libsqlite3-dev curl git \ libncursesw5-dev xz-utils tk-dev libxml2-dev libxmlsec1-dev libffi-dev 
liblzma-dev - elif [ "$ID" = "rocky" ]; then + elif [[ "$ID" = "rocky" ]]; then # we're inside the rockylinux container, sudo not required/available dnf install -y make patch zlib-devel bzip2 bzip2-devel readline-devel \ sqlite sqlite-devel openssl-devel tk-devel libffi-devel xz-devel libuuid-devel \ diff --git a/ci/pytorch/build_pytorch.sh b/ci/pytorch/build_pytorch.sh index 4b0a6a58381..975d1f0c2ed 100755 --- a/ci/pytorch/build_pytorch.sh +++ b/ci/pytorch/build_pytorch.sh @@ -58,13 +58,13 @@ echo "::group::Setting up clone of CUDA environment with custom CCCL..." export PATH="$PWD/cuda/bin:$PATH" export CUDA_HOME="$PWD/cuda" export CUDA_PATH="$PWD/cuda" -which nvcc +command -v nvcc nvcc --version echo "::endgroup::" echo "::group::Cloning PyTorch..." rm -rf pytorch -git clone ${pytorch_repo} -b ${pytorch_branch} --recursive --depth 1 +git clone "${pytorch_repo}" -b "${pytorch_branch}" --recursive --depth 1 echo "PyTorch HEAD:" git -C pytorch log -1 --format=short echo "::endgroup::" @@ -120,7 +120,9 @@ fi echo "::endgroup::" echo "::group::Building $num_targets pytorch CUDA targets with custom CCCL..." -ninja -C ./build $(xargs -a build/cuda_targets.txt) +torch_cuda_targets="$(xargs -a build/cuda_targets.txt)" +declare -a torch_cuda_targets="($torch_cuda_targets)" +ninja -C ./build "${torch_cuda_targets[@]}" echo "::endgroup::" echo "PyTorch CUDA targets built successfully with custom CCCL." 
diff --git a/ci/rapids/post-create-command.sh b/ci/rapids/post-create-command.sh index ccb285e2938..db427da1bb2 100755 --- a/ci/rapids/post-create-command.sh +++ b/ci/rapids/post-create-command.sh @@ -141,6 +141,6 @@ _run_post_create_command() { clone-all -j "$(nproc --all)" -v -q --clone-upstream --single-branch --shallow-submodules --no-update-env; } -if [ "$(basename "${BASH_SOURCE[${#BASH_SOURCE[@]}-1]}")" = post-create-command.sh ]; then +if [[ "$(basename "${BASH_SOURCE[${#BASH_SOURCE[@]}-1]}")" = post-create-command.sh ]]; then _run_post_create_command; fi diff --git a/ci/test_cccl_c_parallel.sh b/ci/test_cccl_c_parallel.sh index f71b9342936..9c8bf2e5f1e 100755 --- a/ci/test_cccl_c_parallel.sh +++ b/ci/test_cccl_c_parallel.sh @@ -8,6 +8,6 @@ print_environment_details PRESET="cccl-c-parallel" -test_preset "CCCL C Parallel Library" ${PRESET} +test_preset "CCCL C Parallel Library" "${PRESET}" print_time_summary diff --git a/ci/test_cccl_c_stf.sh b/ci/test_cccl_c_stf.sh index 090e341292a..c5664f480e6 100755 --- a/ci/test_cccl_c_stf.sh +++ b/ci/test_cccl_c_stf.sh @@ -8,6 +8,6 @@ print_environment_details PRESET="cccl-c-stf" -test_preset "CCCL C Parallel Library" ${PRESET} +test_preset "CCCL C Parallel Library" "${PRESET}" print_time_summary diff --git a/ci/test_cub.sh b/ci/test_cub.sh index b2eac45c03b..52fb78f1260 100755 --- a/ci/test_cub.sh +++ b/ci/test_cub.sh @@ -12,7 +12,7 @@ ARTIFACT_TAGS=() ci_dir=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) -new_args=$("${ci_dir}/util/extract_switches.sh" \ +new_args="$("${ci_dir}/util/extract_switches.sh" \ -no-lid \ -lid0 \ -lid1 \ @@ -22,9 +22,10 @@ new_args=$("${ci_dir}/util/extract_switches.sh" \ -compute-sanitizer-racecheck \ -compute-sanitizer-initcheck \ -compute-sanitizer-synccheck \ - -- "$@") + -- "$@")" -eval set -- ${new_args} +declare -a new_args="(${new_args})" +set -- "${new_args[@]}" while true; do case "$1" in -no-lid) @@ -88,7 +89,7 @@ if $LIMITED; then export C2H_SEED_COUNT_OVERRIDE=1 readonly 
device_mem_GiB=8 - export C2H_DEVICE_MEMORY_LIMIT=$((${device_mem_GiB} * 1024 * 1024 * 1024)) + export C2H_DEVICE_MEMORY_LIMIT=$((device_mem_GiB * 1024 * 1024 * 1024)) export C2H_DEBUG_CHECKED_ALLOC_FAILURES=1 echo "Configuring limited environment:" @@ -98,6 +99,7 @@ if $LIMITED; then echo fi +# shellcheck source=ci/build_common.sh source "${ci_dir}/build_common.sh" print_environment_details @@ -108,7 +110,7 @@ if [[ -z "${GITHUB_ACTIONS:-}" ]]; then else producer_id=$(util/workflow/get_producer_id.sh) for tag in "${ARTIFACT_TAGS[@]}"; do - artifact="z_cub-test-artifacts-$DEVCONTAINER_NAME-$producer_id-$tag" + artifact="z_cub-test-artifacts-${DEVCONTAINER_NAME:?}-$producer_id-$tag" run_command "📦 Unpacking artifact '$artifact'" \ "${ci_dir}/util/artifacts/download_packed.sh" "$artifact" /home/coder/cccl done @@ -133,8 +135,8 @@ if $COMPUTE_SANITIZER; then export C2H_SEED_COUNT_OVERRIDE=1 fi -for PRESET in ${PRESETS[@]}; do - test_preset "CUB (${PRESET})" ${PRESET} +for PRESET in "${PRESETS[@]}"; do + test_preset "CUB (${PRESET})" "${PRESET}" done print_time_summary diff --git a/ci/test_cuda_cccl_examples_python.sh b/ci/test_cuda_cccl_examples_python.sh index a53d401c2fe..bb7867b42fc 100755 --- a/ci/test_cuda_cccl_examples_python.sh +++ b/ci/test_cuda_cccl_examples_python.sh @@ -16,7 +16,7 @@ setup_python_env "${py_version}" # Fetch or build the cuda_cccl wheel: if [[ -n "${GITHUB_ACTIONS:-}" ]]; then wheel_artifact_name=$("$ci_dir/util/workflow/get_wheel_artifact_name.sh") - "$ci_dir/util/artifacts/download.sh" ${wheel_artifact_name} /home/coder/cccl/ + "$ci_dir/util/artifacts/download.sh" "${wheel_artifact_name}" /home/coder/cccl/ else "$ci_dir/build_cuda_cccl_python.sh" -py-version "${py_version}" fi diff --git a/ci/test_cuda_cccl_headers_python.sh b/ci/test_cuda_cccl_headers_python.sh index 1bb0a0fbcf1..8a9d09642b4 100755 --- a/ci/test_cuda_cccl_headers_python.sh +++ b/ci/test_cuda_cccl_headers_python.sh @@ -16,7 +16,7 @@ setup_python_env "${py_version}" # Fetch 
or build the cuda_cccl wheel: if [[ -n "${GITHUB_ACTIONS:-}" ]]; then wheel_artifact_name=$("$ci_dir/util/workflow/get_wheel_artifact_name.sh") - "$ci_dir/util/artifacts/download.sh" ${wheel_artifact_name} /home/coder/cccl/ + "$ci_dir/util/artifacts/download.sh" "${wheel_artifact_name}" /home/coder/cccl/ else "$ci_dir/build_cuda_cccl_python.sh" -py-version "${py_version}" fi diff --git a/ci/test_cuda_compute_python.sh b/ci/test_cuda_compute_python.sh index 6a5676f79e8..9abed5e69e6 100755 --- a/ci/test_cuda_compute_python.sh +++ b/ci/test_cuda_compute_python.sh @@ -16,7 +16,7 @@ setup_python_env "${py_version}" # Fetch or build the cuda_cccl wheel: if [[ -n "${GITHUB_ACTIONS:-}" ]]; then wheel_artifact_name=$("$ci_dir/util/workflow/get_wheel_artifact_name.sh") - "$ci_dir/util/artifacts/download.sh" ${wheel_artifact_name} /home/coder/cccl/ + "$ci_dir/util/artifacts/download.sh" "${wheel_artifact_name}" /home/coder/cccl/ else "$ci_dir/build_cuda_cccl_python.sh" -py-version "${py_version}" fi diff --git a/ci/test_cuda_coop_python.sh b/ci/test_cuda_coop_python.sh index e2680942d98..bf75a2f8d02 100755 --- a/ci/test_cuda_coop_python.sh +++ b/ci/test_cuda_coop_python.sh @@ -16,7 +16,7 @@ setup_python_env "${py_version}" # Fetch or build the cuda_cccl wheel: if [[ -n "${GITHUB_ACTIONS:-}" ]]; then wheel_artifact_name=$("$ci_dir/util/workflow/get_wheel_artifact_name.sh") - "$ci_dir/util/artifacts/download.sh" ${wheel_artifact_name} /home/coder/cccl/ + "$ci_dir/util/artifacts/download.sh" "${wheel_artifact_name}" /home/coder/cccl/ else "$ci_dir/build_cuda_cccl_python.sh" -py-version "${py_version}" fi diff --git a/ci/test_cudax.sh b/ci/test_cudax.sh index 266f2137d9f..ff7096b3b25 100755 --- a/ci/test_cudax.sh +++ b/ci/test_cudax.sh @@ -8,6 +8,6 @@ print_environment_details PRESET="cudax" -test_preset "CUDA Experimental" ${PRESET} +test_preset "CUDA Experimental" "${PRESET}" print_time_summary diff --git a/ci/test_libcudacxx.sh b/ci/test_libcudacxx.sh index 
34cc867b9d4..78fbd68ef15 100755 --- a/ci/test_libcudacxx.sh +++ b/ci/test_libcudacxx.sh @@ -1,6 +1,7 @@ #!/bin/bash cd "$(dirname "${BASH_SOURCE[0]}")" +# shellcheck source=ci/build_common.sh source "./build_common.sh" print_environment_details @@ -8,9 +9,9 @@ print_environment_details "./build_libcudacxx.sh" "$@" PRESET="libcudacxx" -CMAKE_OPTIONS="-DCMAKE_CXX_STANDARD=${CXX_STANDARD} -DCMAKE_CUDA_STANDARD=${CXX_STANDARD}" +CMAKE_OPTIONS=("-DCMAKE_CXX_STANDARD=${CXX_STANDARD}" "-DCMAKE_CUDA_STANDARD=${CXX_STANDARD}") -configure_preset libcudacxx "$PRESET" "$CMAKE_OPTIONS" +configure_preset libcudacxx "$PRESET" "${CMAKE_OPTIONS[@]}" test_preset "libcudacxx (CTest)" "libcudacxx-ctest" diff --git a/ci/test_nvbench_helper.sh b/ci/test_nvbench_helper.sh index ab8356efd22..e590a5d766a 100755 --- a/ci/test_nvbench_helper.sh +++ b/ci/test_nvbench_helper.sh @@ -1,16 +1,17 @@ #!/bin/bash +# shellcheck source=ci/build_common.sh source "$(dirname "${BASH_SOURCE[0]}")/build_common.sh" print_environment_details PRESET="nvbench-helper" -CMAKE_OPTIONS="" +CMAKE_OPTIONS=() GPU_REQUIRED="true" -configure_and_build_preset "NVBench Helper" "$PRESET" "$CMAKE_OPTIONS" +configure_and_build_preset "NVBench Helper" "$PRESET" "${CMAKE_OPTIONS[@]}" test_preset "NVBench Helper" "$PRESET" "$GPU_REQUIRED" print_time_summary diff --git a/ci/test_packaging.sh b/ci/test_packaging.sh index f542947156f..92fe81e0253 100755 --- a/ci/test_packaging.sh +++ b/ci/test_packaging.sh @@ -8,8 +8,9 @@ cccl_dir="$(realpath "${ci_dir}/..")" MIN_CMAKE=false minimum_cmake_version=3.18.0 -new_args=$("${ci_dir}/util/extract_switches.sh" -min-cmake -- "$@") -eval set -- ${new_args} +new_args="$("${ci_dir}/util/extract_switches.sh" -min-cmake -- "$@")" +declare -a new_args="(${new_args})" +set -- "${new_args[@]}" while true; do case "$1" in -min-cmake) @@ -30,15 +31,16 @@ done if $MIN_CMAKE; then echo "Installing minimum CMake version v${minimum_cmake_version}..." 
wget -q \ - https://github.com/Kitware/CMake/releases/download/v${minimum_cmake_version}/cmake-${minimum_cmake_version}-Linux-x86_64.sh \ + https://github.com/Kitware/CMake/releases/download/v"${minimum_cmake_version}"/cmake-"${minimum_cmake_version}"-Linux-x86_64.sh \ -O /tmp/cmake-install.sh prefix=/tmp/cmake-${minimum_cmake_version} - mkdir -p ${prefix} - bash /tmp/cmake-install.sh --skip-license --prefix=${prefix} + mkdir -p "${prefix}" + bash /tmp/cmake-install.sh --skip-license --prefix="${prefix}" export MIN_CTEST_COMMAND="${prefix}/bin/ctest" fi # Needs to happen after cmake is installed: +# shellcheck source=ci/build_common.sh source "${ci_dir}/build_common.sh" cd "${ci_dir}" @@ -46,17 +48,17 @@ print_environment_details PRESET="packaging" -CMAKE_OPTIONS="" +CMAKE_OPTIONS=() GPU_REQUIRED="true" CMAKE_OPTIONS=("-DCCCL_EXAMPLE_CPM_REPOSITORY=${cccl_dir}") # Local -- build against the current repo's HEAD commit: -if [ -z "${GITHUB_ACTIONS:-}" ]; then +if [[ -z "${GITHUB_ACTIONS:-}" ]]; then CMAKE_OPTIONS+=("-DCCCL_EXAMPLE_CPM_TAG=HEAD") else - CMAKE_OPTIONS+=("-DCCCL_EXAMPLE_CPM_TAG=${GITHUB_SHA}") + CMAKE_OPTIONS+=("-DCCCL_EXAMPLE_CPM_TAG=${GITHUB_SHA:?}") fi if [[ -n "${MIN_CTEST_COMMAND:-}" ]]; then diff --git a/ci/test_python_common.sh b/ci/test_python_common.sh index fa4d970c38b..e18d9b54e86 100644 --- a/ci/test_python_common.sh +++ b/ci/test_python_common.sh @@ -14,13 +14,14 @@ function run_tests { TEMP_VENV_DIR="/tmp/${module}_venv" rm -rf "${TEMP_VENV_DIR}" python -m venv "${TEMP_VENV_DIR}" + # shellcheck disable=SC1091 . 
"${TEMP_VENV_DIR}/bin/activate" echo 'cuda-cccl @ file:///home/coder/cccl/python/cuda_cccl' > /tmp/cuda-cccl_constraints.txt - run_command "⚙️ Pip install ${module}" pip install -c /tmp/cuda-cccl_constraints.txt .[test] + run_command "⚙️ Pip install ${module}" pip install -c /tmp/cuda-cccl_constraints.txt ".[test]" begin_group "⚙️ ${module} site-packages" pip freeze end_group "⚙️ ${module} site-packages" - run_command "🚀 Pytest ${module}" pytest -n ${PARALLEL_LEVEL} -v ./tests + run_command "🚀 Pytest ${module}" pytest -n "${PARALLEL_LEVEL:-$(nproc --all --ignore=1)}" -v ./tests deactivate popd >/dev/null diff --git a/ci/test_thrust.sh b/ci/test_thrust.sh index 8458f0d670f..862e2ddd710 100755 --- a/ci/test_thrust.sh +++ b/ci/test_thrust.sh @@ -7,8 +7,9 @@ GPU_ONLY=false ci_dir=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) -new_args=$("${ci_dir}/util/extract_switches.sh" -cpu-only -gpu-only -- "$@") -eval set -- ${new_args} +new_args="$("${ci_dir}/util/extract_switches.sh" -cpu-only -gpu-only -- "$@")" +declare -a new_args="(${new_args})" +set -- "${new_args[@]}" while true; do case "$1" in -cpu-only) @@ -32,6 +33,7 @@ while true; do esac done +# shellcheck source=ci/build_common.sh source "${ci_dir}/build_common.sh" print_environment_details @@ -39,9 +41,10 @@ print_environment_details if [[ -z "${GITHUB_ACTIONS:-}" ]]; then ./build_thrust.sh "$@" else + producer_id="$(util/workflow/get_producer_id.sh)" run_command "📦 Unpacking test artifacts" \ "${ci_dir}/util/artifacts/download_packed.sh" \ - "z_thrust-test-artifacts-$DEVCONTAINER_NAME-$(util/workflow/get_producer_id.sh)-$ARTIFACT_TAG" \ + "z_thrust-test-artifacts-${DEVCONTAINER_NAME:?}-$producer_id-$ARTIFACT_TAG" \ /home/coder/cccl/ fi @@ -56,8 +59,8 @@ else GPU_REQUIRED=true fi -for PRESET in ${PRESETS[@]}; do - test_preset "Thrust (${PRESET})" ${PRESET} ${GPU_REQUIRED} +for PRESET in "${PRESETS[@]}"; do + test_preset "Thrust (${PRESET})" "${PRESET}" "${GPU_REQUIRED}" done print_time_summary diff --git 
a/ci/update_rapids_version.sh b/ci/update_rapids_version.sh index 3a749fedf15..961375433f0 100755 --- a/ci/update_rapids_version.sh +++ b/ci/update_rapids_version.sh @@ -11,9 +11,10 @@ NEXT_FULL_TAG=$1 #Get . for next version -NEXT_MAJOR=$(echo $NEXT_FULL_TAG | awk '{split($0, a, "."); print a[1]}') -NEXT_MINOR=$(echo $NEXT_FULL_TAG | awk '{split($0, a, "."); print a[2]}') -NEXT_PATCH=$(echo $NEXT_FULL_TAG | awk '{split($0, a, "."); print a[3]}') +NEXT_MAJOR=$(echo "$NEXT_FULL_TAG" | awk '{split($0, a, "."); print a[1]}') +NEXT_MINOR=$(echo "$NEXT_FULL_TAG" | awk '{split($0, a, "."); print a[2]}') +# shellcheck disable=SC2034 +NEXT_PATCH=$(echo "$NEXT_FULL_TAG" | awk '{split($0, a, "."); print a[3]}') NEXT_SHORT_TAG=${NEXT_MAJOR}.${NEXT_MINOR} # Need to distutils-normalize the versions for some use cases @@ -23,7 +24,7 @@ echo "Updating RAPIDS and devcontainers to $NEXT_FULL_TAG" # Inplace sed replace; workaround for Linux and Mac function sed_runner() { - sed -i.bak ''"$1"'' $2 && rm -f ${2}.bak + sed -i.bak ''"$1"'' "$2" && rm -f "${2}".bak } # Update CI files diff --git a/ci/update_version.sh b/ci/update_version.sh index 1152ac11ff4..f58dd38c119 100755 --- a/ci/update_version.sh +++ b/ci/update_version.sh @@ -19,10 +19,8 @@ done major="$1" minor="$2" patch="$3" -pymajor="0" -pyminor="1" -if [ -z "$major" ] || [ -z "$minor" ] || [ -z "$patch" ]; then +if [[ -z "$major" ]] || [[ -z "$minor" ]] || [[ -z "$patch" ]]; then echo "Usage: $0 [--dry-run] " exit 1 fi @@ -69,8 +67,9 @@ update_file () { local file=$1 local pattern=$2 local new_value=$3 - if [ "$DRY_RUN" = true ]; then - local temp_file=$(mktemp) + if [[ "$DRY_RUN" = true ]]; then + local temp_file + temp_file=$(mktemp) sed "s/$pattern/$new_value/g" "$file" > "$temp_file" diff --color=auto -U 0 "$file" "$temp_file" || true rm "$temp_file" @@ -115,7 +114,7 @@ update_file "$CUDAX_CMAKE_VERSION_FILE" "set(cudax_VERSION_PATCH \([0-9]\+\))" " update_file "$CUDA_CCCL_VERSION_FILE" "^__version__ = \"\([0-9.]\+\)\"" 
"__version__ = \"$major.$minor.$patch\"" -if [ "$DRY_RUN" = true ]; then +if [[ "$DRY_RUN" = true ]]; then echo "Dry run completed. No changes made." else echo "Version updated to $major.$minor.$patch" diff --git a/ci/upload_cub_test_artifacts.sh b/ci/upload_cub_test_artifacts.sh index a4cd0539c4b..efe26fbad12 100755 --- a/ci/upload_cub_test_artifacts.sh +++ b/ci/upload_cub_test_artifacts.sh @@ -2,13 +2,15 @@ set -euo pipefail -if [ -z "${GITHUB_ACTIONS:-}" ]; then +if [[ -z "${GITHUB_ACTIONS:-}" ]]; then echo "This script must be run in a GitHub Actions environment." >&2 exit 1 fi -readonly ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -readonly repo_root="$(cd "${ci_dir}/.." && pwd)" +ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +readonly ci_dir +repo_root="$(cd "${ci_dir}/.." && pwd)" +readonly repo_root cd "$repo_root" @@ -17,7 +19,7 @@ if ! ci/util/workflow/has_consumers.sh; then exit 0 fi -if [ "$#" -gt 0 ]; then +if [[ "$#" -gt 0 ]]; then preset_variants=("$@") else # Figure out which artifacts need to be built: @@ -42,15 +44,15 @@ else fi # Remove duplicates: -preset_variants=($(echo "${preset_variants[@]}" | tr ' ' '\n' | sort -u | tr '\n' ' ')) +mapfile -t preset_variants < <(echo "${preset_variants[*]}" | tr ' ' '\n' | sort -u) -artifact_prefix=z_cub-test-artifacts-$DEVCONTAINER_NAME-${JOB_ID} +artifact_prefix="z_cub-test-artifacts-${DEVCONTAINER_NAME:?}-${JOB_ID:?}" # BUILD_INFIX is undefined on windows CI build_dir_regex="build${CCCL_BUILD_INFIX:+/$CCCL_BUILD_INFIX}/cub[^/]*" # Just collect the minimum set of files needed for running each ctest preset: -for preset_variant in ${preset_variants[@]}; do +for preset_variant in "${preset_variants[@]}"; do # Shared across all presets: ci/util/artifacts/stage.sh "$artifact_prefix-$preset_variant" \ @@ -58,8 +60,7 @@ for preset_variant in ${preset_variants[@]}; do "$build_dir_regex/.*rules\.ninja$" \ "$build_dir_regex/CMakeCache\.txt$" \ "$build_dir_regex/.*VerifyGlobs\.cmake$" \ - 
"$build_dir_regex/.*CTestTestfile\.cmake$" \ - > /dev/null + "$build_dir_regex/.*CTestTestfile\.cmake$" > /dev/null # Add per-preset executables: if [[ "$preset_variant" == lid_* ]]; then @@ -75,7 +76,7 @@ for preset_variant in ${preset_variants[@]}; do fi done -if [[ " ${preset_variants[@]} " =~ " no_lid " ]]; then +if [[ " ${preset_variants[*]} " == *" no_lid "* ]]; then # Initially add all binaries to no_lid, then remove the lid variants in later passes: ci/util/artifacts/stage.sh \ "$artifact_prefix-no_lid" \ @@ -98,5 +99,10 @@ if [[ " ${preset_variants[@]} " =~ " no_lid " ]]; then "$artifact_prefix-no_lid" \ "$build_dir_regex/cub/test/ptx-json/.*\.cubin$" > /dev/null + # These cubin outputs are needed for FileCheck tests in test/cubin-check + ci/util/artifacts/stage.sh \ + "$artifact_prefix-no_lid" \ + "$build_dir_regex/cub/test/cubin-check/.*\.cubin$" > /dev/null || : + ci/util/artifacts/upload_stage_packed.sh "$artifact_prefix-no_lid" fi diff --git a/ci/upload_job_result_artifacts.sh b/ci/upload_job_result_artifacts.sh index 5dd8165ddaa..797a05461bc 100755 --- a/ci/upload_job_result_artifacts.sh +++ b/ci/upload_job_result_artifacts.sh @@ -2,13 +2,15 @@ set -euo pipefail -if [ -z "${GITHUB_ACTIONS:-}" ]; then +if [[ -z "${GITHUB_ACTIONS:-}" ]]; then echo "This script must be run in a GitHub Actions environment." >&2 exit 1 fi -readonly ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -readonly repo_root="$(cd "${ci_dir}/.." && pwd)" +ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +readonly ci_dir +repo_root="$(cd "${ci_dir}/.." && pwd)" +readonly repo_root cd "$repo_root" @@ -17,7 +19,7 @@ Usage: $0 EOF ) -if [ "$#" -ne 2 ]; then +if [[ "$#" -ne 2 ]]; then echo "Error: Invalid number of arguments." 
>&2 echo "$usage" >&2 exit 1 diff --git a/ci/upload_thrust_test_artifacts.sh b/ci/upload_thrust_test_artifacts.sh index 67cc6d236eb..c54c6f3df29 100755 --- a/ci/upload_thrust_test_artifacts.sh +++ b/ci/upload_thrust_test_artifacts.sh @@ -2,13 +2,15 @@ set -euo pipefail -if [ -z "${GITHUB_ACTIONS:-}" ]; then +if [[ -z "${GITHUB_ACTIONS:-}" ]]; then echo "This script must be run in a GitHub Actions environment." >&2 exit 1 fi -readonly ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -readonly repo_root="$(cd "${ci_dir}/.." && pwd)" +ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +readonly ci_dir +repo_root="$(cd "${ci_dir}/.." && pwd)" +readonly repo_root cd "$repo_root" @@ -27,13 +29,13 @@ if grep -q "TestCPU" <<< "$consumers"; then preset_variants+=("test_cpu") fi -artifact_prefix=z_thrust-test-artifacts-$DEVCONTAINER_NAME-${JOB_ID} +artifact_prefix="z_thrust-test-artifacts-${DEVCONTAINER_NAME:?}-${JOB_ID:?}" # BUILD_INFIX is undefined on windows CI build_dir_regex="build${CCCL_BUILD_INFIX:+/$CCCL_BUILD_INFIX}/thrust[^/]*" # Just collect the minimum set of files needed for running each ctest preset: -for preset_variant in ${preset_variants[@]}; do +for preset_variant in "${preset_variants[@]}"; do # Shared across all presets: ci/util/artifacts/stage.sh "$artifact_prefix-$preset_variant" \ "$build_dir_regex/build\.ninja$" \ @@ -44,7 +46,7 @@ for preset_variant in ${preset_variants[@]}; do > /dev/null done -if [[ " ${preset_variants[@]} " =~ " test_cpu " ]]; then +if [[ " ${preset_variants[*]} " == *" test_cpu "* ]]; then # Initially add all binaries, then remove all containing 'cuda' in the name: ci/util/artifacts/stage.sh \ "$artifact_prefix-test_cpu" \ @@ -68,7 +70,7 @@ if [[ " ${preset_variants[@]} " =~ " test_cpu " ]]; then ci/util/artifacts/upload_stage_packed.sh "$artifact_prefix-test_cpu" fi -if [[ " ${preset_variants[@]} " =~ " test_gpu " ]]; then +if [[ " ${preset_variants[*]} " == *" test_gpu "* ]]; then # Only binaries containing 'cuda': 
ci/util/artifacts/stage.sh \ "$artifact_prefix-test_gpu" \ diff --git a/ci/util/artifacts/common.sh b/ci/util/artifacts/common.sh index 2d956610c1e..9daa472f717 100755 --- a/ci/util/artifacts/common.sh +++ b/ci/util/artifacts/common.sh @@ -7,7 +7,7 @@ if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then exit 1 fi -if [ -z "${GITHUB_ACTIONS:-}" ]; then +if [[ -z "${GITHUB_ACTIONS:-}" ]]; then echo "This script must be run in a GitHub Actions environment." >&2 exit 1 fi @@ -34,6 +34,6 @@ export ARTIFACT_UPLOAD_REGISTERY="${ARTIFACT_UPLOAD_STAGE}/artifact_upload_regis mkdir -p "$ARTIFACT_UPLOAD_STAGE" "$ARTIFACT_ARCHIVES" -if [ ! -f "$ARTIFACT_UPLOAD_REGISTERY" ]; then +if [[ ! -f "$ARTIFACT_UPLOAD_REGISTERY" ]]; then echo "[]" > "$ARTIFACT_UPLOAD_REGISTERY" fi diff --git a/ci/util/artifacts/download.sh b/ci/util/artifacts/download.sh index a3b5c744494..658ac4d063f 100755 --- a/ci/util/artifacts/download.sh +++ b/ci/util/artifacts/download.sh @@ -2,10 +2,12 @@ set -euo pipefail -readonly ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +readonly ci_dir +# shellcheck source=ci/util/artifacts/common.sh source "$ci_dir/util/artifacts/common.sh" -readonly usage=$(cat < [] Download artifacts uploaded by other jobs in this CI run. @@ -18,15 +20,16 @@ Example Usage: $0 job-\$ID-products some/path/ EOF ) +readonly usage -if [ "$#" -lt 1 ]; then +if [[ "$#" -lt 1 ]]; then echo "Error: Missing artifact name." 
>&2 echo "$usage" >&2 exit 1 fi readonly artifact_name="$1" -if [ "$#" -eq 1 ]; then +if [[ "$#" -eq 1 ]]; then artifact_path="./" else artifact_path="$2" diff --git a/ci/util/artifacts/download/fetch.sh b/ci/util/artifacts/download/fetch.sh index 2f1c682eaaa..0f1aaff672e 100755 --- a/ci/util/artifacts/download/fetch.sh +++ b/ci/util/artifacts/download/fetch.sh @@ -2,10 +2,12 @@ set -euo pipefail -readonly ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../" && pwd)" +ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../" && pwd)" +readonly ci_dir +# shellcheck source=ci/util/artifacts/common.sh source "$ci_dir/util/artifacts/common.sh" -readonly usage=$(cat < Downloads files from a named artifact from the current CI workflow run into the specified directory. @@ -15,8 +17,9 @@ Example Usages: - $0 my_artifact /path/to/some/directory/ EOF ) +readonly usage -if [ "$#" -lt 2 ]; then +if [[ "$#" -lt 2 ]]; then echo "Error: Missing arguments." >&2 echo "$usage" >&2 exit 1 @@ -26,10 +29,12 @@ readonly artifact_name="$1" # Create the target directory and then get its absolute path mkdir -p "$2" -readonly target_directory="$(cd "$2" && pwd)" +target_directory="$(cd "$2" && pwd)" +readonly target_directory echo "Downloading artifact '$artifact_name' to '$target_directory'" +# shellcheck disable=SC2154 "$ci_dir/util/retry.sh" 5 30 \ - gh run download ${GITHUB_RUN_ID} \ + gh run download "${GITHUB_RUN_ID}" \ --name "$artifact_name" \ --dir "$target_directory" diff --git a/ci/util/artifacts/download/unpack.sh b/ci/util/artifacts/download/unpack.sh index 2093033a8d0..e8313e3566e 100755 --- a/ci/util/artifacts/download/unpack.sh +++ b/ci/util/artifacts/download/unpack.sh @@ -2,10 +2,12 @@ set -euo pipefail -readonly ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../" && pwd)" +ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../" && pwd)" +readonly ci_dir +# shellcheck source=ci/util/artifacts/common.sh source "$ci_dir/util/artifacts/common.sh" -readonly usage=$(cat < 
Unpacks a fetched packed artifact's tar.zst archive into the specified directory. @@ -15,8 +17,9 @@ Example Usages: - $0 /path/to/archive.tar.zst /path/to/extract/ EOF ) +readonly usage -if [ "$#" -lt 2 ]; then +if [[ "$#" -lt 2 ]]; then echo "Error: Missing arguments." >&2 echo "$usage" >&2 exit 1 @@ -33,7 +36,7 @@ readonly artifact_path="$2" readonly artifact_archive="$ARTIFACT_ARCHIVES/${artifact_name}.tar.zst" echo "Unpacking artifact from '$artifact_archive' to '$artifact_path'" -echo "Using zstd executable: `which zstd`" +echo "Using zstd executable: $(command -v zstd)" # Create the artifact path directory if it doesn't exist mkdir -p "$artifact_path" diff --git a/ci/util/artifacts/download_packed.sh b/ci/util/artifacts/download_packed.sh index f562cb762fc..043a53c0a07 100755 --- a/ci/util/artifacts/download_packed.sh +++ b/ci/util/artifacts/download_packed.sh @@ -2,10 +2,12 @@ set -euo pipefail -readonly ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +readonly ci_dir +# shellcheck source=ci/util/artifacts/common.sh source "$ci_dir/util/artifacts/common.sh" -readonly usage=$(cat < [] Download and extracts a packed artifact uploaded by another job in this CI run. @@ -18,22 +20,21 @@ Example Usage: $0 job-\$ID-products build/ EOF ) +readonly usage -if [ "$#" -lt 1 ]; then +if [[ "$#" -lt 1 ]]; then echo "Error: Missing artifact name." 
>&2 echo "$usage" >&2 exit 1 fi readonly artifact_name="$1" -if [ "$#" -eq 1 ]; then +if [[ "$#" -eq 1 ]]; then artifact_path="./" else artifact_path="$2" fi -readonly artifact_archive="$ARTIFACT_ARCHIVES/$artifact_name.tar.zst" - start=$SECONDS "$ci_dir/util/artifacts/download/fetch.sh" "$artifact_name" "${ARTIFACT_ARCHIVES}" fetched=$SECONDS diff --git a/ci/util/artifacts/stage.sh b/ci/util/artifacts/stage.sh index 7aedf9fd4c7..6e4e75eb138 100755 --- a/ci/util/artifacts/stage.sh +++ b/ci/util/artifacts/stage.sh @@ -2,10 +2,12 @@ set -euo pipefail -readonly ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +readonly ci_dir +# shellcheck source=ci/util/artifacts/common.sh source "$ci_dir/util/artifacts/common.sh" -readonly usage=$(cat < [ ...] Stages files matching the provided regexes path for upload under the specified artifact. @@ -24,8 +26,9 @@ Stage built binaries and .cmake files in \${ARTIFACT_UPLOAD_STAGE}/test_artifact $0 test_artifacts 'bin/.*' 'lib/.*' '.*cmake$' EOF ) +readonly usage -if [ "$#" -lt 2 ]; then +if [[ "$#" -lt 2 ]]; then echo "Error: Missing arguments." >&2 echo "$usage" >&2 exit 1 diff --git a/ci/util/artifacts/unstage.sh b/ci/util/artifacts/unstage.sh index 5e51e0d61b7..66d9b9e4f32 100755 --- a/ci/util/artifacts/unstage.sh +++ b/ci/util/artifacts/unstage.sh @@ -2,10 +2,12 @@ set -euo pipefail -readonly ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +readonly ci_dir +# shellcheck source=ci/util/artifacts/common.sh source "$ci_dir/util/artifacts/common.sh" -readonly usage=$(cat < [ ...] Unstages (removes) files matching the provided regexes from the specified artifact stage. 
@@ -19,8 +21,9 @@ Unstage previously-staged built binaries and .cmake files from test_artifacts: $0 test_artifacts 'bin/.*' 'lib/.*' '.*cmake$' EOF ) +readonly usage -if [ "$#" -lt 2 ]; then +if [[ "$#" -lt 2 ]]; then echo "Error: Missing arguments." >&2 echo "$usage" >&2 exit 1 diff --git a/ci/util/artifacts/upload.sh b/ci/util/artifacts/upload.sh index 1e01dc95a46..a9a11f145bb 100755 --- a/ci/util/artifacts/upload.sh +++ b/ci/util/artifacts/upload.sh @@ -2,15 +2,17 @@ set -euo pipefail -readonly ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +readonly ci_dir +# shellcheck source=ci/util/artifacts/common.sh source "$ci_dir/util/artifacts/common.sh" -readonly usage=$(cat < [ ...] Creates an artifact consisting of a zip file containing a single file or set of regex matches. -Regexes are passed to the `find` command's -regex option in the current directory. +Regexes are passed to the $(command -v find) command's -regex option in the current directory. './' is prepended to all regexes for convenience. The artifact will contain all matching files with paths relative to the current directory. @@ -35,8 +37,9 @@ Example Usage: 'lib/.*' EOF ) +readonly usage -if [ "$#" -lt 1 ]; then +if [[ "$#" -lt 1 ]]; then echo "Error: Missing artifact name." 
>&2 echo "$usage" >&2 exit 1 @@ -45,7 +48,7 @@ fi readonly artifact_name="$1" # If no regexes are provided, use the artifact name as the path: -if [ "$#" -eq 1 ]; then +if [[ "$#" -eq 1 ]]; then "$ci_dir/util/artifacts/upload/register.sh" "$artifact_name" "$artifact_name" exit fi diff --git a/ci/util/artifacts/upload/build.sh b/ci/util/artifacts/upload/build.sh index cdadd168c4f..12f3c4b6343 100755 --- a/ci/util/artifacts/upload/build.sh +++ b/ci/util/artifacts/upload/build.sh @@ -2,10 +2,12 @@ set -euo pipefail -readonly ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../" && pwd)" +ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../" && pwd)" +readonly ci_dir +# shellcheck source=ci/util/artifacts/common.sh source "$ci_dir/util/artifacts/common.sh" -readonly usage=$(cat < Builds a physical tree containing a staged artifact created using artifact/stage.sh / unstage.sh. @@ -13,8 +15,9 @@ Builds a physical tree containing a staged artifact created using artifact/stage The artifact root will be located at \${ARTIFACT_UPLOAD_STAGE}//. EOF ) +readonly usage -if [ "$#" -ne 1 ]; then +if [[ "$#" -ne 1 ]]; then echo "Error: Invalid number of arguments." 
>&2 echo "$usage" >&2 exit 1 @@ -25,7 +28,8 @@ readonly artifact_stage_path="${ARTIFACT_UPLOAD_STAGE}/${artifact_name}" readonly artifact_index_file="$artifact_stage_path/artifact_index.txt" readonly artifact_cwd_file="$artifact_stage_path/artifact_index_cwd.txt" readonly artifact_dir="${ARTIFACT_UPLOAD_STAGE}/${artifact_name}/${artifact_name}" -readonly artifact_cwd="$(cat "$artifact_cwd_file")" +artifact_cwd="$(cat "$artifact_cwd_file")" +readonly artifact_cwd mkdir -p "$artifact_dir" diff --git a/ci/util/artifacts/upload/pack.sh b/ci/util/artifacts/upload/pack.sh index a1592ca4c75..3e74012aab2 100755 --- a/ci/util/artifacts/upload/pack.sh +++ b/ci/util/artifacts/upload/pack.sh @@ -2,10 +2,12 @@ set -euo pipefail -readonly ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../" && pwd)" +ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../" && pwd)" +readonly ci_dir +# shellcheck source=ci/util/artifacts/common.sh source "$ci_dir/util/artifacts/common.sh" -readonly usage=$(cat < Packs a staged artifact (created using artifact/stage.sh) into a tar.zst archive. @@ -17,8 +19,9 @@ Example Usages: - $0 test_artifact EOF ) +readonly usage -if [ "$#" -ne 1 ]; then +if [[ "$#" -ne 1 ]]; then echo "Error: Invalid number of arguments." 
>&2 echo "$usage" >&2 exit 1 @@ -36,7 +39,7 @@ readonly artifact_cwd_file="$artifact_stage_path/artifact_index_cwd.txt" readonly artifact_archive="${ARTIFACT_UPLOAD_STAGE}/${artifact_name}/${artifact_name}.tar.zst" echo "Packing artifact '$artifact_stage_path' into '$artifact_archive'" -echo "Using zstd: `which zstd`" +echo "Using zstd: $(command -v zstd)" echo "Pulling artifacts from working directory: $(cat "$artifact_cwd_file")" tar -cv -C "$(cat "$artifact_cwd_file")" -T "$artifact_index_file" \ diff --git a/ci/util/artifacts/upload/print_matrix.sh b/ci/util/artifacts/upload/print_matrix.sh index 8b8a2539325..a7caf7c5f08 100755 --- a/ci/util/artifacts/upload/print_matrix.sh +++ b/ci/util/artifacts/upload/print_matrix.sh @@ -2,17 +2,20 @@ set -euo pipefail -readonly ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../" && pwd)" +ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../" && pwd)" +readonly ci_dir +# shellcheck source=ci/util/artifacts/common.sh source "$ci_dir/util/artifacts/common.sh" -readonly usage=$(cat <&2 exit 1 fi -cat $ARTIFACT_UPLOAD_REGISTERY | jq -c '.' +jq -c '.' "${ARTIFACT_UPLOAD_REGISTERY:?}" diff --git a/ci/util/artifacts/upload/register.sh b/ci/util/artifacts/upload/register.sh index 0722168bcac..fc8c157f424 100755 --- a/ci/util/artifacts/upload/register.sh +++ b/ci/util/artifacts/upload/register.sh @@ -2,13 +2,15 @@ set -euo pipefail -readonly ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../" && pwd)" +ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../" && pwd)" +readonly ci_dir +# shellcheck source=ci/util/artifacts/common.sh source "$ci_dir/util/artifacts/common.sh" readonly artifact_compression_level=6 readonly artifact_retention_days=7 -readonly usage=$(cat < [] Registers artifacts for upload. If path is not provided, it defaults to the artifact name. 
@@ -25,8 +27,9 @@ Example Usages: - $0 my_artifact /path/to/my_artifact_directory/ EOF ) +readonly usage -if [ "$#" -lt 1 ]; then +if [[ "$#" -lt 1 ]]; then echo "Error: Missing artifact name." >&2 echo "$usage" >&2 exit 1 @@ -40,7 +43,7 @@ if [[ "$artifact_path" != /* ]]; then artifact_path="$(pwd)/$artifact_path" fi -if [ ! -e "$artifact_path" ]; then +if [[ ! -e "$artifact_path" ]]; then echo "Error: Artifact path '$artifact_path' does not exist." >&2 echo "$usage" >&2 exit 1 diff --git a/ci/util/artifacts/upload/set_compression_level.sh b/ci/util/artifacts/upload/set_compression_level.sh index 65f318acc8d..621da2e6f41 100755 --- a/ci/util/artifacts/upload/set_compression_level.sh +++ b/ci/util/artifacts/upload/set_compression_level.sh @@ -2,10 +2,12 @@ set -euo pipefail -readonly ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../" && pwd)" +ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../" && pwd)" +readonly ci_dir +# shellcheck source=ci/util/artifacts/common.sh source "$ci_dir/util/artifacts/common.sh" -readonly usage=$(cat < Sets the compression level for an artifact registered for upload. @@ -15,8 +17,9 @@ Example Usage: $0 some_many_small_uncompressed_files 10 EOF ) +readonly usage -if [ "$#" -lt 2 ]; then +if [[ "$#" -lt 2 ]]; then echo "Error: Missing arguments." >&2 echo "$usage" >&2 exit 1 diff --git a/ci/util/artifacts/upload/set_retention_days.sh b/ci/util/artifacts/upload/set_retention_days.sh index f40004d2965..6bebef62997 100755 --- a/ci/util/artifacts/upload/set_retention_days.sh +++ b/ci/util/artifacts/upload/set_retention_days.sh @@ -2,10 +2,12 @@ set -euo pipefail -readonly ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../" && pwd)" +ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../" && pwd)" +readonly ci_dir +# shellcheck source=ci/util/artifacts/common.sh source "$ci_dir/util/artifacts/common.sh" -readonly usage=$(cat < Sets the retention days for an artifact registered for upload. 
@@ -16,8 +18,9 @@ Example Usage: $0 some_long_term_artifact 30 EOF ) +readonly usage -if [ "$#" -lt 2 ]; then +if [[ "$#" -lt 2 ]]; then echo "Error: Missing arguments." >&2 echo "$usage" >&2 exit 1 diff --git a/ci/util/artifacts/upload_packed.sh b/ci/util/artifacts/upload_packed.sh index 392eca3bbf2..3a0aad9fe97 100755 --- a/ci/util/artifacts/upload_packed.sh +++ b/ci/util/artifacts/upload_packed.sh @@ -2,17 +2,19 @@ set -euo pipefail -readonly ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +readonly ci_dir +# shellcheck source=ci/util/artifacts/common.sh source "$ci_dir/util/artifacts/common.sh" -readonly usage=$(cat < [ ...] Create a compressed artifact, suitable for large, temporary files such as build products or test binaries that need to be quickly uploaded and downloaded between CI jobs. The artifact will exist of a zip file containing an .tar.zst archive, packed with the parallel zstd. -Regexes are passed to the `find` command's -regex option in the current directory. +Regexes are passed to the $(command -v find) command's -regex option in the current directory. './' is prepended to all regexes for convenience. The artifact will contain all matching files relative to the current directory. @@ -37,8 +39,9 @@ Example Usage: 'lib/.*' EOF ) +readonly usage -if [ "$#" -lt 2 ]; then +if [[ "$#" -lt 2 ]]; then echo "Error: Invalid number of arguments." 
>&2 echo "$usage" >&2 exit 1 diff --git a/ci/util/artifacts/upload_stage.sh b/ci/util/artifacts/upload_stage.sh index c5517b5895a..72a161a32ea 100755 --- a/ci/util/artifacts/upload_stage.sh +++ b/ci/util/artifacts/upload_stage.sh @@ -2,18 +2,21 @@ set -euo pipefail -readonly ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +readonly ci_dir +# shellcheck source=ci/util/artifacts/common.sh source "$ci_dir/util/artifacts/common.sh" -readonly usage=$(cat < Same as 'ci/util/artifacts/upload_packed.sh', but assumes that the stage has already been created using 'ci/util/artifacts/stage.sh' and 'unstage.sh'. Performs the packing and registration steps only. EOF ) +readonly usage -if [ "$#" -ne 1 ]; then +if [[ "$#" -ne 1 ]]; then echo "Error: Invalid number of arguments." >&2 echo "$usage" >&2 exit 1 diff --git a/ci/util/artifacts/upload_stage_packed.sh b/ci/util/artifacts/upload_stage_packed.sh index ea2b26e815c..82dade3e558 100755 --- a/ci/util/artifacts/upload_stage_packed.sh +++ b/ci/util/artifacts/upload_stage_packed.sh @@ -2,24 +2,28 @@ set -euo pipefail -readonly ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +readonly ci_dir +# shellcheck source=ci/util/artifacts/common.sh source "$ci_dir/util/artifacts/common.sh" -readonly usage=$(cat < Same as 'ci/util/artifacts/upload_packed.sh', but assumes that the stage has already been created using 'ci/util/artifacts/stage.sh' and 'unstage.sh'. Performs the packing and registration steps only. EOF ) +readonly usage -if [ "$#" -ne 1 ]; then +if [[ "$#" -ne 1 ]]; then echo "Error: Invalid number of arguments." 
>&2 echo "$usage" >&2 exit 1 @@ -45,7 +48,7 @@ fi readonly artifact_name="$1" # If no regexes are provided, use the artifact name as the path: -if [ "$#" -eq 1 ]; then +if [[ "$#" -eq 1 ]]; then "$ci_dir/util/artifacts/upload/register.sh" "$artifact_name" "$artifact_name" exit fi diff --git a/ci/util/artifacts/upload/build.sh b/ci/util/artifacts/upload/build.sh index cdadd168c4f..12f3c4b6343 100755 --- a/ci/util/artifacts/upload/build.sh +++ b/ci/util/artifacts/upload/build.sh @@ -2,10 +2,12 @@ set -euo pipefail -readonly ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../" && pwd)" +ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../" && pwd)" +readonly ci_dir +# shellcheck source=ci/util/artifacts/common.sh source "$ci_dir/util/artifacts/common.sh" -readonly usage=$(cat < Builds a physical tree containing a staged artifact created using artifact/stage.sh / unstage.sh. @@ -13,8 +15,9 @@ Builds a physical tree containing a staged artifact created using artifact/stage The artifact root will be located at \${ARTIFACT_UPLOAD_STAGE}//. EOF ) +readonly usage -if [ "$#" -ne 1 ]; then +if [[ "$#" -ne 1 ]]; then echo "Error: Invalid number of arguments."
"${CONFIGURE_OVERRIDE}" ]]; then if ! (set -x; eval "${CONFIGURE_OVERRIDE}") 2>&1 | tee "${cmlog_file}"; then echo "::endgroup::" - echo "🔴📝 Configuration override failed ($(elapsed_time)):\n\t${CONFIGURE_OVERRIDE}" + echo -e "🔴📝 Configuration override failed ($(elapsed_time)):\n\t${CONFIGURE_OVERRIDE}" exit 1 fi else - read -r -a _cmake_opts <<< "${CMAKE_OPTIONS}" - if ! (set -x; cmake --preset "${PRESET}" "${_cmake_opts[@]}") 2>&1 | tee "${cmlog_file}"; then + if ! (set -x; cmake --preset "${PRESET}" "${CMAKE_OPTIONS[@]}") 2>&1 | tee "${cmlog_file}"; then echo "::endgroup::" echo "🔴📝 CMake configure failed for preset ${PRESET} ($(elapsed_time))" exit 1 @@ -104,16 +103,16 @@ if [[ -z "${BUILD_DIR}" ]]; then exit 1 fi -if [[ -n "${BUILD_TARGETS}" ]]; then - if ! (set -x; ninja -C "${BUILD_DIR}" ${BUILD_TARGETS}); then +if [[ "${#BUILD_TARGETS}" -gt 0 ]]; then + if ! (set -x; ninja -C "${BUILD_DIR}" "${BUILD_TARGETS[@]}"); then echo "::endgroup::" - echo "🔴🛠️ Ninja build failed for targets ($(elapsed_time)): ${BUILD_TARGETS}" + echo "🔴🛠️ Ninja build failed for targets ($(elapsed_time)): ${BUILD_TARGETS[*]@Q}" exit 1 fi fi -if [[ -n "${CTEST_TARGETS}" ]]; then - for t in ${CTEST_TARGETS}; do +if [[ "${#CTEST_TARGETS}" -gt 0 ]]; then + for t in "${CTEST_TARGETS[@]}"; do if ! (set -x; ctest --test-dir "${BUILD_DIR}" -R "$t" -V --output-on-failure); then echo "::endgroup::" echo "🔴🔎 CTest failed for target $t ($(elapsed_time))" @@ -122,7 +121,7 @@ if [[ -n "${CTEST_TARGETS}" ]]; then done fi -if [[ -n "${LIT_PRECOMPILE_TESTS}" || -n "${LIT_TESTS}" ]]; then +if [[ "${#LIT_PRECOMPILE_TESTS}" -gt 0 || "${#LIT_TESTS}" -gt 0 ]]; then lit_site_cfg="${BUILD_DIR}/libcudacxx/test/libcudacxx/lit.site.cfg" if [[ ! 
-f "${lit_site_cfg}" ]]; then echo "::endgroup::" @@ -131,8 +130,8 @@ if [[ -n "${LIT_PRECOMPILE_TESTS}" || -n "${LIT_TESTS}" ]]; then fi fi -if [[ -n "${LIT_PRECOMPILE_TESTS}" ]]; then - for t in ${LIT_PRECOMPILE_TESTS}; do +if [[ "${#LIT_PRECOMPILE_TESTS}" -gt 0 ]]; then + for t in "${LIT_PRECOMPILE_TESTS[@]}"; do t_path="libcudacxx/test/libcudacxx/${t}" if ! (set -x; LIBCUDACXX_SITE_CONFIG="${lit_site_cfg}" lit -v "-Dexecutor=NoopExecutor()" "${t_path}"); then echo "::endgroup::" @@ -142,8 +141,8 @@ if [[ -n "${LIT_PRECOMPILE_TESTS}" ]]; then done fi -if [[ -n "${LIT_TESTS}" ]]; then - for t in ${LIT_TESTS}; do +if [[ "${#LIT_TESTS}" -gt 0 ]]; then + for t in "${LIT_TESTS[@]}"; do t_path="libcudacxx/test/libcudacxx/${t}" if ! (set -x; LIBCUDACXX_SITE_CONFIG="${lit_site_cfg}" lit -v "${t_path}"); then echo "::endgroup::" diff --git a/ci/util/create_mock_job_env.sh b/ci/util/create_mock_job_env.sh index cd1d4721d9a..658a0979775 100755 --- a/ci/util/create_mock_job_env.sh +++ b/ci/util/create_mock_job_env.sh @@ -2,7 +2,7 @@ set -euo pipefail -readonly usage=$(cat < Allows the scripts in ci/util/workflow and ci/util/artifacts to run as though they are running in a CI environment. @@ -24,20 +24,21 @@ Caches and previously downloaded artifacts in /tmp are deleted to ensure a clean This is usually fine, but be might overwrite files in-use by other mock environments. EOF ) +readonly usage -if [ "$#" -ne 2 ]; then +if [[ "$#" -ne 2 ]]; then echo "Error: Invalid number of arguments." >&2 echo "$usage" >&2 exit 1 fi -if [ -n "${GITHUB_ACTIONS:-}" ]; then +if [[ -n "${GITHUB_ACTIONS:-}" ]]; then echo "$0: Detected another GITHUB_ACTIONS environment." >&2 echo "unset GITHUB_ACTIONS if this is intentional." >&2 exit 1 fi -if [ -z "${DEVCONTAINER_NAME:-}" ]; then +if [[ -z "${DEVCONTAINER_NAME:-}" ]]; then echo "This script must be run inside a devcontainer." 
>&2 exit 1 else @@ -51,7 +52,9 @@ export GITHUB_RUN_ID="$1" export JOB_ID="$2" ( + # shellcheck source=ci/util/workflow/common.sh source "$ci_dir/util/workflow/common.sh" + # shellcheck source=ci/util/artifacts/common.sh source "$ci_dir/util/artifacts/common.sh" rm -rf "$WORKFLOW_DIR" diff --git a/ci/util/extract_switches.sh b/ci/util/extract_switches.sh index 97f59d4ae81..2a9fd9b50ea 100755 --- a/ci/util/extract_switches.sh +++ b/ci/util/extract_switches.sh @@ -4,7 +4,8 @@ # # Example Usage: # new_args=$(extract_switches.sh -cpu-only -gpu-only -- "$@") -# eval set -- ${new_args} +# declare -a new_args="(${new_args})" +# set -- "${new_args[@]}" # while true; do # case "$1" in # -cpu-only) CPU_ONLY=true; shift;; @@ -28,8 +29,9 @@ for arg in "$@"; do Unrecognized switches are left in place. Example Usage: - new_args=$(extract_switches.sh -cpu-only -gpu-only -- "$@") - eval set -- ${new_args} + new_args="$(extract_switches.sh -cpu-only -gpu-only -- "$@")" + declare -a new_args="(${new_args})" + set -- "${new_args[@]}" while true; do case "$1" in -cpu-only) CPU_ONLY=true; shift;; @@ -56,7 +58,7 @@ found_switches=() other_args=() for arg in "$@"; do for switch in "${switches[@]}"; do - if [ "$arg" = "$switch" ]; then + if [[ "$arg" = "$switch" ]]; then found_switches+=("\"$arg\"") continue 2 fi @@ -64,4 +66,4 @@ for arg in "$@"; do other_args+=("\"$arg\"") done -echo "${found_switches[@]} -- ${other_args[@]}" +echo "${found_switches[*]} -- ${other_args[*]}" diff --git a/ci/util/git_bisect.sh b/ci/util/git_bisect.sh index 477574a4a7f..53120639242 100755 --- a/ci/util/git_bisect.sh +++ b/ci/util/git_bisect.sh @@ -281,6 +281,8 @@ if [[ "${found}" == "true" ]]; then echo "- Commit URL: https://github.com/NVIDIA/cccl/commit/${bad_commit}" if [[ -n "${GHA_LOG_URL:-}" ]]; then echo "- Bisection Logs: [GHA Job](${GHA_LOG_URL})" + fi + if [[ -n "${STEP_SUMMARY_URL:-}" ]]; then echo "- Bisection Summary: [GHA Report](${STEP_SUMMARY_URL})" fi echo diff --git a/ci/util/memmon.sh 
b/ci/util/memmon.sh index ad0dd73fdfc..63593af5989 100755 --- a/ci/util/memmon.sh +++ b/ci/util/memmon.sh @@ -49,8 +49,10 @@ ensure_absolute_log() { case "$log_file" in /*) return ;; *) - local dir="$(cd "$(dirname "$log_file")" && pwd)" - local base="$(basename "$log_file")" + local dir + dir="$(cd "$(dirname "$log_file")" && pwd)" + local base + base="$(basename "$log_file")" log_file="$dir/$base" ;; esac @@ -118,7 +120,8 @@ extract_target() { start_memmon() { # Check if already running if [[ -f "$pid_file" ]]; then - local existing_pid="$(<"$pid_file")" + local existing_pid + existing_pid="$(<"$pid_file")" if kill -0 "$existing_pid" 2>/dev/null; then error "already running (pid $existing_pid)" fi @@ -142,7 +145,8 @@ stop_memmon() { if [[ ! -f "$pid_file" ]]; then error "not running" fi - local running_pid="$(<"$pid_file")" + local running_pid + running_pid="$(<"$pid_file")" if ! kill -0 "$running_pid" 2>/dev/null; then rm -f "$pid_file" error "not running" @@ -179,8 +183,10 @@ monitor_mem() { ensure_absolute_log - local log_threshold_kib="$(to_kib "$log_threshold")" - local print_threshold_kib="$(to_kib "$print_threshold")" + local log_threshold_kib + log_threshold_kib="$(to_kib "$log_threshold")" + local print_threshold_kib + print_threshold_kib="$(to_kib "$print_threshold")" local running=true @@ -192,12 +198,14 @@ monitor_mem() { if [[ ${#MEMMON_MAX_RSS[@]} -eq 0 ]]; then printf "No processes exceeded %s GB\n" "$(format_threshold "$log_threshold")" else - local tmp="$(mktemp)" + local tmp + tmp="$(mktemp)" for pid in "${!MEMMON_MAX_RSS[@]}"; do printf "%s\t%s\t%s\t%s\n" "${MEMMON_MAX_RSS[$pid]}" "$pid" "${MEMMON_TARGET[$pid]}" "${MEMMON_CMD[$pid]}" >>"$tmp" done sort -nr -k1,1 "$tmp" | while IFS=$'\t' read -r peak pid target cmd; do - local mem_gib="$(format_gib "$peak")" + local mem_gib + mem_gib="$(format_gib "$peak")" printf "%s GB | %s | %s | %s\n" "$mem_gib" "$pid" "$target" "$cmd" done rm -f "$tmp" @@ -222,7 +230,8 @@ monitor_mem() { 
MEMMON_CMD[$pid]=$(get_cmdline "$pid") MEMMON_TARGET[$pid]=$(extract_target "${MEMMON_CMD[$pid]}") if (( rss >= print_threshold_kib )); then - local mem_gib="$(format_gib "$rss")" + local mem_gib + mem_gib="$(format_gib "$rss")" printf 'memmon: %s GB | %s | %s | %s\n' "$mem_gib" "$pid" "${MEMMON_TARGET[$pid]}" "${MEMMON_CMD[$pid]}" fi fi diff --git a/ci/util/retry.sh b/ci/util/retry.sh index b55b319bf62..90ea4ef8632 100755 --- a/ci/util/retry.sh +++ b/ci/util/retry.sh @@ -1,6 +1,6 @@ #!/bin/bash -if [ "$#" -lt 3 ]; then +if [[ "$#" -lt 3 ]]; then echo "Usage: $0 num_tries sleep_time command [args...]" echo " num_tries: Number of attempts to run the command" echo " sleep_time: Time to wait between attempts (in seconds)" @@ -20,12 +20,12 @@ for ((i=1; i<=num_tries; i++)); do status=0 eval "${command[*]}" || status=$? - if [ $status -eq 0 ]; then + if [[ "$status" -eq 0 ]]; then echo "Command '${command[*]}' succeeded on attempt ${i}." exit 0 else echo "Command '${command[*]}' failed with status ${status}. Retrying in ${sleep_time} seconds..." - sleep $sleep_time + sleep "$sleep_time" fi done echo "Command '${command[*]}' failed after ${num_tries} attempts." diff --git a/ci/util/version_compare.sh b/ci/util/version_compare.sh index 499fedbfc7a..5b7d3a183d6 100755 --- a/ci/util/version_compare.sh +++ b/ci/util/version_compare.sh @@ -2,7 +2,7 @@ set -euo pipefail -readonly usage=$(cat < A.B[.C[.D[...]]] Compares two version strings with the specified operator. @@ -16,8 +16,9 @@ compare_ops: gt - greater than EOF ) +readonly usage -if [ "$#" -ne 3 ]; then +if [[ "$#" -ne 3 ]]; then echo "Error: Invalid arguments: $*" >&2 echo "$usage" >&2 exit 1 @@ -51,7 +52,7 @@ fi IFS='.' read -r -a ver_a_parts <<< "$version_a" IFS='.' 
read -r -a ver_b_parts <<< "$version_b" max_length=${#ver_a_parts[@]} -if [ "${#ver_b_parts[@]}" -gt "$max_length" ]; then +if [[ "${#ver_b_parts[@]}" -gt "$max_length" ]]; then max_length=${#ver_b_parts[@]} fi @@ -78,6 +79,7 @@ case "$operator" in ne) [[ "$result" != "eq" ]] ;; ge) [[ "$result" == "gt" || "$result" == "eq" ]] ;; gt) [[ "$result" == "gt" ]] ;; + *) echo "Error: Unhandled operator '${operator}'." >&2; exit 1 ;; esac exit $? diff --git a/ci/util/workflow/common.sh b/ci/util/workflow/common.sh index 770730599b5..48fa0167239 100755 --- a/ci/util/workflow/common.sh +++ b/ci/util/workflow/common.sh @@ -7,7 +7,7 @@ if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then exit 1 fi -if [ -z "${GITHUB_ACTIONS:-}" ]; then +if [[ -z "${GITHUB_ACTIONS:-}" ]]; then echo "This script must be run in a GitHub Actions environment." >&2 exit 1 fi diff --git a/ci/util/workflow/get_consumers.sh b/ci/util/workflow/get_consumers.sh index a3edeb1839e..5c837f35d41 100755 --- a/ci/util/workflow/get_consumers.sh +++ b/ci/util/workflow/get_consumers.sh @@ -2,18 +2,21 @@ set -euo pipefail -readonly ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +readonly ci_dir +# shellcheck source=ci/util/workflow/common.sh source "$ci_dir/util/workflow/common.sh" -readonly usage=$(cat <&2 echo "$usage" >&2 exit 1 @@ -21,7 +24,7 @@ fi job_id="${1:-${JOB_ID:-}}" -if [ -z "$job_id" ]; then +if [[ -z "$job_id" ]]; then echo "Error: No job ID provided and \$JOB_ID is not set." 
>&2 echo "$usage" >&2 exit 1 diff --git a/ci/util/workflow/get_job_def.sh b/ci/util/workflow/get_job_def.sh index 478b5362291..2977ef56365 100755 --- a/ci/util/workflow/get_job_def.sh +++ b/ci/util/workflow/get_job_def.sh @@ -2,10 +2,12 @@ set -euo pipefail -readonly ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +readonly ci_dir +# shellcheck source=ci/util/workflow/common.sh source "$ci_dir/util/workflow/common.sh" -readonly usage=$(cat <&2 echo "$usage" >&2 exit 1 @@ -22,7 +25,7 @@ fi job_id="${1:-${JOB_ID:-}}" -if [ -z "$job_id" ]; then +if [[ -z "$job_id" ]]; then echo "Error: No job ID provided and \$JOB_ID is not set." >&2 echo "$usage" >&2 exit 1 @@ -40,7 +43,7 @@ job_obj=$(jq --arg job_id "$job_id" ' ) ' "$WORKFLOW_DIR/workflow.json") -if [ -z "$job_obj" ]; then +if [[ -z "$job_obj" ]]; then echo "Error: No job definition found for job ID '$job_id'." >&2 exit 1 fi diff --git a/ci/util/workflow/get_job_project.sh b/ci/util/workflow/get_job_project.sh index 4f101e170dc..a7d070fa581 100755 --- a/ci/util/workflow/get_job_project.sh +++ b/ci/util/workflow/get_job_project.sh @@ -2,9 +2,10 @@ set -euo pipefail -readonly ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +readonly ci_dir -readonly usage=$(cat <&2 echo "$usage" >&2 exit 1 @@ -21,7 +23,7 @@ fi job_id="${1:-${JOB_ID:-}}" -if [ -z "$job_id" ]; then +if [[ -z "$job_id" ]]; then echo "Error: No job ID provided and \$JOB_ID is not set." 
>&2 echo "$usage" >&2 exit 1 diff --git a/ci/util/workflow/get_producer_id.sh b/ci/util/workflow/get_producer_id.sh index 600efe18e04..90028bea302 100755 --- a/ci/util/workflow/get_producer_id.sh +++ b/ci/util/workflow/get_producer_id.sh @@ -2,10 +2,12 @@ set -euo pipefail -readonly ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +readonly ci_dir +# shellcheck source=ci/util/workflow/common.sh source "$ci_dir/util/workflow/common.sh" -readonly usage=$(cat <&2 echo "$usage" >&2 exit 1 @@ -22,7 +25,7 @@ fi job_id="${1:-${JOB_ID:-}}" -if [ -z "$job_id" ]; then +if [[ -z "$job_id" ]]; then echo "Error: No job ID provided and \$JOB_ID is not set." >&2 echo "$usage" >&2 exit 1 @@ -39,13 +42,13 @@ producers=$(jq --arg job_id "$job_id" ' ' "$WORKFLOW_DIR/workflow.json") producer_count=$(echo "$producers" | jq 'length') -if [ "$producer_count" -ne 1 ]; then +if [[ "$producer_count" -ne 1 ]]; then echo "Error: Expected exactly one producer for job ID '$job_id', but found ${producer_count:-0}." >&2 exit 1 fi producer_id=$(echo "$producers" | jq -r '.[0].id') -if [ -z "$producer_id" ]; then +if [[ -z "$producer_id" ]]; then echo "Error: No producer ID found for job ID '$job_id'." >&2 exit 1 fi diff --git a/ci/util/workflow/get_producers.sh b/ci/util/workflow/get_producers.sh index 17d3b367c73..60ac12aadc5 100755 --- a/ci/util/workflow/get_producers.sh +++ b/ci/util/workflow/get_producers.sh @@ -2,18 +2,21 @@ set -euo pipefail -readonly ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +readonly ci_dir +# shellcheck source=ci/util/workflow/common.sh source "$ci_dir/util/workflow/common.sh" -readonly usage=$(cat <&2 echo "$usage" >&2 exit 1 @@ -21,7 +24,7 @@ fi job_id="${1:-${JOB_ID:-}}" -if [ -z "$job_id" ]; then +if [[ -z "$job_id" ]]; then echo "Error: No job ID provided and \$JOB_ID is not set." 
>&2 echo "$usage" >&2 exit 1 diff --git a/ci/util/workflow/get_stable_job_hash.sh b/ci/util/workflow/get_stable_job_hash.sh index 4f91150f0e1..66eeda6c49f 100755 --- a/ci/util/workflow/get_stable_job_hash.sh +++ b/ci/util/workflow/get_stable_job_hash.sh @@ -2,9 +2,10 @@ set -euo pipefail -readonly ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +readonly ci_dir -readonly usage=$(cat <&2 echo "$usage" >&2 exit 1 @@ -22,7 +24,7 @@ fi job_id="${1:-${JOB_ID:-}}" -if [ -z "$job_id" ]; then +if [[ -z "$job_id" ]]; then echo "Error: No job ID provided and \$JOB_ID is not set." >&2 echo "$usage" >&2 exit 1 diff --git a/ci/util/workflow/get_wheel_artifact_name.sh b/ci/util/workflow/get_wheel_artifact_name.sh index b777de72a40..798ff8e22b2 100755 --- a/ci/util/workflow/get_wheel_artifact_name.sh +++ b/ci/util/workflow/get_wheel_artifact_name.sh @@ -2,9 +2,10 @@ set -euo pipefail -readonly ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +readonly ci_dir -readonly usage=$(cat <&2 echo "$usage" >&2 exit 1 @@ -21,7 +23,7 @@ fi job_id="${1:-${JOB_ID:-}}" -if [ -z "$job_id" ]; then +if [[ -z "$job_id" ]]; then echo "Error: No job ID provided and \$JOB_ID is not set." 
>&2 echo "$usage" >&2 exit 1 diff --git a/ci/util/workflow/has_consumers.sh b/ci/util/workflow/has_consumers.sh index f7aa94e9385..b91741d19d7 100755 --- a/ci/util/workflow/has_consumers.sh +++ b/ci/util/workflow/has_consumers.sh @@ -2,18 +2,21 @@ set -euo pipefail -readonly ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +readonly ci_dir +# shellcheck source=ci/util/workflow/common.sh source "$ci_dir/util/workflow/common.sh" -readonly usage=$(cat <&2 echo "$usage" >&2 exit 1 @@ -21,7 +24,7 @@ fi job_id="${1:-${JOB_ID:-}}" -if [ -z "$job_id" ]; then +if [[ -z "$job_id" ]]; then echo "Error: No job ID provided and \$JOB_ID is not set." >&2 echo "$usage" >&2 exit 1 @@ -37,7 +40,7 @@ matching_producer=$(jq --arg job_id "$job_id" ' | select(.id == $job_id) ' "$WORKFLOW_DIR/workflow.json") -if [ -n "$matching_producer" ]; then +if [[ -n "$matching_producer" ]]; then exit 0 else exit 1 diff --git a/ci/util/workflow/has_producers.sh b/ci/util/workflow/has_producers.sh index 89de5db8fcf..f2829091659 100755 --- a/ci/util/workflow/has_producers.sh +++ b/ci/util/workflow/has_producers.sh @@ -2,18 +2,21 @@ set -euo pipefail -readonly ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +readonly ci_dir +# shellcheck source=ci/util/workflow/common.sh source "$ci_dir/util/workflow/common.sh" -readonly usage=$(cat <&2 echo "$usage" >&2 exit 1 @@ -21,7 +24,7 @@ fi job_id="${1:-${JOB_ID:-}}" -if [ -z "$job_id" ]; then +if [[ -z "$job_id" ]]; then echo "Error: No job ID provided and \$JOB_ID is not set." 
>&2 echo "$usage" >&2 exit 1 @@ -37,7 +40,7 @@ matching_consumer=$(jq --arg job_id "$job_id" ' | select(.id == $job_id) ' "$WORKFLOW_DIR/workflow.json") -if [ -n "$matching_consumer" ]; then +if [[ -n "$matching_consumer" ]]; then exit 0 else exit 1 diff --git a/ci/util/workflow/initialize.sh b/ci/util/workflow/initialize.sh index 63a826492f5..b6e1a99b4f8 100755 --- a/ci/util/workflow/initialize.sh +++ b/ci/util/workflow/initialize.sh @@ -2,22 +2,25 @@ set -euo pipefail -readonly ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +ci_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" +readonly ci_dir +# shellcheck source=ci/util/workflow/common.sh source "$ci_dir/util/workflow/common.sh" -readonly usage=$(cat <&2 echo "$usage" >&2 exit 1 fi -if [ ! -f "$WORKFLOW_DIR/workflow.json" ]; then +if [[ ! -f "$WORKFLOW_DIR/workflow.json" ]]; then "$ci_dir/util/artifacts/download/fetch.sh" "$WORKFLOW_ARTIFACT" "$WORKFLOW_DIR" > /dev/null fi diff --git a/cub/test/cubin-check/dump_and_check.bash b/cub/test/cubin-check/dump_and_check.bash index 2677871407d..f9b6f9410da 100755 --- a/cub/test/cubin-check/dump_and_check.bash +++ b/cub/test/cubin-check/dump_and_check.bash @@ -1,4 +1,4 @@ #!/bin/bash -set -e +set -euo pipefail cuobjdump=$1; cubin=$2; testfile=$3 $cuobjdump -symbols "$cubin" | FileCheck --allow-empty "$testfile" diff --git a/cub/test/ptx-json/dump_and_check.bash b/cub/test/ptx-json/dump_and_check.bash index 8237dbc3928..e268920f4d1 100755 --- a/cub/test/ptx-json/dump_and_check.bash +++ b/cub/test/ptx-json/dump_and_check.bash @@ -1,10 +1,10 @@ #!/bin/bash -set -e +set -euo pipefail ## Usage: dump_and_check filter test.ptx test.cu JSON_ID -input_filter=$1 -input_ptx=$2 -input_testfile=$3 -input_json_id=$4 +input_filter="$1" +input_ptx="$2" +input_testfile="$3" +input_json_id="$4" -$input_filter $input_ptx $input_json_id | FileCheck $input_testfile +$input_filter "$input_ptx" "$input_json_id" | FileCheck "$input_testfile" diff --git 
a/docs/gen_docs.bash b/docs/gen_docs.bash index 17958ed8575..ab6c9add44d 100755 --- a/docs/gen_docs.bash +++ b/docs/gen_docs.bash @@ -12,7 +12,7 @@ # consistent documentation generation. The built Doxygen will be stored # in _build/doxygen-build/ and reused for subsequent runs. -set -e +set -euo pipefail ALLOW_DEP_INSTALL=false CLEAN=false @@ -27,7 +27,7 @@ for arg in "$@"; do esac done -SCRIPT_PATH=$(cd $(dirname ${0}); pwd -P) +SCRIPT_PATH=$(cd "$(dirname "${0}")"; pwd -P) cd "$SCRIPT_PATH" BUILDDIR="_build" @@ -35,10 +35,10 @@ DOXYGEN_BUILD_DIR="${SCRIPT_PATH}/_build/doxygen-build" DOXYGEN_SRC_DIR="${SCRIPT_PATH}/_build/doxygen-src" # Handle clean command (before dep checks — clean doesn't need deps) -if [ "$CLEAN" = true ]; then +if [[ "$CLEAN" = true ]]; then echo "Cleaning build directory..." rm -rf "${BUILDDIR:?}"/* - if [ "$CLEAN_ALL" = true ]; then + if [[ "$CLEAN_ALL" = true ]]; then echo "Also removing Doxygen source and build directories..." rm -rf "${DOXYGEN_SRC_DIR}" "${DOXYGEN_BUILD_DIR}" fi @@ -68,13 +68,13 @@ check_system_deps() { fi done - if [ ${#missing[@]} -eq 0 ]; then + if [[ ${#missing[@]} -eq 0 ]]; then return 0 fi echo "Missing system dependencies: ${missing[*]}" - if [ "$ALLOW_DEP_INSTALL" = true ]; then + if [[ "$ALLOW_DEP_INSTALL" = true ]]; then echo "Installing missing dependencies (--allow-dep-install)..." sudo apt-get update -qq sudo apt-get install -y -qq "${missing[@]}" @@ -95,11 +95,11 @@ check_system_deps # Configuration # Keep going to surface all warnings; -W makes warnings fail the build. -SPHINXOPTS="${SPHINXOPTS:---keep-going -W}" +declare -a SPHINXOPTS="(${SPHINXOPTS:---keep-going -W})" DOXYGEN_BIN="${DOXYGEN_BUILD_DIR}/bin/doxygen" # Use custom-built doxygen if available, otherwise fall back to system doxygen -if [ -f "${DOXYGEN_BIN}" ]; then +if [[ -f "${DOXYGEN_BIN}" ]]; then DOXYGEN="${DOXYGEN_BIN}" else DOXYGEN="${DOXYGEN:-doxygen}" @@ -110,25 +110,26 @@ rm -rf img mkdir -p img # Pull cub images -if [ ! 
-d cubimg ]; then +if [[ ! -d cubimg ]]; then git clone -b gh-pages https://github.com/NVlabs/cub.git cubimg fi -if [ ! -n "$(find cubimg -name 'example_range.png')" ]; then +if [[ -z "$(find cubimg -name 'example_range.png')" ]]; then wget -q https://raw.githubusercontent.com/NVIDIA/NVTX/release-v3/docs/images/example_range.png -O cubimg/example_range.png fi -if [ ! -n "$(find img -name '*.png')" ]; then +if [[ -z "$(find img -name '*.png')" ]]; then wget -q https://docs.nvidia.com/cuda/_static/Logo_and_CUDA.png -O img/logo.png # Parse files and collects unique names ending with .png - imgs=( $(grep -R -o -h '[[:alpha:][:digit:]_]*.png' ../cub/cub | uniq) ) + imgs="$(grep -R -o -h '[[:alpha:][:digit:]_]*.png' ../cub/cub | uniq)" + declare -a imgs="($imgs)" imgs+=( "cub_overview.png" "nested_composition.png" "tile.png" "blocked.png" "striped.png" ) for img in "${imgs[@]}" do - echo ${img} - cp cubimg/${img} img/${img} + echo "${img}" + cp cubimg/"${img}" img/"${img}" done fi @@ -137,7 +138,7 @@ build_doxygen() { echo "Building Doxygen 1.9.6..." # Clone Doxygen if not already cloned - if [ ! -d "${DOXYGEN_SRC_DIR}" ]; then + if [[ ! -d "${DOXYGEN_SRC_DIR}" ]]; then echo "Cloning Doxygen repository..." git clone https://github.com/doxygen/doxygen.git "${DOXYGEN_SRC_DIR}" fi @@ -186,7 +187,7 @@ build_doxygen() { } # Check if custom Doxygen needs to be built -if [ ! -f "${DOXYGEN_BIN}" ]; then +if [[ ! -f "${DOXYGEN_BIN}" ]]; then echo "Custom Doxygen 1.9.6 not found, building it now..." build_doxygen DOXYGEN="${DOXYGEN_BIN}" @@ -198,12 +199,14 @@ fi echo "Checking for documentation dependencies..." # Use virtual environment if it exists, otherwise create one -if [ -d "env" ]; then +if [[ -d "env" ]]; then echo "Using existing virtual environment..." + # shellcheck disable=SC1091 source env/bin/activate else echo "Creating virtual environment..." python3 -m venv env + # shellcheck disable=SC1091 source env/bin/activate fi @@ -218,14 +221,14 @@ if ! 
python -c "import sphinx" 2>/dev/null; then fi # Generate Doxygen XML in parallel (if doxygen is available) -if which ${DOXYGEN} > /dev/null 2>&1; then +if command -v "${DOXYGEN}" > /dev/null 2>&1; then echo "Generating Doxygen XML..." - mkdir -p ${BUILDDIR}/doxygen/cub ${BUILDDIR}/doxygen/thrust ${BUILDDIR}/doxygen/cudax ${BUILDDIR}/doxygen/libcudacxx + mkdir -p "${BUILDDIR}"/doxygen/cub "${BUILDDIR}"/doxygen/thrust "${BUILDDIR}"/doxygen/cudax "${BUILDDIR}"/doxygen/libcudacxx # Copy all images to Doxygen XML output directories where they're expected for project in cub thrust cudax libcudacxx; do - mkdir -p ${BUILDDIR}/doxygen/${project}/xml - cp img/*.png ${BUILDDIR}/doxygen/${project}/xml/ 2>/dev/null || true + mkdir -p "${BUILDDIR}"/doxygen/"${project}"/xml + cp img/*.png "${BUILDDIR}"/doxygen/"${project}"/xml/ 2>/dev/null || true done # Run all Doxygen builds in parallel, fail if any produce warnings/errors @@ -244,7 +247,7 @@ if which ${DOXYGEN} > /dev/null 2>&1; then doxygen_failed=1 fi done - if [ "$doxygen_failed" -ne 0 ]; then + if [[ "$doxygen_failed" -ne 0 ]]; then echo "Error: one or more Doxygen builds failed (see warnings above)" exit 1 fi @@ -257,7 +260,7 @@ fi # Build Sphinx HTML documentation echo "Building documentation with Sphinx..." # Use the virtual environment's Python -python -m sphinx.cmd.build -b html -d "${BUILDDIR}/doctrees" -j auto "." "${BUILDDIR}/html" ${SPHINXOPTS} +python -m sphinx.cmd.build -b html -d "${BUILDDIR}/doctrees" -j auto "." "${BUILDDIR}/html" "${SPHINXOPTS[@]}" # Reorganize output to include versioned directory and root assets VERSION="${SPHINX_CCCL_VER:-unstable}" @@ -275,7 +278,7 @@ cp -a "${ORIG_DIR}/." 
"${HTML_DIR}/${VERSION}/" rm -rf "${ORIG_DIR}" # Copy objects.inv to the root to support intersphinx consumers -if [ -f "${HTML_DIR}/${VERSION}/objects.inv" ]; then +if [[ -f "${HTML_DIR}/${VERSION}/objects.inv" ]]; then cp "${HTML_DIR}/${VERSION}/objects.inv" "${HTML_DIR}/objects.inv" fi diff --git a/docs/scrape_docs.bash b/docs/scrape_docs.bash index 9dff9f3be70..f54eedcc216 100755 --- a/docs/scrape_docs.bash +++ b/docs/scrape_docs.bash @@ -5,18 +5,17 @@ ## [path] is the starting point for searching for HTML. Ideally this is the siteroot ## script will find all HTML files and record them into a CSV file that can be used for searching docs. -set -e +set -euo pipefail -SCRIPT_PATH=$(cd $(dirname ${0}); pwd -P) +SCRIPT_PATH=$(cd "$(dirname "$0")"; pwd -P) -path_to_docs=$(realpath $1) +path_to_docs=$(realpath "$1") -cd $SCRIPT_PATH +cd "$SCRIPT_PATH" pages=$( - cd $path_to_docs; - # Embed a token to artificially limit search results - find ./ -iname "*.html" -printf '/%P(end),' - ) + cd "$path_to_docs"; + find ./ -iname "*.html" -printf '/%P,' +) -echo "$pages" > $path_to_docs/pagelist.txt +echo "$pages" > "$path_to_docs/pagelist.txt" diff --git a/libcudacxx/test/atomic_codegen/dump_and_check.bash b/libcudacxx/test/atomic_codegen/dump_and_check.bash index c66bb433637..d1c5f16b949 100755 --- a/libcudacxx/test/atomic_codegen/dump_and_check.bash +++ b/libcudacxx/test/atomic_codegen/dump_and_check.bash @@ -1,9 +1,9 @@ #!/bin/bash -set -e +set -euo pipefail ## Usage: dump_and_check test.a test.cu PREFIX -input_archive=$1 -input_testfile=$2 -input_prefix=$3 +input_archive="$1" +input_testfile="$2" +input_prefix="$3" -cuobjdump --dump-ptx $input_archive | FileCheck --match-full-lines --check-prefix $input_prefix $input_testfile +cuobjdump --dump-ptx "$input_archive" | FileCheck --match-full-lines --check-prefix "$input_prefix" "$input_testfile" diff --git a/libcudacxx/test/maintenance/all-internal-headers b/libcudacxx/test/maintenance/all-internal-headers index 
df8523603ac..f58f0e948fa 100755 --- a/libcudacxx/test/maintenance/all-internal-headers +++ b/libcudacxx/test/maintenance/all-internal-headers @@ -2,7 +2,7 @@ #this creates a set of tests, that ensures, that all internal headers can be build independently -set -e +set -euo pipefail internal_headers=$(find ../../include -path "*cuda/std/__*/*" -not -path "*/__cuda/*") @@ -15,7 +15,7 @@ do test_name=${test_name%.h}.pass.cpp mkdir -p -- "${test_name%/*}" - cat > $test_name << EOL + cat > "$test_name" << EOL //===----------------------------------------------------------------------===// // // Part of libcu++, the C++ Standard Library for your entire system, diff --git a/libcudacxx/test/maintenance/cuda-to-std b/libcudacxx/test/maintenance/cuda-to-std index f9266718b21..0c24bcf413b 100755 --- a/libcudacxx/test/maintenance/cuda-to-std +++ b/libcudacxx/test/maintenance/cuda-to-std @@ -1,15 +1,16 @@ #!/usr/bin/env bash -set -e +set -euo pipefail root_dir=$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd) stdlib_headers=$(<"${root_dir}/maintenance/stdlib-headers") +# shellcheck disable=SC2001 header_replacements=$(echo "${stdlib_headers}" | sed 's#<\(.*\)>#-e s::<\1>:g#') find "${root_dir}/test" -name "*.cpp" | while read -r file do - sed -i "${file}" ${header_replacements} + sed -i "${file}" "${header_replacements}" sed -i "${file}" -e 's/cuda::std::/std::/g' done diff --git a/libcudacxx/test/maintenance/std-to-cuda b/libcudacxx/test/maintenance/std-to-cuda index 4395820fd4d..0a54299e40f 100755 --- a/libcudacxx/test/maintenance/std-to-cuda +++ b/libcudacxx/test/maintenance/std-to-cuda @@ -1,15 +1,16 @@ #!/usr/bin/env bash -set -e +set -euo pipefail root_dir=$(cd "$(dirname "${BASH_SOURCE[0]}")/.." 
&& pwd) stdlib_headers=$(<"${root_dir}/maintenance/stdlib-headers") +# shellcheck disable=SC2001 header_replacements=$(echo "${stdlib_headers}" | sed 's#<\(.*\)>#-e s:<\1>::g#') find "${root_dir}/test" -name "*.cpp" | while read -r file do - sed -i "${file}" ${header_replacements} + sed -i "${file}" "${header_replacements}" perl -pi -e 's/((?