tools: lint shell scripts
PR-URL: https://github.com/nodejs/node/pull/36099
Reviewed-By: Rich Trott <rtrott@gmail.com>
This commit is contained in:
parent 4a6200e289
commit 0e96dc1f95
.github/workflows/linters.yml (vendored) | 7
@@ -77,6 +77,13 @@ jobs:
        run: |
          make lint-py-build || true
          NODE=$(command -v node) make lint-py
  lint-sh:
    runs-on: ubuntu-20.04
    steps:
      - uses: actions/checkout@v2
      - run: shellcheck -V
      - name: Lint Shell scripts
        run: tools/lint-sh.js .

  lint-codeowners:
    runs-on: ubuntu-latest

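The new lint-sh job checks out the tree, prints the shellcheck version, and runs the checker from the repository root. For local use, the usage text of tools/lint-sh.js (added later in this commit) points at the same entry point; the flags below come from that usage string, while the comments are only illustrative:

    ./tools/lint-sh.js .              # lint every .sh file under the current tree (needs shellcheck in PATH)
    ./tools/lint-sh.js . --from-npx   # resolve shellcheck through npx instead (may need network access)
    ./tools/lint-sh.js . --fix        # apply the fixes shellcheck suggests via git apply
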
@@ -1,4 +1,4 @@
#!/bin/bash
#!/bin/sh

set -xe

@@ -14,19 +14,19 @@ API_URL=https://api.github.com
COMMIT_QUEUE_LABEL='commit-queue'
COMMIT_QUEUE_FAILED_LABEL='commit-queue-failed'

function issueUrl() {
issueUrl() {
  echo "$API_URL/repos/${OWNER}/${REPOSITORY}/issues/${1}"
}

function labelsUrl() {
labelsUrl() {
  echo "$(issueUrl "${1}")/labels"
}

function commentsUrl() {
commentsUrl() {
  echo "$(issueUrl "${1}")/comments"
}

function gitHubCurl() {
gitHubCurl() {
  url=$1
  method=$2
  shift 2

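The `function name() { ... }` form is a bash/ksh extension; POSIX sh only specifies the plain `name() { ...; }` syntax, which is what these hunks switch to. A minimal sketch of the portable form (the function name is just an example):

    greet() {
      echo "hello $1"
    }
    greet world
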
@@ -67,6 +67,7 @@ for pr in "$@"; do
  if ! tail -n 10 output | grep '. Post "Landed in .*/pull/'"${pr}"; then
    gitHubCurl "$(labelsUrl "$pr")" POST --data '{"labels": ["'"${COMMIT_QUEUE_FAILED_LABEL}"'"]}'

    # shellcheck disable=SC2154
    cqurl="${GITHUB_SERVER_URL}/${OWNER}/${REPOSITORY}/actions/runs/${GITHUB_RUN_ID}"
    jq -n --arg content "<details><summary>Commit Queue failed</summary><pre>$(cat output)</pre><a href='$cqurl'>$cqurl</a></details>" '{body: $content}' > output.json
    cat output.json

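The added `# shellcheck disable=SC2154` line is a ShellCheck directive: placed immediately above a command, it silences the named warning for that command only. SC2154 flags variables that are referenced but never assigned, which is expected here because GITHUB_SERVER_URL and GITHUB_RUN_ID are injected by the Actions environment. A generic sketch with a made-up variable name:

    # shellcheck disable=SC2154
    echo "$VAR_FROM_CI"   # VAR_FROM_CI is assumed to come from the environment
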
@@ -1,4 +1,4 @@
#!/bin/bash
#!/bin/sh

set -xe

@@ -10,15 +10,15 @@ REQUEST_CI_LABEL='request-ci'
REQUEST_CI_FAILED_LABEL='request-ci-failed'
shift 3

function issueUrl() {
issueUrl() {
  echo "$API_URL/repos/${OWNER}/${REPOSITORY}/issues/${1}"
}

function labelsUrl() {
labelsUrl() {
  echo "$(issueUrl "${1}")/labels"
}

function commentsUrl() {
commentsUrl() {
  echo "$(issueUrl "${1}")/comments"
}

@@ -33,7 +33,7 @@ for pr in "$@"; do
  ncu-ci run "$pr" >output 2>&1 || ci_started=no
  cat output

  if [ "$ci_started" == "no" ]; then
  if [ "$ci_started" = "no" ]; then
    # Do we need to reset?
    curl -sL --request PUT \
      --url "$(labelsUrl "$pr")" \

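Inside `[ ... ]` the POSIX string comparison operator is a single `=`; `==` is a bashism that other /bin/sh implementations (dash, for example) may reject, hence the change above. A small sketch of the portable test, with an illustrative variable:

    status="no"
    if [ "$status" = "no" ]; then
      echo "CI did not start"
    fi
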
@@ -37,15 +37,15 @@
echo "Searching $1 to write out expfile to $2"

# This special sequence must be at the start of the exp file.
echo "#!." > $2.tmp
echo "#!." > "$2.tmp"

# Pull the symbols from the .a files.
find $1 -name "*.a" | grep -v gtest \
find "$1" -name "*.a" | grep -v gtest \
  | xargs nm -Xany -BCpg \
  | awk '{
      if ((($2 == "T") || ($2 == "D") || ($2 == "B")) &&
        (substr($3,1,1) != ".")) { print $3 }
    }' \
  | sort -u >> $2.tmp
  | sort -u >> "$2.tmp"

mv -f $2.tmp $2
mv -f "$2.tmp" "$2"

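Quoting `$1` and `$2` here addresses ShellCheck's SC2086: an unquoted expansion is word-split and glob-expanded, so a path containing spaces or wildcard characters falls apart. A sketch with a hypothetical path:

    dir="build output"   # a path with a space, for illustration
    ls $dir              # splits into two arguments: `build` and `output`
    ls "$dir"            # stays one argument, as intended
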
@@ -1,4 +1,4 @@
#!/usr/bin/env bash
#!/bin/sh

# Shell script to lint the message of the first commit in a pull request.
#

@@ -13,7 +13,7 @@ PR_ID=$1;
if [ -z "${PR_ID}" ]; then
  # Attempt to work out the PR number based on current HEAD
  if HEAD_COMMIT="$( git rev-parse HEAD )"; then
    if SEARCH_RESULTS="$( curl -s ${GH_API_URL}/search/issues?q=sha:${HEAD_COMMIT}+type:pr+repo:nodejs/node )"; then
    if SEARCH_RESULTS="$( curl -s "${GH_API_URL}/search/issues?q=sha:${HEAD_COMMIT}+type:pr+repo:nodejs/node" )"; then
      if FOUND_PR="$( node -p 'JSON.parse(process.argv[1]).items[0].number' "${SEARCH_RESULTS}" 2> /dev/null )"; then
        PR_ID=${FOUND_PR}
      fi

@@ -26,13 +26,13 @@ if [ -z "${PR_ID}" ]; then
  exit 1
fi

PATCH=$( curl -sL https://github.com/nodejs/node/pull/${PR_ID}.patch | grep '^From ' )
PATCH=$( curl -sL "https://github.com/nodejs/node/pull/${PR_ID}.patch" | grep '^From ' )
if FIRST_COMMIT="$( echo "$PATCH" | awk '/^From [0-9a-f]{40} / { if (count++ == 0) print $2 }' )"; then
  MESSAGE=$( git show --quiet --format='format:%B' $FIRST_COMMIT )
  MESSAGE=$( git show --quiet --format='format:%B' "$FIRST_COMMIT" )
  echo "
*** Linting the first commit message for pull request ${PR_ID}
*** according to the guidelines at https://goo.gl/p2fr5Q.
*** Commit message for $(echo $FIRST_COMMIT | cut -c 1-10) is:
*** Commit message for $(echo "$FIRST_COMMIT" | cut -c 1-10) is:
${MESSAGE}
"
  npx -q core-validate-commit --no-validate-metadata "${FIRST_COMMIT}"

tools/lint-sh.js (new executable file) | 185
@@ -0,0 +1,185 @@
#!/usr/bin/env node
'use strict';

const { execSync, spawn } = require('child_process');
const { promises: fs, readdirSync, statSync } = require('fs');
const { extname, join, relative, resolve } = require('path');

const FIX_MODE_ENABLED = process.argv.includes('--fix');
const USE_NPX = process.argv.includes('--from-npx');

const SHELLCHECK_EXE_NAME = 'shellcheck';
const SHELLCHECK_OPTIONS = ['--shell=sh', '--severity=info', '--enable=all'];
if (FIX_MODE_ENABLED) SHELLCHECK_OPTIONS.push('--format=diff');
else if (process.env.GITHUB_ACTIONS) SHELLCHECK_OPTIONS.push('--format=json');

const SPAWN_OPTIONS = {
  cwd: null,
  shell: false,
  stdio: ['pipe', 'pipe', 'inherit'],
};

function* findScriptFilesRecursively(dirPath) {
  const entries = readdirSync(dirPath, { withFileTypes: true });

  for (const entry of entries) {
    const path = join(dirPath, entry.name);

    if (
      entry.isDirectory() &&
      entry.name !== 'build' &&
      entry.name !== 'changelogs' &&
      entry.name !== 'deps' &&
      entry.name !== 'fixtures' &&
      entry.name !== 'gyp' &&
      entry.name !== 'inspector_protocol' &&
      entry.name !== 'node_modules' &&
      entry.name !== 'out' &&
      entry.name !== 'tmp'
    ) {
      yield* findScriptFilesRecursively(path);
    } else if (entry.isFile() && extname(entry.name) === '.sh') {
      yield path;
    }
  }
}

const expectedHashBang = Buffer.from('#!/bin/sh\n');
async function hasInvalidHashBang(fd) {
  const { length } = expectedHashBang;

  const actual = Buffer.allocUnsafe(length);
  await fd.read(actual, 0, length, 0);

  return Buffer.compare(actual, expectedHashBang);
}

async function checkFiles(...files) {
  const flags = FIX_MODE_ENABLED ? 'r+' : 'r';
  await Promise.all(
    files.map(async (file) => {
      const fd = await fs.open(file, flags);
      if (await hasInvalidHashBang(fd)) {
        if (FIX_MODE_ENABLED) {
          const file = await fd.readFile();

          const fileContent =
            file[0] === '#'.charCodeAt() ?
              file.subarray(file.indexOf('\n') + 1) :
              file;

          const toWrite = Buffer.concat([expectedHashBang, fileContent]);
          await fd.truncate(toWrite.length);
          await fd.write(toWrite, 0, toWrite.length, 0);
        } else {
          if (!process.exitCode) process.exitCode = 1;
          console.error(
            (process.env.GITHUB_ACTIONS ?
              `::error file=${file},line=1,col=1::` :
              'Fixable with --fix: ') +
            `Invalid hashbang for ${file} (expected /bin/sh).`
          );
        }
      }
      await fd.close();
    })
  );

  const stdout = await new Promise((resolve, reject) => {
    const SHELLCHECK_EXE =
      process.env.SHELLCHECK ||
      execSync('command -v ' + (USE_NPX ? 'npx' : SHELLCHECK_EXE_NAME))
        .toString()
        .trim();
    const NPX_OPTIONS = USE_NPX ? [SHELLCHECK_EXE_NAME] : [];

    const shellcheck = spawn(
      SHELLCHECK_EXE,
      [
        ...NPX_OPTIONS,
        ...SHELLCHECK_OPTIONS,
        ...(FIX_MODE_ENABLED ?
          files.map((filePath) => relative(SPAWN_OPTIONS.cwd, filePath)) :
          files),
      ],
      SPAWN_OPTIONS
    );
    shellcheck.once('error', reject);

    let json = '';
    let childProcess = shellcheck;
    if (FIX_MODE_ENABLED) {
      const GIT_EXE =
        process.env.GIT || execSync('command -v git').toString().trim();

      const gitApply = spawn(GIT_EXE, ['apply'], SPAWN_OPTIONS);
      shellcheck.stdout.pipe(gitApply.stdin);
      shellcheck.once('exit', (code) => {
        if (!process.exitCode && code) process.exitCode = code;
      });
      gitApply.stdout.pipe(process.stdout);

      gitApply.once('error', reject);
      childProcess = gitApply;
    } else if (process.env.GITHUB_ACTIONS) {
      shellcheck.stdout.on('data', (chunk) => {
        json += chunk;
      });
    } else {
      shellcheck.stdout.pipe(process.stdout);
    }
    childProcess.once('exit', (code) => {
      if (!process.exitCode && code) process.exitCode = code;
      resolve(json);
    });
  });

  if (!FIX_MODE_ENABLED && process.env.GITHUB_ACTIONS) {
    const data = JSON.parse(stdout);
    for (const { file, line, column, message } of data) {
      console.error(
        `::error file=${file},line=${line},col=${column}::` +
        `${file}:${line}:${column}: ${message}`
      );
    }
  }
}

const USAGE_STR =
  `Usage: ${process.argv[1]} <path> [--fix] [--from-npx]\n` +
  '\n' +
  'Environment variables:\n' +
  ' - SHELLCHECK: absolute path to `shellcheck`. If not provided, the\n' +
  ' script will use the result of `command -v shellcheck`, or\n' +
  ' `$(command -v npx) shellcheck` if the flag `--from-npx` is provided\n' +
  ' (may require an internet connection).\n' +
  ' - GIT: absolute path to `git`. If not provided, the \n' +
  ' script will use the result of `command -v git`.\n';

if (
  process.argv.length < 3 ||
  process.argv.includes('-h') ||
  process.argv.includes('--help')
) {
  console.log(USAGE_STR);
} else {
  console.log('Running Shell scripts checker...');
  const entryPoint = resolve(process.argv[2]);
  const stats = statSync(entryPoint, { throwIfNoEntry: false });

  function onError(e) {
    console.log(USAGE_STR);
    console.error(e);
    process.exitCode = 1;
  }
  if (stats?.isDirectory()) {
    SPAWN_OPTIONS.cwd = entryPoint;
    checkFiles(...findScriptFilesRecursively(entryPoint)).catch(onError);
  } else if (stats?.isFile()) {
    SPAWN_OPTIONS.cwd = process.cwd();
    checkFiles(entryPoint).catch(onError);
  } else {
    onError(new Error('You must provide a valid directory or file path. ' +
      `Received '${process.argv[2]}'.`));
  }
}

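In --fix mode the script asks shellcheck for a unified diff and pipes it straight into `git apply`; run by hand, the equivalent pipeline would look roughly like the line below (the file name is only a placeholder):

    shellcheck --shell=sh --severity=info --enable=all --format=diff myscript.sh | git apply
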
@@ -1,4 +1,4 @@
#!/bin/bash
#!/bin/sh
# Script that adds rules to Mac OS X Socket Firewall to avoid
# popups asking to accept incoming network connections when
# running tests.

@@ -1,16 +1,18 @@
#!/bin/bash -xe
#!/bin/sh

set -xe

BUILD_ARCH_TYPE=$1
V8_BUILD_OPTIONS=$2

cd deps/v8
find . -type d -name .git | xargs rm -rf
cd deps/v8 || exit
find . -type d -name .git -print0 | xargs -0 rm -rf
tools/node/fetch_deps.py .

ARCH="`arch`"
if [[ "$ARCH" == "s390x" ]] || [[ "$ARCH" == "ppc64le" ]]; then
if [ "$ARCH" = "s390x" ] || [ "$ARCH" = "ppc64le" ]; then
  TARGET_ARCH=$ARCH
  if [[ "$ARCH" == "ppc64le" ]]; then
  if [ "$ARCH" = "ppc64le" ]; then
    TARGET_ARCH="ppc64"
  fi
  # set paths manually for now to use locally installed gn

@@ -18,20 +20,22 @@ if [[ "$ARCH" == "s390x" ]] || [[ "$ARCH" == "ppc64le" ]]; then
  export LD_LIBRARY_PATH=$BUILD_TOOLS:$LD_LIBRARY_PATH
  # Avoid linking to ccache symbolic links as ccache decides which
  # binary to run based on the name of the link (we always name them gcc/g++).
  CC_PATH=`which -a $CC gcc | grep -v ccache | head -n 1`
  CXX_PATH=`which -a $CXX g++ | grep -v ccache | head -n 1`
  # shellcheck disable=SC2154
  CC_PATH=`command -v "$CC" gcc | grep -v ccache | head -n 1`
  # shellcheck disable=SC2154
  CXX_PATH=`command -v "$CXX" g++ | grep -v ccache | head -n 1`
  rm -f "$BUILD_TOOLS/g++"
  rm -f "$BUILD_TOOLS/gcc"
  ln -s $CXX_PATH "$BUILD_TOOLS/g++"
  ln -s $CC_PATH "$BUILD_TOOLS/gcc"
  ln -s "$CXX_PATH" "$BUILD_TOOLS/g++"
  ln -s "$CC_PATH" "$BUILD_TOOLS/gcc"
  export PATH=$BUILD_TOOLS:$PATH

  g++ --version
  gcc --version
  export PKG_CONFIG_PATH=$BUILD_TOOLS/pkg-config
  gn gen -v out.gn/$BUILD_ARCH_TYPE --args="is_component_build=false is_debug=false use_goma=false goma_dir=\"None\" use_custom_libcxx=false v8_target_cpu=\"$TARGET_ARCH\" target_cpu=\"$TARGET_ARCH\" v8_enable_backtrace=true"
  ninja -v -C out.gn/$BUILD_ARCH_TYPE d8 cctest inspector-test
  gn gen -v "out.gn/$BUILD_ARCH_TYPE" --args="is_component_build=false is_debug=false use_goma=false goma_dir=\"None\" use_custom_libcxx=false v8_target_cpu=\"$TARGET_ARCH\" target_cpu=\"$TARGET_ARCH\" v8_enable_backtrace=true"
  ninja -v -C "out.gn/$BUILD_ARCH_TYPE" d8 cctest inspector-test
else
  PATH=~/_depot_tools:$PATH tools/dev/v8gen.py $BUILD_ARCH_TYPE --no-goma $V8_BUILD_OPTIONS
  PATH=~/_depot_tools:$PATH ninja -C out.gn/$BUILD_ARCH_TYPE/ d8 cctest inspector-test
  PATH=~/_depot_tools:$PATH tools/dev/v8gen.py "$BUILD_ARCH_TYPE" --no-goma "$V8_BUILD_OPTIONS"
  PATH=~/_depot_tools:$PATH ninja -C "out.gn/$BUILD_ARCH_TYPE/" d8 cctest inspector-test
fi

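`find ... -print0 | xargs -0` hands file names to xargs separated by NUL bytes rather than whitespace, so paths containing spaces or newlines survive the hand-off (this is the case ShellCheck warns about, SC2038, with a plain `find | xargs`). A small sketch with an illustrative pattern:

    # remove every *.tmp file, even if a name contains spaces or newlines
    find . -type f -name '*.tmp' -print0 | xargs -0 rm -f
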
@@ -1,6 +1,6 @@
#!/bin/sh
# TODO Can this be done inside the .pmdoc?
# TODO Can we extract $PREFIX from the installer?
cd /usr/local/bin
cd /usr/local/bin || exit
ln -sf ../lib/node_modules/npm/bin/npm-cli.js npm
ln -sf ../lib/node_modules/npm/bin/npx-cli.js npx

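Appending `|| exit` is the usual answer to ShellCheck's SC2164: if the `cd` fails (missing directory, permissions), the script stops instead of running the remaining commands in whatever directory it happens to be in. A sketch with a placeholder path:

    cd /some/dir || exit   # bail out rather than operate in the wrong directory
    rm -f ./*.log
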
@@ -16,7 +16,7 @@

set -e

TOOLSDIR=`dirname $0`
TOOLSDIR=`dirname "$0"`
TOPLEVELDIR="$TOOLSDIR/../.."

RPMBUILD_PATH="${RPMBUILD_PATH:-$HOME/rpmbuild}"

@@ -38,7 +38,7 @@ fi
set -x

sed -re "s/%define _version .+/%define _version ${VERSION}/" \
  "$TOOLSDIR/node.spec" > $RPMBUILD_PATH/SPECS/node.spec
  "$TOOLSDIR/node.spec" > "$RPMBUILD_PATH/SPECS/node.spec"
tar --exclude-vcs --transform="s|^|node-${VERSION}/|" \
  -czf $RPMBUILD_PATH/SOURCES/node-v${VERSION}.tar.gz .
rpmbuild $* -ba $RPMBUILD_PATH/SPECS/node.spec
  -czf "$RPMBUILD_PATH/SOURCES/node-v${VERSION}.tar.gz" .
rpmbuild "$*" -ba "$RPMBUILD_PATH/SPECS/node.spec"

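Quoting the expansions above silences ShellCheck's unquoted-variable warnings; note that "$*" joins all positional parameters into a single word while "$@" keeps them separate, a difference that only shows up when more than one argument is passed. An illustration with made-up arguments:

    set -- --quiet --define 'debug 1'   # pretend these were passed to the script
    printf '<%s>\n' "$*"                # one word:    <--quiet --define debug 1>
    printf '<%s>\n' "$@"                # three words: <--quiet> <--define> <debug 1>
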
@@ -1,4 +1,4 @@
#!/usr/bin/env bash
#!/bin/sh

# Shell script to update ESLint in the source tree to the latest release.

@@ -7,24 +7,28 @@
# This script must be be in the tools directory when it runs because it uses
# $BASH_SOURCE[0] to determine directories to work in.

cd "$( dirname "${BASH_SOURCE[0]}" )"
cd "$( dirname "$0" )" || exit
rm -rf node_modules/eslint
mkdir eslint-tmp
cd eslint-tmp
npm init --yes
{
mkdir eslint-tmp
cd eslint-tmp || exit
npm init --yes

npm install --global-style --no-bin-links --production --no-package-lock eslint@latest
cd node_modules/eslint
npm install --global-style --no-bin-links --production --no-package-lock eslint@latest

npm install --no-bin-links --production --no-package-lock eslint-plugin-markdown@latest
cd ../..
{
cd node_modules/eslint || exit

# Use dmn to remove some unneeded files.
npx dmn@2.2.2 -f clean
# Use removeNPMAbsolutePaths to remove unused data in package.json.
# This avoids churn as absolute paths can change from one dev to another.
npx removeNPMAbsolutePaths@1.0.4 .
npm install --no-bin-links --production --no-package-lock eslint-plugin-markdown@latest
}


# Use dmn to remove some unneeded files.
npx dmn@2.2.2 -f clean
# Use removeNPMAbsolutePaths to remove unused data in package.json.
# This avoids churn as absolute paths can change from one dev to another.
npx removeNPMAbsolutePaths@1.0.4 .
}

cd ..
mv eslint-tmp/node_modules/eslint node_modules/eslint
rm -rf eslint-tmp/

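`${BASH_SOURCE[0]}` exists only in bash; under plain /bin/sh the conventional substitute is `$0`, which resolves to the script path as long as the script is executed rather than sourced. A minimal sketch of the portable "cd to the script's own directory" idiom used above:

    #!/bin/sh
    cd "$(dirname "$0")" || exit
    pwd   # now prints the directory containing the script
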
@@ -1,4 +1,4 @@
#!/usr/bin/env bash
#!/bin/sh
set -e
# Shell script to update npm in the source tree to a specific version