Use GitHub Actions for release (#3309)

* move scripts and tone down Travis
* Update and rename main.yml to push.yml
* Create release.yml

Parent: caa391ab34
Commit: ed96d2a1b7
tools/ci/on-push.sh → .github/scripts/on-push.sh  (vendored, 11 changed lines)

@@ -28,11 +28,14 @@ elif [ "$CHUNK_INDEX" -eq "$CHUNKS_CNT" ]; then
     BUILD_PIO=1
 fi
 
+echo "Updating submodules ..."
+git -C "$GITHUB_WORKSPACE" submodule update --init --recursive > /dev/null 2>&1
+
 if [ "$BUILD_PIO" -eq 0 ]; then
     # ArduinoIDE Test
     FQBN="espressif:esp32:esp32:PSRAM=enabled,PartitionScheme=huge_app"
-    source ./tools/ci/install-arduino-ide.sh
-    source ./tools/ci/install-arduino-core-esp32.sh
+    source ./.github/scripts/install-arduino-ide.sh
+    source ./.github/scripts/install-arduino-core-esp32.sh
     if [ "$OS_IS_WINDOWS" == "1" ]; then
         build_sketch "$FQBN" "$ARDUINO_ESP32_PATH/libraries/WiFiClientSecure/examples/WiFiClientSecure/WiFiClientSecure.ino" && \
         build_sketch "$FQBN" "$ARDUINO_ESP32_PATH/libraries/BLE/examples/BLE_server/BLE_server.ino" && \
@@ -48,14 +51,14 @@ if [ "$BUILD_PIO" -eq 0 ]; then
     else
         # CMake Test
         if [ "$CHUNK_INDEX" -eq 0 ]; then
-            bash "$ARDUINO_ESP32_PATH/tools/ci/check-cmakelists.sh"
+            bash "$ARDUINO_ESP32_PATH/.github/scripts/check-cmakelists.sh"
             if [ $? -ne 0 ]; then exit 1; fi
         fi
         build_sketches "$FQBN" "$ARDUINO_ESP32_PATH/libraries" "$CHUNK_INDEX" "$CHUNKS_CNT"
     fi
 else
     # PlatformIO Test
-    source ./tools/ci/install-platformio-esp32.sh
+    source ./.github/scripts/install-platformio-esp32.sh
     BOARD="esp32dev"
     build_pio_sketch "$BOARD" "$PLATFORMIO_ESP32_PATH/libraries/WiFi/examples/WiFiClient/WiFiClient.ino" && \
     build_pio_sketch "$BOARD" "$PLATFORMIO_ESP32_PATH/libraries/WiFiClientSecure/examples/WiFiClientSecure/WiFiClientSecure.ino" && \
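
Both CI systems call this script as on-push.sh <chunk index> <chunk count>; the hunks above only relocate the helper scripts it sources. As a rough illustration of what chunked building means, a sketch list can be split across parallel jobs like the sketch below; the file discovery and arithmetic are assumptions for the example, not the repository's build_sketches implementation.

#!/bin/bash
# Illustrative only: how a chunked CI job can pick its share of sketches.
CHUNK_INDEX="$1"     # chunk number passed by the CI matrix (0-based)
CHUNKS_CNT="$2"      # total number of parallel chunks

sketches=$(find libraries -name '*.ino' | sort)
total=$(echo "$sketches" | wc -l)
chunk_size=$(( (total + CHUNKS_CNT - 1) / CHUNKS_CNT ))
start=$(( CHUNK_INDEX * chunk_size + 1 ))
end=$(( start + chunk_size - 1 ))

echo "$sketches" | sed -n "${start},${end}p" | while read -r sketch; do
    echo "would build: $sketch"
done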

.github/scripts/on-release.sh  (new executable file, vendored, 380 lines added)

#!/bin/bash

if [ ! $GITHUB_EVENT_NAME == "release" ]; then
    echo "Wrong event '$GITHUB_EVENT_NAME'!"
    exit 1
fi

EVENT_JSON=`cat $GITHUB_EVENT_PATH`

action=`echo $EVENT_JSON | jq -r '.action'`
if [ ! $action == "published" ]; then
    echo "Wrong action '$action'. Exiting now..."
    exit 0
fi

draft=`echo $EVENT_JSON | jq -r '.release.draft'`
if [ $draft == "true" ]; then
    echo "It's a draft release. Exiting now..."
    exit 0
fi

RELEASE_PRE=`echo $EVENT_JSON | jq -r '.release.prerelease'`
RELEASE_TAG=`echo $EVENT_JSON | jq -r '.release.tag_name'`
RELEASE_BRANCH=`echo $EVENT_JSON | jq -r '.release.target_commitish'`
RELEASE_ID=`echo $EVENT_JSON | jq -r '.release.id'`
RELEASE_BODY=`echo $EVENT_JSON | jq -r '.release.body'`

OUTPUT_DIR="$GITHUB_WORKSPACE/build"
PACKAGE_NAME="esp32-$RELEASE_TAG"
PACKAGE_JSON_MERGE="$GITHUB_WORKSPACE/.github/scripts/merge_packages.py"
PACKAGE_JSON_TEMPLATE="$GITHUB_WORKSPACE/package/package_esp32_index.template.json"
PACKAGE_JSON_DEV="package_esp32_dev_index.json"
PACKAGE_JSON_REL="package_esp32_index.json"

echo "Event: $GITHUB_EVENT_NAME, Repo: $GITHUB_REPOSITORY, Path: $GITHUB_WORKSPACE, Ref: $GITHUB_REF"
echo "Action: $action, Branch: $RELEASE_BRANCH, ID: $RELEASE_ID"
echo "Tag: $RELEASE_TAG, Draft: $draft, Pre-Release: $RELEASE_PRE"

function get_file_size(){
    local file="$1"
    if [[ "$OSTYPE" == "darwin"* ]]; then
        eval `stat -s "$file"`
        local res="$?"
        echo "$st_size"
        return $res
    else
        stat --printf="%s" "$file"
        return $?
    fi
}

function git_upload_asset(){
    local name=$(basename "$1")
    # local mime=$(file -b --mime-type "$1")
    curl -k -X POST -sH "Authorization: token $GITHUB_TOKEN" -H "Content-Type: application/octet-stream" --data-binary @"$1" "https://uploads.github.com/repos/$GITHUB_REPOSITORY/releases/$RELEASE_ID/assets?name=$name"
}

function git_safe_upload_asset(){
    local file="$1"
    local name=$(basename "$file")
    local size=`get_file_size "$file"`
    local upload_res=`git_upload_asset "$file"`
    if [ $? -ne 0 ]; then
        >&2 echo "ERROR: Failed to upload '$name' ($?)"
        return 1
    fi
    up_size=`echo "$upload_res" | jq -r '.size'`
    if [ $up_size -ne $size ]; then
        >&2 echo "ERROR: Uploaded size does not match! $up_size != $size"
        #git_delete_asset
        return 1
    fi
    echo "$upload_res" | jq -r '.browser_download_url'
    return $?
}
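
# The two asset helpers above wrap the GitHub release-asset upload API:
# git_upload_asset() POSTs the raw file to uploads.github.com for this
# release id, and git_safe_upload_asset() only reports success when the
# '.size' echoed back by the API matches the local get_file_size() result,
# printing the asset's '.browser_download_url' for later use.
# A hypothetical call site (file name made up for the example):
#   url=`git_safe_upload_asset "$OUTPUT_DIR/esp32-x.y.z.zip"` || exit 1
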
function git_upload_to_pages(){
    local path=$1
    local src=$2

    if [ ! -f "$src" ]; then
        >&2 echo "Input is not a file! Aborting..."
        return 1
    fi

    local info=`curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.object+json" -X GET "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path?ref=gh-pages"`
    local type=`echo "$info" | jq -r '.type'`
    local message=$(basename $path)
    local sha=""
    local content=""

    if [ $type == "file" ]; then
        sha=`echo "$info" | jq -r '.sha'`
        sha=",\"sha\":\"$sha\""
        message="Updating $message"
    elif [ ! $type == "null" ]; then
        >&2 echo "Wrong type '$type'"
        return 1
    else
        message="Creating $message"
    fi

    content=`base64 -i "$src"`
    data="{\"branch\":\"gh-pages\",\"message\":\"$message\",\"content\":\"$content\"$sha}"

    echo "$data" | curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.raw+json" -X PUT --data @- "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path"
}

function git_safe_upload_to_pages(){
    local path=$1
    local file="$2"
    local name=$(basename "$file")
    local size=`get_file_size "$file"`
    local upload_res=`git_upload_to_pages "$path" "$file"`
    if [ $? -ne 0 ]; then
        >&2 echo "ERROR: Failed to upload '$name' ($?)"
        return 1
    fi
    up_size=`echo "$upload_res" | jq -r '.content.size'`
    if [ $up_size -ne $size ]; then
        >&2 echo "ERROR: Uploaded size does not match! $up_size != $size"
        #git_delete_asset
        return 1
    fi
    echo "$upload_res" | jq -r '.content.download_url'
    return $?
}

function merge_package_json(){
    local jsonLink=$1
    local jsonOut=$2
    local old_json=$OUTPUT_DIR/oldJson.json
    local merged_json=$OUTPUT_DIR/mergedJson.json

    echo "Downloading previous JSON $jsonLink ..."
    curl -L -o "$old_json" "https://github.com/$GITHUB_REPOSITORY/releases/download/$jsonLink?access_token=$GITHUB_TOKEN" 2>/dev/null
    if [ $? -ne 0 ]; then echo "ERROR: Download Failed! $?"; exit 1; fi

    echo "Creating new JSON ..."
    set +e
    stdbuf -oL python "$PACKAGE_JSON_MERGE" "$jsonOut" "$old_json" > "$merged_json"
    set -e

    set -v
    if [ ! -s $merged_json ]; then
        rm -f "$merged_json"
        echo "Nothing to merge"
    else
        rm -f "$jsonOut"
        mv "$merged_json" "$jsonOut"
        echo "JSON data successfully merged"
    fi
    rm -f "$old_json"
    set +v
}
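
# The gh-pages helpers above use the GitHub contents API: a GET on
# /contents/<path>?ref=gh-pages fetches the existing blob's 'sha' (required
# when overwriting a file), and the PUT then sends
# {"branch":"gh-pages","message":...,"content":<base64>[,"sha":...]}.
# merge_package_json() pulls the JSON attached to an earlier release and
# lets merge_packages.py fold the new platform entry into it, so previously
# published versions stay listed in the package index.
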
set -e

##
## PACKAGE ZIP
##

mkdir -p "$OUTPUT_DIR"
PKG_DIR="$OUTPUT_DIR/$PACKAGE_NAME"
PACKAGE_ZIP="$PACKAGE_NAME.zip"

echo "Updating submodules ..."
git -C "$GITHUB_WORKSPACE" submodule update --init --recursive > /dev/null 2>&1

mkdir -p "$PKG_DIR/tools"

# Copy all core files to the package folder
echo "Copying files for packaging ..."
cp -f "$GITHUB_WORKSPACE/boards.txt" "$PKG_DIR/"
cp -f "$GITHUB_WORKSPACE/programmers.txt" "$PKG_DIR/"
cp -Rf "$GITHUB_WORKSPACE/cores" "$PKG_DIR/"
cp -Rf "$GITHUB_WORKSPACE/libraries" "$PKG_DIR/"
cp -Rf "$GITHUB_WORKSPACE/variants" "$PKG_DIR/"
cp -f "$GITHUB_WORKSPACE/tools/espota.exe" "$PKG_DIR/tools/"
cp -f "$GITHUB_WORKSPACE/tools/espota.py" "$PKG_DIR/tools/"
cp -f "$GITHUB_WORKSPACE/tools/esptool.py" "$PKG_DIR/tools/"
cp -f "$GITHUB_WORKSPACE/tools/gen_esp32part.py" "$PKG_DIR/tools/"
cp -f "$GITHUB_WORKSPACE/tools/gen_esp32part.exe" "$PKG_DIR/tools/"
cp -Rf "$GITHUB_WORKSPACE/tools/partitions" "$PKG_DIR/tools/"
cp -Rf "$GITHUB_WORKSPACE/tools/sdk" "$PKG_DIR/tools/"

# Remove unnecessary files in the package folder
echo "Cleaning up folders ..."
find "$PKG_DIR" -name '*.DS_Store' -exec rm -f {} \;
find "$PKG_DIR" -name '*.git*' -type f -delete

# Replace tools locations in platform.txt
echo "Generating platform.txt..."
cat "$GITHUB_WORKSPACE/platform.txt" | \
sed "s/version=.*/version=$ver$extent/g" | \
sed 's/runtime.tools.xtensa-esp32-elf-gcc.path={runtime.platform.path}\/tools\/xtensa-esp32-elf//g' | \
sed 's/tools.esptool_py.path={runtime.platform.path}\/tools\/esptool/tools.esptool_py.path=\{runtime.tools.esptool_py.path\}/g' \
> "$PKG_DIR/platform.txt"

# Add header with version information
echo "Generating core_version.h ..."
ver_define=`echo $RELEASE_TAG | tr "[:lower:].\055" "[:upper:]_"`
ver_hex=`git -C "$GITHUB_WORKSPACE" rev-parse --short=8 HEAD 2>/dev/null`
echo \#define ARDUINO_ESP32_GIT_VER 0x$ver_hex > "$PKG_DIR/cores/esp32/core_version.h"
echo \#define ARDUINO_ESP32_GIT_DESC `git -C "$GITHUB_WORKSPACE" describe --tags 2>/dev/null` >> "$PKG_DIR/cores/esp32/core_version.h"
echo \#define ARDUINO_ESP32_RELEASE_$ver_define >> "$PKG_DIR/cores/esp32/core_version.h"
echo \#define ARDUINO_ESP32_RELEASE \"$ver_define\" >> "$PKG_DIR/cores/esp32/core_version.h"

# Compress package folder
echo "Creating ZIP ..."
pushd "$OUTPUT_DIR" >/dev/null
zip -qr "$PACKAGE_ZIP" "$PACKAGE_NAME"
if [ $? -ne 0 ]; then echo "ERROR: Failed to create $PACKAGE_ZIP ($?)"; exit 1; fi

# Calculate SHA-256
echo "Calculating SHA sum ..."
PACKAGE_PATH="$OUTPUT_DIR/$PACKAGE_ZIP"
PACKAGE_SHA=`shasum -a 256 "$PACKAGE_ZIP" | cut -f 1 -d ' '`
PACKAGE_SIZE=`get_file_size "$PACKAGE_ZIP"`
popd >/dev/null
rm -rf "$PKG_DIR"
echo "'$PACKAGE_ZIP' Created! Size: $PACKAGE_SIZE, SHA-256: $PACKAGE_SHA"
echo

# Upload package to release page
echo "Uploading package to release page ..."
PACKAGE_URL=`git_safe_upload_asset "$PACKAGE_PATH"`
echo "Package Uploaded"
echo "Download URL: $PACKAGE_URL"
echo

##
## PACKAGE JSON
##

# Construct JQ argument with package data
jq_arg=".packages[0].platforms[0].version = \"$RELEASE_TAG\" | \
    .packages[0].platforms[0].url = \"$PACKAGE_URL\" |\
    .packages[0].platforms[0].archiveFileName = \"$PACKAGE_ZIP\" |\
    .packages[0].platforms[0].size = \"$PACKAGE_SIZE\" |\
    .packages[0].platforms[0].checksum = \"SHA-256:$PACKAGE_SHA\""
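
# For reference, the jq filter above rewrites the first platform entry of the
# template; with made-up placeholder values the affected fields would read:
#   "version": "1.0.4",
#   "url": "https://github.com/<GITHUB_REPOSITORY>/releases/download/1.0.4/esp32-1.0.4.zip",
#   "archiveFileName": "esp32-1.0.4.zip",
#   "size": "12345678",
#   "checksum": "SHA-256:<sha of the zip>"
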
# Generate package JSONs
echo "Generating $PACKAGE_JSON_DEV ..."
cat "$PACKAGE_JSON_TEMPLATE" | jq "$jq_arg" > "$OUTPUT_DIR/$PACKAGE_JSON_DEV"
if [ "$RELEASE_PRE" == "false" ]; then
    echo "Generating $PACKAGE_JSON_REL ..."
    cat "$PACKAGE_JSON_TEMPLATE" | jq "$jq_arg" > "$OUTPUT_DIR/$PACKAGE_JSON_REL"
fi

# Figure out the last release or pre-release
echo "Getting previous releases ..."
releasesJson=`curl -sH "Authorization: token $GITHUB_TOKEN" "https://api.github.com/repos/$GITHUB_REPOSITORY/releases" 2>/dev/null`
if [ $? -ne 0 ]; then echo "ERROR: Get Releases Failed! ($?)"; exit 1; fi

set +e
prev_release=$(echo "$releasesJson" | jq -e -r '. | map(select(.draft == false and .prerelease == false)) | sort_by(.created_at | - fromdateiso8601) | .[0].tag_name')
prev_any_release=$(echo "$releasesJson" | jq -e -r '. | map(select(.draft == false)) | sort_by(.created_at | - fromdateiso8601) | .[0].tag_name')
shopt -s nocasematch
if [ "$prev_any_release" == "$RELEASE_TAG" ]; then
    prev_release=$(echo "$releasesJson" | jq -e -r '. | map(select(.draft == false and .prerelease == false)) | sort_by(.created_at | - fromdateiso8601) | .[1].tag_name')
    prev_any_release=$(echo "$releasesJson" | jq -e -r '. | map(select(.draft == false)) | sort_by(.created_at | - fromdateiso8601) | .[1].tag_name')
fi
COMMITS_SINCE_RELEASE="$prev_any_release"
shopt -u nocasematch
set -e

# Merge package JSONs with previous releases
if [ ! -z "$prev_any_release" ] && [ "$prev_any_release" != "null" ]; then
    echo "Merging with JSON from $prev_any_release ..."
    merge_package_json "$prev_any_release/$PACKAGE_JSON_DEV" "$OUTPUT_DIR/$PACKAGE_JSON_DEV"
fi

if [ "$RELEASE_PRE" == "false" ]; then
    COMMITS_SINCE_RELEASE="$prev_release"
    if [ ! -z "$prev_release" ] && [ "$prev_release" != "null" ]; then
        echo "Merging with JSON from $prev_release ..."
        merge_package_json "$prev_release/$PACKAGE_JSON_REL" "$OUTPUT_DIR/$PACKAGE_JSON_REL"
    fi
fi

echo "Previous Release: $prev_release"
echo "Previous (any) release: $prev_any_release"
echo

# Upload package JSONs
echo "Uploading $PACKAGE_JSON_DEV ..."
echo "Download URL: "`git_safe_upload_asset "$OUTPUT_DIR/$PACKAGE_JSON_DEV"`
echo "Pages URL: "`git_safe_upload_to_pages "$PACKAGE_JSON_DEV" "$OUTPUT_DIR/$PACKAGE_JSON_DEV"`
echo
if [ "$RELEASE_PRE" == "false" ]; then
    echo "Uploading $PACKAGE_JSON_REL ..."
    echo "Download URL: "`git_safe_upload_asset "$OUTPUT_DIR/$PACKAGE_JSON_REL"`
    echo "Pages URL: "`git_safe_upload_to_pages "$PACKAGE_JSON_REL" "$OUTPUT_DIR/$PACKAGE_JSON_REL"`
    echo
fi

##
## RELEASE NOTES
##

# Create release notes
echo "Preparing release notes ..."
releaseNotes=""

# Process annotated tags
relNotesRaw=`git -C "$GITHUB_WORKSPACE" show -s --format=%b $RELEASE_TAG`
readarray -t msgArray <<<"$relNotesRaw"
arrLen=${#msgArray[@]}
if [ $arrLen > 3 ] && [ "${msgArray[0]:0:3}" == "tag" ]; then
    ind=3
    while [ $ind -lt $arrLen ]; do
        if [ $ind -eq 3 ]; then
            releaseNotes="#### ${msgArray[ind]}"
            releaseNotes+=$'\r\n'
        else
            oneLine="$(echo -e "${msgArray[ind]}" | sed -e 's/^[[:space:]]*//')"
            if [ ${#oneLine} -gt 0 ]; then
                if [ "${oneLine:0:2}" == "* " ]; then oneLine=$(echo ${oneLine/\*/-}); fi
                if [ "${oneLine:0:2}" != "- " ]; then releaseNotes+="- "; fi
                releaseNotes+="$oneLine"
                releaseNotes+=$'\r\n'
            fi
        fi
        let ind=$ind+1
    done
fi

# Append Commit Messages
if [ ! -z "$COMMITS_SINCE_RELEASE" ] && [ "$COMMITS_SINCE_RELEASE" != "null" ]; then
    echo "Getting commits since $COMMITS_SINCE_RELEASE ..."
    commitFile=$OUTPUT_DIR/commits.txt
    git -C "$GITHUB_WORKSPACE" log --oneline $COMMITS_SINCE_RELEASE.. > "$OUTPUT_DIR/commits.txt"
    releaseNotes+=$'\r\n##### Commits\r\n'
    IFS=$'\n'
    for next in `cat $commitFile`
    do
        IFS=' ' read -r commitId commitMsg <<< "$next"
        commitLine="- [$commitId](https://github.com/$GITHUB_REPOSITORY/commit/$commitId) $commitMsg"
        releaseNotes+="$commitLine"
        releaseNotes+=$'\r\n'
    done
    rm -f $commitFile
fi

# Prepend the original release body
if [ "${RELEASE_BODY: -1}" == $'\r' ]; then
    RELEASE_BODY="${RELEASE_BODY:0:-1}"
else
    RELEASE_BODY="$RELEASE_BODY"
fi
RELEASE_BODY+=$'\r\n'
releaseNotes="$RELEASE_BODY$releaseNotes"

# Update release page
echo "Updating release notes ..."
releaseNotes=$(printf '%s' "$releaseNotes" | python -c 'import json,sys; print(json.dumps(sys.stdin.read()))')
releaseNotes=${releaseNotes:1:-1}
curlData="{\"body\": \"$releaseNotes\"}"
releaseData=`curl --data "$curlData" "https://api.github.com/repos/$GITHUB_REPOSITORY/releases/$RELEASE_ID?access_token=$GITHUB_TOKEN" 2>/dev/null`
if [ $? -ne 0 ]; then echo "ERROR: Updating Release Failed: $?"; exit 1; fi
echo "Release notes successfully updated"
echo

##
## SUBMODULE VERSIONS
##

# Upload submodules versions
echo "Generating submodules.txt ..."
git -C "$GITHUB_WORKSPACE" submodule status > "$OUTPUT_DIR/submodules.txt"
echo "Uploading submodules.txt ..."
echo "Download URL: "`git_safe_upload_asset "$OUTPUT_DIR/submodules.txt"`
echo ""
set +e

##
## DONE
##
echo "DONE!"
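
The whole script is driven by the webhook payload that GitHub Actions writes to $GITHUB_EVENT_PATH. A minimal sketch of a local dry run of the event parsing, with a hand-written payload (all values below are made up; the real payload comes from the release event), could look like:

#!/bin/bash
# Hypothetical stand-in for the GitHub-provided event file.
export GITHUB_EVENT_NAME=release
export GITHUB_EVENT_PATH=/tmp/fake-release-event.json
cat > "$GITHUB_EVENT_PATH" <<'EOF'
{"action":"published",
 "release":{"draft":false,"prerelease":false,"tag_name":"0.0.1",
            "target_commitish":"master","id":1,"body":"test release"}}
EOF
# The same jq queries on-release.sh performs on the payload:
jq -r '.action, .release.tag_name, .release.prerelease' "$GITHUB_EVENT_PATH"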

.github/workflows/push.yml  (renamed from main.yml)

@@ -20,7 +20,7 @@ jobs:
     steps:
     - uses: actions/checkout@v1
     - name: Build Sketches
-      run: bash ./tools/ci/on-push.sh ${{ matrix.chunk }} 15
+      run: bash ./.github/scripts/on-push.sh ${{ matrix.chunk }} 15
 
   # Windows and MacOS
   build-arduino-win-mac:
@@ -33,7 +33,7 @@ jobs:
     steps:
     - uses: actions/checkout@v1
     - name: Build Sketches
-      run: bash ./tools/ci/on-push.sh
+      run: bash ./.github/scripts/on-push.sh
 
   # PlatformIO on Windows, Ubuntu and Mac
   build-platformio:
@@ -46,4 +46,4 @@ jobs:
     steps:
     - uses: actions/checkout@v1
     - name: Build Sketches
-      run: bash ./tools/ci/on-push.sh 1 1 #equal and non-zero to trigger PIO
+      run: bash ./.github/scripts/on-push.sh 1 1 #equal and non-zero to trigger PIO
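
The three build flavours in this workflow call the relocated script exactly as shown in the run lines above, so the same checks can be reproduced from a local checkout once the toolchains the script installs are in place:

# Chunked Arduino build, chunk 0 of 15 (as in the matrix job)
bash ./.github/scripts/on-push.sh 0 15
# Windows/macOS Arduino build (no chunk arguments)
bash ./.github/scripts/on-push.sh
# PlatformIO build (equal, non-zero arguments select the PlatformIO path)
bash ./.github/scripts/on-push.sh 1 1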

.github/workflows/release.yml  (new file, vendored, 17 lines added)

name: ESP32 Arduino Release

on:
  release:
    types: published

jobs:
  build:
    name: Publish Release
    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@master
    - name: Build Release
      env:
        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      run: bash ./.github/scripts/on-release.sh
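
The workflow hands the job nothing but GITHUB_TOKEN; on-release.sh then uses that token for every call to api.github.com and uploads.github.com. A small pre-flight guard for running the script outside of Actions (purely illustrative, not part of the workflow) might be:

# Hypothetical local wrapper: fail fast if the token is missing.
if [ -z "$GITHUB_TOKEN" ]; then
    echo "GITHUB_TOKEN is not set; all uploads would fail" >&2
    exit 1
fi
bash ./.github/scripts/on-release.sh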

.travis.yml  (25 changed lines)

@@ -21,42 +21,27 @@ jobs:
     - name: "Build Arduino 0"
       if: tag IS blank AND (type = pull_request OR (type = push AND branch = master))
       stage: build
-      script: $TRAVIS_BUILD_DIR/tools/ci/on-push.sh 0 4
+      script: $TRAVIS_BUILD_DIR/.github/scripts/on-push.sh 0 10
 
     - name: "Build Arduino 1"
       if: tag IS blank AND (type = pull_request OR (type = push AND branch = master))
       stage: build
-      script: $TRAVIS_BUILD_DIR/tools/ci/on-push.sh 1 4
+      script: $TRAVIS_BUILD_DIR/.github/scripts/on-push.sh 1 10
 
     - name: "Build Arduino 2"
       if: tag IS blank AND (type = pull_request OR (type = push AND branch = master))
       stage: build
-      script: $TRAVIS_BUILD_DIR/tools/ci/on-push.sh 2 4
+      script: $TRAVIS_BUILD_DIR/.github/scripts/on-push.sh 2 10
 
     - name: "Build Arduino 3"
       if: tag IS blank AND (type = pull_request OR (type = push AND branch = master))
       stage: build
-      script: $TRAVIS_BUILD_DIR/tools/ci/on-push.sh 3 4
+      script: $TRAVIS_BUILD_DIR/.github/scripts/on-push.sh 3 10
 
     - name: "Build PlatformIO"
       if: tag IS blank AND (type = pull_request OR (type = push AND branch = master))
       stage: build
-      script: $TRAVIS_BUILD_DIR/tools/ci/on-push.sh 4 4
+      script: $TRAVIS_BUILD_DIR/.github/scripts/on-push.sh 1 1
 
-    - name: "Package & Deploy"
-      if: tag IS present
-      stage: deploy
-      env:
-        - secure: "l/4Dt+KQ/mACtGAHDUsPr66fUte840PZoQ4xpPikqWZI0uARu4l+Ym7+sHinnT6fBqrj8AJeBYGz4nFa8NK4LutZn9mSD40w+sxl0wSV4oHV8rzKe3Cd8+sMG3+o33yWoikMNjSvqa73Q0rm+SgrlInNdZbuAyixL+a2alaWSnGPm4F2xwUGj+S33TOy5P/Xp77CYtCV5S8vzyk/eEdNhoF0GYePJVdfuzCOUjXMyT5OWxORkzzQ7Hnn/Ka/RDfV8Si4HgujLQBrK5q6iPnNBFqBSqilYBepSMn4opnOBpIm0SCgePz7XQEFC83buA7GUcnCnfg38bf+dCwHaODf1d1PmqVRYt2QmfinexXtM4afAtL0iBUDtvrfnXHzwW9w82VeZhpbJSVh9DUQvB0IlsZeCz9J9PUBAi3N+SMX+9l+BomYwRUlPuKY+Ef2JKk9q6mxtUkky5R0daAlVxEhpVdQks1rT+T+NMoDMemxQ3SKEiqAHh6EgHecruszffmZ71uLX9MpERpew0qN+UFiafws+jkTjx+3yF9yut0Hf9sMbeAYzzkGzRqJTUEBJ6B29Cql8M0yRXCNN/8wuuTHhG8esstozga4ZQoIVrq7mEAgup376PTcNfr1+imbbWVQ7lJdYIuDe6OS5V3OX6np11vgK/DbhfyzvQv9Z1zAGnM="
-        - REMOTE_URL=https://github.com/$TRAVIS_REPO_SLUG/releases/download/$TRAVIS_TAG
-      script: bash $TRAVIS_BUILD_DIR/tools/ci/build-release.sh -a$ESP32_GITHUB_TOKEN
-      before_deploy: git submodule update --init
-      deploy:
-        - provider: script
-          skip_cleanup: true
-          script: bash $TRAVIS_BUILD_DIR/tools/ci/deploy-release.sh -t$TRAVIS_TAG -a$ESP32_GITHUB_TOKEN -s$TRAVIS_REPO_SLUG -drelease
-          on:
-            tags: true
-
 notifications:
   email:
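
Travis CI keeps building pushes and pull requests, but now through the shared .github/scripts/on-push.sh (four Arduino chunks out of 10 plus one PlatformIO job), while the old tag-triggered "Package & Deploy" stage is dropped in favour of the GitHub Actions release workflow. Replaying the remaining Travis matrix sequentially from a checkout would amount to the following (paths relative to the repository root, as $TRAVIS_BUILD_DIR is on Travis):

# Hypothetical sequential replay of the Travis build jobs.
for chunk in 0 1 2 3; do
    bash ./.github/scripts/on-push.sh "$chunk" 10
done
bash ./.github/scripts/on-push.sh 1 1   # PlatformIO flavour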

appveyor.yml  (deleted, 19 lines removed)

build: off
environment:

  matrix:
    - PLATFORMIO_CI_SRC: "libraries/WiFi/examples/WiFiClient"
    - PLATFORMIO_CI_SRC: "libraries/WiFi/examples/WiFiClientBasic"
    - PLATFORMIO_CI_SRC: "libraries/WiFi/examples/WiFiClientEvents"
    - PLATFORMIO_CI_SRC: "libraries/WiFi/examples/WiFiIPv6"
    - PLATFORMIO_CI_SRC: "libraries/WiFi/examples/WiFiScan"
    - PLATFORMIO_CI_SRC: "libraries/WiFi/examples/WiFiSmartConfig"

install:
  - cmd: git submodule update --init --recursive
  - cmd: SET PATH=%PATH%;C:\Python27\Scripts
  - cmd: pip install -U https://github.com/platformio/platformio/archive/develop.zip
  - cmd: platformio platform install https://github.com/platformio/platform-espressif32.git#feature/stage

test_script:
  - cmd: platformio ci -b esp32dev -b nano32 -b node32s
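
AppVeyor built a handful of WiFi examples with PlatformIO on Windows; that coverage now comes from the PlatformIO path of on-push.sh. The removed config's core check can still be run by hand, roughly as follows (assumes PlatformIO is installed and the staged platform is available):

# Illustrative manual equivalent of the deleted test_script.
pip install -U https://github.com/platformio/platformio/archive/develop.zip
platformio platform install https://github.com/platformio/platform-espressif32.git#feature/stage
PLATFORMIO_CI_SRC="libraries/WiFi/examples/WiFiClient" \
    platformio ci -b esp32dev -b nano32 -b node32s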

tools/ci/build-release.sh  (deleted, 379 lines removed — the packaging script previously invoked from the Travis deploy stage)

#!/bin/bash

if [ -z "$TRAVIS_TAG" ]; then
    echo "Skipping Packaging: Regular build"
    exit 0
fi

############################################################
# $1 - download link
# $2 - JSON output file
function downloadAndMergePackageJSON()
{
    echo
    echo " ---Package JSON definition merge BEGIN--->"

    jsonLink=$1
    jsonOut=$2
    curlAuthToken=$3
    outDirectory=$4

    echo " - remote package JSON: $jsonLink (source)"
    echo " - current package JSON: $jsonOut (target)"

    old_json=$outDirectory/oldJson.json
    merged_json=$outDirectory/mergedJson.json

    #DEBUG
    #echo " Local tmp for remote JSON: $old_json"
    #echo " Merge output JSON: $merged_json"

    echo " - downloading JSON package definition: $jsonLink ..."

    # Authentication through HTTP headers might fail on redirection due to bug in cURL (https://curl.haxx.se/docs/adv_2018-b3bf.html - headers are resent to the target location including the original authentication)
    # Notes:
    # - eg AmazonAWS fails with Bad Request due to having maximum 1 authentication mechanism per a request, might be general issue
    # - it's a first-class credential leakage
    # - the fix is available in cURL 7.58.0+, however, TravisCI is not yet updated (May 29, 2018) - see https://docs.travis-ci.com/user/build-environment-updates
    # - TravisCI workaround: updating build environment through .travis.yml (ie install required version of cURL using apt-get, see https://docs.travis-ci.com/user/installing-dependencies/)
    # - previous point not used on purpose (build time increase, possible failure corrupts whole build, etc) but it's good to know there's a way out of hell
    # - local workaround: authentication through 'access_token' as GET parameter works smoothly, however, HTTP headers are preferred

    #curl --verbose -sH "Authorization: token $curlAuthToken" -L -o "$old_json" "$jsonLink"
    curl -L -o "$old_json" "$jsonLink?access_token=$curlAuthToken"
    if [ $? -ne 0 ]; then echo "FAILED: $? => aborting"; exit 1; fi

    #curl -L -o "$old_json" "$jsonLink"

    echo " - merging $old_json into $jsonOut ..."

    echo
    set +e
    stdbuf -oL python package/merge_packages.py "$jsonOut" "$old_json" > "$merged_json"
    set -e #supposed to be ON by default
    echo

    set -v
    if [ ! -s $merged_json ]; then
        rm -f "$merged_json"
        echo " Done: nothing to merge ($merged_json empty) => $jsonOut remains unchanged"
    else
        rm -f "$jsonOut"
        mv "$merged_json" "$jsonOut"
        echo " Done: JSON data successfully merged to $jsonOut"
    fi

    rm -f "$old_json"
    set +v
    echo " <---Package JSON definition merge END---"
    echo
}
############################################################

#Cmdline options
# -a: GitHub API access token
# -d: output directory to store the (pre)release filedir set

set -e

echo
echo "==================================================================="
echo "RELEASE PACKAGE PUBLISHING ARRANGEMENTS (GitHub/Arduino compliance)"
echo "==================================================================="
echo
cmdLine=`basename $0 $@`
echo "Cmdline: ${cmdLine}"

# cURL authentication token
while getopts ":a:,:d:" opt; do
    case $opt in
        a)
            curlAuth=$OPTARG
            #echo " ACCESS TOKEN: $curlAuth" >&2
            ;;
        d)
            releaseDir=$OPTARG
            #echo " RELEASE OUTPUT DIRECTORY: $releaseDir" >&2
            ;;
        \?)
            echo "Error: invalid option -$OPTARG => aborting" >&2
            exit 1
            ;;
        :)
            echo "Error: option -$OPTARG requires an argument => aborting" >&2
            exit 1
            ;;
    esac
done

# where we at?
echo
echo "Prerequisite check:"
if [ -z "$TRAVIS_BUILD_DIR" ]; then
    echo " - non-TravisCI environment"
    cd "$( dirname ${BASH_SOURCE[0]} )"/..
    bTravisRun=0
else
    echo " - TravisCI run"
    cd $TRAVIS_BUILD_DIR
    bTravisRun=1
fi

# no tag, no love
if [ -z "$TRAVIS_TAG" ] && [ $bTravisRun -eq 1 ]; then
    echo "Warning: non-tagged builds not supported in Travis CI environment => exiting"
    exit 0
fi

echo
echo "Package build settings:"
echo "======================="

# source directory
srcdir=`pwd`
echo "Current working directory: ${srcdir}"

# target directory for actual release fileset
if [ -z "$releaseDir" ]; then
    releaseDir=release
fi
echo "Release output directory: $releaseDir"

# Git versions, branch names, tags
branch_name=""
verx=""
extent=""

if [ -z "$TRAVIS_TAG" ]; then
    branch_name=`git rev-parse --abbrev-ref HEAD 2>/dev/null`
    ver=`sed -n -E 's/version=([0-9.]+)/\1/p' platform.txt`
else
    ver=$TRAVIS_TAG
fi
verx=`git rev-parse --short=8 HEAD 2>/dev/null`

# Package name resolving (case-insensitive):
# - unknown branch, master branch or branch in detached state (HEAD revision) use only the tag's name as version string (esp32-$TAG_NAME, eg 'esp32-1.0.0-RC1')
# - all other branches use long-version string (esp32-$BRANCH_NAME-$GITREV_NUMBER_SHORT, eg 'esp32-idf_update-cde668da')

shopt -s nocasematch

if [ ! -z "$branch_name" ] && [ "$branch_name" != "master" ] && [ "$branch_name" != "head" ]; then
    extent="-$branch_name-$verx"
fi

package_name=esp32-$ver$extent

shopt -u nocasematch

echo "Package version: $ver"
echo "Git branch name: $branch_name"
echo "Git revision number: $verx"
echo "Package name extension: $extent"
echo "Travis CI tag: $TRAVIS_TAG"
echo "Release package name: $package_name"

# Set REMOTE_URL environment variable to the address where the package will be
# available for download. This gets written into package json file.

if [ -z "$REMOTE_URL" ]; then
    REMOTE_URL="http://localhost:8000"
    remoteEchoOut="${REMOTE_URL} (REMOTE_URL variable not defined, using default)"
else
    remoteEchoOut="${REMOTE_URL}"
fi
echo "Target URL for download (JSON incl): ${remoteEchoOut}"

# Create directory for the package
outdir=$releaseDir/$package_name
echo "Local temp directory: $outdir"

rm -rf $releaseDir
mkdir -p $outdir

# Copy files required for the package release:
echo
echo "Package build processing:"
echo "========================="
echo
echo "Prepare files for the package main archive:"
echo " - copying necessary files from current Git repository..."

# <PACKAGE ROOT>
cp -f $srcdir/boards.txt $outdir/
cp -f $srcdir/platform.txt $outdir/
cp -f $srcdir/programmers.txt $outdir/

# <COMPLETE DIRS>
# cores/
# libraries/
# variants/
# tools/partitions/
cp -Rf $srcdir/cores $outdir/
cp -Rf $srcdir/libraries $outdir/
cp -Rf $srcdir/variants $outdir/
mkdir -p $outdir/tools
cp -Rf $srcdir/tools/partitions $outdir/tools/

# <DIR & FILES>
# tools/sdk/
cp -Rf $srcdir/tools/sdk $outdir/tools/

# tools/
cp -f $srcdir/tools/espota.exe $outdir/tools/
cp -f $srcdir/tools/espota.py $outdir/tools/
cp -f $srcdir/tools/esptool.py $outdir/tools/
cp -f $srcdir/tools/gen_esp32part.py $outdir/tools/
cp -f $srcdir/tools/gen_esp32part.exe $outdir/tools/

echo " - cleaning *.DS_Store files..."
find $outdir -name '*.DS_Store' -exec rm -f {} \;

# Do some replacements in platform.txt file, which are required because IDE
# handles tool paths differently when package is installed in hardware folder
echo " - updating platform.txt..."
cat $srcdir/platform.txt | \
sed "s/version=.*/version=$ver$extent/g" | \
sed 's/runtime.tools.xtensa-esp32-elf-gcc.path={runtime.platform.path}\/tools\/xtensa-esp32-elf//g' | \
sed 's/tools.esptool_py.path={runtime.platform.path}\/tools\/esptool/tools.esptool_py.path=\{runtime.tools.esptool_py.path\}/g' \
> $outdir/platform.txt

# Put core version and short hash of git version into core_version.h
ver_define=`echo $ver | tr "[:lower:].\055" "[:upper:]_"`
echo " - generating C/C++ header defines ($ver_define -> /cores/esp32/core_version.h)..."

echo \#define ARDUINO_ESP32_GIT_VER 0x$verx >$outdir/cores/esp32/core_version.h
echo \#define ARDUINO_ESP32_GIT_DESC `git describe --tags 2>/dev/null` >>$outdir/cores/esp32/core_version.h
echo \#define ARDUINO_ESP32_RELEASE_$ver_define >>$outdir/cores/esp32/core_version.h
echo \#define ARDUINO_ESP32_RELEASE \"$ver_define\" >>$outdir/cores/esp32/core_version.h

# Store submodules' current versions
echo " - getting submodule list (${releaseDir}/submodules.txt)..."
git submodule status > $releaseDir/submodules.txt

# remove all .git* files
echo " - removing *.git files possibly fetched to package tempdir..."
find $outdir -name '*.git*' -type f -delete

# Zip the package
package_name_zip=$package_name.zip
echo " - creating package ZIP archive (${package_name_zip})..."

pushd $releaseDir >/dev/null

zip -qr $package_name_zip $package_name
if [ $? -ne 0 ]; then echo " !error: failed to create ${package_name_zip} (ZIP errno: $?) => aborting"; exit 1; fi

# Calculate SHA sum and size of ZIP archive
sha=`shasum -a 256 $package_name_zip | cut -f 1 -d ' '`
size=`/bin/ls -l $package_name_zip | awk '{print $5}'`
echo " ${package_name_zip} creation OK (size: $size, sha2: $sha)"
echo

echo "Making $package_name JSON definition file(s):"

popd >/dev/null

PACKAGE_JSON_DEV="package_esp32_dev_index.json"
PACKAGE_JSON_REL="package_esp32_index.json"

# figure out the package type (release / pre-release)
shopt -s nocasematch
if [[ $TRAVIS_TAG == *-RC* ]]; then
    bIsPrerelease=1
    package_name_json=$PACKAGE_JSON_DEV
    echo " - package type: PRE-RELEASE, JSON def.file: $PACKAGE_JSON_DEV"
else
    bIsPrerelease=0
    package_name_json=$PACKAGE_JSON_REL
    echo " - package type: RELEASE, JSON def.files: $PACKAGE_JSON_REL, $PACKAGE_JSON_DEV"
fi
shopt -u nocasematch

# Cleanup temporary work dir
rm -rf $outdir

# Get all previously released versions
echo " - fetching previous (pre)release versions from GitHub..."

set +e

releasesJson=$releaseDir/releases.json
curl -sH "Authorization: token $curlAuth" https://api.github.com/repos/$TRAVIS_REPO_SLUG/releases > $releasesJson
if [ $? -ne 0 ]; then echo "FAILED: $? => aborting"; exit 1; fi

prev_release=$(jq -e -r '. | map(select(.draft == false and .prerelease == false)) | sort_by(.created_at | - fromdateiso8601) | .[0].tag_name' ${releasesJson})
prev_any_release=$(jq -e -r '. | map(select(.draft == false)) | sort_by(.created_at | - fromdateiso8601) | .[0].tag_name' ${releasesJson})
prev_pre_release=$(jq -e -r '. | map(select(.draft == false and .prerelease == true)) | sort_by(.created_at | - fromdateiso8601) | .[0].tag_name' ${releasesJson})

shopt -s nocasematch
if [ "$prev_any_release" == "$TRAVIS_TAG" ]; then
    prev_release=$(jq -e -r '. | map(select(.draft == false and .prerelease == false)) | sort_by(.created_at | - fromdateiso8601) | .[1].tag_name' ${releasesJson})
    prev_any_release=$(jq -e -r '. | map(select(.draft == false)) | sort_by(.created_at | - fromdateiso8601) | .[1].tag_name' ${releasesJson})
    prev_pre_release=$(jq -e -r '. | map(select(.draft == false and .prerelease == true)) | sort_by(.created_at | - fromdateiso8601) | .[1].tag_name' ${releasesJson})
fi
shopt -u nocasematch

set -e

rm -f "$releasesJson"

echo " previous Release: $prev_release"
echo " previous Pre-release: $prev_pre_release"
echo " previous (any)release: $prev_any_release"

# add generated items to JSON package-definition contents
jq_arg=".packages[0].platforms[0].version = \"$ver\" | \
    .packages[0].platforms[0].url = \"$REMOTE_URL/$package_name_zip\" |\
    .packages[0].platforms[0].archiveFileName = \"$package_name_zip\""

jq_arg="$jq_arg |\
    .packages[0].platforms[0].size = \"$size\" |\
    .packages[0].platforms[0].checksum = \"SHA-256:$sha\""

# always get DEV version of JSON (included in both RC/REL)
pkgJsonDev=$releaseDir/$PACKAGE_JSON_DEV
echo " - generating/merging _DEV_ JSON file (${pkgJsonDev})..."

cat $srcdir/package/package_esp32_index.template.json | jq "$jq_arg" > $pkgJsonDev
cd $srcdir
if [ ! -z "$prev_any_release" ] && [ "$prev_any_release" != "null" ]; then
    downloadAndMergePackageJSON "https://github.com/$TRAVIS_REPO_SLUG/releases/download/${prev_any_release}/${PACKAGE_JSON_DEV}" "${pkgJsonDev}" "${curlAuth}" "$releaseDir"

    # Release notes: GIT log comments (prev_any_release, current_release>
    echo " - executing: git log --oneline $prev_any_release.."
    git log --oneline $prev_any_release.. > $releaseDir/commits.txt
fi

# for RELEASE run update REL JSON as well
if [ $bIsPrerelease -eq 0 ]; then

    pkgJsonRel=$releaseDir/$PACKAGE_JSON_REL
    echo " - generating/merging _REL_ JSON file (${pkgJsonRel})..."

    cat $srcdir/package/package_esp32_index.template.json | jq "$jq_arg" > $pkgJsonRel
    if [ ! -z "$prev_release" ] && [ "$prev_release" != "null" ]; then
        downloadAndMergePackageJSON "https://github.com/$TRAVIS_REPO_SLUG/releases/download/${prev_release}/${PACKAGE_JSON_REL}" "${pkgJsonRel}" "${curlAuth}" "$releaseDir"

        # Release notes: GIT log comments (prev_release, current_release>
        echo " - executing: git log --oneline $prev_release.."
        git log --oneline $prev_release.. > $releaseDir/commits.txt
    fi
fi

echo
echo "JSON definition file(s) creation OK"

echo
echo "==================================================================="
echo "Package preparation done ('$releaseDir' contents):"
fileset=`ls -1 $releaseDir`
echo -e $fileset

echo
echo "==================================================================="
echo "==================================================================="
echo "'$package_name' ready for publishing, processing completed."
echo "==================================================================="
echo
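
The JSON-merging core of this deleted script lives on as merge_package_json() in .github/scripts/on-release.sh; both shell out to the same Python merger, which takes the new index first and the previously published one second. A standalone invocation (file names are placeholders) looks like:

# Illustrative direct use of the package-index merger.
python .github/scripts/merge_packages.py new_index.json previous_index.json > merged_index.json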

tools/ci/deploy-release.sh  (deleted, 248 lines removed — the release-record and asset-upload script previously invoked from the Travis deploy stage)

#!/bin/bash

json_escape () {
    printf '%s' "$1" | python -c 'import json,sys; print(json.dumps(sys.stdin.read()))'
    #printf '%s' "$1" | php -r 'echo json_encode(file_get_contents("php://stdin"));'
}

set -e

#Cmdline options
# -t: tag (*_RC* determines prerelease version, can be overridden by -p)
# -a: GitHub API access token
# -s: GitHub repository slug (user/repo)
# -p: prerelease true/false
# -f: files to upload (ie assets. delim = ';', must come quoted)
# -d: directory to upload (by adding dir contents to assets)
while getopts ":t:,:a:,:s:,:p:,:f:,:d:" opt; do
    case $opt in
        t)
            varTagName=$OPTARG
            echo "TAG: $varTagName" >&2
            ;;
        a)
            varAccessToken=$OPTARG
            echo "ACCESS TOKEN: $varAccessToken" >&2
            ;;
        s)
            varRepoSlug=$OPTARG
            echo "REPO SLUG: $varRepoSlug" >&2
            ;;
        p)
            varPrerelease=$OPTARG
            echo "PRERELEASE: $varPrerelease" >&2
            ;;
        f)
            varAssets=$OPTARG
            echo "ASSETS: $varAssets" >&2
            ;;
        d)
            varAssetsDir=$OPTARG
            echo "ASSETS DIR: $varAssetsDir" >&2
            ;;
        \?)
            echo "Invalid option: -$OPTARG" >&2
            exit 1
            ;;
        :)
            echo "Option -$OPTARG requires an argument." >&2
            exit 1
            ;;
    esac
done

# use TravisCI env as default, if available
if [ -z $varTagName ] && [ ! -z $TRAVIS_TAG ]; then
    varTagName=$TRAVIS_TAG
fi

if [ -z $varTagName ]; then
    echo "No tag name available => aborting"
    exit 1
fi

#Check tag name for release/prerelease (prerelease tag contains '_RC' as for release-candidate. case-insensitive)
shopt -s nocasematch
if [ -z $varPrerelease ]; then
    if [[ $varTagName == *-RC* ]]; then
        varPrerelease=true
    else
        varPrerelease=false
    fi
fi
shopt -u nocasematch

#
# Prepare Markdown release notes:
#################################
# - annotated tags only, lightweight tags just display message of referred commit
# - tag's description conversion to relnotes:
#     first 3 lines (tagname, committer, blank): ignored
#     4th line: relnotes heading
#     remaining lines: each converted to bullet-list item
#     empty lines ignored
#     if '* ' found as a first char pair, it's converted to '- ' to keep bulleting unified
echo
echo Preparing release notes
echo -----------------------
echo "Tag's message:"

relNotesRaw=`git show -s --format=%b $varTagName`
readarray -t msgArray <<<"$relNotesRaw"
arrLen=${#msgArray[@]}

#process annotated tags only
if [ $arrLen > 3 ] && [ "${msgArray[0]:0:3}" == "tag" ]; then
    ind=3
    while [ $ind -lt $arrLen ]; do
        if [ $ind -eq 3 ]; then
            releaseNotes="#### ${msgArray[ind]}"
            releaseNotes+=$'\r\n'
        else
            oneLine="$(echo -e "${msgArray[ind]}" | sed -e 's/^[[:space:]]*//')"

            if [ ${#oneLine} -gt 0 ]; then
                if [ "${oneLine:0:2}" == "* " ]; then oneLine=$(echo ${oneLine/\*/-}); fi
                if [ "${oneLine:0:2}" != "- " ]; then releaseNotes+="- "; fi
                releaseNotes+="$oneLine"
                releaseNotes+=$'\r\n'

                #debug output
                echo " ${oneLine}"
            fi
        fi
        let ind=$ind+1
    done
fi

echo "$releaseNotes"

# - list of commits (commits.txt must exist in the output dir)
commitFile=$varAssetsDir/commits.txt
if [ -s "$commitFile" ]; then

    releaseNotes+=$'\r\n##### Commits\r\n'

    echo
    echo "Commits:"

    IFS=$'\n'
    for next in `cat $commitFile`
    do
        IFS=' ' read -r commitId commitMsg <<< "$next"
        commitLine="- [$commitId](https://github.com/$varRepoSlug/commit/$commitId) $commitMsg"
        echo " $commitLine"

        releaseNotes+="$commitLine"
        releaseNotes+=$'\r\n'
    done
    rm -f $commitFile
fi

# Check possibly existing release for current tag
echo
echo "Processing GitHub release record for $varTagName:"
echo "-------------------------------------------------"

echo " - check $varTagName possible existence..."

# (eg build invoked by Create New Release GHUI button -> GH default release pack created immediately including default assets)
HTTP_RESPONSE=$(curl -L --silent --write-out "HTTPSTATUS:%{http_code}" https://api.github.com/repos/$varRepoSlug/releases/tags/$varTagName?access_token=$varAccessToken)
if [ $? -ne 0 ]; then echo "FAILED: $? => aborting"; exit 1; fi

HTTP_BODY=$(echo $HTTP_RESPONSE | sed -e 's/HTTPSTATUS\:.*//g')
HTTP_STATUS=$(echo $HTTP_RESPONSE | tr -d '\n' | sed -e 's/.*HTTPSTATUS://')
echo " ---> GitHub server HTTP response: $HTTP_STATUS"

# if the release exists, append/update recent files to its assets vector
if [ $HTTP_STATUS -eq 200 ]; then
    releaseId=$(echo $HTTP_BODY | jq -r '.id')
    echo " - $varTagName release found (id $releaseId)"

    #Merge release notes and overwrite pre-release flag. all other attributes remain unchanged:

    # 1. take existing notes from server (added by release creator)
    releaseNotesGH=$(echo $HTTP_BODY | jq -r '.body')

    # - strip possibly trailing CR
    if [ "${releaseNotesGH: -1}" == $'\r' ]; then
        releaseNotesTemp="${releaseNotesGH:0:-1}"
    else
        releaseNotesTemp="$releaseNotesGH"
    fi
    # - add CRLF to make relnotes consistent for JSON encoding
    releaseNotesTemp+=$'\r\n'

    # 2. #append generated relnotes (usually commit oneliners)
    releaseNotes="$releaseNotesTemp$releaseNotes"

    # 3. JSON-encode whole string for GH API transfer
    releaseNotes=$(json_escape "$releaseNotes")

    # 4. remove extra quotes returned by python (dummy but whatever)
    releaseNotes=${releaseNotes:1:-1}

    #Update current GH release record
    echo " - updating release notes and pre-release flag:"

    curlData="{\"body\": \"$releaseNotes\",\"prerelease\": $varPrerelease}"
    echo " <data.begin>$curlData<data.end>"
    echo
    #echo "DEBUG: curl --data \"$curlData\" https://api.github.com/repos/$varRepoSlug/releases/$releaseId?access_token=$varAccessToken"

    curl --data "$curlData" https://api.github.com/repos/$varRepoSlug/releases/$releaseId?access_token=$varAccessToken
    if [ $? -ne 0 ]; then echo "FAILED: $? => aborting"; exit 1; fi

    echo " - $varTagName release record successfully updated"

#... or create a new release record
else
    releaseNotes=$(json_escape "$releaseNotes")
    releaseNotes=${releaseNotes:1:-1}

    echo " - release $varTagName not found, creating a new record:"

    curlData="{\"tag_name\": \"$varTagName\",\"target_commitish\": \"master\",\"name\": \"v$varTagName\",\"body\": \"$releaseNotes\",\"draft\": false,\"prerelease\": $varPrerelease}"
    echo " <data.begin>$curlData<data.end>"

    #echo "DEBUG: curl --data \"${curlData}\" https://api.github.com/repos/${varRepoSlug}/releases?access_token=$varAccessToken | jq -r '.id'"
    releaseId=$(curl --data "$curlData" https://api.github.com/repos/$varRepoSlug/releases?access_token=$varAccessToken | jq -r '.id')
    if [ $? -ne 0 ]; then echo "FAILED: $? => aborting"; exit 1; fi

    echo " - $varTagName release record successfully created (id $releaseId)"
fi

# Assets defined by dir contents
if [ ! -z $varAssetsDir ]; then
    varAssetsTemp=$(ls -p $varAssetsDir | grep -v / | tr '\n' ';')
    for item in $(echo $varAssetsTemp | tr ";" "\n")
    do
        varAssets+=$varAssetsDir/$item;
        varAssets+=';'
    done
fi

#Upload additional assets
if [ ! -z $varAssets ]; then
    echo
    echo "Uploading assets:"
    echo "-----------------"
    echo " Files to upload:"
    echo " $varAssets"
    echo

    curlAuth="Authorization: token $varAccessToken"
    for filename in $(echo $varAssets | tr ";" "\n")
    do
        echo " - ${filename}:"
        curl -X POST -sH "$curlAuth" -H "Content-Type: application/octet-stream" --data-binary @"$filename" https://uploads.github.com/repos/$varRepoSlug/releases/$releaseId/assets?name=$(basename $filename)

        if [ $? -ne 0 ]; then echo "FAILED: $? => aborting"; exit 1; fi

        echo
        echo "OK"
        echo

    done
fi
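
deploy-release.sh had to look the release record up by tag (and create one if missing) before it could patch notes and upload assets; on-release.sh gets the release id directly from the event payload instead. The tag lookup it performed can be sketched as follows, using header authentication rather than the deprecated access_token query parameter (slug and tag are script arguments here, purely for illustration):

# Hypothetical lookup of a release id by tag.
slug="$1"   # e.g. owner/repo
tag="$2"
releaseId=$(curl -sH "Authorization: token $GITHUB_TOKEN" \
    "https://api.github.com/repos/$slug/releases/tags/$tag" | jq -r '.id')
echo "release id: $releaseId"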