## ===== .github/dependabot.yml =====
#Bare bones dependabot only for GH Workflow Files
version: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "daily"

## ===== .github/scripts/aarch64-Linux/env.sh =====
#!/usr/bin/env bash
##
# Common CI build environment (sourced, never executed directly):
# source <(curl -qfsSL "https://raw.githubusercontent.com/Azathothas/Toolpacks-Extras/main/.github/scripts/$(uname -m)-$(uname -s)/env.sh")
##

#-------------------------------------------------------#
#Identity + PATH (prepend common toolchain locations)
USER="$(whoami)" && export USER="${USER}"
HOME="$(getent passwd "${USER}" | cut -d: -f6)" && export HOME="${HOME}"
export PATH="${HOME}/bin:${HOME}/.cargo/bin:${HOME}/.cargo/env:${HOME}/.go/bin:${HOME}/go/bin:${HOME}/.local/bin:${HOME}/miniconda3/bin:${HOME}/miniconda3/condabin:/usr/local/zig:/usr/local/zig/lib:/usr/local/zig/lib/include:/usr/local/musl/bin:/usr/local/musl/lib:/usr/local/musl/include:$PATH"
#System temp dir (usually /tmp)
SYSTMP="$(dirname "$(mktemp -u)")" && export SYSTMP="${SYSTMP}"
#TMPDIRS holds a *command string*; recipes run it as: pushd "$($TMPDIRS)" to get a fresh build dir
TMPDIRS="mktemp -d --tmpdir=${SYSTMP}/toolpacks XXXXXXX_$(uname -m)_$(uname -s)" && export TMPDIRS="$TMPDIRS"
rm -rf "${SYSTMP}/toolpacks" 2>/dev/null ; mkdir -p "${SYSTMP}/toolpacks"
#BINDIR collects finished artifacts for upload
BINDIR="${SYSTMP}/toolpack_$(uname -m)" && export BINDIR="${BINDIR}"
rm -rf "${BINDIR}" 2>/dev/null ; mkdir -p "${BINDIR}"
#Force git to be non-interactive (never prompt for credentials)
export GIT_TERMINAL_PROMPT="0"
export GIT_ASKPASS="/bin/echo"
EGET_TIMEOUT="timeout -k 1m 2m" && export EGET_TIMEOUT="${EGET_TIMEOUT}"
USER_AGENT="$(curl -qfsSL 'https://pub.ajam.dev/repos/Azathothas/Wordlists/Misc/User-Agents/ua_chrome_macos_latest.txt')" && export USER_AGENT="${USER_AGENT}"
BUILD="YES" && export BUILD="${BUILD}"
#Docker (best-effort; errors deliberately suppressed — group/user may already exist)
sudo groupadd docker 2>/dev/null ; sudo usermod -aG docker "${USER}" 2>/dev/null
if ! sudo systemctl is-active --quiet docker; then
  sudo service docker restart >/dev/null 2>&1 ; sleep 10
fi
sudo systemctl status "docker.service" --no-pager
#Nix
source "/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh"
#sg docker newgrp "$(id -gn)"
cd "${HOME}" ; clear
##Sanity Checks (warn only — this file is sourced, so it must not exit the caller)
if [[ -z "${GITHUB_TOKEN}" ]]; then
  echo -e "\n[-] GITHUB_TOKEN is NOT Exported"
  echo -e "Export it to Use GH\n"
fi
if ! command -v git-lfs &> /dev/null; then
  echo -e "\n[-] git-lfs is NOT Installed\n"
fi
#huggingface-cli
if [[ -z "${HF_TOKEN}" ]]; then
  echo -e "\n[-] HF_TOKEN is NOT Exported"
  echo -e "Export it to Use huggingface-cli\n"
fi
if ! command -v huggingface-cli &> /dev/null; then
  echo -e "\n[-] huggingface-cli is NOT Installed\n"
fi
#-------------------------------------------------------#
history -c 2>/dev/null ; rm -rf "${HOME}/.bash_history" ; pushd "$(mktemp -d)" >/dev/null 2>&1
#-------------------------------------------------------#

## ===== .github/scripts/x86_64-Linux/build_debian.sh (header) =====
#!/usr/bin/env bash

#-------------------------------------------------------#
# This should be run on Debian (Debian Based) Distros with apt, coreutils, curl, dos2unix & passwordless sudo
# sudo apt-get update -y && sudo apt-get install coreutils curl dos2unix moreutils -y
# OR (without sudo): apt-get update -y && apt-get install coreutils curl dos2unix moreutils sudo -y
#
# Hardware : At least 2vCPU + 8GB RAM + 50GB SSD
# Once requirement is satisfied, simply:
# export GITHUB_TOKEN="NON_PRIVS_READ_ONLY_TOKEN"
# bash <(curl -qfsSL "https://pub.ajam.dev/repos/Azathothas/Toolpacks/.github/scripts/$(uname -m)-$(uname -s)/build_debian.sh")
#-------------------------------------------------------#
#-------------------------------------------------------#
##ENV:$PATH
 source <(curl -qfsSL "https://raw.githubusercontent.com/Azathothas/Toolpacks-Extras/main/.github/scripts/$(uname -m)-$(uname -s)/env.sh")
#-------------------------------------------------------#

#-------------------------------------------------------#
##Init
 #Fetch the distro init script and source it; it must set CONTINUE=YES on success
 INITSCRIPT="$(mktemp --tmpdir=${SYSTMP} XXXXX_init.sh)" && export INITSCRIPT="$INITSCRIPT"
 curl -qfsSL "https://raw.githubusercontent.com/Azathothas/Toolpacks-Extras/main/.github/scripts/$(uname -m)-$(uname -s)/init_debian.sh" -o "$INITSCRIPT"
 chmod +xwr "$INITSCRIPT" && source "$INITSCRIPT"
 #Check
 if [ "$CONTINUE" != "YES" ]; then
   echo -e "\n[+] Failed To Initialize\n"
   exit 1
 fi
##Ulimits
#(-n) Open File Descriptors
 echo -e "[+] ulimit -n (open file descriptors) :: [Soft --> $(ulimit -n -S)] [Hard --> $(ulimit -n -H)] [Total --> $(cat '/proc/sys/fs/file-max')]"
 ulimit -n "$(ulimit -n -H)"
#Stack Size
 ulimit -s unlimited
#-------------------------------------------------------#

#-------------------------------------------------------#
##Sanity Checks
if [[ -n "$GITHUB_TOKEN" ]]; then
  echo -e "\n[+] GITHUB_TOKEN is Exported"
  ##gh-cli (uses $GITHUB_TOKEN env var)
  #echo "$GITHUB_TOKEN" | gh auth login --with-token
  gh auth status
  ##eget
  # 5000 req/minute (80 req/minute)
  eget --rate
else
  # 60 req/hr
  echo -e "\n[-] GITHUB_TOKEN is NOT Exported"
  echo -e "Export it to avoid ratelimits\n"
  eget --rate
  exit 1
fi
#hf
if ! command -v huggingface-cli &> /dev/null; then
  echo -e "\n[-] huggingface-cli is NOT Installed"
  exit 1
fi
if [[ -n "${HF_TOKEN}" ]]; then
  echo -e "\n[+] HF_TOKEN is Exported"
  git config --global "credential.helper" store
  git config --global "user.email" "AjamX101@gmail.com"
  git config --global "user.name" "Azathothas"
  huggingface-cli login --token "${HF_TOKEN}" --add-to-git-credential
else
  echo -e "\n[-] HF_TOKEN is NOT Exported"
  echo -e "Export it to use huggingface-cli\n"
  exit 1
fi
#-------------------------------------------------------#

# TODO(review): "LOG SINGLE" placeholder kept as a comment — as a bare word it
# would have been executed as a command and failed with "command not found".
#LOG SINGLE

#-------------------------------------------------------#
##ENV (In Case of ENV Resets)
#TMPDIRS
 #For build-cache
 TMPDIRS="mktemp -d --tmpdir=${SYSTMP}/toolpacks XXXXXXX_$(uname -m)_$(uname -s)" && export TMPDIRS="$TMPDIRS"
 rm -rf "${SYSTMP}/toolpacks" 2>/dev/null ; mkdir -p "${SYSTMP}/toolpacks"
 #For Bins
 BINDIR="${SYSTMP}/toolpack_$(uname -m)" && export BINDIR="${BINDIR}"
 rm -rf "${BINDIR}" 2>/dev/null ; mkdir -p "${BINDIR}"
##Build
set +x
 BUILD="YES" && export BUILD="$BUILD"
 #ENV
 BUILDSCRIPT="$(mktemp --tmpdir=${SYSTMP} XXXXX_build.sh)" && export BUILDSCRIPT="$BUILDSCRIPT"
 #Get URlS
 #FIX: repo owner was misspelt "AAzathothas", and the jq output was NOT piped
 #into grep (the bare continuation made "grep" an extra argument to jq).
 curl -qfsSL "https://api.github.com/repos/Azathothas/Toolpacks-Extras/contents/.github/scripts/$(uname -m)-$(uname -s)/pkgs" \
  -H "Authorization: Bearer ${GITHUB_TOKEN}" | jq -r '.[] | select(.download_url | endswith(".sh")) | .download_url' \
  | grep -i "\.sh$" | sort -u -o "${SYSTMP}/BUILDURLS"
 #Run
 echo -e "\n\n [+] Started Building at :: $(TZ='Asia/Kathmandu' date +'%A, %Y-%m-%d (%I:%M:%S %p)')\n\n"
 readarray -t RECIPES < "${SYSTMP}/BUILDURLS"
 unset TOTAL_RECIPES
 TOTAL_RECIPES="${#RECIPES[@]}" && export TOTAL_RECIPES="${TOTAL_RECIPES}" ; echo -e "\n[+] Total RECIPES :: ${TOTAL_RECIPES}\n"
 for ((i=0; i<${#RECIPES[@]}; i++)); do
   #Init
   START_TIME="$(date +%s)" && export START_TIME="$START_TIME"
   RECIPE="${RECIPES[i]}"
   CURRENT_RECIPE=$((i+1))
   echo -e "\n[+] Fetching : ${RECIPE} (${CURRENT_RECIPE}/${TOTAL_RECIPES})\n"
   #Fetch
   curl -qfsSL "${RECIPE}" -o "$BUILDSCRIPT"
   chmod +xwr "$BUILDSCRIPT"
   #Run (|| true: one broken recipe must not abort the whole batch)
   source "$BUILDSCRIPT" || true
   #Clean & Purge the per-recipe build cache
   sudo rm -rf "${SYSTMP}/toolpacks" 2>/dev/null
   mkdir -p "${SYSTMP}/toolpacks"
   #Finish
   END_TIME="$(date +%s)" && export END_TIME="$END_TIME"
   ELAPSED_TIME="$(date -u -d@"$((END_TIME - START_TIME))" "+%H(Hr):%M(Min):%S(Sec)")"
   echo -e "\n[+] Completed (Building|Fetching) $BIN [$SOURCE_URL] :: $ELAPSED_TIME\n"
 done
 echo -e "\n\n [+] Finished Building at :: $(TZ='Asia/Kathmandu' date +'%A, %Y-%m-%d (%I:%M:%S %p)')\n\n"
 #Check: a BINDIR measured in K is considered broken/empty
 BINDIR_SIZE="$(du -sh "${BINDIR}" 2>/dev/null | awk '{print $1}' 2>/dev/null)" && export "BINDIR_SIZE=${BINDIR_SIZE}"
 if [ ! -d "${BINDIR}" ] || [ -z "$(ls -A "${BINDIR}")" ] || [ -z "${BINDIR_SIZE}" ] || [[ "${BINDIR_SIZE}" == *K* ]]; then
   echo -e "\n[+] Broken/Empty Built ${BINDIR} Found\n"
   exit 1
 else
   echo -e "\n[+] Built ${BINDIR} :: ${BINDIR_SIZE}\n"
 fi
#-------------------------------------------------------#

#-------------------------------------------------------#
#Cleanup [${BINDIR}]
 #Chmod +xwr
 find "${BINDIR}" -maxdepth 1 -type f -exec sudo chmod +xwr {} \; 2>/dev/null
#-------------------------------------------------------#
##Sync to https://raw.githubusercontent.com/Azathothas/Toolpacks-Extras/refs/heads/main/$(uname -m)-$(uname -s)
#Setup Repo
pushd "$(mktemp -d)" >/dev/null 2>&1 && git clone --depth="1" --filter="blob:none" --no-checkout "https://huggingface.co/datasets/Azathothas/Toolpacks-Extras" && cd "./Toolpacks-Extras"
 git sparse-checkout set "." && git checkout
 git lfs install
 huggingface-cli lfs-enable-largefiles "."
+ TOOLPACKS_BIN="$(realpath .)" && export TOOLPACKS_BIN="${TOOLPACKS_BIN}" + HF_REPO_PATH="$(realpath ${TOOLPACKS_BIN})/$(uname -m)-$(uname -s)" && export HF_REPO_PATH="${HF_REPO_PATH}" + git fetch origin main ; git lfs track "${HF_REPO_PATH}/**" +popd >/dev/null 2>&1 +#-------------------------------------------------------# + + +#-------------------------------------------------------# +##Fetch Bins +pushd "${TOOLPACKS_BIN}" >/dev/null 2>&1 + rsync -av --checksum --copy-links --human-readable --remove-source-files --exclude="*/" "${BINDIR}/." "${HF_REPO_PATH}/" +popd >/dev/null 2>&1 +##Generate Metadata +#Chmod +xwr + find "${BINDIR}" -maxdepth 1 -type f -exec chmod +xwr {} \; 2>/dev/null + #File + cd "${BINDIR}" && find "./" -maxdepth 1 -type f | grep -v -E '\.jq$|\.txt$|\.upx$' | sort | xargs file > "${SYSTMP}/$(uname -m)-$(uname -s)_FILE" + rsync -av --checksum --copy-links --human-readable --remove-source-files --exclude="*/" "${SYSTMP}/$(uname -m)-$(uname -s)_FILE" "${HF_REPO_PATH}/FILE.txt" +#Size (Dust) + dust --depth 1 --only-file --no-percent-bars --no-colors --ignore_hidden --reverse --number-of-lines 99999999 "${BINDIR}" | tee "${SYSTMP}/$(uname -m)-$(uname -s)_SIZE.txt" + rsync -av --checksum --copy-links --human-readable --remove-source-files --exclude="*/" "${SYSTMP}/$(uname -m)-$(uname -s)_SIZE.txt" "${HF_REPO_PATH}/SIZE.txt" +#BLAKE3SUM + cd "${BINDIR}" && find "./" -maxdepth 1 -type f | grep -v -E '\.jq$|\.txt$|\.upx$' | sort | xargs b3sum > "${SYSTMP}/$(uname -m)-$(uname -s)_BLAKE3SUM" + rsync -av --checksum --copy-links --human-readable --remove-source-files --exclude="*/" "${SYSTMP}/$(uname -m)-$(uname -s)_BLAKE3SUM" "${HF_REPO_PATH}/BLAKE3SUM.txt" +#SHA256SUM + cd "${BINDIR}" && find "./" -maxdepth 1 -type f | grep -v -E '\.jq$|\.txt$|\.upx$' | sort | xargs sha256sum > "${SYSTMP}/$(uname -m)-$(uname -s)_SHA256SUM" + rsync -av --checksum --copy-links --human-readable --remove-source-files --exclude="*/" "${SYSTMP}/$(uname -m)-$(uname 
-s)_SHA256SUM" "${HF_REPO_PATH}/SHA256SUM.txt" +#Sync Repo +pushd "${TOOLPACKS_BIN}" >/dev/null 2>&1 + BINDIR_SIZE="$(du -sh "${HF_REPO_PATH}" 2>/dev/null | awk '{print $1}' 2>/dev/null)" && export "BINDIR_SIZE=${BINDIR_SIZE}" + git add --all --verbose && git commit -m "[+] Built ($(uname -m)-$(uname -s) [${BINDIR_SIZE}B $(TZ='UTC' date +'%Y_%m_%d')]" ; df -h "/" 2>/dev/null + git branch -a || git show-branch + git fetch origin main ; git push origin main +popd >/dev/null 2>&1 +#-------------------------------------------------------# + + +#-------------------------------------------------------# +##Generate Metadata +pushd "${HF_REPO_PATH}" >/dev/null && curl -qfsSL "https://pub.ajam.dev/utils/devscripts/jq/to_human_bytes.jq" -o "./to_human_bytes.jq" + +rclone lsjson --fast-list "." | jq -r 'include "./to_human_bytes" ; .[] | select(.Size != 0 and .Size != -1 and (.Name | test("\\.(7z|bz2|gz|jq|json|md|rar|tar|tgz|tmp|txt|upx|zip)$") | not)) | {name: (.Name), build_date: (.ModTime | split(".")[0]), build_script, b3sum, description, download_url: "https://bin.ajam.dev/arm64_v8a_Android/\(.Path)", extra_bins, note, sha256, size: (.Size | tonumber | bytes), repo_url, web_url}' | jq -s 'sort_by(.name)' | jq '.[]' > "./metadata.json.tmp_arm64_v8a_Android" & + + + +GEN INITITAL META + + + +GEN FINAL META +SYNC GIT + +#-------------------------------------------------------# +##END + echo -e "\n\n[+] Size ${HF_REPO_PATH} --> $(du -sh ${HF_REPO_PATH} | awk '{print $1}')" +#GH Runner + if [ "$USER" = "runner" ] || [ "$(whoami)" = "runner" ]; then + #Preserve Files for Artifacts + echo -e "\n[+] Detected GH Actions... Preserving Logs & Output\n" + else + #Purge Files + echo -e "\n[+] PURGING Logs & Output in 180 Seconds... 
(Hit Ctrl + C)\n" ; sleep 180 + rm -rf "${HF_REPO_PATH}" 2>/dev/null + fi +#VARS +unset GIT_ASKPASS GIT_TERMINAL_PROMPT +unset AR CC CXX DLLTOOL HOST_CC HOST_CXX OBJCOPY RANLIB +#EOF +#-------------------------------------------------------# \ No newline at end of file diff --git a/.github/scripts/x86_64-Linux/env.sh b/.github/scripts/x86_64-Linux/env.sh new file mode 100644 index 00000000..10579a26 --- /dev/null +++ b/.github/scripts/x86_64-Linux/env.sh @@ -0,0 +1,47 @@ +#!/usr/bin/env bash +## +# source <(curl -qfsSL "https://raw.githubusercontent.com/Azathothas/Toolpacks-Extras/main/.github/scripts/$(uname -m)-$(uname -s)/env.sh") +## + +#-------------------------------------------------------# +USER="$(whoami)" && export USER="${USER}" +HOME="$(getent passwd ${USER} | cut -d: -f6)" && export HOME="${HOME}" +export PATH="${HOME}/bin:${HOME}/.cargo/bin:${HOME}/.cargo/env:${HOME}/.go/bin:${HOME}/go/bin:${HOME}/.local/bin:${HOME}/miniconda3/bin:${HOME}/miniconda3/condabin:/usr/local/zig:/usr/local/zig/lib:/usr/local/zig/lib/include:/usr/local/musl/bin:/usr/local/musl/lib:/usr/local/musl/include:$PATH" +SYSTMP="$(dirname $(mktemp -u))" && export SYSTMP="${SYSTMP}" +TMPDIRS="mktemp -d --tmpdir=${SYSTMP}/toolpacks XXXXXXX_$(uname -m)_$(uname -s)" && export TMPDIRS="$TMPDIRS" +rm -rf "${SYSTMP}/toolpacks" 2>/dev/null ; mkdir -p "${SYSTMP}/toolpacks" +BINDIR="${SYSTMP}/toolpack_$(uname -m)" && export BINDIR="${BINDIR}" +rm -rf "${BINDIR}" 2>/dev/null ; mkdir -p "${BINDIR}" +export GIT_TERMINAL_PROMPT="0" +export GIT_ASKPASS="/bin/echo" +EGET_TIMEOUT="timeout -k 1m 2m" && export EGET_TIMEOUT="${EGET_TIMEOUT}" +USER_AGENT="$(curl -qfsSL 'https://pub.ajam.dev/repos/Azathothas/Wordlists/Misc/User-Agents/ua_chrome_macos_latest.txt')" && export USER_AGENT="${USER_AGENT}" +BUILD="YES" && export BUILD="${BUILD}" +sudo groupadd docker 2>/dev/null ; sudo usermod -aG docker "${USER}" 2>/dev/null +if ! 
sudo systemctl is-active --quiet docker; then + sudo service docker restart >/dev/null 2>&1 ; sleep 10 +fi +sudo systemctl status "docker.service" --no-pager +#Nix +source "/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh" +#sg docker newgrp "$(id -gn)" +cd "${HOME}" ; clear +##Sanity Checks +if [[ ! -n "${GITHUB_TOKEN}" ]]; then + echo -e "\n[-] GITHUB_TOKEN is NOT Exported" + echo -e "Export it to Use GH\n" +fi +if ! command -v git-lfs &> /dev/null; then + echo -e "\n[-] git-lfs is NOT Installed\n" +fi +#huggingface-cli +if [[ ! -n "${HF_TOKEN}" ]]; then + echo -e "\n[-] HF_TOKEN is NOT Exported" + echo -e "Export it to Use huggingface-cli\n" +fi +if ! command -v huggingface-cli &> /dev/null; then + echo -e "\n[-] huggingface-cli is NOT Installed\n" +fi +#-------------------------------------------------------# +history -c 2>/dev/null ; rm -rf "${HOME}/.bash_history" ; pushd "$(mktemp -d)" >/dev/null 2>&1 +#-------------------------------------------------------# \ No newline at end of file diff --git a/.github/scripts/x86_64-Linux/gen_meta.sh b/.github/scripts/x86_64-Linux/gen_meta.sh new file mode 100644 index 00000000..8aec73fa --- /dev/null +++ b/.github/scripts/x86_64-Linux/gen_meta.sh @@ -0,0 +1,159 @@ +#!/usr/bin/env bash + +#-------------------------------------------------------# +##ENV +SYSTMP="$(dirname $(mktemp -u))" && export SYSTMP="${SYSTMP}" +TMPDIR="$(mktemp -d)" && export TMPDIR="${TMPDIR}" ; echo -e "\n[+] Using TEMP: ${TMPDIR}\n" +BUILDYAML="$(mktemp --tmpdir=${TMPDIR} XXXXX.yaml)" && export BUILDYAML="${BUILDYAML}" +HF_REPO="https://huggingface.co/datasets/Azathothas/Toolpacks-Extras/raw/main" && export HF_REPO="${HF_REPO}" +#GH_REPO="https://pub.ajam.dev/repos/Azathothas/Toolpacks-Extras" && export GH_REPO="${GH_REPO}" +GH_REPO="https://raw.githubusercontent.com/Azathothas/Toolpacks-Extras/refs/heads/main" && export GH_REPO="${GH_REPO}" +#Get URlS +curl -qfsSL 
"https://api.github.com/repos/AAzathothas/Toolpacks-Extras/contents/.github/scripts/$(uname -m)-$(uname -s)/pkgs" \ +-H "Authorization: Bearer ${GITHUB_TOKEN}" | jq -r '.[] | select(.download_url | endswith(".yaml")) | .download_url' \ +grep -i '\.yaml$' | sort -u -o "${TMPDIR}/BUILDURLS" +#Get METADATA.json +curl -qfsSL "${HF_REPO}/$(uname -m)-$(uname -s)/METADATA.json.tmp" -o "${TMPDIR}/METADATA.json" || curl -qfsSL "${HF_REPO}/$(uname -m)-$(uname -s)/METADATA.json" -o "${TMPDIR}/METADATA.json" +#Get BLAKE3SUM.txt +curl -qfsSL "${HF_REPO}/$(uname -m)-$(uname -s)/BLAKE3SUM.txt" -o "${TMPDIR}/BLAKE3SUM.txt" +#Get SHA256SUM.txt +curl -qfsSL "${HF_REPO}/$(uname -m)-$(uname -s)/SHA256SUM.txt" -o "${TMPDIR}/SHA256SUM.txt" +##Sanity +if [[ -n "${GITHUB_TOKEN}" ]]; then + echo -e "\n[+] GITHUB_TOKEN is Exported" +else + # 60 req/hr + echo -e "\n[-] GITHUB_TOKEN is NOT Exported" + echo -e "Export it to avoid ratelimits\n" + exit 1 +fi +if ! command -v git-lfs &> /dev/null; then + echo -e "\n[-] git-lfs is NOT Installed\n" + exit 1 +fi +if [[ -n "${HF_TOKEN}" ]]; then + echo -e "\n[+] HF_TOKEN is Exported" +else + echo -e "\n[-] HF_TOKEN is NOT Exported" + echo -e "Export it to use huggingface-cli\n" + exit 1 +fi +if ! command -v huggingface-cli &> /dev/null; then + echo -e "\n[-] huggingface-cli is NOT Installed\n" + exit 1 +fi +if [ ! -s "${TMPDIR}/BUILDURLS" ] || [ ! -s "${TMPDIR}/METADATA.json" ] || [ ! -s "${TMPDIR}/BLAKE3SUM.txt" ] || [ ! 
-s "${TMPDIR}/SHA256SUM.txt" ]; then + echo -e "\n[-] Required Files Aren't Available\n" + exit 1 +fi +#-------------------------------------------------------# + +#-------------------------------------------------------# +##Run +echo -e "\n\n [+] Started Metadata Update at :: $(TZ='UTC' date +'%A, %Y-%m-%d (%I:%M:%S %p)')\n\n" + for BUILD_URL in $(cat "${TMPDIR}/BUILDURLS" | sed 's/"//g'); do + echo -e "\n[+] Fetching : ${BUILD_URL}" + if curl -qfsSL "${BUILD_URL}" -o "${BUILDYAML}" &> /dev/null; then + dos2unix --quiet "${BUILDYAML}" + #Sanity Check + if [ "$(yq e '.path' "${BUILDYAML}")" = "/" ]; then + #export Name + NAME="$(yq -r '.name' ${BUILDYAML})" && export NAME="${NAME}" + #export Bin Name + BIN_NAME="$(yq -r '.bin_name' ${BUILDYAML})" && export BIN_NAME="${BIN_NAME}" + #export Description (Descr) + DESCRIPTION="$(yq -r '.description' ${BUILDYAML})" && export DESCRIPTION="${DESCRIPTION}" + #export CATEGORY + CATEGORY="$(yq -r '.category[]' "${BUILDYAML}" | paste -sd ',' - | tr -d '[:space:]')" && export CATEGORY="${CATEGORY}" + #export Notes (Note) + NOTE="$(yq -r '.note' ${BUILDYAML})" && export NOTE="$NOTE" + #export REPO_URL + REPO_URL="$(yq -r '.src_url' ${BUILDYAML})" && export REPO_URL="$REPO_URL" + #export WEB_URL (WebURL) + WEB_URL="$(yq -r '.web_url' ${BUILDYAML})" && export WEB_URL="$WEB_URL" + #export Build Script + BUILD_SCRIPT="$(echo "${BUILD_URL}" | sed 's|\.yaml$|.sh|')" && export BUILD_SCRIPT="${BUILD_SCRIPT}" + #export BIN= + yq -r '.bins[]' "${BUILDYAML}" | sort -u -o "${TMPDIR}/BINS.txt" + #Merge with json + for BIN in $(cat "${TMPDIR}/BINS.txt" | sed 's/"//g'); do + #BSUM + B3SUM="$(cat "${TMPDIR}/BLAKE3SUM.txt" | grep --fixed-strings --ignore-case --word-regexp "${BIN}" | awk '{print $1}' | sort -u | head -n 1 | sed 's/"//g' | sed 's/^[ \t]*//;s/[ \t]*$//' | sed 's/["'\'']//g' | sed 's/`//g' | sed 's/|//g' | tr -d '[:space:]')" && export B3SUM="${B3SUM}" + jq --arg BIN "$BIN" --arg BSUM "$B3SUM" '.[] |= if .name == $BIN then . 
+ {bsum: $BSUM} else . end' "${TMPDIR}/METADATA.json" > "${TMPDIR}/METADATA.tmp" && mv "${TMPDIR}/METADATA.tmp" "${TMPDIR}/METADATA.json" + #Build_Log + BUILD_LOG="${HF_REPO}/$(uname -m)-$(uname -s)/${NAME}_.log" && export BUILD_LOG="${BUILD_LOG}" + jq --arg BIN "$BIN" --arg BUILD_LOG "${BUILD_LOG}" '.[] |= if .name == $BIN then . + {build_log: ${BUILD_LOG}} else . end' "${TMPDIR}/METADATA.json" > "${TMPDIR}/METADATA.tmp" && mv "${TMPDIR}/METADATA.tmp" "${TMPDIR}/METADATA.json" + #Build_Script + jq --arg BIN "$BIN" --arg BUILD_SCRIPT "${BUILD_SCRIPT}" '.[] |= if .name == $BIN then . + {build_script: ${BUILD_SCRIPT}} else . end' "${TMPDIR}/METADATA.json" > "${TMPDIR}/METADATA.tmp" && mv "${TMPDIR}/METADATA.tmp" "${TMPDIR}/METADATA.json" + #Category + jq --arg BIN "$BIN" --arg CATEGORY "${CATEGORY}" '.[] |= if .name == $BIN then . + {category: ${CATEGORY}} else . end' "${TMPDIR}/METADATA.json" > "${TMPDIR}/METADATA.tmp" && mv "${TMPDIR}/METADATA.tmp" "${TMPDIR}/METADATA.json" + #Description + jq --arg BIN "$BIN" --arg DESCRIPTION "${DESCRIPTION}" '.[] |= if .name == $BIN then . + {description: ${DESCRIPTION}} else . end' "${TMPDIR}/METADATA.json" > "${TMPDIR}/METADATA.tmp" && mv "${TMPDIR}/METADATA.tmp" "${TMPDIR}/METADATA.json" + #Extras (All Bins) + EXTRA_BINS="$(cat ${TMPDIR}/BINS.txt | sed "/^$BIN$/d" | paste -sd ',' -)" && export EXTRA_BINS="${EXTRA_BINS}" + jq --arg BIN "$BIN" --arg EXTRA_BINS "$EXTRA_BINS" '.[] |= if .name == $BIN then . + {extra_bins: $EXTRA_BINS} else . end' "${TMPDIR}/METADATA.json" > "${TMPDIR}/METADATA.tmp" && mv "${TMPDIR}/METADATA.tmp" "${TMPDIR}/METADATA.json" + #Note + jq --arg BIN "$BIN" --arg NOTE "$NOTE" '.[] |= if .name == $BIN then . + {note: $NOTE} else . 
end' "${TMPDIR}/METADATA.json" > "${TMPDIR}/METADATA.tmp" && mv "${TMPDIR}/METADATA.tmp" "${TMPDIR}/METADATA.json" + #SHASUM + SHA256="$(cat "${TMPDIR}/SHA256SUM.txt" | grep --fixed-strings --ignore-case --word-regexp "${BIN}" | awk '{print $1}' | sort -u | head -n 1 | sed 's/"//g' | sed 's/^[ \t]*//;s/[ \t]*$//' | sed 's/["'\'']//g' | sed 's/`//g' | sed 's/|//g' | tr -d '[:space:]')" && export SHA256="$SHA256" + jq --arg BIN "$BIN" --arg SHASUM "$SHA256" '.[] |= if .name == $BIN then . + {shasum: $SHASUM} else . end' "${TMPDIR}/METADATA.json" > "${TMPDIR}/METADATA.tmp" && mv "${TMPDIR}/METADATA.tmp" "${TMPDIR}/METADATA.json" + #Version + VERSION="$(curl -qfsSL "${HF_REPO}/$(uname -m)-$(uname -s)/${NAME}_.version" | tr -d '[:space:]')" && export VERSION="${VERSION}" + if [ -z "${VERSION}" ]; then + export VERSION="latest" + fi + jq --arg BIN "$BIN" --arg VERSION "${VERSION}" '.[] |= if .name == $BIN then . + {version: ${VERSION}} else . end' "${TMPDIR}/METADATA.json" > "${TMPDIR}/METADATA.tmp" && mv "${TMPDIR}/METADATA.tmp" "${TMPDIR}/METADATA.json" + #Web URLs + jq --arg BIN "$BIN" --arg WEB_URL "$WEB_URL" '.[] |= if .name == $BIN then . + {web_url: $WEB_URL} else . end' "${TMPDIR}/METADATA.json" > "${TMPDIR}/METADATA.tmp" && mv "${TMPDIR}/METADATA.tmp" "${TMPDIR}/METADATA.json" + #Sort & Map + jq 'map({name: (.name // "" | if . == null or . == "" then "" else . end), bin_name: (.bin_name // "" | if . == null or . == "" then "" else . end), description: (.description // "" | if . == null or . == "" then "" else . end), note: (.note // "" | if . == null or . == "" then "" else . end), version: (.version // "" | if . == null or . == "" then "" else . end), download_url: (.download_url // "" | if . == null or . == "" then "" else . end), size: (.size // "" | if . == null or . == "" then "" else . end), bsum: (.bsum // "" | if . == null or . == "" then "" else . end), shasum: (.shasum // "" | if . == null or . == "" then "" else . 
end), build_date: (.build_date // "" | if . == null or . == "" then "" else . end), src_url: (.src_url // "" | if . == null or . == "" then "" else . end), web_url: (.web_url // "" | if . == null or . == "" then "" else . end), build_script: (.build_script // "" | if . == null or . == "" then "" else . end), build_log: (.build_log // "" | if . == null or . == "" then "" else . end), category: (.category // "" | if . == null or . == "" then "" else . end), extra_bins: (.extra_bins // "" | if . == null or . == "" then "" else . end)})' "${TMPDIR}/METADATA.json" > "${TMPDIR}/METADATA.tmp" && mv "${TMPDIR}/METADATA.tmp" "${TMPDIR}/METADATA.json" + #Print json + echo -e "\n[+] BIN: $BIN" + jq --arg BIN "$BIN" '.[] | select(.name == $BIN)' "${TMPDIR}/METADATA.json" 2>/dev/null | tee "${TMPDIR}/METADATA.json.bak.tmp" + #Append + if jq --exit-status . "${TMPDIR}/METADATA.json.bak.tmp" >/dev/null 2>&1; then + cat "${TMPDIR}/METADATA.json.bak.tmp" >> "${TMPDIR}/METADATA.json.bak" + fi + done + fi + fi + done +#-------------------------------------------------------# + + +#-------------------------------------------------------# +#Configure git + git config --global "credential.helper" store + git config --global "user.email" "AjamX101@gmail.com" + git config --global "user.name" "Azathothas" +#Login + huggingface-cli login --token "${HF_TOKEN}" --add-to-git-credential +#Clone + pushd "$(mktemp -d)" >/dev/null 2>&1 && git clone --depth="1" --filter="blob:none" --no-checkout "https://huggingface.co/datasets/Azathothas/Toolpacks-Snapshots" && cd "./Toolpacks-Snapshots" + git sparse-checkout set "." && git checkout + TOOLPACKS_BIN="$(realpath .)" && export TOOLPACKS_BIN="${TOOLPACKS_BIN}" + git lfs install + huggingface-cli lfs-enable-largefiles "." 
+ popd >/dev/null 2>&1 +#-------------------------------------------------------# + +#-------------------------------------------------------# +#Update HF +echo -e "\n[+] Updating Metadata.json ($(realpath ${TMPDIR}/METADATA.json))\n" +if jq --exit-status . "${TMPDIR}/METADATA.json.bak" >/dev/null 2>&1; then + cat "${TMPDIR}/METADATA.json.bak" | jq -s '.' | jq 'walk(if type == "string" and . == "null" then "" else . end)' > "${TMPDIR}/METADATA.json" + #Sync + pushd "${TOOLPACKS_BIN}" >/dev/null 2>&1 + git fetch origin main + rm "./METADATA.json.tmp" + cp "${TMPDIR}/METADATA.json" "./METADATA.json" + #Commit & Push + git add --all --verbose && git commit -m "[+] Snapshot (METADATA.json) $(TZ='UTC' date +'%Y_%m_%d')]" + git branch -a || git show-branch + git fetch origin main ; git push origin main + popd >/dev/null 2>&1 +else + echo -e "\n[-] FATAL: ($(realpath ${TMPDIR}/METADATA.json.bak)) is Inavlid\n" + exit 1 +fi +#-------------------------------------------------------# \ No newline at end of file diff --git a/.github/scripts/x86_64-Linux/pkgs/mullvad-browser.sh b/.github/scripts/x86_64-Linux/pkgs/mullvad-browser.sh new file mode 100644 index 00000000..a2400355 --- /dev/null +++ b/.github/scripts/x86_64-Linux/pkgs/mullvad-browser.sh @@ -0,0 +1,55 @@ +#!/usr/bin/env bash +set +x +#-------------------------------------------------------# +#Sanity Checks +if [ "${BUILD}" != "YES" ] || \ + [ -z "${BINDIR}" ] || \ + [ -z "${GIT_TERMINAL_PROMPT}" ] || \ + [ -z "${GIT_ASKPASS}" ] || \ + [ -z "${GITHUB_TOKEN}" ] || \ + [ -z "${SYSTMP}" ] || \ + [ -z "${TMPDIRS}" ]; then + #exit + echo -e "\n[+]Skipping Builds...\n" + exit 1 +fi +#-------------------------------------------------------# + +#-------------------------------------------------------# +##Main +export SKIP_BUILD="NO" +if [ "$SKIP_BUILD" == "NO" ]; then + #mullvad-browser : Unarchiver + export BIN="mullvad-browser" + export SOURCE_URL="https://github.com/mullvad/mullvad-browser" + echo -e "\n\n [+] (Building | 
Fetching) $BIN :: $SOURCE_URL\n" + #Fetch (NO --exclude-pre-releases) + pushd "$($TMPDIRS)" >/dev/null 2>&1 + RELEASE_TAG="$(gh release list --repo "${SOURCE_URL}" --order "desc" --exclude-drafts --json "tagName" | jq -r '.[0].tagName | gsub("\\s+"; "")' | tr -d '[:space:]')" && export RELEASE_TAG="${RELEASE_TAG}" + gh release view "${RELEASE_TAG}" --repo "${SOURCE_URL}" --json "assets" \ + --jq '.assets[].name' | grep -P 'linux.*x.*64.*\.tar\.xz$' | xargs -I "{}" \ + gh release download --repo "${SOURCE_URL}" "${RELEASE_TAG}" --clobber --pattern "{}" \ + --output "${BINDIR}/mullvad-browser.tar.xz" + PKG_VERSION="$(echo ${RELEASE_TAG})" && export PKG_VERSION="${PKG_VERSION}" + realpath "${BINDIR}/mullvad-browser.tar.xz" | xargs -I {} sh -c 'file {}; b3sum {}; sha256sum {}; du -sh {}' + #Meta + if [[ -f "${BINDIR}/mullvad-browser.tar.xz" ]] && [[ $(stat -c%s "${BINDIR}/mullvad-browser.tar.xz") -gt 1024 ]]; then + echo "${PKG_VERSION}" > "${BINDIR}/mullvad-browser.tar.xz_.version" + fi + #End + unset BIN SOURCE_URL RELEASE_TAG + popd >/dev/null 2>&1 +fi +#-------------------------------------------------------# + +#-------------------------------------------------------# +##Cleanup +unset SKIP_BUILD ; export BUILT="YES" +#In case of zig polluted env +unset AR CC CFLAGS CXX CPPFLAGS CXXFLAGS DLLTOOL HOST_CC HOST_CXX LDFLAGS LIBS OBJCOPY RANLIB +#In case of go polluted env +unset GOARCH GOOS CGO_ENABLED CGO_CFLAGS +#PKG Config +unset PKG_CONFIG_PATH PKG_CONFIG_LIBDIR PKG_CONFIG_SYSROOT_DIR PKG_CONFIG_SYSTEM_INCLUDE_PATH PKG_CONFIG_SYSTEM_LIBRARY_PATH +set +x +#-------------------------------------------------------# \ No newline at end of file diff --git a/.github/scripts/x86_64-Linux/pkgs/mullvad-browser.yaml b/.github/scripts/x86_64-Linux/pkgs/mullvad-browser.yaml new file mode 100644 index 00000000..944ee9eb --- /dev/null +++ b/.github/scripts/x86_64-Linux/pkgs/mullvad-browser.yaml @@ -0,0 +1,15 @@ 
## ===== .github/scripts/x86_64-Linux/pkgs/mullvad-browser.yaml =====
#https://github.com/ivan-hc/AM/blob/main/programs/x86_64/mullvad-browser
name: "mullvad-browser"
bin_name: "mullvad-browser"
bins:
  - "mullvad-browser.tar.xz"
category:
  - "anonymity"
  - "browser"
  - "privacy"
  - "tor"
description: "Privacy-focused browser for Linux, macOS and Windows"
note: "This is a tar.xz Archive (CI_VERIFIED)"
web_url: "https://mullvad.net/en/browser"
path: "/"
src_url: "https://gitlab.torproject.org/tpo/applications/mullvad-browser/"

## ===== .github/temp/specs_draft.yaml =====
# Draft field-by-field spec for the per-package metadata schema.
name: "#Contains the Name of the $BINARY itself, this is NOT what it will/should be Installed as"
bin_name: "#Contains the real name, the $BINARY will be installed as"
description: "#Contains the Description of the $BINARY/$PKG_FAMILY [Otherwise EMPTY]"
category: "#Contains the $BINARY/$PKG_FAMILY's Category"
note: "#Contains Additional Notes,Refs,Info the user need to be aware of, of the $BINARY/$PKG_FAMILY [Otherwise EMPTY]"
version: "#Contains the version of the $BINARY "
src_url: "#Contains the Git/Source URL of the $BINARY/$PKG_FAMILY [Otherwise EMPTY]"
web_url: "#Contains the Website/Project Page URL of the $BINARY/$PKG_FAMILY [Otherwise EMPTY]"
download_url: "#Contains the Raw Direct Download URL of the $BINARY "
size: "#Contains the Total Size of the $BINARY "
bsum: "#Contains the Exact Blake3sum of the $BINARY "
shasum: "#Contains the Exact Sha256sum of the $BINARY "
build_date: "#Contains the Exact Date the $BINARY was Built(Fetched) & Uploaded "
build_script: "#Contains the Actual Script the $BINARY was Built(Fetched) With "
build_log: "#Contains the link to view the Actual CI BUILD LOG of the $BINARY "
extra_bins: "#Contains names of related binaries (Only if they belong to same $PKG_FAMILY) of the $BINARY/$PKG_FAMILY [Otherwise EMPTY]"