From 2355c7775bdf1aea20b10e038834163597c5dc54 Mon Sep 17 00:00:00 2001 From: Ahmed Zetao Yang Date: Sun, 28 Apr 2019 20:11:32 +0800 Subject: [PATCH] docker build appimage --- .travis.yml | 38 +++ Dockerfile | 20 ++ docker/functions.sh | 348 ++++++++++++++++++++++ docker/netease-cloud-music.yml | 14 + docker/pkg2appimage | 446 +++++++++++++++++++++++++++++ netease-cloud-music.yml | 14 + pkg2appimage-with-docker | 101 +++++++ scripts/services/0x0.st.sh | 25 ++ scripts/services/file.io.sh | 26 ++ scripts/services/transfer.sh.sh | 19 ++ scripts/services/transferwee.py | 330 +++++++++++++++++++++ scripts/services/wetransfer.com.sh | 25 ++ 12 files changed, 1406 insertions(+) create mode 100644 .travis.yml create mode 100644 Dockerfile create mode 100644 docker/functions.sh create mode 100644 docker/netease-cloud-music.yml create mode 100755 docker/pkg2appimage create mode 100644 netease-cloud-music.yml create mode 100644 pkg2appimage-with-docker create mode 100644 scripts/services/0x0.st.sh create mode 100644 scripts/services/file.io.sh create mode 100644 scripts/services/transfer.sh.sh create mode 100755 scripts/services/transferwee.py create mode 100755 scripts/services/wetransfer.com.sh diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000..f1105fb --- /dev/null +++ b/.travis.yml @@ -0,0 +1,38 @@ +dist: xenial +sudo: required + +branches: + only: + - master + +cache: + directories: + - $HOME/.cache + +env: + global: + - PRODUCT=netease-cloud-music + +matrix: + include: + - os: linux + env: OS=ubuntu EXTEN=appimage + services: docker + language: cpp + # - os: linux + # env: OS=fedora EXTEN=appimage + # services: docker + # language: cpp + +before_install: + +install: + + +script: + - export ROOT_PATH="$(pwd)" + - bash pkg2appimage-with-docker netease-cloud-music.yml + +after_success: + - cd ${ROOT_PATH}/out + - if [[ "${EXTEN}" == "appimage" ]]; then python3 ${ROOT_PATH}/scripts/services/transferwee.py upload NetEase_Cloud_Music-1.2.0.2.*.AppImage 
fi \ No newline at end of file diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..9c8b326 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,20 @@ +# Beware: only meant for use with pkg2appimage-with-docker +# Beware: only use for build netease-music-appimage + +FROM ubuntu:18.10 + +MAINTAINER "kelleg " + +ENV DEBIAN_FRONTEND=noninteractive \ + DOCKER_BUILD=1 + +RUN apt-get update && \ + apt-get install -y apt-transport-https libcurl3-gnutls libarchive13 wget \ + ca-cacert appstream desktop-file-utils fuse gnupg2 build-essential file \ + libglib2.0-dev libglib2.0-bin git && install -m 0777 -d /workspace + +COPY docker/* /workspace/ +RUN adduser --system --uid 1000 test + +WORKDIR /workspace + diff --git a/docker/functions.sh b/docker/functions.sh new file mode 100644 index 0000000..7688d79 --- /dev/null +++ b/docker/functions.sh @@ -0,0 +1,348 @@ +# This file is supposed to be sourced by each Recipe +# that wants to use the functions contained herein +# like so: +# wget -q https://github.com/AppImage/AppImages/raw/${PKG2AICOMMIT}/functions.sh -O ./functions.sh +# . ./functions.sh + +# RECIPE=$(realpath "$0") + +# Specify a certain commit if you do not want to use master +# by using: +# export PKG2AICOMMIT= +if [ -z "$PKG2AICOMMIT" ] ; then + PKG2AICOMMIT=master +fi + +# Options for apt-get to use local files rather than the system ones +OPTIONS="-o Debug::NoLocking=1 +-o APT::Cache-Limit=125829120 +-o Dir::Etc::sourcelist=./sources.list +-o Dir::State=./tmp +-o Dir::Cache=./tmp +-o Dir::State::status=./status +-o Dir::Etc::sourceparts=- +-o APT::Get::List-Cleanup=0 +-o APT::Get::AllowUnauthenticated=1 +-o Debug::pkgProblemResolver=true +-o Debug::pkgDepCache::AutoInstall=true +-o APT::Install-Recommends=0 +-o APT::Install-Suggests=0 +" + +# Detect system architecture to know which binaries of AppImage tools +# should be downloaded and used. 
+case "$(uname -i)" in + x86_64|amd64) +# echo "x86-64 system architecture" + SYSTEM_ARCH="x86_64";; + i?86) +# echo "x86 system architecture" + SYSTEM_ARCH="i686";; +# arm*) +# echo "ARM system architecture" +# SYSTEM_ARCH="";; + unknown|AuthenticAMD|GenuineIntel) +# uname -i not answer on debian, then: + case "$(uname -m)" in + x86_64|amd64) +# echo "x86-64 system architecture" + SYSTEM_ARCH="x86_64";; + i?86) +# echo "x86 system architecture" + SYSTEM_ARCH="i686";; + esac ;; + *) + echo "Unsupported system architecture" + exit 1;; +esac + +# Either get the file from remote or from a static place. +# critical for builds without network access like in Open Build Service +cat_file_from_url() +{ + cat_excludelist="wget -q $1 -O -" + [ -e "$STATIC_FILES/${1##*/}" ] && cat_excludelist="cat $STATIC_FILES/${1##*/}" + $cat_excludelist +} + +git_pull_rebase_helper() +{ + git reset --hard HEAD + git pull +} + +# Patch /usr to ././ in ./usr +# to make the contents of usr/ relocateable +# (this requires us to cd ./usr before running the application; AppRun does that) +patch_usr() +{ + find usr/ -type f -executable -exec sed -i -e "s|/usr|././|g" {} \; +} + +# Download AppRun and make it executable +get_apprun() +{ + TARGET_ARCH=${ARCH:-$SYSTEM_ARCH} + wget -c https://github.com/AppImage/AppImageKit/releases/download/continuous/AppRun-${TARGET_ARCH} -O AppRun + chmod a+x AppRun +} + +# Copy the library dependencies of all exectuable files in the current directory +# (it can be beneficial to run this multiple times) +copy_deps() +{ + PWD=$(readlink -f .) + FILES=$(find . 
-type f -executable -or -name *.so.* -or -name *.so | sort | uniq ) + for FILE in $FILES ; do + ldd "${FILE}" | grep "=>" | awk '{print $3}' | xargs -I '{}' echo '{}' >> DEPSFILE + done + DEPS=$(cat DEPSFILE | sort | uniq) + for FILE in $DEPS ; do + if [ -e $FILE ] && [[ $(readlink -f $FILE)/ != $PWD/* ]] ; then + cp -v --parents -rfL $FILE ./ || true + fi + done + rm -f DEPSFILE +} + +# Move ./lib/ tree to ./usr/lib/ +move_lib() +{ + mkdir -p ./usr/lib ./lib && find ./lib/ -exec cp -v --parents -rfL {} ./usr/ \; && rm -rf ./lib + mkdir -p ./usr/lib ./lib64 && find ./lib64/ -exec cp -v --parents -rfL {} ./usr/ \; && rm -rf ./lib64 +} + +# Delete blacklisted files +delete_blacklisted() +{ + BLACKLISTED_FILES=$(cat_file_from_url https://github.com/AppImage/pkg2appimage/raw/${PKG2AICOMMIT}/excludelist | sed 's|#.*||g') + echo $BLACKLISTED_FILES + for FILE in $BLACKLISTED_FILES ; do + FILES="$(find . -name "${FILE}" -not -path "./usr/optional/*")" + for FOUND in $FILES ; do + rm -vf "$FOUND" "$(readlink -f "$FOUND")" + done + done + + # Do not bundle developer stuff + rm -rf usr/include || true + rm -rf usr/lib/cmake || true + rm -rf usr/lib/pkgconfig || true + find . -name '*.la' | xargs -i rm {} +} + +# Echo highest glibc version needed by the executable files in the current directory +glibc_needed() +{ + find . -name *.so -or -name *.so.* -or -type f -executable -exec strings {} \; | grep ^GLIBC_2 | sed s/GLIBC_//g | sort --version-sort | uniq | tail -n 1 + # find . 
-name *.so -or -name *.so.* -or -type f -executable -exec readelf -s '{}' 2>/dev/null \; | sed -n 's/.*@GLIBC_//p'| awk '{print $1}' | sort --version-sort | tail -n 1 +} +# Add desktop integration +# Usage: get_desktopintegration name_of_desktop_file_and_exectuable +get_desktopintegration() +{ + # REALBIN=$(grep -o "^Exec=.*" *.desktop | sed -e 's|Exec=||g' | cut -d " " -f 1 | head -n 1) + # cat_file_from_url https://raw.githubusercontent.com/AppImage/AppImageKit/deprecated/AppImageAssistant/desktopintegration > ./usr/bin/$REALBIN.wrapper + # chmod a+x ./usr/bin/$REALBIN.wrapper + echo "The desktopintegration script is deprecated. Please advise users to use https://github.com/AppImage/appimaged instead." + # sed -i -e "s|^Exec=$REALBIN|Exec=$REALBIN.wrapper|g" $1.desktop +} + +# Generate AppImage; this expects $ARCH, $APP and $VERSION to be set +generate_appimage() +{ + # Download AppImageAssistant + URL="https://github.com/AppImage/AppImageKit/releases/download/6/AppImageAssistant_6-${SYSTEM_ARCH}.AppImage" + wget -c "$URL" -O AppImageAssistant + chmod a+x ./AppImageAssistant + + # if [[ "$RECIPE" == *ecipe ]] ; then + # echo "#!/bin/bash -ex" > ./$APP.AppDir/Recipe + # echo "# This recipe was used to generate this AppImage." >> ./$APP.AppDir/Recipe + # echo "# See http://appimage.org for more information." >> ./$APP.AppDir/Recipe + # echo "" >> ./$APP.AppDir/Recipe + # cat $RECIPE >> ./$APP.AppDir/Recipe + # fi + # + # Detect the architecture of what we are packaging. + # The main binary could be a script, so let's use a .so library + BIN=$(find . -name *.so* -type f | head -n 1) + INFO=$(file "$BIN") + if [ -z $ARCH ] ; then + if [[ $INFO == *"x86-64"* ]] ; then + ARCH=x86_64 + elif [[ $INFO == *"i686"* ]] ; then + ARCH=i686 + elif [[ $INFO == *"armv6l"* ]] ; then + ARCH=armhf + else + echo "Could not automatically detect the architecture." + echo "Please set the \$ARCH environment variable." 
+ exit 1 + fi + fi + + mkdir -p ../out || true + rm ../out/$APP"-"$VERSION".glibc"$GLIBC_NEEDED"-"$ARCH".AppImage" 2>/dev/null || true + GLIBC_NEEDED=$(glibc_needed) + ./AppImageAssistant ./$APP.AppDir/ ../out/$APP"-"$VERSION".glibc"$GLIBC_NEEDED"-"$ARCH".AppImage" +} + +# Generate AppImage type 2 +# Additional parameters given to this routine will be passed on to appimagetool +# +# If the environment variable NO_GLIBC_VERSION is set, the required glibc version +# will not be added to the AppImage filename +generate_type2_appimage() +{ + # Get the ID of the last successful build on Travis CI + # ID=$(wget -q https://api.travis-ci.org/repos/AppImage/appimagetool/builds -O - | head -n 1 | sed -e 's|}|\n|g' | grep '"result":0' | head -n 1 | sed -e 's|,|\n|g' | grep '"id"' | cut -d ":" -f 2) + # Get the transfer.sh URL from the logfile of the last successful build on Travis CI + # Only Travis knows why build ID and job ID don't match and why the above doesn't give both... + # URL=$(wget -q "https://s3.amazonaws.com/archive.travis-ci.org/jobs/$((ID+1))/log.txt" -O - | grep "https://transfer.sh/.*/appimagetool" | tail -n 1 | sed -e 's|\r||g') + # if [ -z "$URL" ] ; then + # URL=$(wget -q "https://s3.amazonaws.com/archive.travis-ci.org/jobs/$((ID+2))/log.txt" -O - | grep "https://transfer.sh/.*/appimagetool" | tail -n 1 | sed -e 's|\r||g') + # fi + if [ -z "$(which appimagetool)" ] ; then + URL="https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-${SYSTEM_ARCH}.AppImage" + wget -c "$URL" -O appimagetool + chmod a+x ./appimagetool + appimagetool=$(readlink -f appimagetool) + else + appimagetool=$(which appimagetool) + fi + if [ "$DOCKER_BUILD" ]; then + appimagetool_tempdir=$(mktemp -d) + mv appimagetool "$appimagetool_tempdir" + pushd "$appimagetool_tempdir" &>/dev/null + ls -al + ./appimagetool --appimage-extract + rm appimagetool + appimagetool=$(readlink -f squashfs-root/AppRun) + popd &>/dev/null + _appimagetool_cleanup() { [ -d 
"$appimagetool_tempdir" ] && rm -r "$appimagetool_tempdir"; } + trap _appimagetool_cleanup EXIT + fi + + if [ -z ${NO_GLIBC_VERSION+true} ]; then + GLIBC_NEEDED=$(glibc_needed) + VERSION_EXPANDED=$VERSION.glibc$GLIBC_NEEDED + else + VERSION_EXPANDED=$VERSION + fi + + set +x + GLIBC_NEEDED=$(glibc_needed) + if ( [ ! -z "$KEY" ] ) && ( ! -z "$TRAVIS" ) ; then + wget https://github.com/AppImage/AppImageKit/files/584665/data.zip -O data.tar.gz.gpg + ( set +x ; echo $KEY | gpg2 --batch --passphrase-fd 0 --no-tty --skip-verify --output data.tar.gz --decrypt data.tar.gz.gpg ) + tar xf data.tar.gz + sudo chown -R $USER .gnu* + mv $HOME/.gnu* $HOME/.gnu_old ; mv .gnu* $HOME/ + VERSION=$VERSION_EXPANDED "$appimagetool" $@ -n -s --bintray-user $BINTRAY_USER --bintray-repo $BINTRAY_REPO -v ./$APP.AppDir/ + else + VERSION=$VERSION_EXPANDED "$appimagetool" $@ -n --bintray-user $BINTRAY_USER --bintray-repo $BINTRAY_REPO -v ./$APP.AppDir/ + fi + set -x + mkdir -p ../out/ || true + mv *.AppImage* ../out/ +} + +# Generate status file for use by apt-get; assuming that the recipe uses no newer +# ingredients than what would require more recent dependencies than what we assume +# to be part of the base system +generate_status() +{ + mkdir -p ./tmp/archives/ + mkdir -p ./tmp/lists/partial + touch tmp/pkgcache.bin tmp/srcpkgcache.bin + if [ -e "${HERE}/usr/share/pkg2appimage/excludedeblist" ] ; then + EXCLUDEDEBLIST="${HERE}/usr/share/pkg2appimage/excludedeblist" + else + wget -q -c "https://github.com/AppImage/AppImages/raw/${PKG2AICOMMIT}/excludedeblist" + EXCLUDEDEBLIST=excludedeblist + fi + rm status 2>/dev/null || true + for PACKAGE in $(cat excludedeblist | cut -d "#" -f 1) ; do + printf "Package: $PACKAGE\nStatus: install ok installed\nArchitecture: all\nVersion: 9:999.999.999\n\n" >> status + done +} + +# Find the desktop file and copy it to the AppDir +get_desktop() +{ + find usr/share/applications -iname "*${LOWERAPP}.desktop" -exec cp {} . 
\; || true +} + +fix_desktop() { + # fix trailing semicolons + for key in Actions Categories Implements Keywords MimeType NotShowIn OnlyShowIn; do + sed -i '/'"$key"'.*[^;]$/s/$/;/' $1 + done +} + +# Find the icon file and copy it to the AppDir +get_icon() +{ + find ./usr/share/pixmaps/$LOWERAPP.png -exec cp {} . \; 2>/dev/null || true + find ./usr/share/icons -path *64* -name $LOWERAPP.png -exec cp {} . \; 2>/dev/null || true + find ./usr/share/icons -path *128* -name $LOWERAPP.png -exec cp {} . \; 2>/dev/null || true + find ./usr/share/icons -path *512* -name $LOWERAPP.png -exec cp {} . \; 2>/dev/null || true + find ./usr/share/icons -path *256* -name $LOWERAPP.png -exec cp {} . \; 2>/dev/null || true + ls -lh $LOWERAPP.png || true +} + +# Find out the version +get_version() +{ + THEDEB=$(find ../*.deb -name $LOWERAPP"_*" | head -n 1) + if [ -z "$THEDEB" ] ; then + echo "Version could not be determined from the .deb; you need to determine it manually" + fi + VERSION=$(echo $THEDEB | cut -d "~" -f 1 | cut -d "_" -f 2 | cut -d "-" -f 1 | sed -e 's|1%3a||g' | sed -e 's|.dfsg||g' ) + echo $VERSION +} + +# transfer.sh +transfer() { if [ $# -eq 0 ]; then echo "No arguments specified. 
Usage:\necho transfer /tmp/test.md\ncat /tmp/test.md | transfer test.md"; return 1; fi +tmpfile=$( mktemp -t transferXXX ); if tty -s; then basefile=$(basename "$1" | sed -e 's/[^a-zA-Z0-9._-]/-/g'); curl --progress-bar --upload-file "$1" "https://transfer.sh/$basefile" >> $tmpfile; else curl --progress-bar --upload-file "-" "https://transfer.sh/$1" >> $tmpfile ; fi; cat $tmpfile; rm -f $tmpfile; } + +# Patch binary files; fill with padding if replacement is shorter than original +# http://everydaywithlinux.blogspot.de/2012/11/patch-strings-in-binary-files-with-sed.html +# Example: patch_strings_in_file foo "/usr/local/lib/foo" "/usr/lib/foo" +patch_strings_in_file() { + local FILE="$1" + local PATTERN="$2" + local REPLACEMENT="$3" + # Find all unique strings in FILE that contain the pattern + STRINGS=$(strings ${FILE} | grep ${PATTERN} | sort -u -r) + if [ "${STRINGS}" != "" ] ; then + echo "File '${FILE}' contain strings with '${PATTERN}' in them:" + for OLD_STRING in ${STRINGS} ; do + # Create the new string with a simple bash-replacement + NEW_STRING=${OLD_STRING//${PATTERN}/${REPLACEMENT}} + # Create null terminated ASCII HEX representations of the strings + OLD_STRING_HEX="$(echo -n ${OLD_STRING} | xxd -g 0 -u -ps -c 256)00" + NEW_STRING_HEX="$(echo -n ${NEW_STRING} | xxd -g 0 -u -ps -c 256)00" + if [ ${#NEW_STRING_HEX} -le ${#OLD_STRING_HEX} ] ; then + # Pad the replacement string with null terminations so the + # length matches the original string + while [ ${#NEW_STRING_HEX} -lt ${#OLD_STRING_HEX} ] ; do + NEW_STRING_HEX="${NEW_STRING_HEX}00" + done + # Now, replace every occurrence of OLD_STRING with NEW_STRING + echo -n "Replacing ${OLD_STRING} with ${NEW_STRING}... " + hexdump -ve '1/1 "%.2X"' ${FILE} | \ + sed "s/${OLD_STRING_HEX}/${NEW_STRING_HEX}/g" | \ + xxd -r -p > ${FILE}.tmp + chmod --reference ${FILE} ${FILE}.tmp + mv ${FILE}.tmp ${FILE} + echo "Done!" + else + echo "New string '${NEW_STRING}' is longer than old" \ + "string '${OLD_STRING}'. 
Skipping." + fi + done + fi +} + diff --git a/docker/netease-cloud-music.yml b/docker/netease-cloud-music.yml new file mode 100644 index 0000000..8076143 --- /dev/null +++ b/docker/netease-cloud-music.yml @@ -0,0 +1,14 @@ +app: netease-cloud-music +binpatch: true + +ingredients: + dist: deepin15.9 + sources: + - deb http://mirrors.163.com/deepin panda main contrib non-free + script: + - VERSION=1.2.0.2 + - wget -c http://mirrors.163.com/deepin/pool/main/n/netease-cloud-music/netease-cloud-music_1.2.0.2-1_amd64.deb --trust-server-names + - echo $VERSION > VERSION + +script: + - cp usr/share/icons/hicolor/scalable/apps/netease-cloud-music.svg . diff --git a/docker/pkg2appimage b/docker/pkg2appimage new file mode 100755 index 0000000..80ef7c7 --- /dev/null +++ b/docker/pkg2appimage @@ -0,0 +1,446 @@ +#!/usr/bin/env bash + +# env + +# Specify a certain commit if you do not want to use master +# by using: +# export PKG2AICOMMIT= +if [ -z "$PKG2AICOMMIT" ] ; then + PKG2AICOMMIT=master +fi + + +usage() { + echo "usage:" + echo " $0 [--no-di] META-NAME|YAMLFILE" + echo "" + echo "options:" + echo " --no-di disable desktop integration" + exit 1 +} + +if [ $# -eq 0 ] || [ "x${!#}" = "x--no-di" ] ; then + usage +fi +if [ $# -eq 2 ] && [ "x$1" != "x--no-di" ] ; then + usage +fi + +if [ "x$1" = "x--no-di" ] ; then + ENABLE_DI="no" +else + ENABLE_DI="yes" +fi + +# Halt on errors +set -e +set -x + +# Check dependencies +which wget >/dev/null 2>&1 || ( echo wget missing && exit 1 ) +which grep >/dev/null 2>&1 || ( echo grep missing && exit 1 ) +which sed >/dev/null 2>&1 || ( echo sed missing && exit 1 ) +which cut >/dev/null 2>&1 || ( echo cut missing && exit 1 ) + +# If the yaml file doesn't exist locally, get it from GitHub +if [ ! 
-f "${!#}" ] ; then + YAMLFILE=/tmp/_recipe.yml + rm -f "$YAMLFILE" + wget -q "https://github.com/AppImage/AppImages/raw/${PKG2AICOMMIT}/recipes/${!#}.yml" -O "$YAMLFILE" +else + YAMLFILE=$(readlink -f "${!#}") +fi + +# Lightweight bash-only dpkg-scanpackages replacement +scanpackages() { + for deb in *.deb ; do + dpkg -I $deb | sed 's/^ *//g' | grep -i -E '(package|version|installed-size|architecture|depends|priority):' + echo "Filename: $(readlink -f $deb)" + echo "MD5sum: $(md5sum -b $deb | cut -d' ' -f1)" + echo "SHA1: $(sha1sum -b $deb | cut -d' ' -f1)" + echo "SHA256: $(sha256sum -b $deb | cut -d' ' -f1)" + echo + done +} + +# Function to parse yaml +# https://gist.github.com/epiloque/8cf512c6d64641bde388 +# based on https://gist.github.com/pkuczynski/8665367 +parse_yaml() { + local prefix=$2 + local s + local w + local fs + s='[[:blank:]]*' + w='[a-zA-Z0-9_]*' + fs="$(echo @|tr @ '\034')" + sed -ne "s|^\($s\)\($w\)$s:$s\"\(.*\)\"$s\$|\1$fs\2$fs\3|p" \ + -e "s|^\($s\)\($w\)$s[:-]$s\(.*\)$s\$|\1$fs\2$fs\3|p" "$1" | + awk -F"$fs" '{ + indent = length($1)/2; + vname[indent] = $2; + for (i in vname) {if (i > indent) {delete vname[i]}} + if (length($3) > 0) { + vn=""; for (i=0; i/dev/null || true + generate_status + + # Some packages depend on packages which we do not want to bundle, + # in addition to the global excludes defined in excludedeblist. + # Use + # ingredients: + # exclude: + # - packagename + if [ ! -z "${_ingredients_exclude[0]}" ] ; then + for PACKAGE in "${_ingredients_exclude[@]}" ; do + printf "Package: $PACKAGE\nStatus: install ok installed\nArchitecture: all\nVersion: 9:999.999.999\n\n" >> status + done + fi + + # Some packages depend on an exact version of a dependency to be installed. + # Use + # ingredients: + # pretend: + # - packagename version_to_be_pretended + if [ ! 
-z "${_ingredients_pretend[0]}" ] ; then + for PRETEND in "${_ingredients_pretend[@]}" ; do + P_PKG=$(echo "$PRETEND" | cut -d " " -f 1) + P_VER=$(echo "$PRETEND" | cut -d " " -f 2) + cat status | tr '\n' '@' | sed -e 's|@@|\n\n|g' | sed -e 's|Package: '"$P_PKG"'@Status: install ok installed@Architecture: all@Version: 9:999.999.999|Package: '"$P_PKG"'@Status: install ok installed@Architecture: all@Version: '"$P_VER"'|g' | sed -e 's|@|\n|g' > status.temp + mv status.temp status + done + fi + + if [ -e sources.list ] ; then + rm sources.list + fi + for SOURCE in "${_ingredients_sources[@]}" ; do + echo "${SOURCE}" >> sources.list + done + for PPA in "${_ingredients_ppas[@]}" ; do + echo "deb http://ppa.launchpad.net/${PPA}/ubuntu ${_ingredients_dist} main" >> sources.list + done + for DEBFILE in "${_ingredients_debs[@]}" ; do + cp ${DEBFILE} . + done + # Use libcurl-slim to reduce AppImage size, thanks darealshinji + # Not really compiled on xenial but CentOS 6, https://github.com/AppImage/AppImages/issues/187 + echo "deb http://ppa.launchpad.net/djcj/libcurl-slim/ubuntu xenial main" >> sources.list + # Use gnutls-patched to have libgnutls look in various distributions' places for certificates, + # https://github.com/darealshinji/vlc-AppImage/issues/1#issuecomment-321041496 + # echo "deb http://ppa.launchpad.net/djcj/gnutls-patched/ubuntu ${_ingredients_dist} main" >> sources.list + echo "deb http://ppa.launchpad.net/djcj/gnutls-patched/ubuntu trusty main" >> sources.list +fi + +if [ ! -z "${_ingredients_script[0]}" ] ; then + # Execute extra steps defined in recipe + shell_execute $YAMLFILE _ingredients_script +fi + +if [ ! -z "${_ingredients_dist}" ] ; then + # Some projects provide raw .deb files without a repository + # hence we create our own local repository as part of + # the AppImage creation process in order to "install" + # the package using apt-get as normal + if [ ! -z "${_ingredients_debs[0]}" ] ; then + for DEB in "${_ingredients_debs[@]}" ; do + if [ ! 
-f $(basename "$DEB") ] ; then + wget -c $DEB + fi + done + fi + scanpackages | gzip -9c > Packages.gz + echo "deb file:$(readlink -e $PWD) ./" >> sources.list + + INSTALL=$LOWERAPP + if [ ! -z "${_ingredients_package}" ] ; then + INSTALL="${_ingredients_package}" + fi + if [ ! -z "${_ingredients_packages}" ] ; then + INSTALL="" + fi + + # If packages are specifically listed, only install these, not a package with the name of the app + if [ ! -z "${_ingredients_packages[0]}" ] ; then + INSTALL=${_ingredients_packages[@]} + fi + + apt-get -o Acquire::AllowInsecureRepositories=true -o Acquire::Languages="none" -o Acquire::AllowDowngradeToInsecureRepositories=true $OPTIONS update || true + URLS=$(apt-get --allow-unauthenticated -o Apt::Get::AllowUnauthenticated=true $OPTIONS -y install --print-uris $INSTALL | cut -d "'" -f 2 | grep -e "^http") + if which aria2c &>/dev/null; then + dltool=aria2c + else + dltool=wget + fi + + $dltool -c -i- <<<"$URLS" +fi + +if [ ! -z "${_ingredients_post_script[0]}" ] ; then + # Execute extra steps defined in recipe + shell_execute $YAMLFILE _ingredients_post_script +fi + +mkdir -p ./$APP.AppDir/ +cd ./$APP.AppDir/ + +mkdir -p usr/bin usr/lib +find ../*.deb -exec dpkg -x {} . \; || true + +# Try to copy icons to standard locations where appimaged can pick them up +mkdir -p usr/share/icons/hicolor/{22x22,24x24,32x32,48x48,64x64,128x128,256x256,512x512}/apps/ +find . -path *icons* -path *22* -name "*$LOWERAPP*" -exec cp {} usr/share/icons/hicolor/22x22/apps/ \; || true +find . -path *icons* -path *24* -name "*$LOWERAPP*" -exec cp {} usr/share/icons/hicolor/24x24/apps/ \; || true +find . -path *icons* -path *32* -name "*$LOWERAPP*" -exec cp {} usr/share/icons/hicolor/32x32/apps/ \; || true +find . -path *icons* -path *48* -name "*$LOWERAPP*" -exec cp {} usr/share/icons/hicolor/48x48/apps/ \; || true +find . -path *icons* -path *64* -name "*$LOWERAPP*" -exec cp {} usr/share/icons/hicolor/64x64/apps/ \; || true +find . 
-path *icons* -path *128* -name "*$LOWERAPP*" -exec cp {} usr/share/icons/hicolor/128x128/apps/ \; || true +find . -path *icons* -path *256* -name "*$LOWERAPP*" -exec cp {} usr/share/icons/hicolor/256x256/apps/ \; || true +find . -path *icons* -path *512* -name "*$LOWERAPP*" -exec cp {} usr/share/icons/hicolor/512x512/apps/ \; || true + +get_icon + +if [ -z "${_union}" ] ; then + get_apprun +else +cat > AppRun <<\EOF +#!/bin/sh +HERE="$(dirname "$(readlink -f "${0}")")" +export UNION_PRELOAD="${HERE}" +export LD_PRELOAD="${HERE}/libunionpreload.so" +export PATH="${HERE}"/usr/bin/:"${HERE}"/usr/sbin/:"${HERE}"/usr/games/:"${HERE}"/bin/:"${HERE}"/sbin/:"${PATH}" +export LD_LIBRARY_PATH="${HERE}"/usr/lib/:"${HERE}"/usr/lib/i386-linux-gnu/:"${HERE}"/usr/lib/x86_64-linux-gnu/:"${HERE}"/usr/lib32/:"${HERE}"/usr/lib64/:"${HERE}"/lib/:"${HERE}"/lib/i386-linux-gnu/:"${HERE}"/lib/x86_64-linux-gnu/:"${HERE}"/lib32/:"${HERE}"/lib64/:"${LD_LIBRARY_PATH}" +export PYTHONPATH="${HERE}"/usr/share/pyshared/:"${PYTHONPATH}" +export PYTHONHOME="${HERE}"/usr/ +export XDG_DATA_DIRS="${HERE}"/usr/share/:"${XDG_DATA_DIRS}" +export PERLLIB="${HERE}"/usr/share/perl5/:"${HERE}"/usr/lib/perl5/:"${PERLLIB}" +export GSETTINGS_SCHEMA_DIR="${HERE}"/usr/share/glib-2.0/schemas/:"${GSETTINGS_SCHEMA_DIR}" +export QT_PLUGIN_PATH="${HERE}"/usr/lib/qt4/plugins/:"${HERE}"/usr/lib/i386-linux-gnu/qt4/plugins/:"${HERE}"/usr/lib/x86_64-linux-gnu/qt4/plugins/:"${HERE}"/usr/lib32/qt4/plugins/:"${HERE}"/usr/lib64/qt4/plugins/:"${HERE}"/usr/lib/qt5/plugins/:"${HERE}"/usr/lib/i386-linux-gnu/qt5/plugins/:"${HERE}"/usr/lib/x86_64-linux-gnu/qt5/plugins/:"${HERE}"/usr/lib32/qt5/plugins/:"${HERE}"/usr/lib64/qt5/plugins/:"${QT_PLUGIN_PATH}" +EXEC=$(grep -e '^Exec=.*' "${HERE}"/*.desktop | head -n 1 | cut -d "=" -f 2- | sed -e 's|%.||g') +exec ${EXEC} $@ +EOF +chmod a+x AppRun +fi + +get_desktop + +# Prevent Qt from loading plugins from the system +unset QTPATH +QTPATH=$(find usr/lib -type d -name qt4 -or -name qt5 | 
sed -e 's|usr/|../|g') +if [ ! -z $QTPATH ] ; then +cat > usr/bin/qt.conf < temp \; + # Remove all absolute paths + sed -i -E 's|target=\"\/(.*\/)([a-z0-9].*?)>|target=\"\2>|g' temp + SONAMES=$(cat temp | cut -d '"' -f 4 | grep ".so") + for SONAME in $SONAMES; do + find . -name "$SONAME" -exec mv {} usr/lib \; + done + rm temp + PATH_OF_THE_EXE="usr/lib/mono/exe" + mkdir -p "PATH_OF_THE_EXE" + # Force all dll files into PATH_OF_THE_EXE (or MONO_PATH which we would have to set) + find . -name "*.dll" -and -not -name "mscorlib.dll" -exec mv {} "$PATH_OF_THE_EXE" \; + # Edit all config files in place to remove absolute paths + find . -name "*.dll.config" -exec sed -i -E 's|target=\"\/(.*\/)([a-z0-9].*?)>|target=\"\2>|g' {} \; + # Force all config files into the PATH_OF_THE_EXE (or MONO_PATH which we would have to set) + find . -name "*.dll.config" -exec mv {} "$PATH_OF_THE_EXE" \; + # Remove gac, we are not using it since it is convoluted + rm -rf usr/lib/mono/gac/ +fi + +if [ -d "./usr/lib/x86_64-linux-gnu/gstreamer-1.0/" ] ; then + mv ./usr/lib/x86_64-linux-gnu/gstreamer-1.0/* ./usr/lib/x86_64-linux-gnu/ + rm -r ./usr/lib/x86_64-linux-gnu/gstreamer-1.0 +fi + +if [ -d "./usr/lib/x86_64-linux-gnu/pulseaudio/" ] ; then + mv ./usr/lib/x86_64-linux-gnu/pulseaudio/* ./usr/lib/x86_64-linux-gnu/ + rm -r ./usr/lib/x86_64-linux-gnu/pulseaudio +fi + +# Execute extra steps defined in recipe +if [ ! -z "${_script}" ] ; then + shell_execute $YAMLFILE _script +fi + +DESKTOP=$(find . 
-name '*.desktop' | sort | head -n 1) + +# desktop-file-validate complains about missing trailing semicolons for some +# keys although the format definition says that they are optional +fix_desktop "$DESKTOP" + +# Some non-distribution provided applications have an absolute +# path in the Exec= line which we remove for relocateability +if [ -z "$DESKTOP" ] ; then + echo "desktop file not found, aborting" + exit 1 +else + desktop-file-validate "$DESKTOP" || exit 1 + ORIG=$(grep -o "^Exec=.*$" "${DESKTOP}" | head -n 1| cut -d " " -f 1) + REPL=$(basename $(grep -o "^Exec=.*$" "${DESKTOP}" | head -n 1 | cut -d " " -f 1 | sed -e 's|Exec=||g')) + sed -i -e 's|'"${ORIG}"'|Exec='"${REPL}"'|g' "${DESKTOP}" +fi + +# Compile GLib schemas if the subdirectory is present in the AppImage +# AppRun has to export GSETTINGS_SCHEMA_DIR for this to work +if [ -d usr/share/glib-2.0/schemas/ ] ; then + ( cd usr/share/glib-2.0/schemas/ ; glib-compile-schemas . ) +fi + +if [ -f ../VERSION ] ; then + VERSION=$(cat ../VERSION) +else + get_version || true +fi + +# patch_usr +# Patching only the executable files seems not to be enough for some apps +if [ ! -z "${_binpatch}" ] ; then + find usr/ -type f -exec sed -i -e 's|/usr|././|g' {} \; + find usr/ -type f -exec sed -i -e 's@././/bin/env@/usr/bin/env@g' {} \; +fi + +# Don't suffer from NIH; use LD_PRELOAD to override calls to /usr paths +if [ ! 
-z "${_union}" ] ; then + mkdir -p usr/src/ + wget -q "https://raw.githubusercontent.com/mikix/deb2snap/master/src/preload.c" -O - | \ + sed -e 's|SNAPPY|UNION|g' | sed -e 's|SNAPP|UNION|g' | sed -e 's|SNAP|UNION|g' | \ + sed -e 's|snappy|union|g' > usr/src/libunionpreload.c + gcc -shared -fPIC usr/src/libunionpreload.c -o libunionpreload.so -ldl -DUNION_LIBNAME=\"libunionpreload.so\" + strip libunionpreload.so +fi + +delete_blacklisted + +if [ "$ENABLE_DI" = "yes" ] ; then + get_desktopintegration $LOWERAPP +fi + +# Fix desktop files that have file endings for icons +sed -i -e 's|\.png||g' *.desktop || true +sed -i -e 's|\.svg||g' *.desktop || true +sed -i -e 's|\.svgz||g' *.desktop || true +sed -i -e 's|\.xpm||g' *.desktop || true + +# Setting PYTHONHOME instead +# Fix Python imports, +# https://github.com/AppImage/AppImages/issues/172 +# SITECUSTOMIZEFILES=$(find . -name "sitecustomize.py") +# for SITECUSTOMIZEFILE in $SITECUSTOMIZEFILES ; do +# rm $SITECUSTOMIZEFILE # Remove symlinks, replace by files +# cat > $SITECUSTOMIZEFILE <<\EOF +# import sys,os +# if sys.version_info[0] < 3: +# prefix = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(sys.path[0])))) +# sys.path = [ prefix+s for s in sys.path if not s.startswith(prefix) ] +# EOF +# done + +# Execute extra steps defined in recipe +if [ ! -z "${_post}" ] ; then + shell_execute $YAMLFILE _post +fi + +# Go out of AppImage +cd .. 
+ +generate_type2_appimage +ls -lh ../out/*.AppImage + diff --git a/netease-cloud-music.yml b/netease-cloud-music.yml new file mode 100644 index 0000000..8076143 --- /dev/null +++ b/netease-cloud-music.yml @@ -0,0 +1,14 @@ +app: netease-cloud-music +binpatch: true + +ingredients: + dist: deepin15.9 + sources: + - deb http://mirrors.163.com/deepin panda main contrib non-free + script: + - VERSION=1.2.0.2 + - wget -c http://mirrors.163.com/deepin/pool/main/n/netease-cloud-music/netease-cloud-music_1.2.0.2-1_amd64.deb --trust-server-names + - echo $VERSION > VERSION + +script: + - cp usr/share/icons/hicolor/scalable/apps/netease-cloud-music.svg . diff --git a/pkg2appimage-with-docker b/pkg2appimage-with-docker new file mode 100644 index 0000000..c951428 --- /dev/null +++ b/pkg2appimage-with-docker @@ -0,0 +1,101 @@ +#!/bin/bash + +set -e + +log() { + (echo -e "\e[91m\e[1m$*\e[0m") +} + +cleanup() { + if [ "$containerid" == "" ]; then + return 0 + fi + + if [ "$1" == "error" ]; then + log "error occurred, cleaning up..." + elif [ "$1" != "" ]; then + log "$1 received, please wait a few seconds for cleaning up..." + else + log "cleaning up..." + fi + + docker ps -a | grep -q $containerid && docker rm -f $containerid +} + +trap "cleanup SIGINT" SIGINT +trap "cleanup SIGTERM" SIGTERM +trap "cleanup error" 0 +trap "cleanup" EXIT + +CACHE=0 +RECIPE="" +ARGS="" + +for var in $@; do + case "$1" in + -c|--cache) + if [ $CACHE -ne 0 ]; then + log "warning: caching already enabled" + fi + CACHE=1 + ;; + *) + if [ "$RECIPE" != "" ]; then + log "warning: ignoring argument: $1" + else + RECIPE="$1" + fi + ;; + esac + shift +done + +if [ "$RECIPE" == "" ]; then + log "usage: $0 [-c] name.yml" + log "" + log "\t-c, --cache\tEnable pkg2appimage's caching by mounting the build cache directory into the container" + exit 1 +fi + +if [ $(basename $RECIPE .yml) == "$RECIPE" ]; then + RECIPE="$RECIPE.yml" +fi + + +if [ ! 
-f $RECIPE ]; then + if [ -f recipes/"$RECIPE" ]; then + RECIPE=recipes/"$RECIPE" + else + log "error: no such file or directory: $RECIPE" + exit 1 + fi +fi + +if [ $CACHE -ne 0 ]; then + recipe_name=$(basename "$RECIPE" .yml) + mkdir -p "$recipe_name" + ARGS="-v $(readlink -f $recipe_name):/workspace/$recipe_name" +fi + +log "Building $RECIPE in a container..." + +randstr=$(cat /dev/urandom | tr -dc 'a-z0-9' | fold -w 8 | head -n 1) +containerid=appimage-build-$randstr +imageid=appimage-build-netease-music + +log "Building Docker container" +(set -xe; docker build -t $imageid .) + +log "Running container" +mkdir -p out +set -xe +docker run -it \ + --name $containerid \ + --cap-add SYS_ADMIN \ + --device /dev/fuse \ + --security-opt apparmor:unconfined \ + --user $(id -u):$(id -g) \ + -v "$(readlink -f out):/workspace/out" \ + $ARGS \ + $imageid \ + ./pkg2appimage $RECIPE || cleanup error diff --git a/scripts/services/0x0.st.sh b/scripts/services/0x0.st.sh new file mode 100644 index 0000000..c2f5ad0 --- /dev/null +++ b/scripts/services/0x0.st.sh @@ -0,0 +1,25 @@ +#!/bin/sh +#=============================================================== +# File URLs are valid for at least 30 days and up to a year (see below). +# Shortened URLs do not expire. +# Maximum file size: 512.0 MiB +# Blocked file types: application/x-dosexec, application/x-executable +#=============================================================== + +URL="https://0x0.st" + +if [ $# -eq 0 ]; then + echo "Usage: 0x0.st FILE\n" + exit 1 +fi + +FILE=$1 + +if [ ! -f "$FILE" ]; then + echo "File ${FILE} not found" + exit 1 +fi + +RESPONSE=$(curl -# -F "file=@${FILE}" "${URL}") + +echo "${RESPONSE}" # to terminal diff --git a/scripts/services/file.io.sh b/scripts/services/file.io.sh new file mode 100644 index 0000000..ec55d92 --- /dev/null +++ b/scripts/services/file.io.sh @@ -0,0 +1,26 @@ +#!/bin/sh + +#========================================== +# 100 uploads per day, 5GB file size limit for FREE plan. 
+#========================================== + +URL="https://file.io" +DEFAULT_EXPIRE="14d" # Default to 14 days + +if [ $# -eq 0 ]; then + echo "Usage: file.io FILE [DURATION]\n" + echo "Example: file.io path/to/my/file 1w\n" + exit 1 +fi + +FILE=$1 +EXPIRE=${2:-$DEFAULT_EXPIRE} + +if [ ! -f "$FILE" ]; then + echo "File ${FILE} not found" + exit 1 +fi + +RESPONSE=$(curl -# -F "file=@${FILE}" "${URL}/?expires=${EXPIRE}") + +echo "${RESPONSE}" # to terminal diff --git a/scripts/services/transfer.sh.sh b/scripts/services/transfer.sh.sh new file mode 100644 index 0000000..082ac8c --- /dev/null +++ b/scripts/services/transfer.sh.sh @@ -0,0 +1,19 @@ +#!/bin/sh + +URL="https://transfer.sh" + +if [ $# -eq 0 ]; then + echo "Usage: transfer.sh FILE\n" + exit 1 +fi + +FILE=$1 + +if [ ! -f "$FILE" ]; then + echo "File ${FILE} not found" + exit 1 +fi + +RESPONSE=$(curl -# -F "file=@${FILE}" "${URL}") + +echo "${RESPONSE}" # to terminal \ No newline at end of file diff --git a/scripts/services/transferwee.py b/scripts/services/transferwee.py new file mode 100755 index 0000000..3bf4fc3 --- /dev/null +++ b/scripts/services/transferwee.py @@ -0,0 +1,330 @@ +#!/usr/bin/env python3.7 + +# +# Copyright (c) 2018-2019 Leonardo Taccari +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. 
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
+# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
+# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+#
+
+
+"""
+Download/upload files via wetransfer.com
+
+transferwee is a script/module to download/upload files via wetransfer.com.
+
+It exposes `download' and `upload' subcommands, respectively used to download
+files from `we.tl' or `wetransfer.com/downloads' URLs and upload files that
+will be shared via emails or link.
+"""
+
+
+from typing import List
+import os.path
+import urllib.parse
+import zlib
+
+import requests
+
+
+WETRANSFER_API_URL = 'https://wetransfer.com/api/v4/transfers'
+WETRANSFER_DOWNLOAD_URL = WETRANSFER_API_URL + '/{transfer_id}/download'
+WETRANSFER_UPLOAD_EMAIL_URL = WETRANSFER_API_URL + '/email'
+WETRANSFER_UPLOAD_LINK_URL = WETRANSFER_API_URL + '/link'
+WETRANSFER_FILES_URL = WETRANSFER_API_URL + '/{transfer_id}/files'
+WETRANSFER_PART_PUT_URL = WETRANSFER_FILES_URL + '/{file_id}/part-put-url'
+WETRANSFER_FINALIZE_MPP_URL = WETRANSFER_FILES_URL + '/{file_id}/finalize-mpp'
+WETRANSFER_FINALIZE_URL = WETRANSFER_API_URL + '/{transfer_id}/finalize'
+
+WETRANSFER_DEFAULT_CHUNK_SIZE = 5242880
+
+
+def download_url(url: str) -> str:
+    """Given a wetransfer.com download URL, return the downloadable URL.
+
+    The URL should be of the form `https://we.tl/' or
+    `https://wetransfer.com/downloads/'. If it is a short URL (i.e. `we.tl')
+    the redirect is followed in order to retrieve the corresponding
+    `wetransfer.com/downloads/' URL.
+
+    The following types of URLs are supported:
+     - `https://we.tl/<short_url_id>`:
+       received via link upload, via email to the sender and printed by
+       `upload` action
+     - `https://wetransfer.com/<transfer_id>/<security_hash>`:
+       directly not shared in any ways but the short URLs actually redirect to
+       them
+     - `https://wetransfer.com/<transfer_id>/<recipient_id>/<security_hash>`:
+       received via email by recipients when the files are shared via email
+       upload
+
+    Return the download URL (AKA `direct_link') as a str or None if the URL
+    could not be parsed.
+    """
+    # Follow the redirect if we have a short URL
+    if url.startswith('https://we.tl/'):
+        r = requests.head(url, allow_redirects=True)
+        url = r.url
+
+    recipient_id = None
+    params = url.replace('https://wetransfer.com/downloads/', '').split('/')
+
+    if len(params) == 2:
+        transfer_id, security_hash = params
+    elif len(params) == 3:
+        transfer_id, recipient_id, security_hash = params
+    else:
+        return None
+
+    j = {
+        "security_hash": security_hash,
+    }
+    if recipient_id:
+        j["recipient_id"] = recipient_id
+    r = requests.post(WETRANSFER_DOWNLOAD_URL.format(transfer_id=transfer_id),
+                      json=j)
+
+    j = r.json()
+    return j.get('direct_link')
+
+
+def download(url: str) -> None:
+    """Given a `we.tl/' or `wetransfer.com/downloads/' URL, download it.
+
+    First a direct link is retrieved (via download_url()), the filename will
+    be extracted from it and it will be fetched and stored in the current
+    working directory.
+    """
+    dl_url = download_url(url)
+    file = urllib.parse.urlparse(dl_url).path.split('/')[-1]
+
+    r = requests.get(dl_url, stream=True)
+    with open(file, 'wb') as f:
+        for chunk in r.iter_content(chunk_size=1024):
+            f.write(chunk)
+
+
+def _file_name_and_size(file: str) -> dict:
+    """Given a file, prepare the "name" and "size" dictionary.
+
+    Return a dictionary with "name" and "size" keys.
+    """
+    filename = os.path.basename(file)
+    filesize = os.path.getsize(file)
+
+    return {
+        "name": filename,
+        "size": filesize
+    }
+
+
+def _prepare_email_upload(filenames: List[str], message: str,
+                          sender: str, recipients: List[str]) -> str:
+    """Given a list of filenames, a message, a sender and recipients,
+    prepare for the email upload.
+
+    Return the parsed JSON response.
+    """
+    j = {
+        "files": [_file_name_and_size(f) for f in filenames],
+        "from": sender,
+        "message": message,
+        "recipients": recipients,
+        "ui_language": "en",
+    }
+
+    r = requests.post(WETRANSFER_UPLOAD_EMAIL_URL, json=j)
+    return r.json()
+
+
+def _prepare_link_upload(filenames: List[str], message: str) -> str:
+    """Given a list of filenames and a message prepare for the link upload.
+
+    Return the parsed JSON response.
+    """
+    j = {
+        "files": [_file_name_and_size(f) for f in filenames],
+        "message": message,
+        "ui_language": "en",
+    }
+
+    r = requests.post(WETRANSFER_UPLOAD_LINK_URL, json=j)
+    return r.json()
+
+
+def _prepare_file_upload(transfer_id: str, file: str) -> str:
+    """Given a transfer_id and file prepare it for the upload.
+
+    Return the parsed JSON response.
+    """
+    j = _file_name_and_size(file)
+    r = requests.post(WETRANSFER_FILES_URL.format(transfer_id=transfer_id),
+                      json=j)
+    return r.json()
+
+
+def _upload_chunks(transfer_id: str, file_id: str, file: str,
+                   default_chunk_size: int = WETRANSFER_DEFAULT_CHUNK_SIZE) -> str:
+    """Given a transfer_id, file_id and file upload it.
+
+    Return the parsed JSON response.
+ """ + f = open(file, 'rb') + + chunk_number = 0 + while True: + chunk = f.read(default_chunk_size) + chunk_size = len(chunk) + if chunk_size == 0: + break + chunk_number += 1 + + j = { + "chunk_crc": zlib.crc32(chunk), + "chunk_number": chunk_number, + "chunk_size": chunk_size, + "retries": 0 + } + + r = requests.post( + WETRANSFER_PART_PUT_URL.format(transfer_id=transfer_id, + file_id=file_id), + json=j) + url = r.json().get('url') + r = requests.options(url, + headers={ + 'Origin': 'https://wetransfer.com', + 'Access-Control-Request-Method': 'PUT', + }) + r = requests.put(url, data=chunk) + + j = { + 'chunk_count': chunk_number + } + r = requests.put( + WETRANSFER_FINALIZE_MPP_URL.format(transfer_id=transfer_id, + file_id=file_id), + json=j) + + return r.json() + + +def _finalize_upload(transfer_id: str) -> str: + """Given a transfer_id finalize the upload. + + Return the parsed JSON response. + """ + r = requests.put(WETRANSFER_FINALIZE_URL.format(transfer_id=transfer_id)) + + return r.json() + + +def upload(files: List[str], message: str = '', sender: str = None, + recipients: List[str] = []) -> str: + """Given a list of files upload them and return the corresponding URL. + + Also accepts optional parameters: + - `message': message used as a description of the transfer + - `sender': email address used to receive an ACK if the upload is + successfull. For every download by the recipients an email + will be also sent + - `recipients': list of email addresses of recipients. When the upload + succeed every recipients will receive an email with a link + + If both sender and recipient parameters are passed the email upload will be + used. Otherwise, the link upload will be used. + + Return the short URL of the transfer on success. 
+ """ + + # Check that all files exists + for f in files: + if not os.path.exists(f): + return None + + # Check that there are no duplicates filenames + # (despite possible different dirname()) + filenames = [os.path.basename(f) for f in files] + if len(files) != len(set(filenames)): + return None + + transfer_id = None + if sender and recipients: + # email upload + transfer_id = \ + _prepare_email_upload(filenames, message, sender, recipients)['id'] + else: + # link upload + transfer_id = _prepare_link_upload(filenames, message)['id'] + + for f in files: + file_id = _prepare_file_upload(transfer_id, os.path.basename(f))['id'] + _upload_chunks(transfer_id, file_id, f) + + return _finalize_upload(transfer_id)['shortened_url'] + + +if __name__ == '__main__': + import argparse + + ap = argparse.ArgumentParser( + prog='transferwee', + description='Download/upload files via wetransfer.com' + ) + sp = ap.add_subparsers(dest='action', help='action') + + # download subcommand + dp = sp.add_parser('download', help='download files') + dp.add_argument('-g', action='store_true', + help='only print the direct link (without downloading it)') + dp.add_argument('url', nargs='+', type=str, metavar='url', + help='URL (we.tl/... 
or wetransfer.com/downloads/...)') + + # upload subcommand + up = sp.add_parser('upload', help='upload files') + up.add_argument('-m', type=str, default='', metavar='message', + help='message description for the transfer') + up.add_argument('-f', type=str, metavar='from', help='sender email') + up.add_argument('-t', nargs='+', type=str, metavar='to', + help='recipient emails') + up.add_argument('files', nargs='+', type=str, metavar='file', + help='files to upload') + + args = ap.parse_args() + + if args.action == 'download': + if args.g: + for u in args.url: + print(download_url(u)) + else: + for u in args.url: + download(u) + exit(0) + + if args.action == 'upload': + print(upload(args.files, args.m, args.f, args.t)) + exit(0) + + # No action selected, print help message + ap.print_help() + exit(1) diff --git a/scripts/services/wetransfer.com.sh b/scripts/services/wetransfer.com.sh new file mode 100755 index 0000000..eb948e1 --- /dev/null +++ b/scripts/services/wetransfer.com.sh @@ -0,0 +1,25 @@ +#!/bin/sh +#========================================================================================================================= +# WeTransfer is a service to send big or small files from A to B. +# It can transfer any type of file - such as presentations, photos, videos, music or documents - to friends and colleagues. +# You can send files up to 2 GB and they will be available for 7 days, with no registration. + +# API doc: https://developers.wetransfer.com/documentation +# Using transferwee.py: https://github.com/iamleot/transferwee +#========================================================================================================================= + +if [ $# -eq 0 ]; then + echo "Usage: python3 transferwee.py FILE\n" + exit 1 +fi + +FILE=$1 + +if [ ! -f "$FILE" ]; then + echo "File ${FILE} not found" + exit 1 +fi + +RESPONSE=$(python3 ${ROOT_PATH}/scripts/services/transferwee.py upload "${FILE}") + +echo "${RESPONSE}" # to terminal