#!/bin/bash
#----------------------------------------------------
# Author: Florian "Bluewind" Pritz
# Contributor: Moritz Wilhelmy
#
# Licensed under GPLv3
# (see COPYING for full license text)
#
#----------------------------------------------------
# Optional dependency: xclip
#----------------------------------------------------

pastebin="https://paste.xinu.at"
warnsize=10485760
libdir="@LIBDIR@"

# the calling conventions for stat(1) are highly system dependent
stat='stat -c %s' # GNU stat(1) is the default since most people have it
clipboard_cmd=xclip

case "`uname -s`" in
	*BSD) stat='stat -f %z';;
	Minix) stat='stat -size';;
	Darwin)
		stat='stat -f %z'
		clipboard_cmd=pbcopy
		;;
esac

if [ -z "$XDG_CONFIG_HOME" ]; then
	XDG_CONFIG_HOME="$HOME/.config"
fi

apikey_file="$XDG_CONFIG_HOME/fb-client/apikey"
config_file="$XDG_CONFIG_HOME/fb-client/config"

if [ -e "$config_file" ]; then
	. "$config_file"
fi

version="@VERSION@"

delete=
extension=""
filename="stdin"
get=
multipaste=
multipaste_ids=()
tar=
compress=0
display_history=
create_apikey=
clipboard=""
exitcode=0
debug=
useragent="fb-client/shell-$version"
default_curlopts=(-# -L -A "$useragent" --speed-time 30 --speed-limit 1 --connect-timeout 10)

# print $1 base64-encoded; used for file names that curl's -F option cannot pass verbatim
base64_encode() {
	if type base64 >/dev/null 2>&1; then
		printf "%s" "$1" | base64
	elif type openssl >/dev/null 2>&1; then
		printf "%s" "$1" | openssl enc -base64
	else
		printf "%s\n" "Warning: can't find base64 nor openssl executable" >&2
		printf "%s\n" " filename of uploaded file will be set to stdin" >&2
		printf "%s\n" "stdin"
	fi
}

request_helper() {
	# available modes are: d, u, m
	mode=$1
	url=$2
	file=$3

	# if available use the external helper, else fall back to calling curl
	if [ -x "$libdir/fb-helper" ]; then
		helperopts=(-u "$url")

		if [ "$debug" ]; then
			helperopts+=(-D)
		fi

		if [ -e "$apikey_file" ]; then
			helperopts+=(-a "$apikey_file")
		fi

		if [ "$mode" = "u" ]; then
			helperopts+=(-f "$file")
		fi

		if [ "$mode" = "m" ]; then
			for id in "${multipaste_ids[@]}"; do
				helperopts+=(-F "ids[]=$id")
			done
		fi

		"$libdir/fb-helper" "${helperopts[@]}"
	else
		curlopts=("${default_curlopts[@]}")

		require_executable curl

		if [ -e "$apikey_file" ]; then
			curlopts+=("-F" "apikey=<${apikey_file}")
		else
			curlopts+=("-n")
		fi

		if [ "$debug" ]; then
			curlopts+=("-v")
		fi

		if [ "$mode" = "m" ]; then
			for id in "${multipaste_ids[@]}"; do
				curlopts+=(-F "ids[]=$id")
			done
		fi

		if [ "$mode" = "d" ]; then
			curlopts+=("-s")
		fi

		if [ "$mode" = "u" ]; then
			basefilename=`basename -- "$file"`
			if [ "`$stat -- "$file"`" -eq "0" ] || printf "%s" "$basefilename" | grep -F -q ","; then
				if [ "`wc -c < "$file"`" -eq "0" ]; then
					printf "%s\n" "Error: skipping 0-byte file: \"$file\"" >&2
					return 1
				fi
				base64fn="`base64_encode "$basefilename"`"
				curl "${curlopts[@]}" -F "file=@-" -F "filename=$base64fn" "$url" < "$file" -o /dev/stdout
			else
				curl "${curlopts[@]}" -F "file=@$file" "$url" -o /dev/stdout
			fi
		else
			curl "${curlopts[@]}" "$url"
		fi
	fi
}

require_executable() {
	if ! type "$1" >/dev/null 2>&1; then
		printf "%s\n" "Error: $1 not found. Please install." >&2
		exit 1
	fi
}

is_url() {
	if printf "%s" "$1" | grep -qE "^(f|ht)tp(s)?://.+"; then
		return 0
	fi
	return 1
}

is_pastebin_url() {
	if printf "%s" "$1" | grep -qE "^$pastebin.+"; then
		return 0
	fi
	return 1
}

# pack all arguments into a single tar archive (optionally compressed) and upload it
do_tar_upload() {
	if [ "$compress" = "1" ]; then
		file="$tmpdir/$filename.tar.gz"
		tar -cf - -- "$@" | gzip -n -c > "$file" || return 1
	elif [ "$compress" = "2" ]; then
		file="$tmpdir/$filename.tar.xz"
		tar -cf - -- "$@" | xz -c > "$file" || return 1
	else
		file="$tmpdir/$filename.tar"
		tar -cf "$file" -- "$@" || return 1
	fi
	compress=0
	do_upload "$file" || return 1
}

# upload a single file or directory (directories are tarred first), print the
# resulting URL and remember it so it can be copied to the clipboard later
do_upload() {
	local extra=""
	file="$1"
	basefilename="`basename -- "$file"`"
	basedirname="`dirname -- "$file"`"

	if [ ! -r "$file" ]; then
		# sh doesn't have perror so this message can't be more precise
		printf "%s\n" "Error: File \"$file\" is not readable/not found." >&2
		return 1
	fi

	if [ -d "$file" ]; then
		cd "$basedirname"
		if [ "$compress" = "1" ]; then
			file="$tmpdir/$basefilename.tar.gz"
			tar -cf - -- "$basefilename" | gzip -n -c > "$file" || return 1
		elif [ "$compress" = "2" ]; then
			file="$tmpdir/$basefilename.tar.xz"
			tar -cf - -- "$basefilename" | xz -c > "$file" || return 1
		else
			file="$tmpdir/$basefilename.tar"
			tar -cf "$file" -- "$basefilename" || return 1
		fi
	else
		if [ "$compress" = "1" ]; then
			gzip -n -c -- "$file" > "$tmpdir/$basefilename.gz" || return 1
			file="$tmpdir/$basefilename.gz"
		elif [ "$compress" = "2" ]; then
			xz -c -- "$file" > "$tmpdir/$basefilename.xz" || return 1
			file="$tmpdir/$basefilename.xz"
		fi
	fi

	tmpfile=`mktemp "$tmpdir/data.XXXXXX"`

	if [ "`$stat -- "$file"`" -gt "$warnsize" ]; then
		warnsize=`request_helper d "$pastebin/file/get_max_size"`
		if [ "`$stat -- "$file"`" -gt "$warnsize" ]; then
			printf "%s\n" "Warning: Your upload is too big and would be rejected. Maximum size is: $warnsize bytes. Skipping..." >&2
			return 1
		fi
	fi

	request_helper u "$pastebin/file/do_upload" "$file" > "$tmpfile" || return 1
	sed '$d' "$tmpfile" >&2
	url=`tail -1 "$tmpfile"`"$extension"
	rm "$tmpfile"
	printf "%s\n" "$url"
	if printf "%s" "$url" | grep -qE "^https?://"; then
		if [ -z "$clipboard" ]; then
			clipboard="$url"
		else
			clipboard="$clipboard $url"
		fi
	fi
}

read_stdin() {
	if tty -s; then
		printf "%s\n" "^C to exit, ^D to send"
	fi
	cat > "$1"
}

read_string() {
	read -r tmp
	echo "$tmp"
}

# print the paste ID contained in a URL, or print the argument unchanged if it
# already is a plain ID
id_from_arg() {
	local regex="https?:\/\/[^\/]+\/([^\/]+).*"
	if [[ $1 =~ $regex ]]; then
		printf "%s" "${BASH_REMATCH[1]}"
	else
		printf "%s" "$1"
	fi
}

# interactively request a new API key from the server and store it in $apikey_file
create_apikey() {
	if [ -z "$HOST" ]; then
		HOST=`hostname`
	fi
	require_executable curl

	printf "%s" "Username: "
	read_string | tr -d "\n" > "$tmpdir/username"
	printf "%s" "Password: "
	stty -echo
	read_string | tr -d "\n" > "$tmpdir/password"
	stty echo
	printf "\n"

	curlopts=("${default_curlopts[@]}")
	if [ "$debug" ]; then
		curlopts+=(-v)
	fi

	curl "${curlopts[@]}" -w "%{http_code}\n" -s \
		-F "username=<$tmpdir/username" \
		-F "password=<$tmpdir/password" \
		-F "comment=fb-client $USER@$HOST" \
		"$pastebin/user/create_apikey" > "$tmpdir/api-result"
	rm "$tmpdir/username" "$tmpdir/password"

	status_code=`tail -n 1 "$tmpdir/api-result"`
	if [ "$status_code" == "200" ]; then
		if [ ! -d "$XDG_CONFIG_HOME/fb-client/" ]; then
			mkdir -p "$XDG_CONFIG_HOME/fb-client"
		fi
		head -n 1 "$tmpdir/api-result" > "$apikey_file"
		return 0
	fi

	echo "Failed to generate API key:" >&2
	sed '$d' "$tmpdir/api-result" >&2
	return 1
}
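
# Illustrative invocations, assuming this script is installed as "fb" (the
# command name is an assumption; the examples only restate behaviour
# implemented below). Printed URLs are also copied to the clipboard when
# xclip/pbcopy is available.
#   fb file.txt              upload file.txt and print its URL
#   dmesg | fb -n dmesg.txt  upload stdin under the name "dmesg.txt"
#   fb -d Ab3xY              delete the upload with ID Ab3xY (a full URL works too)
#   fb -g Ab3xY              fetch the upload with ID Ab3xY and write it to stdout
#   fb -m 1.txt 2.txt        upload both files and combine them into a multipaste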
help() {
	cat <<!
Usage: fb [options] [file(s)/folder(s)/ID(s)/URL(s)]

Upload/nopaste file(s)/stdin to paste.xinu.at and copy URL(s) to clipboard.

Switches:
	-d	delete the IDs
	-g	download the IDs and output on stdout (use with care!)
	-m	create a multipaste
	-h	this help
	-v	show the client version
	-H	display an upload history
	-a	create a new api key

Options:
	-e	extension for default highlighting (e.g. "diff")
	-n	file name to use for upload when reading from stdin
		Defaults to "stdin"
	-t	upload a tar file containing all files (and directories)
	-c	compress the file being uploaded with gz or xz if used 2 times
		When used in conjunction with -g this decompresses the download
	-D	show debugging information
!
}

if ! type getopts >/dev/null 2>&1; then
	printf "%s\n" "Error: getopts is not supported by your shell" >&2
	exit 1
fi

while getopts "e:n:gmdhHatcvD" option; do
	case $option in
		e) extension="$OPTARG";;
		n) filename="$OPTARG";;
		g) get=1;;
		m) multipaste=1;;
		c) compress=`expr $compress + 1`;;
		t) tar=1;;
		d) delete=1;;
		H) display_history=1;;
		a) create_apikey=1;;
		v) printf "%s\n" "$version"; exit 0;;
		D) debug=1;;
		h|\?) help; exit 0;;
	esac
done
shift `expr $OPTIND - 1`

# more than one argument (without -t) implies a multipaste
if [[ "$#" -gt 1 && ! "$tar" ]]; then
	multipaste=1
fi

if [ "$compress" = "1" ]; then
	require_executable gzip
elif [ "$compress" = "2" ]; then
	require_executable xz
fi

tmpdir="`mktemp -dt "fb.XXXXXX"`"
trap "rm -rf '${tmpdir}'" EXIT TERM

if [ "$delete" ] || [ "$get" ]; then
	if [ $# -eq 0 ]; then
		printf "%s\n" "Error: no ID specified" >&2
		exit 1
	fi
	for i in "$@"; do
		i=$(id_from_arg "$i")
		if [ "$delete" ]; then
			request_helper d "$pastebin/file/delete/$i" || exitcode=1
		elif [ "$get" ]; then
			if [ "$compress" = "1" ]; then
				request_helper d "$pastebin/$i" | gzip -cd || exitcode=1
			elif [ "$compress" = "2" ]; then
				request_helper d "$pastebin/$i" | xz -cd || exitcode=1
			else
				request_helper d "$pastebin/$i" || exitcode=1
			fi
		fi
	done
elif [ "$display_history" ]; then
	request_helper d "$pastebin/file/upload_history" || exitcode=1
elif [ "$create_apikey" ]; then
	create_apikey
	exit $?
elif [ $# -eq 0 ]; then
	if [ "$tar" ]; then
		printf "%s\n" "Error: -t is not supported when operating on stdin" >&2
		exit 1
	fi
	read_stdin "$tmpdir/$filename"
	do_upload "$tmpdir/$filename" || exitcode=1
else
	if [ "$tar" ]; then
		have_url=
		for i in "$@"; do
			if is_url "$i"; then
				have_url=1
			fi
		done
		if [ "$have_url" ]; then
			# TODO: support -t when passing URLs as arguments
			printf "%s\n" "Error: -t is not yet supported when operating on a URL" >&2
			exit 1
		else
			do_tar_upload "$@" || exitcode=1
		fi
	else
		for i in "$@"; do
			if is_url "$i"; then
				if [ "$multipaste" ]; then
					if is_pastebin_url "$i"; then
						multipaste_ids+=("$(id_from_arg "$i")")
						continue
					fi
				fi
				cd "$tmpdir"
				if ! request_helper d "$i" > "`basename "$i"`"; then
					exitcode=1
					continue
				fi
				for f in *; do
					if ! do_upload "$f"; then
						exitcode=1
					fi
					rm -f -- "$f"
				done
			else
				do_upload "$i" || exitcode=1
			fi
		done
	fi

	if [ "$multipaste" ]; then
		for url in $clipboard; do
			id="$(id_from_arg "$url")"
			multipaste_ids+=("$id")
		done
		tmpfile=`mktemp "$tmpdir/data.XXXXXX"`
		request_helper m "$pastebin/file/do_multipaste" > "$tmpfile" || exitcode=1
		sed '$d' "$tmpfile" >&2
		url=`tail -1 "$tmpfile"`"$extension"
		printf "%s\n" "$url"
		if printf "%s" "$url" | grep -qE "^https?://"; then
			clipboard="$url"
		fi
	fi
fi

if [ "$clipboard" != "" ]; then
	type $clipboard_cmd >/dev/null 2>&1 && printf "%s" "$clipboard" | nohup $clipboard_cmd >/dev/null 2>&1
fi

exit $exitcode

# vim: set noet: