diff --git a/bash/common-utils.bash b/bash/common-utils.bash index 7334aea..fb54496 100755 --- a/bash/common-utils.bash +++ b/bash/common-utils.bash @@ -192,24 +192,27 @@ _get_latest_sha() { ################################################### # Encode the given string to parse properly as json # Globals: None -# Arguments: 1 -# ${1} = string -# Result: print encoded string +# Arguments: 2 +# ${1} = mode - "j" to escape for json, anything else to escape only for printing +# ${2} = input +# Result: if ${1} is j, then escape \, / and " along with control chars, else escape only control chars # Reference: # https://tools.ietf.org/html/rfc7159#section-7 ################################################### _json_escape() { - declare input="${1:?Provide Input}" - # \ and / - : "${input//\\/\\\\}" - : "${_//\//\\\/}" - : "${_//\'/\\\'}" # ' (not strictly needed ?) - : "${_//\"/\\\"}" # " - : "${_//$'\t'/\\\t}" # \t (tab) - : "${_//$'\n'/\\\n}" # \n (newline) - : "${_//$'\r'/\\\r}" # \r (carriage return) - : "${_//$'\f'/\\\f}" # \f (form feed) - : "${_//$'\b'/\\\b}" # \b (backspace) + declare mode="${1:?}" input="${2:?Provide Input}" + [[ ${mode} = "j" ]] && { + # \ and / + : "${input//\\/\\\\}" + : "${_//\//\\\/}" + # : "${_//\'/\\\'}" # ' (not strictly needed ?) + input="${_//\"/\\\"}" # " + } + : "${input//$'\t'/\\\t}" # \t (tab) + : "${_//$'\n'/\\\n}" # \n (newline) + : "${_//$'\r'/\\\r}" # \r (carriage return) + : "${_//$'\f'/\\\f}" # \f (form feed) + : "${_//$'\b'/\\\b}" # \b (backspace) printf "%s" "${_}" } diff --git a/bash/drive-utils.bash b/bash/drive-utils.bash index c344495..a3294b6 100755 --- a/bash/drive-utils.bash +++ b/bash/drive-utils.bash @@ -17,7 +17,7 @@ _check_existing_file() { declare name="${1##*/}" rootdir="${2}" query search_response id "${EXTRA_LOG}" "justify" "Checking if file" " exists on gdrive.." "-" 1>&2 - query="$(_url_encode "name='${name}' and '${rootdir}' in parents and trashed=false")" + query="$(_url_encode "name=\"${name}\" and '${rootdir}' in parents and trashed=false")" search_response="$(_api_request "${CURL_PROGRESS_EXTRA}" \ "${API_URL}/drive/${API_VERSION}/files?q=${query}&fields=files(id,name,mimeType)&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2 @@ -29,9 +29,9 @@ _check_existing_file() { ################################################### # Copy/Clone a public gdrive file/folder from another/same gdrive account -# Globals: 6 variables, 2 functions +# Globals: 6 variables, 6 functions # Variables - API_URL, API_VERSION, CURL_PROGRESS, LOG_FILE_ID, QUIET, ACCESS_TOKEN, DESCRIPTION_FILE -# Functions - _print_center, _check_existing_file, _json_value, _bytes_to_human, _clear_line +# Functions - _print_center, _check_existing_file, _json_value, _json_escape, _bytes_to_human, _clear_line # Arguments: 5 # ${1} = update or upload ( upload type ) # ${2} = file id to upload @@ -47,32 +47,32 @@ _clone_file() { [[ $# -lt 5 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1 declare job="${1}" file_id="${2}" file_root_id="${3}" name="${4}" size="${5}" - declare clone_file_post_data clone_file_response readable_size _file_id description && STRING="Cloned" - readable_size="$(_bytes_to_human "${size}")" + declare clone_file_post_data clone_file_response readable_size _file_id description escaped_name && STRING="Cloned" + escaped_name="$(_json_escape j "${name}")" print_name="$(_json_escape p "${name}")" readable_size="$(_bytes_to_human "${size}")" # create description data [[ -n ${DESCRIPTION_FILE} ]] && { : "${DESCRIPTION_FILE//%f/${name}}" && : "${_//%s/${readable_size}}"
- description="$(_json_escape "${_}")" # escape for json + description="$(_json_escape j "${_}")" # escape for json } clone_file_post_data="{\"parents\": [\"${file_root_id}\"]${description:+,\"description\":\"${description}\"}}" - _print_center "justify" "${name} " "| ${readable_size}" "=" + _print_center "justify" "${print_name} " "| ${readable_size}" "=" if [[ ${job} = update ]]; then declare file_check_json # Check if file actually exists. - if file_check_json="$(_check_existing_file "${name}" "${file_root_id}")"; then + if file_check_json="$(_check_existing_file "${escaped_name}" "${file_root_id}")"; then if [[ -n ${SKIP_DUPLICATES} ]]; then _collect_file_info "${file_check_json}" || return 1 _clear_line 1 - "${QUIET:-_print_center}" "justify" "${name}" " already exists." "=" && return 0 + "${QUIET:-_print_center}" "justify" "${print_name}" " already exists." "=" && return 0 else _print_center "justify" "Overwriting file.." "-" { _file_id="$(_json_value id 1 1 <<< "${file_check_json}")" && clone_file_post_data="$(_drive_info "${_file_id}" "parents,writersCanShare")"; } || - { _error_logging_upload "${name}" "${post_data:-${file_check_json}}" || return 1; } + { _error_logging_upload "${print_name}" "${post_data:-${file_check_json}}" || return 1; } if [[ ${_file_id} != "${file_id}" ]]; then _api_request -s \ -X DELETE \ @@ -97,15 +97,15 @@ _clone_file() { "${API_URL}/drive/${API_VERSION}/files/${file_id}/copy?supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" for _ in 1 2 3; do _clear_line 1; done _collect_file_info "${clone_file_response}" || return 1 - "${QUIET:-_print_center}" "justify" "${name} " "| ${readable_size} | ${STRING}" "=" + "${QUIET:-_print_center}" "justify" "${print_name} " "| ${readable_size} | ${STRING}" "=" return 0 } ################################################### # Create/Check directory in google drive. -# Globals: 3 variables, 2 functions +# Globals: 3 variables, 3 functions # Variables - API_URL, API_VERSION, ACCESS_TOKEN -# Functions - _url_encode, _json_value +# Functions - _url_encode, _json_value, _json_escape # Arguments: 2 # ${1} = dir name # ${2} = root dir id of given dir @@ -115,17 +115,18 @@ _clone_file() { ################################################### _create_directory() { [[ $# -lt 2 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1 - declare dirname="${1##*/}" rootdir="${2}" query search_response folder_id + declare dirname="${1##*/}" escaped_dirname rootdir="${2}" query search_response folder_id + escaped_dirname="$(_json_escape j "${dirname}")" print_dirname="$(_json_escape p "${dirname}")" - "${EXTRA_LOG}" "justify" "Creating gdrive folder:" " ${dirname}" "-" 1>&2 - query="$(_url_encode "mimeType='application/vnd.google-apps.folder' and name='${dirname}' and trashed=false and '${rootdir}' in parents")" + "${EXTRA_LOG}" "justify" "Creating gdrive folder:" " ${print_dirname}" "-" 1>&2 + query="$(_url_encode "mimeType='application/vnd.google-apps.folder' and name=\"${escaped_dirname}\" and trashed=false and '${rootdir}' in parents")" search_response="$(_api_request "${CURL_PROGRESS_EXTRA}" \ "${API_URL}/drive/${API_VERSION}/files?q=${query}&fields=files(id)&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2 if ! 
folder_id="$(printf "%s\n" "${search_response}" | _json_value id 1 1)"; then declare create_folder_post_data create_folder_response - create_folder_post_data="{\"mimeType\": \"application/vnd.google-apps.folder\",\"name\": \"${dirname}\",\"parents\": [\"${rootdir}\"]}" + create_folder_post_data="{\"mimeType\": \"application/vnd.google-apps.folder\",\"name\": \"${escaped_dirname}\",\"parents\": [\"${rootdir}\"]}" create_folder_response="$(_api_request "${CURL_PROGRESS_EXTRA}" \ -X POST \ -H "Content-Type: application/json; charset=UTF-8" \ @@ -187,9 +188,9 @@ _extract_id() { ################################################### # Upload ( Create/Update ) files on gdrive. # Interrupted uploads can be resumed. -# Globals: 8 variables, 10 functions +# Globals: 8 variables, 11 functions # Variables - API_URL, API_VERSION, QUIET, VERBOSE, VERBOSE_PROGRESS, CURL_PROGRESS, LOG_FILE_ID, ACCESS_TOKEN, DESCRIPTION_FILE -# Functions - _url_encode, _json_value, _print_center, _bytes_to_human +# Functions - _url_encode, _json_value, _json_escape _print_center, _bytes_to_human # _generate_upload_link, _upload_file_from_uri, _log_upload_session, _remove_upload_session # _full_upload, _collect_file_info # Arguments: 3 @@ -207,10 +208,10 @@ _extract_id() { _upload_file() { [[ $# -lt 3 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1 declare job="${1}" input="${2}" folder_id="${3}" \ - slug inputname extension inputsize readable_size request_method url postdata uploadlink upload_body mime_type description \ + slug escaped_slug inputname extension inputsize readable_size request_method url postdata uploadlink upload_body mime_type description \ resume_args1 resume_args2 resume_args3 - slug="${input##*/}" + slug="${input##*/}" escaped_slug="$(_json_escape j "${slug}")" print_slug="$(_json_escape p "${slug}")" inputname="${slug%.*}" extension="${slug##*.}" inputsize="$(($(wc -c < "${input}")))" && content_length="${inputsize}" @@ -227,28 +228,28 @@ _upload_file() { # create description data [[ -n ${DESCRIPTION_FILE} ]] && { : "${DESCRIPTION_FILE//%f/${slug}}" && : "${_//%s/${inputsize}}" && : "${_//%m/${mime_type}}" - description="$(_json_escape "${_}")" # escape for json + description="$(_json_escape j "${_}")" # escape for json } - _print_center "justify" "${input##*/}" " | ${readable_size}" "=" + _print_center "justify" "${print_slug}" " | ${readable_size}" "=" # Set proper variables for overwriting files [[ ${job} = update ]] && { declare file_check_json # Check if file actually exists, and create if not. - if file_check_json="$(_check_existing_file "${slug}" "${folder_id}")"; then + if file_check_json="$(_check_existing_file "${escaped_slug}" "${folder_id}")"; then if [[ -n ${SKIP_DUPLICATES} ]]; then # Stop upload if already exists ( -d/--skip-duplicates ) - _collect_file_info "${file_check_json}" "${slug}" || return 1 + _collect_file_info "${file_check_json}" "${escaped_slug}" || return 1 _clear_line 1 - "${QUIET:-_print_center}" "justify" "${slug}" " already exists." "=" && return 0 + "${QUIET:-_print_center}" "justify" "${print_slug}" " already exists." 
"=" && return 0 else request_method="PATCH" _file_id="$(_json_value id 1 1 <<< "${file_check_json}")" || - { _error_logging_upload "${slug}" "${file_check_json}" || return 1; } + { _error_logging_upload "${print_slug}" "${file_check_json}" || return 1; } url="${API_URL}/upload/drive/${API_VERSION}/files/${_file_id}?uploadType=resumable&supportsAllDrives=true&includeItemsFromAllDrives=true" # JSON post data to specify the file name and folder under while the file to be updated - postdata="{\"mimeType\": \"${mime_type}\",\"name\": \"${slug}\",\"addParents\": [\"${folder_id}\"]${description:+,\"description\":\"${description}\"}}" + postdata="{\"mimeType\": \"${mime_type}\",\"name\": \"${escaped_slug}\",\"addParents\": [\"${folder_id}\"]${description:+,\"description\":\"${description}\"}}" STRING="Updated" fi else @@ -261,11 +262,11 @@ _upload_file() { url="${API_URL}/upload/drive/${API_VERSION}/files?uploadType=resumable&supportsAllDrives=true&includeItemsFromAllDrives=true" request_method="POST" # JSON post data to specify the file name and folder under while the file to be created - postdata="{\"mimeType\": \"${mime_type}\",\"name\": \"${slug}\",\"parents\": [\"${folder_id}\"]${description:+,\"description\":\"${description}\"}}" + postdata="{\"mimeType\": \"${mime_type}\",\"name\": \"${escaped_slug}\",\"parents\": [\"${folder_id}\"]${description:+,\"description\":\"${description}\"}}" STRING="Uploaded" } - __file="${HOME}/.google-drive-upload/${slug}__::__${folder_id}__::__${inputsize}" + __file="${HOME}/.google-drive-upload/${print_slug}__::__${folder_id}__::__${inputsize}" # https://developers.google.com/drive/api/v3/manage-uploads if [[ -r "${__file}" ]]; then uploadlink="$(< "${__file}")" @@ -283,7 +284,7 @@ _upload_file() { # Resuming interrupted uploads needs http1.1 resume_args1='-s' resume_args2='--http1.1' resume_args3="Content-Range: ${content_range}" _upload_file_from_uri _clear_line - _collect_file_info "${upload_body}" "${slug}" || return 1 + _collect_file_info "${upload_body}" "${print_slug}" || return 1 _normal_logging_upload _remove_upload_session else @@ -292,7 +293,7 @@ _upload_file() { ;; 201 | 200) # Completed Resumable URI give 20* status upload_body="${http_code}" - _collect_file_info "${upload_body}" "${slug}" || return 1 + _collect_file_info "${upload_body}" "${print_slug}" || return 1 _normal_logging_upload _remove_upload_session ;; @@ -337,7 +338,7 @@ _upload_file_from_uri() { -X PUT \ -H "Content-Type: ${mime_type}" \ -H "Content-Length: ${content_length}" \ - -H "Slug: ${slug}" \ + -H "Slug: ${print_slug}" \ -T "${input}" \ -o- \ --url "${uploadlink}" \ @@ -351,7 +352,7 @@ _upload_file_from_uri() { # logging in case of successful upload _normal_logging_upload() { [[ -z ${VERBOSE_PROGRESS} ]] && _clear_line 1 - "${QUIET:-_print_center}" "justify" "${slug} " "| ${readable_size} | ${STRING}" "=" + "${QUIET:-_print_center}" "justify" "${print_slug} " "| ${readable_size} | ${STRING}" "=" return 0 } @@ -369,10 +370,10 @@ _remove_upload_session() { # wrapper to fully upload a file from scratch _full_upload() { - _generate_upload_link || { _error_logging_upload "${slug}" "${uploadlink}" || return 1; } + _generate_upload_link || { _error_logging_upload "${print_slug}" "${uploadlink}" || return 1; } _log_upload_session _upload_file_from_uri - _collect_file_info "${upload_body}" "${slug}" || return 1 + _collect_file_info "${upload_body}" "${print_slug}" || return 1 _normal_logging_upload _remove_upload_session return 0 diff --git a/bash/release/gsync b/bash/release/gsync 
index 8099db9..f02e4b3 100755 --- a/bash/release/gsync +++ b/bash/release/gsync @@ -193,24 +193,27 @@ _get_latest_sha() { ################################################### # Encode the given string to parse properly as json # Globals: None -# Arguments: 1 -# ${1} = string -# Result: print encoded string +# Arguments: 2 +# ${1} = mode - "j" to escape for json, anything else to escape only for printing +# ${2} = input +# Result: if ${1} is j, then escape \, / and " along with control chars, else escape only control chars # Reference: # https://tools.ietf.org/html/rfc7159#section-7 ################################################### _json_escape() { - declare input="${1:?Provide Input}" - # \ and / - : "${input//\\/\\\\}" - : "${_//\//\\\/}" - : "${_//\'/\\\'}" # ' (not strictly needed ?) - : "${_//\"/\\\"}" # " - : "${_//$'\t'/\\\t}" # \t (tab) - : "${_//$'\n'/\\\n}" # \n (newline) - : "${_//$'\r'/\\\r}" # \r (carriage return) - : "${_//$'\f'/\\\f}" # \f (form feed) - : "${_//$'\b'/\\\b}" # \b (backspace) + declare mode="${1:?}" input="${2:?Provide Input}" + [[ ${mode} = "j" ]] && { + # \ and / + : "${input//\\/\\\\}" + : "${_//\//\\\/}" + # : "${_//\'/\\\'}" # ' (not strictly needed ?) + input="${_//\"/\\\"}" # " + } + : "${input//$'\t'/\\\t}" # \t (tab) + : "${_//$'\n'/\\\n}" # \n (newline) + : "${_//$'\r'/\\\r}" # \r (carriage return) + : "${_//$'\f'/\\\f}" # \f (form feed) + : "${_//$'\b'/\\\b}" # \b (backspace) printf "%s" "${_}" } diff --git a/bash/release/gupload b/bash/release/gupload index 85b47c3..9f843eb 100755 --- a/bash/release/gupload +++ b/bash/release/gupload @@ -193,24 +193,27 @@ _get_latest_sha() { ################################################### # Encode the given string to parse properly as json # Globals: None -# Arguments: 1 -# ${1} = string -# Result: print encoded string +# Arguments: 2 +# ${1} = mode - "j" to escape for json, anything else to escape only for printing +# ${2} = input +# Result: if ${1} is j, then escape \, / and " along with control chars, else escape only control chars # Reference: # https://tools.ietf.org/html/rfc7159#section-7 ################################################### _json_escape() { - declare input="${1:?Provide Input}" - # \ and / - : "${input//\\/\\\\}" - : "${_//\//\\\/}" - : "${_//\'/\\\'}" # ' (not strictly needed ?) - : "${_//\"/\\\"}" # " - : "${_//$'\t'/\\\t}" # \t (tab) - : "${_//$'\n'/\\\n}" # \n (newline) - : "${_//$'\r'/\\\r}" # \r (carriage return) - : "${_//$'\f'/\\\f}" # \f (form feed) - : "${_//$'\b'/\\\b}" # \b (backspace) + declare mode="${1:?}" input="${2:?Provide Input}" + [[ ${mode} = "j" ]] && { + # \ and / + : "${input//\\/\\\\}" + : "${_//\//\\\/}" + # : "${_//\'/\\\'}" # ' (not strictly needed ?) + input="${_//\"/\\\"}" # " + } + : "${input//$'\t'/\\\t}" # \t (tab) + : "${_//$'\n'/\\\n}" # \n (newline) + : "${_//$'\r'/\\\r}" # \r (carriage return) + : "${_//$'\f'/\\\f}" # \f (form feed) + : "${_//$'\b'/\\\b}" # \b (backspace) printf "%s" "${_}" } @@ -941,7 +944,7 @@ _check_existing_file() { declare name="${1##*/}" rootdir="${2}" query search_response id "${EXTRA_LOG}" "justify" "Checking if file" " exists on gdrive.."
"-" 1>&2 - query="$(_url_encode "name='${name}' and '${rootdir}' in parents and trashed=false")" + query="$(_url_encode "name=\"${name}\" and '${rootdir}' in parents and trashed=false")" search_response="$(_api_request "${CURL_PROGRESS_EXTRA}" \ "${API_URL}/drive/${API_VERSION}/files?q=${query}&fields=files(id,name,mimeType)&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2 @@ -953,9 +956,9 @@ _check_existing_file() { ################################################### # Copy/Clone a public gdrive file/folder from another/same gdrive account -# Globals: 6 variables, 2 functions +# Globals: 6 variables, 6 functions # Variables - API_URL, API_VERSION, CURL_PROGRESS, LOG_FILE_ID, QUIET, ACCESS_TOKEN, DESCRIPTION_FILE -# Functions - _print_center, _check_existing_file, _json_value, _bytes_to_human, _clear_line +# Functions - _print_center, _check_existing_file, _json_value, _json_escape _bytes_to_human, _clear_line # Arguments: 5 # ${1} = update or upload ( upload type ) # ${2} = file id to upload @@ -971,32 +974,32 @@ _check_existing_file() { _clone_file() { [[ $# -lt 5 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1 declare job="${1}" file_id="${2}" file_root_id="${3}" name="${4}" size="${5}" - declare clone_file_post_data clone_file_response readable_size _file_id description && STRING="Cloned" - readable_size="$(_bytes_to_human "${size}")" + declare clone_file_post_data clone_file_response readable_size _file_id description escaped_name && STRING="Cloned" + escaped_name="$(_json_escape j "${name}")" print_name="$(_json_escape p "${name}")" readable_size="$(_bytes_to_human "${size}")" # create description data [[ -n ${DESCRIPTION_FILE} ]] && { : "${DESCRIPTION_FILE//%f/${name}}" && : "${_//%s/${readable_size}}" - description="$(_json_escape "${_}")" # escape for json + description="$(_json_escape j "${_}")" # escape for json } clone_file_post_data="{\"parents\": [\"${file_root_id}\"]${description:+,\"description\":\"${description}\"}}" - _print_center "justify" "${name} " "| ${readable_size}" "=" + _print_center "justify" "${print_name} " "| ${readable_size}" "=" if [[ ${job} = update ]]; then declare file_check_json # Check if file actually exists. - if file_check_json="$(_check_existing_file "${name}" "${file_root_id}")"; then + if file_check_json="$(_check_existing_file "${escaped_name}" "${file_root_id}")"; then if [[ -n ${SKIP_DUPLICATES} ]]; then _collect_file_info "${file_check_json}" || return 1 _clear_line 1 - "${QUIET:-_print_center}" "justify" "${name}" " already exists." "=" && return 0 + "${QUIET:-_print_center}" "justify" "${print_name}" " already exists." "=" && return 0 else _print_center "justify" "Overwriting file.." 
"-" { _file_id="$(_json_value id 1 1 <<< "${file_check_json}")" && clone_file_post_data="$(_drive_info "${_file_id}" "parents,writersCanShare")"; } || - { _error_logging_upload "${name}" "${post_data:-${file_check_json}}" || return 1; } + { _error_logging_upload "${print_name}" "${post_data:-${file_check_json}}" || return 1; } if [[ ${_file_id} != "${file_id}" ]]; then _api_request -s \ -X DELETE \ @@ -1021,15 +1024,15 @@ _clone_file() { "${API_URL}/drive/${API_VERSION}/files/${file_id}/copy?supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" for _ in 1 2 3; do _clear_line 1; done _collect_file_info "${clone_file_response}" || return 1 - "${QUIET:-_print_center}" "justify" "${name} " "| ${readable_size} | ${STRING}" "=" + "${QUIET:-_print_center}" "justify" "${print_name} " "| ${readable_size} | ${STRING}" "=" return 0 } ################################################### # Create/Check directory in google drive. -# Globals: 3 variables, 2 functions +# Globals: 3 variables, 3 functions # Variables - API_URL, API_VERSION, ACCESS_TOKEN -# Functions - _url_encode, _json_value +# Functions - _url_encode, _json_value, _json_escape # Arguments: 2 # ${1} = dir name # ${2} = root dir id of given dir @@ -1039,17 +1042,18 @@ _clone_file() { ################################################### _create_directory() { [[ $# -lt 2 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1 - declare dirname="${1##*/}" rootdir="${2}" query search_response folder_id + declare dirname="${1##*/}" escaped_dirname rootdir="${2}" query search_response folder_id + escaped_dirname="$(_json_escape j "${dirname}")" print_dirname="$(_json_escape p "${dirname}")" - "${EXTRA_LOG}" "justify" "Creating gdrive folder:" " ${dirname}" "-" 1>&2 - query="$(_url_encode "mimeType='application/vnd.google-apps.folder' and name='${dirname}' and trashed=false and '${rootdir}' in parents")" + "${EXTRA_LOG}" "justify" "Creating gdrive folder:" " ${print_dirname}" "-" 1>&2 + query="$(_url_encode "mimeType='application/vnd.google-apps.folder' and name=\"${escaped_dirname}\" and trashed=false and '${rootdir}' in parents")" search_response="$(_api_request "${CURL_PROGRESS_EXTRA}" \ "${API_URL}/drive/${API_VERSION}/files?q=${query}&fields=files(id)&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2 if ! folder_id="$(printf "%s\n" "${search_response}" | _json_value id 1 1)"; then declare create_folder_post_data create_folder_response - create_folder_post_data="{\"mimeType\": \"application/vnd.google-apps.folder\",\"name\": \"${dirname}\",\"parents\": [\"${rootdir}\"]}" + create_folder_post_data="{\"mimeType\": \"application/vnd.google-apps.folder\",\"name\": \"${escaped_dirname}\",\"parents\": [\"${rootdir}\"]}" create_folder_response="$(_api_request "${CURL_PROGRESS_EXTRA}" \ -X POST \ -H "Content-Type: application/json; charset=UTF-8" \ @@ -1111,9 +1115,9 @@ _extract_id() { ################################################### # Upload ( Create/Update ) files on gdrive. # Interrupted uploads can be resumed. 
-# Globals: 8 variables, 10 functions +# Globals: 8 variables, 11 functions # Variables - API_URL, API_VERSION, QUIET, VERBOSE, VERBOSE_PROGRESS, CURL_PROGRESS, LOG_FILE_ID, ACCESS_TOKEN, DESCRIPTION_FILE -# Functions - _url_encode, _json_value, _print_center, _bytes_to_human +# Functions - _url_encode, _json_value, _json_escape, _print_center, _bytes_to_human # _generate_upload_link, _upload_file_from_uri, _log_upload_session, _remove_upload_session # _full_upload, _collect_file_info # Arguments: 3 @@ -1131,10 +1135,10 @@ _upload_file() { [[ $# -lt 3 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1 declare job="${1}" input="${2}" folder_id="${3}" \ - slug inputname extension inputsize readable_size request_method url postdata uploadlink upload_body mime_type description \ + slug escaped_slug inputname extension inputsize readable_size request_method url postdata uploadlink upload_body mime_type description \ resume_args1 resume_args2 resume_args3 - slug="${input##*/}" + slug="${input##*/}" escaped_slug="$(_json_escape j "${slug}")" print_slug="$(_json_escape p "${slug}")" inputname="${slug%.*}" extension="${slug##*.}" inputsize="$(($(wc -c < "${input}")))" && content_length="${inputsize}" @@ -1151,28 +1155,28 @@ _upload_file() { # create description data [[ -n ${DESCRIPTION_FILE} ]] && { : "${DESCRIPTION_FILE//%f/${slug}}" && : "${_//%s/${inputsize}}" && : "${_//%m/${mime_type}}" - description="$(_json_escape "${_}")" # escape for json + description="$(_json_escape j "${_}")" # escape for json } - _print_center "justify" "${input##*/}" " | ${readable_size}" "=" + _print_center "justify" "${print_slug}" " | ${readable_size}" "=" # Set proper variables for overwriting files [[ ${job} = update ]] && { declare file_check_json # Check if file actually exists, and create if not. - if file_check_json="$(_check_existing_file "${slug}" "${folder_id}")"; then + if file_check_json="$(_check_existing_file "${escaped_slug}" "${folder_id}")"; then if [[ -n ${SKIP_DUPLICATES} ]]; then # Stop upload if already exists ( -d/--skip-duplicates ) - _collect_file_info "${file_check_json}" "${slug}" || return 1 + _collect_file_info "${file_check_json}" "${escaped_slug}" || return 1 _clear_line 1 - "${QUIET:-_print_center}" "justify" "${slug}" " already exists." "=" && return 0 + "${QUIET:-_print_center}" "justify" "${print_slug}" " already exists."
"=" && return 0 else request_method="PATCH" _file_id="$(_json_value id 1 1 <<< "${file_check_json}")" || - { _error_logging_upload "${slug}" "${file_check_json}" || return 1; } + { _error_logging_upload "${print_slug}" "${file_check_json}" || return 1; } url="${API_URL}/upload/drive/${API_VERSION}/files/${_file_id}?uploadType=resumable&supportsAllDrives=true&includeItemsFromAllDrives=true" # JSON post data to specify the file name and folder under while the file to be updated - postdata="{\"mimeType\": \"${mime_type}\",\"name\": \"${slug}\",\"addParents\": [\"${folder_id}\"]${description:+,\"description\":\"${description}\"}}" + postdata="{\"mimeType\": \"${mime_type}\",\"name\": \"${escaped_slug}\",\"addParents\": [\"${folder_id}\"]${description:+,\"description\":\"${description}\"}}" STRING="Updated" fi else @@ -1185,11 +1189,11 @@ _upload_file() { url="${API_URL}/upload/drive/${API_VERSION}/files?uploadType=resumable&supportsAllDrives=true&includeItemsFromAllDrives=true" request_method="POST" # JSON post data to specify the file name and folder under while the file to be created - postdata="{\"mimeType\": \"${mime_type}\",\"name\": \"${slug}\",\"parents\": [\"${folder_id}\"]${description:+,\"description\":\"${description}\"}}" + postdata="{\"mimeType\": \"${mime_type}\",\"name\": \"${escaped_slug}\",\"parents\": [\"${folder_id}\"]${description:+,\"description\":\"${description}\"}}" STRING="Uploaded" } - __file="${HOME}/.google-drive-upload/${slug}__::__${folder_id}__::__${inputsize}" + __file="${HOME}/.google-drive-upload/${print_slug}__::__${folder_id}__::__${inputsize}" # https://developers.google.com/drive/api/v3/manage-uploads if [[ -r "${__file}" ]]; then uploadlink="$(< "${__file}")" @@ -1207,7 +1211,7 @@ _upload_file() { # Resuming interrupted uploads needs http1.1 resume_args1='-s' resume_args2='--http1.1' resume_args3="Content-Range: ${content_range}" _upload_file_from_uri _clear_line - _collect_file_info "${upload_body}" "${slug}" || return 1 + _collect_file_info "${upload_body}" "${print_slug}" || return 1 _normal_logging_upload _remove_upload_session else @@ -1216,7 +1220,7 @@ _upload_file() { ;; 201 | 200) # Completed Resumable URI give 20* status upload_body="${http_code}" - _collect_file_info "${upload_body}" "${slug}" || return 1 + _collect_file_info "${upload_body}" "${print_slug}" || return 1 _normal_logging_upload _remove_upload_session ;; @@ -1261,7 +1265,7 @@ _upload_file_from_uri() { -X PUT \ -H "Content-Type: ${mime_type}" \ -H "Content-Length: ${content_length}" \ - -H "Slug: ${slug}" \ + -H "Slug: ${print_slug}" \ -T "${input}" \ -o- \ --url "${uploadlink}" \ @@ -1275,7 +1279,7 @@ _upload_file_from_uri() { # logging in case of successful upload _normal_logging_upload() { [[ -z ${VERBOSE_PROGRESS} ]] && _clear_line 1 - "${QUIET:-_print_center}" "justify" "${slug} " "| ${readable_size} | ${STRING}" "=" + "${QUIET:-_print_center}" "justify" "${print_slug} " "| ${readable_size} | ${STRING}" "=" return 0 } @@ -1293,10 +1297,10 @@ _remove_upload_session() { # wrapper to fully upload a file from scratch _full_upload() { - _generate_upload_link || { _error_logging_upload "${slug}" "${uploadlink}" || return 1; } + _generate_upload_link || { _error_logging_upload "${print_slug}" "${uploadlink}" || return 1; } _log_upload_session _upload_file_from_uri - _collect_file_info "${upload_body}" "${slug}" || return 1 + _collect_file_info "${upload_body}" "${print_slug}" || return 1 _normal_logging_upload _remove_upload_session return 0 diff --git a/sh/common-utils.sh 
b/sh/common-utils.sh index c9c7004..350dd15 100755 --- a/sh/common-utils.sh +++ b/sh/common-utils.sh @@ -171,24 +171,34 @@ _get_latest_sha() { ################################################### # Encode the given string to parse properly as json # Globals: None -# Arguments: 1 -# ${1} = string -# Result: print encoded string +# Arguments: 2 +# ${1} = mode - "j" to escape for json, anything else to escape only for printing +# ${2} = input +# Result: if ${1} is j, then escape \, / and " along with control chars, else escape only control chars # Reference: # https://tools.ietf.org/html/rfc7159#section-7 ################################################### _json_escape() { - input_json_escape="${1:?Provide Input}" output_json_escape="" - output_json_escape="$(printf "%s" "${input_json_escape}" | sed \ - -e "s|'\'|'\\'|g" \ - -e "s|'/'|'\/'|g" \ - -e "s|'|\'|g" \ - -e 's/\"/\\\"/g' \ - -e ':a; $!N' \ - -e 's|\t|\\t|g' \ - -e 's|\r|\\r|g' \ - -e 's|\f|\\f|g' | awk -v ORS='\\n' '1')" + mode_json_escape="${1:?Missing mode}" input_json_escape="${2:?Provide Input}" output_json_escape="" + # just for reference "s|'|\'|g" + if [ "${mode_json_escape}" = "j" ]; then + output_json_escape="$(printf "%s" "${input_json_escape}" | sed \ + -e "s|'\'|'\\'|g" \ + -e "s|'/'|'\/'|g" \ + -e 's/\"/\\\"/g' \ + -e ':a; $!N' \ + -e 's|\t|\\t|g' \ + -e 's|\r|\\r|g' \ + -e 's|\f|\\f|g')" + else + output_json_escape="$(printf "%s" "${input_json_escape}" | sed \ + -e ':a; $!N' \ + -e 's|\t|\\t|g' \ + -e 's|\r|\\r|g' \ + -e 's|\f|\\f|g')" + fi # use awk because sed just messes up with newlines + output_json_escape="$(printf "%s" "${output_json_escape}" | awk '{printf "%s%s",sep,$0; sep="\\n"} END{print ""}')" printf "%s" "${output_json_escape}" } diff --git a/sh/drive-utils.sh b/sh/drive-utils.sh index 10997f8..21cff42 100755 --- a/sh/drive-utils.sh +++ b/sh/drive-utils.sh @@ -18,7 +18,7 @@ _check_existing_file() ( unset query_check_existing_file response_check_existing_file id_check_existing_file "${EXTRA_LOG}" "justify" "Checking if file" " exists on gdrive.."
"-" 1>&2 - query_check_existing_file="$(_url_encode "name='${name_check_existing_file}' and '${rootdir_check_existing_file}' in parents and trashed=false and 'me' in writers")" + query_check_existing_file="$(_url_encode "name=\"${name_check_existing_file}\" and '${rootdir_check_existing_file}' in parents and trashed=false and 'me' in writers")" response_check_existing_file="$(_api_request "${CURL_PROGRESS_EXTRA}" \ "${API_URL}/drive/${API_VERSION}/files?q=${query_check_existing_file}&fields=files(id,name,mimeType)&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2 @@ -30,9 +30,9 @@ _check_existing_file() ( ################################################### # Copy/Clone a public gdrive file/folder from another/same gdrive account -# Globals: 6 variables, 2 functions +# Globals: 6 variables, 6 functions # Variables - API_URL, API_VERSION, CURL_PROGRESS, LOG_FILE_ID, QUIET, ACCESS_TOKEN, DESCRIPTION_FILE -# Functions - _print_center, _check_existing_file, _json_value, _bytes_to_human, _clear_line +# Functions - _print_center, _check_existing_file, _json_value, _bytes_to_human, _clear_line, _json_escape # Arguments: 5 # ${1} = update or upload ( upload type ) # ${2} = file id to upload @@ -50,37 +50,38 @@ _clone_file() { job_clone_file="${1}" file_id_clone_file="${2}" file_root_id_clone_file="${3}" name_clone_file="${4}" size_clone_file="${5}" unset post_data_clone_file response_clone_file readable_size_clone_file description_clone_file && STRING="Cloned" readable_size_clone_file="$(printf "%s\n" "${size_clone_file}" | _bytes_to_human)" + escaped_name_clone_file="$(_json_escape j "${name_clone_file}")" print_name_clone_file="$(_json_escape p "${name_clone_file}")" # create description data [ -n "${DESCRIPTION_FILE}" ] && { description_clone_file="$(printf "%s\n" "${DESCRIPTION_FILE}" | sed -e "s|%f|${name_clone_file}|g|" -e "s|%f|${readable_size_clone_file}|g|")" - description_clone_file="$(_json_escape "${description_clone_file}")" # escape for json + description_clone_file="$(_json_escape j "${description_clone_file}")" # escape for json } post_data_clone_file="{\"parents\": [\"${file_root_id_clone_file}\"]${description_clone_file:+,\"description\":\"${description_clone_file}\"}}" - _print_center "justify" "${name_clone_file} " "| ${readable_size_clone_file}" "=" + _print_center "justify" "${print_name_clone_file} " "| ${readable_size_clone_file}" "=" if [ "${job_clone_file}" = update ]; then unset file_check_json_clone_file # Check if file actually exists. - if file_check_json_clone_file="$(_check_existing_file "${name_clone_file}" "${file_root_id_clone_file}")"; then + if file_check_json_clone_file="$(_check_existing_file "${escaped_name_clone_file}" "${file_root_id_clone_file}")"; then if [ -n "${SKIP_DUPLICATES}" ]; then - _collect_file_info "${file_check_json_clone_file}" "${name_clone_file}" || return 1 + _collect_file_info "${file_check_json_clone_file}" "${print_name_clone_file}" || return 1 _clear_line 1 - "${QUIET:-_print_center}" "justify" "${name_clone_file}" " already exists." "=" && return 0 + "${QUIET:-_print_center}" "justify" "${print_name_clone_file}" " already exists." "=" && return 0 else _print_center "justify" "Overwriting file.." 
"-" { _file_id_clone_file="$(printf "%s\n" "${file_check_json_clone_file}" | _json_value id 1 1)" && post_data_clone_file="$(_drive_info "${_file_id_clone_file}" "parents,writersCanShare")"; } || - { _error_logging_upload "${name_clone_file}" "${post_data_clone_file:-${file_check_json_clone_file}}" || return 1; } + { _error_logging_upload "${print_name_clone_file}" "${post_data_clone_file:-${file_check_json_clone_file}}" || return 1; } if [ "${_file_id_clone_file}" != "${file_id_clone_file}" ]; then _api_request -s \ -X DELETE \ "${API_URL}/drive/${API_VERSION}/files/${_file_id_clone_file}?supportsAllDrives=true&includeItemsFromAllDrives=true" 2>| /dev/null 1>&2 || : STRING="Updated" else - _collect_file_info "${file_check_json_clone_file}" "${name_clone_file}" || return 1 + _collect_file_info "${file_check_json_clone_file}" "${print_name_clone_file}" || return 1 fi fi else @@ -97,16 +98,16 @@ _clone_file() { -d "${post_data_clone_file}" \ "${API_URL}/drive/${API_VERSION}/files/${file_id_clone_file}/copy?supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" for _ in 1 2 3; do _clear_line 1; done - _collect_file_info "${response_clone_file}" "${name_clone_file}" || return 1 - "${QUIET:-_print_center}" "justify" "${name_clone_file} " "| ${readable_size_clone_file} | ${STRING}" "=" + _collect_file_info "${response_clone_file}" "${print_name_clone_file}" || return 1 + "${QUIET:-_print_center}" "justify" "${print_name_clone_file} " "| ${readable_size_clone_file} | ${STRING}" "=" return 0 } ################################################### # Create/Check directory in google drive. -# Globals: 3 variables, 2 functions +# Globals: 3 variables, 3 functions # Variables - API_URL, API_VERSION, ACCESS_TOKEN -# Functions - _url_encode, _json_value +# Functions - _url_encode, _json_value, _json_escape # Arguments: 2 # ${1} = dir name # ${2} = root dir id of given dir @@ -118,16 +119,18 @@ _create_directory() { [ $# -lt 2 ] && printf "Missing arguments\n" && return 1 dirname_create_directory="${1##*/}" rootdir_create_directory="${2}" unset query_create_directory search_response_create_directory folder_id_create_directory + escaped_dirname_create_directory="$(_json_escape j "${dirname_create_directory}")" + print_dirname_create_directory="$(_json_escape p "${dirname_create_directory}")" - "${EXTRA_LOG}" "justify" "Creating GDRIVE DIR:" " ${dirname_create_directory}" "-" 1>&2 - query_create_directory="$(_url_encode "mimeType='application/vnd.google-apps.folder' and name='${dirname_create_directory}' and trashed=false and '${rootdir_create_directory}' in parents")" + "${EXTRA_LOG}" "justify" "Creating GDRIVE DIR:" " ${print_dirname_create_directory}" "-" 1>&2 + query_create_directory="$(_url_encode "mimeType='application/vnd.google-apps.folder' and name=\"${escaped_dirname_create_directory}\" and trashed=false and '${rootdir_create_directory}' in parents")" search_response_create_directory="$(_api_request "${CURL_PROGRESS_EXTRA}" \ "${API_URL}/drive/${API_VERSION}/files?q=${query_create_directory}&fields=files(id)&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2 if ! 
folder_id_create_directory="$(printf "%s\n" "${search_response_create_directory}" | _json_value id 1 1)"; then unset create_folder_post_data_create_directory create_folder_response_create_directory - create_folder_post_data_create_directory="{\"mimeType\": \"application/vnd.google-apps.folder\",\"name\": \"${dirname_create_directory}\",\"parents\": [\"${rootdir_create_directory}\"]}" + create_folder_post_data_create_directory="{\"mimeType\": \"application/vnd.google-apps.folder\",\"name\": \"${escaped_dirname_create_directory}\",\"parents\": [\"${rootdir_create_directory}\"]}" create_folder_response_create_directory="$(_api_request "${CURL_PROGRESS_EXTRA}" \ -X POST \ -H "Content-Type: application/json; charset=UTF-8" \ @@ -190,9 +193,9 @@ _extract_id() { ################################################### # Upload ( Create/Update ) files on gdrive. # Interrupted uploads can be resumed. -# Globals: 8 variables, 10 functions +# Globals: 8 variables, 11 functions # Variables - API_URL, API_VERSION, QUIET, VERBOSE, VERBOSE_PROGRESS, CURL_PROGRESS, LOG_FILE_ID, ACCESS_TOKEN, DESCRIPTION_FILE -# Functions - _url_encode, _json_value, _print_center, _bytes_to_human +# Functions - _url_encode, _json_value, _json_escape, _print_center, _bytes_to_human # _generate_upload_link, _upload_file_from_uri, _log_upload_session, _remove_upload_session # _full_upload, _collect_file_info # Arguments: 3 @@ -215,6 +218,7 @@ _upload_file() { resume_args1_upload_file resume_args2_upload_file resume_args3_upload_file slug_upload_file="${input_upload_file##*/}" + escaped_slug_upload_file="$(_json_escape j "${slug_upload_file}")" print_slug_upload_file="$(_json_escape p "${slug_upload_file}")" inputname_upload_file="${slug_upload_file%.*}" extension_upload_file="${slug_upload_file##*.}" inputsize_upload_file="$(($(wc -c < "${input_upload_file}")))" && content_length_upload_file="${inputsize_upload_file}" @@ -231,28 +235,28 @@ _upload_file() { # create description data [ -n "${DESCRIPTION_FILE}" ] && { description_upload_file="$(printf "%s\n" "${DESCRIPTION_FILE}" | sed -e "s|%f|${slug_upload_file}|g" -e "s|%f|${readable_size_upload_file}|g" -e "s|%m|${mime_type_upload_file}|g")" - description_upload_file="$(_json_escape "${description_upload_file}")" # escape for json + description_upload_file="$(_json_escape j "${description_upload_file}")" # escape for json } - _print_center "justify" "${slug_upload_file}" " | ${readable_size_upload_file}" "=" + _print_center "justify" "${print_slug_upload_file}" " | ${readable_size_upload_file}" "=" # Set proper variables for overwriting files [ "${job_upload_file}" = update ] && { unset file_check_json_upload_file # Check if file actually exists, and create if not. - if file_check_json_upload_file="$(_check_existing_file "${slug_upload_file}" "${folder_id_upload_file}")"; then + if file_check_json_upload_file="$(_check_existing_file "${escaped_slug_upload_file}" "${folder_id_upload_file}")"; then if [ -n "${SKIP_DUPLICATES}" ]; then # Stop upload if already exists ( -d/--skip-duplicates ) - _collect_file_info "${file_check_json_upload_file}" "${slug_upload_file}" || return 1 + _collect_file_info "${file_check_json_upload_file}" "${print_slug_upload_file}" || return 1 _clear_line 1 - "${QUIET:-_print_center}" "justify" "${slug_upload_file} already exists." "=" && return 0 + "${QUIET:-_print_center}" "justify" "${print_slug_upload_file} already exists." 
"=" && return 0 else request_method_upload_file="PATCH" _file_id_upload_file="$(printf "%s\n" "${file_check_json_upload_file}" | _json_value id 1 1)" || - { _error_logging_upload "${slug_upload_file}" "${file_check_json_upload_file}" || return 1; } + { _error_logging_upload "${print_slug_upload_file}" "${file_check_json_upload_file}" || return 1; } url_upload_file="${API_URL}/upload/drive/${API_VERSION}/files/${_file_id_upload_file}?uploadType=resumable&supportsAllDrives=true&includeItemsFromAllDrives=true" # JSON post data to specify the file name and folder under while the file to be updated - postdata_upload_file="{\"mimeType\": \"${mime_type_upload_file}\",\"name\": \"${slug_upload_file}\",\"addParents\": [\"${folder_id_upload_file}\"]${description_upload_file:+,\"description\":\"${description_upload_file}\"}}" + postdata_upload_file="{\"mimeType\": \"${mime_type_upload_file}\",\"name\": \"${escaped_slug_upload_file}\",\"addParents\": [\"${folder_id_upload_file}\"]${description_upload_file:+,\"description\":\"${description_upload_file}\"}}" STRING="Updated" fi else @@ -265,11 +269,11 @@ _upload_file() { url_upload_file="${API_URL}/upload/drive/${API_VERSION}/files?uploadType=resumable&supportsAllDrives=true&includeItemsFromAllDrives=true" request_method_upload_file="POST" # JSON post data to specify the file name and folder under while the file to be created - postdata_upload_file="{\"mimeType\": \"${mime_type_upload_file}\",\"name\": \"${slug_upload_file}\",\"parents\": [\"${folder_id_upload_file}\"]${description_upload_file:+,\"description\":\"${description_upload_file}\"}}" + postdata_upload_file="{\"mimeType\": \"${mime_type_upload_file}\",\"name\": \"${escaped_slug_upload_file}\",\"parents\": [\"${folder_id_upload_file}\"]${description_upload_file:+,\"description\":\"${description_upload_file}\"}}" STRING="Uploaded" } - __file_upload_file="${INFO_PATH}/${slug_upload_file}__::__${folder_id_upload_file}__::__${inputsize_upload_file}" + __file_upload_file="${INFO_PATH}/${print_slug_upload_file}__::__${folder_id_upload_file}__::__${inputsize_upload_file}" # https://developers.google.com/drive/api/v3/manage-uploads if [ -r "${__file_upload_file}" ]; then uploadlink_upload_file="$(cat "${__file_upload_file}" || :)" @@ -287,7 +291,7 @@ _upload_file() { # Resuming interrupted uploads needs http1.1 resume_args1_upload_file='-s' resume_args2_upload_file='--http1.1' resume_args3_upload_file="Content-Range: ${content_range_upload_file}" _upload_file_from_uri _clear_line - _collect_file_info "${upload_body_upload_file}" "${slug_upload_file}" || return 1 + _collect_file_info "${upload_body_upload_file}" "${print_slug_upload_file}" || return 1 _normal_logging_upload _remove_upload_session else @@ -299,7 +303,7 @@ _upload_file() { ;; 201 | 200) # Completed Resumable URI give 20* status upload_body_upload_file="${http_code_upload_file}" - _collect_file_info "${upload_body_upload_file}" "${slug_upload_file}" || return 1 + _collect_file_info "${upload_body_upload_file}" "${print_slug_upload_file}" || return 1 _normal_logging_upload _remove_upload_session ;; @@ -341,7 +345,7 @@ _upload_file_from_uri() { -X PUT \ -H "Content-Type: ${mime_type_upload_file}" \ -H "Content-Length: ${content_length_upload_file}" \ - -H "Slug: ${slug_upload_file}" \ + -H "Slug: ${print_slug_upload_file}" \ -T "${input_upload_file}" \ -o- \ --url "${uploadlink_upload_file}" \ @@ -373,10 +377,10 @@ _remove_upload_session() { # wrapper to fully upload a file from scratch _full_upload() { - _generate_upload_link || { 
_error_logging_upload "${slug_upload_file}" "${uploadlink_upload_file}" || return 1; } + _generate_upload_link || { _error_logging_upload "${print_slug_upload_file}" "${uploadlink_upload_file}" || return 1; } _log_upload_session _upload_file_from_uri - _collect_file_info "${upload_body_upload_file}" "${slug_upload_file}" || return 1 + _collect_file_info "${upload_body_upload_file}" "${print_slug_upload_file}" || return 1 _normal_logging_upload _remove_upload_session return 0 diff --git a/sh/release/gsync b/sh/release/gsync index ade54a7..fa356b0 100755 --- a/sh/release/gsync +++ b/sh/release/gsync @@ -172,24 +172,34 @@ _get_latest_sha() { ################################################### # Encode the given string to parse properly as json # Globals: None -# Arguments: 1 -# ${1} = string -# Result: print encoded string +# Arguments: 2 +# ${1} = json or something else +# ${2} = input +# Result: if ${1} is j, then escape all chars, else only special chars # Reference: # https://tools.ietf.org/html/rfc7159#section-7 ################################################### _json_escape() { - input_json_escape="${1:?Provide Input}" output_json_escape="" - output_json_escape="$(printf "%s" "${input_json_escape}" | sed \ - -e "s|'\'|'\\'|g" \ - -e "s|'/'|'\/'|g" \ - -e "s|'|\'|g" \ - -e 's/\"/\\\"/g' \ - -e ':a; $!N' \ - -e 's|\t|\\t|g' \ - -e 's|\r|\\r|g' \ - -e 's|\f|\\f|g' | awk -v ORS='\\n' '1')" + mode_json_escape="${1:?Missing mode}" input_json_escape="${2:?Provide Input}" output_json_escape="" + # just for refrence "s|'|\'|g" + if [ "${mode_json_escape}" = "j" ]; then + output_json_escape="$(printf "%s" "${input_json_escape}" | sed \ + -e "s|'\'|'\\'|g" \ + -e "s|'/'|'\/'|g" \ + -e 's/\"/\\\"/g' \ + -e ':a; $!N' \ + -e 's|\t|\\t|g' \ + -e 's|\r|\\r|g' \ + -e 's|\f|\\f|g')" + else + output_json_escape="$(printf "%s" "${input_json_escape}" | sed \ + -e ':a; $!N' \ + -e 's|\t|\\t|g' \ + -e 's|\r|\\r|g' \ + -e 's|\f|\\f|g')" + fi # use awk because sed just messes up with newlines + output_json_escape="$(printf "%s" "${output_json_escape}" | awk '{printf "%s%s",sep,$0; sep="\\n"} END{print ""}')" printf "%s" "${output_json_escape}" } diff --git a/sh/release/gupload b/sh/release/gupload index 102eb0a..ed0b1dc 100755 --- a/sh/release/gupload +++ b/sh/release/gupload @@ -172,24 +172,34 @@ _get_latest_sha() { ################################################### # Encode the given string to parse properly as json # Globals: None -# Arguments: 1 -# ${1} = string -# Result: print encoded string +# Arguments: 2 +# ${1} = json or something else +# ${2} = input +# Result: if ${1} is j, then escape all chars, else only special chars # Reference: # https://tools.ietf.org/html/rfc7159#section-7 ################################################### _json_escape() { - input_json_escape="${1:?Provide Input}" output_json_escape="" - output_json_escape="$(printf "%s" "${input_json_escape}" | sed \ - -e "s|'\'|'\\'|g" \ - -e "s|'/'|'\/'|g" \ - -e "s|'|\'|g" \ - -e 's/\"/\\\"/g' \ - -e ':a; $!N' \ - -e 's|\t|\\t|g' \ - -e 's|\r|\\r|g' \ - -e 's|\f|\\f|g' | awk -v ORS='\\n' '1')" + mode_json_escape="${1:?Missing mode}" input_json_escape="${2:?Provide Input}" output_json_escape="" + # just for refrence "s|'|\'|g" + if [ "${mode_json_escape}" = "j" ]; then + output_json_escape="$(printf "%s" "${input_json_escape}" | sed \ + -e "s|'\'|'\\'|g" \ + -e "s|'/'|'\/'|g" \ + -e 's/\"/\\\"/g' \ + -e ':a; $!N' \ + -e 's|\t|\\t|g' \ + -e 's|\r|\\r|g' \ + -e 's|\f|\\f|g')" + else + output_json_escape="$(printf "%s" 
"${input_json_escape}" | sed \ + -e ':a; $!N' \ + -e 's|\t|\\t|g' \ + -e 's|\r|\\r|g' \ + -e 's|\f|\\f|g')" + fi # use awk because sed just messes up with newlines + output_json_escape="$(printf "%s" "${output_json_escape}" | awk '{printf "%s%s",sep,$0; sep="\\n"} END{print ""}')" printf "%s" "${output_json_escape}" } @@ -883,7 +893,7 @@ _check_existing_file() ( unset query_check_existing_file response_check_existing_file id_check_existing_file "${EXTRA_LOG}" "justify" "Checking if file" " exists on gdrive.." "-" 1>&2 - query_check_existing_file="$(_url_encode "name='${name_check_existing_file}' and '${rootdir_check_existing_file}' in parents and trashed=false and 'me' in writers")" + query_check_existing_file="$(_url_encode "name=\"${name_check_existing_file}\" and '${rootdir_check_existing_file}' in parents and trashed=false and 'me' in writers")" response_check_existing_file="$(_api_request "${CURL_PROGRESS_EXTRA}" \ "${API_URL}/drive/${API_VERSION}/files?q=${query_check_existing_file}&fields=files(id,name,mimeType)&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2 @@ -895,9 +905,9 @@ _check_existing_file() ( ################################################### # Copy/Clone a public gdrive file/folder from another/same gdrive account -# Globals: 6 variables, 2 functions +# Globals: 6 variables, 6 functions # Variables - API_URL, API_VERSION, CURL_PROGRESS, LOG_FILE_ID, QUIET, ACCESS_TOKEN, DESCRIPTION_FILE -# Functions - _print_center, _check_existing_file, _json_value, _bytes_to_human, _clear_line +# Functions - _print_center, _check_existing_file, _json_value, _bytes_to_human, _clear_line, _json_escape # Arguments: 5 # ${1} = update or upload ( upload type ) # ${2} = file id to upload @@ -915,37 +925,38 @@ _clone_file() { job_clone_file="${1}" file_id_clone_file="${2}" file_root_id_clone_file="${3}" name_clone_file="${4}" size_clone_file="${5}" unset post_data_clone_file response_clone_file readable_size_clone_file description_clone_file && STRING="Cloned" readable_size_clone_file="$(printf "%s\n" "${size_clone_file}" | _bytes_to_human)" + escaped_name_clone_file="$(_json_escape j "${name_clone_file}")" print_name_clone_file="$(_json_escape p "${name_clone_file}")" # create description data [ -n "${DESCRIPTION_FILE}" ] && { description_clone_file="$(printf "%s\n" "${DESCRIPTION_FILE}" | sed -e "s|%f|${name_clone_file}|g|" -e "s|%f|${readable_size_clone_file}|g|")" - description_clone_file="$(_json_escape "${description_clone_file}")" # escape for json + description_clone_file="$(_json_escape j "${description_clone_file}")" # escape for json } post_data_clone_file="{\"parents\": [\"${file_root_id_clone_file}\"]${description_clone_file:+,\"description\":\"${description_clone_file}\"}}" - _print_center "justify" "${name_clone_file} " "| ${readable_size_clone_file}" "=" + _print_center "justify" "${print_name_clone_file} " "| ${readable_size_clone_file}" "=" if [ "${job_clone_file}" = update ]; then unset file_check_json_clone_file # Check if file actually exists. 
- if file_check_json_clone_file="$(_check_existing_file "${name_clone_file}" "${file_root_id_clone_file}")"; then + if file_check_json_clone_file="$(_check_existing_file "${escaped_name_clone_file}" "${file_root_id_clone_file}")"; then if [ -n "${SKIP_DUPLICATES}" ]; then - _collect_file_info "${file_check_json_clone_file}" "${name_clone_file}" || return 1 + _collect_file_info "${file_check_json_clone_file}" "${print_name_clone_file}" || return 1 _clear_line 1 - "${QUIET:-_print_center}" "justify" "${name_clone_file}" " already exists." "=" && return 0 + "${QUIET:-_print_center}" "justify" "${print_name_clone_file}" " already exists." "=" && return 0 else _print_center "justify" "Overwriting file.." "-" { _file_id_clone_file="$(printf "%s\n" "${file_check_json_clone_file}" | _json_value id 1 1)" && post_data_clone_file="$(_drive_info "${_file_id_clone_file}" "parents,writersCanShare")"; } || - { _error_logging_upload "${name_clone_file}" "${post_data_clone_file:-${file_check_json_clone_file}}" || return 1; } + { _error_logging_upload "${print_name_clone_file}" "${post_data_clone_file:-${file_check_json_clone_file}}" || return 1; } if [ "${_file_id_clone_file}" != "${file_id_clone_file}" ]; then _api_request -s \ -X DELETE \ "${API_URL}/drive/${API_VERSION}/files/${_file_id_clone_file}?supportsAllDrives=true&includeItemsFromAllDrives=true" 2>| /dev/null 1>&2 || : STRING="Updated" else - _collect_file_info "${file_check_json_clone_file}" "${name_clone_file}" || return 1 + _collect_file_info "${file_check_json_clone_file}" "${print_name_clone_file}" || return 1 fi fi else @@ -962,16 +973,16 @@ _clone_file() { -d "${post_data_clone_file}" \ "${API_URL}/drive/${API_VERSION}/files/${file_id_clone_file}/copy?supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" for _ in 1 2 3; do _clear_line 1; done - _collect_file_info "${response_clone_file}" "${name_clone_file}" || return 1 - "${QUIET:-_print_center}" "justify" "${name_clone_file} " "| ${readable_size_clone_file} | ${STRING}" "=" + _collect_file_info "${response_clone_file}" "${print_name_clone_file}" || return 1 + "${QUIET:-_print_center}" "justify" "${print_name_clone_file} " "| ${readable_size_clone_file} | ${STRING}" "=" return 0 } ################################################### # Create/Check directory in google drive. 
-# Globals: 3 variables, 2 functions +# Globals: 3 variables, 3 functions # Variables - API_URL, API_VERSION, ACCESS_TOKEN -# Functions - _url_encode, _json_value +# Functions - _url_encode, _json_value, _json_escape # Arguments: 2 # ${1} = dir name # ${2} = root dir id of given dir @@ -983,16 +994,18 @@ _create_directory() { [ $# -lt 2 ] && printf "Missing arguments\n" && return 1 dirname_create_directory="${1##*/}" rootdir_create_directory="${2}" unset query_create_directory search_response_create_directory folder_id_create_directory + escaped_dirname_create_directory="$(_json_escape j "${dirname_create_directory}")" + print_dirname_create_directory="$(_json_escape p "${dirname_create_directory}")" - "${EXTRA_LOG}" "justify" "Creating GDRIVE DIR:" " ${dirname_create_directory}" "-" 1>&2 - query_create_directory="$(_url_encode "mimeType='application/vnd.google-apps.folder' and name='${dirname_create_directory}' and trashed=false and '${rootdir_create_directory}' in parents")" + "${EXTRA_LOG}" "justify" "Creating GDRIVE DIR:" " ${print_dirname_create_directory}" "-" 1>&2 + query_create_directory="$(_url_encode "mimeType='application/vnd.google-apps.folder' and name=\"${escaped_dirname_create_directory}\" and trashed=false and '${rootdir_create_directory}' in parents")" search_response_create_directory="$(_api_request "${CURL_PROGRESS_EXTRA}" \ "${API_URL}/drive/${API_VERSION}/files?q=${query_create_directory}&fields=files(id)&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2 if ! folder_id_create_directory="$(printf "%s\n" "${search_response_create_directory}" | _json_value id 1 1)"; then unset create_folder_post_data_create_directory create_folder_response_create_directory - create_folder_post_data_create_directory="{\"mimeType\": \"application/vnd.google-apps.folder\",\"name\": \"${dirname_create_directory}\",\"parents\": [\"${rootdir_create_directory}\"]}" + create_folder_post_data_create_directory="{\"mimeType\": \"application/vnd.google-apps.folder\",\"name\": \"${escaped_dirname_create_directory}\",\"parents\": [\"${rootdir_create_directory}\"]}" create_folder_response_create_directory="$(_api_request "${CURL_PROGRESS_EXTRA}" \ -X POST \ -H "Content-Type: application/json; charset=UTF-8" \ @@ -1055,9 +1068,9 @@ _extract_id() { ################################################### # Upload ( Create/Update ) files on gdrive. # Interrupted uploads can be resumed. 
-# Globals: 8 variables, 10 functions +# Globals: 8 variables, 11 functions # Variables - API_URL, API_VERSION, QUIET, VERBOSE, VERBOSE_PROGRESS, CURL_PROGRESS, LOG_FILE_ID, ACCESS_TOKEN, DESCRIPTION_FILE -# Functions - _url_encode, _json_value, _print_center, _bytes_to_human +# Functions - _url_encode, _json_value, _json_escape, _print_center, _bytes_to_human # _generate_upload_link, _upload_file_from_uri, _log_upload_session, _remove_upload_session # _full_upload, _collect_file_info # Arguments: 3 @@ -1080,6 +1093,7 @@ _upload_file() { resume_args1_upload_file resume_args2_upload_file resume_args3_upload_file slug_upload_file="${input_upload_file##*/}" + escaped_slug_upload_file="$(_json_escape j "${slug_upload_file}")" print_slug_upload_file="$(_json_escape p "${slug_upload_file}")" inputname_upload_file="${slug_upload_file%.*}" extension_upload_file="${slug_upload_file##*.}" inputsize_upload_file="$(($(wc -c < "${input_upload_file}")))" && content_length_upload_file="${inputsize_upload_file}" @@ -1096,28 +1110,28 @@ _upload_file() { # create description data [ -n "${DESCRIPTION_FILE}" ] && { description_upload_file="$(printf "%s\n" "${DESCRIPTION_FILE}" | sed -e "s|%f|${slug_upload_file}|g" -e "s|%f|${readable_size_upload_file}|g" -e "s|%m|${mime_type_upload_file}|g")" - description_upload_file="$(_json_escape "${description_upload_file}")" # escape for json + description_upload_file="$(_json_escape j "${description_upload_file}")" # escape for json } - _print_center "justify" "${slug_upload_file}" " | ${readable_size_upload_file}" "=" + _print_center "justify" "${print_slug_upload_file}" " | ${readable_size_upload_file}" "=" # Set proper variables for overwriting files [ "${job_upload_file}" = update ] && { unset file_check_json_upload_file # Check if file actually exists, and create if not. - if file_check_json_upload_file="$(_check_existing_file "${slug_upload_file}" "${folder_id_upload_file}")"; then + if file_check_json_upload_file="$(_check_existing_file "${escaped_slug_upload_file}" "${folder_id_upload_file}")"; then if [ -n "${SKIP_DUPLICATES}" ]; then # Stop upload if already exists ( -d/--skip-duplicates ) - _collect_file_info "${file_check_json_upload_file}" "${slug_upload_file}" || return 1 + _collect_file_info "${file_check_json_upload_file}" "${print_slug_upload_file}" || return 1 _clear_line 1 - "${QUIET:-_print_center}" "justify" "${slug_upload_file} already exists." "=" && return 0 + "${QUIET:-_print_center}" "justify" "${print_slug_upload_file} already exists." 
"=" && return 0 else request_method_upload_file="PATCH" _file_id_upload_file="$(printf "%s\n" "${file_check_json_upload_file}" | _json_value id 1 1)" || - { _error_logging_upload "${slug_upload_file}" "${file_check_json_upload_file}" || return 1; } + { _error_logging_upload "${print_slug_upload_file}" "${file_check_json_upload_file}" || return 1; } url_upload_file="${API_URL}/upload/drive/${API_VERSION}/files/${_file_id_upload_file}?uploadType=resumable&supportsAllDrives=true&includeItemsFromAllDrives=true" # JSON post data to specify the file name and folder under while the file to be updated - postdata_upload_file="{\"mimeType\": \"${mime_type_upload_file}\",\"name\": \"${slug_upload_file}\",\"addParents\": [\"${folder_id_upload_file}\"]${description_upload_file:+,\"description\":\"${description_upload_file}\"}}" + postdata_upload_file="{\"mimeType\": \"${mime_type_upload_file}\",\"name\": \"${escaped_slug_upload_file}\",\"addParents\": [\"${folder_id_upload_file}\"]${description_upload_file:+,\"description\":\"${description_upload_file}\"}}" STRING="Updated" fi else @@ -1130,11 +1144,11 @@ _upload_file() { url_upload_file="${API_URL}/upload/drive/${API_VERSION}/files?uploadType=resumable&supportsAllDrives=true&includeItemsFromAllDrives=true" request_method_upload_file="POST" # JSON post data to specify the file name and folder under while the file to be created - postdata_upload_file="{\"mimeType\": \"${mime_type_upload_file}\",\"name\": \"${slug_upload_file}\",\"parents\": [\"${folder_id_upload_file}\"]${description_upload_file:+,\"description\":\"${description_upload_file}\"}}" + postdata_upload_file="{\"mimeType\": \"${mime_type_upload_file}\",\"name\": \"${escaped_slug_upload_file}\",\"parents\": [\"${folder_id_upload_file}\"]${description_upload_file:+,\"description\":\"${description_upload_file}\"}}" STRING="Uploaded" } - __file_upload_file="${INFO_PATH}/${slug_upload_file}__::__${folder_id_upload_file}__::__${inputsize_upload_file}" + __file_upload_file="${INFO_PATH}/${print_slug_upload_file}__::__${folder_id_upload_file}__::__${inputsize_upload_file}" # https://developers.google.com/drive/api/v3/manage-uploads if [ -r "${__file_upload_file}" ]; then uploadlink_upload_file="$(cat "${__file_upload_file}" || :)" @@ -1152,7 +1166,7 @@ _upload_file() { # Resuming interrupted uploads needs http1.1 resume_args1_upload_file='-s' resume_args2_upload_file='--http1.1' resume_args3_upload_file="Content-Range: ${content_range_upload_file}" _upload_file_from_uri _clear_line - _collect_file_info "${upload_body_upload_file}" "${slug_upload_file}" || return 1 + _collect_file_info "${upload_body_upload_file}" "${print_slug_upload_file}" || return 1 _normal_logging_upload _remove_upload_session else @@ -1164,7 +1178,7 @@ _upload_file() { ;; 201 | 200) # Completed Resumable URI give 20* status upload_body_upload_file="${http_code_upload_file}" - _collect_file_info "${upload_body_upload_file}" "${slug_upload_file}" || return 1 + _collect_file_info "${upload_body_upload_file}" "${print_slug_upload_file}" || return 1 _normal_logging_upload _remove_upload_session ;; @@ -1206,7 +1220,7 @@ _upload_file_from_uri() { -X PUT \ -H "Content-Type: ${mime_type_upload_file}" \ -H "Content-Length: ${content_length_upload_file}" \ - -H "Slug: ${slug_upload_file}" \ + -H "Slug: ${print_slug_upload_file}" \ -T "${input_upload_file}" \ -o- \ --url "${uploadlink_upload_file}" \ @@ -1238,10 +1252,10 @@ _remove_upload_session() { # wrapper to fully upload a file from scratch _full_upload() { - _generate_upload_link || { 
_error_logging_upload "${slug_upload_file}" "${uploadlink_upload_file}" || return 1; } + _generate_upload_link || { _error_logging_upload "${print_slug_upload_file}" "${uploadlink_upload_file}" || return 1; } _log_upload_session _upload_file_from_uri - _collect_file_info "${upload_body_upload_file}" "${slug_upload_file}" || return 1 + _collect_file_info "${upload_body_upload_file}" "${print_slug_upload_file}" || return 1 _normal_logging_upload _remove_upload_session return 0