nginx-status

#!/bin/sh
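#
# nginx-status: print a single metric from an Nginx status page
# (the stub_status module).
#
# Example:
#   ./nginx-status -u http://localhost/nginx_status -i active
#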
URL=""
INFO=""
CRAWLER=""
INFOS="active accepted handled requests reading writing waiting"

usage() {
cat <<EOH
USAGE: $(basename "$0") -u URL -i STATUS_INFO
Get Nginx status
Options:
  -u  Nginx status URL (e.g.: http://localhost/nginx_status)
  -i  Status info in $(echo "${INFOS}" | sed 's/ /, /g')
EOH
}
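
# Parse command-line options; -h or an unknown flag prints usage.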
while getopts "u:i:h" OPT ; do
    case "$OPT" in
        \?|h)
            usage
            exit 0
            ;;
        u)
            URL="$OPTARG"
            ;;
        i)
            INFO="$OPTARG"
            ;;
    esac
done
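
# Pick an HTTP client: prefer curl, fall back to wget.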
if command -v curl >/dev/null 2>&1 ; then
    CRAWLER="curl -s"
elif command -v wget >/dev/null 2>&1 ; then
    CRAWLER="wget -q -O -"
else
    printf "[ERR] wget or curl not found\n"
    usage
    exit 1
fi
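
# The status URL is mandatory.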
if [ -z "${URL}" ]; then
    printf "[ERR] URL not defined or empty\n"
    usage
    exit 1
fi
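
# Reject any metric name not listed in INFOS (an empty -i fails this check too).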
if ! echo "${INFO}" | grep -Eq "^($(echo "${INFOS}" | sed 's/ /|/g'))$"; then
    printf "[ERR] info '%s' not available in %s\n" "${INFO}" "${INFOS}"
    usage
    exit 1
fi
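
# stub_status output looks like:
#   Active connections: 291
#   server accepts handled requests
#    16630948 16630948 31070465
#   Reading: 6 Writing: 179 Waiting: 106
# "active" is taken from the first line, accepted/handled/requests are the
# three counters on the third line, and reading/writing/waiting come from
# the last line. Note: \s and the -r flag are GNU sed extensions.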
case "${INFO}" in
    active)
        $CRAWLER "${URL}" | sed -r -n 's/^Active connections:\s+//p'
        ;;
    accepted)
        $CRAWLER "${URL}" | sed -r -n 's/^\s+([0-9]+)\s+([0-9]+)\s+([0-9]+)/\1/p'
        ;;
    handled)
        $CRAWLER "${URL}" | sed -r -n 's/^\s+([0-9]+)\s+([0-9]+)\s+([0-9]+)/\2/p'
        ;;
    requests)
        $CRAWLER "${URL}" | sed -r -n 's/^\s+([0-9]+)\s+([0-9]+)\s+([0-9]+)/\3/p'
        ;;
    reading)
        $CRAWLER "${URL}" | sed -r -n 's/^Reading:\s+([0-9]+)\s+Writing:\s+([0-9]+)\s+Waiting:\s+([0-9]+)\s+$/\1/p'
        ;;
    writing)
        $CRAWLER "${URL}" | sed -r -n 's/^Reading:\s+([0-9]+)\s+Writing:\s+([0-9]+)\s+Waiting:\s+([0-9]+)\s+$/\2/p'
        ;;
    waiting)
        $CRAWLER "${URL}" | sed -r -n 's/^Reading:\s+([0-9]+)\s+Writing:\s+([0-9]+)\s+Waiting:\s+([0-9]+)\s+$/\3/p'
        ;;
esac

exit 0