#!/bin/sh
# {{{ Variables
URL=""
INFO=""
CRAWLER=""
INFOS="active accepted handled requests reading writing waiting"
# }}}
# {{{ usage()
usage() {
    cat <<EOH
USAGE: $(basename "$0") -u URL -i STATUS_INFO

Get Nginx status

Options:
  -u  Nginx status URL (e.g. http://localhost/nginx_status)
  -i  Status info, one of: $(echo "${INFOS}" | sed 's/ /, /g')
EOH
}
# }}}
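# Example invocation (assumes nginx exposes stub_status at this path):
#   nginx-status -u http://localhost/nginx_status -i active
#
# The endpoint itself is typically enabled with an nginx location block
# along these lines (requires the ngx_http_stub_status_module):
#   location /nginx_status {
#       stub_status;
#       allow 127.0.0.1;
#       deny all;
#   }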
# {{{ main()
while getopts "hu:i:" OPT ; do
    case "$OPT" in
        \?|h)
            usage
            exit 0
            ;;
        u)
            URL="$OPTARG"
            ;;
        i)
            INFO="$OPTARG"
            ;;
    esac
done
# Pick an HTTP client: prefer curl, fall back to wget.
if command -v curl >/dev/null 2>&1 ; then
    CRAWLER="curl -s"
elif command -v wget >/dev/null 2>&1 ; then
    CRAWLER="wget -q -O -"
else
    printf "[ERR] wget or curl not found\n"
    usage
    exit 1
fi
if [ -z "${URL}" ]; then
    printf "[ERR] URL not defined or empty\n"
    usage
    exit 1
fi
# Make sure the requested info is one of the known fields.
if ! echo "${INFO}" | grep -E -q "^($(echo "${INFOS}" | sed 's/ /|/g'))$"; then
    printf "[ERR] info '%s' not available in %s\n" "${INFO}" "${INFOS}"
    usage
    exit 1
fi
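# For reference, the sed expressions below each pull one field out of a
# stub_status response, which looks like this (numbers illustrative):
#
#   Active connections: 291
#   server accepts handled requests
#    16630948 16630948 31070465
#   Reading: 6 Writing: 179 Waiting: 106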
case "${INFO}" in
    # INFOS="active accepted handled requests reading writing waiting"
    active)
        $CRAWLER "${URL}" | sed -r -n 's/^Active connections:\s+//p'
        ;;
    accepted)
        $CRAWLER "${URL}" | sed -r -n 's/^\s+([0-9]+)\s+([0-9]+)\s+([0-9]+)/\1/p'
        ;;
    handled)
        $CRAWLER "${URL}" | sed -r -n 's/^\s+([0-9]+)\s+([0-9]+)\s+([0-9]+)/\2/p'
        ;;
    requests)
        $CRAWLER "${URL}" | sed -r -n 's/^\s+([0-9]+)\s+([0-9]+)\s+([0-9]+)/\3/p'
        ;;
    reading)
        $CRAWLER "${URL}" | sed -r -n 's/^Reading:\s+([0-9]+)\s+Writing:\s+([0-9]+)\s+Waiting:\s+([0-9]+)\s+$/\1/p'
        ;;
    writing)
        $CRAWLER "${URL}" | sed -r -n 's/^Reading:\s+([0-9]+)\s+Writing:\s+([0-9]+)\s+Waiting:\s+([0-9]+)\s+$/\2/p'
        ;;
    waiting)
        $CRAWLER "${URL}" | sed -r -n 's/^Reading:\s+([0-9]+)\s+Writing:\s+([0-9]+)\s+Waiting:\s+([0-9]+)\s+$/\3/p'
        ;;
esac
exit 0
# }}}
# vim: foldmethod=marker foldlevel=0 foldenable