From bbc619ef8404afc93e1aae7f127d896148e4997f Mon Sep 17 00:00:00 2001
From: Edouard DUPIN
Date: Fri, 28 Oct 2016 00:26:33 +0200
Subject: [PATCH] [DEBUG] correct server down when activity is too low

---
 build_send.py    | 15 +++++++++++++--
 coverage_send.py | 16 +++++++++++++---
 test_send.py     | 16 +++++++++++++---
 warning_send.py  | 16 +++++++++++++---
 4 files changed, 52 insertions(+), 11 deletions(-)

diff --git a/build_send.py b/build_send.py
index 6c8afde..045097c 100755
--- a/build_send.py
+++ b/build_send.py
@@ -10,6 +10,7 @@ import urllib, urllib2
 import sys
 import os
 import argparse
+import time
 
 
 parser = argparse.ArgumentParser()
@@ -99,9 +100,19 @@ data = urllib.urlencode({'REPO':args.repo,
                          'TAG':args.tag,
                          'STATUS':args.status,
                          'ID':args.id})
+# I do this because my server is sometimes down and needs time to restart (it returns 408).
+send_done = 5
+while send_done >= 0:
+	send_done = send_done - 1
+	try:
+		req = urllib2.Request(args.url, data)
+		response = urllib2.urlopen(req)
+		send_done = -1
+	except urllib2.HTTPError:
+		print("An error occurred (maybe on the server or network ... 'urllib2.HTTPError: HTTP Error 408: Request Timeout')")
+	if send_done >= 0:
+		time.sleep(5)
 
-req = urllib2.Request(args.url, data)
-response = urllib2.urlopen(req)
 #print(response.geturl())
 #print(response.info())
 return_data = response.read()
diff --git a/coverage_send.py b/coverage_send.py
index 55afd4b..9300a11 100755
--- a/coverage_send.py
+++ b/coverage_send.py
@@ -10,6 +10,7 @@ import urllib, urllib2
 import sys
 import os
 import argparse
+import time
 
 
 parser = argparse.ArgumentParser()
@@ -104,9 +105,18 @@ data = urllib.urlencode({'REPO':args.repo,
                          'SHA1':args.sha1,
                          'LIB_BRANCH':args.branch,
                          'JSON_FILE':json_data})
-
-req = urllib2.Request(args.url, data)
-response = urllib2.urlopen(req)
+# I do this because my server is sometimes down and needs time to restart (it returns 408).
+send_done = 5
+while send_done >= 0:
+	send_done = send_done - 1
+	try:
+		req = urllib2.Request(args.url, data)
+		response = urllib2.urlopen(req)
+		send_done = -1
+	except urllib2.HTTPError:
+		print("An error occurred (maybe on the server or network ... 'urllib2.HTTPError: HTTP Error 408: Request Timeout')")
+	if send_done >= 0:
+		time.sleep(5)
 #print response.geturl()
 #print response.info()
 return_data = response.read()
diff --git a/test_send.py b/test_send.py
index a20ec06..5e2a71b 100755
--- a/test_send.py
+++ b/test_send.py
@@ -10,6 +10,7 @@ import urllib, urllib2
 import sys
 import os
 import argparse
+import time
 
 
 parser = argparse.ArgumentParser()
@@ -170,9 +171,18 @@ data = urllib.urlencode({'REPO':args.repo,
                          'SHA1':args.sha1,
                          'JSON_FILE':json_data,
                          'LIB_BRANCH':args.branch})
-
-req = urllib2.Request(args.url, data)
-response = urllib2.urlopen(req)
+# I do this because my server is sometimes down and needs time to restart (it returns 408).
+send_done = 5
+while send_done >= 0:
+	send_done = send_done - 1
+	try:
+		req = urllib2.Request(args.url, data)
+		response = urllib2.urlopen(req)
+		send_done = -1
+	except urllib2.HTTPError:
+		print("An error occurred (maybe on the server or network ... 'urllib2.HTTPError: HTTP Error 408: Request Timeout')")
+	if send_done >= 0:
+		time.sleep(5)
 #print response.geturl()
 #print response.info()
 return_data = response.read()
diff --git a/warning_send.py b/warning_send.py
index c5a854b..7c6de49 100755
--- a/warning_send.py
+++ b/warning_send.py
@@ -11,6 +11,7 @@ import sys
 import os
 import argparse
 import fnmatch
+import time
 
 
 parser = argparse.ArgumentParser()
@@ -184,9 +185,18 @@ data = urllib.urlencode({'REPO':args.repo,
                          'SHA1':args.sha1,
                          'LIB_BRANCH':args.branch,
                          'JSON_FILE':json_data})
-
-req = urllib2.Request(args.url, data)
-response = urllib2.urlopen(req)
+# I do this because my server is sometimes down and needs time to restart (it returns 408).
+send_done = 5
+while send_done >= 0:
+	send_done = send_done - 1
+	try:
+		req = urllib2.Request(args.url, data)
+		response = urllib2.urlopen(req)
+		send_done = -1
+	except urllib2.HTTPError:
+		print("An error occurred (maybe on the server or network ... 'urllib2.HTTPError: HTTP Error 408: Request Timeout')")
+	if send_done >= 0:
+		time.sleep(5)
 #print response.geturl()
 #print response.info()
 return_data = response.read()
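
Note (not part of the commit): the patch pastes the same retry loop into all four scripts. As a sketch only, the logic could be factored into a single helper; the name send_with_retry and the defaults of 5 attempts with a 5-second delay are illustrative assumptions that simply mirror the values used in the patch, written in the same Python 2 / urllib2 style as the scripts.

import time
import urllib2

def send_with_retry(url, data, attempts=5, delay=5):
	# Sketch of the retry pattern above: POST 'data' to 'url' and retry on
	# HTTP errors such as '408 Request Timeout' while the server restarts.
	for attempt in range(attempts):
		try:
			req = urllib2.Request(url, data)
			return urllib2.urlopen(req)
		except urllib2.HTTPError as error:
			print("Send attempt %d/%d failed: %s" % (attempt + 1, attempts, error))
			if attempt + 1 < attempts:
				time.sleep(delay)
	return None

Each script would then call response = send_with_retry(args.url, data) and check for None before reading the response, instead of carrying its own copy of the loop.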