From 2960a68dde502831efba24767960c86d75709b4a Mon Sep 17 00:00:00 2001
From: Alessandro Cerioni <acerioni@grandlyon.com>
Date: Tue, 10 Sep 2019 19:31:28 +0200
Subject: [PATCH] Improved logging.

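Flask's application logger does not feed into gunicorn's handlers on its
own, so api.py and main.py now borrow the handlers and log level of the
'gunicorn.error' logger whenever they run under gunicorn; the Dockerfile
also installs requirements before copying the sources (so the pip layer is
cached across code changes) and starts gunicorn with --log-level=info.
A minimal sketch of the logger handoff pattern (the app name and the log
line are illustrative, not part of this patch):

    import logging
    from flask import Flask

    app = Flask(__name__)

    if __name__ != '__main__':
        # Under gunicorn this module is imported rather than run directly,
        # so reuse gunicorn's handlers and level instead of basicConfig().
        gunicorn_logger = logging.getLogger('gunicorn.error')
        app.logger.handlers = gunicorn_logger.handlers
        app.logger.setLevel(gunicorn_logger.level)

    app.logger.info("This record now shows up in gunicorn's log output.")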
---
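Note on the scheduler tweak below: random_minute = 5*random.randint(0, 9)
draws one of the ten minute slots 0, 5, 10, ..., 45, which is what the
comment about "at least 5 minutes between each request" and 10 concurrent
requests refers to. A quick sanity check of that arithmetic (standalone,
not part of the patch):

    import random

    # All possible cron minutes: multiples of 5 from 0 to 45.
    slots = [5 * n for n in range(10)]
    assert slots == [0, 5, 10, 15, 20, 25, 30, 35, 40, 45]

    # One instance's draw, exactly as in api.py.
    random_minute = 5 * random.randint(0, 9)
    assert random_minute in slots
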
 Dockerfile |  6 +++---
 api.py     | 20 ++++++++++++++++----
 main.py    | 23 +++++++++++++++++++----
 3 files changed, 38 insertions(+), 11 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 0265a08..2007f55 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -4,12 +4,12 @@ WORKDIR /app
 RUN mkdir cache
 
 COPY requirements.txt .
+RUN pip install -r requirements.txt
+
 COPY api.py .
 COPY main.py .
 
-RUN pip install -r requirements.txt
-
 EXPOSE 8000
 VOLUME /cache
 
-CMD gunicorn --workers=2 --timeout 300 -b 0.0.0.0:8000 --preload api:api
+CMD gunicorn --workers=2 --timeout 300 -b 0.0.0.0:8000 --log-level=info --preload api:api
diff --git a/api.py b/api.py
index 8a8b592..d0e1400 100644
--- a/api.py
+++ b/api.py
@@ -4,6 +4,8 @@ import os.path
 from apscheduler.schedulers.background import BackgroundScheduler
 import atexit
 import random
+import logging
+#logging.basicConfig(format="[%(asctime)s] [%(process)d] [%(levelname)s] %(message)s", datefmt="%Y-%m-%d %H:%M:%S %z")
 
 from main import refresh_cache
 
@@ -15,10 +17,9 @@ scheduler = BackgroundScheduler()
 # we let the various instances of this service refresh the cache at different times,
 # with at least 5 minutes between each request. The following configuration allows for
 # 10 concurrent requests.
-random_minute = str(5*random.randint(0, 9))
-
-scheduler.add_job(refresh_cache, 'cron', hour='6', minute=random_minute)
-#scheduler.add_job(refresh_cache, 'interval', seconds=60)
+random_minute = 5*random.randint(0, 9)
+scheduler.add_job(refresh_cache, 'cron', hour=6, minute=random_minute)
+#scheduler.add_job(refresh_cache, 'interval', seconds=random_minute)
 scheduler.start()
 
 # Shut down the scheduler when exiting the app
@@ -26,6 +27,17 @@ atexit.register(lambda: scheduler.shutdown())
 
 api = Flask(__name__, static_url_path='')
 
+
+if __name__ != '__main__':
+    # Running under gunicorn: hand Flask's logger over to gunicorn's
+    # handlers and log level so that app records reach gunicorn's output.
+    gunicorn_logger = logging.getLogger('gunicorn.error')
+    api.logger.handlers = gunicorn_logger.handlers
+    api.logger.setLevel(gunicorn_logger.level)
+
+
+api.logger.info("Cache will be refreshed every day at %s minutes past 6 AM (GMT)." % random_minute)
+
 @api.route("/")
 def index():
 
diff --git a/main.py b/main.py
index 6e44221..5aa1d44 100644
--- a/main.py
+++ b/main.py
@@ -16,7 +16,19 @@ except ImportError:
 with open("config.yaml", 'r') as yamlfile:
     cfg = load(yamlfile, Loader=Loader)
 
-logging.basicConfig(format="[%(asctime)s] [%(process)d] [%(levelname)s] %(message)s", datefmt="%Y-%m-%d %H:%M:%S %z")
+if __name__ != '__main__':
+    # Running under gunicorn: reuse its handlers and log level so that
+    # this module's records end up in gunicorn's output.
+    gunicorn_logger = logging.getLogger('gunicorn.error')
+    log = logging.getLogger("cache-refresher")
+    log.handlers = gunicorn_logger.handlers
+    log.setLevel(gunicorn_logger.level)
+else:
+    # Running directly: keep the old basicConfig behaviour and still
+    # define `log` so that refresh_cache() can use it.
+    logging.basicConfig(format="[%(asctime)s] [%(process)d] [%(levelname)s] %(message)s", datefmt="%Y-%m-%d %H:%M:%S %z")
+    log = logging.getLogger("cache-refresher")
+
 
 
 def refresh_cache():
@@ -37,18 +49,21 @@ def refresh_cache():
     #
     # if file_found == False or age > 86400:
 
-    logging.info('Fetching a new file...')
+    log.info('Refreshing cache...')
 
     done = False
     while not done:
         res = requests.get(url)
 
         if res.status_code != 200:
-            logging.error('Something went wrong. Sleeping for 5 seconds before retrying...')
+            log.error('Something went wrong when hitting the following URL: %s' % url)
+            log.error('Here is the response:')
+            log.error(res)
+            log.error('Sleeping for 5 seconds before retrying...')
             time.sleep(5)
             done = False
         else:
-            logging.info('Done.')
+            log.info('Done.')
             done = True
             break
 
-- 
GitLab