Commit 248df9ca authored by Nicolas CASTEJON's avatar Nicolas CASTEJON
Browse files

Merge branch 'development' into 'master'

Update LICENSE

See merge request !3
parents 8b3670ff d61304fc
Pipeline #4645 passed with stage
in 20 seconds
......@@ -25,6 +25,8 @@ build_development:
stage: build
only:
- development
tags:
- build
script:
- export TAG=$CI_COMMIT_SHORT_SHA
- export CONTENT_INDEXER_BIND_PORT=4000
......@@ -37,6 +39,8 @@ build_release:
stage: build
only:
- tags
tags:
- build
script:
- export TAG=$(echo $CI_COMMIT_TAG | sed 's/v//g')
- export CONTENT_INDEXER_BIND_PORT=4000
......
This diff is collapsed.
import logging
from elasticsearch import Elasticsearch, NotFoundError
import logging
logging.basicConfig()
logger = logging.getLogger(__name__)
......@@ -44,7 +45,7 @@ def index_cleaner( cfg, content_type, content_id=None ):
else:
logging.info('Trying to delete content with id = %s...' % content_id )
logger.info('Trying to delete content with id = %s...' % content_id )
successful = True
......
import json
import logging
from elasticsearch import Elasticsearch
from flask import current_app as my_app
from lib.ghost_content_processor import process_posts, process_pages
from lib.indexer import index_docs
from lib.reindexer import reindexer
from lib.alias_setter import alias_setter
from lib.index_cleaner import index_cleaner
from lib.ghost_content_getter import get_all_content_by_page
from lib.ghost_content_processor import process_pages, process_posts
from lib.index_cleaner import index_cleaner
from lib.indexer import index_docs
from lib.reindexer import reindexer
import logging
logging.root.handlers = []
logging.basicConfig(format="%(asctime)s [%(process)d] [%(levelname)s] [%(name)s] %(message)s", datefmt="[%Y-%m-%d %H:%M:%S %z]")
logger = logging.getLogger(__name__)
......@@ -26,7 +26,7 @@ def index_everything( cfg, content_type ):
cnt = 1
for content_page in content_pages:
logging.info('Processing content page no. %i...' % cnt)
logger.info('Processing content page no. %i...' % cnt)
processed_content_page = eval("process_%s" % content_type)( content_page[content_type] )
successful = index_docs(cfg, content_type, processed_content_page)
......@@ -37,17 +37,17 @@ def index_everything( cfg, content_type ):
cnt += 1
logging.info('Reindexing...')
logger.info('Reindexing...')
task_url = reindexer(cfg, content_type)
logging.info('Setting aliases...')
logger.info('Setting aliases...')
successful = alias_setter(cfg, content_type)
if not successful:
logging.critical('Something went wrong upon setting aliases: exiting.')
exit(1)
logging.info('done.')
logger.info('done.')
return task_url
......@@ -74,16 +74,16 @@ def add_content( cfg, content_type, data ):
if not successful:
raise Exception('Something went wrong upon indexing docs: exiting.')
logging.info('Reindexing...')
logger.info('Reindexing...')
task_url = reindexer(cfg, content_type, content['id'])
if not successful:
raise Exception('Something went wrong. Exiting...')
logging.info('Setting aliases...')
logger.info('Setting aliases...')
successful = alias_setter(cfg, content_type)
logging.info('done.')
logger.info('done.')
return task_url
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment