Commit 8b3670ff authored by Alessandro CERIONI's avatar Alessandro CERIONI
Browse files

Merge branch 'development' into 'master'

Development

See merge request !2
parents 9f23e7c8 c6a60b3c
Pipeline #4528 passed with stages
in 25 seconds
......@@ -2,6 +2,9 @@ stages:
- sonar-analysis
- build
variables:
SONAR_URL: https://sonarqube.forge.grandlyon.com
sonarqube:
stage: sonar-analysis
before_script:
......
......@@ -13,12 +13,12 @@ if __name__ != '__main__':
def get_token( admin_key ):
    """Obtain a JWT for the Ghost Admin API by delegating to the Node.js factory script.

    Parameters
    ----------
    admin_key : str
        The Ghost admin API key, piped to the script on stdin.

    Returns
    -------
    str
        The token printed on the script's stdout (may carry a trailing
        newline, depending on what the script emits).
    """
    cmd = ['node', 'lib/jwt_factory.js']
    # universal_newlines=True -> communicate() exchanges str, not bytes.
    process = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, universal_newlines=True)
    JWT = process.communicate( input=admin_key )[0]
    return JWT
......
......@@ -34,10 +34,6 @@ def find_relations( tags ):
return relations
return relations
def process_posts( posts ):
output_posts = []
......
......@@ -62,20 +62,18 @@ def index_docs( cfg, content_type, docs ):
}
}
try:
# create index, in case it doesn't exist yet
if not es.indices.exists(index=es_index):
rep = es.indices.create(es_index, es_body)
except Exception as e:
print(e)
logger.info(f'Index "{es_index}" created.')
logger.info("Pushing %i documents into Elasticsearch..." % len(docs))
header = {
"index" : {
"_index" : es_index,
"_type" : "_doc"
}
}
"index" : {
"_index" : es_index,
"_type" : "_doc"
}
}
es_body = ''
......
......@@ -36,12 +36,9 @@ def reindexer(cfg, content_type, content_id=None):
rep = destin_es.indices.put_template(cfg['reindexer']['template'][content_type], template)
try:
rep = destin_es.indices.create(cfg['reindexer']['destination_index'][content_type])
#print(rep)
except Exception as e:
logger.error(e)
pass
if not destin_es.indices.exists(index=destin_index):
rep = destin_es.indices.create(destin_index)
logger.info(f'Index "{destin_index}" created.')
body = {
# "conflicts": "proceed",
......
import json

from elasticsearch import Elasticsearch
from flask import current_app as my_app

from lib.ghost_content_processor import process_posts, process_pages
from lib.indexer import index_docs
from lib.reindexer import reindexer
from lib.alias_setter import alias_setter
from lib.index_cleaner import index_cleaner
from lib.ghost_content_getter import get_all_content_by_page

import logging
logging.root.handlers = []
......@@ -28,7 +27,6 @@ def index_everything( cfg, content_type ):
for content_page in content_pages:
logging.info('Processing content page no. %i...' % cnt)
#processed_page = process_posts(page)
processed_content_page = eval("process_%s" % content_type)( content_page[content_type] )
successful = index_docs(cfg, content_type, processed_content_page)
......@@ -37,13 +35,10 @@ def index_everything( cfg, content_type ):
logger.critical('Something went wrong upon indexing docs: exiting.')
exit(1)
#print(processed_page)
cnt += 1
logging.info('Reindexing...')
task_url = reindexer(cfg, content_type)
#print(task_url)
logging.info('Setting aliases...')
successful = alias_setter(cfg, content_type)
......@@ -59,36 +54,34 @@ def index_everything( cfg, content_type ):
def add_content( cfg, content_type, data ):
    """Index a single piece of Ghost content delivered by a webhook payload.

    Parameters
    ----------
    cfg : dict
        Application configuration (Elasticsearch endpoints, index names, ...).
    content_type : str
        Either 'posts' or 'pages'; selects the processor and index.
    data : dict
        Webhook payload; the new document sits under data[<singular>]['current'].

    Raises
    ------
    Exception
        If index cleaning, document indexing, or reindexing reports failure.
    """
    # N.B. : pages -> page; posts -> post, that's why we remove the last letter from content_type
    content = data[ content_type[:-1] ]['current']
    try:
        # Strip bulky / irrelevant fields before indexing; a missing key is
        # logged but deliberately non-fatal (best-effort cleanup).
        del content['authors']
        del content['primary_author']
        del content['mobiledoc']
    except Exception as e:
        logging.critical(e)
        pass
    # NOTE(review): eval() dispatch to process_posts / process_pages — safe only
    # while content_type is restricted to the two known values; verify upstream.
    processed_content = eval('process_%s' % content_type)([content])
    successful = index_cleaner(cfg, content_type, content['id'])
    if not successful:
        raise Exception('Something went wrong upon cleaning indices: exiting.')
    successful = index_docs(cfg, content_type, processed_content)
    if not successful:
        raise Exception('Something went wrong upon indexing docs: exiting.')
    logging.info('Reindexing...')
    task_url = reindexer(cfg, content_type, content['id'])
    # NOTE(review): this re-checks the previous `successful`; reindexer's own
    # outcome (task_url) is not inspected — confirm intended.
    if not successful:
        raise Exception('Something went wrong. Exiting...')
    logging.info('Setting aliases...')
    successful = alias_setter(cfg, content_type)
    logging.info('done.')
......@@ -98,7 +91,7 @@ def add_content( cfg, content_type, data ):
def delete_content( cfg, content_type, data ):
    """Remove a deleted Ghost document from the index.

    The webhook payload carries the removed document under
    data[<singular>]['previous']; its id is handed to the index cleaner.

    Returns
    -------
    bool
        Whatever index_cleaner reports (True on success).
    """
    # N.B. : pages -> page; posts -> post, that's why we remove the last letter from content_type
    content_id = data[ content_type[:-1] ]['previous']['id']
    return index_cleaner(cfg, content_type, content_id)
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment