Commit 9f6a4caf authored by Sébastien DA ROCHA's avatar Sébastien DA ROCHA
Browse files

Merge branch 'fix/9493_sorting' into 'development'

Fix/9493 sorting

See merge request !10
parents 17dee6bc c7cc6c6c
Pipeline #9050 passed with stages
in 51 seconds
......@@ -53,7 +53,7 @@ template = {
"analyzer": {
"my_sort_analyzer": {
"type": "custom",
"tokenizer": "standard",
"tokenizer": "keyword",
"filter": [
"original_preserving_ascii_folding"
]
......
......@@ -203,6 +203,7 @@ def main(**kwargs):
schema_names = conn.get_schema_names()
for schema in schema_names:
if schema_name and not schema_name == schema:
logging.debug(f"not {schema_name}, sckipping {schema}")
continue
for table in conn.get_tables(schema=schema):
if table_name and not table_name == table.name:
......
......@@ -151,12 +151,12 @@ def generate_field_catalog( cfg ):
logging.info('Getting schemas...')
schema_names = pg.get_schema_names()
logging.info('Done.')
logging.info('Done: %s', schema_names)
for schema_name in schema_names:
if schema_whitelist is not None:
if schema_name not in schema_whitelist:
logging.debug('Skipping schema %s' % schema_name)
logging.debug('Skipping schema %s (not in whitelist M%s)', schema_name, schema_whitelist)
continue
for table in pg.get_tables(schema_name):
if schema_dot_table_whitelist is not None:
......@@ -245,7 +245,9 @@ def main(cfg):
elected_field_types = elect_field_type( field_catalog_by_field )
with open(filename2, 'w') as fp:
logging.debug("writing %s", filename2)
json.dump(elected_field_types, fp, sort_keys=True)
logging.debug(json.dumps(elected_field_types) )
final_field_catalog_by_dbschematable = field_catalog_by_dbschematable.copy()
......@@ -254,7 +256,10 @@ def main(cfg):
final_field_catalog_by_dbschematable[db_schema_table]['types'][field] = elected_field_types[field]
with open(filename3, 'w') as fp:
logging.debug("writing %s", filename3)
json.dump(final_field_catalog_by_dbschematable, fp, sort_keys=True)
from pprint import pformat
logging.debug(pformat(json.dumps(final_field_catalog_by_dbschematable) ))
return
......
......@@ -30,7 +30,10 @@ def callback(channel, method, properties, body):
#data = res.json()
# get sample records from the ingest index
source_es = Elasticsearch([cfg['reindexer']['source_url']], timeout=60)
if 'source_url' in cfg['reindexer'].keys():
source_es = Elasticsearch([cfg['reindexer']['source_url']], timeout=60)
else:
source_es = Elasticsearch([cfg['reindexer']['destination_url']], timeout=60)
the_query = dict()
the_query['size'] = sample_size
......
Markdown is supported
Attach a file by dragging & dropping or selecting it.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment