Skip to content
Snippets Groups Projects
Commit 9f6a4caf authored by Sébastien DA ROCHA's avatar Sébastien DA ROCHA
Browse files

Merge branch 'fix/9493_sorting' into 'development'

Fix/9493 sorting

See merge request !10
parents 17dee6bc c7cc6c6c
Branches
Tags
2 merge requests!111.2.0,!10Fix/9493 sorting
Pipeline #9050 passed
...@@ -53,7 +53,7 @@ template = { ...@@ -53,7 +53,7 @@ template = {
"analyzer": { "analyzer": {
"my_sort_analyzer": { "my_sort_analyzer": {
"type": "custom", "type": "custom",
"tokenizer": "standard", "tokenizer": "keyword",
"filter": [ "filter": [
"original_preserving_ascii_folding" "original_preserving_ascii_folding"
] ]
......
...@@ -203,6 +203,7 @@ def main(**kwargs): ...@@ -203,6 +203,7 @@ def main(**kwargs):
schema_names = conn.get_schema_names() schema_names = conn.get_schema_names()
for schema in schema_names: for schema in schema_names:
if schema_name and not schema_name == schema: if schema_name and not schema_name == schema:
logging.debug(f"not {schema_name}, skipping {schema}")
continue continue
for table in conn.get_tables(schema=schema): for table in conn.get_tables(schema=schema):
if table_name and not table_name == table.name: if table_name and not table_name == table.name:
......
...@@ -151,12 +151,12 @@ def generate_field_catalog( cfg ): ...@@ -151,12 +151,12 @@ def generate_field_catalog( cfg ):
logging.info('Getting schemas...') logging.info('Getting schemas...')
schema_names = pg.get_schema_names() schema_names = pg.get_schema_names()
logging.info('Done.') logging.info('Done: %s', schema_names)
for schema_name in schema_names: for schema_name in schema_names:
if schema_whitelist is not None: if schema_whitelist is not None:
if schema_name not in schema_whitelist: if schema_name not in schema_whitelist:
logging.debug('Skipping schema %s' % schema_name) logging.debug('Skipping schema %s (not in whitelist %s)', schema_name, schema_whitelist)
continue continue
for table in pg.get_tables(schema_name): for table in pg.get_tables(schema_name):
if schema_dot_table_whitelist is not None: if schema_dot_table_whitelist is not None:
...@@ -245,7 +245,9 @@ def main(cfg): ...@@ -245,7 +245,9 @@ def main(cfg):
elected_field_types = elect_field_type( field_catalog_by_field ) elected_field_types = elect_field_type( field_catalog_by_field )
with open(filename2, 'w') as fp: with open(filename2, 'w') as fp:
logging.debug("writing %s", filename2)
json.dump(elected_field_types, fp, sort_keys=True) json.dump(elected_field_types, fp, sort_keys=True)
logging.debug(json.dumps(elected_field_types) )
final_field_catalog_by_dbschematable = field_catalog_by_dbschematable.copy() final_field_catalog_by_dbschematable = field_catalog_by_dbschematable.copy()
...@@ -254,7 +256,10 @@ def main(cfg): ...@@ -254,7 +256,10 @@ def main(cfg):
final_field_catalog_by_dbschematable[db_schema_table]['types'][field] = elected_field_types[field] final_field_catalog_by_dbschematable[db_schema_table]['types'][field] = elected_field_types[field]
with open(filename3, 'w') as fp: with open(filename3, 'w') as fp:
logging.debug("writing %s", filename3)
json.dump(final_field_catalog_by_dbschematable, fp, sort_keys=True) json.dump(final_field_catalog_by_dbschematable, fp, sort_keys=True)
from pprint import pformat
logging.debug(pformat(json.dumps(final_field_catalog_by_dbschematable) ))
return return
......
...@@ -30,7 +30,10 @@ def callback(channel, method, properties, body): ...@@ -30,7 +30,10 @@ def callback(channel, method, properties, body):
#data = res.json() #data = res.json()
# get sample records from the ingest index # get sample records from the ingest index
source_es = Elasticsearch([cfg['reindexer']['source_url']], timeout=60) if 'source_url' in cfg['reindexer'].keys():
source_es = Elasticsearch([cfg['reindexer']['source_url']], timeout=60)
else:
source_es = Elasticsearch([cfg['reindexer']['destination_url']], timeout=60)
the_query = dict() the_query = dict()
the_query['size'] = sample_size the_query['size'] = sample_size
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment