Commit ed4add17 authored by Christine Plumejeaud's avatar Christine Plumejeaud

python3 loadfilemaker.py

parent fc70de7c
# -*- coding: utf-8 -*-
'''
Created on 13 February 2019
@author: cplumejeaud
'''
# Understanding imports in Python: http://sametmax.com/les-imports-en-python/
# print(sys.path)
from __future__ import nested_scopes
import psycopg2
import logging
import configparser
import os, sys, traceback
from os import path
import subprocess
import time
from stat import *
import random
from xlrd import open_workbook, cellname, XL_CELL_TEXT, XL_CELL_DATE, XL_CELL_BLANK, XL_CELL_EMPTY, XL_CELL_NUMBER, \
    xldate_as_tuple

## To get the install path of a package:
## print(psycopg2.__file__)
## C:\Users\cplume01\AppData\Local\Programs\Python\Python37-32\lib\site-packages\psycopg2\__init__.py
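
## For reference, a minimal sketch of what config_loadfilemaker.txt is assumed to look like,
## based on the config.get() calls below (all values are placeholders, not the real ones):
##
##   [log]
##   file = loadfilemaker.log
##   level = 10
##
##   [base]
##   host = localhost
##   port = 8003
##   dbname = navigo_db
##   user = postgres
##   password = secret
##
##   [ssh]
##   putty = putty.exe
##   postgres_port = 5432
##   ppk = mykey.ppk
##   passwd = secret
##   user = myuser
##   server = myserver.example.org
##
##   [pointcall]
##   file_name = pointcall.xls
##   sheet_name = Feuil1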
class LoadFilemaker(object):
    def __init__(self, config):
        ## Open the log file
        logging.basicConfig(filename=config.get('log', 'file'), level=int(config.get('log', 'level')), filemode='w')
        self.logger = logging.getLogger('LoadFilemaker (v1.1)')
        self.logger.debug('log file for DEBUG')
        self.logger.info('log file for INFO')
        self.logger.warning('log file for WARNINGS')
        self.logger.error('log file for ERROR')

        ## Open both an ssh connection for copy/remove, and a tunnel for the Postgres connection
        self.postgresconn = self.open_connection(config)

    def close_connection(self):
        '''
        Cleanly close the DB connection
        :return:
        '''
        if self.postgresconn is not None:
            self.postgresconn.close()
    def open_connection(self, config):
        '''
        Open a database connection to Postgres
        :param config:
        :return: the psycopg2 connection, or None if the connection failed
        '''
        putty = config.get('ssh', 'putty')
        localport = config.get('base', 'port')  # local port for Postgres: 8003
        remoteport = config.get('ssh', 'postgres_port')  # remote server port for Postgres: 5432
        identityfile = config.get('ssh', 'ppk')
        password = config.get('ssh', 'passwd')
        user = config.get('ssh', 'user')
        server = config.get('ssh', 'server')
        # Start the tunnel
        # self.tunnel = self.createTunnel(putty, localport, remoteport, identityfile, password, user, server)

        # Read the connection parameters from the configuration
        host = config.get('base', 'host')
        port = config.get('base', 'port')
        dbname = config.get('base', 'dbname')
        user = config.get('base', 'user')
        password = config.get('base', 'password')
        # schema = config.get('base', 'schema')

        driverPostgres = 'host=' + host + ' port=' + port + ' user=' + user + ' dbname=' + dbname + ' password=' + password
        self.logger.debug(driverPostgres)
        conn = None
        try:
            conn = psycopg2.connect(driverPostgres)
        except Exception as e:
            self.logger.error("I am unable to connect to the database. " + str(e))

        # Test the connection
        if conn is not None:
            cur = conn.cursor()
            cur.execute('select count(*) from pg_namespace')
            result = cur.fetchone()
            if result is None:
                print('open_connection: failed to query the database')
            else:
                print('open_connection: got a database connection: ' + str(result[0]))
        else:
            print('open_connection: failed to get a database connection')
        return conn
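
    ## A sketch of what the (commented-out) createTunnel step is assumed to do with the 'ssh'
    ## parameters above (the plink command line and port values are illustrative, not taken
    ## from this repository):
    ##   plink -ssh -N -i <identityfile> -pw <password> -L <localport>:localhost:<remoteport> <user>@<server>
    ## i.e. forward the local Postgres port (8003) to port 5432 on the remote server.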
    def do_the_job(self, config):
        self.logger.info('\n---------------------------- Processing Filemaker files ---------------------------- \n')
        try:
            '''
            self.loadfile(config, 'geo_general')
            self.loadfile(config, 'pointcall')
            self.loadfile(config, 'acting_parties')
            self.loadfile(config, 'actions')
            self.loadfile(config, 'taxes')
            self.loadfile(config, 'cargo')
            self.loadfile(config, 'component_description')

            self.create_uncertain_check_tables(config, 'pointcall')
            self.create_uncertain_check_tables(config, 'acting_parties')
            self.create_uncertain_check_tables(config, 'actions')
            self.create_uncertain_check_tables(config, 'taxes')
            self.create_uncertain_check_tables(config, 'cargo')
            self.create_uncertain_check_tables(config, 'component_description')

            self.populate_uncertain_check_tables(config, 'pointcall')
            self.populate_uncertain_check_tables(config, 'acting_parties')
            self.populate_uncertain_check_tables(config, 'actions')
            self.populate_uncertain_check_tables(config, 'taxes')
            self.populate_uncertain_check_tables(config, 'cargo')
            self.populate_uncertain_check_tables(config, 'component_description')
            '''
        except Exception as e:
            exc_type, exc_value, exc_traceback = sys.exc_info()
            print(str(e))
            print(repr(traceback.format_exception(exc_type, exc_value, exc_traceback)))
            self.logger.error(str(e))

        self.logger.info('\n---------------------------- END ---------------------------- \n')
        self.close_connection()
    def create_uncertain_check_tables(self, config, relation):
        print('create_uncertain_check_tables ', relation)
        sql_createtable = """SELECT column_name FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA='navigo' and TABLE_NAME = '%s' order by ordinal_position""" % (relation)
        self.logger.info(sql_createtable)
        columns = self.select_sql(sql_createtable)

        # Uncertainty table: one int column (the uncertainty code) per source column
        sql_createtable = """ CREATE TABLE navigocheck.uncertainity_""" + relation + """ ( """
        for c in columns:
            sql_createtable += c[0] + ' int,'
        sql_createtable = sql_createtable[:-1]
        sql_createtable += ");"
        self.logger.info(sql_createtable)
        sql_droptable = "drop table IF EXISTS navigocheck.uncertainity_" + relation + " cascade;"
        self.execute_sql(sql_droptable)
        self.execute_sql(sql_createtable)

        # Check table: pkid stays an int, every other column becomes text
        sql_createtable = """ CREATE TABLE navigocheck.check_""" + relation + """ ( """
        for c in columns:
            if c[0] == 'pkid':
                sql_createtable += c[0] + ' int,'
            else:
                sql_createtable += c[0] + ' text,'
        sql_createtable = sql_createtable[:-1]
        sql_createtable += ");"
        self.logger.info(sql_createtable)
        sql_droptable = "drop table IF EXISTS navigocheck.check_" + relation + " cascade;"
        self.execute_sql(sql_droptable)
        self.execute_sql(sql_createtable)
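
    ## Illustration only, with hypothetical columns (pkid, ship_name, tonnage) for relation 'pointcall':
    ## the two statements built above would read roughly as
    ##   CREATE TABLE navigocheck.uncertainity_pointcall ( pkid int, ship_name int, tonnage int);
    ##   CREATE TABLE navigocheck.check_pointcall ( pkid int, ship_name text, tonnage text);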
    def populate_uncertain_check_tables(self, config, relation):
        print('populate_uncertain_check_tables ', relation)
        query = """SELECT column_name FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA='navigo' and TABLE_NAME = '%s' order by ordinal_position""" % (relation)
        self.logger.info(query)
        columns = self.select_sql(query)

        # Fill the uncertainty table with the .code part returned by navigo.rm_parentheses_crochets
        sql_insert_head = "insert into navigocheck.uncertainity_" + relation + " ( select "
        for c in columns:
            if c[0] == 'pkid':
                sql_insert_head += c[0] + ','
            else:
                sql_insert_head += ' (navigo.rm_parentheses_crochets(' + c[0] + ')).code ,'
        sql_insert_head = sql_insert_head[:-1]
        sql_insert_head += " from navigo." + relation + ");"
        self.logger.info("\n" + sql_insert_head + "\n")
        # self.execute_sql(sql_insert_head)

        # Fill the check table with the .value part returned by navigo.rm_parentheses_crochets
        sql_insert_head = "insert into navigocheck.check_" + relation + " ( select "
        for c in columns:
            if c[0] == 'pkid':
                sql_insert_head += c[0] + ','
            else:
                sql_insert_head += ' (navigo.rm_parentheses_crochets(' + c[0] + ')).value ,'
        sql_insert_head = sql_insert_head[:-1]
        sql_insert_head += " from navigo." + relation + ");"
        self.logger.info("\n" + sql_insert_head + "\n")
        # self.execute_sql(sql_insert_head)
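
    ## Illustration only, with the same hypothetical columns (pkid, ship_name, tonnage):
    ##   insert into navigocheck.uncertainity_pointcall ( select pkid, (navigo.rm_parentheses_crochets(ship_name)).code, (navigo.rm_parentheses_crochets(tonnage)).code from navigo.pointcall);
    ##   insert into navigocheck.check_pointcall ( select pkid, (navigo.rm_parentheses_crochets(ship_name)).value, (navigo.rm_parentheses_crochets(tonnage)).value from navigo.pointcall);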
    def loadfile(self, config, relation):
        print('Processing ' + relation)
        print('Data are in: ' + config.get(relation, 'file_name'))
        wb = open_workbook(config.get(relation, 'file_name'))
        sheet = wb.sheet_by_name(config.get(relation, 'sheet_name'))
        print("Rows: ", sheet.nrows, "Columns: ", sheet.ncols)

        ## Process the first row: build the relation from the column headers
        row = 0
        sql_createtable = "create table navigo." + relation + " ( \n\t pkid serial primary key,"
        sql_insert_head = "insert into navigo." + relation + " ( "
        for col in range(0, sheet.ncols):
            if sheet.cell(row, col).ctype != XL_CELL_BLANK and sheet.cell(row, col).ctype != XL_CELL_EMPTY:
                self.logger.info(sheet.cell(row, col).value)
                column_name = sheet.cell(row, col).value
                # FileMaker exports related fields as 'table::field'; turn '::' into '__'
                if column_name.find("::") > -1:
                    column_name = column_name[:column_name.index("::")] + '__' + column_name[column_name.index("::") + 2:]
                sql_createtable += "\n\t" + column_name + " text,"
                sql_insert_head += column_name + ","
            else:
                print("the header row of the Excel file must not contain empty columns")
                sys.exit(1)
        sql_createtable = sql_createtable[:-1]
        sql_createtable += ");"
        self.logger.info(sql_createtable)
        sql_insert_head = sql_insert_head[:-1]
        sql_insert_head += ") values "
        self.logger.info(sql_insert_head)
        sql_droptable = "drop table IF EXISTS navigo." + relation + " cascade;"
        self.execute_sql(sql_droptable)
        self.execute_sql(sql_createtable)

        ## Insert the data rows in batches of 1000
        sql_insert = sql_insert_head
        for row in range(1, sheet.nrows):
            # print(row)
            sql_insert += "\n\t("
            for col in range(0, sheet.ncols):
                if sheet.cell(row, col).ctype != XL_CELL_BLANK and sheet.cell(row, col).ctype != XL_CELL_EMPTY:
                    if sheet.cell(row, col).ctype is XL_CELL_TEXT:
                        sql_insert += "'" + sheet.cell(row, col).value.replace('\'', '\'\' ') + "',"
                    else:
                        sql_insert += "'" + str(sheet.cell(row, col).value) + "',"
                else:
                    sql_insert += "null,"
            sql_insert = sql_insert[:-1]
            sql_insert += "),"
            if row % 1000 == 0:
                self.logger.info(row)
                sql_insert = sql_insert[:-1]
                # self.logger.info(sql_insert.encode('UTF-8'))
                self.execute_sql(sql_insert.encode('UTF-8'))
                sql_insert = sql_insert_head

        # Flush the last (partial) batch, if any rows remain
        if sql_insert != sql_insert_head:
            sql_insert = sql_insert[:-1]
            # self.logger.info(sql_insert.encode('UTF-8'))
            self.execute_sql(sql_insert.encode('UTF-8'))
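
    ## Sketch only (not used in this script): psycopg2's execute_values could build the batched
    ## VALUES list and handle quoting instead of the manual string concatenation above.
    ## 'colnames' stands for the cleaned header names collected above (hypothetical variable).
    ##
    ##   from psycopg2.extras import execute_values
    ##   rows = [tuple(sheet.cell(r, c).value if sheet.cell(r, c).ctype not in (XL_CELL_BLANK, XL_CELL_EMPTY) else None
    ##                 for c in range(sheet.ncols))
    ##           for r in range(1, sheet.nrows)]
    ##   cur = self.postgresconn.cursor()
    ##   execute_values(cur, "insert into navigo." + relation + " (" + ",".join(colnames) + ") values %s",
    ##                  rows, page_size=1000)
    ##   self.postgresconn.commit()
    ##   cur.close()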
    def execute_sql(self, sql_query):
        cur = self.postgresconn.cursor()
        try:
            cur.execute(sql_query)
        except Exception as e:
            exc_type, exc_value, exc_traceback = sys.exc_info()
            print(str(e))
            print(repr(traceback.format_exception(exc_type, exc_value, exc_traceback)))
            self.logger.error(sql_query)
        cur.close()
        self.postgresconn.commit()

    def select_sql(self, sql_query):
        cur = self.postgresconn.cursor()
        try:
            cur.execute(sql_query)
            return cur.fetchall()
        except Exception as e:
            exc_type, exc_value, exc_traceback = sys.exc_info()
            print(str(e))
            print(repr(traceback.format_exception(exc_type, exc_value, exc_traceback)))
            self.logger.error(sql_query)
        finally:
            cur.close()
            self.postgresconn.commit()
if __name__ == '__main__':
    # The configuration file name could be passed as a parameter
    # configfile = sys.argv[1]
    configfile = 'config_loadfilemaker.txt'

    config = configparser.RawConfigParser()
    config.read(configfile)
    print("LOG file: " + config.get('log', 'file'))

    p = LoadFilemaker(config)
    p.do_the_job(config)

## to run it on the server:
# nohup python3 LoadFilemaker.py > out.txt &