#!/usr/bin/env python
#
# Copyright (c) 2013 Ericsson, Inc.  All Rights Reserved.
#
# This module contains unpublished, confidential, proprietary
# material.  The use and dissemination of this material are
# governed by a license.  The above copyright notice does not
# evidence any actual or intended publication of this material.
#
# Author: Simon Meng
# Created: Nov 18, 2013
# Description:fdsa

import os
import json
import hashlib
import fnmatch
import dbutils
import optparse
import sys
import logging
import shutil
from decimal import Decimal
from FileAssembler import ContentClassHandler
from ContentClassDiffer import ContentClassDiffer
from CMSVersionDeploymentPolicy import CMSVersionDeploymentPolicy
from SSHCommander import SSHCommander

class ExcludeDifferentSets(object):
    """Container for the three exclude-difference sets parsed from the
    exclude-config JSON: monitored files, DB items and imported templates."""
    FILES = "Files"
    DB_ITEMS = "DbItems"
    IMPORT_TEMPLATE = "ImportTemplate"

    def __init__(self, jsonObj=None):
        # dict.get() replaces the deprecated has_key() lookup; a missing key
        # and jsonObj being None both yield the empty-set default, exactly as
        # before.
        jsonObj = jsonObj or {}
        self.file_sets = FileExculdeDifferentSet(jsonObj.get(self.FILES))
        self.db_item_sets = DbItemExcludeDifferentSet(jsonObj.get(self.DB_ITEMS))
        self.import_template_sets = FileExculdeDifferentSet(jsonObj.get(self.IMPORT_TEMPLATE))

    def getFileSets(self):
        return self.file_sets

    def getDbItemSets(self):
        return self.db_item_sets

    def getImportTemplateSets(self):
        # Accessor was missing in the original even though the attribute is
        # populated; added for parity (backward-compatible addition).
        return self.import_template_sets
        
class ExcludeDifferentSetBase(object):
    """Base holder for the NEW/DEL/UPD exclusion lists loaded from JSON."""
    NEW = "NEW"
    DEL = "DEL"
    UPD = "UPD"

    def __init__(self, json_object=None):
        # dict.get() replaces the deprecated has_key(); the [] default covers
        # both a missing key and json_object being None, matching the
        # original conditional-expression chain.
        json_object = json_object or {}
        self.new_set = json_object.get(self.NEW, [])
        self.delete_set = json_object.get(self.DEL, [])
        self.update_set = json_object.get(self.UPD, [])
        
class FileExculdeDifferentSet(ExcludeDifferentSetBase):
    """File-pattern flavored exclusion set.

    Exposes the inherited NEW/DEL/UPD lists through read-only accessors;
    each list holds fnmatch patterns for files whose difference is expected.
    """

    def getNewSet(self):
        """Patterns of new files whose appearance is expected."""
        return self.new_set

    def getDeleteSet(self):
        """Patterns of deleted files whose removal is expected."""
        return self.delete_set

    def getUpdateSet(self):
        """Patterns of modified files whose change is expected."""
        return self.update_set
    
class DbItemExcludeDifferentSet(ExcludeDifferentSetBase):
    """DB-row flavored exclusion set.

    Each entry is a dict carrying a TABLE name plus the ID_COLUMN_VALUES that
    identify one row of that table.
    """
    TABLE = "TABLE"
    ID_COLUMN_VALUES = "ID_COLUMN_VALUES"

    def _idsForTable(self, entries, table_name):
        # Collect the id-column values of every entry targeting table_name.
        return [entry[self.ID_COLUMN_VALUES]
                for entry in entries if entry[self.TABLE] == table_name]

    def getNewSetByTable(self, table_name):
        return self._idsForTable(self.new_set, table_name)

    def getDeleteSetByTable(self, table_name):
        return self._idsForTable(self.delete_set, table_name)

    def getUpdateSetByTable(self, table_name):
        return self._idsForTable(self.update_set, table_name)

class FileMonitor(object):
    """Static helpers that snapshot the MD5 of monitored files into a JSON
    file and later diff the current file-system state against that snapshot,
    splitting the differences into expected (excluded) and unexpected ones."""

    @staticmethod
    def exportFilesMD5(directories_, exclude_filters, jsonFilePath, sys_conf=None):
        """Walk each directory, MD5 every non-excluded file and write
        {directories, excludeFilters, filesMD5} to jsonFilePath.

        directories_    -- directories to scan (None treated as empty)
        exclude_filters -- fnmatch patterns of files to skip
        jsonFilePath    -- output JSON file; parent dirs are created
        sys_conf        -- when given on CMS >= 4.0, the remote alert config
                           is fetched locally and included in the snapshot
        """
        directories = directories_ if directories_ else []

        if CMSVersionDeploymentPolicy.getCMSVersion() >= 4.0 and sys_conf:
            # CMS 4.x keeps the alert config on a remote node; copy it local
            # first so its MD5 becomes part of the snapshot.
            FileMonitor.retrieveCMS4XAlertConfigFile(sys_conf)
            directories.append(LOCAL_ALERT_CNF_FILE)

        excludeFilters = exclude_filters if exclude_filters else []
        filesMD5 = {}
        for dirPath in directories:
            FileMonitor.calculateFileMD5(filesMD5, dirPath, excludeFilters)

        d = os.path.dirname(jsonFilePath)
        if not os.path.isdir(d):
            os.makedirs(d)
        with open(jsonFilePath, 'w') as f:
            json.dump({'directories': directories,
                       'excludeFilters': excludeFilters,
                       'filesMD5': filesMD5}, f, indent=4)

    @staticmethod
    def compare2Exported(jsonFilePath, fileExcludeDifferentSet=None, sys_conf=None):
        """Re-scan the directories recorded in jsonFilePath and print new,
        deleted and modified files.

        Returns a truthy value iff any unexpected difference remains after
        applying the exclude set; returns None when jsonFilePath is missing.
        """
        if not os.path.isfile(jsonFilePath):
            return
        if fileExcludeDifferentSet is None:
            # Lazy default; the original evaluated FileExculdeDifferentSet()
            # at def-time, creating one shared (read-only) instance.
            fileExcludeDifferentSet = FileExculdeDifferentSet()
        print('compare to exported...')

        if CMSVersionDeploymentPolicy.getCMSVersion() >= 4.0 and sys_conf:
            FileMonitor.retrieveCMS4XAlertConfigFile(sys_conf)

        # 'with' guarantees the snapshot file is closed even if parsing fails.
        with open(jsonFilePath) as json_data:
            org_data = json.load(json_data)
        org_files_md5 = org_data['filesMD5']
        org_directories = org_data['directories']
        org_excludefilters = org_data['excludeFilters']

        cur_files_md5 = {}
        for dirPath in org_directories:
            FileMonitor.calculateFileMD5(cur_files_md5, dirPath, org_excludefilters)

        # Dict membership is O(1); the original scanned key lists (O(n)).
        new_files = [f for f in cur_files_md5 if f not in org_files_md5]
        del_files = [f for f in org_files_md5 if f not in cur_files_md5]
        upd_files = [f for f in org_files_md5
                     if f in cur_files_md5 and org_files_md5[f] != cur_files_md5[f]]

        new_files_exclude_different = [f for f in new_files
                                       if not FileMonitor.matchFilters(f, fileExcludeDifferentSet.getNewSet())]
        del_files_exclude_different = [f for f in del_files
                                       if not FileMonitor.matchFilters(f, fileExcludeDifferentSet.getDeleteSet())]
        upd_files_exclude_different = [f for f in upd_files
                                       if not FileMonitor.matchFilters(f, fileExcludeDifferentSet.getUpdateSet())]

        if new_files:
            print('')
            print('New files:--------------------------------------')
            FileMonitor.printExpectedDiffAndUnExpectedDiff(new_files, new_files_exclude_different)

        if del_files:
            print('')
            print('Deleted files:----------------------------------')
            FileMonitor.printExpectedDiffAndUnExpectedDiff(del_files, del_files_exclude_different)

        if upd_files:
            print('')
            print('Modified files:---------------------------------')
            FileMonitor.printExpectedDiffAndUnExpectedDiff(upd_files, upd_files_exclude_different)

        return new_files_exclude_different or del_files_exclude_different or upd_files_exclude_different

    @staticmethod
    def printExpectedDiffAndUnExpectedDiff(all_files, unexpected_differences):
        """Print all_files split into expected differences (those filtered by
        the exclude set) and unexpected ones."""
        expected_differences = [f for f in all_files if f not in unexpected_differences]
        if expected_differences:
            print('Expected Differences:---------------------')
            for f in expected_differences:
                print(f)
        if unexpected_differences:
            print('UnExpected Differences:---------------------')
            for f in unexpected_differences:
                print(f)

    @staticmethod
    def exportMd5OfImportedTemplates(jsonFilePath):
        """Snapshot the imported workflow templates directory (minus *.par
        archives) into jsonFilePath as {directory, excludeFilters, filesMD5}."""
        importedDir = '/opt/tandbergtv/cms/workflow/imported'
        excludeFilters = ["*.par"]
        filesMD5 = {}
        FileMonitor.calculateFileMD5(filesMD5, importedDir, excludeFilters)

        d = os.path.dirname(jsonFilePath)
        if not os.path.isdir(d):
            os.makedirs(d)
        with open(jsonFilePath, 'w') as f:
            json.dump({'directory': importedDir,
                       'excludeFilters': excludeFilters,
                       'filesMD5': filesMD5}, f, indent=4)

    @staticmethod
    def compareImportedTemplates(jsonFilePath, fileExcludeDifferentSet=None):
        """Diff the imported-templates directory against the snapshot in
        jsonFilePath (missing files and MD5 mismatches only).

        Returns a truthy value iff an unexpected difference is found; returns
        None for an invalid snapshot path.
        """
        if not os.path.isfile(jsonFilePath):
            print('Invalid file to compare: ' + jsonFilePath)
            return
        if fileExcludeDifferentSet is None:
            fileExcludeDifferentSet = FileExculdeDifferentSet()

        print('Compare imported templates...')
        with open(jsonFilePath) as json_data:
            org_data = json.load(json_data)
        org_files_md5 = org_data['filesMD5']
        org_directory = org_data['directory']
        org_excludefilters = org_data['excludeFilters']

        cur_files_md5 = {}
        FileMonitor.calculateFileMD5(cur_files_md5, org_directory, org_excludefilters)

        # 'f in dict' replaces the deprecated has_key().
        missing_files = [f for f in org_files_md5 if f not in cur_files_md5]
        md5_mismatch_files = [f for f in org_files_md5
                              if f in cur_files_md5 and org_files_md5[f] != cur_files_md5[f]]

        missing_files_exclude_different = [f for f in missing_files
                                           if not FileMonitor.matchFilters(f, fileExcludeDifferentSet.getDeleteSet())]
        md5_mismatch_files_exclude_different = [f for f in md5_mismatch_files
                                                if not FileMonitor.matchFilters(f, fileExcludeDifferentSet.getUpdateSet())]

        if missing_files:
            print('')
            print('Missing file:--------------------------------------')
            FileMonitor.printExpectedDiffAndUnExpectedDiff(missing_files, missing_files_exclude_different)
        if md5_mismatch_files:
            print('')
            print('MD5 Mismatching file:--------------------------------------')
            FileMonitor.printExpectedDiffAndUnExpectedDiff(md5_mismatch_files, md5_mismatch_files_exclude_different)

        return missing_files_exclude_different or md5_mismatch_files_exclude_different

    @staticmethod
    def calculateFileMD5(filesMD5, dirPath, excludefilters):
        """
        Calculate the MD5 for all files under dirPath (recursively) and store
        them into filesMD5, keyed by the file path.
        """
        for path, dirNames, fileNames in os.walk(dirPath):
            for name in fileNames:
                # Keep the original key format (path + os.sep + name) so keys
                # stay comparable with previously exported snapshots.
                fileAbsPath = path + os.sep + name
                if not FileMonitor.matchFilters(fileAbsPath, excludefilters):
                    # 'with' + binary mode fixes a leaked file handle and
                    # makes the digest independent of newline translation.
                    with open(fileAbsPath, 'rb') as fh:
                        filesMD5[fileAbsPath] = hashlib.md5(fh.read()).hexdigest()

    @staticmethod
    def matchFilters(filePath, filters):
        """Return True iff filePath matches any fnmatch pattern in filters
        (an empty/None filter list matches nothing)."""
        if not filters:
            return False
        for pattern in filters:
            if fnmatch.fnmatch(filePath, pattern):
                return True
        return False

    @staticmethod
    def retrieveCMS4XAlertConfigFile(sys_conf=None):
        '''Backup a remote file or folder to local. Add an entry into rollback.json.'''
        if sys_conf is None:
            return
        nodes = sys_conf['cluster_service_nodes']
        if not nodes:
            return
        print('backup remote file [%s] of [%s]' % (REMOTE_ALERT_CNF_FILE, nodes[0]['ip']))

        # Create the local destination folder if needed.
        d = os.path.dirname(LOCAL_ALERT_CNF_FILE)
        if not os.path.exists(d):
            os.makedirs(d)

        shared_file = COMMON_SHARED_FOLDER + REMOTE_ALERT_CNF_FILE
        d = os.path.dirname(shared_file)
        if not os.path.exists(d):
            os.makedirs(d)
        # Only backup one of the remote nodes for now.
        client = None
        try:
            client = SSHCommander(nodes[0]['ip'], nodes[0]['user'], nodes[0]['password'])
            cmdstr = 'su root -c "\cp %s %s"' % (REMOTE_ALERT_CNF_FILE, shared_file)
            status, output = client.execute_interactive(cmdstr, nodes[0]['rootPasswd'])
            print(output)
            if status != 0:
                raise Exception("failed to backup remote file on " + nodes[0]['ip'])

            # Use 'move' to avoid leaving temporary files behind.
            shutil.move(shared_file, LOCAL_ALERT_CNF_FILE)
        finally:
            # The original caught Exception only to re-raise (a no-op); a
            # plain try/finally keeps identical behavior.
            if client:
                client.logout()

class DataMonitor(object):
    """Exports selected CMS database tables to JSON and compares a later
    database state against that snapshot, printing row-level differences."""

    # Separator used to join multiple id-column values into one composite key.
    ID_DELIMETER = "_$_"

    def __init__(self, cursor):
        # DB cursor used by the instance-level export methods.
        self.__cursor = cursor
        # table name -> {'rows': [...], 'ids': [...], 'sql': str}
        self.__data = {}

    def export(self, cursor, table, id_columns, export_sql='SELECT * FROM %s'):
        """Run export_sql (the default template is expanded with the table
        name) and stash the rows with the id columns and SQL used, so the
        comparison can later replay the exact same query."""
        if export_sql == 'SELECT * FROM %s':
            sql = export_sql % table
        else:
            sql = export_sql
        rows = DataMonitor._export(cursor, sql)
        self.__data[table] = {'rows': rows, 'ids': id_columns, 'sql': sql}

    def exportCMSTables(self):
        """Export the fixed set of CMS configuration tables."""
        self.export(self.__cursor, 'ALERT_ACTIONS', ['IDENTIFIER'])
        self.export(self.__cursor, 'ALERT_NAMES', ['ALERT_NAME'])
        self.export(self.__cursor, 'ALERTS', ['IDENTIFIER'])
        self.export(self.__cursor, 'MDM_CUSTOM_FIELD', ['NAME', 'ASSET_PATH'], 
                    'SELECT CF.NAME,CF.DATA_TYPE,CF.JOB_PARAMETER,CF.JOB_SCHEDULE_PARAMETER,'\
                    'CF.DISPLAY_NAME,CF.MULTIVALUE,CF.FIELD_TYPE,CFG.ASSET_PATH '\
                    'FROM MDM_CUSTOM_FIELD CF JOIN MDM_CUSTOM_FIELD_GROUP CFG '\
                    'ON (CF.GROUP_ID = CFG.GROUP_ID)')
        self.export(self.__cursor, 'MDM_CUSTOM_FIELD_GROUP', ['ASSET_PATH'], 'SELECT NAME,DISPLAY_NAME,ASSET_PATH,UUID FROM MDM_CUSTOM_FIELD_GROUP')
        self.export(self.__cursor, 'PMM_PARTNER', ['NAME','PROVIDERID'], 'SELECT NAME,PARTNERTYPE,PROVIDERID,NOTES,ISACTIVE,LOOKUPKEY FROM PMM_PARTNER')
        self.export(self.__cursor, 'PMM_CONTENT_CLASS', ['NAME'], 'SELECT NAME,SPECIAL_USAGE,IS_DEFAULT,CONTENT_CLASS_TYPE_ID FROM PMM_CONTENT_CLASS')
        self.export(self.__cursor, 'PMM_CONTENT_CLASS_PARTNER', ['CONTENT_CLASS_NAME', 'PARTNER_PROVIDER_ID'], 
                    'SELECT PCC.NAME AS CONTENT_CLASS_NAME,PP.PROVIDERID AS PARTNER_PROVIDER_ID FROM PMM_CONTENT_CLASS_PARTNER PCCP '\
                    'JOIN PMM_CONTENT_CLASS PCC ON (PCCP.CONTENT_CLASS_ID = PCC.ID) '\
                    'JOIN PMM_PARTNER PP ON (PCCP.PARTNER_ID = PP.ID_)')
        self.export(self.__cursor, 'JBPM_PROCESSDEFINITION', ['NAME_'], 'SELECT NAME_, MAX(VERSION_) FROM JBPM_PROCESSDEFINITION WHERE PROCESSDEFINITIONTYPEID=2 GROUP BY NAME_')
        self.export(self.__cursor, 'TTV_RESOURCE', ['RESOURCENAME','RESOURCETYPENAME'], 
                    'SELECT RS.NAME as RESOURCENAME,RT.NAME as RESOURCETYPENAME,'\
                    'RS.CONNECTIONSTRING,RS.MAXCONCURRENTUSERS,RS.ADMINISTRATIVERESOURCESTATUSID,'\
                    'RS.HEARTBEATCONNECTIONSTRING,RS.HEARTBEATFREQUENCY,RS.ISACTIVE,RS.FUNCTIONALTYPE,RS.IGNORETIMEOUT,RS.USER_NAME '\
                    'FROM TTV_RESOURCE RS '\
                    'JOIN TTV_RESOURCETYPE RT '\
                    'ON (RS.RESOURCETYPEID = RT.RESOURCETYPEID)')
        self.export(self.__cursor, 'TTV_RESOURCETYPE', ['NAME'], 
                    'SELECT NAME,RESOURCECONNECTIONTYPEID,INITIALIZATIONSTRATEGYCLASSID,HEARTBEATSTRATEGYCLASSID,HEARTBEATCONNECTIONTYPEID,SYSTEMID '\
                    'FROM TTV_RESOURCETYPE')
        self.export(self.__cursor, 'TTV_RESOURCEGROUP', ['RESOURCEGROUPNAME','RESOURCETYPENAME'], 
                    'SELECT RG.NAME as RESOURCEGROUPNAME,RT.NAME as RESOURCETYPENAME,'\
                    'RG.ISINTERNALLYACQUIRED,RG.ALLOCATIONSTRATEGYCLASSID,RG.ACCESSLEVELID,RG.FUNCTIONALTYPE,RG.ISVISIBLE,RG.IMAGEPATH '\
                    'FROM TTV_RESOURCEGROUP RG '\
                    'JOIN TTV_RESOURCETYPE RT '\
                    'ON (RG.RESOURCETYPEID = RT.RESOURCETYPEID)')
        self.export(self.__cursor, 'TTV_RESOURCEGROUPMEMBERSHIP', ['RESOURCENAME', 'RESOURCEGROUPNAME'], 
                    "SELECT R.NAME AS RESOURCENAME, RG.NAME AS RESOURCEGROUPNAME "\
                    "FROM TTV_RESOURCEGROUPMEMBERSHIP RGM "\
                    "JOIN TTV_RESOURCE R "\
                    "ON (RGM.RESOURCEID = R.RESOURCEID) "\
                    "JOIN TTV_RESOURCEGROUP RG "\
                    "ON (RGM.RESOURCEGROUPID = RG.RESOURCEGROUPID)")
        self.export(self.__cursor, 'TRE_RULESET', ['UUID'], 'SELECT RULENAME,PACKAGEID,ENABLED,RUNONGLOBAL,UUID FROM TRE_RULESET')
        self.export(self.__cursor, 'TTV_SELECTOR', ['SELECTIONKEY'], 'SELECT S.SELECTIONKEY, PD.NAME_ '\
                    "FROM TTV_SELECTOR S "\
                    "JOIN JBPM_PROCESSDEFINITION PD "\
                    "ON (S.PROCESSDEFINITIONID = PD.ID_)")
        self.export(self.__cursor, 'SITES_SITE', ['EXTERNALID'], 
                    'SELECT EXTERNALID, ALERT_DELAY_PERIOD, ASSOCIATED_GROUP_NAME, NAME, FILE_LOOKUP_KEYS, DIST_TEMPLATE_NAME, ASSOCIATED_RESOURCE_ID, '\
                    'METADATA_FORMAT, EXPORT_PLUGIN_NAME, DISTRIBUTION_OPTION, ACTIVE, TYPE, DESCRIPTION FROM SITES_SITE')
        self.export(self.__cursor, 'SITES_DIST_CONF_PARAMS', ['EXTERNALID', 'PAM_NAME'],
                    'SELECT ST.EXTERNALID AS EXTERNALID, PAM.NAME AS PAM_NAME, PAM.VALUE AS PAM_VALUE '\
                    'FROM SITES_DIST_CONF_PARAMS PAM JOIN SITES_SITE ST ON PAM.SITEID = ST.ID_')
        self.export(self.__cursor, 'PREPACK_TEMPLATES',['PROJECT_NAME', 'TYPE_OF_TEMPLATE', 'WP_TEMPLATE_NAME'])
        self.export(self.__cursor, 'PREPACK_REPORT_TITLEINFO', ['WORKORDER_ID','TITLE_ID'])
        self.export(self.__cursor, 'ALL_VIEWS', ['VIEW_NAME', 'OWNER'], 'select VIEW_NAME, OWNER, TEXT from ALL_VIEWS WHERE OWNER =\'WFS\'')
        self.export(self.__cursor, 'TEMPLATE_SUB_TEMPLATE', ['TEMPLATE_NAME', 'NODE', 'NODE_SUB_TEMPLATE_VER'], 'SELECT P.NAME_ TEMPLATE_NAME, N.NAME_ NODE, SUB_P.VERSION_ NODE_SUB_TEMPLATE_VER FROM JBPM_PROCESSDEFINITION P , JBPM_NODE N LEFT JOIN JBPM_PROCESSDEFINITION SUB_P ON N.SUBPROCESSDEFINITION_ = SUB_P.ID_ WHERE N.PROCESSDEFINITION_ = P.ID_ AND N.SUBPROCESSDEFINITION_ IS NOT NULL AND P.PROCESSDEFINITIONTYPEID = 2')

    def export2JsonFile(self, filePath, create_directory=True):
        """Dump all exported table data to filePath as JSON, optionally
        creating the parent directory first."""
        d = os.path.dirname(filePath)
        if not os.path.isdir(d) and create_directory:
            os.makedirs(d)
        with open(filePath, 'w') as f:
            json.dump(self.__data, f, cls=dbutils.DateTimeEncoder, indent=4)

    @staticmethod
    def _export(cursor, sql):
        """Run sql and return its rows; deliberately best-effort — a table
        missing in this deployment must not abort the whole export."""
        try:
            return dbutils.Dao.export(cursor, sql, True)
        except Exception:
            return []

    @staticmethod
    def compare2Exported(cursor, jsonFilePath, dbItemsExcludeDifferentSet=None):
        """Replay every recorded SQL and print inserted/deleted/updated rows
        per table.

        Returns True iff an unexpected difference remains after applying the
        exclude set; returns None when jsonFilePath does not exist.
        """
        different = False
        if not os.path.isfile(jsonFilePath):
            return
        if dbItemsExcludeDifferentSet is None:
            # Lazy default instead of a def-time shared instance.
            dbItemsExcludeDifferentSet = DbItemExcludeDifferentSet()
        print('compare to exported data: ')
        with open(jsonFilePath) as json_data:
            tableDatas = json.load(json_data)
        for tableName, d in tableDatas.items():
            id_cols = d['ids']
            cur_exp_data = DataMonitor._export(cursor, d['sql'])
            pre_rows = dict((DataMonitor.__getValuesByKeys(r, id_cols), r) for r in d['rows'])
            cur_rows = dict((DataMonitor.__getValuesByKeys(r, id_cols), r) for r in cur_exp_data)
            # Dict membership is O(1); the original scanned key lists.
            new_ids = [i for i in cur_rows if i not in pre_rows]
            del_ids = [i for i in pre_rows if i not in cur_rows]
            upd_ids = [i for i in pre_rows
                       if i in cur_rows and DataMonitor.checkUpdate(i, cur_rows, pre_rows)]

            # Hoist the composite-id joins out of the membership tests; the
            # original rebuilt the joined list for every candidate id.
            new_excluded = set(DataMonitor.ID_DELIMETER.join(c)
                               for c in dbItemsExcludeDifferentSet.getNewSetByTable(tableName))
            del_excluded = set(DataMonitor.ID_DELIMETER.join(c)
                               for c in dbItemsExcludeDifferentSet.getDeleteSetByTable(tableName))
            upd_excluded = set(DataMonitor.ID_DELIMETER.join(c)
                               for c in dbItemsExcludeDifferentSet.getUpdateSetByTable(tableName))
            new_ids_exclude_different = [i for i in new_ids if i not in new_excluded]
            del_ids_exclude_different = [i for i in del_ids if i not in del_excluded]
            upd_ids_exclude_different = [i for i in upd_ids if i not in upd_excluded]

            if new_ids or del_ids or upd_ids:
                print('')
                print(tableName + '---------------------------------')

                if new_ids:
                    print('insert rows:')
                    DataMonitor._printRowDiff(new_ids, new_ids_exclude_different, cur_rows, None)
                if del_ids:
                    print('delete rows:')
                    DataMonitor._printRowDiff(del_ids, del_ids_exclude_different, pre_rows, None)
                if upd_ids:
                    print('update rows:')
                    DataMonitor._printRowDiff(upd_ids, upd_ids_exclude_different, pre_rows, cur_rows)

            if new_ids_exclude_different or del_ids_exclude_different or upd_ids_exclude_different:
                different = True
        return different

    @staticmethod
    def _printRowDiff(all_ids, unexpected_ids, rows, updated_rows):
        """Print ids split into expected/unexpected sections (shared by the
        insert/delete/update listings, which the original triplicated)."""
        expected_ids = [i for i in all_ids if i not in unexpected_ids]
        if expected_ids:
            print('Expected Differences:---------------------')
            DataMonitor._printRows(expected_ids, rows, updated_rows)
        if unexpected_ids:
            # BUG FIX: header previously read 'UnExpeted'; spelling aligned
            # with FileMonitor.printExpectedDiffAndUnExpectedDiff.
            print('UnExpected Differences:---------------------')
            DataMonitor._printRows(unexpected_ids, rows, updated_rows)

    @staticmethod
    def _printRows(ids, rows, updated_rows):
        # When updated_rows is given, show the before/after pair per row.
        for i in ids:
            if updated_rows is None:
                print(rows[i])
            else:
                print('')
                print(rows[i])
                print('update to ')
                print(updated_rows[i])

    @staticmethod
    def checkUpdate(id, cur_rows, pre_rows):
        """Return True iff any column of row `id` differs between the
        previously exported row and the current one (missing columns count
        as a difference)."""
        for k, v in pre_rows[id].items():
            cur_val = cur_rows[id].get(k)
            if isinstance(cur_val, Decimal):
                # The JSON round-trip loses the Decimal type, so compare the
                # integer string form.  NOTE(review): "%d" truncates any
                # fractional part — presumably these columns are integral;
                # confirm before relying on this for non-integer values.
                cur_val = "%d" % cur_val
            if v != cur_val:
                return True
        return False

    @staticmethod
    def __getValuesByKeys(row, keys):
        """Join the stringified values of `keys` from `row` into one
        composite id; a missing key contributes an empty string.
        (Parameter renamed from `dict`, which shadowed the builtin.)"""
        values = [str(row[key]) if key in row else '' for key in keys]
        return DataMonitor.ID_DELIMETER.join(values)
    
def getCMSWathpointVersion():
    """Return the installed watchpoint-cms package version string.

    Queries the RPM database first; falls back to conary when RPM reports
    nothing."""
    rpm_cmd = "rpm -qi watchpoint-cms  | grep Version | awk '{print $3}'"
    version = os.popen(rpm_cmd, "r").read().strip()
    if version:
        # After 3.1, RPM is used instead of conary to manage the CMS package.
        return version
    # Reaching this point means the CMS version predates 3.1.
    conary_cmd = "conary q | grep watchpoint-cms | awk '{print $1}' | cut -d '=' -f2"
    return os.popen(conary_cmd, "r").read().strip()

def initSysConfig(sys_info_file):
    """Load the system configuration JSON, register the detected CMS version
    with the deployment policy, initialize the database provider, and return
    the parsed configuration dict."""
    CMSVersionDeploymentPolicy.setCMSVersion(getCMSWathpointVersion())

    with open(sys_info_file, 'r') as handle:
        sys_conf = json.load(handle)

    # Pre-4.0 deployments run on Oracle; 4.x and later on PostgreSQL.
    db_key = "oracle" if CMSVersionDeploymentPolicy.getCMSVersion() < 4.0 else "postgresql"
    dbconfig = sys_conf[db_key]
    if not dbconfig:
        raise Exception('No database configuration is found!')
    CMSVersionDeploymentPolicy.getPrepackDepolymentPolicy().initDbProvider(
        dbconfig["sid"], dbconfig["user"], dbconfig["password"],
        dbconfig["host"], dbconfig["port"])

    return sys_conf

def buildUpOptions():
    """Build and return the command-line parser for the monitor script."""
    parser = optparse.OptionParser()

    operations = optparse.OptionGroup(parser, 'Operations')
    operations.add_option("-e", action="store_true",
                          help="Export file MD5 information")
    operations.add_option("-c", action="store_true",
                          help="Compare to exported data")
    operations.add_option("-E", action="store_true",
                          help="Export MD5 information of files under /opt/tandbergtv/cms/workflow/imported")
    operations.add_option("-C", action="store_true",
                          help="Compare MD5 information of files under /opt/tandbergtv/cms/workflow/imported")
    parser.add_option_group(operations)

    parser.add_option("-j", action="store", dest="exculde_config",
                      help="json File defining the exclude set when detecting differences, only available when use -c to compare exported data.")
    parser.add_option("-s", action="store", dest="sys_config",
                      help="json File for system information configuration.")

    return parser

# Alert configuration path on the remote CMS 4.x service node.
REMOTE_ALERT_CNF_FILE = '/etc/logstash/alerts.yaml'
# Local destination the remote alert configuration is copied to for hashing.
LOCAL_ALERT_CNF_FILE = '/tmp/etc/logstash/alerts.yaml'
# Shared folder used as an intermediate drop point when copying from remote.
COMMON_SHARED_FOLDER = '/data/backupFiles'
def main():
    """Entry point: parse command-line options and dispatch to the export or
    compare operations; exits non-zero when unexpected differences exist."""
    optparser = buildUpOptions()
    (options, args) = optparser.parse_args()
    print("options: {0}".format(options))
    if not options.sys_config:
        optparser.print_help()
        # optparse.error() prints the message and exits the process.
        optparser.error('Sorry, "sys_config" is required.')
        return

    sys_conf = initSysConfig(options.sys_config)

    contentClassFile = '/tmp/contentClassSrc.xml'
    fileMonitorFile = '/tmp/cms_file_monitor.json'
    dbItemMonitorFile = '/tmp/cms_data_monitor.json'
    userName = 'admin'
    passwd = 'admin'
    passwd_retry = 'admin1234'
    importedTemplateInfo = '/tmp/cms_imported_templates_info.json'

    if options.e:
        # Snapshot mode: record file MD5s, DB tables and the content classes.
        monitorDirs = ['/opt/tandbergtv/cms']
        excludeFilters = ['/opt/tandbergtv/cms/jboss*',
                          '/opt/tandbergtv/cms/prepack/*',
                          '/opt/tandbergtv/cms/log*',
                          '*_ORIG_*',
                          '/opt/tandbergtv/cms/cms_stackdump.txt',
                          '/opt/tandbergtv/cms/workflow/stats*',
                          '/opt/tandbergtv/cms/workflow/history/*',
                          '/opt/tandbergtv/cms/workflow/imported/*',
                          '/opt/tandbergtv/cms/workflow/rejected/*']
        FileMonitor.exportFilesMD5(monitorDirs, excludeFilters, fileMonitorFile, sys_conf)
        conn = CMSVersionDeploymentPolicy.getPrepackDepolymentPolicy().getDatabaseConnection()
        cursor = conn.cursor()
        dataMonitor = DataMonitor(cursor)
        dataMonitor.exportCMSTables()
        dataMonitor.export2JsonFile(dbItemMonitorFile)
        CMSVersionDeploymentPolicy.getPrepackDepolymentPolicy().cleanDatabaseConnection()
        try:
            ContentClassHandler.exportById(contentClassFile, 1, userName, passwd)
        except Exception:
            # First attempt can fail when the default password was changed.
            print("Retry compare content class...")
            ContentClassHandler.exportById(contentClassFile, 1, userName, passwd_retry)

    elif options.c:
        # Compare mode: diff current state against the recorded snapshots.
        includeImportedTemplate = False
        excludeDifferentSet = ExcludeDifferentSets()
        if options.exculde_config is not None:
            try:
                with open(options.exculde_config) as json_file:
                    config_data = json.load(json_file)
                excludeDifferentSet = ExcludeDifferentSets(config_data)
            except Exception:
                # Local import: traceback is not imported at module level.
                import traceback
                # BUG FIX: the original concatenated the `args` list into the
                # message, which itself raised TypeError; report the config
                # file name instead.
                print('Error loading file: ' + str(options.exculde_config))
                print(traceback.format_exc())
        if options.C:
            includeImportedTemplate = True
        fileMonitorDiff = FileMonitor.compare2Exported(fileMonitorFile, excludeDifferentSet.getFileSets(), sys_conf)
        conn = CMSVersionDeploymentPolicy.getPrepackDepolymentPolicy().getDatabaseConnection()
        cursor = conn.cursor()
        dataMonitorDiff = DataMonitor.compare2Exported(cursor, dbItemMonitorFile, excludeDifferentSet.getDbItemSets())
        CMSVersionDeploymentPolicy.getPrepackDepolymentPolicy().cleanDatabaseConnection()
        ccMonitorDiff = ContentClassDiffer.compareContentClasses(contentClassFile)
        importedTemplateDiff = (FileMonitor.compareImportedTemplates(importedTemplateInfo, excludeDifferentSet.getFileSets())
                                if includeImportedTemplate else False)
        if fileMonitorDiff or dataMonitorDiff or ccMonitorDiff or importedTemplateDiff:
            print('Unexpected Differences are detected...')
            sys.exit(1)
    elif options.E:
        FileMonitor.exportMd5OfImportedTemplates(importedTemplateInfo)
    elif options.C:
        FileMonitor.compareImportedTemplates(importedTemplateInfo)
    else:
        optparser.print_help()
    sys.exit(0)
        
# Script entry point.
if __name__ == "__main__":
    main()            
