# By: Riasat Ullah

from dateutil.relativedelta import relativedelta
from utils import constants, helpers, key_manager, times, var_names
import configuration as configs
import datetime
import psycopg2
import uuid

############################
#    for the Dashboard
############################

def get_open_and_acknowledged_count(conn, timestamp, user_id, org_id, check_adv_perm=False):
    '''
    Get the counts of incidents that are open or acknowledged at the given timestamp, along with
    the count of incidents resolved within the preceding 30 days.
    :param conn: db connection
    :param timestamp: timestamp this request is being made on
    :param user_id: user_id of the assignee to filter the "mine" counts by
    :param org_id: id of the organization to filter by
    :param check_adv_perm: (boolean) True if advanced team permissions should be checked
    :return: (tuple) -> ( [my open count, org open count], [my ack count, org ack count], org resolved count )
    '''
    assert isinstance(timestamp, datetime.datetime)
    assert isinstance(user_id, int)
    assert isinstance(org_id, int)

    query_params = {'timestamp': timestamp, 'usr_id': user_id, 'org_id': org_id,
                    'open_state': constants.open_state, 'ack_state': constants.acknowledged_state,
                    'res_state': constants.resolved_state, 'min_tmsp': timestamp - datetime.timedelta(days=30)}

    conditions = []
    if check_adv_perm and user_id is not None:
        conditions.append('''
        (
            instanceid in (
                select instanceid from instance_assignments
                where user_policyid in (
                        select policyid from users
                        where start_timestamp <= %(timestamp)s
                            and end_timestamp > %(timestamp)s
                            and organization_id = %(org_id)s
                            and user_id = %(usr_id)s
                    ) or for_policyid not in (
                        select component_id from components_user_cannot_view(
                            %(timestamp)s, %(org_id)s, %(usr_id)s, %(pol_comp_type_id)s::smallint
                    )
                )
            ) and taskid in (
                select taskid from tasks
                where start_timestamp >= %(min_tmsp)s
                    and organization_id = %(org_id)s
                    and (
                        serviceid is null
                        or (
                            serviceid is not null
                            and serviceid not in (
                                select component_id from components_user_cannot_view(
                                    %(timestamp)s, %(org_id)s, %(usr_id)s, %(serv_comp_type_id)s::smallint
                                )
                            )
                        )
                    )
            )
        )
        ''')
        query_params['pol_comp_type_id'] = configs.policy_component_type_id
        query_params['serv_comp_type_id'] = configs.service_component_type_id

    query = '''
            with t1 as (
                select instanceid from instance_assignments
                where start_timestamp <= %(timestamp)s
                    and end_timestamp > %(timestamp)s
                    and user_policyid in (
                        select policyid from users
                        where start_timestamp <= %(timestamp)s
                            and end_timestamp > %(timestamp)s
                            and user_id = %(usr_id)s
                    )
            )
            select
                count(instanceid) filter (where is_open = true and status = %(open_state)s
                    and instanceid in (select instanceid from t1)) as mine_open,
                count(instanceid) filter (where is_open = true and status = %(ack_state)s
                    and instanceid in (select instanceid from t1)) as mine_ack,
                count(instanceid) filter (where is_open = true and status = %(open_state)s) as org_open,
                count(instanceid) filter (where is_open = true and status = %(ack_state)s) as org_ack,
                count(instanceid) filter (where is_open = false and status = %(res_state)s
                    and resolved_on >= %(min_tmsp)s) as org_res
            from task_instances
            where organization_id = %(org_id)s
            {0};
            '''.format(' and ' + ' and '.join(conditions) if len(conditions) > 0 else '')
    try:
        result = conn.fetch(query, query_params)
        if len(result) == 0:
            return [0, 0], [0, 0], 0
        else:
            for mine_open, mine_ack, org_open, org_ack, res_count in result:
                open_count = [mine_open, org_open]
                ack_count = [mine_ack, org_ack]
                return open_count, ack_count, res_count
    except psycopg2.DatabaseError:
        raise
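

# Illustrative usage sketch only (not called by the application): shows how the three-part return
# value of get_open_and_acknowledged_count is typically unpacked for the dashboard. It assumes the
# same connection wrapper used throughout this module, i.e. an object exposing fetch(query, params).
def _example_dashboard_counts(conn, user_id, org_id):
    now = datetime.datetime.utcnow()
    open_count, ack_count, res_count = get_open_and_acknowledged_count(
        conn, now, user_id, org_id, check_adv_perm=True)
    my_open, org_open = open_count
    my_ack, org_ack = ack_count
    return {'my_open': my_open, 'org_open': org_open,
            'my_ack': my_ack, 'org_ack': org_ack, 'org_resolved': res_count}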


def get_aggregate_notifications_count(conn, organization_id, check_start, check_end):
    '''
    Get aggregate notification counts per day over a period of time, broken down by delivery
    method (push, email, text and call).
    :param conn: db connection
    :param organization_id: ID of the organization
    :param check_start: (datetime.date) the start date to check from
    :param check_end: (datetime.date) the end date to check till
    :return: (dict) of parallel per-day lists keyed by var_names labels
    :errors: AssertionError, DatabaseError
    '''
    assert isinstance(organization_id, int)
    assert isinstance(check_start, datetime.date)
    assert isinstance(check_end, datetime.date)

    query = '''
            select log_date,
                count(log_id) filter (where event_method = %(app_method)s) as app_count,
                count(log_id) filter (where event_method = %(email_method)s) as email_count,
                count(log_id) filter (where event_method = %(text_method)s) as text_count,
                count(log_id) filter (where event_method = %(call_method)s) as call_count
            from alert_event_logs
            where event_type = %(disp_event)s
                and side = 2
                and log_date >= %(chk_start)s
                and log_date <= %(chk_end)s
                and user_id in (
                    select member_id from organization_members
                    where start_timestamp <= %(chk_end)s
                        and end_timestamp >= %(chk_start)s
                        and organization_id = %(org_id)s
                )
            group by log_date
            order by log_date;
            '''
    query_params = {'org_id': organization_id, 'disp_event': constants.dispatch_event, 'chk_start': check_start,
                    'chk_end': check_end, 'app_method': constants.app, 'email_method': constants.email,
                    'text_method': constants.text, 'call_method': constants.call}
    try:
        result = conn.fetch(query, query_params)

        data = {
            var_names.notifications: [],
            var_names.period: [],
            var_names.email_notification: [],
            var_names.push_notification: [],
            var_names.text_notification: [],
            var_names.call_notification: [],
            var_names.total_notification_count: []
        }

        # dt_range runs from check_end back to check_start, so popping from the end yields the
        # earliest expected date first; missing dates are zero-filled against the ordered results.
        dt_range = [check_end - datetime.timedelta(days=i) for i in range(0, (check_end - check_start).days + 1)]
        for date_, app_count, email_count, text_count, call_count in result:

            go_on = True
            while go_on:
                if len(dt_range) > 0:
                    exp_date = dt_range.pop()
                    if exp_date == date_:
                        go_on = False
                    else:
                        data[var_names.notifications].append([exp_date, 0, 0, 0, 0, 0])
                        data[var_names.period].append(str(exp_date.month) + '/' + str(exp_date.day))
                        data[var_names.push_notification].append(0)
                        data[var_names.email_notification].append(0)
                        data[var_names.text_notification].append(0)
                        data[var_names.call_notification].append(0)
                else:
                    go_on = False

            total_count = app_count + email_count + text_count + call_count

            data[var_names.notifications].append([date_, app_count, email_count, text_count, call_count, total_count])
            data[var_names.period].append(str(date_.month) + '/' + str(date_.day))
            data[var_names.push_notification].append(app_count)
            data[var_names.email_notification].append(email_count)
            data[var_names.text_notification].append(text_count)
            data[var_names.call_notification].append(call_count)

        # zero-fill any dates remaining after the last result row
        while len(dt_range) > 0:
            exp_date = dt_range.pop()
            data[var_names.notifications].append([exp_date, 0, 0, 0, 0, 0])
            data[var_names.period].append(str(exp_date.month) + '/' + str(exp_date.day))
            data[var_names.push_notification].append(0)
            data[var_names.email_notification].append(0)
            data[var_names.text_notification].append(0)
            data[var_names.call_notification].append(0)

        return data
    except psycopg2.DatabaseError:
        raise
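

# Minimal, self-contained sketch of the gap-filling idea used above with dt_range: every day in the
# requested window gets an entry, and days absent from the query result are reported as zero. This
# simplified single-count formulation is for illustration only; the arguments are hypothetical.
def _example_zero_filled_days(check_start, check_end, rows):
    # rows: iterable of (log_date, count) pairs, possibly missing some dates
    counts = dict(rows)
    filled = []
    day = check_start
    while day <= check_end:
        filled.append((day, counts.get(day, 0)))
        day = day + datetime.timedelta(days=1)
    return filled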


def get_day_notifications(conn, organization_id, check_date):
    '''
    Get all the alerts that were dispatched on a given date.
    :param conn: db connection
    :param organization_id: ID of the organization
    :param check_date: (datetime.date) the date to check for
    :return: list (of list) -> [[log timestamp, event method, recipient phone number or email, user display name], ...]
    :errors: AssertionError, DatabaseError
    '''
    assert isinstance(organization_id, int)
    assert isinstance(check_date, datetime.date)

    # Make sure there is only one matching entry per user: multiple rows in the users table for
    # the same user at the log timestamp would cause duplicate rows to be returned for the same
    # incident. That is acceptable here because we are not using a map to get user_ids.
    query = '''
            select logs.log_timestamp, logs.event_method,
                case
                    when event_method = %s or event_method = %s then logs.recipient_phone_number
                    else users.email
                end as address,
                users.first_name || ' ' || users.last_name
            from alert_event_logs as logs
            join users
            on logs.user_id = users.user_id
                and users.organization_id = %s
                and users.start_timestamp <= logs.log_timestamp
                and users.end_timestamp > logs.log_timestamp
            where logs.log_date = %s
                and logs.event_type = %s
                and logs.side = 2;
            '''
    query_params = (constants.text, constants.call, organization_id, check_date, constants.dispatch_event,)
    try:
        result = conn.fetch(query, query_params)
        return result
    except psycopg2.DatabaseError:
        raise
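

# Usage sketch only: rows returned by get_day_notifications are
# [log timestamp, event method, address, user display name]; the formatting below is illustrative.
def _example_day_notification_lines(conn, organization_id, check_date):
    lines = []
    for log_ts, method, address, display_name in get_day_notifications(conn, organization_id, check_date):
        lines.append('{0} -> {1} via {2} ({3})'.format(log_ts, display_name, method, address))
    return lines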


def get_instances_for_analysis(conn, start_date, end_date, org_id, urgencies=None, service_ids=None, user_pol_id=None,
                               esc_pol_ids=None, only_team_components=False, tags=None, user_id=None):
    '''
    Get the list of all instances that are eligible for analysis given certain parameters.
    Only resolved instances are returned, as a list of dicts of instance details.
    :param conn: db connection
    :param start_date: (datetime.date) period start
    :param end_date: (datetime.date) period end
    :param org_id: (int) ID of the organization
    :param urgencies: (list of int) of urgency levels
    :param service_ids: (list of int) of service IDs
    :param user_pol_id: (int) policy ID of the user to filter by
    :param esc_pol_ids: (list of int) ID of escalation policies
    :param only_team_components: (boolean) True if instances should be filtered by occurrences
            only on services and policies that are owned by teams of the organization
    :param tags: (list) of tags
    :param user_id: ID of the user to check team permissions for
    :return: (list of dict) of instance details
    '''
    assert isinstance(start_date, datetime.date)
    assert isinstance(end_date, datetime.date)
    assert isinstance(org_id, int)

    query_params = {'start_date': start_date, 'end_date': end_date, 'org_id': org_id, 'tag_lbl': var_names.tags}
    conditions = []

    if urgencies is not None:
        assert isinstance(urgencies, list)
        assert set(urgencies).issubset(set(configs.allowed_urgency_levels))
        conditions.append(" td.urgency_level = any(%(urg_list)s)")
        query_params['urg_list'] = urgencies

    if service_ids is not None:
        conditions.append(" td.serviceid = any(%(srv_list)s) ")
        query_params['srv_list'] = helpers.get_int_list(service_ids)

    if user_pol_id is not None:
        assert isinstance(user_pol_id, int)
        conditions.append('''
            inst.instanceid in (select instanceid from instance_assignments where user_policyid = %(user_pol_id)s)
        ''')
        query_params['user_pol_id'] = user_pol_id

    if esc_pol_ids is not None:
        conditions.append('''
            inst.instanceid in (select instanceid from instance_assignments where for_policyid = any(%(esc_ids)s))
        ''')
        query_params['esc_ids'] = esc_pol_ids

    if only_team_components:
        conditions.append('''
            (td.serviceid in (
                select component_id from team_components
                where start_timestamp <= %(end_date)s
                    and end_timestamp > %(start_date)s
                    and team_id in (select team_id from org_teams)
                    and component_type_id = %(serv_comp_type_id)s
            )
            or inst.instanceid in (
                select instanceid from instance_assignments
                where start_timestamp <= %(end_date)s
                    and end_timestamp > %(start_date)s
                    and for_policyid in (
                        select component_id from team_components
                        where start_timestamp <= %(end_date)s
                            and end_timestamp > %(start_date)s
                            and team_id in (select team_id from org_teams)
                            and component_type_id = %(pol_comp_type_id)s
                    )
            ))
        ''')
        query_params['serv_comp_type_id'] = configs.service_component_type_id
        query_params['pol_comp_type_id'] = configs.policy_component_type_id

    if user_id is not None:
        assert isinstance(user_id, int)
        conditions.append('''
        (
            inst.instanceid in (
                select instanceid from instance_assignments
                where user_policyid in (
                        select policyid from users
                        where start_timestamp <= %(end_date)s
                            and end_timestamp > %(start_date)s
                            and organization_id = %(org_id)s
                            and user_id = %(usr_id)s
                    ) or for_policyid not in (
                        select component_id from components_user_cannot_view(
                            %(end_date)s::timestamp, %(org_id)s, %(usr_id)s, %(pol_comp_type_id)s::smallint
                    )
                )
            ) and (
                td.serviceid is null
                or (
                    td.serviceid is not null
                    and td.serviceid not in (
                        select component_id from components_user_cannot_view(
                            %(end_date)s::timestamp, %(org_id)s, %(usr_id)s, %(serv_comp_type_id)s::smallint
                        )
                    )
                )
            )
        )
        ''')
        query_params['usr_id'] = user_id
        query_params['pol_comp_type_id'] = configs.policy_component_type_id
        query_params['serv_comp_type_id'] = configs.service_component_type_id

    if tags is not None:
        assert isinstance(tags, list)
        if len(tags) > 0:
            conditions.append('''
                td.taskid in (select taskid from task_labels where label_type = 'tags' and label = any(%(tgs)s))
            ''')
            query_params['tgs'] = tags

    query = '''
            with org_teams as (
                select team_id from teams
                where start_timestamp <= %(end_date)s
                    and end_timestamp > %(start_date)s
                    and organization_id = %(org_id)s
            )
            , t1 as (
                select inst.instanceid, inst.organization_instanceid, inst.instance_timestamp, inst.resolved_on,
                    inst.last_level, td.taskid, td.title, td.urgency_level, td.serviceid,
                    sd.service_name, sd.service_ref_id
                from task_instances as inst
                join tasks as td
                    on td.taskid = inst.taskid
                        and td.current_version
                left join services as sd
                    on td.serviceid is not null
                        and sd.serviceid = td.serviceid
                        and sd.start_timestamp <= inst.instance_timestamp
                        and sd.end_timestamp > inst.instance_timestamp
                where inst.instance_date >= %(start_date)s
                    and inst.instance_date <= %(end_date)s
                    and inst.organization_id = %(org_id)s
                    and inst.is_open = false
                    and td.related_taskid is null
                    {0}
            )
            , t2 as(
                select tal.taskid, array_agg(label) as tags
                from task_labels as tal
                join t1
                on tal.taskid = t1.taskid
                    and start_timestamp <= t1.instance_timestamp
                    and end_timestamp > t1.instance_timestamp
                where label_type = %(tag_lbl)s
                group by tal.taskid
            )
            , t3 as (
                select instanceid, json_agg(json_build_object(
                    'user_policyid', user_policyid,
                    'for_policyid', for_policyid,
                    'valid_start', ia.start_timestamp,
                    'valid_end', ia.end_timestamp,
                    'timezone', users.user_timezone
                )) as inst_assignees
                from instance_assignments as ia
                join users
                    on users.policyid = ia.user_policyid
                        and users.start_timestamp <= ia.start_timestamp
                        and users.end_timestamp > ia.start_timestamp
                where instanceid in (select instanceid from t1)
                group by instanceid
            )
            , t4 as (
                select instanceid, json_agg(json_build_object(
                    'event_timestamp', event_timestamp,
                    'event_type', event_type,
                    'event_method', event_method,
                    'event_by', (event_log::json->>'event_by')::int,
                    'display_name', users.first_name || ' ' || users.last_name
                ) order by event_timestamp) as inst_events
                from instance_events
                left join users
                    on (event_log::json ->> 'event_by')::int = users.user_id
                        and users.start_timestamp <= event_timestamp
                        and users.end_timestamp > event_timestamp
                where instanceid in (select instanceid from t1)
                group by instanceid
            )
            , t5 as (
                select instanceid, json_agg(json_build_object(
                    'business_service_id', iibs.business_serviceid,
                    'business_service_name', bss.service_name,
                    'business_service_ref_id', bss.business_service_ref_id,
                    'valid_start', iibs.start_timestamp,
                    'valid_end', iibs.end_timestamp
                )) as inst_impact
                from instance_impacted_business_services as iibs
                join business_services as bss
                    on bss.business_serviceid = iibs.business_serviceid
                        and bss.start_timestamp <= iibs.start_timestamp
                        and bss.end_timestamp > iibs.start_timestamp
                where instanceid in (select instanceid from t1)
                group by instanceid
            )
            select t1.*, t2.tags, t3.inst_assignees, t4.inst_events, t5.inst_impact
            from t1
            left join t2 using(taskid)
            left join t3 using(instanceid)
            left join t4 using(instanceid)
            left join t5 using(instanceid);
            '''.format(' and ' + ' and '.join(conditions) if len(conditions) > 0 else '')
    try:
        result = conn.fetch(query, query_params)

        data = []
        for inst_id, org_inst_id, inst_time, res_on, level, task_id, title, urg_lev, srv_id, srv_name, srv_ref_id, \
                tags, inst_assignees, inst_events, inst_imp_bus in result:

            # convert string timestamps from json aggregation to datetime.datetime objects
            if inst_events is not None:
                for item in inst_events:
                    item[var_names.event_timestamp] = times.get_timestamp_from_string(
                        item[var_names.event_timestamp])

            if inst_assignees is not None:
                for item in inst_assignees:
                    item[var_names.valid_start] = times.get_timestamp_from_string(item[var_names.valid_start])
                    item[var_names.valid_end] = times.get_timestamp_from_string(item[var_names.valid_end])

            if inst_imp_bus is not None:
                for item in inst_imp_bus:
                    item[var_names.valid_start] = times.get_timestamp_from_string(item[var_names.valid_start])
                    item[var_names.valid_end] = times.get_timestamp_from_string(item[var_names.valid_end])
                    item[var_names.business_service_ref_id] = key_manager.conceal_reference_key(
                        uuid.UUID(item[var_names.business_service_ref_id]))

            data.append({
                var_names.instance_id: inst_id,
                var_names.organization_instance_id: org_inst_id,
                var_names.instance_timestamp: inst_time,
                var_names.resolved_on: res_on,
                var_names.assignee_level: level,
                var_names.task_id: task_id,
                var_names.task_title: title,
                var_names.urgency_level: urg_lev,
                var_names.service_id: srv_id,
                var_names.service_name: srv_name,
                var_names.service_ref_id: None if srv_ref_id is None else key_manager.conceal_reference_key(srv_ref_id),
                var_names.tags: tags,
                var_names.assignees: inst_assignees,
                var_names.events: inst_events,
                var_names.impacted_business_services: inst_imp_bus
            })

        return data
    except psycopg2.DatabaseError:
        raise
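

# Usage sketch only: shows how the per-instance dicts returned above can be reduced to a simple
# mean-time-to-resolve figure using only keys this module sets (instance_timestamp, resolved_on).
def _example_mean_time_to_resolve(conn, start_date, end_date, org_id):
    instances = get_instances_for_analysis(conn, start_date, end_date, org_id)
    durations = [
        (inst[var_names.resolved_on] - inst[var_names.instance_timestamp]).total_seconds()
        for inst in instances if inst[var_names.resolved_on] is not None
    ]
    return (sum(durations) / len(durations)) if len(durations) > 0 else None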


def get_monthly_instance_count(conn, start_date, end_date, org_id, urgencies=None, service_ids=None, user_pol_id=None,
                               esc_pol_ids=None):
    '''
    Get the number of instances that have occurred monthly within a given period of time.
    :param conn: db connection
    :param start_date: (datetime.date) period start date
    :param end_date: (datetime.date) period end date
    :param org_id: (int) organization ID
    :param urgencies: (list) of urgency levels
    :param service_ids: (list) of service IDs to filter by
    :param user_pol_id: (int) user policy ID to filter by
    :param esc_pol_ids: (list) of escalation policy IDs to filter by
    :return: (dict) -> {(year, month): count, ...}
    '''
    query_params = {'start_date': start_date, 'end_date': end_date, 'org_id': org_id}
    conditions = []

    if urgencies is not None:
        assert isinstance(urgencies, list)
        assert set(urgencies).issubset(set(configs.allowed_urgency_levels))
        conditions.append(" td.urgency_level = any(%(urg_list)s)")
        query_params['urg_list'] = urgencies

    if service_ids is not None:
        conditions.append(" td.serviceid = any(%(srv_list)s) ")
        query_params['srv_list'] = helpers.get_int_list(service_ids)

    if user_pol_id is not None:
        assert isinstance(user_pol_id, int)
        conditions.append('''
            inst.instanceid in (select instanceid from instance_assignments where user_policyid = %(user_pol_id)s)
        ''')
        query_params['user_pol_id'] = user_pol_id

    if esc_pol_ids is not None:
        conditions.append('''
            inst.instanceid in (select instanceid from instance_assignments where for_policyid = any(%(esc_ids)s))
        ''')
        query_params['esc_ids'] = esc_pol_ids

    query = '''
            select extract(year from inst.instance_date) as year,
                extract(month from inst.instance_date) as month,
                count(inst.instanceid) as total
            from task_instances as inst
            join tasks as td
                on td.taskid = inst.taskid
                    and td.start_timestamp <= inst.resolved_on
                    and td.end_timestamp > inst.resolved_on - interval '1 second'
            where inst.instance_date >= %(start_date)s
                and inst.instance_date <= %(end_date)s
                and inst.organization_id = %(org_id)s
                {0}
            group by 1, 2
            order by 1, 2;
            '''.format(' and ' + ' and '.join(conditions) if len(conditions) > 0 else '')
    try:
        result = conn.fetch(query, query_params)

        data = dict()
        check_date = start_date
        while (check_date.year, check_date.month) <= (end_date.year, end_date.month):
            data[(check_date.year, check_date.month)] = 0
            check_date = check_date + relativedelta(months=1)

        for year, month, total in result:
            key = (year, month)
            if key in data:
                data[key] = total

        return data
    except psycopg2.DatabaseError:
        raise
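

# Usage sketch only: the {(year, month): count} dict returned above is unordered, so a chart series
# is usually built by sorting the keys first. Purely illustrative.
def _example_monthly_series(conn, start_date, end_date, org_id):
    counts = get_monthly_instance_count(conn, start_date, end_date, org_id)
    return [('{0}-{1:02d}'.format(year, month), counts[(year, month)]) for year, month in sorted(counts)]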


def get_daily_instance_count(conn, start_date, end_date, org_id, urgencies=None, service_ids=None, user_pol_id=None,
                             esc_pol_ids=None):
    '''
    Get the daily total occurrences of instances within a given period of time.
    :param conn: db connection
    :param start_date: (datetime.date) period start date
    :param end_date: (datetime.date) period end date
    :param org_id: (int) organization ID
    :param urgencies: (list) of urgency levels
    :param service_ids: (list) of service IDs to filter by
    :param user_pol_id: (int) user policy ID to filter by
    :param esc_pol_ids: (list) of escalation policy IDs to filter by
    :return: (dict) -> {date: count, ...}
    '''
    query_params = {'start_date': start_date, 'end_date': end_date, 'org_id': org_id}
    conditions = []

    if urgencies is not None:
        assert isinstance(urgencies, list)
        assert set(urgencies).issubset(set(configs.allowed_urgency_levels))
        conditions.append(" td.urgency_level = any(%(urg_list)s)")
        query_params['urg_list'] = urgencies

    if service_ids is not None:
        conditions.append(" td.serviceid = any(%(srv_list)s) ")
        query_params['srv_list'] = helpers.get_int_list(service_ids)

    if user_pol_id is not None:
        assert isinstance(user_pol_id, int)
        conditions.append('''
            inst.instanceid in (select instanceid from instance_assignments where user_policyid = %(user_pol_id)s)
        ''')
        query_params['user_pol_id'] = user_pol_id

    if esc_pol_ids is not None:
        conditions.append('''
            inst.instanceid in (select instanceid from instance_assignments where for_policyid = any(%(esc_ids)s))
        ''')
        query_params['esc_ids'] = esc_pol_ids

    query = '''
            select instance_date, count(instanceid)
            from task_instances as inst
            join tasks as td
                on td.taskid = inst.taskid
                    and td.start_timestamp <= inst.resolved_on
                    and td.end_timestamp > inst.resolved_on - interval '1 second'
            where inst.instance_date >= %(start_date)s
                and inst.instance_date <= %(end_date)s
                and inst.organization_id = %(org_id)s
                {0}
            group by inst.instance_date;
            '''.format(' and ' + ' and '.join(conditions) if len(conditions) > 0 else '')
    try:
        result = conn.fetch(query, query_params)

        data = dict()
        for dt_, count in result:
            data[dt_] = count
        return data
    except psycopg2.DatabaseError:
        raise
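

# Usage sketch only: get_daily_instance_count returns entries only for days that had instances, so
# a caller wanting a continuous series typically zero-fills the missing days, as sketched here.
def _example_daily_series(conn, start_date, end_date, org_id):
    counts = get_daily_instance_count(conn, start_date, end_date, org_id)
    series = []
    day = start_date
    while day <= end_date:
        series.append((day, counts.get(day, 0)))
        day = day + datetime.timedelta(days=1)
    return series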


def get_user_escalation_policy_ids_in_period(conn, start_date, end_date, organization_id, user_id):
    '''
    Get the list of escalation policies that a user is part of in a given period.
    :param conn: db connection
    :param start_date: (datetime.date) period start
    :param end_date: (datetime.date) period end
    :param organization_id: (int) ID of the organization
    :param user_id: (int) ID of the user
    :return: (list of list) -> [[policy name, policy ID], ...]
    '''
    assert isinstance(start_date, datetime.date)
    assert isinstance(end_date, datetime.date)
    assert isinstance(organization_id, int)
    assert isinstance(user_id, int)

    query = '''
            select policy_name, policyid from policies
            where start_timestamp <= %(end_date)s
                and end_timestamp > %(start_date)s
                and is_group_policy = true
                and organization_id = %(org_id)s
                and policyid in (
                    select policyid from policy_levels
                    where start_timestamp <= %(end_date)s
                        and end_timestamp > %(start_date)s
                        and routineid in (
                            select routineid from routine_rotations
                            where start_timestamp <= %(end_date)s
                                and end_timestamp > %(start_date)s
                                and assignee_user_id = %(usr_id)s
                        )
                )
            '''
    query_params = {'start_date': start_date, 'end_date': end_date, 'org_id': organization_id, 'usr_id': user_id}
    try:
        result = conn.fetch(query, query_params)
        data = []
        for name_, id_ in result:
            data.append([name_, id_])
        return data
    except psycopg2.DatabaseError:
        raise
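

# Usage sketch only: shows how the policy IDs returned above can feed the esc_pol_ids filter of
# get_instances_for_analysis to analyse only incidents a given user could have been paged for.
def _example_user_scoped_instances(conn, start_date, end_date, organization_id, user_id):
    policies = get_user_escalation_policy_ids_in_period(conn, start_date, end_date, organization_id, user_id)
    esc_pol_ids = [policy_id for _, policy_id in policies]
    if len(esc_pol_ids) == 0:
        return []
    return get_instances_for_analysis(conn, start_date, end_date, organization_id, esc_pol_ids=esc_pol_ids)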
