from flask import request, render_template, flash, redirect, url_for, session, send_file
import io
import logging

import pandas as pd
import mysql.connector
from sqlalchemy import create_engine, MetaData, Table, select, inspect
from werkzeug.security import generate_password_hash
from werkzeug.utils import secure_filename

from app.models.model import User, DBData, db, data
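
# These view functions redirect into 'auth_blueprint' routes, so they are
# assumed to be registered on that blueprint elsewhere in the app. A minimal
# wiring sketch (the route paths are illustrative, not confirmed):
#
#     auth_blueprint.add_url_rule('/register', view_func=register)
#     auth_blueprint.add_url_rule('/users/create', view_func=create, methods=['POST'])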

def register():
    return render_template('register.html')

def add_user():
    return render_template('create_user.html')

def new_create():
    username = request.form.get('username')
    email = request.form.get('email')
    password = request.form.get('password')
    hashed_password = generate_password_hash(password)
    created_by = session.get('id')
    # add_users expects a role argument; 'user' is an assumed default here,
    # since the self-registration form does not submit a role.
    add_users(username, 'user', email, hashed_password, created_by)
    flash('User created successfully')
    return redirect(url_for('auth_blueprint.login'))

def create():
    username = request.form.get('username')
    role = (request.form.get('role') or '').lower()
    email = request.form.get('email')
    password = request.form.get('password')
    hashed_password = generate_password_hash(password)
    created_by = session.get('id')
    add_users(username, role, email, hashed_password, created_by)
    flash('User added successfully')
    return redirect(url_for('auth_blueprint.users_display'))

def add_users(username, role, email, hashed_password, created_by):
    user = User(username=username, role=role, email=email, password=hashed_password, created_by=created_by)
    try:
        db.session.add(user)
        db.session.commit()
    except Exception as e:
        # Roll back and record the failure instead of swallowing it silently
        db.session.rollback()
        logging.error(f"Failed to create user {username!r}: {e}")

def edit_user(user_id):
    # Handle GET request to display the edit page
    if request.method == 'GET':
        user = User.query.get(user_id)
        if user is None:
            return "User not found!"
        return render_template('edit_user.html', user=user)

    # Handle POST request to update the user data
    elif request.method == 'POST':
        user = User.query.get(user_id)
        if user is None:
            return "User not found!"

        username = request.form.get('username')
        email = request.form.get('email')

        user.username = username
        user.email = email
        db.session.commit()
        flash('User edited successfully')
        return redirect(url_for('auth_blueprint.users_display'))

def edit_db(user_id):
    # Handle GET request to display the edit page
    if request.method == 'GET':
        db_data = DBData.query.get(user_id)
        return render_template('edit_db.html', db_data=db_data)

    # Handle POST request to update the user data
    elif request.method == 'POST':
        db_data = DBData.query.get(user_id)
        if db_data is None:
            return "User not found!"

        connection_name = request.form.get('connection_name')
        hostname = request.form.get('hostname')
        port = request.form.get('port')
        username = request.form.get('username')
        password = request.form.get('password')
        schemaname = request.form.get('schemaname')
        tablename = request.form.get('tablename')

        db_data.connection_name = connection_name
        db_data.hostname = hostname
        db_data.port = port
        db_data.username = username
        db_data.password = password
        db_data.schemaname = schemaname
        db_data.tablename = tablename

        db.session.commit()
        flash('Data Source edited successfully')
        return redirect(url_for('auth_blueprint.data_display'))
    
def upload_file():
    connection_name = DBData.query.with_entities(DBData.connection_name).all()
    return render_template('upload.html', connection_name=connection_name)

def save_file():
    file = request.files['excelFile']
    uploaded_by = session['id']
    # Sanitize the client-supplied filename before writing it to disk
    original_file = secure_filename(file.filename)
    file.save(original_file)

    # Create a new data object and associate it with the user
    uploaded_file = data(file_path=original_file, uploaded_by=uploaded_by)
    db.session.add(uploaded_file)
    db.session.commit()
    uploaded_file_id = uploaded_file.id

    # MySQL database connection configuration
    config = {
        'user': 'root',
        'password': '',
        'host': '127.0.0.1',
        'port': 3306,
        'database': 'data_uploader',
        'raise_on_warnings': True,
    }
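
    # NOTE: credentials are hard-coded for local development; in a deployed app
    # they would normally come from app config or environment variables, e.g.
    # os.environ.get('DB_PASSWORD', '') (a sketch; would require `import os`).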

    # Connect to the MySQL database
    connection = mysql.connector.connect(**config)
    cursor = connection.cursor()

    # Read the Excel file into a DataFrame
    df = pd.read_excel(original_file)

    # Add additional columns to the DataFrame
    df['added_by'] = uploaded_by
    df['file_id'] = uploaded_file_id

    df.columns = df.columns.str.strip()

    # Failed inserts are recorded via the module-level logging configuration at
    # the bottom of this file; a second logging.basicConfig call here would be
    # a no-op once the root logger is configured.

    # Loop through the rows and insert data into the MySQL table
    for _, row in df.iterrows():
        # Prepare a list to hold values of existing columns in the DataFrame
        values = []
        
        # List of columns you want to extract data from
        columns_to_extract = [
            'Airline_Code',
            'Airline',
            'Airline_PNR',
            'First_Name',
            'Last_Name',
            'E-mail',
            'Origin',
            'Destination',
            'Amount',
            'Refund_Amount',
            'Refund_Status',
            'Booking_Status',
            'Fare_Analyst',
            'Fare_amt',
            'all_Taxes',
        ]
        
        for col in columns_to_extract:
            if col in row.index:
                # If the column exists, append its value to the 'values' list
                value = row[col]
                # Convert NaN values to None
                value = None if pd.isna(value) else value
                values.append(value)
            else:
                # If the column does not exist, append None to the 'values' list
                values.append(None)
        
        # Append 'uploaded_by' and 'uploaded_file_id' to the 'values' list
        values.extend([uploaded_by, uploaded_file_id])
        # Prepare the parameterized INSERT statement (17 columns, 17 placeholders)
        sql = (
            "INSERT INTO ticket_data (Airline_Code, Airline, Airline_PNR, First_Name, "
            "Last_Name, `E-mail`, Origin, Destination, Amount, Refund_Amount, "
            "Refund_Status, Booking_Status, Fare_Analyst, Fare_amt, all_Taxes, "
            "added_by, file_id) "
            "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
        )
        try:
            cursor.execute(sql, values)
        except mysql.connector.IntegrityError as e:
            # A raw mysql.connector cursor raises mysql.connector.IntegrityError,
            # not SQLAlchemy's; MySQL errno 1062 signals a duplicate entry
            if e.errno == 1062:
                # Log only the identifying fields when a duplicate entry occurs
                logging.error(f"Duplicate Entry: Airline_PNR={values[2]}, First_Name={values[3]}, Last_Name={values[4]}")
            else:
                # Log other integrity errors with full details
                logging.error(f"Error: {e}. Data: {values}")
        except Exception as e:
            # Log any other exception with full details
            logging.error(f"Error: {e}. Data: {values}")

    # Commit the changes and close the connection
    connection.commit()
    cursor.close()
    connection.close()

    flash("Data inserted successfully.")
    return redirect(url_for('auth_blueprint.file_display'))
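
# NOTE: a bulk alternative to the per-row INSERT in save_file is
# pandas.DataFrame.to_sql (used in save_data_db below); it is faster for large
# files but gives up the per-row duplicate-entry logging performed above.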


def show_data(item_id):
    row_count = request.args.get('row_count')

    # The data source is currently hard-coded to connection id 1
    data_source = 1

    connection_details = get_connection_details(data_source)

    data_array = []
    if connection_details:
        hostname, port, username, schemaname, password = connection_details
        connection_string = f"mysql+mysqlconnector://{username}:{password}@{hostname}:{port}/{schemaname}"
        engine = create_engine(connection_string)

        # Reflect the table schema into a SQLAlchemy Table object
        # (SQLAlchemy 1.x reflection/select style, matching the rest of this module)
        table_name = DBData.query.get(data_source).tablename
        metadata = MetaData()
        db_table = Table(table_name, metadata, autoload=True, autoload_with=engine)

        # Load the rows whose file_id matches the requested item_id
        with engine.connect() as connection:
            query = select([db_table]).where(db_table.c.file_id == item_id)
            result_proxy = connection.execute(query)
            data_array = [dict(row) for row in result_proxy]

    # Drop the bookkeeping columns before display
    for row in data_array:
        row.pop('added_by', None)
        row.pop('file_id', None)

    # Define the page size (default 20 rows)
    row_count = 20 if row_count is None else int(row_count)

    # Get the current page from the request parameters
    current_page = int(request.args.get('page', 1))

    # Calculate the starting and ending indices for the current page
    start_index = (current_page - 1) * row_count
    end_index = start_index + row_count

    # Retrieve the data for the current page
    page_data = data_array[start_index:end_index]

    # Calculate the total number of pages
    total_rows = len(data_array)
    num_pages = (total_rows + row_count - 1) // row_count
    return render_template('show_data.html', page_data=page_data, current_page=current_page, num_pages=num_pages, row_count=row_count, start_index=start_index, end_index=end_index, item_id=item_id, data_source=data_source)
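
# show_data paginates via query-string parameters, e.g. (the route path is
# illustrative, not confirmed by this module):
#     GET /show_data/42?page=2&row_count=50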


def get_connection_details(data_source):
    connection = DBData.query.get(data_source)
    if connection:
        return (
            connection.hostname,
            connection.port,
            connection.username,
            connection.schemaname,
            connection.password,
        )
    else:
        return None
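
# get_connection_details returns fields in the order (hostname, port, username,
# schemaname, password); callers unpack the tuple positionally, so order matters.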

def check_table_exists(connection_id, tablename):
    connection_details = get_connection_details(connection_id)
    if connection_details:
        hostname, port, username, schemaname, password = connection_details
        connection_string = f"mysql+mysqlconnector://{username}:{password}@{hostname}:{port}/{schemaname}"
        engine = create_engine(connection_string)

        # Ask the inspector for the schema's table names; this avoids reflecting
        # the table just to test for its existence (Table.exists() is deprecated)
        inspector = inspect(engine)
        return tablename in inspector.get_table_names()

    return False
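
# Usage sketch (the connection id and table name are hypothetical):
#     if check_table_exists(1, 'ticket_data'):
#         ...safe to insert into ticket_data...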

def save_data_db():
    item_id = request.form.get('item_id')
    file_id = request.form.get('file_id')

    # Hard-coded local connection details for the target table
    username = 'root'
    password = ''
    hostname = '127.0.0.1'
    port = '3306'
    schemaname = 'data_uploader'
    tablename = 'ticket_detils'

    connection_string = f"mysql+mysqlconnector://{username}:{password}@{hostname}:{port}/{schemaname}"
    engine = create_engine(connection_string)

    # Look up the uploaded file's path and load it into a DataFrame
    path = data.query.get(item_id)
    file_path = path.file_path
    df = pd.read_excel(file_path)

    # engine.begin() commits on success and rolls back on error
    with engine.begin() as connection:
        df.to_sql(tablename, connection, if_exists='append', index=False)

    return "Data inserted successfully."

    
def download_excel():
    config = {
        'user': 'root',
        'password': '',
        'host': '127.0.0.1',
        'port': 3306,
        'database': 'data_uploader',
        'raise_on_warnings': True,
    }

    # Connect to the database
    connection = mysql.connector.connect(**config)
    cursor = connection.cursor()

    # Fetch every row from the ticket_data table
    query = 'SELECT * FROM ticket_data;'
    cursor.execute(query)
    rows = cursor.fetchall()
    # Capture the column names from the cursor so they always match SELECT *
    columns = cursor.column_names

    # Close the database connection
    cursor.close()
    connection.close()

    # Build a DataFrame from the fetched rows using the cursor's column names
    df = pd.DataFrame(rows, columns=columns)

    # Create an Excel file from the DataFrame
    excel_data = io.BytesIO()
    with pd.ExcelWriter(excel_data, engine='xlsxwriter') as writer:
        df.to_excel(writer, sheet_name='Sheet1', index=False)

    excel_data.seek(0)

    # Send the Excel file as a response to the client
    # (download_name requires Flask >= 2.0; older versions used attachment_filename)
    return send_file(
        excel_data,
        download_name='data.xlsx',
        as_attachment=True,
    )

# Module-level logging configuration; it runs at import time, and because
# logging.basicConfig only takes effect once per process, this is the
# configuration the logging.error calls above actually use.
logging.basicConfig(filename='error_log.txt', level=logging.WARNING, format='%(asctime)s - %(levelname)s - %(message)s')

