past-data-projects / personal_loan_credit_risk / 200-Feature_Creation / 200 - New Feature Engineering.ipynb
200 - New Feature Engineering.ipynb
Raw
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import pandas as pd\n",
    "import numpy as np\n",
    "import pymysql\n",
    "import matplotlib.pyplot as plt\n",
    "\n",
    "import warnings\n",
    "warnings.filterwarnings('ignore')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Load MySQL credentials from an external config file (keeps secrets out of the notebook).\n",
    "# NOTE(review): absolute SageMaker path is environment-specific -- breaks elsewhere.\n",
    "import configparser\n",
    "config = configparser.ConfigParser()\n",
    "config.read('/home/ec2-user/SageMaker/zhilal/config.ini')\n",
    "\n",
    "host = config['MYSQL-ROOT']['HOST']\n",
    "user = config['MYSQL-ROOT']['USER']\n",
    "password = config['MYSQL-ROOT']['PASSWORD']\n",
    "\n",
    "def connect_sql():\n",
    "    \"\"\"Open a new MySQL connection with the credentials loaded above.\n",
    "\n",
    "    Cursors yield rows as dicts (DictCursor). Callers are responsible\n",
    "    for closing the returned connection.\n",
    "    \"\"\"\n",
    "    cnx = pymysql.connect(host=host,\n",
    "                          user=user,\n",
    "                          password=password,\n",
    "                          cursorclass=pymysql.cursors.DictCursor)\n",
    "    return cnx"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Base population: one row per PL transaction (Jun-Sep 2019 snapshot, built upstream).\n",
    "raw_df = pd.read_parquet('data/interim/pl_jumbo_mini_junjulaugsep_27012020.parquet')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(150672, 18)"
      ]
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "raw_df.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "135508"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "raw_df['user_id'].nunique()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "150672"
      ]
     },
     "execution_count": 6,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "raw_df['trx_id'].nunique()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "Timestamp('2019-09-28 23:49:58')"
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "raw_df['transaction_date'].max()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Historical PL Transaction Features"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "def get_pl_history(base_df):\n",
    "    \"\"\"Pull each user's PL transaction history up to the observation cutoff\n",
    "    and pair it with the user's current transactions.\n",
    "\n",
    "    Parameters\n",
    "    ----------\n",
    "    base_df : DataFrame with 'user_id', 'trx_id' and 'transaction_date'.\n",
    "\n",
    "    Returns\n",
    "    -------\n",
    "    One row per (current trx, past trx) pair with 'time_diff' = seconds\n",
    "    between current and past transaction dates (negative when the past\n",
    "    trx actually happened later).\n",
    "    \"\"\"\n",
    "    # Deduplicate ids so the SQL IN-list stays as small as possible.\n",
    "    # NOTE(review): a single-user base_df renders as '(id,)', which MySQL rejects.\n",
    "    uids = tuple(base_df['user_id'].unique())\n",
    "    max_trx_date = base_df['transaction_date'].max()\n",
    "\n",
    "    # NOTE(review): values are interpolated straight into SQL; acceptable only\n",
    "    # because they come from our own database, never from user input.\n",
    "    query = f\"\"\"\n",
    "        SELECT user_id,\n",
    "               id AS trx_id,\n",
    "               status,\n",
    "               payment_type,\n",
    "               amount,\n",
    "               transaction_date\n",
    "        FROM l2alpha.transaction\n",
    "        WHERE user_id IN {uids}\n",
    "        AND transaction_type = 2\n",
    "        AND transaction_date <= '{max_trx_date}'\n",
    "    \"\"\"\n",
    "\n",
    "    # Close the connection explicitly instead of leaking it.\n",
    "    cnx = connect_sql()\n",
    "    try:\n",
    "        df = pd.read_sql(query, cnx)\n",
    "    finally:\n",
    "        cnx.close()\n",
    "\n",
    "    # '30_days' is the mini product; every other tenor counts as jumbo.\n",
    "    df['gen_payment_type'] = df['payment_type'].apply(lambda x: 'mini' if x=='30_days' else 'jumbo')\n",
    "    \n",
    "    out_df = base_df[['user_id', 'trx_id', 'transaction_date']].merge(df, how='left', on='user_id')\\\n",
    "                     .rename(columns={'transaction_date_x':'trx_dt_current',\n",
    "                                      'transaction_date_y':'trx_dt_past',\n",
    "                                      'trx_id_x':'trx_id',\n",
    "                                      'trx_id_y':'trx_id_ref'})\n",
    "    out_df['time_diff'] = (out_df['trx_dt_current'] - out_df['trx_dt_past']).dt.total_seconds()\n",
    "    return out_df"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [],
   "source": [
    "pl_hist = get_pl_history(raw_df)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Series nth PL transaction"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "def create_pl_series(df, base_df, pl_type='all'):\n",
    "    if pl_type != 'all':\n",
    "        df = df[df['gen_payment_type'] == pl_type]\n",
    "    \n",
    "    df = df[(df['time_diff'] >= 0) &\n",
    "            (df['status'] == 4)]\n",
    "    \n",
    "    series_df = df.groupby(['user_id', 'trx_id', 'gen_payment_type']).size()\\\n",
    "                  .unstack(level=[2]).fillna(0)\n",
    "    series_df.columns = [f'pl_series_{col}' for col in series_df.columns.values]\n",
    "    \n",
    "    series_df['pl_series_jumbomini'] = series_df['pl_series_jumbo'] + series_df['pl_series_mini']\n",
    "    series_df = series_df.reset_index()\n",
    "    return series_df"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [],
   "source": [
    "series_feats = create_pl_series(pl_hist, raw_df)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(150672, 5)"
      ]
     },
     "execution_count": 12,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "series_feats.shape"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Number of Previous PL Transactions"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
    "def create_prev_pl_feats(df, base_df, window='all'):\n",
    "    if isinstance(window, int):\n",
    "        df = df[(df['time_diff'] <= window*(24*60*60)) & (df['time_diff'] > 0)]\n",
    "        name = str(window)\n",
    "    elif window == 'all':\n",
    "        df = df[df['time_diff'] > 0]\n",
    "        name = 'ever'\n",
    "    \n",
    "    prev_df = df.groupby(['user_id', 'trx_id', 'status', 'gen_payment_type']).size()\\\n",
    "                .unstack(level=[3,2]).fillna(0)\n",
    "    prev_df.columns = [f'pl_trx_co_{col[0]}_{col[1]}_{name}' for col in prev_df.columns.values]\n",
    "    \n",
    "    keywords = [2, 4, 5, 6, 7, 11, 'jumbo', 'mini']\n",
    "    stat_cols_dict = {}\n",
    "    for word in keywords:\n",
    "        str_word = str(word) if isinstance(word, int) else word\n",
    "        stat_cols_dict[word] = [col for col in prev_df if str_word in col]\n",
    "    \n",
    "    name_status = 'pl_trx_co_jumbomini_{}_{}'\n",
    "    name_pl_type = 'pl_trx_co_{}_all_{}'\n",
    "    for k, v in stat_cols_dict.items():\n",
    "        if isinstance(k, int):\n",
    "            prev_df[name_status.format(str(k), name)] = prev_df[v].sum(axis=1)\n",
    "        elif isinstance(k, str):\n",
    "            prev_df[name_pl_type.format(k, name)] = prev_df[v].sum(axis=1)\n",
    "            \n",
    "    prev_df[f'pl_trx_co_jumbomini_all_{name}'] = prev_df[f'pl_trx_co_jumbo_all_{name}'] + \\\n",
    "                                         prev_df[f'pl_trx_co_mini_all_{name}']\n",
    "    \n",
    "    prev_df = base_df[['user_id', 'trx_id']].merge(prev_df, how='left', on=['user_id', 'trx_id']).fillna(0)\n",
    "    \n",
    "    return prev_df"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [],
   "source": [
    "prev_obs_feats_all = create_prev_pl_feats(pl_hist, raw_df, window='all')\n",
    "prev_obs_feats_180 = create_prev_pl_feats(pl_hist, raw_df, window=180)\n",
    "prev_obs_feats_90 = create_prev_pl_feats(pl_hist, raw_df, window=90)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(150672, 23)"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "data": {
      "text/plain": [
       "(150672, 23)"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "data": {
      "text/plain": [
       "(150672, 19)"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "display(prev_obs_feats_all.shape)\n",
    "display(prev_obs_feats_180.shape)\n",
    "display(prev_obs_feats_90.shape)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Early Settlement"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "def get_trx_settle_data(base_df):\n",
    "    \"\"\"Fetch completed (status 4) PL transactions joined with their loan\n",
    "    settlement records, and pair them with the current transactions.\n",
    "\n",
    "    Adds:\n",
    "      * due_date      -- transaction date plus the tenor implied by payment_type\n",
    "      * settle_to_due -- days the loan was settled before its due date\n",
    "      * time_diff     -- seconds between current and past transaction\n",
    "    \"\"\"\n",
    "    # Deduplicate ids so the SQL IN-list stays as small as possible.\n",
    "    uids = tuple(base_df['user_id'].unique())\n",
    "    max_trx_date = base_df['transaction_date'].max()\n",
    "\n",
    "    # NOTE(review): values are interpolated straight into SQL; acceptable only\n",
    "    # because they come from our own database, never from user input.\n",
    "    early_settle_q = f'''\n",
    "    SELECT *,\n",
    "            DATE(transaction_date) + INTERVAL payment_type_int DAY AS due_date\n",
    "    FROM\n",
    "        (SELECT tr.user_id, tr.trx_id, transaction_date, payment_type, transaction_type, status,\n",
    "                CASE WHEN payment_type = '30_days' THEN '30'\n",
    "                WHEN payment_type = '3_months' THEN '90'\n",
    "                WHEN payment_type = '6_months' THEN '180' \n",
    "                WHEN payment_type = '12_months' THEN '360' ELSE NULL END AS payment_type_int,\n",
    "\n",
    "                settlement_date, loan_amount, is_active\n",
    "        FROM\n",
    "            (SELECT user_id,\n",
    "                   id AS trx_id,\n",
    "                   transaction_date,\n",
    "                   payment_type,\n",
    "                   transaction_type,\n",
    "                   status\n",
    "            FROM l2alpha.`transaction`\n",
    "            WHERE user_id IN {uids}\n",
    "            AND transaction_type = 2\n",
    "            AND status = 4\n",
    "            AND transaction_date <= '{max_trx_date}'\n",
    "            ) AS tr\n",
    "\n",
    "            LEFT JOIN\n",
    "\n",
    "            (SELECT transaction_id AS trx_id,\n",
    "                   settlement_date,\n",
    "                   loan_amount,\n",
    "                   is_active\n",
    "            FROM l2alpha.loan\n",
    "            ) AS lo\n",
    "\n",
    "            ON tr.trx_id = lo.trx_id\n",
    "            ) AS base\n",
    "        WHERE loan_amount > 0 AND is_active = 1\n",
    "    '''\n",
    "    # Close the connection explicitly instead of leaking it.\n",
    "    cnx = connect_sql()\n",
    "    try:\n",
    "        query_df = pd.read_sql(early_settle_q, cnx)\n",
    "    finally:\n",
    "        cnx.close()\n",
    "    # Positive settle_to_due = settled before the due date (early).\n",
    "    # NOTE(review): assumes due_date is date-like and settlement_date is\n",
    "    # datetime64 as returned by the driver -- confirm dtypes if this breaks.\n",
    "    query_df['settle_to_due'] = (query_df['due_date'] - query_df['settlement_date'].dt.date).dt.days\n",
    "    df = base_df[['user_id', 'trx_id', 'transaction_date']].merge(query_df, how='left', on='user_id')\\\n",
    "                                         .rename(columns={'transaction_date_x':'trx_dt_current',\n",
    "                                          'transaction_date_y':'trx_dt_past',\n",
    "                                          'trx_id_x':'trx_id',\n",
    "                                          'trx_id_y':'trx_id_ref'})\n",
    "    df['time_diff'] = (df['trx_dt_current'] - df['trx_dt_past']).dt.total_seconds()\n",
    "    # Chronological order per user so downstream 'last'/shift logic is well-defined.\n",
    "    df = df.sort_values(['user_id', 'trx_dt_past']).reset_index(drop=True)\n",
    "    return df"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "trx_settle = get_trx_settle_data(raw_df)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "def create_early_settle_feats(df, base_df, window='all'):\n",
    "    if isinstance(window, int):\n",
    "        df = df[(df['time_diff'] <= window*(24*60*60)) & (df['time_diff'] > 0)]\n",
    "        name = str(window)\n",
    "    elif window == 'all':\n",
    "        df = df[df['time_diff'] > 0]\n",
    "        name = 'ever'\n",
    "    df = df.sort_values(['user_id', 'trx_id', 'trx_dt_past'])\n",
    "    \n",
    "    # Calculate how many times the user has settled early in the last XX days\n",
    "    co_df = df.copy()\n",
    "    co_df['flag_early_settle'] = np.where(co_df['settle_to_due'] >= 30, 1, 0)\n",
    "    co_df['flag_early_settle'] = np.where(co_df['settle_to_due'].isna(), np.nan, co_df['flag_early_settle'])\n",
    "    co_df = co_df.groupby('trx_id').agg({'flag_early_settle':'sum'}).reset_index()\n",
    "    co_df.columns = ['trx_id', f'early_settle_co_{name}']\n",
    "    \n",
    "    # Calculate the min, max, average, and median days the user has settled early in the last XX days\n",
    "    # Also get the last time the user settled early (how many days early).\n",
    "    es_df = df.groupby('trx_id').agg({'settle_to_due':[max, min, 'mean', 'median', 'last']})\n",
    "    col_names = ['settle_to_due_max_{}', 'settle_to_due_min_{}', 'settle_to_due_avg_{}', \n",
    "                 'settle_to_due_med_{}', 'settle_to_due_last_pl_{}']\n",
    "    es_df.columns = [col.format(name) for col in col_names]\n",
    "    es_df = es_df.reset_index()\n",
    "    out_df = base_df[['user_id', 'trx_id']].merge(es_df, how='left', on='trx_id')\\\n",
    "                                           .merge(co_df, how='left', on='trx_id')\n",
    "    return out_df"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [],
   "source": [
    "early_settle_feats_ever = create_early_settle_feats(trx_settle, raw_df, window='all')\n",
    "early_settle_feats_180 = create_early_settle_feats(trx_settle, raw_df, window=180)\n",
    "early_settle_feats_90 = create_early_settle_feats(trx_settle, raw_df, window=90)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(150672, 8)"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "data": {
      "text/plain": [
       "(150672, 8)"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "data": {
      "text/plain": [
       "(150672, 8)"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "display(early_settle_feats_ever.shape)\n",
    "display(early_settle_feats_180.shape)\n",
    "display(early_settle_feats_90.shape)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Time from last PL Settlement"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "def create_time_from_last_pl_feat(df):\n",
    "    time_from_last_pl = df[df['time_diff'] >= 0]\n",
    "    time_from_last_pl.loc[(time_from_last_pl['time_diff'] == 0) & \n",
    "                          (time_from_last_pl['settlement_date'].notna()), 'settlement_date'] = np.nan\n",
    "    time_from_last_pl = time_from_last_pl.sort_values(['user_id', 'trx_id', 'trx_dt_past'])\n",
    "\n",
    "    time_from_last_pl_grp = time_from_last_pl.groupby('trx_id').tail(2)\n",
    "    time_from_last_pl_grp['settle_shift'] = time_from_last_pl_grp['settlement_date'].shift()\n",
    "    time_from_last_pl_grp['time_from_prev_pl_settle'] = (time_from_last_pl_grp['trx_dt_current'] - \\\n",
    "                                                          time_from_last_pl_grp['settle_shift']).dt.total_seconds()/3600\n",
    "    \n",
    "    time_from_last_pl_final = time_from_last_pl_grp.groupby('trx_id').tail(1)\n",
    "    time_from_last_pl_final = time_from_last_pl_final[['user_id', 'trx_id', \n",
    "                                                       'time_from_prev_pl_settle']].reset_index(drop=True)\n",
    "    return time_from_last_pl_final"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "time_from_prev_pl_settle = create_time_from_last_pl_feat(trx_settle)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "display(time_from_prev_pl_settle.shape)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Sanity Check\n",
    "Make sure that all 'time_from_prev_pl_settle' with value NaN are first PL transactions."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {},
   "outputs": [],
   "source": [
    "# uids = tuple(raw_df['user_id'])\n",
    "# first_trx_q = f'''\n",
    "# SELECT user_id, id, MIN(transaction_date) AS first_trx_date\n",
    "# FROM l2alpha.`transaction`\n",
    "# WHERE user_id IN {uids}\n",
    "# AND transaction_type = 2\n",
    "# AND status = 4\n",
    "# GROUP BY user_id\n",
    "# '''\n",
    "\n",
    "# first_pl_trx = pd.read_sql(first_trx_q, connect_sql())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {},
   "outputs": [],
   "source": [
    "# nono = time_from_prev_pl_settle.merge(first_pl_trx[['user_id', 'id']])\n",
    "# nono['flag_first_pl'] = np.where((nono['trx_id'] == nono['id']), 1, 0)\n",
    "# nono['flag_first_pl'].value_counts()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {},
   "outputs": [],
   "source": [
    "# nono[nono['flag_first_pl'] == 1]['time_from_prev_pl_settle'].value_counts(dropna=False)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Date and day of week of transaction date"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Calendar features of the current transaction timestamp:\n",
    "# day of month (1-31) and day of week (0=Monday ... 6=Sunday).\n",
    "date_feats = (\n",
    "    raw_df[['user_id', 'trx_id', 'transaction_date']]\n",
    "    .assign(td_date=lambda d: d['transaction_date'].dt.day,\n",
    "            td_day_of_week=lambda d: d['transaction_date'].dt.dayofweek)\n",
    "    [['user_id', 'trx_id', 'td_date', 'td_day_of_week']]\n",
    ")"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Put all features in single dataframe"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Left-join every feature table onto the base population so the final\n",
    "# frame keeps exactly one row per current transaction.\n",
    "feature_frames = [\n",
    "    series_feats,\n",
    "    prev_obs_feats_all,\n",
    "    prev_obs_feats_180,\n",
    "    prev_obs_feats_90,\n",
    "    early_settle_feats_ever,\n",
    "    early_settle_feats_180,\n",
    "    early_settle_feats_90,\n",
    "    time_from_prev_pl_settle,\n",
    "    date_feats,\n",
    "]\n",
    "new_feats = raw_df[['user_id', 'trx_id']]\n",
    "for frame in feature_frames:\n",
    "    new_feats = new_feats.merge(frame, how='left', on=['user_id', 'trx_id'])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "new_feats"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(150672, 85)"
      ]
     },
     "execution_count": 30,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "new_feats.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "['early_settle_co_180',\n",
       " 'early_settle_co_90',\n",
       " 'early_settle_co_ever',\n",
       " 'pl_series_jumbo',\n",
       " 'pl_series_jumbomini',\n",
       " 'pl_series_mini',\n",
       " 'pl_trx_co_jumbo_11_180',\n",
       " 'pl_trx_co_jumbo_11_ever',\n",
       " 'pl_trx_co_jumbo_2_180',\n",
       " 'pl_trx_co_jumbo_2_90',\n",
       " 'pl_trx_co_jumbo_2_ever',\n",
       " 'pl_trx_co_jumbo_4_180',\n",
       " 'pl_trx_co_jumbo_4_90',\n",
       " 'pl_trx_co_jumbo_4_ever',\n",
       " 'pl_trx_co_jumbo_5_180',\n",
       " 'pl_trx_co_jumbo_5_90',\n",
       " 'pl_trx_co_jumbo_5_ever',\n",
       " 'pl_trx_co_jumbo_6_180',\n",
       " 'pl_trx_co_jumbo_6_90',\n",
       " 'pl_trx_co_jumbo_6_ever',\n",
       " 'pl_trx_co_jumbo_7_180',\n",
       " 'pl_trx_co_jumbo_7_ever',\n",
       " 'pl_trx_co_jumbo_all_180',\n",
       " 'pl_trx_co_jumbo_all_90',\n",
       " 'pl_trx_co_jumbo_all_ever',\n",
       " 'pl_trx_co_jumbomini_11_180',\n",
       " 'pl_trx_co_jumbomini_11_90',\n",
       " 'pl_trx_co_jumbomini_11_ever',\n",
       " 'pl_trx_co_jumbomini_2_180',\n",
       " 'pl_trx_co_jumbomini_2_90',\n",
       " 'pl_trx_co_jumbomini_2_ever',\n",
       " 'pl_trx_co_jumbomini_4_180',\n",
       " 'pl_trx_co_jumbomini_4_90',\n",
       " 'pl_trx_co_jumbomini_4_ever',\n",
       " 'pl_trx_co_jumbomini_5_180',\n",
       " 'pl_trx_co_jumbomini_5_90',\n",
       " 'pl_trx_co_jumbomini_5_ever',\n",
       " 'pl_trx_co_jumbomini_6_180',\n",
       " 'pl_trx_co_jumbomini_6_90',\n",
       " 'pl_trx_co_jumbomini_6_ever',\n",
       " 'pl_trx_co_jumbomini_7_180',\n",
       " 'pl_trx_co_jumbomini_7_90',\n",
       " 'pl_trx_co_jumbomini_7_ever',\n",
       " 'pl_trx_co_jumbomini_all_180',\n",
       " 'pl_trx_co_jumbomini_all_90',\n",
       " 'pl_trx_co_jumbomini_all_ever',\n",
       " 'pl_trx_co_mini_11_180',\n",
       " 'pl_trx_co_mini_11_ever',\n",
       " 'pl_trx_co_mini_2_180',\n",
       " 'pl_trx_co_mini_2_90',\n",
       " 'pl_trx_co_mini_2_ever',\n",
       " 'pl_trx_co_mini_4_180',\n",
       " 'pl_trx_co_mini_4_90',\n",
       " 'pl_trx_co_mini_4_ever',\n",
       " 'pl_trx_co_mini_5_180',\n",
       " 'pl_trx_co_mini_5_90',\n",
       " 'pl_trx_co_mini_5_ever',\n",
       " 'pl_trx_co_mini_6_180',\n",
       " 'pl_trx_co_mini_6_90',\n",
       " 'pl_trx_co_mini_6_ever',\n",
       " 'pl_trx_co_mini_7_180',\n",
       " 'pl_trx_co_mini_7_ever',\n",
       " 'pl_trx_co_mini_all_180',\n",
       " 'pl_trx_co_mini_all_90',\n",
       " 'pl_trx_co_mini_all_ever',\n",
       " 'settle_to_due_avg_180',\n",
       " 'settle_to_due_avg_90',\n",
       " 'settle_to_due_avg_ever',\n",
       " 'settle_to_due_last_pl_180',\n",
       " 'settle_to_due_last_pl_90',\n",
       " 'settle_to_due_last_pl_ever',\n",
       " 'settle_to_due_max_180',\n",
       " 'settle_to_due_max_90',\n",
       " 'settle_to_due_max_ever',\n",
       " 'settle_to_due_med_180',\n",
       " 'settle_to_due_med_90',\n",
       " 'settle_to_due_med_ever',\n",
       " 'settle_to_due_min_180',\n",
       " 'settle_to_due_min_90',\n",
       " 'settle_to_due_min_ever',\n",
       " 'td_date',\n",
       " 'td_day_of_week',\n",
       " 'time_from_prev_pl_settle',\n",
       " 'trx_id',\n",
       " 'user_id']"
      ]
     },
     "execution_count": 31,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "sorted(new_feats.columns.tolist())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Persist the assembled feature table for the downstream modelling notebooks.\n",
    "new_feats.to_parquet('data/interim/pl_model_v3_new_feats_27012020.parquet', compression='gzip')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.9.4"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}