#!/bin/bash
#
# 学而思 (Xueersi) weekly report pipeline:
#   1. Export last week's daily metrics (new users, valid orders, GSV)
#      from PostgreSQL to CSV via a psql \copy query.
#   2. Convert the CSV to an .xlsx workbook with pandas.
#   3. Upload the workbook to Feishu (Lark) and deliver it as a file message.
#
# Requires: GNU date (-d relative syntax), psql, python3 with pandas/openpyxl, curl.

set -euo pipefail

die() { printf '%s\n' "$*" >&2; exit 1; }

# --- Configuration ----------------------------------------------------------
# SECURITY NOTE(review): these credentials are committed in plain text.
# They should be rotated and supplied via the environment or a secrets
# manager; kept inline here only to preserve current behavior.
readonly PG_PASSWORD="LdfjdjL83h3h3^$&**YGG*"
readonly FEISHU_APP_ID="cli_a929ae22e0b8dcc8"
readonly FEISHU_APP_SECRET="OtFjMy7p3qE3VvLbMdcWidwgHOnGD4FJ"
readonly RECEIVE_OPEN_ID="ou_e63ce6b760ad39382852472f28fbe2a2"

# --- Reporting window -------------------------------------------------------
# Last Sunday minus 6 days through last Sunday: 7 calendar days inclusive.
START_DATE=$(date -d "last sunday -6 days" +%Y-%m-%d)
END_DATE=$(date -d "last sunday" +%Y-%m-%d)
# Exclusive upper bound for the SQL timestamp filters (END_DATE + 1 day),
# computed once instead of twice inside the query string.
NEXT_DAY=$(date -d "${END_DATE} +1 day" +%Y-%m-%d)
REPORT_DATE=$(date +%Y%m%d)
readonly CSV_PATH="/tmp/xueersi_weekly_data_${REPORT_DATE}.csv"

# Month/day components without leading zeros (GNU date %-m / %-d), used to
# build the report file names.
START_MONTH=$(date -d "${START_DATE}" +%-m)
START_DAY=$(date -d "${START_DATE}" +%-d)
END_MONTH=$(date -d "${END_DATE}" +%-m)
END_DAY=$(date -d "${END_DATE}" +%-d)

# The local path uses '-' (a '/' would act as a directory separator); the
# Feishu display name keeps the '/' form recipients expect.
readonly EXCEL_PATH="/tmp/学而思-瓦拉英语${START_MONTH}-${START_DAY}-${END_MONTH}-${END_DAY}.xlsx"
readonly FEISHU_FILE_NAME="学而思-瓦拉英语${START_MONTH}/${START_DAY}-${END_MONTH}/${END_DAY}.xlsx"

# Remove temp artifacts on every exit path, not just on success.
cleanup() { rm -f -- "${CSV_PATH}" "${EXCEL_PATH}"; }
trap cleanup EXIT

# 1. Export the weekly metrics to CSV.  The query emits one row per day in
#    the window plus a '合计' (total) row; refunded orders (refund status 3
#    with order status 4) are netted out of the valid-order count and GSV.
#    NB: psql's \copy meta-command must stay on a single line.
PGPASSWORD="${PG_PASSWORD}" psql \
  -h bj-postgres-16pob4sg.sql.tencentcdb.com -p 28591 \
  -U ai_member -d vala_bi \
  -c "\copy (WITH date_range AS (SELECT generate_series('${START_DATE}'::date, '${END_DATE}'::date, '1 day'::interval) AS stat_date), daily_new_users AS (SELECT DATE(created_at) AS stat_date, COUNT(DISTINCT id) AS new_user_count FROM bi_vala_app_account WHERE download_channel LIKE '%学而思%' AND created_at >= '${START_DATE} 00:00:00+08' AND created_at < '${NEXT_DAY} 00:00:00+08' AND deleted_at IS NULL GROUP BY DATE(created_at)), daily_orders AS (SELECT DATE(o.pay_success_date) AS stat_date, COUNT(DISTINCT o.id) AS total_order_count, COUNT(DISTINCT CASE WHEN r.status = 3 AND o.order_status = 4 THEN o.id END) AS refund_order_count, ROUND(SUM(o.pay_amount_int)/100.0, 2) AS gmv, ROUND(SUM(o.pay_amount_int)/100.0 - COALESCE(SUM(CASE WHEN r.status = 3 AND o.order_status = 4 THEN r.refund_amount_int ELSE 0 END)/100.0, 0), 2) AS gsv FROM bi_vala_order o LEFT JOIN bi_refund_order r ON o.out_trade_no = r.out_trade_no WHERE o.key_from = 'app-active-h5-0-0' AND o.sale_channel = 21 AND o.pay_success_date >= '${START_DATE} 00:00:00+08' AND o.pay_success_date < '${NEXT_DAY} 00:00:00+08' AND o.pay_success_date IS NOT NULL GROUP BY DATE(o.pay_success_date)), daily_data AS (SELECT TO_CHAR(d.stat_date, 'YYYY-MM-DD') AS 日期, COALESCE(u.new_user_count, 0) AS 新增用户数, COALESCE(o.total_order_count - o.refund_order_count, 0) AS 有效订单数, COALESCE(o.gsv, 0) AS GSV_元 FROM date_range d LEFT JOIN daily_new_users u ON d.stat_date = u.stat_date LEFT JOIN daily_orders o ON d.stat_date = o.stat_date) SELECT * FROM daily_data UNION ALL SELECT '合计' AS 日期, SUM(新增用户数) AS 新增用户数, SUM(有效订单数) AS 有效订单数, SUM(GSV_元) AS GSV_元 FROM daily_data ORDER BY 日期) TO '${CSV_PATH}' WITH (FORMAT csv, HEADER true, ENCODING 'UTF8');"

# 2. CSV -> Excel (requires pandas plus an xlsx engine such as openpyxl).
python3 -c "import pandas as pd; df = pd.read_csv('${CSV_PATH}'); df.to_excel('${EXCEL_PATH}', index=False);"

# 3. Obtain a Feishu tenant access token.
#    The '|| true' keeps a no-match in grep from tripping set -e/pipefail
#    before the intended error message is printed.
TOKEN_RESP=$(curl -s -X POST "https://open.feishu.cn/open-apis/auth/v3/tenant_access_token/internal" \
  -H "Content-Type: application/json" \
  -d "{\"app_id\":\"${FEISHU_APP_ID}\",\"app_secret\":\"${FEISHU_APP_SECRET}\"}")
TOKEN=$(printf '%s' "${TOKEN_RESP}" | grep -o '"tenant_access_token":"[^"]*"' | cut -d'"' -f4 || true)
[[ -n "${TOKEN}" ]] || die "ERROR: 获取token失败"

# 4. Upload the workbook.
#    NOTE(review): file_type is "xls" while the file is .xlsx — presumably
#    Feishu's enum uses "xls" for all Excel uploads; confirm against the API.
UPLOAD_RESP=$(curl -s -X POST "https://open.feishu.cn/open-apis/im/v1/files" \
  -H "Authorization: Bearer ${TOKEN}" \
  -F "file_type=xls" \
  -F "file_name=${FEISHU_FILE_NAME}" \
  -F "file=@${EXCEL_PATH}")
FILE_KEY=$(printf '%s' "${UPLOAD_RESP}" | grep -o '"file_key":"[^"]*"' | cut -d'"' -f4 || true)
[[ -n "${FILE_KEY}" ]] || die "ERROR: 上传文件失败"

# 5. Deliver the uploaded file as an IM message to the recipient.
#    The inner JSON is a string field, hence the double escaping.
SEND_RESP=$(curl -s -X POST "https://open.feishu.cn/open-apis/im/v1/messages?receive_id_type=open_id" \
  -H "Authorization: Bearer ${TOKEN}" \
  -H "Content-Type: application/json" \
  -d "{\"receive_id\":\"${RECEIVE_OPEN_ID}\",\"msg_type\":\"file\",\"content\":\"{\\\"file_key\\\":\\\"${FILE_KEY}\\\"}\"}")
MSG_ID=$(printf '%s' "${SEND_RESP}" | grep -o '"message_id":"[^"]*"' | cut -d'"' -f4 || true)
[[ -n "${MSG_ID}" ]] || die "ERROR: 发送消息失败"

# Temp files are removed by the EXIT trap.
echo "学而思周度报表发送成功,日期范围:${START_DATE} 至 ${END_DATE}"