ai_member_xiaokui/scripts/sync_wechat_feedback_minutely.sh
2026-04-18 08:10:01 +08:00

146 lines
4.2 KiB
Bash
Executable File
Raw Permalink Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

#!/bin/bash
# Task: sync WeChat user feedback to the collection spreadsheet once per minute.
# Schedule: * * * * * (cron, every minute)
# Owner: Agent xiaokui
#
# NOTE(review): set +e is deliberate — the script keeps running past individual
# failures; critical steps below must check their own exit codes.
set +e
# Environment configuration
export PATH=/root/.nvm/versions/node/v24.14.0/bin:$PATH
LOG_FILE="/var/log/sync_wechat_feedback.log"
WORKSPACE="/root/.openclaw/workspace-xiaokui"
LAST_SYNC_ID_FILE="$WORKSPACE/data/last_wechat_sync_id"
TMP_DATA="$WORKSPACE/tmp/wechat_feedback.json"
# Ensure the state and tmp directories exist; otherwise the first-run
# redirections below fail silently under `set +e`.
mkdir -p "$WORKSPACE/data" "$WORKSPACE/tmp"
# Database configuration (read-only account)
# SECURITY(review): credentials are hardcoded in an executable script; prefer
# sourcing them from a chmod-600 env file so they don't leak via VCS or backups.
DB_HOST="bj-cdb-8frbdwju.sql.tencentcdb.com"
DB_PORT="25413"
DB_USER="read_only"
DB_PASS="fdsfiidier^\$*hjfdijjd232"
DB_NAME="vala_test"
# Target spreadsheet configuration
TARGET_SPREADSHEET_TOKEN="AOxbsifk3hybRZteGowcMxNnnqc"
TARGET_SHEET_ID="f17380"
# Append a timestamped line to LOG_FILE ($1 = message text).
log() {
printf '[%s] %s\n' "$(date +'%Y-%m-%d %H:%M:%S')" "$1" >> "$LOG_FILE"
}
# Initialise the last-synced-ID checkpoint on first run.
if [ ! -f "$LAST_SYNC_ID_FILE" ]; then
echo "0" > "$LAST_SYNC_ID_FILE"
fi
LAST_SYNC_ID=$(cat "$LAST_SYNC_ID_FILE")
# The checkpoint is interpolated directly into SQL below, so a corrupted file
# must be rejected (prevents broken queries / SQL injection); reset to 0.
if ! [[ "$LAST_SYNC_ID" =~ ^[0-9]+$ ]]; then
log "⚠️ 上次同步ID非法($LAST_SYNC_ID),重置为0"
LAST_SYNC_ID=0
echo "0" > "$LAST_SYNC_ID_FILE"
fi
log "=== 微信反馈同步任务开始 ==="
log "上次同步最大ID$LAST_SYNC_ID"
# 1. Fetch feedback rows newer than the checkpoint (tab-separated, no header).
log "读取微信新增反馈数据..."
# A failed query would leave an empty file that is indistinguishable from
# "no new data" — check the exit status explicitly and abort instead.
if ! mysql -h "$DB_HOST" -P "$DB_PORT" -u "$DB_USER" -p"$DB_PASS" "$DB_NAME" -N -e "
SELECT id, msg_time, sender_name, msg_type, content
FROM wechat_group_message
WHERE id > $LAST_SYNC_ID
ORDER BY msg_time DESC;" > /tmp/wechat_raw.txt; then
log "❌ MySQL 查询失败,退出"
exit 1
fi
if [ ! -s /tmp/wechat_raw.txt ]; then
log "无新增微信反馈数据,退出"
exit 0
fi
# 2. Convert the raw TSV rows into a JSON array of 15-column spreadsheet rows.
log "转换数据格式..."
NEW_DATA=$(awk '
BEGIN {
print "["
first = 1
}
{
if (first == 0) print ","
first = 0
id = $1
# msg_time is a DATETIME: date in $2, clock in $3.
time = $2 " " $3
sender = $4
type = $5
# Everything from the 6th field onward is the message content.
# NOTE(review): index($0,$6) mis-slices if the content text also appears
# earlier in the line — acceptable for feedback data, but confirm.
content = substr($0, index($0,$6))
# Route the content into the column matching its message type.
text = ""
image = ""
video = ""
if (type == "text") {
text = content
} else if (type == "image") {
image = content
} else if (type == "video") {
video = content
} else {
text = "[" type "] " content
}
# Escape backslashes FIRST, then quotes — otherwise content containing a
# backslash produces invalid JSON (and quote-escapes would be doubled).
gsub(/\\/, "\\\\", text)
gsub(/\\/, "\\\\", image)
gsub(/\\/, "\\\\", video)
gsub(/"/, "\\\"", text)
gsub(/"/, "\\\"", image)
gsub(/"/, "\\\"", video)
printf "[\"\", \"\", \"\", \"\", \"%s\", \"微信\", \"%s\", \"\", \"\", \"%s\", \"%s\", \"%s\", \"\", \"待确认\", \"\"]", time, sender, text, image, video
}
END {
print "\n]"
}' /tmp/wechat_raw.txt)
NEW_COUNT=$(echo "$NEW_DATA" | jq length)
# BUG FIX: rows are ordered by msg_time DESC, so the LAST line holds the
# oldest timestamp — its id is not necessarily the maximum. Taking tail -1
# could move the checkpoint backwards and re-sync rows forever. Scan for the
# true maximum id instead.
NEW_MAX_ID=$(awk '$1+0 > max { max = $1+0 } END { print max+0 }' /tmp/wechat_raw.txt)
log "发现 $NEW_COUNT 条新增微信反馈数据最大ID$NEW_MAX_ID"
# 3. Read the sheet's existing rows (Feishu + WeChat feedback combined).
log "读取目标表格现有数据..."
TARGET_RAW=$(LARKSUITE_CLI_CONFIG_DIR=/root/.openclaw/credentials/xiaoyan lark-cli sheets +read --spreadsheet-token "$TARGET_SPREADSHEET_TOKEN" --range "$TARGET_SHEET_ID!A2:O" --as bot)
# BUG FIX: if this read fails, TARGET_DATA silently collapses to [] and the
# steps below would CLEAR the sheet and rewrite it with only the new rows —
# destroying every existing record. Abort instead of risking data loss.
if [ $? -ne 0 ] || [ -z "$TARGET_RAW" ]; then
log "❌ 读取目标表格失败,中止以避免覆盖现有数据"
exit 1
fi
# Keep only rows whose first column (sequence number) is populated.
TARGET_DATA=$(echo "$TARGET_RAW" | jq '.data.valueRange.values | map(select(.[0] != null and .[0] != ""))')
if [ "$TARGET_DATA" == "null" ] || [ -z "$TARGET_DATA" ]; then
TARGET_DATA="[]"
fi
# 4. Merge new and existing rows, newest first (column index 4 = timestamp).
echo "$NEW_DATA" > /tmp/new.json
echo "$TARGET_DATA" > /tmp/existing.json
ALL_DATA=$(jq -s 'add | sort_by(.[4]) | reverse' /tmp/new.json /tmp/existing.json)
# 5. Regenerate the 1-based sequence number in column 0.
ALL_DATA=$(echo "$ALL_DATA" | jq 'to_entries | map(.value[0] = (.key + 1 | tostring) | .value)')
echo "$ALL_DATA" > "$TMP_DATA"
ROW_COUNT=$(echo "$ALL_DATA" | jq length)
# Data starts at sheet row 2, so the last row index is count + 1.
END_ROW=$((ROW_COUNT + 1))
# 6. Clear the old rows, then write the merged data back.
log "写入合并后的数据,共 $ROW_COUNT 条..."
# Build 200 empty rows ([[],[],...]) to blank out A2:O201 before rewriting.
# NOTE(review): rows beyond 201 are never cleared — if the sheet can exceed
# 200 records, stale rows may linger below the rewritten range; confirm limit.
EMPTY_DATA=$(printf '[]%.0s' {1..200} | jq -s '.')
LARKSUITE_CLI_CONFIG_DIR=/root/.openclaw/credentials/xiaoyan lark-cli sheets +write \
--spreadsheet-token "$TARGET_SPREADSHEET_TOKEN" \
--range "$TARGET_SHEET_ID!A2:O201" \
--values "$EMPTY_DATA" \
--as bot >/dev/null 2>&1
# Write the merged rows. BUG FIX: only advance the checkpoint when the write
# succeeds — advancing it after a failed write would skip those feedback rows
# forever. On failure the old checkpoint is kept so the next run retries.
if LARKSUITE_CLI_CONFIG_DIR=/root/.openclaw/credentials/xiaoyan lark-cli sheets +write \
--spreadsheet-token "$TARGET_SPREADSHEET_TOKEN" \
--range "$TARGET_SHEET_ID!A2:O$END_ROW" \
--values "$(cat "$TMP_DATA")" \
--as bot; then
# 7. Advance the last-synced-ID checkpoint.
echo "$NEW_MAX_ID" > "$LAST_SYNC_ID_FILE"
log "✅ 同步完成:新增 $NEW_COUNT 条微信反馈,当前总记录数 $ROW_COUNT 条,已按时间倒序排列"
else
log "❌ 写入目标表格失败,保留上次同步ID以便下次重试"
fi
# Remove intermediate files regardless of outcome.
rm -f /tmp/wechat_raw.txt /tmp/new.json /tmp/existing.json "$TMP_DATA"
log "=== 同步任务完成 ==="
exit 0