package main
|
|
|
|
import (
|
|
"encoding/json"
|
|
"fmt"
|
|
"log"
|
|
"math/rand"
|
|
"strconv"
|
|
"strings"
|
|
"time"
|
|
|
|
"github.com/streadway/amqp"
|
|
tb "gopkg.in/tucnak/telebot.v2"
|
|
)
|
|
|
|
func MQGetMsgWorker(id int, msgs chan<- ChatWarsMessage) {
|
|
//log.Printf("MQGetMsgWorker[" + strconv.Itoa(id) + "] : Starting.")
|
|
var err error
|
|
c := MQClient{
|
|
User: cfg.Rabbit.User,
|
|
Password: cfg.Rabbit.Password,
|
|
Host: cfg.Rabbit.Host,
|
|
Path: cfg.Rabbit.Path,
|
|
SSL: false,
|
|
}
|
|
|
|
for true {
|
|
c.Connection, err = amqp.Dial("amqp://" + c.User + ":" + c.Password + "@" + c.Host + "/" + c.Path)
|
|
logOnError(err, "MQGetMsgWorker["+strconv.Itoa(id)+"] : Cannot open MQ connection")
|
|
if err != nil {
|
|
//c.Connection.Close()
|
|
time.Sleep(15 * time.Second)
|
|
continue
|
|
}
|
|
|
|
c.Channel, err = c.Connection.Channel()
|
|
logOnError(err, "MQGetMsgWorker["+strconv.Itoa(id)+"] : Cannot open MQ channel")
|
|
if err != nil {
|
|
c.Channel.Close()
|
|
c.Connection.Close()
|
|
time.Sleep(15 * time.Second)
|
|
continue
|
|
}
|
|
|
|
c.Queue, err = c.Channel.QueueDeclare(
|
|
"msg", // name
|
|
false, // durable
|
|
false, // delete when unused
|
|
false, // exclusive
|
|
false, // no-wait
|
|
nil, // arguments
|
|
)
|
|
logOnError(err, "MQGetMsgWorker["+strconv.Itoa(id)+"] : Failed to declare a queue")
|
|
if err != nil {
|
|
c.Channel.Close()
|
|
c.Connection.Close()
|
|
time.Sleep(15 * time.Second)
|
|
continue
|
|
}
|
|
|
|
m, err := c.Channel.Consume(
|
|
c.Queue.Name, // queue
|
|
"", // consumer
|
|
true, // auto-ack
|
|
false, // exclusive
|
|
false, // no-local
|
|
false, // no-wait
|
|
nil, // args
|
|
)
|
|
logOnError(err, "MQGetMsgWorker["+strconv.Itoa(id)+"] : Failed to register a consumer")
|
|
if err != nil {
|
|
c.Channel.Close()
|
|
c.Connection.Close()
|
|
time.Sleep(15 * time.Second)
|
|
continue
|
|
}
|
|
|
|
for d := range m {
|
|
var x ChatWarsMessage
|
|
//log.Printf("MQGetMsgWorker["+strconv.Itoa(id)+"] : Received a message: %s", string(d.Body))
|
|
err = json.Unmarshal(d.Body, &x)
|
|
logOnError(err, "MQGetMsgWorker["+strconv.Itoa(id)+"] : Can't unmarshal.\n"+string(d.Body))
|
|
if err == nil {
|
|
msgs <- x
|
|
}
|
|
}
|
|
|
|
c.Channel.Close()
|
|
c.Connection.Close()
|
|
|
|
}
|
|
|
|
log.Printf("MQGetMsgWorker[" + strconv.Itoa(id) + "] : Closing.")
|
|
|
|
}
|
|
|
|
func MQKeepAliveWorker() {
|
|
//log.Printf("MQKeepAliveWorker : Starting.")
|
|
var err error
|
|
c := MQClient{
|
|
User: cfg.Rabbit.User,
|
|
Password: cfg.Rabbit.Password,
|
|
Host: cfg.Rabbit.Host,
|
|
Path: cfg.Rabbit.Path,
|
|
SSL: false,
|
|
}
|
|
|
|
for true {
|
|
c.Connection, err = amqp.Dial("amqp://" + c.User + ":" + c.Password + "@" + c.Host + "/" + c.Path)
|
|
logOnError(err, "MQKeepAliveWorker : Cannot open MQ connection")
|
|
if err != nil {
|
|
time.Sleep(15 * time.Second)
|
|
continue
|
|
}
|
|
|
|
c.Channel, err = c.Connection.Channel()
|
|
logOnError(err, "MQKeepAliveWorker : Cannot open MQ channel")
|
|
if err != nil {
|
|
c.Channel.Close()
|
|
c.Connection.Close()
|
|
time.Sleep(15 * time.Second)
|
|
continue
|
|
}
|
|
|
|
c.Queue, err = c.Channel.QueueDeclare(
|
|
"keepalive", // name
|
|
false, // durable
|
|
false, // delete when unused
|
|
false, // exclusive
|
|
false, // no-wait
|
|
nil, // arguments
|
|
)
|
|
logOnError(err, "MQKeepAliveWorker : Failed to declare a queue")
|
|
if err != nil {
|
|
c.Channel.Close()
|
|
c.Connection.Close()
|
|
time.Sleep(15 * time.Second)
|
|
continue
|
|
}
|
|
|
|
m, err := c.Channel.Consume(
|
|
c.Queue.Name, // queue
|
|
"", // consumer
|
|
true, // auto-ack
|
|
false, // exclusive
|
|
false, // no-local
|
|
false, // no-wait
|
|
nil, // args
|
|
)
|
|
logOnError(err, "MQKeepAliveWorker] : Failed to register a consumer")
|
|
if err != nil {
|
|
c.Channel.Close()
|
|
c.Connection.Close()
|
|
time.Sleep(15 * time.Second)
|
|
continue
|
|
}
|
|
|
|
for d := range m {
|
|
x := MQKeepAlive{}
|
|
err = json.Unmarshal(d.Body, &x)
|
|
logOnError(err, "MQKeepAliveWorker : Can't unmarshal.\n"+string(d.Body))
|
|
if err == nil {
|
|
//log.Printf("MQKeepAliveWorker : Received message from %s (%d).\n", x.Nickname, x.TGUserID64)
|
|
if x.Date.Add(10 * time.Second).Before(time.Now()) {
|
|
// outdated keep-alive coming from client
|
|
} else if clt, ok := getLockedClient(x.TGUserID64, true); ok {
|
|
clt.HeartBeat = x.Date
|
|
clt.Build = x.Build
|
|
if clt.Active {
|
|
//log.Printf("MQKeepAliveWorker : Client %s (%d) already active.\n", clt.Login, clt.TGUserID64)
|
|
clt.Mux.Unlock()
|
|
} else {
|
|
clt.Login = x.Nickname
|
|
//log.Printf("MQKeepAliveWorker : Connecting to %s (%d).\n", clt.Login, clt.TGUserID64)
|
|
clt.MQ.User = cfg.Rabbit.User
|
|
clt.MQ.Password = cfg.Rabbit.Password
|
|
clt.MQ.Host = cfg.Rabbit.Host
|
|
clt.MQ.Path = x.Queue
|
|
clt.MQ.SSL = false
|
|
clt.MQ.Connection, err = amqp.Dial("amqp://" + clt.MQ.User + ":" + clt.MQ.Password + "@" + clt.MQ.Host + "/" + clt.MQ.Path)
|
|
logOnError(err, "MQKeepAliveWorker : Failed to connect to RabbitMQ")
|
|
if err != nil {
|
|
clt.MQ.Connection.Close()
|
|
} else {
|
|
clt.MQ.Channel, err = clt.MQ.Connection.Channel()
|
|
logOnError(err, "MQKeepAliveWorker : Failed to open a channel")
|
|
if err != nil {
|
|
clt.MQ.Channel.Close()
|
|
clt.MQ.Connection.Close()
|
|
} else {
|
|
clt.MQ.Queue, err = clt.MQ.Channel.QueueDeclare(
|
|
"msg", // name
|
|
false, // durable
|
|
false, // delete when unused
|
|
false, // exclusive
|
|
false, // no-wait
|
|
nil, // arguments
|
|
)
|
|
logOnError(err, "MQKeepAliveWorker : Failed to declare a queue")
|
|
if err != nil {
|
|
clt.MQ.Channel.Close()
|
|
clt.MQ.Connection.Close()
|
|
} else {
|
|
clt.Active = true
|
|
//log.Printf("MQKeepAliveWorker : Connected to %s.\n", x.Nickname)
|
|
}
|
|
}
|
|
}
|
|
clt.Mux.Unlock()
|
|
if clt.Active {
|
|
if clt.CWRole == `` {
|
|
c := TGCommand{
|
|
Type: commandSendMsg,
|
|
ToUserID64: x.TGUserID64,
|
|
Text: "Your client is connected.",
|
|
}
|
|
TGCmdQueue <- c
|
|
c = TGCommand{
|
|
Type: commandSendMsg,
|
|
ToUserID64: cfg.Bot.Admin,
|
|
Text: fmt.Sprintf("Client `%s` is connected.", x.Nickname),
|
|
}
|
|
TGCmdQueue <- c
|
|
|
|
clientSendCWMsg(x.TGUserID64, `🏅Me`)
|
|
} else {
|
|
// silent reconnection
|
|
}
|
|
} else {
|
|
c := TGCommand{
|
|
Type: commandSendMsg,
|
|
ToUserID64: cfg.Bot.Admin,
|
|
Text: fmt.Sprintf("Cannot connect to client `%s`.", x.Nickname),
|
|
}
|
|
TGCmdQueue <- c
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
c.Channel.Close()
|
|
c.Connection.Close()
|
|
}
|
|
|
|
log.Printf("MQKeepAliveWorker : Closing.")
|
|
|
|
}
|
|
|
|
func MQTGCmdWorker(id int, cmds <-chan TGCommand) {
|
|
//log.Printf("MQTGCmdWorker[" + strconv.Itoa(id) + "] : Starting.")
|
|
for c := range cmds {
|
|
if clt, ok := getLockedClient(c.FromUserID64, false); ok {
|
|
j, err := json.Marshal(c)
|
|
logOnError(err, "MQTGCmdWorker["+strconv.Itoa(id)+"] : Marshal(c)")
|
|
//log.Printf("MQTGCmdWorker["+strconv.Itoa(id)+"] : new command.\n%s\n", string(j))
|
|
for clt.MQ.Connection == nil || clt.MQ.Connection.IsClosed() {
|
|
clt.Active = false
|
|
log.Printf("MQTGCmdWorker : Resetting MQ connection for #%d.\n", c.FromUserID64)
|
|
clt.MQ.Connection, err = amqp.Dial("amqp://" + clt.MQ.User + ":" + clt.MQ.Password + "@" + clt.MQ.Host + "/" + clt.MQ.Path)
|
|
logOnError(err, "MQTGCmdWorker : Cannot open MQ connection")
|
|
if err != nil {
|
|
clt.MQ.Connection.Close()
|
|
time.Sleep(15 * time.Second)
|
|
continue
|
|
}
|
|
|
|
clt.MQ.Channel, err = clt.MQ.Connection.Channel()
|
|
logOnError(err, "MQTGCmdWorker : Cannot open MQ channel")
|
|
if err != nil {
|
|
clt.MQ.Channel.Close()
|
|
clt.MQ.Connection.Close()
|
|
time.Sleep(15 * time.Second)
|
|
continue
|
|
}
|
|
|
|
clt.MQ.Queue, err = clt.MQ.Channel.QueueDeclare(
|
|
"msg", // name
|
|
false, // durable
|
|
false, // delete when unused
|
|
false, // exclusive
|
|
false, // no-wait
|
|
nil, // arguments
|
|
)
|
|
logOnError(err, "MQTGCmdWorker : Failed to declare a queue")
|
|
if err != nil {
|
|
clt.MQ.Channel.Close()
|
|
clt.MQ.Connection.Close()
|
|
time.Sleep(15 * time.Second)
|
|
continue
|
|
}
|
|
|
|
}
|
|
err = clt.MQ.Channel.Publish(
|
|
"", // exchange
|
|
clt.MQ.Queue.Name, // routing key
|
|
false, // mandatory
|
|
false, // immediate
|
|
amqp.Publishing{
|
|
ContentType: "application/json",
|
|
Body: []byte(j),
|
|
})
|
|
logOnError(err, "MQTGCmdWorker["+strconv.Itoa(id)+"] : Publishing message.")
|
|
//log.Printf("MQTGCmdWorker[" + strconv.Itoa(id) + "] : Message published.")
|
|
clt.Mux.Unlock()
|
|
} else {
|
|
log.Printf("MQTGCmdWorker["+strconv.Itoa(id)+"] : client %d offline.\n", c.FromUserID64)
|
|
}
|
|
}
|
|
|
|
log.Printf("MQTGCmdWorker[" + strconv.Itoa(id) + "] : Closing.")
|
|
|
|
}
|
|
|
|
func SQLCWMsgWorker(id int, msgs <-chan ChatWarsMessage, objIds chan<- int64) {
|
|
//log.Printf("SQLCWMsgWorker[" + strconv.Itoa(id) + "] : Starting.")
|
|
for m := range msgs {
|
|
objId, err := addObjMsg(m)
|
|
logOnError(err, "SQLCWMsgWorker["+strconv.Itoa(id)+"] : Inserting message.")
|
|
if err == nil && objId != 0 {
|
|
// log.Printf("SQLCWMsgWorker["+strconv.Itoa(id)+"] : Message inserted (%d).\n", objId)
|
|
objIds <- objId
|
|
} else {
|
|
log.Printf("SQLCWMsgWorker["+strconv.Itoa(id)+"] : Message in error\n%s\n", m.Text)
|
|
}
|
|
}
|
|
|
|
log.Printf("SQLCWMsgWorker[" + strconv.Itoa(id) + "] : Closing.")
|
|
|
|
}
|
|
|
|
// SQLIdentifyMsgWorker classifies freshly stored messages. For each object id
// it reloads the message, matches it against a parsing rule, tags the object
// with the rule's sub-type, then runs per-type side effects (parsing into a
// typed struct, DB inserts, client state updates, scheduled jobs). Finally it
// fires any callback jobs registered for (user, message type).
// NOTE: switch cases are non-constant map lookups; any sub-type key missing
// from cacheObjSubType evaluates to the zero value, so case ORDER matters —
// the first matching case wins.
func SQLIdentifyMsgWorker(id int, objIds <-chan int64) {
	//log.Printf("SQLIdentifyMsgWorker[" + strconv.Itoa(id) + "] : Starting.")
	for objId := range objIds {
		m, err := getObjMsg(objId)
		logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : Retrieving message.")
		if err == nil {
			//log.Printf("SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : Message retrieved (%d)\n%s\n", objId, m.Text)
			rule, err := getMsgParsingRule(m)
			if err != nil {
				// No rule matched: leave the message untyped and move on.
				//logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : msgRegex.")
				log.Printf("SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : Message unidentified (%d)\n%s\n", objId, m.Text)
			} else {
				err = setObjSubTypeId(objId, rule.MsgTypeID64)
				logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : setObjSubTypeId")
				switch rule.MsgTypeID64 {
				// Request/echo types below need no further processing: the
				// sub-type tag written above is the whole effect.
				case cacheObjSubType[`msg_groles_req`]:
				case cacheObjSubType[`msg_go`]:
				case cacheObjSubType[`msg_report_req`]:
				case cacheObjSubType[`msg_g_report_req`]:
				case cacheObjSubType[`msg_hero_req`]:
				case cacheObjSubType[`msg_me_req`]:
				case cacheObjSubType[`msg_inv_req`]:
				case cacheObjSubType[`msg_time_req`]:
				case cacheObjSubType[`msg_pledge`]:
				case cacheObjSubType[`msg_pillage_go`]:
				case cacheObjSubType[`msg_pillage_timeout`]:
				case cacheObjSubType[`msg_pillage_win`]:
				case cacheObjSubType[`msg_pillage_loss`]:
				case cacheObjSubType[`msg_go_quest_req`]:
				case cacheObjSubType[`msg_fast_fight`]:
				case cacheObjSubType[`msg_go_arena`]:
				case cacheObjSubType[`msg_top_req`]:
				case cacheObjSubType[`msg_menu`]:
				case cacheObjSubType[`msg_buy_req`]:
				case cacheObjSubType[`msg_sell_req`]:
				case cacheObjSubType[`msg_orderbook_req`]:
				case cacheObjSubType[`msg_withdraw_req`]:
				case cacheObjSubType[`msg_withdraw_code`]:
				case cacheObjSubType[`msg_withdraw_rcv`]:
					// Withdrawal receipt: record every received item/quantity.
					cwm, err := parseSubTypeMessageWithdrawRcv(m, rule.re)
					logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : Parsing cacheObjSubType[`msg_withdraw_rcv`]")
					for _, i := range cwm.ItemList {
						err = insertMsgItem(cwm.Msg.ObjID64, i.ItemID64, i.Quantity)
						logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : Inserting cacheObjSubType[`msg_withdraw_rcv`]")
					}
				case cacheObjSubType[`msg_stock_req`]:
				case cacheObjSubType[`msg_misc_req`]:
				case cacheObjSubType[`msg_gstock_res_req`]:
				case cacheObjSubType[`msg_gstock_alch_req`]:
				case cacheObjSubType[`msg_gstock_misc_req`]:
				case cacheObjSubType[`msg_gstock_rec_req`]:
				case cacheObjSubType[`msg_gstock_part_req`]:
				case cacheObjSubType[`msg_gstock_oth_req`]:
				case cacheObjSubType[`msg_gstock_any_ack`]:
					// Parsed only for validation/logging; result discarded.
					_, err := parseSubTypeMessageGStockAnyAck(m, rule.re)
					logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : Parsing cacheObjSubType[`msg_gstock_any_ack`]")
				case cacheObjSubType[`msg_report_ack`]:
					cwm, err := parseSubTypeMessageReportAck(m, rule.re)
					logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : Parsing cacheObjSubType[`msg_report_ack`]")
					// NOTE(review): cwm.ObjID64 is set but the struct is not
					// persisted or forwarded here — confirm this is intended.
					cwm.ObjID64 = objId
				case cacheObjSubType[`msg_quest_res_ambush`]:
					cwm, err := parseSubTypeMessageQuestResultAmbush(m, rule.re)
					logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : Parsing cacheObjSubType[`msg_quest_res_ambush`]")
					// Only react to ambushes less than 3 minutes old.
					if m.Date.Add(3 * time.Minute).After(time.Now().UTC()) {
						if m.ChatID64 == chtwrsbotID64 && m.TGSenderUserID64 == chtwrsbotID64 {
							// Came straight from the game bot: forward it on.
							s := TGCommand{
								Type:         commandForwardMsg,
								FromUserID64: m.TGUserID64,
								FromMsgID64:  m.ID64,
								FromChatID64: m.ChatID64,
								ToChatID64:   angrybirbsbotID64,
							}
							MQTGCmdQueue <- s
						} else if m.ChatID64 == cfg.Bot.Mainchat {
							// Reported in the main chat: spread to clients.
							err = clientSpreadQuestResultAmbush(cwm)
							logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : clientSpreadQuestResultAmbush.")
						}
					}
				case cacheObjSubType[`msg_pillage_inc`]:
					fallthrough
				case cacheObjSubType[`msg_pillage_inc2`]:
					// Incoming pillage (two textual variants, same handling).
					cwm, err := parseSubTypeMessagePillageInc(m, rule.re)
					logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : Parsing cacheObjSubType[`msg_pillage_inc`]")
					cwm.ObjID64 = objId
					err = insertMsgPillageInc(cwm)
					logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : insertMsgPillageInc")
					// only catch live pillages
					if m.Date.Add(3*time.Minute).After(time.Now()) && m.ChatID64 == chtwrsbotID64 {
						s := TGCommand{
							Type:       commandSendMsg,
							Text:       fmt.Sprintf("Catching pillage (%s)", m.Date.Format(time.RFC3339)),
							ToUserID64: m.TGUserID64,
						}
						TGCmdQueue <- s
						p := JobPayloadPillage{
							ObjID64: objId,
							Date:    m.Date,
						}
						b, _ := json.Marshal(&p)
						// Schedule the counter-pillage 25-60 s out; the random
						// jitter avoids all clients reacting at the same instant.
						_, err = createJob(cacheObjSubType[`job_pillage`], objJobPriority, m.TGUserID64, m.ObjID64, m.Date.Add(time.Duration(25+rand.Intn(35))*time.Second), b)
						logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : createJob(JobPillage)")
					}
				case cacheObjSubType[`msg_me_ack`]:
					// "Me" profile answer: update client state and XP history.
					cwm, err := parseSubTypeMessageMeAck(m, rule.re)
					logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : Parsing cacheObjSubType[`msg_me_ack`]")
					clientMsgMeAck(cwm)
					_, err = addObjXP(cwm.CWUserID64, cwm.ExpNow, cwm.ExpLvl, cwm.Level, m.Date)
					logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : addObjXP(MeAck)")
				case cacheObjSubType[`msg_go_quest_ack`]:
					cwm, err := parseSubTypeMessageGoQuestAck(m, rule.re)
					logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : Parsing cacheObjSubType[`msg_go_quest_ack`]")
					uid, err := clientGetCWUserID64(m.TGUserID64)
					logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : Retrieving UserID64.")
					if err != nil {
						// Unknown user: demote the object back to a plain message.
						err = setObjSubTypeId(objId, cacheObjSubType[`msg`])
					} else {
						clientMsgGoQuestAck(cwm)
						_, err = addObjQuest(uid, cwm.QuestTypeID64, cwm.Duration, m.Date.UTC())
					}
					// Mark the client busy for the quest duration either way.
					err = setClientBusy(m.TGUserID64, m.Date, cwm.Duration)
				case cacheObjSubType[`msg_duel_fight`]:
					cwm, err := parseSubTypeMessageDuelFight(m, rule.re)
					logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : Parsing cacheObjSubType[`msg_duel_fight`]")
					cwm.ObjID64 = objId
					err = insertMsgDuelFight(cwm)
					logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : insertMsgDuelFight")
				case cacheObjSubType[`msg_union_war`]:
					_, err := parseSubTypeMessageUnionWar(m, rule.re)
					logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : Parsing cacheObjSubType[`msg_union_war`]")
				case cacheObjSubType[`msg_groles_ack`]:
					cwm, err := parseSubTypeMessageGRolesAck(m, rule.re)
					logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : Parsing cacheObjSubType[`msg_groles_ack`]")
					clientMsgGRolesAck(cwm)
				case cacheObjSubType[`msg_auction_announce`]:
					cwm, err := parseSubTypeMessageAuctionAnnounce(m, rule.re)
					logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : Parsing cacheObjSubType[`msg_auction_announce`]")
					cwm.ObjID64 = objId
					err = insertMsgAuctionAnnounce(cwm)
					logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : insertMsgAuctionAnnounce")
					// While the lot is still open, schedule a refresh of the
					// announcement 5 minutes after it ends.
					if cwm.End.After(time.Now().UTC()) || strings.Compare(cwm.Status, `#active`) == 0 {
						p := JobPayloadMsgRefresh{
							ObjID64: m.ObjID64,
						}
						b, _ := json.Marshal(&p)
						_, err = createJob(cacheObjSubType[`job_msg_refresh`], objJobPriority, m.TGUserID64, m.ObjID64, cwm.End.Add(5*time.Minute).UTC(), b)
						/* hack for autobid - FIXME */
						/*
							if cwm.Price == 0 && cwm.ItemID64 == getObjItemID(`k05`, `Hunter blade`) {
								clientSendCWMsg(cfg.Bot.Admin, fmt.Sprintf("/bet_%d_1", cwm.LotID))
							}
						*/
					}
				case cacheObjSubType[`msg_time_ack`]:
					_, err := parseSubTypeMessageTimeAck(m, rule.re)
					logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : Parsing cacheObjSubType[`msg_time_ack`]")
				case cacheObjSubType[`msg_orderbook_acl`]:
					_, err := parseSubTypeMessageOrderbookAck(m, rule.re)
					logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : Parsing cacheObjSubType[`msg_orderbook_acl`]")
				case cacheObjSubType[`msg_stock_ack`]:
					_, err := parseSubTypeMessageStockAck(m, rule.re)
					logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : Parsing cacheObjSubType[`msg_stock_ack`]")
				case cacheObjSubType[`msg_g_deposit_req`]:
					_, err := parseSubTypeMessageGDepositReq(m, rule.re)
					logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : Parsing cacheObjSubType[`msg_g_deposit_req`]")
				case cacheObjSubType[`msg_g_deposit_ack`]:
					// Guild deposit confirmed: record the deposited item.
					cwm, err := parseSubTypeMessageGDepositAck(m, rule.re)
					logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : Parsing cacheObjSubType[`msg_g_deposit_ack`]")
					err = insertMsgItem(cwm.Msg.ObjID64, cwm.ItemID64, cwm.Quantity)
					logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : Inserting cacheObjSubType[`msg_g_deposit_ack`]")
				case cacheObjSubType[`msg_stock_any_ack`]:
					_, err := parseSubTypeMessageStockAnyAck(m, rule.re)
					logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : Parsing cacheObjSubType[`msg_stock_any_ack`]")
				case cacheObjSubType[`msg_exchange_ack`]:
					_, err := parseSubTypeMessageExchangeAck(m, rule.re)
					logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : Parsing cacheObjSubType[`msg_exchange_ack`]")
				case cacheObjSubType[`msg_quest_res`]:
					// Quest finished: the client becomes available again.
					_, err := parseSubTypeMessageQuestResult(m, rule.re)
					logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : Parsing cacheObjSubType[`msg_quest_res`]")
					err = setClientIdle(m.TGUserID64, m.Date)
					logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : setClientIdle")
				case cacheObjSubType[`msg_job_gwithdraw_ack`]:
					_, err := parseSubTypeMessageJobGWithdrawAck(m, rule.re)
					// NOTE(review): log label says `msg_quest_res` — looks like
					// a copy-paste slip; confirm and fix the label.
					logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : Parsing cacheObjSubType[`msg_quest_res`]")
				case cacheObjSubType[`msg_bot_g_stock`]:
					botGStock(m)
				case cacheObjSubType[`msg_bot_shops`]:
					botShops(m)
				case cacheObjSubType[`msg_bot_craft_item`]:
					botCraftItem(m, rule.re)
				case cacheObjSubType[`msg_bot_craft_all`]:
					botCraftAll(m, rule.re)
				case cacheObjSubType[`msg_tributes_stats_req`]:
				case cacheObjSubType[`msg_tributes_stats_ack`]:
					cwm, err := parseSubTypeMessageTributesStatsAck(m, rule.re)
					logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : Parsing cacheObjSubType[`msg_tributes_stats_ack`]")
					err = insertMsgTributesStats(cwm)
					logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : insertMsgTributesStats")
				case cacheObjSubType[`msg_shop_main_req`]:
				case cacheObjSubType[`msg_shop_main_ack`]:
					cwm, err := parseSubTypeMessageShopMainAck(m, rule.re)
					logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : Parsing cacheObjSubType[`msg_shop_main_ack`]")
					err = insertMsgShopMainAck(cwm)
					logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : insertMsgShopMainAck")
				default:
					//log.Printf("SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : Unknwon message type in rule %d : %d (%d)\n%s\n", msgParsingRules[i].ID, msgParsingRules[i].MsgTypeID64, objId, m.Text)
				}
				// Fire callbacks waiting on this (user, message type): each
				// registered job is rescheduled to run now, then the slot is
				// cleared. The bot's own user id is checked as well.
				muxCallbacks.Lock()
				if mc1, mok1 := callbacks[m.TGUserID64]; mok1 {
					if mc2, mok2 := mc1[rule.MsgTypeID64]; mok2 {
						for j := range mc2 {
							err := rescheduleJob(mc2[j], m.ObjID64, time.Now().UTC())
							logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : callbacks triggering")
						}
						mc1[rule.MsgTypeID64] = nil
					}
				}
				if mc1, mok1 := callbacks[int64(bot.Me.ID)]; mok1 {
					if mc2, mok2 := mc1[rule.MsgTypeID64]; mok2 {
						for j := range mc2 {
							err := rescheduleJob(mc2[j], m.ObjID64, time.Now().UTC())
							logOnError(err, "SQLIdentifyMsgWorker["+strconv.Itoa(id)+"] : callbacks triggering")
						}
						mc1[rule.MsgTypeID64] = nil
					}
				}
				muxCallbacks.Unlock()
			}
		}
	}
	log.Printf("SQLIdentifyMsgWorker[" + strconv.Itoa(id) + "] : Closing.")
}
|
|
|
|
func SQLJobWorker() {
|
|
//log.Printf("SQLJobWorker : Starting.")
|
|
for true {
|
|
jobs, err := loadCurrentJobs()
|
|
logOnError(err, "SQLJobWorker : loadCurrentJobs")
|
|
/*
|
|
if len(jobs) > 0 {
|
|
log.Printf("SQLJobWorker : %d jobs.\n", len(jobs))
|
|
}
|
|
*/
|
|
if err == nil {
|
|
for _, j := range jobs {
|
|
JobQueue <- j
|
|
}
|
|
if len(jobs) < SQLJobSliceSize {
|
|
time.Sleep(100 * time.Millisecond)
|
|
}
|
|
} else {
|
|
time.Sleep(1 * time.Second)
|
|
}
|
|
}
|
|
log.Printf("SQLJobWorker : Closing.")
|
|
}
|
|
|
|
// JobWorker drains JobQueue (fed by SQLJobWorker) and dispatches each job to
// its typed handler. Jobs past their Timeout are not run; they are logged and
// marked done so they are never reloaded.
// NOTE: the switch cases are non-constant map lookups; a sub-type key missing
// from cacheObjSubType evaluates to 0, so case ORDER decides which handler a
// zero-valued JobTypeID64 would hit — keep the ordering stable.
func JobWorker(id int, jobs <-chan Job) {
	//log.Printf("jobWorker[" + strconv.Itoa(id) + "] : Starting.")
	// FIXME : discard timed out jobs
	for j := range jobs {
		//log.Printf("JobWorker[%d] : Starting job %d (timeout : %s).\n", id, j.ID64, j.Timeout.Format(time.RFC3339))
		if time.Now().UTC().Before(j.Timeout) {
			// One handler per job sub-type; each takes the full Job and is
			// responsible for its own payload decoding and error handling.
			switch j.JobTypeID64 {
			case cacheObjSubType[`job_rescan_msg`]:
				jobRescan(j)
			case cacheObjSubType[`job_set_done`]:
				jobSetDone(j)
			case cacheObjSubType[`job_pillage`]:
				jobPillage(j)
			case cacheObjSubType[`job_msg_client`]:
				jobMsgClient(j)
			case cacheObjSubType[`job_msg_refresh`]:
				jobMsgRefresh(j)
			case cacheObjSubType[`job_msg_fwd`]:
				jobMsgFwd(j)
			case cacheObjSubType[`job_msg_del`]:
				jobMsgDelete(j)
			case cacheObjSubType[`job_backup_export`]:
				jobBackupExport(j)
			case cacheObjSubType[`job_backup_import`]:
				jobBackupImport(j)
			case cacheObjSubType[`job_gstock`]:
				jobGStock(j)
			case cacheObjSubType[`job_gdeposit`]:
				jobGDeposit(j)
			case cacheObjSubType[`job_gdeposit_fwd`]:
				jobGDepositForward(j)
			case cacheObjSubType[`job_gwithdraw`]:
				jobGWithdraw(j)
			case cacheObjSubType[`job_vault_user_status`]:
				jobVaultUserStatus(j)
			case cacheObjSubType[`job_vault_item_status`]:
				jobVaultItemStatus(j)
			case cacheObjSubType[`job_set_def`]:
				jobSetDef(j)
			case cacheObjSubType[`job_get_hammer_time`]:
				jobGetHammerTime(j)
			case cacheObjSubType[`job_get_vault`]:
				jobGetVault(j)
			case cacheObjSubType[`job_craft_item`]:
				jobCraftItem(j)
			case cacheObjSubType[`job_craft_all`]:
				jobCraftAll(j)
			case cacheObjSubType[`job_check_vault_limit`]:
				jobCheckVaultLimit(j)
			case cacheObjSubType[`job_shops`]:
				jobShops(j)
			case cacheObjSubType[`job_shops_slave`]:
				jobShopsSlave(j)
			default:
				log.Printf("jobWorker["+strconv.Itoa(id)+"] : No handler for job type #%d.\n", j.JobTypeID64)
			}
		} else {
			// Expired before a worker picked it up: retire it for good.
			log.Printf("JobWorker[%d] : Job %d timed out.\n", id, j.ID64)
			setJobDone(j.ID64)
		}
	}
	log.Printf("jobWorker[" + strconv.Itoa(id) + "] : Closing.")
}
|
|
|
|
func TGCmdWorker(id int, b *tb.Bot, cmds <-chan TGCommand) {
|
|
//log.Printf("TGCmdWorker[" + strconv.Itoa(id) + "] : Starting.")
|
|
for c := range cmds {
|
|
//j, err := json.Marshal(c)
|
|
//logOnError(err, "TGCmdWorker["+strconv.Itoa(id)+"] : Marshal(c)")
|
|
//log.Printf("TGCmdWorker["+strconv.Itoa(id)+"] : new command.\n%s\n", string(j))
|
|
opt := tb.SendOptions{}
|
|
switch c.ParseMode {
|
|
case cmdParseModePlain:
|
|
opt.ParseMode = tb.ModeDefault
|
|
case cmdParseModeMarkDown:
|
|
opt.ParseMode = tb.ModeMarkdown
|
|
case cmdParseModeHTML:
|
|
opt.ParseMode = tb.ModeHTML
|
|
default:
|
|
opt.ParseMode = tb.ModeDefault
|
|
}
|
|
switch c.Type {
|
|
case commandSendMsg:
|
|
if c.ToChatID64 != 0 {
|
|
ch := tb.Chat{
|
|
ID: c.ToChatID64,
|
|
}
|
|
_, err := b.Send(&ch, c.Text, &opt)
|
|
logOnError(err, "TGCmdWorker["+strconv.Itoa(id)+"] : SendMsg Chat")
|
|
} else if c.ToUserID64 != 0 {
|
|
u := tb.User{
|
|
ID: int(c.ToUserID64),
|
|
}
|
|
_, err := b.Send(&u, c.Text, &opt)
|
|
logOnError(err, "TGCmdWorker["+strconv.Itoa(id)+"] : SendMsg User")
|
|
}
|
|
case commandReplyMsg:
|
|
ch := tb.Chat{
|
|
ID: c.FromChatID64,
|
|
}
|
|
m := tb.Message{
|
|
ID: int(c.FromMsgID64),
|
|
Chat: &ch,
|
|
}
|
|
_, err := b.Reply(&m, c.Text, &opt)
|
|
logOnError(err, "TGCmdWorker["+strconv.Itoa(id)+"] : ReplyMsg")
|
|
case commandSendDocument:
|
|
if c.ToChatID64 != 0 {
|
|
ch := tb.Chat{
|
|
ID: c.ToChatID64,
|
|
}
|
|
d := c.Document
|
|
_, err := b.Send(&ch, &d)
|
|
//_, err := c.Document.Send(b, ch, nil)
|
|
logOnError(err, "TGCmdWorker["+strconv.Itoa(id)+"] : SendDocument Chat")
|
|
} else if c.ToUserID64 != 0 {
|
|
u := tb.User{
|
|
ID: int(c.ToUserID64),
|
|
}
|
|
d := c.Document
|
|
_, err := b.Send(&u, &d)
|
|
//_, err := c.Document.Send(b, &ch, nil)
|
|
logOnError(err, "TGCmdWorker["+strconv.Itoa(id)+"] : SendDocument Chat")
|
|
}
|
|
default:
|
|
log.Printf("TGCmdWorker[" + strconv.Itoa(id) + "] : Unknown command.\n")
|
|
}
|
|
|
|
}
|
|
log.Printf("TGCmdWorker[" + strconv.Itoa(id) + "] : Closing.")
|
|
}
|
|
|
|
func MQTidyKeepAliveWorker() {
|
|
//log.Printf("MQTidyKeepAliveWorker : Starting.")
|
|
for true {
|
|
t := time.Now()
|
|
muxClients.Lock()
|
|
for id, clt := range clients {
|
|
clt.Mux.Lock()
|
|
if clt.Active && clt.HeartBeat.Add(3*KeepAliveHeartBeatSeconds*time.Second).Before(time.Now()) {
|
|
msgs, err := clt.MQ.Channel.QueuePurge(clt.MQ.Queue.Name, false)
|
|
logOnError(err, "MQTidyKeepAliveWorker : Channel.QueuePurge()")
|
|
err = clt.MQ.Channel.Close()
|
|
logOnError(err, "MQTidyKeepAliveWorker : Channel.Close()")
|
|
err = clt.MQ.Connection.Close()
|
|
logOnError(err, "MQTidyKeepAliveWorker : Connection.Close()")
|
|
cmd := TGCommand{
|
|
Type: commandSendMsg,
|
|
ToUserID64: id,
|
|
Text: "Timeout, purging and closing command queue.",
|
|
}
|
|
TGCmdQueue <- cmd
|
|
cmd = TGCommand{
|
|
Type: commandSendMsg,
|
|
ToUserID64: cfg.Bot.Admin,
|
|
Text: fmt.Sprintf("Client %s timed out (%d messages purged).", clt.Login, msgs),
|
|
}
|
|
TGCmdQueue <- cmd
|
|
clt.Active = false
|
|
} else if clt.Active {
|
|
//log.Printf("MQTidyKeepAliveWorker : Client %s is active.\n", clt.Login)
|
|
} else {
|
|
//log.Printf("MQTidyKeepAliveWorker : Client %s is inactive.\n", clt.Login)
|
|
}
|
|
clt.Mux.Unlock()
|
|
}
|
|
muxClients.Unlock()
|
|
time.Sleep(time.Until(t.Add(time.Second)))
|
|
}
|
|
log.Printf("MQTidyKeepAliveWorker : Closing.")
|
|
}
|