DelayNoMore/battle_srv/models/room.go

package models
import (
"encoding/xml"
"fmt"
"github.com/golang/protobuf/proto"
"github.com/gorilla/websocket"
"github.com/solarlune/resolv"
"go.uber.org/zap"
"io/ioutil"
"math"
"math/rand"
"os"
"path/filepath"
. "server/common"
"server/common/utils"
pb "server/pb_output"
"strings"
"sync"
"sync/atomic"
"time"
)
const (
UPSYNC_MSG_ACT_HB_PING = int32(1)
UPSYNC_MSG_ACT_PLAYER_CMD = int32(2)
UPSYNC_MSG_ACT_PLAYER_COLLIDER_ACK = int32(3)
DOWNSYNC_MSG_ACT_HB_REQ = int32(1)
DOWNSYNC_MSG_ACT_INPUT_BATCH = int32(2)
DOWNSYNC_MSG_ACT_ROOM_FRAME = int32(3)
DOWNSYNC_MSG_ACT_FORCED_RESYNC = int32(4)
DOWNSYNC_MSG_ACT_BATTLE_READY_TO_START = int32(-1)
DOWNSYNC_MSG_ACT_BATTLE_START = int32(0)
DOWNSYNC_MSG_ACT_PLAYER_ADDED_AND_ACKED = int32(-98)
DOWNSYNC_MSG_ACT_PLAYER_READDED_AND_ACKED = int32(-97)
)
const (
MAGIC_JOIN_INDEX_DEFAULT = 0
MAGIC_JOIN_INDEX_INVALID = -1
)
const (
COLLISION_CATEGORY_CONTROLLED_PLAYER = (1 << 1)
COLLISION_CATEGORY_BARRIER = (1 << 2)
COLLISION_MASK_FOR_CONTROLLED_PLAYER = (COLLISION_CATEGORY_BARRIER)
COLLISION_MASK_FOR_BARRIER = (COLLISION_CATEGORY_CONTROLLED_PLAYER)
COLLISION_PLAYER_INDEX_PREFIX = (1 << 17)
COLLISION_BARRIER_INDEX_PREFIX = (1 << 16)
)
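/*
 * Illustration of the collision constants above (illustrative only): a controlled player carries COLLISION_CATEGORY_CONTROLLED_PLAYER (0b10)
 * and its mask only admits COLLISION_CATEGORY_BARRIER (0b100), and vice versa for barriers, i.e. player-vs-player collision is deliberately excluded.
 * The *_INDEX_PREFIX constants are meant to build non-overlapping keys for "pR.CollisionSysMap", e.g. "COLLISION_PLAYER_INDEX_PREFIX + joinIndex" in "refreshColliders".
 */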
var DIRECTION_DECODER = [][]int32{
{0, 0},
{0, +1},
{0, -1},
{+2, 0},
{-2, 0},
{+2, +1},
{-2, -1},
{+2, -1},
{-2, +1},
{+2, 0},
{-2, 0},
{0, +1},
{0, -1},
}
var DIRECTION_DECODER_INVERSE_LENGTH = []float64{
0.0,
1.0,
1.0,
0.5,
0.5,
0.4472,
0.4472,
0.4472,
0.4472,
0.5,
0.5,
1.0,
1.0,
}
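/*
 * A quick worked check of the two tables above (illustrative only): DIRECTION_DECODER[5] == {+2, +1} has length
 * sqrt(2*2 + 1*1) == sqrt(5) ≈ 2.2361, hence DIRECTION_DECODER_INVERSE_LENGTH[5] == 0.4472 ≈ 1/sqrt(5).
 * Multiplying a speed by this inverse length normalizes the decoded direction, so diagonal movement
 * isn't faster than axis-aligned movement in "applyInputFrameDownsyncDynamics".
 */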
type RoomBattleState struct {
IDLE int32
WAITING int32
PREPARE int32
IN_BATTLE int32
STOPPING_BATTLE_FOR_SETTLEMENT int32
IN_SETTLEMENT int32
IN_DISMISSAL int32
}
type BattleStartCbType func()
type SignalToCloseConnCbType func(customRetCode int, customRetMsg string)
// A single instance containing only "named constant integers" to be shared by all threads.
var RoomBattleStateIns RoomBattleState
func InitRoomBattleStateIns() {
RoomBattleStateIns = RoomBattleState{
IDLE: 0,
WAITING: -1,
PREPARE: 10000000,
IN_BATTLE: 10000001,
STOPPING_BATTLE_FOR_SETTLEMENT: 10000002,
IN_SETTLEMENT: 10000003,
IN_DISMISSAL: 10000004,
}
}
func calRoomScore(inRoomPlayerCount int32, roomPlayerCnt int, currentRoomBattleState int32) float32 {
x := float32(inRoomPlayerCount) / float32(roomPlayerCnt)
d := (x - 0.5)
d2 := d * d
return -7.8125*d2 + 5.0 - float32(currentRoomBattleState)
}
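/*
 * A worked example of "calRoomScore" (illustrative only): with roomPlayerCnt == 2 and inRoomPlayerCount == 1,
 * x == 0.5 and d == 0, so the score is 5.0 - float32(currentRoomBattleState); the quadratic term only penalizes
 * rooms that are nearly empty or nearly full (|d| close to 0.5 costs up to 7.8125*0.25 ≈ 1.95), while subtracting
 * the battle state pushes rooms already in battle (larger state values) far down the ranking.
 */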
type Room struct {
Id int32
Capacity int
Players map[int32]*Player
PlayersArr []*Player // ordered by joinIndex
CollisionSysMap map[int32]*resolv.Object
/**
* The following `PlayerDownsyncSessionDict` is NOT individually put
* under `type Player struct` for a reason.
*
* Upon each connection establishment, a new instance `player Player` is created for the given `playerId`.
* To be specific, if
* - that `playerId == 42` accidentally reconnects in just several milliseconds after a passive disconnection, e.g. due to bad wireless signal strength, and
* - that `type Player struct` contains a `DownsyncSession` field
*
* , then we might have to
* - clean up `previousPlayerInstance.DownsyncSession`
* - initialize `currentPlayerInstance.DownsyncSession`
*
* to avoid chaotic flaws.
*
* Moreover, during the invocation of `PlayerSignalToCloseDict`, the `Player` instance is supposed to be deallocated (though not synchronously).
*/
PlayerDownsyncSessionDict map[int32]*websocket.Conn
PlayerSignalToCloseDict map[int32]SignalToCloseConnCbType
Score float32
State int32
Index int
RenderFrameId int32
CurDynamicsRenderFrameId int32 // [WARNING] The dynamics of backend is ALWAYS MOVING FORWARD BY ALL-CONFIRMED INPUTFRAMES (either by upsync or forced), i.e. no rollback
ServerFps int32
BattleDurationNanos int64
InputFrameUpsyncDelayTolerance int32
MaxChasingRenderFramesPerUpdate int32
EffectivePlayerCount int32
DismissalWaitGroup sync.WaitGroup
Barriers map[int32]*Barrier
AllPlayerInputsBuffer *RingBuffer
RenderFrameBuffer *RingBuffer
LastAllConfirmedInputFrameId int32
LastAllConfirmedInputFrameIdWithChange int32
LastAllConfirmedInputList []uint64
InputDelayFrames int32 // in the count of render frames
NstDelayFrames int32 // network-single-trip delay in the count of render frames, proposed to be (InputDelayFrames >> 1) because we expect a round-trip delay to be exactly "InputDelayFrames"
InputScaleFrames uint32 // inputDelayedAndScaledFrameId = ((originalFrameId - InputDelayFrames) >> InputScaleFrames)
JoinIndexBooleanArr []bool
RollbackEstimatedDt float64
StageName string
StageDiscreteW int32
StageDiscreteH int32
StageTileW int32
StageTileH int32
RawBattleStrToVec2DListMap StrToVec2DListMap
RawBattleStrToPolygon2DListMap StrToPolygon2DListMap
}
const (
PLAYER_DEFAULT_SPEED = float64(200) // Hardcoded
ADD_SPEED = float64(100) // Hardcoded
)
func (pR *Room) updateScore() {
pR.Score = calRoomScore(pR.EffectivePlayerCount, pR.Capacity, pR.State)
}
func (pR *Room) AddPlayerIfPossible(pPlayerFromDbInit *Player, session *websocket.Conn, signalToCloseConnOfThisPlayer SignalToCloseConnCbType) bool {
playerId := pPlayerFromDbInit.Id
// TODO: Any thread-safety concern for accessing "pR" here?
if RoomBattleStateIns.IDLE != pR.State && RoomBattleStateIns.WAITING != pR.State {
Logger.Warn("AddPlayerIfPossible error, roomState:", zap.Any("playerId", playerId), zap.Any("roomId", pR.Id), zap.Any("roomState", pR.State), zap.Any("roomEffectivePlayerCount", pR.EffectivePlayerCount))
return false
}
if _, existent := pR.Players[playerId]; existent {
Logger.Warn("AddPlayerIfPossible error, existing in the room.PlayersDict:", zap.Any("playerId", playerId), zap.Any("roomId", pR.Id), zap.Any("roomState", pR.State), zap.Any("roomEffectivePlayerCount", pR.EffectivePlayerCount))
return false
}
defer pR.onPlayerAdded(playerId)
pPlayerFromDbInit.AckingFrameId = 0
pPlayerFromDbInit.AckingInputFrameId = -1
pPlayerFromDbInit.LastSentInputFrameId = -1
pPlayerFromDbInit.BattleState = PlayerBattleStateIns.ADDED_PENDING_BATTLE_COLLIDER_ACK
pPlayerFromDbInit.FrozenAtGmtMillis = -1 // Hardcoded temporarily.
pPlayerFromDbInit.Speed = PLAYER_DEFAULT_SPEED // Hardcoded temporarily.
pPlayerFromDbInit.AddSpeedAtGmtMillis = -1 // Hardcoded temporarily.
pR.Players[playerId] = pPlayerFromDbInit
pR.PlayerDownsyncSessionDict[playerId] = session
pR.PlayerSignalToCloseDict[playerId] = signalToCloseConnOfThisPlayer
return true
}
func (pR *Room) ReAddPlayerIfPossible(pTmpPlayerInstance *Player, session *websocket.Conn, signalToCloseConnOfThisPlayer SignalToCloseConnCbType) bool {
playerId := pTmpPlayerInstance.Id
// TODO: Any thread-safety concern for accessing "pR" and "pEffectiveInRoomPlayerInstance" here?
if RoomBattleStateIns.PREPARE != pR.State && RoomBattleStateIns.WAITING != pR.State && RoomBattleStateIns.IN_BATTLE != pR.State && RoomBattleStateIns.IN_SETTLEMENT != pR.State && RoomBattleStateIns.IN_DISMISSAL != pR.State {
Logger.Warn("ReAddPlayerIfPossible error due to roomState:", zap.Any("playerId", playerId), zap.Any("roomId", pR.Id), zap.Any("roomState", pR.State), zap.Any("roomEffectivePlayerCount", pR.EffectivePlayerCount))
return false
}
if _, existent := pR.Players[playerId]; !existent {
Logger.Warn("ReAddPlayerIfPossible error due to player nonexistent for room:", zap.Any("playerId", playerId), zap.Any("roomId", pR.Id), zap.Any("roomState", pR.State), zap.Any("roomEffectivePlayerCount", pR.EffectivePlayerCount))
return false
}
/*
* WARNING: The "pTmpPlayerInstance *Player" used here is a temporarily constructed
* instance from "<proj-root>/battle_srv/ws/serve.go", which is NOT the same as "pR.Players[pTmpPlayerInstance.Id]".
* -- YFLu
*/
defer pR.onPlayerReAdded(playerId)
pR.PlayerDownsyncSessionDict[playerId] = session
pR.PlayerSignalToCloseDict[playerId] = signalToCloseConnOfThisPlayer
pEffectiveInRoomPlayerInstance := pR.Players[playerId]
pEffectiveInRoomPlayerInstance.AckingFrameId = 0
pEffectiveInRoomPlayerInstance.AckingInputFrameId = -1
pEffectiveInRoomPlayerInstance.LastSentInputFrameId = -1
pEffectiveInRoomPlayerInstance.BattleState = PlayerBattleStateIns.READDED_PENDING_BATTLE_COLLIDER_ACK
Logger.Warn("ReAddPlayerIfPossible finished.", zap.Any("roomId", pR.Id), zap.Any("playerId", playerId), zap.Any("roomState", pR.State), zap.Any("roomEffectivePlayerCount", pR.EffectivePlayerCount), zap.Any("player AckingFrameId", pEffectiveInRoomPlayerInstance.AckingFrameId), zap.Any("player AckingInputFrameId", pEffectiveInRoomPlayerInstance.AckingInputFrameId))
return true
}
func (pR *Room) ChooseStage() error {
/*
* We use the verb "refresh" here to imply that upon invocation of this function, all colliders will be recovered if they were destroyed in the previous battle.
*
* -- YFLu, 2019-09-04
*/
pwd, err := os.Getwd()
ErrFatal(err)
rand.Seed(time.Now().Unix())
stageNameList := []string{ /*"pacman" ,*/ "richsoil"}
chosenStageIndex := rand.Int() % len(stageNameList) // Hardcoded temporarily. -- YFLu
pR.StageName = stageNameList[chosenStageIndex]
relativePathForAllStages := "../frontend/assets/resources/map"
relativePathForChosenStage := fmt.Sprintf("%s/%s", relativePathForAllStages, pR.StageName)
pTmxMapIns := &TmxMap{}
absDirPathContainingDirectlyTmxFile := filepath.Join(pwd, relativePathForChosenStage)
absTmxFilePath := fmt.Sprintf("%s/map.tmx", absDirPathContainingDirectlyTmxFile)
if !filepath.IsAbs(absTmxFilePath) {
panic("Tmx filepath must be absolute!")
}
byteArr, err := ioutil.ReadFile(absTmxFilePath)
if nil != err {
panic(err)
}
err = xml.Unmarshal(byteArr, pTmxMapIns)
if nil != err {
panic(err)
}
// Obtain the content of `gidBoundariesMapInB2World`.
gidBoundariesMapInB2World := make(map[int]StrToPolygon2DListMap, 0)
for _, tileset := range pTmxMapIns.Tilesets {
relativeTsxFilePath := fmt.Sprintf("%s/%s", filepath.Join(pwd, relativePathForChosenStage), tileset.Source) // Note that "TmxTileset.Source" can be a string of "relative path".
absTsxFilePath, err := filepath.Abs(relativeTsxFilePath)
if nil != err {
panic(err)
}
if !filepath.IsAbs(absTsxFilePath) {
panic("Filepath must be absolute!")
}
byteArrOfTsxFile, err := ioutil.ReadFile(absTsxFilePath)
if nil != err {
panic(err)
}
DeserializeTsxToColliderDict(pTmxMapIns, byteArrOfTsxFile, int(tileset.FirstGid), gidBoundariesMapInB2World)
}
stageDiscreteW, stageDiscreteH, stageTileW, stageTileH, toRetStrToVec2DListMap, toRetStrToPolygon2DListMap, err := ParseTmxLayersAndGroups(pTmxMapIns, gidBoundariesMapInB2World)
if nil != err {
panic(err)
}
pR.StageDiscreteW = stageDiscreteW
pR.StageDiscreteH = stageDiscreteH
pR.StageTileW = stageTileW
pR.StageTileH = stageTileH
pR.RawBattleStrToVec2DListMap = toRetStrToVec2DListMap
pR.RawBattleStrToPolygon2DListMap = toRetStrToPolygon2DListMap
barrierPolygon2DList := *(toRetStrToPolygon2DListMap["Barrier"])
var barrierLocalIdInBattle int32 = 0
for _, polygon2D := range barrierPolygon2DList {
/*
// For debug-printing only.
Logger.Info("ChooseStage printing polygon2D for barrierPolygon2DList", zap.Any("barrierLocalIdInBattle", barrierLocalIdInBattle), zap.Any("polygon2D.Anchor", polygon2D.Anchor), zap.Any("polygon2D.Points", polygon2D.Points))
*/
pR.Barriers[barrierLocalIdInBattle] = &Barrier{
Boundary: polygon2D,
}
barrierLocalIdInBattle++
}
return nil
}
func (pR *Room) ConvertToInputFrameId(renderFrameId int32, inputDelayFrames int32) int32 {
// Kindly note that when "renderFrameId < inputDelayFrames", the result is negative (Go's ">>" on a signed int is an arithmetic shift), which callers treat as "no applicable inputFrame yet".
return ((renderFrameId - inputDelayFrames) >> pR.InputScaleFrames)
}
func (pR *Room) ConvertToFirstUsedRenderFrameId(inputFrameId int32, inputDelayFrames int32) int32 {
return ((inputFrameId << pR.InputScaleFrames) + inputDelayFrames)
}
func (pR *Room) ConvertToLastUsedRenderFrameId(inputFrameId int32, inputDelayFrames int32) int32 {
return ((inputFrameId << pR.InputScaleFrames) + inputDelayFrames + (1 << pR.InputScaleFrames) - 1)
}
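/*
 * A worked example of the three conversions above, assuming the defaults assigned in "OnDismissed"
 * (InputDelayFrames == 8, InputScaleFrames == 2):
 *   ConvertToInputFrameId(40, 8) == (40-8)>>2 == 8, and renderFrameIds 40..43 all map to inputFrameId 8;
 *   ConvertToFirstUsedRenderFrameId(8, 8) == (8<<2)+8 == 40;
 *   ConvertToLastUsedRenderFrameId(8, 8) == (8<<2)+8+(1<<2)-1 == 43.
 * I.e. each inputFrame covers a contiguous window of (1 << InputScaleFrames) renderFrames.
 */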
func (pR *Room) EncodeUpsyncCmd(upsyncCmd *pb.InputFrameUpsync) uint64 {
var ret uint64 = 0
// There're 13 possible directions, occupying the first 4 bits, no need to shift
ret += uint64(upsyncCmd.EncodedDir)
return ret
}
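// E.g. an upsync cmd with EncodedDir == 3 encodes to 0b0011, which "applyInputFrameDownsyncDynamics" later decodes via DIRECTION_DECODER[3] == {+2, 0}.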
func (pR *Room) AllPlayerInputsBufferString(allDetails bool) string {
if allDetails {
// Appending of the array of strings can be very SLOW due to on-demand heap allocation! Use this printing with caution.
s := make([]string, 0)
s = append(s, fmt.Sprintf("{renderFrameId: %v, stInputFrameId: %v, edInputFrameId: %v, lastAllConfirmedInputFrameIdWithChange: %v, lastAllConfirmedInputFrameId: %v}", pR.RenderFrameId, pR.AllPlayerInputsBuffer.StFrameId, pR.AllPlayerInputsBuffer.EdFrameId, pR.LastAllConfirmedInputFrameIdWithChange, pR.LastAllConfirmedInputFrameId))
for playerId, player := range pR.Players {
s = append(s, fmt.Sprintf("{playerId: %v, ackingFrameId: %v, ackingInputFrameId: %v, lastSentInputFrameId: %v}", playerId, player.AckingFrameId, player.AckingInputFrameId, player.LastSentInputFrameId))
}
for i := pR.AllPlayerInputsBuffer.StFrameId; i < pR.AllPlayerInputsBuffer.EdFrameId; i++ {
tmp := pR.AllPlayerInputsBuffer.GetByFrameId(i)
if nil == tmp {
break
}
f := tmp.(*pb.InputFrameDownsync)
s = append(s, fmt.Sprintf("{inputFrameId: %v, inputList: %v, confirmedList: %v}", f.InputFrameId, f.InputList, f.ConfirmedList))
}
return strings.Join(s, "\n")
} else {
return fmt.Sprintf("{renderFrameId: %d, stInputFrameId: %d, edInputFrameId: %d, lastAllConfirmedInputFrameIdWithChange: %d, lastAllConfirmedInputFrameId: %d}", pR.RenderFrameId, pR.AllPlayerInputsBuffer.StFrameId, pR.AllPlayerInputsBuffer.EdFrameId, pR.LastAllConfirmedInputFrameIdWithChange, pR.LastAllConfirmedInputFrameId)
}
}
func (pR *Room) StartBattle() {
if RoomBattleStateIns.WAITING != pR.State {
Logger.Warn("[StartBattle] Battle not started after all players' battle state checked!", zap.Any("roomId", pR.Id), zap.Any("roomState", pR.State))
return
}
// Always instantiates a new channel and lets the old one die out, as it's no longer retained by any root reference.
nanosPerFrame := 1000000000 / int64(pR.ServerFps)
pR.RenderFrameId = 0
// Initialize the "collisionSys" as well as "RenderFrameBuffer"
pR.CurDynamicsRenderFrameId = 0
kickoffFrame := &pb.RoomDownsyncFrame{
Id: pR.RenderFrameId,
Players: toPbPlayers(pR.Players),
CountdownNanos: pR.BattleDurationNanos,
}
pR.RenderFrameBuffer.Put(kickoffFrame)
// Refresh "Colliders"
pR.refreshColliders()
/**
* Will be triggered from a goroutine which executes the critical `Room.AddPlayerIfPossible`, thus the `battleMainLoop` should be detached.
* All of the consecutive stages, e.g. settlement, dismissal, should share the same goroutine with `battleMainLoop`.
*/
battleMainLoop := func() {
defer func() {
if r := recover(); r != nil {
Logger.Error("battleMainLoop, recovery spot#1, recovered from: ", zap.Any("roomId", pR.Id), zap.Any("panic", r))
pR.StopBattleForSettlement()
}
Logger.Info("The `battleMainLoop` is stopped for:", zap.Any("roomId", pR.Id))
pR.onBattleStoppedForSettlement()
}()
battleMainLoopStartedNanos := utils.UnixtimeNano()
totalElapsedNanos := int64(0)
Logger.Info("The `battleMainLoop` is started for:", zap.Any("roomId", pR.Id))
for {
stCalculation := utils.UnixtimeNano()
if totalElapsedNanos > pR.BattleDurationNanos {
Logger.Info(fmt.Sprintf("The `battleMainLoop` for roomId=%v is stopped:\n%v", pR.Id, pR.AllPlayerInputsBufferString(true)))
pR.StopBattleForSettlement()
}
if swapped := atomic.CompareAndSwapInt32(&pR.State, RoomBattleStateIns.IN_BATTLE, RoomBattleStateIns.IN_BATTLE); !swapped {
return
}
// Prefab and buffer backend inputFrameDownsync
if pR.shouldPrefabInputFrameDownsync(pR.RenderFrameId) {
noDelayInputFrameId := pR.ConvertToInputFrameId(pR.RenderFrameId, 0)
pR.prefabInputFrameDownsync(noDelayInputFrameId)
}
// Periodically force the buffered inputFrames into the all-confirmed state
unconfirmedMask := pR.forceConfirmationIfApplicable()
dynamicsDuration := int64(0)
if 0 <= pR.LastAllConfirmedInputFrameId {
dynamicsStartedAt := utils.UnixtimeNano()
// Apply "all-confirmed inputFrames" to move forward "pR.CurDynamicsRenderFrameId"
nextDynamicsRenderFrameId := pR.ConvertToLastUsedRenderFrameId(pR.LastAllConfirmedInputFrameId, pR.InputDelayFrames)
Logger.Debug(fmt.Sprintf("roomId=%v, room.RenderFrameId=%v, LastAllConfirmedInputFrameId=%v, InputDelayFrames=%v, nextDynamicsRenderFrameId=%v", pR.Id, pR.RenderFrameId, pR.LastAllConfirmedInputFrameId, pR.InputDelayFrames, nextDynamicsRenderFrameId))
pR.applyInputFrameDownsyncDynamics(pR.CurDynamicsRenderFrameId, nextDynamicsRenderFrameId)
dynamicsDuration = utils.UnixtimeNano() - dynamicsStartedAt
}
lastAllConfirmedInputFrameIdWithChange := atomic.LoadInt32(&(pR.LastAllConfirmedInputFrameIdWithChange))
for playerId, player := range pR.Players {
if swapped := atomic.CompareAndSwapInt32(&player.BattleState, PlayerBattleStateIns.ACTIVE, PlayerBattleStateIns.ACTIVE); !swapped {
// [WARNING] DON'T send anything if the player is disconnected, because it could jam the channel and cause significant delay upon "battle recovery for reconnected player".
continue
}
if 0 == pR.RenderFrameId {
kickoffFrame := pR.RenderFrameBuffer.GetByFrameId(0).(*pb.RoomDownsyncFrame)
pR.sendSafely(kickoffFrame, nil, DOWNSYNC_MSG_ACT_BATTLE_START, playerId)
} else {
// [WARNING] Websocket is TCP-based, thus no need to re-send a previously sent inputFrame to a same player!
toSendInputFrames := make([]*pb.InputFrameDownsync, 0, pR.AllPlayerInputsBuffer.Cnt)
candidateToSendInputFrameId := atomic.LoadInt32(&(pR.Players[playerId].LastSentInputFrameId)) + 1
if candidateToSendInputFrameId < pR.AllPlayerInputsBuffer.StFrameId {
// [WARNING] As "player.LastSentInputFrameId <= lastAllConfirmedInputFrameIdWithChange" for each iteration, and "lastAllConfirmedInputFrameIdWithChange <= lastAllConfirmedInputFrameId" where the latter is used to "applyInputFrameDownsyncDynamics" and then evict "pR.AllPlayerInputsBuffer", thus there's a very high possibility that "player.LastSentInputFrameId" is already evicted.
// Logger.Debug(fmt.Sprintf("LastSentInputFrameId already popped: roomId=%v, playerId=%v, lastSentInputFrameId=%v, playerAckingInputFrameId=%v, AllPlayerInputsBuffer=%v", pR.Id, playerId, candidateToSendInputFrameId-1, player.AckingInputFrameId, pR.AllPlayerInputsBufferString(false)))
candidateToSendInputFrameId = pR.AllPlayerInputsBuffer.StFrameId
}
// [WARNING] EDGE CASE HERE: Upon initialization, all of "lastAllConfirmedInputFrameId", "lastAllConfirmedInputFrameIdWithChange" and "anchorInputFrameId" are "-1", thus "candidateToSendInputFrameId" starts with "0", however "inputFrameId: 0" might not have been all confirmed!
debugSendingInputFrameId := int32(-1)
for candidateToSendInputFrameId <= lastAllConfirmedInputFrameIdWithChange {
tmp := pR.AllPlayerInputsBuffer.GetByFrameId(candidateToSendInputFrameId)
if nil == tmp {
panic(fmt.Sprintf("Required inputFrameId=%v for roomId=%v, playerId=%v doesn't exist! AllPlayerInputsBuffer=%v", candidateToSendInputFrameId, pR.Id, playerId, pR.AllPlayerInputsBufferString(false)))
}
f := tmp.(*pb.InputFrameDownsync)
if pR.inputFrameIdDebuggable(candidateToSendInputFrameId) {
debugSendingInputFrameId = candidateToSendInputFrameId
Logger.Debug("inputFrame lifecycle#3[sending]:", zap.Any("roomId", pR.Id), zap.Any("playerId", playerId), zap.Any("playerAckingInputFrameId", player.AckingInputFrameId), zap.Any("inputFrameId", candidateToSendInputFrameId), zap.Any("inputFrameId-doublecheck", f.InputFrameId), zap.Any("AllPlayerInputsBuffer", pR.AllPlayerInputsBufferString(false)), zap.Any("ConfirmedList", f.ConfirmedList))
}
toSendInputFrames = append(toSendInputFrames, f)
candidateToSendInputFrameId++
}
indiceInJoinIndexBooleanArr := uint32(player.JoinIndex - 1)
var joinMask uint64 = (1 << indiceInJoinIndexBooleanArr)
if 0 < (unconfirmedMask & joinMask) {
refRenderFrameId := pR.CurDynamicsRenderFrameId
if refRenderFrameId > pR.RenderFrameId {
// [WARNING] To avoid that in good network condition the frontend resyncs itself to a "too advanced frontend.renderFrameId", and then starts upsyncing "too advanced inputFrameId".
refRenderFrameId = pR.RenderFrameId
}
refRenderFrame := pR.RenderFrameBuffer.GetByFrameId(refRenderFrameId).(*pb.RoomDownsyncFrame)
pR.sendSafely(refRenderFrame, toSendInputFrames, DOWNSYNC_MSG_ACT_FORCED_RESYNC, playerId)
} else {
if 0 >= len(toSendInputFrames) {
continue
}
pR.sendSafely(nil, toSendInputFrames, DOWNSYNC_MSG_ACT_INPUT_BATCH, playerId)
}
if -1 != debugSendingInputFrameId {
Logger.Info("inputFrame lifecycle#4[sent]:", zap.Any("roomId", pR.Id), zap.Any("playerId", playerId), zap.Any("playerAckingInputFrameId", player.AckingInputFrameId), zap.Any("inputFrameId", debugSendingInputFrameId), zap.Any("AllPlayerInputsBuffer", pR.AllPlayerInputsBufferString(false)))
}
atomic.StoreInt32(&(pR.Players[playerId].LastSentInputFrameId), candidateToSendInputFrameId-1)
}
}
for 0 < pR.RenderFrameBuffer.Cnt && pR.RenderFrameBuffer.StFrameId < pR.CurDynamicsRenderFrameId {
_ = pR.RenderFrameBuffer.Pop()
}
toApplyInputFrameId := pR.ConvertToInputFrameId(pR.CurDynamicsRenderFrameId, pR.InputDelayFrames)
for 0 < pR.AllPlayerInputsBuffer.Cnt && pR.AllPlayerInputsBuffer.StFrameId < toApplyInputFrameId {
f := pR.AllPlayerInputsBuffer.Pop().(*pb.InputFrameDownsync)
if pR.inputFrameIdDebuggable(f.InputFrameId) {
// Popping of an "inputFrame" only happens AFTER it's been all-confirmed, because the popping requires the "inputFrame" to be fully acked
Logger.Debug("inputFrame lifecycle#5[popped]:", zap.Any("roomId", pR.Id), zap.Any("inputFrameId", f.InputFrameId), zap.Any("AllPlayerInputsBuffer", pR.AllPlayerInputsBufferString(false)))
}
}
pR.RenderFrameId++
elapsedInCalculation := (utils.UnixtimeNano() - stCalculation)
totalElapsedNanos = (utils.UnixtimeNano() - battleMainLoopStartedNanos)
if elapsedInCalculation > nanosPerFrame {
Logger.Warn(fmt.Sprintf("SLOW FRAME! Elapsed time statistics: roomId=%v, room.RenderFrameId=%v, elapsedInCalculation=%v, dynamicsDuration=%v, nanosPerFrame=%v", pR.Id, pR.RenderFrameId, elapsedInCalculation, dynamicsDuration, nanosPerFrame))
}
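// Note that time.Sleep returns immediately for a zero or negative duration, so an over-budget frame (i.e. "elapsedInCalculation >= nanosPerFrame") just rolls straight into the next iteration without sleeping.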
time.Sleep(time.Duration(nanosPerFrame - elapsedInCalculation))
}
}
pR.onBattlePrepare(func() {
pR.onBattleStarted() // NOTE: Deliberately not using `defer`.
go battleMainLoop()
})
}
func (pR *Room) OnBattleCmdReceived(pReq *pb.WsReq) {
if swapped := atomic.CompareAndSwapInt32(&pR.State, RoomBattleStateIns.IN_BATTLE, RoomBattleStateIns.IN_BATTLE); !swapped {
return
}
playerId := pReq.PlayerId
indiceInJoinIndexBooleanArr := uint32(pReq.JoinIndex - 1)
inputFrameUpsyncBatch := pReq.InputFrameUpsyncBatch
ackingFrameId := pReq.AckingFrameId
ackingInputFrameId := pReq.AckingInputFrameId
if _, existent := pR.Players[playerId]; !existent {
Logger.Warn(fmt.Sprintf("upcmd player doesn't exist: roomId=%v, playerId=%v", pR.Id, playerId))
return
}
if swapped := atomic.CompareAndSwapInt32(&(pR.Players[playerId].AckingFrameId), pR.Players[playerId].AckingFrameId, ackingFrameId); !swapped {
panic(fmt.Sprintf("Failed to update AckingFrameId to %v for roomId=%v, playerId=%v", ackingFrameId, pR.Id, playerId))
}
if swapped := atomic.CompareAndSwapInt32(&(pR.Players[playerId].AckingInputFrameId), pR.Players[playerId].AckingInputFrameId, ackingInputFrameId); !swapped {
panic(fmt.Sprintf("Failed to update AckingInputFrameId to %v for roomId=%v, playerId=%v", ackingInputFrameId, pR.Id, playerId))
}
for _, inputFrameUpsync := range inputFrameUpsyncBatch {
clientInputFrameId := inputFrameUpsync.InputFrameId
if clientInputFrameId < pR.AllPlayerInputsBuffer.StFrameId {
// An obsolete inputFrame is actually less of a concern than an overly advanced one.
Logger.Debug(fmt.Sprintf("Obsolete inputFrameUpsync: roomId=%v, playerId=%v, clientInputFrameId=%v, AllPlayerInputsBuffer=%v", pR.Id, playerId, clientInputFrameId, pR.AllPlayerInputsBufferString(false)))
return
}
var joinMask uint64 = (1 << indiceInJoinIndexBooleanArr)
encodedInput := pR.EncodeUpsyncCmd(inputFrameUpsync)
if clientInputFrameId >= pR.AllPlayerInputsBuffer.EdFrameId {
Logger.Warn(fmt.Sprintf("inputFrame too advanced! is the player cheating? roomId=%v, playerId=%v, clientInputFrameId=%v, AllPlayerInputsBuffer=%v", pR.Id, playerId, clientInputFrameId, pR.AllPlayerInputsBufferString(false)))
return
}
tmp2 := pR.AllPlayerInputsBuffer.GetByFrameId(clientInputFrameId)
if nil == tmp2 {
// This shouldn't happen due to the previous 2 checks
Logger.Warn(fmt.Sprintf("Mysterious error getting an input frame: roomId=%v, playerId=%v, clientInputFrameId=%v, AllPlayerInputsBuffer=%v", pR.Id, playerId, clientInputFrameId, pR.AllPlayerInputsBufferString(false)))
return
}
inputFrameDownsync := tmp2.(*pb.InputFrameDownsync)
oldConfirmedList := atomic.LoadUint64(&(inputFrameDownsync.ConfirmedList))
if (oldConfirmedList & joinMask) > 0 {
Logger.Debug(fmt.Sprintf("Cmd already confirmed but getting set attempt, omitting this upsync cmd: roomId=%v, playerId=%v, clientInputFrameId=%v, AllPlayerInputsBuffer=%v", pR.Id, playerId, clientInputFrameId, pR.AllPlayerInputsBufferString(false)))
return
}
// In Golang 1.12, there's no "compare-and-swap primitive" on a custom struct (or its pointer, unless it's an unsafe pointer https://pkg.go.dev/sync/atomic@go1.12#CompareAndSwapPointer). Although CAS on a custom struct is possible in Golang 1.19 https://pkg.go.dev/sync/atomic@go1.19.1#Value.CompareAndSwap, using a single word is still faster whenever possible.
// [WARNING] No need to use CAS for updating "inputFrameDownsync.InputList[indiceInJoinIndexBooleanArr]", the upsync from frontend takes top priority.
atomic.StoreUint64(&inputFrameDownsync.InputList[indiceInJoinIndexBooleanArr], encodedInput)
newConfirmedList := (oldConfirmedList | joinMask)
if swapped := atomic.CompareAndSwapUint64(&(inputFrameDownsync.ConfirmedList), oldConfirmedList, newConfirmedList); !swapped {
// [WARNING] Upon this error, the actual input has already been updated, which is an expected result if it was caused by the force confirmation from "battleMainLoop".
Logger.Warn(fmt.Sprintf("Failed confirm CAS: roomId=%v, playerId=%v, clientInputFrameId=%v", pR.Id, playerId, clientInputFrameId))
return
}
totPlayerCnt := uint32(pR.Capacity)
allConfirmedMask := uint64((1 << totPlayerCnt) - 1)
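// E.g. for a 2-player room the mask is 0b11; once both players' bits are set in "ConfirmedList", the inputFrame is deemed all-confirmed right below.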
if allConfirmedMask == newConfirmedList {
pR.onInputFrameDownsyncAllConfirmed(inputFrameDownsync, playerId)
}
}
}
func (pR *Room) onInputFrameDownsyncAllConfirmed(inputFrameDownsync *pb.InputFrameDownsync, playerId int32) {
inputFrameId := inputFrameDownsync.InputFrameId
if -1 == pR.LastAllConfirmedInputFrameIdWithChange || false == pR.equalInputLists(inputFrameDownsync.InputList, pR.LastAllConfirmedInputList) {
Logger.Info(fmt.Sprintf("Key inputFrame change: roomId=%v, playerId=%v, newInputFrameId=%v, lastInputFrameId=%v, newInputList=%v, lastInputList=%v, AllPlayerInputsBuffer=%v", pR.Id, playerId, inputFrameId, pR.LastAllConfirmedInputFrameId, inputFrameDownsync.InputList, pR.LastAllConfirmedInputList, pR.AllPlayerInputsBufferString(false)))
atomic.StoreInt32(&(pR.LastAllConfirmedInputFrameIdWithChange), inputFrameId)
}
atomic.StoreInt32(&(pR.LastAllConfirmedInputFrameId), inputFrameId) // [WARNING] It's IMPORTANT that "pR.LastAllConfirmedInputFrameId" is NOT NECESSARILY CONSECUTIVE, i.e. if one of the players disconnects and reconnects within a considerable amount of frame delays!
for i, v := range inputFrameDownsync.InputList {
// To avoid potential misuse of pointers
pR.LastAllConfirmedInputList[i] = v
}
if pR.inputFrameIdDebuggable(inputFrameId) {
if -1 == playerId {
Logger.Info(fmt.Sprintf("inputFrame lifecycle#2[forced-allconfirmed]: roomId=%v, inputFrameId=%v, lastAllConfirmedInputFrameId=%v, AllPlayerInputsBuffer=%v", pR.Id, inputFrameId, pR.LastAllConfirmedInputFrameId, pR.AllPlayerInputsBufferString(false)))
} else {
Logger.Info(fmt.Sprintf("inputFrame lifecycle#2[allconfirmed]: roomId=%v, playerId=%v, inputFrameId=%v, lastAllConfirmedInputFrameId=%v, AllPlayerInputsBuffer=%v", pR.Id, playerId, inputFrameId, pR.LastAllConfirmedInputFrameId, pR.AllPlayerInputsBufferString(false)))
}
}
}
func (pR *Room) equalInputLists(lhs []uint64, rhs []uint64) bool {
if len(lhs) != len(rhs) {
return false
}
for i := range lhs {
if lhs[i] != rhs[i] {
return false
}
}
return true
}
func (pR *Room) StopBattleForSettlement() {
if RoomBattleStateIns.IN_BATTLE != pR.State {
return
}
pR.State = RoomBattleStateIns.STOPPING_BATTLE_FOR_SETTLEMENT
Logger.Info("Stopping the `battleMainLoop` for:", zap.Any("roomId", pR.Id))
pR.RenderFrameId++
for playerId := range pR.Players {
assembledFrame := pb.RoomDownsyncFrame{
Id: pR.RenderFrameId,
Players: toPbPlayers(pR.Players),
CountdownNanos: -1, // TODO: Replace this magic constant!
}
pR.sendSafely(&assembledFrame, nil, DOWNSYNC_MSG_ACT_ROOM_FRAME, playerId)
}
// Note that `pR.onBattleStoppedForSettlement` will be called by `battleMainLoop`.
}
func (pR *Room) onBattleStarted() {
if RoomBattleStateIns.PREPARE != pR.State {
return
}
pR.State = RoomBattleStateIns.IN_BATTLE
pR.updateScore()
}
func (pR *Room) onBattlePrepare(cb BattleStartCbType) {
if RoomBattleStateIns.WAITING != pR.State {
Logger.Warn("[onBattlePrepare] Battle not started after all players' battle state checked!", zap.Any("roomId", pR.Id), zap.Any("roomState", pR.State))
return
}
pR.State = RoomBattleStateIns.PREPARE
Logger.Info("Battle state transitted to RoomBattleStateIns.PREPARE for:", zap.Any("roomId", pR.Id))
playerMetas := make(map[int32]*pb.PlayerMeta, 0)
for _, player := range pR.Players {
playerMetas[player.Id] = &pb.PlayerMeta{
Id: player.Id,
Name: player.Name,
DisplayName: player.DisplayName,
Avatar: player.Avatar,
JoinIndex: player.JoinIndex,
}
}
battleReadyToStartFrame := &pb.RoomDownsyncFrame{
Id: DOWNSYNC_MSG_ACT_BATTLE_READY_TO_START,
Players: toPbPlayers(pR.Players),
PlayerMetas: playerMetas,
CountdownNanos: pR.BattleDurationNanos,
}
Logger.Info("Sending out frame for RoomBattleState.PREPARE:", zap.Any("battleReadyToStartFrame", battleReadyToStartFrame))
for _, player := range pR.Players {
pR.sendSafely(battleReadyToStartFrame, nil, DOWNSYNC_MSG_ACT_BATTLE_READY_TO_START, player.Id)
}
battlePreparationNanos := int64(6000000000)
preparationLoop := func() {
defer func() {
Logger.Info("The `preparationLoop` is stopped for:", zap.Any("roomId", pR.Id))
cb()
}()
preparationLoopStartedNanos := utils.UnixtimeNano()
totalElapsedNanos := int64(0)
for {
if totalElapsedNanos > battlePreparationNanos {
break
}
now := utils.UnixtimeNano()
totalElapsedNanos = (now - preparationLoopStartedNanos)
time.Sleep(time.Duration(battlePreparationNanos - totalElapsedNanos))
}
}
go preparationLoop()
}
func (pR *Room) onBattleStoppedForSettlement() {
if RoomBattleStateIns.STOPPING_BATTLE_FOR_SETTLEMENT != pR.State {
return
}
defer func() {
pR.onSettlementCompleted()
}()
pR.State = RoomBattleStateIns.IN_SETTLEMENT
Logger.Info("The room is in settlement:", zap.Any("roomId", pR.Id))
// TODO: Some settlement labor.
}
func (pR *Room) onSettlementCompleted() {
pR.Dismiss()
}
func (pR *Room) Dismiss() {
if RoomBattleStateIns.IN_SETTLEMENT != pR.State {
return
}
pR.State = RoomBattleStateIns.IN_DISMISSAL
if 0 < len(pR.Players) {
Logger.Info("The room is in dismissal:", zap.Any("roomId", pR.Id))
for playerId := range pR.Players {
Logger.Info("Adding 1 to pR.DismissalWaitGroup:", zap.Any("roomId", pR.Id), zap.Any("playerId", playerId))
pR.DismissalWaitGroup.Add(1)
pR.expelPlayerForDismissal(playerId)
pR.DismissalWaitGroup.Done()
Logger.Info("Decremented 1 to pR.DismissalWaitGroup:", zap.Any("roomId", pR.Id), zap.Any("playerId", playerId))
}
pR.DismissalWaitGroup.Wait()
}
pR.OnDismissed()
}
func (pR *Room) OnDismissed() {
// Always instantiates new HeapRAM blocks and lets the old blocks die out, as they're no longer retained by any root reference.
pR.Players = make(map[int32]*Player)
pR.PlayersArr = make([]*Player, pR.Capacity)
pR.CollisionSysMap = make(map[int32]*resolv.Object)
pR.PlayerDownsyncSessionDict = make(map[int32]*websocket.Conn)
pR.PlayerSignalToCloseDict = make(map[int32]SignalToCloseConnCbType)
pR.JoinIndexBooleanArr = make([]bool, pR.Capacity)
pR.Barriers = make(map[int32]*Barrier)
pR.AllPlayerInputsBuffer = NewRingBuffer(1024)
pR.RenderFrameBuffer = NewRingBuffer(1024)
pR.LastAllConfirmedInputFrameId = -1
pR.LastAllConfirmedInputFrameIdWithChange = -1
pR.LastAllConfirmedInputList = make([]uint64, pR.Capacity)
pR.RenderFrameId = 0
pR.CurDynamicsRenderFrameId = 0
pR.InputDelayFrames = 8
pR.NstDelayFrames = 32
pR.InputScaleFrames = uint32(2)
pR.ServerFps = 60
pR.RollbackEstimatedDt = float64(1.0) / float64(pR.ServerFps)
pR.BattleDurationNanos = int64(30 * 1000 * 1000 * 1000)
pR.InputFrameUpsyncDelayTolerance = 2
pR.MaxChasingRenderFramesPerUpdate = 10
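// With the defaults above, one inputFrame covers (1 << InputScaleFrames) == 4 renderFrames ≈ 66.7ms at 60fps, and "forceConfirmationIfApplicable" trails the current renderFrame by NstDelayFrames == 32 renderFrames ≈ 533ms.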
pR.ChooseStage()
pR.EffectivePlayerCount = 0
// [WARNING] It's deliberately ordered such that "pR.State = RoomBattleStateIns.IDLE" is put AFTER all the refreshing operations above.
pR.State = RoomBattleStateIns.IDLE
pR.updateScore()
Logger.Info("The room is completely dismissed:", zap.Any("roomId", pR.Id))
}
func (pR *Room) expelPlayerDuringGame(playerId int32) {
defer pR.onPlayerExpelledDuringGame(playerId)
}
func (pR *Room) expelPlayerForDismissal(playerId int32) {
pR.onPlayerExpelledForDismissal(playerId)
}
func (pR *Room) onPlayerExpelledDuringGame(playerId int32) {
pR.onPlayerLost(playerId)
}
func (pR *Room) onPlayerExpelledForDismissal(playerId int32) {
pR.onPlayerLost(playerId)
Logger.Info("onPlayerExpelledForDismissal:", zap.Any("playerId", playerId), zap.Any("roomId", pR.Id), zap.Any("nowRoomBattleState", pR.State), zap.Any("nowRoomEffectivePlayerCount", pR.EffectivePlayerCount))
}
func (pR *Room) OnPlayerDisconnected(playerId int32) {
defer func() {
if r := recover(); r != nil {
Logger.Error("Room OnPlayerDisconnected, recovery spot#1, recovered from: ", zap.Any("playerId", playerId), zap.Any("roomId", pR.Id), zap.Any("panic", r))
}
}()
if _, existent := pR.Players[playerId]; existent {
switch pR.Players[playerId].BattleState {
// Kindly note that Go's "switch" doesn't fall through by default, thus all the terminal battle states are grouped into a single case to share the early return.
case PlayerBattleStateIns.DISCONNECTED, PlayerBattleStateIns.LOST, PlayerBattleStateIns.EXPELLED_DURING_GAME, PlayerBattleStateIns.EXPELLED_IN_DISMISSAL:
Logger.Info("Room OnPlayerDisconnected[early return #1]:", zap.Any("playerId", playerId), zap.Any("playerBattleState", pR.Players[playerId].BattleState), zap.Any("roomId", pR.Id), zap.Any("nowRoomBattleState", pR.State), zap.Any("nowRoomEffectivePlayerCount", pR.EffectivePlayerCount))
return
}
} else {
// Not even the "pR.Players[playerId]" exists.
Logger.Info("Room OnPlayerDisconnected[early return #2]:", zap.Any("playerId", playerId), zap.Any("roomId", pR.Id), zap.Any("nowRoomBattleState", pR.State), zap.Any("nowRoomEffectivePlayerCount", pR.EffectivePlayerCount))
return
}
switch pR.State {
case RoomBattleStateIns.WAITING:
pR.onPlayerLost(playerId)
delete(pR.Players, playerId) // Note that this statement MUST be put AFTER `pR.onPlayerLost(...)` to avoid nil pointer exception.
if 0 == pR.EffectivePlayerCount {
pR.State = RoomBattleStateIns.IDLE
}
pR.updateScore()
Logger.Info("Player disconnected while room is at RoomBattleStateIns.WAITING:", zap.Any("playerId", playerId), zap.Any("roomId", pR.Id), zap.Any("nowRoomBattleState", pR.State), zap.Any("nowRoomEffectivePlayerCount", pR.EffectivePlayerCount))
default:
pR.Players[playerId].BattleState = PlayerBattleStateIns.DISCONNECTED
pR.clearPlayerNetworkSession(playerId) // Still need to clear the network session pointers, because "OnPlayerDisconnected" is only triggered from "signalToCloseConnOfThisPlayer" in "ws/serve.go"; when the same player reconnects, the network session pointers will be re-assigned
Logger.Info("Player disconnected from room:", zap.Any("playerId", playerId), zap.Any("playerBattleState", pR.Players[playerId].BattleState), zap.Any("roomId", pR.Id), zap.Any("nowRoomBattleState", pR.State), zap.Any("nowRoomEffectivePlayerCount", pR.EffectivePlayerCount))
}
}
func (pR *Room) onPlayerLost(playerId int32) {
defer func() {
if r := recover(); r != nil {
Logger.Error("Room OnPlayerLost, recovery spot, recovered from: ", zap.Any("playerId", playerId), zap.Any("roomId", pR.Id), zap.Any("panic", r))
}
}()
if player, existent := pR.Players[playerId]; existent {
player.BattleState = PlayerBattleStateIns.LOST
pR.clearPlayerNetworkSession(playerId)
pR.EffectivePlayerCount--
indiceInJoinIndexBooleanArr := int(player.JoinIndex - 1)
if (0 <= indiceInJoinIndexBooleanArr) && (indiceInJoinIndexBooleanArr < len(pR.JoinIndexBooleanArr)) {
pR.JoinIndexBooleanArr[indiceInJoinIndexBooleanArr] = false
} else {
Logger.Warn("Room OnPlayerLost, pR.JoinIndexBooleanArr is out of range: ", zap.Any("playerId", playerId), zap.Any("roomId", pR.Id), zap.Any("indiceInJoinIndexBooleanArr", indiceInJoinIndexBooleanArr), zap.Any("len(pR.JoinIndexBooleanArr)", len(pR.JoinIndexBooleanArr)))
}
player.JoinIndex = MAGIC_JOIN_INDEX_INVALID
Logger.Info("Room OnPlayerLost: ", zap.Any("playerId", playerId), zap.Any("roomId", pR.Id), zap.Any("resulted pR.JoinIndexBooleanArr", pR.JoinIndexBooleanArr))
}
}
func (pR *Room) clearPlayerNetworkSession(playerId int32) {
if _, y := pR.PlayerDownsyncSessionDict[playerId]; y {
Logger.Info("sending termination symbol for:", zap.Any("playerId", playerId), zap.Any("roomId", pR.Id))
delete(pR.PlayerDownsyncSessionDict, playerId)
delete(pR.PlayerSignalToCloseDict, playerId)
}
}
func (pR *Room) onPlayerAdded(playerId int32) {
pR.EffectivePlayerCount++
if 1 == pR.EffectivePlayerCount {
pR.State = RoomBattleStateIns.WAITING
}
for index, value := range pR.JoinIndexBooleanArr {
if false == value {
pR.Players[playerId].JoinIndex = int32(index) + 1
pR.JoinIndexBooleanArr[index] = true
// Lazily assign the initial position of "Player" for "RoomDownsyncFrame".
playerPosList := *(pR.RawBattleStrToVec2DListMap["PlayerStartingPos"])
if index >= len(playerPosList) {
panic(fmt.Sprintf("onPlayerAdded error, index >= len(playerPosList), roomId=%v, playerId=%v, roomState=%v, roomEffectivePlayerCount=%v", pR.Id, playerId, pR.State, pR.EffectivePlayerCount))
}
playerPos := playerPosList[index]
if nil == playerPos {
panic(fmt.Sprintf("onPlayerAdded error, nil == playerPos, roomId=%v, playerId=%v, roomState=%v, roomEffectivePlayerCount=%v", pR.Id, playerId, pR.State, pR.EffectivePlayerCount))
}
pR.Players[playerId].X = playerPos.X
pR.Players[playerId].Y = playerPos.Y
break
}
}
pR.updateScore()
Logger.Info("onPlayerAdded:", zap.Any("playerId", playerId), zap.Any("roomId", pR.Id), zap.Any("joinIndex", pR.Players[playerId].JoinIndex), zap.Any("EffectivePlayerCount", pR.EffectivePlayerCount), zap.Any("resulted pR.JoinIndexBooleanArr", pR.JoinIndexBooleanArr), zap.Any("RoomBattleState", pR.State))
}
func (pR *Room) onPlayerReAdded(playerId int32) {
/*
* [WARNING]
*
* If a player quits at "RoomBattleState.WAITING", then his/her re-joining will always invoke `AddPlayerIfPossible(...)`. Therefore, this
* function will only be invoked for players who quit the battle at ">RoomBattleState.WAITING" and re-join at "RoomBattleState.IN_BATTLE", during which the `pR.JoinIndexBooleanArr` doesn't change.
*/
Logger.Info("Room got `onPlayerReAdded` invoked,", zap.Any("roomId", pR.Id), zap.Any("playerId", playerId), zap.Any("resulted pR.JoinIndexBooleanArr", pR.JoinIndexBooleanArr))
pR.updateScore()
}
func (pR *Room) OnPlayerBattleColliderAcked(playerId int32) bool {
targetPlayer, ok := pR.Players[playerId]
if false == ok {
return false
}
playerMetas := make(map[int32]*pb.PlayerMeta, 0)
for _, eachPlayer := range pR.Players {
playerMetas[eachPlayer.Id] = &pb.PlayerMeta{
Id: eachPlayer.Id,
Name: eachPlayer.Name,
DisplayName: eachPlayer.DisplayName,
Avatar: eachPlayer.Avatar,
JoinIndex: eachPlayer.JoinIndex,
}
}
// Broadcast added or readded player info to all players in the same room
for _, eachPlayer := range pR.Players {
/*
[WARNING]
This `playerAckedFrame` is the first ever "RoomDownsyncFrame" for every "PersistentSessionClient on the frontend", and it goes right after each "BattleColliderInfo".
By making use of the sequential nature of each ws session, all later "RoomDownsyncFrame"s generated after `pRoom.StartBattle()` will be put behind this `playerAckedFrame`.
*/
switch targetPlayer.BattleState {
case PlayerBattleStateIns.ADDED_PENDING_BATTLE_COLLIDER_ACK:
playerAckedFrame := &pb.RoomDownsyncFrame{
Id: pR.RenderFrameId,
Players: toPbPlayers(pR.Players),
PlayerMetas: playerMetas,
}
pR.sendSafely(playerAckedFrame, nil, DOWNSYNC_MSG_ACT_PLAYER_ADDED_AND_ACKED, eachPlayer.Id)
case PlayerBattleStateIns.READDED_PENDING_BATTLE_COLLIDER_ACK:
playerAckedFrame := &pb.RoomDownsyncFrame{
Id: pR.RenderFrameId,
Players: toPbPlayers(pR.Players),
PlayerMetas: playerMetas,
}
pR.sendSafely(playerAckedFrame, nil, DOWNSYNC_MSG_ACT_PLAYER_READDED_AND_ACKED, eachPlayer.Id)
default:
}
}
targetPlayer.BattleState = PlayerBattleStateIns.ACTIVE
Logger.Info(fmt.Sprintf("OnPlayerBattleColliderAcked: roomId=%v, roomState=%v, targetPlayerId=%v, capacity=%v, EffectivePlayerCount=%v", pR.Id, pR.State, targetPlayer.Id, pR.Capacity, pR.EffectivePlayerCount))
if pR.Capacity == int(pR.EffectivePlayerCount) {
allAcked := true
for _, p := range pR.Players {
if PlayerBattleStateIns.ACTIVE != p.BattleState {
Logger.Info("unexpectedly got an inactive player", zap.Any("roomId", pR.Id), zap.Any("playerId", p.Id), zap.Any("battleState", p.BattleState))
allAcked = false
break
}
}
if true == allAcked {
pR.StartBattle() // WON'T run if the battle state is not in WAITING.
}
}
pR.updateScore()
return true
}
func (pR *Room) sendSafely(roomDownsyncFrame *pb.RoomDownsyncFrame, toSendFrames []*pb.InputFrameDownsync, act int32, playerId int32) {
defer func() {
if r := recover(); r != nil {
pR.PlayerSignalToCloseDict[playerId](Constants.RetCode.UnknownError, fmt.Sprintf("%v", r))
}
}()
pResp := &pb.WsResp{
Ret: int32(Constants.RetCode.Ok),
Act: act,
Rdf: roomDownsyncFrame,
InputFrameDownsyncBatch: toSendFrames,
}
theBytes, marshalErr := proto.Marshal(pResp)
if nil != marshalErr {
panic(fmt.Sprintf("Error marshaling downsync message: roomId=%v, playerId=%v, roomState=%v, roomEffectivePlayerCount=%v", pR.Id, playerId, pR.State, pR.EffectivePlayerCount))
}
if err := pR.PlayerDownsyncSessionDict[playerId].WriteMessage(websocket.BinaryMessage, theBytes); nil != err {
panic(fmt.Sprintf("Error sending downsync message: roomId=%v, playerId=%v, roomState=%v, roomEffectivePlayerCount=%v", pR.Id, playerId, pR.State, pR.EffectivePlayerCount))
}
}
func (pR *Room) shouldPrefabInputFrameDownsync(renderFrameId int32) bool {
return ((renderFrameId & ((1 << pR.InputScaleFrames) - 1)) == 0)
}
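// E.g. with the default InputScaleFrames == 2 set in "OnDismissed", the mask is 0b11, thus an inputFrameDownsync is prefabbed every 4th renderFrame (renderFrameId == 0, 4, 8, ...).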
func (pR *Room) prefabInputFrameDownsync(inputFrameId int32) *pb.InputFrameDownsync {
/*
Kindly note that on backend the prefab is much simpler than its frontend counterpart, because frontend will upsync its latest command immediately if there's any change w.r.t. its own prev cmd, thus if no upsync received from a frontend,
- EITHER it's due to local lag and bad network,
- OR there's simply no change w.r.t. its prev cmd.
*/
var currInputFrameDownsync *pb.InputFrameDownsync = nil
if 0 == inputFrameId && 0 == pR.AllPlayerInputsBuffer.Cnt {
currInputFrameDownsync = &pb.InputFrameDownsync{
InputFrameId: 0,
InputList: make([]uint64, pR.Capacity),
ConfirmedList: uint64(0),
}
} else {
tmp := pR.AllPlayerInputsBuffer.GetByFrameId(inputFrameId - 1)
if nil == tmp {
panic(fmt.Sprintf("Error prefabbing inputFrameDownsync: roomId=%v, AllPlayerInputsBuffer=%v", pR.Id, pR.AllPlayerInputsBufferString(false)))
}
prevInputFrameDownsync := tmp.(*pb.InputFrameDownsync)
currInputList := make([]uint64, len(prevInputFrameDownsync.InputList))
copy(currInputList, prevInputFrameDownsync.InputList) // Clone the values; a plain slice assignment would share the backing array with the previous inputFrame
currInputFrameDownsync = &pb.InputFrameDownsync{
InputFrameId: inputFrameId,
InputList: currInputList,
ConfirmedList: uint64(0),
}
}
pR.AllPlayerInputsBuffer.Put(currInputFrameDownsync)
return currInputFrameDownsync
}
func (pR *Room) forceConfirmationIfApplicable() uint64 {
// Forces confirmation of a non-all-confirmed inputFrame EXACTLY ONE AT A TIME, and returns the mask of non-confirmed players, e.g. in a 4-player battle a return value of 0b1001 means that the players with JoinIndex=1 and JoinIndex=4 are non-confirmed for inputFrameId2
renderFrameId1 := (pR.RenderFrameId - pR.NstDelayFrames) // the renderFrameId which should've been rendered on frontend
if 0 > renderFrameId1 || !pR.shouldPrefabInputFrameDownsync(renderFrameId1) {
/*
The backend "shouldPrefabInputFrameDownsync" shares the same rule as frontend "shouldGenerateInputFrameUpsync".
It's also important that "forceConfirmationIfApplicable" is NOT EXECUTED for every renderFrame, such that when a player is forced to resync, it has some time, i.e. (1 << InputScaleFrames) renderFrames, to upsync again.
*/
return 0
}
inputFrameId2 := pR.ConvertToInputFrameId(renderFrameId1, 0) // The inputFrame to force confirmation (if necessary)
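// E.g. with the defaults set in "OnDismissed" (NstDelayFrames == 32, InputScaleFrames == 2), this path is taken at RenderFrameId == 32, 36, 40, ..., targeting inputFrameId2 == 0, 1, 2, ... respectively.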
if inputFrameId2 < pR.LastAllConfirmedInputFrameId {
// No need to force confirmation, the inputFrames already arrived
Logger.Debug(fmt.Sprintf("inputFrameId2=%v is already all-confirmed for roomId=%v[type#1], no need to force confirmation of it", inputFrameId2, pR.Id))
return 0
}
tmp := pR.AllPlayerInputsBuffer.GetByFrameId(inputFrameId2)
if nil == tmp {
panic(fmt.Sprintf("inputFrameId2=%v doesn't exist for roomId=%v, this is abnormal because the server should prefab inputFrameDownsync in a most advanced pace, check the prefab logic! AllPlayerInputsBuffer=%v", inputFrameId2, pR.Id, pR.AllPlayerInputsBufferString(false)))
}
inputFrame2 := tmp.(*pb.InputFrameDownsync)
totPlayerCnt := uint32(pR.Capacity)
allConfirmedMask := uint64((1 << totPlayerCnt) - 1)
if swapped := atomic.CompareAndSwapUint64(&(inputFrame2.ConfirmedList), allConfirmedMask, allConfirmedMask); swapped {
// This could happen if the frontend upsync command arrived between type#1 and type#2 checks.
Logger.Debug(fmt.Sprintf("inputFrameId2=%v is already all-confirmed for roomId=%v[type#2], no need to force confirmation of it", inputFrameId2, pR.Id))
return 0
}
// Force confirmation of "inputFrame2"
oldConfirmedList := atomic.LoadUint64(&(inputFrame2.ConfirmedList))
atomic.StoreUint64(&(inputFrame2.ConfirmedList), allConfirmedMask)
pR.onInputFrameDownsyncAllConfirmed(inputFrame2, -1)
unconfirmedMask := (oldConfirmedList ^ allConfirmedMask)
return unconfirmedMask
}
func (pR *Room) applyInputFrameDownsyncDynamics(fromRenderFrameId int32, toRenderFrameId int32) {
if fromRenderFrameId >= toRenderFrameId {
return
}
Logger.Debug(fmt.Sprintf("Applying inputFrame dynamics: roomId=%v, room.RenderFrameId=%v, fromRenderFrameId=%v, toRenderFrameId=%v", pR.Id, pR.RenderFrameId, fromRenderFrameId, toRenderFrameId))
totPlayerCnt := uint32(pR.Capacity)
allConfirmedMask := uint64((1 << totPlayerCnt) - 1)
for collisionSysRenderFrameId := fromRenderFrameId; collisionSysRenderFrameId < toRenderFrameId; collisionSysRenderFrameId++ {
delayedInputFrameId := pR.ConvertToInputFrameId(collisionSysRenderFrameId, pR.InputDelayFrames)
if 0 <= delayedInputFrameId {
tmp := pR.AllPlayerInputsBuffer.GetByFrameId(delayedInputFrameId)
if nil == tmp {
panic(fmt.Sprintf("delayedInputFrameId=%v doesn't exist for roomId=%v, this is abnormal because it's to be used for applying dynamics to [fromRenderFrameId:%v, toRenderFrameId:%v) @ collisionSysRenderFrameId=%v! AllPlayerInputsBuffer=%v", delayedInputFrameId, pR.Id, fromRenderFrameId, toRenderFrameId, collisionSysRenderFrameId, pR.AllPlayerInputsBufferString(false)))
}
delayedInputFrame := tmp.(*pb.InputFrameDownsync)
if swapped := atomic.CompareAndSwapUint64(&(delayedInputFrame.ConfirmedList), allConfirmedMask, allConfirmedMask); !swapped {
panic(fmt.Sprintf("delayedInputFrameId=%v is not yet all-confirmed for roomId=%v, this is abnormal because it's to be used for applying dynamics to [fromRenderFrameId:%v, toRenderFrameId:%v) @ collisionSysRenderFrameId=%v! AllPlayerInputsBuffer=%v", delayedInputFrameId, pR.Id, fromRenderFrameId, toRenderFrameId, collisionSysRenderFrameId, pR.AllPlayerInputsBufferString(false)))
}
inputList := delayedInputFrame.InputList
// Ordered by joinIndex to guarantee determinism
for _, player := range pR.PlayersArr {
joinIndex := player.JoinIndex
encodedInput := inputList[joinIndex-1]
decodedInput := DIRECTION_DECODER[encodedInput]
decodedInputSpeedFactor := DIRECTION_DECODER_INVERSE_LENGTH[encodedInput]
baseChange := player.Speed * pR.RollbackEstimatedDt * decodedInputSpeedFactor
dx := baseChange * float64(decodedInput[0])
dy := baseChange * float64(decodedInput[1])
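// A worked example (illustrative only): with the defaults PLAYER_DEFAULT_SPEED == 200 and RollbackEstimatedDt == 1/60, an encodedInput of 3 decodes to {+2, 0} with inverse length 0.5, yielding baseChange ≈ 1.667 and (dx, dy) ≈ (3.333, 0) per renderFrame.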
/*
// The collision lib seems very slow at worst cases, omitting for now
collisionPlayerIndex := COLLISION_PLAYER_INDEX_PREFIX + joinIndex
playerCollider := pR.CollisionSysMap[collisionPlayerIndex]
if collision := playerCollider.Check(dx, dy, "Barrier"); collision != nil {
changeWithCollision := collision.ContactWithObject(collision.Objects[0])
dx = changeWithCollision.X()
dy = changeWithCollision.Y()
}
playerCollider.X += dx
playerCollider.Y += dy
// Update in "collision space"
playerCollider.Update()
*/
player.Dir.Dx = decodedInput[0]
player.Dir.Dy = decodedInput[1]
player.X += dx
player.Y += dy
}
}
newRenderFrame := pb.RoomDownsyncFrame{
Id: collisionSysRenderFrameId + 1,
Players: toPbPlayers(pR.Players),
CountdownNanos: (pR.BattleDurationNanos - int64(collisionSysRenderFrameId)*int64(pR.RollbackEstimatedDt*1000000000)),
}
pR.RenderFrameBuffer.Put(&newRenderFrame)
pR.CurDynamicsRenderFrameId++
}
}
func (pR *Room) inputFrameIdDebuggable(inputFrameId int32) bool {
return 0 == (inputFrameId % 10)
}
func (pR *Room) refreshColliders() {
// Kindly note that by now, we've already got all the shapes in the tmx file into "pR.(Players | Barriers)" from "ParseTmxLayersAndGroups"
space := resolv.NewSpace(int(pR.StageDiscreteW), int(pR.StageDiscreteH), int(pR.StageTileW), int(pR.StageTileH)) // Allocate a new collision space every time after a battle is settled
for _, player := range pR.Players {
playerCollider := resolv.NewObject(player.X, player.Y, 12, 12) // Radius=12 is hardcoded
playerColliderShape := resolv.NewCircle(player.X, player.Y, 12)
playerCollider.SetShape(playerColliderShape)
space.Add(playerCollider)
// Keep track of the collider in "pR.CollisionSysMap"
joinIndex := player.JoinIndex
pR.PlayersArr[joinIndex-1] = player
collisionPlayerIndex := COLLISION_PLAYER_INDEX_PREFIX + joinIndex
pR.CollisionSysMap[collisionPlayerIndex] = playerCollider
}
for _, barrier := range pR.Barriers {
var w float64 = 0
var h float64 = 0
for i, pi := range barrier.Boundary.Points {
for j, pj := range barrier.Boundary.Points {
if i == j {
continue
}
if math.Abs(pj.X-pi.X) > w {
w = math.Abs(pj.X - pi.X)
}
if math.Abs(pj.Y-pi.Y) > h {
h = math.Abs(pj.Y - pi.Y)
}
}
}
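// The nested loop above just finds the max pairwise |Δx| and |Δy| among the polygon's points, i.e. the width and height of its axis-aligned bounding box, which are then fed to "resolv.NewObject" below.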
barrierColliderShape := resolv.NewConvexPolygon()
for _, p := range barrier.Boundary.Points {
barrierColliderShape.AddPoints(p.X+barrier.Boundary.Anchor.X, p.Y+barrier.Boundary.Anchor.Y)
}
barrierCollider := resolv.NewObject(barrier.Boundary.Anchor.X, barrier.Boundary.Anchor.Y, w, h, "Barrier")
barrierCollider.SetShape(barrierColliderShape)
space.Add(barrierCollider)
}
}