Mirror of https://github.com/genxium/DelayNoMore, synced 2024-12-26 03:39:00 +00:00
Merge pull request #10 from genxium/force_confirmation_isolation
Improvement on smooth synchronization.
This commit is contained in: commit dd8b731ade
@@ -1,5 +1,10 @@
package models

import (
    . "battle_srv/protos"
    "sync"
)

type RingBuffer struct {
    Ed int32 // write index, open index
    St int32 // read index, closed index
@@ -21,6 +26,10 @@ func NewRingBuffer(n int32) *RingBuffer {
}

func (rb *RingBuffer) Put(pItem interface{}) {
    for rb.Cnt >= rb.N-1 {
        // Make room for the new element
        rb.Pop()
    }
    rb.Eles[rb.Ed] = pItem
    rb.EdFrameId++
    rb.Cnt++
@@ -69,5 +78,47 @@ func (rb *RingBuffer) GetByOffset(offsetFromSt int32) interface{} {
}

func (rb *RingBuffer) GetByFrameId(frameId int32) interface{} {
    if frameId >= rb.EdFrameId {
        return nil
    }
    return rb.GetByOffset(frameId - rb.StFrameId)
}
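For reference, the frame-id addressing above maps a global, ever-increasing frame id onto a buffer offset. A minimal standalone sketch of the same arithmetic (illustrative names, not part of the diff; the diff only shows the upper-bound check, the lower bound is presumably guarded inside GetByOffset):

package main

import "fmt"

// validFrameOffset mirrors GetByFrameId above: valid ids live in
// [stFrameId, edFrameId), and a valid id maps to offset id-stFrameId.
func validFrameOffset(frameId, stFrameId, edFrameId int32) (int32, bool) {
    if frameId >= edFrameId || frameId < stFrameId {
        return 0, false
    }
    return frameId - stFrameId, true
}

func main() {
    // E.g. after 164 puts where capacity pressure already evicted ids 0..99:
    offset, ok := validFrameOffset(130, 100, 164)
    fmt.Println(offset, ok) // 30 true
}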
func (rb *RingBuffer) cloneInputFrameDownsyncsByFrameIdRange(stFrameId, edFrameId int32, mux *sync.Mutex) (int32, []*InputFrameDownsync) {
    dst := make([]*InputFrameDownsync, 0, rb.Cnt)
    if nil != mux {
        mux.Lock()

        defer func() {
            mux.Unlock()
        }()
    }

    prevFrameFound := true
    j := stFrameId
    for j < edFrameId {
        tmp := rb.GetByFrameId(j)
        if nil == tmp {
            if false == prevFrameFound {
                // The "id"s are always consecutive
                break
            } else {
                prevFrameFound = false
                continue
            }
        }
        foo := tmp.(*InputFrameDownsync)
        bar := &InputFrameDownsync{
            InputFrameId:  foo.InputFrameId,
            InputList:     make([]uint64, len(foo.InputList)),
            ConfirmedList: foo.ConfirmedList,
        }
        for i, input := range foo.InputList {
            bar.InputList[i] = input
        }
        dst = append(dst, bar)
        j++
    }

    return j, dst
}
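The mutex parameter lets callers opt out of locking when they already hold Room.InputsBufferLock (see downsyncToSinglePlayer later in this diff). A standalone sketch of the underlying copy-then-release pattern, with illustrative types:

package main

import (
    "fmt"
    "sync"
)

type frame struct{ id int32 }

// cloneRange copies frames out while holding mux (if non-nil), mirroring
// cloneInputFrameDownsyncsByFrameIdRange above; names here are illustrative.
func cloneRange(src []*frame, mux *sync.Mutex) []*frame {
    if mux != nil {
        mux.Lock()
        defer mux.Unlock()
    }
    dst := make([]*frame, 0, len(src))
    for _, f := range src {
        cp := *f // the sender must never alias buffer-owned memory
        dst = append(dst, &cp)
    }
    return dst
}

func main() {
    var mux sync.Mutex
    buf := []*frame{{1}, {2}, {3}}
    out := cloneRange(buf, &mux)
    // Slow network I/O would happen here, after the lock is already released.
    fmt.Println(len(out))
}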
@@ -13,7 +13,6 @@ import (
    "github.com/solarlune/resolv"
    "go.uber.org/zap"
    "io/ioutil"
    "math"
    "math/rand"
    "os"
    "path/filepath"
@@ -37,7 +36,6 @@ const (
    DOWNSYNC_MSG_ACT_BATTLE_START = int32(0)

    DOWNSYNC_MSG_ACT_PLAYER_ADDED_AND_ACKED   = int32(-98)
    DOWNSYNC_MSG_ACT_PLAYER_READDED_AND_ACKED = int32(-97)
)

const (
@@ -154,12 +152,12 @@ type Room struct {
    State                    int32
    Index                    int
    RenderFrameId            int32
    CurDynamicsRenderFrameId int32 // [WARNING] The dynamics of backend is ALWAYS MOVING FORWARD BY ALL-CONFIRMED INPUTFRAMES (either by upsync or forced), i.e. no rollback
    CurDynamicsRenderFrameId int32 // [WARNING] The dynamics of backend is ALWAYS MOVING FORWARD BY ALL-CONFIRMED INPUTFRAMES (either by upsync or forced), i.e. no rollback; Moreover when "true == BackendDynamicsEnabled" we always have "Room.CurDynamicsRenderFrameId >= Room.RenderFrameId" because each "all-confirmed inputFrame" is applied on "all applicable renderFrames" in one-go hence often sees a future "renderFrame" earlier
    EffectivePlayerCount     int32
    DismissalWaitGroup       sync.WaitGroup
    Barriers                 map[int32]*Barrier
    InputsBuffer             *RingBuffer // Indices are STRICTLY consecutive
    DiscreteInputsBuffer     sync.Map    // Indices are NOT NECESSARILY consecutive
    InputsBufferLock         sync.Mutex
    RenderFrameBuffer        *RingBuffer
    LastAllConfirmedInputFrameId           int32
    LastAllConfirmedInputFrameIdWithChange int32
@@ -172,6 +170,7 @@ type Room struct {

    BulletBattleLocalIdCounter int32
    dilutedRollbackEstimatedDtNanos int64

    BattleColliderInfo // Compositing to send centralized magic numbers
}

@@ -372,7 +371,7 @@ func (pR *Room) InputsBufferString(allDetails bool) string {

func (pR *Room) StartBattle() {
    if RoomBattleStateIns.WAITING != pR.State {
        Logger.Warn("[StartBattle] Battle not started due to not being WAITING!", zap.Any("roomId", pR.Id), zap.Any("roomState", pR.State))
        Logger.Debug("[StartBattle] Battle not started due to not being WAITING!", zap.Any("roomId", pR.Id), zap.Any("roomState", pR.State))
        return
    }

@@ -397,14 +396,16 @@ func (pR *Room) StartBattle() {
    /**
     * Will be triggered from a goroutine which executes the critical `Room.AddPlayerIfPossible`, thus the `battleMainLoop` should be detached.
     * All of the consecutive stages, e.g. settlement, dismissal, should share the same goroutine with `battleMainLoop`.
     *
     * As "defer" is only applicable to function scope, the use of "pR.InputsBufferLock" within "battleMainLoop" is embedded into each subroutine call.
     */
    battleMainLoop := func() {
        defer func() {
            if r := recover(); r != nil {
                Logger.Error("battleMainLoop, recovery spot#1, recovered from: ", zap.Any("roomId", pR.Id), zap.Any("panic", r))
                pR.StopBattleForSettlement()
            }
            Logger.Info("The `battleMainLoop` is stopped for:", zap.Any("roomId", pR.Id))
            pR.StopBattleForSettlement()
            Logger.Info(fmt.Sprintf("The `battleMainLoop` for roomId=%v is stopped@renderFrameId=%v, with battleDurationFrames=%v:\n%v", pR.Id, pR.RenderFrameId, pR.BattleDurationFrames, pR.InputsBufferString(false))) // This takes some time to print
            pR.onBattleStoppedForSettlement()
        }()

@@ -413,16 +414,12 @@ func (pR *Room) StartBattle() {
        Logger.Info("The `battleMainLoop` is started for:", zap.Any("roomId", pR.Id))
        for {
            stCalculation := utils.UnixtimeNano()

            elapsedNanosSinceLastFrameIdTriggered := stCalculation - pR.LastRenderFrameIdTriggeredAt
            if elapsedNanosSinceLastFrameIdTriggered < pR.dilutedRollbackEstimatedDtNanos {
                Logger.Debug(fmt.Sprintf("Avoiding too fast frame@roomId=%v, renderFrameId=%v: elapsedNanosSinceLastFrameIdTriggered=%v", pR.Id, pR.RenderFrameId, elapsedNanosSinceLastFrameIdTriggered))
                continue
            if elapsedNanosSinceLastFrameIdTriggered < pR.RollbackEstimatedDtNanos {
                Logger.Debug(fmt.Sprintf("renderFrameId=%v@roomId=%v: Is backend running too fast? elapsedNanosSinceLastFrameIdTriggered=%v", pR.RenderFrameId, pR.Id, elapsedNanosSinceLastFrameIdTriggered))
            }

            if pR.RenderFrameId > pR.BattleDurationFrames {
                Logger.Info(fmt.Sprintf("The `battleMainLoop` for roomId=%v is stopped@renderFrameId=%v, with battleDurationFrames=%v:\n%v", pR.Id, pR.RenderFrameId, pR.BattleDurationFrames, pR.InputsBufferString(true)))
                pR.StopBattleForSettlement()
                return
            }
@@ -430,155 +427,58 @@ func (pR *Room) StartBattle() {
                return
            }

            // Prefab and buffer backend inputFrameDownsync
            if pR.shouldPrefabInputFrameDownsync(pR.RenderFrameId) {
                noDelayInputFrameId := pR.ConvertToInputFrameId(pR.RenderFrameId, 0)
                pR.prefabInputFrameDownsync(noDelayInputFrameId)
            if 0 == pR.RenderFrameId {
                for playerId, player := range pR.Players {
                    thatPlayerBattleState := atomic.LoadInt32(&(player.BattleState)) // Might be changed in "OnPlayerDisconnected/OnPlayerLost" from other threads
                    // [WARNING] DON'T try to send any message to an inactive player!
                    switch thatPlayerBattleState {
                    case PlayerBattleStateIns.DISCONNECTED:
                    case PlayerBattleStateIns.LOST:
                    case PlayerBattleStateIns.EXPELLED_DURING_GAME:
                    case PlayerBattleStateIns.EXPELLED_IN_DISMISSAL:
                        continue
                    }

            pR.markConfirmationIfApplicable()
            unconfirmedMask := uint64(0)
            if pR.BackendDynamicsEnabled {
                // Force setting all-confirmed of buffered inputFrames periodically
                unconfirmedMask = pR.forceConfirmationIfApplicable()
                    kickoffFrame := pR.RenderFrameBuffer.GetByFrameId(0).(*RoomDownsyncFrame)
                    pR.sendSafely(kickoffFrame, nil, DOWNSYNC_MSG_ACT_BATTLE_START, playerId)
            }

            upperToSendInputFrameId := atomic.LoadInt32(&(pR.LastAllConfirmedInputFrameId))
            /*
                [WARNING]
                Upon resynced on frontend, "refRenderFrameId" MUST BE CAPPED somehow by "upperToSendInputFrameId", if frontend resyncs itself to a more advanced value than given below, upon the next renderFrame tick on the frontend it might generate non-consecutive "nextInputFrameId > frontend.recentInputCache.edFrameId+1".

                If "NstDelayFrames" becomes larger, "pR.RenderFrameId - refRenderFrameId" possibly becomes larger because the force confirmation is delayed more.

                Upon resync, it's still possible that "refRenderFrameId < frontend.chaserRenderFrameId" -- and this is allowed.
            */
            refRenderFrameId := pR.ConvertToGeneratingRenderFrameId(upperToSendInputFrameId) + (1 << pR.InputScaleFrames) - 1
            if refRenderFrameId > pR.RenderFrameId {
                refRenderFrameId = pR.RenderFrameId
                Logger.Info(fmt.Sprintf("In `battleMainLoop` for roomId=%v sent out kickoffFrame", pR.Id))
            }

            dynamicsDuration := int64(0)
            unconfirmedMask := uint64(0)

            // Prefab and buffer backend inputFrameDownsync
            if pR.BackendDynamicsEnabled {
                if pR.shouldPrefabInputFrameDownsync(pR.RenderFrameId) {
                    noDelayInputFrameId := pR.ConvertToInputFrameId(pR.RenderFrameId, 0)
                    if existingInputFrame := pR.InputsBuffer.GetByFrameId(noDelayInputFrameId); nil == existingInputFrame {
                        pR.prefabInputFrameDownsync(noDelayInputFrameId, true)
                    }
                }

                // Force setting all-confirmed of buffered inputFrames periodically, kindly note that if "pR.BackendDynamicsEnabled", what we want to achieve is "recovery upon reconnection", which certainly requires "forceConfirmationIfApplicable" to move "pR.LastAllConfirmedInputFrameId" forward as much as possible
                unconfirmedMask = pR.forceConfirmationIfApplicable()

                if 0 <= pR.LastAllConfirmedInputFrameId {
                    dynamicsStartedAt := utils.UnixtimeNano()
                    // Apply "all-confirmed inputFrames" to move forward "pR.CurDynamicsRenderFrameId"
                    nextDynamicsRenderFrameId := pR.ConvertToLastUsedRenderFrameId(pR.LastAllConfirmedInputFrameId, pR.InputDelayFrames)
                    Logger.Debug(fmt.Sprintf("roomId=%v, room.RenderFrameId=%v, LastAllConfirmedInputFrameId=%v, InputDelayFrames=%v, nextDynamicsRenderFrameId=%v", pR.Id, pR.RenderFrameId, pR.LastAllConfirmedInputFrameId, pR.InputDelayFrames, nextDynamicsRenderFrameId))
                    Logger.Debug(fmt.Sprintf("roomId=%v, room.RenderFrameId=%v, room.CurDynamicsRenderFrameId=%v, LastAllConfirmedInputFrameId=%v, InputDelayFrames=%v, nextDynamicsRenderFrameId=%v", pR.Id, pR.RenderFrameId, pR.CurDynamicsRenderFrameId, pR.LastAllConfirmedInputFrameId, pR.InputDelayFrames, nextDynamicsRenderFrameId))
                    pR.applyInputFrameDownsyncDynamics(pR.CurDynamicsRenderFrameId, nextDynamicsRenderFrameId, pR.collisionSpaceOffsetX, pR.collisionSpaceOffsetY)
                    dynamicsDuration = utils.UnixtimeNano() - dynamicsStartedAt
                }

                // [WARNING] The following inequality is seldom true, but just to avoid that in good network condition the frontend resyncs itself to a "too advanced frontend.renderFrameId", and then starts upsyncing "too advanced inputFrameId".
                if refRenderFrameId > pR.CurDynamicsRenderFrameId {
                    refRenderFrameId = pR.CurDynamicsRenderFrameId
                }
            }

            for playerId, player := range pR.Players {
                if swapped := atomic.CompareAndSwapInt32(&player.BattleState, PlayerBattleStateIns.ACTIVE, PlayerBattleStateIns.ACTIVE); !swapped {
                    // [WARNING] DON'T send anything if the player is not yet active, because it could jam the channel and cause significant delay upon "battle recovery for reconnected player".
                    continue
                }
                if 0 == pR.RenderFrameId {
                    kickoffFrame := pR.RenderFrameBuffer.GetByFrameId(0).(*RoomDownsyncFrame)
                    pR.sendSafely(kickoffFrame, nil, DOWNSYNC_MSG_ACT_BATTLE_START, playerId)
                } else {
                    // [WARNING] Websocket is TCP-based, thus no need to re-send a previously sent inputFrame to a same player!
                    toSendInputFrames := make([]*InputFrameDownsync, 0, pR.InputsBuffer.Cnt)
                    candidateToSendInputFrameId := pR.Players[playerId].LastSentInputFrameId + 1
                    if candidateToSendInputFrameId < pR.InputsBuffer.StFrameId {
                        // [WARNING] As "player.LastSentInputFrameId <= lastAllConfirmedInputFrameIdWithChange" for each iteration, and "lastAllConfirmedInputFrameIdWithChange <= lastAllConfirmedInputFrameId" where the latter is used to "applyInputFrameDownsyncDynamics" and then evict "pR.InputsBuffer", thus there's a very high possibility that "player.LastSentInputFrameId" is already evicted.
                        Logger.Warn(fmt.Sprintf("LastSentInputFrameId already popped: roomId=%v, playerId=%v, lastSentInputFrameId=%v, playerAckingInputFrameId=%v, InputsBuffer=%v", pR.Id, playerId, candidateToSendInputFrameId-1, player.AckingInputFrameId, pR.InputsBufferString(false)))
                        candidateToSendInputFrameId = pR.InputsBuffer.StFrameId
                    }

                    if MAGIC_LAST_SENT_INPUT_FRAME_ID_READDED == player.LastSentInputFrameId {
                        // A rejoined player, should guarantee that when it resyncs to "refRenderFrameId" a matching inputFrame to apply exists
                        candidateToSendInputFrameId = pR.ConvertToInputFrameId(refRenderFrameId, pR.InputDelayFrames)
                        Logger.Warn(fmt.Sprintf("Resetting refRenderFrame for rejoined player: roomId=%v, playerId=%v, refRenderFrameId=%v, candidateToSendInputFrameId=%v, upperToSendInputFrameId=%v, lastSentInputFrameId=%v, playerAckingInputFrameId=%v", pR.Id, playerId, refRenderFrameId, candidateToSendInputFrameId, upperToSendInputFrameId, player.LastSentInputFrameId, player.AckingInputFrameId))
                    }

                    // [WARNING] EDGE CASE HERE: Upon initialization, all of "lastAllConfirmedInputFrameId", "lastAllConfirmedInputFrameIdWithChange" and "anchorInputFrameId" are "-1", thus "candidateToSendInputFrameId" starts with "0", however "inputFrameId: 0" might not have been all confirmed!
                    for candidateToSendInputFrameId <= upperToSendInputFrameId {
                        tmp := pR.InputsBuffer.GetByFrameId(candidateToSendInputFrameId)
                        if nil == tmp {
                            panic(fmt.Sprintf("Required inputFrameId=%v for roomId=%v, playerId=%v doesn't exist! InputsBuffer=%v", candidateToSendInputFrameId, pR.Id, playerId, pR.InputsBufferString(false)))
                        }
                        f := tmp.(*InputFrameDownsync)
                        if pR.inputFrameIdDebuggable(candidateToSendInputFrameId) {
                            Logger.Debug("inputFrame lifecycle#3[sending]:", zap.Any("roomId", pR.Id), zap.Any("playerId", playerId), zap.Any("playerAckingInputFrameId", player.AckingInputFrameId), zap.Any("inputFrameId", candidateToSendInputFrameId), zap.Any("inputFrameId-doublecheck", f.InputFrameId), zap.Any("InputsBuffer", pR.InputsBufferString(false)), zap.Any("ConfirmedList", f.ConfirmedList))
                        }
                        toSendInputFrames = append(toSendInputFrames, f)
                        candidateToSendInputFrameId++
                    }

                    if 0 >= len(toSendInputFrames) {
                        // [WARNING] When sending DOWNSYNC_MSG_ACT_FORCED_RESYNC, there MUST BE accompanying "toSendInputFrames" for calculating "refRenderFrameId"!
                        if MAGIC_LAST_SENT_INPUT_FRAME_ID_READDED == player.LastSentInputFrameId {
                            Logger.Warn(fmt.Sprintf("Not sending due to empty toSendInputFrames: roomId=%v, playerId=%v, refRenderFrameId=%v, upperToSendInputFrameId=%v, lastSentInputFrameId=%v, playerAckingInputFrameId=%v", pR.Id, playerId, refRenderFrameId, upperToSendInputFrameId, player.LastSentInputFrameId, player.AckingInputFrameId))
                        }
                        continue
                    }

                    /*
                        Resync helps
                        1. when player with a slower frontend clock lags significantly behind and thus wouldn't get its inputUpsync recognized due to faster "forceConfirmation"
                        2. reconnection
                    */
                    shouldResync1 := (MAGIC_LAST_SENT_INPUT_FRAME_ID_READDED == player.LastSentInputFrameId)
                    shouldResync2 := (0 < (unconfirmedMask & uint64(1<<uint32(player.JoinIndex-1)))) // This condition is critical, if we don't send resync upon this condition, the "reconnected or slowly-clocking player" might never get its input synced
                    // shouldResync2 := (0 < unconfirmedMask) // An easier version of the above, might keep sending "refRenderFrame"s to still connected players when any player is disconnected
                    if pR.BackendDynamicsEnabled && (shouldResync1 || shouldResync2) {
                        tmp := pR.RenderFrameBuffer.GetByFrameId(refRenderFrameId)
                        if nil == tmp {
                            panic(fmt.Sprintf("Required refRenderFrameId=%v for roomId=%v, playerId=%v, candidateToSendInputFrameId=%v doesn't exist! InputsBuffer=%v, RenderFrameBuffer=%v", refRenderFrameId, pR.Id, playerId, candidateToSendInputFrameId, pR.InputsBufferString(false), pR.RenderFrameBufferString()))
                        }
                        refRenderFrame := tmp.(*RoomDownsyncFrame)
                        pR.sendSafely(refRenderFrame, toSendInputFrames, DOWNSYNC_MSG_ACT_FORCED_RESYNC, playerId)
                    } else {
                        pR.sendSafely(nil, toSendInputFrames, DOWNSYNC_MSG_ACT_INPUT_BATCH, playerId)
                    }
                    pR.Players[playerId].LastSentInputFrameId = candidateToSendInputFrameId - 1
                }
            }

            if pR.BackendDynamicsEnabled {
                // Evict no longer required "RenderFrameBuffer"
                for pR.RenderFrameBuffer.N < pR.RenderFrameBuffer.Cnt || (0 < pR.RenderFrameBuffer.Cnt && pR.RenderFrameBuffer.StFrameId < refRenderFrameId) {
                    _ = pR.RenderFrameBuffer.Pop()
                }
            }

            minToKeepInputFrameId := pR.ConvertToInputFrameId(refRenderFrameId, pR.InputDelayFrames) - pR.SpAtkLookupFrames
            /*
                [WARNING]
                The following update to "minToKeepInputFrameId" is necessary because when "false == pR.BackendDynamicsEnabled", the variable "refRenderFrameId" is not well defined.
            */
            minLastSentInputFrameId := int32(math.MaxInt32)
            for _, player := range pR.Players {
                if PlayerBattleStateIns.ACTIVE != player.BattleState {
                    continue
                }
                if player.LastSentInputFrameId >= minLastSentInputFrameId {
                    continue
                }
                minLastSentInputFrameId = player.LastSentInputFrameId
            }
            if minLastSentInputFrameId < minToKeepInputFrameId {
                minToKeepInputFrameId = minLastSentInputFrameId
            }
            for pR.InputsBuffer.N < pR.InputsBuffer.Cnt || (0 < pR.InputsBuffer.Cnt && pR.InputsBuffer.StFrameId < minToKeepInputFrameId) {
                f := pR.InputsBuffer.Pop().(*InputFrameDownsync)
                if pR.inputFrameIdDebuggable(f.InputFrameId) {
                    // Popping of an "inputFrame" happens only AFTER it has been all-confirmed, because it requires the "inputFrame" to be all acked
                    Logger.Debug("inputFrame lifecycle#4[popped]:", zap.Any("roomId", pR.Id), zap.Any("inputFrameId", f.InputFrameId), zap.Any("minToKeepInputFrameId", minToKeepInputFrameId), zap.Any("InputsBuffer", pR.InputsBufferString(false)))
                    if 0 < unconfirmedMask {
                        Logger.Debug(fmt.Sprintf("roomId=%v, room.RenderFrameId=%v, room.CurDynamicsRenderFrameId=%v, room.LastAllConfirmedInputFrameId=%v, unconfirmedMask=%v", pR.Id, pR.RenderFrameId, pR.CurDynamicsRenderFrameId, pR.LastAllConfirmedInputFrameId, unconfirmedMask))
                        // Otherwise no need to downsync immediately
                        pR.downsyncToAllPlayers(pR.LastAllConfirmedInputFrameId, unconfirmedMask, false)
                    }
                }

            pR.RenderFrameId++
            elapsedInCalculation := (utils.UnixtimeNano() - stCalculation)
            if elapsedInCalculation > pR.dilutedRollbackEstimatedDtNanos {
                Logger.Warn(fmt.Sprintf("SLOW FRAME! Elapsed time statistics: roomId=%v, room.RenderFrameId=%v, elapsedInCalculation=%v ns, dynamicsDuration=%v ns, dilutedRollbackEstimatedDtNanos=%v", pR.Id, pR.RenderFrameId, elapsedInCalculation, dynamicsDuration, pR.dilutedRollbackEstimatedDtNanos))
            pR.LastRenderFrameIdTriggeredAt = utils.UnixtimeNano()
            elapsedInCalculation := (pR.LastRenderFrameIdTriggeredAt - stCalculation)
            if elapsedInCalculation > pR.RollbackEstimatedDtNanos {
                Logger.Warn(fmt.Sprintf("SLOW FRAME! Elapsed time statistics: roomId=%v, room.RenderFrameId=%v, elapsedInCalculation=%v ns, dynamicsDuration=%v ns, RollbackEstimatedDtNanos=%v", pR.Id, pR.RenderFrameId, elapsedInCalculation, dynamicsDuration, pR.RollbackEstimatedDtNanos))
            }
            time.Sleep(time.Duration(pR.dilutedRollbackEstimatedDtNanos - elapsedInCalculation))
        }
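The loop above paces itself with dilutedRollbackEstimatedDtNanos: it skips a tick that arrives too early and sleeps off whatever budget remains after the frame's work. A minimal self-contained sketch of that pacing scheme (illustrative constants, not the exact Room fields):

package main

import (
    "fmt"
    "time"
)

func main() {
    const dilutedDtNanos = int64(18181817) // ~55 fps, slightly slower than the 60 fps frontends
    lastTriggeredAt := time.Now().UnixNano()
    for i := 0; i < 3; i++ {
        st := time.Now().UnixNano()
        if st-lastTriggeredAt < dilutedDtNanos {
            // Too early for the next frame; the real loop `continue`s here.
        }
        // ... per-frame work (prefab, force-confirm, dynamics, downsync) ...
        lastTriggeredAt = time.Now().UnixNano()
        elapsed := lastTriggeredAt - st
        if elapsed > dilutedDtNanos {
            fmt.Println("SLOW FRAME") // over budget; nothing left to sleep off
            continue
        }
        time.Sleep(time.Duration(dilutedDtNanos - elapsed)) // sleep off the remaining budget
    }
}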
@@ -595,39 +495,60 @@ func (pR *Room) toDiscreteInputsBufferIndex(inputFrameId int32, joinIndex int32)
}

func (pR *Room) OnBattleCmdReceived(pReq *WsReq) {
    // [WARNING] This function "OnBattleCmdReceived" could be called by different ws sessions and thus from different threads!
    if swapped := atomic.CompareAndSwapInt32(&pR.State, RoomBattleStateIns.IN_BATTLE, RoomBattleStateIns.IN_BATTLE); !swapped {
        return
    }

    playerId := pReq.PlayerId
    var player *Player = nil
    var existent bool = false
    inputFrameUpsyncBatch := pReq.InputFrameUpsyncBatch
    ackingFrameId := pReq.AckingFrameId
    ackingInputFrameId := pReq.AckingInputFrameId

    if _, existent := pR.Players[playerId]; !existent {
    if player, existent = pR.Players[playerId]; !existent {
        Logger.Warn(fmt.Sprintf("upcmd player doesn't exist: roomId=%v, playerId=%v", pR.Id, playerId))
        return
    }

    if swapped := atomic.CompareAndSwapInt32(&(pR.Players[playerId].AckingFrameId), pR.Players[playerId].AckingFrameId, ackingFrameId); !swapped {
        panic(fmt.Sprintf("Failed to update AckingFrameId to %v for roomId=%v, playerId=%v", ackingFrameId, pR.Id, playerId))
    }

    if swapped := atomic.CompareAndSwapInt32(&(pR.Players[playerId].AckingInputFrameId), pR.Players[playerId].AckingInputFrameId, ackingInputFrameId); !swapped {
        panic(fmt.Sprintf("Failed to update AckingInputFrameId to %v for roomId=%v, playerId=%v", ackingInputFrameId, pR.Id, playerId))
    }
    atomic.StoreInt32(&(player.AckingFrameId), ackingFrameId)
    atomic.StoreInt32(&(player.AckingInputFrameId), ackingInputFrameId)

    Logger.Debug(fmt.Sprintf("InputsBufferLock about to lock: roomId=%v", pR.Id))
    pR.InputsBufferLock.Lock()
    Logger.Debug(fmt.Sprintf("InputsBufferLock locked: roomId=%v", pR.Id))
    defer func() {
        pR.InputsBufferLock.Unlock()
        Logger.Debug(fmt.Sprintf("InputsBufferLock unlocked: roomId=%v", pR.Id))
    }()
    for _, inputFrameUpsync := range inputFrameUpsyncBatch {
        clientInputFrameId := inputFrameUpsync.InputFrameId
        if clientInputFrameId < pR.InputsBuffer.StFrameId {
            // The updates to "pR.InputsBuffer.StFrameId" are monotonically increasing, thus if "clientInputFrameId < pR.InputsBuffer.StFrameId" at any moment of time, it stays obsolete in the future.
            Logger.Debug(fmt.Sprintf("Omitting obsolete inputFrameUpsync: roomId=%v, playerId=%v, clientInputFrameId=%v, InputsBuffer=%v", pR.Id, playerId, clientInputFrameId, pR.InputsBufferString(false)))
            Logger.Warn(fmt.Sprintf("Omitting obsolete inputFrameUpsync: roomId=%v, playerId=%v, clientInputFrameId=%v, InputsBuffer=%v", pR.Id, playerId, clientInputFrameId, pR.InputsBufferString(false)))
            continue
        }
        bufIndex := pR.toDiscreteInputsBufferIndex(clientInputFrameId, pReq.JoinIndex)
        pR.DiscreteInputsBuffer.Store(bufIndex, inputFrameUpsync)

        // TODO: "pR.DiscreteInputsBuffer" might become too large with outdated "inputFrameUpsync" items, maintain another queue ordered by timestamp to evict them
        if clientInputFrameId > pR.InputsBuffer.EdFrameId {
            Logger.Warn(fmt.Sprintf("Dropping too advanced inputFrameUpsync: roomId=%v, playerId=%v, clientInputFrameId=%v, InputsBuffer=%v; is this player cheating?", pR.Id, playerId, clientInputFrameId, pR.InputsBufferString(false)))
            continue
        }
        var targetInputFrameDownsync *InputFrameDownsync = nil
        if clientInputFrameId == pR.InputsBuffer.EdFrameId {
            targetInputFrameDownsync = pR.prefabInputFrameDownsync(clientInputFrameId, false)
            Logger.Debug(fmt.Sprintf("OnBattleCmdReceived-Prefabbed new inputFrameDownsync from inputFrameUpsync: roomId=%v, playerId=%v, clientInputFrameId=%v, InputsBuffer=%v", pR.Id, playerId, clientInputFrameId, pR.InputsBufferString(false)))
        } else {
            targetInputFrameDownsync = pR.InputsBuffer.GetByFrameId(clientInputFrameId).(*InputFrameDownsync)
            Logger.Debug(fmt.Sprintf("OnBattleCmdReceived-stuffing inputFrameDownsync from inputFrameUpsync: roomId=%v, playerId=%v, clientInputFrameId=%v, InputsBuffer=%v", pR.Id, playerId, clientInputFrameId, pR.InputsBufferString(false)))
        }
        targetInputFrameDownsync.InputList[player.JoinIndex-1] = inputFrameUpsync.Encoded
        targetInputFrameDownsync.ConfirmedList |= uint64(1 << uint32(player.JoinIndex-1))
    }
    newAllConfirmedCount := pR.markConfirmationIfApplicable()
    if 0 < newAllConfirmedCount {
        // Downsync new all-confirmed inputFrames asap
        unconfirmedMask := uint64(0)
        pR.downsyncToAllPlayers(pR.LastAllConfirmedInputFrameId, unconfirmedMask, true)
    }
}
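ConfirmedList above is a per-player bitmask keyed by joinIndex: player j occupies bit j-1, and an inputFrame is all-confirmed once the mask equals (1<<capacity)-1. A quick standalone illustration:

package main

import "fmt"

func main() {
    capacity := uint32(2)
    allConfirmedMask := uint64((1 << capacity) - 1) // 0b11 for a 2-player room

    var confirmedList uint64
    confirmedList |= uint64(1 << uint32(1-1))      // joinIndex 1 upsynced
    fmt.Println(confirmedList == allConfirmedMask) // false, still waiting on joinIndex 2
    confirmedList |= uint64(1 << uint32(2-1))      // joinIndex 2 upsynced
    fmt.Println(confirmedList == allConfirmedMask) // true: all-confirmed
}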
@@ -782,8 +703,7 @@ func (pR *Room) OnDismissed() {
    pR.Barriers = make(map[int32]*Barrier)
    pR.RenderCacheSize = 1024
    pR.RenderFrameBuffer = NewRingBuffer(pR.RenderCacheSize)
    pR.DiscreteInputsBuffer = sync.Map{}
    pR.InputsBuffer = NewRingBuffer((pR.RenderCacheSize >> 2) + 1)
    pR.InputsBuffer = NewRingBuffer((pR.RenderCacheSize >> 1) + 1)

    pR.LastAllConfirmedInputFrameId = -1
    pR.LastAllConfirmedInputFrameIdWithChange = -1
@@ -796,13 +716,13 @@ func (pR *Room) OnDismissed() {
    pR.InputScaleFrames = uint32(2)
    pR.ServerFps = 60
    pR.RollbackEstimatedDtMillis = 16.667 // Use fixed-and-low-precision to mitigate the inconsistent floating-point-number issue between Golang and JavaScript
    pR.RollbackEstimatedDtNanos = 16666666 // A little smaller than the actual per frame time, just for preventing FAST FRAME
    dilutionFactor := 12
    pR.dilutedRollbackEstimatedDtNanos = int64(16666666 * (dilutionFactor) / (dilutionFactor - 1)) // [WARNING] Only used in controlling "battleMainLoop" to keep a frame rate lower than that of the frontends, such that upon resync(i.e. BackendDynamicsEnabled=true), the frontends would have bigger chances to keep up with or even surpass the backend calculation
    pR.RollbackEstimatedDtNanos = 16666666 // A little smaller than the actual per frame time, just for logging FAST FRAME
    dilutedServerFps := int64(58)
    pR.dilutedRollbackEstimatedDtNanos = pR.RollbackEstimatedDtNanos * (int64(pR.ServerFps) / dilutedServerFps)
    pR.BattleDurationFrames = 30 * pR.ServerFps
    pR.BattleDurationNanos = int64(pR.BattleDurationFrames) * (pR.RollbackEstimatedDtNanos + 1)
    pR.InputFrameUpsyncDelayTolerance = 2
    pR.MaxChasingRenderFramesPerUpdate = 5
    pR.MaxChasingRenderFramesPerUpdate = 8

    pR.BackendDynamicsEnabled = true // [WARNING] When "false", recovery upon reconnection wouldn't work!
    punchSkillId := int32(1)
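Both dilution formulas shown above aim at the same thing: a backend tick slightly longer than 16666666 ns so the frontends can keep up. Note that the factor-based form really stretches the tick, while the fps-ratio form as written truncates under integer division (60/58 == 1) and therefore changes nothing; the ratio would need to be applied multiply-first. A small check of the arithmetic:

package main

import "fmt"

func main() {
    const rollbackDtNanos = int64(16666666)

    dilutionFactor := int64(12)
    fmt.Println(rollbackDtNanos * dilutionFactor / (dilutionFactor - 1)) // 18181817 (~55 fps)

    serverFps, dilutedServerFps := int64(60), int64(58)
    fmt.Println(rollbackDtNanos * (serverFps / dilutedServerFps)) // 16666666: 60/58 truncates to 1
    fmt.Println(rollbackDtNanos * serverFps / dilutedServerFps)   // 17241378 (~58 fps), multiply first
}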
@@ -843,10 +763,16 @@ func (pR *Room) OnDismissed() {
}

func (pR *Room) expelPlayerDuringGame(playerId int32) {
    defer pR.onPlayerExpelledDuringGame(playerId)
    if signalToCloseConnOfThisPlayer, existent := pR.PlayerSignalToCloseDict[playerId]; existent {
        signalToCloseConnOfThisPlayer(Constants.RetCode.UnknownError, "") // TODO: Specify an error code
    }
    pR.onPlayerExpelledDuringGame(playerId)
}

func (pR *Room) expelPlayerForDismissal(playerId int32) {
    if signalToCloseConnOfThisPlayer, existent := pR.PlayerSignalToCloseDict[playerId]; existent {
        signalToCloseConnOfThisPlayer(Constants.RetCode.UnknownError, "") // TODO: Specify an error code
    }
    pR.onPlayerExpelledForDismissal(playerId)
}
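The change above swaps `defer pR.onPlayerExpelledDuringGame(playerId)` for a plain call after the close signal, making the two expel functions symmetric. Either way the handler runs after the signal at normal return; what defer additionally gave was execution even if the signal call panics. Seen in isolation:

package main

import "fmt"

func withDefer() {
    defer fmt.Println("onExpelled") // registered first, still runs last (and even on panic)
    fmt.Println("signalClose")
}

func direct() {
    fmt.Println("signalClose")
    fmt.Println("onExpelled") // same order at return, but skipped if signalClose panics
}

func main() {
    withDefer() // signalClose, then onExpelled
    direct()    // signalClose, then onExpelled
}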
@@ -867,8 +793,9 @@ func (pR *Room) OnPlayerDisconnected(playerId int32) {
        }
    }()

    if _, existent := pR.Players[playerId]; existent {
        switch pR.Players[playerId].BattleState {
    if player, existent := pR.Players[playerId]; existent {
        thatPlayerBattleState := atomic.LoadInt32(&(player.BattleState))
        switch thatPlayerBattleState {
        case PlayerBattleStateIns.DISCONNECTED:
        case PlayerBattleStateIns.LOST:
        case PlayerBattleStateIns.EXPELLED_DURING_GAME:
@@ -882,19 +809,20 @@ func (pR *Room) OnPlayerDisconnected(playerId int32) {
        return
    }

    switch pR.State {
    currRoomBattleState := atomic.LoadInt32(&(pR.State))
    switch currRoomBattleState {
    case RoomBattleStateIns.WAITING:
        pR.onPlayerLost(playerId)
        delete(pR.Players, playerId) // Note that this statement MUST be put AFTER `pR.onPlayerLost(...)` to avoid nil pointer exception.
        if 0 == pR.EffectivePlayerCount {
            pR.State = RoomBattleStateIns.IDLE
            atomic.StoreInt32(&(pR.State), RoomBattleStateIns.IDLE)
        }
        pR.updateScore()
        Logger.Info("Player disconnected while room is at RoomBattleStateIns.WAITING:", zap.Any("playerId", playerId), zap.Any("roomId", pR.Id), zap.Any("nowRoomBattleState", pR.State), zap.Any("nowRoomEffectivePlayerCount", pR.EffectivePlayerCount))
    default:
        pR.Players[playerId].BattleState = PlayerBattleStateIns.DISCONNECTED
        atomic.StoreInt32(&(pR.Players[playerId].BattleState), PlayerBattleStateIns.DISCONNECTED)
        pR.clearPlayerNetworkSession(playerId) // Still need to clear the network session pointers, because "OnPlayerDisconnected" is only triggered from "signalToCloseConnOfThisPlayer" in "ws/serve.go", when the same player reconnects the network session pointers will be re-assigned
        Logger.Info("Player disconnected from room:", zap.Any("playerId", playerId), zap.Any("playerBattleState", pR.Players[playerId].BattleState), zap.Any("roomId", pR.Id), zap.Any("nowRoomBattleState", pR.State), zap.Any("nowRoomEffectivePlayerCount", pR.EffectivePlayerCount))
        Logger.Warn("OnPlayerDisconnected finished:", zap.Any("roomId", pR.Id), zap.Any("playerId", playerId), zap.Any("playerBattleState", pR.Players[playerId].BattleState), zap.Any("nowRoomBattleState", pR.State), zap.Any("nowRoomEffectivePlayerCount", pR.EffectivePlayerCount))
    }
}

@@ -905,23 +833,23 @@ func (pR *Room) onPlayerLost(playerId int32) {
        }
    }()
    if player, existent := pR.Players[playerId]; existent {
        player.BattleState = PlayerBattleStateIns.LOST
        atomic.StoreInt32(&(player.BattleState), PlayerBattleStateIns.LOST)
        pR.clearPlayerNetworkSession(playerId)
        pR.EffectivePlayerCount--
        indiceInJoinIndexBooleanArr := int(player.JoinIndex - 1)
        if (0 <= indiceInJoinIndexBooleanArr) && (indiceInJoinIndexBooleanArr < len(pR.JoinIndexBooleanArr)) {
            pR.JoinIndexBooleanArr[indiceInJoinIndexBooleanArr] = false
        } else {
            Logger.Warn("Room OnPlayerLost, pR.JoinIndexBooleanArr is out of range: ", zap.Any("playerId", playerId), zap.Any("roomId", pR.Id), zap.Any("indiceInJoinIndexBooleanArr", indiceInJoinIndexBooleanArr), zap.Any("len(pR.JoinIndexBooleanArr)", len(pR.JoinIndexBooleanArr)))
            Logger.Warn("Room OnPlayerLost, pR.JoinIndexBooleanArr is out of range: ", zap.Any("roomId", pR.Id), zap.Any("playerId", playerId), zap.Any("indiceInJoinIndexBooleanArr", indiceInJoinIndexBooleanArr), zap.Any("len(pR.JoinIndexBooleanArr)", len(pR.JoinIndexBooleanArr)))
        }
        player.JoinIndex = MAGIC_JOIN_INDEX_INVALID
        Logger.Info("Room OnPlayerLost: ", zap.Any("playerId", playerId), zap.Any("roomId", pR.Id), zap.Any("resulted pR.JoinIndexBooleanArr", pR.JoinIndexBooleanArr))
        Logger.Warn("OnPlayerLost: ", zap.Any("roomId", pR.Id), zap.Any("playerId", playerId), zap.Any("resulted pR.JoinIndexBooleanArr", pR.JoinIndexBooleanArr))
    }
}

func (pR *Room) clearPlayerNetworkSession(playerId int32) {
    if _, y := pR.PlayerDownsyncSessionDict[playerId]; y {
        Logger.Info("sending termination symbol for:", zap.Any("playerId", playerId), zap.Any("roomId", pR.Id))
        Logger.Debug("clearPlayerNetworkSession:", zap.Any("roomId", pR.Id), zap.Any("playerId", playerId))
        delete(pR.PlayerDownsyncSessionDict, playerId)
        delete(pR.PlayerSignalToCloseDict, playerId)
    }
@@ -964,7 +892,7 @@ func (pR *Room) onPlayerAdded(playerId int32) {
    }

    pR.updateScore()
    Logger.Info("onPlayerAdded:", zap.Any("playerId", playerId), zap.Any("roomId", pR.Id), zap.Any("joinIndex", pR.Players[playerId].JoinIndex), zap.Any("EffectivePlayerCount", pR.EffectivePlayerCount), zap.Any("resulted pR.JoinIndexBooleanArr", pR.JoinIndexBooleanArr), zap.Any("RoomBattleState", pR.State))
    Logger.Info("onPlayerAdded:", zap.Any("roomId", pR.Id), zap.Any("playerId", playerId), zap.Any("playerBattleState", pR.Players[playerId].BattleState), zap.Any("joinIndex", pR.Players[playerId].JoinIndex), zap.Any("EffectivePlayerCount", pR.EffectivePlayerCount), zap.Any("resulted pR.JoinIndexBooleanArr", pR.JoinIndexBooleanArr), zap.Any("RoomBattleState", pR.State))
}

func (pR *Room) onPlayerReAdded(playerId int32) {
@@ -985,8 +913,16 @@ func (pR *Room) OnPlayerBattleColliderAcked(playerId int32) bool {
        return false
    }

    // Broadcast added or readded player info to all players in the same room
    for _, eachPlayer := range pR.Players {
    Logger.Debug(fmt.Sprintf("OnPlayerBattleColliderAcked-before: roomId=%v, roomState=%v, targetPlayerId=%v, targetPlayerBattleState=%v, capacity=%v, EffectivePlayerCount=%v", pR.Id, pR.State, targetPlayer.Id, targetPlayer.BattleState, pR.Capacity, pR.EffectivePlayerCount))
    switch targetPlayer.BattleState {
    case PlayerBattleStateIns.ADDED_PENDING_BATTLE_COLLIDER_ACK:
        playerAckedFrame := &RoomDownsyncFrame{
            Id:      pR.RenderFrameId,
            Players: toPbPlayers(pR.Players, true),
        }

        // Broadcast normally added player info to all players in the same room
        for thatPlayerId, thatPlayer := range pR.Players {
            /*
                [WARNING]
                This `playerAckedFrame` is the first ever "RoomDownsyncFrame" for every "PersistentSessionClient on the frontend", and it goes right after each "BattleColliderInfo".
@@ -995,37 +931,28 @@ func (pR *Room) OnPlayerBattleColliderAcked(playerId int32) bool {

                This function is triggered by an upsync message via WebSocket, thus downsync sending is also available by now.
            */
            currPlayerBattleState := atomic.LoadInt32(&(eachPlayer.BattleState))
            if PlayerBattleStateIns.DISCONNECTED == currPlayerBattleState || PlayerBattleStateIns.LOST == currPlayerBattleState {
                // [WARNING] DON'T try to send any message to an inactive player!
                continue
            thatPlayerBattleState := atomic.LoadInt32(&(thatPlayer.BattleState))
            Logger.Debug(fmt.Sprintf("OnPlayerBattleColliderAcked-middle: roomId=%v, roomState=%v, targetPlayerId=%v, targetPlayerBattleState=%v, thatPlayerId=%v, thatPlayerBattleState=%v", pR.Id, pR.State, targetPlayer.Id, targetPlayer.BattleState, thatPlayer.Id, thatPlayerBattleState))
            if thatPlayerId == targetPlayer.Id || (PlayerBattleStateIns.ADDED_PENDING_BATTLE_COLLIDER_ACK == thatPlayerBattleState || PlayerBattleStateIns.ACTIVE == thatPlayerBattleState) {
                Logger.Debug(fmt.Sprintf("OnPlayerBattleColliderAcked-sending DOWNSYNC_MSG_ACT_PLAYER_ADDED_AND_ACKED: roomId=%v, roomState=%v, targetPlayerId=%v, targetPlayerBattleState=%v, capacity=%v, EffectivePlayerCount=%v", pR.Id, pR.State, targetPlayer.Id, targetPlayer.BattleState, pR.Capacity, pR.EffectivePlayerCount))
                pR.sendSafely(playerAckedFrame, nil, DOWNSYNC_MSG_ACT_PLAYER_ADDED_AND_ACKED, thatPlayer.Id)
            }

        switch targetPlayer.BattleState {
        case PlayerBattleStateIns.ADDED_PENDING_BATTLE_COLLIDER_ACK:
            playerAckedFrame := &RoomDownsyncFrame{
                Id:      pR.RenderFrameId,
                Players: toPbPlayers(pR.Players, true),
            }
            pR.sendSafely(playerAckedFrame, nil, DOWNSYNC_MSG_ACT_PLAYER_ADDED_AND_ACKED, eachPlayer.Id)
        case PlayerBattleStateIns.READDED_PENDING_BATTLE_COLLIDER_ACK:
            playerAckedFrame := &RoomDownsyncFrame{
                Id:      pR.RenderFrameId,
                Players: toPbPlayers(pR.Players, true),
            }
            pR.sendSafely(playerAckedFrame, nil, DOWNSYNC_MSG_ACT_PLAYER_READDED_AND_ACKED, eachPlayer.Id)
        // only send resync info to the targetPlayer
        // i.e. implies that "targetPlayer.LastSentInputFrameId == MAGIC_LAST_SENT_INPUT_FRAME_ID_READDED"
        pR.downsyncToSinglePlayer(playerId, targetPlayer, pR.LastAllConfirmedInputFrameId, uint64(0), false)
        default:
        }
    }

    targetPlayer.BattleState = PlayerBattleStateIns.ACTIVE
    Logger.Info(fmt.Sprintf("OnPlayerBattleColliderAcked: roomId=%v, roomState=%v, targetPlayerId=%v, targetPlayerBattleState=%v, capacity=%v, EffectivePlayerCount=%v", pR.Id, pR.State, targetPlayer.Id, targetPlayer.BattleState, pR.Capacity, pR.EffectivePlayerCount))
    Logger.Debug(fmt.Sprintf("OnPlayerBattleColliderAcked-post-downsync: roomId=%v, roomState=%v, targetPlayerId=%v, targetPlayerBattleState=%v, capacity=%v, EffectivePlayerCount=%v", pR.Id, pR.State, targetPlayer.Id, targetPlayer.BattleState, pR.Capacity, pR.EffectivePlayerCount))

    if pR.Capacity == int(pR.EffectivePlayerCount) {
        allAcked := true
        for _, p := range pR.Players {
            if PlayerBattleStateIns.ACTIVE != p.BattleState {
                Logger.Info("unexpectedly got an inactive player", zap.Any("roomId", pR.Id), zap.Any("playerId", p.Id), zap.Any("battleState", p.BattleState))
                Logger.Warn("unexpectedly got an inactive player", zap.Any("roomId", pR.Id), zap.Any("playerId", p.Id), zap.Any("battleState", p.BattleState))
                allAcked = false
                break
            }
@@ -1039,18 +966,19 @@ func (pR *Room) OnPlayerBattleColliderAcked(playerId int32) bool {
    return true
}

func (pR *Room) sendSafely(roomDownsyncFrame *RoomDownsyncFrame, toSendFrames []*InputFrameDownsync, act int32, playerId int32) {
func (pR *Room) sendSafely(roomDownsyncFrame *RoomDownsyncFrame, toSendInputFrameDownsyncs []*InputFrameDownsync, act int32, playerId int32) {
    defer func() {
        if r := recover(); r != nil {
            pR.PlayerSignalToCloseDict[playerId](Constants.RetCode.UnknownError, fmt.Sprintf("%v", r))
            Logger.Error("sendSafely, recovered from: ", zap.Any("roomId", pR.Id), zap.Any("playerId", playerId), zap.Any("panic", r))
        }
    }()

    if playerDownsyncSession, existent := pR.PlayerDownsyncSessionDict[playerId]; existent {
        pResp := &WsResp{
            Ret: int32(Constants.RetCode.Ok),
            Act: act,
            Rdf: roomDownsyncFrame,
            InputFrameDownsyncBatch: toSendFrames,
            InputFrameDownsyncBatch: toSendInputFrameDownsyncs,
        }

        theBytes, marshalErr := proto.Marshal(pResp)
@@ -1058,21 +986,34 @@ func (pR *Room) sendSafely(roomDownsyncFrame *RoomDownsyncFrame, toSendFrames []
            panic(fmt.Sprintf("Error marshaling downsync message: roomId=%v, playerId=%v, roomState=%v, roomEffectivePlayerCount=%v", pR.Id, playerId, pR.State, pR.EffectivePlayerCount))
        }

        if err := pR.PlayerDownsyncSessionDict[playerId].WriteMessage(websocket.BinaryMessage, theBytes); nil != err {
        if err := playerDownsyncSession.WriteMessage(websocket.BinaryMessage, theBytes); nil != err {
            panic(fmt.Sprintf("Error sending downsync message: roomId=%v, playerId=%v, roomState=%v, roomEffectivePlayerCount=%v, err=%v", pR.Id, playerId, pR.State, pR.EffectivePlayerCount, err))
        }
    }
}

func (pR *Room) shouldPrefabInputFrameDownsync(renderFrameId int32) bool {
    return ((renderFrameId & ((1 << pR.InputScaleFrames) - 1)) == 0)
}
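shouldPrefabInputFrameDownsync relies on a power-of-two trick: with InputScaleFrames=2 (as set in OnDismissed above), the mask (1<<2)-1 = 3 makes the test equivalent to renderFrameId % 4 == 0, i.e. one inputFrame is prefabbed per four renderFrames. Spelled out:

package main

import "fmt"

func main() {
    inputScaleFrames := uint32(2)
    mask := int32((1 << inputScaleFrames) - 1) // 3
    for renderFrameId := int32(0); renderFrameId < 9; renderFrameId++ {
        shouldPrefab := (renderFrameId & mask) == 0 // same as renderFrameId%4 == 0
        fmt.Println(renderFrameId, shouldPrefab)    // true at 0, 4, 8
    }
}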

func (pR *Room) prefabInputFrameDownsync(inputFrameId int32) *InputFrameDownsync {
func (pR *Room) prefabInputFrameDownsync(inputFrameId int32, lockInputsBuffer bool) *InputFrameDownsync {
    /*
        Kindly note that on backend the prefab is much simpler than its frontend counterpart, because frontend will upsync its latest command immediately if there's any change w.r.t. its own prev cmd, thus if no upsync received from a frontend,
        - EITHER it's due to local lag and bad network,
        - OR there's no change w.r.t. its prev cmd.
    */

    if lockInputsBuffer {
        Logger.Debug(fmt.Sprintf("prefabInputFrameDownsync-InputsBufferLock about to lock: roomId=%v", pR.Id))
        pR.InputsBufferLock.Lock()
        Logger.Debug(fmt.Sprintf("prefabInputFrameDownsync-InputsBufferLock locked: roomId=%v", pR.Id))

        defer func() {
            pR.InputsBufferLock.Unlock()
            Logger.Debug(fmt.Sprintf("prefabInputFrameDownsync-InputsBufferLock unlocked: roomId=%v", pR.Id))
        }()
    }

    var currInputFrameDownsync *InputFrameDownsync = nil

    if 0 == inputFrameId && 0 == pR.InputsBuffer.Cnt {
@@ -1102,34 +1043,45 @@ func (pR *Room) prefabInputFrameDownsync(inputFrameId int32) *InputFrameDownsync
    return currInputFrameDownsync
}
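The conversions used throughout this diff (ConvertToInputFrameId, ConvertToGeneratingRenderFrameId, ConvertToLastUsedRenderFrameId) aren't shown here; a plausible shift-based reading, consistent with the InputScaleFrames mask above, would be the following (an assumption for illustration only, not the repo's actual code, which may differ in rounding and delay handling):

package main

import "fmt"

const inputScaleFrames = uint32(2) // each inputFrame spans 1<<2 = 4 renderFrames

// Hypothetical reconstructions; the real helpers live outside this diff.
func convertToInputFrameId(renderFrameId, inputDelayFrames int32) int32 {
    return (renderFrameId - inputDelayFrames) >> inputScaleFrames
}

func convertToGeneratingRenderFrameId(inputFrameId int32) int32 {
    return inputFrameId << inputScaleFrames
}

func main() {
    fmt.Println(convertToInputFrameId(100, 0))        // 25
    fmt.Println(convertToGeneratingRenderFrameId(25)) // 100
}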

func (pR *Room) markConfirmationIfApplicable() {
func (pR *Room) markConfirmationIfApplicable() int {
    newAllConfirmedCount := 0
    inputFrameId1 := pR.LastAllConfirmedInputFrameId + 1
    totPlayerCnt := uint32(pR.Capacity)
    allConfirmedMask := uint64((1 << totPlayerCnt) - 1)

    for inputFrameId := inputFrameId1; inputFrameId < pR.InputsBuffer.EdFrameId; inputFrameId++ {
        tmp := pR.InputsBuffer.GetByFrameId(inputFrameId)
        if nil == tmp {
            panic(fmt.Sprintf("inputFrameId=%v doesn't exist for roomId=%v, this is abnormal because the server should prefab inputFrameDownsync at the most advanced pace, check the prefab logic (Or maybe you're having a 'Room.RenderCacheSize' too small)! InputsBuffer=%v", inputFrameId, pR.Id, pR.InputsBufferString(false)))
            panic(fmt.Sprintf("inputFrameId=%v doesn't exist for roomId=%v! InputsBuffer=%v", inputFrameId, pR.Id, pR.InputsBufferString(false)))
        }
        shouldBreakConfirmation := false
        inputFrameDownsync := tmp.(*InputFrameDownsync)

        if allConfirmedMask != inputFrameDownsync.ConfirmedList {
            for _, player := range pR.Players {
                bufIndex := pR.toDiscreteInputsBufferIndex(inputFrameId, player.JoinIndex)
                tmp, loaded := pR.DiscreteInputsBuffer.LoadAndDelete(bufIndex) // It's safe to "LoadAndDelete" here because the "inputFrameUpsync" of this player is already remembered by the corresponding "inputFrameDown".
                if !loaded {
                    continue
                thatPlayerBattleState := atomic.LoadInt32(&(player.BattleState))
                thatPlayerJoinMask := uint64(1 << uint32(player.JoinIndex-1))
                if 0 == (inputFrameDownsync.ConfirmedList & thatPlayerJoinMask) {
                    if thatPlayerBattleState == PlayerBattleStateIns.ACTIVE {
                        shouldBreakConfirmation = true
                        break
                    } else {
                        Logger.Debug(fmt.Sprintf("markConfirmationIfApplicable for roomId=%v, skipping UNCONFIRMED BUT INACTIVE player(id:%v, joinIndex:%v) while checking inputFrameId=[%v, %v): InputsBuffer=%v", pR.Id, player.Id, player.JoinIndex, inputFrameId1, pR.InputsBuffer.EdFrameId, pR.InputsBufferString(false)))
                    }
                }
            }
            inputFrameUpsync := tmp.(*InputFrameUpsync)
            indiceInJoinIndexBooleanArr := uint32(player.JoinIndex - 1)
            inputFrameDownsync.InputList[indiceInJoinIndexBooleanArr] = inputFrameUpsync.Encoded
            inputFrameDownsync.ConfirmedList |= (1 << indiceInJoinIndexBooleanArr)
        }

        if allConfirmedMask == inputFrameDownsync.ConfirmedList {
            pR.onInputFrameDownsyncAllConfirmed(inputFrameDownsync, -1)
        } else {
        if shouldBreakConfirmation {
            break
        } else {
            newAllConfirmedCount += 1
            pR.onInputFrameDownsyncAllConfirmed(inputFrameDownsync, -1)
        }
    }

    Logger.Debug(fmt.Sprintf("markConfirmationIfApplicable checking inputFrameId=[%v, %v) for roomId=%v, newAllConfirmedCount=%d: InputsBuffer=%v", inputFrameId1, pR.InputsBuffer.EdFrameId, pR.Id, newAllConfirmedCount, pR.InputsBufferString(false)))
    return newAllConfirmedCount
}
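The isolation this PR is named for shows up here: an unconfirmed bit only blocks the scan when its player is still ACTIVE, and inactive players are skipped so a single disconnect can't stall confirmation for everyone. A condensed standalone model of that decision (illustrative, not the full method):

package main

import "fmt"

type player struct {
    joinIndex int32
    active    bool
}

// nextAllConfirmedCount scans confirmation masks the way
// markConfirmationIfApplicable above does, in simplified form.
func nextAllConfirmedCount(confirmedLists []uint64, players []player) int {
    cnt := 0
    for _, confirmed := range confirmedLists {
        blocked := false
        for _, p := range players {
            if 0 == (confirmed&uint64(1<<uint32(p.joinIndex-1))) && p.active {
                blocked = true // an ACTIVE player hasn't upsynced yet: stop here
                break
            }
        }
        if blocked {
            break
        }
        cnt++ // unconfirmed bits belonged only to inactive players: treat as all-confirmed
    }
    return cnt
}

func main() {
    players := []player{{1, true}, {2, false}} // joinIndex 2 disconnected
    fmt.Println(nextAllConfirmedCount([]uint64{0b01, 0b01, 0b11}, players)) // 3
    players[1].active = true
    fmt.Println(nextAllConfirmedCount([]uint64{0b01, 0b11}, players)) // 0: frame 0 blocks
}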

func (pR *Room) forceConfirmationIfApplicable() uint64 {
@@ -1150,6 +1102,15 @@ func (pR *Room) forceConfirmationIfApplicable() uint64 {
        Logger.Debug(fmt.Sprintf("inputFrameId2=%v is already all-confirmed for roomId=%v[type#1], no need to force confirmation of it", inputFrameId2, pR.Id))
        return 0
    }

    Logger.Debug(fmt.Sprintf("forceConfirmation-InputsBufferLock about to lock: roomId=%v", pR.Id))
    pR.InputsBufferLock.Lock()
    Logger.Debug(fmt.Sprintf("forceConfirmation-InputsBufferLock locked: roomId=%v", pR.Id))

    defer func() {
        pR.InputsBufferLock.Unlock()
        Logger.Debug(fmt.Sprintf("forceConfirmation-InputsBufferLock unlocked: roomId=%v", pR.Id))
    }()
    tmp := pR.InputsBuffer.GetByFrameId(inputFrameId2)
    if nil == tmp {
        panic(fmt.Sprintf("inputFrameId2=%v doesn't exist for roomId=%v, this is abnormal because the server should prefab inputFrameDownsync at the most advanced pace, check the prefab logic! InputsBuffer=%v", inputFrameId2, pR.Id, pR.InputsBufferString(false)))
@@ -1177,6 +1138,14 @@ func (pR *Room) applyInputFrameDownsyncDynamics(fromRenderFrameId int32, toRende
    totPlayerCnt := uint32(pR.Capacity)
    allConfirmedMask := uint64((1 << totPlayerCnt) - 1)

    Logger.Debug(fmt.Sprintf("applyInputFrameDownsyncDynamics-InputsBufferLock about to lock: roomId=%v", pR.Id))
    pR.InputsBufferLock.Lock()
    Logger.Debug(fmt.Sprintf("applyInputFrameDownsyncDynamics-InputsBufferLock locked: roomId=%v", pR.Id))

    defer func() {
        pR.InputsBufferLock.Unlock()
        Logger.Debug(fmt.Sprintf("applyInputFrameDownsyncDynamics-InputsBufferLock unlocked: roomId=%v", pR.Id))
    }()
    for collisionSysRenderFrameId := fromRenderFrameId; collisionSysRenderFrameId < toRenderFrameId; collisionSysRenderFrameId++ {
        currRenderFrameTmp := pR.RenderFrameBuffer.GetByFrameId(collisionSysRenderFrameId)
        if nil == currRenderFrameTmp {
@@ -1350,7 +1319,7 @@ func (pR *Room) applyInputFrameDownsyncDynamicsOnSingleRenderFrame(delayedInputF
        // Update in the collision system
        playerCollider.Update()
        if 0 != bulletPushbacks[joinIndex-1].X || 0 != bulletPushbacks[joinIndex-1].Y {
            Logger.Info(fmt.Sprintf("roomId=%v, playerId=%v is pushed back by (%.2f, %.2f) by bullet impacts, now its framesToRecover is %d at currRenderFrame.id=%v", pR.Id, playerId, bulletPushbacks[joinIndex-1].X, bulletPushbacks[joinIndex-1].Y, thatPlayerInNextFrame.FramesToRecover, currRenderFrame.Id))
            Logger.Debug(fmt.Sprintf("roomId=%v, playerId=%v is pushed back by (%.2f, %.2f) by bullet impacts, now its framesToRecover is %d at currRenderFrame.id=%v", pR.Id, playerId, bulletPushbacks[joinIndex-1].X, bulletPushbacks[joinIndex-1].Y, thatPlayerInNextFrame.FramesToRecover, currRenderFrame.Id))
        }
        continue
    }
@@ -1374,7 +1343,7 @@ func (pR *Room) applyInputFrameDownsyncDynamicsOnSingleRenderFrame(delayedInputF
        toRet.MeleeBullets = append(toRet.MeleeBullets, &newMeleeBullet)
        thatPlayerInNextFrame.FramesToRecover = newMeleeBullet.RecoveryFrames
        thatPlayerInNextFrame.CharacterState = ATK_CHARACTER_STATE_ATK1
        Logger.Info(fmt.Sprintf("roomId=%v, playerId=%v triggered a rising-edge of btnA at currRenderFrame.id=%v, delayedInputFrame.id=%v", pR.Id, playerId, currRenderFrame.Id, delayedInputFrame.InputFrameId))
        Logger.Debug(fmt.Sprintf("roomId=%v, playerId=%v triggered a rising-edge of btnA at currRenderFrame.id=%v, delayedInputFrame.id=%v", pR.Id, playerId, currRenderFrame.Id, delayedInputFrame.InputFrameId))

    } else if decodedInput.BtnALevel < prevBtnALevel {
        Logger.Debug(fmt.Sprintf("roomId=%v, playerId=%v triggered a falling-edge of btnA at currRenderFrame.id=%v, delayedInputFrame.id=%v", pR.Id, playerId, currRenderFrame.Id, delayedInputFrame.InputFrameId))
@@ -1477,3 +1446,87 @@ func (pR *Room) refreshColliders(spaceW, spaceH int32) {
func (pR *Room) printBarrier(barrierCollider *resolv.Object) {
    Logger.Info(fmt.Sprintf("Barrier in roomId=%v: w=%v, h=%v, shape=%v", pR.Id, barrierCollider.W, barrierCollider.H, barrierCollider.Shape))
}

func (pR *Room) downsyncToAllPlayers(upperToSendInputFrameId int32, unconfirmedMask uint64, prohibitsInputsBufferLock bool) {
    for playerId, player := range pR.Players {
        thatPlayerBattleState := atomic.LoadInt32(&(player.BattleState))
        switch thatPlayerBattleState {
        case PlayerBattleStateIns.DISCONNECTED:
        case PlayerBattleStateIns.LOST:
        case PlayerBattleStateIns.EXPELLED_DURING_GAME:
        case PlayerBattleStateIns.EXPELLED_IN_DISMISSAL:
        case PlayerBattleStateIns.READDED_PENDING_BATTLE_COLLIDER_ACK: // This is the reason why battleState filter is put at "downsyncToAllPlayers" instead of "downsyncToSinglePlayer"
            continue
        }
        pR.downsyncToSinglePlayer(playerId, player, pR.LastAllConfirmedInputFrameId, unconfirmedMask, prohibitsInputsBufferLock)
    }
}

func (pR *Room) downsyncToSinglePlayer(playerId int32, player *Player, upperToSendInputFrameId int32, unconfirmedMask uint64, prohibitsInputsBufferLock bool) {
    // TODO: DON'T try to send any message to an inactive player! However, note that player.BattleState could be modified from different threads and needs to be synced!
    // [WARNING] Websocket is TCP-based, thus no need to re-send a previously sent inputFrame to a same player!
    lowerToSentInputFrameId := player.LastSentInputFrameId + 1
    /*
        [WARNING]
        Upon resynced on frontend, "refRenderFrameId" is now set to as advanced as possible, and it's the frontend's responsibility now to pave way for the "gap inputFrames"

        If "NstDelayFrames" becomes larger, "pR.RenderFrameId - refRenderFrameId" possibly becomes larger because the force confirmation is delayed more.

        Upon resync, it's still possible that "refRenderFrameId < frontend.chaserRenderFrameId" -- and this is allowed.
    */
    refRenderFrameId := pR.CurDynamicsRenderFrameId - 1

    shouldResync1 := (MAGIC_LAST_SENT_INPUT_FRAME_ID_READDED == player.LastSentInputFrameId)
    shouldResync2 := (0 < (unconfirmedMask & uint64(1<<uint32(player.JoinIndex-1)))) // This condition is critical, if we don't send resync upon this condition, the "reconnected or slowly-clocking player" might never get its input synced
    // shouldResync2 := (0 < unconfirmedMask) // An easier version of the above, might keep sending "refRenderFrame"s to still connected players when any player is disconnected
    shouldResyncOverall := (shouldResync1 || shouldResync2)

    if shouldResyncOverall {
        // A rejoined player, should guarantee that when it resyncs to "refRenderFrameId" a matching inputFrame to apply exists
        lowerToSentInputFrameId = pR.ConvertToInputFrameId(refRenderFrameId, pR.InputDelayFrames)
        Logger.Info(fmt.Sprintf("Resyncing player: roomId=%v, playerId=%v, playerJoinIndex=%v, renderFrameId=%v, curDynamicsRenderFrameId=%v, playerLastSentInputFrameId=%v, refRenderFrameId=%v, lowerToSentInputFrameId=%v, upperToSendInputFrameId=%v", pR.Id, playerId, player.JoinIndex, pR.RenderFrameId, pR.CurDynamicsRenderFrameId, player.LastSentInputFrameId, refRenderFrameId, lowerToSentInputFrameId, upperToSendInputFrameId))
        // TODO: What if "lowerToSentInputFrameId > pR.InputsBuffer.StFrameId" now?
    }

    if lowerToSentInputFrameId > upperToSendInputFrameId {
        Logger.Debug(fmt.Sprintf("Not sending due to potentially empty toSendInputFrameDownsyncs: roomId=%v, playerId=%v, playerLastSentInputFrameId=%v, refRenderFrameId=%v, lowerToSentInputFrameId=%v, upperToSendInputFrameId=%v, lastSentInputFrameId=%v, playerAckingInputFrameId=%v", pR.Id, playerId, player.LastSentInputFrameId, refRenderFrameId, lowerToSentInputFrameId, upperToSendInputFrameId, player.LastSentInputFrameId, player.AckingInputFrameId))
        return
    }

    // [WARNING] EDGE CASE HERE: Upon initialization, all of "lastAllConfirmedInputFrameId", "lastAllConfirmedInputFrameIdWithChange" and "anchorInputFrameId" are "-1", thus "j" starts with "0", however "inputFrameId: 0" might not have been all confirmed!
    var theInputsBufferLockToUse *sync.Mutex = &pR.InputsBufferLock
    if prohibitsInputsBufferLock {
        // Already locked in caller function
        theInputsBufferLockToUse = nil
    }
    // [WARNING] Clone to deliberately avoid holding "pR.InputsBufferLock" while using network I/O
    j, toSendInputFrameDownsyncs := pR.InputsBuffer.cloneInputFrameDownsyncsByFrameIdRange(lowerToSentInputFrameId, upperToSendInputFrameId+1, theInputsBufferLockToUse)

    if 0 >= len(toSendInputFrameDownsyncs) {
        Logger.Debug(fmt.Sprintf("Not sending due to actually empty toSendInputFrameDownsyncs: roomId=%v, playerId=%v, playerLastSentInputFrameId=%v, refRenderFrameId=%v, lowerToSentInputFrameId=%v, upperToSendInputFrameId=%v, j=%v, lastSentInputFrameId=%v, playerAckingInputFrameId=%v", pR.Id, playerId, player.LastSentInputFrameId, refRenderFrameId, lowerToSentInputFrameId, upperToSendInputFrameId, j, player.LastSentInputFrameId, player.AckingInputFrameId))
        return
    }

    /*
        Resync helps
        1. when player with a slower frontend clock lags significantly behind and thus wouldn't get its inputUpsync recognized due to faster "forceConfirmation"
        2. reconnection
    */
    if pR.BackendDynamicsEnabled && shouldResyncOverall {
        tmp := pR.RenderFrameBuffer.GetByFrameId(refRenderFrameId)
        if nil == tmp {
            panic(fmt.Sprintf("Required refRenderFrameId=%v for roomId=%v, renderFrameId=%v, playerId=%v, playerLastSentInputFrameId=%v, j=%v doesn't exist! InputsBuffer=%v, RenderFrameBuffer=%v", refRenderFrameId, pR.Id, pR.RenderFrameId, playerId, player.LastSentInputFrameId, j, pR.InputsBufferString(false), pR.RenderFrameBufferString()))
        }

        refRenderFrame := tmp.(*RoomDownsyncFrame)
        for playerId, player := range pR.Players {
            refRenderFrame.Players[playerId].ColliderRadius = player.ColliderRadius // hardcoded for now
        }
        refRenderFrame.BackendUnconfirmedMask = unconfirmedMask
        Logger.Warn(fmt.Sprintf("Sending refRenderFrameId=%v for roomId=%v, playerId=%v, playerJoinIndex=%v, renderFrameId=%v, curDynamicsRenderFrameId=%v, playerLastSentInputFrameId=%v, lowerToSentInputFrameId=%v, upperToSendInputFrameId=%v, j=%v: InputsBuffer=%v", refRenderFrameId, pR.Id, playerId, player.JoinIndex, pR.RenderFrameId, pR.CurDynamicsRenderFrameId, player.LastSentInputFrameId, lowerToSentInputFrameId, upperToSendInputFrameId, j, pR.InputsBufferString(false)))
        pR.sendSafely(refRenderFrame, toSendInputFrameDownsyncs, DOWNSYNC_MSG_ACT_FORCED_RESYNC, playerId)
    } else {
        pR.sendSafely(nil, toSendInputFrameDownsyncs, DOWNSYNC_MSG_ACT_INPUT_BATCH, playerId)
    }
    player.LastSentInputFrameId = j - 1
}
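prohibitsInputsBufferLock exists because this function is reached on two paths: one where the caller already holds InputsBufferLock (e.g. OnBattleCmdReceived above, which calls downsyncToAllPlayers(..., true) under its deferred unlock) and one where it doesn't. Since Go's sync.Mutex is not reentrant, re-locking from the same goroutine would deadlock; passing nil skips the inner lock. A standalone sketch of the pattern:

package main

import (
    "fmt"
    "sync"
)

// snapshot locks mux unless the caller says it already holds it, mirroring
// theInputsBufferLockToUse above; sync.Mutex re-locked by its holder deadlocks.
func snapshot(data []int32, mux *sync.Mutex, callerHoldsLock bool) []int32 {
    lockToUse := mux
    if callerHoldsLock {
        lockToUse = nil // already locked in caller function
    }
    if lockToUse != nil {
        lockToUse.Lock()
        defer lockToUse.Unlock()
    }
    out := make([]int32, len(data))
    copy(out, data)
    return out
}

func main() {
    var mux sync.Mutex
    data := []int32{7, 8, 9}

    fmt.Println(snapshot(data, &mux, false)) // un-held path: lock internally

    mux.Lock() // caller-held path
    fmt.Println(snapshot(data, &mux, true))
    mux.Unlock()
}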

@@ -1063,6 +1063,7 @@ type RoomDownsyncFrame struct {
    Players map[int32]*PlayerDownsync `protobuf:"bytes,2,rep,name=players,proto3" json:"players,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
    CountdownNanos int64 `protobuf:"varint,3,opt,name=countdownNanos,proto3" json:"countdownNanos,omitempty"`
    MeleeBullets []*MeleeBullet `protobuf:"bytes,4,rep,name=meleeBullets,proto3" json:"meleeBullets,omitempty"` // I don't know how to mimic inheritance/composition in protobuf so far, thus using an array for each type of bullet as a compromise
    BackendUnconfirmedMask uint64 `protobuf:"varint,5,opt,name=backendUnconfirmedMask,proto3" json:"backendUnconfirmedMask,omitempty"` // Indexed by "joinIndex", same compression concern as stated in InputFrameDownsync
}

func (x *RoomDownsyncFrame) Reset() {
@@ -1125,6 +1126,13 @@ func (x *RoomDownsyncFrame) GetMeleeBullets() []*MeleeBullet {
    return nil
}

func (x *RoomDownsyncFrame) GetBackendUnconfirmedMask() uint64 {
    if x != nil {
        return x.BackendUnconfirmedMask
    }
    return 0
}
|
||||
|
||||
var File_room_downsync_frame_proto protoreflect.FileDescriptor
|
||||
|
||||
var file_room_downsync_frame_proto_rawDesc = []byte{
|
||||
@ -1377,7 +1385,7 @@ var file_room_downsync_frame_proto_rawDesc = []byte{
|
||||
0x6b, 0x65, 0x79, 0x12, 0x29, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01,
|
||||
0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2e, 0x4d, 0x65, 0x6c, 0x65,
|
||||
0x65, 0x42, 0x75, 0x6c, 0x6c, 0x65, 0x74, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02,
|
||||
0x38, 0x01, 0x22, 0x9a, 0x02, 0x0a, 0x11, 0x52, 0x6f, 0x6f, 0x6d, 0x44, 0x6f, 0x77, 0x6e, 0x73,
|
||||
0x38, 0x01, 0x22, 0xd2, 0x02, 0x0a, 0x11, 0x52, 0x6f, 0x6f, 0x6d, 0x44, 0x6f, 0x77, 0x6e, 0x73,
|
||||
0x79, 0x6e, 0x63, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01,
|
||||
0x20, 0x01, 0x28, 0x05, 0x52, 0x02, 0x69, 0x64, 0x12, 0x40, 0x0a, 0x07, 0x70, 0x6c, 0x61, 0x79,
|
||||
0x65, 0x72, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x70, 0x72, 0x6f, 0x74,
|
||||
@ -1389,14 +1397,18 @@ var file_room_downsync_frame_proto_rawDesc = []byte{
|
||||
0x6f, 0x73, 0x12, 0x37, 0x0a, 0x0c, 0x6d, 0x65, 0x6c, 0x65, 0x65, 0x42, 0x75, 0x6c, 0x6c, 0x65,
|
||||
0x74, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
|
||||
0x73, 0x2e, 0x4d, 0x65, 0x6c, 0x65, 0x65, 0x42, 0x75, 0x6c, 0x6c, 0x65, 0x74, 0x52, 0x0c, 0x6d,
|
||||
0x65, 0x6c, 0x65, 0x65, 0x42, 0x75, 0x6c, 0x6c, 0x65, 0x74, 0x73, 0x1a, 0x52, 0x0a, 0x0c, 0x50,
|
||||
0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b,
|
||||
0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2c, 0x0a,
|
||||
0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x70,
|
||||
0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2e, 0x50, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x44, 0x6f, 0x77, 0x6e,
|
||||
0x73, 0x79, 0x6e, 0x63, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42,
|
||||
0x13, 0x5a, 0x11, 0x62, 0x61, 0x74, 0x74, 0x6c, 0x65, 0x5f, 0x73, 0x72, 0x76, 0x2f, 0x70, 0x72,
|
||||
0x6f, 0x74, 0x6f, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
|
||||
0x65, 0x6c, 0x65, 0x65, 0x42, 0x75, 0x6c, 0x6c, 0x65, 0x74, 0x73, 0x12, 0x36, 0x0a, 0x16, 0x62,
|
||||
0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x55, 0x6e, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x72, 0x6d, 0x65,
|
||||
0x64, 0x4d, 0x61, 0x73, 0x6b, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x52, 0x16, 0x62, 0x61, 0x63,
|
||||
0x6b, 0x65, 0x6e, 0x64, 0x55, 0x6e, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x72, 0x6d, 0x65, 0x64, 0x4d,
|
||||
0x61, 0x73, 0x6b, 0x1a, 0x52, 0x0a, 0x0c, 0x50, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x45, 0x6e,
|
||||
0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05,
|
||||
0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2c, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02,
|
||||
0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2e, 0x50, 0x6c,
|
||||
0x61, 0x79, 0x65, 0x72, 0x44, 0x6f, 0x77, 0x6e, 0x73, 0x79, 0x6e, 0x63, 0x52, 0x05, 0x76, 0x61,
|
||||
0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x13, 0x5a, 0x11, 0x62, 0x61, 0x74, 0x74, 0x6c,
|
||||
0x65, 0x5f, 0x73, 0x72, 0x76, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x62, 0x06, 0x70, 0x72,
|
||||
0x6f, 0x74, 0x6f, 0x33,
|
||||
}
|
||||
|
||||
var (
|
||||
|
@ -365,7 +365,7 @@ func Serve(c *gin.Context) {
        return nil
    }

    // Tries to receive from client-side in a non-blocking manner.
    // TODO: Is there any potential edge-trigger improvement like the epoll approach mentioned above for the following statement? See discussion in https://github.com/gorilla/websocket/issues/122
    _, bytes, err := conn.ReadMessage()
    if nil != err {
        Logger.Error("About to `signalToCloseConnOfThisPlayer`", zap.Any("roomId", pRoom.Id), zap.Any("playerId", playerId), zap.Error(err))
@ -134,4 +134,5 @@ message RoomDownsyncFrame {
    map<int32, PlayerDownsync> players = 2;
    int64 countdownNanos = 3;
    repeated MeleeBullet meleeBullets = 4; // I don't know how to mimic inheritance/composition in protobuf so far, thus using an array for each type of bullet as a compromise
    uint64 backendUnconfirmedMask = 5; // Indexed by "joinIndex", same compression concern as stated in InputFrameDownsync
}
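
Both `confirmedList` (per inputFrame) and `backendUnconfirmedMask` are plain bitmasks indexed by the 1-based `joinIndex`, matching the frontend usages `confirmedList: (1 << (self.selfPlayerInfo.joinIndex - 1))` and `(1 << self.playerRichInfoDict.size) - 1` further below. A minimal standalone sketch of that convention; the helper names and the `playerCount` value are illustrative, not part of the codebase:

    // Illustrative sketch of the joinIndex-indexed bitmask convention; only the
    // bit layout follows the code, the helper names are hypothetical.
    function hasConfirmed(mask, joinIndex) {
      return 0 < (mask & (1 << (joinIndex - 1))); // bit#(joinIndex-1) marks that player
    }
    function allConfirmedMask(playerCount) {
      return (1 << playerCount) - 1; // e.g. 2 players -> 0b11 == 3
    }
    console.log(hasConfirmed(3, 2));        // true, bit#1 is set
    console.log(allConfirmedMask(2) == 3);  // true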
@ -440,7 +440,7 @@
    "array": [
        0,
        0,
        210.5241291124452,
        216.50635094610968,
        0,
        0,
        0,

@ -454,7 +454,7 @@
    "array": [
        0,
        0,
        210.5241291124452,
        216.50635094610968,
        0,
        0,
        0,
@ -82,21 +82,21 @@ cc.Class({
    // It turns out that "prevRdfPlayer.characterState" is not useful in this function :)
    if (newAnimName == playingAnimName && window.ATK_CHARACTER_STATE_INTERRUPT_WAIVE_SET.has(newCharacterState)) {
      // No need to interrupt
      // console.warn(`JoinIndex=${rdfPlayer.joinIndex}, not interrupting ${newAnimName} while the playing anim is also ${playingAnimName}, player rdf changed from: ${null == prevRdfPlayer ? null : JSON.stringify(prevRdfPlayer)}, , to: ${JSON.stringify(rdfPlayer)}`);
      // console.warn(`JoinIndex=${rdfPlayer.joinIndex}, not interrupting ${newAnimName} while the playing anim is also ${playingAnimName}, player rdf changed from: ${null == prevRdfPlayer ? null : JSON.stringify(prevRdfPlayer)}, to: ${JSON.stringify(rdfPlayer)}`);
      return;
    }

    if (this.animComp instanceof dragonBones.ArmatureDisplay) {
      this._interruptPlayingAnimAndPlayNewAnimDragonBones(rdfPlayer, prevRdfPlayer, newCharacterState, newAnimName, underlyingAnimationCtrl);
      this._interruptPlayingAnimAndPlayNewAnimDragonBones(rdfPlayer, prevRdfPlayer, newCharacterState, newAnimName, underlyingAnimationCtrl, playingAnimName);
    } else {
      this._interruptPlayingAnimAndPlayNewAnimFrameAnim(rdfPlayer, prevRdfPlayer, newCharacterState, newAnimName);
      this._interruptPlayingAnimAndPlayNewAnimFrameAnim(rdfPlayer, prevRdfPlayer, newCharacterState, newAnimName, playingAnimName);
    }
  },

  _interruptPlayingAnimAndPlayNewAnimDragonBones(rdfPlayer, prevRdfPlayer, newCharacterState, newAnimName, underlyingAnimationCtrl) {
    if (ATK_CHARACTER_STATE.Idle1[0] == newCharacterState || ATK_CHARACTER_STATE.Walking[0] == newCharacterState) {
  _interruptPlayingAnimAndPlayNewAnimDragonBones(rdfPlayer, prevRdfPlayer, newCharacterState, newAnimName, underlyingAnimationCtrl, playingAnimName) {
    if (window.ATK_CHARACTER_STATE_INTERRUPT_WAIVE_SET.has(newCharacterState)) {
      // No "framesToRecover"
      // console.warn(`JoinIndex=${rdfPlayer.joinIndex}, playing new ${newAnimName} from the beginning: while the playing anim is ${playAnimation}, player rdf changed from: ${null == prevRdfPlayer ? null : JSON.stringify(prevRdfPlayer)}, , to: ${JSON.stringify(rdfPlayer)}`);
      //console.warn(`#DragonBones JoinIndex=${rdfPlayer.joinIndex}, ${playingAnimName} -> ${newAnimName}`);
      underlyingAnimationCtrl.gotoAndPlayByFrame(newAnimName, 0, -1);
    } else {
      const animationData = underlyingAnimationCtrl._animations[newAnimName];
@ -109,10 +109,10 @@ cc.Class({
    }
  },

  _interruptPlayingAnimAndPlayNewAnimFrameAnim(rdfPlayer, prevRdfPlayer, newCharacterState, newAnimName) {
  _interruptPlayingAnimAndPlayNewAnimFrameAnim(rdfPlayer, prevRdfPlayer, newCharacterState, newAnimName, playingAnimName) {
    if (window.ATK_CHARACTER_STATE_INTERRUPT_WAIVE_SET.has(newCharacterState)) {
      // No "framesToRecover"
      // console.warn(`JoinIndex=${rdfPlayer.joinIndex}, playing new ${newAnimName} from the beginning: while the playing anim is ${playAnimation}, player rdf changed from: ${null == prevRdfPlayer ? null : JSON.stringify(prevRdfPlayer)}, , to: ${JSON.stringify(rdfPlayer)}`);
      //console.warn(`#DragonBones JoinIndex=${rdfPlayer.joinIndex}, ${playingAnimName} -> ${newAnimName}`);
      this.animComp.play(newAnimName, 0);
      return;
    }
@ -104,32 +104,6 @@ cc.Class({
    return (0 == inputFrameId % 10);
  },

  dumpToRenderCache: function(rdf) {
    const self = this;
    const minToKeepRenderFrameId = self.lastAllConfirmedRenderFrameId - 1; // Keep at least 1 prev render frame for anim triggering
    while (0 < self.recentRenderCache.cnt && self.recentRenderCache.stFrameId < minToKeepRenderFrameId) {
      self.recentRenderCache.pop();
    }
    const ret = self.recentRenderCache.setByFrameId(rdf, rdf.id);
    return ret;
  },

  dumpToInputCache: function(inputFrameDownsync) {
    const self = this;
    let minToKeepInputFrameId = self._convertToInputFrameId(self.lastAllConfirmedRenderFrameId, self.inputDelayFrames) - self.spAtkLookupFrames; // [WARNING] This could be different from "self.lastAllConfirmedInputFrameId". We'd like to keep the corresponding delayedInputFrame for "self.lastAllConfirmedRenderFrameId" such that a rollback could place "self.chaserRenderFrameId = self.lastAllConfirmedRenderFrameId" for the worst case incorrect prediction.
    if (minToKeepInputFrameId > self.lastAllConfirmedInputFrameId) {
      minToKeepInputFrameId = self.lastAllConfirmedInputFrameId;
    }
    while (0 < self.recentInputCache.cnt && self.recentInputCache.stFrameId < minToKeepInputFrameId) {
      self.recentInputCache.pop();
    }
    const ret = self.recentInputCache.setByFrameId(inputFrameDownsync, inputFrameDownsync.inputFrameId);
    if (-1 < self.lastAllConfirmedInputFrameId && self.recentInputCache.stFrameId > self.lastAllConfirmedInputFrameId) {
      console.error("Invalid input cache dumped! lastAllConfirmedRenderFrameId=", self.lastAllConfirmedRenderFrameId, ", lastAllConfirmedInputFrameId=", self.lastAllConfirmedInputFrameId, ", recentRenderCache=", self._stringifyRecentRenderCache(false), ", recentInputCache=", self._stringifyRecentInputCache(false));
    }
    return ret;
  },

  _convertToInputFrameId(renderFrameId, inputDelayFrames) {
    if (renderFrameId < inputDelayFrames) return 0;
    return ((renderFrameId - inputDelayFrames) >> this.inputScaleFrames);
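
`_convertToInputFrameId` above maps a renderFrame to its governing inputFrame by subtracting the delay and shifting out `inputScaleFrames` bits, so one inputFrame covers 2^inputScaleFrames renderFrames. A quick worked example with `inputScaleFrames = 2` (from the config hunk further below); the `inputDelayFrames = 4` value is assumed purely for illustration:

    // Worked example; inputDelayFrames = 4 is an assumed value for illustration.
    const inputDelayFrames = 4, inputScaleFrames = 2;
    const convert = (renderFrameId) => (renderFrameId < inputDelayFrames ? 0 : ((renderFrameId - inputDelayFrames) >> inputScaleFrames));
    console.log(convert(3));  // 0, still inside the delay window
    console.log(convert(10)); // 1, i.e. (10 - 4) >> 2
    console.log(convert(20)); // 4, i.e. 16 >> 2 -- 4 consecutive renderFrames share one inputFrame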
@ -153,28 +127,35 @@ cc.Class({
      null == self.ctrl ||
      null == self.selfPlayerInfo
    ) {
      return [null, null];
      throw `noDelayInputFrameId=${inputFrameId} couldn't be generated: recentInputCache=${self._stringifyRecentInputCache(false)}`;
    }

    let previousSelfInput = null,
      currSelfInput = null;
    const joinIndex = self.selfPlayerInfo.joinIndex;
    const previousInputFrameDownsyncWithPrediction = self.getCachedInputFrameDownsyncWithPrediction(inputFrameId);
    const previousSelfInput = (null == previousInputFrameDownsyncWithPrediction ? null : previousInputFrameDownsyncWithPrediction.inputList[joinIndex - 1]);
    // [WARNING] The while-loop here handles a situation where the "resync rdf & accompaniedInputFrameDownsyncBatch" mismatched and we have to predict some "gap-inputFrames"!
    while (self.recentInputCache.edFrameId <= inputFrameId) {
      // TODO: find some kind of synchronization mechanism against "onInputFrameDownsyncBatch"!
      const previousInputFrameDownsyncWithPrediction = self.getCachedInputFrameDownsyncWithPrediction(inputFrameId - 1);
      previousSelfInput = (null == previousInputFrameDownsyncWithPrediction ? null : previousInputFrameDownsyncWithPrediction.inputList[joinIndex - 1]);

      // If "forceConfirmation" is active on backend, we shouldn't override the already downsynced "inputFrameDownsync"s.
      // If "forceConfirmation" is active on backend, there's a chance that the already downsynced "inputFrameDownsync"s are ahead of a locally generating inputFrameId, in this case we respect the downsynced one.
      const existingInputFrame = self.recentInputCache.getByFrameId(inputFrameId);
      if (null != existingInputFrame && self._allConfirmed(existingInputFrame.confirmedList)) {
        console.log(`noDelayInputFrameId=${inputFrameId} already exists in recentInputCache and is all-confirmed: recentInputCache=${self._stringifyRecentInputCache(false)}`);
        return [previousSelfInput, existingInputFrame.inputList[joinIndex - 1]];
      }
      const prefabbedInputList = (null == previousInputFrameDownsyncWithPrediction ? new Array(self.playerRichInfoDict.size).fill(0) : previousInputFrameDownsyncWithPrediction.inputList.slice());
      const currSelfInput = self.ctrl.getEncodedInput();
      currSelfInput = self.ctrl.getEncodedInput();
      prefabbedInputList[(joinIndex - 1)] = currSelfInput;
      const prefabbedInputFrameDownsync = {
        inputFrameId: inputFrameId,
      const prefabbedInputFrameDownsync = window.pb.protos.InputFrameDownsync.create({
        inputFrameId: self.recentInputCache.edFrameId,
        inputList: prefabbedInputList,
        confirmedList: (1 << (self.selfPlayerInfo.joinIndex - 1))
      };
      });

      self.dumpToInputCache(prefabbedInputFrameDownsync); // A prefabbed inputFrame, would certainly be adding a new inputFrame to the cache, because server only downsyncs "all-confirmed inputFrames"
      self.recentInputCache.put(prefabbedInputFrameDownsync); // A prefabbed inputFrame, would certainly be adding a new inputFrame to the cache, because server only downsyncs "all-confirmed inputFrames"
    }

    return [previousSelfInput, currSelfInput];
  },
@ -203,7 +184,7 @@ cc.Class({
    for (let i = batchInputFrameIdSt; i <= latestLocalInputFrameId; ++i) {
      const inputFrameDownsync = self.recentInputCache.getByFrameId(i);
      if (null == inputFrameDownsync) {
        console.error("sendInputFrameUpsyncBatch: recentInputCache is NOT having inputFrameId=", i, ": latestLocalInputFrameId=", latestLocalInputFrameId, ", recentInputCache=", self._stringifyRecentInputCache(false));
        console.error(`sendInputFrameUpsyncBatch: recentInputCache is NOT having inputFrameId=${i}: latestLocalInputFrameId=${latestLocalInputFrameId}, recentInputCache=${self._stringifyRecentInputCache(false)}`);
      } else {
        const inputFrameUpsync = {
          inputFrameId: i,
@ -225,6 +206,9 @@ cc.Class({
    }).finish();
    window.sendSafely(reqData);
    self.lastUpsyncInputFrameId = latestLocalInputFrameId;
    if (self.lastUpsyncInputFrameId >= self.recentInputCache.edFrameId) {
      throw `noDelayInputFrameId=${self.lastUpsyncInputFrameId} == latestLocalInputFrameId=${latestLocalInputFrameId} seems not properly dumped #2: recentInputCache=${self._stringifyRecentInputCache(false)}`;
    }
  },

  onEnable() {
@ -312,7 +296,7 @@ cc.Class({
    self.recentRenderCache = new RingBuffer(self.renderCacheSize);

    self.selfPlayerInfo = null; // This field is kept for distinguishing "self" and "others".
    self.recentInputCache = new RingBuffer((self.renderCacheSize >> 2) + 1);
    self.recentInputCache = new RingBuffer((self.renderCacheSize >> 1) + 1);

    self.collisionSys = new collisions.Collisions();

@ -320,6 +304,8 @@ cc.Class({
    self.collisionBulletIndexPrefix = (1 << 15); // For tracking the movements of bullets
    self.collisionSysMap = new Map();

    console.log(`collisionSys & collisionSysMap reset`);

    self.transitToState(ALL_MAP_STATES.VISUAL);

    self.battleState = ALL_BATTLE_STATES.WAITING;
@ -413,7 +399,10 @@ cc.Class({
    /** Init required prefab ended. */

    window.handleBattleColliderInfo = function(parsedBattleColliderInfo) {
      console.log(`Received parsedBattleColliderInfo via ws`);
      // TODO: Upon reconnection, the backend might have already been sending down data that'd trigger "onRoomDownsyncFrame & onInputFrameDownsyncBatch", but frontend could reject those data due to "battleState != PlayerBattleState.ACTIVE".
      Object.assign(self, parsedBattleColliderInfo);
      self.tooFastDtIntervalMillis = 0.5 * self.rollbackEstimatedDtMillis;

      const tiledMapIns = self.node.getComponent(cc.TiledMap);

@ -458,7 +447,7 @@ cc.Class({
        const x0 = boundaryObj.anchor.x,
          y0 = boundaryObj.anchor.y;

        const newBarrier = self.collisionSys.createPolygon(x0, y0, Array.from(boundaryObj, p => {
        const newBarrierCollider = self.collisionSys.createPolygon(x0, y0, Array.from(boundaryObj, p => {
          return [p.x, p.y];
        }));

@ -493,10 +482,10 @@ cc.Class({
          }

        }
        // console.log("Created barrier: ", newBarrier);
        ++barrierIdCounter;
        const collisionBarrierIndex = (self.collisionBarrierIndexPrefix + barrierIdCounter);
        self.collisionSysMap.set(collisionBarrierIndex, newBarrier);
        self.collisionSysMap.set(collisionBarrierIndex, newBarrierCollider);
        // console.log(`Created new barrier collider: ${collisionBarrierIndex}`);
      }

      self.selfPlayerInfo = JSON.parse(cc.sys.localStorage.getItem('selfPlayer'));
@ -509,6 +498,7 @@ cc.Class({
          act: window.UPSYNC_MSG_ACT_PLAYER_COLLIDER_ACK,
        }).finish();
        window.sendSafely(reqData);
        console.log(`Sent UPSYNC_MSG_ACT_PLAYER_COLLIDER_ACK via ws`);
      });
    };
@ -571,17 +561,21 @@ cc.Class({
    this._inputControlEnabled = false;
  },

  onRoomDownsyncFrame(rdf) {
  onRoomDownsyncFrame(rdf, accompaniedInputFrameDownsyncBatch) {
    // This function is also applicable to "re-joining".
    const self = window.mapIns;
    if (rdf.id < self.lastAllConfirmedRenderFrameId) {
      return window.RING_BUFF_FAILED_TO_SET;
    if (!self.recentRenderCache) {
      return;
    }
    if (ALL_BATTLE_STATES.IN_SETTLEMENT == self.battleState) {
      return;
    }
    const shouldForceDumping1 = (window.MAGIC_ROOM_DOWNSYNC_FRAME_ID.BATTLE_START == rdf.id);
    const shouldForceDumping2 = (rdf.id > self.renderFrameId + self.renderFrameIdLagTolerance);

    const dumpRenderCacheRet = self.dumpToRenderCache(rdf);
    const [dumpRenderCacheRet, oldStRenderFrameId, oldEdRenderFrameId] = (shouldForceDumping1 || shouldForceDumping2) ? self.recentRenderCache.setByFrameId(rdf, rdf.id) : [window.RING_BUFF_CONSECUTIVE_SET, null, null];
    if (window.RING_BUFF_FAILED_TO_SET == dumpRenderCacheRet) {
      console.error("Something is wrong while setting the RingBuffer by frameId!");
      return dumpRenderCacheRet;
      throw `Failed to dump render cache#1 (maybe recentRenderCache too small)! rdf.id=${rdf.id}, lastAllConfirmedRenderFrameId=${self.lastAllConfirmedRenderFrameId}, lastAllConfirmedInputFrameId=${self.lastAllConfirmedInputFrameId}; recentRenderCache=${self._stringifyRecentRenderCache(false)}, recentInputCache=${self._stringifyRecentInputCache(false)}`;
    }
    if (window.MAGIC_ROOM_DOWNSYNC_FRAME_ID.BATTLE_START < rdf.id && window.RING_BUFF_CONSECUTIVE_SET == dumpRenderCacheRet) {
      /*
@ -592,13 +586,7 @@ cc.Class({
      return dumpRenderCacheRet;
    }

    // The logic below applies to ( || window.RING_BUFF_NON_CONSECUTIVE_SET == dumpRenderCacheRet)
    if (window.MAGIC_ROOM_DOWNSYNC_FRAME_ID.BATTLE_START == rdf.id) {
      console.log('On battle started! renderFrameId=', rdf.id);
    } else {
      console.log('On battle resynced! renderFrameId=', rdf.id);
    }

    // The logic below applies to (window.MAGIC_ROOM_DOWNSYNC_FRAME_ID.BATTLE_START == rdf.id || window.RING_BUFF_NON_CONSECUTIVE_SET == dumpRenderCacheRet)
    const players = rdf.players;
    self._initPlayerRichInfoDict(players);

@ -612,11 +600,24 @@ cc.Class({

    if (null == self.renderFrameId || self.renderFrameId <= rdf.id) {
      // In fact, not having "window.RING_BUFF_CONSECUTIVE_SET == dumpRenderCacheRet" should already imply that "self.renderFrameId <= rdf.id", but here we double check and log the anomaly

      if (window.MAGIC_ROOM_DOWNSYNC_FRAME_ID.BATTLE_START == rdf.id) {
        console.log('On battle started! renderFrameId=', rdf.id);
      } else {
        self.hideFindingPlayersGUI(rdf);
        self.onInputFrameDownsyncBatch(accompaniedInputFrameDownsyncBatch); // Important to do this step before setting IN_BATTLE
        console.warn(`Got resync@localRenderFrameId=${self.renderFrameId} -> rdf.id=${rdf.id} & rdf.backendUnconfirmedMask=${rdf.backendUnconfirmedMask}, @lastAllConfirmedRenderFrameId=${self.lastAllConfirmedRenderFrameId}, @lastAllConfirmedInputFrameId=${self.lastAllConfirmedInputFrameId}, @chaserRenderFrameId=${self.chaserRenderFrameId}, @localRecentInputCache=${mapIns._stringifyRecentInputCache(false)}`);
      }

      self.renderFrameId = rdf.id;
      self.lastRenderFrameIdTriggeredAt = performance.now();
      // In this case it must be true that "rdf.id > chaserRenderFrameId >= lastAllConfirmedRenderFrameId".
      self.lastAllConfirmedRenderFrameId = rdf.id;
      self.chaserRenderFrameId = rdf.id;
      const candidateLastAllConfirmedInputFrame = self._convertToInputFrameId(rdf.id - 1, self.inputDelayFrames);
      if (self.lastAllConfirmedInputFrame < candidateLastAllConfirmedInputFrame) {
        self.lastAllConfirmedInputFrame = candidateLastAllConfirmedInputFrame;
      }

      const canvasNode = self.canvasNode;
      self.ctrl = canvasNode.getComponent("TouchEventsManager");
@ -650,9 +651,12 @@ cc.Class({
  },

  onInputFrameDownsyncBatch(batch) {
    // TODO: find some kind of synchronization mechanism against "_generateInputFrameUpsync"!
    const self = this;
    if (ALL_BATTLE_STATES.IN_BATTLE != self.battleState
      && ALL_BATTLE_STATES.IN_SETTLEMENT != self.battleState) {
    if (!self.recentInputCache) {
      return;
    }
    if (ALL_BATTLE_STATES.IN_SETTLEMENT == self.battleState) {
      return;
    }

@ -663,6 +667,7 @@ cc.Class({
      if (inputFrameDownsyncId < self.lastAllConfirmedInputFrameId) {
        continue;
      }
      self.lastAllConfirmedInputFrameId = inputFrameDownsyncId;
      const localInputFrame = self.recentInputCache.getByFrameId(inputFrameDownsyncId);
      if (null != localInputFrame
        &&
@ -672,10 +677,12 @@ cc.Class({
      ) {
        firstPredictedYetIncorrectInputFrameId = inputFrameDownsyncId;
      }
      self.lastAllConfirmedInputFrameId = inputFrameDownsyncId;
      // [WARNING] Take all "inputFrameDownsync" from backend as all-confirmed, it'll be later checked by "rollbackAndChase".
      inputFrameDownsync.confirmedList = (1 << self.playerRichInfoDict.size) - 1;
      self.dumpToInputCache(inputFrameDownsync);
      const [ret, oldStFrameId, oldEdFrameId] = self.recentInputCache.setByFrameId(inputFrameDownsync, inputFrameDownsync.inputFrameId);
      if (window.RING_BUFF_FAILED_TO_SET == ret) {
        throw `Failed to dump input cache (maybe recentInputCache too small)! inputFrameDownsync.inputFrameId=${inputFrameDownsync.inputFrameId}, lastAllConfirmedRenderFrameId=${self.lastAllConfirmedRenderFrameId}, lastAllConfirmedInputFrameId=${self.lastAllConfirmedInputFrameId}; recentRenderCache=${self._stringifyRecentRenderCache(false)}, recentInputCache=${self._stringifyRecentInputCache(false)}`;
      }
    }

    if (null == firstPredictedYetIncorrectInputFrameId) return;
@ -716,7 +723,7 @@ cc.Class({
  logBattleStats() {
    const self = this;
    let s = [];
    s.push(`Battle stats: renderFrameId=${self.renderFrameId}, lastAllConfirmedRenderFrameId=${self.lastAllConfirmedRenderFrameId}, lastUpsyncInputFrameId=${self.lastUpsyncInputFrameId}, lastAllConfirmedInputFrameId=${self.lastAllConfirmedInputFrameId}, chaserRenderFrameId=${self.chaserRenderFrameId}`);
    s.push(`Battle stats: renderFrameId=${self.renderFrameId}, lastAllConfirmedRenderFrameId=${self.lastAllConfirmedRenderFrameId}, lastUpsyncInputFrameId=${self.lastUpsyncInputFrameId}, lastAllConfirmedInputFrameId=${self.lastAllConfirmedInputFrameId}, chaserRenderFrameId=${self.chaserRenderFrameId}; recentRenderCache=${self._stringifyRecentRenderCache(false)}, recentInputCache=${self._stringifyRecentInputCache(false)}`);

    for (let i = self.recentInputCache.stFrameId; i < self.recentInputCache.edFrameId; ++i) {
      const inputFrameDownsync = self.recentInputCache.getByFrameId(i);
@ -761,7 +768,8 @@ cc.Class({
    const [wx, wy] = self.virtualGridToWorldPos(vx, vy);
    newPlayerNode.setPosition(wx, wy);
    playerScriptIns.mapNode = self.node;
    const colliderWidth = playerDownsyncInfo.colliderRadius * 2, colliderHeight = playerDownsyncInfo.colliderRadius * 3;
    const colliderWidth = playerDownsyncInfo.colliderRadius * 2,
      colliderHeight = playerDownsyncInfo.colliderRadius * 3;
    const [x0, y0] = self.virtualGridToPolygonColliderAnchorPos(vx, vy, colliderWidth, colliderHeight),
      pts = [[0, 0], [colliderWidth, 0], [colliderWidth, colliderHeight], [0, colliderHeight]];

@ -770,6 +778,8 @@ cc.Class({
    newPlayerCollider.data = playerDownsyncInfo;
    self.collisionSysMap.set(collisionPlayerIndex, newPlayerCollider);

    console.log(`Created new player collider: joinIndex=${joinIndex}, colliderRadius=${playerDownsyncInfo.colliderRadius}`);

    safelyAddChild(self.node, newPlayerNode);
    setLocalZOrder(newPlayerNode, 5);
@ -783,7 +793,8 @@ cc.Class({
    const self = this;
    if (ALL_BATTLE_STATES.IN_BATTLE == self.battleState) {
      const elapsedMillisSinceLastFrameIdTriggered = performance.now() - self.lastRenderFrameIdTriggeredAt;
      if (elapsedMillisSinceLastFrameIdTriggered < (self.rollbackEstimatedDtMillis)) {
      if (elapsedMillisSinceLastFrameIdTriggered < self.tooFastDtIntervalMillis) {
        // [WARNING] We should avoid a frontend ticking too fast to prevent cheating, as well as ticking too slow to cause a "resync avalanche" that impacts user experience!
        // console.debug("Avoiding too fast frame@renderFrameId=", self.renderFrameId, ": elapsedMillisSinceLastFrameIdTriggered=", elapsedMillisSinceLastFrameIdTriggered);
        return;
      }
@ -822,17 +833,13 @@ cc.Class({
      */
      // [WARNING] Don't try to get "prevRdf(i.e. renderFrameId == latest-1)" by "self.recentRenderCache.getByFrameId(...)" here, as the cache might have been updated by asynchronous "onRoomDownsyncFrame(...)" calls!
      self.applyRoomDownsyncFrameDynamics(rdf, prevRdf);
      ++self.renderFrameId; // [WARNING] It's important to increment the renderFrameId AFTER all the operations above!!!
      self.lastRenderFrameIdTriggeredAt = performance.now();
      let t3 = performance.now();
    } catch (err) {
      console.error("Error during Map.update", err);
      self.onBattleStopped(); // TODO: Popup to ask player to refresh browser
    } finally {
      // Update countdown
      self.countdownNanos = self.battleDurationNanos - self.renderFrameId * self.rollbackEstimatedDtNanos;
      if (self.countdownNanos <= 0) {
        self.onBattleStopped(self.playerRichInfoDict);
        return;
      }

      const countdownSeconds = parseInt(self.countdownNanos / 1000000000);
      if (isNaN(countdownSeconds)) {
        console.warn(`countdownSeconds is NaN for countdownNanos == ${self.countdownNanos}.`);
@ -840,8 +847,6 @@ cc.Class({
      if (null != self.countdownLabel) {
        self.countdownLabel.string = countdownSeconds;
      }
      ++self.renderFrameId; // [WARNING] It's important to increment the renderFrameId AFTER all the operations above!!!
      self.lastRenderFrameIdTriggeredAt = performance.now();
    }
  }
},
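
With `rollbackEstimatedDtMillis = 16.667` from the config below, the `tooFastDtIntervalMillis` guard above evaluates to roughly 8.33 ms: any tick arriving within half of the estimated frame interval is skipped instead of advancing `renderFrameId`. A minimal sketch of the same pacing guard in isolation; only the constants come from the config, the function wrapper is illustrative:

    // Minimal pacing sketch mirroring the guard above.
    const rollbackEstimatedDtMillis = 16.667;
    const tooFastDtIntervalMillis = 0.5 * rollbackEstimatedDtMillis; // ~8.33 ms
    let lastRenderFrameIdTriggeredAt = 0;
    function shouldSkipTick(nowMillis) {
      return (nowMillis - lastRenderFrameIdTriggeredAt) < tooFastDtIntervalMillis;
    }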
@ -921,9 +926,6 @@ cc.Class({
    const self = this;
    if (null == self.findingPlayerNode.parent) return;
    self.findingPlayerNode.parent.removeChild(self.findingPlayerNode);
    if (null != rdf) {
      self._initPlayerRichInfoDict(rdf.players);
    }
  },

  onBattleReadyToStart(rdf) {
@ -967,15 +969,21 @@ cc.Class({
      playerRichInfo.scriptIns.updateSpeed(immediatePlayerInfo.speed);
      playerRichInfo.scriptIns.updateCharacterAnim(immediatePlayerInfo, prevRdfPlayer, false);
    }

    // Update countdown
    self.countdownNanos = self.battleDurationNanos - self.renderFrameId * self.rollbackEstimatedDtNanos;
    if (self.countdownNanos <= 0) {
      self.onBattleStopped(self.playerRichInfoDict);
    }
  },
  getCachedInputFrameDownsyncWithPrediction(inputFrameId) {
    const self = this;
    let inputFrameDownsync = self.recentInputCache.getByFrameId(inputFrameId);
    if (null != inputFrameDownsync && -1 != self.lastAllConfirmedInputFrameId && inputFrameId > self.lastAllConfirmedInputFrameId) {
    const inputFrameDownsync = self.recentInputCache.getByFrameId(inputFrameId);
    const lastAllConfirmedInputFrame = self.recentInputCache.getByFrameId(self.lastAllConfirmedInputFrameId);
    if (null != inputFrameDownsync && null != lastAllConfirmedInputFrame && inputFrameId > self.lastAllConfirmedInputFrameId) {
      for (let i = 0; i < inputFrameDownsync.inputList.length; ++i) {
        if (i == self.selfPlayerInfo.joinIndex - 1) continue;
        if (i == (self.selfPlayerInfo.joinIndex - 1)) continue;
        inputFrameDownsync.inputList[i] = (lastAllConfirmedInputFrame.inputList[i] & 15); // Don't predict attack input!
      }
    }
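
The prediction above never touches the self player's slot: each remote player's entry is overwritten with its last all-confirmed input masked by `& 15`, which keeps only the low 4 bits so an attack input is never predicted. A standalone sketch with illustrative sample values (the exact bit layout beyond "low bits are movement, higher bits include attack" is an assumption here):

    // Standalone sketch of the "& 15" prediction masking; values are illustrative.
    const selfJoinIndex = 1;
    const lastAllConfirmedInputList = [9, 21]; // 21 == 0b10101 carries a high (attack) bit
    const predicted = lastAllConfirmedInputList.slice();
    for (let i = 0; i < predicted.length; ++i) {
      if (i == (selfJoinIndex - 1)) continue; // never overwrite the self input
      predicted[i] = (lastAllConfirmedInputList[i] & 15); // 21 & 15 == 5, attack bit dropped
    }
    console.log(predicted); // [9, 5]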
@ -1007,11 +1015,7 @@ cc.Class({
      };
    }

    const toRet = {
      id: currRenderFrame.id + 1,
      players: nextRenderFramePlayers,
      meleeBullets: []
    };
    const nextRenderFrameMeleeBullets = [];

    const bulletPushbacks = new Array(self.playerRichInfoArr.length); // Guaranteed determinism regardless of traversal order
    const effPushbacks = new Array(self.playerRichInfoArr.length); // Guaranteed determinism regardless of traversal order
@ -1104,7 +1108,7 @@ cc.Class({
        collisionSysMap.delete(collisionBulletIndex);
      }
      if (removedBulletsAtCurrFrame.has(collisionBulletIndex)) continue;
      toRet.meleeBullets.push(meleeBullet);
      nextRenderFrameMeleeBullets.push(meleeBullet);
    }

    // Process player inputs
@ -1145,7 +1149,7 @@ cc.Class({
          punch.offenderJoinIndex = joinIndex;
          punch.offenderPlayerId = playerId;
          punch.originatedRenderFrameId = currRenderFrame.id;
          toRet.meleeBullets.push(punch);
          nextRenderFrameMeleeBullets.push(punch);
          // console.log(`A rising-edge of meleeBullet is created at renderFrame.id=${currRenderFrame.id}, delayedInputFrame.id=${delayedInputFrame.inputFrameId}: ${self._stringifyRecentInputCache(true)}`);
          // console.log(`A rising-edge of meleeBullet is created at renderFrame.id=${currRenderFrame.id}, delayedInputFrame.id=${delayedInputFrame.inputFrameId}`);

@ -1198,7 +1202,11 @@ cc.Class({

    }

    return toRet;
    return window.pb.protos.RoomDownsyncFrame.create({
      id: currRenderFrame.id + 1,
      players: nextRenderFramePlayers,
      meleeBullets: nextRenderFrameMeleeBullets,
    });
  },
  rollbackAndChase(renderFrameIdSt, renderFrameIdEd, collisionSys, collisionSysMap, isChasing) {
@ -1206,31 +1214,24 @@ cc.Class({
    This function eventually calculates a "RoomDownsyncFrame" where "RoomDownsyncFrame.id == renderFrameIdEd" if not interrupted.
    */
    const self = this;
    let prevLatestRdf = null;
    let latestRdf = self.recentRenderCache.getByFrameId(renderFrameIdSt); // typed "RoomDownsyncFrame"
    let i = renderFrameIdSt,
      prevLatestRdf = null,
      latestRdf = null;

    do {
      latestRdf = self.recentRenderCache.getByFrameId(i); // typed "RoomDownsyncFrame"; [WARNING] When "true == isChasing", this function can be interrupted by "onRoomDownsyncFrame(rdf)" asynchronously anytime, making this line return "null"!
      if (null == latestRdf) {
        console.error(`Couldn't find renderFrameId=${renderFrameIdSt}, to rollback, lastAllConfirmedRenderFrameId=${self.lastAllConfirmedRenderFrameId}, lastAllConfirmedInputFrameId=${self.lastAllConfirmedInputFrameId}, recentRenderCache=${self._stringifyRecentRenderCache(false)}, recentInputCache=${self._stringifyRecentInputCache(false)}`);
        return [prevLatestRdf, latestRdf];
      }

    if (renderFrameIdSt >= renderFrameIdEd) {
      return [prevLatestRdf, latestRdf];
    }

    for (let i = renderFrameIdSt; i < renderFrameIdEd; ++i) {
      const currRenderFrame = self.recentRenderCache.getByFrameId(i); // typed "RoomDownsyncFrame"; [WARNING] When "true == isChasing", this function can be interrupted by "onRoomDownsyncFrame(rdf)" asynchronously anytime, making this line return "null"!
      if (null == currRenderFrame) {
        console.warn(`Couldn't find renderFrame for i=${i} to rollback, self.renderFrameId=${self.renderFrameId}, lastAllConfirmedRenderFrameId=${self.lastAllConfirmedRenderFrameId}, lastAllConfirmedInputFrameId=${self.lastAllConfirmedInputFrameId}, might've been interrupted by onRoomDownsyncFrame`);
        return [prevLatestRdf, latestRdf];
      }
      const j = self._convertToInputFrameId(i, self.inputDelayFrames);
      const delayedInputFrame = self.getCachedInputFrameDownsyncWithPrediction(j);
      if (null == delayedInputFrame) {
        console.warn(`Failed to get cached delayedInputFrame for i=${i}, j=${j}, self.renderFrameId=${self.renderFrameId}, lastAllConfirmedRenderFrameId=${self.lastAllConfirmedRenderFrameId}, lastAllConfirmedInputFrameId=${self.lastAllConfirmedInputFrameId}`);
        return [prevLatestRdf, latestRdf];
        // Shouldn't happen!
        throw `Failed to get cached delayedInputFrame for i=${i}, j=${j}, renderFrameId=${self.renderFrameId}, lastAllConfirmedRenderFrameId=${self.lastAllConfirmedRenderFrameId}, lastUpsyncInputFrameId=${self.lastUpsyncInputFrameId}, lastAllConfirmedInputFrameId=${self.lastAllConfirmedInputFrameId}, chaserRenderFrameId=${self.chaserRenderFrameId}; recentRenderCache=${self._stringifyRecentRenderCache(false)}, recentInputCache=${self._stringifyRecentInputCache(false)}`;
      }
      prevLatestRdf = latestRdf;
      latestRdf = self.applyInputFrameDownsyncDynamicsOnSingleRenderFrame(delayedInputFrame, currRenderFrame, collisionSys, collisionSysMap);
      latestRdf = self.applyInputFrameDownsyncDynamicsOnSingleRenderFrame(delayedInputFrame, prevLatestRdf, collisionSys, collisionSysMap);
      if (
        self._allConfirmed(delayedInputFrame.confirmedList)
        &&
@ -1248,8 +1249,9 @@ cc.Class({
        // Move the cursor "self.chaserRenderFrameId", keep in mind that "self.chaserRenderFrameId" is not monotonic!
        self.chaserRenderFrameId = latestRdf.id;
      }
      self.dumpToRenderCache(latestRdf);
    }
      self.recentRenderCache.setByFrameId(latestRdf, latestRdf.id);
      ++i;
    } while (i < renderFrameIdEd);

    return [prevLatestRdf, latestRdf];
  },
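
Because `rollbackAndChase` recomputes every renderFrame in [renderFrameIdSt, renderFrameIdEd) in one go, a caller would typically "chase" in bounded steps per update so that a long rollback can't stall a tick. A sketch of such a driver; only the `rollbackAndChase` signature and the `chaserRenderFrameId` cursor semantics come from the code above, and the `maxChasingRenderFramesPerUpdate` cap is an assumed knob:

    // Bounded chasing sketch; "maxChasingRenderFramesPerUpdate" is hypothetical.
    function chaseRolledbackRdfs(self, maxChasingRenderFramesPerUpdate) {
      const nextChaserRenderFrameId = Math.min(
        self.chaserRenderFrameId + maxChasingRenderFramesPerUpdate,
        self.renderFrameId // never chase past the currently displayed frame
      );
      if (nextChaserRenderFrameId > self.chaserRenderFrameId) {
        // With "true == isChasing", rollbackAndChase advances self.chaserRenderFrameId internally.
        self.rollbackAndChase(self.chaserRenderFrameId, nextChaserRenderFrameId, self.collisionSys, self.collisionSysMap, true);
      }
    }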
@ -33,6 +33,7 @@ cc.Class({
    self.inputScaleFrames = 2;
    self.inputFrameUpsyncDelayTolerance = 2;

    self.renderCacheSize = 1024;
    self.rollbackEstimatedDt = 0.016667;
    self.rollbackEstimatedDtMillis = 16.667;
    self.rollbackEstimatedDtNanos = 16666666;
@ -13,6 +13,10 @@ var RingBuffer = function(capacity) {
};

RingBuffer.prototype.put = function(item) {
  while (this.cnt >= this.n - 1) {
    // Make room for the new element
    this.pop();
  }
  this.eles[this.ed] = item
  this.edFrameId++;
  this.cnt++;
@ -61,40 +65,41 @@ RingBuffer.prototype.getArrIdxByOffset = function(offsetFromSt) {
};

RingBuffer.prototype.getByFrameId = function(frameId) {
  if (frameId >= this.edFrameId) return null;
  const arrIdx = this.getArrIdxByOffset(frameId - this.stFrameId);
  return (null == arrIdx ? null : this.eles[arrIdx]);
};

// [WARNING] During a battle, frontend could receive non-consecutive frames (either renderFrame or inputFrame) due to resync, the buffer should handle these frames properly.
RingBuffer.prototype.setByFrameId = function(item, frameId) {
  if (frameId < this.stFrameId) {
    console.error("Invalid putByFrameId#1: stFrameId=", this.stFrameId, ", edFrameId=", this.edFrameId, ", incoming item=", item);
    return window.RING_BUFF_FAILED_TO_SET;
  const oldStFrameId = this.stFrameId,
    oldEdFrameId = this.edFrameId;
  if (frameId < oldStFrameId) {
    return [window.RING_BUFF_FAILED_TO_SET, oldStFrameId, oldEdFrameId];
  }
  // By now "this.stFrameId <= frameId"

  if (oldEdFrameId > frameId) {
    const arrIdx = this.getArrIdxByOffset(frameId - this.stFrameId);
    if (null != arrIdx) {
      this.eles[arrIdx] = item;
      return window.RING_BUFF_CONSECUTIVE_SET;
      return [window.RING_BUFF_CONSECUTIVE_SET, oldStFrameId, oldEdFrameId];
    }
  }

  // When "null == arrIdx", should it still be deemed consecutive if "frameId == edFrameId" prior to the reset?
  // By now "this.edFrameId <= frameId"
  let ret = window.RING_BUFF_CONSECUTIVE_SET;
  if (this.edFrameId < frameId) {
  if (oldEdFrameId < frameId) {
    this.st = this.ed = 0;
    this.stFrameId = this.edFrameId = frameId;
    this.cnt = 0;
    ret = window.RING_BUFF_NON_CONSECUTIVE_SET;
  }

  this.eles[this.ed] = item
  this.edFrameId++;
  this.cnt++;
  this.ed++;
  if (this.ed >= this.n) {
    this.ed -= this.n; // Deliberately not using "%" operator for performance concern
  }
  // By now "this.edFrameId == frameId"
  this.put(item);

  return ret;
  return [ret, oldStFrameId, oldEdFrameId];
};

module.exports = RingBuffer;
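
Taken together, `put` now evicts the oldest element once `cnt` reaches `n - 1`, and `setByFrameId` reports how the write related to the existing range, rebasing the whole buffer when a "future" frameId arrives (e.g. after a resync). A small usage sketch of the module above; it assumes a browser-like environment where the `window.RING_BUFF_*` codes are already defined elsewhere in the frontend:

    // Usage sketch for the RingBuffer above.
    const RingBuffer = require('./RingBuffer');
    const rb = new RingBuffer(4); // n == 4, so at most 3 elements are retained

    rb.put("f0"); rb.put("f1"); rb.put("f2");
    rb.put("f3"); // evicts "f0" first to make room
    console.log(rb.stFrameId, rb.edFrameId); // 1 4

    let [ret, oldSt, oldEd] = rb.setByFrameId("f2'", 2); // in-range overwrite
    console.log(ret == window.RING_BUFF_CONSECUTIVE_SET); // true, range untouched

    [ret, oldSt, oldEd] = rb.setByFrameId("f10", 10); // a "future" frame, e.g. upon resync
    console.log(ret == window.RING_BUFF_NON_CONSECUTIVE_SET, rb.stFrameId, rb.edFrameId); // true 10 11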
@ -5,7 +5,6 @@ window.UPSYNC_MSG_ACT_PLAYER_CMD = 2;
window.UPSYNC_MSG_ACT_PLAYER_COLLIDER_ACK = 3;

window.DOWNSYNC_MSG_ACT_PLAYER_ADDED_AND_ACKED = -98;
window.DOWNSYNC_MSG_ACT_PLAYER_READDED_AND_ACKED = -97;
window.DOWNSYNC_MSG_ACT_BATTLE_READY_TO_START = -1;
window.DOWNSYNC_MSG_ACT_BATTLE_START = 0;
window.DOWNSYNC_MSG_ACT_HB_REQ = 1;
@ -135,7 +134,7 @@ window.initPersistentSessionClient = function(onopenCb, expectedRoomId) {
  clientSession.binaryType = 'arraybuffer'; // Make 'event.data' of 'onmessage' an "ArrayBuffer" instead of a "Blob"

  clientSession.onopen = function(evt) {
    console.log("The WS clientSession is opened. clientSession.id=", clientSession.id);
    console.log("The WS clientSession is opened.");
    window.clientSession = clientSession;
    if (null == onopenCb) return;
    onopenCb();
@ -147,17 +146,14 @@ window.initPersistentSessionClient = function(onopenCb, expectedRoomId) {
    }
    try {
      const resp = window.pb.protos.WsResp.decode(new Uint8Array(evt.data));
      // console.log(`Got non-empty onmessage decoded: resp.act=${resp.act}`);
      switch (resp.act) {
        case window.DOWNSYNC_MSG_ACT_HB_REQ:
          window.handleHbRequirements(resp); // Fetches boundRoomId and stores it in localStorage
          window.handleHbRequirements(resp);
          break;
        case window.DOWNSYNC_MSG_ACT_PLAYER_ADDED_AND_ACKED:
          mapIns.onPlayerAdded(resp.rdf);
          break;
        case window.DOWNSYNC_MSG_ACT_PLAYER_READDED_AND_ACKED:
          // Deliberately left blank for now
          mapIns.hideFindingPlayersGUI(resp.rdf);
          break;
        case window.DOWNSYNC_MSG_ACT_BATTLE_READY_TO_START:
          mapIns.onBattleReadyToStart(resp.rdf);
          break;
@ -172,16 +168,10 @@ window.initPersistentSessionClient = function(onopenCb, expectedRoomId) {
          break;
        case window.DOWNSYNC_MSG_ACT_FORCED_RESYNC:
          if (null == resp.inputFrameDownsyncBatch || 0 >= resp.inputFrameDownsyncBatch.length) {
            console.error(`Got empty inputFrameDownsyncBatch upon resync@localRenderFrameId=${mapIns.renderFrameId}, @lastAllConfirmedRenderFrameId=${mapIns.lastAllConfirmedRenderFrameId}, @lastAllConfirmedInputFrameId=${mapIns.lastAllConfirmedInputFrameId}, @chaserRenderFrameId=${mapIns.chaserRenderFrameId}, @localRecentInputCache=${mapIns._stringifyRecentInputCache(false)}, the incoming resp=
${JSON.stringify(resp, null, 2)}`);
            console.error(`Got empty inputFrameDownsyncBatch upon resync@localRenderFrameId=${mapIns.renderFrameId}, @lastAllConfirmedRenderFrameId=${mapIns.lastAllConfirmedRenderFrameId}, @lastAllConfirmedInputFrameId=${mapIns.lastAllConfirmedInputFrameId}, @chaserRenderFrameId=${mapIns.chaserRenderFrameId}, @localRecentInputCache=${mapIns._stringifyRecentInputCache(false)}, the incoming resp=${JSON.stringify(resp, null, 2)}`);
            return;
          }
          const inputFrameIdConsecutive = (resp.inputFrameDownsyncBatch[0].inputFrameId == mapIns.lastAllConfirmedInputFrameId + 1);
          const renderFrameIdConsecutive = (resp.rdf.id <= mapIns.renderFrameId + mapIns.renderFrameIdLagTolerance);
          console.warn(`Got resync@localRenderFrameId=${mapIns.renderFrameId}, @lastAllConfirmedRenderFrameId=${mapIns.lastAllConfirmedRenderFrameId}, @lastAllConfirmedInputFrameId=${mapIns.lastAllConfirmedInputFrameId}, @chaserRenderFrameId=${mapIns.chaserRenderFrameId}, @localRecentInputCache=${mapIns._stringifyRecentInputCache(false)}, inputFrameIdConsecutive=${inputFrameIdConsecutive}, renderFrameIdConsecutive=${renderFrameIdConsecutive}`);
          // The following order of execution is important
          mapIns.onRoomDownsyncFrame(resp.rdf);
          mapIns.onInputFrameDownsyncBatch(resp.inputFrameDownsyncBatch);
          mapIns.onRoomDownsyncFrame(resp.rdf, resp.inputFrameDownsyncBatch);
          break;
        default:
          break;
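
After this change the resync path funnels through a single call: `onRoomDownsyncFrame` receives the accompanied batch and, as its body earlier in this diff shows, dumps the inputFrames into the cache before adopting `rdf.id` as the new all-confirmed/chaser cursor. A condensed sketch of that ordering contract; the wrapper name is illustrative, while the statements mirror the resync branch of `onRoomDownsyncFrame`:

    // Condensed ordering sketch for a forced resync; wrapper name is hypothetical.
    function applyForcedResync(self, rdf, accompaniedInputFrameDownsyncBatch) {
      self.onInputFrameDownsyncBatch(accompaniedInputFrameDownsyncBatch); // 1) fill the input cache first
      self.renderFrameId = rdf.id;                 // 2) only then rebase the render cursors, so that
      self.lastAllConfirmedRenderFrameId = rdf.id; //    rollbackAndChase never starts from a frame
      self.chaserRenderFrameId = rdf.id;           //    whose delayed inputFrame is still missing
    }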
@ -5082,6 +5082,7 @@ $root.protos = (function() {
     * @property {Object.<string,protos.PlayerDownsync>|null} [players] RoomDownsyncFrame players
     * @property {number|Long|null} [countdownNanos] RoomDownsyncFrame countdownNanos
     * @property {Array.<protos.MeleeBullet>|null} [meleeBullets] RoomDownsyncFrame meleeBullets
     * @property {number|Long|null} [backendUnconfirmedMask] RoomDownsyncFrame backendUnconfirmedMask
     */

    /**
@ -5133,6 +5134,14 @@ $root.protos = (function() {
     */
    RoomDownsyncFrame.prototype.meleeBullets = $util.emptyArray;

    /**
     * RoomDownsyncFrame backendUnconfirmedMask.
     * @member {number|Long} backendUnconfirmedMask
     * @memberof protos.RoomDownsyncFrame
     * @instance
     */
    RoomDownsyncFrame.prototype.backendUnconfirmedMask = $util.Long ? $util.Long.fromBits(0,0,true) : 0;

    /**
     * Creates a new RoomDownsyncFrame instance using the specified properties.
     * @function create
@ -5169,6 +5178,8 @@ $root.protos = (function() {
      if (message.meleeBullets != null && message.meleeBullets.length)
        for (var i = 0; i < message.meleeBullets.length; ++i)
          $root.protos.MeleeBullet.encode(message.meleeBullets[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim();
      if (message.backendUnconfirmedMask != null && Object.hasOwnProperty.call(message, "backendUnconfirmedMask"))
        writer.uint32(/* id 5, wireType 0 =*/40).uint64(message.backendUnconfirmedMask);
      return writer;
    };

@ -5240,6 +5251,10 @@ $root.protos = (function() {
          message.meleeBullets.push($root.protos.MeleeBullet.decode(reader, reader.uint32()));
          break;
        }
        case 5: {
          message.backendUnconfirmedMask = reader.uint64();
          break;
        }
        default:
          reader.skipType(tag & 7);
          break;
@ -5304,6 +5319,9 @@ $root.protos = (function() {
            return "meleeBullets." + error;
        }
      }
      if (message.backendUnconfirmedMask != null && message.hasOwnProperty("backendUnconfirmedMask"))
        if (!$util.isInteger(message.backendUnconfirmedMask) && !(message.backendUnconfirmedMask && $util.isInteger(message.backendUnconfirmedMask.low) && $util.isInteger(message.backendUnconfirmedMask.high)))
          return "backendUnconfirmedMask: integer|Long expected";
      return null;
    };

@ -5350,6 +5368,15 @@ $root.protos = (function() {
          message.meleeBullets[i] = $root.protos.MeleeBullet.fromObject(object.meleeBullets[i]);
        }
      }
      if (object.backendUnconfirmedMask != null)
        if ($util.Long)
          (message.backendUnconfirmedMask = $util.Long.fromValue(object.backendUnconfirmedMask)).unsigned = true;
        else if (typeof object.backendUnconfirmedMask === "string")
          message.backendUnconfirmedMask = parseInt(object.backendUnconfirmedMask, 10);
        else if (typeof object.backendUnconfirmedMask === "number")
          message.backendUnconfirmedMask = object.backendUnconfirmedMask;
        else if (typeof object.backendUnconfirmedMask === "object")
          message.backendUnconfirmedMask = new $util.LongBits(object.backendUnconfirmedMask.low >>> 0, object.backendUnconfirmedMask.high >>> 0).toNumber(true);
      return message;
    };

@ -5377,6 +5404,11 @@ $root.protos = (function() {
          object.countdownNanos = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long;
        } else
          object.countdownNanos = options.longs === String ? "0" : 0;
        if ($util.Long) {
          var long = new $util.Long(0, 0, true);
          object.backendUnconfirmedMask = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long;
        } else
          object.backendUnconfirmedMask = options.longs === String ? "0" : 0;
      }
      if (message.id != null && message.hasOwnProperty("id"))
        object.id = message.id;
@ -5396,6 +5428,11 @@ $root.protos = (function() {
        for (var j = 0; j < message.meleeBullets.length; ++j)
          object.meleeBullets[j] = $root.protos.MeleeBullet.toObject(message.meleeBullets[j], options);
      }
      if (message.backendUnconfirmedMask != null && message.hasOwnProperty("backendUnconfirmedMask"))
        if (typeof message.backendUnconfirmedMask === "number")
          object.backendUnconfirmedMask = options.longs === String ? String(message.backendUnconfirmedMask) : message.backendUnconfirmedMask;
        else
          object.backendUnconfirmedMask = options.longs === String ? $util.Long.prototype.toString.call(message.backendUnconfirmedMask) : options.longs === Number ? new $util.LongBits(message.backendUnconfirmedMask.low >>> 0, message.backendUnconfirmedMask.high >>> 0).toNumber(true) : message.backendUnconfirmedMask;
      return object;
    };