mirror of https://github.com/genxium/DelayNoMore (synced 2024-12-25 11:18:55 +00:00)

Added prediction fault detection in frontend.

This commit is contained in:
parent 727e66787f
commit 3c15e21652
@@ -772,13 +772,13 @@ func (pR *Room) OnDismissed() {
pR.RollbackEstimatedDtNanos = 16666666 // A little smaller than the actual per frame time, just for logging FAST FRAME
dilutedServerFps := float64(58.0) // Don't set this value too small, otherwise we might miss force confirmation needs for slow tickers!
pR.dilutedRollbackEstimatedDtNanos = int64(float64(pR.RollbackEstimatedDtNanos) * float64(pR.ServerFps) / dilutedServerFps)
pR.BattleDurationFrames = 120 * pR.ServerFps
pR.BattleDurationFrames = 60 * pR.ServerFps
pR.BattleDurationNanos = int64(pR.BattleDurationFrames) * (pR.RollbackEstimatedDtNanos + 1)
pR.InputFrameUpsyncDelayTolerance = (pR.NstDelayFrames >> pR.InputScaleFrames) - 1 // this value should be strictly smaller than (NstDelayFrames >> InputScaleFrames), otherwise "type#1 forceConfirmation" might become a lag avalanche
pR.MaxChasingRenderFramesPerUpdate = 12 // Don't set this value too high to avoid exhausting frontend CPU within a single frame

pR.BackendDynamicsEnabled = true // [WARNING] When "false", recovery upon reconnection wouldn't work!
pR.ForceAllResyncOnAnyActiveSlowTicker = false // See tradeoff discussion in "downsyncToAllPlayers"
pR.ForceAllResyncOnAnyActiveSlowTicker = true // See tradeoff discussion in "downsyncToAllPlayers"
punchSkillId := int32(1)
pR.MeleeSkillConfig = make(map[int32]*MeleeBullet, 0)
pR.MeleeSkillConfig[punchSkillId] = &MeleeBullet{
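For a quick sanity check of the timing constants above, the diluted-tick arithmetic works out as follows. This is a minimal standalone sketch in plain JavaScript; ServerFps = 60, NstDelayFrames = 8 and InputScaleFrames = 2 are illustrative assumptions, not values taken from this commit:

```js
// Slightly under 1/60s per tick, exactly as configured in the hunk above.
const rollbackEstimatedDtNanos = 16666666;
const serverFps = 60;          // assumed
const dilutedServerFps = 58.0; // deliberately lower than serverFps

// Stretch each estimated tick so the backend scheduler runs a touch slower than nominal.
const dilutedRollbackEstimatedDtNanos = Math.floor(rollbackEstimatedDtNanos * serverFps / dilutedServerFps);
console.log(dilutedRollbackEstimatedDtNanos); // 17241378, i.e. roughly 1/58s

// The force-confirmation tolerance must stay strictly below (NstDelayFrames >> InputScaleFrames).
const nstDelayFrames = 8, inputScaleFrames = 2; // assumed
const inputFrameUpsyncDelayTolerance = (nstDelayFrames >> inputScaleFrames) - 1;
console.log(inputFrameUpsyncDelayTolerance); // 1
```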
@@ -879,7 +879,7 @@ func (pR *Room) OnPlayerDisconnected(playerId int32) {
default:
atomic.StoreInt32(&(pR.Players[playerId].BattleState), PlayerBattleStateIns.DISCONNECTED)
pR.clearPlayerNetworkSession(playerId) // Still need clear the network session pointers, because "OnPlayerDisconnected" is only triggered from "signalToCloseConnOfThisPlayer" in "ws/serve.go", when the same player reconnects the network session pointers will be re-assigned
Logger.Warn("OnPlayerDisconnected finished:", zap.Any("roomId", pR.Id), zap.Any("playerId", playerId), zap.Any("playerBattleState", pR.Players[playerId].BattleState), zap.Any("nowRoomBattleState", pR.State), zap.Any("nowRoomEffectivePlayerCount", pR.EffectivePlayerCount))
Logger.Warn("OnPlayerDisconnected finished:", zap.Any("roomId", pR.Id), zap.Any("playerId", playerId), zap.Any("playerBattleState", pR.Players[playerId].BattleState), zap.Any("nowRoomBattleState", pR.State), zap.Any("nowRoomEffectivePlayerCount", pR.EffectivePlayerCount), zap.Any("now InputsBuffer", pR.InputsBufferString(true)))
}
}

@@ -1065,7 +1065,7 @@ func (pR *Room) shouldPrefabInputFrameDownsync(prevRenderFrameId int32, renderFr
return false, -1
}

func (pR *Room) prefabInputFrameDownsync(inputFrameId int32) *InputFrameDownsync {
func (pR *Room) getOrPrefabInputFrameDownsync(inputFrameId int32) *InputFrameDownsync {
/*
[WARNING] This function MUST BE called while "pR.InputsBufferLock" is locked.
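The rename from prefabInputFrameDownsync to getOrPrefabInputFrameDownsync signals a get-or-create contract: if the requested inputFrameId already sits inside the buffer, the existing entry is returned untouched; otherwise entries are prefabbed up to and including it. A rough sketch of that contract in JavaScript, using hypothetical cache/prefab helpers that mirror the frontend's recentInputCache API; it is not the Go implementation, which additionally requires holding InputsBufferLock:

```js
// "cache" exposes getByFrameId/put/edFrameId like the frontend's recentInputCache;
// "prefab" builds a fresh InputFrameDownsync for a given id, e.g. by copying the last confirmed inputs.
function getOrPrefabInputFrameDownsync(cache, inputFrameId, prefab) {
  if (inputFrameId < cache.edFrameId) {
    return cache.getByFrameId(inputFrameId); // already buffered, never overwrite history
  }
  let created = null;
  while (cache.edFrameId <= inputFrameId) {
    created = prefab(cache.edFrameId);
    cache.put(created); // put(...) advances cache.edFrameId by one
  }
  return created;
}
```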
@@ -1108,6 +1108,7 @@ func (pR *Room) prefabInputFrameDownsync(inputFrameId int32) *InputFrameDownsync

func (pR *Room) markConfirmationIfApplicable(inputFrameUpsyncBatch []*InputFrameUpsync, playerId int32, player *Player) *InputsBufferSnapshot {
// [WARNING] This function MUST BE called while "pR.InputsBufferLock" is locked!
// Step#1, put the received "inputFrameUpsyncBatch" into "pR.InputsBuffer"
for _, inputFrameUpsync := range inputFrameUpsyncBatch {
clientInputFrameId := inputFrameUpsync.InputFrameId
if clientInputFrameId < pR.InputsBuffer.StFrameId {
@@ -1123,22 +1124,17 @@ func (pR *Room) markConfirmationIfApplicable(inputFrameUpsyncBatch []*InputFrame
Logger.Warn(fmt.Sprintf("Dropping too advanced inputFrameUpsync: roomId=%v, playerId=%v, clientInputFrameId=%v, InputsBuffer=%v; is this player cheating?", pR.Id, playerId, clientInputFrameId, pR.InputsBufferString(false)))
continue
}
var targetInputFrameDownsync *InputFrameDownsync = nil
if clientInputFrameId == pR.InputsBuffer.EdFrameId {
targetInputFrameDownsync = pR.prefabInputFrameDownsync(clientInputFrameId)
Logger.Debug(fmt.Sprintf("OnBattleCmdReceived-Prefabbed new inputFrameDownsync from inputFrameUpsync: roomId=%v, playerId=%v, clientInputFrameId=%v, InputsBuffer=%v", pR.Id, playerId, clientInputFrameId, pR.InputsBufferString(false)))
} else {
targetInputFrameDownsync = pR.InputsBuffer.GetByFrameId(clientInputFrameId).(*InputFrameDownsync)
Logger.Debug(fmt.Sprintf("OnBattleCmdReceived-stuffing inputFrameDownsync from inputFrameUpsync: roomId=%v, playerId=%v, clientInputFrameId=%v, InputsBuffer=%v", pR.Id, playerId, clientInputFrameId, pR.InputsBufferString(false)))
}
// by now "clientInputFrameId <= pR.InputsBuffer.EdFrameId"
targetInputFrameDownsync := pR.getOrPrefabInputFrameDownsync(clientInputFrameId)
targetInputFrameDownsync.InputList[player.JoinIndex-1] = inputFrameUpsync.Encoded
targetInputFrameDownsync.ConfirmedList |= uint64(1 << uint32(player.JoinIndex-1))

if inputFrameUpsync.InputFrameId > pR.LatestPlayerUpsyncedInputFrameId {
pR.LatestPlayerUpsyncedInputFrameId = inputFrameUpsync.InputFrameId
if clientInputFrameId > pR.LatestPlayerUpsyncedInputFrameId {
pR.LatestPlayerUpsyncedInputFrameId = clientInputFrameId
}
}

// Step#2, mark confirmation without forcing
newAllConfirmedCount := int32(0)
inputFrameId1 := pR.LastAllConfirmedInputFrameId + 1
totPlayerCnt := uint32(pR.Capacity)
@@ -1156,23 +1152,21 @@ func (pR *Room) markConfirmationIfApplicable(inputFrameUpsyncBatch []*InputFrame
for _, player := range pR.PlayersArr {
thatPlayerBattleState := atomic.LoadInt32(&(player.BattleState))
thatPlayerJoinMask := uint64(1 << uint32(player.JoinIndex-1))
if 0 == (inputFrameDownsync.ConfirmedList & thatPlayerJoinMask) {
if thatPlayerBattleState == PlayerBattleStateIns.ACTIVE {
shouldBreakConfirmation = true // Could be an `ACTIVE SLOW TICKER` here, but no action needed for now
break
} else {
Logger.Debug(fmt.Sprintf("markConfirmationIfApplicable for roomId=%v, skipping UNCONFIRMED BUT INACTIVE player(id:%v, joinIndex:%v) while checking inputFrameId=[%v, %v): InputsBuffer=%v", pR.Id, player.Id, player.JoinIndex, inputFrameId1, pR.InputsBuffer.EdFrameId, pR.InputsBufferString(false)))
}
isSlowTicker := (0 == (inputFrameDownsync.ConfirmedList & thatPlayerJoinMask))
isActiveSlowTicker := (isSlowTicker && thatPlayerBattleState == PlayerBattleStateIns.ACTIVE)
if isActiveSlowTicker {
shouldBreakConfirmation = true // Could be an `ACTIVE SLOW TICKER` here, but no action needed for now
break
}
Logger.Debug(fmt.Sprintf("markConfirmationIfApplicable for roomId=%v, skipping UNCONFIRMED BUT INACTIVE player(id:%v, joinIndex:%v) while checking inputFrameId=[%v, %v): InputsBuffer=%v", pR.Id, player.Id, player.JoinIndex, inputFrameId1, pR.InputsBuffer.EdFrameId, pR.InputsBufferString(false)))
}
}

if shouldBreakConfirmation {
break
} else {
newAllConfirmedCount += 1
pR.onInputFrameDownsyncAllConfirmed(inputFrameDownsync, -1)
}
newAllConfirmedCount += 1
pR.onInputFrameDownsyncAllConfirmed(inputFrameDownsync, -1)
}

if 0 < newAllConfirmedCount {
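The confirmation bookkeeping above is plain bit arithmetic: every player owns one bit of ConfirmedList (bit joinIndex-1), and an inputFrame becomes all-confirmed once every active player's bit is set. A tiny self-contained illustration in JavaScript; the 2-player room is an assumption for the example, not something stated in this hunk:

```js
const capacity = 2;                           // assumed room capacity
const allConfirmedMask = (1 << capacity) - 1; // 0b11 for two players

let confirmedList = 0;
const markConfirmed = (joinIndex) => { confirmedList |= (1 << (joinIndex - 1)); };

markConfirmed(1);                                // player 1 upsynced this inputFrame
console.log((confirmedList & (1 << 1)) === 0);   // true -> player 2 is still a slow ticker for it
markConfirmed(2);
console.log(confirmedList === allConfirmedMask); // true -> the frame can now be downsynced as all-confirmed
```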
@@ -1217,7 +1211,7 @@ func (pR *Room) forceConfirmationIfApplicable(prevRenderFrameId int32) uint64 {
pR.onInputFrameDownsyncAllConfirmed(inputFrameDownsync, -1)
}
if 0 < unconfirmedMask {
Logger.Warn(fmt.Sprintf("[type#1 forceConfirmation] For roomId=%d@renderFrameId=%d, curDynamicsRenderFrameId=%d, LatestPlayerUpsyncedInputFrameId:%d, LastAllConfirmedInputFrameId:%d, (pR.NstDelayFrames >> pR.InputScaleFrames):%d, InputFrameUpsyncDelayTolerance:%d, unconfirmedMask=%d; there's a slow ticker suspect, forcing all-confirmation", pR.Id, pR.RenderFrameId, pR.CurDynamicsRenderFrameId, pR.LatestPlayerUpsyncedInputFrameId, oldLastAllConfirmedInputFrameId, (pR.NstDelayFrames >> pR.InputScaleFrames), pR.InputFrameUpsyncDelayTolerance, unconfirmedMask))
Logger.Debug(fmt.Sprintf("[type#1 forceConfirmation] For roomId=%d@renderFrameId=%d, curDynamicsRenderFrameId=%d, LatestPlayerUpsyncedInputFrameId:%d, LastAllConfirmedInputFrameId:%d, (pR.NstDelayFrames >> pR.InputScaleFrames):%d, InputFrameUpsyncDelayTolerance:%d, unconfirmedMask=%d; there's a slow ticker suspect, forcing all-confirmation", pR.Id, pR.RenderFrameId, pR.CurDynamicsRenderFrameId, pR.LatestPlayerUpsyncedInputFrameId, oldLastAllConfirmedInputFrameId, (pR.NstDelayFrames >> pR.InputScaleFrames), pR.InputFrameUpsyncDelayTolerance, unconfirmedMask))
}
} else {
// Type#2 helps resolve the edge case when all players are disconnected temporarily
@@ -1360,9 +1354,9 @@ func (pR *Room) applyInputFrameDownsyncDynamicsOnSingleRenderFrame(delayedInputF
}
if !characStateAlreadyInAir && characStateIsInterruptWaivable {
thatPlayerInNextFrame.VelY = pR.JumpingInitVelY
// if 1 == currPlayerDownsync.JoinIndex {
// Logger.Info(fmt.Sprintf("playerId=%v, joinIndex=%v jumped at {renderFrame.id: %d, virtualX: %d, virtualY: %d, nextVelX: %d, nextVelY: %d, nextCharacterState=%d, inAir=%v}, delayedInputFrame.id=%d", playerId, joinIndex, currRenderFrame.Id, currPlayerDownsync.VirtualGridX, currPlayerDownsync.VirtualGridY, thatPlayerInNextFrame.VelX, thatPlayerInNextFrame.VelY, thatPlayerInNextFrame.CharacterState, currPlayerDownsync.InAir, delayedInputFrame.InputFrameId))
// }
if 1 == currPlayerDownsync.JoinIndex {
Logger.Info(fmt.Sprintf("playerId=%v, joinIndex=%v jumped at {renderFrame.id: %d, virtualX: %d, virtualY: %d, nextVelX: %d, nextVelY: %d, nextCharacterState=%d, inAir=%v}, delayedInputFrame.id=%d", playerId, joinIndex, currRenderFrame.Id, currPlayerDownsync.VirtualGridX, currPlayerDownsync.VirtualGridY, thatPlayerInNextFrame.VelX, thatPlayerInNextFrame.VelY, thatPlayerInNextFrame.CharacterState, currPlayerDownsync.InAir, delayedInputFrame.InputFrameId))
}
}
}

@@ -1700,7 +1694,7 @@ func (pR *Room) doBattleMainLoopPerTickBackendDynamicsWithProperLocking(prevRend

if ok, thatRenderFrameId := pR.shouldPrefabInputFrameDownsync(prevRenderFrameId, pR.RenderFrameId); ok {
noDelayInputFrameId := pR.ConvertToInputFrameId(thatRenderFrameId, 0)
pR.prefabInputFrameDownsync(noDelayInputFrameId)
pR.getOrPrefabInputFrameDownsync(noDelayInputFrameId)
}

// Force setting all-confirmed of buffered inputFrames periodically, kindly note that if "pR.BackendDynamicsEnabled", what we want to achieve is "recovery upon reconnection", which certainly requires "forceConfirmationIfApplicable" to move "pR.LastAllConfirmedInputFrameId" forward as much as possible

@@ -12,7 +12,7 @@
<object id="135" x="999" y="1608">
<point/>
</object>
<object id="137" x="1164" y="1548">
<object id="137" x="875" y="1450">
<point/>
</object>
</objectgroup>

@@ -440,7 +440,7 @@
"array": [
0,
0,
215.95961841836203,
216.05530045313827,
0,
0,
0,

@@ -99,6 +99,9 @@ cc.Class({
bulletTriggerEnabled: {
default: false
},
closeOnForcedtoResyncNotSelf: {
default: true
},
},

_inputFrameIdDebuggable(inputFrameId) {
@@ -122,7 +125,8 @@ cc.Class({
return (confirmedList + 1) == (1 << this.playerRichInfoDict.size);
},

_generateInputFrameUpsync(inputFrameId) {
getOrPrefabInputFrameUpsync(inputFrameId) {
// TODO: find some kind of synchronization mechanism against "onInputFrameDownsyncBatch"!
const self = this;
if (
null == self.ctrl ||
@@ -134,28 +138,27 @@ cc.Class({
let previousSelfInput = null,
currSelfInput = null;
const joinIndex = self.selfPlayerInfo.joinIndex;
// [WARNING] The while-loop here handles a situation where the "resync rdf & accompaniedInputFrameDownsyncBatch" mismatched and we have to predict some "gap-inputFrames"!
while (self.recentInputCache.edFrameId <= inputFrameId) {
// TODO: find some kind of synchronization mechanism against "onInputFrameDownsyncBatch"!
const previousInputFrameDownsyncWithPrediction = self.getCachedInputFrameDownsyncWithPrediction(inputFrameId - 1);
previousSelfInput = (null == previousInputFrameDownsyncWithPrediction ? null : previousInputFrameDownsyncWithPrediction.inputList[joinIndex - 1]);
const existingInputFrame = self.recentInputCache.getByFrameId(inputFrameId);
const previousInputFrameDownsyncWithPrediction = self.getCachedInputFrameDownsyncWithPrediction(inputFrameId - 1);
previousSelfInput = (null == previousInputFrameDownsyncWithPrediction ? null : previousInputFrameDownsyncWithPrediction.inputList[joinIndex - 1]);
if (null != existingInputFrame) {
// This could happen upon either [type#1] or [type#2] forceConfirmation, where "refRenderFrame" is accompanied by some "inputFrameDownsyncs". The check here also guarantees that we don't override history
console.log(`noDelayInputFrameId=${inputFrameId} already exists in recentInputCache: recentInputCache=${self._stringifyRecentInputCache(false)}`);
return [previousSelfInput, existingInputFrame.inputList[joinIndex - 1]];
}

// If "forceConfirmation" is active on backend, there's a chance that the already downsynced "inputFrameDownsync"s are ahead of a locally generating inputFrameId, in this case we respect the downsynced one.
const existingInputFrame = self.recentInputCache.getByFrameId(inputFrameId);
if (null != existingInputFrame && self._allConfirmed(existingInputFrame.confirmedList)) {
console.log(`noDelayInputFrameId=${inputFrameId} already exists in recentInputCache and is all-confirmed: recentInputCache=${self._stringifyRecentInputCache(false)}`);
return [previousSelfInput, existingInputFrame.inputList[joinIndex - 1]];
}
const prefabbedInputList = (null == previousInputFrameDownsyncWithPrediction ? new Array(self.playerRichInfoDict.size).fill(0) : previousInputFrameDownsyncWithPrediction.inputList.slice());
currSelfInput = self.ctrl.getEncodedInput();
prefabbedInputList[(joinIndex - 1)] = currSelfInput;
const prefabbedInputList = (null == previousInputFrameDownsyncWithPrediction ? new Array(self.playerRichInfoDict.size).fill(0) : previousInputFrameDownsyncWithPrediction.inputList.slice());
currSelfInput = self.ctrl.getEncodedInput(); // When "null == existingInputFrame", it'd be safe to say that the realtime "self.ctrl.getEncodedInput()" is for the requested "inputFrameId"
prefabbedInputList[(joinIndex - 1)] = currSelfInput;
while (self.recentInputCache.edFrameId <= inputFrameId) {
// Fill the gap
const prefabbedInputFrameDownsync = window.pb.protos.InputFrameDownsync.create({
inputFrameId: self.recentInputCache.edFrameId,
inputList: prefabbedInputList,
confirmedList: (1 << (self.selfPlayerInfo.joinIndex - 1))
});

self.recentInputCache.put(prefabbedInputFrameDownsync); // A prefabbed inputFrame, would certainly be adding a new inputFrame to the cache, because server only downsyncs "all-confirmed inputFrames"
self.recentInputCache.put(prefabbedInputFrameDownsync);
}

return [previousSelfInput, currSelfInput];
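Regardless of which side of the diff you read, the intent of getOrPrefabInputFrameUpsync is the same: respect an inputFrame that is already in recentInputCache, otherwise keep prefabbing frames until the cache catches up with the requested inputFrameId, predicting every slot from the previous frame except the local player's slot, which takes the realtime controller reading. A condensed sketch of that loop in JavaScript; cache, predictFrom and encodedSelfInput are hypothetical stand-ins, and the authoritative control flow is the hunk above:

```js
// cache.put(frame) appends the frame and bumps cache.edFrameId by one;
// predictFrom(prevFrame, playerCount) copies prevFrame.inputList, or zero-fills when prevFrame is null.
function prefabUpTo(cache, inputFrameId, selfJoinIndex, encodedSelfInput, predictFrom, playerCount) {
  const existing = cache.getByFrameId(inputFrameId);
  if (null != existing) return existing;               // don't override history
  let lastPrefabbed = null;
  while (cache.edFrameId <= inputFrameId) {
    const prev = cache.getByFrameId(cache.edFrameId - 1);
    const inputList = predictFrom(prev, playerCount);
    inputList[selfJoinIndex - 1] = encodedSelfInput;    // only the self slot is authoritative
    lastPrefabbed = {
      inputFrameId: cache.edFrameId,
      inputList,
      confirmedList: (1 << (selfJoinIndex - 1)),        // only self-confirmed so far
    };
    cache.put(lastPrefabbed);
  }
  return lastPrefabbed;
}
```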
@@ -315,6 +318,8 @@ cc.Class({

self.battleState = ALL_BATTLE_STATES.WAITING;

self.othersForcedDownsyncRenderFrameDict = new Map();

self.countdownNanos = null;
if (self.countdownLabel) {
self.countdownLabel.string = "";
@@ -581,8 +586,14 @@ cc.Class({
return;
}
const shouldForceDumping1 = (window.MAGIC_ROOM_DOWNSYNC_FRAME_ID.BATTLE_START == rdf.id);
const shouldForceDumping2 = (rdf.id > self.renderFrameId + self.renderFrameIdLagTolerance);
const shouldForceResync = rdf.shouldForceResync;
let shouldForceDumping2 = (rdf.id >= self.renderFrameId + self.renderFrameIdLagTolerance);
let shouldForceResync = rdf.shouldForceResync;
const notSelfUnconfirmed = (0 == (rdf.backendUnconfirmedMask & (1 << (self.selfPlayerInfo.joinIndex - 1))));
if (notSelfUnconfirmed) {
shouldForceDumping2 = false;
shouldForceResync = false;
self.othersForcedDownsyncRenderFrameDict.set(rdf.id, rdf);
}
/*
TODO
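The new closeOnForcedtoResyncNotSelf path hinges on a single bit test: when the backend's unconfirmed mask does not include the local player, the forced resync was triggered by someone else, so instead of dumping local state the frame is parked in othersForcedDownsyncRenderFrameDict and later compared against the locally predicted frame of the same id. The check itself, with illustrative values:

```js
// backendUnconfirmedMask has bit (joinIndex-1) set for every player the backend had to force-confirm.
const backendUnconfirmedMask = 0b10; // assume only player 2 was force-confirmed
const selfJoinIndex = 1;             // assume we are player 1
const notSelfUnconfirmed = (0 == (backendUnconfirmedMask & (1 << (selfJoinIndex - 1))));
console.log(notSelfUnconfirmed); // true -> keep rdf for prediction-fault checking instead of resyncing self
```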
@@ -614,7 +625,7 @@ cc.Class({
}
}

if (null == self.renderFrameId || self.renderFrameId <= rdf.id || shouldForceResync) {
if (shouldForceDumping1 || shouldForceDumping2 || shouldForceResync) {
// In fact, not having "window.RING_BUFF_CONSECUTIVE_SET == dumpRenderCacheRet" should already imply that "self.renderFrameId <= rdf.id", but here we double check and log the anomaly

if (window.MAGIC_ROOM_DOWNSYNC_FRAME_ID.BATTLE_START == rdf.id) {
@@ -666,8 +677,45 @@ cc.Class({
return true;
},

equalPlayers(lhs, rhs) {
if (null == lhs || null == rhs) return false;
if (lhs.virtualGridX != rhs.virtualGridX) return false;
if (lhs.virtualGridY != rhs.virtualGridY) return false;
if (lhs.dirX != rhs.dirX) return false;
if (lhs.dirY != rhs.dirY) return false;
if (lhs.velX != rhs.velX) return false;
if (lhs.velY != rhs.velY) return false;
if (lhs.speed != rhs.speed) return false;
if (lhs.framesToRecover != rhs.framesToRecover) return false;
if (lhs.hp != rhs.hp) return false;
if (lhs.maxHp != rhs.maxHp) return false;
if (lhs.characterState != rhs.characterState) return false;
if (lhs.inAir != rhs.inAir) return false;
return true;
},

equalMeleeBullets(lhs, rhs) {
if (null == lhs || null == rhs) return false;
if (lhs.battleLocalId != rhs.battleLocalId) return false;
if (lhs.offenderPlayerId != rhs.offenderPlayerId) return false;
if (lhs.offenderJoinIndex != rhs.offenderJoinIndex) return false;
if (lhs.originatedRenderFrameId != rhs.originatedRenderFrameId) return false;
return true;
},

equalRoomDownsyncFrames(lhs, rhs) {
if (null == lhs || null == rhs) return false;
for (let k in lhs.players) {
if (!this.equalPlayers(lhs.players[k], rhs.players[k])) return false;
}
for (let k in lhs.meleeBullets) {
if (!this.equalMeleeBullets(lhs.meleeBullets[k], rhs.meleeBullets[k])) return false;
}
return true;
},

onInputFrameDownsyncBatch(batch) {
// TODO: find some kind of synchronization mechanism against "_generateInputFrameUpsync"!
// TODO: find some kind of synchronization mechanism against "getOrPrefabInputFrameUpsync"!
const self = this;
if (!self.recentInputCache) {
return;
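These comparators are the core of the newly added prediction fault detection: a locally rolled-forward render frame is compared field by field (position, direction, velocity, recovery frames, hp, character state, airborne flag, plus the identifying fields of each melee bullet) against the authoritative frame that the backend force-downsynced on behalf of another player. A minimal usage sketch with hypothetical frame literals; mapIns stands for the Map component instance that defines the methods above:

```js
// Hypothetical one-player frames shaped like RoomDownsyncFrame.
const predicted = {
  players: { "10": { virtualGridX: 100, virtualGridY: 0, dirX: 2, dirY: 0, velX: 0, velY: 0, speed: 10, framesToRecover: 0, hp: 100, maxHp: 100, characterState: 0, inAir: false } },
  meleeBullets: []
};
const authoritative = JSON.parse(JSON.stringify(predicted));
authoritative.players["10"].virtualGridX = 98; // the backend disagrees by 2 virtual-grid units

if (!mapIns.equalRoomDownsyncFrames(authoritative, predicted)) {
  console.warn("prediction fault detected, the authoritative frame should be adopted");
}
```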
@@ -683,6 +731,7 @@ cc.Class({
if (inputFrameDownsyncId < self.lastAllConfirmedInputFrameId) {
continue;
}
// [WARNING] Take all "inputFrameDownsync" from backend as all-confirmed, it'll be later checked by "rollbackAndChase".
self.lastAllConfirmedInputFrameId = inputFrameDownsyncId;
const localInputFrame = self.recentInputCache.getByFrameId(inputFrameDownsyncId);
if (null != localInputFrame
@@ -693,7 +742,6 @@ cc.Class({
) {
firstPredictedYetIncorrectInputFrameId = inputFrameDownsyncId;
}
// [WARNING] Take all "inputFrameDownsync" from backend as all-confirmed, it'll be later checked by "rollbackAndChase".
inputFrameDownsync.confirmedList = (1 << self.playerRichInfoDict.size) - 1;
const [ret, oldStFrameId, oldEdFrameId] = self.recentInputCache.setByFrameId(inputFrameDownsync, inputFrameDownsync.inputFrameId);
if (window.RING_BUFF_FAILED_TO_SET == ret) {
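Input-level prediction faults follow the same idea: every inputFrame coming down from the backend is compared with the locally cached (possibly predicted) frame of the same id, and the earliest mismatching id is remembered so the chaser can later re-simulate from the render frame that first consumed it. A compact sketch of that bookkeeping in JavaScript with a hypothetical helper; the real handler additionally rewrites confirmedList and stores the downsynced frame into recentInputCache:

```js
// localCache.getByFrameId(id) returns the locally predicted inputFrame for id, or null.
function firstPredictedYetIncorrectInputFrameId(localCache, downsyncedBatch, selfJoinIndex) {
  for (const remote of downsyncedBatch) {
    const local = localCache.getByFrameId(remote.inputFrameId);
    if (null == local) continue;                         // nothing was predicted for this id
    for (let i = 0; i < remote.inputList.length; ++i) {
      if (i == selfJoinIndex - 1) continue;              // the self slot is never a prediction
      if (local.inputList[i] != remote.inputList[i]) {
        return remote.inputFrameId;                      // earliest prediction fault
      }
    }
  }
  return null;
}
```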
@@ -828,7 +876,7 @@ cc.Class({
currSelfInput = null;
const noDelayInputFrameId = self._convertToInputFrameId(self.renderFrameId, 0); // It's important that "inputDelayFrames == 0" here
if (self.shouldGenerateInputFrameUpsync(self.renderFrameId)) {
[prevSelfInput, currSelfInput] = self._generateInputFrameUpsync(noDelayInputFrameId);
[prevSelfInput, currSelfInput] = self.getOrPrefabInputFrameUpsync(noDelayInputFrameId);
}

let t0 = performance.now();
@@ -844,11 +892,16 @@ cc.Class({
if (nextChaserRenderFrameId > self.renderFrameId) {
nextChaserRenderFrameId = self.renderFrameId;
}
self.rollbackAndChase(prevChaserRenderFrameId, nextChaserRenderFrameId, self.collisionSys, self.collisionSysMap, true);
if (prevChaserRenderFrameId < nextChaserRenderFrameId) {
// Do not execute "rollbackAndChase" when "prevChaserRenderFrameId == nextChaserRenderFrameId", otherwise if "nextChaserRenderFrameId == self.renderFrameId" we'd be wasting computing power once.
self.rollbackAndChase(prevChaserRenderFrameId, nextChaserRenderFrameId, self.collisionSys, self.collisionSysMap, true);
}
let t2 = performance.now();

// Inside the following "self.rollbackAndChase" actually ROLLS FORWARD w.r.t. the corresponding delayedInputFrame, REGARDLESS OF whether or not "self.chaserRenderFrameId == self.renderFrameId" now.
const [prevRdf, rdf] = self.rollbackAndChase(self.renderFrameId, self.renderFrameId + 1, self.collisionSys, self.collisionSysMap, false);
const latestRdfResults = self.rollbackAndChase(self.renderFrameId, self.renderFrameId + 1, self.collisionSys, self.collisionSysMap, false);
let prevRdf = latestRdfResults[0],
rdf = latestRdfResults[1];
/*
const nonTrivialChaseEnded = (prevChaserRenderFrameId < nextChaserRenderFrameId && nextChaserRenderFrameId == self.renderFrameId);
if (nonTrivialChaseEnded) {
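The chasing schedule above caps how many render frames are re-simulated per update and now skips the call entirely when there is nothing to chase, which avoids re-simulating the [renderFrameId, renderFrameId+1) span twice in one update. A minimal sketch of that planning step in JavaScript; the cap of 12 mirrors the backend's MaxChasingRenderFramesPerUpdate from the first hunk, but reusing it on the frontend here is an assumption for the example:

```js
function planChase(prevChaserRenderFrameId, renderFrameId, maxChasingRenderFramesPerUpdate = 12) {
  let nextChaserRenderFrameId = prevChaserRenderFrameId + maxChasingRenderFramesPerUpdate;
  if (nextChaserRenderFrameId > renderFrameId) {
    nextChaserRenderFrameId = renderFrameId; // never chase past the currently rendered frame
  }
  // Only chase when strictly behind; a no-op chase would duplicate the roll-forward done right after.
  const shouldChase = (prevChaserRenderFrameId < nextChaserRenderFrameId);
  return { nextChaserRenderFrameId, shouldChase };
}

console.log(planChase(100, 120)); // { nextChaserRenderFrameId: 112, shouldChase: true }
console.log(planChase(120, 120)); // { nextChaserRenderFrameId: 120, shouldChase: false }
```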
@@ -856,6 +909,15 @@ cc.Class({
}
*/
// [WARNING] Don't try to get "prevRdf(i.e. renderFrameId == latest-1)" by "self.recentRenderCache.getByFrameId(...)" here, as the cache might have been updated by asynchronous "onRoomDownsyncFrame(...)" calls!
if (self.othersForcedDownsyncRenderFrameDict.has(rdf.id)) {
const othersForcedDownsyncRenderFrame = self.othersForcedDownsyncRenderFrameDict.get(rdf.id);
if (!self.equalRoomDownsyncFrames(othersForcedDownsyncRenderFrame, rdf)) {
console.warn(`Mismatched render frame prediction@rdf.id=${rdf.id}, @localRenderFrameId=${self.renderFrameId}, @lastAllConfirmedRenderFrameId=${self.lastAllConfirmedRenderFrameId}, @lastAllConfirmedInputFrameId=${self.lastAllConfirmedInputFrameId}, @chaserRenderFrameId=${self.chaserRenderFrameId}, @localRecentInputCache=${mapIns._stringifyRecentInputCache(false)}:
rdf=${JSON.stringify(rdf)}
othersForcedDownsyncRenderFrame=${JSON.stringify(othersForcedDownsyncRenderFrame)}`);
rdf = othersForcedDownsyncRenderFrame;
}
}
self.applyRoomDownsyncFrameDynamics(rdf, prevRdf);
self.showDebugBoundaries(rdf);
++self.renderFrameId; // [WARNING] It's important to increment the renderFrameId AFTER all the operations above!!!
@@ -1079,11 +1141,13 @@ cc.Class({
getCachedInputFrameDownsyncWithPrediction(inputFrameId) {
const self = this;
const inputFrameDownsync = self.recentInputCache.getByFrameId(inputFrameId);
const lastAllConfirmedInputFrame = self.recentInputCache.getByFrameId(self.lastAllConfirmedInputFrameId);
if (null != inputFrameDownsync && null != lastAllConfirmedInputFrame && inputFrameId > self.lastAllConfirmedInputFrameId) {
for (let i = 0; i < inputFrameDownsync.inputList.length; ++i) {
if (i == (self.selfPlayerInfo.joinIndex - 1)) continue;
inputFrameDownsync.inputList[i] = (lastAllConfirmedInputFrame.inputList[i] & 15); // Don't predict attack input!
if (null != inputFrameDownsync && inputFrameId > self.lastAllConfirmedInputFrameId) {
const lastAllConfirmedInputFrame = self.recentInputCache.getByFrameId(self.lastAllConfirmedInputFrameId);
if (null != lastAllConfirmedInputFrame) {
for (let i = 0; i < inputFrameDownsync.inputList.length; ++i) {
if (i == (self.selfPlayerInfo.joinIndex - 1)) continue;
inputFrameDownsync.inputList[i] = (lastAllConfirmedInputFrame.inputList[i] & 15); // Don't predict attack input!
}
}
}
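The `& 15` above is the prediction rule for other players: keep only the low 4 bits of their last all-confirmed input (assumed here to be the encoded movement/direction part) and drop any higher bits, so attacks are never predicted. For example:

```js
// Assumed encoding: low 4 bits = discretized direction, higher bits = action buttons (e.g. attack).
const lastConfirmedEncodedInput = 0b10011; // direction 3 plus an attack bit, purely illustrative
const predictedEncodedInput = (lastConfirmedEncodedInput & 15);
console.log(predictedEncodedInput); // 3 -> keep moving the same way, but never predict the attack
```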
@@ -1324,9 +1388,11 @@ cc.Class({

if (1 == joinIndex) {
if (fallStopping) {
/*
console.info(`playerId=${playerId}, joinIndex=${thatPlayerInNextFrame.joinIndex} fallStopping#1:
{renderFrame.id: ${currRenderFrame.id}, possiblyFallStoppedOnAnotherPlayer: ${possiblyFallStoppedOnAnotherPlayer}}
playerColliderPos=${self.stringifyColliderCenterInWorld(playerCollider, halfColliderWidth, halfColliderHeight, topPadding, bottomPadding, leftPadding, rightPadding)}, effPushback={${effPushbacks[joinIndex - 1][0].toFixed(3)}, ${effPushbacks[joinIndex - 1][1].toFixed(3)}}, overlayMag=${result.overlap.toFixed(4)}`);
*/
} else if (currPlayerDownsync.inAir && isBarrier && !landedOnGravityPushback) {
/*
console.warn(`playerId=${playerId}, joinIndex=${currPlayerDownsync.joinIndex} inAir & pushed back by barrier & not landed:

@@ -146,7 +146,7 @@ window.initPersistentSessionClient = function(onopenCb, expectedRoomId) {
if (null == evt || null == evt.data) {
return;
}
// FIXME: In practice, it seems like the thread invoking "onmessage" could be different from "Map.update(dt)", which makes it necessary to guard "recentRenderCache & recentInputCache" for "_generateInputFrameUpsync & rollbackAndChase & onRoomDownsyncFrame & onInputFrameDownsyncBatch" to avoid mysterious RAM contamination, but there's no explicit mutex in JavaScript for browsers -- this issue is found in Firefox (108.0.1, 64-bit, Windows 11), but not in Chrome (108.0.5359.125, Official Build, 64-bit, Windows 11) -- just breakpoint in "Map.rollbackAndChase" then see whether the logs of "onmessage" can still be printed and whether the values of "recentRenderCache & recentInputCache" change in console).
// FIXME: In practice, it seems like the thread invoking "onmessage" could be different from "Map.update(dt)", which makes it necessary to guard "recentRenderCache & recentInputCache" for "getOrPrefabInputFrameUpsync & rollbackAndChase & onRoomDownsyncFrame & onInputFrameDownsyncBatch" to avoid mysterious RAM contamination, but there's no explicit mutex in JavaScript for browsers -- this issue is found in Firefox (108.0.1, 64-bit, Windows 11), but not in Chrome (108.0.5359.125, Official Build, 64-bit, Windows 11) -- just breakpoint in "Map.rollbackAndChase" then see whether the logs of "onmessage" can still be printed and whether the values of "recentRenderCache & recentInputCache" change in console).
try {
const resp = window.pb.protos.WsResp.decode(new Uint8Array(evt.data));
//console.log(`Got non-empty onmessage decoded: resp.act=${resp.act}`);