Use sync access for the temp JSON file database
Replace all async reads and writes to the JSON database with synchronous ones, to prevent a data-corruption race condition in which two requests processed at the same time write to the same file, or one request reads while another writes, causing a read of partially written data.
This commit is contained in:
parent
f9d188b2ba
commit
80cec71308
23 changed files with 138 additions and 138 deletions
|
@ -33,8 +33,8 @@ export async function createEventStream(
|
|||
) {
|
||||
const runtimeConfig = useRuntimeConfig(event);
|
||||
const now = Date.now();
|
||||
const events = (await readEvents()).filter(e => e.id > lastEventId);
|
||||
const users = await readUsers();
|
||||
const events = (readEvents()).filter(e => e.id > lastEventId);
|
||||
const users = readUsers();
|
||||
const apiSession = session ? await serverSessionToApi(event, session) : undefined;
|
||||
let userType: ApiAccount["type"] | undefined;
|
||||
if (session?.accountId !== undefined) {
|
||||
|
@ -182,9 +182,9 @@ function encodeEvent(event: ApiEvent, userType: ApiAccount["type"] | undefined)
|
|||
}
|
||||
|
||||
export async function broadcastEvent(event: ApiEvent) {
|
||||
const events = await readEvents();
|
||||
const events = readEvents();
|
||||
events.push(event);
|
||||
await writeEvents(events);
|
||||
writeEvents(events);
|
||||
}
|
||||
|
||||
function sendEventToStream(stream: EventStream, event: ApiEvent) {
|
||||
|
@ -222,7 +222,7 @@ async function sendEventUpdates() {
|
|||
|
||||
// Send events.
|
||||
const skipEventId = Math.min(...[...streams.values()].map(s => s.lastEventId));
|
||||
const events = (await readEvents()).filter(e => e.id > skipEventId);
|
||||
const events = (readEvents()).filter(e => e.id > skipEventId);
|
||||
if (events.length)
|
||||
console.log(`broadcasting ${events.length} event(s) to ${streams.size} client(s)`);
|
||||
for (const stream of streams.values()) {
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue