@@ -87,6 +87,29 @@ export interface FsWriteOptions {
 	EOL?: string;
 }
 
+enum Operation {
+	Clear = 0,
+	Write = 1,
+	Delete = 2,
+}
+
+type LazyEntry<V extends unknown = unknown> = (
+	| {
+			op: Operation.Clear;
+	  }
+	| {
+			op: Operation.Delete;
+			key: string;
+	  }
+	| {
+			op: Operation.Write;
+			key: string;
+			value: V;
+	  }
+) & {
+	serialize(): string;
+};
+
 /**
  * fsync on a directory ensures there are no rename operations etc. which haven't been persisted to disk.
  */
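
The new `LazyEntry` type is a discriminated union: checking the `op` field narrows an entry to exactly one of the three shapes, while the intersected `serialize()` method lets the actual JSONL line be built later, only if the entry really ends up on disk. A minimal standalone sketch of how such a union narrows (the `describe` helper is illustrative and not part of this patch):

```ts
enum Operation {
	Clear = 0,
	Write = 1,
	Delete = 2,
}

type LazyEntry<V = unknown> = (
	| { op: Operation.Clear }
	| { op: Operation.Delete; key: string }
	| { op: Operation.Write; key: string; value: V }
) & { serialize(): string };

// Narrowing on `op` exposes only the fields of the matching variant.
function describe(entry: LazyEntry): string {
	switch (entry.op) {
		case Operation.Clear:
			return "truncate the file";
		case Operation.Delete:
			return `delete ${entry.key}`;
		case Operation.Write:
			return `write ${entry.key} = ${JSON.stringify(entry.value)}`;
	}
}
```
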
@@ -443,7 +466,7 @@ export class JsonlDB<V extends unknown = unknown> {
 			throw new Error("The database is not open!");
 		}
 		this._db.clear();
-		this.write("");
+		this.write(this.makeLazyClear());
 	}
 	public delete(key: string): boolean {
 		if (!this._isOpen) {
@@ -452,7 +475,7 @@ export class JsonlDB<V extends unknown = unknown> {
 		const ret = this._db.delete(key);
 		if (ret) {
 			// Something was deleted
-			this.write(this.entryToLine(key));
+			this.write(this.makeLazyDelete(key));
 		}
 		return ret;
 	}
@@ -461,7 +484,7 @@ export class JsonlDB<V extends unknown = unknown> {
 			throw new Error("The database is not open!");
 		}
 		this._db.set(key, value);
-		this.write(this.entryToLine(key, value));
+		this.write(this.makeLazyWrite(key, value));
 		return this;
 	}
 
@@ -488,7 +511,7 @@ export class JsonlDB<V extends unknown = unknown> {
 
 		for (const [key, value] of Object.entries(jsonOrFile)) {
 			this._db.set(key, value);
-			this.write(this.entryToLine(key, value), true);
+			this.write(this.makeLazyWrite(key, value), true);
 		}
 	}
 
@@ -502,8 +525,8 @@ export class JsonlDB<V extends unknown = unknown> {
 		return fs.writeJSON(filename, composeObject([...this._db]), options);
 	}
 
-	private updateStatistics(command: string): void {
-		if (command === "") {
+	private updateStatistics(entry: LazyEntry<V>): void {
+		if (entry.op === Operation.Clear) {
 			this._uncompressedSize = 0;
 		} else {
 			this._uncompressedSize++;
@@ -570,20 +593,20 @@ export class JsonlDB<V extends unknown = unknown> {
 	 * Writes a line into the correct backlog
 	 * @param noAutoCompress Whether auto-compression should be disabled
 	 */
-	private write(line: string, noAutoCompress: boolean = false): void {
+	private write(lazy: LazyEntry<V>, noAutoCompress: boolean = false): void {
 		/* istanbul ignore else */
 		if (this._compressBacklog && !this._compressBacklog.destroyed) {
 			// The compress backlog handling also handles the file statistics
-			this._compressBacklog.write(line);
+			this._compressBacklog.write(lazy);
 		} else if (this._writeBacklog && !this._writeBacklog.destroyed) {
 			// Update line statistics
-			this.updateStatistics(line);
+			this.updateStatistics(lazy);
 
 			// Either compress or write to the main file, never both
 			if (!noAutoCompress && this.needToCompress()) {
 				this.compress();
 			} else {
-				this._writeBacklog.write(line);
+				this._writeBacklog.write(lazy);
 				// If this is a throttled stream, uncork it as soon as the write
 				// buffer is larger than configured
 				if (
@@ -601,7 +624,7 @@ export class JsonlDB<V extends unknown = unknown> {
 		}
 		// If necessary, write to the dump backlog, so the dump doesn't miss any data
 		if (this._dumpBacklog && !this._dumpBacklog.destroyed) {
-			this._dumpBacklog.write(line);
+			this._dumpBacklog.write(lazy);
 		}
 	}
 
@@ -616,6 +639,48 @@ export class JsonlDB<V extends unknown = unknown> {
 		}
 	}
 
+	private makeLazyClear(): LazyEntry & { op: Operation.Clear } {
+		return {
+			op: Operation.Clear,
+
+			serialize:
+				/* istanbul ignore next - this is impossible to test since it requires exact timing */ () =>
+					"",
+		};
+	}
+
+	private makeLazyDelete(key: string): LazyEntry & { op: Operation.Delete } {
+		let serialized: string | undefined;
+		return {
+			op: Operation.Delete,
+			key,
+			serialize: () => {
+				if (serialized == undefined) {
+					serialized = this.entryToLine(key);
+				}
+				return serialized;
+			},
+		};
+	}
+
+	private makeLazyWrite(
+		key: string,
+		value: V,
+	): LazyEntry<V> & { op: Operation.Write } {
+		let serialized: string | undefined;
+		return {
+			op: Operation.Write,
+			key,
+			value,
+			serialize: () => {
+				if (serialized == undefined) {
+					serialized = this.entryToLine(key, value);
+				}
+				return serialized;
+			},
+		};
+	}
+
 	/**
 	 * Saves a compressed copy of the DB into the given path.
 	 * @param targetFilename Where the compressed copy should be written. Default: `<filename>.dump`
@@ -635,13 +700,14 @@ export class JsonlDB<V extends unknown = unknown> {
 		for (const [key, value] of entries) {
 			await fs.appendFile(
 				this._dumpFd,
+				// No need to serialize lazily here
 				this.entryToLine(key, value) + "\n",
 			);
 		}
 		// In case there is any data in the backlog stream, persist that too
-		let line: string;
-		while (null !== (line = this._dumpBacklog.read())) {
-			await fs.appendFile(this._dumpFd, line + "\n");
+		let lazy: LazyEntry<V>;
+		while (null !== (lazy = this._dumpBacklog.read())) {
+			await fs.appendFile(this._dumpFd, lazy.serialize() + "\n");
 		}
 		this._dumpBacklog.destroy();
 		this._dumpBacklog = undefined;
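
The `while (null !== (lazy = this._dumpBacklog.read()))` loop drains whatever is still buffered in the paused object-mode backlog stream: `read()` hands back queued entries synchronously and returns `null` once the buffer is empty, and `serialize()` is only paid for entries that actually make it into the dump. A standalone sketch of that drain pattern (the entries are illustrative):

```ts
import { Readable } from "stream";

// An object-mode stream we push lazy entries into, mimicking the dump backlog.
const backlog = new Readable({ objectMode: true, read() {} });
backlog.push({ serialize: () => '{"k":"a","v":1}' });
backlog.push({ serialize: () => '{"k":"b","v":2}' });

let entry: { serialize(): string } | null;
while (null !== (entry = backlog.read())) {
	console.log(entry.serialize()); // each buffered entry is flushed exactly once
}
```
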
@@ -665,16 +731,35 @@ export class JsonlDB<V extends unknown = unknown> {
 		// Open the file for appending and reading
 		this._fd = await fs.open(this.filename, "a+");
 		this._openPromise?.resolve();
+		// The chunk map is used to buffer all entries that are currently waiting in line
+		// so we avoid serializing redundant entries. When the write backlog is throttled,
+		// the chunk map will only be used for a short time.
+		const chunk = new Map<string, LazyEntry>();
 		for await (const action of this
-			._writeBacklog as AsyncIterable<string>) {
-			if (action === "") {
-				// Since we opened the file in append mode, we cannot truncate
-				// therefore close and open in write mode again
-				await fs.close(this._fd);
-				this._fd = await fs.open(this.filename, "w+");
+			._writeBacklog as AsyncIterable<LazyEntry>) {
+			if (action.op === Operation.Clear) {
+				chunk.clear();
+				chunk.set("", action);
 			} else {
-				await fs.appendFile(this._fd, action + "\n");
+				// Only remember the last entry for each key
+				chunk.set(action.key, action);
 			}
+
+			// When the backlog has been drained, perform the necessary write actions
+			if (this._writeBacklog.readableLength === 0) {
+				for (const entry of chunk.values()) {
+					if (entry.op === Operation.Clear) {
+						// Since we opened the file in append mode, we cannot truncate
+						// therefore close and open in write mode again
+						await fs.close(this._fd);
+						this._fd = await fs.open(this.filename, "w+");
+					} else {
+						await fs.appendFile(this._fd, entry.serialize() + "\n");
+					}
+				}
+				chunk.clear();
+			}
+
 			// When this is a throttled stream, auto-cork it when it was drained
 			if (this._writeBacklog.readableLength === 0 && this._isOpen) {
 				this.autoCork();
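
The chunk map above collapses redundant work while entries queue up: only the most recent operation per key survives, and a `Clear` discards everything queued before it, so superseded entries are never serialized at all. A standalone sketch of the same collapsing logic (the types and the `dedupe` helper are illustrative, not from the patch):

```ts
type QueuedOp =
	| { op: "clear" }
	| { op: "write"; key: string; line: string }
	| { op: "delete"; key: string; line: string };

function dedupe(queued: QueuedOp[]): QueuedOp[] {
	const chunk = new Map<string, QueuedOp>();
	for (const action of queued) {
		if (action.op === "clear") {
			chunk.clear(); // everything queued before the clear is now irrelevant
			chunk.set("", action); // "" keeps the clear ordered before later writes
		} else {
			chunk.set(action.key, action); // only the last entry per key survives
		}
	}
	return [...chunk.values()];
}

// Three writes to "a", a clear, then a write to "b" collapse to [clear, write "b"]:
console.log(
	dedupe([
		{ op: "write", key: "a", line: '{"k":"a","v":1}' },
		{ op: "write", key: "a", line: '{"k":"a","v":2}' },
		{ op: "write", key: "a", line: '{"k":"a","v":3}' },
		{ op: "clear" },
		{ op: "write", key: "b", line: '{"k":"b","v":4}' },
	]),
);
```
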
@@ -737,10 +822,10 @@ export class JsonlDB<V extends unknown = unknown> {
 		}
 
 		// In case there is any data in the backlog stream, persist that too
-		let line: string;
-		while (null !== (line = this._compressBacklog.read())) {
-			this.updateStatistics(line);
-			this._writeBacklog!.write(line);
+		let lazy: LazyEntry<V>;
+		while (null !== (lazy = this._compressBacklog.read())) {
+			this.updateStatistics(lazy);
+			this._writeBacklog!.write(lazy);
 		}
 		this._compressBacklog.destroy();
 		this._compressBacklog = undefined;