(function () {
var r = function () { var e = "function" == typeof require && require, r = function (i, o, u) { o || (o = 0); var n = r.resolve(i, o), t = r.m[o][n]; if (!t && e) { if (t = e(n)) return t } else if (t && t.c && (o = t.c, n = t.m, t = r.m[o][t.m], !t)) throw new Error('failed to require "' + n + '" from ' + o); if (!t) throw new Error('failed to require "' + i + '" from ' + u); return t.exports || (t.exports = {}, t.call(t.exports, t, t.exports, r.relative(n, o))), t.exports }; return r.resolve = function (e, n) { var i = e, t = e + ".js", o = e + "/index.js"; return r.m[n][t] && t ? t : r.m[n][o] && o ? o : i }, r.relative = function (e, t) { return function (n) { if ("." != n.charAt(0)) return r(n, t, e); var o = e.split("/"), f = n.split("/"); o.pop(); for (var i = 0; i < f.length; i++) { var u = f[i]; ".." == u ? o.pop() : "." != u && o.push(u) } return r(o.join("/"), t, e) } }, r }(); r.m = [];
r.m[0] = {
"Patch.js": function (module, exports, require) {
/*
 * Copyright 2014 XWiki SAS
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
var Common = require ( './Common' ) ;
var Operation = require ( './Operation' ) ;
var Sha = require ( './SHA256' ) ;
var Patch = module . exports ;
var create = Patch . create = function ( parentHash ) {
return {
type : 'Patch' ,
operations : [ ] ,
parentHash : parentHash ,
isCheckpoint : false
} ;
} ;
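// Illustrative usage (comment only, not part of the original source): a new
// patch is bound to the SHA-256 hash of the document it is meant to apply to.
//   var p = Patch.create(Sha.hex_sha256('hello'));
//   // p => { type: 'Patch', operations: [], parentHash: <64-char hex string>, isCheckpoint: false }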
var check = Patch.check = function (patch, docLength_opt) {
Common.assert(patch.type === 'Patch');
Common.assert(Array.isArray(patch.operations));
Common.assert(/^[0-9a-f]{64}$/.test(patch.parentHash));
for (var i = patch.operations.length - 1; i >= 0; i--) {
Operation.check(patch.operations[i], docLength_opt);
if (i > 0) {
Common.assert(!Operation.shouldMerge(patch.operations[i], patch.operations[i - 1]));
}
if (typeof(docLength_opt) === 'number') {
docLength_opt += Operation.lengthChange(patch.operations[i]);
}
}
if (patch.isCheckpoint) {
Common.assert(patch.operations.length === 1);
Common.assert(patch.operations[0].offset === 0);
if (typeof(docLength_opt) === 'number') {
Common.assert(!docLength_opt || patch.operations[0].toRemove === docLength_opt);
}
}
};
var toObj = Patch . toObj = function ( patch ) {
if ( Common . PARANOIA ) { check ( patch ) ; }
var out = new Array ( patch . operations . length + 1 ) ;
var i ;
for ( i = 0 ; i < patch . operations . length ; i ++ ) {
out [ i ] = Operation . toObj ( patch . operations [ i ] ) ;
}
out [ i ] = patch . parentHash ;
return out ;
} ;
var fromObj = Patch . fromObj = function ( obj ) {
Common . assert ( Array . isArray ( obj ) && obj . length > 0 ) ;
var patch = create ( ) ;
var i ;
for ( i = 0 ; i < obj . length - 1 ; i ++ ) {
patch . operations [ i ] = Operation . fromObj ( obj [ i ] ) ;
}
patch . parentHash = obj [ i ] ;
if ( Common . PARANOIA ) { check ( patch ) ; }
return patch ;
} ;
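// Illustrative round trip (comment only, not part of the original source):
// given a valid patch p, its serialized form is an array of
// [offset, toRemove, toInsert] triples followed by the parentHash,
// e.g. [ [0, 0, "hi"], "<64-char hex hash>" ].
//   var p2 = Patch.fromObj(Patch.toObj(p));   // Patch.equals(p, p2) === true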
var hash = function (text) {
return Sha.hex_sha256(text);
};
var addOperation = Patch . addOperation = function ( patch , op ) {
if ( Common . PARANOIA ) {
check ( patch ) ;
Operation . check ( op ) ;
}
for ( var i = 0 ; i < patch . operations . length ; i ++ ) {
if ( Operation . shouldMerge ( patch . operations [ i ] , op ) ) {
op = Operation . merge ( patch . operations [ i ] , op ) ;
patch . operations . splice ( i , 1 ) ;
if ( op === null ) {
//console.log("operations cancelled each other");
return ;
}
i -- ;
} else {
var out = Operation . rebase ( patch . operations [ i ] , op ) ;
if ( out === op ) {
// op could not be rebased further, insert it here to keep the list ordered.
patch . operations . splice ( i , 0 , op ) ;
return ;
} else {
op = out ;
// op was rebased, try rebasing it against the next operation.
}
}
}
patch . operations . push ( op ) ;
if ( Common . PARANOIA ) { check ( patch ) ; }
} ;
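// Illustrative behaviour (comment only, not part of the original source):
// operations which abut or overlap are merged rather than stored separately.
//   var p = Patch.create(Sha.hex_sha256(''));
//   Patch.addOperation(p, Operation.create(0, 0, 'ab'));
//   Patch.addOperation(p, Operation.create(2, 0, 'cd'));
//   // p.operations => [ { type: 'Operation', offset: 0, toRemove: 0, toInsert: 'abcd' } ]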
var createCheckpoint = Patch.createCheckpoint =
function (parentContent, checkpointContent, parentContentHash_opt)
{
var op = Operation.create(0, parentContent.length, checkpointContent);
if (Common.PARANOIA && parentContentHash_opt) {
Common.assert(parentContentHash_opt === hash(parentContent));
}
parentContentHash_opt = parentContentHash_opt || hash(parentContent);
var out = create(parentContentHash_opt);
addOperation(out, op);
out.isCheckpoint = true;
return out;
};
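// Illustrative behaviour (comment only, not part of the original source):
// a checkpoint is a single operation replacing the whole parent content.
//   var cp = Patch.createCheckpoint('abc', 'abc');
//   // cp.operations => [ { offset: 0, toRemove: 3, toInsert: 'abc', ... } ]
//   // cp.isCheckpoint === true, cp.parentHash === Sha.hex_sha256('abc')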
var clone = Patch . clone = function ( patch ) {
if ( Common . PARANOIA ) { check ( patch ) ; }
var out = create ( ) ;
out . parentHash = patch . parentHash ;
for ( var i = 0 ; i < patch . operations . length ; i ++ ) {
out . operations [ i ] = Operation . clone ( patch . operations [ i ] ) ;
}
return out ;
} ;
var merge = Patch . merge = function ( oldPatch , newPatch ) {
if ( Common . PARANOIA ) {
check ( oldPatch ) ;
check ( newPatch ) ;
}
oldPatch = clone ( oldPatch ) ;
for ( var i = newPatch . operations . length - 1 ; i >= 0 ; i -- ) {
addOperation ( oldPatch , newPatch . operations [ i ] ) ;
}
return oldPatch ;
} ;
var apply = Patch.apply = function (patch, doc)
{
if (Common.PARANOIA) {
check(patch);
Common.assert(typeof(doc) === 'string');
Common.assert(Sha.hex_sha256(doc) === patch.parentHash);
}
var newDoc = doc;
for (var i = patch.operations.length - 1; i >= 0; i--) {
newDoc = Operation.apply(patch.operations[i], newDoc);
}
return newDoc;
};
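// Illustrative behaviour (comment only, not part of the original source):
// operations are applied from the end of the list backwards so that
// lower-offset operations cannot shift the offsets of the later ones.
//   var p = Patch.create(Sha.hex_sha256('abc'));
//   Patch.addOperation(p, Operation.create(3, 0, '!'));
//   Patch.addOperation(p, Operation.create(0, 1, 'A'));
//   Patch.apply(p, 'abc')   // => 'Abc!'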
var lengthChange = Patch . lengthChange = function ( patch )
{
if ( Common . PARANOIA ) { check ( patch ) ; }
var out = 0 ;
for ( var i = 0 ; i < patch . operations . length ; i ++ ) {
out += Operation . lengthChange ( patch . operations [ i ] ) ;
}
return out ;
} ;
var invert = Patch.invert = function (patch, doc)
{
if (Common.PARANOIA) {
check(patch);
Common.assert(typeof(doc) === 'string');
Common.assert(Sha.hex_sha256(doc) === patch.parentHash);
}
var rpatch = create();
var newDoc = doc;
for (var i = patch.operations.length - 1; i >= 0; i--) {
rpatch.operations[i] = Operation.invert(patch.operations[i], newDoc);
newDoc = Operation.apply(patch.operations[i], newDoc);
}
for (var i = rpatch.operations.length - 1; i >= 0; i--) {
for (var j = i - 1; j >= 0; j--) {
rpatch.operations[i].offset += rpatch.operations[j].toRemove;
rpatch.operations[i].offset -= rpatch.operations[j].toInsert.length;
}
}
rpatch.parentHash = Sha.hex_sha256(newDoc);
if (Common.PARANOIA) { check(rpatch); }
return rpatch;
};
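// Illustrative property (comment only, not part of the original source):
// for a patch p whose parentHash matches doc, applying the patch and then
// its inverse restores the original document.
//   var inv = Patch.invert(p, doc);
//   Patch.apply(inv, Patch.apply(p, doc)) === doc   // true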
var simplify = Patch . simplify = function ( patch , doc , operationSimplify )
{
if ( Common . PARANOIA ) {
check ( patch ) ;
Common . assert ( typeof ( doc ) === 'string' ) ;
Common.assert(Sha.hex_sha256(doc) === patch.parentHash);
}
operationSimplify = operationSimplify || Operation . simplify ;
var spatch = create ( patch . parentHash ) ;
var newDoc = doc ;
var outOps = [ ] ;
var j = 0 ;
for ( var i = patch . operations . length - 1 ; i >= 0 ; i -- ) {
outOps [ j ] = operationSimplify ( patch . operations [ i ] , newDoc , Operation . simplify ) ;
if ( outOps [ j ] ) {
newDoc = Operation . apply ( outOps [ j ] , newDoc ) ;
j ++ ;
}
}
spatch . operations = outOps . reverse ( ) ;
if ( ! spatch . operations [ 0 ] ) {
spatch . operations . shift ( ) ;
}
if ( Common . PARANOIA ) {
check ( spatch ) ;
}
return spatch ;
} ;
var equals = Patch . equals = function ( patchA , patchB ) {
if ( patchA . operations . length !== patchB . operations . length ) { return false ; }
for ( var i = 0 ; i < patchA . operations . length ; i ++ ) {
if ( ! Operation . equals ( patchA . operations [ i ] , patchB . operations [ i ] ) ) { return false ; }
}
return true ;
} ;
var transform = Patch . transform = function ( origToTransform , transformBy , doc , transformFunction ) {
if ( Common . PARANOIA ) {
check ( origToTransform , doc . length ) ;
check ( transformBy , doc . length ) ;
Common.assert(Sha.hex_sha256(doc) === origToTransform.parentHash);
}
Common . assert ( origToTransform . parentHash === transformBy . parentHash ) ;
var resultOfTransformBy = apply ( transformBy , doc ) ;
var toTransform = clone ( origToTransform ) ;
var text = doc ;
for ( var i = toTransform . operations . length - 1 ; i >= 0 ; i -- ) {
for ( var j = transformBy . operations . length - 1 ; j >= 0 ; j -- ) {
try {
toTransform . operations [ i ] = Operation . transform ( text ,
toTransform . operations [ i ] ,
transformBy . operations [ j ] ,
transformFunction ) ;
} catch ( e ) {
console . error ( "The pluggable transform function threw an error, " +
"failing operational transformation" ) ;
return create(Sha.hex_sha256(resultOfTransformBy));
}
if ( ! toTransform . operations [ i ] ) {
break ;
}
}
if ( Common . PARANOIA && toTransform . operations [ i ] ) {
Operation . check ( toTransform . operations [ i ] , resultOfTransformBy . length ) ;
}
}
var out = create ( transformBy . parentHash ) ;
for ( var i = toTransform . operations . length - 1 ; i >= 0 ; i -- ) {
if ( toTransform . operations [ i ] ) {
addOperation ( out , toTransform . operations [ i ] ) ;
}
}
out.parentHash = Sha.hex_sha256(resultOfTransformBy);
if ( Common . PARANOIA ) {
check ( out , resultOfTransformBy . length ) ;
}
return out ;
} ;
var random = Patch . random = function ( doc , opCount ) {
Common . assert ( typeof ( doc ) === 'string' ) ;
opCount = opCount || ( Math . floor ( Math . random ( ) * 30 ) + 1 ) ;
var patch = create(Sha.hex_sha256(doc));
var docLength = doc . length ;
while ( opCount -- > 0 ) {
var op = Operation . random ( docLength ) ;
docLength += Operation . lengthChange ( op ) ;
addOperation ( patch , op ) ;
}
check ( patch ) ;
return patch ;
} ;
} ,
"SHA256.js" : function ( module , exports , require ) {
/* A JavaScript implementation of the Secure Hash Algorithm, SHA-256
 * Version 0.3 Copyright Angel Marin 2003-2004 - http://anmar.eu.org/
 * Distributed under the BSD License
 * Some bits taken from Paul Johnston's SHA-1 implementation
 */
( function ( ) {
var chrsz = 8 ; /* bits per input character. 8 - ASCII; 16 - Unicode */
function safe_add(x, y) {
var lsw = ( x & 0xFFFF ) + ( y & 0xFFFF ) ;
var msw = ( x >> 16 ) + ( y >> 16 ) + ( lsw >> 16 ) ;
return ( msw << 16 ) | ( lsw & 0xFFFF ) ;
}
function S ( X , n ) { return ( X >>> n ) | ( X << ( 32 - n ) ) ; }
function R ( X , n ) { return ( X >>> n ) ; }
function Ch ( x , y , z ) { return ( ( x & y ) ^ ( ( ~ x ) & z ) ) ; }
function Maj ( x , y , z ) { return ( ( x & y ) ^ ( x & z ) ^ ( y & z ) ) ; }
function Sigma0256 ( x ) { return ( S ( x , 2 ) ^ S ( x , 13 ) ^ S ( x , 22 ) ) ; }
function Sigma1256 ( x ) { return ( S ( x , 6 ) ^ S ( x , 11 ) ^ S ( x , 25 ) ) ; }
function Gamma0256 ( x ) { return ( S ( x , 7 ) ^ S ( x , 18 ) ^ R ( x , 3 ) ) ; }
function Gamma1256 ( x ) { return ( S ( x , 17 ) ^ S ( x , 19 ) ^ R ( x , 10 ) ) ; }
function newArray ( n ) {
var a = [ ] ;
for ( ; n > 0 ; n -- ) {
a . push ( undefined ) ;
}
return a ;
}
function core_sha256(m, l) {
var K = [ 0x428A2F98 , 0x71374491 , 0xB5C0FBCF , 0xE9B5DBA5 , 0x3956C25B , 0x59F111F1 , 0x923F82A4 , 0xAB1C5ED5 , 0xD807AA98 , 0x12835B01 , 0x243185BE , 0x550C7DC3 , 0x72BE5D74 , 0x80DEB1FE , 0x9BDC06A7 , 0xC19BF174 , 0xE49B69C1 , 0xEFBE4786 , 0xFC19DC6 , 0x240CA1CC , 0x2DE92C6F , 0x4A7484AA , 0x5CB0A9DC , 0x76F988DA , 0x983E5152 , 0xA831C66D , 0xB00327C8 , 0xBF597FC7 , 0xC6E00BF3 , 0xD5A79147 , 0x6CA6351 , 0x14292967 , 0x27B70A85 , 0x2E1B2138 , 0x4D2C6DFC , 0x53380D13 , 0x650A7354 , 0x766A0ABB , 0x81C2C92E , 0x92722C85 , 0xA2BFE8A1 , 0xA81A664B , 0xC24B8B70 , 0xC76C51A3 , 0xD192E819 , 0xD6990624 , 0xF40E3585 , 0x106AA070 , 0x19A4C116 , 0x1E376C08 , 0x2748774C , 0x34B0BCB5 , 0x391C0CB3 , 0x4ED8AA4A , 0x5B9CCA4F , 0x682E6FF3 , 0x748F82EE , 0x78A5636F , 0x84C87814 , 0x8CC70208 , 0x90BEFFFA , 0xA4506CEB , 0xBEF9A3F7 , 0xC67178F2 ] ;
var HASH = [ 0x6A09E667 , 0xBB67AE85 , 0x3C6EF372 , 0xA54FF53A , 0x510E527F , 0x9B05688C , 0x1F83D9AB , 0x5BE0CD19 ] ;
var W = newArray ( 64 ) ;
var a , b , c , d , e , f , g , h , i , j ;
var T1 , T2 ;
/* append padding */
m [ l >> 5 ] |= 0x80 << ( 24 - l % 32 ) ;
m [ ( ( l + 64 >> 9 ) << 4 ) + 15 ] = l ;
for ( var i = 0 ; i < m . length ; i += 16 ) {
a = HASH [ 0 ] ; b = HASH [ 1 ] ; c = HASH [ 2 ] ; d = HASH [ 3 ] ;
e = HASH [ 4 ] ; f = HASH [ 5 ] ; g = HASH [ 6 ] ; h = HASH [ 7 ] ;
for ( var j = 0 ; j < 64 ; j ++ ) {
if ( j < 16 ) {
W [ j ] = m [ j + i ] ;
} else {
W[j] = safe_add(safe_add(safe_add(Gamma1256(
W[j - 2]), W[j - 7]), Gamma0256(W[j - 15])), W[j - 16]);
}
T1 = safe_add(safe_add(safe_add(
safe_add(h, Sigma1256(e)), Ch(e, f, g)), K[j]), W[j]);
T2 = safe_add(Sigma0256(a), Maj(a, b, c));
h = g; g = f; f = e; e = safe_add(d, T1);
d = c; c = b; b = a; a = safe_add(T1, T2);
}
HASH[0] = safe_add(a, HASH[0]); HASH[1] = safe_add(b, HASH[1]);
HASH[2] = safe_add(c, HASH[2]); HASH[3] = safe_add(d, HASH[3]);
HASH[4] = safe_add(e, HASH[4]); HASH[5] = safe_add(f, HASH[5]);
HASH[6] = safe_add(g, HASH[6]); HASH[7] = safe_add(h, HASH[7]);
}
return HASH ;
}
function str2binb ( str ) {
var bin = Array ( ) ;
var mask = ( 1 << chrsz ) - 1 ;
for ( var i = 0 ; i < str . length * chrsz ; i += chrsz )
bin [ i >> 5 ] |= ( str . charCodeAt ( i / chrsz ) & mask ) << ( 24 - i % 32 ) ;
return bin ;
}
function binb2hex ( binarray ) {
var hexcase = 0 ; /* hex output format. 0 - lowercase; 1 - uppercase */
var hex_tab = hexcase ? "0123456789ABCDEF" : "0123456789abcdef";
var str = "" ;
for ( var i = 0 ; i < binarray . length * 4 ; i ++ ) {
str += hex_tab.charAt((binarray[i >> 2] >> ((3 - i % 4) * 8 + 4)) & 0xF) +
hex_tab.charAt((binarray[i >> 2] >> ((3 - i % 4) * 8)) & 0xF);
}
return str ;
}
function hex_sha256(s) {
return binb2hex(core_sha256(str2binb(s), s.length * chrsz));
}
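// Illustrative usage (comment only, not part of the original source):
//   hex_sha256('') === 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'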
module.exports.hex_sha256 = hex_sha256;
} ( ) ) ;
} ,
"Common.js" : function ( module , exports , require ) {
/*
 * Copyright 2014 XWiki SAS
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
var PARANOIA = module . exports . PARANOIA = true ;
/* Good testing but slooooooooooow */
var VALIDATE_ENTIRE_CHAIN_EACH_MSG = module.exports.VALIDATE_ENTIRE_CHAIN_EACH_MSG = false;
/* throw errors over non-compliant messages which would otherwise be treated as invalid */
var TESTING = module . exports . TESTING = false ;
var assert = module . exports . assert = function ( expr ) {
if ( ! expr ) { throw new Error ( "Failed assertion" ) ; }
} ;
var isUint = module . exports . isUint = function ( integer ) {
return ( typeof ( integer ) === 'number' ) &&
( Math . floor ( integer ) === integer ) &&
( integer >= 0 ) ;
} ;
var randomASCII = module . exports . randomASCII = function ( length ) {
var content = [ ] ;
for ( var i = 0 ; i < length ; i ++ ) {
content [ i ] = String . fromCharCode ( Math . floor ( Math . random ( ) * 256 ) % 57 + 65 ) ;
}
return content . join ( '' ) ;
} ;
var strcmp = module . exports . strcmp = function ( a , b ) {
if ( PARANOIA && typeof ( a ) !== 'string' ) { throw new Error ( ) ; }
if ( PARANOIA && typeof ( b ) !== 'string' ) { throw new Error ( ) ; }
return ( ( a === b ) ? 0 : ( ( a > b ) ? 1 : - 1 ) ) ;
};
} ,
"Message.js" : function ( module , exports , require ) {
/*
 * Copyright 2014 XWiki SAS
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
var Common = require ( './Common' ) ;
var Operation = require ( './Operation' ) ;
var Patch = require ( './Patch' ) ;
var Sha = require ( './SHA256' ) ;
var Message = module . exports ;
var REGISTER = Message . REGISTER = 0 ;
var REGISTER_ACK = Message.REGISTER_ACK = 1;
var PATCH = Message . PATCH = 2 ;
var DISCONNECT = Message . DISCONNECT = 3 ;
var CHECKPOINT = Message . CHECKPOINT = 4 ;
var check = Message . check = function ( msg ) {
Common . assert ( msg . type === 'Message' ) ;
if ( msg . messageType === PATCH || msg . messageType === CHECKPOINT ) {
Patch . check ( msg . content ) ;
Common . assert ( typeof ( msg . lastMsgHash ) === 'string' ) ;
} else {
throw new Error ( "invalid message type [" + msg . messageType + "]" ) ;
}
} ;
var create = Message . create = function ( type , content , lastMsgHash ) {
var msg = {
type : 'Message' ,
messageType : type ,
content : content ,
lastMsgHash : lastMsgHash
} ;
if ( Common . PARANOIA ) { check ( msg ) ; }
return msg ;
} ;
var toString = Message . toString = function ( msg ) {
if ( Common . PARANOIA ) { check ( msg ) ; }
if ( msg . messageType === PATCH || msg . messageType === CHECKPOINT ) {
return JSON . stringify ( [ msg . messageType , Patch . toObj ( msg . content ) , msg . lastMsgHash ] ) ;
} else {
throw new Error ( ) ;
}
} ;
var discardBencode = function ( msg , arr ) {
var len = msg . substring ( 0 , msg . indexOf ( ':' ) ) ;
msg = msg . substring ( len . length + 1 ) ;
var value = msg . substring ( 0 , Number ( len ) ) ;
msg = msg . substring ( value . length ) ;
if ( arr ) { arr . push ( value ) ; }
return msg ;
} ;
var fromString = Message . fromString = function ( str ) {
var m = JSON . parse ( str ) ;
if ( m [ 0 ] !== CHECKPOINT && m [ 0 ] !== PATCH ) { throw new Error ( "invalid message type " + m [ 0 ] ) ; }
var msg = create ( m [ 0 ] , Patch . fromObj ( m [ 1 ] ) , m [ 2 ] ) ;
if ( m [ 0 ] === CHECKPOINT ) { msg . content . isCheckpoint = true ; }
return msg ;
} ;
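// Illustrative wire format (comment only, not part of the original source):
// a PATCH message carrying one insertion serializes via toString() to
//   '[2,[[0,0,"hi"],"<parentHash>"],"<lastMsgHash>"]'
// where 2 is Message.PATCH, [0,0,"hi"] is [offset, toRemove, toInsert], and
// the placeholders stand for 64-character hex SHA-256 hashes.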
var hashOf = Message . hashOf = function ( msg ) {
if ( Common . PARANOIA ) { check ( msg ) ; }
var hash = Sha.hex_sha256(toString(msg));
return hash ;
} ;
} ,
"ChainPad.js" : function ( module , exports , require ) {
/*
 * Copyright 2014 XWiki SAS
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
var Common = module . exports . Common = require ( './Common' ) ;
var Operation = module . exports . Operation = require ( './Operation' ) ;
var Patch = module . exports . Patch = require ( './Patch' ) ;
var Message = module . exports . Message = require ( './Message' ) ;
var Sha = module . exports . Sha = require ( './SHA256' ) ;
var ChainPad = { } ;
// hex_sha256('')
var EMPTY_STR_HASH = module.exports.EMPTY_STR_HASH =
'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855';
var ZERO = '0000000000000000000000000000000000000000000000000000000000000000' ;
// Default number of patches between checkpoints (patches older than this will be pruned)
// default for realtime.config.checkpointInterval
var DEFAULT_CHECKPOINT_INTERVAL = 200;
// Default number of milliseconds to wait before syncing to the server
var DEFAULT_AVERAGE_SYNC_MILLISECONDS = 300;
var enterChainPad = function ( realtime , func ) {
return function ( ) {
if ( realtime . failed ) { return ; }
func . apply ( null , arguments ) ;
} ;
} ;
var debug = function ( realtime , msg ) {
if ( realtime . logLevel > 0 ) {
console . log ( "[" + realtime . userName + "] " + msg ) ;
}
} ;
var schedule = function ( realtime , func , timeout ) {
if ( realtime . aborted ) { return ; }
if ( ! timeout ) {
timeout = Math . floor ( Math . random ( ) * 2 * realtime . config . avgSyncMilliseconds ) ;
}
var to = setTimeout ( enterChainPad ( realtime , function ( ) {
realtime . schedules . splice ( realtime . schedules . indexOf ( to ) , 1 ) ;
func ( ) ;
} ) , timeout ) ;
realtime . schedules . push ( to ) ;
return to ;
} ;
var unschedule = function ( realtime , schedule ) {
var index = realtime . schedules . indexOf ( schedule ) ;
if ( index > - 1 ) {
realtime . schedules . splice ( index , 1 ) ;
}
clearTimeout ( schedule ) ;
} ;
var onMessage = function ( realtime , message , callback ) {
if ( ! realtime . messageHandlers . length ) {
callback ( "no onMessage() handler registered" ) ;
}
for ( var i = 0 ; i < realtime . messageHandlers . length ; i ++ ) {
realtime . messageHandlers [ i ] ( message , function ( ) {
callback . apply ( null , arguments ) ;
callback = function ( ) { } ;
} ) ;
}
} ;
var sendMessage = function ( realtime , msg , callback ) {
var strMsg = Message . toString ( msg ) ;
onMessage ( realtime , strMsg , function ( err ) {
if ( err ) {
debug ( realtime , "Posting to server failed [" + err + "]" ) ;
realtime . pending = null ;
} else {
var pending = realtime . pending ;
realtime . pending = null ;
Common . assert ( pending . hash === msg . hashOf ) ;
handleMessage ( realtime , strMsg , true ) ;
pending . callback ( ) ;
}
} ) ;
msg . hashOf = msg . hashOf || Message . hashOf ( msg ) ;
var timeout = schedule ( realtime , function ( ) {
debug ( realtime , "Failed to send message [" + msg . hashOf + "] to server" ) ;
sync ( realtime ) ;
} , 10000 + ( Math . random ( ) * 5000 ) ) ;
if ( realtime . pending ) { throw new Error ( "there is already a pending message" ) ; }
realtime . pending = {
hash : msg . hashOf ,
callback : function ( ) {
if ( realtime . initialMessage && realtime . initialMessage . hashOf === msg . hashOf ) {
debug ( realtime , "initial Ack received [" + msg . hashOf + "]" ) ;
realtime . initialMessage = null ;
}
unschedule ( realtime , timeout ) ;
realtime . syncSchedule = schedule ( realtime , function ( ) { sync ( realtime ) ; } , 0 ) ;
callback ( ) ;
}
} ;
if ( Common . PARANOIA ) { check ( realtime ) ; }
} ;
var sync = function ( realtime ) {
if ( Common . PARANOIA ) { check ( realtime ) ; }
if ( realtime . syncSchedule && ! realtime . pending ) {
unschedule ( realtime , realtime . syncSchedule ) ;
realtime . syncSchedule = null ;
} else {
//debug(realtime, "already syncing...");
// we're currently waiting on something from the server.
return ;
}
realtime . uncommitted = Patch . simplify (
realtime . uncommitted , realtime . authDoc , realtime . config . operationSimplify ) ;
if ( realtime . uncommitted . operations . length === 0 ) {
//debug(realtime, "No data to sync to the server, sleeping");
realtime . syncSchedule = schedule ( realtime , function ( ) { sync ( realtime ) ; } ) ;
return ;
}
if ( ( ( parentCount ( realtime , realtime . best ) + 1 ) % realtime . config . checkpointInterval ) === 0 ) {
var best = realtime . best ;
debug ( realtime , "Sending checkpoint" ) ;
var cpp = Patch . createCheckpoint ( realtime . authDoc ,
realtime . authDoc ,
realtime . best . content . inverseOf . parentHash ) ;
var cp = Message . create ( Message . CHECKPOINT , cpp , realtime . best . hashOf ) ;
sendMessage ( realtime , cp , function ( ) {
debug ( realtime , "Checkpoint sent and accepted" ) ;
} ) ;
return ;
}
var msg ;
if ( realtime . best === realtime . initialMessage ) {
msg = realtime . initialMessage ;
} else {
msg = Message . create ( Message . PATCH , realtime . uncommitted , realtime . best . hashOf ) ;
}
sendMessage ( realtime , msg , function ( ) {
//debug(realtime, "patch sent");
} ) ;
} ;
var create = ChainPad . create = function ( config ) {
config = config || { } ;
var initialState = config . initialState || '' ;
config.checkpointInterval = config.checkpointInterval || DEFAULT_CHECKPOINT_INTERVAL;
config.avgSyncMilliseconds = config.avgSyncMilliseconds || DEFAULT_AVERAGE_SYNC_MILLISECONDS;
var realtime = {
type : 'ChainPad' ,
authDoc : '' ,
config : config ,
logLevel : ( typeof ( config . logLevel ) === 'number' ) ? config . logLevel : 1 ,
/** A patch representing all uncommitted work. */
uncommitted : null ,
uncommittedDocLength : initialState . length ,
patchHandlers : [ ] ,
changeHandlers : [ ] ,
messageHandlers : [ ] ,
schedules : [ ] ,
aborted : false ,
syncSchedule : null ,
registered : false ,
// this is only used if PARANOIA is enabled.
userInterfaceContent : undefined ,
failed : false ,
// hash and callback for the previously sent patch, currently in flight.
pending : null ,
messages : { } ,
messagesByParent : { } ,
rootMessage : null ,
userName : config . userName || 'anonymous' ,
} ;
if ( Common . PARANOIA ) {
realtime . userInterfaceContent = initialState ;
}
var zeroPatch = Patch.create(EMPTY_STR_HASH);
if ( initialState !== '' ) {
var initialOp = Operation . create ( 0 , 0 , initialState ) ;
Patch . addOperation ( zeroPatch , initialOp ) ;
}
zeroPatch . inverseOf = Patch . invert ( zeroPatch , '' ) ;
zeroPatch . inverseOf . inverseOf = zeroPatch ;
var zeroMsg = Message . create ( Message . PATCH , zeroPatch , ZERO ) ;
zeroMsg . hashOf = Message . hashOf ( zeroMsg ) ;
zeroMsg . parentCount = 0 ;
realtime . messages [ zeroMsg . hashOf ] = zeroMsg ;
( realtime . messagesByParent [ zeroMsg . lastMessageHash ] || [ ] ) . push ( zeroMsg ) ;
realtime . rootMessage = zeroMsg ;
realtime . best = zeroMsg ;
realtime . authDoc = initialState ;
realtime . uncommitted = Patch . create ( zeroPatch . inverseOf . parentHash ) ;
if ( Common . PARANOIA ) {
realtime . userInterfaceContent = initialState ;
}
return realtime ;
} ;
var getParent = function ( realtime , message ) {
return message . parent = message . parent || realtime . messages [ message . lastMsgHash ] ;
} ;
var check = ChainPad . check = function ( realtime ) {
Common . assert ( realtime . type === 'ChainPad' ) ;
Common . assert ( typeof ( realtime . authDoc ) === 'string' ) ;
Patch . check ( realtime . uncommitted , realtime . authDoc . length ) ;
var uiDoc = Patch . apply ( realtime . uncommitted , realtime . authDoc ) ;
if ( uiDoc . length !== realtime . uncommittedDocLength ) {
Common . assert ( 0 ) ;
}
if ( realtime . userInterfaceContent !== '' ) {
Common . assert ( uiDoc === realtime . userInterfaceContent ) ;
}
if (!Common.VALIDATE_ENTIRE_CHAIN_EACH_MSG) { return; }
var doc = realtime . authDoc ;
var patchMsg = realtime . best ;
Common . assert ( patchMsg . content . inverseOf . parentHash === realtime . uncommitted . parentHash ) ;
var patches = [ ] ;
do {
patches . push ( patchMsg ) ;
doc = Patch . apply ( patchMsg . content . inverseOf , doc ) ;
} while ( ( patchMsg = getParent ( realtime , patchMsg ) ) ) ;
Common . assert ( doc === '' ) ;
while ( ( patchMsg = patches . pop ( ) ) ) {
doc = Patch . apply ( patchMsg . content , doc ) ;
}
Common . assert ( doc === realtime . authDoc ) ;
} ;
var doOperation = ChainPad . doOperation = function ( realtime , op ) {
if ( Common . PARANOIA ) {
check ( realtime ) ;
realtime . userInterfaceContent = Operation . apply ( op , realtime . userInterfaceContent ) ;
}
Operation . check ( op , realtime . uncommittedDocLength ) ;
Patch . addOperation ( realtime . uncommitted , op ) ;
realtime . uncommittedDocLength += Operation . lengthChange ( op ) ;
} ;
var doPatch = ChainPad . doPatch = function ( realtime , patch ) {
if ( Common . PARANOIA ) {
check ( realtime ) ;
Common . assert ( Patch . invert ( realtime . uncommitted ) . parentHash === patch . parentHash ) ;
realtime . userInterfaceContent = Patch . apply ( patch , realtime . userInterfaceContent ) ;
}
Patch . check ( patch , realtime . uncommittedDocLength ) ;
realtime . uncommitted = Patch . merge ( realtime . uncommitted , patch ) ;
realtime . uncommittedDocLength += Patch . lengthChange ( patch ) ;
} ;
var isAncestorOf = function ( realtime , ancestor , decendent ) {
if ( ! decendent || ! ancestor ) { return false ; }
if ( ancestor === decendent ) { return true ; }
return isAncestorOf ( realtime , ancestor , getParent ( realtime , decendent ) ) ;
} ;
var parentCount = function ( realtime , message ) {
if ( typeof ( message . parentCount ) !== 'number' ) {
message . parentCount = parentCount ( realtime , getParent ( realtime , message ) ) + 1 ;
}
return message . parentCount ;
} ;
var applyPatch = function ( realtime , isFromMe , patch ) {
Common . assert ( patch ) ;
Common . assert ( patch . inverseOf ) ;
if ( isFromMe && ! patch . isInitialStatePatch ) {
var inverseOldUncommitted = Patch . invert ( realtime . uncommitted , realtime . authDoc ) ;
var userInterfaceContent = Patch . apply ( realtime . uncommitted , realtime . authDoc ) ;
if ( Common . PARANOIA ) {
Common . assert ( userInterfaceContent === realtime . userInterfaceContent ) ;
}
realtime . uncommitted = Patch . merge ( inverseOldUncommitted , patch ) ;
realtime . uncommitted = Patch . invert ( realtime . uncommitted , userInterfaceContent ) ;
} else {
realtime . uncommitted =
Patch . transform (
realtime . uncommitted , patch , realtime . authDoc , realtime . config . transformFunction ) ;
}
realtime . uncommitted . parentHash = patch . inverseOf . parentHash ;
realtime . authDoc = Patch . apply ( patch , realtime . authDoc ) ;
if ( Common . PARANOIA ) {
Common . assert ( realtime . uncommitted . parentHash === patch . inverseOf . parentHash ) ;
Common.assert(Sha.hex_sha256(realtime.authDoc) === realtime.uncommitted.parentHash);
realtime . userInterfaceContent = Patch . apply ( realtime . uncommitted , realtime . authDoc ) ;
}
} ;
var revertPatch = function ( realtime , isFromMe , patch ) {
applyPatch ( realtime , isFromMe , patch . inverseOf ) ;
} ;
var getBestChild = function ( realtime , msg ) {
var best = msg ;
( realtime . messagesByParent [ msg . hashOf ] || [ ] ) . forEach ( function ( child ) {
Common . assert ( child . lastMsgHash === msg . hashOf ) ;
child = getBestChild ( realtime , child ) ;
if ( parentCount ( realtime , child ) > parentCount ( realtime , best ) ) { best = child ; }
} ) ;
return best ;
} ;
var pushUIPatch = function ( realtime , patch ) {
if ( patch . operations . length ) {
// push the uncommittedPatch out to the user interface.
for ( var i = 0 ; i < realtime . patchHandlers . length ; i ++ ) {
realtime . patchHandlers [ i ] ( patch ) ;
}
for ( var i = 0 ; i < realtime . changeHandlers . length ; i ++ ) {
for (var j = patch.operations.length - 1; j >= 0; j--) {
var op = patch . operations [ j ] ;
realtime . changeHandlers [ i ] ( op . offset , op . toRemove , op . toInsert ) ;
}
}
}
} ;
var handleMessage = ChainPad . handleMessage = function ( realtime , msgStr , isFromMe ) {
if ( Common . PARANOIA ) { check ( realtime ) ; }
var msg = Message . fromString ( msgStr ) ;
// otherwise it's a disconnect.
if ( msg . messageType !== Message . PATCH && msg . messageType !== Message . CHECKPOINT ) {
debug ( realtime , "unrecognized message type " + msg . messageType ) ;
return ;
}
msg . hashOf = Message . hashOf ( msg ) ;
if ( realtime . messages [ msg . hashOf ] ) {
debug ( realtime , "Patch [" + msg . hashOf + "] is already known" ) ;
if ( Common . PARANOIA ) { check ( realtime ) ; }
return ;
}
realtime . messages [ msg . hashOf ] = msg ;
( realtime . messagesByParent [ msg . lastMsgHash ] =
realtime . messagesByParent [ msg . lastMsgHash ] || [ ] ) . push ( msg ) ;
if ( ! isAncestorOf ( realtime , realtime . rootMessage , msg ) ) {
if ( realtime . rootMessage === realtime . best && msg . content . isCheckpoint ) {
// We're starting with a truncated chain from a checkpoint; we will adopt this
// as the root message and go with it...
var userDoc = Patch . apply ( realtime . uncommitted , realtime . authDoc ) ;
Common . assert ( ! Common . PARANOIA || realtime . userInterfaceContent === userDoc ) ;
var fixUserDocPatch = Patch . invert ( realtime . uncommitted , realtime . authDoc ) ;
Patch . addOperation ( fixUserDocPatch ,
Operation . create ( 0 , realtime . authDoc . length , msg . content . operations [ 0 ] . toInsert ) ) ;
fixUserDocPatch =
Patch . simplify ( fixUserDocPatch , userDoc , realtime . config . operationSimplify ) ;
msg . parentCount = 0 ;
realtime . rootMessage = realtime . best = msg ;
realtime . authDoc = msg . content . operations [ 0 ] . toInsert ;
realtime.uncommitted = Patch.create(Sha.hex_sha256(realtime.authDoc));
realtime . uncommittedDocLength = realtime . authDoc . length ;
pushUIPatch ( realtime , fixUserDocPatch ) ;
if ( Common . PARANOIA ) { realtime . userInterfaceContent = realtime . authDoc ; }
return ;
} else {
// we'll probably find the missing parent later.
debug ( realtime , "Patch [" + msg . hashOf + "] not connected to root" ) ;
if ( Common . PARANOIA ) { check ( realtime ) ; }
return ;
}
}
// If this message fills in a hole in the chain which makes another patch better, swap to the
// best child of this patch since the longest chain always wins.
msg = getBestChild ( realtime , msg ) ;
msg . isFromMe = isFromMe ;
var patch = msg . content ;
// Find the ancestor of this patch which is in the main chain, reverting as necessary
var toRevert = [ ] ;
var commonAncestor = realtime . best ;
if ( ! isAncestorOf ( realtime , realtime . best , msg ) ) {
var pcBest = parentCount ( realtime , realtime . best ) ;
var pcMsg = parentCount ( realtime , msg ) ;
if ( pcBest < pcMsg
|| ( pcBest === pcMsg
&& Common . strcmp ( realtime . best . hashOf , msg . hashOf ) > 0 ) )
{
// switch chains
while ( commonAncestor && ! isAncestorOf ( realtime , commonAncestor , msg ) ) {
toRevert . push ( commonAncestor ) ;
commonAncestor = getParent ( realtime , commonAncestor ) ;
}
Common . assert ( commonAncestor ) ;
} else {
debug ( realtime , "Patch [" + msg . hashOf + "] chain is [" + pcMsg + "] best chain is [" + pcBest + "]" ) ;
if ( Common . PARANOIA ) { check ( realtime ) ; }
return ;
}
}
// Find the parents of this patch which are not in the main chain.
var toApply = [ ] ;
var current = msg ;
do {
toApply . unshift ( current ) ;
current = getParent ( realtime , current ) ;
Common . assert ( current ) ;
} while ( current !== commonAncestor ) ;
var authDocAtTimeOfPatch = realtime . authDoc ;
for ( var i = 0 ; i < toRevert . length ; i ++ ) {
Common . assert ( typeof ( toRevert [ i ] . content . inverseOf ) !== 'undefined' ) ;
authDocAtTimeOfPatch = Patch . apply ( toRevert [ i ] . content . inverseOf , authDocAtTimeOfPatch ) ;
}
// toApply.length-1 because we do not want to apply the new patch.
for ( var i = 0 ; i < toApply . length - 1 ; i ++ ) {
if ( typeof ( toApply [ i ] . content . inverseOf ) === 'undefined' ) {
toApply [ i ] . content . inverseOf = Patch . invert ( toApply [ i ] . content , authDocAtTimeOfPatch ) ;
toApply [ i ] . content . inverseOf . inverseOf = toApply [ i ] . content ;
}
authDocAtTimeOfPatch = Patch . apply ( toApply [ i ] . content , authDocAtTimeOfPatch ) ;
}
if (Sha.hex_sha256(authDocAtTimeOfPatch) !== patch.parentHash) {
debug ( realtime , "patch [" + msg . hashOf + "] parentHash is not valid" ) ;
if ( Common . PARANOIA ) { check ( realtime ) ; }
if ( Common . TESTING ) { throw new Error ( ) ; }
delete realtime . messages [ msg . hashOf ] ;
return ;
}
if ( patch . isCheckpoint ) {
// Ok, we have a checkpoint patch.
// If the chain length is not equal to checkpointInterval then this patch is invalid.
var i = 0 ;
var checkpointP ;
for ( var m = getParent ( realtime , msg ) ; m ; m = getParent ( realtime , m ) ) {
if ( m . content . isCheckpoint ) {
if ( checkpointP ) {
checkpointP = m ;
break ;
}
checkpointP = m ;
}
}
if ( checkpointP && checkpointP !== realtime . rootMessage ) {
var point = parentCount ( realtime , checkpointP ) ;
if ( ( point % realtime . config . checkpointInterval ) !== 0 ) {
debug ( realtime , "checkpoint [" + msg . hashOf + "] at invalid point [" + point + "]" ) ;
if ( Common . PARANOIA ) { check ( realtime ) ; }
if ( Common . TESTING ) { throw new Error ( ) ; }
delete realtime . messages [ msg . hashOf ] ;
return ;
}
// Time to prune some old messages from the chain
debug ( realtime , "checkpoint [" + msg . hashOf + "]" ) ;
for ( var m = getParent ( realtime , checkpointP ) ; m ; m = getParent ( realtime , m ) ) {
debug ( realtime , "pruning [" + m . hashOf + "]" ) ;
delete realtime . messages [ m . hashOf ] ;
delete realtime . messagesByParent [ m . hashOf ] ;
}
realtime . rootMessage = checkpointP ;
}
} else {
var simplePatch =
Patch . simplify ( patch , authDocAtTimeOfPatch , realtime . config . operationSimplify ) ;
if ( ! Patch . equals ( simplePatch , patch ) ) {
debug ( realtime , "patch [" + msg . hashOf + "] can be simplified" ) ;
if ( Common . PARANOIA ) { check ( realtime ) ; }
if ( Common . TESTING ) { throw new Error ( ) ; }
delete realtime . messages [ msg . hashOf ] ;
return ;
}
}
patch . inverseOf = Patch . invert ( patch , authDocAtTimeOfPatch ) ;
patch . inverseOf . inverseOf = patch ;
realtime . uncommitted = Patch . simplify (
realtime . uncommitted , realtime . authDoc , realtime . config . operationSimplify ) ;
var oldUserInterfaceContent = Patch . apply ( realtime . uncommitted , realtime . authDoc ) ;
if ( Common . PARANOIA ) {
Common . assert ( oldUserInterfaceContent === realtime . userInterfaceContent ) ;
}
// Derive the patch for the user's uncommitted work
var uncommittedPatch = Patch . invert ( realtime . uncommitted , realtime . authDoc ) ;
for ( var i = 0 ; i < toRevert . length ; i ++ ) {
debug ( realtime , "reverting [" + toRevert [ i ] . hashOf + "]" ) ;
uncommittedPatch = Patch . merge ( uncommittedPatch , toRevert [ i ] . content . inverseOf ) ;
revertPatch ( realtime , toRevert [ i ] . isFromMe , toRevert [ i ] . content ) ;
}
for ( var i = 0 ; i < toApply . length ; i ++ ) {
debug ( realtime , "applying [" + toApply [ i ] . hashOf + "]" ) ;
uncommittedPatch = Patch . merge ( uncommittedPatch , toApply [ i ] . content ) ;
applyPatch ( realtime , toApply [ i ] . isFromMe , toApply [ i ] . content ) ;
}
uncommittedPatch = Patch . merge ( uncommittedPatch , realtime . uncommitted ) ;
uncommittedPatch = Patch . simplify (
uncommittedPatch , oldUserInterfaceContent , realtime . config . operationSimplify ) ;
realtime . uncommittedDocLength += Patch . lengthChange ( uncommittedPatch ) ;
realtime . best = msg ;
if ( Common . PARANOIA ) {
// apply the uncommittedPatch to the userInterface content.
var newUserInterfaceContent = Patch . apply ( uncommittedPatch , oldUserInterfaceContent ) ;
Common . assert ( realtime . userInterfaceContent . length === realtime . uncommittedDocLength ) ;
Common . assert ( newUserInterfaceContent === realtime . userInterfaceContent ) ;
}
pushUIPatch ( realtime , uncommittedPatch ) ;
if ( Common . PARANOIA ) { check ( realtime ) ; }
} ;
var getDepthOfState = function ( content , minDepth , realtime ) {
Common . assert ( typeof ( content ) === 'string' ) ;
// minimum depth is an optional argument which defaults to zero
var minDepth = minDepth || 0 ;
if ( minDepth === 0 && realtime . authDoc === content ) {
return 0 ;
}
var hash = Sha.hex_sha256(content);
var patchMsg = realtime . best ;
var depth = 0 ;
do {
if ( depth < minDepth ) {
// you haven't exceeded the minimum depth
} else {
//console.log("Exceeded minimum depth");
// you *have* exceeded the minimum depth
if ( patchMsg . content . parentHash === hash ) {
// you found it!
return depth + 1 ;
}
}
depth ++ ;
} while ( ( patchMsg = getParent ( realtime , patchMsg ) ) ) ;
return - 1 ;
} ;
module . exports . create = function ( conf ) {
var realtime = ChainPad . create ( conf ) ;
var out = {
onPatch : enterChainPad ( realtime , function ( handler ) {
Common . assert ( typeof ( handler ) === 'function' ) ;
realtime . patchHandlers . push ( handler ) ;
} ) ,
patch : enterChainPad ( realtime , function ( patch , x , y ) {
if ( typeof ( patch ) === 'number' ) {
// Actually they meant to call realtime.change()
out . change ( patch , x , y ) ;
return ;
}
doPatch ( realtime , patch ) ;
} ) ,
onChange : enterChainPad ( realtime , function ( handler ) {
Common . assert ( typeof ( handler ) === 'function' ) ;
realtime . changeHandlers . push ( handler ) ;
} ) ,
change : enterChainPad ( realtime , function ( offset , count , chars ) {
if ( count === 0 && chars === '' ) { return ; }
doOperation ( realtime , Operation . create ( offset , count , chars ) ) ;
} ) ,
onMessage : enterChainPad ( realtime , function ( handler ) {
Common . assert ( typeof ( handler ) === 'function' ) ;
realtime . messageHandlers . push ( handler ) ;
} ) ,
message : enterChainPad ( realtime , function ( message ) {
handleMessage ( realtime , message , false ) ;
} ) ,
start : enterChainPad ( realtime , function ( ) {
if ( realtime . syncSchedule ) { unschedule ( realtime , realtime . syncSchedule ) ; }
realtime . syncSchedule = schedule ( realtime , function ( ) { sync ( realtime ) ; } ) ;
} ) ,
abort : enterChainPad ( realtime , function ( ) {
realtime . aborted = true ;
realtime . schedules . forEach ( function ( s ) { clearTimeout ( s ) } ) ;
} ) ,
sync : enterChainPad ( realtime , function ( ) { sync ( realtime ) ; } ) ,
getAuthDoc : function ( ) { return realtime . authDoc ; } ,
getUserDoc : function ( ) { return Patch . apply ( realtime . uncommitted , realtime . authDoc ) ; } ,
getDepthOfState : function ( content , minDepth ) {
return getDepthOfState ( content , minDepth , realtime ) ;
}
} ;
return out ;
} ;
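// Illustrative usage of the exported API (comment only, not part of the
// original source); the transport wiring shown here is hypothetical:
//   var realtime = ChainPad.create({ userName: 'alice', initialState: '' });
//   realtime.onMessage(function (msg, cb) {
//       // hand msg to the transport; call cb() once the server acknowledges it
//   });
//   realtime.onPatch(function () { console.log(realtime.getUserDoc()); });
//   realtime.change(0, 0, 'hello');    // insert 'hello' at offset 0
//   realtime.message(incomingString);  // feed messages from the server back in
//   realtime.start();                  // begin the sync loop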
} ,
"Operation.js" : function ( module , exports , require ) {
/*
 * Copyright 2014 XWiki SAS
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
var Common = require ( './Common' ) ;
var Operation = module . exports ;
var check = Operation.check = function (op, docLength_opt) {
Common.assert(op.type === 'Operation');
Common.assert(Common.isUint(op.offset));
Common.assert(Common.isUint(op.toRemove));
Common.assert(typeof(op.toInsert) === 'string');
Common.assert(op.toRemove > 0 || op.toInsert.length > 0);
Common.assert(typeof(docLength_opt) !== 'number' || op.offset + op.toRemove <= docLength_opt);
};
var create = Operation . create = function ( offset , toRemove , toInsert ) {
var out = {
type : 'Operation' ,
offset : offset || 0 ,
toRemove : toRemove || 0 ,
toInsert : toInsert || '' ,
} ;
if ( Common . PARANOIA ) { check ( out ) ; }
return out ;
} ;
var toObj = Operation . toObj = function ( op ) {
if ( Common . PARANOIA ) { check ( op ) ; }
return [ op . offset , op . toRemove , op . toInsert ] ;
} ;
var fromObj = Operation . fromObj = function ( obj ) {
Common . assert ( Array . isArray ( obj ) && obj . length === 3 ) ;
return create ( obj [ 0 ] , obj [ 1 ] , obj [ 2 ] ) ;
} ;
var clone = Operation . clone = function ( op ) {
return create ( op . offset , op . toRemove , op . toInsert ) ;
} ;
/**
 * @param op the operation to apply.
 * @param doc the content to apply the operation on
 */
var apply = Operation . apply = function ( op , doc )
{
if ( Common . PARANOIA ) {
check ( op ) ;
Common . assert ( typeof ( doc ) === 'string' ) ;
Common . assert ( op . offset + op . toRemove <= doc . length ) ;
}
return doc . substring ( 0 , op . offset ) + op . toInsert + doc . substring ( op . offset + op . toRemove ) ;
} ;
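// Illustrative usage (comment only, not part of the original source):
//   Operation.apply(Operation.create(5, 0, ' brave'), 'hello world')
//   // => 'hello brave world'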
var invert = Operation . invert = function ( op , doc ) {
if ( Common . PARANOIA ) {
check ( op ) ;
Common . assert ( typeof ( doc ) === 'string' ) ;
Common . assert ( op . offset + op . toRemove <= doc . length ) ;
}
var rop = clone ( op ) ;
rop . toInsert = doc . substring ( op . offset , op . offset + op . toRemove ) ;
rop . toRemove = op . toInsert . length ;
return rop ;
} ;
var simplify = Operation . simplify = function ( op , doc ) {
if ( Common . PARANOIA ) {
check ( op ) ;
Common . assert ( typeof ( doc ) === 'string' ) ;
Common . assert ( op . offset + op . toRemove <= doc . length ) ;
}
var rop = invert ( op , doc ) ;
op = clone ( op ) ;
var minLen = Math . min ( op . toInsert . length , rop . toInsert . length ) ;
var i ;
for ( i = 0 ; i < minLen && rop . toInsert [ i ] === op . toInsert [ i ] ; i ++ ) ;
op . offset += i ;
op . toRemove -= i ;
op . toInsert = op . toInsert . substring ( i ) ;
rop . toInsert = rop . toInsert . substring ( i ) ;
if ( rop . toInsert . length === op . toInsert . length ) {
for ( i = rop . toInsert . length - 1 ; i >= 0 && rop . toInsert [ i ] === op . toInsert [ i ] ; i -- ) ;
op . toInsert = op . toInsert . substring ( 0 , i + 1 ) ;
op . toRemove = i + 1 ;
}
if ( op . toRemove === 0 && op . toInsert . length === 0 ) { return null ; }
return op ;
} ;
var equals = Operation . equals = function ( opA , opB ) {
return ( opA . toRemove === opB . toRemove
&& opA . toInsert === opB . toInsert
&& opA . offset === opB . offset ) ;
} ;
var lengthChange = Operation . lengthChange = function ( op )
{
if ( Common . PARANOIA ) { check ( op ) ; }
return op . toInsert . length - op . toRemove ;
} ;
/*
 * @return the merged operation OR null if the result of the merger is a noop.
 */
var merge = Operation . merge = function ( oldOpOrig , newOpOrig ) {
if ( Common . PARANOIA ) {
check ( newOpOrig ) ;
check ( oldOpOrig ) ;
}
var newOp = clone ( newOpOrig ) ;
var oldOp = clone ( oldOpOrig ) ;
var offsetDiff = newOp . offset - oldOp . offset ;
if ( newOp . toRemove > 0 ) {
var origOldInsert = oldOp . toInsert ;
oldOp . toInsert = (
oldOp . toInsert . substring ( 0 , offsetDiff )
+ oldOp . toInsert . substring ( offsetDiff + newOp . toRemove )
) ;
newOp . toRemove -= ( origOldInsert . length - oldOp . toInsert . length ) ;
if ( newOp . toRemove < 0 ) { newOp . toRemove = 0 ; }
oldOp . toRemove += newOp . toRemove ;
newOp . toRemove = 0 ;
}
if ( offsetDiff < 0 ) {
oldOp . offset += offsetDiff ;
oldOp . toInsert = newOp . toInsert + oldOp . toInsert ;
} else if ( oldOp . toInsert . length === offsetDiff ) {
oldOp . toInsert = oldOp . toInsert + newOp . toInsert ;
} else if ( oldOp . toInsert . length > offsetDiff ) {
oldOp . toInsert = (
oldOp . toInsert . substring ( 0 , offsetDiff )
+ newOp . toInsert
+ oldOp . toInsert . substring ( offsetDiff )
) ;
} else {
throw new Error ( "should never happen\n" +
JSON . stringify ( [ oldOpOrig , newOpOrig ] , null , ' ' ) ) ;
}
if ( oldOp . toInsert === '' && oldOp . toRemove === 0 ) {
return null ;
}
if ( Common . PARANOIA ) { check ( oldOp ) ; }
return oldOp ;
} ;
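// Illustrative behaviour (comment only, not part of the original source):
// a deletion which falls inside text inserted by the earlier operation simply
// shrinks that insertion.
//   Operation.merge(Operation.create(0, 0, 'abcd'), Operation.create(1, 2, ''))
//   // => { offset: 0, toRemove: 0, toInsert: 'ad', ... }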
/**
 * If the new operation deletes what the old op inserted, or inserts content in the middle of
 * the old op's content, or if they abut one another, they should be merged.
 */
var shouldMerge = Operation . shouldMerge = function ( oldOp , newOp ) {
if ( Common . PARANOIA ) {
check ( oldOp ) ;
check ( newOp ) ;
}
if ( newOp . offset < oldOp . offset ) {
return ( oldOp . offset <= ( newOp . offset + newOp . toRemove ) ) ;
} else {
return ( newOp . offset <= ( oldOp . offset + oldOp . toInsert . length ) ) ;
}
} ;
/**
 * Rebase newOp against oldOp.
 *
 * @param oldOp the earlier operation to have happened.
 * @param newOp the later operation to have happened (in time).
 * @return either the untouched newOp if it need not be rebased,
 *         the rebased clone of newOp if it needs rebasing, or
 *         null if newOp and oldOp must be merged.
 */
var rebase = Operation . rebase = function ( oldOp , newOp ) {
if ( Common . PARANOIA ) {
check ( oldOp ) ;
check ( newOp ) ;
}
if ( newOp . offset < oldOp . offset ) { return newOp ; }
newOp = clone ( newOp ) ;
newOp . offset += oldOp . toRemove ;
newOp . offset -= oldOp . toInsert . length ;
return newOp ;
} ;
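// Illustrative behaviour (comment only, not part of the original source):
// the newer operation's offset is shifted by the older operation's length
// change (toRemove - toInsert.length) when it lands at or after the older one.
//   Operation.rebase(Operation.create(0, 0, 'abc'), Operation.create(5, 1, 'x'))
//   // => { offset: 2, toRemove: 1, toInsert: 'x', ... }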
/**
 * This is a lossy and dirty algorithm; everything else is nice but transformation
 * has to be lossy because both operations have the same base and they diverge.
 * This could be made nicer and/or tailored to a specific data type.
 *
 * @param toTransform the operation which is converted (cloned below, no longer *MUTATED*).
 * @param transformBy an existing operation which also has the same base.
 * @return toTransform *or* null if the result is a no-op.
 */
var transform0 = Operation . transform0 = function ( text , toTransformOrig , transformByOrig ) {
// Cloning the original transformations makes this algorithm such that it
// **DOES NOT MUTATE ANYMORE**
var toTransform = Operation . clone ( toTransformOrig ) ;
var transformBy = Operation . clone ( transformByOrig ) ;
if ( toTransform . offset > transformBy . offset ) {
if ( toTransform . offset > transformBy . offset + transformBy . toRemove ) {
// simple rebase
toTransform . offset -= transformBy . toRemove ;
toTransform . offset += transformBy . toInsert . length ;
return toTransform ;
}
// goto the end, anything you deleted that they also deleted should be skipped.
var newOffset = transformBy . offset + transformBy . toInsert . length ;
toTransform . toRemove = 0 ; //-= (newOffset - toTransform.offset);
if ( toTransform . toRemove < 0 ) { toTransform . toRemove = 0 ; }
toTransform . offset = newOffset ;
if ( toTransform . toInsert . length === 0 && toTransform . toRemove === 0 ) {
return null ;
}
return toTransform ;
}
if ( toTransform . offset + toTransform . toRemove < transformBy . offset ) {
return toTransform ;
}
toTransform . toRemove = transformBy . offset - toTransform . offset ;
if ( toTransform . toInsert . length === 0 && toTransform . toRemove === 0 ) {
return null ;
}
return toTransform ;
} ;
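// Illustrative behaviour (comment only, not part of the original source):
// an insertion positioned after a concurrent insertion is pushed right by the
// length of the concurrent insertion.
//   Operation.transform0('abc', Operation.create(2, 0, 'X'), Operation.create(0, 0, 'yy'))
//   // => { offset: 4, toRemove: 0, toInsert: 'X', ... }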
/**
 * @param toTransform the operation which is converted
 * @param transformBy an existing operation which also has the same base.
 * @return a modified clone of toTransform *or* toTransform itself if no change was made.
 */
var transform = Operation . transform = function ( text , toTransform , transformBy , transformFunction ) {
if ( Common . PARANOIA ) {
check ( toTransform ) ;
check ( transformBy ) ;
}
transformFunction = transformFunction || transform0 ;
toTransform = clone ( toTransform ) ;
var result = transformFunction ( text , toTransform , transformBy ) ;
if ( Common . PARANOIA && result ) { check ( result ) ; }
return result ;
} ;
/** Used for testing. */
var random = Operation . random = function ( docLength ) {
Common . assert ( Common . isUint ( docLength ) ) ;
var offset = Math . floor ( Math . random ( ) * 100000000 % docLength ) || 0 ;
var toRemove = Math . floor ( Math . random ( ) * 100000000 % ( docLength - offset ) ) || 0 ;
var toInsert = '' ;
do {
var toInsert = Common . randomASCII ( Math . floor ( Math . random ( ) * 20 ) ) ;
} while ( toRemove === 0 && toInsert === '' ) ;
return create ( offset , toRemove , toInsert ) ;
} ;
}
};
ChainPad = r("ChainPad.js");
}());