#!/usr/bin/env node
var firebase = require('firebase'),
    optimist = require('optimist'),
    ProgressBar = require('progress'),
    assert = require('assert'),
    path = require('path'),
    util = require('util');

// We try to write data in ~1MB chunks (in reality this often ends up being much smaller, due to the JSON structure).
var CHUNK_SIZE = 1024*1024;

// Keep ~50 writes outstanding at a time (this increases throughput, so we're not delayed by server round-trips).
var OUTSTANDING_WRITE_COUNT = 50;

var argv = optimist
  .usage('Usage: $0')

  .demand('database_url')
  .describe('database_url', 'Firebase database URL (e.g. https://databaseName.firebaseio.com).')
  .alias('d', 'database_url')

  .demand('path')
  .describe('path', 'Database path (e.g. /products).')
  .alias('p', 'path')

  .demand('json')
  .describe('json', 'The JSON file to import.')
  .alias('j', 'json')

  .boolean('merge')
  .describe('merge', 'Write the top-level children without overwriting the whole parent.')
  .alias('m', 'merge')

  .boolean('force')
  .describe('force', 'Don\'t prompt before overwriting data.')

  .describe('service_account', 'Path to a JSON file with your service account credentials.')
  .alias('s', 'service_account')

  .argv;

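// Initializes the Firebase app, waits for the connection to be established (or bails out after 10 seconds),
// then prompts for confirmation before starting the import.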
function main() {
  firebase.initializeApp({
    databaseURL: argv.database_url,
    serviceAccount: argv.service_account,
  });
  var db = firebase.database();
  var ref = db.ref(argv.path);

  var connFailTimeout = setTimeout(function() {
    console.log('Failed to connect to Firebase.');
    process.exit(1);
  }, 10000);

  function ready() {
    clearTimeout(connFailTimeout);
    promptToContinue(ref, function() { start(ref); });
  }

  var connFunc = db.ref('.info/connected').on('value', function(s) {
    if (s.val() === true) {
      db.ref('.info/connected').off('value', connFunc);
      ready();
    }
  });
}

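// Warns the user about what is going to be overwritten and waits for <enter>, unless --force was given.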
function promptToContinue(ref, next) {
  if (argv.force) {
    next();
  } else {
    if (argv.merge) {
      console.log('Each top-level child in ' + argv.json + ' will be written under ' + ref.toString() + '. If a child already exists, it will be overwritten.');
    } else {
      console.log('All data at ' + ref.toString() + ' will be overwritten.');
    }
    console.log('Press <enter> to proceed, Ctrl-C to abort.');
    process.stdin.resume();
    process.stdin.once('data', next);
  }
}

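// Loads the JSON file, splits it into chunks, and kicks off the upload, clearing the destination first
// unless --merge was given.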
function start(ref) {
  var file = path.resolve(argv.json);
  console.log('Reading ' + file + '... (may take a minute)');
  var json = require(file);

  var clearFirst = true, splitTopLevel = false;
  if (argv.merge) {
    clearFirst = false;
    // Need to split into chunks at the top level to ensure we don't overwrite the parent.
    splitTopLevel = true;
  }

  console.log('Preparing JSON for import... (may take a minute)');
  var chunks = createChunks(ref, json, splitTopLevel);

  if (clearFirst) {
    ref.remove(function(error) {
      if (error) throw(error);
      uploadChunks(chunks);
    });
  } else {
    uploadChunks(chunks);
  }
}

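// Uploads the prepared chunks and exits once they have all been written.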
function uploadChunks(chunks) {
  var uploader = new ChunkUploader(chunks);
  uploader.go(function() {
    console.log('\nImport completed.');
    process.exit();
  });
}

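// Breaks the JSON into chunks of roughly CHUNK_SIZE bytes; if the data is small enough (and no split is
// forced), it is returned as a single chunk.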
function createChunks(ref, json, forceSplit) {
  var chunkRes = chunkInternal(ref, json, forceSplit);
  if (!chunkRes.chunks) {
    return [{ref: ref, json: json}];
  } else {
    return chunkRes.chunks;
  }
}

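// Recursively estimates the serialized size of `json` and returns { chunks: null, size: ... } when it can be
// written with a single set(), or { chunks: [...], size: ... } when it needs to be split into multiple writes.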
function chunkInternal(ref, json, forceSplit) {
  var size = 0;
  var priority = null;
  var jsonIsObject = json !== null && typeof json === 'object';
  if (jsonIsObject) {
    size += 2; // {}
  }

  if (jsonIsObject && ('.priority' in json)) {
    size += 12; // ".priority":
    priority = json['.priority'];
    size += json['.priority'].toString().length;
  }

  var value = json;
  if (jsonIsObject && ('.value' in json)) {
    size += 9; // ".value":
    value = json['.value'];
  }

  if (value === null || typeof value !== 'object') {
    // It's a leaf, it can't be chunked.
    size += JSON.stringify(value).length;
    return { chunks: null, size: size };
  } else {
    // It's a node with children.
    var chunks = [];
    var splitUp = false;
    for(var key in json) {
      if (key !== '.priority') {
        size += key.length + 3;

        var chunkRes = chunkInternal(ref.child(key), json[key]);
        size += chunkRes.size;

        if (chunkRes.chunks) {
          for(var i = 0; i < chunkRes.chunks.length; i++) {
            chunks.push(chunkRes.chunks[i]);
          }
          // One of the children had to be broken into chunks. We have to break all of them.
          splitUp = true;
        } else {
          chunks.push({ref: ref.child(key), json: json[key]});
        }
      }
    }

    // Add priority last since it must be added after at least one child.
    if (priority !== null) {
      chunks.push({ref: ref, priority: priority});
    }

    if (forceSplit || splitUp || size >= CHUNK_SIZE) {
      return { chunks: chunks, size: size };
    } else {
      return { chunks: null, size: size };
    }
  }
}

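// Tracks upload progress across chunks, showing a progress bar when attached to a TTY.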
function ChunkUploader(chunks) {
  this.next = 0;
  this.chunks = chunks;
  if (process.stdout.isTTY) {
    this.bar = new ProgressBar('Importing [:bar] :percent (:current/:total)', { width: 50, total: chunks.length, incomplete: ' ' });
  } else {
    console.log('Importing... (may take a while)');
  }
}

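// Starts the upload, keeping up to OUTSTANDING_WRITE_COUNT writes in flight at once.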
ChunkUploader.prototype.go = function(onComplete) {
  this.onComplete = onComplete;

  for(var i = 0; i < OUTSTANDING_WRITE_COUNT && i < this.chunks.length; i++) {
    this.uploadNext();
  }
};

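// Writes the next chunk; when a write finishes it either queues another chunk or signals completion.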
ChunkUploader.prototype.uploadNext = function() {
  var chunkNum = this.next, chunk = this.chunks[chunkNum];
  assert(chunkNum < this.chunks.length);
  this.next++;

  var self = this;
  var onComplete = function(error) {
    if (error) {
      console.log('Error uploading to ' + chunk.ref.toString() + ': ' + util.inspect(chunk.json));
      console.error(error);
      throw error;
    }

    if (process.stdout.isTTY && self.bar) {
      self.bar.tick();
    }

    if (chunkNum === self.chunks.length - 1) {
      self.onComplete();
    } else {
      // Upload the next chunk.
      assert(self.next === self.chunks.length || self.next === chunkNum + OUTSTANDING_WRITE_COUNT);
      if (self.next < self.chunks.length)
        self.uploadNext();
    }
  };

  if ('json' in chunk) {
    chunk.ref.set(chunk.json, onComplete);
  } else {
    assert('priority' in chunk);
    chunk.ref.setPriority(chunk.priority, onComplete);
  }
};

main();