Merge branch 'staging' into small-config

pull/1/head
ansuz 5 years ago
commit 02fc343727

@@ -1,4 +1,3 @@
/*@flow*/
/*
globals module
*/
@@ -200,6 +199,9 @@ module.exports = {
*/
maxUploadSize: 20 * 1024 * 1024,
// XXX
premiumUploadSize: 100 * 1024 * 1024,
/* =====================
* DATABASE VOLUMES
* ===================== */

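The two size limits above are related: premiumUploadSize only takes effect when it is a number at least as large as maxUploadSize (see the validation block further down in this commit); otherwise it stays disabled and everyone gets the base limit. A minimal sketch of the intended arithmetic, using the values from this hunk:

    var maxUploadSize = 20 * 1024 * 1024;      // 20971520 bytes (~20MB), everyone
    var premiumUploadSize = 100 * 1024 * 1024; // 104857600 bytes (~100MB), paid plans
    // anything <= maxUploadSize is accepted without a quota lookup;
    // sizes in between trigger a plan check; >= premiumUploadSize is rejected
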
@@ -12,9 +12,41 @@ Upload.status = function (Env, safeKey, filesize, _cb) { // FIXME FILES
// reject anything that is not a non-negative number
if (typeof(filesize) !== 'number' ||
filesize < 0) { return void cb('E_INVALID_SIZE'); }
if (filesize >= Env.maxUploadSize) { return cb('TOO_LARGE'); }
nThen(function (w) {
// if the proposed upload size is within the regular limit
// jump ahead to the next block
if (filesize <= Env.maxUploadSize) { return; }
// if larger uploads aren't explicitly enabled then reject them
if (typeof(Env.premiumUploadSize) !== 'number') {
w.abort();
return void cb('TOO_LARGE');
}
// otherwise go and retrieve info about the user's quota
Pinning.getLimit(Env, safeKey, w(function (err, limit) {
if (err) {
w.abort();
return void cb("E_BAD_LIMIT");
}
var plan = limit[1];
// see if they have a special plan, reject them if not
if (plan === '') {
w.abort();
return void cb('TOO_LARGE');
}
// and that they're not over the greater limit
if (filesize >= Env.premiumUploadSize) {
w.abort();
return void cb("TOO_LARGE");
}
// fallthrough will proceed to the next block
}));
}).nThen(function (w) {
var abortAndCB = Util.both(w.abort, cb);
Env.blobStore.status(safeKey, w(function (err, inProgress) {
// if there's an error something is weird

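The block above appears to replace the old unconditional rejection (the `filesize >= Env.maxUploadSize` early return, still visible above) with staged checks inside nThen: `w()` defers the next block until the wrapped callback fires, and `w.abort()` skips the remaining blocks entirely, which is nthen's documented contract. Setting aside the async quota lookup, the decision logic reduces to this standalone sketch (names mine, not CryptPad code):

    // returns an error string, or null if the upload may proceed
    var checkSize = function (filesize, maxUploadSize, premiumUploadSize, plan) {
        if (filesize <= maxUploadSize) { return null; }     // within the base limit
        if (typeof(premiumUploadSize) !== 'number') { return 'TOO_LARGE'; }
        if (plan === '') { return 'TOO_LARGE'; }            // no paid plan on record
        if (filesize >= premiumUploadSize) { return 'TOO_LARGE'; }
        return null; // paid plan, within the premium limit
    };
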
@@ -43,6 +43,7 @@ module.exports.create = function (config, cb) {
//historyKeeper: config.historyKeeper,
intervals: config.intervals || {},
maxUploadSize: config.maxUploadSize || (20 * 1024 * 1024),
premiumUploadSize: false, // overridden below...
Sessions: {},
paths: {},
//msgStore: config.store,
@@ -70,6 +71,13 @@ module.exports.create = function (config, cb) {
domain: config.domain
};
(function () {
var pes = config.premiumUploadSize;
if (!isNaN(pes) && pes >= Env.maxUploadSize) {
Env.premiumUploadSize = pes;
}
}());
var paths = Env.paths;
var keyOrDefaultString = function (key, def) {

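One observation about the guard above (not part of the patch): `isNaN` coerces its argument, so a numeric string in the config would slip through this check, and the `typeof` test in Upload.status would then treat premium uploads as disabled:

    isNaN("104857600");                 // false -- a string passes the guard
    "104857600" >= 20 * 1024 * 1024;    // true  -- the comparison coerces too
    typeof("104857600") !== 'number';   // true  -- so Upload.status rejects it
    // coercing once with Number(config.premiumUploadSize) would avoid this
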
@@ -7,6 +7,7 @@ var Fs = require('fs');
var Package = require('./package.json');
var Path = require("path");
var nThen = require("nthen");
var Util = require("./lib/common-util");
var config = require("./lib/load-config");
@@ -34,7 +35,9 @@ if (process.env.PACKAGE) {
FRESH_KEY = +new Date();
}
var configCache = {};
config.flushCache = function () {
configCache = {};
FRESH_KEY = +new Date();
if (!(DEV_MODE || FRESH_MODE)) { FRESH_MODE = true; }
if (!config.log) { return; }
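
flushCache cooperates with the response cache added below: the cache key embeds FRESH_KEY (via cacheString()), so bumping FRESH_KEY orphans every existing entry rather than deleting it, while resetting configCache drops them all immediately. Roughly, with a hypothetical host for illustration:

    host + ':' + cacheString();  // e.g. "cryptpad.fr:-1588000000000" before a flush
    // after flushCache(): FRESH_KEY changes, so the next request computes a new
    // key, misses the cache, and re-renders the template
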
@@ -143,38 +146,72 @@ try {
});
} catch (e) { console.error("Can't parse admin keys"); }
// TODO: cache the /api/config response instead of re-computing it for each request
app.get('/api/config', function(req, res){
// TODO precompute any data that isn't dynamic to save some CPU time
var host = req.headers.host.replace(/\:[0-9]+/, '');
res.setHeader('Content-Type', 'text/javascript');
res.send('define(function(){\n' + [
'var obj = ' + JSON.stringify({
requireConf: {
waitSeconds: 600,
urlArgs: 'ver=' + Package.version + (FRESH_KEY? '-' + FRESH_KEY: '') + (DEV_MODE? '-' + (+new Date()): ''),
},
removeDonateButton: (config.removeDonateButton === true),
allowSubscriptions: (config.allowSubscriptions === true),
websocketPath: config.externalWebsocketURL,
httpUnsafeOrigin: config.httpUnsafeOrigin.replace(/^\s*/, ''),
adminEmail: config.adminEmail,
adminKeys: admins,
inactiveTime: config.inactiveTime,
supportMailbox: config.supportMailboxPublicKey
}, null, '\t'),
'obj.httpSafeOrigin = ' + (function () {
if (config.httpSafeOrigin) { return '"' + config.httpSafeOrigin + '"'; }
if (config.httpSafePort) {
return "(function () { return window.location.origin.replace(/\:[0-9]+$/, ':" +
config.httpSafePort + "'); }())";
}
return 'window.location.origin';
}()),
'return obj',
'});'
].join(';\n'));
});
var serveConfig = (function () {
// if dev mode: never cache
var cacheString = function () {
return (FRESH_KEY? '-' + FRESH_KEY: '') + (DEV_MODE? '-' + (+new Date()): '');
};
var template = function (host) {
return [
'define(function(){',
'var obj = ' + JSON.stringify({
requireConf: {
waitSeconds: 600,
urlArgs: 'ver=' + Package.version + cacheString(),
},
removeDonateButton: (config.removeDonateButton === true),
allowSubscriptions: (config.allowSubscriptions === true),
websocketPath: config.externalWebsocketURL,
httpUnsafeOrigin: config.httpUnsafeOrigin.replace(/^\s*/, ''),
adminEmail: config.adminEmail,
adminKeys: admins,
inactiveTime: config.inactiveTime,
supportMailbox: config.supportMailboxPublicKey
}, null, '\t'),
'obj.httpSafeOrigin = ' + (function () {
if (config.httpSafeOrigin) { return '"' + config.httpSafeOrigin + '"'; }
if (config.httpSafePort) {
return "(function () { return window.location.origin.replace(/\:[0-9]+$/, ':" +
config.httpSafePort + "'); }())";
}
return 'window.location.origin';
}()),
'return obj',
'});'
].join(';\n')
};
var cleanUp = {};
return function (req, res) {
var host = req.headers.host.replace(/\:[0-9]+/, '');
res.setHeader('Content-Type', 'text/javascript');
// don't cache anything if you're in dev mode
if (DEV_MODE) {
return void res.send(template(host));
}
// generate a lookup key for the cache
var cacheKey = host + ':' + cacheString();
// if there's nothing cached for that key...
if (!configCache[cacheKey]) {
// generate the response and cache it in memory
configCache[cacheKey] = template(host);
// and create a function to conditionally evict cache entries
// which have not been accessed in the last 20 seconds
cleanUp[cacheKey] = Util.throttle(function () {
delete cleanUp[cacheKey];
delete configCache[cacheKey];
}, 20000);
}
// successive calls to this function reset the eviction timer
cleanUp[cacheKey]();
return void res.send(configCache[cacheKey]);
};
}());
app.get('/api/config', serveConfig);
var four04_path = Path.resolve(__dirname + '/customize.dist/404.html');
var custom_four04_path = Path.resolve(__dirname + '/customize/404.html');

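The eviction in serveConfig assumes that Util.throttle(fn, ms) returns a function which restarts a timer on each call and only runs fn once calls stop arriving for ms, i.e. entries are evicted 20 seconds after their last access. A minimal sketch of that contract (CryptPad's real helper lives in lib/common-util and may differ in detail):

    var throttle = function (f, ms) {
        var timeout;
        return function () {
            clearTimeout(timeout);          // a call within `ms` cancels eviction
            timeout = setTimeout(f, ms);    // and schedules it again
        };
    };
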
@@ -2762,7 +2762,7 @@ define([
});
if (keys.length < 2) { return keys; }
var mult = asc ? 1 : -1;
var getProp = function (el, prop) {
var getProp = function (el) {
if (folder && root[el] && manager.isSharedFolder(root[el])) {
var title = manager.getSharedFolderData(root[el]).title || el;
return title.toLowerCase();
@@ -2777,13 +2777,19 @@ define([
return hrefData.type;
}
if (prop === 'atime' || prop === 'ctime') {
return new Date(data[prop]);
return typeof(data[prop]) === "number" ? data[prop] : new Date(data[prop]);
}
return (manager.getTitle(id) || "").toLowerCase();
};
var props = {};
keys.forEach(function (k) {
props[k] = getProp(k);
});
keys.sort(function(a, b) {
if (getProp(a, prop) < getProp(b, prop)) { return mult * -1; }
if (getProp(a, prop) > getProp(b, prop)) { return mult * 1; }
var _a = props[a];
var _b = props[b];
if (_a < _b) { return mult * -1; }
if (_a > _b) { return mult; }
return 0;
});
return keys;

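The sort change is the usual decorate-sort pattern: getProp is evaluated once per key in a single pass, instead of twice per comparison inside the comparator, and atime/ctime now compare as raw numbers where possible rather than allocating a Date per comparison:

    // before: ~2 * n * log(n) getProp calls, each possibly a `new Date(...)`
    // after:  n getProp calls, then cheap value lookups in `props`
    1588000000000 < 1588000000001;                     // plain number comparison
    new Date(1588000000000) < new Date(1588000000001); // same result, two allocations
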
@@ -720,13 +720,17 @@ define([
var fixTemplate = function () {
if (sharedFolder) { return; }
if (!Array.isArray(files[TEMPLATE])) { debug("TEMPLATE was not an array"); files[TEMPLATE] = []; }
files[TEMPLATE] = Util.deduplicateString(files[TEMPLATE].slice());
var us = files[TEMPLATE];
var rootFiles = exp.getFiles([ROOT]).slice();
var dedup = Util.deduplicateString(files[TEMPLATE]);
if (dedup.length !== files[TEMPLATE].length) {
files[TEMPLATE] = dedup;
}
var us = files[TEMPLATE].slice();
var rootFiles = exp.getFiles([ROOT]);
var toClean = [];
us.forEach(function (el, idx) {
if (!exp.isFile(el, true) || rootFiles.indexOf(el) !== -1) {
toClean.push(el);
return;
}
if (typeof el === "string") {
// We have an old file (href) which is not in filesData: add it
@@ -735,6 +739,7 @@ define([
href: exp.cryptor.encrypt(el)
};
us[idx] = id;
return;
}
if (typeof el === "number") {
var data = files[FILES_DATA][el];
@@ -866,7 +871,7 @@ define([
var sf = files[SHARED_FOLDERS];
var rootFiles = exp.getFiles([ROOT]);
var root = exp.find([ROOT]);
var parsed, secret, el;
var parsed /*, secret */, el;
for (var id in sf) {
el = sf[id];
id = Number(id);
@@ -878,8 +883,7 @@ define([
// Fix undefined hash
parsed = Hash.parsePadUrl(href || el.roHref);
secret = Hash.getSecrets('drive', parsed.hash, el.password);
if (!secret.keys) {
if (!parsed || !parsed.hash || parsed.hash === "undefined") {
delete sf[id];
continue;
}

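Two details in the drive fixes above are worth spelling out. Util.deduplicateString presumably returns a new array with duplicates removed and order preserved, which is why comparing lengths is enough to detect whether anything changed; writing files[TEMPLATE] back only when it did change presumably avoids dirtying the change-tracked drive object needlessly. Assumed contract:

    // assumed behaviour of Util.deduplicateString (not shown in this diff):
    Util.deduplicateString(['a', 'b', 'a']); // => ['a', 'b']

The shared-folder check, meanwhile, now validates the parsed hash directly instead of deriving keys first, so entries with a missing or literal "undefined" hash are dropped.
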
@@ -158,10 +158,15 @@ define([
});
onError = function (e) {
// XXX if we included the max upload sizes in /api/config
// then we could check if a file is too large without going to the server...
queue.inProgress = false;
queue.next();
if (e === 'TOO_LARGE') {
$pv.text(Messages.upload_tooLargeBrief);
// XXX translate
// instead of "This file exceeds the maximum upload size"
// use "the maximum upload size allowed for your account"
return void UI.alert(Messages.upload_tooLarge);
}
if (e === 'NOT_ENOUGH_SPACE') {

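The XXX above suggests the obvious follow-up: once /api/config advertises the upload limits, the client could reject oversized files before contacting the server. A hypothetical sketch (maxUploadSize is not actually exposed by this patch):

    // hypothetical client-side pre-check
    var tooLargeLocally = function (file, apiConfig) {
        return typeof(apiConfig.maxUploadSize) === 'number' &&
            file.size > apiConfig.maxUploadSize;
    };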