1
0
Fork 0
mirror of https://github.com/Eggbertx/gochan.git synced 2025-09-16 07:56:24 -07:00

Move page building functions to building.go

This commit is contained in:
Joshua Merrell 2018-10-22 11:01:51 -07:00
parent 5af4074325
commit 6a77bb9424
10 changed files with 567 additions and 1240 deletions

View file

@ -1,31 +0,0 @@
// Drop down link menu
var down_arrow_symbol = "▼";
var up_arrow_symbol = "▲";
var $jq = jQuery.noConflict();

// DropDownMenu adds a toggle link to div#topmenu.
// `title` (lowercased) is used as the element id; `html` is the menu body.
var DropDownMenu = function(title, html) {
	this.html = html;
	this.buttonTitle = title;
	$jq("div#topmenu").append("<a href=\"#\" style=\"float:right;\" class=\"dropdown-button\" id=\""+title.toLowerCase()+"\">"+title+"</a>");
	this.button = $jq("div#topmenu a#"+title.toLowerCase());
	// BUG FIX: original read `$jq("a#"+title.)` — a syntax error.
	this.button_jq = $jq("a#"+title.toLowerCase());
	this.button_jq.click(function() {
		// BUG FIX: original appended an unterminated tag ("<div id="+...);
		// emit a well-formed, quoted div containing the menu body.
		$jq(document.body).append("<div id=\""+title.toLowerCase()+"\">"+html+"</div>");
	});
};
// Placeholder methods — bodies were empty in the original and are kept so.
DropDownMenu.prototype.open = function() {
};
DropDownMenu.prototype.close = function() {
};
DropDownMenu.prototype.isOpen = function() {
};

View file

@ -1,670 +0,0 @@
/*!{id:msgpack.js,ver:1.05,license:"MIT",author:"uupaa.js@gmail.com"}*/
// === msgpack ===
// MessagePack -> http://msgpack.sourceforge.net/
// Everything below lives in one closure; the inner functions share the
// module-level state (_buf, _idx, _error, lookup tables) declared here.
this.msgpack || (function(globalScope) {

globalScope.msgpack = {
    pack:     msgpackpack,      // msgpack.pack(data:Mix,
                                //     toString:Boolean = false):ByteArray/ByteString/false
                                //  [1][mix to String]    msgpack.pack({}, true) -> "..."
                                //  [2][mix to ByteArray] msgpack.pack({})       -> [...]
    unpack:   msgpackunpack,    // msgpack.unpack(data:BinaryString/ByteArray):Mix
                                //  [1][String to mix]    msgpack.unpack("...")  -> {}
                                //  [2][ByteArray to mix] msgpack.unpack([...])  -> {}
    worker:   "msgpack.js",     // msgpack.worker - WebWorkers script filename
    upload:   msgpackupload,    // msgpack.upload(url:String, option:Hash, callback:Function)
    download: msgpackdownload   // msgpack.download(url:String, option:Hash, callback:Function)
};

// Shared module state, mutated by the pack/unpack helpers below.
var _ie        = /MSIE/.test(navigator.userAgent),
    _bin2num   = {},    // BinaryStringToNumber { "\00": 0, ... "\ff": 255 }
    _num2bin   = {},    // NumberToBinaryString { 0: "\00", ... 255: "\ff" }
    _num2b64   = ("ABCDEFGHIJKLMNOPQRSTUVWXYZ" +
                  "abcdefghijklmnopqrstuvwxyz0123456789+/").split(""),
    _buf       = [],    // decode buffer
    _idx       = 0,     // decode buffer[index]
    _error     = 0,     // msgpack.pack() error code. 1 = CYCLIC_REFERENCE_ERROR
    _isArray   = Array.isArray || (function(mix) {
                     return Object.prototype.toString.call(mix) === "[object Array]";
                 }),
    _toString  = String.fromCharCode, // CharCode/ByteArray to String
    _MAX_DEPTH = 512;   // encode() recursion limit before flagging CYCLIC_REFERENCE_ERROR

// for WebWorkers Code Block
// When this script itself runs inside a Worker, register a message handler
// so the page can offload pack/unpack work to it.
self.importScripts && (onmessage = function(event) {
    if (event.data.method === "pack") {
        postMessage(base64encode(msgpackpack(event.data.data)));
    } else {
        postMessage(msgpackunpack(event.data.data));
    }
});
// msgpack.pack
// Serialize `data` to MessagePack bytes.
function msgpackpack(data,       // @param Mix:
                     toString) { // @param Boolean(= false):
    // @return ByteArray/BinaryString/false: false on cyclic-reference error
    // [1][mix to String]    msgpack.pack({}, true) -> "..."
    // [2][mix to ByteArray] msgpack.pack({})       -> [...]
    _error = 0;
    var packed = encode([], data, 0);
    if (_error) {
        return false;
    }
    if (toString) {
        return byteArrayToByteString(packed);
    }
    return packed;
}
// msgpack.unpack
// Deserialize a BinaryString or ByteArray produced by msgpack.pack.
function msgpackunpack(data) { // @param BinaryString/ByteArray:
    // @return Mix/undefined: undefined on error
    // [1][String to mix]    msgpack.unpack("...") -> {}
    // [2][ByteArray to mix] msgpack.unpack([...]) -> {}
    if (typeof data === "string") {
        _buf = toByteArray(data);
    } else {
        _buf = data;
    }
    _idx = -1;
    return decode(); // mix or undefined
}
// inner - encoder
// Recursively serializes `mix` into MessagePack bytes, appending to `rv`.
// Sets the module-level _error flag (1 = CYCLIC_REFERENCE_ERROR) when the
// recursion depth exceeds _MAX_DEPTH.
function encode(rv,      // @param ByteArray: result
                mix,     // @param Mix: source data
                depth) { // @param Number: depth
    var size, i, iz, c, pos,        // for UTF8.encode, Array.encode, Hash.encode
        high, low, sign, exp, frac; // for IEEE754

    if (mix == null) { // null or undefined -> 0xc0 ( null )
        rv.push(0xc0);
    } else if (mix === false) { // false -> 0xc2 ( false )
        rv.push(0xc2);
    } else if (mix === true) { // true -> 0xc3 ( true )
        rv.push(0xc3);
    } else {
        switch (typeof mix) {
        case "number":
            if (mix !== mix) { // isNaN
                rv.push(0xcb, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff); // quiet NaN
            } else if (mix === Infinity) {
                rv.push(0xcb, 0x7f, 0xf0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00); // positive infinity
            } else if (Math.floor(mix) === mix) { // int or uint
                if (mix < 0) {
                    // int: pick the narrowest signed encoding that fits
                    if (mix >= -32) { // negative fixnum
                        rv.push(0xe0 + mix + 32);
                    } else if (mix > -0x80) { // int 8
                        rv.push(0xd0, mix + 0x100);
                    } else if (mix > -0x8000) { // int 16
                        mix += 0x10000;
                        rv.push(0xd1, mix >> 8, mix & 0xff);
                    } else if (mix > -0x80000000) { // int 32
                        mix += 0x100000000;
                        rv.push(0xd2, mix >>> 24, (mix >> 16) & 0xff,
                                      (mix >> 8) & 0xff, mix & 0xff);
                    } else { // int 64: split into two 32-bit words
                        high = Math.floor(mix / 0x100000000);
                        low = mix & 0xffffffff;
                        rv.push(0xd3, (high >> 24) & 0xff, (high >> 16) & 0xff,
                                      (high >> 8) & 0xff, high & 0xff,
                                      (low >> 24) & 0xff, (low >> 16) & 0xff,
                                      (low >> 8) & 0xff, low & 0xff);
                    }
                } else {
                    // uint: pick the narrowest unsigned encoding that fits
                    if (mix < 0x80) {
                        rv.push(mix); // positive fixnum
                    } else if (mix < 0x100) { // uint 8
                        rv.push(0xcc, mix);
                    } else if (mix < 0x10000) { // uint 16
                        rv.push(0xcd, mix >> 8, mix & 0xff);
                    } else if (mix < 0x100000000) { // uint 32
                        rv.push(0xce, mix >>> 24, (mix >> 16) & 0xff,
                                      (mix >> 8) & 0xff, mix & 0xff);
                    } else { // uint 64: split into two 32-bit words
                        high = Math.floor(mix / 0x100000000);
                        low = mix & 0xffffffff;
                        rv.push(0xcf, (high >> 24) & 0xff, (high >> 16) & 0xff,
                                      (high >> 8) & 0xff, high & 0xff,
                                      (low >> 24) & 0xff, (low >> 16) & 0xff,
                                      (low >> 8) & 0xff, low & 0xff);
                    }
                }
            } else { // double
                // Hand-rolled IEEE754 binary64 encoding:
                // 1 sign bit | 11 exponent bits | 52 fraction bits,
                // emitted as high then low 32-bit words.
                // THX!! @edvakf
                // http://javascript.g.hatena.ne.jp/edvakf/20101128/1291000731
                sign = mix < 0;
                sign && (mix *= -1);
                // add offset 1023 to ensure positive
                // 0.6931471805599453 = Math.LN2;
                exp = ((Math.log(mix) / 0.6931471805599453) + 1023) | 0;
                // shift 52 - (exp - 1023) bits to make integer part exactly 53 bits,
                // then throw away trash less than decimal point
                frac = mix * Math.pow(2, 52 + 1023 - exp);
                low = frac & 0xffffffff;
                sign && (exp |= 0x800);
                high = ((frac / 0x100000000) & 0xfffff) | (exp << 20);
                rv.push(0xcb, (high >> 24) & 0xff, (high >> 16) & 0xff,
                              (high >> 8) & 0xff, high & 0xff,
                              (low >> 24) & 0xff, (low >> 16) & 0xff,
                              (low >> 8) & 0xff, low & 0xff);
            }
            break;
        case "string":
            // UTF-8 encode in place after a placeholder header byte; the
            // header is rewritten once the encoded byte length is known.
            // http://d.hatena.ne.jp/uupaa/20101128
            iz = mix.length;
            pos = rv.length; // keep rewrite position
            rv.push(0); // placeholder
            // utf8.encode (code points above 0xFFFF are not handled)
            for (i = 0; i < iz; ++i) {
                c = mix.charCodeAt(i);
                if (c < 0x80) { // ASCII(0x00 ~ 0x7f)
                    rv.push(c & 0x7f);
                } else if (c < 0x0800) {
                    rv.push(((c >>> 6) & 0x1f) | 0xc0, (c & 0x3f) | 0x80);
                } else if (c < 0x10000) {
                    rv.push(((c >>> 12) & 0x0f) | 0xe0,
                            ((c >>> 6) & 0x3f) | 0x80, (c & 0x3f) | 0x80);
                }
            }
            size = rv.length - pos - 1;
            if (size < 32) {
                rv[pos] = 0xa0 + size; // rewrite placeholder (FixRaw)
            } else if (size < 0x10000) { // 16
                rv.splice(pos, 1, 0xda, size >> 8, size & 0xff);
            } else if (size < 0x100000000) { // 32
                rv.splice(pos, 1, 0xdb,
                          size >>> 24, (size >> 16) & 0xff,
                          (size >> 8) & 0xff, size & 0xff);
            }
            break;
        default: // array or hash
            // Depth check doubles as cycle protection for self-referencing
            // structures.
            if (++depth >= _MAX_DEPTH) {
                _error = 1; // CYCLIC_REFERENCE_ERROR
                return rv = []; // clear
            }
            if (_isArray(mix)) {
                size = mix.length;
                if (size < 16) {
                    rv.push(0x90 + size); // FixArray
                } else if (size < 0x10000) { // 16
                    rv.push(0xdc, size >> 8, size & 0xff);
                } else if (size < 0x100000000) { // 32
                    rv.push(0xdd, size >>> 24, (size >> 16) & 0xff,
                            (size >> 8) & 0xff, size & 0xff);
                }
                for (i = 0; i < size; ++i) {
                    encode(rv, mix[i], depth);
                }
            } else { // hash
                // Element count is unknown until iteration finishes, so a
                // placeholder header is written and patched afterwards.
                // http://d.hatena.ne.jp/uupaa/20101129
                pos = rv.length; // keep rewrite position
                rv.push(0); // placeholder
                size = 0;
                for (i in mix) {
                    ++size;
                    encode(rv, i, depth);
                    encode(rv, mix[i], depth);
                }
                if (size < 16) {
                    rv[pos] = 0x80 + size; // rewrite placeholder (FixMap)
                } else if (size < 0x10000) { // 16
                    rv.splice(pos, 1, 0xde, size >> 8, size & 0xff);
                } else if (size < 0x100000000) { // 32
                    rv.splice(pos, 1, 0xdf,
                              size >>> 24, (size >> 16) & 0xff,
                              (size >> 8) & 0xff, size & 0xff);
                }
            }
        }
    }
    return rv;
}
// inner - decoder
// Reads one MessagePack value from the shared buffer (_buf) starting at
// the byte after _idx, advancing _idx past everything consumed. Recurses
// for array/map elements. Returns undefined on an unrecognized type byte.
function decode() { // @return Mix:
    var size, i, iz, c, num = 0,
        sign, exp, frac, ary, hash,
        buf = _buf, type = buf[++_idx];

    if (type >= 0xe0) { // Negative FixNum (111x xxxx) (-32 ~ -1)
        return type - 0x100;
    }
    if (type < 0xc0) {
        if (type < 0x80) { // Positive FixNum (0xxx xxxx) (0 ~ 127)
            return type;
        }
        // Fixed-size container/raw types carry their length in the low
        // bits; normalize to the generic type byte, with num = length.
        if (type < 0x90) { // FixMap (1000 xxxx)
            num = type - 0x80;
            type = 0x80;
        } else if (type < 0xa0) { // FixArray (1001 xxxx)
            num = type - 0x90;
            type = 0x90;
        } else { // if (type < 0xc0) { // FixRaw (101x xxxx)
            num = type - 0xa0;
            type = 0xa0;
        }
    }
    switch (type) {
    case 0xc0: return null;
    case 0xc2: return false;
    case 0xc3: return true;
    case 0xca: // float (IEEE754 binary32, decoded by hand)
        num = buf[++_idx] * 0x1000000 + (buf[++_idx] << 16) +
              (buf[++_idx] << 8) + buf[++_idx];
        sign = num & 0x80000000; // 1bit
        exp = (num >> 23) & 0xff; // 8bits
        frac = num & 0x7fffff; // 23bits
        if (!num || num === 0x80000000) { // 0.0 or -0.0
            return 0;
        }
        if (exp === 0xff) { // NaN or Infinity
            return frac ? NaN : Infinity;
        }
        return (sign ? -1 : 1) *
                    (frac | 0x800000) * Math.pow(2, exp - 127 - 23); // 127: bias
    case 0xcb: // double (IEEE754 binary64, high word first)
        num = buf[++_idx] * 0x1000000 + (buf[++_idx] << 16) +
              (buf[++_idx] << 8) + buf[++_idx];
        sign = num & 0x80000000; // 1bit
        exp = (num >> 20) & 0x7ff; // 11bits
        frac = num & 0xfffff; // 52bits - 32bits (high word)
        if (!num || num === 0x80000000) { // 0.0 or -0.0
            _idx += 4; // skip the (all zero) low word
            return 0;
        }
        if (exp === 0x7ff) { // NaN or Infinity
            _idx += 4; // skip the low word
            return frac ? NaN : Infinity;
        }
        num = buf[++_idx] * 0x1000000 + (buf[++_idx] << 16) +
              (buf[++_idx] << 8) + buf[++_idx];
        return (sign ? -1 : 1) *
                    ((frac | 0x100000) * Math.pow(2, exp - 1023 - 20) // 1023: bias
                     + num * Math.pow(2, exp - 1023 - 52));
    // 0xcf: uint64, 0xce: uint32, 0xcd: uint16
    // (cases below deliberately fall through, accumulating bytes into num)
    case 0xcf: num = buf[++_idx] * 0x1000000 + (buf[++_idx] << 16) +
                     (buf[++_idx] << 8) + buf[++_idx];
               return num * 0x100000000 +
                      buf[++_idx] * 0x1000000 + (buf[++_idx] << 16) +
                      (buf[++_idx] << 8) + buf[++_idx];
    case 0xce: num += buf[++_idx] * 0x1000000 + (buf[++_idx] << 16); // fall through
    case 0xcd: num += buf[++_idx] << 8; // fall through
    case 0xcc: return num + buf[++_idx];
    // 0xd3: int64, 0xd2: int32, 0xd1: int16, 0xd0: int8
    case 0xd3: num = buf[++_idx];
               if (num & 0x80) { // sign -> avoid overflow
                   // Two's complement via per-byte XOR, then +1 and negate.
                   return ((num ^ 0xff) * 0x100000000000000 +
                           (buf[++_idx] ^ 0xff) * 0x1000000000000 +
                           (buf[++_idx] ^ 0xff) * 0x10000000000 +
                           (buf[++_idx] ^ 0xff) * 0x100000000 +
                           (buf[++_idx] ^ 0xff) * 0x1000000 +
                           (buf[++_idx] ^ 0xff) * 0x10000 +
                           (buf[++_idx] ^ 0xff) * 0x100 +
                           (buf[++_idx] ^ 0xff) + 1) * -1;
               }
               return num * 0x100000000000000 +
                      buf[++_idx] * 0x1000000000000 +
                      buf[++_idx] * 0x10000000000 +
                      buf[++_idx] * 0x100000000 +
                      buf[++_idx] * 0x1000000 +
                      buf[++_idx] * 0x10000 +
                      buf[++_idx] * 0x100 +
                      buf[++_idx];
    case 0xd2: num = buf[++_idx] * 0x1000000 + (buf[++_idx] << 16) +
                     (buf[++_idx] << 8) + buf[++_idx];
               return num < 0x80000000 ? num : num - 0x100000000; // 0x80000000 * 2
    case 0xd1: num = (buf[++_idx] << 8) + buf[++_idx];
               return num < 0x8000 ? num : num - 0x10000; // 0x8000 * 2
    case 0xd0: num = buf[++_idx];
               return num < 0x80 ? num : num - 0x100; // 0x80 * 2
    // 0xdb: raw32, 0xda: raw16, 0xa0: raw ( string ) -- fall through
    case 0xdb: num += buf[++_idx] * 0x1000000 + (buf[++_idx] << 16);
    case 0xda: num += (buf[++_idx] << 8) + buf[++_idx];
    case 0xa0: // utf8.decode (num = byte length of the raw payload)
        for (ary = [], i = _idx, iz = i + num; i < iz; ) {
            c = buf[++i]; // lead byte
            ary.push(c < 0x80 ? c : // ASCII(0x00 ~ 0x7f)
                     c < 0xe0 ? ((c & 0x1f) << 6 | (buf[++i] & 0x3f)) :
                                ((c & 0x0f) << 12 | (buf[++i] & 0x3f) << 6
                                                  | (buf[++i] & 0x3f)));
        }
        _idx = i;
        // Large arrays would blow the fromCharCode.apply argument limit;
        // fall back to the chunk-safe converter above 10240 chars.
        return ary.length < 10240 ? _toString.apply(null, ary)
                                  : byteArrayToByteString(ary);
    // 0xdf: map32, 0xde: map16, 0x80: map -- fall through
    case 0xdf: num += buf[++_idx] * 0x1000000 + (buf[++_idx] << 16);
    case 0xde: num += (buf[++_idx] << 8) + buf[++_idx];
    case 0x80: hash = {};
               while (num--) {
                   // make key/value pair
                   // NOTE(review): the key is decoded inline assuming a
                   // FixRaw (<32 byte) string key -- confirm producers
                   // never emit longer or non-string keys.
                   size = buf[++_idx] - 0xa0;
                   for (ary = [], i = _idx, iz = i + size; i < iz; ) {
                       c = buf[++i]; // lead byte
                       ary.push(c < 0x80 ? c : // ASCII(0x00 ~ 0x7f)
                                c < 0xe0 ? ((c & 0x1f) << 6 | (buf[++i] & 0x3f)) :
                                           ((c & 0x0f) << 12 | (buf[++i] & 0x3f) << 6
                                                             | (buf[++i] & 0x3f)));
                   }
                   _idx = i;
                   hash[_toString.apply(null, ary)] = decode();
               }
               return hash;
    // 0xdd: array32, 0xdc: array16, 0x90: array -- fall through
    case 0xdd: num += buf[++_idx] * 0x1000000 + (buf[++_idx] << 16);
    case 0xdc: num += (buf[++_idx] << 8) + buf[++_idx];
    case 0x90: ary = [];
               while (num--) {
                   ary.push(decode());
               }
               return ary;
    }
    return; // unknown type byte -> undefined
}
// inner - byteArray To ByteString
// Converts a ByteArray to a binary String; tries a single fromCharCode
// call first, then falls back to a per-byte lookup-table translation.
function byteArrayToByteString(byteArray) { // @param ByteArray
                                            // @return String
    // Fast path. http://d.hatena.ne.jp/uupaa/20101128
    try {
        return _toString.apply(this, byteArray); // toString
    } catch (err) {
        ; // avoid "Maximum call stack size exceeded"
    }
    // Slow path: translate one byte at a time via _num2bin.
    var num2bin = _num2bin;
    var out = [];
    var total = byteArray.length;
    for (var n = 0; n < total; ++n) {
        out[n] = num2bin[byteArray[n]];
    }
    return out.join("");
}
// msgpack.download - load from server
// Fetches a MessagePack payload via GET and hands the decoded result to
// `callback(data, option, { status, ok })`.
function msgpackdownload(url,      // @param String:
                         option,   // @param Hash: { worker, timeout, before, after }
                                   //  option.worker  - Boolean(= false): true is use WebWorkers
                                   //  option.timeout - Number(= 10): timeout sec
                                   //  option.before  - Function: before(xhr, option)
                                   //  option.after   - Function: after(xhr, option, { status, ok })
                         callback) { // @param Function: callback(data, option, { status, ok })
    // Force a binary GET, then delegate everything to the shared helper.
    option.method = "GET";
    option.binary = true;
    ajax(url, option, callback);
}
// msgpack.upload - save to server
// Packs option.data, base64-encodes it, and PUTs it to `url`. Packing can
// be offloaded to a WebWorker when option.worker is set and Workers exist.
function msgpackupload(url,      // @param String:
                       option,   // @param Hash: { data, worker, timeout, before, after }
                                 //  option.data    - Mix:
                                 //  option.worker  - Boolean(= false): true is use WebWorkers
                                 //  option.timeout - Number(= 10): timeout sec
                                 //  option.before  - Function: before(xhr, option)
                                 //  option.after   - Function: after(xhr, option, { status, ok })
                       callback) { // @param Function: callback(data, option, { status, ok })
                                 //  data - String: responseText
    option.method = "PUT";
    option.binary = true;
    if (!(option.worker && globalScope.Worker)) {
        // pack and base64 encode inline, then fire the request
        option.data = base64encode(msgpackpack(option.data));
        ajax(url, option, callback);
        return;
    }
    // Offload the packing to a WebWorker; the request fires from its
    // onmessage handler once the packed payload comes back.
    var worker = new Worker(msgpack.worker);
    worker.onmessage = function(event) {
        option.data = event.data;
        ajax(url, option, callback);
    };
    worker.postMessage({ method: "pack", data: option.data });
}
// inner - XMLHttpRequest wrapper shared by msgpackupload/msgpackdownload
function ajax(url,      // @param String:
              option,   // @param Hash: { data, ifmod, method, timeout,
                        //                header, binary, before, after, worker }
                        //  option.data    - Mix: upload data
                        //  option.ifmod   - Boolean: true is "If-Modified-Since" header
                        //  option.method  - String: "GET", "POST", "PUT"
                        //  option.timeout - Number(= 10): timeout sec
                        //  option.header  - Hash(= {}): { key: "value", ... }
                        //  option.binary  - Boolean(= false): true is binary data
                        //  option.before  - Function: before(xhr, option)
                        //  option.after   - Function: after(xhr, option, { status, ok })
                        //  option.worker  - Boolean(= false): true is use WebWorkers
              callback) { // @param Function: callback(data, option, { status, ok })
                        //  data   - String/Mix/null:
                        //  option - Hash:
                        //  status - Number: HTTP status code
                        //  ok     - Boolean:

    // Completion handler. `run` guards against the watchdog, unload
    // handler and readyState change all firing for the same request.
    function readyStateChange() {
        if (xhr.readyState === 4) {
            var data, status = xhr.status, worker, byteArray,
                rv = { status: status, ok: status >= 200 && status < 300 };
            if (!run++) {
                if (method === "PUT") {
                    data = rv.ok ? xhr.responseText : "";
                } else {
                    if (rv.ok) {
                        if (option.worker && globalScope.Worker) {
                            // Unpack in a WebWorker; callback fires from
                            // its onmessage handler, so bail out here.
                            worker = new Worker(msgpack.worker);
                            worker.onmessage = function(event) {
                                callback(event.data, option, rv);
                            };
                            worker.postMessage({ method: "unpack",
                                                 data: xhr.responseText });
                            gc();
                            return;
                        } else {
                            byteArray = _ie ? toByteArrayIE(xhr)
                                            : toByteArray(xhr.responseText);
                            data = msgpackunpack(byteArray);
                        }
                    }
                }
                after && after(xhr, option, rv);
                callback(data, option, rv);
                gc();
            }
        }
    }

    // Failure path: timeout, thrown send(), or page unload.
    function ng(abort, status) {
        if (!run++) {
            var rv = { status: status || 400, ok: false };
            after && after(xhr, option, rv);
            callback(null, option, rv);
            gc(abort);
        }
    }

    // Cleanup: optionally abort, stop the watchdog, release the xhr and
    // drop the unload listener.
    function gc(abort) {
        abort && xhr && xhr.abort && xhr.abort();
        watchdog && (clearTimeout(watchdog), watchdog = 0);
        xhr = null;
        globalScope.addEventListener &&
            globalScope.removeEventListener("beforeunload", ng, false);
    }

    var watchdog = 0,
        method = option.method || "GET",
        header = option.header || {},
        before = option.before,
        after = option.after,
        data = option.data || null,
        xhr = globalScope.XMLHttpRequest ? new XMLHttpRequest() :
              globalScope.ActiveXObject ? new ActiveXObject("Microsoft.XMLHTTP") :
              null,
        run = 0, i,
        overrideMimeType = "overrideMimeType",
        setRequestHeader = "setRequestHeader",
        getbinary = method === "GET" && option.binary;

    try {
        xhr.onreadystatechange = readyStateChange;
        xhr.open(method, url, true); // ASync
        before && before(xhr, option);
        // Binary GETs need the charset override so raw bytes survive in
        // responseText.
        getbinary && xhr[overrideMimeType] &&
            xhr[overrideMimeType]("text/plain; charset=x-user-defined");
        data &&
            xhr[setRequestHeader]("Content-Type",
                                  "application/x-www-form-urlencoded");
        for (i in header) {
            xhr[setRequestHeader](i, header[i]);
        }
        globalScope.addEventListener &&
            globalScope.addEventListener("beforeunload", ng, false); // 400: Bad Request
        xhr.send(data);
        watchdog = setTimeout(function() {
            ng(1, 408); // 408: Request Time-out
        }, (option.timeout || 10) * 1000);
    } catch (err) {
        ng(0, 400); // 400: Bad Request
    }
}
// inner - BinaryString To ByteArray
// Translates each character of a binary string to its byte value via the
// _bin2num lookup table, processing the remainder head first and the bulk
// in unrolled groups of eight.
function toByteArray(data) { // @param BinaryString: "\00\01"
                             // @return ByteArray: [0x00, 0x01]
    var bin2num = _bin2num;
    var chars = data.split("");
    var total = chars.length;
    var result = [];
    var pos = -1;
    // Head: total % 8 characters, one at a time.
    var count = total % 8;
    while (count--) {
        ++pos;
        result[pos] = bin2num[chars[pos]];
    }
    // Bulk: the remaining multiple-of-8 characters, unrolled.
    count = total >> 3;
    while (count--) {
        result.push(bin2num[chars[++pos]], bin2num[chars[++pos]],
                    bin2num[chars[++pos]], bin2num[chars[++pos]],
                    bin2num[chars[++pos]], bin2num[chars[++pos]],
                    bin2num[chars[++pos]], bin2num[chars[++pos]]);
    }
    return result;
}
// inner - BinaryString to ByteArray (IE path)
// IE cannot expose binary responseText, so the bytes are read from
// responseBody through the VBScript shims vblen()/vbstr() injected at the
// bottom of this file. Each VB string character packs two response bytes
// (low byte, then high byte).
function toByteArrayIE(xhr) {
    var rv = [], data, remain,
        charCodeAt = "charCodeAt",
        loop, v0, v1, v2, v3, v4, v5, v6, v7,
        i = -1, iz;
    iz = vblen(xhr);          // byte length of the response body
    data = vbstr(xhr);        // response body as a VB string
    loop = Math.ceil(iz / 2); // characters to read (2 bytes per char)
    // Head: loop % 8 characters, one at a time.
    remain = loop % 8;
    while (remain--) {
        v0 = data[charCodeAt](++i); // 0x00,0x01 -> 0x0100
        rv.push(v0 & 0xff, v0 >> 8);
    }
    // Bulk: the rest unrolled in groups of eight characters.
    remain = loop >> 3;
    while (remain--) {
        v0 = data[charCodeAt](++i);
        v1 = data[charCodeAt](++i);
        v2 = data[charCodeAt](++i);
        v3 = data[charCodeAt](++i);
        v4 = data[charCodeAt](++i);
        v5 = data[charCodeAt](++i);
        v6 = data[charCodeAt](++i);
        v7 = data[charCodeAt](++i);
        rv.push(v0 & 0xff, v0 >> 8, v1 & 0xff, v1 >> 8,
                v2 & 0xff, v2 >> 8, v3 & 0xff, v3 >> 8,
                v4 & 0xff, v4 >> 8, v5 & 0xff, v5 >> 8,
                v6 & 0xff, v6 >> 8, v7 & 0xff, v7 >> 8);
    }
    // Odd byte counts carry the chr(0) pad appended by vbstr; drop it.
    iz % 2 && rv.pop();
    return rv;
}
// inner - base64.encode
// Encodes a ByteArray as a Base64 string. Uses native btoa() when the
// host provides it; otherwise encodes 24-bit groups by hand.
function base64encode(data) { // @param ByteArray:
                              // @return Base64String:
    var rv = [],
        c = 0, i = -1, iz = data.length,
        // number of '=' padding chars for length % 3 == 0 / 1 / 2
        pad = [0, 2, 1][data.length % 3],
        num2bin = _num2bin,
        num2b64 = _num2b64;
    if (globalScope.btoa) {
        // NOTE(review): the final iteration reads data[iz] (undefined);
        // num2bin[undefined] is undefined, which Array#join renders as "",
        // so the output is still correct -- but the off-by-one is fragile.
        while (i < iz) {
            rv.push(num2bin[data[++i]]);
        }
        return btoa(rv.join(""));
    }
    // Manual path: three input bytes per iteration. For lengths not a
    // multiple of 3 this reads past the end of `data` (undefined coerces
    // to 0 in the shifts); the bogus trailing characters are overwritten
    // by the '=' padding below.
    --iz;
    while (i < iz) {
        c = (data[++i] << 16) | (data[++i] << 8) | (data[++i]); // 24bit
        rv.push(num2b64[(c >> 18) & 0x3f],
                num2b64[(c >> 12) & 0x3f],
                num2b64[(c >> 6) & 0x3f],
                num2b64[ c & 0x3f]);
    }
    pad > 1 && (rv[rv.length - 2] = "=");
    pad > 0 && (rv[rv.length - 1] = "=");
    return rv.join("");
}
// --- init ---
// Build the byte <-> single-character lookup tables used by the
// BinaryString conversions above.
(function() {
    var i = 0, v;
    for (; i < 0x100; ++i) {
        v = _toString(i);
        _bin2num[v] = i; // "\00" -> 0x00
        _num2bin[i] = v; // 0 -> "\00"
    }
    // http://twitter.com/edvakf/statuses/15576483807
    // Also accept code points 0xF780-0xF7FF as aliases for bytes
    // 0x80-0xFF when reading binary responseText.
    for (i = 0x80; i < 0x100; ++i) { // [Webkit][Gecko]
        _bin2num[_toString(0xf700 + i)] = i; // "\f780" -> 0x80
    }
})();

// IE lacks binary responseText; inject the VBScript helpers used by
// toByteArrayIE (vblen/vbstr) to read responseBody instead.
_ie && document.write('<script type="text/vbscript">\
Function vblen(b)vblen=LenB(b.responseBody)End Function\n\
Function vbstr(b)vbstr=CStr(b.responseBody)+chr(0)End Function</'+'script>');

})(this);

View file

@ -76,7 +76,8 @@ CREATE TABLE IF NOT EXISTS `DBPREFIXboards` (
`redirect_to_thread` TINYINT(1) UNSIGNED NOT NULL DEFAULT 0,
`require_file` TINYINT(1) UNSIGNED NOT NULL DEFAULT 0,
`enable_catalog` TINYINT(1) UNSIGNED NOT NULL DEFAULT 1,
PRIMARY KEY (`id`)
PRIMARY KEY (`id`),
UNIQUE (`dir`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=0;
CREATE TABLE IF NOT EXISTS `DBPREFIXembeds` (
@ -214,7 +215,8 @@ CREATE TABLE IF NOT EXISTS `DBPREFIXstaff` (
`boards` VARCHAR(128) NOT NULL DEFAULT 'all',
`added_on` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
`last_active` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (`id`)
PRIMARY KEY (`id`),
UNIQUE (`username`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4;
-- create a temp table with the same columns as the posts table to be stored in memory

521
src/building.go Normal file
View file

@ -0,0 +1,521 @@
package main
import (
"encoding/json"
"fmt"
"os"
"path"
"strconv"
"syscall"
"time"
)
// buildFrontPage builds the site front page (index.html) from
// templates/front.html, filling in the configured front page entries and
// the most recent posts. Returns an HTML string describing the result.
// TODO: provide alternative layouts (like 4chan, tinyboard, etc)
func buildFrontPage() (html string) {
	initTemplates()
	var front_arr []interface{}
	var recent_posts_arr []interface{}

	os.Remove(path.Join(config.DocumentRoot, "index.html"))
	front_file, err := os.OpenFile(path.Join(config.DocumentRoot, "index.html"), os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0777)
	if err != nil {
		return handleError(1, "Failed opening front page for writing: "+err.Error()) + "<br />\n"
	}
	// FIX: register the deferred close only after the open succeeded,
	// rather than deferring on a handle that may have failed to open.
	defer closeFile(front_file)

	// get front pages
	rows, err := querySQL("SELECT * FROM `" + config.DBprefix + "frontpage`")
	if err != nil {
		return handleError(1, "Failed getting front page rows: "+err.Error())
	}
	// FIX: defer after the error check for the same reason as above.
	defer closeRows(rows)
	for rows.Next() {
		frontpage := new(FrontTable)
		if err = rows.Scan(&frontpage.ID, &frontpage.Page, &frontpage.Order, &frontpage.Subject,
			&frontpage.Message, &frontpage.Timestamp, &frontpage.Poster, &frontpage.Email); err != nil {
			return handleError(1, err.Error())
		}
		front_arr = append(front_arr, frontpage)
	}

	// get recent posts
	rows, err = querySQL("SELECT `"+config.DBprefix+"posts`.`id`, "+
		"`"+config.DBprefix+"posts`.`parentid`, "+
		"`"+config.DBprefix+"boards`.`dir` AS boardname, "+
		"`"+config.DBprefix+"posts`.`boardid` AS boardid, "+
		"`name`, `tripcode`, `message`, `filename`, `thumb_w`, `thumb_h` "+
		"FROM `"+config.DBprefix+"posts`, `"+config.DBprefix+"boards` "+
		"WHERE `"+config.DBprefix+"posts`.`deleted_timestamp` = ? "+
		"AND `boardid` = `"+config.DBprefix+"boards`.`id` "+
		"ORDER BY `timestamp` DESC LIMIT ?",
		nilTimestamp, config.MaxRecentPosts,
	)
	if err != nil {
		return handleError(1, err.Error())
	}
	// Note: this defers a second closeRows for the reassigned rows; the
	// earlier defer captured the previous value, so both get closed.
	defer closeRows(rows)
	for rows.Next() {
		recent_post := new(RecentPost)
		err = rows.Scan(&recent_post.PostID, &recent_post.ParentID, &recent_post.BoardName, &recent_post.BoardID, &recent_post.Name, &recent_post.Tripcode, &recent_post.Message, &recent_post.Filename, &recent_post.ThumbW, &recent_post.ThumbH)
		if err != nil {
			return handleError(1, "Failed getting list of recent posts for front page: "+err.Error())
		}
		recent_posts_arr = append(recent_posts_arr, recent_post)
	}

	// Render the template with the board/section lists and collected rows.
	if err = front_page_tmpl.Execute(front_file, map[string]interface{}{
		"config":       config,
		"fronts":       front_arr,
		"boards":       allBoards,
		"sections":     allSections,
		"recent_posts": recent_posts_arr,
	}); err != nil {
		return handleError(1, "Failed executing front page template: "+err.Error())
	}
	return "Front page rebuilt successfully."
}
// buildBoardListJSON writes the public board list (boards.json) to the
// document root, including per-board settings and the site-wide cooldowns.
// Returns an HTML string describing the result.
func buildBoardListJSON() (html string) {
	board_list_file, err := os.OpenFile(path.Join(config.DocumentRoot, "boards.json"), os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0777)
	if err != nil {
		// FIX: error message previously said "board.json"; the file is boards.json.
		return handleError(1, "Failed opening boards.json for writing: "+err.Error()) + "<br />\n"
	}
	// FIX: defer the close only after a successful open, instead of
	// deferring on a handle that may have failed to open.
	defer closeFile(board_list_file)

	board_list_wrapper := new(BoardJSONWrapper)

	// Our cooldowns are site-wide currently.
	cooldowns_obj := BoardCooldowns{NewThread: config.NewThreadDelay, Reply: config.ReplyDelay, ImageReply: config.ReplyDelay}

	for _, board_int := range allBoards {
		board := board_int.(BoardsTable)
		board_obj := BoardJSON{BoardName: board.Dir, Title: board.Title, WorkSafeBoard: 1,
			ThreadsPerPage: config.ThreadsPerPage, Pages: board.MaxPages, MaxFilesize: board.MaxImageSize,
			MaxMessageLength: board.MaxMessageLength, BumpLimit: 200, ImageLimit: board.NoImagesAfter,
			Cooldowns: cooldowns_obj, Description: board.Description, IsArchived: 0}
		if board.EnableNSFW {
			board_obj.WorkSafeBoard = 0
		}
		board_list_wrapper.Boards = append(board_list_wrapper.Boards, board_obj)
	}

	boardJSON, err := json.Marshal(board_list_wrapper)
	if err != nil {
		return handleError(1, "Failed marshal to JSON: "+err.Error()) + "<br />\n"
	}
	if _, err = board_list_file.Write(boardJSON); err != nil {
		return handleError(1, "Failed writing boards.json file: "+err.Error()) + "<br />\n"
	}
	return "Board list JSON rebuilt successfully.<br />"
}
// buildBoardPages builds the pages for the board archive.
// `board` is a BoardsTable object representing the board to build archive pages for.
// The return value is a string of HTML with debug information from the build process.
func buildBoardPages(board *BoardsTable) (html string) {
	start_time := benchmarkTimer("buildBoard"+strconv.Itoa(board.ID), time.Now(), true)
	var current_page_file *os.File
	var threads []interface{}
	var thread_pages [][]interface{}
	var stickied_threads []interface{}
	var nonstickied_threads []interface{}

	// Check that the board's configured directory is indeed a directory
	results, err := os.Stat(path.Join(config.DocumentRoot, board.Dir))
	if err != nil {
		// Try creating the board's configured directory if it doesn't exist
		err = os.Mkdir(path.Join(config.DocumentRoot, board.Dir), 0777)
		if err != nil {
			html += handleError(1, "Failed creating /"+board.Dir+"/: "+err.Error())
			return
		}
	} else if !results.IsDir() {
		// If the file exists, but is not a folder, notify the user
		html += handleError(1, "Error: /"+board.Dir+"/ exists, but is not a folder.")
		return
	}

	// Get all top level posts for the board.
	op_posts, err := getPostArr(map[string]interface{}{
		"boardid":           board.ID,
		"parentid":          0,
		"deleted_timestamp": nilTimestamp,
	}, " ORDER BY `bumped` DESC")
	if err != nil {
		html += handleError(1, err.Error()) + "<br />"
		op_posts = nil
		return
	}

	// For each top level post, start building a Thread struct
	for _, op := range op_posts {
		var thread Thread
		var posts_in_thread []PostTable

		// Get the number of replies to this thread.
		if err = queryRowSQL("SELECT COUNT(*) FROM `"+config.DBprefix+"posts` WHERE `boardid` = ? AND `parentid` = ? AND `deleted_timestamp` = ?",
			[]interface{}{board.ID, op.ID, nilTimestamp},
			[]interface{}{&thread.NumReplies},
		); err != nil {
			html += err.Error() + "<br />\n"
		}

		// Get the number of image replies in this thread
		if err = queryRowSQL("SELECT COUNT(*) FROM `"+config.DBprefix+"posts` WHERE `boardid` = ? AND `parentid` = ? AND `deleted_timestamp` = ? AND `filesize` <> 0",
			[]interface{}{board.ID, op.ID, nilTimestamp},
			[]interface{}{&thread.NumImages},
		); err != nil {
			html += err.Error() + "<br />\n"
		}

		thread.OP = op

		// Replies shown on the board page are capped differently for
		// stickied vs. normal threads.
		var numRepliesOnBoardPage int
		if op.Stickied {
			// If the thread is stickied, limit replies on the archive page to the
			// configured value for stickied threads.
			numRepliesOnBoardPage = config.StickyRepliesOnBoardPage
		} else {
			// Otherwise, limit the replies to the configured value for normal threads.
			numRepliesOnBoardPage = config.RepliesOnBoardPage
		}

		posts_in_thread, err = getPostArr(map[string]interface{}{
			"boardid":           board.ID,
			"parentid":          op.ID,
			"deleted_timestamp": nilTimestamp,
		}, fmt.Sprintf(" ORDER BY `id` DESC LIMIT %d", numRepliesOnBoardPage))
		if err != nil {
			html += err.Error() + "<br />"
		}

		// The query returns newest-first; reverse so the shown replies
		// render in chronological order.
		var reversedPosts []PostTable
		for i := len(posts_in_thread); i > 0; i-- {
			reversedPosts = append(reversedPosts, posts_in_thread[i-1])
		}

		if len(posts_in_thread) > 0 {
			// Store the posts to show on board page
			//thread.BoardReplies = posts_in_thread
			thread.BoardReplies = reversedPosts

			// Count number of images on board page
			image_count := 0
			for _, reply := range posts_in_thread {
				if reply.Filesize != 0 {
					image_count++
				}
			}
			// Then calculate number of omitted images.
			thread.OmittedImages = thread.NumImages - image_count
		}

		// Add thread struct to appropriate list
		if op.Stickied {
			stickied_threads = append(stickied_threads, thread)
		} else {
			nonstickied_threads = append(nonstickied_threads, thread)
		}
	}

	// Delete the previously built numbered page files (1.html, 2.html, ...).
	num, _ := deleteMatchingFiles(path.Join(config.DocumentRoot, board.Dir), "\\d.html$")
	printf(2, "Number of files deleted: %d\n", num)

	// Order the threads, stickied threads first, then nonstickied threads.
	threads = append(stickied_threads, nonstickied_threads...)

	// If there are no posts on the board
	if len(threads) == 0 {
		board.CurrentPage = 1
		// Open board.html for writing to the first page.
		// NOTE(review): unlike the paginated path below, this handle is
		// never closed -- consider closeFile here as well.
		board_page_file, err := os.OpenFile(path.Join(config.DocumentRoot, board.Dir, "board.html"), os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0777)
		if err != nil {
			html += handleError(1, "Failed opening /"+board.Dir+"/board.html: "+err.Error()) + "<br />"
			return
		}

		// Render board page template to the file,
		// packaging the board/section list, threads, and board info
		if err = img_boardpage_tmpl.Execute(board_page_file, map[string]interface{}{
			"config":   config,
			"boards":   allBoards,
			"sections": allSections,
			"threads":  threads,
			"board":    board,
		}); err != nil {
			html += handleError(1, "Failed building /"+board.Dir+"/: "+err.Error()) + "<br />"
			return
		}
		html += "/" + board.Dir + "/ built successfully, no threads to build.\n"
		benchmarkTimer("buildBoard"+strconv.Itoa(board.ID), start_time, false)
		return
	} else {
		// Create the archive pages.
		thread_pages = paginate(config.ThreadsPerPage, threads)
		// NumPages holds the highest page index (page count minus one).
		board.NumPages = len(thread_pages) - 1

		// Create array of page wrapper objects, and open the file.
		var pages_obj []BoardPageJSON

		catalog_json_file, err := os.OpenFile(path.Join(config.DocumentRoot, board.Dir, "catalog.json"), os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0777)
		defer closeFile(catalog_json_file)
		if err != nil {
			html += handleError(1, "Failed opening /"+board.Dir+"/catalog.json: "+err.Error())
			return
		}

		currentBoardPage := board.CurrentPage
		for _, page_threads := range thread_pages {
			board.CurrentPage++
			var current_page_filepath string
			pageFilename := strconv.Itoa(board.CurrentPage) + ".html"
			current_page_filepath = path.Join(config.DocumentRoot, board.Dir, pageFilename)
			current_page_file, err = os.OpenFile(current_page_filepath, os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0777)
			// NOTE(review): defer in a loop runs only at function exit, so
			// every page file stays open until buildBoardPages returns;
			// closing explicitly per iteration would avoid holding the
			// descriptors open.
			defer closeFile(current_page_file)
			if err != nil {
				html += handleError(1, "Failed opening board page: "+err.Error()) + "<br />"
				continue
			}

			// Render the boardpage template, don't forget config
			if err = img_boardpage_tmpl.Execute(current_page_file, map[string]interface{}{
				"config":   config,
				"boards":   allBoards,
				"sections": allSections,
				"threads":  page_threads,
				"board":    board,
				"posts": []interface{}{
					PostTable{BoardID: board.ID},
				},
			}); err != nil {
				html += handleError(1, "Failed building /"+board.Dir+"/ boardpage: "+err.Error()) + "<br />"
				return
			}

			if board.CurrentPage == 1 {
				// board.html is kept as a symlink to the first page.
				boardPage := path.Join(config.DocumentRoot, board.Dir, "board.html")
				os.Remove(boardPage)
				if err = syscall.Symlink(current_page_filepath, boardPage); !os.IsExist(err) && err != nil {
					html += handleError(1, "Failed building /"+board.Dir+"/: "+err.Error()) + "<br />"
				}
			}

			// Collect up threads for this page.
			var page_obj BoardPageJSON
			page_obj.Page = board.CurrentPage

			for _, thread_int := range page_threads {
				thread := thread_int.(Thread)
				post_json := makePostJSON(thread.OP, board.Anonymous)
				var thread_json ThreadJSON
				thread_json.PostJSON = &post_json
				thread_json.Replies = thread.NumReplies
				thread_json.ImagesOnArchive = thread.NumImages
				thread_json.OmittedImages = thread.OmittedImages
				if thread.Stickied {
					if thread.NumReplies > config.StickyRepliesOnBoardPage {
						thread_json.OmittedPosts = thread.NumReplies - config.StickyRepliesOnBoardPage
					}
					thread_json.Sticky = 1
				} else {
					if thread.NumReplies > config.RepliesOnBoardPage {
						thread_json.OmittedPosts = thread.NumReplies - config.RepliesOnBoardPage
					}
				}
				if thread.OP.Locked {
					thread_json.Locked = 1
				}
				page_obj.Threads = append(page_obj.Threads, thread_json)
			}
			pages_obj = append(pages_obj, page_obj)
		}
		// Restore the page counter mutated by the loop above.
		board.CurrentPage = currentBoardPage

		catalog_json, err := json.Marshal(pages_obj)
		if err != nil {
			html += handleError(1, "Failed to marshal to JSON: "+err.Error()) + "<br />"
			return
		}
		if _, err = catalog_json_file.Write(catalog_json); err != nil {
			html += handleError(1, "Failed writing /"+board.Dir+"/catalog.json: "+err.Error()) + "<br />"
			return
		}
		html += "/" + board.Dir + "/ built successfully.\n"
	}
	benchmarkTimer("buildBoard"+strconv.Itoa(board.ID), start_time, false)
	return
}
// buildBoards builds one or all boards.
// If `all` == true, all boards will have their pages built and `which` is ignored.
// Otherwise, only the board with the id equal to `which` is built.
// The return value is a string of HTML with debug information produced by the build process.
// TODO: make this a variadic function (which ...int)
func buildBoards(all bool, which int) (html string) {
	// if all is set to true, ignore which, otherwise, which = build only specified boardid
	if !all {
		boardArr, _ := getBoardArr(map[string]interface{}{"id": which}, "")
		// Guard against a nonexistent board id; indexing boardArr[0] on an
		// empty slice would panic and take the whole build down.
		if len(boardArr) == 0 {
			return html + "Board does not exist.<br />\n"
		}
		board := boardArr[0]
		html += buildBoardPages(&board) + "<br />\n"
		html += buildThreads(true, board.ID, 0)
		return
	}
	boards, _ := getBoardArr(nil, "")
	if len(boards) == 0 {
		return html + "No boards to build.<br />\n"
	}
	for _, board := range boards {
		html += buildBoardPages(&board) + "<br />\n"
		html += buildThreads(true, board.ID, 0)
	}
	return
}
// buildThreadPages builds the main page, paginated pages, and JSON file for
// the thread whose OP is given as a PostTable object.
// The return value is a string of HTML with debug information from the build process.
func buildThreadPages(op *PostTable) (html string) {
	var replies []PostTable
	var current_page_file *os.File
	board, err := getBoardFromID(op.BoardID)
	if err != nil {
		// Without a valid board we can't construct any of the thread's file
		// paths; bail out instead of continuing with a zero-value board.
		html += handleError(1, err.Error())
		return
	}
	replies, err = getPostArr(map[string]interface{}{
		"boardid":           op.BoardID,
		"parentid":          op.ID,
		"deleted_timestamp": nilTimestamp,
	}, "ORDER BY `id` ASC")
	if err != nil {
		html += handleError(1, "Error building thread "+strconv.Itoa(op.ID)+":"+err.Error())
		return
	}
	os.Remove(path.Join(config.DocumentRoot, board.Dir, "res", strconv.Itoa(op.ID)+".html"))
	var repliesInterface []interface{}
	for _, reply := range replies {
		repliesInterface = append(repliesInterface, reply)
	}
	thread_pages := paginate(config.PostsPerThreadPage, repliesInterface)
	// Remove stale paginated pages (<id>p<n>.html) before rebuilding.
	deleteMatchingFiles(path.Join(config.DocumentRoot, board.Dir, "res"), "^"+strconv.Itoa(op.ID)+"p")
	op.NumPages = len(thread_pages)
	current_page_filepath := path.Join(config.DocumentRoot, board.Dir, "res", strconv.Itoa(op.ID)+".html")
	current_page_file, err = os.OpenFile(current_page_filepath, os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0777)
	// Make sure the page file is closed; previously this handle leaked.
	defer closeFile(current_page_file)
	if err != nil {
		html += handleError(1, "Failed opening "+current_page_filepath+": "+err.Error())
		return
	}
	// render main page
	if err = img_threadpage_tmpl.Execute(current_page_file, map[string]interface{}{
		"config":   config,
		"boards":   allBoards,
		"board":    board,
		"sections": allSections,
		"posts":    replies,
		"op":       op,
	}); err != nil {
		html += handleError(1, "Failed building /%s/res/%d threadpage: %s", board.Dir, op.ID, err.Error()) + "<br />\n"
		return
	}
	// Put together the thread JSON
	threadJSONFile, err := os.OpenFile(path.Join(config.DocumentRoot, board.Dir, "res", strconv.Itoa(op.ID)+".json"), os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0777)
	defer closeFile(threadJSONFile)
	if err != nil {
		html += handleError(1, "Failed opening /%s/res/%d.json: %s", board.Dir, op.ID, err.Error())
		return
	}
	// Create the wrapper object
	thread_json_wrapper := new(ThreadJSONWrapper)
	// Handle the OP, of type *PostTable
	op_post_obj := makePostJSON(*op, board.Anonymous)
	thread_json_wrapper.Posts = append(thread_json_wrapper.Posts, op_post_obj)
	// Iterate through each reply, which are of type PostTable
	for _, reply := range replies {
		postJSON := makePostJSON(reply, board.Anonymous)
		thread_json_wrapper.Posts = append(thread_json_wrapper.Posts, postJSON)
	}
	threadJSON, err := json.Marshal(thread_json_wrapper)
	if err != nil {
		html += handleError(1, "Failed to marshal to JSON: %s", err.Error()) + "<br />"
		return
	}
	if _, err = threadJSONFile.Write(threadJSON); err != nil {
		html += handleError(1, "Failed writing /%s/res/%d.json: %s", board.Dir, op.ID, err.Error()) + "<br />"
		return
	}
	success_text := fmt.Sprintf("Built /%s/%d successfully", board.Dir, op.ID)
	html += success_text + "<br />\n"
	println(2, success_text)
	// Build each paginated view of the thread (<id>p<page>.html).
	for page_num, page_posts := range thread_pages {
		op.CurrentPage = page_num + 1
		current_page_filepath := path.Join(config.DocumentRoot, board.Dir, "res", strconv.Itoa(op.ID)+"p"+strconv.Itoa(op.CurrentPage)+".html")
		current_page_file, err = os.OpenFile(current_page_filepath, os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0777)
		// Deferred to function exit, matching the pattern used elsewhere in
		// this file for per-page files opened in a loop.
		defer closeFile(current_page_file)
		if err != nil {
			html += handleError(1, "Failed opening "+current_page_filepath+": "+err.Error()) + "<br />\n"
			return
		}
		if err = img_threadpage_tmpl.Execute(current_page_file, map[string]interface{}{
			"config":   config,
			"boards":   allBoards,
			"board":    board,
			"sections": allSections,
			"posts":    page_posts,
			"op":       op,
		}); err != nil {
			html += handleError(1, "Failed building /%s/%d: %s", board.Dir, op.ID, err.Error())
			return
		}
		success_text := fmt.Sprintf("Built /%s/%dp%d successfully", board.Dir, op.ID, op.CurrentPage)
		html += success_text + "<br />\n"
		println(2, success_text)
	}
	return
}
// buildThreads builds thread(s) given a boardid, or if all = false, also given a threadid.
// if all is set to true, threadid is ignored and every thread on the board is built.
// The return value is a string of HTML with debug information from the build process.
// TODO: detect which page will be built and only build that one and the board page
func buildThreads(all bool, boardid, threadid int) (html string) {
	if !all {
		threads, _ := getPostArr(map[string]interface{}{
			"boardid":           boardid,
			"id":                threadid,
			"parentid":          0,
			"deleted_timestamp": nilTimestamp,
		}, "")
		// Guard against a missing/deleted thread; threads[0] on an empty
		// slice would panic.
		if len(threads) == 0 {
			return
		}
		thread := threads[0]
		html += buildThreadPages(&thread) + "<br />\n"
		return
	}
	threads, _ := getPostArr(map[string]interface{}{
		"boardid":           boardid,
		"parentid":          0,
		"deleted_timestamp": nilTimestamp,
	}, "")
	if len(threads) == 0 {
		return
	}
	for _, op := range threads {
		html += buildThreadPages(&op) + "<br />\n"
	}
	return
}

View file

@ -53,7 +53,7 @@ func parseCommandLine() {
}
printf(0, "Creating new staff: '%s', with password: '%s' and rank: %d\n", arr[0], arr[1], rank)
if err = newStaff(arr[0], arr[1], rank); err != nil {
handleError(0, "%s\n", err.Error())
handleError(0, err.Error())
os.Exit(2)
}
os.Exit(0)

View file

@ -516,7 +516,7 @@ var manage_functions = map[string]ManageFunction{
}
if username == "" || password == "" {
//assume that they haven't logged in
html = "\t<form method=\"POST\" action=\"/manage?action=login\" id=\"login-box\" class=\"staff-form\">\n" +
html = "\t<form method=\"POST\" action=\"" + config.SiteWebfolder + "manage?action=login\" id=\"login-box\" class=\"staff-form\">\n" +
"\t\t<input type=\"hidden\" name=\"redirect\" value=\"" + redirect_action + "\" />\n" +
"\t\t<input type=\"text\" name=\"username\" class=\"logindata\" /><br />\n" +
"\t\t<input type=\"password\" name=\"password\" class=\"logindata\" /> <br />\n" +
@ -525,7 +525,7 @@ var manage_functions = map[string]ManageFunction{
} else {
key := md5Sum(request.RemoteAddr + username + password + config.RandomSeed + generateSalt())[0:10]
createSession(key, username, password, request, writer)
http.Redirect(writer, request, path.Join(config.SiteWebfolder, "/manage?action="+request.FormValue("redirect")), http.StatusFound)
http.Redirect(writer, request, path.Join(config.SiteWebfolder, "manage?action="+request.FormValue("redirect")), http.StatusFound)
}
return
}},
@ -631,9 +631,11 @@ var manage_functions = map[string]ManageFunction{
}, "")
if err != nil {
pageHTML += handleError(1, err.Error())
return
}
if len(posts) < 1 {
pageHTML += handleError(1, "Post doesn't exist")
return
}
post = posts[0]
}

View file

@ -6,7 +6,6 @@ import (
"bytes"
"crypto/md5"
"database/sql"
"encoding/json"
"errors"
"fmt"
"html"
@ -39,520 +38,6 @@ var (
allBoards []interface{}
)
// buildBoards builds one or all boards. If all == true, all boards will have their pages built.
// If all == false, the board with the id equal to the value specified as which.
// The return value is a string of HTML with debug information produced by the build process.
func buildBoards(all bool, which int) (html string) {
// if all is set to true, ignore which, otherwise, which = build only specified boardid
if !all {
boardArr, _ := getBoardArr(map[string]interface{}{"id": which}, "")
board := boardArr[0]
html += buildBoardPages(&board) + "<br />\n"
html += buildThreads(true, board.ID, 0)
return
}
boards, _ := getBoardArr(nil, "")
if len(boards) == 0 {
return html + "No boards to build.<br />\n"
}
for _, board := range boards {
html += buildBoardPages(&board) + "<br />\n"
html += buildThreads(true, board.ID, 0)
}
return
}
// buildBoardPages builds the pages for the board archive. board is a BoardsTable object representing the board to
// build archive pages for. The return value is a string of HTML with debug information from the build process.
func buildBoardPages(board *BoardsTable) (html string) {
start_time := benchmarkTimer("buildBoard"+strconv.Itoa(board.ID), time.Now(), true)
var current_page_file *os.File
var threads []interface{}
var thread_pages [][]interface{}
var stickied_threads []interface{}
var nonstickied_threads []interface{}
defer func() {
// Recover and print, log error (if there is one)
/* if errmsg, panicked := recover().(error); panicked {
handleError(0, "Recovered from panic: "+errmsg.Error())
} */
}()
// Check that the board's configured directory is indeed a directory
results, err := os.Stat(path.Join(config.DocumentRoot, board.Dir))
if err != nil {
// Try creating the board's configured directory if it doesn't exist
err = os.Mkdir(path.Join(config.DocumentRoot, board.Dir), 0777)
if err != nil {
html += handleError(1, "Failed creating /"+board.Dir+"/: "+err.Error())
return
}
} else if !results.IsDir() {
// If the file exists, but is not a folder, notify the user
html += handleError(1, "Error: /"+board.Dir+"/ exists, but is not a folder.")
return
}
// Get all top level posts for the board.
op_posts, err := getPostArr(map[string]interface{}{
"boardid": board.ID,
"parentid": 0,
"deleted_timestamp": nilTimestamp,
}, " ORDER BY `bumped` DESC")
if err != nil {
html += handleError(1, err.Error()) + "<br />"
op_posts = nil
return
}
// For each top level post, start building a Thread struct
for _, op := range op_posts {
var thread Thread
var posts_in_thread []PostTable
// Get the number of replies to this thread.
if err = queryRowSQL("SELECT COUNT(*) FROM `"+config.DBprefix+"posts` WHERE `boardid` = ? AND `parentid` = ? AND `deleted_timestamp` = ?",
[]interface{}{board.ID, op.ID, nilTimestamp},
[]interface{}{&thread.NumReplies},
); err != nil {
html += err.Error() + "<br />\n"
}
// Get the number of image replies in this thread
if err = queryRowSQL("SELECT COUNT(*) FROM `"+config.DBprefix+"posts` WHERE `boardid` = ? AND `parentid` = ? AND `deleted_timestamp` = ? AND `filesize` <> 0",
[]interface{}{board.ID, op.ID, nilTimestamp},
[]interface{}{&thread.NumImages},
); err != nil {
html += err.Error() + "<br />\n"
}
thread.OP = op
var numRepliesOnBoardPage int
if op.Stickied {
// If the thread is stickied, limit replies on the archive page to the
// configured value for stickied threads.
numRepliesOnBoardPage = config.StickyRepliesOnBoardPage
} else {
// Otherwise, limit the replies to the configured value for normal threads.
numRepliesOnBoardPage = config.RepliesOnBoardPage
}
posts_in_thread, err = getPostArr(map[string]interface{}{
"boardid": board.ID,
"parentid": op.ID,
"deleted_timestamp": nilTimestamp,
}, fmt.Sprintf(" ORDER BY `id` DESC LIMIT %d", numRepliesOnBoardPage))
if err != nil {
html += err.Error() + "<br />"
}
var reversedPosts []PostTable
for i := len(posts_in_thread); i > 0; i-- {
reversedPosts = append(reversedPosts, posts_in_thread[i-1])
}
if len(posts_in_thread) > 0 {
// Store the posts to show on board page
//thread.BoardReplies = posts_in_thread
thread.BoardReplies = reversedPosts
// Count number of images on board page
image_count := 0
for _, reply := range posts_in_thread {
if reply.Filesize != 0 {
image_count++
}
}
// Then calculate number of omitted images.
thread.OmittedImages = thread.NumImages - image_count
}
// Add thread struct to appropriate list
if op.Stickied {
stickied_threads = append(stickied_threads, thread)
} else {
nonstickied_threads = append(nonstickied_threads, thread)
}
}
num, _ := deleteMatchingFiles(path.Join(config.DocumentRoot, board.Dir), "\\d.html$")
printf(2, "Number of files deleted: %d\n", num)
// Order the threads, stickied threads first, then nonstickied threads.
threads = append(stickied_threads, nonstickied_threads...)
// If there are no posts on the board
if len(threads) == 0 {
board.CurrentPage = 1
// Open board.html for writing to the first page.
board_page_file, err := os.OpenFile(path.Join(config.DocumentRoot, board.Dir, "board.html"), os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0777)
if err != nil {
html += handleError(1, "Failed opening /"+board.Dir+"/board.html: "+err.Error()) + "<br />"
return
}
// Render board page template to the file,
// packaging the board/section list, threads, and board info
if err = img_boardpage_tmpl.Execute(board_page_file, map[string]interface{}{
"config": config,
"boards": allBoards,
"sections": allSections,
"threads": threads,
"board": board,
}); err != nil {
html += handleError(1, "Failed building /"+board.Dir+"/: "+err.Error()) + "<br />"
return
}
html += "/" + board.Dir + "/ built successfully, no threads to build.\n"
benchmarkTimer("buildBoard"+strconv.Itoa(board.ID), start_time, false)
return
} else {
// Create the archive pages.
thread_pages = paginate(config.ThreadsPerPage, threads)
board.NumPages = len(thread_pages) - 1
// Create array of page wrapper objects, and open the file.
var pages_obj []BoardPageJSON
catalog_json_file, err := os.OpenFile(path.Join(config.DocumentRoot, board.Dir, "catalog.json"), os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0777)
defer closeFile(catalog_json_file)
if err != nil {
html += handleError(1, "Failed opening /"+board.Dir+"/catalog.json: "+err.Error())
return
}
currentBoardPage := board.CurrentPage
for _, page_threads := range thread_pages {
board.CurrentPage++
var current_page_filepath string
pageFilename := strconv.Itoa(board.CurrentPage) + ".html"
current_page_filepath = path.Join(config.DocumentRoot, board.Dir, pageFilename)
current_page_file, err = os.OpenFile(current_page_filepath, os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0777)
defer closeFile(current_page_file)
if err != nil {
html += handleError(1, "Failed opening board page: "+err.Error()) + "<br />"
continue
}
// Render the boardpage template, don't forget config
if err = img_boardpage_tmpl.Execute(current_page_file, map[string]interface{}{
"config": config,
"boards": allBoards,
"sections": allSections,
"threads": page_threads,
"board": board,
"posts": []interface{}{
PostTable{BoardID: board.ID},
},
}); err != nil {
html += handleError(1, "Failed building /"+board.Dir+"/ boardpage: "+err.Error()) + "<br />"
return
}
if board.CurrentPage == 1 {
boardPage := path.Join(config.DocumentRoot, board.Dir, "board.html")
os.Remove(boardPage)
if err = syscall.Symlink(current_page_filepath, boardPage); !os.IsExist(err) && err != nil {
html += handleError(1, "Failed building /"+board.Dir+"/: "+err.Error()) + "<br />"
}
}
// Collect up threads for this page.
var page_obj BoardPageJSON
page_obj.Page = board.CurrentPage
for _, thread_int := range page_threads {
thread := thread_int.(Thread)
post_json := makePostJSON(thread.OP, board.Anonymous)
var thread_json ThreadJSON
thread_json.PostJSON = &post_json
thread_json.Replies = thread.NumReplies
thread_json.ImagesOnArchive = thread.NumImages
thread_json.OmittedImages = thread.OmittedImages
if thread.Stickied {
if thread.NumReplies > config.StickyRepliesOnBoardPage {
thread_json.OmittedPosts = thread.NumReplies - config.StickyRepliesOnBoardPage
}
thread_json.Sticky = 1
} else {
if thread.NumReplies > config.RepliesOnBoardPage {
thread_json.OmittedPosts = thread.NumReplies - config.RepliesOnBoardPage
}
}
if thread.OP.Locked {
thread_json.Locked = 1
}
page_obj.Threads = append(page_obj.Threads, thread_json)
}
pages_obj = append(pages_obj, page_obj)
}
board.CurrentPage = currentBoardPage
catalog_json, err := json.Marshal(pages_obj)
if err != nil {
html += handleError(1, "Failed to marshal to JSON: "+err.Error()) + "<br />"
return
}
if _, err = catalog_json_file.Write(catalog_json); err != nil {
html += handleError(1, "Failed writing /"+board.Dir+"/catalog.json: "+err.Error()) + "<br />"
return
}
html += "/" + board.Dir + "/ built successfully.\n"
}
benchmarkTimer("buildBoard"+strconv.Itoa(board.ID), start_time, false)
return
}
// buildThreads builds thread(s) given a boardid, or if all = false, also given a threadid.
func buildThreads(all bool, boardid, threadid int) (html string) {
// TODO: detect which page will be built and only build that one and the board page
// if all is set to true, ignore which, otherwise, which = build only specified boardid
if !all {
threads, _ := getPostArr(map[string]interface{}{
"boardid": boardid,
"id": threadid,
"parentid": 0,
"deleted_timestamp": nilTimestamp,
}, "")
thread := threads[0]
html += buildThreadPages(&thread) + "<br />\n"
return
}
threads, _ := getPostArr(map[string]interface{}{
"boardid": boardid,
"parentid": 0,
"deleted_timestamp": nilTimestamp,
}, "")
if len(threads) == 0 {
return
}
for _, op := range threads {
html += buildThreadPages(&op) + "<br />\n"
}
return
}
// buildThreadPages builds the pages for a thread given by a PostTable object.
func buildThreadPages(op *PostTable) (html string) {
var replies []PostTable
var current_page_file *os.File
board, err := getBoardFromID(op.BoardID)
if err != nil {
html += handleError(1, err.Error())
}
replies, err = getPostArr(map[string]interface{}{
"boardid": op.BoardID,
"parentid": op.ID,
"deleted_timestamp": nilTimestamp,
}, "ORDER BY `id` ASC")
if err != nil {
html += handleError(1, "Error building thread "+strconv.Itoa(op.ID)+":"+err.Error())
return
}
os.Remove(path.Join(config.DocumentRoot, board.Dir, "res", strconv.Itoa(op.ID)+".html"))
var repliesInterface []interface{}
for _, reply := range replies {
repliesInterface = append(repliesInterface, reply)
}
//thread_pages := paginate(config.PostsPerThreadPage, replies)
thread_pages := paginate(config.PostsPerThreadPage, repliesInterface)
deleteMatchingFiles(path.Join(config.DocumentRoot, board.Dir, "res"), "^"+strconv.Itoa(op.ID)+"p")
op.NumPages = len(thread_pages)
current_page_filepath := path.Join(config.DocumentRoot, board.Dir, "res", strconv.Itoa(op.ID)+".html")
current_page_file, err = os.OpenFile(current_page_filepath, os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0777)
if err != nil {
html += handleError(1, "Failed opening "+current_page_filepath+": "+err.Error())
return
}
// render main page
if err = img_threadpage_tmpl.Execute(current_page_file, map[string]interface{}{
"config": config,
"boards": allBoards,
"board": board,
"sections": allSections,
"posts": replies,
"op": op,
}); err != nil {
html += handleError(1, "Failed building /%s/res/%d threadpage: %s", board.Dir, op.ID, err.Error()) + "<br />\n"
return
}
// Put together the thread JSON
threadJSONFile, err := os.OpenFile(path.Join(config.DocumentRoot, board.Dir, "res", strconv.Itoa(op.ID)+".json"), os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0777)
defer closeFile(threadJSONFile)
if err != nil {
html += handleError(1, "Failed opening /%s/res/%d.json: %s", board.Dir, op.ID, err.Error())
return
}
// Create the wrapper object
thread_json_wrapper := new(ThreadJSONWrapper)
// Handle the OP, of type *PostTable
op_post_obj := makePostJSON(*op, board.Anonymous)
thread_json_wrapper.Posts = append(thread_json_wrapper.Posts, op_post_obj)
// Iterate through each reply, which are of type PostTable
for _, reply := range replies {
postJSON := makePostJSON(reply, board.Anonymous)
thread_json_wrapper.Posts = append(thread_json_wrapper.Posts, postJSON)
}
threadJSON, err := json.Marshal(thread_json_wrapper)
if err != nil {
html += handleError(1, "Failed to marshal to JSON: %s", err.Error()) + "<br />"
return
}
if _, err = threadJSONFile.Write(threadJSON); err != nil {
html += handleError(1, "Failed writing /%s/res/%d.json: %s", board.Dir, op.ID, err.Error()) + "<br />"
return
}
success_text := fmt.Sprintf("Built /%s/%d successfully", board.Dir, op.ID)
html += success_text + "<br />\n"
println(2, success_text)
for page_num, page_posts := range thread_pages {
op.CurrentPage = page_num + 1
current_page_filepath := path.Join(config.DocumentRoot, board.Dir, "res", strconv.Itoa(op.ID)+"p"+strconv.Itoa(op.CurrentPage)+".html")
current_page_file, err = os.OpenFile(current_page_filepath, os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0777)
if err != nil {
html += handleError(1, "Failed opening "+current_page_filepath+": "+err.Error()) + "<br />\n"
return
}
if err = img_threadpage_tmpl.Execute(current_page_file, map[string]interface{}{
"config": config,
"boards": allBoards,
"board": board,
"sections": allSections,
"posts": page_posts,
"op": op,
}); err != nil {
html += handleError(1, "Failed building /%s/%d: %s", board.Dir, op.ID, err.Error())
return
}
success_text := fmt.Sprintf("Built /%s/%dp%d successfully", board.Dir, op.ID, op.CurrentPage)
html += success_text + "<br />\n"
println(2, success_text)
}
return
}
func buildFrontPage() (html string) {
initTemplates()
var front_arr []interface{}
var recent_posts_arr []interface{}
os.Remove(path.Join(config.DocumentRoot, "index.html"))
front_file, err := os.OpenFile(path.Join(config.DocumentRoot, "index.html"), os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0777)
defer closeFile(front_file)
if err != nil {
return handleError(1, "Failed opening front page for writing: "+err.Error()) + "<br />\n"
}
// get front pages
rows, err := querySQL("SELECT * FROM `" + config.DBprefix + "frontpage`")
defer closeRows(rows)
if err != nil {
return handleError(1, "Failed getting front page rows: "+err.Error())
}
for rows.Next() {
frontpage := new(FrontTable)
if err = rows.Scan(&frontpage.ID, &frontpage.Page, &frontpage.Order, &frontpage.Subject,
&frontpage.Message, &frontpage.Timestamp, &frontpage.Poster, &frontpage.Email); err != nil {
return handleError(1, err.Error())
}
front_arr = append(front_arr, frontpage)
}
// get recent posts
rows, err = querySQL(
"SELECT `"+config.DBprefix+"posts`.`id`, "+
"`"+config.DBprefix+"posts`.`parentid`, "+
"`"+config.DBprefix+"boards`.`dir` AS boardname, "+
"`"+config.DBprefix+"posts`.`boardid` AS boardid, "+
"`name`, `tripcode`, `message`, `filename`, `thumb_w`, `thumb_h` "+
"FROM `"+config.DBprefix+"posts`, `"+config.DBprefix+"boards` "+
"WHERE `"+config.DBprefix+"posts`.`deleted_timestamp` = ? "+
"AND `boardid` = `"+config.DBprefix+"boards`.`id` "+
"ORDER BY `timestamp` DESC LIMIT ?",
nilTimestamp, config.MaxRecentPosts,
)
defer closeRows(rows)
if err != nil {
return handleError(1, err.Error())
}
for rows.Next() {
recent_post := new(RecentPost)
err = rows.Scan(&recent_post.PostID, &recent_post.ParentID, &recent_post.BoardName, &recent_post.BoardID, &recent_post.Name, &recent_post.Tripcode, &recent_post.Message, &recent_post.Filename, &recent_post.ThumbW, &recent_post.ThumbH)
if err != nil {
return handleError(1, "Failed getting list of recent posts for front page: "+err.Error())
}
recent_posts_arr = append(recent_posts_arr, recent_post)
}
if err = front_page_tmpl.Execute(front_file, map[string]interface{}{
"config": config,
"fronts": front_arr,
"boards": allBoards,
"sections": allSections,
"recent_posts": recent_posts_arr,
}); err != nil {
return handleError(1, "Failed executing front page template: "+err.Error())
}
return "Front page rebuilt successfully."
}
func buildBoardListJSON() (html string) {
board_list_file, err := os.OpenFile(path.Join(config.DocumentRoot, "boards.json"), os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0777)
defer closeFile(board_list_file)
if err != nil {
return handleError(1, "Failed opening board.json for writing: "+err.Error()) + "<br />\n"
}
board_list_wrapper := new(BoardJSONWrapper)
// Our cooldowns are site-wide currently.
cooldowns_obj := BoardCooldowns{NewThread: config.NewThreadDelay, Reply: config.ReplyDelay, ImageReply: config.ReplyDelay}
for _, board_int := range allBoards {
board := board_int.(BoardsTable)
board_obj := BoardJSON{BoardName: board.Dir, Title: board.Title, WorkSafeBoard: 1,
ThreadsPerPage: config.ThreadsPerPage, Pages: board.MaxPages, MaxFilesize: board.MaxImageSize,
MaxMessageLength: board.MaxMessageLength, BumpLimit: 200, ImageLimit: board.NoImagesAfter,
Cooldowns: cooldowns_obj, Description: board.Description, IsArchived: 0}
if board.EnableNSFW {
board_obj.WorkSafeBoard = 0
}
board_list_wrapper.Boards = append(board_list_wrapper.Boards, board_obj)
}
boardJSON, err := json.Marshal(board_list_wrapper)
if err != nil {
return handleError(1, "Failed marshal to JSON: "+err.Error()) + "<br />\n"
}
if _, err = board_list_file.Write(boardJSON); err != nil {
return handleError(1, "Failed writing boards.json file: "+err.Error()) + "<br />\n"
}
return "Board list JSON rebuilt successfully.<br />"
}
// bumps the given thread on the given board and returns true if there were no errors
func bumpThread(postID, boardID int) error {
_, err := execSQL("UPDATE `"+config.DBprefix+"posts` SET `bumped` = ? WHERE `id` = ? AND `boardid` = ?",
@ -796,7 +281,7 @@ func makePost(writer http.ResponseWriter, request *http.Request) {
var formEmail string
// fix new cookie domain for when you use a port number
chopPortNumRegex := regexp.MustCompile("(.+|\\w+):(\\d+)$")
chopPortNumRegex := regexp.MustCompile(`(.+|\w+):(\d+)$`)
domain = chopPortNumRegex.Split(domain, -1)[0]
post.ParentID, _ = strconv.Atoi(request.FormValue("threadid"))
@ -808,7 +293,6 @@ func makePost(writer http.ResponseWriter, request *http.Request) {
post.Name = parsedName["name"]
post.Tripcode = parsedName["tripcode"]
nameCookie = post.Name + post.Tripcode
formEmail = request.FormValue("postemail")
http.SetCookie(writer, &http.Cookie{Name: "email", Value: formEmail, Path: "/", Domain: domain, RawExpires: getSpecificSQLDateTime(time.Now().Add(time.Duration(yearInSeconds))), MaxAge: yearInSeconds})
@ -860,8 +344,6 @@ func makePost(writer http.ResponseWriter, request *http.Request) {
//post has no referrer, or has a referrer from a different domain, probably a spambot
if !validReferrer(request) {
accessLog.Print("Rejected post from possible spambot @ " + post.IP)
//TODO: insert post into temporary post table and add to report list
// or maybe not
return
}
@ -1113,12 +595,12 @@ func makePost(writer http.ResponseWriter, request *http.Request) {
if emailCommand == "noko" {
if post.ParentID == 0 {
http.Redirect(writer, request, "/"+boards[post.BoardID-1].Dir+"/res/"+strconv.Itoa(post.ID)+".html", http.StatusFound)
http.Redirect(writer, request, config.SiteWebfolder+boards[post.BoardID-1].Dir+"/res/"+strconv.Itoa(post.ID)+".html", http.StatusFound)
} else {
http.Redirect(writer, request, "/"+boards[post.BoardID-1].Dir+"/res/"+strconv.Itoa(post.ParentID)+".html#"+strconv.Itoa(post.ID), http.StatusFound)
http.Redirect(writer, request, config.SiteWebfolder+boards[post.BoardID-1].Dir+"/res/"+strconv.Itoa(post.ParentID)+".html#"+strconv.Itoa(post.ID), http.StatusFound)
}
} else {
http.Redirect(writer, request, "/"+boards[post.BoardID-1].Dir+"/", http.StatusFound)
http.Redirect(writer, request, config.SiteWebfolder+boards[post.BoardID-1].Dir+"/", http.StatusFound)
}
benchmarkTimer("makePost", startTime, false)
}
@ -1129,7 +611,6 @@ func formatMessage(message string) string {
postLines := strings.Split(message, "<br>")
for i, line := range postLines {
trimmedLine := strings.TrimSpace(line)
//lineWords := regexp.MustCompile("\\s").Split(trimmedLine, -1)
lineWords := strings.Split(trimmedLine, " ")
isGreentext := false // if true, append </span> to end of line
for w, word := range lineWords {
@ -1151,9 +632,9 @@ func formatMessage(message string) string {
if boardDir == "" {
lineWords[w] = "<a href=\"javascript:;\"><strike>" + word + "</strike></a>"
} else if linkParent == 0 {
lineWords[w] = "<a href=\"/" + boardDir + "/res/" + word[8:] + ".html\">" + word + "</a>"
lineWords[w] = "<a href=\"" + config.SiteWebfolder + boardDir + "/res/" + word[8:] + ".html\">" + word + "</a>"
} else {
lineWords[w] = "<a href=\"/" + boardDir + "/res/" + strconv.Itoa(linkParent) + ".html#" + word[8:] + "\">" + word + "</a>"
lineWords[w] = "<a href=\"" + config.SiteWebfolder + boardDir + "/res/" + strconv.Itoa(linkParent) + ".html#" + word[8:] + "\">" + word + "</a>"
}
}
} else if strings.Index(word, gt) == 0 && w == 0 {

View file

@ -112,7 +112,7 @@ func serveErrorPage(writer http.ResponseWriter, err string) {
func (s GochanServer) ServeHTTP(writer http.ResponseWriter, request *http.Request) {
for name, namespaceFunction := range s.namespaces {
if request.URL.Path == "/"+name {
if request.URL.Path == config.SiteWebfolder+name {
// writer.WriteHeader(200)
namespaceFunction(writer, request)
return
@ -133,7 +133,7 @@ func initServer() {
// Check if Akismet API key is usable at startup.
if err = checkAkismetAPIKey(config.AkismetAPIKey); err != nil {
config.AkismetAPIKey = ""
handleError(0, "%s", err.Error())
handleError(0, err.Error())
}
// Compile regex for checking referrers.

View file

@ -68,10 +68,9 @@ func connectToSQLServer() {
if newInstall {
printf(0, "\nThis looks like a new install or one that needs updating, setting up the database...")
if _, err = db.Exec(
"INSERT INTO `" + config.DBname + "`.`" + config.DBprefix + "staff` " +
"(`username`, `password_checksum`, `salt`, `rank`) " +
"VALUES ('admin', '" + bcryptSum("password") + "', 'abc', 3)",
if _, err = db.Exec("INSERT INTO `" + config.DBname + "`.`" + config.DBprefix + "staff` " +
"(`username`, `password_checksum`, `salt`, `rank`) " +
"VALUES ('admin', '" + bcryptSum("password") + "', 'abc', 3)",
); err != nil {
handleError(0, "failed with error: %s\n", customError(err))
os.Exit(2)

View file

@ -9,10 +9,12 @@ export DEBIAN_FRONTEND=noninteractive
export GOCHAN_PATH=/home/vagrant/gochan
export GOPATH=/vagrant/lib
apt-get update
apt-get -y upgrade
apt-get -y install git subversion mercurial golang-1.10 nginx redis-server mariadb-server mariadb-client ffmpeg
export DBTYPE=mysql
apt-get update
if [ "$DBTYPE" == "mysql" ]; then
apt-get -y install mariadb-server mariadb-client
# Make sure any imported database is utf8mb4
# http://mathiasbynens.be/notes/mysql-utf8mb4
# Put in /etc/mysql/conf.d/local.cnf
@ -40,6 +42,26 @@ cat - <<EOF123 >/etc/mysql/conf.d/open.cnf
[mysqld]
bind-address = 0.0.0.0
EOF123
elif [ "$DBTYPE" == "postgresql" ]; then
# apt-get -y install postgresql postgresql-contrib
# useradd gochan
# passwd -d gochan
# sudo -u postgres createuser -d gochan
echo "PostgreSQL not supported yet"
exit 1
elif [ "$DBTYPE" == "mssql" ]; then
echo "Microsoft SQL Server not supported yet";
exit 1
elif [ "$DBTYPE" == "sqlite" ]; then
echo "SQLite not supported yet"
exit 1
else
echo "Invalid DB type: $DBTYPE"
exit 1
fi
apt-get -y install git subversion mercurial golang-1.10 nginx ffmpeg
apt-get -y upgrade
rm -f /etc/nginx/sites-enabled/* /etc/nginx/sites-available/*
ln -sf /vagrant/gochan-fastcgi.nginx /etc/nginx/sites-available/gochan.nginx
@ -102,6 +124,7 @@ go get github.com/disintegration/imaging
go get github.com/nranchev/go-libGeoIP
go get github.com/nyarla/go-crypt
go get github.com/go-sql-driver/mysql
go get github.com/lib/pq
go get golang.org/x/crypto/bcrypt
go get github.com/frustra/bbcode
make debug