print ( " Really fast print from init.lua " )
--Luarocks libraries
local et = require("etlua")
local sql = require("lsqlite3")
local zlib = require("zlib")
--smr code
local cache = require("cache")
local pages = require("pages")
local util = require("util")
local config = require("config")
local db = require("db")
if config.production then
	print = function() end --squash prints (assign the global; a local function here would go out of scope at the "end" below)
end
--[[
local parser_names = {"plain", "imageboard"}
local parsers = {}
for _, v in pairs(parser_names) do
	parsers[v] = require("parser_" .. v)
end
]]
--pages
read_get = require("read_get")
read_post = require("read_post")
preview_post = require("preview_post")
--local db,cache --databases
--local domain = "test.monster:8888" --The domain to write links as
--[[
local pagenames = {
	"index",
	"author_index",
	"claim",
	"paste",
	"edit",
	"read",
	"nostory",
	"cantedit",
	"noauthor",
	"login",
	"author_paste",
	"author_edit",
	"search",
}
local pages = {}
for k, v in pairs(pagenames) do
	print("Compiling page: ", v)
	local f = assert(io.open("pages/" .. v .. ".etlua", "r"))
	pages[v] = assert(et.compile(f:read("*a")))
	f:close()
end
]]
--[=[
local queries = {}
--These are all loaded during startup, won't affect ongoing performance.
setmetatable(queries, {
	__index = function(self, key)
		local f = assert(io.open("sql/" .. key .. ".sql", "r"))
		local ret = f:read("*a")
		f:close()
		return ret
	end
})
]=]
---sql queries
local --[[stmnt_index,]] stmnt_author_index, stmnt_read, stmnt_paste, stmnt_raw
local stmnt_update_views
local stmnt_ins_tag, stmnt_drop_tags, stmnt_get_tags
local stmnt_author_create, stmnt_author_acct, stmnt_author_bio
--local stmnt_cache, stmnt_insert_cache, stmnt_dirty_cache
local stmnt_get_session, stmnt_insert_session
local stmnt_edit, stmnt_update, stmnt_update_raw, stmnt_author_of
local stmnt_comments, stmnt_comment_insert
local stmnt_search
local stmnt_download
--see https://perishablepress.com/stop-using-unsafe-characters-in-urls/
--no underscore because we use that for our operative pages
local url_characters =
	[[abcdefghijklmnopqrstuvwxyz]] ..
	[[ABCDEFGHIJKLMNOPQRSTUVWXYZ]] ..
	[[0123456789]] ..
	[[$-+!*'(),]]
local url_characters_rev = {}
for i = 1, string.len(url_characters) do
	url_characters_rev[string.sub(url_characters, i, i)] = i
end
local function decodeentities(capture)
	local n = tonumber(capture, 16)
	local c = string.char(n)
	if escapes[c] then
		return escapes[c]
	else
		return c
	end
end
--[[
local function sqlassert(...)
	local r, errcode, err = ...
	if not r then
		error(string.format("%d: %s", errcode, err))
	end
	return r
end
]]
local function sqlbind(stmnt, call, position, data)
	assert(call == "bind" or call == "bind_blob", "Bad bind call, call was: " .. call)
	local f = stmnt[call](stmnt, position, data)
	if f ~= sql.OK then
		error(string.format("Failed to %s at %d with %q: %s", call, position, data, db:errmsg()), 2)
	end
end
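--Example: sqlbind(stmnt_paste, "bind_blob", 1, text) behaves like
--stmnt_paste:bind_blob(1, text), except that a non-OK result raises a
--descriptive error at the call site (error level 2) instead of being returned.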
print ( " Hello from init.lua " )
function configure ( )
	--db = sqlassert(sql.open("data/posts.db"))
	----db = sqlassert(sql.open_memory())
	--cache = sqlassert(sql.open_memory())
	--Test that compression works. For some reason, the zlib library
	--fails if this is done as a one-liner
	local msg = "test message"
	local one = zlib.compress(msg)
	local two = zlib.decompress(one)
	assert(two == msg, "zlib not working as expected")
	--Create sql tables
	--assert(db:exec(queries.create_table_authors))
	--Create a fake "anonymous" user so that no one runs into trouble
	--by being able to paste under this account.
	--assert(db:exec(queries.insert_anon_author))
	--If/when an author deletes their account, all posts
	--and comments by that author are also deleted (on
	--delete cascade); this is intentional. This also
	--means that all comments by other users on a post
	--an author makes will also be deleted.
	--
	--Post text uses zlib compression
	--assert(db:exec(queries.create_table_posts))
	--Store the raw text so people can download it later; maybe
	--we can use it for "download as image" or "download as pdf"
	--in the future too. Still stored zlib compressed
	--assert(db:exec(queries.create_table_raw_text))
	--assert(db:exec(queries.create_table_images))
	--assert(db:exec(queries.create_table_comments))
	--assert(db:exec(queries.create_table_tags))
	--assert(db:exec(queries.create_index_tags))
	--Store a cookie for logged in users. Logged in users can edit
	--their own posts.
	--assert(db:exec(queries.create_table_session))
	--print("Created db tables")
	--A cache table to store rendered pages that do not need to be
	--rerendered. In theory this could OOM the program eventually and start
	--swapping to disk. TODO: fixme
	--[=[
	assert(cache:exec([[
		CREATE TABLE IF NOT EXISTS cache (
			path TEXT PRIMARY KEY,
			data BLOB,
			updated INTEGER,
			dirty INTEGER
		);
	]]))
	]=]
	--Select the data we need to display the front page
	--stmnt_index = assert(db:prepare(queries.select_site_index))
	--Select the data we need to read a story (and maybe display an edit
	--button)
	--stmnt_read = assert(db:prepare(queries.select_post))
	--Update the view counter when someone reads a story
	--stmnt_update_views = assert(db:prepare(queries.update_views))
	--Retrieve comments on a story
	--stmnt_comments = assert(db:prepare(queries.select_comments))
	--Add a new comment to a story
	--stmnt_comment_insert = assert(db:prepare(queries.insert_comment))
	--TODO: actually let authors edit their bio
	--[=[stmnt_author_bio = assert(db.conn:prepare([[
		SELECT authors.biography FROM authors WHERE authors.name = :author;
	]]))
	]=]
	--Get the author of a story, used to check when editing that the
	--author really owns the story they're trying to edit
	--stmnt_author_of = assert(db:prepare(queries.select_author_of_post))
	--Get the data we need to display a particular author's latest
	--stories
	--stmnt_author = assert(db:prepare(queries.select_author_index))
	--Get the data we need to check if someone can log in
	--[=[
	stmnt_author_acct = assert(db:prepare([[
		SELECT id, salt, passhash FROM authors WHERE name = :name;
	]]))
	]=]
	--Create a new author on the site
	--stmnt_author_create = assert(db:prepare(queries.insert_author))
	--[=[
	stmnt_author_login = assert(db:prepare([[
		SELECT name, passhash FROM authors WHERE name = :name;
	]]))
	]=]
	--Create a new post
	stmnt_paste = assert(db:prepare(queries.insert_post))
	--Keep a copy of the plain text of a post so we can edit it later.
	--It might also be useful for migrations, if that ever needs to happen
	stmnt_raw = assert(db:prepare(queries.insert_raw))
	--Tags for a story
	--[[
	stmnt_ins_tag = assert(db:prepare(queries.insert_tag))
	stmnt_get_tags = assert(db:prepare(queries.select_tags))
	stmnt_drop_tags = assert(db:prepare(queries.delete_tags))
	]]
	--Get the data we need to display the edit screen
	stmnt_edit = assert(db:prepare(queries.select_edit))
	--Get the data we need when someone wants to download a paste
	stmnt_download = assert(db:prepare(queries.select_download))
	--When we update a post, store the plaintext again
	--stmnt_update_raw = assert(db:prepare(queries.update_raw))
	--Should we really reset the update time every time someone edits a post?
	--Someone could keep their story on the front page by just editing it a lot.
	--If it gets abused I can disable it I guess.
	--stmnt_update = assert(db:prepare(queries.update_post))
	--Check sessions for login support
	stmnt_insert_session = assert(db:prepare(queries.insert_session))
	stmnt_get_session = assert(db:prepare(queries.select_valid_sessions))
	--Search by tag name
	stmnt_search = assert(db:prepare(queries.select_post_tags))
	--only refresh pages at most once every 20 seconds
	--[=[
	stmnt_cache = cache:prepare([[
		SELECT data
		FROM cache
		WHERE
			path = :path AND
			((dirty = 0) OR (strftime('%s','now') - updated) < 20)
		;
	]])
	stmnt_insert_cache = cache:prepare([[
		INSERT OR REPLACE INTO cache (
			path, data, updated, dirty
		) VALUES (
			:path, :data, strftime('%s','now'), 0
		);
	]])
	stmnt_dirty_cache = cache:prepare([[
		UPDATE OR IGNORE cache
		SET dirty = 1
		WHERE path = :path;
	]])
	]=]
	print("finished running configure()")
end
print ( " Created configure function " )
--[[
Find a string url for a number
]]
local function encode_id(number)
	local result = {}
	local charlen = string.len(url_characters)
	repeat
		local pos = (number % charlen) + 1
		number = math.floor(number / charlen)
		table.insert(result, string.sub(url_characters, pos, pos))
	until number == 0
	return table.concat(result)
end
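--Example: with the 71-character alphabet above, encode_id(0) == "a" and
--encode_id(71) == "ab"; ids are little-endian base-71 strings, reversed
--by decode_id below.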
local function decode_id(s)
	local res, id = pcall(function()
		local n = 0
		local charlen = string.len(url_characters)
		for i = 1, string.len(s) do
			local char = string.sub(s, i, i)
			local pos = url_characters_rev[char] - 1
			n = n + (pos * math.pow(charlen, i - 1))
		end
		return n
	end)
	if res then
		return id
	else
		error("Failed to decode id: " .. s)
	end
end
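--do_sql below steps a statement to completion; on sql.BUSY it yields, on
--the assumption that the server resumes this handler coroutine later so
--the step can be retried without blocking other requests.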
local function do_sql(stmnt)
	if not stmnt then error("No statement", 2) end
	local err
	repeat
		err = stmnt:step()
		print("After stepping, err is", err)
		if err == sql.BUSY then
			coroutine.yield()
		end
	until err ~= sql.BUSY
	return err
end
local function get_tags(id)
	local ret = {}
	stmnt_get_tags:bind_names{
		id = id
	}
	local err
	repeat
		err = stmnt_get_tags:step()
		if err == sql.BUSY then
			coroutine.yield()
		elseif err == sql.ROW then
			table.insert(ret, stmnt_get_tags:get_value(0))
		elseif err == sql.DONE then
			stmnt_get_tags:reset()
			return ret
		else
			error(string.format("Failed to get tags for story %d: %d", id, err))
		end
	until false
end
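--get_tags returns a plain array of tag strings for one post id; home,
--read_story, edit and search all pass the result straight to the page
--templates.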
--[=[
local function dirty_cache(url)
	print("Dirtying cache: ", url)
	stmnt_dirty_cache:bind_names{
		path = url
	}
	err = do_sql(stmnt_dirty_cache)
	stmnt_dirty_cache:reset()
end
]=]
--[[
Start a session for someone who logged in
]]
local function start_session(who)
	local rngf = assert(io.open("/dev/urandom", "rb"))
	local session_t = {}
	for i = 1, 64 do
		local r = string.byte(rngf:read(1))
		local s = string.char((r % 26) + 65)
		table.insert(session_t, s)
	end
	local session = table.concat(session_t)
	rngf:close()
	print("sessionid: ", session)
	print("authorid: ", who)
	stmnt_insert_session:bind_names{
		sessionid = session,
		authorid = who
	}
	local err = do_sql(stmnt_insert_session)
	stmnt_insert_session:reset()
	print("Err: ", err)
	assert(err == sql.DONE)
	return session
end
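--A session id is 64 random uppercase letters, roughly 300 bits of entropy
--(slightly less, since (r % 26) over single bytes is not perfectly
--uniform), so session ids are not practically guessable.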
--[[
Retrieve the name and authorid of the logged in person,
or nil + error message if not logged in
]]
local function get_session(req)
	http_populate_cookies(req)
	local sessionid = http_request_cookie(req, "session")
	if sessionid == nil then
		return nil, "No session cookie passed by client"
	end
	stmnt_get_session:bind_names{
		key = sessionid
	}
	local err = do_sql(stmnt_get_session)
	if err ~= sql.ROW then
		stmnt_get_session:reset()
		return nil, "No such session by logged in users"
	end
	print("get session err: ", err)
	local data = stmnt_get_session:get_values()
	stmnt_get_session:reset()
	local author = data[1]
	local authorid = data[2]
	return author, authorid
end
--[=[
--Render a page, with caching. If you need to dirty a cache, call dirty_cache()
local function render(pagename, callback)
	print("Running render...")
	stmnt_cache:bind_names{path = pagename}
	local err = do_sql(stmnt_cache)
	if err == sql.DONE then
		stmnt_cache:reset()
		--page is not cached
	elseif err == sql.ROW then
		print("Cache hit: " .. pagename)
		data = stmnt_cache:get_values()
		stmnt_cache:reset()
		return data[1]
	else --sql.ERROR or sql.MISUSE
		error("Failed to check cache for page " .. pagename)
	end
	--We didn't have the page cached, render it
	print("Cache miss, running function")
	local text = callback()
	print("Saving data...")
	--And save the data back into the cache
	stmnt_insert_cache:bind_names{
		path = pagename,
		data = text,
	}
	err = do_sql(stmnt_insert_cache)
	if err == sql.ERROR or err == sql.MISUSE then
		error("Failed to update cache for page " .. pagename)
	end
	stmnt_insert_cache:reset()
	return text
end
]=]
--[=[
--[[Parses a semicolon separated string into its parts, trims whitespace, lowercases, and capitalizes the first letter. Tags will not be empty. Returns an array of tags]]
local function parse_tags(str)
	local tags = {}
	for tag in string.gmatch(str, "([^;]+)") do
		assert(tag, "Found a nil or false tag in: " .. str)
		local tag_trimmed = string.match(tag, "%s*(.*)%s*")
		local tag_lower = string.lower(tag_trimmed)
		local tag_capitalized = string.gsub(tag_lower, "^%w", string.upper)
		assert(tag_capitalized, "After processing tag: " .. tag .. " it was falsey.")
		if string.len(tag_capitalized) > 0 then
			table.insert(tags, tag_capitalized)
		end
	end
	return tags
end
]=]
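--The page handlers below share one shape: build a cache key from the
--host/path, then cache.render(key, fn) either returns the cached page or
--calls fn to build and store it; cache.dirty(key) invalidates a key after
--a write (see the cache module for the implementation).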
function home(req)
	print("Hello from lua!")
	print("Method: ", http_method_text(req))
	local method = http_method_text(req)
	local host = http_request_get_host(req)
	local path = http_request_get_path(req)
	local text
	if host == domain then
		--Default home page
		text = cache.render(string.format("%s", domain), function()
			print("Cache miss, rendering index")
			stmnt_index:bind_names{}
			local err = do_sql(stmnt_index)
			local latest = {}
			--err may be sql.ROW or sql.DONE if we don't have any stories yet
			while err == sql.ROW do
				local data = stmnt_index:get_values()
				local tags = get_tags(data[1])
				table.insert(latest, {
					url = encode_id(data[1]),
					title = data[2],
					isanon = data[3] == 1,
					posted = os.date("%B %d %Y", tonumber(data[4])),
					author = data[5],
					tags = tags,
				})
				err = stmnt_index:step()
			end
			stmnt_index:reset()
			return pages.index{
				domain = domain,
				stories = latest
			}
		end)
	else
		--Home page for an author
		local subdomain = host:match("([^%.]+)")
		text = cache.render(string.format("%s.%s", subdomain, domain), function()
			print("Cache miss, rendering author: " .. subdomain)
			stmnt_author_bio:bind_names{author = subdomain}
			local err = do_sql(stmnt_author_bio)
			if err == sql.DONE then
				print("No such author")
				stmnt_author_bio:reset()
				return pages.noauthor{
					author = subdomain
				}
			end
			print("err: ", err)
			assert(err == sql.ROW, "failed to get author: " .. subdomain .. " error: " .. tostring(err))
			local data = stmnt_author_bio:get_values()
			local bio = data[1]
			stmnt_author_bio:reset()
			print("Getting author's stories")
			stmnt_author:bind_names{author = subdomain}
			err = do_sql(stmnt_author)
			print("err: ", err)
			local stories = {}
			while err == sql.ROW do
				local data = stmnt_author:get_values()
				local id, title, time = unpack(data)
				local tags = get_tags(id)
				table.insert(stories, {
					url = encode_id(id),
					title = title,
					posted = os.date("%B %d %Y", tonumber(time)),
					tags = tags,
				})
				err = stmnt_author:step()
			end
			stmnt_author:reset()
			return pages.author_index{
				domain = domain,
				author = subdomain,
				stories = stories,
				bio = bio
			}
		end)
	end
	assert(text)
	http_response(req, 200, text)
end
--We prevent people from changing their password file; this way we don't really
--need to worry about logged in accounts being hijacked if someone gets at the
--database. The attacker can still paste & edit from the logged in account for
--a while, but whatever.
function claim(req)
	local method = http_method_text(req)
	local host = http_request_get_host(req)
	local path = http_request_get_path(req)
	if host ~= domain then
		http_response_header(req, "Location", string.format("https://%s/_claim", domain))
		http_response(req, 303, "")
		return
	end
	assert(host == domain)
	local text
	if method == "GET" then
		--Get the page to claim a name
		text = cache.render(string.format("%s/_claim", domain), function()
			print("cache miss, rendering claim page")
			return pages.claim{err = ""}
		end)
	elseif method == "POST" then
		--Actually claim a name
		http_request_populate_post(req)
		local name = assert(http_argument_get_string(req, "user"))
		--What in the world, Kore should be rejecting names that
		--are not lower case & no symbols, but some still get through somehow.
		if not name:match("^[a-z0-9]*$") then
			print("Bad username: ", name)
			text = pages.claim{
				err = "Usernames must match ^[a-z0-9]{1,30}$"
			}
			http_response(req, 200, text)
			return
		end
		local rngf = assert(io.open("/dev/urandom", "rb"))
		local passlength = string.byte(rngf:read(1)) + 64
		local salt = rngf:read(64)
		local password = rngf:read(passlength)
		rngf:close()
		local hash = sha3(salt .. password)
		stmnt_author_create:bind_names{
			name = name,
		}
		stmnt_author_create:bind_blob(2, salt)
		stmnt_author_create:bind_blob(3, hash)
		local err = do_sql(stmnt_author_create)
		if err == sql.DONE then
			--We successfully made the new author
			local id = stmnt_author_create:last_insert_rowid()
			stmnt_author_create:reset()
			--Give them a file back
			http_response_header(req, "Content-Type", "application/octet-stream")
			http_response_header(req, "Content-Disposition", "attachment; filename=\"" .. name .. "." .. domain .. ".passfile\"")
			local session = start_session(id)
			text = password
		elseif err == sql.CONSTRAINT then
			--If the creation failed, they probably just tried
			--to use a name that was already taken
			text = pages.claim{
				err = "Failed to claim. That name may already be taken."
			}
		elseif err == sql.ERROR or err == sql.MISUSE then
			--This is bad though
			text = pages.claim{
				err = "Failed to claim"
			}
		end
		stmnt_author_create:reset()
	end
	assert(text)
	http_response(req, 200, text)
end
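--Design note: account "passwords" are machine-generated. Claiming a name
--hands back a passfile of 64-319 random bytes; login() later receives that
--file as an upload and checks sha3(salt .. passfile) against the stored hash.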
function paste(req)
	local host = http_request_get_host(req)
	local path = http_request_get_path(req)
	local method = http_method_text(req)
	local err
	local ret
	if method == "GET" then
		--Get the paste page
		if host == domain then
			local author, _ = get_session(req)
			if author then
				http_response_header(req, "Location", string.format("https://%s.%s/_paste", author, domain))
				http_response(req, 303, "")
				return
			else
				--For an anonymous user
				ret = cache.render(string.format("%s/_paste", host), function()
					print("Cache missing, rendering post page")
					return pages.paste{
						domain = domain,
						err = "",
					}
				end)
			end
		else
			--Or for someone that's logged in
			print("Looks like a logged in user wants to paste!")
			local subdomain = host:match("([^%.]+)")
			local author, _ = get_session(req)
			print("subdomain: ", subdomain, " author: ", author)
			--If they try to paste as an author, but are on the
			--wrong subdomain, or are not logged in, redirect them
			--to the right place: their own subdomain for authors,
			--or the anonymous paste page for not logged in users.
			if author == nil then
				http_response_header(req, "Location", "https://" .. domain .. "/_paste")
				http_response(req, 303, "")
				return
			end
			if author ~= subdomain then
				http_response_header(req, "Location", string.format("https://%s.%s/_paste", author, domain))
				http_response(req, 303, "")
				return
			end
			assert(author == subdomain, "someone wants to paste as someone else")
			--We're where we want to be, serve up this user's
			--paste page. No cache, because how often is a user
			--going to paste?
			ret = pages.author_paste{
				domain = domain,
				user = author,
				text = "",
				err = "",
			}
		end
	elseif method == "POST" then
		--We're creating a new paste
		http_request_populate_post(req)
		local title = assert(http_argument_get_string(req, "title"))
		local text = assert(http_argument_get_string(req, "text"))
		local markup = assert(http_argument_get_string(req, "markup"))
		local tag_str = http_argument_get_string(req, "tags")
		local tags = {}
		if tag_str then
			tags = parse_tags(tag_str)
		end
		local pasteas
		local raw = zlib.compress(text)
		text = string.gsub(text, "%%(%x%x)", decodeentities)
		text = parsers[markup](text)
		text = zlib.compress(text)
		local esctitle = string.gsub(title, "%%(%x%x)", decodeentities)
		--Always sanitize the title with the plain parser. No markup
		--in the title.
		esctitle = parsers.plain(title)
		if host == domain then
			--Public paste
			--[[
			This doesn't actually do much for IPv4 addresses,
			since there are only 32 bits of address. Someone who
			got a copy of the database could
			just generate all 2^32 hashes and look up who posted
			what. Use IPv6, Tor or I2P where possible. (but then I
			guess it's harder to ban spammers... hmm..)
			]]
			--local ip = http_request_get_ip(req)
			--local iphash = sha3(ip)
			--Don't store this information for now, until I come up
			--with a more elegant solution.
			sqlbind(stmnt_paste, "bind_blob", 1, text)
			--assert(stmnt_paste:bind_blob(1,text) == sql.OK)
			sqlbind(stmnt_paste, "bind", 2, esctitle)
			--assert(stmnt_paste:bind(2,esctitle) == sql.OK)
			sqlbind(stmnt_paste, "bind", 3, -1)
			--assert(stmnt_paste:bind(3,-1) == sql.OK)
			sqlbind(stmnt_paste, "bind", 4, true)
			--assert(stmnt_paste:bind(4,true) == sql.OK)
			sqlbind(stmnt_paste, "bind_blob", 5, "")
			--assert(stmnt_paste:bind_blob(5,"") == sql.OK)
			err = do_sql(stmnt_paste)
			stmnt_paste:reset()
			if err == sql.DONE then
				local rowid = stmnt_paste:last_insert_rowid()
				assert(stmnt_raw:bind(1, rowid) == sql.OK)
				assert(stmnt_raw:bind_blob(2, raw) == sql.OK)
				assert(stmnt_raw:bind(3, markup) == sql.OK)
				err = do_sql(stmnt_raw)
				stmnt_raw:reset()
				if err ~= sql.DONE then
					print("Failed to save raw text, but paste still went through")
				end
				for _, tag in pairs(tags) do
					print("tag 1: ", stmnt_ins_tag:bind(1, rowid))
					print("Looking at tag ", tag)
					print("tag 2: ", stmnt_ins_tag:bind(2, tag))
					err = do_sql(stmnt_ins_tag)
					stmnt_ins_tag:reset()
				end
				local url = encode_id(rowid)
				local loc = string.format("https://%s/%s", domain, url)
				http_response_header(req, "Location", loc)
				http_response(req, 303, "")
				cache.dirty(string.format("%s/%s", domain, url))
				cache.dirty(string.format("%s", domain))
				return
			elseif err == sql.ERROR or err == sql.MISUSE then
				ret = "Failed to paste: " .. tostring(err)
			else
				error("Error pasting: " .. tostring(err))
			end
			stmnt_paste:reset()
		else
			--Author paste
			local subdomain = host:match("([^%.]+)")
			local author, authorid = get_session(req)
			if author == nil then
				ret = pages.author_paste{
					domain = domain,
					author = subdomain,
					err = "You are not logged in, you must be logged in to post as " .. subdomain .. ".",
					text = text
				}
				http_response(req, 200, ret)
				return
			end
			local asanon = assert(http_argument_get_string(req, "pasteas"))
			--No need to check if the author is posting to the
			--"right" subdomain, just post it to the one they have
			--the session key for.
			assert(stmnt_paste:bind_blob(1, text) == sql.OK)
			assert(stmnt_paste:bind(2, esctitle) == sql.OK)
			assert(stmnt_paste:bind(3, authorid) == sql.OK)
			if asanon == "anonymous" then
				assert(stmnt_paste:bind(4, true) == sql.OK)
			else
				assert(stmnt_paste:bind(4, false) == sql.OK)
			end
			assert(stmnt_paste:bind_blob(5, "") == sql.OK)
			err = do_sql(stmnt_paste)
			stmnt_paste:reset()
			if err == sql.DONE then
				local rowid = stmnt_paste:last_insert_rowid()
				assert(stmnt_raw:bind(1, rowid) == sql.OK)
				assert(stmnt_raw:bind_blob(2, raw) == sql.OK)
				assert(stmnt_raw:bind(3, markup) == sql.OK)
				err = do_sql(stmnt_raw)
				stmnt_raw:reset()
				if err ~= sql.DONE then
					print("Failed to save raw text, but paste still went through")
				end
				for _, tag in pairs(tags) do
					print("tag 1: ", stmnt_ins_tag:bind(1, rowid))
					print("Looking at tag ", tag)
					print("tag 2: ", stmnt_ins_tag:bind(2, tag))
					err = do_sql(stmnt_ins_tag)
					stmnt_ins_tag:reset()
				end
				local url = encode_id(rowid)
				local loc
				if asanon == "anonymous" then
					loc = string.format("https://%s/%s", domain, url)
				else
					loc = string.format("https://%s.%s/%s", author, domain, url)
				end
				http_response_header(req, "Location", loc)
				http_response(req, 303, "")
				cache.dirty(string.format("%s.%s", author, domain))
				cache.dirty(string.format("%s/%s", domain, url))
				cache.dirty(string.format("%s", domain))
				return
			elseif err == sql.ERROR or err == sql.MISUSE then
				ret = "Failed to paste: " .. tostring(err)
			else
				error("Error pasting: " .. tostring(err))
			end
			stmnt_paste:reset()
		end
	end
	assert(ret)
	http_response(req, 200, ret)
end
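--For reference, stmnt_paste's positional binds above are: 1 = compressed
--parsed text, 2 = escaped title, 3 = author id (-1 = anonymous), 4 = isanon
--flag, 5 = an empty blob where the hashed poster ip would go (see the
--comment in the public-paste branch).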
--A helper function for read(), below
local function read_story(host, path, idp, show_comments, iam)
	local cachestr
	if show_comments then
		cachestr = string.format("%s%s?comments=1", host, path)
	else
		cachestr = string.format("%s%s", host, path)
	end
	local id = decode_id(idp)
	stmnt_update_views:bind_names{
		id = id
	}
	print("update: ", do_sql(stmnt_update_views))
	stmnt_update_views:reset()
	cache.dirty(cachestr)
	print("cachestr was: ", cachestr)
	local readstoryf = function()
		stmnt_read:bind_names{
			id = id
		}
		local err = do_sql(stmnt_read)
		if err == sql.DONE then
			stmnt_read:reset()
			return pages.nostory{
				path = path
			}
		end
		local tags = get_tags(id)
		assert(err == sql.ROW, "Could not get row: " .. tostring(id) .. " Error: " .. tostring(err))
		local title, text, authorid, isanon, authorname, views = unpack(stmnt_read:get_values())
		stmnt_comments:bind_names{
			id = id
		}
		err = do_sql(stmnt_comments)
		local comments = {}
		while err ~= sql.DONE do
			local com_author, com_isanon, com_text = unpack(stmnt_comments:get_values())
			table.insert(comments, {
				author = com_author,
				isanon = com_isanon == 1, --int to boolean
				text = com_text
			})
			err = stmnt_comments:step()
		end
		stmnt_comments:reset()
		text = zlib.decompress(text)
		stmnt_read:reset()
		return pages.read{
			domain = domain,
			title = title,
			text = text,
			idp = idp,
			isanon = isanon == 1,
			author = authorname,
			comments = comments,
			show_comments = show_comments,
			iam = iam,
			tags = tags,
			views = views,
		}
	end
	--Don't cache if we're logged in, someone might see dirty cache information on the page.
	--(I.e. when the user has loaded comments, the form to post a comment may contain a username
	--which is not the user's, from whoever loaded the cache last.) To fix this bug, don't cache
	--pages when the user is logged in. All non-logged-in users can see the same page no problem.
	if not iam then
		return cache.render(cachestr, readstoryf)
	else
		return readstoryf()
	end
end
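--Note the ordering in read_story: the view counter is bumped and the cache
--entry dirtied up front, so even a previously cached page is re-rendered
--with the fresh view count for this request.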
function read(req)
	local host = http_request_get_host(req)
	local path = http_request_get_path(req)
	local method = http_method_text(req)
	if method == "GET" then
		read_get(req)
		--[=[
		local idp = string.sub(path, 2) --remove leading "/"
		assert(string.len(path) > 0, "Tried to read 0-length story id")
		local author, authorid = get_session(req)
		http_request_populate_qs(req)
		local show_comments = http_argument_get_string(req, "comments")
		--parameters needed for the read page
		local text
		if author then
			--We're logged in as someone
			local id = decode_id(idp)
			stmnt_read:bind_names{
				id = id
			}
			local err = do_sql(stmnt_read)
			local tags = get_tags(id)
			if err == sql.DONE then
				--We got no story
				stmnt_read:reset()
				text = pages.nostory{
					path = path
				}
			else
				--If we can edit this story, we don't want to cache
				--the page, since it'll have an edit button on it.
				assert(err == sql.ROW)
				local title, storytext, tauthor, isanon, authorname, views = unpack(stmnt_read:get_values())
				stmnt_update_views:bind_names{
					id = id
				}
				print("update: ", do_sql(stmnt_update_views))
				stmnt_update_views:reset()
				storytext = zlib.decompress(storytext)
				stmnt_read:reset()
				if tauthor == authorid then
					--The story exists and we're logged in as the
					--owner, display the edit button
					text = pages.read{
						domain = domain,
						title = title,
						text = storytext,
						idp = idp,
						isanon = isanon == 1,
						author = authorname,
						iam = authorname,
						owner = true,
						tags = tags,
						views = views,
					}
				else
					text = read_story(host, path, idp, show_comments, author)
				end
			end
		else
			--We're not logged in as anyone
			http_request_populate_qs(req)
			text = read_story(host, path, idp, show_comments, author)
		end
		assert(text)
		http_response(req, 200, text)
		return
		]=]
	elseif method == "POST" then
		read_post(req)
		--[=[
		--We're posting a comment
		http_request_populate_post(req)
		http_populate_cookies(req)
		local author, authorid = get_session(req)
		local comment_text = assert(http_argument_get_string(req, "text"))
		local pasteas = assert(http_argument_get_string(req, "postas"))
		local idp = string.sub(path, 2) --remove leading "/"
		local id = decode_id(idp)
		local isanon = 1
		--Even if an author is logged in, they may post their comment anonymously
		if author and pasteas ~= "Anonymous" then
			isanon = 0
		end
		stmnt_comment_insert:bind_names{
			postid = id,
			authorid = author and authorid or -1,
			isanon = isanon,
			comment_text = comment_text,
		}
		local err = do_sql(stmnt_comment_insert)
		stmnt_comment_insert:reset()
		if err ~= sql.DONE then
			http_response(req, 500, "Internal error, failed to post comment. Go back and try again.")
		else
			--When we post a comment, we need to dirty the cache for the "comments displayed" page.
			cache.dirty(string.format("%s%s?comments=1", host, path))
			local redir = string.format("https://%s%s?comments=1", domain, path)
			http_response_header(req, "Location", redir)
			http_response(req, 303, "")
		end
		]=]
	end
end
function login(req)
	local host = http_request_get_host(req)
	local path = http_request_get_path(req)
	local method = http_method_text(req)
	if host ~= domain then
		--Don't allow logging into subdomains, I guess
		http_response_header(req, "Location", string.format("https://%s/_login", domain))
		http_response(req, 303, "")
		return
	end
	local text
	if method == "GET" then
		--Just give them the login page
		text = cache.render(string.format("%s/_login", domain), function()
			return pages.login{
				err = "",
			}
		end)
	elseif method == "POST" then
		--Try to log in
		http_populate_multipart_form(req)
		local name = assert(http_argument_get_string(req, "user"))
		local pass = assert(http_file_get(req, "pass"))
		stmnt_author_acct:bind_names{
			name = name
		}
		local err = do_sql(stmnt_author_acct)
		if err == sql.ROW then
			local id, salt, passhash = unpack(stmnt_author_acct:get_values())
			stmnt_author_acct:reset()
			local todigest = salt .. pass
			local hash = sha3(todigest)
			if hash == passhash then
				local session = start_session(id)
				http_response_cookie(req, "session", session, "/", 0, 0)
				local loc = string.format("https://%s.%s", name, domain)
				http_response_header(req, "Location", loc)
				http_response(req, 303, "")
				return
			else
				text = pages.login{
					err = "Incorrect username or password"
				}
			end
		elseif err == sql.DONE then --Allows user enumeration, do we want this?
			--Probably not a problem since all passwords are forced to be "good"
			stmnt_author_acct:reset()
			text = pages.login{
				err = "Failed to find user: " .. name
			}
		else
			stmnt_author_acct:reset()
			error("Other sql error during login")
		end
	end
	assert(text)
	http_response(req, 200, text)
end
--Edit a story
function edit(req)
	local host = http_request_get_host(req)
	local path = http_request_get_path(req)
	local method = http_method_text(req)
	local author, author_id = get_session(req)
	local ret
	if method == "GET" then
		http_request_populate_qs(req)
		local story = assert(http_argument_get_string(req, "story"))
		local story_id = decode_id(story)
		print("we want to edit story: ", story)
		--Check that the logged in user is the owner of the story
		--sql-side. If we're not the owner, we'll get 0 rows back.
		stmnt_edit:bind_names{
			postid = story_id,
			authorid = author_id
		}
		local err = do_sql(stmnt_edit)
		if err == sql.DONE then
			--No rows; we're probably not the owner (it might
			--also be because there's no such story)
			ret = pages.cantedit{
				path = story,
			}
			stmnt_edit:reset()
			http_response(req, 200, ret)
			return
		end
		assert(err == sql.ROW)
		local data = stmnt_edit:get_values()
		local txt_compressed, markup, isanon, title = unpack(data)
		local text = zlib.decompress(txt_compressed)
		local tags = get_tags(story_id)
		local tags_txt = table.concat(tags, ";")
		stmnt_edit:reset()
		ret = pages.edit{
			title = title,
			text = text,
			markup = markup,
			user = author,
			isanon = isanon == 1,
			domain = domain,
			story = story_id,
			err = "",
			tags = tags_txt
		}
	elseif method == "POST" then
		http_request_populate_post(req)
		local storyid = tonumber(assert(http_argument_get_string(req, "story")))
		local title = assert(http_argument_get_string(req, "title"))
		local text = assert(http_argument_get_string(req, "text"))
		local pasteas = assert(http_argument_get_string(req, "pasteas"))
		local markup = assert(http_argument_get_string(req, "markup"))
		local tags_str = http_argument_get_string(req, "tags")
		stmnt_author_of:bind_names{
			id = storyid
		}
		local err = do_sql(stmnt_author_of)
		if err ~= sql.ROW then
			stmnt_author_of:reset()
			error("No author found for story: " .. storyid)
		end
		local data = stmnt_author_of:get_values()
		stmnt_author_of:reset()
		local realauthor = data[1]
		--Make sure the author of the story is the currently logged in user
		assert(realauthor == author_id)
		local parsed = parsers[markup](text)
		local compr_raw = zlib.compress(text)
		local compr = zlib.compress(parsed)
		local tags = {}
		if tags_str then
			tags = parse_tags(tags_str)
		end
		assert(stmnt_update_raw:bind_blob(1, compr_raw) == sql.OK)
		assert(stmnt_update_raw:bind(2, markup) == sql.OK)
		assert(stmnt_update_raw:bind(3, storyid) == sql.OK)
		assert(do_sql(stmnt_update_raw) == sql.DONE, "Failed to update raw")
		stmnt_update_raw:reset()
		assert(stmnt_update:bind(1, title) == sql.OK)
		assert(stmnt_update:bind_blob(2, compr) == sql.OK)
		assert(stmnt_update:bind(3, pasteas == "anonymous" and 1 or 0) == sql.OK)
		assert(stmnt_update:bind(4, storyid) == sql.OK)
		assert(do_sql(stmnt_update) == sql.DONE, "Failed to update text")
		stmnt_update:reset()
		assert(stmnt_drop_tags:bind_names{postid = storyid} == sql.OK)
		do_sql(stmnt_drop_tags)
		stmnt_drop_tags:reset()
		for _, tag in pairs(tags) do
			print("Looking at tag ", tag)
			assert(stmnt_ins_tag:bind(1, storyid) == sql.OK)
			assert(stmnt_ins_tag:bind(2, tag) == sql.OK)
			err = do_sql(stmnt_ins_tag)
			stmnt_ins_tag:reset()
		end
		local id_enc = encode_id(storyid)
		local loc = string.format("https://%s/%s", domain, id_enc)
		--The place to read this post
		cache.dirty(string.format("%s/%s", domain, id_enc))
		--The site index (e.g. if the author changed the paste from theirs to
		--"Anonymous", the cache should reflect that).
		cache.dirty(string.format("%s", domain))
		--The author's index, same reasoning as above.
		cache.dirty(string.format("%s.%s", author, domain))
		http_response_header(req, "Location", loc)
		http_response(req, 303, "")
		return
	end
	assert(ret)
	http_response(req, 200, ret)
end
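--Tag edits above are replace-on-write: stmnt_drop_tags deletes every tag
--row for the post, then the freshly parsed tag list is re-inserted one
--row at a time.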
--TODO
function edit_bio()
	print("we want to edit bio")
end
function teardown()
	print("Exiting...")
	if db then
		db.close()
	end
	if cache then
		cache.close()
	end
	print("Finished lua teardown")
end
function download(req)
	local host = http_request_get_host(req)
	local path = http_request_get_path(req)
	print("host: ", host, " path: ", path)
	http_request_populate_qs(req)
	local story = assert(http_argument_get_string(req, "story"))
	local story_id = decode_id(story)
	print("Downloading ", story_id)
	stmnt_download:bind_names{
		postid = story_id
	}
	local err = do_sql(stmnt_download)
	if err == sql.DONE then
		--No rows, story not found
		http_response(req, 404, pages.nostory{path = story})
		stmnt_download:reset()
		return
	end
	assert(err == sql.ROW, "after doing download sql, result was not a row, was: " .. tostring(err))
	local txt_compressed, title = unpack(stmnt_download:get_values())
	local text = zlib.decompress(txt_compressed)
	stmnt_download:reset()
	http_response_header(req, "Content-Type", "application/octet-stream")
	local nicetitle = title:gsub("%W", "_")
	http_response_header(req, "Content-Disposition", "attachment; filename=\"" .. nicetitle .. ".txt\"")
	http_response(req, 200, text)
end
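--nicetitle replaces every non-word character (%W) in the title with "_",
--keeping the suggested filename safe to embed in the Content-Disposition
--header.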
function preview(req)
	preview_post(req)
	--[[
	print("We want to preview a paste!")
	local host = http_request_get_host(req)
	local path = http_request_get_path(req)
	http_request_populate_post(req)
	local title = assert(http_argument_get_string(req, "title"))
	local text = assert(http_argument_get_string(req, "text"))
	local markup = assert(http_argument_get_string(req, "markup"))
	local tag_str = http_argument_get_string(req, "tags")
	local tags = {}
	if tag_str then
		tags = parse_tags(tag_str)
	end
	print("title: ", title, " text: ", text, " markup: ", markup)
	local parsed = parsers[markup](text)
	local ret = pages.read{
		domain = domain,
		title = title,
		author = "preview",
		idp = "preview",
		text = parsed,
		tags = tags,
	}
	http_response(req, 200, ret)
	]]
end
function search(req)
	local host = http_request_get_host(req)
	local path = http_request_get_path(req)
	http_request_populate_qs(req)
	local tag = http_argument_get_string(req, "tag")
	if tag then
		stmnt_search:bind_names{
			tag = tag
		}
		local results = {}
		local err
		repeat
			err = stmnt_search:step()
			if err == sql.BUSY then
				coroutine.yield()
			elseif err == sql.ROW then
				local id, title, anon, time, author = unpack(stmnt_search:get_values())
				local idp = encode_id(id)
				local tags = get_tags(id)
				table.insert(results, {
					id = idp,
					title = title,
					anon = anon,
					time = os.date("%B %d %Y", tonumber(time)),
					author = author,
					tags = tags
				})
			elseif err == sql.DONE then
				stmnt_search:reset()
			else
				error("Failed to search, sql error: " .. tostring(err))
			end
		until err == sql.DONE
		local ret = pages.search{
			domain = domain,
			results = results,
			tag = tag,
		}
		http_response(req, 200, ret)
	end
end
print ( " Done with init.lua " )