Compare commits
129 Commits
hotfix_tag
...
master
Author | SHA1 | Date |
---|---|---|
Robin Malley | be259dc9f4 | |
Robin Malley | 0a6829ec9f | |
Robin Malley | 8c7a1927c1 | |
Robin Malley | e4f8f0914f | |
Robin Malley | 3d06a48939 | |
Robin Malley | 2680dff0f3 | |
Robin Malley | 4ff9f5bb07 | |
Robin Malley | 93cee859ce | |
Robin Malley | 665616f1e3 | |
Robin Malley | 677f05d69c | |
Robin Malley | e430d9b512 | |
Robin Malley | 60213086dc | |
Robin Malley | 68e73aba3e | |
Robin Malley | fea953fdef | |
Robin Malley | fee8997e80 | |
Robin Malley | 4e3bd03af4 | |
Robin Malley | 1adc547efe | |
Robin Malley | fb553be69a | |
Robin Malley | 354c019a2d | |
Robin Malley | 50cfdc584c | |
Robin Malley | e63ce61ca4 | |
Robin Malley | 3bb9e5e4f3 | |
Robin Malley | 4da25d600c | |
Robin Malley | 78eb69afd0 | |
Robin Malley | de63a8660c | |
Robin Malley | adda5ab547 | |
Robin Malley | f83bf55d3f | |
Robin Malley | 5c5637ff40 | |
Robin Malley | 76c462cb60 | |
Robin Malley | 652e673d39 | |
Robin Malley | 443a6331e6 | |
Robin Malley | 66b48f06a4 | |
Robin Malley | 87bccb959d | |
Robin Malley | 4514a42cfc | |
Robin Malley | fc7d3f9f79 | |
Robin Malley | 39c7f2f638 | |
Robin Malley | 1930ade3f7 | |
Robin Malley | 8b03c78346 | |
Robin Malley | afe144d554 | |
Robin Malley | 720e826d4d | |
Robin Malley | 9daf7e90cd | |
Robin Malley | 411bcb494d | |
Robin Malley | e25d2fd06a | |
Robin Malley | 3431daee0b | |
Robin Malley | 223cfb9e46 | |
Robin Malley | 49d0b0a397 | |
Robin Malley | d5ec6d6864 | |
Robin Malley | f5c729bfde | |
Robin Malley | f0c7ae13fe | |
Robin Malley | acebec5d73 | |
Robin Malley | 872760c9ff | |
Robin Malley | 040587701e | |
Robin Malley | ec6aed9866 | |
Robin Malley | 647e7f2ac2 | |
Robin Malley | 3b1d3dd910 | |
Robin Malley | e0fabca908 | |
Robin Malley | a0c8907f71 | |
Robin Malley | 77d8c0e66b | |
Robin Malley | 138cf12028 | |
Robin Malley | 87556f77cc | |
Robin Malley | 63e2b0b663 | |
Robin Malley | ac3fc81741 | |
Robin Malley | 9667aa1c3e | |
Robin Malley | 16054156a1 | |
Robin Malley | 68561443a5 | |
Robin Malley | 069c75b72e | |
Robin Malley | 81ad49ae80 | |
Robin Malley | 680a341db5 | |
Robin Malley | 296777d3fc | |
Robin Malley | 1487835478 | |
Robin Malley | e0a8b3d60a | |
Robin Malley | de76d31fe8 | |
Robin Malley | 37a9bbd63d | |
Robin Malley | d5a3197262 | |
Robin Malley | c00903505b | |
Robin Malley | 9dcc743199 | |
Robin Malley | 6398e97498 | |
Robin Malley | 4e0a23ee95 | |
Robin Malley | 1ddd446297 | |
Robin Malley | 9444d300b8 | |
Robin Malley | 3bd07ebf6a | |
Robin Malley | a16f2dfe02 | |
Robin Malley | ffc34295e9 | |
Robin Malley | e0a8e01513 | |
Robin Malley | ab6572314e | |
Robin Malley | 41f68f45b8 | |
Robin Malley | d11695b5eb | |
Robin Malley | eac2a38c6c | |
Robin Malley | 4913e7765e | |
Robin Malley | f88ec0e22a | |
Robin Malley | 7e5e38c3f2 | |
Robin Malley | e3468136e5 | |
Robin Malley | 3db891800b | |
Robin Malley | 3b6a631dc4 | |
Robin Malley | e5d1904b1f | |
Robin Malley | 7cc5e8d0ef | |
Robin Malley | 9e51de6c8e | |
Robin Malley | fd87cf95ee | |
Robin Malley | fdf0b67f3a | |
Robin Malley | 33a23ef20c | |
Robin Malley | 58565bc088 | |
Robin Malley | 73df8d400e | |
Robin Malley | 53b1a19c05 | |
Robin Malley | 55923a9cd6 | |
Robin Malley | 8cf7344e7b | |
Robin Malley | 701800cfe2 | |
Robin Malley | 9d2c95bb33 | |
Robin Malley | 896f452fa6 | |
Robin Malley | 67de40c02b | |
Robin Malley | 4eb5b4a7bd | |
Robin Malley | 0f17393cd8 | |
Robin Malley | 45e1ba3fcb | |
Robin Malley | ec00fcaf08 | |
Robin Malley | 879e89fa8d | |
Robin Malley | 7462b1e7ac | |
Robin Malley | c173fae026 | |
Robin Malley | 4a99441008 | |
Robin Malley | 1f5b113daa | |
Robin Malley | 6cd786be1a | |
Robin Malley | 18d1f3ea87 | |
Robin Malley | 2c732db22d | |
Robin Malley | 861e53cc29 | |
Robin Malley | c9ab25f65f | |
Robin Malley | ff2e560a2c | |
Robin Malley | 02efaf9631 | |
Robin Malley | d5a7d2be9f | |
Robin Malley | 1a2244b686 | |
Robin Malley | 18f47bf708 | |
Robin Malley | 85a730ebcb |
|
@ -5,3 +5,6 @@ smr.so
|
|||
assets.h
|
||||
cert
|
||||
kore_chroot/*
|
||||
conf/smr.conf
|
||||
src/lua/config.lua
|
||||
src/pages/error.etlua
|
||||
|
|
230
Makefile
230
Makefile
|
@ -5,124 +5,186 @@ MKDIR=mkdir -p
|
|||
COPY=cp
|
||||
RM=rm -f
|
||||
SPP=spp
|
||||
|
||||
CD=cd
|
||||
AWK=awk
|
||||
GREP=grep
|
||||
SORT=sort
|
||||
TEST=test
|
||||
CAT=cat
|
||||
TRBLDOC=trbldoc
|
||||
|
||||
# Config
|
||||
chroot_dir=kore_chroot/
|
||||
worker_chroot = $(smr_var)/kore_worker
|
||||
kmgr_chroot = $(smr_var)/kore_kmgr
|
||||
parent_chroot = $(smr_var)/kore_parent
|
||||
conf_path = /etc/smr
|
||||
smr_bin_path = /usr/local/lib
|
||||
app_root=$(worker_chroot)/var/smr
|
||||
ifeq ($(DEV),true)
|
||||
worker_chroot = ./kore_chroot
|
||||
kmgr_chroot = ./kore_chroot
|
||||
parent_chroot = ./kore_chroot
|
||||
conf_path = ./kore_chroot/conf
|
||||
smr_bin_path = ./kore_chroot
|
||||
app_root=./kore_chroot/app
|
||||
endif
|
||||
|
||||
mirror=http://dl-cdn.alpinelinux.org/alpine/
|
||||
arch=aarch64
|
||||
version=2.10.5-r0
|
||||
version=2.12.9
|
||||
certbot_email=--register-unsafely-without-email
|
||||
#certbot_email=-m you@cock.li
|
||||
user=robin
|
||||
port=8888
|
||||
domain=test.monster:$(port)
|
||||
server_cert=/root/cert/server.pem
|
||||
server_key=/root/cert/key.pem
|
||||
|
||||
#squelch prints
|
||||
Q=@
|
||||
#Q=
|
||||
SPPFLAGS=-D port=$(port) -D kore_chroot=$(worker_chroot) -D chuser=$(user) -D domain=$(domain) -D bin_path="$(bin_path)" -D server_cert="$(server_cert)" -D server_key="$(server_key)" -D worker_chroot="$(worker_chroot)" -D kmgr_chroot="$(kmgr_chroot)"
|
||||
# squelch prints, flip to print verbose information
|
||||
#Q=@
|
||||
Q=
|
||||
LUAROCKS_FLAGS=--tree $(worker_chroot)/usr/lib/luarocks --lua-version 5.1
|
||||
chroot_packages=\
|
||||
-p luarocks5.1 \
|
||||
-p "build-base" \
|
||||
-p "ca-certificates" \
|
||||
-p ssl_client \
|
||||
-p luajit \
|
||||
-p "lua5.1-dev" \
|
||||
-p "luajit-dev" \
|
||||
-p sqlite \
|
||||
-p "sqlite-dev" \
|
||||
-p certbot \
|
||||
-p zlib \
|
||||
-p "zlib-dev" \
|
||||
-p git
|
||||
|
||||
lua_packages = \
|
||||
lsqlite3 \
|
||||
etlua \
|
||||
lpeg \
|
||||
zlib
|
||||
|
||||
# Probably don't change stuff past here if you're just using smr
|
||||
lua_in_files=$(shell find src/lua/*.in -type f)
|
||||
lua_files=$(shell find src/lua/*.lua -type f) $(shell find src/lua/endpoints -type f) $(lua_in_files:%.in=%)
|
||||
lua_files=$(shell find src/lua/*.lua -type f) $(shell find src/lua/endpoints -type f)
|
||||
src_files=$(shell find src -type f) $(shell find conf -type f)
|
||||
sql_files=$(shell find src/sql -type f)
|
||||
test_files=$(shell find spec -type f)
|
||||
built_tests=$(test_files:%=$(chroot_dir)%)
|
||||
built_files=$(lua_files:src/lua/%.lua=$(chroot_dir)%.lua)
|
||||
in_page_files=$(shell find src/pages/*.in -type f)
|
||||
in_part_files=$(shell find src/pages/parts/*.in -type f)
|
||||
page_files=$(in_page_files:%.in=%)
|
||||
part_files=$(in_part_files:%.in=%) $(shell find src/pages/parts/*.etlua -type f)
|
||||
built_pages=$(page_files:src/pages/%.etlua=$(chroot_dir)pages/%.etlua)
|
||||
built_sql=$(sql_files:src/sql/%.sql=$(chroot_dir)sql/%.sql)
|
||||
test_files=$(shell find spec -type f) $(shell find spec/parser_tests -type f)
|
||||
page_files=$(shell find src/pages -type f)
|
||||
built_tests=$(test_files:%=$(app_root)/%)
|
||||
built_files=$(lua_files:src/lua/%.lua=$(app_root)/%.lua)
|
||||
part_files=$(shell find src/pages/parts/*.etlua -type f)
|
||||
built_parts=$(part_files:src/%=$(app_root)/%)
|
||||
built_pages=$(page_files:src/pages/%.etlua=$(app_root)/pages/%.etlua)
|
||||
built_sql=$(sql_files:src/sql/%.sql=$(app_root)/sql/%.sql)
|
||||
built=$(built_files) $(built_sql) $(built_pages) $(built_tests)
|
||||
sql_create_table_files=$(sql_files:src/sql/create_table_%.sql=doc/schema/%.dot)
|
||||
sql_docs=$(sql_create_table_files)
|
||||
initscript=/lib/systemd/system/smr.service
|
||||
config=$(conf_path)/smr.conf
|
||||
built_bin=$(smr_bin_path)/smr.so
|
||||
|
||||
all: $(chroot_dir) smr.so $(built_files) $(built_pages) $(built_sql)
|
||||
$(Q)$(ECHO) "[running] $@"
|
||||
$(Q)$(KODEV) run
|
||||
APK_aarch64_HASH=0164d47954c8a52e8ed10db1633174974a3b1e4182a1993a5a8343e394ee1bbc
|
||||
APK_x86_64_HASH=5176da3d4c41f12a08b82809aca8e7e2e383b7930979651b8958eca219815af5
|
||||
apk_hash := $(APK_$(arch)_HASH)
|
||||
|
||||
conf/smr.conf : conf/smr.conf.in Makefile
|
||||
$(Q)$(ECHO) "[preprocess] $@"
|
||||
$(Q)$(SPP) -o $@ -D port=$(port) -D kore_chroot=$(chroot_dir) -D chuser=$(user) $<
|
||||
help: ## Print this help
|
||||
$(Q)$(GREP) -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | $(SORT) | $(AWK) 'BEGIN {FS = ":.*?## "}; {printf "%-10s %s\n", $$1, $$2}'
|
||||
|
||||
all: $(app_root) smr.so $(built_files) $(built_pages) $(built_sql) ## Build and run smr in a chroot
|
||||
|
||||
apk-tools-static-$(version).apk:
|
||||
# wget -q $(mirror)latest-stable/main/$(arch)/apk-tools-static-$(version).apk
|
||||
wget -q $(mirror)latest-stable/main/$(arch)/apk-tools-static-$(version).apk
|
||||
|
||||
clean:
|
||||
clean: ## clean up all the files generated by this makefile
|
||||
$(Q)$(ECHO) "[clean] $@"
|
||||
$(Q)$(KODEV) clean
|
||||
$(Q)$(RM) $(page_files)
|
||||
$(Q)$(RM) conf/smr.conf
|
||||
$(Q)$(RM) src/pages/parts/story_breif.etlua
|
||||
$(Q)$(RM) src/lua/config.lua
|
||||
$(Q)$(RM) $(asset_files)
|
||||
$(Q)$(RM) smr.so
|
||||
|
||||
install: $(app_root) $(kmgr_chroot) $(parent_chroot) $(initscript) $(config) smr.so $(built_files) $(built_pages) $(built_sql) ## Install smr into a new host system
|
||||
$(Q)$(COPY) smr.so $(built_bin)
|
||||
|
||||
$(chroot_dir): apk-tools-static-$(version).apk
|
||||
$(Q)$(MKDIR) $(chroot_dir)
|
||||
$(Q)$(MKDIR) $(chroot_dir)/pages
|
||||
$(Q)$(MKDIR) $(chroot_dir)/sql
|
||||
$(Q)$(MKDIR) $(chroot_dir)/data
|
||||
$(Q)$(MKDIR) $(chroot_dir)/endpoints
|
||||
#cd $(chroot_dir) && tar -xvzf ../apk-tools-static-*.apk
|
||||
#cd $(chroot_dir) && sudo ./sbin/apk.static -X $(mirror)latest-stable/main -U --allow-untrusted --root $(chroot_dir) --no-cache --initdb add alpine-base
|
||||
#ln -s /dev/urandom $(chroot_dir)/dev/random #Prevent an attacker with access to the chroot from exhausting our entropy pool and causing a dos
|
||||
#ln -s /dev/urandom $(chroot_dir)/dev/urandom
|
||||
#mount /dev/ $(chroot_dir)/dev --bind
|
||||
#mount -o remount,ro,bind $(chroot_dir)/dev
|
||||
#mount -t proc none $(chroot_dir)/proc
|
||||
#mount -o bind /sys $(chroot_dir)/sys
|
||||
#cp /etc/resolv.conf $(chroot_dir)/etc/resolv.conf
|
||||
#echo "$(mirror)/$(branch)/main" > $(chroot)/etc/apk/repositories
|
||||
#echo "$(mirror)/$(branch)/community" >> $(chroot)/etc/apk/repositories
|
||||
#cp /etc/apk/repositories $(chroot_dir)/etc/apk/repositories
|
||||
#mkdir $(chroot_dir)/var/sm
|
||||
## Things to build lua libraries
|
||||
#chroot $(chroot_dir) apk add luarocks5.1 sqlite sqlite-dev lua5.1-dev build-base zlib zlib-dev
|
||||
#chroot $(chroot_dir) luarocks-5.1 install etlua
|
||||
#chroot $(chroot_dir) luarocks-5.1 install lsqlite3
|
||||
#chroot $(chroot_dir) luarocks-5.1 install lpeg
|
||||
#chroot $(chroot_dir) luarocks-5.1 install lua-zlib ZLIB_LIBDIR=/lib #for some reason lzlib looks in /usr/lib for libz, when it needs to look at /lib
|
||||
## Once we've built + installed everything, delete extra stuff from the chroot
|
||||
#chroot $(chroot_dir) apk del sqlite-dev lua5.1-dev build-base zlib-dev
|
||||
## SSL certificates, if you don't trust EFF (they have an antifa black block member as their favicon at time of writing) you may want to replace this.
|
||||
#chroot $(chroot_dir) apk add certbot
|
||||
## After chroot, apk add luarocks5.1 sqlite sqlite-dev lua5.1-dev build-base
|
||||
## After chroot, luarocks install etlua; luarocks install lsqlite3
|
||||
$(config) : conf/smr.conf
|
||||
$(Q)$(MKDIR) $(conf_path)
|
||||
$(Q)$(TEST) ! -e $@ && $(COPY) $< $@
|
||||
|
||||
$(initscript) : packaging/systemd/smr.service
|
||||
$(Q)$(COPY) $< $@
|
||||
|
||||
cloc: ## calculate source lines of code in smr
|
||||
cloc --force-lang="html",etlua --force-lang="lua",lua src assets Makefile
|
||||
|
||||
$(app_root):
|
||||
$(Q)$(MKDIR) $(app_root)
|
||||
|
||||
$(app_root): $(worker_chroot)
|
||||
$(Q)$(MKDIR) $(app_root)
|
||||
$(Q)$(MKDIR) $(app_root)/pages
|
||||
$(Q)$(MKDIR) $(app_root)/pages/parts
|
||||
$(Q)$(MKDIR) $(app_root)/sql
|
||||
$(Q)$(MKDIR) $(app_root)/data
|
||||
$(Q)$(MKDIR) $(app_root)/data/archive
|
||||
$(Q)$(MKDIR) $(app_root)/endpoints
|
||||
|
||||
alpine-chroot-install:
|
||||
$(Q)wget https://raw.githubusercontent.com/alpinelinux/alpine-chroot-install/v0.14.0/alpine-chroot-install \
|
||||
&& echo 'ccbf65f85cdc351851f8ad025bb3e65bae4d5b06 alpine-chroot-install' | sha1sum -c \
|
||||
|| exit 1
|
||||
$(Q)chmod +x alpine-chroot-install
|
||||
|
||||
$(worker_chroot) $(kmgr_chroot) $(parent_chroot): alpine-chroot-install
|
||||
$(Q)export APK_TOOLS_URI="https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic/v2.12.9/$(arch)/apk.static"; \
|
||||
export APK_TOOLS_SHA256="$(apk_hash)"; \
|
||||
./alpine-chroot-install -d $@ -a $(arch) $(chroot_packages)
|
||||
|
||||
code : $(built_files)
|
||||
|
||||
$(built_files): $(chroot_dir)%.lua : src/lua/%.lua
|
||||
$(built_files): $(app_root)/%.lua : src/lua/%.lua $(app_root)
|
||||
$(Q)$(ECHO) "[copy] $@"
|
||||
$(Q)$(COPY) $< $@
|
||||
|
||||
$(built_pages): $(app_root)/pages/%.etlua : src/pages/%.etlua $(app_root)
|
||||
$(Q)$(ECHO) "[copy] $@"
|
||||
$(Q)$(COPY) $< $@
|
||||
|
||||
$(built_parts): $(app_root)/% : src/%
|
||||
$(Q)$(ECHO) "[copy] $@"
|
||||
$(Q)$(COPY) $< $@
|
||||
|
||||
$(built_sql): $(app_root)/sql/%.sql : src/sql/%.sql
|
||||
$(Q)$(ECHO) "[copy] $@"
|
||||
$(Q)$(COPY) $^ $@
|
||||
|
||||
$(built_pages): $(chroot_dir)pages/%.etlua : src/pages/%.etlua
|
||||
$(built_tests): $(app_root)/spec/% : spec/% $(app_root)/spec
|
||||
$(Q)$(ECHO) "[copy] $@"
|
||||
$(Q)$(COPY) $^ $@
|
||||
$(Q)$(COPY) $< $@
|
||||
|
||||
src/lua/config.lua : src/lua/config.lua.in Makefile
|
||||
$(Q)$(ECHO) "[preprocess] $@"
|
||||
$(Q)$(SPP) -o $@ -D domain=$(domain) $<
|
||||
$(app_root)/spec: $(app_root)
|
||||
$(Q)$(MKDIR) $@
|
||||
$(Q)$(MKDIR) $@/parser_tests
|
||||
|
||||
$(page_files) : % : %.in $(part_files)
|
||||
$(Q)$(ECHO) "[preprocess] $@"
|
||||
$(Q)$(SPP) -o $@ $<
|
||||
|
||||
src/pages/parts/story_breif.etlua : src/pages/parts/story_breif.etlua.in
|
||||
$(Q)$(ECHO) "[preprocess] $@"
|
||||
$(Q)$(SPP) -o $@ $<
|
||||
|
||||
$(built_sql): $(chroot_dir)sql/%.sql : src/sql/%.sql
|
||||
$(Q)$(ECHO) "[copy] $@"
|
||||
$(Q)$(COPY) $^ $@
|
||||
|
||||
$(built_tests) : $(chroot_dir)% : %
|
||||
$(Q)$(ECHO) "[copy] $@"
|
||||
$(Q)$(COPY) $^ $@
|
||||
|
||||
smr.so : $(src_files) conf/smr.conf conf/build.conf
|
||||
smr.so: $(src_files) conf/build.conf $(asset_files)
|
||||
$(Q)$(ECHO) "[build] $@"
|
||||
$(Q)$(KODEV) build
|
||||
|
||||
test : $(built)
|
||||
$(Q)$(CD) kore_chroot && busted
|
||||
test: $(built) ## run the unit tests
|
||||
$(Q)$(CD) $(app_root) && busted -v --no-keep-going --exclude-tags "slow,todo,working"
|
||||
|
||||
cov: $(built) ## code coverage (based on unit tests)
|
||||
$(Q)$(RM) $(app_root)/luacov.stats.out
|
||||
$(Q)$(CD) $(app_root) && busted -v -c --no-keep-going --exclude-tags "slow,todo,working"
|
||||
$(Q)$(CD) $(app_root) && luacov endpoints/
|
||||
$(Q)$(ECHO) "open $(app_root)/luacov.report.out to view coverage results."
|
||||
|
||||
$(sql_docs) : doc/schema/%.dot : src/sql/create_table_%.sql
|
||||
$(Q)$(CAT) $< | tools/doc_sql.sh > $@
|
||||
|
||||
doc: $(sql_docs)
|
||||
$(Q)$(RM) -rf .trblcache
|
||||
$(Q)$(TRBLDOC) doc src README.md
|
||||
# cd .trblcache/built && python3 -m http.server
|
||||
|
||||
.PHONY: doc
|
||||
|
|
66
README.md
66
README.md
|
@ -1,13 +1,28 @@
|
|||
@name .
|
||||
|
||||
# SMR
|
||||
|
||||
|
||||
## Overview
|
||||
|
||||
This repository contains the source code to a pastebin clone. It was made after
|
||||
concerns with pastebin.com taking down certain kinds of content. SMR aims to
|
||||
be small, fast, and secure. It is built on top of [Kore](https://kore.io), using
|
||||
[luajit](https://luajit.org) to expose a Lua programming environment. It uses
|
||||
[sqlite3](https://sqlite.org) as it's database. SMR is implemented in just over
|
||||
2k SLOC and is expected to never exceed 5k SLOC. Contributions welcome.
|
||||
[sqlite3](https://sqlite.org) as it's database. SMR is implemented in about
|
||||
5k SLOC. Contributions welcome.
|
||||
|
||||
Language|files|blank|comment|code
|
||||
:-------|-------:|-------:|-------:|-------:
|
||||
Lua|37|331|678|2197
|
||||
HTML|22|100|0|1021
|
||||
C|4|102|251|712
|
||||
JavaScript|4|23|34|293
|
||||
SQL|36|6|61|274
|
||||
make|1|30|6|146
|
||||
CSS|3|4|8|74
|
||||
C/C++ Header|4|3|0|48
|
||||
SUM:|111|599|1038|4765
|
||||
|
||||
## Roadmap
|
||||
|
||||
|
@ -15,8 +30,21 @@ be small, fast, and secure. It is built on top of [Kore](https://kore.io), using
|
|||
* Comments (complete)
|
||||
* Tags (complete)
|
||||
* Search (complete)
|
||||
* Author biographies
|
||||
* Archive
|
||||
* Archive (complete)
|
||||
* Author biographies (complete)
|
||||
* Kore 4.2.0 (complete)
|
||||
* addon api
|
||||
|
||||
TODO's:
|
||||
|
||||
* Currently, people can post comments to unlisted stories even if they don't have
|
||||
the correct link.
|
||||
* Find a replacement preprocessor
|
||||
* The archive is currently generated weekly from a cron job, and served
|
||||
syncronously. We can generate a zip file on-the-fly instead, and if the client
|
||||
disconnects, it's fine to drop the whole thing.
|
||||
* We can simplify a lot of error handling logic by setting sql prepared statements to reset during error unwinding.
|
||||
* We can simplify a lot of business logic by having requests parse their parameters eagerly.
|
||||
|
||||
## Hacking
|
||||
|
||||
|
@ -33,9 +61,35 @@ If you want to contribute to this repository:
|
|||
but everything should still work with later versions.
|
||||
6. Install [spp](https://github.com/radare/spp)
|
||||
7. Clone this repository into the smr folder, cd into the root, and run `make`!
|
||||
* You may need to modify the configuration in the Makefile, add test.monster 127.0.0.1 to your `/etc/hosts`, modify command invocation, ect.
|
||||
* You may need to modify the configuration in the Makefile, add `test.monster 127.0.0.1` to your `/etc/hosts`, modify command invocation, ect.
|
||||
|
||||
## Misc notes.
|
||||
## Folder layout
|
||||
|
||||
While the core business logic of SMR is kept under 5k SLOC, tests, documentation,
|
||||
and other resources exceed this limit. The following is an explanation of what
|
||||
goes where:
|
||||
|
||||
<pre>
|
||||
smr/
|
||||
assets/ - kore assets, compiled into the binary. Javascript and CSS are kept here.
|
||||
cert/ - kore certificates. This is a default to get you started, but in production you should set certificates appropriately.
|
||||
conf/ - kore configuration. See https://docs.kore.io/4.2.0/applications/koreconf.html
|
||||
doc/ - documentation for smr that doesn't belong to any particular file
|
||||
kore_chroot/ - a chroot to get you started modifying smr. In production this should be a properly configured chroot
|
||||
packaging/ - scripts for packaging smr for different systems
|
||||
spec/ - unit and system tests for smr
|
||||
src/ - all the business logic of smr
|
||||
lua/ - Lua shared code between endpoints
|
||||
endpoints/ - 1-per endpoint business logic
|
||||
pages/ - Etlua templated html, exposed from src/lua/pages.lua
|
||||
sql/ - Sqlite queries, exposed from src/lua/queries.lua
|
||||
tools/ - command line tools for working with the smr database
|
||||
accounts/ - tool for modifying author accounts
|
||||
archive/ - tool for generating archives of the site (NOT the same as backups)
|
||||
migrate/ - tool for migrating/upgrading the smr database
|
||||
</pre>
|
||||
|
||||
## Misc. notes
|
||||
|
||||
SMR requires a slightly modified version of Kore to run. See [my kore patches](https://git.fuwafuwa.moe/rmalley/kore_patches)
|
||||
for the changes I needed to make to get the JIT compiler playing nice with
|
||||
|
|
|
@ -0,0 +1,12 @@
|
|||
/*
|
||||
Stores the scroll location on a story to local storage, and re-scroll to the
|
||||
position next time the page is loaded.
|
||||
*/
|
||||
window.onbeforeunload = function(e) {
|
||||
localStorage.setItem(window.location.pathname,window.scrollY)
|
||||
}
|
||||
document.addEventListener("DOMContentLoaded", function(e) {
|
||||
var scrollpos = localStorage.getItem(window.location.pathname)
|
||||
if(scrollpos)
|
||||
window.scrollTo(0,scrollpos)
|
||||
})
|
|
@ -0,0 +1,107 @@
|
|||
/*
|
||||
Allows for lazy loading of stories on the main page as the user scrolls down
|
||||
*/
|
||||
|
||||
|
||||
function add_stories(stories){
|
||||
var tbody_el = document.querySelector("table#story_list tbody")
|
||||
for(var i = 0; i < stories.length; i++){
|
||||
var story = stories[i];
|
||||
/* This chunk should match /src/pages/parts/story_brief */
|
||||
console.log("Adding story:",story)
|
||||
var row = document.createElement("tr");
|
||||
row.appendChild(
|
||||
document.createElement("td")
|
||||
); // unlisted cell
|
||||
var link_cell = document.createElement("td");
|
||||
var link = document.createElement("a");
|
||||
link.textContent = story.title;
|
||||
link.href = story.url;
|
||||
link_cell.appendChild(link);
|
||||
row.appendChild(link_cell);
|
||||
|
||||
var author_cell = document.createElement("td");
|
||||
author_cell.appendChild(
|
||||
document.createTextNode("By ")
|
||||
);
|
||||
if(story.isanon){
|
||||
author_cell.appendChild(
|
||||
document.createTextNode("Anonymous")
|
||||
);
|
||||
}else{
|
||||
var author_page = document.createElement("a");
|
||||
author_page.textContent = story.author;
|
||||
author_page.href = story.author; // TODO: fix
|
||||
author_cell.appendChild(author_page);
|
||||
}
|
||||
row.appendChild(author_cell);
|
||||
var hits_cell = document.createElement("td")
|
||||
hits_cell.appendChild(
|
||||
document.createTextNode(story.hits + " hits")
|
||||
);
|
||||
row.appendChild(hits_cell);
|
||||
var comments_cell = document.createElement("td");
|
||||
comments_cell.appendChild(
|
||||
document.createTextNode(story.ncomments + " comments")
|
||||
);
|
||||
row.appendChild(comments_cell);
|
||||
var tag_cell = document.createElement("td");
|
||||
var tag_list = document.createElement("ul");
|
||||
tag_list.className = "row tag-list";
|
||||
tag_cell.appendChild(tag_list);
|
||||
for(var j = 0; j < Math.min(story.tags.length,5); j++){
|
||||
var tag = story.tags[j];
|
||||
var tag_item = document.createElement("li");
|
||||
var tag_button = document.createElement("a");
|
||||
tag_button.className = "tag button button-outline";
|
||||
tag_button.textContent = tag;
|
||||
tag_button.href = "/_search?q=%2B" + tag;
|
||||
tag_item.appendChild(tag_button);
|
||||
tag_list.appendChild(tag_item);
|
||||
}
|
||||
row.appendChild(tag_cell);
|
||||
var date_cell = document.createElement("td");
|
||||
date_cell.appendChild(
|
||||
document.createTextNode(story.posted)
|
||||
);
|
||||
row.appendChild(date_cell);
|
||||
tbody_el.appendChild(row);
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
A tiny state machine:
|
||||
0 - idle
|
||||
1 - loading more stories (do not send another request)
|
||||
2 - stories loaded, waiting for next scroll event to transition to idle
|
||||
*/
|
||||
var state = 0
|
||||
var loaded = 50 // by default we load 50 stories on the site index
|
||||
document.addEventListener("scroll",function(e){
|
||||
var tobot = window.scrollMaxY - window.scrollY
|
||||
if (tobot < 100){
|
||||
if (state == 0){
|
||||
//Ask the server for stories
|
||||
// TODO: Finish this
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.open("GET", "/_api?call=stories&data=" + loaded);
|
||||
xhr.onreadystatechange = function(e){
|
||||
if(xhr.readyState === 4){
|
||||
console.log("response:",xhr.response)
|
||||
resp = JSON.parse(xhr.response);
|
||||
console.log("resp:",resp)
|
||||
add_stories(resp.stories);
|
||||
loaded += resp.stories.length;
|
||||
}
|
||||
state = 2
|
||||
}
|
||||
xhr.send()
|
||||
state = 1
|
||||
}else if (state == 1){
|
||||
// Do nothing
|
||||
}else if (state == 2){
|
||||
state = 0
|
||||
}
|
||||
console.log("we should load more stories")
|
||||
}
|
||||
})
|
|
@ -0,0 +1,36 @@
|
|||
/*
|
||||
There's a delete buttotn to delete a post. If javascript is enabled, replace
|
||||
the button with one that will ask for confirmation before deleting.
|
||||
*/
|
||||
|
||||
function delete_intervine(){
|
||||
var forms = document.getElementsByTagName("form");
|
||||
if(forms.length == 0){return;}//Don't load if the story is missing.
|
||||
var delete_form;
|
||||
for(var i = 0; i < forms.length; i++){
|
||||
if(forms[i].action.endsWith("_delete")){
|
||||
delete_form = forms[i];
|
||||
break;
|
||||
}
|
||||
}
|
||||
if(delete_form == null){return;}//Don't load if we're not logged in
|
||||
var delete_parent = delete_form.parentNode;
|
||||
delete_parent.removeChild(delete_form);
|
||||
var delete_wrapper = document.createElement("div");
|
||||
var delete_button = document.createElement("button");
|
||||
delete_button.classList.add("button");
|
||||
delete_button.classList.add("column");
|
||||
delete_button.classList.add("column-0");
|
||||
delete_button.textContent = "Delete";
|
||||
delete_button.addEventListener("click",function(){
|
||||
if(confirm("Are you sure you want to delete this story?")){
|
||||
document.documentElement.appendChild(delete_form);
|
||||
delete_form.submit();
|
||||
}
|
||||
});
|
||||
delete_parent.appendChild(delete_wrapper);
|
||||
delete_wrapper.appendChild(delete_button);
|
||||
|
||||
}
|
||||
|
||||
document.addEventListener("DOMContentLoaded",delete_intervine,false);
|
|
@ -9,6 +9,7 @@ body{
|
|||
}
|
||||
h1,h2,h3{line-height:1.2}
|
||||
p,.tag-list{margin-bottom:0px}
|
||||
.spacer{margin-bottom:1em}
|
||||
.spoiler,.spoiler2{background:#444}
|
||||
.spoiler:hover,.spoiler2:hover{color:#FFF}
|
||||
.greentext{color:#282}
|
||||
|
@ -29,7 +30,9 @@ p,.tag-list{margin-bottom:0px}
|
|||
flex:10 10 auto;
|
||||
translate: -100%;
|
||||
}
|
||||
.column-0{margin-right:5px;}
|
||||
.column-0{margin-right:5px}
|
||||
.label-inline{margin:0.5rem}
|
||||
.biography{border:1px solid #9b4dca}
|
||||
|
||||
@media (prefers-color-scheme: dark){
|
||||
body, input, select, textarea, pre, code{
|
||||
|
|
|
@ -0,0 +1,32 @@
|
|||
|
||||
.tag-suggestion, .tag-suggestion>input {
|
||||
height: 1rem !important;
|
||||
margin:0px;
|
||||
}
|
||||
.tag-suggestion{
|
||||
font-size:0.8rem !important;
|
||||
display:block !important;
|
||||
}
|
||||
.tag-suggestion>input{
|
||||
line-height:1rem !important;
|
||||
width:100% !important;
|
||||
text-align:left;
|
||||
background-color:transparent;
|
||||
color:black;
|
||||
border:none;
|
||||
padding:0px;
|
||||
}
|
||||
.tag-suggestion-list{
|
||||
list-style: none;
|
||||
margin-top:3.8rem;
|
||||
background: white;
|
||||
border: 1px solid black;
|
||||
border-top: 0px;
|
||||
}
|
||||
@media (prefers-color-scheme: dark){
|
||||
body, input, select, textarea, pre, code, .tag-suggestion-list{
|
||||
background: #1c1428;
|
||||
color: #d0d4d8 !important;
|
||||
}
|
||||
.spoiler, .spoiler2{color:#444;}
|
||||
}
|
|
@ -0,0 +1,195 @@
|
|||
|
||||
/*Singleton object*/
|
||||
var tag_suggestion_list = {
|
||||
input_el: null,
|
||||
list_element: document.createElement('ol'),
|
||||
suggestion_elements: [],
|
||||
hover_last: -1,
|
||||
}
|
||||
tag_suggestion_list.list_element.setAttribute("class","tag-suggestion-list");
|
||||
tag_suggestion_list.list_element.setAttribute("style","position:absolute;");
|
||||
|
||||
function appendTag(name){
|
||||
return function(event){
|
||||
var ie = tag_suggestion_list.input_el;
|
||||
var prev = ie.value.split(";");
|
||||
prev.pop();
|
||||
prev.push(name);
|
||||
ie.value = prev.join(";");
|
||||
ie.value += ";";
|
||||
ie.focus();
|
||||
tag_suggestion_list.list_element.style = "display:none;";
|
||||
}
|
||||
}
|
||||
|
||||
function hoverTag(name, root){
|
||||
return function(event){
|
||||
var ie = tag_suggestion_list.input_el;
|
||||
if(ie.value.slice(-1) == ";"){//comming from another tab completion
|
||||
var prev = ie.value.slice(hover_last);
|
||||
|
||||
}else{
|
||||
var prev = ie.value.split(";");
|
||||
prev.pop()
|
||||
prev.push(name)
|
||||
ie.value = prev.join(";");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stolen from medium.com/@jh3y
|
||||
* returns x, y coordinates for absolute positioning of a span within a given text input
|
||||
* at a given selection point
|
||||
* @param {object} input - the input element to obtain coordinates for
|
||||
* @param {number} selectionPoint - the selection point for the input
|
||||
*/
|
||||
function getCursorXY(input, selectionPoint){
|
||||
const {
|
||||
offsetLeft: inputX,
|
||||
offsetTop: inputY,
|
||||
} = input
|
||||
const div = document.createElement('div')
|
||||
const copyStyle = getComputedStyle(input)
|
||||
for (const prop of copyStyle) {
|
||||
div.style[prop] = copyStyle[prop]
|
||||
}
|
||||
const swap = '.'
|
||||
const inputValue = input.tagName === 'INPUT' ? input.value.replace(/ /g, swap) : input.value
|
||||
const textContent = inputValue.substr(0, selectionPoint)
|
||||
div.textContent = textContent
|
||||
if (input.tagName === 'TEXTAREA') div.style.height = 'auto'
|
||||
if (input.tagName === 'INPUT') div.style.width = 'auto'
|
||||
const span = document.createElement('span')
|
||||
span.textContent = inputValue.substr(selectionPoint) || '.'
|
||||
div.appendChild(span)
|
||||
document.body.appendChild(div)
|
||||
const { offsetLeft: spanX, offsetTop: spanY } = span
|
||||
document.body.removeChild(div)
|
||||
return {
|
||||
x: inputX + spanX,
|
||||
y: inputY + spanY,
|
||||
}
|
||||
}
|
||||
|
||||
function display_suggestions(elem, sugg, event){
|
||||
//Check that the value hasn't change since we fired
|
||||
//off the request
|
||||
var tags_so_far = elem.value.split(";");
|
||||
recent = elem.value.split(";").pop().trim();
|
||||
if(recent == sugg[0]){
|
||||
var v = getCursorXY(elem,elem.value.length);
|
||||
var sugx = v.x;
|
||||
var sugy = v.y;
|
||||
var sty = `position:absolute; margin-left:${sugx}px;`;
|
||||
tag_suggestion_list.list_element.style = sty;
|
||||
for(var i in tag_suggestion_list.suggestion_elements){
|
||||
tag_suggestion_list.list_element.removeChild(tag_suggestion_list.suggestion_elements[i]);
|
||||
|
||||
}
|
||||
tag_suggestion_list.suggestion_elements = [];
|
||||
var hover_last = 0;
|
||||
for(var i in tags_so_far){
|
||||
hover_last += tags_so_far[i].length + 1;
|
||||
}
|
||||
tag_suggestion_list.hover_last = hover_last;
|
||||
for(var i in sugg){
|
||||
if(i == 0){
|
||||
continue;
|
||||
}
|
||||
var suggestion_el = document.createElement("li");
|
||||
var suggestion_but = document.createElement("input")
|
||||
suggestion_el.appendChild(suggestion_but);
|
||||
suggestion_but.setAttribute("type","button");
|
||||
suggestion_but.setAttribute("value",sugg[i]);
|
||||
suggestion_el.setAttribute("class"," button-clear tag-suggestion");
|
||||
tag_suggestion_list.list_element.appendChild(suggestion_el);
|
||||
tag_suggestion_list.suggestion_elements.push(suggestion_el);
|
||||
suggestion_but.onkeyup = function(event){
|
||||
if(event.key == "Tab"){
|
||||
hoverTag(event.target.value)(event);
|
||||
}else if(event.key == ";"){
|
||||
appendTag(event.target.value)(event);
|
||||
}
|
||||
|
||||
}
|
||||
suggestion_but.onclick = function(event){
|
||||
appendTag(event.target.value)(event);
|
||||
}
|
||||
suggestion_but.onblur = function(event){
|
||||
var other_input = false;
|
||||
for(var i in tag_suggestion_list.suggestion_elements){
|
||||
if(tag_suggestion_list.suggestion_elements[i].firstChild == event.relatedTarget){
|
||||
other_input = true;
|
||||
break;
|
||||
}
|
||||
|
||||
}
|
||||
if(!other_input){
|
||||
tag_suggestion_list.list_element.style = "display:none;";
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
if(tag_suggestion_list.suggestion_elements.length > 0){
|
||||
|
||||
var last_element = tag_suggestion_list.suggestion_elements[tag_suggestion_list.suggestion_elements.length - 1];
|
||||
//last_element.firstChild.last_element = true;
|
||||
last_element.firstChild.onblur = function(event){
|
||||
tag_suggestion_list.suggestion_elements[0].firstChild.focus();
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function hint_tags(elem, event){
|
||||
//Get the most recent tag
|
||||
var recent = elem.value.split(";").pop().trim();
|
||||
if(recent.length > 0){
|
||||
//Ask the server for tags that look like this
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.open("GET", "/_api?call=suggest&data=" + recent);
|
||||
xhr.onreadystatechange = function(e){
|
||||
if(xhr.readyState === 4){
|
||||
suggestions = xhr.response.split(";");
|
||||
display_suggestions(elem,suggestions, event);
|
||||
|
||||
}
|
||||
}
|
||||
xhr.send()
|
||||
}
|
||||
}
|
||||
|
||||
function init(){
|
||||
var head_el = document.head;
|
||||
var extra_css_el = document.createElement("link");
|
||||
document.head.appendChild(extra_css_el);
|
||||
extra_css_el.setAttribute("rel","stylesheet");
|
||||
extra_css_el.setAttribute("href","/_css/suggest_tags.css");
|
||||
var tag_el_list = document.getElementsByName("tags");
|
||||
console.assert(tag_el_list.length == 1);
|
||||
var tag_el = tag_el_list[0];
|
||||
tag_suggestion_list.input_el = tag_el;
|
||||
tag_el.onkeyup = function(event){
|
||||
hint_tags(tag_el, event);
|
||||
}
|
||||
tag_el.onblur = function(event){
|
||||
var not_suggestion = true;
|
||||
var ies = tag_suggestion_list.suggestion_elements;
|
||||
for(var i in ies){
|
||||
if(event.relatedTarget == ies[i].firstChild){
|
||||
not_suggestion = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if(not_suggestion){
|
||||
tag_suggestion_list.list_element.style = "display:none;";
|
||||
}
|
||||
}
|
||||
|
||||
var fieldset = tag_el.parentNode;
|
||||
fieldset.appendChild(tag_suggestion_list.list_element);
|
||||
var paste_el = document.getElementsByName("tags");
|
||||
}
|
||||
document.addEventListener("DOMContentLoaded",init,false);
|
|
@ -26,13 +26,13 @@ dev {
|
|||
# These flags are added to the shared ones when
|
||||
# you build the "dev" flavor.
|
||||
ldflags=-llua5.1
|
||||
cflags=-g -Wextra
|
||||
cflags=-g -Wall -Wextra -Werror
|
||||
cflags=-I/usr/include/lua5.1
|
||||
cxxflags=-g -Wextra
|
||||
}
|
||||
|
||||
prod {
|
||||
cflags=-D BUILD_PROD
|
||||
cflags=-D BUILD_PROD
|
||||
cflags=-I/usr/include/luajit-2.1
|
||||
cflags=-lluajit-5.1
|
||||
ldflags=-lluajit-5.1
|
||||
|
|
101
conf/smr.conf.in
101
conf/smr.conf.in
|
@ -1,101 +0,0 @@
|
|||
# smr configuration
|
||||
|
||||
server tls {
|
||||
bind 0.0.0.0 <{get port}>
|
||||
|
||||
}
|
||||
|
||||
seccomp_tracing yes
|
||||
load ./smr.so
|
||||
root <{get kore_chroot}>
|
||||
|
||||
runas <{get chuser }>
|
||||
|
||||
keymgr_runas <{get chuser }>
|
||||
|
||||
keymgr_root .
|
||||
workers 1
|
||||
|
||||
http_body_max 8388608
|
||||
|
||||
tls_dhparam dh2048.pem
|
||||
|
||||
validator v_any regex [\s\S]*
|
||||
validator v_storyid regex [a-zA-Z0-9$+!*'(),-]+
|
||||
validator v_subdomain regex [a-z0-9]{1,30}
|
||||
validator v_markup regex (plain|imageboard)
|
||||
validator v_bool regex (0|1)
|
||||
|
||||
domain * {
|
||||
attach tls
|
||||
|
||||
certfile cert/server.pem
|
||||
certkey cert/key.pem
|
||||
|
||||
#I run kore behind a lighttpd reverse proxy, so this is a bit useless to me
|
||||
accesslog /dev/null
|
||||
|
||||
route / home
|
||||
route /_css/style.css asset_serve_style_css
|
||||
route /_css/milligram.css asset_serve_milligram_css
|
||||
route /_css/milligram.min.css.map asset_serve_milligram_min_css_map
|
||||
route /_faq asset_serve_faq_html
|
||||
route /favicon.ico asset_serve_favicon_ico
|
||||
route /_paste post_story
|
||||
route /_edit edit_story
|
||||
route /_bio edit_bio
|
||||
route /_login login
|
||||
route ^/_claim claim
|
||||
route /_download download
|
||||
route /_preview preview
|
||||
route /_search search
|
||||
# Leading ^ is needed for dynamic routes, kore says the route is dynamic if it does not start with '/'
|
||||
route ^/[^_].* read_story
|
||||
|
||||
params get /_edit {
|
||||
validate story v_storyid
|
||||
}
|
||||
params get /_download {
|
||||
validate story v_storyid
|
||||
}
|
||||
params post /_edit {
|
||||
validate title v_any
|
||||
validate story v_storyid
|
||||
validate text v_any
|
||||
validate pasteas v_subdomain
|
||||
validate markup v_markup
|
||||
validate tags v_any
|
||||
}
|
||||
params post /_paste {
|
||||
validate title v_any
|
||||
validate text v_any
|
||||
validate pasteas v_subdomain
|
||||
validate markup v_markup
|
||||
validate tags v_any
|
||||
}
|
||||
params post /_preview {
|
||||
validate title v_any
|
||||
validate text v_any
|
||||
validate pasteas v_subdomain
|
||||
validate markup v_markup
|
||||
validate tags v_any
|
||||
}
|
||||
params get /_search {
|
||||
validate q v_any
|
||||
}
|
||||
params get ^/[^_].* {
|
||||
validate comments v_bool
|
||||
}
|
||||
params post ^/[^_].* {
|
||||
validate text v_any
|
||||
validate postas v_subdomain
|
||||
}
|
||||
params post /_login {
|
||||
validate user v_subdomain
|
||||
validate pass v_any
|
||||
}
|
||||
params post ^/_claim {
|
||||
validate user v_subdomain
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,9 @@
|
|||
|
||||
## falsey
|
||||
|
||||
In Lua, the values `false` and `nil` are considered falsey.
|
||||
All other values are considered truethy.
|
||||
In addition, tables may have a metatable that has a `__toboolean` function
|
||||
to implement their own truethyness or falseyness. `false` should be used in
|
||||
places where a boolean is expected, and `nil` should be used otherwise.
|
||||
|
|
@ -0,0 +1,44 @@
|
|||
|
||||
# A gentle introduction to SMR
|
||||
|
||||
SMR is a small Pastebin clone, with a few extra features. One of it's goals is
|
||||
to be simple and easy to modify. This document will explain how:
|
||||
|
||||
## Downloading
|
||||
|
||||
The canonical location for the SMR source repository is
|
||||
`https://git.fuwafuwa.moe/rmalley/smr`
|
||||
In the case of downtime, a mirror is available at
|
||||
`https://dev.sum7.eu/Robin.Malley/smr`
|
||||
|
||||
## Dependencies
|
||||
|
||||
SMR requires the following things to be install on the host system.
|
||||
|
||||
* kore - Web server framework for C https://kore.io
|
||||
* luarocks - package manager for the Lua programming language. Will require a
|
||||
working Lua environment. The maintainer recommends `luajit`, SMR is written
|
||||
against Lua 5.1, but should still work on 5.2. Lua 5.3 introduced breaking
|
||||
changes and may require additional modification.
|
||||
* spp - A simple preprocessor used to preprocess some files.
|
||||
|
||||
## Installing
|
||||
|
||||
After doing `git clone`, `make` and `make install`, SMR will install itself
|
||||
under `/var/lib/smr/`, in this folder, there will be 2 more folders, each
|
||||
corresponding to a chroot environment:
|
||||
|
||||
* `kore_worker` - The chroot where business logic runs. All application code
|
||||
lives under `/var/smr/`. This chroot also holds the database for
|
||||
at `/var/smr/data/posts.db` (so the full path from the host to the database
|
||||
is `/var/lib/smr/kore_worker/var/smr/data/posts.db`). After the kore worker
|
||||
starts and chroots, it runs the file `/var/smr/init.lua`. In addition, SMR
|
||||
will install lua rocks under `/usr/lib/luarocks` under the chroot.
|
||||
|
||||
* `kore_keymgr` - The chroot where https keys are stored.
|
||||
The entire point of the keymgr process is to separate the processes
|
||||
that runs business logic from the process that holds encryption keys.
|
||||
In theory, even in the event of an arbitrary code execution vulnerability in the
|
||||
SMR code, the encryption keys used for the site should still be safe.
|
||||
|
||||
|
|
@ -0,0 +1,23 @@
|
|||
!{ md
|
||||
@file doc/hooks
|
||||
|
||||
# Hooks
|
||||
|
||||
Various functions that are exposed to the lua environment. These functions may be detoured to effect their behavior.
|
||||
|
||||
```
|
||||
connect(req :: http_request) :: boolean
|
||||
```
|
||||
Called before smr business logic is run, may run additional validation on the request. Return @{falsey} by default, and continues onto the business logic of smr, return `true` if this function has handled the request, and no further processing is nessessary.
|
||||
|
||||
```
|
||||
create_user(details :: table) :: boolean
|
||||
```
|
||||
Called when a user creates a user account on the site.
|
||||
|
||||
```
|
||||
authenticate(data :: table) :: number | nil, string
|
||||
```
|
||||
Called when a user attempts to log in. Return a number, userid if the login is successful, or nil and an error message if the login is not successful. By default, smr puts "user" and "passfile" fields into the data table.
|
||||
|
||||
!}
|
|
@ -0,0 +1 @@
|
|||
*.dot
|
|
@ -0,0 +1,16 @@
|
|||
# Service file for systemd based systems
|
||||
|
||||
[Unit]
|
||||
Description=smr server daemon
|
||||
Documentation=https://git.fuwafuwa.moe/rmalley/smr
|
||||
After=network.target syslog.target
|
||||
|
||||
[Service]
|
||||
Type=oneshot
|
||||
ExecStart=/usr/local/bin/kore -c /etc/smr/smr.conf
|
||||
RemainAfterExit=true
|
||||
ExecStop=/usr/bin/pkill -9 kore
|
||||
StandardOutput=journal
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -0,0 +1,69 @@
|
|||
_G.spy = spy
|
||||
local mock_env = require("spec.env_mock")
|
||||
local rng = require("spec.fuzzgen")
|
||||
|
||||
describe("smr biography",function()
|
||||
setup(mock_env.setup)
|
||||
teardown(mock_env.teardown)
|
||||
it("should allow users to set their biography",function()
|
||||
local claim_post = require("endpoints.claim_post")
|
||||
local login_post = require("endpoints.login_post")
|
||||
local index_get = require("endpoints.index_get")
|
||||
local bio_get = require("endpoints.bio_get")
|
||||
local bio_post = require("endpoints.bio_post")
|
||||
local db = require("db")
|
||||
local config = require("config")
|
||||
config.domain = "test.host"
|
||||
configure()
|
||||
local username = rng.subdomain()
|
||||
local claim_req = {
|
||||
method = "POST",
|
||||
host = "test.host",
|
||||
path = "/_claim",
|
||||
args = {
|
||||
user = username
|
||||
}
|
||||
}
|
||||
claim_post(claim_req)
|
||||
local login_req = {
|
||||
method = "POST",
|
||||
host = "test.host",
|
||||
path = "/_login",
|
||||
args = {
|
||||
user = username
|
||||
},
|
||||
file = {
|
||||
pass = claim_req.response
|
||||
}
|
||||
}
|
||||
login_post(login_req)
|
||||
local cookie = login_req.response_headers["set-cookie"]
|
||||
local sessionid = cookie:match("session=([^;]+)")
|
||||
local home_req_get = {
|
||||
method = "GET",
|
||||
host = username .. ".test.host",
|
||||
path = "/",
|
||||
cookies = {
|
||||
session = sessionid
|
||||
}
|
||||
}
|
||||
index_get(home_req_get)
|
||||
local edit_bio_button = '<a href="/_bio"'
|
||||
assert(
|
||||
home_req_get.response:find(edit_bio_button),
|
||||
"After logging in the user should have a button to" ..
|
||||
" edit their biography. Looking for " .. edit_bio_button
|
||||
.. " but didn't find it in " .. home_req_get.response
|
||||
)
|
||||
local edit_bio_req_get = {
|
||||
method = "GET",
|
||||
host = username .. ".test.host",
|
||||
path = "/_bio",
|
||||
cookies = {session = sessionid},
|
||||
args = {}
|
||||
}
|
||||
bio_get(edit_bio_req_get)
|
||||
assert(edit_bio_req_get.responsecode == 200)
|
||||
assert(edit_bio_req_get.response:find("<textarea"))
|
||||
end)
|
||||
end)
|
|
@ -0,0 +1,126 @@
|
|||
|
||||
_G.spy = spy
|
||||
local env_mock = require("spec.env_mock")
|
||||
|
||||
describe("smr cacheing",function()
|
||||
setup(env_mock.setup)
|
||||
teardown(env_mock.teardown)
|
||||
it("caches a page if the page is requested twice #working",function()
|
||||
local read_get = require("endpoints.read_get")
|
||||
local cache = require("cache")
|
||||
renderspy = spy.on(cache,"render")
|
||||
configure()
|
||||
local req = {
|
||||
method = "GET",
|
||||
path = "/a",
|
||||
args = {},
|
||||
host = "test.host"
|
||||
}
|
||||
assert.spy(renderspy).called(0)
|
||||
read_get(req)
|
||||
assert.spy(renderspy).called(1)
|
||||
read_get(req)
|
||||
assert.spy(renderspy).called(2)
|
||||
for row in cache.cache:rows("SELECT COUNT(*) FROM cache") do
|
||||
assert(row[1] == 1, string.format(
|
||||
"Exepected only one cache entry after" ..
|
||||
"calling test.host/a 2 times " ..
|
||||
", but got %d rows.", row[1]
|
||||
))
|
||||
end
|
||||
end)
|
||||
it("should expose the database connection", function()
|
||||
local cache = require("cache")
|
||||
configure()
|
||||
assert(cache.cache, "Not exposed under .cache")
|
||||
end)
|
||||
it("does not cache the page if the user is logged in", function()
|
||||
local read_get = require("endpoints.read_get")
|
||||
local cache = require("cache")
|
||||
env_mock.mockdb()
|
||||
renderspy = spy.on(cache,"render")
|
||||
configure()
|
||||
for row in cache.cache:rows("SELECT COUNT(*) FROM cache") do
|
||||
assert(row[1] == 0, string.format(
|
||||
"Cache should not have any rows before " ..
|
||||
"request have been made."
|
||||
))
|
||||
end
|
||||
local req = env_mock.session()
|
||||
req.method = "GET"
|
||||
req.path = "/a"
|
||||
req.args = {}
|
||||
read_get(req)
|
||||
for row in cache.cache:rows("SELECT COUNT(*) FROM cache") do
|
||||
assert(row[1] == 0, string.format(
|
||||
"Cache should not cache requests made by " ..
|
||||
"logged in users."
|
||||
))
|
||||
end
|
||||
end)
|
||||
it("caches one page for domain/id and author.domain/id",function()
|
||||
local read_get = require("endpoints.read_get")
|
||||
local cache = require("cache")
|
||||
configure()
|
||||
local req_m = {__index = {
|
||||
method = "GET",
|
||||
path = "/a",
|
||||
args = {}
|
||||
}}
|
||||
local base_host = {host="test.host"}
|
||||
local base_req = setmetatable({host="test.host"},req_m)
|
||||
read_get(base_req)
|
||||
local user_req = setmetatable({host="admin.test.host"},req_m)
|
||||
read_get(user_req)
|
||||
for row in cache.cache:rows("SELECT COUNT(*) FROM cache") do
|
||||
assert(row[1] == 1, string.format(
|
||||
"Exepected only one cache entry for" ..
|
||||
"'test.host/a' and 'admin.test.host/a'" ..
|
||||
", but got %d rows.", row[1]
|
||||
))
|
||||
end
|
||||
end)
|
||||
it("detours configure",function()
|
||||
local s = {}
|
||||
local c = false
|
||||
local oldconfigure = configure
|
||||
--local index_get = require("endpoints.index_get")
|
||||
--configure(s)
|
||||
--assert(c)
|
||||
end)
|
||||
describe("author home page",function()
|
||||
it("lists all stories by that author",function()
|
||||
local read_get = require("endpoints.index_get")
|
||||
local cache = require("cache")
|
||||
configure()
|
||||
local req_m = {__index = {
|
||||
method = "GET",
|
||||
path = "/a",
|
||||
args = {}
|
||||
}}
|
||||
local base_host = {host="user.test.host"}
|
||||
for row in cache.cache:rows("SELECT COUNT(*) FROM cache") do
|
||||
assert(row[1] == 0, string.format(
|
||||
"Before requesting user homepage " ..
|
||||
"there should not be any pages in the " ..
|
||||
"cache."
|
||||
))
|
||||
end
|
||||
local base_req = setmetatable({host="user.test.host"},req_m)
|
||||
read_get(base_req)
|
||||
for row in cache.cache:rows("SELECT COUNT(*) FROM cache") do
|
||||
assert(row[1] == 1, string.format(
|
||||
"After reading the autor home page, " ..
|
||||
" only that page should be cached."
|
||||
))
|
||||
end
|
||||
read_get(base_req)
|
||||
for row in cache.cache:rows("SELECT COUNT(*) FROM cache") do
|
||||
assert(row[1] == 1, string.format(
|
||||
"After reading the autor home page " ..
|
||||
" twice only that page should be cached."
|
||||
))
|
||||
end
|
||||
end)
|
||||
end)
|
||||
end)
|
|
@ -0,0 +1,255 @@
|
|||
|
||||
local config = require("config")
|
||||
config.db = "data/unittest.db"
|
||||
local mock = {}
|
||||
local env = {}
|
||||
mock.env = env
|
||||
--Mirror print prior to lua 5.4
|
||||
--local oldprint = print
|
||||
local ntostring
|
||||
|
||||
-- Modules that get required lazily
|
||||
local login_post
|
||||
local fuzzy
|
||||
local claim_post
|
||||
local session
|
||||
print_table= function(...)
|
||||
print("Print called")
|
||||
local args = {...}
|
||||
local mapped_args = {}
|
||||
for k,v in ipairs(args) do
|
||||
print("mapping",v)
|
||||
mapped_args[k] = ntostring(v)
|
||||
end
|
||||
print(table.concat(mapped_args,"\t"))
|
||||
end
|
||||
|
||||
local tables_called = {}
|
||||
function ntostring(arg)
|
||||
io.stdout:write("Calling tostring with:",tostring(arg),"\n")
|
||||
if type(arg) ~= "table" then
|
||||
return tostring(arg)
|
||||
end
|
||||
local function tbl_to_string(tbl,indent)
|
||||
if tables_called[tbl] then
|
||||
return tostring(tbl)
|
||||
end
|
||||
tables_called[tbl] = true
|
||||
if type(tbl) ~= "table" then
|
||||
error("tbl_to_string must be called with a table, got a " .. type(tbl))
|
||||
end
|
||||
local lines = {string.rep("\t",indent) .. "{"}
|
||||
for k, v in pairs(tbl) do
|
||||
local kv = {}
|
||||
for i,n in pairs{k,v} do
|
||||
if type(n) == "table" then
|
||||
kv[i] = string.format("%q",tbl_to_string(n,indent+1))
|
||||
else
|
||||
kv[i] = string.format("%q",tostring(n))
|
||||
end
|
||||
end
|
||||
table.insert(
|
||||
lines,
|
||||
string.rep("\t",indent+1) .. kv[1] .. ":" .. kv[2]
|
||||
)
|
||||
end
|
||||
table.insert(lines,string.rep("\t",indent) .. "}")
|
||||
return table.concat(lines,"\n")
|
||||
end
|
||||
--It's a table
|
||||
local ret = tbl_to_string(arg,0)
|
||||
tables_called = {}
|
||||
return ret
|
||||
end
|
||||
|
||||
local smr_mock_env = {
|
||||
--An empty function that gets called to set up databases and do other
|
||||
--startup-time stuff, runs once for each worker process.
|
||||
configure = spy.new(function(...) end),
|
||||
http_request_get_host = spy.new(function(req) return req.host or "test.host" end),
|
||||
http_request_get_path = spy.new(function(req) return req.path or "/" end),
|
||||
http_request_populate_qs = spy.new(function(req) req.qs_populated = true end),
|
||||
http_request_populate_post = spy.new(function(req) req.post_populated = true end),
|
||||
http_populate_multipart_form = spy.new(function(req)
|
||||
req.post_populated = true
|
||||
req.multipart_form_populated = true
|
||||
end),
|
||||
http_argument_get_string = spy.new(function(req,str)
|
||||
assert(req.args,"requests should have a .args table")
|
||||
assert(
|
||||
req.method == "GET" and req.qs_populated or
|
||||
req.method == "POST" and req.post_populated,[[
|
||||
http_argument_get_string() can only be called after
|
||||
the appropriate populate method has been called, either
|
||||
http_request_populate_qs(req) or
|
||||
http_request_populate_post(req)]]
|
||||
)
|
||||
return req.args[str]
|
||||
end),
|
||||
http_file_get = spy.new(function(req,filename)
|
||||
assert(req.multipart_form_populated,[[
|
||||
http_file_get() can only be called after the approriate
|
||||
populate method has been called. (http_populate_multipart_form())
|
||||
]])
|
||||
return req.file["pass"]
|
||||
end),
|
||||
http_response = spy.new(function(req,errcode,html)
|
||||
req.responsecode = errcode
|
||||
req.response = html
|
||||
end),
|
||||
http_response_header = spy.new(function(req,name,value)
|
||||
req.response_headers = req.response_headers or {}
|
||||
req.response_headers[name] = value
|
||||
end),
|
||||
http_method_text = spy.new(function(req) return req.method end),
|
||||
http_populate_cookies = spy.new(function(req)
|
||||
req.cookies_populated = true
|
||||
req.cookies = req.cookies or {}
|
||||
end),
|
||||
http_request_cookie = spy.new(function(req,cookie_name)
|
||||
assert(req.cookies_populated,[[
|
||||
http_request_cookie() can only be called after
|
||||
http_populate_cookies() has been called.
|
||||
]])
|
||||
return req.cookies[cookie_name]
|
||||
end),
|
||||
http_response_cookie = spy.new(function(req,name,value) req.cookies = {[name] = value} end),
|
||||
log = spy.new(function(priority, message) --[[print(string.format("[LOG %q]: %s",priority,message))]] end),
|
||||
--Logging:
|
||||
LOG_DEBUG = "debug",
|
||||
LOG_INFO = "info",
|
||||
LOG_NOTICE = "notice",
|
||||
LOG_WARNING = "warning",
|
||||
LOG_ERR = "error",
|
||||
LOG_CRIT = "critical",
|
||||
LOG_ALERT = "alert",
|
||||
LOG_EMERG = "emergency",
|
||||
sha3 = spy.new(function(message) return "digest" end),
|
||||
}
|
||||
|
||||
local smr_mock_env_m = {
|
||||
__index = smr_mock_env,
|
||||
__newindex = function(self,key,value)
|
||||
local setter = debug.getinfo(2)
|
||||
if setter.source ~= "=[C]" and setter.source ~= "@./global.lua" and key ~= "configure" then
|
||||
error(string.format(
|
||||
"Tried to create a global %q with value %s\n%s",
|
||||
key,
|
||||
tostring(value),
|
||||
debug.traceback()
|
||||
),2)
|
||||
else
|
||||
rawset(self,key,value)
|
||||
end
|
||||
end
|
||||
}
|
||||
|
||||
local sfmt = string.format
|
||||
local string_fmt_override = {
|
||||
format = spy.new(function(fmt,...)
|
||||
local args = {...}
|
||||
for i = 1,#args do
|
||||
if args[i] == nil then
|
||||
args[i] = "nil"
|
||||
end
|
||||
end
|
||||
table.insert(args,1,fmt)
|
||||
return sfmt(unpack(args))
|
||||
end)
|
||||
}
|
||||
setmetatable(string_fmt_override,{__index = string})
|
||||
local smr_override_env = {
|
||||
--Detour assert so we don't actually perform any checks
|
||||
--assert = spy.new(function(bool,msg,level) return bool end),
|
||||
--Allow string.format to accept nil as arguments
|
||||
--string = string_fmt_override
|
||||
}
|
||||
|
||||
mock.olds = {}
|
||||
|
||||
function mock.setup()
|
||||
setmetatable(_G,smr_mock_env_m)
|
||||
for k,v in pairs(smr_override_env) do
|
||||
mock.olds[k] = _G[k]
|
||||
_G[k] = v
|
||||
end
|
||||
end
|
||||
|
||||
function mock.mockdb()
|
||||
local config = require("config")
|
||||
--config.db = "data/unittest.db"
|
||||
config.db = ":memory:"
|
||||
assert(os.execute("rm " .. config.db))
|
||||
package.loaded.db = nil
|
||||
local db = require("db")
|
||||
configure()
|
||||
end
|
||||
|
||||
function mock.teardown()
|
||||
setmetatable(_G,{})
|
||||
for k,v in pairs(mock.olds) do
|
||||
_G[k] = v
|
||||
end
|
||||
end
|
||||
|
||||
local session_m = {__index = {
|
||||
login = function(self, who, pass)
|
||||
if not self.args then
|
||||
error("Request should have a .args table")
|
||||
end
|
||||
print("Right before requireing login_post endpoint, self.args is " .. tostring(self.args))
|
||||
print("After requireing login_post edpoint, self.args is " .. tostring(self.args))
|
||||
self.args.user = who
|
||||
self.args.pass = pass
|
||||
login_post(self)
|
||||
error("TODO")
|
||||
end,
|
||||
logout = function(self)
|
||||
error("TODO")
|
||||
end,
|
||||
req = function(self, args)
|
||||
|
||||
end
|
||||
}}
|
||||
|
||||
function mock.session(tbl)
|
||||
if login_post == nil then
|
||||
login_post = require("endpoints.login_post")
|
||||
fuzzy = require("spec.fuzzgen")
|
||||
claim_post = require("endpoints.claim_post")
|
||||
configure()
|
||||
end
|
||||
local username = fuzzy.subdomain()
|
||||
local claim_req = {
|
||||
method = "POST",
|
||||
host = "test.host",
|
||||
path = "/_claim",
|
||||
args = {
|
||||
user = username
|
||||
}
|
||||
}
|
||||
claim_post(claim_req)
|
||||
local login_req = {
|
||||
method = "POST",
|
||||
host = "test.host",
|
||||
path = "/_login",
|
||||
args = {
|
||||
user = username
|
||||
},
|
||||
file = {
|
||||
pass = claim_req.response
|
||||
}
|
||||
}
|
||||
login_post(login_req)
|
||||
local cookie = login_req.response_headers["set-cookie"]
|
||||
local sessionid = cookie:match("session=([^;]+)")
|
||||
local req = {
|
||||
host = "test.host",
|
||||
cookies = {
|
||||
session = sessionid
|
||||
}
|
||||
}
|
||||
return req, username
|
||||
end
|
||||
|
||||
return mock
|
|
@ -0,0 +1,44 @@
|
|||
local rng = {}
|
||||
function rng.markup() return math.random() > 0.5 and "plain" or "imageboard" end
|
||||
function rng.generate_str(length,characters)
|
||||
return function()
|
||||
local t = {}
|
||||
local rnglength = math.random(2,length)
|
||||
for i = 1,rnglength do
|
||||
local rngpos = math.random(#characters)
|
||||
local rngchar = string.sub(characters,rngpos,rngpos)
|
||||
table.insert(t,rngchar)
|
||||
end
|
||||
local ret = table.concat(t)
|
||||
return ret
|
||||
end
|
||||
end
|
||||
function rng.characters(mask)
|
||||
local t = {}
|
||||
for i = 1,255 do
|
||||
if string.match(string.char(i), mask) then
|
||||
table.insert(t,string.char(i))
|
||||
end
|
||||
end
|
||||
return table.concat(t)
|
||||
end
|
||||
function rng.maybe(input,chance)
|
||||
chance = chance or 0.5
|
||||
if math.random() < chance then
|
||||
return input
|
||||
end
|
||||
end
|
||||
rng.any = rng.generate_str(1024,rng.characters("."))
|
||||
rng.subdomain = rng.generate_str(30,rng.characters("[0-9a-z]"))
|
||||
rng.storyname = rng.generate_str(10,"[a-zA-Z0-9$+!*'(),-]")
|
||||
rng.storyid = function() return tostring(math.random(1,10)) end
|
||||
rng.tags = function()
|
||||
local tag_gen = rng.generate_str(10,"[%w%d ]")
|
||||
local t = {}
|
||||
for i = 1,10 do
|
||||
table.insert(t,tag_gen())
|
||||
end
|
||||
return table.concat(t,";")
|
||||
end
|
||||
|
||||
return rng
|
|
@ -0,0 +1,225 @@
|
|||
|
||||
_G.spy = spy
|
||||
local env_mock = require("spec.env_mock")
|
||||
local rng = require("spec.fuzzgen")
|
||||
|
||||
describe("smr login",function()
|
||||
setup(env_mock.setup)
|
||||
teardown(env_mock.teardown)
|
||||
it("should allow someone to claim an account",function()
|
||||
env_mock.mockdb()
|
||||
local claim_post = require("endpoints.claim_post")
|
||||
configure()
|
||||
claim_req = {
|
||||
method = "POST",
|
||||
host = "test.host",
|
||||
path = "/_claim",
|
||||
args = {
|
||||
user = "user"
|
||||
}
|
||||
}
|
||||
claim_post(claim_req)
|
||||
assert(
|
||||
claim_req.responsecode == 200,
|
||||
"Login did not respond with a 200 code"
|
||||
)
|
||||
assert(
|
||||
claim_req.response_headers,
|
||||
"Login did not have response headers."
|
||||
)
|
||||
assert(
|
||||
claim_req.response_headers["Content-Disposition"],
|
||||
"Login did not have a Content Disposition header to set filename"
|
||||
)
|
||||
assert(
|
||||
string.find(claim_req.response_headers["Content-Disposition"],"attachment"),
|
||||
"Login did not mark passfile as an attachment"
|
||||
)
|
||||
assert(
|
||||
claim_req.response_headers["Content-Disposition"]:find(".passfile"),
|
||||
"Login did not name the returned file with the .passfile extension."
|
||||
)
|
||||
assert(
|
||||
claim_req.response_headers["Content-Type"],
|
||||
"Login did not respond with a Content-Type"
|
||||
)
|
||||
assert(
|
||||
claim_req.response_headers["Content-Type"] == "application/octet-stream",
|
||||
"Login did not mark Content-Type correctly (application/octet-stream)"
|
||||
)
|
||||
assert(
|
||||
claim_req.response,
|
||||
"Login did not return a passfile"
|
||||
)
|
||||
end)
|
||||
it("should give a session cookie when logging in with a user",function()
|
||||
local claim_post = require("endpoints.claim_post")
|
||||
local login_post = require("endpoints.login_post")
|
||||
local config = require("config")
|
||||
local db = require("db")
|
||||
local session = require("session")
|
||||
configure()
|
||||
|
||||
local username = rng.subdomain()
|
||||
local claim_req = {
|
||||
method = "POST",
|
||||
host = "test.host",
|
||||
path = "/_claim",
|
||||
args = {
|
||||
user = username
|
||||
}
|
||||
}
|
||||
claim_post(claim_req)
|
||||
login_req = {
|
||||
method = "POST",
|
||||
host = "test.host",
|
||||
path = "/_login",
|
||||
args = {
|
||||
user = username
|
||||
},
|
||||
file = {
|
||||
pass = claim_req.response
|
||||
}
|
||||
}
|
||||
sessionspy = spy.on(session,"start")
|
||||
login_post(login_req)
|
||||
assert.spy(sessionspy).was.called()
|
||||
local code = login_req.responsecode
|
||||
assert(
|
||||
code >= 300 and code <= 400,
|
||||
"Sucessful login should redirect the user, code:" .. tostring(code)
|
||||
)
|
||||
assert(
|
||||
login_req.response_headers,
|
||||
"Sucessful login should have response headers"
|
||||
)
|
||||
assert(
|
||||
login_req.response_headers["set-cookie"],
|
||||
"Sucessful login should set a cookie on the client"
|
||||
)
|
||||
local cookie = login_req.response_headers["set-cookie"]
|
||||
local domain_noport = string.match(config.domain,"(.-):?%d*$")
|
||||
assert(
|
||||
string.find(cookie,"session="),
|
||||
"Sucessful login should set a cookie named 'session'"
|
||||
)
|
||||
assert(
|
||||
string.find(cookie,"Domain="..domain_noport),
|
||||
"Cookies should only be set for the configured domain"
|
||||
)
|
||||
assert(
|
||||
string.find(cookie,"HttpOnly"),
|
||||
"Cookies should have the HttpOnly flag set"
|
||||
)
|
||||
assert(
|
||||
string.find(cookie,"Secure"),
|
||||
"Cookies should have the secure flag set"
|
||||
)
|
||||
assert(
|
||||
login_req.response_headers["Location"],
|
||||
"Sucessful login should redirect to a location"
|
||||
)
|
||||
assert(
|
||||
login_req.response_headers["Location"] == "https://" .. username .. "." .. config.domain,
|
||||
"Login redirect should get domain from config file"
|
||||
)
|
||||
end)
|
||||
it("should allow logged in users the option of posting under their username",function()
|
||||
local claim_post = require("endpoints.claim_post")
|
||||
local login_post = require("endpoints.login_post")
|
||||
local paste_get = require("endpoints.paste_get")
|
||||
local paste_post = require("endpoints.paste_post")
|
||||
local read_get = require("endpoints.read_get")
|
||||
local db = require("db")
|
||||
local config = require("config")
|
||||
config.domain = "test.host"
|
||||
configure()
|
||||
local username = rng.subdomain()
|
||||
local claim_req = {
|
||||
method = "POST",
|
||||
host = "test.host",
|
||||
path = "/_claim",
|
||||
args = {
|
||||
user = username
|
||||
}
|
||||
}
|
||||
claim_post(claim_req)
|
||||
login_req = {
|
||||
method = "POST",
|
||||
host = "test.host",
|
||||
path = "/_login",
|
||||
args = {
|
||||
user = username
|
||||
},
|
||||
file = {
|
||||
pass = claim_req.response
|
||||
}
|
||||
}
|
||||
login_post(login_req)
|
||||
local cookie = login_req.response_headers["set-cookie"]
|
||||
local sessionid = cookie:match("session=([^;]+)")
|
||||
local paste_req_get = {
|
||||
method = "GET",
|
||||
host = username .. ".test.host",
|
||||
path = "/_paste",
|
||||
cookies = {
|
||||
session = sessionid
|
||||
}
|
||||
}
|
||||
paste_get(paste_req_get)
|
||||
local option = '<option value="' .. username .. '">' .. username .. '</option>'
|
||||
assert(
|
||||
paste_req_get.response:find(option),
|
||||
"After logging in the user should have an option to "..
|
||||
"make posts as themselves. Looking for " .. option ..
|
||||
" but didn't find it in " .. paste_req_get.response
|
||||
)
|
||||
local paste_req_post = {
|
||||
method = "POST",
|
||||
host = username .. ".test.host",
|
||||
path = "/_paste",
|
||||
cookies = {
|
||||
session = sessionid
|
||||
},
|
||||
args = {
|
||||
title = "post title",
|
||||
text = "post text",
|
||||
markup = "plain",
|
||||
tags = "",
|
||||
pasteas = username
|
||||
}
|
||||
}
|
||||
paste_post(paste_req_post)
|
||||
for row in db.conn:rows("SELECT COUNT(*) FROM posts") do
|
||||
assert(row[1] == 1, "Expected exactly 1 post in sample db")
|
||||
end
|
||||
local code = paste_req_post.responsecode
|
||||
assert(code >= 300 and code <= 400, "Should receive a redirect after posting, got:" .. tostring(code))
|
||||
assert(paste_req_post.response_headers, "Should have received some response headers")
|
||||
assert(paste_req_post.response_headers.Location, "Should have received a location in response headers")
|
||||
local redirect = paste_req_post.response_headers.Location:match("(/[^/]*)$")
|
||||
local read_req_get = {
|
||||
method = "GET",
|
||||
host = username .. ".test.host",
|
||||
path = redirect,
|
||||
cookies = {
|
||||
session = sessionid
|
||||
},
|
||||
args = {}
|
||||
}
|
||||
read_get(read_req_get)
|
||||
local response = read_req_get.response
|
||||
assert(
|
||||
response:find([[post title]]),
|
||||
"Failed to find post title in response."
|
||||
)
|
||||
assert(
|
||||
response:find('By <a href="https://' .. username .. '.test.host">' .. username .. '</a>'),
|
||||
"Failed to find the author name after a paste."
|
||||
)
|
||||
assert(
|
||||
response:find([[post text]]),
|
||||
"Failed to find post text in response."
|
||||
)
|
||||
end)
|
||||
end)
|
|
@ -1,4 +1,44 @@
|
|||
function() return math.random() > 0.5 and "plain" or "imageboard" end
|
||||
local function rng_markup() return math.random() > 0.5 and "plain" or "imageboard" end
|
||||
local function generate_str(length,characters)
|
||||
return function()
|
||||
local t = {}
|
||||
local rnglength = math.random(2,length)
|
||||
for i = 1,rnglength do
|
||||
local rngpos = math.random(#characters)
|
||||
local rngchar = string.sub(characters,rngpos,rngpos)
|
||||
table.insert(t,rngchar)
|
||||
end
|
||||
local ret = table.concat(t)
|
||||
return ret
|
||||
end
|
||||
end
|
||||
local function characters(mask)
|
||||
local t = {}
|
||||
for i = 1,255 do
|
||||
if string.match(string.char(i), mask) then
|
||||
table.insert(t,string.char(i))
|
||||
end
|
||||
end
|
||||
return table.concat(t)
|
||||
end
|
||||
local function maybe(input,chance)
|
||||
chance = chance or 0.5
|
||||
if math.random() < chance then
|
||||
return input
|
||||
end
|
||||
end
|
||||
local rng_any = generate_str(1024,characters("."))
|
||||
local rng_subdomain = generate_str(30,characters("[0-9a-z]"))
|
||||
local rng_storyname = generate_str(10,"[a-zA-Z0-9$+!*'(),-]")
|
||||
local rng_storyid = function() return tostring(math.random(1,10)) end
|
||||
local rng_tags = function()
|
||||
local tag_gen = generate_str(10,"[%w%d ]")
|
||||
local t = {}
|
||||
for i = 1,10 do
|
||||
table.insert(t,tag_gen())
|
||||
end
|
||||
return table.concat(t,";")
|
||||
end
|
||||
|
||||
local pages = {
|
||||
index = {
|
||||
|
@ -17,8 +57,8 @@ local pages = {
|
|||
title = rng_any,
|
||||
text = rng_any,
|
||||
pasteas = rng_subdomain,
|
||||
markup = rng_markup
|
||||
tags = rng_any;
|
||||
markup = rng_markup,
|
||||
tags = rng_any,
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@ -30,6 +70,7 @@ local pages = {
|
|||
story=rng_storyid
|
||||
},
|
||||
POST={
|
||||
story=rng_storyid,
|
||||
title = rng_any,
|
||||
text = rng_any,
|
||||
pasteas = rng_subdomain,
|
||||
|
@ -38,7 +79,17 @@ local pages = {
|
|||
},
|
||||
}
|
||||
},
|
||||
--TODO:bio
|
||||
bio = {
|
||||
route = "/_bio",
|
||||
name = "edit_bio",
|
||||
methods = {
|
||||
GET={},
|
||||
POST={
|
||||
user = rng_subdomain,
|
||||
pass = rng_any
|
||||
},
|
||||
}
|
||||
},
|
||||
login = {
|
||||
route = "/_login",
|
||||
name = "login",
|
||||
|
@ -73,7 +124,12 @@ local pages = {
|
|||
route = "/_preview",
|
||||
name = "preview",
|
||||
methods = {
|
||||
POST = {},
|
||||
POST = {
|
||||
title = rng_any,
|
||||
text = rng_any,
|
||||
markup = rng_markup,
|
||||
tags = maybe(rng_tags)
|
||||
},
|
||||
}
|
||||
},
|
||||
search = {
|
||||
|
@ -109,26 +165,131 @@ local function fuzz_endpoint(endpoint, parameters)
|
|||
return true
|
||||
end
|
||||
|
||||
local function generate_req(tbl)
|
||||
assert(({GET=true,POST=true})[tbl.method])
|
||||
return tbl
|
||||
end
|
||||
local env = {}
|
||||
local smr_mock_env = {
|
||||
--An empty function that gets called to set up databases and do other
|
||||
--startup-time stuff, runs once for each worker process.
|
||||
configure = spy.new(function(...) end),
|
||||
http_request_get_host = spy.new(function(req) return env.host or "test.host" end),
|
||||
http_request_get_path = spy.new(function(req) return env.path or "/" end),
|
||||
http_request_populate_qs = spy.new(function(req) req.qs_populated = true end),
|
||||
http_request_populate_post = spy.new(function(req) req.post_populated = true end),
|
||||
http_populate_multipart_form = spy.new(function(req) req.post_populated = true end),
|
||||
http_argument_get_string = spy.new(function(req,str)
|
||||
assert(
|
||||
req.method == "GET" and req.qs_populated or
|
||||
req.method == "POST" and req.post_populated,[[
|
||||
http_argument_get_string() can only be called after
|
||||
the appropriate populate method has been called, either
|
||||
http_request_populate_qs(req) or
|
||||
http_request_populate_post(req)]]
|
||||
)
|
||||
return req.args[str]
|
||||
end),
|
||||
http_file_get = spy.new(function(req,filename) return "file data" end),
|
||||
http_response = spy.new(function(req,errcode,html) end),
|
||||
http_response_header = spy.new(function(req,name,value) end),
|
||||
http_method_text = spy.new(function(req) return req.method end),
|
||||
http_populate_cookies = spy.new(function(req) req.cookies_populated = true end),
|
||||
http_request_cookie = spy.new(function(req,cookie_name)
|
||||
|
||||
end),
|
||||
http_response_cookie = spy.new(function(req,name,value) req.cookies = {[name] = value} end),
|
||||
log = spy.new(function(priority, message) end),
|
||||
sha3 = spy.new(function(message) return "digest" end),
|
||||
}
|
||||
local sfmt = string.format
|
||||
local string_fmt_override = {
|
||||
--[[
|
||||
format = spy.new(function(fmt,...)
|
||||
local args = {...}
|
||||
for i = 1,#args do
|
||||
if args[i] == nil then
|
||||
args[i] = "nil"
|
||||
end
|
||||
end
|
||||
table.insert(args,1,fmt)
|
||||
return sfmt(unpack(args))
|
||||
end)
|
||||
]]
|
||||
}
|
||||
setmetatable(string_fmt_override,{__index = string})
|
||||
local smr_override_env = {
|
||||
--Detour assert so we don't actually perform any checks
|
||||
--assert = spy.new(function(bool,msg,level) return bool end),
|
||||
--Allow string.format to accept nil as arguments
|
||||
--string = string_fmt_override
|
||||
}
|
||||
local smr_mock_env_m = {
|
||||
__index = smr_mock_env,
|
||||
__newindex = function(self,key,value)
|
||||
local setter = debug.getinfo(2)
|
||||
if setter.source ~= "=[C]" and setter.source ~= "@./global.lua" and key ~= "configure" then
|
||||
error(string.format(
|
||||
"Tried to create a global %q with value %s\n%s",
|
||||
key,
|
||||
tostring(value),
|
||||
debug.traceback()
|
||||
),2)
|
||||
else
|
||||
rawset(self,key,value)
|
||||
end
|
||||
end
|
||||
}
|
||||
|
||||
describe("smr",function()
|
||||
for name, obj in pairs(pages) do
|
||||
describe("endpoint " .. name,function()
|
||||
for method,parameters in pairs(obj.methods) do
|
||||
describe("method " .. method,function()
|
||||
local fname = string.format("%s_%s",name,string.lower(method))
|
||||
local olds = {}
|
||||
setup(function()
|
||||
setmetatable(_G,smr_mock_env_m)
|
||||
for k,v in pairs(smr_override_env) do
|
||||
olds[k] = _G[k]
|
||||
_G[k] = v
|
||||
end
|
||||
end)
|
||||
teardown(function()
|
||||
setmetatable(_G,{})
|
||||
for k,v in pairs(olds) do
|
||||
_G[k] = v
|
||||
end
|
||||
end)
|
||||
it("should be named appropriately",function()
|
||||
local f = assert(io.open("endpoints/"..fname .. ".lua","r"))
|
||||
f:close()
|
||||
end)
|
||||
it("should run without errors",function()
|
||||
require("endpoints." .. fname)
|
||||
end)
|
||||
it("should configure without errors",function()
|
||||
require("endpoints." .. fname)
|
||||
configure()
|
||||
end)
|
||||
it("should return a function",function()
|
||||
function configure(...) print("configure called") end
|
||||
local pagefunc = assert(require("endpoints." .. fname))
|
||||
assert(type(pagefunc) == "function")
|
||||
end)
|
||||
it("should call http_response() at some point",function()
|
||||
it("should call http_response() at some point #slow",function()
|
||||
local pagefunc = require("endpoints." .. fname)
|
||||
assert(fuzz_endpoint(pagefunc,parameters))
|
||||
for i = 1,10 do
|
||||
local req = {}
|
||||
req.method = method
|
||||
req.path = obj.route
|
||||
req.args = {}
|
||||
for param_name,param_rng_func in pairs(parameters) do
|
||||
local param = param_rng_func()
|
||||
req.args[param_name] = param
|
||||
end
|
||||
pagefunc(req)
|
||||
assert.spy(smr_mock_env.http_response).was_called()
|
||||
end
|
||||
end)
|
||||
|
||||
end)
|
||||
|
|
|
@ -1,5 +1,9 @@
|
|||
|
||||
describe("smr imageboard parser",function()
|
||||
function assertf(stmt, fmt, ...)
|
||||
if not stmt then
|
||||
error(string.format(fmt,...))
|
||||
end
|
||||
end
|
||||
describe("smr imageboard parser #parsers",function()
|
||||
it("should load without error",function()
|
||||
local parser = require("parser_imageboard")
|
||||
end)
|
||||
|
@ -7,6 +11,151 @@ describe("smr imageboard parser",function()
|
|||
local parser = require("parser_imageboard")
|
||||
local input = "Hello, world!"
|
||||
local output = parser(input)
|
||||
assert(type(output) == "str
|
||||
assert(type(output) == "string","Expected string, got: %s",type(output))
|
||||
end)
|
||||
it("should spoiler text in asterisks ",function()
|
||||
local parser = require("parser_imageboard")
|
||||
local input = "Hello, **world**!"
|
||||
local output = parser(input)
|
||||
local expected = [[<p>Hello, <span class="spoiler">world</span>!</p> ]]
|
||||
assertf(output == expected, "Expected\n%s\ngot\n%s\n", expected, output)
|
||||
end)
|
||||
it("should spoiler text in [spoiler] tags",function()
|
||||
local parser = require("parser_imageboard")
|
||||
local input = "Hello, [spoiler]world[/spoiler]!"
|
||||
local output = parser(input)
|
||||
local expected = [[<p>Hello, <span class="spoiler2">world</span>!</p> ]]
|
||||
assertf(output == expected, "Expected\n%s\ngot\n%s\n", expected, output)
|
||||
end)
|
||||
it("should italicize words in double single quotes ('')",function()
|
||||
local parser = require("parser_imageboard")
|
||||
local input = "Hello, ''world''!"
|
||||
local output = parser(input)
|
||||
local expected = [[<p>Hello, <i>world</i>!</p> ]]
|
||||
assertf(output == expected, "Expected\n%s\ngot\n%s\n", expected, output)
|
||||
end)
|
||||
it("should bold words in tripple single quotes (''')",function()
|
||||
local parser = require("parser_imageboard")
|
||||
local input = "Hello, '''world'''!"
|
||||
local output = parser(input)
|
||||
local expected = [[<p>Hello, <b>world</b>!</p> ]]
|
||||
assertf(output == expected, "Expected\n%s\ngot\n%s\n", expected, output)
|
||||
end)
|
||||
it("should underline words in double underscores (__)",function()
|
||||
local parser = require("parser_imageboard")
|
||||
local input = "Hello, __world__!"
|
||||
local output = parser(input)
|
||||
local expected = [[<p>Hello, <u>world</u>!</p> ]]
|
||||
assertf(output == expected, "Expected\n%s\ngot\n%s\n", expected, output)
|
||||
end)
|
||||
it("should make a heading out of things in double equals(==)",function()
|
||||
local parser = require("parser_imageboard")
|
||||
local input = "Hello, ==world==!"
|
||||
local output = parser(input)
|
||||
local expected = [[<p>Hello, <h2>world</h2>!</p> ]]
|
||||
assertf(output == expected, "Expected\n%s\ngot\n%s\n", expected, output)
|
||||
end)
|
||||
it("should strikethrough words in double tildes (~~)",function()
|
||||
local parser = require("parser_imageboard")
|
||||
local input = "Hello, ~~world~~!"
|
||||
local output = parser(input)
|
||||
local expected = [[<p>Hello, <s>world</s>!</p> ]]
|
||||
assertf(output == expected, "Expected\n%s\ngot\n%s\n", expected, output)
|
||||
end)
|
||||
it("should codify words in [code] tags",function()
|
||||
local parser = require("parser_imageboard")
|
||||
local input = "Hello, [code]world[/code]!"
|
||||
local output = parser(input)
|
||||
local expected = [[<p>Hello, <pre><code>world</code></pre>!</p> ]]
|
||||
assertf(output == expected, "Expected\n%s\ngot\n%s\n", expected, output)
|
||||
end)
|
||||
it("should greentext lines that start with >",function()
|
||||
local parser = require("parser_imageboard")
|
||||
local input = "Hello,\n> world!"
|
||||
local output = parser(input)
|
||||
local expected = [[<p>Hello,</p> <p><span class="greentext">> world!</span></p> ]]
|
||||
assertf(output == expected, "Expected\n%s\ngot\n%s\n", expected, output)
|
||||
end)
|
||||
it("should pinktext lines that start with <",function()
|
||||
local parser = require("parser_imageboard")
|
||||
local input = "Hello,\n< world!"
|
||||
local output = parser(input)
|
||||
local expected = [[<p>Hello,</p> <p><span class="pinktext">< world!</span></p> ]]
|
||||
assertf(output == expected, "Expected\n%s\ngot\n%s\n", expected, output)
|
||||
end)
|
||||
it("should allow for bold+italic text",function()
|
||||
local parser = require("parser_imageboard")
|
||||
local input = "Hello,'''''world!'''''"
|
||||
local output = parser(input)
|
||||
local expected = [[<p>Hello,<i><b>world!</b></i></p> ]]
|
||||
end)
|
||||
local formatting = {
|
||||
{"**","**"},
|
||||
{"[spoiler]","[/spoiler]"},
|
||||
{"''","''"},
|
||||
{"'''","'''"},
|
||||
{"__","__"},
|
||||
{"==","=="},
|
||||
{"~~","~~"},
|
||||
{"[code]","[/code]"}
|
||||
}
|
||||
local formatting_line = {"> ", "< "}
|
||||
for k,v in pairs(formatting) do
|
||||
for i = 1, 50 do
|
||||
it("should not break with " .. i .. " " .. v[1] .. " indicators in a row ",function()
|
||||
local parser = require("parser_imageboard")
|
||||
local input = "Hello, " .. string.rep(v[1],i) .. " world!"
|
||||
local start_time = os.clock()
|
||||
local output = parser(input)
|
||||
local end_time = os.clock()
|
||||
assert(end_time - start_time < 1, "Took too long")
|
||||
end)
|
||||
end
|
||||
end
|
||||
|
||||
for i = 1, 50 do
|
||||
it("Should withstand a random string of " .. i .. " formatters and words. ",function()
|
||||
local parser = require("parser_imageboard")
|
||||
local input = {}
|
||||
local function random_text()
|
||||
if math.random() > 0.5 then
|
||||
return "Hello"
|
||||
else
|
||||
return "world"
|
||||
end
|
||||
end
|
||||
local function random_wrap(text)
|
||||
local rngwrap = formatting[math.random(#formatting)]
|
||||
return rngwrap[1] .. text .. rngwrap[2]
|
||||
end
|
||||
local function random_text_recursive(i)
|
||||
if i == 0 then
|
||||
return ""
|
||||
end
|
||||
local j = math.random()
|
||||
if j < 0.33 then
|
||||
return random_text_recursive(i-1) .. random_wrap(random_text())
|
||||
elseif j < 0.66 then
|
||||
return random_wrap(random_text() .. random_text_recursive(i-1)) .. random_wrap(random_text())
|
||||
else
|
||||
return random_wrap(random_text() .. random_text_recursive(i - 1))
|
||||
end
|
||||
end
|
||||
input = random_text_recursive(i)
|
||||
local start_time = os.clock()
|
||||
local output = parser(input)
|
||||
local end_time = os.clock()
|
||||
assert(end_time - start_time < 1, "Took too long")
|
||||
end)
|
||||
end
|
||||
for _,file_name in ipairs{
|
||||
"Beauty_and_the_Banchou_1"
|
||||
} do
|
||||
it("should parser " .. file_name,function()
|
||||
local parser = require("parser_imageboard")
|
||||
local input = require("spec.parser_tests." .. file_name)
|
||||
local output = parser(input)
|
||||
--print("output:",output)
|
||||
end)
|
||||
end
|
||||
end)
|
||||
|
|
|
@ -0,0 +1,79 @@
|
|||
|
||||
_G.spy = spy
|
||||
function assertf(stmt, fmt, ...)
|
||||
if not stmt then
|
||||
error(string.format(fmt,...))
|
||||
end
|
||||
end
|
||||
|
||||
local mock_env = require("spec.env_mock")
|
||||
|
||||
describe("smr search parser #parsers #working",function()
|
||||
setup(mock_env.setup)
|
||||
teardown(mock_env.teardown)
|
||||
it("should load without error",function()
|
||||
local parser = require("parser_search")
|
||||
end)
|
||||
it("should accept a string and return a string",function()
|
||||
local parser = require("parser_search")
|
||||
local input = "Hello, world!"
|
||||
local output = parser(input)
|
||||
assert(type(output) == "string","Expected string, got: %s",type(output))
|
||||
end)
|
||||
it("should parse a string into it's components",function()
|
||||
local parser = require("parser_search")
|
||||
local input = "+search +test +author=admin"
|
||||
local search_tag, test_tag, author_parsed = false, false, false
|
||||
local sql, ast = parser(input)
|
||||
for _,v in pairs(ast.tags) do
|
||||
if v[3] == "Search" then
|
||||
search_tag = true
|
||||
elseif v[3] == "Test" then
|
||||
test_tag = true
|
||||
end
|
||||
end
|
||||
for _,v in pairs(ast.author) do
|
||||
if v[3] == "%admin%" then
|
||||
author_parsed = true
|
||||
end
|
||||
end
|
||||
|
||||
assert(search_tag, "Search tag must be found")
|
||||
assert(test_tag, "Test tag must be found")
|
||||
assert(author_parsed, "Author tag must be found")
|
||||
end)
|
||||
it("should parse tags with a hyphen in the middle",function()
|
||||
local parser = require("parser_search")
|
||||
local input = "+post-modern"
|
||||
local sql, ast = parser(input)
|
||||
assert(#ast.tags == 1, "+post-modern should be one tag")
|
||||
end)
|
||||
it("should parse an empty string without errors",function()
|
||||
local parser = require("parser_search")
|
||||
local input = ""
|
||||
local sql, ast = parser(input)
|
||||
assert(sql,"Did not receive sql")
|
||||
assert(ast,"Did not receive ast")
|
||||
end)
|
||||
it("should parse a hits request",function()
|
||||
local parser = require("parser_search")
|
||||
local input = "+hits>=0"
|
||||
local sql, ast = parser(input)
|
||||
assert(ast.hits, "should have a .hits table")
|
||||
local hit = ast.hits[1]
|
||||
assert(hit[1] == "+", "Failed to have an intersect constraint for hits, got " .. hit[1])
|
||||
assert(hit[2] == ">=", "Failed to have a greater-than-or-equal constraint for hits, got " .. hit[2])
|
||||
assert(hit[3] == 0, "Failed to find >=0 for hits, got " .. hit[3])
|
||||
end)
|
||||
it("should parse a title request", function()
|
||||
local parser = require("parser_search")
|
||||
local input = "+title=the balled of pala-al-din"
|
||||
local sql, ast = parser(input)
|
||||
assert(ast.title, "should have a .title table")
|
||||
local title = ast.title[1]
|
||||
assert(title[1] == "+", "Failed to have an intersect constraint for title, got " .. title[1])
|
||||
assert(title[2] == "=", "Failed to have a like constraint for title, got " .. title[2])
|
||||
assert(title[3] == "%the balled of pala-al-din%", "Failed to find title name, got " .. title[3])
|
||||
end)
|
||||
end)
|
||||
|
|
@ -0,0 +1,105 @@
|
|||
return [==[
|
||||
The angry dogs, sirens, and the occasional angry shout in the distance. A normal high schooler would be afraid to walk a rough road like this. The road to the local high-school, it's a testament to one's strength in itself. "Last Shot High" they call it, the school that takes on all the kids that aren't accepted at any other educational institution. That includes the massive bodybuilder-esque man in a school uniform walking down the road right now. His pants are baggy and held up by a studded belt around his waist with a chain dangling on his leg. His blazer is modified to hang down to his knees, his long pompadour stands strong against the wind. Who is this menace to society? This rebellious youth?
|
||||
He is Tankaroshi Ryuji, the ones who orbit him call him Tank or Tank-sama. And for who he is? He's the second year banchou of Last Shot High.
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
I open the front door of the school and march past the lockers to the stairway. A group of first years are squatting at the bottom and keep their heads down as I pass. I climb the stairs to the second year floor. When I pass one of the science rooms I can hear jeers and sounds of fighting. The door opens and a teacher hustles out, deciding it's best to go somewhere else and wait for it to blow over. The usual scenes that play out before me cause me to daze out before I realize I've reached my destination. The old Art room. It's long since been abandoned. After all, not much use for an art room when most of the kids here have never even picked up a pencil. So now it's the hangout for me and anyone who associates with me. As I reach for the doorknob a sound catches my attention, the sound of a feminine yelp. I stand there with my hand on the door, listening for any familiar voices in the mix. All I can hear is a few guys and that one voice.
|
||||
"Hmm..." Situations like this are tricky, this isn't exactly a school with damsels in distress, usually when I try to help some girl out it turns out she started the fight and now I'm the asshole for interfering. But as usual I can't keep myself from it, I'm a man after all. I walk down the hall and turn the corner, from the back it looks like three third years are gathered around someone.
|
||||
"Oi!" I yell at the three seniors who turn towards me.
|
||||
"EH?! Well if isn't the big bad Tankaroshi-kun." The tall one seems to be the leader here as his cronies laugh.
|
||||
"Picking on a girl in my hallway eh? I hope you weren't planning on getting out of here alive." I slide my blazer off and set it to the side. A freezing wind blows right through my tank top and bites at my arms, I can't help but shiver lightly. Jesus we can't afford heat anymore?
|
||||
"Oh!? Is that right? Look at the little second year shivering in his britches." They shove someone behind them as they saunter up to me.
|
||||
The tallest one stops right in front of me and looks up. He starts to poke my chest as he talks to me. "You think just because you're some roided up gorilla with half a foot on us you can take all three of us? How about you just go back to your classroo-"
|
||||
I cut him off my grabbing his finger with my hand and lifting him up off the ground by it, his feet flailing wildly under him. I rear my head back and slam my forehead into his shocked face. Quickly, I throw him onto the mook on the left before turning my attention to the senior on my right. He swings his arm into my frame but it stops like he had just punched a wall. I grab onto his hair and swing him face first into the wall beside me.
|
||||
"Ora!" The leader had gotten back up and tried to tackle me in the back. I stumble a foot forward and drop my elbow backwards on top of him, slamming him straight down into the linoleum. I turn around to see the last one staring at me in shock.
|
||||
"Well come on senior, teach me something." I sneer at him. And with that the fight is over, he ran away as his "friend" is clutching his face on the ground beside the wall. I take a step and kick the leader in the stomach. "Ora! You thought you could take me?! You're a thousand years too early!" He grabs his stomach and I throw my hands into my pockets, then turn to walk away.
|
||||
"W-Wait please!" the sound of wood against tile approaches me at the sound of the feminine voice. Oh right, I did come here to save some chick huh?
|
||||
I turn around to see a slender, tall girl wearing wooden sandals, white socks, and a long white kimono. Oh it's her.
|
||||
"You're Tankaroshi-san Ryuji." She asks smiling.
|
||||
I look her up and down, up to this point I've got glempses of her but haven't gotten to really see her up close yet.
|
||||
I reach my hand out towards her cheek and pluck a frozen tear off of her and flick it down the hallway. "Yeah, Fubuki right?"
|
||||
Her cheeks turn a dark shade of blue, "Oh I didn't know you knew me...and you're using my first name too..."
|
||||
Hard not to, she sticks out more than any other person here. The new kid Tsuma Fubuki, and she's a Yuki-Onna.Now, monster girls aren't too common outside of the large metropolises, even a big city like ours seeing one is a rare sight. But even still that's not the only reason she sticks out.
|
||||
She wears long outdated traditional garb around. (The girls here wouldn't be caught in anything so lady-like)
|
||||
She scores the highest in all of the school. (Not a hard achievement by any means, but they rival even outside schools as well.)
|
||||
And she's probably the only one here who wont end up dead in the streets or in the Yakuza.
|
||||
Hell she's a model student, I'd say the only reason she ended up here is because no school wants a ghost haunting their halls.
|
||||
We sit there in silence for a a few minutes as we awkwardly stare at each other. Her white kimono grips all the right spots, her hourglass figure and modest bust accented by it. Her face is spotless and pure as snow. Her light blue skin and white, almost dead looking eyes are pretty in a way. Whitish blue hair hangs down to her lower back right above her Kimono's sash. Well this conversation is going nowhere.
|
||||
"Right...bye." I turn around with my hands in my pocket and walk away.
|
||||
"Oh! Please wait!"
|
||||
I sigh and turn back around.
|
||||
"I wanted to thank you for saving me." She gives me a deep bow at the waist and holds it for an uncomfortable amount of time.
|
||||
"Right...bye." I turn around and take a step forward before I feel something on the back of my arm.
|
||||
"Tankaroshi-san!" Her small hand tries to grip the back of my arm. It felt icy cold at first but has quickly grown to a comforting coolness. After she notices me staring at her hand she takes it back and folds her hands into her lap.
|
||||
"Tankaroshi-san!"
|
||||
I pinch the bridge of my nose, "Just call me tank or Tankaroshi if you'd rather. Drop the san."
|
||||
She nods and continues, "Tankaroshi! What do you like to eat?"
|
||||
"Meat. Protein." I answer her out of left field question.
|
||||
She nods her head and looks at me with a look of fiery determination, "From now on I will make you lunch! Chicken and Beef!" She bows deeply and walks off down the hall, she walks so elegantly she seems to glide over the linoleum floors.
|
||||
I shrug and go back to the Art Room.
|
||||
|
||||
"Please accept this!" Fubuki hands me a small box. It's of course cold to the touch, like it has been for the last week. I crack open the lid and pick out a brownish ice cube with my fingers, I toss it into my mouth. I never thought I could get brain freeze from fried chicken.
|
||||
"Well how is it?" She studies my face as I crunch through the ice.
|
||||
"It's...homemade."
|
||||
She nods her head and keeps staring; I guess she wasn't satisfied with that answer.
|
||||
I sigh, "Well it's the thought that counts Fubuki, you don't have to keep doing this we're even now you can forget about last week."
|
||||
She cocks her head at me, "Hmm? OH! I'm not cooking for you because of that!" She hides her mouth as she giggles. I guess I'm left out of the loop here. "A woman cooks for her husband right?"
|
||||
The new ice cube I had thrown in my mouth now slowly slides out from my lips and falls back into the box with a clatter.
|
||||
"Uh...yeah." I raise my brow and lean back from the box. "So what does that have to do with this situation here?"
|
||||
"Oh I've taking a liking to you! So now we're married." She leans in and tries to wrap herself around my arm, she can just barely do it. Uh...I've never been confessed to before. Is this a confession? Is this how they work? And she's confessing to me? I'm terrible with the ladies, I'm rough, they say I'm way too muscly and big, and the worst part, they say my face is scary. I'm not going around trying to make my face scary dammit!
|
||||
Her cold body feels comfortable on my arm even in the middle of fall, I've always had a high body temperature so I've grown fond of her touch over the past week, but now I kind of feel self conscious about it.
|
||||
"So married now huh? I don't think that's really how it works-"
|
||||
"Oh maybe not for you humans but for Yuki-Onnas it is." She cuts me off. "Yeah I saw you walking down the hall one day and took an interest in you. You saving me is what made me decide to keep you." She smiles as if she's remembering a good childhood memory.
|
||||
Yeah, this isn't a proposal this is a fucking notice of marriage.
|
||||
"You know we haven't even been on a date yet, right?"
|
||||
She jumps in the bench we're sitting in, "Oh! Yes let's go on a date!" She claps as she pesters me about where we're going.
|
||||
Man I want to say she misunderstood, but I got a feeling she knows what she's doing. Maybe I should just roll with it. Really, she's extremely attractive, like one of the prettiest women I've ever seen. Plus if I don't she'll probably freeze me to death or some shit, and I really don't want to die some Yanki Virgin.
|
||||
"Alright." I groan, but a grin forms on my face. "This weekend then." She hugs my arm tightly before opening up a box of her own.
|
||||
I see she doesn't eat her own cooking...
|
||||
|
||||
"Hey come on man, I left my wallet on the bus, just let me borrow a bit huh? Everything in it should do." The kid from a rival school shrinks and hands me the money from his wallet.
|
||||
"Hey you're the best pal." I pat him on the back and put it into my wallet.
|
||||
"That should be enough." I mumble to myself as I make my way to the school gates. These posers want to act tough but they sure get shook down easy. "Man I'm like crazy nervous right now." I wipe the sweat from my palms on my jeans as I walk down the road. First date, and with the prettiest girl you've ever seen...who is apparently your new wife. The air around me grows cold and I look up to see Fubuki waiting by the school. I jog the distance up to her.
|
||||
"Wow..." Her kimono looks fancy, icicle like trinkets dangle from it and ornate designs are woven into the fabric.
|
||||
"Now love don't stare, it's embarrassing." She shifts to the side giving me a real good look at her side profile.
|
||||
>deposited into spank bank
|
||||
I clear my throat, "Y-you-" *Ahem*, "You look really pretty today Fubuki."
|
||||
Her smile is blinding. She holds out her arms, her light blue hands and perfect nails glisten. Unsure of what to do I step forward. She wraps herself around my forearm. "Alright husband, where are we going?"
|
||||
I've thought of a few places over the past few days but I sure as hell don't know what girls like. I think the arcade's a bad idea, she probably doesn't want to go walking around looking for trouble, the gym would make a bad date. I'll stick with a classic, dinner. Just got to think of a place to eat at now.
|
||||
"Welcome to NcDaniels, what can I get for you today?"
|
||||
My eyes squeeze shut in frustration. This is the best I could come up with...?
|
||||
"Oh, hamburgers, I'm fond of these. Love, will you order for me?" She squeezes the arm she's wrapped around.
|
||||
She seems genuinely happy with the place I picked, looking at her I figured she'd have a more expensive pallet.
|
||||
"Don't see why not." I order my food first to get the easy part out of the way. I sweat bullets as the menu hanging over the employee starts to blur. Oh shit what do I get here? She's a girl so she shouldn't eat too much right? But if I get a kids meal or something I'll definitely be in for it then. I take a breath and grab the employee's collar, "I want a NcSingle with small fry, it better be the best burger you all ever made if you know what's good for ya!" I show my top teeth as I let go of him. Ah fuck, I panicked and went into delinquent mode in front of her, now she'll think I'm just a big, dumb brute like the rest of the gir-
|
||||
"Oh that's so sweet Tankaroshi! Taking care of me like that." She puts her hand on her cheek as she blushes a dark blue.
|
||||
Alright so far so good...
|
||||
I look around to find the best seat in the building, in the back corner there is a fantastic view of the bustling city, but of course there's obstacles. I stare a hole in the back of one of the kid's head. He rubs his neck and turns around to see me staring at him. He grabs his friend and runs out as fast as he can. Seat secured.
|
||||
I escort the lady back to the booth and help her into her side.
|
||||
"Oh what wonderful seats! Not as lovely as the view on the mountains are, but the city's activity sure is fun to watch!" She gazes out the window in awe as the passing cars blur and the occasional pedestrian walks by.
|
||||
"Hey Fubuki, just a question here..." I rub the back of my head awkwardly. She perks up, giving me her undivided attention. "Why did you decide to date me?" It's kind of a shitty question on a first date, but I'm genuinely curious.
|
||||
She puts her delicate finger to her pouty, blue lips. "Well...Maybe it's your strength." She smiles and continues, "Your hair is super cool, the way you talk when you get into a fight, that look of determination."
|
||||
So she's saying she just likes me because I'm the toughest kid in school, that's disappointing.
|
||||
"Even when you're out of a fight and you're thinking that same look is on your face. I guess I like your face." She giggles behind her hand as she squeezes her eyes shut in embarrassment. She looks back at the window, "I think...maybe it's fate really..."
|
||||
I switch the subject to keep from getting too red in front of everybody. And as we enjoy each other's company while the food is prepared we overhear a conversation behind us.
|
||||
"Ugh it sure is ugly out, I know it was so pretty out just a while ago. I don't know how it got so cold so quickly, I even thought I saw a snowflake outside."
|
||||
The girl in front of me shifts uncomfortably as she hangs her head in shame, "Umm...Tankaroshi. I'm sorry..."
|
||||
My body and mind want me to beat the people behind me until an apology is cried out from them, but even I know that wouldn't help this situation. I have to think tactfully here but I can't lie to her either. "Fubuki. I love the winter, I love snow. The grey sky out is as beautiful to me as any sunny day and to be honest I think that's because I've gotten used to being around you." I nod earnestly as I say that. She looks in my eyes shocked to see that I'm telling the truth. She smiles at me. I've grown soft to this girl over the past couple weeks, more than I thought normal, maybe it's just a Yuki-Onna's ability over men, maybe I'm just actually into her. The prospect of such a ludicrously sudden marriage seems less and less profound as I spend time with her.
|
||||
The bell rings giving me the perfect time to run from this face reddening moment. I shake my thoughts out of my head. No it's definitely crazy.
|
||||
As I sit down with the food she does a little prayer and we dig in.
|
||||
"You sure eat a lot Tankaroshi." She giggles as she watches me stuff the burgers into my mouth.
|
||||
"Well you know, I'm still growing after all..." I mumble to myself as she keeps giggling.
|
||||
We enjoy our dinner and laugh with each other, a date well done if it wasn't for the people who walked in next.
|
||||
Four delinquent chicks had walked by the window and caught eye of me. Now I'm the biggest, baddest banchou in all of the world. Though I do have one weakness...
|
||||
"Oi! It's Tank-Chan!" One of the girls slams her hands on the table as they hover around us.
|
||||
I don't hit women. And around my school the girls are just as bad as the boys.
|
||||
"Finally got yourself a girl eh? Your ugly ass wasn't too interested in me huh? But you'll settle for some monster bitch!?" She points at Fubuki's face.
|
||||
I shrug, never looking her in the eye "It isn't any of your business, please leave sis."
|
||||
She sneers and laughs, "Oh am I ruining your date? What are you going to do about it? You gunna take a swing at me big man?" She pats her cheek as she leans in to give me a shot.
|
||||
"Go away, you're bothering me again." I roll my eyes as I ignore her. After a while she gets her jeers in. Guess she gets off on talking down to people she has to look up to. I slump into the booth, ready for another four or five minutes of this shit.
|
||||
"Yeah just lay back and ta-huurk*" My eyes widen as I see her face being slammed into the table by her blond hair.
|
||||
"Eh!? You ruining my date bitch?" I see Fubuki's usually feminine and beautiful face twisted into a jeer as she shows her teeth and curls her lip, her eyelids half open as if uninterested in the prey in front of her. The bully is grabbing at her head where Fubuki has her hand wrapped up. "Tch, you shouldn't poke your nose where it doesn't belong, it's bad for your health you know?"
|
||||
As Fubuki's fist tightens my fem-bully winces. "Now what did you call my stud? Ugly? I'll show you ugly." She slams her face on the table one more time before letting her fall backwards onto the floor. Fubuki lights up a cigarette as she stands up. Even for a guy she's quite tall, so she has a good five or six inches over the other girls. She lets the cigarette dangle from her pretty blue lips as she leans over them, "Idiots. You bore me, go home and get stuffed." The air kicks up inside the NcDaniels as drinks freeze in people's hands and nipples poke through sweaters. They all scramble off the floor and run out as she tosses the cigarette at them.
|
||||
"Tch, shit eaters..." She mumbles as she sits in the booth next to me and takes my arm around her.
|
||||
"Mmm, I love this date dear! Let's have many more!" She smiles cutely as she nuzzles into my side.
|
||||
|
||||
On that day Tankaroshi, the banchou of Last Chance High fell in love with Fubuki the ex-banchou of Frozen Pass high.
|
||||
]==]
|
|
@ -0,0 +1,43 @@
|
|||
_G.spy = spy
|
||||
local mock_env = require("spec.env_mock")
|
||||
local fuzzy = require("spec.fuzzgen")
|
||||
require("spec.utils")
|
||||
|
||||
describe("smr",function()
|
||||
setup(mock_env.setup)
|
||||
teardown(mock_env.teardown)
|
||||
it("should display an anonymously submitted post on the front page", function()
|
||||
local paste_post = require("endpoints.paste_post")
|
||||
local index_get = require("endpoints.index_get")
|
||||
local pages = require("pages")
|
||||
local config = require("config")
|
||||
config.domain = "test.host"
|
||||
pages_mock = mock(pages)
|
||||
configure()
|
||||
assert.stub(pages_mock.index).was_not_called()
|
||||
local post_req = {
|
||||
host = "test.host",
|
||||
method = "POST",
|
||||
path = "/_paste",
|
||||
args = {
|
||||
title = fuzzy.any(),
|
||||
text = fuzzy.any(),
|
||||
pasteas = "anonymous",
|
||||
markup = "plain",
|
||||
tags = "one;two;",
|
||||
}
|
||||
}
|
||||
paste_post(post_req)
|
||||
local get_req = {
|
||||
host = "test.host",
|
||||
method = "GET",
|
||||
path = "/",
|
||||
args = {},
|
||||
}
|
||||
index_get(get_req)
|
||||
assert.stub(pages_mock.index).was_called()
|
||||
assertf(get_req.responsecode >= 200 and get_req.responsecode < 300, "Should give a 2XX response code, got %d", get_req.responsecode)
|
||||
assert(get_req.responsecode == 200, "Error code should be 200 - OK")
|
||||
assert(get_req.response:find(get_req.response,1,true), "Failed to find title in string")
|
||||
end)
|
||||
end)
|
|
@ -0,0 +1,26 @@
|
|||
--Make sure the type checking works
|
||||
|
||||
describe("smr type checking",function()
|
||||
it("should load without errors",function()
|
||||
local types = require("types")
|
||||
end)
|
||||
it("should not error when an argument is a number",function()
|
||||
local types = require("types")
|
||||
local n = 5
|
||||
assert(types.number(n))
|
||||
end)
|
||||
it("should error when an argument is a table",function()
|
||||
local types = require("types")
|
||||
local t = {}
|
||||
assert.has.errors(function()
|
||||
types.number(t)
|
||||
end)
|
||||
end)
|
||||
it("should check multiple types passed as arugments", function()
|
||||
local types = require("types")
|
||||
local num, tbl = 5, {}
|
||||
types.check(num, types.number, nil)
|
||||
end)
|
||||
end)
|
||||
|
||||
|
|
@ -0,0 +1,8 @@
|
|||
|
||||
--Assert with a printf-style failure message.
--bool: the condition to check; when truthy this is a no-op.
--...: a string.format() pattern followed by its arguments; the formatted
--result becomes the error message. Defaults to "Assertion failed".
--The error is raised at the caller's level (level 2) so the failure points
--at the assertf() call site, not at this helper.
--NOTE: table.unpack requires Lua 5.2+ (use unpack on 5.1).
function assertf(bool, ...)
	if bool then return end
	local args = {...}
	--BUGFIX: default message was misspelled "Assetion failed".
	local assertmsg = args[1] or "Assertion failed"
	table.remove(args,1)
	error(string.format(assertmsg, table.unpack(args)),2)
end
|
|
@ -30,6 +30,7 @@ void KeccakF1600(void *s)
|
|||
void Keccak(ui r, ui c, const u8 *in, u64 inLen, u8 sfx, u8 *out, u64 outLen)
|
||||
{
|
||||
/*initialize*/ u8 s[200]; ui R=r/8; ui i,b=0; FOR(i,200) s[i]=0;
|
||||
/*san-check*/ if (((r+c)!= 1600) || ((r % 8 ) != 0)) return;
|
||||
/*absorb*/ while(inLen>0) { b=(inLen<R)?inLen:R; FOR(i,b) s[i]^=in[i]; in+=b; inLen-=b; if (b==R) { KeccakF1600(s); b=0; } }
|
||||
/*pad*/ s[b]^=sfx; if((sfx&0x80)&&(b==(R-1))) KeccakF1600(s); s[R-1]^=0x80; KeccakF1600(s);
|
||||
/*squeeze*/ while(outLen>0) { b=(outLen<R)?outLen:R; FOR(i,b) out[i]=s[i]; out+=b; outLen-=b; if(outLen>0) KeccakF1600(s); }
|
||||
|
|
|
@ -10,8 +10,27 @@ borrowed sha3 implementation from https://keccak.team
|
|||
#include "libcrypto.h"
|
||||
#include "keccak.h"
|
||||
|
||||
/*
|
||||
sha3(data::string)::string
|
||||
/* md
|
||||
@name lua/kore
|
||||
|
||||
### sha3
|
||||
|
||||
Provides a sha3 implementation. Uses the header-only library from https://keccak.team
|
||||
|
||||
Parameters:
|
||||
|
||||
0. data - {{ lua/string }} - The data to hash
|
||||
|
||||
Returns:
|
||||
|
||||
0. data - {{ lua/string }} - The hash as a string. May contain embedded nulls.
|
||||
|
||||
Example:
|
||||
|
||||
local data = "Hello, world!"
|
||||
local hashed_data = sha3(data)
|
||||
print(hashed_data)
|
||||
|
||||
*/
|
||||
int
|
||||
lsha3(lua_State *L){
|
||||
|
@ -19,11 +38,8 @@ lsha3(lua_State *L){
|
|||
unsigned char out[64];
|
||||
const unsigned char *data = (const unsigned char*)luaL_checklstring(L,-1,&len);
|
||||
lua_pop(L,1);
|
||||
printf("All data gotten, about to hash\n");
|
||||
FIPS202_SHA3_512(data, len, out);
|
||||
printf("Finished hashing\n");
|
||||
lua_pushlstring(L,(char*)out,64);
|
||||
printf("Finished pushing string to lua\n");
|
||||
return 1;
|
||||
}
|
||||
|
||||
|
|
297
src/libkore.c
297
src/libkore.c
|
@ -9,10 +9,25 @@
|
|||
#include <lualib.h>
|
||||
//#include <inet/in.h>//linux only I guess
|
||||
#include "libkore.h"
|
||||
#include "smr.h" //Where the error handler code is
|
||||
#include <syslog.h>
|
||||
|
||||
// Used to push "string" = number onto the table at the top of the stack
|
||||
#define LUA_PUSH_CONST(L,a) lua_pushnumber(L,a); lua_setfield(L,-2,#a);
|
||||
|
||||
/* md
|
||||
@name lua/kore
|
||||
@ref http_request
|
||||
|
||||
### http_request {#http_request}
|
||||
|
||||
An `http_request` userdata logically represents a request that the kore webserver has received. You can get arguments, files uploaded with the request, and respond to the request. The userdata does not have any methods on it. It is backed by a {{ kore_request }}
|
||||
|
||||
*/
|
||||
|
||||
/*
|
||||
Checks that the argument at *pos* is a kore_request userdata
|
||||
*/
|
||||
struct http_request*
|
||||
luaL_checkrequest(lua_State *L, int pos){
|
||||
if(!lua_isuserdata(L,pos)){
|
||||
|
@ -24,13 +39,38 @@ luaL_checkrequest(lua_State *L, int pos){
|
|||
return lua_touserdata(L,pos);
|
||||
}
|
||||
|
||||
/* md
|
||||
@name lua/kore
|
||||
|
||||
### http_response
|
||||
|
||||
Sends a response to the request given. After this method is called, calls to other methods that accept a request userdata may not work correctly (the data may have been garbage collected).
|
||||
|
||||
Parameters:
|
||||
0. request - {{ http_request }} - The request to serve
|
||||
0. errcode - {{ lua/number }} - The http error code. See [http error codes](https://developer.mozilla.org/en-US/docs/Web/HTTP/Status) for error codes generally accepted by web browsers, but you can return any number here and kore will attempt to send it.
|
||||
0. data - {{ lua/string }} | {{ lua/nil }} - The data to return with the request. If `nil` is passed, the request's return will have an empty body.
|
||||
|
||||
No returns.
|
||||
|
||||
Example:
|
||||
|
||||
TODO
|
||||
|
||||
*/
|
||||
/*
|
||||
http_response(request::userdata, errcode::number, data::string)
|
||||
http_response(request::userdata, errcode::number, (data::string | nil))
|
||||
*/
|
||||
int
|
||||
lhttp_response(lua_State *L){
|
||||
size_t size;
|
||||
const char *data = luaL_checklstring(L,-1,&size);
|
||||
const char *data;
|
||||
if(lua_isnil(L,-1)){
|
||||
data = NULL;
|
||||
size = 0;
|
||||
}else{
|
||||
data = luaL_checklstring(L,-1,&size);
|
||||
}
|
||||
int httpcode = luaL_checkint(L,-2);
|
||||
struct http_request *req = luaL_checkrequest(L,-3);
|
||||
http_response(req,httpcode,data,size);
|
||||
|
@ -38,6 +78,177 @@ lhttp_response(lua_State *L){
|
|||
return 0;
|
||||
}
|
||||
|
||||
char response[] = "0\r\n\r\n";
|
||||
|
||||
/*Helpers for response coroutines*/
|
||||
/*
 * Netbuf completion callback: runs after a previously queued response chunk
 * has been sent. Checks whether the response coroutine is finished; if not,
 * resumes it for the next chunk, otherwise finalizes the chunked response.
 * Returns a KORE_RESULT_* code to the network layer.
 */
int
coroutine_iter_sent(struct netbuf *buf){
	struct co_obj *obj = (struct co_obj*)buf->extra;
	int ret;
	lua_State *L = obj->L;

	/* status = coroutine.status(co) -- co fetched from the registry ref. */
	lua_getglobal(L,"coroutine");
	lua_getfield(L,-1,"status");
	lua_rawgeti(L,LUA_REGISTRYINDEX,obj->ref);
	lua_call(L,1,1);
	const char *status = luaL_checklstring(L,-1,NULL);
	/* NOTE(review): the coroutine table and status string are left on the
	 * Lua stack here; they look like they should be popped -- confirm. */

	if(strcmp(status,"dead") == 0){
		/* Coroutine returned; nothing more to stream. */
		ret = KORE_RESULT_OK;
	}else{
		/* Still suspended: resume it to produce the next chunk. */
		ret = coroutine_iter_next(obj);
	}

	if(ret == KORE_RESULT_RETRY){
		/* Another chunk was queued; this callback fires again when it
		 * has been flushed. Report OK to the network layer. */
		ret = KORE_RESULT_OK;
	}else{
		/* Done (or errored): tear down the per-response state.
		 * removed is set by coroutine_disconnect(); skip re-arming a
		 * connection the peer already dropped. */
		if(obj->removed == 0){
			http_start_recv(obj->c);
		}
		obj->c->hdlr_extra = NULL;
		obj->c->disconnect = NULL;
		obj->c->flags &= ~CONN_IS_BUSY;
		/* response is the "0\r\n\r\n" chunked-encoding terminator. */
		net_send_queue(obj->c,response,strlen(response));
		net_send_flush(obj->c);
		free(obj);
	}
	return (ret);
}
|
||||
|
||||
/*
 * Resume the response coroutine once and queue whatever it yields as the
 * next chunk of a chunked HTTP response.
 * Returns KORE_RESULT_RETRY when a chunk was queued (more to come),
 * KORE_RESULT_OK when the coroutine yielded nil (treated as end of stream),
 * and KORE_RESULT_ERROR on any Lua error or non-string/non-nil yield.
 */
int coroutine_iter_next(struct co_obj *obj){
	lua_State *L = obj->L;
	/* Guard: resuming a dead coroutine is a programming error upstream. */
	lua_getglobal(L,"coroutine");
	lua_getfield(L,-1,"status");
	lua_rawgeti(L,LUA_REGISTRYINDEX,obj->ref);
	lua_call(L,1,1);
	const char *status = luaL_checklstring(L,-1,NULL);
	if(strcmp(status,"dead") == 0){
		kore_log(LOG_ERR,"Coroutine was dead when it was passed to coroutine iter next");
		lua_pushstring(L,"Coroutine was dead when passed to coroutine iter next");
		lua_error(L);
	}
	lua_pop(L,lua_gettop(L));
	/* ok, value = coroutine.resume(co) via protected call (1 arg, 2 results). */
	lua_getglobal(L,"coroutine");
	lua_getfield(L,-1,"resume");
	lua_rawgeti(L,LUA_REGISTRYINDEX,obj->ref);
	luaL_checktype(L,-1,LUA_TTHREAD);
	int err = lua_pcall(L,1,2,0);
	if(err != 0){
		return (KORE_RESULT_ERROR);
	}
	if(!lua_toboolean(L,-2)){ //Runtime error
		/* Build "error:\n<traceback>" for the log. Stack annotations
		 * below track the values pushed at each step. */
		lua_pushstring(L,":\n");//"error",":"
		lua_getglobal(L,"debug");//"error",":",{debug}
		lua_getfield(L,-1,"traceback");//"error",":",{debug},debug.traceback()
		lua_call(L,0,1);//"error",":",{debug},"traceback"
		lua_remove(L,-2);//"error",":","traceback"
		lua_concat(L,3);
		size_t size;
		const char *s = luaL_checklstring(L,-1,&size);
		kore_log(LOG_ERR,"Error: %s\n",s);
		lua_pop(L,lua_gettop(L));
		return (KORE_RESULT_ERROR);
	}
	//No runtime error
	if(lua_type(L,-1) == LUA_TSTRING){
		/* Yielded a string: frame it as one chunk, "<hex-ish size>\r\n
		 * <data>\r\n", and stream it; coroutine_iter_sent() drives the
		 * next resume once it has been flushed. */
		size_t size;
		const char *data = luaL_checklstring(L,-1,&size);
		struct netbuf *nb;
		struct kore_buf *kb = kore_buf_alloc(4096);
		/* NOTE(review): chunked encoding requires the size in hex; %lu
		 * emits decimal -- confirm against RFC 7230 §4.1. */
		kore_buf_appendf(kb,"%lu\r\n",size);
		kore_buf_append(kb,data,size);
		kore_buf_appendf(kb,"\r\n");
		net_send_stream(obj->c, kb->data, kb->offset, coroutine_iter_sent, &nb);
		nb->extra = obj;
		lua_pop(L,lua_gettop(L));
		/* NOTE(review): kb (and kb->data handed to net_send_stream) is
		 * freed here; presumably the network layer copies or finishes
		 * with it synchronously -- confirm, else use-after-free. */
		kore_buf_free(kb);
		return (KORE_RESULT_RETRY);
	}else if(lua_type(L,-1) == LUA_TNIL){
		/* Yielded nil: end of stream. */
		struct netbuf *nb;
		struct kore_buf *kb = kore_buf_alloc(4096);
		kore_buf_appendf(kb,"0\r\n\r\n");
		/* NOTE(review): this queues the "0\r\n\r\n" terminator AND then
		 * streams `response` (the same terminator) -- looks like it is
		 * sent twice; confirm intent. */
		net_send_queue(obj->c, kb->data, kb->offset);
		net_send_stream(obj->c, response, strlen(response) + 0, coroutine_iter_sent, &nb);
		nb->extra = obj;

		lua_pop(L,lua_gettop(L));
		kore_buf_free(kb);
		return (KORE_RESULT_OK);
	}else{
		kore_log(LOG_CRIT,"Coroutine used for response returned something that was not a string:%s\n",lua_typename(L,lua_type(L,-1)));
		return (KORE_RESULT_ERROR);
	}
}
|
||||
/*
 * Connection disconnect handler for coroutine-backed responses.
 * Releases both registry anchors (the coroutine and its lua_State thread)
 * and flags the co_obj as removed so coroutine_iter_sent() will not try to
 * re-arm receive on a connection the peer has already dropped.
 * NOTE(review): obj itself is not freed here -- coroutine_iter_sent() frees
 * it on its final pass; confirm that path is always reached after a
 * disconnect, otherwise obj leaks.
 */
static void
coroutine_disconnect(struct connection *c){
	kore_log(LOG_ERR,"Disconnect routine called\n");
	struct co_obj *obj = (struct co_obj*)c->hdlr_extra;
	lua_State *L = obj->L;
	int ref = obj->ref;
	int Lref = obj->Lref;
	obj->removed = 1;
	luaL_unref(L,LUA_REGISTRYINDEX,ref);
	luaL_unref(L,LUA_REGISTRYINDEX,Lref);
	c->hdlr_extra = NULL;
}
|
||||
/*
|
||||
The coroutine passed to this function should yield() the data to send to the
|
||||
client, then return when done.
|
||||
|
||||
TODO: Broken and leaks memory
|
||||
http_response_co(request::userdata, co::coroutine)
|
||||
*/
|
||||
int
|
||||
lhttp_response_co(lua_State *L){
|
||||
struct connection *c;
|
||||
printf("Start response coroutine\n");
|
||||
int coroutine_ref = luaL_ref(L,LUA_REGISTRYINDEX);
|
||||
struct http_request *req = luaL_checkrequest(L,-1);
|
||||
c = req->owner;
|
||||
if(c->state == CONN_STATE_DISCONNECTING){
|
||||
return 0;
|
||||
}
|
||||
lua_pop(L,1);
|
||||
struct co_obj *obj = (struct co_obj*)malloc(sizeof(struct co_obj));
|
||||
obj->removed = 0;
|
||||
obj->L = lua_newthread(L);
|
||||
obj->Lref = luaL_ref(L,LUA_REGISTRYINDEX);
|
||||
obj->ref = coroutine_ref;
|
||||
obj->c = c;
|
||||
obj->c->disconnect = coroutine_disconnect;
|
||||
|
||||
obj->c->hdlr_extra = obj;
|
||||
obj->c->flags |= CONN_IS_BUSY;
|
||||
req->flags |= HTTP_REQUEST_NO_CONTENT_LENGTH;
|
||||
http_response_header(req,"transfer-encoding","chunked");
|
||||
http_response(req,200,NULL,0);
|
||||
printf("About to call iter next\n");
|
||||
coroutine_iter_next(obj);
|
||||
printf("Done calling iter next\n");
|
||||
return 0;
|
||||
}
|
||||
|
||||
/* md
|
||||
@name lua/kore
|
||||
|
||||
### http_method_text
|
||||
|
||||
Gets the http method the request was called with (ex `GET`, `POST`, ect.)
|
||||
|
||||
Parameters:
|
||||
|
||||
0. request - {{ http_request }} - The request to get the method string off of.
|
||||
|
||||
Returns:
|
||||
|
||||
0. method - {{ lua/string }} - The string from the request
|
||||
|
||||
Example:
|
||||
TODO
|
||||
*/
|
||||
/*
|
||||
http_method_text(request::userdata)::string
|
||||
*/
|
||||
|
@ -50,6 +261,27 @@ lhttp_method_text(lua_State *L){
|
|||
return 1;
|
||||
}
|
||||
|
||||
/* md
|
||||
@name lua/kore
|
||||
|
||||
### http_request_get_path
|
||||
|
||||
Gets the path from the end of the url.
|
||||
|
||||
Parameters:
|
||||
|
||||
0. request - {{http_request}} - The request to get the path from.
|
||||
|
||||
Returns:
|
||||
|
||||
0. path - {{ lua/string }} - The path part of the url.
|
||||
|
||||
Example:
|
||||
|
||||
local req = ...
|
||||
print(http_request_get_path(req))
|
||||
|
||||
*/
|
||||
/*
|
||||
http_request_get_path(request::userdata)::string
|
||||
*/
|
||||
|
@ -108,6 +340,29 @@ lhttp_response_header(lua_State *L){
|
|||
return 0;
|
||||
}
|
||||
|
||||
/*
|
||||
http_request_header(request::userdata, header::string)::(string || false, string)
|
||||
*/
|
||||
int
|
||||
lhttp_request_header(lua_State *L){
|
||||
const char *header = luaL_checkstring(L,-1);
|
||||
struct http_request *req = luaL_checkrequest(L,-2);
|
||||
lua_pop(L,2);
|
||||
const char *data;
|
||||
int err = http_request_header(req,header,&data);
|
||||
if(err == KORE_RESULT_OK){
|
||||
lua_pushstring(L,data);
|
||||
return 1;
|
||||
}else{
|
||||
lua_pushboolean(L,0);
|
||||
lua_pushstring(L,"Failed to get header: ");
|
||||
lua_pushstring(L,header);
|
||||
lua_concat(L,2);
|
||||
return 2;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
http_response_cookie(req::userdata, name::string, value::string, path::string, expires::number, maxage::number)
|
||||
*/
|
||||
|
@ -255,6 +510,29 @@ lhttp_file_get(lua_State *L){
|
|||
return 1;
|
||||
}
|
||||
|
||||
/*
|
||||
http_set_flags(request::userdata, flags::number)
|
||||
*/
|
||||
int
|
||||
lhttp_set_flags(lua_State *L){
|
||||
int flags = luaL_checkint(L,-1);
|
||||
struct http_request *req = luaL_checkrequest(L,-2);
|
||||
lua_pop(L,2);
|
||||
req->flags = flags;
|
||||
return 0;
|
||||
}
|
||||
|
||||
/*
|
||||
http_get_flags(request::userdata) :: number
|
||||
*/
|
||||
int
|
||||
lhttp_get_flags(lua_State *L){
|
||||
struct http_request *req = luaL_checkrequest(L,-1);
|
||||
lua_pop(L,1);
|
||||
lua_pushnumber(L,req->flags);
|
||||
return 1;
|
||||
}
|
||||
|
||||
/*
|
||||
log(priority::integer,message::string) //formating must be done before calling
|
||||
*/
|
||||
|
@ -269,7 +547,9 @@ lkore_log(lua_State *L){
|
|||
|
||||
static const luaL_Reg kore_funcs[] = {
|
||||
{"http_response", lhttp_response},
|
||||
{"http_response_co", lhttp_response_co},
|
||||
{"http_response_header", lhttp_response_header},
|
||||
{"http_request_header", lhttp_request_header},
|
||||
{"http_method_text",lhttp_method_text},
|
||||
{"http_request_get_path",lhttp_request_get_path},
|
||||
{"http_request_get_host",lhttp_request_get_host},
|
||||
|
@ -282,6 +562,8 @@ static const luaL_Reg kore_funcs[] = {
|
|||
{"http_populate_cookies",lhttp_populate_cookies},
|
||||
{"http_populate_multipart_form",lhttp_populate_multipart_form},
|
||||
{"http_file_get",lhttp_file_get},
|
||||
{"http_set_flags",lhttp_set_flags},
|
||||
{"http_get_flags",lhttp_get_flags},
|
||||
{"log",lkore_log},
|
||||
{NULL,NULL}
|
||||
};
|
||||
|
@ -309,6 +591,17 @@ load_kore_libs(lua_State *L){
|
|||
LUA_PUSH_CONST(L,LOG_NOTICE);
|
||||
LUA_PUSH_CONST(L,LOG_INFO);
|
||||
LUA_PUSH_CONST(L,LOG_DEBUG);
|
||||
|
||||
//Push flags for use with http_set_flags()
|
||||
LUA_PUSH_CONST(L,HTTP_REQUEST_COMPLETE);
|
||||
LUA_PUSH_CONST(L,HTTP_REQUEST_DELETE);
|
||||
LUA_PUSH_CONST(L,HTTP_REQUEST_SLEEPING);
|
||||
LUA_PUSH_CONST(L,HTTP_REQUEST_EXPECT_BODY);
|
||||
LUA_PUSH_CONST(L,HTTP_REQUEST_RETAIN_EXTRA);
|
||||
LUA_PUSH_CONST(L,HTTP_REQUEST_NO_CONTENT_LENGTH);
|
||||
LUA_PUSH_CONST(L,HTTP_REQUEST_AUTHED);
|
||||
|
||||
//Set a global variable "PRODUCTION" true or false
|
||||
#ifdef BUILD_PROD
|
||||
lua_pushboolean(L,1);
|
||||
#else
|
||||
|
|
|
@ -1,6 +1,17 @@
|
|||
|
||||
/*
 * Per-response bookkeeping for a coroutine-driven (chunked) HTTP response.
 * Allocated in lhttp_response_co() and freed in coroutine_iter_sent().
 */
struct co_obj {
	lua_State *L;		/* Lua thread the response coroutine runs on */
	int ref;		/* registry ref to the user's response coroutine */
	int Lref;		/* registry ref anchoring the thread L itself */
	int removed;		/* set to 1 once the disconnect handler has run */
	struct connection *c;	/* kore connection the chunks are written to */
};
|
||||
int lhttp_response(lua_State *L);
|
||||
int lhttp_response_co(lua_State *L);
|
||||
int coroutine_iter_sent(struct netbuf *buf);
|
||||
int coroutine_iter_next(struct co_obj *obj);
|
||||
int lhttp_response_header(lua_State *L);
|
||||
int lhttp_request_header(lua_State *L);
|
||||
int lhttp_method_text(lua_State *L);
|
||||
int lhttp_request_get_path(lua_State *L);
|
||||
int lhttp_request_get_host(lua_State *L);
|
||||
|
@ -12,6 +23,8 @@ int lhttp_argument_get_string(lua_State *L);
|
|||
int lhttp_request_get_ip(lua_State *L);
|
||||
int lhttp_populate_cookies(lua_State *L);
|
||||
int lhttp_file_get(lua_State *L);
|
||||
int lhttp_set_flags(lua_State *L);
|
||||
int lhttp_get_flags(lua_State *L);
|
||||
int lhttp_populate_multipart_form(lua_State *L);
|
||||
int lkore_log(lua_State *L);
|
||||
void load_kore_libs(lua_State *L);
|
||||
|
|
|
@ -0,0 +1,42 @@
|
|||
--[[ md
|
||||
@name lua/addon
|
||||
|
||||
## Addon
|
||||
|
||||
Addons allow you to modify the behavior of smr without modifying the smr
|
||||
source code. This is intended to make updates easier, and to modularize parts
|
||||
of smr that are not minimally required.
|
||||
|
||||
Addon loader - Addons are either:
|
||||
* A folder with at least two files:
|
||||
- meta.lua - contains addon information
|
||||
- init.lua - entrypoint that gets run to load the addon
|
||||
* A zip file with the same
|
||||
* A sqlite3 database with a table "files" that has at least the columns
|
||||
* name :: TEXT - The filename
|
||||
* data :: BINARY - The file data
|
||||
|
||||
And there are at least 2 rows with filenames `meta.lua` and `init.lua`
|
||||
as described above. Addons should be placed in {{config/addons_folder}},
|
||||
defined in `config.lua`
|
||||
|
||||
The `meta.lua` file is run at worker init time (i.e. it will be run once for
|
||||
each worker), and should return a table with at least the following information
|
||||
{
|
||||
name :: string - A name for the addon (all addons must have unique names)
|
||||
desc :: string - A description for the addon.
|
||||
order :: number - When should we run init.lua relative to other addons?
|
||||
Each addon's meta.lua is run (in any order), addons are sorted
|
||||
according to their order, and finally each addon's init.lua is
|
||||
called according to this order.
|
||||
}
|
||||
|
||||
meta.lua may include additional information that can be read and used by other
|
||||
addons. meta.lua is run in a restricted environment with almost no functions
|
||||
available.
|
||||
]]
|
||||
|
||||
--Preserve the previous global configure() hook so it can be chained.
local oldconfigure = configure
--Addon-loader hook point. NOTE(review): currently a pure passthrough to the
--previous configure(); the addon discovery/loading described in the header
--comment does not appear to be implemented in this file yet.
function configure(...)
	return oldconfigure(...)
end
|
|
@ -1,26 +1,89 @@
|
|||
--[[
|
||||
Implements a simple in-memory cache. The cache has no upper size limit, and
|
||||
may cause out-of-memory errors. When this happens, the OS will kill the kore
|
||||
worker process, and the kore parent process will restart with a fresh, empty
|
||||
cache
|
||||
--[[ md
|
||||
|
||||
@name lua/cache
|
||||
|
||||
Implements a simple in memory read through cache.
|
||||
The cache has no upper size limit, and may cause out-of-memory errors.
|
||||
When this happens, the OS will kill the kore worker process,
|
||||
and the kore parent process will restart with a fresh, empty cache.
|
||||
]]
|
||||
|
||||
local sql = require("lsqlite3")
|
||||
|
||||
local queries = require("queries")
|
||||
local util = require("util")
|
||||
local db = require("db")
|
||||
|
||||
local ret = {}
|
||||
|
||||
local stmnt_cache, stmnt_insert_cache
|
||||
local stmnt_cache, stmnt_insert_cache, stmnt_dirty_cache
|
||||
|
||||
--[[ cat
|
||||
@name lua/cache
|
||||
<h3>Schema for Cache</h3>
|
||||
<p>The cache mechanism is an in-memory sqlite3 database behind the scenes, it
|
||||
can ensure consistency and atomic updates & dirtying, though it doesn't today.</p>
|
||||
<table>
|
||||
<caption>cache</caption>
|
||||
<tr>
|
||||
<th>Attributes</th>
|
||||
<th>Field</th>
|
||||
<th>Type</th>
|
||||
<th>Description</th>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Primary Key</td>
|
||||
<td>path</td>
|
||||
<td>TEXT</td>
|
||||
<td>
|
||||
The logical path this text was rendered at,
|
||||
before browser-specific headers (like accept-encoding)
|
||||
are applied
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td></td>
|
||||
<td>data</td>
|
||||
<td>BLOB</td>
|
||||
<td>
|
||||
The returned result from the function passed into
|
||||
cache.render(), the result must be a string, and
|
||||
may contain nulls.
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td></td>
|
||||
<td>updated</td>
|
||||
<td>INTEGER</td>
|
||||
<td>
|
||||
The time this item was rendered at, can be used to set
|
||||
a minimum update frequency. This is used so that web
|
||||
scrapers don't constantly trigger re-renders of the
|
||||
index page.
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td></td>
|
||||
<td>dirty</td>
|
||||
<td>INTEGER</td>
|
||||
<td>
|
||||
Does this page need to be re-rendered the next time it
|
||||
is called? For example, an author's story could have
|
||||
multiple hits, which would require rerendering their
|
||||
author page to show the new hit count, but we don't
|
||||
actually need to do it until someone requests the
|
||||
author page. In this case, we keep the old page around
|
||||
to save time trying to clear it and potentially hit
|
||||
sqlite's garbage collector.
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
]]
|
||||
|
||||
local oldconfigure = configure
|
||||
function configure(...)
|
||||
local cache = util.sqlassert(sql.open_memory())
|
||||
ret.cache = db.sqlassert(sql.open_memory())-- Expose db for testing
|
||||
--A cache table to store rendered pages that do not need to be
|
||||
--rerendered. In theory this could OOM the program eventually and start
|
||||
--swapping to disk. TODO: fixme
|
||||
assert(cache:exec([[
|
||||
--swapping to disk. TODO
|
||||
assert(ret.cache:exec([[
|
||||
CREATE TABLE IF NOT EXISTS cache (
|
||||
path TEXT PRIMARY KEY,
|
||||
data BLOB,
|
||||
|
@ -28,22 +91,22 @@ function configure(...)
|
|||
dirty INTEGER
|
||||
);
|
||||
]]))
|
||||
stmnt_cache = assert(cache:prepare([[
|
||||
stmnt_cache = assert(ret.cache:prepare([[
|
||||
SELECT data
|
||||
FROM cache
|
||||
WHERE
|
||||
path = :path AND
|
||||
((dirty = 0) OR (strftime('%s','now') - updated) < 20)
|
||||
((dirty = 0) OR (strftime('%s','now') - updated) > 20)
|
||||
;
|
||||
]]))
|
||||
stmnt_insert_cache = assert(cache:prepare([[
|
||||
stmnt_insert_cache = assert(ret.cache:prepare([[
|
||||
INSERT OR REPLACE INTO cache (
|
||||
path, data, updated, dirty
|
||||
) VALUES (
|
||||
:path, :data, strftime('%s','now'), 0
|
||||
);
|
||||
]]))
|
||||
stmnt_dirty_cache = assert(cache:prepare([[
|
||||
stmnt_dirty_cache = assert(ret.cache:prepare([[
|
||||
UPDATE OR IGNORE cache
|
||||
SET dirty = 1
|
||||
WHERE path = :path;
|
||||
|
@ -51,15 +114,48 @@ function configure(...)
|
|||
return oldconfigure(...)
|
||||
end
|
||||
|
||||
--Render a page, with cacheing. If you need to dirty a cache, call dirty_cache()
|
||||
--[[ md
|
||||
@name lua/cache
|
||||
|
||||
### cache.render
|
||||
|
||||
Render a page with cacheing.
|
||||
The callback will be called with no arguments, and must return a string.
|
||||
|
||||
Parameters:
|
||||
|
||||
0. pagename - {{lua/string}} - A logical string to associate with this
|
||||
rendered page, this must be passed exactly into render() in order
|
||||
to (potentially) retrieve the cached page.
|
||||
0. callback - {{lua/function}} - A function that may be called,
|
||||
if it is called, it is called with no arguments, and must return a string.
|
||||
The returned string may have embedded nulls.
|
||||
|
||||
Returns:
|
||||
|
||||
0. {{lua/string}} - Either the return of the passed function, or the cached
|
||||
string.
|
||||
|
||||
Example:
|
||||
|
||||
cache = require("cache")
|
||||
func = function()
|
||||
print("Called")
|
||||
return "Hello, world!"
|
||||
end
|
||||
print(cache.render("/test",func)) -- prints "Called", then "Hello, world!"
|
||||
print(cache.render("/test",func)) -- prints "Hello, world!"
|
||||
print(cache.render("/test",func)) -- prints "Hello, world!"
|
||||
|
||||
]]
|
||||
function ret.render(pagename,callback)
|
||||
stmnt_cache:bind_names{path=pagename}
|
||||
local err = util.do_sql(stmnt_cache)
|
||||
local err = db.do_sql(stmnt_cache)
|
||||
if err == sql.DONE then
|
||||
stmnt_cache:reset()
|
||||
--page is not cached
|
||||
elseif err == sql.ROW then
|
||||
data = stmnt_cache:get_values()
|
||||
local data = stmnt_cache:get_values()
|
||||
stmnt_cache:reset()
|
||||
return data[1]
|
||||
else --sql.ERROR or sql.MISUSE
|
||||
|
@ -72,7 +168,7 @@ function ret.render(pagename,callback)
|
|||
path=pagename,
|
||||
data=text,
|
||||
}
|
||||
err = util.do_sql(stmnt_insert_cache)
|
||||
err = db.do_sql(stmnt_insert_cache)
|
||||
if err == sql.ERROR or err == sql.MISUSE then
|
||||
error("Failed to update cache for page " .. pagename)
|
||||
end
|
||||
|
@ -80,11 +176,42 @@ function ret.render(pagename,callback)
|
|||
return text
|
||||
end
|
||||
|
||||
--[[ md
|
||||
@name lua/cache
|
||||
|
||||
### cache.dirty
|
||||
|
||||
Dirty a cached page, causing it to be re-rendered the next time it's
|
||||
requested. Doesn't actually delete it or free memory, just sets its dirty bit.
|
||||
If the page does not exists or has not been rendered yet, this function does
|
||||
not error.
|
||||
|
||||
Parameters:
|
||||
|
||||
0. url - {{lua/string}} - `pagename` from the render function, the logical
|
||||
string associated with this rendered page.
|
||||
|
||||
No returns
|
||||
|
||||
Example:
|
||||
|
||||
cache = require("cache")
|
||||
func = function()
|
||||
print("Called")
|
||||
return "Hello, world!"
|
||||
end
|
||||
print(cache.render("/test",func)) -- prints "Called", then "Hello, world!"
|
||||
print(cache.render("/test",func)) -- prints "Hello, world!")
|
||||
cache.dirty("/test")
|
||||
print(cache.render("/test",func)) -- prints "Called", then "Hello, world!"
|
||||
print(cache.render("/test",func)) -- prints "Hello, world!"
|
||||
]]
|
||||
|
||||
function ret.dirty(url)
|
||||
stmnt_dirty_cache:bind_names{
|
||||
path = url
|
||||
}
|
||||
err = util.do_sql(stmnt_dirty_cache)
|
||||
db.do_sql(stmnt_dirty_cache)
|
||||
stmnt_dirty_cache:reset()
|
||||
end
|
||||
|
||||
|
|
|
@ -1,8 +0,0 @@
|
|||
--[[
|
||||
Holds configuration.
|
||||
A one-stop-shop for runtime configuration
|
||||
]]
|
||||
return {
|
||||
domain = "<{get domain}>",
|
||||
production = false,
|
||||
}
|
253
src/lua/db.lua
253
src/lua/db.lua
|
@ -1,49 +1,236 @@
|
|||
--[[
|
||||
--[[ md
|
||||
@name lua/db
|
||||
|
||||
## Overview
|
||||
|
||||
Does most of the database interaction.
|
||||
Creates default empty database during configure()
|
||||
Notably, holds a connection to the open sqlite3 database in .conn
|
||||
]]
|
||||
|
||||
--[[ sh
|
||||
@name sql/table
|
||||
echo "digraph schema {" \
|
||||
"$(cat doc/schema/*.dot)" \
|
||||
"}" | dot -Tsvg
|
||||
]]
|
||||
local sql = require("lsqlite3")
|
||||
|
||||
local queries = require("queries")
|
||||
local util = require("util")
|
||||
local config = require("config")
|
||||
|
||||
local db = {}
|
||||
|
||||
--[[ md
|
||||
@name lua/db
|
||||
|
||||
### db.sqlassert
|
||||
|
||||
Runs an sql query and receives the 3 arguments back, prints a nice error
|
||||
message on fail, and returns true on success.
|
||||
|
||||
Parameters:
|
||||
|
||||
0. r - {{lsqlite/stmnt}} | {{lua/nil}} - The userdata returned from
|
||||
{{lsqlite/db/prepare}}
|
||||
0. errcode - {{lua/nil}} | {{lua/number}} - If the first argument back from
|
||||
{{lsqlite/db/prepare}} is nil, this second argument is a numeric errorcode,
|
||||
see {{lsqlite/errcodes}}
|
||||
0. err - {{lua/nil}} | {{lua/string}} - The string error returned from
|
||||
{{lsqlite/db/prepare}}. Only non-nil if the first return value was nil. A
|
||||
string message describing what went wrong in the statment. If this argument is
|
||||
also {{lua/nil}}, this function retrives the error mssage from
|
||||
{{lsqlite/db/errmsg}}.
|
||||
|
||||
Returns:
|
||||
|
||||
0. r - {{lua/userdata}} | {{lua/nil}} - The first argument passed in. Used so
|
||||
that error checking and assignment can all be done on a single line.
|
||||
|
||||
Example:
|
||||
|
||||
db = require("db")
|
||||
query = db.sqlassert(db.conn:parepare("SELECT 'Hello, world!'"))
|
||||
]]
|
||||
function db.sqlassert(r, errcode, err)
|
||||
if not r then
|
||||
if err then
|
||||
error(string.format("%d: %s",errcode, err))
|
||||
elseif errcode then
|
||||
error(string.format("%d: %s",errcode, db.conn:errmsg()))
|
||||
end
|
||||
end
|
||||
return r
|
||||
end
|
||||
|
||||
--[[ md
|
||||
@name lua/db
|
||||
|
||||
### db.do_sql
|
||||
|
||||
Continuously tries to perform an sql statement until it goes through. This function may call {{lua/coroutine/yield}}
|
||||
|
||||
Parameters:
|
||||
|
||||
0. stmnt - {{lsqlite/stmnt}} - The userdata returned form {{lsqlite/db/prepare}}
|
||||
|
||||
Returns:
|
||||
|
||||
0. err - {{lua/number}} - The error code returned from running the statement.
|
||||
Will be `lsqlite.OK` on success, see {{lsqlite/errcodes}}
|
||||
|
||||
Example:
|
||||
|
||||
sql = require("lsqlite3")
|
||||
configure = function(...) end -- Mock smr environment
|
||||
db = require("db")
|
||||
configure()
|
||||
query = db.conn:prepare("SELECT 'Hello, world!';")
|
||||
assert(db.do_sql(query))
|
||||
]]
|
||||
function db.do_sql(stmnt)
|
||||
if not stmnt then error("No statement",2) end
|
||||
local err
|
||||
local i = 0
|
||||
repeat
|
||||
err = stmnt:step()
|
||||
if err == sql.BUSY then
|
||||
i = i + 1
|
||||
coroutine.yield()
|
||||
end
|
||||
until(err ~= sql.BUSY or i > 10)
|
||||
assert(i < 10, "Database busy")
|
||||
return err
|
||||
end
|
||||
|
||||
--[[ md
|
||||
@name lua/db
|
||||
|
||||
### db.sql_rows
|
||||
|
||||
Provides an iterator that loops over results in an sql statement or throws an
|
||||
error, then resets the statement after the loop is done.
|
||||
Returned iterator returns varargs, so the values can be unpacked in-line in the
|
||||
for loop. This statement is approximately the same as {{sqlite/stmt/rows}}, but
|
||||
may yield when the db connection is busy, and continue execution when the
|
||||
connection is free again.
|
||||
|
||||
Parameters:
|
||||
|
||||
0. stmnt - {{lsqlite/stmnt}} - The userdata returned from {{sqlite/db/prepare}}
|
||||
|
||||
Returns:
|
||||
|
||||
0. iterator - {{lua/iterator}} - The iterator function that returns varargs of the returns from the sql statement.
|
||||
|
||||
Example:
|
||||
|
||||
db = require("db")
|
||||
query = db.conn:prepare("SELECT 'Hello, world!';")
|
||||
for row in db.sql_rows(query) do
|
||||
print(row) -- prints 'Hello, world!'
|
||||
end
|
||||
|
||||
]]
|
||||
function db.sql_rows(stmnt)
|
||||
if not stmnt then error("No statement",2) end
|
||||
local err
|
||||
return function()
|
||||
err = stmnt:step()
|
||||
if err == sql.BUSY then
|
||||
coroutine.yield()
|
||||
elseif err == sql.ROW then
|
||||
return unpack(stmnt:get_values())
|
||||
elseif err == sql.DONE then
|
||||
stmnt:reset()
|
||||
return nil
|
||||
else
|
||||
stmnt:reset()
|
||||
local msg = string.format(
|
||||
"SQL Iteration failed: %s : %s\n%s",
|
||||
tostring(err),
|
||||
db.conn:errmsg(),
|
||||
debug.traceback()
|
||||
)
|
||||
log(LOG_CRIT,msg)
|
||||
error(msg)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
--[[ md
|
||||
@name lua/db
|
||||
|
||||
### db.sqlbind
|
||||
|
||||
Binds an argument to a prepared statement,
|
||||
with nice error reporting on failure.
|
||||
Wraps {{lsqlite/stmnt/bind_name}}
|
||||
with better error reporting.
|
||||
|
||||
Parameters:
|
||||
|
||||
0. stmnt - {{lsqlite/stmnt}} - The prepared statement from {{sqlite/db/prepare}}
|
||||
0. call - {{lua/string}} - Literal string, options are `bind` for most types,
|
||||
or `bind_blob` for strings that may contain embedded nulls
|
||||
0. position - {{lua/number}} - The argument position to bind to,
|
||||
does not support named parameters
|
||||
0. data - Any - the data to bind
|
||||
|
||||
Returns nothing
|
||||
]]
|
||||
function db.sqlbind(stmnt,call,position,data)
|
||||
assert(call == "bind" or call == "bind_blob","Bad bind call, call was:" .. call)
|
||||
local f = stmnt[call](stmnt,position,data)
|
||||
if f ~= sql.OK then
|
||||
local errs = string.format(
|
||||
"Failed call %s(%d,%q): %s",
|
||||
call,
|
||||
position,
|
||||
data,
|
||||
db.conn:errmsg()
|
||||
)
|
||||
log(LOG_ERR,errs)
|
||||
error(errs,2)
|
||||
end
|
||||
end
|
||||
|
||||
local oldconfigure = configure
|
||||
db.conn = util.sqlassert(sql.open("data/posts.db"))
|
||||
db.conn = db.sqlassert(sql.open(config.db))
|
||||
function configure(...)
|
||||
|
||||
--Create sql tables
|
||||
assert(db.conn:exec(queries.create_table_authors))
|
||||
--Create a fake "anonymous" user, so we don't run into trouble
|
||||
--so that no one runs into trouble being able to paste under this account.
|
||||
assert(db.conn:exec(queries.insert_anon_author))
|
||||
--If/when an author deletes their account, all posts
|
||||
--and comments by that author are also deleted (on
|
||||
--delete cascade) this is intentional. This also
|
||||
--means that all comments by other users on a post
|
||||
--an author makes will also be deleted.
|
||||
--
|
||||
--Post text uses zlib compression
|
||||
assert(db.conn:exec(queries.create_table_posts))
|
||||
--Store the raw text so people can download it later, maybe
|
||||
--we can use it for "download as image" or "download as pdf"
|
||||
--in the future too. Stil stored zlib compressed
|
||||
assert(db.conn:exec(queries.create_table_raw_text))
|
||||
--Maybe we want to store images one day?
|
||||
assert(db.conn:exec(queries.create_table_images))
|
||||
--Comments on a post
|
||||
assert(db.conn:exec(queries.create_table_comments))
|
||||
--Tags for a post
|
||||
assert(db.conn:exec(queries.create_table_tags))
|
||||
--Index for tags
|
||||
assert(db.conn:exec(queries.create_index_tags))
|
||||
--Store a cookie for logged in users. Logged in users can edit
|
||||
--their own posts, and edit their biographies.
|
||||
assert(db.conn:exec(queries.create_table_session))
|
||||
local statements = {
|
||||
"create_table_authors",
|
||||
"insert_anon_author",
|
||||
"create_table_posts",
|
||||
"create_table_raw_text",
|
||||
"create_table_images",
|
||||
"create_table_comments",
|
||||
"create_table_tags",
|
||||
"create_index_tags",
|
||||
"create_table_session"
|
||||
}
|
||||
-- ipairs() needed, "create table authors" must be executed before
|
||||
-- "insert anon author"
|
||||
for _, statement in ipairs(statements) do
|
||||
db.sqlassert(db.conn:exec(queries[statement]))
|
||||
end
|
||||
|
||||
return oldconfigure(...)
|
||||
end
|
||||
configure()
|
||||
|
||||
--[[ md
|
||||
|
||||
@name lua/db
|
||||
|
||||
### db.close()
|
||||
|
||||
Closes the database connection. Not called during normal operation, used to
|
||||
assist in unit testing.
|
||||
|
||||
No parameters
|
||||
|
||||
No returns
|
||||
]]
|
||||
|
||||
function db.close()
|
||||
db.conn:close()
|
||||
|
|
|
@ -0,0 +1,126 @@
|
|||
local db = require("db")
|
||||
local queries = require("queries")
|
||||
local util = require("util")
|
||||
local tags = require("tags")
|
||||
require("global")
|
||||
|
||||
local stmnt_tags_get, stmnt_stories_get
|
||||
|
||||
local oldconfigure = configure
|
||||
function configure(...)
|
||||
stmnt_tags_get = db.sqlassert(db.conn:prepare(queries.select_suggest_tags))
|
||||
stmnt_stories_get = db.sqlassert(db.conn:prepare(queries.select_site_index))
|
||||
return oldconfigure(...)
|
||||
end
|
||||
|
||||
--[[
|
||||
When a user is typing in the "tags" editbox when posting a story, suggest
|
||||
tags for them to include based on what they've typed so far.
|
||||
]]
|
||||
local function suggest_tags(req,data)
|
||||
--[[
|
||||
Prevent a malicious user from injecting '%' into the string
|
||||
we're searching for, potentially causing a DoS with a
|
||||
sufficiently backtrack-ey search/tag combination.
|
||||
]]
|
||||
assert(data:match("^[a-zA-Z0-9,%s-]+$"),string.format("Bad characters in tag: %q",data))
|
||||
stmnt_tags_get:bind_names{match = data .. "%"}
|
||||
local sug_tags = {data}
|
||||
for tag in stmnt_tags_get:rows() do
|
||||
table.insert(sug_tags,tag[1])
|
||||
end
|
||||
stmnt_tags_get:reset()
|
||||
http_response_header(req,"Content-Type","text/plain")
|
||||
http_response(req,200,table.concat(sug_tags,";"))
|
||||
end
|
||||
|
||||
--[[
|
||||
A poor mans json builder, since I don't need one big enough to pull in a
|
||||
dependency for it (yet)
|
||||
]]
|
||||
local function poor_json(builder, ltbl)
|
||||
local function write_bool(builder,bool)
|
||||
table.insert(builder,bool and "true" or "false")
|
||||
end
|
||||
local function write_number(builder,num)
|
||||
local number
|
||||
if num % 1 == 0 then
|
||||
num = string.format("%d",num)
|
||||
else
|
||||
num = string.format("%f",num)
|
||||
end
|
||||
table.insert(builder,num)
|
||||
end
|
||||
local function write_string(builder,s)
|
||||
table.insert(builder, string.format("%q",s))
|
||||
end
|
||||
local function write_array(builder,tbl)
|
||||
table.insert(builder,"[")
|
||||
for _,item in ipairs(tbl) do
|
||||
write_string(builder,item)
|
||||
table.insert(builder,",")
|
||||
end
|
||||
if #tbl > 0 then
|
||||
table.remove(builder,#builder) -- Remove the last comma
|
||||
end
|
||||
table.insert(builder,"]")
|
||||
end
|
||||
local lua_to_json = {
|
||||
boolean = write_bool,
|
||||
number = write_number,
|
||||
string = write_string,
|
||||
table = write_array
|
||||
}
|
||||
table.insert(builder,"{")
|
||||
for k,v in pairs(ltbl) do
|
||||
assert(type(k) == "string", "Field was not a string, was: " .. type(k))
|
||||
table.insert(builder,string.format("%q",k))
|
||||
table.insert(builder,":")
|
||||
assert(lua_to_json[type(v)], "Unknown type for json:" .. type(v) .. " at " .. k)
|
||||
lua_to_json[type(v)](builder,v)
|
||||
table.insert(builder,",")
|
||||
end
|
||||
table.remove(builder,#builder) -- Remove the last comma before closing object
|
||||
table.insert(builder,"}")
|
||||
table.insert(builder,",") -- Can't do this on the same line as above
|
||||
-- we need to remove the last comma, but not }
|
||||
end
|
||||
|
||||
local function get_stories(req,data)
|
||||
local nstories = tonumber(data)
|
||||
stmnt_stories_get:bind_names{offset=nstories}
|
||||
local builder = setmetatable({'{"stories":['},table)
|
||||
for id, title, anon, time, author, hits, ncomments in db.sql_rows(stmnt_stories_get) do
|
||||
local story = {
|
||||
url = util.encode_id(id),
|
||||
title = title,
|
||||
isanon = tonumber(anon) == 1,
|
||||
posted = os.date("%B %d %Y",tonumber(time)),
|
||||
author = author,
|
||||
tags = tags.get(id),
|
||||
hits = hits,
|
||||
ncomments = ncomments
|
||||
}
|
||||
poor_json(builder,story)
|
||||
end
|
||||
table.remove(builder,#builder) -- Remove last comma before closing list
|
||||
table.insert(builder,"]}")
|
||||
stmnt_stories_get:reset()
|
||||
http_response_header(req,"Content-Type","text/plain")
|
||||
http_response(req,200,table.concat(builder))
|
||||
end
|
||||
|
||||
local api_points = {}
|
||||
local function register_api(call,func)
|
||||
api_points[call] = func
|
||||
end
|
||||
register_api("suggest",suggest_tags)
|
||||
register_api("stories",get_stories)
|
||||
local function api_get(req)
|
||||
http_request_populate_qs(req)
|
||||
local call = assert(http_argument_get_string(req,"call"))
|
||||
local data = assert(http_argument_get_string(req,"data"))
|
||||
assertf(api_points[call], "Unknown api endpoint: %s", call)
|
||||
api_points[call](req,data)
|
||||
end
|
||||
return api_get
|
|
@ -0,0 +1,57 @@
|
|||
local config = require("config")
|
||||
|
||||
local function archive(req)
|
||||
local archive_fp = assert(io.open(config.approot .. "data/archive.zip","rb"))
|
||||
--[=[
|
||||
local archive_size = archive:seek("end")
|
||||
archive:seek("set")
|
||||
local archive_cursor = 0
|
||||
local co = coroutine.create(function()
|
||||
print("Inside coroutine!")
|
||||
--[[
|
||||
for i = 1,10 do
|
||||
local str = {tostring(i),":",}
|
||||
for i = 1,10 do
|
||||
table.insert(str,tostring(math.random()))
|
||||
end
|
||||
coroutine.yield(table.concat(str))
|
||||
end
|
||||
]]
|
||||
for i = 1, 1000 do
|
||||
coroutine.yield("Hello, world!" .. tostring(i))
|
||||
end
|
||||
--[[
|
||||
while archive_cursor ~= archive_size do
|
||||
print("Inside while")
|
||||
local bytes_left = archive_size - archive_cursor
|
||||
local next_chunk = math.min(4096,bytes_left)
|
||||
print("Before yield")
|
||||
coroutine.yield(archive:read(next_chunk))
|
||||
print("After yield")
|
||||
end
|
||||
archive:close()
|
||||
]]
|
||||
end)
|
||||
print("co status:",coroutine.status(co))
|
||||
--local bytes_start,bytes_end = 0, 200
|
||||
--http_response_header(req,"content-type","application/zip")
|
||||
--http_response_header(req,"accept-ranges","bytes")
|
||||
http_response_header(req,"transfer-encoding","chunked")
|
||||
http_response_co(req,co)
|
||||
print("a print after our response")
|
||||
--[[
|
||||
local bytes_start,bytes_end = 0, 200
|
||||
http_response_header(req,"content-type","application/zip")
|
||||
http_response_header(req,"accept-ranges","bytes")
|
||||
assert(archive:seek("set",bytes_start))
|
||||
local data = assert(archive:read(bytes_end - bytes_start))
|
||||
http_response_stream(req,200,data,function()
|
||||
print("Callback completed!")
|
||||
end)
|
||||
]]
|
||||
]=]
|
||||
http_response_header(req,"Content-Disposition","attachment; filename=\"slash_monster_archive.zip\"")
|
||||
http_response(req,200,archive_fp:read("*a"))
|
||||
archive_fp:close()
|
||||
end
|
||||
return archive
|
|
@ -0,0 +1,68 @@
|
|||
local zlib = require("zlib")
|
||||
local sql = require("lsqlite3")
|
||||
|
||||
local db = require("db")
|
||||
local queries = require("queries")
|
||||
local pages = require("pages")
|
||||
local session = require("session")
|
||||
local config = require("config")
|
||||
|
||||
local stmnt_bio
|
||||
local oldconfigure = configure
|
||||
function configure(...)
|
||||
stmnt_bio = assert(db.conn:prepare(queries.select_author_bio))
|
||||
return oldconfigure(...)
|
||||
end
|
||||
|
||||
local function bio_edit_get(req)
|
||||
local author, authorid = session.get(req)
|
||||
|
||||
http_request_populate_qs(req)
|
||||
local ret
|
||||
|
||||
if (not author) or (not authorid) then
|
||||
ret = pages.error{
|
||||
errcode = 401,
|
||||
errcodemsg = "Not authorized",
|
||||
explanation = "You must be logged in to edit your biography."
|
||||
}
|
||||
http_response(req,401,ret)
|
||||
end
|
||||
|
||||
--Get the logged in author's bio to display
|
||||
stmnt_bio:bind_names{
|
||||
authorid = authorid
|
||||
}
|
||||
local err = db.do_sql(stmnt_bio)
|
||||
if err == sql.DONE then
|
||||
--No rows, we're logged in but an author with our id doesn't
|
||||
--exist? Something has gone wrong.
|
||||
ret = pages.error{
|
||||
errcode = 500,
|
||||
errcodemsg = "Server error",
|
||||
explanation = string.format([[
|
||||
Tried to get the biography of author %q (%s) but no author with that id was
|
||||
found, please report this error.
|
||||
]], tostring(author), tostring(authorid)),
|
||||
should_traceback = true
|
||||
}
|
||||
stmnt_bio:reset()
|
||||
http_response(req,500,ret)
|
||||
return
|
||||
end
|
||||
assert(err == sql.ROW)
|
||||
local data = stmnt_bio:get_values()
|
||||
stmnt_bio:reset()
|
||||
local bio_text = data[1]
|
||||
if data[1] ~= "" then
|
||||
bio_text = zlib.decompress(data[1])
|
||||
end
|
||||
ret = pages.edit_bio{
|
||||
text = bio_text,
|
||||
user = author,
|
||||
domain = config.domain,
|
||||
}
|
||||
http_response(req,200,ret)
|
||||
end
|
||||
|
||||
return bio_edit_get
|
|
@ -0,0 +1,55 @@
|
|||
|
||||
local sql = require("lsqlite3")
|
||||
local zlib = require("zlib")
|
||||
|
||||
local db = require("db")
|
||||
local queries = require("queries")
|
||||
local pages = require("pages")
|
||||
local parsers = require("parsers")
|
||||
local cache = require("cache")
|
||||
local config = require("config")
|
||||
local session = require("session")
|
||||
|
||||
local stmnt_update_bio
|
||||
|
||||
local oldconfigure = configure
|
||||
function configure(...)
|
||||
stmnt_update_bio = assert(db.conn:prepare(queries.update_bio))
|
||||
return oldconfigure(...)
|
||||
end
|
||||
|
||||
local function edit_bio(req)
|
||||
local author, author_id = session.get(req)
|
||||
if not (author and author_id) then
|
||||
local response = pages.error{
|
||||
errcode = 401,
|
||||
errcodemsg = "Unauthorized",
|
||||
explanation = string.format("You must be logged in to edit a biography. Your login session may have expiried."),
|
||||
should_traceback = true,
|
||||
}
|
||||
http_response(req,401,response)
|
||||
return
|
||||
end
|
||||
|
||||
http_request_populate_post(req)
|
||||
local text = http_argument_get_string(req,"text") or ""
|
||||
|
||||
local parsed = parsers.plain(text) -- Make sure the plain parser can deal with it, even though we don't store this result.
|
||||
local compr_raw = zlib.compress(text)
|
||||
|
||||
db.sqlbind(stmnt_update_bio, "bind_blob", 1,compr_raw)
|
||||
db.sqlbind(stmnt_update_bio, "bind", 2, author_id)
|
||||
if db.do_sql(stmnt_update_bio) ~= sql.DONE then
|
||||
stmnt_update_bio:reset()
|
||||
error("Failed to update biography")
|
||||
end
|
||||
stmnt_update_bio:reset()
|
||||
local loc = string.format("https://%s.%s",author,config.domain)
|
||||
-- Dirty the cache for the author's index, the only place where the bio is displayed.
|
||||
cache.dirty(string.format("%s.%s",author,config.domain))
|
||||
http_response_header(req,"Location",loc)
|
||||
http_response(req,303,"")
|
||||
return
|
||||
end
|
||||
|
||||
return edit_bio
|
|
@ -3,16 +3,18 @@ local sql = require("lsqlite3")
|
|||
local pages = require("pages")
|
||||
local db = require("db")
|
||||
local queries = require("queries")
|
||||
local util = require("util")
|
||||
local sessionlib = require("session")
|
||||
local config = require("config")
|
||||
|
||||
local stmnt_author_create
|
||||
|
||||
--We prevent people from changing their password file, this way we don't really
|
||||
--need to worry about logged in accounts being hijacked if someone gets at the
|
||||
--database. The attacker can still paste & edit from the logged in account for
|
||||
--a while, but whatever.
|
||||
local oldconfigure = configure
|
||||
function configure(...)
|
||||
|
||||
stmnt_author_create = util.sqlassert(db.conn:prepare(queries.insert_author))
|
||||
stmnt_author_create = db.sqlassert(db.conn:prepare(queries.insert_author))
|
||||
return oldconfigure(...)
|
||||
end
|
||||
|
||||
|
@ -42,7 +44,7 @@ local function claim_post(req)
|
|||
}
|
||||
stmnt_author_create:bind_blob(2,salt)
|
||||
stmnt_author_create:bind_blob(3,hash)
|
||||
local err = util.do_sql(stmnt_author_create)
|
||||
local err = db.do_sql(stmnt_author_create)
|
||||
if err == sql.DONE then
|
||||
log(LOG_INFO,"Account creation successful:" .. name)
|
||||
--We sucessfully made the new author
|
||||
|
@ -51,7 +53,7 @@ local function claim_post(req)
|
|||
--Give them a file back
|
||||
http_response_header(req,"Content-Type","application/octet-stream")
|
||||
http_response_header(req,"Content-Disposition","attachment; filename=\"" .. name .. "." .. config.domain .. ".passfile\"")
|
||||
local session = sessionlib.start(id)
|
||||
sessionlib.start(id)
|
||||
text = password
|
||||
http_response(req,200,text)
|
||||
return
|
||||
|
@ -64,9 +66,7 @@ local function claim_post(req)
|
|||
elseif err == sql.ERROR or err == sql.MISUSE then
|
||||
log(LOG_ALERT,"Account creation failed in an unusual way:" .. err)
|
||||
--This is bad though
|
||||
text = pages.claim {
|
||||
err = "Failed to claim"
|
||||
}
|
||||
text = pages.claim {err = "Failed to claim"}
|
||||
end
|
||||
stmnt_author_create:reset()
|
||||
http_response(req,200,text)
|
||||
|
|
|
@ -0,0 +1,74 @@
|
|||
local util = require("util")
|
||||
local pages = require("pages")
|
||||
local config = require("config")
|
||||
local session = require("session")
|
||||
local db = require("db")
|
||||
local queries = require("queries")
|
||||
local sql = require("lsqlite3")
|
||||
local cache = require("cache")
|
||||
local api = require("hooks")
|
||||
|
||||
local oldconfigure = configure
|
||||
local stmnt_delete
|
||||
function configure(...)
|
||||
stmnt_delete = assert(db.conn:prepare(queries.delete_post),db.conn:errmsg())
|
||||
return oldconfigure(...)
|
||||
end
|
||||
|
||||
local oldspec = api.get.page_owner
|
||||
api.get.page_owner = function(env)
|
||||
local ret = oldspec(env)
|
||||
table.insert(ret,{
|
||||
endpoint = string.format("https://%s/_delete",env.domain),
|
||||
method = "POST",
|
||||
fields = {story = env.short},
|
||||
text = "Delete"
|
||||
})
|
||||
return ret
|
||||
end
|
||||
|
||||
local function delete_post(req)
|
||||
http_request_populate_post(req)
|
||||
local storystr = assert(http_argument_get_string(req,"story"))
|
||||
print("Looking at storystr:",storystr)
|
||||
local storyid = util.decode_id(storystr)
|
||||
local author, authorid = session.get(req)
|
||||
if not author then
|
||||
http_response(req, 401, pages.error{
|
||||
errcode = 401,
|
||||
errcodemsg = "Not authorized",
|
||||
explanation = "You must be logged in to delete posts. You are either not logged in or your session has expired.",
|
||||
should_traceback = true
|
||||
})
|
||||
return
|
||||
end
|
||||
log(LOG_DEBUG,string.format("Deleting post %d with proposed owner %d",storyid, authorid))
|
||||
stmnt_delete:bind_names{
|
||||
postid = storyid,
|
||||
authorid = authorid
|
||||
}
|
||||
local err = db.do_sql(stmnt_delete)
|
||||
if err ~= sql.DONE then
|
||||
log(LOG_DEBUG,string.format("Failed to delete: %d:%s",err, db.conn:errmsg()))
|
||||
http_response(req,500,pages.error{
|
||||
errcode = 500,
|
||||
errcodemsg = "Internal error",
|
||||
explanation = "Failed to delete posts from database:" .. db.conn:errmsg(),
|
||||
should_traceback = true,
|
||||
})
|
||||
stmnt_delete:reset()
|
||||
else
|
||||
local loc = string.format("https://%s/%s",config.domain,storystr)
|
||||
http_response_header(req,"Location",loc)
|
||||
http_response(req,303,"")
|
||||
stmnt_delete:reset()
|
||||
cache.dirty(string.format("%s",config.domain))
|
||||
cache.dirty(string.format("%s-logout",config.domain))
|
||||
cache.dirty(string.format("%s.%s",author,config.domain))
|
||||
cache.dirty(string.format("%s",storystr))
|
||||
cache.dirty(string.format("%s?comments=1",storystr))
|
||||
|
||||
end
|
||||
end
|
||||
|
||||
return delete_post
|
|
@ -5,6 +5,7 @@ local db = require("db")
|
|||
local queries = require("queries")
|
||||
local util = require("util")
|
||||
local pages = require("pages")
|
||||
local api = require("hooks")
|
||||
|
||||
local stmnt_download
|
||||
local oldconfigure = configure
|
||||
|
@ -13,17 +14,33 @@ function configure(...)
|
|||
return oldconfigure(...)
|
||||
end
|
||||
|
||||
local oldget = api.get.page_reader
|
||||
api.get.page_reader = function(env)
|
||||
local ret = oldget(env)
|
||||
local button = {
|
||||
endpoint = string.format("https://%s/_download",env.domain),
|
||||
method = "GET",
|
||||
fields = {
|
||||
story = env.short,
|
||||
},
|
||||
text = "Download TXT"
|
||||
}
|
||||
if env.unlisted then
|
||||
button.fields.pwd = env.hashstr
|
||||
end
|
||||
table.insert(ret,button)
|
||||
return ret
|
||||
end
|
||||
|
||||
local function download_get(req)
|
||||
local host = http_request_get_host(req)
|
||||
local path = http_request_get_path(req)
|
||||
http_request_populate_qs(req)
|
||||
local story = assert(http_argument_get_string(req,"story"))
|
||||
local hashstr = http_argument_get_string(req,"pwd")
|
||||
local ihash = hashstr and util.decode_unlisted(hashstr)
|
||||
story = util.decodeentities(story)
|
||||
local story_id = util.decode_id(story)
|
||||
stmnt_download:bind_names{
|
||||
postid = story_id
|
||||
}
|
||||
local err = util.do_sql(stmnt_download)
|
||||
stmnt_download:bind_names{postid = story_id}
|
||||
local err = db.do_sql(stmnt_download)
|
||||
if err == sql.DONE then
|
||||
--No rows, story not found
|
||||
http_response(req,404,pages.nostory{path=story})
|
||||
|
@ -31,7 +48,14 @@ local function download_get(req)
|
|||
return
|
||||
end
|
||||
assert(err == sql.ROW, "after doing download sql, result was not a row, was:" .. tostring(err))
|
||||
local txt_compressed, title = unpack(stmnt_download:get_values())
|
||||
local txt_compressed, title, unlisted, hash = unpack(stmnt_download:get_values())
|
||||
unlisted = unlisted == 1
|
||||
if unlisted and hash ~= ihash then
|
||||
--Unlisted and hash was incorrect, pretend we don't have it
|
||||
http_response(req,404,pages.nostory{path=story})
|
||||
stmnt_download:reset()
|
||||
return
|
||||
end
|
||||
local text = zlib.decompress(txt_compressed)
|
||||
stmnt_download:reset()
|
||||
http_response_header(req,"Content-Type","application/octet-stream")
|
||||
|
|
|
@ -8,6 +8,7 @@ local pages = require("pages")
|
|||
local tags = require("tags")
|
||||
local session = require("session")
|
||||
local config = require("config")
|
||||
local api = require("hooks")
|
||||
|
||||
local stmnt_edit
|
||||
local oldconfigure = configure
|
||||
|
@ -16,9 +17,19 @@ function configure(...)
|
|||
return oldconfigure(...)
|
||||
end
|
||||
|
||||
local oldspec = api.get.page_owner
|
||||
api.get.page_owner = function(env)
|
||||
local ret = oldspec(env)
|
||||
table.insert(ret,{
|
||||
endpoint = string.format("https://%s/_edit",env.domain),
|
||||
method = "GET",
|
||||
fields = {story = env.short},
|
||||
text = "Edit"
|
||||
})
|
||||
return ret
|
||||
end
|
||||
|
||||
local function edit_get(req)
|
||||
local host = http_request_get_host(req)
|
||||
local path = http_request_get_path(req)
|
||||
local author, authorid = session.get(req)
|
||||
|
||||
http_request_populate_qs(req)
|
||||
|
@ -32,7 +43,7 @@ local function edit_get(req)
|
|||
postid = story_id,
|
||||
authorid = authorid
|
||||
}
|
||||
local err = util.do_sql(stmnt_edit)
|
||||
local err = db.do_sql(stmnt_edit)
|
||||
if err == sql.DONE then
|
||||
--No rows, we're probably not the owner (it might
|
||||
--also be because there's no such story)
|
||||
|
@ -45,10 +56,10 @@ local function edit_get(req)
|
|||
end
|
||||
assert(err == sql.ROW)
|
||||
local data = stmnt_edit:get_values()
|
||||
local txt_compressed, markup, isanon, title = unpack(data)
|
||||
local txt_compressed, markup, isanon, title, unlisted = unpack(data)
|
||||
local text = zlib.decompress(txt_compressed)
|
||||
local tags = tags.get(story_id)
|
||||
local tags_txt = table.concat(tags,";")
|
||||
local tags_raw = tags.get(story_id)
|
||||
local tags_txt = table.concat(tags_raw,";")
|
||||
stmnt_edit:reset()
|
||||
ret = pages.edit{
|
||||
title = title,
|
||||
|
@ -59,7 +70,11 @@ local function edit_get(req)
|
|||
domain = config.domain,
|
||||
story = story_id,
|
||||
err = "",
|
||||
tags = tags_txt
|
||||
tags = tags_txt,
|
||||
unlisted = unlisted == 1,
|
||||
extra_load = {
|
||||
'<script src="/_js/suggest_tags.js"></script>'
|
||||
}
|
||||
}
|
||||
http_response(req,200,ret)
|
||||
end
|
||||
|
|
|
@ -11,19 +11,18 @@ local cache = require("cache")
|
|||
local config = require("config")
|
||||
local session = require("session")
|
||||
|
||||
local stmnt_author_of, stmnt_update_raw, stmnt_update
|
||||
local stmnt_author_of, stmnt_update_raw, stmnt_update, stmnt_hash
|
||||
|
||||
local oldconfigure = configure
|
||||
function configure(...)
|
||||
stmnt_author_of = assert(db.conn:prepare(queries.select_author_of_post))
|
||||
stmnt_update_raw = assert(db.conn:prepare(queries.update_raw))
|
||||
stmnt_update = assert(db.conn:prepare(queries.update_post))
|
||||
stmnt_hash = assert(db.conn:prepare(queries.select_post_hash))
|
||||
return oldconfigure(...)
|
||||
end
|
||||
|
||||
local function edit_post(req)
|
||||
local host = http_request_get_host(req)
|
||||
local path = http_request_get_path(req)
|
||||
local author, author_id = session.get(req)
|
||||
|
||||
http_request_populate_post(req)
|
||||
|
@ -32,19 +31,38 @@ local function edit_post(req)
|
|||
local text = assert(http_argument_get_string(req,"text"))
|
||||
local pasteas = assert(http_argument_get_string(req,"pasteas"))
|
||||
local markup = assert(http_argument_get_string(req,"markup"))
|
||||
local unlisted = http_argument_get_string(req,"unlisted") == "on"
|
||||
local tags_str = http_argument_get_string(req,"tags")
|
||||
stmnt_author_of:bind_names{
|
||||
id = storyid
|
||||
}
|
||||
local err = util.do_sql(stmnt_author_of)
|
||||
local err = db.do_sql(stmnt_author_of)
|
||||
if err ~= sql.ROW then
|
||||
stmnt_author_of:reset()
|
||||
error("No author found for story:" .. storyid)
|
||||
local msg = string.format("No author found for story: %d", storyid)
|
||||
log(LOG_ERR,msg)
|
||||
local response = pages.error{
|
||||
errcode = 404,
|
||||
errcodemsg = "Not Found",
|
||||
explanation = msg,
|
||||
should_traceback = true,
|
||||
}
|
||||
http_response(req,404,response)
|
||||
return
|
||||
end
|
||||
local data = stmnt_author_of:get_values()
|
||||
stmnt_author_of:reset()
|
||||
local realauthor = data[1]
|
||||
assert(realauthor == author_id) --Make sure the author of the story is the currently logged in user
|
||||
if realauthor ~= author_id then
|
||||
local response = pages.error{
|
||||
errcode = 401,
|
||||
errcodemsg = "Unauthorized",
|
||||
explanation = string.format("You are trying to edit post %d, but it is another user's post. You are %s.",storyid, author_id),
|
||||
should_traceback = true,
|
||||
}
|
||||
http_response(req,401,response)
|
||||
return
|
||||
end
|
||||
local parsed = parsers[markup](text)
|
||||
local compr_raw = zlib.compress(text)
|
||||
local compr = zlib.compress(parsed)
|
||||
|
@ -55,20 +73,41 @@ local function edit_post(req)
|
|||
assert(stmnt_update_raw:bind_blob(1,compr_raw) == sql.OK)
|
||||
assert(stmnt_update_raw:bind(2,markup) == sql.OK)
|
||||
assert(stmnt_update_raw:bind(3,storyid) == sql.OK)
|
||||
assert(util.do_sql(stmnt_update_raw) == sql.DONE, "Failed to update raw")
|
||||
assert(db.do_sql(stmnt_update_raw) == sql.DONE, "Failed to update raw")
|
||||
stmnt_update_raw:reset()
|
||||
assert(stmnt_update:bind(1,title) == sql.OK)
|
||||
assert(stmnt_update:bind_blob(2,compr) == sql.OK)
|
||||
assert(stmnt_update:bind(3,pasteas == "anonymous" and 1 or 0) == sql.OK)
|
||||
assert(stmnt_update:bind(4,storyid) == sql.OK)
|
||||
assert(util.do_sql(stmnt_update) == sql.DONE, "Failed to update text")
|
||||
assert(stmnt_update:bind(4,unlisted) == sql.OK)
|
||||
assert(stmnt_update:bind(5,storyid) == sql.OK)
|
||||
assert(db.do_sql(stmnt_update) == sql.DONE, "Failed to update text")
|
||||
stmnt_update:reset()
|
||||
tagslib.set(storyid,tags)
|
||||
local id_enc = util.encode_id(storyid)
|
||||
local loc = string.format("https://%s/%s",config.domain,id_enc)
|
||||
if unlisted then
|
||||
stmnt_hash:bind_names{id=storyid}
|
||||
err = db.do_sql(stmnt_hash)
|
||||
if err ~= sql.ROW then
|
||||
error("Failed to get a post's hash while trying to make it unlisted")
|
||||
end
|
||||
local hash = stmnt_hash:get_value(0)
|
||||
-- TODO: Remove this
|
||||
-- Posts added before the unlisted feature will throw errors
|
||||
-- when their hash is used to display them, or their url's.
|
||||
-- when proper database migration tools are in place, remove
|
||||
-- this bit of code.
|
||||
if hash == -1 then
|
||||
error("This post was created before the unlisting feature was added. Temporarily, this breaks. You will be able to unlist it in the future.")
|
||||
end
|
||||
loc = loc .. "?pwd=" .. util.encode_unlisted(hash)
|
||||
end
|
||||
--Turning something from not unlisted to unlisted should dirty all these
|
||||
--places anyway, so the post can now be hidden.
|
||||
cache.dirty(string.format("%s/%s",config.domain,id_enc)) -- This place to read this post
|
||||
cache.dirty(string.format("%s",config.domain)) -- The site index (ex, if the author changed the paste from their's to "Anonymous", the cache should reflect that).
|
||||
cache.dirty(string.format("%s.%s",author,config.domain)) -- The author's index, same reasoning as above.
|
||||
cache.dirty(string.format("%s-logout",config.domain))
|
||||
http_response_header(req,"Location",loc)
|
||||
http_response(req,303,"")
|
||||
return
|
||||
|
|
|
@ -7,26 +7,29 @@ local util = require("util")
|
|||
local config = require("config")
|
||||
local pages = require("pages")
|
||||
local libtags = require("tags")
|
||||
local session = require("session")
|
||||
local parsers = require("parsers")
|
||||
local zlib = require("zlib")
|
||||
|
||||
local stmnt_index, stmnt_author, stmnt_author_bio
|
||||
|
||||
local oldconfigure = configure
|
||||
function configure(...)
|
||||
stmnt_index = assert(db.conn:prepare(queries.select_site_index))
|
||||
stmnt_index = db.sqlassert(db.conn:prepare(queries.select_site_index))
|
||||
--TODO: actually let authors edit their bio
|
||||
stmnt_author_bio = assert(db.conn:prepare([[
|
||||
stmnt_author_bio = db.sqlassert(db.conn:prepare([[
|
||||
SELECT authors.biography FROM authors WHERE authors.name = :author;
|
||||
]]))
|
||||
stmnt_author = assert(db.conn:prepare(queries.select_author_index))
|
||||
stmnt_author = db.sqlassert(db.conn:prepare(queries.select_author_index))
|
||||
return oldconfigure(...)
|
||||
end
|
||||
|
||||
local function get_site_home(req)
|
||||
local function get_site_home(req, loggedin)
|
||||
log(LOG_DEBUG,"Cache miss, rendering site index")
|
||||
stmnt_index:bind_names{}
|
||||
stmnt_index:bind_names{offset=0}
|
||||
local latest = {}
|
||||
for idr, title, iar, dater, author, hits in util.sql_rows(stmnt_index) do
|
||||
table.insert(latest,{
|
||||
for idr, title, iar, dater, author, hits, cmts in db.sql_rows(stmnt_index) do
|
||||
local story = {
|
||||
url = util.encode_id(idr),
|
||||
title = title,
|
||||
isanon = tonumber(iar) == 1,
|
||||
|
@ -34,18 +37,27 @@ local function get_site_home(req)
|
|||
author = author,
|
||||
tags = libtags.get(idr),
|
||||
hits = hits,
|
||||
})
|
||||
ncomments = cmts
|
||||
}
|
||||
table.insert(latest,story)
|
||||
|
||||
end
|
||||
return pages.index{
|
||||
domain = config.domain,
|
||||
stories = latest
|
||||
stories = latest,
|
||||
loggedin = loggedin,
|
||||
extra_load = {
|
||||
'<script src="/_js/index_scroll.js"></script>'
|
||||
}
|
||||
|
||||
}
|
||||
end
|
||||
local function get_author_home(req)
|
||||
local function get_author_home(req, loggedin)
|
||||
local host = http_request_get_host(req)
|
||||
local subdomain = host:match("([^\\.]+)")
|
||||
stmnt_author_bio:bind_names{author=subdomain}
|
||||
local err = util.do_sql(stmnt_author_bio)
|
||||
local author, _ = session.get(req)
|
||||
local err = db.do_sql(stmnt_author_bio)
|
||||
if err == sql.DONE then
|
||||
log(LOG_INFO,"No such author:" .. subdomain)
|
||||
stmnt_author_bio:reset()
|
||||
|
@ -53,25 +65,49 @@ local function get_author_home(req)
|
|||
author = subdomain
|
||||
}
|
||||
end
|
||||
assert(err == sql.ROW,"failed to get author:" .. subdomain .. " error:" .. tostring(err))
|
||||
if err ~= sql.ROW then
|
||||
stmnt_author_bio:reset()
|
||||
error(string.format("Failed to get author %q error: %q",subdomain, tostring(err)))
|
||||
end
|
||||
local data = stmnt_author_bio:get_values()
|
||||
local bio = data[1]
|
||||
local bio_text = data[1]
|
||||
if data[1] ~= "" then
|
||||
bio_text = zlib.decompress(data[1])
|
||||
end
|
||||
local bio = parsers.plain(bio_text)
|
||||
stmnt_author_bio:reset()
|
||||
stmnt_author:bind_names{author=subdomain}
|
||||
local stories = {}
|
||||
for id, title, time, hits in util.sql_rows(stmnt_author) do
|
||||
table.insert(stories,{
|
||||
url = util.encode_id(id),
|
||||
title = title,
|
||||
author=subdomain,
|
||||
posted = os.date("%B %d %Y",tonumber(time)),
|
||||
tags = libtags.get(id),
|
||||
hits = hits,
|
||||
})
|
||||
for id, title, time, hits, unlisted, hash, cmts in db.sql_rows(stmnt_author) do
|
||||
if unlisted == 1 and author == subdomain then
|
||||
local url = util.encode_id(id) .. "?pwd=" .. util.encode_unlisted(hash)
|
||||
table.insert(stories,{
|
||||
url = url,
|
||||
title = title,
|
||||
author=subdomain,
|
||||
posted = os.date("%B %d %Y",tonumber(time)),
|
||||
tags = libtags.get(id),
|
||||
hits = hits,
|
||||
unlisted = true,
|
||||
ncomments = cmts
|
||||
})
|
||||
elseif unlisted == 0 then
|
||||
table.insert(stories,{
|
||||
url = util.encode_id(id),
|
||||
title = title,
|
||||
author=subdomain,
|
||||
posted = os.date("%B %d %Y",tonumber(time)),
|
||||
tags = libtags.get(id),
|
||||
hits = hits,
|
||||
unlisted = false,
|
||||
ncomments = cmts
|
||||
})
|
||||
end
|
||||
end
|
||||
return pages.author_index{
|
||||
domain=config.domain,
|
||||
author=subdomain,
|
||||
loggedin = author,
|
||||
stories=stories,
|
||||
bio=bio
|
||||
}
|
||||
|
@ -82,19 +118,29 @@ local function index_get(req)
|
|||
local method = http_method_text(req)
|
||||
local host = http_request_get_host(req)
|
||||
local subdomain = host:match("([^\\.]+)")
|
||||
local author, _ = session.get(req)
|
||||
local text
|
||||
if host == config.domain then
|
||||
if host == config.domain and author == nil then
|
||||
--Default home page
|
||||
local cachepath = string.format("%s",config.domain)
|
||||
text = cache.render(cachepath, function()
|
||||
return get_site_home(req)
|
||||
end)
|
||||
else
|
||||
elseif host == config.domain and author then
|
||||
--Display home page with "log out" button
|
||||
local cachepath = string.format("%s-logout",config.domain)
|
||||
text = cache.render(cachepath, function()
|
||||
return get_site_home(req,true)
|
||||
end)
|
||||
elseif host ~= config.domain and author ~= subdomain then
|
||||
--author home page
|
||||
local cachepath = string.format("%s.%s",subdomain,config.domain)
|
||||
text = cache.render(cachepath, function()
|
||||
return get_author_home(req)
|
||||
return get_author_home(req, author ~= nil)
|
||||
end)
|
||||
elseif host ~= config.domain and author == subdomain then
|
||||
--author's home page for the author, don't cache, display unlisted
|
||||
text = get_author_home(req, author ~= nil)
|
||||
end
|
||||
assert(text)
|
||||
http_response(req,200,text)
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
local config = require("config")
|
||||
local cache = require("cache")
|
||||
local config = require("config")
|
||||
local pages = require("pages")
|
||||
|
@ -6,11 +5,10 @@ local pages = require("pages")
|
|||
|
||||
local function login_get(req)
|
||||
--Just give them the login page
|
||||
local ret = cache.render(string.format("%s/_login",config.domain),function()
|
||||
return pages.login{
|
||||
err = "",
|
||||
}
|
||||
end)
|
||||
local ret = cache.render(
|
||||
string.format("%s/_login",config.domain),
|
||||
function() return pages.login{} end
|
||||
)
|
||||
http_response(req,200,ret)
|
||||
end
|
||||
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
local sql = require("lsqlite3")
|
||||
|
||||
local db = require("db")
|
||||
local util = require("util")
|
||||
local session = require("session")
|
||||
local config = require("config")
|
||||
local pages = require("pages")
|
||||
local api = require("hooks")
|
||||
|
||||
local stmnt_author_acct
|
||||
|
||||
|
@ -18,44 +18,44 @@ function configure(...)
|
|||
return oldconfigure(...)
|
||||
end
|
||||
|
||||
local old_authenticate = api.authenticate
|
||||
function api.authenticate(data)
|
||||
stmnt_author_acct:bind_names{name=data.user}
|
||||
local err = db.do_sql(stmnt_author_acct)
|
||||
if err ~= sql.ROW then
|
||||
stmnt_author_acct:reset()
|
||||
log(LOG_NOTICE,string.format("User %q failed to log in",data.user))
|
||||
end
|
||||
local id, salt, passhash = unpack(stmnt_author_acct:get_values())
|
||||
stmnt_author_acct:reset()
|
||||
local hash = sha3(salt .. data.pass)
|
||||
if hash == passhash then
|
||||
return id
|
||||
end
|
||||
return old_authenticate(data)
|
||||
end
|
||||
|
||||
local function login_post(req)
|
||||
--Try to log in
|
||||
http_populate_multipart_form(req)
|
||||
local name = assert(http_argument_get_string(req,"user"))
|
||||
local pass = assert(http_file_get(req,"pass"))
|
||||
stmnt_author_acct:bind_names{
|
||||
name = name
|
||||
}
|
||||
local text
|
||||
local err = util.do_sql(stmnt_author_acct)
|
||||
if err == sql.ROW then
|
||||
local id, salt, passhash = unpack(stmnt_author_acct:get_values())
|
||||
stmnt_author_acct:reset()
|
||||
local todigest = salt .. pass
|
||||
local hash = sha3(todigest)
|
||||
if hash == passhash then
|
||||
local mysession = session.start(id)
|
||||
http_response_cookie(req,"session",mysession,"/",0,0)
|
||||
local loc = string.format("https://%s.%s",name,config.domain)
|
||||
http_response_header(req,"Location",loc)
|
||||
http_response(req,303,"")
|
||||
return
|
||||
else
|
||||
text = pages.login{
|
||||
err = "Incorrect username or password"
|
||||
}
|
||||
end
|
||||
elseif err == sql.DONE then --Allows user enumeration, do we want this?
|
||||
--Probably not a problem since all passwords are forced to be "good"
|
||||
stmnt_author_acct:reset()
|
||||
text = pages.login{
|
||||
err = "Failed to find user:" .. name
|
||||
}
|
||||
else
|
||||
stmnt_author_acct:reset()
|
||||
error("Other sql error during login")
|
||||
local uid, err = api.authenticate({user=name,pass=pass})
|
||||
if not uid then
|
||||
http_response(req,200,pages.login{err=err})
|
||||
return
|
||||
end
|
||||
http_response(req,200,text)
|
||||
local user_session = session.start(uid)
|
||||
local domain_no_port = config.domain:match("(.*):.*") or config.domain
|
||||
local cookie_string = string.format(
|
||||
[[session=%s; SameSite=Lax; Path=/; Domain=%s; HttpOnly; Secure]],
|
||||
user_session,
|
||||
domain_no_port
|
||||
)
|
||||
http_response_header(req,"set-cookie",cookie_string)
|
||||
local loc = string.format("https://%s.%s",name,config.domain)
|
||||
http_response_header(req,"Location",loc)
|
||||
http_response(req,303,"")
|
||||
end
|
||||
|
||||
return login_post
|
||||
|
|
|
@ -0,0 +1,11 @@
|
|||
local session = require("session")
|
||||
local config = require("config")
|
||||
|
||||
local function logout(req)
|
||||
local _, authorid = session.get(req)
|
||||
session.finish(authorid)
|
||||
http_response_header(req,"Location","https://" .. config.domain)
|
||||
http_response(req,303,"")
|
||||
end
|
||||
|
||||
return logout
|
|
@ -18,6 +18,9 @@ local function paste_get(req)
|
|||
return assert(pages.paste{
|
||||
domain = config.domain,
|
||||
err = "",
|
||||
extra_load = {
|
||||
'<script src="/_js/suggest_tags.js"></script>'
|
||||
}
|
||||
})
|
||||
end)
|
||||
http_response(req,200,text)
|
||||
|
@ -27,6 +30,9 @@ local function paste_get(req)
|
|||
user = author,
|
||||
err = "",
|
||||
text="",
|
||||
extra_load = {
|
||||
'<script src="/_js/suggest_tags.js"></script>'
|
||||
}
|
||||
})
|
||||
elseif host ~= config.domain and author == nil then
|
||||
http_response_header(req,"Location",string.format("https://%s/_paste",config.domain))
|
||||
|
|
|
@ -9,16 +9,24 @@ local db = require("db")
|
|||
local cache = require("cache")
|
||||
local tags = require("tags")
|
||||
local session = require("session")
|
||||
local pages = require("pages")
|
||||
|
||||
local stmnt_raw,stmnt_paste
|
||||
|
||||
local oldconfigure = configure
|
||||
function configure(...)
|
||||
stmnt_paste = assert(db.conn:prepare(queries.insert_post))
|
||||
stmnt_raw = assert(db.conn:prepare(queries.insert_raw))
|
||||
stmnt_paste = assert(db.conn:prepare(queries.insert_post),db.conn:errmsg())
|
||||
stmnt_raw = assert(db.conn:prepare(queries.insert_raw),db.conn:errmsg())
|
||||
return oldconfigure(...)
|
||||
end
|
||||
|
||||
local function get_random_bytes(n)
|
||||
local f = assert(io.open("/dev/urandom","r"))
|
||||
local ret = assert(f:read(n))
|
||||
assert(f:close())
|
||||
return ret
|
||||
end
|
||||
|
||||
local function anon_paste(req,ps)
|
||||
--Public paste
|
||||
--[[
|
||||
|
@ -26,7 +34,7 @@ local function anon_paste(req,ps)
|
|||
since there are only 32 bits of address. Someone who
|
||||
got a copy of the database could
|
||||
just generate all 2^32 hashes and look up who posted
|
||||
what. Use IPv6, Tor or I2P where possible. (but then I
|
||||
what. Use IPv6, Tor or I2P where possible. (but then I
|
||||
guess it's harder to ban spammers... hmm..)
|
||||
]]
|
||||
--local ip = http_request_get_ip(req)
|
||||
|
@ -34,20 +42,27 @@ local function anon_paste(req,ps)
|
|||
--Don't store this information for now, until I come up
|
||||
--with a more elegent solution.
|
||||
|
||||
util.sqlbind(stmnt_paste,"bind_blob",1,ps.text)
|
||||
util.sqlbind(stmnt_paste,"bind",2,ps.title)
|
||||
util.sqlbind(stmnt_paste,"bind",3,-1)
|
||||
util.sqlbind(stmnt_paste,"bind",4,true)
|
||||
util.sqlbind(stmnt_paste,"bind_blob",5,"")
|
||||
err = util.do_sql(stmnt_paste)
|
||||
log(LOG_DEBUG,string.format("new story: %q, length: %d",ps.title,string.len(ps.text)))
|
||||
local textsha3 = sha3(ps.text .. get_random_bytes(32))
|
||||
db.sqlbind(stmnt_paste,"bind_blob",1,ps.text)
|
||||
db.sqlbind(stmnt_paste,"bind",2,ps.title)
|
||||
db.sqlbind(stmnt_paste,"bind",3,-1)
|
||||
db.sqlbind(stmnt_paste,"bind",4,true)
|
||||
db.sqlbind(stmnt_paste,"bind_blob",5,"")
|
||||
db.sqlbind(stmnt_paste,"bind",6,ps.unlisted)
|
||||
db.sqlbind(stmnt_paste,"bind_blob",7,textsha3)
|
||||
local err = db.do_sql(stmnt_paste)
|
||||
stmnt_paste:reset()
|
||||
if err == sql.DONE then
|
||||
local rowid = stmnt_paste:last_insert_rowid()
|
||||
local url = util.encode_id(rowid)
|
||||
if ps.unlisted then
|
||||
url = url .. "?pwd=" .. util.encode_unlisted(textsha3)
|
||||
end
|
||||
assert(stmnt_raw:bind(1,rowid) == sql.OK)
|
||||
assert(stmnt_raw:bind_blob(2,ps.raw) == sql.OK)
|
||||
assert(stmnt_raw:bind(3,ps.markup) == sql.OK)
|
||||
err = util.do_sql(stmnt_raw)
|
||||
err = db.do_sql(stmnt_raw)
|
||||
stmnt_raw:reset()
|
||||
if err ~= sql.DONE then
|
||||
local msg = string.format(
|
||||
|
@ -61,10 +76,12 @@ local function anon_paste(req,ps)
|
|||
end
|
||||
tags.set(rowid,ps.tags)
|
||||
local loc = string.format("https://%s/%s",config.domain,url)
|
||||
if not ps.unlisted then
|
||||
cache.dirty(string.format("%s/%s",config.domain,url))
|
||||
cache.dirty(string.format("%s",config.domain))
|
||||
end
|
||||
http_response_header(req,"Location",loc)
|
||||
http_response(req,303,"")
|
||||
cache.dirty(string.format("%s/%s",config.domain,url))
|
||||
cache.dirty(string.format("%s",config.domain))
|
||||
return
|
||||
elseif err == sql.ERROR or err == sql.MISUSE then
|
||||
error("Failed to paste:" .. tostring(err))
|
||||
|
@ -84,7 +101,8 @@ local function author_paste(req,ps)
|
|||
text = ps.text
|
||||
}
|
||||
end
|
||||
local asanon = assert(http_argument_get_string(req,"pasteas"))
|
||||
local asanon = assert(http_argument_get_string(req,"pasteas") or "anonymous")
|
||||
local textsha3 = sha3(ps.text .. get_random_bytes(32))
|
||||
--No need to check if the author is posting to the
|
||||
--"right" sudomain, just post it to the one they have
|
||||
--the session key for.
|
||||
|
@ -93,15 +111,20 @@ local function author_paste(req,ps)
|
|||
assert(stmnt_paste:bind(3,authorid) == sql.OK)
|
||||
assert(stmnt_paste:bind(4,asanon == "anonymous") == sql.OK)
|
||||
assert(stmnt_paste:bind_blob(5,"") == sql.OK)
|
||||
err = util.do_sql(stmnt_paste)
|
||||
db.sqlbind(stmnt_paste,"bind",6,ps.unlisted)
|
||||
db.sqlbind(stmnt_paste,"bind_blob",7,textsha3)
|
||||
local err = db.do_sql(stmnt_paste)
|
||||
stmnt_paste:reset()
|
||||
if err == sql.DONE then
|
||||
local rowid = stmnt_paste:last_insert_rowid()
|
||||
local url = util.encode_id(rowid)
|
||||
if ps.unlisted then
|
||||
url = url .. "?pwd=" .. util.encode_unlisted(textsha3)
|
||||
end
|
||||
assert(stmnt_raw:bind(1,rowid) == sql.OK)
|
||||
assert(stmnt_raw:bind_blob(2,ps.raw) == sql.OK)
|
||||
assert(stmnt_raw:bind(3,ps.markup) == sql.OK)
|
||||
err = util.do_sql(stmnt_raw)
|
||||
err = db.do_sql(stmnt_raw)
|
||||
stmnt_raw:reset()
|
||||
if err ~= sql.DONE then
|
||||
local msg = string.format(
|
||||
|
@ -120,11 +143,14 @@ local function author_paste(req,ps)
|
|||
else
|
||||
loc = string.format("https://%s.%s/%s",author,config.domain,url)
|
||||
end
|
||||
if not ps.unlisted then
|
||||
cache.dirty(string.format("%s.%s",author,config.domain))
|
||||
cache.dirty(string.format("%s/%s",config.domain,url))
|
||||
cache.dirty(string.format("%s",config.domain))
|
||||
cache.dirty(string.format("%s-logout",config.domain))
|
||||
end
|
||||
http_response_header(req,"Location",loc)
|
||||
http_response(req,303,"")
|
||||
cache.dirty(string.format("%s.%s",author,config.domain))
|
||||
cache.dirty(string.format("%s/%s",config.domain,url))
|
||||
cache.dirty(string.format("%s",config.domain))
|
||||
return
|
||||
elseif err == sql.ERROR or err == sql.MISUSE then
|
||||
error("Failed to paste: " .. tostring(err) .. " : " .. db.conn:errmsg())
|
||||
|
@ -134,19 +160,9 @@ local function author_paste(req,ps)
|
|||
stmnt_paste:reset()
|
||||
|
||||
end
|
||||
local function decodeentities(capture)
|
||||
local n = tonumber(capture,16)
|
||||
local c = string.char(n)
|
||||
if escapes[c] then
|
||||
return escapes[c]
|
||||
else
|
||||
return c
|
||||
end
|
||||
end
|
||||
local function paste_post(req)
|
||||
local host = http_request_get_host(req)
|
||||
local path = http_request_get_path(req)
|
||||
|
||||
|
||||
local ps = {}
|
||||
--We're creating a new paste
|
||||
ps.subdomain = host:match("([^\\.]+)")
|
||||
|
@ -159,7 +175,6 @@ local function paste_post(req)
|
|||
if tag_str then
|
||||
ps.tags = util.parse_tags(tag_str)
|
||||
end
|
||||
local pasteas
|
||||
ps.raw = zlib.compress(text)
|
||||
text = util.decodeentities(text)
|
||||
text = parsers[ps.markup](text)
|
||||
|
@ -167,10 +182,11 @@ local function paste_post(req)
|
|||
text = zlib.compress(text)
|
||||
assert(text,"Failed to compress text")
|
||||
ps.text = text
|
||||
local esctitle = util.decodeentities(title)
|
||||
--Always sanatize the title with the plain parser. no markup
|
||||
--in the title.
|
||||
ps.title = parsers.plain(title)
|
||||
local unlisted = http_argument_get_string(req,"unlisted")
|
||||
ps.unlisted = unlisted == "on" --might be nil
|
||||
if host == config.domain then
|
||||
anon_paste(req,ps)
|
||||
else
|
||||
|
|
|
@ -1,12 +1,9 @@
|
|||
local parsers = require("parsers")
|
||||
local tags = require("tags")
|
||||
local util = require("util")
|
||||
local pages = require("pages")
|
||||
local config = require("config")
|
||||
|
||||
local function preview_post(req)
|
||||
local host = http_request_get_host(req)
|
||||
local path = http_request_get_path(req)
|
||||
http_request_populate_post(req)
|
||||
local title = assert(http_argument_get_string(req,"title"))
|
||||
local text = assert(http_argument_get_string(req,"text"))
|
||||
|
@ -24,6 +21,7 @@ local function preview_post(req)
|
|||
idp = "preview",
|
||||
text = parsed,
|
||||
tags = tags,
|
||||
comments = {}
|
||||
}
|
||||
http_response(req,200,ret)
|
||||
end
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
local sql = require("sqlite3")
|
||||
local sql = require("lsqlite3")
|
||||
|
||||
local session = require("session")
|
||||
local tags = require("tags")
|
||||
|
@ -8,14 +8,14 @@ local util = require("util")
|
|||
local cache = require("cache")
|
||||
local pages = require("pages")
|
||||
local config = require("config")
|
||||
local zlib = require("zlib")
|
||||
|
||||
local stmnt_read, stmnt_update_views, stmnt_comments
|
||||
local stmnt_read, stmnt_update_views
|
||||
|
||||
local oldconfigure = configure
|
||||
function configure(...)
|
||||
stmnt_read = assert(db.conn:prepare(queries.select_post))
|
||||
stmnt_update_views = assert(db.conn:prepare(queries.update_views))
|
||||
stmnt_comments = assert(db.conn:prepare(queries.select_comments))
|
||||
stmnt_read = db.sqlassert(db.conn:prepare(queries.select_post))
|
||||
stmnt_update_views = db.sqlassert(db.conn:prepare(queries.update_views))
|
||||
return oldconfigure(...)
|
||||
end
|
||||
|
||||
|
@ -25,9 +25,9 @@ Increases a story's hit counter by 1
|
|||
]]
|
||||
local function add_view(storyid)
|
||||
stmnt_update_views:bind_names{
|
||||
id = storyid
|
||||
id = storyid
|
||||
}
|
||||
local err = util.do_sql(stmnt_update_views)
|
||||
local err = db.do_sql(stmnt_update_views)
|
||||
assert(err == sql.DONE, "Failed to update view counter:"..tostring(err))
|
||||
stmnt_update_views:reset()
|
||||
end
|
||||
|
@ -39,9 +39,9 @@ or nil if it wasn't
|
|||
local function populate_ps_story(req,ps)
|
||||
--Make sure our story exists
|
||||
stmnt_read:bind_names{
|
||||
id = ps.storyid
|
||||
id = ps.storyid,
|
||||
}
|
||||
local err = util.do_sql(stmnt_read)
|
||||
local err = db.do_sql(stmnt_read)
|
||||
if err == sql.DONE then
|
||||
--We got no story
|
||||
stmnt_read:reset()
|
||||
|
@ -51,9 +51,15 @@ local function populate_ps_story(req,ps)
|
|||
--If we've made it here, we have a story. Populate our settings
|
||||
--with title, text, ect.
|
||||
assert(err == sql.ROW)
|
||||
local title, storytext, tauthor, isanon, authorname, views = unpack(
|
||||
local title, storytext, tauthor, isanon, authorname, views, unlisted, hash = unpack(
|
||||
stmnt_read:get_values()
|
||||
)
|
||||
ps.unlisted = unlisted == 1
|
||||
if ps.unlisted and hash ~= ps.hash then
|
||||
log(LOG_DEBUG,"Tried to get story id:" .. ps.storyid .. " but it was unlisted and hash was incorrect.")
|
||||
stmnt_read:reset()
|
||||
return false
|
||||
end
|
||||
ps.title = title
|
||||
ps.text = zlib.decompress(storytext)
|
||||
ps.tauthor = tauthor
|
||||
|
@ -66,24 +72,6 @@ local function populate_ps_story(req,ps)
|
|||
return true
|
||||
end
|
||||
|
||||
--[[
|
||||
Get the comments for a story
|
||||
]]
|
||||
local function get_comments(req,ps)
|
||||
stmnt_comments:bind_names{
|
||||
id = ps.storyid
|
||||
}
|
||||
local comments = {}
|
||||
for com_author, com_isanon, com_text in util.sql_rows(stmnt_comments) do
|
||||
table.insert(comments,{
|
||||
author = com_author,
|
||||
isanon = com_isanon == 1, --int to boolean
|
||||
text = com_text
|
||||
})
|
||||
end
|
||||
return comments
|
||||
end
|
||||
|
||||
local function read_get(req)
|
||||
--Pages settings
|
||||
local ps = {
|
||||
|
@ -91,12 +79,26 @@ local function read_get(req)
|
|||
host = http_request_get_host(req),
|
||||
path = http_request_get_path(req),
|
||||
method = http_method_text(req),
|
||||
extra_load = {
|
||||
'<script src="/_js/bookmark.js"></script>',
|
||||
'<script src="/_js/intervine_deletion.js"></script>',
|
||||
}
|
||||
}
|
||||
|
||||
local err
|
||||
--Get our story id
|
||||
assert(string.len(ps.path) > 0,"Tried to read 0-length story id")
|
||||
ps.idp = string.sub(ps.path,2)--remove leading "/"
|
||||
ps.storyid = util.decode_id(ps.idp)
|
||||
ps.short = string.sub(ps.path,2)--remove leading "/"
|
||||
ps.storyid,err = util.decode_id(ps.short)
|
||||
if not ps.storyid then
|
||||
local page = pages.error{
|
||||
errcode = 400,
|
||||
errcodemsg = "Bad Request",
|
||||
explanation = string.format("Failed to find story id %q: %s",ps.path,err)
|
||||
}
|
||||
http_response(req,400,page)
|
||||
return
|
||||
end
|
||||
|
||||
add_view(ps.storyid)
|
||||
|
||||
--If we're logged in, set author and authorid
|
||||
|
@ -106,22 +108,39 @@ local function read_get(req)
|
|||
ps.iam = author
|
||||
ps.loggedauthorid = authorid
|
||||
end
|
||||
|
||||
|
||||
--If we need to show comments
|
||||
http_request_populate_qs(req)
|
||||
ps.show_comments = http_argument_get_string(req,"comments")
|
||||
ps.show_comments = true
|
||||
if ps.show_comments then
|
||||
ps.comments = get_comments(req,ps)
|
||||
ps.comments = util.get_comments(ps.storyid)
|
||||
end
|
||||
|
||||
--If this post is unlisted, get the hash
|
||||
local hashstr = http_argument_get_string(req,"pwd")
|
||||
if hashstr then
|
||||
ps.hash = util.decode_unlisted(hashstr)
|
||||
ps.hashstr = hashstr
|
||||
end
|
||||
|
||||
local text
|
||||
--normal story display
|
||||
if (not ps.loggedauthor) then
|
||||
local cachestr = string.format("%s%s%s",
|
||||
ps.host,
|
||||
local params = {}
|
||||
if ps.show_comments then
|
||||
table.insert(params,"comments=1")
|
||||
end
|
||||
if ps.hash then
|
||||
table.insert(params,"pwd=" .. hashstr)
|
||||
end
|
||||
local cachestrparts = {
|
||||
ps.path,
|
||||
ps.show_comments and "?comments=1" or ""
|
||||
)
|
||||
}
|
||||
if #params > 0 then
|
||||
table.insert(cachestrparts,"?")
|
||||
table.insert(cachestrparts,table.concat(params,"&"))
|
||||
end
|
||||
local cachestr = table.concat(cachestrparts)
|
||||
text = cache.render(cachestr,function()
|
||||
log(LOG_DEBUG,"Cache miss, rendering story " .. cachestr)
|
||||
if not populate_ps_story(req,ps) then
|
||||
|
@ -139,6 +158,12 @@ local function read_get(req)
|
|||
text = pages.read(ps)
|
||||
end
|
||||
end
|
||||
|
||||
--Dirty everywhere the hit counter is shown
|
||||
cache.dirty(string.format("%s",config.domain))
|
||||
cache.dirty(string.format("%s/%s",config.domain,ps.short)) -- This place to read this post
|
||||
cache.dirty(string.format("%s.%s",config.domain,ps.short)) -- The author's index page
|
||||
|
||||
assert(text)
|
||||
http_response(req,200,text)
|
||||
return
|
||||
|
|
|
@ -24,6 +24,7 @@ local function read_post(req)
|
|||
local author, authorid = session.get(req)
|
||||
local comment_text = assert(http_argument_get_string(req,"text"))
|
||||
local pasteas = assert(http_argument_get_string(req,"postas"))
|
||||
local hashstr = http_argument_get_string(req,"pwd")
|
||||
local idp = string.sub(path,2)--remove leading "/"
|
||||
local id = util.decode_id(idp)
|
||||
local isanon = 1
|
||||
|
@ -37,14 +38,15 @@ local function read_post(req)
|
|||
isanon = isanon,
|
||||
comment_text = comment_text,
|
||||
}
|
||||
local err = util.do_sql(stmnt_comment_insert)
|
||||
local err = db.do_sql(stmnt_comment_insert)
|
||||
stmnt_comment_insert:reset()
|
||||
if err ~= sql.DONE then
|
||||
http_response(req,500,"Internal error, failed to post comment. Go back and try again.")
|
||||
else
|
||||
local needspwd = hashstr and "&pwd=" .. hashstr or ""
|
||||
--When we post a comment, we need to dirty the cache for the "comments displayed" page.
|
||||
cache.dirty(string.format("%s%s?comments=1",host,path))
|
||||
local redir = string.format("https://%s%s?comments=1", config.domain, path)
|
||||
cache.dirty(string.format("%s%s?comments=1%s",host,path,needspwd))
|
||||
local redir = string.format("https://%s%s?comments=1%s", config.domain, path, needspwd)
|
||||
http_response_header(req,"Location",redir)
|
||||
http_response(req,303,"")
|
||||
end
|
||||
|
|
|
@ -1,5 +1,3 @@
|
|||
local sql = require("lsqlite3")
|
||||
|
||||
local db = require("db")
|
||||
local queries = require("queries")
|
||||
local util = require("util")
|
||||
|
@ -8,18 +6,9 @@ local pages = require("pages")
|
|||
local config = require("config")
|
||||
local search_parser = require("parser_search")
|
||||
|
||||
local stmnt_search
|
||||
local oldconfigure = configure
|
||||
function configure(...)
|
||||
stmnt_search = assert(db.conn:prepare(queries.select_post_tags))
|
||||
return oldconfigure(...)
|
||||
end
|
||||
|
||||
local function search_get(req)
|
||||
local host = http_request_get_host(req)
|
||||
local path = http_request_get_path(req)
|
||||
http_request_populate_qs(req)
|
||||
local searchq, err = http_argument_get_string(req,"q")
|
||||
local searchq, _ = http_argument_get_string(req,"q")
|
||||
if not searchq then
|
||||
local ret = pages.search{
|
||||
domain = config.domain,
|
||||
|
@ -54,7 +43,8 @@ local function search_get(req)
|
|||
author = row[4],
|
||||
posted = os.date("%B %d %Y",tonumber(row[5])),
|
||||
tags = libtags.get(row[1]),
|
||||
hits = row[6]
|
||||
hits = row[6],
|
||||
ncomments = row[7]
|
||||
})
|
||||
end
|
||||
local ret = pages.search{
|
||||
|
|
|
@ -0,0 +1,55 @@
|
|||
-- Various global functions to cause less typing.
|
||||
|
||||
|
||||
function assertf(bool, fmt, ...)
|
||||
fmt = fmt or "Assetion Failed"
|
||||
if not bool then
|
||||
error(string.format(fmt,...),2)
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
function errorf(str, ...)
|
||||
--try calling string.format, if it errors, try calling it with exactly
|
||||
--1 less argument (the error level)
|
||||
local args = {...}
|
||||
local succ, ret = pcall(string.format,str,...)
|
||||
if not succ and type(args[#args]) ~= "number" then
|
||||
errorf("Failed displaying error that looks like %q",str)
|
||||
elseif type(args[#args]) == "number" then
|
||||
local errlevel = table.remove(args,#args)
|
||||
local succ2, ret = pcall(string.format,str,unpack(args))
|
||||
if not succ2 then
|
||||
errorf("Failed displaying error that looks like %q",str)
|
||||
end
|
||||
error(ret, errlevel+1)
|
||||
end
|
||||
error(ret,2)
|
||||
end
|
||||
|
||||
local oldtostring = tostring
|
||||
function tostring(any)
|
||||
--Pretty print tables by default
|
||||
local printed_tables = {}
|
||||
local function tostring_helper(a,tabs)
|
||||
if type(a) ~= "table" then
|
||||
return oldtostring(a)
|
||||
end
|
||||
if printed_tables[a] then
|
||||
return oldtostring(a)
|
||||
end
|
||||
printed_tables[a] = true
|
||||
local sbuilder = {"{\n"}
|
||||
for k,v in pairs(a) do
|
||||
table.insert(sbuilder,string.rep("\t",tabs))
|
||||
table.insert(sbuilder,tostring_helper(k,tabs+1))
|
||||
table.insert(sbuilder,":")
|
||||
table.insert(sbuilder,tostring_helper(v,tabs+1))
|
||||
table.insert(sbuilder,"\n")
|
||||
end
|
||||
table.insert(sbuilder,string.rep("\t",tabs-1))
|
||||
table.insert(sbuilder,"}")
|
||||
return table.concat(sbuilder)
|
||||
end
|
||||
return tostring_helper(any,1)
|
||||
end
|
|
@ -0,0 +1,85 @@
|
|||
--[[ md
|
||||
@name lua/hooks
|
||||
|
||||
Global functions that smr exposes that can be detoured by addons
|
||||
|
||||
]]
|
||||
|
||||
--[[ md
|
||||
@name doc/detouring
|
||||
|
||||
# Detouring
|
||||
|
||||
In Lua, functions are given a name, but more generally, they values on a table
|
||||
, perhaps the global table `_G`, and their names are the keys on the table.
|
||||
When you want to modify a function that exists either in smr or in other addons
|
||||
you can **detour** the function by saving a reference to the original function,
|
||||
and then creating a new function that calls the original, maybe after doing
|
||||
other work, or modifying the arguments. For example:
|
||||
|
||||
local pages = require("pages")
|
||||
|
||||
-- Get notified when the index page is rendered to the user
|
||||
local oldindex = pages.index
|
||||
function pages.index(...)
|
||||
print("Index page is getting rendered!")
|
||||
oldindex(...)
|
||||
end
|
||||
]]
|
||||
|
||||
local api = {}
|
||||
|
||||
--[[ md
|
||||
@name lua/hooks
|
||||
|
||||
## pre_request
|
||||
|
||||
Called before any request processing. Returning true "traps" the request, and
|
||||
does not continue calling smr logic. Well-behaved addons should check for
|
||||
"true" from the detoured function, and return true immediately if the check
|
||||
succeeds.
|
||||
|
||||
@param req {{http_request}} - The request about to be processed
|
||||
@returns boolean - If true, further processing is not done on this request.
|
||||
]]
|
||||
api.pre_request = function(req) end
|
||||
|
||||
-- Called after smr request processing. Returning true "traps" the request.
|
||||
-- Well-behaved addons should check for true from the detoured function, and
|
||||
-- immediately return true if the check succeeds. This will not stop smr from
|
||||
-- responding to the request, since by this time http_request_response() has
|
||||
-- already been called.
|
||||
api.post_request = function(req) end
|
||||
|
||||
-- Called during startup of the worker process
|
||||
-- calling error() in this function will prevent kore from starting.
|
||||
-- Return value is ignored.
|
||||
api.worker_init = function() end
|
||||
|
||||
-- Called during shutdown of the worker process
|
||||
-- Failures in this function cause other addon hooks to this function to be skipped.
|
||||
-- Return value is ignored.
|
||||
api.worker_shutdown = function() end
|
||||
|
||||
-- The following are tables and their options:
|
||||
-- "buttonspec" - specifies a button to display on the front end, has the fields:
|
||||
-- .endpoint - the url to go to when the button is pressed
|
||||
-- .method - the HTTP method to use to call the endpoint
|
||||
-- .fields - key/value pairs to send as arguments when calling the endpoint
|
||||
-- These are usually "hidden" fields for a form.
|
||||
-- .text - The text that displays on the button
|
||||
|
||||
-- Called to display configuration as html.
|
||||
api.get = {
|
||||
-- returns an array of buttonspec, displayed at the top of a story,
|
||||
-- only for the logged in owner of a story.
|
||||
page_owner = function(env) return {} end,
|
||||
|
||||
-- returns an array of buttonspec, displayed at the bottom of a story
|
||||
page_reader = function(env) return {} end,
|
||||
}
|
||||
|
||||
-- Called when the /_api endpoint is accessed
|
||||
api.call = function() end
|
||||
|
||||
return api
|
202
src/lua/init.lua
202
src/lua/init.lua
|
@ -4,43 +4,51 @@ It registers a bunch of global functions that get called from kore when users
|
|||
visit particular pages. See src/smr.c for the names of the public functions.
|
||||
See conf/smr.conf for the data that can be access in each function
|
||||
]]
|
||||
|
||||
--[[ md
|
||||
@name lua
|
||||
|
||||
# Lua namespace
|
||||
|
||||
By default, smr will run init.lua defined by smr, and then addons in order,
|
||||
see {{lua/addon}} for information on how smr loads addons. You can use any
|
||||
of the modules that ship with smr by including them, and then calling the
|
||||
functions defined in that module.
|
||||
|
||||
For example, the module {{lua/db}} holdes a reference to the sqlite3 database
|
||||
that smr uses for data storage. If you addon needs to set up a table and
|
||||
prepare sql statements for an api endpoint, you might set it up like this:
|
||||
|
||||
local db = require("db")
|
||||
|
||||
local oldconfigure = configure -- Hold a refrence to configure()
|
||||
function configure(...) -- Detour the configure function
|
||||
db.sqlassert(db.conn:exec([=[
|
||||
CREATE TABLE IF NOT EXISTS foo (
|
||||
id INTEGER AUTOINCREMENT PRIMARY KEY
|
||||
value TEXT
|
||||
);
|
||||
]=]))
|
||||
oldconfigure(...)
|
||||
end
|
||||
|
||||
Be sure to always {{doc/appendix/detourin}}
|
||||
]]
|
||||
print("Really fast print from init.lua")
|
||||
|
||||
--Luarocks libraries
|
||||
local et = require("etlua")
|
||||
local sql = require("lsqlite3")
|
||||
local zlib = require("zlib")
|
||||
local api = require("hooks")
|
||||
|
||||
--stub for detouring
|
||||
--stubs for detouring
|
||||
function configure(...) end
|
||||
|
||||
--smr code
|
||||
require("global")
|
||||
local cache = require("cache")
|
||||
local pages = require("pages")
|
||||
local util = require("util")
|
||||
local config = require("config")
|
||||
require("pages")
|
||||
local db = require("db")
|
||||
|
||||
--Pages
|
||||
local endpoint_names = {
|
||||
read = {"get","post"},
|
||||
preview = {"post"},
|
||||
index = {"get"},
|
||||
paste = {"get","post"},
|
||||
download = {"get"},
|
||||
login = {"get","post"},
|
||||
edit = {"get","post"},
|
||||
claim = {"get","post"},
|
||||
search = {"get"},
|
||||
}
|
||||
local endpoints = {}
|
||||
for name, methods in pairs(endpoint_names) do
|
||||
for _,method in pairs(methods) do
|
||||
local epn = string.format("%s_%s",name,method)
|
||||
endpoints[epn] = require("endpoints." .. epn)
|
||||
end
|
||||
end
|
||||
|
||||
print("Hello from init.lua")
|
||||
local oldconfigure = configure
|
||||
function configure(...)
|
||||
|
@ -54,67 +62,81 @@ function configure(...)
|
|||
end
|
||||
print("Created configure function")
|
||||
|
||||
function home(req)
|
||||
local method = http_method_text(req)
|
||||
if method == "GET" then
|
||||
endpoints.index_get(req)
|
||||
end
|
||||
-- TODO: Fill this out
|
||||
local http_methods = {"GET","POST"}
|
||||
local http_m_rev = {}
|
||||
for _,v in pairs(http_methods) do
|
||||
http_m_rev[v] = true
|
||||
end
|
||||
|
||||
--We prevent people from changing their password file, this way we don't really
|
||||
--need to worry about logged in accounts being hijacked if someone gets at the
|
||||
--database. The attacker can still paste & edit from the logged in account for
|
||||
--a while, but whatever.
|
||||
function claim(req)
|
||||
local method = http_method_text(req)
|
||||
if method == "GET" then
|
||||
endpoints.claim_get(req)
|
||||
elseif method == "POST" then
|
||||
endpoints.claim_post(req)
|
||||
--Endpoints, all this stuff gets required here.
|
||||
for funcname, spec in pairs({
|
||||
home = {
|
||||
GET = require("endpoints.index_get"),
|
||||
},
|
||||
claim = {
|
||||
GET = require("endpoints.claim_get"),
|
||||
POST = require("endpoints.claim_post"),
|
||||
},
|
||||
paste = {
|
||||
GET = require("endpoints.paste_get"),
|
||||
POST = require("endpoints.paste_post"),
|
||||
},
|
||||
read = {
|
||||
GET = require("endpoints.read_get"),
|
||||
POST = require("endpoints.read_post"),
|
||||
},
|
||||
login = {
|
||||
GET = require("endpoints.login_get"),
|
||||
POST = require("endpoints.login_post"),
|
||||
},
|
||||
logout = {
|
||||
GET = require("endpoints.logout_get"),
|
||||
},
|
||||
edit = {
|
||||
GET = require("endpoints.edit_get"),
|
||||
POST = require("endpoints.edit_post"),
|
||||
},
|
||||
delete = {
|
||||
POST = require("endpoints.delete_post"),
|
||||
},
|
||||
edit_bio = {
|
||||
GET = require("endpoints.bio_get"),
|
||||
POST = require("endpoints.bio_post"),
|
||||
},
|
||||
download = {
|
||||
GET = require("endpoints.download_get"),
|
||||
},
|
||||
preview = {
|
||||
POST = require("endpoints.preview_post"),
|
||||
},
|
||||
search = {
|
||||
GET = require("endpoints.search_get"),
|
||||
},
|
||||
archive = {
|
||||
GET = require("endpoints.archive_get"),
|
||||
},
|
||||
api = {
|
||||
GET = require("endpoints.api_get"),
|
||||
},
|
||||
}) do
|
||||
assert(_G[funcname] == nil, "Tried to overwrite an endpoint, please define endpoints exactly once")
|
||||
for k,v in pairs(spec) do
|
||||
assert(http_m_rev[k], "Unknown http method '" .. k .. "' defined for endpoint '" .. funcname .. "'")
|
||||
assert(type(v) == "function", "Endpoint %s %s must be a function, but was a %s",funcname, k, type(v))
|
||||
end
|
||||
end
|
||||
|
||||
--Create a new paste on the site
|
||||
function paste(req)
|
||||
local method = http_method_text(req)
|
||||
if method == "GET" then
|
||||
endpoints.paste_get(req)
|
||||
elseif method == "POST" then
|
||||
endpoints.paste_post(req)
|
||||
_G[funcname] = function(req)
|
||||
local method = http_method_text(req)
|
||||
if spec[method] == nil then
|
||||
log(LOG_WARNING,string.format("Endpoint %s called with http method %s, but no such route defined.", funcname, method))
|
||||
else
|
||||
log(LOG_DEBUG,string.format("Endpoint %s called with method %s",funcname,method))
|
||||
end
|
||||
api.pre_request(req)
|
||||
spec[method](req)
|
||||
api.post_request(req)
|
||||
end
|
||||
end
|
||||
|
||||
function read(req)
|
||||
local method = http_method_text(req)
|
||||
if method == "GET" then
|
||||
endpoints.read_get(req)
|
||||
elseif method == "POST" then
|
||||
endpoints.read_post(req)
|
||||
end
|
||||
end
|
||||
|
||||
function login(req)
|
||||
local method = http_method_text(req)
|
||||
if method == "GET" then
|
||||
endpoints.login_get(req)
|
||||
elseif method == "POST" then
|
||||
endpoints.login_post(req)
|
||||
end
|
||||
end
|
||||
|
||||
--Edit a story
|
||||
function edit(req)
|
||||
local method = http_method_text(req)
|
||||
if method == "GET" then
|
||||
endpoints.edit_get(req)
|
||||
elseif method == "POST" then
|
||||
endpoints.edit_post(req)
|
||||
end
|
||||
end
|
||||
|
||||
--TODO
|
||||
function edit_bio()
|
||||
error("Not yet implemented")
|
||||
log(LOG_INFO,string.format("Associateing endpoint %q", funcname))
|
||||
end
|
||||
|
||||
function teardown()
|
||||
|
@ -125,19 +147,9 @@ function teardown()
|
|||
if cache then
|
||||
cache.close()
|
||||
end
|
||||
api.worker_shutdown()
|
||||
print("Finished lua teardown")
|
||||
end
|
||||
|
||||
function download(req)
|
||||
endpoints.download_get(req)
|
||||
end
|
||||
|
||||
function preview(req)
|
||||
endpoints.preview_post(req)
|
||||
end
|
||||
|
||||
function search(req)
|
||||
endpoints.search_get(req)
|
||||
end
|
||||
|
||||
api.worker_init()
|
||||
print("Done with init.lua")
|
||||
|
|
|
@ -3,6 +3,8 @@ Compiles all the pages under src/pages/ with etlua. See the etlua documentation
|
|||
for more info (https://github.com/leafo/etlua)
|
||||
]]
|
||||
local et = require("etlua")
|
||||
local config = require("config")
|
||||
require("global")
|
||||
local pagenames = {
|
||||
"index",
|
||||
"author_index",
|
||||
|
@ -17,19 +19,76 @@ local pagenames = {
|
|||
"author_paste",
|
||||
"author_edit",
|
||||
"search",
|
||||
"error",
|
||||
"edit_bio",
|
||||
"parts/header",
|
||||
"parts/footer",
|
||||
"parts/motd",
|
||||
"parts/search",
|
||||
"parts/story_breif",
|
||||
"parts/taglist"
|
||||
}
|
||||
--Functions available to all templates
|
||||
local global_env = {
|
||||
include = function(filename)
|
||||
local fp = assert(io.open(filename,"r"))
|
||||
local data = assert(fp:read("*a"))
|
||||
fp:close()
|
||||
return data
|
||||
end,
|
||||
}
|
||||
local global_env_m = {
|
||||
__index=global_env
|
||||
}
|
||||
local pages = {}
|
||||
for k,v in pairs(pagenames) do
|
||||
local path = string.format("pages/%s.etlua",v)
|
||||
local etlua_short_pat = '%[string "etlua"%]'
|
||||
for _,v in pairs(pagenames) do
|
||||
local path = string.format(config.approot .. "pages/%s.etlua",v)
|
||||
local parser = et.Parser()
|
||||
local f = assert(io.open(path,"r"))
|
||||
local fdata = assert(f:read("*a"))
|
||||
local code = assert(parser:parse(fdata))
|
||||
local func = assert(parser:load(parser:chunks_to_lua(),path))
|
||||
local code, err = parser:compile_to_lua(fdata)
|
||||
if not code then
|
||||
errorf("Failed to parse %s: %s",path,err)
|
||||
end
|
||||
local func, err = parser:load(code)
|
||||
if not func then
|
||||
error(string.format("Failed to load %s: %s",path, err))
|
||||
end
|
||||
f:close()
|
||||
pages[v] = function(...)
|
||||
local buf = assert(parser:run(func,...))
|
||||
return table.concat(buf)
|
||||
assert(func, "Failed to load " .. path)
|
||||
pages[v] = function(env)
|
||||
assert(type(env) == "table","env must be a table")
|
||||
-- Add our global metatable functions at the bottom metatable's __index
|
||||
local cursor,max_depth = env, 10
|
||||
while cursor ~= nil and getmetatable(cursor) and getmetatable(cursor).__index and max_depth > 0 do
|
||||
cursor = getmetatable(cursor)
|
||||
max_depth = max_depth - 1
|
||||
end
|
||||
if max_depth == 0 then
|
||||
log(
|
||||
LOG_WARN,
|
||||
string.format([[
|
||||
Failed to set environment on page %s correctly,
|
||||
exceeded max depth when applying global functions: %s
|
||||
]],
|
||||
path,
|
||||
debug.traceback()
|
||||
)
|
||||
)
|
||||
end
|
||||
setmetatable(cursor,global_env_m)
|
||||
local success, ret = xpcall(function()
|
||||
return parser:run(func, env)
|
||||
end,function(err)
|
||||
-- A function to tell us what template we errored in
|
||||
-- if an error occures
|
||||
return debug.traceback(err:gsub(etlua_short_pat,path))
|
||||
end)
|
||||
if not success then
|
||||
error(ret:gsub(etlua_short_pat,path))
|
||||
end
|
||||
return table.concat(ret)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@ -60,8 +60,9 @@ local function wrap(seq,format,V"sup")
|
|||
end
|
||||
end
|
||||
]]
|
||||
|
||||
local function wrap(seq,format,s)
|
||||
return P(seq) * Cs((((V"marked" - s) + word + P"\n"))^1) * P(seq) / function(a)
|
||||
return P(seq) * Cs(((s + word + P"\n"))^0) * P(seq) / function(a)
|
||||
return string.format(format,a)
|
||||
end
|
||||
end
|
||||
|
@ -71,21 +72,22 @@ end
|
|||
local function tag(name,format)
|
||||
local start_tag = P(string.format("[%s]",name))
|
||||
local end_tag = P(string.format("[/%s]",name))
|
||||
return start_tag * Cs(((1 - end_tag))^1) * end_tag / function(a)
|
||||
return start_tag * Cs(((1 - end_tag))^0) * end_tag / function(a)
|
||||
return string.format(format,sanitize(a))
|
||||
end
|
||||
end
|
||||
|
||||
--local grammar = P(require('pegdebug').trace({
|
||||
local grammar = P{
|
||||
"chunk";
|
||||
--regular
|
||||
spoiler = wrap("**",[[<span class="spoiler">%s</span>]],V"spoiler"),
|
||||
spoiler2 = tag("spoiler",[[<span class="spoiler2">%s</span>]]),
|
||||
italic = wrap("''",[[<i>%s</i>]], V"italic"),
|
||||
bold = wrap("'''",[[<b>%s</b>]], V"bold"),
|
||||
underline = wrap("__",[[<u>%s</u>]], V"underline"),
|
||||
heading = wrap("==",[[<h2>%s</h2>]], V"heading"),
|
||||
strike = wrap("~~",[[<s>%s</s>]], V"strike"),
|
||||
heading = wrap("==",[[<h2>%s</h2>]], V"underline" + V"strike" + V"italic"),
|
||||
bold = wrap("'''",[[<b>%s</b>]], V"italic" + V"underline" + V"strike"),
|
||||
italic = wrap("''",[[<i>%s</i>]], V"underline" + V"strike"),
|
||||
underline = wrap("__",[[<u>%s</u>]], V"strike"),
|
||||
strike = wrap("~~",[[<s>%s</s>]], P("blah")),
|
||||
spoiler = wrap("**",[[<span class="spoiler">%s</span>]],V"spoiler2" + V"bold" + V"italic" + V"underline" + V"strike"),
|
||||
spoiler2 = tag("spoiler",[[<span class="spoiler2">%s</span>]],V"spoiler" + V"bold" + V"italic" + V"underline" + V"strike"),
|
||||
code = tag("code",[[<pre><code>%s</code></pre>]]),
|
||||
greentext = P">" * (B"\n>" + B">") * Cs((V"marked" + word)^0) / function(a)
|
||||
return string.format([[<span class="greentext">>%s</span>]],a)
|
||||
|
@ -97,7 +99,7 @@ local grammar = P{
|
|||
plainline = (V"marked" + word)^0,
|
||||
line = Cs(V"greentext" + V"pinktext" + V"plainline" + P"") * P"\n" / function(a)
|
||||
if a == "\r" then
|
||||
return "<br/>"
|
||||
return [[<p class="spacer"></p>]]
|
||||
else
|
||||
return string.format("<p>%s</p>",a)
|
||||
end
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
local lpeg = require('lpeg')
|
||||
local etlua = require('etlua')
|
||||
local config = require("config")
|
||||
local args = {...}
|
||||
lpeg.locale(lpeg)
|
||||
local V,P,C,S,B,Cs = lpeg.V,lpeg.P,lpeg.C,lpeg.S,lpeg.B,lpeg.Cs
|
||||
|
@ -41,7 +42,7 @@ local fields
|
|||
local grammar = P{
|
||||
"chunk";
|
||||
whitespace = S" \t\n"^0,
|
||||
itm = C(P(1-S"+-")^0), --go until the next '+' or '-'
|
||||
itm = C((P(1 - (P" " * S"+-")))^0), --go until the next '+' or '-'
|
||||
likefield = C(P"title" + P"author") * V"whitespace" * C(P"=") * V"whitespace" * V"itm",
|
||||
rangeop = P"<=" + P">=" + P">" + P"<" + P"=",
|
||||
rangefield = C(P"date" + P"hits") * V"whitespace" * C(V"rangeop") * V"whitespace" * C(V"itm"),
|
||||
|
@ -54,12 +55,12 @@ local grammar = P{
|
|||
table.insert(fields.tags,{pn,"=",field})
|
||||
end
|
||||
end,
|
||||
chunk = V"field"^0
|
||||
chunk = V"field" * (P" " * V"field")^0
|
||||
|
||||
}
|
||||
--Grammar
|
||||
--Transpile a sting with + and - into an sql query that searches tags
|
||||
local fname = "pages/search_sql.etlua"
|
||||
local fname = config.approot .. "pages/search_sql.etlua"
|
||||
local sqltmpl = assert(io.open(fname))
|
||||
local c = etlua.compile(sqltmpl:read("*a"),fname)
|
||||
sqltmpl:close()
|
||||
|
|
|
@ -1,9 +1,10 @@
|
|||
|
||||
local queries = {}
|
||||
local config = require("config")
|
||||
|
||||
setmetatable(queries,{
|
||||
__index = function(self,key)
|
||||
local f = assert(io.open("sql/" .. key .. ".sql","r"))
|
||||
local f = assert(io.open(config.approot .. "sql/" .. key .. ".sql","r"))
|
||||
local ret = f:read("*a")
|
||||
f:close()
|
||||
return ret
|
||||
|
|
|
@ -1,17 +1,18 @@
|
|||
local sql = require("lsqlite3")
|
||||
|
||||
local db = require("db")
|
||||
local util = require("util")
|
||||
local queries = require("queries")
|
||||
|
||||
local stmnt_get_session, stmnt_insert_session, stmnt_delete_session
|
||||
|
||||
local oldconfigure = configure
|
||||
local stmnt_get_session, stmnt_insert_session
|
||||
function configure(...)
|
||||
stmnt_get_session = assert(db.conn:prepare(queries.select_valid_sessions))
|
||||
stmnt_insert_session = assert(db.conn:prepare(queries.insert_session))
|
||||
stmnt_get_session = db.sqlassert(db.conn:prepare(queries.select_valid_sessions))
|
||||
stmnt_insert_session = db.sqlassert(db.conn:prepare(queries.insert_session))
|
||||
stmnt_delete_session = db.sqlassert(db.conn:prepare(queries.delete_session))
|
||||
return oldconfigure(...)
|
||||
end
|
||||
|
||||
|
||||
local session = {}
|
||||
|
||||
--[[
|
||||
|
@ -27,15 +28,14 @@ function session.get(req)
|
|||
stmnt_get_session:bind_names{
|
||||
key = sessionid
|
||||
}
|
||||
local err = util.do_sql(stmnt_get_session)
|
||||
local err = db.do_sql(stmnt_get_session)
|
||||
if err ~= sql.ROW then
|
||||
stmnt_get_session:reset()
|
||||
return nil, "No such session by logged in users"
|
||||
end
|
||||
local data = stmnt_get_session:get_values()
|
||||
stmnt_get_session:reset()
|
||||
local author = data[1]
|
||||
local authorid = data[2]
|
||||
return author,authorid
|
||||
return data[1],data[2]
|
||||
end
|
||||
|
||||
--[[
|
||||
|
@ -44,21 +44,36 @@ Start a session for someone who logged in
|
|||
function session.start(who)
|
||||
local rngf = assert(io.open("/dev/urandom","rb"))
|
||||
local session_t = {}
|
||||
for i = 1,64 do
|
||||
for _ = 1,64 do
|
||||
local r = string.byte(rngf:read(1))
|
||||
local s = string.char((r % 26) + 65)
|
||||
table.insert(session_t,s)
|
||||
end
|
||||
local session = table.concat(session_t)
|
||||
local session_str = table.concat(session_t)
|
||||
rngf:close()
|
||||
stmnt_insert_session:bind_names{
|
||||
sessionid = session,
|
||||
sessionid = session_str,
|
||||
authorid = who
|
||||
}
|
||||
local err = util.do_sql(stmnt_insert_session)
|
||||
local err = db.do_sql(stmnt_insert_session)
|
||||
stmnt_insert_session:reset()
|
||||
assert(err == sql.DONE, "Error should have been 'DONE', was: " .. tostring(err))
|
||||
return session_str
|
||||
end
|
||||
|
||||
--[[
|
||||
End a session, log someone out
|
||||
]]
|
||||
function session.finish(who,sessionid)
|
||||
stmnt_delete_session:bind_names{
|
||||
authorid = who,
|
||||
sessionid = sessionid
|
||||
}
|
||||
local err = db.do_sql(stmnt_delete_session)
|
||||
stmnt_delete_session:reset()
|
||||
assert(err == sql.DONE)
|
||||
return session
|
||||
return true
|
||||
|
||||
end
|
||||
|
||||
return session
|
||||
|
|
|
@ -1,8 +1,18 @@
|
|||
--[[ md
|
||||
@name lua/tags
|
||||
|
||||
Helper methods for cleaning story tags.
|
||||
Tags are the main way to search smr, a simple `+<tag>` or `-<tag>` will show all
|
||||
stories that include (+) or do not include (-) a particular tag.
|
||||
|
||||
Tags are stored in the {{table_tags}} and are deleted if the story they are
|
||||
attached to is deleted. If an author is deleted, all their stories are deleted,
|
||||
and this will cascade to deleting tags on their stories too.
|
||||
]]
|
||||
local sql = require("lsqlite3")
|
||||
|
||||
local db = require("db")
|
||||
local queries = require("queries")
|
||||
local util = require("util")
|
||||
local tags = {}
|
||||
|
||||
local stmnt_get_tags, stmnt_ins_tag, stmnt_drop_tags
|
||||
|
@ -13,7 +23,7 @@ function configure(...)
|
|||
stmnt_ins_tag = assert(db.conn:prepare(queries.insert_tag))
|
||||
stmnt_get_tags = assert(db.conn:prepare(queries.select_tags))
|
||||
stmnt_drop_tags = assert(db.conn:prepare(queries.delete_tags))
|
||||
|
||||
|
||||
return oldconfigure(...)
|
||||
end
|
||||
|
||||
|
@ -39,15 +49,15 @@ function tags.get(id)
|
|||
until false
|
||||
end
|
||||
|
||||
function tags.set(storyid,tags)
|
||||
function tags.set(storyid,tags_list)
|
||||
assert(stmnt_drop_tags:bind_names{postid = storyid} == sql.OK)
|
||||
util.do_sql(stmnt_drop_tags)
|
||||
db.do_sql(stmnt_drop_tags)
|
||||
stmnt_drop_tags:reset()
|
||||
local err
|
||||
for _,tag in pairs(tags) do
|
||||
for _,tag in pairs(tags_list) do
|
||||
assert(stmnt_ins_tag:bind(1,storyid) == sql.OK)
|
||||
assert(stmnt_ins_tag:bind(2,tag) == sql.OK)
|
||||
err = util.do_sql(stmnt_ins_tag)
|
||||
err = db.do_sql(stmnt_ins_tag)
|
||||
stmnt_ins_tag:reset()
|
||||
end
|
||||
if err ~= sql.DONE then
|
||||
|
|
|
@ -0,0 +1,51 @@
|
|||
--[[
|
||||
Type checking, vaguely inspired by Python3's typing module.
|
||||
]]
|
||||
|
||||
local types = {}
|
||||
|
||||
function types.positive(arg)
|
||||
local is_number, err = types.number(arg)
|
||||
if not is_number then
|
||||
return false, err
|
||||
end
|
||||
if arg < 0 then
|
||||
return false, string.format("was not positive")
|
||||
end
|
||||
return true
|
||||
end
|
||||
|
||||
--Basic lua types
|
||||
local builtin_types = {
|
||||
"nil","boolean","number","string","table","function","coroutine","userdata"
|
||||
}
|
||||
for _,type_ in pairs(builtin_types) do
|
||||
types[type_] = function(arg)
|
||||
local argtype = type(arg)
|
||||
if argtype ~= type_ then
|
||||
return false, string.format("was not a %s, was a %s",type_,argtype)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
function types.matches_pattern(pattern)
|
||||
return function(arg)
|
||||
local is_string, err = types.string(arg)
|
||||
if not is_string then
|
||||
return false, err
|
||||
end
|
||||
if not string.match(arg, pattern) then
|
||||
return false, string.format(
|
||||
"Expected %q to match pattern %q, but it did not.",
|
||||
arg,
|
||||
pattern
|
||||
)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
function types.check(...)
|
||||
|
||||
end
|
||||
|
||||
return types
|
275
src/lua/util.lua
275
src/lua/util.lua
|
@ -1,107 +1,94 @@
|
|||
--[[ md
|
||||
@name lua/util
|
||||
Various utilities that aren't big enough for their own module, but are still
|
||||
used in more than one place.
|
||||
]]
|
||||
|
||||
local sql = require("lsqlite3")
|
||||
|
||||
local config = require("config")
|
||||
local db = require("db")
|
||||
local queries = require("queries")
|
||||
|
||||
local util = {}
|
||||
|
||||
--[[
|
||||
Runs an sql query and receives the 3 arguments back, prints a nice error
|
||||
message on fail, and returns true on success.
|
||||
]]
|
||||
function util.sqlassert(...)
|
||||
local r,errcode,err = ...
|
||||
if not r then
|
||||
error(string.format("%d: %s",errcode, err))
|
||||
end
|
||||
return r
|
||||
|
||||
local stmnt_comments
|
||||
local oldconfigure = configure
|
||||
function configure(...)
|
||||
stmnt_comments = assert(db.conn:prepare(queries.select_comments))
|
||||
return oldconfigure(...)
|
||||
end
|
||||
|
||||
--[[
|
||||
Continuously tries to perform an sql statement until it goes through
|
||||
]]
|
||||
function util.do_sql(stmnt)
|
||||
if not stmnt then error("No statement",2) end
|
||||
local err
|
||||
local i = 0
|
||||
repeat
|
||||
err = stmnt:step()
|
||||
if err == sql.BUSY then
|
||||
i = i + 1
|
||||
coroutine.yield()
|
||||
end
|
||||
until(err ~= sql.BUSY or i > 10)
|
||||
assert(i < 10, "Database busy")
|
||||
return err
|
||||
end
|
||||
--[[ md
|
||||
@name doc/url_spec
|
||||
|
||||
--[[
|
||||
Provides an iterator that loops over results in an sql statement
|
||||
or throws an error, then resets the statement after the loop is done.
|
||||
]]
|
||||
function util.sql_rows(stmnt)
|
||||
if not stmnt then error("No statement",2) end
|
||||
local err
|
||||
return function()
|
||||
err = stmnt:step()
|
||||
if err == sql.BUSY then
|
||||
coroutine.yield()
|
||||
elseif err == sql.ROW then
|
||||
return unpack(stmnt:get_values())
|
||||
elseif err == sql.DONE then
|
||||
stmnt:reset()
|
||||
return nil
|
||||
else
|
||||
stmnt:reset()
|
||||
local msg = string.format(
|
||||
"SQL Iteration failed: %s : %s\n%s",
|
||||
tostring(err),
|
||||
db.conn:errmsg(),
|
||||
debug.traceback()
|
||||
)
|
||||
log(LOG_CRIT,msg)
|
||||
error(msg)
|
||||
end
|
||||
end
|
||||
end
|
||||
URLs generated from smr use letters and numbers to encode a monotonically
|
||||
increasing post id into a url that can easily be shared (and ends up
|
||||
considerably shorter). The characters used in url generation are:
|
||||
[a-z][A-Z][0-9], and numbers are encoded to use the second available 1-character
|
||||
permuation, then the first available 2-character permutation, and so on.
|
||||
|
||||
--[[
|
||||
Binds an argument to as statement with nice error reporting on failure
|
||||
stmnt :: sql.stmnt - the prepared sql statemnet
|
||||
call :: string - a string "bind" or "bind_blob"
|
||||
position :: number - the argument position to bind to
|
||||
data :: string - The data to bind
|
||||
]]
|
||||
function util.sqlbind(stmnt,call,position,data)
|
||||
assert(call == "bind" or call == "bind_blob","Bad bind call, call was:" .. call)
|
||||
local f = stmnt[call](stmnt,position,data)
|
||||
if f ~= sql.OK then
|
||||
error(string.format("Failed to %s at %d with %q: %s", call, position, data, db.conn:errmsg()),2)
|
||||
end
|
||||
end
|
||||
For example, the first post is encoded as 'b', the second as 'c', the thrid
|
||||
as 'd', and so on. The off-by-one nature is to simplify implementation of
|
||||
2-character and 3-character combinations with Lua's 1-indexed arrays.
|
||||
|
||||
--see https://perishablepress.com/stop-using-unsafe-characters-in-urls/
|
||||
--no underscore because we use that for our operative pages
|
||||
local url_characters =
|
||||
see https://perishablepress.com/stop-using-unsafe-characters-in-urls/
|
||||
no underscore because we use that for our operative pages
|
||||
|
||||
A set of legacy characters that are no longer in use (because they were invalid
|
||||
to use in URL's) is also defined, but unused as long as
|
||||
{{config/legacy_url_cutoff}} is set to 0.
|
||||
]]
|
||||
local url_characters =
|
||||
[[abcdefghijklmnopqrstuvwxyz]]..
|
||||
[[ABCDEFGHIJKLMNOPQRSTUVWXYZ]]..
|
||||
[[0123456789]]..
|
||||
[[0123456789]]
|
||||
|
||||
local url_characters_legacy =
|
||||
url_characters ..
|
||||
[[$-+!*'(),]]
|
||||
local url_characters_rev = {}
|
||||
for i = 1,string.len(url_characters) do
|
||||
url_characters_rev[string.sub(url_characters,i,i)] = i
|
||||
|
||||
local function str2set(str)
|
||||
local tbl = {}
|
||||
for i = 1, #str do
|
||||
tbl[string.sub(str,i,i)] = i
|
||||
end
|
||||
return tbl
|
||||
end
|
||||
--[[
|
||||
Encode a number to a shorter HTML-safe url path
|
||||
local url_characters_rev = str2set(url_characters)
|
||||
local url_characters_rev_legacy = str2set(url_characters_legacy)
|
||||
|
||||
--[[ md
|
||||
@name lua/util/encode_id
|
||||
Encode a number to a shorter HTML-safe url path. Url paths are generated
|
||||
according to the {{doc/url_spec}
|
||||
]]
|
||||
function util.encode_id(number)
|
||||
local result = {}
|
||||
local charlen = string.len(url_characters)
|
||||
repeat
|
||||
local pos = (number % charlen) + 1
|
||||
number = math.floor(number / charlen)
|
||||
local pos = (number % #url_characters) + 1
|
||||
number = math.floor(number / #url_characters)
|
||||
table.insert(result,string.sub(url_characters,pos,pos))
|
||||
until number == 0
|
||||
return table.concat(result)
|
||||
end
|
||||
--[[
|
||||
Legacy code, try to encode with invalid characters in the url first
|
||||
]]
|
||||
local new_encode = util.encode_id
|
||||
function util.encode_id(number)
|
||||
if number >= config.legacy_url_cutoff then
|
||||
return new_encode(number)
|
||||
else
|
||||
local result = {}
|
||||
repeat
|
||||
local pos = (number % #url_characters_legacy) + 1
|
||||
number = math.floor(number / #url_characters_legacy)
|
||||
table.insert(result,string.sub(url_characters_legacy,pos,pos))
|
||||
until number == 0
|
||||
return table.concat(result)
|
||||
end
|
||||
end
|
||||
|
||||
--[[
|
||||
Given a short HTML-safe url path, convert it to a storyid
|
||||
|
@ -109,21 +96,65 @@ Given a short HTML-safe url path, convert it to a storyid
|
|||
function util.decode_id(s)
|
||||
local res, id = pcall(function()
|
||||
local n = 0
|
||||
local charlen = string.len(url_characters)
|
||||
for i = 1,string.len(s) do
|
||||
local char = string.sub(s,i,i)
|
||||
local pos = url_characters_rev[char] - 1
|
||||
n = n + (pos*math.pow(charlen,i-1))
|
||||
n = n + (pos*math.pow(#url_characters,i-1))
|
||||
end
|
||||
return n
|
||||
end)
|
||||
if res then
|
||||
return id
|
||||
else
|
||||
error("Failed to decode id:" .. s)
|
||||
return false,"Failed to decode id:" .. s
|
||||
end
|
||||
end
|
||||
|
||||
--[[
|
||||
Legacy code, try to decode with invalid characters in the url first
|
||||
]]
|
||||
local new_decode = util.decode_id
|
||||
function util.decode_id(s)
|
||||
local res, id = pcall(function()
|
||||
local n = 0
|
||||
for i = 1,string.len(s) do
|
||||
local char = string.sub(s,i,i)
|
||||
local pos = url_characters_rev_legacy[char] - 1
|
||||
n = n + (pos * math.pow(#url_characters_legacy,i-1))
|
||||
end
|
||||
return n
|
||||
end)
|
||||
if res then
|
||||
if id > config.legacy_url_cutoff then
|
||||
return new_decode(s)
|
||||
else
|
||||
return id
|
||||
end
|
||||
else
|
||||
return false,"Failed to decode id:" .. s
|
||||
end
|
||||
end
|
||||
|
||||
--arbitary data to hex encoded string
|
||||
function util.encode_unlisted(str)
|
||||
assert(type(str) == "string","Tried to encode something not a string:" .. type(str))
|
||||
local safe = {}
|
||||
for i = 1,#str do
|
||||
local byte = str:byte(i)
|
||||
table.insert(safe,string.format("%02x",byte))
|
||||
end
|
||||
return table.concat(safe)
|
||||
end
|
||||
|
||||
--hex encoded string to arbitrary data
|
||||
function util.decode_unlisted(str)
|
||||
local output = {}
|
||||
for byte in str:gmatch("%x%x") do
|
||||
table.insert(output, string.char(tonumber(byte,16)))
|
||||
end
|
||||
return table.concat(output)
|
||||
end
|
||||
|
||||
--[[
|
||||
Parses a semicolon seperated string into it's parts:
|
||||
1. seperates by semicolon
|
||||
|
@ -138,25 +169,71 @@ function util.parse_tags(str)
|
|||
local tags = {}
|
||||
for tag in string.gmatch(str,"([^;]+)") do
|
||||
assert(tag, "Found a nil or false tag in:" .. str)
|
||||
local tag_trimmed = string.match(tag,"%s*(.*)%s*")
|
||||
local tag_lower = string.lower(tag_trimmed)
|
||||
local tag_capitalized = string.gsub(tag_lower,"^.",string.upper)
|
||||
assert(tag_capitalized, "After processing tag:" .. tag .. " it was falsey.")
|
||||
if string.len(tag_capitalized) > 0 then
|
||||
table.insert(tags, tag_capitalized)
|
||||
local tag_fmt = tag:match("%s*(.*)%s*"):lower():gsub("^.",string.upper)
|
||||
assert(tag_fmt, "After processing tag:" .. tag .. " it was falsey.")
|
||||
if string.len(tag_fmt) > 0 then
|
||||
table.insert(tags, tag_fmt)
|
||||
end
|
||||
end
|
||||
return tags
|
||||
end
|
||||
|
||||
local function decodeentity(capture)
|
||||
local n = tonumber(capture,16)
|
||||
local c = string.char(n)
|
||||
if escapes[c] then
|
||||
return escapes[c]
|
||||
else
|
||||
return c
|
||||
--[[
|
||||
Get the comments for a story
|
||||
|
||||
Comments are a table with the structure:
|
||||
comment :: table {
|
||||
author :: string - The author's text name
|
||||
isanon :: boolean - True if the author is anon (author string will be "Anonymous")
|
||||
text :: string - The text of the comment
|
||||
}
|
||||
]]
|
||||
function util.get_comments(sid)
|
||||
stmnt_comments:bind_names{id = sid}
|
||||
local comments = {}
|
||||
for com_author, com_isanon, com_text in db.sql_rows(stmnt_comments) do
|
||||
table.insert(comments,{
|
||||
author = com_author,
|
||||
isanon = com_isanon == 1, --int to boolean
|
||||
text = com_text
|
||||
})
|
||||
end
|
||||
return comments
|
||||
end
|
||||
|
||||
|
||||
if config.debugging then
|
||||
function util.checktypes(...)
|
||||
local args = {...}
|
||||
if #args == 1 then
|
||||
args = table.unpack(args)
|
||||
end
|
||||
assert(
|
||||
#args % 3 == 0,
|
||||
"Arguments to checktypes() must be triplets of " ..
|
||||
"<variable>, <lua type>, <type check function> "
|
||||
)
|
||||
for i = 1,#args,3 do
|
||||
local var, ltype, veri_f = args[i+0], args[i+1], args[i+2]
|
||||
assert(
|
||||
type(var) == ltype,
|
||||
string.format(
|
||||
"Expected argument %d (%q) to be type %s, but was %s",
|
||||
i/3
|
||||
)
|
||||
)
|
||||
if veri_f then
|
||||
assert(veri_f(var))
|
||||
end
|
||||
end
|
||||
end
|
||||
else
|
||||
function util.checktypes()
|
||||
end
|
||||
end
|
||||
|
||||
local function decodeentity(capture)
|
||||
return string.char(tonumber(capture,16)) --Decode base 16 and conver to character
|
||||
end
|
||||
function util.decodeentities(str)
|
||||
return string.gsub(str,"%%(%x%x)",decodeentity)
|
||||
|
|
|
@ -0,0 +1,14 @@
|
|||
author_edit.etlua
|
||||
author_index.etlua
|
||||
author_paste.etlua
|
||||
cantedit.etlua
|
||||
claim.etlua
|
||||
edit.etlua
|
||||
index.etlua
|
||||
login.etlua
|
||||
noauthor.etlua
|
||||
nostory.etlua
|
||||
paste.etlua
|
||||
read.etlua
|
||||
search.etlua
|
||||
search_sql.etlua
|
|
@ -1,27 +0,0 @@
|
|||
<% assert(author,"No author specified") %>
|
||||
<% assert(bio,"No bio included") %>
|
||||
<{system cat src/pages/parts/header.etlua}>
|
||||
<h1 class="title">
|
||||
<a href="https://<%= author %>.<%= domain %>"><%= author %></a>.<a href="https://<%= domain %>"><%= domain %></a>
|
||||
</h1>
|
||||
|
||||
<div class="content">
|
||||
<form action="https://<%= author %>.<%= domain %>/" method="post" class="container">
|
||||
<textarea name="" cols=80 rows=24 class="column">
|
||||
<%= bio %>
|
||||
</textarea><br/>
|
||||
<input type="submit">
|
||||
</form>
|
||||
</div>
|
||||
<div class="content">
|
||||
<% if #stories == 0 then %>
|
||||
This author has not made any pastes yet.
|
||||
<% else %>
|
||||
<table>
|
||||
<% for k,story in pairs(stories) do %>
|
||||
<{system cat src/pages/parts/story_breif.etlua}>
|
||||
<% end %>
|
||||
</table>
|
||||
<% end %>
|
||||
</div>
|
||||
<{system cat src/pages/parts/footer.etlua}>
|
|
@ -1,25 +0,0 @@
|
|||
<% assert(author,"No author specified") %>
|
||||
<% assert(bio,"No bio included") %>
|
||||
<{system cat src/pages/parts/header.etlua}>
|
||||
<h1 class="title">
|
||||
<a href="https://<%= author %>.<%= domain %>"><%= author %></a>.<a href="https://<%= domain %>"><%= domain %></a>
|
||||
</h1>
|
||||
<div class="container">
|
||||
<a href="/_paste" class="button">New paste</a>
|
||||
</div>
|
||||
<div class="content">
|
||||
<%= bio %>
|
||||
</div>
|
||||
<div class="content">
|
||||
<% if #stories == 0 then %>
|
||||
This author has not made any pastes yet.
|
||||
<% else %>
|
||||
<table>
|
||||
<% for k,story in pairs(stories) do %>
|
||||
<{system cat src/pages/parts/story_breif.etlua}>
|
||||
<% end %>
|
||||
</table>
|
||||
<% end %>
|
||||
</div>
|
||||
<{system cat src/pages/parts/footer.etlua}>
|
||||
|
|
@ -1,30 +0,0 @@
|
|||
<{system cat src/pages/parts/header.etlua}>
|
||||
<h1 class="title">
|
||||
Paste
|
||||
</h1>
|
||||
<% if err then %><em class="error"><%= err %></em><% end %>
|
||||
<form action="https://<%= user %>.<%= domain %>/_paste" method="post" class="container">
|
||||
<fieldset>
|
||||
<div class="row">
|
||||
<input type="text" name="title" placeholder="Title" class="column column-80"></input>
|
||||
<select id="pasteas" name="pasteas" class="column column-10">
|
||||
<option value="<%= user %>"><%= user %></option>
|
||||
<option value="anonymous">Anonymous</option>
|
||||
</select>
|
||||
<select id="markup" name="markup" class="column column-10">
|
||||
<option value="plain">Plain</option>
|
||||
<option value="imageboard">Imageboard</option>
|
||||
</select>
|
||||
</div>
|
||||
<div class="row">
|
||||
<input type="text" name="tags" placeholder="Tags (semicolon;seperated)" class="column"></input>
|
||||
</div>
|
||||
<div class="row">
|
||||
<textarea name="text" cols=80 rows=24 class="column"><%= text %></textarea><br/>
|
||||
</div>
|
||||
<input type="submit">
|
||||
<input type="submit" formtarget="_blank" value="Preview" formaction="https://<%= domain %>/_preview">
|
||||
</fieldset>
|
||||
</form>
|
||||
<{system cat src/pages/parts/footer.etlua}>
|
||||
|
|
@ -1,11 +0,0 @@
|
|||
<{system cat src/pages/parts/header.etlua}>
|
||||
<h1 class="title">
|
||||
🙁
|
||||
</h1>
|
||||
<div class="container">
|
||||
<p>
|
||||
You don't have permission to edit: <%= path %>
|
||||
</p>
|
||||
</div>
|
||||
<{system cat src/pages/parts/footer.etlua}>
|
||||
|
|
@ -1,20 +0,0 @@
|
|||
<{system cat src/pages/parts/header.etlua}>
|
||||
<h1 class="title">
|
||||
Register
|
||||
</h1>
|
||||
Once you press submit, you will be prompted to download a file.<br/>
|
||||
slash.monster uses this file in place of a password, keep it safe.<br/>
|
||||
Consider keeping a copy on a USB in case your hard drive fails.<br/>
|
||||
The admin cannot recover your passfile, and will not reset accounts.<br/>
|
||||
<b>Names may be up to 30 characters, alphanumeric, no symbols, all lower case.</b><br/>
|
||||
<% if err then %><em class="error"><%= err %></em><% end %>
|
||||
<form action="/_claim" method="post">
|
||||
<fieldset>
|
||||
<label for="user">Name:</label>
|
||||
<input type="text" name="user" id="user" placeholder="name">
|
||||
<input type="submit">
|
||||
</fieldset>
|
||||
</form>
|
||||
Once you have your file, you can <a href="/_login">log in</a>
|
||||
<{system cat src/pages/parts/footer.etlua}>
|
||||
|
|
@ -1,36 +0,0 @@
|
|||
<{system cat src/pages/parts/header.etlua}>
|
||||
<h1 class="title">
|
||||
Paste
|
||||
</h1>
|
||||
<% if err then %><em class="error"><%= err %></em><% end %>
|
||||
<form action="https://<%= user %>.<%= domain %>/_edit" method="post" class="container">
|
||||
<fieldset>
|
||||
<div class="row">
|
||||
<input type="text" name="title" placeholder="Title" class="column column-80" value="<%= title %>"></input>
|
||||
<input type="hidden" name="story" value="<%= story %>">
|
||||
<select id="pasteas" name="pasteas" class="column column-10">
|
||||
<% if isanon then %>
|
||||
<option value="<%= user %>"><%= user %></option>
|
||||
<option value="anonymous" selected>Anonymous</option>
|
||||
<% else %>
|
||||
<option value="<%= user %>" selected><%= user %></option>
|
||||
<option value="anonymous">Anonymous</option>
|
||||
<% end %>
|
||||
</select>
|
||||
<select id="markup" name="markup" class="column column-10">
|
||||
<option value="plain">Plain</option>
|
||||
<option value="imageboard">Imageboard</option>
|
||||
</select>
|
||||
</div>
|
||||
<div class="row">
|
||||
<input type="text" name="tags" value="<%= tags %>" placeholder="Tags (semicolon;seperated)" class="column"></input>
|
||||
</div>
|
||||
<div class="row">
|
||||
<textarea name="text" cols=80 rows=24 class="column"><%= text %></textarea><br/>
|
||||
</div>
|
||||
<input type="submit">
|
||||
|
||||
</fieldset>
|
||||
</form>
|
||||
<{cat src/pages/parts/footer.etlua}>
|
||||
|
|
@ -0,0 +1,48 @@
|
|||
|
||||
<!DOCTYPE html>
|
||||
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<% if author then %>
|
||||
<meta name="author" content="<%= author %>">
|
||||
<% end %>
|
||||
<% if title then %>
|
||||
<title><%- title %></title>
|
||||
<% else %>
|
||||
<title>🍑</title>
|
||||
<% end %>
|
||||
<link href="/_css/milligram.css" rel="stylesheet">
|
||||
<link href="/_css/style.css" rel="stylesheet">
|
||||
<% if extra_load then %>
|
||||
<% for _,load in ipairs(extra_load) do %>
|
||||
<%- load %>
|
||||
<% end %>
|
||||
<% end %>
|
||||
</head>
|
||||
<body class="container">
|
||||
<main class="wrapper">
|
||||
|
||||
<h1 class="title">
|
||||
Edit Biography for <%= user %>
|
||||
</h1>
|
||||
<% if err then %><em class="error"><%= err %></em><% end %>
|
||||
<form action="https://<%= user %>.<%= domain %>/_bio" method="post" class="container">
|
||||
<fieldset>
|
||||
<input type="hidden" name="author" value="<%= user %>">
|
||||
<div class="row">
|
||||
<textarea name="text" cols=80 rows=24 class="column"><%= text %></textarea><br/>
|
||||
</div>
|
||||
<div class="row">
|
||||
<input type="submit">
|
||||
</div>
|
||||
</fieldset>
|
||||
</form>
|
||||
<footer class="footer">
|
||||
|
||||
</footer>
|
||||
</main>
|
||||
</body>
|
||||
<body>
|
||||
|
||||
|
|
@ -1,36 +0,0 @@
|
|||
<{system cat src/pages/parts/header.etlua}>
|
||||
<h1 class="title column">
|
||||
<a href="https://<%= domain %>">
|
||||
<%= domain %>
|
||||
</a>
|
||||
</h1>
|
||||
|
||||
<div class="container">
|
||||
<div class="row">
|
||||
<a href="/_paste" class="button column column-0">New paste</a>
|
||||
<a href="/_login" class="button column column-0">Log in</a>
|
||||
<a href="/_claim" class="button column column-0">Register</a>
|
||||
<form action="https://<%= domain %>/_search" method="get" class="search column row">
|
||||
<input class="column" type="text" name="q" placeholder="+greentext -dotr +hits>20"/>
|
||||
<input class="column column-0 button button-clear" type="submit" value="🔎"/>
|
||||
</form>
|
||||
</div>
|
||||
<p>
|
||||
Welcome to slash.monster, stories of fiction and fantasy<br/>
|
||||
Not safe for work<br/>
|
||||
18+
|
||||
</p>
|
||||
</div>
|
||||
<div class="content">
|
||||
<% if #stories == 0 then %>
|
||||
No stories available.
|
||||
<% else %>
|
||||
<table>
|
||||
<% for k,story in pairs(stories) do %>
|
||||
<{system cat src/pages/parts/story_breif.etlua}>
|
||||
<% end %>
|
||||
</table>
|
||||
<% end %>
|
||||
</div>
|
||||
<{system cat src/pages/parts/footer.etlua}>
|
||||
|
|
@ -1,15 +0,0 @@
|
|||
<{system cat src/pages/parts/header.etlua}>
|
||||
<h1 class="title">
|
||||
Login
|
||||
</h1>
|
||||
<% if err then %><em class="error"><%= err %></em><% end %>
|
||||
<form action="/_login" method="post" enctype="multipart/form-data">
|
||||
<fieldset>
|
||||
<label for="user">Name:</label>
|
||||
<input type="text" name="user" id="user" placeholder="name" autocorrect="off" autocapitalize="none">
|
||||
<label for="pass">Passfile:</label>
|
||||
<input type="file" name="pass" id="pass">
|
||||
<input type="submit" value="Log In"/>
|
||||
</fieldset>
|
||||
</form>
|
||||
<{system cat src/pages/parts/footer.etlua}>
|
|
@ -1,11 +0,0 @@
|
|||
<{system cat src/pages/parts/header.etlua}>
|
||||
<h1 class="title">
|
||||
🙁
|
||||
</h1>
|
||||
<div class="container">
|
||||
<p>
|
||||
No author found: <%= author %>
|
||||
</p>
|
||||
</div>
|
||||
<{system cat src/pages/parts/footer.etlua}>
|
||||
|
|
@ -1,10 +0,0 @@
|
|||
<{system cat src/pages/parts/header.etlua}>
|
||||
<h1 class="title">
|
||||
🙁
|
||||
</h1>
|
||||
<div class="container">
|
||||
<p>
|
||||
No story found: <%= path %>
|
||||
</p>
|
||||
</div>
|
||||
<{system cat src/pages/parts/footer.etlua}>
|
|
@ -14,6 +14,11 @@
|
|||
<% end %>
|
||||
<link href="/_css/milligram.css" rel="stylesheet">
|
||||
<link href="/_css/style.css" rel="stylesheet">
|
||||
<% if extra_load then %>
|
||||
<% for _,load in ipairs(extra_load) do %>
|
||||
<%- load %>
|
||||
<% end %>
|
||||
<% end %>
|
||||
</head>
|
||||
<body class="container">
|
||||
<main class="wrapper">
|
||||
|
|
|
@ -0,0 +1,4 @@
|
|||
<form action="https://<%= domain %>/_search" method="get" class="search column row">
|
||||
<input class="column" type="text" name="q" placeholder="+greentext -dotr +title=dragon +hits>20" <% if q then %> value="<%= q %>" <% end %>/>
|
||||
<input class="column column-0 button button-clear" type="submit" value="🔎"/>
|
||||
</form>
|
|
@ -1,26 +0,0 @@
|
|||
|
||||
<tr><td>
|
||||
<a href="<%= story.url %>">
|
||||
<%- story.title %>
|
||||
</a>
|
||||
</td><td>
|
||||
<% if story.isanon then %>
|
||||
By Anonymous
|
||||
<% else %>
|
||||
By <a href="https://<%= story.author %>.<%= domain %>"><%= story.author %></a>
|
||||
<% end %>
|
||||
</td><td>
|
||||
<%= story.hits %> hits
|
||||
</td><td>
|
||||
<ul class="row tag-list">
|
||||
<% for i = 1,math.min(#story.tags, 5) do %>
|
||||
<% local tag = story.tags[i] %>
|
||||
<{system cat src/pages/parts/taglist.etlua}>
|
||||
<% end %>
|
||||
<% if #story.tags > 5 then %>
|
||||
<li>+<%= #story.tags - 5 %></li>
|
||||
<% end %>
|
||||
</ul>
|
||||
</td><td>
|
||||
<%= story.posted %>
|
||||
</td></tr>
|
|
@ -1,23 +0,0 @@
|
|||
<{system cat src/pages/parts/header.etlua}>
|
||||
<h1 class="title">
|
||||
Paste
|
||||
</h1>
|
||||
<% if err then %><em class="error"><%= err %></em><% end %>
|
||||
<form action="https://<%= domain %>/_paste" method="post" class="container"><fieldset>
|
||||
<div class="row">
|
||||
<input type="text" name="title" placeholder="Title" class="column column-80"></input>
|
||||
<select id="markup" name="markup" class="column column-20">
|
||||
<option value="plain">Plain</option>
|
||||
<option value="imageboard">Imageboard</option>
|
||||
</select>
|
||||
</div>
|
||||
<div class="row">
|
||||
<input type="text" name="tags" placeholder="Tags (semicolon;seperated)" class="column"></input>
|
||||
</div>
|
||||
<div class="row">
|
||||
<textarea name="text" cols=80 rows=24 class="column"></textarea><br/>
|
||||
</div>
|
||||
<input type="submit">
|
||||
<input type="submit" formtarget="_blank" value="Preview" formaction="https://<%= domain %>/_preview">
|
||||
</fieldset></form>
|
||||
<{system cat src/pages/parts/footer.etlua}>
|
|
@ -1,73 +0,0 @@
|
|||
<{system cat src/pages/parts/header.etlua}>
|
||||
<nav>
|
||||
<a href="https://<%= domain %>"><%= domain %></a>/<a href="https://<%= domain %>/<%= idp %>"><%= idp %></a>
|
||||
</nav>
|
||||
<% if owner then -%>
|
||||
<form action="https://<%= domain %>/_edit" method="get"><fieldset>
|
||||
<input type="hidden" name="story" value="<%= idp %>"/>
|
||||
<input type="submit" value="edit" class="button"/>
|
||||
</fieldset></form>
|
||||
<% end -%>
|
||||
<article>
|
||||
<h2 class="title"> <%- title %> </h2>
|
||||
<h3>
|
||||
<% if isanon or author == nil then -%>
|
||||
By Anonymous
|
||||
<% else -%>
|
||||
By <a href="https://<%= author %>.<%= domain %>"><%= author %></a>
|
||||
<% end -%>
|
||||
</h3>
|
||||
<%- text %>
|
||||
</article>
|
||||
|
||||
<hr/>
|
||||
|
||||
<p><%= views %> Hits</p>
|
||||
|
||||
<ul class="tag-list">
|
||||
<% for _,tag in pairs(tags) do -%>
|
||||
<{system cat src/pages/parts/taglist.etlua}>
|
||||
<% end -%>
|
||||
</ul>
|
||||
|
||||
<form action="https://<%= domain %>/_download" method="get">
|
||||
<input type="hidden" name="story" value="<%= idp %>"/>
|
||||
<input type="submit" value="Download TXT" class="button"/>
|
||||
</form>
|
||||
<% if not show_comments then -%>
|
||||
<form action="https://<%= domain %>/<%= idp %>"><fieldset>
|
||||
<input type="hidden" name="comments" value="1">
|
||||
<input type="submit" value="load comments" class="button">
|
||||
</fieldset></form>
|
||||
<% else %>
|
||||
<form action="https://<%= domain %>/<%= idp %>" method="POST">
|
||||
<textarea name="text" cols=60 rows=10 class="column"></textarea>
|
||||
</div><% if iam then %>
|
||||
<select id="postas" name="postas">
|
||||
<option value="Anonymous">Anonymous</option>
|
||||
<option value="<%= iam %>"><%= iam %></option>
|
||||
</select>
|
||||
<input type="submit" value="post" class="button">
|
||||
<% else %>
|
||||
<input type="hidden" name="postas" value="Anonymous">
|
||||
<input type="submit" value="post" class="button">
|
||||
<% end %>
|
||||
</form>
|
||||
<% if comments and #comments == 0 then %>
|
||||
<p><i>No comments yet</i></p>
|
||||
<% else %>
|
||||
<section>
|
||||
<% for _,comment in pairs(comments) do %>
|
||||
<article>
|
||||
<% if comment.isanon then %>
|
||||
<p><b>Anonymous</b></p>
|
||||
<% else %>
|
||||
<p><b><%= comment.author %></b></p>
|
||||
<% end %>
|
||||
<p><%= comment.text %></p>
|
||||
</article>
|
||||
<% end %>
|
||||
</section>
|
||||
<% end %>
|
||||
<% end %>
|
||||
<{system cat src/pages/parts/footer.etlua}>
|
|
@ -1,22 +0,0 @@
|
|||
<{system cat src/pages/parts/header.etlua}>
|
||||
<h1 class="title">
|
||||
<a href="https://<%= domain %>"><%= domain %></a>/
|
||||
</h1>
|
||||
<div class="row">
|
||||
<form action="https://<%= domain %>/_search" method="get" class="search column row">
|
||||
<input class="column" type="text" name="q" placeholder="+greentext -dotr +hits>20" value="<%= q %>"/>
|
||||
<input class="column column-0 button button-clear" type="submit" value="🔎"/>
|
||||
</form>
|
||||
</div>
|
||||
<div class="content">
|
||||
<% if #results == 0 then %>
|
||||
No stories matched your search.
|
||||
<% else %>
|
||||
<table>
|
||||
<% for k,story in pairs(results) do %>
|
||||
<{system cat src/pages/parts/story_breif.etlua}>
|
||||
<% end %>
|
||||
</table>
|
||||
<% end %>
|
||||
</div>
|
||||
<{system cat src/pages/parts/footer.etlua}>
|
|
@ -1,45 +0,0 @@
|
|||
SELECT
|
||||
posts.id,
|
||||
posts.post_title,
|
||||
posts.isanon,
|
||||
authors.name,
|
||||
posts.post_time,
|
||||
posts.views
|
||||
FROM
|
||||
posts,authors
|
||||
WHERE
|
||||
authors.id = posts.authorid
|
||||
<% for field, values in pairs(result) do -%>
|
||||
<% for _,value in pairs(values) do -%>
|
||||
<% local pn,expr,value = unpack(value) -%>
|
||||
<% local n = (pn == "+" and "" or "NOT") -%>
|
||||
<% if field == "title" then -%>
|
||||
AND <%= n %> posts.post_title LIKE ?
|
||||
<% elseif field == "author" then -%>
|
||||
AND <%= n %> authors.name LIKE ?
|
||||
<% elseif field == "date" then -%>
|
||||
AND <%= n %> posts.post_time <%- expr %> ?
|
||||
<% elseif field == "hits" then -%>
|
||||
AND posts.views <%- expr -%> ?
|
||||
<% end -%>
|
||||
<% end -%>
|
||||
<% end -%>
|
||||
<% for _,tag in pairs(result.tags) do -%>
|
||||
INTERSECT
|
||||
SELECT
|
||||
posts.id,
|
||||
posts.post_title,
|
||||
posts.isanon,
|
||||
authors.name,
|
||||
posts.post_time,
|
||||
posts.views
|
||||
FROM
|
||||
posts,authors,tags
|
||||
WHERE
|
||||
posts.authorid = authors.id
|
||||
AND tags.postid = posts.id
|
||||
<% local n,v,t = unpack(tag) -%>
|
||||
<% n = (n == "-" and "NOT" or "") -%>
|
||||
AND <%= n %> tags.tag = ?
|
||||
<% end -%>
|
||||
;
|
201
src/smr.c
201
src/smr.c
|
@ -20,16 +20,24 @@ int edit_story(struct http_request *);
|
|||
int edit_bio(struct http_request *);
|
||||
int read_story(struct http_request *);
|
||||
int login(struct http_request *);
|
||||
int logout(struct http_request *);
|
||||
int claim(struct http_request *);
|
||||
int download(struct http_request *);
|
||||
int preview(struct http_request *);
|
||||
int search(struct http_request *);
|
||||
int archive(struct http_request *);
|
||||
int api(struct http_request *);
|
||||
int style(struct http_request *);
|
||||
int miligram(struct http_request *);
|
||||
int delete(struct http_request *);
|
||||
int do_lua(struct http_request *req, const char *name);
|
||||
int errhandeler(lua_State *);
|
||||
lua_State *L;
|
||||
|
||||
/*
|
||||
These should be defined in in kore somewhere and included here
|
||||
*/
|
||||
void kore_worker_configure(void);
|
||||
void kore_worker_teardown(void);
|
||||
/*
|
||||
static / index
|
||||
static / _post post
|
||||
|
@ -53,32 +61,32 @@ KORE_SECCOMP_FILTER("app",
|
|||
);
|
||||
|
||||
int
|
||||
errhandeler(lua_State *L){
|
||||
printf("Error: %s\n",lua_tostring(L,1));//"error"
|
||||
lua_getglobal(L,"debug");//"error",{debug}
|
||||
lua_getglobal(L,"print");//"error",{debug},print()
|
||||
lua_getfield(L,-2,"traceback");//"error",{debug},print(),traceback()
|
||||
lua_call(L,0,1);//"error",{debug},print(),"traceback"
|
||||
lua_call(L,1,0);//"error",{debug}
|
||||
errhandeler(lua_State *state){
|
||||
printf("Error: %s\n",lua_tostring(state,1));//"error"
|
||||
lua_getglobal(state,"debug");//"error",{debug}
|
||||
lua_getglobal(state,"print");//"error",{debug},print()
|
||||
lua_getfield(state,-2,"traceback");//"error",{debug},print(),traceback()
|
||||
lua_call(state,0,1);//"error",{debug},print(),"traceback"
|
||||
lua_call(state,1,0);//"error",{debug}
|
||||
printf("Called print()\n");
|
||||
lua_getfield(L,-1,"traceback");//"error",{debug},traceback()
|
||||
lua_getfield(state,-1,"traceback");//"error",{debug},traceback()
|
||||
printf("got traceback\n");
|
||||
lua_call(L,0,1);//"error",{debug},"traceback"
|
||||
lua_pushstring(L,"\n");
|
||||
lua_call(state,0,1);//"error",{debug},"traceback"
|
||||
lua_pushstring(state,"\n");
|
||||
printf("called traceback\n");
|
||||
lua_pushvalue(L,-4);//"error",{debug},"traceback","error"
|
||||
lua_pushvalue(state,-4);//"error",{debug},"traceback","error"
|
||||
printf("pushed error\n");
|
||||
lua_concat(L,3);//"error",{debug},"traceback .. error"
|
||||
lua_concat(state,3);//"error",{debug},"traceback .. error"
|
||||
printf("concated\n");
|
||||
int ref = luaL_ref(L,LUA_REGISTRYINDEX);//"error",{debug}
|
||||
lua_pop(L,2);//
|
||||
lua_rawgeti(L,LUA_REGISTRYINDEX,ref);//"traceback .. error"
|
||||
int ref = luaL_ref(state,LUA_REGISTRYINDEX);//"error",{debug}
|
||||
lua_pop(state,2);//
|
||||
lua_rawgeti(state,LUA_REGISTRYINDEX,ref);//"traceback .. error"
|
||||
return 1;
|
||||
}
|
||||
|
||||
int
|
||||
do_lua(struct http_request *req, const char *name){
|
||||
printf("About to do lua %s\n",name);
|
||||
//printf("About to do lua %s\n",name);
|
||||
lua_pushcfunction(L,errhandeler);
|
||||
lua_getglobal(L,name);//err(),name()
|
||||
if(!lua_isfunction(L,-1)){
|
||||
|
@ -99,65 +107,157 @@ do_lua(struct http_request *req, const char *name){
|
|||
return KORE_RESULT_OK;
|
||||
}
|
||||
|
||||
int
|
||||
post_story(struct http_request *req){
|
||||
printf("We want to post!\n");
|
||||
return do_lua(req,"paste");
|
||||
}
|
||||
#define route(method, lua_method) \
|
||||
int\
|
||||
method(struct http_request *req){\
|
||||
return do_lua(req,#lua_method);\
|
||||
}
|
||||
|
||||
int
|
||||
edit_story(struct http_request *req){
|
||||
printf("We want to edit!\n");
|
||||
return do_lua(req,"edit");
|
||||
}
|
||||
/* md
|
||||
@name http/_paste
|
||||
Called at the endpoint <domain>/_paste.
|
||||
This method doesn't need any parameters for GET requests.
|
||||
This method expects the following for POST requests:
|
||||
* title :: string - story title
|
||||
* text :: string - text to put through markup
|
||||
* markup :: string - a valid markup type
|
||||
In addition to the normal assets, this page includes
|
||||
suggest_tags.js, which suggests tags that have been
|
||||
submitted to the site before.
|
||||
@custom http_method GET POST
|
||||
*/
|
||||
/* md
|
||||
@name lua/paste
|
||||
This function is called automatically with the request submitted at
|
||||
<endpoint>/_paste
|
||||
@param http_request req The request to service
|
||||
*/
|
||||
route(post_story,"paste");
|
||||
|
||||
int
|
||||
edit_bio(struct http_request *req){
|
||||
printf("We want to edit bio!\n");
|
||||
return do_lua(req,"edit_bio");
|
||||
}
|
||||
/***
|
||||
Called at the endpoint <domain>/_edit.
|
||||
This method requires the following for GET requests:
|
||||
* story :: string - The url of the story to edit
|
||||
This method requires the following for POST requests:
|
||||
* title :: string - story title
|
||||
* text :: string - text to put through markup
|
||||
* markup :: string - a valid markup type
|
||||
* story :: string - the story we're editing
|
||||
In addition to normal assets, this page includes
|
||||
suggest_tags.js, which suggests tags that have been
|
||||
submitted to the site before.
|
||||
@function _G.edit
|
||||
@custom http_method GET POST
|
||||
@param http_request req The request to service
|
||||
***/
|
||||
route(edit_story, "edit");
|
||||
|
||||
int
|
||||
read_story(struct http_request *req){
|
||||
printf("We want to read!\n");
|
||||
return do_lua(req,"read");
|
||||
}
|
||||
/***
|
||||
Called at the endpoint <domain>/_bio
|
||||
This method does not need any parameters for GET requests.
|
||||
This method requires the following for POST requests:
|
||||
* text :: string - The text to use as the author bio
|
||||
* author :: string - The author to modify
|
||||
If the logged in user does not match the author being
|
||||
modified, the user recives a 401 Unauthorized error.
|
||||
@function _G.edit_bio
|
||||
@custom http_method GET POST
|
||||
@param http_request req The request to service
|
||||
***/
|
||||
route(edit_bio, "edit_bio");
|
||||
|
||||
/***
|
||||
Called at the endpoint <domain>/[^_]*
|
||||
This method does not require any parameters for GET requests, but may include:
|
||||
* load_comments :: 0 | 1 - Legacy parameter for loading comments
|
||||
* pwd :: [0-9a-f]{128} - If the post is marked as "unlisted", this parameter is
|
||||
needed, if it is not passed, the user receives a 401 Unauthorized error.
|
||||
This method requires the following for POST requests:
|
||||
* text :: string - Comment text
|
||||
* postas :: string - The user to post as, if this is not "Anonymous", the
|
||||
request must include a session cookie. If it does not, the user receives
|
||||
a 401 Unauthorized error.
|
||||
* pwd :: [0-9a-f]{128} - Currently unused, but it's intended use is to validate
|
||||
the user has the password for unlisted stories.
|
||||
@function _G.read
|
||||
@custom http_method GET POST
|
||||
@param http_request req The request to service
|
||||
***/
|
||||
route(read_story, "read");
|
||||
|
||||
/***
|
||||
Called at the endpoint <domain>/_login
|
||||
This method does not require any parameters for GET requests.
|
||||
This method requiries the following for POST requests:
|
||||
* user :: [a-z0-9]{1,30} - The username to log in as
|
||||
* pass :: any - The passfile for this user
|
||||
To overload login functionality in an addon, see @{api.authenticate}
|
||||
@function _G.login
|
||||
@custom http_method GET POST
|
||||
@param http_request req The request to service.
|
||||
***/
|
||||
int
|
||||
login(struct http_request *req){
|
||||
printf("We want to login!\n");
|
||||
return do_lua(req,"login");
|
||||
}
|
||||
|
||||
int
|
||||
logout(struct http_request *req){
|
||||
return do_lua(req,"logout");
|
||||
}
|
||||
|
||||
int
|
||||
claim(struct http_request *req){
|
||||
printf("We want to claim!\n");
|
||||
return do_lua(req,"claim");
|
||||
}
|
||||
|
||||
int
|
||||
download(struct http_request *req){
|
||||
printf("We want to do download!\n");
|
||||
return do_lua(req,"download");
|
||||
}
|
||||
|
||||
int
|
||||
preview(struct http_request *req){
|
||||
printf("We want to do preview!\n");
|
||||
return do_lua(req,"preview");
|
||||
}
|
||||
|
||||
int
|
||||
search(struct http_request *req){
|
||||
printf("We want to do search!\n");
|
||||
return do_lua(req,"search");
|
||||
}
|
||||
|
||||
int
|
||||
archive(struct http_request *req){
|
||||
/*
|
||||
struct kore_fileref *ref = kore_fileref_get("data/archive.zip",1);
|
||||
if(ref != NULL){
|
||||
http_response_fileref(ref,HTTP_STATUS_OK,ref);
|
||||
kore_fileref_release(ref);
|
||||
return KORE_RESULT_OK;
|
||||
}else{
|
||||
char msg[] = "Failed to create file ref";
|
||||
http_response(req,200,msg,strlen(msg));
|
||||
return KORE_RESULT_OK;
|
||||
}
|
||||
*/
|
||||
return do_lua(req,"archive");
|
||||
}
|
||||
|
||||
int
|
||||
api(struct http_request *req){
|
||||
return do_lua(req,"api");
|
||||
}
|
||||
|
||||
int
|
||||
home(struct http_request *req){
|
||||
return do_lua(req,"home");
|
||||
}
|
||||
|
||||
int
|
||||
delete(struct http_request *req){
|
||||
return do_lua(req,"delete");
|
||||
}
|
||||
|
||||
void
|
||||
kore_worker_configure(void){
|
||||
printf("Configuring worker...\n");
|
||||
|
@ -172,9 +272,26 @@ kore_worker_configure(void){
|
|||
/*closedir(dp);*/
|
||||
/*}*/
|
||||
L = luaL_newstate();
|
||||
|
||||
|
||||
// Open libraries
|
||||
luaL_openlibs(L);
|
||||
load_kore_libs(L);
|
||||
load_crypto_libs(L);
|
||||
|
||||
// Set package.path
|
||||
lua_getglobal(L,"package"); // {package}
|
||||
lua_getfield(L,-1,"path"); // {package}, "package.path"
|
||||
lua_pushstring(L,";/var/smr/?.lua;/usr/local/share/lua/5.1/?.lua"); // {package}, "package.path", "/var/smr/?.lua"
|
||||
lua_concat(L,2); //{package}, "package.path;/var/app_name/?.lua"
|
||||
lua_setfield(L,-2,"path"); //{package}
|
||||
lua_getfield(L,-1,"cpath");
|
||||
lua_pushstring(L,";/usr/local/lib/lua/5.1/?.so");
|
||||
lua_concat(L,2);
|
||||
lua_setfield(L,-2,"cpath");
|
||||
lua_pop(L,1);
|
||||
|
||||
// Run init
|
||||
lua_pushcfunction(L,errhandeler);
|
||||
printf("About to run loadfile...\n");
|
||||
luaL_loadfile(L,SM_INIT);
|
||||
|
|
|
@ -1 +1,5 @@
|
|||
#define SM_INIT "init.lua"
|
||||
#ifndef SM_INIT
|
||||
#define SM_INIT "/var/smr/init.lua"
|
||||
#endif
|
||||
|
||||
int errhandeler(lua_State *);
|
||||
|
|
|
@ -1 +1,4 @@
|
|||
/*
|
||||
The tags table is indexed on tag, so that search is fast
|
||||
*/
|
||||
CREATE INDEX tag_index ON tags(tag);
|
||||
|
|
|
@ -0,0 +1 @@
|
|||
CREATE INDEX unlisted_index ON posts(hash);
|
|
@ -1,3 +1,18 @@
|
|||
/* md
|
||||
@name sql/table/authors
|
||||
If an author deletes their account, all posts
|
||||
and comments by that author are also deleted (on
|
||||
delete cascade) this is intentional. This also
|
||||
means that all comments by other users on a post
|
||||
an author makes will also be deleted.
|
||||
*/
|
||||
|
||||
/* sh
|
||||
@name sql/table/authors
|
||||
echo "digraph authors {" \
|
||||
"$(cat doc/schema/authors.dot)" \
|
||||
"}" | dot -Tsvg
|
||||
*/
|
||||
CREATE TABLE IF NOT EXISTS authors (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
|
||||
name TEXT UNIQUE ON CONFLICT FAIL,
|
||||
|
|
|
@ -1,3 +1,19 @@
|
|||
/* md
|
||||
@name sql/table/comments
|
||||
Comments on a post.
|
||||
|
||||
When an author deletes their account or the posts this comment
|
||||
is posted on is deleted, this comment will also be deleted.
|
||||
*/
|
||||
|
||||
/* sh
|
||||
@name sql/table/comments
|
||||
echo "digraph comments{" \
|
||||
"$(cat doc/schema/authors.dot)" \
|
||||
"$(cat doc/schema/posts.dot)" \
|
||||
"$(cat doc/schema/comments.dot)" \
|
||||
"}" | dot -Tsvg
|
||||
*/
|
||||
CREATE TABLE IF NOT EXISTS comments (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
|
||||
postid REFERENCES posts(id) ON DELETE CASCADE,
|
||||
|
|
|
@ -1,3 +1,15 @@
|
|||
/* md
|
||||
@name sql/table/images
|
||||
We may want to store images one day. This is unused for now
|
||||
*/
|
||||
|
||||
/* sh
|
||||
@name sql/table/images
|
||||
echo "digraph images {" \
|
||||
"$(cat doc/schema/images.dot)" \
|
||||
"$(cat doc/schema/authors.dot)" \
|
||||
"}" | dot -Tsvg
|
||||
*/
|
||||
CREATE TABLE IF NOT EXISTS images (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
|
||||
name TEXT,
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
/*
|
||||
/* md
|
||||
@name sql/table/posts
|
||||
If/when an author deletes their account, all posts
|
||||
and comments by that author are also deleted (on
|
||||
delete cascade); this is intentional. This also
|
||||
|
@ -6,6 +7,16 @@ means that all comments by other users on a post
|
|||
an author makes will also be deleted.
|
||||
|
||||
Post text uses zlib compression
|
||||
|
||||
Unlisted hashes are SHA-3 512
|
||||
*/
|
||||
|
||||
/* sh
|
||||
@name sql/table/posts
|
||||
echo "digraph comments{" \
|
||||
"$(cat doc/schema/authors.dot)" \
|
||||
"$(cat doc/schema/posts.dot)" \
|
||||
"}" | dot -Tsvg
|
||||
*/
|
||||
CREATE TABLE IF NOT EXISTS posts (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
|
||||
|
@ -15,5 +26,7 @@ CREATE TABLE IF NOT EXISTS posts (
|
|||
isanon INTEGER,
|
||||
hashedip BLOB,
|
||||
post_time INTEGER,
|
||||
views INTEGER DEFAULT 0
|
||||
views INTEGER DEFAULT 0,
|
||||
unlisted INTEGER,
|
||||
hash BLOB
|
||||
);
|
||||
|
|
|
@ -1,7 +1,18 @@
|
|||
/*
|
||||
Store the raw text so people can download it later, maybe
|
||||
we can use it for "download as image" or "download as pdf"
|
||||
in the future too. Still stored zlib compressed
|
||||
/* md
|
||||
@name sql/table/raw_text
|
||||
Store the raw text.
|
||||
Used so people can edit their posts and get their original uploaded text.
|
||||
Also used so users can download it,
|
||||
maybe we can use it for "download as image" or "download as pdf" in the future.
|
||||
Stored zlib compressed
|
||||
*/
|
||||
|
||||
/* sh
|
||||
@name sql/table/raw_text
|
||||
echo "digraph comments{" \
|
||||
"$(cat doc/schema/raw_text.dot)" \
|
||||
"$(cat doc/schema/posts.dot)" \
|
||||
"}" | dot -Tsvg
|
||||
*/
|
||||
CREATE TABLE IF NOT EXISTS raw_text (
|
||||
id INTEGER PRIMARY KEY REFERENCES posts(id) ON DELETE CASCADE,
|
||||
|
|
|
@ -1,8 +1,19 @@
|
|||
/*
|
||||
/* md
|
||||
@name sql/table/sessions
|
||||
Store a cookie for logged in users. Logged in users can edit
|
||||
their own posts.
|
||||
their own posts, edit their biographies, and post stories and comment under their own name.
|
||||
TODO: We can hash the "key" so that even if the database gets
|
||||
dumped, a hacker can't cookie-steal with only read access
|
||||
to the db.
|
||||
*/
|
||||
|
||||
/* sh
|
||||
@name sql/table/sessions
|
||||
echo "digraph comments{" \
|
||||
"$(cat doc/schema/sessions.dot)" \
|
||||
"$(cat doc/schema/authors.dot)" \
|
||||
"}" | dot -Tsvg
|
||||
*/
|
||||
CREATE TABLE IF NOT EXISTS sessions (
|
||||
key TEXT PRIMARY KEY,
|
||||
author REFERENCES authors(id) ON DELETE CASCADE,
|
||||
|
|
|
@ -1,3 +1,47 @@
|
|||
/*
|
||||
Tags on a post
|
||||
A post's tags are deleted if the post is deleted.
|
||||
*/
|
||||
/* dot -Tsvg
|
||||
@name db/schema/tags
|
||||
digraph tags {
|
||||
tags [
|
||||
shape="plaintext"
|
||||
label=<<table>
|
||||
<tr><td colspan="3"><b>tags</b></td></tr>
|
||||
<tr>
|
||||
<td port="id">PK</td>
|
||||
<td>id</td>
|
||||
<td>INT, AUTOINCREMENT, NOT NULL</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>FK</td>
|
||||
<td>postid</td>
|
||||
<td port="postid">ON DELETE CASCADE</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td></td>
|
||||
<td>tag</td>
|
||||
<td>TEXT</td>
|
||||
</tr>
|
||||
</table>>
|
||||
|
||||
];
|
||||
posts [
|
||||
shape="plaintext"
|
||||
label=<<table>
|
||||
<tr><td colspan="3"><b>posts</b></td></tr>
|
||||
<tr>
|
||||
<td port="id">PK</td>
|
||||
<td>id</td>
|
||||
<td>INT, AUTOINCREMENT, NOT NULL</td>
|
||||
</tr>
|
||||
<tr><td colspan="3">...</td></tr>
|
||||
</table>>
|
||||
];
|
||||
tags:postid -> posts:id
|
||||
}
|
||||
*/
|
||||
CREATE TABLE IF NOT EXISTS tags (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
|
||||
postid REFERENCES posts(id) ON DELETE CASCADE,
|
||||
|
|
|
@ -0,0 +1,3 @@
|
|||
/*
Remove a single post, but only when it is owned by the requesting author.
  :postid   -- id of the post to delete
  :authorid -- id of the author making the request
Rows in dependent tables that declare ON DELETE CASCADE against posts(id)
(e.g. comments, tags) are cleaned up automatically.
*/
DELETE FROM posts
WHERE id = :postid
  AND authorid = :authorid;
|
|
@ -0,0 +1 @@
|
|||
DELETE FROM sessions WHERE author = :authorid;
|
|
@ -1,4 +1,6 @@
|
|||
/* Add a new comment to a story */
|
||||
/*
|
||||
Add a new comment to a story
|
||||
*/
|
||||
INSERT INTO comments(
|
||||
postid,
|
||||
author,
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue