This commit is contained in:
maqp 2019-12-03 03:30:38 +02:00
parent f098a709f2
commit 7a608a475a
121 changed files with 30486 additions and 19666 deletions

View File

@ -2,8 +2,16 @@
### Tinfoil Chat
[![License: GPL v3](https://img.shields.io/badge/License-GPLv3-blue.svg)](https://www.gnu.org/licenses/gpl-3.0)
[![Python 3.7](https://img.shields.io/badge/python-3.7-informational.svg)](https://www.python.org/downloads/release/python-370/)
[![Checked with mypy](http://www.mypy-lang.org/static/mypy_badge.svg)](http://mypy-lang.org/)
[![Build Status](https://travis-ci.org/maqp/tfc.svg?branch=master)](https://travis-ci.org/maqp/tfc)
[![Coverage Status](https://coveralls.io/repos/github/maqp/tfc/badge.svg?branch=master)](https://coveralls.io/github/maqp/tfc?branch=master)
[![Codacy Badge](https://api.codacy.com/project/badge/Grade/71fa9cc1da424f52a576a04c2722da26)](https://www.codacy.com/manual/maqp/tfc?utm_source=github.com&utm_medium=referral&utm_content=maqp/tfc&utm_campaign=Badge_Grade)
[![CodeFactor](https://www.codefactor.io/repository/github/maqp/tfc/badge)](https://www.codefactor.io/repository/github/maqp/tfc)
[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
[![Requirements Status](https://requires.io/github/maqp/tfc/requirements.svg?branch=master)](https://requires.io/github/maqp/tfc/requirements/?branch=master)
[![Known Vulnerabilities](https://snyk.io/test/github/maqp/tfc/badge.svg)](https://snyk.io/test/github/maqp/tfc)
Tinfoil Chat (TFC) is a
[FOSS](https://www.gnu.org/philosophy/free-sw.html)+[FHD](https://www.gnu.org/philosophy/free-hardware-designs.en.html)

122
dd.py
View File

@ -25,27 +25,49 @@ import sys
import time
from multiprocessing import Process, Queue
from typing import Any, Dict, Tuple
from typing import Any, Dict, Tuple
from src.common.misc import get_terminal_height, get_terminal_width, ignored, monitor_processes
from src.common.output import clear_screen
from src.common.statics import (DATA_FLOW, DD_ANIMATION_LENGTH, DD_OFFSET_FROM_CENTER, DST_DD_LISTEN_SOCKET,
DST_LISTEN_SOCKET, EXIT_QUEUE, IDLE, LOCALHOST, NC, NCDCLR, NCDCRL, RP_LISTEN_SOCKET,
SCNCLR, SCNCRL, SRC_DD_LISTEN_SOCKET)
from src.common.misc import (
get_terminal_height,
get_terminal_width,
ignored,
monitor_processes,
)
from src.common.output import clear_screen
from src.common.statics import (
DATA_FLOW,
DD_ANIMATION_LENGTH,
DD_OFFSET_FROM_CENTER,
DST_DD_LISTEN_SOCKET,
DST_LISTEN_SOCKET,
EXIT_QUEUE,
IDLE,
LOCALHOST,
NC,
NCDCLR,
NCDCRL,
RP_LISTEN_SOCKET,
SCNCLR,
SCNCRL,
SRC_DD_LISTEN_SOCKET,
)
def draw_frame(argv: str, # Arguments for the simulator position/orientation
message: str, # Status message to print
high: bool = False # Determines the signal's state (high/low)
) -> None:
def draw_frame(
argv: str, # Arguments for the simulator position/orientation
message: str, # Status message to print
high: bool = False, # Determines the signal's state (high/low)
) -> None:
"""Draw a data diode animation frame."""
l, indicator, arrow, r = {NCDCLR: ('Rx', '<', '', 'Tx'),
SCNCLR: ('Tx', '>', '', 'Rx'),
NCDCRL: ('Tx', '>', '', 'Rx'),
SCNCRL: ('Rx', '<', '', 'Tx')}[argv]
l, indicator, arrow, r = {
NCDCLR: ("Rx", "<", "", "Tx"),
SCNCLR: ("Tx", ">", "", "Rx"),
NCDCRL: ("Tx", ">", "", "Rx"),
SCNCRL: ("Rx", "<", "", "Tx"),
}[argv]
indicator = indicator if high else ' '
arrow = arrow if message != IDLE else ' '
indicator = indicator if high else " "
arrow = arrow if message != IDLE else " "
terminal_width = get_terminal_width()
@ -53,13 +75,13 @@ def draw_frame(argv: str, # Arguments for the simulator position/ori
"""Print string on the center of the screen."""
print(string.center(terminal_width))
print('\n' * ((get_terminal_height() // 2) - DD_OFFSET_FROM_CENTER))
print("\n" * ((get_terminal_height() // 2) - DD_OFFSET_FROM_CENTER))
c_print(message)
c_print(arrow)
c_print( "────╮ " + ' ' + " ╭────" )
c_print("────╮ " + " " + " ╭────")
c_print(f" {l}" + indicator + f"{r} ")
c_print( "────╯ " + ' ' + " ╰────" )
c_print("────╯ " + " " + " ╰────")
def animate(argv: str) -> None:
@ -73,11 +95,12 @@ def animate(argv: str) -> None:
draw_frame(argv, IDLE)
def rx_loop(io_queue: 'Queue[Any]', # Queue through which to push datagrams through
input_socket: int # Socket number for Transmitter/Relay Program
) -> None:
def rx_loop(
io_queue: "Queue[Any]", # Queue through which to push datagrams through
input_socket: int, # Socket number for Transmitter/Relay Program
) -> None:
"""Read datagrams from a transmitting program."""
listener = multiprocessing.connection.Listener((LOCALHOST, input_socket))
listener = multiprocessing.connection.Listener((LOCALHOST, input_socket))
interface = listener.accept()
while True:
@ -89,11 +112,12 @@ def rx_loop(io_queue: 'Queue[Any]', # Queue through which to push datagrams
sys.exit(0)
def tx_loop(io_queue: 'Queue[Any]', # Queue through which to push datagrams through
output_socket: int, # Socket number for the Relay/Receiver Program
argv: str, # Arguments for the simulator position/orientation
unit_test: bool = False # Break out from the loop during unit testing
) -> None:
def tx_loop(
io_queue: "Queue[Any]", # Queue through which to push datagrams through
output_socket: int, # Socket number for the Relay/Receiver Program
argv: str, # Arguments for the simulator position/orientation
unit_test: bool = False, # Break out from the loop during unit testing
) -> None:
"""Send queued datagrams to a receiving program."""
draw_frame(argv, IDLE)
@ -106,7 +130,7 @@ def tx_loop(io_queue: 'Queue[Any]', # Queue through which to push datagram
while True:
with ignored(EOFError, KeyboardInterrupt):
while io_queue.qsize() == 0:
while not io_queue.qsize():
time.sleep(0.01)
animate(argv)
interface.send(io_queue.get())
@ -118,27 +142,31 @@ def tx_loop(io_queue: 'Queue[Any]', # Queue through which to push datagram
def process_arguments() -> Tuple[str, int, int]:
"""Load simulator settings from the command line argument."""
try:
argv = str(sys.argv[1])
input_socket, output_socket = {SCNCLR: (SRC_DD_LISTEN_SOCKET, RP_LISTEN_SOCKET),
SCNCRL: (SRC_DD_LISTEN_SOCKET, RP_LISTEN_SOCKET),
NCDCLR: (DST_DD_LISTEN_SOCKET, DST_LISTEN_SOCKET),
NCDCRL: (DST_DD_LISTEN_SOCKET, DST_LISTEN_SOCKET)}[argv]
argv = str(sys.argv[1])
input_socket, output_socket = {
SCNCLR: (SRC_DD_LISTEN_SOCKET, RP_LISTEN_SOCKET),
SCNCRL: (SRC_DD_LISTEN_SOCKET, RP_LISTEN_SOCKET),
NCDCLR: (DST_DD_LISTEN_SOCKET, DST_LISTEN_SOCKET),
NCDCRL: (DST_DD_LISTEN_SOCKET, DST_LISTEN_SOCKET),
}[argv]
return argv, input_socket, output_socket
except (IndexError, KeyError):
clear_screen()
print(f"\nUsage: python3.7 dd.py [OPTION]\n\n"
f"\nMandatory arguments"
f"\n Argument Simulate data diode between..."
f"\n {SCNCLR} Source Computer and Networked Computer (left to right)"
f"\n {SCNCRL} Source Computer and Networked Computer (right to left)"
f"\n {NCDCLR} Networked Computer and Destination Computer (left to right)"
f"\n {NCDCRL} Networked Computer and Destination Computer (right to left)")
print(
f"\nUsage: python3.7 dd.py [OPTION]\n\n"
f"\nMandatory arguments"
f"\n Argument Simulate data diode between..."
f"\n {SCNCLR} Source Computer and Networked Computer (left to right)"
f"\n {SCNCRL} Source Computer and Networked Computer (right to left)"
f"\n {NCDCLR} Networked Computer and Destination Computer (left to right)"
f"\n {NCDCRL} Networked Computer and Destination Computer (right to left)"
)
sys.exit(1)
def main(queues: Dict[bytes, 'Queue[Any]']) -> None:
def main(queues: Dict[bytes, "Queue[Any]"]) -> None:
"""\
Read the argument from the command line and launch the data diode simulator.
@ -163,9 +191,11 @@ def main(queues: Dict[bytes, 'Queue[Any]']) -> None:
argv, input_socket, output_socket = process_arguments()
io_queue = Queue() # type: Queue[Any]
process_list = [Process(target=rx_loop, args=(io_queue, input_socket )),
Process(target=tx_loop, args=(io_queue, output_socket, argv))]
io_queue = Queue() # type: Queue[Any]
process_list = [
Process(target=rx_loop, args=(io_queue, input_socket)),
Process(target=tx_loop, args=(io_queue, output_socket, argv)),
]
for p in process_list:
p.start()
@ -173,5 +203,5 @@ def main(queues: Dict[bytes, 'Queue[Any]']) -> None:
monitor_processes(process_list, NC, queues, error_exit_code=0)
if __name__ == '__main__': # pragma: no cover
if __name__ == "__main__": # pragma: no cover
main({EXIT_QUEUE: Queue()})

View File

@ -18,7 +18,7 @@
# PIP dependency file names
ARGON2=argon2_cffi-19.2.0-cp34-abi3-manylinux1_x86_64.whl
CERTIFI=certifi-2019.9.11-py2.py3-none-any.whl
CERTIFI=certifi-2019.11.28-py2.py3-none-any.whl
CFFI=cffi-1.13.2-cp37-cp37m-manylinux1_x86_64.whl
CHARDET=chardet-3.0.4-py2.py3-none-any.whl
CLICK=Click-7.0-py2.py3-none-any.whl
@ -33,21 +33,21 @@ PYNACL=PyNaCl-1.3.0-cp34-abi3-manylinux1_x86_64.whl
PYSERIAL=pyserial-3.4-py2.py3-none-any.whl
PYSOCKS=PySocks-1.7.1-py3-none-any.whl
REQUESTS=requests-2.22.0-py2.py3-none-any.whl
SETUPTOOLS=setuptools-41.6.0-py2.py3-none-any.whl
SETUPTOOLS=setuptools-42.0.2-py2.py3-none-any.whl
SIX=six-1.13.0-py2.py3-none-any.whl
STEM=stem-1.7.1.tar.gz
# STEM=stem-1.7.1.tar.gz
URLLIB3=urllib3-1.25.7-py2.py3-none-any.whl
VIRTUALENV=virtualenv-16.7.7-py2.py3-none-any.whl
VIRTUALENV=virtualenv-16.7.8-py2.py3-none-any.whl
WERKZEUG=Werkzeug-0.16.0-py2.py3-none-any.whl
function compare_digest {
# Compare the SHA512 digest of TFC file against the digest pinned in
# this installer.
if sha512sum /opt/tfc/$2$3 | grep -Eo '^\w+' | cmp -s <(echo "$1"); then
echo OK - Pinned SHA512 hash matched file /opt/tfc/$2$3
if sha512sum "/opt/tfc/${2}${3}" | grep -Eo '^\w+' | cmp -s <(echo "$1"); then
echo "OK - Pinned SHA512 hash matched file /opt/tfc/${2}${3}"
else
echo Error: /opt/tfc/$2$3 had an invalid SHA512 hash
echo "Error: /opt/tfc/${2}${3} had an invalid SHA512 hash"
exit 1
fi
}
@ -56,99 +56,100 @@ function compare_digest {
function verify_tcb_requirements_files {
# To minimize the time TCB installer configuration stays online, only
# the requirements files are authenticated between downloads.
compare_digest 3a3f8a79420ddb792f647c0bb2a82ac6bfec70f4497005a6ca77ba113cfda40bda502456156860b2a92b464eaf26b23e78bcf907d849ec40a08357955f31549d '' requirements.txt
compare_digest 97558ed189976ccd54e3a25bcf639f1944aa43f4a4f42ff5ef2cf22349a7b649272e91746041b4e04b2f33adf1fab8818c339b1cc58f9353af3e5ac76cb1ec0b '' requirements-venv.txt
compare_digest b2ac8925070d9f304aac6c7500a752b3907b236fe796b5fd82491d02ce9a8b6e2f739a5efd175a2205ecc9241d5e0465a748ad373e8e2a1346eb4f674cf16e65 '' requirements.txt
compare_digest 1c95643d28addf2e8a631b7ec54b2c03cdbe8135695aa5c74b7729bbd272d8590fa3ac03ced5034429c2a3012334713924a83550ff835bc1d0fff77cf43500f6 '' requirements-venv.txt
}
function verify_files {
# Verify the authenticity of the rest of the TFC files.
compare_digest bcb8a7ce1eb2d2f064b560ca5a8e467f84e3a0c3d643771e7782c792e89494600436e52c12f0a8471bf4a1da116f82ed732b8e06783534227a31f576f7adbd6c '' dd.py
compare_digest 941cc47f9846ea9a6fd067a1bc7ecd9e8a945ec8d9a4997b7c24c28072b8b1ab5cb278e93fb3c9d8bb2acca5616c9c32f697af66f5f648a8f56761edddc2564c '' dd.py
compare_digest d361e5e8201481c6346ee6a886592c51265112be550d5224f1a7a6e116255c2f1ab8788df579d9b8372ed7bfd19bac4b6e70e00b472642966ab5b319b99a2686 '' LICENSE
compare_digest 7e519d20fef24e25e88ec4a9c03abadf513b084e05038f17c62ca7899c2f9174a953caa0bfbd3b61e455e243513cdab737c22a34d73ebab07b65d3ce99100f0a '' LICENSE-3RD-PARTY
compare_digest 99815d0cfbca7d83409b7317947fe940fe93fd94b50e6099a566563ee6999e33830fd883ff61e5367a040d5fda3f2a43165ef0dc6155e14a573e07dc27eba70d '' relay.py
compare_digest 3904003688f993c6566fb8cc39e7bd8f820ef3ec0d097b7e467a5aa3019f480a026ae424bfb5473ff71c01002dc386528a010a8fb36cd0f5a03eb0c355450d61 '' requirements-dev.txt
compare_digest 119fbe604a01ad0ef1d6e758ed6ee8dc46be4d746207b0cda1d4c17ba12621d32e6f479229856c6008a77f796bbd778dbecc27bb38dca817d88c257a9d3b27b8 '' requirements-relay.txt
compare_digest 1696663138ca74e4c85caeeea82e34168ddbb1dd1a626a12064c43515859590e17c982dd0f1de2d807039794884bf053c147060760c84751143214e2af3611de '' requirements-relay-tails.txt
compare_digest 550a82b9c07376e9eaf1117f77362f89c401169a848010110c2f8f2d99d50376f4cc5308d8b7e1928e68f15834aca7d5c9a9a7e7b8db956e5e55755ab7ea0a25 '' requirements-setuptools.txt
compare_digest e81bb00e894a14419365b43ecf45443a4fed9ab0332c468066840e8ba17e2e099ff0dc1346c98fbb9d979093afaec4323d53a35d7ffdaca1fe41a4e797a07f29 '' relay.py
compare_digest cef01f168a92975a2e1fb7d514e60fb95995f51d750596f08fdb62c27912e7d6502e1ab5e1cf5dd621c77f5f1423240f75c7269d45eecf5a56a40ba863360f5d '' requirements-dev.txt
compare_digest 6d3c903bc74f5d1f2d20072a73aaac9b3c5f55de6a844f627a1e9d2b3522ecd7516d8637a52ccddb74bb8a854703bc28ec0349049a0e3c9cc59838dfdd22b328 '' requirements-relay.txt
compare_digest fd6073d05c3dc24b44fe1a3b24fcbc6d3b4ffff44d7a96cb5f99c4e431bf7ebe6838fde80384f18fce75bc4f2be752a446bc2cb5bb0335de80366d60eccfdfcc '' requirements-relay-tails.txt
compare_digest c9ac159bb9a7969ab152ea192f3c7597f852493b088bd1801fc36aee8870e319459509abb253915f4d9bfb4f9482d2b0f004fbccce2d41305557ded33cf8c19e '' requirements-setuptools.txt
compare_digest 79f8272a2ab122a48c60630c965cd9d000dcafabf5ee9d69b1c33c58ec321feb17e4654dbbbf783cc8868ccdfe2777d60c6c3fc9ef16f8264d9fcf43724e83c2 '' tfc.png
compare_digest 7e24d0962e0be4b8e206d9390e888caab10604f5bf1bb29af4b91c4c20e42bcc04ef4ef15ce3248ac68c0acfd2e391a96a5567d87d91223f7be63a05b9dbf843 '' tfc.py
compare_digest a6776ed2f82b8afec830c7cfb57473ea15656445ca14f3cea5065f8775ea7829f36a3212462b0c72bf6ec002cff2e309e788a5ca43c742d03d98b6d5691bbaaf '' tfc.py
compare_digest 7ae1c2a393d96761843bea90edd569244bfb4e0f9943e68a4549ee46d93180d26d4101c2471c1a37785ccdfaef45eedecf15057c0a9cc6c056460c5f9a69d37b '' tfc.yml
compare_digest c6a61b3050624874cabc28cc51e947aa1ba629b0fd62564466b902cc433c08be6ae64d53bb2f33158e198c60ef2eb7c38b0bee1a64ef9659d101dee07557ddc7 '' uninstall.sh
compare_digest 50bb3db478184b954480069e40e47167f28f13e55aa87961eed36804c612a8c25e9ab0c0505cf5d36d790051ccfb465a2d7641ab3efb659503b634541d07e9c2 '' uninstall.sh
compare_digest d4f503df2186db02641f54a545739d90974b6d9d920f76ad7e93fe1a38a68a85c167da6c19f7574d11fbb69e57d563845d174d420c55691bc2cd75a1a72806dc launchers/ terminator-config-local-test
compare_digest 9e670036e264cc9f5e5fa33d71f903e996ecc317e8958d99a73dd873639ce5921e31789041813b20ad89b0d09194fea3c0439619e16e7a9331c98917dee762b3 launchers/ TFC-Local-test.desktop
compare_digest 496a0828096944b067feb5210d46a89b320ff2789d8b17f9a4faf787d93559b93335cec77139304981654195b7d32474dc62624209510a8ae5c4690e27d26fce launchers/ TFC-RP.desktop
compare_digest 496a0828096944b067feb5210d46a89b320ff2789d8b17f9a4faf787d93559b93335cec77139304981654195b7d32474dc62624209510a8ae5c4690e27d26fce launchers/ TFC-RP-Tails.desktop
compare_digest 98db2dcc5226632383c1924b678d0bfeceaaf7696071ea5b22116fa46216e40af140b0bd127da53c6cde5f09ba200d2afa7e643284f12b6214f4c353d307ef6e launchers/ TFC-RxP.desktop
compare_digest eec5b006779bbdfe5a3e3da0ea5ea4655f5b77a45ac9834941fba5b5861ca503808400d29ad0b2d9790dea3ce5ba9e6747f345dbaf4398025778cf4b70de8dee launchers/ TFC-TxP.desktop
compare_digest 6e1c1082b7850e55fe19fb2ebe0f622dea16e038072adcfe1347305324d10b97bbc443d7ed1ff3ee141d647b4561874c7736ba449c1e8e34dccd4be9dab5db8b launchers/ TFC-Local-test.desktop
compare_digest 6a6469b5b11cb081e1f9e2848cb328d92f283f94f977f8e89984fa115fbeb719e6b094c9de0c1ff5a4f5f3fd66d3ca71bce1a3a5e4ca3ae454557ad261f8acf6 launchers/ TFC-RP.desktop
compare_digest 6a6469b5b11cb081e1f9e2848cb328d92f283f94f977f8e89984fa115fbeb719e6b094c9de0c1ff5a4f5f3fd66d3ca71bce1a3a5e4ca3ae454557ad261f8acf6 launchers/ TFC-RP-Tails.desktop
compare_digest 4b387996983b6b900a53aedaba0a542eb89416fed0e99ed845680e41748bbad65956c5d4662dfce4b5519412a10404e6c995464c26c74298e0db37f55b3dcd2c launchers/ TFC-RxP.desktop
compare_digest 54b1ff5b89f12548594f65f20b4bd615f6659cdf47188be720c05d3126b8efb13e86257e4f2a1728fca758613519805da66eea3dee01215d389d9d9af6944f4d launchers/ TFC-TxP.desktop
compare_digest cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e src/ __init__.py
compare_digest cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e src/common/ __init__.py
compare_digest baac9291c49fb4b963e200757b5abefbda4f19fa30e27066a41d51009db946d882ed36e4d62f984b33b1b5d0b27de2238200b4b3bdc40b114061639cd9d3deba src/common/ crypto.py
compare_digest c7c8b3782f7a5fd3e035ee2ffc776860caf62f501cb7f1cc8438f03c83e474e2f6b2fbbf9b8dae815fa0a44305e6978b38889cd78349f6ac4814fd2f96eddc8e src/common/ database.py
compare_digest 2f912fdf06b4ca20df7192b9f927473e05d4633d541f0f33346bdeb561a08ac5ee267d513ce02bfd21d2324d1ae615df1653cd4be99516464f770bc84957eb0d src/common/ db_contacts.py
compare_digest 226fa936d6306f7beba36d3fa9ddf9191eb45b7bd217404cdb46824b127e8e906da95c2b53083c7dd4d05cf598b159135c08049c4493b2231081822ef02a7a2f src/common/ db_groups.py
compare_digest b7f0b1ce630c41ded5d36a86cd0e6b17c7f708b059e613235faf4e868adcf51226d233f2e0bfe47ac54454c1561af8b77426d30f94ae26433392eeb70087c517 src/common/ db_keys.py
compare_digest db37a75872c207e82e83377d03bab7d72496e94a311cccbcdeccbbb089bf4ff385fa9c5a2e6a836b7a6cd552493cb215a8296e346727035a41a00e7f6d6980f2 src/common/ db_logs.py
compare_digest b1134de8a791cfcb7aea0dab9f2f9439621badf236cd850b344117fb791f0a2c1597445317f7e35b070dcc10e40a635801cf55b555ccfb12e354df930cdcfdd6 src/common/ db_masterkey.py
compare_digest c9000d541149835aa0812aa4b3108356d2f9465a767ea4273ece6340212eff287447af9247df4cea309ef7f8a5cfc887a01c8190523d1616164dd0f98718905f src/common/ db_onion.py
compare_digest 8b9feb9fc4a9897b00cd01a2970fea98e2139676149b75891376d4714c477a06a2cffc42f31e927e12f063c7c673d6f05a3a8e5f0a8f0194d3450192f619921d src/common/ db_settings.py
compare_digest 7a673e6feb7a5b2e3417d2c0eee82a59b3730a5d241938a84fd866dfc838c3cd63d7ef96772d43f62df740a2ba1001456746dd6c86e950484eac3ebabed498ce src/common/ encoding.py
compare_digest 00ad45d8fba1a605817a9f5d64cdfd6aad9c618db66befe682728a2291384c67bf5e80a5257211717a86e4a51c7e8c74f8f7ccccc3b4ac3c6f0f4c4e1b3cc98f src/common/ exceptions.py
compare_digest 36fe744ac924cdf4c43babc037392e1717c6be0b5ef518be896ca76c74385a93dfe84fe5d2e4437e00b24ce694e0be243305c7c1f1b8722adbd0ae9f5806114c src/common/ gateway.py
compare_digest 604893a2814219b2ed4e69b45d9ac2f8c2b5fc066bd085e86b76ef9df9984e6113f79fcaf3b9eb1197a9c9fc92cf524269e595d03b5009c46e8889d813475408 src/common/ input.py
compare_digest 3379d330272e599bfb07f0591256725eabdf503423993e29f0d2f335581c23934b13ad9d477d198b866bf2c7bb2f59f2a93d634046ed619f1441019e6172aa93 src/common/ misc.py
compare_digest a4246dc24cccf5d1751a1c80ec6f195954fd6541b7b1573acdf29bd8d3fd4085949201a2950a298c8ec7bb153fe33626a24e0fa8856d0bc4cfd3183a5ef931fa src/common/ output.py
compare_digest c4d97b497b341f0e7865a4e27a2a2ffd3b3c5a7bfbf72f4676f6b65d6ba66a2adb8fed563f88fa25cef555f0042290ef0ae4cbeed1697a2e19a3b8cff0b9ef1b src/common/ path.py
compare_digest 4365ed3b6951525cb1ec8dc1177d7fd74d5dfa5eab1ca8934775391a8736eed4df039684f19ccc2d8022f20c8cf93a57a736b259e8c7235da5060c5f62057c98 src/common/ reed_solomon.py
compare_digest 0de1ca8036e50c24cd0637b44ea7444d601cdde4d247e96a3bfc5ad817de99f0a8a53257a16296fe92c254a7e566b1d4abde93dccdaa416c3bb139fc0f3393a5 src/common/ statics.py
compare_digest f05c27de8d1a90dc72aeefc9e9b7ac350140668cf2ec2a18c8f33ab7219fe5e13557717f68bada8357bb98d41fbafd56754eaae523d9e660670066727ef137f7 src/common/ word_list.py
compare_digest 2e9a7e53ed381f7c75499fa84c4e5c1d29d891fb5ebde5f404ded73689e8794327604876955b98c40c2e711fb4869edebf1f06d8b17ed92a02e28938091bd958 src/ __init__.py
compare_digest 2e9a7e53ed381f7c75499fa84c4e5c1d29d891fb5ebde5f404ded73689e8794327604876955b98c40c2e711fb4869edebf1f06d8b17ed92a02e28938091bd958 src/common/ __init__.py
compare_digest b6ed487f95631e2fb72f88e27cc612090f79a232e1984c3da0bb6a6cc0205b7843eec2525135503a1f363ebcd41acf46255103e0ba6a91cbb926a6525dd8f1c9 src/common/ crypto.py
compare_digest 70e6a3638e3b5953153b4ab70aed16763bae68c0a5d9284057cdd8dcce2491a5caf2061e9d40083e7cfe4eec7c7625ff5127a532661bcc02c27026821397e49b src/common/ database.py
compare_digest 1a4ca913dcd30418d0340f8c34e51fce4949e2d16149c7f7b41a02c536066cb24d4168de5ba64086c1299f5b6ad10b35fa1c16037fecd2e4576094c106294806 src/common/ db_contacts.py
compare_digest 2478f5dfb1f0b0493a6692294aae064f2b26671d84006a926e0b7e71e1d70995f3406a22ab12e3fca909ae448218a9e5cd6b9802c2df310e32573efc767303b5 src/common/ db_groups.py
compare_digest f04237c84aa8df5ed5f08c8f5c275fa3f97db557f441feaaf71045538bd1f33a0fc910ff43bd7153a7aeac05a348b3c1020c534cc342c761f91280d09019b6c9 src/common/ db_keys.py
compare_digest e85583d1bbe9f04640f9347600a27bfa98a28d208de988dcf923d8158c165a6badb91a176fc3cf138f32aab6350cfabc365619652bd25c6d250359be008fc3e2 src/common/ db_logs.py
compare_digest cefbb2f59fc5e0cff3e86f59db3a00bf5c6ad07ff056fc82252aa838732c4d5ce759a84dc278b9dcdbbcfe24f26077072f939947281a60e67627d5248655599e src/common/ db_masterkey.py
compare_digest b46670b84d392cb748e76554a4ed72dd8c020ee843d4d9b6d1f4d54ea2c77ca783b4ff9fd0dcca7100bfdb4661ca1248f786b50b5230dc76e8f200d352989758 src/common/ db_onion.py
compare_digest 1d92f8e369b8a8d1b3b9edf2a66519e74209ca5ddd1f0d3e321e5075466e13035ba2f34eb5f93de0213a273db7e0aa88bdd110c6a6e63b1fd83c55305efb8917 src/common/ db_settings.py
compare_digest 5ba06fd066cbcb055a6c14683bd3b424b41a135213501d604929b2ddb667c34a9ab25fb74fb8cd5d3c8a709cf3747c0e7f06b3a8ef80f7e5ad02ed7e87dabff5 src/common/ encoding.py
compare_digest c85299b1f59a350f3284fef956f6627397da36f35ed85161cc018d9b3422943018a99c57841516cc6f5a818a558d05ab9d768ffa4eea0b9fc5baa2d470ce5296 src/common/ exceptions.py
compare_digest 170a5db2b1d9e1b3445fcaa3e3e76fda11a1e8df7459b98efab8d8c634f94233706aa7b71e766251d211af93c062eb9b7fb18d9b3d0cd8e223262bc01faf26ba src/common/ gateway.py
compare_digest 45471974fe553d516e81b1548d93e38f92caf2104963827a04988c1627afb08429ef3abc82e2400e8706a0071fa2b4d5255f8ebfca607ff25fffa6bc1c9658c5 src/common/ input.py
compare_digest dea694844fe207a1df84e5e954c0f009449a06513cdb7341f7cdc98761fb81b663258d6ecd7741b2f5c3db9d19730e83a35fbb638d448aaca333735811238c92 src/common/ misc.py
compare_digest c30a5df2a0eadfce97d2df1f142ce8ab0064a9de5231f855df0da95ef2e5378fbcb4373ca03efb6d43c598fe0d36bb3703124ce1ff77d035fc7a4cc54bb5b7e0 src/common/ output.py
compare_digest a13de0bd9308db2b566d9a2fde25debd617f09dfc403a126a4d0f0015206a1b2e2b1ff23e32f48bfad4dd8fee95756d6ee4dbd3f2ccb6aeaf13c0321b91bdba6 src/common/ path.py
compare_digest 2bbc79ad9621d7529c44665525840fa92ad97fb65959e8cc35b1b36344d33dc29a75ace3bcf48338195500a7fddc668f9b3c8775d74617551e46f6f92c8b90c3 src/common/ reed_solomon.py
compare_digest a412d6f1004f9515dc07519b27b6ed860380a7a328ada27eda99a282082e71c7ddf4a4e6ad8aabb9db3ec38dac2ab09ca56b2c69e2ee35e53867d9d4b5bb0b99 src/common/ statics.py
compare_digest 0ca623e729844bb569eab70c12c6f31c74e342bb131faec37bbcb8db9c3b2eb806357937f6ae764604d8a4482ba95fe1cf61cd1e6ceea4882189f38f8a93db4d src/common/ word_list.py
compare_digest cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e src/receiver/ __init__.py
compare_digest ace8e64f6fddb546154fc912876240227d83b817780225f122320b99990c60743e9d98af1c45dd070d3f49f3e6a7a9eea621e565b4edd51536ff8b2cf56ca2ac src/receiver/ commands.py
compare_digest 051a1eac8e1e177bdf1c94972ed511d56d9ccfb3dc97c6418355b4415b2d1dff42c4ef5420de05d90e1697375f4db119e04932c05d6a93a89e03e7ca4c7c7346 src/receiver/ commands_g.py
compare_digest 9f8fe1cfdeeb1a8bef02b213626f77cd86d6013c55b868497a5976a04e458abdd7957a9287fe02caa050835124a2880e578d1172d72093e7099431a3fe07d8f9 src/receiver/ files.py
compare_digest 6eae2793bdd72b9581cbbebc012a70b11744c2585fda1d1e253ff4d67cdc1d316d1f3ad7e391f5197aa1447ce21b0ecbc3e34517a8932ecd9eec7ff5d7313b5b src/receiver/ key_exchanges.py
compare_digest 5b933072ba33349d2ef46bd253748a6ca82ef503de83761e7aa2e8bca5880f12d0c20b6eecd8ae71fe300094d78e18bb8a7a0af5d0de21144e8e27cf51bfe3a7 src/receiver/ messages.py
compare_digest 755774a1954690abcb4905a03377f6367c8ea4f9b08b05dac53326c0576ffab38cf49f4e5a4357f56d3597385489800b2e1e0cbfb5f858510328cbded4a9d002 src/receiver/ output_loop.py
compare_digest a69bb7fb303072c813917e008126726cb93d921232051d9276901afb72287d79bdaf4e98f8547754c64aeb4453f723dbb59906e59e78b3c8fcd7d4d3194504a3 src/receiver/ packet.py
compare_digest 20c6754ddb6261c7a3b479e6ab7bf78eb0ef8783e2141373d7aba857f413091b78dcc9c32667dd8f8d5c41927102da7e35c4c4fcb0aa7376dc42b08c0c01d6e2 src/receiver/ receiver_loop.py
compare_digest 96550b54c0b9a287974debadf838294dfc7d2f1b59340f8af7b0cfe91bedde8b35c5f6d0e5d61b359524513c6f15c50de6bf7d55b350c26329986493dfadd5f7 src/receiver/ windows.py
compare_digest 2e9a7e53ed381f7c75499fa84c4e5c1d29d891fb5ebde5f404ded73689e8794327604876955b98c40c2e711fb4869edebf1f06d8b17ed92a02e28938091bd958 src/receiver/ __init__.py
compare_digest 2da8d697103a3a4fd95e1a885a40be89779aef7f8f1ca3d1567b5edcf50692b7a899c42140eb9777622bc80a0f0a20c4b2d751ef394d108893f4d04c2afe637e src/receiver/ commands.py
compare_digest 1cc28058c8efbd8a9597455375a4f45ec7f3368bf269c93c07c9c8f26bfb4fe7120b96ed24231ee634e5a5e7c72a157a0976bf1aced2ab4de030903b27bb25e0 src/receiver/ commands_g.py
compare_digest 4e253f29869de701cd0a7f642b4e5e0637c0ec0bcda6c94ee2ac6dac7b78d18626c5d099d475338bc8bfe03502782b873bb8e0e4fa5b6b38a2d1b1a6f7e32e60 src/receiver/ files.py
compare_digest 452bcb094829bec416b09679d3d566e668d23a16a3bd67bc76fc1d020f4d7de6ac66911cfcfbe40386e35f70392215c9979b1bb264a75506c83e7c27f9980a08 src/receiver/ key_exchanges.py
compare_digest d6f54bdc5c000ac2addf8a40d359fad289e8926d04807bfc784cfe1033a91bc6cc05a2c65cfdea4cfb383cbb53d9614275d4d0ae567c726bee269b5ffff734ff src/receiver/ messages.py
compare_digest e123ac2b4f568875e0d7b801a41fdd37d2d8062d8bcd98ec2913d696070e948d6c161577d82105a21f60dd5619a9a704a7dec6828d676f617efda6d08c3423b1 src/receiver/ output_loop.py
compare_digest 4bcbe8364c33f3b9d69d5a52768b4779f493ed174308bd4bfff9f9748dcd7530d1c9d91b53fa5fddb211ff687afc90e88c513515f8ce991e7a43eb8326a23f2f src/receiver/ packet.py
compare_digest 62d8f02f133edc70fa7a46d53f4e44ef22f9d16541424103001db20f2db6cfb5f8d96ed34c0eb9a61d8c6ae56b5f51f95ef0edfda5b0b5a2c23d85d988f7c10e src/receiver/ receiver_loop.py
compare_digest 40b8c61f0439e64ba6fdc994a944dce22d556b20e9aa76722921bb92a79d8a561f23ce3924ca33fda1f8f5a83b6bd0d089575779b301ce0fee1f51fcb83065e6 src/receiver/ windows.py
compare_digest cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e src/relay/ __init__.py
compare_digest 5d34be330731b8b722c3580f12abd2515984ba0589ea95c0960ae099a13b9d66118a5af5cdf137bcf376bde88b0edf055888d2a5fc267081ea118fffc05a2b08 src/relay/ client.py
compare_digest c32b5b78e28567d5ef0c6f41f1a3c69f6d31b1cb3b9d58faf6516fa27fc62e12b2f359f7b60176b5fe20a2d94725f5fd76a879d4b795513d1588f8ecf9bae5b0 src/relay/ commands.py
compare_digest c72a57dda6054b9c020f694740751159df4602f11f7759ff76e48a8b7f07ec829b39d6c366613f3a69e36d3dca0823491f3232506f3a03ecc9ded3e2a4f0230a src/relay/ onion.py
compare_digest fe108f1f642bdfd01d813fd0a183e2f6039c1e64a5ee57f6159fdc67d7574a0ba0ee23608a2a8499071f0844b7d2db6b6a14740046d5d664e09856c35680a0dc src/relay/ server.py
compare_digest 9459e6cbe17fefac356e5ce183d923efff66f6d304111f2c0dbacdfb22a92df77bb11134faf8c15400bc59174ecbec1ea0b436065a9d49d3af70b46b24a77764 src/relay/ tcb.py
compare_digest 2e9a7e53ed381f7c75499fa84c4e5c1d29d891fb5ebde5f404ded73689e8794327604876955b98c40c2e711fb4869edebf1f06d8b17ed92a02e28938091bd958 src/relay/ __init__.py
compare_digest 946baf7d5e67dc30adfcaa92dceb4f8ddc7421f0171c4a328ceef886d9bf8f78bf044a19ff25490fac6ba51293b7beceee2feb21457d5fb80a4b93966db6ec68 src/relay/ client.py
compare_digest 69df9dfee65de516f835174189d388b377aa0a08fc71ac660e50da7bb912319bb526b735f7cb83e560bbef9acfe40dbf04f433d185ced4cc295bb8bf63b2afcb src/relay/ commands.py
compare_digest ef65dce3e6cc0b0f362972ceaab4151a798c18ca872af9eb23927b854c28d883344fa00813546eb28b4cabf074f3da97d7cf978f9e5261efd84497510f154057 src/relay/ onion.py
compare_digest fc355ee1118a20202a9e029a80f0af83a876843c4f8a7458e5af99a96427dd039c61601c9fc3f90d13512a7837241609825988a482118dff3916fc955e8bfce2 src/relay/ server.py
compare_digest a61d9d56efabc7a302e0bbf3a7c7b52d8552ea0d736582ecfe3a7c768fbcc67beaf07c2087e310ab45cdb440004063986cb7bb76b81fb140a236c79399dc7fd0 src/relay/ tcb.py
compare_digest cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e src/transmitter/ __init__.py
compare_digest 1fe0fda8d611ab7db6aa2911e7f6362007b1bbd53172eb213b69f9a14bb14e87c27a9ffad6def1357b926b3b51cb429d7b8db03f54c7465f6d8c95baedc23988 src/transmitter/ commands.py
compare_digest 74291c8b952588caf7c3c6ac3e99679eaf97ba9113bfd560da4c461e16cd36c28d78a4e8750090d18606a9c1610a1a781d37fc6fe52baf47a955e0b5ec801b97 src/transmitter/ commands_g.py
compare_digest 3a2940afcf8752f33c8f5a06293046a83d245630dde1a6877eb3c724cb03ee7b84147b4a57a62135a32862b40db1dc6c6823bedc52404146aeb6e9ef1f79692f src/transmitter/ contact.py
compare_digest 2e78e578e62771adf7ae9f2a576d72b69a64e6b28649361244bd7a75959f2022845d73d68c3d6a4586841bb10cce906edf1c5d863fbf99b6d081dfc030f98a3d src/transmitter/ files.py
compare_digest 7cb9fc9d095f40ce2de6b49c9bd58b9dcab6b835fe7749dce8642c3c87b0eee10c4e53ff986c09ae26fb7b8aad7fe87c5fd56a734f2e013f69195213b9d5e9ec src/transmitter/ input_loop.py
compare_digest 29b71c3341395eec84b5f68719cd434da5ecc8a4a7ad1da91d49a5177b50c13a7d33e4a051fa5e0dba5639b2ffabd1c0bba367af27490fc711d4a0c340ca24f8 src/transmitter/ key_exchanges.py
compare_digest 41798dfe91868b37c130a373accac93c4200dc77bd8b6c40a38835ecf4187b955ccfaa53f842ccddf78ce5607b3e361a30a4bb53bd7cb5ab6d2fb4785454dead src/transmitter/ packet.py
compare_digest 31c6bbe595a1966ce3f0396ea7bfd5a7e070b2f5dce1987de2e471fddc698159b51a60a2734be1d4921c3e66a92ca7519d99793c1e61e626bf30607f6966137a src/transmitter/ sender_loop.py
compare_digest c5a6c85e57d4456353f89fc4b2d30fc60775511720a32287720b3b301e0d6e7539677b47c4ff8c6b6f223b93da7dfbb38d1830f43e6f25c598efd54799262956 src/transmitter/ traffic_masking.py
compare_digest 678ae2b63667d93b1d4467d029ab04778614ddf6c09dff4bb61d262373353cd7fe6b8b535292fdf28e1be36c8b57534dee9eb745ee94c72b051798ac4e1cbccd src/transmitter/ user_input.py
compare_digest 00e247854f067194f80c86c9a3b9fbe1975e600844a1f33af79e36618680e0c9ddebaa25ef6df1a48e324e241f2b113f719fc29a2b43626eeeba4b92bdbb8528 src/transmitter/ windows.py
compare_digest 2e9a7e53ed381f7c75499fa84c4e5c1d29d891fb5ebde5f404ded73689e8794327604876955b98c40c2e711fb4869edebf1f06d8b17ed92a02e28938091bd958 src/transmitter/ __init__.py
compare_digest 9629bf56ac1b2ca2e1f0164ece2333a2ea73ecc5bedc3231cf34b4bc199f9c03e9bb567b7fb6e2e73ccc80d4689c419a9e1241b9d5c6351c467f7754e81d7fbc src/transmitter/ commands.py
compare_digest 7edf5c9b72486af7e4ec870bd5b3b6fed5f1c143463b83ea8732842662d8509604319c2bc68edca40714371c992fcfb2810dbb1e105a0061c32cf66a31e2d7ed src/transmitter/ commands_g.py
compare_digest d98bd8b8097e024a255de0783265d7d521368d31a234e1118408fb9353a90ac7fac3286b2aacbe2c417d7855f8bfc126675926ee505607d7ed2a3539225b5ad2 src/transmitter/ contact.py
compare_digest 2c1eceb95d0e3dced8d5b598c028bfcbe749a1e331e82a8a81976b1e13e33eaab1d378e652c1f420dcb099ab78cd62659fd3e589a11631497b247b3f8f59c3e1 src/transmitter/ files.py
compare_digest 47e91c019b4a606309f48f1c60b19a6883e460769769e8e4de1c1a9f7113642b61e8f4de3292c36a6eb8f51bfde0cb7e6687d07234f6479cb28bf9a194916bcc src/transmitter/ input_loop.py
compare_digest 750e8f0f1b0a243d3c0a9c42d32160aea213d094d3aaffa5422da3a9fa2de5ef0bd2f9e13e186776527b46c7fb800067389bb0b3db626c1fa0d64100216bf0ba src/transmitter/ key_exchanges.py
compare_digest f4e0d9c913382b6745a2823294802a603db5fba41d68a42061d0e8c244da2199638d52e3b8b3150a165f3348e293cff48c4663d2bd1b3c36b3a4b4505deb7cd1 src/transmitter/ packet.py
compare_digest 22886a86c203a97410fdd1f3b7831eb8f091de45aa323ebbdd2901533c61e7418e468d1e2d37c74074f66c24083917fe3fda94dc809236fb51342414cf0e4436 src/transmitter/ sender_loop.py
compare_digest 9ea30785b8459e342ac71666012251c76a16a55f059cfdc7b8ad6c74fc7aae69965adf4382654bf686d9acb79b525a3513ddcf6a49bb6459caea124c5fb69eea src/transmitter/ traffic_masking.py
compare_digest 1f082487590125de9ddeefe696be062ce5bc3fc1f82c3117dab3de2349dca5f859c7aaa3f626d8fe306d5b64c34bfdb1b0b50a0d7f4ed156f1043d35cdb2b618 src/transmitter/ user_input.py
compare_digest 827869782567511343923f7164d5469733691664257c94bd488be451467edcfa4a2513f1e3ce48094f0aa4067b61c1b52d8b90ed3c479907e66e9d870ad6d18d src/transmitter/ window_mock.py
compare_digest ff1ff1c5fe95726607f15a2e3e2cecae899b424497eae17a2e52d9279b012752a2330fb70a68491b4b3cf60f205f9ade02aaa7c5e28af86b94c703c16be8abad src/transmitter/ windows.py
}
function process_tcb_dependencies {
# Manage TCB dependencies in batch. The command that uses the files
# is passed to the function as a parameter.
sudo $1 /opt/tfc/${SIX}
sudo $1 /opt/tfc/${PYCPARSER}
sudo $1 /opt/tfc/${CFFI}
sudo $1 /opt/tfc/${ARGON2}
sudo $1 /opt/tfc/${SETUPTOOLS}
sudo $1 /opt/tfc/${PYNACL}
sudo $1 /opt/tfc/${PYSERIAL}
sudo $1 /opt/tfc/${CRYPTOGRAPHY}
sudo $1 "/opt/tfc/${SIX}"
sudo $1 "/opt/tfc/${PYCPARSER}"
sudo $1 "/opt/tfc/${CFFI}"
sudo $1 "/opt/tfc/${ARGON2}"
sudo $1 "/opt/tfc/${SETUPTOOLS}"
sudo $1 "/opt/tfc/${PYNACL}"
sudo $1 "/opt/tfc/${PYSERIAL}"
sudo $1 "/opt/tfc/${CRYPTOGRAPHY}"
}
@ -156,66 +157,66 @@ function process_tails_dependencies {
# Manage Tails dependencies in batch. The command that uses the
# files is passed to the function as a parameter.
t_sudo -E $1 /opt/tfc/${PYSERIAL}
# t_sudo -E $1 /opt/tfc/${STEM}
t_sudo -E $1 /opt/tfc/${PYSOCKS}
t_sudo -E $1 "/opt/tfc/${PYSERIAL}"
# t_sudo -E $1 "/opt/tfc/${STEM}"
t_sudo -E $1 "/opt/tfc/${PYSOCKS}"
# Requests
t_sudo -E $1 /opt/tfc/${URLLIB3}
t_sudo -E $1 /opt/tfc/${IDNA}
t_sudo -E $1 /opt/tfc/${CHARDET}
t_sudo -E $1 /opt/tfc/${CERTIFI}
t_sudo -E $1 /opt/tfc/${REQUESTS}
t_sudo -E $1 "/opt/tfc/${URLLIB3}"
t_sudo -E $1 "/opt/tfc/${IDNA}"
t_sudo -E $1 "/opt/tfc/${CHARDET}"
t_sudo -E $1 "/opt/tfc/${CERTIFI}"
t_sudo -E $1 "/opt/tfc/${REQUESTS}"
# Flask
t_sudo -E $1 /opt/tfc/${WERKZEUG}
t_sudo -E $1 /opt/tfc/${MARKUPSAFE}
t_sudo -E $1 /opt/tfc/${JINJA2}
t_sudo -E $1 /opt/tfc/${ITSDANGEROUS}
t_sudo -E $1 /opt/tfc/${CLICK}
t_sudo -E $1 /opt/tfc/${FLASK}
t_sudo -E $1 "/opt/tfc/${WERKZEUG}"
t_sudo -E $1 "/opt/tfc/${MARKUPSAFE}"
t_sudo -E $1 "/opt/tfc/${JINJA2}"
t_sudo -E $1 "/opt/tfc/${ITSDANGEROUS}"
t_sudo -E $1 "/opt/tfc/${CLICK}"
t_sudo -E $1 "/opt/tfc/${FLASK}"
# Cryptography
t_sudo -E $1 /opt/tfc/${SIX}
t_sudo -E $1 /opt/tfc/${PYCPARSER}
t_sudo -E $1 /opt/tfc/${CFFI}
t_sudo -E $1 /opt/tfc/${CRYPTOGRAPHY}
t_sudo -E $1 "/opt/tfc/${SIX}"
t_sudo -E $1 "/opt/tfc/${PYCPARSER}"
t_sudo -E $1 "/opt/tfc/${CFFI}"
t_sudo -E $1 "/opt/tfc/${CRYPTOGRAPHY}"
# PyNaCl
t_sudo -E $1 /opt/tfc/${PYNACL}
t_sudo -E $1 "/opt/tfc/${PYNACL}"
}
function move_tails_dependencies {
# Move Tails dependencies in batch.
t_sudo mv $HOME/${VIRTUALENV} /opt/tfc/
t_sudo mv $HOME/${PYSERIAL} /opt/tfc/
# t_sudo mv $HOME/${STEM} /opt/tfc/
t_sudo mv $HOME/${PYSOCKS} /opt/tfc/
t_sudo mv "$HOME/${VIRTUALENV}" "/opt/tfc/"
t_sudo mv "$HOME/${PYSERIAL}" "/opt/tfc/"
# t_sudo mv "$HOME/${STEM}" "/opt/tfc/"
t_sudo mv "$HOME/${PYSOCKS}" "/opt/tfc/"
# Requests
t_sudo mv $HOME/${URLLIB3} /opt/tfc/
t_sudo mv $HOME/${IDNA} /opt/tfc/
t_sudo mv $HOME/${CHARDET} /opt/tfc/
t_sudo mv $HOME/${CERTIFI} /opt/tfc/
t_sudo mv $HOME/${REQUESTS} /opt/tfc/
t_sudo mv "$HOME/${URLLIB3}" "/opt/tfc/"
t_sudo mv "$HOME/${IDNA}" "/opt/tfc/"
t_sudo mv "$HOME/${CHARDET}" "/opt/tfc/"
t_sudo mv "$HOME/${CERTIFI}" "/opt/tfc/"
t_sudo mv "$HOME/${REQUESTS}" "/opt/tfc/"
# Flask
t_sudo mv $HOME/${WERKZEUG} /opt/tfc/
t_sudo mv $HOME/${MARKUPSAFE} /opt/tfc/
t_sudo mv $HOME/${JINJA2} /opt/tfc/
t_sudo mv $HOME/${ITSDANGEROUS} /opt/tfc/
t_sudo mv $HOME/${CLICK} /opt/tfc/
t_sudo mv $HOME/${FLASK} /opt/tfc/
t_sudo mv "$HOME/${WERKZEUG}" "/opt/tfc/"
t_sudo mv "$HOME/${MARKUPSAFE}" "/opt/tfc/"
t_sudo mv "$HOME/${JINJA2}" "/opt/tfc/"
t_sudo mv "$HOME/${ITSDANGEROUS}" "/opt/tfc/"
t_sudo mv "$HOME/${CLICK}" "/opt/tfc/"
t_sudo mv "$HOME/${FLASK}" "/opt/tfc/"
# Cryptography
t_sudo mv $HOME/${SIX} /opt/tfc/
t_sudo mv $HOME/${PYCPARSER} /opt/tfc/
t_sudo mv $HOME/${CFFI} /opt/tfc/
t_sudo mv $HOME/${CRYPTOGRAPHY} /opt/tfc/
t_sudo mv "$HOME/${SIX}" "/opt/tfc/"
t_sudo mv "$HOME/${PYCPARSER}" "/opt/tfc/"
t_sudo mv "$HOME/${CFFI}" "/opt/tfc/"
t_sudo mv "$HOME/${CRYPTOGRAPHY}" "/opt/tfc/"
# PyNaCl
t_sudo mv $HOME/${PYNACL} /opt/tfc/
t_sudo mv "$HOME/${PYNACL}" "/opt/tfc/"
}
@ -223,7 +224,7 @@ function verify_tails_dependencies {
# Tails doesn't allow downloading over PIP to /opt/tfc, so we
# first download to $HOME, move the files to /opt/tfc, and then
# perform additional hash verification
compare_digest e80eb04615d1dcd2546bd5ceef5408bbb577fa0dd725bc69f20dd7840518af575f0b41e629e8164fdaea398628813720a6f70a42e7748336601391605b79f542 '' ${VIRTUALENV}
compare_digest 4483bdd81d63cc38e0003cd3cba995f3e21d506e2f6a64bc98a673f1ef5ccd56e8e1109ec049c9394a538b879ea47dbafa0c575cdc02eedb1b9172e8fc045ca6 '' ${VIRTUALENV}
compare_digest 8333ac2843fd136d5d0d63b527b37866f7d18afc3bb33c4938b63af077492aeb118eb32a89ac78547f14d59a2adb1e5d00728728275de62317da48dadf6cdff9 '' ${PYSERIAL}
# compare_digest a275f59bba650cb5bb151cf53fb1dd820334f9abbeae1a25e64502adc854c7f54c51bc3d6c1656b595d142fc0695ffad53aab3c57bc285421c1f4f10c9c3db4c '' ${STEM}
compare_digest 313b954102231d038d52ab58f41e3642579be29f827135b8dd92c06acb362effcb0a7fd5f35de9273372b92d9fe29f38381ae44f8b41aa90d2564d6dd07ecd12 '' ${PYSOCKS}
@ -232,7 +233,7 @@ function verify_tails_dependencies {
compare_digest f6a78508cb87050e176005a088118f8ad87b17cf541457d949e5712c356f8c4de7e7516ba066e5c4bb9ced5c7e7590ba7e07d4ae7fc7190487bf27f1bb9d0668 '' ${URLLIB3}
compare_digest fb07dbec1de86efbad82a4f73d98123c59b083c1f1277445204bef75de99ca200377ad2f1db8924ae79b31b3dd984891c87d0a6344ec4d07a0ddbbbc655821a3 '' ${IDNA}
compare_digest bfae58c8ea19c87cc9c9bf3d0b6146bfdb3630346bd954fe8e9f7da1f09da1fc0d6943ff04802798a665ea3b610ee2d65658ce84fe5a89f9e93625ea396a17f4 '' ${CHARDET}
compare_digest 06e8e1546d375e528a1486e1dee4fda3e585a03ef23ede85d1dad006e0eda837ebade1edde62fdc987a7f310bda69159e94ec36b79a066e0e13bbe8bf7019cfc '' ${CERTIFI}
compare_digest fe5b05c29c1e1d9079150aaea28b09d84f0dd15907e276ccabb314433cfaac948a9615e10d6d01cbd537f99eed8072fbda7cb901e932fbab4f1286ae8c50471b '' ${CERTIFI}
compare_digest 9186ce4e39bb64f5931a205ffc9afac61657bc42078bc4754ed12a2b66a12b7a620583440849fc2e161d1061ac0750ddef4670f54916931ace1e9abd2a9fb09c '' ${REQUESTS}
# Flask
@ -257,11 +258,11 @@ function verify_tails_dependencies {
function install_tails_setuptools {
# Download setuptools package for Tails and then authenticate and install it.
torsocks python3.7 -m pip download --no-cache-dir -r /opt/tfc/requirements-setuptools.txt --require-hashes --no-deps -d $HOME/
t_sudo mv $HOME/${SETUPTOOLS} /opt/tfc/
compare_digest 2e90929aa61c847e1d414d427b08403679ba5f512a56d58b92ee64d47e8a2c5da18e47126e5f59faca335b3a4b5ec9857aa323d866252546a6df42c3e3ef3884 '' ${SETUPTOOLS}
t_sudo python3.7 -m pip install /opt/tfc/${SETUPTOOLS}
t_sudo -E rm /opt/tfc/${SETUPTOOLS}
torsocks python3.7 -m pip download --no-cache-dir -r "/opt/tfc/requirements-setuptools.txt" --require-hashes --no-deps -d "${HOME}/"
t_sudo mv "$HOME/${SETUPTOOLS}" "/opt/tfc/"
compare_digest dd18da86ba566a7abde86890f6fa7c5a4dee34970927ef883a07a44ca8992713f9c2c4c87538d18d7bbf19073f1cc7887b150474375f24a0938cef5db097c841 '' ${SETUPTOOLS}
t_sudo python3.7 -m pip install "/opt/tfc/${SETUPTOOLS}"
t_sudo -E rm "/opt/tfc/${SETUPTOOLS}"
}
@ -302,11 +303,11 @@ function steps_before_network_kill {
sudo torsocks apt update
sudo torsocks apt install git gnome-terminal libssl-dev python3-pip python3-tk net-tools -y
sudo torsocks git clone --depth 1 https://github.com/maqp/tfc.git /opt/tfc
sudo torsocks git clone --depth 1 https://github.com/tfctesting/tfc.git /opt/tfc
verify_tcb_requirements_files
sudo torsocks python3.7 -m pip download --no-cache-dir -r /opt/tfc/requirements-venv.txt --require-hashes --no-deps -d /opt/tfc/
sudo torsocks python3.7 -m pip download --no-cache-dir -r /opt/tfc/requirements.txt --require-hashes --no-deps -d /opt/tfc/
sudo torsocks python3.7 -m pip download --no-cache-dir -r "/opt/tfc/requirements-venv.txt" --require-hashes --no-deps -d /opt/tfc/
sudo torsocks python3.7 -m pip download --no-cache-dir -r "/opt/tfc/requirements.txt" --require-hashes --no-deps -d /opt/tfc/
}
@ -323,8 +324,8 @@ function install_tcb {
verify_files
create_user_data_dir
sudo python3.7 -m pip install /opt/tfc/${VIRTUALENV}
sudo python3.7 -m virtualenv /opt/tfc/venv_tcb --system-site-packages --never-download
sudo python3.7 -m pip install "/opt/tfc/${VIRTUALENV}"
sudo python3.7 -m virtualenv "/opt/tfc/venv_tcb" --system-site-packages --never-download
. /opt/tfc/venv_tcb/bin/activate
process_tcb_dependencies "python3.7 -m pip install"
@ -397,26 +398,26 @@ function install_developer {
sudo torsocks apt update
sudo torsocks apt install git libssl-dev python3-pip python3-tk terminator -y
torsocks git clone https://github.com/maqp/tfc.git $HOME/tfc
torsocks git clone https://github.com/tfctesting/tfc.git "${HOME}/tfc"
torsocks python3.7 -m pip install -r $HOME/tfc/requirements-venv.txt --require-hashes --no-deps
torsocks python3.7 -m pip install -r "${HOME}/tfc/requirements-venv.txt" --require-hashes --no-deps
python3.7 -m virtualenv $HOME/tfc/venv_tfc --system-site-packages
python3.7 -m virtualenv "${HOME}/tfc/venv_tfc" --system-site-packages
. $HOME/tfc/venv_tfc/bin/activate
torsocks python3.7 -m pip install -r $HOME/tfc/requirements-dev.txt
. "${HOME}/tfc/venv_tfc/bin/activate"
torsocks python3.7 -m pip install -r "${HOME}/tfc/requirements-dev.txt"
deactivate
sudo cp $HOME/tfc/tfc.png /usr/share/pixmaps/
sudo cp $HOME/tfc/launchers/TFC-Dev.desktop /usr/share/applications/
sudo sed -i "s|\$HOME|${HOME}|g" /usr/share/applications/TFC-Dev.desktop
sudo cp "${HOME}/tfc/tfc.png" "/usr/share/pixmaps/"
sudo cp "${HOME}/tfc/launchers/TFC-Dev.desktop" "/usr/share/applications/"
sudo sed -i "s|\$HOME|${HOME}|g" "/usr/share/applications/TFC-Dev.desktop"
modify_terminator_font_size "" "${HOME}/tfc/launchers/terminator-config-dev"
chmod a+rwx -R $HOME/tfc/
chmod a+rwx -R "${HOME}/tfc/"
# Remove unnecessary files
sudo rm -f /opt/install.sh
sudo rm -f /opt/install.sh.asc
sudo rm -f /opt/pubkey.asc
sudo rm -f "/opt/install.sh"
sudo rm -f "/opt/install.sh.asc"
sudo rm -f "/opt/pubkey.asc"
add_serial_permissions
@ -444,12 +445,12 @@ function install_relay_ubuntu {
# Remove unnecessary files
remove_common_files "sudo"
process_tcb_dependencies "rm"
sudo rm -r /opt/tfc/src/receiver/
sudo rm -r /opt/tfc/src/transmitter/
sudo rm /opt/tfc/dd.py
sudo rm /opt/tfc/tfc.py
sudo rm /opt/tfc/tfc.yml
sudo rm /opt/tfc/${VIRTUALENV}
sudo rm -r "/opt/tfc/src/receiver/"
sudo rm -r "/opt/tfc/src/transmitter/"
sudo rm "/opt/tfc/dd.py"
sudo rm "/opt/tfc/tfc.py"
sudo rm "/opt/tfc/tfc.yml"
sudo rm "/opt/tfc/${VIRTUALENV}"
add_serial_permissions
@ -467,8 +468,8 @@ function install_relay_tails {
t_sudo apt update
t_sudo apt install git libssl-dev python3-pip -y || true # Ignore error in case packets can not be persistently installed
torsocks git clone --depth 1 https://github.com/maqp/tfc.git $HOME/tfc
t_sudo mv $HOME/tfc/ /opt/tfc/
torsocks git clone --depth 1 https://github.com/tfctesting/tfc.git "${HOME}/tfc"
t_sudo mv "${HOME}/tfc/ /opt/tfc/"
t_sudo chown -R root /opt/tfc/
verify_tcb_requirements_files
@ -478,8 +479,8 @@ function install_relay_tails {
install_tails_setuptools
torsocks python3.7 -m pip download --no-cache-dir -r /opt/tfc/requirements-venv.txt --require-hashes --no-deps -d $HOME/
torsocks python3.7 -m pip download --no-cache-dir -r /opt/tfc/requirements-relay-tails.txt --require-hashes --no-deps -d $HOME/
torsocks python3.7 -m pip download --no-cache-dir -r "/opt/tfc/requirements-venv.txt" --require-hashes --no-deps -d "${HOME}/"
torsocks python3.7 -m pip download --no-cache-dir -r "/opt/tfc/requirements-relay-tails.txt" --require-hashes --no-deps -d "${HOME}/"
move_tails_dependencies
verify_tails_dependencies
@ -499,11 +500,11 @@ function install_relay_tails {
remove_common_files "t_sudo"
process_tails_dependencies "rm"
t_sudo rm /opt/tfc/${VIRTUALENV}
t_sudo rm -r /opt/tfc/src/receiver/
t_sudo rm -r /opt/tfc/src/transmitter/
t_sudo rm /opt/tfc/dd.py
t_sudo rm /opt/tfc/tfc.py
t_sudo rm "/opt/tfc/${VIRTUALENV}"
t_sudo rm -r "/opt/tfc/src/receiver/"
t_sudo rm -r "/opt/tfc/src/transmitter/"
t_sudo rm "/opt/tfc/dd.py"
t_sudo rm "/opt/tfc/tfc.py"
install_complete "Installation of the TFC Relay configuration is now complete."
}
@ -511,13 +512,13 @@ function install_relay_tails {
function t_sudo {
# Execute command as root on Tails.
echo ${sudo_pwd} | sudo -S $@
echo "${sudo_pwd}" | sudo -S $@
}
function install_relay {
# Determine the Networked Computer OS for Relay Program installation.
if [[ "$(cat /etc/os-release 2>/dev/null | grep Tails)" ]]; then
if [[ $(grep "Tails" /etc/os-release 2>/dev/null) ]]; then
install_relay_tails
else
install_relay_ubuntu
@ -562,10 +563,10 @@ function check_tails_tor_version {
function kill_network {
# Kill network interfaces to protect the TCB from remote compromise.
for interface in /sys/class/net/*; do
name=`basename ${interface}`
name=$(basename "${interface}")
if [[ $name != "lo" ]]; then
echo "Disabling network interface ${name}"
sudo ifconfig ${name} down
sudo ifconfig "${name}" down
fi
done
@ -592,23 +593,23 @@ function add_serial_permissions {
sleep 3 # Wait for USB serial interfaces to register
# Add user to the dialout group to allow serial access after reboot
sudo adduser ${USER} dialout
sudo adduser "${USER}" dialout
# Add temporary permissions for serial interfaces until reboot
arr=($(ls /sys/class/tty | grep USB)) || true
for i in "${arr[@]}"; do
sudo chmod 666 /dev/${i}
sudo chmod 666 "/dev/${i}"
done
if [[ -e /dev/ttyS0 ]]; then
sudo chmod 666 /dev/ttyS0
sudo chmod 666 "/dev/ttyS0"
fi
}
function c_echo {
# Justify printed text to the center of the terminal.
printf "%*s\n" $(( ( $(echo $1 | wc -c ) + 80 ) / 2 )) "$1"
printf "%*s\n" "$(( ( $(echo "${1}" | wc -c ) + 80 ) / 2 ))" "${1}"
}
@ -627,11 +628,11 @@ function check_rm_existing_installation {
function create_user_data_dir {
# Backup TFC user data directory if it exists and has files in it.
if [[ -d "$HOME/tfc" ]]; then
if ! [[ -z "$(ls -A $HOME/tfc/)" ]]; then
mv $HOME/tfc $HOME/tfc_userdata_backup_at_$(date +%Y-%m-%d_%H-%M-%S)
if ! [[ -z "$(ls -A "${HOME}/tfc/")" ]]; then
mv "${HOME}/tfc" "${HOME}/tfc_userdata_backup_at_$(date +%Y-%m-%d_%H-%M-%S)"
fi
fi
mkdir -p $HOME/tfc 2>/dev/null
mkdir -p "${HOME}/tfc" 2>/dev/null
}
@ -642,12 +643,12 @@ function modify_terminator_font_size {
# wide screens. The lowest resolution (width) supported is 1366px.
width=$(get_screen_width)
if (( $width < 1600 )); then
$1 sed -i -e 's/font = Monospace 11/font = Monospace 8/g' $2 # Normal config
$1 sed -i -e 's/font = Monospace 10.5/font = Monospace 7/g' $2 # Data diode config
elif (( $width < 1920 )); then
$1 sed -i -e 's/font = Monospace 11/font = Monospace 9/g' $2 # Normal config
$1 sed -i -e 's/font = Monospace 10.5/font = Monospace 8.5/g' $2 # Data diode config
if (( width < 1600 )); then
$1 sed -i -e 's/font = Monospace 11/font = Monospace 8/g' "${2}" # Normal config
$1 sed -i -e 's/font = Monospace 10.5/font = Monospace 7/g' "${2}" # Data diode config
elif (( width < 1920 )); then
$1 sed -i -e 's/font = Monospace 11/font = Monospace 9/g' "${2}" # Normal config
$1 sed -i -e 's/font = Monospace 10.5/font = Monospace 8.5/g' "${2}" # Data diode config
fi
}
@ -678,7 +679,7 @@ function dpkg_check {
i=0
tput sc
while sudo fuser /var/lib/dpkg/lock >/dev/null 2>&1 ; do
case $(($i % 4)) in
case $((i % 4)) in
0 ) j="." ;;
1 ) j="o" ;;
2 ) j="O" ;;

View File

@ -1,16 +1,16 @@
-----BEGIN PGP SIGNATURE-----
iQIzBAABCAAdFiEE6o84umdLJC6ZRIRcmBNw6XJaD7oFAlxJNJEACgkQmBNw6XJa
D7pw8RAAsqRkR/my0pRlGEbUdMcEfi/vszdflOF6jCAXdACT+eptutFG9Yeu1U9c
A0ynOS2C8BsKjW9BY7wATTyjmGcXvqrP8XaCJ62W7tkPWSgu+ulaxM0At+LwO1R4
p0uls9SonpwLwBTQHbnq9WvzQgp4htpqk0zayh5+QpZz8ca8KoFzHyTufGTvnHBI
OHW02hfZhR7fmYzqURGzuO6oDS1Xx4gtOnR8Lq9tmesStT0K7wPmSpJw0KSwXWUW
1c4j2acBMLwWX3sfrZOrB6Ov+X2bUOsfPHDz9wppbmQQATem5KCzxS4OAFVysb5u
1Tp6/lGkWe6Tl+yI37M7mb8XbHZYm58XH8h4T3Zs4ZKm+3kJK5T5kRhqIe1lxYyf
i+wZc5uBoWGVHOVea2jF+4l4GtqmgHwDiIpQ7jmVMwWZgyPm0nLgjdvyj0cHukab
uvCX1yfHKP/A/EQPNjbd0EOOARQuNcLt86v4NVg2MZnVNFVqiNhpwOP3tzL+tJPt
Cyibzvvhy+FMMdfHqi/b6HgFazw0dhMLS9OfmBeLOeJxFwmf7yoVusY8a6dEQLAW
VJgOVvRq3bjHM/sk6Qc+Qk2fsudaJDgtTVbvoINnQqrII0E5hNqfzSNdkM8G3TW4
gz7BX8drjJ6L/6LNDRmfaEfvAfrwWUv1AtHY01A4+Fn4o6N5xjw=
=+4ea
iQIzBAABCAAdFiEE6o84umdLJC6ZRIRcmBNw6XJaD7oFAl3lt0MACgkQmBNw6XJa
D7qXAw/9Ht6hSg89F4Yuw3pzq/6j5Ab5kVcE7OO1Nk0FPzJ5wPHuTqjetZhg1XGA
C/DzKsEE5ArSgvRGCWPd/dRgcyQ5IHDEcCB8VtKLaxTaZzP9vmM4KlRRK+ISs8te
ufH7RdAUdHJrhR5eO2LHy+xb4y736/sl9FHYq3MtnSXSBphhNQrb+lSjgDn+AEZt
t0VDN0V0MIwQ4iT65p+6rURUab18NvRzEmdfh71a89NL8sMEP4Ww5dGxhEQVAkxP
bGMuPKc4fVlhj3HA61aHtFSWxboIchzVM/tVeLSh1Rw2rHPvRmmbR/zHbN+2ezdt
RFWqdoDQ32lpzT6eTi6XpE8B6HInAp1pgOhzBsv98TirURvx1nF9O5yQKPUyDxYr
zajp/RpHsuf8j65pJNWQntvU7pVkQG7oZUPkVvrN12ekQi5kKLy0+9L/TuC53uTd
5g7cr+HGg3dU+zh/iHN7AEL0ozp3z7AKfgEtZHiq2NV6fZiq9gxiyHYSYtQ+vuz+
ATb9pYDOy+wqI8IxL+7X9Wckk192hB63U5ML4sjlVXK8+gct4p8lE8ePO2GI8kwm
d/5jeGUiwEdMURAoWASVkwASBezkWVchSBoPt2BqjuhKvirygKddPU+RCRUoMzqn
5EEgscR903+9FpwOhmgm0iqn5UEvBubuJA0LeoSlx/+IAjgQKP4=
=dQkc
-----END PGP SIGNATURE-----

View File

@ -1,5 +1,5 @@
[Desktop Entry]
Version=1.19.11
Version=1.19.12
Name=TFC-Dev-LR
Comment=Developer configuration
Exec=terminator -m -u -g $HOME/tfc/launchers/terminator-config-dev -p tfc -l tfc-lr

View File

@ -1,5 +1,5 @@
[Desktop Entry]
Version=1.19.11
Version=1.19.12
Name=TFC-Local-Test-LR
Comment=Local testing configuration
Exec=terminator -m -u -g /opt/tfc/terminator-config-local-test -p tfc -l tfc-lr

View File

@ -1,5 +1,5 @@
[Desktop Entry]
Version=1.19.11
Version=1.19.12
Name=TFC-Relay
Exec=gnome-terminal -x bash -c "cd /opt/tfc && source venv_relay/bin/activate && python3.7 'relay.py' && deactivate || bash"
Icon=tfc.png

View File

@ -1,5 +1,5 @@
[Desktop Entry]
Version=1.19.11
Version=1.19.12
Name=TFC-Relay
Exec=gnome-terminal -x bash -c "cd /opt/tfc && source venv_relay/bin/activate && python3.7 'relay.py' && deactivate || bash"
Icon=tfc.png

View File

@ -1,5 +1,5 @@
[Desktop Entry]
Version=1.19.11
Version=1.19.12
Name=TFC-Receiver
Exec=gnome-terminal --maximize -x bash -c "cd /opt/tfc && source venv_tcb/bin/activate && python3.7 'tfc.py' -r && deactivate || bash"
Icon=tfc.png

View File

@ -1,5 +1,5 @@
[Desktop Entry]
Version=1.19.11
Version=1.19.12
Name=TFC-Transmitter
Exec=gnome-terminal --maximize -x bash -c "cd /opt/tfc && source venv_tcb/bin/activate && python3.7 'tfc.py' && deactivate || bash"
Icon=tfc.png

108
relay.py
View File

@ -23,24 +23,41 @@ import os
import sys
from multiprocessing import Process, Queue
from typing import Any, Dict
from typing import Any, Dict
from cryptography.hazmat.primitives.asymmetric.x448 import X448PrivateKey
from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat
from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat
from src.common.gateway import Gateway, gateway_loop
from src.common.misc import ensure_dir, monitor_processes, process_arguments
from src.common.output import print_title
from src.common.statics import (CONTACT_MGMT_QUEUE, CONTACT_REQ_QUEUE, C_REQ_MGMT_QUEUE, C_REQ_STATE_QUEUE, DIR_TFC,
DST_COMMAND_QUEUE, DST_MESSAGE_QUEUE, EXIT_QUEUE, F_TO_FLASK_QUEUE, GATEWAY_QUEUE,
GROUP_MGMT_QUEUE, GROUP_MSG_QUEUE, M_TO_FLASK_QUEUE, NC, ONION_CLOSE_QUEUE,
ONION_KEY_QUEUE, SRC_TO_RELAY_QUEUE, TOR_DATA_QUEUE, URL_TOKEN_QUEUE)
from src.common.misc import ensure_dir, monitor_processes, process_arguments
from src.common.output import print_title
from src.common.statics import (
CONTACT_MGMT_QUEUE,
CONTACT_REQ_QUEUE,
C_REQ_MGMT_QUEUE,
C_REQ_STATE_QUEUE,
DIR_TFC,
DST_COMMAND_QUEUE,
DST_MESSAGE_QUEUE,
EXIT_QUEUE,
F_TO_FLASK_QUEUE,
GATEWAY_QUEUE,
GROUP_MGMT_QUEUE,
GROUP_MSG_QUEUE,
M_TO_FLASK_QUEUE,
NC,
ONION_CLOSE_QUEUE,
ONION_KEY_QUEUE,
SRC_TO_RELAY_QUEUE,
TOR_DATA_QUEUE,
URL_TOKEN_QUEUE,
)
from src.relay.client import c_req_manager, client_scheduler, g_msg_manager
from src.relay.client import c_req_manager, client_scheduler, g_msg_manager
from src.relay.commands import relay_command
from src.relay.onion import onion_service
from src.relay.server import flask_server
from src.relay.tcb import dst_outgoing, src_incoming
from src.relay.onion import onion_service
from src.relay.server import flask_server
from src.relay.tcb import dst_outgoing, src_incoming
def main() -> None:
@ -143,38 +160,43 @@ def main() -> None:
print_title(NC)
url_token_private_key = X448PrivateKey.generate()
url_token_public_key = url_token_private_key.public_key().public_bytes(encoding=Encoding.Raw,
format=PublicFormat.Raw).hex() # type: str
url_token_public_key = (
url_token_private_key.public_key()
.public_bytes(encoding=Encoding.Raw, format=PublicFormat.Raw)
.hex()
) # type: str
queues = \
{GATEWAY_QUEUE: Queue(), # All datagrams from `gateway_loop` to `src_incoming`
DST_MESSAGE_QUEUE: Queue(), # Message datagrams from `src_incoming`/`client` to `dst_outgoing`
M_TO_FLASK_QUEUE: Queue(), # Message/pubkey datagrams from `src_incoming` to `flask_server`
F_TO_FLASK_QUEUE: Queue(), # File datagrams from `src_incoming` to `flask_server`
SRC_TO_RELAY_QUEUE: Queue(), # Command datagrams from `src_incoming` to `relay_command`
DST_COMMAND_QUEUE: Queue(), # Command datagrams from `src_incoming` to `dst_outgoing`
CONTACT_MGMT_QUEUE: Queue(), # Contact management commands from `relay_command` to `client_scheduler`
C_REQ_STATE_QUEUE: Queue(), # Contact req. notify setting from `relay_command` to `c_req_manager`
URL_TOKEN_QUEUE: Queue(), # URL tokens from `client` to `flask_server`
GROUP_MSG_QUEUE: Queue(), # Group management messages from `client` to `g_msg_manager`
CONTACT_REQ_QUEUE: Queue(), # Contact requests from `flask_server` to `c_req_manager`
C_REQ_MGMT_QUEUE: Queue(), # Contact list management from `relay_command` to `c_req_manager`
GROUP_MGMT_QUEUE: Queue(), # Contact list management from `relay_command` to `g_msg_manager`
ONION_CLOSE_QUEUE: Queue(), # Onion Service close command from `relay_command` to `onion_service`
ONION_KEY_QUEUE: Queue(), # Onion Service private key from `relay_command` to `onion_service`
TOR_DATA_QUEUE: Queue(), # Open port for Tor from `onion_service` to `client_scheduler`
EXIT_QUEUE: Queue() # EXIT/WIPE signal from `relay_command` to `main`
} # type: Dict[bytes, Queue[Any]]
queues = {
GATEWAY_QUEUE: Queue(), # All datagrams from `gateway_loop` to `src_incoming`
DST_MESSAGE_QUEUE: Queue(), # Message datagrams from `src_incoming`/`client` to `dst_outgoing`
M_TO_FLASK_QUEUE: Queue(), # Message/pubkey datagrams from `src_incoming` to `flask_server`
F_TO_FLASK_QUEUE: Queue(), # File datagrams from `src_incoming` to `flask_server`
SRC_TO_RELAY_QUEUE: Queue(), # Command datagrams from `src_incoming` to `relay_command`
DST_COMMAND_QUEUE: Queue(), # Command datagrams from `src_incoming` to `dst_outgoing`
CONTACT_MGMT_QUEUE: Queue(), # Contact management commands from `relay_command` to `client_scheduler`
C_REQ_STATE_QUEUE: Queue(), # Contact req. notify setting from `relay_command` to `c_req_manager`
URL_TOKEN_QUEUE: Queue(), # URL tokens from `client` to `flask_server`
GROUP_MSG_QUEUE: Queue(), # Group management messages from `client` to `g_msg_manager`
CONTACT_REQ_QUEUE: Queue(), # Contact requests from `flask_server` to `c_req_manager`
C_REQ_MGMT_QUEUE: Queue(), # Contact list management from `relay_command` to `c_req_manager`
GROUP_MGMT_QUEUE: Queue(), # Contact list management from `relay_command` to `g_msg_manager`
ONION_CLOSE_QUEUE: Queue(), # Onion Service close command from `relay_command` to `onion_service`
ONION_KEY_QUEUE: Queue(), # Onion Service private key from `relay_command` to `onion_service`
TOR_DATA_QUEUE: Queue(), # Open port for Tor from `onion_service` to `client_scheduler`
EXIT_QUEUE: Queue(), # EXIT/WIPE signal from `relay_command` to `main`
} # type: Dict[bytes, Queue[Any]]
process_list = [Process(target=gateway_loop, args=(queues, gateway )),
Process(target=src_incoming, args=(queues, gateway )),
Process(target=dst_outgoing, args=(queues, gateway )),
Process(target=client_scheduler, args=(queues, gateway, url_token_private_key)),
Process(target=g_msg_manager, args=(queues, )),
Process(target=c_req_manager, args=(queues, )),
Process(target=flask_server, args=(queues, url_token_public_key )),
Process(target=onion_service, args=(queues, )),
Process(target=relay_command, args=(queues, gateway, sys.stdin.fileno()) )]
process_list = [
Process(target=gateway_loop, args=(queues, gateway)),
Process(target=src_incoming, args=(queues, gateway)),
Process(target=dst_outgoing, args=(queues, gateway)),
Process(target=client_scheduler, args=(queues, gateway, url_token_private_key)),
Process(target=g_msg_manager, args=(queues,)),
Process(target=c_req_manager, args=(queues,)),
Process(target=flask_server, args=(queues, url_token_public_key)),
Process(target=onion_service, args=(queues,)),
Process(target=relay_command, args=(queues, gateway, sys.stdin.fileno())),
]
for p in process_list:
p.start()
@ -182,5 +204,5 @@ def main() -> None:
monitor_processes(process_list, NC, queues)
if __name__ == '__main__':
if __name__ == "__main__":
main()

View File

@ -1,5 +1,5 @@
# Static type checking tool
mypy>=0.740
mypy>=0.750
# Unit test tools
pytest>=5.2.1
@ -19,7 +19,7 @@ six>=1.12.0
# pyca/pynacl
PyNaCl>=1.3.0
setuptools>=41.6.0
setuptools>=42.0.2
# pyca/cryptography
cryptography>=2.8
@ -32,7 +32,7 @@ pysocks>=1.7.1
# Requests
requests>=2.22.0
certifi>=2019.9.11
certifi>=2019.11.28
chardet>=3.0.4
idna>=2.8
urllib3>=1.25.7
@ -44,3 +44,13 @@ itsdangerous>=1.1.0
jinja2>=2.10.3
markupsafe>=1.1.1
werkzeug>=0.16.0
# Black
black>=19.10b0
appdirs>=1.4.3
attrs>=19.3.0
Click>=7.0
pathspec>=0.6.0
regex>=2019.11.1
toml>=0.10.0
typed_ast>=1.4.0

View File

@ -11,7 +11,7 @@ pysocks==1.7.1 --hash=sha512:313b954102231d038d52ab58f41e3642579be29f827135
# Requests (Connects to the contact's Tor Onion Service)
requests==2.22.0 --hash=sha512:9186ce4e39bb64f5931a205ffc9afac61657bc42078bc4754ed12a2b66a12b7a620583440849fc2e161d1061ac0750ddef4670f54916931ace1e9abd2a9fb09c
certifi==2019.9.11 --hash=sha512:06e8e1546d375e528a1486e1dee4fda3e585a03ef23ede85d1dad006e0eda837ebade1edde62fdc987a7f310bda69159e94ec36b79a066e0e13bbe8bf7019cfc
certifi==2019.11.28 --hash=sha512:fe5b05c29c1e1d9079150aaea28b09d84f0dd15907e276ccabb314433cfaac948a9615e10d6d01cbd537f99eed8072fbda7cb901e932fbab4f1286ae8c50471b
chardet==3.0.4 --hash=sha512:bfae58c8ea19c87cc9c9bf3d0b6146bfdb3630346bd954fe8e9f7da1f09da1fc0d6943ff04802798a665ea3b610ee2d65658ce84fe5a89f9e93625ea396a17f4
idna==2.8 --hash=sha512:fb07dbec1de86efbad82a4f73d98123c59b083c1f1277445204bef75de99ca200377ad2f1db8924ae79b31b3dd984891c87d0a6344ec4d07a0ddbbbc655821a3
urllib3==1.25.7 --hash=sha512:f6a78508cb87050e176005a088118f8ad87b17cf541457d949e5712c356f8c4de7e7516ba066e5c4bb9ced5c7e7590ba7e07d4ae7fc7190487bf27f1bb9d0668

View File

@ -11,7 +11,7 @@ pysocks==1.7.1 --hash=sha512:313b954102231d038d52ab58f41e3642579be29f827135
# Requests (Connects to the contact's Tor Onion Service)
requests==2.22.0 --hash=sha512:9186ce4e39bb64f5931a205ffc9afac61657bc42078bc4754ed12a2b66a12b7a620583440849fc2e161d1061ac0750ddef4670f54916931ace1e9abd2a9fb09c
certifi==2019.9.11 --hash=sha512:06e8e1546d375e528a1486e1dee4fda3e585a03ef23ede85d1dad006e0eda837ebade1edde62fdc987a7f310bda69159e94ec36b79a066e0e13bbe8bf7019cfc
certifi==2019.11.28 --hash=sha512:fe5b05c29c1e1d9079150aaea28b09d84f0dd15907e276ccabb314433cfaac948a9615e10d6d01cbd537f99eed8072fbda7cb901e932fbab4f1286ae8c50471b
chardet==3.0.4 --hash=sha512:bfae58c8ea19c87cc9c9bf3d0b6146bfdb3630346bd954fe8e9f7da1f09da1fc0d6943ff04802798a665ea3b610ee2d65658ce84fe5a89f9e93625ea396a17f4
idna==2.8 --hash=sha512:fb07dbec1de86efbad82a4f73d98123c59b083c1f1277445204bef75de99ca200377ad2f1db8924ae79b31b3dd984891c87d0a6344ec4d07a0ddbbbc655821a3
urllib3==1.25.7 --hash=sha512:f6a78508cb87050e176005a088118f8ad87b17cf541457d949e5712c356f8c4de7e7516ba066e5c4bb9ced5c7e7590ba7e07d4ae7fc7190487bf27f1bb9d0668
@ -32,5 +32,5 @@ six==1.13.0 --hash=sha512:387d94f37a74e2d86ac0a41f482638dd9aec9e94215ffc
# PyNaCl (Derives TFC account from Onion Service private key)
PyNaCl==1.3.0 --hash=sha512:c4017c38b026a5c531b15839b8d61d1fae9907ba1960c2f97f4cd67fe0827729346d5186a6d6927ba84f64b4cbfdece12b287aa7750a039f4160831be871cea3
setuptools==41.6.0 --hash=sha512:2e90929aa61c847e1d414d427b08403679ba5f512a56d58b92ee64d47e8a2c5da18e47126e5f59faca335b3a4b5ec9857aa323d866252546a6df42c3e3ef3884
setuptools==42.0.2 --hash=sha512:dd18da86ba566a7abde86890f6fa7c5a4dee34970927ef883a07a44ca8992713f9c2c4c87538d18d7bbf19073f1cc7887b150474375f24a0938cef5db097c841
# Duplicate sub-dependencies: cffi, pycparser, six

View File

@ -1,2 +1,2 @@
# Setuptools (Allows installation of pycparser which is a sub-dependency of the cryptography and PyNaCl packages)
setuptools==41.6.0 --hash=sha512:2e90929aa61c847e1d414d427b08403679ba5f512a56d58b92ee64d47e8a2c5da18e47126e5f59faca335b3a4b5ec9857aa323d866252546a6df42c3e3ef3884 # Tails4: 40.8.0 OnionShare2: -
setuptools==42.0.2 --hash=sha512:dd18da86ba566a7abde86890f6fa7c5a4dee34970927ef883a07a44ca8992713f9c2c4c87538d18d7bbf19073f1cc7887b150474375f24a0938cef5db097c841

View File

@ -1,2 +1,2 @@
# Virtual environment (Used to create an isolated Python environment for TFC dependencies)
virtualenv==16.7.7 --hash=sha512:e80eb04615d1dcd2546bd5ceef5408bbb577fa0dd725bc69f20dd7840518af575f0b41e629e8164fdaea398628813720a6f70a42e7748336601391605b79f542
virtualenv==16.7.8 --hash=sha512:4483bdd81d63cc38e0003cd3cba995f3e21d506e2f6a64bc98a673f1ef5ccd56e8e1109ec049c9394a538b879ea47dbafa0c575cdc02eedb1b9172e8fc045ca6

View File

@ -11,7 +11,7 @@ six==1.13.0 --hash=sha512:387d94f37a74e2d86ac0a41f482638dd9aec9e94215ffc
# PyNaCl (Handles TCB-side XChaCha20-Poly1305 symmetric encryption)
PyNaCl==1.3.0 --hash=sha512:c4017c38b026a5c531b15839b8d61d1fae9907ba1960c2f97f4cd67fe0827729346d5186a6d6927ba84f64b4cbfdece12b287aa7750a039f4160831be871cea3
setuptools==41.6.0 --hash=sha512:2e90929aa61c847e1d414d427b08403679ba5f512a56d58b92ee64d47e8a2c5da18e47126e5f59faca335b3a4b5ec9857aa323d866252546a6df42c3e3ef3884
setuptools==42.0.2 --hash=sha512:dd18da86ba566a7abde86890f6fa7c5a4dee34970927ef883a07a44ca8992713f9c2c4c87538d18d7bbf19073f1cc7887b150474375f24a0938cef5db097c841
# Duplicate sub-dependencies: cffi, pycparser, six
# Cryptography (Handles TCB-side X448 key exchange)

View File

@ -0,0 +1,20 @@
#!/usr/bin/env python3.7
# -*- coding: utf-8 -*-
"""
TFC - Onion-routed, endpoint secure messaging system
Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
TFC is free software: you can redistribute it and/or modify it under the terms
of the GNU General Public License as published by the Free Software Foundation,
either version 3 of the License, or (at your option) any later version.
TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""

View File

@ -0,0 +1,20 @@
#!/usr/bin/env python3.7
# -*- coding: utf-8 -*-
"""
TFC - Onion-routed, endpoint secure messaging system
Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
TFC is free software: you can redistribute it and/or modify it under the terms
of the GNU General Public License as published by the Free Software Foundation,
either version 3 of the License, or (at your option) any later version.
TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""

View File

@ -42,23 +42,39 @@ import nacl.exceptions
import nacl.secret
import nacl.utils
from cryptography.hazmat.primitives import padding
from typing import Tuple
from cryptography.hazmat.primitives import padding
from cryptography.hazmat.primitives.asymmetric.x448 import X448PrivateKey, X448PublicKey
from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat
from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat
from src.common.exceptions import CriticalError
from src.common.misc import separate_header
from src.common.statics import (ARGON2_SALT_LENGTH, BITS_PER_BYTE, BLAKE2_DIGEST_LENGTH, BLAKE2_DIGEST_LENGTH_MAX,
BLAKE2_DIGEST_LENGTH_MIN, PADDING_LENGTH, SYMMETRIC_KEY_LENGTH,
TFC_PUBLIC_KEY_LENGTH, X448_SHARED_SECRET_LENGTH, XCHACHA20_NONCE_LENGTH)
from src.common.misc import separate_header
from src.common.statics import (
ARGON2_SALT_LENGTH,
BITS_PER_BYTE,
BLAKE2_DIGEST_LENGTH,
BLAKE2_DIGEST_LENGTH_MAX,
BLAKE2_DIGEST_LENGTH_MIN,
FINGERPRINT,
FINGERPRINT_LENGTH,
MESSAGE_KEY,
HEADER_KEY,
PADDING_LENGTH,
SYMMETRIC_KEY_LENGTH,
TFC_PUBLIC_KEY_LENGTH,
X448_SHARED_SECRET_LENGTH,
XCHACHA20_NONCE_LENGTH,
)
def blake2b(message: bytes, # Message to hash
key: bytes = b'', # Key for keyed hashing
salt: bytes = b'', # Salt for randomized hashing
person: bytes = b'', # Personalization string
digest_size: int = BLAKE2_DIGEST_LENGTH # Length of the digest
) -> bytes: # The BLAKE2b digest
def blake2b(
message: bytes, # Message to hash
key: bytes = b"", # Key for keyed hashing
salt: bytes = b"", # Salt for randomized hashing
person: bytes = b"", # Personalization string
digest_size: int = BLAKE2_DIGEST_LENGTH, # Length of the digest
) -> bytes: # The BLAKE2b digest
"""Generate BLAKE2b digest (i.e. cryptographic hash) of a message.
BLAKE2 is the successor of SHA3-finalist BLAKE*, designed by
@ -118,16 +134,16 @@ def blake2b(message: bytes, # Message to hash
https://github.com/python/cpython/blob/3.7/Lib/hashlib.py
"""
try:
digest = hashlib.blake2b(message,
digest_size=digest_size,
key=key,
salt=salt,
person=person).digest() # type: bytes
digest = hashlib.blake2b(
message, digest_size=digest_size, key=key, salt=salt, person=person
).digest() # type: bytes
except ValueError as e:
raise CriticalError(str(e))
if not isinstance(digest, bytes):
raise CriticalError(f"BLAKE2b returned an invalid type ({type(digest)}) digest.")
raise CriticalError(
f"BLAKE2b returned an invalid type ({type(digest)}) digest."
)
if len(digest) != digest_size:
raise CriticalError(f"BLAKE2b digest had invalid length ({len(digest)} bytes).")
@ -135,12 +151,13 @@ def blake2b(message: bytes, # Message to hash
return digest
def argon2_kdf(password: str, # Password to derive the key from
salt: bytes, # Salt to derive the key from
time_cost: int, # Number of iterations
memory_cost: int, # Amount of memory to use (in bytes)
parallelism: int # Number of threads to use
) -> bytes: # The derived key
def argon2_kdf(
password: str, # Password to derive the key from
salt: bytes, # Salt to derive the key from
time_cost: int, # Number of iterations
memory_cost: int, # Amount of memory to use (in bytes)
parallelism: int, # Number of threads to use
) -> bytes: # The derived key
"""Derive an encryption key from password and salt using Argon2id.
Argon2 is a password hashing function designed by Alex Biryukov,
@ -215,13 +232,15 @@ def argon2_kdf(password: str, # Password to derive the key from
raise CriticalError(f"Invalid salt length ({len(salt)} bytes).")
try:
key = argon2.low_level.hash_secret_raw(secret=password.encode(),
salt=salt,
time_cost=time_cost,
memory_cost=memory_cost,
parallelism=parallelism,
hash_len=SYMMETRIC_KEY_LENGTH,
type=argon2.Type.ID) # type: bytes
key = argon2.low_level.hash_secret_raw(
secret=password.encode(),
salt=salt,
time_cost=time_cost,
memory_cost=memory_cost,
parallelism=parallelism,
hash_len=SYMMETRIC_KEY_LENGTH,
type=argon2.Type.ID,
) # type: bytes
except argon2.exceptions.Argon2Error as e:
raise CriticalError(str(e))
@ -230,7 +249,9 @@ def argon2_kdf(password: str, # Password to derive the key from
raise CriticalError(f"Argon2 returned an invalid type ({type(key)}) key.")
if len(key) != SYMMETRIC_KEY_LENGTH:
raise CriticalError(f"Derived an invalid length key from password ({len(key)} bytes).")
raise CriticalError(
f"Derived an invalid length key from password ({len(key)} bytes)."
)
return key
@ -315,7 +336,7 @@ class X448(object):
"""
@staticmethod
def generate_private_key() -> 'X448PrivateKey':
def generate_private_key() -> "X448PrivateKey":
"""Generate the X448 private key.
The pyca/cryptography's key generation process is as follows:
@ -364,21 +385,26 @@ class X448(object):
return X448PrivateKey.generate()
@staticmethod
def derive_public_key(private_key: 'X448PrivateKey') -> bytes:
def derive_public_key(private_key: "X448PrivateKey") -> bytes:
"""Derive public key from an X448 private key."""
public_key = private_key.public_key().public_bytes(encoding=Encoding.Raw,
format=PublicFormat.Raw) # type: bytes
public_key = private_key.public_key().public_bytes(
encoding=Encoding.Raw, format=PublicFormat.Raw
) # type: bytes
if not isinstance(public_key, bytes):
raise CriticalError(f"Generated an invalid type ({type(public_key)}) public key.")
raise CriticalError(
f"Generated an invalid type ({type(public_key)}) public key."
)
if len(public_key) != TFC_PUBLIC_KEY_LENGTH:
raise CriticalError(f"Generated an invalid size public key from private key ({len(public_key)} bytes).")
raise CriticalError(
f"Generated an invalid size public key from private key ({len(public_key)} bytes)."
)
return public_key
@staticmethod
def shared_key(private_key: 'X448PrivateKey', public_key: bytes) -> bytes:
def shared_key(private_key: "X448PrivateKey", public_key: bytes) -> bytes:
"""Derive the X448 shared key.
The pyca/cryptography library validates the length of the public
@ -403,23 +429,96 @@ class X448(object):
extract unidirectional message/header keys and fingerprints.
"""
try:
shared_secret = private_key.exchange(X448PublicKey.from_public_bytes(public_key)) # type: bytes
shared_secret = private_key.exchange(
X448PublicKey.from_public_bytes(public_key)
) # type: bytes
except ValueError as e:
raise CriticalError(str(e))
if not isinstance(shared_secret, bytes): # pragma: no cover
raise CriticalError(f"Derived an invalid type ({type(shared_secret)}) shared secret.")
raise CriticalError(
f"Derived an invalid type ({type(shared_secret)}) shared secret."
)
if len(shared_secret) != X448_SHARED_SECRET_LENGTH: # pragma: no cover
raise CriticalError(f"Generated an invalid size shared secret ({len(shared_secret)} bytes).")
raise CriticalError(
f"Generated an invalid size shared secret ({len(shared_secret)} bytes)."
)
return blake2b(shared_secret, digest_size=SYMMETRIC_KEY_LENGTH)
@staticmethod
def derive_keys(
dh_shared_key: bytes, tfc_public_key_user: bytes, tfc_public_key_contact: bytes
) -> Tuple[bytes, bytes, bytes, bytes, bytes, bytes]:
"""Create domain separated message and header keys and fingerprints from shared key.
def encrypt_and_sign(plaintext: bytes, # Plaintext to encrypt
key: bytes, # 32-byte symmetric key
ad: bytes = b'' # Associated data
) -> bytes: # Nonce + ciphertext + tag
Domain separate unidirectional keys from shared key by using public
keys as message and the context variable as personalization string.
Domain separate fingerprints of public keys by using the shared
secret as key and the context variable as personalization string.
This way entities who might monitor fingerprint verification
channel are unable to correlate spoken values with public keys
that they might see on RAM or screen of Networked Computer:
Public keys can not be derived from the fingerprints due to
preimage resistance of BLAKE2b, and fingerprints can not be
derived from public key without the X448 shared key. Using the
context variable ensures fingerprints are distinct from derived
message and header keys.
"""
tx_mk = blake2b(
tfc_public_key_contact,
dh_shared_key,
person=MESSAGE_KEY,
digest_size=SYMMETRIC_KEY_LENGTH,
)
rx_mk = blake2b(
tfc_public_key_user,
dh_shared_key,
person=MESSAGE_KEY,
digest_size=SYMMETRIC_KEY_LENGTH,
)
tx_hk = blake2b(
tfc_public_key_contact,
dh_shared_key,
person=HEADER_KEY,
digest_size=SYMMETRIC_KEY_LENGTH,
)
rx_hk = blake2b(
tfc_public_key_user,
dh_shared_key,
person=HEADER_KEY,
digest_size=SYMMETRIC_KEY_LENGTH,
)
tx_fp = blake2b(
tfc_public_key_user,
dh_shared_key,
person=FINGERPRINT,
digest_size=FINGERPRINT_LENGTH,
)
rx_fp = blake2b(
tfc_public_key_contact,
dh_shared_key,
person=FINGERPRINT,
digest_size=FINGERPRINT_LENGTH,
)
key_tuple = tx_mk, rx_mk, tx_hk, rx_hk, tx_fp, rx_fp
if len(key_tuple) != len(set(key_tuple)):
raise CriticalError("Derived keys were not unique.")
return key_tuple
def encrypt_and_sign(
plaintext: bytes, # Plaintext to encrypt
key: bytes, # 32-byte symmetric key
ad: bytes = b"", # Associated data
) -> bytes: # Nonce + ciphertext + tag
"""Encrypt plaintext with XChaCha20-Poly1305 (IETF variant).
ChaCha20 is a stream cipher published by Daniel J. Bernstein (djb)
@ -498,18 +597,21 @@ def encrypt_and_sign(plaintext: bytes, # Plaintext to encrypt
nonce = csprng(XCHACHA20_NONCE_LENGTH)
try:
ct_tag = nacl.bindings.crypto_aead_xchacha20poly1305_ietf_encrypt(plaintext, ad, nonce, key) # type: bytes
ct_tag = nacl.bindings.crypto_aead_xchacha20poly1305_ietf_encrypt(
plaintext, ad, nonce, key
) # type: bytes
except nacl.exceptions.CryptoError as e:
raise CriticalError(str(e))
return nonce + ct_tag
def auth_and_decrypt(nonce_ct_tag: bytes, # Nonce + ciphertext + tag
key: bytes, # 32-byte symmetric key
database: str = '', # When provided, gracefully exits TFC when the tag is invalid
ad: bytes = b'' # Associated data
) -> bytes: # Plaintext
def auth_and_decrypt(
nonce_ct_tag: bytes, # Nonce + ciphertext + tag
key: bytes, # 32-byte symmetric key
database: str = "", # When provided, gracefully exits TFC when the tag is invalid
ad: bytes = b"", # Associated data
) -> bytes: # Plaintext
"""Authenticate and decrypt XChaCha20-Poly1305 ciphertext.
The Poly1305 tag is checked using constant time `sodium_memcmp`:
@ -534,16 +636,21 @@ def auth_and_decrypt(nonce_ct_tag: bytes, # Nonce + ciphertext + tag
nonce, ct_tag = separate_header(nonce_ct_tag, XCHACHA20_NONCE_LENGTH)
try:
plaintext = nacl.bindings.crypto_aead_xchacha20poly1305_ietf_decrypt(ct_tag, ad, nonce, key) # type: bytes
plaintext = nacl.bindings.crypto_aead_xchacha20poly1305_ietf_decrypt(
ct_tag, ad, nonce, key
) # type: bytes
return plaintext
except nacl.exceptions.CryptoError:
if database:
raise CriticalError(f"Authentication of data in database '{database}' failed.")
raise CriticalError(
f"Authentication of data in database '{database}' failed."
)
raise
def byte_padding(bytestring: bytes # Bytestring to be padded
) -> bytes: # Padded bytestring
def byte_padding(
bytestring: bytes, # Bytestring to be padded
) -> bytes: # Padded bytestring
"""Pad bytestring to next 255 bytes.
TFC adds padding to messages it outputs. The padding ensures each
@ -565,8 +672,8 @@ def byte_padding(bytestring: bytes # Bytestring to be padded
For a better explanation, see
https://en.wikipedia.org/wiki/Padding_(cryptography)#PKCS#5_and_PKCS#7
"""
padder = padding.PKCS7(PADDING_LENGTH * BITS_PER_BYTE).padder()
padded = padder.update(bytestring) # type: bytes
padder = padding.PKCS7(PADDING_LENGTH * BITS_PER_BYTE).padder()
padded = padder.update(bytestring) # type: bytes
padded += padder.finalize()
if not isinstance(padded, bytes):
@ -578,22 +685,24 @@ def byte_padding(bytestring: bytes # Bytestring to be padded
return padded
def rm_padding_bytes(bytestring: bytes # Padded bytestring
) -> bytes: # Bytestring without padding
def rm_padding_bytes(
bytestring: bytes, # Padded bytestring
) -> bytes: # Bytestring without padding
"""Remove padding from plaintext.
The length of padding is determined by the ord-value of the last
byte that is always part of the padding.
"""
unpadder = padding.PKCS7(PADDING_LENGTH * BITS_PER_BYTE).unpadder()
unpadded = unpadder.update(bytestring) # type: bytes
unpadder = padding.PKCS7(PADDING_LENGTH * BITS_PER_BYTE).unpadder()
unpadded = unpadder.update(bytestring) # type: bytes
unpadded += unpadder.finalize()
return unpadded
def csprng(key_length: int = SYMMETRIC_KEY_LENGTH # Length of the key
) -> bytes: # The generated key
def csprng(
key_length: int = SYMMETRIC_KEY_LENGTH, # Length of the key
) -> bytes: # The generated key
"""Generate a cryptographically secure random key.
The default key length is 32 bytes (256 bits).
@ -605,7 +714,7 @@ def csprng(key_length: int = SYMMETRIC_KEY_LENGTH # Length of the key
For more details, see
https://www.2uo.de/myths-about-urandom/
https://www.chronox.de/lrng/doc/lrng.pdf
https://www.bsi.bund.de/SharedDocs/Downloads/EN/BSI/Publications/Studies/LinuxRNG/LinuxRNG_EN.pdf?__blob=publicationFile&v=16
https://www.bsi.bund.de/SharedDocs/Downloads/EN/BSI/Publications/Studies/LinuxRNG/LinuxRNG_EN.pdf
https://github.com/torvalds/linux/blob/master/drivers/char/random.c
@ -648,7 +757,7 @@ def csprng(key_length: int = SYMMETRIC_KEY_LENGTH # Length of the key
_randomness _randomness _randomness_randomness _randomness
[1] https://www.bsi.bund.de/SharedDocs/Downloads/EN/BSI/Publications/Studies/LinuxRNG/LinuxRNG_EN.pdf?__blob=publicationFile&v=16
[1] https://www.bsi.bund.de/SharedDocs/Downloads/EN/BSI/Publications/Studies/LinuxRNG/LinuxRNG_EN.pdf
Entropy sources
@ -768,11 +877,11 @@ def csprng(key_length: int = SYMMETRIC_KEY_LENGTH # Length of the key
o User space IOCTL of RNDADDENTROPY.[1; p.39]
[1] https://www.bsi.bund.de/SharedDocs/Downloads/EN/BSI/Publications/Studies/LinuxRNG/LinuxRNG_EN.pdf?__blob=publicationFile&v=16
[1] https://www.bsi.bund.de/SharedDocs/Downloads/EN/BSI/Publications/Studies/LinuxRNG/LinuxRNG_EN.pdf
[2] https://spectrum.ieee.org/computing/hardware/behind-intels-new-randomnumber-generator
[3] https://software.intel.com/sites/default/files/managed/98/4a/DRNG_Software_Implementation_Guide_2.1.pdf
[4] https://www.amd.com/system/files/TechDocs/amd-random-number-generator.pdf
[5] https://security.stackexchange.com/questions/183506/random-seed-not-propagating-to-the-entropy-pools-in-a-timely-manner
[5] https://security.stackexchange.com/q/183506
The input_pool
@ -846,7 +955,7 @@ def csprng(key_length: int = SYMMETRIC_KEY_LENGTH # Length of the key
the use of /dev/random, and it will then reduce the input_pool's
entropy estimator by 1024 bits.[1; pp.59-60]
[1] https://www.bsi.bund.de/SharedDocs/Downloads/EN/BSI/Publications/Studies/LinuxRNG/LinuxRNG_EN.pdf?__blob=publicationFile&v=16
[1] https://www.bsi.bund.de/SharedDocs/Downloads/EN/BSI/Publications/Studies/LinuxRNG/LinuxRNG_EN.pdf
[2] https://github.com/torvalds/linux/blob/master/drivers/char/random.c#L791
[3] https://github.com/torvalds/linux/blob/master/drivers/char/random.c#L1032
@ -952,7 +1061,7 @@ def csprng(key_length: int = SYMMETRIC_KEY_LENGTH # Length of the key
The result is then XORed with the key component of the DRNG state
[1; p.34].
[1] https://www.bsi.bund.de/SharedDocs/Downloads/EN/BSI/Publications/Studies/LinuxRNG/LinuxRNG_EN.pdf?__blob=publicationFile&v=16
[1] https://www.bsi.bund.de/SharedDocs/Downloads/EN/BSI/Publications/Studies/LinuxRNG/LinuxRNG_EN.pdf
[2] https://lkml.org/lkml/2019/5/30/867
[3] https://github.com/torvalds/linux/blob/master/drivers/char/random.c#L889
https://github.com/torvalds/linux/blob/master/drivers/char/random.c#L1058
@ -1028,7 +1137,9 @@ def csprng(key_length: int = SYMMETRIC_KEY_LENGTH # Length of the key
raise CriticalError(f"GETRANDOM returned invalid type data ({type(entropy)}).")
if len(entropy) != key_length:
raise CriticalError(f"GETRANDOM returned invalid amount of entropy ({len(entropy)} bytes).")
raise CriticalError(
f"GETRANDOM returned invalid amount of entropy ({len(entropy)} bytes)."
)
compressed = blake2b(entropy, digest_size=key_length)
@ -1048,9 +1159,9 @@ def check_kernel_version() -> None:
trusted).[2; p.138]
[1] https://lkml.org/lkml/2016/7/25/43
[2] https://www.bsi.bund.de/SharedDocs/Downloads/EN/BSI/Publications/Studies/LinuxRNG/LinuxRNG_EN.pdf?__blob=publicationFile&v=16
[2] https://www.bsi.bund.de/SharedDocs/Downloads/EN/BSI/Publications/Studies/LinuxRNG/LinuxRNG_EN.pdf
"""
major_v, minor_v = [int(i) for i in os.uname()[2].split('.')[:2]] # type: int, int
major_v, minor_v = [int(i) for i in os.uname()[2].split(".")[:2]] # type: int, int
if major_v < 4 or (major_v == 4 and minor_v < 17):
raise CriticalError("Insecure kernel CSPRNG version detected.")

View File

@ -27,10 +27,15 @@ from typing import Iterator
import nacl.exceptions
from src.common.crypto import auth_and_decrypt, blake2b, encrypt_and_sign
from src.common.crypto import auth_and_decrypt, blake2b, encrypt_and_sign
from src.common.exceptions import CriticalError
from src.common.misc import ensure_dir, separate_trailer
from src.common.statics import BLAKE2_DIGEST_LENGTH, DB_WRITE_RETRY_LIMIT, DIR_USER_DATA
from src.common.misc import ensure_dir, separate_trailer
from src.common.statics import (
BLAKE2_DIGEST_LENGTH,
DB_WRITE_RETRY_LIMIT,
DIR_USER_DATA,
TEMP_POSTFIX,
)
if typing.TYPE_CHECKING:
from src.common.db_masterkey import MasterKey
@ -42,16 +47,16 @@ class TFCDatabase(object):
as atomicity to ensure database writing always succeeds or fails.
"""
def __init__(self, database_name: str, master_key: 'MasterKey') -> None:
def __init__(self, database_name: str, master_key: "MasterKey") -> None:
"""Initialize TFC database."""
self.database_name = database_name
self.database_temp = database_name + '_temp'
self.database_key = master_key.master_key
self.database_temp = database_name + TEMP_POSTFIX
self.database_key = master_key.master_key
@staticmethod
def write_to_file(file_name: str, data: bytes) -> None:
"""Write data to file."""
with open(file_name, 'wb+') as f:
with open(file_name, "wb+") as f:
f.write(data)
# Write data from program buffer to operating system buffer.
@ -65,7 +70,7 @@ class TFCDatabase(object):
def verify_file(self, database_name: str) -> bool:
"""Verify integrity of file content."""
with open(database_name, 'rb') as f:
with open(database_name, "rb") as f:
purp_data = f.read()
try:
@ -82,14 +87,13 @@ class TFCDatabase(object):
while not self.verify_file(self.database_temp):
retries += 1
if retries >= DB_WRITE_RETRY_LIMIT:
raise CriticalError(f"Writing to database '{self.database_temp}' failed after {retries} retries.")
raise CriticalError(
f"Writing to database '{self.database_temp}' failed after {retries} retries."
)
self.write_to_file(self.database_temp, ct_bytes)
def store_database(self,
pt_bytes: bytes,
replace: bool = True
) -> None:
def store_database(self, pt_bytes: bytes, replace: bool = True) -> None:
"""Encrypt and store data into database."""
ct_bytes = encrypt_and_sign(pt_bytes, self.database_key)
ensure_dir(DIR_USER_DATA)
@ -98,7 +102,7 @@ class TFCDatabase(object):
# Replace original file with temp file. (`os.replace` is atomic as per POSIX
# requirements): https://docs.python.org/3/library/os.html#os.replace
if replace:
os.replace(self.database_temp, self.database_name)
self.replace_database()
def replace_database(self) -> None:
"""Replace database with temporary database."""
@ -123,10 +127,12 @@ class TFCDatabase(object):
# we delete it and continue using the old file to ensure atomicity.
os.remove(self.database_temp)
with open(self.database_name, 'rb') as f:
with open(self.database_name, "rb") as f:
database_data = f.read()
return auth_and_decrypt(database_data, self.database_key, database=self.database_name)
return auth_and_decrypt(
database_data, self.database_key, database=self.database_name
)
class TFCUnencryptedDatabase(object):
@ -137,12 +143,12 @@ class TFCUnencryptedDatabase(object):
def __init__(self, database_name: str) -> None:
"""Initialize unencrypted TFC database."""
self.database_name = database_name
self.database_temp = database_name + '_temp'
self.database_temp = database_name + TEMP_POSTFIX
@staticmethod
def write_to_file(file_name: str, data: bytes) -> None:
"""Write data to file."""
with open(file_name, 'wb+') as f:
with open(file_name, "wb+") as f:
f.write(data)
f.flush()
os.fsync(f.fileno())
@ -150,10 +156,10 @@ class TFCUnencryptedDatabase(object):
@staticmethod
def verify_file(database_name: str) -> bool:
"""Verify integrity of file content."""
with open(database_name, 'rb') as f:
purp_data = f.read()
with open(database_name, "rb") as f:
file_data = f.read()
purp_data, digest = separate_trailer(purp_data, BLAKE2_DIGEST_LENGTH)
purp_data, digest = separate_trailer(file_data, BLAKE2_DIGEST_LENGTH)
return blake2b(purp_data) == digest
@ -165,7 +171,9 @@ class TFCUnencryptedDatabase(object):
while not self.verify_file(self.database_temp):
retries += 1
if retries >= DB_WRITE_RETRY_LIMIT:
raise CriticalError(f"Writing to database '{self.database_temp}' failed after {retries} retries.")
raise CriticalError(
f"Writing to database '{self.database_temp}' failed after {retries} retries."
)
self.write_to_file(self.database_temp, data)
@ -206,7 +214,7 @@ class TFCUnencryptedDatabase(object):
# so we delete it and continue using the old file to ensure atomicity.
os.remove(self.database_temp)
with open(self.database_name, 'rb') as f:
with open(self.database_name, "rb") as f:
database_data = f.read()
database_data, digest = separate_trailer(database_data, BLAKE2_DIGEST_LENGTH)
@ -218,47 +226,50 @@ class TFCUnencryptedDatabase(object):
class MessageLog(object):
"""MessageLog stores message logs into an SQLite3 database."""
def __init__(self, database_name: str, database_key: bytes) -> None:
"""Create a new MessageLog object."""
self.database_name = database_name
self.database_temp = self.database_name + '_temp'
self.database_key = database_key
self.database_temp = self.database_name + TEMP_POSTFIX
self.database_key = database_key
ensure_dir(DIR_USER_DATA)
if os.path.isfile(self.database_name):
self.load_database()
self.check_for_temp_database()
self.conn = sqlite3.connect(self.database_name)
self.c = self.conn.cursor()
self.c = self.conn.cursor()
self.create_table()
def __iter__(self) -> Iterator[bytes]:
"""Iterate over encrypted log entries."""
for log_entry in self.c.execute("SELECT log_entry FROM log_entries"):
plaintext = auth_and_decrypt(log_entry[0], self.database_key, database=self.database_name)
plaintext = auth_and_decrypt(
log_entry[0], self.database_key, database=self.database_name
)
yield plaintext
def verify_file(self, database_name: str) -> bool:
"""Verify integrity of database file content."""
conn = sqlite3.connect(database_name)
c = conn.cursor()
c = conn.cursor()
try:
log_entries = c.execute("SELECT log_entry FROM log_entries")
except sqlite3.DatabaseError:
return False
for log_entry in log_entries:
for ct_log_entry in log_entries:
try:
_ = auth_and_decrypt(log_entry[0], self.database_key)
auth_and_decrypt(ct_log_entry[0], self.database_key)
except nacl.exceptions.CryptoError:
return False
else:
return True
def load_database(self) -> None:
""""Load database from file."""
return True
def check_for_temp_database(self) -> None:
""""Check if temporary log database exists."""
if os.path.isfile(self.database_temp):
if self.verify_file(self.database_temp):
os.replace(self.database_temp, self.database_name)
@ -268,21 +279,26 @@ class MessageLog(object):
os.remove(self.database_temp)
def create_table(self) -> None:
"""Create new log database."""
self.c.execute("""CREATE TABLE IF NOT EXISTS log_entries (id INTEGER PRIMARY KEY, log_entry BLOB NOT NULL)""")
"""Create new table for logged messages."""
self.c.execute(
"""CREATE TABLE IF NOT EXISTS log_entries (id INTEGER PRIMARY KEY, log_entry BLOB NOT NULL)"""
)
def insert_log_entry(self, pt_log_entry: bytes) -> None:
"""Encrypt and insert log entry into the sqlite3 log database."""
"""Encrypt log entry and insert the ciphertext into the sqlite3 database."""
ct_log_entry = encrypt_and_sign(pt_log_entry, self.database_key)
params = (ct_log_entry,)
try:
self.c.execute(f"""INSERT INTO log_entries (log_entry) VALUES (?)""", params)
self.c.execute(
f"""INSERT INTO log_entries (log_entry) VALUES (?)""", (ct_log_entry,)
)
self.conn.commit()
except sqlite3.Error:
# Re-connect to database
self.conn = sqlite3.connect(self.database_name)
self.c = self.conn.cursor()
self.insert_log_entry(pt_log_entry)
self.conn.commit()
def close_database(self) -> None:
"""Close the database cursor."""
self.c.close()

View File

@ -24,20 +24,46 @@ import typing
from typing import Iterable, Iterator, List, Optional, Sized
from src.common.database import TFCDatabase
from src.common.encoding import bool_to_bytes, pub_key_to_onion_address, str_to_bytes, pub_key_to_short_address
from src.common.encoding import bytes_to_bool, onion_address_to_pub_key, bytes_to_str
from src.common.database import TFCDatabase
from src.common.encoding import (
bool_to_bytes,
pub_key_to_onion_address,
str_to_bytes,
pub_key_to_short_address,
)
from src.common.encoding import bytes_to_bool, onion_address_to_pub_key, bytes_to_str
from src.common.exceptions import CriticalError
from src.common.misc import ensure_dir, get_terminal_width, separate_headers, split_byte_string
from src.common.output import clear_screen
from src.common.statics import (CONTACT_LENGTH, CONTACT_LIST_INDENT, DIR_USER_DATA, DUMMY_CONTACT, DUMMY_NICK, ECDHE,
ENCODED_BOOLEAN_LENGTH, FINGERPRINT_LENGTH, KEX_STATUS_HAS_RX_PSK, KEX_STATUS_LENGTH,
KEX_STATUS_NONE, KEX_STATUS_NO_RX_PSK, KEX_STATUS_PENDING, KEX_STATUS_UNVERIFIED,
KEX_STATUS_VERIFIED, LOCAL_ID, ONION_SERVICE_PUBLIC_KEY_LENGTH, PSK)
from src.common.misc import (
ensure_dir,
get_terminal_width,
separate_headers,
split_byte_string,
)
from src.common.output import clear_screen
from src.common.statics import (
CONTACT_LENGTH,
CONTACT_LIST_INDENT,
DIR_USER_DATA,
DUMMY_CONTACT,
DUMMY_NICK,
ECDHE,
ENCODED_BOOLEAN_LENGTH,
FINGERPRINT_LENGTH,
KEX_STATUS_HAS_RX_PSK,
KEX_STATUS_LENGTH,
KEX_STATUS_NONE,
KEX_STATUS_NO_RX_PSK,
KEX_STATUS_PENDING,
KEX_STATUS_UNVERIFIED,
KEX_STATUS_VERIFIED,
LOCAL_ID,
ONION_SERVICE_PUBLIC_KEY_LENGTH,
PSK,
)
if typing.TYPE_CHECKING:
from src.common.db_masterkey import MasterKey
from src.common.db_settings import Settings
from src.common.db_settings import Settings
from cryptography.hazmat.primitives.asymmetric.x448 import X448PrivateKey
@ -127,31 +153,32 @@ class Contact(object):
with each other.
"""
def __init__(self,
onion_pub_key: bytes,
nick: str,
tx_fingerprint: bytes,
rx_fingerprint: bytes,
kex_status: bytes,
log_messages: bool,
file_reception: bool,
notifications: bool
) -> None:
def __init__(
self,
onion_pub_key: bytes,
nick: str,
tx_fingerprint: bytes,
rx_fingerprint: bytes,
kex_status: bytes,
log_messages: bool,
file_reception: bool,
notifications: bool,
) -> None:
"""Create a new Contact object.
`self.short_address` is a truncated version of the account used
to identify TFC account in printed messages.
"""
self.onion_pub_key = onion_pub_key
self.nick = nick
self.tx_fingerprint = tx_fingerprint
self.rx_fingerprint = rx_fingerprint
self.kex_status = kex_status
self.log_messages = log_messages
self.file_reception = file_reception
self.notifications = notifications
self.onion_address = pub_key_to_onion_address(self.onion_pub_key)
self.short_address = pub_key_to_short_address(self.onion_pub_key)
self.onion_pub_key = onion_pub_key
self.nick = nick
self.tx_fingerprint = tx_fingerprint
self.rx_fingerprint = rx_fingerprint
self.kex_status = kex_status
self.log_messages = log_messages
self.file_reception = file_reception
self.notifications = notifications
self.onion_address = pub_key_to_onion_address(self.onion_pub_key)
self.short_address = pub_key_to_short_address(self.onion_pub_key)
self.tfc_private_key = None # type: Optional[X448PrivateKey]
def serialize_c(self) -> bytes:
@ -165,14 +192,16 @@ class Contact(object):
metadata about the contact the ciphertext length of the contact
database would reveal.
"""
return (self.onion_pub_key
+ self.tx_fingerprint
+ self.rx_fingerprint
+ self.kex_status
+ bool_to_bytes(self.log_messages)
+ bool_to_bytes(self.file_reception)
+ bool_to_bytes(self.notifications)
+ str_to_bytes(self.nick))
return (
self.onion_pub_key
+ self.tx_fingerprint
+ self.rx_fingerprint
+ self.kex_status
+ bool_to_bytes(self.log_messages)
+ bool_to_bytes(self.file_reception)
+ bool_to_bytes(self.notifications)
+ str_to_bytes(self.nick)
)
def uses_psk(self) -> bool:
"""\
@ -209,13 +238,13 @@ class ContactList(Iterable[Contact], Sized):
readable names for making queries to the database.
"""
def __init__(self, master_key: 'MasterKey', settings: 'Settings') -> None:
def __init__(self, master_key: "MasterKey", settings: "Settings") -> None:
"""Create a new ContactList object."""
self.settings = settings
self.contacts = [] # type: List[Contact]
self.settings = settings
self.contacts = [] # type: List[Contact]
self.dummy_contact = self.generate_dummy_contact()
self.file_name = f'{DIR_USER_DATA}{settings.software_operation}_contacts'
self.database = TFCDatabase(self.file_name, master_key)
self.file_name = f"{DIR_USER_DATA}{settings.software_operation}_contacts"
self.database = TFCDatabase(self.file_name, master_key)
ensure_dir(DIR_USER_DATA)
if os.path.isfile(self.file_name):
@ -251,7 +280,9 @@ class ContactList(Iterable[Contact], Sized):
and a 16-byte tag, so the size of the final database is 57313
bytes.
"""
pt_bytes = b''.join([c.serialize_c() for c in self.contacts + self._dummy_contacts()])
pt_bytes = b"".join(
[c.serialize_c() for c in self.contacts + self._dummy_contacts()]
)
self.database.store_database(pt_bytes, replace)
def _load_contacts(self) -> None:
@ -265,30 +296,45 @@ class ContactList(Iterable[Contact], Sized):
populate the `self.contacts` list with Contact objects, the data
of which is sliced and decoded from the dummy-free blocks.
"""
pt_bytes = self.database.load_database()
blocks = split_byte_string(pt_bytes, item_len=CONTACT_LENGTH)
df_blocks = [b for b in blocks if not b.startswith(self.dummy_contact.onion_pub_key)]
pt_bytes = self.database.load_database()
blocks = split_byte_string(pt_bytes, item_len=CONTACT_LENGTH)
df_blocks = [
b for b in blocks if not b.startswith(self.dummy_contact.onion_pub_key)
]
for block in df_blocks:
if len(block) != CONTACT_LENGTH:
raise CriticalError("Invalid data in contact database.")
(onion_pub_key, tx_fingerprint, rx_fingerprint, kex_status_byte,
log_messages_byte, file_reception_byte, notifications_byte,
nick_bytes) = separate_headers(block,
[ONION_SERVICE_PUBLIC_KEY_LENGTH]
+ 2*[FINGERPRINT_LENGTH]
+ [KEX_STATUS_LENGTH]
+ 3*[ENCODED_BOOLEAN_LENGTH])
(
onion_pub_key,
tx_fingerprint,
rx_fingerprint,
kex_status_byte,
log_messages_byte,
file_reception_byte,
notifications_byte,
nick_bytes,
) = separate_headers(
block,
[ONION_SERVICE_PUBLIC_KEY_LENGTH]
+ 2 * [FINGERPRINT_LENGTH]
+ [KEX_STATUS_LENGTH]
+ 3 * [ENCODED_BOOLEAN_LENGTH],
)
self.contacts.append(Contact(onion_pub_key =onion_pub_key,
tx_fingerprint=tx_fingerprint,
rx_fingerprint=rx_fingerprint,
kex_status =kex_status_byte,
log_messages =bytes_to_bool(log_messages_byte),
file_reception=bytes_to_bool(file_reception_byte),
notifications =bytes_to_bool(notifications_byte),
nick =bytes_to_str(nick_bytes)))
self.contacts.append(
Contact(
onion_pub_key=onion_pub_key,
tx_fingerprint=tx_fingerprint,
rx_fingerprint=rx_fingerprint,
kex_status=kex_status_byte,
log_messages=bytes_to_bool(log_messages_byte),
file_reception=bytes_to_bool(file_reception_byte),
notifications=bytes_to_bool(notifications_byte),
nick=bytes_to_str(nick_bytes),
)
)
@staticmethod
def generate_dummy_contact() -> Contact:
@ -298,14 +344,16 @@ class ContactList(Iterable[Contact], Sized):
serialization when the data is stored to, or read from the
database.
"""
return Contact(onion_pub_key =onion_address_to_pub_key(DUMMY_CONTACT),
nick =DUMMY_NICK,
tx_fingerprint=bytes(FINGERPRINT_LENGTH),
rx_fingerprint=bytes(FINGERPRINT_LENGTH),
kex_status =KEX_STATUS_NONE,
log_messages =False,
file_reception=False,
notifications =False)
return Contact(
onion_pub_key=onion_address_to_pub_key(DUMMY_CONTACT),
nick=DUMMY_NICK,
tx_fingerprint=bytes(FINGERPRINT_LENGTH),
rx_fingerprint=bytes(FINGERPRINT_LENGTH),
kex_status=KEX_STATUS_NONE,
log_messages=False,
file_reception=False,
notifications=False,
)
def _dummy_contacts(self) -> List[Contact]:
"""\
@ -320,19 +368,20 @@ class ContactList(Iterable[Contact], Sized):
KeyList database that contains the local key.
"""
number_of_contacts_to_store = self.settings.max_number_of_contacts + 1
number_of_dummies = number_of_contacts_to_store - len(self.contacts)
number_of_dummies = number_of_contacts_to_store - len(self.contacts)
return [self.dummy_contact] * number_of_dummies
def add_contact(self,
onion_pub_key: bytes,
nick: str,
tx_fingerprint: bytes,
rx_fingerprint: bytes,
kex_status: bytes,
log_messages: bool,
file_reception: bool,
notifications: bool
) -> None:
def add_contact(
self,
onion_pub_key: bytes,
nick: str,
tx_fingerprint: bytes,
rx_fingerprint: bytes,
kex_status: bytes,
log_messages: bool,
file_reception: bool,
notifications: bool,
) -> None:
"""\
Add a new contact to `self.contacts` list and write changes to
the database.
@ -349,19 +398,23 @@ class ContactList(Iterable[Contact], Sized):
"""
if self.has_pub_key(onion_pub_key):
current_contact = self.get_contact_by_pub_key(onion_pub_key)
log_messages = current_contact.log_messages
file_reception = current_contact.file_reception
notifications = current_contact.notifications
log_messages = current_contact.log_messages
file_reception = current_contact.file_reception
notifications = current_contact.notifications
self.remove_contact_by_pub_key(onion_pub_key)
self.contacts.append(Contact(onion_pub_key,
nick,
tx_fingerprint,
rx_fingerprint,
kex_status,
log_messages,
file_reception,
notifications))
self.contacts.append(
Contact(
onion_pub_key,
nick,
tx_fingerprint,
rx_fingerprint,
kex_status,
log_messages,
file_reception,
notifications,
)
)
self.store_contacts()
def remove_contact_by_pub_key(self, onion_pub_key: bytes) -> None:
@ -422,13 +475,23 @@ class ContactList(Iterable[Contact], Sized):
def get_list_of_pending_pub_keys(self) -> List[bytes]:
"""Return list of public keys for contacts that haven't completed key exchange yet."""
return [c.onion_pub_key for c in self.contacts if c.kex_status == KEX_STATUS_PENDING]
return [
c.onion_pub_key for c in self.contacts if c.kex_status == KEX_STATUS_PENDING
]
def get_list_of_existing_pub_keys(self) -> List[bytes]:
"""Return list of public keys for contacts with whom key exchange has been completed."""
return [c.onion_pub_key for c in self.get_list_of_contacts()
if c.kex_status in [KEX_STATUS_UNVERIFIED, KEX_STATUS_VERIFIED,
KEX_STATUS_HAS_RX_PSK, KEX_STATUS_NO_RX_PSK]]
return [
c.onion_pub_key
for c in self.get_list_of_contacts()
if c.kex_status
in [
KEX_STATUS_UNVERIFIED,
KEX_STATUS_VERIFIED,
KEX_STATUS_HAS_RX_PSK,
KEX_STATUS_NO_RX_PSK,
]
]
def contact_selectors(self) -> List[str]:
"""Return list of string-type UIDs that can be used to select a contact."""
@ -440,7 +503,9 @@ class ContactList(Iterable[Contact], Sized):
def has_only_pending_contacts(self) -> bool:
"""Return True if ContactList only has pending contacts, else False."""
return all(c.kex_status == KEX_STATUS_PENDING for c in self.get_list_of_contacts())
return all(
c.kex_status == KEX_STATUS_PENDING for c in self.get_list_of_contacts()
)
def has_pub_key(self, onion_pub_key: bytes) -> bool:
"""Return True if contact with public key exists, else False."""
@ -461,41 +526,46 @@ class ContactList(Iterable[Contact], Sized):
corresponds to what nick etc.
"""
# Initialize columns
c1 = ['Contact']
c2 = ['Account']
c3 = ['Logging']
c4 = ['Notify']
c5 = ['Files ']
c6 = ['Key Ex']
c1 = ["Contact"]
c2 = ["Account"]
c3 = ["Logging"]
c4 = ["Notify"]
c5 = ["Files "]
c6 = ["Key Ex"]
# Key exchange status dictionary
kex_dict = {KEX_STATUS_PENDING: f"{ECDHE} (Pending)",
KEX_STATUS_UNVERIFIED: f"{ECDHE} (Unverified)",
KEX_STATUS_VERIFIED: f"{ECDHE} (Verified)",
KEX_STATUS_NO_RX_PSK: f"{PSK} (No contact key)",
KEX_STATUS_HAS_RX_PSK: PSK
}
kex_dict = {
KEX_STATUS_PENDING: f"{ECDHE} (Pending)",
KEX_STATUS_UNVERIFIED: f"{ECDHE} (Unverified)",
KEX_STATUS_VERIFIED: f"{ECDHE} (Verified)",
KEX_STATUS_NO_RX_PSK: f"{PSK} (No contact key)",
KEX_STATUS_HAS_RX_PSK: PSK,
}
# Populate columns with contact data
for c in self.get_list_of_contacts():
c1.append(c.nick)
c2.append(c.short_address)
c3.append('Yes' if c.log_messages else 'No')
c4.append('Yes' if c.notifications else 'No')
c5.append('Accept' if c.file_reception else 'Reject')
c3.append("Yes" if c.log_messages else "No")
c4.append("Yes" if c.notifications else "No")
c5.append("Accept" if c.file_reception else "Reject")
c6.append(kex_dict[c.kex_status])
# Calculate column widths
c1w, c2w, c3w, c4w, c5w, = [max(len(v) for v in column) + CONTACT_LIST_INDENT
for column in [c1, c2, c3, c4, c5]]
c1w, c2w, c3w, c4w, c5w, = [
max(len(v) for v in column) + CONTACT_LIST_INDENT
for column in [c1, c2, c3, c4, c5]
]
# Align columns by adding whitespace between fields of each line
lines = [f'{f1:{c1w}}{f2:{c2w}}{f3:{c3w}}{f4:{c4w}}{f5:{c5w}}{f6}'
for f1, f2, f3, f4, f5, f6 in zip(c1, c2, c3, c4, c5, c6)]
lines = [
f"{f1:{c1w}}{f2:{c2w}}{f3:{c3w}}{f4:{c4w}}{f5:{c5w}}{f6}"
for f1, f2, f3, f4, f5, f6 in zip(c1, c2, c3, c4, c5, c6)
]
# Add a terminal-wide line between the column names and the data
lines.insert(1, get_terminal_width() * '')
lines.insert(1, get_terminal_width() * "")
# Print the contact list
clear_screen()
print('\n' + '\n'.join(lines) + '\n\n')
print("\n" + "\n".join(lines) + "\n\n")

View File

@ -25,22 +25,43 @@ import typing
from typing import Callable, Iterable, Iterator, List, Sized
from src.common.database import TFCDatabase
from src.common.database import TFCDatabase
from src.common.db_contacts import Contact
from src.common.encoding import bool_to_bytes, int_to_bytes, str_to_bytes, onion_address_to_pub_key, b58encode
from src.common.encoding import bytes_to_bool, bytes_to_int, bytes_to_str
from src.common.exceptions import CriticalError
from src.common.misc import ensure_dir, get_terminal_width, round_up, separate_header, separate_headers
from src.common.misc import split_byte_string
from src.common.statics import (CONTACT_LIST_INDENT, DIR_USER_DATA, DUMMY_GROUP, DUMMY_MEMBER,
ENCODED_BOOLEAN_LENGTH, ENCODED_INTEGER_LENGTH, GROUP_DB_HEADER_LENGTH,
GROUP_ID_LENGTH, GROUP_STATIC_LENGTH, ONION_SERVICE_PUBLIC_KEY_LENGTH,
PADDED_UTF32_STR_LENGTH)
from src.common.encoding import (
bool_to_bytes,
int_to_bytes,
str_to_bytes,
onion_address_to_pub_key,
b58encode,
)
from src.common.encoding import bytes_to_bool, bytes_to_int, bytes_to_str
from src.common.exceptions import CriticalError
from src.common.misc import (
ensure_dir,
get_terminal_width,
round_up,
separate_header,
separate_headers,
)
from src.common.misc import split_byte_string
from src.common.statics import (
CONTACT_LIST_INDENT,
DIR_USER_DATA,
DUMMY_GROUP,
DUMMY_MEMBER,
ENCODED_BOOLEAN_LENGTH,
ENCODED_INTEGER_LENGTH,
GROUP_DB_HEADER_LENGTH,
GROUP_ID_LENGTH,
GROUP_STATIC_LENGTH,
ONION_SERVICE_PUBLIC_KEY_LENGTH,
PADDED_UTF32_STR_LENGTH,
)
if typing.TYPE_CHECKING:
from src.common.db_contacts import ContactList
from src.common.db_contacts import ContactList
from src.common.db_masterkey import MasterKey
from src.common.db_settings import Settings
from src.common.db_settings import Settings
class Group(Iterable[Contact], Sized):
@ -95,28 +116,29 @@ class Group(Iterable[Contact], Sized):
header.
"""
def __init__(self,
name: str,
group_id: bytes,
log_messages: bool,
notifications: bool,
members: List['Contact'],
settings: 'Settings',
store_groups: Callable[..., None]
) -> None:
def __init__(
self,
name: str,
group_id: bytes,
log_messages: bool,
notifications: bool,
members: List["Contact"],
settings: "Settings",
store_groups: Callable[..., None],
) -> None:
"""Create a new Group object.
The `self.store_groups` is a reference to the method of the
parent object GroupList that stores the list of groups into an
encrypted database.
"""
self.name = name
self.group_id = group_id
self.log_messages = log_messages
self.name = name
self.group_id = group_id
self.log_messages = log_messages
self.notifications = notifications
self.members = members
self.settings = settings
self.store_groups = store_groups
self.members = members
self.settings = settings
self.store_groups = store_groups
def __iter__(self) -> Iterator[Contact]:
"""Iterate over members (Contact objects) in the Group object."""
@ -139,21 +161,27 @@ class Group(Iterable[Contact], Sized):
metadata the ciphertext length of the group database could
reveal.
"""
members = self.get_list_of_member_pub_keys()
number_of_dummies = self.settings.max_number_of_group_members - len(self.members)
members += number_of_dummies * [onion_address_to_pub_key(DUMMY_MEMBER)]
member_bytes = b''.join(members)
members = self.get_list_of_member_pub_keys()
number_of_dummies = self.settings.max_number_of_group_members - len(
self.members
)
members += number_of_dummies * [onion_address_to_pub_key(DUMMY_MEMBER)]
member_bytes = b"".join(members)
return (str_to_bytes(self.name)
+ self.group_id
+ bool_to_bytes(self.log_messages)
+ bool_to_bytes(self.notifications)
+ member_bytes)
return (
str_to_bytes(self.name)
+ self.group_id
+ bool_to_bytes(self.log_messages)
+ bool_to_bytes(self.notifications)
+ member_bytes
)
def add_members(self, contacts: List['Contact']) -> None:
def add_members(self, contacts: List["Contact"]) -> None:
"""Add a list of Contact objects to the group."""
pre_existing = self.get_list_of_member_pub_keys()
self.members.extend((c for c in contacts if c.onion_pub_key not in pre_existing))
self.members.extend(
(c for c in contacts if c.onion_pub_key not in pre_existing)
)
self.store_groups()
def remove_members(self, pub_keys: List[bytes]) -> bool:
@ -207,17 +235,15 @@ class GroupList(Iterable[Group], Sized):
names for making queries to the database.
"""
def __init__(self,
master_key: 'MasterKey',
settings: 'Settings',
contact_list: 'ContactList'
) -> None:
def __init__(
self, master_key: "MasterKey", settings: "Settings", contact_list: "ContactList"
) -> None:
"""Create a new GroupList object."""
self.settings = settings
self.settings = settings
self.contact_list = contact_list
self.groups = [] # type: List[Group]
self.file_name = f'{DIR_USER_DATA}{settings.software_operation}_groups'
self.database = TFCDatabase(self.file_name, master_key)
self.groups = [] # type: List[Group]
self.file_name = f"{DIR_USER_DATA}{settings.software_operation}_groups"
self.database = TFCDatabase(self.file_name, master_key)
ensure_dir(DIR_USER_DATA)
if os.path.isfile(self.file_name):
@ -253,8 +279,10 @@ class GroupList(Iterable[Group], Sized):
The ciphertext includes a 24-byte nonce and a 16-byte tag, so
the size of the final database is 131572 bytes.
"""
pt_bytes = self._generate_group_db_header()
pt_bytes += b''.join([g.serialize_g() for g in (self.groups + self._dummy_groups())])
pt_bytes = self._generate_group_db_header()
pt_bytes += b"".join(
[g.serialize_g() for g in (self.groups + self._dummy_groups())]
)
self.database.store_database(pt_bytes, replace)
def _load_groups(self) -> None:
@ -275,18 +303,29 @@ class GroupList(Iterable[Group], Sized):
# Slice and decode headers
group_db_headers, pt_bytes = separate_header(pt_bytes, GROUP_DB_HEADER_LENGTH)
padding_for_group_db, padding_for_members, number_of_groups, members_in_largest_group \
= list(map(bytes_to_int, split_byte_string(group_db_headers, ENCODED_INTEGER_LENGTH)))
(
padding_for_group_db,
padding_for_members,
number_of_groups,
members_in_largest_group,
) = list(
map(
bytes_to_int,
split_byte_string(group_db_headers, ENCODED_INTEGER_LENGTH),
)
)
# Slice dummy groups
bytes_per_group = GROUP_STATIC_LENGTH + padding_for_members * ONION_SERVICE_PUBLIC_KEY_LENGTH
dummy_data_len = (padding_for_group_db - number_of_groups) * bytes_per_group
group_data = pt_bytes[:-dummy_data_len]
bytes_per_group = (
GROUP_STATIC_LENGTH + padding_for_members * ONION_SERVICE_PUBLIC_KEY_LENGTH
)
dummy_data_len = (padding_for_group_db - number_of_groups) * bytes_per_group
group_data = pt_bytes[:-dummy_data_len]
update_db = self._check_db_settings(number_of_groups, members_in_largest_group)
blocks = split_byte_string(group_data, item_len=bytes_per_group)
blocks = split_byte_string(group_data, item_len=bytes_per_group)
all_pub_keys = self.contact_list.get_list_of_pub_keys()
all_pub_keys = self.contact_list.get_list_of_pub_keys()
dummy_pub_key = onion_address_to_pub_key(DUMMY_MEMBER)
# Deserialize group objects
@ -294,30 +333,48 @@ class GroupList(Iterable[Group], Sized):
if len(block) != bytes_per_group:
raise CriticalError("Invalid data in group database.")
name_bytes, group_id, log_messages_byte, notification_byte, ser_pub_keys \
= separate_headers(block, [PADDED_UTF32_STR_LENGTH, GROUP_ID_LENGTH] + 2*[ENCODED_BOOLEAN_LENGTH])
(
name_bytes,
group_id,
log_messages_byte,
notification_byte,
ser_pub_keys,
) = separate_headers(
block,
[PADDED_UTF32_STR_LENGTH, GROUP_ID_LENGTH]
+ 2 * [ENCODED_BOOLEAN_LENGTH],
)
pub_key_list = split_byte_string(ser_pub_keys, item_len=ONION_SERVICE_PUBLIC_KEY_LENGTH)
pub_key_list = split_byte_string(
ser_pub_keys, item_len=ONION_SERVICE_PUBLIC_KEY_LENGTH
)
group_pub_keys = [k for k in pub_key_list if k != dummy_pub_key]
group_members = [self.contact_list.get_contact_by_pub_key(k) for k in group_pub_keys if k in all_pub_keys]
group_members = [
self.contact_list.get_contact_by_pub_key(k)
for k in group_pub_keys
if k in all_pub_keys
]
self.groups.append(Group(name =bytes_to_str(name_bytes),
group_id =group_id,
log_messages =bytes_to_bool(log_messages_byte),
notifications=bytes_to_bool(notification_byte),
members =group_members,
settings =self.settings,
store_groups =self.store_groups))
self.groups.append(
Group(
name=bytes_to_str(name_bytes),
group_id=group_id,
log_messages=bytes_to_bool(log_messages_byte),
notifications=bytes_to_bool(notification_byte),
members=group_members,
settings=self.settings,
store_groups=self.store_groups,
)
)
update_db |= set(all_pub_keys) > set(group_pub_keys)
if update_db:
self.store_groups()
def _check_db_settings(self,
number_of_actual_groups: int,
members_in_largest_group: int
) -> bool:
def _check_db_settings(
self, number_of_actual_groups: int, members_in_largest_group: int
) -> bool:
"""\
Adjust TFC's settings automatically if loaded group database was
stored using larger database setting values.
@ -332,7 +389,9 @@ class GroupList(Iterable[Group], Sized):
update_db = True
if members_in_largest_group > self.settings.max_number_of_group_members:
self.settings.max_number_of_group_members = round_up(members_in_largest_group)
self.settings.max_number_of_group_members = round_up(
members_in_largest_group
)
update_db = True
if update_db:
@ -370,12 +429,21 @@ class GroupList(Iterable[Group], Sized):
setting (e.g., in cases like the
one described above).
"""
return b''.join(list(map(int_to_bytes, [self.settings.max_number_of_groups,
self.settings.max_number_of_group_members,
len(self.groups),
self.largest_group()])))
return b"".join(
list(
map(
int_to_bytes,
[
self.settings.max_number_of_groups,
self.settings.max_number_of_group_members,
len(self.groups),
self.largest_group(),
],
)
)
)
def _generate_dummy_group(self) -> 'Group':
def _generate_dummy_group(self) -> "Group":
"""Generate a dummy Group object.
The dummy group simplifies the code around the constant length
@ -384,37 +452,45 @@ class GroupList(Iterable[Group], Sized):
"""
dummy_member = self.contact_list.generate_dummy_contact()
return Group(name =DUMMY_GROUP,
group_id =bytes(GROUP_ID_LENGTH),
log_messages =False,
notifications=False,
members =self.settings.max_number_of_group_members * [dummy_member],
settings =self.settings,
store_groups =lambda: None)
return Group(
name=DUMMY_GROUP,
group_id=bytes(GROUP_ID_LENGTH),
log_messages=False,
notifications=False,
members=self.settings.max_number_of_group_members * [dummy_member],
settings=self.settings,
store_groups=lambda: None,
)
def _dummy_groups(self) -> List[Group]:
"""Generate a proper size list of dummy groups for database padding."""
number_of_dummies = self.settings.max_number_of_groups - len(self.groups)
dummy_group = self._generate_dummy_group()
dummy_group = self._generate_dummy_group()
return [dummy_group] * number_of_dummies
def add_group(self,
name: str,
group_id: bytes,
log_messages: bool,
notifications: bool,
members: List['Contact']) -> None:
def add_group(
self,
name: str,
group_id: bytes,
log_messages: bool,
notifications: bool,
members: List["Contact"],
) -> None:
"""Add a new group to `self.groups` and write changes to the database."""
if self.has_group(name):
self.remove_group_by_name(name)
self.groups.append(Group(name,
group_id,
log_messages,
notifications,
members,
self.settings,
self.store_groups))
self.groups.append(
Group(
name,
group_id,
log_messages,
notifications,
members,
self.settings,
self.store_groups,
)
)
self.store_groups()
def remove_group_by_name(self, name: str) -> None:
@ -461,7 +537,7 @@ class GroupList(Iterable[Group], Sized):
"""Return list of human readable (B58 encoded) group IDs."""
return [b58encode(g.group_id) for g in self.groups]
def get_group_members(self, group_id: bytes) -> List['Contact']:
def get_group_members(self, group_id: bytes) -> List["Contact"]:
"""Return list of group members (Contact objects)."""
return self.get_group_by_id(group_id).members
@ -486,46 +562,54 @@ class GroupList(Iterable[Group], Sized):
corresponds to what group, and which contacts are in the group.
"""
# Initialize columns
c1 = ['Group' ]
c2 = ['Group ID']
c3 = ['Logging ']
c4 = ['Notify' ]
c5 = ['Members' ]
c1 = ["Group"]
c2 = ["Group ID"]
c3 = ["Logging "]
c4 = ["Notify"]
c5 = ["Members"]
# Populate columns with group data that has only a single line
for g in self.groups:
c1.append(g.name)
c2.append(b58encode(g.group_id))
c3.append('Yes' if g.log_messages else 'No')
c4.append('Yes' if g.notifications else 'No')
c3.append("Yes" if g.log_messages else "No")
c4.append("Yes" if g.notifications else "No")
# Calculate the width of single-line columns
c1w, c2w, c3w, c4w = [max(len(v) for v in column) + CONTACT_LIST_INDENT for column in [c1, c2, c3, c4]]
c1w, c2w, c3w, c4w = [
max(len(v) for v in column) + CONTACT_LIST_INDENT
for column in [c1, c2, c3, c4]
]
# Create a wrapper for Members-column
wrapped_members_line_indent = c1w + c2w + c3w + c4w
members_column_width = max(1, get_terminal_width() - wrapped_members_line_indent)
wrapper = textwrap.TextWrapper(width=members_column_width)
members_column_width = max(
1, get_terminal_width() - wrapped_members_line_indent
)
wrapper = textwrap.TextWrapper(width=members_column_width)
# Populate the Members-column
for g in self.groups:
if g.empty():
c5.append("<Empty group>\n")
else:
comma_separated_nicks = ', '.join(sorted([m.nick for m in g.members]))
members_column_lines = wrapper.fill(comma_separated_nicks).split('\n')
comma_separated_nicks = ", ".join(sorted([m.nick for m in g.members]))
members_column_lines = wrapper.fill(comma_separated_nicks).split("\n")
final_str = members_column_lines[0] + '\n'
final_str = members_column_lines[0] + "\n"
for line in members_column_lines[1:]:
final_str += wrapped_members_line_indent * ' ' + line + '\n'
final_str += wrapped_members_line_indent * " " + line + "\n"
c5.append(final_str)
# Align columns by adding whitespace between fields of each line
lines = [f'{f1:{c1w}}{f2:{c2w}}{f3:{c3w}}{f4:{c4w}}{f5}' for f1, f2, f3, f4, f5 in zip(c1, c2, c3, c4, c5)]
lines = [
f"{f1:{c1w}}{f2:{c2w}}{f3:{c3w}}{f4:{c4w}}{f5}"
for f1, f2, f3, f4, f5 in zip(c1, c2, c3, c4, c5)
]
# Add a terminal-wide line between the column names and the data
lines.insert(1, get_terminal_width() * '')
lines.insert(1, get_terminal_width() * "")
# Print the group list
print('\n'.join(lines) + '\n')
print("\n".join(lines) + "\n")

View File

@ -25,23 +25,40 @@ import typing
from typing import Any, Callable, Dict, List
from src.common.crypto import blake2b, csprng
from src.common.database import TFCDatabase
from src.common.encoding import int_to_bytes, onion_address_to_pub_key
from src.common.encoding import bytes_to_int
from src.common.crypto import blake2b, csprng
from src.common.database import TFCDatabase
from src.common.encoding import int_to_bytes, onion_address_to_pub_key
from src.common.encoding import bytes_to_int
from src.common.exceptions import CriticalError
from src.common.misc import ensure_dir, separate_headers, split_byte_string
from src.common.statics import (DIR_USER_DATA, DUMMY_CONTACT, HARAC_LENGTH, INITIAL_HARAC, KDB_ADD_ENTRY_HEADER,
KDB_HALT_ACK_HEADER, KDB_M_KEY_CHANGE_HALT_HEADER, KDB_REMOVE_ENTRY_HEADER,
KDB_UPDATE_SIZE_HEADER, KEY_MANAGEMENT_QUEUE, KEY_MGMT_ACK_QUEUE, KEYSET_LENGTH,
LOCAL_PUBKEY, ONION_SERVICE_PUBLIC_KEY_LENGTH, RX, SYMMETRIC_KEY_LENGTH, TX)
from src.common.misc import ensure_dir, separate_headers, split_byte_string
from src.common.statics import (
DIR_USER_DATA,
DUMMY_CONTACT,
HARAC_LENGTH,
INITIAL_HARAC,
KDB_ADD_ENTRY_HEADER,
KDB_HALT_ACK_HEADER,
KDB_M_KEY_CHANGE_HALT_HEADER,
KDB_REMOVE_ENTRY_HEADER,
KDB_UPDATE_SIZE_HEADER,
KEY_MANAGEMENT_QUEUE,
KEY_MGMT_ACK_QUEUE,
KEYSET_LENGTH,
LOCAL_PUBKEY,
ONION_SERVICE_PUBLIC_KEY_LENGTH,
RX,
SYMMETRIC_KEY_LENGTH,
TX,
)
if typing.TYPE_CHECKING:
from multiprocessing import Queue
from multiprocessing import Queue
from src.common.db_masterkey import MasterKey
from src.common.db_settings import Settings
from src.common.db_settings import Settings
QueueDict = Dict[bytes, Queue[Any]]
class KeySet(object):
"""\
KeySet object contains frequently changing keys and hash ratchet
@ -70,16 +87,17 @@ class KeySet(object):
only by the Receiver Program.
"""
def __init__(self,
onion_pub_key: bytes,
tx_mk: bytes,
rx_mk: bytes,
tx_hk: bytes,
rx_hk: bytes,
tx_harac: int,
rx_harac: int,
store_keys: Callable[..., None]
) -> None:
def __init__(
self,
onion_pub_key: bytes,
tx_mk: bytes,
rx_mk: bytes,
tx_hk: bytes,
rx_hk: bytes,
tx_harac: int,
rx_harac: int,
store_keys: Callable[..., None],
) -> None:
"""Create a new KeySet object.
The `self.store_keys` is a reference to the method of the parent
@ -87,13 +105,13 @@ class KeySet(object):
encrypted database.
"""
self.onion_pub_key = onion_pub_key
self.tx_mk = tx_mk
self.rx_mk = rx_mk
self.tx_hk = tx_hk
self.rx_hk = rx_hk
self.tx_harac = tx_harac
self.rx_harac = rx_harac
self.store_keys = store_keys
self.tx_mk = tx_mk
self.rx_mk = rx_mk
self.tx_hk = tx_hk
self.rx_hk = rx_hk
self.tx_harac = tx_harac
self.rx_harac = rx_harac
self.store_keys = store_keys
def serialize_k(self) -> bytes:
"""Return KeySet data as a constant length byte string.
@ -105,13 +123,15 @@ class KeySet(object):
serialization is to hide any metadata about the KeySet database
the ciphertext length of the key database would reveal.
"""
return (self.onion_pub_key
+ self.tx_mk
+ self.rx_mk
+ self.tx_hk
+ self.rx_hk
+ int_to_bytes(self.tx_harac)
+ int_to_bytes(self.rx_harac))
return (
self.onion_pub_key
+ self.tx_mk
+ self.rx_mk
+ self.tx_hk
+ self.rx_hk
+ int_to_bytes(self.tx_harac)
+ int_to_bytes(self.rx_harac)
)
def rotate_tx_mk(self) -> None:
"""\
@ -129,15 +149,13 @@ class KeySet(object):
[1] (pp. 17-18) https://netzpolitik.org/wp-upload/SCIMP-paper.pdf
[2] https://signal.org/blog/advanced-ratcheting/
"""
self.tx_mk = blake2b(self.tx_mk + int_to_bytes(self.tx_harac), digest_size=SYMMETRIC_KEY_LENGTH)
self.tx_mk = blake2b(
self.tx_mk + int_to_bytes(self.tx_harac), digest_size=SYMMETRIC_KEY_LENGTH
)
self.tx_harac += 1
self.store_keys()
def update_mk(self,
direction: str,
key: bytes,
offset: int
) -> None:
def update_mk(self, direction: str, key: bytes, offset: int) -> None:
"""Update Receiver Program's tx/rx-message key and tx/rx-harac.
This method provides per-message forward secrecy for received
@ -147,11 +165,11 @@ class KeySet(object):
function is not linear like in the case of `rotate_tx_mk`.
"""
if direction == TX:
self.tx_mk = key
self.tx_mk = key
self.tx_harac += offset
self.store_keys()
elif direction == RX:
self.rx_mk = key
self.rx_mk = key
self.rx_harac += offset
self.store_keys()
else:
@ -178,15 +196,15 @@ class KeyList(object):
being stored in the database.
"""
def __init__(self, master_key: 'MasterKey', settings: 'Settings') -> None:
def __init__(self, master_key: "MasterKey", settings: "Settings") -> None:
"""Create a new KeyList object."""
self.master_key = master_key
self.settings = settings
self.keysets = [] # type: List[KeySet]
self.master_key = master_key
self.settings = settings
self.keysets = [] # type: List[KeySet]
self.dummy_keyset = self.generate_dummy_keyset()
self.dummy_id = self.dummy_keyset.onion_pub_key
self.file_name = f'{DIR_USER_DATA}{settings.software_operation}_keys'
self.database = TFCDatabase(self.file_name, master_key)
self.dummy_id = self.dummy_keyset.onion_pub_key
self.file_name = f"{DIR_USER_DATA}{settings.software_operation}_keys"
self.database = TFCDatabase(self.file_name, master_key)
ensure_dir(DIR_USER_DATA)
if os.path.isfile(self.file_name):
@ -209,7 +227,9 @@ class KeyList(object):
ciphertext includes a 24-byte nonce and a 16-byte tag, so the
size of the final database is 9016 bytes.
"""
pt_bytes = b''.join([k.serialize_k() for k in self.keysets + self._dummy_keysets()])
pt_bytes = b"".join(
[k.serialize_k() for k in self.keysets + self._dummy_keysets()]
)
self.database.store_database(pt_bytes, replace)
def _load_keys(self) -> None:
@ -223,28 +243,44 @@ class KeyList(object):
populate the `self.keysets` list with KeySet objects, the data
of which is sliced and decoded from the dummy-free blocks.
"""
pt_bytes = self.database.load_database()
blocks = split_byte_string(pt_bytes, item_len=KEYSET_LENGTH)
pt_bytes = self.database.load_database()
blocks = split_byte_string(pt_bytes, item_len=KEYSET_LENGTH)
df_blocks = [b for b in blocks if not b.startswith(self.dummy_id)]
for block in df_blocks:
if len(block) != KEYSET_LENGTH:
raise CriticalError("Invalid data in key database.")
onion_pub_key, tx_mk, rx_mk, tx_hk, rx_hk, tx_harac_bytes, rx_harac_bytes \
= separate_headers(block, [ONION_SERVICE_PUBLIC_KEY_LENGTH] + 4*[SYMMETRIC_KEY_LENGTH] + [HARAC_LENGTH])
(
onion_pub_key,
tx_mk,
rx_mk,
tx_hk,
rx_hk,
tx_harac_bytes,
rx_harac_bytes,
) = separate_headers(
block,
[ONION_SERVICE_PUBLIC_KEY_LENGTH]
+ 4 * [SYMMETRIC_KEY_LENGTH]
+ [HARAC_LENGTH],
)
self.keysets.append(KeySet(onion_pub_key=onion_pub_key,
tx_mk=tx_mk,
rx_mk=rx_mk,
tx_hk=tx_hk,
rx_hk=rx_hk,
tx_harac=bytes_to_int(tx_harac_bytes),
rx_harac=bytes_to_int(rx_harac_bytes),
store_keys=self.store_keys))
self.keysets.append(
KeySet(
onion_pub_key=onion_pub_key,
tx_mk=tx_mk,
rx_mk=rx_mk,
tx_hk=tx_hk,
rx_hk=rx_hk,
tx_harac=bytes_to_int(tx_harac_bytes),
rx_harac=bytes_to_int(rx_harac_bytes),
store_keys=self.store_keys,
)
)
@staticmethod
def generate_dummy_keyset() -> 'KeySet':
def generate_dummy_keyset() -> "KeySet":
"""Generate a dummy KeySet object.
The dummy KeySet simplifies the code around the constant length
@ -254,14 +290,16 @@ class KeyList(object):
In case the dummy keyset would ever be loaded accidentally, it
uses a set of random keys to prevent decryption by eavesdropper.
"""
return KeySet(onion_pub_key=onion_address_to_pub_key(DUMMY_CONTACT),
tx_mk=csprng(),
rx_mk=csprng(),
tx_hk=csprng(),
rx_hk=csprng(),
tx_harac=INITIAL_HARAC,
rx_harac=INITIAL_HARAC,
store_keys=lambda: None)
return KeySet(
onion_pub_key=onion_address_to_pub_key(DUMMY_CONTACT),
tx_mk=csprng(),
rx_mk=csprng(),
tx_hk=csprng(),
rx_hk=csprng(),
tx_harac=INITIAL_HARAC,
rx_harac=INITIAL_HARAC,
store_keys=lambda: None,
)
def _dummy_keysets(self) -> List[KeySet]:
"""\
@ -271,15 +309,17 @@ class KeyList(object):
The additional contact (+1) is the local key.
"""
number_of_contacts_to_store = self.settings.max_number_of_contacts + 1
number_of_dummies = number_of_contacts_to_store - len(self.keysets)
number_of_dummies = number_of_contacts_to_store - len(self.keysets)
return [self.dummy_keyset] * number_of_dummies
def add_keyset(self,
onion_pub_key: bytes,
tx_mk: bytes,
rx_mk: bytes,
tx_hk: bytes,
rx_hk: bytes) -> None:
def add_keyset(
self,
onion_pub_key: bytes,
tx_mk: bytes,
rx_mk: bytes,
tx_hk: bytes,
rx_hk: bytes,
) -> None:
"""\
Add a new KeySet to `self.keysets` list and write changes to the
database.
@ -287,14 +327,18 @@ class KeyList(object):
if self.has_keyset(onion_pub_key):
self.remove_keyset(onion_pub_key)
self.keysets.append(KeySet(onion_pub_key=onion_pub_key,
tx_mk=tx_mk,
rx_mk=rx_mk,
tx_hk=tx_hk,
rx_hk=rx_hk,
tx_harac=INITIAL_HARAC,
rx_harac=INITIAL_HARAC,
store_keys=self.store_keys))
self.keysets.append(
KeySet(
onion_pub_key=onion_pub_key,
tx_mk=tx_mk,
rx_mk=rx_mk,
tx_hk=tx_hk,
rx_hk=rx_hk,
tx_harac=INITIAL_HARAC,
rx_harac=INITIAL_HARAC,
store_keys=self.store_keys,
)
)
self.store_keys()
def remove_keyset(self, onion_pub_key: bytes) -> None:
@ -309,7 +353,7 @@ class KeyList(object):
self.store_keys()
break
def change_master_key(self, queues: 'QueueDict') -> None:
def change_master_key(self, queues: "QueueDict") -> None:
"""Change the master key and encrypt the database with the new key."""
key_queue = queues[KEY_MANAGEMENT_QUEUE]
ack_queue = queues[KEY_MGMT_ACK_QUEUE]
@ -317,7 +361,7 @@ class KeyList(object):
# Halt sender loop here until keys have been replaced by the
# `input_loop` process, and new master key is delivered.
ack_queue.put(KDB_HALT_ACK_HEADER)
while key_queue.qsize() == 0:
while not key_queue.qsize():
time.sleep(0.001)
new_master_key = key_queue.get()
@ -328,7 +372,7 @@ class KeyList(object):
# Send new master key back to `input_loop` process to verify it was received.
ack_queue.put(new_master_key)
def update_database(self, settings: 'Settings') -> None:
def update_database(self, settings: "Settings") -> None:
"""Update settings and database size."""
self.settings = settings
self.store_keys()
@ -342,7 +386,9 @@ class KeyList(object):
def get_list_of_pub_keys(self) -> List[bytes]:
"""Return list of Onion Service public keys for KeySets."""
return [k.onion_pub_key for k in self.keysets if k.onion_pub_key != LOCAL_PUBKEY]
return [
k.onion_pub_key for k in self.keysets if k.onion_pub_key != LOCAL_PUBKEY
]
def has_keyset(self, onion_pub_key: bytes) -> bool:
"""Return True if KeySet with matching Onion Service public key exists, else False."""
@ -364,7 +410,7 @@ class KeyList(object):
"""Return True if local KeySet object exists, else False."""
return any(k.onion_pub_key == LOCAL_PUBKEY for k in self.keysets)
def manage(self, queues: 'QueueDict', command: str, *params: Any) -> None:
def manage(self, queues: "QueueDict", command: str, *params: Any) -> None:
"""Manage KeyList based on a command.
The command is delivered from `input_process` to `sender_loop`

View File

@ -27,39 +27,73 @@ import time
import typing
from datetime import datetime
from typing import Any, Dict, List, Tuple, Union
from typing import Any, Dict, List, Tuple, Union
from src.common.database import MessageLog
from src.common.encoding import b58encode, bytes_to_bool, bytes_to_timestamp, pub_key_to_short_address
from src.common.exceptions import CriticalError, FunctionReturn
from src.common.misc import ensure_dir, get_terminal_width, ignored, separate_header, separate_headers
from src.common.output import clear_screen
from src.common.statics import (ASSEMBLY_PACKET_HEADER_LENGTH, DIR_USER_DATA, GROUP_ID_LENGTH, GROUP_MESSAGE_HEADER,
GROUP_MSG_ID_LENGTH, LOGFILE_MASKING_QUEUE, LOG_ENTRY_LENGTH, LOG_PACKET_QUEUE,
LOG_SETTING_QUEUE, MESSAGE, MESSAGE_HEADER_LENGTH, ONION_SERVICE_PUBLIC_KEY_LENGTH,
ORIGIN_HEADER_LENGTH, ORIGIN_USER_HEADER, PLACEHOLDER_DATA, PRIVATE_MESSAGE_HEADER,
P_N_HEADER, RX, TIMESTAMP_LENGTH, TRAFFIC_MASKING_QUEUE, TX, UNIT_TEST_QUEUE,
WHISPER_FIELD_LENGTH, WIN_TYPE_CONTACT, WIN_TYPE_GROUP)
from src.common.database import MessageLog
from src.common.encoding import (
b58encode,
bytes_to_bool,
bytes_to_timestamp,
pub_key_to_short_address,
)
from src.common.exceptions import CriticalError, SoftError
from src.common.misc import (
ensure_dir,
get_terminal_width,
ignored,
separate_header,
separate_headers,
)
from src.common.output import clear_screen
from src.common.statics import (
ASSEMBLY_PACKET_HEADER_LENGTH,
DIR_USER_DATA,
GROUP_ID_LENGTH,
GROUP_MESSAGE_HEADER,
GROUP_MSG_ID_LENGTH,
LOGFILE_MASKING_QUEUE,
LOG_ENTRY_LENGTH,
LOG_PACKET_QUEUE,
LOG_SETTING_QUEUE,
MESSAGE,
MESSAGE_HEADER_LENGTH,
ONION_SERVICE_PUBLIC_KEY_LENGTH,
ORIGIN_HEADER_LENGTH,
ORIGIN_USER_HEADER,
PLACEHOLDER_DATA,
PRIVATE_MESSAGE_HEADER,
P_N_HEADER,
RX,
TEMP_POSTFIX,
TIMESTAMP_LENGTH,
TRAFFIC_MASKING_QUEUE,
TX,
UNIT_TEST_QUEUE,
WHISPER_FIELD_LENGTH,
WIN_TYPE_CONTACT,
WIN_TYPE_GROUP,
)
from src.receiver.packet import PacketList
from src.receiver.packet import Packet, PacketList
from src.receiver.windows import RxWindow
if typing.TYPE_CHECKING:
from multiprocessing import Queue
from src.common.db_contacts import ContactList
from src.common.db_groups import GroupList
from multiprocessing import Queue
from src.common.db_contacts import ContactList
from src.common.db_groups import GroupList
from src.common.db_masterkey import MasterKey
from src.common.db_settings import Settings
from src.common.db_settings import Settings
from src.transmitter.windows import TxWindow
MsgTuple = Tuple[datetime, str, bytes, bytes, bool, bool]
def log_writer_loop(queues: Dict[bytes, 'Queue[Any]'], # Dictionary of queues
settings: 'Settings', # Settings object
message_log: 'MessageLog', # MessageLog object
unit_test: bool = False # True, exits loop when UNIT_TEST_QUEUE is no longer empty.
) -> None:
def log_writer_loop(
queues: Dict[bytes, "Queue[Any]"], # Dictionary of queues
settings: "Settings", # Settings object
message_log: "MessageLog", # MessageLog object
unit_test: bool = False, # True, exits loop when UNIT_TEST_QUEUE is no longer empty.
) -> None:
"""Write assembly packets to log database.
When traffic masking is enabled, the fact this loop is run as a
@ -69,27 +103,35 @@ def log_writer_loop(queues: Dict[bytes, 'Queue[Any]'], # Dictionary of que
even from an adversary performing timing attacks from within the
Networked Computer of the user.
"""
log_packet_queue = queues[LOG_PACKET_QUEUE]
log_setting_queue = queues[LOG_SETTING_QUEUE]
log_packet_queue = queues[LOG_PACKET_QUEUE]
log_setting_queue = queues[LOG_SETTING_QUEUE]
traffic_masking_queue = queues[TRAFFIC_MASKING_QUEUE]
logfile_masking_queue = queues[LOGFILE_MASKING_QUEUE]
logging_state = False
logging_state = False
logfile_masking = settings.log_file_masking
traffic_masking = settings.traffic_masking
while True:
with ignored(EOFError, KeyboardInterrupt):
while log_packet_queue.qsize() == 0:
while not log_packet_queue.qsize():
time.sleep(0.01)
if traffic_masking_queue.qsize() != 0:
traffic_masking = traffic_masking_queue.get()
if logfile_masking_queue.qsize() != 0:
logfile_masking = logfile_masking_queue.get()
traffic_masking, logfile_masking = check_log_setting_queues(
traffic_masking,
traffic_masking_queue,
logfile_masking,
logfile_masking_queue,
)
onion_pub_key, assembly_packet, log_messages, log_as_ph, master_key = log_packet_queue.get()
(
onion_pub_key,
assembly_packet,
log_messages,
log_as_ph,
master_key,
) = log_packet_queue.get()
# Update log database key
message_log.database_key = master_key.master_key
@ -98,18 +140,9 @@ def log_writer_loop(queues: Dict[bytes, 'Queue[Any]'], # Dictionary of que
if onion_pub_key is None:
continue
# `logging_state` retains the logging setting for noise packets
# that do not know the log setting of the window. To prevent
# logging of noise packets in situation where logging has
# been disabled, but no new message assembly packet carrying
# the logging setting is received, the LOG_SETTING_QUEUE
# is checked for up-to-date logging setting for every
# received noise packet.
if assembly_packet[:ASSEMBLY_PACKET_HEADER_LENGTH] == P_N_HEADER:
if log_setting_queue.qsize() != 0:
logging_state = log_setting_queue.get()
else:
logging_state = log_messages
logging_state = update_logging_state(
assembly_packet, logging_state, log_messages, log_setting_queue
)
# Detect if we are going to log the packet at all.
if not logging_state:
@ -146,11 +179,51 @@ def log_writer_loop(queues: Dict[bytes, 'Queue[Any]'], # Dictionary of que
break
def write_log_entry(assembly_packet: bytes, # Assembly packet to log
onion_pub_key: bytes, # Onion Service public key of the associated contact
message_log: MessageLog, # MessageLog object
origin: bytes = ORIGIN_USER_HEADER, # The direction of logged packet
) -> None:
def check_log_setting_queues(
traffic_masking: bool,
traffic_masking_queue: "Queue[Any]",
logfile_masking: bool,
logfile_masking_queue: "Queue[Any]",
) -> Tuple[bool, bool]:
"""Check for updates to logging settings."""
if traffic_masking_queue.qsize():
traffic_masking = traffic_masking_queue.get()
if logfile_masking_queue.qsize():
logfile_masking = logfile_masking_queue.get()
return traffic_masking, logfile_masking
def update_logging_state(
assembly_packet: bytes,
logging_state: bool,
log_messages: bool,
log_setting_queue: "Queue[Any]",
) -> bool:
"""Update logging state.
`logging_state` retains the logging setting for noise packets that
do not know the log setting of the window. To prevent logging of
noise packets in situation where logging has been disabled, but no
new message assembly packet carrying the logging setting is received,
the LOG_SETTING_QUEUE is checked for up-to-date logging setting for
every received noise packet.
"""
if assembly_packet[:ASSEMBLY_PACKET_HEADER_LENGTH] == P_N_HEADER:
if log_setting_queue.qsize():
logging_state = log_setting_queue.get()
else:
logging_state = log_messages
return logging_state
def write_log_entry(
assembly_packet: bytes, # Assembly packet to log
onion_pub_key: bytes, # Onion Service public key of the associated contact
message_log: MessageLog, # MessageLog object
origin: bytes = ORIGIN_USER_HEADER, # The direction of logged packet
) -> None:
"""Add an assembly packet to the encrypted log database.
Logging assembly packets allows reconstruction of conversation while
@ -172,7 +245,7 @@ def write_log_entry(assembly_packet: bytes, # Assembly pac
`settings.log_file_masking` is enabled, instead of file data, TFC
writes placeholder data to the log database.
"""
timestamp = struct.pack('<L', int(time.time()))
timestamp = struct.pack("<L", int(time.time()))
log_entry = onion_pub_key + timestamp + origin + assembly_packet
if len(log_entry) != LOG_ENTRY_LENGTH:
@ -186,17 +259,18 @@ def check_log_file_exists(file_name: str) -> None:
"""Check that the log file exists."""
ensure_dir(DIR_USER_DATA)
if not os.path.isfile(file_name):
raise FunctionReturn("No log database available.")
raise SoftError("No log database available.")
def access_logs(window: Union['TxWindow', 'RxWindow'],
contact_list: 'ContactList',
group_list: 'GroupList',
settings: 'Settings',
master_key: 'MasterKey',
msg_to_load: int = 0,
export: bool = False
) -> None:
def access_logs(
window: Union["TxWindow", "RxWindow"],
contact_list: "ContactList",
group_list: "GroupList",
settings: "Settings",
master_key: "MasterKey",
msg_to_load: int = 0,
export: bool = False,
) -> None:
"""\
Load 'msg_to_load' last messages from log database and display or
export them.
@ -204,103 +278,155 @@ def access_logs(window: Union['TxWindow', 'RxWindow'],
The default value of zero for `msg_to_load` means all messages for
the window will be retrieved from the log database.
"""
file_name = f'{DIR_USER_DATA}{settings.software_operation}_logs'
packet_list = PacketList(settings, contact_list)
file_name = f"{DIR_USER_DATA}{settings.software_operation}_logs"
packet_list = PacketList(settings, contact_list)
message_list = [] # type: List[MsgTuple]
group_msg_id = b''
group_msg_id = b""
check_log_file_exists(file_name)
message_log = MessageLog(file_name, master_key.master_key)
for log_entry in message_log:
onion_pub_key, timestamp, origin, assembly_packet \
= separate_headers(log_entry, [ONION_SERVICE_PUBLIC_KEY_LENGTH, TIMESTAMP_LENGTH, ORIGIN_HEADER_LENGTH])
onion_pub_key, timestamp, origin, assembly_packet = separate_headers(
log_entry,
[ONION_SERVICE_PUBLIC_KEY_LENGTH, TIMESTAMP_LENGTH, ORIGIN_HEADER_LENGTH],
)
if window.type == WIN_TYPE_CONTACT and onion_pub_key != window.uid:
continue
packet = packet_list.get_packet(onion_pub_key, origin, MESSAGE, log_access=True)
try:
packet.add_packet(assembly_packet)
except FunctionReturn:
except SoftError:
continue
if not packet.is_complete:
continue
whisper_byte, header, message = separate_headers(packet.assemble_message_packet(), [WHISPER_FIELD_LENGTH,
MESSAGE_HEADER_LENGTH])
whisper = bytes_to_bool(whisper_byte)
if header == PRIVATE_MESSAGE_HEADER and window.type == WIN_TYPE_CONTACT:
message_list.append(
(bytes_to_timestamp(timestamp), message.decode(), onion_pub_key, packet.origin, whisper, False))
elif header == GROUP_MESSAGE_HEADER and window.type == WIN_TYPE_GROUP:
purp_group_id, message = separate_header(message, GROUP_ID_LENGTH)
if window.group is not None and purp_group_id != window.group.group_id:
continue
purp_msg_id, message = separate_header(message, GROUP_MSG_ID_LENGTH)
if packet.origin == ORIGIN_USER_HEADER:
if purp_msg_id == group_msg_id:
continue
group_msg_id = purp_msg_id
message_list.append(
(bytes_to_timestamp(timestamp), message.decode(), onion_pub_key, packet.origin, whisper, False))
group_msg_id = add_complete_message_to_message_list(
timestamp, onion_pub_key, group_msg_id, packet, message_list, window
)
message_log.close_database()
print_logs(message_list[-msg_to_load:], export, msg_to_load, window, contact_list, group_list, settings)
print_logs(
message_list[-msg_to_load:],
export,
msg_to_load,
window,
contact_list,
group_list,
settings,
)
def print_logs(message_list: List[MsgTuple],
export: bool,
msg_to_load: int,
window: Union['TxWindow', 'RxWindow'],
contact_list: 'ContactList',
group_list: 'GroupList',
settings: 'Settings'
) -> None:
def add_complete_message_to_message_list(
timestamp: bytes,
onion_pub_key: bytes,
group_msg_id: bytes,
packet: "Packet",
message_list: List[MsgTuple],
window: Union["TxWindow", "RxWindow"],
) -> bytes:
"""Add complete log file message to `message_list`."""
whisper_byte, header, message = separate_headers(
packet.assemble_message_packet(), [WHISPER_FIELD_LENGTH, MESSAGE_HEADER_LENGTH]
)
whisper = bytes_to_bool(whisper_byte)
if header == PRIVATE_MESSAGE_HEADER and window.type == WIN_TYPE_CONTACT:
message_list.append(
(
bytes_to_timestamp(timestamp),
message.decode(),
onion_pub_key,
packet.origin,
whisper,
False,
)
)
elif header == GROUP_MESSAGE_HEADER and window.type == WIN_TYPE_GROUP:
purp_group_id, message = separate_header(message, GROUP_ID_LENGTH)
if window.group is not None and purp_group_id != window.group.group_id:
return group_msg_id
purp_msg_id, message = separate_header(message, GROUP_MSG_ID_LENGTH)
if packet.origin == ORIGIN_USER_HEADER:
if purp_msg_id == group_msg_id:
return group_msg_id
group_msg_id = purp_msg_id
message_list.append(
(
bytes_to_timestamp(timestamp),
message.decode(),
onion_pub_key,
packet.origin,
whisper,
False,
)
)
return group_msg_id
def print_logs(
message_list: List[MsgTuple],
export: bool,
msg_to_load: int,
window: Union["TxWindow", "RxWindow"],
contact_list: "ContactList",
group_list: "GroupList",
settings: "Settings",
) -> None:
"""Print list of logged messages to screen or export them to file."""
terminal_width = get_terminal_width()
system, m_dir = {TX: ("Transmitter", "sent to"),
RX: ("Receiver", "to/from")}[settings.software_operation]
system, m_dir = {TX: ("Transmitter", "sent to"), RX: ("Receiver", "to/from")}[
settings.software_operation
]
f_name = open(f"{system} - Plaintext log ({window.name})", 'w+') if export else sys.stdout
subset = '' if msg_to_load == 0 else f"{msg_to_load} most recent "
title = textwrap.fill(f"Log file of {subset}message(s) {m_dir} {window.type} {window.name}", terminal_width)
f_name = (
open(f"{system} - Plaintext log ({window.name})", "w+")
if export
else sys.stdout
)
subset = "" if msg_to_load == 0 else f"{msg_to_load} most recent "
title = textwrap.fill(
f"Log file of {subset}message(s) {m_dir} {window.type} {window.name}",
terminal_width,
)
packet_list = PacketList(settings, contact_list)
log_window = RxWindow(window.uid, contact_list, group_list, settings, packet_list)
log_window.is_active = True
packet_list = PacketList(settings, contact_list)
log_window = RxWindow(window.uid, contact_list, group_list, settings, packet_list)
log_window.is_active = True
log_window.message_log = message_list
if message_list:
if not export:
clear_screen()
print(title, file=f_name)
print(terminal_width * '', file=f_name)
log_window.redraw( file=f_name)
print(title, file=f_name)
print(terminal_width * "", file=f_name)
log_window.redraw(file=f_name)
print("<End of log file>\n", file=f_name)
else:
raise FunctionReturn(f"No logged messages for {window.type} '{window.name}'.", head_clear=True)
raise SoftError(
f"No logged messages for {window.type} '{window.name}'.", head_clear=True
)
if export:
f_name.close()
def change_log_db_key(old_key: bytes,
new_key: bytes,
settings: 'Settings'
) -> None:
def change_log_db_key(old_key: bytes, new_key: bytes, settings: "Settings") -> None:
"""Re-encrypt log database with a new master key."""
ensure_dir(DIR_USER_DATA)
file_name = f'{DIR_USER_DATA}{settings.software_operation}_logs'
temp_name = f'{file_name}_temp'
file_name = f"{DIR_USER_DATA}{settings.software_operation}_logs"
temp_name = file_name + TEMP_POSTFIX
if not os.path.isfile(file_name):
raise FunctionReturn("No log database available.")
raise SoftError("No log database available.")
if os.path.isfile(temp_name):
os.remove(temp_name)
@ -315,22 +441,23 @@ def change_log_db_key(old_key: bytes,
message_log_tmp.close_database()
def replace_log_db(settings: 'Settings') -> None:
def replace_log_db(settings: "Settings") -> None:
"""Replace log database with temp file."""
ensure_dir(DIR_USER_DATA)
file_name = f'{DIR_USER_DATA}{settings.software_operation}_logs'
temp_name = f'{file_name}_temp'
file_name = f"{DIR_USER_DATA}{settings.software_operation}_logs"
temp_name = file_name + TEMP_POSTFIX
if os.path.isfile(temp_name):
os.replace(temp_name, file_name)
def remove_logs(contact_list: 'ContactList',
group_list: 'GroupList',
settings: 'Settings',
master_key: 'MasterKey',
selector: bytes
) -> None:
def remove_logs(
contact_list: "ContactList",
group_list: "GroupList",
settings: "Settings",
master_key: "MasterKey",
selector: bytes,
) -> None:
"""\
Remove log entries for selector (public key of an account/group ID).
@ -340,21 +467,22 @@ def remove_logs(contact_list: 'ContactList',
ID, only messages for group determined by that group ID are removed.
"""
ensure_dir(DIR_USER_DATA)
file_name = f'{DIR_USER_DATA}{settings.software_operation}_logs'
temp_name = f'{file_name}_temp'
packet_list = PacketList(settings, contact_list)
file_name = f"{DIR_USER_DATA}{settings.software_operation}_logs"
temp_name = file_name + TEMP_POSTFIX
packet_list = PacketList(settings, contact_list)
entries_to_keep = [] # type: List[bytes]
removed = False
contact = len(selector) == ONION_SERVICE_PUBLIC_KEY_LENGTH
removed = False
contact = len(selector) == ONION_SERVICE_PUBLIC_KEY_LENGTH
check_log_file_exists(file_name)
message_log = MessageLog(file_name, master_key.master_key)
for log_entry in message_log:
onion_pub_key, _, origin, assembly_packet = separate_headers(log_entry, [ONION_SERVICE_PUBLIC_KEY_LENGTH,
TIMESTAMP_LENGTH,
ORIGIN_HEADER_LENGTH])
onion_pub_key, _, origin, assembly_packet = separate_headers(
log_entry,
[ONION_SERVICE_PUBLIC_KEY_LENGTH, TIMESTAMP_LENGTH, ORIGIN_HEADER_LENGTH],
)
if contact:
if onion_pub_key == selector:
removed = True
@ -362,28 +490,17 @@ def remove_logs(contact_list: 'ContactList',
entries_to_keep.append(log_entry)
else: # Group
packet = packet_list.get_packet(onion_pub_key, origin, MESSAGE, log_access=True)
packet = packet_list.get_packet(
onion_pub_key, origin, MESSAGE, log_access=True
)
try:
packet.add_packet(assembly_packet, log_entry)
except FunctionReturn:
except SoftError:
continue
if not packet.is_complete:
continue
_, header, message = separate_headers(packet.assemble_message_packet(), [WHISPER_FIELD_LENGTH,
MESSAGE_HEADER_LENGTH])
if header == PRIVATE_MESSAGE_HEADER:
entries_to_keep.extend(packet.log_ct_list)
packet.clear_assembly_packets()
elif header == GROUP_MESSAGE_HEADER:
group_id, _ = separate_header(message, GROUP_ID_LENGTH)
if group_id == selector:
removed = True
else:
entries_to_keep.extend(packet.log_ct_list)
packet.clear_assembly_packets()
removed = check_packet_fate(entries_to_keep, packet, removed, selector)
message_log.close_database()
@ -396,11 +513,37 @@ def remove_logs(contact_list: 'ContactList',
os.replace(temp_name, file_name)
try:
name = contact_list.get_nick_by_pub_key(selector) if contact else group_list.get_group_by_id(selector).name
name = (
contact_list.get_nick_by_pub_key(selector)
if contact
else group_list.get_group_by_id(selector).name
)
except StopIteration:
name = pub_key_to_short_address(selector) if contact else b58encode(selector)
name = pub_key_to_short_address(selector) if contact else b58encode(selector)
action = "Removed" if removed else "Found no"
action = "Removed" if removed else "Found no"
win_type = "contact" if contact else "group"
raise FunctionReturn(f"{action} log entries for {win_type} '{name}'.")
raise SoftError(f"{action} log entries for {win_type} '{name}'.")
def check_packet_fate(
entries_to_keep: List[bytes], packet: "Packet", removed: bool, selector: bytes
) -> bool:
"""Check whether the packet should be kept."""
_, header, message = separate_headers(
packet.assemble_message_packet(), [WHISPER_FIELD_LENGTH, MESSAGE_HEADER_LENGTH]
)
if header == PRIVATE_MESSAGE_HEADER:
entries_to_keep.extend(packet.log_ct_list)
packet.clear_assembly_packets()
elif header == GROUP_MESSAGE_HEADER:
group_id, _ = separate_header(message, GROUP_ID_LENGTH)
if group_id == selector:
removed = True
else:
entries_to_keep.extend(packet.log_ct_list)
packet.clear_assembly_packets()
return removed

View File

@ -27,18 +27,29 @@ import time
from typing import List, Optional, Tuple
from src.common.crypto import argon2_kdf, blake2b, csprng
from src.common.database import TFCUnencryptedDatabase
from src.common.encoding import bytes_to_int, int_to_bytes
from src.common.exceptions import CriticalError, FunctionReturn, graceful_exit
from src.common.input import pwd_prompt
from src.common.misc import ensure_dir, separate_headers
from src.common.output import clear_screen, m_print, phase, print_on_previous_line
from src.common.word_list import eff_wordlist
from src.common.statics import (ARGON2_MIN_MEMORY_COST, ARGON2_MIN_PARALLELISM, ARGON2_MIN_TIME_COST,
ARGON2_SALT_LENGTH, BLAKE2_DIGEST_LENGTH, DIR_USER_DATA, DONE,
ENCODED_INTEGER_LENGTH, GENERATE, MASTERKEY_DB_SIZE, MAX_KEY_DERIVATION_TIME,
MIN_KEY_DERIVATION_TIME, PASSWORD_MIN_BIT_STRENGTH, RESET)
from src.common.crypto import argon2_kdf, blake2b, csprng
from src.common.database import TFCUnencryptedDatabase
from src.common.encoding import bytes_to_int, int_to_bytes
from src.common.exceptions import CriticalError, graceful_exit, SoftError
from src.common.input import pwd_prompt
from src.common.misc import ensure_dir, reset_terminal, separate_headers
from src.common.output import clear_screen, m_print, phase, print_on_previous_line
from src.common.word_list import eff_wordlist
from src.common.statics import (
ARGON2_MIN_MEMORY_COST,
ARGON2_MIN_PARALLELISM,
ARGON2_MIN_TIME_COST,
ARGON2_SALT_LENGTH,
BLAKE2_DIGEST_LENGTH,
DIR_USER_DATA,
DONE,
ENCODED_INTEGER_LENGTH,
GENERATE,
MASTERKEY_DB_SIZE,
MAX_KEY_DERIVATION_TIME,
MIN_KEY_DERIVATION_TIME,
PASSWORD_MIN_BIT_STRENGTH,
)
class MasterKey(object):
@ -49,9 +60,10 @@ class MasterKey(object):
def __init__(self, operation: str, local_test: bool) -> None:
"""Create a new MasterKey object."""
self.file_name = f'{DIR_USER_DATA}{operation}_login_data'
self.database = TFCUnencryptedDatabase(self.file_name)
self.local_test = local_test
self.operation = operation
self.file_name = f"{DIR_USER_DATA}{self.operation}_login_data"
self.database = TFCUnencryptedDatabase(self.file_name)
self.local_test = local_test
self.database_data = None # type: Optional[bytes]
ensure_dir(DIR_USER_DATA)
@ -64,42 +76,41 @@ class MasterKey(object):
graceful_exit()
@staticmethod
def timed_key_derivation(password: str,
salt: bytes,
time_cost: int,
memory_cost: int,
parallelism: int
) -> Tuple[bytes, float]:
def timed_key_derivation(
password: str, salt: bytes, time_cost: int, memory_cost: int, parallelism: int
) -> Tuple[bytes, float]:
"""Derive key and measure its derivation time."""
time_start = time.monotonic()
master_key = argon2_kdf(password, salt, time_cost, memory_cost, parallelism)
kd_time = time.monotonic() - time_start
kd_time = time.monotonic() - time_start
return master_key, kd_time
@staticmethod
def get_available_memory() -> int:
def get_available_memory(self) -> int:
"""Return the amount of available memory in the system."""
fields = os.popen("cat /proc/meminfo").read().splitlines()
field = [f for f in fields if f.startswith('MemAvailable')][0]
fields = os.popen("/bin/cat /proc/meminfo").read().splitlines()
field = [f for f in fields if f.startswith("MemAvailable")][0]
mem_avail = int(field.split()[1])
if self.local_test:
mem_avail //= 2
return mem_avail
@staticmethod
def generate_master_password() -> Tuple[int, str]:
"""Generate a strong password using the EFF wordlist."""
word_space = len(eff_wordlist)
sys_rand = random.SystemRandom()
sys_rand = random.SystemRandom()
pwd_bit_strength = 0.0
password_words = [] # type: List[str]
password_words = [] # type: List[str]
while pwd_bit_strength < PASSWORD_MIN_BIT_STRENGTH:
password_words.append(sys_rand.choice(eff_wordlist))
pwd_bit_strength = math.log2(word_space ** len(password_words))
password = ' '.join(password_words)
password = " ".join(password_words)
return int(pwd_bit_strength), password
@ -164,29 +175,35 @@ class MasterKey(object):
slow even with GPUs/ASICs/FPGAs, as long as the password is
sufficiently strong.
"""
password = MasterKey.new_password()
salt = csprng(ARGON2_SALT_LENGTH)
password = MasterKey.new_password()
salt = csprng(ARGON2_SALT_LENGTH)
time_cost = ARGON2_MIN_TIME_COST
# Determine the amount of memory used from the amount of free RAM in the system.
memory_cost = self.get_available_memory()
if self.local_test:
memory_cost //= 2
# Determine the amount of threads to use
parallelism = multiprocessing.cpu_count()
if self.local_test:
parallelism = max(ARGON2_MIN_PARALLELISM, parallelism // 2)
phase("Deriving master key", head=2)
# Initial key derivation
master_key, kd_time = self.timed_key_derivation(password, salt, time_cost, memory_cost, parallelism)
phase("Deriving master key", head=2, offset=0)
master_key, kd_time = self.timed_key_derivation(
password, salt, time_cost, memory_cost, parallelism
)
phase("", done=True)
print()
# If derivation was too fast, increase time_cost
while kd_time < MIN_KEY_DERIVATION_TIME:
print_on_previous_line()
phase(f"Trying time cost {time_cost+1}")
time_cost += 1
master_key, kd_time = self.timed_key_derivation(password, salt, time_cost, memory_cost, parallelism)
master_key, kd_time = self.timed_key_derivation(
password, salt, time_cost, memory_cost, parallelism
)
phase(f"{kd_time:.1f}s", done=True)
# At this point time_cost may have value of 1 or it may have increased to e.g. 3, which might make it take
# longer than MAX_KEY_DERIVATION_TIME. If that's the case, it makes no sense to lower it back to 2 because even
@ -201,10 +218,18 @@ class MasterKey(object):
lower_bound = ARGON2_MIN_MEMORY_COST
upper_bound = memory_cost
while kd_time < MIN_KEY_DERIVATION_TIME or kd_time > MAX_KEY_DERIVATION_TIME:
while (
kd_time < MIN_KEY_DERIVATION_TIME or kd_time > MAX_KEY_DERIVATION_TIME
):
middle = (lower_bound + upper_bound) // 2
master_key, kd_time = self.timed_key_derivation(password, salt, time_cost, middle, parallelism)
middle = (lower_bound + upper_bound) // 2
print_on_previous_line()
phase(f"Trying memory cost {middle} KiB")
master_key, kd_time = self.timed_key_derivation(
password, salt, time_cost, middle, parallelism
)
phase(f"{kd_time:.1f}s", done=True)
# The search might fail e.g. if external CPU load causes delay in key derivation, which causes the
# search to continue into wrong branch. In such a situation the search is restarted. The binary search
@ -214,7 +239,7 @@ class MasterKey(object):
# and user experience (negatively).
if middle == lower_bound or middle == upper_bound:
lower_bound = ARGON2_MIN_MEMORY_COST
upper_bound = memory_cost
upper_bound = self.get_available_memory()
continue
if kd_time < MIN_KEY_DERIVATION_TIME:
@ -226,11 +251,13 @@ class MasterKey(object):
memory_cost = middle if middle is not None else memory_cost
# Store values to database
database_data = (salt
+ blake2b(master_key)
+ int_to_bytes(time_cost)
+ int_to_bytes(memory_cost)
+ int_to_bytes(parallelism))
database_data = (
salt
+ blake2b(master_key)
+ int_to_bytes(time_cost)
+ int_to_bytes(memory_cost)
+ int_to_bytes(parallelism)
)
if replace:
self.database.store_unencrypted_database(database_data)
@ -240,7 +267,10 @@ class MasterKey(object):
# before all new databases have been successfully written. We therefore just cache
# the database data.
self.database_data = database_data
phase(DONE)
print_on_previous_line(2)
phase("Deriving master key")
phase(DONE, delay=1)
return master_key
@ -264,11 +294,17 @@ class MasterKey(object):
if len(database_data) != MASTERKEY_DB_SIZE:
raise CriticalError(f"Invalid {self.file_name} database size.")
salt, key_hash, time_bytes, memory_bytes, parallelism_bytes \
= separate_headers(database_data, [ARGON2_SALT_LENGTH, BLAKE2_DIGEST_LENGTH,
ENCODED_INTEGER_LENGTH, ENCODED_INTEGER_LENGTH])
salt, key_hash, time_bytes, memory_bytes, parallelism_bytes = separate_headers(
database_data,
[
ARGON2_SALT_LENGTH,
BLAKE2_DIGEST_LENGTH,
ENCODED_INTEGER_LENGTH,
ENCODED_INTEGER_LENGTH,
],
)
time_cost = bytes_to_int(time_bytes)
time_cost = bytes_to_int(time_bytes)
memory_cost = bytes_to_int(memory_bytes)
parallelism = bytes_to_int(parallelism_bytes)
@ -281,9 +317,9 @@ class MasterKey(object):
phase("Password correct", done=True, delay=1)
clear_screen()
return purp_key
else:
phase("Invalid password", done=True, delay=1)
print_on_previous_line(reps=5)
phase("Invalid password", done=True, delay=1)
print_on_previous_line(reps=5)
@classmethod
def new_password(cls, purpose: str = "master password") -> str:
@ -293,11 +329,21 @@ class MasterKey(object):
if password_1 == GENERATE:
pwd_bit_strength, password_1 = MasterKey.generate_master_password()
m_print([f"Generated a {pwd_bit_strength}-bit password:",
'', password_1, '',
"Write down this password and dispose of the copy once you remember it.",
"Press <Enter> to continue."], manual_proceed=True, box=True, head=1, tail=1)
os.system(RESET)
m_print(
[
f"Generated a {pwd_bit_strength}-bit password:",
"",
password_1,
"",
"Write down this password and dispose of the copy once you remember it.",
"Press <Enter> to continue.",
],
manual_proceed=True,
box=True,
head=1,
tail=1,
)
reset_terminal()
password_2 = password_1
else:
@ -305,10 +351,10 @@ class MasterKey(object):
if password_1 == password_2:
return password_1
else:
m_print("Error: Passwords did not match. Try again.", head=1, tail=1)
print_on_previous_line(delay=1, reps=7)
return cls.new_password(purpose)
m_print("Error: Passwords did not match. Try again.", head=1, tail=1)
print_on_previous_line(delay=1, reps=7)
return cls.new_password(purpose)
@classmethod
def get_password(cls, purpose: str = "master password") -> str:
@ -320,6 +366,8 @@ class MasterKey(object):
try:
authenticated = self.load_master_key() == self.master_key
except (EOFError, KeyboardInterrupt):
raise FunctionReturn(f"Authentication aborted.", tail_clear=True, head=2, delay=1)
raise SoftError(
f"Authentication aborted.", tail_clear=True, head=2, delay=1
)
return authenticated

View File

@ -24,13 +24,19 @@ import typing
import nacl.signing
from src.common.crypto import csprng
from src.common.database import TFCDatabase
from src.common.encoding import pub_key_to_onion_address, pub_key_to_short_address
from src.common.crypto import csprng
from src.common.database import TFCDatabase
from src.common.encoding import pub_key_to_onion_address, pub_key_to_short_address
from src.common.exceptions import CriticalError
from src.common.misc import ensure_dir
from src.common.output import phase
from src.common.statics import CONFIRM_CODE_LENGTH, DIR_USER_DATA, DONE, ONION_SERVICE_PRIVATE_KEY_LENGTH, TX
from src.common.misc import ensure_dir
from src.common.output import phase
from src.common.statics import (
CONFIRM_CODE_LENGTH,
DIR_USER_DATA,
DONE,
ONION_SERVICE_PRIVATE_KEY_LENGTH,
TX,
)
if typing.TYPE_CHECKING:
from src.common.db_masterkey import MasterKey
@ -53,13 +59,13 @@ class OnionService(object):
anyway.
"""
def __init__(self, master_key: 'MasterKey') -> None:
def __init__(self, master_key: "MasterKey") -> None:
"""Create a new OnionService object."""
self.master_key = master_key
self.file_name = f'{DIR_USER_DATA}{TX}_onion_db'
self.database = TFCDatabase(self.file_name, self.master_key)
self.master_key = master_key
self.file_name = f"{DIR_USER_DATA}{TX}_onion_db"
self.database = TFCDatabase(self.file_name, self.master_key)
self.is_delivered = False
self.conf_code = csprng(CONFIRM_CODE_LENGTH)
self.conf_code = csprng(CONFIRM_CODE_LENGTH)
ensure_dir(DIR_USER_DATA)
if os.path.isfile(self.file_name):
@ -68,7 +74,9 @@ class OnionService(object):
self.onion_private_key = self.new_onion_service_private_key()
self.store_onion_service_private_key()
self.public_key = bytes(nacl.signing.SigningKey(seed=self.onion_private_key).verify_key)
self.public_key = bytes(
nacl.signing.SigningKey(seed=self.onion_private_key).verify_key
)
self.user_onion_address = pub_key_to_onion_address(self.public_key)
self.user_short_address = pub_key_to_short_address(self.public_key)

View File

@ -25,20 +25,28 @@ import typing
from typing import Union
from src.common.database import TFCDatabase
from src.common.encoding import bool_to_bytes, double_to_bytes, int_to_bytes
from src.common.encoding import bytes_to_bool, bytes_to_double, bytes_to_int
from src.common.exceptions import CriticalError, FunctionReturn
from src.common.input import yes
from src.common.misc import ensure_dir, get_terminal_width, round_up
from src.common.output import clear_screen, m_print
from src.common.statics import (DIR_USER_DATA, ENCODED_BOOLEAN_LENGTH, ENCODED_FLOAT_LENGTH, ENCODED_INTEGER_LENGTH,
MAX_INT, SETTINGS_INDENT, TRAFFIC_MASKING_MIN_RANDOM_DELAY,
TRAFFIC_MASKING_MIN_STATIC_DELAY, TX)
from src.common.database import TFCDatabase
from src.common.encoding import bool_to_bytes, double_to_bytes, int_to_bytes
from src.common.encoding import bytes_to_bool, bytes_to_double, bytes_to_int
from src.common.exceptions import CriticalError, SoftError
from src.common.input import yes
from src.common.misc import ensure_dir, get_terminal_width, round_up
from src.common.output import clear_screen, m_print
from src.common.statics import (
DIR_USER_DATA,
ENCODED_BOOLEAN_LENGTH,
ENCODED_FLOAT_LENGTH,
ENCODED_INTEGER_LENGTH,
MAX_INT,
SETTINGS_INDENT,
TRAFFIC_MASKING_MIN_RANDOM_DELAY,
TRAFFIC_MASKING_MIN_STATIC_DELAY,
TX,
)
if typing.TYPE_CHECKING:
from src.common.db_contacts import ContactList
from src.common.db_groups import GroupList
from src.common.db_contacts import ContactList
from src.common.db_groups import GroupList
from src.common.db_masterkey import MasterKey
@ -48,11 +56,12 @@ class Settings(object):
related to serial interface) under an encrypted database.
"""
def __init__(self,
master_key: 'MasterKey', # MasterKey object
operation: str, # Operation mode of the program (Tx or Rx)
local_test: bool, # Local testing setting from command-line argument
) -> None:
def __init__(
self,
master_key: "MasterKey", # MasterKey object
operation: str, # Operation mode of the program (Tx or Rx)
local_test: bool, # Local testing setting from command-line argument
) -> None:
"""Create a new Settings object.
The settings below are defaults, and are only to be altered from
@ -61,41 +70,41 @@ class Settings(object):
are loaded when the program starts.
"""
# Common settings
self.disable_gui_dialog = False
self.max_number_of_group_members = 50
self.max_number_of_groups = 50
self.max_number_of_contacts = 50
self.log_messages_by_default = False
self.accept_files_by_default = False
self.disable_gui_dialog = False
self.max_number_of_group_members = 50
self.max_number_of_groups = 50
self.max_number_of_contacts = 50
self.log_messages_by_default = False
self.accept_files_by_default = False
self.show_notifications_by_default = True
self.log_file_masking = False
self.ask_password_for_log_access = True
self.log_file_masking = False
self.ask_password_for_log_access = True
# Transmitter settings
self.nc_bypass_messages = False
self.confirm_sent_files = True
self.double_space_exits = False
self.traffic_masking = False
self.tm_static_delay = 2.0
self.tm_random_delay = 2.0
self.traffic_masking = False
self.tm_static_delay = 2.0
self.tm_random_delay = 2.0
# Relay Settings
self.allow_contact_requests = True
# Receiver settings
self.new_message_notify_preview = False
self.new_message_notify_preview = False
self.new_message_notify_duration = 1.0
self.max_decompress_size = 100_000_000
self.max_decompress_size = 100_000_000
self.master_key = master_key
self.master_key = master_key
self.software_operation = operation
self.local_testing_mode = local_test
self.file_name = f'{DIR_USER_DATA}{operation}_settings'
self.database = TFCDatabase(self.file_name, master_key)
self.file_name = f"{DIR_USER_DATA}{operation}_settings"
self.database = TFCDatabase(self.file_name, master_key)
self.all_keys = list(vars(self).keys())
self.key_list = self.all_keys[:self.all_keys.index('master_key')]
self.key_list = self.all_keys[: self.all_keys.index("master_key")]
self.defaults = {k: self.__dict__[k] for k in self.key_list}
ensure_dir(DIR_USER_DATA)
@ -123,7 +132,7 @@ class Settings(object):
else:
raise CriticalError("Invalid attribute type in settings.")
pt_bytes = b''.join(bytes_lst)
pt_bytes = b"".join(bytes_lst)
self.database.store_database(pt_bytes, replace)
def load_settings(self) -> None:
@ -136,15 +145,15 @@ class Settings(object):
attribute = self.__getattribute__(key)
if isinstance(attribute, bool):
value = bytes_to_bool(pt_bytes[0]) # type: Union[bool, int, float]
value = bytes_to_bool(pt_bytes[0]) # type: Union[bool, int, float]
pt_bytes = pt_bytes[ENCODED_BOOLEAN_LENGTH:]
elif isinstance(attribute, int):
value = bytes_to_int(pt_bytes[:ENCODED_INTEGER_LENGTH])
value = bytes_to_int(pt_bytes[:ENCODED_INTEGER_LENGTH])
pt_bytes = pt_bytes[ENCODED_INTEGER_LENGTH:]
elif isinstance(attribute, float):
value = bytes_to_double(pt_bytes[:ENCODED_FLOAT_LENGTH])
value = bytes_to_double(pt_bytes[:ENCODED_FLOAT_LENGTH])
pt_bytes = pt_bytes[ENCODED_FLOAT_LENGTH:]
else:
@ -152,18 +161,21 @@ class Settings(object):
setattr(self, key, value)
def change_setting(self,
key: str, # Name of the setting
value_str: str, # Value of the setting
contact_list: 'ContactList',
group_list: 'GroupList'
) -> None:
def change_setting(
self,
key: str, # Name of the setting
value_str: str, # Value of the setting
contact_list: "ContactList",
group_list: "GroupList",
) -> None:
"""Parse, update and store new setting value."""
attribute = self.__getattribute__(key)
try:
if isinstance(attribute, bool):
value = dict(true=True, false=False)[value_str.lower()] # type: Union[bool, int, float]
value = dict(true=True, false=False)[
value_str.lower()
] # type: Union[bool, int, float]
elif isinstance(attribute, int):
value = int(value_str)
@ -179,7 +191,9 @@ class Settings(object):
raise CriticalError("Invalid attribute type in settings.")
except (KeyError, ValueError):
raise FunctionReturn(f"Error: Invalid setting value '{value_str}'.", head_clear=True)
raise SoftError(
f"Error: Invalid setting value '{value_str}'.", head_clear=True
)
self.validate_key_value_pair(key, value, contact_list, group_list)
@ -187,50 +201,120 @@ class Settings(object):
self.store_settings()
@staticmethod
def validate_key_value_pair(key: str, # Name of the setting
value: Union[int, float, bool], # Value of the setting
contact_list: 'ContactList',
group_list: 'GroupList'
) -> None:
def validate_key_value_pair(
key: str, # Name of the setting
value: Union[int, float, bool], # Value of the setting
contact_list: "ContactList", # ContactList object
group_list: "GroupList", # GroupList object
) -> None:
"""Evaluate values for settings that have further restrictions."""
if key in ['max_number_of_group_members', 'max_number_of_groups', 'max_number_of_contacts']:
if value % 10 != 0 or value == 0:
raise FunctionReturn("Error: Database padding settings must be divisible by 10.", head_clear=True)
Settings.validate_database_limit(key, value)
if key == 'max_number_of_group_members':
Settings.validate_max_number_of_group_members(key, value, group_list)
Settings.validate_max_number_of_groups(key, value, group_list)
Settings.validate_max_number_of_contacts(key, value, contact_list)
Settings.validate_new_message_notify_duration(key, value)
Settings.validate_traffic_maskig_delay(key, value, contact_list)
@staticmethod
def validate_database_limit(key: str, value: Union[int, float, bool]) -> None:
"""Validate setting values for database entry limits."""
if key in [
"max_number_of_group_members",
"max_number_of_groups",
"max_number_of_contacts",
]:
if value % 10 != 0 or value == 0:
raise SoftError(
"Error: Database padding settings must be divisible by 10.",
head_clear=True,
)
@staticmethod
def validate_max_number_of_group_members(
key: str, value: Union[int, float, bool], group_list: "GroupList"
) -> None:
"""Validate setting value for maximum number of group members."""
if key == "max_number_of_group_members":
min_size = round_up(group_list.largest_group())
if value < min_size:
raise FunctionReturn(
f"Error: Can't set the max number of members lower than {min_size}.", head_clear=True)
raise SoftError(
f"Error: Can't set the max number of members lower than {min_size}.",
head_clear=True,
)
if key == 'max_number_of_groups':
@staticmethod
def validate_max_number_of_groups(
key: str, value: Union[int, float, bool], group_list: "GroupList"
) -> None:
"""Validate setting value for maximum number of groups."""
if key == "max_number_of_groups":
min_size = round_up(len(group_list))
if value < min_size:
raise FunctionReturn(
f"Error: Can't set the max number of groups lower than {min_size}.", head_clear=True)
raise SoftError(
f"Error: Can't set the max number of groups lower than {min_size}.",
head_clear=True,
)
if key == 'max_number_of_contacts':
@staticmethod
def validate_max_number_of_contacts(
key: str, value: Union[int, float, bool], contact_list: "ContactList"
) -> None:
"""Validate setting value for maximum number of contacts."""
if key == "max_number_of_contacts":
min_size = round_up(len(contact_list))
if value < min_size:
raise FunctionReturn(
f"Error: Can't set the max number of contacts lower than {min_size}.", head_clear=True)
raise SoftError(
f"Error: Can't set the max number of contacts lower than {min_size}.",
head_clear=True,
)
if key == 'new_message_notify_duration' and value < 0.05:
raise FunctionReturn("Error: Too small value for message notify duration.", head_clear=True)
@staticmethod
def validate_new_message_notify_duration(
key: str, value: Union[int, float, bool]
) -> None:
"""Validate setting value for duration of new message notification."""
if key == "new_message_notify_duration" and value < 0.05:
raise SoftError(
"Error: Too small value for message notify duration.", head_clear=True
)
if key in ['tm_static_delay', 'tm_random_delay']:
@staticmethod
def validate_traffic_maskig_delay(
key: str, value: Union[int, float, bool], contact_list: "ContactList"
) -> None:
"""Validate setting value for traffic masking delays."""
if key in ["tm_static_delay", "tm_random_delay"]:
for key_, name, min_setting in [('tm_static_delay', 'static', TRAFFIC_MASKING_MIN_STATIC_DELAY),
('tm_random_delay', 'random', TRAFFIC_MASKING_MIN_RANDOM_DELAY)]:
for key_, name, min_setting in [
("tm_static_delay", "static", TRAFFIC_MASKING_MIN_STATIC_DELAY),
("tm_random_delay", "random", TRAFFIC_MASKING_MIN_RANDOM_DELAY),
]:
if key == key_ and value < min_setting:
raise FunctionReturn(f"Error: Can't set {name} delay lower than {min_setting}.", head_clear=True)
raise SoftError(
f"Error: Can't set {name} delay lower than {min_setting}.",
head_clear=True,
)
if contact_list.settings.software_operation == TX:
m_print(["WARNING!", "Changing traffic masking delay can make your endpoint and traffic look unique!"],
bold=True, head=1, tail=1)
m_print(
[
"WARNING!",
"Changing traffic masking delay can make your endpoint and traffic look unique!",
],
bold=True,
head=1,
tail=1,
)
if not yes("Proceed anyway?"):
raise FunctionReturn("Aborted traffic masking setting change.", head_clear=True)
raise SoftError(
"Aborted traffic masking setting change.", head_clear=True
)
m_print("Traffic masking setting will change on restart.", head=1, tail=1)
@ -241,43 +325,41 @@ class Settings(object):
"""
desc_d = {
# Common settings
"disable_gui_dialog": "True replaces GUI dialogs with CLI prompts",
"max_number_of_group_members": "Maximum number of members in a group",
"max_number_of_groups": "Maximum number of groups",
"max_number_of_contacts": "Maximum number of contacts",
"log_messages_by_default": "Default logging setting for new contacts/groups",
"accept_files_by_default": "Default file reception setting for new contacts",
"disable_gui_dialog": "True replaces GUI dialogs with CLI prompts",
"max_number_of_group_members": "Maximum number of members in a group",
"max_number_of_groups": "Maximum number of groups",
"max_number_of_contacts": "Maximum number of contacts",
"log_messages_by_default": "Default logging setting for new contacts/groups",
"accept_files_by_default": "Default file reception setting for new contacts",
"show_notifications_by_default": "Default message notification setting for new contacts/groups",
"log_file_masking": "True hides real size of log file during traffic masking",
"ask_password_for_log_access": "False disables password prompt when viewing/exporting logs",
"log_file_masking": "True hides real size of log file during traffic masking",
"ask_password_for_log_access": "False disables password prompt when viewing/exporting logs",
# Transmitter settings
"nc_bypass_messages": "False removes Networked Computer bypass interrupt messages",
"confirm_sent_files": "False sends files without asking for confirmation",
"double_space_exits": "True exits, False clears screen with double space command",
"traffic_masking": "True enables traffic masking to hide metadata",
"tm_static_delay": "The static delay between traffic masking packets",
"tm_random_delay": "Max random delay for traffic masking timing obfuscation",
"nc_bypass_messages": "False removes Networked Computer bypass interrupt messages",
"confirm_sent_files": "False sends files without asking for confirmation",
"double_space_exits": "True exits, False clears screen with double space command",
"traffic_masking": "True enables traffic masking to hide metadata",
"tm_static_delay": "The static delay between traffic masking packets",
"tm_random_delay": "Max random delay for traffic masking timing obfuscation",
# Relay settings
"allow_contact_requests": "When False, does not show TFC contact requests",
"allow_contact_requests": "When False, does not show TFC contact requests",
# Receiver settings
"new_message_notify_preview": "When True, shows a preview of the received message",
"new_message_notify_duration": "Number of seconds new message notification appears",
"max_decompress_size": "Max size Receiver accepts when decompressing file"}
"new_message_notify_preview": "When True, shows a preview of the received message",
"new_message_notify_duration": "Number of seconds new message notification appears",
"max_decompress_size": "Max size Receiver accepts when decompressing file",
}
# Columns
c1 = ['Setting name']
c2 = ['Current value']
c3 = ['Default value']
c4 = ['Description']
c1 = ["Setting name"]
c2 = ["Current value"]
c3 = ["Default value"]
c4 = ["Description"]
terminal_width = get_terminal_width()
terminal_width = get_terminal_width()
description_indent = 64
if terminal_width < description_indent + 1:
raise FunctionReturn("Error: Screen width is too small.", head_clear=True)
raise SoftError("Error: Screen width is too small.", head_clear=True)
# Populate columns with setting data
for key in self.defaults:
@ -286,27 +368,34 @@ class Settings(object):
c3.append(str(self.defaults[key]))
description = desc_d[key]
wrapper = textwrap.TextWrapper(width=max(1, (terminal_width - description_indent)))
desc_lines = wrapper.fill(description).split('\n')
wrapper = textwrap.TextWrapper(
width=max(1, (terminal_width - description_indent))
)
desc_lines = wrapper.fill(description).split("\n")
desc_string = desc_lines[0]
for line in desc_lines[1:]:
desc_string += '\n' + description_indent * ' ' + line
desc_string += "\n" + description_indent * " " + line
if len(desc_lines) > 1:
desc_string += '\n'
desc_string += "\n"
c4.append(desc_string)
# Calculate column widths
c1w, c2w, c3w = [max(len(v) for v in column) + SETTINGS_INDENT for column in [c1, c2, c3]]
c1w, c2w, c3w = [
max(len(v) for v in column) + SETTINGS_INDENT for column in [c1, c2, c3]
]
# Align columns by adding whitespace between fields of each line
lines = [f'{f1:{c1w}} {f2:{c2w}} {f3:{c3w}} {f4}' for f1, f2, f3, f4 in zip(c1, c2, c3, c4)]
lines = [
f"{f1:{c1w}} {f2:{c2w}} {f3:{c3w}} {f4}"
for f1, f2, f3, f4 in zip(c1, c2, c3, c4)
]
# Add a terminal-wide line between the column names and the data
lines.insert(1, get_terminal_width() * '')
lines.insert(1, get_terminal_width() * "")
# Print the settings
clear_screen()
print('\n' + '\n'.join(lines))
print("\n" + "\n".join(lines))

View File

@ -24,18 +24,25 @@ import hashlib
import struct
from datetime import datetime
from typing import List, Union
from typing import List, Union
from src.common.statics import (B58_ALPHABET, B58_CHECKSUM_LENGTH, MAINNET_HEADER, ONION_ADDRESS_CHECKSUM_ID,
ONION_ADDRESS_CHECKSUM_LENGTH, ONION_SERVICE_VERSION, ONION_SERVICE_VERSION_LENGTH,
PADDING_LENGTH, TESTNET_HEADER, TRUNC_ADDRESS_LENGTH)
from src.common.statics import (
B58_ALPHABET,
B58_CHECKSUM_LENGTH,
MAINNET_HEADER,
ONION_ADDRESS_CHECKSUM_ID,
ONION_ADDRESS_CHECKSUM_LENGTH,
ONION_SERVICE_VERSION,
ONION_SERVICE_VERSION_LENGTH,
PADDING_LENGTH,
TESTNET_HEADER,
TRUNC_ADDRESS_LENGTH,
)
def sha256d(message: bytes) -> bytes:
"""Chain SHA256 twice for Bitcoin WIF format."""
return hashlib.sha256(
hashlib.sha256(message).digest()
).digest()
return hashlib.sha256(hashlib.sha256(message).digest()).digest()
def b58encode(byte_string: bytes, public_key: bool = False) -> str:
@ -45,20 +52,20 @@ def b58encode(byte_string: bytes, public_key: bool = False) -> str:
(WIF) for mainnet and testnet addresses.
https://en.bitcoin.it/wiki/Wallet_import_format
"""
net_id = TESTNET_HEADER if public_key else MAINNET_HEADER
byte_string = net_id + byte_string
net_id = TESTNET_HEADER if public_key else MAINNET_HEADER
byte_string = net_id + byte_string
byte_string += sha256d(byte_string)[:B58_CHECKSUM_LENGTH]
original_len = len(byte_string)
byte_string = byte_string.lstrip(b'\x00')
new_len = len(byte_string)
byte_string = byte_string.lstrip(b"\x00")
new_len = len(byte_string)
p, acc = 1, 0
for byte in bytearray(byte_string[::-1]):
acc += p * byte
p *= 256
p *= 256
encoded = ''
encoded = ""
while acc > 0:
acc, mod = divmod(acc, 58)
encoded += B58_ALPHABET[mod]
@ -68,30 +75,35 @@ def b58encode(byte_string: bytes, public_key: bool = False) -> str:
def b58decode(string: str, public_key: bool = False) -> bytes:
"""Decode a Base58-encoded string and verify the checksum."""
net_id = TESTNET_HEADER if public_key else MAINNET_HEADER
net_id = TESTNET_HEADER if public_key else MAINNET_HEADER
orig_len = len(string)
string = string.lstrip(B58_ALPHABET[0])
new_len = len(string)
string = string.lstrip(B58_ALPHABET[0])
new_len = len(string)
p, acc = 1, 0
for c in string[::-1]:
acc += p * B58_ALPHABET.index(c)
p *= 58
p *= 58
decoded = []
while acc > 0:
acc, mod = divmod(acc, 256)
decoded.append(mod)
decoded_ = (bytes(decoded) + (orig_len - new_len) * b'\x00')[::-1] # type: Union[bytes, List[int]]
decoded_ = (bytes(decoded) + (orig_len - new_len) * b"\x00")[
::-1
] # type: Union[bytes, List[int]]
if sha256d(bytes(decoded_[:-B58_CHECKSUM_LENGTH]))[:B58_CHECKSUM_LENGTH] != decoded_[-B58_CHECKSUM_LENGTH:]:
if (
sha256d(bytes(decoded_[:-B58_CHECKSUM_LENGTH]))[:B58_CHECKSUM_LENGTH]
!= decoded_[-B58_CHECKSUM_LENGTH:]
):
raise ValueError
if decoded_[:len(net_id)] != net_id:
if decoded_[: len(net_id)] != net_id:
raise ValueError
return bytes(decoded_[len(net_id):-B58_CHECKSUM_LENGTH])
return bytes(decoded_[len(net_id) : -B58_CHECKSUM_LENGTH])
def b85encode(data: bytes) -> str:
@ -133,6 +145,7 @@ def b10encode(fingerprint: bytes) -> str:
# Database unicode string padding
def unicode_padding(string: str) -> str:
"""Pad Unicode string to 255 chars.
@ -144,7 +157,7 @@ def unicode_padding(string: str) -> str:
if len(string) >= PADDING_LENGTH:
raise CriticalError("Invalid input size.")
length = PADDING_LENGTH - (len(string) % PADDING_LENGTH)
length = PADDING_LENGTH - (len(string) % PADDING_LENGTH)
string += length * chr(length)
if len(string) != PADDING_LENGTH: # pragma: no cover
@ -155,18 +168,21 @@ def unicode_padding(string: str) -> str:
def rm_padding_str(string: str) -> str:
"""Remove padding from plaintext."""
return string[:-ord(string[-1:])]
return string[: -ord(string[-1:])]
# Database constant length encoding
def onion_address_to_pub_key(account: str) -> bytes:
"""Encode TFC account to a public key byte string.
The public key is the most compact possible representation of a TFC
account, so it is useful when storing the address into databases.
"""
return base64.b32decode(account.upper())[:-(ONION_ADDRESS_CHECKSUM_LENGTH + ONION_SERVICE_VERSION_LENGTH)]
return base64.b32decode(account.upper())[
: -(ONION_ADDRESS_CHECKSUM_LENGTH + ONION_SERVICE_VERSION_LENGTH)
]
def bool_to_bytes(boolean: bool) -> bytes:
@ -176,12 +192,12 @@ def bool_to_bytes(boolean: bool) -> bytes:
def int_to_bytes(integer: int) -> bytes:
"""Convert integer to an 8-byte byte string."""
return struct.pack('!Q', integer)
return struct.pack("!Q", integer)
def double_to_bytes(double_: float) -> bytes:
"""Convert double to an 8-byte byte string."""
return struct.pack('d', double_)
return struct.pack("d", double_)
def str_to_bytes(string: str) -> bytes:
@ -189,11 +205,12 @@ def str_to_bytes(string: str) -> bytes:
Length of padded string is 255 * 4 + 4 (BOM) = 1024 bytes.
"""
return unicode_padding(string).encode('utf-32')
return unicode_padding(string).encode("utf-32")
# Decoding
def pub_key_to_onion_address(public_key: bytes) -> str:
"""Decode public key byte string to TFC account.
@ -201,12 +218,13 @@ def pub_key_to_onion_address(public_key: bytes) -> str:
public key of v3 Onion Service into service ID:
https://gitweb.torproject.org/torspec.git/tree/rend-spec-v3.txt#n2019
"""
checksum = hashlib.sha3_256(ONION_ADDRESS_CHECKSUM_ID
+ public_key
+ ONION_SERVICE_VERSION
).digest()[:ONION_ADDRESS_CHECKSUM_LENGTH]
checksum = hashlib.sha3_256(
ONION_ADDRESS_CHECKSUM_ID + public_key + ONION_SERVICE_VERSION
).digest()[:ONION_ADDRESS_CHECKSUM_LENGTH]
return base64.b32encode(public_key + checksum + ONION_SERVICE_VERSION).lower().decode()
return (
base64.b32encode(public_key + checksum + ONION_SERVICE_VERSION).lower().decode()
)
def pub_key_to_short_address(public_key: bytes) -> str:
@ -223,13 +241,13 @@ def bytes_to_bool(byte_string: Union[bytes, int]) -> bool:
def bytes_to_int(byte_string: bytes) -> int:
"""Convert 8-byte byte string to an integer."""
int_format = struct.unpack('!Q', byte_string)[0] # type: int
int_format = struct.unpack("!Q", byte_string)[0] # type: int
return int_format
def bytes_to_double(byte_string: bytes) -> float:
"""Convert 8-byte byte string to double."""
float_format = struct.unpack('d', byte_string)[0] # type: float
float_format = struct.unpack("d", byte_string)[0] # type: float
return float_format
@ -238,9 +256,9 @@ def bytes_to_str(byte_string: bytes) -> str:
Decode byte string with UTF-32 and remove Unicode padding.
"""
return rm_padding_str(byte_string.decode('utf-32'))
return rm_padding_str(byte_string.decode("utf-32"))
def bytes_to_timestamp(byte_string: bytes) -> datetime:
"""Covert 4-byte byte string to datetime object."""
return datetime.fromtimestamp(struct.unpack('<L', byte_string)[0])
return datetime.fromtimestamp(struct.unpack("<L", byte_string)[0])

View File

@ -24,9 +24,9 @@ import sys
import typing
from datetime import datetime
from typing import Optional
from typing import Optional
from src.common.output import clear_screen, m_print
from src.common.output import clear_screen, m_print
from src.common.statics import TFC
if typing.TYPE_CHECKING:
@ -38,49 +38,62 @@ class CriticalError(Exception):
def __init__(self, error_message: str, exit_code: int = 1) -> None:
"""A severe exception that requires TFC to gracefully exit."""
graceful_exit(f"Critical error in function '{inspect.stack()[1][3]}':\n{error_message}",
clear=False, exit_code=exit_code)
graceful_exit(
f"Critical error in function '{inspect.stack()[1][3]}':\n{error_message}",
clear=False,
exit_code=exit_code,
)
class FunctionReturn(Exception):
"""Print return message and return to exception handler function."""
class SoftError(Exception):
"""A soft exception from which TFC can automatically recover from.
def __init__(self,
message: str,
window: Optional['RxWindow'] = None, # The window to include the message in
output: bool = True, # When False, doesn't print message when adding it to window
bold: bool = False, # When True, prints the message in bold
head_clear: bool = False, # When True, clears the screen before printing message
tail_clear: bool = False, # When True, clears the screen after message (needs delay)
delay: float = 0, # The delay before continuing
head: int = 1, # The number of new-lines to print before the message
tail: int = 1, # The number of new-lines to print after message
) -> None:
When a SoftError is raised, TFC prints a message
and returns to the exception handler function.
"""
def __init__(
self,
message: str,
window: Optional["RxWindow"] = None, # The window to include the message in
output: bool = True, # When False, doesn't print message when adding it to window
bold: bool = False, # When True, prints the message in bold
head_clear: bool = False, # When True, clears the screen before printing message
tail_clear: bool = False, # When True, clears the screen after message (needs delay)
delay: float = 0, # The delay before continuing
head: int = 1, # The number of new-lines to print before the message
tail: int = 1, # The number of new-lines to print after message
ts: Optional["datetime"] = None, # Datetime object
) -> None:
"""Print return message and return to exception handler function."""
self.message = message
if window is None:
if output:
m_print(self.message,
bold=bold,
head_clear=head_clear,
tail_clear=tail_clear,
delay=delay,
head=head,
tail=tail)
m_print(
self.message,
bold=bold,
head_clear=head_clear,
tail_clear=tail_clear,
delay=delay,
head=head,
tail=tail,
)
else:
window.add_new(datetime.now(), self.message, output=output)
ts = datetime.now() if ts is None else ts
window.add_new(ts, self.message, output=output)
def graceful_exit(message: str = '', # Exit message to print
clear: bool = True, # When False, does not clear screen before printing message
exit_code: int = 0 # Value returned to parent process
) -> None:
def graceful_exit(
message: str = "", # Exit message to print
clear: bool = True, # When False, does not clear screen before printing message
exit_code: int = 0, # Value returned to parent process
) -> None:
"""Display a message and exit TFC."""
if clear:
clear_screen()
if message:
print('\n' + message)
print("\n" + message)
print(f"\nExiting {TFC}.\n")
sys.exit(exit_code)

View File

@ -31,29 +31,55 @@ import time
import typing
from datetime import datetime
from typing import Dict, Optional, Tuple, Union
from typing import Any, Dict, Optional, Tuple, Union
from serial.serialutil import SerialException
from src.common.exceptions import CriticalError, FunctionReturn, graceful_exit
from src.common.input import yes
from src.common.misc import calculate_race_condition_delay, ensure_dir, ignored, get_terminal_width
from src.common.misc import separate_trailer
from src.common.output import m_print, phase, print_on_previous_line
from src.common.exceptions import CriticalError, graceful_exit, SoftError
from src.common.input import yes
from src.common.misc import (
calculate_race_condition_delay,
ensure_dir,
ignored,
get_terminal_width,
)
from src.common.misc import separate_trailer
from src.common.output import m_print, phase, print_on_previous_line
from src.common.reed_solomon import ReedSolomonError, RSCodec
from src.common.statics import (BAUDS_PER_BYTE, DIR_USER_DATA, DONE, DST_DD_LISTEN_SOCKET, DST_LISTEN_SOCKET,
GATEWAY_QUEUE, LOCALHOST, LOCAL_TESTING_PACKET_DELAY, MAX_INT, NC,
PACKET_CHECKSUM_LENGTH, RECEIVER, RELAY, RP_LISTEN_SOCKET, RX,
SERIAL_RX_MIN_TIMEOUT, SETTINGS_INDENT, SRC_DD_LISTEN_SOCKET, TRANSMITTER, TX)
from src.common.statics import (
BAUDS_PER_BYTE,
DIR_USER_DATA,
DONE,
DST_DD_LISTEN_SOCKET,
DST_LISTEN_SOCKET,
GATEWAY_QUEUE,
LOCALHOST,
LOCAL_TESTING_PACKET_DELAY,
MAX_INT,
NC,
PACKET_CHECKSUM_LENGTH,
RECEIVER,
RELAY,
RP_LISTEN_SOCKET,
RX,
SERIAL_RX_MIN_TIMEOUT,
SETTINGS_INDENT,
SRC_DD_LISTEN_SOCKET,
TRANSMITTER,
TX,
)
if typing.TYPE_CHECKING:
from multiprocessing import Queue
JSONDict = Dict[str, Union[int, bool, str]]
def gateway_loop(queues: Dict[bytes, 'Queue[Tuple[datetime, bytes]]'],
gateway: 'Gateway',
unit_test: bool = False
) -> None:
def gateway_loop(
queues: Dict[bytes, "Queue[Tuple[datetime, bytes]]"],
gateway: "Gateway",
unit_test: bool = False,
) -> None:
"""Load data from serial interface or socket into a queue.
Also place the current timestamp to queue to be delivered to the
@ -77,13 +103,9 @@ class Gateway(object):
Source/Destination Computer with the Networked computer.
"""
def __init__(self,
operation: str,
local_test: bool,
dd_sockets: bool
) -> None:
def __init__(self, operation: str, local_test: bool, dd_sockets: bool) -> None:
"""Create a new Gateway object."""
self.settings = GatewaySettings(operation, local_test, dd_sockets)
self.settings = GatewaySettings(operation, local_test, dd_sockets)
self.tx_serial = None # type: Optional[serial.Serial]
self.rx_serial = None # type: Optional[serial.Serial]
self.rx_socket = None # type: Optional[multiprocessing.connection.Connection]
@ -134,11 +156,15 @@ class Gateway(object):
the time it takes to send one byte with given baud rate.
"""
try:
self.tx_serial = self.rx_serial = serial.Serial(self.search_serial_interface(),
self.settings.session_serial_baudrate,
timeout=0)
self.tx_serial = self.rx_serial = serial.Serial(
self.search_serial_interface(),
self.settings.session_serial_baudrate,
timeout=0,
)
except SerialException:
raise CriticalError("SerialException. Ensure $USER is in the dialout group by restarting this computer.")
raise CriticalError(
"SerialException. Ensure $USER is in the dialout group by restarting this computer."
)
def write(self, orig_packet: bytes) -> None:
"""Add error correction data and output data via socket/serial interface.
@ -165,47 +191,61 @@ class Gateway(object):
self.establish_serial()
self.write(orig_packet)
def read(self) -> bytes:
"""Read data via socket/serial interface.
def read_socket(self) -> bytes:
"""Read packet from socket interface."""
if self.rx_socket is None:
raise CriticalError("Socket interface has not been initialized.")
while True:
try:
packet = self.rx_socket.recv() # type: bytes
return packet
except KeyboardInterrupt:
pass
except EOFError:
raise CriticalError("Relay IPC client disconnected.", exit_code=0)
def read_serial(self) -> bytes:
"""Read packet from serial interface.
Read 0..N bytes from serial interface, where N is the buffer
size of the serial interface. Once `read_buffer` has data, and
the interface hasn't returned data long enough for the timer to
exceed the timeout value, return received data.
"""
if self.settings.local_testing_mode and self.rx_socket is not None:
while True:
try:
packet = self.rx_socket.recv() # type: bytes
return packet
except KeyboardInterrupt:
pass
except EOFError:
raise CriticalError("Relay IPC client disconnected.", exit_code=0)
else:
if self.rx_serial is None:
raise CriticalError("Serial interface has not been initialized.")
while True:
try:
start_time = 0.0
read_buffer = bytearray()
while True:
read = self.rx_serial.read_all()
if read:
start_time = time.monotonic()
read_buffer.extend(read)
else:
if read_buffer:
delta = time.monotonic() - start_time
if delta > self.settings.rx_receive_timeout:
return bytes(read_buffer)
else:
time.sleep(0.0001)
if self.rx_serial is None:
raise CriticalError("Serial interface has not been initialized.")
except (EOFError, KeyboardInterrupt):
pass
except (OSError, SerialException):
self.establish_serial()
while True:
try:
start_time = 0.0
read_buffer = bytearray()
while True:
read = self.rx_serial.read_all()
if read:
start_time = time.monotonic()
read_buffer.extend(read)
else:
if read_buffer:
delta = time.monotonic() - start_time
if delta > self.settings.rx_receive_timeout:
return bytes(read_buffer)
else:
time.sleep(0.0001)
except (EOFError, KeyboardInterrupt):
pass
except (OSError, SerialException):
self.establish_serial()
def read(self) -> bytes:
"""Read data via socket/serial interface."""
data = (
self.read_socket()
if self.settings.local_testing_mode
else self.read_serial()
)
return data
def add_error_correction(self, packet: bytes) -> bytes:
"""Add error correction to packet that will be output.
@ -223,7 +263,10 @@ class Gateway(object):
if self.settings.session_serial_error_correction:
packet = self.rs.encode(packet)
else:
packet = packet + hashlib.blake2b(packet, digest_size=PACKET_CHECKSUM_LENGTH).digest()
packet = (
packet
+ hashlib.blake2b(packet, digest_size=PACKET_CHECKSUM_LENGTH).digest()
)
return packet
def detect_errors(self, packet: bytes) -> bytes:
@ -233,11 +276,19 @@ class Gateway(object):
packet, _ = self.rs.decode(packet)
return bytes(packet)
except ReedSolomonError:
raise FunctionReturn("Error: Reed-Solomon failed to correct errors in the received packet.", bold=True)
raise SoftError(
"Error: Reed-Solomon failed to correct errors in the received packet.",
bold=True,
)
else:
packet, checksum = separate_trailer(packet, PACKET_CHECKSUM_LENGTH)
if hashlib.blake2b(packet, digest_size=PACKET_CHECKSUM_LENGTH).digest() != checksum:
raise FunctionReturn("Warning! Received packet had an invalid checksum.", bold=True)
if (
hashlib.blake2b(packet, digest_size=PACKET_CHECKSUM_LENGTH).digest()
!= checksum
):
raise SoftError(
"Warning! Received packet had an invalid checksum.", bold=True
)
return packet
def search_serial_interface(self) -> str:
@ -246,28 +297,34 @@ class Gateway(object):
search_announced = False
if not self.init_found:
phase("Searching for USB-to-serial interface", offset=len('Found'))
phase("Searching for USB-to-serial interface", offset=len("Found"))
while True:
for f in sorted(os.listdir('/dev/')):
if f.startswith('ttyUSB'):
for f in sorted(os.listdir("/dev/")):
if f.startswith("ttyUSB"):
if self.init_found:
time.sleep(1)
phase('Found', done=True)
phase("Found", done=True)
if self.init_found:
print_on_previous_line(reps=2)
self.init_found = True
return f'/dev/{f}'
else:
time.sleep(0.1)
if self.init_found and not search_announced:
phase("Serial adapter disconnected. Waiting for interface", head=1, offset=len('Found'))
search_announced = True
return f"/dev/{f}"
time.sleep(0.1)
if self.init_found and not search_announced:
phase(
"Serial adapter disconnected. Waiting for interface",
head=1,
offset=len("Found"),
)
search_announced = True
else:
if self.settings.built_in_serial_interface in sorted(os.listdir('/dev/')):
return f'/dev/{self.settings.built_in_serial_interface}'
raise CriticalError(f"Error: /dev/{self.settings.built_in_serial_interface} was not found.")
if self.settings.built_in_serial_interface in sorted(os.listdir("/dev/")):
return f"/dev/{self.settings.built_in_serial_interface}"
raise CriticalError(
f"Error: /dev/{self.settings.built_in_serial_interface} was not found."
)
# Local testing
@ -303,8 +360,12 @@ class Gateway(object):
under a threat model where endpoint security is of importance.
"""
try:
socket_number = RP_LISTEN_SOCKET if self.settings.software_operation == NC else DST_LISTEN_SOCKET
listener = multiprocessing.connection.Listener((LOCALHOST, socket_number))
socket_number = (
RP_LISTEN_SOCKET
if self.settings.software_operation == NC
else DST_LISTEN_SOCKET
)
listener = multiprocessing.connection.Listener((LOCALHOST, socket_number))
self.rx_socket = listener.accept()
except KeyboardInterrupt:
graceful_exit()
@ -317,12 +378,22 @@ class Gateway(object):
while True:
try:
if self.settings.software_operation == TX:
socket_number = SRC_DD_LISTEN_SOCKET if self.settings.data_diode_sockets else RP_LISTEN_SOCKET
socket_number = (
SRC_DD_LISTEN_SOCKET
if self.settings.data_diode_sockets
else RP_LISTEN_SOCKET
)
else:
socket_number = DST_DD_LISTEN_SOCKET if self.settings.data_diode_sockets else DST_LISTEN_SOCKET
socket_number = (
DST_DD_LISTEN_SOCKET
if self.settings.data_diode_sockets
else DST_LISTEN_SOCKET
)
try:
self.tx_socket = multiprocessing.connection.Client((LOCALHOST, socket_number))
self.tx_socket = multiprocessing.connection.Client(
(LOCALHOST, socket_number)
)
except ConnectionRefusedError:
time.sleep(0.1)
continue
@ -352,11 +423,7 @@ class GatewaySettings(object):
inconvenience of encrypting the setting values.
"""
def __init__(self,
operation: str,
local_test: bool,
dd_sockets: bool
) -> None:
def __init__(self, operation: str, local_test: bool, dd_sockets: bool) -> None:
"""Create a new Settings object.
The settings below are altered from within the program itself.
@ -364,20 +431,22 @@ class GatewaySettings(object):
file under $HOME/tfc/user_data from where, if needed, they can
be manually altered by the user.
"""
self.serial_baudrate = 19200
self.serial_error_correction = 5
self.use_serial_usb_adapter = True
self.built_in_serial_interface = 'ttyS0'
self.serial_baudrate = 19200
self.serial_error_correction = 5
self.use_serial_usb_adapter = True
self.built_in_serial_interface = "ttyS0"
self.software_operation = operation
self.local_testing_mode = local_test
self.data_diode_sockets = dd_sockets
self.all_keys = list(vars(self).keys())
self.key_list = self.all_keys[:self.all_keys.index('software_operation')]
self.key_list = self.all_keys[: self.all_keys.index("software_operation")]
self.defaults = {k: self.__dict__[k] for k in self.key_list}
self.file_name = f'{DIR_USER_DATA}{self.software_operation}_serial_settings.json'
self.file_name = (
f"{DIR_USER_DATA}{self.software_operation}_serial_settings.json"
)
ensure_dir(DIR_USER_DATA)
if os.path.isfile(self.file_name):
@ -386,14 +455,18 @@ class GatewaySettings(object):
self.setup()
self.store_settings()
self.session_serial_baudrate = self.serial_baudrate
self.session_serial_baudrate = self.serial_baudrate
self.session_serial_error_correction = self.serial_error_correction
self.session_usb_serial_adapter = self.use_serial_usb_adapter
self.session_usb_serial_adapter = self.use_serial_usb_adapter
self.tx_inter_packet_delay, self.rx_receive_timeout = self.calculate_serial_delays(self.session_serial_baudrate)
(
self.tx_inter_packet_delay,
self.rx_receive_timeout,
) = self.calculate_serial_delays(self.session_serial_baudrate)
self.race_condition_delay = calculate_race_condition_delay(self.session_serial_error_correction,
self.serial_baudrate)
self.race_condition_delay = calculate_race_condition_delay(
self.session_serial_error_correction, self.serial_baudrate
)
@classmethod
def calculate_serial_delays(cls, baud_rate: int) -> Tuple[float, float]:
@ -407,7 +480,7 @@ class GatewaySettings(object):
bytes_per_sec = baud_rate / BAUDS_PER_BYTE
byte_travel_t = 1 / bytes_per_sec
rx_receive_timeout = max(2 * byte_travel_t, SERIAL_RX_MIN_TIMEOUT)
rx_receive_timeout = max(2 * byte_travel_t, SERIAL_RX_MIN_TIMEOUT)
tx_inter_packet_delay = 2 * rx_receive_timeout
return tx_inter_packet_delay, rx_receive_timeout
@ -420,35 +493,48 @@ class GatewaySettings(object):
if not self.local_testing_mode:
name = {TX: TRANSMITTER, NC: RELAY, RX: RECEIVER}[self.software_operation]
self.use_serial_usb_adapter = yes(f"Use USB-to-serial/TTL adapter for {name} Computer?", head=1, tail=1)
self.use_serial_usb_adapter = yes(
f"Use USB-to-serial/TTL adapter for {name} Computer?", head=1, tail=1
)
if self.use_serial_usb_adapter:
for f in sorted(os.listdir('/dev/')):
if f.startswith('ttyUSB'):
for f in sorted(os.listdir("/dev/")):
if f.startswith("ttyUSB"):
return None
else:
m_print("Error: USB-to-serial/TTL adapter not found.")
self.setup()
m_print("Error: USB-to-serial/TTL adapter not found.")
self.setup()
else:
if self.built_in_serial_interface in sorted(os.listdir('/dev/')):
return None
else:
m_print(f"Error: Serial interface /dev/{self.built_in_serial_interface} not found.")
if self.built_in_serial_interface not in sorted(os.listdir("/dev/")):
m_print(
f"Error: Serial interface /dev/{self.built_in_serial_interface} not found."
)
self.setup()
def store_settings(self) -> None:
"""Store serial settings in JSON format."""
serialized = json.dumps(self, default=(lambda o: {k: self.__dict__[k] for k in self.key_list}), indent=4)
serialized = json.dumps(
self,
default=(lambda _: {k: self.__dict__[k] for k in self.key_list}),
indent=4,
)
with open(self.file_name, 'w+') as f:
with open(self.file_name, "w+") as f:
f.write(serialized)
f.flush()
os.fsync(f.fileno())
def invalid_setting(self, key: str, json_dict: Dict[str, Union[bool, int, str]]) -> None:
def invalid_setting(
self, key: str, json_dict: Dict[str, Union[bool, int, str]]
) -> None:
"""Notify about setting an invalid value to default value."""
m_print([f"Error: Invalid value '{json_dict[key]}' for setting '{key}' in '{self.file_name}'.",
f"The value has been set to default ({self.defaults[key]})."], head=1, tail=1)
m_print(
[
f"Error: Invalid value '{json_dict[key]}' for setting '{key}' in '{self.file_name}'.",
f"The value has been set to default ({self.defaults[key]}).",
],
head=1,
tail=1,
)
setattr(self, key, self.defaults[key])
def load_settings(self) -> None:
@ -459,52 +545,71 @@ class GatewaySettings(object):
except json.decoder.JSONDecodeError:
os.remove(self.file_name)
self.store_settings()
print(f"\nError: Invalid JSON format in '{self.file_name}'."
"\nSerial interface settings have been set to default values.\n")
print(
f"\nError: Invalid JSON format in '{self.file_name}'."
"\nSerial interface settings have been set to default values.\n"
)
return None
# Check for missing setting
for key in self.key_list:
if key not in json_dict:
m_print([f"Error: Missing setting '{key}' in '{self.file_name}'.",
f"The value has been set to default ({self.defaults[key]})."], head=1, tail=1)
setattr(self, key, self.defaults[key])
continue
# Closer inspection of each setting value
if key == 'serial_baudrate' and json_dict[key] not in serial.Serial().BAUDRATES:
self.invalid_setting(key, json_dict)
continue
elif key == 'serial_error_correction' and (not isinstance(json_dict[key], int) or json_dict[key] < 0):
self.invalid_setting(key, json_dict)
continue
elif key == 'use_serial_usb_adapter':
if not isinstance(json_dict[key], bool):
self.invalid_setting(key, json_dict)
continue
elif key == 'built_in_serial_interface':
if not isinstance(json_dict[key], str):
self.invalid_setting(key, json_dict)
continue
if not any(json_dict[key] == f for f in os.listdir('/sys/class/tty')):
self.invalid_setting(key, json_dict)
continue
setattr(self, key, json_dict[key])
self.check_missing_settings(json_dict)
# Store after loading to add missing, to replace invalid settings,
# and to remove settings that do not belong in the JSON file.
self.store_settings()
def check_missing_settings(self, json_dict: Any) -> None:
"""Check for missing JSON fields and invalid values."""
for key in self.key_list:
if key not in json_dict:
m_print(
[
f"Error: Missing setting '{key}' in '{self.file_name}'.",
f"The value has been set to default ({self.defaults[key]}).",
],
head=1,
tail=1,
)
setattr(self, key, self.defaults[key])
continue
# Closer inspection of each setting value
if (
key == "serial_baudrate"
and json_dict[key] not in serial.Serial().BAUDRATES
):
self.invalid_setting(key, json_dict)
continue
elif key == "serial_error_correction" and (
not isinstance(json_dict[key], int) or json_dict[key] < 0
):
self.invalid_setting(key, json_dict)
continue
elif key == "use_serial_usb_adapter":
if not isinstance(json_dict[key], bool):
self.invalid_setting(key, json_dict)
continue
elif key == "built_in_serial_interface":
if not isinstance(json_dict[key], str):
self.invalid_setting(key, json_dict)
continue
if not any(json_dict[key] == f for f in os.listdir("/sys/class/tty")):
self.invalid_setting(key, json_dict)
continue
setattr(self, key, json_dict[key])
def change_setting(self, key: str, value_str: str) -> None:
"""Parse, update and store new setting value."""
attribute = self.__getattribute__(key)
try:
if isinstance(attribute, bool):
value = dict(true=True, false=False)[value_str.lower()] # type: Union[bool, int]
value = dict(true=True, false=False)[
value_str.lower()
] # type: Union[bool, int]
elif isinstance(attribute, int):
value = int(value_str)
@ -515,7 +620,9 @@ class GatewaySettings(object):
raise CriticalError("Invalid attribute type in settings.")
except (KeyError, ValueError):
raise FunctionReturn(f"Error: Invalid setting value '{value_str}'.", delay=1, tail_clear=True)
raise SoftError(
f"Error: Invalid setting value '{value_str}'.", delay=1, tail_clear=True
)
self.validate_key_value_pair(key, value)
@ -528,14 +635,14 @@ class GatewaySettings(object):
Perform further evaluation on settings the values of which have
restrictions.
"""
if key == 'serial_baudrate':
if key == "serial_baudrate":
if value not in serial.Serial().BAUDRATES:
raise FunctionReturn("Error: The specified baud rate is not supported.")
raise SoftError("Error: The specified baud rate is not supported.")
m_print("Baud rate will change on restart.", head=1, tail=1)
if key == 'serial_error_correction':
if key == "serial_error_correction":
if value < 0:
raise FunctionReturn("Error: Invalid value for error correction ratio.")
raise SoftError("Error: Invalid value for error correction ratio.")
m_print("Error correction ratio will change on restart.", head=1, tail=1)
def print_settings(self) -> None:
@ -543,20 +650,22 @@ class GatewaySettings(object):
Print list of settings, their current and
default values, and setting descriptions.
"""
desc_d = {"serial_baudrate": "The speed of serial interface in bauds per second",
"serial_error_correction": "Number of byte errors serial datagrams can recover from"}
desc_d = {
"serial_baudrate": "The speed of serial interface in bauds per second",
"serial_error_correction": "Number of byte errors serial datagrams can recover from",
}
# Columns
c1 = ['Serial interface setting']
c2 = ['Current value']
c3 = ['Default value']
c4 = ['Description']
c1 = ["Serial interface setting"]
c2 = ["Current value"]
c3 = ["Default value"]
c4 = ["Description"]
terminal_width = get_terminal_width()
description_indent = 64
if terminal_width < description_indent + 1:
raise FunctionReturn("Error: Screen width is too small.")
raise SoftError("Error: Screen width is too small.")
# Populate columns with setting data
for key in desc_d:
@ -565,26 +674,33 @@ class GatewaySettings(object):
c3.append(str(self.defaults[key]))
description = desc_d[key]
wrapper = textwrap.TextWrapper(width=max(1, (terminal_width - description_indent)))
desc_lines = wrapper.fill(description).split('\n')
wrapper = textwrap.TextWrapper(
width=max(1, (terminal_width - description_indent))
)
desc_lines = wrapper.fill(description).split("\n")
desc_string = desc_lines[0]
for line in desc_lines[1:]:
desc_string += '\n' + description_indent * ' ' + line
desc_string += "\n" + description_indent * " " + line
if len(desc_lines) > 1:
desc_string += '\n'
desc_string += "\n"
c4.append(desc_string)
# Calculate column widths
c1w, c2w, c3w = [max(len(v) for v in column) + SETTINGS_INDENT for column in [c1, c2, c3]]
c1w, c2w, c3w = [
max(len(v) for v in column) + SETTINGS_INDENT for column in [c1, c2, c3]
]
# Align columns by adding whitespace between fields of each line
lines = [f'{f1:{c1w}} {f2:{c2w}} {f3:{c3w}} {f4}' for f1, f2, f3, f4 in zip(c1, c2, c3, c4)]
lines = [
f"{f1:{c1w}} {f2:{c2w}} {f3:{c3w}} {f4}"
for f1, f2, f3, f4 in zip(c1, c2, c3, c4)
]
# Add a terminal-wide line between the column names and the data
lines.insert(1, get_terminal_width() * '')
lines.insert(1, get_terminal_width() * "")
# Print the settings
print('\n' + '\n'.join(lines) + '\n')
print("\n" + "\n".join(lines) + "\n")

View File

@ -24,12 +24,25 @@ import typing
from typing import Any, Callable, Optional
from src.common.encoding import b58decode
from src.common.encoding import b58decode
from src.common.exceptions import CriticalError
from src.common.misc import get_terminal_width, terminal_width_check
from src.common.output import clear_screen, m_print, print_on_previous_line, print_spacing
from src.common.statics import (B58_LOCAL_KEY, B58_LOCAL_KEY_GUIDE, B58_PUBLIC_KEY, B58_PUBLIC_KEY_GUIDE,
CURSOR_UP_ONE_LINE, ECDHE, NC_BYPASS_START, NC_BYPASS_STOP)
from src.common.misc import get_terminal_width, terminal_width_check
from src.common.output import (
clear_screen,
m_print,
print_on_previous_line,
print_spacing,
)
from src.common.statics import (
B58_LOCAL_KEY,
B58_LOCAL_KEY_GUIDE,
B58_PUBLIC_KEY,
B58_PUBLIC_KEY_GUIDE,
CURSOR_UP_ONE_LINE,
ECDHE,
NC_BYPASS_START,
NC_BYPASS_STOP,
)
if typing.TYPE_CHECKING:
from src.common.db_settings import Settings
@ -38,18 +51,19 @@ if typing.TYPE_CHECKING:
Validator = Callable[..., str]
def ask_confirmation_code(source: str # The system the confirmation code is displayed by
) -> str: # The confirmation code entered by the user
def ask_confirmation_code(
source: str, # The system the confirmation code is displayed by
) -> str: # The confirmation code entered by the user
"""\
Ask the user to input confirmation code from Source Computer to
verify local key has been installed.
"""
title = f"Enter confirmation code (from {source}): "
input_space = len(' ff ')
title = f"Enter confirmation code (from {source}): "
input_space = len(" ff ")
upper_line = ('' + (len(title) + input_space) * '' + '')
title_line = ('' + title + input_space * ' ' + '')
lower_line = ('' + (len(title) + input_space) * '' + '')
upper_line = "" + (len(title) + input_space) * "" + ""
title_line = "" + title + input_space * " " + ""
lower_line = "" + (len(title) + input_space) * "" + ""
terminal_w = get_terminal_width()
upper_line = upper_line.center(terminal_w)
@ -63,41 +77,44 @@ def ask_confirmation_code(source: str # The system the confirmation code is dis
print(lower_line)
print(3 * CURSOR_UP_ONE_LINE)
indent = title_line.find('')
return input(indent * ' ' + f'{title}')
indent = title_line.find("")
return input(indent * " " + f"{title}")
def box_input(message: str, # Input prompt message
default: str = '', # Default return value
head: int = 0, # Number of new lines to print before the input
tail: int = 1, # Number of new lines to print after input
expected_len: int = 0, # Expected length of the input
key_type: str = '', # When specified, sets input width
guide: bool = False, # When True, prints the guide for key
validator: Optional[Validator] = None, # Input validator function
validator_args: Optional[Any] = None # Arguments required by the validator
) -> str: # Input from user
def box_input(
message: str, # Input prompt message
default: str = "", # Default return value
head: int = 0, # Number of new lines to print before the input
tail: int = 1, # Number of new lines to print after input
expected_len: int = 0, # Expected length of the input
key_type: str = "", # When specified, sets input width
guide: bool = False, # When True, prints the guide for key
validator: Optional[Validator] = None, # Input validator function
validator_args: Optional[Any] = None, # Arguments required by the validator
) -> str: # Input from user
"""Display boxed input prompt with a message."""
print_spacing(head)
terminal_width = get_terminal_width()
if key_type:
key_guide = {B58_LOCAL_KEY: B58_LOCAL_KEY_GUIDE,
B58_PUBLIC_KEY: B58_PUBLIC_KEY_GUIDE}.get(key_type, '')
key_guide = {
B58_LOCAL_KEY: B58_LOCAL_KEY_GUIDE,
B58_PUBLIC_KEY: B58_PUBLIC_KEY_GUIDE,
}.get(key_type, "")
if guide:
inner_spc = len(key_guide) + 2
else:
inner_spc = (86 if key_type == B58_PUBLIC_KEY else 53)
inner_spc = 86 if key_type == B58_PUBLIC_KEY else 53
else:
key_guide = ''
key_guide = ""
inner_spc = terminal_width - 2 if expected_len == 0 else expected_len + 2
upper_line = '' + inner_spc * '' + ''
guide_line = '' + key_guide + ''
input_line = '' + inner_spc * ' ' + ''
lower_line = '' + inner_spc * '' + ''
box_indent = (terminal_width - len(upper_line)) // 2 * ' '
upper_line = "" + inner_spc * "" + ""
guide_line = "" + key_guide + ""
input_line = "" + inner_spc * " " + ""
lower_line = "" + inner_spc * "" + ""
box_indent = (terminal_width - len(upper_line)) // 2 * " "
terminal_width_check(len(upper_line))
@ -107,15 +124,15 @@ def box_input(message: str, # Input prompt messa
print(box_indent + input_line)
print(box_indent + lower_line)
print((5 if guide else 4) * CURSOR_UP_ONE_LINE)
print(box_indent + '┌─┤' + message + '')
print(box_indent + "┌─┤" + message + "")
if guide:
print('')
print("")
user_input = input(box_indent + '')
user_input = input(box_indent + "")
if user_input == '':
if user_input == "":
print(2 * CURSOR_UP_ONE_LINE)
print(box_indent + '' + default)
print(box_indent + "" + default)
user_input = default
if validator is not None:
@ -123,22 +140,36 @@ def box_input(message: str, # Input prompt messa
if error_msg:
m_print(error_msg, head=1)
print_on_previous_line(reps=4, delay=1)
return box_input(message, default, head, tail, expected_len, key_type, guide, validator, validator_args)
return box_input(
message,
default,
head,
tail,
expected_len,
key_type,
guide,
validator,
validator_args,
)
print_spacing(tail)
return user_input
def get_b58_key(key_type: str, # The type of Base58 key to be entered
settings: 'Settings', # Settings object
short_address: str = '' # The contact's short Onion address
) -> bytes: # The Base58 decoded key
def get_b58_key(
key_type: str, # The type of Base58 key to be entered
settings: "Settings", # Settings object
short_address: str = "", # The contact's short Onion address
) -> bytes: # The Base58 decoded key
"""Ask the user to input a Base58 encoded key."""
if key_type == B58_PUBLIC_KEY:
clear_screen()
m_print(f"{ECDHE} key exchange", head=1, tail=1, bold=True)
m_print("If needed, resend your public key to the contact by pressing <Enter>", tail=1)
m_print(
"If needed, resend your public key to the contact by pressing <Enter>",
tail=1,
)
box_msg = f"Enter public key of {short_address} (from Relay)"
elif key_type == B58_LOCAL_KEY:
@ -147,20 +178,24 @@ def get_b58_key(key_type: str, # The type of Base58 key to be enter
raise CriticalError("Invalid key type")
while True:
rx_pk = box_input(box_msg, key_type=key_type, guide=not settings.local_testing_mode)
rx_pk = ''.join(rx_pk.split())
rx_pk = box_input(
box_msg, key_type=key_type, guide=not settings.local_testing_mode
)
rx_pk = "".join(rx_pk.split())
if key_type == B58_PUBLIC_KEY and rx_pk == '':
if key_type == B58_PUBLIC_KEY and rx_pk == "":
return rx_pk.encode()
try:
return b58decode(rx_pk, public_key=(key_type == B58_PUBLIC_KEY))
except ValueError:
m_print("Checksum error - Check that the entered key is correct.")
print_on_previous_line(reps=(4 if settings.local_testing_mode else 5), delay=1)
print_on_previous_line(
reps=(4 if settings.local_testing_mode else 5), delay=1
)
def nc_bypass_msg(key: str, settings: 'Settings') -> None:
def nc_bypass_msg(key: str, settings: "Settings") -> None:
"""Print messages about bypassing Networked Computer.
During ciphertext delivery of local key exchange, these bypass
@ -170,29 +205,37 @@ def nc_bypass_msg(key: str, settings: 'Settings') -> None:
key. Without the ciphertext, e.g. a visually collected local key
decryption key is useless.
"""
m = {NC_BYPASS_START: "Bypass Networked Computer if needed. Press <Enter> to send local key.",
NC_BYPASS_STOP: "Remove bypass of Networked Computer. Press <Enter> to continue."}
m = {
NC_BYPASS_START: "Bypass Networked Computer if needed. Press <Enter> to send local key.",
NC_BYPASS_STOP: "Remove bypass of Networked Computer. Press <Enter> to continue.",
}
if settings.nc_bypass_messages:
m_print(m[key], manual_proceed=True, box=True, head=(1 if key == NC_BYPASS_STOP else 0))
m_print(
m[key],
manual_proceed=True,
box=True,
head=(1 if key == NC_BYPASS_STOP else 0),
)
def pwd_prompt(message: str, # Prompt message
repeat: bool = False # When True, prints corner chars for the second box
) -> str: # Password from user
def pwd_prompt(
message: str, # Prompt message
repeat: bool = False, # When True, prints corner chars for the second box
) -> str: # Password from user
"""Prompt the user to enter a password.
The getpass library ensures the password is not echoed on screen
when it is typed.
"""
l, r = ('', '') if repeat else ('', '')
l, r = ("", "") if repeat else ("", "")
terminal_w = get_terminal_width()
input_space = len(' c ') # `c` is where the caret sits
terminal_w = get_terminal_width()
input_space = len(" c ") # `c` is where the caret sits
upper_line = ( l + (len(message) + input_space) * '' + r ).center(terminal_w)
title_line = ('' + message + input_space * ' ' + '').center(terminal_w)
lower_line = ('' + (len(message) + input_space) * '' + '').center(terminal_w)
upper_line = (l + (len(message) + input_space) * "" + r).center(terminal_w)
title_line = ("" + message + input_space * " " + "").center(terminal_w)
lower_line = ("" + (len(message) + input_space) * "" + "").center(terminal_w)
terminal_width_check(len(upper_line))
@ -201,33 +244,34 @@ def pwd_prompt(message: str, # Prompt message
print(lower_line)
print(3 * CURSOR_UP_ONE_LINE)
indent = title_line.find('')
user_input = getpass.getpass(indent * ' ' + f'{message}')
indent = title_line.find("")
user_input = getpass.getpass(indent * " " + f"{message}")
return user_input
def yes(prompt: str, # Question to be asked
abort: Optional[bool] = None, # Determines the return value of ^C and ^D
head: int = 0, # Number of new lines to print before prompt
tail: int = 0 # Number of new lines to print after prompt
) -> bool: # True/False depending on input
def yes(
prompt: str, # Question to be asked
abort: Optional[bool] = None, # Determines the return value of ^C and ^D
head: int = 0, # Number of new lines to print before prompt
tail: int = 0, # Number of new lines to print after prompt
) -> bool: # True/False depending on input
"""Prompt the user a question that is answered with yes/no."""
print_spacing(head)
prompt = f"{prompt} (y/n): "
input_space = len(' yes ')
prompt = f"{prompt} (y/n): "
input_space = len(" yes ")
upper_line = ('' + (len(prompt) + input_space) * '' + '')
title_line = ('' + prompt + input_space * ' ' + '')
lower_line = ('' + (len(prompt) + input_space) * '' + '')
upper_line = "" + (len(prompt) + input_space) * "" + ""
title_line = "" + prompt + input_space * " " + ""
lower_line = "" + (len(prompt) + input_space) * "" + ""
terminal_w = get_terminal_width()
upper_line = upper_line.center(terminal_w)
title_line = title_line.center(terminal_w)
lower_line = lower_line.center(terminal_w)
indent = title_line.find('')
indent = title_line.find("")
terminal_width_check(len(upper_line))
@ -238,24 +282,24 @@ def yes(prompt: str, # Question to be asked
print(3 * CURSOR_UP_ONE_LINE)
try:
user_input = input(indent * ' ' + f'{prompt}')
user_input = input(indent * " " + f"{prompt}")
except (EOFError, KeyboardInterrupt):
if abort is None:
raise
print('')
user_input = 'y' if abort else 'n'
print("")
user_input = "y" if abort else "n"
print_on_previous_line()
if user_input == '':
if user_input == "":
continue
if user_input.lower() in ['y', 'yes']:
print(indent * ' ' + f'{prompt}Yes │\n')
if user_input.lower() in ["y", "yes"]:
print(indent * " " + f"{prompt}Yes │\n")
print_spacing(tail)
return True
elif user_input.lower() in ['n', 'no']:
print(indent * ' ' + f'{prompt}No │\n')
if user_input.lower() in ["n", "no"]:
print(indent * " " + f"{prompt}No │\n")
print_spacing(tail)
return False

View File

@ -25,35 +25,64 @@ import binascii
import hashlib
import math
import os
import random
import shutil
import subprocess
import sys
import time
import threading
import typing
import zlib
from contextlib import contextmanager
from typing import Any, Callable, Dict, Iterator, List, Optional, Tuple, Type, Union
from contextlib import contextmanager
from typing import Any, Callable, Dict, Iterator, List, Optional, Tuple, Type, Union
from multiprocessing import Process
from src.common.reed_solomon import RSCodec
from src.common.statics import (BAUDS_PER_BYTE, COMMAND_LENGTH, CURSOR_UP_ONE_LINE, DIR_RECV_FILES, DIR_USER_DATA,
DUMMY_CONTACT, DUMMY_GROUP, DUMMY_MEMBER, ECDHE, EVENT, EXIT, EXIT_QUEUE, LOCAL_ID,
LOCAL_PUBKEY, ME, ONION_ADDRESS_CHECKSUM_ID, ONION_ADDRESS_CHECKSUM_LENGTH,
ONION_ADDRESS_LENGTH, ONION_SERVICE_PUBLIC_KEY_LENGTH, PACKET_LENGTH,
PADDING_LENGTH, POWEROFF, PSK, RX, TAILS, TX, WIPE)
from src.common.statics import (
BAUDS_PER_BYTE,
COMMAND_LENGTH,
CURSOR_UP_ONE_LINE,
DIR_RECV_FILES,
DIR_USER_DATA,
DUMMY_CONTACT,
DUMMY_GROUP,
DUMMY_MEMBER,
ECDHE,
EVENT,
EXIT,
EXIT_QUEUE,
LOCAL_ID,
LOCAL_PUBKEY,
ME,
ONION_ADDRESS_CHECKSUM_ID,
ONION_ADDRESS_CHECKSUM_LENGTH,
ONION_ADDRESS_LENGTH,
ONION_SERVICE_PUBLIC_KEY_LENGTH,
PACKET_LENGTH,
PADDING_LENGTH,
POWEROFF,
PSK,
RESET,
RX,
STATIC,
TAILS,
TRAFFIC_MASKING,
TX,
WIPE,
)
if typing.TYPE_CHECKING:
from multiprocessing import Queue
from multiprocessing import Queue
from src.common.db_contacts import ContactList
from src.common.db_groups import GroupList
from src.common.db_groups import GroupList
from src.common.db_settings import Settings
from src.common.gateway import Gateway
from src.common.gateway import Gateway
def calculate_race_condition_delay(serial_error_correction: int,
serial_baudrate: int
) -> float:
def calculate_race_condition_delay(
serial_error_correction: int, serial_baudrate: int
) -> float:
"""\
Calculate the delay required to prevent Relay Program race condition.
@ -66,29 +95,30 @@ def calculate_race_condition_delay(serial_error_correction: int,
before outputting command for Relay Program, to ensure Receiver
Program has received the encrypted command.
"""
rs = RSCodec(2 * serial_error_correction)
rs = RSCodec(2 * serial_error_correction)
message_length = PACKET_LENGTH + ONION_ADDRESS_LENGTH
enc_msg_length = len(rs.encode(os.urandom(message_length)))
enc_cmd_length = len(rs.encode(os.urandom(COMMAND_LENGTH)))
max_bytes = enc_msg_length + (2 * enc_cmd_length)
max_bytes = enc_msg_length + (2 * enc_cmd_length)
return (max_bytes * BAUDS_PER_BYTE) / serial_baudrate
def decompress(data: bytes, # Data to be decompressed
max_size: int # The maximum size of decompressed data.
) -> bytes: # Decompressed data
def decompress(
data: bytes, # Data to be decompressed
max_size: int, # The maximum size of decompressed data.
) -> bytes: # Decompressed data
"""Decompress received data.
The decompressed data has a maximum size, designed to prevent zip
bombs from filling the drive of an unsuspecting user.
"""
from src.common.exceptions import FunctionReturn # Avoid circular import
from src.common.exceptions import SoftError # Avoid circular import
dec = zlib.decompressobj()
dec = zlib.decompressobj()
data = dec.decompress(data, max_size)
if dec.unconsumed_tail:
raise FunctionReturn("Error: Decompression aborted due to possible zip bomb.")
raise SoftError("Error: Decompression aborted due to possible zip bomb.")
del dec
return data
@ -108,39 +138,72 @@ def ensure_dir(directory: str) -> None:
os.makedirs(name)
def get_tab_complete_list(contact_list: 'ContactList',
group_list: 'GroupList',
settings: 'Settings',
gateway: 'Gateway'
) -> List[str]:
def get_tab_complete_list(
contact_list: "ContactList",
group_list: "GroupList",
settings: "Settings",
gateway: "Gateway",
) -> List[str]:
"""Return a list of tab-complete words."""
commands = ['about', 'add ', 'clear', 'cmd', 'connect', 'exit', 'export ', 'file', 'group ', 'help', 'history ',
'localkey', 'logging ', 'msg ', 'names', 'nick ', 'notify ', 'passwd ', 'psk', 'reset', 'rmlogs ',
'set ', 'settings', 'store ', 'unread', 'verify', 'whisper ', 'whois ']
commands = [
"about",
"add ",
"clear",
"cmd",
"connect",
"exit",
"export ",
"file",
"group ",
"help",
"history ",
"localkey",
"logging ",
"msg ",
"names",
"nick ",
"notify ",
"passwd ",
"psk",
"reset",
"rmlogs ",
"set ",
"settings",
"store ",
"unread",
"verify",
"whisper ",
"whois ",
]
tc_list = ['all', 'create ', 'false', 'False', 'join ', 'true', 'True']
tc_list = ["all", "create ", "false", "False", "join ", "true", "True"]
tc_list += commands
tc_list += [(a + ' ') for a in contact_list.get_list_of_addresses()]
tc_list += [(n + ' ') for n in contact_list.get_list_of_nicks()]
tc_list += [(g + ' ') for g in group_list.get_list_of_group_names()]
tc_list += [(i + ' ') for i in group_list.get_list_of_hr_group_ids()]
tc_list += [(s + ' ') for s in settings.key_list]
tc_list += [(s + ' ') for s in gateway.settings.key_list]
tc_list += [(a + " ") for a in contact_list.get_list_of_addresses()]
tc_list += [(n + " ") for n in contact_list.get_list_of_nicks()]
tc_list += [(g + " ") for g in group_list.get_list_of_group_names()]
tc_list += [(i + " ") for i in group_list.get_list_of_hr_group_ids()]
tc_list += [(s + " ") for s in settings.key_list]
tc_list += [(s + " ") for s in gateway.settings.key_list]
return tc_list
def get_tab_completer(contact_list: 'ContactList',
group_list: 'GroupList',
settings: 'Settings',
gateway: 'Gateway'
) -> Optional[Callable[[str, Any], Any]]:
def get_tab_completer(
contact_list: "ContactList",
group_list: "GroupList",
settings: "Settings",
gateway: "Gateway",
) -> Optional[Callable[[str, Any], Any]]:
"""Return the tab completer object."""
def tab_complete(text: str, state: Any) -> List[str]:
"""Return tab-complete options."""
tab_complete_list = get_tab_complete_list(contact_list, group_list, settings, gateway)
options = [t for t in tab_complete_list if t.startswith(text)] # type: List[str]
tab_complete_list = get_tab_complete_list(
contact_list, group_list, settings, gateway
)
options = [
t for t in tab_complete_list if t.startswith(text)
] # type: List[str]
with ignored(IndexError):
tc = options[state] # type: List[str]
return tc
@ -158,6 +221,40 @@ def get_terminal_width() -> int:
return shutil.get_terminal_size()[0]
class HideRunTime(object):
"""Runtime hiding time context manager.
By joining a thread that sleeps for a longer time than it takes for
the function to run, this context manager hides the actual running
time of the function.
Note that random.SystemRandom() uses the Kernel CSPRNG (/dev/urandom),
not Python's weak PRNG based on Mersenne Twister:
https://docs.python.org/2/library/random.html#random.SystemRandom
"""
def __init__(
self,
settings: Optional["Settings"] = None,
delay_type: str = STATIC,
duration: float = 0.0,
) -> None:
if delay_type == TRAFFIC_MASKING and settings is not None:
self.length = settings.tm_static_delay
self.length += random.SystemRandom().uniform(0, settings.tm_random_delay)
elif delay_type == STATIC:
self.length = duration
def __enter__(self) -> None:
self.timer = threading.Thread(target=time.sleep, args=(self.length,))
self.timer.start()
def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
self.timer.join()
@contextmanager
def ignored(*exceptions: Type[BaseException]) -> Iterator[Any]:
"""Ignore an exception."""
@ -167,11 +264,12 @@ def ignored(*exceptions: Type[BaseException]) -> Iterator[Any]:
pass
def monitor_processes(process_list: List[Process],
software_operation: str,
queues: Dict[bytes, 'Queue[bytes]'],
error_exit_code: int = 1
) -> None:
def monitor_processes(
process_list: List[Process],
software_operation: str,
queues: Dict[bytes, "Queue[bytes]"],
error_exit_code: int = 1,
) -> None:
"""Monitor the status of `process_list` and EXIT_QUEUE.
This function monitors a list of processes. If one of them dies, it
@ -200,21 +298,16 @@ def monitor_processes(process_list: List[Process],
sys.exit(0)
if command == WIPE:
with open('/etc/os-release') as f:
with open("/etc/os-release") as f:
data = f.read()
if TAILS not in data:
if software_operation == RX:
subprocess.Popen("find {} -type f -exec shred -n 3 -z -u {{}} \\;"
.format(DIR_RECV_FILES), shell=True).wait()
shred_databases(software_operation)
power_off_system()
subprocess.Popen("find {} -name '{}*' -type f -exec shred -n 3 -z -u {{}} \\;"
.format(DIR_USER_DATA, software_operation), shell=True).wait()
for d in [DIR_USER_DATA, DIR_RECV_FILES]:
with ignored(FileNotFoundError):
shutil.rmtree(d)
os.system(POWEROFF)
def power_off_system() -> None:
"""Power off system."""
os.system(POWEROFF)
def process_arguments() -> Tuple[str, bool, bool]:
@ -224,29 +317,37 @@ def process_arguments() -> Tuple[str, bool, bool]:
Terminator configuration file for local testing. The descriptions
here are provided for the sake of completeness.
"""
parser = argparse.ArgumentParser(f'python3.7 {sys.argv[0]}',
usage='%(prog)s [OPTION]',
epilog='Full documentation at: <https://github.com/maqp/tfc/wiki>')
parser = argparse.ArgumentParser(
f"python3.7 {sys.argv[0]}",
usage="%(prog)s [OPTION]",
epilog="Full documentation at: <https://github.com/maqp/tfc/wiki>",
)
parser.add_argument('-r',
action='store_true',
default=False,
dest='operation',
help="run Receiver instead of Transmitter Program")
parser.add_argument(
"-r",
action="store_true",
default=False,
dest="operation",
help="run Receiver instead of Transmitter Program",
)
parser.add_argument('-l',
action='store_true',
default=False,
dest='local_test',
help="enable local testing mode")
parser.add_argument(
"-l",
action="store_true",
default=False,
dest="local_test",
help="enable local testing mode",
)
parser.add_argument('-d',
action='store_true',
default=False,
dest='data_diode_sockets',
help="use data diode simulator sockets during local testing mode")
parser.add_argument(
"-d",
action="store_true",
default=False,
dest="data_diode_sockets",
help="use data diode simulator sockets during local testing mode",
)
args = parser.parse_args()
args = parser.parse_args()
operation = RX if args.operation else TX
return operation, args.local_test, args.data_diode_sockets
@ -255,11 +356,16 @@ def process_arguments() -> Tuple[str, bool, bool]:
def readable_size(size: int) -> str:
"""Convert file size from bytes to a human-readable form."""
f_size = float(size)
for unit in ['', 'K', 'M', 'G', 'T', 'P', 'E', 'Z']:
for unit in ["", "K", "M", "G", "T", "P", "E", "Z"]:
if abs(f_size) < 1024.0:
return f'{f_size:3.1f}{unit}B'
return f"{f_size:3.1f}{unit}B"
f_size /= 1024.0
return f'{f_size:3.1f}YB'
return f"{f_size:3.1f}YB"
def reset_terminal() -> None:
"""Reset terminal."""
os.system(RESET)
def round_up(value: Union[int, float]) -> int:
@ -267,30 +373,52 @@ def round_up(value: Union[int, float]) -> int:
return int(math.ceil(value / 10.0)) * 10
def split_byte_string(bytestring: bytes, # Bytestring to split
item_len: int # Length of each substring
) -> List[bytes]: # List of substrings
def shred_databases(software_operation: str) -> None:
"""Shred TFC databases and remove directories."""
if software_operation == RX:
subprocess.Popen(
"find {} -type f -exec shred -n 3 -z -u {{}} \\;".format(DIR_RECV_FILES),
shell=True,
).wait()
subprocess.Popen(
"find {} -name '{}*' -type f -exec shred -n 3 -z -u {{}} \\;".format(
DIR_USER_DATA, software_operation
),
shell=True,
).wait()
for d in [DIR_USER_DATA, DIR_RECV_FILES]:
with ignored(FileNotFoundError):
shutil.rmtree(d)
def split_byte_string(
bytestring: bytes, item_len: int # Bytestring to split # Length of each substring
) -> List[bytes]: # List of substrings
"""Split a bytestring into a list of specific length substrings."""
return [bytestring[i:i + item_len] for i in range(0, len(bytestring), item_len)]
return [bytestring[i : i + item_len] for i in range(0, len(bytestring), item_len)]
def split_string(string: str, # String to split
item_len: int # Length of each substring
) -> List[str]: # List of substrings
def split_string(
string: str, item_len: int # String to split # Length of each substring
) -> List[str]: # List of substrings
"""Split a string into a list of specific length substrings."""
return [string[i:i + item_len] for i in range(0, len(string), item_len)]
return [string[i : i + item_len] for i in range(0, len(string), item_len)]
def separate_header(bytestring: bytes, # Bytestring to slice
header_length: int # Number of header bytes to separate
) -> Tuple[bytes, bytes]: # Header and payload
def separate_header(
bytestring: bytes, # Bytestring to slice
header_length: int, # Number of header bytes to separate
) -> Tuple[bytes, bytes]: # Header and payload
"""Separate `header_length` first bytes from a bytestring."""
return bytestring[:header_length], bytestring[header_length:]
def separate_headers(bytestring: bytes, # Bytestring to slice
header_length_list: List[int], # List of header lengths
) -> List[bytes]: # Header and payload
def separate_headers(
bytestring: bytes, # Bytestring to slice
header_length_list: List[int], # List of header lengths
) -> List[bytes]: # Header and payload
"""Separate a list of headers from bytestring.
Length of each header is determined in the `header_length_list`.
@ -304,9 +432,10 @@ def separate_headers(bytestring: bytes, # Bytestring to slice
return fields
def separate_trailer(bytestring: bytes, # Bytestring to slice
trailer_length: int # Number of trailer bytes to separate
) -> Tuple[bytes, bytes]: # Payload and trailer
def separate_trailer(
bytestring: bytes, # Bytestring to slice
trailer_length: int, # Number of trailer bytes to separate
) -> Tuple[bytes, bytes]: # Payload and trailer
"""Separate `trailer_length` last bytes from a bytestring.
This saves space and makes trailer separation more readable.
@ -321,31 +450,38 @@ def terminal_width_check(minimum_width: int) -> None:
while get_terminal_width() < minimum_width:
time.sleep(0.1)
time.sleep(0.1)
print(2*CURSOR_UP_ONE_LINE)
print(2 * CURSOR_UP_ONE_LINE)
def validate_onion_addr(onion_address_contact: str, # String to slice
onion_address_user: str = '' # Number of header chars to separate
) -> str: # Payload and trailer
def validate_onion_addr(
onion_address_contact: str, # String to slice
onion_address_user: str = "", # Number of header chars to separate
) -> str: # Payload and trailer
"""Validate a v3 Onion Service address."""
error_msg = ''
error_msg = ""
try:
decoded = base64.b32decode(onion_address_contact.upper())
public_key, checksum, version \
= separate_headers(decoded, [ONION_SERVICE_PUBLIC_KEY_LENGTH, ONION_ADDRESS_CHECKSUM_LENGTH])
public_key, checksum, version = separate_headers(
decoded, [ONION_SERVICE_PUBLIC_KEY_LENGTH, ONION_ADDRESS_CHECKSUM_LENGTH]
)
if checksum != hashlib.sha3_256(ONION_ADDRESS_CHECKSUM_ID
+ public_key
+ version
).digest()[:ONION_ADDRESS_CHECKSUM_LENGTH]:
if (
checksum
!= hashlib.sha3_256(
ONION_ADDRESS_CHECKSUM_ID + public_key + version
).digest()[:ONION_ADDRESS_CHECKSUM_LENGTH]
):
error_msg = "Checksum error - Check that the entered account is correct."
except (binascii.Error, ValueError):
return "Error: Invalid account format."
if onion_address_contact in (LOCAL_ID, DUMMY_CONTACT, DUMMY_MEMBER) or public_key == LOCAL_PUBKEY:
if (
onion_address_contact in (LOCAL_ID, DUMMY_CONTACT, DUMMY_MEMBER)
or public_key == LOCAL_PUBKEY
):
error_msg = "Error: Can not add reserved account."
if onion_address_user and onion_address_contact == onion_address_user:
@ -354,12 +490,13 @@ def validate_onion_addr(onion_address_contact: str, # String to slice
return error_msg
def validate_group_name(group_name: str, # Name of the group
contact_list: 'ContactList', # ContactList object
group_list: 'GroupList' # GroupList object
) -> str: # Error message if validation failed, else empty string
def validate_group_name(
group_name: str, # Name of the group
contact_list: "ContactList", # ContactList object
group_list: "GroupList", # GroupList object
) -> str: # Error message if validation failed, else empty string
"""Validate the specified group name."""
error_msg = ''
error_msg = ""
# Avoids collision with delimiters
if not group_name.isprintable():
@ -370,7 +507,9 @@ def validate_group_name(group_name: str, # Name of the group
error_msg = f"Error: Group name must be less than {PADDING_LENGTH} chars long."
if group_name == DUMMY_GROUP:
error_msg = "Error: Group name cannot use the name reserved for database padding."
error_msg = (
"Error: Group name cannot use the name reserved for database padding."
)
if not validate_onion_addr(group_name):
error_msg = "Error: Group name cannot have the format of an account."
@ -384,11 +523,11 @@ def validate_group_name(group_name: str, # Name of the group
return error_msg
def validate_key_exchange(key_ex: str, # Key exchange selection to validate
*_: Any # Unused arguments
) -> str: # Error message if validation failed, else empty string
def validate_key_exchange(
key_ex: str, *_: Any # Key exchange selection to validate # Unused arguments
) -> str: # Error message if validation failed, else empty string
"""Validate the specified key exchange."""
error_msg = ''
error_msg = ""
if key_ex.upper() not in [ECDHE, ECDHE[:1], PSK, PSK[:1]]:
error_msg = "Invalid key exchange selection."
@ -396,13 +535,16 @@ def validate_key_exchange(key_ex: str, # Key exchange selection to validate
return error_msg
def validate_nick(nick: str, # Nick to validate
args: Tuple['ContactList', 'GroupList', bytes] # Contact list and group list databases
) -> str: # Error message if validation failed, else ''
def validate_nick(
nick: str, # Nick to validate
args: Tuple[
"ContactList", "GroupList", bytes
], # Contact list and group list databases
) -> str: # Error message if validation failed, else ''
"""Validate the specified nickname."""
contact_list, group_list, onion_pub_key = args
error_msg = ''
error_msg = ""
# Length is limited by database's Unicode padding
if len(nick) >= PADDING_LENGTH:
@ -412,7 +554,7 @@ def validate_nick(nick: str, # Nick to vali
if not nick.isprintable():
error_msg = "Error: Nick must be printable."
if nick == '':
if nick == "":
error_msg = "Error: Nick cannot be empty."
# Receiver displays sent messages under 'Me'
@ -424,21 +566,31 @@ def validate_nick(nick: str, # Nick to vali
error_msg = f"Error: '{EVENT}' is a reserved nick."
# Ensure that nicks, accounts and group names are UIDs in recipient selection
if validate_onion_addr(nick) == '': # If no error message was received, nick had format of account
if (
validate_onion_addr(nick) == ""
): # If no error message was received, nick had format of account
error_msg = "Error: Nick cannot have the format of an account."
if nick in (LOCAL_ID, DUMMY_CONTACT, DUMMY_MEMBER):
error_msg = "Error: Nick cannot have the format of an account."
if nick in contact_list.get_list_of_nicks():
error_msg = "Error: Nick already in use."
# Allow existing nick if it matches the account being replaced.
if contact_list.has_pub_key(onion_pub_key):
if nick == contact_list.get_nick_by_pub_key(onion_pub_key):
error_msg = ''
error_msg = same_contact_check(onion_pub_key, nick, contact_list)
if nick in group_list.get_list_of_group_names():
error_msg = "Error: Nick cannot be a group name."
return error_msg
def same_contact_check(
onion_pub_key: bytes, nick: str, contact_list: "ContactList"
) -> str:
"""Check if nick matches the account being replaced."""
error_msg = "Error: Nick already in use."
if contact_list.has_pub_key(onion_pub_key):
if nick == contact_list.get_nick_by_pub_key(onion_pub_key):
error_msg = ""
return error_msg

View File

@ -25,19 +25,42 @@ import typing
import sys
from datetime import datetime
from typing import List, Optional, Union
from typing import List, Optional, Tuple, Union
from src.common.encoding import b10encode, b58encode, pub_key_to_onion_address
from src.common.misc import get_terminal_width, split_string
from src.common.statics import (ADDED_MEMBERS, ALREADY_MEMBER, B58_LOCAL_KEY_GUIDE, B58_PUBLIC_KEY_GUIDE, BOLD_ON,
CLEAR_ENTIRE_LINE, CLEAR_ENTIRE_SCREEN, CURSOR_LEFT_UP_CORNER, CURSOR_UP_ONE_LINE,
DONE, NC, NEW_GROUP, NORMAL_TEXT, NOT_IN_GROUP, RECEIVER, RELAY, REMOVED_MEMBERS, RX,
TFC, TRANSMITTER, TX, UNKNOWN_ACCOUNTS, VERSION)
from src.common.misc import get_terminal_width, split_string
from src.common.statics import (
ADDED_MEMBERS,
ALREADY_MEMBER,
B58_LOCAL_KEY_GUIDE,
B58_PUBLIC_KEY_GUIDE,
BOLD_ON,
CLEAR_ENTIRE_LINE,
CLEAR_ENTIRE_SCREEN,
CURSOR_LEFT_UP_CORNER,
CURSOR_UP_ONE_LINE,
DONE,
NC,
NEW_GROUP,
NORMAL_TEXT,
NOT_IN_GROUP,
RECEIVER,
RELAY,
REMOVED_MEMBERS,
RX,
TFC,
TRANSMITTER,
TX,
UNKNOWN_ACCOUNTS,
VERSION,
)
if typing.TYPE_CHECKING:
from src.common.db_contacts import ContactList
from src.common.db_settings import Settings
from src.common.gateway import GatewaySettings as GWSettings
from src.common.gateway import GatewaySettings as GWSettings
msg_list_type = Union[str, List[str]]
def clear_screen(delay: float = 0.0) -> None:
@ -47,40 +70,55 @@ def clear_screen(delay: float = 0.0) -> None:
sys.stdout.flush()
def group_management_print(key: str, # Group management message identifier
members: List[bytes], # List of members' Onion public keys
contact_list: 'ContactList', # ContactList object
group_name: str = '' # Name of the group
) -> None:
def group_management_print(
key: str, # Group management message identifier
members: List[bytes], # List of members' Onion public keys
contact_list: "ContactList", # ContactList object
group_name: str = "", # Name of the group
) -> None:
"""Print group management command results."""
m = {NEW_GROUP: "Created new group '{}' with following members:".format(group_name),
ADDED_MEMBERS: "Added following accounts to group '{}':" .format(group_name),
ALREADY_MEMBER: "Following accounts were already in group '{}':".format(group_name),
REMOVED_MEMBERS: "Removed following members from group '{}':" .format(group_name),
NOT_IN_GROUP: "Following accounts were not in group '{}':" .format(group_name),
UNKNOWN_ACCOUNTS: "Following unknown accounts were ignored:"}[key]
m = {
NEW_GROUP: "Created new group '{}' with following members:".format(group_name),
ADDED_MEMBERS: "Added following accounts to group '{}':".format(group_name),
ALREADY_MEMBER: "Following accounts were already in group '{}':".format(
group_name
),
REMOVED_MEMBERS: "Removed following members from group '{}':".format(
group_name
),
NOT_IN_GROUP: "Following accounts were not in group '{}':".format(group_name),
UNKNOWN_ACCOUNTS: "Following unknown accounts were ignored:",
}[key]
if members:
m_list = ([contact_list.get_nick_by_pub_key(m) for m in members if contact_list.has_pub_key(m)]
+ [pub_key_to_onion_address(m) for m in members if not contact_list.has_pub_key(m)])
m_list = [
contact_list.get_nick_by_pub_key(m)
for m in members
if contact_list.has_pub_key(m)
] + [
pub_key_to_onion_address(m)
for m in members
if not contact_list.has_pub_key(m)
]
just_len = max(len(m) for m in m_list)
just_len = max(len(m) for m in m_list)
justified = [m] + [f" * {m.ljust(just_len)}" for m in m_list]
m_print(justified, box=True)
def m_print(msg_list: Union[str, List[str]], # List of lines to print
manual_proceed: bool = False, # Wait for user input before continuing
bold: bool = False, # When True, prints the message in bold style
center: bool = True, # When False, does not center message
box: bool = False, # When True, prints a box around the message
head_clear: bool = False, # When True, clears screen before printing message
tail_clear: bool = False, # When True, clears screen after printing message (requires delay)
delay: float = 0, # Delay before continuing
max_width: int = 0, # Maximum width of message
head: int = 0, # Number of new lines to print before the message
tail: int = 0, # Number of new lines to print after the message
) -> None:
def m_print(
msg_list: "msg_list_type", # List of lines to print
manual_proceed: bool = False, # Wait for user input before continuing
bold: bool = False, # When True, prints the message in bold style
center: bool = True, # When False, does not center message
box: bool = False, # When True, prints a box around the message
head_clear: bool = False, # When True, clears screen before printing message
tail_clear: bool = False, # When True, clears screen after printing message (requires delay)
delay: float = 0, # Delay before continuing
max_width: int = 0, # Maximum width of message
head: int = 0, # Number of new lines to print before the message
tail: int = 0, # Number of new lines to print after the message
) -> None:
"""Print message to screen.
The message automatically wraps if the terminal is too narrow to
@ -90,34 +128,19 @@ def m_print(msg_list: Union[str, List[str]], # List of lines to print
msg_list = [msg_list]
terminal_width = get_terminal_width()
len_widest_msg = max(len(m) for m in msg_list)
spc_around_msg = 4 if box else 2
max_msg_width = terminal_width - spc_around_msg
if max_width:
max_msg_width = min(max_width, max_msg_width)
# Split any message too wide on separate lines
if len_widest_msg > max_msg_width:
new_msg_list = []
for msg in msg_list:
if len(msg) > max_msg_width:
new_msg_list.extend(textwrap.fill(msg, max_msg_width).split('\n'))
else:
new_msg_list.append(msg)
msg_list = new_msg_list
len_widest_msg = max(len(m) for m in msg_list)
len_widest_msg, msg_list = split_too_wide_messages(
box, max_width, msg_list, terminal_width
)
if box or center:
# Insert whitespace around every line to make them equally long
msg_list = [f'{m:^{len_widest_msg}}' for m in msg_list]
msg_list = [f"{m:^{len_widest_msg}}" for m in msg_list]
if box:
# Add box chars around the message
msg_list = [f'{m}' for m in msg_list]
msg_list.insert(0, '' + (len_widest_msg + 2) * '' + '')
msg_list.append( '' + (len_widest_msg + 2) * '' + '')
msg_list = [f"{m}" for m in msg_list]
msg_list.insert(0, "" + (len_widest_msg + 2) * "" + "")
msg_list.append("" + (len_widest_msg + 2) * "" + "")
# Print the message
if head_clear:
@ -138,16 +161,42 @@ def m_print(msg_list: Union[str, List[str]], # List of lines to print
# Check if message needs to be manually dismissed
if manual_proceed:
input('')
input("")
print_on_previous_line()
def phase(string: str, # Description of the phase
done: bool = False, # When True, uses string as the phase completion message
head: int = 0, # Number of inserted new lines before print
offset: int = 4, # Offset of phase string from center to left
delay: float = 0.5 # Duration of phase completion message
) -> None:
def split_too_wide_messages(
box: bool, max_width: int, msg_list: "msg_list_type", terminal_width: int
) -> Tuple[int, "msg_list_type"]:
"""Split too wide messages to multiple lines."""
len_widest_msg = max(len(m) for m in msg_list)
spc_around_msg = 4 if box else 2
max_msg_width = terminal_width - spc_around_msg
if max_width:
max_msg_width = min(max_width, max_msg_width)
if len_widest_msg > max_msg_width:
new_msg_list = []
for msg in msg_list:
if len(msg) > max_msg_width:
new_msg_list.extend(textwrap.fill(msg, max_msg_width).split("\n"))
else:
new_msg_list.append(msg)
msg_list = new_msg_list
len_widest_msg = max(len(m) for m in msg_list)
return len_widest_msg, msg_list
def phase(
string: str, # Description of the phase
done: bool = False, # When True, uses string as the phase completion message
head: int = 0, # Number of inserted new lines before print
offset: int = 4, # Offset of phase string from center to left
delay: float = 0.5, # Duration of phase completion message
) -> None:
"""Print the name of the next phase.
The notification of completion of the phase is printed on the same
@ -159,15 +208,15 @@ def phase(string: str, # Description of the phase
print(string)
time.sleep(delay)
else:
string += '... '
indent = ((get_terminal_width() - (len(string) + offset)) // 2) * ' '
string += "... "
indent = ((get_terminal_width() - (len(string) + offset)) // 2) * " "
print(indent + string, end='', flush=True)
print(indent + string, end="", flush=True)
def print_fingerprint(fp: bytes, # Contact's fingerprint
msg: str = '' # Title message
) -> None:
def print_fingerprint(
fp: bytes, msg: str = "" # Contact's fingerprint # Title message
) -> None:
"""Print a formatted message and fingerprint inside the box.
Truncate fingerprint for clean layout with three rows that have
@ -175,19 +224,20 @@ def print_fingerprint(fp: bytes, # Contact's fingerprint
249.15 bits of entropy which is more than the symmetric security
of X448.
"""
p_lst = [msg, ''] if msg else []
b10fp = b10encode(fp)[:(3*5*5)]
parts = split_string(b10fp, item_len=(5*5))
p_lst += [' '.join(split_string(p, item_len=5)) for p in parts]
p_lst = [msg, ""] if msg else []
b10fp = b10encode(fp)[: (3 * 5 * 5)]
parts = split_string(b10fp, item_len=(5 * 5))
p_lst += [" ".join(split_string(p, item_len=5)) for p in parts]
m_print(p_lst, box=True)
def print_key(message: str, # Instructive message
key_bytes: bytes, # 32-byte key to be displayed
settings: Union['Settings', 'GWSettings'], # Settings object
public_key: bool = False # When True, uses Testnet address WIF format
) -> None:
def print_key(
message: str, # Instructive message
key_bytes: bytes, # 32-byte key to be displayed
settings: Union["Settings", "GWSettings"], # Settings object
public_key: bool = False, # When True, uses Testnet address WIF format
) -> None:
"""Print a symmetric key in WIF format.
If local testing is not enabled, this function adds spacing in the
@ -208,22 +258,31 @@ def print_key(message: str, # Instructive messag
if settings.local_testing_mode:
m_print([message, b58key], box=True)
else:
guide, chunk_length = (B58_PUBLIC_KEY_GUIDE, 7) if public_key else (B58_LOCAL_KEY_GUIDE, 3)
guide, chunk_length = (
(B58_PUBLIC_KEY_GUIDE, 7) if public_key else (B58_LOCAL_KEY_GUIDE, 3)
)
key = ' '.join(split_string(b58key, item_len=chunk_length))
key = " ".join(split_string(b58key, item_len=chunk_length))
m_print([message, guide, key], box=True)
def print_title(operation: str) -> None:
"""Print the TFC title."""
operation_name = {TX: TRANSMITTER, RX: RECEIVER, NC: RELAY}[operation]
m_print(f"{TFC} - {operation_name} {VERSION}", bold=True, head_clear=True, head=1, tail=1)
m_print(
f"{TFC} - {operation_name} {VERSION}",
bold=True,
head_clear=True,
head=1,
tail=1,
)
def print_on_previous_line(reps: int = 1, # Number of times to repeat the action
delay: float = 0.0, # Time to sleep before clearing lines above
flush: bool = False # Flush stdout when true
) -> None:
def print_on_previous_line(
reps: int = 1, # Number of times to repeat the action
delay: float = 0.0, # Time to sleep before clearing lines above
flush: bool = False, # Flush stdout when true
) -> None:
"""Next message is printed on upper line."""
time.sleep(delay)
@ -239,14 +298,15 @@ def print_spacing(count: int = 0) -> None:
print()
def rp_print(message: str, # Message to print
ts: Optional['datetime'] = None, # Timestamp for displayed event
bold: bool = False # When True, prints the message in bold style
) -> None:
def rp_print(
message: str, # Message to print
ts: Optional["datetime"] = None, # Timestamp for displayed event
bold: bool = False, # When True, prints the message in bold style
) -> None:
"""Print an event in Relay Program."""
if ts is None:
ts = datetime.now()
ts_fmt = ts.strftime('%b %d - %H:%M:%S.%f')[:-4]
ts_fmt = ts.strftime("%b %d - %H:%M:%S.%f")[:-4]
if bold:
print(f"{BOLD_ON}{ts_fmt} - {message}{NORMAL_TEXT}")

View File

@ -29,17 +29,18 @@ from typing import Any, List, Optional
import tkinter
from tkinter import filedialog
from src.common.exceptions import FunctionReturn
from src.common.output import m_print, print_on_previous_line
from src.common.exceptions import SoftError
from src.common.output import m_print, print_on_previous_line
if typing.TYPE_CHECKING:
from src.common.db_settings import Settings
def ask_path_gui(prompt_msg: str, # Directory selection prompt
settings: 'Settings', # Settings object
get_file: bool = False # When True, prompts for a path to file instead of a directory
) -> str: # Selected directory or file
def ask_path_gui(
prompt_msg: str, # Directory selection prompt
settings: "Settings", # Settings object
get_file: bool = False, # When True, prompts for a path to file instead of a directory
) -> str: # Selected directory or file
"""Prompt (file) path with Tkinter / CLI prompt."""
try:
if settings.disable_gui_dialog:
@ -56,7 +57,10 @@ def ask_path_gui(prompt_msg: str, # Directory selection prompt
root.destroy()
if not file_path:
raise FunctionReturn(("File" if get_file else "Path") + " selection aborted.", head_clear=True)
raise SoftError(
("File" if get_file else "Path") + " selection aborted.",
head_clear=True,
)
return file_path
@ -87,11 +91,13 @@ class Completer(object):
def complete_path(self, path: Optional[str] = None) -> Any:
"""Perform completion of the filesystem path."""
if not path:
return self.listdir('.')
return self.listdir(".")
dir_name, rest = os.path.split(path)
tmp = dir_name if dir_name else '.'
matches = [os.path.join(dir_name, p) for p in self.listdir(tmp) if p.startswith(rest)]
tmp = dir_name if dir_name else "."
matches = [
os.path.join(dir_name, p) for p in self.listdir(tmp) if p.startswith(rest)
]
# More than one match, or single match which does not exist (typo)
if len(matches) > 1 or not os.path.exists(path):
@ -102,12 +108,12 @@ class Completer(object):
return [os.path.join(path, p) for p in self.listdir(path)]
# Exact file match terminates this completion
return [path + ' ']
return [path + " "]
def path_complete(self, args: Optional[List[str]] = None) -> Any:
"""Return the list of directories from the current directory."""
if not args:
return self.complete_path('.')
return self.complete_path(".")
# Treat the last arg as a path and complete it
return self.complete_path(args[-1])
@ -118,22 +124,22 @@ class Completer(object):
return self.path_complete(line)[state]
def ask_path_cli(prompt_msg: str, # File selection prompt
get_file: bool = False # When True, prompts for a file instead of a directory
) -> str: # Selected directory or file
def ask_path_cli(
prompt_msg: str, # File selection prompt
get_file: bool = False, # When True, prompts for a file instead of a directory
) -> str: # Selected directory or file
"""\
Prompt file location or store directory for a file with tab-complete
supported CLI.
"""
readline.set_completer_delims(' \t\n;')
readline.parse_and_bind('tab: complete')
readline.set_completer_delims(" \t\n;")
readline.parse_and_bind("tab: complete")
readline.set_completer(Completer(get_file).complete)
print('')
print("")
if get_file:
return cli_get_file(prompt_msg)
else:
return cli_get_path(prompt_msg)
return cli_get_path(prompt_msg)
def cli_get_file(prompt_msg: str) -> str:
@ -147,9 +153,9 @@ def cli_get_file(prompt_msg: str) -> str:
raise KeyboardInterrupt
if os.path.isfile(path_to_file):
if path_to_file.startswith('./'):
path_to_file = path_to_file[len('./'):]
print('')
if path_to_file.startswith("./"):
path_to_file = path_to_file[len("./") :]
print("")
return path_to_file
m_print("File selection error.", head=1, tail=1)
@ -157,7 +163,7 @@ def cli_get_file(prompt_msg: str) -> str:
except (EOFError, KeyboardInterrupt):
print_on_previous_line()
raise FunctionReturn("File selection aborted.", head_clear=True)
raise SoftError("File selection aborted.", head_clear=True)
def cli_get_path(prompt_msg: str) -> str:
@ -166,8 +172,8 @@ def cli_get_path(prompt_msg: str) -> str:
try:
directory = input(prompt_msg + ": ")
if directory.startswith('./'):
directory = directory[len('./'):]
if directory.startswith("./"):
directory = directory[len("./") :]
if not directory.endswith(os.sep):
directory += os.sep
@ -181,4 +187,4 @@ def cli_get_path(prompt_msg: str) -> str:
except (EOFError, KeyboardInterrupt):
print_on_previous_line()
raise FunctionReturn("File path selection aborted.", head_clear=True)
raise SoftError("File path selection aborted.", head_clear=True)

View File

@ -187,32 +187,31 @@ import itertools
import math
import shutil
from array import array
from typing import Any, Dict, Iterator, List, Optional, Tuple, Union
from array import array
from typing import Any, Dict, Iterator, List, Optional, overload, Tuple, Union
class ReedSolomonError(Exception):
"""Reed-Solomon exception stub."""
pass
"""
For efficiency, gf_exp[] has size 2*GF_SIZE, so that a simple
multiplication of two numbers can be resolved without calling % 255.
For more info on how to generate this extended exponentiation table,
see paper:
"Fast software implementation of finite field operations",
Cheng Huang and Lihao Xu
Washington University in St. Louis, Tech. Rep (2003).
"""
_bytearray = bytearray # type: Any
gf_exp = _bytearray([1] * 512)
gf_log = _bytearray(256)
# For efficiency, gf_exp[] has size 2*GF_SIZE, so that a simple
# multiplication of two numbers can be resolved without calling % 255.
# For more info on how to generate this extended exponentiation table,
# see paper:
# "Fast software implementation of finite field operations",
# Cheng Huang and Lihao Xu
# Washington University in St. Louis, Tech. Rep (2003).
_bytearray = bytearray # type: Any
gf_exp = _bytearray([1] * 512)
gf_log = _bytearray(256)
field_charac = int(2 ** 8 - 1) # type: int
# Galois Field elements maths
def rwh_primes1(n: int) -> List[int]:
"""Returns a list of primes < n
https://stackoverflow.com/questions/2068372/fastest-way-to-list-all-primes-below-n/3035188#3035188
@ -220,15 +219,13 @@ def rwh_primes1(n: int) -> List[int]:
sieve = [True] * int(n / 2)
for i in range(3, int(n ** 0.5) + 1, 2):
if sieve[int(i / 2)]:
sieve[int((i * i) / 2)::i] = [False] * int((n - i * i - 1) / (2 * i) + 1)
sieve[int((i * i) / 2) :: i] = [False] * int((n - i * i - 1) / (2 * i) + 1)
return [2] + [2 * i + 1 for i in range(1, int(n / 2)) if sieve[i]]
def find_prime_polys(generator: int = 2,
c_exp: int = 8,
fast_primes: bool = False,
single: bool = False
) -> Any:
def find_prime_polys(
generator: int = 2, c_exp: int = 8, fast_primes: bool = False, single: bool = False
) -> Any:
"""
Compute the list of prime polynomials for the given generator and
Galois Field characteristic exponent.
@ -290,15 +287,17 @@ def find_prime_polys(generator: int = 2,
# Prepare the finite field characteristic (2^p - 1), this
# also represent the maximum possible value in this field
root_charac = 2 # we're in GF(2)
field_charac_ = int(root_charac ** c_exp - 1)
root_charac = 2 # we're in GF(2)
field_charac_ = int(root_charac ** c_exp - 1)
field_charac_next = int(root_charac ** (c_exp + 1) - 1)
if fast_primes:
# Generate maybe prime polynomials and
# check later if they really are irreducible
prim_candidates = rwh_primes1(field_charac_next)
prim_candidates = [x for x in prim_candidates if x > field_charac_] # filter out too small primes
prim_candidates = [
x for x in prim_candidates if x > field_charac_
] # filter out too small primes
else:
# try each possible prime polynomial, but skip even numbers
# (because divisible by 2 so necessarily not irreducible)
@ -312,12 +311,12 @@ def find_prime_polys(generator: int = 2,
# memory variable to indicate if a value was already generated
# in the field (value at index x is set to 1) or not (set to
# 0 by default)
seen = _bytearray(field_charac_ + 1)
seen = _bytearray(field_charac_ + 1)
conflict = False # flag to know if there was at least one conflict
# Second loop, build the whole Galois Field
x = 1
for i in range(field_charac_):
for _ in range(field_charac_):
# Compute the next value in the field
# (i.e., the next power of alpha/generator)
x = gf_mult_nolut(x, generator, prim, field_charac_ + 1)
@ -348,10 +347,9 @@ def find_prime_polys(generator: int = 2,
# of each prime polynomial: print [hex(i) for i in correct_primes]
def init_tables(prim: int = 0x11d,
generator: int = 2,
c_exp: int = 8
) -> List[Union[Any, Any, int]]:
def init_tables(
prim: int = 0x11D, generator: int = 2, c_exp: int = 8
) -> List[Union[Any, Any, int]]:
"""\
Precompute the logarithm and anti-log tables for faster computation
later, using the provided primitive polynomial. These tables are
@ -402,7 +400,10 @@ def init_tables(prim: int = 0x11d,
if c_exp <= 8:
_bytearray = bytearray
else:
def _bytearray(obj: Union[str, bytes, int, List[int]] = 0, encoding: str = "latin-1") -> Any:
def _bytearray(
obj: Union[str, bytes, int, List[int]] = 0, encoding: str = "latin-1"
) -> Any:
"""Fake bytearray replacement, supporting int values above 255"""
# always use Latin-1 and not UTF8 because Latin-1 maps the
# first 256 characters to their byte value equivalents. UTF8
@ -422,7 +423,7 @@ def init_tables(prim: int = 0x11d,
global gf_exp, gf_log, field_charac
field_charac = int(2 ** c_exp - 1)
gf_exp = _bytearray(field_charac * 2)
gf_exp = _bytearray(field_charac * 2)
# Anti-log (exponential) table. The first two
# elements will always be [GF256int(1), generator]
@ -442,7 +443,7 @@ def init_tables(prim: int = 0x11d,
for i in range(field_charac):
gf_exp[i] = x # compute anti-log for this value and store it in a table
gf_log[x] = i # compute log at the same time
x = gf_mult_nolut(x, generator, prim, field_charac + 1)
x = gf_mult_nolut(x, generator, prim, field_charac + 1)
# If you use only generator==2 or a power of 2, you can use the
# following which is faster than gf_mult_noLUT():
@ -472,7 +473,7 @@ def gf_sub(x: int, y: int) -> int:
"""Do substraction in binary Galois Field.
In binary Galois Field, subtraction is just
the same as addition (since we mod 2)
the same as addition (since we mod 2).
"""
return x ^ y
@ -499,9 +500,9 @@ def gf_mul(x: int, y: int) -> int:
def gf_div(x: int, y: int) -> int:
"""Perform division in the binary Galois Field."""
if y == 0:
if not y:
raise ZeroDivisionError()
if x == 0:
if not x:
return 0
ret_val = gf_exp[(gf_log[x] + field_charac - gf_log[y]) % field_charac] # type: int
return ret_val
@ -513,6 +514,54 @@ def gf_pow(x: int, power: int) -> int:
return ret_val
def cl_mult(x_: int, y_: int) -> int:
"""Bitwise carry-less multiplication on integers."""
z = 0
i = 0
while (y_ >> i) > 0:
if y_ & (1 << i):
z ^= x_ << i
i += 1
return z
def bit_length(n: int) -> int:
"""\
Compute the position of the most significant bit
(1) of an integer. Equivalent to int.bit_length()
"""
bits = 0
while n >> bits:
bits += 1
return bits
def cl_div(dividend: int, divisor: int) -> int:
"""\
Bitwise carry-less long division on
integers and returns the remainder.
"""
# Compute the position of the most
# significant bit for each integers
dl1 = bit_length(dividend)
dl2 = bit_length(divisor)
# If the dividend is smaller than the divisor, just exit
if dl1 < dl2: # pragma: no cover
return dividend
# Else, align the most significant 1 of the divisor to the
# most significant 1 of the dividend (by shifting the divisor)
for i in range(dl1 - dl2, -1, -1):
# Check that the dividend is divisible (useless for the
# first iteration but important for the next ones)
if dividend & (1 << i + dl2 - 1):
# If divisible, then shift the divisor to align the most
# significant bits and XOR (carry-less substraction)
dividend ^= divisor << i
return dividend
def gf_mult_nolut_slow(x: int, y: int, prim: int = 0) -> int:
"""\
Multiplication in Galois Fields without using a precomputed look-up
@ -520,55 +569,6 @@ def gf_mult_nolut_slow(x: int, y: int, prim: int = 0) -> int:
multiplication + modular reduction using an irreducible prime
polynomial.
"""
# Define bitwise carry-less operations as inner functions
def cl_mult(x_: int, y_: int) -> int:
"""Bitwise carry-less multiplication on integers"""
z = 0
i = 0
while (y_ >> i) > 0:
if y_ & (1 << i):
z ^= x_ << i
i += 1
return z
def bit_length(n: int) -> int:
"""\
Compute the position of the most significant bit
(1) of an integer. Equivalent to int.bit_length()
"""
bits = 0
while n >> bits:
bits += 1
return bits
def cl_div(dividend: int, divisor: int) -> int:
"""\
Bitwise carry-less long division on
integers and returns the remainder
"""
# Compute the position of the most
# significant bit for each integers
dl1 = bit_length(dividend)
dl2 = bit_length(divisor)
# If the dividend is smaller than the divisor, just exit
if dl1 < dl2: # pragma: no cover
return dividend
# Else, align the most significant 1 of the divisor to the
# most significant 1 of the dividend (by shifting the divisor)
for i in range(dl1 - dl2, -1, -1):
# Check that the dividend is divisible (useless for the
# first iteration but important for the next ones)
if dividend & (1 << i + dl2 - 1):
# If divisible, then shift the divisor to align the most
# significant bits and XOR (carry-less substraction)
dividend ^= divisor << i
return dividend
# --- Main GF multiplication routine ---
# Multiply the gf numbers
result = cl_mult(x, y)
@ -580,12 +580,9 @@ def gf_mult_nolut_slow(x: int, y: int, prim: int = 0) -> int:
return result
def gf_mult_nolut(x: int,
y: int,
prim: int = 0,
field_charac_full: int = 256,
carryless: bool = True
) -> int:
def gf_mult_nolut(
x: int, y: int, prim: int = 0, field_charac_full: int = 256, carryless: bool = True
) -> int:
"""\
Galois Field integer multiplication using Russian Peasant
Multiplication algorithm (faster than the standard multiplication
@ -615,6 +612,7 @@ def gf_mult_nolut(x: int,
# Galois Field polynomials maths
def gf_poly_scale(p: bytes, x: int) -> bytearray:
"""No docstring provided."""
ret_val = _bytearray([gf_mul(p[i], x) for i in range(len(p))]) # type: bytearray
@ -625,16 +623,14 @@ def gf_poly_add(p: bytes, q: Union[bytearray, List[int]]) -> Any:
"""No docstring provided."""
r = _bytearray(max(len(p), len(q))) # type: bytearray
r[len(r) - len(p):len(r)] = p
r[len(r) - len(p) : len(r)] = p
for i in range(len(q)):
for i, _ in enumerate(q):
r[i + len(r) - len(q)] ^= q[i]
return r
def gf_poly_mul(p: Any,
q: List[Any]
) -> Any:
def gf_poly_mul(p: Any, q: List[Any]) -> Any:
"""\
Multiply two polynomials, inside Galois Field (but the procedure
is generic). Optimized function by precomputation of log.
@ -648,26 +644,24 @@ def gf_poly_mul(p: Any,
# Compute the polynomial multiplication (just like the
# outer product of two vectors, we multiply each
# coefficients of p with all coefficients of q)
for j in range(len(q)):
for j, _ in enumerate(q):
# Optimization: load the coefficient once
qj = q[j]
# log(0) is undefined, we need to check that
if qj != 0:
if qj:
# Optimization: precache the logarithm
# of the current coefficient of q
lq = gf_log[qj]
for i in range(len(p)):
for i, _ in enumerate(p):
# log(0) is undefined, need to check that...
if p[i] != 0:
if p[i]:
# Equivalent to:
# r[i + j] = gf_add(r[i+j], gf_mul(p[i], q[j]))
r[i + j] ^= gf_exp[lp[i] + lq]
return r
def gf_poly_mul_simple(p: List[int],
q: List[int]
) -> bytearray:
def gf_poly_mul_simple(p: List[int], q: List[int]) -> bytearray:
"""Multiply two polynomials, inside Galois Field
Simple equivalent way of multiplying two polynomials
@ -679,8 +673,8 @@ def gf_poly_mul_simple(p: List[int],
# Compute the polynomial multiplication (just like the outer product
# of two vectors, we multiply each coefficients of p with all
# coefficients of q)
for j in range(len(q)):
for i in range(len(p)):
for j, _ in enumerate(q):
for i, _ in enumerate(p):
# equivalent to: r[i + j] = gf_add(r[i+j], gf_mul(p[i], q[j]))
# -- you can see it's your usual polynomial multiplication
r[i + j] ^= gf_mul(p[i], q[j])
@ -696,9 +690,9 @@ def gf_poly_neg(poly: List[int]) -> List[int]:
return poly
def gf_poly_div(dividend: bytearray,
divisor: Union[bytearray, List[int]]
) -> Tuple[bytearray, bytearray]:
def gf_poly_div(
dividend: bytearray, divisor: Union[bytearray, List[int]]
) -> Tuple[bytearray, bytearray]:
"""Fast polynomial division by using Extended Synthetic Division and
optimized for GF(2^p) computations (doesn't work with standard
polynomials outside of this Galois Field).
@ -728,13 +722,13 @@ def gf_poly_div(dividend: bytearray,
# it should still work because gf_mul() will take care of the
# condition. But it's still a good practice to put the condition
# here.
if coef != 0:
if coef:
# In synthetic division, we always skip the first coefficient
# of the divisor, because it's only used to normalize the
# dividend coefficient
for j in range(1, len(divisor)):
# log(0) is undefined
if divisor[j] != 0:
if divisor[j]:
# Equivalent to the more mathematically correct (but
# XORing directly is faster):
# msg_out[i + j] += -divisor[j] * coef
@ -765,10 +759,8 @@ def gf_poly_eval(poly: Union[bytearray, List[int]], x: int) -> int:
# Reed-Solomon encoding
def rs_generator_poly(nsym: int,
fcr: int = 0,
generator: int = 2
) -> bytearray:
def rs_generator_poly(nsym: int, fcr: int = 0, generator: int = 2) -> bytearray:
"""\
Generate an irreducible generator polynomial
(necessary to encode a message into Reed-Solomon)
@ -779,10 +771,9 @@ def rs_generator_poly(nsym: int,
return g
def rs_generator_poly_all(max_nsym: int,
fcr: int = 0,
generator: int = 2
) -> Dict[int, bytearray]:
def rs_generator_poly_all(
max_nsym: int, fcr: int = 0, generator: int = 2
) -> Dict[int, bytearray]:
"""\
Generate all irreducible generator polynomials up to max_nsym
(usually you can use n, the length of the message+ecc). Very useful
@ -795,11 +786,9 @@ def rs_generator_poly_all(max_nsym: int,
return g_all
def rs_simple_encode_msg(msg_in: bytearray,
nsym: int,
fcr: int = 0,
generator: int = 2
) -> bytearray:
def rs_simple_encode_msg(
msg_in: bytearray, nsym: int, fcr: int = 0, generator: int = 2
) -> bytearray:
"""\
Simple Reed-Solomon encoding (mainly an example for you to
understand how it works, because it's slower than the in-lined
@ -808,8 +797,10 @@ def rs_simple_encode_msg(msg_in: bytearray,
global field_charac
if (len(msg_in) + nsym) > field_charac: # pragma: no cover
raise ValueError("Message is too long (%i when max is %i)"
% (len(msg_in) + nsym, field_charac))
raise ValueError(
"Message is too long (%i when max is %i)"
% (len(msg_in) + nsym, field_charac)
)
gen = rs_generator_poly(nsym, fcr, generator)
@ -826,12 +817,13 @@ def rs_simple_encode_msg(msg_in: bytearray,
return msg_out
def rs_encode_msg(msg_in: bytes,
nsym: int,
fcr: int = 0,
generator: int = 2,
gen: Optional[bytearray] = None
) -> bytearray:
def rs_encode_msg(
msg_in: bytes,
nsym: int,
fcr: int = 0,
generator: int = 2,
gen: Optional[bytearray] = None,
) -> bytearray:
"""\
Reed-Solomon main encoding function, using polynomial division
(Extended Synthetic Division, the fastest algorithm available to my
@ -839,8 +831,10 @@ def rs_encode_msg(msg_in: bytes,
"""
global field_charac
if (len(msg_in) + nsym) > field_charac: # pragma: no cover
raise ValueError("Message is too long (%i when max is %i)"
% (len(msg_in) + nsym, field_charac))
raise ValueError(
"Message is too long (%i when max is %i)"
% (len(msg_in) + nsym, field_charac)
)
if gen is None:
gen = rs_generator_poly(nsym, fcr, generator)
msg_in = _bytearray(msg_in)
@ -872,7 +866,7 @@ def rs_encode_msg(msg_in: bytes,
# log(0) is undefined, so we need to manually check for this
# case. There's no need to check the divisor here because we
# know it can't be 0 since we generated it.
if coef != 0:
if coef:
lcoef = gf_log[coef] # precaching
# In synthetic division, we always skip the first
@ -898,17 +892,16 @@ def rs_encode_msg(msg_in: bytes,
# Equivalent to c = mprime - b, where
# mprime is msg_in padded with [0]*nsym
msg_out[:len(msg_in)] = msg_in
msg_out[: len(msg_in)] = msg_in
return msg_out
# Reed-Solomon decoding
def rs_calc_syndromes(msg: bytearray,
nsym: int,
fcr: int = 0,
generator: int = 2
) -> List[int]:
def rs_calc_syndromes(
msg: bytearray, nsym: int, fcr: int = 0, generator: int = 2
) -> List[int]:
"""\
Given the received codeword msg and the number of error correcting
symbols (nsym), computes the syndromes polynomial. Mathematically,
@ -931,12 +924,13 @@ def rs_calc_syndromes(msg: bytearray,
return [0] + [gf_poly_eval(msg, gf_pow(generator, i + fcr)) for i in range(nsym)]
def rs_correct_errata(msg_in: bytearray,
synd: List[int],
err_pos: List[int],
fcr: int = 0,
generator: int = 2
) -> bytearray:
def rs_correct_errata(
msg_in: bytearray,
synd: List[int],
err_pos: List[int],
fcr: int = 0,
generator: int = 2,
) -> bytearray:
"""\
Forney algorithm, computes the values (error
magnitude) to correct the input message.
@ -954,7 +948,7 @@ def rs_correct_errata(msg_in: bytearray,
# errata locator algorithm to work (e.g. instead of [0, 1, 2] it
# will become [len(msg)-1, len(msg)-2, len(msg) -3])
coef_pos = [len(msg) - 1 - p for p in err_pos]
err_loc = rs_find_errata_locator(coef_pos, generator)
err_loc = rs_find_errata_locator(coef_pos, generator)
# Calculate errata evaluator polynomial (often
# called Omega or Gamma in academic papers)
@ -964,14 +958,14 @@ def rs_correct_errata(msg_in: bytearray,
# from the error positions in err_pos (the roots of the error
# locator polynomial, i.e., where it evaluates to 0)
x = [] # will store the position of the errors
for i in range(len(coef_pos)):
for i, _ in enumerate(coef_pos):
pos = field_charac - coef_pos[i]
x.append(gf_pow(generator, -pos))
# Forney algorithm: Compute the magnitudes will store the values
# that need to be corrected (subtracted) to the message containing
# errors. This is sometimes called the error magnitude polynomial.
e = _bytearray(len(msg))
e = _bytearray(len(msg))
xlength = len(x)
for i, xi in enumerate(x):
xi_inv = gf_inverse(xi)
@ -1042,11 +1036,12 @@ def rs_correct_errata(msg_in: bytearray,
return msg
def rs_find_error_locator(synd: List[int],
nsym: int,
erase_loc: Optional[bytearray] = None,
erase_count: int = 0
) -> List[int]:
def rs_find_error_locator(
synd: List[int],
nsym: int,
erase_loc: Optional[bytearray] = None,
erase_count: int = 0,
) -> List[int]:
"""\
Find error/errata locator and evaluator
polynomials with Berlekamp-Massey algorithm
@ -1148,7 +1143,7 @@ def rs_find_error_locator(synd: List[int],
old_loc += _bytearray([0])
# Iteratively estimate the errata locator and evaluator polynomials
if delta != 0: # Update only if there's a discrepancy
if delta: # Update only if there's a discrepancy
# Rule B (rule A is implicitly defined because rule A just
# says that we skip any modification for this iteration)
if len(old_loc) > len(err_loc):
@ -1181,16 +1176,14 @@ def rs_find_error_locator(synd: List[int],
# Check if the result is correct, that there's not too many errors to
# correct drop leading 0s, else errs will not be of the correct size
err_loc_ = list(itertools.dropwhile(lambda x: x == 0, err_loc)) # type: List[int]
errs = len(err_loc_) - 1
errs = len(err_loc_) - 1
if (errs - erase_count) * 2 + erase_count > nsym: # pragma: no cover
raise ReedSolomonError("Too many errors to correct")
return err_loc_
def rs_find_errata_locator(e_pos: List[int],
generator: int = 2
) -> List[int]:
def rs_find_errata_locator(e_pos: List[int], generator: int = 2) -> List[int]:
"""\
Compute the erasures/errors/errata locator polynomial from the
erasures/errors/errata positions (the positions must be relative to
@ -1223,15 +1216,17 @@ def rs_find_errata_locator(e_pos: List[int],
print(string.center(terminal_width))
if len(e_pos) > 0:
print('')
for s in ["Warning! Reed-Solomon erasure code",
"detected and corrected {} errors in ".format(len(e_pos)),
"a received packet. This might indicate",
"bad connection, an eminent adapter or",
"data diode HW failure or that serial",
"interface's baud rate is set too high."]:
print("")
for s in [
"Warning! Reed-Solomon erasure code",
"detected and corrected {} errors in ".format(len(e_pos)),
"a received packet. This might indicate",
"bad connection, an eminent adapter or",
"data diode HW failure or that serial",
"interface's baud rate is set too high.",
]:
c_print(s)
print('')
print("")
# erasures_loc is very simple to compute:
# erasures_loc = prod(1 - x*alpha**i) for i in erasures_pos and
@ -1240,13 +1235,15 @@ def rs_find_errata_locator(e_pos: List[int],
# we simply generate a Polynomial([c, 0]) where 0 is the constant
# and c is positioned to be the coefficient for x^1.
for i in e_pos:
e_loc = gf_poly_mul(e_loc, gf_poly_add(_bytearray([1]), [gf_pow(generator, i), 0]))
e_loc = gf_poly_mul(
e_loc, gf_poly_add(_bytearray([1]), [gf_pow(generator, i), 0])
)
return e_loc
def rs_find_error_evaluator(synd: List[int],
err_loc: List[int],
nsym: int) -> bytearray:
def rs_find_error_evaluator(
synd: List[int], err_loc: List[int], nsym: int
) -> bytearray:
"""\
Compute the error (or erasures if you supply sigma=erasures locator
polynomial, or errata) evaluator polynomial Omega from the syndrome
@ -1271,23 +1268,22 @@ def rs_find_error_evaluator(synd: List[int],
return remainder
def rs_find_errors(err_loc: Union[bytearray, List[int]],
nmess: int,
generator: int = 2
) -> List[int]:
def rs_find_errors(
err_loc: Union[bytearray, List[int]], nmess: int, generator: int = 2
) -> List[int]:
"""\
Find the roots (i.e., where evaluation = zero) of error polynomial by
brute-force trial, this is a sort of Chien's search (but less
efficient, Chien's search is a way to evaluate the polynomial such
that each evaluation only takes constant time).
"""
errs = len(err_loc) - 1
errs = len(err_loc) - 1
err_pos = []
# Normally we should try all 2^8 possible values, but here
# we optimize to just check the interesting symbols
for i in range(nmess):
if gf_poly_eval(err_loc, gf_pow(generator, i)) == 0:
if not gf_poly_eval(err_loc, gf_pow(generator, i)):
# It's a 0? Bingo, it's a root of the error locator
# polynomial, in other terms this is the location of an error
err_pos.append(nmess - 1 - i)
@ -1307,16 +1303,16 @@ def rs_find_errors(err_loc: Union[bytearray, List[int]],
# all 0), so we may not even be able to check if that's correct
# or not, so I'm not sure the brute-force approach may even be
# possible.
raise ReedSolomonError("Too many (or few) errors found by Chien"
" Search for the errata locator polynomial!")
raise ReedSolomonError(
"Too many (or few) errors found by Chien"
" Search for the errata locator polynomial!"
)
return err_pos
def rs_forney_syndromes(synd: List[int],
pos: List[int],
nmess: int,
generator: int = 2
) -> List[int]:
def rs_forney_syndromes(
synd: List[int], pos: List[int], nmess: int, generator: int = 2
) -> List[int]:
"""\
Compute Forney syndromes, which computes a modified syndromes to
compute only errors (erasures are trimmed out). Do not confuse this
@ -1363,13 +1359,14 @@ def rs_forney_syndromes(synd: List[int],
return fsynd
def rs_correct_msg(msg_in: bytearray,
nsym: int,
fcr: int = 0,
generator: int = 2,
erase_pos: Optional[List[int]] = None,
only_erasures: bool = False
) -> Tuple[bytearray, bytearray]:
def rs_correct_msg(
msg_in: Union[bytes, bytearray],
nsym: int,
fcr: int = 0,
generator: int = 2,
erase_pos: Optional[List[int]] = None,
only_erasures: bool = False,
) -> Tuple[bytearray, bytearray]:
"""Reed-Solomon main decoding function"""
global field_charac
if len(msg_in) > field_charac: # pragma: no cover
@ -1386,8 +1383,9 @@ def rs_correct_msg(msg_in: bytearray,
# with a position above the length of field_charac -- if you
# really need a bigger message without chunking, then you should
# better enlarge c_exp so that you get a bigger field).
raise ValueError("Message is too long (%i when max is %i)"
% (len(msg_in), field_charac))
raise ValueError(
"Message is too long (%i when max is %i)" % (len(msg_in), field_charac)
)
msg_out = _bytearray(msg_in) # copy of message
@ -1414,7 +1412,7 @@ def rs_correct_msg(msg_in: bytearray,
# Check if there's any error/erasure in the input codeword. If not
# (all syndromes coefficients are 0), then just return the codeword
# as-is.
if max(synd) == 0:
if not max(synd):
return msg_out[:-nsym], msg_out[-nsym:] # no errors
# Find errors locations
@ -1453,21 +1451,23 @@ def rs_correct_msg(msg_in: bytearray,
return msg_out[:-nsym], msg_out[-nsym:]
def rs_correct_msg_nofsynd(msg_in: bytearray,
nsym: int,
fcr: int = 0,
generator: int = 2,
erase_pos: Optional[List[int]] = None,
only_erasures: bool = False
) -> Tuple[bytearray, bytearray]:
def rs_correct_msg_nofsynd(
msg_in: bytearray,
nsym: int,
fcr: int = 0,
generator: int = 2,
erase_pos: Optional[List[int]] = None,
only_erasures: bool = False,
) -> Tuple[bytearray, bytearray]:
"""\
Reed-Solomon main decoding function, without using the modified
Forney syndromes.
"""
global field_charac
if len(msg_in) > field_charac: # pragma: no cover
raise ValueError("Message is too long (%i when max is %i)"
% (len(msg_in), field_charac))
raise ValueError(
"Message is too long (%i when max is %i)" % (len(msg_in), field_charac)
)
msg_out = _bytearray(msg_in) # copy of message
@ -1493,7 +1493,7 @@ def rs_correct_msg_nofsynd(msg_in: bytearray,
# Check if there's any error/erasure in the input codeword. If not
# (all syndromes coefficients are 0), then just return the codeword
# as-is.
if max(synd) == 0:
if not max(synd):
return msg_out[:-nsym], msg_out[-nsym:] # no errors
# Prepare erasures locator and evaluator polynomials.
@ -1502,15 +1502,21 @@ def rs_correct_msg_nofsynd(msg_in: bytearray,
# erase_eval = None
erase_count = 0
if erase_pos:
erase_count = len(erase_pos)
erase_count = len(erase_pos)
erase_pos_reversed = [len(msg_out) - 1 - eras for eras in erase_pos]
erase_loc = bytearray(rs_find_errata_locator(erase_pos_reversed, generator=generator))
erase_loc = bytearray(
rs_find_errata_locator(erase_pos_reversed, generator=generator)
)
# Prepare errors/errata locator polynomial
if only_erasures:
err_loc = erase_loc[::-1]
else:
err_loc = bytearray(rs_find_error_locator(synd, nsym, erase_loc=erase_loc, erase_count=erase_count))
err_loc = bytearray(
rs_find_error_locator(
synd, nsym, erase_loc=erase_loc, erase_count=erase_count
)
)
err_loc = err_loc[::-1]
# Locate the message errors
@ -1535,11 +1541,7 @@ def rs_correct_msg_nofsynd(msg_in: bytearray,
return msg_out[:-nsym], msg_out[-nsym:]
def rs_check(msg: bytearray,
nsym: int,
fcr: int = 0,
generator: int = 2
) -> bool:
def rs_check(msg: bytearray, nsym: int, fcr: int = 0, generator: int = 2) -> bool:
"""\
Returns true if the message + ecc has no error of false otherwise
(may not always catch a wrong decoding or a wrong message,
@ -1549,6 +1551,26 @@ def rs_check(msg: bytearray,
return max(rs_calc_syndromes(msg, nsym, fcr, generator)) == 0
@overload
def chunk(data: bytearray, chunk_size: int) -> Iterator[bytearray]:
"""Split a long message into chunks."""
@overload
def chunk(data: bytes, chunk_size: int) -> Iterator[bytes]:
"""Split a long message into chunks."""
def chunk(
data: Union[bytearray, bytes], chunk_size: int
) -> Iterator[Union[bytearray, bytes]]:
"""Split a long message into chunks."""
for i in range(0, len(data), chunk_size):
# Split the long message in a chunk.
chunk_ = data[i : i + chunk_size]
yield chunk_
class RSCodec(object):
"""\
A Reed Solomon encoder/decoder. After initializing the object, use
@ -1567,15 +1589,16 @@ class RSCodec(object):
previous values (0 and 0x11d).
"""
def __init__(self,
nsym: int = 10,
nsize: int = 255,
fcr: int = 0,
prim: int = 0x11d,
generator: int = 2,
c_exp: int = 8,
single_gen: bool = True
) -> None:
def __init__(
self,
nsym: int = 10,
nsize: int = 255,
fcr: int = 0,
prim: int = 0x11D,
generator: int = 2,
c_exp: int = 8,
single_gen: bool = True,
) -> None:
"""\
Initialize the Reed-Solomon codec. Note that different
parameters change the internal values (the ecc symbols, look-up
@ -1601,11 +1624,15 @@ class RSCodec(object):
# resize the Galois Field.
if nsize > 255 and c_exp <= 8:
# Get the next closest power of two
c_exp = int(math.log(2 ** (math.floor(math.log(nsize) / math.log(2)) + 1), 2))
c_exp = int(
math.log(2 ** (math.floor(math.log(nsize) / math.log(2)) + 1), 2)
)
# prim was not correctly defined, find one
if c_exp != 8 and prim == 0x11d:
prim = find_prime_polys(generator=generator, c_exp=c_exp, fast_primes=True, single=True)
if c_exp != 8 and prim == 0x11D:
prim = find_prime_polys(
generator=generator, c_exp=c_exp, fast_primes=True, single=True
)
if nsize == 255: # Resize chunk size if not set
nsize = int(2 ** c_exp - 1)
@ -1639,7 +1666,9 @@ class RSCodec(object):
# Initialize the look-up tables for easy
# and quick multiplication/division
self.gf_log, self.gf_exp, self.field_charac = init_tables(prim, generator, c_exp)
self.gf_log, self.gf_exp, self.field_charac = init_tables(
prim, generator, c_exp
)
# Pre-compute the generator polynomials
if single_gen:
@ -1647,20 +1676,7 @@ class RSCodec(object):
else: # pragma: no cover
self.gen = rs_generator_poly_all(nsize, fcr=fcr, generator=generator)
@staticmethod
def chunk(data: bytes,
chunk_size: int
) -> Iterator[Any]:
"""Split a long message into chunks"""
for i in range(0, len(data), chunk_size):
# Split the long message in a chunk.
chunk = data[i:i + chunk_size]
yield chunk
def encode(self,
data_: Union[bytes, str],
nsym: Optional[int] = None
) -> bytearray:
def encode(self, data_: Union[bytes, str], nsym: Optional[int] = None) -> bytearray:
"""\
Encode a message (i.e., add the ecc symbols) using Reed-Solomon,
whatever the length of the message because we use chunking.
@ -1678,16 +1694,25 @@ class RSCodec(object):
else:
data = data_
enc = _bytearray() # type: bytearray
for chunk in self.chunk(data, self.nsize - self.nsym):
enc.extend(rs_encode_msg(chunk, self.nsym, fcr=self.fcr, generator=self.generator, gen=self.gen[nsym]))
for chunk_ in chunk(data, self.nsize - self.nsym):
enc.extend(
rs_encode_msg(
chunk_,
self.nsym,
fcr=self.fcr,
generator=self.generator,
gen=self.gen[nsym],
)
)
return enc
def decode(self,
data: bytes,
nsym: Optional[int] = None,
erase_pos: Optional[List[int]] = None,
only_erasures: bool = False
) -> Tuple[bytearray, bytearray]:
def decode(
self,
data: bytes,
nsym: Optional[int] = None,
erase_pos: Optional[List[int]] = None,
only_erasures: bool = False,
) -> Tuple[bytearray, bytearray]:
"""\
Repair a message, whatever its size is, by using chunking. May
return a wrong result if number of errors > nsym. Note that it
@ -1713,9 +1738,9 @@ class RSCodec(object):
if isinstance(data, str): # pragma: no cover
data = _bytearray(data)
dec = _bytearray()
dec = _bytearray()
dec_full = _bytearray()
for chunk in self.chunk(data, self.nsize):
for chunk_ in chunk(data, self.nsize):
# Extract the erasures for this chunk
e_pos = [] # type: List[int]
if erase_pos: # pragma: no cover
@ -1730,16 +1755,19 @@ class RSCodec(object):
erase_pos = [x - (self.nsize + 1) for x in erase_pos if x > self.nsize]
# Decode/repair this chunk!
rmes, recc = rs_correct_msg(chunk, nsym, fcr=self.fcr, generator=self.generator,
erase_pos=e_pos, only_erasures=only_erasures)
rmes, recc = rs_correct_msg(
chunk_,
nsym,
fcr=self.fcr,
generator=self.generator,
erase_pos=e_pos,
only_erasures=only_erasures,
)
dec.extend(rmes)
dec_full.extend(rmes + recc)
return dec, dec_full
def check(self,
data: bytearray,
nsym: Optional[int] = None
) -> List[bool]:
def check(self, data: bytearray, nsym: Optional[int] = None) -> List[bool]:
"""\
Check if a message+ecc stream is not corrupted (or fully repaired).
Note: may return a wrong result if number of errors > nsym.
@ -1749,6 +1777,6 @@ class RSCodec(object):
if isinstance(data, str): # pragma: no cover
data = _bytearray(data)
check = []
for chunk in self.chunk(data, self.nsize):
check.append(rs_check(chunk, nsym, fcr=self.fcr, generator=self.generator))
for chunk_ in chunk(data, self.nsize):
check.append(rs_check(chunk_, nsym, fcr=self.fcr, generator=self.generator))
return check

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,20 @@
#!/usr/bin/env python3.7
# -*- coding: utf-8 -*-
"""
TFC - Onion-routed, endpoint secure messaging system
Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
TFC is free software: you can redistribute it and/or modify it under the terms
of the GNU General Public License as published by the Free Software Foundation,
either version 3 of the License, or (at your option) any later version.
TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""

View File

@ -19,143 +19,320 @@ You should have received a copy of the GNU General Public License
along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
import os
import typing
from typing import Any, Dict, Union
from typing import Any, Dict, Tuple, Union
from src.common.db_logs import access_logs, change_log_db_key, remove_logs, replace_log_db
from src.common.encoding import bytes_to_int, pub_key_to_short_address
from src.common.exceptions import FunctionReturn
from src.common.misc import ignored, separate_header
from src.common.output import clear_screen, m_print, phase, print_on_previous_line
from src.common.statics import (CH_FILE_RECV, CH_LOGGING, CH_MASTER_KEY, CH_NICKNAME, CH_NOTIFY, CH_SETTING,
CLEAR_SCREEN, COMMAND, CONTACT_REM, CONTACT_SETTING_HEADER_LENGTH, DISABLE, DONE,
ENABLE, ENCODED_INTEGER_LENGTH, ENCRYPTED_COMMAND_HEADER_LENGTH, EXIT, EXIT_PROGRAM,
GROUP_ADD, GROUP_CREATE, GROUP_DELETE, GROUP_REMOVE, GROUP_RENAME, KEY_EX_ECDHE,
KEY_EX_PSK_RX, KEY_EX_PSK_TX, LOCAL_KEY_RDY, LOCAL_PUBKEY, LOG_DISPLAY, LOG_EXPORT,
LOG_REMOVE, ONION_SERVICE_PUBLIC_KEY_LENGTH, ORIGIN_USER_HEADER, RESET, RESET_SCREEN,
US_BYTE, WIN_ACTIVITY, WIN_SELECT, WIN_TYPE_CONTACT, WIN_TYPE_GROUP, WIN_UID_FILE,
WIN_UID_LOCAL, WIPE, WIPE_USR_DATA)
from src.common.db_logs import (
access_logs,
change_log_db_key,
remove_logs,
replace_log_db,
)
from src.common.encoding import bytes_to_int, pub_key_to_short_address
from src.common.exceptions import SoftError
from src.common.misc import ignored, reset_terminal, separate_header
from src.common.output import clear_screen, m_print, phase, print_on_previous_line
from src.common.statics import (
CH_FILE_RECV,
CH_LOGGING,
CH_MASTER_KEY,
CH_NICKNAME,
CH_NOTIFY,
CH_SETTING,
CLEAR_SCREEN,
COMMAND,
CONTACT_REM,
CONTACT_SETTING_HEADER_LENGTH,
DISABLE,
DONE,
ENABLE,
ENCODED_INTEGER_LENGTH,
ENCRYPTED_COMMAND_HEADER_LENGTH,
EXIT,
EXIT_PROGRAM,
GROUP_ADD,
GROUP_CREATE,
GROUP_DELETE,
GROUP_REMOVE,
GROUP_RENAME,
KEY_EX_ECDHE,
KEY_EX_PSK_RX,
KEY_EX_PSK_TX,
LOCAL_KEY_RDY,
LOCAL_PUBKEY,
LOG_DISPLAY,
LOG_EXPORT,
LOG_REMOVE,
ONION_SERVICE_PUBLIC_KEY_LENGTH,
ORIGIN_USER_HEADER,
RESET_SCREEN,
US_BYTE,
WIN_ACTIVITY,
WIN_SELECT,
WIN_TYPE_CONTACT,
WIN_TYPE_GROUP,
WIN_UID_COMMAND,
WIN_UID_FILE,
WIPE,
WIPE_USR_DATA,
)
from src.receiver.commands_g import group_add, group_create, group_delete, group_remove, group_rename
from src.receiver.key_exchanges import key_ex_ecdhe, key_ex_psk_rx, key_ex_psk_tx, local_key_rdy
from src.receiver.packet import decrypt_assembly_packet
from src.receiver.commands_g import (
group_add,
group_create,
group_delete,
group_remove,
group_rename,
)
from src.receiver.key_exchanges import (
key_ex_ecdhe,
key_ex_psk_rx,
key_ex_psk_tx,
local_key_rdy,
)
from src.receiver.packet import decrypt_assembly_packet
if typing.TYPE_CHECKING:
from datetime import datetime
from multiprocessing import Queue
from src.common.db_contacts import Contact, ContactList
from src.common.db_groups import Group, GroupList
from src.common.db_keys import KeyList
from datetime import datetime
from multiprocessing import Queue
from src.common.db_contacts import Contact, ContactList
from src.common.db_groups import Group, GroupList
from src.common.db_keys import KeyList
from src.common.db_masterkey import MasterKey
from src.common.db_settings import Settings
from src.common.gateway import Gateway
from src.receiver.packet import PacketList
from src.receiver.windows import WindowList
from src.common.db_settings import Settings
from src.common.gateway import Gateway
from src.receiver.packet import PacketList
from src.receiver.windows import WindowList
def process_command(ts: 'datetime',
assembly_ct: bytes,
window_list: 'WindowList',
packet_list: 'PacketList',
contact_list: 'ContactList',
key_list: 'KeyList',
group_list: 'GroupList',
settings: 'Settings',
master_key: 'MasterKey',
gateway: 'Gateway',
exit_queue: 'Queue[bytes]'
) -> None:
def process_command(
ts: "datetime",
assembly_ct: bytes,
window_list: "WindowList",
packet_list: "PacketList",
contact_list: "ContactList",
key_list: "KeyList",
group_list: "GroupList",
settings: "Settings",
master_key: "MasterKey",
gateway: "Gateway",
exit_queue: "Queue[bytes]",
) -> None:
"""Decrypt command assembly packet and process command."""
assembly_packet = decrypt_assembly_packet(assembly_ct, LOCAL_PUBKEY, ORIGIN_USER_HEADER,
window_list, contact_list, key_list)
assembly_packet = decrypt_assembly_packet(
assembly_ct,
LOCAL_PUBKEY,
ORIGIN_USER_HEADER,
window_list,
contact_list,
key_list,
)
cmd_packet = packet_list.get_packet(LOCAL_PUBKEY, ORIGIN_USER_HEADER, COMMAND)
cmd_packet.add_packet(assembly_packet)
if not cmd_packet.is_complete:
raise FunctionReturn("Incomplete command.", output=False)
raise SoftError("Incomplete command.", output=False)
header, cmd = separate_header(cmd_packet.assemble_command_packet(), ENCRYPTED_COMMAND_HEADER_LENGTH)
no = None
header, cmd = separate_header(
cmd_packet.assemble_command_packet(), ENCRYPTED_COMMAND_HEADER_LENGTH
)
no = None
# Keyword Function to run ( Parameters )
# --------------------------------------------------------------------------------------------------------------
d = {LOCAL_KEY_RDY: (local_key_rdy, ts, window_list, contact_list ),
WIN_ACTIVITY: (win_activity, window_list ),
WIN_SELECT: (win_select, cmd, window_list ),
CLEAR_SCREEN: (clear_screen, ),
RESET_SCREEN: (reset_screen, cmd, window_list ),
EXIT_PROGRAM: (exit_tfc, exit_queue),
LOG_DISPLAY: (log_command, cmd, no, window_list, contact_list, group_list, settings, master_key),
LOG_EXPORT: (log_command, cmd, ts, window_list, contact_list, group_list, settings, master_key),
LOG_REMOVE: (remove_log, cmd, contact_list, group_list, settings, master_key),
CH_MASTER_KEY: (ch_master_key, ts, window_list, contact_list, group_list, key_list, settings, master_key),
CH_NICKNAME: (ch_nick, cmd, ts, window_list, contact_list, ),
CH_SETTING: (ch_setting, cmd, ts, window_list, contact_list, group_list, key_list, settings, gateway ),
CH_LOGGING: (ch_contact_s, cmd, ts, window_list, contact_list, group_list, header ),
CH_FILE_RECV: (ch_contact_s, cmd, ts, window_list, contact_list, group_list, header ),
CH_NOTIFY: (ch_contact_s, cmd, ts, window_list, contact_list, group_list, header ),
GROUP_CREATE: (group_create, cmd, ts, window_list, contact_list, group_list, settings ),
GROUP_ADD: (group_add, cmd, ts, window_list, contact_list, group_list, settings ),
GROUP_REMOVE: (group_remove, cmd, ts, window_list, contact_list, group_list ),
GROUP_DELETE: (group_delete, cmd, ts, window_list, group_list ),
GROUP_RENAME: (group_rename, cmd, ts, window_list, contact_list, group_list ),
KEY_EX_ECDHE: (key_ex_ecdhe, cmd, ts, window_list, contact_list, key_list, settings ),
KEY_EX_PSK_TX: (key_ex_psk_tx, cmd, ts, window_list, contact_list, key_list, settings ),
KEY_EX_PSK_RX: (key_ex_psk_rx, cmd, ts, window_list, contact_list, key_list, settings ),
CONTACT_REM: (contact_rem, cmd, ts, window_list, contact_list, group_list, key_list, settings, master_key),
WIPE_USR_DATA: (wipe, exit_queue)
} # type: Dict[bytes, Any]
d = {
LOCAL_KEY_RDY: (local_key_rdy, ts, window_list, contact_list),
WIN_ACTIVITY: (win_activity, window_list),
WIN_SELECT: (win_select, cmd, window_list),
CLEAR_SCREEN: (clear_screen,),
RESET_SCREEN: (reset_screen, cmd, window_list),
EXIT_PROGRAM: (exit_tfc, exit_queue),
LOG_DISPLAY: (
log_command,
cmd,
no,
window_list,
contact_list,
group_list,
settings,
master_key,
),
LOG_EXPORT: (
log_command,
cmd,
ts,
window_list,
contact_list,
group_list,
settings,
master_key,
),
LOG_REMOVE: (remove_log, cmd, contact_list, group_list, settings, master_key),
CH_MASTER_KEY: (
ch_master_key,
ts,
window_list,
contact_list,
group_list,
key_list,
settings,
master_key,
),
CH_NICKNAME: (ch_nick, cmd, ts, window_list, contact_list,),
CH_SETTING: (
ch_setting,
cmd,
ts,
window_list,
contact_list,
group_list,
key_list,
settings,
gateway,
),
CH_LOGGING: (
ch_contact_s,
cmd,
ts,
window_list,
contact_list,
group_list,
header,
),
CH_FILE_RECV: (
ch_contact_s,
cmd,
ts,
window_list,
contact_list,
group_list,
header,
),
CH_NOTIFY: (
ch_contact_s,
cmd,
ts,
window_list,
contact_list,
group_list,
header,
),
GROUP_CREATE: (
group_create,
cmd,
ts,
window_list,
contact_list,
group_list,
settings,
),
GROUP_ADD: (
group_add,
cmd,
ts,
window_list,
contact_list,
group_list,
settings,
),
GROUP_REMOVE: (group_remove, cmd, ts, window_list, contact_list, group_list),
GROUP_DELETE: (group_delete, cmd, ts, window_list, group_list),
GROUP_RENAME: (group_rename, cmd, ts, window_list, contact_list, group_list),
KEY_EX_ECDHE: (
key_ex_ecdhe,
cmd,
ts,
window_list,
contact_list,
key_list,
settings,
),
KEY_EX_PSK_TX: (
key_ex_psk_tx,
cmd,
ts,
window_list,
contact_list,
key_list,
settings,
),
KEY_EX_PSK_RX: (
key_ex_psk_rx,
cmd,
ts,
window_list,
contact_list,
key_list,
settings,
),
CONTACT_REM: (
contact_rem,
cmd,
ts,
window_list,
contact_list,
group_list,
key_list,
settings,
master_key,
),
WIPE_USR_DATA: (wipe, exit_queue),
} # type: Dict[bytes, Any]
try:
from_dict = d[header]
except KeyError:
raise FunctionReturn("Error: Received an invalid command.")
raise SoftError("Error: Received an invalid command.")
func = from_dict[0]
func = from_dict[0]
parameters = from_dict[1:]
func(*parameters)
raise SoftError("Command completed.", output=False)
def win_activity(window_list: 'WindowList') -> None:
def win_activity(window_list: "WindowList") -> None:
"""Show number of unread messages in each window."""
unread_wins = [w for w in window_list if (w.uid != WIN_UID_LOCAL and w.unread_messages > 0)]
print_list = ["Window activity"] if unread_wins else ["No window activity"]
unread_wins = [
w for w in window_list if (w.uid != WIN_UID_COMMAND and w.unread_messages > 0)
]
print_list = ["Window activity"] if unread_wins else ["No window activity"]
print_list += [f"{w.name}: {w.unread_messages}" for w in unread_wins]
m_print(print_list, box=True)
print_on_previous_line(reps=(len(print_list) + 2), delay=1)
def win_select(window_uid: bytes, window_list: 'WindowList') -> None:
def win_select(window_uid: bytes, window_list: "WindowList") -> None:
"""Select window specified by the Transmitter Program."""
if window_uid == WIN_UID_FILE:
clear_screen()
window_list.set_active_rx_window(window_uid)
def reset_screen(win_uid: bytes, window_list: 'WindowList') -> None:
def reset_screen(win_uid: bytes, window_list: "WindowList") -> None:
"""Reset window specified by the Transmitter Program."""
window = window_list.get_window(win_uid)
window.reset_window()
os.system(RESET)
reset_terminal()
def exit_tfc(exit_queue: 'Queue[str]') -> None:
def exit_tfc(exit_queue: "Queue[str]") -> None:
"""Exit TFC."""
exit_queue.put(EXIT)
def log_command(cmd_data: bytes,
ts: 'datetime',
window_list: 'WindowList',
contact_list: 'ContactList',
group_list: 'GroupList',
settings: 'Settings',
master_key: 'MasterKey'
) -> None:
def log_command(
cmd_data: bytes,
ts: "datetime",
window_list: "WindowList",
contact_list: "ContactList",
group_list: "GroupList",
settings: "Settings",
master_key: "MasterKey",
) -> None:
"""Display or export log file for the active window.
Having the capability to export the log file from the encrypted
@ -167,39 +344,51 @@ def log_command(cmd_data: bytes,
password to ensure no unauthorized user who gains momentary
access to the system can the export logs from the database.
"""
export = ts is not None
export = ts is not None
ser_no_msg, uid = separate_header(cmd_data, ENCODED_INTEGER_LENGTH)
no_messages = bytes_to_int(ser_no_msg)
window = window_list.get_window(uid)
no_messages = bytes_to_int(ser_no_msg)
window = window_list.get_window(uid)
access_logs(window, contact_list, group_list, settings, master_key, msg_to_load=no_messages, export=export)
access_logs(
window,
contact_list,
group_list,
settings,
master_key,
msg_to_load=no_messages,
export=export,
)
if export:
local_win = window_list.get_local_window()
local_win.add_new(ts, f"Exported log file of {window.type} '{window.name}'.", output=True)
cmd_win = window_list.get_command_window()
cmd_win.add_new(
ts, f"Exported log file of {window.type} '{window.name}'.", output=True
)
def remove_log(cmd_data: bytes,
contact_list: 'ContactList',
group_list: 'GroupList',
settings: 'Settings',
master_key: 'MasterKey'
) -> None:
def remove_log(
cmd_data: bytes,
contact_list: "ContactList",
group_list: "GroupList",
settings: "Settings",
master_key: "MasterKey",
) -> None:
"""Remove log entries for contact or group."""
remove_logs(contact_list, group_list, settings, master_key, selector=cmd_data)
def ch_master_key(ts: 'datetime',
window_list: 'WindowList',
contact_list: 'ContactList',
group_list: 'GroupList',
key_list: 'KeyList',
settings: 'Settings',
master_key: 'MasterKey'
) -> None:
def ch_master_key(
ts: "datetime",
window_list: "WindowList",
contact_list: "ContactList",
group_list: "GroupList",
key_list: "KeyList",
settings: "Settings",
master_key: "MasterKey",
) -> None:
"""Prompt the user for a new master password and derive a new master key from that."""
if not master_key.authenticate_action():
raise FunctionReturn("Error: Invalid password.", tail_clear=True, delay=1, head=2)
raise SoftError("Error: Invalid password.", tail_clear=True, delay=1, head=2)
# Cache old master key to allow log file re-encryption.
old_master_key = master_key.master_key[:]
@ -210,12 +399,12 @@ def ch_master_key(ts: 'datetime',
# Update encryption keys for databases
contact_list.database.database_key = new_master_key
key_list.database.database_key = new_master_key
group_list.database.database_key = new_master_key
settings.database.database_key = new_master_key
key_list.database.database_key = new_master_key
group_list.database.database_key = new_master_key
settings.database.database_key = new_master_key
# Create temp databases for each database, do not replace original.
with ignored(FunctionReturn):
with ignored(SoftError):
change_log_db_key(old_master_key, new_master_key, settings)
contact_list.store_contacts(replace=False)
key_list.store_keys(replace=False)
@ -236,157 +425,211 @@ def ch_master_key(ts: 'datetime',
settings.database.replace_database()
phase(DONE)
m_print("Master password successfully changed.", bold=True, tail_clear=True, delay=1, head=1)
m_print(
"Master password successfully changed.",
bold=True,
tail_clear=True,
delay=1,
head=1,
)
local_win = window_list.get_local_window()
local_win.add_new(ts, "Changed Receiver master password.")
cmd_win = window_list.get_command_window()
cmd_win.add_new(ts, "Changed Receiver master password.")
def ch_nick(cmd_data: bytes,
ts: 'datetime',
window_list: 'WindowList',
contact_list: 'ContactList'
) -> None:
def ch_nick(
cmd_data: bytes,
ts: "datetime",
window_list: "WindowList",
contact_list: "ContactList",
) -> None:
"""Change nickname of contact."""
onion_pub_key, nick_bytes = separate_header(cmd_data, header_length=ONION_SERVICE_PUBLIC_KEY_LENGTH)
nick = nick_bytes.decode()
short_addr = pub_key_to_short_address(onion_pub_key)
onion_pub_key, nick_bytes = separate_header(
cmd_data, header_length=ONION_SERVICE_PUBLIC_KEY_LENGTH
)
nick = nick_bytes.decode()
short_addr = pub_key_to_short_address(onion_pub_key)
try:
contact = contact_list.get_contact_by_pub_key(onion_pub_key)
except StopIteration:
raise FunctionReturn(f"Error: Receiver has no contact '{short_addr}' to rename.")
raise SoftError(f"Error: Receiver has no contact '{short_addr}' to rename.")
contact.nick = nick
contact_list.store_contacts()
window = window_list.get_window(onion_pub_key)
window = window_list.get_window(onion_pub_key)
window.name = nick
window.handle_dict[onion_pub_key] = nick
if window.type == WIN_TYPE_CONTACT:
window.redraw()
cmd_win = window_list.get_local_window()
cmd_win = window_list.get_command_window()
cmd_win.add_new(ts, f"Changed {short_addr} nick to '{nick}'.", output=True)
def ch_setting(cmd_data: bytes,
ts: 'datetime',
window_list: 'WindowList',
contact_list: 'ContactList',
group_list: 'GroupList',
key_list: 'KeyList',
settings: 'Settings',
gateway: 'Gateway'
) -> None:
def ch_setting(
cmd_data: bytes,
ts: "datetime",
window_list: "WindowList",
contact_list: "ContactList",
group_list: "GroupList",
key_list: "KeyList",
settings: "Settings",
gateway: "Gateway",
) -> None:
"""Change TFC setting."""
try:
setting, value = [f.decode() for f in cmd_data.split(US_BYTE)]
except ValueError:
raise FunctionReturn("Error: Received invalid setting data.")
raise SoftError("Error: Received invalid setting data.")
if setting in settings.key_list:
settings.change_setting(setting, value, contact_list, group_list)
elif setting in gateway.settings.key_list:
gateway.settings.change_setting(setting, value)
else:
raise FunctionReturn(f"Error: Invalid setting '{setting}'.")
raise SoftError(f"Error: Invalid setting '{setting}'.")
local_win = window_list.get_local_window()
local_win.add_new(ts, f"Changed setting '{setting}' to '{value}'.", output=True)
cmd_win = window_list.get_command_window()
cmd_win.add_new(ts, f"Changed setting '{setting}' to '{value}'.", output=True)
if setting == 'max_number_of_contacts':
if setting == "max_number_of_contacts":
contact_list.store_contacts()
key_list.store_keys()
if setting in ['max_number_of_group_members', 'max_number_of_groups']:
if setting in ["max_number_of_group_members", "max_number_of_groups"]:
group_list.store_groups()
def ch_contact_s(cmd_data: bytes,
ts: 'datetime',
window_list: 'WindowList',
contact_list: 'ContactList',
group_list: 'GroupList',
header: bytes
) -> None:
def ch_contact_s(
cmd_data: bytes,
ts: "datetime",
window_list: "WindowList",
contact_list: "ContactList",
group_list: "GroupList",
header: bytes,
) -> None:
"""Change contact/group related setting."""
setting, win_uid = separate_header(cmd_data, CONTACT_SETTING_HEADER_LENGTH)
attr, desc, file_cmd = {CH_LOGGING: ('log_messages', "Logging of messages", False),
CH_FILE_RECV: ('file_reception', "Reception of files", True),
CH_NOTIFY: ('notifications', "Message notifications", False)}[header]
setting, win_uid = separate_header(cmd_data, CONTACT_SETTING_HEADER_LENGTH)
attr, desc, file_cmd = {
CH_LOGGING: ("log_messages", "Logging of messages", False),
CH_FILE_RECV: ("file_reception", "Reception of files", True),
CH_NOTIFY: ("notifications", "Message notifications", False),
}[header]
action, b_value = {ENABLE: ('enabled', True),
DISABLE: ('disabled', False)}[setting.lower()]
action, b_value = {ENABLE: ("enabled", True), DISABLE: ("disabled", False)}[
setting.lower()
]
if setting.isupper():
# Change settings for all contacts (and groups)
enabled = [getattr(c, attr) for c in contact_list.get_list_of_contacts()]
enabled += [getattr(g, attr) for g in group_list] if not file_cmd else []
status = "was already" if ((all(enabled) and b_value) or (not any(enabled) and not b_value)) else "has been"
specifier = "every "
w_type = "contact"
w_name = "." if file_cmd else " and group."
specifier, status, w_name, w_type = change_setting_for_all_contacts(
attr, file_cmd, b_value, contact_list, group_list
)
else:
status, specifier, w_type, w_name = change_setting_for_one_contact(
attr, file_cmd, b_value, win_uid, window_list, contact_list, group_list
)
# Set values
for c in contact_list.get_list_of_contacts():
message = f"{desc} {status} {action} for {specifier}{w_type}{w_name}"
cmd_win = window_list.get_command_window()
cmd_win.add_new(ts, message, output=True)
def change_setting_for_one_contact(
attr: str,
file_cmd: bool,
b_value: bool,
win_uid: bytes,
window_list: "WindowList",
contact_list: "ContactList",
group_list: "GroupList",
) -> Tuple[str, str, str, str]:
"""Change setting for contacts in specified window."""
if not window_list.has_window(win_uid):
raise SoftError(
f"Error: Found no window for '{pub_key_to_short_address(win_uid)}'."
)
window = window_list.get_window(win_uid)
group_window = window.type == WIN_TYPE_GROUP
contact_window = window.type == WIN_TYPE_CONTACT
if contact_window:
target = contact_list.get_contact_by_pub_key(
win_uid
) # type: Union[Contact, Group]
else:
target = group_list.get_group_by_id(win_uid)
if file_cmd:
enabled = [getattr(m, attr) for m in window.window_contacts]
changed = not all(enabled) if b_value else any(enabled)
else:
changed = getattr(target, attr) != b_value
status = "has been" if changed else "was already"
specifier = "members in " if (file_cmd and group_window) else ""
w_type = window.type
w_name = f" {window.name}."
# Set values
if contact_window or (group_window and file_cmd):
for c in window.window_contacts:
setattr(c, attr, b_value)
contact_list.store_contacts()
if not file_cmd:
for g in group_list:
setattr(g, attr, b_value)
group_list.store_groups()
elif group_window:
setattr(group_list.get_group_by_id(win_uid), attr, b_value)
group_list.store_groups()
else:
# Change setting for contacts in specified window
if not window_list.has_window(win_uid):
raise FunctionReturn(f"Error: Found no window for '{pub_key_to_short_address(win_uid)}'.")
window = window_list.get_window(win_uid)
group_window = window.type == WIN_TYPE_GROUP
contact_window = window.type == WIN_TYPE_CONTACT
if contact_window:
target = contact_list.get_contact_by_pub_key(win_uid) # type: Union[Contact, Group]
else:
target = group_list.get_group_by_id(win_uid)
if file_cmd:
enabled = [getattr(m, attr) for m in window.window_contacts]
changed = not all(enabled) if b_value else any(enabled)
else:
changed = getattr(target, attr) != b_value
status = "has been" if changed else "was already"
specifier = "members in " if (file_cmd and group_window) else ''
w_type = window.type
w_name = f" {window.name}."
# Set values
if contact_window or (group_window and file_cmd):
for c in window.window_contacts:
setattr(c, attr, b_value)
contact_list.store_contacts()
elif group_window:
setattr(group_list.get_group_by_id(win_uid), attr, b_value)
group_list.store_groups()
message = f"{desc} {status} {action} for {specifier}{w_type}{w_name}"
local_win = window_list.get_local_window()
local_win.add_new(ts, message, output=True)
return status, specifier, w_type, w_name
def contact_rem(onion_pub_key: bytes,
ts: 'datetime',
window_list: 'WindowList',
contact_list: 'ContactList',
group_list: 'GroupList',
key_list: 'KeyList',
settings: 'Settings',
master_key: 'MasterKey'
) -> None:
def change_setting_for_all_contacts(
attr: str,
file_cmd: bool,
b_value: bool,
contact_list: "ContactList",
group_list: "GroupList",
) -> Tuple[str, str, str, str]:
"""Change settings for all contacts (and groups)."""
enabled = [getattr(c, attr) for c in contact_list.get_list_of_contacts()]
enabled += [getattr(g, attr) for g in group_list] if not file_cmd else []
status = (
"was already"
if ((all(enabled) and b_value) or (not any(enabled) and not b_value))
else "has been"
)
specifier = "every "
w_type = "contact"
w_name = "." if file_cmd else " and group."
# Set values
for c in contact_list.get_list_of_contacts():
setattr(c, attr, b_value)
contact_list.store_contacts()
if not file_cmd:
for g in group_list:
setattr(g, attr, b_value)
group_list.store_groups()
return status, specifier, w_type, w_name
def contact_rem(
onion_pub_key: bytes,
ts: "datetime",
window_list: "WindowList",
contact_list: "ContactList",
group_list: "GroupList",
key_list: "KeyList",
settings: "Settings",
master_key: "MasterKey",
) -> None:
"""Remove contact from Receiver Program."""
key_list.remove_keyset(onion_pub_key)
window_list.remove_window(onion_pub_key)
@ -395,9 +638,9 @@ def contact_rem(onion_pub_key: bytes,
try:
contact = contact_list.get_contact_by_pub_key(onion_pub_key)
except StopIteration:
raise FunctionReturn(f"Receiver has no account '{short_addr}' to remove.")
raise SoftError(f"Receiver has no account '{short_addr}' to remove.")
nick = contact.nick
nick = contact.nick
in_group = any([g.remove_members([onion_pub_key]) for g in group_list])
contact_list.remove_contact_by_pub_key(onion_pub_key)
@ -405,13 +648,13 @@ def contact_rem(onion_pub_key: bytes,
message = f"Removed {nick} ({short_addr}) from contacts{' and groups' if in_group else ''}."
m_print(message, bold=True, head=1, tail=1)
local_win = window_list.get_local_window()
local_win.add_new(ts, message)
cmd_win = window_list.get_command_window()
cmd_win.add_new(ts, message)
remove_logs(contact_list, group_list, settings, master_key, onion_pub_key)
def wipe(exit_queue: 'Queue[str]') -> None:
def wipe(exit_queue: "Queue[str]") -> None:
"""\
Reset terminals, wipe all TFC user data on Destination Computer and
power off the system.
@ -421,5 +664,5 @@ def wipe(exit_queue: 'Queue[str]') -> None:
of user data becomes impossible very fast:
https://www1.cs.fau.de/filepool/projects/coldboot/fares_coldboot.pdf
"""
os.system(RESET)
reset_terminal()
exit_queue.put(WIPE)

View File

@ -21,96 +21,115 @@ along with TFC. If not, see <https://www.gnu.org/licenses/>.
import typing
from src.common.encoding import b58encode
from src.common.exceptions import FunctionReturn
from src.common.misc import separate_header, split_byte_string, validate_group_name
from src.common.output import group_management_print, m_print
from src.common.statics import (ADDED_MEMBERS, ALREADY_MEMBER, GROUP_ID_LENGTH, NEW_GROUP, NOT_IN_GROUP,
ONION_SERVICE_PUBLIC_KEY_LENGTH, REMOVED_MEMBERS, UNKNOWN_ACCOUNTS, US_BYTE,
WIN_UID_LOCAL)
from src.common.encoding import b58encode
from src.common.exceptions import SoftError
from src.common.misc import separate_header, split_byte_string, validate_group_name
from src.common.output import group_management_print, m_print
from src.common.statics import (
ADDED_MEMBERS,
ALREADY_MEMBER,
GROUP_ID_LENGTH,
NEW_GROUP,
NOT_IN_GROUP,
ONION_SERVICE_PUBLIC_KEY_LENGTH,
REMOVED_MEMBERS,
UNKNOWN_ACCOUNTS,
US_BYTE,
WIN_UID_COMMAND,
)
if typing.TYPE_CHECKING:
from datetime import datetime
from datetime import datetime
from src.common.db_contacts import ContactList
from src.common.db_groups import GroupList
from src.common.db_groups import GroupList
from src.common.db_settings import Settings
from src.receiver.windows import WindowList
from src.receiver.windows import WindowList
def group_create(cmd_data: bytes,
ts: 'datetime',
window_list: 'WindowList',
contact_list: 'ContactList',
group_list: 'GroupList',
settings: 'Settings'
) -> None:
def group_create(
cmd_data: bytes,
ts: "datetime",
window_list: "WindowList",
contact_list: "ContactList",
group_list: "GroupList",
settings: "Settings",
) -> None:
"""Create a new group."""
group_id, variable_len_data = separate_header(cmd_data, GROUP_ID_LENGTH)
group_id, variable_len_data = separate_header(cmd_data, GROUP_ID_LENGTH)
group_name_bytes, ser_members = variable_len_data.split(US_BYTE, 1)
group_name = group_name_bytes.decode()
group_name = group_name_bytes.decode()
purp_pub_keys = set(split_byte_string(ser_members, ONION_SERVICE_PUBLIC_KEY_LENGTH))
pub_keys = set(contact_list.get_list_of_pub_keys())
accepted = list(purp_pub_keys & pub_keys)
rejected = list(purp_pub_keys - pub_keys)
pub_keys = set(contact_list.get_list_of_pub_keys())
accepted = list(purp_pub_keys & pub_keys)
rejected = list(purp_pub_keys - pub_keys)
if len(accepted) > settings.max_number_of_group_members:
raise FunctionReturn(f"Error: TFC settings only allow {settings.max_number_of_group_members} "
f"members per group.")
raise SoftError(
f"Error: TFC settings only allow {settings.max_number_of_group_members} "
f"members per group."
)
if len(group_list) == settings.max_number_of_groups:
raise FunctionReturn(f"Error: TFC settings only allow {settings.max_number_of_groups} groups.")
raise SoftError(
f"Error: TFC settings only allow {settings.max_number_of_groups} groups."
)
accepted_contacts = [contact_list.get_contact_by_pub_key(k) for k in accepted]
group_list.add_group(group_name,
group_id,
settings.log_messages_by_default,
settings.show_notifications_by_default,
accepted_contacts)
group_list.add_group(
group_name,
group_id,
settings.log_messages_by_default,
settings.show_notifications_by_default,
accepted_contacts,
)
group = group_list.get_group(group_name)
window = window_list.get_window(group.group_id)
group = group_list.get_group(group_name)
window = window_list.get_window(group.group_id)
window.window_contacts = accepted_contacts
window.message_log = []
window.message_log = []
window.unread_messages = 0
window.create_handle_dict()
group_management_print(NEW_GROUP, accepted, contact_list, group_name)
group_management_print(NEW_GROUP, accepted, contact_list, group_name)
group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name)
local_win = window_list.get_window(WIN_UID_LOCAL)
local_win.add_new(ts, f"Created new group {group_name}.")
cmd_win = window_list.get_window(WIN_UID_COMMAND)
cmd_win.add_new(ts, f"Created new group {group_name}.")
def group_add(cmd_data: bytes,
ts: 'datetime',
window_list: 'WindowList',
contact_list: 'ContactList',
group_list: 'GroupList',
settings: 'Settings'
) -> None:
def group_add(
cmd_data: bytes,
ts: "datetime",
window_list: "WindowList",
contact_list: "ContactList",
group_list: "GroupList",
settings: "Settings",
) -> None:
"""Add member(s) to group."""
group_id, ser_members = separate_header(cmd_data, GROUP_ID_LENGTH)
purp_pub_keys = set(split_byte_string(ser_members, ONION_SERVICE_PUBLIC_KEY_LENGTH))
purp_pub_keys = set(split_byte_string(ser_members, ONION_SERVICE_PUBLIC_KEY_LENGTH))
try:
group_name = group_list.get_group_by_id(group_id).name
except StopIteration:
raise FunctionReturn(f"Error: No group with ID '{b58encode(group_id)}' found.")
raise SoftError(f"Error: No group with ID '{b58encode(group_id)}' found.")
pub_keys = set(contact_list.get_list_of_pub_keys())
before_adding = set(group_list.get_group(group_name).get_list_of_member_pub_keys())
ok_accounts = set(pub_keys & purp_pub_keys)
pub_keys = set(contact_list.get_list_of_pub_keys())
before_adding = set(group_list.get_group(group_name).get_list_of_member_pub_keys())
ok_accounts = set(pub_keys & purp_pub_keys)
new_in_group_set = set(ok_accounts - before_adding)
end_assembly = list(before_adding | new_in_group_set)
already_in_g = list(purp_pub_keys & before_adding)
rejected = list(purp_pub_keys - pub_keys)
rejected = list(purp_pub_keys - pub_keys)
new_in_group = list(new_in_group_set)
if len(end_assembly) > settings.max_number_of_group_members:
raise FunctionReturn(f"Error: TFC settings only allow {settings.max_number_of_group_members} "
f"members per group.")
raise SoftError(
f"Error: TFC settings only allow {settings.max_number_of_group_members} "
f"members per group."
)
group = group_list.get_group(group_name)
group.add_members([contact_list.get_contact_by_pub_key(k) for k in new_in_group])
@ -119,37 +138,38 @@ def group_add(cmd_data: bytes,
window.add_contacts(new_in_group)
window.create_handle_dict()
group_management_print(ADDED_MEMBERS, new_in_group, contact_list, group_name)
group_management_print(ALREADY_MEMBER, already_in_g, contact_list, group_name)
group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name)
group_management_print(ADDED_MEMBERS, new_in_group, contact_list, group_name)
group_management_print(ALREADY_MEMBER, already_in_g, contact_list, group_name)
group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name)
local_win = window_list.get_window(WIN_UID_LOCAL)
local_win.add_new(ts, f"Added members to group {group_name}.")
cmd_win = window_list.get_window(WIN_UID_COMMAND)
cmd_win.add_new(ts, f"Added members to group {group_name}.")
def group_remove(cmd_data: bytes,
ts: 'datetime',
window_list: 'WindowList',
contact_list: 'ContactList',
group_list: 'GroupList'
) -> None:
def group_remove(
cmd_data: bytes,
ts: "datetime",
window_list: "WindowList",
contact_list: "ContactList",
group_list: "GroupList",
) -> None:
"""Remove member(s) from the group."""
group_id, ser_members = separate_header(cmd_data, GROUP_ID_LENGTH)
purp_pub_keys = set(split_byte_string(ser_members, ONION_SERVICE_PUBLIC_KEY_LENGTH))
purp_pub_keys = set(split_byte_string(ser_members, ONION_SERVICE_PUBLIC_KEY_LENGTH))
try:
group_name = group_list.get_group_by_id(group_id).name
except StopIteration:
raise FunctionReturn(f"Error: No group with ID '{b58encode(group_id)}' found.")
raise SoftError(f"Error: No group with ID '{b58encode(group_id)}' found.")
pub_keys = set(contact_list.get_list_of_pub_keys())
before_removal = set(group_list.get_group(group_name).get_list_of_member_pub_keys())
ok_accounts_set = set(purp_pub_keys & pub_keys)
removable_set = set(before_removal & ok_accounts_set)
pub_keys = set(contact_list.get_list_of_pub_keys())
before_removal = set(group_list.get_group(group_name).get_list_of_member_pub_keys())
ok_accounts_set = set(purp_pub_keys & pub_keys)
removable_set = set(before_removal & ok_accounts_set)
not_in_group = list(ok_accounts_set - before_removal)
rejected = list(purp_pub_keys - pub_keys)
removable = list(removable_set)
rejected = list(purp_pub_keys - pub_keys)
removable = list(removable_set)
group = group_list.get_group(group_name)
group.remove_members(removable)
@ -157,22 +177,20 @@ def group_remove(cmd_data: bytes,
window = window_list.get_window(group.group_id)
window.remove_contacts(removable)
group_management_print(REMOVED_MEMBERS, removable, contact_list, group_name)
group_management_print(NOT_IN_GROUP, not_in_group, contact_list, group_name)
group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name)
group_management_print(REMOVED_MEMBERS, removable, contact_list, group_name)
group_management_print(NOT_IN_GROUP, not_in_group, contact_list, group_name)
group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name)
local_win = window_list.get_window(WIN_UID_LOCAL)
local_win.add_new(ts, f"Removed members from group {group_name}.")
cmd_win = window_list.get_window(WIN_UID_COMMAND)
cmd_win.add_new(ts, f"Removed members from group {group_name}.")
def group_delete(group_id: bytes,
ts: 'datetime',
window_list: 'WindowList',
group_list: 'GroupList'
) -> None:
def group_delete(
group_id: bytes, ts: "datetime", window_list: "WindowList", group_list: "GroupList"
) -> None:
"""Remove the group."""
if not group_list.has_group_id(group_id):
raise FunctionReturn(f"Error: No group with ID '{b58encode(group_id)}' found.")
raise SoftError(f"Error: No group with ID '{b58encode(group_id)}' found.")
name = group_list.get_group_by_id(group_id).name
window_list.remove_window(group_id)
@ -181,40 +199,41 @@ def group_delete(group_id: bytes,
message = f"Removed group '{name}'."
m_print(message, bold=True, head=1, tail=1)
local_win = window_list.get_window(WIN_UID_LOCAL)
local_win.add_new(ts, message)
cmd_win = window_list.get_window(WIN_UID_COMMAND)
cmd_win.add_new(ts, message)
def group_rename(cmd_data: bytes,
ts: 'datetime',
window_list: 'WindowList',
contact_list: 'ContactList',
group_list: 'GroupList'
) -> None:
def group_rename(
cmd_data: bytes,
ts: "datetime",
window_list: "WindowList",
contact_list: "ContactList",
group_list: "GroupList",
) -> None:
"""Rename the group."""
group_id, new_name_bytes = separate_header(cmd_data, GROUP_ID_LENGTH)
try:
group = group_list.get_group_by_id(group_id)
except StopIteration:
raise FunctionReturn(f"Error: No group with ID '{b58encode(group_id)}' found.")
raise SoftError(f"Error: No group with ID '{b58encode(group_id)}' found.")
try:
new_name = new_name_bytes.decode()
except UnicodeError:
raise FunctionReturn(f"Error: New name for group '{group.name}' was invalid.")
raise SoftError(f"Error: New name for group '{group.name}' was invalid.")
error_msg = validate_group_name(new_name, contact_list, group_list)
if error_msg:
raise FunctionReturn(error_msg)
raise SoftError(error_msg)
old_name = group.name
old_name = group.name
group.name = new_name
group_list.store_groups()
window = window_list.get_window(group.group_id)
window = window_list.get_window(group.group_id)
window.name = new_name
message = f"Renamed group '{old_name}' to '{new_name}'."
local_win = window_list.get_window(WIN_UID_LOCAL)
local_win.add_new(ts, message, output=True)
message = f"Renamed group '{old_name}' to '{new_name}'."
cmd_win = window_list.get_window(WIN_UID_COMMAND)
cmd_win.add_new(ts, message, output=True)

View File

@ -27,25 +27,33 @@ from typing import Dict, Tuple
import nacl.exceptions
from src.common.crypto import auth_and_decrypt, blake2b
from src.common.encoding import bytes_to_str
from src.common.exceptions import FunctionReturn
from src.common.misc import decompress, ensure_dir, separate_headers, separate_trailer
from src.common.output import phase, print_on_previous_line
from src.common.statics import (DIR_RECV_FILES, DONE, ONION_SERVICE_PUBLIC_KEY_LENGTH, ORIGIN_HEADER_LENGTH,
PADDED_UTF32_STR_LENGTH, SYMMETRIC_KEY_LENGTH, US_BYTE)
from src.common.crypto import auth_and_decrypt, blake2b
from src.common.encoding import bytes_to_str
from src.common.exceptions import SoftError
from src.common.misc import decompress, ensure_dir, separate_headers, separate_trailer
from src.common.output import phase, print_on_previous_line
from src.common.statics import (
DIR_RECV_FILES,
DONE,
ONION_SERVICE_PUBLIC_KEY_LENGTH,
ORIGIN_HEADER_LENGTH,
PADDED_UTF32_STR_LENGTH,
SYMMETRIC_KEY_LENGTH,
US_BYTE,
)
if typing.TYPE_CHECKING:
from datetime import datetime
from datetime import datetime
from src.common.db_contacts import ContactList
from src.common.db_settings import Settings
from src.receiver.windows import WindowList
from src.receiver.windows import WindowList
def store_unique(file_data: bytes, # File data to store
file_dir: str, # Directory to store file
file_name: str # Preferred name for the file.
) -> str:
def store_unique(
file_data: bytes, # File data to store
file_dir: str, # Directory to store file
file_name: str, # Preferred name for the file.
) -> str:
"""Store file under a unique filename.
If file exists, add trailing counter .# with value as large as
@ -55,11 +63,11 @@ def store_unique(file_data: bytes, # File data to store
if os.path.isfile(file_dir + file_name):
ctr = 1
while os.path.isfile(file_dir + file_name + f'.{ctr}'):
while os.path.isfile(file_dir + file_name + f".{ctr}"):
ctr += 1
file_name += f'.{ctr}'
file_name += f".{ctr}"
with open(file_dir + file_name, 'wb+') as f:
with open(file_dir + file_name, "wb+") as f:
f.write(file_data)
f.flush()
os.fsync(f.fileno())
@ -67,91 +75,125 @@ def store_unique(file_data: bytes, # File data to store
return file_name
def process_assembled_file(ts: 'datetime', # Timestamp last received packet
payload: bytes, # File name and content
onion_pub_key: bytes, # Onion Service pubkey of sender
nick: str, # Nickname of sender
settings: 'Settings', # Settings object
window_list: 'WindowList', # WindowList object
) -> None:
def process_assembled_file(
ts: "datetime", # Timestamp last received packet
payload: bytes, # File name and content
onion_pub_key: bytes, # Onion Service pubkey of sender
nick: str, # Nickname of sender
settings: "Settings", # Settings object
window_list: "WindowList", # WindowList object
) -> None:
"""Process received file assembly packets."""
try:
file_name_b, file_data = payload.split(US_BYTE, 1)
file_name_b, file_data = payload.split(US_BYTE, 1) # type: bytes, bytes
except ValueError:
raise FunctionReturn("Error: Received file had an invalid structure.")
raise SoftError("Error: Received file had an invalid structure.")
try:
file_name = file_name_b.decode()
except UnicodeError:
raise FunctionReturn("Error: Received file name had an invalid encoding.")
raise SoftError("Error: Received file name had an invalid encoding.")
if not file_name.isprintable() or not file_name or '/' in file_name:
raise FunctionReturn("Error: Received file had an invalid name.")
if not file_name.isprintable() or not file_name or "/" in file_name:
raise SoftError("Error: Received file had an invalid name.")
file_ct, file_key = separate_trailer(file_data, SYMMETRIC_KEY_LENGTH)
if len(file_key) != SYMMETRIC_KEY_LENGTH:
raise FunctionReturn("Error: Received file had an invalid key.")
raise SoftError("Error: Received file had an invalid key.")
decrypt_and_store_file(
ts, file_ct, file_key, file_name, onion_pub_key, nick, window_list, settings
)
def decrypt_and_store_file(
ts: "datetime",
file_ct: bytes,
file_key: bytes,
file_name: str,
onion_pub_key: bytes,
nick: str,
window_list: "WindowList",
settings: "Settings",
) -> None:
"""Decrypt and store file."""
try:
file_pt = auth_and_decrypt(file_ct, file_key)
except nacl.exceptions.CryptoError:
raise FunctionReturn("Error: Decryption of file data failed.")
raise SoftError("Error: Decryption of file data failed.")
try:
file_dc = decompress(file_pt, settings.max_decompress_size)
except zlib.error:
raise FunctionReturn("Error: Decompression of file data failed.")
raise SoftError("Error: Decompression of file data failed.")
file_dir = f'{DIR_RECV_FILES}{nick}/'
file_dir = f"{DIR_RECV_FILES}{nick}/"
final_name = store_unique(file_dc, file_dir, file_name)
message = f"Stored file from {nick} as '{final_name}'."
if settings.traffic_masking and window_list.active_win is not None:
window = window_list.active_win
else:
window = window_list.get_window(onion_pub_key)
window.add_new(ts, message, onion_pub_key, output=True, event_msg=True)
def new_file(ts: 'datetime', # Timestamp of received_packet
packet: bytes, # Sender of file and file ciphertext
file_keys: Dict[bytes, bytes], # Dictionary for file decryption keys
file_buf: Dict[bytes, Tuple['datetime', bytes]], # Dictionary for cached file ciphertexts
contact_list: 'ContactList', # ContactList object
window_list: 'WindowList', # WindowList object
settings: 'Settings' # Settings object
) -> None:
def new_file(
ts: "datetime", # Timestamp of received_packet
packet: bytes, # Sender of file and file ciphertext
file_keys: Dict[bytes, bytes], # Dictionary for file decryption keys
file_buf: Dict[
bytes, Tuple["datetime", bytes]
], # Dictionary for cached file ciphertexts
contact_list: "ContactList", # ContactList object
window_list: "WindowList", # WindowList object
settings: "Settings", # Settings object
) -> None:
"""Validate received file and process or cache it."""
onion_pub_key, _, file_ct = separate_headers(packet, [ONION_SERVICE_PUBLIC_KEY_LENGTH, ORIGIN_HEADER_LENGTH])
onion_pub_key, _, file_ct = separate_headers(
packet, [ONION_SERVICE_PUBLIC_KEY_LENGTH, ORIGIN_HEADER_LENGTH]
)
if not contact_list.has_pub_key(onion_pub_key):
raise FunctionReturn("File from an unknown account.", output=False)
raise SoftError("File from an unknown account.", output=False)
contact = contact_list.get_contact_by_pub_key(onion_pub_key)
if not contact.file_reception:
raise FunctionReturn(
f"Alert! Discarded file from {contact.nick} as file reception for them is disabled.", bold=True)
raise SoftError(
f"Alert! Discarded file from {contact.nick} as file reception for them is disabled.",
bold=True,
)
k = onion_pub_key + blake2b(file_ct) # Dictionary key
if k in file_keys:
decryption_key = file_keys[k]
process_file(ts, onion_pub_key, file_ct, decryption_key, contact_list, window_list, settings)
process_file(
ts,
onion_pub_key,
file_ct,
decryption_key,
contact_list,
window_list,
settings,
)
file_keys.pop(k)
else:
file_buf[k] = (ts, file_ct)
def process_file(ts: 'datetime', # Timestamp of received_packet
onion_pub_key: bytes, # Onion Service pubkey of sender
file_ct: bytes, # File ciphertext
file_key: bytes, # File decryption key
contact_list: 'ContactList', # ContactList object
window_list: 'WindowList', # WindowList object
settings: 'Settings' # Settings object
) -> None:
def process_file(
ts: "datetime", # Timestamp of received_packet
onion_pub_key: bytes, # Onion Service pubkey of sender
file_ct: bytes, # File ciphertext
file_key: bytes, # File decryption key
contact_list: "ContactList", # ContactList object
window_list: "WindowList", # WindowList object
settings: "Settings", # Settings object
) -> None:
"""Store file received from a contact."""
nick = contact_list.get_nick_by_pub_key(onion_pub_key)
@ -159,27 +201,27 @@ def process_file(ts: 'datetime', # Timestamp of received_packet
try:
file_pt = auth_and_decrypt(file_ct, file_key)
except nacl.exceptions.CryptoError:
raise FunctionReturn(f"Error: Decryption key for file from {nick} was invalid.")
raise SoftError(f"Error: Decryption key for file from {nick} was invalid.")
try:
file_dc = decompress(file_pt, settings.max_decompress_size)
except zlib.error:
raise FunctionReturn(f"Error: Failed to decompress file from {nick}.")
raise SoftError(f"Error: Failed to decompress file from {nick}.")
phase(DONE)
print_on_previous_line(reps=2)
try:
file_name = bytes_to_str(file_dc[:PADDED_UTF32_STR_LENGTH])
except UnicodeError:
raise FunctionReturn(f"Error: Name of file from {nick} had an invalid encoding.")
raise SoftError(f"Error: Name of file from {nick} had an invalid encoding.")
if not file_name.isprintable() or not file_name or '/' in file_name:
raise FunctionReturn(f"Error: Name of file from {nick} was invalid.")
if not file_name.isprintable() or not file_name or "/" in file_name:
raise SoftError(f"Error: Name of file from {nick} was invalid.")
file_data = file_dc[PADDED_UTF32_STR_LENGTH:]
file_dir = f'{DIR_RECV_FILES}{nick}/'
file_data = file_dc[PADDED_UTF32_STR_LENGTH:]
file_dir = f"{DIR_RECV_FILES}{nick}/"
final_name = store_unique(file_data, file_dir, file_name)
message = f"Stored file from {nick} as '{final_name}'."
message = f"Stored file from {nick} as '{final_name}'."
if settings.traffic_masking and window_list.active_win is not None:
window = window_list.active_win

View File

@ -27,164 +27,219 @@ import subprocess
import tkinter
import typing
from datetime import datetime
from typing import List, Tuple
import nacl.exceptions
from src.common.crypto import argon2_kdf, auth_and_decrypt, blake2b, csprng
from src.common.crypto import argon2_kdf, auth_and_decrypt, blake2b, csprng
from src.common.db_masterkey import MasterKey
from src.common.encoding import b58encode, bytes_to_str, pub_key_to_short_address
from src.common.exceptions import FunctionReturn
from src.common.input import get_b58_key
from src.common.misc import separate_header, separate_headers
from src.common.output import m_print, phase, print_on_previous_line
from src.common.path import ask_path_gui
from src.common.statics import (ARGON2_PSK_MEMORY_COST, ARGON2_PSK_PARALLELISM, ARGON2_PSK_TIME_COST,
ARGON2_SALT_LENGTH, B58_LOCAL_KEY, CONFIRM_CODE_LENGTH, DONE, FINGERPRINT_LENGTH,
KEX_STATUS_HAS_RX_PSK, KEX_STATUS_LOCAL_KEY, KEX_STATUS_NONE, KEX_STATUS_NO_RX_PSK,
LOCAL_NICK, LOCAL_PUBKEY, ONION_SERVICE_PUBLIC_KEY_LENGTH, PSK_FILE_SIZE, RESET,
SYMMETRIC_KEY_LENGTH, WIN_TYPE_CONTACT, WIN_TYPE_GROUP)
from src.common.encoding import b58encode, bytes_to_str, pub_key_to_short_address
from src.common.exceptions import SoftError
from src.common.input import get_b58_key
from src.common.misc import reset_terminal, separate_header, separate_headers
from src.common.output import m_print, phase, print_on_previous_line
from src.common.path import ask_path_gui
from src.common.statics import (
ARGON2_PSK_MEMORY_COST,
ARGON2_PSK_PARALLELISM,
ARGON2_PSK_TIME_COST,
ARGON2_SALT_LENGTH,
B58_LOCAL_KEY,
CONFIRM_CODE_LENGTH,
DONE,
FINGERPRINT_LENGTH,
KEX_STATUS_HAS_RX_PSK,
KEX_STATUS_LOCAL_KEY,
KEX_STATUS_NONE,
KEX_STATUS_NO_RX_PSK,
LOCAL_NICK,
LOCAL_PUBKEY,
ONION_SERVICE_PUBLIC_KEY_LENGTH,
PSK_FILE_SIZE,
SYMMETRIC_KEY_LENGTH,
WIN_TYPE_CONTACT,
WIN_TYPE_GROUP,
)
if typing.TYPE_CHECKING:
from datetime import datetime
from multiprocessing import Queue
from multiprocessing import Queue
from src.common.db_contacts import ContactList
from src.common.db_keys import KeyList
from src.common.db_keys import KeyList
from src.common.db_settings import Settings
from src.receiver.windows import WindowList
from src.receiver.windows import WindowList
local_key_queue = Queue[Tuple[datetime, bytes]]
# Local key
def process_local_key(ts: 'datetime',
packet: bytes,
window_list: 'WindowList',
contact_list: 'ContactList',
key_list: 'KeyList',
settings: 'Settings',
kdk_hashes: List[bytes],
packet_hashes: List[bytes],
l_queue: 'Queue[Tuple[datetime, bytes]]'
) -> None:
"""Decrypt local key packet and add local contact/keyset."""
bootstrap = not key_list.has_local_keyset()
plaintext = None
def protect_kdk(kdk: bytes) -> None:
"""Prevent leak of KDK via terminal history / clipboard."""
readline.clear_history()
reset_terminal()
root = tkinter.Tk()
root.withdraw()
try:
packet_hash = blake2b(packet)
if root.clipboard_get() == b58encode(kdk): # type: ignore
root.clipboard_clear() # type: ignore
except tkinter.TclError:
pass
# Check if the packet is an old one
if packet_hash in packet_hashes:
raise FunctionReturn("Error: Received old local key packet.", output=False)
root.destroy()
while True:
m_print("Local key setup", bold=True, head_clear=True, head=1, tail=1)
kdk = get_b58_key(B58_LOCAL_KEY, settings)
kdk_hash = blake2b(kdk)
try:
plaintext = auth_and_decrypt(packet, kdk)
break
except nacl.exceptions.CryptoError:
# Check if key was an old one
if kdk_hash in kdk_hashes:
m_print("Error: Entered an old local key decryption key.", delay=1)
continue
def process_local_key_buffer(
kdk: bytes, l_queue: "local_key_queue",
) -> Tuple[datetime, bytes]:
"""Check if the kdk was for a packet further ahead in the queue."""
buffer = [] # type: List[Tuple[datetime, bytes]]
while l_queue.qsize() > 0:
tup = l_queue.get() # type: Tuple[datetime, bytes]
if tup not in buffer:
buffer.append(tup)
# Check if the kdk was for a packet further ahead in the queue
buffer = [] # type: List[Tuple[datetime, bytes]]
while l_queue.qsize() > 0:
tup = l_queue.get() # type: Tuple[datetime, bytes]
if tup not in buffer:
buffer.append(tup)
for i, tup in enumerate(buffer):
try:
plaintext = auth_and_decrypt(tup[1], kdk)
for i, tup in enumerate(buffer):
try:
plaintext = auth_and_decrypt(tup[1], kdk)
# If we reach this point, decryption was successful.
for unexamined in buffer[i + 1 :]:
l_queue.put(unexamined)
buffer = []
ts = tup[0]
# If we reach this point, decryption was successful.
for unexamined in buffer[i+1:]:
l_queue.put(unexamined)
buffer = []
ts = tup[0]
break
return ts, plaintext
except nacl.exceptions.CryptoError:
continue
else:
# Finished the buffer without finding local key CT
# for the kdk. Maybe the kdk is from another session.
raise FunctionReturn("Error: Incorrect key decryption key.", delay=1)
except nacl.exceptions.CryptoError:
continue
break
# Finished the buffer without finding local key CT
# for the kdk. Maybe the kdk is from another session.
raise SoftError("Error: Incorrect key decryption key.", delay=1)
# This catches PyCharm's weird claim that plaintext might be referenced before assignment
if plaintext is None: # pragma: no cover
raise FunctionReturn("Error: Could not decrypt local key.")
def decrypt_local_key(
ts: "datetime",
packet: bytes,
kdk_hashes: List[bytes],
packet_hashes: List[bytes],
settings: "Settings",
l_queue: "local_key_queue",
) -> Tuple["datetime", bytes]:
"""Decrypt local key packet."""
while True:
kdk = get_b58_key(B58_LOCAL_KEY, settings)
kdk_hash = blake2b(kdk)
# Check if the key was an old one.
if kdk_hash in kdk_hashes:
m_print("Error: Entered an old local key decryption key.", delay=1)
continue
try:
plaintext = auth_and_decrypt(packet, kdk)
except nacl.exceptions.CryptoError:
ts, plaintext = process_local_key_buffer(kdk, l_queue)
protect_kdk(kdk)
# Cache hashes needed to recognize reissued local key packets and key decryption keys.
kdk_hashes.append(kdk_hash)
packet_hashes.append(blake2b(packet))
return ts, plaintext
def process_local_key(
ts: "datetime",
packet: bytes,
window_list: "WindowList",
contact_list: "ContactList",
key_list: "KeyList",
settings: "Settings",
kdk_hashes: List[bytes],
packet_hashes: List[bytes],
l_queue: "Queue[Tuple[datetime, bytes]]",
) -> None:
"""Decrypt local key packet and add local contact/keyset."""
first_local_key = not key_list.has_local_keyset()
try:
if blake2b(packet) in packet_hashes:
raise SoftError("Error: Received old local key packet.", output=False)
m_print("Local key setup", bold=True, head_clear=True, head=1, tail=1)
ts, plaintext = decrypt_local_key(
ts, packet, kdk_hashes, packet_hashes, settings, l_queue
)
# Add local contact to contact list database
contact_list.add_contact(LOCAL_PUBKEY,
LOCAL_NICK,
KEX_STATUS_LOCAL_KEY,
bytes(FINGERPRINT_LENGTH),
bytes(FINGERPRINT_LENGTH),
False, False, True)
contact_list.add_contact(
LOCAL_PUBKEY,
LOCAL_NICK,
KEX_STATUS_LOCAL_KEY,
bytes(FINGERPRINT_LENGTH),
bytes(FINGERPRINT_LENGTH),
False,
False,
True,
)
tx_mk, tx_hk, c_code = separate_headers(plaintext, 2 * [SYMMETRIC_KEY_LENGTH])
# Add local keyset to keyset database
key_list.add_keyset(onion_pub_key=LOCAL_PUBKEY,
tx_mk=tx_mk,
rx_mk=csprng(),
tx_hk=tx_hk,
rx_hk=csprng())
key_list.add_keyset(
onion_pub_key=LOCAL_PUBKEY,
tx_mk=tx_mk,
rx_mk=csprng(),
tx_hk=tx_hk,
rx_hk=csprng(),
)
# Cache hashes needed to recognize reissued local key packets and key decryption keys.
packet_hashes.append(packet_hash)
kdk_hashes.append(kdk_hash)
m_print(
[
"Local key successfully installed.",
f"Confirmation code (to Transmitter): {c_code.hex()}",
],
box=True,
head=1,
)
# Prevent leak of KDK via terminal history / clipboard
readline.clear_history()
os.system(RESET)
root = tkinter.Tk()
root.withdraw()
try:
if root.clipboard_get() == b58encode(kdk): # type: ignore
root.clipboard_clear() # type: ignore
except tkinter.TclError:
pass
root.destroy()
cmd_win = window_list.get_command_window()
m_print(["Local key successfully installed.",
f"Confirmation code (to Transmitter): {c_code.hex()}"], box=True, head=1)
if first_local_key:
window_list.active_win = cmd_win
local_win = window_list.get_local_window()
local_win.add_new(ts, "Added new local key.")
if bootstrap:
window_list.active_win = local_win
raise SoftError("Added new local key.", window=cmd_win, ts=ts, output=False)
except (EOFError, KeyboardInterrupt):
m_print("Local key setup aborted.", bold=True, tail_clear=True, delay=1, head=2)
if window_list.active_win is not None and not bootstrap:
if window_list.active_win is not None and not first_local_key:
window_list.active_win.redraw()
raise FunctionReturn("Local key setup aborted.", output=False)
raise SoftError("Local key setup aborted.", output=False)
def local_key_rdy(ts: 'datetime',
window_list: 'WindowList',
contact_list: 'ContactList') -> None:
def local_key_rdy(
ts: "datetime", window_list: "WindowList", contact_list: "ContactList"
) -> None:
"""Clear local key bootstrap process from the screen."""
message = "Successfully completed the local key setup."
local_win = window_list.get_local_window()
local_win.add_new(ts, message)
message = "Successfully completed the local key setup."
cmd_win = window_list.get_command_window()
cmd_win.add_new(ts, message)
m_print(message, bold=True, tail_clear=True, delay=1)
if contact_list.has_contacts():
if window_list.active_win is not None and window_list.active_win.type in [WIN_TYPE_CONTACT, WIN_TYPE_GROUP]:
if window_list.active_win is not None and window_list.active_win.type in [
WIN_TYPE_CONTACT,
WIN_TYPE_GROUP,
]:
window_list.active_win.redraw()
else:
m_print("Waiting for new contacts", bold=True, head=1, tail=1)
@ -192,36 +247,42 @@ def local_key_rdy(ts: 'datetime',
# ECDHE
def key_ex_ecdhe(packet: bytes,
ts: 'datetime',
window_list: 'WindowList',
contact_list: 'ContactList',
key_list: 'KeyList',
settings: 'Settings'
) -> None:
def key_ex_ecdhe(
packet: bytes,
ts: "datetime",
window_list: "WindowList",
contact_list: "ContactList",
key_list: "KeyList",
settings: "Settings",
) -> None:
"""Add contact and symmetric keys derived from X448 shared key."""
onion_pub_key, tx_mk, rx_mk, tx_hk, rx_hk, nick_bytes \
= separate_headers(packet, [ONION_SERVICE_PUBLIC_KEY_LENGTH] + 4*[SYMMETRIC_KEY_LENGTH])
onion_pub_key, tx_mk, rx_mk, tx_hk, rx_hk, nick_bytes = separate_headers(
packet, [ONION_SERVICE_PUBLIC_KEY_LENGTH] + 4 * [SYMMETRIC_KEY_LENGTH]
)
try:
nick = bytes_to_str(nick_bytes)
except (struct.error, UnicodeError):
raise FunctionReturn("Error: Received invalid contact data")
raise SoftError("Error: Received invalid contact data")
contact_list.add_contact(onion_pub_key, nick,
bytes(FINGERPRINT_LENGTH),
bytes(FINGERPRINT_LENGTH),
KEX_STATUS_NONE,
settings.log_messages_by_default,
settings.accept_files_by_default,
settings.show_notifications_by_default)
contact_list.add_contact(
onion_pub_key,
nick,
bytes(FINGERPRINT_LENGTH),
bytes(FINGERPRINT_LENGTH),
KEX_STATUS_NONE,
settings.log_messages_by_default,
settings.accept_files_by_default,
settings.show_notifications_by_default,
)
key_list.add_keyset(onion_pub_key, tx_mk, rx_mk, tx_hk, rx_hk)
message = f"Successfully added {nick}."
local_win = window_list.get_local_window()
local_win.add_new(ts, message)
message = f"Successfully added {nick}."
cmd_win = window_list.get_command_window()
cmd_win.add_new(ts, message)
c_code = blake2b(onion_pub_key, digest_size=CONFIRM_CODE_LENGTH)
m_print([message, f"Confirmation code (to Transmitter): {c_code.hex()}"], box=True)
@ -229,98 +290,120 @@ def key_ex_ecdhe(packet: bytes,
# PSK
def key_ex_psk_tx(packet: bytes,
ts: 'datetime',
window_list: 'WindowList',
contact_list: 'ContactList',
key_list: 'KeyList',
settings: 'Settings'
) -> None:
"""Add contact and Tx-PSKs."""
onion_pub_key, tx_mk, _, tx_hk, _, nick_bytes \
= separate_headers(packet, [ONION_SERVICE_PUBLIC_KEY_LENGTH] + 4*[SYMMETRIC_KEY_LENGTH])
def key_ex_psk_tx(
packet: bytes,
ts: "datetime",
window_list: "WindowList",
contact_list: "ContactList",
key_list: "KeyList",
settings: "Settings",
) -> None:
"""Add contact and Tx-PSKs."""
onion_pub_key, tx_mk, _, tx_hk, _, nick_bytes = separate_headers(
packet, [ONION_SERVICE_PUBLIC_KEY_LENGTH] + 4 * [SYMMETRIC_KEY_LENGTH]
)
try:
nick = bytes_to_str(nick_bytes)
except (struct.error, UnicodeError):
raise FunctionReturn("Error: Received invalid contact data")
raise SoftError("Error: Received invalid contact data")
contact_list.add_contact(onion_pub_key, nick,
bytes(FINGERPRINT_LENGTH),
bytes(FINGERPRINT_LENGTH),
KEX_STATUS_NO_RX_PSK,
settings.log_messages_by_default,
settings.accept_files_by_default,
settings.show_notifications_by_default)
contact_list.add_contact(
onion_pub_key,
nick,
bytes(FINGERPRINT_LENGTH),
bytes(FINGERPRINT_LENGTH),
KEX_STATUS_NO_RX_PSK,
settings.log_messages_by_default,
settings.accept_files_by_default,
settings.show_notifications_by_default,
)
# The Rx-side keys are set as null-byte strings to indicate they have not
# been added yet. The zero-keys do not allow existential forgeries as
# `decrypt_assembly_packet`does not allow the use of zero-keys for decryption.
key_list.add_keyset(onion_pub_key=onion_pub_key,
tx_mk=tx_mk,
rx_mk=bytes(SYMMETRIC_KEY_LENGTH),
tx_hk=tx_hk,
rx_hk=bytes(SYMMETRIC_KEY_LENGTH))
key_list.add_keyset(
onion_pub_key=onion_pub_key,
tx_mk=tx_mk,
rx_mk=bytes(SYMMETRIC_KEY_LENGTH),
tx_hk=tx_hk,
rx_hk=bytes(SYMMETRIC_KEY_LENGTH),
)
c_code = blake2b(onion_pub_key, digest_size=CONFIRM_CODE_LENGTH)
message = f"Added Tx-side PSK for {nick} ({pub_key_to_short_address(onion_pub_key)})."
local_win = window_list.get_local_window()
local_win.add_new(ts, message)
c_code = blake2b(onion_pub_key, digest_size=CONFIRM_CODE_LENGTH)
message = (
f"Added Tx-side PSK for {nick} ({pub_key_to_short_address(onion_pub_key)})."
)
cmd_win = window_list.get_command_window()
cmd_win.add_new(ts, message)
m_print([message, f"Confirmation code (to Transmitter): {c_code.hex()}"], box=True)
def key_ex_psk_rx(packet: bytes,
ts: 'datetime',
window_list: 'WindowList',
contact_list: 'ContactList',
key_list: 'KeyList',
settings: 'Settings'
) -> None:
"""Import Rx-PSK of contact."""
c_code, onion_pub_key = separate_header(packet, CONFIRM_CODE_LENGTH)
short_addr = pub_key_to_short_address(onion_pub_key)
if not contact_list.has_pub_key(onion_pub_key):
raise FunctionReturn(f"Error: Unknown account '{short_addr}'.", head_clear=True)
contact = contact_list.get_contact_by_pub_key(onion_pub_key)
psk_file = ask_path_gui(f"Select PSK for {contact.nick} ({short_addr})", settings, get_file=True)
try:
with open(psk_file, 'rb') as f:
psk_data = f.read()
except PermissionError:
raise FunctionReturn("Error: No read permission for the PSK file.")
if len(psk_data) != PSK_FILE_SIZE:
raise FunctionReturn("Error: The PSK data in the file was invalid.", head_clear=True)
salt, ct_tag = separate_header(psk_data, ARGON2_SALT_LENGTH)
def decrypt_rx_psk(ct_tag: bytes, salt: bytes) -> bytes:
"""Get PSK password from user and decrypt Rx-PSK."""
while True:
try:
password = MasterKey.get_password("PSK password")
phase("Deriving the key decryption key", head=2)
kdk = argon2_kdf(password, salt, ARGON2_PSK_TIME_COST, ARGON2_PSK_MEMORY_COST, ARGON2_PSK_PARALLELISM)
kdk = argon2_kdf(
password,
salt,
ARGON2_PSK_TIME_COST,
ARGON2_PSK_MEMORY_COST,
ARGON2_PSK_PARALLELISM,
)
psk = auth_and_decrypt(ct_tag, kdk)
phase(DONE)
break
return psk
except nacl.exceptions.CryptoError:
print_on_previous_line()
m_print("Invalid password. Try again.", head=1)
print_on_previous_line(reps=5, delay=1)
except (EOFError, KeyboardInterrupt):
raise FunctionReturn("PSK import aborted.", head=2, delay=1, tail_clear=True)
raise SoftError("PSK import aborted.", head=2, delay=1, tail_clear=True)
def key_ex_psk_rx(
packet: bytes,
ts: "datetime",
window_list: "WindowList",
contact_list: "ContactList",
key_list: "KeyList",
settings: "Settings",
) -> None:
"""Import Rx-PSK of contact."""
c_code, onion_pub_key = separate_header(packet, CONFIRM_CODE_LENGTH)
short_addr = pub_key_to_short_address(onion_pub_key)
if not contact_list.has_pub_key(onion_pub_key):
raise SoftError(f"Error: Unknown account '{short_addr}'.", head_clear=True)
contact = contact_list.get_contact_by_pub_key(onion_pub_key)
psk_file = ask_path_gui(
f"Select PSK for {contact.nick} ({short_addr})", settings, get_file=True
)
try:
with open(psk_file, "rb") as f:
psk_data = f.read()
except PermissionError:
raise SoftError("Error: No read permission for the PSK file.")
if len(psk_data) != PSK_FILE_SIZE:
raise SoftError("Error: The PSK data in the file was invalid.", head_clear=True)
salt, ct_tag = separate_header(psk_data, ARGON2_SALT_LENGTH)
psk = decrypt_rx_psk(ct_tag, salt)
rx_mk, rx_hk = separate_header(psk, SYMMETRIC_KEY_LENGTH)
if any(k == bytes(SYMMETRIC_KEY_LENGTH) for k in [rx_mk, rx_hk]):
raise FunctionReturn("Error: Received invalid keys from contact.", head_clear=True)
raise SoftError("Error: Received invalid keys from contact.", head_clear=True)
keyset = key_list.get_keyset(onion_pub_key)
keyset = key_list.get_keyset(onion_pub_key)
keyset.rx_mk = rx_mk
keyset.rx_hk = rx_hk
key_list.store_keys()
@ -332,14 +415,27 @@ def key_ex_psk_rx(packet: bytes,
# the program itself, and therefore trusted, but it's still good practice.
subprocess.Popen(f"shred -n 3 -z -u {pipes.quote(psk_file)}", shell=True).wait()
if os.path.isfile(psk_file):
m_print(f"Warning! Overwriting of PSK ({psk_file}) failed. Press <Enter> to continue.",
manual_proceed=True, box=True)
m_print(
f"Warning! Overwriting of PSK ({psk_file}) failed. Press <Enter> to continue.",
manual_proceed=True,
box=True,
)
message = f"Added Rx-side PSK for {contact.nick} ({short_addr})."
local_win = window_list.get_local_window()
local_win.add_new(ts, message)
message = f"Added Rx-side PSK for {contact.nick} ({short_addr})."
cmd_win = window_list.get_command_window()
cmd_win.add_new(ts, message)
m_print([message, '', "Warning!",
"Physically destroy the keyfile transmission media ",
"to ensure it does not steal data from this computer!", '',
f"Confirmation code (to Transmitter): {c_code.hex()}"], box=True, head=1, tail=1)
m_print(
[
message,
"",
"Warning!",
"Physically destroy the keyfile transmission media ",
"to ensure it does not steal data from this computer!",
"",
f"Confirmation code (to Transmitter): {c_code.hex()}",
],
box=True,
head=1,
tail=1,
)

View File

@ -24,147 +24,276 @@ import typing
from typing import Dict
from src.common.db_logs import write_log_entry
from src.common.encoding import bytes_to_bool
from src.common.exceptions import FunctionReturn
from src.common.misc import separate_header, separate_headers
from src.common.statics import (ASSEMBLY_PACKET_HEADER_LENGTH, BLAKE2_DIGEST_LENGTH, FILE, FILE_KEY_HEADER,
GROUP_ID_LENGTH, GROUP_MESSAGE_HEADER, GROUP_MSG_ID_LENGTH, LOCAL_PUBKEY, MESSAGE,
MESSAGE_HEADER_LENGTH, ONION_SERVICE_PUBLIC_KEY_LENGTH, ORIGIN_CONTACT_HEADER,
ORIGIN_HEADER_LENGTH, ORIGIN_USER_HEADER, PLACEHOLDER_DATA, PRIVATE_MESSAGE_HEADER,
SYMMETRIC_KEY_LENGTH, WHISPER_FIELD_LENGTH)
from src.common.db_logs import write_log_entry
from src.common.encoding import bytes_to_bool
from src.common.exceptions import SoftError
from src.common.misc import separate_header, separate_headers
from src.common.statics import (
ASSEMBLY_PACKET_HEADER_LENGTH,
BLAKE2_DIGEST_LENGTH,
FILE,
FILE_KEY_HEADER,
GROUP_ID_LENGTH,
GROUP_MESSAGE_HEADER,
GROUP_MSG_ID_LENGTH,
LOCAL_PUBKEY,
MESSAGE,
MESSAGE_HEADER_LENGTH,
ONION_SERVICE_PUBLIC_KEY_LENGTH,
ORIGIN_CONTACT_HEADER,
ORIGIN_HEADER_LENGTH,
ORIGIN_USER_HEADER,
PLACEHOLDER_DATA,
PRIVATE_MESSAGE_HEADER,
SYMMETRIC_KEY_LENGTH,
WHISPER_FIELD_LENGTH,
)
from src.receiver.packet import decrypt_assembly_packet
if typing.TYPE_CHECKING:
from datetime import datetime
from src.common.database import MessageLog
from src.common.db_contacts import ContactList
from src.common.db_groups import GroupList
from src.common.db_keys import KeyList
from src.common.db_settings import Settings
from src.receiver.packet import PacketList
from src.receiver.windows import WindowList
from datetime import datetime
from src.common.database import MessageLog
from src.common.db_contacts import ContactList
from src.common.db_groups import GroupList
from src.common.db_keys import KeyList
from src.common.db_settings import Settings
from src.receiver.packet import Packet, PacketList
from src.receiver.windows import WindowList
def process_message(ts: 'datetime',
assembly_packet_ct: bytes,
window_list: 'WindowList',
packet_list: 'PacketList',
contact_list: 'ContactList',
key_list: 'KeyList',
group_list: 'GroupList',
settings: 'Settings',
file_keys: Dict[bytes, bytes],
message_log: 'MessageLog'
) -> None:
"""Process received private / group message."""
local_window = window_list.get_local_window()
def log_masking_packets(
onion_pub_key: bytes, # Onion address of associated contact
origin: bytes, # Origin of packet (user / contact)
logging: bool, # When True, message will be logged
settings: "Settings", # Settings object
packet: "Packet", # Packet object
message_log: "MessageLog", # MessageLog object
completed: bool = False, # When True, logs placeholder data for completed message
) -> None:
"""Add masking packets to log file.
onion_pub_key, origin, assembly_packet_ct = separate_headers(assembly_packet_ct, [ONION_SERVICE_PUBLIC_KEY_LENGTH,
ORIGIN_HEADER_LENGTH])
If logging and log file masking are enabled, this function will
in case of erroneous transmissions, store the correct number of
placeholder data packets to log file to hide the quantity of
communication that log file observation would otherwise reveal.
"""
if logging and settings.log_file_masking and (packet.log_masking_ctr or completed):
no_masking_packets = (
len(packet.assembly_pt_list) if completed else packet.log_masking_ctr
)
for _ in range(no_masking_packets):
write_log_entry(PLACEHOLDER_DATA, onion_pub_key, message_log, origin)
packet.log_masking_ctr = 0
def process_message_packet(
ts: "datetime", # Timestamp of received message packet
assembly_packet_ct: bytes, # Encrypted assembly packet
window_list: "WindowList", # WindowList object
packet_list: "PacketList", # PacketList object
contact_list: "ContactList", # ContactList object
key_list: "KeyList", # KeyList object
group_list: "GroupList", # GroupList object
settings: "Settings", # Settings object
file_keys: Dict[bytes, bytes], # Dictionary of file decryption keys
message_log: "MessageLog", # MessageLog object
) -> None:
"""Process received message packet."""
command_window = window_list.get_command_window()
onion_pub_key, origin, assembly_packet_ct = separate_headers(
assembly_packet_ct, [ONION_SERVICE_PUBLIC_KEY_LENGTH, ORIGIN_HEADER_LENGTH]
)
if onion_pub_key == LOCAL_PUBKEY:
raise FunctionReturn("Warning! Received packet masqueraded as a command.", window=local_window)
raise SoftError(
"Warning! Received packet masqueraded as a command.", window=command_window
)
if origin not in [ORIGIN_USER_HEADER, ORIGIN_CONTACT_HEADER]:
raise FunctionReturn("Error: Received packet had an invalid origin-header.", window=local_window)
raise SoftError(
"Error: Received packet had an invalid origin-header.",
window=command_window,
)
assembly_packet = decrypt_assembly_packet(assembly_packet_ct, onion_pub_key, origin,
window_list, contact_list, key_list)
assembly_packet = decrypt_assembly_packet(
assembly_packet_ct, onion_pub_key, origin, window_list, contact_list, key_list
)
p_type = FILE if assembly_packet[:ASSEMBLY_PACKET_HEADER_LENGTH].isupper() else MESSAGE
packet = packet_list.get_packet(onion_pub_key, origin, p_type)
p_type = (
FILE if assembly_packet[:ASSEMBLY_PACKET_HEADER_LENGTH].isupper() else MESSAGE
)
packet = packet_list.get_packet(onion_pub_key, origin, p_type)
logging = contact_list.get_contact_by_pub_key(onion_pub_key).log_messages
def log_masking_packets(completed: bool = False) -> None:
"""Add masking packets to log file.
If logging and log file masking are enabled, this function will
in case of erroneous transmissions, store the correct number of
placeholder data packets to log file to hide the quantity of
communication that log file observation would otherwise reveal.
"""
if logging and settings.log_file_masking and (packet.log_masking_ctr or completed):
no_masking_packets = len(packet.assembly_pt_list) if completed else packet.log_masking_ctr
for _ in range(no_masking_packets):
write_log_entry(PLACEHOLDER_DATA, onion_pub_key, message_log, origin)
packet.log_masking_ctr = 0
try:
packet.add_packet(assembly_packet)
except FunctionReturn:
log_masking_packets()
except SoftError:
log_masking_packets(
onion_pub_key, origin, logging, settings, packet, message_log
)
raise
log_masking_packets()
log_masking_packets(onion_pub_key, origin, logging, settings, packet, message_log)
if not packet.is_complete:
return None
if packet.is_complete:
process_complete_message_packet(
ts,
onion_pub_key,
p_type,
origin,
logging,
packet,
window_list,
contact_list,
group_list,
settings,
message_log,
file_keys,
)
def process_complete_message_packet(
ts: "datetime", # Timestamp of received message packet
onion_pub_key: bytes, # Onion address of associated contact
p_type: str, # Packet type (file, message)
origin: bytes, # Origin of packet (user / contact)
logging: bool, # When True, message will be logged
packet: "Packet", # Packet object
window_list: "WindowList", # WindowList object
contact_list: "ContactList", # ContactList object
group_list: "GroupList", # GroupList object
settings: "Settings", # Settings object
message_log: "MessageLog", # MessageLog object
file_keys: Dict[bytes, bytes], # Dictionary of file decryption keys
) -> None:
"""Process complete message packet.
The assembled message packet might contain a file if the sender
has traffic masking enabled, or it might contain other data.
"""
try:
if p_type == FILE:
packet.assemble_and_store_file(ts, onion_pub_key, window_list)
raise FunctionReturn("File storage complete.", output=False) # Raising allows calling log_masking_packets
raise SoftError(
"File storage complete.", output=False
) # Raising allows calling log_masking_packets
elif p_type == MESSAGE:
whisper_byte, header, assembled = separate_headers(packet.assemble_message_packet(),
[WHISPER_FIELD_LENGTH, MESSAGE_HEADER_LENGTH])
if len(whisper_byte) != WHISPER_FIELD_LENGTH:
raise FunctionReturn("Error: Message from contact had an invalid whisper header.")
if p_type == MESSAGE:
process_message(
ts,
onion_pub_key,
origin,
logging,
packet,
window_list,
contact_list,
group_list,
message_log,
file_keys,
)
whisper = bytes_to_bool(whisper_byte)
if header == GROUP_MESSAGE_HEADER:
logging = process_group_message(assembled, ts, onion_pub_key, origin, whisper, group_list, window_list)
elif header == PRIVATE_MESSAGE_HEADER:
window = window_list.get_window(onion_pub_key)
window.add_new(ts, assembled.decode(), onion_pub_key, origin, output=True, whisper=whisper)
elif header == FILE_KEY_HEADER:
nick = process_file_key_message(assembled, onion_pub_key, origin, contact_list, file_keys)
raise FunctionReturn(f"Received file decryption key from {nick}", window=local_window)
else:
raise FunctionReturn("Error: Message from contact had an invalid header.")
# Logging
if whisper:
raise FunctionReturn("Whisper message complete.", output=False)
if logging:
for p in packet.assembly_pt_list:
write_log_entry(p, onion_pub_key, message_log, origin)
except (FunctionReturn, UnicodeError):
log_masking_packets(completed=True)
except (SoftError, UnicodeError):
log_masking_packets(
onion_pub_key,
origin,
logging,
settings,
packet,
message_log,
completed=True,
)
raise
finally:
packet.clear_assembly_packets()
def process_group_message(assembled: bytes, # Group message and its headers
ts: 'datetime', # Timestamp of group message
onion_pub_key: bytes, # Onion address of associated contact
origin: bytes, # Origin of group message (user / contact)
whisper: bool, # When True, message is not logged.
group_list: 'GroupList', # GroupList object
window_list: 'WindowList' # WindowList object
) -> bool:
def process_message(
ts: "datetime", # Timestamp of received message packet
onion_pub_key: bytes, # Onion address of associated contact
origin: bytes, # Origin of message (user / contact)
logging: bool, # When True, message will be logged
packet: "Packet", # Packet object
window_list: "WindowList", # WindowList object
contact_list: "ContactList", # ContactList object
group_list: "GroupList", # GroupList object
message_log: "MessageLog", # MessageLog object
file_keys: Dict[bytes, bytes], # Dictionary of file decryption keys
) -> None:
"""Process message packet.
The received message might be a private or group message, or it
might contain decryption key for file received earlier.
Each received message contains a whisper header that allows the
sender to request the message to not be logged. This request will
be obeyed as long as the recipient does not edit the source code
below. Thus, the sender should not trust a whisper message is
never logged.
"""
whisper_byte, header, assembled = separate_headers(
packet.assemble_message_packet(), [WHISPER_FIELD_LENGTH, MESSAGE_HEADER_LENGTH]
)
if len(whisper_byte) != WHISPER_FIELD_LENGTH:
raise SoftError("Error: Message from contact had an invalid whisper header.")
whisper = bytes_to_bool(whisper_byte)
if header == GROUP_MESSAGE_HEADER:
logging = process_group_message(
ts, assembled, onion_pub_key, origin, whisper, group_list, window_list
)
elif header == PRIVATE_MESSAGE_HEADER:
window = window_list.get_window(onion_pub_key)
window.add_new(
ts, assembled.decode(), onion_pub_key, origin, output=True, whisper=whisper
)
elif header == FILE_KEY_HEADER:
nick = process_file_key_message(
assembled, onion_pub_key, origin, contact_list, file_keys
)
raise SoftError(
f"Received file decryption key from {nick}",
window=window_list.get_command_window(),
)
else:
raise SoftError("Error: Message from contact had an invalid header.")
# Logging
if whisper:
raise SoftError("Whisper message complete.", output=False)
if logging:
for p in packet.assembly_pt_list:
write_log_entry(p, onion_pub_key, message_log, origin)
def process_group_message(
ts: "datetime", # Timestamp of group message
assembled: bytes, # Group message and its headers
onion_pub_key: bytes, # Onion address of associated contact
origin: bytes, # Origin of group message (user / contact)
whisper: bool, # When True, message is not logged.
group_list: "GroupList", # GroupList object
window_list: "WindowList", # WindowList object
) -> bool:
"""Process a group message."""
group_id, assembled = separate_header(assembled, GROUP_ID_LENGTH)
if not group_list.has_group_id(group_id):
raise FunctionReturn("Error: Received message to an unknown group.", output=False)
raise SoftError("Error: Received message to an unknown group.", output=False)
group = group_list.get_group_by_id(group_id)
if not group.has_member(onion_pub_key):
raise FunctionReturn("Error: Account is not a member of the group.", output=False)
raise SoftError("Error: Account is not a member of the group.", output=False)
group_msg_id, group_message = separate_header(assembled, GROUP_MSG_ID_LENGTH)
try:
group_message_str = group_message.decode()
except UnicodeError:
raise FunctionReturn("Error: Received an invalid group message.")
raise SoftError("Error: Received an invalid group message.")
window = window_list.get_window(group.group_id)
@ -174,33 +303,45 @@ def process_group_message(assembled: bytes, # Group message and its h
if origin == ORIGIN_USER_HEADER:
if window.group_msg_id != group_msg_id:
window.group_msg_id = group_msg_id
window.add_new(ts, group_message_str, onion_pub_key, origin, output=True, whisper=whisper)
window.add_new(
ts,
group_message_str,
onion_pub_key,
origin,
output=True,
whisper=whisper,
)
elif origin == ORIGIN_CONTACT_HEADER:
window.add_new(ts, group_message_str, onion_pub_key, origin, output=True, whisper=whisper)
window.add_new(
ts, group_message_str, onion_pub_key, origin, output=True, whisper=whisper
)
# Return the group's logging setting because it might be different
# from the logging setting of the contact who sent group message.
return group.log_messages
def process_file_key_message(assembled: bytes, # File decryption key
onion_pub_key: bytes, # Onion address of associated contact
origin: bytes, # Origin of file key packet (user / contact)
contact_list: 'ContactList', # ContactList object
file_keys: Dict[bytes, bytes] # Dictionary of file identifiers and decryption keys
) -> str:
def process_file_key_message(
assembled: bytes, # File decryption key
onion_pub_key: bytes, # Onion address of associated contact
origin: bytes, # Origin of file key packet (user / contact)
contact_list: "ContactList", # ContactList object
file_keys: Dict[bytes, bytes], # Dictionary of file identifiers and decryption keys
) -> str:
"""Process received file key delivery message."""
if origin == ORIGIN_USER_HEADER:
raise FunctionReturn("File key message from the user.", output=False)
raise SoftError("File key message from the user.", output=False)
try:
decoded = base64.b85decode(assembled)
except ValueError:
raise FunctionReturn("Error: Received an invalid file key message.")
raise SoftError("Error: Received an invalid file key message.")
ct_hash, file_key = separate_header(decoded, BLAKE2_DIGEST_LENGTH)
if len(ct_hash) != BLAKE2_DIGEST_LENGTH or len(file_key) != SYMMETRIC_KEY_LENGTH:
raise FunctionReturn("Error: Received an invalid file key message.")
raise SoftError("Error: Received an invalid file key message.")
file_keys[onion_pub_key + ct_hash] = file_key
nick = contact_list.get_nick_by_pub_key(onion_pub_key)

View File

@ -26,55 +26,60 @@ import typing
from typing import Any, Dict, List, Tuple
from src.common.database import MessageLog
from src.common.exceptions import FunctionReturn
from src.common.output import clear_screen
from src.common.statics import (COMMAND_DATAGRAM_HEADER, EXIT_QUEUE, FILE_DATAGRAM_HEADER, LOCAL_KEY_DATAGRAM_HEADER,
MESSAGE_DATAGRAM_HEADER, ONION_SERVICE_PUBLIC_KEY_LENGTH, UNIT_TEST_QUEUE,
WIN_UID_FILE)
from src.common.database import MessageLog
from src.common.exceptions import SoftError
from src.common.output import clear_screen
from src.common.statics import (
COMMAND_DATAGRAM_HEADER,
EXIT_QUEUE,
FILE_DATAGRAM_HEADER,
LOCAL_KEY_DATAGRAM_HEADER,
MESSAGE_DATAGRAM_HEADER,
ONION_SERVICE_PUBLIC_KEY_LENGTH,
UNIT_TEST_QUEUE,
)
from src.receiver.commands import process_command
from src.receiver.files import new_file, process_file
from src.receiver.commands import process_command
from src.receiver.files import new_file, process_file
from src.receiver.key_exchanges import process_local_key
from src.receiver.messages import process_message
from src.receiver.packet import PacketList
from src.receiver.windows import WindowList
from src.receiver.messages import process_message_packet
from src.receiver.packet import PacketList
from src.receiver.windows import WindowList
if typing.TYPE_CHECKING:
from datetime import datetime
from multiprocessing import Queue
from src.common.db_contacts import ContactList
from src.common.db_groups import GroupList
from src.common.db_keys import KeyList
from datetime import datetime
from multiprocessing import Queue
from src.common.db_contacts import ContactList
from src.common.db_groups import GroupList
from src.common.db_keys import KeyList
from src.common.db_masterkey import MasterKey
from src.common.db_settings import Settings
from src.common.gateway import Gateway
from src.common.db_settings import Settings
from src.common.gateway import Gateway
packet_buffer_type = Dict[bytes, List[Tuple[datetime, bytes]]]
file_buffer_type = Dict[bytes, Tuple[datetime, bytes]]
file_keys_type = Dict[bytes, bytes]
queue_dict = Dict[bytes, Queue[Any]]
def output_loop(queues: Dict[bytes, 'Queue[Any]'],
gateway: 'Gateway',
settings: 'Settings',
contact_list: 'ContactList',
key_list: 'KeyList',
group_list: 'GroupList',
master_key: 'MasterKey',
message_log: 'MessageLog',
stdin_fd: int,
unit_test: bool = False
) -> None:
def output_loop(
queues: Dict[bytes, "Queue[Any]"],
gateway: "Gateway",
settings: "Settings",
contact_list: "ContactList",
key_list: "KeyList",
group_list: "GroupList",
master_key: "MasterKey",
message_log: "MessageLog",
stdin_fd: int,
unit_test: bool = False,
) -> None:
"""Process packets in message queues according to their priority."""
local_key_queue = queues[LOCAL_KEY_DATAGRAM_HEADER]
message_queue = queues[MESSAGE_DATAGRAM_HEADER]
file_queue = queues[FILE_DATAGRAM_HEADER]
command_queue = queues[COMMAND_DATAGRAM_HEADER]
exit_queue = queues[EXIT_QUEUE]
sys.stdin = os.fdopen(stdin_fd)
packet_buffer = dict() # type: Dict[bytes, List[Tuple[datetime, bytes]]]
file_buffer = dict() # type: Dict[bytes, Tuple[datetime, bytes]]
file_keys = dict() # type: Dict[bytes, bytes]
kdk_hashes = [] # type: List[bytes]
sys.stdin = os.fdopen(stdin_fd)
packet_buffer = dict() # type: packet_buffer_type
file_buffer = dict() # type: file_buffer_type
file_keys = dict() # type: file_keys_type
kdk_hashes = [] # type: List[bytes]
packet_hashes = [] # type: List[bytes]
packet_list = PacketList(settings, contact_list)
@ -83,75 +88,267 @@ def output_loop(queues: Dict[bytes, 'Queue[Any]'],
clear_screen()
while True:
try:
if local_key_queue.qsize() != 0:
ts, packet = local_key_queue.get()
process_local_key(ts, packet, window_list, contact_list, key_list,
settings, kdk_hashes, packet_hashes, local_key_queue)
continue
if not contact_list.has_local_contact():
time.sleep(0.1)
continue
# Local key packets
process_local_key_queue(
queues,
window_list,
contact_list,
key_list,
settings,
kdk_hashes,
packet_hashes,
)
# Commands
if command_queue.qsize() != 0:
ts, packet = command_queue.get()
process_command(ts, packet, window_list, packet_list, contact_list, key_list,
group_list, settings, master_key, gateway, exit_queue)
continue
process_command_queue(
queues,
window_list,
contact_list,
group_list,
key_list,
settings,
packet_list,
master_key,
gateway,
)
# File window refresh
if window_list.active_win is not None and window_list.active_win.uid == WIN_UID_FILE:
window_list.active_win.redraw_file_win()
window_list.refresh_file_window_check()
# Cached message packets
for onion_pub_key in packet_buffer:
if (contact_list.has_pub_key(onion_pub_key)
and key_list.has_rx_mk(onion_pub_key)
and packet_buffer[onion_pub_key]):
ts, packet = packet_buffer[onion_pub_key].pop(0)
process_message(ts, packet, window_list, packet_list, contact_list, key_list,
group_list, settings, file_keys, message_log)
continue
# Cached messages
process_cached_messages(
window_list,
contact_list,
group_list,
key_list,
settings,
packet_list,
message_log,
file_keys,
packet_buffer,
)
# New messages
if message_queue.qsize() != 0:
ts, packet = message_queue.get()
onion_pub_key = packet[:ONION_SERVICE_PUBLIC_KEY_LENGTH]
if contact_list.has_pub_key(onion_pub_key) and key_list.has_rx_mk(onion_pub_key):
process_message(ts, packet, window_list, packet_list, contact_list, key_list,
group_list, settings, file_keys, message_log)
else:
packet_buffer.setdefault(onion_pub_key, []).append((ts, packet))
continue
process_message_queue(
queues,
window_list,
contact_list,
group_list,
key_list,
settings,
packet_list,
message_log,
file_keys,
packet_buffer,
)
# Cached files
if file_buffer:
for k in file_buffer:
key_to_remove = b''
try:
if k in file_keys:
key_to_remove = k
ts_, file_ct = file_buffer[k]
dec_key = file_keys[k]
onion_pub_key = k[:ONION_SERVICE_PUBLIC_KEY_LENGTH]
process_file(ts_, onion_pub_key, file_ct, dec_key, contact_list, window_list, settings)
finally:
if key_to_remove:
file_buffer.pop(k)
file_keys.pop(k)
break
process_cached_files(
window_list, contact_list, settings, file_keys, file_buffer
)
# New files
if file_queue.qsize() != 0:
ts, packet = file_queue.get()
new_file(ts, packet, file_keys, file_buffer, contact_list, window_list, settings)
process_file_queue(
queues, window_list, contact_list, settings, file_keys, file_buffer
)
time.sleep(0.01)
if unit_test and queues[UNIT_TEST_QUEUE].qsize() != 0:
break
except (FunctionReturn, KeyError, KeyboardInterrupt):
except (KeyError, KeyboardInterrupt, SoftError):
pass
def process_local_key_queue(
queues: "queue_dict",
window_list: "WindowList",
contact_list: "ContactList",
key_list: "KeyList",
settings: "Settings",
kdk_hashes: List[bytes],
packet_hashes: List[bytes],
) -> None:
"""Check local key queue for packets.
This function also checks that local key is installed.
"""
local_key_queue = queues[LOCAL_KEY_DATAGRAM_HEADER]
if local_key_queue.qsize():
ts, packet = local_key_queue.get()
process_local_key(
ts,
packet,
window_list,
contact_list,
key_list,
settings,
kdk_hashes,
packet_hashes,
local_key_queue,
)
if not contact_list.has_local_contact():
time.sleep(0.1)
raise SoftError("No local key", output=False)
def process_command_queue(
queues: "queue_dict",
window_list: "WindowList",
contact_list: "ContactList",
group_list: "GroupList",
key_list: "KeyList",
settings: "Settings",
packet_list: "PacketList",
master_key: "MasterKey",
gateway: "Gateway",
) -> None:
"""Check command queue for packets."""
command_queue = queues[COMMAND_DATAGRAM_HEADER]
exit_queue = queues[EXIT_QUEUE]
if command_queue.qsize():
ts, packet = command_queue.get()
process_command(
ts,
packet,
window_list,
packet_list,
contact_list,
key_list,
group_list,
settings,
master_key,
gateway,
exit_queue,
)
def process_cached_messages(
window_list: "WindowList",
contact_list: "ContactList",
group_list: "GroupList",
key_list: "KeyList",
settings: "Settings",
packet_list: "PacketList",
message_log: "MessageLog",
file_keys: "file_keys_type",
packet_buffer: "packet_buffer_type",
) -> None:
"""Process cached message packets."""
for onion_pub_key in packet_buffer:
if (
contact_list.has_pub_key(onion_pub_key)
and key_list.has_rx_mk(onion_pub_key)
and packet_buffer[onion_pub_key]
):
ts, packet = packet_buffer[onion_pub_key].pop(0)
process_message_packet(
ts,
packet,
window_list,
packet_list,
contact_list,
key_list,
group_list,
settings,
file_keys,
message_log,
)
raise SoftError("Cached message processing complete.", output=False)
def process_message_queue(
queues: "queue_dict",
window_list: "WindowList",
contact_list: "ContactList",
group_list: "GroupList",
key_list: "KeyList",
settings: "Settings",
packet_list: "PacketList",
message_log: "MessageLog",
file_keys: "file_keys_type",
packet_buffer: "packet_buffer_type",
) -> None:
"""Check message queue for packets."""
message_queue = queues[MESSAGE_DATAGRAM_HEADER]
if message_queue.qsize():
ts, packet = message_queue.get()
onion_pub_key = packet[:ONION_SERVICE_PUBLIC_KEY_LENGTH]
if contact_list.has_pub_key(onion_pub_key) and key_list.has_rx_mk(
onion_pub_key
):
process_message_packet(
ts,
packet,
window_list,
packet_list,
contact_list,
key_list,
group_list,
settings,
file_keys,
message_log,
)
else:
packet_buffer.setdefault(onion_pub_key, []).append((ts, packet))
raise SoftError("Message processing complete.", output=False)
def process_cached_files(
window_list: "WindowList",
contact_list: "ContactList",
settings: "Settings",
file_keys: "file_keys_type",
file_buffer: "file_buffer_type",
) -> None:
"""Check if file key has been received for cached file packet."""
if file_buffer:
for k in file_buffer:
key_to_remove = b""
try:
if k in file_keys:
key_to_remove = k
ts_, file_ct = file_buffer[k]
dec_key = file_keys[k]
onion_pub_key = k[:ONION_SERVICE_PUBLIC_KEY_LENGTH]
process_file(
ts_,
onion_pub_key,
file_ct,
dec_key,
contact_list,
window_list,
settings,
)
finally:
if key_to_remove:
file_buffer.pop(k)
file_keys.pop(k)
raise SoftError("Cached file processing complete.", output=False)
def process_file_queue(
queues: "queue_dict",
window_list: "WindowList",
contact_list: "ContactList",
settings: "Settings",
file_keys: "file_keys_type",
file_buffer: "file_buffer_type",
) -> None:
"""Check file queue for packets."""
file_queue = queues[FILE_DATAGRAM_HEADER]
if file_queue.qsize():
ts, packet = file_queue.get()
new_file(
ts, packet, file_keys, file_buffer, contact_list, window_list, settings
)
raise SoftError("File processing complete.", output=False)

View File

@ -24,64 +24,107 @@ import typing
import zlib
from datetime import datetime, timedelta
from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, Sized
from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, Sized
import nacl.exceptions
from src.common.crypto import auth_and_decrypt, blake2b, rm_padding_bytes
from src.common.encoding import bytes_to_int, int_to_bytes
from src.common.exceptions import FunctionReturn
from src.common.input import yes
from src.common.misc import decompress, readable_size, separate_header, separate_headers, separate_trailer
from src.common.output import m_print
from src.common.statics import (ASSEMBLY_PACKET_HEADER_LENGTH, BLAKE2_DIGEST_LENGTH, COMMAND, C_A_HEADER, C_C_HEADER,
C_E_HEADER, C_L_HEADER, C_N_HEADER, C_S_HEADER, ENCODED_INTEGER_LENGTH, FILE,
F_A_HEADER, F_C_HEADER, F_E_HEADER, F_L_HEADER, F_S_HEADER, HARAC_CT_LENGTH,
HARAC_WARN_THRESHOLD, LOCAL_PUBKEY, MAX_MESSAGE_SIZE, MESSAGE, M_A_HEADER,
M_C_HEADER, M_E_HEADER, M_L_HEADER, M_S_HEADER, ORIGIN_CONTACT_HEADER,
ORIGIN_USER_HEADER, P_N_HEADER, RX, SYMMETRIC_KEY_LENGTH, TX, US_BYTE)
from src.common.crypto import auth_and_decrypt, blake2b, rm_padding_bytes
from src.common.encoding import bytes_to_int, int_to_bytes
from src.common.exceptions import SoftError
from src.common.input import yes
from src.common.misc import (
decompress,
readable_size,
separate_header,
separate_headers,
separate_trailer,
)
from src.common.output import m_print
from src.common.statics import (
ASSEMBLY_PACKET_HEADER_LENGTH,
BLAKE2_DIGEST_LENGTH,
COMMAND,
C_A_HEADER,
C_C_HEADER,
C_E_HEADER,
C_L_HEADER,
C_N_HEADER,
C_S_HEADER,
ENCODED_INTEGER_LENGTH,
FILE,
F_A_HEADER,
F_C_HEADER,
F_E_HEADER,
F_L_HEADER,
F_S_HEADER,
HARAC_CT_LENGTH,
HARAC_WARN_THRESHOLD,
LOCAL_PUBKEY,
MAX_MESSAGE_SIZE,
MESSAGE,
M_A_HEADER,
M_C_HEADER,
M_E_HEADER,
M_L_HEADER,
M_S_HEADER,
ORIGIN_CONTACT_HEADER,
ORIGIN_USER_HEADER,
P_N_HEADER,
RX,
SYMMETRIC_KEY_LENGTH,
TX,
US_BYTE,
)
from src.receiver.files import process_assembled_file
if typing.TYPE_CHECKING:
from src.common.db_contacts import Contact, ContactList
from src.common.db_keys import KeyList
from src.common.db_keys import KeyList
from src.common.db_settings import Settings
from src.receiver.windows import RxWindow, WindowList
from src.receiver.windows import RxWindow, WindowList
def process_offset(offset: int, # Number of dropped packets
origin: bytes, # "to/from" preposition
direction: str, # Direction of packet
nick: str, # Nickname of associated contact
window: 'RxWindow' # RxWindow object
) -> None:
def process_offset(
offset: int, # Number of dropped packets
origin: bytes, # "to/from" preposition
direction: str, # Direction of packet
nick: str, # Nickname of associated contact
window: "RxWindow", # RxWindow object
) -> None:
"""Display warnings about increased offsets.
If the offset has increased over the threshold, ask the user to
confirm hash ratchet catch up.
"""
if offset > HARAC_WARN_THRESHOLD and origin == ORIGIN_CONTACT_HEADER:
m_print([f"Warning! {offset} packets from {nick} were not received.",
f"This might indicate that {offset} most recent packets were ",
f"lost during transmission, or that the contact is attempting ",
f"a DoS attack. You can wait for TFC to attempt to decrypt the ",
"packet, but it might take a very long time or even forever."])
m_print(
[
f"Warning! {offset} packets from {nick} were not received.",
f"This might indicate that {offset} most recent packets were ",
f"lost during transmission, or that the contact is attempting ",
f"a DoS attack. You can wait for TFC to attempt to decrypt the ",
"packet, but it might take a very long time or even forever.",
]
)
if not yes("Proceed with the decryption?", abort=False, tail=1):
raise FunctionReturn(f"Dropped packet from {nick}.", window=window)
raise SoftError(f"Dropped packet from {nick}.", window=window)
elif offset:
m_print(f"Warning! {offset} packet{'s' if offset > 1 else ''} {direction} {nick} were not received.")
m_print(
f"Warning! {offset} packet{'s' if offset > 1 else ''} {direction} {nick} were not received."
)
def decrypt_assembly_packet(packet: bytes, # Assembly packet ciphertext
onion_pub_key: bytes, # Onion Service pubkey of associated contact
origin: bytes, # Direction of packet
window_list: 'WindowList', # WindowList object
contact_list: 'ContactList', # ContactList object
key_list: 'KeyList' # Keylist object
) -> bytes: # Decrypted assembly packet
def decrypt_assembly_packet(
packet: bytes, # Assembly packet ciphertext
onion_pub_key: bytes, # Onion Service pubkey of associated contact
origin: bytes, # Direction of packet
window_list: "WindowList", # WindowList object
contact_list: "ContactList", # ContactList object
key_list: "KeyList", # Keylist object
) -> bytes: # Decrypted assembly packet
"""Decrypt assembly packet from contact/local Transmitter.
This function authenticates and decrypts incoming message and
@ -104,51 +147,62 @@ def decrypt_assembly_packet(packet: bytes, # Assembly packet cip
let alone processed.
"""
ct_harac, ct_assemby_packet = separate_header(packet, header_length=HARAC_CT_LENGTH)
local_window = window_list.get_local_window()
command = onion_pub_key == LOCAL_PUBKEY
cmd_win = window_list.get_command_window()
command = onion_pub_key == LOCAL_PUBKEY
p_type = "command" if command else "packet"
direction = "from" if command or (origin == ORIGIN_CONTACT_HEADER) else "sent to"
nick = contact_list.get_nick_by_pub_key(onion_pub_key)
p_type = "command" if command else "packet"
direction = "from" if command or (origin == ORIGIN_CONTACT_HEADER) else "sent to"
nick = contact_list.get_nick_by_pub_key(onion_pub_key)
# Load keys
keyset = key_list.get_keyset(onion_pub_key)
keyset = key_list.get_keyset(onion_pub_key)
key_dir = TX if origin == ORIGIN_USER_HEADER else RX
header_key = getattr(keyset, f'{key_dir}_hk') # type: bytes
message_key = getattr(keyset, f'{key_dir}_mk') # type: bytes
header_key = getattr(keyset, f"{key_dir}_hk") # type: bytes
message_key = getattr(keyset, f"{key_dir}_mk") # type: bytes
if any(k == bytes(SYMMETRIC_KEY_LENGTH) for k in [header_key, message_key]):
raise FunctionReturn("Warning! Loaded zero-key for packet decryption.")
raise SoftError("Warning! Loaded zero-key for packet decryption.")
# Decrypt hash ratchet counter
try:
harac_bytes = auth_and_decrypt(ct_harac, header_key)
except nacl.exceptions.CryptoError:
raise FunctionReturn(f"Warning! Received {p_type} {direction} {nick} had an invalid hash ratchet MAC.",
window=local_window)
raise SoftError(
f"Warning! Received {p_type} {direction} {nick} had an invalid hash ratchet MAC.",
window=cmd_win,
)
# Catch up with hash ratchet offset
purp_harac = bytes_to_int(harac_bytes)
stored_harac = getattr(keyset, f'{key_dir}_harac')
offset = purp_harac - stored_harac
purp_harac = bytes_to_int(harac_bytes)
stored_harac = getattr(keyset, f"{key_dir}_harac")
offset = purp_harac - stored_harac
if offset < 0:
raise FunctionReturn(f"Warning! Received {p_type} {direction} {nick} had an expired hash ratchet counter.",
window=local_window)
raise SoftError(
f"Warning! Received {p_type} {direction} {nick} had an expired hash ratchet counter.",
window=cmd_win,
)
process_offset(offset, origin, direction, nick, local_window)
process_offset(offset, origin, direction, nick, cmd_win)
for harac in range(stored_harac, stored_harac + offset):
message_key = blake2b(message_key + int_to_bytes(harac), digest_size=SYMMETRIC_KEY_LENGTH)
message_key = blake2b(
message_key + int_to_bytes(harac), digest_size=SYMMETRIC_KEY_LENGTH
)
# Decrypt packet
try:
assembly_packet = auth_and_decrypt(ct_assemby_packet, message_key)
except nacl.exceptions.CryptoError:
raise FunctionReturn(f"Warning! Received {p_type} {direction} {nick} had an invalid MAC.",
window=local_window)
raise SoftError(
f"Warning! Received {p_type} {direction} {nick} had an invalid MAC.",
window=cmd_win,
)
# Update message key and harac
new_key = blake2b(message_key + int_to_bytes(stored_harac + offset), digest_size=SYMMETRIC_KEY_LENGTH)
new_key = blake2b(
message_key + int_to_bytes(stored_harac + offset),
digest_size=SYMMETRIC_KEY_LENGTH,
)
keyset.update_mk(key_dir, new_key, offset + 1)
return assembly_packet
@ -157,38 +211,51 @@ def decrypt_assembly_packet(packet: bytes, # Assembly packet cip
class Packet(object):
"""Packet objects collect and keep track of received assembly packets."""
def __init__(self,
onion_pub_key: bytes, # Public key of the contact associated with the packet <─┐
origin: bytes, # Origin of packet (user, contact) <─┼─ Form packet UID
p_type: str, # Packet type (message, file, command) <─┘
contact: 'Contact', # Contact object of contact associated with the packet
settings: 'Settings' # Settings object
) -> None:
def __init__(
self,
onion_pub_key: bytes, # Public key of the contact associated with the packet <─┐
origin: bytes, # Origin of packet (user, contact) <─┼─ Form packet UID
p_type: str, # Packet type (message, file, command) <─┘
contact: "Contact", # Contact object of contact associated with the packet
settings: "Settings", # Settings object
) -> None:
"""Create a new Packet object."""
self.onion_pub_key = onion_pub_key
self.contact = contact
self.origin = origin
self.type = p_type
self.settings = settings
self.contact = contact
self.origin = origin
self.type = p_type
self.settings = settings
# File transmission metadata
self.packets = None # type: Optional[int]
self.time = None # type: Optional[str]
self.size = None # type: Optional[str]
self.name = None # type: Optional[str]
self.time = None # type: Optional[str]
self.size = None # type: Optional[str]
self.name = None # type: Optional[str]
self.sh = {MESSAGE: M_S_HEADER, FILE: F_S_HEADER, COMMAND: C_S_HEADER}[self.type]
self.lh = {MESSAGE: M_L_HEADER, FILE: F_L_HEADER, COMMAND: C_L_HEADER}[self.type]
self.ah = {MESSAGE: M_A_HEADER, FILE: F_A_HEADER, COMMAND: C_A_HEADER}[self.type]
self.eh = {MESSAGE: M_E_HEADER, FILE: F_E_HEADER, COMMAND: C_E_HEADER}[self.type]
self.ch = {MESSAGE: M_C_HEADER, FILE: F_C_HEADER, COMMAND: C_C_HEADER}[self.type]
self.nh = {MESSAGE: P_N_HEADER, FILE: P_N_HEADER, COMMAND: C_N_HEADER}[self.type]
self.sh = {MESSAGE: M_S_HEADER, FILE: F_S_HEADER, COMMAND: C_S_HEADER}[
self.type
]
self.lh = {MESSAGE: M_L_HEADER, FILE: F_L_HEADER, COMMAND: C_L_HEADER}[
self.type
]
self.ah = {MESSAGE: M_A_HEADER, FILE: F_A_HEADER, COMMAND: C_A_HEADER}[
self.type
]
self.eh = {MESSAGE: M_E_HEADER, FILE: F_E_HEADER, COMMAND: C_E_HEADER}[
self.type
]
self.ch = {MESSAGE: M_C_HEADER, FILE: F_C_HEADER, COMMAND: C_C_HEADER}[
self.type
]
self.nh = {MESSAGE: P_N_HEADER, FILE: P_N_HEADER, COMMAND: C_N_HEADER}[
self.type
]
self.log_masking_ctr = 0 # type: int
self.log_masking_ctr = 0 # type: int
self.assembly_pt_list = [] # type: List[bytes]
self.log_ct_list = [] # type: List[bytes]
self.long_active = False
self.is_complete = False
self.log_ct_list = [] # type: List[bytes]
self.long_active = False
self.is_complete = False
def add_masking_packet_to_log_file(self, increase: int = 1) -> None:
"""Increase `log_masking_ctr` for message and file packets."""
@ -198,50 +265,55 @@ class Packet(object):
def clear_file_metadata(self) -> None:
"""Clear file metadata."""
self.packets = None
self.time = None
self.size = None
self.name = None
self.time = None
self.size = None
self.name = None
def clear_assembly_packets(self) -> None:
"""Clear packet state."""
self.assembly_pt_list = []
self.log_ct_list = []
self.long_active = False
self.is_complete = False
self.log_ct_list = []
self.long_active = False
self.is_complete = False
def new_file_packet(self) -> None:
"""New file transmission handling logic."""
name = self.name
name = self.name
was_active = self.long_active
self.clear_file_metadata()
self.clear_assembly_packets()
if self.origin == ORIGIN_USER_HEADER:
self.add_masking_packet_to_log_file()
raise FunctionReturn("Ignored file from the user.", output=False)
raise SoftError("Ignored file from the user.", output=False)
if not self.contact.file_reception:
self.add_masking_packet_to_log_file()
raise FunctionReturn(f"Alert! File transmission from {self.contact.nick} but reception is disabled.")
raise SoftError(
f"Alert! File transmission from {self.contact.nick} but reception is disabled."
)
if was_active:
m_print(f"Alert! File '{name}' from {self.contact.nick} never completed.", head=1, tail=1)
m_print(
f"Alert! File '{name}' from {self.contact.nick} never completed.",
head=1,
tail=1,
)
def check_long_packet(self) -> None:
"""Check if the long packet has permission to be extended."""
if not self.long_active:
self.add_masking_packet_to_log_file()
raise FunctionReturn("Missing start packet.", output=False)
raise SoftError("Missing start packet.", output=False)
if self.type == FILE and not self.contact.file_reception:
self.add_masking_packet_to_log_file(increase=len(self.assembly_pt_list) + 1)
self.clear_assembly_packets()
raise FunctionReturn("Alert! File reception disabled mid-transfer.")
raise SoftError("Alert! File reception disabled mid-transfer.")
def process_short_header(self,
packet: bytes,
packet_ct: Optional[bytes] = None
) -> None:
def process_short_header(
self, packet: bytes, packet_ct: Optional[bytes] = None
) -> None:
"""Process short packet."""
if self.long_active:
self.add_masking_packet_to_log_file(increase=len(self.assembly_pt_list))
@ -250,16 +322,15 @@ class Packet(object):
self.new_file_packet()
self.assembly_pt_list = [packet]
self.long_active = False
self.is_complete = True
self.long_active = False
self.is_complete = True
if packet_ct is not None:
self.log_ct_list = [packet_ct]
def process_long_header(self,
packet: bytes,
packet_ct: Optional[bytes] = None
) -> None:
def process_long_header(
self, packet: bytes, packet_ct: Optional[bytes] = None
) -> None:
"""Process first packet of long transmission."""
if self.long_active:
self.add_masking_packet_to_log_file(increase=len(self.assembly_pt_list))
@ -267,33 +338,43 @@ class Packet(object):
if self.type == FILE:
self.new_file_packet()
try:
lh, no_p_bytes, time_bytes, size_bytes, name_us_data \
= separate_headers(packet, [ASSEMBLY_PACKET_HEADER_LENGTH] + 3*[ENCODED_INTEGER_LENGTH])
_, no_p_bytes, time_bytes, size_bytes, name_us_data = separate_headers(
packet,
[ASSEMBLY_PACKET_HEADER_LENGTH] + 3 * [ENCODED_INTEGER_LENGTH],
)
self.packets = bytes_to_int(no_p_bytes) # added by transmitter.packet.split_to_assembly_packets
self.time = str(timedelta(seconds=bytes_to_int(time_bytes)))
self.size = readable_size(bytes_to_int(size_bytes))
self.name = name_us_data.split(US_BYTE, 1)[0].decode()
self.packets = bytes_to_int(
no_p_bytes
) # added by transmitter.packet.split_to_assembly_packets
self.time = str(timedelta(seconds=bytes_to_int(time_bytes)))
self.size = readable_size(bytes_to_int(size_bytes))
self.name = name_us_data.split(US_BYTE, 1)[0].decode()
m_print([f'Receiving file from {self.contact.nick}:',
f'{self.name} ({self.size})',
f'ETA {self.time} ({self.packets} packets)'], bold=True, head=1, tail=1)
m_print(
[
f"Receiving file from {self.contact.nick}:",
f"{self.name} ({self.size})",
f"ETA {self.time} ({self.packets} packets)",
],
bold=True,
head=1,
tail=1,
)
except (struct.error, UnicodeError, ValueError):
self.add_masking_packet_to_log_file()
raise FunctionReturn("Error: Received file packet had an invalid header.")
raise SoftError("Error: Received file packet had an invalid header.")
self.assembly_pt_list = [packet]
self.long_active = True
self.is_complete = False
self.long_active = True
self.is_complete = False
if packet_ct is not None:
self.log_ct_list = [packet_ct]
def process_append_header(self,
packet: bytes,
packet_ct: Optional[bytes] = None
) -> None:
def process_append_header(
self, packet: bytes, packet_ct: Optional[bytes] = None
) -> None:
"""Process consecutive packet(s) of long transmission."""
self.check_long_packet()
self.assembly_pt_list.append(packet)
@ -301,10 +382,9 @@ class Packet(object):
if packet_ct is not None:
self.log_ct_list.append(packet_ct)
def process_end_header(self,
packet: bytes,
packet_ct: Optional[bytes] = None
) -> None:
def process_end_header(
self, packet: bytes, packet_ct: Optional[bytes] = None
) -> None:
"""Process last packet of long transmission."""
self.check_long_packet()
self.assembly_pt_list.append(packet)
@ -316,7 +396,11 @@ class Packet(object):
def abort_packet(self, cancel: bool = False) -> None:
"""Process cancel/noise packet."""
if self.type == FILE and self.origin == ORIGIN_CONTACT_HEADER and self.long_active:
if (
self.type == FILE
and self.origin == ORIGIN_CONTACT_HEADER
and self.long_active
):
if cancel:
message = f"{self.contact.nick} cancelled file."
else:
@ -334,29 +418,31 @@ class Packet(object):
"""Process traffic masking noise packet."""
self.abort_packet()
def add_packet(self,
packet: bytes,
packet_ct: Optional[bytes] = None
) -> None:
def add_packet(self, packet: bytes, packet_ct: Optional[bytes] = None) -> None:
"""Add a new assembly packet to the object."""
try:
func_d = {self.sh: self.process_short_header,
self.lh: self.process_long_header,
self.ah: self.process_append_header,
self.eh: self.process_end_header,
self.ch: self.process_cancel_header,
self.nh: self.process_noise_header
} # type: Dict[bytes, Callable[[bytes, Optional[bytes]], None]]
func_d = {
self.sh: self.process_short_header,
self.lh: self.process_long_header,
self.ah: self.process_append_header,
self.eh: self.process_end_header,
self.ch: self.process_cancel_header,
self.nh: self.process_noise_header,
} # type: Dict[bytes, Callable[[bytes, Optional[bytes]], None]]
func = func_d[packet[:ASSEMBLY_PACKET_HEADER_LENGTH]]
except KeyError:
# Erroneous headers are ignored but stored as placeholder data.
self.add_masking_packet_to_log_file()
raise FunctionReturn("Error: Received packet had an invalid assembly packet header.")
raise SoftError(
"Error: Received packet had an invalid assembly packet header."
)
func(packet, packet_ct)
def assemble_message_packet(self) -> bytes:
"""Assemble message packet."""
padded = b''.join([p[ASSEMBLY_PACKET_HEADER_LENGTH:] for p in self.assembly_pt_list])
padded = b"".join(
[p[ASSEMBLY_PACKET_HEADER_LENGTH:] for p in self.assembly_pt_list]
)
payload = rm_padding_bytes(padded)
if len(self.assembly_pt_list) > 1:
@ -364,54 +450,55 @@ class Packet(object):
try:
payload = auth_and_decrypt(msg_ct, msg_key)
except nacl.exceptions.CryptoError:
raise FunctionReturn("Error: Decryption of message failed.")
raise SoftError("Error: Decryption of message failed.")
try:
return decompress(payload, MAX_MESSAGE_SIZE)
except zlib.error:
raise FunctionReturn("Error: Decompression of message failed.")
raise SoftError("Error: Decompression of message failed.")
def assemble_and_store_file(self,
ts: 'datetime',
onion_pub_key: bytes,
window_list: 'WindowList'
) -> None:
def assemble_and_store_file(
self, ts: "datetime", onion_pub_key: bytes, window_list: "WindowList"
) -> None:
"""Assemble file packet and store it."""
padded = b''.join([p[ASSEMBLY_PACKET_HEADER_LENGTH:] for p in self.assembly_pt_list])
padded = b"".join(
[p[ASSEMBLY_PACKET_HEADER_LENGTH:] for p in self.assembly_pt_list]
)
payload = rm_padding_bytes(padded)
no_fields = 3 if len(self.assembly_pt_list) > 1 else 2
no_fields = 3 if len(self.assembly_pt_list) > 1 else 2
*_, payload = separate_headers(payload, no_fields * [ENCODED_INTEGER_LENGTH])
process_assembled_file(ts, payload, onion_pub_key, self.contact.nick, self.settings, window_list)
process_assembled_file(
ts, payload, onion_pub_key, self.contact.nick, self.settings, window_list
)
def assemble_command_packet(self) -> bytes:
"""Assemble command packet."""
padded = b''.join([p[ASSEMBLY_PACKET_HEADER_LENGTH:] for p in self.assembly_pt_list])
padded = b"".join(
[p[ASSEMBLY_PACKET_HEADER_LENGTH:] for p in self.assembly_pt_list]
)
payload = rm_padding_bytes(padded)
if len(self.assembly_pt_list) > 1:
payload, cmd_hash = separate_trailer(payload, BLAKE2_DIGEST_LENGTH)
if blake2b(payload) != cmd_hash:
raise FunctionReturn("Error: Received an invalid command.")
raise SoftError("Error: Received an invalid command.")
try:
return decompress(payload, self.settings.max_decompress_size)
except zlib.error:
raise FunctionReturn("Error: Decompression of command failed.")
raise SoftError("Error: Decompression of command failed.")
class PacketList(Iterable[Packet], Sized):
"""PacketList manages all file, message, and command packets."""
def __init__(self,
settings: 'Settings',
contact_list: 'ContactList'
) -> None:
def __init__(self, settings: "Settings", contact_list: "ContactList") -> None:
"""Create a new PacketList object."""
self.settings = settings
self.settings = settings
self.contact_list = contact_list
self.packets = [] # type: List[Packet]
self.packets = [] # type: List[Packet]
def __iter__(self) -> Iterator[Packet]:
"""Iterate over packet list."""
@ -421,22 +508,21 @@ class PacketList(Iterable[Packet], Sized):
"""Return number of packets in the packet list."""
return len(self.packets)
def has_packet(self,
onion_pub_key: bytes,
origin: bytes,
p_type: str
) -> bool:
def has_packet(self, onion_pub_key: bytes, origin: bytes, p_type: str) -> bool:
"""Return True if a packet with matching selectors exists, else False."""
return any(p for p in self.packets if (p.onion_pub_key == onion_pub_key
and p.origin == origin
and p.type == p_type))
return any(
p
for p in self.packets
if (
p.onion_pub_key == onion_pub_key
and p.origin == origin
and p.type == p_type
)
)
def get_packet(self,
onion_pub_key: bytes,
origin: bytes,
p_type: str,
log_access: bool = False
) -> Packet:
def get_packet(
self, onion_pub_key: bytes, origin: bytes, p_type: str, log_access: bool = False
) -> Packet:
"""Get packet based on Onion Service public key, origin, and type.
If the packet does not exist, create it.
@ -447,8 +533,16 @@ class PacketList(Iterable[Packet], Sized):
else:
contact = self.contact_list.get_contact_by_pub_key(onion_pub_key)
self.packets.append(Packet(onion_pub_key, origin, p_type, contact, self.settings))
self.packets.append(
Packet(onion_pub_key, origin, p_type, contact, self.settings)
)
return next(p for p in self.packets if (p.onion_pub_key == onion_pub_key
and p.origin == origin
and p.type == p_type))
return next(
p
for p in self.packets
if (
p.onion_pub_key == onion_pub_key
and p.origin == origin
and p.type == p_type
)
)

View File

@ -24,50 +24,65 @@ import time
import typing
from datetime import datetime
from typing import Any, Dict
from typing import Any, Dict
from src.common.encoding import bytes_to_int
from src.common.exceptions import FunctionReturn
from src.common.misc import ignored, separate_headers
from src.common.output import m_print
from src.common.statics import (COMMAND_DATAGRAM_HEADER, DATAGRAM_HEADER_LENGTH, DATAGRAM_TIMESTAMP_LENGTH,
FILE_DATAGRAM_HEADER, GATEWAY_QUEUE, LOCAL_KEY_DATAGRAM_HEADER,
MESSAGE_DATAGRAM_HEADER)
from src.common.encoding import bytes_to_int
from src.common.exceptions import SoftError
from src.common.misc import ignored, separate_headers
from src.common.output import m_print
from src.common.statics import (
COMMAND_DATAGRAM_HEADER,
DATAGRAM_HEADER_LENGTH,
DATAGRAM_TIMESTAMP_LENGTH,
FILE_DATAGRAM_HEADER,
GATEWAY_QUEUE,
LOCAL_KEY_DATAGRAM_HEADER,
MESSAGE_DATAGRAM_HEADER,
)
if typing.TYPE_CHECKING:
from multiprocessing import Queue
from multiprocessing import Queue
from src.common.gateway import Gateway
def receiver_loop(queues: Dict[bytes, 'Queue[Any]'],
gateway: 'Gateway',
unit_test: bool = False
) -> None:
def receiver_loop(
queues: Dict[bytes, "Queue[Any]"], gateway: "Gateway", unit_test: bool = False
) -> None:
"""Decode received packets and forward them to packet queues."""
gateway_queue = queues[GATEWAY_QUEUE]
while True:
with ignored(EOFError, KeyboardInterrupt):
if gateway_queue.qsize() == 0:
if not gateway_queue.qsize():
time.sleep(0.01)
_, packet = gateway_queue.get()
try:
packet = gateway.detect_errors(packet)
except FunctionReturn:
except SoftError:
continue
header, ts_bytes, payload = separate_headers(packet, [DATAGRAM_HEADER_LENGTH, DATAGRAM_TIMESTAMP_LENGTH])
header, ts_bytes, payload = separate_headers(
packet, [DATAGRAM_HEADER_LENGTH, DATAGRAM_TIMESTAMP_LENGTH]
)
try:
ts = datetime.strptime(str(bytes_to_int(ts_bytes)), "%Y%m%d%H%M%S%f")
except (ValueError, struct.error):
m_print("Error: Failed to decode timestamp in the received packet.", head=1, tail=1)
m_print(
"Error: Failed to decode timestamp in the received packet.",
head=1,
tail=1,
)
continue
if header in [MESSAGE_DATAGRAM_HEADER, FILE_DATAGRAM_HEADER,
COMMAND_DATAGRAM_HEADER, LOCAL_KEY_DATAGRAM_HEADER]:
if header in [
MESSAGE_DATAGRAM_HEADER,
FILE_DATAGRAM_HEADER,
COMMAND_DATAGRAM_HEADER,
LOCAL_KEY_DATAGRAM_HEADER,
]:
queues[header].put((ts, payload))
if unit_test:

View File

@ -25,22 +25,41 @@ import textwrap
import typing
from datetime import datetime
from typing import Any, Dict, Iterable, Iterator, List, Optional, Tuple
from typing import Any, Dict, Iterable, Iterator, List, Optional, Tuple
from src.common.encoding import b58encode, pub_key_to_onion_address, pub_key_to_short_address
from src.common.exceptions import FunctionReturn
from src.common.misc import get_terminal_width
from src.common.output import clear_screen, m_print, print_on_previous_line
from src.common.statics import (BOLD_ON, EVENT, FILE, FILE_TRANSFER_INDENT, GROUP_ID_LENGTH, GROUP_MSG_ID_LENGTH, ME,
NORMAL_TEXT, ONION_SERVICE_PUBLIC_KEY_LENGTH, ORIGIN_CONTACT_HEADER,
ORIGIN_USER_HEADER, WIN_TYPE_COMMAND, WIN_TYPE_CONTACT, WIN_TYPE_FILE,
WIN_TYPE_GROUP, WIN_UID_FILE, WIN_UID_LOCAL)
from src.common.encoding import (
b58encode,
pub_key_to_onion_address,
pub_key_to_short_address,
)
from src.common.exceptions import SoftError
from src.common.misc import get_terminal_width
from src.common.output import clear_screen, m_print, print_on_previous_line
from src.common.statics import (
BOLD_ON,
EVENT,
FILE,
FILE_TRANSFER_INDENT,
GROUP_ID_LENGTH,
GROUP_MSG_ID_LENGTH,
ME,
NORMAL_TEXT,
ONION_SERVICE_PUBLIC_KEY_LENGTH,
ORIGIN_CONTACT_HEADER,
ORIGIN_USER_HEADER,
WIN_TYPE_COMMAND,
WIN_TYPE_CONTACT,
WIN_TYPE_FILE,
WIN_TYPE_GROUP,
WIN_UID_FILE,
WIN_UID_COMMAND,
)
if typing.TYPE_CHECKING:
from src.common.db_contacts import Contact, ContactList
from src.common.db_groups import GroupList
from src.common.db_groups import GroupList
from src.common.db_settings import Settings
from src.receiver.packet import Packet, PacketList
from src.receiver.packet import Packet, PacketList
MsgTuple = Tuple[datetime, str, bytes, bytes, bool, bool]
@ -52,50 +71,51 @@ class RxWindow(Iterable[MsgTuple]):
their own windows, accessible with separate commands.
"""
def __init__(self,
uid: bytes,
contact_list: 'ContactList',
group_list: 'GroupList',
settings: 'Settings',
packet_list: 'PacketList'
) -> None:
def __init__(
self,
uid: bytes,
contact_list: "ContactList",
group_list: "GroupList",
settings: "Settings",
packet_list: "PacketList",
) -> None:
"""Create a new RxWindow object."""
self.uid = uid
self.uid = uid
self.contact_list = contact_list
self.group_list = group_list
self.settings = settings
self.packet_list = packet_list
self.group_list = group_list
self.settings = settings
self.packet_list = packet_list
self.is_active = False
self.contact = None
self.group = None
self.is_active = False
self.contact = None
self.group = None
self.group_msg_id = os.urandom(GROUP_MSG_ID_LENGTH)
self.window_contacts = [] # type: List[Contact]
self.message_log = [] # type: List[MsgTuple]
self.handle_dict = dict() # type: Dict[bytes, str]
self.window_contacts = [] # type: List[Contact]
self.message_log = [] # type: List[MsgTuple]
self.handle_dict = dict() # type: Dict[bytes, str]
self.previous_msg_ts = datetime.now()
self.unread_messages = 0
if self.uid == WIN_UID_LOCAL:
self.type = WIN_TYPE_COMMAND # type: str
self.name = self.type # type: str
if self.uid == WIN_UID_COMMAND:
self.type = WIN_TYPE_COMMAND # type: str
self.name = self.type # type: str
self.window_contacts = []
elif self.uid == WIN_UID_FILE:
self.type = WIN_TYPE_FILE
self.type = WIN_TYPE_FILE
self.packet_list = packet_list
elif self.uid in self.contact_list.get_list_of_pub_keys():
self.type = WIN_TYPE_CONTACT
self.contact = self.contact_list.get_contact_by_pub_key(uid)
self.name = self.contact.nick
self.type = WIN_TYPE_CONTACT
self.contact = self.contact_list.get_contact_by_pub_key(uid)
self.name = self.contact.nick
self.window_contacts = [self.contact]
elif self.uid in self.group_list.get_list_of_group_ids():
self.type = WIN_TYPE_GROUP
self.group = self.group_list.get_group_by_id(self.uid)
self.name = self.group.name
self.type = WIN_TYPE_GROUP
self.group = self.group_list.get_group_by_id(self.uid)
self.name = self.group.name
self.window_contacts = self.group.members
else:
@ -106,7 +126,7 @@ class RxWindow(Iterable[MsgTuple]):
else:
hr_uid = "<unable to encode>"
raise FunctionReturn(f"Invalid window '{hr_uid}'.")
raise SoftError(f"Invalid window '{hr_uid}'.")
def __iter__(self) -> Iterator[MsgTuple]:
"""Iterate over window's message log."""
@ -118,14 +138,19 @@ class RxWindow(Iterable[MsgTuple]):
def add_contacts(self, pub_keys: List[bytes]) -> None:
"""Add contact objects to the window."""
self.window_contacts += [self.contact_list.get_contact_by_pub_key(k) for k in pub_keys
if not self.has_contact(k) and self.contact_list.has_pub_key(k)]
self.window_contacts += [
self.contact_list.get_contact_by_pub_key(k)
for k in pub_keys
if not self.has_contact(k) and self.contact_list.has_pub_key(k)
]
def remove_contacts(self, pub_keys: List[bytes]) -> None:
"""Remove contact objects from the window."""
to_remove = set(pub_keys) & set([m.onion_pub_key for m in self.window_contacts])
if to_remove:
self.window_contacts = [c for c in self.window_contacts if c.onion_pub_key not in to_remove]
self.window_contacts = [
c for c in self.window_contacts if c.onion_pub_key not in to_remove
]
def reset_window(self) -> None:
"""Reset the ephemeral message log of the window."""
@ -158,33 +183,40 @@ class RxWindow(Iterable[MsgTuple]):
for k in pub_keys:
self.update_handle_dict(k)
def get_handle(self,
time_stamp: 'datetime', # Timestamp of message to be printed
onion_pub_key: bytes, # Onion Service public key of contact (used as lookup for handles)
origin: bytes, # Determines whether to use "Me" or nick of contact as handle
whisper: bool = False, # When True, displays (whisper) specifier next to handle
event_msg: bool = False # When True, sets handle to "-!-"
) -> str: # Handle to use
def get_handle(
self,
time_stamp: "datetime", # Timestamp of message to be printed
onion_pub_key: bytes, # Onion Service public key of contact (used as lookup for handles)
origin: bytes, # Determines whether to use "Me" or nick of contact as handle
whisper: bool = False, # When True, displays (whisper) specifier next to handle
event_msg: bool = False, # When True, sets handle to "-!-"
) -> str: # Handle to use
"""Returns indented handle complete with headers and trailers."""
time_stamp_str = time_stamp.strftime('%H:%M:%S.%f')[:-4]
time_stamp_str = time_stamp.strftime("%H:%M:%S.%f")[:-4]
if onion_pub_key == WIN_UID_LOCAL or event_msg:
if onion_pub_key == WIN_UID_COMMAND or event_msg:
handle = EVENT
ending = ' '
ending = " "
else:
handle = self.handle_dict[onion_pub_key] if origin == ORIGIN_CONTACT_HEADER else ME
handle = (
self.handle_dict[onion_pub_key]
if origin == ORIGIN_CONTACT_HEADER
else ME
)
handles = list(self.handle_dict.values()) + [ME]
indent = max(len(v) for v in handles) - len(handle) if self.is_active else 0
handle = indent * ' ' + handle
indent = max(len(v) for v in handles) - len(handle) if self.is_active else 0
handle = indent * " " + handle
# Handle specifiers for messages to inactive window
if not self.is_active:
handle += {WIN_TYPE_GROUP: f" (group {self.name})",
WIN_TYPE_CONTACT: f" (private message)"}.get(self.type, '')
handle += {
WIN_TYPE_GROUP: f" (group {self.name})",
WIN_TYPE_CONTACT: f" (private message)",
}.get(self.type, "")
if whisper:
handle += " (whisper)"
ending = ': '
ending = ": "
handle = f"{time_stamp_str} {handle}{ending}"
@ -200,53 +232,83 @@ class RxWindow(Iterable[MsgTuple]):
handle = self.get_handle(ts, onion_pub_key, origin, whisper, event_msg)
# Check if message content needs to be changed to privacy-preserving notification
if not self.is_active and not self.settings.new_message_notify_preview and self.uid != WIN_UID_LOCAL:
trailer = 's' if self.unread_messages > 0 else ''
message = BOLD_ON + f"{self.unread_messages + 1} unread message{trailer}" + NORMAL_TEXT
if (
not self.is_active
and not self.settings.new_message_notify_preview
and self.uid != WIN_UID_COMMAND
):
trailer = "s" if self.unread_messages > 0 else ""
message = (
BOLD_ON
+ f"{self.unread_messages + 1} unread message{trailer}"
+ NORMAL_TEXT
)
# Wrap message
wrapper = textwrap.TextWrapper(width=get_terminal_width(),
initial_indent=handle,
subsequent_indent=len(handle)*' ')
wrapper = textwrap.TextWrapper(
width=get_terminal_width(),
initial_indent=handle,
subsequent_indent=len(handle) * " ",
)
wrapped = wrapper.fill(message)
if wrapped == '':
if wrapped == "":
wrapped = handle
# Add bolding unless export file is provided
bold_on, bold_off, f_name = (BOLD_ON, NORMAL_TEXT, sys.stdout) if file is None else ('', '', file)
wrapped = bold_on + wrapped[:len(handle)] + bold_off + wrapped[len(handle):]
bold_on, bold_off, f_name = (
(BOLD_ON, NORMAL_TEXT, sys.stdout) if file is None else ("", "", file)
)
wrapped = bold_on + wrapped[: len(handle)] + bold_off + wrapped[len(handle) :]
if self.is_active:
if self.previous_msg_ts.date() != ts.date():
print(bold_on + f"00:00 -!- Day changed to {str(ts.date())}" + bold_off, file=f_name)
print(
bold_on + f"00:00 -!- Day changed to {str(ts.date())}" + bold_off,
file=f_name,
)
print(wrapped, file=f_name)
else:
if onion_pub_key != WIN_UID_LOCAL:
if onion_pub_key != WIN_UID_COMMAND:
self.unread_messages += 1
if (self.type == WIN_TYPE_CONTACT and self.contact is not None and self.contact.notifications) \
or (self.type == WIN_TYPE_GROUP and self.group is not None and self.group.notifications) \
or (self.type == WIN_TYPE_COMMAND):
if (
(
self.type == WIN_TYPE_CONTACT
and self.contact is not None
and self.contact.notifications
)
or (
self.type == WIN_TYPE_GROUP
and self.group is not None
and self.group.notifications
)
or (self.type == WIN_TYPE_COMMAND)
):
lines = wrapped.split('\n')
lines = wrapped.split("\n")
if len(lines) > 1:
print(lines[0][:-1] + '') # Preview only first line of the long message
print(
lines[0][:-1] + ""
) # Preview only first line of the long message
else:
print(wrapped)
print_on_previous_line(delay=self.settings.new_message_notify_duration, flush=True)
print_on_previous_line(
delay=self.settings.new_message_notify_duration, flush=True
)
self.previous_msg_ts = ts
def add_new(self,
timestamp: 'datetime', # The timestamp of the received message
message: str, # The content of the message
onion_pub_key: bytes = WIN_UID_LOCAL, # The Onion Service public key of associated contact
origin: bytes = ORIGIN_USER_HEADER, # The direction of the message
output: bool = False, # When True, displays message while adding it to message_log
whisper: bool = False, # When True, displays message as whisper message
event_msg: bool = False # When True, uses "-!-" as message handle
) -> None:
def add_new(
self,
timestamp: "datetime", # The timestamp of the received message
message: str, # The content of the message
onion_pub_key: bytes = WIN_UID_COMMAND, # The Onion Service public key of associated contact
origin: bytes = ORIGIN_USER_HEADER, # The direction of the message
output: bool = False, # When True, displays message while adding it to message_log
whisper: bool = False, # When True, displays message as whisper message
event_msg: bool = False, # When True, uses "-!-" as message handle
) -> None:
"""Add message tuple to message log and optionally print it."""
self.update_handle_dict(onion_pub_key)
@ -257,7 +319,7 @@ class RxWindow(Iterable[MsgTuple]):
def redraw(self, file: Any = None) -> None:
"""Print all messages received to the window."""
old_messages = len(self.message_log) - self.unread_messages
old_messages = len(self.message_log) - self.unread_messages
self.unread_messages = 0
if file is None:
@ -268,74 +330,105 @@ class RxWindow(Iterable[MsgTuple]):
self.create_handle_dict(self.message_log)
for i, msg_tuple in enumerate(self.message_log):
if i == old_messages:
print('\n' + ' Unread Messages '.center(get_terminal_width(), '-') + '\n')
print(
"\n"
+ " Unread Messages ".center(get_terminal_width(), "-")
+ "\n"
)
self.print(msg_tuple, file)
else:
m_print(f"This window for {self.name} is currently empty.", bold=True, head=1, tail=1)
m_print(
f"This window for {self.name} is currently empty.",
bold=True,
head=1,
tail=1,
)
def redraw_file_win(self) -> None:
"""Draw file transmission window progress bars."""
# Initialize columns
c1 = ['File name']
c2 = ['Size']
c3 = ['Sender']
c4 = ['Complete']
c1 = ["File name"]
c2 = ["Size"]
c3 = ["Sender"]
c4 = ["Complete"]
# Populate columns with file transmission status data
for p in self.packet_list: # type: Packet
if p.type == FILE and len(p.assembly_pt_list) > 0:
if ( p.name is not None and p.assembly_pt_list is not None
and p.size is not None and p.packets is not None):
if (
p.name is not None
and p.assembly_pt_list is not None
and p.size is not None
and p.packets is not None
):
c1.append(p.name)
c2.append(p.size)
c3.append(p.contact.nick)
c4.append(f"{len(p.assembly_pt_list) / p.packets * 100:.2f}%")
if not len(c1) > 1:
m_print("No file transmissions currently in progress.", bold=True, head=1, tail=1)
if len(c1) <= 1:
m_print(
"No file transmissions currently in progress.",
bold=True,
head=1,
tail=1,
)
print_on_previous_line(reps=3, delay=0.1)
return None
# Calculate column widths
c1w, c2w, c3w, c4w = [max(len(v) for v in column) + FILE_TRANSFER_INDENT for column in [c1, c2, c3, c4]]
c1w, c2w, c3w, c4w = [
max(len(v) for v in column) + FILE_TRANSFER_INDENT
for column in [c1, c2, c3, c4]
]
# Align columns by adding whitespace between fields of each line
lines = [f'{f1:{c1w}}{f2:{c2w}}{f3:{c3w}}{f4:{c4w}}' for f1, f2, f3, f4 in zip(c1, c2, c3, c4)]
lines = [
f"{f1:{c1w}}{f2:{c2w}}{f3:{c3w}}{f4:{c4w}}"
for f1, f2, f3, f4 in zip(c1, c2, c3, c4)
]
# Add a terminal-wide line between the column names and the data
lines.insert(1, get_terminal_width() * '')
lines.insert(1, get_terminal_width() * "")
# Print the file transfer list
print('\n' + '\n'.join(lines) + '\n')
print_on_previous_line(reps=len(lines)+2, delay=0.1)
print("\n" + "\n".join(lines) + "\n")
print_on_previous_line(reps=len(lines) + 2, delay=0.1)
class WindowList(Iterable[RxWindow]):
"""WindowList manages a list of Window objects."""
def __init__(self,
settings: 'Settings',
contact_list: 'ContactList',
group_list: 'GroupList',
packet_list: 'PacketList'
) -> None:
def __init__(
self,
settings: "Settings",
contact_list: "ContactList",
group_list: "GroupList",
packet_list: "PacketList",
) -> None:
"""Create a new WindowList object."""
self.settings = settings
self.settings = settings
self.contact_list = contact_list
self.group_list = group_list
self.packet_list = packet_list
self.group_list = group_list
self.packet_list = packet_list
self.active_win = None # type: Optional[RxWindow]
self.windows = [RxWindow(uid, self.contact_list, self.group_list, self.settings, self.packet_list)
for uid in ([WIN_UID_LOCAL, WIN_UID_FILE]
+ self.contact_list.get_list_of_pub_keys()
+ self.group_list.get_list_of_group_ids())]
self.windows = [
RxWindow(
uid, self.contact_list, self.group_list, self.settings, self.packet_list
)
for uid in (
[WIN_UID_COMMAND, WIN_UID_FILE]
+ self.contact_list.get_list_of_pub_keys()
+ self.group_list.get_list_of_group_ids()
)
]
if self.contact_list.has_local_contact():
self.set_active_rx_window(WIN_UID_LOCAL)
self.set_active_rx_window(WIN_UID_COMMAND)
def __iter__(self) -> Iterator[RxWindow]:
"""Iterate over window list."""
@ -360,25 +453,38 @@ class WindowList(Iterable[RxWindow]):
"""Return list of group windows."""
return [w for w in self.windows if w.type == WIN_TYPE_GROUP]
def get_window(self, uid: bytes) -> 'RxWindow':
def get_window(self, uid: bytes) -> "RxWindow":
"""Return window that matches the specified UID.
Create window if it does not exist.
"""
if not self.has_window(uid):
self.windows.append(RxWindow(uid, self.contact_list, self.group_list, self.settings, self.packet_list))
self.windows.append(
RxWindow(
uid,
self.contact_list,
self.group_list,
self.settings,
self.packet_list,
)
)
return next(w for w in self.windows if w.uid == uid)
def get_local_window(self) -> 'RxWindow':
def refresh_file_window_check(self) -> None:
"""Check if file window needs to be refreshed."""
if self.active_win is not None and self.active_win.uid == WIN_UID_FILE:
self.active_win.redraw_file_win()
def get_command_window(self) -> "RxWindow":
"""Return command window."""
return self.get_window(WIN_UID_LOCAL)
return self.get_window(WIN_UID_COMMAND)
def set_active_rx_window(self, uid: bytes) -> None:
"""Select new active window."""
if self.active_win is not None:
self.active_win.is_active = False
self.active_win = self.get_window(uid)
self.active_win = self.get_window(uid)
self.active_win.is_active = True
if self.active_win.uid == WIN_UID_FILE:

View File

@ -0,0 +1,20 @@
#!/usr/bin/env python3.7
# -*- coding: utf-8 -*-
"""
TFC - Onion-routed, endpoint secure messaging system
Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
TFC is free software: you can redistribute it and/or modify it under the terms
of the GNU General Public License as published by the Free Software Foundation,
either version 3 of the License, or (at your option) any later version.
TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""

View File

@ -24,46 +24,81 @@ import hashlib
import time
import typing
from datetime import datetime
from datetime import datetime
from multiprocessing import Process, Queue
from typing import Any, Dict, List
from typing import Any, Dict, List, Tuple
import requests
from cryptography.hazmat.primitives.asymmetric.x448 import X448PublicKey, X448PrivateKey
from src.common.encoding import b58encode, int_to_bytes, onion_address_to_pub_key, pub_key_to_onion_address
from src.common.encoding import (
b58encode,
int_to_bytes,
onion_address_to_pub_key,
pub_key_to_onion_address,
)
from src.common.encoding import pub_key_to_short_address
from src.common.misc import ignored, separate_header, split_byte_string, validate_onion_addr
from src.common.output import m_print, print_key, rp_print
from src.common.statics import (CLIENT_OFFLINE_THRESHOLD, CONTACT_MGMT_QUEUE, CONTACT_REQ_QUEUE, C_REQ_MGMT_QUEUE,
C_REQ_STATE_QUEUE, DATAGRAM_HEADER_LENGTH, DST_MESSAGE_QUEUE, FILE_DATAGRAM_HEADER,
GROUP_ID_LENGTH, GROUP_MGMT_QUEUE, GROUP_MSG_EXIT_GROUP_HEADER,
GROUP_MSG_INVITE_HEADER, GROUP_MSG_JOIN_HEADER, GROUP_MSG_MEMBER_ADD_HEADER,
GROUP_MSG_MEMBER_REM_HEADER, GROUP_MSG_QUEUE, MESSAGE_DATAGRAM_HEADER,
ONION_SERVICE_PUBLIC_KEY_LENGTH, ORIGIN_CONTACT_HEADER, PUBLIC_KEY_DATAGRAM_HEADER,
RELAY_CLIENT_MAX_DELAY, RELAY_CLIENT_MIN_DELAY, RP_ADD_CONTACT_HEADER,
RP_REMOVE_CONTACT_HEADER, TFC_PUBLIC_KEY_LENGTH, TOR_DATA_QUEUE, UNIT_TEST_QUEUE,
URL_TOKEN_LENGTH, URL_TOKEN_QUEUE)
from src.common.exceptions import SoftError
from src.common.misc import (
ignored,
separate_header,
split_byte_string,
validate_onion_addr,
)
from src.common.output import m_print, print_key, rp_print
from src.common.statics import (
CLIENT_OFFLINE_THRESHOLD,
CONTACT_MGMT_QUEUE,
CONTACT_REQ_QUEUE,
C_REQ_MGMT_QUEUE,
C_REQ_STATE_QUEUE,
DATAGRAM_HEADER_LENGTH,
DST_MESSAGE_QUEUE,
FILE_DATAGRAM_HEADER,
GROUP_ID_LENGTH,
GROUP_MGMT_QUEUE,
GROUP_MSG_EXIT_GROUP_HEADER,
GROUP_MSG_INVITE_HEADER,
GROUP_MSG_JOIN_HEADER,
GROUP_MSG_MEMBER_ADD_HEADER,
GROUP_MSG_MEMBER_REM_HEADER,
GROUP_MSG_QUEUE,
MESSAGE_DATAGRAM_HEADER,
ONION_SERVICE_PUBLIC_KEY_LENGTH,
ORIGIN_CONTACT_HEADER,
PUBLIC_KEY_DATAGRAM_HEADER,
RELAY_CLIENT_MAX_DELAY,
RELAY_CLIENT_MIN_DELAY,
RP_ADD_CONTACT_HEADER,
RP_REMOVE_CONTACT_HEADER,
TFC_PUBLIC_KEY_LENGTH,
TOR_DATA_QUEUE,
UNIT_TEST_QUEUE,
URL_TOKEN_LENGTH,
URL_TOKEN_QUEUE,
)
if typing.TYPE_CHECKING:
from src.common.gateway import Gateway
from requests.sessions import Session
from requests.sessions import Session
QueueDict = Dict[bytes, Queue[Any]]
def client_scheduler(queues: 'QueueDict',
gateway: 'Gateway',
url_token_private_key: X448PrivateKey,
unit_test: bool = False
) -> None:
def client_scheduler(
queues: "QueueDict",
gateway: "Gateway",
ut_private_key: X448PrivateKey,
unit_test: bool = False,
) -> None:
"""Manage `client` processes."""
proc_dict = dict() # type: Dict[bytes, Process]
# Wait for Tor port from `onion_service` process.
while True:
with ignored(EOFError, KeyboardInterrupt):
while queues[TOR_DATA_QUEUE].qsize() == 0:
while not queues[TOR_DATA_QUEUE].qsize():
time.sleep(0.1)
tor_port, onion_addr_user = queues[TOR_DATA_QUEUE].get()
break
@ -71,145 +106,244 @@ def client_scheduler(queues: 'QueueDict',
while True:
with ignored(EOFError, KeyboardInterrupt):
while queues[CONTACT_MGMT_QUEUE].qsize() == 0:
while not queues[CONTACT_MGMT_QUEUE].qsize():
time.sleep(0.1)
command, ser_public_keys, is_existing_contact = queues[CONTACT_MGMT_QUEUE].get()
command, ser_public_keys, is_existing_contact = queues[
CONTACT_MGMT_QUEUE
].get() # type: str, bytes, bool
onion_pub_keys = split_byte_string(ser_public_keys, ONION_SERVICE_PUBLIC_KEY_LENGTH)
onion_pub_keys = split_byte_string(
ser_public_keys, ONION_SERVICE_PUBLIC_KEY_LENGTH
)
if command == RP_ADD_CONTACT_HEADER:
for onion_pub_key in onion_pub_keys:
if onion_pub_key not in proc_dict:
onion_addr_user = '' if is_existing_contact else onion_addr_user
proc_dict[onion_pub_key] = Process(target=client, args=(onion_pub_key, queues,
url_token_private_key, tor_port,
gateway, onion_addr_user))
proc_dict[onion_pub_key].start()
add_new_client_process(
gateway,
is_existing_contact,
onion_addr_user,
onion_pub_keys,
proc_dict,
queues,
tor_port,
ut_private_key,
)
elif command == RP_REMOVE_CONTACT_HEADER:
for onion_pub_key in onion_pub_keys:
if onion_pub_key in proc_dict:
process = proc_dict[onion_pub_key] # type: Process
process.terminate()
proc_dict.pop(onion_pub_key)
rp_print(f"Removed {pub_key_to_short_address(onion_pub_key)}", bold=True)
remove_client_process(onion_pub_keys, proc_dict)
if unit_test and queues[UNIT_TEST_QUEUE].qsize() != 0:
break
def client(onion_pub_key: bytes,
queues: 'QueueDict',
url_token_private_key: X448PrivateKey,
tor_port: str,
gateway: 'Gateway',
onion_addr_user: str,
unit_test: bool = False
) -> None:
"""Load packets from contact's Onion Service."""
url_token = ''
cached_pk = ''
short_addr = pub_key_to_short_address(onion_pub_key)
onion_addr = pub_key_to_onion_address(onion_pub_key)
check_delay = RELAY_CLIENT_MIN_DELAY
is_online = False
def add_new_client_process(
gateway: "Gateway",
is_existing_contact: bool,
onion_addr_user: str,
onion_pub_keys: List[bytes],
proc_dict: Dict[bytes, Process],
queues: "QueueDict",
tor_port: int,
url_token_private_key: X448PrivateKey,
) -> None:
"""Add new client process."""
for onion_pub_key in onion_pub_keys:
if onion_pub_key not in proc_dict:
onion_addr_user = "" if is_existing_contact else onion_addr_user
proc_dict[onion_pub_key] = Process(
target=client,
args=(
onion_pub_key,
queues,
url_token_private_key,
tor_port,
gateway,
onion_addr_user,
),
)
proc_dict[onion_pub_key].start()
session = requests.session()
session.proxies = {'http': f'socks5h://127.0.0.1:{tor_port}',
'https': f'socks5h://127.0.0.1:{tor_port}'}
def remove_client_process(
onion_pub_keys: List[bytes], proc_dict: Dict[bytes, Process]
) -> None:
"""Remove client process."""
for onion_pub_key in onion_pub_keys:
if onion_pub_key in proc_dict:
process = proc_dict[onion_pub_key] # type: Process
process.terminate()
proc_dict.pop(onion_pub_key)
rp_print(f"Removed {pub_key_to_short_address(onion_pub_key)}", bold=True)
def client(
onion_pub_key: bytes,
queues: "QueueDict",
url_token_private_key: X448PrivateKey,
tor_port: str,
gateway: "Gateway",
onion_addr_user: str,
unit_test: bool = False,
) -> None:
"""Load packets from contact's Onion Service."""
cached_pk = ""
short_addr = pub_key_to_short_address(onion_pub_key)
onion_addr = pub_key_to_onion_address(onion_pub_key)
check_delay = RELAY_CLIENT_MIN_DELAY
is_online = False
session = requests.session()
session.proxies = {
"http": f"socks5h://127.0.0.1:{tor_port}",
"https": f"socks5h://127.0.0.1:{tor_port}",
}
rp_print(f"Connecting to {short_addr}...", bold=True)
# When Transmitter Program sends contact under UNENCRYPTED_ADD_EXISTING_CONTACT, this function
# receives user's own Onion address: That way it knows to request the contact to add them:
if onion_addr_user:
while True:
try:
reply = session.get(f'http://{onion_addr}.onion/contact_request/{onion_addr_user}', timeout=5).text
if reply == "OK":
break
except requests.exceptions.RequestException:
time.sleep(RELAY_CLIENT_MIN_DELAY)
send_contact_request(onion_addr, onion_addr_user, session)
while True:
with ignored(EOFError, KeyboardInterrupt):
with ignored(EOFError, KeyboardInterrupt, SoftError):
time.sleep(check_delay)
# Obtain URL token
# ----------------
url_token_public_key_hex = load_url_token(onion_addr, session)
is_online, check_delay = manage_contact_status(
url_token_public_key_hex, check_delay, is_online, short_addr
)
# Load URL token public key from contact's Onion Service root domain
try:
url_token_public_key_hex = session.get(f'http://{onion_addr}.onion/', timeout=5).text
except requests.exceptions.RequestException:
url_token_public_key_hex = ''
# Manage online status of contact based on availability of URL token's public key
if url_token_public_key_hex == '':
if check_delay < RELAY_CLIENT_MAX_DELAY:
check_delay *= 2
if check_delay > CLIENT_OFFLINE_THRESHOLD and is_online:
is_online = False
rp_print(f"{short_addr} is now offline", bold=True)
if not is_online:
continue
else:
check_delay = RELAY_CLIENT_MIN_DELAY
if not is_online:
is_online = True
rp_print(f"{short_addr} is now online", bold=True)
# When contact's URL token public key changes, update URL token
if url_token_public_key_hex != cached_pk:
try:
public_key = bytes.fromhex(url_token_public_key_hex)
url_token, cached_pk = update_url_token(
url_token_private_key,
url_token_public_key_hex,
cached_pk,
onion_pub_key,
queues,
)
if len(public_key) != TFC_PUBLIC_KEY_LENGTH or public_key == bytes(TFC_PUBLIC_KEY_LENGTH):
raise ValueError
shared_secret = url_token_private_key.exchange(X448PublicKey.from_public_bytes(public_key))
url_token = hashlib.blake2b(shared_secret, digest_size=URL_TOKEN_LENGTH).hexdigest()
except (TypeError, ValueError):
continue
cached_pk = url_token_public_key_hex # Update client's URL token public key
queues[URL_TOKEN_QUEUE].put((onion_pub_key, url_token)) # Update Flask server's URL token for contact
# Load TFC data with URL token
# ----------------------------
get_data_loop(onion_addr, url_token, short_addr, onion_pub_key, queues, session, gateway)
get_data_loop(
onion_addr,
url_token,
short_addr,
onion_pub_key,
queues,
session,
gateway,
)
if unit_test:
break
def get_data_loop(onion_addr: str,
url_token: str,
short_addr: str,
onion_pub_key: bytes,
queues: 'QueueDict',
session: 'Session',
gateway: 'Gateway') -> None:
def update_url_token(
ut_private_key: "X448PrivateKey",
ut_pubkey_hex: str,
cached_pk: str,
onion_pub_key: bytes,
queues: "QueueDict",
) -> Tuple[str, str]:
"""Update URL token for contact.
When contact's URL token public key changes, update URL token.
"""
if ut_pubkey_hex == cached_pk:
raise SoftError("URL token public key has not changed.", output=False)
try:
public_key = bytes.fromhex(ut_pubkey_hex)
if len(public_key) != TFC_PUBLIC_KEY_LENGTH or public_key == bytes(
TFC_PUBLIC_KEY_LENGTH
):
raise ValueError
shared_secret = ut_private_key.exchange(
X448PublicKey.from_public_bytes(public_key)
)
url_token = hashlib.blake2b(
shared_secret, digest_size=URL_TOKEN_LENGTH
).hexdigest()
queues[URL_TOKEN_QUEUE].put(
(onion_pub_key, url_token)
) # Update Flask server's URL token for contact
return url_token, ut_pubkey_hex
except (TypeError, ValueError):
raise SoftError("URL token derivation failed.", output=False)
def manage_contact_status(
ut_pubkey_hex: str, check_delay: float, is_online: bool, short_addr: str
) -> Tuple[bool, float]:
"""Manage online status of contact based on availability of URL token's public key."""
if ut_pubkey_hex == "":
if check_delay < RELAY_CLIENT_MAX_DELAY:
check_delay *= 2
if check_delay > CLIENT_OFFLINE_THRESHOLD and is_online:
is_online = False
rp_print(f"{short_addr} is now offline", bold=True)
else:
check_delay = RELAY_CLIENT_MIN_DELAY
if not is_online:
is_online = True
rp_print(f"{short_addr} is now online", bold=True)
return is_online, check_delay
def load_url_token(onion_addr: str, session: "Session") -> str:
"""Load URL token for contact."""
try:
ut_pubkey_hex = session.get(f"http://{onion_addr}.onion/", timeout=5).text
except requests.exceptions.RequestException:
ut_pubkey_hex = ""
return ut_pubkey_hex
def send_contact_request(
onion_addr: str, onion_addr_user: str, session: "Session"
) -> None:
"""Send contact request."""
while True:
try:
reply = session.get(
f"http://{onion_addr}.onion/contact_request/{onion_addr_user}",
timeout=5,
).text
if reply == "OK":
break
except requests.exceptions.RequestException:
time.sleep(RELAY_CLIENT_MIN_DELAY)
def get_data_loop(
onion_addr: str,
url_token: str,
short_addr: str,
onion_pub_key: bytes,
queues: "QueueDict",
session: "Session",
gateway: "Gateway",
) -> None:
"""Load TFC data from contact's Onion Service using valid URL token."""
while True:
try:
# See if a file is available
try:
file_data = session.get(f'http://{onion_addr}.onion/{url_token}/files', stream=True).content
if file_data:
ts = datetime.now()
ts_bytes = int_to_bytes(int(ts.strftime('%Y%m%d%H%M%S%f')[:-4]))
packet = FILE_DATAGRAM_HEADER + ts_bytes + onion_pub_key + ORIGIN_CONTACT_HEADER + file_data
queues[DST_MESSAGE_QUEUE].put(packet)
rp_print(f"File from contact {short_addr}", ts)
check_files(
url_token, onion_pub_key, onion_addr, short_addr, session, queues
)
except requests.exceptions.RequestException:
pass
# See if messages are available
try:
r = session.get(f'http://{onion_addr}.onion/{url_token}/messages', stream=True)
r = session.get(
f"http://{onion_addr}.onion/{url_token}/messages", stream=True
)
except requests.exceptions.RequestException:
return None
@ -219,41 +353,97 @@ def get_data_loop(onion_addr: str,
continue
try:
header, payload = separate_header(line, DATAGRAM_HEADER_LENGTH) # type: bytes, bytes
payload_bytes = base64.b85decode(payload)
header, payload = separate_header(
line, DATAGRAM_HEADER_LENGTH
) # type: bytes, bytes
payload_bytes = base64.b85decode(payload)
except (UnicodeError, ValueError):
continue
ts = datetime.now()
ts_bytes = int_to_bytes(int(ts.strftime('%Y%m%d%H%M%S%f')[:-4]))
ts = datetime.now()
ts_bytes = int_to_bytes(int(ts.strftime("%Y%m%d%H%M%S%f")[:-4]))
# Packet type specific handling
if header == PUBLIC_KEY_DATAGRAM_HEADER:
if len(payload_bytes) == TFC_PUBLIC_KEY_LENGTH:
msg = f"Received public key from {short_addr} at {ts.strftime('%b %d - %H:%M:%S.%f')[:-4]}:"
print_key(msg, payload_bytes, gateway.settings, public_key=True)
elif header == MESSAGE_DATAGRAM_HEADER:
queues[DST_MESSAGE_QUEUE].put(header + ts_bytes
+ onion_pub_key + ORIGIN_CONTACT_HEADER + payload_bytes)
rp_print(f"Message from contact {short_addr}", ts)
elif header in [GROUP_MSG_INVITE_HEADER, GROUP_MSG_JOIN_HEADER,
GROUP_MSG_MEMBER_ADD_HEADER, GROUP_MSG_MEMBER_REM_HEADER,
GROUP_MSG_EXIT_GROUP_HEADER]:
queues[GROUP_MSG_QUEUE].put((header, payload_bytes, short_addr))
else:
rp_print(f"Received invalid packet from {short_addr}", ts, bold=True)
process_received_packet(
ts,
ts_bytes,
header,
payload_bytes,
onion_pub_key,
short_addr,
queues,
gateway,
)
except requests.exceptions.RequestException:
break
def g_msg_manager(queues: 'QueueDict',
unit_test: bool = False
) -> None:
def check_files(
url_token: str,
onion_pub_key: bytes,
onion_addr: str,
short_addr: str,
session: "Session",
queues: "QueueDict",
) -> None:
"""See if a file is available from contact.."""
try:
file_data = session.get(
f"http://{onion_addr}.onion/{url_token}/files", stream=True
).content
if file_data:
ts = datetime.now()
ts_bytes = int_to_bytes(int(ts.strftime("%Y%m%d%H%M%S%f")[:-4]))
packet = (
FILE_DATAGRAM_HEADER
+ ts_bytes
+ onion_pub_key
+ ORIGIN_CONTACT_HEADER
+ file_data
)
queues[DST_MESSAGE_QUEUE].put(packet)
rp_print(f"File from contact {short_addr}", ts)
except requests.exceptions.RequestException:
pass
def process_received_packet(
ts: "datetime",
ts_bytes: bytes,
header: bytes,
payload_bytes: bytes,
onion_pub_key: bytes,
short_addr: str,
queues: "QueueDict",
gateway: "Gateway",
) -> None:
"""Process received packet."""
if header == PUBLIC_KEY_DATAGRAM_HEADER:
if len(payload_bytes) == TFC_PUBLIC_KEY_LENGTH:
msg = f"Received public key from {short_addr} at {ts.strftime('%b %d - %H:%M:%S.%f')[:-4]}:"
print_key(msg, payload_bytes, gateway.settings, public_key=True)
elif header == MESSAGE_DATAGRAM_HEADER:
queues[DST_MESSAGE_QUEUE].put(
header + ts_bytes + onion_pub_key + ORIGIN_CONTACT_HEADER + payload_bytes
)
rp_print(f"Message from contact {short_addr}", ts)
elif header in [
GROUP_MSG_INVITE_HEADER,
GROUP_MSG_JOIN_HEADER,
GROUP_MSG_MEMBER_ADD_HEADER,
GROUP_MSG_MEMBER_REM_HEADER,
GROUP_MSG_EXIT_GROUP_HEADER,
]:
queues[GROUP_MSG_QUEUE].put((header, payload_bytes, short_addr))
else:
rp_print(f"Received invalid packet from {short_addr}", ts, bold=True)
def g_msg_manager(queues: "QueueDict", unit_test: bool = False) -> None:
"""Show group management messages according to contact list state.
This process keeps track of existing contacts for whom there's a
@ -262,73 +452,109 @@ def g_msg_manager(queues: 'QueueDict',
and non-existing contacts are displayed under "unknown contacts".
"""
existing_contacts = [] # type: List[bytes]
group_management_queue = queues[GROUP_MGMT_QUEUE]
while True:
with ignored(EOFError, KeyboardInterrupt):
while queues[GROUP_MSG_QUEUE].qsize() == 0:
while not queues[GROUP_MSG_QUEUE].qsize():
time.sleep(0.01)
header, payload, trunc_addr = queues[GROUP_MSG_QUEUE].get()
group_id, data = separate_header(payload, GROUP_ID_LENGTH)
group_id, data = separate_header(payload, GROUP_ID_LENGTH)
if len(group_id) != GROUP_ID_LENGTH:
continue
group_id_hr = b58encode(group_id)
# Update list of existing contacts
while queues[GROUP_MGMT_QUEUE].qsize() > 0:
command, ser_onion_pub_keys = queues[GROUP_MGMT_QUEUE].get()
onion_pub_key_list = split_byte_string(ser_onion_pub_keys, ONION_SERVICE_PUBLIC_KEY_LENGTH)
if command == RP_ADD_CONTACT_HEADER:
existing_contacts = list(set(existing_contacts) | set(onion_pub_key_list))
elif command == RP_REMOVE_CONTACT_HEADER:
existing_contacts = list(set(existing_contacts) - set(onion_pub_key_list))
existing_contacts = update_list_of_existing_contacts(
group_management_queue, existing_contacts
)
# Handle group management messages
if header in [GROUP_MSG_INVITE_HEADER, GROUP_MSG_JOIN_HEADER,
GROUP_MSG_MEMBER_ADD_HEADER, GROUP_MSG_MEMBER_REM_HEADER]:
pub_keys = split_byte_string(data, ONION_SERVICE_PUBLIC_KEY_LENGTH)
pub_key_length = ONION_SERVICE_PUBLIC_KEY_LENGTH
members = [k for k in pub_keys if len(k) == pub_key_length ]
known = [f" * {pub_key_to_onion_address(m)}" for m in members if m in existing_contacts]
unknown = [f" * {pub_key_to_onion_address(m)}" for m in members if m not in existing_contacts]
line_list = []
if known:
line_list.extend(["Known contacts"] + known)
if unknown:
line_list.extend(["Unknown contacts"] + unknown)
if header in [GROUP_MSG_INVITE_HEADER, GROUP_MSG_JOIN_HEADER]:
action = 'invited you to' if header == GROUP_MSG_INVITE_HEADER else 'joined'
postfix = ' with' if members else ''
m_print([f"{trunc_addr} has {action} group {group_id_hr}{postfix}"] + line_list, box=True)
elif header in [GROUP_MSG_MEMBER_ADD_HEADER, GROUP_MSG_MEMBER_REM_HEADER]:
if members:
action, p = ("added", "to") if header == GROUP_MSG_MEMBER_ADD_HEADER else ("removed", "from")
m_print([f"{trunc_addr} has {action} following members {p} group {group_id_hr}"]
+ line_list, box=True)
elif header == GROUP_MSG_EXIT_GROUP_HEADER:
m_print([f"{trunc_addr} has left group {group_id_hr}",
'', "Warning",
"Unless you remove the contact from the group, they",
"can still read messages you send to the group."], box=True)
process_group_management_message(
data, existing_contacts, group_id_hr, header, trunc_addr
)
if unit_test and queues[UNIT_TEST_QUEUE].qsize() != 0:
break
def c_req_manager(queues: 'QueueDict',
unit_test: bool = False
) -> None:
def process_group_management_message(
data: bytes,
existing_contacts: List[bytes],
group_id_hr: str,
header: bytes,
trunc_addr: str,
) -> None:
"""Process group management message."""
if header in [
GROUP_MSG_INVITE_HEADER,
GROUP_MSG_JOIN_HEADER,
GROUP_MSG_MEMBER_ADD_HEADER,
GROUP_MSG_MEMBER_REM_HEADER,
]:
pub_keys = split_byte_string(data, ONION_SERVICE_PUBLIC_KEY_LENGTH)
pub_key_length = ONION_SERVICE_PUBLIC_KEY_LENGTH
members = [k for k in pub_keys if len(k) == pub_key_length]
known = [
f" * {pub_key_to_onion_address(m)}"
for m in members
if m in existing_contacts
]
unknown = [
f" * {pub_key_to_onion_address(m)}"
for m in members
if m not in existing_contacts
]
line_list = []
if known:
line_list.extend(["Known contacts"] + known)
if unknown:
line_list.extend(["Unknown contacts"] + unknown)
if header in [GROUP_MSG_INVITE_HEADER, GROUP_MSG_JOIN_HEADER]:
action = "invited you to" if header == GROUP_MSG_INVITE_HEADER else "joined"
postfix = " with" if members else ""
m_print(
[f"{trunc_addr} has {action} group {group_id_hr}{postfix}"] + line_list,
box=True,
)
elif header in [GROUP_MSG_MEMBER_ADD_HEADER, GROUP_MSG_MEMBER_REM_HEADER]:
if members:
action, p = (
("added", "to")
if header == GROUP_MSG_MEMBER_ADD_HEADER
else ("removed", "from")
)
m_print(
[
f"{trunc_addr} has {action} following members {p} group {group_id_hr}"
]
+ line_list,
box=True,
)
elif header == GROUP_MSG_EXIT_GROUP_HEADER:
m_print(
[
f"{trunc_addr} has left group {group_id_hr}",
"",
"Warning",
"Unless you remove the contact from the group, they",
"can still read messages you send to the group.",
],
box=True,
)
def c_req_manager(queues: "QueueDict", unit_test: bool = False) -> None:
"""Manage incoming contact requests."""
existing_contacts = [] # type: List[bytes]
contact_requests = [] # type: List[bytes]
contact_requests = [] # type: List[bytes]
request_queue = queues[CONTACT_REQ_QUEUE]
contact_queue = queues[C_REQ_MGMT_QUEUE]
@ -337,24 +563,19 @@ def c_req_manager(queues: 'QueueDict',
while True:
with ignored(EOFError, KeyboardInterrupt):
while request_queue.qsize() == 0:
while not request_queue.qsize():
time.sleep(0.1)
purp_onion_address = request_queue.get()
while setting_queue.qsize() != 0:
while setting_queue.qsize():
show_requests = setting_queue.get()
# Update list of existing contacts
while contact_queue.qsize() > 0:
command, ser_onion_pub_keys = contact_queue.get()
onion_pub_key_list = split_byte_string(ser_onion_pub_keys, ONION_SERVICE_PUBLIC_KEY_LENGTH)
existing_contacts = update_list_of_existing_contacts(
contact_queue, existing_contacts
)
if command == RP_ADD_CONTACT_HEADER:
existing_contacts = list(set(existing_contacts) | set(onion_pub_key_list))
elif command == RP_REMOVE_CONTACT_HEADER:
existing_contacts = list(set(existing_contacts) - set(onion_pub_key_list))
if validate_onion_addr(purp_onion_address) == '':
if validate_onion_addr(purp_onion_address) == "":
onion_pub_key = onion_address_to_pub_key(purp_onion_address)
if onion_pub_key in existing_contacts:
continue
@ -362,10 +583,33 @@ def c_req_manager(queues: 'QueueDict',
continue
if show_requests:
ts_fmt = datetime.now().strftime('%b %d - %H:%M:%S.%f')[:-4]
m_print([f"{ts_fmt} - New contact request from an unknown TFC account:", purp_onion_address],
box=True)
ts_fmt = datetime.now().strftime("%b %d - %H:%M:%S.%f")[:-4]
m_print(
[
f"{ts_fmt} - New contact request from an unknown TFC account:",
purp_onion_address,
],
box=True,
)
contact_requests.append(onion_pub_key)
if unit_test and queues[UNIT_TEST_QUEUE].qsize() != 0:
break
def update_list_of_existing_contacts(
contact_queue: "Queue[Any]", existing_contacts: List[bytes]
) -> List[bytes]:
"""Update list of existing contacts."""
while contact_queue.qsize() > 0:
command, ser_onion_pub_keys = contact_queue.get()
onion_pub_key_list = split_byte_string(
ser_onion_pub_keys, ONION_SERVICE_PUBLIC_KEY_LENGTH
)
if command == RP_ADD_CONTACT_HEADER:
existing_contacts = list(set(existing_contacts) | set(onion_pub_key_list))
elif command == RP_REMOVE_CONTACT_HEADER:
existing_contacts = list(set(existing_contacts) - set(onion_pub_key_list))
return existing_contacts

View File

@ -27,39 +27,66 @@ import typing
from typing import Any, Dict
from src.common.encoding import bytes_to_bool, bytes_to_int
from src.common.exceptions import FunctionReturn
from src.common.misc import ignored, separate_header, separate_headers, split_byte_string
from src.common.output import clear_screen, m_print
from src.common.statics import (CONFIRM_CODE_LENGTH, CONTACT_MGMT_QUEUE, C_REQ_MGMT_QUEUE, C_REQ_STATE_QUEUE,
ENCODED_BOOLEAN_LENGTH, ENCODED_INTEGER_LENGTH, EXIT, GROUP_MGMT_QUEUE,
LOCAL_TESTING_PACKET_DELAY, MAX_INT, ONION_CLOSE_QUEUE, ONION_KEY_QUEUE,
ONION_SERVICE_PRIVATE_KEY_LENGTH, ONION_SERVICE_PUBLIC_KEY_LENGTH, RESET,
RP_ADD_CONTACT_HEADER, RP_REMOVE_CONTACT_HEADER, SRC_TO_RELAY_QUEUE,
UNENCRYPTED_ADD_EXISTING_CONTACT, UNENCRYPTED_ADD_NEW_CONTACT, UNENCRYPTED_BAUDRATE,
UNENCRYPTED_COMMAND_HEADER_LENGTH, UNENCRYPTED_EC_RATIO, UNENCRYPTED_EXIT_COMMAND,
UNENCRYPTED_MANAGE_CONTACT_REQ, UNENCRYPTED_ONION_SERVICE_DATA,
UNENCRYPTED_REM_CONTACT, UNENCRYPTED_SCREEN_CLEAR, UNENCRYPTED_SCREEN_RESET,
UNENCRYPTED_WIPE_COMMAND, WIPE)
from src.common.encoding import bytes_to_bool, bytes_to_int
from src.common.exceptions import SoftError
from src.common.misc import (
ignored,
reset_terminal,
separate_header,
separate_headers,
split_byte_string,
)
from src.common.output import clear_screen, m_print
from src.common.statics import (
CONFIRM_CODE_LENGTH,
CONTACT_MGMT_QUEUE,
C_REQ_MGMT_QUEUE,
C_REQ_STATE_QUEUE,
ENCODED_BOOLEAN_LENGTH,
ENCODED_INTEGER_LENGTH,
EXIT,
GROUP_MGMT_QUEUE,
LOCAL_TESTING_PACKET_DELAY,
MAX_INT,
ONION_CLOSE_QUEUE,
ONION_KEY_QUEUE,
ONION_SERVICE_PRIVATE_KEY_LENGTH,
ONION_SERVICE_PUBLIC_KEY_LENGTH,
RP_ADD_CONTACT_HEADER,
RP_REMOVE_CONTACT_HEADER,
SRC_TO_RELAY_QUEUE,
UNENCRYPTED_ADD_EXISTING_CONTACT,
UNENCRYPTED_ADD_NEW_CONTACT,
UNENCRYPTED_BAUDRATE,
UNENCRYPTED_COMMAND_HEADER_LENGTH,
UNENCRYPTED_EC_RATIO,
UNENCRYPTED_EXIT_COMMAND,
UNENCRYPTED_MANAGE_CONTACT_REQ,
UNENCRYPTED_ONION_SERVICE_DATA,
UNENCRYPTED_REM_CONTACT,
UNENCRYPTED_SCREEN_CLEAR,
UNENCRYPTED_SCREEN_RESET,
UNENCRYPTED_WIPE_COMMAND,
WIPE,
)
if typing.TYPE_CHECKING:
from multiprocessing import Queue
from multiprocessing import Queue
from src.common.gateway import Gateway
QueueDict = Dict[bytes, Queue[Any]]
def relay_command(queues: 'QueueDict',
gateway: 'Gateway',
stdin_fd: int,
unit_test: bool = False
) -> None:
def relay_command(
queues: "QueueDict", gateway: "Gateway", stdin_fd: int, unit_test: bool = False
) -> None:
"""Process Relay Program commands."""
sys.stdin = os.fdopen(stdin_fd)
sys.stdin = os.fdopen(stdin_fd)
queue_from_src = queues[SRC_TO_RELAY_QUEUE]
while True:
with ignored(EOFError, FunctionReturn, KeyboardInterrupt):
while queue_from_src.qsize() == 0:
with ignored(EOFError, KeyboardInterrupt, SoftError):
while not queue_from_src.qsize():
time.sleep(0.01)
command = queue_from_src.get()
@ -69,57 +96,55 @@ def relay_command(queues: 'QueueDict',
break
def process_command(command: bytes,
gateway: 'Gateway',
queues: 'QueueDict'
) -> None:
def process_command(command: bytes, gateway: "Gateway", queues: "QueueDict") -> None:
"""Select function for received Relay Program command."""
header, command = separate_header(command, UNENCRYPTED_COMMAND_HEADER_LENGTH)
# Keyword Function to run ( Parameters )
# ---------------------------------------------------------------------------------
function_d = {UNENCRYPTED_SCREEN_CLEAR: (clear_windows, gateway, ),
UNENCRYPTED_SCREEN_RESET: (reset_windows, gateway, ),
UNENCRYPTED_EXIT_COMMAND: (exit_tfc, gateway, queues),
UNENCRYPTED_WIPE_COMMAND: (wipe, gateway, queues),
UNENCRYPTED_EC_RATIO: (change_ec_ratio, command, gateway, ),
UNENCRYPTED_BAUDRATE: (change_baudrate, command, gateway, ),
UNENCRYPTED_MANAGE_CONTACT_REQ: (manage_contact_req, command, queues),
UNENCRYPTED_ADD_NEW_CONTACT: (add_contact, command, False, queues),
UNENCRYPTED_ADD_EXISTING_CONTACT: (add_contact, command, True, queues),
UNENCRYPTED_REM_CONTACT: (remove_contact, command, queues),
UNENCRYPTED_ONION_SERVICE_DATA: (add_onion_data, command, queues)
} # type: Dict[bytes, Any]
function_d = {
UNENCRYPTED_SCREEN_CLEAR: (clear_windows, gateway,),
UNENCRYPTED_SCREEN_RESET: (reset_windows, gateway,),
UNENCRYPTED_EXIT_COMMAND: (exit_tfc, gateway, queues),
UNENCRYPTED_WIPE_COMMAND: (wipe, gateway, queues),
UNENCRYPTED_EC_RATIO: (change_ec_ratio, command, gateway,),
UNENCRYPTED_BAUDRATE: (change_baudrate, command, gateway,),
UNENCRYPTED_MANAGE_CONTACT_REQ: (manage_contact_req, command, queues),
UNENCRYPTED_ADD_NEW_CONTACT: (add_contact, command, False, queues),
UNENCRYPTED_ADD_EXISTING_CONTACT: (add_contact, command, True, queues),
UNENCRYPTED_REM_CONTACT: (remove_contact, command, queues),
UNENCRYPTED_ONION_SERVICE_DATA: (add_onion_data, command, queues),
} # type: Dict[bytes, Any]
if header not in function_d:
raise FunctionReturn("Error: Received an invalid command.")
raise SoftError("Error: Received an invalid command.")
from_dict = function_d[header]
func = from_dict[0]
from_dict = function_d[header]
func = from_dict[0]
parameters = from_dict[1:]
func(*parameters)
def race_condition_delay(gateway: 'Gateway') -> None:
def race_condition_delay(gateway: "Gateway") -> None:
"""Prevent race condition with Receiver command."""
if gateway.settings.local_testing_mode:
time.sleep(LOCAL_TESTING_PACKET_DELAY)
time.sleep(gateway.settings.data_diode_sockets * 1.0)
def clear_windows(gateway: 'Gateway') -> None:
def clear_windows(gateway: "Gateway") -> None:
"""Clear Relay Program screen."""
race_condition_delay(gateway)
clear_screen()
def reset_windows(gateway: 'Gateway') -> None:
def reset_windows(gateway: "Gateway") -> None:
"""Reset Relay Program screen."""
race_condition_delay(gateway)
os.system(RESET)
reset_terminal()
def exit_tfc(gateway: 'Gateway', queues: 'QueueDict') -> None:
def exit_tfc(gateway: "Gateway", queues: "QueueDict") -> None:
"""Exit TFC.
The queue is read by
@ -129,7 +154,7 @@ def exit_tfc(gateway: 'Gateway', queues: 'QueueDict') -> None:
queues[ONION_CLOSE_QUEUE].put(EXIT)
def wipe(gateway: 'Gateway', queues: 'QueueDict') -> None:
def wipe(gateway: "Gateway", queues: "QueueDict") -> None:
"""Reset terminal, wipe all user data and power off the system.
No effective RAM overwriting tool currently exists, so as long as Source and
@ -140,19 +165,21 @@ def wipe(gateway: 'Gateway', queues: 'QueueDict') -> None:
The queue is read by
relay.onion.onion_service()
"""
os.system(RESET)
reset_terminal()
race_condition_delay(gateway)
queues[ONION_CLOSE_QUEUE].put(WIPE)
def change_ec_ratio(command: bytes, gateway: 'Gateway') -> None:
def change_ec_ratio(command: bytes, gateway: "Gateway") -> None:
"""Change Relay Program's Reed-Solomon error correction ratio."""
try:
value = int(command)
if value < 0 or value > MAX_INT:
raise ValueError
except ValueError:
raise FunctionReturn("Error: Received invalid EC ratio value from Transmitter Program.")
raise SoftError(
"Error: Received invalid EC ratio value from Transmitter Program."
)
m_print("Error correction ratio will change on restart.", head=1, tail=1)
@ -160,14 +187,16 @@ def change_ec_ratio(command: bytes, gateway: 'Gateway') -> None:
gateway.settings.store_settings()
def change_baudrate(command: bytes, gateway: 'Gateway') -> None:
def change_baudrate(command: bytes, gateway: "Gateway") -> None:
"""Change Relay Program's serial interface baud rate setting."""
try:
value = int(command)
if value not in serial.Serial.BAUDRATES:
raise ValueError
except ValueError:
raise FunctionReturn("Error: Received invalid baud rate value from Transmitter Program.")
raise SoftError(
"Error: Received invalid baud rate value from Transmitter Program."
)
m_print("Baud rate will change on restart.", head=1, tail=1)
@ -175,20 +204,21 @@ def change_baudrate(command: bytes, gateway: 'Gateway') -> None:
gateway.settings.store_settings()
def manage_contact_req(command: bytes,
queues: 'QueueDict',
notify: bool = True) -> None:
def manage_contact_req(
command: bytes, queues: "QueueDict", notify: bool = True
) -> None:
"""Control whether contact requests are accepted."""
enabled = bytes_to_bool(command)
if notify:
m_print(f"Contact requests are have been {('enabled' if enabled else 'disabled')}.", head=1, tail=1)
m_print(
f"Contact requests are have been {('enabled' if enabled else 'disabled')}.",
head=1,
tail=1,
)
queues[C_REQ_STATE_QUEUE].put(enabled)
def add_contact(command: bytes,
existing: bool,
queues: 'QueueDict'
) -> None:
def add_contact(command: bytes, existing: bool, queues: "QueueDict") -> None:
"""Add clients to Relay Program.
The queues are read by
@ -201,7 +231,7 @@ def add_contact(command: bytes,
queues[C_REQ_MGMT_QUEUE].put((RP_ADD_CONTACT_HEADER, command))
def remove_contact(command: bytes, queues: 'QueueDict') -> None:
def remove_contact(command: bytes, queues: "QueueDict") -> None:
"""Remove clients from Relay Program.
The queues are read by
@ -214,7 +244,7 @@ def remove_contact(command: bytes, queues: 'QueueDict') -> None:
queues[C_REQ_MGMT_QUEUE].put((RP_REMOVE_CONTACT_HEADER, command))
def add_onion_data(command: bytes, queues: 'QueueDict') -> None:
def add_onion_data(command: bytes, queues: "QueueDict") -> None:
"""Add Onion Service data.
Separate onion service private key and public keys for
@ -223,13 +253,25 @@ def add_onion_data(command: bytes, queues: 'QueueDict') -> None:
The ONION_KEY_QUEUE is read by
relay.onion.onion_service()
"""
os_private_key, confirmation_code, allow_req_byte, no_pending_bytes, ser_pub_keys \
= separate_headers(command, [ONION_SERVICE_PRIVATE_KEY_LENGTH, CONFIRM_CODE_LENGTH,
ENCODED_BOOLEAN_LENGTH, ENCODED_INTEGER_LENGTH])
(
os_private_key,
confirmation_code,
allow_req_byte,
no_pending_bytes,
ser_pub_keys,
) = separate_headers(
command,
[
ONION_SERVICE_PRIVATE_KEY_LENGTH,
CONFIRM_CODE_LENGTH,
ENCODED_BOOLEAN_LENGTH,
ENCODED_INTEGER_LENGTH,
],
)
no_pending = bytes_to_int(no_pending_bytes)
public_key_list = split_byte_string(ser_pub_keys, ONION_SERVICE_PUBLIC_KEY_LENGTH)
pending_public_keys = public_key_list[:no_pending]
no_pending = bytes_to_int(no_pending_bytes)
public_key_list = split_byte_string(ser_pub_keys, ONION_SERVICE_PUBLIC_KEY_LENGTH)
pending_public_keys = public_key_list[:no_pending]
existing_public_keys = public_key_list[no_pending:]
for onion_pub_key in pending_public_keys:

View File

@ -29,7 +29,7 @@ import tempfile
import time
import typing
from typing import Any, Dict, Optional
from typing import Any, Dict, Optional, Union
import nacl.signing
@ -38,26 +38,38 @@ import stem.process
from stem.control import Controller
from src.common.encoding import pub_key_to_onion_address
from src.common.encoding import pub_key_to_onion_address
from src.common.exceptions import CriticalError
from src.common.output import m_print, rp_print
from src.common.statics import (EXIT, EXIT_QUEUE, ONION_CLOSE_QUEUE, ONION_KEY_QUEUE,
ONION_SERVICE_PRIVATE_KEY_LENGTH, TOR_CONTROL_PORT, TOR_DATA_QUEUE, TOR_SOCKS_PORT)
from src.common.output import m_print, rp_print
from src.common.statics import (
EXIT,
EXIT_QUEUE,
ONION_CLOSE_QUEUE,
ONION_KEY_QUEUE,
ONION_SERVICE_PRIVATE_KEY_LENGTH,
TOR_CONTROL_PORT,
TOR_DATA_QUEUE,
TOR_SOCKS_PORT,
)
if typing.TYPE_CHECKING:
from multiprocessing import Queue
QueueDict = Dict[bytes, Queue[Any]]
def get_available_port(min_port: int, max_port: int) -> int:
"""Find a random available port within the given range."""
sys_rand = random.SystemRandom()
with socket.socket() as temp_sock:
while True:
try:
temp_sock.bind(('127.0.0.1', random.randint(min_port, max_port)))
temp_sock.bind(("127.0.0.1", sys_rand.randint(min_port, max_port)))
break
except OSError:
pass
_, port = temp_sock.getsockname() # type: Any, int
_, port = temp_sock.getsockname() # type: str, int
if Tor.platform_is_tails():
return TOR_SOCKS_PORT
@ -70,12 +82,12 @@ class Tor(object):
def __init__(self) -> None:
self.tor_process = None # type: Optional[Any]
self.controller = None # type: Optional[Controller]
self.controller = None # type: Optional[Controller]
@staticmethod
def platform_is_tails() -> bool:
"""Return True if Relay Program is running on Tails."""
with open('/etc/os-release') as f:
with open("/etc/os-release") as f:
data = f.read()
return 'TAILS_PRODUCT_NAME="Tails"' in data
@ -91,29 +103,17 @@ class Tor(object):
return None
tor_data_directory = tempfile.TemporaryDirectory()
tor_control_socket = os.path.join(tor_data_directory.name, 'control_socket')
tor_control_socket = os.path.join(tor_data_directory.name, "control_socket")
if not os.path.isfile('/usr/bin/tor'):
if not os.path.isfile("/usr/bin/tor"):
raise CriticalError("Check that Tor is installed.")
while True:
try:
self.tor_process = stem.process.launch_tor_with_config(
config={'DataDirectory': tor_data_directory.name,
'SocksPort': str(port),
'ControlSocket': tor_control_socket,
'AvoidDiskWrites': '1',
'Log': 'notice stdout',
'GeoIPFile': '/usr/share/tor/geoip',
'GeoIPv6File ': '/usr/share/tor/geoip6'},
tor_cmd='/usr/bin/tor')
break
except OSError:
pass # Tor timed out. Try again.
self.launch_tor_process(port, tor_control_socket, tor_data_directory)
start_ts = time.monotonic()
self.controller = stem.control.Controller.from_socket_file(path=tor_control_socket)
self.controller = stem.control.Controller.from_socket_file(
path=tor_control_socket
)
self.controller.authenticate()
while True:
@ -125,18 +125,43 @@ class Tor(object):
raise CriticalError("Tor socket closed.")
res_parts = shlex.split(response)
summary = res_parts[4].split('=')[1]
summary = res_parts[4].split("=")[1]
if summary == 'Done':
tor_version = self.controller.get_version().version_str.split(' (')[0]
if summary == "Done":
tor_version = self.controller.get_version().version_str.split(" (")[0]
rp_print(f"Setup 70% - Tor {tor_version} is now running", bold=True)
break
if time.monotonic() - start_ts > 15:
start_ts = time.monotonic()
self.controller = stem.control.Controller.from_socket_file(path=tor_control_socket)
self.controller = stem.control.Controller.from_socket_file(
path=tor_control_socket
)
self.controller.authenticate()
def launch_tor_process(
self, port: int, tor_control_socket: Union[bytes, str], tor_data_directory: Any
) -> None:
"""Launch Tor process."""
while True:
try:
self.tor_process = stem.process.launch_tor_with_config(
config={
"DataDirectory": tor_data_directory.name,
"SocksPort": str(port),
"ControlSocket": tor_control_socket,
"AvoidDiskWrites": "1",
"Log": "notice stdout",
"GeoIPFile": "/usr/share/tor/geoip",
"GeoIPv6File ": "/usr/share/tor/geoip6",
},
tor_cmd="/usr/bin/tor",
)
break
except OSError:
pass # Tor timed out. Try again.
def stop(self) -> None:
"""Stop the Tor subprocess."""
if self.tor_process is not None:
@ -164,13 +189,18 @@ def stem_compatible_ed25519_key_from_private_key(private_key: bytes) -> str:
def encode_int(y: int) -> bytes:
"""Encode integer to 32-byte bytestring (little-endian format)."""
bits = [(y >> i) & 1 for i in range(b)]
return b''.join([bytes([(sum([bits[i * 8 + j] << j for j in range(8)]))]) for i in range(b // 8)])
return b"".join(
[
bytes([(sum([bits[i * 8 + j] << j for j in range(8)]))])
for i in range(b // 8)
]
)
def expand_private_key(sk: bytes) -> bytes:
"""Expand private key to base64 blob."""
h = hashlib.sha512(sk).digest()
a = 2 ** (b - 2) + sum(2 ** i * bit(h, i) for i in range(3, b - 2))
k = b''.join([bytes([h[i]]) for i in range(b // 8, b // 4)])
k = b"".join([bytes([h[i]]) for i in range(b // 8, b // 4)])
return encode_int(a) + k
@ -182,20 +212,20 @@ def stem_compatible_ed25519_key_from_private_key(private_key: bytes) -> str:
return base64.b64encode(expanded_private_key).decode()
def onion_service(queues: Dict[bytes, 'Queue[Any]']) -> None:
def onion_service(queues: Dict[bytes, "Queue[Any]"]) -> None:
"""Manage the Tor Onion Service and control Tor via stem."""
rp_print("Setup 0% - Waiting for Onion Service configuration...", bold=True)
while queues[ONION_KEY_QUEUE].qsize() == 0:
while not queues[ONION_KEY_QUEUE].qsize():
time.sleep(0.1)
private_key, c_code = queues[ONION_KEY_QUEUE].get() # type: bytes, bytes
public_key_user = bytes(nacl.signing.SigningKey(seed=private_key).verify_key)
onion_addr_user = pub_key_to_onion_address(public_key_user)
public_key_user = bytes(nacl.signing.SigningKey(seed=private_key).verify_key)
onion_addr_user = pub_key_to_onion_address(public_key_user)
try:
rp_print("Setup 10% - Launching Tor...", bold=True)
tor_port = get_available_port(1000, 65535)
tor = Tor()
tor = Tor()
tor.connect(tor_port)
except (EOFError, KeyboardInterrupt):
return
@ -206,15 +236,23 @@ def onion_service(queues: Dict[bytes, 'Queue[Any]']) -> None:
try:
rp_print("Setup 75% - Launching Onion Service...", bold=True)
key_data = stem_compatible_ed25519_key_from_private_key(private_key)
response = tor.controller.create_ephemeral_hidden_service(ports={80: 5000},
key_type='ED25519-V3',
key_content=key_data,
await_publication=True)
response = tor.controller.create_ephemeral_hidden_service(
ports={80: 5000},
key_type="ED25519-V3",
key_content=key_data,
await_publication=True,
)
rp_print("Setup 100% - Onion Service is now published.", bold=True)
m_print(["Your TFC account is:",
onion_addr_user, '',
f"Onion Service confirmation code (to Transmitter): {c_code.hex()}"], box=True)
m_print(
[
"Your TFC account is:",
onion_addr_user,
"",
f"Onion Service confirmation code (to Transmitter): {c_code.hex()}",
],
box=True,
)
# Allow the client to start looking for contacts at this point.
queues[TOR_DATA_QUEUE].put((tor_port, onion_addr_user))
@ -223,6 +261,11 @@ def onion_service(queues: Dict[bytes, 'Queue[Any]']) -> None:
tor.stop()
return
monitor_queues(tor, response, queues)
def monitor_queues(tor: Tor, response: Any, queues: "QueueDict") -> None:
"""Monitor queues for incoming packets."""
while True:
try:
time.sleep(0.1)
@ -230,12 +273,22 @@ def onion_service(queues: Dict[bytes, 'Queue[Any]']) -> None:
if queues[ONION_KEY_QUEUE].qsize() > 0:
_, c_code = queues[ONION_KEY_QUEUE].get()
m_print(["Onion Service is already running.", '',
f"Onion Service confirmation code (to Transmitter): {c_code.hex()}"], box=True)
m_print(
[
"Onion Service is already running.",
"",
f"Onion Service confirmation code (to Transmitter): {c_code.hex()}",
],
box=True,
)
if queues[ONION_CLOSE_QUEUE].qsize() > 0:
command = queues[ONION_CLOSE_QUEUE].get()
if not tor.platform_is_tails() and command == EXIT:
if (
not tor.platform_is_tails()
and command == EXIT
and tor.controller is not None
):
tor.controller.remove_hidden_service(response.service_id)
tor.stop()
queues[EXIT_QUEUE].put(command)
@ -245,6 +298,7 @@ def onion_service(queues: Dict[bytes, 'Queue[Any]']) -> None:
except (EOFError, KeyboardInterrupt):
pass
except stem.SocketClosed:
tor.controller.remove_hidden_service(response.service_id)
tor.stop()
if tor.controller is not None:
tor.controller.remove_hidden_service(response.service_id)
tor.stop()
break

View File

@ -21,26 +21,64 @@ along with TFC. If not, see <https://www.gnu.org/licenses/>.
import hmac
import logging
import threading
import time
import typing
from io import BytesIO
from io import BytesIO
from multiprocessing import Queue
from typing import Any, Dict, List, Optional
from typing import Any, Dict, List, Optional
from flask import Flask, send_file
from src.common.statics import CONTACT_REQ_QUEUE, F_TO_FLASK_QUEUE, M_TO_FLASK_QUEUE, URL_TOKEN_QUEUE
from src.common.misc import HideRunTime
from src.common.statics import (
CONTACT_REQ_QUEUE,
F_TO_FLASK_QUEUE,
M_TO_FLASK_QUEUE,
URL_TOKEN_QUEUE,
)
if typing.TYPE_CHECKING:
QueueDict = Dict[bytes, Queue[Any]]
PubKeyDict = Dict[str, bytes]
MessageDict = Dict[bytes, List[str]]
FileDict = Dict[bytes, List[bytes]]
def flask_server(queues: 'QueueDict',
url_token_public_key: str,
unit_test: bool = False
) -> Optional[Flask]:
def validate_url_token(
purp_url_token: str, queues: "QueueDict", pub_key_dict: "PubKeyDict"
) -> bool:
"""Validate URL token using constant time comparison."""
# This context manager hides the duration of URL_TOKEN_QUEUE check as
# well as the number of accounts in pub_key_dict when iterating over keys.
with HideRunTime(duration=0.01):
# Check if the client has derived new URL token for contact(s).
# If yes, add the url tokens to pub_key_dict to have up-to-date
# information about whether the purported URL tokens are valid.
while queues[URL_TOKEN_QUEUE].qsize() > 0:
onion_pub_key, url_token = queues[URL_TOKEN_QUEUE].get()
# To keep dictionary compact, delete old key when new
# one with matching value (onion_pub_key) is received.
for ut in list(pub_key_dict.keys()):
if pub_key_dict[ut] == onion_pub_key:
del pub_key_dict[ut]
pub_key_dict[url_token] = onion_pub_key
# Here we OR the result of constant time comparison with initial
# False. ORing is also a constant time operation that returns
# True if a matching shared secret was found in pub_key_dict.
valid_url_token = False
for url_token in pub_key_dict:
valid_url_token |= hmac.compare_digest(purp_url_token, url_token)
return valid_url_token
def flask_server(
queues: "QueueDict", url_token_public_key: str, unit_test: bool = False
) -> Optional[Flask]:
"""Run Flask web server for outgoing messages.
This process runs Flask web server from where clients of contacts
@ -60,114 +98,35 @@ def flask_server(queues: 'QueueDict',
connection is strongly authenticated by the Onion Service domain
name, that is, the TFC account pinned by the user.
"""
app = Flask(__name__)
pub_key_dict = dict() # type: Dict[str, bytes]
message_dict = dict() # type: Dict[bytes, List[str]]
file_dict = dict() # type: Dict[bytes, List[bytes]]
app = Flask(__name__)
pub_key_dict = dict() # type: PubKeyDict
message_dict = dict() # type: MessageDict
file_dict = dict() # type: FileDict
class HideRunTime(object):
"""Context manager that hides function runtime.
By joining a thread that sleeps for a longer time than it takes
for the function to run, this context manager hides the actual
running time of the function.
"""
def __init__(self, length: float = 0.0) -> None:
self.length = length
def __enter__(self) -> None:
self.timer = threading.Thread(target=time.sleep, args=(self.length,))
self.timer.start()
def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
self.timer.join()
def validate_url_token(purp_url_token: str) -> bool:
"""Validate URL token using constant time comparison."""
# This context manager hides the duration of URL_TOKEN_QUEUE check as
# well as the number of accounts in pub_key_dict when iterating over keys.
with HideRunTime(0.01):
# Check if the client has derived new URL token for contact(s).
# If yes, add the url tokens to pub_key_dict to have up-to-date
# information about whether the purported URL tokens are valid.
while queues[URL_TOKEN_QUEUE].qsize() > 0:
onion_pub_key, url_token = queues[URL_TOKEN_QUEUE].get()
# To keep dictionary compact, delete old key when new
# one with matching value (onion_pub_key) is received.
for ut in list(pub_key_dict.keys()):
if pub_key_dict[ut] == onion_pub_key:
del pub_key_dict[ut]
pub_key_dict[url_token] = onion_pub_key
# Here we OR the result of constant time comparison with initial
# False. ORing is also a constant time operation that returns
# True if a matching shared secret was found in pub_key_dict.
valid_url_token = False
for url_token in pub_key_dict:
valid_url_token |= hmac.compare_digest(purp_url_token, url_token)
return valid_url_token
@app.route('/')
@app.route("/")
def index() -> str:
"""Return the URL token public key to contacts that know the .onion address."""
return url_token_public_key
@app.route('/contact_request/<string:purp_onion_address>')
@app.route("/contact_request/<string:purp_onion_address>")
def contact_request(purp_onion_address: str) -> str:
"""Pass contact request to `c_req_manager`."""
queues[CONTACT_REQ_QUEUE].put(purp_onion_address)
return 'OK'
return "OK"
@app.route('/<purp_url_token>/files/')
@app.route("/<purp_url_token>/files/")
def file_get(purp_url_token: str) -> Any:
"""Validate the URL token and return a queued file."""
if not validate_url_token(purp_url_token):
return ''
return get_file(purp_url_token, queues, pub_key_dict, file_dict)
identified_onion_pub_key = pub_key_dict[purp_url_token]
while queues[F_TO_FLASK_QUEUE].qsize() != 0:
packet, onion_pub_key = queues[F_TO_FLASK_QUEUE].get()
file_dict.setdefault(onion_pub_key, []).append(packet)
if identified_onion_pub_key in file_dict and file_dict[identified_onion_pub_key]:
mem = BytesIO()
mem.write(file_dict[identified_onion_pub_key].pop(0))
mem.seek(0)
return send_file(mem, mimetype='application/octet-stream')
else:
return ''
@app.route('/<purp_url_token>/messages/')
def contacts_url(purp_url_token: str) -> str:
@app.route("/<purp_url_token>/messages/")
def message_get(purp_url_token: str) -> str:
"""Validate the URL token and return queued messages."""
if not validate_url_token(purp_url_token):
return ''
identified_onion_pub_key = pub_key_dict[purp_url_token]
# Load outgoing messages for all contacts,
# return the oldest message for contact
while queues[M_TO_FLASK_QUEUE].qsize() != 0:
packet, onion_pub_key = queues[M_TO_FLASK_QUEUE].get()
message_dict.setdefault(onion_pub_key, []).append(packet)
if identified_onion_pub_key in message_dict and message_dict[identified_onion_pub_key]:
packets = '\n'.join(message_dict[identified_onion_pub_key]) # All messages for contact
message_dict[identified_onion_pub_key] = []
return packets
else:
return ''
return get_message(purp_url_token, queues, pub_key_dict, message_dict)
# --------------------------------------------------------------------------
log = logging.getLogger('werkzeug')
log = logging.getLogger("werkzeug")
log.setLevel(logging.ERROR)
if unit_test:
@ -175,3 +134,57 @@ def flask_server(queues: 'QueueDict',
else: # pragma: no cover
app.run()
return None
def get_message(
purp_url_token: str,
queues: "QueueDict",
pub_key_dict: "PubKeyDict",
message_dict: "MessageDict",
) -> str:
"""Send queued messages to contact."""
if not validate_url_token(purp_url_token, queues, pub_key_dict):
return ""
identified_onion_pub_key = pub_key_dict[purp_url_token]
# Load outgoing messages for all contacts,
# return the oldest message for contact
while queues[M_TO_FLASK_QUEUE].qsize():
packet, onion_pub_key = queues[M_TO_FLASK_QUEUE].get()
message_dict.setdefault(onion_pub_key, []).append(packet)
if (
identified_onion_pub_key in message_dict
and message_dict[identified_onion_pub_key]
):
packets = "\n".join(
message_dict[identified_onion_pub_key]
) # All messages for contact
message_dict[identified_onion_pub_key] = []
return packets
return ""
def get_file(
purp_url_token: str,
queues: "QueueDict",
pub_key_dict: "PubKeyDict",
file_dict: "FileDict",
) -> Any:
"""Send queued files to contact."""
if not validate_url_token(purp_url_token, queues, pub_key_dict):
return ""
identified_onion_pub_key = pub_key_dict[purp_url_token]
while queues[F_TO_FLASK_QUEUE].qsize():
packet, onion_pub_key = queues[F_TO_FLASK_QUEUE].get()
file_dict.setdefault(onion_pub_key, []).append(packet)
if identified_onion_pub_key in file_dict and file_dict[identified_onion_pub_key]:
mem = BytesIO()
mem.write(file_dict[identified_onion_pub_key].pop(0))
mem.seek(0)
return send_file(mem, mimetype="application/octet-stream")
return ""

View File

@ -22,159 +22,268 @@ along with TFC. If not, see <https://www.gnu.org/licenses/>.
import time
import typing
from typing import Any, Dict, Tuple, Union
from typing import Any, Dict, List, Tuple, Union
from src.common.encoding import bytes_to_int, pub_key_to_short_address
from src.common.encoding import int_to_bytes, b85encode
from src.common.exceptions import FunctionReturn
from src.common.misc import ignored, separate_header, split_byte_string
from src.common.output import rp_print
from src.common.statics import (COMMAND_DATAGRAM_HEADER, DATAGRAM_HEADER_LENGTH, DST_COMMAND_QUEUE,
DST_MESSAGE_QUEUE, ENCODED_INTEGER_LENGTH, FILE_DATAGRAM_HEADER, F_TO_FLASK_QUEUE,
GATEWAY_QUEUE, GROUP_ID_LENGTH, GROUP_MSG_EXIT_GROUP_HEADER, GROUP_MSG_INVITE_HEADER,
GROUP_MSG_JOIN_HEADER, GROUP_MSG_MEMBER_ADD_HEADER, GROUP_MSG_MEMBER_REM_HEADER,
LOCAL_KEY_DATAGRAM_HEADER, MESSAGE_DATAGRAM_HEADER, M_TO_FLASK_QUEUE,
ONION_SERVICE_PUBLIC_KEY_LENGTH, ORIGIN_USER_HEADER, PUBLIC_KEY_DATAGRAM_HEADER,
SRC_TO_RELAY_QUEUE, UNENCRYPTED_DATAGRAM_HEADER, UNIT_TEST_QUEUE)
from src.common.encoding import bytes_to_int, pub_key_to_short_address
from src.common.encoding import int_to_bytes, b85encode
from src.common.exceptions import SoftError
from src.common.misc import ignored, separate_header, split_byte_string
from src.common.output import rp_print
from src.common.statics import (
COMMAND_DATAGRAM_HEADER,
DATAGRAM_HEADER_LENGTH,
DST_COMMAND_QUEUE,
DST_MESSAGE_QUEUE,
ENCODED_INTEGER_LENGTH,
FILE_DATAGRAM_HEADER,
F_TO_FLASK_QUEUE,
GATEWAY_QUEUE,
GROUP_ID_LENGTH,
GROUP_MSG_EXIT_GROUP_HEADER,
GROUP_MSG_INVITE_HEADER,
GROUP_MSG_JOIN_HEADER,
GROUP_MSG_MEMBER_ADD_HEADER,
GROUP_MSG_MEMBER_REM_HEADER,
LOCAL_KEY_DATAGRAM_HEADER,
MESSAGE_DATAGRAM_HEADER,
M_TO_FLASK_QUEUE,
ONION_SERVICE_PUBLIC_KEY_LENGTH,
ORIGIN_USER_HEADER,
PUBLIC_KEY_DATAGRAM_HEADER,
SRC_TO_RELAY_QUEUE,
UNENCRYPTED_DATAGRAM_HEADER,
UNIT_TEST_QUEUE,
)
if typing.TYPE_CHECKING:
from datetime import datetime
from multiprocessing import Queue
from datetime import datetime
from multiprocessing import Queue
from src.common.gateway import Gateway
QueueDict = Dict[bytes, Queue[Any]]
def queue_to_flask(packet: Union[bytes, str],
onion_pub_key: bytes,
flask_queue: 'Queue[Tuple[Union[bytes, str], bytes]]',
ts: 'datetime',
header: bytes
) -> None:
def queue_to_flask(
packet: Union[bytes, str],
onion_pub_key: bytes,
flask_queue: "Queue[Tuple[Union[bytes, str], bytes]]",
ts: "datetime",
header: bytes,
) -> None:
"""Put packet to flask queue and print message."""
p_type = {MESSAGE_DATAGRAM_HEADER: 'Message ',
PUBLIC_KEY_DATAGRAM_HEADER: 'Pub key ',
FILE_DATAGRAM_HEADER: 'File ',
GROUP_MSG_INVITE_HEADER: 'G invite ',
GROUP_MSG_JOIN_HEADER: 'G join ',
GROUP_MSG_MEMBER_ADD_HEADER: 'G add ',
GROUP_MSG_MEMBER_REM_HEADER: 'G remove ',
GROUP_MSG_EXIT_GROUP_HEADER: 'G exit '}[header]
p_type = {
MESSAGE_DATAGRAM_HEADER: "Message ",
PUBLIC_KEY_DATAGRAM_HEADER: "Pub key ",
FILE_DATAGRAM_HEADER: "File ",
GROUP_MSG_INVITE_HEADER: "G invite ",
GROUP_MSG_JOIN_HEADER: "G join ",
GROUP_MSG_MEMBER_ADD_HEADER: "G add ",
GROUP_MSG_MEMBER_REM_HEADER: "G remove ",
GROUP_MSG_EXIT_GROUP_HEADER: "G exit ",
}[header]
flask_queue.put((packet, onion_pub_key))
rp_print(f"{p_type} to contact {pub_key_to_short_address(onion_pub_key)}", ts)
def src_incoming(queues: 'QueueDict',
gateway: 'Gateway',
unit_test: bool = False
) -> None:
def src_incoming(
queues: "QueueDict", gateway: "Gateway", unit_test: bool = False
) -> None:
"""\
Redirect datagrams received from Source Computer to appropriate queues.
"""
packets_from_sc = queues[GATEWAY_QUEUE]
packets_to_dc = queues[DST_MESSAGE_QUEUE]
commands_to_dc = queues[DST_COMMAND_QUEUE]
messages_to_flask = queues[M_TO_FLASK_QUEUE]
files_to_flask = queues[F_TO_FLASK_QUEUE]
commands_to_relay = queues[SRC_TO_RELAY_QUEUE]
messages_to_flask = queues[M_TO_FLASK_QUEUE]
while True:
with ignored(EOFError, KeyboardInterrupt):
while packets_from_sc.qsize() == 0:
time.sleep(0.01)
ts, packet = packets_from_sc.get() # type: datetime, bytes
ts_bytes = int_to_bytes(int(ts.strftime('%Y%m%d%H%M%S%f')[:-4]))
try:
packet = gateway.detect_errors(packet)
except FunctionReturn:
continue
with ignored(EOFError, KeyboardInterrupt, SoftError):
ts, packet = load_packet_from_queue(queues, gateway)
header, packet = separate_header(packet, DATAGRAM_HEADER_LENGTH)
if header == UNENCRYPTED_DATAGRAM_HEADER:
commands_to_relay.put(packet)
elif header in [COMMAND_DATAGRAM_HEADER, LOCAL_KEY_DATAGRAM_HEADER]:
commands_to_dc.put(header + ts_bytes + packet)
p_type = 'Command ' if header == COMMAND_DATAGRAM_HEADER else 'Local key'
rp_print(f"{p_type} to local Receiver", ts)
process_command_datagram(ts, packet, header, queues)
elif header in [MESSAGE_DATAGRAM_HEADER, PUBLIC_KEY_DATAGRAM_HEADER]:
onion_pub_key, payload = separate_header(packet, ONION_SERVICE_PUBLIC_KEY_LENGTH)
packet_str = header.decode() + b85encode(payload)
queue_to_flask(packet_str, onion_pub_key, messages_to_flask, ts, header)
if header == MESSAGE_DATAGRAM_HEADER:
packets_to_dc.put(header + ts_bytes + onion_pub_key + ORIGIN_USER_HEADER + payload)
process_message_datagram(ts, packet, header, queues)
elif header == FILE_DATAGRAM_HEADER:
no_contacts_b, payload = separate_header(packet, ENCODED_INTEGER_LENGTH)
no_contacts = bytes_to_int(no_contacts_b)
ser_accounts, file_ct = separate_header(payload, no_contacts * ONION_SERVICE_PUBLIC_KEY_LENGTH)
pub_keys = split_byte_string(ser_accounts, item_len=ONION_SERVICE_PUBLIC_KEY_LENGTH)
for onion_pub_key in pub_keys:
queue_to_flask(file_ct, onion_pub_key, files_to_flask, ts, header)
process_file_datagram(ts, packet, header, queues)
elif header in [GROUP_MSG_INVITE_HEADER, GROUP_MSG_JOIN_HEADER,
GROUP_MSG_MEMBER_ADD_HEADER, GROUP_MSG_MEMBER_REM_HEADER,
GROUP_MSG_EXIT_GROUP_HEADER]:
elif header in [
GROUP_MSG_INVITE_HEADER,
GROUP_MSG_JOIN_HEADER,
GROUP_MSG_MEMBER_ADD_HEADER,
GROUP_MSG_MEMBER_REM_HEADER,
GROUP_MSG_EXIT_GROUP_HEADER,
]:
process_group_management_message(ts, packet, header, messages_to_flask)
if unit_test:
break
def process_group_management_message(ts: 'datetime',
packet: bytes,
header: bytes,
messages_to_flask: 'Queue[Tuple[Union[bytes, str], bytes]]') -> None:
def load_packet_from_queue(
queues: "QueueDict", gateway: "Gateway"
) -> Tuple["datetime", bytes]:
"""Load packet from Source Computer.
Perform error detection/correction.
"""
packets_from_source_computer = queues[GATEWAY_QUEUE]
while not packets_from_source_computer.qsize():
time.sleep(0.01)
ts, packet = packets_from_source_computer.get() # type: datetime, bytes
packet = gateway.detect_errors(packet)
return ts, packet
def process_command_datagram(
ts: "datetime", packet: bytes, header: bytes, queues: "QueueDict"
) -> None:
"""Process command datagram."""
commands_to_dc = queues[DST_COMMAND_QUEUE]
ts_bytes = int_to_bytes(int(ts.strftime("%Y%m%d%H%M%S%f")[:-4]))
commands_to_dc.put(header + ts_bytes + packet)
p_type = "Command " if header == COMMAND_DATAGRAM_HEADER else "Local key"
rp_print(f"{p_type} to local Receiver", ts)
def process_message_datagram(
ts: "datetime", packet: bytes, header: bytes, queues: "QueueDict"
) -> None:
"""Process message and public key datagram."""
packets_to_dc = queues[DST_MESSAGE_QUEUE]
messages_to_flask = queues[M_TO_FLASK_QUEUE]
onion_pub_key, payload = separate_header(packet, ONION_SERVICE_PUBLIC_KEY_LENGTH)
packet_str = header.decode() + b85encode(payload)
ts_bytes = int_to_bytes(int(ts.strftime("%Y%m%d%H%M%S%f")[:-4]))
queue_to_flask(packet_str, onion_pub_key, messages_to_flask, ts, header)
if header == MESSAGE_DATAGRAM_HEADER:
packets_to_dc.put(
header + ts_bytes + onion_pub_key + ORIGIN_USER_HEADER + payload
)
def process_file_datagram(
ts: "datetime", packet: bytes, header: bytes, queues: "QueueDict"
) -> None:
"""Process file datagram."""
files_to_flask = queues[F_TO_FLASK_QUEUE]
no_contacts_b, payload = separate_header(packet, ENCODED_INTEGER_LENGTH)
no_contacts = bytes_to_int(no_contacts_b)
ser_accounts, file_ct = separate_header(
payload, no_contacts * ONION_SERVICE_PUBLIC_KEY_LENGTH
)
pub_keys = split_byte_string(ser_accounts, item_len=ONION_SERVICE_PUBLIC_KEY_LENGTH)
for onion_pub_key in pub_keys:
queue_to_flask(file_ct, onion_pub_key, files_to_flask, ts, header)
def process_group_management_message(
ts: "datetime",
packet: bytes,
header: bytes,
messages_to_flask: "Queue[Tuple[Union[bytes, str], bytes]]",
) -> None:
"""Parse and display group management message."""
header_str = header.decode()
header_str = header.decode()
group_id, packet = separate_header(packet, GROUP_ID_LENGTH)
if header in [GROUP_MSG_INVITE_HEADER, GROUP_MSG_JOIN_HEADER]:
pub_keys = split_byte_string(packet, ONION_SERVICE_PUBLIC_KEY_LENGTH)
for onion_pub_key in pub_keys:
others = [k for k in pub_keys if k != onion_pub_key]
packet_str = header_str + b85encode(group_id + b''.join(others))
others = [k for k in pub_keys if k != onion_pub_key]
packet_str = header_str + b85encode(group_id + b"".join(others))
queue_to_flask(packet_str, onion_pub_key, messages_to_flask, ts, header)
elif header in [GROUP_MSG_MEMBER_ADD_HEADER, GROUP_MSG_MEMBER_REM_HEADER]:
first_list_len_b, packet = separate_header(packet, ENCODED_INTEGER_LENGTH)
first_list_length = bytes_to_int(first_list_len_b)
pub_keys = split_byte_string(packet, ONION_SERVICE_PUBLIC_KEY_LENGTH)
first_list_len_b, packet = separate_header(packet, ENCODED_INTEGER_LENGTH)
first_list_length = bytes_to_int(first_list_len_b)
pub_keys = split_byte_string(packet, ONION_SERVICE_PUBLIC_KEY_LENGTH)
before_adding = remaining = pub_keys[:first_list_length]
new_in_group = removable = pub_keys[first_list_length:]
new_in_group = removable = pub_keys[first_list_length:]
if header == GROUP_MSG_MEMBER_ADD_HEADER:
packet_str = GROUP_MSG_MEMBER_ADD_HEADER.decode() + b85encode(group_id + b''.join(new_in_group))
for onion_pub_key in before_adding:
queue_to_flask(packet_str, onion_pub_key, messages_to_flask, ts, header)
process_add_or_group_remove_member(
ts,
header,
header_str,
group_id,
messages_to_flask,
before_adding,
new_in_group,
)
for onion_pub_key in new_in_group:
other_new = [k for k in new_in_group if k != onion_pub_key]
packet_str = (GROUP_MSG_INVITE_HEADER.decode()
+ b85encode(group_id + b''.join(other_new + before_adding)))
other_new = [k for k in new_in_group if k != onion_pub_key]
packet_str = GROUP_MSG_INVITE_HEADER.decode() + b85encode(
group_id + b"".join(other_new + before_adding)
)
queue_to_flask(packet_str, onion_pub_key, messages_to_flask, ts, header)
elif header == GROUP_MSG_MEMBER_REM_HEADER:
packet_str = header_str + b85encode(group_id + b''.join(removable))
for onion_pub_key in remaining:
queue_to_flask(packet_str, onion_pub_key, messages_to_flask, ts, header)
process_add_or_group_remove_member(
ts,
header,
header_str,
group_id,
messages_to_flask,
remaining,
removable,
)
elif header == GROUP_MSG_EXIT_GROUP_HEADER:
pub_keys = split_byte_string(packet, ONION_SERVICE_PUBLIC_KEY_LENGTH)
packet_str = header_str + b85encode(group_id)
for onion_pub_key in pub_keys:
queue_to_flask(packet_str, onion_pub_key, messages_to_flask, ts, header)
process_group_exit_header(
ts, packet, header, header_str, group_id, messages_to_flask
)
def dst_outgoing(queues: 'QueueDict',
gateway: 'Gateway',
unit_test: bool = False
) -> None:
def process_add_or_group_remove_member(
ts: "datetime",
header: bytes,
header_str: str,
group_id: bytes,
messages_to_flask: "Queue[Tuple[Union[bytes, str], bytes]]",
remaining: List[bytes],
removable: List[bytes],
) -> None:
"""Process group add or remove member packet."""
packet_str = header_str + b85encode(group_id + b"".join(removable))
for onion_pub_key in remaining:
queue_to_flask(packet_str, onion_pub_key, messages_to_flask, ts, header)
def process_group_exit_header(
ts: "datetime",
packet: bytes,
header: bytes,
header_str: str,
group_id: bytes,
messages_to_flask: "Queue[Tuple[Union[bytes, str], bytes]]",
) -> None:
"""Process group exit packet."""
pub_keys = split_byte_string(packet, ONION_SERVICE_PUBLIC_KEY_LENGTH)
packet_str = header_str + b85encode(group_id)
for onion_pub_key in pub_keys:
queue_to_flask(packet_str, onion_pub_key, messages_to_flask, ts, header)
def dst_outgoing(
queues: "QueueDict", gateway: "Gateway", unit_test: bool = False
) -> None:
"""Output packets from queues to Destination Computer.
Commands (and local keys) to local Destination Computer have higher
@ -190,10 +299,10 @@ def dst_outgoing(queues: 'QueueDict',
if c_queue.qsize() == 0 and m_queue.qsize() == 0:
time.sleep(0.01)
while c_queue.qsize() != 0:
while c_queue.qsize():
gateway.write(c_queue.get())
if m_queue.qsize() != 0:
if m_queue.qsize():
gateway.write(m_queue.get())
if unit_test and queues[UNIT_TEST_QUEUE].qsize() > 0:

View File

@ -0,0 +1,20 @@
#!/usr/bin/env python3.7
# -*- coding: utf-8 -*-
"""
TFC - Onion-routed, endpoint secure messaging system
Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
TFC is free software: you can redistribute it and/or modify it under the terms
of the GNU General Public License as published by the Free Software Foundation,
either version 3 of the License, or (at your option) any later version.
TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""

File diff suppressed because it is too large Load Diff

View File

@ -22,129 +22,200 @@ along with TFC. If not, see <https://www.gnu.org/licenses/>.
import os
import typing
from typing import Callable, Dict, List, Optional
from typing import Callable, Dict, List, Optional, Tuple
from src.common.db_logs import remove_logs
from src.common.encoding import b58decode, int_to_bytes
from src.common.exceptions import FunctionReturn
from src.common.input import yes
from src.common.misc import ignored, validate_group_name
from src.common.output import group_management_print, m_print
from src.common.statics import (ADDED_MEMBERS, ALREADY_MEMBER, GROUP_ADD, GROUP_CREATE, GROUP_DELETE,
GROUP_ID_LENGTH, GROUP_MSG_EXIT_GROUP_HEADER, GROUP_MSG_INVITE_HEADER,
GROUP_MSG_JOIN_HEADER, GROUP_MSG_MEMBER_ADD_HEADER, GROUP_MSG_MEMBER_REM_HEADER,
GROUP_REMOVE, GROUP_RENAME, LOG_REMOVE, NEW_GROUP, NOT_IN_GROUP, RELAY_PACKET_QUEUE,
REMOVED_MEMBERS, UNKNOWN_ACCOUNTS, US_BYTE, WIN_TYPE_CONTACT)
from src.common.db_logs import remove_logs
from src.common.encoding import b58decode, int_to_bytes
from src.common.exceptions import SoftError
from src.common.input import yes
from src.common.misc import ignored, validate_group_name
from src.common.output import group_management_print, m_print
from src.common.statics import (
ADDED_MEMBERS,
ALREADY_MEMBER,
GROUP_ADD,
GROUP_CREATE,
GROUP_DELETE,
GROUP_ID_LENGTH,
GROUP_MSG_EXIT_GROUP_HEADER,
GROUP_MSG_INVITE_HEADER,
GROUP_MSG_JOIN_HEADER,
GROUP_MSG_MEMBER_ADD_HEADER,
GROUP_MSG_MEMBER_REM_HEADER,
GROUP_REMOVE,
GROUP_RENAME,
LOG_REMOVE,
NEW_GROUP,
NOT_IN_GROUP,
RELAY_PACKET_QUEUE,
REMOVED_MEMBERS,
UNKNOWN_ACCOUNTS,
US_BYTE,
WIN_TYPE_CONTACT,
)
from src.transmitter.packet import queue_command, queue_to_nc
from src.transmitter.packet import queue_command, queue_to_nc
from src.transmitter.user_input import UserInput
if typing.TYPE_CHECKING:
from multiprocessing import Queue
from src.common.db_contacts import ContactList
from src.common.db_groups import GroupList
from multiprocessing import Queue
from src.common.db_contacts import ContactList
from src.common.db_groups import GroupList
from src.common.db_masterkey import MasterKey
from src.common.db_settings import Settings
from src.common.db_settings import Settings
from src.transmitter.windows import TxWindow
QueueDict = Dict[bytes, Queue[bytes]]
FuncDict = (Dict[str, Callable[[str,
List[bytes],
ContactList,
GroupList,
Settings,
QueueDict,
MasterKey,
Optional[bytes]],
None]])
FuncDict = Dict[
str,
Callable[
[
str,
List[bytes],
ContactList,
GroupList,
Settings,
QueueDict,
MasterKey,
Optional[bytes],
],
None,
],
]
def process_group_command(user_input: 'UserInput',
contact_list: 'ContactList',
group_list: 'GroupList',
settings: 'Settings',
queues: 'QueueDict',
master_key: 'MasterKey'
) -> None:
def process_group_command(
user_input: "UserInput",
contact_list: "ContactList",
group_list: "GroupList",
settings: "Settings",
queues: "QueueDict",
master_key: "MasterKey",
) -> None:
"""Parse a group command and process it accordingly."""
if settings.traffic_masking:
raise FunctionReturn("Error: Command is disabled during traffic masking.", head_clear=True)
raise SoftError(
"Error: Command is disabled during traffic masking.", head_clear=True
)
input_parameters = user_input.plaintext.split() # type: List[str]
try:
command_type = input_parameters[1]
except IndexError:
raise FunctionReturn("Error: Invalid group command.", head_clear=True)
if command_type not in ['create', 'join', 'add', 'rm']:
raise FunctionReturn("Error: Invalid group command.")
group_id = None # type: Optional[bytes]
if command_type == 'join':
try:
group_id_s = input_parameters[2]
except IndexError:
raise FunctionReturn("Error: No group ID specified.", head_clear=True)
try:
group_id = b58decode(group_id_s)
except ValueError:
raise FunctionReturn("Error: Invalid group ID.", head_clear=True)
if group_id in group_list.get_list_of_group_ids():
raise FunctionReturn("Error: Group with matching ID already exists.", head_clear=True)
try:
name_index = 3 if command_type == 'join' else 2
group_name = input_parameters[name_index]
except IndexError:
raise FunctionReturn("Error: No group name specified.", head_clear=True)
member_index = 4 if command_type == 'join' else 3
purp_members = input_parameters[member_index:]
command_type, group_id, group_name, purp_members = parse_group_command_parameters(
input_parameters, group_list
)
# Swap specified strings to public keys
selectors = contact_list.contact_selectors()
pub_keys = [contact_list.get_contact_by_address_or_nick(m).onion_pub_key for m in purp_members if m in selectors]
pub_keys = [
contact_list.get_contact_by_address_or_nick(m).onion_pub_key
for m in purp_members
if m in selectors
]
func_d = dict(create=group_create,
join =group_create,
add =group_add_member,
rm =group_rm_member) # type: FuncDict
func_d = dict(
create=group_create, join=group_create, add=group_add_member, rm=group_rm_member
) # type: FuncDict
func = func_d[command_type]
func(group_name, pub_keys, contact_list, group_list, settings, queues, master_key, group_id)
print('')
func(
group_name,
pub_keys,
contact_list,
group_list,
settings,
queues,
master_key,
group_id,
)
print("")
def group_create(group_name: str,
purp_members: List[bytes],
contact_list: 'ContactList',
group_list: 'GroupList',
settings: 'Settings',
queues: 'QueueDict',
_: 'MasterKey',
group_id: Optional[bytes] = None
) -> None:
def parse_group_command_parameters(
input_parameters: List[str], group_list: "GroupList"
) -> Tuple[str, Optional[bytes], str, List[str]]:
"""Parse parameters for group command issued by the user."""
try:
command_type = input_parameters[1]
except IndexError:
raise SoftError("Error: Invalid group command.", head_clear=True)
if command_type not in ["create", "join", "add", "rm"]:
raise SoftError("Error: Invalid group command.")
group_id = validate_group_id(input_parameters, command_type, group_list)
try:
name_index = 3 if command_type == "join" else 2
group_name = input_parameters[name_index]
except IndexError:
raise SoftError("Error: No group name specified.", head_clear=True)
member_index = 4 if command_type == "join" else 3
purp_members = input_parameters[member_index:]
return command_type, group_id, group_name, purp_members
def validate_group_id(
input_parameters: List[str], command_type: str, group_list: "GroupList"
) -> Optional[bytes]:
"""Validate group ID for group command."""
group_id = None # type: Optional[bytes]
if command_type == "join":
try:
group_id_s = input_parameters[2]
except IndexError:
raise SoftError("Error: No group ID specified.", head_clear=True)
try:
group_id = b58decode(group_id_s)
except ValueError:
raise SoftError("Error: Invalid group ID.", head_clear=True)
if group_id in group_list.get_list_of_group_ids():
raise SoftError(
"Error: Group with matching ID already exists.", head_clear=True
)
return group_id
def group_create(
group_name: str,
purp_members: List[bytes],
contact_list: "ContactList",
group_list: "GroupList",
settings: "Settings",
queues: "QueueDict",
_: "MasterKey",
group_id: Optional[bytes] = None,
) -> None:
"""Create a new group.
Validate the group name and determine what members can be added.
"""
error_msg = validate_group_name(group_name, contact_list, group_list)
if error_msg:
raise FunctionReturn(error_msg, head_clear=True)
raise SoftError(error_msg, head_clear=True)
public_keys = set(contact_list.get_list_of_pub_keys())
public_keys = set(contact_list.get_list_of_pub_keys())
purp_pub_keys = set(purp_members)
accepted = list(purp_pub_keys & public_keys)
rejected = list(purp_pub_keys - public_keys)
accepted = list(purp_pub_keys & public_keys)
rejected = list(purp_pub_keys - public_keys)
if len(accepted) > settings.max_number_of_group_members:
raise FunctionReturn(f"Error: TFC settings only allow {settings.max_number_of_group_members} "
f"members per group.", head_clear=True)
raise SoftError(
f"Error: TFC settings only allow {settings.max_number_of_group_members} "
f"members per group.",
head_clear=True,
)
if len(group_list) == settings.max_number_of_groups:
raise FunctionReturn(f"Error: TFC settings only allow {settings.max_number_of_groups} groups.", head_clear=True)
raise SoftError(
f"Error: TFC settings only allow {settings.max_number_of_groups} groups.",
head_clear=True,
)
header = GROUP_MSG_INVITE_HEADER if group_id is None else GROUP_MSG_JOIN_HEADER
@ -154,138 +225,169 @@ def group_create(group_name: str,
if group_id not in group_list.get_list_of_group_ids():
break
group_list.add_group(group_name,
group_id,
settings.log_messages_by_default,
settings.show_notifications_by_default,
members=[contact_list.get_contact_by_pub_key(k) for k in accepted])
group_list.add_group(
group_name,
group_id,
settings.log_messages_by_default,
settings.show_notifications_by_default,
members=[contact_list.get_contact_by_pub_key(k) for k in accepted],
)
command = GROUP_CREATE + group_id + group_name.encode() + US_BYTE + b''.join(accepted)
command = (
GROUP_CREATE + group_id + group_name.encode() + US_BYTE + b"".join(accepted)
)
queue_command(command, settings, queues)
group_management_print(NEW_GROUP, accepted, contact_list, group_name)
group_management_print(NEW_GROUP, accepted, contact_list, group_name)
group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name)
if accepted:
if yes("Publish the list of group members to participants?", abort=False):
create_packet = header + group_id + b''.join(accepted)
create_packet = header + group_id + b"".join(accepted)
queue_to_nc(create_packet, queues[RELAY_PACKET_QUEUE])
else:
m_print(f"Created an empty group '{group_name}'.", bold=True, head=1)
def group_add_member(group_name: str,
purp_members: List['bytes'],
contact_list: 'ContactList',
group_list: 'GroupList',
settings: 'Settings',
queues: 'QueueDict',
master_key: 'MasterKey',
_: Optional[bytes] = None
) -> None:
def group_add_member(
group_name: str,
purp_members: List["bytes"],
contact_list: "ContactList",
group_list: "GroupList",
settings: "Settings",
queues: "QueueDict",
master_key: "MasterKey",
_: Optional[bytes] = None,
) -> None:
"""Add new member(s) to a specified group."""
if group_name not in group_list.get_list_of_group_names():
if yes(f"Group {group_name} was not found. Create new group?", abort=False, head=1):
group_create(group_name, purp_members, contact_list, group_list, settings, queues, master_key)
if yes(
f"Group {group_name} was not found. Create new group?", abort=False, head=1
):
group_create(
group_name,
purp_members,
contact_list,
group_list,
settings,
queues,
master_key,
)
return None
else:
raise FunctionReturn("Group creation aborted.", head=0, delay=1, tail_clear=True)
raise SoftError("Group creation aborted.", head=0, delay=1, tail_clear=True)
purp_pub_keys = set(purp_members)
pub_keys = set(contact_list.get_list_of_pub_keys())
before_adding = set(group_list.get_group(group_name).get_list_of_member_pub_keys())
ok_pub_keys_set = set(pub_keys & purp_pub_keys)
purp_pub_keys = set(purp_members)
pub_keys = set(contact_list.get_list_of_pub_keys())
before_adding = set(group_list.get_group(group_name).get_list_of_member_pub_keys())
ok_pub_keys_set = set(pub_keys & purp_pub_keys)
new_in_group_set = set(ok_pub_keys_set - before_adding)
end_assembly = list(before_adding | new_in_group_set)
rejected = list(purp_pub_keys - pub_keys)
rejected = list(purp_pub_keys - pub_keys)
already_in_g = list(before_adding & purp_pub_keys)
new_in_group = list(new_in_group_set)
ok_pub_keys = list(ok_pub_keys_set)
ok_pub_keys = list(ok_pub_keys_set)
if len(end_assembly) > settings.max_number_of_group_members:
raise FunctionReturn(f"Error: TFC settings only allow {settings.max_number_of_group_members} "
f"members per group.", head_clear=True)
raise SoftError(
f"Error: TFC settings only allow {settings.max_number_of_group_members} "
f"members per group.",
head_clear=True,
)
group = group_list.get_group(group_name)
group.add_members([contact_list.get_contact_by_pub_key(k) for k in new_in_group])
command = GROUP_ADD + group.group_id + b''.join(ok_pub_keys)
command = GROUP_ADD + group.group_id + b"".join(ok_pub_keys)
queue_command(command, settings, queues)
group_management_print(ADDED_MEMBERS, new_in_group, contact_list, group_name)
group_management_print(ALREADY_MEMBER, already_in_g, contact_list, group_name)
group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name)
group_management_print(ADDED_MEMBERS, new_in_group, contact_list, group_name)
group_management_print(ALREADY_MEMBER, already_in_g, contact_list, group_name)
group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name)
if new_in_group:
if yes("Publish the list of new members to involved?", abort=False):
add_packet = (GROUP_MSG_MEMBER_ADD_HEADER
+ group.group_id
+ int_to_bytes(len(before_adding))
+ b''.join(before_adding)
+ b''.join(new_in_group))
add_packet = (
GROUP_MSG_MEMBER_ADD_HEADER
+ group.group_id
+ int_to_bytes(len(before_adding))
+ b"".join(before_adding)
+ b"".join(new_in_group)
)
queue_to_nc(add_packet, queues[RELAY_PACKET_QUEUE])
def group_rm_member(group_name: str,
purp_members: List[bytes],
contact_list: 'ContactList',
group_list: 'GroupList',
settings: 'Settings',
queues: 'QueueDict',
master_key: 'MasterKey',
_: Optional[bytes] = None
) -> None:
def group_rm_member(
group_name: str,
purp_members: List[bytes],
contact_list: "ContactList",
group_list: "GroupList",
settings: "Settings",
queues: "QueueDict",
master_key: "MasterKey",
_: Optional[bytes] = None,
) -> None:
"""Remove member(s) from the specified group or remove the group itself."""
if not purp_members:
group_rm_group(group_name, contact_list, group_list, settings, queues, master_key)
group_rm_group(
group_name, contact_list, group_list, settings, queues, master_key
)
if group_name not in group_list.get_list_of_group_names():
raise FunctionReturn(f"Group '{group_name}' does not exist.", head_clear=True)
raise SoftError(f"Group '{group_name}' does not exist.", head_clear=True)
purp_pub_keys = set(purp_members)
pub_keys = set(contact_list.get_list_of_pub_keys())
before_removal = set(group_list.get_group(group_name).get_list_of_member_pub_keys())
ok_pub_keys_set = set(purp_pub_keys & pub_keys)
removable_set = set(before_removal & ok_pub_keys_set)
purp_pub_keys = set(purp_members)
pub_keys = set(contact_list.get_list_of_pub_keys())
before_removal = set(group_list.get_group(group_name).get_list_of_member_pub_keys())
ok_pub_keys_set = set(purp_pub_keys & pub_keys)
removable_set = set(before_removal & ok_pub_keys_set)
remaining = list(before_removal - removable_set)
remaining = list(before_removal - removable_set)
not_in_group = list(ok_pub_keys_set - before_removal)
rejected = list(purp_pub_keys - pub_keys)
removable = list(removable_set)
ok_pub_keys = list(ok_pub_keys_set)
rejected = list(purp_pub_keys - pub_keys)
removable = list(removable_set)
ok_pub_keys = list(ok_pub_keys_set)
group = group_list.get_group(group_name)
group.remove_members(removable)
command = GROUP_REMOVE + group.group_id + b''.join(ok_pub_keys)
command = GROUP_REMOVE + group.group_id + b"".join(ok_pub_keys)
queue_command(command, settings, queues)
group_management_print(REMOVED_MEMBERS, removable, contact_list, group_name)
group_management_print(NOT_IN_GROUP, not_in_group, contact_list, group_name)
group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name)
group_management_print(REMOVED_MEMBERS, removable, contact_list, group_name)
group_management_print(NOT_IN_GROUP, not_in_group, contact_list, group_name)
group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name)
if removable and remaining and yes("Publish the list of removed members to remaining members?", abort=False):
rem_packet = (GROUP_MSG_MEMBER_REM_HEADER
+ group.group_id
+ int_to_bytes(len(remaining))
+ b''.join(remaining)
+ b''.join(removable))
if (
removable
and remaining
and yes(
"Publish the list of removed members to remaining members?", abort=False
)
):
rem_packet = (
GROUP_MSG_MEMBER_REM_HEADER
+ group.group_id
+ int_to_bytes(len(remaining))
+ b"".join(remaining)
+ b"".join(removable)
)
queue_to_nc(rem_packet, queues[RELAY_PACKET_QUEUE])
def group_rm_group(group_name: str,
contact_list: 'ContactList',
group_list: 'GroupList',
settings: 'Settings',
queues: 'QueueDict',
master_key: 'MasterKey',
_: Optional[bytes] = None
) -> None:
def group_rm_group(
group_name: str,
contact_list: "ContactList",
group_list: "GroupList",
settings: "Settings",
queues: "QueueDict",
master_key: "MasterKey",
_: Optional[bytes] = None,
) -> None:
"""Remove the group with its members."""
if not yes(f"Remove group '{group_name}'?", abort=False):
raise FunctionReturn("Group removal aborted.", head=0, delay=1, tail_clear=True)
raise SoftError("Group removal aborted.", head=0, delay=1, tail_clear=True)
if group_name in group_list.get_list_of_group_names():
group_id = group_list.get_group(group_name).group_id
@ -293,7 +395,7 @@ def group_rm_group(group_name: str,
try:
group_id = b58decode(group_name)
except ValueError:
raise FunctionReturn("Error: Invalid group name/ID.", head_clear=True)
raise SoftError("Error: Invalid group name/ID.", head_clear=True)
command = LOG_REMOVE + group_id
queue_command(command, settings, queues)
@ -302,42 +404,53 @@ def group_rm_group(group_name: str,
queue_command(command, settings, queues)
if group_list.has_group(group_name):
with ignored(FunctionReturn):
with ignored(SoftError):
remove_logs(contact_list, group_list, settings, master_key, group_id)
else:
raise FunctionReturn(f"Transmitter has no group '{group_name}' to remove.")
raise SoftError(f"Transmitter has no group '{group_name}' to remove.")
group = group_list.get_group(group_name)
if not group.empty() and yes("Notify members about leaving the group?", abort=False):
exit_packet = (GROUP_MSG_EXIT_GROUP_HEADER
+ group.group_id
+ b''.join(group.get_list_of_member_pub_keys()))
if not group.empty() and yes(
"Notify members about leaving the group?", abort=False
):
exit_packet = (
GROUP_MSG_EXIT_GROUP_HEADER
+ group.group_id
+ b"".join(group.get_list_of_member_pub_keys())
)
queue_to_nc(exit_packet, queues[RELAY_PACKET_QUEUE])
group_list.remove_group_by_name(group_name)
raise FunctionReturn(f"Removed group '{group_name}'.", head=0, delay=1, tail_clear=True, bold=True)
raise SoftError(
f"Removed group '{group_name}'.", head=0, delay=1, tail_clear=True, bold=True
)
def group_rename(new_name: str,
window: 'TxWindow',
contact_list: 'ContactList',
group_list: 'GroupList',
settings: 'Settings',
queues: 'QueueDict',
) -> None:
def group_rename(
new_name: str,
window: "TxWindow",
contact_list: "ContactList",
group_list: "GroupList",
settings: "Settings",
queues: "QueueDict",
) -> None:
"""Rename the active group."""
if window.type == WIN_TYPE_CONTACT or window.group is None:
raise FunctionReturn("Error: Selected window is not a group window.", head_clear=True)
raise SoftError(
"Error: Selected window is not a group window.", head_clear=True
)
error_msg = validate_group_name(new_name, contact_list, group_list)
if error_msg:
raise FunctionReturn(error_msg, head_clear=True)
raise SoftError(error_msg, head_clear=True)
command = GROUP_RENAME + window.uid + new_name.encode()
queue_command(command, settings, queues)
old_name = window.group.name
old_name = window.group.name
window.group.name = new_name
group_list.store_groups()
raise FunctionReturn(f"Renamed group '{old_name}' to '{new_name}'.", delay=1, tail_clear=True)
raise SoftError(
f"Renamed group '{old_name}' to '{new_name}'.", delay=1, tail_clear=True
)

View File

@ -23,40 +23,68 @@ import typing
from typing import Any, Dict
from src.common.db_logs import remove_logs
from src.common.encoding import onion_address_to_pub_key
from src.common.exceptions import FunctionReturn
from src.common.input import box_input, yes
from src.common.misc import ignored, validate_key_exchange, validate_nick, validate_onion_addr
from src.common.output import m_print
from src.common.statics import (ALL, CH_FILE_RECV, CH_LOGGING, CH_NICKNAME, CH_NOTIFY, CONTACT_REM, DISABLE, ECDHE,
ENABLE, KDB_REMOVE_ENTRY_HEADER, KEY_MANAGEMENT_QUEUE, LOGGING, LOG_SETTING_QUEUE,
NOTIFY, ONION_ADDRESS_LENGTH, PSK, RELAY_PACKET_QUEUE, STORE, TRUNC_ADDRESS_LENGTH,
UNENCRYPTED_ADD_NEW_CONTACT, UNENCRYPTED_DATAGRAM_HEADER, UNENCRYPTED_REM_CONTACT,
WIN_TYPE_CONTACT, WIN_TYPE_GROUP)
from src.common.db_logs import remove_logs
from src.common.encoding import onion_address_to_pub_key
from src.common.exceptions import SoftError
from src.common.input import box_input, yes
from src.common.misc import (
ignored,
validate_key_exchange,
validate_nick,
validate_onion_addr,
)
from src.common.output import m_print
from src.common.statics import (
ALL,
CH_FILE_RECV,
CH_LOGGING,
CH_NICKNAME,
CH_NOTIFY,
CONTACT_REM,
DISABLE,
ECDHE,
ENABLE,
KDB_REMOVE_ENTRY_HEADER,
KEY_MANAGEMENT_QUEUE,
LOGGING,
LOG_SETTING_QUEUE,
NOTIFY,
ONION_ADDRESS_LENGTH,
PSK,
RELAY_PACKET_QUEUE,
STORE,
TRUNC_ADDRESS_LENGTH,
UNENCRYPTED_ADD_NEW_CONTACT,
UNENCRYPTED_DATAGRAM_HEADER,
UNENCRYPTED_REM_CONTACT,
WIN_TYPE_CONTACT,
WIN_TYPE_GROUP,
)
from src.transmitter.commands_g import group_rename
from src.transmitter.commands_g import group_rename
from src.transmitter.key_exchanges import create_pre_shared_key, start_key_exchange
from src.transmitter.packet import queue_command, queue_to_nc
from src.transmitter.packet import queue_command, queue_to_nc
if typing.TYPE_CHECKING:
from multiprocessing import Queue
from src.common.db_contacts import ContactList
from src.common.db_groups import GroupList
from src.common.db_masterkey import MasterKey
from src.common.db_onion import OnionService
from src.common.db_settings import Settings
from multiprocessing import Queue
from src.common.db_contacts import ContactList
from src.common.db_groups import GroupList
from src.common.db_masterkey import MasterKey
from src.common.db_onion import OnionService
from src.common.db_settings import Settings
from src.transmitter.user_input import UserInput
from src.transmitter.windows import TxWindow
from src.transmitter.windows import TxWindow
QueueDict = Dict[bytes, Queue[Any]]
def add_new_contact(contact_list: 'ContactList',
group_list: 'GroupList',
settings: 'Settings',
queues: 'QueueDict',
onion_service: 'OnionService'
) -> None:
def add_new_contact(
contact_list: "ContactList",
group_list: "GroupList",
settings: "Settings",
queues: "QueueDict",
onion_service: "OnionService",
) -> None:
"""Prompt for contact account details and initialize desired key exchange.
This function requests the minimum amount of data about the
@ -75,104 +103,157 @@ def add_new_contact(contact_list: 'ContactList',
"""
try:
if settings.traffic_masking:
raise FunctionReturn("Error: Command is disabled during traffic masking.", head_clear=True)
raise SoftError(
"Error: Command is disabled during traffic masking.", head_clear=True
)
if len(contact_list) >= settings.max_number_of_contacts:
raise FunctionReturn(f"Error: TFC settings only allow {settings.max_number_of_contacts} accounts.",
head_clear=True)
raise SoftError(
f"Error: TFC settings only allow {settings.max_number_of_contacts} accounts.",
head_clear=True,
)
m_print("Add new contact", head=1, bold=True, head_clear=True)
m_print(["Your TFC account is",
onion_service.user_onion_address,
'', "Warning!",
"Anyone who knows this account",
"can see when your TFC is online"], box=True)
m_print(
[
"Your TFC account is",
onion_service.user_onion_address,
"",
"Warning!",
"Anyone who knows this account",
"can see when your TFC is online",
],
box=True,
)
contact_address = box_input("Contact account",
expected_len=ONION_ADDRESS_LENGTH,
validator=validate_onion_addr,
validator_args=onion_service.user_onion_address).strip()
contact_address = box_input(
"Contact account",
expected_len=ONION_ADDRESS_LENGTH,
validator=validate_onion_addr,
validator_args=onion_service.user_onion_address,
).strip()
onion_pub_key = onion_address_to_pub_key(contact_address)
contact_nick = box_input("Contact nick",
expected_len=ONION_ADDRESS_LENGTH, # Limited to 255 but such long nick is unpractical.
validator=validate_nick,
validator_args=(contact_list, group_list, onion_pub_key)).strip()
contact_nick = box_input(
"Contact nick",
expected_len=ONION_ADDRESS_LENGTH, # Limited to 255 but such long nick is unpractical.
validator=validate_nick,
validator_args=(contact_list, group_list, onion_pub_key),
).strip()
key_exchange = box_input(f"Key exchange ([{ECDHE}],PSK) ",
default=ECDHE,
expected_len=28,
validator=validate_key_exchange).strip()
key_exchange = box_input(
f"Key exchange ([{ECDHE}],PSK) ",
default=ECDHE,
expected_len=28,
validator=validate_key_exchange,
).strip()
relay_command = UNENCRYPTED_DATAGRAM_HEADER + UNENCRYPTED_ADD_NEW_CONTACT + onion_pub_key
relay_command = (
UNENCRYPTED_DATAGRAM_HEADER + UNENCRYPTED_ADD_NEW_CONTACT + onion_pub_key
)
queue_to_nc(relay_command, queues[RELAY_PACKET_QUEUE])
if key_exchange.upper() in ECDHE:
start_key_exchange(onion_pub_key, contact_nick, contact_list, settings, queues)
start_key_exchange(
onion_pub_key, contact_nick, contact_list, settings, queues
)
elif key_exchange.upper() in PSK:
create_pre_shared_key(onion_pub_key, contact_nick, contact_list, settings, onion_service, queues)
create_pre_shared_key(
onion_pub_key,
contact_nick,
contact_list,
settings,
onion_service,
queues,
)
except (EOFError, KeyboardInterrupt):
raise FunctionReturn("Contact creation aborted.", head=2, delay=1, tail_clear=True)
raise SoftError("Contact creation aborted.", head=2, delay=1, tail_clear=True)
def remove_contact(user_input: 'UserInput',
window: 'TxWindow',
contact_list: 'ContactList',
group_list: 'GroupList',
settings: 'Settings',
queues: 'QueueDict',
master_key: 'MasterKey') -> None:
def remove_contact(
user_input: "UserInput",
window: "TxWindow",
contact_list: "ContactList",
group_list: "GroupList",
settings: "Settings",
queues: "QueueDict",
master_key: "MasterKey",
) -> None:
"""Remove contact from TFC."""
if settings.traffic_masking:
raise FunctionReturn("Error: Command is disabled during traffic masking.", head_clear=True)
raise SoftError(
"Error: Command is disabled during traffic masking.", head_clear=True
)
try:
selection = user_input.plaintext.split()[1]
except IndexError:
raise FunctionReturn("Error: No account specified.", head_clear=True)
raise SoftError("Error: No account specified.", head_clear=True)
if not yes(f"Remove contact '{selection}'?", abort=False, head=1):
raise FunctionReturn("Removal of contact aborted.", head=0, delay=1, tail_clear=True)
raise SoftError("Removal of contact aborted.", head=0, delay=1, tail_clear=True)
if selection in contact_list.contact_selectors():
onion_pub_key = contact_list.get_contact_by_address_or_nick(selection).onion_pub_key
onion_pub_key = contact_list.get_contact_by_address_or_nick(
selection
).onion_pub_key
else:
if validate_onion_addr(selection):
raise FunctionReturn("Error: Invalid selection.", head=0, delay=1, tail_clear=True)
else:
onion_pub_key = onion_address_to_pub_key(selection)
raise SoftError(
"Error: Invalid selection.", head=0, delay=1, tail_clear=True
)
onion_pub_key = onion_address_to_pub_key(selection)
receiver_command = CONTACT_REM + onion_pub_key
queue_command(receiver_command, settings, queues)
with ignored(FunctionReturn):
with ignored(SoftError):
remove_logs(contact_list, group_list, settings, master_key, onion_pub_key)
queues[KEY_MANAGEMENT_QUEUE].put((KDB_REMOVE_ENTRY_HEADER, onion_pub_key))
relay_command = UNENCRYPTED_DATAGRAM_HEADER + UNENCRYPTED_REM_CONTACT + onion_pub_key
relay_command = (
UNENCRYPTED_DATAGRAM_HEADER + UNENCRYPTED_REM_CONTACT + onion_pub_key
)
queue_to_nc(relay_command, queues[RELAY_PACKET_QUEUE])
target = determine_target(selection, onion_pub_key, contact_list)
if any([g.remove_members([onion_pub_key]) for g in group_list]):
m_print(f"Removed {target} from group(s).", tail=1)
check_for_window_deselection(onion_pub_key, window, group_list)
def determine_target(
selection: str, onion_pub_key: bytes, contact_list: "ContactList"
) -> str:
"""Determine name of the target that will be removed."""
if onion_pub_key in contact_list.get_list_of_pub_keys():
contact = contact_list.get_contact_by_pub_key(onion_pub_key)
target = f"{contact.nick} ({contact.short_address})"
target = f"{contact.nick} ({contact.short_address})"
contact_list.remove_contact_by_pub_key(onion_pub_key)
m_print(f"Removed {target} from contacts.", head=1, tail=1)
else:
target = f"{selection[:TRUNC_ADDRESS_LENGTH]}"
m_print(f"Transmitter has no {target} to remove.", head=1, tail=1)
if any([g.remove_members([onion_pub_key]) for g in group_list]):
m_print(f"Removed {target} from group(s).", tail=1)
return target
def check_for_window_deselection(
onion_pub_key: bytes, window: "TxWindow", group_list: "GroupList"
) -> None:
"""\
Check if the window should be deselected after contact is removed.
"""
if window.type == WIN_TYPE_CONTACT:
if onion_pub_key == window.uid:
window.deselect()
if window.type == WIN_TYPE_GROUP:
for c in window:
if c.onion_pub_key == onion_pub_key:
@ -187,98 +268,81 @@ def remove_contact(user_input: 'UserInput',
window.deselect()
def change_nick(user_input: 'UserInput',
window: 'TxWindow',
contact_list: 'ContactList',
group_list: 'GroupList',
settings: 'Settings',
queues: 'QueueDict') -> None:
def change_nick(
user_input: "UserInput",
window: "TxWindow",
contact_list: "ContactList",
group_list: "GroupList",
settings: "Settings",
queues: "QueueDict",
) -> None:
"""Change nick of contact."""
try:
nick = user_input.plaintext.split()[1]
except IndexError:
raise FunctionReturn("Error: No nick specified.", head_clear=True)
raise SoftError("Error: No nick specified.", head_clear=True)
if window.type == WIN_TYPE_GROUP:
group_rename(nick, window, contact_list, group_list, settings, queues)
if window.contact is None:
raise FunctionReturn("Error: Window does not have contact.")
raise SoftError("Error: Window does not have contact.")
onion_pub_key = window.contact.onion_pub_key
error_msg = validate_nick(nick, (contact_list, group_list, onion_pub_key))
error_msg = validate_nick(nick, (contact_list, group_list, onion_pub_key))
if error_msg:
raise FunctionReturn(error_msg, head_clear=True)
raise SoftError(error_msg, head_clear=True)
window.contact.nick = nick
window.name = nick
window.name = nick
contact_list.store_contacts()
command = CH_NICKNAME + onion_pub_key + nick.encode()
queue_command(command, settings, queues)
def contact_setting(user_input: 'UserInput',
window: 'TxWindow',
contact_list: 'ContactList',
group_list: 'GroupList',
settings: 'Settings',
queues: 'QueueDict'
) -> None:
def contact_setting(
user_input: "UserInput",
window: "TxWindow",
contact_list: "ContactList",
group_list: "GroupList",
settings: "Settings",
queues: "QueueDict",
) -> None:
"""\
Change logging, file reception, or notification setting of a group
or (all) contact(s).
"""
try:
parameters = user_input.plaintext.split()
cmd_key = parameters[0]
cmd_header = {LOGGING: CH_LOGGING,
STORE: CH_FILE_RECV,
NOTIFY: CH_NOTIFY}[cmd_key]
cmd_key = parameters[0]
cmd_header = {LOGGING: CH_LOGGING, STORE: CH_FILE_RECV, NOTIFY: CH_NOTIFY}[
cmd_key
]
setting, b_value = dict(on=(ENABLE, True),
off=(DISABLE, False))[parameters[1]]
setting, b_value = dict(on=(ENABLE, True), off=(DISABLE, False))[parameters[1]]
except (IndexError, KeyError):
raise FunctionReturn("Error: Invalid command.", head_clear=True)
raise SoftError("Error: Invalid command.", head_clear=True)
# If second parameter 'all' is included, apply setting for all contacts and groups
try:
win_uid = b''
win_uid = b""
if parameters[2] == ALL:
cmd_value = setting.upper()
else:
raise FunctionReturn("Error: Invalid command.", head_clear=True)
raise SoftError("Error: Invalid command.", head_clear=True)
except IndexError:
win_uid = window.uid
win_uid = window.uid
cmd_value = setting + win_uid
if win_uid:
if window.type == WIN_TYPE_CONTACT and window.contact is not None:
if cmd_key == LOGGING: window.contact.log_messages = b_value
if cmd_key == STORE: window.contact.file_reception = b_value
if cmd_key == NOTIFY: window.contact.notifications = b_value
contact_list.store_contacts()
if window.type == WIN_TYPE_GROUP and window.group is not None:
if cmd_key == LOGGING: window.group.log_messages = b_value
if cmd_key == STORE:
for c in window:
c.file_reception = b_value
if cmd_key == NOTIFY: window.group.notifications = b_value
group_list.store_groups()
change_setting_for_selected_contact(
cmd_key, b_value, window, contact_list, group_list
)
else:
for contact in contact_list:
if cmd_key == LOGGING: contact.log_messages = b_value
if cmd_key == STORE: contact.file_reception = b_value
if cmd_key == NOTIFY: contact.notifications = b_value
contact_list.store_contacts()
for group in group_list:
if cmd_key == LOGGING: group.log_messages = b_value
if cmd_key == NOTIFY: group.notifications = b_value
group_list.store_groups()
change_setting_for_all_contacts(cmd_key, b_value, contact_list, group_list)
command = cmd_header + cmd_value
@ -290,3 +354,54 @@ def contact_setting(user_input: 'UserInput',
window.update_log_messages()
queue_command(command, settings, queues)
def change_setting_for_selected_contact(
cmd_key: str,
b_value: bool,
window: "TxWindow",
contact_list: "ContactList",
group_list: "GroupList",
) -> None:
"""Change setting for selected contact."""
if window.type == WIN_TYPE_CONTACT and window.contact is not None:
if cmd_key == LOGGING:
window.contact.log_messages = b_value
if cmd_key == STORE:
window.contact.file_reception = b_value
if cmd_key == NOTIFY:
window.contact.notifications = b_value
contact_list.store_contacts()
if window.type == WIN_TYPE_GROUP and window.group is not None:
if cmd_key == LOGGING:
window.group.log_messages = b_value
if cmd_key == STORE:
for c in window:
c.file_reception = b_value
if cmd_key == NOTIFY:
window.group.notifications = b_value
group_list.store_groups()
def change_setting_for_all_contacts(
cmd_key: str, b_value: bool, contact_list: "ContactList", group_list: "GroupList"
) -> None:
"""Change setting for all contacts."""
for contact in contact_list:
if cmd_key == LOGGING:
contact.log_messages = b_value
if cmd_key == STORE:
contact.file_reception = b_value
if cmd_key == NOTIFY:
contact.notifications = b_value
contact_list.store_contacts()
for group in group_list:
if cmd_key == LOGGING:
group.log_messages = b_value
if cmd_key == NOTIFY:
group.notifications = b_value
group_list.store_groups()

View File

@ -26,15 +26,22 @@ import zlib
from typing import Tuple
from src.common.crypto import byte_padding, csprng, encrypt_and_sign
from src.common.encoding import int_to_bytes
from src.common.exceptions import FunctionReturn
from src.common.misc import readable_size, split_byte_string
from src.common.statics import (COMPRESSION_LEVEL, FILE_ETA_FIELD_LENGTH, FILE_PACKET_CTR_LENGTH,
FILE_SIZE_FIELD_LENGTH, PADDING_LENGTH, TRAFFIC_MASKING_QUEUE_CHECK_DELAY, US_BYTE)
from src.common.crypto import byte_padding, csprng, encrypt_and_sign
from src.common.encoding import int_to_bytes
from src.common.exceptions import SoftError
from src.common.misc import readable_size, split_byte_string
from src.common.statics import (
COMPRESSION_LEVEL,
FILE_ETA_FIELD_LENGTH,
FILE_PACKET_CTR_LENGTH,
FILE_SIZE_FIELD_LENGTH,
PADDING_LENGTH,
TRAFFIC_MASKING_QUEUE_CHECK_DELAY,
US_BYTE,
)
if typing.TYPE_CHECKING:
from src.common.db_settings import Settings
from src.common.db_settings import Settings
from src.transmitter.windows import TxWindow
@ -45,45 +52,44 @@ class File(object):
masking.
"""
def __init__(self,
path: str,
window: 'TxWindow',
settings: 'Settings'
) -> None:
def __init__(self, path: str, window: "TxWindow", settings: "Settings") -> None:
"""Load file data from specified path and add headers."""
self.window = window
self.window = window
self.settings = settings
self.name = self.get_name(path)
data = self.load_file_data(path)
size, self.size_hr = self.get_size(path)
processed = self.process_file_data(data)
self.name = self.get_name(path)
data = self.load_file_data(path)
size, self.size_hr = self.get_size(path)
processed = self.process_file_data(data)
self.time_hr, self.plaintext = self.finalize(size, processed)
@staticmethod
def get_name(path: str) -> bytes:
"""Parse and validate file name."""
name = (path.split('/')[-1]).encode()
name = (path.split("/")[-1]).encode()
File.name_length_check(name)
return name
@staticmethod
def name_length_check(name: bytes) -> None:
"""Ensure that file header fits the first packet."""
full_header_length = (FILE_PACKET_CTR_LENGTH
+ FILE_ETA_FIELD_LENGTH
+ FILE_SIZE_FIELD_LENGTH
+ len(name) + len(US_BYTE))
full_header_length = (
FILE_PACKET_CTR_LENGTH
+ FILE_ETA_FIELD_LENGTH
+ FILE_SIZE_FIELD_LENGTH
+ len(name)
+ len(US_BYTE)
)
if full_header_length >= PADDING_LENGTH:
raise FunctionReturn("Error: File name is too long.", head_clear=True)
raise SoftError("Error: File name is too long.", head_clear=True)
@staticmethod
def load_file_data(path: str) -> bytes:
"""Load file name, size, and data from the specified path."""
if not os.path.isfile(path):
raise FunctionReturn("Error: File not found.", head_clear=True)
with open(path, 'rb') as f:
raise SoftError("Error: File not found.", head_clear=True)
with open(path, "rb") as f:
data = f.read()
return data
@ -91,9 +97,9 @@ class File(object):
def get_size(path: str) -> Tuple[bytes, str]:
"""Get size of file in bytes and in human readable form."""
byte_size = os.path.getsize(path)
if byte_size == 0:
raise FunctionReturn("Error: Target file is empty.", head_clear=True)
size = int_to_bytes(byte_size)
if not byte_size:
raise SoftError("Error: Target file is empty.", head_clear=True)
size = int_to_bytes(byte_size)
size_hr = readable_size(byte_size)
return size, size_hr
@ -107,24 +113,27 @@ class File(object):
transmission.
"""
compressed = zlib.compress(data, level=COMPRESSION_LEVEL)
file_key = csprng()
processed = encrypt_and_sign(compressed, key=file_key)
file_key = csprng()
processed = encrypt_and_sign(compressed, key=file_key)
processed += file_key
return processed
def finalize(self, size: bytes, processed: bytes) -> Tuple[str, bytes]:
"""Finalize packet and generate plaintext."""
time_bytes, time_print = self.update_delivery_time(self.name, size, processed, self.settings, self.window)
packet_data = time_bytes + size + self.name + US_BYTE + processed
time_bytes, time_print = self.update_delivery_time(
self.name, size, processed, self.settings, self.window
)
packet_data = time_bytes + size + self.name + US_BYTE + processed
return time_print, packet_data
@staticmethod
def update_delivery_time(name: bytes,
size: bytes,
processed: bytes,
settings: 'Settings',
window: 'TxWindow'
) -> Tuple[bytes, str]:
def update_delivery_time(
name: bytes,
size: bytes,
processed: bytes,
settings: "Settings",
window: "TxWindow",
) -> Tuple[bytes, str]:
"""Calculate transmission time.
Transmission time depends on delay settings, file size and
@ -132,29 +141,27 @@ class File(object):
"""
time_bytes = bytes(FILE_ETA_FIELD_LENGTH)
no_packets = File.count_number_of_packets(name, size, processed, time_bytes)
avg_delay = settings.tm_static_delay + (settings.tm_random_delay / 2)
avg_delay = settings.tm_static_delay + (settings.tm_random_delay / 2)
total_time = len(window) * no_packets * avg_delay
total_time = len(window) * no_packets * avg_delay
total_time *= 2 # Accommodate command packets between file packets
total_time += no_packets * TRAFFIC_MASKING_QUEUE_CHECK_DELAY
# Update delivery time
time_bytes = int_to_bytes(int(total_time))
time_hr = str(datetime.timedelta(seconds=int(total_time)))
time_hr = str(datetime.timedelta(seconds=int(total_time)))
return time_bytes, time_hr
@staticmethod
def count_number_of_packets(name: bytes,
size: bytes,
processed: bytes,
time_bytes: bytes
) -> int:
def count_number_of_packets(
name: bytes, size: bytes, processed: bytes, time_bytes: bytes
) -> int:
"""Count number of packets needed for file delivery."""
packet_data = time_bytes + size + name + US_BYTE + processed
if len(packet_data) < PADDING_LENGTH:
return 1
else:
packet_data += bytes(FILE_PACKET_CTR_LENGTH)
packet_data = byte_padding(packet_data)
return len(split_byte_string(packet_data, item_len=PADDING_LENGTH))
packet_data += bytes(FILE_PACKET_CTR_LENGTH)
packet_data = byte_padding(packet_data)
return len(split_byte_string(packet_data, item_len=PADDING_LENGTH))

View File

@ -26,36 +26,37 @@ import typing
from typing import Dict, NoReturn
from src.common.exceptions import FunctionReturn
from src.common.misc import get_tab_completer, ignored
from src.common.statics import COMMAND, FILE, MESSAGE
from src.common.exceptions import SoftError
from src.common.misc import get_tab_completer, ignored
from src.common.statics import COMMAND, FILE, MESSAGE
from src.transmitter.commands import process_command
from src.transmitter.contact import add_new_contact
from src.transmitter.commands import process_command
from src.transmitter.contact import add_new_contact
from src.transmitter.key_exchanges import export_onion_service_data, new_local_key
from src.transmitter.packet import queue_file, queue_message
from src.transmitter.user_input import get_input
from src.transmitter.windows import TxWindow
from src.transmitter.packet import queue_file, queue_message
from src.transmitter.user_input import get_input
from src.transmitter.windows import TxWindow
if typing.TYPE_CHECKING:
from multiprocessing import Queue
from src.common.db_contacts import ContactList
from src.common.db_groups import GroupList
from multiprocessing import Queue
from src.common.db_contacts import ContactList
from src.common.db_groups import GroupList
from src.common.db_masterkey import MasterKey
from src.common.db_onion import OnionService
from src.common.db_settings import Settings
from src.common.gateway import Gateway
from src.common.db_onion import OnionService
from src.common.db_settings import Settings
from src.common.gateway import Gateway
def input_loop(queues: Dict[bytes, 'Queue[bytes]'],
settings: 'Settings',
gateway: 'Gateway',
contact_list: 'ContactList',
group_list: 'GroupList',
master_key: 'MasterKey',
onion_service: 'OnionService',
stdin_fd: int
) -> NoReturn:
def input_loop(
queues: Dict[bytes, "Queue[bytes]"],
settings: "Settings",
gateway: "Gateway",
contact_list: "ContactList",
group_list: "GroupList",
master_key: "MasterKey",
onion_service: "OnionService",
stdin_fd: int,
) -> NoReturn:
"""Get input from user and process it accordingly.
Running this loop as a process allows handling different functions
@ -63,23 +64,29 @@ def input_loop(queues: Dict[bytes, 'Queue[bytes]'],
generation, separate from assembly packet output.
"""
sys.stdin = os.fdopen(stdin_fd)
window = TxWindow(contact_list, group_list)
window = TxWindow(contact_list, group_list)
while True:
with ignored(EOFError, FunctionReturn, KeyboardInterrupt):
readline.set_completer(get_tab_completer(contact_list, group_list, settings, gateway))
readline.parse_and_bind('tab: complete')
with ignored(EOFError, SoftError, KeyboardInterrupt):
readline.set_completer(
get_tab_completer(contact_list, group_list, settings, gateway)
)
readline.parse_and_bind("tab: complete")
window.update_window(group_list)
while not onion_service.is_delivered:
export_onion_service_data(contact_list, settings, onion_service, gateway)
export_onion_service_data(
contact_list, settings, onion_service, gateway
)
while not contact_list.has_local_contact():
new_local_key(contact_list, settings, queues)
while not contact_list.has_contacts():
add_new_contact(contact_list, group_list, settings, queues, onion_service)
add_new_contact(
contact_list, group_list, settings, queues, onion_service
)
while not window.is_selected():
window.select_tx_window(settings, queues, onion_service, gateway)
@ -94,4 +101,13 @@ def input_loop(queues: Dict[bytes, 'Queue[bytes]'],
elif user_input.type == COMMAND:
process_command(
user_input, window, contact_list, group_list, settings, queues, master_key, onion_service, gateway)
user_input,
window,
contact_list,
group_list,
settings,
queues,
master_key,
onion_service,
gateway,
)

View File

@ -25,41 +25,81 @@ import typing
from typing import Any, Dict
from src.common.crypto import argon2_kdf, blake2b, csprng, encrypt_and_sign, X448
from src.common.crypto import argon2_kdf, blake2b, csprng, encrypt_and_sign, X448
from src.common.db_masterkey import MasterKey
from src.common.encoding import bool_to_bytes, int_to_bytes, pub_key_to_short_address, str_to_bytes
from src.common.exceptions import FunctionReturn
from src.common.input import ask_confirmation_code, get_b58_key, nc_bypass_msg, yes
from src.common.output import m_print, phase, print_fingerprint, print_key, print_on_previous_line
from src.common.path import ask_path_gui
from src.common.statics import (ARGON2_PSK_MEMORY_COST, ARGON2_PSK_PARALLELISM, ARGON2_PSK_TIME_COST,
B58_PUBLIC_KEY, CONFIRM_CODE_LENGTH, DONE, ECDHE, FINGERPRINT, FINGERPRINT_LENGTH,
HEADER_KEY, KDB_ADD_ENTRY_HEADER, KEX_STATUS_HAS_RX_PSK, KEX_STATUS_LOCAL_KEY,
KEX_STATUS_NO_RX_PSK, KEX_STATUS_PENDING, KEX_STATUS_UNVERIFIED,
KEX_STATUS_VERIFIED, KEY_EX_ECDHE, KEY_EX_PSK_RX, KEY_EX_PSK_TX,
KEY_MANAGEMENT_QUEUE, LOCAL_KEY_DATAGRAM_HEADER, LOCAL_KEY_RDY, LOCAL_NICK,
LOCAL_PUBKEY, MESSAGE_KEY, NC_BYPASS_START, NC_BYPASS_STOP,
PUBLIC_KEY_DATAGRAM_HEADER, RELAY_PACKET_QUEUE, RESET, SYMMETRIC_KEY_LENGTH,
TFC_PUBLIC_KEY_LENGTH, UNENCRYPTED_DATAGRAM_HEADER, UNENCRYPTED_ONION_SERVICE_DATA,
WIN_TYPE_GROUP)
from src.common.encoding import (
bool_to_bytes,
int_to_bytes,
pub_key_to_short_address,
str_to_bytes,
)
from src.common.exceptions import SoftError
from src.common.input import ask_confirmation_code, get_b58_key, nc_bypass_msg, yes
from src.common.misc import reset_terminal
from src.common.output import (
m_print,
phase,
print_fingerprint,
print_key,
print_on_previous_line,
)
from src.common.path import ask_path_gui
from src.common.statics import (
ARGON2_PSK_MEMORY_COST,
ARGON2_PSK_PARALLELISM,
ARGON2_PSK_TIME_COST,
B58_PUBLIC_KEY,
CONFIRM_CODE_LENGTH,
DONE,
ECDHE,
FINGERPRINT_LENGTH,
KDB_ADD_ENTRY_HEADER,
KEX_STATUS_HAS_RX_PSK,
KEX_STATUS_LOCAL_KEY,
KEX_STATUS_NO_RX_PSK,
KEX_STATUS_PENDING,
KEX_STATUS_UNVERIFIED,
KEX_STATUS_VERIFIED,
KEY_EX_ECDHE,
KEY_EX_PSK_RX,
KEY_EX_PSK_TX,
KEY_MANAGEMENT_QUEUE,
LOCAL_KEY_DATAGRAM_HEADER,
LOCAL_KEY_RDY,
LOCAL_NICK,
LOCAL_PUBKEY,
NC_BYPASS_START,
NC_BYPASS_STOP,
PUBLIC_KEY_DATAGRAM_HEADER,
RELAY_PACKET_QUEUE,
TFC_PUBLIC_KEY_LENGTH,
UNENCRYPTED_DATAGRAM_HEADER,
UNENCRYPTED_ONION_SERVICE_DATA,
WIN_TYPE_GROUP,
)
from src.transmitter.packet import queue_command, queue_to_nc
if typing.TYPE_CHECKING:
from multiprocessing import Queue
from src.common.db_contacts import ContactList
from src.common.db_onion import OnionService
from src.common.db_settings import Settings
from src.common.gateway import Gateway
from multiprocessing import Queue
from src.common.db_contacts import Contact, ContactList
from src.common.db_onion import OnionService
from src.common.db_settings import Settings
from src.common.gateway import Gateway
from src.transmitter.windows import TxWindow
QueueDict = Dict[bytes, Queue[Any]]
def export_onion_service_data(contact_list: 'ContactList',
settings: 'Settings',
onion_service: 'OnionService',
gateway: 'Gateway'
) -> None:
# Onion Service
def export_onion_service_data(
contact_list: "ContactList",
settings: "Settings",
onion_service: "OnionService",
gateway: "Gateway",
) -> None:
"""\
Send the Tor Onion Service's private key and list of Onion Service
public keys of contacts to Relay Program on Networked Computer.
@ -104,44 +144,59 @@ def export_onion_service_data(contact_list: 'ContactList',
"""
m_print("Onion Service setup", bold=True, head_clear=True, head=1, tail=1)
pending_contacts = b''.join(contact_list.get_list_of_pending_pub_keys())
existing_contacts = b''.join(contact_list.get_list_of_existing_pub_keys())
no_pending = int_to_bytes(len(contact_list.get_list_of_pending_pub_keys()))
contact_data = no_pending + pending_contacts + existing_contacts
pending_contacts = b"".join(contact_list.get_list_of_pending_pub_keys())
existing_contacts = b"".join(contact_list.get_list_of_existing_pub_keys())
no_pending = int_to_bytes(len(contact_list.get_list_of_pending_pub_keys()))
contact_data = no_pending + pending_contacts + existing_contacts
relay_command = (UNENCRYPTED_DATAGRAM_HEADER
+ UNENCRYPTED_ONION_SERVICE_DATA
+ onion_service.onion_private_key
+ onion_service.conf_code
+ bool_to_bytes(settings.allow_contact_requests)
+ contact_data)
relay_command = (
UNENCRYPTED_DATAGRAM_HEADER
+ UNENCRYPTED_ONION_SERVICE_DATA
+ onion_service.onion_private_key
+ onion_service.conf_code
+ bool_to_bytes(settings.allow_contact_requests)
+ contact_data
)
deliver_onion_service_data(relay_command, onion_service, gateway)
def deliver_onion_service_data(
relay_command: bytes, onion_service: "OnionService", gateway: "Gateway"
) -> None:
"""Send Onion Service data to Replay Program on Networked Computer."""
gateway.write(relay_command)
while True:
purp_code = ask_confirmation_code('Relay')
purp_code = ask_confirmation_code("Relay")
if purp_code == onion_service.conf_code.hex():
onion_service.is_delivered = True
onion_service.new_confirmation_code()
break
elif purp_code == '':
if purp_code == "":
phase("Resending Onion Service data", head=2)
gateway.write(relay_command)
phase(DONE)
print_on_previous_line(reps=5)
else:
m_print(["Incorrect confirmation code. If Relay Program did not",
"receive Onion Service data, resend it by pressing <Enter>."], head=1)
m_print(
[
"Incorrect confirmation code. If Relay Program did not",
"receive Onion Service data, resend it by pressing <Enter>.",
],
head=1,
)
print_on_previous_line(reps=5, delay=2)
def new_local_key(contact_list: 'ContactList',
settings: 'Settings',
queues: 'QueueDict'
) -> None:
# Local key
def new_local_key(
contact_list: "ContactList", settings: "Settings", queues: "QueueDict"
) -> None:
"""Run local key exchange protocol.
Local key encrypts commands and data sent from Source Computer to
@ -175,102 +230,104 @@ def new_local_key(contact_list: 'ContactList',
"""
try:
if settings.traffic_masking and contact_list.has_local_contact():
raise FunctionReturn("Error: Command is disabled during traffic masking.", head_clear=True)
raise SoftError(
"Error: Command is disabled during traffic masking.", head_clear=True
)
m_print("Local key setup", bold=True, head_clear=True, head=1, tail=1)
if not contact_list.has_local_contact():
time.sleep(0.5)
key = csprng()
hek = csprng()
kek = csprng()
key = csprng()
hek = csprng()
kek = csprng()
c_code = os.urandom(CONFIRM_CODE_LENGTH)
local_key_packet = LOCAL_KEY_DATAGRAM_HEADER + encrypt_and_sign(plaintext=key + hek + c_code, key=kek)
local_key_packet = LOCAL_KEY_DATAGRAM_HEADER + encrypt_and_sign(
plaintext=key + hek + c_code, key=kek
)
# Deliver local key to Destination computer
nc_bypass_msg(NC_BYPASS_START, settings)
queue_to_nc(local_key_packet, queues[RELAY_PACKET_QUEUE])
while True:
print_key("Local key decryption key (to Receiver)", kek, settings)
purp_code = ask_confirmation_code('Receiver')
if purp_code == c_code.hex():
nc_bypass_msg(NC_BYPASS_STOP, settings)
break
elif purp_code == '':
phase("Resending local key", head=2)
queue_to_nc(local_key_packet, queues[RELAY_PACKET_QUEUE])
phase(DONE)
print_on_previous_line(reps=(9 if settings.local_testing_mode else 10))
else:
m_print(["Incorrect confirmation code. If Receiver did not receive",
"the encrypted local key, resend it by pressing <Enter>."], head=1)
print_on_previous_line(reps=(9 if settings.local_testing_mode else 10), delay=2)
deliver_local_key(local_key_packet, kek, c_code, settings, queues)
# Add local contact to contact list database
contact_list.add_contact(LOCAL_PUBKEY,
LOCAL_NICK,
bytes(FINGERPRINT_LENGTH),
bytes(FINGERPRINT_LENGTH),
KEX_STATUS_LOCAL_KEY,
False, False, False)
contact_list.add_contact(
LOCAL_PUBKEY,
LOCAL_NICK,
bytes(FINGERPRINT_LENGTH),
bytes(FINGERPRINT_LENGTH),
KEX_STATUS_LOCAL_KEY,
False,
False,
False,
)
# Add local contact to keyset database
queues[KEY_MANAGEMENT_QUEUE].put((KDB_ADD_ENTRY_HEADER,
LOCAL_PUBKEY,
key, csprng(),
hek, csprng()))
queues[KEY_MANAGEMENT_QUEUE].put(
(KDB_ADD_ENTRY_HEADER, LOCAL_PUBKEY, key, csprng(), hek, csprng())
)
# Notify Receiver that confirmation code was successfully entered
queue_command(LOCAL_KEY_RDY, settings, queues)
m_print("Successfully completed the local key exchange.", bold=True, tail_clear=True, delay=1, head=1)
os.system(RESET)
m_print(
"Successfully completed the local key exchange.",
bold=True,
tail_clear=True,
delay=1,
head=1,
)
reset_terminal()
except (EOFError, KeyboardInterrupt):
raise FunctionReturn("Local key setup aborted.", tail_clear=True, delay=1, head=2)
raise SoftError("Local key setup aborted.", tail_clear=True, delay=1, head=2)
def verify_fingerprints(tx_fp: bytes, # User's fingerprint
rx_fp: bytes # Contact's fingerprint
) -> bool: # True if fingerprints match, else False
"""\
Verify fingerprints over an authenticated out-of-band channel to
detect MITM attacks against TFC's key exchange.
def deliver_local_key(
local_key_packet: bytes,
kek: bytes,
c_code: bytes,
settings: "Settings",
queues: "QueueDict",
) -> None:
"""Deliver encrypted local key to Destination Computer."""
nc_bypass_msg(NC_BYPASS_START, settings)
queue_to_nc(local_key_packet, queues[RELAY_PACKET_QUEUE])
MITM or man-in-the-middle attack is an attack against an inherent
problem in cryptography:
Cryptography is math, nothing more. During key exchange public keys
are just very large numbers. There is no way to tell by looking if a
number (received from an untrusted network / Networked Computer) is
the same number the contact generated.
Public key fingerprints are values designed to be compared by humans
either visually or audibly (or sometimes by using semi-automatic
means such as QR-codes). By comparing the fingerprint over an
authenticated channel it's possible to verify that the correct key
was received from the network.
"""
m_print("To verify received public key was not replaced by an attacker "
"call the contact over an end-to-end encrypted line, preferably Signal "
"(https://signal.org/). Make sure Signal's safety numbers have been "
"verified, and then verbally compare the key fingerprints below.",
head_clear=True, max_width=49, head=1, tail=1)
print_fingerprint(tx_fp, " Your fingerprint (you read) ")
print_fingerprint(rx_fp, "Purported fingerprint for contact (they read)")
return yes("Is the contact's fingerprint correct?")
while True:
print_key("Local key decryption key (to Receiver)", kek, settings)
purp_code = ask_confirmation_code("Receiver")
if purp_code == c_code.hex():
nc_bypass_msg(NC_BYPASS_STOP, settings)
break
elif purp_code == "":
phase("Resending local key", head=2)
queue_to_nc(local_key_packet, queues[RELAY_PACKET_QUEUE])
phase(DONE)
print_on_previous_line(reps=(9 if settings.local_testing_mode else 10))
else:
m_print(
[
"Incorrect confirmation code. If Receiver did not receive",
"the encrypted local key, resend it by pressing <Enter>.",
],
head=1,
)
print_on_previous_line(
reps=(9 if settings.local_testing_mode else 10), delay=2
)
def start_key_exchange(onion_pub_key: bytes, # Public key of contact's v3 Onion Service
nick: str, # Contact's nickname
contact_list: 'ContactList', # Contact list object
settings: 'Settings', # Settings object
queues: 'QueueDict' # Dictionary of multiprocessing queues
) -> None:
# ECDHE
def start_key_exchange(
onion_pub_key: bytes, # Public key of contact's v3 Onion Service
nick: str, # Contact's nickname
contact_list: "ContactList", # ContactList object
settings: "Settings", # Settings object
queues: "QueueDict", # Dictionary of multiprocessing queues
) -> None:
"""Start X448 key exchange with the recipient.
This function first creates the X448 key pair. It then outputs the
@ -309,12 +366,17 @@ def start_key_exchange(onion_pub_key: bytes, # Public key of contact's
mk = message key hk = header key
"""
if not contact_list.has_pub_key(onion_pub_key):
contact_list.add_contact(onion_pub_key, nick,
bytes(FINGERPRINT_LENGTH), bytes(FINGERPRINT_LENGTH),
KEX_STATUS_PENDING,
settings.log_messages_by_default,
settings.accept_files_by_default,
settings.show_notifications_by_default)
contact_list.add_contact(
onion_pub_key,
nick,
bytes(FINGERPRINT_LENGTH),
bytes(FINGERPRINT_LENGTH),
KEX_STATUS_PENDING,
settings.log_messages_by_default,
settings.accept_files_by_default,
settings.show_notifications_by_default,
)
contact = contact_list.get_contact_by_pub_key(onion_pub_key)
# Generate new private key or load cached private key
@ -325,125 +387,244 @@ def start_key_exchange(onion_pub_key: bytes, # Public key of contact's
try:
tfc_public_key_user = X448.derive_public_key(tfc_private_key_user)
tfc_public_key_contact = exchange_public_keys(
onion_pub_key, tfc_public_key_user, contact, settings, queues
)
# Import public key of contact
while True:
public_key_packet = PUBLIC_KEY_DATAGRAM_HEADER + onion_pub_key + tfc_public_key_user
queue_to_nc(public_key_packet, queues[RELAY_PACKET_QUEUE])
validate_contact_public_key(tfc_public_key_contact)
tfc_public_key_contact = get_b58_key(B58_PUBLIC_KEY, settings, contact.short_address)
if tfc_public_key_contact != b'':
break
# Validate public key of contact
if len(tfc_public_key_contact) != TFC_PUBLIC_KEY_LENGTH:
m_print(["Warning!",
"Received invalid size public key.",
"Aborting key exchange for your safety."], bold=True, tail=1)
raise FunctionReturn("Error: Invalid public key length", output=False)
if tfc_public_key_contact == bytes(TFC_PUBLIC_KEY_LENGTH):
# The public key of contact is zero with negligible probability,
# therefore we assume such key is malicious and attempts to set
# the shared key to zero.
m_print(["Warning!",
"Received a malicious zero-public key.",
"Aborting key exchange for your safety."], bold=True, tail=1)
raise FunctionReturn("Error: Zero public key", output=False)
# Derive the shared key
dh_shared_key = X448.shared_key(tfc_private_key_user, tfc_public_key_contact)
# Domain separate unidirectional keys from shared key by using public
# keys as message and the context variable as personalization string.
tx_mk = blake2b(tfc_public_key_contact, dh_shared_key, person=MESSAGE_KEY, digest_size=SYMMETRIC_KEY_LENGTH)
rx_mk = blake2b(tfc_public_key_user, dh_shared_key, person=MESSAGE_KEY, digest_size=SYMMETRIC_KEY_LENGTH)
tx_hk = blake2b(tfc_public_key_contact, dh_shared_key, person=HEADER_KEY, digest_size=SYMMETRIC_KEY_LENGTH)
rx_hk = blake2b(tfc_public_key_user, dh_shared_key, person=HEADER_KEY, digest_size=SYMMETRIC_KEY_LENGTH)
tx_mk, rx_mk, tx_hk, rx_hk, tx_fp, rx_fp = X448.derive_keys(
dh_shared_key, tfc_public_key_user, tfc_public_key_contact
)
# Domain separate fingerprints of public keys by using the
# shared secret as key and the context variable as
# personalization string. This way entities who might monitor
# fingerprint verification channel are unable to correlate
# spoken values with public keys that they might see on RAM or
# screen of Networked Computer: Public keys can not be derived
# from the fingerprints due to preimage resistance of BLAKE2b,
# and fingerprints can not be derived from public key without
# the X448 shared key. Using the context variable ensures
# fingerprints are distinct from derived message and header keys.
tx_fp = blake2b(tfc_public_key_user, dh_shared_key, person=FINGERPRINT, digest_size=FINGERPRINT_LENGTH)
rx_fp = blake2b(tfc_public_key_contact, dh_shared_key, person=FINGERPRINT, digest_size=FINGERPRINT_LENGTH)
kex_status = fingerprint_validation(tx_fp, rx_fp)
# Verify fingerprints
try:
if not verify_fingerprints(tx_fp, rx_fp):
m_print(["Warning!",
"Possible man-in-the-middle attack detected.",
"Aborting key exchange for your safety."], bold=True, tail=1)
raise FunctionReturn("Error: Fingerprint mismatch", delay=2.5, output=False)
kex_status = KEX_STATUS_VERIFIED
except (EOFError, KeyboardInterrupt):
m_print(["Skipping fingerprint verification.",
'', "Warning!",
"Man-in-the-middle attacks can not be detected",
"unless fingerprints are verified! To re-verify",
"the contact, use the command '/verify'.",
'', "Press <enter> to continue."],
manual_proceed=True, box=True, head=2, tail=1)
kex_status = KEX_STATUS_UNVERIFIED
# Send keys to the Receiver Program
c_code = blake2b(onion_pub_key, digest_size=CONFIRM_CODE_LENGTH)
command = (KEY_EX_ECDHE
+ onion_pub_key
+ tx_mk + rx_mk
+ tx_hk + rx_hk
+ str_to_bytes(nick))
queue_command(command, settings, queues)
while True:
purp_code = ask_confirmation_code('Receiver')
if purp_code == c_code.hex():
break
elif purp_code == '':
phase("Resending contact data", head=2)
queue_command(command, settings, queues)
phase(DONE)
print_on_previous_line(reps=5)
else:
m_print("Incorrect confirmation code.", head=1)
print_on_previous_line(reps=4, delay=2)
deliver_contact_data(
KEY_EX_ECDHE,
nick,
onion_pub_key,
tx_mk,
rx_mk,
tx_hk,
rx_hk,
queues,
settings,
)
# Store contact data into databases
contact.tfc_private_key = None
contact.tx_fingerprint = tx_fp
contact.rx_fingerprint = rx_fp
contact.kex_status = kex_status
contact.tx_fingerprint = tx_fp
contact.rx_fingerprint = rx_fp
contact.kex_status = kex_status
contact_list.store_contacts()
queues[KEY_MANAGEMENT_QUEUE].put((KDB_ADD_ENTRY_HEADER,
onion_pub_key,
tx_mk, csprng(),
tx_hk, csprng()))
queues[KEY_MANAGEMENT_QUEUE].put(
(KDB_ADD_ENTRY_HEADER, onion_pub_key, tx_mk, rx_mk, tx_hk, rx_hk)
)
m_print(f"Successfully added {nick}.", bold=True, tail_clear=True, delay=1, head=1)
m_print(
f"Successfully added {nick}.", bold=True, tail_clear=True, delay=1, head=1
)
except (EOFError, KeyboardInterrupt):
contact.tfc_private_key = tfc_private_key_user
raise FunctionReturn("Key exchange interrupted.", tail_clear=True, delay=1, head=2)
raise SoftError("Key exchange interrupted.", tail_clear=True, delay=1, head=2)
def create_pre_shared_key(onion_pub_key: bytes, # Public key of contact's v3 Onion Service
nick: str, # Nick of contact
contact_list: 'ContactList', # Contact list object
settings: 'Settings', # Settings object
onion_service: 'OnionService', # OnionService object
queues: 'QueueDict' # Dictionary of multiprocessing queues
) -> None:
def exchange_public_keys(
onion_pub_key: bytes,
tfc_public_key_user: bytes,
contact: "Contact",
settings: "Settings",
queues: "QueueDict",
) -> bytes:
"""Exchange public keys with contact.
This function outputs the user's public key and waits for user to
enter the public key of the contact. If the User presses <Enter>,
the function will resend the users' public key to contact.
"""
while True:
public_key_packet = (
PUBLIC_KEY_DATAGRAM_HEADER + onion_pub_key + tfc_public_key_user
)
queue_to_nc(public_key_packet, queues[RELAY_PACKET_QUEUE])
tfc_public_key_contact = get_b58_key(
B58_PUBLIC_KEY, settings, contact.short_address
)
if tfc_public_key_contact != b"":
break
return tfc_public_key_contact
def validate_contact_public_key(tfc_public_key_contact: bytes) -> None:
"""This function validates the public key from contact.
The validation takes into account key state and it will detect if
the public key is zero, but it can't predict whether the shared key
will be zero. Further validation of the public key is done by the
`src.common.crypto` module.
"""
if len(tfc_public_key_contact) != TFC_PUBLIC_KEY_LENGTH:
m_print(
[
"Warning!",
"Received invalid size public key.",
"Aborting key exchange for your safety.",
],
bold=True,
tail=1,
)
raise SoftError("Error: Invalid public key length", output=False)
if tfc_public_key_contact == bytes(TFC_PUBLIC_KEY_LENGTH):
# The public key of contact is zero with negligible probability,
# therefore we assume such key is malicious and attempts to set
# the shared key to zero.
m_print(
[
"Warning!",
"Received a malicious zero-public key.",
"Aborting key exchange for your safety.",
],
bold=True,
tail=1,
)
raise SoftError("Error: Zero public key", output=False)
def fingerprint_validation(tx_fp: bytes, rx_fp: bytes) -> bytes:
"""Validate or skip validation of contact fingerprint.
This function prompts the user to verify the fingerprint of the contact.
If the user issues Ctrl+{C,D} command, this function will set the key
exchange status as unverified.
"""
try:
if not verify_fingerprints(tx_fp, rx_fp):
m_print(
[
"Warning!",
"Possible man-in-the-middle attack detected.",
"Aborting key exchange for your safety.",
],
bold=True,
tail=1,
)
raise SoftError("Error: Fingerprint mismatch", delay=2.5, output=False)
kex_status = KEX_STATUS_VERIFIED
except (EOFError, KeyboardInterrupt):
m_print(
[
"Skipping fingerprint verification.",
"",
"Warning!",
"Man-in-the-middle attacks can not be detected",
"unless fingerprints are verified! To re-verify",
"the contact, use the command '/verify'.",
"",
"Press <enter> to continue.",
],
manual_proceed=True,
box=True,
head=2,
tail=1,
)
kex_status = KEX_STATUS_UNVERIFIED
return kex_status
def verify_fingerprints(
tx_fp: bytes, rx_fp: bytes # User's fingerprint # Contact's fingerprint
) -> bool: # True if fingerprints match, else False
"""\
Verify fingerprints over an authenticated out-of-band channel to
detect MITM attacks against TFC's key exchange.
MITM or man-in-the-middle attack is an attack against an inherent
problem in cryptography:
Cryptography is math, nothing more. During key exchange public keys
are just very large numbers. There is no way to tell by looking if a
number (received from an untrusted network / Networked Computer) is
the same number the contact generated.
Public key fingerprints are values designed to be compared by humans
either visually or audibly (or sometimes by using semi-automatic
means such as QR-codes). By comparing the fingerprint over an
authenticated channel it's possible to verify that the correct key
was received from the network.
"""
m_print(
"To verify received public key was not replaced by an attacker "
"call the contact over an end-to-end encrypted line, preferably Signal "
"(https://signal.org/). Make sure Signal's safety numbers have been "
"verified, and then verbally compare the key fingerprints below.",
head_clear=True,
max_width=49,
head=1,
tail=1,
)
print_fingerprint(tx_fp, " Your fingerprint (you read) ")
print_fingerprint(rx_fp, "Purported fingerprint for contact (they read)")
return yes("Is the contact's fingerprint correct?")
def deliver_contact_data(
    header: bytes,  # Key type (x448, PSK)
    nick: str,  # Contact's nickname
    onion_pub_key: bytes,  # Public key of contact's v3 Onion Service
    tx_mk: bytes,  # Message key for outgoing messages
    rx_mk: bytes,  # Message key for incoming messages
    tx_hk: bytes,  # Header key for outgoing messages
    rx_hk: bytes,  # Header key for incoming messages
    queues: "QueueDict",  # Dictionary of multiprocessing queues
    settings: "Settings",  # Settings object
) -> None:
    """Deliver contact data to Destination Computer.

    After queueing the contact data command, prompt for the
    confirmation code shown by the Receiver Program. An empty input
    re-sends the contact data; a wrong code re-prompts the user.
    """
    command = (
        header + onion_pub_key + tx_mk + rx_mk + tx_hk + rx_hk + str_to_bytes(nick)
    )
    expected_code = blake2b(onion_pub_key, digest_size=CONFIRM_CODE_LENGTH).hex()

    queue_command(command, settings, queues)

    while True:
        entered_code = ask_confirmation_code("Receiver")

        if entered_code == expected_code:
            return

        if entered_code == "":
            # Empty input: deliver the contact data again.
            phase("Resending contact data", head=2)
            queue_command(command, settings, queues)
            phase(DONE)
            print_on_previous_line(reps=5)
        else:
            m_print("Incorrect confirmation code.", head=1)
            print_on_previous_line(reps=4, delay=2)
# PSK
def create_pre_shared_key(
onion_pub_key: bytes, # Public key of contact's v3 Onion Service
nick: str, # Nick of contact
contact_list: "ContactList", # Contact list object
settings: "Settings", # Settings object
onion_service: "OnionService", # OnionService object
queues: "QueueDict", # Dictionary of multiprocessing queues
) -> None:
"""Generate a new pre-shared key for manual key delivery.
Pre-shared keys offer a low-tech solution against the slowly
@ -471,100 +652,128 @@ def create_pre_shared_key(onion_pub_key: bytes, # Public key of contac
try:
tx_mk = csprng()
tx_hk = csprng()
salt = csprng()
salt = csprng()
password = MasterKey.new_password("password for PSK")
phase("Deriving key encryption key", head=2)
kek = argon2_kdf(password, salt, ARGON2_PSK_TIME_COST, ARGON2_PSK_MEMORY_COST, ARGON2_PSK_PARALLELISM)
kek = argon2_kdf(
password,
salt,
ARGON2_PSK_TIME_COST,
ARGON2_PSK_MEMORY_COST,
ARGON2_PSK_PARALLELISM,
)
phase(DONE)
ct_tag = encrypt_and_sign(tx_mk + tx_hk, key=kek)
while True:
trunc_addr = pub_key_to_short_address(onion_pub_key)
store_d = ask_path_gui(f"Select removable media for {nick}", settings)
f_name = f"{store_d}/{onion_service.user_short_address}.psk - Give to {trunc_addr}"
try:
with open (f_name, 'wb+') as f:
f.write(salt + ct_tag)
f.flush()
os.fsync(f.fileno())
break
except PermissionError:
m_print("Error: Did not have permission to write to the directory.", delay=0.5)
continue
store_keys_on_removable_drive(
ct_tag, salt, nick, onion_pub_key, onion_service, settings
)
c_code = blake2b(onion_pub_key, digest_size=CONFIRM_CODE_LENGTH)
command = (KEY_EX_PSK_TX
+ onion_pub_key
+ tx_mk + csprng()
+ tx_hk + csprng()
+ str_to_bytes(nick))
deliver_contact_data(
KEY_EX_PSK_TX,
nick,
onion_pub_key,
tx_mk,
csprng(),
tx_hk,
csprng(),
queues,
settings,
)
queue_command(command, settings, queues)
contact_list.add_contact(
onion_pub_key,
nick,
bytes(FINGERPRINT_LENGTH),
bytes(FINGERPRINT_LENGTH),
KEX_STATUS_NO_RX_PSK,
settings.log_messages_by_default,
settings.accept_files_by_default,
settings.show_notifications_by_default,
)
while True:
purp_code = ask_confirmation_code('Receiver')
if purp_code == c_code.hex():
break
queues[KEY_MANAGEMENT_QUEUE].put(
(KDB_ADD_ENTRY_HEADER, onion_pub_key, tx_mk, csprng(), tx_hk, csprng())
)
elif purp_code == '':
phase("Resending contact data", head=2)
queue_command(command, settings, queues)
phase(DONE)
print_on_previous_line(reps=5)
else:
m_print("Incorrect confirmation code.", head=1)
print_on_previous_line(reps=4, delay=2)
contact_list.add_contact(onion_pub_key, nick,
bytes(FINGERPRINT_LENGTH), bytes(FINGERPRINT_LENGTH),
KEX_STATUS_NO_RX_PSK,
settings.log_messages_by_default,
settings.accept_files_by_default,
settings.show_notifications_by_default)
queues[KEY_MANAGEMENT_QUEUE].put((KDB_ADD_ENTRY_HEADER,
onion_pub_key,
tx_mk, csprng(),
tx_hk, csprng()))
m_print(f"Successfully added {nick}.", bold=True, tail_clear=True, delay=1, head=1)
m_print(
f"Successfully added {nick}.", bold=True, tail_clear=True, delay=1, head=1
)
except (EOFError, KeyboardInterrupt):
raise FunctionReturn("PSK generation aborted.", tail_clear=True, delay=1, head=2)
raise SoftError("PSK generation aborted.", tail_clear=True, delay=1, head=2)
def rxp_load_psk(window: 'TxWindow',
contact_list: 'ContactList',
settings: 'Settings',
queues: 'QueueDict',
) -> None:
def store_keys_on_removable_drive(
    ct_tag: bytes,  # Encrypted PSK
    salt: bytes,  # Salt for PSK decryption key derivation
    nick: str,  # Contact's nickname
    onion_pub_key: bytes,  # Public key of contact's v3 Onion Service
    onion_service: "OnionService",  # OnionService object
    settings: "Settings",  # Settings object
) -> None:
    """Store keys for contact on a removable media.

    The written file contains the salt followed by the encrypted PSK.
    The user is re-prompted for a target directory until the write
    succeeds.
    """
    # The truncated address depends only on the public key, so compute
    # it once instead of on every retry iteration.
    trunc_addr = pub_key_to_short_address(onion_pub_key)

    while True:
        store_d = ask_path_gui(f"Select removable media for {nick}", settings)
        f_name = (
            f"{store_d}/{onion_service.user_short_address}.psk - Give to {trunc_addr}"
        )
        try:
            with open(f_name, "wb+") as f:
                f.write(salt + ct_tag)
                # Flush and fsync so the key data reaches the removable
                # media before the user unplugs it.
                f.flush()
                os.fsync(f.fileno())
            break
        except PermissionError:
            m_print(
                "Error: Did not have permission to write to the directory.", delay=0.5
            )
def rxp_load_psk(
window: "TxWindow",
contact_list: "ContactList",
settings: "Settings",
queues: "QueueDict",
) -> None:
"""Send command to Receiver Program to load PSK for active contact."""
if settings.traffic_masking:
raise FunctionReturn("Error: Command is disabled during traffic masking.", head_clear=True)
raise SoftError(
"Error: Command is disabled during traffic masking.", head_clear=True
)
if window.type == WIN_TYPE_GROUP or window.contact is None:
raise FunctionReturn("Error: Group is selected.", head_clear=True)
raise SoftError("Error: Group is selected.", head_clear=True)
if not contact_list.get_contact_by_pub_key(window.uid).uses_psk():
raise FunctionReturn(f"Error: The current key was exchanged with {ECDHE}.", head_clear=True)
raise SoftError(
f"Error: The current key was exchanged with {ECDHE}.", head_clear=True
)
c_code = blake2b(window.uid, digest_size=CONFIRM_CODE_LENGTH)
c_code = blake2b(window.uid, digest_size=CONFIRM_CODE_LENGTH)
command = KEY_EX_PSK_RX + c_code + window.uid
queue_command(command, settings, queues)
while True:
try:
purp_code = ask_confirmation_code('Receiver')
purp_code = ask_confirmation_code("Receiver")
if purp_code == c_code.hex():
window.contact.kex_status = KEX_STATUS_HAS_RX_PSK
contact_list.store_contacts()
raise FunctionReturn(f"Removed PSK reminder for {window.name}.", tail_clear=True, delay=1)
else:
m_print("Incorrect confirmation code.", head=1)
print_on_previous_line(reps=4, delay=2)
raise SoftError(
f"Removed PSK reminder for {window.name}.", tail_clear=True, delay=1
)
m_print("Incorrect confirmation code.", head=1)
print_on_previous_line(reps=4, delay=2)
except (EOFError, KeyboardInterrupt):
raise FunctionReturn("PSK verification aborted.", tail_clear=True, delay=1, head=2)
raise SoftError(
"PSK verification aborted.", tail_clear=True, delay=1, head=2
)

View File

@ -26,39 +26,72 @@ import zlib
from typing import Any, Dict, List, Optional, Tuple, Union
from src.common.crypto import blake2b, byte_padding, csprng, encrypt_and_sign
from src.common.encoding import bool_to_bytes, int_to_bytes, str_to_bytes
from src.common.exceptions import CriticalError, FunctionReturn
from src.common.input import yes
from src.common.misc import split_byte_string
from src.common.output import m_print, phase, print_on_previous_line
from src.common.path import ask_path_gui
from src.common.statics import (ASSEMBLY_PACKET_LENGTH, COMMAND, COMMAND_DATAGRAM_HEADER, COMMAND_PACKET_QUEUE,
COMPRESSION_LEVEL, C_A_HEADER, C_E_HEADER, C_L_HEADER, C_S_HEADER, DONE, FILE,
FILE_DATAGRAM_HEADER, FILE_KEY_HEADER, FILE_PACKET_CTR_LENGTH, F_A_HEADER,
F_C_HEADER, F_E_HEADER, F_L_HEADER, F_S_HEADER, GROUP_MESSAGE_HEADER,
GROUP_MSG_ID_LENGTH, LOCAL_PUBKEY, MESSAGE, MESSAGE_DATAGRAM_HEADER,
MESSAGE_PACKET_QUEUE, M_A_HEADER, M_C_HEADER, M_E_HEADER, M_L_HEADER, M_S_HEADER,
PADDING_LENGTH, PRIVATE_MESSAGE_HEADER, RELAY_PACKET_QUEUE, TM_COMMAND_PACKET_QUEUE,
TM_FILE_PACKET_QUEUE, TM_MESSAGE_PACKET_QUEUE, WIN_TYPE_GROUP)
from src.common.crypto import blake2b, byte_padding, csprng, encrypt_and_sign
from src.common.encoding import bool_to_bytes, int_to_bytes, str_to_bytes
from src.common.exceptions import CriticalError, SoftError
from src.common.input import yes
from src.common.misc import split_byte_string
from src.common.output import m_print, phase, print_on_previous_line
from src.common.path import ask_path_gui
from src.common.statics import (
ASSEMBLY_PACKET_LENGTH,
COMMAND,
COMMAND_DATAGRAM_HEADER,
COMMAND_PACKET_QUEUE,
COMPRESSION_LEVEL,
C_A_HEADER,
C_E_HEADER,
C_L_HEADER,
C_S_HEADER,
DONE,
FILE,
FILE_DATAGRAM_HEADER,
FILE_KEY_HEADER,
FILE_PACKET_CTR_LENGTH,
F_A_HEADER,
F_C_HEADER,
F_E_HEADER,
F_L_HEADER,
F_S_HEADER,
GROUP_MESSAGE_HEADER,
GROUP_MSG_ID_LENGTH,
LOCAL_PUBKEY,
MESSAGE,
MESSAGE_DATAGRAM_HEADER,
MESSAGE_PACKET_QUEUE,
M_A_HEADER,
M_C_HEADER,
M_E_HEADER,
M_L_HEADER,
M_S_HEADER,
PADDING_LENGTH,
PRIVATE_MESSAGE_HEADER,
RELAY_PACKET_QUEUE,
TM_COMMAND_PACKET_QUEUE,
TM_FILE_PACKET_QUEUE,
TM_MESSAGE_PACKET_QUEUE,
WIN_TYPE_GROUP,
)
from src.transmitter.files import File
from src.transmitter.files import File
from src.transmitter.window_mock import MockWindow
from src.transmitter.user_input import UserInput
if typing.TYPE_CHECKING:
from multiprocessing import Queue
from src.common.db_keys import KeyList
from multiprocessing import Queue
from src.common.db_keys import KeyList
from src.common.db_masterkey import MasterKey
from src.common.db_settings import Settings
from src.common.gateway import Gateway
from src.transmitter.windows import TxWindow, MockWindow
QueueDict = Dict[bytes, Queue[Any]]
log_queue_data = Tuple[Optional[bytes], bytes, Optional[bool], Optional[bool], MasterKey]
from src.common.db_settings import Settings
from src.common.gateway import Gateway
from src.transmitter.windows import TxWindow
QueueDict = Dict[bytes, Queue[Any]]
log_queue_data = Tuple[
Optional[bytes], bytes, Optional[bool], Optional[bool], MasterKey
]
def queue_to_nc(packet: bytes,
nc_queue: 'Queue[bytes]',
) -> None:
def queue_to_nc(packet: bytes, nc_queue: "Queue[bytes]",) -> None:
"""Queue unencrypted command/exported file to Networked Computer.
This function queues unencrypted packets intended for Relay Program
@ -69,24 +102,22 @@ def queue_to_nc(packet: bytes,
nc_queue.put(packet)
def queue_command(command: bytes,
settings: 'Settings',
queues: 'QueueDict'
) -> None:
def queue_command(command: bytes, settings: "Settings", queues: "QueueDict") -> None:
    """Split the command into assembly packets and queue them for sender_loop()."""
    packets = split_to_assembly_packets(command, COMMAND)
    queue_assembly_packets(packets, COMMAND, settings, queues)
def queue_message(user_input: 'UserInput',
window: Union['MockWindow', 'TxWindow'],
settings: 'Settings',
queues: 'QueueDict',
header: bytes = b'',
whisper: bool = False,
log_as_ph: bool = False
) -> None:
def queue_message(
user_input: "UserInput",
window: Union["MockWindow", "TxWindow"],
settings: "Settings",
queues: "QueueDict",
header: bytes = b"",
whisper: bool = False,
log_as_ph: bool = False,
) -> None:
"""\
Prepend header to message, split the message into assembly packets,
and queue the assembly packets.
@ -124,20 +155,23 @@ def queue_message(user_input: 'UserInput',
"""
if not header:
if window.type == WIN_TYPE_GROUP and window.group is not None:
header = GROUP_MESSAGE_HEADER + window.group.group_id + os.urandom(GROUP_MSG_ID_LENGTH)
header = (
GROUP_MESSAGE_HEADER
+ window.group.group_id
+ os.urandom(GROUP_MSG_ID_LENGTH)
)
else:
header = PRIVATE_MESSAGE_HEADER
payload = bool_to_bytes(whisper) + header + user_input.plaintext.encode()
payload = bool_to_bytes(whisper) + header + user_input.plaintext.encode()
assembly_packets = split_to_assembly_packets(payload, MESSAGE)
queue_assembly_packets(assembly_packets, MESSAGE, settings, queues, window, log_as_ph)
queue_assembly_packets(
assembly_packets, MESSAGE, settings, queues, window, log_as_ph
)
def queue_file(window: 'TxWindow',
settings: 'Settings',
queues: 'QueueDict'
) -> None:
def queue_file(window: "TxWindow", settings: "Settings", queues: "QueueDict") -> None:
"""Ask file path and load file data.
In TFC there are two ways to send a file.
@ -159,35 +193,51 @@ def queue_file(window: 'TxWindow',
"""
path = ask_path_gui("Select file to send...", settings, get_file=True)
if path.endswith(('tx_contacts', 'tx_groups', 'tx_keys', 'tx_login_data', 'tx_settings',
'rx_contacts', 'rx_groups', 'rx_keys', 'rx_login_data', 'rx_settings',
'tx_serial_settings.json', 'nc_serial_settings.json',
'rx_serial_settings.json', 'tx_onion_db')):
raise FunctionReturn("Error: Can't send TFC database.", head_clear=True)
if path.endswith(
(
"tx_contacts",
"tx_groups",
"tx_keys",
"tx_login_data",
"tx_settings",
"rx_contacts",
"rx_groups",
"rx_keys",
"rx_login_data",
"rx_settings",
"tx_serial_settings.json",
"nc_serial_settings.json",
"rx_serial_settings.json",
"tx_onion_db",
)
):
raise SoftError("Error: Can't send TFC database.", head_clear=True)
if not settings.traffic_masking:
send_file(path, settings, queues, window)
return
file = File(path, window, settings)
file = File(path, window, settings)
assembly_packets = split_to_assembly_packets(file.plaintext, FILE)
if settings.confirm_sent_files:
try:
if not yes(f"Send {file.name.decode()} ({file.size_hr}) to {window.type_print} {window.name} "
f"({len(assembly_packets)} packets, time: {file.time_hr})?"):
raise FunctionReturn("File selection aborted.", head_clear=True)
if not yes(
f"Send {file.name.decode()} ({file.size_hr}) to {window.type_print} {window.name} "
f"({len(assembly_packets)} packets, time: {file.time_hr})?"
):
raise SoftError("File selection aborted.", head_clear=True)
except (EOFError, KeyboardInterrupt):
raise FunctionReturn("File selection aborted.", head_clear=True)
raise SoftError("File selection aborted.", head_clear=True)
queue_assembly_packets(assembly_packets, FILE, settings, queues, window, log_as_ph=True)
queue_assembly_packets(
assembly_packets, FILE, settings, queues, window, log_as_ph=True
)
def send_file(path: str,
settings: 'Settings',
queues: 'QueueDict',
window: 'TxWindow'
) -> None:
def send_file(
path: str, settings: "Settings", queues: "QueueDict", window: "TxWindow"
) -> None:
"""Send file to window members in a single transmission.
This is the default mode for file transmission, used when traffic
@ -250,23 +300,23 @@ def send_file(path: str,
of Bob, it's as if Chuck had dropped Alice's file and sent him
another file instead.
"""
from src.transmitter.windows import MockWindow # Avoid circular import
if settings.traffic_masking:
raise FunctionReturn("Error: Command is disabled during traffic masking.", head_clear=True)
raise SoftError(
"Error: Command is disabled during traffic masking.", head_clear=True
)
name = path.split('/')[-1]
name = path.split("/")[-1]
data = bytearray()
data.extend(str_to_bytes(name))
if not os.path.isfile(path):
raise FunctionReturn("Error: File not found.", head_clear=True)
raise SoftError("Error: File not found.", head_clear=True)
if os.path.getsize(path) == 0:
raise FunctionReturn("Error: Target file is empty.", head_clear=True)
if not os.path.getsize(path):
raise SoftError("Error: Target file is empty.", head_clear=True)
phase("Reading data")
with open(path, 'rb') as f:
with open(path, "rb") as f:
data.extend(f.read())
phase(DONE)
print_on_previous_line(flush=True)
@ -278,25 +328,27 @@ def send_file(path: str,
phase("Encrypting data")
file_key = csprng()
file_ct = encrypt_and_sign(comp, file_key)
ct_hash = blake2b(file_ct)
file_ct = encrypt_and_sign(comp, file_key)
ct_hash = blake2b(file_ct)
phase(DONE)
print_on_previous_line(flush=True)
phase("Exporting data")
no_contacts = int_to_bytes(len(window))
ser_contacts = b''.join([c.onion_pub_key for c in window])
file_packet = FILE_DATAGRAM_HEADER + no_contacts + ser_contacts + file_ct
no_contacts = int_to_bytes(len(window))
ser_contacts = b"".join([c.onion_pub_key for c in window])
file_packet = FILE_DATAGRAM_HEADER + no_contacts + ser_contacts + file_ct
queue_to_nc(file_packet, queues[RELAY_PACKET_QUEUE])
key_delivery_msg = base64.b85encode(ct_hash + file_key).decode()
for contact in window:
queue_message(user_input=UserInput(key_delivery_msg, MESSAGE),
window =MockWindow(contact.onion_pub_key, [contact]),
settings =settings,
queues =queues,
header =FILE_KEY_HEADER,
log_as_ph =True)
queue_message(
user_input=UserInput(key_delivery_msg, MESSAGE),
window=MockWindow(contact.onion_pub_key, [contact]),
settings=settings,
queues=queues,
header=FILE_KEY_HEADER,
log_as_ph=True,
)
phase(DONE)
print_on_previous_line(flush=True)
m_print(f"Sent file '{name}' to {window.type_print} {window.name}.")
@ -336,7 +388,7 @@ def split_to_assembly_packets(payload: bytes, p_type: str) -> List[bytes]:
payload = zlib.compress(payload, level=COMPRESSION_LEVEL)
if len(payload) < PADDING_LENGTH:
padded = byte_padding(payload)
padded = byte_padding(payload)
packet_list = [s_header + padded]
else:
@ -357,20 +409,23 @@ def split_to_assembly_packets(payload: bytes, p_type: str) -> List[bytes]:
if p_type == FILE:
p_list[0] = int_to_bytes(len(p_list)) + p_list[0][FILE_PACKET_CTR_LENGTH:]
packet_list = ([l_header + p_list[0]] +
[a_header + p for p in p_list[1:-1]] +
[e_header + p_list[-1]])
packet_list = (
[l_header + p_list[0]]
+ [a_header + p for p in p_list[1:-1]]
+ [e_header + p_list[-1]]
)
return packet_list
def queue_assembly_packets(assembly_packet_list: List[bytes],
p_type: str,
settings: 'Settings',
queues: 'QueueDict',
window: Optional[Union['TxWindow', 'MockWindow']] = None,
log_as_ph: bool = False
) -> None:
def queue_assembly_packets(
assembly_packet_list: List[bytes],
p_type: str,
settings: "Settings",
queues: "QueueDict",
window: Optional[Union["TxWindow", "MockWindow"]] = None,
log_as_ph: bool = False,
) -> None:
"""Queue assembly packets for sender_loop().
This function is the last function on Transmitter Program's
@ -382,29 +437,48 @@ def queue_assembly_packets(assembly_packet_list: List[bytes],
if p_type in [MESSAGE, FILE] and window is not None:
if settings.traffic_masking:
queue = queues[TM_MESSAGE_PACKET_QUEUE] if p_type == MESSAGE else queues[TM_FILE_PACKET_QUEUE]
queue = (
queues[TM_MESSAGE_PACKET_QUEUE]
if p_type == MESSAGE
else queues[TM_FILE_PACKET_QUEUE]
)
for assembly_packet in assembly_packet_list:
queue.put((assembly_packet, window.log_messages, log_as_ph))
else:
queue = queues[MESSAGE_PACKET_QUEUE]
for c in window:
for assembly_packet in assembly_packet_list:
queue.put((assembly_packet, c.onion_pub_key, window.log_messages, log_as_ph, window.uid))
queue.put(
(
assembly_packet,
c.onion_pub_key,
window.log_messages,
log_as_ph,
window.uid,
)
)
elif p_type == COMMAND:
queue = queues[TM_COMMAND_PACKET_QUEUE] if settings.traffic_masking else queues[COMMAND_PACKET_QUEUE]
queue = (
queues[TM_COMMAND_PACKET_QUEUE]
if settings.traffic_masking
else queues[COMMAND_PACKET_QUEUE]
)
for assembly_packet in assembly_packet_list:
queue.put(assembly_packet)
def send_packet(key_list: 'KeyList', # Key list object
gateway: 'Gateway', # Gateway object
log_queue: 'Queue[log_queue_data]', # Multiprocessing queue for logged messages
assembly_packet: bytes, # Padded plaintext assembly packet
onion_pub_key: Optional[bytes] = None, # Recipient v3 Onion Service address
log_messages: Optional[bool] = None, # When True, log the message assembly packet
log_as_ph: Optional[bool] = None # When True, log assembly packet as placeholder data
) -> None:
def send_packet(
key_list: "KeyList", # Key list object
gateway: "Gateway", # Gateway object
log_queue: "Queue[log_queue_data]", # Multiprocessing queue for logged messages
assembly_packet: bytes, # Padded plaintext assembly packet
onion_pub_key: Optional[bytes] = None, # Recipient v3 Onion Service address
log_messages: Optional[bool] = None, # When True, log the message assembly packet
log_as_ph: Optional[
bool
] = None, # When True, log assembly packet as placeholder data
) -> None:
"""Encrypt and send assembly packet.
The assembly packets are encrypted using a symmetric message key.
@ -443,22 +517,25 @@ def send_packet(key_list: 'KeyList', # Key list object
keyset = key_list.get_keyset(onion_pub_key)
header = MESSAGE_DATAGRAM_HEADER + onion_pub_key
harac_in_bytes = int_to_bytes(keyset.tx_harac)
encrypted_harac = encrypt_and_sign(harac_in_bytes, keyset.tx_hk)
harac_in_bytes = int_to_bytes(keyset.tx_harac)
encrypted_harac = encrypt_and_sign(harac_in_bytes, keyset.tx_hk)
encrypted_message = encrypt_and_sign(assembly_packet, keyset.tx_mk)
encrypted_packet = header + encrypted_harac + encrypted_message
encrypted_packet = header + encrypted_harac + encrypted_message
gateway.write(encrypted_packet)
keyset.rotate_tx_mk()
log_queue.put((onion_pub_key, assembly_packet, log_messages, log_as_ph, key_list.master_key))
log_queue.put(
(onion_pub_key, assembly_packet, log_messages, log_as_ph, key_list.master_key)
)
def cancel_packet(user_input: 'UserInput',
window: 'TxWindow',
settings: 'Settings',
queues: 'QueueDict'
) -> None:
def cancel_packet(
user_input: "UserInput",
window: "TxWindow",
settings: "Settings",
queues: "QueueDict",
) -> None:
"""Cancel sent message/file to contact/group.
In cases where the assembly packets have not yet been encrypted or
@ -474,14 +551,21 @@ def cancel_packet(user_input: 'UserInput',
re-writing it in a compiled language (which is very bad for users'
rights).
"""
header, p_type = dict(cm=(M_C_HEADER, 'messages'),
cf=(F_C_HEADER, 'files' ))[user_input.plaintext]
header, p_type = dict(cm=(M_C_HEADER, "messages"), cf=(F_C_HEADER, "files"))[
user_input.plaintext
]
if settings.traffic_masking:
queue = queues[TM_MESSAGE_PACKET_QUEUE] if header == M_C_HEADER else queues[TM_FILE_PACKET_QUEUE]
queue = (
queues[TM_MESSAGE_PACKET_QUEUE]
if header == M_C_HEADER
else queues[TM_FILE_PACKET_QUEUE]
)
else:
if header == F_C_HEADER:
raise FunctionReturn("Files are only queued during traffic masking.", head_clear=True)
raise SoftError(
"Files are only queued during traffic masking.", head_clear=True
)
queue = queues[MESSAGE_PACKET_QUEUE]
cancel_pt = header + bytes(PADDING_LENGTH)
@ -489,42 +573,65 @@ def cancel_packet(user_input: 'UserInput',
cancel = False
if settings.traffic_masking:
if queue.qsize() != 0:
cancel = True
# Get most recent log_messages setting status in queue
log_messages = False
while queue.qsize() != 0:
log_messages = queue.get()[1]
queue.put((cancel_pt, log_messages, log_as_ph))
m_print(f"Cancelled queues {p_type}." if cancel else f"No {p_type} to cancel.", head=1, tail=1)
cancel_traffic_masking_packet(cancel, cancel_pt, log_as_ph, p_type, queue)
else:
p_buffer = []
while queue.qsize() != 0:
queue_data = queue.get()
window_uid = queue_data[4]
cancel_standard_packet(cancel, cancel_pt, log_as_ph, p_type, queue, window)
# Put messages unrelated to the active window into the buffer
if window_uid != window.uid:
p_buffer.append(queue_data)
else:
cancel = True
# Put cancel packets for each window contact to queue first
if cancel:
for c in window:
queue.put((cancel_pt, c.onion_pub_key, c.log_messages, log_as_ph, window.uid))
def cancel_standard_packet(
cancel: bool,
cancel_pt: bytes,
log_as_ph: bool,
p_type: str,
queue: "Queue[Any]",
window: "TxWindow",
) -> None:
"""Cancel standard packet."""
p_buffer = []
while queue.qsize():
queue_data = queue.get()
window_uid = queue_data[4]
# Put buffered tuples back to the queue
for p in p_buffer:
queue.put(p)
if cancel:
message = f"Cancelled queued {p_type} to {window.type_print} {window.name}."
# Put messages unrelated to the active window into the buffer
if window_uid != window.uid:
p_buffer.append(queue_data)
else:
message = f"No {p_type} queued for {window.type_print} {window.name}."
cancel = True
raise FunctionReturn(message, head_clear=True)
# Put cancel packets for each window contact to queue first
if cancel:
for c in window:
queue.put(
(cancel_pt, c.onion_pub_key, c.log_messages, log_as_ph, window.uid)
)
# Put buffered tuples back to the queue
for p in p_buffer:
queue.put(p)
if cancel:
message = f"Cancelled queued {p_type} to {window.type_print} {window.name}."
else:
message = f"No {p_type} queued for {window.type_print} {window.name}."
raise SoftError(message, head_clear=True)
def cancel_traffic_masking_packet(
    cancel: bool,  # When True, report cancellation even if the queue was empty
    cancel_pt: bytes,  # Cancel assembly packet (header + padding)
    log_as_ph: bool,  # When True, log the assembly packet as placeholder data
    p_type: str,  # Human-readable packet type ('messages' or 'files')
    queue: "Queue[Any]",  # Queue of (packet, log_messages, log_as_ph) tuples
) -> None:
    """Cancel the packets queued during traffic masking.

    Drains the queue while tracking the most recent log_messages
    setting, then replaces the queued packets with a single cancel
    packet carrying that setting.
    """
    if queue.qsize():
        cancel = True

        # Get most recent log_messages setting status in queue so the
        # cancel packet is logged consistently with what it replaces.
        log_messages = False
        while queue.qsize():
            log_messages = queue.get()[1]

        queue.put((cancel_pt, log_messages, log_as_ph))

    # Fix: message previously read "Cancelled queues {p_type}.";
    # sibling cancel_standard_packet uses "Cancelled queued {p_type}".
    m_print(
        f"Cancelled queued {p_type}." if cancel else f"No {p_type} to cancel.",
        head=1,
        tail=1,
    )

View File

@ -24,53 +24,71 @@ import typing
from typing import Any, Dict, List, Optional, Tuple
from src.common.misc import ignored
from src.common.statics import (COMMAND_PACKET_QUEUE, DATAGRAM_HEADER_LENGTH, EXIT, EXIT_QUEUE, KEY_MANAGEMENT_QUEUE,
LOG_PACKET_QUEUE, MESSAGE_PACKET_QUEUE, RELAY_PACKET_QUEUE, SENDER_MODE_QUEUE,
TM_COMMAND_PACKET_QUEUE, TM_FILE_PACKET_QUEUE, TM_MESSAGE_PACKET_QUEUE,
TM_NOISE_COMMAND_QUEUE, TM_NOISE_PACKET_QUEUE, TRAFFIC_MASKING,
TRAFFIC_MASKING_QUEUE_CHECK_DELAY, UNENCRYPTED_EXIT_COMMAND, UNENCRYPTED_WIPE_COMMAND,
WINDOW_SELECT_QUEUE, WIPE)
from src.common.exceptions import SoftError
from src.common.misc import HideRunTime, ignored
from src.common.statics import (
COMMAND_PACKET_QUEUE,
DATAGRAM_HEADER_LENGTH,
EXIT,
EXIT_QUEUE,
KEY_MANAGEMENT_QUEUE,
LOG_PACKET_QUEUE,
MESSAGE_PACKET_QUEUE,
RELAY_PACKET_QUEUE,
SENDER_MODE_QUEUE,
TM_COMMAND_PACKET_QUEUE,
TM_FILE_PACKET_QUEUE,
TM_MESSAGE_PACKET_QUEUE,
TM_NOISE_COMMAND_QUEUE,
TM_NOISE_PACKET_QUEUE,
TRAFFIC_MASKING,
TRAFFIC_MASKING_QUEUE_CHECK_DELAY,
UNENCRYPTED_EXIT_COMMAND,
UNENCRYPTED_WIPE_COMMAND,
WINDOW_SELECT_QUEUE,
WIPE,
)
from src.transmitter.packet import send_packet
from src.transmitter.traffic_masking import HideRunTime
from src.transmitter.packet import send_packet
if typing.TYPE_CHECKING:
from multiprocessing import Queue
from src.common.db_keys import KeyList
from multiprocessing import Queue
from src.common.db_keys import KeyList
from src.common.db_settings import Settings
from src.common.gateway import Gateway
QueueDict = Dict[bytes, Queue[Any]]
Message_buffer = Dict[bytes, List[Tuple[bytes, bytes, bool, bool, bytes]]]
from src.common.gateway import Gateway
QueueDict = Dict[bytes, Queue[Any]]
MessageBuffer = Dict[bytes, List[Tuple[bytes, bytes, bool, bool, bytes]]]
def sender_loop(queues: 'QueueDict',
settings: 'Settings',
gateway: 'Gateway',
key_list: 'KeyList',
unit_test: bool = False
) -> None:
def sender_loop(
queues: "QueueDict",
settings: "Settings",
gateway: "Gateway",
key_list: "KeyList",
unit_test: bool = False,
) -> None:
"""Output packets from queues based on queue priority.
Depending on traffic masking setting adjusted by the user, enable
either traffic masking or standard sender loop for packet output.
"""
m_buffer = dict() # type: Message_buffer
m_buffer = dict() # type: MessageBuffer
while True:
if settings.traffic_masking:
settings = traffic_masking_loop(queues, settings, gateway, key_list)
else:
settings, m_buffer = standard_sender_loop(queues, gateway, key_list, m_buffer)
settings, m_buffer = standard_sender_loop(
queues, gateway, key_list, m_buffer
)
if unit_test:
break
def traffic_masking_loop(queues: 'QueueDict',
settings: 'Settings',
gateway: 'Gateway',
key_list: 'KeyList',
) -> 'Settings':
def traffic_masking_loop(
queues: "QueueDict", settings: "Settings", gateway: "Gateway", key_list: "KeyList",
) -> "Settings":
"""Run Transmitter Program in traffic masking mode.
The traffic masking loop loads assembly packets from a set of queues.
@ -104,24 +122,23 @@ def traffic_masking_loop(queues: 'QueueDict',
reveals to Networked Computer when the user operates the Source
Computer.
"""
ws_queue = queues[WINDOW_SELECT_QUEUE]
m_queue = queues[TM_MESSAGE_PACKET_QUEUE]
f_queue = queues[TM_FILE_PACKET_QUEUE]
c_queue = queues[TM_COMMAND_PACKET_QUEUE]
np_queue = queues[TM_NOISE_PACKET_QUEUE]
nc_queue = queues[TM_NOISE_COMMAND_QUEUE]
rp_queue = queues[RELAY_PACKET_QUEUE]
ws_queue = queues[WINDOW_SELECT_QUEUE]
m_queue = queues[TM_MESSAGE_PACKET_QUEUE]
f_queue = queues[TM_FILE_PACKET_QUEUE]
c_queue = queues[TM_COMMAND_PACKET_QUEUE]
np_queue = queues[TM_NOISE_PACKET_QUEUE]
nc_queue = queues[TM_NOISE_COMMAND_QUEUE]
log_queue = queues[LOG_PACKET_QUEUE]
sm_queue = queues[SENDER_MODE_QUEUE]
sm_queue = queues[SENDER_MODE_QUEUE]
while True:
with ignored(EOFError, KeyboardInterrupt):
while ws_queue.qsize() == 0:
while not ws_queue.qsize():
time.sleep(0.01)
window_contacts = ws_queue.get()
# Window selection command to Receiver Program.
while c_queue.qsize() == 0:
while not c_queue.qsize():
time.sleep(0.01)
send_packet(key_list, gateway, log_queue, c_queue.get())
break
@ -133,22 +150,38 @@ def traffic_masking_loop(queues: 'QueueDict',
# Choosing element from list is constant time.
#
# First queue we evaluate: if m_queue has data Second to evaluate. If m_queue
# in it, False is evaluated as 0, and we load has no data but f_queue has, the
# the first nested list. At that point we load False is evaluated as 0 meaning
# from m_queue regardless of f_queue state. f_queue (True as 1 and np_queue)
# | |
# v v
queue = [[m_queue, m_queue], [f_queue, np_queue]][m_queue.qsize() == 0][f_queue.qsize() == 0]
# First queue we evaluate: if m_queue has data
# in it, False is evaluated as 0, and we load
# the first nested list. At that point we load
# from m_queue regardless of f_queue state.
# |
# v
queue = [[m_queue, m_queue], [f_queue, np_queue]][m_queue.qsize() == 0][
f_queue.qsize() == 0
] # ^
# |
# Second queue to evaluate. If m_queue has no data but f_queue has,
# the False is evaluated as 0 meaning f_queue (True as 1 and np_queue)
# Regardless of queue, each .get() returns a tuple with identical
# amount of data: 256 bytes long bytestring and two booleans.
assembly_packet, log_messages, log_as_ph = queue.get() # type: bytes, bool, bool
(
assembly_packet,
log_messages,
_,
) = queue.get() # type: bytes, bool, bool
for c in window_contacts:
# Message/file assembly packet to window contact.
with HideRunTime(settings, delay_type=TRAFFIC_MASKING):
send_packet(key_list, gateway, log_queue, assembly_packet, c.onion_pub_key, log_messages)
send_packet(
key_list,
gateway,
log_queue,
assembly_packet,
c.onion_pub_key,
log_messages,
)
# Send a command between each assembly packet for each contact.
with HideRunTime(settings, delay_type=TRAFFIC_MASKING):
@ -161,32 +194,42 @@ def traffic_masking_loop(queues: 'QueueDict',
send_packet(key_list, gateway, log_queue, command)
# The two queues below are empty until the user is willing to reveal to
# Networked Computer they are either disabling Traffic masking or exiting
# TFC. Until that happens, queue status check takes constant time.
# Check for unencrypted commands that close TFC.
if rp_queue.qsize() != 0:
packet = rp_queue.get()
command = packet[DATAGRAM_HEADER_LENGTH:]
if command in [UNENCRYPTED_EXIT_COMMAND, UNENCRYPTED_WIPE_COMMAND]:
gateway.write(packet)
time.sleep(gateway.settings.local_testing_mode * 0.1)
time.sleep(gateway.settings.data_diode_sockets * 1.5)
signal = WIPE if command == UNENCRYPTED_WIPE_COMMAND else EXIT
queues[EXIT_QUEUE].put(signal)
exit_packet_check(queues, gateway)
# If traffic masking has been disabled, wait until queued messages are sent before returning.
if sm_queue.qsize() != 0 and all(q.qsize() == 0 for q in (m_queue, f_queue, c_queue)):
if sm_queue.qsize() != 0 and all(
q.qsize() == 0 for q in (m_queue, f_queue, c_queue)
):
settings = sm_queue.get()
return settings
def standard_sender_loop(queues: 'QueueDict',
gateway: 'Gateway',
key_list: 'KeyList',
m_buffer: Optional['Message_buffer'] = None
) -> Tuple['Settings', 'Message_buffer']:
def exit_packet_check(queues: "QueueDict", gateway: "Gateway") -> None:
"""Check for unencrypted commands that close TFC.
The relay packet queue is empty until the user is willing to reveal to
Networked Computer they are either disabling traffic masking or exiting
TFC. Until that happens, queue status check takes constant time.
"""
rp_queue = queues[RELAY_PACKET_QUEUE]
if rp_queue.qsize():
packet = rp_queue.get()
command = packet[DATAGRAM_HEADER_LENGTH:]
if command in [UNENCRYPTED_EXIT_COMMAND, UNENCRYPTED_WIPE_COMMAND]:
gateway.write(packet)
time.sleep(gateway.settings.local_testing_mode * 0.1)
time.sleep(gateway.settings.data_diode_sockets * 1.5)
signal = WIPE if command == UNENCRYPTED_WIPE_COMMAND else EXIT
queues[EXIT_QUEUE].put(signal)
def standard_sender_loop(
queues: "QueueDict",
gateway: "Gateway",
key_list: "KeyList",
m_buffer: Optional["MessageBuffer"] = None,
) -> Tuple["Settings", "MessageBuffer"]:
"""Run Transmitter program in standard send mode.
The standard sender loop loads assembly packets from a set of queues.
@ -223,61 +266,113 @@ def standard_sender_loop(queues: 'QueueDict',
can be output later, if the user resumes to standard_sender_loop and
adds new keys for the contact.
"""
km_queue = queues[KEY_MANAGEMENT_QUEUE]
c_queue = queues[COMMAND_PACKET_QUEUE]
rp_queue = queues[RELAY_PACKET_QUEUE]
m_queue = queues[MESSAGE_PACKET_QUEUE]
sm_queue = queues[SENDER_MODE_QUEUE]
log_queue = queues[LOG_PACKET_QUEUE]
km_queue = queues[KEY_MANAGEMENT_QUEUE]
c_queue = queues[COMMAND_PACKET_QUEUE]
rp_queue = queues[RELAY_PACKET_QUEUE]
sm_queue = queues[SENDER_MODE_QUEUE]
m_queue = queues[MESSAGE_PACKET_QUEUE]
if m_buffer is None:
m_buffer = dict()
while True:
with ignored(EOFError, KeyboardInterrupt):
if km_queue.qsize() != 0:
key_list.manage(queues, *km_queue.get())
continue
try:
process_key_management_command(queues, key_list)
# Commands to Receiver
if c_queue.qsize() != 0:
if key_list.has_local_keyset():
send_packet(key_list, gateway, log_queue, c_queue.get())
continue
process_command(queues, key_list, gateway)
# Commands/files to Networked Computer
if rp_queue.qsize() != 0:
packet = rp_queue.get()
gateway.write(packet)
process_relay_packets(queues, gateway)
command = packet[DATAGRAM_HEADER_LENGTH:]
if command in [UNENCRYPTED_EXIT_COMMAND, UNENCRYPTED_WIPE_COMMAND]:
time.sleep(gateway.settings.local_testing_mode * 0.1)
time.sleep(gateway.settings.data_diode_sockets * 1.5)
signal = WIPE if command == UNENCRYPTED_WIPE_COMMAND else EXIT
queues[EXIT_QUEUE].put(signal)
continue
process_buffered_messages(m_buffer, queues, key_list, gateway)
# Buffered messages
for onion_pub_key in m_buffer:
if key_list.has_keyset(onion_pub_key) and m_buffer[onion_pub_key]:
send_packet(key_list, gateway, log_queue, *m_buffer[onion_pub_key].pop(0)[:-1])
continue
# New messages
if m_queue.qsize() != 0:
queue_data = m_queue.get() # type: Tuple[bytes, bytes, bool, bool, bytes]
onion_pub_key = queue_data[1]
if key_list.has_keyset(onion_pub_key):
send_packet(key_list, gateway, log_queue, *queue_data[:-1])
else:
m_buffer.setdefault(onion_pub_key, []).append(queue_data)
continue
process_new_message(m_buffer, queues, key_list, gateway)
# If traffic masking has been enabled, switch send mode when all queues are empty.
if sm_queue.qsize() != 0 and all(q.qsize() == 0 for q in (km_queue, c_queue, rp_queue, m_queue)):
if sm_queue.qsize() != 0 and all(
q.qsize() == 0 for q in (km_queue, c_queue, rp_queue, m_queue)
):
settings = sm_queue.get()
return settings, m_buffer
time.sleep(0.01)
except (EOFError, KeyboardInterrupt, SoftError):
pass
def process_key_management_command(queues: "QueueDict", key_list: "KeyList") -> None:
"""Process key management command."""
km_queue = queues[KEY_MANAGEMENT_QUEUE]
if km_queue.qsize():
key_list.manage(queues, *km_queue.get())
SoftError("Key management command processing complete.", output=False)
def process_command(
queues: "QueueDict", key_list: "KeyList", gateway: "Gateway"
) -> None:
"""Process command."""
c_queue = queues[COMMAND_PACKET_QUEUE]
log_queue = queues[LOG_PACKET_QUEUE]
if c_queue.qsize():
if key_list.has_local_keyset():
send_packet(key_list, gateway, log_queue, c_queue.get())
SoftError("Command processing complete.", output=False)
def process_relay_packets(queues: "QueueDict", gateway: "Gateway") -> None:
"""Process packet to Relay Program on Networked Computer."""
rp_queue = queues[RELAY_PACKET_QUEUE]
if rp_queue.qsize():
packet = rp_queue.get()
gateway.write(packet)
command = packet[DATAGRAM_HEADER_LENGTH:]
if command in [UNENCRYPTED_EXIT_COMMAND, UNENCRYPTED_WIPE_COMMAND]:
time.sleep(gateway.settings.local_testing_mode * 0.1)
time.sleep(gateway.settings.data_diode_sockets * 1.5)
signal = WIPE if command == UNENCRYPTED_WIPE_COMMAND else EXIT
queues[EXIT_QUEUE].put(signal)
SoftError("Relay packet processing complete.", output=False)
def process_buffered_messages(
m_buffer: "MessageBuffer",
queues: "QueueDict",
key_list: "KeyList",
gateway: "Gateway",
) -> None:
"""Process messages cached in `m_buffer`."""
log_queue = queues[LOG_PACKET_QUEUE]
for onion_pub_key in m_buffer:
if key_list.has_keyset(onion_pub_key) and m_buffer[onion_pub_key]:
send_packet(
key_list, gateway, log_queue, *m_buffer[onion_pub_key].pop(0)[:-1]
)
raise SoftError("Buffered message processing complete.", output=False)
def process_new_message(
m_buffer: "MessageBuffer",
queues: "QueueDict",
key_list: "KeyList",
gateway: "Gateway",
) -> None:
"""Process new message in message queue."""
m_queue = queues[MESSAGE_PACKET_QUEUE]
log_queue = queues[LOG_PACKET_QUEUE]
if m_queue.qsize():
queue_data = m_queue.get() # type: Tuple[bytes, bytes, bool, bool, bytes]
onion_pub_key = queue_data[1]
if key_list.has_keyset(onion_pub_key):
send_packet(key_list, gateway, log_queue, *queue_data[:-1])
else:
m_buffer.setdefault(onion_pub_key, []).append(queue_data)
raise SoftError("New message processing complete.", output=False)

View File

@ -19,80 +19,52 @@ You should have received a copy of the GNU General Public License
along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
import random
import threading
import time
import typing
from typing import Any, Dict, Optional, Tuple, Union
from src.common.misc import ignored
from src.common.statics import (C_N_HEADER, NOISE_PACKET_BUFFER, PADDING_LENGTH, P_N_HEADER, STATIC,
TM_NOISE_COMMAND_QUEUE, TM_NOISE_PACKET_QUEUE, TRAFFIC_MASKING)
from src.common.misc import ignored
from src.common.statics import (
C_N_HEADER,
NOISE_PACKET_BUFFER,
PADDING_LENGTH,
P_N_HEADER,
TM_NOISE_COMMAND_QUEUE,
TM_NOISE_PACKET_QUEUE,
)
if typing.TYPE_CHECKING:
from multiprocessing import Queue
from multiprocessing import Queue
from src.common.db_contacts import ContactList
from src.common.db_settings import Settings
QueueDict = Dict[bytes, Queue[Any]]
class HideRunTime(object):
"""Runtime hiding time context manager.
By joining a thread that sleeps for a longer time than it takes for
the function to run, this context manager hides the actual running
time of the function.
Note that random.SystemRandom() uses the Kernel CSPRNG (/dev/urandom),
not Python's weak PRNG based on Mersenne Twister:
https://docs.python.org/2/library/random.html#random.SystemRandom
"""
def __init__(self,
settings: 'Settings',
delay_type: str = STATIC,
duration: float = 0.0
) -> None:
if delay_type == TRAFFIC_MASKING:
self.length = settings.tm_static_delay
self.length += random.SystemRandom().uniform(0, settings.tm_random_delay)
elif delay_type == STATIC:
self.length = duration
def __enter__(self) -> None:
self.timer = threading.Thread(target=time.sleep, args=(self.length,))
self.timer.start()
def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
self.timer.join()
def noise_loop(queues: 'QueueDict',
contact_list: Optional['ContactList'] = None,
unit_test: bool = False
) -> None:
def noise_loop(
queues: "QueueDict",
contact_list: Optional["ContactList"] = None,
unit_test: bool = False,
) -> None:
"""Generate noise packets for traffic masking.
This process ensures noise packet / noise command queue always has
noise assembly packets available.
"""
log_messages = True # This setting is ignored: settings.log_file_masking controls logging of noise packets.
log_as_ph = True
log_as_ph = True
header = C_N_HEADER if contact_list is None else P_N_HEADER
header = C_N_HEADER if contact_list is None else P_N_HEADER
noise_assembly_packet = header + bytes(PADDING_LENGTH)
if contact_list is None:
# Noise command
queue = queues[TM_NOISE_COMMAND_QUEUE]
queue = queues[TM_NOISE_COMMAND_QUEUE]
content = noise_assembly_packet # type: Union[bytes, Tuple[bytes, bool, bool]]
else:
# Noise packet
queue = queues[TM_NOISE_PACKET_QUEUE]
queue = queues[TM_NOISE_PACKET_QUEUE]
content = (noise_assembly_packet, log_messages, log_as_ph)
while True:

View File

@ -21,22 +21,21 @@ along with TFC. If not, see <https://www.gnu.org/licenses/>.
import typing
from src.common.output import print_on_previous_line
from src.common.output import print_on_previous_line
from src.common.statics import COMMAND, FILE, MESSAGE, WIN_TYPE_GROUP
if typing.TYPE_CHECKING:
from src.common.db_settings import Settings
from src.common.db_settings import Settings
from src.transmitter.windows import TxWindow
def process_aliases(plaintext: str,
settings: 'Settings',
window: 'TxWindow'
) -> str:
def process_aliases(plaintext: str, settings: "Settings", window: "TxWindow") -> str:
"""Check if plaintext is an alias for another command."""
aliases = [(' ', '/unread' ),
(' ', '/exit' if settings.double_space_exits else '/clear'),
('//', '/cmd' )]
aliases = [
(" ", "/unread"),
(" ", "/exit" if settings.double_space_exits else "/clear"),
("//", "/cmd"),
]
for a in aliases:
if plaintext == a[0]:
@ -50,15 +49,15 @@ def process_aliases(plaintext: str,
return plaintext
def get_input(window: 'TxWindow', settings: 'Settings') -> 'UserInput':
def get_input(window: "TxWindow", settings: "Settings") -> "UserInput":
"""Read and process input from the user and determine its type."""
while True:
try:
plaintext = input(f"Msg to {window.type_print} {window.name}: ")
if plaintext in ['', '/']:
if plaintext in ["", "/"]:
raise EOFError
except (EOFError, KeyboardInterrupt):
print('')
print("")
print_on_previous_line()
continue
@ -67,18 +66,20 @@ def get_input(window: 'TxWindow', settings: 'Settings') -> 'UserInput':
# Determine plaintext type
pt_type = MESSAGE
if plaintext == '/file':
if plaintext == "/file":
pt_type = FILE
elif plaintext.startswith('/'):
plaintext = plaintext[len('/'):]
pt_type = COMMAND
elif plaintext.startswith("/"):
plaintext = plaintext[len("/") :]
pt_type = COMMAND
# Check if the group was empty
if pt_type in [MESSAGE, FILE] and window.type == WIN_TYPE_GROUP:
if window.group is not None and window.group.empty():
print_on_previous_line()
print(f"Msg to {window.type_print} {window.name}: Error: The group is empty.")
print(
f"Msg to {window.type_print} {window.name}: Error: The group is empty."
)
print_on_previous_line(delay=0.5)
continue
@ -98,4 +99,4 @@ class UserInput(object):
def __init__(self, plaintext: str, type_: str) -> None:
"""Create a new UserInput object."""
self.plaintext = plaintext
self.type = type_
self.type = type_

View File

@ -0,0 +1,50 @@
#!/usr/bin/env python3.7
# -*- coding: utf-8 -*-
"""
TFC - Onion-routed, endpoint secure messaging system
Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
TFC is free software: you can redistribute it and/or modify it under the terms
of the GNU General Public License as published by the Free Software Foundation,
either version 3 of the License, or (at your option) any later version.
TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
import typing
from typing import Iterable, Iterator, List, Optional
from src.common.db_contacts import Contact
from src.common.statics import WIN_TYPE_CONTACT
if typing.TYPE_CHECKING:
from src.common.db_groups import Group
class MockWindow(Iterable[Contact]):
"""\
Mock window simplifies queueing of message assembly packets for
automatically generated group management and key delivery messages.
"""
def __init__(self, uid: bytes, contacts: List["Contact"]) -> None:
"""Create a new MockWindow object."""
self.window_contacts = contacts
self.type = WIN_TYPE_CONTACT
self.group = None # type: Optional[Group]
self.name = None # type: Optional[str]
self.uid = uid
self.log_messages = self.window_contacts[0].log_messages
def __iter__(self) -> Iterator[Contact]:
"""Iterate over contact objects in the window."""
yield from self.window_contacts

View File

@ -24,68 +24,52 @@ import typing
from typing import Any, Dict, Iterable, Iterator, List, Optional, Sized
from src.common.db_contacts import Contact
from src.common.exceptions import FunctionReturn
from src.common.input import yes
from src.common.output import clear_screen
from src.common.statics import KEX_STATUS_PENDING, WINDOW_SELECT_QUEUE, WIN_SELECT, WIN_TYPE_CONTACT, WIN_TYPE_GROUP
from src.common.exceptions import SoftError
from src.common.input import yes
from src.common.output import clear_screen
from src.common.statics import (
KEX_STATUS_PENDING,
WINDOW_SELECT_QUEUE,
WIN_SELECT,
WIN_TYPE_CONTACT,
WIN_TYPE_GROUP,
)
from src.transmitter.contact import add_new_contact
from src.transmitter.contact import add_new_contact
from src.transmitter.key_exchanges import export_onion_service_data, start_key_exchange
from src.transmitter.packet import queue_command
from src.transmitter.packet import queue_command
if typing.TYPE_CHECKING:
from multiprocessing import Queue
from src.common.db_contacts import ContactList
from src.common.db_groups import Group, GroupList
from src.common.db_onion import OnionService
from src.common.db_settings import Settings
from src.common.gateway import Gateway
from multiprocessing import Queue
from src.common.db_contacts import ContactList
from src.common.db_groups import Group, GroupList
from src.common.db_onion import OnionService
from src.common.db_settings import Settings
from src.common.gateway import Gateway
from src.transmitter.user_input import UserInput
QueueDict = Dict[bytes, Queue[Any]]
class MockWindow(Iterable[Contact]):
"""\
Mock window simplifies queueing of message assembly packets for
automatically generated group management and key delivery messages.
"""
def __init__(self, uid: bytes, contacts: List['Contact']) -> None:
"""Create a new MockWindow object."""
self.window_contacts = contacts
self.type = WIN_TYPE_CONTACT
self.group = None # type: Optional[Group]
self.name = None # type: Optional[str]
self.uid = uid
self.log_messages = self.window_contacts[0].log_messages
def __iter__(self) -> Iterator[Contact]:
"""Iterate over contact objects in the window."""
yield from self.window_contacts
class TxWindow(Iterable[Contact], Sized):
"""\
TxWindow object contains data about the active recipient (contact or
group).
"""
def __init__(self,
contact_list: 'ContactList',
group_list: 'GroupList'
) -> None:
def __init__(self, contact_list: "ContactList", group_list: "GroupList") -> None:
"""Create a new TxWindow object."""
self.contact_list = contact_list
self.group_list = group_list
self.window_contacts = [] # type: List[Contact]
self.contact = None # type: Optional[Contact]
self.group = None # type: Optional[Group]
self.name = '' # type: str
self.uid = b'' # type: bytes
self.group_id = None # type: Optional[bytes]
self.log_messages = None # type: Optional[bool]
self.type = '' # type: str
self.type_print = None # type: Optional[str]
self.contact_list = contact_list
self.group_list = group_list
self.window_contacts = [] # type: List[Contact]
self.contact = None # type: Optional[Contact]
self.group = None # type: Optional[Group]
self.name = "" # type: str
self.uid = b"" # type: bytes
self.group_id = None # type: Optional[bytes]
self.log_messages = None # type: Optional[bool]
self.type = "" # type: str
self.type_print = None # type: Optional[str]
def __iter__(self) -> Iterator[Contact]:
"""Iterate over Contact objects in the window."""
@ -95,68 +79,32 @@ class TxWindow(Iterable[Contact], Sized):
"""Return the number of contacts in the window."""
return len(self.window_contacts)
def select_tx_window(self,
settings: 'Settings', # Settings object
queues: 'QueueDict', # Dictionary of Queues
onion_service: 'OnionService', # OnionService object
gateway: 'Gateway', # Gateway object
selection: Optional[str] = None, # Selector for window
cmd: bool = False # True when `/msg` command is used to switch window
) -> None:
def select_tx_window(
self,
settings: "Settings", # Settings object
queues: "QueueDict", # Dictionary of Queues
onion_service: "OnionService", # OnionService object
gateway: "Gateway", # Gateway object
selection: Optional[str] = None, # Selector for window
cmd: bool = False, # True when `/msg` command is used to switch window
) -> None:
"""Select specified window or ask the user to specify one."""
if selection is None:
self.contact_list.print_contacts()
self.group_list.print_groups()
if self.contact_list.has_only_pending_contacts():
print("\n'/connect' sends Onion Service/contact data to Relay"
"\n'/add' adds another contact."
"\n'/rm <Nick>' removes an existing contact.\n")
selection = input("Select recipient: ").strip()
selection = self.select_recipient()
if selection in self.group_list.get_list_of_group_names():
if cmd and settings.traffic_masking and selection != self.name:
raise FunctionReturn("Error: Can't change window during traffic masking.", head_clear=True)
self.contact = None
self.group = self.group_list.get_group(selection)
self.window_contacts = self.group.members
self.name = self.group.name
self.uid = self.group.group_id
self.group_id = self.group.group_id
self.log_messages = self.group.log_messages
self.type = WIN_TYPE_GROUP
self.type_print = 'group'
self.select_group(selection, cmd, settings)
elif selection in self.contact_list.contact_selectors():
if cmd and settings.traffic_masking:
contact = self.contact_list.get_contact_by_address_or_nick(selection)
if contact.onion_pub_key != self.uid:
raise FunctionReturn("Error: Can't change window during traffic masking.", head_clear=True)
self.select_contact(selection, cmd, queues, settings)
self.contact = self.contact_list.get_contact_by_address_or_nick(selection)
if self.contact.kex_status == KEX_STATUS_PENDING:
start_key_exchange(self.contact.onion_pub_key,
self.contact.nick,
self.contact_list,
settings, queues)
self.group = None
self.group_id = None
self.window_contacts = [self.contact]
self.name = self.contact.nick
self.uid = self.contact.onion_pub_key
self.log_messages = self.contact.log_messages
self.type = WIN_TYPE_CONTACT
self.type_print = 'contact'
elif selection.startswith('/'):
self.window_selection_command(selection, settings, queues, onion_service, gateway)
elif selection.startswith("/"):
self.window_selection_command(
selection, settings, queues, onion_service, gateway
)
else:
raise FunctionReturn("Error: No contact/group was found.")
raise SoftError("Error: No contact/group was found.")
if settings.traffic_masking:
queues[WINDOW_SELECT_QUEUE].put(self.window_contacts)
@ -166,60 +114,132 @@ class TxWindow(Iterable[Contact], Sized):
clear_screen()
def window_selection_command(self,
selection: str,
settings: 'Settings',
queues: 'QueueDict',
onion_service: 'OnionService',
gateway: 'Gateway'
) -> None:
def select_recipient(self) -> str:
"""Select recipient."""
self.contact_list.print_contacts()
self.group_list.print_groups()
if self.contact_list.has_only_pending_contacts():
print(
"\n'/connect' sends Onion Service/contact data to Relay"
"\n'/add' adds another contact."
"\n'/rm <Nick>' removes an existing contact.\n"
)
selection = input("Select recipient: ").strip()
return selection
def select_contact(
self, selection: str, cmd: bool, queues: "QueueDict", settings: "Settings"
) -> None:
"""Select contact."""
if cmd and settings.traffic_masking:
contact = self.contact_list.get_contact_by_address_or_nick(selection)
if contact.onion_pub_key != self.uid:
raise SoftError(
"Error: Can't change window during traffic masking.",
head_clear=True,
)
self.contact = self.contact_list.get_contact_by_address_or_nick(selection)
if self.contact.kex_status == KEX_STATUS_PENDING:
start_key_exchange(
self.contact.onion_pub_key,
self.contact.nick,
self.contact_list,
settings,
queues,
)
self.group = None
self.group_id = None
self.window_contacts = [self.contact]
self.name = self.contact.nick
self.uid = self.contact.onion_pub_key
self.log_messages = self.contact.log_messages
self.type = WIN_TYPE_CONTACT
self.type_print = "contact"
def select_group(self, selection: str, cmd: bool, settings: "Settings") -> None:
"""Select group."""
if cmd and settings.traffic_masking and selection != self.name:
raise SoftError(
"Error: Can't change window during traffic masking.", head_clear=True
)
self.contact = None
self.group = self.group_list.get_group(selection)
self.window_contacts = self.group.members
self.name = self.group.name
self.uid = self.group.group_id
self.group_id = self.group.group_id
self.log_messages = self.group.log_messages
self.type = WIN_TYPE_GROUP
self.type_print = "group"
def window_selection_command(
self,
selection: str,
settings: "Settings",
queues: "QueueDict",
onion_service: "OnionService",
gateway: "Gateway",
) -> None:
"""Commands for adding and removing contacts from contact selection menu.
In situations where only pending contacts are available and
those contacts are not online, these commands prevent the user
from not being able to add new contacts.
"""
if selection == '/add':
add_new_contact(self.contact_list, self.group_list, settings, queues, onion_service)
raise FunctionReturn("New contact added.", output=False)
if selection == "/add":
add_new_contact(
self.contact_list, self.group_list, settings, queues, onion_service
)
raise SoftError("New contact added.", output=False)
elif selection == '/connect':
export_onion_service_data(self.contact_list, settings, onion_service, gateway)
if selection == "/connect":
export_onion_service_data(
self.contact_list, settings, onion_service, gateway
)
elif selection.startswith('/rm'):
elif selection.startswith("/rm"):
try:
selection = selection.split()[1]
except IndexError:
raise FunctionReturn("Error: No account specified.", delay=1)
raise SoftError("Error: No account specified.", delay=1)
if not yes(f"Remove contact '{selection}'?", abort=False, head=1):
raise FunctionReturn("Removal of contact aborted.", head=0, delay=1)
raise SoftError("Removal of contact aborted.", head=0, delay=1)
if selection in self.contact_list.contact_selectors():
onion_pub_key = self.contact_list.get_contact_by_address_or_nick(selection).onion_pub_key
onion_pub_key = self.contact_list.get_contact_by_address_or_nick(
selection
).onion_pub_key
self.contact_list.remove_contact_by_pub_key(onion_pub_key)
self.contact_list.store_contacts()
raise FunctionReturn(f"Removed contact '{selection}'.", delay=1)
else:
raise FunctionReturn(f"Error: Unknown contact '{selection}'.", delay=1)
raise SoftError(f"Removed contact '{selection}'.", delay=1)
raise SoftError(f"Error: Unknown contact '{selection}'.", delay=1)
else:
raise FunctionReturn("Error: Invalid command.", delay=1)
raise SoftError("Error: Invalid command.", delay=1)
def deselect(self) -> None:
"""Deselect active window."""
self.window_contacts = []
self.contact = None
self.group = None
self.name = ''
self.uid = b''
self.log_messages = None
self.type = ''
self.type_print = None
self.contact = None
self.group = None
self.name = ""
self.uid = b""
self.log_messages = None
self.type = ""
self.type_print = None
def is_selected(self) -> bool:
"""Return True if a window is selected, else False."""
return self.name != ''
return self.name != ""
def update_log_messages(self) -> None:
"""Update window's logging setting."""
@ -228,7 +248,7 @@ class TxWindow(Iterable[Contact], Sized):
if self.type == WIN_TYPE_GROUP and self.group is not None:
self.log_messages = self.group.log_messages
def update_window(self, group_list: 'GroupList') -> None:
def update_window(self, group_list: "GroupList") -> None:
"""Update window.
Since previous input may have changed the window data, reload
@ -236,31 +256,38 @@ class TxWindow(Iterable[Contact], Sized):
"""
if self.type == WIN_TYPE_GROUP:
if self.group_id is not None and group_list.has_group_id(self.group_id):
self.group = group_list.get_group_by_id(self.group_id)
self.group = group_list.get_group_by_id(self.group_id)
self.window_contacts = self.group.members
self.name = self.group.name
self.uid = self.group.group_id
self.name = self.group.name
self.uid = self.group.group_id
else:
self.deselect()
elif self.type == WIN_TYPE_CONTACT:
if self.contact is not None and self.contact_list.has_pub_key(self.contact.onion_pub_key):
if self.contact is not None and self.contact_list.has_pub_key(
self.contact.onion_pub_key
):
# Reload window contact in case keys were re-exchanged.
self.contact = self.contact_list.get_contact_by_pub_key(self.contact.onion_pub_key)
self.contact = self.contact_list.get_contact_by_pub_key(
self.contact.onion_pub_key
)
self.window_contacts = [self.contact]
def select_window(user_input: 'UserInput',
window: 'TxWindow',
settings: 'Settings',
queues: 'QueueDict',
onion_service: 'OnionService',
gateway: 'Gateway'
) -> None:
def select_window(
user_input: "UserInput",
window: "TxWindow",
settings: "Settings",
queues: "QueueDict",
onion_service: "OnionService",
gateway: "Gateway",
) -> None:
"""Select a new window to send messages/files."""
try:
selection = user_input.plaintext.split()[1]
except (IndexError, TypeError):
raise FunctionReturn("Error: Invalid recipient.", head_clear=True)
raise SoftError("Error: Invalid recipient.", head_clear=True)
window.select_tx_window(settings, queues, onion_service, gateway, selection=selection, cmd=True)
window.select_tx_window(
settings, queues, onion_service, gateway, selection=selection, cmd=True
)

View File

@ -0,0 +1,20 @@
#!/usr/bin/env python3.7
# -*- coding: utf-8 -*-
"""
TFC - Onion-routed, endpoint secure messaging system
Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
TFC is free software: you can redistribute it and/or modify it under the terms
of the GNU General Public License as published by the Free Software Foundation,
either version 3 of the License, or (at your option) any later version.
TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""

View File

@ -0,0 +1,20 @@
#!/usr/bin/env python3.7
# -*- coding: utf-8 -*-
"""
TFC - Onion-routed, endpoint secure messaging system
Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
TFC is free software: you can redistribute it and/or modify it under the terms
of the GNU General Public License as published by the Free Software Foundation,
either version 3 of the License, or (at your option) any later version.
TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""

File diff suppressed because it is too large Load Diff

View File

@ -23,50 +23,54 @@ import sqlite3
import os
import unittest
from unittest import mock
from unittest import mock
from unittest.mock import MagicMock
from src.common.crypto import auth_and_decrypt, blake2b, encrypt_and_sign
from src.common.crypto import auth_and_decrypt, blake2b, encrypt_and_sign
from src.common.database import TFCDatabase, MessageLog, TFCUnencryptedDatabase
from src.common.statics import (DB_WRITE_RETRY_LIMIT, DIR_USER_DATA, MASTERKEY_DB_SIZE, LOG_ENTRY_LENGTH,
SYMMETRIC_KEY_LENGTH)
from src.common.statics import (
DB_WRITE_RETRY_LIMIT,
DIR_USER_DATA,
MASTERKEY_DB_SIZE,
LOG_ENTRY_LENGTH,
SYMMETRIC_KEY_LENGTH,
)
from tests.mock_classes import MasterKey, Settings
from tests.utils import cd_unit_test, cleanup, tamper_file
from tests.utils import cd_unit_test, cleanup, tamper_file
class TestTFCDatabase(unittest.TestCase):
def setUp(self) -> None:
"""Pre-test actions."""
self.unit_test_dir = cd_unit_test()
self.database_name = 'unittest_db'
self.master_key = MasterKey()
self.database = TFCDatabase(self.database_name, self.master_key)
self.database_name = "unittest_db"
self.master_key = MasterKey()
self.database = TFCDatabase(self.database_name, self.master_key)
def tearDown(self) -> None:
"""Post-test actions."""
cleanup(self.unit_test_dir)
@mock.patch('os.fsync', return_value=MagicMock)
def test_write_to_file(self, mock_os_fsync):
@mock.patch("os.fsync", return_value=MagicMock)
def test_write_to_file(self, mock_os_fsync) -> None:
# Setup
data = os.urandom(MASTERKEY_DB_SIZE)
# Test
self.assertIsNone(self.database.write_to_file(self.database_name, data))
with open(self.database_name, 'rb') as f:
with open(self.database_name, "rb") as f:
stored_data = f.read()
self.assertEqual(data, stored_data)
mock_os_fsync.assert_called()
def test_verify_file(self):
def test_verify_file(self) -> None:
# Setup
pt_bytes = os.urandom(MASTERKEY_DB_SIZE)
ct_bytes = encrypt_and_sign(pt_bytes, self.master_key.master_key)
with open(self.database_name, 'wb') as f:
with open(self.database_name, "wb") as f:
f.write(ct_bytes)
# Test valid file content returns True.
@ -76,10 +80,14 @@ class TestTFCDatabase(unittest.TestCase):
tamper_file(self.database_name, tamper_size=1)
self.assertFalse(self.database.verify_file(self.database_name))
def test_ensure_temp_write_raises_critical_error_after_exceeding_retry_limit(self):
def test_ensure_temp_write_raises_critical_error_after_exceeding_retry_limit(
self,
) -> None:
# Setup
orig_verify_file = self.database.verify_file
self.database.verify_file = MagicMock(side_effect=DB_WRITE_RETRY_LIMIT*[False])
orig_verify_file = self.database.verify_file
self.database.verify_file = MagicMock(
side_effect=DB_WRITE_RETRY_LIMIT * [False]
)
# Test
with self.assertRaises(SystemExit):
@ -88,10 +96,12 @@ class TestTFCDatabase(unittest.TestCase):
# Teardown
self.database.verify_file = orig_verify_file
def test_ensure_temp_write_succeeds_just_before_limit(self):
def test_ensure_temp_write_succeeds_just_before_limit(self) -> None:
# Setup
orig_verify_file = self.database.verify_file
self.database.verify_file = MagicMock(side_effect=(DB_WRITE_RETRY_LIMIT-1)*[False] + [True])
orig_verify_file = self.database.verify_file
self.database.verify_file = MagicMock(
side_effect=(DB_WRITE_RETRY_LIMIT - 1) * [False] + [True]
)
# Test
self.assertIsNone(self.database.store_database(os.urandom(MASTERKEY_DB_SIZE)))
@ -99,17 +109,19 @@ class TestTFCDatabase(unittest.TestCase):
# Teardown
self.database.verify_file = orig_verify_file
def test_store_database_encrypts_data_with_master_key_and_replaces_temp_file_and_original_file(self):
def test_store_database_encrypts_data_with_master_key_and_replaces_temp_file_and_original_file(
self,
) -> None:
# Setup
pt_old = os.urandom(MASTERKEY_DB_SIZE)
ct_old = encrypt_and_sign(pt_old, self.master_key.master_key)
with open(self.database_name, 'wb') as f:
with open(self.database_name, "wb") as f:
f.write(ct_old)
pt_new = os.urandom(MASTERKEY_DB_SIZE)
ct_temp = os.urandom(MASTERKEY_DB_SIZE)
with open(self.database.database_temp, 'wb') as f:
with open(self.database.database_temp, "wb") as f:
f.write(ct_temp)
# Test
@ -117,18 +129,18 @@ class TestTFCDatabase(unittest.TestCase):
self.assertIsNone(self.database.store_database(pt_new))
self.assertFalse(os.path.isfile(self.database.database_temp))
with open(self.database_name, 'rb') as f:
with open(self.database_name, "rb") as f:
purp_data = f.read()
purp_pt = auth_and_decrypt(purp_data, self.master_key.master_key)
self.assertEqual(purp_pt, pt_new)
def test_replace_database(self):
def test_replace_database(self) -> None:
# Setup
self.assertFalse(os.path.isfile(self.database.database_name))
self.assertFalse(os.path.isfile(self.database.database_temp))
with open(self.database.database_temp, 'wb') as f:
f.write(b'temp_file')
with open(self.database.database_temp, "wb") as f:
f.write(b"temp_file")
self.assertFalse(os.path.isfile(self.database.database_name))
self.assertTrue(os.path.isfile(self.database.database_temp))
@ -139,15 +151,15 @@ class TestTFCDatabase(unittest.TestCase):
self.assertFalse(os.path.isfile(self.database.database_temp))
self.assertTrue(os.path.isfile(self.database.database_name))
def test_load_database_ignores_invalid_temp_database(self):
def test_load_database_ignores_invalid_temp_database(self) -> None:
# Setup
pt_old = os.urandom(MASTERKEY_DB_SIZE)
ct_old = encrypt_and_sign(pt_old, self.master_key.master_key)
with open(self.database_name, 'wb') as f:
with open(self.database_name, "wb") as f:
f.write(ct_old)
ct_temp = os.urandom(MASTERKEY_DB_SIZE)
with open(self.database.database_temp, 'wb') as f:
with open(self.database.database_temp, "wb") as f:
f.write(ct_temp)
# Test
@ -155,16 +167,16 @@ class TestTFCDatabase(unittest.TestCase):
self.assertEqual(self.database.load_database(), pt_old)
self.assertFalse(os.path.isfile(self.database.database_temp))
def test_load_database_prefers_valid_temp_database(self):
def test_load_database_prefers_valid_temp_database(self) -> None:
# Setup
pt_old = os.urandom(MASTERKEY_DB_SIZE)
ct_old = encrypt_and_sign(pt_old, self.master_key.master_key)
with open(self.database_name, 'wb') as f:
with open(self.database_name, "wb") as f:
f.write(ct_old)
pt_temp = os.urandom(MASTERKEY_DB_SIZE)
ct_temp = encrypt_and_sign(pt_temp, self.master_key.master_key)
with open(self.database.database_temp, 'wb') as f:
with open(self.database.database_temp, "wb") as f:
f.write(ct_temp)
# Test
@ -174,36 +186,35 @@ class TestTFCDatabase(unittest.TestCase):
class TestTFCUnencryptedDatabase(unittest.TestCase):
def setUp(self) -> None:
"""Pre-test actions."""
self.unit_test_dir = cd_unit_test()
self.database_name = 'unittest_db'
self.database = TFCUnencryptedDatabase(self.database_name)
self.database_name = "unittest_db"
self.database = TFCUnencryptedDatabase(self.database_name)
def tearDown(self) -> None:
"""Post-test actions."""
cleanup(self.unit_test_dir)
@mock.patch('os.fsync', return_value=MagicMock)
def test_write_to_file(self, mock_os_fsync):
@mock.patch("os.fsync", return_value=MagicMock)
def test_write_to_file(self, mock_os_fsync) -> None:
# Setup
data = os.urandom(MASTERKEY_DB_SIZE)
# Test
self.assertIsNone(self.database.write_to_file(self.database_name, data))
with open(self.database_name, 'rb') as f:
with open(self.database_name, "rb") as f:
stored_data = f.read()
self.assertEqual(data, stored_data)
mock_os_fsync.assert_called()
def test_verify_file(self):
def test_verify_file(self) -> None:
# Setup
data = os.urandom(MASTERKEY_DB_SIZE)
data = os.urandom(MASTERKEY_DB_SIZE)
checksummed_data = data + blake2b(data)
with open(self.database_name, 'wb') as f:
with open(self.database_name, "wb") as f:
f.write(checksummed_data)
# Test valid file content returns True.
@ -213,10 +224,14 @@ class TestTFCUnencryptedDatabase(unittest.TestCase):
tamper_file(self.database_name, tamper_size=1)
self.assertFalse(self.database.verify_file(self.database_name))
def test_ensure_temp_write_raises_critical_error_after_exceeding_retry_limit(self):
def test_ensure_temp_write_raises_critical_error_after_exceeding_retry_limit(
self,
) -> None:
# Setup
orig_verify_file = self.database.verify_file
self.database.verify_file = MagicMock(side_effect=DB_WRITE_RETRY_LIMIT*[False])
orig_verify_file = self.database.verify_file
self.database.verify_file = MagicMock(
side_effect=DB_WRITE_RETRY_LIMIT * [False]
)
# Test
with self.assertRaises(SystemExit):
@ -225,27 +240,33 @@ class TestTFCUnencryptedDatabase(unittest.TestCase):
# Teardown
self.database.verify_file = orig_verify_file
def test_ensure_temp_write_succeeds_just_before_limit(self):
def test_ensure_temp_write_succeeds_just_before_limit(self) -> None:
# Setup
orig_verify_file = self.database.verify_file
self.database.verify_file = MagicMock(side_effect=(DB_WRITE_RETRY_LIMIT-1)*[False] + [True])
orig_verify_file = self.database.verify_file
self.database.verify_file = MagicMock(
side_effect=(DB_WRITE_RETRY_LIMIT - 1) * [False] + [True]
)
# Test
self.assertIsNone(self.database.store_unencrypted_database(os.urandom(MASTERKEY_DB_SIZE)))
self.assertIsNone(
self.database.store_unencrypted_database(os.urandom(MASTERKEY_DB_SIZE))
)
# Teardown
self.database.verify_file = orig_verify_file
def test_store_unencrypted_database_replaces_temp_file_and_original_file(self):
def test_store_unencrypted_database_replaces_temp_file_and_original_file(
self,
) -> None:
# Setup
data_old = os.urandom(MASTERKEY_DB_SIZE)
with open(self.database_name, 'wb') as f:
with open(self.database_name, "wb") as f:
f.write(data_old)
data_new = os.urandom(MASTERKEY_DB_SIZE)
data_temp = os.urandom(MASTERKEY_DB_SIZE)
with open(self.database.database_temp, 'wb') as f:
with open(self.database.database_temp, "wb") as f:
f.write(data_temp)
# Test
@ -253,18 +274,18 @@ class TestTFCUnencryptedDatabase(unittest.TestCase):
self.assertIsNone(self.database.store_unencrypted_database(data_new))
self.assertFalse(os.path.isfile(self.database.database_temp))
with open(self.database_name, 'rb') as f:
with open(self.database_name, "rb") as f:
purp_data = f.read()
self.assertEqual(purp_data, data_new + blake2b(data_new))
def test_replace_database(self):
def test_replace_database(self) -> None:
# Setup
self.assertFalse(os.path.isfile(self.database.database_name))
self.assertFalse(os.path.isfile(self.database.database_temp))
with open(self.database.database_temp, 'wb') as f:
f.write(b'temp_file')
with open(self.database.database_temp, "wb") as f:
f.write(b"temp_file")
self.assertFalse(os.path.isfile(self.database.database_name))
self.assertTrue(os.path.isfile(self.database.database_temp))
@ -275,11 +296,11 @@ class TestTFCUnencryptedDatabase(unittest.TestCase):
self.assertFalse(os.path.isfile(self.database.database_temp))
self.assertTrue(os.path.isfile(self.database.database_name))
def test_loading_invalid_database_data_raises_critical_error(self):
data_old = os.urandom(MASTERKEY_DB_SIZE)
def test_loading_invalid_database_data_raises_critical_error(self) -> None:
data_old = os.urandom(MASTERKEY_DB_SIZE)
checksummed = data_old + blake2b(data_old)
with open(self.database_name, 'wb') as f:
with open(self.database_name, "wb") as f:
f.write(checksummed)
tamper_file(self.database_name, tamper_size=1)
@ -287,15 +308,15 @@ class TestTFCUnencryptedDatabase(unittest.TestCase):
with self.assertRaises(SystemExit):
self.database.load_database()
def test_load_database_ignores_invalid_temp_database(self):
def test_load_database_ignores_invalid_temp_database(self) -> None:
# Setup
data_old = os.urandom(MASTERKEY_DB_SIZE)
data_old = os.urandom(MASTERKEY_DB_SIZE)
checksummed = data_old + blake2b(data_old)
with open(self.database_name, 'wb') as f:
with open(self.database_name, "wb") as f:
f.write(checksummed)
data_temp = os.urandom(MASTERKEY_DB_SIZE)
with open(self.database.database_temp, 'wb') as f:
with open(self.database.database_temp, "wb") as f:
f.write(data_temp)
# Test
@ -303,16 +324,16 @@ class TestTFCUnencryptedDatabase(unittest.TestCase):
self.assertEqual(self.database.load_database(), data_old)
self.assertFalse(os.path.isfile(self.database.database_temp))
def test_load_database_prefers_valid_temp_database(self):
def test_load_database_prefers_valid_temp_database(self) -> None:
# Setup
data_old = os.urandom(MASTERKEY_DB_SIZE)
data_old = os.urandom(MASTERKEY_DB_SIZE)
checksummed_old = data_old + blake2b(data_old)
with open(self.database_name, 'wb') as f:
with open(self.database_name, "wb") as f:
f.write(checksummed_old)
data_temp = os.urandom(MASTERKEY_DB_SIZE)
data_temp = os.urandom(MASTERKEY_DB_SIZE)
checksummed_temp = data_temp + blake2b(data_temp)
with open(self.database.database_temp, 'wb') as f:
with open(self.database.database_temp, "wb") as f:
f.write(checksummed_temp)
# Test
@ -323,49 +344,50 @@ class TestTFCUnencryptedDatabase(unittest.TestCase):
class TestTFCLogDatabase(unittest.TestCase):
def setUp(self) -> None:
"""Pre-test actions."""
self.unit_test_dir = cd_unit_test()
self.file_name = f'{DIR_USER_DATA}ut_logs'
self.temp_name = self.file_name + '_temp'
self.settings = Settings()
self.database_key = os.urandom(SYMMETRIC_KEY_LENGTH)
self.unit_test_dir = cd_unit_test()
self.file_name = f"{DIR_USER_DATA}ut_logs"
self.temp_name = self.file_name + "_temp"
self.settings = Settings()
self.database_key = os.urandom(SYMMETRIC_KEY_LENGTH)
self.tfc_log_database = MessageLog(self.file_name, self.database_key)
def tearDown(self) -> None:
"""Post-test actions."""
cleanup(self.unit_test_dir)
def test_empty_log_database_is_verified(self):
def test_empty_log_database_is_verified(self) -> None:
self.assertTrue(self.tfc_log_database.verify_file(self.file_name))
def test_database_with_one_entry_is_verified(self):
def test_database_with_one_entry_is_verified(self) -> None:
# Setup
test_entry = b'test_log_entry'
test_entry = b"test_log_entry"
self.tfc_log_database.insert_log_entry(test_entry)
# Test
self.assertTrue(self.tfc_log_database.verify_file(self.file_name))
def test_invalid_entry_returns_false(self):
def test_invalid_entry_returns_false(self) -> None:
# Setup
params = (os.urandom(LOG_ENTRY_LENGTH),)
self.tfc_log_database.c.execute(f"""INSERT INTO log_entries (log_entry) VALUES (?)""", params)
self.tfc_log_database.c.execute(
f"""INSERT INTO log_entries (log_entry) VALUES (?)""", params
)
self.tfc_log_database.conn.commit()
# Test
self.assertFalse(self.tfc_log_database.verify_file(self.file_name))
def test_table_creation(self):
def test_table_creation(self) -> None:
self.assertIsInstance(self.tfc_log_database, MessageLog)
self.assertTrue(os.path.isfile(self.file_name))
def test_writing_to_log_database(self):
def test_writing_to_log_database(self) -> None:
data = os.urandom(LOG_ENTRY_LENGTH)
self.assertIsNone(self.tfc_log_database.insert_log_entry(data))
def test_iterating_over_log_database(self):
def test_iterating_over_log_database(self) -> None:
data = [os.urandom(LOG_ENTRY_LENGTH), os.urandom(LOG_ENTRY_LENGTH)]
for entry in data:
self.assertIsNone(self.tfc_log_database.insert_log_entry(entry))
@ -373,61 +395,65 @@ class TestTFCLogDatabase(unittest.TestCase):
for index, stored_entry in enumerate(self.tfc_log_database):
self.assertEqual(stored_entry, data[index])
def test_invalid_temp_database_is_not_loaded(self):
def test_invalid_temp_database_is_not_loaded(self) -> None:
log_file = MessageLog(self.file_name, database_key=self.database_key)
tmp_file = MessageLog(self.temp_name, database_key=self.database_key)
log_file.insert_log_entry(b'a')
log_file.insert_log_entry(b'b')
log_file.insert_log_entry(b'c')
log_file.insert_log_entry(b'd')
log_file.insert_log_entry(b'e')
log_file.insert_log_entry(b"a")
log_file.insert_log_entry(b"b")
log_file.insert_log_entry(b"c")
log_file.insert_log_entry(b"d")
log_file.insert_log_entry(b"e")
tmp_file.insert_log_entry(b'a')
tmp_file.insert_log_entry(b'b')
tmp_file.c.execute(f"""INSERT INTO log_entries (log_entry) VALUES (?)""", (b'c',))
tmp_file.insert_log_entry(b"a")
tmp_file.insert_log_entry(b"b")
tmp_file.c.execute(
f"""INSERT INTO log_entries (log_entry) VALUES (?)""", (b"c",)
)
tmp_file.conn.commit()
tmp_file.insert_log_entry(b'd')
tmp_file.insert_log_entry(b'e')
tmp_file.insert_log_entry(b"d")
tmp_file.insert_log_entry(b"e")
self.assertTrue(os.path.isfile(self.temp_name))
log_file = MessageLog(self.file_name, database_key=self.database_key)
self.assertEqual(list(log_file), [b'a', b'b', b'c', b'd', b'e'])
self.assertEqual(list(log_file), [b"a", b"b", b"c", b"d", b"e"])
self.assertFalse(os.path.isfile(self.temp_name))
def test_valid_temp_database_is_loaded(self):
def test_valid_temp_database_is_loaded(self) -> None:
log_file = MessageLog(self.file_name, database_key=self.database_key)
tmp_file = MessageLog(self.temp_name, database_key=self.database_key)
log_file.insert_log_entry(b'a')
log_file.insert_log_entry(b'b')
log_file.insert_log_entry(b'c')
log_file.insert_log_entry(b'd')
log_file.insert_log_entry(b'e')
log_file.insert_log_entry(b"a")
log_file.insert_log_entry(b"b")
log_file.insert_log_entry(b"c")
log_file.insert_log_entry(b"d")
log_file.insert_log_entry(b"e")
tmp_file.insert_log_entry(b'f')
tmp_file.insert_log_entry(b'g')
tmp_file.insert_log_entry(b'h')
tmp_file.insert_log_entry(b'i')
tmp_file.insert_log_entry(b'j')
tmp_file.insert_log_entry(b"f")
tmp_file.insert_log_entry(b"g")
tmp_file.insert_log_entry(b"h")
tmp_file.insert_log_entry(b"i")
tmp_file.insert_log_entry(b"j")
self.assertTrue(os.path.isfile(self.temp_name))
log_file = MessageLog(self.file_name, database_key=self.database_key)
self.assertEqual(list(log_file), [b'f', b'g', b'h', b'i', b'j'])
self.assertEqual(list(log_file), [b"f", b"g", b"h", b"i", b"j"])
self.assertFalse(os.path.isfile(self.temp_name))
def test_database_closing(self):
def test_database_closing(self) -> None:
self.tfc_log_database.close_database()
# Test insertion would fail at this point
with self.assertRaises(sqlite3.ProgrammingError):
self.tfc_log_database.c.execute(f"""INSERT INTO log_entries (log_entry) VALUES (?)""",
(os.urandom(LOG_ENTRY_LENGTH),))
self.tfc_log_database.c.execute(
f"""INSERT INTO log_entries (log_entry) VALUES (?)""",
(os.urandom(LOG_ENTRY_LENGTH),),
)
# Test that TFC reopens closed database on write
data = os.urandom(LOG_ENTRY_LENGTH)
self.assertIsNone(self.tfc_log_database.insert_log_entry(data))
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main(exit=False)

View File

@ -22,104 +22,139 @@ along with TFC. If not, see <https://www.gnu.org/licenses/>.
import os
import unittest
from src.common.crypto import encrypt_and_sign
from src.common.crypto import encrypt_and_sign
from src.common.db_contacts import Contact, ContactList
from src.common.misc import ensure_dir
from src.common.statics import (CLEAR_ENTIRE_SCREEN, CONTACT_LENGTH, CURSOR_LEFT_UP_CORNER, DIR_USER_DATA, ECDHE,
FINGERPRINT_LENGTH, KEX_STATUS_HAS_RX_PSK, KEX_STATUS_LOCAL_KEY, KEX_STATUS_NONE,
KEX_STATUS_NO_RX_PSK, KEX_STATUS_PENDING, KEX_STATUS_UNVERIFIED,
KEX_STATUS_VERIFIED, LOCAL_ID, POLY1305_TAG_LENGTH, PSK, XCHACHA20_NONCE_LENGTH)
from src.common.misc import ensure_dir
from src.common.statics import (
CLEAR_ENTIRE_SCREEN,
CONTACT_LENGTH,
CURSOR_LEFT_UP_CORNER,
DIR_USER_DATA,
ECDHE,
FINGERPRINT_LENGTH,
KEX_STATUS_HAS_RX_PSK,
KEX_STATUS_LOCAL_KEY,
KEX_STATUS_NONE,
KEX_STATUS_NO_RX_PSK,
KEX_STATUS_PENDING,
KEX_STATUS_UNVERIFIED,
KEX_STATUS_VERIFIED,
LOCAL_ID,
POLY1305_TAG_LENGTH,
PSK,
XCHACHA20_NONCE_LENGTH,
)
from tests.mock_classes import create_contact, MasterKey, Settings
from tests.utils import cd_unit_test, cleanup, nick_to_onion_address, nick_to_pub_key, tamper_file, TFCTestCase
from tests.utils import (
cd_unit_test,
cleanup,
nick_to_onion_address,
nick_to_pub_key,
tamper_file,
TFCTestCase,
)
class TestContact(unittest.TestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.contact = Contact(nick_to_pub_key('Bob'),
'Bob',
FINGERPRINT_LENGTH * b'\x01',
FINGERPRINT_LENGTH * b'\x02',
KEX_STATUS_UNVERIFIED,
log_messages =True,
file_reception=True,
notifications =True)
self.contact = Contact(
nick_to_pub_key("Bob"),
"Bob",
FINGERPRINT_LENGTH * b"\x01",
FINGERPRINT_LENGTH * b"\x02",
KEX_STATUS_UNVERIFIED,
log_messages=True,
file_reception=True,
notifications=True,
)
def test_contact_serialization_length_and_type(self):
def test_contact_serialization_length_and_type(self) -> None:
serialized = self.contact.serialize_c()
self.assertEqual(len(serialized), CONTACT_LENGTH)
self.assertIsInstance(serialized, bytes)
def test_uses_psk(self):
def test_uses_psk(self) -> None:
for kex_status in [KEX_STATUS_NO_RX_PSK, KEX_STATUS_HAS_RX_PSK]:
self.contact.kex_status = kex_status
self.assertTrue(self.contact.uses_psk())
for kex_status in [KEX_STATUS_NONE, KEX_STATUS_PENDING, KEX_STATUS_UNVERIFIED,
KEX_STATUS_VERIFIED, KEX_STATUS_LOCAL_KEY]:
for kex_status in [
KEX_STATUS_NONE,
KEX_STATUS_PENDING,
KEX_STATUS_UNVERIFIED,
KEX_STATUS_VERIFIED,
KEX_STATUS_LOCAL_KEY,
]:
self.contact.kex_status = kex_status
self.assertFalse(self.contact.uses_psk())
class TestContactList(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.unit_test_dir = cd_unit_test()
self.master_key = MasterKey()
self.settings = Settings()
self.file_name = f'{DIR_USER_DATA}{self.settings.software_operation}_contacts'
self.contact_list = ContactList(self.master_key, self.settings)
self.full_contact_list = ['Alice', 'Bob', 'Charlie', 'David', 'Eric', LOCAL_ID]
self.unit_test_dir = cd_unit_test()
self.master_key = MasterKey()
self.settings = Settings()
self.file_name = f"{DIR_USER_DATA}{self.settings.software_operation}_contacts"
self.contact_list = ContactList(self.master_key, self.settings)
self.full_contact_list = ["Alice", "Bob", "Charlie", "David", "Eric", LOCAL_ID]
self.contact_list.contacts = list(map(create_contact, self.full_contact_list))
self.real_contact_list = self.full_contact_list[:]
self.real_contact_list = self.full_contact_list[:]
self.real_contact_list.remove(LOCAL_ID)
def tearDown(self):
def tearDown(self) -> None:
"""Post-test actions."""
cleanup(self.unit_test_dir)
def test_contact_list_iterates_over_contact_objects(self):
def test_contact_list_iterates_over_contact_objects(self) -> None:
for c in self.contact_list:
self.assertIsInstance(c, Contact)
def test_len_returns_the_number_of_contacts_and_excludes_the_local_key(self):
self.assertEqual(len(self.contact_list),
len(self.real_contact_list))
def test_len_returns_the_number_of_contacts_and_excludes_the_local_key(
self,
) -> None:
self.assertEqual(len(self.contact_list), len(self.real_contact_list))
def test_storing_and_loading_of_contacts(self):
def test_storing_and_loading_of_contacts(self) -> None:
# Test store
self.contact_list.store_contacts()
self.assertEqual(os.path.getsize(self.file_name),
XCHACHA20_NONCE_LENGTH
+ (self.settings.max_number_of_contacts + 1) * CONTACT_LENGTH
+ POLY1305_TAG_LENGTH)
self.assertEqual(
os.path.getsize(self.file_name),
XCHACHA20_NONCE_LENGTH
+ (self.settings.max_number_of_contacts + 1) * CONTACT_LENGTH
+ POLY1305_TAG_LENGTH,
)
# Test load
contact_list2 = ContactList(self.master_key, self.settings)
self.assertEqual(len(contact_list2), len(self.real_contact_list))
self.assertEqual(len(contact_list2), len(self.real_contact_list))
self.assertEqual(len(contact_list2.contacts), len(self.full_contact_list))
for c in contact_list2:
self.assertIsInstance(c, Contact)
def test_invalid_content_raises_critical_error(self):
def test_invalid_content_raises_critical_error(self) -> None:
# Setup
invalid_data = b'a'
pt_bytes = b''.join([c.serialize_c() for c in self.contact_list.contacts
+ self.contact_list._dummy_contacts()])
ct_bytes = encrypt_and_sign(pt_bytes + invalid_data, self.master_key.master_key)
invalid_data = b"a"
pt_bytes = b"".join(
[
c.serialize_c()
for c in self.contact_list.contacts
+ self.contact_list._dummy_contacts()
]
)
ct_bytes = encrypt_and_sign(pt_bytes + invalid_data, self.master_key.master_key)
ensure_dir(DIR_USER_DATA)
with open(self.file_name, 'wb+') as f:
with open(self.file_name, "wb+") as f:
f.write(ct_bytes)
# Test
with self.assertRaises(SystemExit):
ContactList(self.master_key, self.settings)
def test_load_of_modified_database_raises_critical_error(self):
def test_load_of_modified_database_raises_critical_error(self) -> None:
self.contact_list.store_contacts()
# Test reading works normally
@ -130,232 +165,318 @@ class TestContactList(TFCTestCase):
with self.assertRaises(SystemExit):
ContactList(self.master_key, self.settings)
def test_generate_dummy_contact(self):
def test_generate_dummy_contact(self) -> None:
dummy_contact = ContactList.generate_dummy_contact()
self.assertIsInstance(dummy_contact, Contact)
self.assertEqual(len(dummy_contact.serialize_c()), CONTACT_LENGTH)
def test_dummy_contacts(self):
def test_dummy_contacts(self) -> None:
dummies = self.contact_list._dummy_contacts()
self.assertEqual(len(dummies), self.settings.max_number_of_contacts - len(self.real_contact_list))
self.assertEqual(
len(dummies),
self.settings.max_number_of_contacts - len(self.real_contact_list),
)
for c in dummies:
self.assertIsInstance(c, Contact)
def test_add_contact(self):
tx_fingerprint = FINGERPRINT_LENGTH * b'\x03'
rx_fingerprint = FINGERPRINT_LENGTH * b'\x04'
def test_add_contact(self) -> None:
tx_fingerprint = FINGERPRINT_LENGTH * b"\x03"
rx_fingerprint = FINGERPRINT_LENGTH * b"\x04"
self.assertIsNone(self.contact_list.add_contact(nick_to_pub_key('Faye'),
'Faye',
tx_fingerprint,
rx_fingerprint,
KEX_STATUS_UNVERIFIED,
self.settings.log_messages_by_default,
self.settings.accept_files_by_default,
self.settings.show_notifications_by_default))
self.assertIsNone(
self.contact_list.add_contact(
nick_to_pub_key("Faye"),
"Faye",
tx_fingerprint,
rx_fingerprint,
KEX_STATUS_UNVERIFIED,
self.settings.log_messages_by_default,
self.settings.accept_files_by_default,
self.settings.show_notifications_by_default,
)
)
# Test new contact was stored by loading
# the database from file to another object
contact_list2 = ContactList(MasterKey(), Settings())
faye = contact_list2.get_contact_by_pub_key(nick_to_pub_key('Faye'))
faye = contact_list2.get_contact_by_pub_key(nick_to_pub_key("Faye"))
self.assertEqual(len(self.contact_list), len(self.real_contact_list)+1)
self.assertEqual(len(self.contact_list), len(self.real_contact_list) + 1)
self.assertIsInstance(faye, Contact)
self.assertEqual(faye.tx_fingerprint, tx_fingerprint)
self.assertEqual(faye.rx_fingerprint, rx_fingerprint)
self.assertEqual(faye.kex_status, KEX_STATUS_UNVERIFIED)
self.assertEqual(faye.kex_status, KEX_STATUS_UNVERIFIED)
self.assertEqual(faye.log_messages, self.settings.log_messages_by_default)
self.assertEqual(faye.log_messages, self.settings.log_messages_by_default)
self.assertEqual(faye.file_reception, self.settings.accept_files_by_default)
self.assertEqual(faye.notifications, self.settings.show_notifications_by_default)
self.assertEqual(
faye.notifications, self.settings.show_notifications_by_default
)
def test_add_contact_that_replaces_an_existing_contact(self):
alice = self.contact_list.get_contact_by_pub_key(nick_to_pub_key('Alice'))
new_nick = 'Alice2'
new_tx_fingerprint = FINGERPRINT_LENGTH * b'\x03'
new_rx_fingerprint = FINGERPRINT_LENGTH * b'\x04'
def test_add_contact_that_replaces_an_existing_contact(self) -> None:
alice = self.contact_list.get_contact_by_pub_key(nick_to_pub_key("Alice"))
new_nick = "Alice2"
new_tx_fingerprint = FINGERPRINT_LENGTH * b"\x03"
new_rx_fingerprint = FINGERPRINT_LENGTH * b"\x04"
# Verify that existing nick, kex status and fingerprints are
# different from those that will replace the existing data
self.assertNotEqual(alice.nick, new_nick)
self.assertNotEqual(alice.nick, new_nick)
self.assertNotEqual(alice.tx_fingerprint, new_tx_fingerprint)
self.assertNotEqual(alice.rx_fingerprint, new_rx_fingerprint)
self.assertNotEqual(alice.kex_status, KEX_STATUS_UNVERIFIED)
self.assertNotEqual(alice.kex_status, KEX_STATUS_UNVERIFIED)
# Make sure each contact setting is opposite from default value
alice.log_messages = not self.settings.log_messages_by_default
alice.log_messages = not self.settings.log_messages_by_default
alice.file_reception = not self.settings.accept_files_by_default
alice.notifications = not self.settings.show_notifications_by_default
alice.notifications = not self.settings.show_notifications_by_default
# Replace the existing contact
self.assertIsNone(self.contact_list.add_contact(nick_to_pub_key('Alice'),
new_nick,
new_tx_fingerprint,
new_rx_fingerprint,
KEX_STATUS_UNVERIFIED,
self.settings.log_messages_by_default,
self.settings.accept_files_by_default,
self.settings.show_notifications_by_default))
self.assertIsNone(
self.contact_list.add_contact(
nick_to_pub_key("Alice"),
new_nick,
new_tx_fingerprint,
new_rx_fingerprint,
KEX_STATUS_UNVERIFIED,
self.settings.log_messages_by_default,
self.settings.accept_files_by_default,
self.settings.show_notifications_by_default,
)
)
# Load database to another object from
# file to verify new contact was stored
contact_list2 = ContactList(MasterKey(), Settings())
alice = contact_list2.get_contact_by_pub_key(nick_to_pub_key('Alice'))
alice = contact_list2.get_contact_by_pub_key(nick_to_pub_key("Alice"))
# Verify the content of loaded data
self.assertEqual(len(contact_list2), len(self.real_contact_list))
self.assertIsInstance(alice, Contact)
# Test replaced contact replaced nick, fingerprints and kex status
self.assertEqual(alice.nick, new_nick)
self.assertEqual(alice.nick, new_nick)
self.assertEqual(alice.tx_fingerprint, new_tx_fingerprint)
self.assertEqual(alice.rx_fingerprint, new_rx_fingerprint)
self.assertEqual(alice.kex_status, KEX_STATUS_UNVERIFIED)
self.assertEqual(alice.kex_status, KEX_STATUS_UNVERIFIED)
# Test replaced contact kept settings set
# to be opposite from default settings
self.assertNotEqual(alice.log_messages, self.settings.log_messages_by_default)
self.assertNotEqual(alice.log_messages, self.settings.log_messages_by_default)
self.assertNotEqual(alice.file_reception, self.settings.accept_files_by_default)
self.assertNotEqual(alice.notifications, self.settings.show_notifications_by_default)
self.assertNotEqual(
alice.notifications, self.settings.show_notifications_by_default
)
def test_remove_contact_by_pub_key(self):
def test_remove_contact_by_pub_key(self) -> None:
# Verify both contacts exist
self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key('Bob')))
self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key('Charlie')))
self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key("Bob")))
self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key("Charlie")))
self.assertIsNone(self.contact_list.remove_contact_by_pub_key(nick_to_pub_key('Bob')))
self.assertFalse(self.contact_list.has_pub_key(nick_to_pub_key('Bob')))
self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key('Charlie')))
self.assertIsNone(
self.contact_list.remove_contact_by_pub_key(nick_to_pub_key("Bob"))
)
self.assertFalse(self.contact_list.has_pub_key(nick_to_pub_key("Bob")))
self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key("Charlie")))
def test_remove_contact_by_address_or_nick(self):
def test_remove_contact_by_address_or_nick(self) -> None:
# Verify both contacts exist
self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key('Bob')))
self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key('Charlie')))
self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key("Bob")))
self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key("Charlie")))
# Test removal with address
self.assertIsNone(self.contact_list.remove_contact_by_address_or_nick(nick_to_onion_address('Bob')))
self.assertFalse(self.contact_list.has_pub_key(nick_to_pub_key('Bob')))
self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key('Charlie')))
self.assertIsNone(
self.contact_list.remove_contact_by_address_or_nick(
nick_to_onion_address("Bob")
)
)
self.assertFalse(self.contact_list.has_pub_key(nick_to_pub_key("Bob")))
self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key("Charlie")))
# Test removal with nick
self.assertIsNone(self.contact_list.remove_contact_by_address_or_nick('Charlie'))
self.assertFalse(self.contact_list.has_pub_key(nick_to_pub_key('Bob')))
self.assertFalse(self.contact_list.has_pub_key(nick_to_pub_key('Charlie')))
self.assertIsNone(
self.contact_list.remove_contact_by_address_or_nick("Charlie")
)
self.assertFalse(self.contact_list.has_pub_key(nick_to_pub_key("Bob")))
self.assertFalse(self.contact_list.has_pub_key(nick_to_pub_key("Charlie")))
def test_get_contact_by_pub_key(self):
self.assertIs(self.contact_list.get_contact_by_pub_key(nick_to_pub_key('Bob')),
self.contact_list.get_contact_by_address_or_nick('Bob'))
def test_get_contact_by_pub_key(self) -> None:
self.assertIs(
self.contact_list.get_contact_by_pub_key(nick_to_pub_key("Bob")),
self.contact_list.get_contact_by_address_or_nick("Bob"),
)
def test_get_contact_by_address_or_nick_returns_the_same_contact_object_with_address_and_nick(self):
for selector in [nick_to_onion_address('Bob'), 'Bob']:
self.assertIsInstance(self.contact_list.get_contact_by_address_or_nick(selector), Contact)
def test_get_contact_by_address_or_nick_returns_the_same_contact_object_with_address_and_nick(
self,
) -> None:
for selector in [nick_to_onion_address("Bob"), "Bob"]:
self.assertIsInstance(
self.contact_list.get_contact_by_address_or_nick(selector), Contact
)
self.assertIs(self.contact_list.get_contact_by_address_or_nick('Bob'),
self.contact_list.get_contact_by_address_or_nick(nick_to_onion_address('Bob')))
self.assertIs(
self.contact_list.get_contact_by_address_or_nick("Bob"),
self.contact_list.get_contact_by_address_or_nick(
nick_to_onion_address("Bob")
),
)
def test_get_list_of_contacts(self):
self.assertEqual(len(self.contact_list.get_list_of_contacts()),
len(self.real_contact_list))
def test_get_list_of_contacts(self) -> None:
self.assertEqual(
len(self.contact_list.get_list_of_contacts()), len(self.real_contact_list)
)
for c in self.contact_list.get_list_of_contacts():
self.assertIsInstance(c, Contact)
def test_get_list_of_addresses(self):
self.assertEqual(self.contact_list.get_list_of_addresses(),
[nick_to_onion_address('Alice'),
nick_to_onion_address('Bob'),
nick_to_onion_address('Charlie'),
nick_to_onion_address('David'),
nick_to_onion_address('Eric')])
def test_get_list_of_addresses(self) -> None:
self.assertEqual(
self.contact_list.get_list_of_addresses(),
[
nick_to_onion_address("Alice"),
nick_to_onion_address("Bob"),
nick_to_onion_address("Charlie"),
nick_to_onion_address("David"),
nick_to_onion_address("Eric"),
],
)
def test_get_list_of_nicks(self):
self.assertEqual(self.contact_list.get_list_of_nicks(),
['Alice', 'Bob', 'Charlie', 'David', 'Eric'])
def test_get_list_of_nicks(self) -> None:
self.assertEqual(
self.contact_list.get_list_of_nicks(),
["Alice", "Bob", "Charlie", "David", "Eric"],
)
def test_get_list_of_pub_keys(self):
self.assertEqual(self.contact_list.get_list_of_pub_keys(),
[nick_to_pub_key('Alice'),
nick_to_pub_key('Bob'),
nick_to_pub_key('Charlie'),
nick_to_pub_key('David'),
nick_to_pub_key('Eric')])
def test_get_list_of_pub_keys(self) -> None:
self.assertEqual(
self.contact_list.get_list_of_pub_keys(),
[
nick_to_pub_key("Alice"),
nick_to_pub_key("Bob"),
nick_to_pub_key("Charlie"),
nick_to_pub_key("David"),
nick_to_pub_key("Eric"),
],
)
def test_get_list_of_pending_pub_keys(self):
def test_get_list_of_pending_pub_keys(self) -> None:
# Set key exchange statuses to pending
for nick in ['Alice', 'Bob']:
contact = self.contact_list.get_contact_by_address_or_nick(nick)
for nick in ["Alice", "Bob"]:
contact = self.contact_list.get_contact_by_address_or_nick(nick)
contact.kex_status = KEX_STATUS_PENDING
# Test pending contacts are returned
self.assertEqual(self.contact_list.get_list_of_pending_pub_keys(),
[nick_to_pub_key('Alice'),
nick_to_pub_key('Bob')])
self.assertEqual(
self.contact_list.get_list_of_pending_pub_keys(),
[nick_to_pub_key("Alice"), nick_to_pub_key("Bob")],
)
def test_get_list_of_existing_pub_keys(self):
self.contact_list.get_contact_by_address_or_nick('Alice').kex_status = KEX_STATUS_UNVERIFIED
self.contact_list.get_contact_by_address_or_nick('Bob').kex_status = KEX_STATUS_VERIFIED
self.contact_list.get_contact_by_address_or_nick('Charlie').kex_status = KEX_STATUS_HAS_RX_PSK
self.contact_list.get_contact_by_address_or_nick('David').kex_status = KEX_STATUS_NO_RX_PSK
self.contact_list.get_contact_by_address_or_nick('Eric').kex_status = KEX_STATUS_PENDING
def test_get_list_of_existing_pub_keys(self) -> None:
self.contact_list.get_contact_by_address_or_nick(
"Alice"
).kex_status = KEX_STATUS_UNVERIFIED
self.contact_list.get_contact_by_address_or_nick(
"Bob"
).kex_status = KEX_STATUS_VERIFIED
self.contact_list.get_contact_by_address_or_nick(
"Charlie"
).kex_status = KEX_STATUS_HAS_RX_PSK
self.contact_list.get_contact_by_address_or_nick(
"David"
).kex_status = KEX_STATUS_NO_RX_PSK
self.contact_list.get_contact_by_address_or_nick(
"Eric"
).kex_status = KEX_STATUS_PENDING
self.assertEqual(self.contact_list.get_list_of_existing_pub_keys(),
[nick_to_pub_key('Alice'),
nick_to_pub_key('Bob'),
nick_to_pub_key('Charlie'),
nick_to_pub_key('David')])
self.assertEqual(
self.contact_list.get_list_of_existing_pub_keys(),
[
nick_to_pub_key("Alice"),
nick_to_pub_key("Bob"),
nick_to_pub_key("Charlie"),
nick_to_pub_key("David"),
],
)
def test_contact_selectors(self):
self.assertEqual(self.contact_list.contact_selectors(),
[nick_to_onion_address('Alice'),
nick_to_onion_address('Bob'),
nick_to_onion_address('Charlie'),
nick_to_onion_address('David'),
nick_to_onion_address('Eric'),
'Alice', 'Bob', 'Charlie', 'David', 'Eric'])
def test_contact_selectors(self) -> None:
self.assertEqual(
self.contact_list.contact_selectors(),
[
nick_to_onion_address("Alice"),
nick_to_onion_address("Bob"),
nick_to_onion_address("Charlie"),
nick_to_onion_address("David"),
nick_to_onion_address("Eric"),
"Alice",
"Bob",
"Charlie",
"David",
"Eric",
],
)
def test_has_contacts(self):
def test_has_contacts(self) -> None:
self.assertTrue(self.contact_list.has_contacts())
self.contact_list.contacts = []
self.assertFalse(self.contact_list.has_contacts())
def test_has_only_pending_contacts(self):
def test_has_only_pending_contacts(self) -> None:
# Change all to pending
for contact in self.contact_list.get_list_of_contacts():
contact.kex_status = KEX_STATUS_PENDING
self.assertTrue(self.contact_list.has_only_pending_contacts())
# Change one from pending
alice = self.contact_list.get_contact_by_address_or_nick('Alice')
alice = self.contact_list.get_contact_by_address_or_nick("Alice")
alice.kex_status = KEX_STATUS_UNVERIFIED
self.assertFalse(self.contact_list.has_only_pending_contacts())
def test_has_pub_key(self):
def test_has_pub_key(self) -> None:
self.contact_list.contacts = []
self.assertFalse(self.contact_list.has_pub_key(nick_to_pub_key('Bob')))
self.assertFalse(self.contact_list.has_pub_key(nick_to_pub_key('Bob')))
self.assertFalse(self.contact_list.has_pub_key(nick_to_pub_key("Bob")))
self.assertFalse(self.contact_list.has_pub_key(nick_to_pub_key("Bob")))
self.contact_list.contacts = list(map(create_contact, ['Bob', 'Charlie']))
self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key('Bob')))
self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key('Charlie')))
self.contact_list.contacts = list(map(create_contact, ["Bob", "Charlie"]))
self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key("Bob")))
self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key("Charlie")))
def test_has_local_contact(self):
def test_has_local_contact(self) -> None:
self.contact_list.contacts = []
self.assertFalse(self.contact_list.has_local_contact())
self.contact_list.contacts = [create_contact(LOCAL_ID)]
self.assertTrue(self.contact_list.has_local_contact())
def test_print_contacts(self):
def test_print_contacts(self) -> None:
self.contact_list.contacts.append(create_contact(LOCAL_ID))
self.contact_list.get_contact_by_pub_key(nick_to_pub_key('Alice')).log_messages = False
self.contact_list.get_contact_by_pub_key(nick_to_pub_key('Alice')).kex_status = KEX_STATUS_PENDING
self.contact_list.get_contact_by_pub_key(nick_to_pub_key('Bob')).notifications = False
self.contact_list.get_contact_by_pub_key(nick_to_pub_key('Charlie')).kex_status = KEX_STATUS_UNVERIFIED
self.contact_list.get_contact_by_pub_key(nick_to_pub_key('Bob')).file_reception = False
self.contact_list.get_contact_by_pub_key(nick_to_pub_key('Bob')).kex_status = KEX_STATUS_VERIFIED
self.contact_list.get_contact_by_pub_key(nick_to_pub_key('David')).rx_fingerprint = bytes(FINGERPRINT_LENGTH)
self.contact_list.get_contact_by_pub_key(nick_to_pub_key('David')).kex_status = bytes(KEX_STATUS_NO_RX_PSK)
self.assert_prints(CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER + f"""\
self.contact_list.get_contact_by_pub_key(
nick_to_pub_key("Alice")
).log_messages = False
self.contact_list.get_contact_by_pub_key(
nick_to_pub_key("Alice")
).kex_status = KEX_STATUS_PENDING
self.contact_list.get_contact_by_pub_key(
nick_to_pub_key("Bob")
).notifications = False
self.contact_list.get_contact_by_pub_key(
nick_to_pub_key("Charlie")
).kex_status = KEX_STATUS_UNVERIFIED
self.contact_list.get_contact_by_pub_key(
nick_to_pub_key("Bob")
).file_reception = False
self.contact_list.get_contact_by_pub_key(
nick_to_pub_key("Bob")
).kex_status = KEX_STATUS_VERIFIED
self.contact_list.get_contact_by_pub_key(
nick_to_pub_key("David")
).rx_fingerprint = bytes(FINGERPRINT_LENGTH)
self.contact_list.get_contact_by_pub_key(
nick_to_pub_key("David")
).kex_status = bytes(KEX_STATUS_NO_RX_PSK)
self.assert_prints(
CLEAR_ENTIRE_SCREEN
+ CURSOR_LEFT_UP_CORNER
+ f"""\
Contact Account Logging Notify Files Key Ex
@ -366,8 +487,10 @@ David u22uy Yes Yes Accept {PSK} (No contact key)
Eric jszzy Yes Yes Accept {ECDHE} (Verified)
""", self.contact_list.print_contacts)
""",
self.contact_list.print_contacts,
)
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main(exit=False)

View File

@ -22,303 +22,408 @@ along with TFC. If not, see <https://www.gnu.org/licenses/>.
import os
import unittest
from src.common.crypto import encrypt_and_sign
from src.common.crypto import encrypt_and_sign
from src.common.db_contacts import Contact, ContactList
from src.common.db_groups import Group, GroupList
from src.common.encoding import b58encode
from src.common.misc import ensure_dir
from src.common.statics import (DIR_USER_DATA, GROUP_DB_HEADER_LENGTH, GROUP_ID_LENGTH, GROUP_STATIC_LENGTH,
ONION_SERVICE_PUBLIC_KEY_LENGTH, POLY1305_TAG_LENGTH, XCHACHA20_NONCE_LENGTH)
from src.common.db_groups import Group, GroupList
from src.common.encoding import b58encode
from src.common.misc import ensure_dir
from src.common.statics import (
DIR_USER_DATA,
GROUP_DB_HEADER_LENGTH,
GROUP_ID_LENGTH,
GROUP_STATIC_LENGTH,
ONION_SERVICE_PUBLIC_KEY_LENGTH,
POLY1305_TAG_LENGTH,
XCHACHA20_NONCE_LENGTH,
)
from tests.mock_classes import create_contact, group_name_to_group_id, MasterKey, nick_to_pub_key, Settings
from tests.utils import cd_unit_test, cleanup, tamper_file, TFCTestCase
from tests.mock_classes import (
create_contact,
group_name_to_group_id,
MasterKey,
nick_to_pub_key,
Settings,
)
from tests.utils import cd_unit_test, cleanup, tamper_file, TFCTestCase
class TestGroup(unittest.TestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.unit_test_dir = cd_unit_test()
self.nicks = ['Alice', 'Bob', 'Charlie']
members = list(map(create_contact, self.nicks))
self.settings = Settings()
self.group = Group(name ='test_group',
group_id =group_name_to_group_id('test_group'),
log_messages =False,
notifications=False,
members =members,
settings =self.settings,
store_groups =lambda: None)
self.nicks = ["Alice", "Bob", "Charlie"]
members = list(map(create_contact, self.nicks))
self.settings = Settings()
self.group = Group(
name="test_group",
group_id=group_name_to_group_id("test_group"),
log_messages=False,
notifications=False,
members=members,
settings=self.settings,
store_groups=lambda: None,
)
ensure_dir(DIR_USER_DATA)
def tearDown(self):
def tearDown(self) -> None:
"""Post-test actions."""
cleanup(self.unit_test_dir)
def test_group_iterates_over_contact_objects(self):
def test_group_iterates_over_contact_objects(self) -> None:
for c in self.group:
self.assertIsInstance(c, Contact)
def test_len_returns_the_number_of_members(self):
def test_len_returns_the_number_of_members(self) -> None:
self.assertEqual(len(self.group), len(self.nicks))
def test_group_serialization_length_and_type(self):
def test_group_serialization_length_and_type(self) -> None:
serialized = self.group.serialize_g()
self.assertIsInstance(serialized, bytes)
self.assertEqual(len(serialized), GROUP_STATIC_LENGTH + (self.settings.max_number_of_group_members
* ONION_SERVICE_PUBLIC_KEY_LENGTH))
self.assertEqual(
len(serialized),
GROUP_STATIC_LENGTH
+ (
self.settings.max_number_of_group_members
* ONION_SERVICE_PUBLIC_KEY_LENGTH
),
)
def test_add_members(self):
def test_add_members(self) -> None:
# Test members to be added are not already in group
self.assertFalse(self.group.has_member(nick_to_pub_key('David')))
self.assertFalse(self.group.has_member(nick_to_pub_key('Eric')))
self.assertFalse(self.group.has_member(nick_to_pub_key("David")))
self.assertFalse(self.group.has_member(nick_to_pub_key("Eric")))
self.assertIsNone(self.group.add_members(list(map(create_contact, ['Alice', 'David', 'Eric']))))
self.assertIsNone(
self.group.add_members(
list(map(create_contact, ["Alice", "David", "Eric"]))
)
)
# Test new members were added
self.assertTrue(self.group.has_member(nick_to_pub_key('David')))
self.assertTrue(self.group.has_member(nick_to_pub_key('Eric')))
self.assertTrue(self.group.has_member(nick_to_pub_key("David")))
self.assertTrue(self.group.has_member(nick_to_pub_key("Eric")))
# Test Alice was not added twice
self.assertEqual(len(self.group), len(['Alice', 'Bob', 'Charlie', 'David', 'Eric']))
self.assertEqual(
len(self.group), len(["Alice", "Bob", "Charlie", "David", "Eric"])
)
def test_remove_members(self):
def test_remove_members(self) -> None:
# Test members to be removed are part of group
self.assertTrue(self.group.has_member(nick_to_pub_key('Alice')))
self.assertTrue(self.group.has_member(nick_to_pub_key('Bob')))
self.assertTrue(self.group.has_member(nick_to_pub_key('Charlie')))
self.assertTrue(self.group.has_member(nick_to_pub_key("Alice")))
self.assertTrue(self.group.has_member(nick_to_pub_key("Bob")))
self.assertTrue(self.group.has_member(nick_to_pub_key("Charlie")))
# Test first attempt to remove returns True (because Charlie was removed)
self.assertTrue(self.group.remove_members([nick_to_pub_key('Charlie'), nick_to_pub_key('Unknown')]))
self.assertTrue(
self.group.remove_members(
[nick_to_pub_key("Charlie"), nick_to_pub_key("Unknown")]
)
)
# Test second attempt to remove returns False (because no-one was removed)
self.assertFalse(self.group.remove_members([nick_to_pub_key('Charlie'), nick_to_pub_key('Unknown')]))
self.assertFalse(
self.group.remove_members(
[nick_to_pub_key("Charlie"), nick_to_pub_key("Unknown")]
)
)
# Test Charlie was removed
self.assertFalse(self.group.has_member(nick_to_pub_key('Charlie')))
self.assertFalse(self.group.has_member(nick_to_pub_key("Charlie")))
# Test no other members were removed
self.assertTrue(self.group.has_member(nick_to_pub_key('Alice')))
self.assertTrue(self.group.has_member(nick_to_pub_key('Bob')))
self.assertTrue(self.group.has_member(nick_to_pub_key("Alice")))
self.assertTrue(self.group.has_member(nick_to_pub_key("Bob")))
def test_get_list_of_member_pub_keys(self):
self.assertEqual(first=self.group.get_list_of_member_pub_keys(),
second=[nick_to_pub_key('Alice'),
nick_to_pub_key('Bob'),
nick_to_pub_key('Charlie')])
def test_get_list_of_member_pub_keys(self) -> None:
self.assertEqual(
first=self.group.get_list_of_member_pub_keys(),
second=[
nick_to_pub_key("Alice"),
nick_to_pub_key("Bob"),
nick_to_pub_key("Charlie"),
],
)
def test_has_member(self):
self.assertTrue(self.group.has_member(nick_to_pub_key('Charlie')))
self.assertFalse(self.group.has_member(nick_to_pub_key('David')))
def test_has_member(self) -> None:
self.assertTrue(self.group.has_member(nick_to_pub_key("Charlie")))
self.assertFalse(self.group.has_member(nick_to_pub_key("David")))
def test_has_members(self):
def test_has_members(self) -> None:
self.assertFalse(self.group.empty())
self.group.members = []
self.assertTrue(self.group.empty())
class TestGroupList(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.unit_test_dir = cd_unit_test()
self.master_key = MasterKey()
self.settings = Settings()
self.file_name = f'{DIR_USER_DATA}{self.settings.software_operation}_groups'
self.contact_list = ContactList(self.master_key, self.settings)
self.group_list = GroupList(self.master_key, self.settings, self.contact_list)
self.nicks = ['Alice', 'Bob', 'Charlie', 'David', 'Eric',
'Fido', 'Guido', 'Heidi', 'Ivan', 'Joana', 'Karol']
self.group_names = ['test_group_1', 'test_group_2', 'test_group_3', 'test_group_4', 'test_group_5',
'test_group_6', 'test_group_7', 'test_group_8', 'test_group_9', 'test_group_10',
'test_group_11']
members = list(map(create_contact, self.nicks))
self.master_key = MasterKey()
self.settings = Settings()
self.file_name = f"{DIR_USER_DATA}{self.settings.software_operation}_groups"
self.contact_list = ContactList(self.master_key, self.settings)
self.group_list = GroupList(self.master_key, self.settings, self.contact_list)
self.nicks = [
"Alice",
"Bob",
"Charlie",
"David",
"Eric",
"Fido",
"Guido",
"Heidi",
"Ivan",
"Joana",
"Karol",
]
self.group_names = [
"test_group_1",
"test_group_2",
"test_group_3",
"test_group_4",
"test_group_5",
"test_group_6",
"test_group_7",
"test_group_8",
"test_group_9",
"test_group_10",
"test_group_11",
]
members = list(map(create_contact, self.nicks))
self.contact_list.contacts = members
self.group_list.groups = \
[Group(name =name,
group_id =group_name_to_group_id(name),
log_messages =False,
notifications=False,
members =members,
settings =self.settings,
store_groups =self.group_list.store_groups)
for name in self.group_names]
self.group_list.groups = [
Group(
name=name,
group_id=group_name_to_group_id(name),
log_messages=False,
notifications=False,
members=members,
settings=self.settings,
store_groups=self.group_list.store_groups,
)
for name in self.group_names
]
self.single_member_data_len = (GROUP_STATIC_LENGTH
+ self.settings.max_number_of_group_members * ONION_SERVICE_PUBLIC_KEY_LENGTH)
self.single_member_data_len = (
GROUP_STATIC_LENGTH
+ self.settings.max_number_of_group_members
* ONION_SERVICE_PUBLIC_KEY_LENGTH
)
def tearDown(self):
def tearDown(self) -> None:
"""Post-test actions."""
cleanup(self.unit_test_dir)
def test_group_list_iterates_over_group_objects(self):
def test_group_list_iterates_over_group_objects(self) -> None:
for g in self.group_list:
self.assertIsInstance(g, Group)
def test_len_returns_the_number_of_groups(self):
def test_len_returns_the_number_of_groups(self) -> None:
self.assertEqual(len(self.group_list), len(self.group_names))
def test_storing_and_loading_of_groups(self):
def test_storing_and_loading_of_groups(self) -> None:
self.group_list.store_groups()
self.assertTrue(os.path.isfile(self.file_name))
self.assertEqual(os.path.getsize(self.file_name),
XCHACHA20_NONCE_LENGTH
+ GROUP_DB_HEADER_LENGTH
+ self.settings.max_number_of_groups * self.single_member_data_len
+ POLY1305_TAG_LENGTH)
self.assertEqual(
os.path.getsize(self.file_name),
XCHACHA20_NONCE_LENGTH
+ GROUP_DB_HEADER_LENGTH
+ self.settings.max_number_of_groups * self.single_member_data_len
+ POLY1305_TAG_LENGTH,
)
# Reduce setting values from 20 to 10
self.settings.max_number_of_groups = 10
self.settings.max_number_of_groups = 10
self.settings.max_number_of_group_members = 10
group_list2 = GroupList(self.master_key, self.settings, self.contact_list)
self.assertEqual(len(group_list2), 11)
# Check that `_load_groups()` increased setting values back to 20 so it fits the 11 groups
self.assertEqual(self.settings.max_number_of_groups, 20)
self.assertEqual(self.settings.max_number_of_groups, 20)
self.assertEqual(self.settings.max_number_of_group_members, 20)
# Check that removed contact from contact list updates group
self.contact_list.remove_contact_by_address_or_nick('Alice')
self.contact_list.remove_contact_by_address_or_nick("Alice")
group_list3 = GroupList(self.master_key, self.settings, self.contact_list)
self.assertEqual(len(group_list3.get_group('test_group_1').members), 10)
self.assertEqual(len(group_list3.get_group("test_group_1").members), 10)
def test_invalid_content_raises_critical_error(self):
def test_invalid_content_raises_critical_error(self) -> None:
# Setup
invalid_data = b'a'
pt_bytes = self.group_list._generate_group_db_header()
pt_bytes += b''.join([g.serialize_g() for g in (self.group_list.groups + self.group_list._dummy_groups())])
ct_bytes = encrypt_and_sign(pt_bytes + invalid_data, self.master_key.master_key)
invalid_data = b"a"
pt_bytes = self.group_list._generate_group_db_header()
pt_bytes += b"".join(
[
g.serialize_g()
for g in (self.group_list.groups + self.group_list._dummy_groups())
]
)
ct_bytes = encrypt_and_sign(pt_bytes + invalid_data, self.master_key.master_key)
ensure_dir(DIR_USER_DATA)
with open(self.file_name, 'wb+') as f:
with open(self.file_name, "wb+") as f:
f.write(ct_bytes)
# Test
with self.assertRaises(SystemExit):
GroupList(self.master_key, self.settings, self.contact_list)
def test_load_of_modified_database_raises_critical_error(self):
def test_load_of_modified_database_raises_critical_error(self) -> None:
self.group_list.store_groups()
# Test reading works normally
self.assertIsInstance(GroupList(self.master_key, self.settings, self.contact_list), GroupList)
self.assertIsInstance(
GroupList(self.master_key, self.settings, self.contact_list), GroupList
)
# Test loading of the tampered database raises CriticalError
tamper_file(self.file_name, tamper_size=1)
with self.assertRaises(SystemExit):
GroupList(self.master_key, self.settings, self.contact_list)
def test_check_db_settings(self):
self.assertFalse(self.group_list._check_db_settings(
number_of_actual_groups=self.settings.max_number_of_groups,
members_in_largest_group=self.settings.max_number_of_group_members))
def test_check_db_settings(self) -> None:
self.assertFalse(
self.group_list._check_db_settings(
number_of_actual_groups=self.settings.max_number_of_groups,
members_in_largest_group=self.settings.max_number_of_group_members,
)
)
self.assertTrue(self.group_list._check_db_settings(
number_of_actual_groups=self.settings.max_number_of_groups + 1,
members_in_largest_group=self.settings.max_number_of_group_members))
self.assertTrue(
self.group_list._check_db_settings(
number_of_actual_groups=self.settings.max_number_of_groups + 1,
members_in_largest_group=self.settings.max_number_of_group_members,
)
)
self.assertTrue(self.group_list._check_db_settings(
number_of_actual_groups=self.settings.max_number_of_groups,
members_in_largest_group=self.settings.max_number_of_group_members + 1))
self.assertTrue(
self.group_list._check_db_settings(
number_of_actual_groups=self.settings.max_number_of_groups,
members_in_largest_group=self.settings.max_number_of_group_members + 1,
)
)
def test_generate_group_db_header(self):
def test_generate_group_db_header(self) -> None:
header = self.group_list._generate_group_db_header()
self.assertEqual(len(header), GROUP_DB_HEADER_LENGTH)
self.assertIsInstance(header, bytes)
def test_generate_dummy_group(self):
def test_generate_dummy_group(self) -> None:
dummy_group = self.group_list._generate_dummy_group()
self.assertIsInstance(dummy_group, Group)
self.assertEqual(len(dummy_group.serialize_g()), self.single_member_data_len)
def test_dummy_groups(self):
def test_dummy_groups(self) -> None:
dummies = self.group_list._dummy_groups()
self.assertEqual(len(dummies), self.settings.max_number_of_contacts - len(self.nicks))
self.assertEqual(
len(dummies), self.settings.max_number_of_contacts - len(self.nicks)
)
for g in dummies:
self.assertIsInstance(g, Group)
def test_add_group(self):
members = [create_contact('Laura')]
self.group_list.add_group('test_group_12', bytes(GROUP_ID_LENGTH), False, False, members)
self.group_list.add_group('test_group_12', bytes(GROUP_ID_LENGTH), False, True, members)
self.assertTrue(self.group_list.get_group('test_group_12').notifications)
self.assertEqual(len(self.group_list), len(self.group_names)+1)
def test_add_group(self) -> None:
members = [create_contact("Laura")]
self.group_list.add_group(
"test_group_12", bytes(GROUP_ID_LENGTH), False, False, members
)
self.group_list.add_group(
"test_group_12", bytes(GROUP_ID_LENGTH), False, True, members
)
self.assertTrue(self.group_list.get_group("test_group_12").notifications)
self.assertEqual(len(self.group_list), len(self.group_names) + 1)
def test_remove_group_by_name(self):
def test_remove_group_by_name(self) -> None:
self.assertEqual(len(self.group_list), len(self.group_names))
# Remove non-existing group
self.assertIsNone(self.group_list.remove_group_by_name('test_group_12'))
self.assertIsNone(self.group_list.remove_group_by_name("test_group_12"))
self.assertEqual(len(self.group_list), len(self.group_names))
# Remove existing group
self.assertIsNone(self.group_list.remove_group_by_name('test_group_11'))
self.assertEqual(len(self.group_list), len(self.group_names)-1)
self.assertIsNone(self.group_list.remove_group_by_name("test_group_11"))
self.assertEqual(len(self.group_list), len(self.group_names) - 1)
def test_remove_group_by_id(self):
def test_remove_group_by_id(self) -> None:
self.assertEqual(len(self.group_list), len(self.group_names))
# Remove non-existing group
self.assertIsNone(self.group_list.remove_group_by_id(group_name_to_group_id('test_group_12')))
self.assertIsNone(
self.group_list.remove_group_by_id(group_name_to_group_id("test_group_12"))
)
self.assertEqual(len(self.group_list), len(self.group_names))
# Remove existing group
self.assertIsNone(self.group_list.remove_group_by_id(group_name_to_group_id('test_group_11')))
self.assertEqual(len(self.group_list), len(self.group_names)-1)
self.assertIsNone(
self.group_list.remove_group_by_id(group_name_to_group_id("test_group_11"))
)
self.assertEqual(len(self.group_list), len(self.group_names) - 1)
def test_get_group(self):
self.assertEqual(self.group_list.get_group('test_group_3').name, 'test_group_3')
def test_get_group(self) -> None:
self.assertEqual(self.group_list.get_group("test_group_3").name, "test_group_3")
def test_get_group_by_id(self):
members = [create_contact('Laura')]
def test_get_group_by_id(self) -> None:
members = [create_contact("Laura")]
group_id = os.urandom(GROUP_ID_LENGTH)
self.group_list.add_group('test_group_12', group_id, False, False, members)
self.assertEqual(self.group_list.get_group_by_id(group_id).name, 'test_group_12')
self.group_list.add_group("test_group_12", group_id, False, False, members)
self.assertEqual(
self.group_list.get_group_by_id(group_id).name, "test_group_12"
)
def test_get_list_of_group_names(self):
def test_get_list_of_group_names(self) -> None:
self.assertEqual(self.group_list.get_list_of_group_names(), self.group_names)
def test_get_list_of_group_ids(self):
self.assertEqual(self.group_list.get_list_of_group_ids(),
list(map(group_name_to_group_id, self.group_names)))
def test_get_list_of_group_ids(self) -> None:
self.assertEqual(
self.group_list.get_list_of_group_ids(),
list(map(group_name_to_group_id, self.group_names)),
)
def test_get_list_of_hr_group_ids(self):
self.assertEqual(self.group_list.get_list_of_hr_group_ids(),
[b58encode(gid) for gid in list(map(group_name_to_group_id, self.group_names))])
def test_get_list_of_hr_group_ids(self) -> None:
self.assertEqual(
self.group_list.get_list_of_hr_group_ids(),
[
b58encode(gid)
for gid in list(map(group_name_to_group_id, self.group_names))
],
)
def test_get_group_members(self):
members = self.group_list.get_group_members(group_name_to_group_id('test_group_1'))
def test_get_group_members(self) -> None:
members = self.group_list.get_group_members(
group_name_to_group_id("test_group_1")
)
for c in members:
self.assertIsInstance(c, Contact)
def test_has_group(self):
self.assertTrue(self.group_list.has_group('test_group_11'))
self.assertFalse(self.group_list.has_group('test_group_12'))
def test_has_group(self) -> None:
self.assertTrue(self.group_list.has_group("test_group_11"))
self.assertFalse(self.group_list.has_group("test_group_12"))
def test_has_group_id(self):
members = [create_contact('Laura')]
def test_has_group_id(self) -> None:
members = [create_contact("Laura")]
group_id = os.urandom(GROUP_ID_LENGTH)
self.assertFalse(self.group_list.has_group_id(group_id))
self.group_list.add_group('test_group_12', group_id, False, False, members)
self.group_list.add_group("test_group_12", group_id, False, False, members)
self.assertTrue(self.group_list.has_group_id(group_id))
def test_largest_group(self):
def test_largest_group(self) -> None:
self.assertEqual(self.group_list.largest_group(), len(self.nicks))
def test_print_group(self):
self.group_list.get_group("test_group_1").name = "group"
self.group_list.get_group("test_group_2").log_messages = True
def test_print_group(self) -> None:
self.group_list.get_group("test_group_1").name = "group"
self.group_list.get_group("test_group_2").log_messages = True
self.group_list.get_group("test_group_3").notifications = True
self.group_list.get_group("test_group_4").log_messages = True
self.group_list.get_group("test_group_4").log_messages = True
self.group_list.get_group("test_group_4").notifications = True
self.group_list.get_group("test_group_5").members = []
self.group_list.get_group("test_group_6").members = list(map(create_contact, ['Alice', 'Bob', 'Charlie',
'David', 'Eric', 'Fido']))
self.assert_prints("""\
self.group_list.get_group("test_group_5").members = []
self.group_list.get_group("test_group_6").members = list(
map(create_contact, ["Alice", "Bob", "Charlie", "David", "Eric", "Fido"])
)
self.assert_prints(
"""\
Group Group ID Logging Notify Members
group 2drs4c4VcDdrP No No Alice, Bob, Charlie,
@ -372,8 +477,10 @@ test_group_11 2e6vAGmHmSEEJ No No Alice, Bob, Charlie,
Joana, Karol
""", self.group_list.print_groups)
""",
self.group_list.print_groups,
)
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main(exit=False)

View File

@ -26,100 +26,125 @@ import unittest
from unittest import mock
from src.common.crypto import blake2b, encrypt_and_sign
from src.common.db_keys import KeyList, KeySet
from src.common.crypto import blake2b, encrypt_and_sign
from src.common.db_keys import KeyList, KeySet
from src.common.encoding import int_to_bytes
from src.common.misc import ensure_dir
from src.common.statics import (DIR_USER_DATA, INITIAL_HARAC, KDB_ADD_ENTRY_HEADER, KDB_HALT_ACK_HEADER,
KDB_M_KEY_CHANGE_HALT_HEADER, KDB_REMOVE_ENTRY_HEADER, KDB_UPDATE_SIZE_HEADER,
KEY_MANAGEMENT_QUEUE, KEY_MGMT_ACK_QUEUE, KEYSET_LENGTH, LOCAL_ID, LOCAL_PUBKEY,
POLY1305_TAG_LENGTH, RX, SYMMETRIC_KEY_LENGTH, TX, XCHACHA20_NONCE_LENGTH)
from src.common.misc import ensure_dir
from src.common.statics import (
DIR_USER_DATA,
INITIAL_HARAC,
KDB_ADD_ENTRY_HEADER,
KDB_HALT_ACK_HEADER,
KDB_M_KEY_CHANGE_HALT_HEADER,
KDB_REMOVE_ENTRY_HEADER,
KDB_UPDATE_SIZE_HEADER,
KEY_MANAGEMENT_QUEUE,
KEY_MGMT_ACK_QUEUE,
KEYSET_LENGTH,
LOCAL_ID,
LOCAL_PUBKEY,
POLY1305_TAG_LENGTH,
RX,
SYMMETRIC_KEY_LENGTH,
TX,
XCHACHA20_NONCE_LENGTH,
)
from tests.mock_classes import create_keyset, MasterKey, nick_to_pub_key, Settings
from tests.utils import cd_unit_test, cleanup, tamper_file, gen_queue_dict
from tests.utils import cd_unit_test, cleanup, tamper_file, gen_queue_dict
class TestKeySet(unittest.TestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.keyset = KeySet(onion_pub_key=nick_to_pub_key('Alice'),
tx_mk=bytes(SYMMETRIC_KEY_LENGTH),
rx_mk=bytes(SYMMETRIC_KEY_LENGTH),
tx_hk=bytes(SYMMETRIC_KEY_LENGTH),
rx_hk=bytes(SYMMETRIC_KEY_LENGTH),
tx_harac=INITIAL_HARAC,
rx_harac=INITIAL_HARAC,
store_keys=lambda: None)
self.keyset = KeySet(
onion_pub_key=nick_to_pub_key("Alice"),
tx_mk=bytes(SYMMETRIC_KEY_LENGTH),
rx_mk=bytes(SYMMETRIC_KEY_LENGTH),
tx_hk=bytes(SYMMETRIC_KEY_LENGTH),
rx_hk=bytes(SYMMETRIC_KEY_LENGTH),
tx_harac=INITIAL_HARAC,
rx_harac=INITIAL_HARAC,
store_keys=lambda: None,
)
def test_keyset_serialization_length_and_type(self):
def test_keyset_serialization_length_and_type(self) -> None:
serialized = self.keyset.serialize_k()
self.assertEqual(len(serialized), KEYSET_LENGTH)
self.assertIsInstance(serialized, bytes)
def test_rotate_tx_mk(self):
def test_rotate_tx_mk(self) -> None:
self.assertIsNone(self.keyset.rotate_tx_mk())
self.assertEqual(self.keyset.tx_mk, blake2b(bytes(SYMMETRIC_KEY_LENGTH) + int_to_bytes(INITIAL_HARAC),
digest_size=SYMMETRIC_KEY_LENGTH))
self.assertEqual(
self.keyset.tx_mk,
blake2b(
bytes(SYMMETRIC_KEY_LENGTH) + int_to_bytes(INITIAL_HARAC),
digest_size=SYMMETRIC_KEY_LENGTH,
),
)
self.assertEqual(self.keyset.rx_mk, bytes(SYMMETRIC_KEY_LENGTH))
self.assertEqual(self.keyset.tx_hk, bytes(SYMMETRIC_KEY_LENGTH))
self.assertEqual(self.keyset.rx_hk, bytes(SYMMETRIC_KEY_LENGTH))
self.assertEqual(self.keyset.tx_harac, 1)
self.assertEqual(self.keyset.rx_harac, INITIAL_HARAC)
def test_update_tx_mk(self):
self.keyset.update_mk(TX, SYMMETRIC_KEY_LENGTH * b'\x01', 2)
self.assertEqual(self.keyset.tx_mk, SYMMETRIC_KEY_LENGTH * b'\x01')
def test_update_tx_mk(self) -> None:
self.keyset.update_mk(TX, SYMMETRIC_KEY_LENGTH * b"\x01", 2)
self.assertEqual(self.keyset.tx_mk, SYMMETRIC_KEY_LENGTH * b"\x01")
self.assertEqual(self.keyset.rx_mk, bytes(SYMMETRIC_KEY_LENGTH))
self.assertEqual(self.keyset.tx_hk, bytes(SYMMETRIC_KEY_LENGTH))
self.assertEqual(self.keyset.rx_hk, bytes(SYMMETRIC_KEY_LENGTH))
self.assertEqual(self.keyset.tx_harac, 2)
self.assertEqual(self.keyset.rx_harac, INITIAL_HARAC)
def test_update_rx_mk(self):
self.keyset.update_mk(RX, SYMMETRIC_KEY_LENGTH * b'\x01', 2)
def test_update_rx_mk(self) -> None:
self.keyset.update_mk(RX, SYMMETRIC_KEY_LENGTH * b"\x01", 2)
self.assertEqual(self.keyset.tx_mk, bytes(SYMMETRIC_KEY_LENGTH))
self.assertEqual(self.keyset.rx_mk, SYMMETRIC_KEY_LENGTH * b'\x01')
self.assertEqual(self.keyset.rx_mk, SYMMETRIC_KEY_LENGTH * b"\x01")
self.assertEqual(self.keyset.tx_hk, bytes(SYMMETRIC_KEY_LENGTH))
self.assertEqual(self.keyset.rx_hk, bytes(SYMMETRIC_KEY_LENGTH))
self.assertEqual(self.keyset.tx_harac, INITIAL_HARAC)
self.assertEqual(self.keyset.rx_harac, 2)
def test_invalid_direction_raises_critical_error(self):
invalid_direction = 'sx'
def test_invalid_direction_raises_critical_error(self) -> None:
invalid_direction = "sx"
with self.assertRaises(SystemExit):
self.keyset.update_mk(invalid_direction, SYMMETRIC_KEY_LENGTH * b'\x01', 2)
self.keyset.update_mk(invalid_direction, SYMMETRIC_KEY_LENGTH * b"\x01", 2)
class TestKeyList(unittest.TestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.unit_test_dir = cd_unit_test()
self.master_key = MasterKey()
self.settings = Settings()
self.file_name = f'{DIR_USER_DATA}{self.settings.software_operation}_keys'
self.keylist = KeyList(self.master_key, self.settings)
self.full_contact_list = ['Alice', 'Bob', 'Charlie', LOCAL_ID]
self.keylist.keysets = [create_keyset(n, store_f=self.keylist.store_keys) for n in self.full_contact_list]
self.unit_test_dir = cd_unit_test()
self.master_key = MasterKey()
self.settings = Settings()
self.file_name = f"{DIR_USER_DATA}{self.settings.software_operation}_keys"
self.keylist = KeyList(self.master_key, self.settings)
self.full_contact_list = ["Alice", "Bob", "Charlie", LOCAL_ID]
self.keylist.keysets = [
create_keyset(n, store_f=self.keylist.store_keys)
for n in self.full_contact_list
]
def tearDown(self):
def tearDown(self) -> None:
"""Post-test actions."""
cleanup(self.unit_test_dir)
def test_storing_and_loading_of_keysets(self):
def test_storing_and_loading_of_keysets(self) -> None:
# Test store
self.keylist.store_keys()
self.assertEqual(os.path.getsize(self.file_name),
XCHACHA20_NONCE_LENGTH
+ (self.settings.max_number_of_contacts+1) * KEYSET_LENGTH
+ POLY1305_TAG_LENGTH)
self.assertEqual(
os.path.getsize(self.file_name),
XCHACHA20_NONCE_LENGTH
+ (self.settings.max_number_of_contacts + 1) * KEYSET_LENGTH
+ POLY1305_TAG_LENGTH,
)
# Test load
key_list2 = KeyList(MasterKey(), Settings())
self.assertEqual(len(key_list2.keysets), len(self.full_contact_list))
def test_load_of_modified_database_raises_critical_error(self):
def test_load_of_modified_database_raises_critical_error(self) -> None:
self.keylist.store_keys()
# Test reading works normally
@ -130,68 +155,77 @@ class TestKeyList(unittest.TestCase):
with self.assertRaises(SystemExit):
KeyList(self.master_key, self.settings)
def test_invalid_content_raises_critical_error(self):
def test_invalid_content_raises_critical_error(self) -> None:
# Setup
invalid_data = b'a'
pt_bytes = b''.join([k.serialize_k() for k in self.keylist.keysets + self.keylist._dummy_keysets()])
ct_bytes = encrypt_and_sign(pt_bytes + invalid_data, self.master_key.master_key)
invalid_data = b"a"
pt_bytes = b"".join(
[
k.serialize_k()
for k in self.keylist.keysets + self.keylist._dummy_keysets()
]
)
ct_bytes = encrypt_and_sign(pt_bytes + invalid_data, self.master_key.master_key)
ensure_dir(DIR_USER_DATA)
with open(self.file_name, 'wb+') as f:
with open(self.file_name, "wb+") as f:
f.write(ct_bytes)
# Test
with self.assertRaises(SystemExit):
KeyList(self.master_key, self.settings)
def test_generate_dummy_keyset(self):
def test_generate_dummy_keyset(self) -> None:
dummy_keyset = self.keylist.generate_dummy_keyset()
self.assertEqual(len(dummy_keyset.serialize_k()), KEYSET_LENGTH)
self.assertIsInstance(dummy_keyset, KeySet)
def test_dummy_keysets(self):
def test_dummy_keysets(self) -> None:
dummies = self.keylist._dummy_keysets()
self.assertEqual(len(dummies), (self.settings.max_number_of_contacts+1) - len(self.full_contact_list))
self.assertEqual(
len(dummies),
(self.settings.max_number_of_contacts + 1) - len(self.full_contact_list),
)
for c in dummies:
self.assertIsInstance(c, KeySet)
def test_add_keyset(self):
new_key = bytes(SYMMETRIC_KEY_LENGTH)
def test_add_keyset(self) -> None:
new_key = bytes(SYMMETRIC_KEY_LENGTH)
self.keylist.keysets = [create_keyset(LOCAL_ID)]
# Check that KeySet exists and that its keys are different
self.assertNotEqual(self.keylist.keysets[0].rx_hk, new_key)
# Replace existing KeySet
self.assertIsNone(self.keylist.add_keyset(LOCAL_PUBKEY,
new_key, new_key,
new_key, new_key))
self.assertIsNone(
self.keylist.add_keyset(LOCAL_PUBKEY, new_key, new_key, new_key, new_key)
)
# Check that new KeySet replaced the old one
self.assertEqual(self.keylist.keysets[0].onion_pub_key, LOCAL_PUBKEY)
self.assertEqual(self.keylist.keysets[0].rx_hk, new_key)
def test_remove_keyset(self):
def test_remove_keyset(self) -> None:
# Test KeySet for Bob exists
self.assertTrue(self.keylist.has_keyset(nick_to_pub_key('Bob')))
self.assertTrue(self.keylist.has_keyset(nick_to_pub_key("Bob")))
# Remove KeySet for Bob
self.assertIsNone(self.keylist.remove_keyset(nick_to_pub_key('Bob')))
self.assertIsNone(self.keylist.remove_keyset(nick_to_pub_key("Bob")))
# Test KeySet was removed
self.assertFalse(self.keylist.has_keyset(nick_to_pub_key('Bob')))
self.assertFalse(self.keylist.has_keyset(nick_to_pub_key("Bob")))
@mock.patch('builtins.input', side_effect=['test_password'])
def test_change_master_key(self, _):
@mock.patch("builtins.input", side_effect=["test_password"])
def test_change_master_key(self, _) -> None:
# Setup
key = SYMMETRIC_KEY_LENGTH * b'\x01'
key = SYMMETRIC_KEY_LENGTH * b"\x01"
master_key2 = MasterKey(master_key=key)
queues = gen_queue_dict()
queues = gen_queue_dict()
def queue_delayer():
def queue_delayer() -> None:
"""Place packet to queue after timer runs out."""
time.sleep(0.1)
queues[KEY_MANAGEMENT_QUEUE].put(master_key2.master_key)
threading.Thread(target=queue_delayer).start()
# Test that new key is different from existing one
@ -207,67 +241,97 @@ class TestKeyList(unittest.TestCase):
self.assertEqual(queues[KEY_MGMT_ACK_QUEUE].get(), KDB_HALT_ACK_HEADER)
self.assertEqual(queues[KEY_MGMT_ACK_QUEUE].get(), key)
def test_update_database(self):
def test_update_database(self) -> None:
# Setup
queues = gen_queue_dict()
# Test
self.assertEqual(os.path.getsize(self.file_name), 9016)
self.assertIsNone(self.keylist.manage(queues, KDB_UPDATE_SIZE_HEADER, Settings(max_number_of_contacts=100)))
self.assertIsNone(
self.keylist.manage(
queues, KDB_UPDATE_SIZE_HEADER, Settings(max_number_of_contacts=100)
)
)
self.assertEqual(os.path.getsize(self.file_name), 17816)
self.assertEqual(self.keylist.settings.max_number_of_contacts, 100)
def test_get_keyset(self):
keyset = self.keylist.get_keyset(nick_to_pub_key('Alice'))
def test_get_keyset(self) -> None:
keyset = self.keylist.get_keyset(nick_to_pub_key("Alice"))
self.assertIsInstance(keyset, KeySet)
def test_get_list_of_pub_keys(self):
self.assertEqual(self.keylist.get_list_of_pub_keys(),
[nick_to_pub_key("Alice"),
nick_to_pub_key("Bob"),
nick_to_pub_key("Charlie")])
def test_get_list_of_pub_keys(self) -> None:
self.assertEqual(
self.keylist.get_list_of_pub_keys(),
[
nick_to_pub_key("Alice"),
nick_to_pub_key("Bob"),
nick_to_pub_key("Charlie"),
],
)
def test_has_keyset(self):
def test_has_keyset(self) -> None:
self.keylist.keysets = []
self.assertFalse(self.keylist.has_keyset(nick_to_pub_key("Alice")))
self.keylist.keysets = [create_keyset('Alice')]
self.keylist.keysets = [create_keyset("Alice")]
self.assertTrue(self.keylist.has_keyset(nick_to_pub_key("Alice")))
def test_has_rx_mk(self):
self.assertTrue(self.keylist.has_rx_mk(nick_to_pub_key('Bob')))
self.keylist.get_keyset(nick_to_pub_key('Bob')).rx_mk = bytes(SYMMETRIC_KEY_LENGTH)
self.keylist.get_keyset(nick_to_pub_key('Bob')).rx_hk = bytes(SYMMETRIC_KEY_LENGTH)
self.assertFalse(self.keylist.has_rx_mk(nick_to_pub_key('Bob')))
def test_has_rx_mk(self) -> None:
self.assertTrue(self.keylist.has_rx_mk(nick_to_pub_key("Bob")))
self.keylist.get_keyset(nick_to_pub_key("Bob")).rx_mk = bytes(
SYMMETRIC_KEY_LENGTH
)
self.keylist.get_keyset(nick_to_pub_key("Bob")).rx_hk = bytes(
SYMMETRIC_KEY_LENGTH
)
self.assertFalse(self.keylist.has_rx_mk(nick_to_pub_key("Bob")))
def test_has_local_keyset(self):
def test_has_local_keyset(self) -> None:
self.keylist.keysets = []
self.assertFalse(self.keylist.has_local_keyset())
self.assertIsNone(self.keylist.add_keyset(LOCAL_PUBKEY,
bytes(SYMMETRIC_KEY_LENGTH), bytes(SYMMETRIC_KEY_LENGTH),
bytes(SYMMETRIC_KEY_LENGTH), bytes(SYMMETRIC_KEY_LENGTH)))
self.assertIsNone(
self.keylist.add_keyset(
LOCAL_PUBKEY,
bytes(SYMMETRIC_KEY_LENGTH),
bytes(SYMMETRIC_KEY_LENGTH),
bytes(SYMMETRIC_KEY_LENGTH),
bytes(SYMMETRIC_KEY_LENGTH),
)
)
self.assertTrue(self.keylist.has_local_keyset())
def test_manage(self):
def test_manage(self) -> None:
# Setup
queues = gen_queue_dict()
# Test that KeySet for David does not exist
self.assertFalse(self.keylist.has_keyset(nick_to_pub_key('David')))
self.assertFalse(self.keylist.has_keyset(nick_to_pub_key("David")))
# Test adding KeySet
self.assertIsNone(self.keylist.manage(queues, KDB_ADD_ENTRY_HEADER, nick_to_pub_key('David'),
bytes(SYMMETRIC_KEY_LENGTH), bytes(SYMMETRIC_KEY_LENGTH),
bytes(SYMMETRIC_KEY_LENGTH), bytes(SYMMETRIC_KEY_LENGTH)))
self.assertTrue(self.keylist.has_keyset(nick_to_pub_key('David')))
self.assertIsNone(
self.keylist.manage(
queues,
KDB_ADD_ENTRY_HEADER,
nick_to_pub_key("David"),
bytes(SYMMETRIC_KEY_LENGTH),
bytes(SYMMETRIC_KEY_LENGTH),
bytes(SYMMETRIC_KEY_LENGTH),
bytes(SYMMETRIC_KEY_LENGTH),
)
)
self.assertTrue(self.keylist.has_keyset(nick_to_pub_key("David")))
# Test removing KeySet
self.assertIsNone(self.keylist.manage(queues, KDB_REMOVE_ENTRY_HEADER, nick_to_pub_key('David')))
self.assertFalse(self.keylist.has_keyset(nick_to_pub_key('David')))
self.assertIsNone(
self.keylist.manage(
queues, KDB_REMOVE_ENTRY_HEADER, nick_to_pub_key("David")
)
)
self.assertFalse(self.keylist.has_keyset(nick_to_pub_key("David")))
# Test changing master key
new_key = SYMMETRIC_KEY_LENGTH * b'\x01'
new_key = SYMMETRIC_KEY_LENGTH * b"\x01"
self.assertNotEqual(self.master_key.master_key, new_key)
@ -279,8 +343,8 @@ class TestKeyList(unittest.TestCase):
# Test invalid KeyList management command raises Critical Error
with self.assertRaises(SystemExit):
self.keylist.manage(queues, 'invalid_key', None)
self.keylist.manage(queues, "invalid_key", None)
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main(exit=False)

File diff suppressed because it is too large Load Diff

View File

@ -23,14 +23,20 @@ import os
import os.path
import unittest
from unittest import mock
from unittest import mock
from unittest.mock import MagicMock
from src.common.crypto import blake2b
from src.common.crypto import blake2b
from src.common.db_masterkey import MasterKey
from src.common.misc import ensure_dir
from src.common.statics import (BLAKE2_DIGEST_LENGTH, DIR_USER_DATA, MASTERKEY_DB_SIZE, PASSWORD_MIN_BIT_STRENGTH,
SYMMETRIC_KEY_LENGTH, TX)
from src.common.misc import ensure_dir
from src.common.statics import (
BLAKE2_DIGEST_LENGTH,
DIR_USER_DATA,
MASTERKEY_DB_SIZE,
PASSWORD_MIN_BIT_STRENGTH,
SYMMETRIC_KEY_LENGTH,
TX,
)
from tests.utils import cd_unit_test, cleanup
@ -38,96 +44,122 @@ KL = SYMMETRIC_KEY_LENGTH
class TestMasterKey(unittest.TestCase):
input_list = ['password', 'different_password', # Invalid new password pair
'password', 'password', # Valid new password pair
'invalid_password', # Invalid login password
'password'] # Valid login password
input_list = [
"password",
"different_password", # Invalid new password pair
"password",
"password", # Valid new password pair
"invalid_password", # Invalid login password
"password",
] # Valid login password
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.unit_test_dir = cd_unit_test()
self.operation = TX
self.file_name = f"{DIR_USER_DATA}{self.operation}_login_data"
self.operation = TX
self.file_name = f"{DIR_USER_DATA}{self.operation}_login_data"
def tearDown(self):
def tearDown(self) -> None:
"""Post-test actions."""
cleanup(self.unit_test_dir)
def test_password_generation(self):
def test_password_generation(self) -> None:
bit_strength, password = MasterKey.generate_master_password()
self.assertIsInstance(bit_strength, int)
self.assertIsInstance(password, str)
self.assertIsInstance(password, str)
self.assertGreaterEqual(bit_strength, PASSWORD_MIN_BIT_STRENGTH)
self.assertEqual(len(password.split(' ')), 10)
self.assertEqual(len(password.split(" ")), 10)
@mock.patch('time.sleep', return_value=None)
def test_invalid_data_in_db_raises_critical_error(self, _):
@mock.patch("time.sleep", return_value=None)
def test_invalid_data_in_db_raises_critical_error(self, _) -> None:
for delta in [-1, 1]:
# Setup
ensure_dir(DIR_USER_DATA)
data = os.urandom(MASTERKEY_DB_SIZE + delta)
data += blake2b(data)
with open(self.file_name, 'wb+') as f:
with open(self.file_name, "wb+") as f:
f.write(data)
# Test
with self.assertRaises(SystemExit):
_ = MasterKey(self.operation, local_test=False)
@mock.patch('time.sleep', return_value=None)
def test_load_master_key_with_invalid_data_raises_critical_error(self, _):
@mock.patch("time.sleep", return_value=None)
def test_load_master_key_with_invalid_data_raises_critical_error(self, _) -> None:
# Setup
ensure_dir(DIR_USER_DATA)
data = os.urandom(MASTERKEY_DB_SIZE + BLAKE2_DIGEST_LENGTH)
with open(self.file_name, 'wb+') as f:
with open(self.file_name, "wb+") as f:
f.write(data)
# Test
with self.assertRaises(SystemExit):
_ = MasterKey(self.operation, local_test=False)
@mock.patch('src.common.db_masterkey.MIN_KEY_DERIVATION_TIME', 0.01)
@mock.patch('src.common.db_masterkey.MAX_KEY_DERIVATION_TIME', 0.1)
@mock.patch('os.popen', return_value=MagicMock(
read=MagicMock(return_value=MagicMock(splitlines=MagicMock(return_value=["MemAvailable 10240"])))))
@mock.patch('os.path.isfile', side_effect=[KeyboardInterrupt, False, True, False])
@mock.patch('getpass.getpass', side_effect=input_list)
@mock.patch('time.sleep', return_value=None)
def test_master_key_generation_and_load(self, *_):
@mock.patch("src.common.db_masterkey.MIN_KEY_DERIVATION_TIME", 0.01)
@mock.patch("src.common.db_masterkey.MAX_KEY_DERIVATION_TIME", 0.1)
@mock.patch(
"os.popen",
return_value=MagicMock(
read=MagicMock(
return_value=MagicMock(
splitlines=MagicMock(return_value=["MemAvailable 10240"])
)
)
),
)
@mock.patch("os.path.isfile", side_effect=[KeyboardInterrupt, False, True, False])
@mock.patch("getpass.getpass", side_effect=input_list)
@mock.patch("time.sleep", return_value=None)
def test_master_key_generation_and_load(self, *_) -> None:
with self.assertRaises(SystemExit):
MasterKey(self.operation, local_test=True)
master_key = MasterKey(self.operation, local_test=True)
self.assertIsInstance(master_key.master_key, bytes)
self.assertEqual(os.path.getsize(self.file_name), MASTERKEY_DB_SIZE + BLAKE2_DIGEST_LENGTH)
self.assertEqual(
os.path.getsize(self.file_name), MASTERKEY_DB_SIZE + BLAKE2_DIGEST_LENGTH
)
master_key2 = MasterKey(self.operation, local_test=True)
self.assertIsInstance(master_key2.master_key, bytes)
self.assertEqual(master_key.master_key, master_key2.master_key)
@mock.patch('src.common.db_masterkey.MIN_KEY_DERIVATION_TIME', 0.01)
@mock.patch('src.common.db_masterkey.MAX_KEY_DERIVATION_TIME', 0.1)
@mock.patch('os.popen', return_value=MagicMock(
read=MagicMock(return_value=MagicMock(splitlines=MagicMock(return_value=["MemAvailable 10240"])))))
@mock.patch('getpass.getpass', side_effect=['generate'])
@mock.patch('builtins.input', side_effect=[''])
@mock.patch('os.system', return_value=None)
@mock.patch('time.sleep', return_value=None)
def test_password_generation(self, *_):
@mock.patch("src.common.db_masterkey.MIN_KEY_DERIVATION_TIME", 0.01)
@mock.patch("src.common.db_masterkey.MAX_KEY_DERIVATION_TIME", 0.1)
@mock.patch("src.common.misc.reset_terminal", return_value=None)
@mock.patch(
"os.popen",
return_value=MagicMock(
read=MagicMock(
return_value=MagicMock(
splitlines=MagicMock(return_value=["MemAvailable 10240"])
)
)
),
)
@mock.patch("getpass.getpass", side_effect=["generate"])
@mock.patch("builtins.input", side_effect=[""])
@mock.patch("time.sleep", return_value=None)
def test_new_masterkey_key_type(self, *_) -> None:
master_key = MasterKey(self.operation, local_test=True)
self.assertIsInstance(master_key.master_key, bytes)
@mock.patch('src.common.db_masterkey.MasterKey.timed_key_derivation',
MagicMock(side_effect= [(KL*b'a', 0.01)]
+ 100 * [(KL*b'b', 5.0)]
+ 2 * [(KL*b'a', 2.5)]
+ [(KL*b'a', 3.0)]))
@mock.patch('os.path.isfile', side_effect=[False, True])
@mock.patch('getpass.getpass', side_effect=input_list)
@mock.patch('time.sleep', return_value=None)
def test_kd_binary_search(self, *_):
@mock.patch(
"src.common.db_masterkey.MasterKey.timed_key_derivation",
MagicMock(
side_effect=[(KL * b"a", 0.01)]
+ 100 * [(KL * b"b", 5.0)]
+ 2 * [(KL * b"a", 2.5)]
+ [(KL * b"a", 3.0)]
),
)
@mock.patch("os.path.isfile", side_effect=[False, True])
@mock.patch("getpass.getpass", side_effect=input_list)
@mock.patch("time.sleep", return_value=None)
def test_kd_binary_search(self, *_) -> None:
MasterKey(self.operation, local_test=True)
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main(exit=False)

View File

@ -24,64 +24,76 @@ import unittest
from unittest import mock
from src.common.crypto import encrypt_and_sign
from src.common.crypto import encrypt_and_sign
from src.common.db_onion import OnionService
from src.common.misc import ensure_dir, validate_onion_addr
from src.common.statics import (DIR_USER_DATA, ONION_SERVICE_PRIVATE_KEY_LENGTH,
POLY1305_TAG_LENGTH, TX, XCHACHA20_NONCE_LENGTH)
from src.common.misc import ensure_dir, validate_onion_addr
from src.common.statics import (
DIR_USER_DATA,
ONION_SERVICE_PRIVATE_KEY_LENGTH,
POLY1305_TAG_LENGTH,
TX,
XCHACHA20_NONCE_LENGTH,
)
from tests.mock_classes import MasterKey
from tests.utils import cd_unit_test, cleanup, tamper_file
from tests.utils import cd_unit_test, cleanup, tamper_file
class TestOnionService(unittest.TestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.unit_test_dir = cd_unit_test()
self.master_key = MasterKey()
self.file_name = f"{DIR_USER_DATA}{TX}_onion_db"
self.master_key = MasterKey()
self.file_name = f"{DIR_USER_DATA}{TX}_onion_db"
def tearDown(self):
def tearDown(self) -> None:
"""Post-test actions."""
cleanup(self.unit_test_dir)
@mock.patch('time.sleep', return_value=None)
def test_onion_service_key_generation_and_load(self, _):
@mock.patch("time.sleep", return_value=None)
def test_onion_service_key_generation_and_load(self, _) -> None:
onion_service = OnionService(self.master_key)
# Test new OnionService has valid attributes
self.assertIsInstance(onion_service.master_key, MasterKey)
self.assertIsInstance(onion_service.onion_private_key, bytes)
self.assertIsInstance(onion_service.master_key, MasterKey)
self.assertIsInstance(onion_service.onion_private_key, bytes)
self.assertIsInstance(onion_service.user_onion_address, str)
self.assertFalse(onion_service.is_delivered)
self.assertEqual(validate_onion_addr(onion_service.user_onion_address), '')
self.assertEqual(validate_onion_addr(onion_service.user_onion_address), "")
# Test data is stored to a database
self.assertTrue(os.path.isfile(self.file_name))
self.assertEqual(os.path.getsize(self.file_name),
XCHACHA20_NONCE_LENGTH + ONION_SERVICE_PRIVATE_KEY_LENGTH + POLY1305_TAG_LENGTH)
self.assertEqual(
os.path.getsize(self.file_name),
XCHACHA20_NONCE_LENGTH
+ ONION_SERVICE_PRIVATE_KEY_LENGTH
+ POLY1305_TAG_LENGTH,
)
# Test data can be loaded from the database
onion_service2 = OnionService(self.master_key)
self.assertIsInstance(onion_service2.onion_private_key, bytes)
self.assertEqual(onion_service.onion_private_key, onion_service2.onion_private_key)
self.assertEqual(
onion_service.onion_private_key, onion_service2.onion_private_key
)
@mock.patch('time.sleep', return_value=None)
def test_loading_invalid_onion_key_raises_critical_error(self, _):
@mock.patch("time.sleep", return_value=None)
def test_loading_invalid_onion_key_raises_critical_error(self, _) -> None:
# Setup
ct_bytes = encrypt_and_sign((ONION_SERVICE_PRIVATE_KEY_LENGTH + 1) * b'a', self.master_key.master_key)
ct_bytes = encrypt_and_sign(
(ONION_SERVICE_PRIVATE_KEY_LENGTH + 1) * b"a", self.master_key.master_key
)
ensure_dir(DIR_USER_DATA)
with open(f'{DIR_USER_DATA}{TX}_onion_db', 'wb+') as f:
with open(f"{DIR_USER_DATA}{TX}_onion_db", "wb+") as f:
f.write(ct_bytes)
# Test
with self.assertRaises(SystemExit):
OnionService(self.master_key)
@mock.patch('time.sleep', return_value=None)
def test_load_of_modified_database_raises_critical_error(self, _):
@mock.patch("time.sleep", return_value=None)
def test_load_of_modified_database_raises_critical_error(self, _) -> None:
# Write data to file
OnionService(self.master_key)
@ -93,17 +105,22 @@ class TestOnionService(unittest.TestCase):
with self.assertRaises(SystemExit):
OnionService(self.master_key)
@mock.patch('os.getrandom', side_effect=[ 1 * b'a', # Initial confirmation code
32 * b'a', # ed25519 key
24 * b'a', # Nonce
1 * b'b']) # New confirmation code (different)
@mock.patch('time.sleep', return_value=None)
def test_confirmation_code_generation(self, *_):
@mock.patch(
"os.getrandom",
side_effect=[
1 * b"a", # Initial confirmation code
32 * b"a", # ed25519 key
24 * b"a", # Nonce
1 * b"b",
],
) # New confirmation code (different)
@mock.patch("time.sleep", return_value=None)
def test_confirmation_code_generation(self, *_) -> None:
onion_service = OnionService(self.master_key)
conf_code = onion_service.conf_code
conf_code = onion_service.conf_code
onion_service.new_confirmation_code()
self.assertNotEqual(conf_code, onion_service.conf_code)
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main(exit=False)

View File

@ -25,40 +25,48 @@ import unittest
from unittest import mock
from src.common.db_settings import Settings
from src.common.statics import CLEAR_ENTIRE_SCREEN, CURSOR_LEFT_UP_CORNER, DIR_USER_DATA, RX, SETTING_LENGTH, TX
from src.common.statics import (
CLEAR_ENTIRE_SCREEN,
CURSOR_LEFT_UP_CORNER,
DIR_USER_DATA,
RX,
SETTING_LENGTH,
TX,
)
from tests.mock_classes import ContactList, create_group, GroupList, MasterKey
from tests.utils import cd_unit_test, cleanup, tamper_file, TFCTestCase
from tests.utils import cd_unit_test, cleanup, tamper_file, TFCTestCase
class TestSettings(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.unit_test_dir = cd_unit_test()
self.file_name = f"{DIR_USER_DATA}{TX}_settings"
self.master_key = MasterKey()
self.settings = Settings(self.master_key, operation=TX, local_test=False)
self.contact_list = ContactList(nicks=[f'contact_{n}' for n in range(18)])
self.group_list = GroupList(groups=[f'group_{n}' for n in range(18)])
self.group_list.groups[0] = create_group('group_0', [f'contact_{n}' for n in range(18)])
self.args = self.contact_list, self.group_list
self.unit_test_dir = cd_unit_test()
self.file_name = f"{DIR_USER_DATA}{TX}_settings"
self.master_key = MasterKey()
self.settings = Settings(self.master_key, operation=TX, local_test=False)
self.contact_list = ContactList(nicks=[f"contact_{n}" for n in range(18)])
self.group_list = GroupList(groups=[f"group_{n}" for n in range(18)])
self.group_list.groups[0] = create_group(
"group_0", [f"contact_{n}" for n in range(18)]
)
self.args = self.contact_list, self.group_list
def tearDown(self):
def tearDown(self) -> None:
"""Post-test actions."""
cleanup(self.unit_test_dir)
def test_invalid_type_raises_critical_error_on_store(self):
self.settings.tm_random_delay = b'bytestring'
def test_invalid_type_raises_critical_error_on_store(self) -> None:
self.settings.tm_random_delay = b"bytestring"
with self.assertRaises(SystemExit):
self.settings.store_settings()
def test_invalid_type_raises_critical_error_on_load(self):
def test_invalid_type_raises_critical_error_on_load(self) -> None:
with self.assertRaises(SystemExit):
self.settings.nc_bypass_messages = b'bytestring'
self.settings.nc_bypass_messages = b"bytestring"
self.settings.load_settings()
def test_store_and_load_tx_settings(self):
def test_store_and_load_tx_settings(self) -> None:
# Test store
self.assertFalse(self.settings.disable_gui_dialog)
self.settings.disable_gui_dialog = True
@ -69,7 +77,7 @@ class TestSettings(TFCTestCase):
settings2 = Settings(self.master_key, TX, False)
self.assertTrue(settings2.disable_gui_dialog)
def test_store_and_load_rx_settings(self):
def test_store_and_load_rx_settings(self) -> None:
# Setup
self.settings = Settings(self.master_key, operation=RX, local_test=False)
@ -83,76 +91,175 @@ class TestSettings(TFCTestCase):
settings2 = Settings(self.master_key, RX, False)
self.assertTrue(settings2.disable_gui_dialog)
def test_load_of_modified_database_raises_critical_error(self):
def test_load_of_modified_database_raises_critical_error(self) -> None:
# Store settings to database
self.settings.store_settings()
# Test reading from database works normally
self.assertIsInstance(Settings(self.master_key, operation=TX, local_test=False), Settings)
self.assertIsInstance(
Settings(self.master_key, operation=TX, local_test=False), Settings
)
# Test loading of the tampered database raises CriticalError
tamper_file(self.file_name, tamper_size=1)
with self.assertRaises(SystemExit):
Settings(self.master_key, operation=TX, local_test=False)
def test_invalid_type_raises_critical_error_when_changing_settings(self):
self.settings.traffic_masking = b'bytestring'
def test_invalid_type_raises_critical_error_when_changing_settings(self) -> None:
self.settings.traffic_masking = b"bytestring"
with self.assertRaises(SystemExit):
self.assertIsNone(self.settings.change_setting('traffic_masking', 'True', *self.args))
self.assertIsNone(
self.settings.change_setting("traffic_masking", "True", *self.args)
)
def test_change_settings(self):
self.assert_fr("Error: Invalid setting value 'Falsee'.",
self.settings.change_setting, 'disable_gui_dialog', 'Falsee', *self.args)
self.assert_fr("Error: Invalid setting value '1.1'.",
self.settings.change_setting, 'max_number_of_group_members', '1.1', *self.args)
self.assert_fr("Error: Invalid setting value '18446744073709551616'.",
self.settings.change_setting, 'max_number_of_contacts', str(2 ** 64), *self.args)
self.assert_fr("Error: Invalid setting value '-1.1'.",
self.settings.change_setting, 'tm_static_delay', '-1.1', *self.args)
self.assert_fr("Error: Invalid setting value 'True'.",
self.settings.change_setting, 'tm_static_delay', 'True', *self.args)
def test_change_settings(self) -> None:
self.assert_se(
"Error: Invalid setting value 'Falsee'.",
self.settings.change_setting,
"disable_gui_dialog",
"Falsee",
*self.args,
)
self.assert_se(
"Error: Invalid setting value '1.1'.",
self.settings.change_setting,
"max_number_of_group_members",
"1.1",
*self.args,
)
self.assert_se(
"Error: Invalid setting value '18446744073709551616'.",
self.settings.change_setting,
"max_number_of_contacts",
str(2 ** 64),
*self.args,
)
self.assert_se(
"Error: Invalid setting value '-1.1'.",
self.settings.change_setting,
"tm_static_delay",
"-1.1",
*self.args,
)
self.assert_se(
"Error: Invalid setting value 'True'.",
self.settings.change_setting,
"tm_static_delay",
"True",
*self.args,
)
self.assertIsNone(self.settings.change_setting('traffic_masking', 'True', *self.args))
self.assertIsNone(self.settings.change_setting('max_number_of_group_members', '100', *self.args))
self.assertIsNone(
self.settings.change_setting("traffic_masking", "True", *self.args)
)
self.assertIsNone(
self.settings.change_setting(
"max_number_of_group_members", "100", *self.args
)
)
@mock.patch('builtins.input', side_effect=['No', 'Yes'])
def test_validate_key_value_pair(self, _):
self.assert_fr("Error: Database padding settings must be divisible by 10.",
self.settings.validate_key_value_pair, 'max_number_of_group_members', 0, *self.args)
self.assert_fr("Error: Database padding settings must be divisible by 10.",
self.settings.validate_key_value_pair, 'max_number_of_group_members', 18, *self.args)
self.assert_fr("Error: Database padding settings must be divisible by 10.",
self.settings.validate_key_value_pair, 'max_number_of_groups', 18, *self.args)
self.assert_fr("Error: Database padding settings must be divisible by 10.",
self.settings.validate_key_value_pair, 'max_number_of_contacts', 18, *self.args)
self.assert_fr("Error: Can't set the max number of members lower than 20.",
self.settings.validate_key_value_pair, 'max_number_of_group_members', 10, *self.args)
self.assert_fr("Error: Can't set the max number of groups lower than 20.",
self.settings.validate_key_value_pair, 'max_number_of_groups', 10, *self.args)
self.assert_fr("Error: Can't set the max number of contacts lower than 20.",
self.settings.validate_key_value_pair, 'max_number_of_contacts', 10, *self.args)
self.assert_fr("Error: Too small value for message notify duration.",
self.settings.validate_key_value_pair, 'new_message_notify_duration', 0.04, *self.args)
self.assert_fr("Error: Can't set static delay lower than 0.1.",
self.settings.validate_key_value_pair, 'tm_static_delay', 0.01, *self.args)
self.assert_fr("Error: Can't set random delay lower than 0.1.",
self.settings.validate_key_value_pair, 'tm_random_delay', 0.01, *self.args)
self.assert_fr("Aborted traffic masking setting change.",
self.settings.validate_key_value_pair, 'tm_random_delay', 0.1, *self.args)
@mock.patch("builtins.input", side_effect=["No", "Yes"])
def test_validate_key_value_pair(self, _) -> None:
self.assert_se(
"Error: Database padding settings must be divisible by 10.",
self.settings.validate_key_value_pair,
"max_number_of_group_members",
0,
*self.args,
)
self.assert_se(
"Error: Database padding settings must be divisible by 10.",
self.settings.validate_key_value_pair,
"max_number_of_group_members",
18,
*self.args,
)
self.assert_se(
"Error: Database padding settings must be divisible by 10.",
self.settings.validate_key_value_pair,
"max_number_of_groups",
18,
*self.args,
)
self.assert_se(
"Error: Database padding settings must be divisible by 10.",
self.settings.validate_key_value_pair,
"max_number_of_contacts",
18,
*self.args,
)
self.assert_se(
"Error: Can't set the max number of members lower than 20.",
self.settings.validate_key_value_pair,
"max_number_of_group_members",
10,
*self.args,
)
self.assert_se(
"Error: Can't set the max number of groups lower than 20.",
self.settings.validate_key_value_pair,
"max_number_of_groups",
10,
*self.args,
)
self.assert_se(
"Error: Can't set the max number of contacts lower than 20.",
self.settings.validate_key_value_pair,
"max_number_of_contacts",
10,
*self.args,
)
self.assert_se(
"Error: Too small value for message notify duration.",
self.settings.validate_key_value_pair,
"new_message_notify_duration",
0.04,
*self.args,
)
self.assert_se(
"Error: Can't set static delay lower than 0.1.",
self.settings.validate_key_value_pair,
"tm_static_delay",
0.01,
*self.args,
)
self.assert_se(
"Error: Can't set random delay lower than 0.1.",
self.settings.validate_key_value_pair,
"tm_random_delay",
0.01,
*self.args,
)
self.assert_se(
"Aborted traffic masking setting change.",
self.settings.validate_key_value_pair,
"tm_random_delay",
0.1,
*self.args,
)
self.assertIsNone(self.settings.validate_key_value_pair("serial_baudrate", 9600, *self.args))
self.assertIsNone(self.settings.validate_key_value_pair("tm_static_delay", 1, *self.args))
self.assertIsNone(
self.settings.validate_key_value_pair("serial_baudrate", 9600, *self.args)
)
self.assertIsNone(
self.settings.validate_key_value_pair("tm_static_delay", 1, *self.args)
)
@mock.patch('shutil.get_terminal_size', return_value=(64, 64))
def test_too_narrow_terminal_raises_fr_when_printing_settings(self, _):
@mock.patch("shutil.get_terminal_size", return_value=(64, 64))
def test_too_narrow_terminal_raises_fr_when_printing_settings(self, _) -> None:
# Test
self.assert_fr("Error: Screen width is too small.", self.settings.print_settings)
self.assert_se(
"Error: Screen width is too small.", self.settings.print_settings
)
def test_print_settings(self):
def test_print_settings(self) -> None:
self.settings.max_number_of_group_members = 30
self.settings.log_messages_by_default = True
self.settings.tm_static_delay = 10.2
self.assert_prints(CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER + """\
self.settings.log_messages_by_default = True
self.settings.tm_static_delay = 10.2
self.assert_prints(
CLEAR_ENTIRE_SCREEN
+ CURSOR_LEFT_UP_CORNER
+ """\
Setting name Current value Default value Description
@ -244,8 +351,10 @@ max_decompress_size 100000000 100000000 Max size
decompressing
file
""", self.settings.print_settings)
""",
self.settings.print_settings,
)
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main(exit=False)

View File

@ -25,57 +25,90 @@ import unittest
from datetime import datetime
from src.common.encoding import b58encode, bool_to_bytes, double_to_bytes, str_to_bytes, int_to_bytes
from src.common.encoding import b58decode, bytes_to_bool, bytes_to_double, bytes_to_str, bytes_to_int
from src.common.encoding import onion_address_to_pub_key, unicode_padding, pub_key_to_short_address, b85encode
from src.common.encoding import pub_key_to_onion_address, rm_padding_str, bytes_to_timestamp, b10encode
from src.common.statics import (ENCODED_BOOLEAN_LENGTH, ENCODED_FLOAT_LENGTH, ENCODED_INTEGER_LENGTH,
FINGERPRINT_LENGTH, ONION_SERVICE_PUBLIC_KEY_LENGTH, PADDED_UTF32_STR_LENGTH,
PADDING_LENGTH, SYMMETRIC_KEY_LENGTH, TFC_PUBLIC_KEY_LENGTH, TRUNC_ADDRESS_LENGTH)
from src.common.encoding import (
b58encode,
bool_to_bytes,
double_to_bytes,
str_to_bytes,
int_to_bytes,
)
from src.common.encoding import (
b58decode,
bytes_to_bool,
bytes_to_double,
bytes_to_str,
bytes_to_int,
)
from src.common.encoding import (
onion_address_to_pub_key,
unicode_padding,
pub_key_to_short_address,
b85encode,
)
from src.common.encoding import (
pub_key_to_onion_address,
rm_padding_str,
bytes_to_timestamp,
b10encode,
)
from src.common.statics import (
ENCODED_BOOLEAN_LENGTH,
ENCODED_FLOAT_LENGTH,
ENCODED_INTEGER_LENGTH,
FINGERPRINT_LENGTH,
ONION_SERVICE_PUBLIC_KEY_LENGTH,
PADDED_UTF32_STR_LENGTH,
PADDING_LENGTH,
SYMMETRIC_KEY_LENGTH,
TFC_PUBLIC_KEY_LENGTH,
TRUNC_ADDRESS_LENGTH,
)
class TestBase58EncodeAndDecode(unittest.TestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.key = SYMMETRIC_KEY_LENGTH * b'\x01'
self.key = SYMMETRIC_KEY_LENGTH * b"\x01"
def test_encoding_and_decoding_of_random_local_keys(self):
def test_encoding_and_decoding_of_random_local_keys(self) -> None:
for _ in range(100):
key = os.urandom(SYMMETRIC_KEY_LENGTH)
key = os.urandom(SYMMETRIC_KEY_LENGTH)
encoded = b58encode(key)
decoded = b58decode(encoded)
self.assertEqual(key, decoded)
def test_encoding_and_decoding_of_random_public_keys(self):
def test_encoding_and_decoding_of_random_public_keys(self) -> None:
for _ in range(100):
key = os.urandom(TFC_PUBLIC_KEY_LENGTH)
encoded = b58encode(key, public_key=True)
key = os.urandom(TFC_PUBLIC_KEY_LENGTH)
encoded = b58encode(key, public_key=True)
decoded = b58decode(encoded, public_key=True)
self.assertEqual(key, decoded)
def test_invalid_decoding(self):
encoded = b58encode(self.key) # 5HpjE2Hs7vjU4SN3YyPQCdhzCu92WoEeuE6PWNuiPyTu3ESGnzn
changed = encoded[:-1] + 'a'
def test_invalid_decoding(self) -> None:
encoded = b58encode(
self.key
) # 5HpjE2Hs7vjU4SN3YyPQCdhzCu92WoEeuE6PWNuiPyTu3ESGnzn
changed = encoded[:-1] + "a"
with self.assertRaises(ValueError):
b58decode(changed)
def test_public_keys_raise_value_error_when_expecting_local_key(self):
def test_public_keys_raise_value_error_when_expecting_local_key(self) -> None:
b58_pub_key = b58encode(self.key)
with self.assertRaises(ValueError):
b58decode(b58_pub_key, public_key=True)
def test_local_keys_raise_value_error_when_expecting_public_key(self):
def test_local_keys_raise_value_error_when_expecting_public_key(self) -> None:
b58_file_key = b58encode(self.key, public_key=True)
with self.assertRaises(ValueError):
b58decode(b58_file_key)
def test_bitcoin_wif_test_vectors(self):
def test_bitcoin_wif_test_vectors(self) -> None:
"""Test vectors are available at
https://en.bitcoin.it/wiki/Wallet_import_format
"""
byte_key = bytes.fromhex("0C28FCA386C7A227600B2FE50B7CAE11"
"EC86D3BF1FBE471BE89827E19D72AA1D")
byte_key = bytes.fromhex(
"0C28FCA386C7A227600B2FE50B7CAE11" "EC86D3BF1FBE471BE89827E19D72AA1D"
)
b58_key = "5HueCGU8rMjxEXxiPuD5BDku4MkFqeZyd4dZ1jvhTVqvbTLvyTJ"
@ -84,97 +117,99 @@ class TestBase58EncodeAndDecode(unittest.TestCase):
class TestBase85Encode(unittest.TestCase):
def test_b85encode(self):
def test_b85encode(self) -> None:
message = os.urandom(100)
self.assertEqual(b85encode(message),
base64.b85encode(message).decode())
self.assertEqual(b85encode(message), base64.b85encode(message).decode())
class TestBase10Encode(unittest.TestCase):
def test_b10encode(self):
self.assertEqual(b10encode(FINGERPRINT_LENGTH * b'a'),
'44046402572626160612103472728795008085361523578694645928734845681441465000289')
def test_b10encode(self) -> None:
self.assertEqual(
b10encode(FINGERPRINT_LENGTH * b"a"),
"44046402572626160612103472728795008085361523578694645928734845681441465000289",
)
class TestUnicodePadding(unittest.TestCase):
def test_padding(self):
def test_padding(self) -> None:
for s in range(0, PADDING_LENGTH):
string = s * 'm'
string = s * "m"
padded = unicode_padding(string)
self.assertEqual(len(padded), PADDING_LENGTH)
# Verify removal of padding doesn't alter the string
self.assertEqual(string, padded[:-ord(padded[-1:])])
self.assertEqual(string, padded[: -ord(padded[-1:])])
def test_oversize_msg_raises_critical_error(self):
for s in range(PADDING_LENGTH, PADDING_LENGTH+1):
def test_oversize_msg_raises_critical_error(self) -> None:
for s in range(PADDING_LENGTH, PADDING_LENGTH + 1):
with self.assertRaises(SystemExit):
unicode_padding(s * 'm')
unicode_padding(s * "m")
class TestRmPaddingStr(unittest.TestCase):
def test_padding_removal(self):
def test_padding_removal(self) -> None:
for i in range(0, 1000):
string = i * 'm'
string = i * "m"
length = PADDING_LENGTH - (len(string) % PADDING_LENGTH)
padded = string + length * chr(length)
self.assertEqual(rm_padding_str(padded), string)
class TestConversions(unittest.TestCase):
def test_conversion_back_and_forth(self):
def test_conversion_back_and_forth(self) -> None:
pub_key = os.urandom(SYMMETRIC_KEY_LENGTH)
self.assertEqual(onion_address_to_pub_key(pub_key_to_onion_address(pub_key)), pub_key)
self.assertEqual(
onion_address_to_pub_key(pub_key_to_onion_address(pub_key)), pub_key
)
def test_pub_key_to_short_addr(self):
self.assertEqual(len(pub_key_to_short_address(bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH))),
TRUNC_ADDRESS_LENGTH)
def test_pub_key_to_short_addr(self) -> None:
self.assertEqual(
len(pub_key_to_short_address(bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH))),
TRUNC_ADDRESS_LENGTH,
)
self.assertIsInstance(pub_key_to_short_address(bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH)), str)
self.assertIsInstance(
pub_key_to_short_address(bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH)), str
)
def test_bool_to_bytes(self):
self.assertEqual( bool_to_bytes(False), b'\x00')
self.assertEqual( bool_to_bytes(True), b'\x01')
def test_bool_to_bytes(self) -> None:
self.assertEqual(bool_to_bytes(False), b"\x00")
self.assertEqual(bool_to_bytes(True), b"\x01")
self.assertEqual(len(bool_to_bytes(True)), ENCODED_BOOLEAN_LENGTH)
def test_bytes_to_bool(self):
self.assertEqual(bytes_to_bool(b'\x00'), False)
self.assertEqual(bytes_to_bool(b'\x01'), True)
def test_bytes_to_bool(self) -> None:
self.assertEqual(bytes_to_bool(b"\x00"), False)
self.assertEqual(bytes_to_bool(b"\x01"), True)
def test_int_to_bytes(self):
self.assertEqual( int_to_bytes(1), b'\x00\x00\x00\x00\x00\x00\x00\x01')
def test_int_to_bytes(self) -> None:
self.assertEqual(int_to_bytes(1), b"\x00\x00\x00\x00\x00\x00\x00\x01")
self.assertEqual(len(int_to_bytes(1)), ENCODED_INTEGER_LENGTH)
def test_bytes_to_int(self):
self.assertEqual(bytes_to_int(b'\x00\x00\x00\x00\x00\x00\x00\x01'), 1)
def test_bytes_to_int(self) -> None:
self.assertEqual(bytes_to_int(b"\x00\x00\x00\x00\x00\x00\x00\x01"), 1)
def test_double_to_bytes(self):
self.assertEqual( double_to_bytes(1.0), bytes.fromhex('000000000000f03f'))
self.assertEqual( double_to_bytes(1.1), bytes.fromhex('9a9999999999f13f'))
def test_double_to_bytes(self) -> None:
self.assertEqual(double_to_bytes(1.0), bytes.fromhex("000000000000f03f"))
self.assertEqual(double_to_bytes(1.1), bytes.fromhex("9a9999999999f13f"))
self.assertEqual(len(double_to_bytes(1.1)), ENCODED_FLOAT_LENGTH)
def test_bytes_to_double(self):
self.assertEqual(bytes_to_double(bytes.fromhex('000000000000f03f')), 1.0)
self.assertEqual(bytes_to_double(bytes.fromhex('9a9999999999f13f')), 1.1)
def test_bytes_to_double(self) -> None:
self.assertEqual(bytes_to_double(bytes.fromhex("000000000000f03f")), 1.0)
self.assertEqual(bytes_to_double(bytes.fromhex("9a9999999999f13f")), 1.1)
def test_str_to_bytes(self):
encoded = str_to_bytes('test')
def test_str_to_bytes(self) -> None:
encoded = str_to_bytes("test")
self.assertIsInstance(encoded, bytes)
self.assertEqual(len(encoded), PADDED_UTF32_STR_LENGTH)
def test_bytes_to_str(self):
encoded = str_to_bytes('test')
self.assertEqual(bytes_to_str(encoded), 'test')
def test_bytes_to_str(self) -> None:
encoded = str_to_bytes("test")
self.assertEqual(bytes_to_str(encoded), "test")
def test_bytes_to_timestamp(self):
encoded = bytes.fromhex('00000000')
def test_bytes_to_timestamp(self) -> None:
encoded = bytes.fromhex("00000000")
self.assertIsInstance(bytes_to_timestamp(encoded), datetime)
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main(exit=False)

View File

@ -21,42 +21,39 @@ along with TFC. If not, see <https://www.gnu.org/licenses/>.
import unittest
from src.common.exceptions import CriticalError, FunctionReturn, graceful_exit
from tests.mock_classes import RxWindow
from src.common.exceptions import CriticalError, SoftError, graceful_exit
from tests.mock_classes import RxWindow
class TestCriticalError(unittest.TestCase):
def test_critical_error(self):
def test_critical_error(self) -> None:
with self.assertRaises(SystemExit):
CriticalError('test')
CriticalError("test")
class TestFunctionReturn(unittest.TestCase):
class TestSoftError(unittest.TestCase):
def test_function_return(self) -> None:
error = SoftError("test message")
self.assertEqual(error.message, "test message")
def test_function_return(self):
error = FunctionReturn('test message')
self.assertEqual(error.message, 'test message')
error = SoftError("test message", head_clear=True)
self.assertEqual(error.message, "test message")
error = FunctionReturn('test message', head_clear=True)
self.assertEqual(error.message, 'test message')
error = SoftError("test message", tail_clear=True)
self.assertEqual(error.message, "test message")
error = FunctionReturn('test message', tail_clear=True)
self.assertEqual(error.message, 'test message')
error = FunctionReturn('test message', window=RxWindow())
self.assertEqual(error.message, 'test message')
error = SoftError("test message", window=RxWindow())
self.assertEqual(error.message, "test message")
class TestGracefulExit(unittest.TestCase):
def test_graceful_exit(self):
def test_graceful_exit(self) -> None:
with self.assertRaises(SystemExit):
graceful_exit('test message')
graceful_exit('test message', clear=False)
graceful_exit('test message', exit_code=1)
graceful_exit('test message', exit_code=2)
graceful_exit("test message")
graceful_exit("test message", clear=False)
graceful_exit("test message", exit_code=1)
graceful_exit("test message", exit_code=2)
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main(exit=False)

View File

@ -23,228 +23,311 @@ import os
import unittest
import socket
from datetime import datetime
from unittest import mock
from datetime import datetime
from unittest import mock
from unittest.mock import MagicMock
from serial import SerialException
from src.common.crypto import blake2b
from src.common.gateway import gateway_loop, Gateway, GatewaySettings
from src.common.misc import ensure_dir
from src.common.crypto import blake2b
from src.common.gateway import gateway_loop, Gateway, GatewaySettings
from src.common.misc import ensure_dir
from src.common.reed_solomon import RSCodec
from src.common.statics import DIR_USER_DATA, GATEWAY_QUEUE, NC, PACKET_CHECKSUM_LENGTH, RX, TX
from src.common.statics import (
DIR_USER_DATA,
GATEWAY_QUEUE,
NC,
PACKET_CHECKSUM_LENGTH,
RX,
TX,
)
from tests.mock_classes import Settings
from tests.utils import cd_unit_test, cleanup, gen_queue_dict, tear_queues, TFCTestCase
from tests.utils import cd_unit_test, cleanup, gen_queue_dict, tear_queues, TFCTestCase
class TestGatewayLoop(unittest.TestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.unit_test_dir = cd_unit_test()
self.queues = gen_queue_dict()
self.queues = gen_queue_dict()
def tearDown(self):
def tearDown(self) -> None:
"""Post-test actions."""
cleanup(self.unit_test_dir)
tear_queues(self.queues)
@mock.patch('multiprocessing.connection.Listener',
return_value=MagicMock(accept=lambda: MagicMock(recv=MagicMock(return_value='message'))))
def test_loop(self, _):
@mock.patch(
"multiprocessing.connection.Listener",
return_value=MagicMock(
accept=lambda: MagicMock(recv=MagicMock(return_value="message"))
),
)
def test_loop(self, _) -> None:
gateway = Gateway(operation=RX, local_test=True, dd_sockets=False)
self.assertIsNone(gateway_loop(self.queues, gateway, unit_test=True))
data = self.queues[GATEWAY_QUEUE].get()
self.assertIsInstance(data[0], datetime)
self.assertEqual(data[1], 'message')
self.assertEqual(data[1], "message")
class TestGatewaySerial(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.unit_test_dir = cd_unit_test()
self.settings = Settings(session_usb_serial_adapter=True)
self.settings = Settings(session_usb_serial_adapter=True)
def tearDown(self):
def tearDown(self) -> None:
"""Post-test actions."""
cleanup(self.unit_test_dir)
@mock.patch('time.sleep', return_value=None)
@mock.patch('serial.Serial', return_value=MagicMock())
@mock.patch('os.listdir', side_effect=[['ttyUSB0'], ['ttyUSB0']])
@mock.patch('builtins.input', side_effect=['Yes'])
def test_search_and_establish_serial(self, *_):
@mock.patch("time.sleep", return_value=None)
@mock.patch("serial.Serial", return_value=MagicMock())
@mock.patch("os.listdir", side_effect=[["ttyUSB0"], ["ttyUSB0"]])
@mock.patch("builtins.input", side_effect=["Yes"])
def test_search_and_establish_serial(self, *_) -> None:
gateway = Gateway(operation=RX, local_test=False, dd_sockets=False)
self.assertIsInstance(gateway.rs, RSCodec)
self.assertIs(gateway.tx_serial, gateway.rx_serial)
@mock.patch('time.sleep', return_value=None)
@mock.patch('serial.Serial', side_effect=SerialException)
@mock.patch('os.listdir', side_effect=[['ttyUSB0'], ['ttyUSB0']])
@mock.patch('builtins.input', side_effect=['Yes'])
def test_serialexception_during_establish_exists(self, *_):
@mock.patch("time.sleep", return_value=None)
@mock.patch("serial.Serial", side_effect=SerialException)
@mock.patch("os.listdir", side_effect=[["ttyUSB0"], ["ttyUSB0"]])
@mock.patch("builtins.input", side_effect=["Yes"])
def test_serialexception_during_establish_exists(self, *_) -> None:
with self.assertRaises(SystemExit):
Gateway(operation=RX, local_test=False, dd_sockets=False)
@mock.patch('time.sleep', return_value=None)
@mock.patch('serial.Serial', return_value=MagicMock(write=MagicMock(side_effect=[SerialException, None])))
@mock.patch('os.listdir', side_effect=[['ttyUSB0'], ['ttyUSB0'], ['ttyUSB0']])
@mock.patch('builtins.input', side_effect=['Yes'])
def test_write_serial_(self, *_):
@mock.patch("time.sleep", return_value=None)
@mock.patch(
"serial.Serial",
return_value=MagicMock(write=MagicMock(side_effect=[SerialException, None])),
)
@mock.patch("os.listdir", side_effect=[["ttyUSB0"], ["ttyUSB0"], ["ttyUSB0"]])
@mock.patch("builtins.input", side_effect=["Yes"])
def test_write_serial_(self, *_) -> None:
gateway = Gateway(operation=RX, local_test=False, dd_sockets=False)
self.assertIsNone(gateway.write(b"message"))
@mock.patch('time.sleep', return_value=None)
@mock.patch('serial.Serial', return_value=MagicMock(
read_all=MagicMock(side_effect=[KeyboardInterrupt, SerialException, b'', b'1', b'2', b''])))
@mock.patch('os.listdir', side_effect=[['ttyUSB0'], ['ttyUSB0'], ['ttyUSB0']])
@mock.patch('builtins.input', side_effect=['Yes'])
def test_serial_uninitialized_serial_interface_for_read_raises_critical_error(self, *_):
@mock.patch("time.sleep", return_value=None)
@mock.patch(
"serial.Serial",
return_value=MagicMock(
read_all=MagicMock(
side_effect=[KeyboardInterrupt, SerialException, b"", b"1", b"2", b""]
)
),
)
@mock.patch("os.listdir", side_effect=[["ttyUSB0"], ["ttyUSB0"], ["ttyUSB0"]])
@mock.patch("builtins.input", side_effect=["Yes"])
def test_serial_uninitialized_serial_interface_for_read_raises_critical_error(
self, *_
) -> None:
# Setup
gateway = Gateway(operation=RX, local_test=False, dd_sockets=False)
gateway = Gateway(operation=RX, local_test=False, dd_sockets=False)
gateway.rx_serial = None
# Test
with self.assertRaises(SystemExit):
gateway.read()
@mock.patch('time.monotonic', side_effect=[1, 2, 3])
@mock.patch('time.sleep', return_value=None)
@mock.patch('serial.Serial', return_value=MagicMock(
read_all=MagicMock(side_effect=[KeyboardInterrupt, SerialException, b'', b'1', b'2', b''])))
@mock.patch('os.listdir', side_effect=[['ttyUSB0'], ['ttyUSB0'], ['ttyUSB0']])
@mock.patch('builtins.input', side_effect=['Yes'])
def test_read_serial(self, *_):
gateway = Gateway(operation=RX, local_test=False, dd_sockets=False)
data = gateway.read()
self.assertEqual(data, b'12')
@mock.patch("time.sleep", return_value=None)
@mock.patch("os.listdir", side_effect=[["ttyUSB0"], ["ttyUSB0"], ["ttyUSB0"]])
@mock.patch("multiprocessing.connection.Listener", MagicMock())
@mock.patch("builtins.input", side_effect=["Yes"])
def test_serial_uninitialized_socket_interface_for_read_raises_critical_error(
self, *_
) -> None:
# Setup
gateway = Gateway(operation=RX, local_test=True, dd_sockets=False)
gateway.rx_socket = None
@mock.patch('time.sleep', return_value=None)
@mock.patch('serial.Serial', return_value=MagicMock())
@mock.patch('os.listdir', side_effect=[['ttyUSB0'], ['ttyUSB0']])
@mock.patch('builtins.input', side_effect=['Yes'])
def test_add_error_correction(self, *_):
# Test
with self.assertRaises(SystemExit):
gateway.read()
@mock.patch("time.monotonic", side_effect=[1, 2, 3])
@mock.patch("time.sleep", return_value=None)
@mock.patch(
"multiprocessing.connection.Listener",
return_value=MagicMock(
accept=MagicMock(return_value=MagicMock(recv=MagicMock(return_value=b"12")))
),
)
@mock.patch("os.listdir", side_effect=[["ttyUSB0"], ["ttyUSB0"], ["ttyUSB0"]])
@mock.patch("builtins.input", side_effect=["Yes"])
def test_read_socket(self, *_) -> None:
gateway = Gateway(operation=RX, local_test=True, dd_sockets=False)
data = gateway.read()
self.assertEqual(data, b"12")
@mock.patch("time.monotonic", side_effect=[1, 2, 3])
@mock.patch("time.sleep", return_value=None)
@mock.patch(
"serial.Serial",
return_value=MagicMock(
read_all=MagicMock(
side_effect=[KeyboardInterrupt, SerialException, b"", b"1", b"2", b""]
)
),
)
@mock.patch("os.listdir", side_effect=[["ttyUSB0"], ["ttyUSB0"], ["ttyUSB0"]])
@mock.patch("builtins.input", side_effect=["Yes"])
def test_read_serial(self, *_) -> None:
gateway = Gateway(operation=RX, local_test=False, dd_sockets=False)
packet = b'packet'
data = gateway.read()
self.assertEqual(data, b"12")
@mock.patch("time.sleep", return_value=None)
@mock.patch("serial.Serial", return_value=MagicMock())
@mock.patch("os.listdir", side_effect=[["ttyUSB0"], ["ttyUSB0"]])
@mock.patch("builtins.input", side_effect=["Yes"])
def test_add_error_correction(self, *_) -> None:
gateway = Gateway(operation=RX, local_test=False, dd_sockets=False)
packet = b"packet"
# Test BLAKE2b based checksum
gateway.settings.session_serial_error_correction = 0
self.assertEqual(gateway.add_error_correction(packet,),
packet + blake2b(packet, digest_size=PACKET_CHECKSUM_LENGTH))
self.assertEqual(
gateway.add_error_correction(packet,),
packet + blake2b(packet, digest_size=PACKET_CHECKSUM_LENGTH),
)
# Test Reed-Solomon erasure code
gateway.settings.session_serial_error_correction = 5
gateway.rs = RSCodec(gateway.settings.session_serial_error_correction)
self.assertEqual(gateway.add_error_correction(packet),
gateway.rs.encode(packet))
self.assertEqual(
gateway.add_error_correction(packet), gateway.rs.encode(packet)
)
@mock.patch('time.sleep', return_value=None)
@mock.patch('serial.Serial', return_value=MagicMock())
@mock.patch('os.listdir', side_effect=[['ttyUSB0'], ['ttyUSB0']])
@mock.patch('builtins.input', side_effect=['Yes'])
def test_detect_errors(self, *_):
@mock.patch("time.sleep", return_value=None)
@mock.patch("serial.Serial", return_value=MagicMock())
@mock.patch("os.listdir", side_effect=[["ttyUSB0"], ["ttyUSB0"]])
@mock.patch("builtins.input", side_effect=["Yes"])
def test_detect_errors(self, *_) -> None:
gateway = Gateway(operation=RX, local_test=False, dd_sockets=False)
packet = b'packet'
packet = b"packet"
# Test BLAKE2b based checksum
gateway.settings.session_serial_error_correction = 0
self.assertEqual(gateway.detect_errors(gateway.add_error_correction(packet)),
packet)
self.assertEqual(
gateway.detect_errors(gateway.add_error_correction(packet)), packet
)
# Test unrecoverable error raises FR
self.assert_fr("Warning! Received packet had an invalid checksum.",
gateway.detect_errors, 300 * b'a')
self.assert_se(
"Warning! Received packet had an invalid checksum.",
gateway.detect_errors,
300 * b"a",
)
# Test Reed-Solomon erasure code
gateway.settings.session_serial_error_correction = 5
gateway.rs = RSCodec(gateway.settings.session_serial_error_correction)
self.assertEqual(gateway.detect_errors(gateway.add_error_correction(packet)),
packet)
self.assertEqual(
gateway.detect_errors(gateway.add_error_correction(packet)), packet
)
# Test unrecoverable error raises FR
self.assert_fr("Error: Reed-Solomon failed to correct errors in the received packet.",
gateway.detect_errors, 300 * b'a')
self.assert_se(
"Error: Reed-Solomon failed to correct errors in the received packet.",
gateway.detect_errors,
300 * b"a",
)
@mock.patch('time.sleep', return_value=None)
@mock.patch('serial.Serial', return_value=MagicMock())
@mock.patch('os.listdir', side_effect=[['ttyUSB0'], ['ttyUSB0'], [''], ['ttyUSB0'], ['ttyS0'], ['']])
@mock.patch('builtins.input', side_effect=['Yes'])
def test_search_serial_interfaces(self, *_):
@mock.patch("time.sleep", return_value=None)
@mock.patch("serial.Serial", return_value=MagicMock())
@mock.patch(
"os.listdir",
side_effect=[["ttyUSB0"], ["ttyUSB0"], [""], ["ttyUSB0"], ["ttyS0"], [""]],
)
@mock.patch("builtins.input", side_effect=["Yes"])
def test_search_serial_interfaces(self, *_) -> None:
gateway = Gateway(operation=RX, local_test=False, dd_sockets=False)
interface = gateway.search_serial_interface()
self.assertEqual(interface, '/dev/ttyUSB0')
self.assertEqual(interface, "/dev/ttyUSB0")
# Test unavailable system serial exits:
gateway.settings.session_usb_serial_adapter = False
interface = gateway.search_serial_interface()
self.assertEqual(interface, '/dev/ttyS0')
self.assertEqual(interface, "/dev/ttyS0")
with self.assertRaises(SystemExit):
gateway.search_serial_interface()
@mock.patch('time.sleep', return_value=None)
@mock.patch('multiprocessing.connection.Client', MagicMock())
@mock.patch('multiprocessing.connection.Listener', MagicMock())
def test_establish_local_testing_gateway(self, *_):
@mock.patch("time.sleep", return_value=None)
@mock.patch("multiprocessing.connection.Client", MagicMock())
@mock.patch("multiprocessing.connection.Listener", MagicMock())
def test_establish_local_testing_gateway(self, *_) -> None:
gateway = Gateway(operation=NC, local_test=True, dd_sockets=False)
self.assertIsInstance(gateway.rs, RSCodec)
@mock.patch('time.sleep', return_value=None)
@mock.patch('multiprocessing.connection.Client', MagicMock(side_effect=KeyboardInterrupt))
def test_keyboard_interrupt_exits(self, *_):
@mock.patch("time.sleep", return_value=None)
@mock.patch(
"multiprocessing.connection.Client", MagicMock(side_effect=KeyboardInterrupt)
)
def test_keyboard_interrupt_exits(self, *_) -> None:
with self.assertRaises(SystemExit):
Gateway(operation=TX, local_test=True, dd_sockets=False)
@mock.patch('time.sleep', return_value=None)
@mock.patch('multiprocessing.connection.Client', MagicMock(
side_effect=[socket.error, ConnectionRefusedError, MagicMock()]))
def test_socket_client(self, *_):
@mock.patch("time.sleep", return_value=None)
@mock.patch(
"multiprocessing.connection.Client",
MagicMock(side_effect=[socket.error, ConnectionRefusedError, MagicMock()]),
)
def test_socket_client(self, *_) -> None:
gateway = Gateway(operation=TX, local_test=True, dd_sockets=False)
self.assertIsInstance(gateway, Gateway)
@mock.patch('time.sleep', return_value=None)
@mock.patch('multiprocessing.connection.Listener', MagicMock(
side_effect=[MagicMock(), KeyboardInterrupt]))
def test_socket_server(self, *_):
@mock.patch("time.sleep", return_value=None)
@mock.patch(
"multiprocessing.connection.Listener",
MagicMock(side_effect=[MagicMock(), KeyboardInterrupt]),
)
def test_socket_server(self, *_) -> None:
gateway = Gateway(operation=RX, local_test=True, dd_sockets=False)
self.assertIsInstance(gateway, Gateway)
with self.assertRaises(SystemExit):
Gateway(operation=RX, local_test=True, dd_sockets=False)
@mock.patch('time.sleep', return_value=None)
@mock.patch('multiprocessing.connection.Listener', return_value=MagicMock(
accept=lambda: MagicMock(recv=MagicMock(side_effect=[KeyboardInterrupt, b'data', EOFError]))))
def test_local_testing_read(self, *_):
@mock.patch("time.sleep", return_value=None)
@mock.patch(
"multiprocessing.connection.Listener",
return_value=MagicMock(
accept=lambda: MagicMock(
recv=MagicMock(side_effect=[KeyboardInterrupt, b"data", EOFError])
)
),
)
def test_local_testing_read(self, *_) -> None:
gateway = Gateway(operation=RX, local_test=True, dd_sockets=False)
self.assertEqual(gateway.read(), b'data')
self.assertEqual(gateway.read(), b"data")
with self.assertRaises(SystemExit):
gateway.read()
@mock.patch('time.sleep', return_value=None)
@mock.patch('multiprocessing.connection.Client', return_value=MagicMock(
send=MagicMock(side_effect=[None, BrokenPipeError])))
def test_local_testing_write(self, *_):
@mock.patch("time.sleep", return_value=None)
@mock.patch(
"multiprocessing.connection.Client",
return_value=MagicMock(send=MagicMock(side_effect=[None, BrokenPipeError])),
)
def test_local_testing_write(self, *_) -> None:
gateway = Gateway(operation=TX, local_test=True, dd_sockets=False)
self.assertIsNone(gateway.write(b'data'))
self.assertIsNone(gateway.write(b"data"))
with self.assertRaises(SystemExit):
gateway.write(b'data')
gateway.write(b"data")
class TestGatewaySettings(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.unit_test_dir = cd_unit_test()
self.unit_test_dir = cd_unit_test()
self.default_serialized = """\
{
"serial_baudrate": 19200,
@ -253,210 +336,232 @@ class TestGatewaySettings(TFCTestCase):
"built_in_serial_interface": "ttyS0"
}"""
def tearDown(self):
def tearDown(self) -> None:
"""Post-test actions."""
cleanup(self.unit_test_dir)
@mock.patch('os.listdir', side_effect=[['ttyUSB0'], ['ttyS0'], ['ttyUSB0'], ['ttyS0']])
@mock.patch('builtins.input', side_effect=['yes', 'yes', 'no', 'no'])
def test_gateway_setup(self, *_):
@mock.patch(
"os.listdir", side_effect=[["ttyUSB0"], ["ttyS0"], ["ttyUSB0"], ["ttyS0"]]
)
@mock.patch("builtins.input", side_effect=["yes", "yes", "no", "no"])
def test_gateway_setup(self, *_) -> None:
settings = GatewaySettings(operation=TX, local_test=False, dd_sockets=True)
self.assertIsNone(settings.setup())
def test_store_and_load_of_settings(self):
def test_store_and_load_of_settings(self) -> None:
settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True)
self.assertTrue(os.path.isfile(f'{DIR_USER_DATA}/{TX}_serial_settings.json'))
self.assertTrue(os.path.isfile(f"{DIR_USER_DATA}/{TX}_serial_settings.json"))
self.assertEqual(settings.serial_baudrate, 19200)
self.assertEqual(settings.serial_baudrate, 19200)
self.assertEqual(settings.use_serial_usb_adapter, True)
settings.serial_baudrate = 115200
settings.serial_baudrate = 115200
settings.use_serial_usb_adapter = False
self.assertIsNone(settings.store_settings())
settings2 = GatewaySettings(operation=TX, local_test=True, dd_sockets=True)
self.assertEqual(settings2.serial_baudrate, 115200)
self.assertEqual(settings2.serial_baudrate, 115200)
self.assertEqual(settings.use_serial_usb_adapter, False)
def test_manually_edited_settings_are_loaded(self):
def test_manually_edited_settings_are_loaded(self) -> None:
# Setup
ensure_dir(DIR_USER_DATA)
with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", 'w+') as f:
f.write("""\
with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", "w+") as f:
f.write(
"""\
{
"serial_baudrate": 9600,
"serial_error_correction": 1,
"use_serial_usb_adapter": false,
"built_in_serial_interface": "ttyS0"
}""")
}"""
)
# Test
settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True)
self.assertEqual(settings.serial_baudrate, 9600)
self.assertEqual(settings.serial_error_correction, 1)
self.assertEqual(settings.use_serial_usb_adapter, False)
self.assertEqual(settings.built_in_serial_interface, 'ttyS0')
self.assertEqual(settings.serial_baudrate, 9600)
self.assertEqual(settings.serial_error_correction, 1)
self.assertEqual(settings.use_serial_usb_adapter, False)
self.assertEqual(settings.built_in_serial_interface, "ttyS0")
def test_missing_values_are_set_to_default_and_database_is_overwritten(self):
def test_missing_values_are_set_to_default_and_database_is_overwritten(
self,
) -> None:
# Setup
ensure_dir(DIR_USER_DATA)
with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", 'w+') as f:
f.write("""\
with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", "w+") as f:
f.write(
"""\
{
"serial_error_correction": 1,
"use_serial_usb_adapter": false,
"relay_usb_serial_adapter": false
}""")
}"""
)
# Test
settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True)
self.assertEqual(settings.serial_baudrate, 19200)
self.assertEqual(settings.serial_error_correction, 1)
self.assertEqual(settings.use_serial_usb_adapter, False)
self.assertEqual(settings.built_in_serial_interface, 'ttyS0')
self.assertEqual(settings.serial_baudrate, 19200)
self.assertEqual(settings.serial_error_correction, 1)
self.assertEqual(settings.use_serial_usb_adapter, False)
self.assertEqual(settings.built_in_serial_interface, "ttyS0")
def test_invalid_format_is_replaced_with_defaults(self):
def test_invalid_format_is_replaced_with_defaults(self) -> None:
# Setup
ensure_dir(DIR_USER_DATA)
with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", 'w+') as f:
f.write("""\
with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", "w+") as f:
f.write(
"""\
{
"serial_error_correction": 5,
"use_serial_usb_adapter": false,
}""")
}"""
)
# Test
settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True)
self.assertEqual(settings.serial_baudrate, 19200)
self.assertEqual(settings.serial_error_correction, 5)
self.assertEqual(settings.use_serial_usb_adapter, True)
self.assertEqual(settings.built_in_serial_interface, 'ttyS0')
self.assertEqual(settings.serial_baudrate, 19200)
self.assertEqual(settings.serial_error_correction, 5)
self.assertEqual(settings.use_serial_usb_adapter, True)
self.assertEqual(settings.built_in_serial_interface, "ttyS0")
with open(settings.file_name) as f:
data = f.read()
self.assertEqual(data, self.default_serialized)
def test_invalid_serial_baudrate_is_replaced_with_default(self):
def test_invalid_serial_baudrate_is_replaced_with_default(self) -> None:
# Setup
ensure_dir(DIR_USER_DATA)
with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", 'w+') as f:
f.write("""\
with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", "w+") as f:
f.write(
"""\
{
"serial_baudrate": 19201,
"serial_error_correction": 5,
"use_serial_usb_adapter": true,
"built_in_serial_interface": "ttyS0"
}""")
}"""
)
# Test
settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True)
self.assertEqual(settings.serial_baudrate, 19200)
self.assertEqual(settings.serial_error_correction, 5)
self.assertEqual(settings.use_serial_usb_adapter, True)
self.assertEqual(settings.built_in_serial_interface, 'ttyS0')
self.assertEqual(settings.serial_baudrate, 19200)
self.assertEqual(settings.serial_error_correction, 5)
self.assertEqual(settings.use_serial_usb_adapter, True)
self.assertEqual(settings.built_in_serial_interface, "ttyS0")
with open(settings.file_name) as f:
data = f.read()
self.assertEqual(data, self.default_serialized)
def test_invalid_serial_error_correction_is_replaced_with_default(self):
def test_invalid_serial_error_correction_is_replaced_with_default(self) -> None:
# Setup
ensure_dir(DIR_USER_DATA)
with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", 'w+') as f:
f.write("""\
with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", "w+") as f:
f.write(
"""\
{
"serial_baudrate": 19200,
"serial_error_correction": -1,
"use_serial_usb_adapter": true,
"built_in_serial_interface": "ttyS0"
}""")
}"""
)
# Test
settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True)
self.assertEqual(settings.serial_baudrate, 19200)
self.assertEqual(settings.serial_error_correction, 5)
self.assertEqual(settings.use_serial_usb_adapter, True)
self.assertEqual(settings.built_in_serial_interface, 'ttyS0')
self.assertEqual(settings.serial_baudrate, 19200)
self.assertEqual(settings.serial_error_correction, 5)
self.assertEqual(settings.use_serial_usb_adapter, True)
self.assertEqual(settings.built_in_serial_interface, "ttyS0")
with open(settings.file_name) as f:
data = f.read()
self.assertEqual(data, self.default_serialized)
def test_invalid_serial_interface_is_replaced_with_default(self):
def test_invalid_serial_interface_is_replaced_with_default(self) -> None:
# Setup
ensure_dir(DIR_USER_DATA)
with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", 'w+') as f:
f.write("""\
with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", "w+") as f:
f.write(
"""\
{
"serial_baudrate": 19200,
"serial_error_correction": 5,
"use_serial_usb_adapter": true,
"built_in_serial_interface": "does_not_exist"
}""")
}"""
)
# Test
settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True)
self.assertEqual(settings.serial_baudrate, 19200)
self.assertEqual(settings.serial_error_correction, 5)
self.assertEqual(settings.use_serial_usb_adapter, True)
self.assertEqual(settings.built_in_serial_interface, 'ttyS0')
self.assertEqual(settings.serial_baudrate, 19200)
self.assertEqual(settings.serial_error_correction, 5)
self.assertEqual(settings.use_serial_usb_adapter, True)
self.assertEqual(settings.built_in_serial_interface, "ttyS0")
with open(settings.file_name) as f:
data = f.read()
self.assertEqual(data, self.default_serialized)
def test_invalid_type_is_replaced_with_default(self):
def test_invalid_type_is_replaced_with_default(self) -> None:
# Setup
ensure_dir(DIR_USER_DATA)
with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", 'w+') as f:
f.write("""\
with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", "w+") as f:
f.write(
"""\
{
"serial_baudrate": "115200",
"serial_error_correction": "5",
"use_serial_usb_adapter": "true",
"built_in_serial_interface": true
}""")
}"""
)
# Test
settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True)
self.assertEqual(settings.serial_baudrate, 19200)
self.assertEqual(settings.serial_error_correction, 5)
self.assertEqual(settings.use_serial_usb_adapter, True)
self.assertEqual(settings.built_in_serial_interface, 'ttyS0')
self.assertEqual(settings.serial_baudrate, 19200)
self.assertEqual(settings.serial_error_correction, 5)
self.assertEqual(settings.use_serial_usb_adapter, True)
self.assertEqual(settings.built_in_serial_interface, "ttyS0")
with open(settings.file_name) as f:
data = f.read()
self.assertEqual(data, self.default_serialized)
def test_unknown_kv_pair_is_removed(self):
def test_unknown_kv_pair_is_removed(self) -> None:
# Setup
ensure_dir(DIR_USER_DATA)
with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", 'w+') as f:
f.write("""\
with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", "w+") as f:
f.write(
"""\
{
"serial_baudrate": 19200,
"serial_error_correction": 5,
"use_serial_usb_adapter": true,
"built_in_serial_interface": "ttyS0",
"this_should_not_be_here": 1
}""")
}"""
)
# Test
settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True)
self.assertEqual(settings.serial_baudrate, 19200)
self.assertEqual(settings.serial_error_correction, 5)
self.assertEqual(settings.use_serial_usb_adapter, True)
self.assertEqual(settings.built_in_serial_interface, 'ttyS0')
self.assertEqual(settings.serial_baudrate, 19200)
self.assertEqual(settings.serial_error_correction, 5)
self.assertEqual(settings.use_serial_usb_adapter, True)
self.assertEqual(settings.built_in_serial_interface, "ttyS0")
with open(settings.file_name) as f:
data = f.read()
self.assertEqual(data, self.default_serialized)
@mock.patch('os.listdir', side_effect=[['ttyS0'], ['ttyUSB0'], ['ttyUSB0'], ['ttyS0']])
@mock.patch('builtins.input', side_effect=['Yes', 'Yes', 'No', 'No'])
def test_setup(self, *_):
@mock.patch(
"os.listdir", side_effect=[["ttyS0"], ["ttyUSB0"], ["ttyUSB0"], ["ttyS0"]]
)
@mock.patch("builtins.input", side_effect=["Yes", "Yes", "No", "No"])
def test_setup(self, *_) -> None:
# Setup
ensure_dir(DIR_USER_DATA)
with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", 'w+') as f:
with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", "w+") as f:
f.write(self.default_serialized)
settings = GatewaySettings(operation=TX, local_test=False, dd_sockets=True)
@ -465,48 +570,90 @@ class TestGatewaySettings(TFCTestCase):
self.assertIsNone(settings.setup())
self.assertIsNone(settings.setup())
@mock.patch('time.sleep', return_value=None)
def test_change_setting(self, _):
@mock.patch("time.sleep", return_value=None)
def test_change_setting(self, _) -> None:
settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True)
self.assert_fr("Error: Invalid setting value 'Falsee'.",
settings.change_setting, 'serial_baudrate', 'Falsee')
self.assert_fr("Error: Invalid setting value '1.1'.",
settings.change_setting, 'serial_baudrate', '1.1', )
self.assert_fr("Error: Invalid setting value '18446744073709551616'.",
settings.change_setting, 'serial_baudrate', str(2 ** 64))
self.assert_fr("Error: Invalid setting value 'Falsee'.",
settings.change_setting, 'use_serial_usb_adapter', 'Falsee')
self.assert_se(
"Error: Invalid setting value 'Falsee'.",
settings.change_setting,
"serial_baudrate",
"Falsee",
)
self.assert_se(
"Error: Invalid setting value '1.1'.",
settings.change_setting,
"serial_baudrate",
"1.1",
)
self.assert_se(
"Error: Invalid setting value '18446744073709551616'.",
settings.change_setting,
"serial_baudrate",
str(2 ** 64),
)
self.assert_se(
"Error: Invalid setting value 'Falsee'.",
settings.change_setting,
"use_serial_usb_adapter",
"Falsee",
)
self.assertIsNone(settings.change_setting('serial_baudrate', '9600'))
self.assertEqual(GatewaySettings(operation=TX, local_test=True, dd_sockets=True).serial_baudrate, 9600)
self.assertIsNone(settings.change_setting("serial_baudrate", "9600"))
self.assertEqual(
GatewaySettings(
operation=TX, local_test=True, dd_sockets=True
).serial_baudrate,
9600,
)
settings.serial_baudrate = b'bytestring'
settings.serial_baudrate = b"bytestring"
with self.assertRaises(SystemExit):
settings.change_setting('serial_baudrate', '9600')
settings.change_setting("serial_baudrate", "9600")
def test_validate_key_value_pair(self):
def test_validate_key_value_pair(self) -> None:
settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True)
self.assert_fr("Error: The specified baud rate is not supported.",
settings.validate_key_value_pair, 'serial_baudrate', 0)
self.assert_fr("Error: The specified baud rate is not supported.",
settings.validate_key_value_pair, 'serial_baudrate', 10)
self.assert_fr("Error: The specified baud rate is not supported.",
settings.validate_key_value_pair, 'serial_baudrate', 9601)
self.assert_fr("Error: Invalid value for error correction ratio.",
settings.validate_key_value_pair, 'serial_error_correction', -1)
self.assert_se(
"Error: The specified baud rate is not supported.",
settings.validate_key_value_pair,
"serial_baudrate",
0,
)
self.assert_se(
"Error: The specified baud rate is not supported.",
settings.validate_key_value_pair,
"serial_baudrate",
10,
)
self.assert_se(
"Error: The specified baud rate is not supported.",
settings.validate_key_value_pair,
"serial_baudrate",
9601,
)
self.assert_se(
"Error: Invalid value for error correction ratio.",
settings.validate_key_value_pair,
"serial_error_correction",
-1,
)
self.assertIsNone(settings.validate_key_value_pair("serial_baudrate", 9600))
self.assertIsNone(settings.validate_key_value_pair("serial_error_correction", 20))
self.assertIsNone(settings.validate_key_value_pair("use_serial_usb_adapter", True))
self.assertIsNone(settings.validate_key_value_pair("serial_baudrate", 9600))
self.assertIsNone(
settings.validate_key_value_pair("serial_error_correction", 20)
)
self.assertIsNone(
settings.validate_key_value_pair("use_serial_usb_adapter", True)
)
@mock.patch('shutil.get_terminal_size', return_value=(64, 64))
def test_too_narrow_terminal_raises_fr_when_printing_settings(self, _):
@mock.patch("shutil.get_terminal_size", return_value=(64, 64))
def test_too_narrow_terminal_raises_fr_when_printing_settings(self, _) -> None:
settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True)
self.assert_fr("Error: Screen width is too small.", settings.print_settings)
self.assert_se("Error: Screen width is too small.", settings.print_settings)
def test_print_settings(self):
def test_print_settings(self) -> None:
settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True)
self.assert_prints("""\
self.assert_prints(
"""\
Serial interface setting Current value Default value Description
@ -521,8 +668,10 @@ serial_error_correction 5 5 Number of byte
recover from
""", settings.print_settings)
""",
settings.print_settings,
)
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main(exit=False)

View File

@ -23,45 +23,72 @@ import unittest
from unittest import mock
from src.common.input import ask_confirmation_code, box_input, get_b58_key, nc_bypass_msg, pwd_prompt, yes
from src.common.statics import (B58_LOCAL_KEY, B58_PUBLIC_KEY, NC_BYPASS_START, NC_BYPASS_STOP, SYMMETRIC_KEY_LENGTH,
TFC_PUBLIC_KEY_LENGTH)
from src.common.input import (
ask_confirmation_code,
box_input,
get_b58_key,
nc_bypass_msg,
pwd_prompt,
yes,
)
from src.common.statics import (
B58_LOCAL_KEY,
B58_PUBLIC_KEY,
NC_BYPASS_START,
NC_BYPASS_STOP,
SYMMETRIC_KEY_LENGTH,
TFC_PUBLIC_KEY_LENGTH,
)
from tests.mock_classes import Settings
from tests.utils import nick_to_short_address, VALID_ECDHE_PUB_KEY, VALID_LOCAL_KEY_KDK
from tests.utils import nick_to_short_address, VALID_ECDHE_PUB_KEY, VALID_LOCAL_KEY_KDK
class TestAskConfirmationCode(unittest.TestCase):
confirmation_code = 'ff'
confirmation_code = "ff"
@mock.patch('builtins.input', return_value=confirmation_code)
def test_ask_confirmation_code(self, _):
self.assertEqual(ask_confirmation_code('Receiver'), self.confirmation_code)
@mock.patch("builtins.input", return_value=confirmation_code)
def test_ask_confirmation_code(self, _) -> None:
self.assertEqual(ask_confirmation_code("Receiver"), self.confirmation_code)
class TestBoxInput(unittest.TestCase):
@mock.patch('time.sleep', return_value=None)
@mock.patch('builtins.input', side_effect=['mock_input', 'mock_input', '', 'invalid', 'ok'])
def test_box_input(self, *_):
self.assertEqual(box_input('test title'), 'mock_input')
self.assertEqual(box_input('test title', head=1, expected_len=20), 'mock_input')
self.assertEqual(box_input('test title', head=1, default='mock_input', expected_len=20), 'mock_input')
self.assertEqual(box_input('test title', validator=lambda string, *_: '' if string == 'ok' else 'Error'), 'ok')
@mock.patch("time.sleep", return_value=None)
@mock.patch(
"builtins.input", side_effect=["mock_input", "mock_input", "", "invalid", "ok"]
)
def test_box_input(self, *_) -> None:
self.assertEqual(box_input("test title"), "mock_input")
self.assertEqual(box_input("test title", head=1, expected_len=20), "mock_input")
self.assertEqual(
box_input("test title", head=1, default="mock_input", expected_len=20),
"mock_input",
)
self.assertEqual(
box_input(
"test title",
validator=lambda string, *_: "" if string == "ok" else "Error",
),
"ok",
)
class TestGetB58Key(unittest.TestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.settings = Settings()
@mock.patch('time.sleep', return_value=None)
@mock.patch('shutil.get_terminal_size', return_value=[200, 200])
@mock.patch('builtins.input', side_effect=(2*['invalid', VALID_LOCAL_KEY_KDK[:-1], VALID_LOCAL_KEY_KDK] +
2*['invalid', VALID_ECDHE_PUB_KEY[:-1], VALID_ECDHE_PUB_KEY]))
def test_get_b58_key(self, *_):
@mock.patch("time.sleep", return_value=None)
@mock.patch("shutil.get_terminal_size", return_value=[200, 200])
@mock.patch(
"builtins.input",
side_effect=(
2 * ["invalid", VALID_LOCAL_KEY_KDK[:-1], VALID_LOCAL_KEY_KDK]
+ 2 * ["invalid", VALID_ECDHE_PUB_KEY[:-1], VALID_ECDHE_PUB_KEY]
),
)
def test_get_b58_key(self, *_) -> None:
for boolean in [True, False]:
self.settings.local_testing_mode = boolean
key = get_b58_key(B58_LOCAL_KEY, self.settings)
@ -70,58 +97,71 @@ class TestGetB58Key(unittest.TestCase):
self.assertEqual(len(key), SYMMETRIC_KEY_LENGTH)
with self.assertRaises(SystemExit):
get_b58_key('invalid_key_type', self.settings)
get_b58_key("invalid_key_type", self.settings)
for boolean in [True, False]:
self.settings.local_testing_mode = boolean
key = get_b58_key(B58_PUBLIC_KEY, self.settings, nick_to_short_address('Alice'))
key = get_b58_key(
B58_PUBLIC_KEY, self.settings, nick_to_short_address("Alice")
)
self.assertIsInstance(key, bytes)
self.assertEqual(len(key), TFC_PUBLIC_KEY_LENGTH)
with self.assertRaises(SystemExit):
get_b58_key('invalid_key_type', self.settings)
get_b58_key("invalid_key_type", self.settings)
@mock.patch('builtins.input', return_value='')
@mock.patch('shutil.get_terminal_size', return_value=[200, 200])
def test_empty_pub_key_returns_empty_bytes(self, *_):
@mock.patch("builtins.input", return_value="")
@mock.patch("shutil.get_terminal_size", return_value=[200, 200])
def test_empty_pub_key_returns_empty_bytes(self, *_) -> None:
key = get_b58_key(B58_PUBLIC_KEY, self.settings)
self.assertEqual(key, b'')
self.assertEqual(key, b"")
class TestNCBypassMsg(unittest.TestCase):
@mock.patch('builtins.input', return_value='')
def test_nc_bypass_msg(self, _):
@mock.patch("builtins.input", return_value="")
def test_nc_bypass_msg(self, _) -> None:
settings = Settings(nc_bypass_messages=True)
self.assertIsNone(nc_bypass_msg(NC_BYPASS_START, settings))
self.assertIsNone(nc_bypass_msg(NC_BYPASS_STOP, settings))
self.assertIsNone(nc_bypass_msg(NC_BYPASS_STOP, settings))
class TestPwdPrompt(unittest.TestCase):
@mock.patch('getpass.getpass', return_value='test_password')
def test_pwd_prompt(self, _):
self.assertEqual(pwd_prompt("test prompt"), 'test_password')
@mock.patch("getpass.getpass", return_value="test_password")
def test_pwd_prompt(self, _) -> None:
self.assertEqual(pwd_prompt("test prompt"), "test_password")
class TestYes(unittest.TestCase):
@mock.patch(
"builtins.input",
side_effect=[
"Invalid",
"",
"invalid",
"Y",
"YES",
"N",
"NO",
KeyboardInterrupt,
KeyboardInterrupt,
EOFError,
EOFError,
],
)
def test_yes(self, _) -> None:
self.assertTrue(yes("test prompt", head=1, tail=1))
self.assertTrue(yes("test prompt"))
@mock.patch('builtins.input', side_effect=['Invalid', '', 'invalid', 'Y', 'YES', 'N', 'NO',
KeyboardInterrupt, KeyboardInterrupt, EOFError, EOFError])
def test_yes(self, _):
self.assertTrue(yes('test prompt', head=1, tail=1))
self.assertTrue(yes('test prompt'))
self.assertFalse(yes("test prompt", head=1, tail=1))
self.assertFalse(yes("test prompt"))
self.assertFalse(yes('test prompt', head=1, tail=1))
self.assertFalse(yes('test prompt'))
self.assertTrue(yes("test prompt", head=1, tail=1, abort=True))
self.assertFalse(yes("test prompt", abort=False))
self.assertTrue(yes('test prompt', head=1, tail=1, abort=True))
self.assertFalse(yes('test prompt', abort=False))
self.assertTrue(yes('test prompt', head=1, tail=1, abort=True))
self.assertFalse(yes('test prompt', abort=False))
self.assertTrue(yes("test prompt", head=1, tail=1, abort=True))
self.assertFalse(yes("test prompt", abort=False))
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main(exit=False)

View File

@ -28,116 +28,164 @@ import unittest
import zlib
from multiprocessing import Process
from unittest import mock
from typing import Any
from unittest import mock
from src.common.misc import calculate_race_condition_delay, decompress, ensure_dir, get_tab_complete_list
from src.common.misc import get_tab_completer, get_terminal_height, get_terminal_width, ignored, monitor_processes
from src.common.misc import process_arguments, readable_size, round_up, separate_header, separate_headers
from src.common.misc import separate_trailer, split_string, split_byte_string, terminal_width_check
from src.common.misc import validate_group_name, validate_key_exchange, validate_onion_addr, validate_nick
from src.common.statics import (DIR_RECV_FILES, DIR_USER_DATA, DUMMY_GROUP, ECDHE, EXIT, EXIT_QUEUE, LOCAL_ID,
PADDING_LENGTH, RX, TAILS, WIPE)
from src.common.misc import (
calculate_race_condition_delay,
decompress,
ensure_dir,
get_tab_complete_list,
)
from src.common.misc import (
get_tab_completer,
get_terminal_height,
get_terminal_width,
ignored,
monitor_processes,
)
from src.common.misc import (
process_arguments,
readable_size,
round_up,
separate_header,
separate_headers,
)
from src.common.misc import (
separate_trailer,
split_string,
split_byte_string,
terminal_width_check,
)
from src.common.misc import (
validate_group_name,
validate_key_exchange,
validate_onion_addr,
validate_nick,
)
from src.common.statics import (
DIR_RECV_FILES,
DIR_USER_DATA,
DUMMY_GROUP,
ECDHE,
EXIT,
EXIT_QUEUE,
LOCAL_ID,
PADDING_LENGTH,
RX,
TAILS,
WIPE,
)
from tests.mock_classes import ContactList, Gateway, GroupList, Settings
from tests.utils import cd_unit_test, cleanup, gen_queue_dict, ignored, nick_to_onion_address
from tests.utils import nick_to_pub_key, tear_queues, TFCTestCase
from tests.utils import cd_unit_test, cleanup, gen_queue_dict, nick_to_onion_address
from tests.utils import nick_to_pub_key, tear_queues, TFCTestCase
class TestCalculateRaceConditionDelay(unittest.TestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.settings = Settings()
def test_race_condition_delay_calculation(self):
def test_race_condition_delay_calculation(self) -> None:
self.assertIsInstance(calculate_race_condition_delay(5, 9600), float)
class TestDecompress(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.settings = Settings()
self.settings = Settings()
self.settings.max_decompress_size = 1000
def test_successful_decompression(self):
def test_successful_decompression(self) -> None:
# Setup
data = os.urandom(self.settings.max_decompress_size)
data = os.urandom(self.settings.max_decompress_size)
compressed = zlib.compress(data)
# Test
self.assertEqual(decompress(compressed, self.settings.max_decompress_size), data)
self.assertEqual(
decompress(compressed, self.settings.max_decompress_size), data
)
def test_oversize_decompression_raises_fr(self):
def test_oversize_decompression_raises_fr(self) -> None:
# Setup
data = os.urandom(self.settings.max_decompress_size + 1)
data = os.urandom(self.settings.max_decompress_size + 1)
compressed = zlib.compress(data)
# Test
self.assert_fr("Error: Decompression aborted due to possible zip bomb.",
decompress, compressed, self.settings.max_decompress_size)
self.assert_se(
"Error: Decompression aborted due to possible zip bomb.",
decompress,
compressed,
self.settings.max_decompress_size,
)
class TestEnsureDir(unittest.TestCase):
def tearDown(self):
def tearDown(self) -> None:
"""Post-test actions."""
with ignored(OSError):
os.rmdir('test_dir/')
os.rmdir("test_dir/")
def test_ensure_dir(self):
self.assertIsNone(ensure_dir('test_dir/'))
self.assertIsNone(ensure_dir('test_dir/'))
self.assertTrue(os.path.isdir('test_dir/'))
def test_ensure_dir(self) -> None:
self.assertIsNone(ensure_dir("test_dir/"))
self.assertIsNone(ensure_dir("test_dir/"))
self.assertTrue(os.path.isdir("test_dir/"))
class TestTabCompleteList(unittest.TestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.contact_list = ContactList(nicks=['Alice', 'Bob'])
self.group_list = GroupList(groups=['test_group'])
self.settings = Settings(key_list=['key1', 'key2'])
self.gateway = Gateway()
self.contact_list = ContactList(nicks=["Alice", "Bob"])
self.group_list = GroupList(groups=["test_group"])
self.settings = Settings(key_list=["key1", "key2"])
self.gateway = Gateway()
def test_get_tab_complete_list(self):
tab_complete_list = [a + ' ' for a in self.contact_list.get_list_of_addresses()]
tab_complete_list += [i + ' ' for i in self.group_list.get_list_of_hr_group_ids()]
tab_complete_list += [s + ' ' for s in self.settings.key_list]
tab_complete_list += [s + ' ' for s in self.gateway.settings.key_list]
def test_get_tab_complete_list(self) -> None:
tab_complete_list = [a + " " for a in self.contact_list.get_list_of_addresses()]
tab_complete_list += [
i + " " for i in self.group_list.get_list_of_hr_group_ids()
]
tab_complete_list += [s + " " for s in self.settings.key_list]
tab_complete_list += [s + " " for s in self.gateway.settings.key_list]
tc_list = get_tab_complete_list(self.contact_list, self.group_list, self.settings, self.gateway)
tc_list = get_tab_complete_list(
self.contact_list, self.group_list, self.settings, self.gateway
)
self.assertTrue(set(tab_complete_list) < set(tc_list))
self.assertIsInstance(get_tab_completer(self.contact_list, self.group_list, self.settings, self.gateway),
types.FunctionType)
self.assertIsInstance(
get_tab_completer(
self.contact_list, self.group_list, self.settings, self.gateway
),
types.FunctionType,
)
completer = get_tab_completer(self.contact_list, self.group_list, self.settings, self.gateway)
options = completer('a', state=0)
completer = get_tab_completer(
self.contact_list, self.group_list, self.settings, self.gateway
)
options = completer("a", state=0)
self.assertEqual(options, 'all')
self.assertIsNone(completer('a', state=5))
self.assertEqual(options, "all")
self.assertIsNone(completer("a", state=5))
class TestGetTerminalHeight(unittest.TestCase):
def test_get_terminal_height(self):
def test_get_terminal_height(self) -> None:
self.assertIsInstance(get_terminal_height(), int)
class TestGetTerminalWidth(unittest.TestCase):
def test_get_terminal_width(self):
def test_get_terminal_width(self) -> None:
self.assertIsInstance(get_terminal_width(), int)
class TestIgnored(unittest.TestCase):
@staticmethod
def func():
def func() -> None:
"""Mock function that raises exception."""
raise KeyboardInterrupt
def test_ignored_contextmanager(self):
def test_ignored_contextmanager(self) -> None:
raised = False
try:
with ignored(KeyboardInterrupt):
@ -148,34 +196,34 @@ class TestIgnored(unittest.TestCase):
class TestMonitorProcesses(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.unit_test_dir = cd_unit_test()
self.settings = Settings()
self.settings = Settings()
def tearDown(self):
def tearDown(self) -> None:
"""Post-test actions."""
cleanup(self.unit_test_dir)
@staticmethod
def mock_process():
def mock_process() -> None:
"""Mock process that does not return."""
while True:
time.sleep(0.01)
@mock.patch('time.sleep', return_value=None)
def test_exit(self, *_):
queues = gen_queue_dict()
@mock.patch("time.sleep", return_value=None)
def test_exit(self, *_) -> None:
queues = gen_queue_dict()
process_list = [Process(target=self.mock_process)]
for p in process_list:
p.start()
def queue_delayer():
def queue_delayer() -> None:
"""Place EXIT packet into queue after delay."""
time.sleep(0.01)
queues[EXIT_QUEUE].put(EXIT)
threading.Thread(target=queue_delayer).start()
with self.assertRaises(SystemExit):
@ -183,14 +231,13 @@ class TestMonitorProcesses(TFCTestCase):
tear_queues(queues)
@mock.patch('time.sleep', return_value=None)
def test_dying_process(self, *_):
def mock_process():
@mock.patch("time.sleep", return_value=None)
def test_dying_process(self, *_) -> None:
def mock_process() -> None:
"""Function that returns after a moment."""
time.sleep(0.01)
queues = gen_queue_dict()
queues = gen_queue_dict()
process_list = [Process(target=mock_process)]
for p in process_list:
@ -201,10 +248,10 @@ class TestMonitorProcesses(TFCTestCase):
tear_queues(queues)
@mock.patch('time.sleep', return_value=None)
@mock.patch('os.system', return_value=None)
def test_wipe(self, mock_os_system, *_):
queues = gen_queue_dict()
@mock.patch("time.sleep", return_value=None)
@mock.patch("os.system", return_value=None)
def test_wipe(self, mock_os_system, *_) -> None:
queues = gen_queue_dict()
process_list = [Process(target=self.mock_process)]
os.mkdir(DIR_USER_DATA)
@ -215,25 +262,26 @@ class TestMonitorProcesses(TFCTestCase):
for p in process_list:
p.start()
def queue_delayer():
def queue_delayer() -> None:
"""Place WIPE packet to queue after delay."""
time.sleep(0.01)
queues[EXIT_QUEUE].put(WIPE)
threading.Thread(target=queue_delayer).start()
with self.assertRaises(SystemExit):
monitor_processes(process_list, RX, queues)
self.assertFalse(os.path.isdir(DIR_USER_DATA))
self.assertFalse(os.path.isdir(DIR_RECV_FILES))
mock_os_system.assert_called_with('systemctl poweroff')
mock_os_system.assert_called_with("systemctl poweroff")
tear_queues(queues)
@mock.patch('time.sleep', return_value=None)
@mock.patch('os.system', return_value=None)
@mock.patch('builtins.open', mock.mock_open(read_data=TAILS))
def test_wipe_tails(self, mock_os_system, *_):
queues = gen_queue_dict()
@mock.patch("time.sleep", return_value=None)
@mock.patch("os.system", return_value=None)
@mock.patch("builtins.open", mock.mock_open(read_data=TAILS))
def test_wipe_tails(self, mock_os_system, *_) -> None:
queues = gen_queue_dict()
process_list = [Process(target=self.mock_process)]
os.mkdir(DIR_USER_DATA)
@ -242,16 +290,17 @@ class TestMonitorProcesses(TFCTestCase):
for p in process_list:
p.start()
def queue_delayer():
def queue_delayer() -> None:
"""Place WIPE packet to queue after delay."""
time.sleep(0.01)
queues[EXIT_QUEUE].put(WIPE)
threading.Thread(target=queue_delayer).start()
with self.assertRaises(SystemExit):
monitor_processes(process_list, RX, queues)
mock_os_system.assert_called_with('systemctl poweroff')
mock_os_system.assert_called_with("systemctl poweroff")
# Test that user data wasn't removed
self.assertTrue(os.path.isdir(DIR_USER_DATA))
@ -259,58 +308,57 @@ class TestMonitorProcesses(TFCTestCase):
class TestProcessArguments(unittest.TestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
class MockParser(object):
"""MockParse object."""
def __init__(self, *_, **__):
def __init__(self, *_, **__) -> None:
pass
def parse_args(self):
def parse_args(self) -> Any:
"""Return Args mock object."""
class Args(object):
"""Mock object for command line arguments."""
def __init__(self):
def __init__(self) -> None:
"""Create new Args mock object."""
self.operation = True
self.local_test = True
self.operation = True
self.local_test = True
self.data_diode_sockets = True
args = Args()
return args
def add_argument(self, *_, **__):
def add_argument(self, *_, **__) -> None:
"""Mock function for adding argument."""
pass
self.o_argparse = argparse.ArgumentParser
self.o_argparse = argparse.ArgumentParser
argparse.ArgumentParser = MockParser
def tearDown(self):
def tearDown(self) -> None:
"""Post-test actions."""
argparse.ArgumentParser = self.o_argparse
def test_process_arguments(self):
def test_process_arguments(self) -> None:
self.assertEqual(process_arguments(), (RX, True, True))
class TestReadableSize(unittest.TestCase):
def test_readable_size(self):
sizes = ['', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
def test_readable_size(self) -> None:
sizes = ["", "K", "M", "G", "T", "P", "E", "Z", "Y"]
for i in range(0, 9):
size = readable_size(1024 ** i)
self.assertEqual(size, f'1.0{sizes[i]}B')
self.assertEqual(size, f"1.0{sizes[i]}B")
class TestRoundUp(unittest.TestCase):
def test_round_up(self):
self.assertEqual(round_up(1), 10)
self.assertEqual(round_up(5), 10)
self.assertEqual(round_up(8), 10)
def test_round_up(self) -> None:
self.assertEqual(round_up(1), 10)
self.assertEqual(round_up(5), 10)
self.assertEqual(round_up(8), 10)
self.assertEqual(round_up(10), 10)
self.assertEqual(round_up(11), 20)
self.assertEqual(round_up(15), 20)
@ -320,194 +368,259 @@ class TestRoundUp(unittest.TestCase):
class TestSplitString(unittest.TestCase):
def test_split_string(self) -> None:
self.assertEqual(
split_string("cypherpunk", 1),
["c", "y", "p", "h", "e", "r", "p", "u", "n", "k"],
)
def test_split_string(self):
self.assertEqual(split_string('cypherpunk', 1), ['c',
'y',
'p',
'h',
'e',
'r',
'p',
'u',
'n',
'k'])
self.assertEqual(split_string("cypherpunk", 2), ["cy", "ph", "er", "pu", "nk"])
self.assertEqual(split_string('cypherpunk', 2), ['cy',
'ph',
'er',
'pu',
'nk'])
self.assertEqual(split_string("cypherpunk", 3), ["cyp", "her", "pun", "k"])
self.assertEqual(split_string('cypherpunk', 3), ['cyp',
'her',
'pun',
'k'])
self.assertEqual(split_string("cypherpunk", 5), ["cyphe", "rpunk"])
self.assertEqual(split_string('cypherpunk', 5), ['cyphe',
'rpunk'])
self.assertEqual(split_string('cypherpunk', 10), ['cypherpunk'])
self.assertEqual(split_string('cypherpunk', 15), ['cypherpunk'])
self.assertEqual(split_string("cypherpunk", 10), ["cypherpunk"])
self.assertEqual(split_string("cypherpunk", 15), ["cypherpunk"])
class TestSplitByteString(unittest.TestCase):
def test_split_byte_string(self) -> None:
self.assertEqual(
split_byte_string(b"cypherpunk", 1),
[b"c", b"y", b"p", b"h", b"e", b"r", b"p", b"u", b"n", b"k"],
)
def test_split_byte_string(self):
self.assertEqual(split_byte_string(b'cypherpunk', 1), [b'c',
b'y',
b'p',
b'h',
b'e',
b'r',
b'p',
b'u',
b'n',
b'k'])
self.assertEqual(
split_byte_string(b"cypherpunk", 2), [b"cy", b"ph", b"er", b"pu", b"nk"]
)
self.assertEqual(split_byte_string(b'cypherpunk', 2), [b'cy',
b'ph',
b'er',
b'pu',
b'nk'])
self.assertEqual(
split_byte_string(b"cypherpunk", 3), [b"cyp", b"her", b"pun", b"k"]
)
self.assertEqual(split_byte_string(b'cypherpunk', 3), [b'cyp',
b'her',
b'pun',
b'k'])
self.assertEqual(split_byte_string(b"cypherpunk", 5), [b"cyphe", b"rpunk"])
self.assertEqual(split_byte_string(b'cypherpunk', 5), [b'cyphe',
b'rpunk'])
self.assertEqual(split_byte_string(b'cypherpunk', 10), [b'cypherpunk'])
self.assertEqual(split_byte_string(b'cypherpunk', 15), [b'cypherpunk'])
self.assertEqual(split_byte_string(b"cypherpunk", 10), [b"cypherpunk"])
self.assertEqual(split_byte_string(b"cypherpunk", 15), [b"cypherpunk"])
class TestSeparateHeader(unittest.TestCase):
def test_separate_header(self):
self.assertEqual(separate_header(b"cypherpunk", header_length=len(b"cypher")),
(b"cypher", b"punk"))
def test_separate_header(self) -> None:
self.assertEqual(
separate_header(b"cypherpunk", header_length=len(b"cypher")),
(b"cypher", b"punk"),
)
class TestSeparateHeaders(unittest.TestCase):
def test_separate_headers(self) -> None:
self.assertEqual(
separate_headers(b"cypherpunk", header_length_list=[1, 2, 3]),
[b"c", b"yp", b"her", b"punk"],
)
def test_separate_headers(self):
self.assertEqual(separate_headers(b"cypherpunk", header_length_list=[1, 2, 3]),
[b"c", b"yp", b"her", b"punk"])
def test_too_small_string(self):
self.assertEqual(separate_headers(b"cypherpunk", header_length_list=[1, 2, 10]),
[b"c", b"yp", b"herpunk", b""])
def test_too_small_string(self) -> None:
self.assertEqual(
separate_headers(b"cypherpunk", header_length_list=[1, 2, 10]),
[b"c", b"yp", b"herpunk", b""],
)
class TestSeparateTrailer(unittest.TestCase):
def test_separate_header(self):
self.assertEqual(separate_trailer(b"cypherpunk", trailer_length=len(b"punk")),
(b"cypher", b"punk"))
def test_separate_header(self) -> None:
self.assertEqual(
separate_trailer(b"cypherpunk", trailer_length=len(b"punk")),
(b"cypher", b"punk"),
)
class TestTerminalWidthCheck(unittest.TestCase):
@mock.patch('time.sleep', return_value=None)
@mock.patch('shutil.get_terminal_size', side_effect=[[50, 50], [50, 50], [100, 100]])
def test_width_check(self, *_):
@mock.patch("time.sleep", return_value=None)
@mock.patch(
"shutil.get_terminal_size", side_effect=[[50, 50], [50, 50], [100, 100]]
)
def test_width_check(self, *_) -> None:
self.assertIsNone(terminal_width_check(80))
class TestValidateOnionAddr(unittest.TestCase):
def test_validate_account(self):
def test_validate_account(self) -> None:
user_account = nick_to_onion_address("Bob")
self.assertEqual(validate_onion_addr(nick_to_onion_address("Alice"), user_account),
'')
self.assertEqual(validate_onion_addr(nick_to_onion_address("Bob"), user_account),
'Error: Can not add own account.')
self.assertEqual(validate_onion_addr(nick_to_onion_address("Alice")[:-1] + 'a', user_account),
'Checksum error - Check that the entered account is correct.')
self.assertEqual(validate_onion_addr(nick_to_onion_address("Alice")[:-1] + '%', user_account),
'Error: Invalid account format.')
self.assertEqual(validate_onion_addr(nick_to_onion_address("Alice") + 'a', user_account),
'Error: Invalid account format.')
self.assertEqual(validate_onion_addr(nick_to_onion_address("Alice")[:-1] + '', user_account),
'Error: Invalid account format.')
self.assertEqual(validate_onion_addr(LOCAL_ID, user_account),
'Error: Can not add reserved account.')
self.assertEqual(
validate_onion_addr(nick_to_onion_address("Alice"), user_account), ""
)
self.assertEqual(
validate_onion_addr(nick_to_onion_address("Bob"), user_account),
"Error: Can not add own account.",
)
self.assertEqual(
validate_onion_addr(
nick_to_onion_address("Alice")[:-1] + "a", user_account
),
"Checksum error - Check that the entered account is correct.",
)
self.assertEqual(
validate_onion_addr(
nick_to_onion_address("Alice")[:-1] + "%", user_account
),
"Error: Invalid account format.",
)
self.assertEqual(
validate_onion_addr(nick_to_onion_address("Alice") + "a", user_account),
"Error: Invalid account format.",
)
self.assertEqual(
validate_onion_addr(
nick_to_onion_address("Alice")[:-1] + "", user_account
),
"Error: Invalid account format.",
)
self.assertEqual(
validate_onion_addr(LOCAL_ID, user_account),
"Error: Can not add reserved account.",
)
class TestValidateGroupName(unittest.TestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.contact_list = ContactList(nicks=['Alice'])
self.group_list = GroupList(groups=['test_group'])
self.contact_list = ContactList(nicks=["Alice"])
self.group_list = GroupList(groups=["test_group"])
def test_validate_group_name(self):
self.assertEqual(validate_group_name('test_group\x1f', self.contact_list, self.group_list),
"Error: Group name must be printable.")
self.assertEqual(validate_group_name(PADDING_LENGTH * 'a', self.contact_list, self.group_list),
"Error: Group name must be less than 255 chars long.")
self.assertEqual(validate_group_name(DUMMY_GROUP, self.contact_list, self.group_list),
"Error: Group name cannot use the name reserved for database padding.")
self.assertEqual(validate_group_name(nick_to_onion_address("Alice"), self.contact_list, self.group_list),
"Error: Group name cannot have the format of an account.")
self.assertEqual(validate_group_name('Alice', self.contact_list, self.group_list),
"Error: Group name cannot be a nick of contact.")
self.assertEqual(validate_group_name('test_group', self.contact_list, self.group_list),
"Error: Group with name 'test_group' already exists.")
self.assertEqual(validate_group_name('test_group2', self.contact_list, self.group_list),
'')
def test_validate_group_name(self) -> None:
self.assertEqual(
validate_group_name("test_group\x1f", self.contact_list, self.group_list),
"Error: Group name must be printable.",
)
self.assertEqual(
validate_group_name(
PADDING_LENGTH * "a", self.contact_list, self.group_list
),
"Error: Group name must be less than 255 chars long.",
)
self.assertEqual(
validate_group_name(DUMMY_GROUP, self.contact_list, self.group_list),
"Error: Group name cannot use the name reserved for database padding.",
)
self.assertEqual(
validate_group_name(
nick_to_onion_address("Alice"), self.contact_list, self.group_list
),
"Error: Group name cannot have the format of an account.",
)
self.assertEqual(
validate_group_name("Alice", self.contact_list, self.group_list),
"Error: Group name cannot be a nick of contact.",
)
self.assertEqual(
validate_group_name("test_group", self.contact_list, self.group_list),
"Error: Group with name 'test_group' already exists.",
)
self.assertEqual(
validate_group_name("test_group2", self.contact_list, self.group_list), ""
)
class TestValidateKeyExchange(unittest.TestCase):
def test_validate_key_exchange(self):
self.assertEqual(validate_key_exchange(''), 'Invalid key exchange selection.')
self.assertEqual(validate_key_exchange('x2'), 'Invalid key exchange selection.')
self.assertEqual(validate_key_exchange('x'), '')
self.assertEqual(validate_key_exchange('X'), '')
self.assertEqual(validate_key_exchange(ECDHE), '')
self.assertEqual(validate_key_exchange(ECDHE.lower()), '')
self.assertEqual(validate_key_exchange('p'), '')
self.assertEqual(validate_key_exchange('P'), '')
self.assertEqual(validate_key_exchange('psk'), '')
self.assertEqual(validate_key_exchange('PSK'), '')
def test_validate_key_exchange(self) -> None:
self.assertEqual(validate_key_exchange(""), "Invalid key exchange selection.")
self.assertEqual(validate_key_exchange("x2"), "Invalid key exchange selection.")
self.assertEqual(validate_key_exchange("x"), "")
self.assertEqual(validate_key_exchange("X"), "")
self.assertEqual(validate_key_exchange(ECDHE), "")
self.assertEqual(validate_key_exchange(ECDHE.lower()), "")
self.assertEqual(validate_key_exchange("p"), "")
self.assertEqual(validate_key_exchange("P"), "")
self.assertEqual(validate_key_exchange("psk"), "")
self.assertEqual(validate_key_exchange("PSK"), "")
class TestValidateNick(unittest.TestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.contact_list = ContactList(nicks=['Alice', 'Bob'])
self.group_list = GroupList(groups=['test_group'])
self.contact_list = ContactList(nicks=["Alice", "Bob"])
self.group_list = GroupList(groups=["test_group"])
def test_validate_nick(self):
self.assertEqual(validate_nick("Alice_", (self.contact_list, self.group_list, nick_to_pub_key(
"Alice"))), '')
self.assertEqual(validate_nick(254 * "a", (self.contact_list, self.group_list, nick_to_pub_key(
"Alice"))), '')
self.assertEqual(validate_nick(255 * "a", (self.contact_list, self.group_list, nick_to_pub_key(
"Alice"))), 'Error: Nick must be shorter than 255 chars.')
self.assertEqual(validate_nick("\x01Alice", (self.contact_list, self.group_list, nick_to_pub_key(
"Alice"))), 'Error: Nick must be printable.')
self.assertEqual(validate_nick('', (self.contact_list, self.group_list, nick_to_pub_key(
"Alice"))), "Error: Nick cannot be empty.")
self.assertEqual(validate_nick('Me', (self.contact_list, self.group_list, nick_to_pub_key(
"Alice"))), "Error: 'Me' is a reserved nick.")
self.assertEqual(validate_nick('-!-', (self.contact_list, self.group_list, nick_to_pub_key(
"Alice"))), "Error: '-!-' is a reserved nick.")
self.assertEqual(validate_nick(LOCAL_ID, (self.contact_list, self.group_list, nick_to_pub_key(
"Alice"))), "Error: Nick cannot have the format of an account.")
self.assertEqual(validate_nick(nick_to_onion_address('A'), (self.contact_list, self.group_list, nick_to_pub_key(
"Alice"))), "Error: Nick cannot have the format of an account.")
self.assertEqual(validate_nick('Bob', (self.contact_list, self.group_list, nick_to_pub_key(
"Alice"))), 'Error: Nick already in use.')
self.assertEqual(validate_nick("Alice", (self.contact_list, self.group_list, nick_to_pub_key(
"Alice"))), '')
self.assertEqual(validate_nick("test_group", (self.contact_list, self.group_list, nick_to_pub_key(
"Alice"))), "Error: Nick cannot be a group name.")
def test_validate_nick(self) -> None:
self.assertEqual(
validate_nick(
"Alice_", (self.contact_list, self.group_list, nick_to_pub_key("Alice"))
),
"",
)
self.assertEqual(
validate_nick(
254 * "a",
(self.contact_list, self.group_list, nick_to_pub_key("Alice")),
),
"",
)
self.assertEqual(
validate_nick(
255 * "a",
(self.contact_list, self.group_list, nick_to_pub_key("Alice")),
),
"Error: Nick must be shorter than 255 chars.",
)
self.assertEqual(
validate_nick(
"\x01Alice",
(self.contact_list, self.group_list, nick_to_pub_key("Alice")),
),
"Error: Nick must be printable.",
)
self.assertEqual(
validate_nick(
"", (self.contact_list, self.group_list, nick_to_pub_key("Alice"))
),
"Error: Nick cannot be empty.",
)
self.assertEqual(
validate_nick(
"Me", (self.contact_list, self.group_list, nick_to_pub_key("Alice"))
),
"Error: 'Me' is a reserved nick.",
)
self.assertEqual(
validate_nick(
"-!-", (self.contact_list, self.group_list, nick_to_pub_key("Alice"))
),
"Error: '-!-' is a reserved nick.",
)
self.assertEqual(
validate_nick(
LOCAL_ID, (self.contact_list, self.group_list, nick_to_pub_key("Alice"))
),
"Error: Nick cannot have the format of an account.",
)
self.assertEqual(
validate_nick(
nick_to_onion_address("A"),
(self.contact_list, self.group_list, nick_to_pub_key("Alice")),
),
"Error: Nick cannot have the format of an account.",
)
self.assertEqual(
validate_nick(
"Bob", (self.contact_list, self.group_list, nick_to_pub_key("Alice"))
),
"Error: Nick already in use.",
)
self.assertEqual(
validate_nick(
"Alice", (self.contact_list, self.group_list, nick_to_pub_key("Alice"))
),
"",
)
self.assertEqual(
validate_nick(
"test_group",
(self.contact_list, self.group_list, nick_to_pub_key("Alice")),
),
"Error: Nick cannot be a group name.",
)
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main(exit=False)

View File

@ -24,102 +24,175 @@ import unittest
from datetime import datetime
from unittest import mock
from src.common.output import clear_screen, group_management_print, m_print, phase, print_fingerprint, print_key
from src.common.output import print_title, print_on_previous_line, print_spacing, rp_print
from src.common.statics import (ADDED_MEMBERS, ALREADY_MEMBER, BOLD_ON, CLEAR_ENTIRE_LINE, CLEAR_ENTIRE_SCREEN,
CURSOR_LEFT_UP_CORNER, CURSOR_UP_ONE_LINE, DONE, FINGERPRINT_LENGTH, NEW_GROUP,
NORMAL_TEXT, NOT_IN_GROUP, REMOVED_MEMBERS, RX, SYMMETRIC_KEY_LENGTH, TX,
UNKNOWN_ACCOUNTS, VERSION)
from src.common.output import (
clear_screen,
group_management_print,
m_print,
phase,
print_fingerprint,
print_key,
)
from src.common.output import (
print_title,
print_on_previous_line,
print_spacing,
rp_print,
)
from src.common.statics import (
ADDED_MEMBERS,
ALREADY_MEMBER,
BOLD_ON,
CLEAR_ENTIRE_LINE,
CLEAR_ENTIRE_SCREEN,
CURSOR_LEFT_UP_CORNER,
CURSOR_UP_ONE_LINE,
DONE,
FINGERPRINT_LENGTH,
NEW_GROUP,
NORMAL_TEXT,
NOT_IN_GROUP,
REMOVED_MEMBERS,
RX,
SYMMETRIC_KEY_LENGTH,
TX,
UNKNOWN_ACCOUNTS,
VERSION,
)
from tests.mock_classes import ContactList, nick_to_pub_key, Settings
from tests.utils import TFCTestCase
from tests.utils import TFCTestCase
class TestClearScreen(TFCTestCase):
def test_clear_screen(self):
def test_clear_screen(self) -> None:
self.assert_prints(CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER, clear_screen)
class TestGroupManagementPrint(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.contact_list = ContactList(nicks=['Alice'])
self.lines = [nick_to_pub_key('Alice'), nick_to_pub_key('Bob')]
self.group_name = 'test_group'
self.contact_list = ContactList(nicks=["Alice"])
self.lines = [nick_to_pub_key("Alice"), nick_to_pub_key("Bob")]
self.group_name = "test_group"
def test_group_management_print(self):
group_management_print(NEW_GROUP, self.lines, self.contact_list, self.group_name)
self.assert_prints("""\
def test_group_management_print(self) -> None:
group_management_print(
NEW_GROUP, self.lines, self.contact_list, self.group_name
)
self.assert_prints(
"""\
Created new group 'test_group' with following members:
* Alice
* zwp3dykiztmeils2u5eqjtdtx5x3kti5ktjthpkznku3ws5u5fq2bnad
""", group_management_print, NEW_GROUP, self.lines, self.contact_list, self.group_name)
""",
group_management_print,
NEW_GROUP,
self.lines,
self.contact_list,
self.group_name,
)
self.assert_prints("""\
self.assert_prints(
"""\
Added following accounts to group 'test_group':
* Alice
* zwp3dykiztmeils2u5eqjtdtx5x3kti5ktjthpkznku3ws5u5fq2bnad
""", group_management_print, ADDED_MEMBERS, self.lines, self.contact_list, self.group_name)
""",
group_management_print,
ADDED_MEMBERS,
self.lines,
self.contact_list,
self.group_name,
)
self.assert_prints("""\
self.assert_prints(
"""\
Following accounts were already in group 'test_group':
* Alice
* zwp3dykiztmeils2u5eqjtdtx5x3kti5ktjthpkznku3ws5u5fq2bnad
""", group_management_print, ALREADY_MEMBER, self.lines, self.contact_list, self.group_name)
""",
group_management_print,
ALREADY_MEMBER,
self.lines,
self.contact_list,
self.group_name,
)
self.assert_prints("""\
self.assert_prints(
"""\
Removed following members from group 'test_group':
* Alice
* zwp3dykiztmeils2u5eqjtdtx5x3kti5ktjthpkznku3ws5u5fq2bnad
""", group_management_print, REMOVED_MEMBERS, self.lines, self.contact_list, self.group_name)
""",
group_management_print,
REMOVED_MEMBERS,
self.lines,
self.contact_list,
self.group_name,
)
self.assert_prints("""\
self.assert_prints(
"""\
Following accounts were not in group 'test_group':
* Alice
* zwp3dykiztmeils2u5eqjtdtx5x3kti5ktjthpkznku3ws5u5fq2bnad
""", group_management_print, NOT_IN_GROUP, self.lines, self.contact_list, self.group_name)
""",
group_management_print,
NOT_IN_GROUP,
self.lines,
self.contact_list,
self.group_name,
)
self.assert_prints("""\
self.assert_prints(
"""\
Following unknown accounts were ignored:
* Alice
* zwp3dykiztmeils2u5eqjtdtx5x3kti5ktjthpkznku3ws5u5fq2bnad
""", group_management_print, UNKNOWN_ACCOUNTS, self.lines, self.contact_list, self.group_name)
""",
group_management_print,
UNKNOWN_ACCOUNTS,
self.lines,
self.contact_list,
self.group_name,
)
class TestMPrint(TFCTestCase):
long_msg = ("Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum consectetur purus quis"
" dapibus. Fusce venenatis lacus ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed bibendu"
"m velit maximus in. Aliquam ac metus risus. Sed cursus ornare luctus. Integer aliquet lectus id ma"
"ssa blandit imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget luctus nisl. Sed ut el"
"it iaculis, faucibus lacus eget, sodales magna. Nunc sed commodo arcu. In hac habitasse platea dic"
"tumst. Integer luctus aliquam justo, at vestibulum dolor iaculis ac. Etiam laoreet est eget odio r"
"utrum, vel malesuada lorem rhoncus. Cras finibus in neque eu euismod. Nulla facilisi. Nunc nec ali"
"quam quam, quis ullamcorper leo. Nunc egestas lectus eget est porttitor, in iaculis felis sceleris"
"que. In sem elit, fringilla id viverra commodo, sagittis varius purus. Pellentesque rutrum loborti"
"s neque a facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu.")
long_msg = (
"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum consectetur purus quis"
" dapibus. Fusce venenatis lacus ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed bibendu"
"m velit maximus in. Aliquam ac metus risus. Sed cursus ornare luctus. Integer aliquet lectus id ma"
"ssa blandit imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget luctus nisl. Sed ut el"
"it iaculis, faucibus lacus eget, sodales magna. Nunc sed commodo arcu. In hac habitasse platea dic"
"tumst. Integer luctus aliquam justo, at vestibulum dolor iaculis ac. Etiam laoreet est eget odio r"
"utrum, vel malesuada lorem rhoncus. Cras finibus in neque eu euismod. Nulla facilisi. Nunc nec ali"
"quam quam, quis ullamcorper leo. Nunc egestas lectus eget est porttitor, in iaculis felis sceleris"
"que. In sem elit, fringilla id viverra commodo, sagittis varius purus. Pellentesque rutrum loborti"
"s neque a facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu."
)
@mock.patch('builtins.input', return_value='')
def test_m_print(self, _):
@mock.patch("builtins.input", return_value="")
def test_m_print(self, _) -> None:
self.assert_prints("Test message\n", m_print, ["Test message"], center=False)
self.assert_prints("Test message\n", m_print, "Test message", center=False)
def test_long_message(self):
self.assert_prints("""\
def test_long_message(self) -> None:
self.assert_prints(
"""\
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum
consectetur purus quis dapibus. Fusce venenatis lacus ut rhoncus faucibus.
Cras sollicitudin commodo sapien, sed bibendum velit maximus in. Aliquam ac
@ -133,9 +206,14 @@ aliquam quam, quis ullamcorper leo. Nunc egestas lectus eget est porttitor, in
iaculis felis scelerisque. In sem elit, fringilla id viverra commodo, sagittis
varius purus. Pellentesque rutrum lobortis neque a facilisis. Mauris id tortor
placerat, aliquam dolor ac, venenatis arcu.
""", m_print, TestMPrint.long_msg, center=False)
""",
m_print,
TestMPrint.long_msg,
center=False,
)
self.assert_prints("""\
self.assert_prints(
"""\
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum
consectetur purus quis dapibus. Fusce venenatis lacus ut rhoncus faucibus.
@ -151,9 +229,15 @@ placerat, aliquam dolor ac, venenatis arcu.
viverra commodo, sagittis varius purus. Pellentesque rutrum lobortis neque a
facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu.
""", m_print, TestMPrint.long_msg, center=False, box=True)
""",
m_print,
TestMPrint.long_msg,
center=False,
box=True,
)
self.assert_prints(f"""\
self.assert_prints(
f"""\
{BOLD_ON}{NORMAL_TEXT}
{BOLD_ON} Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum {NORMAL_TEXT}
{BOLD_ON} consectetur purus quis dapibus. Fusce venenatis lacus ut rhoncus faucibus. {NORMAL_TEXT}
@ -169,29 +253,50 @@ placerat, aliquam dolor ac, venenatis arcu.
{BOLD_ON} viverra commodo, sagittis varius purus. Pellentesque rutrum lobortis neque a {NORMAL_TEXT}
{BOLD_ON} facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu. {NORMAL_TEXT}
{BOLD_ON}{NORMAL_TEXT}
""", m_print, TestMPrint.long_msg, center=False, box=True, bold=True)
""",
m_print,
TestMPrint.long_msg,
center=False,
box=True,
bold=True,
)
def test_multi_line(self):
self.assert_prints("""\
def test_multi_line(self) -> None:
self.assert_prints(
"""\
Test
message
""", m_print, ["Test", '', "message"], box=True)
""",
m_print,
["Test", "", "message"],
box=True,
)
def test_head_and_tail(self):
self.assert_prints("""\
def test_head_and_tail(self) -> None:
self.assert_prints(
"""\

Test
""", m_print, ["Test"], box=True, head_clear=True, tail_clear=True, head=2, tail=1)
""",
m_print,
["Test"],
box=True,
head_clear=True,
tail_clear=True,
head=2,
tail=1,
)
def test_wrapping(self):
self.assert_prints("""\
def test_wrapping(self) -> None:
self.assert_prints(
"""\
short message
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum
@ -208,27 +313,30 @@ placerat, aliquam dolor ac, venenatis arcu.
viverra commodo, sagittis varius purus. Pellentesque rutrum lobortis neque a
facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu.
""", m_print, ["short message", TestMPrint.long_msg], box=True)
""",
m_print,
["short message", TestMPrint.long_msg],
box=True,
)
@mock.patch("builtins.input", return_value='')
def test_manual_proceed(self, _):
@mock.patch("builtins.input", return_value="")
def test_manual_proceed(self, _) -> None:
self.assertIsNone(m_print("test", manual_proceed=True))
class TestPhase(unittest.TestCase):
@mock.patch('time.sleep', return_value=None)
def test_phase(self, _):
self.assertIsNone(phase('Entering phase'))
@mock.patch("time.sleep", return_value=None)
def test_phase(self, _) -> None:
self.assertIsNone(phase("Entering phase"))
self.assertIsNone(phase(DONE))
self.assertIsNone(phase('Starting phase', head=1, offset=len("Finished")))
self.assertIsNone(phase('Finished', done=True))
self.assertIsNone(phase("Starting phase", head=1, offset=len("Finished")))
self.assertIsNone(phase("Finished", done=True))
class TestPrintFingerprint(TFCTestCase):
def test_print_fingerprints(self):
self.assert_prints("""\
def test_print_fingerprints(self) -> None:
self.assert_prints(
"""\
Fingerprint for Alice
@ -236,83 +344,117 @@ class TestPrintFingerprint(TFCTestCase):
54936 03101 11892 94057 51231
59374 09637 58434 47573 71137
\n""",
print_fingerprint, FINGERPRINT_LENGTH * b'\x01', 'Fingerprint for Alice')
print_fingerprint,
FINGERPRINT_LENGTH * b"\x01",
"Fingerprint for Alice",
)
class TestPrintKey(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.settings = Settings()
def test_print_kdk(self):
self.assert_prints("""\
def test_print_kdk(self) -> None:
self.assert_prints(
"""\
Local key decryption key (to Receiver)
A B C D E F G H I J K L M N O P Q
5Hp Hag T65 TZz G1P H3C Su6 3k8 Dbp vD8 s5i p4n EB3 kEs reA bua tmU
\n""",
print_key, "Local key decryption key (to Receiver)",
bytes(SYMMETRIC_KEY_LENGTH), self.settings)
print_key,
"Local key decryption key (to Receiver)",
bytes(SYMMETRIC_KEY_LENGTH),
self.settings,
)
def test_print_kdk_local_testing(self):
def test_print_kdk_local_testing(self) -> None:
self.settings.local_testing_mode = True
self.assert_prints("""\
self.assert_prints(
"""\
Local key decryption key (to Receiver)
5HpHagT65TZzG1PH3CSu63k8DbpvD8s5ip4nEB3kEsreAbuatmU
\n""",
print_key, "Local key decryption key (to Receiver)",
bytes(SYMMETRIC_KEY_LENGTH), self.settings)
print_key,
"Local key decryption key (to Receiver)",
bytes(SYMMETRIC_KEY_LENGTH),
self.settings,
)
class TestPrintTitle(TFCTestCase):
def test_print_tx_title(self):
self.assert_prints(f"""\
def test_print_tx_title(self) -> None:
self.assert_prints(
f"""\
{CLEAR_ENTIRE_SCREEN+CURSOR_LEFT_UP_CORNER}
{BOLD_ON} TFC - Transmitter {VERSION} {NORMAL_TEXT}\n
""", print_title, TX)
""",
print_title,
TX,
)
def test_print_rx_title(self):
self.assert_prints(f"""\
def test_print_rx_title(self) -> None:
self.assert_prints(
f"""\
{CLEAR_ENTIRE_SCREEN+CURSOR_LEFT_UP_CORNER}
{BOLD_ON} TFC - Receiver {VERSION} {NORMAL_TEXT}\n
""", print_title, RX)
""",
print_title,
RX,
)
class TestPrintOnPreviousLine(TFCTestCase):
def test_print_on_previous_line(self):
self.assert_prints(CURSOR_UP_ONE_LINE + CLEAR_ENTIRE_LINE, print_on_previous_line)
self.assert_prints(2 * (CURSOR_UP_ONE_LINE + CLEAR_ENTIRE_LINE), print_on_previous_line, reps=2)
self.assert_prints(2 * (CURSOR_UP_ONE_LINE + CLEAR_ENTIRE_LINE), print_on_previous_line, reps=2, flush=True)
def test_print_on_previous_line(self) -> None:
self.assert_prints(
CURSOR_UP_ONE_LINE + CLEAR_ENTIRE_LINE, print_on_previous_line
)
self.assert_prints(
2 * (CURSOR_UP_ONE_LINE + CLEAR_ENTIRE_LINE), print_on_previous_line, reps=2
)
self.assert_prints(
2 * (CURSOR_UP_ONE_LINE + CLEAR_ENTIRE_LINE),
print_on_previous_line,
reps=2,
flush=True,
)
class TestPrintSpacing(TFCTestCase):
def test_print_spacing(self):
def test_print_spacing(self) -> None:
for i in range(20):
self.assert_prints(i * '\n', print_spacing, i)
self.assert_prints(i * "\n", print_spacing, i)
class TestRPPrint(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.ts = datetime.now()
self.ts = datetime.now()
self.timestamp = self.ts.strftime("%b %d - %H:%M:%S.%f")[:-4]
def test_bold_print(self):
self.assert_prints(f"{BOLD_ON}{self.timestamp} - testMessage{NORMAL_TEXT}\n",
rp_print, "testMessage", self.ts, bold=True)
def test_bold_print(self) -> None:
self.assert_prints(
f"{BOLD_ON}{self.timestamp} - testMessage{NORMAL_TEXT}\n",
rp_print,
"testMessage",
self.ts,
bold=True,
)
def test_normal_print(self):
self.assert_prints(f"{self.timestamp} - testMessage\n", rp_print, "testMessage", self.ts, bold=False)
def test_normal_print(self) -> None:
self.assert_prints(
f"{self.timestamp} - testMessage\n",
rp_print,
"testMessage",
self.ts,
bold=False,
)
def test_works_without_timestamp(self):
def test_works_without_timestamp(self) -> None:
self.assertIsNone(rp_print("testMessage"))
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main(exit=False)

View File

@ -23,131 +23,169 @@ import os
import _tkinter
import unittest
from unittest import mock
from unittest import mock
from unittest.mock import MagicMock
from src.common.path import ask_path_cli, ask_path_gui, Completer
from tests.mock_classes import Settings
from tests.utils import cd_unit_test, cleanup, ignored, TFCTestCase
from tests.utils import cd_unit_test, cleanup, ignored, TFCTestCase
class TestAskPathGui(TFCTestCase):
file_path = '/home/user/file.txt'
path = '/home/user/'
file_path = "/home/user/file.txt"
path = "/home/user/"
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.settings = Settings()
@mock.patch('os.path.isfile', return_value=True)
@mock.patch('builtins.input', return_value=file_path)
def test_disabled_gui_uses_cli(self, *_):
@mock.patch("os.path.isfile", return_value=True)
@mock.patch("builtins.input", return_value=file_path)
def test_disabled_gui_uses_cli(self, *_) -> None:
self.settings.disable_gui_dialog = True
self.assertEqual(ask_path_gui('prompt_msg', self.settings, get_file=True), self.file_path)
self.assertEqual(
ask_path_gui("prompt_msg", self.settings, get_file=True), self.file_path
)
@mock.patch('os.path.isfile', return_value=True)
@mock.patch('builtins.input', return_value=file_path)
@mock.patch('tkinter.filedialog.askopenfilename', side_effect=_tkinter.TclError)
def test_tcl_error_falls_back_to_cli(self, *_):
self.assertEqual(ask_path_gui('prompt_msg', self.settings, get_file=True), self.file_path)
@mock.patch("os.path.isfile", return_value=True)
@mock.patch("builtins.input", return_value=file_path)
@mock.patch("tkinter.filedialog.askopenfilename", side_effect=_tkinter.TclError)
def test_tcl_error_falls_back_to_cli(self, *_) -> None:
self.assertEqual(
ask_path_gui("prompt_msg", self.settings, get_file=True), self.file_path
)
@mock.patch('tkinter.Tk', return_value=MagicMock())
@mock.patch('os.path.isfile', return_value=True)
@mock.patch('tkinter.filedialog.askopenfilename', return_value=file_path)
def test_get_path_to_file_gui(self, *_):
self.assertEqual(ask_path_gui('path to file:', self.settings, get_file=True),
self.file_path)
@mock.patch("tkinter.Tk", return_value=MagicMock())
@mock.patch("os.path.isfile", return_value=True)
@mock.patch("tkinter.filedialog.askopenfilename", return_value=file_path)
def test_get_path_to_file_gui(self, *_) -> None:
self.assertEqual(
ask_path_gui("path to file:", self.settings, get_file=True), self.file_path
)
@unittest.skipIf("TRAVIS" in os.environ and os.environ["TRAVIS"] == "true", "Skip as Travis has no $DISPLAY.")
@mock.patch('tkinter.filedialog.askopenfilename', return_value='')
def test_no_path_to_file_raises_fr(self, _):
self.assert_fr("File selection aborted.", ask_path_gui, 'test message', self.settings, True)
@unittest.skipIf(
"TRAVIS" in os.environ and os.environ["TRAVIS"] == "true",
"Skip as Travis has no $DISPLAY.",
)
@mock.patch("tkinter.filedialog.askopenfilename", return_value="")
def test_no_path_to_file_raises_fr(self, _) -> None:
self.assert_se(
"File selection aborted.", ask_path_gui, "test message", self.settings, True
)
@unittest.skipIf("TRAVIS" in os.environ and os.environ["TRAVIS"] == "true", "Skip as Travis has no $DISPLAY.")
@mock.patch('tkinter.filedialog.askdirectory', return_value=path)
def test_get_path_gui(self, _):
self.assertEqual(ask_path_gui('select path for file:', self.settings), self.path)
@unittest.skipIf(
"TRAVIS" in os.environ and os.environ["TRAVIS"] == "true",
"Skip as Travis has no $DISPLAY.",
)
@mock.patch("tkinter.filedialog.askdirectory", return_value=path)
def test_get_path_gui(self, _) -> None:
self.assertEqual(
ask_path_gui("select path for file:", self.settings), self.path
)
@unittest.skipIf("TRAVIS" in os.environ and os.environ["TRAVIS"] == "true", "Skip as Travis has no $DISPLAY.")
@mock.patch('tkinter.filedialog.askdirectory', return_value='')
def test_no_path_raises_fr(self, _):
self.assert_fr("Path selection aborted.", ask_path_gui, 'test message', self.settings, False)
@unittest.skipIf(
"TRAVIS" in os.environ and os.environ["TRAVIS"] == "true",
"Skip as Travis has no $DISPLAY.",
)
@mock.patch("tkinter.filedialog.askdirectory", return_value="")
def test_no_path_raises_fr(self, _) -> None:
self.assert_se(
"Path selection aborted.",
ask_path_gui,
"test message",
self.settings,
False,
)
class TestCompleter(unittest.TestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.cwd = os.getcwd()
self.cwd = os.getcwd()
self.unit_test_dir = cd_unit_test()
# Create test directory structure for the completer.
os.mkdir('outer')
os.chdir('outer/')
with open('file', 'w+') as f:
f.write('text')
os.mkdir('middle')
os.chdir('middle/')
os.mkdir('inner')
os.chdir('..')
os.chdir('..')
os.mkdir("outer")
os.chdir("outer/")
with open("file", "w+") as f:
f.write("text")
os.mkdir("middle")
os.chdir("middle/")
os.mkdir("inner")
os.chdir("..")
os.chdir("..")
def tearDown(self):
def tearDown(self) -> None:
"""Post-test actions."""
cleanup(self.unit_test_dir)
os.chdir(self.cwd)
def test_completer(self):
def test_completer(self) -> None:
# Test path
completer = Completer(get_file=False)
self.assertEqual(completer.complete_path('outer/'), ['outer/middle/'])
self.assertEqual(completer.path_complete(['/outer']), [])
self.assertEqual(completer.path_complete(), ['./outer/'])
self.assertEqual(completer.complete_path(''), ['outer/'])
self.assertEqual(completer.complete_path('outer/middle'), ['outer/middle/inner/'])
self.assertEqual(completer.complete_path('outer/file'), ['outer/file '])
self.assertNotEqual(completer.listdir('outer/'), [])
self.assertEqual(completer.complete_path("outer/"), ["outer/middle/"])
self.assertEqual(completer.path_complete(["/outer"]), [])
self.assertEqual(completer.path_complete(), ["./outer/"])
self.assertEqual(completer.complete_path(""), ["outer/"])
self.assertEqual(
completer.complete_path("outer/middle"), ["outer/middle/inner/"]
)
self.assertEqual(completer.complete_path("outer/file"), ["outer/file "])
self.assertNotEqual(completer.listdir("outer/"), [])
# Test file
completer = Completer(get_file=True)
self.assertTrue(len(completer.complete_path('/bin/')) > 0)
self.assertTrue(completer.complete('', 0))
self.assertTrue(len(completer.complete_path("/bin/")) > 0)
self.assertTrue(completer.complete("", 0))
class TestPath(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
with ignored(FileExistsError):
os.mkdir('test_dir/')
with ignored(OSError):
os.mkdir("test_dir/")
def tearDown(self):
def tearDown(self) -> None:
"""Post-test actions."""
with ignored(OSError):
os.remove('testfile')
os.remove("testfile")
with ignored(OSError):
os.rmdir('test_dir/')
os.rmdir("test_dir/")
@mock.patch('time.sleep', return_value=None)
@mock.patch('os.path.isfile', side_effect=[False, True, True])
@mock.patch('builtins.input', side_effect=['file1', 'file2', './test_dir', './testfile', '', '/home',
'/dir_that_does_not_exist', '/bin/', KeyboardInterrupt])
def test_ask_path_cli(self, *_):
self.assertEqual(ask_path_cli('path to file:', get_file=True), 'file2')
self.assertEqual(ask_path_cli('prompt_msg'), 'test_dir/')
@mock.patch("time.sleep", return_value=None)
@mock.patch("os.path.isfile", side_effect=[False, True, True])
@mock.patch(
"builtins.input",
side_effect=[
"file1",
"file2",
"./test_dir",
"./testfile",
"",
"/home",
"/dir_that_does_not_exist",
"/bin/",
KeyboardInterrupt,
],
)
def test_ask_path_cli(self, *_) -> None:
self.assertEqual(ask_path_cli("path to file:", get_file=True), "file2")
self.assertEqual(ask_path_cli("prompt_msg"), "test_dir/")
open('testfile', 'a+').close()
self.assertEqual(ask_path_cli('prompt_msg', get_file=True), 'testfile')
open("testfile", "a+").close()
self.assertEqual(ask_path_cli("prompt_msg", get_file=True), "testfile")
self.assert_fr("File selection aborted.", ask_path_cli, 'prompt_msg', True)
self.assert_se("File selection aborted.", ask_path_cli, "prompt_msg", True)
self.assertEqual(ask_path_cli('prompt_msg'), '/home/')
self.assertEqual(ask_path_cli('prompt_msg'), '/bin/')
self.assertEqual(ask_path_cli("prompt_msg"), "/home/")
self.assertEqual(ask_path_cli("prompt_msg"), "/bin/")
self.assert_fr("File path selection aborted.", ask_path_cli, 'prompt_msg', False)
self.assert_se(
"File path selection aborted.", ask_path_cli, "prompt_msg", False
)
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main(exit=False)

File diff suppressed because it is too large Load Diff

View File

@ -24,14 +24,17 @@ import unittest
import src.common.statics
from src.common.encoding import onion_address_to_pub_key
from src.common.misc import validate_onion_addr
from src.common.misc import validate_onion_addr
class TestStatics(unittest.TestCase):
def test_uniqueness(self):
variable_list = [getattr(src.common.statics, i) for i in dir(src.common.statics) if not i.startswith('__')]
variable_list = [v for v in variable_list if (isinstance(v, str) or isinstance(v, bytes))]
def test_uniqueness(self) -> None:
variable_list = [
getattr(src.common.statics, i)
for i in dir(src.common.statics)
if not i.startswith("__")
]
variable_list = [v for v in variable_list if (isinstance(v, (bytes, str)))]
# Debugger
for unique_variable in list(set(variable_list)):
@ -40,15 +43,22 @@ class TestStatics(unittest.TestCase):
if variable == unique_variable:
repeats += 1
if repeats > 1:
spacing = (3 - len(unique_variable)) * ' '
print(f"Setting value '{unique_variable}'{spacing} appeared in {repeats} variables: ", end='')
items = [i for i in dir(src.common.statics)
if not i.startswith('__') and getattr(src.common.statics, i) == unique_variable]
print(', '.join(items))
spacing = (3 - len(unique_variable)) * " "
print(
f"Setting value '{unique_variable}'{spacing} appeared in {repeats} variables: ",
end="",
)
items = [
i
for i in dir(src.common.statics)
if not i.startswith("__")
and getattr(src.common.statics, i) == unique_variable
]
print(", ".join(items))
self.assertEqual(len(list(set(variable_list))), len(variable_list))
def test_group_id_length_is_not_same_as_onion_service_pub_key_length(self):
def test_group_id_length_is_not_same_as_onion_service_pub_key_length(self) -> None:
"""\
In current implementation, `src.common.db_logs.remove_logs`
determines the type of data to be removed from the length of
@ -57,49 +67,59 @@ class TestStatics(unittest.TestCase):
able to distinguish what type of entries (contacts or group
logs) should be removed from the database.
"""
self.assertNotEqual(src.common.statics.ONION_SERVICE_PUBLIC_KEY_LENGTH,
src.common.statics.GROUP_ID_LENGTH)
self.assertNotEqual(
src.common.statics.ONION_SERVICE_PUBLIC_KEY_LENGTH,
src.common.statics.GROUP_ID_LENGTH,
)
def test_reserved_accounts_are_valid(self):
def test_reserved_accounts_are_valid(self) -> None:
"""\
Each used account placeholder should be a valid, but reserved
account.
"""
reserved_accounts = [src.common.statics.LOCAL_ID,
src.common.statics.DUMMY_CONTACT,
src.common.statics.DUMMY_MEMBER]
reserved_accounts = [
src.common.statics.LOCAL_ID,
src.common.statics.DUMMY_CONTACT,
src.common.statics.DUMMY_MEMBER,
]
for account in reserved_accounts:
self.assertEqual(validate_onion_addr(account), "Error: Can not add reserved account.")
self.assertEqual(
validate_onion_addr(account), "Error: Can not add reserved account."
)
# Test each account is unique.
self.assertEqual(len(reserved_accounts),
len(set(reserved_accounts)))
self.assertEqual(len(reserved_accounts), len(set(reserved_accounts)))
def test_local_pubkey(self):
def test_local_pubkey(self) -> None:
"""Test that local key's reserved public key is valid."""
self.assertEqual(src.common.statics.LOCAL_PUBKEY,
onion_address_to_pub_key(src.common.statics.LOCAL_ID))
self.assertEqual(
src.common.statics.LOCAL_PUBKEY,
onion_address_to_pub_key(src.common.statics.LOCAL_ID),
)
def test_group_management_header_length_matches_datagram_header_length(self):
def test_group_management_header_length_matches_datagram_header_length(
self,
) -> None:
"""
As group management messages are handled as messages available
to Relay Program, the header should be the same as any datagrams
handled by the Relay program.
"""
self.assertEqual(src.common.statics.GROUP_MGMT_HEADER_LENGTH,
src.common.statics.DATAGRAM_HEADER_LENGTH)
self.assertEqual(
src.common.statics.GROUP_MGMT_HEADER_LENGTH,
src.common.statics.DATAGRAM_HEADER_LENGTH,
)
def test_key_exchanges_start_with_different_letter(self):
def test_key_exchanges_start_with_different_letter(self) -> None:
"""
Key exchange can be selected by entering just X to represent
X448 or P to represent X448. This test detects if selection
names would ever be set to something like PUBLIC and PSK
that both start with P.
"""
self.assertNotEqual(src.common.statics.ECDHE[:1],
src.common.statics.PSK[:1])
self.assertNotEqual(src.common.statics.ECDHE[:1], src.common.statics.PSK[:1])
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main(exit=False)

View File

@ -25,15 +25,12 @@ from src.common.word_list import eff_wordlist
class TestWordList(unittest.TestCase):
def test_each_word_is_unique(self) -> None:
self.assertEqual(len(eff_wordlist), len(set(eff_wordlist)))
def test_each_word_is_unique(self):
self.assertEqual(len(eff_wordlist),
len(set(eff_wordlist)))
def test_word_list_length(self):
self.assertEqual(len(eff_wordlist),
7776)
def test_word_list_length(self) -> None:
self.assertEqual(len(eff_wordlist), 7776)
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main()

View File

@ -23,80 +23,118 @@ import getpass
import time
from datetime import datetime
from typing import Generator, Iterable, List, Sized
from typing import Generator, Iterable, List, Optional, Sized
import nacl.signing
from src.common.database import TFCUnencryptedDatabase
from src.common.db_contacts import Contact
from src.common.db_groups import Group
from src.common.db_keys import KeySet
from src.common.db_contacts import ContactList as OrigContactList
from src.common.db_groups import GroupList as OrigGroupList
from src.common.db_onion import OnionService as OrigOnionService
from src.common.db_keys import KeyList as OrigKeyList
from src.common.db_masterkey import MasterKey as OrigMasterKey
from src.common.gateway import Gateway as OrigGateway
from src.common.gateway import GatewaySettings as OrigGatewaySettings
from src.common.db_settings import Settings as OrigSettings
from src.common.encoding import pub_key_to_onion_address, pub_key_to_short_address
from src.common.misc import calculate_race_condition_delay
from src.common.database import TFCUnencryptedDatabase
from src.common.db_contacts import Contact
from src.common.db_groups import Group
from src.common.db_keys import KeySet
from src.common.db_contacts import ContactList as OrigContactList
from src.common.db_groups import GroupList as OrigGroupList
from src.common.db_onion import OnionService as OrigOnionService
from src.common.db_keys import KeyList as OrigKeyList
from src.common.db_masterkey import MasterKey as OrigMasterKey
from src.common.gateway import Gateway as OrigGateway
from src.common.gateway import GatewaySettings as OrigGatewaySettings
from src.common.db_settings import Settings as OrigSettings
from src.common.encoding import pub_key_to_onion_address, pub_key_to_short_address
from src.common.misc import calculate_race_condition_delay
from src.common.reed_solomon import RSCodec
from src.common.statics import (DIR_USER_DATA, FINGERPRINT_LENGTH, INITIAL_HARAC, KEX_STATUS_VERIFIED, LOCAL_ID,
LOCAL_NICK, LOCAL_PUBKEY, ONION_SERVICE_PRIVATE_KEY_LENGTH, SYMMETRIC_KEY_LENGTH,
TX, WIN_TYPE_GROUP, WIN_UID_LOCAL)
from src.common.statics import (
DIR_USER_DATA,
FINGERPRINT_LENGTH,
INITIAL_HARAC,
KEX_STATUS_VERIFIED,
LOCAL_ID,
LOCAL_NICK,
LOCAL_PUBKEY,
ONION_SERVICE_PRIVATE_KEY_LENGTH,
SYMMETRIC_KEY_LENGTH,
TX,
WIN_UID_COMMAND,
WIN_TYPE_GROUP,
)
from src.transmitter.windows import TxWindow as OrigTxWindow
from src.receiver.packet import PacketList as OrigPacketList
from src.receiver.windows import RxWindow as OrigRxWindow
from src.receiver.packet import PacketList as OrigPacketList
from src.receiver.windows import RxWindow as OrigRxWindow
from tests.utils import nick_to_pub_key, group_name_to_group_id
def create_contact(nick,
tx_fingerprint=FINGERPRINT_LENGTH * b'\x01',
rx_fingerprint=FINGERPRINT_LENGTH * b'\x02',
kex_status =KEX_STATUS_VERIFIED,
log_messages =True,
file_reception=True,
notifications =True):
def create_contact(
nick: str,
tx_fingerprint: bytes = FINGERPRINT_LENGTH * b"\x01",
rx_fingerprint: bytes = FINGERPRINT_LENGTH * b"\x02",
kex_status: bytes = KEX_STATUS_VERIFIED,
log_messages: bool = True,
file_reception: bool = True,
notifications: bool = True,
) -> Contact:
"""Create a mock contact object."""
if nick == LOCAL_ID:
pub_key = LOCAL_PUBKEY
nick = LOCAL_NICK
nick = LOCAL_NICK
else:
pub_key = nick_to_pub_key(nick)
return Contact(pub_key, nick,
tx_fingerprint, rx_fingerprint, kex_status,
log_messages, file_reception, notifications)
return Contact(
pub_key,
nick,
tx_fingerprint,
rx_fingerprint,
kex_status,
log_messages,
file_reception,
notifications,
)
def create_group(name, nick_list=None):
def create_group(name: str, nick_list: Optional[List[str]] = None):
"""Create a mock group object."""
if nick_list is None:
nick_list = ['Alice', 'Bob']
nick_list = ["Alice", "Bob"]
settings = Settings()
members = [create_contact(n) for n in nick_list]
return Group(name, group_name_to_group_id(name), False, False, members, settings, lambda: None)
members = [create_contact(n) for n in nick_list]
return Group(
name,
group_name_to_group_id(name),
False,
False,
members,
settings,
lambda: None,
)
def create_keyset(nick,
tx_key=SYMMETRIC_KEY_LENGTH * b'\x01',
tx_hek=SYMMETRIC_KEY_LENGTH * b'\x01',
rx_key=SYMMETRIC_KEY_LENGTH * b'\x01',
rx_hek=SYMMETRIC_KEY_LENGTH * b'\x01',
tx_harac=INITIAL_HARAC,
rx_harac=INITIAL_HARAC,
store_f=None):
def create_keyset(
nick,
tx_key=SYMMETRIC_KEY_LENGTH * b"\x01",
tx_hek=SYMMETRIC_KEY_LENGTH * b"\x01",
rx_key=SYMMETRIC_KEY_LENGTH * b"\x01",
rx_hek=SYMMETRIC_KEY_LENGTH * b"\x01",
tx_harac=INITIAL_HARAC,
rx_harac=INITIAL_HARAC,
store_f=None,
) -> KeySet:
"""Create a mock keyset object."""
pub_key = LOCAL_PUBKEY if nick == LOCAL_ID else nick_to_pub_key(nick)
return KeySet(pub_key, tx_key, tx_hek, rx_key, rx_hek, tx_harac, rx_harac,
store_keys=lambda: None if store_f is None else store_f)
return KeySet(
pub_key,
tx_key,
tx_hek,
rx_key,
rx_hek,
tx_harac,
rx_harac,
store_keys=lambda: None if store_f is None else store_f,
)
def create_rx_window(nick='Alice'):
def create_rx_window(nick="Alice") -> OrigRxWindow:
"""Create a mock Rx-window object."""
pub_key = LOCAL_PUBKEY if nick == LOCAL_ID else nick_to_pub_key(nick)
return RxWindow(uid=pub_key)
@ -106,10 +144,10 @@ def create_rx_window(nick='Alice'):
class ContactList(OrigContactList, Iterable, Sized):
"""Mock the object for unit testing."""
def __init__(self, nicks=None, **kwargs):
def __init__(self, nicks=None, **kwargs) -> None:
self.master_key = MasterKey()
self.settings = Settings()
self.contacts = [] if nicks is None else [create_contact(n) for n in nicks]
self.settings = Settings()
self.contacts = [] if nicks is None else [create_contact(n) for n in nicks]
for key, value in kwargs.items():
setattr(self, key, value)
@ -119,38 +157,37 @@ class ContactList(OrigContactList, Iterable, Sized):
def store_contacts(self, replace: bool = True):
"""Mock method."""
pass
def load_contacts(self):
def load_contacts(self) -> None:
"""Mock method."""
pass
def print_contacts(self):
def print_contacts(self) -> None:
"""Mock method."""
pass
class Gateway(OrigGateway):
"""Mock the object for unit testing."""
def __init__(self, **kwargs):
self.packets = []
def __init__(self, **kwargs) -> None:
self.packets = []
self.settings = GatewaySettings(**kwargs)
self.rs = RSCodec(2 * self.settings.serial_error_correction)
self.rs = RSCodec(2 * self.settings.serial_error_correction)
def write(self, output):
def write(self, orig_packet: bytes) -> None:
"""Mock method."""
self.packets.append(output)
self.packets.append(orig_packet)
class GroupList(OrigGroupList, Iterable, Sized):
"""Mock the object for unit testing."""
def __init__(self, groups=None, **kwargs):
self.master_key = MasterKey()
self.settings = Settings()
def __init__(self, groups=None, **kwargs) -> None:
self.master_key = MasterKey()
self.settings = Settings()
self.contact_list = ContactList()
self.groups = [] if groups is None else [(create_group(g)) for g in groups] # type: List[Group]
self.groups = (
[] if groups is None else [(create_group(g)) for g in groups]
) # type: List[Group]
self.store_groups_called = False
for key, value in kwargs.items():
@ -168,22 +205,20 @@ class GroupList(OrigGroupList, Iterable, Sized):
"""Mock method."""
self.store_groups_called = True
def load_groups(self):
def load_groups(self) -> None:
"""Mock method."""
pass
def print_groups(self):
def print_groups(self) -> None:
"""Mock method."""
pass
class KeyList(OrigKeyList):
"""Mock the object for unit testing."""
def __init__(self, nicks=None, **kwargs):
def __init__(self, nicks=None, **kwargs) -> None:
self.master_key = MasterKey()
self.settings = Settings()
self.keysets = [] if nicks is None else [create_keyset(n) for n in nicks]
self.settings = Settings()
self.keysets = [] if nicks is None else [create_keyset(n) for n in nicks]
self.store_keys_called = False
@ -194,43 +229,43 @@ class KeyList(OrigKeyList):
"""Mock method."""
self.store_keys_called = True
def load_keys(self):
def load_keys(self) -> None:
"""Mock method."""
pass
class MasterKey(OrigMasterKey):
"""Mock the object for unit testing."""
def __init__(self, **kwargs):
def __init__(self, **kwargs) -> None:
"""Create new MasterKey mock object."""
self.local_test = False
self.master_key = bytes(SYMMETRIC_KEY_LENGTH)
self.file_name = f'{DIR_USER_DATA}{TX}_login_data'
self.database = TFCUnencryptedDatabase(self.file_name)
self.file_name = f"{DIR_USER_DATA}{TX}_login_data"
self.database = TFCUnencryptedDatabase(self.file_name)
for key, value in kwargs.items():
setattr(self, key, value)
def load_master_key(self) -> bytes:
"""Create mock master key bytes."""
if getpass.getpass() == 'test_password':
if getpass.getpass() == "test_password":
return self.master_key
else:
return SYMMETRIC_KEY_LENGTH * b'f'
return SYMMETRIC_KEY_LENGTH * b"f"
class OnionService(OrigOnionService):
"""Mock the object for unit testing."""
def __init__(self, **kwargs):
def __init__(self, **kwargs) -> None:
"""Create new OnionService mock object."""
self.onion_private_key = ONION_SERVICE_PRIVATE_KEY_LENGTH*b'a'
self.conf_code = b'a'
self.public_key = bytes(nacl.signing.SigningKey(seed=self.onion_private_key).verify_key)
self.onion_private_key = ONION_SERVICE_PRIVATE_KEY_LENGTH * b"a"
self.conf_code = b"a"
self.public_key = bytes(
nacl.signing.SigningKey(seed=self.onion_private_key).verify_key
)
self.user_onion_address = pub_key_to_onion_address(self.public_key)
self.user_short_address = pub_key_to_short_address(self.public_key)
self.is_delivered = False
self.is_delivered = False
for key, value in kwargs.items():
setattr(self, key, value)
@ -240,40 +275,40 @@ class OnionService(OrigOnionService):
class Settings(OrigSettings):
"""Mock the object for unit testing."""
def __init__(self, **kwargs):
def __init__(self, **kwargs) -> None:
"""Create new Settings mock object."""
self.disable_gui_dialog = False
self.max_number_of_group_members = 50
self.max_number_of_groups = 50
self.max_number_of_contacts = 50
self.log_messages_by_default = False
self.accept_files_by_default = False
self.disable_gui_dialog = False
self.max_number_of_group_members = 50
self.max_number_of_groups = 50
self.max_number_of_contacts = 50
self.log_messages_by_default = False
self.accept_files_by_default = False
self.show_notifications_by_default = True
self.log_file_masking = False
self.ask_password_for_log_access = True
self.log_file_masking = False
self.ask_password_for_log_access = True
# Transmitter settings
self.nc_bypass_messages = False
self.confirm_sent_files = True
self.double_space_exits = False
self.traffic_masking = False
self.tm_static_delay = 2.0
self.tm_random_delay = 2.0
self.traffic_masking = False
self.tm_static_delay = 2.0
self.tm_random_delay = 2.0
# Relay settings
self.allow_contact_requests = True
# Receiver settings
self.new_message_notify_preview = False
self.new_message_notify_preview = False
self.new_message_notify_duration = 1.0
self.max_decompress_size = 100_000_000
self.max_decompress_size = 100_000_000
self.master_key = MasterKey()
self.master_key = MasterKey()
self.software_operation = TX
self.local_testing_mode = False
self.all_keys = list(vars(self).keys())
self.key_list = self.all_keys[:self.all_keys.index('master_key')]
self.key_list = self.all_keys[: self.all_keys.index("master_key")]
self.defaults = {k: self.__dict__[k] for k in self.key_list}
# Override defaults with specified kwargs
@ -282,75 +317,71 @@ class Settings(OrigSettings):
def store_settings(self, replace: bool = True):
"""Mock method."""
pass
def load_settings(self):
def load_settings(self) -> None:
"""Mock method."""
pass
@staticmethod
def validate_key_value_pair(key, value, contact_list, group_list):
def validate_key_value_pair(key, value, contact_list, group_list) -> None:
"""Mock method."""
pass
# Transmitter Program
class GatewaySettings(OrigGatewaySettings):
"""Mock the object for unit testing."""
def __init__(self, **kwargs):
def __init__(self, **kwargs) -> None:
"""Create new GatewaySettings mock object."""
self.serial_baudrate = 19200
self.serial_error_correction = 5
self.use_serial_usb_adapter = True
self.built_in_serial_interface = 'ttyS0'
self.serial_baudrate = 19200
self.serial_error_correction = 5
self.use_serial_usb_adapter = True
self.built_in_serial_interface = "ttyS0"
self.software_operation = TX
self.local_testing_mode = False
self.data_diode_sockets = False
self.all_keys = list(vars(self).keys())
self.key_list = self.all_keys[:self.all_keys.index('software_operation')]
self.key_list = self.all_keys[: self.all_keys.index("software_operation")]
self.defaults = {k: self.__dict__[k] for k in self.key_list}
self.session_serial_error_correction = self.serial_error_correction
self.session_serial_baudrate = self.serial_baudrate
self.session_usb_serial_adapter = self.use_serial_usb_adapter
self.session_serial_baudrate = self.serial_baudrate
self.session_usb_serial_adapter = self.use_serial_usb_adapter
self.tx_inter_packet_delay = 0.0
self.rx_receive_timeout = 0.0
self.rx_receive_timeout = 0.0
self.race_condition_delay = calculate_race_condition_delay(self.session_serial_error_correction,
self.serial_baudrate)
self.race_condition_delay = calculate_race_condition_delay(
self.session_serial_error_correction, self.serial_baudrate
)
# Override defaults with specified kwargs
for key, value in kwargs.items():
setattr(self, key, value)
def store_settings(self):
def store_settings(self) -> None:
"""Mock method."""
pass
def load_settings(self):
def load_settings(self) -> None:
"""Mock method."""
pass
class TxWindow(OrigTxWindow):
"""Mock the object for unit testing."""
def __init__(self, **kwargs):
def __init__(self, **kwargs) -> None:
"""Create new TxWindow mock object."""
self.contact_list = ContactList()
self.group_list = GroupList()
self.contact_list = ContactList()
self.group_list = GroupList()
self.window_contacts = []
self.group = None
self.contact = None
self.name = None
self.type = None
self.uid = None
self.group_id = None
self.imc_name = None
self.group = None
self.contact = None
self.name = None
self.type = None
self.uid = None
self.group_id = None
self.imc_name = None
for key, value in kwargs.items():
setattr(self, key, value)
@ -358,10 +389,10 @@ class TxWindow(OrigTxWindow):
class UserInput(object):
"""Mock the object for unit testing."""
def __init__(self, plaintext=None, **kwargs):
def __init__(self, plaintext=None, **kwargs) -> None:
"""Create new UserInput mock object."""
self.plaintext = plaintext
self.type = None
self.type = None
for key, value in kwargs.items():
setattr(self, key, value)
@ -370,37 +401,36 @@ class UserInput(object):
class Packet(object):
"""Mock the object for unit testing."""
def __init__(self, **kwargs):
def __init__(self, **kwargs) -> None:
"""Create new Pack mock object."""
self.account = None
self.contact = None
self.origin = None
self.type = None
self.settings = None
self.f_name = None
self.f_size = None
self.f_packets = None
self.f_eta = None
self.lt_active = False
self.is_complete = False
self.account = None
self.contact = None
self.origin = None
self.type = None
self.settings = None
self.f_name = None
self.f_size = None
self.f_packets = None
self.f_eta = None
self.lt_active = False
self.is_complete = False
self.assembly_pt_list = []
self.payload = None # Unittest mock return value
self.payload = None # Unittest mock return value
for key, value in kwargs.items():
setattr(self, key, value)
def add_packet(self, packet):
def add_packet(self, packet) -> None:
"""Mock method."""
pass
def assemble_message_packet(self):
def assemble_message_packet(self) -> None:
"""Mock method."""
return self.payload
def assemble_and_store_file(self):
def assemble_and_store_file(self) -> None:
"""Mock method."""
return self.payload
def assemble_command_packet(self):
def assemble_command_packet(self) -> None:
"""Mock method."""
return self.payload
@ -408,10 +438,10 @@ class Packet(object):
class PacketList(OrigPacketList):
"""Mock the object for unit testing."""
def __init__(self, **kwargs):
self.settings = Settings()
def __init__(self, **kwargs) -> None:
self.settings = Settings()
self.contact_list = ContactList()
self.packets = []
self.packets = []
for key, value in kwargs.items():
setattr(self, key, value)
@ -420,26 +450,26 @@ class PacketList(OrigPacketList):
class RxWindow(OrigRxWindow):
"""Mock the object for unit testing."""
def __init__(self, **kwargs):
self.uid = None
def __init__(self, **kwargs) -> None:
self.uid = None
self.contact_list = ContactList()
self.group_list = GroupList()
self.settings = Settings()
self.packet_list = PacketList()
self.group_list = GroupList()
self.settings = Settings()
self.packet_list = PacketList()
self.is_active = False
self.is_active = False
self.group_timestamp = time.time() * 1000
self.group = None
self.group = None
self.window_contacts = []
self.message_log = []
self.handle_dict = dict()
self.message_log = []
self.handle_dict = dict()
self.previous_msg_ts = datetime.now()
self.unread_messages = 0
self.type = None
self.type = None
self.type_print = None
self.name = None
self.name = None
for key, value in kwargs.items():
setattr(self, key, value)
@ -448,46 +478,46 @@ class RxWindow(OrigRxWindow):
class WindowList(object):
"""Mock the object for unit testing."""
def __init__(self, nicks=None, **kwargs):
def __init__(self, nicks=None, **kwargs) -> None:
"""Create new WindowList mock object."""
self.contact_list = ContactList()
self.group_list = GroupList()
self.packet_list = PacketList()
self.settings = Settings()
self.windows = [] if nicks is None else [create_rx_window(n) for n in nicks]
self.group_list = GroupList()
self.packet_list = PacketList()
self.settings = Settings()
self.windows = [] if nicks is None else [create_rx_window(n) for n in nicks]
self.active_win = None
for key, value in kwargs.items():
setattr(self, key, value)
def __len__(self):
def __len__(self) -> int:
return len(self.windows)
def __iter__(self):
def __iter__(self) -> None:
yield from self.windows
def group_windows(self):
def group_windows(self) -> List[RxWindow]:
"""Mock method."""
return [w for w in self.windows if w.type == WIN_TYPE_GROUP]
def set_active_rx_window(self, name):
def set_active_rx_window(self, name) -> None:
"""Mock method."""
if self.active_win is not None:
self.active_win.is_active = False
self.active_win = self.get_window(name)
self.active_win = self.get_window(name)
self.active_win.is_active = True
def has_window(self, name):
def has_window(self, name) -> bool:
"""Mock method."""
return name in self.get_list_of_window_names()
def get_list_of_window_names(self):
def get_list_of_window_names(self) -> List[bytes]:
"""Mock method."""
return [w.uid for w in self.windows]
def get_local_window(self):
def get_command_window(self) -> RxWindow:
"""Mock method."""
return self.get_window(WIN_UID_LOCAL)
return self.get_window(WIN_UID_COMMAND)
def remove_window(self, uid: str) -> None:
"""Mock method."""
@ -496,13 +526,17 @@ class WindowList(object):
del self.windows[i]
break
def get_window(self, uid):
def get_window(self, uid) -> RxWindow:
"""Mock method."""
if not self.has_window(uid):
self.windows.append(RxWindow(uid=uid,
contact_list=self.contact_list,
group_list =self.group_list,
settings =self.settings,
packet_list =self.packet_list))
self.windows.append(
RxWindow(
uid=uid,
contact_list=self.contact_list,
group_list=self.group_list,
settings=self.settings,
packet_list=self.packet_list,
)
)
return next(w for w in self.windows if w.uid == uid)

View File

@ -0,0 +1,20 @@
#!/usr/bin/env python3.7
# -*- coding: utf-8 -*-
"""
TFC - Onion-routed, endpoint secure messaging system
Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
TFC is free software: you can redistribute it and/or modify it under the terms
of the GNU General Public License as published by the Free Software Foundation,
either version 3 of the License, or (at your option) any later version.
TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""

View File

@ -23,264 +23,392 @@ import os
import struct
import unittest
from datetime import datetime
from datetime import datetime
from multiprocessing import Queue
from unittest import mock
from unittest.mock import MagicMock
from unittest import mock
from unittest.mock import MagicMock
from src.common.database import MessageLog, TFCDatabase
from src.common.db_logs import write_log_entry
from src.common.db_logs import write_log_entry
from src.common.encoding import int_to_bytes
from src.common.statics import (CH_FILE_RECV, CH_LOGGING, CH_NOTIFY, CLEAR_ENTIRE_LINE, COMMAND, CURSOR_UP_ONE_LINE,
C_L_HEADER, DIR_USER_DATA, DISABLE, ENABLE, F_S_HEADER, LOCAL_ID, LOCAL_PUBKEY,
LOG_REMOVE, MESSAGE, ORIGIN_CONTACT_HEADER, PADDING_LENGTH, RESET, RX,
SYMMETRIC_KEY_LENGTH, US_BYTE, WIN_TYPE_CONTACT, WIN_TYPE_GROUP, WIN_UID_FILE, WIPE)
from src.common.statics import (
CH_FILE_RECV,
CH_LOGGING,
CH_NOTIFY,
CLEAR_ENTIRE_LINE,
COMMAND,
CURSOR_UP_ONE_LINE,
C_L_HEADER,
DIR_USER_DATA,
DISABLE,
ENABLE,
F_S_HEADER,
LOCAL_ID,
LOCAL_PUBKEY,
LOG_REMOVE,
MESSAGE,
ORIGIN_CONTACT_HEADER,
PADDING_LENGTH,
RESET,
RX,
SYMMETRIC_KEY_LENGTH,
US_BYTE,
WIN_TYPE_CONTACT,
WIN_TYPE_GROUP,
WIN_UID_FILE,
WIPE,
)
from src.receiver.packet import PacketList
from src.receiver.commands import ch_contact_s, ch_master_key, ch_nick, ch_setting, contact_rem, exit_tfc, log_command
from src.receiver.commands import process_command, remove_log, reset_screen, win_activity, win_select, wipe
from src.receiver.packet import PacketList
from src.receiver.commands import (
ch_contact_s,
ch_master_key,
ch_nick,
ch_setting,
contact_rem,
exit_tfc,
log_command,
)
from src.receiver.commands import (
process_command,
remove_log,
reset_screen,
win_activity,
win_select,
wipe,
)
from tests.mock_classes import ContactList, Gateway, group_name_to_group_id, GroupList, KeyList, MasterKey
from tests.mock_classes import (
ContactList,
Gateway,
group_name_to_group_id,
GroupList,
KeyList,
MasterKey,
)
from tests.mock_classes import nick_to_pub_key, RxWindow, Settings, WindowList
from tests.utils import assembly_packet_creator, cd_unit_test, cleanup, ignored, nick_to_short_address
from tests.utils import tear_queue, TFCTestCase
from tests.utils import (
assembly_packet_creator,
cd_unit_test,
cleanup,
ignored,
nick_to_short_address,
)
from tests.utils import tear_queue, TFCTestCase
class TestProcessCommand(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.unit_test_dir = cd_unit_test()
self.ts = datetime.now()
self.settings = Settings()
self.master_key = MasterKey()
self.group_list = GroupList()
self.exit_queue = Queue()
self.gateway = Gateway()
self.window_list = WindowList(nicks=[LOCAL_ID])
self.contact_list = ContactList(nicks=[LOCAL_ID])
self.packet_list = PacketList(self.settings, self.contact_list)
self.key_list = KeyList(nicks=[LOCAL_ID])
self.key_set = self.key_list.get_keyset(LOCAL_PUBKEY)
self.ts = datetime.now()
self.settings = Settings()
self.master_key = MasterKey()
self.group_list = GroupList()
self.exit_queue = Queue()
self.gateway = Gateway()
self.window_list = WindowList(nicks=[LOCAL_ID])
self.contact_list = ContactList(nicks=[LOCAL_ID])
self.packet_list = PacketList(self.settings, self.contact_list)
self.key_list = KeyList(nicks=[LOCAL_ID])
self.key_set = self.key_list.get_keyset(LOCAL_PUBKEY)
self.args = (self.window_list, self.packet_list, self.contact_list, self.key_list, self.group_list,
self.settings, self.master_key, self.gateway, self.exit_queue)
self.args = (
self.window_list,
self.packet_list,
self.contact_list,
self.key_list,
self.group_list,
self.settings,
self.master_key,
self.gateway,
self.exit_queue,
)
def tearDown(self):
def tearDown(self) -> None:
"""Post-test actions."""
cleanup(self.unit_test_dir)
tear_queue(self.exit_queue)
def test_incomplete_command_raises_fr(self):
packet = assembly_packet_creator(COMMAND, b'test_command', s_header_override=C_L_HEADER, encrypt_packet=True)[0]
self.assert_fr("Incomplete command.", process_command, self.ts, packet, *self.args)
def test_incomplete_command_raises_fr(self) -> None:
packet = assembly_packet_creator(
COMMAND, b"test_command", s_header_override=C_L_HEADER, encrypt_packet=True
)[0]
self.assert_se(
"Incomplete command.", process_command, self.ts, packet, *self.args
)
def test_invalid_command_header(self):
packet = assembly_packet_creator(COMMAND, b'invalid_header', encrypt_packet=True)[0]
self.assert_fr("Error: Received an invalid command.", process_command, self.ts, packet, *self.args)
def test_invalid_command_header(self) -> None:
packet = assembly_packet_creator(
COMMAND, b"invalid_header", encrypt_packet=True
)[0]
self.assert_se(
"Error: Received an invalid command.",
process_command,
self.ts,
packet,
*self.args,
)
def test_process_command(self):
def test_process_command(self) -> None:
packet = assembly_packet_creator(COMMAND, LOG_REMOVE, encrypt_packet=True)[0]
self.assert_fr(f"No log database available.", process_command, self.ts, packet, *self.args)
self.assert_se(
f"No log database available.", process_command, self.ts, packet, *self.args
)
class TestWinActivity(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.window_list = WindowList()
self.window_list.windows = [RxWindow(name='Alice', unread_messages=4),
RxWindow(name='Bob', unread_messages=15)]
self.window_list = WindowList()
self.window_list.windows = [
RxWindow(name="Alice", unread_messages=4),
RxWindow(name="Bob", unread_messages=15),
]
@mock.patch('time.sleep', return_value=None)
def test_function(self, _):
self.assert_prints(f"""\
@mock.patch("time.sleep", return_value=None)
def test_function(self, _) -> None:
self.assert_prints(
f"""\
Window activity
Alice: 4
Bob: 15
{5*(CURSOR_UP_ONE_LINE+CLEAR_ENTIRE_LINE)}""", win_activity, self.window_list)
{5*(CURSOR_UP_ONE_LINE+CLEAR_ENTIRE_LINE)}""",
win_activity,
self.window_list,
)
class TestWinSelect(unittest.TestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.window_list = WindowList()
self.window_list.windows = [RxWindow(uid=nick_to_pub_key("Alice"), name='Alice'),
RxWindow(uid=nick_to_pub_key("Bob"), name='Bob')]
self.window_list = WindowList()
self.window_list.windows = [
RxWindow(uid=nick_to_pub_key("Alice"), name="Alice"),
RxWindow(uid=nick_to_pub_key("Bob"), name="Bob"),
]
def test_window_selection(self):
def test_window_selection(self) -> None:
self.assertIsNone(win_select(nick_to_pub_key("Alice"), self.window_list))
self.assertEqual(self.window_list.active_win.name, 'Alice')
self.assertEqual(self.window_list.active_win.name, "Alice")
self.assertIsNone(win_select(nick_to_pub_key("Bob"), self.window_list))
self.assertEqual(self.window_list.active_win.name, 'Bob')
self.assertEqual(self.window_list.active_win.name, "Bob")
self.assertIsNone(win_select(WIN_UID_FILE, self.window_list))
self.assertEqual(self.window_list.active_win.uid, WIN_UID_FILE)
class TestResetScreen(unittest.TestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.cmd_data = nick_to_pub_key("Alice")
self.window_list = WindowList()
self.window_list.windows = [RxWindow(uid=nick_to_pub_key("Alice"), name='Alice'),
RxWindow(uid=nick_to_pub_key("Bob"), name='Bob')]
self.window = self.window_list.get_window(nick_to_pub_key("Alice"))
self.window.message_log = [(datetime.now(), 'Hi Bob', nick_to_pub_key("Alice"), ORIGIN_CONTACT_HEADER)]
self.cmd_data = nick_to_pub_key("Alice")
self.window_list = WindowList()
self.window_list.windows = [
RxWindow(uid=nick_to_pub_key("Alice"), name="Alice"),
RxWindow(uid=nick_to_pub_key("Bob"), name="Bob"),
]
self.window = self.window_list.get_window(nick_to_pub_key("Alice"))
self.window.message_log = [
(datetime.now(), "Hi Bob", nick_to_pub_key("Alice"), ORIGIN_CONTACT_HEADER)
]
@mock.patch('os.system', return_value=None, create_autospec=True)
def test_screen_reset(self, reset):
@mock.patch("os.system", return_value=None)
def test_screen_reset(self, mock_os_system) -> None:
# Ensure there is a message to be removed from the ephemeral message log
self.assertEqual(len(self.window.message_log), 1)
reset_screen(self.cmd_data, self.window_list)
# Test that screen is reset by the command
reset.assert_called_with(RESET)
mock_os_system.assert_called_with(RESET)
# Test that the ephemeral message log is empty after the command
self.assertEqual(len(self.window.message_log), 0)
class TestExitTFC(unittest.TestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.exit_queue = Queue()
def tearDown(self):
def tearDown(self) -> None:
"""Post-test actions."""
tear_queue(self.exit_queue)
def test_function(self):
def test_function(self) -> None:
self.assertIsNone(exit_tfc(self.exit_queue))
self.assertEqual(self.exit_queue.qsize(), 1)
class TestLogCommand(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.unit_test_dir = cd_unit_test()
self.cmd_data = int_to_bytes(1) + nick_to_pub_key("Bob")
self.ts = datetime.now()
self.window_list = WindowList(nicks=['Alice', 'Bob'])
self.window = self.window_list.get_window(nick_to_pub_key("Bob"))
self.unit_test_dir = cd_unit_test()
self.cmd_data = int_to_bytes(1) + nick_to_pub_key("Bob")
self.ts = datetime.now()
self.window_list = WindowList(nicks=["Alice", "Bob"])
self.window = self.window_list.get_window(nick_to_pub_key("Bob"))
self.window.type_print = WIN_TYPE_CONTACT
self.window.name = 'Bob'
self.window.type = WIN_TYPE_CONTACT
self.contact_list = ContactList(nicks=['Alice', 'Bob'])
self.group_list = GroupList()
self.settings = Settings(software_operation=RX)
self.master_key = MasterKey(operation=RX, local_test=True)
self.args = (self.ts, self.window_list, self.contact_list,
self.group_list, self.settings, self.master_key)
self.log_file = f'{DIR_USER_DATA}{self.settings.software_operation}_logs'
self.tfc_log_database = MessageLog(self.log_file, self.master_key.master_key)
self.window.name = "Bob"
self.window.type = WIN_TYPE_CONTACT
self.contact_list = ContactList(nicks=["Alice", "Bob"])
self.group_list = GroupList()
self.settings = Settings(software_operation=RX)
self.master_key = MasterKey(operation=RX, local_test=True)
self.args = (
self.ts,
self.window_list,
self.contact_list,
self.group_list,
self.settings,
self.master_key,
)
self.log_file = f"{DIR_USER_DATA}{self.settings.software_operation}_logs"
self.tfc_log_database = MessageLog(self.log_file, self.master_key.master_key)
time_float = struct.unpack('<L', bytes.fromhex('08ceae02'))[0]
self.time = datetime.fromtimestamp(time_float).strftime("%H:%M:%S.%f")[:-4]
time_float = struct.unpack("<L", bytes.fromhex("08ceae02"))[0]
self.time = datetime.fromtimestamp(time_float).strftime("%H:%M:%S.%f")[:-4]
def tearDown(self):
def tearDown(self) -> None:
"""Post-test actions."""
cleanup(self.unit_test_dir)
with ignored(OSError):
os.remove('Receiver - Plaintext log (None)')
os.remove("Receiver - Plaintext log (None)")
def test_print(self):
def test_print(self) -> None:
# Setup
os.remove(self.log_file)
# Test
self.assert_fr(f"No log database available.", log_command, self.cmd_data, *self.args)
self.assert_se(
f"No log database available.", log_command, self.cmd_data, *self.args
)
@mock.patch('struct.pack', return_value=bytes.fromhex('08ceae02'))
def test_export(self, _):
@mock.patch("struct.pack", return_value=bytes.fromhex("08ceae02"))
def test_export(self, _) -> None:
# Setup
for p in assembly_packet_creator(MESSAGE, 'A short message'):
write_log_entry(p, nick_to_pub_key("Bob"), self.tfc_log_database, origin=ORIGIN_CONTACT_HEADER)
for p in assembly_packet_creator(MESSAGE, "A short message"):
write_log_entry(
p,
nick_to_pub_key("Bob"),
self.tfc_log_database,
origin=ORIGIN_CONTACT_HEADER,
)
# Test
self.assertIsNone(log_command(self.cmd_data, *self.args))
with open('Receiver - Plaintext log (Bob)') as f:
with open("Receiver - Plaintext log (Bob)") as f:
data = f.read()
self.assertEqual(data, f"""\
self.assertEqual(
data,
f"""\
Log file of 1 most recent message(s) to/from contact Bob
{self.time} Bob: A short message
<End of log file>
""")
""",
)
class TestRemoveLog(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.unit_test_dir = cd_unit_test()
self.win_name = nick_to_pub_key("Alice")
self.contact_list = ContactList()
self.group_list = GroupList()
self.settings = Settings()
self.master_key = MasterKey()
self.win_name = nick_to_pub_key("Alice")
self.contact_list = ContactList()
self.group_list = GroupList()
self.settings = Settings()
self.master_key = MasterKey()
def tearDown(self):
def tearDown(self) -> None:
"""Post-test actions."""
cleanup(self.unit_test_dir)
def test_remove_log_file(self):
self.assert_fr(f"No log database available.",
remove_log, self.win_name, self.contact_list, self.group_list, self.settings, self.master_key)
def test_remove_log_file(self) -> None:
self.assert_se(
f"No log database available.",
remove_log,
self.win_name,
self.contact_list,
self.group_list,
self.settings,
self.master_key,
)
class TestChMasterKey(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.unit_test_dir = cd_unit_test()
self.ts = datetime.now()
self.master_key = MasterKey()
self.settings = Settings()
self.contact_list = ContactList(nicks=[LOCAL_ID])
self.window_list = WindowList(nicks=[LOCAL_ID])
self.group_list = GroupList()
self.key_list = KeyList()
self.args = (self.ts, self.window_list, self.contact_list, self.group_list,
self.key_list, self.settings, self.master_key)
self.log_file = f'{DIR_USER_DATA}{self.settings.software_operation}_logs'
self.unit_test_dir = cd_unit_test()
self.ts = datetime.now()
self.master_key = MasterKey()
self.settings = Settings()
self.contact_list = ContactList(nicks=[LOCAL_ID])
self.window_list = WindowList(nicks=[LOCAL_ID])
self.group_list = GroupList()
self.key_list = KeyList()
self.args = (
self.ts,
self.window_list,
self.contact_list,
self.group_list,
self.key_list,
self.settings,
self.master_key,
)
self.log_file = f"{DIR_USER_DATA}{self.settings.software_operation}_logs"
self.tfc_log_database = MessageLog(self.log_file, self.master_key.master_key)
def tearDown(self):
def tearDown(self) -> None:
"""Post-test actions."""
cleanup(self.unit_test_dir)
@mock.patch('src.common.db_masterkey.MIN_KEY_DERIVATION_TIME', 0.1)
@mock.patch('src.common.db_masterkey.MIN_KEY_DERIVATION_TIME', 1.0)
@mock.patch('os.popen', return_value=MagicMock(
read=MagicMock(return_value=MagicMock(splitlines=MagicMock(return_value=["MemAvailable 10240"])))))
@mock.patch('multiprocessing.cpu_count', return_value=1)
@mock.patch('getpass.getpass', side_effect=['test_password', 'a', 'a'])
@mock.patch('time.sleep', return_value=None)
def test_master_key_change(self, *_):
@mock.patch("src.common.db_masterkey.MIN_KEY_DERIVATION_TIME", 0.1)
@mock.patch("src.common.db_masterkey.MIN_KEY_DERIVATION_TIME", 1.0)
@mock.patch(
"os.popen",
return_value=MagicMock(
read=MagicMock(
return_value=MagicMock(
splitlines=MagicMock(return_value=["MemAvailable 10240"])
)
)
),
)
@mock.patch("multiprocessing.cpu_count", return_value=1)
@mock.patch("getpass.getpass", side_effect=["test_password", "a", "a"])
@mock.patch("time.sleep", return_value=None)
def test_master_key_change(self, *_) -> None:
# Setup
write_log_entry(F_S_HEADER + bytes(PADDING_LENGTH), nick_to_pub_key("Alice"), self.tfc_log_database)
write_log_entry(
F_S_HEADER + bytes(PADDING_LENGTH),
nick_to_pub_key("Alice"),
self.tfc_log_database,
)
self.contact_list.file_name = f'{DIR_USER_DATA}{RX}_contacts'
self.group_list.file_name = f'{DIR_USER_DATA}{RX}_groups'
self.key_list.file_name = f'{DIR_USER_DATA}{RX}_keys'
self.settings.file_name = f'{DIR_USER_DATA}{RX}_settings'
self.contact_list.file_name = f"{DIR_USER_DATA}{RX}_contacts"
self.group_list.file_name = f"{DIR_USER_DATA}{RX}_groups"
self.key_list.file_name = f"{DIR_USER_DATA}{RX}_keys"
self.settings.file_name = f"{DIR_USER_DATA}{RX}_settings"
self.contact_list.database = TFCDatabase(self.contact_list.file_name, self.contact_list.master_key)
self.group_list.database = TFCDatabase(self.group_list.file_name, self.group_list.master_key)
self.key_list.database = TFCDatabase(self.key_list.file_name, self.group_list.master_key)
self.settings.database = TFCDatabase(self.settings.file_name, self.settings.master_key)
self.contact_list.database = TFCDatabase(
self.contact_list.file_name, self.contact_list.master_key
)
self.group_list.database = TFCDatabase(
self.group_list.file_name, self.group_list.master_key
)
self.key_list.database = TFCDatabase(
self.key_list.file_name, self.group_list.master_key
)
self.settings.database = TFCDatabase(
self.settings.file_name, self.settings.master_key
)
orig_cl_rd = self.contact_list.database.replace_database
orig_gl_rd = self.group_list.database.replace_database
@ -288,9 +416,9 @@ class TestChMasterKey(TFCTestCase):
orig_st_rd = self.settings.database.replace_database
self.contact_list.database.replace_database = lambda: None
self.group_list.database.replace_database = lambda: None
self.key_list.database.replace_database = lambda: None
self.settings.database.replace_database = lambda: None
self.group_list.database.replace_database = lambda: None
self.key_list.database.replace_database = lambda: None
self.settings.database.replace_database = lambda: None
# Test
self.assertEqual(self.master_key.master_key, bytes(SYMMETRIC_KEY_LENGTH))
@ -299,180 +427,226 @@ class TestChMasterKey(TFCTestCase):
# Teardown
self.contact_list.database.replace_database = orig_cl_rd
self.group_list.database.replace_database = orig_gl_rd
self.key_list.database.replace_database = orig_kl_rd
self.settings.database.replace_database = orig_st_rd
self.group_list.database.replace_database = orig_gl_rd
self.key_list.database.replace_database = orig_kl_rd
self.settings.database.replace_database = orig_st_rd
@mock.patch('src.common.db_masterkey.MIN_KEY_DERIVATION_TIME', 0.1)
@mock.patch('src.common.db_masterkey.MIN_KEY_DERIVATION_TIME', 1.0)
@mock.patch('os.popen', return_value=MagicMock(
read=MagicMock(return_value=MagicMock(splitlines=MagicMock(return_value=["MemAvailable 10240"])))))
@mock.patch('multiprocessing.cpu_count', return_value=1)
@mock.patch('getpass.getpass', return_value='a')
@mock.patch('time.sleep', return_value=None)
def test_invalid_password_raises_function_return(self, *_):
@mock.patch("src.common.db_masterkey.MIN_KEY_DERIVATION_TIME", 0.1)
@mock.patch("src.common.db_masterkey.MIN_KEY_DERIVATION_TIME", 1.0)
@mock.patch(
"os.popen",
return_value=MagicMock(
read=MagicMock(
return_value=MagicMock(
splitlines=MagicMock(return_value=["MemAvailable 10240"])
)
)
),
)
@mock.patch("multiprocessing.cpu_count", return_value=1)
@mock.patch("getpass.getpass", return_value="a")
@mock.patch("time.sleep", return_value=None)
def test_invalid_password_raises_function_return(self, *_) -> None:
self.assertEqual(self.master_key.master_key, bytes(SYMMETRIC_KEY_LENGTH))
self.assert_fr("Error: Invalid password.", ch_master_key, *self.args)
self.assert_se("Error: Invalid password.", ch_master_key, *self.args)
@mock.patch('getpass.getpass', return_value='a')
@mock.patch('time.sleep', return_value=None)
@mock.patch('os.getrandom', side_effect=KeyboardInterrupt)
def test_keyboard_interrupt_raises_fr(self, *_):
self.assert_fr("Error: Invalid password.", ch_master_key, *self.args)
@mock.patch("getpass.getpass", return_value="a")
@mock.patch("time.sleep", return_value=None)
@mock.patch("os.getrandom", side_effect=KeyboardInterrupt)
def test_keyboard_interrupt_raises_fr(self, *_) -> None:
self.assert_se("Error: Invalid password.", ch_master_key, *self.args)
class TestChNick(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.ts = datetime.now()
self.contact_list = ContactList(nicks=['Alice'])
self.window_list = WindowList(contact_list=self.contact_list)
self.group_list = GroupList()
self.args = self.ts, self.window_list, self.contact_list
self.window = self.window_list.get_window(nick_to_pub_key("Alice"))
self.window.type = WIN_TYPE_CONTACT
self.ts = datetime.now()
self.contact_list = ContactList(nicks=["Alice"])
self.window_list = WindowList(contact_list=self.contact_list)
self.group_list = GroupList()
self.args = self.ts, self.window_list, self.contact_list
self.window = self.window_list.get_window(nick_to_pub_key("Alice"))
self.window.type = WIN_TYPE_CONTACT
def test_unknown_account_raises_fr(self):
def test_unknown_account_raises_fr(self) -> None:
# Setup
cmd_data = nick_to_pub_key("Bob") + b'Bob_'
cmd_data = nick_to_pub_key("Bob") + b"Bob_"
# Test
trunc_addr = nick_to_short_address('Bob')
self.assert_fr(f"Error: Receiver has no contact '{trunc_addr}' to rename.", ch_nick, cmd_data, *self.args)
trunc_addr = nick_to_short_address("Bob")
self.assert_se(
f"Error: Receiver has no contact '{trunc_addr}' to rename.",
ch_nick,
cmd_data,
*self.args,
)
def test_nick_change(self):
def test_nick_change(self) -> None:
# Setup
cmd_data = nick_to_pub_key("Alice") + b'Alice_'
cmd_data = nick_to_pub_key("Alice") + b"Alice_"
# Test
self.assertIsNone(ch_nick(cmd_data, *self.args))
self.assertEqual(self.contact_list.get_contact_by_pub_key(nick_to_pub_key("Alice")).nick, 'Alice_')
self.assertEqual(self.window.name, 'Alice_')
self.assertEqual(
self.contact_list.get_contact_by_pub_key(nick_to_pub_key("Alice")).nick,
"Alice_",
)
self.assertEqual(self.window.name, "Alice_")
class TestChSetting(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.ts = datetime.now()
self.window_list = WindowList()
self.ts = datetime.now()
self.window_list = WindowList()
self.contact_list = ContactList()
self.group_list = GroupList()
self.key_list = KeyList()
self.settings = Settings()
self.gateway = Gateway()
self.args = (self.ts, self.window_list, self.contact_list, self.group_list,
self.key_list, self.settings, self.gateway)
self.group_list = GroupList()
self.key_list = KeyList()
self.settings = Settings()
self.gateway = Gateway()
self.args = (
self.ts,
self.window_list,
self.contact_list,
self.group_list,
self.key_list,
self.settings,
self.gateway,
)
def test_invalid_data_raises_fr(self):
def test_invalid_data_raises_fr(self) -> None:
# Setup
self.settings.key_list = ['']
self.settings.key_list = [""]
# Test
cmd_data = b'setting' + b'True'
self.assert_fr("Error: Received invalid setting data.", ch_setting, cmd_data, *self.args)
cmd_data = b"setting" + b"True"
self.assert_se(
"Error: Received invalid setting data.", ch_setting, cmd_data, *self.args
)
def test_invalid_setting_raises_fr(self):
def test_invalid_setting_raises_fr(self) -> None:
# Setup
self.settings.key_list = ['']
self.settings.key_list = [""]
# Test
cmd_data = b'setting' + US_BYTE + b'True'
self.assert_fr("Error: Invalid setting 'setting'.", ch_setting, cmd_data, *self.args)
cmd_data = b"setting" + US_BYTE + b"True"
self.assert_se(
"Error: Invalid setting 'setting'.", ch_setting, cmd_data, *self.args
)
def test_databases(self):
def test_databases(self) -> None:
# Setup
self.settings.key_list = ['max_number_of_group_members', 'max_number_of_contacts']
self.settings.key_list = [
"max_number_of_group_members",
"max_number_of_contacts",
]
# Test
cmd_data = b'max_number_of_group_members' + US_BYTE + b'30'
cmd_data = b"max_number_of_group_members" + US_BYTE + b"30"
self.assertIsNone(ch_setting(cmd_data, *self.args))
cmd_data = b'max_number_of_contacts' + US_BYTE + b'30'
cmd_data = b"max_number_of_contacts" + US_BYTE + b"30"
self.assertIsNone(ch_setting(cmd_data, *self.args))
def test_change_gateway_setting(self):
def test_change_gateway_setting(self) -> None:
# Setup
self.settings.key_list = ['max_number_of_group_members', 'max_number_of_contacts']
self.settings.key_list = [
"max_number_of_group_members",
"max_number_of_contacts",
]
# Test
cmd_data = b'serial_baudrate' + US_BYTE + b'115200'
cmd_data = b"serial_baudrate" + US_BYTE + b"115200"
self.assertIsNone(ch_setting(cmd_data, *self.args))
class TestChContactSetting(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.ts = datetime.fromtimestamp(1502750000)
self.contact_list = ContactList(nicks=['Alice', 'Bob'])
self.group_list = GroupList(groups=['test_group', 'test_group2'])
self.window_list = WindowList(contact_list=self.contact_list,
group_list=self.group_list)
self.args = self.ts, self.window_list, self.contact_list, self.group_list
self.ts = datetime.fromtimestamp(1502750000)
self.contact_list = ContactList(nicks=["Alice", "Bob"])
self.group_list = GroupList(groups=["test_group", "test_group2"])
self.window_list = WindowList(
contact_list=self.contact_list, group_list=self.group_list
)
self.args = self.ts, self.window_list, self.contact_list, self.group_list
def test_invalid_window_raises_fr(self):
def test_invalid_window_raises_fr(self) -> None:
# Setup
cmd_data = ENABLE + nick_to_pub_key("Bob")
header = CH_LOGGING
self.contact_list = ContactList(nicks=['Alice'])
self.window_list = WindowList(contact_list=self.contact_list,
group_list=self.group_list)
cmd_data = ENABLE + nick_to_pub_key("Bob")
header = CH_LOGGING
self.contact_list = ContactList(nicks=["Alice"])
self.window_list = WindowList(
contact_list=self.contact_list, group_list=self.group_list
)
# Test
self.assert_fr(f"Error: Found no window for '{nick_to_short_address('Bob')}'.",
ch_contact_s, cmd_data, *self.args, header)
self.assert_se(
f"Error: Found no window for '{nick_to_short_address('Bob')}'.",
ch_contact_s,
cmd_data,
*self.args,
header,
)
def test_setting_change_contact(self):
def test_setting_change_contact(self) -> None:
# Setup
self.window = self.window_list.get_window(nick_to_pub_key("Bob"))
self.window.type = WIN_TYPE_CONTACT
self.window.type_print = 'contact'
self.window = self.window_list.get_window(nick_to_pub_key("Bob"))
self.window.type = WIN_TYPE_CONTACT
self.window.type_print = "contact"
self.window.window_contacts = self.contact_list.contacts
bob = self.contact_list.get_contact_by_address_or_nick("Bob")
bob = self.contact_list.get_contact_by_address_or_nick("Bob")
# Test
for attr, header in [('log_messages', CH_LOGGING),
('notifications', CH_NOTIFY),
('file_reception', CH_FILE_RECV)]:
for attr, header in [
("log_messages", CH_LOGGING),
("notifications", CH_NOTIFY),
("file_reception", CH_FILE_RECV),
]:
for s in [ENABLE, ENABLE, DISABLE, DISABLE]:
cmd_data = s + nick_to_pub_key("Bob")
self.assertIsNone(ch_contact_s(cmd_data, *self.args, header))
self.assertEqual(bob.__getattribute__(attr), (s == ENABLE))
def test_setting_change_group(self):
def test_setting_change_group(self) -> None:
# Setup
self.window = self.window_list.get_window(group_name_to_group_id('test_group'))
self.window.type = WIN_TYPE_GROUP
self.window.type_print = 'group'
self.window.window_contacts = self.group_list.get_group('test_group').members
self.window = self.window_list.get_window(group_name_to_group_id("test_group"))
self.window.type = WIN_TYPE_GROUP
self.window.type_print = "group"
self.window.window_contacts = self.group_list.get_group("test_group").members
# Test
for attr, header in [('log_messages', CH_LOGGING),
('notifications', CH_NOTIFY),
('file_reception', CH_FILE_RECV)]:
for attr, header in [
("log_messages", CH_LOGGING),
("notifications", CH_NOTIFY),
("file_reception", CH_FILE_RECV),
]:
for s in [ENABLE, ENABLE, DISABLE, DISABLE]:
cmd_data = s + group_name_to_group_id('test_group')
cmd_data = s + group_name_to_group_id("test_group")
self.assertIsNone(ch_contact_s(cmd_data, *self.args, header))
if header in [CH_LOGGING, CH_NOTIFY]:
self.assertEqual(self.group_list.get_group('test_group').__getattribute__(attr), (s == ENABLE))
self.assertEqual(
self.group_list.get_group("test_group").__getattribute__(attr),
(s == ENABLE),
)
if header == CH_FILE_RECV:
for m in self.group_list.get_group('test_group').members:
for m in self.group_list.get_group("test_group").members:
self.assertEqual(m.file_reception, (s == ENABLE))
def test_setting_change_all(self):
def test_setting_change_all(self) -> None:
# Setup
self.window = self.window_list.get_window(nick_to_pub_key("Bob"))
self.window.type = WIN_TYPE_CONTACT
self.window.type_print = 'contact'
self.window = self.window_list.get_window(nick_to_pub_key("Bob"))
self.window.type = WIN_TYPE_CONTACT
self.window.type_print = "contact"
self.window.window_contacts = self.contact_list.contacts
# Test
for attr, header in [('log_messages', CH_LOGGING),
('notifications', CH_NOTIFY),
('file_reception', CH_FILE_RECV)]:
for attr, header in [
("log_messages", CH_LOGGING),
("notifications", CH_NOTIFY),
("file_reception", CH_FILE_RECV),
]:
for s in [ENABLE, ENABLE, DISABLE, DISABLE]:
cmd_data = s.upper() + US_BYTE
self.assertIsNone(ch_contact_s(cmd_data, *self.args, header))
@ -489,42 +663,57 @@ class TestChContactSetting(TFCTestCase):
class TestContactRemove(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.unit_test_dir = cd_unit_test()
self.ts = datetime.now()
self.window_list = WindowList()
self.cmd_data = nick_to_pub_key("Bob")
self.settings = Settings()
self.master_key = MasterKey()
self.args = self.cmd_data, self.ts, self.window_list
self.ts = datetime.now()
self.window_list = WindowList()
self.cmd_data = nick_to_pub_key("Bob")
self.settings = Settings()
self.master_key = MasterKey()
self.args = self.cmd_data, self.ts, self.window_list
def tearDown(self):
def tearDown(self) -> None:
"""Post-test actions."""
cleanup(self.unit_test_dir)
def test_no_contact_raises_fr(self):
def test_no_contact_raises_fr(self) -> None:
# Setup
contact_list = ContactList(nicks=['Alice'])
group_list = GroupList(groups=[])
key_list = KeyList(nicks=['Alice'])
contact_list = ContactList(nicks=["Alice"])
group_list = GroupList(groups=[])
key_list = KeyList(nicks=["Alice"])
# Test
self.assert_fr(f"Receiver has no account '{nick_to_short_address('Bob')}' to remove.",
contact_rem, *self.args, contact_list, group_list, key_list, self.settings, self.master_key)
self.assert_se(
f"Receiver has no account '{nick_to_short_address('Bob')}' to remove.",
contact_rem,
*self.args,
contact_list,
group_list,
key_list,
self.settings,
self.master_key,
)
def test_successful_removal(self):
def test_successful_removal(self) -> None:
# Setup
contact_list = ContactList(nicks=['Alice', 'Bob'])
contact = contact_list.get_contact_by_address_or_nick("Bob")
group_list = GroupList(groups=['test_group', 'test_group2'])
key_list = KeyList(nicks=['Alice', 'Bob'])
contact_list = ContactList(nicks=["Alice", "Bob"])
contact = contact_list.get_contact_by_address_or_nick("Bob")
group_list = GroupList(groups=["test_group", "test_group2"])
key_list = KeyList(nicks=["Alice", "Bob"])
self.window_list.windows = [RxWindow(type=WIN_TYPE_GROUP)]
# Test
self.assert_fr("No log database available.",
contact_rem, *self.args, contact_list, group_list, key_list, self.settings, self.master_key)
self.assert_se(
"No log database available.",
contact_rem,
*self.args,
contact_list,
group_list,
key_list,
self.settings,
self.master_key,
)
self.assertFalse(contact_list.has_pub_key(nick_to_pub_key("Bob")))
self.assertFalse(key_list.has_keyset(nick_to_pub_key("Bob")))
for g in group_list:
@ -532,7 +721,6 @@ class TestContactRemove(TFCTestCase):
class TestWipe(unittest.TestCase):
def setUp(self) -> None:
"""Pre-test actions."""
self.exit_queue = Queue()
@ -541,11 +729,11 @@ class TestWipe(unittest.TestCase):
"""Post-test actions."""
tear_queue(self.exit_queue)
@mock.patch('os.system', return_value=None)
def test_wipe_command(self, _):
@mock.patch("src.common.misc.reset_terminal", return_value=None)
def test_wipe_command(self, _) -> None:
self.assertIsNone(wipe(self.exit_queue))
self.assertEqual(self.exit_queue.get(), WIPE)
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main(exit=False)

View File

@ -24,101 +24,169 @@ import unittest
from src.common.statics import US_BYTE
from src.receiver.commands_g import group_add, group_create, group_delete, group_remove, group_rename
from src.receiver.commands_g import (
group_add,
group_create,
group_delete,
group_remove,
group_rename,
)
from tests.mock_classes import Contact, ContactList, GroupList, RxWindow, Settings, WindowList
from tests.utils import group_name_to_group_id, nick_to_pub_key, TFCTestCase, UNDECODABLE_UNICODE
from tests.mock_classes import (
Contact,
ContactList,
GroupList,
RxWindow,
Settings,
WindowList,
)
from tests.utils import (
group_name_to_group_id,
nick_to_pub_key,
TFCTestCase,
UNDECODABLE_UNICODE,
)
class TestGroupCreate(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.ts = datetime.datetime.now()
self.settings = Settings()
self.ts = datetime.datetime.now()
self.settings = Settings()
self.window_list = WindowList()
self.group_id = group_name_to_group_id('test_group')
self.group_id = group_name_to_group_id("test_group")
def test_too_many_purp_accounts_raises_fr(self):
def test_too_many_purp_accounts_raises_fr(self) -> None:
# Setup
create_list = [nick_to_pub_key(str(n)) for n in range(51)]
cmd_data = self.group_id + b'test_group' + US_BYTE + b''.join(create_list)
group_list = GroupList(groups=['test_group'])
contact_list = ContactList(nicks=[str(n) for n in range(51)])
group = group_list.get_group('test_group')
create_list = [nick_to_pub_key(str(n)) for n in range(51)]
cmd_data = self.group_id + b"test_group" + US_BYTE + b"".join(create_list)
group_list = GroupList(groups=["test_group"])
contact_list = ContactList(nicks=[str(n) for n in range(51)])
group = group_list.get_group("test_group")
group.members = contact_list.contacts
# Test
self.assert_fr("Error: TFC settings only allow 50 members per group.",
group_create, cmd_data, self.ts, self.window_list, contact_list, group_list, self.settings)
self.assert_se(
"Error: TFC settings only allow 50 members per group.",
group_create,
cmd_data,
self.ts,
self.window_list,
contact_list,
group_list,
self.settings,
)
def test_full_group_list_raises_fr(self):
def test_full_group_list_raises_fr(self) -> None:
# Setup
cmd_data = self.group_id + b'test_group' + US_BYTE + nick_to_pub_key('51')
group_list = GroupList(groups=[f"test_group_{n}" for n in range(50)])
contact_list = ContactList(nicks=['Alice'])
cmd_data = self.group_id + b"test_group" + US_BYTE + nick_to_pub_key("51")
group_list = GroupList(groups=[f"test_group_{n}" for n in range(50)])
contact_list = ContactList(nicks=["Alice"])
# Test
self.assert_fr("Error: TFC settings only allow 50 groups.",
group_create, cmd_data, self.ts, self.window_list, contact_list, group_list, self.settings)
self.assert_se(
"Error: TFC settings only allow 50 groups.",
group_create,
cmd_data,
self.ts,
self.window_list,
contact_list,
group_list,
self.settings,
)
def test_successful_group_creation(self):
def test_successful_group_creation(self) -> None:
# Setup
group_list = GroupList(groups=['test_group'])
cmd_data = group_name_to_group_id('test_group') + b'test_group2' + US_BYTE + nick_to_pub_key('Bob')
contact_list = ContactList(nicks=['Alice', 'Bob'])
window_list = WindowList(nicks =['Alice', 'Bob'],
contact_list=contact_list,
group_lis =group_list,
packet_list =None,
settings =Settings)
group_list = GroupList(groups=["test_group"])
cmd_data = (
group_name_to_group_id("test_group")
+ b"test_group2"
+ US_BYTE
+ nick_to_pub_key("Bob")
)
contact_list = ContactList(nicks=["Alice", "Bob"])
window_list = WindowList(
nicks=["Alice", "Bob"],
contact_list=contact_list,
group_lis=group_list,
packet_list=None,
settings=Settings,
)
# Test
self.assertIsNone(group_create(cmd_data, self.ts, window_list, contact_list, group_list, self.settings))
self.assertEqual(len(group_list.get_group('test_group')), 2)
self.assertIsNone(
group_create(
cmd_data, self.ts, window_list, contact_list, group_list, self.settings
)
)
self.assertEqual(len(group_list.get_group("test_group")), 2)
class TestGroupAdd(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.ts = datetime.datetime.now()
self.settings = Settings()
self.ts = datetime.datetime.now()
self.settings = Settings()
self.window_list = WindowList()
def test_too_large_final_member_list_raises_fr(self):
def test_too_large_final_member_list_raises_fr(self) -> None:
# Setup
group_list = GroupList(groups=['test_group'])
contact_list = ContactList(nicks=[str(n) for n in range(51)])
group = group_list.get_group('test_group')
group_list = GroupList(groups=["test_group"])
contact_list = ContactList(nicks=[str(n) for n in range(51)])
group = group_list.get_group("test_group")
group.members = contact_list.contacts[:50]
cmd_data = group_name_to_group_id('test_group') + nick_to_pub_key('50')
cmd_data = group_name_to_group_id("test_group") + nick_to_pub_key("50")
# Test
self.assert_fr("Error: TFC settings only allow 50 members per group.",
group_add, cmd_data, self.ts, self.window_list, contact_list, group_list, self.settings)
self.assert_se(
"Error: TFC settings only allow 50 members per group.",
group_add,
cmd_data,
self.ts,
self.window_list,
contact_list,
group_list,
self.settings,
)
def test_unknown_group_id_raises_fr(self):
def test_unknown_group_id_raises_fr(self) -> None:
# Setup
group_list = GroupList(groups=['test_group'])
group_list = GroupList(groups=["test_group"])
contact_list = ContactList(nicks=[str(n) for n in range(21)])
cmd_data = group_name_to_group_id('test_group2') + nick_to_pub_key('50')
cmd_data = group_name_to_group_id("test_group2") + nick_to_pub_key("50")
# Test
self.assert_fr("Error: No group with ID '2e7mHQznTMsP6' found.",
group_add, cmd_data, self.ts, self.window_list, contact_list, group_list, self.settings)
self.assert_se(
"Error: No group with ID '2e7mHQznTMsP6' found.",
group_add,
cmd_data,
self.ts,
self.window_list,
contact_list,
group_list,
self.settings,
)
def test_successful_group_add(self):
def test_successful_group_add(self) -> None:
# Setup
contact_list = ContactList(nicks=[str(n) for n in range(21)])
group_lst = GroupList(groups=['test_group'])
group = group_lst.get_group('test_group')
contact_list = ContactList(nicks=[str(n) for n in range(21)])
group_lst = GroupList(groups=["test_group"])
group = group_lst.get_group("test_group")
group.members = contact_list.contacts[:19]
cmd_data = group_name_to_group_id('test_group') + nick_to_pub_key('20')
cmd_data = group_name_to_group_id("test_group") + nick_to_pub_key("20")
# Test
self.assertIsNone(group_add(cmd_data, self.ts, self.window_list, contact_list, group_lst, self.settings))
self.assertIsNone(
group_add(
cmd_data,
self.ts,
self.window_list,
contact_list,
group_lst,
self.settings,
)
)
group2 = group_lst.get_group('test_group')
group2 = group_lst.get_group("test_group")
self.assertEqual(len(group2), 20)
for c in group2:
@ -126,101 +194,142 @@ class TestGroupAdd(TFCTestCase):
class TestGroupRemove(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.ts = datetime.datetime.now()
self.window_list = WindowList()
self.contact_list = ContactList(nicks=[f"contact_{n}" for n in range(21)])
self.group_list = GroupList(groups=['test_group'])
self.group = self.group_list.get_group('test_group')
self.ts = datetime.datetime.now()
self.window_list = WindowList()
self.contact_list = ContactList(nicks=[f"contact_{n}" for n in range(21)])
self.group_list = GroupList(groups=["test_group"])
self.group = self.group_list.get_group("test_group")
self.group.members = self.contact_list.contacts[:19]
self.settings = Settings()
self.settings = Settings()
def test_unknown_group_id_raises_fr(self):
def test_unknown_group_id_raises_fr(self) -> None:
# Setup
group_list = GroupList(groups=['test_group'])
group_list = GroupList(groups=["test_group"])
contact_list = ContactList(nicks=[str(n) for n in range(21)])
cmd_data = group_name_to_group_id('test_group2') + nick_to_pub_key('20')
cmd_data = group_name_to_group_id("test_group2") + nick_to_pub_key("20")
# Test
self.assert_fr("Error: No group with ID '2e7mHQznTMsP6' found.",
group_remove, cmd_data, self.ts, self.window_list, contact_list, group_list)
self.assert_se(
"Error: No group with ID '2e7mHQznTMsP6' found.",
group_remove,
cmd_data,
self.ts,
self.window_list,
contact_list,
group_list,
)
def test_successful_member_removal(self):
self.cmd_data = group_name_to_group_id('test_group') + b''.join([nick_to_pub_key('contact_18'),
nick_to_pub_key('contact_20')])
self.assertIsNone(group_remove(self.cmd_data, self.ts, self.window_list, self.contact_list, self.group_list))
def test_successful_member_removal(self) -> None:
self.cmd_data = group_name_to_group_id("test_group") + b"".join(
[nick_to_pub_key("contact_18"), nick_to_pub_key("contact_20")]
)
self.assertIsNone(
group_remove(
self.cmd_data,
self.ts,
self.window_list,
self.contact_list,
self.group_list,
)
)
class TestGroupDelete(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.ts = datetime.datetime.now()
self.ts = datetime.datetime.now()
self.window_list = WindowList()
self.group_list = GroupList(groups=['test_group'])
self.group_list = GroupList(groups=["test_group"])
def test_missing_group_raises_fr(self):
cmd_data = group_name_to_group_id('test_group2')
self.assert_fr("Error: No group with ID '2e7mHQznTMsP6' found.",
group_delete, cmd_data, self.ts, self.window_list, self.group_list)
def test_missing_group_raises_fr(self) -> None:
cmd_data = group_name_to_group_id("test_group2")
self.assert_se(
"Error: No group with ID '2e7mHQznTMsP6' found.",
group_delete,
cmd_data,
self.ts,
self.window_list,
self.group_list,
)
def test_unknown_group_id_raises_fr(self):
def test_unknown_group_id_raises_fr(self) -> None:
# Setup
group_list = GroupList(groups=['test_group'])
cmd_data = group_name_to_group_id('test_group2')
group_list = GroupList(groups=["test_group"])
cmd_data = group_name_to_group_id("test_group2")
# Test
self.assert_fr("Error: No group with ID '2e7mHQznTMsP6' found.",
group_delete, cmd_data, self.ts, self.window_list, group_list)
self.assert_se(
"Error: No group with ID '2e7mHQznTMsP6' found.",
group_delete,
cmd_data,
self.ts,
self.window_list,
group_list,
)
def test_successful_remove(self):
cmd_data = group_name_to_group_id('test_group')
self.assertIsNone(group_delete(cmd_data, self.ts, self.window_list, self.group_list))
def test_successful_remove(self) -> None:
cmd_data = group_name_to_group_id("test_group")
self.assertIsNone(
group_delete(cmd_data, self.ts, self.window_list, self.group_list)
)
self.assertEqual(len(self.group_list.groups), 0)
class TestGroupRename(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.ts = datetime.datetime.now()
self.group_list = GroupList(groups=['test_group'])
self.window_list = WindowList()
self.window = RxWindow()
self.ts = datetime.datetime.now()
self.group_list = GroupList(groups=["test_group"])
self.window_list = WindowList()
self.window = RxWindow()
self.window_list.windows = [self.window]
self.contact_list = ContactList(nicks=['alice'])
self.args = self.ts, self.window_list, self.contact_list, self.group_list
self.contact_list = ContactList(nicks=["alice"])
self.args = self.ts, self.window_list, self.contact_list, self.group_list
def test_missing_group_id_raises_fr(self):
def test_missing_group_id_raises_fr(self) -> None:
# Setup
cmd_data = group_name_to_group_id('test_group2') + b'new_name'
cmd_data = group_name_to_group_id("test_group2") + b"new_name"
# Test
self.assert_fr("Error: No group with ID '2e7mHQznTMsP6' found.", group_rename, cmd_data, *self.args)
self.assert_se(
"Error: No group with ID '2e7mHQznTMsP6' found.",
group_rename,
cmd_data,
*self.args,
)
def test_invalid_group_name_encoding_raises_fr(self):
def test_invalid_group_name_encoding_raises_fr(self) -> None:
# Setup
cmd_data = group_name_to_group_id('test_group') + b'new_name' + UNDECODABLE_UNICODE
cmd_data = (
group_name_to_group_id("test_group") + b"new_name" + UNDECODABLE_UNICODE
)
# Test
self.assert_fr("Error: New name for group 'test_group' was invalid.", group_rename, cmd_data, *self.args)
self.assert_se(
"Error: New name for group 'test_group' was invalid.",
group_rename,
cmd_data,
*self.args,
)
def test_invalid_group_name_raises_fr(self):
def test_invalid_group_name_raises_fr(self) -> None:
# Setup
cmd_data = group_name_to_group_id('test_group') + b'new_name\x1f'
cmd_data = group_name_to_group_id("test_group") + b"new_name\x1f"
# Test
self.assert_fr("Error: Group name must be printable.", group_rename, cmd_data, *self.args)
self.assert_se(
"Error: Group name must be printable.", group_rename, cmd_data, *self.args
)
def test_valid_group_name_change(self):
def test_valid_group_name_change(self) -> None:
# Setup
cmd_data = group_name_to_group_id('test_group') + b'new_name'
cmd_data = group_name_to_group_id("test_group") + b"new_name"
# Test
self.assertIsNone(group_rename(cmd_data, *self.args))
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main(exit=False)

View File

@ -26,276 +26,401 @@ import zlib
from datetime import datetime
from unittest import mock
from src.common.crypto import blake2b, encrypt_and_sign
from src.common.crypto import blake2b, encrypt_and_sign
from src.common.encoding import str_to_bytes
from src.common.statics import COMPRESSION_LEVEL, DIR_RECV_FILES, ORIGIN_CONTACT_HEADER, SYMMETRIC_KEY_LENGTH, US_BYTE
from src.common.statics import (
COMPRESSION_LEVEL,
DIR_RECV_FILES,
ORIGIN_CONTACT_HEADER,
SYMMETRIC_KEY_LENGTH,
US_BYTE,
)
from src.receiver.files import new_file, process_assembled_file, process_file, store_unique
from src.receiver.files import (
new_file,
process_assembled_file,
process_file,
store_unique,
)
from tests.mock_classes import ContactList, Settings, WindowList
from tests.utils import cd_unit_test, cleanup, nick_to_pub_key, TFCTestCase, UNDECODABLE_UNICODE
from tests.utils import (
cd_unit_test,
cleanup,
nick_to_pub_key,
TFCTestCase,
UNDECODABLE_UNICODE,
)
class TestStoreUnique(unittest.TestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.unit_test_dir = cd_unit_test()
self.file_data = os.urandom(100)
self.file_dir = 'test_dir/'
self.file_name = 'test_file'
self.file_data = os.urandom(100)
self.file_dir = "test_dir/"
self.file_name = "test_file"
def tearDown(self):
def tearDown(self) -> None:
"""Post-test actions."""
cleanup(self.unit_test_dir)
def test_each_file_is_store_with_unique_name(self):
self.assertEqual(store_unique(self.file_data, self.file_dir, self.file_name), 'test_file')
self.assertEqual(store_unique(self.file_data, self.file_dir, self.file_name), 'test_file.1')
self.assertEqual(store_unique(self.file_data, self.file_dir, self.file_name), 'test_file.2')
def test_each_file_is_store_with_unique_name(self) -> None:
self.assertEqual(
store_unique(self.file_data, self.file_dir, self.file_name), "test_file"
)
self.assertEqual(
store_unique(self.file_data, self.file_dir, self.file_name), "test_file.1"
)
self.assertEqual(
store_unique(self.file_data, self.file_dir, self.file_name), "test_file.2"
)
class ProcessAssembledFile(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.unit_test_dir = cd_unit_test()
self.ts = datetime.now()
self.onion_pub_key = nick_to_pub_key('Alice')
self.nick = 'Alice'
self.settings = Settings()
self.window_list = WindowList(nick=['Alice', 'Bob'])
self.key = os.urandom(SYMMETRIC_KEY_LENGTH)
self.args = self.onion_pub_key, self.nick, self.settings, self.window_list
self.ts = datetime.now()
self.onion_pub_key = nick_to_pub_key("Alice")
self.nick = "Alice"
self.settings = Settings()
self.window_list = WindowList(nick=["Alice", "Bob"])
self.key = os.urandom(SYMMETRIC_KEY_LENGTH)
self.args = self.onion_pub_key, self.nick, self.settings, self.window_list
def tearDown(self):
def tearDown(self) -> None:
"""Post-test actions."""
cleanup(self.unit_test_dir)
def test_invalid_structure_raises_fr(self):
def test_invalid_structure_raises_fr(self) -> None:
# Setup
payload = b'testfile.txt'
payload = b"testfile.txt"
# Test
self.assert_fr("Error: Received file had an invalid structure.",
process_assembled_file, self.ts, payload, *self.args)
self.assert_se(
"Error: Received file had an invalid structure.",
process_assembled_file,
self.ts,
payload,
*self.args,
)
def test_invalid_encoding_raises_fr(self):
def test_invalid_encoding_raises_fr(self) -> None:
# Setup
payload = UNDECODABLE_UNICODE + US_BYTE + b'file_data'
payload = UNDECODABLE_UNICODE + US_BYTE + b"file_data"
# Test
self.assert_fr("Error: Received file name had an invalid encoding.",
process_assembled_file, self.ts, payload, *self.args)
self.assert_se(
"Error: Received file name had an invalid encoding.",
process_assembled_file,
self.ts,
payload,
*self.args,
)
def test_invalid_name_raises_fr(self):
def test_invalid_name_raises_fr(self) -> None:
# Setup
payload = b'\x01filename' + US_BYTE + b'file_data'
payload = b"\x01filename" + US_BYTE + b"file_data"
# Test
self.assert_fr("Error: Received file had an invalid name.",
process_assembled_file, self.ts, payload, *self.args)
self.assert_se(
"Error: Received file had an invalid name.",
process_assembled_file,
self.ts,
payload,
*self.args,
)
def test_slash_in_file_name_raises_fr(self):
def test_slash_in_file_name_raises_fr(self) -> None:
# Setup
payload = b'file/name' + US_BYTE + b'file_data'
payload = b"file/name" + US_BYTE + b"file_data"
# Test
self.assert_fr("Error: Received file had an invalid name.",
process_assembled_file, self.ts, payload, *self.args)
self.assert_se(
"Error: Received file had an invalid name.",
process_assembled_file,
self.ts,
payload,
*self.args,
)
def test_invalid_key_raises_fr(self):
def test_invalid_key_raises_fr(self) -> None:
# Setup
payload = b'testfile.txt' + US_BYTE + b'file_data'
payload = b"testfile.txt" + US_BYTE + b"file_data"
# Test
self.assert_fr("Error: Received file had an invalid key.",
process_assembled_file, self.ts, payload, *self.args)
self.assert_se(
"Error: Received file had an invalid key.",
process_assembled_file,
self.ts,
payload,
*self.args,
)
def test_decryption_fail_raises_fr(self):
def test_decryption_fail_raises_fr(self) -> None:
# Setup
file_data = encrypt_and_sign(b'file_data', self.key)[::-1]
payload = b'testfile.txt' + US_BYTE + file_data
file_data = encrypt_and_sign(b"file_data", self.key)[::-1]
payload = b"testfile.txt" + US_BYTE + file_data
# Test
self.assert_fr("Error: Decryption of file data failed.",
process_assembled_file, self.ts, payload, *self.args)
self.assert_se(
"Error: Decryption of file data failed.",
process_assembled_file,
self.ts,
payload,
*self.args,
)
def test_invalid_compression_raises_fr(self):
def test_invalid_compression_raises_fr(self) -> None:
# Setup
compressed = zlib.compress(b'file_data', level=COMPRESSION_LEVEL)[::-1]
file_data = encrypt_and_sign(compressed, self.key) + self.key
payload = b'testfile.txt' + US_BYTE + file_data
compressed = zlib.compress(b"file_data", level=COMPRESSION_LEVEL)[::-1]
file_data = encrypt_and_sign(compressed, self.key) + self.key
payload = b"testfile.txt" + US_BYTE + file_data
# Test
self.assert_fr("Error: Decompression of file data failed.",
process_assembled_file, self.ts, payload, *self.args)
self.assert_se(
"Error: Decompression of file data failed.",
process_assembled_file,
self.ts,
payload,
*self.args,
)
def test_successful_reception(self):
def test_successful_reception(self) -> None:
# Setup
compressed = zlib.compress(b'file_data', level=COMPRESSION_LEVEL)
file_data = encrypt_and_sign(compressed, self.key) + self.key
payload = b'testfile.txt' + US_BYTE + file_data
compressed = zlib.compress(b"file_data", level=COMPRESSION_LEVEL)
file_data = encrypt_and_sign(compressed, self.key) + self.key
payload = b"testfile.txt" + US_BYTE + file_data
# Test
self.assertIsNone(process_assembled_file(self.ts, payload, *self.args))
self.assertTrue(os.path.isfile(f'{DIR_RECV_FILES}Alice/testfile.txt'))
self.assertTrue(os.path.isfile(f"{DIR_RECV_FILES}Alice/testfile.txt"))
def test_successful_reception_during_traffic_masking(self):
def test_successful_reception_during_traffic_masking(self) -> None:
# Setup
self.settings.traffic_masking = True
self.window_list.active_win = self.window_list.get_window(nick_to_pub_key('Bob'))
self.window_list.active_win = self.window_list.get_window(
nick_to_pub_key("Bob")
)
compressed = zlib.compress(b'file_data', level=COMPRESSION_LEVEL)
file_data = encrypt_and_sign(compressed, self.key) + self.key
payload = b'testfile.txt' + US_BYTE + file_data
compressed = zlib.compress(b"file_data", level=COMPRESSION_LEVEL)
file_data = encrypt_and_sign(compressed, self.key) + self.key
payload = b"testfile.txt" + US_BYTE + file_data
# Test
self.assertIsNone(process_assembled_file(self.ts, payload, *self.args))
self.assertEqual(self.window_list.get_window(nick_to_pub_key('Bob')).message_log[0][1],
"Stored file from Alice as 'testfile.txt'.")
self.assertTrue(os.path.isfile(f'{DIR_RECV_FILES}Alice/testfile.txt'))
self.assertEqual(
self.window_list.get_window(nick_to_pub_key("Bob")).message_log[0][1],
"Stored file from Alice as 'testfile.txt'.",
)
self.assertTrue(os.path.isfile(f"{DIR_RECV_FILES}Alice/testfile.txt"))
class TestNewFile(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.unit_test_dir = cd_unit_test()
self.ts = datetime.now()
self.packet = b''
self.file_keys = dict()
self.file_buf = dict()
self.contact_list = ContactList(nicks=['Alice'])
self.window_list = WindowList()
self.file_key = SYMMETRIC_KEY_LENGTH*b'a'
self.settings = Settings()
self.compressed = zlib.compress(str_to_bytes("test_file.txt") + b'file_data', level=COMPRESSION_LEVEL)
self.args = self.file_keys, self.file_buf, self.contact_list, self.window_list, self.settings
self.ts = datetime.now()
self.packet = b""
self.file_keys = dict()
self.file_buf = dict()
self.contact_list = ContactList(nicks=["Alice"])
self.window_list = WindowList()
self.file_key = SYMMETRIC_KEY_LENGTH * b"a"
self.settings = Settings()
self.compressed = zlib.compress(
str_to_bytes("test_file.txt") + b"file_data", level=COMPRESSION_LEVEL
)
self.args = (
self.file_keys,
self.file_buf,
self.contact_list,
self.window_list,
self.settings,
)
def tearDown(self):
def tearDown(self) -> None:
"""Post-test actions."""
cleanup(self.unit_test_dir)
def test_unknown_account_raises_fr(self):
def test_unknown_account_raises_fr(self) -> None:
# Setup
file_ct = encrypt_and_sign(self.compressed, self.file_key)
packet = nick_to_pub_key('Bob') + ORIGIN_CONTACT_HEADER + file_ct
packet = nick_to_pub_key("Bob") + ORIGIN_CONTACT_HEADER + file_ct
# Test
self.assert_fr("File from an unknown account.", new_file, self.ts, packet, *self.args)
self.assert_se(
"File from an unknown account.", new_file, self.ts, packet, *self.args
)
def test_disabled_file_reception_raises_fr(self):
def test_disabled_file_reception_raises_fr(self) -> None:
# Setup
file_ct = encrypt_and_sign(self.compressed, self.file_key)
packet = nick_to_pub_key('Alice') + ORIGIN_CONTACT_HEADER + file_ct
self.contact_list.get_contact_by_address_or_nick('Alice').file_reception = False
packet = nick_to_pub_key("Alice") + ORIGIN_CONTACT_HEADER + file_ct
self.contact_list.get_contact_by_address_or_nick("Alice").file_reception = False
# Test
self.assert_fr("Alert! Discarded file from Alice as file reception for them is disabled.",
new_file, self.ts, packet, *self.args)
self.assert_se(
"Alert! Discarded file from Alice as file reception for them is disabled.",
new_file,
self.ts,
packet,
*self.args,
)
def test_valid_file_without_key_is_cached(self):
def test_valid_file_without_key_is_cached(self) -> None:
# Setup
file_ct = encrypt_and_sign(self.compressed, self.file_key)
file_ct = encrypt_and_sign(self.compressed, self.file_key)
file_hash = blake2b(file_ct)
packet = nick_to_pub_key('Alice') + ORIGIN_CONTACT_HEADER + file_ct
packet = nick_to_pub_key("Alice") + ORIGIN_CONTACT_HEADER + file_ct
# Test
self.assertIsNone(new_file(self.ts, packet, *self.args))
self.assertEqual(self.file_buf[nick_to_pub_key('Alice') + file_hash], (self.ts, file_ct))
self.assertEqual(
self.file_buf[nick_to_pub_key("Alice") + file_hash], (self.ts, file_ct)
)
@mock.patch('time.sleep', return_value=None)
def test_valid_file_with_key_is_processed(self, _):
@mock.patch("time.sleep", return_value=None)
def test_valid_file_with_key_is_processed(self, _) -> None:
# Setup
file_ct = encrypt_and_sign(self.compressed, self.file_key)
file_hash = blake2b(file_ct)
packet = nick_to_pub_key('Alice') + ORIGIN_CONTACT_HEADER + file_ct
self.file_keys = {(nick_to_pub_key('Alice') + file_hash): self.file_key}
self.args = self.file_keys, self.file_buf, self.contact_list, self.window_list, self.settings
file_ct = encrypt_and_sign(self.compressed, self.file_key)
file_hash = blake2b(file_ct)
packet = nick_to_pub_key("Alice") + ORIGIN_CONTACT_HEADER + file_ct
self.file_keys = {(nick_to_pub_key("Alice") + file_hash): self.file_key}
self.args = (
self.file_keys,
self.file_buf,
self.contact_list,
self.window_list,
self.settings,
)
# Test
self.assertIsNone(new_file(self.ts, packet, *self.args))
class TestProcessFile(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.unit_test_dir = cd_unit_test()
self.ts = datetime.now()
self.account = nick_to_pub_key('Alice')
self.file_key = SYMMETRIC_KEY_LENGTH*b'a'
self.file_ct = encrypt_and_sign(50 * b'a', key=self.file_key)
self.contact_list = ContactList(nicks=['Alice'])
self.window_list = WindowList()
self.settings = Settings()
self.args = self.file_key, self.contact_list, self.window_list, self.settings
self.ts = datetime.now()
self.account = nick_to_pub_key("Alice")
self.file_key = SYMMETRIC_KEY_LENGTH * b"a"
self.file_ct = encrypt_and_sign(50 * b"a", key=self.file_key)
self.contact_list = ContactList(nicks=["Alice"])
self.window_list = WindowList()
self.settings = Settings()
self.args = self.file_key, self.contact_list, self.window_list, self.settings
def tearDown(self):
def tearDown(self) -> None:
"""Post-test actions."""
cleanup(self.unit_test_dir)
def test_invalid_key_raises_fr(self):
self.file_key = SYMMETRIC_KEY_LENGTH * b'f'
self.args = self.file_key, self.contact_list, self.window_list, self.settings
self.assert_fr("Error: Decryption key for file from Alice was invalid.",
process_file, self.ts, self.account, self.file_ct, *self.args)
def test_invalid_key_raises_fr(self) -> None:
self.file_key = SYMMETRIC_KEY_LENGTH * b"f"
self.args = self.file_key, self.contact_list, self.window_list, self.settings
self.assert_se(
"Error: Decryption key for file from Alice was invalid.",
process_file,
self.ts,
self.account,
self.file_ct,
*self.args,
)
def test_invalid_compression_raises_fr(self):
compressed = zlib.compress(b'file_data', level=COMPRESSION_LEVEL)[::-1]
file_data = encrypt_and_sign(compressed, self.file_key)
def test_invalid_compression_raises_fr(self) -> None:
compressed = zlib.compress(b"file_data", level=COMPRESSION_LEVEL)[::-1]
file_data = encrypt_and_sign(compressed, self.file_key)
self.assert_fr("Error: Failed to decompress file from Alice.",
process_file, self.ts, self.account, file_data, *self.args)
self.assert_se(
"Error: Failed to decompress file from Alice.",
process_file,
self.ts,
self.account,
file_data,
*self.args,
)
@mock.patch('time.sleep', return_value=None)
def test_invalid_file_name_raises_fr(self, _):
compressed = zlib.compress(UNDECODABLE_UNICODE + b'file_data', level=COMPRESSION_LEVEL)
file_data = encrypt_and_sign(compressed, self.file_key)
@mock.patch("time.sleep", return_value=None)
def test_invalid_file_name_raises_fr(self, _) -> None:
compressed = zlib.compress(
UNDECODABLE_UNICODE + b"file_data", level=COMPRESSION_LEVEL
)
file_data = encrypt_and_sign(compressed, self.file_key)
self.assert_fr("Error: Name of file from Alice had an invalid encoding.",
process_file, self.ts, self.account, file_data, *self.args)
self.assert_se(
"Error: Name of file from Alice had an invalid encoding.",
process_file,
self.ts,
self.account,
file_data,
*self.args,
)
@mock.patch('time.sleep', return_value=None)
def test_non_printable_name_raises_fr(self, _):
compressed = zlib.compress(str_to_bytes("file\x01") + b'file_data', level=COMPRESSION_LEVEL)
file_data = encrypt_and_sign(compressed, self.file_key)
@mock.patch("time.sleep", return_value=None)
def test_non_printable_name_raises_fr(self, _) -> None:
compressed = zlib.compress(
str_to_bytes("file\x01") + b"file_data", level=COMPRESSION_LEVEL
)
file_data = encrypt_and_sign(compressed, self.file_key)
self.assert_fr("Error: Name of file from Alice was invalid.",
process_file, self.ts, self.account, file_data, *self.args)
self.assert_se(
"Error: Name of file from Alice was invalid.",
process_file,
self.ts,
self.account,
file_data,
*self.args,
)
@mock.patch('time.sleep', return_value=None)
def test_slash_in_name_raises_fr(self, _):
compressed = zlib.compress(str_to_bytes("Alice/file.txt") + b'file_data', level=COMPRESSION_LEVEL)
file_data = encrypt_and_sign(compressed, self.file_key)
@mock.patch("time.sleep", return_value=None)
def test_slash_in_name_raises_fr(self, _) -> None:
compressed = zlib.compress(
str_to_bytes("Alice/file.txt") + b"file_data", level=COMPRESSION_LEVEL
)
file_data = encrypt_and_sign(compressed, self.file_key)
self.assert_fr("Error: Name of file from Alice was invalid.",
process_file, self.ts, self.account, file_data, *self.args)
self.assert_se(
"Error: Name of file from Alice was invalid.",
process_file,
self.ts,
self.account,
file_data,
*self.args,
)
@mock.patch('time.sleep', return_value=None)
def test_successful_storage_of_file(self, _):
compressed = zlib.compress(str_to_bytes("test_file.txt") + b'file_data', level=COMPRESSION_LEVEL)
file_data = encrypt_and_sign(compressed, self.file_key)
@mock.patch("time.sleep", return_value=None)
def test_successful_storage_of_file(self, _) -> None:
compressed = zlib.compress(
str_to_bytes("test_file.txt") + b"file_data", level=COMPRESSION_LEVEL
)
file_data = encrypt_and_sign(compressed, self.file_key)
self.assertIsNone(process_file(self.ts, self.account, file_data, *self.args))
@mock.patch('time.sleep', return_value=None)
def test_successful_storage_during_traffic_masking(self, _):
@mock.patch("time.sleep", return_value=None)
def test_successful_storage_during_traffic_masking(self, _) -> None:
# Setup
self.settings.traffic_masking = True
self.window_list.active_win = self.window_list.get_window(nick_to_pub_key('Bob'))
self.window_list.active_win = self.window_list.get_window(
nick_to_pub_key("Bob")
)
compressed = zlib.compress(str_to_bytes("testfile.txt") + b'file_data', level=COMPRESSION_LEVEL)
file_data = encrypt_and_sign(compressed, self.file_key)
compressed = zlib.compress(
str_to_bytes("testfile.txt") + b"file_data", level=COMPRESSION_LEVEL
)
file_data = encrypt_and_sign(compressed, self.file_key)
self.assertIsNone(process_file(self.ts, self.account, file_data, *self.args))
self.assertEqual(self.window_list.get_window(nick_to_pub_key('Bob')).message_log[0][1],
"Stored file from Alice as 'testfile.txt'.")
self.assertEqual(
self.window_list.get_window(nick_to_pub_key("Bob")).message_log[0][1],
"Stored file from Alice as 'testfile.txt'.",
)
self.assertTrue(os.path.isfile(f'{DIR_RECV_FILES}Alice/testfile.txt'))
self.assertTrue(os.path.isfile(f"{DIR_RECV_FILES}Alice/testfile.txt"))
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main(exit=False)

View File

@ -25,158 +25,270 @@ import unittest
from multiprocessing import Queue
from datetime import datetime
from unittest import mock
from datetime import datetime
from unittest import mock
from unittest.mock import MagicMock
from src.common.crypto import argon2_kdf, encrypt_and_sign
from src.common.encoding import b58encode, str_to_bytes
from src.common.exceptions import FunctionReturn
from src.common.statics import (ARGON2_SALT_LENGTH, BOLD_ON, CLEAR_ENTIRE_SCREEN, CONFIRM_CODE_LENGTH,
CURSOR_LEFT_UP_CORNER, FINGERPRINT_LENGTH, LOCAL_ID, NORMAL_TEXT, PSK_FILE_SIZE,
SYMMETRIC_KEY_LENGTH, WIN_TYPE_CONTACT, WIN_UID_LOCAL, XCHACHA20_NONCE_LENGTH)
from src.common.crypto import argon2_kdf, encrypt_and_sign
from src.common.encoding import b58encode, str_to_bytes
from src.common.exceptions import SoftError
from src.common.statics import (
ARGON2_SALT_LENGTH,
BOLD_ON,
CLEAR_ENTIRE_SCREEN,
CONFIRM_CODE_LENGTH,
CURSOR_LEFT_UP_CORNER,
FINGERPRINT_LENGTH,
LOCAL_ID,
NORMAL_TEXT,
PSK_FILE_SIZE,
SYMMETRIC_KEY_LENGTH,
WIN_TYPE_CONTACT,
WIN_UID_COMMAND,
XCHACHA20_NONCE_LENGTH,
)
from src.receiver.key_exchanges import key_ex_ecdhe, key_ex_psk_rx, key_ex_psk_tx, local_key_rdy, process_local_key
from src.receiver.key_exchanges import (
key_ex_ecdhe,
key_ex_psk_rx,
key_ex_psk_tx,
local_key_rdy,
process_local_key,
)
from tests.mock_classes import Contact, ContactList, KeyList, KeySet, Settings, WindowList
from tests.utils import cd_unit_test, cleanup, nick_to_short_address, nick_to_pub_key, tear_queue, TFCTestCase
from tests.utils import UNDECODABLE_UNICODE
from tests.mock_classes import (
Contact,
ContactList,
KeyList,
KeySet,
Settings,
WindowList,
)
from tests.utils import (
cd_unit_test,
cleanup,
nick_to_short_address,
nick_to_pub_key,
tear_queue,
TFCTestCase,
)
from tests.utils import UNDECODABLE_UNICODE
class TestProcessLocalKey(TFCTestCase):
kek = os.urandom(SYMMETRIC_KEY_LENGTH)
kek = os.urandom(SYMMETRIC_KEY_LENGTH)
new_kek = os.urandom(SYMMETRIC_KEY_LENGTH)
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.contact_list = ContactList(nicks=[LOCAL_ID, 'Alice'])
self.key_list = KeyList( nicks=[LOCAL_ID, 'Alice'])
self.window_list = WindowList( nicks=[LOCAL_ID, 'Alice'])
self.settings = Settings()
self.ts = datetime.now()
self.kdk_hashes = list()
self.contact_list = ContactList(nicks=[LOCAL_ID, "Alice"])
self.key_list = KeyList(nicks=[LOCAL_ID, "Alice"])
self.window_list = WindowList(nicks=[LOCAL_ID, "Alice"])
self.settings = Settings()
self.ts = datetime.now()
self.kdk_hashes = list()
self.packet_hashes = list()
self.l_queue = Queue()
self.key = os.urandom(SYMMETRIC_KEY_LENGTH)
self.hek = os.urandom(SYMMETRIC_KEY_LENGTH)
self.conf_code = os.urandom(CONFIRM_CODE_LENGTH)
self.packet = encrypt_and_sign(self.key + self.hek + self.conf_code, key=self.kek)
self.args = (self.window_list, self.contact_list, self.key_list, self.settings,
self.kdk_hashes, self.packet_hashes, self.l_queue)
self.l_queue = Queue()
self.key = os.urandom(SYMMETRIC_KEY_LENGTH)
self.hek = os.urandom(SYMMETRIC_KEY_LENGTH)
self.conf_code = os.urandom(CONFIRM_CODE_LENGTH)
self.packet = encrypt_and_sign(
self.key + self.hek + self.conf_code, key=self.kek
)
self.args = (
self.window_list,
self.contact_list,
self.key_list,
self.settings,
self.kdk_hashes,
self.packet_hashes,
self.l_queue,
)
def tearDown(self):
def tearDown(self) -> None:
"""Post-test actions."""
tear_queue(self.l_queue)
@mock.patch('tkinter.Tk', return_value=MagicMock())
@mock.patch('time.sleep', return_value=None)
@mock.patch('builtins.input', return_value='5KfgdgUvseWfNkoUPWSvxMPNStu5wBBxyjz1zpZtLEjk7ZvwEAT')
def test_invalid_decryption_key_raises_fr(self, *_):
@mock.patch("tkinter.Tk", return_value=MagicMock())
@mock.patch("time.sleep", return_value=None)
@mock.patch(
"builtins.input",
return_value="5KfgdgUvseWfNkoUPWSvxMPNStu5wBBxyjz1zpZtLEjk7ZvwEAT",
)
def test_invalid_decryption_key_raises_fr(self, *_) -> None:
# Setup
packet = b''
packet = b""
self.key_list.keysets = []
# Test
self.assert_fr("Error: Incorrect key decryption key.", process_local_key, self.ts, packet, *self.args)
self.assert_se(
"Error: Incorrect key decryption key.",
process_local_key,
self.ts,
packet,
*self.args,
)
@mock.patch('tkinter.Tk', return_value=MagicMock())
@mock.patch('time.sleep', return_value=None)
@mock.patch('builtins.input', side_effect=['5KfgdgUvseWfNkoUPWSvxMPNStu5wBBxyjz1zpZtLEjk7ZvwEAT', b58encode(kek)])
@mock.patch('os.system', return_value=None)
def test_successful_local_key_processing_with_existing_local_key(self, *_):
self.assert_fr("Error: Incorrect key decryption key.", process_local_key, self.ts, self.packet, *self.args)
self.assertIsNone(process_local_key(self.ts, self.packet, *self.args))
@mock.patch("src.common.misc.reset_terminal", return_value=None)
@mock.patch("tkinter.Tk", return_value=MagicMock())
@mock.patch("time.sleep", return_value=None)
@mock.patch(
"builtins.input",
side_effect=[
"5KfgdgUvseWfNkoUPWSvxMPNStu5wBBxyjz1zpZtLEjk7ZvwEAT",
b58encode(kek),
],
)
def test_successful_local_key_processing_with_existing_local_key(self, *_) -> None:
self.assert_se(
"Error: Incorrect key decryption key.",
process_local_key,
self.ts,
self.packet,
*self.args,
)
self.assert_se(
"Added new local key.", process_local_key, self.ts, self.packet, *self.args
)
@mock.patch('tkinter.Tk', return_value=MagicMock())
@mock.patch('time.sleep', return_value=None)
@mock.patch('builtins.input', return_value=b58encode(kek))
@mock.patch('os.system', return_value=None)
def test_successful_local_key_processing_existing_bootstrap(self, *_):
@mock.patch("src.common.misc.reset_terminal", return_value=None)
@mock.patch("tkinter.Tk", return_value=MagicMock())
@mock.patch("time.sleep", return_value=None)
@mock.patch("builtins.input", return_value=b58encode(kek))
def test_successful_local_key_processing_existing_bootstrap(self, *_) -> None:
# Setup
self.key_list.keysets = []
# Test
self.assertIsNone(process_local_key(self.ts, self.packet, *self.args))
self.assertEqual(self.window_list.active_win.uid, WIN_UID_LOCAL)
self.assert_se(
"Added new local key.", process_local_key, self.ts, self.packet, *self.args
)
self.assertEqual(self.window_list.active_win.uid, WIN_UID_COMMAND)
@mock.patch('tkinter.Tk', return_value=MagicMock())
@mock.patch('time.sleep', return_value=None)
@mock.patch('builtins.input', side_effect=KeyboardInterrupt)
def test_keyboard_interrupt_raises_fr(self, *_):
@mock.patch("tkinter.Tk", return_value=MagicMock())
@mock.patch("time.sleep", return_value=None)
@mock.patch("builtins.input", side_effect=KeyboardInterrupt)
def test_keyboard_interrupt_raises_fr(self, *_) -> None:
# Setup
self.window_list.active_win = self.window_list.get_window(nick_to_pub_key('Alice'))
self.window_list.active_win = self.window_list.get_window(
nick_to_pub_key("Alice")
)
# Test
self.assert_fr("Local key setup aborted.", process_local_key, self.ts, bytes(SYMMETRIC_KEY_LENGTH), *self.args)
self.assert_se(
"Local key setup aborted.",
process_local_key,
self.ts,
bytes(SYMMETRIC_KEY_LENGTH),
*self.args,
)
@mock.patch('tkinter.Tk', return_value=MagicMock())
@mock.patch('os.system', return_value=None)
@mock.patch('time.sleep', return_value=None)
@mock.patch('builtins.input', side_effect=[b58encode(kek), b58encode(kek), b58encode(kek), b58encode(new_kek)])
def test_old_local_key_packet_raises_fr(self, *_):
@mock.patch("src.common.misc.reset_terminal", return_value=None)
@mock.patch("tkinter.Tk", return_value=MagicMock())
@mock.patch("time.sleep", return_value=None)
@mock.patch(
"builtins.input",
side_effect=[
b58encode(kek),
b58encode(kek),
b58encode(kek),
b58encode(new_kek),
],
)
def test_old_local_key_packet_raises_fr(self, *_) -> None:
# Setup
self.key_list.keysets = []
new_key = os.urandom(SYMMETRIC_KEY_LENGTH)
new_hek = os.urandom(SYMMETRIC_KEY_LENGTH)
new_conf_code = os.urandom(CONFIRM_CODE_LENGTH)
new_packet = encrypt_and_sign(new_key + new_hek + new_conf_code, key=self.new_kek)
new_key = os.urandom(SYMMETRIC_KEY_LENGTH)
new_hek = os.urandom(SYMMETRIC_KEY_LENGTH)
new_conf_code = os.urandom(CONFIRM_CODE_LENGTH)
new_packet = encrypt_and_sign(
new_key + new_hek + new_conf_code, key=self.new_kek
)
# Test
self.assertIsNone(process_local_key(self.ts, self.packet, *self.args))
self.assert_fr("Error: Received old local key packet.", process_local_key, self.ts, self.packet, *self.args)
self.assertIsNone(process_local_key(self.ts, new_packet, *self.args))
self.assert_se(
"Added new local key.", process_local_key, self.ts, self.packet, *self.args
)
self.assert_se(
"Error: Received old local key packet.",
process_local_key,
self.ts,
self.packet,
*self.args,
)
self.assert_se(
"Added new local key.", process_local_key, self.ts, new_packet, *self.args
)
@mock.patch('tkinter.Tk', side_effect=[MagicMock(clipboard_get =MagicMock(return_value=b58encode(new_kek)),
clipboard_clear=MagicMock(side_effect=[tkinter.TclError]))])
@mock.patch('os.system', return_value=None)
@mock.patch('time.sleep', return_value=None)
@mock.patch('builtins.input', side_effect=[b58encode(new_kek)])
def test_loading_local_key_from_queue(self, *_):
@mock.patch("src.common.misc.reset_terminal", return_value=None)
@mock.patch(
"tkinter.Tk",
side_effect=[
MagicMock(
clipboard_get=MagicMock(return_value=b58encode(new_kek)),
clipboard_clear=MagicMock(side_effect=[tkinter.TclError]),
)
],
)
@mock.patch("time.sleep", return_value=None)
@mock.patch("builtins.input", side_effect=[b58encode(new_kek)])
def test_loading_local_key_from_queue(self, *_) -> None:
# Setup
self.key_list.keysets = []
new_key = os.urandom(SYMMETRIC_KEY_LENGTH)
new_hek = os.urandom(SYMMETRIC_KEY_LENGTH)
new_conf_code = os.urandom(CONFIRM_CODE_LENGTH)
new_packet = encrypt_and_sign(new_key + new_hek + new_conf_code, key=self.new_kek)
next_packet = os.urandom(len(new_packet))
first_packet = os.urandom(len(new_packet))
new_key = os.urandom(SYMMETRIC_KEY_LENGTH)
new_hek = os.urandom(SYMMETRIC_KEY_LENGTH)
new_conf_code = os.urandom(CONFIRM_CODE_LENGTH)
new_packet = encrypt_and_sign(
new_key + new_hek + new_conf_code, key=self.new_kek
)
next_packet = os.urandom(len(new_packet))
first_packet = os.urandom(len(new_packet))
self.l_queue.put((datetime.now(), first_packet))
self.l_queue.put((datetime.now(), new_packet))
self.l_queue.put((datetime.now(), next_packet))
# Test
self.assertEqual(self.l_queue.qsize(), 3)
self.assertIsNone(process_local_key(self.ts, self.packet, *self.args))
self.assert_se(
"Added new local key.", process_local_key, self.ts, self.packet, *self.args
)
self.assertEqual(self.l_queue.qsize(), 1)
class TestLocalKeyRdy(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.ts = datetime.fromtimestamp(1502750000)
@mock.patch('time.sleep', return_value=None)
def test_local_key_installed_no_contacts(self, _):
@mock.patch("time.sleep", return_value=None)
def test_local_key_installed_no_contacts(self, _) -> None:
# Setup
self.window_list = WindowList(nicks=[LOCAL_ID])
self.window_list = WindowList(nicks=[LOCAL_ID])
self.contact_list = ContactList(nicks=[LOCAL_ID])
# Test
self.assert_prints(f"""\
self.assert_prints(
f"""\
{BOLD_ON} Successfully completed the local key setup. {NORMAL_TEXT}
{CLEAR_ENTIRE_SCREEN+CURSOR_LEFT_UP_CORNER}
{BOLD_ON} Waiting for new contacts {NORMAL_TEXT}
""", local_key_rdy, self.ts, self.window_list, self.contact_list)
""",
local_key_rdy,
self.ts,
self.window_list,
self.contact_list,
)
@mock.patch('time.sleep', return_value=None)
def test_local_key_installed_existing_contact(self, _):
@mock.patch("time.sleep", return_value=None)
def test_local_key_installed_existing_contact(self, _) -> None:
# Setup
self.window_list = WindowList(nicks=[LOCAL_ID, 'Alice'])
self.contact_list = ContactList(nicks=[LOCAL_ID, 'Alice'])
self.window_list.active_win = self.window_list.get_window(nick_to_pub_key('Alice'))
self.window_list = WindowList(nicks=[LOCAL_ID, "Alice"])
self.contact_list = ContactList(nicks=[LOCAL_ID, "Alice"])
self.window_list.active_win = self.window_list.get_window(
nick_to_pub_key("Alice")
)
self.window_list.active_win.type = WIN_TYPE_CONTACT
# Test
@ -184,102 +296,138 @@ class TestLocalKeyRdy(TFCTestCase):
class TestKeyExECDHE(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.ts = datetime.fromtimestamp(1502750000)
self.window_list = WindowList(nicks=[LOCAL_ID])
self.ts = datetime.fromtimestamp(1502750000)
self.window_list = WindowList(nicks=[LOCAL_ID])
self.contact_list = ContactList()
self.key_list = KeyList()
self.settings = Settings()
self.packet = (nick_to_pub_key("Alice")
+ SYMMETRIC_KEY_LENGTH * b'\x01'
+ SYMMETRIC_KEY_LENGTH * b'\x02'
+ SYMMETRIC_KEY_LENGTH * b'\x03'
+ SYMMETRIC_KEY_LENGTH * b'\x04'
+ str_to_bytes('Alice'))
self.args = self.packet, self.ts, self.window_list, self.contact_list, self.key_list, self.settings
self.key_list = KeyList()
self.settings = Settings()
self.packet = (
nick_to_pub_key("Alice")
+ SYMMETRIC_KEY_LENGTH * b"\x01"
+ SYMMETRIC_KEY_LENGTH * b"\x02"
+ SYMMETRIC_KEY_LENGTH * b"\x03"
+ SYMMETRIC_KEY_LENGTH * b"\x04"
+ str_to_bytes("Alice")
)
self.args = (
self.packet,
self.ts,
self.window_list,
self.contact_list,
self.key_list,
self.settings,
)
@mock.patch('time.sleep', return_value=None)
def test_invalid_nick_raises_fr(self, _):
self.packet = (nick_to_pub_key("Alice")
+ SYMMETRIC_KEY_LENGTH * b'\x01'
+ SYMMETRIC_KEY_LENGTH * b'\x02'
+ SYMMETRIC_KEY_LENGTH * b'\x03'
+ SYMMETRIC_KEY_LENGTH * b'\x04'
+ UNDECODABLE_UNICODE)
self.args = self.packet, self.ts, self.window_list, self.contact_list, self.key_list, self.settings
@mock.patch("time.sleep", return_value=None)
def test_invalid_nick_raises_fr(self, _) -> None:
self.packet = (
nick_to_pub_key("Alice")
+ SYMMETRIC_KEY_LENGTH * b"\x01"
+ SYMMETRIC_KEY_LENGTH * b"\x02"
+ SYMMETRIC_KEY_LENGTH * b"\x03"
+ SYMMETRIC_KEY_LENGTH * b"\x04"
+ UNDECODABLE_UNICODE
)
self.args = (
self.packet,
self.ts,
self.window_list,
self.contact_list,
self.key_list,
self.settings,
)
self.assert_fr("Error: Received invalid contact data", key_ex_ecdhe, *self.args)
self.assert_se("Error: Received invalid contact data", key_ex_ecdhe, *self.args)
@mock.patch('time.sleep', return_value=None)
def test_add_ecdhe_keys(self, _):
@mock.patch("time.sleep", return_value=None)
def test_add_ecdhe_keys(self, _) -> None:
self.assertIsNone(key_ex_ecdhe(*self.args))
keyset = self.key_list.get_keyset(nick_to_pub_key("Alice"))
self.assertIsInstance(keyset, KeySet)
self.assertEqual(keyset.onion_pub_key, nick_to_pub_key("Alice"))
self.assertEqual(keyset.tx_mk, SYMMETRIC_KEY_LENGTH * b'\x01')
self.assertEqual(keyset.rx_mk, SYMMETRIC_KEY_LENGTH * b'\x02')
self.assertEqual(keyset.tx_hk, SYMMETRIC_KEY_LENGTH * b'\x03')
self.assertEqual(keyset.rx_hk, SYMMETRIC_KEY_LENGTH * b'\x04')
self.assertEqual(keyset.tx_mk, SYMMETRIC_KEY_LENGTH * b"\x01")
self.assertEqual(keyset.rx_mk, SYMMETRIC_KEY_LENGTH * b"\x02")
self.assertEqual(keyset.tx_hk, SYMMETRIC_KEY_LENGTH * b"\x03")
self.assertEqual(keyset.rx_hk, SYMMETRIC_KEY_LENGTH * b"\x04")
contact = self.contact_list.get_contact_by_pub_key(nick_to_pub_key("Alice"))
self.assertIsInstance(contact, Contact)
self.assertEqual(contact.onion_pub_key, nick_to_pub_key("Alice"))
self.assertEqual(contact.nick, 'Alice')
self.assertEqual(contact.nick, "Alice")
self.assertEqual(contact.rx_fingerprint, bytes(FINGERPRINT_LENGTH))
self.assertEqual(contact.tx_fingerprint, bytes(FINGERPRINT_LENGTH))
class TestKeyExPSKTx(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.ts = datetime.fromtimestamp(1502750000)
self.window_list = WindowList(nicks=[LOCAL_ID])
self.ts = datetime.fromtimestamp(1502750000)
self.window_list = WindowList(nicks=[LOCAL_ID])
self.contact_list = ContactList()
self.key_list = KeyList()
self.settings = Settings()
self.packet = (nick_to_pub_key("Alice")
+ SYMMETRIC_KEY_LENGTH * b'\x01'
+ bytes(SYMMETRIC_KEY_LENGTH)
+ SYMMETRIC_KEY_LENGTH * b'\x02'
+ bytes(SYMMETRIC_KEY_LENGTH)
+ str_to_bytes('Alice'))
self.args = self.packet, self.ts, self.window_list, self.contact_list, self.key_list, self.settings
self.key_list = KeyList()
self.settings = Settings()
self.packet = (
nick_to_pub_key("Alice")
+ SYMMETRIC_KEY_LENGTH * b"\x01"
+ bytes(SYMMETRIC_KEY_LENGTH)
+ SYMMETRIC_KEY_LENGTH * b"\x02"
+ bytes(SYMMETRIC_KEY_LENGTH)
+ str_to_bytes("Alice")
)
self.args = (
self.packet,
self.ts,
self.window_list,
self.contact_list,
self.key_list,
self.settings,
)
@mock.patch('time.sleep', return_value=None)
def test_invalid_nick_raises_fr(self, _):
self.packet = (nick_to_pub_key("Alice")
+ SYMMETRIC_KEY_LENGTH * b'\x01'
+ bytes(SYMMETRIC_KEY_LENGTH)
+ SYMMETRIC_KEY_LENGTH * b'\x02'
+ bytes(SYMMETRIC_KEY_LENGTH)
+ UNDECODABLE_UNICODE)
self.args = self.packet, self.ts, self.window_list, self.contact_list, self.key_list, self.settings
@mock.patch("time.sleep", return_value=None)
def test_invalid_nick_raises_fr(self, _) -> None:
self.packet = (
nick_to_pub_key("Alice")
+ SYMMETRIC_KEY_LENGTH * b"\x01"
+ bytes(SYMMETRIC_KEY_LENGTH)
+ SYMMETRIC_KEY_LENGTH * b"\x02"
+ bytes(SYMMETRIC_KEY_LENGTH)
+ UNDECODABLE_UNICODE
)
self.args = (
self.packet,
self.ts,
self.window_list,
self.contact_list,
self.key_list,
self.settings,
)
self.assert_fr("Error: Received invalid contact data", key_ex_psk_tx, *self.args)
self.assert_se(
"Error: Received invalid contact data", key_ex_psk_tx, *self.args
)
@mock.patch('time.sleep', return_value=None)
def test_add_psk_tx_keys(self, _):
@mock.patch("time.sleep", return_value=None)
def test_add_psk_tx_keys(self, _) -> None:
self.assertIsNone(key_ex_psk_tx(*self.args))
keyset = self.key_list.get_keyset(nick_to_pub_key("Alice"))
self.assertIsInstance(keyset, KeySet)
self.assertEqual(keyset.onion_pub_key, nick_to_pub_key("Alice"))
self.assertEqual(keyset.tx_mk, SYMMETRIC_KEY_LENGTH * b'\x01')
self.assertEqual(keyset.rx_mk, bytes(SYMMETRIC_KEY_LENGTH))
self.assertEqual(keyset.tx_hk, SYMMETRIC_KEY_LENGTH * b'\x02')
self.assertEqual(keyset.rx_hk, bytes(SYMMETRIC_KEY_LENGTH))
self.assertEqual(keyset.tx_mk, SYMMETRIC_KEY_LENGTH * b"\x01")
self.assertEqual(keyset.rx_mk, bytes(SYMMETRIC_KEY_LENGTH))
self.assertEqual(keyset.tx_hk, SYMMETRIC_KEY_LENGTH * b"\x02")
self.assertEqual(keyset.rx_hk, bytes(SYMMETRIC_KEY_LENGTH))
contact = self.contact_list.get_contact_by_pub_key(nick_to_pub_key("Alice"))
self.assertIsInstance(contact, Contact)
self.assertEqual(contact.onion_pub_key, nick_to_pub_key("Alice"))
self.assertEqual(contact.nick, 'Alice')
self.assertEqual(contact.onion_pub_key, nick_to_pub_key("Alice"))
self.assertEqual(contact.nick, "Alice")
self.assertEqual(contact.tx_fingerprint, bytes(FINGERPRINT_LENGTH))
self.assertEqual(contact.rx_fingerprint, bytes(FINGERPRINT_LENGTH))
@ -288,96 +436,118 @@ class TestKeyExPSKRx(TFCTestCase):
file_name = f"{nick_to_short_address('User')}.psk - give to {nick_to_short_address('Alice')}"
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.unit_test_dir = cd_unit_test()
self.packet = b'\x00' + nick_to_pub_key("Alice")
self.ts = datetime.now()
self.window_list = WindowList( nicks=['Alice', LOCAL_ID])
self.contact_list = ContactList(nicks=['Alice', LOCAL_ID])
self.key_list = KeyList( nicks=['Alice', LOCAL_ID])
self.settings = Settings(disable_gui_dialog=True)
self.file_name = self.file_name
self.args = self.packet, self.ts, self.window_list, self.contact_list, self.key_list, self.settings
self.packet = b"\x00" + nick_to_pub_key("Alice")
self.ts = datetime.now()
self.window_list = WindowList(nicks=["Alice", LOCAL_ID])
self.contact_list = ContactList(nicks=["Alice", LOCAL_ID])
self.key_list = KeyList(nicks=["Alice", LOCAL_ID])
self.settings = Settings(disable_gui_dialog=True)
self.file_name = self.file_name
self.args = (
self.packet,
self.ts,
self.window_list,
self.contact_list,
self.key_list,
self.settings,
)
def tearDown(self):
def tearDown(self) -> None:
"""Post-test actions."""
cleanup(self.unit_test_dir)
def test_unknown_account_raises_fr(self):
self.assert_fr(f"Error: Unknown account '{nick_to_short_address('Bob')}'.",
key_ex_psk_rx, b'\x00' + nick_to_pub_key("Bob"),
self.ts, self.window_list, self.contact_list, self.key_list, self.settings)
def test_unknown_account_raises_fr(self) -> None:
self.assert_se(
f"Error: Unknown account '{nick_to_short_address('Bob')}'.",
key_ex_psk_rx,
b"\x00" + nick_to_pub_key("Bob"),
self.ts,
self.window_list,
self.contact_list,
self.key_list,
self.settings,
)
@mock.patch('builtins.input', return_value=file_name)
def test_invalid_psk_data_raises_fr(self, _):
@mock.patch("builtins.input", return_value=file_name)
def test_invalid_psk_data_raises_fr(self, _) -> None:
# Setup
with open(self.file_name, 'wb+') as f:
with open(self.file_name, "wb+") as f:
f.write(os.urandom(135))
# Test
self.assert_fr("Error: The PSK data in the file was invalid.", key_ex_psk_rx, *self.args)
self.assert_se(
"Error: The PSK data in the file was invalid.", key_ex_psk_rx, *self.args
)
@mock.patch('time.sleep', return_value=None)
@mock.patch('builtins.input', return_value=file_name)
def test_permission_error_raises_fr(self, *_):
@mock.patch("time.sleep", return_value=None)
@mock.patch("builtins.input", return_value=file_name)
def test_permission_error_raises_fr(self, *_) -> None:
# Setup
with open(self.file_name, 'wb+') as f:
with open(self.file_name, "wb+") as f:
f.write(os.urandom(PSK_FILE_SIZE))
# Test
error_raised = False
try:
with mock.patch('builtins.open', side_effect=PermissionError):
with mock.patch("builtins.open", side_effect=PermissionError):
key_ex_psk_rx(*self.args)
except FunctionReturn as inst:
except SoftError as inst:
error_raised = True
self.assertEqual("Error: No read permission for the PSK file.", inst.message)
self.assertEqual(
"Error: No read permission for the PSK file.", inst.message
)
self.assertTrue(error_raised)
@mock.patch('src.receiver.key_exchanges.ARGON2_PSK_PARALLELISM', 1)
@mock.patch('src.receiver.key_exchanges.ARGON2_PSK_TIME_COST', 1)
@mock.patch('src.receiver.key_exchanges.ARGON2_PSK_MEMORY_COST', 100)
@mock.patch('getpass.getpass', side_effect=['invalid', 'password'])
@mock.patch('time.sleep', return_value=None)
@mock.patch('os.urandom', side_effect=[bytes(XCHACHA20_NONCE_LENGTH)])
@mock.patch('builtins.input', return_value=file_name)
def test_invalid_keys_raise_fr(self, *_):
@mock.patch("src.receiver.key_exchanges.ARGON2_PSK_PARALLELISM", 1)
@mock.patch("src.receiver.key_exchanges.ARGON2_PSK_TIME_COST", 1)
@mock.patch("src.receiver.key_exchanges.ARGON2_PSK_MEMORY_COST", 100)
@mock.patch("getpass.getpass", side_effect=["invalid", "password"])
@mock.patch("time.sleep", return_value=None)
@mock.patch("os.urandom", side_effect=[bytes(XCHACHA20_NONCE_LENGTH)])
@mock.patch("builtins.input", return_value=file_name)
def test_invalid_keys_raise_fr(self, *_) -> None:
# Setup
keyset = self.key_list.get_keyset(nick_to_pub_key("Alice"))
keyset = self.key_list.get_keyset(nick_to_pub_key("Alice"))
keyset.rx_mk = bytes(SYMMETRIC_KEY_LENGTH)
keyset.rx_hk = bytes(SYMMETRIC_KEY_LENGTH)
salt = bytes(ARGON2_SALT_LENGTH)
salt = bytes(ARGON2_SALT_LENGTH)
rx_key = bytes(SYMMETRIC_KEY_LENGTH)
rx_hek = bytes(SYMMETRIC_KEY_LENGTH)
kek = argon2_kdf('password', salt, time_cost=1, memory_cost=100, parallelism=1)
kek = argon2_kdf("password", salt, time_cost=1, memory_cost=100, parallelism=1)
ct_tag = encrypt_and_sign(rx_key + rx_hek, key=kek)
with open(self.file_name, 'wb+') as f:
with open(self.file_name, "wb+") as f:
f.write(salt + ct_tag)
# Test
self.assert_fr("Error: Received invalid keys from contact.", key_ex_psk_rx, *self.args)
self.assert_se(
"Error: Received invalid keys from contact.", key_ex_psk_rx, *self.args
)
@mock.patch('src.receiver.key_exchanges.ARGON2_PSK_PARALLELISM', 1)
@mock.patch('src.receiver.key_exchanges.ARGON2_PSK_TIME_COST', 1)
@mock.patch('src.receiver.key_exchanges.ARGON2_PSK_MEMORY_COST', 100)
@mock.patch('time.sleep', return_value=None)
@mock.patch('builtins.input', return_value=file_name)
@mock.patch('getpass.getpass', return_value='test_password')
def test_valid_psk(self, *_):
@mock.patch("src.receiver.key_exchanges.ARGON2_PSK_PARALLELISM", 1)
@mock.patch("src.receiver.key_exchanges.ARGON2_PSK_TIME_COST", 1)
@mock.patch("src.receiver.key_exchanges.ARGON2_PSK_MEMORY_COST", 100)
@mock.patch("time.sleep", return_value=None)
@mock.patch("builtins.input", return_value=file_name)
@mock.patch("getpass.getpass", return_value="test_password")
def test_valid_psk(self, *_) -> None:
# Setup
keyset = self.key_list.get_keyset(nick_to_pub_key("Alice"))
keyset = self.key_list.get_keyset(nick_to_pub_key("Alice"))
keyset.rx_mk = bytes(SYMMETRIC_KEY_LENGTH)
keyset.rx_hk = bytes(SYMMETRIC_KEY_LENGTH)
salt = os.urandom(ARGON2_SALT_LENGTH)
rx_key = os.urandom(SYMMETRIC_KEY_LENGTH)
rx_hek = os.urandom(SYMMETRIC_KEY_LENGTH)
kek = argon2_kdf('test_password', salt, time_cost=1, memory_cost=100, parallelism=1)
ct_tag = encrypt_and_sign(rx_key + rx_hek, key=kek)
salt = os.urandom(ARGON2_SALT_LENGTH)
rx_key = os.urandom(SYMMETRIC_KEY_LENGTH)
rx_hek = os.urandom(SYMMETRIC_KEY_LENGTH)
kek = argon2_kdf(
"test_password", salt, time_cost=1, memory_cost=100, parallelism=1
)
ct_tag = encrypt_and_sign(rx_key + rx_hek, key=kek)
with open(self.file_name, 'wb+') as f:
with open(self.file_name, "wb+") as f:
f.write(salt + ct_tag)
# Test
@ -387,26 +557,28 @@ class TestKeyExPSKRx(TFCTestCase):
self.assertEqual(keyset.rx_mk, rx_key)
self.assertEqual(keyset.rx_hk, rx_hek)
@mock.patch('src.receiver.key_exchanges.ARGON2_PSK_PARALLELISM', 1)
@mock.patch('src.receiver.key_exchanges.ARGON2_PSK_TIME_COST', 1)
@mock.patch('src.receiver.key_exchanges.ARGON2_PSK_MEMORY_COST', 100)
@mock.patch('subprocess.Popen')
@mock.patch('time.sleep', return_value=None)
@mock.patch('builtins.input', side_effect=[file_name, ''])
@mock.patch('getpass.getpass', return_value='test_password')
def test_valid_psk_overwrite_failure(self, *_):
@mock.patch("src.receiver.key_exchanges.ARGON2_PSK_PARALLELISM", 1)
@mock.patch("src.receiver.key_exchanges.ARGON2_PSK_TIME_COST", 1)
@mock.patch("src.receiver.key_exchanges.ARGON2_PSK_MEMORY_COST", 100)
@mock.patch("subprocess.Popen")
@mock.patch("time.sleep", return_value=None)
@mock.patch("builtins.input", side_effect=[file_name, ""])
@mock.patch("getpass.getpass", return_value="test_password")
def test_valid_psk_overwrite_failure(self, *_) -> None:
# Setup
keyset = self.key_list.get_keyset(nick_to_pub_key("Alice"))
keyset = self.key_list.get_keyset(nick_to_pub_key("Alice"))
keyset.rx_mk = bytes(SYMMETRIC_KEY_LENGTH)
keyset.rx_hk = bytes(SYMMETRIC_KEY_LENGTH)
salt = os.urandom(ARGON2_SALT_LENGTH)
salt = os.urandom(ARGON2_SALT_LENGTH)
rx_key = os.urandom(SYMMETRIC_KEY_LENGTH)
rx_hek = os.urandom(SYMMETRIC_KEY_LENGTH)
kek = argon2_kdf('test_password', salt, time_cost=1, memory_cost=100, parallelism=1)
kek = argon2_kdf(
"test_password", salt, time_cost=1, memory_cost=100, parallelism=1
)
ct_tag = encrypt_and_sign(rx_key + rx_hek, key=kek)
with open(self.file_name, 'wb+') as f:
with open(self.file_name, "wb+") as f:
f.write(salt + ct_tag)
# Test
@ -416,19 +588,18 @@ class TestKeyExPSKRx(TFCTestCase):
self.assertEqual(keyset.rx_mk, rx_key)
self.assertEqual(keyset.rx_hk, rx_hek)
@mock.patch('src.receiver.key_exchanges.ARGON2_PSK_TIME_COST', 1)
@mock.patch('src.receiver.key_exchanges.ARGON2_PSK_MEMORY_COST', 100)
@mock.patch('subprocess.Popen')
@mock.patch('time.sleep', return_value=None)
@mock.patch('builtins.input', side_effect=[file_name, ''])
@mock.patch('getpass.getpass', side_effect=[KeyboardInterrupt])
def test_valid_psk_keyboard_interrupt_raises_fr(self, *_):
with open(self.file_name, 'wb+') as f:
@mock.patch("src.receiver.key_exchanges.ARGON2_PSK_TIME_COST", 1)
@mock.patch("src.receiver.key_exchanges.ARGON2_PSK_MEMORY_COST", 100)
@mock.patch("subprocess.Popen")
@mock.patch("time.sleep", return_value=None)
@mock.patch("builtins.input", side_effect=[file_name, ""])
@mock.patch("getpass.getpass", side_effect=[KeyboardInterrupt])
def test_valid_psk_keyboard_interrupt_raises_fr(self, *_) -> None:
with open(self.file_name, "wb+") as f:
f.write(bytes(PSK_FILE_SIZE))
self.assert_fr("PSK import aborted.",
key_ex_psk_rx, *self.args)
self.assert_se("PSK import aborted.", key_ex_psk_rx, *self.args)
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main(exit=False)

View File

@ -27,295 +27,501 @@ from unittest import mock
from src.common.database import MessageLog
from src.common.encoding import bool_to_bytes
from src.common.misc import ensure_dir
from src.common.statics import (BLAKE2_DIGEST_LENGTH, DIR_USER_DATA, FILE, FILE_KEY_HEADER, GROUP_ID_LENGTH, LOCAL_ID,
LOCAL_PUBKEY, MESSAGE, MESSAGE_LENGTH, ORIGIN_CONTACT_HEADER, ORIGIN_USER_HEADER,
SYMMETRIC_KEY_LENGTH)
from src.common.misc import ensure_dir
from src.common.statics import (
BLAKE2_DIGEST_LENGTH,
DIR_USER_DATA,
FILE,
FILE_KEY_HEADER,
GROUP_ID_LENGTH,
LOCAL_ID,
LOCAL_PUBKEY,
MESSAGE,
MESSAGE_LENGTH,
ORIGIN_CONTACT_HEADER,
ORIGIN_USER_HEADER,
SYMMETRIC_KEY_LENGTH,
)
from src.receiver.messages import process_message
from src.receiver.packet import PacketList
from src.receiver.windows import WindowList
from src.receiver.messages import process_message_packet
from src.receiver.packet import PacketList
from src.receiver.windows import WindowList
from tests.mock_classes import ContactList, GroupList, KeyList, MasterKey, Settings
from tests.utils import assembly_packet_creator, cd_unit_test, cleanup, group_name_to_group_id
from tests.utils import nick_to_pub_key, TFCTestCase
from tests.utils import (
assembly_packet_creator,
cd_unit_test,
cleanup,
group_name_to_group_id,
)
from tests.utils import nick_to_pub_key, TFCTestCase
class TestProcessMessage(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.unit_test_dir = cd_unit_test()
self.msg = ("Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum consectetur purus quis"
" dapibus. Fusce venenatis lacus ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed bibendu"
"m velit maximus in. Aliquam ac metus risus. Sed cursus ornare luctus. Integer aliquet lectus id ma"
"ssa blandit imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget luctus nisl. Sed ut el"
"it iaculis, faucibus lacus eget, sodales magna. Nunc sed commodo arcu. In hac habitasse platea dic"
"tumst. Integer luctus aliquam justo, at vestibulum dolor iaculis ac. Etiam laoreet est eget odio r"
"utrum, vel malesuada lorem rhoncus. Cras finibus in neque eu euismod. Nulla facilisi. Nunc nec ali"
"quam quam, quis ullamcorper leo. Nunc egestas lectus eget est porttitor, in iaculis felis sceleris"
"que. In sem elit, fringilla id viverra commodo, sagittis varius purus. Pellentesque rutrum loborti"
"s neque a facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu.")
self.msg = (
"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum consectetur purus quis"
" dapibus. Fusce venenatis lacus ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed bibendu"
"m velit maximus in. Aliquam ac metus risus. Sed cursus ornare luctus. Integer aliquet lectus id ma"
"ssa blandit imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget luctus nisl. Sed ut el"
"it iaculis, faucibus lacus eget, sodales magna. Nunc sed commodo arcu. In hac habitasse platea dic"
"tumst. Integer luctus aliquam justo, at vestibulum dolor iaculis ac. Etiam laoreet est eget odio r"
"utrum, vel malesuada lorem rhoncus. Cras finibus in neque eu euismod. Nulla facilisi. Nunc nec ali"
"quam quam, quis ullamcorper leo. Nunc egestas lectus eget est porttitor, in iaculis felis sceleris"
"que. In sem elit, fringilla id viverra commodo, sagittis varius purus. Pellentesque rutrum loborti"
"s neque a facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu."
)
self.ts = datetime.now()
self.ts = datetime.now()
self.master_key = MasterKey()
self.settings = Settings(log_file_masking=True)
self.file_name = f'{DIR_USER_DATA}{self.settings.software_operation}_logs'
self.settings = Settings(log_file_masking=True)
self.file_name = f"{DIR_USER_DATA}{self.settings.software_operation}_logs"
self.contact_list = ContactList(nicks=['Alice', 'Bob', 'Charlie', LOCAL_ID])
self.key_list = KeyList( nicks=['Alice', 'Bob', 'Charlie', LOCAL_ID])
self.group_list = GroupList( groups=['test_group'])
self.packet_list = PacketList(contact_list=self.contact_list, settings=self.settings)
self.window_list = WindowList(contact_list=self.contact_list, settings=self.settings,
group_list=self.group_list, packet_list=self.packet_list)
self.group_id = group_name_to_group_id('test_group')
self.file_keys = dict()
self.contact_list = ContactList(nicks=["Alice", "Bob", "Charlie", LOCAL_ID])
self.key_list = KeyList(nicks=["Alice", "Bob", "Charlie", LOCAL_ID])
self.group_list = GroupList(groups=["test_group"])
self.packet_list = PacketList(
contact_list=self.contact_list, settings=self.settings
)
self.window_list = WindowList(
contact_list=self.contact_list,
settings=self.settings,
group_list=self.group_list,
packet_list=self.packet_list,
)
self.group_id = group_name_to_group_id("test_group")
self.file_keys = dict()
self.log_file = f'{DIR_USER_DATA}{self.settings.software_operation}_logs'
self.log_file = f"{DIR_USER_DATA}{self.settings.software_operation}_logs"
self.tfc_log_database = MessageLog(self.log_file, self.master_key.master_key)
self.group_list.get_group('test_group').log_messages = True
self.args = (self.window_list, self.packet_list, self.contact_list, self.key_list,
self.group_list, self.settings, self.file_keys, self.tfc_log_database)
self.group_list.get_group("test_group").log_messages = True
self.args = (
self.window_list,
self.packet_list,
self.contact_list,
self.key_list,
self.group_list,
self.settings,
self.file_keys,
self.tfc_log_database,
)
ensure_dir(DIR_USER_DATA)
def tearDown(self):
def tearDown(self) -> None:
"""Post-test actions."""
cleanup(self.unit_test_dir)
# Invalid packets
@mock.patch('time.sleep', return_value=None)
def test_invalid_origin_header_raises_fr(self, _):
@mock.patch("time.sleep", return_value=None)
def test_invalid_origin_header_raises_fr(self, _) -> None:
# Setup
invalid_origin_header = b'e'
packet = nick_to_pub_key('Alice') + invalid_origin_header + MESSAGE_LENGTH * b'm'
invalid_origin_header = b"e"
packet = (
nick_to_pub_key("Alice") + invalid_origin_header + MESSAGE_LENGTH * b"m"
)
# Test
self.assert_fr("Error: Received packet had an invalid origin-header.",
process_message, self.ts, packet, *self.args)
self.assert_se(
"Error: Received packet had an invalid origin-header.",
process_message_packet,
self.ts,
packet,
*self.args,
)
@mock.patch('time.sleep', return_value=None)
def test_masqueraded_command_raises_fr(self, _):
@mock.patch("time.sleep", return_value=None)
def test_masqueraded_command_raises_fr(self, _) -> None:
for origin_header in [ORIGIN_USER_HEADER, ORIGIN_CONTACT_HEADER]:
# Setup
packet = LOCAL_PUBKEY + origin_header + MESSAGE_LENGTH * b'm'
packet = LOCAL_PUBKEY + origin_header + MESSAGE_LENGTH * b"m"
# Test
self.assert_fr("Warning! Received packet masqueraded as a command.",
process_message, self.ts, packet, *self.args)
self.assert_se(
"Warning! Received packet masqueraded as a command.",
process_message_packet,
self.ts,
packet,
*self.args,
)
# Private messages
@mock.patch('time.sleep', return_value=None)
def test_private_msg_from_contact(self, _):
@mock.patch("time.sleep", return_value=None)
def test_private_msg_from_contact(self, _) -> None:
# Setup
assembly_ct_list = assembly_packet_creator(MESSAGE, self.msg, origin_header=ORIGIN_CONTACT_HEADER,
encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'))
assembly_ct_list = assembly_packet_creator(
MESSAGE,
self.msg,
origin_header=ORIGIN_CONTACT_HEADER,
encrypt_packet=True,
onion_pub_key=nick_to_pub_key("Alice"),
)
# Test
for p in assembly_ct_list:
self.assertIsNone(process_message(self.ts, p, *self.args))
self.assertIsNone(process_message_packet(self.ts, p, *self.args))
@mock.patch('time.sleep', return_value=None)
def test_private_msg_from_user(self, _):
@mock.patch("time.sleep", return_value=None)
def test_private_msg_from_user(self, _) -> None:
# Setup
assembly_ct_list = assembly_packet_creator(MESSAGE, self.msg, origin_header=ORIGIN_USER_HEADER,
encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'))
assembly_ct_list = assembly_packet_creator(
MESSAGE,
self.msg,
origin_header=ORIGIN_USER_HEADER,
encrypt_packet=True,
onion_pub_key=nick_to_pub_key("Alice"),
)
# Test
for p in assembly_ct_list:
self.assertIsNone(process_message(self.ts, p, *self.args))
self.assertIsNone(process_message_packet(self.ts, p, *self.args))
# Whispered messages
@mock.patch('time.sleep', return_value=None)
def test_whisper_msg_from_contact(self, _):
@mock.patch("time.sleep", return_value=None)
def test_whisper_msg_from_contact(self, _) -> None:
# Setup
assembly_ct_list = assembly_packet_creator(MESSAGE, self.msg, origin_header=ORIGIN_CONTACT_HEADER,
encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'),
whisper_header=bool_to_bytes(True))
assembly_ct_list = assembly_packet_creator(
MESSAGE,
self.msg,
origin_header=ORIGIN_CONTACT_HEADER,
encrypt_packet=True,
onion_pub_key=nick_to_pub_key("Alice"),
whisper_header=bool_to_bytes(True),
)
# Test
for p in assembly_ct_list[:-1]:
self.assertIsNone(process_message(self.ts, p, *self.args))
self.assertIsNone(process_message_packet(self.ts, p, *self.args))
for p in assembly_ct_list[-1:]:
self.assert_fr("Whisper message complete.",
process_message, self.ts, p, *self.args)
self.assert_se(
"Whisper message complete.",
process_message_packet,
self.ts,
p,
*self.args,
)
@mock.patch('time.sleep', return_value=None)
def test_whisper_msg_from_user(self, _):
@mock.patch("time.sleep", return_value=None)
def test_whisper_msg_from_user(self, _) -> None:
# Setup
assembly_ct_list = assembly_packet_creator(MESSAGE, self.msg, origin_header=ORIGIN_USER_HEADER,
encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'),
whisper_header=bool_to_bytes(True))
assembly_ct_list = assembly_packet_creator(
MESSAGE,
self.msg,
origin_header=ORIGIN_USER_HEADER,
encrypt_packet=True,
onion_pub_key=nick_to_pub_key("Alice"),
whisper_header=bool_to_bytes(True),
)
# Test
for p in assembly_ct_list[:-1]:
self.assertIsNone(process_message(self.ts, p, *self.args))
self.assertIsNone(process_message_packet(self.ts, p, *self.args))
for p in assembly_ct_list[-1:]:
self.assert_fr("Whisper message complete.", process_message, self.ts, p, *self.args)
self.assert_se(
"Whisper message complete.",
process_message_packet,
self.ts,
p,
*self.args,
)
@mock.patch('time.sleep', return_value=None)
def test_empty_whisper_msg_from_user(self, _):
@mock.patch("time.sleep", return_value=None)
def test_empty_whisper_msg_from_user(self, _) -> None:
# Setup
assembly_ct_list = assembly_packet_creator(MESSAGE, '', origin_header=ORIGIN_USER_HEADER,
encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'),
whisper_header=bool_to_bytes(True))
assembly_ct_list = assembly_packet_creator(
MESSAGE,
"",
origin_header=ORIGIN_USER_HEADER,
encrypt_packet=True,
onion_pub_key=nick_to_pub_key("Alice"),
whisper_header=bool_to_bytes(True),
)
# Test
for p in assembly_ct_list[:-1]:
self.assertIsNone(process_message(self.ts, p, *self.args))
self.assertIsNone(process_message_packet(self.ts, p, *self.args))
for p in assembly_ct_list[-1:]:
self.assert_fr("Whisper message complete.", process_message, self.ts, p, *self.args)
self.assert_se(
"Whisper message complete.",
process_message_packet,
self.ts,
p,
*self.args,
)
# File key messages
@mock.patch('time.sleep', return_value=None)
def test_user_origin_raises_fr(self, _):
assembly_ct_list = assembly_packet_creator(MESSAGE, ' ', origin_header=ORIGIN_USER_HEADER,
encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'),
message_header=FILE_KEY_HEADER)
@mock.patch("time.sleep", return_value=None)
def test_user_origin_raises_fr(self, _) -> None:
assembly_ct_list = assembly_packet_creator(
MESSAGE,
" ",
origin_header=ORIGIN_USER_HEADER,
encrypt_packet=True,
onion_pub_key=nick_to_pub_key("Alice"),
message_header=FILE_KEY_HEADER,
)
for p in assembly_ct_list[-1:]:
self.assert_fr("File key message from the user.", process_message, self.ts, p, *self.args)
self.assert_se(
"File key message from the user.",
process_message_packet,
self.ts,
p,
*self.args,
)
@mock.patch('time.sleep', return_value=None)
def test_invalid_file_key_data_raises_fr(self, _):
assembly_ct_list = assembly_packet_creator(MESSAGE, ' ', origin_header=ORIGIN_CONTACT_HEADER,
encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'),
message_header=FILE_KEY_HEADER)
@mock.patch("time.sleep", return_value=None)
def test_invalid_file_key_data_raises_fr(self, _) -> None:
assembly_ct_list = assembly_packet_creator(
MESSAGE,
" ",
origin_header=ORIGIN_CONTACT_HEADER,
encrypt_packet=True,
onion_pub_key=nick_to_pub_key("Alice"),
message_header=FILE_KEY_HEADER,
)
for p in assembly_ct_list[-1:]:
self.assert_fr("Error: Received an invalid file key message.", process_message, self.ts, p, *self.args)
self.assert_se(
"Error: Received an invalid file key message.",
process_message_packet,
self.ts,
p,
*self.args,
)
@mock.patch('time.sleep', return_value=None)
def test_too_large_file_key_data_raises_fr(self, _):
assembly_ct_list = assembly_packet_creator(MESSAGE, base64.b85encode(BLAKE2_DIGEST_LENGTH * b'a'
+ SYMMETRIC_KEY_LENGTH * b'b'
+ b'a').decode(),
origin_header=ORIGIN_CONTACT_HEADER,
encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'),
message_header=FILE_KEY_HEADER)
@mock.patch("time.sleep", return_value=None)
def test_too_large_file_key_data_raises_fr(self, _) -> None:
assembly_ct_list = assembly_packet_creator(
MESSAGE,
base64.b85encode(
BLAKE2_DIGEST_LENGTH * b"a" + SYMMETRIC_KEY_LENGTH * b"b" + b"a"
).decode(),
origin_header=ORIGIN_CONTACT_HEADER,
encrypt_packet=True,
onion_pub_key=nick_to_pub_key("Alice"),
message_header=FILE_KEY_HEADER,
)
for p in assembly_ct_list[-1:]:
self.assert_fr("Error: Received an invalid file key message.", process_message, self.ts, p, *self.args)
self.assert_se(
"Error: Received an invalid file key message.",
process_message_packet,
self.ts,
p,
*self.args,
)
@mock.patch('time.sleep', return_value=None)
def test_valid_file_key_message(self, _):
assembly_ct_list = assembly_packet_creator(MESSAGE, base64.b85encode(BLAKE2_DIGEST_LENGTH * b'a'
+ SYMMETRIC_KEY_LENGTH * b'b').decode(),
origin_header=ORIGIN_CONTACT_HEADER,
encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'),
message_header=FILE_KEY_HEADER)
@mock.patch("time.sleep", return_value=None)
def test_valid_file_key_message(self, _) -> None:
assembly_ct_list = assembly_packet_creator(
MESSAGE,
base64.b85encode(
BLAKE2_DIGEST_LENGTH * b"a" + SYMMETRIC_KEY_LENGTH * b"b"
).decode(),
origin_header=ORIGIN_CONTACT_HEADER,
encrypt_packet=True,
onion_pub_key=nick_to_pub_key("Alice"),
message_header=FILE_KEY_HEADER,
)
for p in assembly_ct_list[-1:]:
self.assert_fr("Received file decryption key from Alice", process_message, self.ts, p, *self.args)
self.assert_se(
"Received file decryption key from Alice",
process_message_packet,
self.ts,
p,
*self.args,
)
# Group messages
@mock.patch('time.sleep', return_value=None)
def test_invalid_message_header_raises_fr(self, _):
@mock.patch("time.sleep", return_value=None)
def test_invalid_message_header_raises_fr(self, _) -> None:
# Setup
assembly_ct_list = assembly_packet_creator(MESSAGE, 'test_message', origin_header=ORIGIN_CONTACT_HEADER,
encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'),
message_header=b'Z')
assembly_ct_list = assembly_packet_creator(
MESSAGE,
"test_message",
origin_header=ORIGIN_CONTACT_HEADER,
encrypt_packet=True,
onion_pub_key=nick_to_pub_key("Alice"),
message_header=b"Z",
)
# Test
self.assert_fr("Error: Message from contact had an invalid header.",
process_message, self.ts, assembly_ct_list[0], *self.args)
self.assert_se(
"Error: Message from contact had an invalid header.",
process_message_packet,
self.ts,
assembly_ct_list[0],
*self.args,
)
@mock.patch('time.sleep', return_value=None)
def test_invalid_window_raises_fr(self, _):
@mock.patch("time.sleep", return_value=None)
def test_invalid_window_raises_fr(self, _) -> None:
# Setup
assembly_ct_list = assembly_packet_creator(MESSAGE, 'test_message', origin_header=ORIGIN_CONTACT_HEADER,
encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'),
group_id=self.group_id)
assembly_ct_list = assembly_packet_creator(
MESSAGE,
"test_message",
origin_header=ORIGIN_CONTACT_HEADER,
encrypt_packet=True,
onion_pub_key=nick_to_pub_key("Alice"),
group_id=self.group_id,
)
self.group_list.get_group('test_group').group_id = GROUP_ID_LENGTH * b'a'
self.group_list.get_group("test_group").group_id = GROUP_ID_LENGTH * b"a"
# Test
self.assert_fr("Error: Received message to an unknown group.",
process_message, self.ts, assembly_ct_list[0], *self.args)
self.assert_se(
"Error: Received message to an unknown group.",
process_message_packet,
self.ts,
assembly_ct_list[0],
*self.args,
)
@mock.patch('time.sleep', return_value=None)
def test_invalid_message_raises_fr(self, _):
@mock.patch("time.sleep", return_value=None)
def test_invalid_message_raises_fr(self, _) -> None:
# Setup
assembly_ct_list = assembly_packet_creator(MESSAGE, ' ', origin_header=ORIGIN_CONTACT_HEADER,
encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'),
group_id=self.group_id, tamper_plaintext=True)
assembly_ct_list = assembly_packet_creator(
MESSAGE,
" ",
origin_header=ORIGIN_CONTACT_HEADER,
encrypt_packet=True,
onion_pub_key=nick_to_pub_key("Alice"),
group_id=self.group_id,
tamper_plaintext=True,
)
# Test
self.assert_fr("Error: Received an invalid group message.",
process_message, self.ts, assembly_ct_list[0], *self.args)
self.assert_se(
"Error: Received an invalid group message.",
process_message_packet,
self.ts,
assembly_ct_list[0],
*self.args,
)
@mock.patch('time.sleep', return_value=None)
def test_invalid_whisper_header_raises_fr(self, _):
@mock.patch("time.sleep", return_value=None)
def test_invalid_whisper_header_raises_fr(self, _) -> None:
# Setup
assembly_ct_list = assembly_packet_creator(MESSAGE, '', origin_header=ORIGIN_CONTACT_HEADER,
encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'),
whisper_header=b'', message_header=b'')
assembly_ct_list = assembly_packet_creator(
MESSAGE,
"",
origin_header=ORIGIN_CONTACT_HEADER,
encrypt_packet=True,
onion_pub_key=nick_to_pub_key("Alice"),
whisper_header=b"",
message_header=b"",
)
# Test
self.assert_fr("Error: Message from contact had an invalid whisper header.",
process_message, self.ts, assembly_ct_list[0], *self.args)
self.assert_se(
"Error: Message from contact had an invalid whisper header.",
process_message_packet,
self.ts,
assembly_ct_list[0],
*self.args,
)
@mock.patch('time.sleep', return_value=None)
def test_contact_not_in_group_raises_fr(self, _):
@mock.patch("time.sleep", return_value=None)
def test_contact_not_in_group_raises_fr(self, _) -> None:
# Setup
assembly_ct_list = assembly_packet_creator(MESSAGE, 'test_message', origin_header=ORIGIN_CONTACT_HEADER,
encrypt_packet=True, group_id=self.group_id,
onion_pub_key=nick_to_pub_key('Charlie'))
assembly_ct_list = assembly_packet_creator(
MESSAGE,
"test_message",
origin_header=ORIGIN_CONTACT_HEADER,
encrypt_packet=True,
group_id=self.group_id,
onion_pub_key=nick_to_pub_key("Charlie"),
)
# Test
self.assert_fr("Error: Account is not a member of the group.",
process_message, self.ts, assembly_ct_list[0], *self.args)
self.assert_se(
"Error: Account is not a member of the group.",
process_message_packet,
self.ts,
assembly_ct_list[0],
*self.args,
)
@mock.patch('time.sleep', return_value=None)
def test_normal_group_msg_from_contact(self, _):
@mock.patch("time.sleep", return_value=None)
def test_normal_group_msg_from_contact(self, _) -> None:
# Setup
assembly_ct_list = assembly_packet_creator(MESSAGE, self.msg, origin_header=ORIGIN_CONTACT_HEADER,
group_id=self.group_id, encrypt_packet=True,
onion_pub_key=nick_to_pub_key('Alice'))
assembly_ct_list = assembly_packet_creator(
MESSAGE,
self.msg,
origin_header=ORIGIN_CONTACT_HEADER,
group_id=self.group_id,
encrypt_packet=True,
onion_pub_key=nick_to_pub_key("Alice"),
)
for p in assembly_ct_list:
self.assertIsNone(process_message(self.ts, p, *self.args))
self.assertIsNone(process_message_packet(self.ts, p, *self.args))
@mock.patch('time.sleep', return_value=None)
def test_normal_group_msg_from_user(self, _):
@mock.patch("time.sleep", return_value=None)
def test_normal_group_msg_from_user(self, _) -> None:
# Setup
assembly_ct_list = assembly_packet_creator(MESSAGE, self.msg, origin_header=ORIGIN_USER_HEADER,
group_id=self.group_id, encrypt_packet=True,
onion_pub_key=nick_to_pub_key('Alice'))
assembly_ct_list = assembly_packet_creator(
MESSAGE,
self.msg,
origin_header=ORIGIN_USER_HEADER,
group_id=self.group_id,
encrypt_packet=True,
onion_pub_key=nick_to_pub_key("Alice"),
)
for p in assembly_ct_list:
self.assertIsNone(process_message(self.ts, p, *self.args))
self.assertIsNone(process_message_packet(self.ts, p, *self.args))
# Files
@mock.patch('time.sleep', return_value=None)
def test_file(self, _):
@mock.patch("time.sleep", return_value=None)
def test_file(self, _) -> None:
# Setup
assembly_ct_list = assembly_packet_creator(FILE, origin_header=ORIGIN_CONTACT_HEADER,
encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'))
assembly_ct_list = assembly_packet_creator(
FILE,
origin_header=ORIGIN_CONTACT_HEADER,
encrypt_packet=True,
onion_pub_key=nick_to_pub_key("Alice"),
)
# Test
for p in assembly_ct_list[:-1]:
self.assertIsNone(process_message(self.ts, p, *self.args))
self.assertIsNone(process_message_packet(self.ts, p, *self.args))
for p in assembly_ct_list[-1:]:
self.assert_fr("File storage complete.",
process_message, self.ts, p, *self.args)
self.assert_se(
"File storage complete.", process_message_packet, self.ts, p, *self.args
)
@mock.patch('time.sleep', return_value=None)
def test_file_when_reception_is_disabled(self, _):
@mock.patch("time.sleep", return_value=None)
def test_file_when_reception_is_disabled(self, _) -> None:
# Setup
assembly_ct_list = assembly_packet_creator(FILE, origin_header=ORIGIN_CONTACT_HEADER,
encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'))
assembly_ct_list = assembly_packet_creator(
FILE,
origin_header=ORIGIN_CONTACT_HEADER,
encrypt_packet=True,
onion_pub_key=nick_to_pub_key("Alice"),
)
self.contact_list.get_contact_by_pub_key(nick_to_pub_key('Alice')).file_reception = False
self.contact_list.get_contact_by_pub_key(
nick_to_pub_key("Alice")
).file_reception = False
# Test
self.assert_fr("Alert! File transmission from Alice but reception is disabled.",
process_message, self.ts, assembly_ct_list[0], *self.args)
self.assert_se(
"Alert! File transmission from Alice but reception is disabled.",
process_message_packet,
self.ts,
assembly_ct_list[0],
*self.args,
)
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main(exit=False)

View File

@ -25,89 +25,127 @@ import threading
import time
import unittest
from typing import Tuple
from unittest import mock
from typing import Tuple
from unittest import mock
from unittest.mock import MagicMock
from src.common.crypto import blake2b, encrypt_and_sign
from src.common.crypto import blake2b, encrypt_and_sign
from src.common.database import MessageLog
from src.common.encoding import b58encode, bool_to_bytes, int_to_bytes, str_to_bytes
from src.common.statics import (CH_FILE_RECV, COMMAND, COMMAND_DATAGRAM_HEADER, CONFIRM_CODE_LENGTH, DIR_USER_DATA,
ENABLE, EXIT, FILE_DATAGRAM_HEADER, FILE_KEY_HEADER, INITIAL_HARAC, KEY_EX_ECDHE,
LOCAL_KEY_DATAGRAM_HEADER, MESSAGE, MESSAGE_DATAGRAM_HEADER, ORIGIN_CONTACT_HEADER,
PRIVATE_MESSAGE_HEADER, SYMMETRIC_KEY_LENGTH, UNIT_TEST_QUEUE, US_BYTE, WIN_SELECT,
WIN_UID_FILE, WIN_UID_LOCAL)
from src.common.statics import (
CH_FILE_RECV,
COMMAND,
COMMAND_DATAGRAM_HEADER,
CONFIRM_CODE_LENGTH,
DIR_USER_DATA,
ENABLE,
EXIT,
FILE_DATAGRAM_HEADER,
FILE_KEY_HEADER,
INITIAL_HARAC,
KEY_EX_ECDHE,
LOCAL_KEY_DATAGRAM_HEADER,
MESSAGE,
MESSAGE_DATAGRAM_HEADER,
ORIGIN_CONTACT_HEADER,
PRIVATE_MESSAGE_HEADER,
SYMMETRIC_KEY_LENGTH,
UNIT_TEST_QUEUE,
US_BYTE,
WIN_SELECT,
WIN_UID_COMMAND,
WIN_UID_FILE,
)
from src.transmitter.packet import split_to_assembly_packets
from src.receiver.output_loop import output_loop
from tests.mock_classes import ContactList, Gateway, GroupList, KeyList, MasterKey, nick_to_pub_key, Settings
from tests.utils import cd_unit_test, cleanup, gen_queue_dict, tear_queues
from tests.mock_classes import (
ContactList,
Gateway,
GroupList,
KeyList,
MasterKey,
nick_to_pub_key,
Settings,
)
from tests.utils import cd_unit_test, cleanup, gen_queue_dict, tear_queues
def rotate_key(key: bytes, harac: int) -> Tuple[bytes, int]:
"""Move to next key in hash ratchet."""
return blake2b(key + int_to_bytes(harac), digest_size=SYMMETRIC_KEY_LENGTH), harac + 1
return (
blake2b(key + int_to_bytes(harac), digest_size=SYMMETRIC_KEY_LENGTH),
harac + 1,
)
class TestOutputLoop(unittest.TestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.unit_test_dir = cd_unit_test()
self.o_sleep = time.sleep
time.sleep = lambda _: None
self.o_sleep = time.sleep
time.sleep = lambda _: None
def tearDown(self):
def tearDown(self) -> None:
"""Post-test actions."""
time.sleep = self.o_sleep
cleanup(self.unit_test_dir)
@mock.patch('tkinter.Tk', return_value=MagicMock())
@mock.patch('os.system', return_value=None)
@mock.patch('builtins.input', side_effect=[b58encode(SYMMETRIC_KEY_LENGTH*b'a'),
bytes(CONFIRM_CODE_LENGTH).hex(),
b58encode(SYMMETRIC_KEY_LENGTH*b'a', public_key=True)])
def test_loop(self, *_):
@mock.patch("os.system", return_value=None)
@mock.patch("tkinter.Tk", return_value=MagicMock())
@mock.patch(
"builtins.input",
side_effect=[
b58encode(SYMMETRIC_KEY_LENGTH * b"a"),
bytes(CONFIRM_CODE_LENGTH).hex(),
b58encode(SYMMETRIC_KEY_LENGTH * b"a", public_key=True),
],
)
def test_loop(self, *_) -> None:
# Setup
queues = gen_queue_dict()
kek = SYMMETRIC_KEY_LENGTH * b'a'
conf_code = bytes(1)
tx_pub_key = nick_to_pub_key('Bob')
o_sleep = self.o_sleep
test_delay = 0.2
queues = gen_queue_dict()
kek = SYMMETRIC_KEY_LENGTH * b"a"
conf_code = bytes(1)
tx_pub_key = nick_to_pub_key("Bob")
o_sleep = self.o_sleep
test_delay = 0.3
def queue_packet(mk, hk, tx_harac, packet, onion_pub_key=None):
def queue_packet(mk, hk, tx_harac, packet, onion_pub_key=None) -> None:
"""Create encrypted datagram."""
if onion_pub_key is None:
header = b''
queue = queues[COMMAND_DATAGRAM_HEADER]
header = b""
queue = queues[COMMAND_DATAGRAM_HEADER]
packet = split_to_assembly_packets(packet, COMMAND)[0]
else:
header = onion_pub_key + ORIGIN_CONTACT_HEADER
queue = queues[MESSAGE_DATAGRAM_HEADER]
queue = queues[MESSAGE_DATAGRAM_HEADER]
packet = split_to_assembly_packets(packet, MESSAGE)[0]
encrypted_harac = encrypt_and_sign(int_to_bytes(tx_harac), hk)
encrypted_message = encrypt_and_sign(packet, mk)
encrypted_packet = header + encrypted_harac + encrypted_message
encrypted_harac = encrypt_and_sign(int_to_bytes(tx_harac), hk)
encrypted_message = encrypt_and_sign(packet, mk)
encrypted_packet = header + encrypted_harac + encrypted_message
queue.put((datetime.datetime.now(), encrypted_packet))
def queue_delayer():
def queue_delayer() -> None:
"""Place datagrams into queue after delay."""
o_sleep(test_delay)
local_harac = INITIAL_HARAC
tx_harac = INITIAL_HARAC
local_hek = SYMMETRIC_KEY_LENGTH * b'a'
file_key = SYMMETRIC_KEY_LENGTH * b'b'
local_key = SYMMETRIC_KEY_LENGTH * b'a'
tx_mk = SYMMETRIC_KEY_LENGTH * b'a'
tx_hk = SYMMETRIC_KEY_LENGTH * b'a'
tx_harac = INITIAL_HARAC
local_hek = SYMMETRIC_KEY_LENGTH * b"a"
file_key = SYMMETRIC_KEY_LENGTH * b"b"
local_key = SYMMETRIC_KEY_LENGTH * b"a"
tx_mk = SYMMETRIC_KEY_LENGTH * b"a"
tx_hk = SYMMETRIC_KEY_LENGTH * b"a"
# Queue local key packet
local_key_packet = encrypt_and_sign(local_key + local_hek + conf_code, key=kek)
queues[LOCAL_KEY_DATAGRAM_HEADER].put((datetime.datetime.now(), local_key_packet))
local_key_packet = encrypt_and_sign(
local_key + local_hek + conf_code, key=kek
)
queues[LOCAL_KEY_DATAGRAM_HEADER].put(
(datetime.datetime.now(), local_key_packet)
)
o_sleep(test_delay)
# Select file window
@ -117,23 +155,40 @@ class TestOutputLoop(unittest.TestCase):
o_sleep(test_delay)
# Select local window
command = WIN_SELECT + WIN_UID_LOCAL
command = WIN_SELECT + WIN_UID_COMMAND
queue_packet(local_key, tx_hk, local_harac, command)
local_key, local_harac = rotate_key(local_key, local_harac)
o_sleep(test_delay)
# A message that goes to buffer
queue_packet(tx_mk, tx_hk, tx_harac, bool_to_bytes(False) + PRIVATE_MESSAGE_HEADER + b'Hi Bob', tx_pub_key)
queue_packet(
tx_mk,
tx_hk,
tx_harac,
bool_to_bytes(False) + PRIVATE_MESSAGE_HEADER + b"Hi Bob",
tx_pub_key,
)
tx_mk, tx_harac = rotate_key(tx_mk, tx_harac)
# ECDHE keyset for Bob
command = KEY_EX_ECDHE + nick_to_pub_key("Bob") + (4 * SYMMETRIC_KEY_LENGTH * b'a') + str_to_bytes('Bob')
command = (
KEY_EX_ECDHE
+ nick_to_pub_key("Bob")
+ (4 * SYMMETRIC_KEY_LENGTH * b"a")
+ str_to_bytes("Bob")
)
queue_packet(local_key, tx_hk, local_harac, command)
local_key, local_harac = rotate_key(local_key, local_harac)
o_sleep(test_delay)
# Message for Bob
queue_packet(tx_mk, tx_hk, tx_harac, bool_to_bytes(False) + PRIVATE_MESSAGE_HEADER + b'Hi Bob', tx_pub_key)
queue_packet(
tx_mk,
tx_hk,
tx_harac,
bool_to_bytes(False) + PRIVATE_MESSAGE_HEADER + b"Hi Bob",
tx_pub_key,
)
tx_mk, tx_harac = rotate_key(tx_mk, tx_harac)
o_sleep(test_delay)
@ -143,15 +198,22 @@ class TestOutputLoop(unittest.TestCase):
o_sleep(test_delay)
# File packet from Bob
ct = encrypt_and_sign(b'test', file_key)
ct = encrypt_and_sign(b"test", file_key)
f_hash = blake2b(ct)
packet = nick_to_pub_key('Bob') + ORIGIN_CONTACT_HEADER + ct
packet = nick_to_pub_key("Bob") + ORIGIN_CONTACT_HEADER + ct
queues[FILE_DATAGRAM_HEADER].put((datetime.datetime.now(), packet))
o_sleep(test_delay)
# File key packet from Bob
queue_packet(tx_mk, tx_hk, tx_harac, bool_to_bytes(False)
+ FILE_KEY_HEADER + base64.b85encode(f_hash + file_key), tx_pub_key)
queue_packet(
tx_mk,
tx_hk,
tx_harac,
bool_to_bytes(False)
+ FILE_KEY_HEADER
+ base64.b85encode(f_hash + file_key),
tx_pub_key,
)
o_sleep(test_delay)
# Queue exit message to break the loop
@ -162,16 +224,30 @@ class TestOutputLoop(unittest.TestCase):
threading.Thread(target=queue_delayer).start()
# Test
master_key = MasterKey()
settings = Settings()
message_log = MessageLog(f'{DIR_USER_DATA}{settings.software_operation}_logs', master_key.master_key)
master_key = MasterKey()
settings = Settings()
message_log = MessageLog(
f"{DIR_USER_DATA}{settings.software_operation}_logs", master_key.master_key
)
self.assertIsNone(output_loop(queues, Gateway(), settings, ContactList(), KeyList(),
GroupList(), master_key, message_log, stdin_fd=1, unit_test=True))
self.assertIsNone(
output_loop(
queues,
Gateway(),
settings,
ContactList(),
KeyList(),
GroupList(),
master_key,
message_log,
stdin_fd=1,
unit_test=True,
)
)
# Teardown
tear_queues(queues)
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main(exit=False)

View File

@ -26,254 +26,399 @@ import zlib
from datetime import datetime
from unittest import mock
from src.common.crypto import byte_padding, encrypt_and_sign
from src.common.crypto import byte_padding, encrypt_and_sign
from src.common.encoding import int_to_bytes
from src.common.statics import (COMMAND, COMPRESSION_LEVEL, DIR_RECV_FILES, FILE, F_C_HEADER, LOCAL_ID, MESSAGE,
M_A_HEADER, M_E_HEADER, ORIGIN_CONTACT_HEADER, ORIGIN_USER_HEADER, PADDING_LENGTH,
PRIVATE_MESSAGE_HEADER, P_N_HEADER, SYMMETRIC_KEY_LENGTH, US_BYTE)
from src.common.statics import (
COMMAND,
COMPRESSION_LEVEL,
DIR_RECV_FILES,
FILE,
F_C_HEADER,
LOCAL_ID,
MESSAGE,
M_A_HEADER,
M_E_HEADER,
ORIGIN_CONTACT_HEADER,
ORIGIN_USER_HEADER,
PADDING_LENGTH,
PRIVATE_MESSAGE_HEADER,
P_N_HEADER,
SYMMETRIC_KEY_LENGTH,
US_BYTE,
)
from src.transmitter.packet import split_to_assembly_packets
from src.receiver.packet import decrypt_assembly_packet, Packet, PacketList
from tests.mock_classes import ContactList, create_contact, KeyList, Settings, WindowList
from tests.utils import assembly_packet_creator, cd_unit_test, cleanup, nick_to_pub_key, TFCTestCase
from tests.utils import UNDECODABLE_UNICODE
from tests.mock_classes import (
ContactList,
create_contact,
KeyList,
Settings,
WindowList,
)
from tests.utils import (
assembly_packet_creator,
cd_unit_test,
cleanup,
nick_to_pub_key,
TFCTestCase,
)
from tests.utils import UNDECODABLE_UNICODE
class TestDecryptAssemblyPacket(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.onion_pub_key = nick_to_pub_key("Alice")
self.origin = ORIGIN_CONTACT_HEADER
self.window_list = WindowList(nicks=['Alice', LOCAL_ID])
self.contact_list = ContactList(nicks=['Alice', LOCAL_ID])
self.key_list = KeyList(nicks=['Alice', LOCAL_ID])
self.keyset = self.key_list.get_keyset(nick_to_pub_key("Alice"))
self.args = self.onion_pub_key, self.origin, self.window_list, self.contact_list, self.key_list
self.origin = ORIGIN_CONTACT_HEADER
self.window_list = WindowList(nicks=["Alice", LOCAL_ID])
self.contact_list = ContactList(nicks=["Alice", LOCAL_ID])
self.key_list = KeyList(nicks=["Alice", LOCAL_ID])
self.keyset = self.key_list.get_keyset(nick_to_pub_key("Alice"))
self.args = (
self.onion_pub_key,
self.origin,
self.window_list,
self.contact_list,
self.key_list,
)
def test_decryption_with_zero_rx_key_raises_fr(self):
def test_decryption_with_zero_rx_key_raises_fr(self) -> None:
# Setup
keyset = self.key_list.get_keyset(nick_to_pub_key("Alice"))
keyset = self.key_list.get_keyset(nick_to_pub_key("Alice"))
keyset.rx_mk = bytes(SYMMETRIC_KEY_LENGTH)
packet = assembly_packet_creator(MESSAGE, payload="Test message", encrypt_packet=True)[0]
packet = assembly_packet_creator(
MESSAGE, payload="Test message", encrypt_packet=True
)[0]
# Test
self.assert_fr("Warning! Loaded zero-key for packet decryption.",
decrypt_assembly_packet, packet, *self.args)
self.assert_se(
"Warning! Loaded zero-key for packet decryption.",
decrypt_assembly_packet,
packet,
*self.args,
)
def test_invalid_harac_ct_raises_fr(self):
packet = assembly_packet_creator(MESSAGE, payload="Test message", encrypt_packet=True, tamper_harac=True)[0]
self.assert_fr("Warning! Received packet from Alice had an invalid hash ratchet MAC.",
decrypt_assembly_packet, packet, *self.args)
def test_invalid_harac_ct_raises_fr(self) -> None:
packet = assembly_packet_creator(
MESSAGE, payload="Test message", encrypt_packet=True, tamper_harac=True
)[0]
self.assert_se(
"Warning! Received packet from Alice had an invalid hash ratchet MAC.",
decrypt_assembly_packet,
packet,
*self.args,
)
def test_decryption_with_zero_rx_hek_raises_fr(self):
def test_decryption_with_zero_rx_hek_raises_fr(self) -> None:
# Setup
keyset = self.key_list.get_keyset(nick_to_pub_key("Alice"))
keyset = self.key_list.get_keyset(nick_to_pub_key("Alice"))
keyset.rx_hk = bytes(SYMMETRIC_KEY_LENGTH)
packet = assembly_packet_creator(MESSAGE, payload="Test message", encrypt_packet=True)[0]
packet = assembly_packet_creator(
MESSAGE, payload="Test message", encrypt_packet=True
)[0]
# Test
self.assert_fr("Warning! Loaded zero-key for packet decryption.", decrypt_assembly_packet, packet, *self.args)
self.assert_se(
"Warning! Loaded zero-key for packet decryption.",
decrypt_assembly_packet,
packet,
*self.args,
)
def test_expired_harac_raises_fr(self):
def test_expired_harac_raises_fr(self) -> None:
# Setup
self.keyset.rx_harac = 1
# Test
packet = assembly_packet_creator(MESSAGE, payload="Test message", encrypt_packet=True, harac=0)[0]
self.assert_fr("Warning! Received packet from Alice had an expired hash ratchet counter.",
decrypt_assembly_packet, packet, *self.args)
packet = assembly_packet_creator(
MESSAGE, payload="Test message", encrypt_packet=True, harac=0
)[0]
self.assert_se(
"Warning! Received packet from Alice had an expired hash ratchet counter.",
decrypt_assembly_packet,
packet,
*self.args,
)
@mock.patch('builtins.input', return_value='No')
def test_harac_dos_can_be_interrupted(self, _):
packet = assembly_packet_creator(MESSAGE, payload="Test message", encrypt_packet=True, harac=100_001)[0]
self.assert_fr("Dropped packet from Alice.",
decrypt_assembly_packet, packet, *self.args)
@mock.patch("builtins.input", return_value="No")
def test_harac_dos_can_be_interrupted(self, _) -> None:
packet = assembly_packet_creator(
MESSAGE, payload="Test message", encrypt_packet=True, harac=100_001
)[0]
self.assert_se(
"Dropped packet from Alice.", decrypt_assembly_packet, packet, *self.args
)
def test_invalid_packet_ct_raises_fr(self):
packet = assembly_packet_creator(MESSAGE, payload="Test message", encrypt_packet=True, tamper_message=True)[0]
self.assert_fr("Warning! Received packet from Alice had an invalid MAC.",
decrypt_assembly_packet, packet, *self.args)
def test_invalid_packet_ct_raises_fr(self) -> None:
packet = assembly_packet_creator(
MESSAGE, payload="Test message", encrypt_packet=True, tamper_message=True
)[0]
self.assert_se(
"Warning! Received packet from Alice had an invalid MAC.",
decrypt_assembly_packet,
packet,
*self.args,
)
def test_successful_packet_decryption(self):
packet = assembly_packet_creator(MESSAGE, payload="Test message", encrypt_packet=True)[0]
self.assertEqual(decrypt_assembly_packet(packet, *self.args),
assembly_packet_creator(MESSAGE, payload="Test message")[0])
def test_successful_packet_decryption(self) -> None:
packet = assembly_packet_creator(
MESSAGE, payload="Test message", encrypt_packet=True
)[0]
self.assertEqual(
decrypt_assembly_packet(packet, *self.args),
assembly_packet_creator(MESSAGE, payload="Test message")[0],
)
def test_successful_packet_decryption_with_offset(self):
packet = assembly_packet_creator(MESSAGE, payload="Test message", encrypt_packet=True, message_number=3)[0]
self.assertEqual(decrypt_assembly_packet(packet, *self.args),
assembly_packet_creator(MESSAGE, payload="Test message", message_number=3)[0])
def test_successful_packet_decryption_with_offset(self) -> None:
packet = assembly_packet_creator(
MESSAGE, payload="Test message", encrypt_packet=True, message_number=3
)[0]
self.assertEqual(
decrypt_assembly_packet(packet, *self.args),
assembly_packet_creator(MESSAGE, payload="Test message", message_number=3)[
0
],
)
def test_successful_command_decryption(self):
packet = assembly_packet_creator(COMMAND, payload=b"command_data", encrypt_packet=True)[0]
self.assertEqual(decrypt_assembly_packet(packet, *self.args),
assembly_packet_creator(COMMAND, payload=b"command_data")[0])
def test_successful_command_decryption(self) -> None:
packet = assembly_packet_creator(
COMMAND, payload=b"command_data", encrypt_packet=True
)[0]
self.assertEqual(
decrypt_assembly_packet(packet, *self.args),
assembly_packet_creator(COMMAND, payload=b"command_data")[0],
)
class TestPacket(TFCTestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.short_msg = "Lorem ipsum dolor sit amet, consectetur adipiscing elit"
self.msg = ("Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum consectetur purus quis"
" dapibus. Fusce venenatis lacus ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed bibendu"
"m velit maximus in. Aliquam ac metus risus. Sed cursus ornare luctus. Integer aliquet lectus id ma"
"ssa blandit imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget luctus nisl. Sed ut el"
"it iaculis, faucibus lacus eget, sodales magna. Nunc sed commodo arcu. In hac habitasse platea dic"
"tumst. Integer luctus aliquam justo, at vestibulum dolor iaculis ac. Etiam laoreet est eget odio r"
"utrum, vel malesuada lorem rhoncus. Cras finibus in neque eu euismod. Nulla facilisi. Nunc nec ali"
"quam quam, quis ullamcorper leo. Nunc egestas lectus eget est porttitor, in iaculis felis sceleris"
"que. In sem elit, fringilla id viverra commodo, sagittis varius purus. Pellentesque rutrum loborti"
"s neque a facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu.")
self.msg = (
"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum consectetur purus quis"
" dapibus. Fusce venenatis lacus ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed bibendu"
"m velit maximus in. Aliquam ac metus risus. Sed cursus ornare luctus. Integer aliquet lectus id ma"
"ssa blandit imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget luctus nisl. Sed ut el"
"it iaculis, faucibus lacus eget, sodales magna. Nunc sed commodo arcu. In hac habitasse platea dic"
"tumst. Integer luctus aliquam justo, at vestibulum dolor iaculis ac. Etiam laoreet est eget odio r"
"utrum, vel malesuada lorem rhoncus. Cras finibus in neque eu euismod. Nulla facilisi. Nunc nec ali"
"quam quam, quis ullamcorper leo. Nunc egestas lectus eget est porttitor, in iaculis felis sceleris"
"que. In sem elit, fringilla id viverra commodo, sagittis varius purus. Pellentesque rutrum loborti"
"s neque a facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu."
)
self.unit_test_dir = cd_unit_test()
self.ts = datetime.now()
self.contact = create_contact('Alice')
self.settings = Settings(log_file_masking=True)
self.onion_pub_key = nick_to_pub_key('Alice')
self.window_list = WindowList()
self.whisper_header = b'\x00'
self.unit_test_dir = cd_unit_test()
self.ts = datetime.now()
self.contact = create_contact("Alice")
self.settings = Settings(log_file_masking=True)
self.onion_pub_key = nick_to_pub_key("Alice")
self.window_list = WindowList()
self.whisper_header = b"\x00"
compressed = zlib.compress(b'file_data', level=COMPRESSION_LEVEL)
file_key = os.urandom(SYMMETRIC_KEY_LENGTH)
encrypted = encrypt_and_sign(compressed, key=file_key)
encrypted += file_key
self.short_f_data = (int_to_bytes(1) + int_to_bytes(2) + b'testfile.txt' + US_BYTE + encrypted)
compressed = zlib.compress(b"file_data", level=COMPRESSION_LEVEL)
file_key = os.urandom(SYMMETRIC_KEY_LENGTH)
encrypted = encrypt_and_sign(compressed, key=file_key)
encrypted += file_key
self.short_f_data = (
int_to_bytes(1) + int_to_bytes(2) + b"testfile.txt" + US_BYTE + encrypted
)
def tearDown(self):
def tearDown(self) -> None:
"""Post-test actions."""
cleanup(self.unit_test_dir)
def test_invalid_assembly_packet_header_raises_fr(self):
def test_invalid_assembly_packet_header_raises_fr(self) -> None:
# Setup
packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, MESSAGE, self.contact, self.settings)
a_packet = assembly_packet_creator(MESSAGE, payload=self.short_msg, s_header_override=b'i')[0]
packet = Packet(
self.onion_pub_key,
ORIGIN_CONTACT_HEADER,
MESSAGE,
self.contact,
self.settings,
)
a_packet = assembly_packet_creator(
MESSAGE, payload=self.short_msg, s_header_override=b"i"
)[0]
# Test
self.assert_fr("Error: Received packet had an invalid assembly packet header.", packet.add_packet, a_packet)
self.assert_se(
"Error: Received packet had an invalid assembly packet header.",
packet.add_packet,
a_packet,
)
self.assertEqual(packet.log_masking_ctr, 1)
def test_missing_start_packet_raises_fr(self):
def test_missing_start_packet_raises_fr(self) -> None:
# Setup
packet = Packet(self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE, self.contact, self.settings)
packet = Packet(
self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE, self.contact, self.settings
)
# Test
for header in [M_A_HEADER, M_E_HEADER]:
self.assert_fr("Missing start packet.", packet.add_packet, header + bytes(PADDING_LENGTH))
self.assert_se(
"Missing start packet.",
packet.add_packet,
header + bytes(PADDING_LENGTH),
)
self.assertEqual(packet.log_masking_ctr, 2)
def test_short_message(self):
def test_short_message(self) -> None:
# Setup
packet = Packet(self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE, self.contact, self.settings)
packet = Packet(
self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE, self.contact, self.settings
)
packet_list = assembly_packet_creator(MESSAGE, self.short_msg)
for p in packet_list:
packet.add_packet(p, packet_ct=b'test_ct')
packet.add_packet(p, packet_ct=b"test_ct")
# Test
self.assertEqual(packet.assemble_message_packet(),
self.whisper_header + PRIVATE_MESSAGE_HEADER + self.short_msg.encode())
self.assertEqual(packet.log_ct_list, [b'test_ct'])
self.assertEqual(
packet.assemble_message_packet(),
self.whisper_header + PRIVATE_MESSAGE_HEADER + self.short_msg.encode(),
)
self.assertEqual(packet.log_ct_list, [b"test_ct"])
def test_compression_error_raises_fr(self):
def test_compression_error_raises_fr(self) -> None:
# Setup
packet = Packet(self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE, self.contact, self.settings)
packet_list = assembly_packet_creator(MESSAGE, self.short_msg, tamper_compression=True)
packet = Packet(
self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE, self.contact, self.settings
)
packet_list = assembly_packet_creator(
MESSAGE, self.short_msg, tamper_compression=True
)
for p in packet_list:
packet.add_packet(p)
# Test
self.assert_fr("Error: Decompression of message failed.", packet.assemble_message_packet)
self.assert_se(
"Error: Decompression of message failed.", packet.assemble_message_packet
)
def test_long_message(self):
def test_long_message(self) -> None:
# Setup
packet = Packet(self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE, self.contact, self.settings)
packet = Packet(
self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE, self.contact, self.settings
)
packet_list = assembly_packet_creator(MESSAGE, self.msg)
for p in packet_list:
packet.add_packet(p, packet_ct=b'test_ct')
packet.add_packet(p, packet_ct=b"test_ct")
# Test
message = packet.assemble_message_packet()
self.assertEqual(message, self.whisper_header + PRIVATE_MESSAGE_HEADER + self.msg.encode())
self.assertEqual(packet.log_ct_list, 3 * [b'test_ct'])
self.assertEqual(
message, self.whisper_header + PRIVATE_MESSAGE_HEADER + self.msg.encode()
)
self.assertEqual(packet.log_ct_list, 3 * [b"test_ct"])
def test_decryption_error_raises_fr(self):
def test_decryption_error_raises_fr(self) -> None:
# Setup
packet = Packet(self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE, self.contact, self.settings)
packet = Packet(
self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE, self.contact, self.settings
)
packet_list = assembly_packet_creator(MESSAGE, self.msg, tamper_ciphertext=True)
for p in packet_list:
packet.add_packet(p)
# Test
self.assert_fr("Error: Decryption of message failed.", packet.assemble_message_packet)
self.assert_se(
"Error: Decryption of message failed.", packet.assemble_message_packet
)
def test_short_file(self):
def test_short_file(self) -> None:
# Setup
packets = split_to_assembly_packets(self.short_f_data, FILE)
# Test
self.assertFalse(os.path.isfile(f'{DIR_RECV_FILES}Alice/testfile.txt'))
self.assertFalse(os.path.isfile(f'{DIR_RECV_FILES}Alice/testfile.txt.1'))
self.assertFalse(os.path.isfile(f"{DIR_RECV_FILES}Alice/testfile.txt"))
self.assertFalse(os.path.isfile(f"{DIR_RECV_FILES}Alice/testfile.txt.1"))
packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings)
packet = Packet(
self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings
)
packet.long_active = True
for p in packets:
packet.add_packet(p)
self.assertIsNone(packet.assemble_and_store_file(self.ts, self.onion_pub_key, self.window_list))
self.assertTrue(os.path.isfile(f'{DIR_RECV_FILES}Alice/testfile.txt'))
self.assertIsNone(
packet.assemble_and_store_file(
self.ts, self.onion_pub_key, self.window_list
)
)
self.assertTrue(os.path.isfile(f"{DIR_RECV_FILES}Alice/testfile.txt"))
for p in packets:
packet.add_packet(p)
self.assertIsNone(packet.assemble_and_store_file(self.ts, self.onion_pub_key, self.window_list))
self.assertTrue(os.path.isfile(f'{DIR_RECV_FILES}Alice/testfile.txt.1'))
self.assertIsNone(
packet.assemble_and_store_file(
self.ts, self.onion_pub_key, self.window_list
)
)
self.assertTrue(os.path.isfile(f"{DIR_RECV_FILES}Alice/testfile.txt.1"))
def test_short_file_from_user_raises_fr(self):
def test_short_file_from_user_raises_fr(self) -> None:
# Setup
packet = Packet(self.onion_pub_key, ORIGIN_USER_HEADER, FILE, self.contact, self.settings)
packet = Packet(
self.onion_pub_key, ORIGIN_USER_HEADER, FILE, self.contact, self.settings
)
packets = split_to_assembly_packets(self.short_f_data, FILE)
# Test
for p in packets:
self.assert_fr("Ignored file from the user.", packet.add_packet, p)
self.assert_se("Ignored file from the user.", packet.add_packet, p)
self.assertEqual(packet.log_masking_ctr, 1)
def test_unauthorized_file_from_contact_raises_fr(self):
def test_unauthorized_file_from_contact_raises_fr(self) -> None:
# Setup
self.contact.file_reception = False
packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings)
packet = Packet(
self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings
)
packets = split_to_assembly_packets(self.short_f_data, FILE)
# Test
for p in packets:
self.assert_fr("Alert! File transmission from Alice but reception is disabled.", packet.add_packet, p)
self.assert_se(
"Alert! File transmission from Alice but reception is disabled.",
packet.add_packet,
p,
)
self.assertEqual(packet.log_masking_ctr, 1)
def test_long_file(self):
def test_long_file(self) -> None:
# Setup
packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings)
packet = Packet(
self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings
)
packet.long_active = True
packet_list = assembly_packet_creator(FILE)
packet_list = assembly_packet_creator(FILE)
for p in packet_list:
packet.add_packet(p)
# Test
self.assertIsNone(packet.assemble_and_store_file(self.ts, self.onion_pub_key, self.window_list))
self.assertEqual(os.path.getsize(f'{DIR_RECV_FILES}Alice/test_file.txt'), 10000)
self.assertIsNone(
packet.assemble_and_store_file(
self.ts, self.onion_pub_key, self.window_list
)
)
self.assertEqual(os.path.getsize(f"{DIR_RECV_FILES}Alice/test_file.txt"), 10000)
def test_disabled_file_reception_raises_fr_with_append_packet(self):
def test_disabled_file_reception_raises_fr_with_append_packet(self) -> None:
# Setup
packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings)
packet = Packet(
self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings
)
packet.long_active = True
packet_list = assembly_packet_creator(FILE)
packet_list = assembly_packet_creator(FILE)
for p in packet_list[:2]:
self.assertIsNone(packet.add_packet(p))
@ -281,18 +426,24 @@ class TestPacket(TFCTestCase):
packet.contact.file_reception = False
# Test
self.assert_fr("Alert! File reception disabled mid-transfer.", packet.add_packet, packet_list[2])
self.assert_se(
"Alert! File reception disabled mid-transfer.",
packet.add_packet,
packet_list[2],
)
for p in packet_list[3:]:
self.assert_fr("Missing start packet.", packet.add_packet, p)
self.assert_se("Missing start packet.", packet.add_packet, p)
self.assertEqual(packet.log_masking_ctr, len(packet_list))
def test_disabled_file_reception_raises_fr_with_end_packet(self):
def test_disabled_file_reception_raises_fr_with_end_packet(self) -> None:
# Setup
packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings)
packet = Packet(
self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings
)
packet.long_active = True
packet_list = assembly_packet_creator(FILE)
packet_list = assembly_packet_creator(FILE)
for p in packet_list[:-1]:
self.assertIsNone(packet.add_packet(p))
@ -301,42 +452,59 @@ class TestPacket(TFCTestCase):
# Test
for p in packet_list[-1:]:
self.assert_fr("Alert! File reception disabled mid-transfer.", packet.add_packet, p)
self.assert_se(
"Alert! File reception disabled mid-transfer.", packet.add_packet, p
)
self.assertEqual(packet.log_masking_ctr, len(packet_list))
def test_long_file_from_user_raises_fr(self):
def test_long_file_from_user_raises_fr(self) -> None:
# Setup
packet = Packet(self.onion_pub_key, ORIGIN_USER_HEADER, FILE, self.contact, self.settings)
packet = Packet(
self.onion_pub_key, ORIGIN_USER_HEADER, FILE, self.contact, self.settings
)
packet_list = assembly_packet_creator(FILE)
# Test
self.assert_fr("Ignored file from the user.", packet.add_packet, packet_list[0])
self.assert_se("Ignored file from the user.", packet.add_packet, packet_list[0])
self.assertEqual(packet.log_masking_ctr, 1)
def test_unauthorized_long_file_raises_fr(self):
def test_unauthorized_long_file_raises_fr(self) -> None:
# Setup
self.contact.file_reception = False
packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings)
packet = Packet(
self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings
)
packet_list = assembly_packet_creator(FILE)
# Test
self.assert_fr("Alert! File transmission from Alice but reception is disabled.",
packet.add_packet, packet_list[0])
self.assert_se(
"Alert! File transmission from Alice but reception is disabled.",
packet.add_packet,
packet_list[0],
)
self.assertEqual(packet.log_masking_ctr, 1)
def test_invalid_long_file_header_raises_fr(self):
def test_invalid_long_file_header_raises_fr(self) -> None:
# Setup
packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings)
packet = Packet(
self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings
)
packet_list = assembly_packet_creator(FILE, file_name=UNDECODABLE_UNICODE)
# Test
self.assert_fr("Error: Received file packet had an invalid header.", packet.add_packet, packet_list[0])
self.assert_se(
"Error: Received file packet had an invalid header.",
packet.add_packet,
packet_list[0],
)
self.assertEqual(packet.log_masking_ctr, 1)
def test_contact_canceled_file(self):
def test_contact_canceled_file(self) -> None:
# Setup
packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings)
packet = Packet(
self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings
)
packet_list = assembly_packet_creator(FILE)[:20]
packet_list.append(byte_padding(F_C_HEADER)) # Add cancel packet
@ -344,14 +512,18 @@ class TestPacket(TFCTestCase):
packet.add_packet(p)
# Test
self.assertEqual(len(packet.assembly_pt_list), 0) # Cancel packet empties packet list
self.assertEqual(
len(packet.assembly_pt_list), 0
) # Cancel packet empties packet list
self.assertFalse(packet.long_active)
self.assertFalse(packet.is_complete)
self.assertEqual(packet.log_masking_ctr, len(packet_list))
def test_noise_packet_interrupts_file(self):
def test_noise_packet_interrupts_file(self) -> None:
# Setup
packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings)
packet = Packet(
self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings
)
packet_list = assembly_packet_creator(FILE)[:20]
packet_list.append(byte_padding(P_N_HEADER)) # Add noise packet
@ -359,27 +531,33 @@ class TestPacket(TFCTestCase):
packet.add_packet(p)
# Test
self.assertEqual(len(packet.assembly_pt_list), 0) # Noise packet empties packet list
self.assertEqual(
len(packet.assembly_pt_list), 0
) # Noise packet empties packet list
self.assertFalse(packet.long_active)
self.assertFalse(packet.is_complete)
self.assertEqual(packet.log_masking_ctr, len(packet_list))
def test_short_command(self):
def test_short_command(self) -> None:
# Setup
packet = Packet(LOCAL_ID, ORIGIN_CONTACT_HEADER, COMMAND, self.contact, self.settings)
packets = assembly_packet_creator(COMMAND, b'test_command')
packet = Packet(
LOCAL_ID, ORIGIN_CONTACT_HEADER, COMMAND, self.contact, self.settings
)
packets = assembly_packet_creator(COMMAND, b"test_command")
for p in packets:
packet.add_packet(p)
# Test
self.assertEqual(packet.assemble_command_packet(), b'test_command')
self.assertEqual(packet.assemble_command_packet(), b"test_command")
self.assertEqual(packet.log_masking_ctr, 0)
def test_long_command(self):
def test_long_command(self) -> None:
# Setup
packet = Packet(LOCAL_ID, ORIGIN_CONTACT_HEADER, COMMAND, self.contact, self.settings)
command = 500*b'test_command'
packet = Packet(
LOCAL_ID, ORIGIN_CONTACT_HEADER, COMMAND, self.contact, self.settings
)
command = 500 * b"test_command"
packets = assembly_packet_creator(COMMAND, command)
for p in packets:
@ -389,66 +567,92 @@ class TestPacket(TFCTestCase):
self.assertEqual(packet.assemble_command_packet(), command)
self.assertEqual(packet.log_masking_ctr, 0)
def test_long_command_hash_mismatch_raises_fr(self):
def test_long_command_hash_mismatch_raises_fr(self) -> None:
# Setup
packet = Packet(LOCAL_ID, ORIGIN_CONTACT_HEADER, COMMAND, self.contact, self.settings)
packet_list = assembly_packet_creator(COMMAND, os.urandom(500), tamper_cmd_hash=True)
packet = Packet(
LOCAL_ID, ORIGIN_CONTACT_HEADER, COMMAND, self.contact, self.settings
)
packet_list = assembly_packet_creator(
COMMAND, os.urandom(500), tamper_cmd_hash=True
)
for p in packet_list:
packet.add_packet(p)
# Test
self.assert_fr("Error: Received an invalid command.", packet.assemble_command_packet)
self.assert_se(
"Error: Received an invalid command.", packet.assemble_command_packet
)
self.assertEqual(packet.log_masking_ctr, 0)
def test_long_command_compression_error_raises_fr(self):
def test_long_command_compression_error_raises_fr(self) -> None:
# Setup
packet = Packet(LOCAL_ID, ORIGIN_CONTACT_HEADER, COMMAND, self.contact, self.settings)
packet_list = assembly_packet_creator(COMMAND, os.urandom(500), tamper_compression=True)
packet = Packet(
LOCAL_ID, ORIGIN_CONTACT_HEADER, COMMAND, self.contact, self.settings
)
packet_list = assembly_packet_creator(
COMMAND, os.urandom(500), tamper_compression=True
)
for p in packet_list:
packet.add_packet(p)
# Test
self.assert_fr("Error: Decompression of command failed.", packet.assemble_command_packet)
self.assert_se(
"Error: Decompression of command failed.", packet.assemble_command_packet
)
self.assertEqual(packet.log_masking_ctr, 0)
class TestPacketList(unittest.TestCase):
def setUp(self):
def setUp(self) -> None:
"""Pre-test actions."""
self.contact_list = ContactList(nicks=['Alice', 'Bob'])
self.settings = Settings()
self.onion_pub_key = nick_to_pub_key('Alice')
packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, MESSAGE,
self.contact_list.get_contact_by_address_or_nick('Alice'), self.settings)
self.contact_list = ContactList(nicks=["Alice", "Bob"])
self.settings = Settings()
self.onion_pub_key = nick_to_pub_key("Alice")
packet = Packet(
self.onion_pub_key,
ORIGIN_CONTACT_HEADER,
MESSAGE,
self.contact_list.get_contact_by_address_or_nick("Alice"),
self.settings,
)
self.packet_list = PacketList(self.settings, self.contact_list)
self.packet_list = PacketList(self.settings, self.contact_list)
self.packet_list.packets = [packet]
def test_packet_list_iterates_over_contact_objects(self):
def test_packet_list_iterates_over_contact_objects(self) -> None:
for p in self.packet_list:
self.assertIsInstance(p, Packet)
def test_len_returns_number_of_contacts(self):
def test_len_returns_number_of_contacts(self) -> None:
self.assertEqual(len(self.packet_list), 1)
def test_has_packet(self):
self.assertTrue(self.packet_list.has_packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, MESSAGE))
self.assertFalse(self.packet_list.has_packet(self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE))
def test_has_packet(self) -> None:
self.assertTrue(
self.packet_list.has_packet(
self.onion_pub_key, ORIGIN_CONTACT_HEADER, MESSAGE
)
)
self.assertFalse(
self.packet_list.has_packet(self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE)
)
def test_get_packet(self):
packet = self.packet_list.get_packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, MESSAGE)
def test_get_packet(self) -> None:
packet = self.packet_list.get_packet(
self.onion_pub_key, ORIGIN_CONTACT_HEADER, MESSAGE
)
self.assertEqual(packet.onion_pub_key, self.onion_pub_key)
self.assertEqual(packet.origin, ORIGIN_CONTACT_HEADER)
self.assertEqual(packet.type, MESSAGE)
packet = self.packet_list.get_packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, MESSAGE)
packet = self.packet_list.get_packet(
self.onion_pub_key, ORIGIN_CONTACT_HEADER, MESSAGE
)
self.assertEqual(packet.onion_pub_key, self.onion_pub_key)
self.assertEqual(packet.origin, ORIGIN_CONTACT_HEADER)
self.assertEqual(packet.type, MESSAGE)
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main(exit=False)

View File

@ -23,48 +23,60 @@ import threading
import time
import unittest
from datetime import datetime
from datetime import datetime
from multiprocessing import Queue
from src.common.encoding import int_to_bytes
from src.common.encoding import int_to_bytes
from src.common.reed_solomon import RSCodec
from src.common.statics import (COMMAND_DATAGRAM_HEADER, FILE_DATAGRAM_HEADER, GATEWAY_QUEUE,
LOCAL_KEY_DATAGRAM_HEADER, MESSAGE_DATAGRAM_HEADER,
ONION_SERVICE_PUBLIC_KEY_LENGTH)
from src.common.statics import (
COMMAND_DATAGRAM_HEADER,
FILE_DATAGRAM_HEADER,
GATEWAY_QUEUE,
LOCAL_KEY_DATAGRAM_HEADER,
MESSAGE_DATAGRAM_HEADER,
ONION_SERVICE_PUBLIC_KEY_LENGTH,
)
from src.receiver.receiver_loop import receiver_loop
from tests.mock_classes import Gateway
from tests.utils import tear_queue
from tests.utils import tear_queue
class TestReceiverLoop(unittest.TestCase):
def test_receiver_loop(self):
def test_receiver_loop(self) -> None:
# Setup
gateway = Gateway(local_test=False)
rs = RSCodec(2 * gateway.settings.serial_error_correction)
queues = {MESSAGE_DATAGRAM_HEADER: Queue(),
FILE_DATAGRAM_HEADER: Queue(),
COMMAND_DATAGRAM_HEADER: Queue(),
LOCAL_KEY_DATAGRAM_HEADER: Queue()}
rs = RSCodec(2 * gateway.settings.serial_error_correction)
queues = {
MESSAGE_DATAGRAM_HEADER: Queue(),
FILE_DATAGRAM_HEADER: Queue(),
COMMAND_DATAGRAM_HEADER: Queue(),
LOCAL_KEY_DATAGRAM_HEADER: Queue(),
}
all_q = dict(queues)
all_q.update({GATEWAY_QUEUE: Queue()})
ts = datetime.now()
ts_bytes = int_to_bytes(int(ts.strftime('%Y%m%d%H%M%S%f')[:-4]))
ts = datetime.now()
ts_bytes = int_to_bytes(int(ts.strftime("%Y%m%d%H%M%S%f")[:-4]))
for key in queues:
packet = key + ts_bytes + bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH)
encoded = rs.encode(packet)
broken_p = key + bytes.fromhex('df9005313af4136d') + bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH)
broken_p += rs.encode(b'a')
packet = key + ts_bytes + bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH)
encoded = rs.encode(packet)
broken_p = (
key
+ bytes.fromhex("df9005313af4136d")
+ bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH)
)
broken_p += rs.encode(b"a")
def queue_delayer():
def queue_delayer() -> None:
"""Place datagrams into queue after delay."""
time.sleep(0.01)
all_q[GATEWAY_QUEUE].put((datetime.now(), rs.encode(8 * b'1' + b'undecodable')))
all_q[GATEWAY_QUEUE].put(
(datetime.now(), rs.encode(8 * b"1" + b"undecodable"))
)
all_q[GATEWAY_QUEUE].put((datetime.now(), broken_p))
all_q[GATEWAY_QUEUE].put((datetime.now(), encoded))
@ -79,5 +91,5 @@ class TestReceiverLoop(unittest.TestCase):
tear_queue(queues[key])
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main(exit=False)

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,20 @@
#!/usr/bin/env python3.7
# -*- coding: utf-8 -*-
"""
TFC - Onion-routed, endpoint secure messaging system
Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
TFC is free software: you can redistribute it and/or modify it under the terms
of the GNU General Public License as published by the Free Software Foundation,
either version 3 of the License, or (at your option) any later version.
TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""

Some files were not shown because too many files have changed in this diff Show More