From 0ac650c2fe8a1548da25ea2a5e549b56e68f6138 Mon Sep 17 00:00:00 2001 From: Markus Ottela Date: Fri, 6 Mar 2020 11:21:13 +0200 Subject: [PATCH] 1.20.03 --- .coveragerc | 0 .travis.yml | 0 LICENSE-3RD-PARTY | 16 + README.md | 45 +- install.sh | 1120 ++++++++++++++--------- install.sh.asc | 26 +- launchers/TFC-Dev.desktop | 2 +- launchers/TFC-Local-test.desktop | 2 +- launchers/TFC-RP-Qubes.desktop | 8 + launchers/TFC-RP-Tails.desktop | 4 +- launchers/TFC-RP.desktop | 4 +- launchers/TFC-RxP-Qubes.desktop | 8 + launchers/TFC-RxP.desktop | 2 +- launchers/TFC-TxP-Qubes.desktop | 8 + launchers/TFC-TxP.desktop | 2 +- launchers/tfc-qubes-receiver | 19 + launchers/tfc-qubes-relay | 19 + launchers/tfc-qubes-transmitter | 19 + relay.py | 8 +- requirements-dev.txt | 105 ++- requirements-relay-tails.txt | 15 +- requirements-relay.txt | 15 +- requirements-setuptools.txt | 2 +- requirements-venv.txt | 12 +- requirements.txt | 9 +- src/common/crypto.py | 40 +- src/common/database.py | 2 +- src/common/db_contacts.py | 15 +- src/common/db_groups.py | 16 +- src/common/db_keys.py | 11 +- src/common/db_logs.py | 42 +- src/common/db_masterkey.py | 250 +++-- src/common/db_settings.py | 16 +- src/common/gateway.py | 242 +++-- src/common/input.py | 6 +- src/common/misc.py | 30 +- src/common/output.py | 8 +- src/common/statics.py | 8 +- src/receiver/commands.py | 2 +- src/receiver/files.py | 7 +- src/receiver/key_exchanges.py | 4 +- src/receiver/packet.py | 28 +- src/relay/client.py | 28 +- src/relay/commands.py | 12 +- src/relay/diffs.py | 21 +- src/relay/server.py | 7 +- src/relay/tcb.py | 6 +- src/transmitter/commands.py | 2 +- src/transmitter/key_exchanges.py | 12 +- src/transmitter/traffic_masking.py | 4 +- tests/common/test_crypto.py | 60 +- tests/common/test_database.py | 4 +- tests/common/test_db_contacts.py | 4 +- tests/common/test_db_groups.py | 12 +- tests/common/test_db_keys.py | 30 +- tests/common/test_db_logs.py | 28 +- tests/common/test_db_masterkey.py | 52 +- tests/common/test_db_settings.py | 4 +- tests/common/test_encoding.py | 8 +- tests/common/test_gateway.py | 217 +++-- tests/common/test_misc.py | 35 +- tests/common/test_output.py | 4 +- tests/common/test_path.py | 4 +- tests/common/test_reed_solomon.py | 72 +- tests/mock_classes.py | 3 + tests/receiver/test_commands.py | 26 +- tests/receiver/test_commands_g.py | 20 +- tests/receiver/test_files.py | 29 +- tests/receiver/test_key_exchanges.py | 22 +- tests/receiver/test_messages.py | 24 +- tests/receiver/test_output_loop.py | 6 +- tests/receiver/test_packet.py | 36 +- tests/receiver/test_windows.py | 2 +- tests/relay/test_commands.py | 10 +- tests/relay/test_diffs.py | 4 +- tests/relay/test_server.py | 16 +- tests/transmitter/test_commands.py | 68 +- tests/transmitter/test_commands_g.py | 32 +- tests/transmitter/test_contact.py | 38 +- tests/transmitter/test_files.py | 6 +- tests/transmitter/test_key_exchanges.py | 28 +- tests/transmitter/test_packet.py | 20 +- tests/transmitter/test_user_input.py | 6 +- tests/transmitter/test_windows.py | 18 +- tfc.py | 6 +- tfc.yml | 2 +- uninstall.sh | 7 +- 87 files changed, 2058 insertions(+), 1194 deletions(-) mode change 100644 => 100755 .coveragerc mode change 100644 => 100755 .travis.yml create mode 100755 launchers/TFC-RP-Qubes.desktop create mode 100755 launchers/TFC-RxP-Qubes.desktop create mode 100755 launchers/TFC-TxP-Qubes.desktop create mode 100755 launchers/tfc-qubes-receiver create mode 100755 launchers/tfc-qubes-relay create mode 100755 launchers/tfc-qubes-transmitter diff --git a/.coveragerc 
b/.coveragerc old mode 100644 new mode 100755 diff --git a/.travis.yml b/.travis.yml old mode 100644 new mode 100755 diff --git a/LICENSE-3RD-PARTY b/LICENSE-3RD-PARTY index df0d334..623782f 100644 --- a/LICENSE-3RD-PARTY +++ b/LICENSE-3RD-PARTY @@ -80,6 +80,9 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. MIT License applies to: + - The appdirs library, Copyright (c) 2010 ActiveState Software Inc. + (https://github.com/ActiveState/appdirs) + - The Argon2 library, Copyright © 2015, Hynek Schlawack (https://github.com/hynek/argon2_cffi) @@ -119,6 +122,9 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Copyright © 2011-2016, The virtualenv developers (https://github.com/pypa/virtualenv) + - The zipp library. Copyright © Jason R. Coombs + (https://github.com/jaraco/zipp) + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Permission is hereby granted, free of charge, to any person obtaining a copy @@ -1210,6 +1216,9 @@ Public License instead of this License. But first, please read https://www.apache.org/licenses/ applies to: + - The importlib_metadata library, Copyright © 2017-2019 Jason R. Coombs, Barry Warsaw + (https://gitlab.com/python-devs/importlib_metadata) + - The OpenSSL library, Copyright © 1995-1998, Eric A. Young, Tim J. Hudson Copyright © 1999-2018, The OpenSSL Project (https://github.com/openssl/openssl) @@ -2469,6 +2478,7 @@ Library. Copyright © Guido van Rossum and others. applies to: + - The CPython3.7 programming language, Copyright © 1991-1995, Stichting Mathematisch Centrum, Amsterdam. All Rights Reserved. @@ -2482,6 +2492,9 @@ Library. All Rights Reserved. (https://www.python.org/) + - distlib library Copyright © 2020 [Python Packaging Authority Developers] + (https://bitbucket.org/pypa/distlib/src/master/) + - The python3-tk library, Copyright © 2006, Matthias Klose This package was debianized by Matthias Klose on Wed, 7 Jun 2006 15:02:31 +0200. @@ -2909,6 +2922,9 @@ OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. Note: This unlicense is meant only as a description applies to: + - The py-filelock library by Benedikt Schmitt + (https://github.com/benediktschmitt/py-filelock) + - The Reed-Solomon erasure code library by Tomer Filiba, Stephen Larroque (https://github.com/lrq3000/reedsolomon/) (https://github.com/tomerfiliba/reedsolomon) diff --git a/README.md b/README.md index fba6198..d06f81e 100755 --- a/README.md +++ b/README.md @@ -94,15 +94,16 @@ TFC is designed to be used in hardware configuration that provides strong [endpoint security](https://en.wikipedia.org/wiki/Endpoint_security). This configuration uses three computers per endpoint: Encryption and decryption processes are separated from each other onto two isolated computers, the Source Computer, and the -Destination Computer. These two systems are are dedicated for TFC. This split +Destination Computer. These two devices are dedicated for TFC. This split [TCB](https://en.wikipedia.org/wiki/Trusted_computing_base) interacts with the network via the user's daily computer, called the Networked Computer. -Data moves from the Source Computer to the Networked Computer, and from the Networked -Computer to the Destination Computer, unidirectionally. The unidirectionality of data flow -is enforced with a free hardware design +In TFC, data moves from the Source Computer to the Networked Computer, and from the Networked +Computer to the Destination Computer, unidirectionally. 
The unidirectionality of data +flow is enforced, as the data is passed from one device to another only through a free +hardware design [data diode](https://en.wikipedia.org/wiki/Unidirectional_network), -which is connected to the three computers using one USB-cable per computer. +which is connected to the three computers using one USB-cable per device. The Source and Destination Computers are not connected to the Internet, or to any device other than the data diode. @@ -114,8 +115,12 @@ Optical repeater inside the [optocouplers](https://en.wikipedia.org/wiki/Opto-isolator) of the data diode enforce direction of data transmission with the fundamental laws of physics. This protection is so strong, the certified implementations of data diodes are -typically found in critical infrastructure protection and government networks where -classification level of data varies between systems. +typically found in critical infrastructure protection and government networks where the +classification level of data varies between systems. A data diode might, for example, allow access +to a nuclear power plant's safety system readings, while at the same time preventing +attackers from exploiting these critical systems. An alternative use case is to allow +importing data from less secure systems to ones that contain classified documents that +must be protected from exfiltration. In TFC the hardware data diode ensures that neither of the TCB-halves can be accessed bidirectionally. Since the protection relies on physical limitations of the hardware's @@ -183,18 +188,39 @@ the Source or Destination Computer, the ciphertexts are of no value to the attac [Exfiltration security](https://www.cs.helsinki.fi/u/oottela/wiki/readme/attacks.png) +### Qubes-isolated intermediate solution + +For some users the +[APTs](https://en.wikipedia.org/wiki/Advanced_persistent_threat) +of the modern world are not part of the threat model, and for others, the +requirement of having to build the data diode by themselves is a deal breaker. Yet, for +all of them, storing private keys on a networked device is still a security risk. + +To meet these users' needs, TFC can also be run in three dedicated +[Qubes](https://www.qubes-os.org/) +virtual machines. With the Qubes configuration, the isolation is provided by the +[Xen hypervisor](https://xenproject.org/users/security/), +and the unidirectionality of data flow between the VMs is enforced with strict firewall +rules. This intermediate isolation mechanism runs on a single computer, which means no +hardware data diode is needed. + + ### Supported Operating Systems #### Source/Destination Computer - Debian 10 - PureOS 9.0 - *buntu 19.10 +- LMDE 4 +- Qubes 4 (Debian 10 VM) #### Networked Computer - Tails 4.0 - Debian 10 - PureOS 9.0 - *buntu 19.10 +- LMDE 4 +- Qubes 4 (Debian 10 VM) ### More information @@ -209,10 +235,9 @@ Hardware Data Diode<br>
How to use
    [Installation](https://github.com/maqp/tfc/wiki/Installation)
-    [Launching TFC](https://github.com/maqp/tfc/wiki/Launching-TFC)
-    [Setup master password](https://github.com/maqp/tfc/wiki/Master-Password)
+    [Master password setup](https://github.com/maqp/tfc/wiki/Master-Password)
    [Local key setup](https://github.com/maqp/tfc/wiki/Local-Key-Setup)
-    [Launch Onion Service](https://github.com/maqp/tfc/wiki/Onion-Service-Setup)
+    [Onion Service setup](https://github.com/maqp/tfc/wiki/Onion-Service-Setup)
    [X448 key exchange](https://github.com/maqp/tfc/wiki/X448)
    [Pre-shared keys](https://github.com/maqp/tfc/wiki/PSK)
    [Commands](https://github.com/maqp/tfc/wiki/Commands)
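The Qubes configuration added in the README above replaces the hardware data diode with per-VM firewall policy: each VM drops traffic by default and whitelists only the one-way UDP path to the next hop. The lines below are a minimal sketch of that idea for the Source VM, assuming hypothetical placeholder addresses; the actual rules (TFC ports 2063/2064, address discovery, and persistence via /rw/config/rc.local) are added by the qubes_*_firewall_config functions in install.sh further down.

# Minimal sketch of the Source VM policy enforced by qubes_src_firewall_config:
# set every chain to default-drop, then allow only outbound UDP to the
# Networked VM's TFC port. Addresses are placeholders; install.sh derives the
# real ones at install time.
src_ip=10.137.0.10   # hypothetical Source VM address
net_ip=10.137.0.20   # hypothetical Networked VM address
sudo iptables -t filter -P INPUT   DROP
sudo iptables -t filter -P OUTPUT  DROP
sudo iptables -t filter -P FORWARD DROP
sudo iptables -I OUTPUT -s ${src_ip} -d ${net_ip} -p udp --dport 2063 -j ACCEPT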
diff --git a/install.sh b/install.sh index dc1e837..33ef92e 100644 --- a/install.sh +++ b/install.sh @@ -16,229 +16,65 @@ # You should have received a copy of the GNU General Public License # along with TFC. If not, see . +# Installer configuration + +INSTALL_DIR="/opt/tfc" + + # PIP dependency file names -ARGON2=argon2_cffi-19.2.0-cp34-abi3-manylinux1_x86_64.whl + +APPDIRS=appdirs-1.4.3-py2.py3-none-any.whl +ARGON2_CFFI=argon2_cffi-19.2.0-cp34-abi3-manylinux1_x86_64.whl CERTIFI=certifi-2019.11.28-py2.py3-none-any.whl -CFFI=cffi-1.13.2-cp37-cp37m-manylinux1_x86_64.whl +CFFI=cffi-1.14.0-cp37-cp37m-manylinux1_x86_64.whl CHARDET=chardet-3.0.4-py2.py3-none-any.whl CLICK=Click-7.0-py2.py3-none-any.whl CRYPTOGRAPHY=cryptography-2.8-cp34-abi3-manylinux1_x86_64.whl +DISTLIB=distlib-0.3.0.zip +FILELOCK=filelock-3.0.12-py3-none-any.whl FLASK=Flask-1.1.1-py2.py3-none-any.whl -IDNA=idna-2.8-py2.py3-none-any.whl +IDNA=idna-2.9-py2.py3-none-any.whl +IMPORTLIB_METADATA=importlib_metadata-1.5.0-py2.py3-none-any.whl ITSDANGEROUS=itsdangerous-1.1.0-py2.py3-none-any.whl JINJA2=Jinja2-2.11.1-py2.py3-none-any.whl MARKUPSAFE=MarkupSafe-1.1.1-cp37-cp37m-manylinux1_x86_64.whl -PYCPARSER=pycparser-2.19.tar.gz +PYCPARSER=pycparser-2.20-py2.py3-none-any.whl PYNACL=PyNaCl-1.3.0-cp34-abi3-manylinux1_x86_64.whl PYSERIAL=pyserial-3.4-py2.py3-none-any.whl PYSOCKS=PySocks-1.7.1-py3-none-any.whl -REQUESTS=requests-2.22.0-py2.py3-none-any.whl -SETUPTOOLS=setuptools-45.1.0-py3-none-any.whl +REQUESTS=requests-2.23.0-py2.py3-none-any.whl +SETUPTOOLS=setuptools-45.2.0-py3-none-any.whl SIX=six-1.14.0-py2.py3-none-any.whl -# STEM=stem-1.8.0.tar.gz URLLIB3=urllib3-1.25.8-py2.py3-none-any.whl -VIRTUALENV=virtualenv-16.7.9-py2.py3-none-any.whl -WERKZEUG=Werkzeug-0.16.1-py2.py3-none-any.whl +VIRTUALENV=virtualenv-20.0.8-py2.py3-none-any.whl +WERKZEUG=Werkzeug-1.0.0-py2.py3-none-any.whl +ZIPP=zipp-3.1.0-py3-none-any.whl -function compare_digest { - # Compare the SHA512 digest of TFC file against the digest pinned in - # this installer. - if sha512sum "/opt/tfc/${2}${3}" | grep -Eo '^\w+' | cmp -s <(echo "$1"); then - echo "OK - Pinned SHA512 hash matched file /opt/tfc/${2}${3}" - else - echo "Error: /opt/tfc/${2}${3} had an invalid SHA512 hash" - exit 1 - fi -} - - -function verify_tcb_requirements_files { - # To minimize the time TCB installer configuration stays online, only - # the requirements files are authenticated between downloads. - compare_digest 8cb58c52af4481bc0be13dcda4db00456f7522934f185c216dcfe3ded794e5a35ecf7fa3e6417d7cbb477c0b3c59606a1c858b0b17d46ba6453ed71522fd976e '' requirements.txt - compare_digest 4f7372efb604ca5d45f8f8d76d0b840f68c5e2401b09b9824d6a0fc34291ceffbd0ebf516735e2ac5244681628ed2bd6fca1405f0c6d463bf869061bd6f6cd29 '' requirements-venv.txt -} - - -function verify_files { - # Verify the authenticity of the rest of the TFC files. 
- compare_digest 1d9ee816a00eb66a96cf2a6484f37037e90eb8865d68b02de9c01d7ee6fa735cbbd2279099fe8adfb4dda5d9c0a2da649a5f530dba1f0c44471838995abcebb2 '' dd.py - compare_digest d361e5e8201481c6346ee6a886592c51265112be550d5224f1a7a6e116255c2f1ab8788df579d9b8372ed7bfd19bac4b6e70e00b472642966ab5b319b99a2686 '' LICENSE - compare_digest 4a239b91b2626375255fbf4202a4bf49ea248832b65d69e00530d7cec88e489466508aeb1ff8c8ef0f138819500c4fde2010207ebe3e289c36bc201df55d3c47 '' LICENSE-3RD-PARTY - compare_digest 260f20df57dc6afdef634501430039e16b8964fd58eb7e9f4ca889e4511331de8e643fe2c525b8f23b33ad60e23dae740236586188c87d4b3289738abb4b901b '' relay.py - compare_digest 2bd7f8925af923c44b11ef1a1bdb530c0ee4098066b06cbf334680756332d83f1dcda2e5f6e377b839cc70202f8e32b6387201e42d2618c68453e7cbd66a7e64 '' requirements-dev.txt - compare_digest 89eb610ad4b41d36f4f02c892e40e35fbe6567ff1e5523511bc87c0bc0a0838bf463a58a87a6389f907bb9b5fffd289ad95a92854d52ded028f908e946db1824 '' requirements-relay.txt - compare_digest e8cd32a91370b6b4dd306391a3b78488f6a0f467dcd82387749d499cd6beb13b50ba01be9ceed2fe5620595640ecec3e43dbb192b8732e4943f7a5a43f601407 '' requirements-relay-tails.txt - compare_digest 89e82f1f1b1a4d9f3d1432c2988c00f70d2cc1b5e670f50666d189817115bac7b1e15331dc243d1f0364d7d283a9d25c9982ee7ba90563b29bdf41986e734b50 '' requirements-setuptools.txt - compare_digest 79f8272a2ab122a48c60630c965cd9d000dcafabf5ee9d69b1c33c58ec321feb17e4654dbbbf783cc8868ccdfe2777d60c6c3fc9ef16f8264d9fcf43724e83c2 '' tfc.png - compare_digest 4e659a97f7f4b8ba816b111446e5795460db8def5db380bd05ede96042170796def1f4cdc4f6afc7062079fca584ac09010a6222d6835403777d6acba91add8c '' tfc.py - compare_digest 7ae1c2a393d96761843bea90edd569244bfb4e0f9943e68a4549ee46d93180d26d4101c2471c1a37785ccdfaef45eedecf15057c0a9cc6c056460c5f9a69d37b '' tfc.yml - compare_digest ba16a9b9a197d7a916609bcd1f1ad8a076ad55c0b3f04510e8f19dfea35be9cf4a036481b3a03deb5932d5e9a90c4ca9f6e28d0127549681d756b4eda3c5c6e0 '' uninstall.sh - - compare_digest d4f503df2186db02641f54a545739d90974b6d9d920f76ad7e93fe1a38a68a85c167da6c19f7574d11fbb69e57d563845d174d420c55691bc2cd75a1a72806dc launchers/ terminator-config-local-test - compare_digest 9a40d97bd9fe1324b5dd53079c49c535ae307cbb28a0bc1371067d03c72e67ddeed368c93352732c191c26dcdc9ac558868e1df9dfd43a9b01ba0a8681064ab3 launchers/ TFC-Local-test.desktop - compare_digest c5dfa3e4c94c380b0fcf613f57b5879a0d57d593b9b369da3afde37609a9fb11051d71832285d3372f3df6c5dbe96d00a39734fbddf138ab0c04d0f1f752826f launchers/ TFC-RP.desktop - compare_digest c5dfa3e4c94c380b0fcf613f57b5879a0d57d593b9b369da3afde37609a9fb11051d71832285d3372f3df6c5dbe96d00a39734fbddf138ab0c04d0f1f752826f launchers/ TFC-RP-Tails.desktop - compare_digest d109dc200709d9565a076d7adcc666e6ca4b39a2ed9eff0bb7f0beff2d13b368cc008a0bbb9a639e27f6881b3f8a18a264861be98a5b96b67686904ba70e70f2 launchers/ TFC-RxP.desktop - compare_digest aa1c23f195bcf158037c075157f12564d92090ed29d9b413688cf0382f016fad4f59cf9755f74408829c444cd1a2db56c481a1b938c6e3981e6a73745c65f406 launchers/ TFC-TxP.desktop - - compare_digest 3ee90ee305382d80da801f047a6e58e5b763f9f6bc08dce531d5c620f2748c6bba59a1528eee5d721decb8e724f53b28fc7609f5b20472f679f554b78b5d4cc6 src/ __init__.py - compare_digest 3ee90ee305382d80da801f047a6e58e5b763f9f6bc08dce531d5c620f2748c6bba59a1528eee5d721decb8e724f53b28fc7609f5b20472f679f554b78b5d4cc6 src/common/ __init__.py - compare_digest 1e27af90e641bd65538ba7d5b13e7b1b4ccded01b12e7bdc19894338639eeaf8074aef9726f3816e37fc01671b5dfbcabf7beb70244c887567e40874f44886c9 src/common/ crypto.py - compare_digest 
dd30ee2bdcab64bd62c3d41ff238a4e35fcb950a5b33d43b9758306cda4ab3d9d1a86399a58b70ac78fb2b39649de830b57965e767e7d958839cd9169bc5317d src/common/ database.py - compare_digest 99bb26974918c1fe23c001d39296653b5dda678fbde1d3470bfb2d62ccc73d31f782adc74666f53389cf8560215e098350dcac7cd66c297712564460c50c4302 src/common/ db_contacts.py - compare_digest 032ccacc86f62bbd1eafb645730679a1023841f53c6391b700e6562ba33321f0ef81d36f3fa02752b189cb795677d209404fffa7de6ebf042bd3ff418e056b9a src/common/ db_groups.py - compare_digest 38fed0ace4cc1032b9d52d80c2a94252a0001b11ed7a7d7dc27fff66ed1e10309ee07b345556775958389d83cb976cd151df2d717b5c7dbe6d312778ecb06408 src/common/ db_keys.py - compare_digest 4d9436a5381b81425c13b43829a1c2dac62a2210ebd5a80b3bb8445aa3b6509b33af58e14cb9803c330d81aa429341382c13170d6770cd1726698a274980978e src/common/ db_logs.py - compare_digest ccbff2a8e0bfe11b063971879e3849b376324a132534e6a520c58811945e93320f5837366a4548a2183e158242c52801d5276f7b6b86ca860977ae8f95c2c607 src/common/ db_masterkey.py - compare_digest 325298cd6cb7e68d27681c18f29e635f46222e34015ba3c8fe55e6718e6907b4257bbe12d71fd344b557aff302ae9d7fca2b581b4208e59ac7923e57aca23fe5 src/common/ db_onion.py - compare_digest 63451ece46802c1e4d0ddb591fda951c00b40ed2e0f37ffc9e5310adb687f0db4980d8593ce1ed5c7b5ca9274be33b2666ae9165aa002d99ecf69b0ec620cc1b src/common/ db_settings.py - compare_digest 60fb4c922af286307865b29f0cadab53a5a575a9f820cd5ad99ea116c841b54dd1d1be1352bf7c3ab51d2fd223077217bcda1b442d44d2b9f1bf614e15c4a14d src/common/ encoding.py - compare_digest ccd522408ad2e8e21f01038f5f49b9d82d5288717f1a1acf6cda278c421c05472827ee5928fbf56121c2dfc4f2cc49986e32c493e892bd6ae584be38ba381edd src/common/ exceptions.py - compare_digest 999bb5264e4e586fcdc163a65e6bf0cea7b9d856ab876e1f23c1926324dc90df2f8afe86057c2f8e578f6f77c45f8e776de3c9ff99475a839f188efe8f861fe9 src/common/ gateway.py - compare_digest b01aa02c1985c7c1f7d95678d2422561a06342378306936c35c077938c133b8f60a63952bdc9005210d31e68addd88b86a45f82626c40beff07e785fdc2aa115 src/common/ input.py - compare_digest d617f7bddf11525d672aa53b9076b19c27754f60768dd240c29d1f937ffb62d15e063513b59268a5d478ef3a645135fb0e1e5970522f225fef240874f8cfaae1 src/common/ misc.py - compare_digest 8b479b3a7c1c4fdaf4c4f8d4a4436231933ebb1da47a5a07a596037b20db7d5aa7e8a1d107d4ec973603551f28833ff404c177b9977d654f3b38a915d16a33bb src/common/ output.py - compare_digest 08443cfe633bb552d6bb55e48d81423db4a4099f9febc73ec6ee85ee535bc543720f199ac8b600b718e9af7247fb96ef4b9991b0416cf7186fd75a149365dd36 src/common/ path.py - compare_digest 39e48b0b55f4f1a48bc558f47b5f7c872583f3f3925fd829de28710024b000fcb03799cb36da3a31806143bc3cbb98e5d357a8d62674c23e1e8bf957aece79f6 src/common/ reed_solomon.py - compare_digest 6782f85e365848376675c988d9e9a25689b8df9755e47c790d4cba3a9e0ee25b5c974f7814022097d13987e6200c5b9398bf6b787b7f77910a678e9b7c118aae src/common/ statics.py - compare_digest a57d5525a570a78d15c75e79702289cf8571c1b3c142fae57f32bf3ed8bb784c7f63ce2e805d295b4a505fdeaf9d59094ebe67d8979c92dc11e2534474505b0e src/common/ word_list.py - - compare_digest 3ee90ee305382d80da801f047a6e58e5b763f9f6bc08dce531d5c620f2748c6bba59a1528eee5d721decb8e724f53b28fc7609f5b20472f679f554b78b5d4cc6 src/receiver/ __init__.py - compare_digest a4aeb64c2adb0d435e52f8de33aaff1ec135ca6b551c813fa53281087e8e62a0db9ad0ac595bca17391ee80cba9dfef9790b39d0877b0497bab3871f167ee9cd src/receiver/ commands.py - compare_digest 6dd0d73fe240f974bb92e850c5f38e97ee8d632fbb3a53abc3160b4e29f9b2b076b8b5d20dc7f7231c01c978ea67b55740ac17dc74bf05e6811d2e35e34056fb src/receiver/ 
commands_g.py - compare_digest 320bc36df51efb138c173d9fcef7a8ee7de06bcee672e75512d2a9e07eef5006068650e76d4dc741b4cf180cb8ee46560f941753539e377d0d4e534d0f6c629b src/receiver/ files.py - compare_digest 437a27e1ee948994866b1e2bdfa8a835cad67e89c4ecc0d04589fc27dfabb3a2d6d582bee502d50dc76c7fbea5cd1e71a08bc08a7fde75c7a595b463c7f3ce43 src/receiver/ key_exchanges.py - compare_digest 6ebd6c0638525997949783b7623ce9a78683169e95f572ea65dcec52da150b0473a25e928862cab34eac44b0e0991a0969c5252c03cf4dc8f49d1aa9809b43bd src/receiver/ messages.py - compare_digest eabe1695cd0fe04346e49ed91b64a11ad74ff60b636333140f9a3c6745b9c408d77aae8f45256b5d74b241324a5d429249b2be6c732205ab729a38049b8631f7 src/receiver/ output_loop.py - compare_digest 25b49765e149f5e072df2aa70c125478d1c9621886527201bf0d7718db557f2991823d980274f53abf93269f5aa1096b3e56fae94ecaa974ef31b0cb7907fde7 src/receiver/ packet.py - compare_digest 002c960023393bec10da3de6d9a218c8e2c27da1635fd1a7f99e02a9a28792428a2c0e6cd030d1cc1fac1124c58f397f63d60b7af4c384367a8c293978125539 src/receiver/ receiver_loop.py - compare_digest da8ff22a1ece42c780328c84722ae42c2dced69dd87f2fb2d09fd517d3ee98f3777c448922b2b06a5839347e075a5598e6c770a544fdf801e664ba5ad06b684d src/receiver/ windows.py - - compare_digest 3ee90ee305382d80da801f047a6e58e5b763f9f6bc08dce531d5c620f2748c6bba59a1528eee5d721decb8e724f53b28fc7609f5b20472f679f554b78b5d4cc6 src/relay/ __init__.py - compare_digest 340063f239e43e7504f1a3389e1cbdd3603b756c113d32a4add0a4815afef234d19a6b2f8dc675de90f62b0f8b9d414c829762d6b87d58d08eac21fb413b011d src/relay/ client.py - compare_digest 49c540cab10d932cd6b7afa417f3c2551b452d657c7be086fc4fd7fb490f56d1016e882a3de9b3ba78fa7160ce79967958c05d657874443c5488735ade7691f7 src/relay/ commands.py - compare_digest 959129c8eb8c1ae40d8c97997dde609a02692f808c476ffe1edcbdb03330b0d38d450c8898abd41e5498ca8f962e135b328a319f7475dfa1f69a25baae463e5d src/relay/ diffs.py - compare_digest 0bc1912af4d45ff72fbd7c43a09ab48ea3a780bb096226011525924820ba0ba1f396937fb191e5e18ec87dee14ccd3b505192e16e835d46088e4b50c941125f5 src/relay/ onion.py - compare_digest 0273508b0c2f95ba5e710a9c8201fa6915881305a02de9c3e165b6f78912d559ed8fa4edb0ca07cbf1843a0cee4b377c073dbfc3eb6600bbdb163228ac1f1742 src/relay/ server.py - compare_digest d9a5d7c806f45419e7d79d543fba186621c09c79839a8976f833c92ef3ba6ea2ca9fbb6db2ac6455080a294dd2dcf7f9dbaa2cfac56414587753a3754bd3732b src/relay/ tcb.py - - compare_digest 3ee90ee305382d80da801f047a6e58e5b763f9f6bc08dce531d5c620f2748c6bba59a1528eee5d721decb8e724f53b28fc7609f5b20472f679f554b78b5d4cc6 src/transmitter/ __init__.py - compare_digest 09cd7d5921ac74ebbd40fbe3abc22faee48b30c33f73405fe20ce7d3096df0599d0346027cf0c5b458acf55f8919373c1c215062236e522ddfe154802b219622 src/transmitter/ commands.py - compare_digest 2af2cd801fc83f882c65e031b5bd6f5c2c30b32dc0bb538953021b1f520714723d39b2a2462a6718cbb3efea1b645767b50d468978bb802dacf8b73535a2975f src/transmitter/ commands_g.py - compare_digest 31267d2049e4e9a88301e0d851e11c8e3db0bbb96c4509c10a3528c29ab679a49db8430cca1529ccd71556e273f4937d3bf7e0c2e1a165a8d36729ed284a4f19 src/transmitter/ contact.py - compare_digest f2fefbc2acbad441cb997969d6b39fbe26813abc781f5b6caaa08f1eb4c52c05b2bd4cbc341cb75ea07f7a4931d9b1145bef2fb352376a72442f7a71299fb595 src/transmitter/ files.py - compare_digest 110665f962eb827a9f636cc823837222a7bed4a429d4e10eb90c7bf5ba7bd5900aa1ecc4d4b485927a276d5727e18fe9e78f75ab8bd4ff67f039bb633fe505ec src/transmitter/ input_loop.py - compare_digest 
20b06b3b28bdecc9b572acb7d47e51ab98863230966cfa2d8e93ead13126f6019e88b2bd648de7fc7795805a836c1f9f7e243f2c13ebc8bf5bca1078ff6c14d8 src/transmitter/ key_exchanges.py - compare_digest 766b1efa548f2da49272870fa5f89b8aacdf65b737b908f7064209f2f256c4d4875228ad087ac4957a292a82ed5936a40b9ae7553bfae2eae739f0c4579eb21a src/transmitter/ packet.py - compare_digest b8cfc11ae235c8cddbbd4003f8f95504456d9b2d6b6cc09bd538c09132bc737b6f070bdbc8d697e9ddfc5854546575526fa26c813f9f6bff7dc32fcdbb337753 src/transmitter/ sender_loop.py - compare_digest cdcb21128f71134ae49f3e99bf2a6dce5ec88766ecf6d91be89200ef282f7bd326c9805ba8f2d73d3fa12a8e05da20630874b5bbf9e18085d47ad5063098eaf8 src/transmitter/ traffic_masking.py - compare_digest eb77c6206cab63ffdb47bbcb8b76a55100636d893e234a048221d83e9ce07b76ccfcc93b506d9fb48d6f8823135e5697f3e56aed8e95f23990d8dfc1cece325e src/transmitter/ user_input.py - compare_digest 489f869176da0040b6f06327544f5eb72863a748a4799c66198a09402df6d54d842e9af27af51faaeed9d0661133eeaebb9918bd1bcd50950c182ba4b1e5fc74 src/transmitter/ window_mock.py - compare_digest 09c536d43b37103b6340293efa67345f54da6563ea65441546161066d735b4dfad9eaea9c58452de3413b72b28a923d2efb851ac740ba09ada45368bb64b9f15 src/transmitter/ windows.py -} - - -function process_tcb_dependencies { - # Manage TCB dependencies in batch. The command that uses the files - # is passed to the function as a parameter. - sudo $1 "/opt/tfc/${SIX}" - sudo $1 "/opt/tfc/${PYCPARSER}" - sudo $1 "/opt/tfc/${CFFI}" - sudo $1 "/opt/tfc/${ARGON2}" - sudo $1 "/opt/tfc/${SETUPTOOLS}" - sudo $1 "/opt/tfc/${PYNACL}" - sudo $1 "/opt/tfc/${PYSERIAL}" - sudo $1 "/opt/tfc/${CRYPTOGRAPHY}" -} - - -function process_tails_dependencies { - # Manage Tails dependencies in batch. The command that uses the - # files is passed to the function as a parameter. - - t_sudo -E $1 "/opt/tfc/${PYSERIAL}" - # t_sudo -E $1 "/opt/tfc/${STEM}" - t_sudo -E $1 "/opt/tfc/${PYSOCKS}" - - # Requests - t_sudo -E $1 "/opt/tfc/${URLLIB3}" - t_sudo -E $1 "/opt/tfc/${IDNA}" - t_sudo -E $1 "/opt/tfc/${CHARDET}" - t_sudo -E $1 "/opt/tfc/${CERTIFI}" - t_sudo -E $1 "/opt/tfc/${REQUESTS}" - - # Flask - t_sudo -E $1 "/opt/tfc/${WERKZEUG}" - t_sudo -E $1 "/opt/tfc/${MARKUPSAFE}" - t_sudo -E $1 "/opt/tfc/${JINJA2}" - t_sudo -E $1 "/opt/tfc/${ITSDANGEROUS}" - t_sudo -E $1 "/opt/tfc/${CLICK}" - t_sudo -E $1 "/opt/tfc/${FLASK}" - - # Cryptography - t_sudo -E $1 "/opt/tfc/${SIX}" - t_sudo -E $1 "/opt/tfc/${PYCPARSER}" - t_sudo -E $1 "/opt/tfc/${CFFI}" - t_sudo -E $1 "/opt/tfc/${CRYPTOGRAPHY}" - - # PyNaCl - t_sudo -E $1 "/opt/tfc/${PYNACL}" -} - - -function move_tails_dependencies { - # Move Tails dependencies in batch. 
- t_sudo mv "$HOME/${VIRTUALENV}" "/opt/tfc/" - t_sudo mv "$HOME/${PYSERIAL}" "/opt/tfc/" - # t_sudo mv "$HOME/${STEM}" "/opt/tfc/" - t_sudo mv "$HOME/${PYSOCKS}" "/opt/tfc/" - - # Requests - t_sudo mv "$HOME/${URLLIB3}" "/opt/tfc/" - t_sudo mv "$HOME/${IDNA}" "/opt/tfc/" - t_sudo mv "$HOME/${CHARDET}" "/opt/tfc/" - t_sudo mv "$HOME/${CERTIFI}" "/opt/tfc/" - t_sudo mv "$HOME/${REQUESTS}" "/opt/tfc/" - - # Flask - t_sudo mv "$HOME/${WERKZEUG}" "/opt/tfc/" - t_sudo mv "$HOME/${MARKUPSAFE}" "/opt/tfc/" - t_sudo mv "$HOME/${JINJA2}" "/opt/tfc/" - t_sudo mv "$HOME/${ITSDANGEROUS}" "/opt/tfc/" - t_sudo mv "$HOME/${CLICK}" "/opt/tfc/" - t_sudo mv "$HOME/${FLASK}" "/opt/tfc/" - - # Cryptography - t_sudo mv "$HOME/${SIX}" "/opt/tfc/" - t_sudo mv "$HOME/${PYCPARSER}" "/opt/tfc/" - t_sudo mv "$HOME/${CFFI}" "/opt/tfc/" - t_sudo mv "$HOME/${CRYPTOGRAPHY}" "/opt/tfc/" - - # PyNaCl - t_sudo mv "$HOME/${PYNACL}" "/opt/tfc/" -} - +# Functions with pinned hashes function verify_tails_dependencies { - # Tails doesn't allow downloading over PIP to /opt/tfc, so we - # first download to $HOME, move the files to /opt/tfc, and then - # perform additional hash verification - compare_digest f4e7148f1de50fa2e69061e72db211085fc2f44007de4d18ee02a20d34bca30a00d2fe56ff6f3132e696c3f6efd4151863f26dac4c1d43e87b597c47a51c52ad '' ${VIRTUALENV} compare_digest 8333ac2843fd136d5d0d63b527b37866f7d18afc3bb33c4938b63af077492aeb118eb32a89ac78547f14d59a2adb1e5d00728728275de62317da48dadf6cdff9 '' ${PYSERIAL} - # compare_digest a275f59bba650cb5bb151cf53fb1dd820334f9abbeae1a25e64502adc854c7f54c51bc3d6c1656b595d142fc0695ffad53aab3c57bc285421c1f4f10c9c3db4c '' ${STEM} compare_digest 313b954102231d038d52ab58f41e3642579be29f827135b8dd92c06acb362effcb0a7fd5f35de9273372b92d9fe29f38381ae44f8b41aa90d2564d6dd07ecd12 '' ${PYSOCKS} + # Virtualenv + compare_digest 8b85fa635c5ec51881aed2238f1e9229d6607644995e26e3f9fe6f8bb6313c51f7b290a6ac1347738866626b1b49d08c5622836dfe2a39ae60f697888bcea615 '' ${VIRTUALENV} + compare_digest b79e9fa76eadee595fe47ea7efd35c4cc72f058a9ed16a95cfa4d91a52c330efba50df7a9926900bbced229cca7bbfb05bbf0a8ee1d46bac2362c98ab9a5154d '' ${APPDIRS} + compare_digest 6f910a9607569c9023a19aee35be15cf8521ec7c07c5d478e6d555a301d024a2ee1db48562707b238a72c631d75d9dc154d38b39ed51746b66c938ac40671e60 '' ${DISTLIB} + compare_digest a6e7e35921ce8f2f8e79a296ea79a9c3515ff6dd7e777d7892fe4988594f1b3a442a68ffb89cf64530b90a32ceeea00e4ab9069bb697629ab4eb7262c68d1b0f '' ${SIX} + compare_digest 53e51d4b75c1df19fcb6b32e57fa73ffcb00eede86fee7ac9634f02661360538a74d3546b65a641b68ee84c0d78293fe03d09b65cb85359780822b56f813b926 '' ${IMPORTLIB_METADATA} + compare_digest d13edd50779bca9842694e0da157ca1fdad9d28166771275049f41dea4b8d8466fc5604b610b6ad64552cdf4c1d3cada9977ca37c6b775c4cc92f333709e8ea3 '' ${FILELOCK} + compare_digest 89170b91cfdc0ef4d85b5316b484c8d6e01985f19bb9f545b11d648e122392efa68d40c66e056b8998fb69af49f4e18707f783be8d500b8957ce3a885662d27c '' ${ZIPP} + # Requests compare_digest f7fd3b54b7c555c0e74eb445e543763d233b5c6f8021ccf46a45d452c334953276d43ecd8f3d0eafefa35103a7d1874e291216fc9a41362eb6f1250a2a670f16 '' ${URLLIB3} - compare_digest fb07dbec1de86efbad82a4f73d98123c59b083c1f1277445204bef75de99ca200377ad2f1db8924ae79b31b3dd984891c87d0a6344ec4d07a0ddbbbc655821a3 '' ${IDNA} + compare_digest be96b782728404acec374f446b11811f8e76d5ed42d4673a07e883220f5ba2a099a8124cda5898c3f5da7d92b87b36127e8fd42e9edb240b587a380ed73cce93 '' ${IDNA} compare_digest 
bfae58c8ea19c87cc9c9bf3d0b6146bfdb3630346bd954fe8e9f7da1f09da1fc0d6943ff04802798a665ea3b610ee2d65658ce84fe5a89f9e93625ea396a17f4 '' ${CHARDET} compare_digest fe5b05c29c1e1d9079150aaea28b09d84f0dd15907e276ccabb314433cfaac948a9615e10d6d01cbd537f99eed8072fbda7cb901e932fbab4f1286ae8c50471b '' ${CERTIFI} - compare_digest 9186ce4e39bb64f5931a205ffc9afac61657bc42078bc4754ed12a2b66a12b7a620583440849fc2e161d1061ac0750ddef4670f54916931ace1e9abd2a9fb09c '' ${REQUESTS} + compare_digest 98e4c9435434b8f63fc37a21133adbbfeb471bfb8b40d60f04bded5cbe328c14a22527d54ab2a55a81d93110d627bacc26943e55ec338b7bed8708b55e15fff3 '' ${REQUESTS} # Flask - compare_digest 4c982970fef39bf7cfbb4e516864fec0f8ec3f743ccb632d1659c6ee415597d98f4abd63b5c0fd999eb43fc0c89a97123f07625b01ea86b02ef51cb67a2b148d '' ${WERKZEUG} + compare_digest 82a0f1776820d07e929daa60bfa0a3e746464b0f2923376330f8ae5abf535bcb756c7384757b2ff8e0076f299fe85d96ef34b3a8eede21c11df9aba8cc58cb77 '' ${WERKZEUG} compare_digest 69e9b9c9ac4fdf3cfa1a3de23d14964b843989128f8cc6ea58617fc5d6ef937bcc3eae9cb32b5164b5f54b06f96bdff9bc249529f20671cc26adc9e6ce8f6bec '' ${MARKUPSAFE} compare_digest 461bbd517560f1c4dbf7309bdf0cf33b468938fddfa2c3385fab07343269732d8ce68d8827148645113267d48e7d67b03f1663cc64839dd1fcec723ea606aaf4 '' ${JINJA2} compare_digest 891c294867f705eb9c66274bd04ac5d93140d6e9beea6cbf9a44e7f9c13c0e2efa3554bdf56620712759a5cd579e112a782d25f3f91ba9419d60b2b4d2bc5b7c '' ${ITSDANGEROUS} @@ -246,11 +82,9 @@ function verify_tails_dependencies { compare_digest bd49cb364307569480196289fa61fbb5493e46199620333f67617367278e1f56b20fc0d40fd540bef15642a8065e488c24e97f50535e8ec143875095157d8069 '' ${FLASK} # Cryptography - compare_digest a6e7e35921ce8f2f8e79a296ea79a9c3515ff6dd7e777d7892fe4988594f1b3a442a68ffb89cf64530b90a32ceeea00e4ab9069bb697629ab4eb7262c68d1b0f '' ${SIX} - compare_digest 7f830e1c9066ee2d297a55e2bf6db4bf6447b6d9da0145d11a88c3bb98505755fb7986eafa6e06ae0b7680838f5e5d6a6d188245ca5ad45c2a727587bac93ab5 '' ${PYCPARSER} - compare_digest b8753a0435cc7a2176f8748badc074ec6ffab6698d6be42b1770c85871f85aa7cf60152a8be053c3031b234a286c5cef07267cb812accb704783d74a2675ed3b '' ${CFFI} - compare_digest 184003c89fee74892de25c3e5ec366faea7a5f1fcca3c82b0d5e5f9f797286671a820ca54da5266d6f879ab342c97e25bce9db366c5fb1178690cd5978d4d622 '' ${CRYPTOGRAPHY} # manylinux1 - # compare_digest d8ddabe127ae8d7330d219e284de68b37fa450a27b4cf05334e9115388295b00148d9861c23b1a2e5ea9df0c33a2d27f3e4b25ce9abd3c334f1979920b19c902 '' ${CRYPTOGRAPHY} # manylinux2010 + compare_digest 06dc9cefdcde6b97c96d0452a77db42a629c48ee545edd7ab241763e50e3b3c56d21f9fcce4e206817aa1a597763d948a10ccc73572490d739c89eea7fede0a1 '' ${PYCPARSER} + compare_digest 5b315a65fc8f40622ceef35466546620aaca9dd304f5491a845239659b4066469c5fb3f1683c382eb57f8975caf318e5d88852e3dbb049cde193c9189b88c9c0 '' ${CFFI} + compare_digest 184003c89fee74892de25c3e5ec366faea7a5f1fcca3c82b0d5e5f9f797286671a820ca54da5266d6f879ab342c97e25bce9db366c5fb1178690cd5978d4d622 '' ${CRYPTOGRAPHY} # PyNaCl compare_digest c4017c38b026a5c531b15839b8d61d1fae9907ba1960c2f97f4cd67fe0827729346d5186a6d6927ba84f64b4cbfdece12b287aa7750a039f4160831be871cea3 '' ${PYNACL} @@ -258,33 +92,238 @@ function verify_tails_dependencies { function install_tails_setuptools { - # Download setuptools package for Tails and then authenticate and install it. 
- torsocks python3.7 -m pip download --no-cache-dir -r "/opt/tfc/requirements-setuptools.txt" --require-hashes --no-deps -d "${HOME}/" - t_sudo mv "$HOME/${SETUPTOOLS}" "/opt/tfc/" - compare_digest 761e4c8df239b8d173513b08959b387c1059e3e023ba6b3f6250fade518d6ef29f287ab90dd35d02bb681b410a050b30b2ed44849638b6f98831f4290a4ccd15 '' ${SETUPTOOLS} - t_sudo python3.7 -m pip install "/opt/tfc/${SETUPTOOLS}" - t_sudo -E rm "/opt/tfc/${SETUPTOOLS}" + # Download setuptools package for Tails, and move it to /opt/tfc so it can't be edited. + # Once the package has been authenticated, install it and then remove the install file. + torsocks python3.7 -m pip download --no-cache-dir -r "${INSTALL_DIR}/requirements-setuptools.txt" --require-hashes --no-deps -d "${HOME}/" + t_sudo mv "$HOME/${SETUPTOOLS}" "${INSTALL_DIR}/" + compare_digest de1ac45cb52e8a28322048e6a2b95015aa6826c49679349a1b579cb46b95cb2ffd62242c861c2fe3e059c0c55d4fdb4384c51b964ca2634b2843263543f8842a '' ${SETUPTOOLS} + t_sudo python3.7 -m pip install "${INSTALL_DIR}/${SETUPTOOLS}" + t_sudo -E rm "${INSTALL_DIR}/${SETUPTOOLS}" } +function verify_tcb_requirements_files { + # To minimize the time TCB installer configuration stays online, + # only the requirements files are authenticated between downloads. + compare_digest 1a17cd2aa1c3556c5c0b0fc8bc8d073f593c059a966da337990135022cc55543a8f78b950645425d652da165f5e1c06f2d8402976e0876608bb70c034e16b270 '' requirements.txt + compare_digest 0e3795239a17a032372f6d81f6c821f9eceff80a80c0314b21e277840a01c0115fca1ae60a53d0110768add5a17088cc9bc23a7a130f84ebc93d54229be7cfd3 '' requirements-venv.txt +} + + +function verify_files { + # Verify the authenticity of the rest of the TFC files. + compare_digest 1d9ee816a00eb66a96cf2a6484f37037e90eb8865d68b02de9c01d7ee6fa735cbbd2279099fe8adfb4dda5d9c0a2da649a5f530dba1f0c44471838995abcebb2 '' dd.py + compare_digest d361e5e8201481c6346ee6a886592c51265112be550d5224f1a7a6e116255c2f1ab8788df579d9b8372ed7bfd19bac4b6e70e00b472642966ab5b319b99a2686 '' LICENSE + compare_digest 8db25eafc66308f1fe8223c39bc5fb025ae111ebce3eae5601c907fa7a2654f68395af4f355ff0ff03775e79cda8dfccddaf7d68555bfe065d9469ca04a288f9 '' LICENSE-3RD-PARTY + compare_digest 7cad2202e4cc940627e31577162c38f44022ddb138a51f52d0ac3747e264e065919df2b646020851d8973cc76a2873a72ceabcbe93c39911ebbfa7c867f01675 '' relay.py + compare_digest a93162f4b4317c265a5ce46ae2d55370b2d0592d8fa6dccddb9f06cda8a129d8279526aa83aab05600ec17bbefaf5584bb4dd4b771256e84db38192ffc391276 '' requirements-dev.txt + compare_digest 1c8505fca5c1191a6aecc28511e5f36c36a137a3fe99cef63500055dc3d85cbf64cdc1cc5d6380a407f22f367e1702a344f66515f3e3bf1f7fa83de0c74be8b0 '' requirements-relay.txt + compare_digest 01d022de0db3354da312c8bbefe82f0b032717ea8246186b7cffcd040b25390bd37eea9d9d9a42c199ce8b652bcff750ba37773230a471b43ebd60b468a7a29c '' requirements-relay-tails.txt + compare_digest 1432c2f098b1e656a597cbcfcceef00e2cda1897242f2a9316182fffff55bd64ea974534756200ec878741cd8de12b46b4b05a39ed550ad3997d111a6176f16f '' requirements-setuptools.txt + compare_digest 79f8272a2ab122a48c60630c965cd9d000dcafabf5ee9d69b1c33c58ec321feb17e4654dbbbf783cc8868ccdfe2777d60c6c3fc9ef16f8264d9fcf43724e83c2 '' tfc.png + compare_digest c746fa981fcdc1b21cbe7117ed186ef7757d120cb96fbe8500b8b5f7f4effebe71360ae5c1cc2bf873818002544d9aeba26990b93723a79c6bbcd647552a7ca0 '' tfc.py + compare_digest 62f26d2805570ee70fad3a076579a554008e7d9f2c9ff310f3bb5876d361cc03dbae7ab63b144ac215a35f920ac56d359481352805a356479d622ab00da15f7f '' tfc.yml + compare_digest 
e96471894b177d65639a3cc68e85bf609334ecaaa806009467d0e5e45d8ed4fcb4a43b8fce842458e18b0da9362a895948d45109a020a667986998f5c0055294 '' uninstall.sh + + compare_digest d4f503df2186db02641f54a545739d90974b6d9d920f76ad7e93fe1a38a68a85c167da6c19f7574d11fbb69e57d563845d174d420c55691bc2cd75a1a72806dc launchers/ terminator-config-local-test + compare_digest 7580e185bc1464d83d1aa482a830d4466311fc513287e89362da6a659c6047e7402b7e1096142d3d1865019832b3a67288e10399033598af5692b0a1f9fb892b launchers/ TFC-Local-test.desktop + compare_digest 62ada11d5513d2d196184b207a82dd14c7ad78af3f7b43c4de64162c8784c2996fa999f36fc6e2c7f1b823df43497725b05f0b52489e9a9e1d9bddbe2ce7910f launchers/ tfc-qubes-receiver + compare_digest 72e04fe07ac400ca70da59d196119db985d9e74b99b0fd735be20a534bd10084287a52aee46c6467703fe5e14609729db229112d53ce3156838c80b4f159df0a launchers/ tfc-qubes-relay + compare_digest 89e23c4d9e7d4402d7dde3d905fe0c3a3fd5ff8c09c33033f95d203ae9f1f53fa26d20c913820ff16764e9b6e0d558f8f332f9657b2e92eb77134576e683e1a9 launchers/ tfc-qubes-transmitter + compare_digest 09b6eb46542ea3d1c710e05b0259b1b39305e39c1e7565a19726cff74aa91beb0dc5fc1a4e7e5938368ab65467b79de575fce130028403fcdf24800c2c60c4ec launchers/ TFC-RP.desktop + compare_digest 85a135377c0bde33e9a2d4c6ac70d04aa96f5a5529930f22a377bbdc4891f75e9edc36ff675c1f32e7490b6073b433d07218afad2a14805785cc9594151e83eb launchers/ TFC-RP-Qubes.desktop + compare_digest 09b6eb46542ea3d1c710e05b0259b1b39305e39c1e7565a19726cff74aa91beb0dc5fc1a4e7e5938368ab65467b79de575fce130028403fcdf24800c2c60c4ec launchers/ TFC-RP-Tails.desktop + compare_digest c9d8d39b5781f27ae8787eddd5189c03980338bbe2c762cac5a5b0060870a975b5d625ecebdc906b16f411a3adbd3ca2d0b1542b3112506a9599c569feefc477 launchers/ TFC-RxP.desktop + compare_digest 6b0c72e448eed95a37d90950167edbffa1063086cf2dc04740dd831c7b9a82a3b3a310c71cc45116d76c568c02e662f46a41d661e638e2c48d2b826ad6d43504 launchers/ TFC-RxP-Qubes.desktop + compare_digest 8cc6844e053533b10b51c35e9dd0f5c0e055708b93baa7c93b7ff7c0d5c1e2f7033fd28e2cf6c306e4533fb329d192a95e93655d69407aa8d7377e40180dcd15 launchers/ TFC-TxP.desktop + compare_digest 13948e8e5aec162ed457db14a5adbbd18ace5d948c2b9cad157815646b6342cb879fbe002e0b3ae16352e5924786747be05e55c01e7bceb750703e316c3e26ca launchers/ TFC-TxP-Qubes.desktop + + compare_digest 3ee90ee305382d80da801f047a6e58e5b763f9f6bc08dce531d5c620f2748c6bba59a1528eee5d721decb8e724f53b28fc7609f5b20472f679f554b78b5d4cc6 src/ __init__.py + compare_digest 3ee90ee305382d80da801f047a6e58e5b763f9f6bc08dce531d5c620f2748c6bba59a1528eee5d721decb8e724f53b28fc7609f5b20472f679f554b78b5d4cc6 src/common/ __init__.py + compare_digest f6572b3e2b446405a4af1a1a197787d40bf980f80c19569b33ff503f0b3a312a1e78076ee19095ad149930d7919b9fb468d3937eef44012fd9a926a8bf0658c7 src/common/ crypto.py + compare_digest b87ad9321dedc59fd17d1a60866ed061925870156a458861d5c51d5825f8c5562c9a33d8f8d14a46c6b054a6542c8aa5d97c06ce78442f66913e8ab043fa20de src/common/ database.py + compare_digest dfef16b30d75bbe270c4b7df1369b3eeb2347b931e7bb3a974965cc916a6ffb20aaa40d14532ecb4a8cabdb71598fb53d86589aa475dbb02030bdf9489d71429 src/common/ db_contacts.py + compare_digest 7c0214208857174b43092eaf61d14c16e60d6ebb68ba25b260f84546ce39f1fed8b21aceb58833920c8d939304b313c0ad95c554210ae3d5d0547143f7dd704c src/common/ db_groups.py + compare_digest c49231429824d8133de7efad667c2bdde694a6c7a2e34e3b015ddb8cf59a150574cdd7099aaad02a4993a1669cd631f5af4cc611fac7d538d3ecd141d9295d0d src/common/ db_keys.py + compare_digest 
04e0c0d53bcfc71476410bbdfcacee2ba3df6d7761d02111aca69a56cac848e4fb0178ee572b181b1a925bd45aae005b31b9e2afcce7416f7bd8c5dad96bc615 src/common/ db_logs.py + compare_digest cf7adc70366043d8813a51c9cbeac3931995e4647d11654b2ade98014137d2c2a9938951c00b88e80b24162741a1b37de5514a3ea8ec8e12f044e2549e2bf4ab src/common/ db_masterkey.py + compare_digest 325298cd6cb7e68d27681c18f29e635f46222e34015ba3c8fe55e6718e6907b4257bbe12d71fd344b557aff302ae9d7fca2b581b4208e59ac7923e57aca23fe5 src/common/ db_onion.py + compare_digest 4ef757ba877ee6b74632af3a0d3567c9483a62b9063ec0e7fe7b6abc7e82b490ec52279198f0be22866595dae1948bb1ef9ef556c88b3c320c5316fd59fc0743 src/common/ db_settings.py + compare_digest 60fb4c922af286307865b29f0cadab53a5a575a9f820cd5ad99ea116c841b54dd1d1be1352bf7c3ab51d2fd223077217bcda1b442d44d2b9f1bf614e15c4a14d src/common/ encoding.py + compare_digest ccd522408ad2e8e21f01038f5f49b9d82d5288717f1a1acf6cda278c421c05472827ee5928fbf56121c2dfc4f2cc49986e32c493e892bd6ae584be38ba381edd src/common/ exceptions.py + compare_digest 6a0b92cc259f7f0b4d1b65663ea633cc49590ff3562e1fedb096b59b49eddcbffa5e1892a6a5873a879f13b666192d3986f2c010de2e994ae7f6f6119b49ab60 src/common/ gateway.py + compare_digest d4021175fba75649fa1b8b65116b0acc98cedccd2a012986037a78e799908329694ee6f4c50617f92f5df279cfe5e719e38cada5f3775a8ea912a541f1dbf438 src/common/ input.py + compare_digest 159d192376b243fb48f02449e0db2b5576b4bb46a1c43bd0e7a641ae401e489af668171e4c771005114ac40c7951023b087f32b0d6eec3fa78559ed0a1408a8a src/common/ misc.py + compare_digest 6329bbdc9d24c1342d0996009a8cd4d852d5a800cbf6a582c047c0fc13e6ca9be28251b783325adffca100d2a372616088cedff2441cc103b8c18540828445ef src/common/ output.py + compare_digest 08443cfe633bb552d6bb55e48d81423db4a4099f9febc73ec6ee85ee535bc543720f199ac8b600b718e9af7247fb96ef4b9991b0416cf7186fd75a149365dd36 src/common/ path.py + compare_digest 39e48b0b55f4f1a48bc558f47b5f7c872583f3f3925fd829de28710024b000fcb03799cb36da3a31806143bc3cbb98e5d357a8d62674c23e1e8bf957aece79f6 src/common/ reed_solomon.py + compare_digest bf3c4d643b846b9e92d227a25d1ce182aa70430f178f2964f1293b304dbe522a37105ea51f359c4f85a7ee52a75851b58c0b6a41dc6656f2c9b12c5dc2ec4c1d src/common/ statics.py + compare_digest a57d5525a570a78d15c75e79702289cf8571c1b3c142fae57f32bf3ed8bb784c7f63ce2e805d295b4a505fdeaf9d59094ebe67d8979c92dc11e2534474505b0e src/common/ word_list.py + + compare_digest 3ee90ee305382d80da801f047a6e58e5b763f9f6bc08dce531d5c620f2748c6bba59a1528eee5d721decb8e724f53b28fc7609f5b20472f679f554b78b5d4cc6 src/receiver/ __init__.py + compare_digest ccc8d6bf2b10cf4ccb51a014ff7f251e527196f1172ab9b4da48b01ddcb9a64c15358f03424b76babd1ce2dd20147d847d2e1292fb0d76135c2ba10e182ec14b src/receiver/ commands.py + compare_digest 6dd0d73fe240f974bb92e850c5f38e97ee8d632fbb3a53abc3160b4e29f9b2b076b8b5d20dc7f7231c01c978ea67b55740ac17dc74bf05e6811d2e35e34056fb src/receiver/ commands_g.py + compare_digest 46be945df548416ec306054cdd2026b7f0057489bb2ded4e7d99b7b8f5bdb3208acbc8e5bc39617aa1d94a7e90b38a70321882e1753883389b0780ab58d9ed12 src/receiver/ files.py + compare_digest acfa0b7ac684b5a2747e1db315386ada28cf077c5fbedfc13a89d9912682b5020ae8da98fc65aef7fcbe3e3180184a7f787eba10b5617666bc43f4e4ba40231c src/receiver/ key_exchanges.py + compare_digest 6ebd6c0638525997949783b7623ce9a78683169e95f572ea65dcec52da150b0473a25e928862cab34eac44b0e0991a0969c5252c03cf4dc8f49d1aa9809b43bd src/receiver/ messages.py + compare_digest eabe1695cd0fe04346e49ed91b64a11ad74ff60b636333140f9a3c6745b9c408d77aae8f45256b5d74b241324a5d429249b2be6c732205ab729a38049b8631f7 
src/receiver/ output_loop.py + compare_digest 27494b6be36e574ccabf6783614fa1d210e97ce835c39426ee9c2fcded6836e0773e419ea007044fd7eb7c6fe2f7765fa07cf3e48c483599b90a041cead191e7 src/receiver/ packet.py + compare_digest 002c960023393bec10da3de6d9a218c8e2c27da1635fd1a7f99e02a9a28792428a2c0e6cd030d1cc1fac1124c58f397f63d60b7af4c384367a8c293978125539 src/receiver/ receiver_loop.py + compare_digest da8ff22a1ece42c780328c84722ae42c2dced69dd87f2fb2d09fd517d3ee98f3777c448922b2b06a5839347e075a5598e6c770a544fdf801e664ba5ad06b684d src/receiver/ windows.py + + compare_digest 3ee90ee305382d80da801f047a6e58e5b763f9f6bc08dce531d5c620f2748c6bba59a1528eee5d721decb8e724f53b28fc7609f5b20472f679f554b78b5d4cc6 src/relay/ __init__.py + compare_digest 0ab86ddcfc7a28e7945e302918e384c2570d8b19942bb7c1b300d5913f77b184aae36612819ec85f0ef5b4a3b21d22aa710f218fc229c1317f04a11782e832e5 src/relay/ client.py + compare_digest c7457a0b21383c9d803f3854bbbd616943132a775641d8cada0c5fbd0d756910679d44a748b79291149758e2650e1bee4450b0c51ceb9f8bd680cfc6a5635407 src/relay/ commands.py + compare_digest 10229a8a8869b1c27e0f23733e9680ef3826831490be8c81553f0735ecfb93c0776cf976de2107c1d5822caa1b7dcacb7d1f090a9ff73df18ec2500fcd930089 src/relay/ diffs.py + compare_digest 0bc1912af4d45ff72fbd7c43a09ab48ea3a780bb096226011525924820ba0ba1f396937fb191e5e18ec87dee14ccd3b505192e16e835d46088e4b50c941125f5 src/relay/ onion.py + compare_digest 42acbe9557c848eea66ea6b9db71b3c3ac5b2da7710f7dc79f70f8b4952e582c8953d8fb38eab82be9b8b15db6a3f3fc882ef8d65adbe5ccdf26f55ef54d4758 src/relay/ server.py + compare_digest ee0bdbf39053e34d5e6597004ffc4a3831835238631368d29e301094a45551c6ff64b4d8cd9a8e8f7b6cf3fcfddd21e3dd275c0dee7cbc0503584b6991f923f5 src/relay/ tcb.py + + compare_digest 3ee90ee305382d80da801f047a6e58e5b763f9f6bc08dce531d5c620f2748c6bba59a1528eee5d721decb8e724f53b28fc7609f5b20472f679f554b78b5d4cc6 src/transmitter/ __init__.py + compare_digest 20b128c1aa0353db8f20f4274632454f8ead9e8e3ec59876673a1b2c270c66c8ab1af2252e83d95e15c95e6d8e56ef4d8708fda5afeedef583f3b297d92d38c1 src/transmitter/ commands.py + compare_digest 2af2cd801fc83f882c65e031b5bd6f5c2c30b32dc0bb538953021b1f520714723d39b2a2462a6718cbb3efea1b645767b50d468978bb802dacf8b73535a2975f src/transmitter/ commands_g.py + compare_digest 31267d2049e4e9a88301e0d851e11c8e3db0bbb96c4509c10a3528c29ab679a49db8430cca1529ccd71556e273f4937d3bf7e0c2e1a165a8d36729ed284a4f19 src/transmitter/ contact.py + compare_digest f2fefbc2acbad441cb997969d6b39fbe26813abc781f5b6caaa08f1eb4c52c05b2bd4cbc341cb75ea07f7a4931d9b1145bef2fb352376a72442f7a71299fb595 src/transmitter/ files.py + compare_digest 110665f962eb827a9f636cc823837222a7bed4a429d4e10eb90c7bf5ba7bd5900aa1ecc4d4b485927a276d5727e18fe9e78f75ab8bd4ff67f039bb633fe505ec src/transmitter/ input_loop.py + compare_digest 89407e887d0cba4d993c0ee60412ea1ecfdedd8bbb0c73417bb71847733f85dbe1dab2997f65824ae58b4b5278bb0866a2a04bb8273228ca1bbbc1068eec7c04 src/transmitter/ key_exchanges.py + compare_digest 766b1efa548f2da49272870fa5f89b8aacdf65b737b908f7064209f2f256c4d4875228ad087ac4957a292a82ed5936a40b9ae7553bfae2eae739f0c4579eb21a src/transmitter/ packet.py + compare_digest b8cfc11ae235c8cddbbd4003f8f95504456d9b2d6b6cc09bd538c09132bc737b6f070bdbc8d697e9ddfc5854546575526fa26c813f9f6bff7dc32fcdbb337753 src/transmitter/ sender_loop.py + compare_digest c102bb337ade562e0d9aedc0910f70f14652e2eba004a632bfb0ba8dddf147ab271d3ae544c4d9f3b2fcd3830646d9ad28255717d017cb91b3463829069360ba src/transmitter/ traffic_masking.py + compare_digest 
eb77c6206cab63ffdb47bbcb8b76a55100636d893e234a048221d83e9ce07b76ccfcc93b506d9fb48d6f8823135e5697f3e56aed8e95f23990d8dfc1cece325e src/transmitter/ user_input.py + compare_digest 489f869176da0040b6f06327544f5eb72863a748a4799c66198a09402df6d54d842e9af27af51faaeed9d0661133eeaebb9918bd1bcd50950c182ba4b1e5fc74 src/transmitter/ window_mock.py + compare_digest 09c536d43b37103b6340293efa67345f54da6563ea65441546161066d735b4dfad9eaea9c58452de3413b72b28a923d2efb851ac740ba09ada45368bb64b9f15 src/transmitter/ windows.py +} + + +# ---------------------------------------------------------------------------------------- + +# Dependency batch processing + +function process_virtualenv_dependencies { + # Manage Virtualenv dependencies in batch. + sudo $1 "${INSTALL_DIR}/${ZIPP}" + sudo $1 "${INSTALL_DIR}/${FILELOCK}" + sudo $1 "${INSTALL_DIR}/${IMPORTLIB_METADATA}" + sudo $1 "${INSTALL_DIR}/${SIX}" + sudo $1 "${INSTALL_DIR}/${DISTLIB}" + sudo $1 "${INSTALL_DIR}/${APPDIRS}" + sudo $1 "${INSTALL_DIR}/${VIRTUALENV}" +} + + +function process_tails_venv_dependencies { + # Process Tails Virtualenv dependencies in batch. + t_sudo -E $1 "${INSTALL_DIR}/${ZIPP}" + t_sudo -E $1 "${INSTALL_DIR}/${FILELOCK}" + t_sudo -E $1 "${INSTALL_DIR}/${IMPORTLIB_METADATA}" + t_sudo -E $1 "${INSTALL_DIR}/${SIX}" + t_sudo -E $1 "${INSTALL_DIR}/${DISTLIB}" + t_sudo -E $1 "${INSTALL_DIR}/${APPDIRS}" + t_sudo -E $1 "${INSTALL_DIR}/${VIRTUALENV}" +} + + +function process_tcb_dependencies { + # Manage TCB dependencies in batch. + sudo $1 "${INSTALL_DIR}/${PYCPARSER}" + sudo $1 "${INSTALL_DIR}/${CFFI}" + sudo $1 "${INSTALL_DIR}/${ARGON2_CFFI}" + sudo $1 "${INSTALL_DIR}/${SETUPTOOLS}" + sudo $1 "${INSTALL_DIR}/${PYNACL}" + sudo $1 "${INSTALL_DIR}/${PYSERIAL}" + sudo $1 "${INSTALL_DIR}/${CRYPTOGRAPHY}" +} + + +function process_tails_dependencies { + # Manage Tails dependencies in batch. + t_sudo -E $1 "${INSTALL_DIR}/${PYSERIAL}" + t_sudo -E $1 "${INSTALL_DIR}/${PYSOCKS}" + + # Requests + t_sudo -E $1 "${INSTALL_DIR}/${URLLIB3}" + t_sudo -E $1 "${INSTALL_DIR}/${IDNA}" + t_sudo -E $1 "${INSTALL_DIR}/${CHARDET}" + t_sudo -E $1 "${INSTALL_DIR}/${CERTIFI}" + t_sudo -E $1 "${INSTALL_DIR}/${REQUESTS}" + + # Flask + t_sudo -E $1 "${INSTALL_DIR}/${WERKZEUG}" + t_sudo -E $1 "${INSTALL_DIR}/${MARKUPSAFE}" + t_sudo -E $1 "${INSTALL_DIR}/${JINJA2}" + t_sudo -E $1 "${INSTALL_DIR}/${ITSDANGEROUS}" + t_sudo -E $1 "${INSTALL_DIR}/${CLICK}" + t_sudo -E $1 "${INSTALL_DIR}/${FLASK}" + + # Cryptography + t_sudo -E $1 "${INSTALL_DIR}/${PYCPARSER}" + t_sudo -E $1 "${INSTALL_DIR}/${CFFI}" + t_sudo -E $1 "${INSTALL_DIR}/${CRYPTOGRAPHY}" + + # PyNaCl + t_sudo -E $1 "${INSTALL_DIR}/${PYNACL}" +} + + +function move_tails_dependencies { + # Move Tails dependencies in batch. 
+ t_sudo mv "$HOME/${PYSERIAL}" "${INSTALL_DIR}/" + t_sudo mv "$HOME/${PYSOCKS}" "${INSTALL_DIR}/" + + # Virtualenv + t_sudo mv "$HOME/${VIRTUALENV}" "${INSTALL_DIR}/" + t_sudo mv "$HOME/${APPDIRS}" "${INSTALL_DIR}/" + t_sudo mv "$HOME/${DISTLIB}" "${INSTALL_DIR}/" + t_sudo mv "$HOME/${FILELOCK}" "${INSTALL_DIR}/" + t_sudo mv "$HOME/${IMPORTLIB_METADATA}" "${INSTALL_DIR}/" + t_sudo mv "$HOME/${SIX}" "${INSTALL_DIR}/" + t_sudo mv "$HOME/${ZIPP}" "${INSTALL_DIR}/" + + # Requests + t_sudo mv "$HOME/${URLLIB3}" "${INSTALL_DIR}/" + t_sudo mv "$HOME/${IDNA}" "${INSTALL_DIR}/" + t_sudo mv "$HOME/${CHARDET}" "${INSTALL_DIR}/" + t_sudo mv "$HOME/${CERTIFI}" "${INSTALL_DIR}/" + t_sudo mv "$HOME/${REQUESTS}" "${INSTALL_DIR}/" + + # Flask + t_sudo mv "$HOME/${WERKZEUG}" "${INSTALL_DIR}/" + t_sudo mv "$HOME/${MARKUPSAFE}" "${INSTALL_DIR}/" + t_sudo mv "$HOME/${JINJA2}" "${INSTALL_DIR}/" + t_sudo mv "$HOME/${ITSDANGEROUS}" "${INSTALL_DIR}/" + t_sudo mv "$HOME/${CLICK}" "${INSTALL_DIR}/" + t_sudo mv "$HOME/${FLASK}" "${INSTALL_DIR}/" + + # Cryptography + t_sudo mv "$HOME/${PYCPARSER}" "${INSTALL_DIR}/" + t_sudo mv "$HOME/${CFFI}" "${INSTALL_DIR}/" + t_sudo mv "$HOME/${CRYPTOGRAPHY}" "${INSTALL_DIR}/" + + # PyNaCl + t_sudo mv "$HOME/${PYNACL}" "${INSTALL_DIR}/" +} + + +# Common tasks + function remove_common_files { # Remove files that become unnecessary after installation. - $1 rm -r /opt/tfc/.git/ - $1 rm -r /opt/tfc/launchers/ - $1 rm -r /opt/tfc/tests/ - $1 rm /opt/tfc/.coveragerc - $1 rm /opt/tfc/.travis.yml - $1 rm /opt/tfc/install.sh - $1 rm /opt/tfc/install.sh.asc - $1 rm /opt/tfc/pubkey.asc - $1 rm /opt/tfc/pytest.ini - $1 rm /opt/tfc/README.md - $1 rm /opt/tfc/requirements.txt - $1 rm /opt/tfc/requirements-dev.txt - $1 rm /opt/tfc/requirements-relay.txt - $1 rm /opt/tfc/requirements-relay-tails.txt - $1 rm /opt/tfc/requirements-setuptools.txt - $1 rm /opt/tfc/requirements-venv.txt + $1 rm -r ${INSTALL_DIR}/.git/ + $1 rm -r ${INSTALL_DIR}/launchers/ + $1 rm -r ${INSTALL_DIR}/tests/ + $1 rm ${INSTALL_DIR}/.coveragerc + $1 rm ${INSTALL_DIR}/.travis.yml + $1 rm ${INSTALL_DIR}/install.sh + $1 rm ${INSTALL_DIR}/install.sh.asc + $1 rm ${INSTALL_DIR}/pubkey.asc + $1 rm ${INSTALL_DIR}/pytest.ini + $1 rm ${INSTALL_DIR}/README.md + $1 rm ${INSTALL_DIR}/requirements.txt + $1 rm ${INSTALL_DIR}/requirements-dev.txt + $1 rm ${INSTALL_DIR}/requirements-relay.txt + $1 rm ${INSTALL_DIR}/requirements-relay-tails.txt + $1 rm ${INSTALL_DIR}/requirements-setuptools.txt + $1 rm ${INSTALL_DIR}/requirements-venv.txt $1 rm -f /opt/install.sh $1 rm -f /opt/install.sh.asc $1 rm -f /opt/pubkey.asc @@ -294,7 +333,7 @@ function remove_common_files { function steps_before_network_kill { # These steps are identical in TCB/Relay/Local test configurations. # This makes it harder to distinguish from network traffic when the - # user is installing TFC for Source or Destination computer: By the + # user is installing TFC for Source or Destination Computer: By the # time `kill_network` is run, it's too late to compromise the TCB. 
# Hopefully this forces adversaries to attempt compromise of more # endpoints during installation, which increases their chances of @@ -304,20 +343,20 @@ function steps_before_network_kill { sudo torsocks apt update sudo torsocks apt install git gnome-terminal libssl-dev python3-pip python3-tk net-tools -y - sudo torsocks git clone --depth 1 https://github.com/maqp/tfc.git /opt/tfc + sudo torsocks git clone --depth 1 https://github.com/maqp/tfc.git ${INSTALL_DIR} verify_tcb_requirements_files - sudo torsocks python3.7 -m pip download --no-cache-dir -r "/opt/tfc/requirements-venv.txt" --require-hashes --no-deps -d /opt/tfc/ - sudo torsocks python3.7 -m pip download --no-cache-dir -r "/opt/tfc/requirements.txt" --require-hashes --no-deps -d /opt/tfc/ + sudo torsocks python3.7 -m pip download --no-cache-dir -r "${INSTALL_DIR}/requirements-venv.txt" --require-hashes --no-deps -d ${INSTALL_DIR}/ + sudo torsocks python3.7 -m pip download --no-cache-dir -r "${INSTALL_DIR}/requirements.txt" --require-hashes --no-deps -d ${INSTALL_DIR}/ } +# ---------------------------------------------------------------------------------------- + +# Installation configurations for Debian/PureOS/Ubuntu/LMDE + function install_tcb { # Install TFC for Source/Destination Computer. - # - # The installer configuration first downloads all necessary files. - # It then disconnects the computer from network, before completing - # the rest of the installation steps. steps_before_network_kill kill_network @@ -325,25 +364,25 @@ function install_tcb { verify_files create_user_data_dir - sudo python3.7 -m pip install "/opt/tfc/${VIRTUALENV}" - sudo python3.7 -m virtualenv "/opt/tfc/venv_tcb" --system-site-packages --never-download + process_virtualenv_dependencies "python3.7 -m pip install" + sudo python3.7 -m virtualenv "${INSTALL_DIR}/venv_tcb" --system-site-packages --never-download - . /opt/tfc/venv_tcb/bin/activate + . ${INSTALL_DIR}/venv_tcb/bin/activate process_tcb_dependencies "python3.7 -m pip install" deactivate - sudo mv /opt/tfc/tfc.png /usr/share/pixmaps/ - sudo mv /opt/tfc/launchers/TFC-TxP.desktop /usr/share/applications/ - sudo mv /opt/tfc/launchers/TFC-RxP.desktop /usr/share/applications/ + sudo mv ${INSTALL_DIR}/tfc.png /usr/share/pixmaps/ + sudo mv ${INSTALL_DIR}/launchers/TFC-TxP.desktop /usr/share/applications/ + sudo mv ${INSTALL_DIR}/launchers/TFC-RxP.desktop /usr/share/applications/ # Remove unnecessary files - remove_common_files "sudo" - process_tcb_dependencies "rm" - sudo rm -r /opt/tfc/src/relay/ - sudo rm /opt/tfc/dd.py - sudo rm /opt/tfc/relay.py - sudo rm /opt/tfc/tfc.yml - sudo rm /opt/tfc/${VIRTUALENV} + remove_common_files "sudo" + process_virtualenv_dependencies "rm" + process_tcb_dependencies "rm" + sudo rm -r ${INSTALL_DIR}/src/relay/ + sudo rm ${INSTALL_DIR}/dd.py + sudo rm ${INSTALL_DIR}/relay.py + sudo rm ${INSTALL_DIR}/tfc.yml add_serial_permissions @@ -351,6 +390,279 @@ function install_tcb { } +function install_relay { + # Install TFC Relay configuration on Networked Computer. + steps_before_network_kill + + verify_files + create_user_data_dir + + install_virtualenv + sudo python3.7 -m virtualenv ${INSTALL_DIR}/venv_relay --system-site-packages + + . 
${INSTALL_DIR}/venv_relay/bin/activate + sudo torsocks python3.7 -m pip install -r ${INSTALL_DIR}/requirements-relay.txt --require-hashes --no-deps + deactivate + + sudo mv ${INSTALL_DIR}/tfc.png /usr/share/pixmaps/ + sudo mv ${INSTALL_DIR}/launchers/TFC-RP.desktop /usr/share/applications/ + + # Remove unnecessary files + remove_common_files "sudo" + process_virtualenv_dependencies "rm" + process_tcb_dependencies "rm" + sudo rm -r "${INSTALL_DIR}/src/receiver/" + sudo rm -r "${INSTALL_DIR}/src/transmitter/" + sudo rm "${INSTALL_DIR}/dd.py" + sudo rm "${INSTALL_DIR}/tfc.py" + sudo rm "${INSTALL_DIR}/tfc.yml" + + add_serial_permissions + + install_complete "Installation of the TFC Relay configuration is now complete." +} + + +# Installation configuration for Tails + +function install_relay_tails { + # Install TFC Relay configuration on Networked Computer + # running Tails live distro (https://tails.boum.org/). + read_sudo_pwd + + t_sudo apt update + t_sudo apt install git libssl-dev python3-pip python3-tk -y || true # Ignore error in case packages cannot be persistently installed + + torsocks git clone --depth 1 https://github.com/maqp/tfc.git "${HOME}/tfc" + t_sudo mv "${HOME}/tfc/" "${INSTALL_DIR}/" + t_sudo chown -R root ${INSTALL_DIR}/ + + verify_tcb_requirements_files + verify_files + + create_user_data_dir + + install_tails_setuptools + + # Tails doesn't allow downloading over PIP to /opt/tfc, so we first download + # to $HOME, move the files to /opt/tfc, and then perform the hash verification + torsocks python3.7 -m pip download --no-cache-dir -r "${INSTALL_DIR}/requirements-venv.txt" --require-hashes --no-deps -d "${HOME}/" + torsocks python3.7 -m pip download --no-cache-dir -r "${INSTALL_DIR}/requirements-relay-tails.txt" --require-hashes --no-deps -d "${HOME}/" + move_tails_dependencies + verify_tails_dependencies + + process_tails_venv_dependencies "python3.7 -m pip install" + t_sudo python3.7 -m virtualenv ${INSTALL_DIR}/venv_relay --system-site-packages + + . ${INSTALL_DIR}/venv_relay/bin/activate + process_tails_dependencies "python3.7 -m pip install" + deactivate + + t_sudo mv ${INSTALL_DIR}/tfc.png /usr/share/pixmaps/ + t_sudo mv ${INSTALL_DIR}/launchers/TFC-RP-Tails.desktop /usr/share/applications/ + t_sudo mv ${INSTALL_DIR}/tfc.yml /etc/onion-grater.d/ + + # Remove unnecessary files + remove_common_files "t_sudo" + process_tails_venv_dependencies "rm" + process_tails_dependencies "rm" + t_sudo rm -r "${INSTALL_DIR}/src/receiver/" + t_sudo rm -r "${INSTALL_DIR}/src/transmitter/" + t_sudo rm "${INSTALL_DIR}/dd.py" + t_sudo rm "${INSTALL_DIR}/tfc.py" + + install_complete "Installation of the TFC Relay configuration is now complete." +} + + +# Installation configurations for Qubes OS (https://www.qubes-os.org/) + +function install_qubes_src { + # Qubes Source VM installation configuration for Debian 10 domains. + create_user_data_dir + + steps_before_network_kill + qubes_src_firewall_config + + verify_files + + process_virtualenv_dependencies "python3.7 -m pip install" + sudo python3.7 -m virtualenv "${INSTALL_DIR}/venv_tcb" --system-site-packages --never-download + + . 
${INSTALL_DIR}/venv_tcb/bin/activate + process_tcb_dependencies "python3.7 -m pip install" + deactivate + + sudo mv ${INSTALL_DIR}/tfc.png /usr/share/pixmaps/ + sudo mv ${INSTALL_DIR}/launchers/TFC-TxP-Qubes.desktop /usr/share/applications/ + sudo mv ${INSTALL_DIR}/launchers/tfc-qubes-transmitter /usr/bin/tfc-transmitter + + # Remove unnecessary files + remove_common_files "sudo" + process_virtualenv_dependencies "rm" + process_tcb_dependencies "rm" + sudo rm -r ${INSTALL_DIR}/src/relay/ + sudo rm ${INSTALL_DIR}/dd.py + sudo rm ${INSTALL_DIR}/relay.py + sudo rm ${INSTALL_DIR}/tfc.yml + + install_complete_qubes +} + + +function install_qubes_dst { + # Qubes Destination VM installation configuration for Debian 10 domains. + create_user_data_dir + + steps_before_network_kill + qubes_dst_firewall_config + + verify_files + + process_virtualenv_dependencies "python3.7 -m pip install" + sudo python3.7 -m virtualenv "${INSTALL_DIR}/venv_tcb" --system-site-packages --never-download + + . ${INSTALL_DIR}/venv_tcb/bin/activate + process_tcb_dependencies "python3.7 -m pip install" + deactivate + + sudo mv ${INSTALL_DIR}/tfc.png /usr/share/pixmaps/ + sudo mv ${INSTALL_DIR}/launchers/TFC-RxP-Qubes.desktop /usr/share/applications/ + sudo mv ${INSTALL_DIR}/launchers/tfc-qubes-receiver /usr/bin/tfc-receiver + + # Remove unnecessary files + remove_common_files "sudo" + process_virtualenv_dependencies "rm" + process_tcb_dependencies "rm" + sudo rm -r ${INSTALL_DIR}/src/relay/ + sudo rm ${INSTALL_DIR}/dd.py + sudo rm ${INSTALL_DIR}/relay.py + sudo rm ${INSTALL_DIR}/tfc.yml + + install_complete_qubes +} + + +function install_qubes_net { + # Qubes Networked VM installation configuration for Debian 10 domains. + create_user_data_dir + + steps_before_network_kill + qubes_net_firewall_config + + verify_files + + process_virtualenv_dependencies "python3.7 -m pip install" + sudo python3.7 -m virtualenv ${INSTALL_DIR}/venv_relay --system-site-packages + + . ${INSTALL_DIR}/venv_relay/bin/activate + sudo torsocks python3.7 -m pip install -r ${INSTALL_DIR}/requirements-relay.txt --require-hashes --no-deps + deactivate + + sudo mv ${INSTALL_DIR}/tfc.png /usr/share/pixmaps/ + sudo mv ${INSTALL_DIR}/launchers/TFC-RP-Qubes.desktop /usr/share/applications/ + sudo mv ${INSTALL_DIR}/launchers/tfc-qubes-relay /usr/bin/tfc-relay + + # Remove unnecessary files + remove_common_files "sudo" + process_virtualenv_dependencies "rm" + sudo rm -r "${INSTALL_DIR}/src/receiver/" + sudo rm -r "${INSTALL_DIR}/src/transmitter/" + sudo rm "${INSTALL_DIR}/dd.py" + sudo rm "${INSTALL_DIR}/tfc.py" + sudo rm "${INSTALL_DIR}/tfc.yml" + + install_complete_qubes +} + + +# Qubes firewall configurations + +function add_fw_rule { + # Add a firewall rule that takes effect immediately + sudo ${1} + + # Make the firewall rule persistent + echo "${1}" | sudo tee -a /rw/config/rc.local +} + + +function qubes_src_firewall_config { + # Edit Source VM's firewall rules to block all incoming connections, + # and to only allow UDP packets to Networked VM's TFC port. 
+ + # Create backup of the current rc.local file (firewall rules) + sudo mv /rw/config/rc.local{,.backup."$(date +%Y-%m-%d-%H_%M_%S)"} + + # Add firewall rules that block all incoming/outgoing connections + add_fw_rule "iptables --flush" + add_fw_rule "iptables -t filter -P INPUT DROP" + add_fw_rule "iptables -t filter -P OUTPUT DROP" + add_fw_rule "iptables -t filter -P FORWARD DROP" + + src_ip=$(sudo ifconfig eth0 | grep "inet" | cut -d: -f2 | awk '{print $2}') + net_ip=$(get_net_ip) + + # Allow export of data to the Networked VM + add_fw_rule "iptables -I OUTPUT -s ${src_ip} -d ${net_ip} -p udp --dport 2063 -j ACCEPT" + sudo chmod a+x /rw/config/rc.local + + # Store Networked VM IP address so Transmitter Program can configure itself + echo ${net_ip} > $HOME/tfc/rx_ip_addr +} + + +function qubes_dst_firewall_config { + # Edit Destination VM's firewall rules to block all outgoing connections, + # and to only allow UDP packets from Networked VM to Receiver Programs' port. + + # Create backup of the current rc.local file (firewall rules) + sudo mv /rw/config/rc.local{,.backup."$(date +%Y-%m-%d-%H_%M_%S)"} + + # Add firewall rules that block all connections + add_fw_rule "iptables --flush" + add_fw_rule "iptables -t filter -P INPUT DROP" + add_fw_rule "iptables -t filter -P OUTPUT DROP" + add_fw_rule "iptables -t filter -P FORWARD DROP" + + net_ip=$(get_net_ip) + dst_ip=$(sudo ifconfig eth0 | grep "inet" | cut -d: -f2 | awk '{print $2}') + + # Allow import of data from the Networked VM + add_fw_rule "iptables -I INPUT -s ${net_ip} -d ${dst_ip} -p udp --dport 2064 -j ACCEPT" + sudo chmod a+x /rw/config/rc.local +} + + +function qubes_net_firewall_config { + # Edit Networked VM's firewall rules to accept UDP + # packets from Source VM to the Relay Program's port. + net_ip=$(sudo ifconfig eth0 | grep "inet" | cut -d: -f2 | awk '{print $2}') + tcb_ips=$(get_tcb_ips) + src_ip=$(echo ${tcb_ips} | awk -F "|" '{print $1}') + dst_ip=$(echo ${tcb_ips} | awk -F "|" '{print $2}') + + # Store Destination VM IP address so Relay Program can configure itself + echo ${dst_ip} > $HOME/tfc/rx_ip_addr + + # Create backup of the current rc.local file (firewall rules) + sudo cp /rw/config/rc.local{,.backup."$(date +%Y-%m-%d-%H_%M_%S)"} + + # Add firewall rules + add_fw_rule "iptables -t filter -P INPUT DROP" + add_fw_rule "iptables -t filter -P OUTPUT ACCEPT" + add_fw_rule "iptables -t filter -P FORWARD DROP" + add_fw_rule "iptables -I INPUT -s ${src_ip} -d ${net_ip} -p udp --dport 2063 -j ACCEPT" # 5. Whitelist UDP packets from SRC VM to NET VM's TFC port (2063) + add_fw_rule "iptables -I OUTPUT -d ${dst_ip} -p udp ! --dport 2064 -j DROP" # 4. Blacklist all UDP packets from NET VM to DST VM that don't have destination port 2064 + add_fw_rule "iptables -I OUTPUT -d ${dst_ip} ! -p udp -j DROP" # 3. Blacklist all non-UDP packets from NET VM to DST VM + add_fw_rule "iptables -I OUTPUT ! -s ${net_ip} -d ${dst_ip} -j DROP" # 2. Blacklist all packets to DST VM that do not originate from NET VM + add_fw_rule "iptables -I OUTPUT -d ${src_ip} -p all -j DROP" # 1. Blacklist all packets to SRC VM + sudo chmod a+x /rw/config/rc.local +} + + +# Tiling terminal emulator configurations for single OS + function install_local_test { # Install TFC for local testing on a single computer. 
steps_before_network_kill @@ -361,23 +673,23 @@ function install_local_test { sudo torsocks apt install terminator -y install_virtualenv - sudo python3.7 -m virtualenv /opt/tfc/venv_tfc --system-site-packages + sudo python3.7 -m virtualenv ${INSTALL_DIR}/venv_tfc --system-site-packages - . /opt/tfc/venv_tfc/bin/activate - sudo torsocks python3.7 -m pip install -r /opt/tfc/requirements.txt --require-hashes --no-deps - sudo torsocks python3.7 -m pip install -r /opt/tfc/requirements-relay.txt --require-hashes --no-deps + . ${INSTALL_DIR}/venv_tfc/bin/activate + sudo torsocks python3.7 -m pip install -r ${INSTALL_DIR}/requirements.txt --require-hashes --no-deps + sudo torsocks python3.7 -m pip install -r ${INSTALL_DIR}/requirements-relay.txt --require-hashes --no-deps deactivate - sudo mv /opt/tfc/tfc.png /usr/share/pixmaps/ - sudo mv /opt/tfc/launchers/TFC-Local-test.desktop /usr/share/applications/ - sudo mv /opt/tfc/launchers/terminator-config-local-test /opt/tfc/ - modify_terminator_font_size "sudo" "/opt/tfc/terminator-config-local-test" + sudo mv ${INSTALL_DIR}/tfc.png /usr/share/pixmaps/ + sudo mv ${INSTALL_DIR}/launchers/TFC-Local-test.desktop /usr/share/applications/ + sudo mv ${INSTALL_DIR}/launchers/terminator-config-local-test ${INSTALL_DIR}/ + modify_terminator_font_size "sudo" "${INSTALL_DIR}/terminator-config-local-test" # Remove unnecessary files - remove_common_files "sudo" - process_tcb_dependencies "rm" - sudo rm /opt/tfc/tfc.yml - sudo rm /opt/tfc/${VIRTUALENV} + remove_common_files "sudo" + process_virtualenv_dependencies "rm" + process_tcb_dependencies "rm" + sudo rm ${INSTALL_DIR}/tfc.yml install_complete "Installation of TFC for local testing is now complete." } @@ -391,7 +703,7 @@ function install_developer { # Note that it also means, that any malicious program with # user-level privileges is also able to modify the source files. For # more secure use on a single computer, select the local testing - # install configuration. + # install configuration, or preferably use the Qubes configuration. dpkg_check create_user_data_dir @@ -426,88 +738,73 @@ function install_developer { } -function install_relay_ubuntu { - # Install TFC Relay configuration on Networked Computer. - steps_before_network_kill +# ---------------------------------------------------------------------------------------- - verify_files - create_user_data_dir +# Installation utilities - install_virtualenv - sudo python3.7 -m virtualenv /opt/tfc/venv_relay --system-site-packages - - . /opt/tfc/venv_relay/bin/activate - sudo torsocks python3.7 -m pip install -r /opt/tfc/requirements-relay.txt --require-hashes --no-deps - deactivate - - sudo mv /opt/tfc/tfc.png /usr/share/pixmaps/ - sudo mv /opt/tfc/launchers/TFC-RP.desktop /usr/share/applications/ - - # Remove unnecessary files - remove_common_files "sudo" - process_tcb_dependencies "rm" - sudo rm -r "/opt/tfc/src/receiver/" - sudo rm -r "/opt/tfc/src/transmitter/" - sudo rm "/opt/tfc/dd.py" - sudo rm "/opt/tfc/tfc.py" - sudo rm "/opt/tfc/tfc.yml" - sudo rm "/opt/tfc/${VIRTUALENV}" - - add_serial_permissions - - install_complete "Installation of the TFC Relay configuration is now complete." +function compare_digest { + # Compare the SHA512 digest of TFC file against the digest pinned in this installer. 
+ purp_digest=$(sha512sum "${INSTALL_DIR}/${2}${3}" | awk '{print $1}') + if echo ${purp_digest} | cmp -s <(echo "$1"); then + echo "OK - Pinned SHA512 hash matched file ${INSTALL_DIR}/${2}${3}" + else + echo "Error: ${INSTALL_DIR}/${2}${3} had an invalid SHA512 hash:" + echo "${purp_digest}" + echo "Expected following hash:" + echo "${1}" + exit 1 + fi } -function install_relay_tails { - # Install TFC Relay configuration on Networked Computer running - # Tails live distro (https://tails.boum.org/). - check_tails_tor_version - read_sudo_pwd +function valid_ip() { + # Validate an IP-address. (Borrowed from https://www.linuxjournal.com/content/validating-ip-address-bash-script) + local ip=$1 + local valid=1 - # Apt dependencies - t_sudo apt update - t_sudo apt install git libssl-dev python3-pip python3-tk -y || true # Ignore error in case packets can not be persistently installed + if [[ ${ip} =~ ^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$ ]]; then + OIFS=$IFS + IFS='.' + ip=(${ip}) + IFS=${OIFS} + [[ ${ip[0]} -le 255 && ${ip[1]} -le 255 && ${ip[2]} -le 255 && ${ip[3]} -le 255 ]] + valid=$? + fi + return ${valid} +} - torsocks git clone --depth 1 https://github.com/maqp/tfc.git "${HOME}/tfc" - t_sudo mv "${HOME}/tfc/ /opt/tfc/" - t_sudo chown -R root /opt/tfc/ - verify_tcb_requirements_files - verify_files +function get_net_ip { + # Get the IP-address of the Networker VM from the user. + ip=$(zenity --entry --title="TFC Installer" --text="Enter the IP-address of the Networked Computer VM:") + if valid_ip ${ip}; then + echo ${ip} + return + else + zenity --info --title='TFC installer' --text='Error: Invalid IP' + get_net_ip + fi +} - create_user_data_dir - install_tails_setuptools +function get_tcb_ips { + # Get the Source and Destination VM IP-addresses from the user. + ips=$(zenity --forms \ + --title="TFC Installer" \ + --text="Enter the IP-addresses of the TCB VMs" \ + --add-entry="Source Computer VM IP:" \ + --add-entry="Destination Computer VM IP:") - torsocks python3.7 -m pip download --no-cache-dir -r "/opt/tfc/requirements-venv.txt" --require-hashes --no-deps -d "${HOME}/" - torsocks python3.7 -m pip download --no-cache-dir -r "/opt/tfc/requirements-relay-tails.txt" --require-hashes --no-deps -d "${HOME}/" + first_ip=$(echo ${ips} | awk -F "|" '{print $1}') + second_ip=$(echo ${ips} | awk -F "|" '{print $2}') - move_tails_dependencies - verify_tails_dependencies - - t_sudo python3.7 -m pip install /opt/tfc/${VIRTUALENV} - t_sudo python3.7 -m virtualenv /opt/tfc/venv_relay --system-site-packages - - . /opt/tfc/venv_relay/bin/activate - process_tails_dependencies "python3.7 -m pip install" - deactivate - - # Complete setup - t_sudo mv /opt/tfc/tfc.png /usr/share/pixmaps/ - t_sudo mv /opt/tfc/launchers/TFC-RP-Tails.desktop /usr/share/applications/ - t_sudo mv /opt/tfc/tfc.yml /etc/onion-grater.d/ - - remove_common_files "t_sudo" - process_tails_dependencies "rm" - - t_sudo rm "/opt/tfc/${VIRTUALENV}" - t_sudo rm -r "/opt/tfc/src/receiver/" - t_sudo rm -r "/opt/tfc/src/transmitter/" - t_sudo rm "/opt/tfc/dd.py" - t_sudo rm "/opt/tfc/tfc.py" - - install_complete "Installation of the TFC Relay configuration is now complete." + if valid_ip ${first_ip} && valid_ip ${second_ip}; then + echo ${ips} + return + else + zenity --info --title='TFC installer' --text='Error: Invalid IP' + get_tcb_ips + fi } @@ -517,27 +814,17 @@ function t_sudo { } -function install_relay { - # Determine the Networked Computer OS for Relay Program installation. 
- if [[ $(grep "Tails" /etc/os-release 2>/dev/null) ]]; then - install_relay_tails - else - install_relay_ubuntu - fi -} - - function install_virtualenv { - # Some distros want virtualenv installed as sudo and other do - # not. Install both to improve the chances of compatibility. - sudo torsocks python3.7 -m pip install -r /opt/tfc/requirements-venv.txt --require-hashes --no-deps - torsocks python3.7 -m pip install -r /opt/tfc/requirements-venv.txt --require-hashes --no-deps + # Some distros want virtualenv installed as sudo and other don't. + # Install as both users to improve the chances of compatibility. + sudo torsocks python3.7 -m pip install -r ${INSTALL_DIR}/requirements-venv.txt --require-hashes --no-deps + torsocks python3.7 -m pip install -r ${INSTALL_DIR}/requirements-venv.txt --require-hashes --no-deps } function read_sudo_pwd { - # Cache the sudo password so that Debian doesn't keep asking for it - # during the installation (it won't be stored on disk). + # Cache the sudo password so that Debian doesn't keep asking + # for it during the installation (it won't be stored on disk). read -s -p "[sudo] password for ${USER}: " sudo_pwd until (t_sudo echo '' 2>/dev/null) do @@ -548,24 +835,11 @@ function read_sudo_pwd { } -function check_tails_tor_version { - # Check that the Tails distro is running Tor 0.3.5 or newer. - included=($(tor --version |awk '{print $3}' |head -c 5)) - required="0.3.5" - - if ! [[ "$(printf '%s\n' "$required" "$included" | sort -V | head -n1)" = "$required" ]]; then - clear - echo -e "\nError: This Tails includes Tor $included but Tor $required is required. Exiting.\n" 1>&2 - exit 1 - fi -} - - function kill_network { # Kill network interfaces to protect the TCB from remote compromise. for interface in /sys/class/net/*; do name=$(basename "${interface}") - if [[ $name != "lo" ]]; then + if [[ ${name} != "lo" ]]; then echo "Disabling network interface ${name}" sudo ifconfig "${name}" down fi @@ -608,24 +882,6 @@ function add_serial_permissions { } -function c_echo { - # Justify printed text to the center of the terminal. - printf "%*s\n" "$(( ( $(echo "${1}" | wc -c ) + 80 ) / 2 ))" "${1}" -} - - -function check_rm_existing_installation { - # Remove TFC installation directory if TFC is already installed. - if [[ -d "/opt/tfc" ]]; then - if [[ ${sudo_pwd} ]]; then - t_sudo rm -r /opt/tfc # Tails - else - sudo rm -r /opt/tfc # *buntu - fi - fi -} - - function create_user_data_dir { # Backup TFC user data directory if it exists and has files in it. if [[ -d "$HOME/tfc" ]]; then @@ -638,7 +894,7 @@ function create_user_data_dir { function modify_terminator_font_size { - # Adjust terminator font size for local testing configurations. + # Adjust terminator font size for tiling terminal emulator configurations. # # The default font sizes in terminator config file are for 1920px # wide screens. The lowest resolution (width) supported is 1366px. @@ -660,6 +916,24 @@ function get_screen_width { } +# Printing functions + +function c_echo { + # Justify printed text to the center of the terminal. + printf "%*s\n" "$(( ( $(echo "${1}" | wc -c ) + 80 ) / 2 ))" "${1}" +} + + +function exit_with_message { + # Print error message and exit the installer with flag 1. + clear + echo '' + c_echo "Error: $* Exiting." 1>&2 + echo '' + exit 1 +} + + function install_complete { # Notify the user that the installation is complete. 
clear @@ -674,9 +948,52 @@ function install_complete { } +function install_complete_qubes { + # Notify the user that the installation for Qubes VM is complete. + clear + c_echo '' + c_echo "Installation of TFC on this Qube is now complete." + c_echo '' + c_echo "Press any key to close the installer." + read -n 1 -s -p '' + clear + + kill -9 $PPID +} + + +function arg_error { + # Print help message if the user launches the + # installer with missing or invalid argument. + clear + echo -e "\nUsage: bash install.sh [OPTION]\n" + echo "Mandatory arguments" + echo " tcb Install Transmitter/Receiver Program (Debian 10 / PureOS 9.0+ *buntu 19.10+ / LMDE 4)" + echo " relay Install Relay Program (Debian 10 / PureOS 9.0+ *buntu 19.10+ / LMDE 4 / Tails 4.0+)" + echo -e " local Install insecure local testing mode (Debian 10 / PureOS 9.0+ *buntu 19.10+ / LMDE 4)\n" + echo " qsrc Install Transmitter Program (Qubes 4.0.3)" + echo " qdst Install Receiver Program (Qubes 4.0.3)" + echo -e " qnet Install Relay Program (Qubes 4.0.3)\n" + exit 1 +} + + +# Pre-install checks + +function check_rm_existing_installation { + # Remove TFC installation directory if TFC is already installed. + if [[ -d "${INSTALL_DIR}" ]]; then + if [[ ${sudo_pwd} ]]; then + t_sudo rm -r ${INSTALL_DIR} # Tails + else + sudo rm -r ${INSTALL_DIR} # Debian etc. + fi + fi +} + + function dpkg_check { - # Check if the software manager is busy, and if, wait until it - # completes. + # Check if the software manager is busy, and if, wait until it completes. i=0 tput sc while sudo fuser /var/lib/dpkg/lock >/dev/null 2>&1 ; do @@ -695,16 +1012,11 @@ function dpkg_check { } -function arg_error { - # Print help message if the user launches the installer with missing - # or invalid argument. - clear - echo -e "\nUsage: bash install.sh [OPTION]\n" - echo "Mandatory arguments" - echo " tcb Install Transmitter/Receiver Program (*buntu 19.10+ / Debian 10 / PureOS 9.0+ )" - echo " relay Install Relay Program (*buntu 19.10+ / Debian 10 / PureOS 9.0+ / Tails 4.0+)" - echo -e " local Install insecure local testing mode (*buntu 19.10+ / Debian 10 / PureOS 9.0+ )\n" - exit 1 +function architecture_check { + # Check that the OS is 64-bit, and not 32-bit. + if ! [[ "$(uname -m 2>/dev/null | grep x86_64)" ]]; then + exit_with_message "Invalid system architecture." + fi } @@ -740,23 +1052,7 @@ function sudoer_check { } -function architecture_check { - # Check that the OS is 64-bit, and not 32-bit. - if ! [[ "$(uname -m 2>/dev/null | grep x86_64)" ]]; then - exit_with_message "Invalid system architecture." - fi -} - - -function exit_with_message { - # Print error message and exit the installer with flag 1. - clear - echo '' - c_echo "Error: $* Exiting." 
1>&2 - echo '' - exit 1 -} - +# Main routine set -e architecture_check @@ -765,9 +1061,13 @@ sudoer_check sudo_pwd='' case $1 in - tcb ) install_tcb;; - relay ) install_relay;; - local ) install_local_test;; - dev ) install_developer;; - * ) arg_error;; + tcb ) install_tcb;; + relay ) install_relay;; + tails ) install_relay_tails;; + local ) install_local_test;; + qsrc ) install_qubes_src;; + qdst ) install_qubes_dst;; + qnet ) install_qubes_net;; + dev ) install_developer;; + * ) arg_error;; esac diff --git a/install.sh.asc b/install.sh.asc index 6542c4e..11d3bf3 100644 --- a/install.sh.asc +++ b/install.sh.asc @@ -1,16 +1,16 @@ -----BEGIN PGP SIGNATURE----- -iQIzBAABCAAdFiEEE3wqdU+qbbuozmTV+rAVyyKvL4QFAl42DPoACgkQ+rAVyyKv -L4S3Gg/+OujW2IlEDBpxd97jPRRH1L3UZ3tHOV2VuV5hIukkblOLx1UZJbWWL/VC -/Q4Yd9Xi6f58Jwz/f7RIFBzp1xNa6rEcTYT6CBTvzsyxDyrUQQVgGzdJhuYHqoRk -j6b8SLuxnafEEtVjgESoy0Ei5bSgs9l4aZU/Jd86ClUI0yF4SWeh562UWGHXObVJ -/RtjrpnKn6OnIVY5QvbYOpTk2Q3dd0sz26/pxykptselzN+2kFCl+4mtu5oT1bkx -+c33lp3ihyJyNpEkEqISudtfR5FfQlq5ZbQRL9p77Y9e4ePUG6wlUN0dlm7oXS2/ -uS1Y1+U1wQNcMjisOo/bZs9wPzatfP9cl9I5DCl1vogMheSYKuORR3kc0FR+cAuX -/J0KryZrMP3kK43eM4LzHdoimGyX6D79Wdy2cPZ70QpKYrCDjSawanmX1ZxxPblD -HfmHnJ0Inc1o85lf5l/PYy3xLQrQbBuUIlctBFWbpW7XdUqKS9HdVqzxGnbOAJnP -C59O3EpkiMqV3I9zn3e85wMzKy4xrbrm/asl+S97BdHzZf8xGdvBRBwYK1OFfzKL -7fqxJDfkkOoTyrC0vhO+mbm2ktyR1oOjRCsEitXWA1sYmz1x+NbuaMQoiwpnefFG -JG1EYzokYahZFTz3NrfG2IK+2vOr7TV6KlWasQLKboleiMNQEZw= -=v3mk +iQIzBAABCAAdFiEEE3wqdU+qbbuozmTV+rAVyyKvL4QFAl42GLEACgkQ+rAVyyKv +L4SRQA//ccJj0h7tRE9kVu0Txi7BXDBzUQCD7c8yhTqRxoGTTzRj1bHrBDDeP2e/ +pd2c5MQLNFE26pnGxhpvVgEFfSWSxQxxs5BgPhFAj9V1Bn1CrrBJYueuupBo8An3 +VdRzqArBljGgScGOPXECeTwldoWY5ugtxREwQlBL7JCix9wmq4/yghzE61YdN5K3 +tx+WVj4Y06SWZAni6nssYiBYrToAslAgTlyAtaCYJccOUpHgnsyfqzZLH7+a/6Lz +NiHTJHm9zsZ6KzzpgnhDNMhTlZK9m2fuwdVMU6JjScNZA9gswTdKyi8kPeVpm/1g +m0LZAxOLcZuMKNdG/Wrtm/174yFIURoOmg7rF8m1FKHwvLQa2+FICICx7CLBiASA +Z+vVzfI7py97/hiVTNFDTlKENk4kS9Auhaf5pI6f2v/ehKXYnTYc8sLSUk5MYVWI +06ZmMJ3cvD4P/NPr7nCDT9WHUx+qKMnSQirQ86/wSxK3KcjE9Fu8Q8AXTYVZSN11 +xtCCtDkrd6TbxTwl5K54syoerg9PqkiWnRmf0gi00LuoJExg8i4Td2jBMVpxRJhi +KGGIj2GhexiB/slyz2kEZsmIkZr+dMqHTxoQwSoop9Ev0GHjkgkGa10LxxoRAzUg +x0A1+8TJo1dOs8+GD5qN6N68ZyMQhlAmp5b2EED0lrbQVRkZEig= +=/l6F -----END PGP SIGNATURE----- diff --git a/launchers/TFC-Dev.desktop b/launchers/TFC-Dev.desktop index df3ee9a..b0e3e6c 100755 --- a/launchers/TFC-Dev.desktop +++ b/launchers/TFC-Dev.desktop @@ -1,5 +1,5 @@ [Desktop Entry] -Version=1.20.02 +Version=1.20.03 Name=TFC-Dev-LR Comment=Developer configuration Exec=terminator -m -u -g $HOME/tfc/launchers/terminator-config-dev -p tfc -l tfc-lr diff --git a/launchers/TFC-Local-test.desktop b/launchers/TFC-Local-test.desktop index aa8bf9e..8d33beb 100755 --- a/launchers/TFC-Local-test.desktop +++ b/launchers/TFC-Local-test.desktop @@ -1,5 +1,5 @@ [Desktop Entry] -Version=1.20.02 +Version=1.20.03 Name=TFC-Local-Test-LR Comment=Local testing configuration Exec=terminator -m -u -g /opt/tfc/terminator-config-local-test -p tfc -l tfc-lr diff --git a/launchers/TFC-RP-Qubes.desktop b/launchers/TFC-RP-Qubes.desktop new file mode 100755 index 0000000..616f6d1 --- /dev/null +++ b/launchers/TFC-RP-Qubes.desktop @@ -0,0 +1,8 @@ +[Desktop Entry] +Version=1.20.03 +Name=TFC-Relay +Exec=gnome-terminal --geometry=94x25 -x bash -c "source /opt/tfc/venv_relay/bin/activate && python3.7 /opt/tfc/relay.py -q && deactivate || bash" +Icon=tfc.png +Terminal=false +Type=Application +Categories=Network;Messaging;Security; diff --git a/launchers/TFC-RP-Tails.desktop 
b/launchers/TFC-RP-Tails.desktop index 698ab38..f56b026 100755 --- a/launchers/TFC-RP-Tails.desktop +++ b/launchers/TFC-RP-Tails.desktop @@ -1,7 +1,7 @@ [Desktop Entry] -Version=1.20.02 +Version=1.20.03 Name=TFC-Relay -Exec=gnome-terminal -x bash -c "cd /opt/tfc && source venv_relay/bin/activate && python3.7 'relay.py' && deactivate || bash" +Exec=gnome-terminal --geometry=105x25 -x bash -c "cd /opt/tfc && source venv_relay/bin/activate && python3.7 'relay.py' && deactivate || bash" Icon=tfc.png Terminal=false Type=Application diff --git a/launchers/TFC-RP.desktop b/launchers/TFC-RP.desktop index 698ab38..f56b026 100755 --- a/launchers/TFC-RP.desktop +++ b/launchers/TFC-RP.desktop @@ -1,7 +1,7 @@ [Desktop Entry] -Version=1.20.02 +Version=1.20.03 Name=TFC-Relay -Exec=gnome-terminal -x bash -c "cd /opt/tfc && source venv_relay/bin/activate && python3.7 'relay.py' && deactivate || bash" +Exec=gnome-terminal --geometry=105x25 -x bash -c "cd /opt/tfc && source venv_relay/bin/activate && python3.7 'relay.py' && deactivate || bash" Icon=tfc.png Terminal=false Type=Application diff --git a/launchers/TFC-RxP-Qubes.desktop b/launchers/TFC-RxP-Qubes.desktop new file mode 100755 index 0000000..8e669e3 --- /dev/null +++ b/launchers/TFC-RxP-Qubes.desktop @@ -0,0 +1,8 @@ +[Desktop Entry] +Version=1.20.03 +Name=TFC-Receiver +Exec=gnome-terminal --geometry=94x25 -x bash -c "source /opt/tfc/venv_tcb/bin/activate && python3.7 /opt/tfc/tfc.py -r -q && deactivate || bash" +Icon=tfc.png +Terminal=false +Type=Application +Categories=Network;Messaging;Security; diff --git a/launchers/TFC-RxP.desktop b/launchers/TFC-RxP.desktop index 471a8ea..c06a38c 100755 --- a/launchers/TFC-RxP.desktop +++ b/launchers/TFC-RxP.desktop @@ -1,5 +1,5 @@ [Desktop Entry] -Version=1.20.02 +Version=1.20.03 Name=TFC-Receiver Exec=gnome-terminal --maximize -x bash -c "cd /opt/tfc && source venv_tcb/bin/activate && python3.7 'tfc.py' -r && deactivate || bash" Icon=tfc.png diff --git a/launchers/TFC-TxP-Qubes.desktop b/launchers/TFC-TxP-Qubes.desktop new file mode 100755 index 0000000..d356701 --- /dev/null +++ b/launchers/TFC-TxP-Qubes.desktop @@ -0,0 +1,8 @@ +[Desktop Entry] +Version=1.20.03 +Name=TFC-Transmitter +Exec=gnome-terminal --geometry=94x25 -x bash -c "source /opt/tfc/venv_tcb/bin/activate && python3.7 /opt/tfc/tfc.py -q && deactivate || bash" +Icon=tfc.png +Terminal=false +Type=Application +Categories=Network;Messaging;Security; diff --git a/launchers/TFC-TxP.desktop b/launchers/TFC-TxP.desktop index 3347c8d..7ce8057 100755 --- a/launchers/TFC-TxP.desktop +++ b/launchers/TFC-TxP.desktop @@ -1,5 +1,5 @@ [Desktop Entry] -Version=1.20.02 +Version=1.20.03 Name=TFC-Transmitter Exec=gnome-terminal --maximize -x bash -c "cd /opt/tfc && source venv_tcb/bin/activate && python3.7 'tfc.py' && deactivate || bash" Icon=tfc.png diff --git a/launchers/tfc-qubes-receiver b/launchers/tfc-qubes-receiver new file mode 100755 index 0000000..4ad0542 --- /dev/null +++ b/launchers/tfc-qubes-receiver @@ -0,0 +1,19 @@ +#!/usr/bin/env bash + +# TFC - Onion-routed, endpoint secure messaging system +# Copyright (C) 2013-2020 Markus Ottela +# +# This file is part of TFC. +# +# TFC is free software: you can redistribute it and/or modify it under the terms +# of the GNU General Public License as published by the Free Software Foundation, +# either version 3 of the License, or (at your option) any later version. 
+# +# TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; +# without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR +# PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with TFC. If not, see . + +cd /opt/tfc/ && source venv_tcb/bin/activate && python3.7 tfc.py -q -r diff --git a/launchers/tfc-qubes-relay b/launchers/tfc-qubes-relay new file mode 100755 index 0000000..ed17aa8 --- /dev/null +++ b/launchers/tfc-qubes-relay @@ -0,0 +1,19 @@ +#!/usr/bin/env bash + +# TFC - Onion-routed, endpoint secure messaging system +# Copyright (C) 2013-2020 Markus Ottela +# +# This file is part of TFC. +# +# TFC is free software: you can redistribute it and/or modify it under the terms +# of the GNU General Public License as published by the Free Software Foundation, +# either version 3 of the License, or (at your option) any later version. +# +# TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; +# without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR +# PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with TFC. If not, see . + +cd /opt/tfc/ && source venv_relay/bin/activate && python3.7 relay.py -q diff --git a/launchers/tfc-qubes-transmitter b/launchers/tfc-qubes-transmitter new file mode 100755 index 0000000..c93208b --- /dev/null +++ b/launchers/tfc-qubes-transmitter @@ -0,0 +1,19 @@ +#!/usr/bin/env bash + +# TFC - Onion-routed, endpoint secure messaging system +# Copyright (C) 2013-2020 Markus Ottela +# +# This file is part of TFC. +# +# TFC is free software: you can redistribute it and/or modify it under the terms +# of the GNU General Public License as published by the Free Software Foundation, +# either version 3 of the License, or (at your option) any later version. +# +# TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; +# without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR +# PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with TFC. If not, see . 
+ +cd /opt/tfc/ && source venv_tcb/bin/activate && python3.7 tfc.py -q diff --git a/relay.py b/relay.py index 1090898..19456b3 100644 --- a/relay.py +++ b/relay.py @@ -139,9 +139,9 @@ def main() -> None: ensure_dir(working_dir) os.chdir(working_dir) - _, local_test, data_diode_sockets = process_arguments() + _, local_test, data_diode_sockets, qubes = process_arguments() - gateway = Gateway(NC, local_test, data_diode_sockets) + gateway = Gateway(NC, local_test, data_diode_sockets, qubes) print_title(NC) @@ -167,7 +167,7 @@ def main() -> None: ONION_KEY_QUEUE: Queue(), # Onion Service private key from `relay_command` to `onion_service` TOR_DATA_QUEUE: Queue(), # Open port for Tor from `onion_service` to `client_scheduler` EXIT_QUEUE: Queue(), # EXIT/WIPE signal from `relay_command` to `main` - ACCOUNT_CHECK_QUEUE: Queue(), # Incorrectly typed accounts from `src_incomfing` to `account_checker` + ACCOUNT_CHECK_QUEUE: Queue(), # Incorrectly typed accounts from `src_incoming` to `account_checker` ACCOUNT_SEND_QUEUE: Queue(), # Contact requests from `flask_server` to `account_checker` USER_ACCOUNT_QUEUE: Queue(), # User's public key from `onion_service` to `account_checker` PUB_KEY_CHECK_QUEUE: Queue(), # Typed public keys from `src_incoming` to `pub_key_checker` @@ -184,7 +184,7 @@ def main() -> None: Process(target=flask_server, args=(queues, url_token_public_key )), Process(target=onion_service, args=(queues, )), Process(target=relay_command, args=(queues, gateway, )), - Process(target=account_checker, args=(queues, sys.stdin.fileno())), + Process(target=account_checker, args=(queues, sys.stdin.fileno() )), Process(target=pub_key_checker, args=(queues, local_test ))] for p in process_list: diff --git a/requirements-dev.txt b/requirements-dev.txt index 6d66178..693b5b9 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,57 +1,72 @@ -# Static type checking tool +# argon2_cffi +six>=1.14.0 +pycparser>=2.20 +cffi>=1.14.0 +argon2_cffi>=19.2.0 + +# cryptography (pyca) +cryptography>=2.8 + +# Flask +Werkzeug>=1.0.0 +MarkupSafe>=1.1.1 +Jinja2>=2.11.1 +itsdangerous>=1.1.0 +click>=7.0 +Flask>=1.1.1 + +# mypy static type checking tool +typing-extensions>=3.7.4.1 +typed-ast>=1.4.1 +mypy-extensions>=0.4.3 mypy>=0.761 -mypy_extensions>=0.4.3 -typed_ast>=1.4.1 -typing_extensions>=3.7.4.1 -# Unit test tools -pytest>=5.3.5 -pytest-cov>=2.8.1 -pytest-xdist>=1.31.0 +# PyLama +pyflakes>=2.1.1 +snowballstemmer>=2.0.0 +pydocstyle>=5.0.2 +pycodestyle>=2.5.0 +mccabe>=0.6.1 +pylama>=7.7.1 -# TFC dependencies (note: not authenticated with hashes) +# PyNaCl (pyca) +setuptools>=45.2.0 +PyNaCl>=1.3.0 # pyserial pyserial>=3.4 -# argon2_cffi -argon2_cffi>=19.2.0 -cffi>=1.13.2 -pycparser>=2.19 -six>=1.14.0 +# PySocks +PySocks>=1.7.1 -# pyca/pynacl -PyNaCl>=1.3.0 -setuptools>=45.1.0 +# pytest +wcwidth>=0.1.8 +py>=1.8.1 +pluggy>=0.13.1 +pyparsing>=2.4.6 +packaging>=20.3 +more-itertools>=8.2.0 +zipp>=3.1.0 +importlib-metadata>=1.5.0 +attrs>=19.3.0 +pytest>=5.3.5 -# pyca/cryptography -cryptography>=2.8 +# pytest-cov +coverage>=5.0.3 +pytest-cov>=2.8.1 + +# xdist: pytest distributed testing plugin +pytest-forked>=1.1.3 +apipkg>=1.5 +execnet>=1.7.1 +pytest-xdist>=1.31.0 + +# Requests +requests>=2.23.0 +certifi>=2019.11.28 +chardet>=3.0.4 +idna>=2.9 +urllib3>=1.25.8 # Stem stem>=1.8.0 - -# PySocks -pysocks>=1.7.1 - -# Requests -requests>=2.22.0 -certifi>=2019.11.28 -chardet>=3.0.4 -idna>=2.8 -urllib3>=1.25.8 - -# Flask -flask>=1.1.1 -click>=7.0 -itsdangerous>=1.1.0 -jinja2>=2.11.1 -markupsafe>=1.1.1 -werkzeug>=0.16.1 
- -# PyLama -pylama>=7.7.1 -snowballstemmer>=2.0.0 -pyflakes>=2.1.1 -pydocstyle>=5.0.2 -pycodestyle>=2.5.0 -mccabe>=0.6.1 diff --git a/requirements-relay-tails.txt b/requirements-relay-tails.txt index afd1a27..4c5af6e 100644 --- a/requirements-relay-tails.txt +++ b/requirements-relay-tails.txt @@ -4,16 +4,16 @@ pyserial==3.4 --hash=sha512:8333ac2843fd136d5d0d63b527b37866f7d18afc3bb33c4938b63af077492aeb118eb32a89ac78547f14d59a2adb1e5d00728728275de62317da48dadf6cdff9 # Stem (Connects to Tor and manages Onion Services) -stem==1.8.0 --hash=sha512:aa2033567b79aef960f8321e4c6cbc28105c59d6513ff49a9f12509d8f97b1a2e8a3b04dc28abb07fad59b0f6ba66443b92bbefa0d08b26038bbaf24f7f2846d +# stem==1.8.0 --hash=sha512:aa2033567b79aef960f8321e4c6cbc28105c59d6513ff49a9f12509d8f97b1a2e8a3b04dc28abb07fad59b0f6ba66443b92bbefa0d08b26038bbaf24f7f2846d # PySocks (Routes requests library through SOCKS5 proxy making Onion Service connections possible) pysocks==1.7.1 --hash=sha512:313b954102231d038d52ab58f41e3642579be29f827135b8dd92c06acb362effcb0a7fd5f35de9273372b92d9fe29f38381ae44f8b41aa90d2564d6dd07ecd12 # Requests (Connects to the contact's Tor Onion Service) -requests==2.22.0 --hash=sha512:9186ce4e39bb64f5931a205ffc9afac61657bc42078bc4754ed12a2b66a12b7a620583440849fc2e161d1061ac0750ddef4670f54916931ace1e9abd2a9fb09c +requests==2.23.0 --hash=sha512:98e4c9435434b8f63fc37a21133adbbfeb471bfb8b40d60f04bded5cbe328c14a22527d54ab2a55a81d93110d627bacc26943e55ec338b7bed8708b55e15fff3 certifi==2019.11.28 --hash=sha512:fe5b05c29c1e1d9079150aaea28b09d84f0dd15907e276ccabb314433cfaac948a9615e10d6d01cbd537f99eed8072fbda7cb901e932fbab4f1286ae8c50471b chardet==3.0.4 --hash=sha512:bfae58c8ea19c87cc9c9bf3d0b6146bfdb3630346bd954fe8e9f7da1f09da1fc0d6943ff04802798a665ea3b610ee2d65658ce84fe5a89f9e93625ea396a17f4 -idna==2.8 --hash=sha512:fb07dbec1de86efbad82a4f73d98123c59b083c1f1277445204bef75de99ca200377ad2f1db8924ae79b31b3dd984891c87d0a6344ec4d07a0ddbbbc655821a3 +idna==2.9 --hash=sha512:be96b782728404acec374f446b11811f8e76d5ed42d4673a07e883220f5ba2a099a8124cda5898c3f5da7d92b87b36127e8fd42e9edb240b587a380ed73cce93 urllib3==1.25.8 --hash=sha512:f7fd3b54b7c555c0e74eb445e543763d233b5c6f8021ccf46a45d452c334953276d43ecd8f3d0eafefa35103a7d1874e291216fc9a41362eb6f1250a2a670f16 # Flask (Onion Service web server that serves TFC public keys and ciphertexts to contacts) @@ -22,12 +22,13 @@ click==7.0 --hash=sha512:6b30987349df7c45c5f41cff9076ed45b178b444fca1ab itsdangerous==1.1.0 --hash=sha512:891c294867f705eb9c66274bd04ac5d93140d6e9beea6cbf9a44e7f9c13c0e2efa3554bdf56620712759a5cd579e112a782d25f3f91ba9419d60b2b4d2bc5b7c jinja2==2.11.1 --hash=sha512:461bbd517560f1c4dbf7309bdf0cf33b468938fddfa2c3385fab07343269732d8ce68d8827148645113267d48e7d67b03f1663cc64839dd1fcec723ea606aaf4 markupsafe==1.1.1 --hash=sha512:69e9b9c9ac4fdf3cfa1a3de23d14964b843989128f8cc6ea58617fc5d6ef937bcc3eae9cb32b5164b5f54b06f96bdff9bc249529f20671cc26adc9e6ce8f6bec -werkzeug==0.16.1 --hash=sha512:4c982970fef39bf7cfbb4e516864fec0f8ec3f743ccb632d1659c6ee415597d98f4abd63b5c0fd999eb43fc0c89a97123f07625b01ea86b02ef51cb67a2b148d +werkzeug==1.0.0 --hash=sha512:82a0f1776820d07e929daa60bfa0a3e746464b0f2923376330f8ae5abf535bcb756c7384757b2ff8e0076f299fe85d96ef34b3a8eede21c11df9aba8cc58cb77 # Cryptography (Handles URL token derivation) -cryptography==2.8 --hash=sha512:184003c89fee74892de25c3e5ec366faea7a5f1fcca3c82b0d5e5f9f797286671a820ca54da5266d6f879ab342c97e25bce9db366c5fb1178690cd5978d4d622 -cffi==1.13.2 
--hash=sha512:b8753a0435cc7a2176f8748badc074ec6ffab6698d6be42b1770c85871f85aa7cf60152a8be053c3031b234a286c5cef07267cb812accb704783d74a2675ed3b -pycparser==2.19 --hash=sha512:7f830e1c9066ee2d297a55e2bf6db4bf6447b6d9da0145d11a88c3bb98505755fb7986eafa6e06ae0b7680838f5e5d6a6d188245ca5ad45c2a727587bac93ab5 +cryptography==2.8 --hash=sha512:184003c89fee74892de25c3e5ec366faea7a5f1fcca3c82b0d5e5f9f797286671a820ca54da5266d6f879ab342c97e25bce9db366c5fb1178690cd5978d4d622 \ + --hash=sha512:d8ddabe127ae8d7330d219e284de68b37fa450a27b4cf05334e9115388295b00148d9861c23b1a2e5ea9df0c33a2d27f3e4b25ce9abd3c334f1979920b19c902 +cffi==1.14.0 --hash=sha512:5b315a65fc8f40622ceef35466546620aaca9dd304f5491a845239659b4066469c5fb3f1683c382eb57f8975caf318e5d88852e3dbb049cde193c9189b88c9c0 +pycparser==2.20 --hash=sha512:06dc9cefdcde6b97c96d0452a77db42a629c48ee545edd7ab241763e50e3b3c56d21f9fcce4e206817aa1a597763d948a10ccc73572490d739c89eea7fede0a1 six==1.14.0 --hash=sha512:a6e7e35921ce8f2f8e79a296ea79a9c3515ff6dd7e777d7892fe4988594f1b3a442a68ffb89cf64530b90a32ceeea00e4ab9069bb697629ab4eb7262c68d1b0f # PyNaCl (Derives TFC account from Onion Service private key) diff --git a/requirements-relay.txt b/requirements-relay.txt index 4003200..8e8f879 100644 --- a/requirements-relay.txt +++ b/requirements-relay.txt @@ -10,10 +10,10 @@ stem==1.8.0 --hash=sha512:aa2033567b79aef960f8321e4c6cbc28105c59d6513ff4 pysocks==1.7.1 --hash=sha512:313b954102231d038d52ab58f41e3642579be29f827135b8dd92c06acb362effcb0a7fd5f35de9273372b92d9fe29f38381ae44f8b41aa90d2564d6dd07ecd12 # Requests (Connects to the contact's Tor Onion Service) -requests==2.22.0 --hash=sha512:9186ce4e39bb64f5931a205ffc9afac61657bc42078bc4754ed12a2b66a12b7a620583440849fc2e161d1061ac0750ddef4670f54916931ace1e9abd2a9fb09c +requests==2.23.0 --hash=sha512:98e4c9435434b8f63fc37a21133adbbfeb471bfb8b40d60f04bded5cbe328c14a22527d54ab2a55a81d93110d627bacc26943e55ec338b7bed8708b55e15fff3 certifi==2019.11.28 --hash=sha512:fe5b05c29c1e1d9079150aaea28b09d84f0dd15907e276ccabb314433cfaac948a9615e10d6d01cbd537f99eed8072fbda7cb901e932fbab4f1286ae8c50471b chardet==3.0.4 --hash=sha512:bfae58c8ea19c87cc9c9bf3d0b6146bfdb3630346bd954fe8e9f7da1f09da1fc0d6943ff04802798a665ea3b610ee2d65658ce84fe5a89f9e93625ea396a17f4 -idna==2.8 --hash=sha512:fb07dbec1de86efbad82a4f73d98123c59b083c1f1277445204bef75de99ca200377ad2f1db8924ae79b31b3dd984891c87d0a6344ec4d07a0ddbbbc655821a3 +idna==2.9 --hash=sha512:be96b782728404acec374f446b11811f8e76d5ed42d4673a07e883220f5ba2a099a8124cda5898c3f5da7d92b87b36127e8fd42e9edb240b587a380ed73cce93 urllib3==1.25.8 --hash=sha512:f7fd3b54b7c555c0e74eb445e543763d233b5c6f8021ccf46a45d452c334953276d43ecd8f3d0eafefa35103a7d1874e291216fc9a41362eb6f1250a2a670f16 # Flask (Onion Service web server that serves TFC public keys and ciphertexts to contacts) @@ -22,15 +22,16 @@ click==7.0 --hash=sha512:6b30987349df7c45c5f41cff9076ed45b178b444fca1ab itsdangerous==1.1.0 --hash=sha512:891c294867f705eb9c66274bd04ac5d93140d6e9beea6cbf9a44e7f9c13c0e2efa3554bdf56620712759a5cd579e112a782d25f3f91ba9419d60b2b4d2bc5b7c jinja2==2.11.1 --hash=sha512:461bbd517560f1c4dbf7309bdf0cf33b468938fddfa2c3385fab07343269732d8ce68d8827148645113267d48e7d67b03f1663cc64839dd1fcec723ea606aaf4 markupsafe==1.1.1 --hash=sha512:69e9b9c9ac4fdf3cfa1a3de23d14964b843989128f8cc6ea58617fc5d6ef937bcc3eae9cb32b5164b5f54b06f96bdff9bc249529f20671cc26adc9e6ce8f6bec -werkzeug==0.16.1 --hash=sha512:4c982970fef39bf7cfbb4e516864fec0f8ec3f743ccb632d1659c6ee415597d98f4abd63b5c0fd999eb43fc0c89a97123f07625b01ea86b02ef51cb67a2b148d 
+werkzeug==1.0.0 --hash=sha512:82a0f1776820d07e929daa60bfa0a3e746464b0f2923376330f8ae5abf535bcb756c7384757b2ff8e0076f299fe85d96ef34b3a8eede21c11df9aba8cc58cb77 # Cryptography (Handles URL token derivation) -cryptography==2.8 --hash=sha512:184003c89fee74892de25c3e5ec366faea7a5f1fcca3c82b0d5e5f9f797286671a820ca54da5266d6f879ab342c97e25bce9db366c5fb1178690cd5978d4d622 -cffi==1.13.2 --hash=sha512:b8753a0435cc7a2176f8748badc074ec6ffab6698d6be42b1770c85871f85aa7cf60152a8be053c3031b234a286c5cef07267cb812accb704783d74a2675ed3b -pycparser==2.19 --hash=sha512:7f830e1c9066ee2d297a55e2bf6db4bf6447b6d9da0145d11a88c3bb98505755fb7986eafa6e06ae0b7680838f5e5d6a6d188245ca5ad45c2a727587bac93ab5 +cryptography==2.8 --hash=sha512:184003c89fee74892de25c3e5ec366faea7a5f1fcca3c82b0d5e5f9f797286671a820ca54da5266d6f879ab342c97e25bce9db366c5fb1178690cd5978d4d622 \ + --hash=sha512:d8ddabe127ae8d7330d219e284de68b37fa450a27b4cf05334e9115388295b00148d9861c23b1a2e5ea9df0c33a2d27f3e4b25ce9abd3c334f1979920b19c902 +cffi==1.14.0 --hash=sha512:5b315a65fc8f40622ceef35466546620aaca9dd304f5491a845239659b4066469c5fb3f1683c382eb57f8975caf318e5d88852e3dbb049cde193c9189b88c9c0 +pycparser==2.20 --hash=sha512:06dc9cefdcde6b97c96d0452a77db42a629c48ee545edd7ab241763e50e3b3c56d21f9fcce4e206817aa1a597763d948a10ccc73572490d739c89eea7fede0a1 six==1.14.0 --hash=sha512:a6e7e35921ce8f2f8e79a296ea79a9c3515ff6dd7e777d7892fe4988594f1b3a442a68ffb89cf64530b90a32ceeea00e4ab9069bb697629ab4eb7262c68d1b0f # PyNaCl (Derives TFC account from Onion Service private key) PyNaCl==1.3.0 --hash=sha512:c4017c38b026a5c531b15839b8d61d1fae9907ba1960c2f97f4cd67fe0827729346d5186a6d6927ba84f64b4cbfdece12b287aa7750a039f4160831be871cea3 -setuptools==45.1.0 --hash=sha512:761e4c8df239b8d173513b08959b387c1059e3e023ba6b3f6250fade518d6ef29f287ab90dd35d02bb681b410a050b30b2ed44849638b6f98831f4290a4ccd15 +setuptools==45.2.0 --hash=sha512:de1ac45cb52e8a28322048e6a2b95015aa6826c49679349a1b579cb46b95cb2ffd62242c861c2fe3e059c0c55d4fdb4384c51b964ca2634b2843263543f8842a # Duplicate sub-dependencies: cffi, pycparser, six diff --git a/requirements-setuptools.txt b/requirements-setuptools.txt index 8573219..4f67f16 100644 --- a/requirements-setuptools.txt +++ b/requirements-setuptools.txt @@ -1,2 +1,2 @@ # Setuptools (Allows installation of pycparser which is a sub-dependency of the cryptography and PyNaCl packages) -setuptools==45.1.0 --hash=sha512:761e4c8df239b8d173513b08959b387c1059e3e023ba6b3f6250fade518d6ef29f287ab90dd35d02bb681b410a050b30b2ed44849638b6f98831f4290a4ccd15 +setuptools==45.2.0 --hash=sha512:de1ac45cb52e8a28322048e6a2b95015aa6826c49679349a1b579cb46b95cb2ffd62242c861c2fe3e059c0c55d4fdb4384c51b964ca2634b2843263543f8842a diff --git a/requirements-venv.txt b/requirements-venv.txt index 437c80d..22070cb 100644 --- a/requirements-venv.txt +++ b/requirements-venv.txt @@ -1,2 +1,12 @@ +# Sub-dependencies are listed below dependencies + # Virtual environment (Used to create an isolated Python environment for TFC dependencies) -virtualenv==16.7.9 --hash=sha512:f4e7148f1de50fa2e69061e72db211085fc2f44007de4d18ee02a20d34bca30a00d2fe56ff6f3132e696c3f6efd4151863f26dac4c1d43e87b597c47a51c52ad +virtualenv==20.0.8 --hash=sha512:8b85fa635c5ec51881aed2238f1e9229d6607644995e26e3f9fe6f8bb6313c51f7b290a6ac1347738866626b1b49d08c5622836dfe2a39ae60f697888bcea615 +appdirs==1.4.3 --hash=sha512:b79e9fa76eadee595fe47ea7efd35c4cc72f058a9ed16a95cfa4d91a52c330efba50df7a9926900bbced229cca7bbfb05bbf0a8ee1d46bac2362c98ab9a5154d +distlib==0.3.0 
--hash=sha512:6f910a9607569c9023a19aee35be15cf8521ec7c07c5d478e6d555a301d024a2ee1db48562707b238a72c631d75d9dc154d38b39ed51746b66c938ac40671e60 +six==1.14.0 --hash=sha512:a6e7e35921ce8f2f8e79a296ea79a9c3515ff6dd7e777d7892fe4988594f1b3a442a68ffb89cf64530b90a32ceeea00e4ab9069bb697629ab4eb7262c68d1b0f +importlib_metadata==1.5.0 --hash=sha512:53e51d4b75c1df19fcb6b32e57fa73ffcb00eede86fee7ac9634f02661360538a74d3546b65a641b68ee84c0d78293fe03d09b65cb85359780822b56f813b926 + +# importlib_metadata sub-dependencies +filelock==3.0.12 --hash=sha512:d13edd50779bca9842694e0da157ca1fdad9d28166771275049f41dea4b8d8466fc5604b610b6ad64552cdf4c1d3cada9977ca37c6b775c4cc92f333709e8ea3 +zipp==3.1.0 --hash=sha512:89170b91cfdc0ef4d85b5316b484c8d6e01985f19bb9f545b11d648e122392efa68d40c66e056b8998fb69af49f4e18707f783be8d500b8957ce3a885662d27c diff --git a/requirements.txt b/requirements.txt index 58c1e3c..d412675 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,15 +5,16 @@ pyserial==3.4 --hash=sha512:8333ac2843fd136d5d0d63b527b37866f7d18afc3bb33c # Argon2 (Derives keys that protect persistent user data) argon2_cffi==19.2.0 --hash=sha512:91c4afc2d0cac14cf4342f198f68afd6477dc5bdf2683476c6f8e253de7b3bdc83b229ce96d0280f656ff33667ab9902c92741b82faee8d8892307cde6199845 -cffi==1.13.2 --hash=sha512:b8753a0435cc7a2176f8748badc074ec6ffab6698d6be42b1770c85871f85aa7cf60152a8be053c3031b234a286c5cef07267cb812accb704783d74a2675ed3b -pycparser==2.19 --hash=sha512:7f830e1c9066ee2d297a55e2bf6db4bf6447b6d9da0145d11a88c3bb98505755fb7986eafa6e06ae0b7680838f5e5d6a6d188245ca5ad45c2a727587bac93ab5 +cffi==1.14.0 --hash=sha512:5b315a65fc8f40622ceef35466546620aaca9dd304f5491a845239659b4066469c5fb3f1683c382eb57f8975caf318e5d88852e3dbb049cde193c9189b88c9c0 +pycparser==2.20 --hash=sha512:06dc9cefdcde6b97c96d0452a77db42a629c48ee545edd7ab241763e50e3b3c56d21f9fcce4e206817aa1a597763d948a10ccc73572490d739c89eea7fede0a1 six==1.14.0 --hash=sha512:a6e7e35921ce8f2f8e79a296ea79a9c3515ff6dd7e777d7892fe4988594f1b3a442a68ffb89cf64530b90a32ceeea00e4ab9069bb697629ab4eb7262c68d1b0f # PyNaCl (Handles TCB-side XChaCha20-Poly1305 symmetric encryption) PyNaCl==1.3.0 --hash=sha512:c4017c38b026a5c531b15839b8d61d1fae9907ba1960c2f97f4cd67fe0827729346d5186a6d6927ba84f64b4cbfdece12b287aa7750a039f4160831be871cea3 -setuptools==45.1.0 --hash=sha512:761e4c8df239b8d173513b08959b387c1059e3e023ba6b3f6250fade518d6ef29f287ab90dd35d02bb681b410a050b30b2ed44849638b6f98831f4290a4ccd15 +setuptools==45.2.0 --hash=sha512:de1ac45cb52e8a28322048e6a2b95015aa6826c49679349a1b579cb46b95cb2ffd62242c861c2fe3e059c0c55d4fdb4384c51b964ca2634b2843263543f8842a # Duplicate sub-dependencies: cffi, pycparser, six # Cryptography (Handles TCB-side X448 key exchange) -cryptography==2.8 --hash=sha512:184003c89fee74892de25c3e5ec366faea7a5f1fcca3c82b0d5e5f9f797286671a820ca54da5266d6f879ab342c97e25bce9db366c5fb1178690cd5978d4d622 +cryptography==2.8 --hash=sha512:184003c89fee74892de25c3e5ec366faea7a5f1fcca3c82b0d5e5f9f797286671a820ca54da5266d6f879ab342c97e25bce9db366c5fb1178690cd5978d4d622 \ + --hash=sha512:d8ddabe127ae8d7330d219e284de68b37fa450a27b4cf05334e9115388295b00148d9861c23b1a2e5ea9df0c33a2d27f3e4b25ce9abd3c334f1979920b19c902 # Duplicate sub-dependencies: cffi, pycparser, six diff --git a/src/common/crypto.py b/src/common/crypto.py index 518776d..ffb1f8e 100755 --- a/src/common/crypto.py +++ b/src/common/crypto.py @@ -210,7 +210,7 @@ def argon2_kdf(password: str, # Password to derive the key from [1] https://github.com/P-H-C/phc-winner-argon2/blob/master/argon2-specs.pdf [2] 
https://password-hashing.net/submissions/specs/Catena-v5.pdf [3] https://crypto.stanford.edu/balloon/ - [4] https://tools.ietf.org/html/draft-irtf-cfrg-argon2-06#section-9.4 + [4] https://tools.ietf.org/html/draft-irtf-cfrg-argon2-09#section-8.4 [5] https://github.com/P-H-C/phc-winner-argon2 https://github.com/hynek/argon2_cffi """ @@ -355,14 +355,14 @@ class X448(object): fully seeded. This is the same case as with TFC's `csprng()` function. - [1] https://github.com/pyca/cryptography/blob/2.7/src/cryptography/hazmat/primitives/asymmetric/x448.py#L38 - [2] https://github.com/pyca/cryptography/blob/2.7/src/cryptography/hazmat/backends/openssl/backend.py#L2445 - [3] https://github.com/pyca/cryptography/blob/2.7/src/cryptography/hazmat/backends/openssl/backend.py#L115 - [4] https://github.com/pyca/cryptography/blob/2.7/src/cryptography/hazmat/backends/openssl/backend.py#L122 + [1] https://github.com/pyca/cryptography/blob/2.8/src/cryptography/hazmat/primitives/asymmetric/x448.py#L38 + [2] https://github.com/pyca/cryptography/blob/2.8/src/cryptography/hazmat/backends/openssl/backend.py#L2483 + [3] https://github.com/pyca/cryptography/blob/2.8/src/cryptography/hazmat/backends/openssl/backend.py#L118 + [4] https://github.com/pyca/cryptography/blob/2.8/src/cryptography/hazmat/backends/openssl/backend.py#L125 [5] https://cryptography.io/en/latest/hazmat/backends/openssl/#activate_osrandom_engine [6] https://cryptography.io/en/latest/hazmat/backends/openssl/#os-random-engine [7] https://cryptography.io/en/latest/hazmat/backends/openssl/#os-random-sources - [8] https://github.com/pyca/cryptography/blob/master/src/_cffi_src/openssl/src/osrandom_engine.c#L391 + [8] https://github.com/pyca/cryptography/blob/master/src/_cffi_src/openssl/src/osrandom_engine.c#L395 """ return X448PrivateKey.generate() @@ -419,11 +419,13 @@ class X448(object): return blake2b(shared_secret, digest_size=SYMMETRIC_KEY_LENGTH) @staticmethod - def derive_keys(dh_shared_key: bytes, - tfc_public_key_user: bytes, - tfc_public_key_contact: bytes - ) -> Tuple[bytes, bytes, bytes, bytes, bytes, bytes]: - """Create domain separated message and header keys and fingerprints from shared key. + def derive_subkeys(dh_shared_key: bytes, + tfc_public_key_user: bytes, + tfc_public_key_contact: bytes + ) -> Tuple[bytes, bytes, bytes, bytes, bytes, bytes]: + """\ + Create domain separated message and header subkeys and fingerprints + from the shared key. Domain separate unidirectional keys from shared key by using public keys as message and the context variable as personalization string. @@ -451,7 +453,7 @@ class X448(object): key_tuple = tx_mk, rx_mk, tx_hk, rx_hk, tx_fp, rx_fp if len(set(key_tuple)) != len(key_tuple): - raise CriticalError("Derived keys were not unique.") + raise CriticalError("Derived subkeys were not unique.") return key_tuple @@ -515,7 +517,7 @@ def encrypt_and_sign(plaintext: bytes, # Plaintext to encrypt tested by TFC unit tests. The testing is done in limited scope by using the libsodium and official IETF test vectors. 
- [1] https://tools.ietf.org/html/draft-irtf-cfrg-xchacha-01 + [1] https://tools.ietf.org/html/draft-irtf-cfrg-xchacha-03 [2] https://tools.ietf.org/html/rfc8439 [3] https://download.libsodium.org/doc/secret-key_cryptography/aead/chacha20-poly1305/xchacha20-poly1305_construction [4] https://cr.yp.to/snuffle/keysizes.pdf @@ -887,8 +889,8 @@ def csprng(key_length: int = SYMMETRIC_KEY_LENGTH # Length of the key entropy estimator by 1024 bits.[1; pp.59-60] [1] https://www.bsi.bund.de/SharedDocs/Downloads/EN/BSI/Publications/Studies/LinuxRNG/LinuxRNG_EN.pdf - [2] https://github.com/torvalds/linux/blob/master/drivers/char/random.c#L791 - [3] https://github.com/torvalds/linux/blob/master/drivers/char/random.c#L1032 + [2] https://github.com/torvalds/linux/blob/master/drivers/char/random.c#L734 + [3] https://github.com/torvalds/linux/blob/master/drivers/char/random.c#L952 The ChaCha20 DRNG ================= @@ -994,10 +996,10 @@ def csprng(key_length: int = SYMMETRIC_KEY_LENGTH # Length of the key [1] https://www.bsi.bund.de/SharedDocs/Downloads/EN/BSI/Publications/Studies/LinuxRNG/LinuxRNG_EN.pdf [2] https://lkml.org/lkml/2019/5/30/867 - [3] https://github.com/torvalds/linux/blob/master/drivers/char/random.c#L889 - https://github.com/torvalds/linux/blob/master/drivers/char/random.c#L1058 - [4] https://github.com/torvalds/linux/blob/master/lib/chacha.c#L87 - https://github.com/torvalds/linux/blob/master/drivers/char/random.c#L1064 + [3] https://github.com/torvalds/linux/blob/master/drivers/char/random.c#L810 + https://github.com/torvalds/linux/blob/master/drivers/char/random.c#L977 + [4] https://github.com/torvalds/linux/blob/master/lib/crypto/chacha.c#L89 + https://github.com/torvalds/linux/blob/master/drivers/char/random.c#L983 GETRANDOM and Python diff --git a/src/common/database.py b/src/common/database.py index 5074cd2..ebb7236 100644 --- a/src/common/database.py +++ b/src/common/database.py @@ -64,7 +64,7 @@ class TFCDatabase(object): os.fsync(f.fileno()) def verify_file(self, database_name: str) -> bool: - """Verify integrity of file content.""" + """Verify integrity of database content.""" with open(database_name, 'rb') as f: purp_data = f.read() diff --git a/src/common/db_contacts.py b/src/common/db_contacts.py index 63655d0..97395a0 100755 --- a/src/common/db_contacts.py +++ b/src/common/db_contacts.py @@ -25,8 +25,8 @@ import typing from typing import Iterable, Iterator, List, Optional, Sized from src.common.database import TFCDatabase -from src.common.encoding import bool_to_bytes, pub_key_to_onion_address, str_to_bytes, pub_key_to_short_address -from src.common.encoding import bytes_to_bool, onion_address_to_pub_key, bytes_to_str +from src.common.encoding import (bool_to_bytes, pub_key_to_onion_address, str_to_bytes, pub_key_to_short_address, + bytes_to_bool, onion_address_to_pub_key, bytes_to_str) from src.common.exceptions import CriticalError from src.common.misc import ensure_dir, get_terminal_width, separate_headers, split_byte_string from src.common.output import clear_screen @@ -111,8 +111,8 @@ class Contact(object): notifications: This setting defines whether, in situations where some other window is active, the Receiver Program - displays a notification about the contact sending a - new message to their window. The setting has no + displays a notification about the contact sending + a new message to their window. The setting has no effect on user's Transmitter Program. 
tfc_private_key: This value is an ephemerally stored private key @@ -139,8 +139,8 @@ class Contact(object): ) -> None: """Create a new Contact object. - `self.short_address` is a truncated version of the account used - to identify TFC account in printed messages. + `self.short_address` is the truncated version of the account + used to identify TFC account in printed messages. """ self.onion_pub_key = onion_pub_key self.nick = nick @@ -473,8 +473,7 @@ class ContactList(Iterable[Contact], Sized): KEX_STATUS_UNVERIFIED: f"{ECDHE} (Unverified)", KEX_STATUS_VERIFIED: f"{ECDHE} (Verified)", KEX_STATUS_NO_RX_PSK: f"{PSK} (No contact key)", - KEX_STATUS_HAS_RX_PSK: PSK - } + KEX_STATUS_HAS_RX_PSK: PSK} # Populate columns with contact data for c in self.get_list_of_contacts(): diff --git a/src/common/db_groups.py b/src/common/db_groups.py index e80d133..c3a152c 100755 --- a/src/common/db_groups.py +++ b/src/common/db_groups.py @@ -27,11 +27,11 @@ from typing import Callable, Iterable, Iterator, List, Sized from src.common.database import TFCDatabase from src.common.db_contacts import Contact -from src.common.encoding import bool_to_bytes, int_to_bytes, str_to_bytes, onion_address_to_pub_key, b58encode -from src.common.encoding import bytes_to_bool, bytes_to_int, bytes_to_str +from src.common.encoding import (bool_to_bytes, int_to_bytes, str_to_bytes, onion_address_to_pub_key, + bytes_to_bool, bytes_to_int, bytes_to_str, b58encode) from src.common.exceptions import CriticalError -from src.common.misc import ensure_dir, get_terminal_width, round_up, separate_header, separate_headers -from src.common.misc import split_byte_string +from src.common.misc import (ensure_dir, get_terminal_width, round_up, separate_header, separate_headers, + split_byte_string) from src.common.statics import (CONTACT_LIST_INDENT, DIR_USER_DATA, DUMMY_GROUP, DUMMY_MEMBER, ENCODED_BOOLEAN_LENGTH, ENCODED_INTEGER_LENGTH, GROUP_DB_HEADER_LENGTH, GROUP_ID_LENGTH, GROUP_STATIC_LENGTH, ONION_SERVICE_PUBLIC_KEY_LENGTH, @@ -266,8 +266,8 @@ class GroupList(Iterable[Group], Sized): content. The function then removes dummy groups based on header data. Next, the function updates the group database settings if necessary. It then splits group data based on header data into - blocks, which are further sliced, and processed if necessary, to - obtain data required to create Group objects. Finally, if + blocks, which are further sliced, and processed if necessary, + to obtain data required to create Group objects. Finally, if needed, the function will update the group database content. """ pt_bytes = self.database.load_database() @@ -319,8 +319,8 @@ class GroupList(Iterable[Group], Sized): members_in_largest_group: int ) -> bool: """\ - Adjust TFC's settings automatically if loaded group database was - stored using larger database setting values. + Adjust TFC's settings automatically if the loaded group database + was stored using larger database setting values. If settings had to be adjusted, return True so the method `self._load_groups` knows to write changes to a new database. 
diff --git a/src/common/db_keys.py b/src/common/db_keys.py index 1b0ef2e..452b79b 100644 --- a/src/common/db_keys.py +++ b/src/common/db_keys.py @@ -27,8 +27,7 @@ from typing import Any, Callable, Dict, List from src.common.crypto import blake2b, csprng from src.common.database import TFCDatabase -from src.common.encoding import int_to_bytes, onion_address_to_pub_key -from src.common.encoding import bytes_to_int +from src.common.encoding import bytes_to_int, int_to_bytes, onion_address_to_pub_key from src.common.exceptions import CriticalError from src.common.misc import ensure_dir, separate_headers, split_byte_string from src.common.statics import (DIR_USER_DATA, DUMMY_CONTACT, HARAC_LENGTH, INITIAL_HARAC, KDB_ADD_ENTRY_HEADER, @@ -52,16 +51,16 @@ class KeySet(object): Tor Onion Service address. Used to uniquely identify the KeySet object. - tx_mk: Forward secret message key for sent messages. + tx_mk: The forward secret message key for sent messages. - rx_mk: Forward secret message key for received messages. + rx_mk: The forward secret message key for received messages. Used only by the Receiver Program. - tx_hk: Static header key used to encrypt and sign the hash + tx_hk: The static header key used to encrypt and sign the hash ratchet counter provided along the encrypted assembly packet. - rx_hk: Static header key used to authenticate and decrypt + rx_hk: The static header key used to authenticate and decrypt the hash ratchet counter of received messages. Used only by the Receiver Program. diff --git a/src/common/db_logs.py b/src/common/db_logs.py index f4a4e4b..4de4730 100644 --- a/src/common/db_logs.py +++ b/src/common/db_logs.py @@ -60,7 +60,7 @@ def log_writer_loop(queues: Dict[bytes, 'Queue[Any]'], # Dictionary of que message_log: 'MessageLog', # MessageLog object unit_test: bool = False # True, exits loop when UNIT_TEST_QUEUE is no longer empty. ) -> None: - """Write assembly packets to log database. + """Write assembly packets to the log database. When traffic masking is enabled, the fact this loop is run as a separate process, means the rate at which `sender_loop` outputs @@ -84,17 +84,17 @@ def log_writer_loop(queues: Dict[bytes, 'Queue[Any]'], # Dictionary of que while log_packet_queue.qsize() == 0: time.sleep(0.01) - traffic_masking, logfile_masking = check_log_setting_queues(traffic_masking, - traffic_masking_queue, - logfile_masking, - logfile_masking_queue) + traffic_masking, logfile_masking = check_setting_queues(traffic_masking, + traffic_masking_queue, + logfile_masking, + logfile_masking_queue) onion_pub_key, assembly_packet, log_messages, log_as_ph, master_key = log_packet_queue.get() - # Update log database key + # Update the log database key message_log.database_key = master_key.master_key - # Detect and ignore commands. 
+ # Detect commands and ignore them if onion_pub_key is None: continue @@ -135,12 +135,12 @@ def log_writer_loop(queues: Dict[bytes, 'Queue[Any]'], # Dictionary of que break -def check_log_setting_queues(traffic_masking: bool, - traffic_masking_queue: 'Queue[Any]', - logfile_masking: bool, - logfile_masking_queue: 'Queue[Any]' - ) -> Tuple[bool, bool]: - """Check for updates to logging settings.""" +def check_setting_queues(traffic_masking: bool, + traffic_masking_queue: 'Queue[Any]', + logfile_masking: bool, + logfile_masking_queue: 'Queue[Any]' + ) -> Tuple[bool, bool]: + """Check queues for updates to traffic masking and logging settings.""" if traffic_masking_queue.qsize(): traffic_masking = traffic_masking_queue.get() @@ -159,10 +159,10 @@ def update_logging_state(assembly_packet: bytes, `logging_state` retains the logging setting for noise packets that do not know the log setting of the window. To prevent logging of - noise packets in situation where logging has been disabled, but no - new message assembly packet carrying the logging setting is received, - the LOG_SETTING_QUEUE is checked for up-to-date logging setting for - every received noise packet. + noise packets in a situation where logging has been disabled, but no + new message assembly packet carrying the logging setting has been + received, the LOG_SETTING_QUEUE is checked for up-to-date logging + setting for every received noise packet. """ if assembly_packet[:ASSEMBLY_PACKET_HEADER_LENGTH] == P_N_HEADER: if log_setting_queue.qsize(): @@ -181,7 +181,7 @@ def write_log_entry(assembly_packet: bytes, # Assembly pac Logging assembly packets allows reconstruction of conversation while protecting metadata about the length of messages alternative log - file formats could reveal. + file formats could reveal to a physical attacker. Transmitter Program can only log sent messages. This is not useful for recalling conversations but it makes it possible to audit @@ -335,7 +335,7 @@ def change_log_db_key(old_key: bytes, new_key: bytes, settings: 'Settings' ) -> None: - """Re-encrypt log database with a new master key.""" + """Re-encrypt the log database with a new master key.""" ensure_dir(DIR_USER_DATA) file_name = f'{DIR_USER_DATA}{settings.software_operation}_logs' temp_name = file_name + TEMP_POSTFIX @@ -357,7 +357,7 @@ def change_log_db_key(old_key: bytes, def replace_log_db(settings: 'Settings') -> None: - """Replace log database with temp file.""" + """Replace the log database with the temp file.""" ensure_dir(DIR_USER_DATA) file_name = f'{DIR_USER_DATA}{settings.software_operation}_logs' temp_name = file_name + TEMP_POSTFIX @@ -378,7 +378,7 @@ def remove_logs(contact_list: 'ContactList', If the selector is a public key, all messages (both the private conversation and any associated group messages) sent to and received from the associated contact are removed. If the selector is a group - ID, only messages for group determined by that group ID are removed. + ID, only messages for the group matching that group ID are removed. """ ensure_dir(DIR_USER_DATA) file_name = f'{DIR_USER_DATA}{settings.software_operation}_logs' diff --git a/src/common/db_masterkey.py b/src/common/db_masterkey.py index 8c00543..91c4659 100755 --- a/src/common/db_masterkey.py +++ b/src/common/db_masterkey.py @@ -111,14 +111,14 @@ class MasterKey(object): The generated master key depends on a 256-bit salt and the password entered by the user. Additional computational strength - is added by the slow hash function (Argon2id). 
The more cores and - the faster each core is, and the more memory the system has, the - more secure TFC data is under the same password. + is added by the slow hash function (Argon2id). The more cores + and the faster each core is, and the more memory the system has, + the more secure TFC data is under the same password. This method automatically tweaks the Argon2 time and memory cost parameters according to best practices as determined in - https://tools.ietf.org/html/draft-irtf-cfrg-argon2-04#section-4 + https://tools.ietf.org/html/draft-irtf-cfrg-argon2-09#section-4 1) For Argon2 type (y), Argon2id was selected because the adversary might be able to run arbitrary code on Destination @@ -145,93 +145,45 @@ class MasterKey(object): share the same salt is just 10^(-18).* * https://en.wikipedia.org/wiki/Birthday_attack - The salt does not need additional protection as the security it - provides depends on the salt space in relation to the number of - attacked targets (i.e. if two or more physically compromised - systems happen to share the same salt, the attacker can speed up - the attack against those systems with time-memory-trade-off - attack). + The salt does not need additional protection as the security + it provides depends on the salt space in relation to the + number of attacked targets (i.e. if two or more physically + compromised systems happen to share the same salt, the + attacker can speed up the attack against those systems with + time-memory-trade-off attack). - 6) The tag length isn't utilized. The result of the key derivation is - the master encryption key itself, which is set to 32 bytes for - use in XChaCha20-Poly1305. + 6) The tag length isn't utilized. The result of the key + derivation is the master encryption key itself, which is set + to 32 bytes for use in XChaCha20-Poly1305. 7) Memory wiping feature is not provided. - To recognize the password is correct, the BLAKE2b hash of the master - key is stored together with key derivation parameters into the - login database. - The preimage resistance of BLAKE2b prevents derivation of master - key from the stored hash, and Argon2id ensures brute force and - dictionary attacks against the master password are painfully - slow even with GPUs/ASICs/FPGAs, as long as the password is - sufficiently strong. + To recognize the password is correct, the BLAKE2b hash of the + master key is stored together with key derivation parameters + into the login database. + The preimage resistance of BLAKE2b prevents derivation of + master key from the stored hash, and Argon2id ensures brute + force and dictionary attacks against the master password are + painfully slow even with GPUs/ASICs/FPGAs, as long as the + password is sufficiently strong. """ - password = MasterKey.new_password() - salt = csprng(ARGON2_SALT_LENGTH) - time_cost = ARGON2_MIN_TIME_COST + password = MasterKey.new_password() + salt = csprng(ARGON2_SALT_LENGTH) # Determine the amount of memory used from the amount of free RAM in the system. 
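The derivation itself happens in `timed_key_derivation`, which is not shown in this hunk. As a rough standalone illustration of what an Argon2id call with these parameters looks like via the argon2-cffi library, with placeholder cost values rather than the ones the search below converges on:

    # Sketch only: placeholder cost parameters, not TFC's tuned values.
    import os
    from argon2.low_level import Type, hash_secret_raw

    password = "correct horse battery staple"   # hypothetical user password
    salt     = os.urandom(32)                    # 256-bit salt, as in the docstring

    master_key = hash_secret_raw(secret=password.encode(),
                                 salt=salt,
                                 time_cost=3,        # iterations (t)
                                 memory_cost=65536,  # KiB of RAM (m)
                                 parallelism=4,      # lanes/threads (p)
                                 hash_len=32,        # 32-byte key for XChaCha20-Poly1305
                                 type=Type.ID)       # Argon2id
    assert len(master_key) == 32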
memory_cost = self.get_available_memory() - # Determine the amount of threads to use + # Determine the number of threads to use parallelism = multiprocessing.cpu_count() if self.local_test: parallelism = max(ARGON2_MIN_PARALLELISM, parallelism // 2) - # Initial key derivation - phase("Deriving master key", head=2, offset=0) - master_key, kd_time = self.timed_key_derivation(password, salt, time_cost, memory_cost, parallelism) - phase("", done=True, tail=1) - - # If derivation was too fast, increase time_cost - while kd_time < MIN_KEY_DERIVATION_TIME: - print_on_previous_line() - phase(f"Trying time cost {time_cost+1}") - time_cost += 1 - master_key, kd_time = self.timed_key_derivation(password, salt, time_cost, memory_cost, parallelism) - phase(f"{kd_time:.1f}s", done=True) - - # At this point time_cost may have value of 1 or it may have increased to e.g. 3, which might make it take - # longer than MAX_KEY_DERIVATION_TIME. If that's the case, it makes no sense to lower it back to 2 because even - # with all memory, time_cost=2 will still be too fast. We therefore accept the time_cost whatever it is. - - # If the key derivation time is too long, we do a binary search on the amount - # of memory to use until we hit the desired key derivation time range. - middle = None + # Determine time cost + time_cost, kd_time, master_key = self.determine_time_cost(password, salt, memory_cost, parallelism) + # Determine memory cost if kd_time > MAX_KEY_DERIVATION_TIME: - - lower_bound = ARGON2_MIN_MEMORY_COST - upper_bound = memory_cost - - while kd_time < MIN_KEY_DERIVATION_TIME or kd_time > MAX_KEY_DERIVATION_TIME: - - middle = (lower_bound + upper_bound) // 2 - - print_on_previous_line() - phase(f"Trying memory cost {middle} KiB") - master_key, kd_time = self.timed_key_derivation(password, salt, time_cost, middle, parallelism) - phase(f"{kd_time:.1f}s", done=True) - - # The search might fail e.g. if external CPU load causes delay in key derivation, which causes the - # search to continue into wrong branch. In such a situation the search is restarted. The binary search - # is problematic with tight key derivation time target ranges, so if the search keeps restarting, - # increasing MAX_KEY_DERIVATION_TIME (and thus expanding the range) will help finding suitable - # memory_cost value faster. Increasing MAX_KEY_DERIVATION_TIME slightly affects security (positively) - # and user experience (negatively). - if middle == lower_bound or middle == upper_bound: - lower_bound = ARGON2_MIN_MEMORY_COST - upper_bound = self.get_available_memory() - continue - - if kd_time < MIN_KEY_DERIVATION_TIME: - lower_bound = middle - - elif kd_time > MAX_KEY_DERIVATION_TIME: - upper_bound = middle - - memory_cost = middle if middle is not None else memory_cost + memory_cost, master_key = self.determine_memory_cost(password, salt, time_cost, memory_cost, parallelism) # Store values to database database_data = (salt @@ -249,12 +201,156 @@ class MasterKey(object): # the database data. self.database_data = database_data - print_on_previous_line(2) + print_on_previous_line() phase("Deriving master key") phase(DONE, delay=1) return master_key + def determine_time_cost(self, + password: str, + salt: bytes, + memory_cost: int, + parallelism: int + ) -> Tuple[int, float, bytes]: + """Find suitable time_cost value for Argon2id. + + There are two acceptable time_cost values. + + 1. A time_cost value that together with all available memory + sets the key derivation time between MIN_KEY_DERIVATION_TIME + and MAX_KEY_DERIVATION_TIME. 
If during the search we find + such suitable time_cost value, we accept it as such. + + 2. In a situation where no time_cost value is suitable alone, + there will exist some time_cost value `t` that makes key + derivation too fast, and another time_cost value `t+1` that + makes key derivation too slow. In this case we are interested + in the latter value, as unlike `t`, the value `t+1` can be + fine-tuned to suitable key derivation time range by adjusting + the memory_cost parameter. + + As time_cost has no upper limit, and as the amount of available + memory has tremendous effect on how long one round takes, it's + difficult to determine the upper bound for a time_cost binary + search. We therefore start with a single round, and by + benchmarking it, estimate how many rounds are needed to reach + the target zone. After every try, we update our time_cost + candidate based on new average time per round estimate, a value + that gets more accurate as the search progresses. If this + method isn't able to suggest a value larger than 1, we increase + time_cost by 1 anyway to prevent an Alderson loop. + + Every time the time_cost value is increased, we update the lower + bound to narrow the search space of the binary search we can + switch to immediately, once the MAX_KEY_DERIVATION_TIME is + exceeded (i.e. once an upper bound is found). At that point, the + time_cost `t+1` can be found in log(n) time. + """ + lower_bound = ARGON2_MIN_TIME_COST # type: int + upper_bound = None # type: Optional[int] + time_cost = lower_bound + + print(2*'\n') + + while True: + print_on_previous_line() + phase(f"Trying time cost {time_cost}") + master_key, kd_time = self.timed_key_derivation(password, salt, time_cost, memory_cost, parallelism) + phase(f"{kd_time:.1f}s", done=True) + + # Sentinel that checks if the binary search has ended, and that restarts + # the search if kd_time repeats. This prevents an Alderson loop. + if upper_bound is not None and time_cost in [lower_bound, upper_bound]: # pragma: no cover + lower_bound = ARGON2_MIN_TIME_COST + upper_bound = None + continue + + if MIN_KEY_DERIVATION_TIME <= kd_time <= MAX_KEY_DERIVATION_TIME: + break + + if kd_time < MIN_KEY_DERIVATION_TIME: + lower_bound = time_cost + + if upper_bound is None: + avg_time_per_round = kd_time / time_cost + time_cost_candidate = math.floor(MAX_KEY_DERIVATION_TIME / avg_time_per_round) + time_cost = max(time_cost+1, time_cost_candidate) + + else: + if time_cost + 1 == upper_bound: + time_cost += 1 + break + + time_cost = math.floor((lower_bound + upper_bound) / 2) + + elif kd_time > MAX_KEY_DERIVATION_TIME: + upper_bound = time_cost + + # Sentinel: If even a single round takes too long, it's the `t+1` we're looking for. + if time_cost == 1: + break + + # Sentinel: If the current time_cost value (that was too large) is one + # greater than the lower_bound, we know current time_cost is at `t+1`. + if time_cost == lower_bound + 1: + break + + # Otherwise we know the current time_cost is at least two integers greater + # than `t`. Our best candidate for `t` is lower_bound, but for all we know, + # `t` might be a much greater value. So we continue binary search for `t+1` + time_cost = math.floor((lower_bound + upper_bound) / 2) + + return time_cost, kd_time, master_key + + def determine_memory_cost(self, + password: str, + salt: bytes, + time_cost: int, + memory_cost: int, + parallelism: int, + ) -> Tuple[int, bytes]: + """Determine suitable memory_cost value for Argon2id. 
+ + If we reached this function, it means we found a `t+1` value for + time_cost (explained in the `determine_time_cost` function). We + therefore do a binary search on the amount of memory to use + until we hit the desired key derivation time range. + """ + lower_bound = ARGON2_MIN_MEMORY_COST + upper_bound = memory_cost + + while True: + memory_cost = int(round((lower_bound + upper_bound) // 2, -3)) + + print_on_previous_line() + phase(f"Trying memory cost {memory_cost} KiB") + master_key, kd_time = self.timed_key_derivation(password, salt, time_cost, memory_cost, parallelism) + phase(f"{kd_time:.1f}s", done=True) + + # If we found a suitable memory_cost value, we accept the key and the memory_cost. + if MIN_KEY_DERIVATION_TIME <= kd_time <= MAX_KEY_DERIVATION_TIME: + return memory_cost, master_key + + # The search might fail e.g. if external CPU load causes delay in key + # derivation, which causes the search to continue into wrong branch. In + # such a situation the search is restarted. The binary search is problematic + # with tight key derivation time target ranges, so if the search keeps + # restarting, increasing MAX_KEY_DERIVATION_TIME (and thus expanding the + # range) will help finding suitable memory_cost value faster. Increasing + # MAX_KEY_DERIVATION_TIME slightly affects security (positively) and user + # experience (negatively). + if memory_cost == lower_bound or memory_cost == upper_bound: + lower_bound = ARGON2_MIN_MEMORY_COST + upper_bound = self.get_available_memory() + continue + + if kd_time < MIN_KEY_DERIVATION_TIME: + lower_bound = memory_cost + + elif kd_time > MAX_KEY_DERIVATION_TIME: + upper_bound = memory_cost + def replace_database_data(self) -> None: """Store cached database data into database.""" if self.database_data is not None: diff --git a/src/common/db_settings.py b/src/common/db_settings.py index ce4757c..e0d479a 100755 --- a/src/common/db_settings.py +++ b/src/common/db_settings.py @@ -26,8 +26,8 @@ import typing from typing import Union from src.common.database import TFCDatabase -from src.common.encoding import bool_to_bytes, double_to_bytes, int_to_bytes -from src.common.encoding import bytes_to_bool, bytes_to_double, bytes_to_int +from src.common.encoding import (bool_to_bytes, double_to_bytes, int_to_bytes, + bytes_to_bool, bytes_to_double, bytes_to_int) from src.common.exceptions import CriticalError, SoftError from src.common.input import yes from src.common.misc import ensure_dir, get_terminal_width, round_up @@ -53,6 +53,7 @@ class Settings(object): master_key: 'MasterKey', # MasterKey object operation: str, # Operation mode of the program (Tx or Rx) local_test: bool, # Local testing setting from command-line argument + qubes: bool = False # Qubes setting from command-line argument ) -> None: """Create a new Settings object. 
@@ -91,6 +92,7 @@ class Settings(object): self.master_key = master_key self.software_operation = operation self.local_testing_mode = local_test + self.qubes = qubes self.file_name = f'{DIR_USER_DATA}{operation}_settings' self.database = TFCDatabase(self.file_name, master_key) @@ -199,7 +201,7 @@ class Settings(object): Settings.validate_max_number_of_groups(key, value, group_list) Settings.validate_max_number_of_contacts(key, value, contact_list) Settings.validate_new_message_notify_duration(key, value) - Settings.validate_traffic_maskig_delay(key, value, contact_list) + Settings.validate_traffic_masking_delay(key, value, contact_list) @staticmethod def validate_database_limit(key: str, value: 'SettingType') -> None: @@ -248,10 +250,10 @@ class Settings(object): raise SoftError("Error: Too small value for message notify duration.", head_clear=True) @staticmethod - def validate_traffic_maskig_delay(key: str, - value: 'SettingType', - contact_list: 'ContactList' - ) -> None: + def validate_traffic_masking_delay(key: str, + value: 'SettingType', + contact_list: 'ContactList' + ) -> None: """Validate setting value for traffic masking delays.""" if key in ["tm_static_delay", "tm_random_delay"]: diff --git a/src/common/gateway.py b/src/common/gateway.py index 48e9060..a0548f6 100644 --- a/src/common/gateway.py +++ b/src/common/gateway.py @@ -19,6 +19,7 @@ You should have received a copy of the GNU General Public License along with TFC. If not, see . """ +import base64 import hashlib import json import multiprocessing.connection @@ -36,15 +37,17 @@ from typing import Any, Dict, Optional, Tuple, Union from serial.serialutil import SerialException from src.common.exceptions import CriticalError, graceful_exit, SoftError -from src.common.input import yes -from src.common.misc import calculate_race_condition_delay, ensure_dir, ignored, get_terminal_width -from src.common.misc import separate_trailer +from src.common.input import box_input, yes +from src.common.misc import (calculate_race_condition_delay, ensure_dir, ignored, get_terminal_width, + separate_trailer, split_byte_string, validate_ip_address) from src.common.output import m_print, phase, print_on_previous_line from src.common.reed_solomon import ReedSolomonError, RSCodec from src.common.statics import (BAUDS_PER_BYTE, DIR_USER_DATA, DONE, DST_DD_LISTEN_SOCKET, DST_LISTEN_SOCKET, GATEWAY_QUEUE, LOCALHOST, LOCAL_TESTING_PACKET_DELAY, MAX_INT, NC, + QUBES_DST_LISTEN_SOCKET, QUBES_RX_IP_ADDR_FILE, QUBES_SRC_LISTEN_SOCKET, PACKET_CHECKSUM_LENGTH, RECEIVER, RELAY, RP_LISTEN_SOCKET, RX, - SERIAL_RX_MIN_TIMEOUT, SETTINGS_INDENT, SRC_DD_LISTEN_SOCKET, TRANSMITTER, TX) + SERIAL_RX_MIN_TIMEOUT, SETTINGS_INDENT, SOCKET_BUFFER_SIZE, SRC_DD_LISTEN_SOCKET, + TRANSMITTER, TX, US_BYTE) if typing.TYPE_CHECKING: from multiprocessing import Queue @@ -59,9 +62,10 @@ def gateway_loop(queues: Dict[bytes, 'Queue[Tuple[datetime, bytes]]'], Also place the current timestamp to queue to be delivered to the Receiver Program. The timestamp is used both to notify when the sent - message was received by Relay Program, and as part of a commitment - scheme: For more information, see the section on "Covert channel - based on user interaction" under TFC's Security Design wiki article. + message was received by the Relay Program, and as part of a + commitment scheme: For more information, see the section on "Covert + channel based on user interaction" under TFC's Security Design wiki + article. 
""" queue = queues[GATEWAY_QUEUE] @@ -75,20 +79,23 @@ def gateway_loop(queues: Dict[bytes, 'Queue[Tuple[datetime, bytes]]'], class Gateway(object): """\ Gateway object is a wrapper for interfaces that connect - Source/Destination Computer with the Networked computer. + Source/Destination Computer with the Networked Computer. """ def __init__(self, operation: str, local_test: bool, - dd_sockets: bool + dd_sockets: bool, + qubes: bool, ) -> None: """Create a new Gateway object.""" - self.settings = GatewaySettings(operation, local_test, dd_sockets) - self.tx_serial = None # type: Optional[serial.Serial] - self.rx_serial = None # type: Optional[serial.Serial] - self.rx_socket = None # type: Optional[multiprocessing.connection.Connection] - self.tx_socket = None # type: Optional[multiprocessing.connection.Connection] + self.settings = GatewaySettings(operation, local_test, dd_sockets, qubes) + self.tx_serial = None # type: Optional[serial.Serial] + self.rx_serial = None # type: Optional[serial.Serial] + self.rx_socket = None # type: Optional[multiprocessing.connection.Connection] + self.tx_socket = None # type: Optional[multiprocessing.connection.Connection] + self.txq_socket = None # type: Optional[socket.socket] + self.rxq_socket = None # type: Optional[socket.socket] # Initialize Reed-Solomon erasure code handler self.rs = RSCodec(2 * self.settings.session_serial_error_correction) @@ -102,6 +109,11 @@ class Gateway(object): self.client_establish_socket() if self.settings.software_operation in [NC, RX]: self.server_establish_socket() + elif qubes: + if self.settings.software_operation in [TX, NC]: + self.qubes_client_establish_socket() + if self.settings.software_operation in [NC, RX]: + self.qubes_server_establish_socket() else: self.establish_serial() @@ -141,6 +153,19 @@ class Gateway(object): except SerialException: raise CriticalError("SerialException. Ensure $USER is in the dialout group by restarting this computer.") + def write_udp_packet(self, packet: bytes) -> None: + """Split packet to smaller parts and transmit them over the socket.""" + udp_port = QUBES_SRC_LISTEN_SOCKET if self.settings.software_operation == TX else QUBES_DST_LISTEN_SOCKET + + packet = base64.b85encode(packet) + packets = split_byte_string(packet, SOCKET_BUFFER_SIZE) + + if self.txq_socket is not None: + for p in packets: + self.txq_socket.sendto(p, (self.settings.rx_udp_ip, udp_port)) + time.sleep(0.000001) + self.txq_socket.sendto(US_BYTE, (self.settings.rx_udp_ip, udp_port)) + def write(self, orig_packet: bytes) -> None: """Add error correction data and output data via socket/serial interface. 
@@ -157,6 +182,10 @@ class Gateway(object): time.sleep(LOCAL_TESTING_PACKET_DELAY) except BrokenPipeError: raise CriticalError("Relay IPC server disconnected.", exit_code=0) + + elif self.txq_socket is not None: + self.write_udp_packet(packet) + elif self.tx_serial is not None: try: self.tx_serial.write(packet) @@ -180,6 +209,24 @@ class Gateway(object): except EOFError: raise CriticalError("Relay IPC client disconnected.", exit_code=0) + def read_qubes_socket(self) -> bytes: + """Read packet from Qubes' socket interface.""" + if self.rxq_socket is None: + raise CriticalError("Socket interface has not been initialized.") + + while True: + try: + read_buffer = bytearray() + + while True: + read = self.rxq_socket.recv(SOCKET_BUFFER_SIZE) + if read == US_BYTE: + return read_buffer + read_buffer.extend(read) + + except (EOFError, KeyboardInterrupt): + pass + def read_serial(self) -> bytes: """Read packet from serial interface. @@ -215,8 +262,11 @@ class Gateway(object): def read(self) -> bytes: """Read data via socket/serial interface.""" - data = (self.read_socket() if self.settings.local_testing_mode else self.read_serial()) - return data + if self.settings.local_testing_mode: + return self.read_socket() + if self.settings.qubes: + return self.read_qubes_socket() + return self.read_serial() def add_error_correction(self, packet: bytes) -> bytes: """Add error correction to packet that will be output. @@ -230,8 +280,10 @@ class Gateway(object): If error correction is set to 0, errors are only detected. This is done by using a BLAKE2b based, 128-bit checksum. + + If Qubes is used, Reed-Solomon is not used as it only slows down data transfer. """ - if self.settings.session_serial_error_correction: + if self.settings.session_serial_error_correction and not self.settings.qubes: packet = self.rs.encode(packet) else: packet = packet + hashlib.blake2b(packet, digest_size=PACKET_CHECKSUM_LENGTH).digest() @@ -239,7 +291,13 @@ class Gateway(object): def detect_errors(self, packet: bytes) -> bytes: """Handle received packet error detection and/or correction.""" - if self.settings.session_serial_error_correction: + if self.settings.qubes: + try: + packet = base64.b85decode(packet) + except ValueError: + raise SoftError("Error: Received packet had invalid Base85 encoding.") + + if self.settings.session_serial_error_correction and not self.settings.qubes: try: packet, _ = self.rs.decode(packet) return bytes(packet) @@ -280,6 +338,30 @@ class Gateway(object): return f'/dev/{self.settings.built_in_serial_interface}' raise CriticalError(f"Error: /dev/{self.settings.built_in_serial_interface} was not found.") + # Qubes + + def qubes_client_establish_socket(self) -> None: + """Establish Qubes socket for outgoing data.""" + self.txq_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + + def qubes_server_establish_socket(self) -> None: + """Establish Qubes socket for incoming data.""" + udp_port = QUBES_SRC_LISTEN_SOCKET if self.settings.software_operation == NC else QUBES_DST_LISTEN_SOCKET + self.rxq_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + self.rxq_socket.bind((self.get_local_ip_addr(), udp_port)) + + @staticmethod + def get_local_ip_addr() -> str: + """Get local IP address of the system.""" + s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + try: + s.connect(('192.0.0.8', 1027)) + except socket.error: + raise CriticalError("Socket error") + ip_address = s.getsockname()[0] # type: str + + return ip_address + # Local testing def server_establish_socket(self) -> None: @@ -354,19 
+436,20 @@ class GatewaySettings(object): unencrypted JSON database. The reason these settings are in plaintext is it protects the system - from inconsistent state of serial settings: Would the user reconfigure - their serial settings, and would the setting altering packet to - Receiver Program drop, Relay Program could in some situations no - longer communicate with the Receiver Program. + from an inconsistent serial setting state: Would the user change one + or more settings of their serial interfaces, and would the setting + adjusting packet to Receiver Program drop, Relay Program could in + some situations no longer communicate with the Receiver Program. Serial interface settings are not sensitive enough to justify the - inconvenience of encrypting the setting values. + inconveniences that would result from encrypting the setting values. """ def __init__(self, operation: str, local_test: bool, - dd_sockets: bool + dd_sockets: bool, + qubes: bool ) -> None: """Create a new Settings object. @@ -379,11 +462,13 @@ class GatewaySettings(object): self.serial_error_correction = 5 self.use_serial_usb_adapter = True self.built_in_serial_interface = 'ttyS0' + self.rx_udp_ip = '' self.software_operation = operation self.local_testing_mode = local_test self.data_diode_sockets = dd_sockets + self.qubes = qubes self.all_keys = list(vars(self).keys()) self.key_list = self.all_keys[:self.all_keys.index('software_operation')] self.defaults = {k: self.__dict__[k] for k in self.key_list} @@ -428,7 +513,7 @@ class GatewaySettings(object): Ensure that the serial interface is available before proceeding. """ - if not self.local_testing_mode: + if not self.local_testing_mode and not self.qubes: name = {TX: TRANSMITTER, NC: RELAY, RX: RECEIVER}[self.software_operation] self.use_serial_usb_adapter = yes(f"Use USB-to-serial/TTL adapter for {name} Computer?", head=1, tail=1) @@ -444,6 +529,22 @@ class GatewaySettings(object): m_print(f"Error: Serial interface /dev/{self.built_in_serial_interface} not found.") self.setup() + if self.qubes and self.software_operation != RX: + + # Check if IP address was stored by the installer. + if os.path.isfile(QUBES_RX_IP_ADDR_FILE): + cached_ip = open(QUBES_RX_IP_ADDR_FILE).read().strip() + os.remove(QUBES_RX_IP_ADDR_FILE) + + if validate_ip_address(cached_ip) == '': + self.rx_udp_ip = cached_ip + return + + # If we reach this point, no cached IP was found, prompt for IP address from the user. 
+ rx_device, short = ('Networked', 'NET') if self.software_operation == TX else ('Destination', 'DST') + m_print(f"Enter the IP address of the {rx_device} Computer", head=1, tail=1) + self.rx_udp_ip = box_input(f"{short} IP-address", expected_len=15, validator=validate_ip_address, tail=1) + def store_settings(self) -> None: """Store serial settings in JSON format.""" serialized = json.dumps(self, default=(lambda o: {k: self.__dict__[k] for k in self.key_list}), indent=4) @@ -484,36 +585,75 @@ class GatewaySettings(object): def check_missing_settings(self, json_dict: Any) -> None: """Check for missing JSON fields and invalid values.""" for key in self.key_list: - if key not in json_dict: - m_print([f"Error: Missing setting '{key}' in '{self.file_name}'.", - f"The value has been set to default ({self.defaults[key]})."], head=1, tail=1) - setattr(self, key, self.defaults[key]) + try: + self.check_key_in_key_list(key, json_dict) + + if key == 'serial_baudrate': + self.validate_serial_baudrate(key, json_dict) + + elif key == 'serial_error_correction': + self.validate_serial_error_correction(key, json_dict) + + elif key == 'use_serial_usb_adapter': + self.validate_serial_usb_adapter_value(key, json_dict) + + elif key == 'built_in_serial_interface': + self.validate_serial_interface_value(key, json_dict) + + elif key == 'rx_udp_ip': + json_dict[key] = self.validate_rx_udp_ip_address(key, json_dict) + + except SoftError: continue - # Closer inspection of each setting value - if key == 'serial_baudrate' and json_dict[key] not in serial.Serial().BAUDRATES: - self.invalid_setting(key, json_dict) - continue - - elif key == 'serial_error_correction' and (not isinstance(json_dict[key], int) or json_dict[key] < 0): - self.invalid_setting(key, json_dict) - continue - - elif key == 'use_serial_usb_adapter': - if not isinstance(json_dict[key], bool): - self.invalid_setting(key, json_dict) - continue - - elif key == 'built_in_serial_interface': - if not isinstance(json_dict[key], str): - self.invalid_setting(key, json_dict) - continue - if not any(json_dict[key] == f for f in os.listdir('/sys/class/tty')): - self.invalid_setting(key, json_dict) - continue - setattr(self, key, json_dict[key]) + def check_key_in_key_list(self, key: str, json_dict: Any) -> None: + """Check if the setting's key value is in the setting dictionary.""" + if key not in json_dict: + m_print([f"Error: Missing setting '{key}' in '{self.file_name}'.", + f"The value has been set to default ({self.defaults[key]})."], head=1, tail=1) + setattr(self, key, self.defaults[key]) + raise SoftError("Missing key", output=False) + + def validate_serial_usb_adapter_value(self, key: str, json_dict: Any) -> None: + """Validate the serial usb adapter setting value.""" + if not isinstance(json_dict[key], bool): + self.invalid_setting(key, json_dict) + raise SoftError("Invalid value", output=False) + + def validate_serial_baudrate(self, key: str, json_dict: Any) -> None: + """Validate the serial baudrate setting value.""" + if json_dict[key] not in serial.Serial().BAUDRATES: + self.invalid_setting(key, json_dict) + raise SoftError("Invalid value", output=False) + + def validate_serial_error_correction(self, key: str, json_dict: Any) -> None: + """Validate the serial error correction setting value.""" + if not isinstance(json_dict[key], int) or json_dict[key] < 0: + self.invalid_setting(key, json_dict) + raise SoftError("Invalid value", output=False) + + def validate_serial_interface_value(self, key: str, json_dict: Any) -> None: + """Validate the serial 
interface setting value.""" + if not isinstance(json_dict[key], str): + self.invalid_setting(key, json_dict) + raise SoftError("Invalid value", output=False) + + if not any(json_dict[key] == f for f in os.listdir('/sys/class/tty')): + self.invalid_setting(key, json_dict) + raise SoftError("Invalid value", output=False) + + def validate_rx_udp_ip_address(self, key: str, json_dict: Any) -> str: + """Validate IP address of receiving Qubes VM.""" + if self.qubes: + if not isinstance(json_dict[key], str) or validate_ip_address(json_dict[key]) != '': + self.setup() + return self.rx_udp_ip + + rx_udp_ip = json_dict[key] # type: str + return rx_udp_ip + def change_setting(self, key: str, value_str: str) -> None: """Parse, update and store new setting value.""" attribute = self.__getattribute__(key) diff --git a/src/common/input.py b/src/common/input.py index 2609d27..960ad8b 100644 --- a/src/common/input.py +++ b/src/common/input.py @@ -149,7 +149,7 @@ def get_b58_key(key_type: str, # The type of Base58 key to be enter raise CriticalError("Invalid key type") while True: - rx_pk = box_input(box_msg, key_type=key_type, guide=not settings.local_testing_mode) + rx_pk = box_input(box_msg, key_type=key_type, guide=not (settings.local_testing_mode or settings.qubes)) rx_pk = ''.join(rx_pk.split()) if key_type == B58_PUBLIC_KEY and rx_pk == '': @@ -175,8 +175,8 @@ def nc_bypass_msg(key: str, settings: 'Settings') -> None: key. Without the ciphertext, e.g. a visually collected local key decryption key is useless. """ - m = {NC_BYPASS_START: "Bypass Networked Computer if needed. Press to send local key.", - NC_BYPASS_STOP: "Remove bypass of Networked Computer. Press to continue."} + m = {NC_BYPASS_START: "Bypass the Networked Computer if needed. Press to send local key.", + NC_BYPASS_STOP: "Remove bypass of the Networked Computer. Press to continue."} if settings.nc_bypass_messages: m_print(m[key], manual_proceed=True, box=True, head=(1 if key == NC_BYPASS_STOP else 0)) diff --git a/src/common/misc.py b/src/common/misc.py index c7c6354..9eeeeb7 100755 --- a/src/common/misc.py +++ b/src/common/misc.py @@ -27,6 +27,7 @@ import math import os import random import shutil +import socket import subprocess import sys import time @@ -60,13 +61,13 @@ def calculate_race_condition_delay(serial_error_correction: int, Calculate the delay required to prevent Relay Program race condition. When Transmitter Program outputs a command to exit or wipe data, - Relay program will also receive a copy of the command. If Relay - Program acts on the command too early, Receiver Program will not + Relay program will also receive a copy of the command. If the Relay + Program acts on the command too early, the Receiver Program will not receive the exit/wipe command at all. - This program calculates the delay Transmitter Program should wait - before outputting command for Relay Program, to ensure Receiver - Program has received the encrypted command. + This function calculates the delay Transmitter Program should wait + before outputting command to the Relay Program, to ensure the + Receiver Program has received its encrypted command. """ rs = RSCodec(2 * serial_error_correction) message_length = PACKET_LENGTH + ONION_ADDRESS_LENGTH @@ -276,7 +277,7 @@ def power_off_system() -> None: os.system(POWEROFF) -def process_arguments() -> Tuple[str, bool, bool]: +def process_arguments() -> Tuple[str, bool, bool, bool]: """Load program-specific settings from command line arguments. 
The arguments are determined by the desktop entries and in the @@ -305,10 +306,16 @@ def process_arguments() -> Tuple[str, bool, bool]: dest='data_diode_sockets', help="use data diode simulator sockets during local testing mode") + parser.add_argument('-q', + action='store_true', + default=False, + dest='qubes', + help="output data as UDP packets. Allows running TFC in qubes") + args = parser.parse_args() operation = RX if args.operation else TX - return operation, args.local_test, args.data_diode_sockets + return operation, args.local_test, args.data_diode_sockets, args.qubes def readable_size(size: int) -> str: @@ -477,6 +484,15 @@ def validate_group_name(group_name: str, # Name of the group return error_msg +def validate_ip_address(ip_address: str, *_: Any) -> str: + """Validate the IP address.""" + try: + socket.inet_aton(ip_address) + return '' + except socket.error: + return 'Invalid IP address' + + def validate_key_exchange(key_ex: str, # Key exchange selection to validate *_: Any # Unused arguments ) -> str: # Error message if validation failed, else empty string diff --git a/src/common/output.py b/src/common/output.py index 01cb9f0..c2a24d1 100644 --- a/src/common/output.py +++ b/src/common/output.py @@ -205,9 +205,9 @@ def print_key(message: str, # Instructive messag ) -> None: """Print a symmetric key in WIF format. - If local testing is not enabled, this function adds spacing in the - middle of the key, as well as guide letters to help the user keep - track of typing progress: + If serial-interface based platform is used, this function adds + spacing in the middle of the key, as well as guide letters to help + the user keep track of typing progress: Local key encryption keys: @@ -220,7 +220,7 @@ def print_key(message: str, # Instructive messag 4EcuqaD ddsdsuc gBX2PY2 qR8hReA aeSN2oh JB9w5Cv q6BQjDa PPgzSvW 932aHio sT42SKJ Gu2PpS1 Za3Xrao """ b58key = b58encode(key_bytes, public_key) - if settings.local_testing_mode: + if settings.local_testing_mode or settings.qubes: m_print([message, b58key], box=True) else: guide, chunk_length = (B58_PUBLIC_KEY_GUIDE, 7) if public_key else (B58_LOCAL_KEY_GUIDE, 3) diff --git a/src/common/statics.py b/src/common/statics.py index 5bd1628..ed1102b 100644 --- a/src/common/statics.py +++ b/src/common/statics.py @@ -21,7 +21,7 @@ along with TFC. If not, see . 
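A quick note on the `validate_ip_address` helper added above: socket.inet_aton accepts anything the C library can parse as numbers-and-dots notation, so the validator returns an empty string for well-formed addresses (including legacy shorthand such as '127.1') and an error message otherwise. A standalone sketch mirroring that behavior:

    # Standalone sketch mirroring the helper above; not the module itself.
    import socket

    def validate_ip(ip_address: str) -> str:
        """Return '' if the address parses, else an error message."""
        try:
            socket.inet_aton(ip_address)
            return ''
        except socket.error:
            return 'Invalid IP address'

    assert validate_ip('10.137.0.17') == ''                   # typical dotted quad
    assert validate_ip('256.1.2.3')   == 'Invalid IP address'
    assert validate_ip('127.1')       == ''                   # shorthand also parses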
"""Program details""" TFC = 'TFC' -VERSION = '1.20.02' +VERSION = '1.20.03' TRANSMITTER = 'Transmitter' RECEIVER = 'Receiver' RELAY = 'Relay' @@ -503,6 +503,12 @@ DST_LISTEN_SOCKET = 5008 DD_ANIMATION_LENGTH = 16 DD_OFFSET_FROM_CENTER = 4 +# Qubes related +QUBES_SRC_LISTEN_SOCKET = 2063 +QUBES_DST_LISTEN_SOCKET = 2064 +SOCKET_BUFFER_SIZE = 4096 +QUBES_RX_IP_ADDR_FILE = 'rx_ip_addr' + # Field lengths ENCODED_BOOLEAN_LENGTH = 1 ENCODED_BYTE_LENGTH = 1 diff --git a/src/receiver/commands.py b/src/receiver/commands.py index b816190..0f99b0c 100644 --- a/src/receiver/commands.py +++ b/src/receiver/commands.py @@ -79,7 +79,7 @@ def process_command(ts: 'datetime', header, cmd = separate_header(cmd_packet.assemble_command_packet(), ENCRYPTED_COMMAND_HEADER_LENGTH) no = None - # Keyword Function to run ( Parameters ) + # Header Function to run ( Parameters ) # -------------------------------------------------------------------------------------------------------------- d = {LOCAL_KEY_RDY: (local_key_rdy, ts, window_list, contact_list ), WIN_ACTIVITY: (win_activity, window_list ), diff --git a/src/receiver/files.py b/src/receiver/files.py index 1ef4488..833894c 100644 --- a/src/receiver/files.py +++ b/src/receiver/files.py @@ -93,9 +93,7 @@ def process_assembled_file(ts: 'datetime', # Timestamp last receiv if len(file_key) != SYMMETRIC_KEY_LENGTH: raise SoftError("Error: Received file had an invalid key.") - decrypt_and_store_file( - ts, file_ct, file_key, file_name, onion_pub_key, nick, window_list, settings - ) + decrypt_and_store_file(ts, file_ct, file_key, file_name, onion_pub_key, nick, window_list, settings) def decrypt_and_store_file(ts: 'datetime', # Timestamp of received packet @@ -146,8 +144,7 @@ def new_file(ts: 'datetime', # Timestamp o contact = contact_list.get_contact_by_pub_key(onion_pub_key) if not contact.file_reception: - raise SoftError( - f"Alert! Discarded file from {contact.nick} as file reception for them is disabled.", bold=True) + raise SoftError(f"Alert! Discarded file from {contact.nick} as file reception for them is disabled.", bold=True) k = onion_pub_key + blake2b(file_ct) # Dictionary key diff --git a/src/receiver/key_exchanges.py b/src/receiver/key_exchanges.py index 93b9644..932a59d 100644 --- a/src/receiver/key_exchanges.py +++ b/src/receiver/key_exchanges.py @@ -77,7 +77,7 @@ def protect_kdk(kdk: bytes) -> None: def process_local_key_buffer(kdk: bytes, l_queue: 'local_key_queue' ) -> Tuple[datetime, bytes]: - """Check if the kdk was for a packet further ahead in the queue.""" + """Check if the KDK was for a packet further ahead in the queue.""" buffer = [] # type: List[Tuple[datetime, bytes]] while l_queue.qsize() > 0: tup = l_queue.get() # type: Tuple[datetime, bytes] @@ -113,7 +113,7 @@ def decrypt_local_key(ts: 'datetime', ) -> Tuple['datetime', bytes]: """Decrypt local key packet.""" while True: - kdk = get_b58_key(B58_LOCAL_KEY, settings) + kdk = get_b58_key(B58_LOCAL_KEY, settings) kdk_hash = blake2b(kdk) # Check if the key was an old one. diff --git a/src/receiver/packet.py b/src/receiver/packet.py index 51cd777..b9ae246 100644 --- a/src/receiver/packet.py +++ b/src/receiver/packet.py @@ -90,22 +90,22 @@ def decrypt_assembly_packet(packet: bytes, # Assembly packet cip While all message datagrams have been implicitly assumed to have originated from some contact until this point, to prevent the - possibility of existential forgeries, the origin of message will be - validated at this point with the cryptographic Poly1305-tag. 
+ possibility of existential forgeries, the origin of the message will + be validated at this point with the cryptographic Poly1305-tag. - As per the cryptographic doom principle, the message will not be - even decrypted unless the Poly1305 tag of the ciphertext is valid. + As per the cryptographic doom principle, the message won't be even + decrypted unless the Poly1305 tag of the ciphertext is valid. - This function also authentication of packets that handle control - flow of the Receiver program. Like messages, command datagrams have - been implicitly assumed to be commands until this point. However, - unless the Poly1305-tag of the purported command is found to be valid - with the forward secret local key, it will not be even decrypted, - let alone processed. + This function also authenticates packets that handle control flow of + the Receiver program. Like messages, command datagrams have been + implicitly assumed to be commands until this point. However, unless + the Poly1305-tag of the purported command is found to be valid with + the forward secret local key, it will not be even decrypted, let + alone processed. """ - ct_harac, ct_assemby_packet = separate_header(packet, header_length=HARAC_CT_LENGTH) - cmd_win = window_list.get_command_window() - command = onion_pub_key == LOCAL_PUBKEY + ct_harac, ct_assembly_packet = separate_header(packet, header_length=HARAC_CT_LENGTH) + cmd_win = window_list.get_command_window() + command = onion_pub_key == LOCAL_PUBKEY p_type = "command" if command else "packet" direction = "from" if command or (origin == ORIGIN_CONTACT_HEADER) else "sent to" @@ -142,7 +142,7 @@ def decrypt_assembly_packet(packet: bytes, # Assembly packet cip # Decrypt packet try: - assembly_packet = auth_and_decrypt(ct_assemby_packet, message_key) + assembly_packet = auth_and_decrypt(ct_assembly_packet, message_key) except nacl.exceptions.CryptoError: raise SoftError(f"Warning! 
Received {p_type} {direction} {nick} had an invalid MAC.", window=cmd_win) diff --git a/src/relay/client.py b/src/relay/client.py index c5c178f..607a097 100644 --- a/src/relay/client.py +++ b/src/relay/client.py @@ -32,19 +32,17 @@ import requests from cryptography.hazmat.primitives.asymmetric.x448 import X448PublicKey, X448PrivateKey -from src.common.encoding import b58encode, int_to_bytes, onion_address_to_pub_key, pub_key_to_onion_address -from src.common.encoding import pub_key_to_short_address +from src.common.encoding import (b58encode, int_to_bytes, onion_address_to_pub_key, pub_key_to_onion_address, + pub_key_to_short_address) from src.common.exceptions import SoftError from src.common.misc import ignored, separate_header, split_byte_string, validate_onion_addr from src.common.output import m_print, print_key, rp_print -from src.common.statics import (ACCOUNT_SEND_QUEUE, - CLIENT_OFFLINE_THRESHOLD, CONTACT_MGMT_QUEUE, CONTACT_REQ_QUEUE, C_REQ_MGMT_QUEUE, - C_REQ_STATE_QUEUE, DATAGRAM_HEADER_LENGTH, DST_MESSAGE_QUEUE, - FILE_DATAGRAM_HEADER, GROUP_ID_LENGTH, GROUP_MGMT_QUEUE, - GROUP_MSG_EXIT_GROUP_HEADER, GROUP_MSG_INVITE_HEADER, GROUP_MSG_JOIN_HEADER, - GROUP_MSG_MEMBER_ADD_HEADER, GROUP_MSG_MEMBER_REM_HEADER, GROUP_MSG_QUEUE, - MESSAGE_DATAGRAM_HEADER, ONION_SERVICE_PUBLIC_KEY_LENGTH, - ORIGIN_CONTACT_HEADER, PUB_KEY_SEND_QUEUE, +from src.common.statics import (ACCOUNT_SEND_QUEUE, CLIENT_OFFLINE_THRESHOLD, CONTACT_MGMT_QUEUE, CONTACT_REQ_QUEUE, + C_REQ_MGMT_QUEUE, C_REQ_STATE_QUEUE, DATAGRAM_HEADER_LENGTH, DST_MESSAGE_QUEUE, + FILE_DATAGRAM_HEADER, GROUP_ID_LENGTH, GROUP_MGMT_QUEUE, GROUP_MSG_EXIT_GROUP_HEADER, + GROUP_MSG_INVITE_HEADER, GROUP_MSG_JOIN_HEADER, GROUP_MSG_MEMBER_ADD_HEADER, + GROUP_MSG_MEMBER_REM_HEADER, GROUP_MSG_QUEUE, MESSAGE_DATAGRAM_HEADER, + ONION_SERVICE_PUBLIC_KEY_LENGTH, ORIGIN_CONTACT_HEADER, PUB_KEY_SEND_QUEUE, PUBLIC_KEY_DATAGRAM_HEADER, RELAY_CLIENT_MAX_DELAY, RELAY_CLIENT_MIN_DELAY, RP_ADD_CONTACT_HEADER, RP_REMOVE_CONTACT_HEADER, TFC_PUBLIC_KEY_LENGTH, TOR_DATA_QUEUE, UNIT_TEST_QUEUE, URL_TOKEN_LENGTH, URL_TOKEN_QUEUE) @@ -406,9 +404,9 @@ def process_group_management_message(data: bytes, def c_req_manager(queues: 'QueueDict', unit_test: bool = False) -> None: - """Manage incoming contact requests.""" - existing_contacts = [] # type: List[bytes] - contact_requests = [] # type: List[bytes] + """Manage displayed contact requests.""" + existing_contacts = [] # type: List[bytes] + displayed_requests = [] # type: List[bytes] request_queue = queues[CONTACT_REQ_QUEUE] contact_queue = queues[C_REQ_MGMT_QUEUE] @@ -431,7 +429,7 @@ def c_req_manager(queues: 'QueueDict', unit_test: bool = False) -> None: onion_pub_key = onion_address_to_pub_key(purp_onion_address) if onion_pub_key in existing_contacts: continue - if onion_pub_key in contact_requests: + if onion_pub_key in displayed_requests: continue if show_requests: @@ -439,7 +437,7 @@ def c_req_manager(queues: 'QueueDict', unit_test: bool = False) -> None: m_print([f"{ts} - New contact request from an unknown TFC account:", purp_onion_address], box=True) account_queue.put(purp_onion_address) - contact_requests.append(onion_pub_key) + displayed_requests.append(onion_pub_key) if unit_test and queues[UNIT_TEST_QUEUE].qsize() != 0: break diff --git a/src/relay/commands.py b/src/relay/commands.py index c467eea..fe1b522 100644 --- a/src/relay/commands.py +++ b/src/relay/commands.py @@ -72,7 +72,7 @@ def process_command(command: bytes, """Select function for received Relay Program command.""" header, command = 
separate_header(command, UNENCRYPTED_COMMAND_HEADER_LENGTH) - # Keyword Function to run ( Parameters ) + # Header Function to run ( Parameters ) # --------------------------------------------------------------------------------- function_d = {UNENCRYPTED_SCREEN_CLEAR: (clear_windows, gateway, ), UNENCRYPTED_SCREEN_RESET: (reset_windows, gateway, ), @@ -81,8 +81,8 @@ def process_command(command: bytes, UNENCRYPTED_EC_RATIO: (change_ec_ratio, command, gateway, ), UNENCRYPTED_BAUDRATE: (change_baudrate, command, gateway, ), UNENCRYPTED_MANAGE_CONTACT_REQ: (manage_contact_req, command, queues), - UNENCRYPTED_ADD_NEW_CONTACT: (add_contact, command, False, queues), - UNENCRYPTED_ADD_EXISTING_CONTACT: (add_contact, command, True, queues), + UNENCRYPTED_ADD_NEW_CONTACT: (add_contact, command, queues, False ), + UNENCRYPTED_ADD_EXISTING_CONTACT: (add_contact, command, queues, True ), UNENCRYPTED_REM_CONTACT: (remove_contact, command, queues), UNENCRYPTED_ONION_SERVICE_DATA: (add_onion_data, command, queues), UNENCRYPTED_ACCOUNT_CHECK: (compare_accounts, command, queues), @@ -185,8 +185,8 @@ def manage_contact_req(command: bytes, def add_contact(command: bytes, + queues: 'QueueDict', existing: bool, - queues: 'QueueDict' ) -> None: """Add clients to Relay Program. @@ -232,9 +232,9 @@ def add_onion_data(command: bytes, queues: 'QueueDict') -> None: existing_public_keys = public_key_list[no_pending:] for onion_pub_key in pending_public_keys: - add_contact(onion_pub_key, False, queues) + add_contact(onion_pub_key, queues, existing=False) for onion_pub_key in existing_public_keys: - add_contact(onion_pub_key, True, queues) + add_contact(onion_pub_key, queues, existing=True) manage_contact_req(allow_req_byte, queues, notify=False) queues[ONION_KEY_QUEUE].put((os_private_key, confirmation_code)) diff --git a/src/relay/diffs.py b/src/relay/diffs.py index e200bdb..fb343e1 100644 --- a/src/relay/diffs.py +++ b/src/relay/diffs.py @@ -196,34 +196,31 @@ def show_value_diffs(value_type: str, purp_value: str, local_test: bool ) -> None: - """Compare purported value with correct value.""" + """Show differences between purported value and correct value.""" # Pad with underscores to denote missing chars while len(purp_value) < ENCODED_B58_PUB_KEY_LENGTH: purp_value += '_' - replace_l = '' - purported = '' + rep_arrows = '' + purported = '' + for c1, c2 in zip(purp_value, true_value): - if c1 == c2: - replace_l += ' ' - purported += c1 - else: - replace_l += '↓' - purported += c1 + rep_arrows += ' ' if c1 == c2 else '↓' + purported += c1 message_list = [f"Source Computer received an invalid {value_type}.", "See arrows below that point to correct characters."] if local_test: - m_print(message_list + ['', purported, replace_l, true_value], box=True) + m_print(message_list + ['', purported, rep_arrows, true_value], box=True) else: purported = ' '.join(split_string(purported, item_len=7)) - replace_l = ' '.join(split_string(replace_l, item_len=7)) + rep_arrows = ' '.join(split_string(rep_arrows, item_len=7)) true_value = ' '.join(split_string(true_value, item_len=7)) m_print(message_list + ['', B58_PUBLIC_KEY_GUIDE, purported, - replace_l, + rep_arrows, true_value, B58_PUBLIC_KEY_GUIDE], box=True) diff --git a/src/relay/server.py b/src/relay/server.py index e2a4fed..0e38802 100644 --- a/src/relay/server.py +++ b/src/relay/server.py @@ -19,8 +19,8 @@ You should have received a copy of the GNU General Public License along with TFC. If not, see . 
""" -import hmac import logging +import secrets import typing from io import BytesIO @@ -67,7 +67,10 @@ def validate_url_token(purp_url_token: str, # True if a matching shared secret was found in pub_key_dict. valid_url_token = False for url_token in pub_key_dict: - valid_url_token |= hmac.compare_digest(purp_url_token, url_token) + try: + valid_url_token |= secrets.compare_digest(purp_url_token, url_token) + except TypeError: + valid_url_token |= False return valid_url_token diff --git a/src/relay/tcb.py b/src/relay/tcb.py index c374601..5ff62ad 100644 --- a/src/relay/tcb.py +++ b/src/relay/tcb.py @@ -24,8 +24,7 @@ import typing from typing import Any, Dict, List, Tuple, Union -from src.common.encoding import bytes_to_int, pub_key_to_short_address -from src.common.encoding import int_to_bytes, b85encode +from src.common.encoding import b85encode, bytes_to_int, int_to_bytes, pub_key_to_short_address from src.common.exceptions import SoftError from src.common.misc import ignored, separate_header, split_byte_string from src.common.output import rp_print @@ -217,7 +216,8 @@ def process_add_or_group_remove_member(ts: 'datetime', header_str: str, group_id: bytes, messages_to_flask: 'Queue[Tuple[Union[bytes, str], bytes]]', - remaining: List[bytes], removable: List[bytes] + remaining: List[bytes], + removable: List[bytes] ) -> None: """Process group add or remove member packet.""" packet_str = header_str + b85encode(group_id + b"".join(removable)) diff --git a/src/transmitter/commands.py b/src/transmitter/commands.py index ce29f06..bb1c495 100755 --- a/src/transmitter/commands.py +++ b/src/transmitter/commands.py @@ -79,7 +79,7 @@ def process_command(user_input: 'UserInput', Select function based on the first keyword of the issued command, and pass relevant parameters to it. 
""" - # Keyword Function to run ( Parameters ) + # Command Function to run ( Parameters ) # ----------------------------------------------------------------------------------------------------------------------------------------- d = {'about': (print_about, ), 'add': (add_new_contact, contact_list, group_list, settings, queues, onion_service ), diff --git a/src/transmitter/key_exchanges.py b/src/transmitter/key_exchanges.py index d503443..60d155d 100644 --- a/src/transmitter/key_exchanges.py +++ b/src/transmitter/key_exchanges.py @@ -212,7 +212,7 @@ def new_local_key(contact_list: 'ContactList', key, csprng(), hek, csprng())) - # Notify Receiver that confirmation code was successfully entered + # Notify Receiver Program that confirmation code was successfully entered queue_command(LOCAL_KEY_RDY, settings, queues) m_print("Successfully completed the local key exchange.", bold=True, tail_clear=True, delay=1, head=1) @@ -319,7 +319,7 @@ def start_key_exchange(onion_pub_key: bytes, # Public key of contact's dh_shared_key = X448.shared_key(tfc_private_key_user, tfc_public_key_contact) tx_mk, rx_mk, tx_hk, rx_hk, tx_fp, rx_fp \ - = X448.derive_keys(dh_shared_key, tfc_public_key_user, tfc_public_key_contact) + = X448.derive_subkeys(dh_shared_key, tfc_public_key_user, tfc_public_key_contact) kex_status = validate_contact_fingerprint(tx_fp, rx_fp) @@ -577,10 +577,10 @@ def store_keys_on_removable_drive(ct_tag: bytes, # Encrypted PS settings: 'Settings', # Settings object ) -> None: """Store keys for contact on a removable media.""" + trunc_addr = pub_key_to_short_address(onion_pub_key) while True: - trunc_addr = pub_key_to_short_address(onion_pub_key) - store_d = ask_path_gui(f"Select removable media for {nick}", settings) - f_name = f"{store_d}/{onion_service.user_short_address}.psk - Give to {trunc_addr}" + store_d = ask_path_gui(f"Select removable media for {nick}", settings) + f_name = f"{store_d}/{onion_service.user_short_address}.psk - Give to {trunc_addr}" try: with open(f_name, "wb+") as f: @@ -624,4 +624,4 @@ def rxp_load_psk(window: 'TxWindow', print_on_previous_line(reps=4, delay=2) except (EOFError, KeyboardInterrupt): - raise SoftError("PSK verification aborted.", tail_clear=True, delay=1, head=2) + raise SoftError("PSK install verification aborted.", tail_clear=True, delay=1, head=2) diff --git a/src/transmitter/traffic_masking.py b/src/transmitter/traffic_masking.py index f109055..f1b5919 100755 --- a/src/transmitter/traffic_masking.py +++ b/src/transmitter/traffic_masking.py @@ -25,8 +25,8 @@ import typing from typing import Any, Dict, Optional, Tuple, Union from src.common.misc import ignored -from src.common.statics import (C_N_HEADER, NOISE_PACKET_BUFFER, PADDING_LENGTH, P_N_HEADER, - TM_NOISE_COMMAND_QUEUE, TM_NOISE_PACKET_QUEUE) +from src.common.statics import (C_N_HEADER, NOISE_PACKET_BUFFER, PADDING_LENGTH, P_N_HEADER, TM_NOISE_COMMAND_QUEUE, + TM_NOISE_PACKET_QUEUE) if typing.TYPE_CHECKING: from multiprocessing import Queue diff --git a/tests/common/test_crypto.py b/tests/common/test_crypto.py index 6cfd66d..c1f99d1 100644 --- a/tests/common/test_crypto.py +++ b/tests/common/test_crypto.py @@ -39,8 +39,8 @@ import nacl.utils from cryptography.hazmat.primitives.asymmetric.x448 import X448PrivateKey from cryptography.hazmat.primitives.serialization import Encoding, NoEncryption, PrivateFormat -from src.common.crypto import argon2_kdf, auth_and_decrypt, blake2b, byte_padding, check_kernel_version, csprng -from src.common.crypto import encrypt_and_sign, rm_padding_bytes, X448 
+from src.common.crypto import (argon2_kdf, auth_and_decrypt, blake2b, byte_padding, check_kernel_version, csprng, + encrypt_and_sign, rm_padding_bytes, X448) from src.common.statics import (ARGON2_MIN_MEMORY_COST, ARGON2_MIN_PARALLELISM, ARGON2_MIN_TIME_COST, ARGON2_SALT_LENGTH, BLAKE2_DIGEST_LENGTH, BLAKE2_DIGEST_LENGTH_MAX, BLAKE2_DIGEST_LENGTH_MIN, BLAKE2_KEY_LENGTH_MAX, BLAKE2_PERSON_LENGTH_MAX, @@ -125,7 +125,7 @@ class TestBLAKE2bWrapper(unittest.TestCase): These tests ensure the BLAKE2b implementation detects invalid parameters. """ - + def setUp(self) -> None: """Pre-test actions.""" self.test_string = b'test_string' @@ -184,7 +184,7 @@ class TestArgon2KDF(unittest.TestCase): output of the argon2_cffi library to the output of the command-line utility under those input parameters. - [1] https://tools.ietf.org/html/draft-irtf-cfrg-argon2-03#section-6.3 + [1] https://tools.ietf.org/html/draft-irtf-cfrg-argon2-09#section-5.3 [2] https://github.com/P-H-C/phc-winner-argon2#command-line-utility """ @@ -339,8 +339,8 @@ class TestX448(unittest.TestCase): The pyca/cryptography library does not provide bindings for the OpenSSL's X448 internals, but both KATs are done by OpenSSL tests: - https://github.com/openssl/openssl/blob/master/test/curve448_internal_test.c#L654 - https://github.com/openssl/openssl/blob/master/test/curve448_internal_test.c#L668 + https://github.com/openssl/openssl/blob/master/test/curve448_internal_test.c#L655 + https://github.com/openssl/openssl/blob/master/test/curve448_internal_test.c#L669 """ sk_alice = bytes.fromhex( '9a8f4925d1519f5775cf46b04b5800d4ee9ee8bae8bc5565d498c28d' @@ -446,23 +446,23 @@ class TestX448(unittest.TestCase): self.assertEqual(shared_secret1, blake2b(TestX448.shared_secret)) self.assertEqual(shared_secret2, blake2b(TestX448.shared_secret)) - def test_non_unique_keys_raise_critical_error(self) -> None: + def test_non_unique_subkeys_raise_critical_error(self) -> None: # Setup shared_key = os.urandom(SYMMETRIC_KEY_LENGTH) tx_public_key = os.urandom(TFC_PUBLIC_KEY_LENGTH) # Test with self.assertRaises(SystemExit): - X448.derive_keys(shared_key, tx_public_key, tx_public_key) + X448.derive_subkeys(shared_key, tx_public_key, tx_public_key) - def test_x448_key_derivation(self) -> None: + def test_x448_subkey_derivation(self) -> None: # Setup shared_key = os.urandom(SYMMETRIC_KEY_LENGTH) tx_public_key = os.urandom(TFC_PUBLIC_KEY_LENGTH) rx_public_key = os.urandom(TFC_PUBLIC_KEY_LENGTH) # Test - key_set = X448.derive_keys(shared_key, tx_public_key, rx_public_key) + key_set = X448.derive_subkeys(shared_key, tx_public_key, rx_public_key) # Test that correct number of keys were returned self.assertEqual(len(key_set), 6) @@ -486,7 +486,7 @@ class TestXChaCha20Poly1305(unittest.TestCase): ciphertext and tag. 
IETF test vectors: - https://tools.ietf.org/html/draft-irtf-cfrg-xchacha-01#appendix-A.1 + https://tools.ietf.org/html/draft-irtf-cfrg-xchacha-03#appendix-A.3 Libsodium test vectors: Message: https://github.com/jedisct1/libsodium/blob/master/test/default/aead_xchacha20poly1305.c#L22 @@ -500,38 +500,28 @@ class TestXChaCha20Poly1305(unittest.TestCase): """ ietf_plaintext = bytes.fromhex( - '4c 61 64 69 65 73 20 61 6e 64 20 47 65 6e 74 6c' - '65 6d 65 6e 20 6f 66 20 74 68 65 20 63 6c 61 73' - '73 20 6f 66 20 27 39 39 3a 20 49 66 20 49 20 63' - '6f 75 6c 64 20 6f 66 66 65 72 20 79 6f 75 20 6f' - '6e 6c 79 20 6f 6e 65 20 74 69 70 20 66 6f 72 20' - '74 68 65 20 66 75 74 75 72 65 2c 20 73 75 6e 73' - '63 72 65 65 6e 20 77 6f 75 6c 64 20 62 65 20 69' - '74 2e') + '4c616469657320616e642047656e746c656d656e206f662074686520636c6173' + '73206f66202739393a204966204920636f756c64206f6666657220796f75206f' + '6e6c79206f6e652074697020666f7220746865206675747572652c2073756e73' + '637265656e20776f756c642062652069742e') ietf_ad = bytes.fromhex( - '50 51 52 53 c0 c1 c2 c3 c4 c5 c6 c7') + '50515253c0c1c2c3c4c5c6c7') ietf_key = bytes.fromhex( - '80 81 82 83 84 85 86 87 88 89 8a 8b 8c 8d 8e 8f' - '90 91 92 93 94 95 96 97 98 99 9a 9b 9c 9d 9e 9f') + '808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9f') ietf_nonce = bytes.fromhex( - '40 41 42 43 44 45 46 47 48 49 4a 4b 4c 4d 4e 4f' - '50 51 52 53 54 55 56 57') + '404142434445464748494a4b4c4d4e4f5051525354555657') ietf_ciphertext = bytes.fromhex( - 'bd 6d 17 9d 3e 83 d4 3b 95 76 57 94 93 c0 e9 39' - '57 2a 17 00 25 2b fa cc be d2 90 2c 21 39 6c bb' - '73 1c 7f 1b 0b 4a a6 44 0b f3 a8 2f 4e da 7e 39' - 'ae 64 c6 70 8c 54 c2 16 cb 96 b7 2e 12 13 b4 52' - '2f 8c 9b a4 0d b5 d9 45 b1 1b 69 b9 82 c1 bb 9e' - '3f 3f ac 2b c3 69 48 8f 76 b2 38 35 65 d3 ff f9' - '21 f9 66 4c 97 63 7d a9 76 88 12 f6 15 c6 8b 13' - 'b5 2e') + 'bd6d179d3e83d43b9576579493c0e939572a1700252bfaccbed2902c21396cbb' + '731c7f1b0b4aa6440bf3a82f4eda7e39ae64c6708c54c216cb96b72e1213b452' + '2f8c9ba40db5d945b11b69b982c1bb9e3f3fac2bc369488f76b2383565d3fff9' + '21f9664c97637da9768812f615c68b13b52e') ietf_tag = bytes.fromhex( - 'c0:87:59:24:c1:c7:98:79:47:de:af:d8:78:0a:cf:49'.replace(':', '')) + 'c0875924c1c7987947deafd8780acf49') nonce_ct_tag_ietf = ietf_nonce + ietf_ciphertext + ietf_tag @@ -712,7 +702,7 @@ class TestCSPRNG(unittest.TestCase): https://github.com/smuellerDD/lrng/tree/master/test The report on the statistical tests of the LRNG can be found from - Chapter 3 (pp.26-48) of the whitepaper: + Chapter 3 (pp.30-46) of the white paper: https://www.chronox.de/lrng/doc/lrng.pdf Further analysis of the LRNG can be found from Chapters 4-8 @@ -742,7 +732,7 @@ class TestCSPRNG(unittest.TestCase): with self.assertRaises(SystemExit): csprng() - def test_subceeding_hash_function_min_digest_size_raises_critical_error(self) -> None: + def test_subceding_hash_function_min_digest_size_raises_critical_error(self) -> None: with self.assertRaises(SystemExit): csprng(BLAKE2_DIGEST_LENGTH_MIN-1) diff --git a/tests/common/test_database.py b/tests/common/test_database.py index 5c6ab94..d4a32a2 100644 --- a/tests/common/test_database.py +++ b/tests/common/test_database.py @@ -303,7 +303,7 @@ class TestTFCUnencryptedDatabase(unittest.TestCase): self.assertEqual(self.database.load_database(), data_old) self.assertFalse(os.path.isfile(self.database.database_temp)) - def test_load_database_prefers_valid_temp_database(self) -> None: + def test_load_database_prioritizes_valid_temp_database(self) -> None: # Setup 
data_old = os.urandom(MASTERKEY_DB_SIZE) checksummed_old = data_old + blake2b(data_old) @@ -432,7 +432,7 @@ class TestMessageLog(unittest.TestCase): self.message_log.c.execute(f"""INSERT INTO log_entries (log_entry) VALUES (?)""", (os.urandom(LOG_ENTRY_LENGTH),)) - # Test that TFC reopens closed database on write + # Test closed database is re-opened during write data = os.urandom(LOG_ENTRY_LENGTH) self.assertIsNone(self.message_log.insert_log_entry(data)) diff --git a/tests/common/test_db_contacts.py b/tests/common/test_db_contacts.py index 9df3acd..334fd1f 100644 --- a/tests/common/test_db_contacts.py +++ b/tests/common/test_db_contacts.py @@ -322,12 +322,12 @@ class TestContactList(TFCTestCase): self.assertFalse(self.contact_list.has_contacts()) def test_has_only_pending_contacts(self) -> None: - # Change all to pending + # Change all contacts' kex status to pending for contact in self.contact_list.get_list_of_contacts(): contact.kex_status = KEX_STATUS_PENDING self.assertTrue(self.contact_list.has_only_pending_contacts()) - # Change one from pending + # Change one kex status to unverified alice = self.contact_list.get_contact_by_address_or_nick('Alice') alice.kex_status = KEX_STATUS_UNVERIFIED self.assertFalse(self.contact_list.has_only_pending_contacts()) diff --git a/tests/common/test_db_groups.py b/tests/common/test_db_groups.py index af2a05b..f2ba18e 100644 --- a/tests/common/test_db_groups.py +++ b/tests/common/test_db_groups.py @@ -170,7 +170,7 @@ class TestGroupList(TFCTestCase): + self.settings.max_number_of_groups * self.single_member_data_len + POLY1305_TAG_LENGTH) - # Reduce setting values from 20 to 10 + # Reduce group database setting values from 20 to 10 self.settings.max_number_of_groups = 10 self.settings.max_number_of_group_members = 10 @@ -244,29 +244,29 @@ class TestGroupList(TFCTestCase): def test_add_group(self) -> None: members = [create_contact('Laura')] self.group_list.add_group('test_group_12', bytes(GROUP_ID_LENGTH), False, False, members) - self.group_list.add_group('test_group_12', bytes(GROUP_ID_LENGTH), False, True, members) + self.group_list.add_group('test_group_12', bytes(GROUP_ID_LENGTH), False, True, members) self.assertTrue(self.group_list.get_group('test_group_12').notifications) self.assertEqual(len(self.group_list), len(self.group_names)+1) def test_remove_group_by_name(self) -> None: self.assertEqual(len(self.group_list), len(self.group_names)) - # Remove non-existing group + # Test removing a non-existing group self.assertIsNone(self.group_list.remove_group_by_name('test_group_12')) self.assertEqual(len(self.group_list), len(self.group_names)) - # Remove existing group + # Test removing an existing group self.assertIsNone(self.group_list.remove_group_by_name('test_group_11')) self.assertEqual(len(self.group_list), len(self.group_names)-1) def test_remove_group_by_id(self) -> None: self.assertEqual(len(self.group_list), len(self.group_names)) - # Remove non-existing group + # Test removing a non-existing group self.assertIsNone(self.group_list.remove_group_by_id(group_name_to_group_id('test_group_12'))) self.assertEqual(len(self.group_list), len(self.group_names)) - # Remove existing group + # Test removing an existing group self.assertIsNone(self.group_list.remove_group_by_id(group_name_to_group_id('test_group_11'))) self.assertEqual(len(self.group_list), len(self.group_names)-1) diff --git a/tests/common/test_db_keys.py b/tests/common/test_db_keys.py index bf3ca51..1234c51 100644 --- a/tests/common/test_db_keys.py +++ 
b/tests/common/test_db_keys.py @@ -160,26 +160,26 @@ class TestKeyList(unittest.TestCase): new_key = bytes(SYMMETRIC_KEY_LENGTH) self.keylist.keysets = [create_keyset(LOCAL_ID)] - # Check that KeySet exists and that its keys are different + # Check that KeySet exists and that its keys are different from the new ones self.assertNotEqual(self.keylist.keysets[0].rx_hk, new_key) - # Replace existing KeySet + # Replace the existing KeySet self.assertIsNone(self.keylist.add_keyset(LOCAL_PUBKEY, new_key, new_key, new_key, new_key)) - # Check that new KeySet replaced the old one + # Check that the new KeySet replaced the old one self.assertEqual(self.keylist.keysets[0].onion_pub_key, LOCAL_PUBKEY) self.assertEqual(self.keylist.keysets[0].rx_hk, new_key) def test_remove_keyset(self) -> None: - # Test KeySet for Bob exists + # Test that the KeySet for Bob exists self.assertTrue(self.keylist.has_keyset(nick_to_pub_key('Bob'))) - # Remove KeySet for Bob + # Remove the KeySet for Bob self.assertIsNone(self.keylist.remove_keyset(nick_to_pub_key('Bob'))) - # Test KeySet was removed + # Test that the KeySet was removed self.assertFalse(self.keylist.has_keyset(nick_to_pub_key('Bob'))) @mock.patch('builtins.input', side_effect=['test_password']) @@ -190,18 +190,18 @@ class TestKeyList(unittest.TestCase): queues = gen_queue_dict() def queue_delayer() -> None: - """Place packet to queue after timer runs out.""" + """Place packet to the key management queue after timer runs out.""" time.sleep(0.1) queues[KEY_MANAGEMENT_QUEUE].put(master_key2.master_key) threading.Thread(target=queue_delayer).start() - # Test that new key is different from existing one + # Test that the new key is different from the existing one self.assertNotEqual(key, self.master_key.master_key) - # Change master key + # Change the master key self.assertIsNone(self.keylist.change_master_key(queues)) - # Test that master key has changed + # Test that the master key was changed self.assertEqual(self.keylist.master_key.master_key, key) self.assertEqual(self.keylist.database.database_key, key) @@ -254,20 +254,20 @@ class TestKeyList(unittest.TestCase): # Setup queues = gen_queue_dict() - # Test that KeySet for David does not exist + # Test that the KeySet for David does not exist self.assertFalse(self.keylist.has_keyset(nick_to_pub_key('David'))) - # Test adding KeySet + # Test adding the KeySet for David self.assertIsNone(self.keylist.manage(queues, KDB_ADD_ENTRY_HEADER, nick_to_pub_key('David'), bytes(SYMMETRIC_KEY_LENGTH), bytes(SYMMETRIC_KEY_LENGTH), bytes(SYMMETRIC_KEY_LENGTH), bytes(SYMMETRIC_KEY_LENGTH))) self.assertTrue(self.keylist.has_keyset(nick_to_pub_key('David'))) - # Test removing KeySet + # Test removing David's KeySet self.assertIsNone(self.keylist.manage(queues, KDB_REMOVE_ENTRY_HEADER, nick_to_pub_key('David'))) self.assertFalse(self.keylist.has_keyset(nick_to_pub_key('David'))) - # Test changing master key + # Test changing the master key new_key = SYMMETRIC_KEY_LENGTH * b'\x01' self.assertNotEqual(self.master_key.master_key, new_key) @@ -278,7 +278,7 @@ class TestKeyList(unittest.TestCase): self.assertEqual(self.keylist.master_key.master_key, new_key) self.assertEqual(self.keylist.database.database_key, new_key) - # Test invalid KeyList management command raises Critical Error + # Test an invalid KeyList management command raises CriticalError with self.assertRaises(SystemExit): self.keylist.manage(queues, 'invalid_key', None) diff --git a/tests/common/test_db_logs.py b/tests/common/test_db_logs.py index 14ebf2e..988f77b 
100644 --- a/tests/common/test_db_logs.py +++ b/tests/common/test_db_logs.py @@ -41,8 +41,8 @@ from src.common.statics import (CLEAR_ENTIRE_SCREEN, CURSOR_LEFT_UP_CORNER, WIN_TYPE_GROUP) from tests.mock_classes import create_contact, GroupList, MasterKey, RxWindow, Settings -from tests.utils import assembly_packet_creator, cd_unit_test, cleanup, group_name_to_group_id, nick_to_pub_key -from tests.utils import nick_to_short_address, tear_queues, TFCTestCase, gen_queue_dict +from tests.utils import (assembly_packet_creator, cd_unit_test, cleanup, group_name_to_group_id, nick_to_pub_key, + nick_to_short_address, tear_queues, TFCTestCase, gen_queue_dict) TIMESTAMP_BYTES = bytes.fromhex('08ceae02') STATIC_TIMESTAMP = bytes_to_timestamp(TIMESTAMP_BYTES).strftime('%H:%M:%S.%f')[:-TIMESTAMP_LENGTH] @@ -68,7 +68,7 @@ class TestLogWriterLoop(unittest.TestCase): queues = gen_queue_dict() def queue_delayer() -> None: - """Place messages to queue one at a time.""" + """Place messages to the logging queue one at a time.""" for p in [(nick_to_pub_key('Alice'), M_S_HEADER + bytes(PADDING_LENGTH), False, False, master_key), (None, C_S_HEADER + bytes(PADDING_LENGTH), True, False, master_key), (nick_to_pub_key('Alice'), P_N_HEADER + bytes(PADDING_LENGTH), True, True, master_key), @@ -101,7 +101,7 @@ class TestLogWriterLoop(unittest.TestCase): queues[TRAFFIC_MASKING_QUEUE].put(True) def queue_delayer() -> None: - """Place messages to queue one at a time.""" + """Place messages to the logging queue one at a time.""" for p in [(nick_to_pub_key('Alice'), M_S_HEADER + bytes(PADDING_LENGTH), False, False, master_key), (None, C_S_HEADER + bytes(PADDING_LENGTH), True, False, master_key), (nick_to_pub_key('Alice'), F_S_HEADER + bytes(PADDING_LENGTH), True, True, master_key), @@ -131,7 +131,7 @@ class TestLogWriterLoop(unittest.TestCase): queues = gen_queue_dict() def queue_delayer() -> None: - """Place messages to queue one at a time.""" + """Place messages to the logging queue one at a time.""" for p in [(None, C_S_HEADER + bytes(PADDING_LENGTH), True, False, master_key), (nick_to_pub_key('Alice'), M_S_HEADER + bytes(PADDING_LENGTH), False, False, master_key), (nick_to_pub_key('Alice'), F_S_HEADER + bytes(PADDING_LENGTH), True, True, master_key)]: @@ -139,7 +139,7 @@ class TestLogWriterLoop(unittest.TestCase): queues[LOG_PACKET_QUEUE].put(p) time.sleep(SLEEP_DELAY) - queues[LOGFILE_MASKING_QUEUE].put(True) # Start logging noise packets + queues[LOGFILE_MASKING_QUEUE].put(True) # Start logging of noise packets time.sleep(SLEEP_DELAY) for _ in range(2): @@ -171,7 +171,7 @@ class TestLogWriterLoop(unittest.TestCase): noise_tuple = (nick_to_pub_key('Alice'), P_N_HEADER + bytes(PADDING_LENGTH), True, True, master_key) def queue_delayer() -> None: - """Place packets to log into queue after delay.""" + """Place packets to log into the log queue after delay.""" for _ in range(5): queues[LOG_PACKET_QUEUE].put(noise_tuple) # Not logged because logging_state is False by default time.sleep(SLEEP_DELAY) @@ -267,7 +267,7 @@ class TestAccessHistoryAndPrintLogs(TFCTestCase): """Post-test actions.""" cleanup(self.unit_test_dir) - def test_missing_log_file_raises_se(self) -> None: + def test_missing_log_file_raises_soft_error(self) -> None: # Setup os.remove(self.log_file) @@ -443,8 +443,8 @@ Log file of message(s) sent to group test_group group=self.group, type_print='group') - # Add an assembly packet sequence sent to contact Alice in group containing cancel packet. - # Access_logs should skip this. 
+ # Add an assembly packet sequence sent to contact Alice in group + # containing cancel packet. Access_logs should skip this. packets = assembly_packet_creator(MESSAGE, self.msg, group_id=group_name_to_group_id('test_group')) packets = packets[2:] + [M_C_HEADER + bytes(PADDING_LENGTH)] for p in packets: @@ -461,8 +461,8 @@ Log file of message(s) sent to group test_group for p in assembly_packet_creator(MESSAGE, 'This is a short group message', group_id=GROUP_ID_LENGTH * b'1'): write_log_entry(p, nick_to_pub_key('Alice'), self.tfc_log_database) - # Add messages to Alice and Charlie in group. - # Add duplicate of outgoing message that should be skipped by access_logs. + # Add messages to Alice and Charlie in group. Add duplicate + # of outgoing message that should be skipped by access_logs. for p in assembly_packet_creator(MESSAGE, self.msg, group_id=group_name_to_group_id('test_group')): write_log_entry(p, nick_to_pub_key('Alice'), self.tfc_log_database) write_log_entry(p, nick_to_pub_key('Alice'), self.tfc_log_database, origin=ORIGIN_CONTACT_HEADER) @@ -546,7 +546,7 @@ class TestReEncrypt(TFCTestCase): """Post-test actions.""" cleanup(self.unit_test_dir) - def test_missing_log_database_raises_se(self) -> None: + def test_missing_log_database_raises_soft_error(self) -> None: # Setup os.remove(self.log_file) @@ -632,7 +632,7 @@ class TestRemoveLog(TFCTestCase): """Post-test actions.""" cleanup(self.unit_test_dir) - def test_missing_log_file_raises_se(self) -> None: + def test_missing_log_file_raises_soft_error(self) -> None: # Setup os.remove(self.file_name) diff --git a/tests/common/test_db_masterkey.py b/tests/common/test_db_masterkey.py index 81b37a5..e1e2ad8 100644 --- a/tests/common/test_db_masterkey.py +++ b/tests/common/test_db_masterkey.py @@ -135,15 +135,53 @@ class TestMasterKey(TFCTestCase): self.assertIsInstance(master_key.master_key, bytes) @mock.patch('src.common.db_masterkey.MasterKey.timed_key_derivation', - MagicMock(side_effect= [(KL*b'a', 0.01)] + MagicMock(side_effect= [(KL*b'a', 3.5)] # Early exit to create the object. 
+ + [(KL*b'a', 4.1)] # Test1: Make key derivation too slow so it returns with time_cost 1 + + [(KL*b'a', 2.0)] # Test2: Second key derivation time sentinel + + [(KL*b'a', 4.1)] + + [(KL*b'a', 0.1)] # Test3: Complete binary search with search end + + [(KL*b'a', 6.0)] + + 7 * [(KL*b'a', 2.5)])) + @mock.patch('os.popen', return_value=MagicMock( + read=MagicMock(return_value=MagicMock(splitlines=MagicMock(return_value=["MemAvailable 10240"]))))) + @mock.patch('getpass.getpass', side_effect=['generate']) + @mock.patch('builtins.input', side_effect=['']) + @mock.patch('os.system', return_value=None) + @mock.patch('time.sleep', return_value=None) + def test_determine_time_cost(self, *_: Any) -> None: + master_key = MasterKey(self.operation, local_test=True) + + # Test1: Sentinel returns immediately if MAX_KEY_DERIVATION_TIME is exceeded + time_cost, kd_time, _ = master_key.determine_time_cost("password", 8*b'salt', memory_cost=512, parallelism=1) + self.assertEqual(time_cost, 1) + self.assertEqual(kd_time, 4.1) + + # Test2: Second key derivation time sentinel + time_cost, kd_time, _ = master_key.determine_time_cost("password", 8 * b'salt', memory_cost=512, parallelism=1) + self.assertEqual(time_cost, 2) + self.assertEqual(kd_time, 4.1) + + # Test3: Complete binary search with search end + time_cost, kd_time, _ = master_key.determine_time_cost("password", 8 * b'salt', memory_cost=512, parallelism=1) + self.assertEqual(time_cost, 40) + self.assertEqual(kd_time, 2.5) + + @mock.patch('src.common.db_masterkey.MasterKey.timed_key_derivation', + MagicMock(side_effect= [(KL*b'a', 4.1)] + + [(KL*b'a', 3.5)] + + [(KL*b'a', 0.01)] + 100 * [(KL*b'b', 5.0)] + 2 * [(KL*b'a', 2.5)] - + [(KL*b'a', 3.0)])) - @mock.patch('os.path.isfile', side_effect=[False, True]) - @mock.patch('getpass.getpass', side_effect=input_list) - @mock.patch('time.sleep', return_value=None) - def test_kd_binary_search(self, *_: Any) -> None: - MasterKey(self.operation, local_test=True) + + [(KL*b'a', 3.1)])) + @mock.patch('os.popen', return_value=MagicMock( + read=MagicMock(return_value=MagicMock(splitlines=MagicMock(return_value=["MemAvailable 10240"]))))) + @mock.patch('getpass.getpass', side_effect=['generate']) + @mock.patch('builtins.input', side_effect=['']) + @mock.patch('os.system', return_value=None) + @mock.patch('time.sleep', return_value=None) + def test_determine_memory_cost(self, *_: Any) -> None: + master_key = MasterKey(self.operation, local_test=True) + master_key.determine_memory_cost("password", 8*b'salt', time_cost=1, memory_cost=1024, parallelism=1) @mock.patch('src.common.db_masterkey.MIN_KEY_DERIVATION_TIME', 0.01) @mock.patch('src.common.db_masterkey.MAX_KEY_DERIVATION_TIME', 0.1) diff --git a/tests/common/test_db_settings.py b/tests/common/test_db_settings.py index a0726d5..9e50a95 100644 --- a/tests/common/test_db_settings.py +++ b/tests/common/test_db_settings.py @@ -113,8 +113,8 @@ class TestSettings(TFCTestCase): self.assert_se("Error: Invalid setting value 'True'.", self.settings.change_setting, 'tm_static_delay', 'True', *self.args) - self.assertIsNone(self.settings.change_setting('traffic_masking', 'True', *self.args)) - self.assertIsNone(self.settings.change_setting('max_number_of_group_members', '100', *self.args)) + self.assertIsNone(self.settings.change_setting('traffic_masking', 'True', *self.args)) + self.assertIsNone(self.settings.change_setting('max_number_of_group_members', '100', *self.args)) @mock.patch('builtins.input', side_effect=['No', 'Yes']) def test_validate_key_value_pair(self, _: Any) 
-> None: diff --git a/tests/common/test_encoding.py b/tests/common/test_encoding.py index d48d248..b341b64 100644 --- a/tests/common/test_encoding.py +++ b/tests/common/test_encoding.py @@ -25,10 +25,10 @@ import unittest from datetime import datetime -from src.common.encoding import b58encode, bool_to_bytes, double_to_bytes, str_to_bytes, int_to_bytes -from src.common.encoding import b58decode, bytes_to_bool, bytes_to_double, bytes_to_str, bytes_to_int -from src.common.encoding import onion_address_to_pub_key, unicode_padding, pub_key_to_short_address, b85encode -from src.common.encoding import pub_key_to_onion_address, rm_padding_str, bytes_to_timestamp, b10encode +from src.common.encoding import (b58encode, bool_to_bytes, double_to_bytes, str_to_bytes, int_to_bytes, + b58decode, bytes_to_bool, bytes_to_double, bytes_to_str, bytes_to_int, + onion_address_to_pub_key, unicode_padding, pub_key_to_short_address, b85encode, + pub_key_to_onion_address, rm_padding_str, bytes_to_timestamp, b10encode) from src.common.statics import (ENCODED_BOOLEAN_LENGTH, ENCODED_FLOAT_LENGTH, ENCODED_INTEGER_LENGTH, FINGERPRINT_LENGTH, ONION_SERVICE_PUBLIC_KEY_LENGTH, PADDED_UTF32_STR_LENGTH, PADDING_LENGTH, SYMMETRIC_KEY_LENGTH, TFC_PUBLIC_KEY_LENGTH, TRUNC_ADDRESS_LENGTH) diff --git a/tests/common/test_gateway.py b/tests/common/test_gateway.py index c03dda5..22fef3b 100644 --- a/tests/common/test_gateway.py +++ b/tests/common/test_gateway.py @@ -19,6 +19,7 @@ You should have received a copy of the GNU General Public License along with TFC. If not, see . """ +import base64 import os import unittest import socket @@ -34,7 +35,8 @@ from src.common.crypto import blake2b from src.common.gateway import gateway_loop, Gateway, GatewaySettings from src.common.misc import ensure_dir from src.common.reed_solomon import RSCodec -from src.common.statics import DIR_USER_DATA, GATEWAY_QUEUE, NC, PACKET_CHECKSUM_LENGTH, RX, TX +from src.common.statics import (DIR_USER_DATA, GATEWAY_QUEUE, NC, PACKET_CHECKSUM_LENGTH, QUBES_RX_IP_ADDR_FILE, + RX, TX, US_BYTE) from tests.mock_classes import Settings from tests.utils import cd_unit_test, cleanup, gen_queue_dict, tear_queues, TFCTestCase @@ -55,7 +57,7 @@ class TestGatewayLoop(unittest.TestCase): @mock.patch('multiprocessing.connection.Listener', return_value=MagicMock(accept=lambda: MagicMock(recv=MagicMock(return_value='message')))) def test_loop(self, _: Any) -> None: - gateway = Gateway(operation=RX, local_test=True, dd_sockets=False) + gateway = Gateway(operation=RX, local_test=True, dd_sockets=False, qubes=False) self.assertIsNone(gateway_loop(self.queues, gateway, unit_test=True)) data = self.queues[GATEWAY_QUEUE].get() @@ -79,7 +81,7 @@ class TestGatewaySerial(TFCTestCase): @mock.patch('os.listdir', side_effect=[['ttyUSB0'], ['ttyUSB0']]) @mock.patch('builtins.input', side_effect=['Yes']) def test_search_and_establish_serial(self, *_: Any) -> None: - gateway = Gateway(operation=RX, local_test=False, dd_sockets=False) + gateway = Gateway(operation=RX, local_test=False, dd_sockets=False, qubes=False) self.assertIsInstance(gateway.rs, RSCodec) self.assertIs(gateway.tx_serial, gateway.rx_serial) @@ -87,16 +89,16 @@ class TestGatewaySerial(TFCTestCase): @mock.patch('serial.Serial', side_effect=SerialException) @mock.patch('os.listdir', side_effect=[['ttyUSB0'], ['ttyUSB0']]) @mock.patch('builtins.input', side_effect=['Yes']) - def test_serialexception_during_establish_exists(self, *_: Any) -> None: + def test_serial_exception_during_establish_exists(self, *_: Any) -> None: 
with self.assertRaises(SystemExit): - Gateway(operation=RX, local_test=False, dd_sockets=False) + Gateway(operation=RX, local_test=False, dd_sockets=False, qubes=False) @mock.patch('time.sleep', return_value=None) @mock.patch('serial.Serial', return_value=MagicMock(write=MagicMock(side_effect=[SerialException, None]))) @mock.patch('os.listdir', side_effect=[['ttyUSB0'], ['ttyUSB0'], ['ttyUSB0']]) @mock.patch('builtins.input', side_effect=['Yes']) def test_write_serial_(self, *_: Any) -> None: - gateway = Gateway(operation=RX, local_test=False, dd_sockets=False) + gateway = Gateway(operation=RX, local_test=False, dd_sockets=False, qubes=False) self.assertIsNone(gateway.write(b"message")) @mock.patch("time.sleep", return_value=None) @@ -106,7 +108,7 @@ class TestGatewaySerial(TFCTestCase): @mock.patch("builtins.input", side_effect=["Yes"]) def test_serial_uninitialized_serial_interface_for_read_raises_critical_error(self, *_) -> None: # Setup - gateway = Gateway(operation=RX, local_test=False, dd_sockets=False) + gateway = Gateway(operation=RX, local_test=False, dd_sockets=False, qubes=False) gateway.rx_serial = None # Test @@ -119,7 +121,7 @@ class TestGatewaySerial(TFCTestCase): @mock.patch("builtins.input", side_effect=["Yes"]) def test_serial_uninitialized_socket_interface_for_read_raises_critical_error(self, *_) -> None: # Setup - gateway = Gateway(operation=RX, local_test=True, dd_sockets=False) + gateway = Gateway(operation=RX, local_test=True, dd_sockets=False, qubes=False) gateway.rx_socket = None # Test @@ -133,7 +135,7 @@ class TestGatewaySerial(TFCTestCase): @mock.patch("os.listdir", side_effect=[["ttyUSB0"], ["ttyUSB0"], ["ttyUSB0"]]) @mock.patch("builtins.input", side_effect=["Yes"]) def test_read_socket(self, *_) -> None: - gateway = Gateway(operation=RX, local_test=True, dd_sockets=False) + gateway = Gateway(operation=RX, local_test=True, dd_sockets=False, qubes=False) data = gateway.read() self.assertEqual(data, b"12") @@ -144,7 +146,7 @@ class TestGatewaySerial(TFCTestCase): @mock.patch("os.listdir", side_effect=[["ttyUSB0"], ["ttyUSB0"], ["ttyUSB0"]]) @mock.patch("builtins.input", side_effect=["Yes"]) def test_read_serial(self, *_) -> None: - gateway = Gateway(operation=RX, local_test=False, dd_sockets=False) + gateway = Gateway(operation=RX, local_test=False, dd_sockets=False, qubes=False) data = gateway.read() self.assertEqual(data, b"12") @@ -153,29 +155,26 @@ class TestGatewaySerial(TFCTestCase): @mock.patch("os.listdir", side_effect=[["ttyUSB0"], ["ttyUSB0"]]) @mock.patch("builtins.input", side_effect=["Yes"]) def test_add_error_correction(self, *_) -> None: - gateway = Gateway(operation=RX, local_test=False, dd_sockets=False) + gateway = Gateway(operation=RX, local_test=False, dd_sockets=False, qubes=False) packet = b"packet" # Test BLAKE2b based checksum gateway.settings.session_serial_error_correction = 0 - self.assertEqual( - gateway.add_error_correction(packet), - packet + blake2b(packet, digest_size=PACKET_CHECKSUM_LENGTH), - ) + self.assertEqual(gateway.add_error_correction(packet), + packet + blake2b(packet, digest_size=PACKET_CHECKSUM_LENGTH)) # Test Reed-Solomon erasure code gateway.settings.session_serial_error_correction = 5 gateway.rs = RSCodec(gateway.settings.session_serial_error_correction) - self.assertEqual( - gateway.add_error_correction(packet), gateway.rs.encode(packet) - ) + self.assertEqual(gateway.add_error_correction(packet), + gateway.rs.encode(packet)) @mock.patch('time.sleep', return_value=None) @mock.patch('serial.Serial', 
return_value=MagicMock()) @mock.patch('os.listdir', side_effect=[['ttyUSB0'], ['ttyUSB0']]) @mock.patch('builtins.input', side_effect=['Yes']) def test_detect_errors(self, *_: Any) -> None: - gateway = Gateway(operation=RX, local_test=False, dd_sockets=False) + gateway = Gateway(operation=RX, local_test=False, dd_sockets=False, qubes=False) packet = b'packet' # Test BLAKE2b based checksum @@ -183,7 +182,7 @@ class TestGatewaySerial(TFCTestCase): self.assertEqual(gateway.detect_errors(gateway.add_error_correction(packet)), packet) - # Test unrecoverable error raises FR + # Test unrecoverable error raises SoftError self.assert_se("Warning! Received packet had an invalid checksum.", gateway.detect_errors, 300 * b'a') @@ -193,16 +192,30 @@ class TestGatewaySerial(TFCTestCase): self.assertEqual(gateway.detect_errors(gateway.add_error_correction(packet)), packet) - # Test unrecoverable error raises FR + # Test unrecoverable error raises SoftError self.assert_se("Error: Reed-Solomon failed to correct errors in the received packet.", gateway.detect_errors, 300 * b'a') + # Qubes + + # Test with B85 encoding + gateway.settings.qubes = True + packet_with_error_correction = base64.b85encode(gateway.add_error_correction(packet)) + self.assertEqual(gateway.detect_errors(packet_with_error_correction), packet) + + # Test invalid B85 encoding raises SoftError + packet_with_error_correction = base64.b85encode(gateway.add_error_correction(packet)) + packet_with_error_correction += b'\x00' + self.assert_se("Error: Received packet had invalid Base85 encoding.", + gateway.detect_errors, packet_with_error_correction) + gateway.settings.qubes = False + @mock.patch('time.sleep', return_value=None) @mock.patch('serial.Serial', return_value=MagicMock()) @mock.patch('os.listdir', side_effect=[['ttyUSB0'], ['ttyUSB0'], [''], ['ttyUSB0'], ['ttyS0'], ['']]) @mock.patch('builtins.input', side_effect=['Yes']) def test_search_serial_interfaces(self, *_: Any) -> None: - gateway = Gateway(operation=RX, local_test=False, dd_sockets=False) + gateway = Gateway(operation=RX, local_test=False, dd_sockets=False, qubes=False) interface = gateway.search_serial_interface() self.assertEqual(interface, '/dev/ttyUSB0') @@ -220,39 +233,38 @@ class TestGatewaySerial(TFCTestCase): @mock.patch('multiprocessing.connection.Client', MagicMock()) @mock.patch('multiprocessing.connection.Listener', MagicMock()) def test_establish_local_testing_gateway(self, *_: Any) -> None: - gateway = Gateway(operation=NC, local_test=True, dd_sockets=False) + gateway = Gateway(operation=NC, local_test=True, dd_sockets=False, qubes=False) self.assertIsInstance(gateway.rs, RSCodec) @mock.patch('time.sleep', return_value=None) @mock.patch('multiprocessing.connection.Client', MagicMock(side_effect=KeyboardInterrupt)) def test_keyboard_interrupt_exits(self, *_: Any) -> None: with self.assertRaises(SystemExit): - Gateway(operation=TX, local_test=True, dd_sockets=False) + Gateway(operation=TX, local_test=True, dd_sockets=False, qubes=False) @mock.patch('time.sleep', return_value=None) @mock.patch('multiprocessing.connection.Client', MagicMock( side_effect=[socket.error, ConnectionRefusedError, MagicMock()])) def test_socket_client(self, *_: Any) -> None: - gateway = Gateway(operation=TX, local_test=True, dd_sockets=False) + gateway = Gateway(operation=TX, local_test=True, dd_sockets=False, qubes=False) self.assertIsInstance(gateway, Gateway) @mock.patch('time.sleep', return_value=None) @mock.patch('multiprocessing.connection.Listener', MagicMock(
side_effect=[MagicMock(), KeyboardInterrupt])) def test_socket_server(self, *_: Any) -> None: - gateway = Gateway(operation=RX, local_test=True, dd_sockets=False) + gateway = Gateway(operation=RX, local_test=True, dd_sockets=False, qubes=False) self.assertIsInstance(gateway, Gateway) with self.assertRaises(SystemExit): - Gateway(operation=RX, local_test=True, dd_sockets=False) + Gateway(operation=RX, local_test=True, dd_sockets=False, qubes=False) @mock.patch('time.sleep', return_value=None) @mock.patch('multiprocessing.connection.Listener', return_value=MagicMock( accept=lambda: MagicMock(recv=MagicMock(side_effect=[KeyboardInterrupt, b'data', EOFError])))) def test_local_testing_read(self, *_: Any) -> None: - gateway = Gateway(operation=RX, local_test=True, dd_sockets=False) + gateway = Gateway(operation=RX, local_test=True, dd_sockets=False, qubes=False) self.assertEqual(gateway.read(), b'data') - with self.assertRaises(SystemExit): gateway.read() @@ -260,13 +272,61 @@ class TestGatewaySerial(TFCTestCase): @mock.patch('multiprocessing.connection.Client', return_value=MagicMock( send=MagicMock(side_effect=[None, BrokenPipeError]))) def test_local_testing_write(self, *_: Any) -> None: - gateway = Gateway(operation=TX, local_test=True, dd_sockets=False) - + gateway = Gateway(operation=TX, local_test=True, dd_sockets=False, qubes=False) self.assertIsNone(gateway.write(b'data')) with self.assertRaises(SystemExit): gateway.write(b'data') + # Qubes + @mock.patch('time.sleep', return_value=None) + @mock.patch('socket.socket', MagicMock(return_value=MagicMock( + recv=MagicMock(side_effect=[EOFError, b'data', US_BYTE])))) + def test_qubes_socket_server(self, *_: Any) -> None: + gateway = Gateway(operation=RX, local_test=False, dd_sockets=False, qubes=True) + self.assertIsInstance(gateway, Gateway) + self.assertEqual(gateway.read(), b'data') + + @mock.patch('time.sleep', return_value=None) + @mock.patch('socket.socket', MagicMock(return_value=MagicMock( + recv=MagicMock(side_effect=[EOFError, b'data', US_BYTE])))) + def test_qubes_socket_server_raises_critical_error_if_interface_is_not_initialized(self, *_: Any) -> None: + # Setup + gateway = Gateway(operation=RX, local_test=False, dd_sockets=False, qubes=True) + gateway.rxq_socket = None + + # Test + with self.assertRaises(SystemExit): + self.assertEqual(gateway.read(), b'data') + + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', side_effect=['10.137.0.17']) + @mock.patch('socket.socket', MagicMock()) + def test_qubes_socket_client(self, *_: Any) -> None: + gateway = Gateway(operation=TX, local_test=False, dd_sockets=False, qubes=True) + self.assertIsInstance(gateway, Gateway) + self.assertIsNone(gateway.write(b'data')) + + @mock.patch('time.sleep', return_value=None) + @mock.patch('socket.socket', MagicMock()) + def test_qubes_auto_config_from_file(self, *_: Any) -> None: + # Setup + test_ip = '10.137.0.17' + open(QUBES_RX_IP_ADDR_FILE, 'w+').write(test_ip) + + # Test + self.assertTrue(os.path.isfile(QUBES_RX_IP_ADDR_FILE)) + gateway = Gateway(operation=TX, local_test=False, dd_sockets=False, qubes=True) + self.assertEqual(gateway.settings.rx_udp_ip, test_ip) + self.assertFalse(os.path.isfile(QUBES_RX_IP_ADDR_FILE)) + + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', side_effect=['10.137.0.17']) + @mock.patch('socket.socket', MagicMock(return_value=MagicMock(connect=MagicMock(side_effect=[socket.error])))) + def test_socket_error_raises_critical_error(self, *_: Any) -> None: + gateway = 
Gateway(operation=TX, local_test=False, dd_sockets=False, qubes=True) + with self.assertRaises(SystemExit): + gateway.get_local_ip_addr() class TestGatewaySettings(TFCTestCase): @@ -278,7 +338,8 @@ class TestGatewaySettings(TFCTestCase): "serial_baudrate": 19200, "serial_error_correction": 5, "use_serial_usb_adapter": true, - "built_in_serial_interface": "ttyS0" + "built_in_serial_interface": "ttyS0", + "rx_udp_ip": "" }""" def tearDown(self) -> None: @@ -288,11 +349,11 @@ class TestGatewaySettings(TFCTestCase): @mock.patch('os.listdir', side_effect=[['ttyUSB0'], ['ttyS0'], ['ttyUSB0'], ['ttyS0']]) @mock.patch('builtins.input', side_effect=['yes', 'yes', 'no', 'no']) def test_gateway_setup(self, *_: Any) -> None: - settings = GatewaySettings(operation=TX, local_test=False, dd_sockets=True) + settings = GatewaySettings(operation=TX, local_test=False, dd_sockets=True, qubes=False) self.assertIsNone(settings.setup()) def test_store_and_load_of_settings(self) -> None: - settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True) + settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True, qubes=False) self.assertTrue(os.path.isfile(f'{DIR_USER_DATA}/{TX}_serial_settings.json')) self.assertEqual(settings.serial_baudrate, 19200) @@ -301,7 +362,7 @@ class TestGatewaySettings(TFCTestCase): settings.use_serial_usb_adapter = False self.assertIsNone(settings.store_settings()) - settings2 = GatewaySettings(operation=TX, local_test=True, dd_sockets=True) + settings2 = GatewaySettings(operation=TX, local_test=True, dd_sockets=True, qubes=False) self.assertEqual(settings2.serial_baudrate, 115200) self.assertEqual(settings.use_serial_usb_adapter, False) @@ -315,14 +376,16 @@ class TestGatewaySettings(TFCTestCase): "serial_baudrate": 9600, "serial_error_correction": 1, "use_serial_usb_adapter": false, - "built_in_serial_interface": "ttyS0" + "built_in_serial_interface": "ttyS0", + "rx_udp_ip": "10.137.0.17" }""") # Test - settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True) + settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True, qubes=False) self.assertEqual(settings.serial_baudrate, 9600) self.assertEqual(settings.serial_error_correction, 1) self.assertEqual(settings.use_serial_usb_adapter, False) self.assertEqual(settings.built_in_serial_interface, 'ttyS0') + self.assertEqual(settings.rx_udp_ip, '10.137.0.17') def test_missing_values_are_set_to_default_and_database_is_overwritten(self) -> None: # Setup @@ -335,7 +398,7 @@ class TestGatewaySettings(TFCTestCase): "relay_usb_serial_adapter": false }""") # Test - settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True) + settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True, qubes=False) self.assertEqual(settings.serial_baudrate, 19200) self.assertEqual(settings.serial_error_correction, 1) self.assertEqual(settings.use_serial_usb_adapter, False) @@ -352,7 +415,7 @@ class TestGatewaySettings(TFCTestCase): }""") # Test - settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True) + settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True, qubes=False) self.assertEqual(settings.serial_baudrate, 19200) self.assertEqual(settings.serial_error_correction, 5) self.assertEqual(settings.use_serial_usb_adapter, True) @@ -372,10 +435,11 @@ class TestGatewaySettings(TFCTestCase): "serial_baudrate": 19201, "serial_error_correction": 5, "use_serial_usb_adapter": true, - "built_in_serial_interface": "ttyS0" + 
"built_in_serial_interface": "ttyS0", + "rx_udp_ip": "" }""") # Test - settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True) + settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True, qubes=False) self.assertEqual(settings.serial_baudrate, 19200) self.assertEqual(settings.serial_error_correction, 5) self.assertEqual(settings.use_serial_usb_adapter, True) @@ -395,10 +459,11 @@ class TestGatewaySettings(TFCTestCase): "serial_baudrate": 19200, "serial_error_correction": -1, "use_serial_usb_adapter": true, - "built_in_serial_interface": "ttyS0" + "built_in_serial_interface": "ttyS0", + "rx_udp_ip": "" }""") # Test - settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True) + settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True, qubes=False) self.assertEqual(settings.serial_baudrate, 19200) self.assertEqual(settings.serial_error_correction, 5) self.assertEqual(settings.use_serial_usb_adapter, True) @@ -418,10 +483,11 @@ class TestGatewaySettings(TFCTestCase): "serial_baudrate": 19200, "serial_error_correction": 5, "use_serial_usb_adapter": true, - "built_in_serial_interface": "does_not_exist" + "built_in_serial_interface": "does_not_exist", + "rx_udp_ip": "" }""") # Test - settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True) + settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True, qubes=False) self.assertEqual(settings.serial_baudrate, 19200) self.assertEqual(settings.serial_error_correction, 5) self.assertEqual(settings.use_serial_usb_adapter, True) @@ -432,6 +498,48 @@ class TestGatewaySettings(TFCTestCase): self.assertEqual(data, self.default_serialized) + @mock.patch('builtins.input', side_effect=['10.137.0.17']) + def test_invalid_rx_udp_ip_is_replaced_with_user_input(self, _) -> None: + # Setup + ensure_dir(DIR_USER_DATA) + with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", 'w+') as f: + f.write("""\ +{ + "serial_baudrate": 19200, + "serial_error_correction": 5, + "use_serial_usb_adapter": true, + "built_in_serial_interface": "ttyS0", + "rx_udp_ip": "256.256.256.256" +}""") + # Test + settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True, qubes=True) + self.assertEqual(settings.serial_baudrate, 19200) + self.assertEqual(settings.serial_error_correction, 5) + self.assertEqual(settings.use_serial_usb_adapter, True) + self.assertEqual(settings.built_in_serial_interface, 'ttyS0') + self.assertEqual(settings.rx_udp_ip, '10.137.0.17') + + @mock.patch('builtins.input', side_effect=['10.137.0.17']) + def test_invalid_rx_udp_ip_type_is_replaced_with_user_input(self, _) -> None: + # Setup + ensure_dir(DIR_USER_DATA) + with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", 'w+') as f: + f.write("""\ +{ + "serial_baudrate": 19200, + "serial_error_correction": 5, + "use_serial_usb_adapter": true, + "built_in_serial_interface": "ttyS0", + "rx_udp_ip": 5 +}""") + # Test + settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True, qubes=True) + self.assertEqual(settings.serial_baudrate, 19200) + self.assertEqual(settings.serial_error_correction, 5) + self.assertEqual(settings.use_serial_usb_adapter, True) + self.assertEqual(settings.built_in_serial_interface, 'ttyS0') + self.assertEqual(settings.rx_udp_ip, '10.137.0.17') + def test_invalid_type_is_replaced_with_default(self) -> None: # Setup ensure_dir(DIR_USER_DATA) @@ -441,10 +549,11 @@ class TestGatewaySettings(TFCTestCase): "serial_baudrate": "115200", "serial_error_correction": "5", 
"use_serial_usb_adapter": "true", - "built_in_serial_interface": true + "built_in_serial_interface": true, + "rx_udp_ip": "" }""") # Test - settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True) + settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True, qubes=False) self.assertEqual(settings.serial_baudrate, 19200) self.assertEqual(settings.serial_error_correction, 5) self.assertEqual(settings.use_serial_usb_adapter, True) @@ -468,7 +577,7 @@ class TestGatewaySettings(TFCTestCase): "this_should_not_be_here": 1 }""") # Test - settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True) + settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True, qubes=False) self.assertEqual(settings.serial_baudrate, 19200) self.assertEqual(settings.serial_error_correction, 5) self.assertEqual(settings.use_serial_usb_adapter, True) @@ -487,7 +596,7 @@ class TestGatewaySettings(TFCTestCase): with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", 'w+') as f: f.write(self.default_serialized) - settings = GatewaySettings(operation=TX, local_test=False, dd_sockets=True) + settings = GatewaySettings(operation=TX, local_test=False, dd_sockets=True, qubes=False) # Test self.assertIsNone(settings.setup()) @@ -495,7 +604,7 @@ class TestGatewaySettings(TFCTestCase): @mock.patch('time.sleep', return_value=None) def test_change_setting(self, _: Any) -> None: - settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True) + settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True, qubes=False) self.assert_se("Error: Invalid setting value 'Falsee'.", settings.change_setting, 'serial_baudrate', 'Falsee') self.assert_se("Error: Invalid setting value '1.1'.", @@ -506,14 +615,14 @@ class TestGatewaySettings(TFCTestCase): settings.change_setting, 'use_serial_usb_adapter', 'Falsee') self.assertIsNone(settings.change_setting('serial_baudrate', '9600')) - self.assertEqual(GatewaySettings(operation=TX, local_test=True, dd_sockets=True).serial_baudrate, 9600) + self.assertEqual(GatewaySettings(operation=TX, local_test=True, dd_sockets=True, qubes=False).serial_baudrate, 9600) settings.serial_baudrate = b'bytestring' with self.assertRaises(SystemExit): settings.change_setting('serial_baudrate', '9600') def test_validate_key_value_pair(self) -> None: - settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True) + settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True, qubes=False) self.assert_se("Error: The specified baud rate is not supported.", settings.validate_key_value_pair, 'serial_baudrate', 0) self.assert_se("Error: The specified baud rate is not supported.", @@ -529,11 +638,11 @@ class TestGatewaySettings(TFCTestCase): @mock.patch('shutil.get_terminal_size', return_value=(64, 64)) def test_too_narrow_terminal_raises_fr_when_printing_settings(self, _: Any) -> None: - settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True) + settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True, qubes=False) self.assert_se("Error: Screen width is too small.", settings.print_settings) def test_print_settings(self) -> None: - settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True) + settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True, qubes=False) self.assert_prints("""\ Serial interface setting Current value Default value Description diff --git a/tests/common/test_misc.py b/tests/common/test_misc.py index 23deb7c..ae3af67 100644 --- 
a/tests/common/test_misc.py +++ b/tests/common/test_misc.py @@ -33,18 +33,18 @@ from typing import Any, NoReturn from unittest.mock import MagicMock -from src.common.misc import calculate_race_condition_delay, decompress, ensure_dir, get_tab_complete_list -from src.common.misc import get_tab_completer, get_terminal_height, get_terminal_width, HideRunTime, ignored -from src.common.misc import monitor_processes, process_arguments, readable_size, reset_terminal, round_up -from src.common.misc import separate_header, separate_headers, separate_trailer, split_string, split_byte_string -from src.common.misc import split_to_substrings, terminal_width_check, validate_group_name, validate_key_exchange -from src.common.misc import validate_onion_addr, validate_nick +from src.common.misc import (calculate_race_condition_delay, decompress, ensure_dir, get_tab_complete_list, + get_tab_completer, get_terminal_height, get_terminal_width, HideRunTime, ignored, + monitor_processes, process_arguments, readable_size, reset_terminal, round_up, + separate_header, separate_headers, separate_trailer, split_string, split_byte_string, + split_to_substrings, terminal_width_check, validate_group_name, validate_ip_address, + validate_key_exchange, validate_onion_addr, validate_nick) from src.common.statics import (DIR_RECV_FILES, DIR_USER_DATA, DUMMY_GROUP, ECDHE, EXIT, EXIT_QUEUE, LOCAL_ID, PADDING_LENGTH, RESET, RX, TAILS, TRAFFIC_MASKING, WIPE) from tests.mock_classes import ContactList, Gateway, GroupList, Settings -from tests.utils import cd_unit_test, cleanup, gen_queue_dict, nick_to_onion_address -from tests.utils import nick_to_pub_key, tear_queues, TFCTestCase +from tests.utils import (cd_unit_test, cleanup, gen_queue_dict, nick_to_onion_address, nick_to_pub_key, + tear_queues, TFCTestCase) class TestCalculateRaceConditionDelay(unittest.TestCase): @@ -72,7 +72,7 @@ class TestDecompress(TFCTestCase): # Test self.assertEqual(decompress(compressed, self.settings.max_decompress_size), data) - def test_oversize_decompression_raises_se(self) -> None: + def test_oversize_decompression_raises_soft_error(self) -> None: # Setup data = os.urandom(self.settings.max_decompress_size + 1) compressed = zlib.compress(data) @@ -272,9 +272,10 @@ class TestProcessArguments(unittest.TestCase): def __init__(self) -> None: """Create new Args mock object.""" - self.operation = True - self.local_test = True + self.operation = True + self.local_test = True self.data_diode_sockets = True + self.qubes = False class MockParser(object): """MockParse object.""" @@ -298,7 +299,7 @@ class TestProcessArguments(unittest.TestCase): argparse.ArgumentParser = self.o_argparse def test_process_arguments(self) -> None: - self.assertEqual(process_arguments(), (RX, True, True)) + self.assertEqual(process_arguments(), (RX, True, True, False)) class TestReadableSize(unittest.TestCase): @@ -499,6 +500,16 @@ class TestValidateGroupName(unittest.TestCase): '') +class TestValidateIpAddress(unittest.TestCase): + + def test_validate_ip_address(self) -> None: + self.assertEqual(validate_ip_address("10.137.0.17"), '') + self.assertEqual(validate_ip_address("10.137.0.255"), '') + self.assertEqual(validate_ip_address("255.255.255.255"), '') + self.assertEqual(validate_ip_address("10.137.0.256"), 'Invalid IP address') + self.assertEqual(validate_ip_address("256.256.256.256"), 'Invalid IP address') + + class TestValidateKeyExchange(unittest.TestCase): def test_validate_key_exchange(self) -> None: diff --git a/tests/common/test_output.py 
b/tests/common/test_output.py index a67c304..c698879 100644 --- a/tests/common/test_output.py +++ b/tests/common/test_output.py @@ -25,8 +25,8 @@ from datetime import datetime from unittest import mock from typing import Any -from src.common.output import clear_screen, group_management_print, m_print, phase, print_fingerprint, print_key -from src.common.output import print_title, print_on_previous_line, print_spacing, rp_print +from src.common.output import (clear_screen, group_management_print, m_print, phase, print_fingerprint, print_key, + print_title, print_on_previous_line, print_spacing, rp_print) from src.common.statics import (ADDED_MEMBERS, ALREADY_MEMBER, BOLD_ON, CLEAR_ENTIRE_LINE, CLEAR_ENTIRE_SCREEN, CURSOR_LEFT_UP_CORNER, CURSOR_UP_ONE_LINE, DONE, FINGERPRINT_LENGTH, NEW_GROUP, NORMAL_TEXT, NOT_IN_GROUP, REMOVED_MEMBERS, RX, SYMMETRIC_KEY_LENGTH, TX, diff --git a/tests/common/test_path.py b/tests/common/test_path.py index f374cae..f3e9ca5 100644 --- a/tests/common/test_path.py +++ b/tests/common/test_path.py @@ -63,7 +63,7 @@ class TestAskPathGui(TFCTestCase): @mock.patch('tkinter.Tk', return_value=MagicMock()) @mock.patch('tkinter.filedialog.askopenfilename', return_value='') - def test_no_path_to_file_raises_se(self, *_: Any) -> None: + def test_no_path_to_file_raises_soft_error(self, *_: Any) -> None: self.assert_se("File selection aborted.", ask_path_gui, 'test message', self.settings, True) @mock.patch('tkinter.Tk', return_value=MagicMock()) @@ -73,7 +73,7 @@ class TestAskPathGui(TFCTestCase): @mock.patch('tkinter.Tk', return_value=MagicMock()) @mock.patch('tkinter.filedialog.askdirectory', return_value='') - def test_no_path_raises_se(self, *_: Any) -> None: + def test_no_path_raises_soft_error(self, *_: Any) -> None: self.assert_se("Path selection aborted.", ask_path_gui, 'test message', self.settings, False) diff --git a/tests/common/test_reed_solomon.py b/tests/common/test_reed_solomon.py index 8619857..94db312 100644 --- a/tests/common/test_reed_solomon.py +++ b/tests/common/test_reed_solomon.py @@ -75,66 +75,66 @@ class TestReedSolomon(unittest.TestCase): self.assertEqual(dec_enc2, enc) def test_prim_fcr_basic(self) -> None: - nn = 30 - kk = 18 - tt = nn - kk - rs = RSCodec(tt, fcr=120, prim=0x187) - hexencmsg = ('00faa123555555c000000354064432' + nn = 30 + kk = 18 + tt = nn - kk + rs = RSCodec(tt, fcr=120, prim=0x187) + hex_enc_msg = ('00faa123555555c000000354064432' 'c02800fe97c434e1ff5365cf8fafe4') - strf = str - encmsg = bytearray.fromhex(strf(hexencmsg)) - decmsg = encmsg[:kk] - tem = rs.encode(decmsg) - self.assertEqual(encmsg, tem, msg="encoded does not match expected") + strf = str + enc_msg = bytearray.fromhex(strf(hex_enc_msg)) + dec_msg = enc_msg[:kk] + tem = rs.encode(dec_msg) + self.assertEqual(enc_msg, tem, msg="encoded does not match expected") tdm, rtem = rs.decode(tem) - self.assertEqual(tdm, decmsg, msg="decoded does not match original") - self.assertEqual(rtem, tem, msg="decoded mesecc does not match original") + self.assertEqual(tdm, dec_msg, msg="decoded does not match original") + self.assertEqual(rtem, tem, msg="decoded mesecc does not match original") tem1 = bytearray(tem) # Clone a copy # Encoding and decoding intact message seem OK, so test errors - numerrs = tt >> 1 # Inject tt/2 errors (expected to recover fully) - for i in sample(range(nn), numerrs): # inject errors in random places + num_errs = tt >> 1 # Inject tt/2 errors (expected to recover fully) + for i in sample(range(nn), num_errs): # inject errors in random places tem1[i] ^= 
0xff # flip all 8 bits tdm, _ = rs.decode(tem1) - self.assertEqual(tdm, decmsg, msg="decoded with errors does not match original") + self.assertEqual(tdm, dec_msg, msg="decoded with errors does not match original") tem1 = bytearray(tem) # Clone another copy - numerrs += 1 # Inject tt/2 + 1 errors (expected to fail and detect it) - for i in sample(range(nn), numerrs): # Inject errors in random places + num_errs += 1 # Inject tt/2 + 1 errors (expected to fail and detect it) + for i in sample(range(nn), num_errs): # Inject errors in random places tem1[i] ^= 0xff # Flip all 8 bits # If this fails, it means excessive errors not detected self.assertRaises(ReedSolomonError, rs.decode, tem1) def test_prim_fcr_long(self) -> None: - nn = 48 - kk = 34 - tt = nn - kk - rs = RSCodec(tt, fcr=120, prim=0x187) - hexencmsg = ('08faa123555555c000000354064432c0280e1b4d090cfc04' - '887400000003500000000e1985ff9c6b33066ca9f43d12e8') - strf = str - encmsg = bytearray.fromhex(strf(hexencmsg)) - decmsg = encmsg[:kk] - tem = rs.encode(decmsg) - self.assertEqual(encmsg, tem, msg="encoded does not match expected") + nn = 48 + kk = 34 + tt = nn - kk + rs = RSCodec(tt, fcr=120, prim=0x187) + hex_enc_msg = ('08faa123555555c000000354064432c0280e1b4d090cfc04' + '887400000003500000000e1985ff9c6b33066ca9f43d12e8') + strf = str + enc_msg = bytearray.fromhex(strf(hex_enc_msg)) + dec_msg = enc_msg[:kk] + tem = rs.encode(dec_msg) + self.assertEqual(enc_msg, tem, msg="encoded does not match expected") tdm, rtem = rs.decode(tem) - self.assertEqual(tdm, decmsg, msg="decoded does not match original") - self.assertEqual(rtem, tem, msg="decoded mesecc does not match original") + self.assertEqual(tdm, dec_msg, msg="decoded does not match original") + self.assertEqual(rtem, tem, msg="decoded mesecc does not match original") tem1 = bytearray(tem) - numerrs = tt >> 1 - for i in sample(range(nn), numerrs): + num_errs = tt >> 1 + for i in sample(range(nn), num_errs): tem1[i] ^= 0xff tdm, rtem = rs.decode(tem1) - self.assertEqual(tdm, decmsg, msg="decoded with errors does not match original") - self.assertEqual(rtem, tem, msg="decoded mesecc with errors does not match original") + self.assertEqual(tdm, dec_msg, msg="decoded with errors does not match original") + self.assertEqual(rtem, tem, msg="decoded mesecc with errors does not match original") tem1 = bytearray(tem) - numerrs += 1 - for i in sample(range(nn), numerrs): + num_errs += 1 + for i in sample(range(nn), num_errs): tem1[i] ^= 0xff self.assertRaises(ReedSolomonError, rs.decode, tem1) diff --git a/tests/mock_classes.py b/tests/mock_classes.py index f9b9d83..b402e05 100644 --- a/tests/mock_classes.py +++ b/tests/mock_classes.py @@ -265,6 +265,7 @@ class Settings(OrigSettings): self.master_key = MasterKey() self.software_operation = TX self.local_testing_mode = False + self.qubes = False self.all_keys = list(vars(self).keys()) self.key_list = self.all_keys[:self.all_keys.index('master_key')] @@ -304,6 +305,8 @@ class GatewaySettings(OrigGatewaySettings): self.local_testing_mode = False self.data_diode_sockets = False + self.qubes = False + self.all_keys = list(vars(self).keys()) self.key_list = self.all_keys[:self.all_keys.index('software_operation')] self.defaults = {k: self.__dict__[k] for k in self.key_list} diff --git a/tests/receiver/test_commands.py b/tests/receiver/test_commands.py index 2d6f8e0..5a20b66 100644 --- a/tests/receiver/test_commands.py +++ b/tests/receiver/test_commands.py @@ -36,14 +36,14 @@ from src.common.statics import (CH_FILE_RECV, CH_LOGGING, CH_NOTIFY, 
CLEAR_ENTI LOCAL_PUBKEY, MESSAGE, ORIGIN_CONTACT_HEADER, PADDING_LENGTH, RESET, RX, SYMMETRIC_KEY_LENGTH, US_BYTE, WIN_TYPE_CONTACT, WIN_TYPE_GROUP, WIN_UID_FILE, WIPE) +from src.receiver.commands import (ch_contact_s, ch_master_key, ch_nick, ch_setting, contact_rem, exit_tfc, log_command, + process_command, remove_log, reset_screen, win_activity, win_select, wipe) from src.receiver.packet import PacketList -from src.receiver.commands import ch_contact_s, ch_master_key, ch_nick, ch_setting, contact_rem, exit_tfc, log_command -from src.receiver.commands import process_command, remove_log, reset_screen, win_activity, win_select, wipe -from tests.mock_classes import ContactList, Gateway, group_name_to_group_id, GroupList, KeyList, MasterKey -from tests.mock_classes import nick_to_pub_key, RxWindow, Settings, WindowList -from tests.utils import assembly_packet_creator, cd_unit_test, cleanup, ignored, nick_to_short_address -from tests.utils import tear_queue, TFCTestCase +from tests.mock_classes import (ContactList, Gateway, group_name_to_group_id, GroupList, KeyList, MasterKey, + nick_to_pub_key, RxWindow, Settings, WindowList) +from tests.utils import (assembly_packet_creator, cd_unit_test, cleanup, ignored, nick_to_short_address, + tear_queue, TFCTestCase) class TestProcessCommand(TFCTestCase): @@ -71,7 +71,7 @@ class TestProcessCommand(TFCTestCase): cleanup(self.unit_test_dir) tear_queue(self.exit_queue) - def test_incomplete_command_raises_se(self) -> None: + def test_incomplete_command_raises_soft_error(self) -> None: packet = assembly_packet_creator(COMMAND, b'test_command', s_header_override=C_L_HEADER, encrypt_packet=True)[0] self.assert_se("Incomplete command.", process_command, self.ts, packet, *self.args) @@ -316,7 +316,7 @@ class TestChMasterKey(TFCTestCase): @mock.patch('getpass.getpass', return_value='a') @mock.patch('time.sleep', return_value=None) @mock.patch('os.getrandom', side_effect=KeyboardInterrupt) - def test_keyboard_interrupt_raises_se(self, *_) -> None: + def test_keyboard_interrupt_raises_soft_error(self, *_) -> None: self.assert_se("Error: Invalid password.", ch_master_key, *self.args) @@ -332,7 +332,7 @@ class TestChNick(TFCTestCase): self.window = self.window_list.get_window(nick_to_pub_key("Alice")) self.window.type = WIN_TYPE_CONTACT - def test_unknown_account_raises_se(self) -> None: + def test_unknown_account_raises_soft_error(self) -> None: # Setup cmd_data = nick_to_pub_key("Bob") + b'Bob_' @@ -364,7 +364,7 @@ class TestChSetting(TFCTestCase): self.args = (self.ts, self.window_list, self.contact_list, self.group_list, self.key_list, self.settings, self.gateway) - def test_invalid_data_raises_se(self) -> None: + def test_invalid_data_raises_soft_error(self) -> None: # Setup self.settings.key_list = [''] @@ -372,7 +372,7 @@ class TestChSetting(TFCTestCase): cmd_data = b'setting' + b'True' self.assert_se("Error: Received invalid setting data.", ch_setting, cmd_data, *self.args) - def test_invalid_setting_raises_se(self) -> None: + def test_invalid_setting_raises_soft_error(self) -> None: # Setup self.settings.key_list = [''] @@ -411,7 +411,7 @@ class TestChContactSetting(TFCTestCase): group_list=self.group_list) self.args = self.ts, self.window_list, self.contact_list, self.group_list - def test_invalid_window_raises_se(self) -> None: + def test_invalid_window_raises_soft_error(self) -> None: # Setup cmd_data = ENABLE + nick_to_pub_key("Bob") header = CH_LOGGING @@ -503,7 +503,7 @@ class TestContactRemove(TFCTestCase): """Post-test actions.""" 
cleanup(self.unit_test_dir) - def test_no_contact_raises_se(self) -> None: + def test_no_contact_raises_soft_error(self) -> None: # Setup contact_list = ContactList(nicks=['Alice']) group_list = GroupList(groups=[]) diff --git a/tests/receiver/test_commands_g.py b/tests/receiver/test_commands_g.py index 394c9ae..9596053 100644 --- a/tests/receiver/test_commands_g.py +++ b/tests/receiver/test_commands_g.py @@ -39,7 +39,7 @@ class TestGroupCreate(TFCTestCase): self.window_list = WindowList() self.group_id = group_name_to_group_id('test_group') - def test_too_many_purp_accounts_raises_se(self) -> None: + def test_too_many_purp_accounts_raises_soft_error(self) -> None: # Setup create_list = [nick_to_pub_key(str(n)) for n in range(51)] cmd_data = self.group_id + b'test_group' + US_BYTE + b''.join(create_list) @@ -52,7 +52,7 @@ class TestGroupCreate(TFCTestCase): self.assert_se("Error: TFC settings only allow 50 members per group.", group_create, cmd_data, self.ts, self.window_list, contact_list, group_list, self.settings) - def test_full_group_list_raises_se(self) -> None: + def test_full_group_list_raises_soft_error(self) -> None: # Setup cmd_data = self.group_id + b'test_group' + US_BYTE + nick_to_pub_key('51') group_list = GroupList(groups=[f"test_group_{n}" for n in range(50)]) @@ -85,7 +85,7 @@ class TestGroupAdd(TFCTestCase): self.settings = Settings() self.window_list = WindowList() - def test_too_large_final_member_list_raises_se(self) -> None: + def test_too_large_final_member_list_raises_soft_error(self) -> None: # Setup group_list = GroupList(groups=['test_group']) contact_list = ContactList(nicks=[str(n) for n in range(51)]) @@ -97,7 +97,7 @@ class TestGroupAdd(TFCTestCase): self.assert_se("Error: TFC settings only allow 50 members per group.", group_add, cmd_data, self.ts, self.window_list, contact_list, group_list, self.settings) - def test_unknown_group_id_raises_se(self) -> None: + def test_unknown_group_id_raises_soft_error(self) -> None: # Setup group_list = GroupList(groups=['test_group']) contact_list = ContactList(nicks=[str(n) for n in range(21)]) @@ -137,7 +137,7 @@ class TestGroupRemove(TFCTestCase): self.group.members = self.contact_list.contacts[:19] self.settings = Settings() - def test_unknown_group_id_raises_se(self) -> None: + def test_unknown_group_id_raises_soft_error(self) -> None: # Setup group_list = GroupList(groups=['test_group']) contact_list = ContactList(nicks=[str(n) for n in range(21)]) @@ -161,12 +161,12 @@ class TestGroupDelete(TFCTestCase): self.window_list = WindowList() self.group_list = GroupList(groups=['test_group']) - def test_missing_group_raises_se(self) -> None: + def test_missing_group_raises_soft_error(self) -> None: cmd_data = group_name_to_group_id('test_group2') self.assert_se("Error: No group with ID '2e7mHQznTMsP6' found.", group_delete, cmd_data, self.ts, self.window_list, self.group_list) - def test_unknown_group_id_raises_se(self) -> None: + def test_unknown_group_id_raises_soft_error(self) -> None: # Setup group_list = GroupList(groups=['test_group']) cmd_data = group_name_to_group_id('test_group2') @@ -193,21 +193,21 @@ class TestGroupRename(TFCTestCase): self.contact_list = ContactList(nicks=['alice']) self.args = self.ts, self.window_list, self.contact_list, self.group_list - def test_missing_group_id_raises_se(self) -> None: + def test_missing_group_id_raises_soft_error(self) -> None: # Setup cmd_data = group_name_to_group_id('test_group2') + b'new_name' # Test self.assert_se("Error: No group with ID '2e7mHQznTMsP6' found.", 
group_rename, cmd_data, *self.args) - def test_invalid_group_name_encoding_raises_se(self) -> None: + def test_invalid_group_name_encoding_raises_soft_error(self) -> None: # Setup cmd_data = group_name_to_group_id('test_group') + b'new_name' + UNDECODABLE_UNICODE # Test self.assert_se("Error: New name for group 'test_group' was invalid.", group_rename, cmd_data, *self.args) - def test_invalid_group_name_raises_se(self) -> None: + def test_invalid_group_name_raises_soft_error(self) -> None: # Setup cmd_data = group_name_to_group_id('test_group') + b'new_name\x1f' diff --git a/tests/receiver/test_files.py b/tests/receiver/test_files.py index 55a39b8..ba39334 100644 --- a/tests/receiver/test_files.py +++ b/tests/receiver/test_files.py @@ -73,7 +73,7 @@ class ProcessAssembledFile(TFCTestCase): """Post-test actions.""" cleanup(self.unit_test_dir) - def test_invalid_structure_raises_se(self) -> None: + def test_invalid_structure_raises_soft_error(self) -> None: # Setup payload = b'testfile.txt' @@ -81,7 +81,7 @@ class ProcessAssembledFile(TFCTestCase): self.assert_se("Error: Received file had an invalid structure.", process_assembled_file, self.ts, payload, *self.args) - def test_invalid_encoding_raises_se(self) -> None: + def test_invalid_encoding_raises_soft_error(self) -> None: # Setup payload = UNDECODABLE_UNICODE + US_BYTE + b'file_data' @@ -89,7 +89,7 @@ class ProcessAssembledFile(TFCTestCase): self.assert_se("Error: Received file name had an invalid encoding.", process_assembled_file, self.ts, payload, *self.args) - def test_invalid_name_raises_se(self) -> None: + def test_invalid_name_raises_soft_error(self) -> None: # Setup payload = b'\x01filename' + US_BYTE + b'file_data' @@ -97,7 +97,7 @@ class ProcessAssembledFile(TFCTestCase): self.assert_se("Error: Received file had an invalid name.", process_assembled_file, self.ts, payload, *self.args) - def test_slash_in_file_name_raises_se(self) -> None: + def test_slash_in_file_name_raises_soft_error(self) -> None: # Setup payload = b'file/name' + US_BYTE + b'file_data' @@ -105,7 +105,7 @@ class ProcessAssembledFile(TFCTestCase): self.assert_se("Error: Received file had an invalid name.", process_assembled_file, self.ts, payload, *self.args) - def test_invalid_key_raises_se(self) -> None: + def test_invalid_key_raises_soft_error(self) -> None: # Setup payload = b'testfile.txt' + US_BYTE + b'file_data' @@ -113,7 +113,7 @@ class ProcessAssembledFile(TFCTestCase): self.assert_se("Error: Received file had an invalid key.", process_assembled_file, self.ts, payload, *self.args) - def test_decryption_fail_raises_se(self) -> None: + def test_decryption_fail_raises_soft_error(self) -> None: # Setup file_data = encrypt_and_sign(b'file_data', self.key)[::-1] payload = b'testfile.txt' + US_BYTE + file_data @@ -122,7 +122,7 @@ class ProcessAssembledFile(TFCTestCase): self.assert_se("Error: Decryption of file data failed.", process_assembled_file, self.ts, payload, *self.args) - def test_invalid_compression_raises_se(self) -> None: + def test_invalid_compression_raises_soft_error(self) -> None: # Setup compressed = zlib.compress(b'file_data', level=COMPRESSION_LEVEL)[::-1] file_data = encrypt_and_sign(compressed, self.key) + self.key @@ -178,7 +178,7 @@ class TestNewFile(TFCTestCase): """Post-test actions.""" cleanup(self.unit_test_dir) - def test_unknown_account_raises_se(self) -> None: + def test_unknown_account_raises_soft_error(self) -> None: # Setup file_ct = encrypt_and_sign(self.compressed, self.file_key) packet = nick_to_pub_key('Bob') + 
ORIGIN_CONTACT_HEADER + file_ct @@ -186,7 +186,7 @@ class TestNewFile(TFCTestCase): # Test self.assert_se("File from an unknown account.", new_file, self.ts, packet, *self.args) - def test_disabled_file_reception_raises_se(self) -> None: + def test_disabled_file_reception_raises_soft_error(self) -> None: # Setup file_ct = encrypt_and_sign(self.compressed, self.file_key) packet = nick_to_pub_key('Alice') + ORIGIN_CONTACT_HEADER + file_ct @@ -237,13 +237,14 @@ class TestProcessFile(TFCTestCase): """Post-test actions.""" cleanup(self.unit_test_dir) - def test_invalid_key_raises_se(self) -> None: + def test_invalid_key_raises_soft_error(self) -> None: self.file_key = SYMMETRIC_KEY_LENGTH * b'f' self.args = self.file_key, self.contact_list, self.window_list, self.settings + self.assert_se("Error: Decryption key for file from Alice was invalid.", process_file, self.ts, self.account, self.file_ct, *self.args) - def test_invalid_compression_raises_se(self) -> None: + def test_invalid_compression_raises_soft_error(self) -> None: compressed = zlib.compress(b'file_data', level=COMPRESSION_LEVEL)[::-1] file_data = encrypt_and_sign(compressed, self.file_key) @@ -251,7 +252,7 @@ class TestProcessFile(TFCTestCase): process_file, self.ts, self.account, file_data, *self.args) @mock.patch('time.sleep', return_value=None) - def test_invalid_file_name_raises_se(self, _: Any) -> None: + def test_invalid_file_name_raises_soft_error(self, _: Any) -> None: compressed = zlib.compress(UNDECODABLE_UNICODE + b'file_data', level=COMPRESSION_LEVEL) file_data = encrypt_and_sign(compressed, self.file_key) @@ -259,7 +260,7 @@ class TestProcessFile(TFCTestCase): process_file, self.ts, self.account, file_data, *self.args) @mock.patch('time.sleep', return_value=None) - def test_non_printable_name_raises_se(self, _: Any) -> None: + def test_non_printable_name_raises_soft_error(self, _: Any) -> None: compressed = zlib.compress(str_to_bytes("file\x01") + b'file_data', level=COMPRESSION_LEVEL) file_data = encrypt_and_sign(compressed, self.file_key) @@ -267,7 +268,7 @@ class TestProcessFile(TFCTestCase): process_file, self.ts, self.account, file_data, *self.args) @mock.patch('time.sleep', return_value=None) - def test_slash_in_name_raises_se(self, _: Any) -> None: + def test_slash_in_name_raises_soft_error(self, _: Any) -> None: compressed = zlib.compress(str_to_bytes("Alice/file.txt") + b'file_data', level=COMPRESSION_LEVEL) file_data = encrypt_and_sign(compressed, self.file_key) diff --git a/tests/receiver/test_key_exchanges.py b/tests/receiver/test_key_exchanges.py index 0b93a15..537fd91 100644 --- a/tests/receiver/test_key_exchanges.py +++ b/tests/receiver/test_key_exchanges.py @@ -40,8 +40,8 @@ from src.common.statics import (ARGON2_SALT_LENGTH, BOLD_ON, CLEAR_ENTIRE_SCR from src.receiver.key_exchanges import key_ex_ecdhe, key_ex_psk_rx, key_ex_psk_tx, local_key_rdy, process_local_key from tests.mock_classes import Contact, ContactList, KeyList, KeySet, Settings, WindowList -from tests.utils import cd_unit_test, cleanup, nick_to_short_address, nick_to_pub_key, tear_queue, TFCTestCase -from tests.utils import UNDECODABLE_UNICODE +from tests.utils import (cd_unit_test, cleanup, nick_to_short_address, nick_to_pub_key, tear_queue, TFCTestCase, + UNDECODABLE_UNICODE) class TestProcessLocalKey(TFCTestCase): @@ -73,7 +73,7 @@ class TestProcessLocalKey(TFCTestCase): @mock.patch('tkinter.Tk', return_value=MagicMock()) @mock.patch('time.sleep', return_value=None) @mock.patch('builtins.input', 
return_value='5KfgdgUvseWfNkoUPWSvxMPNStu5wBBxyjz1zpZtLEjk7ZvwEAT') - def test_invalid_decryption_key_raises_se(self, *_: Any) -> None: + def test_invalid_decryption_key_raises_soft_error(self, *_: Any) -> None: # Setup packet = b'' self.key_list.keysets = [] @@ -104,7 +104,7 @@ class TestProcessLocalKey(TFCTestCase): @mock.patch('tkinter.Tk', return_value=MagicMock()) @mock.patch('time.sleep', return_value=None) @mock.patch('builtins.input', side_effect=KeyboardInterrupt) - def test_keyboard_interrupt_raises_se(self, *_: Any) -> None: + def test_keyboard_interrupt_raises_soft_error(self, *_: Any) -> None: # Setup self.window_list.active_win = self.window_list.get_window(nick_to_pub_key('Alice')) @@ -115,7 +115,7 @@ class TestProcessLocalKey(TFCTestCase): @mock.patch('tkinter.Tk', return_value=MagicMock()) @mock.patch('time.sleep', return_value=None) @mock.patch('builtins.input', side_effect=[b58encode(kek), b58encode(kek), b58encode(kek), b58encode(new_kek)]) - def test_old_local_key_packet_raises_se(self, *_: Any) -> None: + def test_old_local_key_packet_raises_soft_error(self, *_: Any) -> None: # Setup self.key_list.keysets = [] new_key = os.urandom(SYMMETRIC_KEY_LENGTH) @@ -202,7 +202,7 @@ class TestKeyExECDHE(TFCTestCase): self.args = self.packet, self.ts, self.window_list, self.contact_list, self.key_list, self.settings @mock.patch('time.sleep', return_value=None) - def test_invalid_nick_raises_se(self, _: Any) -> None: + def test_invalid_nick_raises_soft_error(self, _: Any) -> None: self.packet = (nick_to_pub_key("Alice") + SYMMETRIC_KEY_LENGTH * b'\x01' + SYMMETRIC_KEY_LENGTH * b'\x02' @@ -252,7 +252,7 @@ class TestKeyExPSKTx(TFCTestCase): self.args = self.packet, self.ts, self.window_list, self.contact_list, self.key_list, self.settings @mock.patch('time.sleep', return_value=None) - def test_invalid_nick_raises_se(self, _: Any) -> None: + def test_invalid_nick_raises_soft_error(self, _: Any) -> None: self.packet = (nick_to_pub_key("Alice") + SYMMETRIC_KEY_LENGTH * b'\x01' + bytes(SYMMETRIC_KEY_LENGTH) @@ -305,13 +305,13 @@ class TestKeyExPSKRx(TFCTestCase): """Post-test actions.""" cleanup(self.unit_test_dir) - def test_unknown_account_raises_se(self) -> None: + def test_unknown_account_raises_soft_error(self) -> None: self.assert_se(f"Error: Unknown account '{nick_to_short_address('Bob')}'.", key_ex_psk_rx, b'\x00' + nick_to_pub_key("Bob"), self.ts, self.window_list, self.contact_list, self.key_list, self.settings) @mock.patch('builtins.input', return_value=file_name) - def test_invalid_psk_data_raises_se(self, _: Any) -> None: + def test_invalid_psk_data_raises_soft_error(self, _: Any) -> None: # Setup with open(self.file_name, 'wb+') as f: f.write(os.urandom(135)) @@ -321,7 +321,7 @@ class TestKeyExPSKRx(TFCTestCase): @mock.patch('time.sleep', return_value=None) @mock.patch('builtins.input', return_value=file_name) - def test_permission_error_raises_se(self, *_: Any) -> None: + def test_permission_error_raises_soft_error(self, *_: Any) -> None: # Setup with open(self.file_name, 'wb+') as f: f.write(os.urandom(PSK_FILE_SIZE)) @@ -423,7 +423,7 @@ class TestKeyExPSKRx(TFCTestCase): @mock.patch('time.sleep', return_value=None) @mock.patch('builtins.input', side_effect=[file_name, '']) @mock.patch('getpass.getpass', side_effect=[KeyboardInterrupt]) - def test_valid_psk_keyboard_interrupt_raises_se(self, *_: Any) -> None: + def test_valid_psk_keyboard_interrupt_raises_soft_error(self, *_: Any) -> None: with open(self.file_name, 'wb+') as f: f.write(bytes(PSK_FILE_SIZE)) diff --git 
a/tests/receiver/test_messages.py b/tests/receiver/test_messages.py index 8bed0f2..0d50085 100644 --- a/tests/receiver/test_messages.py +++ b/tests/receiver/test_messages.py @@ -38,8 +38,8 @@ from src.receiver.packet import PacketList from src.receiver.windows import WindowList from tests.mock_classes import ContactList, GroupList, KeyList, MasterKey, Settings -from tests.utils import assembly_packet_creator, cd_unit_test, cleanup, group_name_to_group_id -from tests.utils import nick_to_pub_key, TFCTestCase +from tests.utils import (assembly_packet_creator, cd_unit_test, cleanup, group_name_to_group_id, + nick_to_pub_key, TFCTestCase) class TestProcessMessagePacket(TFCTestCase): @@ -88,7 +88,7 @@ class TestProcessMessagePacket(TFCTestCase): # Invalid packets @mock.patch('time.sleep', return_value=None) - def test_invalid_origin_header_raises_se(self, _: Any) -> None: + def test_invalid_origin_header_raises_soft_error(self, _: Any) -> None: # Setup invalid_origin_header = b'e' packet = nick_to_pub_key('Alice') + invalid_origin_header + MESSAGE_LENGTH * b'm' @@ -98,7 +98,7 @@ class TestProcessMessagePacket(TFCTestCase): process_message_packet, self.ts, packet, *self.args) @mock.patch('time.sleep', return_value=None) - def test_masqueraded_command_raises_se(self, _: Any) -> None: + def test_masqueraded_command_raises_soft_error(self, _: Any) -> None: for origin_header in [ORIGIN_USER_HEADER, ORIGIN_CONTACT_HEADER]: # Setup packet = LOCAL_PUBKEY + origin_header + MESSAGE_LENGTH * b'm' @@ -172,7 +172,7 @@ class TestProcessMessagePacket(TFCTestCase): # File key messages @mock.patch('time.sleep', return_value=None) - def test_user_origin_raises_se(self, _: Any) -> None: + def test_user_origin_raises_soft_error(self, _: Any) -> None: assembly_ct_list = assembly_packet_creator(MESSAGE, ' ', origin_header=ORIGIN_USER_HEADER, encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'), message_header=FILE_KEY_HEADER) @@ -181,7 +181,7 @@ class TestProcessMessagePacket(TFCTestCase): self.assert_se("File key message from the user.", process_message_packet, self.ts, p, *self.args) @mock.patch('time.sleep', return_value=None) - def test_invalid_file_key_data_raises_se(self, _: Any) -> None: + def test_invalid_file_key_data_raises_soft_error(self, _: Any) -> None: assembly_ct_list = assembly_packet_creator(MESSAGE, ' ', origin_header=ORIGIN_CONTACT_HEADER, encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'), message_header=FILE_KEY_HEADER) @@ -191,7 +191,7 @@ class TestProcessMessagePacket(TFCTestCase): process_message_packet, self.ts, p, *self.args) @mock.patch('time.sleep', return_value=None) - def test_too_large_file_key_data_raises_se(self, _: Any) -> None: + def test_too_large_file_key_data_raises_soft_error(self, _: Any) -> None: assembly_ct_list = assembly_packet_creator(MESSAGE, base64.b85encode(BLAKE2_DIGEST_LENGTH * b'a' + SYMMETRIC_KEY_LENGTH * b'b' + b'a').decode(), @@ -216,7 +216,7 @@ class TestProcessMessagePacket(TFCTestCase): # Group messages @mock.patch('time.sleep', return_value=None) - def test_invalid_message_header_raises_se(self, _: Any) -> None: + def test_invalid_message_header_raises_soft_error(self, _: Any) -> None: # Setup assembly_ct_list = assembly_packet_creator(MESSAGE, 'test_message', origin_header=ORIGIN_CONTACT_HEADER, encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'), @@ -227,7 +227,7 @@ class TestProcessMessagePacket(TFCTestCase): process_message_packet, self.ts, assembly_ct_list[0], *self.args) @mock.patch('time.sleep', return_value=None) - def 
test_invalid_window_raises_se(self, _: Any) -> None: + def test_invalid_window_raises_soft_error(self, _: Any) -> None: # Setup assembly_ct_list = assembly_packet_creator(MESSAGE, 'test_message', origin_header=ORIGIN_CONTACT_HEADER, encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'), @@ -240,7 +240,7 @@ class TestProcessMessagePacket(TFCTestCase): process_message_packet, self.ts, assembly_ct_list[0], *self.args) @mock.patch('time.sleep', return_value=None) - def test_invalid_message_raises_se(self, _: Any) -> None: + def test_invalid_message_raises_soft_error(self, _: Any) -> None: # Setup assembly_ct_list = assembly_packet_creator(MESSAGE, ' ', origin_header=ORIGIN_CONTACT_HEADER, encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'), @@ -251,7 +251,7 @@ class TestProcessMessagePacket(TFCTestCase): process_message_packet, self.ts, assembly_ct_list[0], *self.args) @mock.patch('time.sleep', return_value=None) - def test_invalid_whisper_header_raises_se(self, _: Any) -> None: + def test_invalid_whisper_header_raises_soft_error(self, _: Any) -> None: # Setup assembly_ct_list = assembly_packet_creator(MESSAGE, '', origin_header=ORIGIN_CONTACT_HEADER, encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'), @@ -262,7 +262,7 @@ class TestProcessMessagePacket(TFCTestCase): process_message_packet, self.ts, assembly_ct_list[0], *self.args) @mock.patch('time.sleep', return_value=None) - def test_contact_not_in_group_raises_se(self, _: Any) -> None: + def test_contact_not_in_group_raises_soft_error(self, _: Any) -> None: # Setup assembly_ct_list = assembly_packet_creator(MESSAGE, 'test_message', origin_header=ORIGIN_CONTACT_HEADER, diff --git a/tests/receiver/test_output_loop.py b/tests/receiver/test_output_loop.py index e447d45..e57c8f1 100644 --- a/tests/receiver/test_output_loop.py +++ b/tests/receiver/test_output_loop.py @@ -129,7 +129,7 @@ class TestOutputLoop(unittest.TestCase): tx_pub_key) tx_mk, tx_harac = rotate_key(tx_mk, tx_harac) - # ECDHE keyset for Bob + # ECDHE keyset to Bob command = (KEY_EX_ECDHE + nick_to_pub_key("Bob") + (4 * SYMMETRIC_KEY_LENGTH * b"a") @@ -138,7 +138,7 @@ class TestOutputLoop(unittest.TestCase): local_key, local_harac = rotate_key(local_key, local_harac) o_sleep(test_delay) - # Message for Bob + # Message to Bob queue_packet(tx_mk, tx_hk, tx_harac, @@ -147,7 +147,7 @@ class TestOutputLoop(unittest.TestCase): tx_mk, tx_harac = rotate_key(tx_mk, tx_harac) o_sleep(test_delay) - # Enable file reception for Bob + # Enable file reception to Bob command = CH_FILE_RECV + ENABLE.upper() + US_BYTE queue_packet(local_key, tx_hk, local_harac, command) o_sleep(test_delay) diff --git a/tests/receiver/test_packet.py b/tests/receiver/test_packet.py index a752edf..0b1c2d4 100644 --- a/tests/receiver/test_packet.py +++ b/tests/receiver/test_packet.py @@ -38,8 +38,8 @@ from src.transmitter.packet import split_to_assembly_packets from src.receiver.packet import decrypt_assembly_packet, Packet, PacketList from tests.mock_classes import ContactList, create_contact, KeyList, Settings, WindowList -from tests.utils import assembly_packet_creator, cd_unit_test, cleanup, nick_to_pub_key, TFCTestCase -from tests.utils import UNDECODABLE_UNICODE +from tests.utils import (assembly_packet_creator, cd_unit_test, cleanup, nick_to_pub_key, TFCTestCase, + UNDECODABLE_UNICODE) class TestDecryptAssemblyPacket(TFCTestCase): @@ -54,7 +54,7 @@ class TestDecryptAssemblyPacket(TFCTestCase): self.keyset = self.key_list.get_keyset(nick_to_pub_key("Alice")) self.args = self.onion_pub_key, 
self.origin, self.window_list, self.contact_list, self.key_list - def test_decryption_with_zero_rx_key_raises_se(self) -> None: + def test_decryption_with_zero_rx_key_raises_soft_error(self) -> None: # Setup keyset = self.key_list.get_keyset(nick_to_pub_key("Alice")) keyset.rx_mk = bytes(SYMMETRIC_KEY_LENGTH) @@ -64,12 +64,12 @@ class TestDecryptAssemblyPacket(TFCTestCase): self.assert_se("Warning! Loaded zero-key for packet decryption.", decrypt_assembly_packet, packet, *self.args) - def test_invalid_harac_ct_raises_se(self) -> None: + def test_invalid_harac_ct_raises_soft_error(self) -> None: packet = assembly_packet_creator(MESSAGE, payload="Test message", encrypt_packet=True, tamper_harac=True)[0] self.assert_se("Warning! Received packet from Alice had an invalid hash ratchet MAC.", decrypt_assembly_packet, packet, *self.args) - def test_decryption_with_zero_rx_hek_raises_se(self) -> None: + def test_decryption_with_zero_rx_hek_raises_soft_error(self) -> None: # Setup keyset = self.key_list.get_keyset(nick_to_pub_key("Alice")) keyset.rx_hk = bytes(SYMMETRIC_KEY_LENGTH) @@ -78,7 +78,7 @@ class TestDecryptAssemblyPacket(TFCTestCase): # Test self.assert_se("Warning! Loaded zero-key for packet decryption.", decrypt_assembly_packet, packet, *self.args) - def test_expired_harac_raises_se(self) -> None: + def test_expired_harac_raises_soft_error(self) -> None: # Setup self.keyset.rx_harac = 1 @@ -93,7 +93,7 @@ class TestDecryptAssemblyPacket(TFCTestCase): self.assert_se("Dropped packet from Alice.", decrypt_assembly_packet, packet, *self.args) - def test_invalid_packet_ct_raises_se(self) -> None: + def test_invalid_packet_ct_raises_soft_error(self) -> None: packet = assembly_packet_creator(MESSAGE, payload="Test message", encrypt_packet=True, tamper_message=True)[0] self.assert_se("Warning! 
Received packet from Alice had an invalid MAC.", decrypt_assembly_packet, packet, *self.args) @@ -148,7 +148,7 @@ class TestPacket(TFCTestCase): """Post-test actions.""" cleanup(self.unit_test_dir) - def test_invalid_assembly_packet_header_raises_se(self) -> None: + def test_invalid_assembly_packet_header_raises_soft_error(self) -> None: # Setup packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, MESSAGE, self.contact, self.settings) a_packet = assembly_packet_creator(MESSAGE, payload=self.short_msg, s_header_override=b'i')[0] @@ -157,7 +157,7 @@ class TestPacket(TFCTestCase): self.assert_se("Error: Received packet had an invalid assembly packet header.", packet.add_packet, a_packet) self.assertEqual(packet.log_masking_ctr, 1) - def test_missing_start_packet_raises_se(self) -> None: + def test_missing_start_packet_raises_soft_error(self) -> None: # Setup packet = Packet(self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE, self.contact, self.settings) @@ -179,7 +179,7 @@ class TestPacket(TFCTestCase): self.whisper_header + PRIVATE_MESSAGE_HEADER + self.short_msg.encode()) self.assertEqual(packet.log_ct_list, [b'test_ct']) - def test_compression_error_raises_se(self) -> None: + def test_compression_error_raises_soft_error(self) -> None: # Setup packet = Packet(self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE, self.contact, self.settings) packet_list = assembly_packet_creator(MESSAGE, self.short_msg, tamper_compression=True) @@ -203,7 +203,7 @@ class TestPacket(TFCTestCase): self.assertEqual(message, self.whisper_header + PRIVATE_MESSAGE_HEADER + self.msg.encode()) self.assertEqual(packet.log_ct_list, 3 * [b'test_ct']) - def test_decryption_error_raises_se(self) -> None: + def test_decryption_error_raises_soft_error(self) -> None: # Setup packet = Packet(self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE, self.contact, self.settings) packet_list = assembly_packet_creator(MESSAGE, self.msg, tamper_ciphertext=True) @@ -235,7 +235,7 @@ class TestPacket(TFCTestCase): self.assertIsNone(packet.assemble_and_store_file(self.ts, self.onion_pub_key, self.window_list)) self.assertTrue(os.path.isfile(f'{DIR_RECV_FILES}Alice/testfile.txt.1')) - def test_short_file_from_user_raises_se(self) -> None: + def test_short_file_from_user_raises_soft_error(self) -> None: # Setup packet = Packet(self.onion_pub_key, ORIGIN_USER_HEADER, FILE, self.contact, self.settings) packets = split_to_assembly_packets(self.short_f_data, FILE) @@ -245,7 +245,7 @@ class TestPacket(TFCTestCase): self.assert_se("Ignored file from the user.", packet.add_packet, p) self.assertEqual(packet.log_masking_ctr, 1) - def test_unauthorized_file_from_contact_raises_se(self) -> None: + def test_unauthorized_file_from_contact_raises_soft_error(self) -> None: # Setup self.contact.file_reception = False @@ -305,7 +305,7 @@ class TestPacket(TFCTestCase): self.assert_se("Alert! 
File reception disabled mid-transfer.", packet.add_packet, p) self.assertEqual(packet.log_masking_ctr, len(packet_list)) - def test_long_file_from_user_raises_se(self) -> None: + def test_long_file_from_user_raises_soft_error(self) -> None: # Setup packet = Packet(self.onion_pub_key, ORIGIN_USER_HEADER, FILE, self.contact, self.settings) packet_list = assembly_packet_creator(FILE) @@ -314,7 +314,7 @@ class TestPacket(TFCTestCase): self.assert_se("Ignored file from the user.", packet.add_packet, packet_list[0]) self.assertEqual(packet.log_masking_ctr, 1) - def test_unauthorized_long_file_raises_se(self) -> None: + def test_unauthorized_long_file_raises_soft_error(self) -> None: # Setup self.contact.file_reception = False @@ -326,7 +326,7 @@ class TestPacket(TFCTestCase): packet.add_packet, packet_list[0]) self.assertEqual(packet.log_masking_ctr, 1) - def test_invalid_long_file_header_raises_se(self) -> None: + def test_invalid_long_file_header_raises_soft_error(self) -> None: # Setup packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings) packet_list = assembly_packet_creator(FILE, file_name=UNDECODABLE_UNICODE) @@ -390,7 +390,7 @@ class TestPacket(TFCTestCase): self.assertEqual(packet.assemble_command_packet(), command) self.assertEqual(packet.log_masking_ctr, 0) - def test_long_command_hash_mismatch_raises_se(self) -> None: + def test_long_command_hash_mismatch_raises_soft_error(self) -> None: # Setup packet = Packet(LOCAL_ID, ORIGIN_CONTACT_HEADER, COMMAND, self.contact, self.settings) packet_list = assembly_packet_creator(COMMAND, os.urandom(500), tamper_cmd_hash=True) @@ -402,7 +402,7 @@ class TestPacket(TFCTestCase): self.assert_se("Error: Received an invalid command.", packet.assemble_command_packet) self.assertEqual(packet.log_masking_ctr, 0) - def test_long_command_compression_error_raises_se(self) -> None: + def test_long_command_compression_error_raises_soft_error(self) -> None: # Setup packet = Packet(LOCAL_ID, ORIGIN_CONTACT_HEADER, COMMAND, self.contact, self.settings) packet_list = assembly_packet_creator(COMMAND, os.urandom(500), tamper_compression=True) diff --git a/tests/receiver/test_windows.py b/tests/receiver/test_windows.py index 0d17a07..38a7508 100644 --- a/tests/receiver/test_windows.py +++ b/tests/receiver/test_windows.py @@ -76,7 +76,7 @@ class TestRxWindow(TFCTestCase): self.assertEqual(window.window_contacts[0].onion_pub_key, nick_to_pub_key("Alice")) self.assertEqual(window.name, 'test_group') - def test_invalid_uid_raises_se(self) -> None: + def test_invalid_uid_raises_soft_error(self) -> None: self.assert_se("Invalid window 'mfqwcylbmfqwcylbmfqwcylbmfqwcylbmfqwcylbmfqwcylbmfqwbfad'.", self.create_window, ONION_SERVICE_PUBLIC_KEY_LENGTH * b'a') diff --git a/tests/relay/test_commands.py b/tests/relay/test_commands.py index 1c246c9..022fb0f 100644 --- a/tests/relay/test_commands.py +++ b/tests/relay/test_commands.py @@ -136,11 +136,11 @@ class TestChangeECRatio(TFCTestCase): """Pre-test actions.""" self.gateway = Gateway() - def test_non_digit_value_raises_se(self) -> None: + def test_non_digit_value_raises_soft_error(self) -> None: self.assert_se("Error: Received invalid EC ratio value from Transmitter Program.", change_ec_ratio, b'a', self.gateway) - def test_invalid_digit_value_raises_se(self) -> None: + def test_invalid_digit_value_raises_soft_error(self) -> None: self.assert_se("Error: Received invalid EC ratio value from Transmitter Program.", change_ec_ratio, b'-1', self.gateway) @@ -155,11 +155,11 @@ class 
TestChangeBaudrate(TFCTestCase): """Pre-test actions.""" self.gateway = Gateway() - def test_non_digit_value_raises_se(self) -> None: + def test_non_digit_value_raises_soft_error(self) -> None: self.assert_se("Error: Received invalid baud rate value from Transmitter Program.", change_baudrate, b'a', self.gateway) - def test_invalid_digit_value_raises_se(self) -> None: + def test_invalid_digit_value_raises_soft_error(self) -> None: self.assert_se("Error: Received invalid baud rate value from Transmitter Program.", change_baudrate, b'1300', self.gateway) @@ -216,7 +216,7 @@ class TestAddContact(unittest.TestCase): def test_add_contact(self) -> None: command = b''.join([nick_to_pub_key('Alice'), nick_to_pub_key('Bob')]) - self.assertIsNone(add_contact(command, True, self.queues)) + self.assertIsNone(add_contact(command, self.queues, True)) self.assertEqual(self.queues[CONTACT_MGMT_QUEUE].qsize(), 1) for q in [GROUP_MGMT_QUEUE, C_REQ_MGMT_QUEUE]: command = self.queues[q].get() diff --git a/tests/relay/test_diffs.py b/tests/relay/test_diffs.py index 192dbf4..325934c 100644 --- a/tests/relay/test_diffs.py +++ b/tests/relay/test_diffs.py @@ -107,9 +107,9 @@ class TestPubKeyChecker(unittest.TestCase): @mock.patch('shutil.get_terminal_size', return_value=[200, 200]) def test_pub_key_checker(self, _: Any) -> None: # Setup - public_key = TFC_PUBLIC_KEY_LENGTH*b'a' + public_key = TFC_PUBLIC_KEY_LENGTH*b'a' invalid_public_key = b58encode(public_key, public_key=True)[:-1] + 'a' - account = nick_to_pub_key('Bob') + account = nick_to_pub_key('Bob') for local_test in [True, False]: self.queues[PUB_KEY_SEND_QUEUE].put((account, public_key)) diff --git a/tests/relay/test_server.py b/tests/relay/test_server.py index 0e51f21..d54ba78 100644 --- a/tests/relay/test_server.py +++ b/tests/relay/test_server.py @@ -38,7 +38,7 @@ class TestFlaskServer(unittest.TestCase): url_token_public_key = X448.derive_public_key(url_token_private_key).hex() url_token = 'a450987345098723459870234509827340598273405983274234098723490285' url_token_old = 'a450987345098723459870234509827340598273405983274234098723490286' - url_token_invalid = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + url_token_invalid = 'ääääääääääääääääääääääääääääääääääääääääääääääääääääääääääääääää' onion_pub_key = nick_to_pub_key('Alice') onion_address = nick_to_onion_address('Alice') packet1 = "packet1" @@ -48,6 +48,13 @@ class TestFlaskServer(unittest.TestCase): # Test app = flask_server(queues, url_token_public_key, unit_test=True) + # Test valid URL token returns all queued messages + queues[URL_TOKEN_QUEUE].put((onion_pub_key, url_token_old)) + queues[URL_TOKEN_QUEUE].put((onion_pub_key, url_token)) + queues[M_TO_FLASK_QUEUE].put((packet1, onion_pub_key)) + queues[M_TO_FLASK_QUEUE].put((packet2, onion_pub_key)) + queues[F_TO_FLASK_QUEUE].put((packet3, onion_pub_key)) + with app.test_client() as c: # Test root domain returns public key of server. 
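The tests/relay/test_server.py change here moves the queue setup ahead of the test client, so the packets are already queued when the valid URL token is requested. The pattern under test, a URL-token-gated route that drains a per-contact queue, can be sketched as below; make_app() and its route are hypothetical stand-ins, not TFC's actual flask_server().

from queue import Queue

from flask import Flask                     # assumption: Flask, as used by TFC's Relay Program


def make_app(valid_token: str, packet_queue: Queue) -> Flask:
    """Serve queued packets only to the holder of the valid URL token."""
    app = Flask(__name__)

    @app.route('/<token>/messages/')
    def messages(token: str) -> str:
        if token != valid_token:
            return ''                       # unknown token: reveal nothing
        packets = []
        while not packet_queue.empty():     # drain every packet queued so far
            packets.append(packet_queue.get())
        return '\n'.join(packets)

    return app


packet_queue = Queue()
packet_queue.put('packet1')
packet_queue.put('packet2')
app = make_app('a4509873', packet_queue)

with app.test_client() as c:
    assert c.get('/a4509873/messages/').data == b'packet1\npacket2'
    assert c.get('/deadbeef/messages/').data == b''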
resp = c.get('/') @@ -63,13 +70,6 @@ class TestFlaskServer(unittest.TestCase): resp = c.get(f'/{url_token_invalid}/files/') self.assertEqual(b'', resp.data) - # Test valid URL token returns all queued messages - queues[URL_TOKEN_QUEUE].put((onion_pub_key, url_token_old)) - queues[URL_TOKEN_QUEUE].put((onion_pub_key, url_token)) - queues[M_TO_FLASK_QUEUE].put((packet1, onion_pub_key)) - queues[M_TO_FLASK_QUEUE].put((packet2, onion_pub_key)) - queues[F_TO_FLASK_QUEUE].put((packet3, onion_pub_key)) - with app.test_client() as c: resp = c.get(f'/{url_token}/messages/') self.assertEqual(b'packet1\npacket2', resp.data) diff --git a/tests/transmitter/test_commands.py b/tests/transmitter/test_commands.py index 6dffac5..1c7d3ad 100644 --- a/tests/transmitter/test_commands.py +++ b/tests/transmitter/test_commands.py @@ -28,9 +28,9 @@ from unittest import mock from unittest.mock import MagicMock from typing import Any -from src.common.database import TFCDatabase, MessageLog -from src.common.db_logs import write_log_entry -from src.common.encoding import bool_to_bytes +from src.common.database import TFCDatabase, MessageLog +from src.common.db_logs import write_log_entry +from src.common.encoding import bool_to_bytes from src.common.db_masterkey import MasterKey as OrigMasterKey from src.common.statics import (BOLD_ON, CLEAR_ENTIRE_SCREEN, COMMAND_PACKET_QUEUE, CURSOR_LEFT_UP_CORNER, DIR_USER_DATA, KEY_MGMT_ACK_QUEUE, KEX_STATUS_NO_RX_PSK, KEX_STATUS_UNVERIFIED, @@ -41,16 +41,16 @@ from src.common.statics import (BOLD_ON, CLEAR_ENTIRE_SCREEN, COMMAND_PACKE UNENCRYPTED_WIPE_COMMAND, VERSION, WIN_TYPE_CONTACT, WIN_TYPE_GROUP, KDB_HALT_ACK_HEADER, KDB_M_KEY_CHANGE_HALT_HEADER) -from src.transmitter.commands import change_master_key, change_setting, clear_screens, exit_tfc, log_command -from src.transmitter.commands import print_about, print_help, print_recipients, print_settings, process_command -from src.transmitter.commands import remove_log, rxp_display_unread, rxp_show_sys_win, send_onion_service_key, verify -from src.transmitter.commands import whisper, whois, wipe +from src.transmitter.commands import (change_master_key, change_setting, clear_screens, exit_tfc, log_command, + print_about, print_help, print_recipients, print_settings, process_command, + remove_log, rxp_display_unread, rxp_show_sys_win, send_onion_service_key, + verify, whisper, whois, wipe) from src.transmitter.packet import split_to_assembly_packets -from tests.mock_classes import ContactList, create_contact, Gateway, GroupList, MasterKey, OnionService, Settings -from tests.mock_classes import TxWindow, UserInput -from tests.utils import assembly_packet_creator, cd_unit_test, cleanup, group_name_to_group_id -from tests.utils import gen_queue_dict, nick_to_onion_address, nick_to_pub_key, tear_queues, TFCTestCase +from tests.mock_classes import (ContactList, create_contact, Gateway, GroupList, MasterKey, OnionService, Settings, + TxWindow, UserInput) +from tests.utils import (assembly_packet_creator, cd_unit_test, cleanup, group_name_to_group_id, gen_queue_dict, + nick_to_onion_address, nick_to_pub_key, tear_queues, TFCTestCase) class TestProcessCommand(TFCTestCase): @@ -268,17 +268,17 @@ class TestLogCommand(TFCTestCase): log_command, UserInput("history"), *self.args) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1) - def test_invalid_number_raises_se(self) -> None: + def test_invalid_number_raises_soft_error(self) -> None: self.assert_se("Error: Invalid number of messages.", log_command, UserInput('history a'), 
*self.args) - def test_too_high_number_raises_se(self) -> None: + def test_too_high_number_raises_soft_error(self) -> None: self.assert_se("Error: Invalid number of messages.", log_command, UserInput('history 94857634985763454345'), *self.args) @mock.patch('time.sleep', return_value=None) @mock.patch('builtins.input', return_value='No') - def test_user_abort_raises_se(self, *_: Any) -> None: + def test_user_abort_raises_soft_error(self, *_: Any) -> None: self.assert_se("Log file export aborted.", log_command, UserInput('export'), *self.args) @@ -290,7 +290,7 @@ class TestLogCommand(TFCTestCase): @mock.patch('time.sleep', return_value=None) @mock.patch('builtins.input', return_value='Yes') @mock.patch('getpass.getpass', side_effect=['test_password', 'test_password', KeyboardInterrupt]) - def test_keyboard_interrupt_raises_se(self, *_: Any) -> None: + def test_keyboard_interrupt_raises_soft_error(self, *_: Any) -> None: self.master_key = OrigMasterKey(operation=TX, local_test=True) self.assert_se("Authentication aborted.", log_command, UserInput('export'), *self.args) @@ -543,12 +543,12 @@ class TestChangeMasterKey(TFCTestCase): self.assert_se("Error: Command is disabled during traffic masking.", change_master_key, UserInput(), *self.args) - def test_missing_target_sys_raises_se(self) -> None: + def test_missing_target_sys_raises_soft_error(self) -> None: self.assert_se("Error: No target-system ('tx' or 'rx') specified.", change_master_key, UserInput("passwd "), *self.args) @mock.patch('getpass.getpass', return_value='test_password') - def test_invalid_target_sys_raises_se(self, _: Any) -> None: + def test_invalid_target_sys_raises_soft_error(self, _: Any) -> None: self.assert_se("Error: Invalid target system 't'.", change_master_key, UserInput("passwd t"), *self.args) @@ -557,7 +557,7 @@ class TestChangeMasterKey(TFCTestCase): @mock.patch('getpass.getpass', side_effect=['test_password', 'a', 'a']) @mock.patch('time.sleep', return_value=None) @mock.patch('src.common.db_masterkey.MIN_KEY_DERIVATION_TIME', 0.01) - def test_invalid_response_from_key_db_raises_se(self, *_: Any) -> None: + def test_invalid_response_from_key_db_raises_soft_error(self, *_: Any) -> None: # Setup def mock_sender_loop() -> None: """Mock sender loop key management functionality.""" @@ -581,7 +581,7 @@ class TestChangeMasterKey(TFCTestCase): @mock.patch('getpass.getpass', side_effect=['test_password', 'a', 'a']) @mock.patch('time.sleep', return_value=None) @mock.patch('src.common.db_masterkey.MIN_KEY_DERIVATION_TIME', 0.01) - def test_transmitter_command_raises_system_exit_if_key_database_returns_invalid_master_key(self, *_: Any) -> None: + def test_transmitter_command_raises_critical_error_if_key_database_returns_invalid_master_key(self, *_: Any) -> None: # Setup def mock_sender_loop() -> None: """Mock sender loop key management functionality.""" @@ -693,7 +693,7 @@ class TestChangeMasterKey(TFCTestCase): @mock.patch('time.sleep', return_value=None) @mock.patch('getpass.getpass', side_effect=KeyboardInterrupt) - def test_keyboard_interrupt_raises_se(self, *_: Any) -> None: + def test_keyboard_interrupt_raises_soft_error(self, *_: Any) -> None: self.assert_se("Authentication aborted.", change_master_key, UserInput("passwd tx"), *self.args) @@ -717,7 +717,7 @@ class TestRemoveLog(TFCTestCase): tear_queues(self.queues) cleanup(self.unit_test_dir) - def test_missing_contact_raises_se(self) -> None: + def test_missing_contact_raises_soft_error(self) -> None: self.assert_se("Error: No contact/group specified.", remove_log, 
UserInput(''), *self.args) @@ -734,12 +734,12 @@ class TestRemoveLog(TFCTestCase): @mock.patch('shutil.get_terminal_size', return_value=[150, 150]) @mock.patch('builtins.input', return_value='Yes') - def test_removal_with_invalid_account_raises_se(self, *_: Any) -> None: + def test_removal_with_invalid_account_raises_soft_error(self, *_: Any) -> None: self.assert_se("Error: Invalid account.", remove_log, UserInput(f'/rmlogs {nick_to_onion_address("Alice")[:-1] + "a"}'), *self.args) @mock.patch('builtins.input', return_value='Yes') - def test_invalid_group_id_raises_se(self, _: Any) -> None: + def test_invalid_group_id_raises_soft_error(self, _: Any) -> None: self.assert_se("Error: Invalid group ID.", remove_log, UserInput(f'/rmlogs {group_name_to_group_id("test_group")[:-1] + b"a"}'), *self.args) @@ -788,7 +788,7 @@ class TestRemoveLog(TFCTestCase): self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1) @mock.patch('builtins.input', return_value='Yes') - def test_unknown_selector_raises_se(self, _: Any) -> None: + def test_unknown_selector_raises_soft_error(self, _: Any) -> None: # Setup write_log_entry(M_S_HEADER + PADDING_LENGTH * b'a', nick_to_pub_key("Alice"), self.tfc_log_database) @@ -814,15 +814,15 @@ class TestChangeSetting(TFCTestCase): """Post-test actions.""" tear_queues(self.queues) - def test_missing_setting_raises_se(self) -> None: + def test_missing_setting_raises_soft_error(self) -> None: self.assert_se("Error: No setting specified.", change_setting, UserInput('set'), *self.args) - def test_invalid_setting_raises_se(self) -> None: + def test_invalid_setting_raises_soft_error(self) -> None: self.assert_se("Error: Invalid setting 'e_correction_ratia'.", change_setting, UserInput("set e_correction_ratia true"), *self.args) - def test_missing_value_raises_se(self) -> None: + def test_missing_value_raises_soft_error(self) -> None: self.assert_se("Error: No value for setting specified.", change_setting, UserInput("set serial_error_correction"), *self.args) @@ -1027,11 +1027,11 @@ class TestVerify(TFCTestCase): self.window.contact = self.contact self.args = self.window, self.contact_list - def test_active_group_raises_se(self) -> None: + def test_active_group_raises_soft_error(self) -> None: self.window.type = WIN_TYPE_GROUP self.assert_se("Error: A group is selected.", verify, *self.args) - def test_psk_raises_se(self) -> None: + def test_psk_raises_soft_error(self) -> None: self.contact.kex_status = KEX_STATUS_NO_RX_PSK self.assert_se("Pre-shared keys have no fingerprints.", verify, *self.args) @@ -1048,7 +1048,7 @@ class TestVerify(TFCTestCase): @mock.patch('time.sleep', return_value=None) @mock.patch('builtins.input', side_effect=KeyboardInterrupt) - def test_keyboard_interrupt_raises_se(self, *_: Any) -> None: + def test_keyboard_interrupt_raises_soft_error(self, *_: Any) -> None: self.contact.kex_status = KEX_STATUS_VERIFIED self.assert_se("Fingerprint verification aborted.", verify, *self.args) self.assertEqual(self.contact.kex_status, KEX_STATUS_VERIFIED) @@ -1066,7 +1066,7 @@ class TestWhisper(TFCTestCase): self.queues = gen_queue_dict() self.args = self.window, self.settings, self.queues - def test_empty_input_raises_se(self) -> None: + def test_empty_input_raises_soft_error(self) -> None: self.assert_se("Error: No whisper message specified.", whisper, UserInput("whisper"), *self.args) @@ -1087,10 +1087,10 @@ class TestWhois(TFCTestCase): self.group_list = GroupList(groups=['test_group']) self.args = self.contact_list, self.group_list - def 
test_missing_selector_raises_se(self) -> None: + def test_missing_selector_raises_soft_error(self) -> None: self.assert_se("Error: No account or nick specified.", whois, UserInput("whois"), *self.args) - def test_unknown_account_raises_se(self) -> None: + def test_unknown_account_raises_soft_error(self) -> None: self.assert_se("Error: Unknown selector.", whois, UserInput("whois alice"), *self.args) def test_nick_from_account(self) -> None: @@ -1132,7 +1132,7 @@ class TestWipe(TFCTestCase): self.args = self.settings, self.queues, self.gateway @mock.patch('builtins.input', return_value='No') - def test_no_raises_se(self, _: Any) -> None: + def test_no_raises_soft_error(self, _: Any) -> None: self.assert_se("Wipe command aborted.", wipe, *self.args) @mock.patch('os.system', return_value=None) diff --git a/tests/transmitter/test_commands_g.py b/tests/transmitter/test_commands_g.py index 4a06ca6..7d165ff 100644 --- a/tests/transmitter/test_commands_g.py +++ b/tests/transmitter/test_commands_g.py @@ -28,8 +28,8 @@ from src.common.encoding import b58encode from src.common.statics import (COMMAND_PACKET_QUEUE, GROUP_ID_LENGTH, RELAY_PACKET_QUEUE, WIN_TYPE_CONTACT, WIN_TYPE_GROUP) -from src.transmitter.commands_g import group_add_member, group_create, group_rm_group, group_rm_member -from src.transmitter.commands_g import process_group_command, group_rename +from src.transmitter.commands_g import (group_add_member, group_create, group_rm_group, group_rm_member, + process_group_command, group_rename) from tests.mock_classes import create_group, Contact, ContactList, GroupList, MasterKey, Settings, UserInput, TxWindow from tests.utils import cd_unit_test, cleanup, gen_queue_dict, nick_to_pub_key, tear_queues, TFCTestCase @@ -58,19 +58,19 @@ class TestProcessGroupCommand(TFCTestCase): self.assert_se("Error: Command is disabled during traffic masking.", process_group_command, UserInput(), *self.args) - def test_invalid_command_raises_se(self) -> None: + def test_invalid_command_raises_soft_error(self) -> None: self.assert_se("Error: Invalid group command.", process_group_command, UserInput('group '), *self.args) - def test_invalid_command_parameters_raises_se(self) -> None: + def test_invalid_command_parameters_raises_soft_error(self) -> None: self.assert_se("Error: Invalid group command.", process_group_command, UserInput('group bad'), *self.args) - def test_missing_group_id_raises_se(self) -> None: + def test_missing_group_id_raises_soft_error(self) -> None: self.assert_se("Error: No group ID specified.", process_group_command, UserInput('group join '), *self.args) - def test_invalid_group_id_raises_se(self) -> None: + def test_invalid_group_id_raises_soft_error(self) -> None: self.assert_se("Error: Invalid group ID.", process_group_command, UserInput('group join invalid'), *self.args) - def test_missing_name_raises_se(self) -> None: + def test_missing_name_raises_soft_error(self) -> None: self.assert_se("Error: No group name specified.", process_group_command, UserInput('group create '), *self.args) @mock.patch('builtins.input', return_value='Yes') @@ -105,7 +105,7 @@ class TestGroupCreate(TFCTestCase): self.group.members = self.contact_list.contacts self.account_list = [nick_to_pub_key(str(n)) for n in range(no_contacts)] - def test_invalid_group_name_raises_se(self) -> None: + def test_invalid_group_name_raises_soft_error(self) -> None: # Setup self.configure_groups(no_contacts=21) @@ -113,7 +113,7 @@ class TestGroupCreate(TFCTestCase): self.assert_se("Error: Group name must be printable.", 
group_create, 'test_group\x1f', self.account_list, *self.args) - def test_too_many_purp_accounts_raises_se(self) -> None: + def test_too_many_purp_accounts_raises_soft_error(self) -> None: # Setup self.configure_groups(no_contacts=60) @@ -123,7 +123,7 @@ class TestGroupCreate(TFCTestCase): group_create, 'test_group_50', cl_str, self.contact_list, self.group_list, self.settings, self.queues, self.master_key) - def test_full_group_list_raises_se(self) -> None: + def test_full_group_list_raises_soft_error(self) -> None: # Setup self.group_list = GroupList(groups=[f"testgroup_{n}" for n in range(50)]) @@ -180,7 +180,7 @@ class TestGroupAddMember(TFCTestCase): def test_raises_fr_if_specified_group_does_not_exist_and_user_chooses_no(self, *_: Any) -> None: self.assert_se("Group creation aborted.", group_add_member, 'test_group', [], *self.args) - def test_too_large_final_member_list_raises_se(self) -> None: + def test_too_large_final_member_list_raises_soft_error(self) -> None: # Setup contact_list = ContactList(nicks=[str(n) for n in range(51)]) group_list = GroupList(groups=['testgroup']) @@ -266,18 +266,18 @@ class TestGroupRmGroup(TFCTestCase): @mock.patch('time.sleep', return_value=None) @mock.patch('builtins.input', return_value='No') - def test_cancel_of_remove_raises_se(self, *_: Any) -> None: + def test_cancel_of_remove_raises_soft_error(self, *_: Any) -> None: self.assert_se("Group removal aborted.", group_rm_group, 'test_group', *self.args) @mock.patch('builtins.input', return_value='Yes') - def test_remove_group_not_on_transmitter_raises_se(self, _: Any) -> None: + def test_remove_group_not_on_transmitter_raises_soft_error(self, _: Any) -> None: unknown_group_id = b58encode(bytes(GROUP_ID_LENGTH)) self.assert_se("Transmitter has no group '2dVseX46KS9Sp' to remove.", group_rm_group, unknown_group_id, *self.args) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 2) @mock.patch('builtins.input', return_value='Yes') - def test_invalid_group_id_raises_se(self, _: Any) -> None: + def test_invalid_group_id_raises_soft_error(self, _: Any) -> None: invalid_group_id = b58encode(bytes(GROUP_ID_LENGTH))[:-1] self.assert_se("Error: Invalid group name/ID.", group_rm_group, invalid_group_id, *self.args) @@ -304,14 +304,14 @@ class TestGroupRename(TFCTestCase): """Post-test actions.""" tear_queues(self.queues) - def test_contact_window_raises_se(self) -> None: + def test_contact_window_raises_soft_error(self) -> None: # Setup self.window.type = WIN_TYPE_CONTACT # Test self.assert_se("Error: Selected window is not a group window.", group_rename, "window", *self.args) - def test_invalid_group_name_raises_se(self) -> None: + def test_invalid_group_name_raises_soft_error(self) -> None: # Setup self.window.type = WIN_TYPE_GROUP self.window.group = self.group_list.get_group('test_group') diff --git a/tests/transmitter/test_contact.py b/tests/transmitter/test_contact.py index d21a66a..3416afd 100644 --- a/tests/transmitter/test_contact.py +++ b/tests/transmitter/test_contact.py @@ -30,13 +30,13 @@ from src.common.statics import (COMMAND_PACKET_QUEUE, CONFIRM_CODE_LENGTH, FINGE KEY_MANAGEMENT_QUEUE, LOCAL_ID, LOG_SETTING_QUEUE, RELAY_PACKET_QUEUE, TM_COMMAND_PACKET_QUEUE, WIN_TYPE_CONTACT, WIN_TYPE_GROUP) -from src.transmitter.contact import add_new_contact, change_nick, contact_setting, get_onion_address_from_user -from src.transmitter.contact import remove_contact +from src.transmitter.contact import (add_new_contact, change_nick, contact_setting, get_onion_address_from_user, + remove_contact) 
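These transmitter tests lean heavily on @mock.patch('builtins.input', side_effect=...) and @mock.patch('getpass.getpass', ...) to script interactive input, including KeyboardInterrupt for Ctrl-C aborts. A self-contained sketch of the technique, with a hypothetical prompt_nick() helper standing in for TFC's interactive prompts:

import unittest
from unittest import mock


def prompt_nick() -> str:
    """Hypothetical helper: ask for a nick and treat Ctrl-C as an abort."""
    try:
        return input("nick: ")
    except KeyboardInterrupt:
        return "aborted"


class TestPromptNick(unittest.TestCase):

    @mock.patch('builtins.input', side_effect=['Alice'])
    def test_returns_typed_nick(self, _) -> None:
        self.assertEqual(prompt_nick(), 'Alice')      # the mock feeds 'Alice' to input()

    @mock.patch('builtins.input', side_effect=KeyboardInterrupt)
    def test_ctrl_c_aborts(self, _) -> None:
        self.assertEqual(prompt_nick(), 'aborted')    # the mock raises as if Ctrl-C was pressed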
-from tests.mock_classes import ContactList, create_contact, create_group, Group, GroupList, MasterKey, OnionService -from tests.mock_classes import Settings, TxWindow, UserInput -from tests.utils import cd_unit_test, cleanup, gen_queue_dict, group_name_to_group_id, ignored -from tests.utils import nick_to_onion_address, nick_to_pub_key, tear_queues, TFCTestCase, VALID_ECDHE_PUB_KEY +from tests.mock_classes import (ContactList, create_contact, create_group, Group, GroupList, MasterKey, OnionService, + Settings, TxWindow, UserInput) +from tests.utils import (cd_unit_test, cleanup, gen_queue_dict, group_name_to_group_id, ignored, + nick_to_onion_address, nick_to_pub_key, tear_queues, TFCTestCase, VALID_ECDHE_PUB_KEY) class TestAddNewContact(TFCTestCase): @@ -56,14 +56,14 @@ class TestAddNewContact(TFCTestCase): os.remove(f'v4dkh.psk - Give to hpcra') tear_queues(self.queues) - def test_adding_new_contact_during_traffic_masking_raises_se(self) -> None: + def test_adding_new_contact_during_traffic_masking_raises_soft_error(self) -> None: # Setup self.settings.traffic_masking = True # Test self.assert_se("Error: Command is disabled during traffic masking.", add_new_contact, *self.args) - def test_contact_list_full_raises_se(self) -> None: + def test_contact_list_full_raises_soft_error(self) -> None: # Setup contact_list = ContactList(nicks=[str(n) for n in range(50)]) self.contact_list.contacts = contact_list.contacts @@ -96,7 +96,7 @@ class TestAddNewContact(TFCTestCase): @mock.patch('time.sleep', return_value=None) @mock.patch('builtins.input', side_effect=KeyboardInterrupt) - def test_keyboard_interrupt_raises_se(self, *_: Any) -> None: + def test_keyboard_interrupt_raises_soft_error(self, *_: Any) -> None: self.assert_se('Contact creation aborted.', add_new_contact, *self.args) @@ -147,7 +147,7 @@ class TestRemoveContact(TFCTestCase): cleanup(self.unit_test_dir) tear_queues(self.queues) - def test_contact_removal_during_traffic_masking_raises_se(self) -> None: + def test_contact_removal_during_traffic_masking_raises_soft_error(self) -> None: # Setup self.settings.traffic_masking = True @@ -155,13 +155,13 @@ class TestRemoveContact(TFCTestCase): self.assert_se("Error: Command is disabled during traffic masking.", remove_contact, UserInput(), None, *self.args) - def test_missing_account_raises_se(self) -> None: + def test_missing_account_raises_soft_error(self) -> None: self.assert_se("Error: No account specified.", remove_contact, UserInput('rm '), None, *self.args) @mock.patch('time.sleep', return_value=None) @mock.patch('shutil.get_terminal_size', return_value=[150, 150]) @mock.patch('builtins.input', return_value='Yes') - def test_invalid_account_raises_se(self, *_: Any) -> None: + def test_invalid_account_raises_soft_error(self, *_: Any) -> None: # Setup user_input = UserInput(f'rm {nick_to_onion_address("Alice")[:-1]}') window = TxWindow(window_contacts=[self.contact_list.get_contact_by_address_or_nick('Alice')], @@ -174,7 +174,7 @@ class TestRemoveContact(TFCTestCase): @mock.patch('time.sleep', return_value=None) @mock.patch('shutil.get_terminal_size', return_value=[150, 150]) @mock.patch('builtins.input', return_value='No') - def test_user_abort_raises_se(self, *_: Any) -> None: + def test_user_abort_raises_soft_error(self, *_: Any) -> None: # Setup user_input = UserInput(f'rm {nick_to_onion_address("Alice")}') @@ -264,11 +264,11 @@ class TestChangeNick(TFCTestCase): """Post-test actions.""" tear_queues(self.queues) - def test_missing_nick_raises_se(self) -> None: + def 
test_missing_nick_raises_soft_error(self) -> None: self.assert_se("Error: No nick specified.", change_nick, UserInput("nick "), TxWindow(type=WIN_TYPE_CONTACT), *self.args) - def test_invalid_nick_raises_se(self) -> None: + def test_invalid_nick_raises_soft_error(self) -> None: # Setup window = TxWindow(type=WIN_TYPE_CONTACT, contact=create_contact('Bob')) @@ -277,7 +277,7 @@ class TestChangeNick(TFCTestCase): self.assert_se("Error: Nick must be printable.", change_nick, UserInput("nick Alice\x01"), window, *self.args) - def test_no_contact_raises_se(self) -> None: + def test_no_contact_raises_soft_error(self) -> None: # Setup window = TxWindow(type=WIN_TYPE_CONTACT, contact=create_contact('Bob')) @@ -327,13 +327,13 @@ class TestContactSetting(TFCTestCase): """Post-test actions.""" tear_queues(self.queues) - def test_invalid_command_raises_se(self) -> None: + def test_invalid_command_raises_soft_error(self) -> None: self.assert_se("Error: Invalid command.", contact_setting, UserInput('loging on'), None, *self.args) - def test_missing_parameter_raises_se(self) -> None: + def test_missing_parameter_raises_soft_error(self) -> None: self.assert_se("Error: Invalid command.", contact_setting, UserInput(''), None, *self.args) - def test_invalid_extra_parameter_raises_se(self) -> None: + def test_invalid_extra_parameter_raises_soft_error(self) -> None: self.assert_se("Error: Invalid command.", contact_setting, UserInput('logging on al'), None, *self.args) def test_enable_logging_for_user(self) -> None: diff --git a/tests/transmitter/test_files.py b/tests/transmitter/test_files.py index 433c8d9..8f48506 100644 --- a/tests/transmitter/test_files.py +++ b/tests/transmitter/test_files.py @@ -41,10 +41,10 @@ class TestFile(TFCTestCase): """Post-test actions.""" cleanup(self.unit_test_dir) - def test_missing_file_raises_se(self) -> None: + def test_missing_file_raises_soft_error(self) -> None: self.assert_se("Error: File not found.", File, './testfile.txt', *self.args) - def test_empty_file_raises_se(self) -> None: + def test_empty_file_raises_soft_error(self) -> None: # Setup with open('testfile.txt', 'wb+') as f: f.write(b'') @@ -52,7 +52,7 @@ class TestFile(TFCTestCase): # Test self.assert_se("Error: Target file is empty.", File, './testfile.txt', *self.args) - def test_oversize_filename_raises_se(self) -> None: + def test_oversize_filename_raises_soft_error(self) -> None: # Setup f_name = 250 * 'a' + '.txt' with open(f_name, 'wb+') as f: diff --git a/tests/transmitter/test_key_exchanges.py b/tests/transmitter/test_key_exchanges.py index feff025..c3953d1 100644 --- a/tests/transmitter/test_key_exchanges.py +++ b/tests/transmitter/test_key_exchanges.py @@ -33,12 +33,12 @@ from src.common.statics import (COMMAND_PACKET_QUEUE, CONFIRM_CODE_LENGTH, ECDH LOCAL_PUBKEY, RELAY_PACKET_QUEUE, SYMMETRIC_KEY_LENGTH, TFC_PUBLIC_KEY_LENGTH, WIN_TYPE_CONTACT, WIN_TYPE_GROUP, XCHACHA20_NONCE_LENGTH) -from src.transmitter.key_exchanges import create_pre_shared_key, export_onion_service_data, new_local_key -from src.transmitter.key_exchanges import rxp_load_psk, start_key_exchange, verify_fingerprints +from src.transmitter.key_exchanges import (create_pre_shared_key, export_onion_service_data, new_local_key, + rxp_load_psk, start_key_exchange, verify_fingerprints) from tests.mock_classes import ContactList, create_contact, Gateway, OnionService, Settings, TxWindow -from tests.utils import cd_unit_test, cleanup, gen_queue_dict, ignored, nick_to_pub_key -from tests.utils import nick_to_short_address, tear_queues, 
TFCTestCase, VALID_ECDHE_PUB_KEY +from tests.utils import (cd_unit_test, cleanup, gen_queue_dict, ignored, nick_to_pub_key, nick_to_short_address, + tear_queues, TFCTestCase, VALID_ECDHE_PUB_KEY) class TestOnionService(TFCTestCase): @@ -72,7 +72,7 @@ class TestLocalKey(TFCTestCase): """Post-test actions.""" tear_queues(self.queues) - def test_new_local_key_when_traffic_masking_is_enabled_raises_se(self) -> None: + def test_new_local_key_when_traffic_masking_is_enabled_raises_soft_error(self) -> None: self.settings.traffic_masking = True self.contact_list.contacts = [create_contact(LOCAL_ID)] self.assert_se("Error: Command is disabled during traffic masking.", new_local_key, *self.args) @@ -117,7 +117,7 @@ class TestLocalKey(TFCTestCase): @mock.patch('time.sleep', return_value=None) @mock.patch('builtins.input', side_effect=KeyboardInterrupt) @mock.patch('os.getrandom', lambda x, flags: x * b'a') - def test_keyboard_interrupt_raises_se(self, *_: Any) -> None: + def test_keyboard_interrupt_raises_soft_error(self, *_: Any) -> None: self.assert_se("Local key setup aborted.", new_local_key, *self.args) @@ -147,12 +147,12 @@ class TestKeyExchange(TFCTestCase): @mock.patch('shutil.get_terminal_size', return_value=[200, 200]) @mock.patch('builtins.input', return_value=b58encode(bytes(TFC_PUBLIC_KEY_LENGTH), public_key=True)) - def test_zero_public_key_raises_se(self, *_: Any) -> None: + def test_zero_public_key_raises_soft_error(self, *_: Any) -> None: self.assert_se("Error: Zero public key", start_key_exchange, nick_to_pub_key("Alice"), 'Alice', *self.args) @mock.patch('shutil.get_terminal_size', return_value=[200, 200]) @mock.patch('builtins.input', return_value=b58encode((TFC_PUBLIC_KEY_LENGTH-1)*b'a', public_key=True)) - def test_invalid_public_key_length_raises_se(self, *_: Any) -> None: + def test_invalid_public_key_length_raises_soft_error(self, *_: Any) -> None: self.assert_se("Error: Invalid public key length", start_key_exchange, nick_to_pub_key("Alice"), 'Alice', *self.args) @@ -164,7 +164,7 @@ class TestKeyExchange(TFCTestCase): 'No']) # Fingerprint mismatch) @mock.patch('time.sleep', return_value=None) @mock.patch('shutil.get_terminal_size', return_value=[200, 200]) - def test_fingerprint_mismatch_raises_se(self, *_: Any) -> None: + def test_fingerprint_mismatch_raises_soft_error(self, *_: Any) -> None: self.assert_se("Error: Fingerprint mismatch", start_key_exchange, nick_to_pub_key("Alice"), 'Alice', *self.args) @mock.patch('builtins.input', side_effect=['', # Resend public key @@ -297,7 +297,7 @@ class TestPSK(TFCTestCase): @mock.patch('time.sleep', return_value=None) @mock.patch('getpass.getpass', side_effect=KeyboardInterrupt) - def test_keyboard_interrupt_raises_se(self, *_: Any) -> None: + def test_keyboard_interrupt_raises_soft_error(self, *_: Any) -> None: self.assert_se("PSK generation aborted.", create_pre_shared_key, nick_to_pub_key("Alice"), 'Alice', *self.args) @@ -320,14 +320,14 @@ class TestReceiverLoadPSK(TFCTestCase): # Test self.assert_se("Error: Command is disabled during traffic masking.", rxp_load_psk, None, None, *self.args) - def test_active_group_raises_se(self) -> None: + def test_active_group_raises_soft_error(self) -> None: # Setup window = TxWindow(type=WIN_TYPE_GROUP) # Test self.assert_se("Error: Group is selected.", rxp_load_psk, window, None, *self.args) - def test_ecdhe_key_raises_se(self) -> None: + def test_ecdhe_key_raises_soft_error(self) -> None: # Setup contact = create_contact('Alice') contact_list = ContactList(contacts=[contact]) @@ -360,7 
+360,7 @@ class TestReceiverLoadPSK(TFCTestCase): @mock.patch('time.sleep', return_value=None) @mock.patch('builtins.input', side_effect=KeyboardInterrupt) - def test_keyboard_interrupt_raises_se(self, *_: Any) -> None: + def test_keyboard_interrupt_raises_soft_error(self, *_: Any) -> None: # Setup contact = create_contact('Alice', kex_status=KEX_STATUS_NO_RX_PSK) contact_list = ContactList(contacts=[contact]) @@ -369,7 +369,7 @@ class TestReceiverLoadPSK(TFCTestCase): contact=contact) # Test - self.assert_se("PSK verification aborted.", rxp_load_psk, window, contact_list, *self.args) + self.assert_se("PSK install verification aborted.", rxp_load_psk, window, contact_list, *self.args) if __name__ == '__main__': diff --git a/tests/transmitter/test_packet.py b/tests/transmitter/test_packet.py index d5cd1ee..194e878 100644 --- a/tests/transmitter/test_packet.py +++ b/tests/transmitter/test_packet.py @@ -35,11 +35,11 @@ from src.common.statics import (ASSEMBLY_PACKET_LENGTH, COMMAND, COMMAND_PACKET_ TM_COMMAND_PACKET_QUEUE, TM_FILE_PACKET_QUEUE, TM_MESSAGE_PACKET_QUEUE, WIN_TYPE_CONTACT, WIN_TYPE_GROUP) -from src.transmitter.packet import cancel_packet, queue_command, queue_file, queue_message, queue_assembly_packets -from src.transmitter.packet import send_file, send_packet, split_to_assembly_packets +from src.transmitter.packet import (cancel_packet, queue_command, queue_file, queue_message, queue_assembly_packets, + send_file, send_packet, split_to_assembly_packets) -from tests.mock_classes import create_contact, create_group, create_keyset, Gateway, ContactList, KeyList -from tests.mock_classes import nick_to_pub_key, OnionService, Settings, TxWindow, UserInput +from tests.mock_classes import (create_contact, create_group, create_keyset, Gateway, ContactList, KeyList, + nick_to_pub_key, OnionService, Settings, TxWindow, UserInput) from tests.utils import cd_unit_test, cleanup, gen_queue_dict, tear_queue, tear_queues, TFCTestCase @@ -106,14 +106,14 @@ class TestSendFile(TFCTestCase): cleanup(self.unit_test_dir) tear_queues(self.queues) - def test_traffic_masking_raises_se(self) -> None: + def test_traffic_masking_raises_soft_error(self) -> None: self.settings.traffic_masking = True self.assert_se("Error: Command is disabled during traffic masking.", send_file, "testfile.txt", *self.args) - def test_missing_file_raises_se(self) -> None: + def test_missing_file_raises_soft_error(self) -> None: self.assert_se("Error: File not found.", send_file, "testfile.txt", *self.args) - def test_empty_file_raises_se(self) -> None: + def test_empty_file_raises_soft_error(self) -> None: # Setup open('testfile.txt', 'wb+').close() @@ -170,7 +170,7 @@ class TestQueueFile(TFCTestCase): @mock.patch('time.sleep', return_value=None) @mock.patch('builtins.input', side_effect=file_list) - def test_tfc_database_raises_se(self, *_: Any) -> None: + def test_tfc_database_raises_soft_error(self, *_: Any) -> None: window = TxWindow(name='Alice', type=WIN_TYPE_CONTACT, type_print='contact', @@ -253,7 +253,7 @@ class TestQueueFile(TFCTestCase): @mock.patch('shutil.get_terminal_size', return_value=[150, 150]) @mock.patch('time.sleep', return_value=None) @mock.patch('builtins.input', side_effect=['./testfile.txt', KeyboardInterrupt]) - def test_keyboard_interrupt_raises_se(self, *_: Any) -> None: + def test_keyboard_interrupt_raises_soft_error(self, *_: Any) -> None: # Setup input_data = os.urandom(2000) with open('testfile.txt', 'wb+') as f: @@ -344,7 +344,7 @@ class TestQueueAssemblyPackets(unittest.TestCase): 
log_messages=True) self.window.window_contacts = [create_contact('Alice')] self.args = self.settings, self.queues, self.window - + def tearDown(self) -> None: """Post-test actions.""" tear_queues(self.queues) diff --git a/tests/transmitter/test_user_input.py b/tests/transmitter/test_user_input.py index 844280e..3788496 100644 --- a/tests/transmitter/test_user_input.py +++ b/tests/transmitter/test_user_input.py @@ -24,9 +24,11 @@ import unittest from unittest import mock from typing import Any -from src.common.statics import COMMAND, FILE, MESSAGE, WIN_TYPE_CONTACT, WIN_TYPE_GROUP +from src.common.statics import COMMAND, FILE, MESSAGE, WIN_TYPE_CONTACT, WIN_TYPE_GROUP + from src.transmitter.user_input import get_input, process_aliases, UserInput -from tests.mock_classes import create_contact, create_group, Settings, TxWindow + +from tests.mock_classes import create_contact, create_group, Settings, TxWindow class TestProcessAliases(unittest.TestCase): diff --git a/tests/transmitter/test_windows.py b/tests/transmitter/test_windows.py index d8fe15f..6d0c366 100644 --- a/tests/transmitter/test_windows.py +++ b/tests/transmitter/test_windows.py @@ -33,8 +33,8 @@ from src.transmitter.windows import select_window, TxWindow from tests.mock_classes import ContactList, create_contact, Gateway, GroupList, OnionService, Settings, UserInput -from tests.utils import gen_queue_dict, group_name_to_group_id, nick_to_onion_address, nick_to_pub_key -from tests.utils import tear_queues, TFCTestCase, VALID_ECDHE_PUB_KEY +from tests.utils import (gen_queue_dict, group_name_to_group_id, nick_to_onion_address, nick_to_pub_key, + tear_queues, TFCTestCase, VALID_ECDHE_PUB_KEY) class TestTxWindow(TFCTestCase): @@ -72,7 +72,7 @@ class TestTxWindow(TFCTestCase): # Test self.assertEqual(len(self.window), 2) - def test_group_window_change_during_traffic_masking_raises_se(self) -> None: + def test_group_window_change_during_traffic_masking_raises_soft_error(self) -> None: # Setup self.settings.traffic_masking = True self.window.uid = 'test_group' @@ -81,7 +81,7 @@ class TestTxWindow(TFCTestCase): self.assert_se("Error: Can't change window during traffic masking.", self.window.select_tx_window, *self.args, selection='test_group_2', cmd=True) - def test_contact_window_change_during_traffic_masking_raises_se(self) -> None: + def test_contact_window_change_during_traffic_masking_raises_soft_error(self) -> None: # Setup self.settings.traffic_masking = True self.window.uid = nick_to_pub_key("Alice") @@ -109,7 +109,7 @@ class TestTxWindow(TFCTestCase): self.assertIsNone(self.window.select_tx_window(*self.args, selection='test_group', cmd=True)) self.assertEqual(self.window.uid, group_name_to_group_id('test_group')) - def test_invalid_selection_raises_se(self) -> None: + def test_invalid_selection_raises_soft_error(self) -> None: # Setup self.window.uid = nick_to_pub_key("Alice") @@ -275,12 +275,12 @@ class TestTxWindow(TFCTestCase): @mock.patch('time.sleep', return_value=None) @mock.patch('builtins.input', side_effect=['/rm ']) - def test_missing_account_when_removing_raises_se(self, *_: Any) -> None: + def test_missing_account_when_removing_raises_soft_error(self, *_: Any) -> None: self.assert_se("Error: No account specified.", self.window.select_tx_window, *self.args) @mock.patch('time.sleep', return_value=None) @mock.patch('builtins.input', side_effect=['/rm Charlie', 'yes']) - def test_unknown_account_when_removing_raises_se(self, *_: Any) -> None: + def test_unknown_account_when_removing_raises_soft_error(self, *_: Any) -> 
None: self.assert_se("Error: Unknown contact 'Charlie'.", self.window.select_tx_window, *self.args) @mock.patch('time.sleep', return_value=None) @@ -301,7 +301,7 @@ class TestTxWindow(TFCTestCase): @mock.patch('time.sleep', return_value=None) @mock.patch('builtins.input', side_effect=['/help']) - def test_invalid_command_raises_se(self, *_: Any) -> None: + def test_invalid_command_raises_soft_error(self, *_: Any) -> None: self.assert_se("Error: Invalid command.", self.window.select_tx_window, *self.args) @@ -323,7 +323,7 @@ class TestSelectWindow(TFCTestCase): """Post-test actions.""" tear_queues(self.queues) - def test_invalid_selection_raises_se(self) -> None: + def test_invalid_selection_raises_soft_error(self) -> None: # Setup self.user_input.plaintext = 'msg' self.assert_se("Error: Invalid recipient.", select_window, *self.args) diff --git a/tfc.py b/tfc.py index 3b3b768..45221a3 100755 --- a/tfc.py +++ b/tfc.py @@ -95,15 +95,15 @@ def main() -> None: ensure_dir(working_dir) os.chdir(working_dir) - operation, local_test, data_diode_sockets = process_arguments() + operation, local_test, data_diode_sockets, qubes = process_arguments() check_kernel_version() print_title(operation) master_key = MasterKey( operation, local_test) - gateway = Gateway( operation, local_test, data_diode_sockets) - settings = Settings( master_key, operation, local_test) + gateway = Gateway( operation, local_test, data_diode_sockets, qubes) + settings = Settings( master_key, operation, local_test, qubes) contact_list = ContactList(master_key, settings) key_list = KeyList( master_key, settings) group_list = GroupList( master_key, settings, contact_list) diff --git a/tfc.yml b/tfc.yml index 48d76cf..6d484f7 100644 --- a/tfc.yml +++ b/tfc.yml @@ -1,6 +1,6 @@ --- - apparmor-profiles: - - '/opt/tfc/venv_relay/bin/python3.7' + - '/usr/bin/python3.7' users: - 'amnesia' commands: diff --git a/uninstall.sh b/uninstall.sh index 466a15e..34079b2 100644 --- a/uninstall.sh +++ b/uninstall.sh @@ -40,9 +40,14 @@ sudo rm -f /usr/share/pixmaps/tfc.png sudo rm -f /usr/share/applications/TFC-Dev.desktop sudo rm -f /usr/share/applications/TFC-Local-test.desktop sudo rm -f /usr/share/applications/TFC-RP.desktop +sudo rm -f /usr/share/applications/TFC-RP-Qubes.desktop sudo rm -f /usr/share/applications/TFC-RP-Tails.desktop -sudo rm -f /usr/share/applications/TFC-RxP.desktop +sudo rm -f /usr/share/applications/TFC-RxP-Qubes.desktop sudo rm -f /usr/share/applications/TFC-TxP.desktop +sudo rm -f /usr/share/applications/TFC-TxP-Qubes.desktop +sudo rm -f /usr/bin/tfc-transmitter +sudo rm -f /usr/bin/tfc-receiver +sudo rm -f /usr/bin/tfc-relay sudo rm -rf /opt/tfc/ yn_prompt "Remove user data?" "rm -rf $HOME/tfc/"
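
Note on the tfc.py hunk above: process_arguments() now returns a fourth value, qubes, which is passed through to both Gateway and Settings so the program can account for the new Qubes launchers added by this patch. The snippet below is only an illustrative sketch of how such an extra boolean could be parsed and returned; the flag names, the argument list, and the return types are assumptions for illustration, not the actual implementation in src/common/misc.py.

    # Illustrative sketch only -- not TFC's actual src/common/misc.py code.
    # Shows one way an argparse-based process_arguments() could grow a fourth
    # return value such as `qubes`. All flag names here are assumptions.
    import argparse
    from typing import Tuple

    def process_arguments() -> Tuple[str, bool, bool, bool]:
        """Parse command-line arguments for the launcher (sketch)."""
        parser = argparse.ArgumentParser(description='TFC launcher (sketch)')
        parser.add_argument('operation', nargs='?', default='tx',
                            help='program to run (sketch placeholder)')
        parser.add_argument('-l', '--local-test', action='store_true',
                            help='enable local testing mode (assumed flag)')
        parser.add_argument('-d', '--ddsockets', action='store_true',
                            help='use data diode simulator sockets (assumed flag)')
        parser.add_argument('-q', '--qubes', action='store_true',
                            help='enable Qubes-specific configuration (assumed flag)')
        args = parser.parse_args()
        return args.operation, args.local_test, args.ddsockets, args.qubes

As the tfc.py hunk shows, the extra value is then simply forwarded into the Gateway(..., qubes) and Settings(..., qubes) constructors; no other call sites in that hunk change.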