diff --git a/LICENSE-3RD-PARTY b/LICENSE-3RD-PARTY index 4ac851d..df0d334 100644 --- a/LICENSE-3RD-PARTY +++ b/LICENSE-3RD-PARTY @@ -39,7 +39,7 @@ by the TFC installer. - The Tor application, Copyright © 2001-2004, Roger Dingledine Copyright © 2004-2006, Roger Dingledine, Nick Mathewson - Copyright © 2007-2019, The Tor Project, Inc. + Copyright © 2007-2020, The Tor Project, Inc. (https://torproject.org) (See the Tor licence and 3rd party licences at https://gitweb.torproject.org/tor.git/tree/LICENSE) @@ -86,17 +86,17 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - The src.common.encoding Base58 implementation, Copyright © 2015, David Keijser (https://github.com/keis/base58) - - The cffi library, Copyright © 2012-2019, Armin Rigo, Maciej Fijalkowski + - The cffi library, Copyright © 2012-2020, Armin Rigo, Maciej Fijalkowski (https://bitbucket.org/cffi/cffi/overview) - The Mypy static type checker, Copyright © 2015-2016, Jukka Lehtosalo and contributors (https://github.com/python/mypy) (See 3rd party licenses at https://github.com/python/mypy/blob/master/LICENSE) - - The pip package installer, Copyright © 2008-2019, The pip developers (see AUTHORS.txt file) + - The pip package installer, Copyright © 2008-2020, The pip developers (see AUTHORS.txt file) (https://github.com/pypa/pip) - - The pytest framework, Copyright © 2004-2019, Holger Krekel and others + - The pytest framework, Copyright © 2004-2020, Holger Krekel and others (https://github.com/pytest-dev/pytest) - The pytest-cov plugin, Copyright © 2010, Meme Dough @@ -108,10 +108,10 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - The Setuptools build system, Copyright © 2016, Jason R Coombs (https://github.com/pypa/setuptools) - - The Six library, Copyright © 2010-2019, Benjamin Peterson + - The Six library, Copyright © 2010-2020, Benjamin Peterson (https://github.com/benjaminp/six) - - The urllib3 library, Copyright © 2008-2019, Andrey Petrov and contributors (see CONTRIBUTORS.txt) + - The urllib3 library, Copyright © 2008-2020, Andrey Petrov and contributors (see CONTRIBUTORS.txt) (https://github.com/urllib3/urllib3) - The virtualenv tool, Copyright © 2007, Ian Bicking and Contributors @@ -144,7 +144,7 @@ SOFTWARE. ISC License applies to: - - The libsodium library, Copyright © 2013-2019, Frank Denis + - The libsodium library, Copyright © 2013-2020, Frank Denis (https://github.com/jedisct1/libsodium) - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - @@ -525,7 +525,7 @@ Public License instead of this License. of this license document, but changing it is not allowed. applies to: - - The src.relay.onion Tor class, Copyright © 2014-2019, Micah Lee + - The src.relay.onion Tor class, Copyright © 2014-2020, Micah Lee (https://github.com/micahflee/onionshare) - gnome-terminal, Copyright © Guilherme de S. Pastore , @@ -2478,7 +2478,7 @@ Library. Copyright © 2000, BeOpen.com. All Rights Reserved. - Copyright © 2001-2019, Python Software Foundation. + Copyright © 2001-2020, Python Software Foundation. All Rights Reserved. 
(https://www.python.org/) diff --git a/README.md b/README.md index 4f9f4c8..fba6198 100755 --- a/README.md +++ b/README.md @@ -9,33 +9,37 @@ [![Coverage Status](https://coveralls.io/repos/github/maqp/tfc/badge.svg?branch=master)](https://coveralls.io/github/maqp/tfc?branch=master) [![Codacy Badge](https://api.codacy.com/project/badge/Grade/71fa9cc1da424f52a576a04c2722da26)](https://www.codacy.com/manual/maqp/tfc?utm_source=github.com&utm_medium=referral&utm_content=maqp/tfc&utm_campaign=Badge_Grade) [![CodeFactor](https://www.codefactor.io/repository/github/maqp/tfc/badge)](https://www.codefactor.io/repository/github/maqp/tfc) -[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![Requirements Status](https://requires.io/github/maqp/tfc/requirements.svg?branch=master)](https://requires.io/github/maqp/tfc/requirements/?branch=master) [![Known Vulnerabilities](https://snyk.io/test/github/maqp/tfc/badge.svg)](https://snyk.io/test/github/maqp/tfc) -Tinfoil Chat (TFC) is a -[FOSS](https://www.gnu.org/philosophy/free-sw.html)+[FHD](https://www.gnu.org/philosophy/free-hardware-designs.en.html) -messaging system that relies on high assurance hardware architecture to protect -users from -[passive eavesdropping](https://en.wikipedia.org/wiki/Upstream_collection), -[active MITM attacks](https://en.wikipedia.org/wiki/Man-in-the-middle_attack) -and -[remote exfiltration](https://www.youtube.com/watch?v=3euYBPlX9LM) -(=hacking) practised by organized crime and nation state actors. +Tinfoil Chat (TFC) is a +[FOSS](https://www.gnu.org/philosophy/free-sw.html)+[FHD](https://www.gnu.org/philosophy/free-hardware-designs.en.html) +[peer-to-peer](https://en.wikipedia.org/wiki/Peer-to-peer) +messaging system that relies on high assurance hardware architecture to protect users from +[passive collection](https://en.wikipedia.org/wiki/Upstream_collection), +[MITM attacks](https://en.wikipedia.org/wiki/Man-in-the-middle_attack) +and most importantly, +[remote key exfiltration](https://www.youtube.com/watch?v=3euYBPlX9LM). +TFC is designed for people with one of the most complex threat models: organized crime +groups and nation state hackers who bypass end-to-end encryption of traditional secure +messaging apps by hacking the endpoint. + + +#### State-of-the-art cryptography -##### State-of-the-art cryptography TFC uses [XChaCha20](https://cr.yp.to/chacha/chacha-20080128.pdf)-[Poly1305](https://cr.yp.to/mac/poly1305-20050329.pdf) [end-to-end encryption](https://en.wikipedia.org/wiki/End-to-end_encryption) with -[deniable authentication](https://en.wikipedia.org/wiki/Deniable_encryption#Deniable_authentication). +[deniable authentication](https://en.wikipedia.org/wiki/Deniable_encryption#Deniable_authentication) +to protect all messages and files sent to individual recipients and groups. The symmetric keys are either [pre-shared](https://en.wikipedia.org/wiki/Pre-shared_key), or exchanged using [X448](https://eprint.iacr.org/2015/625.pdf), the base-10 [fingerprints](https://en.wikipedia.org/wiki/Public_key_fingerprint) -of which are verified via out-of-band channel. TFC provides per-message +of which are verified via an out-of-band channel. TFC provides per-message [forward secrecy](https://en.wikipedia.org/wiki/Forward_secrecy) with [BLAKE2b](https://blake2.net/blake2.pdf) @@ -43,96 +47,140 @@ based [hash ratchet](https://www.youtube.com/watch?v=9sO2qdTci-s#t=1m34s). 
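The hash ratchet idea can be sketched in a few lines of Python. This is an illustration only, not TFC's implementation: the 32-byte key size, the personalization strings, and the placeholder root key below are assumptions made for the example; in TFC the root key would come from the X448 key exchange or a pre-shared key.

```python
import hashlib
from typing import Tuple


def ratchet_step(chain_key: bytes) -> Tuple[bytes, bytes]:
    """Derive a one-time message key and the next chain key from the current chain key.

    Because BLAKE2b is a one-way function, deleting the previous chain key after each
    step means a later compromise of the endpoint cannot reveal the keys of messages
    that were already delivered (per-message forward secrecy).
    """
    message_key    = hashlib.blake2b(chain_key, digest_size=32, person=b'MESSAGE_KEY').digest()
    next_chain_key = hashlib.blake2b(chain_key, digest_size=32, person=b'NEXT_CHAIN_KEY').digest()
    return message_key, next_chain_key


# Example: three ratchet steps. Each message key is used once with XChaCha20-Poly1305
# and then discarded together with the chain key that produced it.
chain_key = bytes(32)  # placeholder root key for the sketch
for _ in range(3):
    message_key, chain_key = ratchet_step(chain_key)
```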
All persistent user data is encrypted locally using XChaCha20-Poly1305, the key of which is derived from password and salt using -[Argon2id](https://github.com/P-H-C/phc-winner-argon2/blob/master/argon2-specs.pdf). -Key generation of TFC relies on Linux kernel's +[Argon2id](https://github.com/P-H-C/phc-winner-argon2/blob/master/argon2-specs.pdf), +the parameters of which are automatically tuned according to best +practices. Key generation of TFC relies on Linux kernel's [getrandom()](https://manpages.debian.org/testing/manpages-dev/getrandom.2.en.html), -a syscall for its ChaCha20 based CSPRNG. +a syscall for its ChaCha20 based +[CSPRNG](https://en.wikipedia.org/wiki/Cryptographically_secure_pseudorandom_number_generator). -##### First messaging system with endpoint security -The software is used in hardware configuration that provides strong -[endpoint security](https://en.wikipedia.org/wiki/Endpoint_security): -Encryption and decryption are separated on two isolated computers. The split -[TCB](https://en.wikipedia.org/wiki/Trusted_computing_base) -interacts with a third, Networked Computer, through unidirectional -[serial](https://en.wikipedia.org/wiki/Universal_asynchronous_receiver/transmitter) -interfaces. The direction of data flow between the computers is enforced with free -hardware design -[data diodes](https://en.wikipedia.org/wiki/Unidirectional_network), -technology the certified implementations of which are typically found in -critical infrastructure protection and government networks where classification -level of data varies. -##### Anonymous by design -TFC routes all communication through next generation -[Tor](https://2019.www.torproject.org/about/overview.html.en) +#### Anonymous by design +TFC routes all communication exclusively through the +[Tor](https://2019.www.torproject.org/about/overview.html.en) +anonymity network. It uses the next generation ([v3](https://trac.torproject.org/projects/tor/wiki/doc/NextGenOnions)) -[Onion Services](https://2019.www.torproject.org/docs/onion-services) -to hide metadata about real-life identity and geolocation of users, when and how -much they communicate, the social graph of the users and the fact TFC is -running. TFC also features a traffic masking mode that hides the type, quantity, -and schedule of communication, even if the Networked Computer is compromised. +[Tor Onion Services](https://2019.www.torproject.org/docs/onion-services) +to enable P2P communication that never exits the Tor network. This makes it hard for the +users to accidentally deanonymize themselves. It also means that unlike (de)centralized +messengers, there's no third party server with access to user metadata such as who is +talking to whom, when, and how much. The network architecture means TFC runs exclusively +on the user's devices. There are no ads or tracking, and it collects no data whatsoever +about the user. All data is always encrypted with keys the user controls, and the +databases never leave the user's device. + +Using Onion Services also means no account registration is needed. During the first launch +TFC generates a random TFC account (an Onion Service address) for the user, e.g. +`4sci35xrhp2d45gbm3qpta7ogfedonuw2mucmc36jxemucd7fmgzj3ad`. By knowing this TFC account, +anyone can send the user a contact request and talk to them without ever learning their +real life identity, IP-address, or geolocation. Protected geolocation makes physical +attacks very difficult because the attacker doesn't know where the device is located on +the planet. 
At the same time it makes the communication censorship resistant: Blocking TFC +requires blocking Tor categorically, nation-wide. + +TFC also features a traffic masking mode that hides the type, quantity, and schedule of +communication, even if the network-facing device of the user is hacked. To provide even +further metadata protection from hackers, the Internet-facing part of TFC can be run on +[Tails](https://tails.boum.org/), a privacy- and anonymity-focused operating system that +contains no personal files of the user (which makes it hard to deduce to whom the endpoint +belongs), and that provides +[additional layers of protection](https://github.com/Whonix/onion-grater) +for their anonymity. + + +#### First messaging system with endpoint security + +TFC is designed to be used in a hardware configuration that provides strong +[endpoint security](https://en.wikipedia.org/wiki/Endpoint_security). +This configuration uses three computers per endpoint: Encryption and decryption processes +are separated from each other onto two isolated computers, the Source Computer, and the +Destination Computer. These two systems are dedicated to TFC. This split +[TCB](https://en.wikipedia.org/wiki/Trusted_computing_base) +interacts with the network via the user's daily computer, called the Networked Computer. + +Data moves from the Source Computer to the Networked Computer, and from the Networked +Computer to the Destination Computer, unidirectionally. The unidirectionality of data flow +is enforced with a free hardware design +[data diode](https://en.wikipedia.org/wiki/Unidirectional_network), +which is connected to the three computers using one USB cable per computer. +The Source and Destination Computers are not connected to the Internet, or to any device +other than the data diode. + + +![](https://www.cs.helsinki.fi/u/oottela/wiki/readme/data_diode.jpg) +[TFC data diode](https://www.cs.helsinki.fi/u/oottela/wiki/readme/data_diode.jpg) + +Optical repeaters inside the +[optocouplers](https://en.wikipedia.org/wiki/Opto-isolator) +of the data diode enforce the direction of data transmission with the fundamental laws of +physics. This protection is so strong that certified implementations of data diodes are +typically found in critical infrastructure protection and government networks where +the classification level of data varies between systems. + +In TFC, the hardware data diode ensures that neither of the TCB-halves can be accessed +bidirectionally. Since the protection relies on physical limitations of the hardware's +capabilities, no piece of malware, not even a +[zero-day exploit](https://en.wikipedia.org/wiki/Zero-day_(computing)) +can bypass the security provided by the data diode. ### How it works -![](https://www.cs.helsinki.fi/u/oottela/wiki/readme/how_it_works2.png) -[System overview](https://www.cs.helsinki.fi/u/oottela/wiki/readme/how_it_works2.png) +With the hardware in place, all that's left for the users to do is launch the device-specific +TFC program on each computer. -TFC uses three computers per endpoint: Source Computer, Networked Computer, and -Destination Computer. +![](https://www.cs.helsinki.fi/u/oottela/wiki/readme/overview.png) +[System overview](https://www.cs.helsinki.fi/u/oottela/wiki/readme/overview.png) -Alice enters messages and commands to Transmitter Program running on her Source -Computer. Transmitter Program encrypts and signs plaintext data and relays the -ciphertexts from Source Computer to her Networked Computer through a serial -interface and a hardware data diode.
+In the illustration above, Alice enters messages and commands to the Transmitter Program +running on her Source Computer. The Transmitter Program encrypts and signs plaintext +data and relays the ciphertexts from her Source Computer to her Networked Computer +through the data diode. -Relay Program on Alice's Networked Computer relays commands and copies of -outgoing messages to her Destination Computer via the serial interface and data -diode. Receiver Program on Alice's Destination Computer authenticates, decrypts -and processes the received message/command. +Relay Program on Alice's Networked Computer relays commands and copies of outgoing +messages to her Destination Computer via the data diode. Receiver Program on Alice's +Destination Computer authenticates, decrypts and processes the received message/command. -Alice's Relay Program shares messages and files to Bob over Tor Onion Service. +Alice's Relay Program shares messages and files with Bob over a Tor Onion Service. The web client of Bob's Relay Program fetches the ciphertext from Alice's Onion -Service and forwards it to his Destination Computer (again through a serial -interface and data diode). Bob's Receiver Program then authenticates, decrypts -and processes the received message/file. +Service and forwards it to his Destination Computer through his data diode. Bob's +Receiver Program then authenticates, decrypts and processes the received message/file. -When Bob responds, he will type his message to his Source Computer, and after a -mirrored process, Alice reads the message from her Destination Computer. +When Bob responds, he will type his message to the Transmitter Program on his Source +Computer, and after a mirrored process, Alice reads the message from the Receiver Program +on her Destination Computer. ### Why keys and plaintexts cannot be exfiltrated -TFC is designed to combine the -[classical and alternative data diode models](https://en.wikipedia.org/wiki/Unidirectional_network#Applications) -to provide hardware enforced endpoint security: +The architecture described above utilizes both +[the classical and the alternative data diode models](https://en.wikipedia.org/wiki/Unidirectional_network#Applications) +to enable bidirectional communication between two users, while at the same time providing +hardware-enforced endpoint security: -1. The Destination Computer uses the classical data diode model. It is designed -to receive data from the insecure Networked Computer while preventing the export -of any data back to the Networked Computer. Not even malware on Destination -Computer can exfiltrate keys or plaintexts as the data diode prevents all -outbound traffic. +1. The Destination Computer uses the classical data diode model. This means it can receive +data from the insecure Networked Computer, but is unable to send data back to the Networked +Computer. The Receiver Program is designed to function under these constraints. However, +even though the program authenticates and validates all incoming data, it cannot be ruled out that +malware could still infiltrate the Destination Computer. Even if that were to happen, the malware +would be unable to exfiltrate sensitive keys or plaintexts back +to the Networked Computer, as the data diode prevents all outbound traffic. -2. The Source Computer uses the alternative data diode model that is designed to -allow the export of data to the Networked Computer. The data diode protects the -Source Computer from attacks by physically preventing all inbound traffic.
To -allow key exchanges, the short elliptic-curve public keys are input manually by -the user. +2. The Source Computer uses the alternative data diode model. This means it can output +encrypted data to the insecure Networked Computer without having to worry about being +compromised: The data diode protects the Source Computer from all attacks by physically +preventing all inbound traffic. The Transmitter Program is also designed to work under +the data flow constraints introduced by the data diode: to allow key exchanges, the short +elliptic-curve public keys are input manually by the user. -3. The Networked Computer is assumed to be compromised. All sensitive data that -passes through it is encrypted and signed with no exceptions. +3. The Networked Computer is designed under the assumption that it can be compromised by a +remote attacker: All sensitive data that passes through the Relay Program is encrypted and +signed with no exceptions. Since the attacker is unable to exfiltrate decryption keys from +the Source or Destination Computer, the ciphertexts are of no value to the attacker. -![](https://www.cs.helsinki.fi/u/oottela/wiki/readme/attacks2.png) -[Exfiltration security](https://www.cs.helsinki.fi/u/oottela/wiki/readme/attacks2.png) #### Data diode -Optical repeater inside the -[optocouplers](https://en.wikipedia.org/wiki/Opto-isolator) -of the data diode (below) enforce direction of data transmission with the -fundamental laws of physics. - -![](https://www.cs.helsinki.fi/u/oottela/wiki/readme/readme_dd.jpg) -[TFC data diode](https://www.cs.helsinki.fi/u/oottela/wiki/readme/readme_dd.jpg) +![](https://www.cs.helsinki.fi/u/oottela/wiki/readme/attacks.png) +[Exfiltration security](https://www.cs.helsinki.fi/u/oottela/wiki/readme/attacks.png) ### Supported Operating Systems @@ -154,10 +202,19 @@ fundamental laws of physics. [FAQ](https://github.com/maqp/tfc/wiki/FAQ)<br>
[Security design](https://github.com/maqp/tfc/wiki/Security-design)
-Hardware
-    [Data diode (breadboard)](https://github.com/maqp/tfc/wiki/TTL-Data-Diode-(breadboard))
-    [Data diode (perfboard)](https://github.com/maqp/tfc/wiki/TTL-Data-Diode-(perfboard))
+Hardware Data Diode
+    [Breadboard version](https://github.com/maqp/tfc/wiki/TTL-Data-Diode-(breadboard)) (Easy)
+    [Perfboard version](https://github.com/maqp/tfc/wiki/TTL-Data-Diode-(perfboard)) (Intermediate)
+    [PCB version](https://github.com/maqp/tfc/wiki/TTL-Data-Diode-(PCB)) (Advanced)
+ +How to use
+    [Installation](https://github.com/maqp/tfc/wiki/Installation)
+    [Launching TFC](https://github.com/maqp/tfc/wiki/Launching-TFC)
+    [Setup master password](https://github.com/maqp/tfc/wiki/Master-Password)
+    [Local key setup](https://github.com/maqp/tfc/wiki/Local-Key-Setup)
+    [Launch Onion Service](https://github.com/maqp/tfc/wiki/Onion-Service-Setup)
+    [X448 key exchange](https://github.com/maqp/tfc/wiki/X448)
+    [Pre-shared keys](https://github.com/maqp/tfc/wiki/PSK)
+    [Commands](https://github.com/maqp/tfc/wiki/Commands)
[Update log](https://github.com/maqp/tfc/wiki/Update-Log)
- -For the rest of the articles, see [TFC wiki](https://github.com/maqp/tfc/wiki). \ No newline at end of file diff --git a/dd.py b/dd.py index a91cdc7..eff9b13 100644 --- a/dd.py +++ b/dd.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -25,49 +25,27 @@ import sys import time from multiprocessing import Process, Queue -from typing import Any, Dict, Tuple +from typing import Any, Dict, Tuple -from src.common.misc import ( - get_terminal_height, - get_terminal_width, - ignored, - monitor_processes, -) -from src.common.output import clear_screen -from src.common.statics import ( - DATA_FLOW, - DD_ANIMATION_LENGTH, - DD_OFFSET_FROM_CENTER, - DST_DD_LISTEN_SOCKET, - DST_LISTEN_SOCKET, - EXIT_QUEUE, - IDLE, - LOCALHOST, - NC, - NCDCLR, - NCDCRL, - RP_LISTEN_SOCKET, - SCNCLR, - SCNCRL, - SRC_DD_LISTEN_SOCKET, -) +from src.common.misc import get_terminal_height, get_terminal_width, ignored, monitor_processes +from src.common.output import clear_screen +from src.common.statics import (DATA_FLOW, DD_ANIMATION_LENGTH, DD_OFFSET_FROM_CENTER, DST_DD_LISTEN_SOCKET, + DST_LISTEN_SOCKET, EXIT_QUEUE, IDLE, LOCALHOST, NC, NCDCLR, NCDCRL, RP_LISTEN_SOCKET, + SCNCLR, SCNCRL, SRC_DD_LISTEN_SOCKET) -def draw_frame( - argv: str, # Arguments for the simulator position/orientation - message: str, # Status message to print - high: bool = False, # Determines the signal's state (high/low) -) -> None: +def draw_frame(argv: str, # Arguments for the simulator position/orientation + message: str, # Status message to print + high: bool = False # Determines the signal's state (high/low) + ) -> None: """Draw a data diode animation frame.""" - l, indicator, arrow, r = { - NCDCLR: ("Rx", "<", "←", "Tx"), - SCNCLR: ("Tx", ">", "→", "Rx"), - NCDCRL: ("Tx", ">", "→", "Rx"), - SCNCRL: ("Rx", "<", "←", "Tx"), - }[argv] + l, indicator, arrow, r = {NCDCLR: ('Rx', '<', '←', 'Tx'), + SCNCLR: ('Tx', '>', '→', 'Rx'), + NCDCRL: ('Tx', '>', '→', 'Rx'), + SCNCRL: ('Rx', '<', '←', 'Tx')}[argv] - indicator = indicator if high else " " - arrow = arrow if message != IDLE else " " + indicator = indicator if high else ' ' + arrow = arrow if message != IDLE else ' ' terminal_width = get_terminal_width() @@ -75,13 +53,13 @@ def draw_frame( """Print string on the center of the screen.""" print(string.center(terminal_width)) - print("\n" * ((get_terminal_height() // 2) - DD_OFFSET_FROM_CENTER)) + print('\n' * ((get_terminal_height() // 2) - DD_OFFSET_FROM_CENTER)) c_print(message) c_print(arrow) - c_print("────╮ " + " " + " ╭────") + c_print( "────╮ " + ' ' + " ╭────" ) c_print(f" {l} │ " + indicator + f" │ {r} ") - c_print("────╯ " + " " + " ╰────") + c_print( "────╯ " + ' ' + " ╰────" ) def animate(argv: str) -> None: @@ -95,12 +73,11 @@ def animate(argv: str) -> None: draw_frame(argv, IDLE) -def rx_loop( - io_queue: "Queue[Any]", # Queue through which to push datagrams through - input_socket: int, # Socket number for Transmitter/Relay Program -) -> None: +def rx_loop(io_queue: 'Queue[Any]', # Queue through which to push datagrams through + input_socket: int # Socket number for Transmitter/Relay Program + ) -> None: """Read datagrams from a transmitting program.""" - listener = multiprocessing.connection.Listener((LOCALHOST, input_socket)) + listener = multiprocessing.connection.Listener((LOCALHOST, input_socket)) interface = listener.accept() while True: @@ -112,12 +89,11 @@ def rx_loop( sys.exit(0) -def tx_loop( 
- io_queue: "Queue[Any]", # Queue through which to push datagrams through - output_socket: int, # Socket number for the Relay/Receiver Program - argv: str, # Arguments for the simulator position/orientation - unit_test: bool = False, # Break out from the loop during unit testing -) -> None: +def tx_loop(io_queue: 'Queue[Any]', # Queue through which to push datagrams through + output_socket: int, # Socket number for the Relay/Receiver Program + argv: str, # Arguments for the simulator position/orientation + unit_test: bool = False # Break out from the loop during unit testing + ) -> None: """Send queued datagrams to a receiving program.""" draw_frame(argv, IDLE) @@ -130,7 +106,7 @@ def tx_loop( while True: with ignored(EOFError, KeyboardInterrupt): - while not io_queue.qsize(): + while io_queue.qsize() == 0: time.sleep(0.01) animate(argv) interface.send(io_queue.get()) @@ -142,31 +118,27 @@ def tx_loop( def process_arguments() -> Tuple[str, int, int]: """Load simulator settings from the command line argument.""" try: - argv = str(sys.argv[1]) - input_socket, output_socket = { - SCNCLR: (SRC_DD_LISTEN_SOCKET, RP_LISTEN_SOCKET), - SCNCRL: (SRC_DD_LISTEN_SOCKET, RP_LISTEN_SOCKET), - NCDCLR: (DST_DD_LISTEN_SOCKET, DST_LISTEN_SOCKET), - NCDCRL: (DST_DD_LISTEN_SOCKET, DST_LISTEN_SOCKET), - }[argv] + argv = str(sys.argv[1]) + input_socket, output_socket = {SCNCLR: (SRC_DD_LISTEN_SOCKET, RP_LISTEN_SOCKET), + SCNCRL: (SRC_DD_LISTEN_SOCKET, RP_LISTEN_SOCKET), + NCDCLR: (DST_DD_LISTEN_SOCKET, DST_LISTEN_SOCKET), + NCDCRL: (DST_DD_LISTEN_SOCKET, DST_LISTEN_SOCKET)}[argv] return argv, input_socket, output_socket except (IndexError, KeyError): clear_screen() - print( - f"\nUsage: python3.7 dd.py [OPTION]\n\n" - f"\nMandatory arguments" - f"\n Argument Simulate data diode between..." - f"\n {SCNCLR} Source Computer and Networked Computer (left to right)" - f"\n {SCNCRL} Source Computer and Networked Computer (right to left)" - f"\n {NCDCLR} Networked Computer and Destination Computer (left to right)" - f"\n {NCDCRL} Networked Computer and Destination Computer (right to left)" - ) + print(f"\nUsage: python3.7 dd.py [OPTION]\n\n" + f"\nMandatory arguments" + f"\n Argument Simulate data diode between..." + f"\n {SCNCLR} Source Computer and Networked Computer (left to right)" + f"\n {SCNCRL} Source Computer and Networked Computer (right to left)" + f"\n {NCDCLR} Networked Computer and Destination Computer (left to right)" + f"\n {NCDCRL} Networked Computer and Destination Computer (right to left)") sys.exit(1) -def main(queues: Dict[bytes, "Queue[Any]"]) -> None: +def main(queues: Dict[bytes, 'Queue[Any]']) -> None: """\ Read the argument from the command line and launch the data diode simulator. 
@@ -191,11 +163,9 @@ def main(queues: Dict[bytes, "Queue[Any]"]) -> None: argv, input_socket, output_socket = process_arguments() - io_queue = Queue() # type: Queue[Any] - process_list = [ - Process(target=rx_loop, args=(io_queue, input_socket)), - Process(target=tx_loop, args=(io_queue, output_socket, argv)), - ] + io_queue = Queue() # type: Queue[Any] + process_list = [Process(target=rx_loop, args=(io_queue, input_socket )), + Process(target=tx_loop, args=(io_queue, output_socket, argv))] for p in process_list: p.start() @@ -203,5 +173,5 @@ def main(queues: Dict[bytes, "Queue[Any]"]) -> None: monitor_processes(process_list, NC, queues, error_exit_code=0) -if __name__ == "__main__": # pragma: no cover +if __name__ == '__main__': # pragma: no cover main({EXIT_QUEUE: Queue()}) diff --git a/install.sh b/install.sh index 9ecf104..dc1e837 100644 --- a/install.sh +++ b/install.sh @@ -1,7 +1,7 @@ #!/usr/bin/env bash # TFC - Onion-routed, endpoint secure messaging system -# Copyright (C) 2013-2019 Markus Ottela +# Copyright (C) 2013-2020 Markus Ottela # # This file is part of TFC. # @@ -26,19 +26,19 @@ CRYPTOGRAPHY=cryptography-2.8-cp34-abi3-manylinux1_x86_64.whl FLASK=Flask-1.1.1-py2.py3-none-any.whl IDNA=idna-2.8-py2.py3-none-any.whl ITSDANGEROUS=itsdangerous-1.1.0-py2.py3-none-any.whl -JINJA2=Jinja2-2.10.3-py2.py3-none-any.whl +JINJA2=Jinja2-2.11.1-py2.py3-none-any.whl MARKUPSAFE=MarkupSafe-1.1.1-cp37-cp37m-manylinux1_x86_64.whl PYCPARSER=pycparser-2.19.tar.gz PYNACL=PyNaCl-1.3.0-cp34-abi3-manylinux1_x86_64.whl PYSERIAL=pyserial-3.4-py2.py3-none-any.whl PYSOCKS=PySocks-1.7.1-py3-none-any.whl REQUESTS=requests-2.22.0-py2.py3-none-any.whl -SETUPTOOLS=setuptools-42.0.2-py2.py3-none-any.whl -SIX=six-1.13.0-py2.py3-none-any.whl -# STEM=stem-1.7.1.tar.gz -URLLIB3=urllib3-1.25.7-py2.py3-none-any.whl -VIRTUALENV=virtualenv-16.7.8-py2.py3-none-any.whl -WERKZEUG=Werkzeug-0.16.0-py2.py3-none-any.whl +SETUPTOOLS=setuptools-45.1.0-py3-none-any.whl +SIX=six-1.14.0-py2.py3-none-any.whl +# STEM=stem-1.8.0.tar.gz +URLLIB3=urllib3-1.25.8-py2.py3-none-any.whl +VIRTUALENV=virtualenv-16.7.9-py2.py3-none-any.whl +WERKZEUG=Werkzeug-0.16.1-py2.py3-none-any.whl function compare_digest { @@ -56,86 +56,87 @@ function compare_digest { function verify_tcb_requirements_files { # To minimize the time TCB installer configuration stays online, only # the requirements files are authenticated between downloads. - compare_digest b2ac8925070d9f304aac6c7500a752b3907b236fe796b5fd82491d02ce9a8b6e2f739a5efd175a2205ecc9241d5e0465a748ad373e8e2a1346eb4f674cf16e65 '' requirements.txt - compare_digest 1c95643d28addf2e8a631b7ec54b2c03cdbe8135695aa5c74b7729bbd272d8590fa3ac03ced5034429c2a3012334713924a83550ff835bc1d0fff77cf43500f6 '' requirements-venv.txt + compare_digest 8cb58c52af4481bc0be13dcda4db00456f7522934f185c216dcfe3ded794e5a35ecf7fa3e6417d7cbb477c0b3c59606a1c858b0b17d46ba6453ed71522fd976e '' requirements.txt + compare_digest 4f7372efb604ca5d45f8f8d76d0b840f68c5e2401b09b9824d6a0fc34291ceffbd0ebf516735e2ac5244681628ed2bd6fca1405f0c6d463bf869061bd6f6cd29 '' requirements-venv.txt } function verify_files { # Verify the authenticity of the rest of the TFC files. 
- compare_digest 941cc47f9846ea9a6fd067a1bc7ecd9e8a945ec8d9a4997b7c24c28072b8b1ab5cb278e93fb3c9d8bb2acca5616c9c32f697af66f5f648a8f56761edddc2564c '' dd.py + compare_digest 1d9ee816a00eb66a96cf2a6484f37037e90eb8865d68b02de9c01d7ee6fa735cbbd2279099fe8adfb4dda5d9c0a2da649a5f530dba1f0c44471838995abcebb2 '' dd.py compare_digest d361e5e8201481c6346ee6a886592c51265112be550d5224f1a7a6e116255c2f1ab8788df579d9b8372ed7bfd19bac4b6e70e00b472642966ab5b319b99a2686 '' LICENSE - compare_digest 7e519d20fef24e25e88ec4a9c03abadf513b084e05038f17c62ca7899c2f9174a953caa0bfbd3b61e455e243513cdab737c22a34d73ebab07b65d3ce99100f0a '' LICENSE-3RD-PARTY - compare_digest e81bb00e894a14419365b43ecf45443a4fed9ab0332c468066840e8ba17e2e099ff0dc1346c98fbb9d979093afaec4323d53a35d7ffdaca1fe41a4e797a07f29 '' relay.py - compare_digest cef01f168a92975a2e1fb7d514e60fb95995f51d750596f08fdb62c27912e7d6502e1ab5e1cf5dd621c77f5f1423240f75c7269d45eecf5a56a40ba863360f5d '' requirements-dev.txt - compare_digest 6d3c903bc74f5d1f2d20072a73aaac9b3c5f55de6a844f627a1e9d2b3522ecd7516d8637a52ccddb74bb8a854703bc28ec0349049a0e3c9cc59838dfdd22b328 '' requirements-relay.txt - compare_digest fd6073d05c3dc24b44fe1a3b24fcbc6d3b4ffff44d7a96cb5f99c4e431bf7ebe6838fde80384f18fce75bc4f2be752a446bc2cb5bb0335de80366d60eccfdfcc '' requirements-relay-tails.txt - compare_digest c9ac159bb9a7969ab152ea192f3c7597f852493b088bd1801fc36aee8870e319459509abb253915f4d9bfb4f9482d2b0f004fbccce2d41305557ded33cf8c19e '' requirements-setuptools.txt + compare_digest 4a239b91b2626375255fbf4202a4bf49ea248832b65d69e00530d7cec88e489466508aeb1ff8c8ef0f138819500c4fde2010207ebe3e289c36bc201df55d3c47 '' LICENSE-3RD-PARTY + compare_digest 260f20df57dc6afdef634501430039e16b8964fd58eb7e9f4ca889e4511331de8e643fe2c525b8f23b33ad60e23dae740236586188c87d4b3289738abb4b901b '' relay.py + compare_digest 2bd7f8925af923c44b11ef1a1bdb530c0ee4098066b06cbf334680756332d83f1dcda2e5f6e377b839cc70202f8e32b6387201e42d2618c68453e7cbd66a7e64 '' requirements-dev.txt + compare_digest 89eb610ad4b41d36f4f02c892e40e35fbe6567ff1e5523511bc87c0bc0a0838bf463a58a87a6389f907bb9b5fffd289ad95a92854d52ded028f908e946db1824 '' requirements-relay.txt + compare_digest e8cd32a91370b6b4dd306391a3b78488f6a0f467dcd82387749d499cd6beb13b50ba01be9ceed2fe5620595640ecec3e43dbb192b8732e4943f7a5a43f601407 '' requirements-relay-tails.txt + compare_digest 89e82f1f1b1a4d9f3d1432c2988c00f70d2cc1b5e670f50666d189817115bac7b1e15331dc243d1f0364d7d283a9d25c9982ee7ba90563b29bdf41986e734b50 '' requirements-setuptools.txt compare_digest 79f8272a2ab122a48c60630c965cd9d000dcafabf5ee9d69b1c33c58ec321feb17e4654dbbbf783cc8868ccdfe2777d60c6c3fc9ef16f8264d9fcf43724e83c2 '' tfc.png - compare_digest a6776ed2f82b8afec830c7cfb57473ea15656445ca14f3cea5065f8775ea7829f36a3212462b0c72bf6ec002cff2e309e788a5ca43c742d03d98b6d5691bbaaf '' tfc.py + compare_digest 4e659a97f7f4b8ba816b111446e5795460db8def5db380bd05ede96042170796def1f4cdc4f6afc7062079fca584ac09010a6222d6835403777d6acba91add8c '' tfc.py compare_digest 7ae1c2a393d96761843bea90edd569244bfb4e0f9943e68a4549ee46d93180d26d4101c2471c1a37785ccdfaef45eedecf15057c0a9cc6c056460c5f9a69d37b '' tfc.yml - compare_digest 50bb3db478184b954480069e40e47167f28f13e55aa87961eed36804c612a8c25e9ab0c0505cf5d36d790051ccfb465a2d7641ab3efb659503b634541d07e9c2 '' uninstall.sh + compare_digest ba16a9b9a197d7a916609bcd1f1ad8a076ad55c0b3f04510e8f19dfea35be9cf4a036481b3a03deb5932d5e9a90c4ca9f6e28d0127549681d756b4eda3c5c6e0 '' uninstall.sh compare_digest 
d4f503df2186db02641f54a545739d90974b6d9d920f76ad7e93fe1a38a68a85c167da6c19f7574d11fbb69e57d563845d174d420c55691bc2cd75a1a72806dc launchers/ terminator-config-local-test - compare_digest 6e1c1082b7850e55fe19fb2ebe0f622dea16e038072adcfe1347305324d10b97bbc443d7ed1ff3ee141d647b4561874c7736ba449c1e8e34dccd4be9dab5db8b launchers/ TFC-Local-test.desktop - compare_digest 6a6469b5b11cb081e1f9e2848cb328d92f283f94f977f8e89984fa115fbeb719e6b094c9de0c1ff5a4f5f3fd66d3ca71bce1a3a5e4ca3ae454557ad261f8acf6 launchers/ TFC-RP.desktop - compare_digest 6a6469b5b11cb081e1f9e2848cb328d92f283f94f977f8e89984fa115fbeb719e6b094c9de0c1ff5a4f5f3fd66d3ca71bce1a3a5e4ca3ae454557ad261f8acf6 launchers/ TFC-RP-Tails.desktop - compare_digest 4b387996983b6b900a53aedaba0a542eb89416fed0e99ed845680e41748bbad65956c5d4662dfce4b5519412a10404e6c995464c26c74298e0db37f55b3dcd2c launchers/ TFC-RxP.desktop - compare_digest 54b1ff5b89f12548594f65f20b4bd615f6659cdf47188be720c05d3126b8efb13e86257e4f2a1728fca758613519805da66eea3dee01215d389d9d9af6944f4d launchers/ TFC-TxP.desktop + compare_digest 9a40d97bd9fe1324b5dd53079c49c535ae307cbb28a0bc1371067d03c72e67ddeed368c93352732c191c26dcdc9ac558868e1df9dfd43a9b01ba0a8681064ab3 launchers/ TFC-Local-test.desktop + compare_digest c5dfa3e4c94c380b0fcf613f57b5879a0d57d593b9b369da3afde37609a9fb11051d71832285d3372f3df6c5dbe96d00a39734fbddf138ab0c04d0f1f752826f launchers/ TFC-RP.desktop + compare_digest c5dfa3e4c94c380b0fcf613f57b5879a0d57d593b9b369da3afde37609a9fb11051d71832285d3372f3df6c5dbe96d00a39734fbddf138ab0c04d0f1f752826f launchers/ TFC-RP-Tails.desktop + compare_digest d109dc200709d9565a076d7adcc666e6ca4b39a2ed9eff0bb7f0beff2d13b368cc008a0bbb9a639e27f6881b3f8a18a264861be98a5b96b67686904ba70e70f2 launchers/ TFC-RxP.desktop + compare_digest aa1c23f195bcf158037c075157f12564d92090ed29d9b413688cf0382f016fad4f59cf9755f74408829c444cd1a2db56c481a1b938c6e3981e6a73745c65f406 launchers/ TFC-TxP.desktop - compare_digest 2e9a7e53ed381f7c75499fa84c4e5c1d29d891fb5ebde5f404ded73689e8794327604876955b98c40c2e711fb4869edebf1f06d8b17ed92a02e28938091bd958 src/ __init__.py - compare_digest 2e9a7e53ed381f7c75499fa84c4e5c1d29d891fb5ebde5f404ded73689e8794327604876955b98c40c2e711fb4869edebf1f06d8b17ed92a02e28938091bd958 src/common/ __init__.py - compare_digest b6ed487f95631e2fb72f88e27cc612090f79a232e1984c3da0bb6a6cc0205b7843eec2525135503a1f363ebcd41acf46255103e0ba6a91cbb926a6525dd8f1c9 src/common/ crypto.py - compare_digest 70e6a3638e3b5953153b4ab70aed16763bae68c0a5d9284057cdd8dcce2491a5caf2061e9d40083e7cfe4eec7c7625ff5127a532661bcc02c27026821397e49b src/common/ database.py - compare_digest 1a4ca913dcd30418d0340f8c34e51fce4949e2d16149c7f7b41a02c536066cb24d4168de5ba64086c1299f5b6ad10b35fa1c16037fecd2e4576094c106294806 src/common/ db_contacts.py - compare_digest 2478f5dfb1f0b0493a6692294aae064f2b26671d84006a926e0b7e71e1d70995f3406a22ab12e3fca909ae448218a9e5cd6b9802c2df310e32573efc767303b5 src/common/ db_groups.py - compare_digest f04237c84aa8df5ed5f08c8f5c275fa3f97db557f441feaaf71045538bd1f33a0fc910ff43bd7153a7aeac05a348b3c1020c534cc342c761f91280d09019b6c9 src/common/ db_keys.py - compare_digest e85583d1bbe9f04640f9347600a27bfa98a28d208de988dcf923d8158c165a6badb91a176fc3cf138f32aab6350cfabc365619652bd25c6d250359be008fc3e2 src/common/ db_logs.py - compare_digest cefbb2f59fc5e0cff3e86f59db3a00bf5c6ad07ff056fc82252aa838732c4d5ce759a84dc278b9dcdbbcfe24f26077072f939947281a60e67627d5248655599e src/common/ db_masterkey.py - compare_digest 
b46670b84d392cb748e76554a4ed72dd8c020ee843d4d9b6d1f4d54ea2c77ca783b4ff9fd0dcca7100bfdb4661ca1248f786b50b5230dc76e8f200d352989758 src/common/ db_onion.py - compare_digest 1d92f8e369b8a8d1b3b9edf2a66519e74209ca5ddd1f0d3e321e5075466e13035ba2f34eb5f93de0213a273db7e0aa88bdd110c6a6e63b1fd83c55305efb8917 src/common/ db_settings.py - compare_digest 5ba06fd066cbcb055a6c14683bd3b424b41a135213501d604929b2ddb667c34a9ab25fb74fb8cd5d3c8a709cf3747c0e7f06b3a8ef80f7e5ad02ed7e87dabff5 src/common/ encoding.py - compare_digest c85299b1f59a350f3284fef956f6627397da36f35ed85161cc018d9b3422943018a99c57841516cc6f5a818a558d05ab9d768ffa4eea0b9fc5baa2d470ce5296 src/common/ exceptions.py - compare_digest 170a5db2b1d9e1b3445fcaa3e3e76fda11a1e8df7459b98efab8d8c634f94233706aa7b71e766251d211af93c062eb9b7fb18d9b3d0cd8e223262bc01faf26ba src/common/ gateway.py - compare_digest 45471974fe553d516e81b1548d93e38f92caf2104963827a04988c1627afb08429ef3abc82e2400e8706a0071fa2b4d5255f8ebfca607ff25fffa6bc1c9658c5 src/common/ input.py - compare_digest dea694844fe207a1df84e5e954c0f009449a06513cdb7341f7cdc98761fb81b663258d6ecd7741b2f5c3db9d19730e83a35fbb638d448aaca333735811238c92 src/common/ misc.py - compare_digest c30a5df2a0eadfce97d2df1f142ce8ab0064a9de5231f855df0da95ef2e5378fbcb4373ca03efb6d43c598fe0d36bb3703124ce1ff77d035fc7a4cc54bb5b7e0 src/common/ output.py - compare_digest a13de0bd9308db2b566d9a2fde25debd617f09dfc403a126a4d0f0015206a1b2e2b1ff23e32f48bfad4dd8fee95756d6ee4dbd3f2ccb6aeaf13c0321b91bdba6 src/common/ path.py - compare_digest 2bbc79ad9621d7529c44665525840fa92ad97fb65959e8cc35b1b36344d33dc29a75ace3bcf48338195500a7fddc668f9b3c8775d74617551e46f6f92c8b90c3 src/common/ reed_solomon.py - compare_digest a412d6f1004f9515dc07519b27b6ed860380a7a328ada27eda99a282082e71c7ddf4a4e6ad8aabb9db3ec38dac2ab09ca56b2c69e2ee35e53867d9d4b5bb0b99 src/common/ statics.py - compare_digest 0ca623e729844bb569eab70c12c6f31c74e342bb131faec37bbcb8db9c3b2eb806357937f6ae764604d8a4482ba95fe1cf61cd1e6ceea4882189f38f8a93db4d src/common/ word_list.py + compare_digest 3ee90ee305382d80da801f047a6e58e5b763f9f6bc08dce531d5c620f2748c6bba59a1528eee5d721decb8e724f53b28fc7609f5b20472f679f554b78b5d4cc6 src/ __init__.py + compare_digest 3ee90ee305382d80da801f047a6e58e5b763f9f6bc08dce531d5c620f2748c6bba59a1528eee5d721decb8e724f53b28fc7609f5b20472f679f554b78b5d4cc6 src/common/ __init__.py + compare_digest 1e27af90e641bd65538ba7d5b13e7b1b4ccded01b12e7bdc19894338639eeaf8074aef9726f3816e37fc01671b5dfbcabf7beb70244c887567e40874f44886c9 src/common/ crypto.py + compare_digest dd30ee2bdcab64bd62c3d41ff238a4e35fcb950a5b33d43b9758306cda4ab3d9d1a86399a58b70ac78fb2b39649de830b57965e767e7d958839cd9169bc5317d src/common/ database.py + compare_digest 99bb26974918c1fe23c001d39296653b5dda678fbde1d3470bfb2d62ccc73d31f782adc74666f53389cf8560215e098350dcac7cd66c297712564460c50c4302 src/common/ db_contacts.py + compare_digest 032ccacc86f62bbd1eafb645730679a1023841f53c6391b700e6562ba33321f0ef81d36f3fa02752b189cb795677d209404fffa7de6ebf042bd3ff418e056b9a src/common/ db_groups.py + compare_digest 38fed0ace4cc1032b9d52d80c2a94252a0001b11ed7a7d7dc27fff66ed1e10309ee07b345556775958389d83cb976cd151df2d717b5c7dbe6d312778ecb06408 src/common/ db_keys.py + compare_digest 4d9436a5381b81425c13b43829a1c2dac62a2210ebd5a80b3bb8445aa3b6509b33af58e14cb9803c330d81aa429341382c13170d6770cd1726698a274980978e src/common/ db_logs.py + compare_digest ccbff2a8e0bfe11b063971879e3849b376324a132534e6a520c58811945e93320f5837366a4548a2183e158242c52801d5276f7b6b86ca860977ae8f95c2c607 src/common/ db_masterkey.py + 
compare_digest 325298cd6cb7e68d27681c18f29e635f46222e34015ba3c8fe55e6718e6907b4257bbe12d71fd344b557aff302ae9d7fca2b581b4208e59ac7923e57aca23fe5 src/common/ db_onion.py + compare_digest 63451ece46802c1e4d0ddb591fda951c00b40ed2e0f37ffc9e5310adb687f0db4980d8593ce1ed5c7b5ca9274be33b2666ae9165aa002d99ecf69b0ec620cc1b src/common/ db_settings.py + compare_digest 60fb4c922af286307865b29f0cadab53a5a575a9f820cd5ad99ea116c841b54dd1d1be1352bf7c3ab51d2fd223077217bcda1b442d44d2b9f1bf614e15c4a14d src/common/ encoding.py + compare_digest ccd522408ad2e8e21f01038f5f49b9d82d5288717f1a1acf6cda278c421c05472827ee5928fbf56121c2dfc4f2cc49986e32c493e892bd6ae584be38ba381edd src/common/ exceptions.py + compare_digest 999bb5264e4e586fcdc163a65e6bf0cea7b9d856ab876e1f23c1926324dc90df2f8afe86057c2f8e578f6f77c45f8e776de3c9ff99475a839f188efe8f861fe9 src/common/ gateway.py + compare_digest b01aa02c1985c7c1f7d95678d2422561a06342378306936c35c077938c133b8f60a63952bdc9005210d31e68addd88b86a45f82626c40beff07e785fdc2aa115 src/common/ input.py + compare_digest d617f7bddf11525d672aa53b9076b19c27754f60768dd240c29d1f937ffb62d15e063513b59268a5d478ef3a645135fb0e1e5970522f225fef240874f8cfaae1 src/common/ misc.py + compare_digest 8b479b3a7c1c4fdaf4c4f8d4a4436231933ebb1da47a5a07a596037b20db7d5aa7e8a1d107d4ec973603551f28833ff404c177b9977d654f3b38a915d16a33bb src/common/ output.py + compare_digest 08443cfe633bb552d6bb55e48d81423db4a4099f9febc73ec6ee85ee535bc543720f199ac8b600b718e9af7247fb96ef4b9991b0416cf7186fd75a149365dd36 src/common/ path.py + compare_digest 39e48b0b55f4f1a48bc558f47b5f7c872583f3f3925fd829de28710024b000fcb03799cb36da3a31806143bc3cbb98e5d357a8d62674c23e1e8bf957aece79f6 src/common/ reed_solomon.py + compare_digest 6782f85e365848376675c988d9e9a25689b8df9755e47c790d4cba3a9e0ee25b5c974f7814022097d13987e6200c5b9398bf6b787b7f77910a678e9b7c118aae src/common/ statics.py + compare_digest a57d5525a570a78d15c75e79702289cf8571c1b3c142fae57f32bf3ed8bb784c7f63ce2e805d295b4a505fdeaf9d59094ebe67d8979c92dc11e2534474505b0e src/common/ word_list.py - compare_digest 2e9a7e53ed381f7c75499fa84c4e5c1d29d891fb5ebde5f404ded73689e8794327604876955b98c40c2e711fb4869edebf1f06d8b17ed92a02e28938091bd958 src/receiver/ __init__.py - compare_digest 2da8d697103a3a4fd95e1a885a40be89779aef7f8f1ca3d1567b5edcf50692b7a899c42140eb9777622bc80a0f0a20c4b2d751ef394d108893f4d04c2afe637e src/receiver/ commands.py - compare_digest 1cc28058c8efbd8a9597455375a4f45ec7f3368bf269c93c07c9c8f26bfb4fe7120b96ed24231ee634e5a5e7c72a157a0976bf1aced2ab4de030903b27bb25e0 src/receiver/ commands_g.py - compare_digest 4e253f29869de701cd0a7f642b4e5e0637c0ec0bcda6c94ee2ac6dac7b78d18626c5d099d475338bc8bfe03502782b873bb8e0e4fa5b6b38a2d1b1a6f7e32e60 src/receiver/ files.py - compare_digest 452bcb094829bec416b09679d3d566e668d23a16a3bd67bc76fc1d020f4d7de6ac66911cfcfbe40386e35f70392215c9979b1bb264a75506c83e7c27f9980a08 src/receiver/ key_exchanges.py - compare_digest d6f54bdc5c000ac2addf8a40d359fad289e8926d04807bfc784cfe1033a91bc6cc05a2c65cfdea4cfb383cbb53d9614275d4d0ae567c726bee269b5ffff734ff src/receiver/ messages.py - compare_digest e123ac2b4f568875e0d7b801a41fdd37d2d8062d8bcd98ec2913d696070e948d6c161577d82105a21f60dd5619a9a704a7dec6828d676f617efda6d08c3423b1 src/receiver/ output_loop.py - compare_digest 4bcbe8364c33f3b9d69d5a52768b4779f493ed174308bd4bfff9f9748dcd7530d1c9d91b53fa5fddb211ff687afc90e88c513515f8ce991e7a43eb8326a23f2f src/receiver/ packet.py - compare_digest 
62d8f02f133edc70fa7a46d53f4e44ef22f9d16541424103001db20f2db6cfb5f8d96ed34c0eb9a61d8c6ae56b5f51f95ef0edfda5b0b5a2c23d85d988f7c10e src/receiver/ receiver_loop.py - compare_digest 40b8c61f0439e64ba6fdc994a944dce22d556b20e9aa76722921bb92a79d8a561f23ce3924ca33fda1f8f5a83b6bd0d089575779b301ce0fee1f51fcb83065e6 src/receiver/ windows.py + compare_digest 3ee90ee305382d80da801f047a6e58e5b763f9f6bc08dce531d5c620f2748c6bba59a1528eee5d721decb8e724f53b28fc7609f5b20472f679f554b78b5d4cc6 src/receiver/ __init__.py + compare_digest a4aeb64c2adb0d435e52f8de33aaff1ec135ca6b551c813fa53281087e8e62a0db9ad0ac595bca17391ee80cba9dfef9790b39d0877b0497bab3871f167ee9cd src/receiver/ commands.py + compare_digest 6dd0d73fe240f974bb92e850c5f38e97ee8d632fbb3a53abc3160b4e29f9b2b076b8b5d20dc7f7231c01c978ea67b55740ac17dc74bf05e6811d2e35e34056fb src/receiver/ commands_g.py + compare_digest 320bc36df51efb138c173d9fcef7a8ee7de06bcee672e75512d2a9e07eef5006068650e76d4dc741b4cf180cb8ee46560f941753539e377d0d4e534d0f6c629b src/receiver/ files.py + compare_digest 437a27e1ee948994866b1e2bdfa8a835cad67e89c4ecc0d04589fc27dfabb3a2d6d582bee502d50dc76c7fbea5cd1e71a08bc08a7fde75c7a595b463c7f3ce43 src/receiver/ key_exchanges.py + compare_digest 6ebd6c0638525997949783b7623ce9a78683169e95f572ea65dcec52da150b0473a25e928862cab34eac44b0e0991a0969c5252c03cf4dc8f49d1aa9809b43bd src/receiver/ messages.py + compare_digest eabe1695cd0fe04346e49ed91b64a11ad74ff60b636333140f9a3c6745b9c408d77aae8f45256b5d74b241324a5d429249b2be6c732205ab729a38049b8631f7 src/receiver/ output_loop.py + compare_digest 25b49765e149f5e072df2aa70c125478d1c9621886527201bf0d7718db557f2991823d980274f53abf93269f5aa1096b3e56fae94ecaa974ef31b0cb7907fde7 src/receiver/ packet.py + compare_digest 002c960023393bec10da3de6d9a218c8e2c27da1635fd1a7f99e02a9a28792428a2c0e6cd030d1cc1fac1124c58f397f63d60b7af4c384367a8c293978125539 src/receiver/ receiver_loop.py + compare_digest da8ff22a1ece42c780328c84722ae42c2dced69dd87f2fb2d09fd517d3ee98f3777c448922b2b06a5839347e075a5598e6c770a544fdf801e664ba5ad06b684d src/receiver/ windows.py - compare_digest 2e9a7e53ed381f7c75499fa84c4e5c1d29d891fb5ebde5f404ded73689e8794327604876955b98c40c2e711fb4869edebf1f06d8b17ed92a02e28938091bd958 src/relay/ __init__.py - compare_digest 946baf7d5e67dc30adfcaa92dceb4f8ddc7421f0171c4a328ceef886d9bf8f78bf044a19ff25490fac6ba51293b7beceee2feb21457d5fb80a4b93966db6ec68 src/relay/ client.py - compare_digest 69df9dfee65de516f835174189d388b377aa0a08fc71ac660e50da7bb912319bb526b735f7cb83e560bbef9acfe40dbf04f433d185ced4cc295bb8bf63b2afcb src/relay/ commands.py - compare_digest ef65dce3e6cc0b0f362972ceaab4151a798c18ca872af9eb23927b854c28d883344fa00813546eb28b4cabf074f3da97d7cf978f9e5261efd84497510f154057 src/relay/ onion.py - compare_digest fc355ee1118a20202a9e029a80f0af83a876843c4f8a7458e5af99a96427dd039c61601c9fc3f90d13512a7837241609825988a482118dff3916fc955e8bfce2 src/relay/ server.py - compare_digest a61d9d56efabc7a302e0bbf3a7c7b52d8552ea0d736582ecfe3a7c768fbcc67beaf07c2087e310ab45cdb440004063986cb7bb76b81fb140a236c79399dc7fd0 src/relay/ tcb.py + compare_digest 3ee90ee305382d80da801f047a6e58e5b763f9f6bc08dce531d5c620f2748c6bba59a1528eee5d721decb8e724f53b28fc7609f5b20472f679f554b78b5d4cc6 src/relay/ __init__.py + compare_digest 340063f239e43e7504f1a3389e1cbdd3603b756c113d32a4add0a4815afef234d19a6b2f8dc675de90f62b0f8b9d414c829762d6b87d58d08eac21fb413b011d src/relay/ client.py + compare_digest 49c540cab10d932cd6b7afa417f3c2551b452d657c7be086fc4fd7fb490f56d1016e882a3de9b3ba78fa7160ce79967958c05d657874443c5488735ade7691f7 src/relay/ 
commands.py + compare_digest 959129c8eb8c1ae40d8c97997dde609a02692f808c476ffe1edcbdb03330b0d38d450c8898abd41e5498ca8f962e135b328a319f7475dfa1f69a25baae463e5d src/relay/ diffs.py + compare_digest 0bc1912af4d45ff72fbd7c43a09ab48ea3a780bb096226011525924820ba0ba1f396937fb191e5e18ec87dee14ccd3b505192e16e835d46088e4b50c941125f5 src/relay/ onion.py + compare_digest 0273508b0c2f95ba5e710a9c8201fa6915881305a02de9c3e165b6f78912d559ed8fa4edb0ca07cbf1843a0cee4b377c073dbfc3eb6600bbdb163228ac1f1742 src/relay/ server.py + compare_digest d9a5d7c806f45419e7d79d543fba186621c09c79839a8976f833c92ef3ba6ea2ca9fbb6db2ac6455080a294dd2dcf7f9dbaa2cfac56414587753a3754bd3732b src/relay/ tcb.py - compare_digest 2e9a7e53ed381f7c75499fa84c4e5c1d29d891fb5ebde5f404ded73689e8794327604876955b98c40c2e711fb4869edebf1f06d8b17ed92a02e28938091bd958 src/transmitter/ __init__.py - compare_digest 9629bf56ac1b2ca2e1f0164ece2333a2ea73ecc5bedc3231cf34b4bc199f9c03e9bb567b7fb6e2e73ccc80d4689c419a9e1241b9d5c6351c467f7754e81d7fbc src/transmitter/ commands.py - compare_digest 7edf5c9b72486af7e4ec870bd5b3b6fed5f1c143463b83ea8732842662d8509604319c2bc68edca40714371c992fcfb2810dbb1e105a0061c32cf66a31e2d7ed src/transmitter/ commands_g.py - compare_digest d98bd8b8097e024a255de0783265d7d521368d31a234e1118408fb9353a90ac7fac3286b2aacbe2c417d7855f8bfc126675926ee505607d7ed2a3539225b5ad2 src/transmitter/ contact.py - compare_digest 2c1eceb95d0e3dced8d5b598c028bfcbe749a1e331e82a8a81976b1e13e33eaab1d378e652c1f420dcb099ab78cd62659fd3e589a11631497b247b3f8f59c3e1 src/transmitter/ files.py - compare_digest 47e91c019b4a606309f48f1c60b19a6883e460769769e8e4de1c1a9f7113642b61e8f4de3292c36a6eb8f51bfde0cb7e6687d07234f6479cb28bf9a194916bcc src/transmitter/ input_loop.py - compare_digest 750e8f0f1b0a243d3c0a9c42d32160aea213d094d3aaffa5422da3a9fa2de5ef0bd2f9e13e186776527b46c7fb800067389bb0b3db626c1fa0d64100216bf0ba src/transmitter/ key_exchanges.py - compare_digest f4e0d9c913382b6745a2823294802a603db5fba41d68a42061d0e8c244da2199638d52e3b8b3150a165f3348e293cff48c4663d2bd1b3c36b3a4b4505deb7cd1 src/transmitter/ packet.py - compare_digest 22886a86c203a97410fdd1f3b7831eb8f091de45aa323ebbdd2901533c61e7418e468d1e2d37c74074f66c24083917fe3fda94dc809236fb51342414cf0e4436 src/transmitter/ sender_loop.py - compare_digest 9ea30785b8459e342ac71666012251c76a16a55f059cfdc7b8ad6c74fc7aae69965adf4382654bf686d9acb79b525a3513ddcf6a49bb6459caea124c5fb69eea src/transmitter/ traffic_masking.py - compare_digest 1f082487590125de9ddeefe696be062ce5bc3fc1f82c3117dab3de2349dca5f859c7aaa3f626d8fe306d5b64c34bfdb1b0b50a0d7f4ed156f1043d35cdb2b618 src/transmitter/ user_input.py - compare_digest 827869782567511343923f7164d5469733691664257c94bd488be451467edcfa4a2513f1e3ce48094f0aa4067b61c1b52d8b90ed3c479907e66e9d870ad6d18d src/transmitter/ window_mock.py - compare_digest ff1ff1c5fe95726607f15a2e3e2cecae899b424497eae17a2e52d9279b012752a2330fb70a68491b4b3cf60f205f9ade02aaa7c5e28af86b94c703c16be8abad src/transmitter/ windows.py + compare_digest 3ee90ee305382d80da801f047a6e58e5b763f9f6bc08dce531d5c620f2748c6bba59a1528eee5d721decb8e724f53b28fc7609f5b20472f679f554b78b5d4cc6 src/transmitter/ __init__.py + compare_digest 09cd7d5921ac74ebbd40fbe3abc22faee48b30c33f73405fe20ce7d3096df0599d0346027cf0c5b458acf55f8919373c1c215062236e522ddfe154802b219622 src/transmitter/ commands.py + compare_digest 2af2cd801fc83f882c65e031b5bd6f5c2c30b32dc0bb538953021b1f520714723d39b2a2462a6718cbb3efea1b645767b50d468978bb802dacf8b73535a2975f src/transmitter/ commands_g.py + compare_digest 
31267d2049e4e9a88301e0d851e11c8e3db0bbb96c4509c10a3528c29ab679a49db8430cca1529ccd71556e273f4937d3bf7e0c2e1a165a8d36729ed284a4f19 src/transmitter/ contact.py + compare_digest f2fefbc2acbad441cb997969d6b39fbe26813abc781f5b6caaa08f1eb4c52c05b2bd4cbc341cb75ea07f7a4931d9b1145bef2fb352376a72442f7a71299fb595 src/transmitter/ files.py + compare_digest 110665f962eb827a9f636cc823837222a7bed4a429d4e10eb90c7bf5ba7bd5900aa1ecc4d4b485927a276d5727e18fe9e78f75ab8bd4ff67f039bb633fe505ec src/transmitter/ input_loop.py + compare_digest 20b06b3b28bdecc9b572acb7d47e51ab98863230966cfa2d8e93ead13126f6019e88b2bd648de7fc7795805a836c1f9f7e243f2c13ebc8bf5bca1078ff6c14d8 src/transmitter/ key_exchanges.py + compare_digest 766b1efa548f2da49272870fa5f89b8aacdf65b737b908f7064209f2f256c4d4875228ad087ac4957a292a82ed5936a40b9ae7553bfae2eae739f0c4579eb21a src/transmitter/ packet.py + compare_digest b8cfc11ae235c8cddbbd4003f8f95504456d9b2d6b6cc09bd538c09132bc737b6f070bdbc8d697e9ddfc5854546575526fa26c813f9f6bff7dc32fcdbb337753 src/transmitter/ sender_loop.py + compare_digest cdcb21128f71134ae49f3e99bf2a6dce5ec88766ecf6d91be89200ef282f7bd326c9805ba8f2d73d3fa12a8e05da20630874b5bbf9e18085d47ad5063098eaf8 src/transmitter/ traffic_masking.py + compare_digest eb77c6206cab63ffdb47bbcb8b76a55100636d893e234a048221d83e9ce07b76ccfcc93b506d9fb48d6f8823135e5697f3e56aed8e95f23990d8dfc1cece325e src/transmitter/ user_input.py + compare_digest 489f869176da0040b6f06327544f5eb72863a748a4799c66198a09402df6d54d842e9af27af51faaeed9d0661133eeaebb9918bd1bcd50950c182ba4b1e5fc74 src/transmitter/ window_mock.py + compare_digest 09c536d43b37103b6340293efa67345f54da6563ea65441546161066d735b4dfad9eaea9c58452de3413b72b28a923d2efb851ac740ba09ada45368bb64b9f15 src/transmitter/ windows.py } @@ -224,28 +225,28 @@ function verify_tails_dependencies { # Tails doesn't allow downloading over PIP to /opt/tfc, so we # first download to $HOME, move the files to /opt/tfc, and then # perform additional hash verification - compare_digest 4483bdd81d63cc38e0003cd3cba995f3e21d506e2f6a64bc98a673f1ef5ccd56e8e1109ec049c9394a538b879ea47dbafa0c575cdc02eedb1b9172e8fc045ca6 '' ${VIRTUALENV} + compare_digest f4e7148f1de50fa2e69061e72db211085fc2f44007de4d18ee02a20d34bca30a00d2fe56ff6f3132e696c3f6efd4151863f26dac4c1d43e87b597c47a51c52ad '' ${VIRTUALENV} compare_digest 8333ac2843fd136d5d0d63b527b37866f7d18afc3bb33c4938b63af077492aeb118eb32a89ac78547f14d59a2adb1e5d00728728275de62317da48dadf6cdff9 '' ${PYSERIAL} # compare_digest a275f59bba650cb5bb151cf53fb1dd820334f9abbeae1a25e64502adc854c7f54c51bc3d6c1656b595d142fc0695ffad53aab3c57bc285421c1f4f10c9c3db4c '' ${STEM} compare_digest 313b954102231d038d52ab58f41e3642579be29f827135b8dd92c06acb362effcb0a7fd5f35de9273372b92d9fe29f38381ae44f8b41aa90d2564d6dd07ecd12 '' ${PYSOCKS} # Requests - compare_digest f6a78508cb87050e176005a088118f8ad87b17cf541457d949e5712c356f8c4de7e7516ba066e5c4bb9ced5c7e7590ba7e07d4ae7fc7190487bf27f1bb9d0668 '' ${URLLIB3} + compare_digest f7fd3b54b7c555c0e74eb445e543763d233b5c6f8021ccf46a45d452c334953276d43ecd8f3d0eafefa35103a7d1874e291216fc9a41362eb6f1250a2a670f16 '' ${URLLIB3} compare_digest fb07dbec1de86efbad82a4f73d98123c59b083c1f1277445204bef75de99ca200377ad2f1db8924ae79b31b3dd984891c87d0a6344ec4d07a0ddbbbc655821a3 '' ${IDNA} compare_digest bfae58c8ea19c87cc9c9bf3d0b6146bfdb3630346bd954fe8e9f7da1f09da1fc0d6943ff04802798a665ea3b610ee2d65658ce84fe5a89f9e93625ea396a17f4 '' ${CHARDET} compare_digest 
fe5b05c29c1e1d9079150aaea28b09d84f0dd15907e276ccabb314433cfaac948a9615e10d6d01cbd537f99eed8072fbda7cb901e932fbab4f1286ae8c50471b '' ${CERTIFI} compare_digest 9186ce4e39bb64f5931a205ffc9afac61657bc42078bc4754ed12a2b66a12b7a620583440849fc2e161d1061ac0750ddef4670f54916931ace1e9abd2a9fb09c '' ${REQUESTS} # Flask - compare_digest 3905022d0c398856b30d2ed6bae046c1532e87f56a0a40060030c18124c6c9c98976d9429e2ab03676c4ce75be4ea915ffc2719e04e4b4912a96e498dcd9eb89 '' ${WERKZEUG} + compare_digest 4c982970fef39bf7cfbb4e516864fec0f8ec3f743ccb632d1659c6ee415597d98f4abd63b5c0fd999eb43fc0c89a97123f07625b01ea86b02ef51cb67a2b148d '' ${WERKZEUG} compare_digest 69e9b9c9ac4fdf3cfa1a3de23d14964b843989128f8cc6ea58617fc5d6ef937bcc3eae9cb32b5164b5f54b06f96bdff9bc249529f20671cc26adc9e6ce8f6bec '' ${MARKUPSAFE} - compare_digest 658d069944c81f9d8b2e90577a9d2c844b4c6a26764efefd7a86f26c05276baf6c7255f381e20e5178782be1786b7400cab12dec15653e7262b36194228bf649 '' ${JINJA2} + compare_digest 461bbd517560f1c4dbf7309bdf0cf33b468938fddfa2c3385fab07343269732d8ce68d8827148645113267d48e7d67b03f1663cc64839dd1fcec723ea606aaf4 '' ${JINJA2} compare_digest 891c294867f705eb9c66274bd04ac5d93140d6e9beea6cbf9a44e7f9c13c0e2efa3554bdf56620712759a5cd579e112a782d25f3f91ba9419d60b2b4d2bc5b7c '' ${ITSDANGEROUS} compare_digest 6b30987349df7c45c5f41cff9076ed45b178b444fca1ab1965f4ae33d1631522ce0a2868392c736666e83672b8b20e9503ae9ce5016dce3fa8f77bc8a3674130 '' ${CLICK} compare_digest bd49cb364307569480196289fa61fbb5493e46199620333f67617367278e1f56b20fc0d40fd540bef15642a8065e488c24e97f50535e8ec143875095157d8069 '' ${FLASK} # Cryptography - compare_digest 387d94f37a74e2d86ac0a41f482638dd9aec9e94215ffc50f314eb2f8e0cfc2f15afc3e508ea37a4fbcca7e4bcfc65efa1e5cab5f8094ccedc18bee2b0f2e3a8 '' ${SIX} + compare_digest a6e7e35921ce8f2f8e79a296ea79a9c3515ff6dd7e777d7892fe4988594f1b3a442a68ffb89cf64530b90a32ceeea00e4ab9069bb697629ab4eb7262c68d1b0f '' ${SIX} compare_digest 7f830e1c9066ee2d297a55e2bf6db4bf6447b6d9da0145d11a88c3bb98505755fb7986eafa6e06ae0b7680838f5e5d6a6d188245ca5ad45c2a727587bac93ab5 '' ${PYCPARSER} compare_digest b8753a0435cc7a2176f8748badc074ec6ffab6698d6be42b1770c85871f85aa7cf60152a8be053c3031b234a286c5cef07267cb812accb704783d74a2675ed3b '' ${CFFI} compare_digest 184003c89fee74892de25c3e5ec366faea7a5f1fcca3c82b0d5e5f9f797286671a820ca54da5266d6f879ab342c97e25bce9db366c5fb1178690cd5978d4d622 '' ${CRYPTOGRAPHY} # manylinux1 @@ -260,7 +261,7 @@ function install_tails_setuptools { # Download setuptools package for Tails and then authenticate and install it. 
torsocks python3.7 -m pip download --no-cache-dir -r "/opt/tfc/requirements-setuptools.txt" --require-hashes --no-deps -d "${HOME}/" t_sudo mv "$HOME/${SETUPTOOLS}" "/opt/tfc/" - compare_digest dd18da86ba566a7abde86890f6fa7c5a4dee34970927ef883a07a44ca8992713f9c2c4c87538d18d7bbf19073f1cc7887b150474375f24a0938cef5db097c841 '' ${SETUPTOOLS} + compare_digest 761e4c8df239b8d173513b08959b387c1059e3e023ba6b3f6250fade518d6ef29f287ab90dd35d02bb681b410a050b30b2ed44849638b6f98831f4290a4ccd15 '' ${SETUPTOOLS} t_sudo python3.7 -m pip install "/opt/tfc/${SETUPTOOLS}" t_sudo -E rm "/opt/tfc/${SETUPTOOLS}" } @@ -303,7 +304,7 @@ function steps_before_network_kill { sudo torsocks apt update sudo torsocks apt install git gnome-terminal libssl-dev python3-pip python3-tk net-tools -y - sudo torsocks git clone --depth 1 https://github.com/tfctesting/tfc.git /opt/tfc + sudo torsocks git clone --depth 1 https://github.com/maqp/tfc.git /opt/tfc verify_tcb_requirements_files sudo torsocks python3.7 -m pip download --no-cache-dir -r "/opt/tfc/requirements-venv.txt" --require-hashes --no-deps -d /opt/tfc/ @@ -398,7 +399,7 @@ function install_developer { sudo torsocks apt update sudo torsocks apt install git libssl-dev python3-pip python3-tk terminator -y - torsocks git clone https://github.com/tfctesting/tfc.git "${HOME}/tfc" + torsocks git clone https://github.com/maqp/tfc.git "${HOME}/tfc" torsocks python3.7 -m pip install -r "${HOME}/tfc/requirements-venv.txt" --require-hashes --no-deps @@ -466,9 +467,9 @@ function install_relay_tails { # Apt dependencies t_sudo apt update - t_sudo apt install git libssl-dev python3-pip -y || true # Ignore error in case packets can not be persistently installed + t_sudo apt install git libssl-dev python3-pip python3-tk -y || true # Ignore error in case packets can not be persistently installed - torsocks git clone --depth 1 https://github.com/tfctesting/tfc.git "${HOME}/tfc" + torsocks git clone --depth 1 https://github.com/maqp/tfc.git "${HOME}/tfc" t_sudo mv "${HOME}/tfc/ /opt/tfc/" t_sudo chown -R root /opt/tfc/ diff --git a/install.sh.asc b/install.sh.asc index 3e314dd..6542c4e 100644 --- a/install.sh.asc +++ b/install.sh.asc @@ -1,16 +1,16 @@ -----BEGIN PGP SIGNATURE----- -iQIzBAABCAAdFiEE6o84umdLJC6ZRIRcmBNw6XJaD7oFAl3lt0MACgkQmBNw6XJa -D7qXAw/9Ht6hSg89F4Yuw3pzq/6j5Ab5kVcE7OO1Nk0FPzJ5wPHuTqjetZhg1XGA -C/DzKsEE5ArSgvRGCWPd/dRgcyQ5IHDEcCB8VtKLaxTaZzP9vmM4KlRRK+ISs8te -ufH7RdAUdHJrhR5eO2LHy+xb4y736/sl9FHYq3MtnSXSBphhNQrb+lSjgDn+AEZt -t0VDN0V0MIwQ4iT65p+6rURUab18NvRzEmdfh71a89NL8sMEP4Ww5dGxhEQVAkxP -bGMuPKc4fVlhj3HA61aHtFSWxboIchzVM/tVeLSh1Rw2rHPvRmmbR/zHbN+2ezdt -RFWqdoDQ32lpzT6eTi6XpE8B6HInAp1pgOhzBsv98TirURvx1nF9O5yQKPUyDxYr -zajp/RpHsuf8j65pJNWQntvU7pVkQG7oZUPkVvrN12ekQi5kKLy0+9L/TuC53uTd -5g7cr+HGg3dU+zh/iHN7AEL0ozp3z7AKfgEtZHiq2NV6fZiq9gxiyHYSYtQ+vuz+ -ATb9pYDOy+wqI8IxL+7X9Wckk192hB63U5ML4sjlVXK8+gct4p8lE8ePO2GI8kwm -d/5jeGUiwEdMURAoWASVkwASBezkWVchSBoPt2BqjuhKvirygKddPU+RCRUoMzqn -5EEgscR903+9FpwOhmgm0iqn5UEvBubuJA0LeoSlx/+IAjgQKP4= -=dQkc +iQIzBAABCAAdFiEEE3wqdU+qbbuozmTV+rAVyyKvL4QFAl42DPoACgkQ+rAVyyKv +L4S3Gg/+OujW2IlEDBpxd97jPRRH1L3UZ3tHOV2VuV5hIukkblOLx1UZJbWWL/VC +/Q4Yd9Xi6f58Jwz/f7RIFBzp1xNa6rEcTYT6CBTvzsyxDyrUQQVgGzdJhuYHqoRk +j6b8SLuxnafEEtVjgESoy0Ei5bSgs9l4aZU/Jd86ClUI0yF4SWeh562UWGHXObVJ +/RtjrpnKn6OnIVY5QvbYOpTk2Q3dd0sz26/pxykptselzN+2kFCl+4mtu5oT1bkx ++c33lp3ihyJyNpEkEqISudtfR5FfQlq5ZbQRL9p77Y9e4ePUG6wlUN0dlm7oXS2/ +uS1Y1+U1wQNcMjisOo/bZs9wPzatfP9cl9I5DCl1vogMheSYKuORR3kc0FR+cAuX +/J0KryZrMP3kK43eM4LzHdoimGyX6D79Wdy2cPZ70QpKYrCDjSawanmX1ZxxPblD 
+HfmHnJ0Inc1o85lf5l/PYy3xLQrQbBuUIlctBFWbpW7XdUqKS9HdVqzxGnbOAJnP +C59O3EpkiMqV3I9zn3e85wMzKy4xrbrm/asl+S97BdHzZf8xGdvBRBwYK1OFfzKL +7fqxJDfkkOoTyrC0vhO+mbm2ktyR1oOjRCsEitXWA1sYmz1x+NbuaMQoiwpnefFG +JG1EYzokYahZFTz3NrfG2IK+2vOr7TV6KlWasQLKboleiMNQEZw= +=v3mk -----END PGP SIGNATURE----- diff --git a/launchers/TFC-Dev.desktop b/launchers/TFC-Dev.desktop index b216c30..df3ee9a 100755 --- a/launchers/TFC-Dev.desktop +++ b/launchers/TFC-Dev.desktop @@ -1,5 +1,5 @@ [Desktop Entry] -Version=1.19.12 +Version=1.20.02 Name=TFC-Dev-LR Comment=Developer configuration Exec=terminator -m -u -g $HOME/tfc/launchers/terminator-config-dev -p tfc -l tfc-lr diff --git a/launchers/TFC-Local-test.desktop b/launchers/TFC-Local-test.desktop index 58e1c25..aa8bf9e 100755 --- a/launchers/TFC-Local-test.desktop +++ b/launchers/TFC-Local-test.desktop @@ -1,5 +1,5 @@ [Desktop Entry] -Version=1.19.12 +Version=1.20.02 Name=TFC-Local-Test-LR Comment=Local testing configuration Exec=terminator -m -u -g /opt/tfc/terminator-config-local-test -p tfc -l tfc-lr diff --git a/launchers/TFC-RP-Tails.desktop b/launchers/TFC-RP-Tails.desktop index 8f349eb..698ab38 100755 --- a/launchers/TFC-RP-Tails.desktop +++ b/launchers/TFC-RP-Tails.desktop @@ -1,5 +1,5 @@ [Desktop Entry] -Version=1.19.12 +Version=1.20.02 Name=TFC-Relay Exec=gnome-terminal -x bash -c "cd /opt/tfc && source venv_relay/bin/activate && python3.7 'relay.py' && deactivate || bash" Icon=tfc.png diff --git a/launchers/TFC-RP.desktop b/launchers/TFC-RP.desktop index 8f349eb..698ab38 100755 --- a/launchers/TFC-RP.desktop +++ b/launchers/TFC-RP.desktop @@ -1,5 +1,5 @@ [Desktop Entry] -Version=1.19.12 +Version=1.20.02 Name=TFC-Relay Exec=gnome-terminal -x bash -c "cd /opt/tfc && source venv_relay/bin/activate && python3.7 'relay.py' && deactivate || bash" Icon=tfc.png diff --git a/launchers/TFC-RxP.desktop b/launchers/TFC-RxP.desktop index a3a1a08..471a8ea 100755 --- a/launchers/TFC-RxP.desktop +++ b/launchers/TFC-RxP.desktop @@ -1,5 +1,5 @@ [Desktop Entry] -Version=1.19.12 +Version=1.20.02 Name=TFC-Receiver Exec=gnome-terminal --maximize -x bash -c "cd /opt/tfc && source venv_tcb/bin/activate && python3.7 'tfc.py' -r && deactivate || bash" Icon=tfc.png diff --git a/launchers/TFC-TxP.desktop b/launchers/TFC-TxP.desktop index 0008324..3347c8d 100755 --- a/launchers/TFC-TxP.desktop +++ b/launchers/TFC-TxP.desktop @@ -1,5 +1,5 @@ [Desktop Entry] -Version=1.19.12 +Version=1.20.02 Name=TFC-Transmitter Exec=gnome-terminal --maximize -x bash -c "cd /opt/tfc && source venv_tcb/bin/activate && python3.7 'tfc.py' && deactivate || bash" Icon=tfc.png diff --git a/pubkey.asc b/pubkey.asc index 7d46843..33c27d4 100644 --- a/pubkey.asc +++ b/pubkey.asc @@ -1,29 +1,29 @@ -----BEGIN PGP PUBLIC KEY BLOCK----- -mQINBFxI9IABEADGN00o+hqjq0wp+Nt4TfkuP7+yj5kHbqnc2nGWXoWrdPMxATDf -5MRdhD9dc+pf/6McfcN7od3U6AyJFW1Tzs6yFzSFJI0EGdZsqfco87Za0VpiVn0i -XaoHZgtklgwK47pcJZe5+TeXWkBGB02OC0UZu3THdjL8JrqhtdSR7jxShHxpZcR1 -4PCsPLcA7wJiWM6Xu8KYpjuDFkGSbpk4msuLN9E5M6DFspcTlrD2ZT2F2l/BnzVt -XARg+2PAnAddmmK7NXBTe1MWN2UJc8eer1RaWlovi5m3ziS+hvTtCkjgTrVDyZHc -Xi35XRRX+Zn1uijTBwOqPFDyUwo4VGnaOTkKyS5awYrAL0pCTQbKr119mKcv5bPp -pCm8Ld59/W/0XP8Fwpdyll1E3Nft6cPa3esyVaN1zdsmvxDo7rHp/C8I1bgEdAlj -VFFaOKpXXHiQ78Ey3JTzXfGq7LduHhKohOFQsBTsWlm1hYspaDVqxRLsIirQgHkW -5CYWYg0V+e7EsY4UzCQCA2KkaVon9hR8gYcbv/DzCDYds+RkyJY6dFC+hSyVuPtx -fUoH6K8fMhuYArBQU9mzz2qKlzT7Q5Z5xisOquFaCdorsoUxUpQ8Jzt1sqASyrZx -uI2de0QYQzC5E0ySdfgtQmEXwF+rjsT1zhD33/UbjBUuk0moNrXyLWFH4QARAQAB +mQINBF41/cABEACvq795K1+tvNbh3q1x+SDfGbyaB/YlsLv1y9jrRVZabCg0CGaw 
+2SOzAQNeIzo+R0Z+qTBkSRZSkjsVdDVNQGo/1/XHQT3oCtHTa/JQcdxjbG6jz9iX +ZY0WqkfVoM0u0CWqBBLq1DwxBGfgvH7LLrTQxmNPj13TwjYLBqYv8fWqwItuqXEM +9DOrllb9Wc5ckyGdCVrTUJXR/Xq9wFYbk9DIZlDKvM7ws0LWndTTPc8XxI2UZeYj +4/5iAYjR+wa4gYx2fb9NQqq4Xmb5SqFG7Xy6Ui0kWmqkJrJeiPF/2OCLk9UJWFc7 ++SYb7c68EeHRyXGH9XUJsfemOUNhaAW+rSBcmMPHNoZrw7NO6389mdO8MUfFRCxj +jb1yW7eg4ivgWyYa3EM9kACCLv5qysWa74DBs8G7JkP9IYlp+HulL67e99wgHirY +jW6omBuQhzz/EHrJl01CGKPYKDK0C6+0pc72ig/HuOxWbQVy/10d3RbIsTB2T2qu +yUEHN+KUhy/FB2STJQfUNbmxgJVBhB13JKpIu2QMyMwzdnkJ+uSzjTV67PreFboM +XvTIWRwvV3IzZLPz7c/hajD1diYitbgyzS1jquQ3IPi5PRYpNEN7hBSp3im1ApvP +XnZl473AgKP0CptpPDcE4hYmFY0EhGtbjCyTDUFooX5J+EfynTbfGn5f9QARAQAB tDhNYXJrdXMgT3R0ZWxhIChURkMgc2lnbmluZyBrZXkpIDxvb3R0ZWxhQGNzLmhl -bHNpbmtpLmZpPokCVAQTAQgAPhYhBOqPOLpnSyQumUSEXJgTcOlyWg+6BQJcSPSA -AhsDBQkB4TOABQsJCAcCBhUICQoLAgQWAgMBAh4BAheAAAoJEJgTcOlyWg+6L34P -/0c+32+Je9Sgt87a88CiND7oRCMznSSfNG0vaNu7+L2KxgHyM1WZ3QgCJdZdtqDX -CIATMpEAUXT+DPf/zmpSVn0TCY3/7LhRdxT7DuF6diyXVjGkiBLI66sqFv3mB19K -alc3GJmCq6AD63ubNofVcwfs1gH7aPI0a1szTN0eB1vxV/vRjiCbtD7W6QNcWuup -EjJCdZu4PEJyiFOIMucqGmYla9vVmFt/2t5cPGSwxYqEzFhmE100QcRHmzoOAztl -5rNnDHwte9/9MBYVKaR3Xg3id6zmAJrTzopjYNND2ULoXiOn9iwwGe6cmT2Hi9VR -bFK1rh7P8x9d/ZxojSG7QfNmrgJORSLUIczJlIECBoUmMJlb63H3TMgUQh5ui8mH -Z1YfAwodcO54ShmrNh1rRgtXvcrwwwlXf2IhYirZmophw9gIo6Rvo533m/RwGxuR -iRhteVRg9G5fCZtZhbrpmgaoKom+PyxX+a2IeN9DcnPYy2qYBER1DXdNenuo4S8W -DWgrs4T6Zc8AbSNroV2QKBY69LGMBR4LjRhtbdeJkv5f9BMzWM/cHbJfGOcFpxfD -FQ9S1Dj/eW4rGgmazljlQgdwr7wDj0rbzAR3Gs6bDBMn4UEXeqKADm6UMuFnXisq -ioBl7i5Wxnv66C5I4KgQtdOb5XszP9xU1hLuBKsIjFJa -=efYt +bHNpbmtpLmZpPokCVAQTAQgAPhYhBBN8KnVPqm27qM5k1fqwFcsiry+EBQJeNf3A +AhsDBQkDwmcABQsJCAcCBhUICQoLAgQWAgMBAh4BAheAAAoJEPqwFcsiry+E0uwQ +AKRpMlGgnyR6k0VxAOjNl0yPVqB87urZjPAShBiGPWjl1pWeezFMnpuVAzkxFkkj +GO8PyXZTZO5rRzQjs3DyP5kiT5nDf+4V9SdU4jHpPkrjL5fKwmkQQoyKD/l0iPcU +2OBYTKdkCop13/8h3emVE4FQMWZfHpgWCLm2t/czw/VQ+hBY+P5jRYj57wUOtmB6 +flNBotXd/+8iWTe/MrnVQ+Q28TWd9wvLH1JO2k32gkO8mrd+t2r+la7DUt9NXe90 +cToA6ThrJE4PA2PHqrgIqnZebpPa1teYTLAZIJY4sd9EKAfiMFAWuhu2EoQX7oFF +Ag5WZb5s1N4h/eXQucEGSoiEwH6bHYqRWv2J+ZcFmhXZ3EEbrbYNt1yfwx79yBPl +RfcRoGOUifKnJK/HeTiUvnhD8JgtNZQ920Em/pghXjZCFalJvm3cUQnUxMSGy/vi +gDNj20OCGSF55r7o349gENGc57zqwKKTlSR8fN0EuUX8dNIhjLyjZ/eopMlm5pWt +195DkxY1PBZwYSzSS8ngsoKe1Yi386juuoqxaeXq7pTPgcbX0D68UaYrDuQzes3G +s5Amn2CXKdh+wAj9wJriJu8WfgQOsghjI4cpsaFKtmmkz6+MAgCzkL+/naJappCD +w9USTnek5Mwbjd4+nqGpVDUwBAVjuDGucYPTJnqYrkmI +=hDRN -----END PGP PUBLIC KEY BLOCK----- diff --git a/relay.py b/relay.py index aeeb96c..1090898 100644 --- a/relay.py +++ b/relay.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
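A minimal, self-contained sketch of the multiprocessing pattern that the relay.py hunks below preserve: every worker process receives the same dictionary of queues keyed by byte constants, and the new account_checker and pub_key_checker workers are simply appended to the process list. The queue keys and worker functions here are hypothetical stand-ins, not TFC's actual constants.

from multiprocessing import Process, Queue
from typing import Dict

EXAMPLE_IN  = b'in'   # hypothetical queue keys, stand-ins for the statics constants
EXAMPLE_OUT = b'out'


def producer(queues):   # type: (Dict[bytes, Queue]) -> None
    queues[EXAMPLE_IN].put(b'datagram')


def forwarder(queues):  # type: (Dict[bytes, Queue]) -> None
    queues[EXAMPLE_OUT].put(queues[EXAMPLE_IN].get())


if __name__ == '__main__':
    queues = {EXAMPLE_IN:  Queue(),
              EXAMPLE_OUT: Queue()}  # type: Dict[bytes, Queue]

    process_list = [Process(target=producer,  args=(queues,)),
                    Process(target=forwarder, args=(queues,))]

    for p in process_list:
        p.start()

    print(queues[EXAMPLE_OUT].get())  # b'datagram'

    for p in process_list:
        p.join()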
@@ -23,41 +23,27 @@ import os import sys from multiprocessing import Process, Queue -from typing import Any, Dict +from typing import Any, Dict from cryptography.hazmat.primitives.asymmetric.x448 import X448PrivateKey -from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat +from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat from src.common.gateway import Gateway, gateway_loop -from src.common.misc import ensure_dir, monitor_processes, process_arguments -from src.common.output import print_title -from src.common.statics import ( - CONTACT_MGMT_QUEUE, - CONTACT_REQ_QUEUE, - C_REQ_MGMT_QUEUE, - C_REQ_STATE_QUEUE, - DIR_TFC, - DST_COMMAND_QUEUE, - DST_MESSAGE_QUEUE, - EXIT_QUEUE, - F_TO_FLASK_QUEUE, - GATEWAY_QUEUE, - GROUP_MGMT_QUEUE, - GROUP_MSG_QUEUE, - M_TO_FLASK_QUEUE, - NC, - ONION_CLOSE_QUEUE, - ONION_KEY_QUEUE, - SRC_TO_RELAY_QUEUE, - TOR_DATA_QUEUE, - URL_TOKEN_QUEUE, -) +from src.common.misc import ensure_dir, monitor_processes, process_arguments +from src.common.output import print_title +from src.common.statics import (ACCOUNT_CHECK_QUEUE, ACCOUNT_SEND_QUEUE, CONTACT_MGMT_QUEUE, CONTACT_REQ_QUEUE, + C_REQ_MGMT_QUEUE, C_REQ_STATE_QUEUE, DIR_TFC, DST_COMMAND_QUEUE, DST_MESSAGE_QUEUE, + EXIT_QUEUE, F_TO_FLASK_QUEUE, GATEWAY_QUEUE, GUI_INPUT_QUEUE, GROUP_MGMT_QUEUE, + GROUP_MSG_QUEUE, M_TO_FLASK_QUEUE, NC, ONION_CLOSE_QUEUE, PUB_KEY_CHECK_QUEUE, + PUB_KEY_SEND_QUEUE, ONION_KEY_QUEUE, SRC_TO_RELAY_QUEUE, TOR_DATA_QUEUE, + URL_TOKEN_QUEUE, USER_ACCOUNT_QUEUE) -from src.relay.client import c_req_manager, client_scheduler, g_msg_manager +from src.relay.client import c_req_manager, client_scheduler, g_msg_manager from src.relay.commands import relay_command -from src.relay.onion import onion_service -from src.relay.server import flask_server -from src.relay.tcb import dst_outgoing, src_incoming +from src.relay.diffs import account_checker, pub_key_checker +from src.relay.onion import onion_service +from src.relay.server import flask_server +from src.relay.tcb import dst_outgoing, src_incoming def main() -> None: @@ -160,43 +146,46 @@ def main() -> None: print_title(NC) url_token_private_key = X448PrivateKey.generate() - url_token_public_key = ( - url_token_private_key.public_key() - .public_bytes(encoding=Encoding.Raw, format=PublicFormat.Raw) - .hex() - ) # type: str + url_token_public_key = url_token_private_key.public_key().public_bytes(encoding=Encoding.Raw, + format=PublicFormat.Raw).hex() # type: str - queues = { - GATEWAY_QUEUE: Queue(), # All datagrams from `gateway_loop` to `src_incoming` - DST_MESSAGE_QUEUE: Queue(), # Message datagrams from `src_incoming`/`client` to `dst_outgoing` - M_TO_FLASK_QUEUE: Queue(), # Message/pubkey datagrams from `src_incoming` to `flask_server` - F_TO_FLASK_QUEUE: Queue(), # File datagrams from `src_incoming` to `flask_server` - SRC_TO_RELAY_QUEUE: Queue(), # Command datagrams from `src_incoming` to `relay_command` - DST_COMMAND_QUEUE: Queue(), # Command datagrams from `src_incoming` to `dst_outgoing` - CONTACT_MGMT_QUEUE: Queue(), # Contact management commands from `relay_command` to `client_scheduler` - C_REQ_STATE_QUEUE: Queue(), # Contact req. 
notify setting from `relay_command` to `c_req_manager` - URL_TOKEN_QUEUE: Queue(), # URL tokens from `client` to `flask_server` - GROUP_MSG_QUEUE: Queue(), # Group management messages from `client` to `g_msg_manager` - CONTACT_REQ_QUEUE: Queue(), # Contact requests from `flask_server` to `c_req_manager` - C_REQ_MGMT_QUEUE: Queue(), # Contact list management from `relay_command` to `c_req_manager` - GROUP_MGMT_QUEUE: Queue(), # Contact list management from `relay_command` to `g_msg_manager` - ONION_CLOSE_QUEUE: Queue(), # Onion Service close command from `relay_command` to `onion_service` - ONION_KEY_QUEUE: Queue(), # Onion Service private key from `relay_command` to `onion_service` - TOR_DATA_QUEUE: Queue(), # Open port for Tor from `onion_service` to `client_scheduler` - EXIT_QUEUE: Queue(), # EXIT/WIPE signal from `relay_command` to `main` - } # type: Dict[bytes, Queue[Any]] + queues = \ + {GATEWAY_QUEUE: Queue(), # All datagrams from `gateway_loop` to `src_incoming` + DST_MESSAGE_QUEUE: Queue(), # Message datagrams from `src_incoming`/`client` to `dst_outgoing` + M_TO_FLASK_QUEUE: Queue(), # Message/pubkey datagrams from `src_incoming` to `flask_server` + F_TO_FLASK_QUEUE: Queue(), # File datagrams from `src_incoming` to `flask_server` + SRC_TO_RELAY_QUEUE: Queue(), # Command datagrams from `src_incoming` to `relay_command` + DST_COMMAND_QUEUE: Queue(), # Command datagrams from `src_incoming` to `dst_outgoing` + CONTACT_MGMT_QUEUE: Queue(), # Contact management commands from `relay_command` to `client_scheduler` + C_REQ_STATE_QUEUE: Queue(), # Contact req. notify setting from `relay_command` to `c_req_manager` + URL_TOKEN_QUEUE: Queue(), # URL tokens from `client` to `flask_server` + GROUP_MSG_QUEUE: Queue(), # Group management messages from `client` to `g_msg_manager` + CONTACT_REQ_QUEUE: Queue(), # Contact requests from `flask_server` to `c_req_manager` + C_REQ_MGMT_QUEUE: Queue(), # Contact list management from `relay_command` to `c_req_manager` + GROUP_MGMT_QUEUE: Queue(), # Contact list management from `relay_command` to `g_msg_manager` + ONION_CLOSE_QUEUE: Queue(), # Onion Service close command from `relay_command` to `onion_service` + ONION_KEY_QUEUE: Queue(), # Onion Service private key from `relay_command` to `onion_service` + TOR_DATA_QUEUE: Queue(), # Open port for Tor from `onion_service` to `client_scheduler` + EXIT_QUEUE: Queue(), # EXIT/WIPE signal from `relay_command` to `main` + ACCOUNT_CHECK_QUEUE: Queue(), # Incorrectly typed accounts from `src_incomfing` to `account_checker` + ACCOUNT_SEND_QUEUE: Queue(), # Contact requests from `flask_server` to `account_checker` + USER_ACCOUNT_QUEUE: Queue(), # User's public key from `onion_service` to `account_checker` + PUB_KEY_CHECK_QUEUE: Queue(), # Typed public keys from `src_incoming` to `pub_key_checker` + PUB_KEY_SEND_QUEUE: Queue(), # Received public keys from `client` to `pub_key_checker` + GUI_INPUT_QUEUE: Queue() # User inputs from `GUI prompt` to `account_checker` + } # type: Dict[bytes, Queue[Any]] - process_list = [ - Process(target=gateway_loop, args=(queues, gateway)), - Process(target=src_incoming, args=(queues, gateway)), - Process(target=dst_outgoing, args=(queues, gateway)), - Process(target=client_scheduler, args=(queues, gateway, url_token_private_key)), - Process(target=g_msg_manager, args=(queues,)), - Process(target=c_req_manager, args=(queues,)), - Process(target=flask_server, args=(queues, url_token_public_key)), - Process(target=onion_service, args=(queues,)), - Process(target=relay_command, 
args=(queues, gateway, sys.stdin.fileno())), - ] + process_list = [Process(target=gateway_loop, args=(queues, gateway )), + Process(target=src_incoming, args=(queues, gateway )), + Process(target=dst_outgoing, args=(queues, gateway )), + Process(target=client_scheduler, args=(queues, gateway, url_token_private_key)), + Process(target=g_msg_manager, args=(queues, )), + Process(target=c_req_manager, args=(queues, )), + Process(target=flask_server, args=(queues, url_token_public_key )), + Process(target=onion_service, args=(queues, )), + Process(target=relay_command, args=(queues, gateway, )), + Process(target=account_checker, args=(queues, sys.stdin.fileno())), + Process(target=pub_key_checker, args=(queues, local_test ))] for p in process_list: p.start() @@ -204,5 +193,5 @@ def main() -> None: monitor_processes(process_list, NC, queues) -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/requirements-dev.txt b/requirements-dev.txt index 72f31c9..6d66178 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,10 +1,13 @@ # Static type checking tool -mypy>=0.750 +mypy>=0.761 +mypy_extensions>=0.4.3 +typed_ast>=1.4.1 +typing_extensions>=3.7.4.1 # Unit test tools -pytest>=5.2.1 +pytest>=5.3.5 pytest-cov>=2.8.1 -pytest-xdist>=1.30.0 +pytest-xdist>=1.31.0 # TFC dependencies (note: not authenticated with hashes) @@ -12,20 +15,20 @@ pytest-xdist>=1.30.0 pyserial>=3.4 # argon2_cffi -argon2_cffi>=19.1.0 -cffi>=1.13.1 +argon2_cffi>=19.2.0 +cffi>=1.13.2 pycparser>=2.19 -six>=1.12.0 +six>=1.14.0 # pyca/pynacl PyNaCl>=1.3.0 -setuptools>=42.0.2 +setuptools>=45.1.0 # pyca/cryptography cryptography>=2.8 # Stem -stem>=1.7.1 +stem>=1.8.0 # PySocks pysocks>=1.7.1 @@ -35,22 +38,20 @@ requests>=2.22.0 certifi>=2019.11.28 chardet>=3.0.4 idna>=2.8 -urllib3>=1.25.7 +urllib3>=1.25.8 # Flask flask>=1.1.1 click>=7.0 itsdangerous>=1.1.0 -jinja2>=2.10.3 +jinja2>=2.11.1 markupsafe>=1.1.1 -werkzeug>=0.16.0 +werkzeug>=0.16.1 -# Black -black>=19.10b0 -appdirs>=1.4.3 -attrs>=19.3.0 -Click>=7.0 -pathspec>=0.6.0 -regex>=2019.11.1 -toml>=0.10.0 -typed_ast>=1.4.0 +# PyLama +pylama>=7.7.1 +snowballstemmer>=2.0.0 +pyflakes>=2.1.1 +pydocstyle>=5.0.2 +pycodestyle>=2.5.0 +mccabe>=0.6.1 diff --git a/requirements-relay-tails.txt b/requirements-relay-tails.txt index c1096e8..afd1a27 100644 --- a/requirements-relay-tails.txt +++ b/requirements-relay-tails.txt @@ -4,7 +4,7 @@ pyserial==3.4 --hash=sha512:8333ac2843fd136d5d0d63b527b37866f7d18afc3bb33c4938b63af077492aeb118eb32a89ac78547f14d59a2adb1e5d00728728275de62317da48dadf6cdff9 # Stem (Connects to Tor and manages Onion Services) -# stem==1.7.1 --hash=sha512:a275f59bba650cb5bb151cf53fb1dd820334f9abbeae1a25e64502adc854c7f54c51bc3d6c1656b595d142fc0695ffad53aab3c57bc285421c1f4f10c9c3db4c +stem==1.8.0 --hash=sha512:aa2033567b79aef960f8321e4c6cbc28105c59d6513ff49a9f12509d8f97b1a2e8a3b04dc28abb07fad59b0f6ba66443b92bbefa0d08b26038bbaf24f7f2846d # PySocks (Routes requests library through SOCKS5 proxy making Onion Service connections possible) pysocks==1.7.1 --hash=sha512:313b954102231d038d52ab58f41e3642579be29f827135b8dd92c06acb362effcb0a7fd5f35de9273372b92d9fe29f38381ae44f8b41aa90d2564d6dd07ecd12 @@ -14,21 +14,21 @@ requests==2.22.0 --hash=sha512:9186ce4e39bb64f5931a205ffc9afac61657bc42078bc4 certifi==2019.11.28 --hash=sha512:fe5b05c29c1e1d9079150aaea28b09d84f0dd15907e276ccabb314433cfaac948a9615e10d6d01cbd537f99eed8072fbda7cb901e932fbab4f1286ae8c50471b chardet==3.0.4 
--hash=sha512:bfae58c8ea19c87cc9c9bf3d0b6146bfdb3630346bd954fe8e9f7da1f09da1fc0d6943ff04802798a665ea3b610ee2d65658ce84fe5a89f9e93625ea396a17f4 idna==2.8 --hash=sha512:fb07dbec1de86efbad82a4f73d98123c59b083c1f1277445204bef75de99ca200377ad2f1db8924ae79b31b3dd984891c87d0a6344ec4d07a0ddbbbc655821a3 -urllib3==1.25.7 --hash=sha512:f6a78508cb87050e176005a088118f8ad87b17cf541457d949e5712c356f8c4de7e7516ba066e5c4bb9ced5c7e7590ba7e07d4ae7fc7190487bf27f1bb9d0668 +urllib3==1.25.8 --hash=sha512:f7fd3b54b7c555c0e74eb445e543763d233b5c6f8021ccf46a45d452c334953276d43ecd8f3d0eafefa35103a7d1874e291216fc9a41362eb6f1250a2a670f16 # Flask (Onion Service web server that serves TFC public keys and ciphertexts to contacts) flask==1.1.1 --hash=sha512:bd49cb364307569480196289fa61fbb5493e46199620333f67617367278e1f56b20fc0d40fd540bef15642a8065e488c24e97f50535e8ec143875095157d8069 click==7.0 --hash=sha512:6b30987349df7c45c5f41cff9076ed45b178b444fca1ab1965f4ae33d1631522ce0a2868392c736666e83672b8b20e9503ae9ce5016dce3fa8f77bc8a3674130 itsdangerous==1.1.0 --hash=sha512:891c294867f705eb9c66274bd04ac5d93140d6e9beea6cbf9a44e7f9c13c0e2efa3554bdf56620712759a5cd579e112a782d25f3f91ba9419d60b2b4d2bc5b7c -jinja2==2.10.3 --hash=sha512:658d069944c81f9d8b2e90577a9d2c844b4c6a26764efefd7a86f26c05276baf6c7255f381e20e5178782be1786b7400cab12dec15653e7262b36194228bf649 +jinja2==2.11.1 --hash=sha512:461bbd517560f1c4dbf7309bdf0cf33b468938fddfa2c3385fab07343269732d8ce68d8827148645113267d48e7d67b03f1663cc64839dd1fcec723ea606aaf4 markupsafe==1.1.1 --hash=sha512:69e9b9c9ac4fdf3cfa1a3de23d14964b843989128f8cc6ea58617fc5d6ef937bcc3eae9cb32b5164b5f54b06f96bdff9bc249529f20671cc26adc9e6ce8f6bec -werkzeug==0.16.0 --hash=sha512:3905022d0c398856b30d2ed6bae046c1532e87f56a0a40060030c18124c6c9c98976d9429e2ab03676c4ce75be4ea915ffc2719e04e4b4912a96e498dcd9eb89 +werkzeug==0.16.1 --hash=sha512:4c982970fef39bf7cfbb4e516864fec0f8ec3f743ccb632d1659c6ee415597d98f4abd63b5c0fd999eb43fc0c89a97123f07625b01ea86b02ef51cb67a2b148d # Cryptography (Handles URL token derivation) cryptography==2.8 --hash=sha512:184003c89fee74892de25c3e5ec366faea7a5f1fcca3c82b0d5e5f9f797286671a820ca54da5266d6f879ab342c97e25bce9db366c5fb1178690cd5978d4d622 cffi==1.13.2 --hash=sha512:b8753a0435cc7a2176f8748badc074ec6ffab6698d6be42b1770c85871f85aa7cf60152a8be053c3031b234a286c5cef07267cb812accb704783d74a2675ed3b pycparser==2.19 --hash=sha512:7f830e1c9066ee2d297a55e2bf6db4bf6447b6d9da0145d11a88c3bb98505755fb7986eafa6e06ae0b7680838f5e5d6a6d188245ca5ad45c2a727587bac93ab5 -six==1.13.0 --hash=sha512:387d94f37a74e2d86ac0a41f482638dd9aec9e94215ffc50f314eb2f8e0cfc2f15afc3e508ea37a4fbcca7e4bcfc65efa1e5cab5f8094ccedc18bee2b0f2e3a8 +six==1.14.0 --hash=sha512:a6e7e35921ce8f2f8e79a296ea79a9c3515ff6dd7e777d7892fe4988594f1b3a442a68ffb89cf64530b90a32ceeea00e4ab9069bb697629ab4eb7262c68d1b0f # PyNaCl (Derives TFC account from Onion Service private key) PyNaCl==1.3.0 --hash=sha512:c4017c38b026a5c531b15839b8d61d1fae9907ba1960c2f97f4cd67fe0827729346d5186a6d6927ba84f64b4cbfdece12b287aa7750a039f4160831be871cea3 diff --git a/requirements-relay.txt b/requirements-relay.txt index 8079eff..4003200 100644 --- a/requirements-relay.txt +++ b/requirements-relay.txt @@ -4,7 +4,7 @@ pyserial==3.4 --hash=sha512:8333ac2843fd136d5d0d63b527b37866f7d18afc3bb33c4938b63af077492aeb118eb32a89ac78547f14d59a2adb1e5d00728728275de62317da48dadf6cdff9 # Stem (Connects to Tor and manages Onion Services) -stem==1.7.1 
--hash=sha512:a275f59bba650cb5bb151cf53fb1dd820334f9abbeae1a25e64502adc854c7f54c51bc3d6c1656b595d142fc0695ffad53aab3c57bc285421c1f4f10c9c3db4c +stem==1.8.0 --hash=sha512:aa2033567b79aef960f8321e4c6cbc28105c59d6513ff49a9f12509d8f97b1a2e8a3b04dc28abb07fad59b0f6ba66443b92bbefa0d08b26038bbaf24f7f2846d # PySocks (Routes requests library through SOCKS5 proxy making Onion Service connections possible) pysocks==1.7.1 --hash=sha512:313b954102231d038d52ab58f41e3642579be29f827135b8dd92c06acb362effcb0a7fd5f35de9273372b92d9fe29f38381ae44f8b41aa90d2564d6dd07ecd12 @@ -14,23 +14,23 @@ requests==2.22.0 --hash=sha512:9186ce4e39bb64f5931a205ffc9afac61657bc42078bc4 certifi==2019.11.28 --hash=sha512:fe5b05c29c1e1d9079150aaea28b09d84f0dd15907e276ccabb314433cfaac948a9615e10d6d01cbd537f99eed8072fbda7cb901e932fbab4f1286ae8c50471b chardet==3.0.4 --hash=sha512:bfae58c8ea19c87cc9c9bf3d0b6146bfdb3630346bd954fe8e9f7da1f09da1fc0d6943ff04802798a665ea3b610ee2d65658ce84fe5a89f9e93625ea396a17f4 idna==2.8 --hash=sha512:fb07dbec1de86efbad82a4f73d98123c59b083c1f1277445204bef75de99ca200377ad2f1db8924ae79b31b3dd984891c87d0a6344ec4d07a0ddbbbc655821a3 -urllib3==1.25.7 --hash=sha512:f6a78508cb87050e176005a088118f8ad87b17cf541457d949e5712c356f8c4de7e7516ba066e5c4bb9ced5c7e7590ba7e07d4ae7fc7190487bf27f1bb9d0668 +urllib3==1.25.8 --hash=sha512:f7fd3b54b7c555c0e74eb445e543763d233b5c6f8021ccf46a45d452c334953276d43ecd8f3d0eafefa35103a7d1874e291216fc9a41362eb6f1250a2a670f16 # Flask (Onion Service web server that serves TFC public keys and ciphertexts to contacts) flask==1.1.1 --hash=sha512:bd49cb364307569480196289fa61fbb5493e46199620333f67617367278e1f56b20fc0d40fd540bef15642a8065e488c24e97f50535e8ec143875095157d8069 click==7.0 --hash=sha512:6b30987349df7c45c5f41cff9076ed45b178b444fca1ab1965f4ae33d1631522ce0a2868392c736666e83672b8b20e9503ae9ce5016dce3fa8f77bc8a3674130 itsdangerous==1.1.0 --hash=sha512:891c294867f705eb9c66274bd04ac5d93140d6e9beea6cbf9a44e7f9c13c0e2efa3554bdf56620712759a5cd579e112a782d25f3f91ba9419d60b2b4d2bc5b7c -jinja2==2.10.3 --hash=sha512:658d069944c81f9d8b2e90577a9d2c844b4c6a26764efefd7a86f26c05276baf6c7255f381e20e5178782be1786b7400cab12dec15653e7262b36194228bf649 +jinja2==2.11.1 --hash=sha512:461bbd517560f1c4dbf7309bdf0cf33b468938fddfa2c3385fab07343269732d8ce68d8827148645113267d48e7d67b03f1663cc64839dd1fcec723ea606aaf4 markupsafe==1.1.1 --hash=sha512:69e9b9c9ac4fdf3cfa1a3de23d14964b843989128f8cc6ea58617fc5d6ef937bcc3eae9cb32b5164b5f54b06f96bdff9bc249529f20671cc26adc9e6ce8f6bec -werkzeug==0.16.0 --hash=sha512:3905022d0c398856b30d2ed6bae046c1532e87f56a0a40060030c18124c6c9c98976d9429e2ab03676c4ce75be4ea915ffc2719e04e4b4912a96e498dcd9eb89 +werkzeug==0.16.1 --hash=sha512:4c982970fef39bf7cfbb4e516864fec0f8ec3f743ccb632d1659c6ee415597d98f4abd63b5c0fd999eb43fc0c89a97123f07625b01ea86b02ef51cb67a2b148d # Cryptography (Handles URL token derivation) cryptography==2.8 --hash=sha512:184003c89fee74892de25c3e5ec366faea7a5f1fcca3c82b0d5e5f9f797286671a820ca54da5266d6f879ab342c97e25bce9db366c5fb1178690cd5978d4d622 cffi==1.13.2 --hash=sha512:b8753a0435cc7a2176f8748badc074ec6ffab6698d6be42b1770c85871f85aa7cf60152a8be053c3031b234a286c5cef07267cb812accb704783d74a2675ed3b pycparser==2.19 --hash=sha512:7f830e1c9066ee2d297a55e2bf6db4bf6447b6d9da0145d11a88c3bb98505755fb7986eafa6e06ae0b7680838f5e5d6a6d188245ca5ad45c2a727587bac93ab5 -six==1.13.0 --hash=sha512:387d94f37a74e2d86ac0a41f482638dd9aec9e94215ffc50f314eb2f8e0cfc2f15afc3e508ea37a4fbcca7e4bcfc65efa1e5cab5f8094ccedc18bee2b0f2e3a8 +six==1.14.0 
--hash=sha512:a6e7e35921ce8f2f8e79a296ea79a9c3515ff6dd7e777d7892fe4988594f1b3a442a68ffb89cf64530b90a32ceeea00e4ab9069bb697629ab4eb7262c68d1b0f # PyNaCl (Derives TFC account from Onion Service private key) PyNaCl==1.3.0 --hash=sha512:c4017c38b026a5c531b15839b8d61d1fae9907ba1960c2f97f4cd67fe0827729346d5186a6d6927ba84f64b4cbfdece12b287aa7750a039f4160831be871cea3 -setuptools==42.0.2 --hash=sha512:dd18da86ba566a7abde86890f6fa7c5a4dee34970927ef883a07a44ca8992713f9c2c4c87538d18d7bbf19073f1cc7887b150474375f24a0938cef5db097c841 +setuptools==45.1.0 --hash=sha512:761e4c8df239b8d173513b08959b387c1059e3e023ba6b3f6250fade518d6ef29f287ab90dd35d02bb681b410a050b30b2ed44849638b6f98831f4290a4ccd15 # Duplicate sub-dependencies: cffi, pycparser, six diff --git a/requirements-setuptools.txt b/requirements-setuptools.txt index 6f013ff..8573219 100644 --- a/requirements-setuptools.txt +++ b/requirements-setuptools.txt @@ -1,2 +1,2 @@ # Setuptools (Allows installation of pycparser which is a sub-dependency of the cryptography and PyNaCl packages) -setuptools==42.0.2 --hash=sha512:dd18da86ba566a7abde86890f6fa7c5a4dee34970927ef883a07a44ca8992713f9c2c4c87538d18d7bbf19073f1cc7887b150474375f24a0938cef5db097c841 +setuptools==45.1.0 --hash=sha512:761e4c8df239b8d173513b08959b387c1059e3e023ba6b3f6250fade518d6ef29f287ab90dd35d02bb681b410a050b30b2ed44849638b6f98831f4290a4ccd15 diff --git a/requirements-venv.txt b/requirements-venv.txt index 28e1c76..437c80d 100644 --- a/requirements-venv.txt +++ b/requirements-venv.txt @@ -1,2 +1,2 @@ # Virtual environment (Used to create an isolated Python environment for TFC dependencies) -virtualenv==16.7.8 --hash=sha512:4483bdd81d63cc38e0003cd3cba995f3e21d506e2f6a64bc98a673f1ef5ccd56e8e1109ec049c9394a538b879ea47dbafa0c575cdc02eedb1b9172e8fc045ca6 +virtualenv==16.7.9 --hash=sha512:f4e7148f1de50fa2e69061e72db211085fc2f44007de4d18ee02a20d34bca30a00d2fe56ff6f3132e696c3f6efd4151863f26dac4c1d43e87b597c47a51c52ad diff --git a/requirements.txt b/requirements.txt index 5947846..58c1e3c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,11 +7,11 @@ pyserial==3.4 --hash=sha512:8333ac2843fd136d5d0d63b527b37866f7d18afc3bb33c argon2_cffi==19.2.0 --hash=sha512:91c4afc2d0cac14cf4342f198f68afd6477dc5bdf2683476c6f8e253de7b3bdc83b229ce96d0280f656ff33667ab9902c92741b82faee8d8892307cde6199845 cffi==1.13.2 --hash=sha512:b8753a0435cc7a2176f8748badc074ec6ffab6698d6be42b1770c85871f85aa7cf60152a8be053c3031b234a286c5cef07267cb812accb704783d74a2675ed3b pycparser==2.19 --hash=sha512:7f830e1c9066ee2d297a55e2bf6db4bf6447b6d9da0145d11a88c3bb98505755fb7986eafa6e06ae0b7680838f5e5d6a6d188245ca5ad45c2a727587bac93ab5 -six==1.13.0 --hash=sha512:387d94f37a74e2d86ac0a41f482638dd9aec9e94215ffc50f314eb2f8e0cfc2f15afc3e508ea37a4fbcca7e4bcfc65efa1e5cab5f8094ccedc18bee2b0f2e3a8 +six==1.14.0 --hash=sha512:a6e7e35921ce8f2f8e79a296ea79a9c3515ff6dd7e777d7892fe4988594f1b3a442a68ffb89cf64530b90a32ceeea00e4ab9069bb697629ab4eb7262c68d1b0f # PyNaCl (Handles TCB-side XChaCha20-Poly1305 symmetric encryption) PyNaCl==1.3.0 --hash=sha512:c4017c38b026a5c531b15839b8d61d1fae9907ba1960c2f97f4cd67fe0827729346d5186a6d6927ba84f64b4cbfdece12b287aa7750a039f4160831be871cea3 -setuptools==42.0.2 --hash=sha512:dd18da86ba566a7abde86890f6fa7c5a4dee34970927ef883a07a44ca8992713f9c2c4c87538d18d7bbf19073f1cc7887b150474375f24a0938cef5db097c841 +setuptools==45.1.0 --hash=sha512:761e4c8df239b8d173513b08959b387c1059e3e023ba6b3f6250fade518d6ef29f287ab90dd35d02bb681b410a050b30b2ed44849638b6f98831f4290a4ccd15 # Duplicate sub-dependencies: cffi, pycparser, six # 
Cryptography (Handles TCB-side X448 key exchange) diff --git a/src/__init__.py b/src/__init__.py index 6eb560e..833769a 100755 --- a/src/__init__.py +++ b/src/__init__.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. diff --git a/src/common/__init__.py b/src/common/__init__.py index 6eb560e..833769a 100755 --- a/src/common/__init__.py +++ b/src/common/__init__.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. diff --git a/src/common/crypto.py b/src/common/crypto.py index 8d175fc..518776d 100755 --- a/src/common/crypto.py +++ b/src/common/crypto.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -44,37 +44,24 @@ import nacl.utils from typing import Tuple -from cryptography.hazmat.primitives import padding +from cryptography.hazmat.primitives import padding from cryptography.hazmat.primitives.asymmetric.x448 import X448PrivateKey, X448PublicKey -from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat +from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat from src.common.exceptions import CriticalError -from src.common.misc import separate_header -from src.common.statics import ( - ARGON2_SALT_LENGTH, - BITS_PER_BYTE, - BLAKE2_DIGEST_LENGTH, - BLAKE2_DIGEST_LENGTH_MAX, - BLAKE2_DIGEST_LENGTH_MIN, - FINGERPRINT, - FINGERPRINT_LENGTH, - MESSAGE_KEY, - HEADER_KEY, - PADDING_LENGTH, - SYMMETRIC_KEY_LENGTH, - TFC_PUBLIC_KEY_LENGTH, - X448_SHARED_SECRET_LENGTH, - XCHACHA20_NONCE_LENGTH, -) +from src.common.misc import separate_header +from src.common.statics import (ARGON2_SALT_LENGTH, BITS_PER_BYTE, BLAKE2_DIGEST_LENGTH, BLAKE2_DIGEST_LENGTH_MAX, + BLAKE2_DIGEST_LENGTH_MIN, FINGERPRINT, FINGERPRINT_LENGTH, MESSAGE_KEY, HEADER_KEY, + PADDING_LENGTH, SYMMETRIC_KEY_LENGTH, TFC_PUBLIC_KEY_LENGTH, + X448_SHARED_SECRET_LENGTH, XCHACHA20_NONCE_LENGTH) -def blake2b( - message: bytes, # Message to hash - key: bytes = b"", # Key for keyed hashing - salt: bytes = b"", # Salt for randomized hashing - person: bytes = b"", # Personalization string - digest_size: int = BLAKE2_DIGEST_LENGTH, # Length of the digest -) -> bytes: # The BLAKE2b digest +def blake2b(message: bytes, # Message to hash + key: bytes = b'', # Key for keyed hashing + salt: bytes = b'', # Salt for randomized hashing + person: bytes = b'', # Personalization string + digest_size: int = BLAKE2_DIGEST_LENGTH # Length of the digest + ) -> bytes: # The BLAKE2b digest """Generate BLAKE2b digest (i.e. cryptographic hash) of a message. BLAKE2 is the successor of SHA3-finalist BLAKE*, designed by @@ -134,16 +121,16 @@ def blake2b( https://github.com/python/cpython/blob/3.7/Lib/hashlib.py """ try: - digest = hashlib.blake2b( - message, digest_size=digest_size, key=key, salt=salt, person=person - ).digest() # type: bytes + digest = hashlib.blake2b(message, + digest_size=digest_size, + key=key, + salt=salt, + person=person).digest() # type: bytes except ValueError as e: raise CriticalError(str(e)) if not isinstance(digest, bytes): - raise CriticalError( - f"BLAKE2b returned an invalid type ({type(digest)}) digest." 
- ) + raise CriticalError(f"BLAKE2b returned an invalid type ({type(digest)}) digest.") if len(digest) != digest_size: raise CriticalError(f"BLAKE2b digest had invalid length ({len(digest)} bytes).") @@ -151,13 +138,12 @@ def blake2b( return digest -def argon2_kdf( - password: str, # Password to derive the key from - salt: bytes, # Salt to derive the key from - time_cost: int, # Number of iterations - memory_cost: int, # Amount of memory to use (in bytes) - parallelism: int, # Number of threads to use -) -> bytes: # The derived key +def argon2_kdf(password: str, # Password to derive the key from + salt: bytes, # Salt to derive the key from + time_cost: int, # Number of iterations + memory_cost: int, # Amount of memory to use (in bytes) + parallelism: int # Number of threads to use + ) -> bytes: # The derived key """Derive an encryption key from password and salt using Argon2id. Argon2 is a password hashing function designed by Alex Biryukov, @@ -232,15 +218,13 @@ def argon2_kdf( raise CriticalError(f"Invalid salt length ({len(salt)} bytes).") try: - key = argon2.low_level.hash_secret_raw( - secret=password.encode(), - salt=salt, - time_cost=time_cost, - memory_cost=memory_cost, - parallelism=parallelism, - hash_len=SYMMETRIC_KEY_LENGTH, - type=argon2.Type.ID, - ) # type: bytes + key = argon2.low_level.hash_secret_raw(secret=password.encode(), + salt=salt, + time_cost=time_cost, + memory_cost=memory_cost, + parallelism=parallelism, + hash_len=SYMMETRIC_KEY_LENGTH, + type=argon2.Type.ID) # type: bytes except argon2.exceptions.Argon2Error as e: raise CriticalError(str(e)) @@ -249,9 +233,7 @@ def argon2_kdf( raise CriticalError(f"Argon2 returned an invalid type ({type(key)}) key.") if len(key) != SYMMETRIC_KEY_LENGTH: - raise CriticalError( - f"Derived an invalid length key from password ({len(key)} bytes)." - ) + raise CriticalError(f"Derived an invalid length key from password ({len(key)} bytes).") return key @@ -336,7 +318,7 @@ class X448(object): """ @staticmethod - def generate_private_key() -> "X448PrivateKey": + def generate_private_key() -> 'X448PrivateKey': """Generate the X448 private key. The pyca/cryptography's key generation process is as follows: @@ -353,13 +335,13 @@ class X448(object): OpenSSL CSPRNG, and activates the pyca/cryptography "OS random engine".[5] - 4. Unlike OpenSSL user-space CSPRNG that only seeds from - /dev/urandom, the OS random engine uses GETRANDOM(0) syscall - that sources all of its entropy directly from the ChaCha20 - DRNG. The OS random engine does not suffer from the fork() - weakness where forked process is not automatically reseeded, - and it's also safe from issues with OpenSSL CSPRNG - initialization.[6] + 4. Unlike the OpenSSL user-space CSPRNG that only seeds from + /dev/urandom, the OS random engine uses the GETRANDOM(0) + syscall that sources all of its entropy directly from the + LRNG's ChaCha20 DRNG. The OS random engine does not suffer + from the fork() weakness where forked process is not + automatically reseeded, and it's also safe from issues with + OpenSSL CSPRNG initialization.[6] 5. 
The fallback option (/dev/urandom) of OS random engine might be problematic on pre-3.17 kernels if the CSPRNG has not been @@ -385,26 +367,21 @@ class X448(object): return X448PrivateKey.generate() @staticmethod - def derive_public_key(private_key: "X448PrivateKey") -> bytes: + def derive_public_key(private_key: 'X448PrivateKey') -> bytes: """Derive public key from an X448 private key.""" - public_key = private_key.public_key().public_bytes( - encoding=Encoding.Raw, format=PublicFormat.Raw - ) # type: bytes + public_key = private_key.public_key().public_bytes(encoding=Encoding.Raw, + format=PublicFormat.Raw) # type: bytes if not isinstance(public_key, bytes): - raise CriticalError( - f"Generated an invalid type ({type(public_key)}) public key." - ) + raise CriticalError(f"Generated an invalid type ({type(public_key)}) public key.") if len(public_key) != TFC_PUBLIC_KEY_LENGTH: - raise CriticalError( - f"Generated an invalid size public key from private key ({len(public_key)} bytes)." - ) + raise CriticalError(f"Generated an invalid size public key from private key ({len(public_key)} bytes).") return public_key @staticmethod - def shared_key(private_key: "X448PrivateKey", public_key: bytes) -> bytes: + def shared_key(private_key: 'X448PrivateKey', public_key: bytes) -> bytes: """Derive the X448 shared key. The pyca/cryptography library validates the length of the public @@ -429,28 +406,23 @@ class X448(object): extract unidirectional message/header keys and fingerprints. """ try: - shared_secret = private_key.exchange( - X448PublicKey.from_public_bytes(public_key) - ) # type: bytes + shared_secret = private_key.exchange(X448PublicKey.from_public_bytes(public_key)) # type: bytes except ValueError as e: raise CriticalError(str(e)) if not isinstance(shared_secret, bytes): # pragma: no cover - raise CriticalError( - f"Derived an invalid type ({type(shared_secret)}) shared secret." - ) + raise CriticalError(f"Derived an invalid type ({type(shared_secret)}) shared secret.") if len(shared_secret) != X448_SHARED_SECRET_LENGTH: # pragma: no cover - raise CriticalError( - f"Generated an invalid size shared secret ({len(shared_secret)} bytes)." - ) + raise CriticalError(f"Generated an invalid size shared secret ({len(shared_secret)} bytes).") return blake2b(shared_secret, digest_size=SYMMETRIC_KEY_LENGTH) @staticmethod - def derive_keys( - dh_shared_key: bytes, tfc_public_key_user: bytes, tfc_public_key_contact: bytes - ) -> Tuple[bytes, bytes, bytes, bytes, bytes, bytes]: + def derive_keys(dh_shared_key: bytes, + tfc_public_key_user: bytes, + tfc_public_key_contact: bytes + ) -> Tuple[bytes, bytes, bytes, bytes, bytes, bytes]: """Create domain separated message and header keys and fingerprints from shared key. Domain separate unidirectional keys from shared key by using public @@ -467,58 +439,27 @@ class X448(object): context variable ensures fingerprints are distinct from derived message and header keys. 
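# A minimal, self-contained sketch of the X448 exchange and BLAKE2b domain separation
# described above: both sides derive the same shared secret, compress it to 32 bytes,
# and then pull independent keys out of it by changing only the `person` parameter.
# The personalization strings below are illustrative stand-ins for TFC's MESSAGE_KEY
# and HEADER_KEY constants.
import hashlib

from cryptography.hazmat.primitives.asymmetric.x448 import X448PrivateKey
from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat

sk_user, sk_contact = X448PrivateKey.generate(), X448PrivateKey.generate()
pk_contact = sk_contact.public_key().public_bytes(encoding=Encoding.Raw, format=PublicFormat.Raw)

shared_secret = sk_user.exchange(sk_contact.public_key())                # 56-byte raw secret
assert sk_contact.exchange(sk_user.public_key()) == shared_secret        # both sides agree
dh_shared_key = hashlib.blake2b(shared_secret, digest_size=32).digest()  # compressed to 32 bytes

tx_mk = hashlib.blake2b(pk_contact, key=dh_shared_key, person=b'message_key', digest_size=32).digest()
tx_hk = hashlib.blake2b(pk_contact, key=dh_shared_key, person=b'header_key',  digest_size=32).digest()
assert tx_mk != tx_hk  # same inputs, different domains -> unrelated keys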
""" - tx_mk = blake2b( - tfc_public_key_contact, - dh_shared_key, - person=MESSAGE_KEY, - digest_size=SYMMETRIC_KEY_LENGTH, - ) - rx_mk = blake2b( - tfc_public_key_user, - dh_shared_key, - person=MESSAGE_KEY, - digest_size=SYMMETRIC_KEY_LENGTH, - ) + tx_mk = blake2b(tfc_public_key_contact, dh_shared_key, person=MESSAGE_KEY, digest_size=SYMMETRIC_KEY_LENGTH) + rx_mk = blake2b(tfc_public_key_user, dh_shared_key, person=MESSAGE_KEY, digest_size=SYMMETRIC_KEY_LENGTH) - tx_hk = blake2b( - tfc_public_key_contact, - dh_shared_key, - person=HEADER_KEY, - digest_size=SYMMETRIC_KEY_LENGTH, - ) - rx_hk = blake2b( - tfc_public_key_user, - dh_shared_key, - person=HEADER_KEY, - digest_size=SYMMETRIC_KEY_LENGTH, - ) + tx_hk = blake2b(tfc_public_key_contact, dh_shared_key, person=HEADER_KEY, digest_size=SYMMETRIC_KEY_LENGTH) + rx_hk = blake2b(tfc_public_key_user, dh_shared_key, person=HEADER_KEY, digest_size=SYMMETRIC_KEY_LENGTH) - tx_fp = blake2b( - tfc_public_key_user, - dh_shared_key, - person=FINGERPRINT, - digest_size=FINGERPRINT_LENGTH, - ) - rx_fp = blake2b( - tfc_public_key_contact, - dh_shared_key, - person=FINGERPRINT, - digest_size=FINGERPRINT_LENGTH, - ) + tx_fp = blake2b(tfc_public_key_user, dh_shared_key, person=FINGERPRINT, digest_size=FINGERPRINT_LENGTH) + rx_fp = blake2b(tfc_public_key_contact, dh_shared_key, person=FINGERPRINT, digest_size=FINGERPRINT_LENGTH) key_tuple = tx_mk, rx_mk, tx_hk, rx_hk, tx_fp, rx_fp - if len(key_tuple) != len(set(key_tuple)): + if len(set(key_tuple)) != len(key_tuple): raise CriticalError("Derived keys were not unique.") return key_tuple -def encrypt_and_sign( - plaintext: bytes, # Plaintext to encrypt - key: bytes, # 32-byte symmetric key - ad: bytes = b"", # Associated data -) -> bytes: # Nonce + ciphertext + tag +def encrypt_and_sign(plaintext: bytes, # Plaintext to encrypt + key: bytes, # 32-byte symmetric key + ad: bytes = b'' # Associated data + ) -> bytes: # Nonce + ciphertext + tag """Encrypt plaintext with XChaCha20-Poly1305 (IETF variant). ChaCha20 is a stream cipher published by Daniel J. Bernstein (djb) @@ -597,21 +538,18 @@ def encrypt_and_sign( nonce = csprng(XCHACHA20_NONCE_LENGTH) try: - ct_tag = nacl.bindings.crypto_aead_xchacha20poly1305_ietf_encrypt( - plaintext, ad, nonce, key - ) # type: bytes + ct_tag = nacl.bindings.crypto_aead_xchacha20poly1305_ietf_encrypt(plaintext, ad, nonce, key) # type: bytes except nacl.exceptions.CryptoError as e: raise CriticalError(str(e)) return nonce + ct_tag -def auth_and_decrypt( - nonce_ct_tag: bytes, # Nonce + ciphertext + tag - key: bytes, # 32-byte symmetric key - database: str = "", # When provided, gracefully exits TFC when the tag is invalid - ad: bytes = b"", # Associated data -) -> bytes: # Plaintext +def auth_and_decrypt(nonce_ct_tag: bytes, # Nonce + ciphertext + tag + key: bytes, # 32-byte symmetric key + database: str = '', # When provided, gracefully exits TFC when the tag is invalid + ad: bytes = b'' # Associated data + ) -> bytes: # Plaintext """Authenticate and decrypt XChaCha20-Poly1305 ciphertext. 
The Poly1305 tag is checked using constant time `sodium_memcmp`: @@ -636,21 +574,16 @@ def auth_and_decrypt( nonce, ct_tag = separate_header(nonce_ct_tag, XCHACHA20_NONCE_LENGTH) try: - plaintext = nacl.bindings.crypto_aead_xchacha20poly1305_ietf_decrypt( - ct_tag, ad, nonce, key - ) # type: bytes + plaintext = nacl.bindings.crypto_aead_xchacha20poly1305_ietf_decrypt(ct_tag, ad, nonce, key) # type: bytes return plaintext except nacl.exceptions.CryptoError: if database: - raise CriticalError( - f"Authentication of data in database '{database}' failed." - ) + raise CriticalError(f"Authentication of data in database '{database}' failed.") raise -def byte_padding( - bytestring: bytes, # Bytestring to be padded -) -> bytes: # Padded bytestring +def byte_padding(bytestring: bytes # Bytestring to be padded + ) -> bytes: # Padded bytestring """Pad bytestring to next 255 bytes. TFC adds padding to messages it outputs. The padding ensures each @@ -672,8 +605,8 @@ def byte_padding( For a better explanation, see https://en.wikipedia.org/wiki/Padding_(cryptography)#PKCS#5_and_PKCS#7 """ - padder = padding.PKCS7(PADDING_LENGTH * BITS_PER_BYTE).padder() - padded = padder.update(bytestring) # type: bytes + padder = padding.PKCS7(PADDING_LENGTH * BITS_PER_BYTE).padder() + padded = padder.update(bytestring) # type: bytes padded += padder.finalize() if not isinstance(padded, bytes): @@ -685,24 +618,22 @@ def byte_padding( return padded -def rm_padding_bytes( - bytestring: bytes, # Padded bytestring -) -> bytes: # Bytestring without padding +def rm_padding_bytes(bytestring: bytes # Padded bytestring + ) -> bytes: # Bytestring without padding """Remove padding from plaintext. The length of padding is determined by the ord-value of the last byte that is always part of the padding. """ - unpadder = padding.PKCS7(PADDING_LENGTH * BITS_PER_BYTE).unpadder() - unpadded = unpadder.update(bytestring) # type: bytes + unpadder = padding.PKCS7(PADDING_LENGTH * BITS_PER_BYTE).unpadder() + unpadded = unpadder.update(bytestring) # type: bytes unpadded += unpadder.finalize() return unpadded -def csprng( - key_length: int = SYMMETRIC_KEY_LENGTH, # Length of the key -) -> bytes: # The generated key +def csprng(key_length: int = SYMMETRIC_KEY_LENGTH # Length of the key + ) -> bytes: # The generated key """Generate a cryptographically secure random key. The default key length is 32 bytes (256 bits). @@ -1137,9 +1068,7 @@ def csprng( raise CriticalError(f"GETRANDOM returned invalid type data ({type(entropy)}).") if len(entropy) != key_length: - raise CriticalError( - f"GETRANDOM returned invalid amount of entropy ({len(entropy)} bytes)." 
- ) + raise CriticalError(f"GETRANDOM returned invalid amount of entropy ({len(entropy)} bytes).") compressed = blake2b(entropy, digest_size=key_length) @@ -1161,7 +1090,7 @@ def check_kernel_version() -> None: [1] https://lkml.org/lkml/2016/7/25/43 [2] https://www.bsi.bund.de/SharedDocs/Downloads/EN/BSI/Publications/Studies/LinuxRNG/LinuxRNG_EN.pdf """ - major_v, minor_v = [int(i) for i in os.uname()[2].split(".")[:2]] # type: int, int + major_v, minor_v = [int(i) for i in os.uname()[2].split('.')[:2]] # type: int, int if major_v < 4 or (major_v == 4 and minor_v < 17): raise CriticalError("Insecure kernel CSPRNG version detected.") diff --git a/src/common/database.py b/src/common/database.py index 06a6f2b..5074cd2 100644 --- a/src/common/database.py +++ b/src/common/database.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -27,15 +27,10 @@ from typing import Iterator import nacl.exceptions -from src.common.crypto import auth_and_decrypt, blake2b, encrypt_and_sign +from src.common.crypto import auth_and_decrypt, blake2b, encrypt_and_sign from src.common.exceptions import CriticalError -from src.common.misc import ensure_dir, separate_trailer -from src.common.statics import ( - BLAKE2_DIGEST_LENGTH, - DB_WRITE_RETRY_LIMIT, - DIR_USER_DATA, - TEMP_POSTFIX, -) +from src.common.misc import ensure_dir, separate_trailer +from src.common.statics import BLAKE2_DIGEST_LENGTH, DB_WRITE_RETRY_LIMIT, DIR_USER_DATA, TEMP_POSTFIX if typing.TYPE_CHECKING: from src.common.db_masterkey import MasterKey @@ -47,16 +42,16 @@ class TFCDatabase(object): as atomicity to ensure database writing always succeeds or fails. """ - def __init__(self, database_name: str, master_key: "MasterKey") -> None: + def __init__(self, database_name: str, master_key: 'MasterKey') -> None: """Initialize TFC database.""" self.database_name = database_name self.database_temp = database_name + TEMP_POSTFIX - self.database_key = master_key.master_key + self.database_key = master_key.master_key @staticmethod def write_to_file(file_name: str, data: bytes) -> None: """Write data to file.""" - with open(file_name, "wb+") as f: + with open(file_name, 'wb+') as f: f.write(data) # Write data from program buffer to operating system buffer. @@ -70,7 +65,7 @@ class TFCDatabase(object): def verify_file(self, database_name: str) -> bool: """Verify integrity of file content.""" - with open(database_name, "rb") as f: + with open(database_name, 'rb') as f: purp_data = f.read() try: @@ -87,9 +82,7 @@ class TFCDatabase(object): while not self.verify_file(self.database_temp): retries += 1 if retries >= DB_WRITE_RETRY_LIMIT: - raise CriticalError( - f"Writing to database '{self.database_temp}' failed after {retries} retries." - ) + raise CriticalError(f"Writing to database '{self.database_temp}' failed after {retries} retries.") self.write_to_file(self.database_temp, ct_bytes) @@ -99,8 +92,8 @@ class TFCDatabase(object): ensure_dir(DIR_USER_DATA) self.ensure_temp_write(ct_bytes) - # Replace original file with temp file. (`os.replace` is atomic as per POSIX - # requirements): https://docs.python.org/3/library/os.html#os.replace + # Replace the original file with a temp file. 
(`os.replace` is atomic as per + # POSIX requirements): https://docs.python.org/3/library/os.html#os.replace if replace: self.replace_database() @@ -127,12 +120,10 @@ class TFCDatabase(object): # we delete it and continue using the old file to ensure atomicity. os.remove(self.database_temp) - with open(self.database_name, "rb") as f: + with open(self.database_name, 'rb') as f: database_data = f.read() - return auth_and_decrypt( - database_data, self.database_key, database=self.database_name - ) + return auth_and_decrypt(database_data, self.database_key, database=self.database_name) class TFCUnencryptedDatabase(object): @@ -148,7 +139,7 @@ class TFCUnencryptedDatabase(object): @staticmethod def write_to_file(file_name: str, data: bytes) -> None: """Write data to file.""" - with open(file_name, "wb+") as f: + with open(file_name, 'wb+') as f: f.write(data) f.flush() os.fsync(f.fileno()) @@ -156,7 +147,7 @@ class TFCUnencryptedDatabase(object): @staticmethod def verify_file(database_name: str) -> bool: """Verify integrity of file content.""" - with open(database_name, "rb") as f: + with open(database_name, 'rb') as f: file_data = f.read() purp_data, digest = separate_trailer(file_data, BLAKE2_DIGEST_LENGTH) @@ -171,9 +162,7 @@ class TFCUnencryptedDatabase(object): while not self.verify_file(self.database_temp): retries += 1 if retries >= DB_WRITE_RETRY_LIMIT: - raise CriticalError( - f"Writing to database '{self.database_temp}' failed after {retries} retries." - ) + raise CriticalError(f"Writing to database '{self.database_temp}' failed after {retries} retries.") self.write_to_file(self.database_temp, data) @@ -187,8 +176,8 @@ class TFCUnencryptedDatabase(object): self.ensure_temp_write(data + blake2b(data)) - # Replace original file with temp file. (`os.replace` is atomic as per POSIX - # requirements): https://docs.python.org/3/library/os.html#os.replace + # Replace the original file with a temp file. (`os.replace` is atomic as per + # POSIX requirements): https://docs.python.org/3/library/os.html#os.replace os.replace(self.database_temp, self.database_name) def replace_database(self) -> None: @@ -214,7 +203,7 @@ class TFCUnencryptedDatabase(object): # so we delete it and continue using the old file to ensure atomicity. 
os.remove(self.database_temp) - with open(self.database_name, "rb") as f: + with open(self.database_name, 'rb') as f: database_data = f.read() database_data, digest = separate_trailer(database_data, BLAKE2_DIGEST_LENGTH) @@ -232,28 +221,26 @@ class MessageLog(object): """Create a new MessageLog object.""" self.database_name = database_name self.database_temp = self.database_name + TEMP_POSTFIX - self.database_key = database_key + self.database_key = database_key ensure_dir(DIR_USER_DATA) if os.path.isfile(self.database_name): self.check_for_temp_database() self.conn = sqlite3.connect(self.database_name) - self.c = self.conn.cursor() + self.c = self.conn.cursor() self.create_table() def __iter__(self) -> Iterator[bytes]: """Iterate over encrypted log entries.""" for log_entry in self.c.execute("SELECT log_entry FROM log_entries"): - plaintext = auth_and_decrypt( - log_entry[0], self.database_key, database=self.database_name - ) + plaintext = auth_and_decrypt(log_entry[0], self.database_key, database=self.database_name) yield plaintext def verify_file(self, database_name: str) -> bool: """Verify integrity of database file content.""" conn = sqlite3.connect(database_name) - c = conn.cursor() + c = conn.cursor() try: log_entries = c.execute("SELECT log_entry FROM log_entries") @@ -280,23 +267,19 @@ class MessageLog(object): def create_table(self) -> None: """Create new table for logged messages.""" - self.c.execute( - """CREATE TABLE IF NOT EXISTS log_entries (id INTEGER PRIMARY KEY, log_entry BLOB NOT NULL)""" - ) + self.c.execute("""CREATE TABLE IF NOT EXISTS log_entries (id INTEGER PRIMARY KEY, log_entry BLOB NOT NULL)""") def insert_log_entry(self, pt_log_entry: bytes) -> None: """Encrypt log entry and insert the ciphertext into the sqlite3 database.""" ct_log_entry = encrypt_and_sign(pt_log_entry, self.database_key) try: - self.c.execute( - f"""INSERT INTO log_entries (log_entry) VALUES (?)""", (ct_log_entry,) - ) + self.c.execute("""INSERT INTO log_entries (log_entry) VALUES (?)""", (ct_log_entry,)) self.conn.commit() except sqlite3.Error: # Re-connect to database self.conn = sqlite3.connect(self.database_name) - self.c = self.conn.cursor() + self.c = self.conn.cursor() self.insert_log_entry(pt_log_entry) def close_database(self) -> None: diff --git a/src/common/db_contacts.py b/src/common/db_contacts.py index e5ff773..63655d0 100755 --- a/src/common/db_contacts.py +++ b/src/common/db_contacts.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
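A minimal, self-contained sketch of the atomic write pattern that TFCDatabase and TFCUnencryptedDatabase above rely on: the payload is flushed and fsync()ed into a temporary file first, and only then moved over the original with os.replace(), which is atomic per POSIX, so a crash leaves either the old or the new database intact. The file name and payload are illustrative, and the verification step TFC performs before replacing is omitted here.

import os

def atomic_write(file_name: str, data: bytes) -> None:
    """Ensure `file_name` always holds either the complete old or the complete new data."""
    temp_name = file_name + '_temp'
    with open(temp_name, 'wb+') as f:
        f.write(data)         # program buffer -> OS buffer
        f.flush()
        os.fsync(f.fileno())  # OS buffer -> disk
    os.replace(temp_name, file_name)  # atomic per POSIX requirements

atomic_write('example_db', b'nonce + ciphertext + tag')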
@@ -24,46 +24,20 @@ import typing from typing import Iterable, Iterator, List, Optional, Sized -from src.common.database import TFCDatabase -from src.common.encoding import ( - bool_to_bytes, - pub_key_to_onion_address, - str_to_bytes, - pub_key_to_short_address, -) -from src.common.encoding import bytes_to_bool, onion_address_to_pub_key, bytes_to_str +from src.common.database import TFCDatabase +from src.common.encoding import bool_to_bytes, pub_key_to_onion_address, str_to_bytes, pub_key_to_short_address +from src.common.encoding import bytes_to_bool, onion_address_to_pub_key, bytes_to_str from src.common.exceptions import CriticalError -from src.common.misc import ( - ensure_dir, - get_terminal_width, - separate_headers, - split_byte_string, -) -from src.common.output import clear_screen -from src.common.statics import ( - CONTACT_LENGTH, - CONTACT_LIST_INDENT, - DIR_USER_DATA, - DUMMY_CONTACT, - DUMMY_NICK, - ECDHE, - ENCODED_BOOLEAN_LENGTH, - FINGERPRINT_LENGTH, - KEX_STATUS_HAS_RX_PSK, - KEX_STATUS_LENGTH, - KEX_STATUS_NONE, - KEX_STATUS_NO_RX_PSK, - KEX_STATUS_PENDING, - KEX_STATUS_UNVERIFIED, - KEX_STATUS_VERIFIED, - LOCAL_ID, - ONION_SERVICE_PUBLIC_KEY_LENGTH, - PSK, -) +from src.common.misc import ensure_dir, get_terminal_width, separate_headers, split_byte_string +from src.common.output import clear_screen +from src.common.statics import (CONTACT_LENGTH, CONTACT_LIST_INDENT, DIR_USER_DATA, DUMMY_CONTACT, DUMMY_NICK, ECDHE, + ENCODED_BOOLEAN_LENGTH, FINGERPRINT_LENGTH, KEX_STATUS_HAS_RX_PSK, KEX_STATUS_LENGTH, + KEX_STATUS_NONE, KEX_STATUS_NO_RX_PSK, KEX_STATUS_PENDING, KEX_STATUS_UNVERIFIED, + KEX_STATUS_VERIFIED, LOCAL_ID, ONION_SERVICE_PUBLIC_KEY_LENGTH, PSK) if typing.TYPE_CHECKING: from src.common.db_masterkey import MasterKey - from src.common.db_settings import Settings + from src.common.db_settings import Settings from cryptography.hazmat.primitives.asymmetric.x448 import X448PrivateKey @@ -153,32 +127,31 @@ class Contact(object): with each other. """ - def __init__( - self, - onion_pub_key: bytes, - nick: str, - tx_fingerprint: bytes, - rx_fingerprint: bytes, - kex_status: bytes, - log_messages: bool, - file_reception: bool, - notifications: bool, - ) -> None: + def __init__(self, + onion_pub_key: bytes, + nick: str, + tx_fingerprint: bytes, + rx_fingerprint: bytes, + kex_status: bytes, + log_messages: bool, + file_reception: bool, + notifications: bool + ) -> None: """Create a new Contact object. `self.short_address` is a truncated version of the account used to identify TFC account in printed messages. 
""" - self.onion_pub_key = onion_pub_key - self.nick = nick - self.tx_fingerprint = tx_fingerprint - self.rx_fingerprint = rx_fingerprint - self.kex_status = kex_status - self.log_messages = log_messages - self.file_reception = file_reception - self.notifications = notifications - self.onion_address = pub_key_to_onion_address(self.onion_pub_key) - self.short_address = pub_key_to_short_address(self.onion_pub_key) + self.onion_pub_key = onion_pub_key + self.nick = nick + self.tx_fingerprint = tx_fingerprint + self.rx_fingerprint = rx_fingerprint + self.kex_status = kex_status + self.log_messages = log_messages + self.file_reception = file_reception + self.notifications = notifications + self.onion_address = pub_key_to_onion_address(self.onion_pub_key) + self.short_address = pub_key_to_short_address(self.onion_pub_key) self.tfc_private_key = None # type: Optional[X448PrivateKey] def serialize_c(self) -> bytes: @@ -192,16 +165,14 @@ class Contact(object): metadata about the contact the ciphertext length of the contact database would reveal. """ - return ( - self.onion_pub_key - + self.tx_fingerprint - + self.rx_fingerprint - + self.kex_status - + bool_to_bytes(self.log_messages) - + bool_to_bytes(self.file_reception) - + bool_to_bytes(self.notifications) - + str_to_bytes(self.nick) - ) + return (self.onion_pub_key + + self.tx_fingerprint + + self.rx_fingerprint + + self.kex_status + + bool_to_bytes(self.log_messages) + + bool_to_bytes(self.file_reception) + + bool_to_bytes(self.notifications) + + str_to_bytes(self.nick)) def uses_psk(self) -> bool: """\ @@ -238,13 +209,13 @@ class ContactList(Iterable[Contact], Sized): readable names for making queries to the database. """ - def __init__(self, master_key: "MasterKey", settings: "Settings") -> None: + def __init__(self, master_key: 'MasterKey', settings: 'Settings') -> None: """Create a new ContactList object.""" - self.settings = settings - self.contacts = [] # type: List[Contact] + self.settings = settings + self.contacts = [] # type: List[Contact] self.dummy_contact = self.generate_dummy_contact() - self.file_name = f"{DIR_USER_DATA}{settings.software_operation}_contacts" - self.database = TFCDatabase(self.file_name, master_key) + self.file_name = f'{DIR_USER_DATA}{settings.software_operation}_contacts' + self.database = TFCDatabase(self.file_name, master_key) ensure_dir(DIR_USER_DATA) if os.path.isfile(self.file_name): @@ -280,9 +251,7 @@ class ContactList(Iterable[Contact], Sized): and a 16-byte tag, so the size of the final database is 57313 bytes. """ - pt_bytes = b"".join( - [c.serialize_c() for c in self.contacts + self._dummy_contacts()] - ) + pt_bytes = b''.join([c.serialize_c() for c in self.contacts + self._dummy_contacts()]) self.database.store_database(pt_bytes, replace) def _load_contacts(self) -> None: @@ -296,45 +265,30 @@ class ContactList(Iterable[Contact], Sized): populate the `self.contacts` list with Contact objects, the data of which is sliced and decoded from the dummy-free blocks. 
""" - pt_bytes = self.database.load_database() - blocks = split_byte_string(pt_bytes, item_len=CONTACT_LENGTH) - df_blocks = [ - b for b in blocks if not b.startswith(self.dummy_contact.onion_pub_key) - ] + pt_bytes = self.database.load_database() + blocks = split_byte_string(pt_bytes, item_len=CONTACT_LENGTH) + df_blocks = [b for b in blocks if not b.startswith(self.dummy_contact.onion_pub_key)] for block in df_blocks: if len(block) != CONTACT_LENGTH: raise CriticalError("Invalid data in contact database.") - ( - onion_pub_key, - tx_fingerprint, - rx_fingerprint, - kex_status_byte, - log_messages_byte, - file_reception_byte, - notifications_byte, - nick_bytes, - ) = separate_headers( - block, - [ONION_SERVICE_PUBLIC_KEY_LENGTH] - + 2 * [FINGERPRINT_LENGTH] - + [KEX_STATUS_LENGTH] - + 3 * [ENCODED_BOOLEAN_LENGTH], - ) + (onion_pub_key, tx_fingerprint, rx_fingerprint, kex_status_byte, + log_messages_byte, file_reception_byte, notifications_byte, + nick_bytes) = separate_headers(block, + [ONION_SERVICE_PUBLIC_KEY_LENGTH] + + 2*[FINGERPRINT_LENGTH] + + [KEX_STATUS_LENGTH] + + 3*[ENCODED_BOOLEAN_LENGTH]) - self.contacts.append( - Contact( - onion_pub_key=onion_pub_key, - tx_fingerprint=tx_fingerprint, - rx_fingerprint=rx_fingerprint, - kex_status=kex_status_byte, - log_messages=bytes_to_bool(log_messages_byte), - file_reception=bytes_to_bool(file_reception_byte), - notifications=bytes_to_bool(notifications_byte), - nick=bytes_to_str(nick_bytes), - ) - ) + self.contacts.append(Contact(onion_pub_key =onion_pub_key, + tx_fingerprint=tx_fingerprint, + rx_fingerprint=rx_fingerprint, + kex_status =kex_status_byte, + log_messages =bytes_to_bool(log_messages_byte), + file_reception=bytes_to_bool(file_reception_byte), + notifications =bytes_to_bool(notifications_byte), + nick =bytes_to_str(nick_bytes))) @staticmethod def generate_dummy_contact() -> Contact: @@ -344,16 +298,14 @@ class ContactList(Iterable[Contact], Sized): serialization when the data is stored to, or read from the database. """ - return Contact( - onion_pub_key=onion_address_to_pub_key(DUMMY_CONTACT), - nick=DUMMY_NICK, - tx_fingerprint=bytes(FINGERPRINT_LENGTH), - rx_fingerprint=bytes(FINGERPRINT_LENGTH), - kex_status=KEX_STATUS_NONE, - log_messages=False, - file_reception=False, - notifications=False, - ) + return Contact(onion_pub_key =onion_address_to_pub_key(DUMMY_CONTACT), + nick =DUMMY_NICK, + tx_fingerprint=bytes(FINGERPRINT_LENGTH), + rx_fingerprint=bytes(FINGERPRINT_LENGTH), + kex_status =KEX_STATUS_NONE, + log_messages =False, + file_reception=False, + notifications =False) def _dummy_contacts(self) -> List[Contact]: """\ @@ -368,20 +320,19 @@ class ContactList(Iterable[Contact], Sized): KeyList database that contains the local key. """ number_of_contacts_to_store = self.settings.max_number_of_contacts + 1 - number_of_dummies = number_of_contacts_to_store - len(self.contacts) + number_of_dummies = number_of_contacts_to_store - len(self.contacts) return [self.dummy_contact] * number_of_dummies - def add_contact( - self, - onion_pub_key: bytes, - nick: str, - tx_fingerprint: bytes, - rx_fingerprint: bytes, - kex_status: bytes, - log_messages: bool, - file_reception: bool, - notifications: bool, - ) -> None: + def add_contact(self, + onion_pub_key: bytes, + nick: str, + tx_fingerprint: bytes, + rx_fingerprint: bytes, + kex_status: bytes, + log_messages: bool, + file_reception: bool, + notifications: bool + ) -> None: """\ Add a new contact to `self.contacts` list and write changes to the database. 
@@ -398,23 +349,19 @@ class ContactList(Iterable[Contact], Sized): """ if self.has_pub_key(onion_pub_key): current_contact = self.get_contact_by_pub_key(onion_pub_key) - log_messages = current_contact.log_messages - file_reception = current_contact.file_reception - notifications = current_contact.notifications + log_messages = current_contact.log_messages + file_reception = current_contact.file_reception + notifications = current_contact.notifications self.remove_contact_by_pub_key(onion_pub_key) - self.contacts.append( - Contact( - onion_pub_key, - nick, - tx_fingerprint, - rx_fingerprint, - kex_status, - log_messages, - file_reception, - notifications, - ) - ) + self.contacts.append(Contact(onion_pub_key, + nick, + tx_fingerprint, + rx_fingerprint, + kex_status, + log_messages, + file_reception, + notifications)) self.store_contacts() def remove_contact_by_pub_key(self, onion_pub_key: bytes) -> None: @@ -475,23 +422,13 @@ class ContactList(Iterable[Contact], Sized): def get_list_of_pending_pub_keys(self) -> List[bytes]: """Return list of public keys for contacts that haven't completed key exchange yet.""" - return [ - c.onion_pub_key for c in self.contacts if c.kex_status == KEX_STATUS_PENDING - ] + return [c.onion_pub_key for c in self.contacts if c.kex_status == KEX_STATUS_PENDING] def get_list_of_existing_pub_keys(self) -> List[bytes]: """Return list of public keys for contacts with whom key exchange has been completed.""" - return [ - c.onion_pub_key - for c in self.get_list_of_contacts() - if c.kex_status - in [ - KEX_STATUS_UNVERIFIED, - KEX_STATUS_VERIFIED, - KEX_STATUS_HAS_RX_PSK, - KEX_STATUS_NO_RX_PSK, - ] - ] + return [c.onion_pub_key for c in self.get_list_of_contacts() + if c.kex_status in [KEX_STATUS_UNVERIFIED, KEX_STATUS_VERIFIED, + KEX_STATUS_HAS_RX_PSK, KEX_STATUS_NO_RX_PSK]] def contact_selectors(self) -> List[str]: """Return list of string-type UIDs that can be used to select a contact.""" @@ -503,9 +440,7 @@ class ContactList(Iterable[Contact], Sized): def has_only_pending_contacts(self) -> bool: """Return True if ContactList only has pending contacts, else False.""" - return all( - c.kex_status == KEX_STATUS_PENDING for c in self.get_list_of_contacts() - ) + return all(c.kex_status == KEX_STATUS_PENDING for c in self.get_list_of_contacts()) def has_pub_key(self, onion_pub_key: bytes) -> bool: """Return True if contact with public key exists, else False.""" @@ -526,46 +461,41 @@ class ContactList(Iterable[Contact], Sized): corresponds to what nick etc. 
""" # Initialize columns - c1 = ["Contact"] - c2 = ["Account"] - c3 = ["Logging"] - c4 = ["Notify"] - c5 = ["Files "] - c6 = ["Key Ex"] + c1 = ['Contact'] + c2 = ['Account'] + c3 = ['Logging'] + c4 = ['Notify'] + c5 = ['Files '] + c6 = ['Key Ex'] # Key exchange status dictionary - kex_dict = { - KEX_STATUS_PENDING: f"{ECDHE} (Pending)", - KEX_STATUS_UNVERIFIED: f"{ECDHE} (Unverified)", - KEX_STATUS_VERIFIED: f"{ECDHE} (Verified)", - KEX_STATUS_NO_RX_PSK: f"{PSK} (No contact key)", - KEX_STATUS_HAS_RX_PSK: PSK, - } + kex_dict = {KEX_STATUS_PENDING: f"{ECDHE} (Pending)", + KEX_STATUS_UNVERIFIED: f"{ECDHE} (Unverified)", + KEX_STATUS_VERIFIED: f"{ECDHE} (Verified)", + KEX_STATUS_NO_RX_PSK: f"{PSK} (No contact key)", + KEX_STATUS_HAS_RX_PSK: PSK + } # Populate columns with contact data for c in self.get_list_of_contacts(): c1.append(c.nick) c2.append(c.short_address) - c3.append("Yes" if c.log_messages else "No") - c4.append("Yes" if c.notifications else "No") - c5.append("Accept" if c.file_reception else "Reject") + c3.append('Yes' if c.log_messages else 'No') + c4.append('Yes' if c.notifications else 'No') + c5.append('Accept' if c.file_reception else 'Reject') c6.append(kex_dict[c.kex_status]) # Calculate column widths - c1w, c2w, c3w, c4w, c5w, = [ - max(len(v) for v in column) + CONTACT_LIST_INDENT - for column in [c1, c2, c3, c4, c5] - ] + c1w, c2w, c3w, c4w, c5w, = [max(len(v) for v in column) + CONTACT_LIST_INDENT + for column in [c1, c2, c3, c4, c5]] # Align columns by adding whitespace between fields of each line - lines = [ - f"{f1:{c1w}}{f2:{c2w}}{f3:{c3w}}{f4:{c4w}}{f5:{c5w}}{f6}" - for f1, f2, f3, f4, f5, f6 in zip(c1, c2, c3, c4, c5, c6) - ] + lines = [f'{f1:{c1w}}{f2:{c2w}}{f3:{c3w}}{f4:{c4w}}{f5:{c5w}}{f6}' + for f1, f2, f3, f4, f5, f6 in zip(c1, c2, c3, c4, c5, c6)] # Add a terminal-wide line between the column names and the data - lines.insert(1, get_terminal_width() * "─") + lines.insert(1, get_terminal_width() * '─') # Print the contact list clear_screen() - print("\n" + "\n".join(lines) + "\n\n") + print('\n' + '\n'.join(lines) + '\n\n') diff --git a/src/common/db_groups.py b/src/common/db_groups.py index edc8628..e80d133 100755 --- a/src/common/db_groups.py +++ b/src/common/db_groups.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
@@ -25,43 +25,22 @@ import typing from typing import Callable, Iterable, Iterator, List, Sized -from src.common.database import TFCDatabase +from src.common.database import TFCDatabase from src.common.db_contacts import Contact -from src.common.encoding import ( - bool_to_bytes, - int_to_bytes, - str_to_bytes, - onion_address_to_pub_key, - b58encode, -) -from src.common.encoding import bytes_to_bool, bytes_to_int, bytes_to_str -from src.common.exceptions import CriticalError -from src.common.misc import ( - ensure_dir, - get_terminal_width, - round_up, - separate_header, - separate_headers, -) -from src.common.misc import split_byte_string -from src.common.statics import ( - CONTACT_LIST_INDENT, - DIR_USER_DATA, - DUMMY_GROUP, - DUMMY_MEMBER, - ENCODED_BOOLEAN_LENGTH, - ENCODED_INTEGER_LENGTH, - GROUP_DB_HEADER_LENGTH, - GROUP_ID_LENGTH, - GROUP_STATIC_LENGTH, - ONION_SERVICE_PUBLIC_KEY_LENGTH, - PADDED_UTF32_STR_LENGTH, -) +from src.common.encoding import bool_to_bytes, int_to_bytes, str_to_bytes, onion_address_to_pub_key, b58encode +from src.common.encoding import bytes_to_bool, bytes_to_int, bytes_to_str +from src.common.exceptions import CriticalError +from src.common.misc import ensure_dir, get_terminal_width, round_up, separate_header, separate_headers +from src.common.misc import split_byte_string +from src.common.statics import (CONTACT_LIST_INDENT, DIR_USER_DATA, DUMMY_GROUP, DUMMY_MEMBER, + ENCODED_BOOLEAN_LENGTH, ENCODED_INTEGER_LENGTH, GROUP_DB_HEADER_LENGTH, + GROUP_ID_LENGTH, GROUP_STATIC_LENGTH, ONION_SERVICE_PUBLIC_KEY_LENGTH, + PADDED_UTF32_STR_LENGTH) if typing.TYPE_CHECKING: - from src.common.db_contacts import ContactList + from src.common.db_contacts import ContactList from src.common.db_masterkey import MasterKey - from src.common.db_settings import Settings + from src.common.db_settings import Settings class Group(Iterable[Contact], Sized): @@ -116,29 +95,28 @@ class Group(Iterable[Contact], Sized): header. """ - def __init__( - self, - name: str, - group_id: bytes, - log_messages: bool, - notifications: bool, - members: List["Contact"], - settings: "Settings", - store_groups: Callable[..., None], - ) -> None: + def __init__(self, + name: str, + group_id: bytes, + log_messages: bool, + notifications: bool, + members: List['Contact'], + settings: 'Settings', + store_groups: Callable[..., None] + ) -> None: """Create a new Group object. The `self.store_groups` is a reference to the method of the parent object GroupList that stores the list of groups into an encrypted database. """ - self.name = name - self.group_id = group_id - self.log_messages = log_messages + self.name = name + self.group_id = group_id + self.log_messages = log_messages self.notifications = notifications - self.members = members - self.settings = settings - self.store_groups = store_groups + self.members = members + self.settings = settings + self.store_groups = store_groups def __iter__(self) -> Iterator[Contact]: """Iterate over members (Contact objects) in the Group object.""" @@ -161,27 +139,21 @@ class Group(Iterable[Contact], Sized): metadata the ciphertext length of the group database could reveal. 
""" - members = self.get_list_of_member_pub_keys() - number_of_dummies = self.settings.max_number_of_group_members - len( - self.members - ) - members += number_of_dummies * [onion_address_to_pub_key(DUMMY_MEMBER)] - member_bytes = b"".join(members) + members = self.get_list_of_member_pub_keys() + number_of_dummies = self.settings.max_number_of_group_members - len(self.members) + members += number_of_dummies * [onion_address_to_pub_key(DUMMY_MEMBER)] + member_bytes = b''.join(members) - return ( - str_to_bytes(self.name) - + self.group_id - + bool_to_bytes(self.log_messages) - + bool_to_bytes(self.notifications) - + member_bytes - ) + return (str_to_bytes(self.name) + + self.group_id + + bool_to_bytes(self.log_messages) + + bool_to_bytes(self.notifications) + + member_bytes) - def add_members(self, contacts: List["Contact"]) -> None: + def add_members(self, contacts: List['Contact']) -> None: """Add a list of Contact objects to the group.""" pre_existing = self.get_list_of_member_pub_keys() - self.members.extend( - (c for c in contacts if c.onion_pub_key not in pre_existing) - ) + self.members.extend((c for c in contacts if c.onion_pub_key not in pre_existing)) self.store_groups() def remove_members(self, pub_keys: List[bytes]) -> bool: @@ -235,15 +207,17 @@ class GroupList(Iterable[Group], Sized): names for making queries to the database. """ - def __init__( - self, master_key: "MasterKey", settings: "Settings", contact_list: "ContactList" - ) -> None: + def __init__(self, + master_key: 'MasterKey', + settings: 'Settings', + contact_list: 'ContactList' + ) -> None: """Create a new GroupList object.""" - self.settings = settings + self.settings = settings self.contact_list = contact_list - self.groups = [] # type: List[Group] - self.file_name = f"{DIR_USER_DATA}{settings.software_operation}_groups" - self.database = TFCDatabase(self.file_name, master_key) + self.groups = [] # type: List[Group] + self.file_name = f'{DIR_USER_DATA}{settings.software_operation}_groups' + self.database = TFCDatabase(self.file_name, master_key) ensure_dir(DIR_USER_DATA) if os.path.isfile(self.file_name): @@ -279,10 +253,8 @@ class GroupList(Iterable[Group], Sized): The ciphertext includes a 24-byte nonce and a 16-byte tag, so the size of the final database is 131572 bytes. 
""" - pt_bytes = self._generate_group_db_header() - pt_bytes += b"".join( - [g.serialize_g() for g in (self.groups + self._dummy_groups())] - ) + pt_bytes = self._generate_group_db_header() + pt_bytes += b''.join([g.serialize_g() for g in (self.groups + self._dummy_groups())]) self.database.store_database(pt_bytes, replace) def _load_groups(self) -> None: @@ -303,29 +275,18 @@ class GroupList(Iterable[Group], Sized): # Slice and decode headers group_db_headers, pt_bytes = separate_header(pt_bytes, GROUP_DB_HEADER_LENGTH) - ( - padding_for_group_db, - padding_for_members, - number_of_groups, - members_in_largest_group, - ) = list( - map( - bytes_to_int, - split_byte_string(group_db_headers, ENCODED_INTEGER_LENGTH), - ) - ) + padding_for_group_db, padding_for_members, number_of_groups, members_in_largest_group \ + = list(map(bytes_to_int, split_byte_string(group_db_headers, ENCODED_INTEGER_LENGTH))) # Slice dummy groups - bytes_per_group = ( - GROUP_STATIC_LENGTH + padding_for_members * ONION_SERVICE_PUBLIC_KEY_LENGTH - ) - dummy_data_len = (padding_for_group_db - number_of_groups) * bytes_per_group - group_data = pt_bytes[:-dummy_data_len] + bytes_per_group = GROUP_STATIC_LENGTH + padding_for_members * ONION_SERVICE_PUBLIC_KEY_LENGTH + dummy_data_len = (padding_for_group_db - number_of_groups) * bytes_per_group + group_data = pt_bytes[:-dummy_data_len] update_db = self._check_db_settings(number_of_groups, members_in_largest_group) - blocks = split_byte_string(group_data, item_len=bytes_per_group) + blocks = split_byte_string(group_data, item_len=bytes_per_group) - all_pub_keys = self.contact_list.get_list_of_pub_keys() + all_pub_keys = self.contact_list.get_list_of_pub_keys() dummy_pub_key = onion_address_to_pub_key(DUMMY_MEMBER) # Deserialize group objects @@ -333,48 +294,30 @@ class GroupList(Iterable[Group], Sized): if len(block) != bytes_per_group: raise CriticalError("Invalid data in group database.") - ( - name_bytes, - group_id, - log_messages_byte, - notification_byte, - ser_pub_keys, - ) = separate_headers( - block, - [PADDED_UTF32_STR_LENGTH, GROUP_ID_LENGTH] - + 2 * [ENCODED_BOOLEAN_LENGTH], - ) + name_bytes, group_id, log_messages_byte, notification_byte, ser_pub_keys \ + = separate_headers(block, [PADDED_UTF32_STR_LENGTH, GROUP_ID_LENGTH] + 2*[ENCODED_BOOLEAN_LENGTH]) - pub_key_list = split_byte_string( - ser_pub_keys, item_len=ONION_SERVICE_PUBLIC_KEY_LENGTH - ) + pub_key_list = split_byte_string(ser_pub_keys, item_len=ONION_SERVICE_PUBLIC_KEY_LENGTH) group_pub_keys = [k for k in pub_key_list if k != dummy_pub_key] - group_members = [ - self.contact_list.get_contact_by_pub_key(k) - for k in group_pub_keys - if k in all_pub_keys - ] + group_members = [self.contact_list.get_contact_by_pub_key(k) for k in group_pub_keys if k in all_pub_keys] - self.groups.append( - Group( - name=bytes_to_str(name_bytes), - group_id=group_id, - log_messages=bytes_to_bool(log_messages_byte), - notifications=bytes_to_bool(notification_byte), - members=group_members, - settings=self.settings, - store_groups=self.store_groups, - ) - ) + self.groups.append(Group(name =bytes_to_str(name_bytes), + group_id =group_id, + log_messages =bytes_to_bool(log_messages_byte), + notifications=bytes_to_bool(notification_byte), + members =group_members, + settings =self.settings, + store_groups =self.store_groups)) update_db |= set(all_pub_keys) > set(group_pub_keys) if update_db: self.store_groups() - def _check_db_settings( - self, number_of_actual_groups: int, members_in_largest_group: int - ) -> bool: + def 
_check_db_settings(self, + number_of_actual_groups: int, + members_in_largest_group: int + ) -> bool: """\ Adjust TFC's settings automatically if loaded group database was stored using larger database setting values. @@ -389,9 +332,7 @@ class GroupList(Iterable[Group], Sized): update_db = True if members_in_largest_group > self.settings.max_number_of_group_members: - self.settings.max_number_of_group_members = round_up( - members_in_largest_group - ) + self.settings.max_number_of_group_members = round_up(members_in_largest_group) update_db = True if update_db: @@ -429,21 +370,12 @@ class GroupList(Iterable[Group], Sized): setting (e.g., in cases like the one described above). """ - return b"".join( - list( - map( - int_to_bytes, - [ - self.settings.max_number_of_groups, - self.settings.max_number_of_group_members, - len(self.groups), - self.largest_group(), - ], - ) - ) - ) + return b''.join(list(map(int_to_bytes, [self.settings.max_number_of_groups, + self.settings.max_number_of_group_members, + len(self.groups), + self.largest_group()]))) - def _generate_dummy_group(self) -> "Group": + def _generate_dummy_group(self) -> 'Group': """Generate a dummy Group object. The dummy group simplifies the code around the constant length @@ -452,45 +384,37 @@ class GroupList(Iterable[Group], Sized): """ dummy_member = self.contact_list.generate_dummy_contact() - return Group( - name=DUMMY_GROUP, - group_id=bytes(GROUP_ID_LENGTH), - log_messages=False, - notifications=False, - members=self.settings.max_number_of_group_members * [dummy_member], - settings=self.settings, - store_groups=lambda: None, - ) + return Group(name =DUMMY_GROUP, + group_id =bytes(GROUP_ID_LENGTH), + log_messages =False, + notifications=False, + members =self.settings.max_number_of_group_members * [dummy_member], + settings =self.settings, + store_groups =lambda: None) def _dummy_groups(self) -> List[Group]: """Generate a proper size list of dummy groups for database padding.""" number_of_dummies = self.settings.max_number_of_groups - len(self.groups) - dummy_group = self._generate_dummy_group() + dummy_group = self._generate_dummy_group() return [dummy_group] * number_of_dummies - def add_group( - self, - name: str, - group_id: bytes, - log_messages: bool, - notifications: bool, - members: List["Contact"], - ) -> None: + def add_group(self, + name: str, + group_id: bytes, + log_messages: bool, + notifications: bool, + members: List['Contact']) -> None: """Add a new group to `self.groups` and write changes to the database.""" if self.has_group(name): self.remove_group_by_name(name) - self.groups.append( - Group( - name, - group_id, - log_messages, - notifications, - members, - self.settings, - self.store_groups, - ) - ) + self.groups.append(Group(name, + group_id, + log_messages, + notifications, + members, + self.settings, + self.store_groups)) self.store_groups() def remove_group_by_name(self, name: str) -> None: @@ -537,7 +461,7 @@ class GroupList(Iterable[Group], Sized): """Return list of human readable (B58 encoded) group IDs.""" return [b58encode(g.group_id) for g in self.groups] - def get_group_members(self, group_id: bytes) -> List["Contact"]: + def get_group_members(self, group_id: bytes) -> List['Contact']: """Return list of group members (Contact objects).""" return self.get_group_by_id(group_id).members @@ -562,54 +486,46 @@ class GroupList(Iterable[Group], Sized): corresponds to what group, and which contacts are in the group. 
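The `_load_groups` hunk above recovers the padding geometry from four integers stored in the group database header and uses them to drop the dummy groups before deserializing. The arithmetic, written out with illustrative values (the real constants come from src.common.statics and the header itself):

    # Illustrative values only; the real ones are read from the DB header.
    GROUP_STATIC_LENGTH             = 1024 + 4 + 2   # padded name + group ID + two booleans (example)
    ONION_SERVICE_PUBLIC_KEY_LENGTH = 32

    padding_for_group_db     = 50    # number of groups the database is padded to
    padding_for_members      = 50    # number of members each group is padded to
    number_of_groups         = 3     # groups that actually exist
    members_in_largest_group = 10

    bytes_per_group = GROUP_STATIC_LENGTH + padding_for_members * ONION_SERVICE_PUBLIC_KEY_LENGTH
    dummy_data_len  = (padding_for_group_db - number_of_groups) * bytes_per_group

    # Dropping dummy_data_len bytes from the end of the plaintext leaves only
    # the real groups, each exactly bytes_per_group bytes and safe to slice.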
""" # Initialize columns - c1 = ["Group"] - c2 = ["Group ID"] - c3 = ["Logging "] - c4 = ["Notify"] - c5 = ["Members"] + c1 = ['Group' ] + c2 = ['Group ID'] + c3 = ['Logging '] + c4 = ['Notify' ] + c5 = ['Members' ] # Populate columns with group data that has only a single line for g in self.groups: c1.append(g.name) c2.append(b58encode(g.group_id)) - c3.append("Yes" if g.log_messages else "No") - c4.append("Yes" if g.notifications else "No") + c3.append('Yes' if g.log_messages else 'No') + c4.append('Yes' if g.notifications else 'No') # Calculate the width of single-line columns - c1w, c2w, c3w, c4w = [ - max(len(v) for v in column) + CONTACT_LIST_INDENT - for column in [c1, c2, c3, c4] - ] + c1w, c2w, c3w, c4w = [max(len(v) for v in column) + CONTACT_LIST_INDENT for column in [c1, c2, c3, c4]] # Create a wrapper for Members-column wrapped_members_line_indent = c1w + c2w + c3w + c4w - members_column_width = max( - 1, get_terminal_width() - wrapped_members_line_indent - ) - wrapper = textwrap.TextWrapper(width=members_column_width) + members_column_width = max(1, get_terminal_width() - wrapped_members_line_indent) + wrapper = textwrap.TextWrapper(width=members_column_width) # Populate the Members-column for g in self.groups: if g.empty(): c5.append("\n") else: - comma_separated_nicks = ", ".join(sorted([m.nick for m in g.members])) - members_column_lines = wrapper.fill(comma_separated_nicks).split("\n") + comma_separated_nicks = ', '.join(sorted([m.nick for m in g.members])) + members_column_lines = wrapper.fill(comma_separated_nicks).split('\n') - final_str = members_column_lines[0] + "\n" + final_str = members_column_lines[0] + '\n' for line in members_column_lines[1:]: - final_str += wrapped_members_line_indent * " " + line + "\n" + final_str += wrapped_members_line_indent * ' ' + line + '\n' c5.append(final_str) # Align columns by adding whitespace between fields of each line - lines = [ - f"{f1:{c1w}}{f2:{c2w}}{f3:{c3w}}{f4:{c4w}}{f5}" - for f1, f2, f3, f4, f5 in zip(c1, c2, c3, c4, c5) - ] + lines = [f'{f1:{c1w}}{f2:{c2w}}{f3:{c3w}}{f4:{c4w}}{f5}' for f1, f2, f3, f4, f5 in zip(c1, c2, c3, c4, c5)] # Add a terminal-wide line between the column names and the data - lines.insert(1, get_terminal_width() * "─") + lines.insert(1, get_terminal_width() * '─') # Print the group list - print("\n".join(lines) + "\n") + print('\n'.join(lines) + '\n') diff --git a/src/common/db_keys.py b/src/common/db_keys.py index a9be0e3..1b0ef2e 100644 --- a/src/common/db_keys.py +++ b/src/common/db_keys.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
@@ -25,37 +25,21 @@ import typing from typing import Any, Callable, Dict, List -from src.common.crypto import blake2b, csprng -from src.common.database import TFCDatabase -from src.common.encoding import int_to_bytes, onion_address_to_pub_key -from src.common.encoding import bytes_to_int +from src.common.crypto import blake2b, csprng +from src.common.database import TFCDatabase +from src.common.encoding import int_to_bytes, onion_address_to_pub_key +from src.common.encoding import bytes_to_int from src.common.exceptions import CriticalError -from src.common.misc import ensure_dir, separate_headers, split_byte_string -from src.common.statics import ( - DIR_USER_DATA, - DUMMY_CONTACT, - HARAC_LENGTH, - INITIAL_HARAC, - KDB_ADD_ENTRY_HEADER, - KDB_HALT_ACK_HEADER, - KDB_M_KEY_CHANGE_HALT_HEADER, - KDB_REMOVE_ENTRY_HEADER, - KDB_UPDATE_SIZE_HEADER, - KEY_MANAGEMENT_QUEUE, - KEY_MGMT_ACK_QUEUE, - KEYSET_LENGTH, - LOCAL_PUBKEY, - ONION_SERVICE_PUBLIC_KEY_LENGTH, - RX, - SYMMETRIC_KEY_LENGTH, - TX, -) +from src.common.misc import ensure_dir, separate_headers, split_byte_string +from src.common.statics import (DIR_USER_DATA, DUMMY_CONTACT, HARAC_LENGTH, INITIAL_HARAC, KDB_ADD_ENTRY_HEADER, + KDB_HALT_ACK_HEADER, KDB_M_KEY_CHANGE_HALT_HEADER, KDB_REMOVE_ENTRY_HEADER, + KDB_UPDATE_SIZE_HEADER, KEY_MANAGEMENT_QUEUE, KEY_MGMT_ACK_QUEUE, KEYSET_LENGTH, + LOCAL_PUBKEY, ONION_SERVICE_PUBLIC_KEY_LENGTH, RX, SYMMETRIC_KEY_LENGTH, TX) if typing.TYPE_CHECKING: - from multiprocessing import Queue + from multiprocessing import Queue from src.common.db_masterkey import MasterKey - from src.common.db_settings import Settings - + from src.common.db_settings import Settings QueueDict = Dict[bytes, Queue[Any]] @@ -87,17 +71,16 @@ class KeySet(object): only by the Receiver Program. """ - def __init__( - self, - onion_pub_key: bytes, - tx_mk: bytes, - rx_mk: bytes, - tx_hk: bytes, - rx_hk: bytes, - tx_harac: int, - rx_harac: int, - store_keys: Callable[..., None], - ) -> None: + def __init__(self, + onion_pub_key: bytes, + tx_mk: bytes, + rx_mk: bytes, + tx_hk: bytes, + rx_hk: bytes, + tx_harac: int, + rx_harac: int, + store_keys: Callable[..., None] + ) -> None: """Create a new KeySet object. The `self.store_keys` is a reference to the method of the parent @@ -105,13 +88,13 @@ class KeySet(object): encrypted database. """ self.onion_pub_key = onion_pub_key - self.tx_mk = tx_mk - self.rx_mk = rx_mk - self.tx_hk = tx_hk - self.rx_hk = rx_hk - self.tx_harac = tx_harac - self.rx_harac = rx_harac - self.store_keys = store_keys + self.tx_mk = tx_mk + self.rx_mk = rx_mk + self.tx_hk = tx_hk + self.rx_hk = rx_hk + self.tx_harac = tx_harac + self.rx_harac = rx_harac + self.store_keys = store_keys def serialize_k(self) -> bytes: """Return KeySet data as a constant length byte string. @@ -123,15 +106,13 @@ class KeySet(object): serialization is to hide any metadata about the KeySet database the ciphertext length of the key database would reveal. """ - return ( - self.onion_pub_key - + self.tx_mk - + self.rx_mk - + self.tx_hk - + self.rx_hk - + int_to_bytes(self.tx_harac) - + int_to_bytes(self.rx_harac) - ) + return (self.onion_pub_key + + self.tx_mk + + self.rx_mk + + self.tx_hk + + self.rx_hk + + int_to_bytes(self.tx_harac) + + int_to_bytes(self.rx_harac)) def rotate_tx_mk(self) -> None: """\ @@ -149,13 +130,15 @@ class KeySet(object): [1] (pp. 
17-18) https://netzpolitik.org/wp-upload/SCIMP-paper.pdf [2] https://signal.org/blog/advanced-ratcheting/ """ - self.tx_mk = blake2b( - self.tx_mk + int_to_bytes(self.tx_harac), digest_size=SYMMETRIC_KEY_LENGTH - ) + self.tx_mk = blake2b(self.tx_mk + int_to_bytes(self.tx_harac), digest_size=SYMMETRIC_KEY_LENGTH) self.tx_harac += 1 self.store_keys() - def update_mk(self, direction: str, key: bytes, offset: int) -> None: + def update_mk(self, + direction: str, + key: bytes, + offset: int + ) -> None: """Update Receiver Program's tx/rx-message key and tx/rx-harac. This method provides per-message forward secrecy for received @@ -165,11 +148,11 @@ class KeySet(object): function is not linear like in the case of `rotate_tx_mk`. """ if direction == TX: - self.tx_mk = key + self.tx_mk = key self.tx_harac += offset self.store_keys() elif direction == RX: - self.rx_mk = key + self.rx_mk = key self.rx_harac += offset self.store_keys() else: @@ -196,15 +179,15 @@ class KeyList(object): being stored in the database. """ - def __init__(self, master_key: "MasterKey", settings: "Settings") -> None: + def __init__(self, master_key: 'MasterKey', settings: 'Settings') -> None: """Create a new KeyList object.""" - self.master_key = master_key - self.settings = settings - self.keysets = [] # type: List[KeySet] + self.master_key = master_key + self.settings = settings + self.keysets = [] # type: List[KeySet] self.dummy_keyset = self.generate_dummy_keyset() - self.dummy_id = self.dummy_keyset.onion_pub_key - self.file_name = f"{DIR_USER_DATA}{settings.software_operation}_keys" - self.database = TFCDatabase(self.file_name, master_key) + self.dummy_id = self.dummy_keyset.onion_pub_key + self.file_name = f'{DIR_USER_DATA}{settings.software_operation}_keys' + self.database = TFCDatabase(self.file_name, master_key) ensure_dir(DIR_USER_DATA) if os.path.isfile(self.file_name): @@ -227,9 +210,7 @@ class KeyList(object): ciphertext includes a 24-byte nonce and a 16-byte tag, so the size of the final database is 9016 bytes. """ - pt_bytes = b"".join( - [k.serialize_k() for k in self.keysets + self._dummy_keysets()] - ) + pt_bytes = b''.join([k.serialize_k() for k in self.keysets + self._dummy_keysets()]) self.database.store_database(pt_bytes, replace) def _load_keys(self) -> None: @@ -243,44 +224,28 @@ class KeyList(object): populate the `self.keysets` list with KeySet objects, the data of which is sliced and decoded from the dummy-free blocks. 
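`rotate_tx_mk` above is the hash ratchet step: the next message key is the BLAKE2b hash of the current key concatenated with the encoded hash ratchet counter, after which the old key is discarded. A sketch of that step with the standard library's BLAKE2b (TFC's own wrapper in src.common.crypto and its `int_to_bytes` encoding may differ in details):

    import hashlib
    import struct

    SYMMETRIC_KEY_LENGTH = 32

    def ratchet_step(tx_mk: bytes, tx_harac: int):
        """Derive the next message key from the current key and the harac."""
        next_mk = hashlib.blake2b(tx_mk + struct.pack("!Q", tx_harac),
                                  digest_size=SYMMETRIC_KEY_LENGTH).digest()
        return next_mk, tx_harac + 1

    key, harac = b"\x00" * SYMMETRIC_KEY_LENGTH, 0
    for _ in range(3):                  # one step per sent message
        key, harac = ratchet_step(key, harac)
    # Because each old key is overwritten, compromising the device later does
    # not reveal the keys that protected already-delivered messages.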
""" - pt_bytes = self.database.load_database() - blocks = split_byte_string(pt_bytes, item_len=KEYSET_LENGTH) + pt_bytes = self.database.load_database() + blocks = split_byte_string(pt_bytes, item_len=KEYSET_LENGTH) df_blocks = [b for b in blocks if not b.startswith(self.dummy_id)] for block in df_blocks: if len(block) != KEYSET_LENGTH: raise CriticalError("Invalid data in key database.") - ( - onion_pub_key, - tx_mk, - rx_mk, - tx_hk, - rx_hk, - tx_harac_bytes, - rx_harac_bytes, - ) = separate_headers( - block, - [ONION_SERVICE_PUBLIC_KEY_LENGTH] - + 4 * [SYMMETRIC_KEY_LENGTH] - + [HARAC_LENGTH], - ) + onion_pub_key, tx_mk, rx_mk, tx_hk, rx_hk, tx_harac_bytes, rx_harac_bytes \ + = separate_headers(block, [ONION_SERVICE_PUBLIC_KEY_LENGTH] + 4*[SYMMETRIC_KEY_LENGTH] + [HARAC_LENGTH]) - self.keysets.append( - KeySet( - onion_pub_key=onion_pub_key, - tx_mk=tx_mk, - rx_mk=rx_mk, - tx_hk=tx_hk, - rx_hk=rx_hk, - tx_harac=bytes_to_int(tx_harac_bytes), - rx_harac=bytes_to_int(rx_harac_bytes), - store_keys=self.store_keys, - ) - ) + self.keysets.append(KeySet(onion_pub_key=onion_pub_key, + tx_mk=tx_mk, + rx_mk=rx_mk, + tx_hk=tx_hk, + rx_hk=rx_hk, + tx_harac=bytes_to_int(tx_harac_bytes), + rx_harac=bytes_to_int(rx_harac_bytes), + store_keys=self.store_keys)) @staticmethod - def generate_dummy_keyset() -> "KeySet": + def generate_dummy_keyset() -> 'KeySet': """Generate a dummy KeySet object. The dummy KeySet simplifies the code around the constant length @@ -290,16 +255,14 @@ class KeyList(object): In case the dummy keyset would ever be loaded accidentally, it uses a set of random keys to prevent decryption by eavesdropper. """ - return KeySet( - onion_pub_key=onion_address_to_pub_key(DUMMY_CONTACT), - tx_mk=csprng(), - rx_mk=csprng(), - tx_hk=csprng(), - rx_hk=csprng(), - tx_harac=INITIAL_HARAC, - rx_harac=INITIAL_HARAC, - store_keys=lambda: None, - ) + return KeySet(onion_pub_key=onion_address_to_pub_key(DUMMY_CONTACT), + tx_mk=csprng(), + rx_mk=csprng(), + tx_hk=csprng(), + rx_hk=csprng(), + tx_harac=INITIAL_HARAC, + rx_harac=INITIAL_HARAC, + store_keys=lambda: None) def _dummy_keysets(self) -> List[KeySet]: """\ @@ -309,17 +272,15 @@ class KeyList(object): The additional contact (+1) is the local key. """ number_of_contacts_to_store = self.settings.max_number_of_contacts + 1 - number_of_dummies = number_of_contacts_to_store - len(self.keysets) + number_of_dummies = number_of_contacts_to_store - len(self.keysets) return [self.dummy_keyset] * number_of_dummies - def add_keyset( - self, - onion_pub_key: bytes, - tx_mk: bytes, - rx_mk: bytes, - tx_hk: bytes, - rx_hk: bytes, - ) -> None: + def add_keyset(self, + onion_pub_key: bytes, + tx_mk: bytes, + rx_mk: bytes, + tx_hk: bytes, + rx_hk: bytes) -> None: """\ Add a new KeySet to `self.keysets` list and write changes to the database. 
@@ -327,18 +288,14 @@ class KeyList(object): if self.has_keyset(onion_pub_key): self.remove_keyset(onion_pub_key) - self.keysets.append( - KeySet( - onion_pub_key=onion_pub_key, - tx_mk=tx_mk, - rx_mk=rx_mk, - tx_hk=tx_hk, - rx_hk=rx_hk, - tx_harac=INITIAL_HARAC, - rx_harac=INITIAL_HARAC, - store_keys=self.store_keys, - ) - ) + self.keysets.append(KeySet(onion_pub_key=onion_pub_key, + tx_mk=tx_mk, + rx_mk=rx_mk, + tx_hk=tx_hk, + rx_hk=rx_hk, + tx_harac=INITIAL_HARAC, + rx_harac=INITIAL_HARAC, + store_keys=self.store_keys)) self.store_keys() def remove_keyset(self, onion_pub_key: bytes) -> None: @@ -353,7 +310,7 @@ class KeyList(object): self.store_keys() break - def change_master_key(self, queues: "QueueDict") -> None: + def change_master_key(self, queues: 'QueueDict') -> None: """Change the master key and encrypt the database with the new key.""" key_queue = queues[KEY_MANAGEMENT_QUEUE] ack_queue = queues[KEY_MGMT_ACK_QUEUE] @@ -361,7 +318,7 @@ class KeyList(object): # Halt sender loop here until keys have been replaced by the # `input_loop` process, and new master key is delivered. ack_queue.put(KDB_HALT_ACK_HEADER) - while not key_queue.qsize(): + while key_queue.qsize() == 0: time.sleep(0.001) new_master_key = key_queue.get() @@ -372,7 +329,7 @@ class KeyList(object): # Send new master key back to `input_loop` process to verify it was received. ack_queue.put(new_master_key) - def update_database(self, settings: "Settings") -> None: + def update_database(self, settings: 'Settings') -> None: """Update settings and database size.""" self.settings = settings self.store_keys() @@ -386,9 +343,7 @@ class KeyList(object): def get_list_of_pub_keys(self) -> List[bytes]: """Return list of Onion Service public keys for KeySets.""" - return [ - k.onion_pub_key for k in self.keysets if k.onion_pub_key != LOCAL_PUBKEY - ] + return [k.onion_pub_key for k in self.keysets if k.onion_pub_key != LOCAL_PUBKEY] def has_keyset(self, onion_pub_key: bytes) -> bool: """Return True if KeySet with matching Onion Service public key exists, else False.""" @@ -410,7 +365,7 @@ class KeyList(object): """Return True if local KeySet object exists, else False.""" return any(k.onion_pub_key == LOCAL_PUBKEY for k in self.keysets) - def manage(self, queues: "QueueDict", command: str, *params: Any) -> None: + def manage(self, queues: 'QueueDict', command: str, *params: Any) -> None: """Manage KeyList based on a command. The command is delivered from `input_process` to `sender_loop` diff --git a/src/common/db_logs.py b/src/common/db_logs.py index 2bfcd6f..f4a4e4b 100644 --- a/src/common/db_logs.py +++ b/src/common/db_logs.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
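`change_master_key` above coordinates two processes through a pair of queues: the key list first signals that it has halted, blocks until `input_loop` delivers the new master key, re-encrypts the database, and finally echoes the key back as a receipt. A threaded toy version of that handshake (queue names and payloads here are illustrative):

    import queue
    import threading
    import time

    key_queue = queue.Queue()   # input_loop -> key list: the new master key
    ack_queue = queue.Queue()   # key list  -> input_loop: halt ack, then key receipt

    def key_list_side() -> None:
        ack_queue.put("HALT_ACK")               # 1. announce the loop has halted
        while key_queue.qsize() == 0:           # 2. wait for the new master key
            time.sleep(0.001)
        new_key = key_queue.get()
        # ... re-encrypt the key database with new_key here ...
        ack_queue.put(new_key)                  # 3. echo the key back as a receipt

    def input_loop_side() -> None:
        assert ack_queue.get() == "HALT_ACK"    # wait until the other side halted
        key_queue.put(b"\x01" * 32)             # deliver the new master key
        assert ack_queue.get() == b"\x01" * 32  # confirm it was received

    worker = threading.Thread(target=key_list_side)
    worker.start()
    input_loop_side()
    worker.join()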
@@ -27,73 +27,39 @@ import time import typing from datetime import datetime -from typing import Any, Dict, List, Tuple, Union +from typing import Any, Dict, List, Tuple, Union -from src.common.database import MessageLog -from src.common.encoding import ( - b58encode, - bytes_to_bool, - bytes_to_timestamp, - pub_key_to_short_address, -) +from src.common.database import MessageLog +from src.common.encoding import b58encode, bytes_to_bool, bytes_to_timestamp, pub_key_to_short_address from src.common.exceptions import CriticalError, SoftError -from src.common.misc import ( - ensure_dir, - get_terminal_width, - ignored, - separate_header, - separate_headers, -) -from src.common.output import clear_screen -from src.common.statics import ( - ASSEMBLY_PACKET_HEADER_LENGTH, - DIR_USER_DATA, - GROUP_ID_LENGTH, - GROUP_MESSAGE_HEADER, - GROUP_MSG_ID_LENGTH, - LOGFILE_MASKING_QUEUE, - LOG_ENTRY_LENGTH, - LOG_PACKET_QUEUE, - LOG_SETTING_QUEUE, - MESSAGE, - MESSAGE_HEADER_LENGTH, - ONION_SERVICE_PUBLIC_KEY_LENGTH, - ORIGIN_HEADER_LENGTH, - ORIGIN_USER_HEADER, - PLACEHOLDER_DATA, - PRIVATE_MESSAGE_HEADER, - P_N_HEADER, - RX, - TEMP_POSTFIX, - TIMESTAMP_LENGTH, - TRAFFIC_MASKING_QUEUE, - TX, - UNIT_TEST_QUEUE, - WHISPER_FIELD_LENGTH, - WIN_TYPE_CONTACT, - WIN_TYPE_GROUP, -) +from src.common.misc import ensure_dir, get_terminal_width, ignored, separate_header, separate_headers +from src.common.output import clear_screen +from src.common.statics import (ASSEMBLY_PACKET_HEADER_LENGTH, DIR_USER_DATA, GROUP_ID_LENGTH, GROUP_MESSAGE_HEADER, + GROUP_MSG_ID_LENGTH, LOGFILE_MASKING_QUEUE, LOG_ENTRY_LENGTH, LOG_PACKET_QUEUE, + LOG_SETTING_QUEUE, MESSAGE, MESSAGE_HEADER_LENGTH, ONION_SERVICE_PUBLIC_KEY_LENGTH, + ORIGIN_HEADER_LENGTH, ORIGIN_USER_HEADER, PLACEHOLDER_DATA, PRIVATE_MESSAGE_HEADER, + P_N_HEADER, RX, TEMP_POSTFIX, TIMESTAMP_LENGTH, TRAFFIC_MASKING_QUEUE, TX, + UNIT_TEST_QUEUE, WHISPER_FIELD_LENGTH, WIN_TYPE_CONTACT, WIN_TYPE_GROUP) -from src.receiver.packet import Packet, PacketList +from src.receiver.packet import Packet, PacketList from src.receiver.windows import RxWindow if typing.TYPE_CHECKING: - from multiprocessing import Queue - from src.common.db_contacts import ContactList - from src.common.db_groups import GroupList + from multiprocessing import Queue + from src.common.db_contacts import ContactList + from src.common.db_groups import GroupList from src.common.db_masterkey import MasterKey - from src.common.db_settings import Settings + from src.common.db_settings import Settings from src.transmitter.windows import TxWindow MsgTuple = Tuple[datetime, str, bytes, bytes, bool, bool] -def log_writer_loop( - queues: Dict[bytes, "Queue[Any]"], # Dictionary of queues - settings: "Settings", # Settings object - message_log: "MessageLog", # MessageLog object - unit_test: bool = False, # True, exits loop when UNIT_TEST_QUEUE is no longer empty. -) -> None: +def log_writer_loop(queues: Dict[bytes, 'Queue[Any]'], # Dictionary of queues + settings: 'Settings', # Settings object + message_log: 'MessageLog', # MessageLog object + unit_test: bool = False # True, exits loop when UNIT_TEST_QUEUE is no longer empty. + ) -> None: """Write assembly packets to log database. When traffic masking is enabled, the fact this loop is run as a @@ -103,35 +69,27 @@ def log_writer_loop( even from an adversary performing timing attacks from within the Networked Computer of the user. 
""" - log_packet_queue = queues[LOG_PACKET_QUEUE] - log_setting_queue = queues[LOG_SETTING_QUEUE] + log_packet_queue = queues[LOG_PACKET_QUEUE] + log_setting_queue = queues[LOG_SETTING_QUEUE] traffic_masking_queue = queues[TRAFFIC_MASKING_QUEUE] logfile_masking_queue = queues[LOGFILE_MASKING_QUEUE] - logging_state = False + logging_state = False logfile_masking = settings.log_file_masking traffic_masking = settings.traffic_masking while True: with ignored(EOFError, KeyboardInterrupt): - while not log_packet_queue.qsize(): + while log_packet_queue.qsize() == 0: time.sleep(0.01) - traffic_masking, logfile_masking = check_log_setting_queues( - traffic_masking, - traffic_masking_queue, - logfile_masking, - logfile_masking_queue, - ) + traffic_masking, logfile_masking = check_log_setting_queues(traffic_masking, + traffic_masking_queue, + logfile_masking, + logfile_masking_queue) - ( - onion_pub_key, - assembly_packet, - log_messages, - log_as_ph, - master_key, - ) = log_packet_queue.get() + onion_pub_key, assembly_packet, log_messages, log_as_ph, master_key = log_packet_queue.get() # Update log database key message_log.database_key = master_key.master_key @@ -140,9 +98,7 @@ def log_writer_loop( if onion_pub_key is None: continue - logging_state = update_logging_state( - assembly_packet, logging_state, log_messages, log_setting_queue - ) + logging_state = update_logging_state(assembly_packet, logging_state, log_messages, log_setting_queue) # Detect if we are going to log the packet at all. if not logging_state: @@ -179,12 +135,11 @@ def log_writer_loop( break -def check_log_setting_queues( - traffic_masking: bool, - traffic_masking_queue: "Queue[Any]", - logfile_masking: bool, - logfile_masking_queue: "Queue[Any]", -) -> Tuple[bool, bool]: +def check_log_setting_queues(traffic_masking: bool, + traffic_masking_queue: 'Queue[Any]', + logfile_masking: bool, + logfile_masking_queue: 'Queue[Any]' + ) -> Tuple[bool, bool]: """Check for updates to logging settings.""" if traffic_masking_queue.qsize(): traffic_masking = traffic_masking_queue.get() @@ -195,12 +150,11 @@ def check_log_setting_queues( return traffic_masking, logfile_masking -def update_logging_state( - assembly_packet: bytes, - logging_state: bool, - log_messages: bool, - log_setting_queue: "Queue[Any]", -) -> bool: +def update_logging_state(assembly_packet: bytes, + logging_state: bool, + log_messages: bool, + log_setting_queue: 'Queue[Any]' + ) -> bool: """Update logging state. `logging_state` retains the logging setting for noise packets that @@ -218,12 +172,11 @@ def update_logging_state( return logging_state -def write_log_entry( - assembly_packet: bytes, # Assembly packet to log - onion_pub_key: bytes, # Onion Service public key of the associated contact - message_log: MessageLog, # MessageLog object - origin: bytes = ORIGIN_USER_HEADER, # The direction of logged packet -) -> None: +def write_log_entry(assembly_packet: bytes, # Assembly packet to log + onion_pub_key: bytes, # Onion Service public key of the associated contact + message_log: MessageLog, # MessageLog object + origin: bytes = ORIGIN_USER_HEADER, # The direction of logged packet + ) -> None: """Add an assembly packet to the encrypted log database. Logging assembly packets allows reconstruction of conversation while @@ -245,7 +198,7 @@ def write_log_entry( `settings.log_file_masking` is enabled, instead of file data, TFC writes placeholder data to the log database. 
""" - timestamp = struct.pack(" None: raise SoftError("No log database available.") -def access_logs( - window: Union["TxWindow", "RxWindow"], - contact_list: "ContactList", - group_list: "GroupList", - settings: "Settings", - master_key: "MasterKey", - msg_to_load: int = 0, - export: bool = False, -) -> None: +def access_logs(window: Union['TxWindow', 'RxWindow'], + contact_list: 'ContactList', + group_list: 'GroupList', + settings: 'Settings', + master_key: 'MasterKey', + msg_to_load: int = 0, + export: bool = False + ) -> None: """\ Load 'msg_to_load' last messages from log database and display or export them. @@ -278,25 +230,22 @@ def access_logs( The default value of zero for `msg_to_load` means all messages for the window will be retrieved from the log database. """ - file_name = f"{DIR_USER_DATA}{settings.software_operation}_logs" - packet_list = PacketList(settings, contact_list) + file_name = f'{DIR_USER_DATA}{settings.software_operation}_logs' + packet_list = PacketList(settings, contact_list) message_list = [] # type: List[MsgTuple] - group_msg_id = b"" + group_msg_id = b'' check_log_file_exists(file_name) message_log = MessageLog(file_name, master_key.master_key) for log_entry in message_log: - onion_pub_key, timestamp, origin, assembly_packet = separate_headers( - log_entry, - [ONION_SERVICE_PUBLIC_KEY_LENGTH, TIMESTAMP_LENGTH, ORIGIN_HEADER_LENGTH], - ) + onion_pub_key, timestamp, origin, assembly_packet \ + = separate_headers(log_entry, [ONION_SERVICE_PUBLIC_KEY_LENGTH, TIMESTAMP_LENGTH, ORIGIN_HEADER_LENGTH]) if window.type == WIN_TYPE_CONTACT and onion_pub_key != window.uid: continue packet = packet_list.get_packet(onion_pub_key, origin, MESSAGE, log_access=True) - try: packet.add_packet(assembly_packet) except SoftError: @@ -304,48 +253,30 @@ def access_logs( if not packet.is_complete: continue - group_msg_id = add_complete_message_to_message_list( - timestamp, onion_pub_key, group_msg_id, packet, message_list, window - ) + group_msg_id = add_complete_message_to_message_list(timestamp, onion_pub_key, group_msg_id, + packet, message_list, window) message_log.close_database() - print_logs( - message_list[-msg_to_load:], - export, - msg_to_load, - window, - contact_list, - group_list, - settings, - ) + print_logs(message_list[-msg_to_load:], export, msg_to_load, window, contact_list, group_list, settings) -def add_complete_message_to_message_list( - timestamp: bytes, - onion_pub_key: bytes, - group_msg_id: bytes, - packet: "Packet", - message_list: List[MsgTuple], - window: Union["TxWindow", "RxWindow"], -) -> bytes: +def add_complete_message_to_message_list(timestamp: bytes, + onion_pub_key: bytes, + group_msg_id: bytes, + packet: 'Packet', + message_list: List[MsgTuple], + window: Union['TxWindow', 'RxWindow'] + ) -> bytes: """Add complete log file message to `message_list`.""" whisper_byte, header, message = separate_headers( - packet.assemble_message_packet(), [WHISPER_FIELD_LENGTH, MESSAGE_HEADER_LENGTH] - ) + packet.assemble_message_packet(), [WHISPER_FIELD_LENGTH, MESSAGE_HEADER_LENGTH]) + whisper = bytes_to_bool(whisper_byte) if header == PRIVATE_MESSAGE_HEADER and window.type == WIN_TYPE_CONTACT: message_list.append( - ( - bytes_to_timestamp(timestamp), - message.decode(), - onion_pub_key, - packet.origin, - whisper, - False, - ) - ) + (bytes_to_timestamp(timestamp), message.decode(), onion_pub_key, packet.origin, whisper, False)) elif header == GROUP_MESSAGE_HEADER and window.type == WIN_TYPE_GROUP: purp_group_id, message = separate_header(message, GROUP_ID_LENGTH) 
@@ -359,70 +290,54 @@ def add_complete_message_to_message_list( group_msg_id = purp_msg_id message_list.append( - ( - bytes_to_timestamp(timestamp), - message.decode(), - onion_pub_key, - packet.origin, - whisper, - False, - ) - ) + (bytes_to_timestamp(timestamp), message.decode(), onion_pub_key, packet.origin, whisper, False)) return group_msg_id -def print_logs( - message_list: List[MsgTuple], - export: bool, - msg_to_load: int, - window: Union["TxWindow", "RxWindow"], - contact_list: "ContactList", - group_list: "GroupList", - settings: "Settings", -) -> None: +def print_logs(message_list: List[MsgTuple], + export: bool, + msg_to_load: int, + window: Union['TxWindow', 'RxWindow'], + contact_list: 'ContactList', + group_list: 'GroupList', + settings: 'Settings' + ) -> None: """Print list of logged messages to screen or export them to file.""" terminal_width = get_terminal_width() - system, m_dir = {TX: ("Transmitter", "sent to"), RX: ("Receiver", "to/from")}[ - settings.software_operation - ] + system, m_dir = {TX: ("Transmitter", "sent to"), + RX: ("Receiver", "to/from")}[settings.software_operation] - f_name = ( - open(f"{system} - Plaintext log ({window.name})", "w+") - if export - else sys.stdout - ) - subset = "" if msg_to_load == 0 else f"{msg_to_load} most recent " - title = textwrap.fill( - f"Log file of {subset}message(s) {m_dir} {window.type} {window.name}", - terminal_width, - ) + f_name = open(f"{system} - Plaintext log ({window.name})", 'w+') if export else sys.stdout + subset = f"{msg_to_load} most recent " if msg_to_load != 0 else '' + title = textwrap.fill(f"Log file of {subset}message(s) {m_dir} {window.type} {window.name}", terminal_width) - packet_list = PacketList(settings, contact_list) - log_window = RxWindow(window.uid, contact_list, group_list, settings, packet_list) - log_window.is_active = True + packet_list = PacketList(settings, contact_list) + log_window = RxWindow(window.uid, contact_list, group_list, settings, packet_list) + log_window.is_active = True log_window.message_log = message_list if message_list: if not export: clear_screen() - print(title, file=f_name) - print(terminal_width * "═", file=f_name) - log_window.redraw(file=f_name) + print(title, file=f_name) + print(terminal_width * '═', file=f_name) + log_window.redraw( file=f_name) print("\n", file=f_name) else: - raise SoftError( - f"No logged messages for {window.type} '{window.name}'.", head_clear=True - ) + raise SoftError(f"No logged messages for {window.type} '{window.name}'.", head_clear=True) if export: f_name.close() -def change_log_db_key(old_key: bytes, new_key: bytes, settings: "Settings") -> None: +def change_log_db_key(old_key: bytes, + new_key: bytes, + settings: 'Settings' + ) -> None: """Re-encrypt log database with a new master key.""" ensure_dir(DIR_USER_DATA) - file_name = f"{DIR_USER_DATA}{settings.software_operation}_logs" + file_name = f'{DIR_USER_DATA}{settings.software_operation}_logs' temp_name = file_name + TEMP_POSTFIX if not os.path.isfile(file_name): @@ -441,23 +356,22 @@ def change_log_db_key(old_key: bytes, new_key: bytes, settings: "Settings") -> N message_log_tmp.close_database() -def replace_log_db(settings: "Settings") -> None: +def replace_log_db(settings: 'Settings') -> None: """Replace log database with temp file.""" ensure_dir(DIR_USER_DATA) - file_name = f"{DIR_USER_DATA}{settings.software_operation}_logs" + file_name = f'{DIR_USER_DATA}{settings.software_operation}_logs' temp_name = file_name + TEMP_POSTFIX if os.path.isfile(temp_name): 
os.replace(temp_name, file_name) -def remove_logs( - contact_list: "ContactList", - group_list: "GroupList", - settings: "Settings", - master_key: "MasterKey", - selector: bytes, -) -> None: +def remove_logs(contact_list: 'ContactList', + group_list: 'GroupList', + settings: 'Settings', + master_key: 'MasterKey', + selector: bytes + ) -> None: """\ Remove log entries for selector (public key of an account/group ID). @@ -467,22 +381,21 @@ def remove_logs( ID, only messages for group determined by that group ID are removed. """ ensure_dir(DIR_USER_DATA) - file_name = f"{DIR_USER_DATA}{settings.software_operation}_logs" - temp_name = file_name + TEMP_POSTFIX - packet_list = PacketList(settings, contact_list) + file_name = f'{DIR_USER_DATA}{settings.software_operation}_logs' + temp_name = file_name + TEMP_POSTFIX + packet_list = PacketList(settings, contact_list) entries_to_keep = [] # type: List[bytes] - removed = False - contact = len(selector) == ONION_SERVICE_PUBLIC_KEY_LENGTH + removed = False + contact = len(selector) == ONION_SERVICE_PUBLIC_KEY_LENGTH check_log_file_exists(file_name) message_log = MessageLog(file_name, master_key.master_key) for log_entry in message_log: - onion_pub_key, _, origin, assembly_packet = separate_headers( - log_entry, - [ONION_SERVICE_PUBLIC_KEY_LENGTH, TIMESTAMP_LENGTH, ORIGIN_HEADER_LENGTH], - ) + onion_pub_key, _, origin, assembly_packet = separate_headers(log_entry, [ONION_SERVICE_PUBLIC_KEY_LENGTH, + TIMESTAMP_LENGTH, + ORIGIN_HEADER_LENGTH]) if contact: if onion_pub_key == selector: removed = True @@ -490,9 +403,7 @@ def remove_logs( entries_to_keep.append(log_entry) else: # Group - packet = packet_list.get_packet( - onion_pub_key, origin, MESSAGE, log_access=True - ) + packet = packet_list.get_packet(onion_pub_key, origin, MESSAGE, log_access=True) try: packet.add_packet(assembly_packet, log_entry) except SoftError: @@ -513,27 +424,25 @@ def remove_logs( os.replace(temp_name, file_name) try: - name = ( - contact_list.get_nick_by_pub_key(selector) - if contact - else group_list.get_group_by_id(selector).name - ) + name = contact_list.get_nick_by_pub_key(selector) if contact else group_list.get_group_by_id(selector).name except StopIteration: - name = pub_key_to_short_address(selector) if contact else b58encode(selector) + name = pub_key_to_short_address(selector) if contact else b58encode(selector) - action = "Removed" if removed else "Found no" + action = "Removed" if removed else "Found no" win_type = "contact" if contact else "group" raise SoftError(f"{action} log entries for {win_type} '{name}'.") -def check_packet_fate( - entries_to_keep: List[bytes], packet: "Packet", removed: bool, selector: bytes -) -> bool: +def check_packet_fate(entries_to_keep: List[bytes], + packet: 'Packet', + removed: bool, + selector: bytes + ) -> bool: """Check whether the packet should be kept.""" - _, header, message = separate_headers( - packet.assemble_message_packet(), [WHISPER_FIELD_LENGTH, MESSAGE_HEADER_LENGTH] - ) + _, header, message = separate_headers(packet.assemble_message_packet(), + [WHISPER_FIELD_LENGTH, MESSAGE_HEADER_LENGTH]) + if header == PRIVATE_MESSAGE_HEADER: entries_to_keep.extend(packet.log_ct_list) packet.clear_assembly_packets() diff --git a/src/common/db_masterkey.py b/src/common/db_masterkey.py index 403d46d..8c00543 100755 --- a/src/common/db_masterkey.py +++ b/src/common/db_masterkey.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This 
file is part of TFC. @@ -27,29 +27,18 @@ import time from typing import List, Optional, Tuple -from src.common.crypto import argon2_kdf, blake2b, csprng -from src.common.database import TFCUnencryptedDatabase -from src.common.encoding import bytes_to_int, int_to_bytes +from src.common.crypto import argon2_kdf, blake2b, csprng +from src.common.database import TFCUnencryptedDatabase +from src.common.encoding import bytes_to_int, int_to_bytes from src.common.exceptions import CriticalError, graceful_exit, SoftError -from src.common.input import pwd_prompt -from src.common.misc import ensure_dir, reset_terminal, separate_headers -from src.common.output import clear_screen, m_print, phase, print_on_previous_line -from src.common.word_list import eff_wordlist -from src.common.statics import ( - ARGON2_MIN_MEMORY_COST, - ARGON2_MIN_PARALLELISM, - ARGON2_MIN_TIME_COST, - ARGON2_SALT_LENGTH, - BLAKE2_DIGEST_LENGTH, - DIR_USER_DATA, - DONE, - ENCODED_INTEGER_LENGTH, - GENERATE, - MASTERKEY_DB_SIZE, - MAX_KEY_DERIVATION_TIME, - MIN_KEY_DERIVATION_TIME, - PASSWORD_MIN_BIT_STRENGTH, -) +from src.common.input import pwd_prompt +from src.common.misc import ensure_dir, reset_terminal, separate_headers +from src.common.output import clear_screen, m_print, phase, print_on_previous_line +from src.common.word_list import eff_wordlist +from src.common.statics import (ARGON2_MIN_MEMORY_COST, ARGON2_MIN_PARALLELISM, ARGON2_MIN_TIME_COST, + ARGON2_SALT_LENGTH, BLAKE2_DIGEST_LENGTH, DIR_USER_DATA, DONE, + ENCODED_INTEGER_LENGTH, GENERATE, MASTERKEY_DB_SIZE, MAX_KEY_DERIVATION_TIME, + MIN_KEY_DERIVATION_TIME, PASSWORD_MIN_BIT_STRENGTH) class MasterKey(object): @@ -60,10 +49,10 @@ class MasterKey(object): def __init__(self, operation: str, local_test: bool) -> None: """Create a new MasterKey object.""" - self.operation = operation - self.file_name = f"{DIR_USER_DATA}{self.operation}_login_data" - self.database = TFCUnencryptedDatabase(self.file_name) - self.local_test = local_test + self.operation = operation + self.file_name = f'{DIR_USER_DATA}{operation}_login_data' + self.database = TFCUnencryptedDatabase(self.file_name) + self.local_test = local_test self.database_data = None # type: Optional[bytes] ensure_dir(DIR_USER_DATA) @@ -76,20 +65,23 @@ class MasterKey(object): graceful_exit() @staticmethod - def timed_key_derivation( - password: str, salt: bytes, time_cost: int, memory_cost: int, parallelism: int - ) -> Tuple[bytes, float]: + def timed_key_derivation(password: str, + salt: bytes, + time_cost: int, + memory_cost: int, + parallelism: int + ) -> Tuple[bytes, float]: """Derive key and measure its derivation time.""" time_start = time.monotonic() master_key = argon2_kdf(password, salt, time_cost, memory_cost, parallelism) - kd_time = time.monotonic() - time_start + kd_time = time.monotonic() - time_start return master_key, kd_time def get_available_memory(self) -> int: """Return the amount of available memory in the system.""" fields = os.popen("/bin/cat /proc/meminfo").read().splitlines() - field = [f for f in fields if f.startswith("MemAvailable")][0] + field = [f for f in fields if f.startswith("MemAvailable")][0] mem_avail = int(field.split()[1]) if self.local_test: @@ -101,16 +93,16 @@ class MasterKey(object): def generate_master_password() -> Tuple[int, str]: """Generate a strong password using the EFF wordlist.""" word_space = len(eff_wordlist) - sys_rand = random.SystemRandom() + sys_rand = random.SystemRandom() pwd_bit_strength = 0.0 - password_words = [] # type: List[str] + password_words = [] # 
type: List[str] while pwd_bit_strength < PASSWORD_MIN_BIT_STRENGTH: password_words.append(sys_rand.choice(eff_wordlist)) pwd_bit_strength = math.log2(word_space ** len(password_words)) - password = " ".join(password_words) + password = ' '.join(password_words) return int(pwd_bit_strength), password @@ -175,8 +167,8 @@ class MasterKey(object): slow even with GPUs/ASICs/FPGAs, as long as the password is sufficiently strong. """ - password = MasterKey.new_password() - salt = csprng(ARGON2_SALT_LENGTH) + password = MasterKey.new_password() + salt = csprng(ARGON2_SALT_LENGTH) time_cost = ARGON2_MIN_TIME_COST # Determine the amount of memory used from the amount of free RAM in the system. @@ -189,20 +181,15 @@ class MasterKey(object): # Initial key derivation phase("Deriving master key", head=2, offset=0) - master_key, kd_time = self.timed_key_derivation( - password, salt, time_cost, memory_cost, parallelism - ) - phase("", done=True) - print() + master_key, kd_time = self.timed_key_derivation(password, salt, time_cost, memory_cost, parallelism) + phase("", done=True, tail=1) # If derivation was too fast, increase time_cost while kd_time < MIN_KEY_DERIVATION_TIME: print_on_previous_line() phase(f"Trying time cost {time_cost+1}") time_cost += 1 - master_key, kd_time = self.timed_key_derivation( - password, salt, time_cost, memory_cost, parallelism - ) + master_key, kd_time = self.timed_key_derivation(password, salt, time_cost, memory_cost, parallelism) phase(f"{kd_time:.1f}s", done=True) # At this point time_cost may have value of 1 or it may have increased to e.g. 3, which might make it take @@ -218,17 +205,13 @@ class MasterKey(object): lower_bound = ARGON2_MIN_MEMORY_COST upper_bound = memory_cost - while ( - kd_time < MIN_KEY_DERIVATION_TIME or kd_time > MAX_KEY_DERIVATION_TIME - ): + while kd_time < MIN_KEY_DERIVATION_TIME or kd_time > MAX_KEY_DERIVATION_TIME: middle = (lower_bound + upper_bound) // 2 print_on_previous_line() phase(f"Trying memory cost {middle} KiB") - master_key, kd_time = self.timed_key_derivation( - password, salt, time_cost, middle, parallelism - ) + master_key, kd_time = self.timed_key_derivation(password, salt, time_cost, middle, parallelism) phase(f"{kd_time:.1f}s", done=True) # The search might fail e.g. 
if external CPU load causes delay in key derivation, which causes the @@ -251,13 +234,11 @@ class MasterKey(object): memory_cost = middle if middle is not None else memory_cost # Store values to database - database_data = ( - salt - + blake2b(master_key) - + int_to_bytes(time_cost) - + int_to_bytes(memory_cost) - + int_to_bytes(parallelism) - ) + database_data = (salt + + blake2b(master_key) + + int_to_bytes(time_cost) + + int_to_bytes(memory_cost) + + int_to_bytes(parallelism)) if replace: self.database.store_unencrypted_database(database_data) @@ -294,17 +275,11 @@ class MasterKey(object): if len(database_data) != MASTERKEY_DB_SIZE: raise CriticalError(f"Invalid {self.file_name} database size.") - salt, key_hash, time_bytes, memory_bytes, parallelism_bytes = separate_headers( - database_data, - [ - ARGON2_SALT_LENGTH, - BLAKE2_DIGEST_LENGTH, - ENCODED_INTEGER_LENGTH, - ENCODED_INTEGER_LENGTH, - ], - ) + salt, key_hash, time_bytes, memory_bytes, parallelism_bytes \ + = separate_headers(database_data, [ARGON2_SALT_LENGTH, BLAKE2_DIGEST_LENGTH, + ENCODED_INTEGER_LENGTH, ENCODED_INTEGER_LENGTH]) - time_cost = bytes_to_int(time_bytes) + time_cost = bytes_to_int(time_bytes) memory_cost = bytes_to_int(memory_bytes) parallelism = bytes_to_int(parallelism_bytes) @@ -329,20 +304,10 @@ class MasterKey(object): if password_1 == GENERATE: pwd_bit_strength, password_1 = MasterKey.generate_master_password() - m_print( - [ - f"Generated a {pwd_bit_strength}-bit password:", - "", - password_1, - "", - "Write down this password and dispose of the copy once you remember it.", - "Press to continue.", - ], - manual_proceed=True, - box=True, - head=1, - tail=1, - ) + m_print([f"Generated a {pwd_bit_strength}-bit password:", + '', password_1, '', + "Write down this password and dispose of the copy once you remember it.", + "Press to continue."], manual_proceed=True, box=True, head=1, tail=1) reset_terminal() password_2 = password_1 @@ -366,8 +331,6 @@ class MasterKey(object): try: authenticated = self.load_master_key() == self.master_key except (EOFError, KeyboardInterrupt): - raise SoftError( - f"Authentication aborted.", tail_clear=True, head=2, delay=1 - ) + raise SoftError(f"Authentication aborted.", tail_clear=True, head=2, delay=1) return authenticated diff --git a/src/common/db_onion.py b/src/common/db_onion.py index 206204a..10b776c 100644 --- a/src/common/db_onion.py +++ b/src/common/db_onion.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
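`timed_key_derivation` above wraps the Argon2 call in a monotonic-clock measurement so the time and memory costs can be tuned toward a target derivation time. A sketch of the same measurement, assuming the argon2-cffi package (TFC's `argon2_kdf` wrapper may choose different parameters):

    import time
    from typing import Tuple

    from argon2.low_level import Type, hash_secret_raw   # pip install argon2-cffi

    def timed_kdf(password: str, salt: bytes, time_cost: int,
                  memory_cost: int, parallelism: int) -> Tuple[bytes, float]:
        """Derive a 32-byte key and measure how long the derivation took."""
        start = time.monotonic()
        key = hash_secret_raw(secret=password.encode(), salt=salt,
                              time_cost=time_cost, memory_cost=memory_cost,  # memory in KiB
                              parallelism=parallelism, hash_len=32, type=Type.ID)
        return key, time.monotonic() - start

    key, kd_time = timed_kdf("correct horse battery staple", b"\x00" * 16, 3, 65536, 4)
    print(f"{kd_time:.1f}s")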
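`generate_master_password` above keeps appending words drawn from the EFF wordlist until the passphrase reaches the minimum bit strength; an n-word passphrase over a w-word list carries log2(w**n) bits. A standalone sketch with a placeholder word list (the real 7776-word list is src.common.word_list.eff_wordlist):

    import math
    import random

    WORD_SPACE   = 7776                 # size of the EFF large wordlist
    MIN_STRENGTH = 128                  # example minimum bit strength

    sys_rand = random.SystemRandom()    # OS-backed CSPRNG, as in the code above
    wordlist = [f"word{i:04}" for i in range(WORD_SPACE)]   # placeholder words

    password_words   = []               # type: list
    pwd_bit_strength = 0.0
    while pwd_bit_strength < MIN_STRENGTH:
        password_words.append(sys_rand.choice(wordlist))
        pwd_bit_strength = math.log2(WORD_SPACE ** len(password_words))  # ~12.9 bits per word

    print(int(pwd_bit_strength), " ".join(password_words))  # 10 words ~ 129 bits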
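The login database assembled above holds no key material, only the salt, a BLAKE2b digest of the derived master key, and the three Argon2 cost parameters; authentication re-derives a candidate key from the entered password and compares digests. A self-contained sketch of that layout and check, with example field lengths and a toy KDF standing in for Argon2:

    import hashlib
    import hmac
    import struct
    from typing import Callable

    SALT_LEN, DIGEST_LEN = 32, 32       # example field lengths

    def serialize_login_data(salt: bytes, master_key: bytes,
                             time_cost: int, memory_cost: int, parallelism: int) -> bytes:
        """Store the salt, a hash of the key, and the Argon2 parameters."""
        return (salt
                + hashlib.blake2b(master_key, digest_size=DIGEST_LEN).digest()
                + struct.pack("!QQQ", time_cost, memory_cost, parallelism))

    def verify_password(data: bytes, password: str,
                        kdf: Callable[[str, bytes, int, int, int], bytes]) -> bool:
        """Re-derive the key with the stored parameters and compare digests."""
        salt      = data[:SALT_LEN]
        key_hash  = data[SALT_LEN:SALT_LEN + DIGEST_LEN]
        t, m, p   = struct.unpack("!QQQ", data[SALT_LEN + DIGEST_LEN:])
        candidate = hashlib.blake2b(kdf(password, salt, t, m, p),
                                    digest_size=DIGEST_LEN).digest()
        return hmac.compare_digest(candidate, key_hash)    # constant-time compare

    toy_kdf = lambda pw, salt, t, m, p: hashlib.blake2b(pw.encode() + salt).digest()
    blob = serialize_login_data(b"\x00" * SALT_LEN,
                                toy_kdf("hunter2", b"\x00" * SALT_LEN, 3, 65536, 4),
                                3, 65536, 4)
    print(verify_password(blob, "hunter2", toy_kdf))        # True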
@@ -24,19 +24,13 @@ import typing import nacl.signing -from src.common.crypto import csprng -from src.common.database import TFCDatabase -from src.common.encoding import pub_key_to_onion_address, pub_key_to_short_address +from src.common.crypto import csprng +from src.common.database import TFCDatabase +from src.common.encoding import pub_key_to_onion_address, pub_key_to_short_address from src.common.exceptions import CriticalError -from src.common.misc import ensure_dir -from src.common.output import phase -from src.common.statics import ( - CONFIRM_CODE_LENGTH, - DIR_USER_DATA, - DONE, - ONION_SERVICE_PRIVATE_KEY_LENGTH, - TX, -) +from src.common.misc import ensure_dir +from src.common.output import phase +from src.common.statics import CONFIRM_CODE_LENGTH, DIR_USER_DATA, DONE, ONION_SERVICE_PRIVATE_KEY_LENGTH, TX if typing.TYPE_CHECKING: from src.common.db_masterkey import MasterKey @@ -59,13 +53,13 @@ class OnionService(object): anyway. """ - def __init__(self, master_key: "MasterKey") -> None: + def __init__(self, master_key: 'MasterKey') -> None: """Create a new OnionService object.""" - self.master_key = master_key - self.file_name = f"{DIR_USER_DATA}{TX}_onion_db" - self.database = TFCDatabase(self.file_name, self.master_key) + self.master_key = master_key + self.file_name = f'{DIR_USER_DATA}{TX}_onion_db' + self.database = TFCDatabase(self.file_name, self.master_key) self.is_delivered = False - self.conf_code = csprng(CONFIRM_CODE_LENGTH) + self.conf_code = csprng(CONFIRM_CODE_LENGTH) ensure_dir(DIR_USER_DATA) if os.path.isfile(self.file_name): @@ -74,9 +68,7 @@ class OnionService(object): self.onion_private_key = self.new_onion_service_private_key() self.store_onion_service_private_key() - self.public_key = bytes( - nacl.signing.SigningKey(seed=self.onion_private_key).verify_key - ) + self.public_key = bytes(nacl.signing.SigningKey(seed=self.onion_private_key).verify_key) self.user_onion_address = pub_key_to_onion_address(self.public_key) self.user_short_address = pub_key_to_short_address(self.public_key) diff --git a/src/common/db_settings.py b/src/common/db_settings.py index 988ed96..ce4757c 100755 --- a/src/common/db_settings.py +++ b/src/common/db_settings.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
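# --- Illustration (not part of the patch): how the OnionService object above
# turns its 32-byte private key seed into the user-facing v3 Onion address
# (public_key via nacl.signing, then pub_key_to_onion_address() from the
# encoding.py hunks further below). The three constants follow Tor's
# rend-spec-v3; their values here are assumptions mirroring src.common.statics.
import base64
import hashlib
import os

import nacl.signing  # PyNaCl, a TFC dependency

ONION_ADDRESS_CHECKSUM_ID     = b".onion checksum"  # assumed
ONION_SERVICE_VERSION         = b"\x03"             # assumed
ONION_ADDRESS_CHECKSUM_LENGTH = 2                   # assumed

def onion_address_from_seed(private_key_seed: bytes) -> str:
    """Derive the 56-char v3 Onion address (without '.onion') from an Ed25519 seed."""
    public_key = bytes(nacl.signing.SigningKey(seed=private_key_seed).verify_key)
    checksum   = hashlib.sha3_256(ONION_ADDRESS_CHECKSUM_ID
                                  + public_key
                                  + ONION_SERVICE_VERSION
                                  ).digest()[:ONION_ADDRESS_CHECKSUM_LENGTH]
    return base64.b32encode(public_key + checksum + ONION_SERVICE_VERSION).lower().decode()

print(onion_address_from_seed(os.urandom(32)))  # 56 lowercase base32 characters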
@@ -25,29 +25,22 @@ import typing from typing import Union -from src.common.database import TFCDatabase -from src.common.encoding import bool_to_bytes, double_to_bytes, int_to_bytes -from src.common.encoding import bytes_to_bool, bytes_to_double, bytes_to_int +from src.common.database import TFCDatabase +from src.common.encoding import bool_to_bytes, double_to_bytes, int_to_bytes +from src.common.encoding import bytes_to_bool, bytes_to_double, bytes_to_int from src.common.exceptions import CriticalError, SoftError -from src.common.input import yes -from src.common.misc import ensure_dir, get_terminal_width, round_up -from src.common.output import clear_screen, m_print -from src.common.statics import ( - DIR_USER_DATA, - ENCODED_BOOLEAN_LENGTH, - ENCODED_FLOAT_LENGTH, - ENCODED_INTEGER_LENGTH, - MAX_INT, - SETTINGS_INDENT, - TRAFFIC_MASKING_MIN_RANDOM_DELAY, - TRAFFIC_MASKING_MIN_STATIC_DELAY, - TX, -) +from src.common.input import yes +from src.common.misc import ensure_dir, get_terminal_width, round_up +from src.common.output import clear_screen, m_print +from src.common.statics import (DIR_USER_DATA, ENCODED_BOOLEAN_LENGTH, ENCODED_FLOAT_LENGTH, ENCODED_INTEGER_LENGTH, + MAX_INT, SETTINGS_INDENT, TRAFFIC_MASKING_MIN_RANDOM_DELAY, + TRAFFIC_MASKING_MIN_STATIC_DELAY, TX) if typing.TYPE_CHECKING: - from src.common.db_contacts import ContactList - from src.common.db_groups import GroupList + from src.common.db_contacts import ContactList + from src.common.db_groups import GroupList from src.common.db_masterkey import MasterKey + SettingType = Union[int, float, bool] class Settings(object): @@ -56,12 +49,11 @@ class Settings(object): related to serial interface) under an encrypted database. """ - def __init__( - self, - master_key: "MasterKey", # MasterKey object - operation: str, # Operation mode of the program (Tx or Rx) - local_test: bool, # Local testing setting from command-line argument - ) -> None: + def __init__(self, + master_key: 'MasterKey', # MasterKey object + operation: str, # Operation mode of the program (Tx or Rx) + local_test: bool, # Local testing setting from command-line argument + ) -> None: """Create a new Settings object. The settings below are defaults, and are only to be altered from @@ -70,41 +62,41 @@ class Settings(object): are loaded when the program starts. 
""" # Common settings - self.disable_gui_dialog = False - self.max_number_of_group_members = 50 - self.max_number_of_groups = 50 - self.max_number_of_contacts = 50 - self.log_messages_by_default = False - self.accept_files_by_default = False + self.disable_gui_dialog = False + self.max_number_of_group_members = 50 + self.max_number_of_groups = 50 + self.max_number_of_contacts = 50 + self.log_messages_by_default = False + self.accept_files_by_default = False self.show_notifications_by_default = True - self.log_file_masking = False - self.ask_password_for_log_access = True + self.log_file_masking = False + self.ask_password_for_log_access = True # Transmitter settings self.nc_bypass_messages = False self.confirm_sent_files = True self.double_space_exits = False - self.traffic_masking = False - self.tm_static_delay = 2.0 - self.tm_random_delay = 2.0 + self.traffic_masking = False + self.tm_static_delay = 2.0 + self.tm_random_delay = 2.0 # Relay Settings self.allow_contact_requests = True # Receiver settings - self.new_message_notify_preview = False + self.new_message_notify_preview = False self.new_message_notify_duration = 1.0 - self.max_decompress_size = 100_000_000 + self.max_decompress_size = 100_000_000 - self.master_key = master_key + self.master_key = master_key self.software_operation = operation self.local_testing_mode = local_test - self.file_name = f"{DIR_USER_DATA}{operation}_settings" - self.database = TFCDatabase(self.file_name, master_key) + self.file_name = f'{DIR_USER_DATA}{operation}_settings' + self.database = TFCDatabase(self.file_name, master_key) self.all_keys = list(vars(self).keys()) - self.key_list = self.all_keys[: self.all_keys.index("master_key")] + self.key_list = self.all_keys[:self.all_keys.index('master_key')] self.defaults = {k: self.__dict__[k] for k in self.key_list} ensure_dir(DIR_USER_DATA) @@ -132,7 +124,7 @@ class Settings(object): else: raise CriticalError("Invalid attribute type in settings.") - pt_bytes = b"".join(bytes_lst) + pt_bytes = b''.join(bytes_lst) self.database.store_database(pt_bytes, replace) def load_settings(self) -> None: @@ -145,15 +137,15 @@ class Settings(object): attribute = self.__getattribute__(key) if isinstance(attribute, bool): - value = bytes_to_bool(pt_bytes[0]) # type: Union[bool, int, float] + value = bytes_to_bool(pt_bytes[0]) # type: Union[bool, int, float] pt_bytes = pt_bytes[ENCODED_BOOLEAN_LENGTH:] elif isinstance(attribute, int): - value = bytes_to_int(pt_bytes[:ENCODED_INTEGER_LENGTH]) + value = bytes_to_int(pt_bytes[:ENCODED_INTEGER_LENGTH]) pt_bytes = pt_bytes[ENCODED_INTEGER_LENGTH:] elif isinstance(attribute, float): - value = bytes_to_double(pt_bytes[:ENCODED_FLOAT_LENGTH]) + value = bytes_to_double(pt_bytes[:ENCODED_FLOAT_LENGTH]) pt_bytes = pt_bytes[ENCODED_FLOAT_LENGTH:] else: @@ -161,21 +153,18 @@ class Settings(object): setattr(self, key, value) - def change_setting( - self, - key: str, # Name of the setting - value_str: str, # Value of the setting - contact_list: "ContactList", - group_list: "GroupList", - ) -> None: + def change_setting(self, + key: str, # Name of the setting + value_str: str, # Value of the setting + contact_list: 'ContactList', + group_list: 'GroupList' + ) -> None: """Parse, update and store new setting value.""" attribute = self.__getattribute__(key) try: if isinstance(attribute, bool): - value = dict(true=True, false=False)[ - value_str.lower() - ] # type: Union[bool, int, float] + value = dict(true=True, false=False)[value_str.lower()] # type: Union[bool, int, float] elif 
isinstance(attribute, int): value = int(value_str) @@ -191,9 +180,7 @@ class Settings(object): raise CriticalError("Invalid attribute type in settings.") except (KeyError, ValueError): - raise SoftError( - f"Error: Invalid setting value '{value_str}'.", head_clear=True - ) + raise SoftError(f"Error: Invalid setting value '{value_str}'.", head_clear=True) self.validate_key_value_pair(key, value, contact_list, group_list) @@ -201,120 +188,84 @@ class Settings(object): self.store_settings() @staticmethod - def validate_key_value_pair( - key: str, # Name of the setting - value: Union[int, float, bool], # Value of the setting - contact_list: "ContactList", # ContactList object - group_list: "GroupList", # GroupList object - ) -> None: + def validate_key_value_pair(key: str, # Name of the setting + value: 'SettingType', # Value of the setting + contact_list: 'ContactList', # ContactList object + group_list: 'GroupList', # GroupList object + ) -> None: """Evaluate values for settings that have further restrictions.""" Settings.validate_database_limit(key, value) - Settings.validate_max_number_of_group_members(key, value, group_list) - Settings.validate_max_number_of_groups(key, value, group_list) - Settings.validate_max_number_of_contacts(key, value, contact_list) - Settings.validate_new_message_notify_duration(key, value) - Settings.validate_traffic_maskig_delay(key, value, contact_list) @staticmethod - def validate_database_limit(key: str, value: Union[int, float, bool]) -> None: + def validate_database_limit(key: str, value: 'SettingType') -> None: """Validate setting values for database entry limits.""" - if key in [ - "max_number_of_group_members", - "max_number_of_groups", - "max_number_of_contacts", - ]: + if key in ["max_number_of_group_members", "max_number_of_groups", "max_number_of_contacts"]: if value % 10 != 0 or value == 0: - raise SoftError( - "Error: Database padding settings must be divisible by 10.", - head_clear=True, - ) + raise SoftError("Error: Database padding settings must be divisible by 10.", head_clear=True) @staticmethod - def validate_max_number_of_group_members( - key: str, value: Union[int, float, bool], group_list: "GroupList" - ) -> None: + def validate_max_number_of_group_members(key: str, + value: 'SettingType', + group_list: 'GroupList' + ) -> None: """Validate setting value for maximum number of group members.""" if key == "max_number_of_group_members": min_size = round_up(group_list.largest_group()) if value < min_size: - raise SoftError( - f"Error: Can't set the max number of members lower than {min_size}.", - head_clear=True, - ) + raise SoftError(f"Error: Can't set the max number of members lower than {min_size}.", head_clear=True) @staticmethod - def validate_max_number_of_groups( - key: str, value: Union[int, float, bool], group_list: "GroupList" - ) -> None: + def validate_max_number_of_groups(key: str, + value: 'SettingType', + group_list: 'GroupList' + ) -> None: """Validate setting value for maximum number of groups.""" if key == "max_number_of_groups": min_size = round_up(len(group_list)) if value < min_size: - raise SoftError( - f"Error: Can't set the max number of groups lower than {min_size}.", - head_clear=True, - ) + raise SoftError(f"Error: Can't set the max number of groups lower than {min_size}.", head_clear=True) @staticmethod - def validate_max_number_of_contacts( - key: str, value: Union[int, float, bool], contact_list: "ContactList" - ) -> None: + def validate_max_number_of_contacts(key: str, + value: 'SettingType', + contact_list: 
'ContactList' + ) -> None: """Validate setting value for maximum number of contacts.""" if key == "max_number_of_contacts": min_size = round_up(len(contact_list)) if value < min_size: - raise SoftError( - f"Error: Can't set the max number of contacts lower than {min_size}.", - head_clear=True, - ) + raise SoftError(f"Error: Can't set the max number of contacts lower than {min_size}.", head_clear=True) @staticmethod - def validate_new_message_notify_duration( - key: str, value: Union[int, float, bool] - ) -> None: + def validate_new_message_notify_duration(key: str, value: 'SettingType') -> None: """Validate setting value for duration of new message notification.""" if key == "new_message_notify_duration" and value < 0.05: - raise SoftError( - "Error: Too small value for message notify duration.", head_clear=True - ) + raise SoftError("Error: Too small value for message notify duration.", head_clear=True) @staticmethod - def validate_traffic_maskig_delay( - key: str, value: Union[int, float, bool], contact_list: "ContactList" - ) -> None: + def validate_traffic_maskig_delay(key: str, + value: 'SettingType', + contact_list: 'ContactList' + ) -> None: """Validate setting value for traffic masking delays.""" if key in ["tm_static_delay", "tm_random_delay"]: - for key_, name, min_setting in [ - ("tm_static_delay", "static", TRAFFIC_MASKING_MIN_STATIC_DELAY), - ("tm_random_delay", "random", TRAFFIC_MASKING_MIN_RANDOM_DELAY), - ]: + for key_, name, min_setting in [("tm_static_delay", "static", TRAFFIC_MASKING_MIN_STATIC_DELAY), + ("tm_random_delay", "random", TRAFFIC_MASKING_MIN_RANDOM_DELAY)]: if key == key_ and value < min_setting: - raise SoftError( - f"Error: Can't set {name} delay lower than {min_setting}.", - head_clear=True, - ) + raise SoftError(f"Error: Can't set {name} delay lower than {min_setting}.", head_clear=True) if contact_list.settings.software_operation == TX: - m_print( - [ - "WARNING!", - "Changing traffic masking delay can make your endpoint and traffic look unique!", - ], - bold=True, - head=1, - tail=1, - ) + m_print(["WARNING!", "Changing traffic masking delay can make your endpoint and traffic look unique!"], + bold=True, head=1, tail=1) if not yes("Proceed anyway?"): - raise SoftError( - "Aborted traffic masking setting change.", head_clear=True - ) + raise SoftError("Aborted traffic masking setting change.", head_clear=True) m_print("Traffic masking setting will change on restart.", head=1, tail=1) @@ -325,37 +276,39 @@ class Settings(object): """ desc_d = { # Common settings - "disable_gui_dialog": "True replaces GUI dialogs with CLI prompts", - "max_number_of_group_members": "Maximum number of members in a group", - "max_number_of_groups": "Maximum number of groups", - "max_number_of_contacts": "Maximum number of contacts", - "log_messages_by_default": "Default logging setting for new contacts/groups", - "accept_files_by_default": "Default file reception setting for new contacts", + "disable_gui_dialog": "True replaces GUI dialogs with CLI prompts", + "max_number_of_group_members": "Maximum number of members in a group", + "max_number_of_groups": "Maximum number of groups", + "max_number_of_contacts": "Maximum number of contacts", + "log_messages_by_default": "Default logging setting for new contacts/groups", + "accept_files_by_default": "Default file reception setting for new contacts", "show_notifications_by_default": "Default message notification setting for new contacts/groups", - "log_file_masking": "True hides real size of log file during traffic masking", - 
"ask_password_for_log_access": "False disables password prompt when viewing/exporting logs", + "log_file_masking": "True hides real size of log file during traffic masking", + "ask_password_for_log_access": "False disables password prompt when viewing/exporting logs", + # Transmitter settings - "nc_bypass_messages": "False removes Networked Computer bypass interrupt messages", - "confirm_sent_files": "False sends files without asking for confirmation", - "double_space_exits": "True exits, False clears screen with double space command", - "traffic_masking": "True enables traffic masking to hide metadata", - "tm_static_delay": "The static delay between traffic masking packets", - "tm_random_delay": "Max random delay for traffic masking timing obfuscation", + "nc_bypass_messages": "False removes Networked Computer bypass interrupt messages", + "confirm_sent_files": "False sends files without asking for confirmation", + "double_space_exits": "True exits, False clears screen with double space command", + "traffic_masking": "True enables traffic masking to hide metadata", + "tm_static_delay": "The static delay between traffic masking packets", + "tm_random_delay": "Max random delay for traffic masking timing obfuscation", + # Relay settings - "allow_contact_requests": "When False, does not show TFC contact requests", + "allow_contact_requests": "When False, does not show TFC contact requests", + # Receiver settings - "new_message_notify_preview": "When True, shows a preview of the received message", - "new_message_notify_duration": "Number of seconds new message notification appears", - "max_decompress_size": "Max size Receiver accepts when decompressing file", - } + "new_message_notify_preview": "When True, shows a preview of the received message", + "new_message_notify_duration": "Number of seconds new message notification appears", + "max_decompress_size": "Max size Receiver accepts when decompressing file"} # Columns - c1 = ["Setting name"] - c2 = ["Current value"] - c3 = ["Default value"] - c4 = ["Description"] + c1 = ['Setting name'] + c2 = ['Current value'] + c3 = ['Default value'] + c4 = ['Description'] - terminal_width = get_terminal_width() + terminal_width = get_terminal_width() description_indent = 64 if terminal_width < description_indent + 1: @@ -368,34 +321,27 @@ class Settings(object): c3.append(str(self.defaults[key])) description = desc_d[key] - wrapper = textwrap.TextWrapper( - width=max(1, (terminal_width - description_indent)) - ) - desc_lines = wrapper.fill(description).split("\n") + wrapper = textwrap.TextWrapper(width=max(1, (terminal_width - description_indent))) + desc_lines = wrapper.fill(description).split('\n') desc_string = desc_lines[0] for line in desc_lines[1:]: - desc_string += "\n" + description_indent * " " + line + desc_string += '\n' + description_indent * ' ' + line if len(desc_lines) > 1: - desc_string += "\n" + desc_string += '\n' c4.append(desc_string) # Calculate column widths - c1w, c2w, c3w = [ - max(len(v) for v in column) + SETTINGS_INDENT for column in [c1, c2, c3] - ] + c1w, c2w, c3w = [max(len(v) for v in column) + SETTINGS_INDENT for column in [c1, c2, c3]] # Align columns by adding whitespace between fields of each line - lines = [ - f"{f1:{c1w}} {f2:{c2w}} {f3:{c3w}} {f4}" - for f1, f2, f3, f4 in zip(c1, c2, c3, c4) - ] + lines = [f'{f1:{c1w}} {f2:{c2w}} {f3:{c3w}} {f4}' for f1, f2, f3, f4 in zip(c1, c2, c3, c4)] # Add a terminal-wide line between the column names and the data - lines.insert(1, get_terminal_width() * "─") + lines.insert(1, 
get_terminal_width() * '─') # Print the settings clear_screen() - print("\n" + "\n".join(lines)) + print('\n' + '\n'.join(lines)) diff --git a/src/common/encoding.py b/src/common/encoding.py index 3f5e28a..85ea673 100755 --- a/src/common/encoding.py +++ b/src/common/encoding.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -24,25 +24,18 @@ import hashlib import struct from datetime import datetime -from typing import List, Union +from typing import List, Union -from src.common.statics import ( - B58_ALPHABET, - B58_CHECKSUM_LENGTH, - MAINNET_HEADER, - ONION_ADDRESS_CHECKSUM_ID, - ONION_ADDRESS_CHECKSUM_LENGTH, - ONION_SERVICE_VERSION, - ONION_SERVICE_VERSION_LENGTH, - PADDING_LENGTH, - TESTNET_HEADER, - TRUNC_ADDRESS_LENGTH, -) +from src.common.statics import (B58_ALPHABET, B58_CHECKSUM_LENGTH, MAINNET_HEADER, ONION_ADDRESS_CHECKSUM_ID, + ONION_ADDRESS_CHECKSUM_LENGTH, ONION_SERVICE_VERSION, ONION_SERVICE_VERSION_LENGTH, + PADDING_LENGTH, TESTNET_HEADER, TRUNC_ADDRESS_LENGTH) def sha256d(message: bytes) -> bytes: """Chain SHA256 twice for Bitcoin WIF format.""" - return hashlib.sha256(hashlib.sha256(message).digest()).digest() + return hashlib.sha256( + hashlib.sha256(message).digest() + ).digest() def b58encode(byte_string: bytes, public_key: bool = False) -> str: @@ -52,20 +45,20 @@ def b58encode(byte_string: bytes, public_key: bool = False) -> str: (WIF) for mainnet and testnet addresses. https://en.bitcoin.it/wiki/Wallet_import_format """ - net_id = TESTNET_HEADER if public_key else MAINNET_HEADER - byte_string = net_id + byte_string + net_id = TESTNET_HEADER if public_key else MAINNET_HEADER + byte_string = net_id + byte_string byte_string += sha256d(byte_string)[:B58_CHECKSUM_LENGTH] original_len = len(byte_string) - byte_string = byte_string.lstrip(b"\x00") - new_len = len(byte_string) + byte_string = byte_string.lstrip(b'\x00') + new_len = len(byte_string) p, acc = 1, 0 for byte in bytearray(byte_string[::-1]): acc += p * byte - p *= 256 + p *= 256 - encoded = "" + encoded = '' while acc > 0: acc, mod = divmod(acc, 58) encoded += B58_ALPHABET[mod] @@ -75,35 +68,30 @@ def b58encode(byte_string: bytes, public_key: bool = False) -> str: def b58decode(string: str, public_key: bool = False) -> bytes: """Decode a Base58-encoded string and verify the checksum.""" - net_id = TESTNET_HEADER if public_key else MAINNET_HEADER + net_id = TESTNET_HEADER if public_key else MAINNET_HEADER orig_len = len(string) - string = string.lstrip(B58_ALPHABET[0]) - new_len = len(string) + string = string.lstrip(B58_ALPHABET[0]) + new_len = len(string) p, acc = 1, 0 for c in string[::-1]: acc += p * B58_ALPHABET.index(c) - p *= 58 + p *= 58 decoded = [] while acc > 0: acc, mod = divmod(acc, 256) decoded.append(mod) - decoded_ = (bytes(decoded) + (orig_len - new_len) * b"\x00")[ - ::-1 - ] # type: Union[bytes, List[int]] + decoded_ = (bytes(decoded) + (orig_len - new_len) * b'\x00')[::-1] # type: Union[bytes, List[int]] - if ( - sha256d(bytes(decoded_[:-B58_CHECKSUM_LENGTH]))[:B58_CHECKSUM_LENGTH] - != decoded_[-B58_CHECKSUM_LENGTH:] - ): + if sha256d(bytes(decoded_[:-B58_CHECKSUM_LENGTH]))[:B58_CHECKSUM_LENGTH] != decoded_[-B58_CHECKSUM_LENGTH:]: raise ValueError - if decoded_[: len(net_id)] != net_id: + if decoded_[:len(net_id)] != net_id: raise ValueError - return bytes(decoded_[len(net_id) : -B58_CHECKSUM_LENGTH]) + return bytes(decoded_[len(net_id):-B58_CHECKSUM_LENGTH]) 
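# --- Illustration (not part of the patch): a round-trip of the Base58 codec in
# the hunk above, run from the repository root. Because of the WIF framing (one
# net-ID byte plus a 4-byte double-SHA256 checksum), a mistyped character should
# surface as ValueError on decode instead of yielding a silently wrong key.
import os

from src.common.encoding import b58decode, b58encode

key     = os.urandom(32)
encoded = b58encode(key)          # WIF-style string (mainnet header)
assert b58decode(encoded) == key  # checksum verified during decode

tampered = ('2' if encoded[0] == '3' else '3') + encoded[1:]
try:
    b58decode(tampered)
except ValueError:
    print("Checksum caught the tampered character.")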
def b85encode(data: bytes) -> str: @@ -145,7 +133,6 @@ def b10encode(fingerprint: bytes) -> str: # Database unicode string padding - def unicode_padding(string: str) -> str: """Pad Unicode string to 255 chars. @@ -157,7 +144,7 @@ def unicode_padding(string: str) -> str: if len(string) >= PADDING_LENGTH: raise CriticalError("Invalid input size.") - length = PADDING_LENGTH - (len(string) % PADDING_LENGTH) + length = PADDING_LENGTH - (len(string) % PADDING_LENGTH) string += length * chr(length) if len(string) != PADDING_LENGTH: # pragma: no cover @@ -168,21 +155,18 @@ def unicode_padding(string: str) -> str: def rm_padding_str(string: str) -> str: """Remove padding from plaintext.""" - return string[: -ord(string[-1:])] + return string[:-ord(string[-1:])] # Database constant length encoding - def onion_address_to_pub_key(account: str) -> bytes: """Encode TFC account to a public key byte string. The public key is the most compact possible representation of a TFC account, so it is useful when storing the address into databases. """ - return base64.b32decode(account.upper())[ - : -(ONION_ADDRESS_CHECKSUM_LENGTH + ONION_SERVICE_VERSION_LENGTH) - ] + return base64.b32decode(account.upper())[:-(ONION_ADDRESS_CHECKSUM_LENGTH + ONION_SERVICE_VERSION_LENGTH)] def bool_to_bytes(boolean: bool) -> bytes: @@ -192,12 +176,12 @@ def bool_to_bytes(boolean: bool) -> bytes: def int_to_bytes(integer: int) -> bytes: """Convert integer to an 8-byte byte string.""" - return struct.pack("!Q", integer) + return struct.pack('!Q', integer) def double_to_bytes(double_: float) -> bytes: """Convert double to an 8-byte byte string.""" - return struct.pack("d", double_) + return struct.pack('d', double_) def str_to_bytes(string: str) -> bytes: @@ -205,12 +189,11 @@ def str_to_bytes(string: str) -> bytes: Length of padded string is 255 * 4 + 4 (BOM) = 1024 bytes. """ - return unicode_padding(string).encode("utf-32") + return unicode_padding(string).encode('utf-32') # Decoding - def pub_key_to_onion_address(public_key: bytes) -> str: """Decode public key byte string to TFC account. @@ -218,13 +201,12 @@ def pub_key_to_onion_address(public_key: bytes) -> str: public key of v3 Onion Service into service ID: https://gitweb.torproject.org/torspec.git/tree/rend-spec-v3.txt#n2019 """ - checksum = hashlib.sha3_256( - ONION_ADDRESS_CHECKSUM_ID + public_key + ONION_SERVICE_VERSION - ).digest()[:ONION_ADDRESS_CHECKSUM_LENGTH] + checksum = hashlib.sha3_256(ONION_ADDRESS_CHECKSUM_ID + + public_key + + ONION_SERVICE_VERSION + ).digest()[:ONION_ADDRESS_CHECKSUM_LENGTH] - return ( - base64.b32encode(public_key + checksum + ONION_SERVICE_VERSION).lower().decode() - ) + return base64.b32encode(public_key + checksum + ONION_SERVICE_VERSION).lower().decode() def pub_key_to_short_address(public_key: bytes) -> str: @@ -241,13 +223,13 @@ def bytes_to_bool(byte_string: Union[bytes, int]) -> bool: def bytes_to_int(byte_string: bytes) -> int: """Convert 8-byte byte string to an integer.""" - int_format = struct.unpack("!Q", byte_string)[0] # type: int + int_format = struct.unpack('!Q', byte_string)[0] # type: int return int_format def bytes_to_double(byte_string: bytes) -> float: """Convert 8-byte byte string to double.""" - float_format = struct.unpack("d", byte_string)[0] # type: float + float_format = struct.unpack('d', byte_string)[0] # type: float return float_format @@ -256,9 +238,9 @@ def bytes_to_str(byte_string: bytes) -> str: Decode byte string with UTF-32 and remove Unicode padding. 
""" - return rm_padding_str(byte_string.decode("utf-32")) + return rm_padding_str(byte_string.decode('utf-32')) def bytes_to_timestamp(byte_string: bytes) -> datetime: """Covert 4-byte byte string to datetime object.""" - return datetime.fromtimestamp(struct.unpack(" None: """A severe exception that requires TFC to gracefully exit.""" - graceful_exit( - f"Critical error in function '{inspect.stack()[1][3]}':\n{error_message}", - clear=False, - exit_code=exit_code, - ) + graceful_exit(f"Critical error in function '{inspect.stack()[1][3]}':\n{error_message}", + clear=False, exit_code=exit_code) class SoftError(Exception): @@ -52,48 +49,44 @@ class SoftError(Exception): and returns to the exception handler function. """ - def __init__( - self, - message: str, - window: Optional["RxWindow"] = None, # The window to include the message in - output: bool = True, # When False, doesn't print message when adding it to window - bold: bool = False, # When True, prints the message in bold - head_clear: bool = False, # When True, clears the screen before printing message - tail_clear: bool = False, # When True, clears the screen after message (needs delay) - delay: float = 0, # The delay before continuing - head: int = 1, # The number of new-lines to print before the message - tail: int = 1, # The number of new-lines to print after message - ts: Optional["datetime"] = None, # Datetime object - ) -> None: + def __init__(self, + message: str, + window: Optional['RxWindow'] = None, # The window to include the message in + output: bool = True, # When False, doesn't print message when adding it to window + bold: bool = False, # When True, prints the message in bold + head_clear: bool = False, # When True, clears the screen before printing message + tail_clear: bool = False, # When True, clears the screen after message (needs delay) + delay: float = 0, # The delay before continuing + head: int = 1, # The number of new-lines to print before the message + tail: int = 1, # The number of new-lines to print after message + ts: Optional['datetime'] = None # Datetime object + ) -> None: """Print return message and return to exception handler function.""" self.message = message if window is None: if output: - m_print( - self.message, - bold=bold, - head_clear=head_clear, - tail_clear=tail_clear, - delay=delay, - head=head, - tail=tail, - ) + m_print(self.message, + bold=bold, + head_clear=head_clear, + tail_clear=tail_clear, + delay=delay, + head=head, + tail=tail) else: ts = datetime.now() if ts is None else ts window.add_new(ts, self.message, output=output) -def graceful_exit( - message: str = "", # Exit message to print - clear: bool = True, # When False, does not clear screen before printing message - exit_code: int = 0, # Value returned to parent process -) -> None: +def graceful_exit(message: str = '', # Exit message to print + clear: bool = True, # When False, does not clear screen before printing message + exit_code: int = 0 # Value returned to parent process + ) -> None: """Display a message and exit TFC.""" if clear: clear_screen() if message: - print("\n" + message) + print('\n' + message) print(f"\nExiting {TFC}.\n") sys.exit(exit_code) diff --git a/src/common/gateway.py b/src/common/gateway.py index 9cf4446..48e9060 100644 --- a/src/common/gateway.py +++ b/src/common/gateway.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
@@ -31,55 +31,30 @@ import time import typing from datetime import datetime -from typing import Any, Dict, Optional, Tuple, Union +from typing import Any, Dict, Optional, Tuple, Union from serial.serialutil import SerialException -from src.common.exceptions import CriticalError, graceful_exit, SoftError -from src.common.input import yes -from src.common.misc import ( - calculate_race_condition_delay, - ensure_dir, - ignored, - get_terminal_width, -) -from src.common.misc import separate_trailer -from src.common.output import m_print, phase, print_on_previous_line +from src.common.exceptions import CriticalError, graceful_exit, SoftError +from src.common.input import yes +from src.common.misc import calculate_race_condition_delay, ensure_dir, ignored, get_terminal_width +from src.common.misc import separate_trailer +from src.common.output import m_print, phase, print_on_previous_line from src.common.reed_solomon import ReedSolomonError, RSCodec -from src.common.statics import ( - BAUDS_PER_BYTE, - DIR_USER_DATA, - DONE, - DST_DD_LISTEN_SOCKET, - DST_LISTEN_SOCKET, - GATEWAY_QUEUE, - LOCALHOST, - LOCAL_TESTING_PACKET_DELAY, - MAX_INT, - NC, - PACKET_CHECKSUM_LENGTH, - RECEIVER, - RELAY, - RP_LISTEN_SOCKET, - RX, - SERIAL_RX_MIN_TIMEOUT, - SETTINGS_INDENT, - SRC_DD_LISTEN_SOCKET, - TRANSMITTER, - TX, -) +from src.common.statics import (BAUDS_PER_BYTE, DIR_USER_DATA, DONE, DST_DD_LISTEN_SOCKET, DST_LISTEN_SOCKET, + GATEWAY_QUEUE, LOCALHOST, LOCAL_TESTING_PACKET_DELAY, MAX_INT, NC, + PACKET_CHECKSUM_LENGTH, RECEIVER, RELAY, RP_LISTEN_SOCKET, RX, + SERIAL_RX_MIN_TIMEOUT, SETTINGS_INDENT, SRC_DD_LISTEN_SOCKET, TRANSMITTER, TX) if typing.TYPE_CHECKING: from multiprocessing import Queue - JSONDict = Dict[str, Union[int, bool, str]] -def gateway_loop( - queues: Dict[bytes, "Queue[Tuple[datetime, bytes]]"], - gateway: "Gateway", - unit_test: bool = False, -) -> None: +def gateway_loop(queues: Dict[bytes, 'Queue[Tuple[datetime, bytes]]'], + gateway: 'Gateway', + unit_test: bool = False + ) -> None: """Load data from serial interface or socket into a queue. Also place the current timestamp to queue to be delivered to the @@ -103,9 +78,13 @@ class Gateway(object): Source/Destination Computer with the Networked computer. """ - def __init__(self, operation: str, local_test: bool, dd_sockets: bool) -> None: + def __init__(self, + operation: str, + local_test: bool, + dd_sockets: bool + ) -> None: """Create a new Gateway object.""" - self.settings = GatewaySettings(operation, local_test, dd_sockets) + self.settings = GatewaySettings(operation, local_test, dd_sockets) self.tx_serial = None # type: Optional[serial.Serial] self.rx_serial = None # type: Optional[serial.Serial] self.rx_socket = None # type: Optional[multiprocessing.connection.Connection] @@ -156,15 +135,11 @@ class Gateway(object): the time it takes to send one byte with given baud rate. """ try: - self.tx_serial = self.rx_serial = serial.Serial( - self.search_serial_interface(), - self.settings.session_serial_baudrate, - timeout=0, - ) + self.tx_serial = self.rx_serial = serial.Serial(self.search_serial_interface(), + self.settings.session_serial_baudrate, + timeout=0) except SerialException: - raise CriticalError( - "SerialException. Ensure $USER is in the dialout group by restarting this computer." - ) + raise CriticalError("SerialException. Ensure $USER is in the dialout group by restarting this computer.") def write(self, orig_packet: bytes) -> None: """Add error correction data and output data via socket/serial interface. 
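# --- Illustration (not part of the patch): the detection-only trailer that
# write()/read() fall back to when Reed-Solomon error correction is disabled
# (see the add_error_correction()/detect_errors() hunks below). The 16-byte
# PACKET_CHECKSUM_LENGTH is an assumption mirroring src.common.statics.
import hashlib

PACKET_CHECKSUM_LENGTH = 16  # assumed

def add_checksum(packet: bytes) -> bytes:
    """Append a truncated BLAKE2b digest to the outgoing datagram."""
    return packet + hashlib.blake2b(packet, digest_size=PACKET_CHECKSUM_LENGTH).digest()

def verify_checksum(received: bytes) -> bytes:
    """Split off the trailer and verify it (detects, but cannot correct, errors)."""
    packet, checksum = received[:-PACKET_CHECKSUM_LENGTH], received[-PACKET_CHECKSUM_LENGTH:]
    if hashlib.blake2b(packet, digest_size=PACKET_CHECKSUM_LENGTH).digest() != checksum:
        raise ValueError("Received packet had an invalid checksum.")
    return packet

assert verify_checksum(add_checksum(b"datagram")) == b"datagram"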
@@ -218,7 +193,7 @@ class Gateway(object): while True: try: - start_time = 0.0 + start_time = 0.0 read_buffer = bytearray() while True: read = self.rx_serial.read_all() @@ -240,11 +215,7 @@ class Gateway(object): def read(self) -> bytes: """Read data via socket/serial interface.""" - data = ( - self.read_socket() - if self.settings.local_testing_mode - else self.read_serial() - ) + data = (self.read_socket() if self.settings.local_testing_mode else self.read_serial()) return data def add_error_correction(self, packet: bytes) -> bytes: @@ -263,10 +234,7 @@ class Gateway(object): if self.settings.session_serial_error_correction: packet = self.rs.encode(packet) else: - packet = ( - packet - + hashlib.blake2b(packet, digest_size=PACKET_CHECKSUM_LENGTH).digest() - ) + packet = packet + hashlib.blake2b(packet, digest_size=PACKET_CHECKSUM_LENGTH).digest() return packet def detect_errors(self, packet: bytes) -> bytes: @@ -276,19 +244,11 @@ class Gateway(object): packet, _ = self.rs.decode(packet) return bytes(packet) except ReedSolomonError: - raise SoftError( - "Error: Reed-Solomon failed to correct errors in the received packet.", - bold=True, - ) + raise SoftError("Error: Reed-Solomon failed to correct errors in the received packet.", bold=True) else: packet, checksum = separate_trailer(packet, PACKET_CHECKSUM_LENGTH) - if ( - hashlib.blake2b(packet, digest_size=PACKET_CHECKSUM_LENGTH).digest() - != checksum - ): - raise SoftError( - "Warning! Received packet had an invalid checksum.", bold=True - ) + if hashlib.blake2b(packet, digest_size=PACKET_CHECKSUM_LENGTH).digest() != checksum: + raise SoftError("Warning! Received packet had an invalid checksum.", bold=True) return packet def search_serial_interface(self) -> str: @@ -297,34 +257,28 @@ class Gateway(object): search_announced = False if not self.init_found: - phase("Searching for USB-to-serial interface", offset=len("Found")) + phase("Searching for USB-to-serial interface", offset=len('Found')) while True: - for f in sorted(os.listdir("/dev/")): - if f.startswith("ttyUSB"): + for f in sorted(os.listdir('/dev/')): + if f.startswith('ttyUSB'): if self.init_found: time.sleep(1) - phase("Found", done=True) + phase('Found', done=True) if self.init_found: print_on_previous_line(reps=2) self.init_found = True - return f"/dev/{f}" + return f'/dev/{f}' time.sleep(0.1) if self.init_found and not search_announced: - phase( - "Serial adapter disconnected. Waiting for interface", - head=1, - offset=len("Found"), - ) + phase("Serial adapter disconnected. Waiting for interface", head=1, offset=len('Found')) search_announced = True else: - if self.settings.built_in_serial_interface in sorted(os.listdir("/dev/")): - return f"/dev/{self.settings.built_in_serial_interface}" - raise CriticalError( - f"Error: /dev/{self.settings.built_in_serial_interface} was not found." - ) + if self.settings.built_in_serial_interface in sorted(os.listdir('/dev/')): + return f'/dev/{self.settings.built_in_serial_interface}' + raise CriticalError(f"Error: /dev/{self.settings.built_in_serial_interface} was not found.") # Local testing @@ -360,12 +314,8 @@ class Gateway(object): under a threat model where endpoint security is of importance. 
""" try: - socket_number = ( - RP_LISTEN_SOCKET - if self.settings.software_operation == NC - else DST_LISTEN_SOCKET - ) - listener = multiprocessing.connection.Listener((LOCALHOST, socket_number)) + socket_number = RP_LISTEN_SOCKET if self.settings.software_operation == NC else DST_LISTEN_SOCKET + listener = multiprocessing.connection.Listener((LOCALHOST, socket_number)) self.rx_socket = listener.accept() except KeyboardInterrupt: graceful_exit() @@ -378,22 +328,12 @@ class Gateway(object): while True: try: if self.settings.software_operation == TX: - socket_number = ( - SRC_DD_LISTEN_SOCKET - if self.settings.data_diode_sockets - else RP_LISTEN_SOCKET - ) + socket_number = SRC_DD_LISTEN_SOCKET if self.settings.data_diode_sockets else RP_LISTEN_SOCKET else: - socket_number = ( - DST_DD_LISTEN_SOCKET - if self.settings.data_diode_sockets - else DST_LISTEN_SOCKET - ) + socket_number = DST_DD_LISTEN_SOCKET if self.settings.data_diode_sockets else DST_LISTEN_SOCKET try: - self.tx_socket = multiprocessing.connection.Client( - (LOCALHOST, socket_number) - ) + self.tx_socket = multiprocessing.connection.Client((LOCALHOST, socket_number)) except ConnectionRefusedError: time.sleep(0.1) continue @@ -423,7 +363,11 @@ class GatewaySettings(object): inconvenience of encrypting the setting values. """ - def __init__(self, operation: str, local_test: bool, dd_sockets: bool) -> None: + def __init__(self, + operation: str, + local_test: bool, + dd_sockets: bool + ) -> None: """Create a new Settings object. The settings below are altered from within the program itself. @@ -431,22 +375,20 @@ class GatewaySettings(object): file under $HOME/tfc/user_data from where, if needed, they can be manually altered by the user. """ - self.serial_baudrate = 19200 - self.serial_error_correction = 5 - self.use_serial_usb_adapter = True - self.built_in_serial_interface = "ttyS0" + self.serial_baudrate = 19200 + self.serial_error_correction = 5 + self.use_serial_usb_adapter = True + self.built_in_serial_interface = 'ttyS0' self.software_operation = operation self.local_testing_mode = local_test self.data_diode_sockets = dd_sockets self.all_keys = list(vars(self).keys()) - self.key_list = self.all_keys[: self.all_keys.index("software_operation")] + self.key_list = self.all_keys[:self.all_keys.index('software_operation')] self.defaults = {k: self.__dict__[k] for k in self.key_list} - self.file_name = ( - f"{DIR_USER_DATA}{self.software_operation}_serial_settings.json" - ) + self.file_name = f'{DIR_USER_DATA}{self.software_operation}_serial_settings.json' ensure_dir(DIR_USER_DATA) if os.path.isfile(self.file_name): @@ -455,18 +397,14 @@ class GatewaySettings(object): self.setup() self.store_settings() - self.session_serial_baudrate = self.serial_baudrate + self.session_serial_baudrate = self.serial_baudrate self.session_serial_error_correction = self.serial_error_correction - self.session_usb_serial_adapter = self.use_serial_usb_adapter + self.session_usb_serial_adapter = self.use_serial_usb_adapter - ( - self.tx_inter_packet_delay, - self.rx_receive_timeout, - ) = self.calculate_serial_delays(self.session_serial_baudrate) + self.tx_inter_packet_delay, self.rx_receive_timeout = self.calculate_serial_delays(self.session_serial_baudrate) - self.race_condition_delay = calculate_race_condition_delay( - self.session_serial_error_correction, self.serial_baudrate - ) + self.race_condition_delay = calculate_race_condition_delay(self.session_serial_error_correction, + self.serial_baudrate) @classmethod def calculate_serial_delays(cls, 
baud_rate: int) -> Tuple[float, float]: @@ -480,7 +418,7 @@ class GatewaySettings(object): bytes_per_sec = baud_rate / BAUDS_PER_BYTE byte_travel_t = 1 / bytes_per_sec - rx_receive_timeout = max(2 * byte_travel_t, SERIAL_RX_MIN_TIMEOUT) + rx_receive_timeout = max(2 * byte_travel_t, SERIAL_RX_MIN_TIMEOUT) tx_inter_packet_delay = 2 * rx_receive_timeout return tx_inter_packet_delay, rx_receive_timeout @@ -493,48 +431,35 @@ class GatewaySettings(object): if not self.local_testing_mode: name = {TX: TRANSMITTER, NC: RELAY, RX: RECEIVER}[self.software_operation] - self.use_serial_usb_adapter = yes( - f"Use USB-to-serial/TTL adapter for {name} Computer?", head=1, tail=1 - ) + self.use_serial_usb_adapter = yes(f"Use USB-to-serial/TTL adapter for {name} Computer?", head=1, tail=1) if self.use_serial_usb_adapter: - for f in sorted(os.listdir("/dev/")): - if f.startswith("ttyUSB"): + for f in sorted(os.listdir('/dev/')): + if f.startswith('ttyUSB'): return None m_print("Error: USB-to-serial/TTL adapter not found.") self.setup() else: - if self.built_in_serial_interface not in sorted(os.listdir("/dev/")): - m_print( - f"Error: Serial interface /dev/{self.built_in_serial_interface} not found." - ) + if self.built_in_serial_interface not in sorted(os.listdir('/dev/')): + m_print(f"Error: Serial interface /dev/{self.built_in_serial_interface} not found.") self.setup() def store_settings(self) -> None: """Store serial settings in JSON format.""" - serialized = json.dumps( - self, - default=(lambda _: {k: self.__dict__[k] for k in self.key_list}), - indent=4, - ) + serialized = json.dumps(self, default=(lambda o: {k: self.__dict__[k] for k in self.key_list}), indent=4) - with open(self.file_name, "w+") as f: + with open(self.file_name, 'w+') as f: f.write(serialized) f.flush() os.fsync(f.fileno()) - def invalid_setting( - self, key: str, json_dict: Dict[str, Union[bool, int, str]] - ) -> None: + def invalid_setting(self, + key: str, + json_dict: Dict[str, Union[bool, int, str]] + ) -> None: """Notify about setting an invalid value to default value.""" - m_print( - [ - f"Error: Invalid value '{json_dict[key]}' for setting '{key}' in '{self.file_name}'.", - f"The value has been set to default ({self.defaults[key]}).", - ], - head=1, - tail=1, - ) + m_print([f"Error: Invalid value '{json_dict[key]}' for setting '{key}' in '{self.file_name}'.", + f"The value has been set to default ({self.defaults[key]})."], head=1, tail=1) setattr(self, key, self.defaults[key]) def load_settings(self) -> None: @@ -545,10 +470,8 @@ class GatewaySettings(object): except json.decoder.JSONDecodeError: os.remove(self.file_name) self.store_settings() - print( - f"\nError: Invalid JSON format in '{self.file_name}'." - "\nSerial interface settings have been set to default values.\n" - ) + print(f"\nError: Invalid JSON format in '{self.file_name}'." 
+ "\nSerial interface settings have been set to default values.\n") return None # Check for missing setting @@ -562,41 +485,30 @@ class GatewaySettings(object): """Check for missing JSON fields and invalid values.""" for key in self.key_list: if key not in json_dict: - m_print( - [ - f"Error: Missing setting '{key}' in '{self.file_name}'.", - f"The value has been set to default ({self.defaults[key]}).", - ], - head=1, - tail=1, - ) + m_print([f"Error: Missing setting '{key}' in '{self.file_name}'.", + f"The value has been set to default ({self.defaults[key]})."], head=1, tail=1) setattr(self, key, self.defaults[key]) continue # Closer inspection of each setting value - if ( - key == "serial_baudrate" - and json_dict[key] not in serial.Serial().BAUDRATES - ): + if key == 'serial_baudrate' and json_dict[key] not in serial.Serial().BAUDRATES: self.invalid_setting(key, json_dict) continue - elif key == "serial_error_correction" and ( - not isinstance(json_dict[key], int) or json_dict[key] < 0 - ): + elif key == 'serial_error_correction' and (not isinstance(json_dict[key], int) or json_dict[key] < 0): self.invalid_setting(key, json_dict) continue - elif key == "use_serial_usb_adapter": + elif key == 'use_serial_usb_adapter': if not isinstance(json_dict[key], bool): self.invalid_setting(key, json_dict) continue - elif key == "built_in_serial_interface": + elif key == 'built_in_serial_interface': if not isinstance(json_dict[key], str): self.invalid_setting(key, json_dict) continue - if not any(json_dict[key] == f for f in os.listdir("/sys/class/tty")): + if not any(json_dict[key] == f for f in os.listdir('/sys/class/tty')): self.invalid_setting(key, json_dict) continue @@ -607,9 +519,7 @@ class GatewaySettings(object): attribute = self.__getattribute__(key) try: if isinstance(attribute, bool): - value = dict(true=True, false=False)[ - value_str.lower() - ] # type: Union[bool, int] + value = dict(true=True, false=False)[value_str.lower()] # type: Union[bool, int] elif isinstance(attribute, int): value = int(value_str) @@ -620,9 +530,7 @@ class GatewaySettings(object): raise CriticalError("Invalid attribute type in settings.") except (KeyError, ValueError): - raise SoftError( - f"Error: Invalid setting value '{value_str}'.", delay=1, tail_clear=True - ) + raise SoftError(f"Error: Invalid setting value '{value_str}'.", delay=1, tail_clear=True) self.validate_key_value_pair(key, value) @@ -630,17 +538,19 @@ class GatewaySettings(object): self.store_settings() @staticmethod - def validate_key_value_pair(key: str, value: Union[int, bool]) -> None: + def validate_key_value_pair(key: str, + value: Union[int, bool] + ) -> None: """\ Perform further evaluation on settings the values of which have restrictions. """ - if key == "serial_baudrate": + if key == 'serial_baudrate': if value not in serial.Serial().BAUDRATES: raise SoftError("Error: The specified baud rate is not supported.") m_print("Baud rate will change on restart.", head=1, tail=1) - if key == "serial_error_correction": + if key == 'serial_error_correction': if value < 0: raise SoftError("Error: Invalid value for error correction ratio.") m_print("Error correction ratio will change on restart.", head=1, tail=1) @@ -650,16 +560,14 @@ class GatewaySettings(object): Print list of settings, their current and default values, and setting descriptions. 
""" - desc_d = { - "serial_baudrate": "The speed of serial interface in bauds per second", - "serial_error_correction": "Number of byte errors serial datagrams can recover from", - } + desc_d = {"serial_baudrate": "The speed of serial interface in bauds per second", + "serial_error_correction": "Number of byte errors serial datagrams can recover from"} # Columns - c1 = ["Serial interface setting"] - c2 = ["Current value"] - c3 = ["Default value"] - c4 = ["Description"] + c1 = ['Serial interface setting'] + c2 = ['Current value'] + c3 = ['Default value'] + c4 = ['Description'] terminal_width = get_terminal_width() description_indent = 64 @@ -674,33 +582,26 @@ class GatewaySettings(object): c3.append(str(self.defaults[key])) description = desc_d[key] - wrapper = textwrap.TextWrapper( - width=max(1, (terminal_width - description_indent)) - ) - desc_lines = wrapper.fill(description).split("\n") + wrapper = textwrap.TextWrapper(width=max(1, (terminal_width - description_indent))) + desc_lines = wrapper.fill(description).split('\n') desc_string = desc_lines[0] for line in desc_lines[1:]: - desc_string += "\n" + description_indent * " " + line + desc_string += '\n' + description_indent * ' ' + line if len(desc_lines) > 1: - desc_string += "\n" + desc_string += '\n' c4.append(desc_string) # Calculate column widths - c1w, c2w, c3w = [ - max(len(v) for v in column) + SETTINGS_INDENT for column in [c1, c2, c3] - ] + c1w, c2w, c3w = [max(len(v) for v in column) + SETTINGS_INDENT for column in [c1, c2, c3]] # Align columns by adding whitespace between fields of each line - lines = [ - f"{f1:{c1w}} {f2:{c2w}} {f3:{c3w}} {f4}" - for f1, f2, f3, f4 in zip(c1, c2, c3, c4) - ] + lines = [f'{f1:{c1w}} {f2:{c2w}} {f3:{c3w}} {f4}' for f1, f2, f3, f4 in zip(c1, c2, c3, c4)] # Add a terminal-wide line between the column names and the data - lines.insert(1, get_terminal_width() * "─") + lines.insert(1, get_terminal_width() * '─') # Print the settings - print("\n" + "\n".join(lines) + "\n") + print('\n' + '\n'.join(lines) + '\n') diff --git a/src/common/input.py b/src/common/input.py index 2f86339..2609d27 100644 --- a/src/common/input.py +++ b/src/common/input.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
@@ -24,25 +24,13 @@ import typing from typing import Any, Callable, Optional -from src.common.encoding import b58decode +from src.common.encoding import b58decode from src.common.exceptions import CriticalError -from src.common.misc import get_terminal_width, terminal_width_check -from src.common.output import ( - clear_screen, - m_print, - print_on_previous_line, - print_spacing, -) -from src.common.statics import ( - B58_LOCAL_KEY, - B58_LOCAL_KEY_GUIDE, - B58_PUBLIC_KEY, - B58_PUBLIC_KEY_GUIDE, - CURSOR_UP_ONE_LINE, - ECDHE, - NC_BYPASS_START, - NC_BYPASS_STOP, -) +from src.common.misc import get_terminal_width, terminal_width_check +from src.common.output import clear_screen, m_print, print_on_previous_line, print_spacing +from src.common.statics import (B58_LOCAL_KEY, B58_LOCAL_KEY_GUIDE, B58_PUBLIC_KEY, B58_PUBLIC_KEY_GUIDE, + CURSOR_UP_ONE_LINE, ECDHE, ENCODED_B58_KDK_LENGTH, ENCODED_B58_PUB_KEY_LENGTH, + NC_BYPASS_START, NC_BYPASS_STOP) if typing.TYPE_CHECKING: from src.common.db_settings import Settings @@ -51,19 +39,18 @@ if typing.TYPE_CHECKING: Validator = Callable[..., str] -def ask_confirmation_code( - source: str, # The system the confirmation code is displayed by -) -> str: # The confirmation code entered by the user +def ask_confirmation_code(source: str # The system the confirmation code is displayed by + ) -> str: # The confirmation code entered by the user """\ Ask the user to input confirmation code from Source Computer to verify local key has been installed. """ - title = f"Enter confirmation code (from {source}): " - input_space = len(" ff ") + title = f"Enter confirmation code (from {source}): " + input_space = len(' ff ') - upper_line = "┌" + (len(title) + input_space) * "─" + "┐" - title_line = "│" + title + input_space * " " + "│" - lower_line = "└" + (len(title) + input_space) * "─" + "┘" + upper_line = '┌' + (len(title) + input_space) * '─' + '┐' + title_line = '│' + title + input_space * ' ' + '│' + lower_line = '└' + (len(title) + input_space) * '─' + '┘' terminal_w = get_terminal_width() upper_line = upper_line.center(terminal_w) @@ -77,44 +64,42 @@ def ask_confirmation_code( print(lower_line) print(3 * CURSOR_UP_ONE_LINE) - indent = title_line.find("│") - return input(indent * " " + f"│ {title}") + indent = title_line.find('│') + return input(indent * ' ' + f'│ {title}') -def box_input( - message: str, # Input prompt message - default: str = "", # Default return value - head: int = 0, # Number of new lines to print before the input - tail: int = 1, # Number of new lines to print after input - expected_len: int = 0, # Expected length of the input - key_type: str = "", # When specified, sets input width - guide: bool = False, # When True, prints the guide for key - validator: Optional[Validator] = None, # Input validator function - validator_args: Optional[Any] = None, # Arguments required by the validator -) -> str: # Input from user +def box_input(message: str, # Input prompt message + default: str = '', # Default return value + head: int = 0, # Number of new lines to print before the input + tail: int = 1, # Number of new lines to print after input + expected_len: int = 0, # Expected length of the input + key_type: str = '', # When specified, sets input width + guide: bool = False, # When True, prints the guide for key + validator: Optional[Validator] = None, # Input validator function + validator_args: Optional[Any] = None # Arguments required by the validator + ) -> str: # Input from user """Display boxed input prompt with a message.""" 
print_spacing(head) terminal_width = get_terminal_width() if key_type: - key_guide = { - B58_LOCAL_KEY: B58_LOCAL_KEY_GUIDE, - B58_PUBLIC_KEY: B58_PUBLIC_KEY_GUIDE, - }.get(key_type, "") + key_guide = {B58_LOCAL_KEY: B58_LOCAL_KEY_GUIDE, + B58_PUBLIC_KEY: B58_PUBLIC_KEY_GUIDE}.get(key_type, '') if guide: inner_spc = len(key_guide) + 2 else: - inner_spc = 86 if key_type == B58_PUBLIC_KEY else 53 + inner_spc = ENCODED_B58_PUB_KEY_LENGTH if key_type == B58_PUBLIC_KEY else ENCODED_B58_KDK_LENGTH + inner_spc += 2 # Spacing around input space else: - key_guide = "" + key_guide = '' inner_spc = terminal_width - 2 if expected_len == 0 else expected_len + 2 - upper_line = "┌" + inner_spc * "─" + "┐" - guide_line = "│ " + key_guide + " │" - input_line = "│" + inner_spc * " " + "│" - lower_line = "└" + inner_spc * "─" + "┘" - box_indent = (terminal_width - len(upper_line)) // 2 * " " + upper_line = '┌' + inner_spc * '─' + '┐' + guide_line = '│ ' + key_guide + ' │' + input_line = '│' + inner_spc * ' ' + '│' + lower_line = '└' + inner_spc * '─' + '┘' + box_indent = (terminal_width - len(upper_line)) // 2 * ' ' terminal_width_check(len(upper_line)) @@ -124,15 +109,15 @@ def box_input( print(box_indent + input_line) print(box_indent + lower_line) print((5 if guide else 4) * CURSOR_UP_ONE_LINE) - print(box_indent + "┌─┤" + message + "├") + print(box_indent + '┌─┤' + message + '├') if guide: - print("") + print('') - user_input = input(box_indent + "│ ") + user_input = input(box_indent + '│ ') - if user_input == "": + if user_input == '': print(2 * CURSOR_UP_ONE_LINE) - print(box_indent + "│ " + default) + print(box_indent + '│ ' + default) user_input = default if validator is not None: @@ -140,36 +125,22 @@ def box_input( if error_msg: m_print(error_msg, head=1) print_on_previous_line(reps=4, delay=1) - return box_input( - message, - default, - head, - tail, - expected_len, - key_type, - guide, - validator, - validator_args, - ) + return box_input(message, default, head, tail, expected_len, key_type, guide, validator, validator_args) print_spacing(tail) return user_input -def get_b58_key( - key_type: str, # The type of Base58 key to be entered - settings: "Settings", # Settings object - short_address: str = "", # The contact's short Onion address -) -> bytes: # The Base58 decoded key +def get_b58_key(key_type: str, # The type of Base58 key to be entered + settings: 'Settings', # Settings object + short_address: str = '' # The contact's short Onion address + ) -> bytes: # The Base58 decoded key """Ask the user to input a Base58 encoded key.""" if key_type == B58_PUBLIC_KEY: clear_screen() m_print(f"{ECDHE} key exchange", head=1, tail=1, bold=True) - m_print( - "If needed, resend your public key to the contact by pressing ", - tail=1, - ) + m_print("If needed, resend your public key to the contact by pressing ", tail=1) box_msg = f"Enter public key of {short_address} (from Relay)" elif key_type == B58_LOCAL_KEY: @@ -178,24 +149,23 @@ def get_b58_key( raise CriticalError("Invalid key type") while True: - rx_pk = box_input( - box_msg, key_type=key_type, guide=not settings.local_testing_mode - ) - rx_pk = "".join(rx_pk.split()) + rx_pk = box_input(box_msg, key_type=key_type, guide=not settings.local_testing_mode) + rx_pk = ''.join(rx_pk.split()) - if key_type == B58_PUBLIC_KEY and rx_pk == "": + if key_type == B58_PUBLIC_KEY and rx_pk == '': return rx_pk.encode() try: return b58decode(rx_pk, public_key=(key_type == B58_PUBLIC_KEY)) except ValueError: m_print("Checksum error - Check that the entered key is 
correct.") - print_on_previous_line( - reps=(4 if settings.local_testing_mode else 5), delay=1 - ) + print_on_previous_line(reps=(4 if settings.local_testing_mode else 5), delay=1) + + if key_type == B58_PUBLIC_KEY and len(rx_pk) == ENCODED_B58_PUB_KEY_LENGTH: + raise ValueError(rx_pk) -def nc_bypass_msg(key: str, settings: "Settings") -> None: +def nc_bypass_msg(key: str, settings: 'Settings') -> None: """Print messages about bypassing Networked Computer. During ciphertext delivery of local key exchange, these bypass @@ -205,37 +175,29 @@ def nc_bypass_msg(key: str, settings: "Settings") -> None: key. Without the ciphertext, e.g. a visually collected local key decryption key is useless. """ - m = { - NC_BYPASS_START: "Bypass Networked Computer if needed. Press to send local key.", - NC_BYPASS_STOP: "Remove bypass of Networked Computer. Press to continue.", - } + m = {NC_BYPASS_START: "Bypass Networked Computer if needed. Press to send local key.", + NC_BYPASS_STOP: "Remove bypass of Networked Computer. Press to continue."} if settings.nc_bypass_messages: - m_print( - m[key], - manual_proceed=True, - box=True, - head=(1 if key == NC_BYPASS_STOP else 0), - ) + m_print(m[key], manual_proceed=True, box=True, head=(1 if key == NC_BYPASS_STOP else 0)) -def pwd_prompt( - message: str, # Prompt message - repeat: bool = False, # When True, prints corner chars for the second box -) -> str: # Password from user +def pwd_prompt(message: str, # Prompt message + repeat: bool = False # When True, prints corner chars for the second box + ) -> str: # Password from user """Prompt the user to enter a password. The getpass library ensures the password is not echoed on screen when it is typed. """ - l, r = ("├", "┤") if repeat else ("┌", "┐") + l, r = ('├', '┤') if repeat else ('┌', '┐') - terminal_w = get_terminal_width() - input_space = len(" c ") # `c` is where the caret sits + terminal_w = get_terminal_width() + input_space = len(' c ') # `c` is where the caret sits - upper_line = (l + (len(message) + input_space) * "─" + r).center(terminal_w) - title_line = ("│" + message + input_space * " " + "│").center(terminal_w) - lower_line = ("└" + (len(message) + input_space) * "─" + "┘").center(terminal_w) + upper_line = ( l + (len(message) + input_space) * '─' + r ).center(terminal_w) + title_line = ('│' + message + input_space * ' ' + '│').center(terminal_w) + lower_line = ('└' + (len(message) + input_space) * '─' + '┘').center(terminal_w) terminal_width_check(len(upper_line)) @@ -244,34 +206,33 @@ def pwd_prompt( print(lower_line) print(3 * CURSOR_UP_ONE_LINE) - indent = title_line.find("│") - user_input = getpass.getpass(indent * " " + f"│ {message}") + indent = title_line.find('│') + user_input = getpass.getpass(indent * ' ' + f'│ {message}') return user_input -def yes( - prompt: str, # Question to be asked - abort: Optional[bool] = None, # Determines the return value of ^C and ^D - head: int = 0, # Number of new lines to print before prompt - tail: int = 0, # Number of new lines to print after prompt -) -> bool: # True/False depending on input +def yes(prompt: str, # Question to be asked + abort: Optional[bool] = None, # Determines the return value of ^C and ^D + head: int = 0, # Number of new lines to print before prompt + tail: int = 0 # Number of new lines to print after prompt + ) -> bool: # True/False depending on input """Prompt the user a question that is answered with yes/no.""" print_spacing(head) - prompt = f"{prompt} (y/n): " - input_space = len(" yes ") + prompt = f"{prompt} (y/n): " + 
input_space = len(' yes ') - upper_line = "┌" + (len(prompt) + input_space) * "─" + "┐" - title_line = "│" + prompt + input_space * " " + "│" - lower_line = "└" + (len(prompt) + input_space) * "─" + "┘" + upper_line = '┌' + (len(prompt) + input_space) * '─' + '┐' + title_line = '│' + prompt + input_space * ' ' + '│' + lower_line = '└' + (len(prompt) + input_space) * '─' + '┘' terminal_w = get_terminal_width() upper_line = upper_line.center(terminal_w) title_line = title_line.center(terminal_w) lower_line = lower_line.center(terminal_w) - indent = title_line.find("│") + indent = title_line.find('│') terminal_width_check(len(upper_line)) @@ -282,24 +243,24 @@ def yes( print(3 * CURSOR_UP_ONE_LINE) try: - user_input = input(indent * " " + f"│ {prompt}") + user_input = input(indent * ' ' + f'│ {prompt}') except (EOFError, KeyboardInterrupt): if abort is None: raise - print("") - user_input = "y" if abort else "n" + print('') + user_input = 'y' if abort else 'n' print_on_previous_line() - if user_input == "": + if user_input == '': continue - if user_input.lower() in ["y", "yes"]: - print(indent * " " + f"│ {prompt}Yes │\n") + if user_input.lower() in ['y', 'yes']: + print(indent * ' ' + f'│ {prompt}Yes │\n') print_spacing(tail) return True - if user_input.lower() in ["n", "no"]: - print(indent * " " + f"│ {prompt}No │\n") + if user_input.lower() in ['n', 'no']: + print(indent * ' ' + f'│ {prompt}No │\n') print_spacing(tail) return False diff --git a/src/common/misc.py b/src/common/misc.py index 1f5aaff..c7c6354 100755 --- a/src/common/misc.py +++ b/src/common/misc.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -34,55 +34,28 @@ import threading import typing import zlib -from contextlib import contextmanager -from typing import Any, Callable, Dict, Iterator, List, Optional, Tuple, Type, Union +from contextlib import contextmanager +from typing import Any, Callable, Dict, Iterator, List, Optional, Tuple, Type, Union from multiprocessing import Process from src.common.reed_solomon import RSCodec -from src.common.statics import ( - BAUDS_PER_BYTE, - COMMAND_LENGTH, - CURSOR_UP_ONE_LINE, - DIR_RECV_FILES, - DIR_USER_DATA, - DUMMY_CONTACT, - DUMMY_GROUP, - DUMMY_MEMBER, - ECDHE, - EVENT, - EXIT, - EXIT_QUEUE, - LOCAL_ID, - LOCAL_PUBKEY, - ME, - ONION_ADDRESS_CHECKSUM_ID, - ONION_ADDRESS_CHECKSUM_LENGTH, - ONION_ADDRESS_LENGTH, - ONION_SERVICE_PUBLIC_KEY_LENGTH, - PACKET_LENGTH, - PADDING_LENGTH, - POWEROFF, - PSK, - RESET, - RX, - STATIC, - TAILS, - TRAFFIC_MASKING, - TX, - WIPE, -) +from src.common.statics import (BAUDS_PER_BYTE, COMMAND_LENGTH, CURSOR_UP_ONE_LINE, DIR_RECV_FILES, DIR_USER_DATA, + DUMMY_CONTACT, DUMMY_GROUP, DUMMY_MEMBER, ECDHE, EVENT, EXIT, EXIT_QUEUE, LOCAL_ID, + LOCAL_PUBKEY, ME, ONION_ADDRESS_CHECKSUM_ID, ONION_ADDRESS_CHECKSUM_LENGTH, + ONION_ADDRESS_LENGTH, ONION_SERVICE_PUBLIC_KEY_LENGTH, PACKET_LENGTH, + PADDING_LENGTH, POWEROFF, PSK, RESET, RX, STATIC, TAILS, TRAFFIC_MASKING, TX, WIPE) if typing.TYPE_CHECKING: - from multiprocessing import Queue + from multiprocessing import Queue from src.common.db_contacts import ContactList - from src.common.db_groups import GroupList + from src.common.db_groups import GroupList from src.common.db_settings import Settings - from src.common.gateway import Gateway + from src.common.gateway import Gateway -def calculate_race_condition_delay( - serial_error_correction: int, serial_baudrate: int -) -> float: 
+def calculate_race_condition_delay(serial_error_correction: int, + serial_baudrate: int + ) -> float: """\ Calculate the delay required to prevent Relay Program race condition. @@ -95,19 +68,18 @@ def calculate_race_condition_delay( before outputting command for Relay Program, to ensure Receiver Program has received the encrypted command. """ - rs = RSCodec(2 * serial_error_correction) + rs = RSCodec(2 * serial_error_correction) message_length = PACKET_LENGTH + ONION_ADDRESS_LENGTH enc_msg_length = len(rs.encode(os.urandom(message_length))) enc_cmd_length = len(rs.encode(os.urandom(COMMAND_LENGTH))) - max_bytes = enc_msg_length + (2 * enc_cmd_length) + max_bytes = enc_msg_length + (2 * enc_cmd_length) return (max_bytes * BAUDS_PER_BYTE) / serial_baudrate -def decompress( - data: bytes, # Data to be decompressed - max_size: int, # The maximum size of decompressed data. -) -> bytes: # Decompressed data +def decompress(data: bytes, # Data to be decompressed + max_size: int # The maximum size of decompressed data. + ) -> bytes: # Decompressed data """Decompress received data. The decompressed data has a maximum size, designed to prevent zip @@ -115,7 +87,7 @@ def decompress( """ from src.common.exceptions import SoftError # Avoid circular import - dec = zlib.decompressobj() + dec = zlib.decompressobj() data = dec.decompress(data, max_size) if dec.unconsumed_tail: raise SoftError("Error: Decompression aborted due to possible zip bomb.") @@ -138,72 +110,64 @@ def ensure_dir(directory: str) -> None: os.makedirs(name) -def get_tab_complete_list( - contact_list: "ContactList", - group_list: "GroupList", - settings: "Settings", - gateway: "Gateway", -) -> List[str]: +def get_tab_complete_list(contact_list: 'ContactList', + group_list: 'GroupList', + settings: 'Settings', + gateway: 'Gateway' + ) -> List[str]: """Return a list of tab-complete words.""" - commands = [ - "about", - "add ", - "clear", - "cmd", - "connect", - "exit", - "export ", - "file", - "group ", - "help", - "history ", - "localkey", - "logging ", - "msg ", - "names", - "nick ", - "notify ", - "passwd ", - "psk", - "reset", - "rmlogs ", - "set ", - "settings", - "store ", - "unread", - "verify", - "whisper ", - "whois ", - ] + commands = ['about', + 'add ', + 'clear', + 'cmd', + 'connect', + 'exit', + 'export ', + 'file', + 'group ', + 'help', + 'history ', + 'localkey', + 'logging ', + 'msg ', + 'names', + 'nick ', + 'notify ', + 'passwd ', + 'psk', + 'reset', + 'rmlogs ', + 'set ', + 'settings', + 'store ', + 'unread', + 'verify', + 'whisper ', + 'whois '] - tc_list = ["all", "create ", "false", "False", "join ", "true", "True"] + tc_list = ['all', 'create ', 'false', 'False', 'join ', 'true', 'True'] tc_list += commands - tc_list += [(a + " ") for a in contact_list.get_list_of_addresses()] - tc_list += [(n + " ") for n in contact_list.get_list_of_nicks()] - tc_list += [(g + " ") for g in group_list.get_list_of_group_names()] - tc_list += [(i + " ") for i in group_list.get_list_of_hr_group_ids()] - tc_list += [(s + " ") for s in settings.key_list] - tc_list += [(s + " ") for s in gateway.settings.key_list] + tc_list += [(a + ' ') for a in contact_list.get_list_of_addresses()] + tc_list += [(n + ' ') for n in contact_list.get_list_of_nicks()] + tc_list += [(g + ' ') for g in group_list.get_list_of_group_names()] + tc_list += [(i + ' ') for i in group_list.get_list_of_hr_group_ids()] + tc_list += [(s + ' ') for s in settings.key_list] + tc_list += [(s + ' ') for s in gateway.settings.key_list] return tc_list -def get_tab_completer( 
- contact_list: "ContactList", - group_list: "GroupList", - settings: "Settings", - gateway: "Gateway", -) -> Optional[Callable[[str, Any], Any]]: +def get_tab_completer(contact_list: 'ContactList', + group_list: 'GroupList', + settings: 'Settings', + gateway: 'Gateway' + ) -> Optional[Callable[[str, Any], Any]]: """Return the tab completer object.""" def tab_complete(text: str, state: Any) -> List[str]: """Return tab-complete options.""" - tab_complete_list = get_tab_complete_list( - contact_list, group_list, settings, gateway - ) - options = [ - t for t in tab_complete_list if t.startswith(text) - ] # type: List[str] + tab_complete_list = get_tab_complete_list(contact_list, group_list, settings, gateway) + options = [t for t in tab_complete_list if t.startswith(text)] # type: List[str] with ignored(IndexError): tc = options[state] # type: List[str] return tc @@ -233,12 +197,11 @@ class HideRunTime(object): https://docs.python.org/2/library/random.html#random.SystemRandom """ - def __init__( - self, - settings: Optional["Settings"] = None, - delay_type: str = STATIC, - duration: float = 0.0, - ) -> None: + def __init__(self, + settings: Optional['Settings'] = None, + delay_type: str = STATIC, + duration: float = 0.0 + ) -> None: if delay_type == TRAFFIC_MASKING and settings is not None: self.length = settings.tm_static_delay @@ -251,7 +214,11 @@ class HideRunTime(object): self.timer = threading.Thread(target=time.sleep, args=(self.length,)) self.timer.start() - def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None: + def __exit__(self, + exc_type: Any, + exc_value: Any, + traceback: Any + ) -> None: self.timer.join() @@ -264,12 +231,11 @@ def ignored(*exceptions: Type[BaseException]) -> Iterator[Any]: pass -def monitor_processes( - process_list: List[Process], - software_operation: str, - queues: Dict[bytes, "Queue[bytes]"], - error_exit_code: int = 1, -) -> None: +def monitor_processes(process_list: List[Process], + software_operation: str, + queues: Dict[bytes, 'Queue[bytes]'], + error_exit_code: int = 1 + ) -> None: """Monitor the status of `process_list` and EXIT_QUEUE. This function monitors a list of processes. If one of them dies, it @@ -298,7 +264,7 @@ def monitor_processes( sys.exit(0) if command == WIPE: - with open("/etc/os-release") as f: + with open('/etc/os-release') as f: data = f.read() if TAILS not in data: shred_databases(software_operation) @@ -317,37 +283,29 @@ def process_arguments() -> Tuple[str, bool, bool]: Terminator configuration file for local testing. The descriptions here are provided for the sake of completeness. 
""" - parser = argparse.ArgumentParser( - f"python3.7 {sys.argv[0]}", - usage="%(prog)s [OPTION]", - epilog="Full documentation at: ", - ) + parser = argparse.ArgumentParser(f'python3.7 {sys.argv[0]}', + usage='%(prog)s [OPTION]', + epilog='Full documentation at: ') - parser.add_argument( - "-r", - action="store_true", - default=False, - dest="operation", - help="run Receiver instead of Transmitter Program", - ) + parser.add_argument('-r', + action='store_true', + default=False, + dest='operation', + help="run Receiver instead of Transmitter Program") - parser.add_argument( - "-l", - action="store_true", - default=False, - dest="local_test", - help="enable local testing mode", - ) + parser.add_argument('-l', + action='store_true', + default=False, + dest='local_test', + help="enable local testing mode") - parser.add_argument( - "-d", - action="store_true", - default=False, - dest="data_diode_sockets", - help="use data diode simulator sockets during local testing mode", - ) + parser.add_argument('-d', + action='store_true', + default=False, + dest='data_diode_sockets', + help="use data diode simulator sockets during local testing mode") - args = parser.parse_args() + args = parser.parse_args() operation = RX if args.operation else TX return operation, args.local_test, args.data_diode_sockets @@ -356,11 +314,11 @@ def process_arguments() -> Tuple[str, bool, bool]: def readable_size(size: int) -> str: """Convert file size from bytes to a human-readable form.""" f_size = float(size) - for unit in ["", "K", "M", "G", "T", "P", "E", "Z"]: + for unit in ['', 'K', 'M', 'G', 'T', 'P', 'E', 'Z']: if abs(f_size) < 1024.0: - return f"{f_size:3.1f}{unit}B" + return f'{f_size:3.1f}{unit}B' f_size /= 1024.0 - return f"{f_size:3.1f}YB" + return f'{f_size:3.1f}YB' def reset_terminal() -> None: @@ -376,49 +334,40 @@ def round_up(value: Union[int, float]) -> int: def shred_databases(software_operation: str) -> None: """Shred TFC databases and remove directories.""" if software_operation == RX: - subprocess.Popen( - "find {} -type f -exec shred -n 3 -z -u {{}} \\;".format(DIR_RECV_FILES), - shell=True, - ).wait() + subprocess.Popen("find {} -type f -exec shred -n 3 -z -u {{}} \\;".format(DIR_RECV_FILES), shell=True).wait() - subprocess.Popen( - "find {} -name '{}*' -type f -exec shred -n 3 -z -u {{}} \\;".format( - DIR_USER_DATA, software_operation - ), - shell=True, - ).wait() + subprocess.Popen("find {} -name '{}*' -type f -exec shred -n 3 -z -u {{}} \\;" + .format(DIR_USER_DATA, software_operation), shell=True).wait() for d in [DIR_USER_DATA, DIR_RECV_FILES]: with ignored(FileNotFoundError): shutil.rmtree(d) -def split_byte_string( - bytestring: bytes, item_len: int # Bytestring to split # Length of each substring -) -> List[bytes]: # List of substrings +def split_byte_string(bytestring: bytes, # Bytestring to split + item_len: int # Length of each substring + ) -> List[bytes]: # List of substrings """Split a bytestring into a list of specific length substrings.""" - return [bytestring[i : i + item_len] for i in range(0, len(bytestring), item_len)] + return [bytestring[i:i + item_len] for i in range(0, len(bytestring), item_len)] -def split_string( - string: str, item_len: int # String to split # Length of each substring -) -> List[str]: # List of substrings +def split_string(string: str, # String to split + item_len: int # Length of each substring + ) -> List[str]: # List of substrings """Split a string into a list of specific length substrings.""" - return [string[i : i + item_len] for i in range(0, 
len(string), item_len)]
+    return [string[i:i + item_len] for i in range(0, len(string), item_len)]


-def separate_header(
-    bytestring: bytes,  # Bytestring to slice
-    header_length: int,  # Number of header bytes to separate
-) -> Tuple[bytes, bytes]:  # Header and payload
+def separate_header(bytestring: bytes,         # Bytestring to slice
+                    header_length: int         # Number of header bytes to separate
+                    ) -> Tuple[bytes, bytes]:  # Header and payload
     """Separate `header_length` first bytes from a bytestring."""
     return bytestring[:header_length], bytestring[header_length:]


-def separate_headers(
-    bytestring: bytes,  # Bytestring to slice
-    header_length_list: List[int],  # List of header lengths
-) -> List[bytes]:  # Header and payload
+def separate_headers(bytestring: bytes,              # Bytestring to slice
+                     header_length_list: List[int],  # List of header lengths
+                     ) -> List[bytes]:               # Header and payload
     """Separate a list of headers from bytestring.

     Length of each header is determined in the `header_length_list`.
@@ -432,10 +381,9 @@ def separate_headers(
     return fields


-def separate_trailer(
-    bytestring: bytes,  # Bytestring to slice
-    trailer_length: int,  # Number of trailer bytes to separate
-) -> Tuple[bytes, bytes]:  # Payload and trailer
+def separate_trailer(bytestring: bytes,         # Bytestring to slice
+                     trailer_length: int        # Number of trailer bytes to separate
+                     ) -> Tuple[bytes, bytes]:  # Payload and trailer
     """Separate `trailer_length` last bytes from a bytestring.

     This saves space and makes trailer separation more readable.
@@ -443,6 +391,15 @@ def separate_trailer(
     return bytestring[:-trailer_length], bytestring[-trailer_length:]


+def split_to_substrings(bytestring: bytes, length: int) -> List[bytes]:
+    """Split byte string into all its possible `length` long substrings."""
+    substrings = []
+    for i in range(0, len(bytestring) - length + 1):
+        substrings.append(bytestring[i:length + i])
+
+    return substrings
+
+
 def terminal_width_check(minimum_width: int) -> None:
     """Wait until user re-sizes their terminal to specified width. """
     if get_terminal_width() < minimum_width:
@@ -450,38 +407,38 @@ def terminal_width_check(minimum_width: int) -> None:
     while get_terminal_width() < minimum_width:
         time.sleep(0.1)
     time.sleep(0.1)
-    print(2 * CURSOR_UP_ONE_LINE)
+    print(2*CURSOR_UP_ONE_LINE)


-def validate_onion_addr(
-    onion_address_contact: str,  # String to slice
-    onion_address_user: str = "",  # Number of header chars to separate
-) -> str:  # Payload and trailer
+def validate_onion_addr(onion_address_contact: str,       # String to slice
+                        onion_address_user:    str = ''   # Number of header chars to separate
+                        ) -> str:                         # Payload and trailer
     """Validate a v3 Onion Service address."""
-    error_msg = ""
+    error_msg = ''
+
+    if len(onion_address_contact) != ONION_ADDRESS_LENGTH:
+        return "Error: Invalid account length."
+
+    # Together with the length check, this should make it hard enough to accidentally enter an exported local key decryption key.
+    if any(c.isupper() for c in onion_address_contact):
+        return "Error: Account must be in lower case."
try: decoded = base64.b32decode(onion_address_contact.upper()) - public_key, checksum, version = separate_headers( - decoded, [ONION_SERVICE_PUBLIC_KEY_LENGTH, ONION_ADDRESS_CHECKSUM_LENGTH] - ) + public_key, checksum, version \ + = separate_headers(decoded, [ONION_SERVICE_PUBLIC_KEY_LENGTH, ONION_ADDRESS_CHECKSUM_LENGTH]) - if ( - checksum - != hashlib.sha3_256( - ONION_ADDRESS_CHECKSUM_ID + public_key + version - ).digest()[:ONION_ADDRESS_CHECKSUM_LENGTH] - ): + if checksum != hashlib.sha3_256(ONION_ADDRESS_CHECKSUM_ID + + public_key + + version + ).digest()[:ONION_ADDRESS_CHECKSUM_LENGTH]: error_msg = "Checksum error - Check that the entered account is correct." except (binascii.Error, ValueError): return "Error: Invalid account format." - if ( - onion_address_contact in (LOCAL_ID, DUMMY_CONTACT, DUMMY_MEMBER) - or public_key == LOCAL_PUBKEY - ): + if onion_address_contact in (LOCAL_ID, DUMMY_CONTACT, DUMMY_MEMBER) or public_key == LOCAL_PUBKEY: error_msg = "Error: Can not add reserved account." if onion_address_user and onion_address_contact == onion_address_user: @@ -490,13 +447,12 @@ def validate_onion_addr( return error_msg -def validate_group_name( - group_name: str, # Name of the group - contact_list: "ContactList", # ContactList object - group_list: "GroupList", # GroupList object -) -> str: # Error message if validation failed, else empty string +def validate_group_name(group_name: str, # Name of the group + contact_list: 'ContactList', # ContactList object + group_list: 'GroupList' # GroupList object + ) -> str: # Error message if validation failed, else empty string """Validate the specified group name.""" - error_msg = "" + error_msg = '' # Avoids collision with delimiters if not group_name.isprintable(): @@ -507,9 +463,7 @@ def validate_group_name( error_msg = f"Error: Group name must be less than {PADDING_LENGTH} chars long." if group_name == DUMMY_GROUP: - error_msg = ( - "Error: Group name cannot use the name reserved for database padding." - ) + error_msg = "Error: Group name cannot use the name reserved for database padding." if not validate_onion_addr(group_name): error_msg = "Error: Group name cannot have the format of an account." @@ -523,11 +477,11 @@ def validate_group_name( return error_msg -def validate_key_exchange( - key_ex: str, *_: Any # Key exchange selection to validate # Unused arguments -) -> str: # Error message if validation failed, else empty string +def validate_key_exchange(key_ex: str, # Key exchange selection to validate + *_: Any # Unused arguments + ) -> str: # Error message if validation failed, else empty string """Validate the specified key exchange.""" - error_msg = "" + error_msg = '' if key_ex.upper() not in [ECDHE, ECDHE[:1], PSK, PSK[:1]]: error_msg = "Invalid key exchange selection." 
@@ -535,16 +489,13 @@ def validate_key_exchange( return error_msg -def validate_nick( - nick: str, # Nick to validate - args: Tuple[ - "ContactList", "GroupList", bytes - ], # Contact list and group list databases -) -> str: # Error message if validation failed, else '' +def validate_nick(nick: str, # Nick to validate + args: Tuple['ContactList', 'GroupList', bytes] # Contact list and group list databases + ) -> str: # Error message if validation failed, else '' """Validate the specified nickname.""" contact_list, group_list, onion_pub_key = args - error_msg = "" + error_msg = '' # Length is limited by database's Unicode padding if len(nick) >= PADDING_LENGTH: @@ -554,7 +505,7 @@ def validate_nick( if not nick.isprintable(): error_msg = "Error: Nick must be printable." - if nick == "": + if nick == '': error_msg = "Error: Nick cannot be empty." # Receiver displays sent messages under 'Me' @@ -566,9 +517,7 @@ def validate_nick( error_msg = f"Error: '{EVENT}' is a reserved nick." # Ensure that nicks, accounts and group names are UIDs in recipient selection - if ( - validate_onion_addr(nick) == "" - ): # If no error message was received, nick had format of account + if validate_onion_addr(nick) == '': # If no error message was received, nick had format of account error_msg = "Error: Nick cannot have the format of an account." if nick in (LOCAL_ID, DUMMY_CONTACT, DUMMY_MEMBER): @@ -583,14 +532,15 @@ def validate_nick( return error_msg -def same_contact_check( - onion_pub_key: bytes, nick: str, contact_list: "ContactList" -) -> str: +def same_contact_check(onion_pub_key: bytes, + nick: str, + contact_list: 'ContactList' + ) -> str: """Check if nick matches the account being replaced.""" error_msg = "Error: Nick already in use." if contact_list.has_pub_key(onion_pub_key): if nick == contact_list.get_nick_by_pub_key(onion_pub_key): - error_msg = "" + error_msg = '' return error_msg diff --git a/src/common/output.py b/src/common/output.py index cbb1b49..01cb9f0 100644 --- a/src/common/output.py +++ b/src/common/output.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
@@ -25,42 +25,21 @@ import typing import sys from datetime import datetime -from typing import List, Optional, Tuple, Union +from typing import List, Optional, Tuple, Union from src.common.encoding import b10encode, b58encode, pub_key_to_onion_address -from src.common.misc import get_terminal_width, split_string -from src.common.statics import ( - ADDED_MEMBERS, - ALREADY_MEMBER, - B58_LOCAL_KEY_GUIDE, - B58_PUBLIC_KEY_GUIDE, - BOLD_ON, - CLEAR_ENTIRE_LINE, - CLEAR_ENTIRE_SCREEN, - CURSOR_LEFT_UP_CORNER, - CURSOR_UP_ONE_LINE, - DONE, - NC, - NEW_GROUP, - NORMAL_TEXT, - NOT_IN_GROUP, - RECEIVER, - RELAY, - REMOVED_MEMBERS, - RX, - TFC, - TRANSMITTER, - TX, - UNKNOWN_ACCOUNTS, - VERSION, -) +from src.common.misc import get_terminal_width, split_string +from src.common.statics import (ADDED_MEMBERS, ALREADY_MEMBER, B58_LOCAL_KEY_GUIDE, B58_PUBLIC_KEY_GUIDE, BOLD_ON, + CLEAR_ENTIRE_LINE, CLEAR_ENTIRE_SCREEN, CURSOR_LEFT_UP_CORNER, CURSOR_UP_ONE_LINE, + DONE, NC, NEW_GROUP, NORMAL_TEXT, NOT_IN_GROUP, RECEIVER, RELAY, REMOVED_MEMBERS, RX, + TFC, TRANSMITTER, TX, UNKNOWN_ACCOUNTS, VERSION) if typing.TYPE_CHECKING: from src.common.db_contacts import ContactList from src.common.db_settings import Settings - from src.common.gateway import GatewaySettings as GWSettings + from src.common.gateway import GatewaySettings as GWSettings - msg_list_type = Union[str, List[str]] + MsgListType = Union[str, List[str]] def clear_screen(delay: float = 0.0) -> None: @@ -70,55 +49,40 @@ def clear_screen(delay: float = 0.0) -> None: sys.stdout.flush() -def group_management_print( - key: str, # Group management message identifier - members: List[bytes], # List of members' Onion public keys - contact_list: "ContactList", # ContactList object - group_name: str = "", # Name of the group -) -> None: +def group_management_print(key: str, # Group management message identifier + members: List[bytes], # List of members' Onion public keys + contact_list: 'ContactList', # ContactList object + group_name: str = '' # Name of the group + ) -> None: """Print group management command results.""" - m = { - NEW_GROUP: "Created new group '{}' with following members:".format(group_name), - ADDED_MEMBERS: "Added following accounts to group '{}':".format(group_name), - ALREADY_MEMBER: "Following accounts were already in group '{}':".format( - group_name - ), - REMOVED_MEMBERS: "Removed following members from group '{}':".format( - group_name - ), - NOT_IN_GROUP: "Following accounts were not in group '{}':".format(group_name), - UNKNOWN_ACCOUNTS: "Following unknown accounts were ignored:", - }[key] + m = {NEW_GROUP: "Created new group '{}' with following members:".format(group_name), + ADDED_MEMBERS: "Added following accounts to group '{}':" .format(group_name), + ALREADY_MEMBER: "Following accounts were already in group '{}':".format(group_name), + REMOVED_MEMBERS: "Removed following members from group '{}':" .format(group_name), + NOT_IN_GROUP: "Following accounts were not in group '{}':" .format(group_name), + UNKNOWN_ACCOUNTS: "Following unknown accounts were ignored:"}[key] if members: - m_list = [ - contact_list.get_nick_by_pub_key(m) - for m in members - if contact_list.has_pub_key(m) - ] + [ - pub_key_to_onion_address(m) - for m in members - if not contact_list.has_pub_key(m) - ] + m_list = ([contact_list.get_nick_by_pub_key(m) for m in members if contact_list.has_pub_key(m)] + + [pub_key_to_onion_address(m) for m in members if not contact_list.has_pub_key(m)]) - just_len = max(len(m) for m in m_list) + just_len = max(len(m) for m 
in m_list) justified = [m] + [f" * {m.ljust(just_len)}" for m in m_list] m_print(justified, box=True) -def m_print( - msg_list: "msg_list_type", # List of lines to print - manual_proceed: bool = False, # Wait for user input before continuing - bold: bool = False, # When True, prints the message in bold style - center: bool = True, # When False, does not center message - box: bool = False, # When True, prints a box around the message - head_clear: bool = False, # When True, clears screen before printing message - tail_clear: bool = False, # When True, clears screen after printing message (requires delay) - delay: float = 0, # Delay before continuing - max_width: int = 0, # Maximum width of message - head: int = 0, # Number of new lines to print before the message - tail: int = 0, # Number of new lines to print after the message -) -> None: +def m_print(msg_list: Union[str, List[str]], # List of lines to print + manual_proceed: bool = False, # Wait for user input before continuing + bold: bool = False, # When True, prints the message in bold style + center: bool = True, # When False, does not center message + box: bool = False, # When True, prints a box around the message + head_clear: bool = False, # When True, clears screen before printing message + tail_clear: bool = False, # When True, clears screen after printing message (requires delay) + delay: float = 0, # Delay before continuing + max_width: int = 0, # Maximum width of message + head: int = 0, # Number of new lines to print before the message + tail: int = 0, # Number of new lines to print after the message + ) -> None: """Print message to screen. The message automatically wraps if the terminal is too narrow to @@ -127,20 +91,18 @@ def m_print( if isinstance(msg_list, str): msg_list = [msg_list] - terminal_width = get_terminal_width() - len_widest_msg, msg_list = split_too_wide_messages( - box, max_width, msg_list, terminal_width - ) + terminal_width = get_terminal_width() + len_widest_msg, msg_list = split_too_wide_messages(box, max_width, msg_list, terminal_width) if box or center: # Insert whitespace around every line to make them equally long - msg_list = [f"{m:^{len_widest_msg}}" for m in msg_list] + msg_list = [f'{m:^{len_widest_msg}}' for m in msg_list] if box: # Add box chars around the message - msg_list = [f"│ {m} │" for m in msg_list] - msg_list.insert(0, "┌" + (len_widest_msg + 2) * "─" + "┐") - msg_list.append("└" + (len_widest_msg + 2) * "─" + "┘") + msg_list = [f'│ {m} │' for m in msg_list] + msg_list.insert(0, '┌' + (len_widest_msg + 2) * '─' + '┐') + msg_list.append( '└' + (len_widest_msg + 2) * '─' + '┘') # Print the message if head_clear: @@ -161,17 +123,19 @@ def m_print( # Check if message needs to be manually dismissed if manual_proceed: - input("") + input('') print_on_previous_line() -def split_too_wide_messages( - box: bool, max_width: int, msg_list: "msg_list_type", terminal_width: int -) -> Tuple[int, "msg_list_type"]: +def split_too_wide_messages(box: bool, + max_width: int, + msg_list: 'MsgListType', + terminal_width: int + ) -> Tuple[int, 'MsgListType']: """Split too wide messages to multiple lines.""" len_widest_msg = max(len(m) for m in msg_list) spc_around_msg = 4 if box else 2 - max_msg_width = terminal_width - spc_around_msg + max_msg_width = terminal_width - spc_around_msg if max_width: max_msg_width = min(max_width, max_msg_width) @@ -190,13 +154,13 @@ def split_too_wide_messages( return len_widest_msg, msg_list -def phase( - string: str, # Description of the phase - done: bool = False, # When 
True, uses string as the phase completion message - head: int = 0, # Number of inserted new lines before print - offset: int = 4, # Offset of phase string from center to left - delay: float = 0.5, # Duration of phase completion message -) -> None: +def phase(string: str, # Description of the phase + done: bool = False, # When True, uses string as the phase completion message + head: int = 0, # Number of inserted new lines before print + tail: int = 0, # Number of inserted new lines after print + offset: int = 4, # Offset of phase string from center to left + delay: float = 0.5 # Duration of phase completion message + ) -> None: """Print the name of the next phase. The notification of completion of the phase is printed on the same @@ -208,15 +172,17 @@ def phase( print(string) time.sleep(delay) else: - string += "... " - indent = ((get_terminal_width() - (len(string) + offset)) // 2) * " " + string += '... ' + indent = ((get_terminal_width() - (len(string) + offset)) // 2) * ' ' - print(indent + string, end="", flush=True) + print(indent + string, end='', flush=True) + + print_spacing(tail) -def print_fingerprint( - fp: bytes, msg: str = "" # Contact's fingerprint # Title message -) -> None: +def print_fingerprint(fp: bytes, # Contact's fingerprint + msg: str = '' # Title message + ) -> None: """Print a formatted message and fingerprint inside the box. Truncate fingerprint for clean layout with three rows that have @@ -224,20 +190,19 @@ def print_fingerprint( 249.15 bits of entropy which is more than the symmetric security of X448. """ - p_lst = [msg, ""] if msg else [] - b10fp = b10encode(fp)[: (3 * 5 * 5)] - parts = split_string(b10fp, item_len=(5 * 5)) - p_lst += [" ".join(split_string(p, item_len=5)) for p in parts] + p_lst = [msg, ''] if msg else [] + b10fp = b10encode(fp)[:(3*5*5)] + parts = split_string(b10fp, item_len=(5*5)) + p_lst += [' '.join(split_string(p, item_len=5)) for p in parts] m_print(p_lst, box=True) -def print_key( - message: str, # Instructive message - key_bytes: bytes, # 32-byte key to be displayed - settings: Union["Settings", "GWSettings"], # Settings object - public_key: bool = False, # When True, uses Testnet address WIF format -) -> None: +def print_key(message: str, # Instructive message + key_bytes: bytes, # 32-byte key to be displayed + settings: Union['Settings', 'GWSettings'], # Settings object + public_key: bool = False # When True, uses Testnet address WIF format + ) -> None: """Print a symmetric key in WIF format. 
If local testing is not enabled, this function adds spacing in the @@ -258,31 +223,22 @@ def print_key( if settings.local_testing_mode: m_print([message, b58key], box=True) else: - guide, chunk_length = ( - (B58_PUBLIC_KEY_GUIDE, 7) if public_key else (B58_LOCAL_KEY_GUIDE, 3) - ) + guide, chunk_length = (B58_PUBLIC_KEY_GUIDE, 7) if public_key else (B58_LOCAL_KEY_GUIDE, 3) - key = " ".join(split_string(b58key, item_len=chunk_length)) + key = ' '.join(split_string(b58key, item_len=chunk_length)) m_print([message, guide, key], box=True) def print_title(operation: str) -> None: """Print the TFC title.""" operation_name = {TX: TRANSMITTER, RX: RECEIVER, NC: RELAY}[operation] - m_print( - f"{TFC} - {operation_name} {VERSION}", - bold=True, - head_clear=True, - head=1, - tail=1, - ) + m_print(f"{TFC} - {operation_name} {VERSION}", bold=True, head_clear=True, head=1, tail=1) -def print_on_previous_line( - reps: int = 1, # Number of times to repeat the action - delay: float = 0.0, # Time to sleep before clearing lines above - flush: bool = False, # Flush stdout when true -) -> None: +def print_on_previous_line(reps: int = 1, # Number of times to repeat the action + delay: float = 0.0, # Time to sleep before clearing lines above + flush: bool = False # Flush stdout when true + ) -> None: """Next message is printed on upper line.""" time.sleep(delay) @@ -298,15 +254,14 @@ def print_spacing(count: int = 0) -> None: print() -def rp_print( - message: str, # Message to print - ts: Optional["datetime"] = None, # Timestamp for displayed event - bold: bool = False, # When True, prints the message in bold style -) -> None: +def rp_print(message: str, # Message to print + ts: Optional['datetime'] = None, # Timestamp for displayed event + bold: bool = False # When True, prints the message in bold style + ) -> None: """Print an event in Relay Program.""" if ts is None: ts = datetime.now() - ts_fmt = ts.strftime("%b %d - %H:%M:%S.%f")[:-4] + ts_fmt = ts.strftime('%b %d - %H:%M:%S.%f')[:-4] if bold: print(f"{BOLD_ON}{ts_fmt} - {message}{NORMAL_TEXT}") diff --git a/src/common/path.py b/src/common/path.py index bf6df3d..30e11be 100644 --- a/src/common/path.py +++ b/src/common/path.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
@@ -30,17 +30,16 @@ import tkinter from tkinter import filedialog from src.common.exceptions import SoftError -from src.common.output import m_print, print_on_previous_line +from src.common.output import m_print, print_on_previous_line if typing.TYPE_CHECKING: from src.common.db_settings import Settings -def ask_path_gui( - prompt_msg: str, # Directory selection prompt - settings: "Settings", # Settings object - get_file: bool = False, # When True, prompts for a path to file instead of a directory -) -> str: # Selected directory or file +def ask_path_gui(prompt_msg: str, # Directory selection prompt + settings: 'Settings', # Settings object + get_file: bool = False # When True, prompts for a path to file instead of a directory + ) -> str: # Selected directory or file """Prompt (file) path with Tkinter / CLI prompt.""" try: if settings.disable_gui_dialog: @@ -57,10 +56,7 @@ def ask_path_gui( root.destroy() if not file_path: - raise SoftError( - ("File" if get_file else "Path") + " selection aborted.", - head_clear=True, - ) + raise SoftError(("File" if get_file else "Path") + " selection aborted.", head_clear=True) return file_path @@ -91,13 +87,11 @@ class Completer(object): def complete_path(self, path: Optional[str] = None) -> Any: """Perform completion of the filesystem path.""" if not path: - return self.listdir(".") + return self.listdir('.') dir_name, rest = os.path.split(path) - tmp = dir_name if dir_name else "." - matches = [ - os.path.join(dir_name, p) for p in self.listdir(tmp) if p.startswith(rest) - ] + tmp = dir_name if dir_name else '.' + matches = [os.path.join(dir_name, p) for p in self.listdir(tmp) if p.startswith(rest)] # More than one match, or single match which does not exist (typo) if len(matches) > 1 or not os.path.exists(path): @@ -108,12 +102,12 @@ class Completer(object): return [os.path.join(path, p) for p in self.listdir(path)] # Exact file match terminates this completion - return [path + " "] + return [path + ' '] def path_complete(self, args: Optional[List[str]] = None) -> Any: """Return the list of directories from the current directory.""" if not args: - return self.complete_path(".") + return self.complete_path('.') # Treat the last arg as a path and complete it return self.complete_path(args[-1]) @@ -124,22 +118,20 @@ class Completer(object): return self.path_complete(line)[state] -def ask_path_cli( - prompt_msg: str, # File selection prompt - get_file: bool = False, # When True, prompts for a file instead of a directory -) -> str: # Selected directory or file +def ask_path_cli(prompt_msg: str, # File selection prompt + get_file: bool = False # When True, prompts for a file instead of a directory + ) -> str: # Selected directory or file """\ Prompt file location or store directory for a file with tab-complete supported CLI. 
""" - readline.set_completer_delims(" \t\n;") - readline.parse_and_bind("tab: complete") + readline.set_completer_delims(' \t\n;') + readline.parse_and_bind('tab: complete') readline.set_completer(Completer(get_file).complete) - print("") + print('') - if get_file: - return cli_get_file(prompt_msg) - return cli_get_path(prompt_msg) + func = cli_get_file if get_file else cli_get_path + return func(prompt_msg) def cli_get_file(prompt_msg: str) -> str: @@ -153,9 +145,9 @@ def cli_get_file(prompt_msg: str) -> str: raise KeyboardInterrupt if os.path.isfile(path_to_file): - if path_to_file.startswith("./"): - path_to_file = path_to_file[len("./") :] - print("") + if path_to_file.startswith('./'): + path_to_file = path_to_file[len('./'):] + print('') return path_to_file m_print("File selection error.", head=1, tail=1) @@ -172,8 +164,8 @@ def cli_get_path(prompt_msg: str) -> str: try: directory = input(prompt_msg + ": ") - if directory.startswith("./"): - directory = directory[len("./") :] + if directory.startswith('./'): + directory = directory[len('./'):] if not directory.endswith(os.sep): directory += os.sep diff --git a/src/common/reed_solomon.py b/src/common/reed_solomon.py index 250efc3..908ef95 100644 --- a/src/common/reed_solomon.py +++ b/src/common/reed_solomon.py @@ -187,7 +187,7 @@ import itertools import math import shutil -from array import array +from array import array from typing import Any, Dict, Iterator, List, Optional, overload, Tuple, Union @@ -195,23 +195,23 @@ class ReedSolomonError(Exception): """Reed-Solomon exception stub.""" -# For efficiency, gf_exp[] has size 2*GF_SIZE, so that a simple -# multiplication of two numbers can be resolved without calling % 255. -# For more info on how to generate this extended exponentiation table, -# see paper: -# "Fast software implementation of finite field operations", -# Cheng Huang and Lihao Xu -# Washington University in St. Louis, Tech. Rep (2003). - -_bytearray = bytearray # type: Any -gf_exp = _bytearray([1] * 512) -gf_log = _bytearray(256) +""" +For efficiency, gf_exp[] has size 2*GF_SIZE, so that a simple +multiplication of two numbers can be resolved without calling % 255. +For more info on how to generate this extended exponentiation table, +see paper: + "Fast software implementation of finite field operations", + Cheng Huang and Lihao Xu + Washington University in St. Louis, Tech. Rep (2003). +""" +_bytearray = bytearray # type: Any +gf_exp = _bytearray([1] * 512) +gf_log = _bytearray(256) field_charac = int(2 ** 8 - 1) # type: int # Galois Field elements maths - def rwh_primes1(n: int) -> List[int]: """Returns a list of primes < n https://stackoverflow.com/questions/2068372/fastest-way-to-list-all-primes-below-n/3035188#3035188 @@ -219,13 +219,15 @@ def rwh_primes1(n: int) -> List[int]: sieve = [True] * int(n / 2) for i in range(3, int(n ** 0.5) + 1, 2): if sieve[int(i / 2)]: - sieve[int((i * i) / 2) :: i] = [False] * int((n - i * i - 1) / (2 * i) + 1) + sieve[int((i * i) / 2)::i] = [False] * int((n - i * i - 1) / (2 * i) + 1) return [2] + [2 * i + 1 for i in range(1, int(n / 2)) if sieve[i]] -def find_prime_polys( - generator: int = 2, c_exp: int = 8, fast_primes: bool = False, single: bool = False -) -> Any: +def find_prime_polys(generator: int = 2, + c_exp: int = 8, + fast_primes: bool = False, + single: bool = False + ) -> Any: """ Compute the list of prime polynomials for the given generator and Galois Field characteristic exponent. 
@@ -287,17 +289,15 @@ def find_prime_polys( # Prepare the finite field characteristic (2^p - 1), this # also represent the maximum possible value in this field - root_charac = 2 # we're in GF(2) - field_charac_ = int(root_charac ** c_exp - 1) + root_charac = 2 # we're in GF(2) + field_charac_ = int(root_charac ** c_exp - 1) field_charac_next = int(root_charac ** (c_exp + 1) - 1) if fast_primes: # Generate maybe prime polynomials and # check later if they really are irreducible prim_candidates = rwh_primes1(field_charac_next) - prim_candidates = [ - x for x in prim_candidates if x > field_charac_ - ] # filter out too small primes + prim_candidates = [x for x in prim_candidates if x > field_charac_] # filter out too small primes else: # try each possible prime polynomial, but skip even numbers # (because divisible by 2 so necessarily not irreducible) @@ -311,7 +311,7 @@ def find_prime_polys( # memory variable to indicate if a value was already generated # in the field (value at index x is set to 1) or not (set to # 0 by default) - seen = _bytearray(field_charac_ + 1) + seen = _bytearray(field_charac_ + 1) conflict = False # flag to know if there was at least one conflict # Second loop, build the whole Galois Field @@ -347,9 +347,10 @@ def find_prime_polys( # of each prime polynomial: print [hex(i) for i in correct_primes] -def init_tables( - prim: int = 0x11D, generator: int = 2, c_exp: int = 8 -) -> List[Union[Any, Any, int]]: +def init_tables(prim: int = 0x11D, + generator: int = 2, + c_exp: int = 8 + ) -> List[Union[Any, Any, int]]: """\ Precompute the logarithm and anti-log tables for faster computation later, using the provided primitive polynomial. These tables are @@ -400,10 +401,7 @@ def init_tables( if c_exp <= 8: _bytearray = bytearray else: - - def _bytearray( - obj: Union[str, bytes, int, List[int]] = 0, encoding: str = "latin-1" - ) -> Any: + def _bytearray(obj: Union[str, bytes, int, List[int]] = 0, encoding: str = "latin-1") -> Any: """Fake bytearray replacement, supporting int values above 255""" # always use Latin-1 and not UTF8 because Latin-1 maps the # first 256 characters to their byte value equivalents. UTF8 @@ -423,7 +421,7 @@ def init_tables( global gf_exp, gf_log, field_charac field_charac = int(2 ** c_exp - 1) - gf_exp = _bytearray(field_charac * 2) + gf_exp = _bytearray(field_charac * 2) # Anti-log (exponential) table. 
The first two # elements will always be [GF256int(1), generator] @@ -443,7 +441,7 @@ def init_tables( for i in range(field_charac): gf_exp[i] = x # compute anti-log for this value and store it in a table gf_log[x] = i # compute log at the same time - x = gf_mult_nolut(x, generator, prim, field_charac + 1) + x = gf_mult_nolut(x, generator, prim, field_charac + 1) # If you use only generator==2 or a power of 2, you can use the # following which is faster than gf_mult_noLUT(): @@ -500,9 +498,9 @@ def gf_mul(x: int, y: int) -> int: def gf_div(x: int, y: int) -> int: """Perform division in the binary Galois Field.""" - if not y: + if y == 0: raise ZeroDivisionError() - if not x: + if x == 0: return 0 ret_val = gf_exp[(gf_log[x] + field_charac - gf_log[y]) % field_charac] # type: int return ret_val @@ -562,7 +560,10 @@ def cl_div(dividend: int, divisor: int) -> int: return dividend -def gf_mult_nolut_slow(x: int, y: int, prim: int = 0) -> int: +def gf_mult_nolut_slow(x: int, + y: int, + prim: int = 0 + ) -> int: """\ Multiplication in Galois Fields without using a precomputed look-up table (and thus it's slower) by using the standard carry-less @@ -580,9 +581,12 @@ def gf_mult_nolut_slow(x: int, y: int, prim: int = 0) -> int: return result -def gf_mult_nolut( - x: int, y: int, prim: int = 0, field_charac_full: int = 256, carryless: bool = True -) -> int: +def gf_mult_nolut(x: int, + y: int, + prim: int = 0, + field_charac_full: int = 256, + carryless: bool = True + ) -> int: """\ Galois Field integer multiplication using Russian Peasant Multiplication algorithm (faster than the standard multiplication @@ -612,25 +616,28 @@ def gf_mult_nolut( # Galois Field polynomials maths - def gf_poly_scale(p: bytes, x: int) -> bytearray: """No docstring provided.""" - ret_val = _bytearray([gf_mul(p[i], x) for i in range(len(p))]) # type: bytearray + ret_val = _bytearray([gf_mul(p[i], x) for i, _ in enumerate(p)]) # type: bytearray return ret_val -def gf_poly_add(p: bytes, q: Union[bytearray, List[int]]) -> Any: +def gf_poly_add(p: bytes, + q: Union[bytearray, List[int]] + ) -> Any: """No docstring provided.""" r = _bytearray(max(len(p), len(q))) # type: bytearray - r[len(r) - len(p) : len(r)] = p + r[len(r) - len(p):len(r)] = p for i, _ in enumerate(q): r[i + len(r) - len(q)] ^= q[i] return r -def gf_poly_mul(p: Any, q: List[Any]) -> Any: +def gf_poly_mul(p: Any, + q: List[Any] + ) -> Any: """\ Multiply two polynomials, inside Galois Field (but the procedure is generic). Optimized function by precomputation of log. @@ -639,7 +646,7 @@ def gf_poly_mul(p: Any, q: List[Any]) -> Any: r = _bytearray(len(p) + len(q) - 1) # Precompute the logarithm of p - lp = [gf_log[p[i]] for i in range(len(p))] + lp = [gf_log[p[i]] for i, _ in enumerate(p)] # Compute the polynomial multiplication (just like the # outer product of two vectors, we multiply each @@ -648,20 +655,22 @@ def gf_poly_mul(p: Any, q: List[Any]) -> Any: # Optimization: load the coefficient once qj = q[j] # log(0) is undefined, we need to check that - if qj: + if qj != 0: # Optimization: precache the logarithm # of the current coefficient of q lq = gf_log[qj] for i, _ in enumerate(p): # log(0) is undefined, need to check that... 
- if p[i]: + if p[i] != 0: # Equivalent to: # r[i + j] = gf_add(r[i+j], gf_mul(p[i], q[j])) r[i + j] ^= gf_exp[lp[i] + lq] return r -def gf_poly_mul_simple(p: List[int], q: List[int]) -> bytearray: +def gf_poly_mul_simple(p: List[int], + q: List[int] + ) -> bytearray: """Multiply two polynomials, inside Galois Field Simple equivalent way of multiplying two polynomials @@ -690,9 +699,9 @@ def gf_poly_neg(poly: List[int]) -> List[int]: return poly -def gf_poly_div( - dividend: bytearray, divisor: Union[bytearray, List[int]] -) -> Tuple[bytearray, bytearray]: +def gf_poly_div(dividend: bytearray, + divisor: Union[bytearray, List[int]] + ) -> Tuple[bytearray, bytearray]: """Fast polynomial division by using Extended Synthetic Division and optimized for GF(2^p) computations (doesn't work with standard polynomials outside of this Galois Field). @@ -722,13 +731,13 @@ def gf_poly_div( # it should still work because gf_mul() will take care of the # condition. But it's still a good practice to put the condition # here. - if coef: + if coef != 0: # In synthetic division, we always skip the first coefficient # of the divisor, because it's only used to normalize the # dividend coefficient for j in range(1, len(divisor)): # log(0) is undefined - if divisor[j]: + if divisor[j] != 0: # Equivalent to the more mathematically correct (but # XORing directly is faster): # msg_out[i + j] += -divisor[j] * coef @@ -759,8 +768,10 @@ def gf_poly_eval(poly: Union[bytearray, List[int]], x: int) -> int: # Reed-Solomon encoding - -def rs_generator_poly(nsym: int, fcr: int = 0, generator: int = 2) -> bytearray: +def rs_generator_poly(nsym: int, + fcr: int = 0, + generator: int = 2 + ) -> bytearray: """\ Generate an irreducible generator polynomial (necessary to encode a message into Reed-Solomon) @@ -771,9 +782,10 @@ def rs_generator_poly(nsym: int, fcr: int = 0, generator: int = 2) -> bytearray: return g -def rs_generator_poly_all( - max_nsym: int, fcr: int = 0, generator: int = 2 -) -> Dict[int, bytearray]: +def rs_generator_poly_all(max_nsym: int, + fcr: int = 0, + generator: int = 2 + ) -> Dict[int, bytearray]: """\ Generate all irreducible generator polynomials up to max_nsym (usually you can use n, the length of the message+ecc). 
Very useful @@ -786,9 +798,11 @@ def rs_generator_poly_all( return g_all -def rs_simple_encode_msg( - msg_in: bytearray, nsym: int, fcr: int = 0, generator: int = 2 -) -> bytearray: +def rs_simple_encode_msg(msg_in: bytearray, + nsym: int, + fcr: int = 0, + generator: int = 2 + ) -> bytearray: """\ Simple Reed-Solomon encoding (mainly an example for you to understand how it works, because it's slower than the in-lined @@ -797,10 +811,8 @@ def rs_simple_encode_msg( global field_charac if (len(msg_in) + nsym) > field_charac: # pragma: no cover - raise ValueError( - "Message is too long (%i when max is %i)" - % (len(msg_in) + nsym, field_charac) - ) + raise ValueError("Message is too long (%i when max is %i)" + % (len(msg_in) + nsym, field_charac)) gen = rs_generator_poly(nsym, fcr, generator) @@ -817,13 +829,12 @@ def rs_simple_encode_msg( return msg_out -def rs_encode_msg( - msg_in: bytes, - nsym: int, - fcr: int = 0, - generator: int = 2, - gen: Optional[bytearray] = None, -) -> bytearray: +def rs_encode_msg(msg_in: bytes, + nsym: int, + fcr: int = 0, + generator: int = 2, + gen: Optional[bytearray] = None + ) -> bytearray: """\ Reed-Solomon main encoding function, using polynomial division (Extended Synthetic Division, the fastest algorithm available to my @@ -831,10 +842,8 @@ def rs_encode_msg( """ global field_charac if (len(msg_in) + nsym) > field_charac: # pragma: no cover - raise ValueError( - "Message is too long (%i when max is %i)" - % (len(msg_in) + nsym, field_charac) - ) + raise ValueError("Message is too long (%i when max is %i)" + % (len(msg_in) + nsym, field_charac)) if gen is None: gen = rs_generator_poly(nsym, fcr, generator) msg_in = _bytearray(msg_in) @@ -844,12 +853,12 @@ def rs_encode_msg( msg_out = _bytearray(msg_in) + _bytearray(len(gen) - 1) # type: bytearray # Precompute the logarithm of every items in the generator - lgen = _bytearray([gf_log[gen[j]] for j in range(len(gen))]) + lgen = _bytearray([gf_log[gen[j]] for j, _ in enumerate(gen)]) # Extended synthetic division main loop # Fastest implementation with PyPy (but the Cython # version in creedsolo.pyx is about 2x faster) - for i in range(len(msg_in)): + for i, _ in enumerate(msg_in): # Note that it's msg_out here, not msg_in. Thus, we reuse the # updated value at each iteration (this is how Synthetic Division # works: instead of storing in a temporary register the @@ -866,7 +875,7 @@ def rs_encode_msg( # log(0) is undefined, so we need to manually check for this # case. There's no need to check the divisor here because we # know it can't be 0 since we generated it. - if coef: + if coef != 0: lcoef = gf_log[coef] # precaching # In synthetic division, we always skip the first @@ -892,16 +901,17 @@ def rs_encode_msg( # Equivalent to c = mprime - b, where # mprime is msg_in padded with [0]*nsym - msg_out[: len(msg_in)] = msg_in + msg_out[:len(msg_in)] = msg_in return msg_out # Reed-Solomon decoding - -def rs_calc_syndromes( - msg: bytearray, nsym: int, fcr: int = 0, generator: int = 2 -) -> List[int]: +def rs_calc_syndromes(msg: bytearray, + nsym: int, + fcr: int = 0, + generator: int = 2 + ) -> List[int]: """\ Given the received codeword msg and the number of error correcting symbols (nsym), computes the syndromes polynomial. 
Mathematically, @@ -924,13 +934,12 @@ def rs_calc_syndromes( return [0] + [gf_poly_eval(msg, gf_pow(generator, i + fcr)) for i in range(nsym)] -def rs_correct_errata( - msg_in: bytearray, - synd: List[int], - err_pos: List[int], - fcr: int = 0, - generator: int = 2, -) -> bytearray: +def rs_correct_errata(msg_in: bytearray, + synd: List[int], + err_pos: List[int], + fcr: int = 0, + generator: int = 2 + ) -> bytearray: """\ Forney algorithm, computes the values (error magnitude) to correct the input message. @@ -948,7 +957,7 @@ def rs_correct_errata( # errata locator algorithm to work (e.g. instead of [0, 1, 2] it # will become [len(msg)-1, len(msg)-2, len(msg) -3]) coef_pos = [len(msg) - 1 - p for p in err_pos] - err_loc = rs_find_errata_locator(coef_pos, generator) + err_loc = rs_find_errata_locator(coef_pos, generator) # Calculate errata evaluator polynomial (often # called Omega or Gamma in academic papers) @@ -965,7 +974,7 @@ def rs_correct_errata( # Forney algorithm: Compute the magnitudes will store the values # that need to be corrected (subtracted) to the message containing # errors. This is sometimes called the error magnitude polynomial. - e = _bytearray(len(msg)) + e = _bytearray(len(msg)) xlength = len(x) for i, xi in enumerate(x): xi_inv = gf_inverse(xi) @@ -1036,12 +1045,11 @@ def rs_correct_errata( return msg -def rs_find_error_locator( - synd: List[int], - nsym: int, - erase_loc: Optional[bytearray] = None, - erase_count: int = 0, -) -> List[int]: +def rs_find_error_locator(synd: List[int], + nsym: int, + erase_loc: Optional[bytearray] = None, + erase_count: int = 0 + ) -> List[int]: """\ Find error/errata locator and evaluator polynomials with Berlekamp-Massey algorithm @@ -1143,7 +1151,7 @@ def rs_find_error_locator( old_loc += _bytearray([0]) # Iteratively estimate the errata locator and evaluator polynomials - if delta: # Update only if there's a discrepancy + if delta != 0: # Update only if there's a discrepancy # Rule B (rule A is implicitly defined because rule A just # says that we skip any modification for this iteration) if len(old_loc) > len(err_loc): @@ -1176,14 +1184,16 @@ def rs_find_error_locator( # Check if the result is correct, that there's not too many errors to # correct drop leading 0s, else errs will not be of the correct size err_loc_ = list(itertools.dropwhile(lambda x: x == 0, err_loc)) # type: List[int] - errs = len(err_loc_) - 1 + errs = len(err_loc_) - 1 if (errs - erase_count) * 2 + erase_count > nsym: # pragma: no cover raise ReedSolomonError("Too many errors to correct") return err_loc_ -def rs_find_errata_locator(e_pos: List[int], generator: int = 2) -> List[int]: +def rs_find_errata_locator(e_pos: List[int], + generator: int = 2 + ) -> List[int]: """\ Compute the erasures/errors/errata locator polynomial from the erasures/errors/errata positions (the positions must be relative to @@ -1216,17 +1226,15 @@ def rs_find_errata_locator(e_pos: List[int], generator: int = 2) -> List[int]: print(string.center(terminal_width)) if len(e_pos) > 0: - print("") - for s in [ - "Warning! Reed-Solomon erasure code", - "detected and corrected {} errors in ".format(len(e_pos)), - "a received packet. This might indicate", - "bad connection, an eminent adapter or", - "data diode HW failure or that serial", - "interface's baud rate is set too high.", - ]: + print('') + for s in ["Warning! Reed-Solomon erasure code", + "detected and corrected {} errors in ".format(len(e_pos)), + "a received packet. 
This might indicate", + "bad connection, an eminent adapter or", + "data diode HW failure or that serial", + "interface's baud rate is set too high."]: c_print(s) - print("") + print('') # erasures_loc is very simple to compute: # erasures_loc = prod(1 - x*alpha**i) for i in erasures_pos and @@ -1235,15 +1243,13 @@ def rs_find_errata_locator(e_pos: List[int], generator: int = 2) -> List[int]: # we simply generate a Polynomial([c, 0]) where 0 is the constant # and c is positioned to be the coefficient for x^1. for i in e_pos: - e_loc = gf_poly_mul( - e_loc, gf_poly_add(_bytearray([1]), [gf_pow(generator, i), 0]) - ) + e_loc = gf_poly_mul(e_loc, gf_poly_add(_bytearray([1]), [gf_pow(generator, i), 0])) return e_loc -def rs_find_error_evaluator( - synd: List[int], err_loc: List[int], nsym: int -) -> bytearray: +def rs_find_error_evaluator(synd: List[int], + err_loc: List[int], + nsym: int) -> bytearray: """\ Compute the error (or erasures if you supply sigma=erasures locator polynomial, or errata) evaluator polynomial Omega from the syndrome @@ -1268,22 +1274,23 @@ def rs_find_error_evaluator( return remainder -def rs_find_errors( - err_loc: Union[bytearray, List[int]], nmess: int, generator: int = 2 -) -> List[int]: +def rs_find_errors(err_loc: Union[bytearray, List[int]], + nmess: int, + generator: int = 2 + ) -> List[int]: """\ Find the roots (i.e., where evaluation = zero) of error polynomial by brute-force trial, this is a sort of Chien's search (but less efficient, Chien's search is a way to evaluate the polynomial such that each evaluation only takes constant time). """ - errs = len(err_loc) - 1 + errs = len(err_loc) - 1 err_pos = [] # Normally we should try all 2^8 possible values, but here # we optimize to just check the interesting symbols for i in range(nmess): - if not gf_poly_eval(err_loc, gf_pow(generator, i)): + if gf_poly_eval(err_loc, gf_pow(generator, i)) == 0: # It's a 0? Bingo, it's a root of the error locator # polynomial, in other terms this is the location of an error err_pos.append(nmess - 1 - i) @@ -1303,16 +1310,16 @@ def rs_find_errors( # all 0), so we may not even be able to check if that's correct # or not, so I'm not sure the brute-force approach may even be # possible. - raise ReedSolomonError( - "Too many (or few) errors found by Chien" - " Search for the errata locator polynomial!" - ) + raise ReedSolomonError("Too many (or few) errors found by Chien" + " Search for the errata locator polynomial!") return err_pos -def rs_forney_syndromes( - synd: List[int], pos: List[int], nmess: int, generator: int = 2 -) -> List[int]: +def rs_forney_syndromes(synd: List[int], + pos: List[int], + nmess: int, + generator: int = 2 + ) -> List[int]: """\ Compute Forney syndromes, which computes a modified syndromes to compute only errors (erasures are trimmed out). 
Do not confuse this @@ -1327,7 +1334,7 @@ def rs_forney_syndromes( # Optimized method, all operations are in-lined make a copy and # trim the first coefficient which is always 0 by definition fsynd = list(synd[1:]) - for i in range(len(pos)): + for i, _ in enumerate(pos): x = gf_pow(generator, erase_pos_reversed[i]) for j in range(len(fsynd) - 1): fsynd[j] = gf_mul(fsynd[j], x) ^ fsynd[j + 1] @@ -1359,14 +1366,13 @@ def rs_forney_syndromes( return fsynd -def rs_correct_msg( - msg_in: Union[bytes, bytearray], - nsym: int, - fcr: int = 0, - generator: int = 2, - erase_pos: Optional[List[int]] = None, - only_erasures: bool = False, -) -> Tuple[bytearray, bytearray]: +def rs_correct_msg(msg_in: Union[bytes, bytearray], + nsym: int, + fcr: int = 0, + generator: int = 2, + erase_pos: Optional[List[int]] = None, + only_erasures: bool = False + ) -> Tuple[bytearray, bytearray]: """Reed-Solomon main decoding function""" global field_charac if len(msg_in) > field_charac: # pragma: no cover @@ -1383,9 +1389,8 @@ def rs_correct_msg( # with a position above the length of field_charac -- if you # really need a bigger message without chunking, then you should # better enlarge c_exp so that you get a bigger field). - raise ValueError( - "Message is too long (%i when max is %i)" % (len(msg_in), field_charac) - ) + raise ValueError("Message is too long (%i when max is %i)" + % (len(msg_in), field_charac)) msg_out = _bytearray(msg_in) # copy of message @@ -1412,7 +1417,7 @@ def rs_correct_msg( # Check if there's any error/erasure in the input codeword. If not # (all syndromes coefficients are 0), then just return the codeword # as-is. - if not max(synd): + if max(synd) == 0: return msg_out[:-nsym], msg_out[-nsym:] # no errors # Find errors locations @@ -1443,7 +1448,7 @@ def rs_correct_msg( # Check if the final message is fully repaired. synd = rs_calc_syndromes(msg_out, nsym, fcr, generator) - if max(synd) > 0: + if max(synd) > 0: # pragma: no cover raise ReedSolomonError("Could not correct message") # Return the successfully decoded message. Also return the corrected @@ -1451,23 +1456,21 @@ def rs_correct_msg( return msg_out[:-nsym], msg_out[-nsym:] -def rs_correct_msg_nofsynd( - msg_in: bytearray, - nsym: int, - fcr: int = 0, - generator: int = 2, - erase_pos: Optional[List[int]] = None, - only_erasures: bool = False, -) -> Tuple[bytearray, bytearray]: +def rs_correct_msg_nofsynd(msg_in: bytearray, + nsym: int, + fcr: int = 0, + generator: int = 2, + erase_pos: Optional[List[int]] = None, + only_erasures: bool = False + ) -> Tuple[bytearray, bytearray]: """\ Reed-Solomon main decoding function, without using the modified Forney syndromes. """ global field_charac if len(msg_in) > field_charac: # pragma: no cover - raise ValueError( - "Message is too long (%i when max is %i)" % (len(msg_in), field_charac) - ) + raise ValueError("Message is too long (%i when max is %i)" + % (len(msg_in), field_charac)) msg_out = _bytearray(msg_in) # copy of message @@ -1493,7 +1496,7 @@ def rs_correct_msg_nofsynd( # Check if there's any error/erasure in the input codeword. If not # (all syndromes coefficients are 0), then just return the codeword # as-is. - if not max(synd): + if max(synd) == 0: return msg_out[:-nsym], msg_out[-nsym:] # no errors # Prepare erasures locator and evaluator polynomials. 
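
The "too many errors/erasures" checks in rs_find_error_locator and rs_correct_msg above both reduce to the standard Reed-Solomon bound: nsym parity symbols can repair e errors at unknown positions plus s erasures at known positions as long as 2e + s <= nsym. A small self-contained sketch of that bound (the helper name is illustrative only, not part of this diff):

def correctable(num_errors: int, num_erasures: int, nsym: int) -> bool:
    """Return True if nsym parity symbols can repair the given errata.

    An error at an unknown position consumes two parity symbols (one to
    locate it, one to compute its magnitude); an erasure at a known
    position consumes only one.
    """
    return 2 * num_errors + num_erasures <= nsym


# With nsym = 10: up to 5 errors, or 10 erasures, or a mix such as 3 + 4.
assert correctable(5, 0, nsym=10)
assert correctable(0, 10, nsym=10)
assert correctable(3, 4, nsym=10)
assert not correctable(4, 4, nsym=10)   # 2*4 + 4 = 12 > 10
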
@@ -1502,21 +1505,15 @@ def rs_correct_msg_nofsynd( # erase_eval = None erase_count = 0 if erase_pos: - erase_count = len(erase_pos) + erase_count = len(erase_pos) erase_pos_reversed = [len(msg_out) - 1 - eras for eras in erase_pos] - erase_loc = bytearray( - rs_find_errata_locator(erase_pos_reversed, generator=generator) - ) + erase_loc = bytearray(rs_find_errata_locator(erase_pos_reversed, generator=generator)) # Prepare errors/errata locator polynomial if only_erasures: err_loc = erase_loc[::-1] else: - err_loc = bytearray( - rs_find_error_locator( - synd, nsym, erase_loc=erase_loc, erase_count=erase_count - ) - ) + err_loc = bytearray(rs_find_error_locator(synd, nsym, erase_loc=erase_loc, erase_count=erase_count)) err_loc = err_loc[::-1] # Locate the message errors @@ -1541,7 +1538,11 @@ def rs_correct_msg_nofsynd( return msg_out[:-nsym], msg_out[-nsym:] -def rs_check(msg: bytearray, nsym: int, fcr: int = 0, generator: int = 2) -> bool: +def rs_check(msg: bytearray, + nsym: int, + fcr: int = 0, + generator: int = 2 + ) -> bool: """\ Returns true if the message + ecc has no error of false otherwise (may not always catch a wrong decoding or a wrong message, @@ -1561,13 +1562,13 @@ def chunk(data: bytes, chunk_size: int) -> Iterator[bytes]: """Split a long message into chunks.""" -def chunk( - data: Union[bytearray, bytes], chunk_size: int -) -> Iterator[Union[bytearray, bytes]]: +def chunk(data: Union[bytearray, bytes], + chunk_size: int + ) -> Iterator[Union[bytearray, bytes]]: """Split a long message into chunks.""" for i in range(0, len(data), chunk_size): # Split the long message in a chunk. - chunk_ = data[i : i + chunk_size] + chunk_ = data[i: i + chunk_size] yield chunk_ @@ -1589,16 +1590,15 @@ class RSCodec(object): previous values (0 and 0x11d). """ - def __init__( - self, - nsym: int = 10, - nsize: int = 255, - fcr: int = 0, - prim: int = 0x11D, - generator: int = 2, - c_exp: int = 8, - single_gen: bool = True, - ) -> None: + def __init__(self, + nsym: int = 10, + nsize: int = 255, + fcr: int = 0, + prim: int = 0x11D, + generator: int = 2, + c_exp: int = 8, + single_gen: bool = True + ) -> None: """\ Initialize the Reed-Solomon codec. Note that different parameters change the internal values (the ecc symbols, look-up @@ -1624,15 +1624,11 @@ class RSCodec(object): # resize the Galois Field. 
if nsize > 255 and c_exp <= 8: # Get the next closest power of two - c_exp = int( - math.log(2 ** (math.floor(math.log(nsize) / math.log(2)) + 1), 2) - ) + c_exp = int(math.log(2 ** (math.floor(math.log(nsize) / math.log(2)) + 1), 2)) # prim was not correctly defined, find one if c_exp != 8 and prim == 0x11D: - prim = find_prime_polys( - generator=generator, c_exp=c_exp, fast_primes=True, single=True - ) + prim = find_prime_polys(generator=generator, c_exp=c_exp, fast_primes=True, single=True) if nsize == 255: # Resize chunk size if not set nsize = int(2 ** c_exp - 1) @@ -1666,9 +1662,7 @@ class RSCodec(object): # Initialize the look-up tables for easy # and quick multiplication/division - self.gf_log, self.gf_exp, self.field_charac = init_tables( - prim, generator, c_exp - ) + self.gf_log, self.gf_exp, self.field_charac = init_tables(prim, generator, c_exp) # Pre-compute the generator polynomials if single_gen: @@ -1676,7 +1670,10 @@ class RSCodec(object): else: # pragma: no cover self.gen = rs_generator_poly_all(nsize, fcr=fcr, generator=generator) - def encode(self, data_: Union[bytes, str], nsym: Optional[int] = None) -> bytearray: + def encode(self, + data_: Union[bytes, str], + nsym: Optional[int] = None + ) -> bytearray: """\ Encode a message (i.e., add the ecc symbols) using Reed-Solomon, whatever the length of the message because we use chunking. @@ -1695,24 +1692,15 @@ class RSCodec(object): data = data_ enc = _bytearray() # type: bytearray for chunk_ in chunk(data, self.nsize - self.nsym): - enc.extend( - rs_encode_msg( - chunk_, - self.nsym, - fcr=self.fcr, - generator=self.generator, - gen=self.gen[nsym], - ) - ) + enc.extend(rs_encode_msg(chunk_, self.nsym, fcr=self.fcr, generator=self.generator, gen=self.gen[nsym])) return enc - def decode( - self, - data: bytes, - nsym: Optional[int] = None, - erase_pos: Optional[List[int]] = None, - only_erasures: bool = False, - ) -> Tuple[bytearray, bytearray]: + def decode(self, + data: bytes, + nsym: Optional[int] = None, + erase_pos: Optional[List[int]] = None, + only_erasures: bool = False + ) -> Tuple[bytearray, bytearray]: """\ Repair a message, whatever its size is, by using chunking. May return a wrong result if number of errors > nsym. Note that it @@ -1738,7 +1726,7 @@ class RSCodec(object): if isinstance(data, str): # pragma: no cover data = _bytearray(data) - dec = _bytearray() + dec = _bytearray() dec_full = _bytearray() for chunk_ in chunk(data, self.nsize): # Extract the erasures for this chunk @@ -1755,19 +1743,16 @@ class RSCodec(object): erase_pos = [x - (self.nsize + 1) for x in erase_pos if x > self.nsize] # Decode/repair this chunk! - rmes, recc = rs_correct_msg( - chunk_, - nsym, - fcr=self.fcr, - generator=self.generator, - erase_pos=e_pos, - only_erasures=only_erasures, - ) + rmes, recc = rs_correct_msg(chunk_, nsym, fcr=self.fcr, generator=self.generator, + erase_pos=e_pos, only_erasures=only_erasures) dec.extend(rmes) dec_full.extend(rmes + recc) return dec, dec_full - def check(self, data: bytearray, nsym: Optional[int] = None) -> List[bool]: + def check(self, + data: bytearray, + nsym: Optional[int] = None + ) -> List[bool]: """\ Check if a message+ecc stream is not corrupted (or fully repaired). Note: may return a wrong result if number of errors > nsym. 
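
For readers following the chunking and codec-parameter changes above, a short usage sketch of the RSCodec class may help. This is a sketch only: the import path below is an assumption for illustration (point it at wherever this module lives in the TFC tree), and it relies on encode()/decode() falling back to the codec's own nsym when none is given, as in the upstream reedsolo library.

# Illustrative sketch; the module path is assumed, not taken from this diff.
from src.common.reedsolomon import RSCodec

rsc = RSCodec(nsym=10)        # 10 ecc symbols -> up to 5 corrupted bytes per 255-byte chunk

codeword = rsc.encode(b'Reed-Solomon over the serial link')

corrupted = bytearray(codeword)
corrupted[0] ^= 0xFF          # simulate two bytes mangled in transit
corrupted[5] ^= 0x0F

decoded, decoded_with_ecc = rsc.decode(bytes(corrupted))
assert decoded == b'Reed-Solomon over the serial link'
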
diff --git a/src/common/statics.py b/src/common/statics.py index 54ffe5e..5bd1628 100644 --- a/src/common/statics.py +++ b/src/common/statics.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -19,542 +19,561 @@ You should have received a copy of the GNU General Public License along with TFC. If not, see . """ -# Program details -TFC = "TFC" -VERSION = "1.19.12" -TRANSMITTER = "Transmitter" -RECEIVER = "Receiver" -RELAY = "Relay" +"""Program details""" +TFC = 'TFC' +VERSION = '1.20.02' +TRANSMITTER = 'Transmitter' +RECEIVER = 'Receiver' +RELAY = 'Relay' -# Identifiers -# -# Placeholder accounts for databases need to be valid v3 Onion addresses. -LOCAL_ID = "localidlocalidlocalidlocalidlocalidlocalidlocalidloj7uyd" -LOCAL_PUBKEY = b"[\x84\x05\xa0kp\x80\xb4\rn\x10\x16\x81\xad\xc2\x02\xd05\xb8@Z\x06\xb7\x08\x0b@\xd6\xe1\x01h\x1a\xdc" -LOCAL_NICK = "local Source Computer" -DUMMY_CONTACT = "dummycontactdummycontactdummycontactdummycontactdumhsiid" -DUMMY_MEMBER = "dummymemberdummymemberdummymemberdummymemberdummymedakad" -DUMMY_NICK = "dummy_nick" -DUMMY_GROUP = "dummy_group" -TX = "tx" -RX = "rx" -NC = "nc" -TAILS = 'TAILS_PRODUCT_NAME="Tails"' +"""Identifiers + +Placeholder accounts for databases need to be valid v3 Onion addresses. +""" +LOCAL_ID = 'localidlocalidlocalidlocalidlocalidlocalidlocalidloj7uyd' +LOCAL_PUBKEY = b'[\x84\x05\xa0kp\x80\xb4\rn\x10\x16\x81\xad\xc2\x02\xd05\xb8@Z\x06\xb7\x08\x0b@\xd6\xe1\x01h\x1a\xdc' +LOCAL_NICK = 'local Source Computer' +DUMMY_CONTACT = 'dummycontactdummycontactdummycontactdummycontactdumhsiid' +DUMMY_MEMBER = 'dummymemberdummymemberdummymemberdummymemberdummymedakad' +DUMMY_NICK = 'dummy_nick' +DUMMY_GROUP = 'dummy_group' +TX = 'tx' +RX = 'rx' +NC = 'nc' +TAILS = 'TAILS_PRODUCT_NAME="Tails"' -# Window identifiers -WIN_TYPE_COMMAND = "system messages" -WIN_TYPE_FILE = "incoming files" -WIN_TYPE_CONTACT = "contact" -WIN_TYPE_GROUP = "group" +"""Window identifiers""" +WIN_TYPE_COMMAND = 'system messages' +WIN_TYPE_FILE = 'incoming files' +WIN_TYPE_CONTACT = 'contact' +WIN_TYPE_GROUP = 'group' -# Window UIDs +"""Window UIDs""" WIN_UID_COMMAND = b"win_uid_command" -WIN_UID_FILE = b"win_uid_file" +WIN_UID_FILE = b'win_uid_file' -# Packet types -COMMAND = "command" -FILE = "file" -MESSAGE = "message" +"""Packet types""" +COMMAND = 'command' +FILE = 'file' +MESSAGE = 'message' -# Group message IDs -NEW_GROUP = "new_group" -ADDED_MEMBERS = "added_members" -ALREADY_MEMBER = "already_member" -REMOVED_MEMBERS = "removed_members" -NOT_IN_GROUP = "not_in_group" -UNKNOWN_ACCOUNTS = "unknown_accounts" +"""Group message IDs""" +NEW_GROUP = 'new_group' +ADDED_MEMBERS = 'added_members' +ALREADY_MEMBER = 'already_member' +REMOVED_MEMBERS = 'removed_members' +NOT_IN_GROUP = 'not_in_group' +UNKNOWN_ACCOUNTS = 'unknown_accounts' -# Base58 encoding -B58_ALPHABET = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz" -MAINNET_HEADER = b"\x80" -TESTNET_HEADER = b"\xef" +"""Base58 encoding""" +B58_ALPHABET = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz' +MAINNET_HEADER = b'\x80' +TESTNET_HEADER = b'\xef' -# Base58 key types -B58_PUBLIC_KEY = "b58_public_key" -B58_LOCAL_KEY = "b58_local_key" +"""Base58 key types""" +B58_PUBLIC_KEY = 'b58_public_key' +B58_LOCAL_KEY = 'b58_local_key' -# Base58 key input guides -B58_PUBLIC_KEY_GUIDE = " A B C D E F G H I J K L " -B58_LOCAL_KEY_GUIDE = ( - " A B C D E F G H I J K L M N O P Q " -) 
+"""Base58 key input guides""" +B58_PUBLIC_KEY_GUIDE = ' A B C D E F G H I J K L ' +B58_LOCAL_KEY_GUIDE = ' A B C D E F G H I J K L M N O P Q ' -# Key exchange types -ECDHE = "X448" -PSK = "PSK" +"""Key exchange types""" +ECDHE = 'X448' +PSK = 'PSK' -# Contact setting types -LOGGING = "logging" -STORE = "store" -NOTIFY = "notify" +"""Contact setting types""" +LOGGING = 'logging' +STORE = 'store' +NOTIFY = 'notify' -# Command identifiers -CLEAR = "clear" -RESET = "reset" -POWEROFF = "systemctl poweroff" -GENERATE = "generate" +"""Command identifiers""" +CLEAR = 'clear' +RESET = 'reset' +POWEROFF = 'systemctl poweroff' +GENERATE = 'generate' -# Contact setting management +"""Contact setting management""" CONTACT_SETTING_HEADER_LENGTH = 2 -ENABLE = b"es" -DISABLE = b"ds" -ALL = "all" +ENABLE = b'es' +DISABLE = b'ds' +ALL = 'all' -# Networked Computer bypass states -NC_BYPASS_START = "nc_bypass_start" -NC_BYPASS_STOP = "nc_bypass_stop" +"""Networked Computer bypass states""" +NC_BYPASS_START = 'nc_bypass_start' +NC_BYPASS_STOP = 'nc_bypass_stop' -# Status messages -DONE = "DONE" -EVENT = "-!-" -ME = "Me" +"""Status messages""" +DONE = 'DONE' +EVENT = '-!-' +ME = 'Me' -# Data diode simulator identifiers -IDLE = "Idle" -DATA_FLOW = "Data flow" -SCNCLR = "scnclr" -SCNCRL = "scncrl" -NCDCLR = "ncdclr" -NCDCRL = "ncdcrl" +"""Data diode simulator identifiers""" +IDLE = 'Idle' +DATA_FLOW = 'Data flow' +SCNCLR = 'scnclr' +SCNCRL = 'scncrl' +NCDCLR = 'ncdclr' +NCDCRL = 'ncdcrl' -# VT100 codes -# -# VT100 codes are used to control printing to the terminal. These make -# building functions like textbox drawers possible. -CURSOR_UP_ONE_LINE = "\x1b[1A" -CURSOR_RIGHT_ONE_COLUMN = "\x1b[1C" -CLEAR_ENTIRE_LINE = "\x1b[2K" -CLEAR_ENTIRE_SCREEN = "\x1b[2J" -CURSOR_LEFT_UP_CORNER = "\x1b[H" -BOLD_ON = "\033[1m" -NORMAL_TEXT = "\033[0m" +"""VT100 codes + +VT100 codes are used to control printing to the terminal. These make +building functions like textbox drawers possible. +""" +CURSOR_UP_ONE_LINE = '\x1b[1A' +CURSOR_RIGHT_ONE_COLUMN = '\x1b[1C' +CLEAR_ENTIRE_LINE = '\x1b[2K' +CLEAR_ENTIRE_SCREEN = '\x1b[2J' +CURSOR_LEFT_UP_CORNER = '\x1b[H' +BOLD_ON = '\033[1m' +NORMAL_TEXT = '\033[0m' -# Separators -# -# Separator byte is a non-printable byte used to separate fields in -# serialized data structures. -US_BYTE = b"\x1f" +"""Separators + +Separator byte is a non-printable byte used to separate fields in +serialized data structures. +""" +US_BYTE = b'\x1f' -# Datagram headers +"""Datagram headers -# These headers are prepended to datagrams that are transmitted over -# serial or over the network. They tell the receiving device what type of -# datagram is in question. -# -# Datagrams with local key header contain the encrypted local key, used to -# encrypt commands and data transferred between local Source and -# Destination computers. Packets with the header are only accepted by the -# Relay Program when they originate from the user's Source Computer. Even -# if the Networked Computer is compromised and the local key datagram is -# injected to the Destination Computer, the injected key could not be -# accepted by the user as they don't know the decryption key for it. The -# worst case scenario is a DoS attack where the Receiver Program receives -# new local keys continuously. Such an attack would, however, reveal the -# user they are under a sophisticated attack, and that their Networked -# Computer has been compromised. 
-# -# Datagrams with Public key header contain TCB-level public keys that -# originate from the sender's Source Computer, and are displayed by the -# recipient's Networked Computer, from where they are manually typed to -# recipient's Destination Computer. -# -# Message and command type datagrams tell the Receiver Program whether to -# parse the trailing fields that determine which XChaCha20-Poly1305 -# decryption keys it should load. Contacts can of course try to alter -# their datagrams to contain a COMMAND_DATAGRAM_HEADER header, but Relay -# Program will by design drop them. Even if a compromised Networked -# Computer injects such a datagram to Destination Computer, the Receiver -# Program will drop the datagram when the MAC verification of the -# encrypted hash ratchet counter value fails. -# -# File type datagram contains an encrypted file that the Receiver Program -# caches until its decryption key arrives from the sender inside a -# special, automated key delivery message. +These headers are prepended to datagrams that are transmitted over +serial or over the network. They tell the receiving device what type of +datagram is in question. -# Unencrypted type datagrams contain commands intended for the Relay -# Program. These commands are in some cases preceded by an encrypted -# version of the command, that the Relay Program forwards to Receiver -# Program on Destination Computer. The unencrypted Relay commands are -# disabled during traffic masking to hide the quantity and schedule of -# communication even from the Networked Computer (in case it's compromised -# and monitoring the user). The fact these commands are unencrypted, do -# not cause security issues because if an adversary can compromise the -# Networked Computer to the point it can issue commands to the Relay -# Program, they could DoS the Relay Program, and thus TFC, anyway. -DATAGRAM_TIMESTAMP_LENGTH = 8 -DATAGRAM_HEADER_LENGTH = 1 -LOCAL_KEY_DATAGRAM_HEADER = b"L" -PUBLIC_KEY_DATAGRAM_HEADER = b"P" -MESSAGE_DATAGRAM_HEADER = b"M" -COMMAND_DATAGRAM_HEADER = b"K" -FILE_DATAGRAM_HEADER = b"F" -UNENCRYPTED_DATAGRAM_HEADER = b"U" +Datagrams with local key header contain the encrypted local key, used to +encrypt commands and data transferred between local Source and +Destination computers. Packets with the header are only accepted by the +Relay Program when they originate from the user's Source Computer. Even +if the Networked Computer is compromised and the local key datagram is +injected to the Destination Computer, the injected key could not be +accepted by the user as they don't know the decryption key for it. The +worst case scenario is a DoS attack where the Receiver Program receives +new local keys continuously. Such an attack would, however, reveal the +user they are under a sophisticated attack, and that their Networked +Computer has been compromised. + +Datagrams with Public key header contain TCB-level public keys that +originate from the sender's Source Computer, and are displayed by the +recipient's Networked Computer, from where they are manually typed to +recipient's Destination Computer. + +Message and command type datagrams tell the Receiver Program whether to +parse the trailing fields that determine which XChaCha20-Poly1305 +decryption keys it should load. Contacts can of course try to alter +their datagrams to contain a COMMAND_DATAGRAM_HEADER header, but Relay +Program will by design drop them. 
Even if a compromised Networked +Computer injects such a datagram to Destination Computer, the Receiver +Program will drop the datagram when the MAC verification of the +encrypted hash ratchet counter value fails. + +File type datagram contains an encrypted file that the Receiver Program +caches until its decryption key arrives from the sender inside a +special, automated key delivery message. + +Unencrypted type datagrams contain commands intended for the Relay +Program. These commands are in some cases preceded by an encrypted +version of the command, that the Relay Program forwards to Receiver +Program on Destination Computer. The unencrypted Relay commands are +disabled during traffic masking to hide the quantity and schedule of +communication even from the Networked Computer (in case it's compromised +and monitoring the user). The fact these commands are unencrypted, do +not cause security issues because if an adversary can compromise the +Networked Computer to the point it can issue commands to the Relay +Program, they could DoS the Relay Program, and thus TFC, anyway. +""" +DATAGRAM_TIMESTAMP_LENGTH = 8 +DATAGRAM_HEADER_LENGTH = 1 +LOCAL_KEY_DATAGRAM_HEADER = b'L' +PUBLIC_KEY_DATAGRAM_HEADER = b'P' +MESSAGE_DATAGRAM_HEADER = b'M' +COMMAND_DATAGRAM_HEADER = b'K' +FILE_DATAGRAM_HEADER = b'F' +UNENCRYPTED_DATAGRAM_HEADER = b'U' -# Group management headers -# -# Group management datagrams are are automatic messages that the -# Transmitter Program recommends the user to send when they make changes -# to the member list of a group, or when they add or remove groups. These -# messages are displayed by the Relay Program. -GROUP_ID_LENGTH = 4 -GROUP_ID_ENC_LENGTH = 13 -GROUP_MSG_ID_LENGTH = 16 -GROUP_MGMT_HEADER_LENGTH = 1 -GROUP_MSG_INVITE_HEADER = b"I" -GROUP_MSG_JOIN_HEADER = b"J" -GROUP_MSG_MEMBER_ADD_HEADER = b"N" -GROUP_MSG_MEMBER_REM_HEADER = b"R" -GROUP_MSG_EXIT_GROUP_HEADER = b"X" +"""Group management headers + +Group management datagrams are are automatic messages that the +Transmitter Program recommends the user to send when they make changes +to the member list of a group, or when they add or remove groups. These +messages are displayed by the Relay Program. +""" +GROUP_ID_LENGTH = 4 +GROUP_ID_ENC_LENGTH = 13 +GROUP_MSG_ID_LENGTH = 16 +GROUP_MGMT_HEADER_LENGTH = 1 +GROUP_MSG_INVITE_HEADER = b'I' +GROUP_MSG_JOIN_HEADER = b'J' +GROUP_MSG_MEMBER_ADD_HEADER = b'N' +GROUP_MSG_MEMBER_REM_HEADER = b'R' +GROUP_MSG_EXIT_GROUP_HEADER = b'X' -# Assembly packet headers -# -# These one-byte assembly packet headers are not part of the padded -# message parsed from assembly packets. They are however the very first -# plaintext byte, prepended to every padded assembly packet that is -# delivered to the recipient/local Destination Computer. The header -# delivers the information about if and when to assemble the packet, -# as well as when to drop any previously collected assembly packets. -FILE_PACKET_CTR_LENGTH = 8 +"""Assembly packet headers + +These one-byte assembly packet headers are not part of the padded +message parsed from assembly packets. They are however the very first +plaintext byte, prepended to every padded assembly packet that is +delivered to the recipient/local Destination Computer. The header +delivers the information about if and when to assemble the packet, +as well as when to drop any previously collected assembly packets. 
+""" +FILE_PACKET_CTR_LENGTH = 8 ASSEMBLY_PACKET_HEADER_LENGTH = 1 -M_S_HEADER = b"a" # Short message packet -M_L_HEADER = b"b" # First packet of multi-packet message -M_A_HEADER = b"c" # Appended packet of multi-packet message -M_E_HEADER = b"d" # Last packet of multi-packet message -M_C_HEADER = b"e" # Cancelled multi-packet message -P_N_HEADER = b"f" # Noise message packet +M_S_HEADER = b'a' # Short message packet +M_L_HEADER = b'b' # First packet of multi-packet message +M_A_HEADER = b'c' # Appended packet of multi-packet message +M_E_HEADER = b'd' # Last packet of multi-packet message +M_C_HEADER = b'e' # Cancelled multi-packet message +P_N_HEADER = b'f' # Noise message packet -F_S_HEADER = b"A" # Short file packet -F_L_HEADER = b"B" # First packet of multi-packet file -F_A_HEADER = b"C" # Appended packet of multi-packet file -F_E_HEADER = b"D" # Last packet of multi-packet file -F_C_HEADER = b"E" # Cancelled multi-packet file +F_S_HEADER = b'A' # Short file packet +F_L_HEADER = b'B' # First packet of multi-packet file +F_A_HEADER = b'C' # Appended packet of multi-packet file +F_E_HEADER = b'D' # Last packet of multi-packet file +F_C_HEADER = b'E' # Cancelled multi-packet file -C_S_HEADER = b"0" # Short command packet -C_L_HEADER = b"1" # First packet of multi-packet command -C_A_HEADER = b"2" # Appended packet of multi-packet command -C_E_HEADER = b"3" # Last packet of multi-packet command -C_C_HEADER = b"4" # Cancelled multi-packet command (reserved but not in use) -C_N_HEADER = b"5" # Noise command packet +C_S_HEADER = b'0' # Short command packet +C_L_HEADER = b'1' # First packet of multi-packet command +C_A_HEADER = b'2' # Appended packet of multi-packet command +C_E_HEADER = b'3' # Last packet of multi-packet command +C_C_HEADER = b'4' # Cancelled multi-packet command (reserved but not in use) +C_N_HEADER = b'5' # Noise command packet -# Unencrypted command headers -# -# These two-byte headers are only used to control the Relay Program on -# Networked Computer. These commands will not be used during traffic -# masking, as they would reveal when TFC is being used. These commands do -# not require encryption, because if an attacker can compromise the -# Networked Computer to the point it could inject commands to Relay -# Program, it could most likely also access any decryption keys used by -# the Relay Program. +"""Unencrypted command headers + +These two-byte headers are only used to control the Relay Program on +Networked Computer. These commands will not be used during traffic +masking, as they would reveal when TFC is being used. These commands do +not require encryption, because if an attacker can compromise the +Networked Computer to the point it could inject commands to Relay +Program, it could most likely also access any decryption keys used by +the Relay Program. 
+""" UNENCRYPTED_COMMAND_HEADER_LENGTH = 2 -UNENCRYPTED_SCREEN_CLEAR = b"UC" -UNENCRYPTED_SCREEN_RESET = b"UR" -UNENCRYPTED_EXIT_COMMAND = b"UX" -UNENCRYPTED_EC_RATIO = b"UE" -UNENCRYPTED_BAUDRATE = b"UB" -UNENCRYPTED_WIPE_COMMAND = b"UW" -UNENCRYPTED_ADD_NEW_CONTACT = b"UN" -UNENCRYPTED_ADD_EXISTING_CONTACT = b"UA" -UNENCRYPTED_REM_CONTACT = b"UD" -UNENCRYPTED_ONION_SERVICE_DATA = b"UO" -UNENCRYPTED_MANAGE_CONTACT_REQ = b"UM" +UNENCRYPTED_SCREEN_CLEAR = b'UC' +UNENCRYPTED_SCREEN_RESET = b'UR' +UNENCRYPTED_EXIT_COMMAND = b'UX' +UNENCRYPTED_EC_RATIO = b'UE' +UNENCRYPTED_BAUDRATE = b'UB' +UNENCRYPTED_WIPE_COMMAND = b'UW' +UNENCRYPTED_ADD_NEW_CONTACT = b'UN' +UNENCRYPTED_ADD_EXISTING_CONTACT = b'UA' +UNENCRYPTED_REM_CONTACT = b'UD' +UNENCRYPTED_ONION_SERVICE_DATA = b'UO' +UNENCRYPTED_MANAGE_CONTACT_REQ = b'UM' +UNENCRYPTED_PUBKEY_CHECK = b'UP' +UNENCRYPTED_ACCOUNT_CHECK = b'UT' -# Encrypted command headers -# -# These two-byte headers determine the type of command for Receiver -# Program on local Destination Computer. The header is evaluated after the -# Receiver Program has received all assembly packets and assembled the -# command. These headers tell the Receiver Program to which function the -# provided parameters (if any) must be redirected. +"""Encrypted command headers + +These two-byte headers determine the type of command for Receiver +Program on local Destination Computer. The header is evaluated after the +Receiver Program has received all assembly packets and assembled the +command. These headers tell the Receiver Program to which function the +provided parameters (if any) must be redirected. +""" ENCRYPTED_COMMAND_HEADER_LENGTH = 2 -LOCAL_KEY_RDY = b"LI" -WIN_ACTIVITY = b"SA" -WIN_SELECT = b"WS" -CLEAR_SCREEN = b"SC" -RESET_SCREEN = b"SR" -EXIT_PROGRAM = b"EX" -LOG_DISPLAY = b"LD" -LOG_EXPORT = b"LE" -LOG_REMOVE = b"LR" -CH_MASTER_KEY = b"MK" -CH_NICKNAME = b"NC" -CH_SETTING = b"CS" -CH_LOGGING = b"CL" -CH_FILE_RECV = b"CF" -CH_NOTIFY = b"CN" -GROUP_CREATE = b"GC" -GROUP_ADD = b"GA" -GROUP_REMOVE = b"GR" -GROUP_DELETE = b"GD" -GROUP_RENAME = b"GN" -KEY_EX_ECDHE = b"KE" -KEY_EX_PSK_TX = b"KT" -KEY_EX_PSK_RX = b"KR" -CONTACT_REM = b"CR" -WIPE_USR_DATA = b"WD" +LOCAL_KEY_RDY = b'LI' +WIN_ACTIVITY = b'SA' +WIN_SELECT = b'WS' +CLEAR_SCREEN = b'SC' +RESET_SCREEN = b'SR' +EXIT_PROGRAM = b'EX' +LOG_DISPLAY = b'LD' +LOG_EXPORT = b'LE' +LOG_REMOVE = b'LR' +CH_MASTER_KEY = b'MK' +CH_NICKNAME = b'NC' +CH_SETTING = b'CS' +CH_LOGGING = b'CL' +CH_FILE_RECV = b'CF' +CH_NOTIFY = b'CN' +GROUP_CREATE = b'GC' +GROUP_ADD = b'GA' +GROUP_REMOVE = b'GR' +GROUP_DELETE = b'GD' +GROUP_RENAME = b'GN' +KEY_EX_ECDHE = b'KE' +KEY_EX_PSK_TX = b'KT' +KEY_EX_PSK_RX = b'KR' +CONTACT_REM = b'CR' +WIPE_USR_DATA = b'WD' -# Origin headers -# -# This one-byte header tells the Relay and Receiver Programs whether the -# account included in the packet is the source or the destination of the -# transmission. The user origin header is used when the Relay Program -# forwards the message packets from user's Source Computer to user's -# Destination Computer. The contact origin header is used when the program -# forwards packets that are loaded from servers of contacts to the user's -# Destination Computer. -# -# On Destination Computer, the Receiver Program uses the origin header to -# determine which unidirectional keys it should load to decrypt the -# datagram payload. 
-ORIGIN_HEADER_LENGTH = 1 -ORIGIN_USER_HEADER = b"o" -ORIGIN_CONTACT_HEADER = b"i" +"""Origin headers + +This one-byte header tells the Relay and Receiver Programs whether the +account included in the packet is the source or the destination of the +transmission. The user origin header is used when the Relay Program +forwards the message packets from user's Source Computer to user's +Destination Computer. The contact origin header is used when the program +forwards packets that are loaded from servers of contacts to the user's +Destination Computer. + +On Destination Computer, the Receiver Program uses the origin header to +determine which unidirectional keys it should load to decrypt the +datagram payload. +""" +ORIGIN_HEADER_LENGTH = 1 +ORIGIN_USER_HEADER = b'o' +ORIGIN_CONTACT_HEADER = b'i' -# Message headers -# -# This one-byte header will be prepended to each plaintext message before -# padding and splitting the message. It will be evaluated once the Relay -# Program has received all assembly packets and assembled the message. -# -# The private and group message headers allow the Receiver Program to -# determine whether the message should be displayed in a private or in a -# group window. This does not allow re-direction of messages to -# unauthorized group windows, because TFC's manually managed group -# configuration is also a whitelist for accounts that are authorized to -# display messages under the group's window. -# -# Messages with the whisper message header have "sender-based control". -# Unless the contact maliciously alters their Receiver Program's behavior, -# whispered messages are not logged regardless of in-program controlled -# settings. -# -# Messages with file key header contain the hash of the file ciphertext -# that was sent to the user earlier. It also contains the symmetric -# decryption key for that file. -MESSAGE_HEADER_LENGTH = 1 -WHISPER_FIELD_LENGTH = 1 -PRIVATE_MESSAGE_HEADER = b"p" -GROUP_MESSAGE_HEADER = b"g" -FILE_KEY_HEADER = b"k" +"""Message headers + +This one-byte header will be prepended to each plaintext message before +padding and splitting the message. It will be evaluated once the Relay +Program has received all assembly packets and assembled the message. + +The private and group message headers allow the Receiver Program to +determine whether the message should be displayed in a private or in a +group window. This does not allow re-direction of messages to +unauthorized group windows, because TFC's manually managed group +configuration is also a whitelist for accounts that are authorized to +display messages under the group's window. + +Messages with the whisper message header have "sender-based control". +Unless the contact maliciously alters their Receiver Program's behavior, +whispered messages are not logged regardless of in-program controlled +settings. + +Messages with file key header contain the hash of the file ciphertext +that was sent to the user earlier. It also contains the symmetric +decryption key for that file. +""" +MESSAGE_HEADER_LENGTH = 1 +WHISPER_FIELD_LENGTH = 1 +PRIVATE_MESSAGE_HEADER = b'p' +GROUP_MESSAGE_HEADER = b'g' +FILE_KEY_HEADER = b'k' -# Delays -# -# Traffic masking packet queue check delay ensures that the lookup time -# for the packet queue is obfuscated. -# -# The local testing packet delay is an arbitrary delay that simulates the -# slight delay caused by data transmission over a serial interface. 
-# -# The Relay client delays are values that determine the delays between -# checking the online status of the contact (and the state of their -# ephemeral URL token public key). +"""Delays + +Traffic masking packet queue check delay ensures that the lookup time +for the packet queue is obfuscated. + +The local testing packet delay is an arbitrary delay that simulates the +slight delay caused by data transmission over a serial interface. + +The Relay client delays are values that determine the delays between +checking the online status of the contact (and the state of their +ephemeral URL token public key). +""" TRAFFIC_MASKING_QUEUE_CHECK_DELAY = 0.1 -TRAFFIC_MASKING_MIN_STATIC_DELAY = 0.1 -TRAFFIC_MASKING_MIN_RANDOM_DELAY = 0.1 -LOCAL_TESTING_PACKET_DELAY = 0.1 -RELAY_CLIENT_MAX_DELAY = 8 -RELAY_CLIENT_MIN_DELAY = 0.125 -CLIENT_OFFLINE_THRESHOLD = 4.0 +TRAFFIC_MASKING_MIN_STATIC_DELAY = 0.1 +TRAFFIC_MASKING_MIN_RANDOM_DELAY = 0.1 +LOCAL_TESTING_PACKET_DELAY = 0.1 +RELAY_CLIENT_MAX_DELAY = 8 +RELAY_CLIENT_MIN_DELAY = 0.125 +CLIENT_OFFLINE_THRESHOLD = 4.0 -# Constant time delay types -STATIC = "static" -TRAFFIC_MASKING = "traffic_masking" +"""Constant time delay types""" +STATIC = 'static' +TRAFFIC_MASKING = 'traffic_masking' -# Default directories -DIR_USER_DATA = "user_data/" -DIR_RECV_FILES = "received_files/" -DIR_TFC = "tfc/" -TEMP_POSTFIX = "_temp" +"""Default directories""" +DIR_USER_DATA = 'user_data/' +DIR_RECV_FILES = 'received_files/' +DIR_TFC = 'tfc/' +TEMP_POSTFIX = '_temp' -# Key exchange status states -KEX_STATUS_LENGTH = 1 -KEX_STATUS_NONE = b"\xa0" -KEX_STATUS_PENDING = b"\xa1" -KEX_STATUS_UNVERIFIED = b"\xa2" -KEX_STATUS_VERIFIED = b"\xa3" -KEX_STATUS_NO_RX_PSK = b"\xa4" -KEX_STATUS_HAS_RX_PSK = b"\xa5" -KEX_STATUS_LOCAL_KEY = b"\xa6" +"""Key exchange status states""" +KEX_STATUS_LENGTH = 1 +KEX_STATUS_NONE = b'\xa0' +KEX_STATUS_PENDING = b'\xa1' +KEX_STATUS_UNVERIFIED = b'\xa2' +KEX_STATUS_VERIFIED = b'\xa3' +KEX_STATUS_NO_RX_PSK = b'\xa4' +KEX_STATUS_HAS_RX_PSK = b'\xa5' +KEX_STATUS_LOCAL_KEY = b'\xa6' -# Queue dictionary keys -# +"""Queue dictionary keys""" # Common -EXIT_QUEUE = b"exit" -GATEWAY_QUEUE = b"gateway" -UNIT_TEST_QUEUE = b"unit_test" +EXIT_QUEUE = b'exit' +GATEWAY_QUEUE = b'gateway' +UNIT_TEST_QUEUE = b'unit_test' # Transmitter -MESSAGE_PACKET_QUEUE = b"message_packet" -COMMAND_PACKET_QUEUE = b"command_packet" -TM_MESSAGE_PACKET_QUEUE = b"tm_message_packet" -TM_FILE_PACKET_QUEUE = b"tm_file_packet" -TM_COMMAND_PACKET_QUEUE = b"tm_command_packet" -TM_NOISE_PACKET_QUEUE = b"tm_noise_packet" -TM_NOISE_COMMAND_QUEUE = b"tm_noise_command" -RELAY_PACKET_QUEUE = b"relay_packet" -LOG_PACKET_QUEUE = b"log_packet" -LOG_SETTING_QUEUE = b"log_setting" -TRAFFIC_MASKING_QUEUE = b"traffic_masking" -LOGFILE_MASKING_QUEUE = b"logfile_masking" -KEY_MANAGEMENT_QUEUE = b"key_management" -KEY_MGMT_ACK_QUEUE = b"key_mgmt_ack" -SENDER_MODE_QUEUE = b"sender_mode" -WINDOW_SELECT_QUEUE = b"window_select" +MESSAGE_PACKET_QUEUE = b'message_packet' +COMMAND_PACKET_QUEUE = b'command_packet' +TM_MESSAGE_PACKET_QUEUE = b'tm_message_packet' +TM_FILE_PACKET_QUEUE = b'tm_file_packet' +TM_COMMAND_PACKET_QUEUE = b'tm_command_packet' +TM_NOISE_PACKET_QUEUE = b'tm_noise_packet' +TM_NOISE_COMMAND_QUEUE = b'tm_noise_command' +RELAY_PACKET_QUEUE = b'relay_packet' +LOG_PACKET_QUEUE = b'log_packet' +LOG_SETTING_QUEUE = b'log_setting' +TRAFFIC_MASKING_QUEUE = b'traffic_masking' +LOGFILE_MASKING_QUEUE = b'logfile_masking' +KEY_MANAGEMENT_QUEUE = b'key_management' +KEY_MGMT_ACK_QUEUE = b'key_mgmt_ack' 
+SENDER_MODE_QUEUE = b'sender_mode' +WINDOW_SELECT_QUEUE = b'window_select' # Relay -DST_COMMAND_QUEUE = b"dst_command" -DST_MESSAGE_QUEUE = b"dst_message" -M_TO_FLASK_QUEUE = b"m_to_flask" -F_TO_FLASK_QUEUE = b"f_to_flask" -SRC_TO_RELAY_QUEUE = b"src_to_relay" -URL_TOKEN_QUEUE = b"url_token" -GROUP_MGMT_QUEUE = b"group_mgmt" -GROUP_MSG_QUEUE = b"group_msg" -CONTACT_REQ_QUEUE = b"contact_req" -C_REQ_MGMT_QUEUE = b"c_req_mgmt" -CONTACT_MGMT_QUEUE = b"contact_mgmt" -C_REQ_STATE_QUEUE = b"c_req_state" -ONION_KEY_QUEUE = b"onion_key" -ONION_CLOSE_QUEUE = b"close_onion" -TOR_DATA_QUEUE = b"tor_data" +DST_COMMAND_QUEUE = b'dst_command' +DST_MESSAGE_QUEUE = b'dst_message' +M_TO_FLASK_QUEUE = b'm_to_flask' +F_TO_FLASK_QUEUE = b'f_to_flask' +SRC_TO_RELAY_QUEUE = b'src_to_relay' +URL_TOKEN_QUEUE = b'url_token' +GROUP_MGMT_QUEUE = b'group_mgmt' +GROUP_MSG_QUEUE = b'group_msg' +CONTACT_REQ_QUEUE = b'contact_req' +C_REQ_MGMT_QUEUE = b'c_req_mgmt' +CONTACT_MGMT_QUEUE = b'contact_mgmt' +C_REQ_STATE_QUEUE = b'c_req_state' +ONION_KEY_QUEUE = b'onion_key' +ONION_CLOSE_QUEUE = b'close_onion' +TOR_DATA_QUEUE = b'tor_data' +PUB_KEY_CHECK_QUEUE = b'pubkey_check' +PUB_KEY_SEND_QUEUE = b'pubkey_send' +ACCOUNT_CHECK_QUEUE = b'account_check' +ACCOUNT_SEND_QUEUE = b'account_send' +USER_ACCOUNT_QUEUE = b'user_account' +GUI_INPUT_QUEUE = b'gui_input' -# Queue signals -KDB_ADD_ENTRY_HEADER = "ADD" -KDB_REMOVE_ENTRY_HEADER = "REM" -KDB_M_KEY_CHANGE_HALT_HEADER = "HALT" -KDB_HALT_ACK_HEADER = "HALT_ACK" -KDB_UPDATE_SIZE_HEADER = "STO" -RP_ADD_CONTACT_HEADER = "RAC" -RP_REMOVE_CONTACT_HEADER = "RRC" -EXIT = "EXIT" -WIPE = "WIPE" +"""Queue signals""" +KDB_ADD_ENTRY_HEADER = 'ADD' +KDB_REMOVE_ENTRY_HEADER = 'REM' +KDB_M_KEY_CHANGE_HALT_HEADER = 'HALT' +KDB_HALT_ACK_HEADER = 'HALT_ACK' +KDB_UPDATE_SIZE_HEADER = 'STO' +RP_ADD_CONTACT_HEADER = 'RAC' +RP_REMOVE_CONTACT_HEADER = 'RRC' +EXIT = 'EXIT' +WIPE = 'WIPE' # Serial interface -BAUDS_PER_BYTE = 10 +BAUDS_PER_BYTE = 10 SERIAL_RX_MIN_TIMEOUT = 0.05 # CLI indents -CONTACT_LIST_INDENT = 4 +CONTACT_LIST_INDENT = 4 FILE_TRANSFER_INDENT = 4 -SETTINGS_INDENT = 2 +SETTINGS_INDENT = 2 # Compression COMPRESSION_LEVEL = 9 -MAX_MESSAGE_SIZE = 100_000 # bytes +MAX_MESSAGE_SIZE = 100_000 # bytes # Traffic masking NOISE_PACKET_BUFFER = 100 # Local testing -LOCALHOST = "localhost" -SRC_DD_LISTEN_SOCKET = 5005 -RP_LISTEN_SOCKET = 5006 -DST_DD_LISTEN_SOCKET = 5007 -DST_LISTEN_SOCKET = 5008 -DD_ANIMATION_LENGTH = 16 +LOCALHOST = 'localhost' +SRC_DD_LISTEN_SOCKET = 5005 +RP_LISTEN_SOCKET = 5006 +DST_DD_LISTEN_SOCKET = 5007 +DST_LISTEN_SOCKET = 5008 +DD_ANIMATION_LENGTH = 16 DD_OFFSET_FROM_CENTER = 4 # Field lengths -ENCODED_BOOLEAN_LENGTH = 1 -ENCODED_BYTE_LENGTH = 1 -TIMESTAMP_LENGTH = 4 -ENCODED_INTEGER_LENGTH = 8 -ENCODED_FLOAT_LENGTH = 8 -FILE_ETA_FIELD_LENGTH = 8 -FILE_SIZE_FIELD_LENGTH = 8 -GROUP_DB_HEADER_LENGTH = 32 +ENCODED_BOOLEAN_LENGTH = 1 +ENCODED_BYTE_LENGTH = 1 +TIMESTAMP_LENGTH = 4 +ENCODED_INTEGER_LENGTH = 8 +ENCODED_FLOAT_LENGTH = 8 +FILE_ETA_FIELD_LENGTH = 8 +FILE_SIZE_FIELD_LENGTH = 8 +GROUP_DB_HEADER_LENGTH = 32 PADDED_UTF32_STR_LENGTH = 1024 -CONFIRM_CODE_LENGTH = 1 -PACKET_CHECKSUM_LENGTH = 16 +CONFIRM_CODE_LENGTH = 1 +PACKET_CHECKSUM_LENGTH = 16 # Onion address format -ONION_ADDRESS_CHECKSUM_ID = b".onion checksum" -ONION_SERVICE_VERSION = b"\x03" -ONION_SERVICE_VERSION_LENGTH = 1 +ONION_ADDRESS_CHECKSUM_ID = b'.onion checksum' +ONION_SERVICE_VERSION = b'\x03' +ONION_SERVICE_VERSION_LENGTH = 1 ONION_ADDRESS_CHECKSUM_LENGTH = 2 -ONION_ADDRESS_LENGTH = 56 
+ONION_ADDRESS_LENGTH = 56 # Misc -BITS_PER_BYTE = 8 -MAX_INT = 2 ** 64 - 1 -B58_CHECKSUM_LENGTH = 4 +BITS_PER_BYTE = 8 +MAX_INT = 2 ** 64 - 1 +B58_CHECKSUM_LENGTH = 4 TRUNC_ADDRESS_LENGTH = 5 -TOR_CONTROL_PORT = 9051 -TOR_SOCKS_PORT = 9050 +TOR_CONTROL_PORT = 9051 +TOR_SOCKS_PORT = 9050 DB_WRITE_RETRY_LIMIT = 10 +ACCOUNT_RATIO_LIMIT = 0.75 # Key derivation -ARGON2_MIN_TIME_COST = 1 -ARGON2_MIN_MEMORY_COST = 8 -ARGON2_MIN_PARALLELISM = 1 -ARGON2_SALT_LENGTH = 32 -ARGON2_PSK_TIME_COST = 25 -ARGON2_PSK_MEMORY_COST = 512 * 1024 # kibibytes -ARGON2_PSK_PARALLELISM = 2 -MIN_KEY_DERIVATION_TIME = 3.0 # seconds -MAX_KEY_DERIVATION_TIME = 4.0 # seconds +ARGON2_MIN_TIME_COST = 1 +ARGON2_MIN_MEMORY_COST = 8 +ARGON2_MIN_PARALLELISM = 1 +ARGON2_SALT_LENGTH = 32 +ARGON2_PSK_TIME_COST = 25 +ARGON2_PSK_MEMORY_COST = 512 * 1024 # kibibytes +ARGON2_PSK_PARALLELISM = 2 +MIN_KEY_DERIVATION_TIME = 3.0 # seconds +MAX_KEY_DERIVATION_TIME = 4.0 # seconds PASSWORD_MIN_BIT_STRENGTH = 128 # Cryptographic field sizes -TFC_PRIVATE_KEY_LENGTH = 56 -TFC_PUBLIC_KEY_LENGTH = 56 -X448_SHARED_SECRET_LENGTH = 56 -FINGERPRINT_LENGTH = 32 +TFC_PRIVATE_KEY_LENGTH = 56 +TFC_PUBLIC_KEY_LENGTH = 56 +X448_SHARED_SECRET_LENGTH = 56 +FINGERPRINT_LENGTH = 32 ONION_SERVICE_PRIVATE_KEY_LENGTH = 32 -ONION_SERVICE_PUBLIC_KEY_LENGTH = 32 -URL_TOKEN_LENGTH = 32 -XCHACHA20_NONCE_LENGTH = 24 -SYMMETRIC_KEY_LENGTH = 32 -POLY1305_TAG_LENGTH = 16 -BLAKE2_DIGEST_LENGTH = 32 -BLAKE2_DIGEST_LENGTH_MIN = 1 -BLAKE2_DIGEST_LENGTH_MAX = 64 -BLAKE2_KEY_LENGTH_MAX = 64 -BLAKE2_SALT_LENGTH_MAX = 16 -BLAKE2_PERSON_LENGTH_MAX = 16 -HARAC_LENGTH = 8 -PADDING_LENGTH = 255 +ONION_SERVICE_PUBLIC_KEY_LENGTH = 32 +URL_TOKEN_LENGTH = 32 +XCHACHA20_NONCE_LENGTH = 24 +SYMMETRIC_KEY_LENGTH = 32 +POLY1305_TAG_LENGTH = 16 +BLAKE2_DIGEST_LENGTH = 32 +BLAKE2_DIGEST_LENGTH_MIN = 1 +BLAKE2_DIGEST_LENGTH_MAX = 64 +BLAKE2_KEY_LENGTH_MAX = 64 +BLAKE2_SALT_LENGTH_MAX = 16 +BLAKE2_PERSON_LENGTH_MAX = 16 +HARAC_LENGTH = 8 +PADDING_LENGTH = 255 +ENCODED_B58_PUB_KEY_LENGTH = 84 +ENCODED_B58_KDK_LENGTH = 51 # Domain separation -MESSAGE_KEY = b"message_key" -HEADER_KEY = b"header_key" -FINGERPRINT = b"fingerprint" +MESSAGE_KEY = b'message_key' +HEADER_KEY = b'header_key' +FINGERPRINT = b'fingerprint' # Forward secrecy -INITIAL_HARAC = 0 +INITIAL_HARAC = 0 HARAC_WARN_THRESHOLD = 100_000 # Special messages @@ -563,55 +582,52 @@ PLACEHOLDER_DATA = P_N_HEADER + bytes(PADDING_LENGTH) # Field lengths ASSEMBLY_PACKET_LENGTH = ASSEMBLY_PACKET_HEADER_LENGTH + PADDING_LENGTH -HARAC_CT_LENGTH = XCHACHA20_NONCE_LENGTH + HARAC_LENGTH + POLY1305_TAG_LENGTH +HARAC_CT_LENGTH = (XCHACHA20_NONCE_LENGTH + + HARAC_LENGTH + + POLY1305_TAG_LENGTH) -ASSEMBLY_PACKET_CT_LENGTH = ( - XCHACHA20_NONCE_LENGTH + ASSEMBLY_PACKET_LENGTH + POLY1305_TAG_LENGTH -) +ASSEMBLY_PACKET_CT_LENGTH = (XCHACHA20_NONCE_LENGTH + + ASSEMBLY_PACKET_LENGTH + + POLY1305_TAG_LENGTH) MESSAGE_LENGTH = HARAC_CT_LENGTH + ASSEMBLY_PACKET_CT_LENGTH -COMMAND_LENGTH = DATAGRAM_HEADER_LENGTH + MESSAGE_LENGTH +COMMAND_LENGTH = (DATAGRAM_HEADER_LENGTH + + MESSAGE_LENGTH) -PACKET_LENGTH = DATAGRAM_HEADER_LENGTH + MESSAGE_LENGTH + ORIGIN_HEADER_LENGTH +PACKET_LENGTH = (DATAGRAM_HEADER_LENGTH + + MESSAGE_LENGTH + + ORIGIN_HEADER_LENGTH) -GROUP_STATIC_LENGTH = ( - PADDED_UTF32_STR_LENGTH + GROUP_ID_LENGTH + 2 * ENCODED_BOOLEAN_LENGTH -) +GROUP_STATIC_LENGTH = (PADDED_UTF32_STR_LENGTH + + GROUP_ID_LENGTH + + 2 * ENCODED_BOOLEAN_LENGTH) -CONTACT_LENGTH = ( - ONION_SERVICE_PUBLIC_KEY_LENGTH - + 2 * FINGERPRINT_LENGTH - + 4 * ENCODED_BOOLEAN_LENGTH - 
+ PADDED_UTF32_STR_LENGTH -) +CONTACT_LENGTH = (ONION_SERVICE_PUBLIC_KEY_LENGTH + + 2 * FINGERPRINT_LENGTH + + 4 * ENCODED_BOOLEAN_LENGTH + + PADDED_UTF32_STR_LENGTH) -KEYSET_LENGTH = ( - ONION_SERVICE_PUBLIC_KEY_LENGTH + 4 * SYMMETRIC_KEY_LENGTH + 2 * HARAC_LENGTH -) +KEYSET_LENGTH = (ONION_SERVICE_PUBLIC_KEY_LENGTH + + 4 * SYMMETRIC_KEY_LENGTH + + 2 * HARAC_LENGTH) -PSK_FILE_SIZE = ( - XCHACHA20_NONCE_LENGTH - + ARGON2_SALT_LENGTH - + 2 * SYMMETRIC_KEY_LENGTH - + POLY1305_TAG_LENGTH -) +PSK_FILE_SIZE = (XCHACHA20_NONCE_LENGTH + + ARGON2_SALT_LENGTH + + 2 * SYMMETRIC_KEY_LENGTH + + POLY1305_TAG_LENGTH) -LOG_ENTRY_LENGTH = ( - ONION_SERVICE_PUBLIC_KEY_LENGTH - + TIMESTAMP_LENGTH - + ORIGIN_HEADER_LENGTH - + ASSEMBLY_PACKET_LENGTH -) +LOG_ENTRY_LENGTH = (ONION_SERVICE_PUBLIC_KEY_LENGTH + + TIMESTAMP_LENGTH + + ORIGIN_HEADER_LENGTH + + ASSEMBLY_PACKET_LENGTH) -MASTERKEY_DB_SIZE = ( - ARGON2_SALT_LENGTH + BLAKE2_DIGEST_LENGTH + 3 * ENCODED_INTEGER_LENGTH -) +MASTERKEY_DB_SIZE = (ARGON2_SALT_LENGTH + + BLAKE2_DIGEST_LENGTH + + 3 * ENCODED_INTEGER_LENGTH) -SETTING_LENGTH = ( - XCHACHA20_NONCE_LENGTH - + 4 * ENCODED_INTEGER_LENGTH - + 3 * ENCODED_FLOAT_LENGTH - + 12 * ENCODED_BOOLEAN_LENGTH - + POLY1305_TAG_LENGTH -) +SETTING_LENGTH = (XCHACHA20_NONCE_LENGTH + + 4 * ENCODED_INTEGER_LENGTH + + 3 * ENCODED_FLOAT_LENGTH + + 12 * ENCODED_BOOLEAN_LENGTH + + POLY1305_TAG_LENGTH) diff --git a/src/common/word_list.py b/src/common/word_list.py index 40e6162..f5b74b5 100644 --- a/src/common/word_list.py +++ b/src/common/word_list.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. diff --git a/src/receiver/__init__.py b/src/receiver/__init__.py index 6eb560e..833769a 100755 --- a/src/receiver/__init__.py +++ b/src/receiver/__init__.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. diff --git a/src/receiver/commands.py b/src/receiver/commands.py index adcdb73..b816190 100644 --- a/src/receiver/commands.py +++ b/src/receiver/commands.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
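
Returning to the length constants reformatted at the end of src/common/statics.py above: the sums are easy to sanity-check by hand. The sketch below recomputes the message and packet lengths with the concrete values defined in that file; every number is taken directly from the constants shown in this diff.

# All values below are copied from src/common/statics.py as shown in this diff.
XCHACHA20_NONCE_LENGTH        = 24
POLY1305_TAG_LENGTH           = 16
HARAC_LENGTH                  = 8
PADDING_LENGTH                = 255
ASSEMBLY_PACKET_HEADER_LENGTH = 1
DATAGRAM_HEADER_LENGTH        = 1
ORIGIN_HEADER_LENGTH          = 1

ASSEMBLY_PACKET_LENGTH    = ASSEMBLY_PACKET_HEADER_LENGTH + PADDING_LENGTH                          # 256
HARAC_CT_LENGTH           = XCHACHA20_NONCE_LENGTH + HARAC_LENGTH + POLY1305_TAG_LENGTH             # 48
ASSEMBLY_PACKET_CT_LENGTH = XCHACHA20_NONCE_LENGTH + ASSEMBLY_PACKET_LENGTH + POLY1305_TAG_LENGTH   # 296
MESSAGE_LENGTH            = HARAC_CT_LENGTH + ASSEMBLY_PACKET_CT_LENGTH                             # 344
COMMAND_LENGTH            = DATAGRAM_HEADER_LENGTH + MESSAGE_LENGTH                                 # 345
PACKET_LENGTH             = DATAGRAM_HEADER_LENGTH + MESSAGE_LENGTH + ORIGIN_HEADER_LENGTH          # 346

assert (MESSAGE_LENGTH, COMMAND_LENGTH, PACKET_LENGTH) == (344, 345, 346)
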
@@ -23,111 +23,52 @@ import typing from typing import Any, Dict, Tuple, Union -from src.common.db_logs import ( - access_logs, - change_log_db_key, - remove_logs, - replace_log_db, -) -from src.common.encoding import bytes_to_int, pub_key_to_short_address +from src.common.db_logs import access_logs, change_log_db_key, remove_logs, replace_log_db +from src.common.encoding import bytes_to_int, pub_key_to_short_address from src.common.exceptions import SoftError -from src.common.misc import ignored, reset_terminal, separate_header -from src.common.output import clear_screen, m_print, phase, print_on_previous_line -from src.common.statics import ( - CH_FILE_RECV, - CH_LOGGING, - CH_MASTER_KEY, - CH_NICKNAME, - CH_NOTIFY, - CH_SETTING, - CLEAR_SCREEN, - COMMAND, - CONTACT_REM, - CONTACT_SETTING_HEADER_LENGTH, - DISABLE, - DONE, - ENABLE, - ENCODED_INTEGER_LENGTH, - ENCRYPTED_COMMAND_HEADER_LENGTH, - EXIT, - EXIT_PROGRAM, - GROUP_ADD, - GROUP_CREATE, - GROUP_DELETE, - GROUP_REMOVE, - GROUP_RENAME, - KEY_EX_ECDHE, - KEY_EX_PSK_RX, - KEY_EX_PSK_TX, - LOCAL_KEY_RDY, - LOCAL_PUBKEY, - LOG_DISPLAY, - LOG_EXPORT, - LOG_REMOVE, - ONION_SERVICE_PUBLIC_KEY_LENGTH, - ORIGIN_USER_HEADER, - RESET_SCREEN, - US_BYTE, - WIN_ACTIVITY, - WIN_SELECT, - WIN_TYPE_CONTACT, - WIN_TYPE_GROUP, - WIN_UID_COMMAND, - WIN_UID_FILE, - WIPE, - WIPE_USR_DATA, -) +from src.common.misc import ignored, reset_terminal, separate_header +from src.common.output import clear_screen, m_print, phase, print_on_previous_line +from src.common.statics import (CH_FILE_RECV, CH_LOGGING, CH_MASTER_KEY, CH_NICKNAME, CH_NOTIFY, CH_SETTING, + CLEAR_SCREEN, COMMAND, CONTACT_REM, CONTACT_SETTING_HEADER_LENGTH, DISABLE, DONE, + ENABLE, ENCODED_INTEGER_LENGTH, ENCRYPTED_COMMAND_HEADER_LENGTH, EXIT, EXIT_PROGRAM, + GROUP_ADD, GROUP_CREATE, GROUP_DELETE, GROUP_REMOVE, GROUP_RENAME, KEY_EX_ECDHE, + KEY_EX_PSK_RX, KEY_EX_PSK_TX, LOCAL_KEY_RDY, LOCAL_PUBKEY, LOG_DISPLAY, LOG_EXPORT, + LOG_REMOVE, ONION_SERVICE_PUBLIC_KEY_LENGTH, ORIGIN_USER_HEADER, RESET_SCREEN, + US_BYTE, WIN_ACTIVITY, WIN_SELECT, WIN_TYPE_CONTACT, WIN_TYPE_GROUP, WIN_UID_COMMAND, + WIN_UID_FILE, WIPE, WIPE_USR_DATA) -from src.receiver.commands_g import ( - group_add, - group_create, - group_delete, - group_remove, - group_rename, -) -from src.receiver.key_exchanges import ( - key_ex_ecdhe, - key_ex_psk_rx, - key_ex_psk_tx, - local_key_rdy, -) -from src.receiver.packet import decrypt_assembly_packet +from src.receiver.commands_g import group_add, group_create, group_delete, group_remove, group_rename +from src.receiver.key_exchanges import key_ex_ecdhe, key_ex_psk_rx, key_ex_psk_tx, local_key_rdy +from src.receiver.packet import decrypt_assembly_packet if typing.TYPE_CHECKING: - from datetime import datetime - from multiprocessing import Queue - from src.common.db_contacts import Contact, ContactList - from src.common.db_groups import Group, GroupList - from src.common.db_keys import KeyList + from datetime import datetime + from multiprocessing import Queue + from src.common.db_contacts import Contact, ContactList + from src.common.db_groups import Group, GroupList + from src.common.db_keys import KeyList from src.common.db_masterkey import MasterKey - from src.common.db_settings import Settings - from src.common.gateway import Gateway - from src.receiver.packet import PacketList - from src.receiver.windows import WindowList + from src.common.db_settings import Settings + from src.common.gateway import Gateway + from src.receiver.packet import PacketList + from src.receiver.windows 
import WindowList -def process_command( - ts: "datetime", - assembly_ct: bytes, - window_list: "WindowList", - packet_list: "PacketList", - contact_list: "ContactList", - key_list: "KeyList", - group_list: "GroupList", - settings: "Settings", - master_key: "MasterKey", - gateway: "Gateway", - exit_queue: "Queue[bytes]", -) -> None: +def process_command(ts: 'datetime', + assembly_ct: bytes, + window_list: 'WindowList', + packet_list: 'PacketList', + contact_list: 'ContactList', + key_list: 'KeyList', + group_list: 'GroupList', + settings: 'Settings', + master_key: 'MasterKey', + gateway: 'Gateway', + exit_queue: 'Queue[bytes]' + ) -> None: """Decrypt command assembly packet and process command.""" - assembly_packet = decrypt_assembly_packet( - assembly_ct, - LOCAL_PUBKEY, - ORIGIN_USER_HEADER, - window_list, - contact_list, - key_list, - ) + assembly_packet = decrypt_assembly_packet(assembly_ct, LOCAL_PUBKEY, ORIGIN_USER_HEADER, + window_list, contact_list, key_list) cmd_packet = packet_list.get_packet(LOCAL_PUBKEY, ORIGIN_USER_HEADER, COMMAND) cmd_packet.add_packet(assembly_packet) @@ -135,204 +76,87 @@ def process_command( if not cmd_packet.is_complete: raise SoftError("Incomplete command.", output=False) - header, cmd = separate_header( - cmd_packet.assemble_command_packet(), ENCRYPTED_COMMAND_HEADER_LENGTH - ) - no = None + header, cmd = separate_header(cmd_packet.assemble_command_packet(), ENCRYPTED_COMMAND_HEADER_LENGTH) + no = None # Keyword Function to run ( Parameters ) # -------------------------------------------------------------------------------------------------------------- - d = { - LOCAL_KEY_RDY: (local_key_rdy, ts, window_list, contact_list), - WIN_ACTIVITY: (win_activity, window_list), - WIN_SELECT: (win_select, cmd, window_list), - CLEAR_SCREEN: (clear_screen,), - RESET_SCREEN: (reset_screen, cmd, window_list), - EXIT_PROGRAM: (exit_tfc, exit_queue), - LOG_DISPLAY: ( - log_command, - cmd, - no, - window_list, - contact_list, - group_list, - settings, - master_key, - ), - LOG_EXPORT: ( - log_command, - cmd, - ts, - window_list, - contact_list, - group_list, - settings, - master_key, - ), - LOG_REMOVE: (remove_log, cmd, contact_list, group_list, settings, master_key), - CH_MASTER_KEY: ( - ch_master_key, - ts, - window_list, - contact_list, - group_list, - key_list, - settings, - master_key, - ), - CH_NICKNAME: (ch_nick, cmd, ts, window_list, contact_list,), - CH_SETTING: ( - ch_setting, - cmd, - ts, - window_list, - contact_list, - group_list, - key_list, - settings, - gateway, - ), - CH_LOGGING: ( - ch_contact_s, - cmd, - ts, - window_list, - contact_list, - group_list, - header, - ), - CH_FILE_RECV: ( - ch_contact_s, - cmd, - ts, - window_list, - contact_list, - group_list, - header, - ), - CH_NOTIFY: ( - ch_contact_s, - cmd, - ts, - window_list, - contact_list, - group_list, - header, - ), - GROUP_CREATE: ( - group_create, - cmd, - ts, - window_list, - contact_list, - group_list, - settings, - ), - GROUP_ADD: ( - group_add, - cmd, - ts, - window_list, - contact_list, - group_list, - settings, - ), - GROUP_REMOVE: (group_remove, cmd, ts, window_list, contact_list, group_list), - GROUP_DELETE: (group_delete, cmd, ts, window_list, group_list), - GROUP_RENAME: (group_rename, cmd, ts, window_list, contact_list, group_list), - KEY_EX_ECDHE: ( - key_ex_ecdhe, - cmd, - ts, - window_list, - contact_list, - key_list, - settings, - ), - KEY_EX_PSK_TX: ( - key_ex_psk_tx, - cmd, - ts, - window_list, - contact_list, - key_list, - settings, - ), - KEY_EX_PSK_RX: ( - key_ex_psk_rx, - 
cmd, - ts, - window_list, - contact_list, - key_list, - settings, - ), - CONTACT_REM: ( - contact_rem, - cmd, - ts, - window_list, - contact_list, - group_list, - key_list, - settings, - master_key, - ), - WIPE_USR_DATA: (wipe, exit_queue), - } # type: Dict[bytes, Any] + d = {LOCAL_KEY_RDY: (local_key_rdy, ts, window_list, contact_list ), + WIN_ACTIVITY: (win_activity, window_list ), + WIN_SELECT: (win_select, cmd, window_list ), + CLEAR_SCREEN: (clear_screen, ), + RESET_SCREEN: (reset_screen, cmd, window_list ), + EXIT_PROGRAM: (exit_tfc, exit_queue), + LOG_DISPLAY: (log_command, cmd, no, window_list, contact_list, group_list, settings, master_key), + LOG_EXPORT: (log_command, cmd, ts, window_list, contact_list, group_list, settings, master_key), + LOG_REMOVE: (remove_log, cmd, contact_list, group_list, settings, master_key), + CH_MASTER_KEY: (ch_master_key, ts, window_list, contact_list, group_list, key_list, settings, master_key), + CH_NICKNAME: (ch_nick, cmd, ts, window_list, contact_list, ), + CH_SETTING: (ch_setting, cmd, ts, window_list, contact_list, group_list, key_list, settings, gateway ), + CH_LOGGING: (ch_contact_s, cmd, ts, window_list, contact_list, group_list, header ), + CH_FILE_RECV: (ch_contact_s, cmd, ts, window_list, contact_list, group_list, header ), + CH_NOTIFY: (ch_contact_s, cmd, ts, window_list, contact_list, group_list, header ), + GROUP_CREATE: (group_create, cmd, ts, window_list, contact_list, group_list, settings ), + GROUP_ADD: (group_add, cmd, ts, window_list, contact_list, group_list, settings ), + GROUP_REMOVE: (group_remove, cmd, ts, window_list, contact_list, group_list ), + GROUP_DELETE: (group_delete, cmd, ts, window_list, group_list ), + GROUP_RENAME: (group_rename, cmd, ts, window_list, contact_list, group_list ), + KEY_EX_ECDHE: (key_ex_ecdhe, cmd, ts, window_list, contact_list, key_list, settings ), + KEY_EX_PSK_TX: (key_ex_psk_tx, cmd, ts, window_list, contact_list, key_list, settings ), + KEY_EX_PSK_RX: (key_ex_psk_rx, cmd, ts, window_list, contact_list, key_list, settings ), + CONTACT_REM: (contact_rem, cmd, ts, window_list, contact_list, group_list, key_list, settings, master_key), + WIPE_USR_DATA: (wipe, exit_queue) + } # type: Dict[bytes, Any] try: from_dict = d[header] except KeyError: raise SoftError("Error: Received an invalid command.") - func = from_dict[0] + func = from_dict[0] parameters = from_dict[1:] func(*parameters) raise SoftError("Command completed.", output=False) -def win_activity(window_list: "WindowList") -> None: +def win_activity(window_list: 'WindowList') -> None: """Show number of unread messages in each window.""" - unread_wins = [ - w for w in window_list if (w.uid != WIN_UID_COMMAND and w.unread_messages > 0) - ] - print_list = ["Window activity"] if unread_wins else ["No window activity"] + unread_wins = [w for w in window_list if (w.uid != WIN_UID_COMMAND and w.unread_messages > 0)] + print_list = ["Window activity"] if unread_wins else ["No window activity"] print_list += [f"{w.name}: {w.unread_messages}" for w in unread_wins] m_print(print_list, box=True) print_on_previous_line(reps=(len(print_list) + 2), delay=1) -def win_select(window_uid: bytes, window_list: "WindowList") -> None: +def win_select(window_uid: bytes, window_list: 'WindowList') -> None: """Select window specified by the Transmitter Program.""" if window_uid == WIN_UID_FILE: clear_screen() window_list.set_active_rx_window(window_uid) -def reset_screen(win_uid: bytes, window_list: "WindowList") -> None: +def reset_screen(win_uid: bytes, 
window_list: 'WindowList') -> None: """Reset window specified by the Transmitter Program.""" window = window_list.get_window(win_uid) window.reset_window() reset_terminal() -def exit_tfc(exit_queue: "Queue[str]") -> None: +def exit_tfc(exit_queue: 'Queue[str]') -> None: """Exit TFC.""" exit_queue.put(EXIT) -def log_command( - cmd_data: bytes, - ts: "datetime", - window_list: "WindowList", - contact_list: "ContactList", - group_list: "GroupList", - settings: "Settings", - master_key: "MasterKey", -) -> None: +def log_command(cmd_data: bytes, + ts: 'datetime', + window_list: 'WindowList', + contact_list: 'ContactList', + group_list: 'GroupList', + settings: 'Settings', + master_key: 'MasterKey' + ) -> None: """Display or export log file for the active window. Having the capability to export the log file from the encrypted @@ -344,48 +168,36 @@ def log_command( password to ensure no unauthorized user who gains momentary access to the system can the export logs from the database. """ - export = ts is not None + export = ts is not None ser_no_msg, uid = separate_header(cmd_data, ENCODED_INTEGER_LENGTH) - no_messages = bytes_to_int(ser_no_msg) - window = window_list.get_window(uid) + no_messages = bytes_to_int(ser_no_msg) + window = window_list.get_window(uid) - access_logs( - window, - contact_list, - group_list, - settings, - master_key, - msg_to_load=no_messages, - export=export, - ) + access_logs(window, contact_list, group_list, settings, master_key, msg_to_load=no_messages, export=export) if export: cmd_win = window_list.get_command_window() - cmd_win.add_new( - ts, f"Exported log file of {window.type} '{window.name}'.", output=True - ) + cmd_win.add_new(ts, f"Exported log file of {window.type} '{window.name}'.", output=True) -def remove_log( - cmd_data: bytes, - contact_list: "ContactList", - group_list: "GroupList", - settings: "Settings", - master_key: "MasterKey", -) -> None: +def remove_log(cmd_data: bytes, + contact_list: 'ContactList', + group_list: 'GroupList', + settings: 'Settings', + master_key: 'MasterKey' + ) -> None: """Remove log entries for contact or group.""" remove_logs(contact_list, group_list, settings, master_key, selector=cmd_data) -def ch_master_key( - ts: "datetime", - window_list: "WindowList", - contact_list: "ContactList", - group_list: "GroupList", - key_list: "KeyList", - settings: "Settings", - master_key: "MasterKey", -) -> None: +def ch_master_key(ts: 'datetime', + window_list: 'WindowList', + contact_list: 'ContactList', + group_list: 'GroupList', + key_list: 'KeyList', + settings: 'Settings', + master_key: 'MasterKey' + ) -> None: """Prompt the user for a new master password and derive a new master key from that.""" if not master_key.authenticate_action(): raise SoftError("Error: Invalid password.", tail_clear=True, delay=1, head=2) @@ -399,9 +211,9 @@ def ch_master_key( # Update encryption keys for databases contact_list.database.database_key = new_master_key - key_list.database.database_key = new_master_key - group_list.database.database_key = new_master_key - settings.database.database_key = new_master_key + key_list.database.database_key = new_master_key + group_list.database.database_key = new_master_key + settings.database.database_key = new_master_key # Create temp databases for each database, do not replace original. 
with ignored(SoftError): @@ -425,30 +237,21 @@ def ch_master_key( settings.database.replace_database() phase(DONE) - m_print( - "Master password successfully changed.", - bold=True, - tail_clear=True, - delay=1, - head=1, - ) + m_print("Master password successfully changed.", bold=True, tail_clear=True, delay=1, head=1) cmd_win = window_list.get_command_window() cmd_win.add_new(ts, "Changed Receiver master password.") -def ch_nick( - cmd_data: bytes, - ts: "datetime", - window_list: "WindowList", - contact_list: "ContactList", -) -> None: +def ch_nick(cmd_data: bytes, + ts: 'datetime', + window_list: 'WindowList', + contact_list: 'ContactList' + ) -> None: """Change nickname of contact.""" - onion_pub_key, nick_bytes = separate_header( - cmd_data, header_length=ONION_SERVICE_PUBLIC_KEY_LENGTH - ) - nick = nick_bytes.decode() - short_addr = pub_key_to_short_address(onion_pub_key) + onion_pub_key, nick_bytes = separate_header(cmd_data, header_length=ONION_SERVICE_PUBLIC_KEY_LENGTH) + nick = nick_bytes.decode() + short_addr = pub_key_to_short_address(onion_pub_key) try: contact = contact_list.get_contact_by_pub_key(onion_pub_key) @@ -458,7 +261,7 @@ def ch_nick( contact.nick = nick contact_list.store_contacts() - window = window_list.get_window(onion_pub_key) + window = window_list.get_window(onion_pub_key) window.name = nick window.handle_dict[onion_pub_key] = nick @@ -469,16 +272,15 @@ def ch_nick( cmd_win.add_new(ts, f"Changed {short_addr} nick to '{nick}'.", output=True) -def ch_setting( - cmd_data: bytes, - ts: "datetime", - window_list: "WindowList", - contact_list: "ContactList", - group_list: "GroupList", - key_list: "KeyList", - settings: "Settings", - gateway: "Gateway", -) -> None: +def ch_setting(cmd_data: bytes, + ts: 'datetime', + window_list: 'WindowList', + contact_list: 'ContactList', + group_list: 'GroupList', + key_list: 'KeyList', + settings: 'Settings', + gateway: 'Gateway' + ) -> None: """Change TFC setting.""" try: setting, value = [f.decode() for f in cmd_data.split(US_BYTE)] @@ -495,70 +297,60 @@ def ch_setting( cmd_win = window_list.get_command_window() cmd_win.add_new(ts, f"Changed setting '{setting}' to '{value}'.", output=True) - if setting == "max_number_of_contacts": + if setting == 'max_number_of_contacts': contact_list.store_contacts() key_list.store_keys() - if setting in ["max_number_of_group_members", "max_number_of_groups"]: + if setting in ['max_number_of_group_members', 'max_number_of_groups']: group_list.store_groups() -def ch_contact_s( - cmd_data: bytes, - ts: "datetime", - window_list: "WindowList", - contact_list: "ContactList", - group_list: "GroupList", - header: bytes, -) -> None: +def ch_contact_s(cmd_data: bytes, + ts: 'datetime', + window_list: 'WindowList', + contact_list: 'ContactList', + group_list: 'GroupList', + header: bytes + ) -> None: """Change contact/group related setting.""" - setting, win_uid = separate_header(cmd_data, CONTACT_SETTING_HEADER_LENGTH) - attr, desc, file_cmd = { - CH_LOGGING: ("log_messages", "Logging of messages", False), - CH_FILE_RECV: ("file_reception", "Reception of files", True), - CH_NOTIFY: ("notifications", "Message notifications", False), - }[header] + setting, win_uid = separate_header(cmd_data, CONTACT_SETTING_HEADER_LENGTH) + attr, desc, file_cmd = {CH_LOGGING: ('log_messages', "Logging of messages", False), + CH_FILE_RECV: ('file_reception', "Reception of files", True), + CH_NOTIFY: ('notifications', "Message notifications", False)}[header] - action, b_value = {ENABLE: ("enabled", True), DISABLE: 
("disabled", False)}[ - setting.lower() - ] + action, b_value = {ENABLE: ('enabled', True), + DISABLE: ('disabled', False)}[setting.lower()] if setting.isupper(): specifier, status, w_name, w_type = change_setting_for_all_contacts( - attr, file_cmd, b_value, contact_list, group_list - ) + attr, file_cmd, b_value, contact_list, group_list) + else: status, specifier, w_type, w_name = change_setting_for_one_contact( - attr, file_cmd, b_value, win_uid, window_list, contact_list, group_list - ) + attr, file_cmd, b_value, win_uid, window_list, contact_list, group_list) message = f"{desc} {status} {action} for {specifier}{w_type}{w_name}" cmd_win = window_list.get_command_window() cmd_win.add_new(ts, message, output=True) -def change_setting_for_one_contact( - attr: str, - file_cmd: bool, - b_value: bool, - win_uid: bytes, - window_list: "WindowList", - contact_list: "ContactList", - group_list: "GroupList", -) -> Tuple[str, str, str, str]: +def change_setting_for_one_contact(attr: str, + file_cmd: bool, + b_value: bool, + win_uid: bytes, + window_list: 'WindowList', + contact_list: 'ContactList', + group_list: 'GroupList' + ) -> Tuple[str, str, str, str]: """Change setting for contacts in specified window.""" if not window_list.has_window(win_uid): - raise SoftError( - f"Error: Found no window for '{pub_key_to_short_address(win_uid)}'." - ) + raise SoftError(f"Error: Found no window for '{pub_key_to_short_address(win_uid)}'.") - window = window_list.get_window(win_uid) - group_window = window.type == WIN_TYPE_GROUP + window = window_list.get_window(win_uid) + group_window = window.type == WIN_TYPE_GROUP contact_window = window.type == WIN_TYPE_CONTACT if contact_window: - target = contact_list.get_contact_by_pub_key( - win_uid - ) # type: Union[Contact, Group] + target = contact_list.get_contact_by_pub_key(win_uid) # type: Union[Contact, Group] else: target = group_list.get_group_by_id(win_uid) @@ -568,10 +360,10 @@ def change_setting_for_one_contact( else: changed = getattr(target, attr) != b_value - status = "has been" if changed else "was already" - specifier = "members in " if (file_cmd and group_window) else "" - w_type = window.type - w_name = f" {window.name}." + status = "has been" if changed else "was already" + specifier = "members in " if (file_cmd and group_window) else '' + w_type = window.type + w_name = f" {window.name}." # Set values if contact_window or (group_window and file_cmd): @@ -586,25 +378,20 @@ def change_setting_for_one_contact( return status, specifier, w_type, w_name -def change_setting_for_all_contacts( - attr: str, - file_cmd: bool, - b_value: bool, - contact_list: "ContactList", - group_list: "GroupList", -) -> Tuple[str, str, str, str]: +def change_setting_for_all_contacts(attr: str, + file_cmd: bool, + b_value: bool, + contact_list: 'ContactList', + group_list: 'GroupList' + ) -> Tuple[str, str, str, str]: """Change settings for all contacts (and groups).""" - enabled = [getattr(c, attr) for c in contact_list.get_list_of_contacts()] + enabled = [getattr(c, attr) for c in contact_list.get_list_of_contacts()] enabled += [getattr(g, attr) for g in group_list] if not file_cmd else [] - status = ( - "was already" - if ((all(enabled) and b_value) or (not any(enabled) and not b_value)) - else "has been" - ) + status = ("was already" if ((all(enabled) and b_value) or (not any(enabled) and not b_value)) else "has been") specifier = "every " - w_type = "contact" - w_name = "." if file_cmd else " and group." + w_type = "contact" + w_name = "." 
if file_cmd else " and group." # Set values for c in contact_list.get_list_of_contacts(): @@ -620,16 +407,15 @@ def change_setting_for_all_contacts( return status, specifier, w_type, w_name -def contact_rem( - onion_pub_key: bytes, - ts: "datetime", - window_list: "WindowList", - contact_list: "ContactList", - group_list: "GroupList", - key_list: "KeyList", - settings: "Settings", - master_key: "MasterKey", -) -> None: +def contact_rem(onion_pub_key: bytes, + ts: 'datetime', + window_list: 'WindowList', + contact_list: 'ContactList', + group_list: 'GroupList', + key_list: 'KeyList', + settings: 'Settings', + master_key: 'MasterKey' + ) -> None: """Remove contact from Receiver Program.""" key_list.remove_keyset(onion_pub_key) window_list.remove_window(onion_pub_key) @@ -640,7 +426,7 @@ def contact_rem( except StopIteration: raise SoftError(f"Receiver has no account '{short_addr}' to remove.") - nick = contact.nick + nick = contact.nick in_group = any([g.remove_members([onion_pub_key]) for g in group_list]) contact_list.remove_contact_by_pub_key(onion_pub_key) @@ -654,7 +440,7 @@ def contact_rem( remove_logs(contact_list, group_list, settings, master_key, onion_pub_key) -def wipe(exit_queue: "Queue[str]") -> None: +def wipe(exit_queue: 'Queue[str]') -> None: """\ Reset terminals, wipe all TFC user data on Destination Computer and power off the system. diff --git a/src/receiver/commands_g.py b/src/receiver/commands_g.py index 987ec72..289c562 100644 --- a/src/receiver/commands_g.py +++ b/src/receiver/commands_g.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -21,115 +21,94 @@ along with TFC. If not, see . import typing -from src.common.encoding import b58encode +from src.common.encoding import b58encode from src.common.exceptions import SoftError -from src.common.misc import separate_header, split_byte_string, validate_group_name -from src.common.output import group_management_print, m_print -from src.common.statics import ( - ADDED_MEMBERS, - ALREADY_MEMBER, - GROUP_ID_LENGTH, - NEW_GROUP, - NOT_IN_GROUP, - ONION_SERVICE_PUBLIC_KEY_LENGTH, - REMOVED_MEMBERS, - UNKNOWN_ACCOUNTS, - US_BYTE, - WIN_UID_COMMAND, -) +from src.common.misc import separate_header, split_byte_string, validate_group_name +from src.common.output import group_management_print, m_print +from src.common.statics import (ADDED_MEMBERS, ALREADY_MEMBER, GROUP_ID_LENGTH, NEW_GROUP, NOT_IN_GROUP, + ONION_SERVICE_PUBLIC_KEY_LENGTH, REMOVED_MEMBERS, UNKNOWN_ACCOUNTS, US_BYTE, + WIN_UID_COMMAND) if typing.TYPE_CHECKING: - from datetime import datetime + from datetime import datetime from src.common.db_contacts import ContactList - from src.common.db_groups import GroupList + from src.common.db_groups import GroupList from src.common.db_settings import Settings - from src.receiver.windows import WindowList + from src.receiver.windows import WindowList -def group_create( - cmd_data: bytes, - ts: "datetime", - window_list: "WindowList", - contact_list: "ContactList", - group_list: "GroupList", - settings: "Settings", -) -> None: +def group_create(cmd_data: bytes, + ts: 'datetime', + window_list: 'WindowList', + contact_list: 'ContactList', + group_list: 'GroupList', + settings: 'Settings' + ) -> None: """Create a new group.""" - group_id, variable_len_data = separate_header(cmd_data, GROUP_ID_LENGTH) + group_id, variable_len_data = separate_header(cmd_data, GROUP_ID_LENGTH) group_name_bytes, 
ser_members = variable_len_data.split(US_BYTE, 1) - group_name = group_name_bytes.decode() + group_name = group_name_bytes.decode() purp_pub_keys = set(split_byte_string(ser_members, ONION_SERVICE_PUBLIC_KEY_LENGTH)) - pub_keys = set(contact_list.get_list_of_pub_keys()) - accepted = list(purp_pub_keys & pub_keys) - rejected = list(purp_pub_keys - pub_keys) + pub_keys = set(contact_list.get_list_of_pub_keys()) + accepted = list(purp_pub_keys & pub_keys) + rejected = list(purp_pub_keys - pub_keys) if len(accepted) > settings.max_number_of_group_members: - raise SoftError( - f"Error: TFC settings only allow {settings.max_number_of_group_members} " - f"members per group." - ) + raise SoftError(f"Error: TFC settings only allow {settings.max_number_of_group_members} members per group.") if len(group_list) == settings.max_number_of_groups: - raise SoftError( - f"Error: TFC settings only allow {settings.max_number_of_groups} groups." - ) + raise SoftError(f"Error: TFC settings only allow {settings.max_number_of_groups} groups.") accepted_contacts = [contact_list.get_contact_by_pub_key(k) for k in accepted] - group_list.add_group( - group_name, - group_id, - settings.log_messages_by_default, - settings.show_notifications_by_default, - accepted_contacts, - ) + group_list.add_group(group_name, + group_id, + settings.log_messages_by_default, + settings.show_notifications_by_default, + accepted_contacts) - group = group_list.get_group(group_name) - window = window_list.get_window(group.group_id) + group = group_list.get_group(group_name) + window = window_list.get_window(group.group_id) window.window_contacts = accepted_contacts - window.message_log = [] + window.message_log = [] window.unread_messages = 0 window.create_handle_dict() - group_management_print(NEW_GROUP, accepted, contact_list, group_name) + group_management_print(NEW_GROUP, accepted, contact_list, group_name) group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name) cmd_win = window_list.get_window(WIN_UID_COMMAND) cmd_win.add_new(ts, f"Created new group {group_name}.") -def group_add( - cmd_data: bytes, - ts: "datetime", - window_list: "WindowList", - contact_list: "ContactList", - group_list: "GroupList", - settings: "Settings", -) -> None: +def group_add(cmd_data: bytes, + ts: 'datetime', + window_list: 'WindowList', + contact_list: 'ContactList', + group_list: 'GroupList', + settings: 'Settings' + ) -> None: """Add member(s) to group.""" group_id, ser_members = separate_header(cmd_data, GROUP_ID_LENGTH) - purp_pub_keys = set(split_byte_string(ser_members, ONION_SERVICE_PUBLIC_KEY_LENGTH)) + purp_pub_keys = set(split_byte_string(ser_members, ONION_SERVICE_PUBLIC_KEY_LENGTH)) try: group_name = group_list.get_group_by_id(group_id).name except StopIteration: raise SoftError(f"Error: No group with ID '{b58encode(group_id)}' found.") - pub_keys = set(contact_list.get_list_of_pub_keys()) - before_adding = set(group_list.get_group(group_name).get_list_of_member_pub_keys()) - ok_accounts = set(pub_keys & purp_pub_keys) + pub_keys = set(contact_list.get_list_of_pub_keys()) + before_adding = set(group_list.get_group(group_name).get_list_of_member_pub_keys()) + ok_accounts = set(pub_keys & purp_pub_keys) new_in_group_set = set(ok_accounts - before_adding) end_assembly = list(before_adding | new_in_group_set) already_in_g = list(purp_pub_keys & before_adding) - rejected = list(purp_pub_keys - pub_keys) + rejected = list(purp_pub_keys - pub_keys) new_in_group = list(new_in_group_set) if len(end_assembly) > 
settings.max_number_of_group_members: - raise SoftError( - f"Error: TFC settings only allow {settings.max_number_of_group_members} " - f"members per group." - ) + raise SoftError(f"Error: TFC settings only allow {settings.max_number_of_group_members} members per group.") group = group_list.get_group(group_name) group.add_members([contact_list.get_contact_by_pub_key(k) for k in new_in_group]) @@ -138,38 +117,37 @@ def group_add( window.add_contacts(new_in_group) window.create_handle_dict() - group_management_print(ADDED_MEMBERS, new_in_group, contact_list, group_name) - group_management_print(ALREADY_MEMBER, already_in_g, contact_list, group_name) - group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name) + group_management_print(ADDED_MEMBERS, new_in_group, contact_list, group_name) + group_management_print(ALREADY_MEMBER, already_in_g, contact_list, group_name) + group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name) cmd_win = window_list.get_window(WIN_UID_COMMAND) cmd_win.add_new(ts, f"Added members to group {group_name}.") -def group_remove( - cmd_data: bytes, - ts: "datetime", - window_list: "WindowList", - contact_list: "ContactList", - group_list: "GroupList", -) -> None: +def group_remove(cmd_data: bytes, + ts: 'datetime', + window_list: 'WindowList', + contact_list: 'ContactList', + group_list: 'GroupList' + ) -> None: """Remove member(s) from the group.""" group_id, ser_members = separate_header(cmd_data, GROUP_ID_LENGTH) - purp_pub_keys = set(split_byte_string(ser_members, ONION_SERVICE_PUBLIC_KEY_LENGTH)) + purp_pub_keys = set(split_byte_string(ser_members, ONION_SERVICE_PUBLIC_KEY_LENGTH)) try: group_name = group_list.get_group_by_id(group_id).name except StopIteration: raise SoftError(f"Error: No group with ID '{b58encode(group_id)}' found.") - pub_keys = set(contact_list.get_list_of_pub_keys()) - before_removal = set(group_list.get_group(group_name).get_list_of_member_pub_keys()) - ok_accounts_set = set(purp_pub_keys & pub_keys) - removable_set = set(before_removal & ok_accounts_set) + pub_keys = set(contact_list.get_list_of_pub_keys()) + before_removal = set(group_list.get_group(group_name).get_list_of_member_pub_keys()) + ok_accounts_set = set(purp_pub_keys & pub_keys) + removable_set = set(before_removal & ok_accounts_set) not_in_group = list(ok_accounts_set - before_removal) - rejected = list(purp_pub_keys - pub_keys) - removable = list(removable_set) + rejected = list(purp_pub_keys - pub_keys) + removable = list(removable_set) group = group_list.get_group(group_name) group.remove_members(removable) @@ -177,17 +155,19 @@ def group_remove( window = window_list.get_window(group.group_id) window.remove_contacts(removable) - group_management_print(REMOVED_MEMBERS, removable, contact_list, group_name) - group_management_print(NOT_IN_GROUP, not_in_group, contact_list, group_name) - group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name) + group_management_print(REMOVED_MEMBERS, removable, contact_list, group_name) + group_management_print(NOT_IN_GROUP, not_in_group, contact_list, group_name) + group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name) cmd_win = window_list.get_window(WIN_UID_COMMAND) cmd_win.add_new(ts, f"Removed members from group {group_name}.") -def group_delete( - group_id: bytes, ts: "datetime", window_list: "WindowList", group_list: "GroupList" -) -> None: +def group_delete(group_id: bytes, + ts: 'datetime', + window_list: 'WindowList', + group_list: 'GroupList' + ) -> None: 
"""Remove the group.""" if not group_list.has_group_id(group_id): raise SoftError(f"Error: No group with ID '{b58encode(group_id)}' found.") @@ -203,13 +183,12 @@ def group_delete( cmd_win.add_new(ts, message) -def group_rename( - cmd_data: bytes, - ts: "datetime", - window_list: "WindowList", - contact_list: "ContactList", - group_list: "GroupList", -) -> None: +def group_rename(cmd_data: bytes, + ts: 'datetime', + window_list: 'WindowList', + contact_list: 'ContactList', + group_list: 'GroupList' + ) -> None: """Rename the group.""" group_id, new_name_bytes = separate_header(cmd_data, GROUP_ID_LENGTH) @@ -227,11 +206,11 @@ def group_rename( if error_msg: raise SoftError(error_msg) - old_name = group.name + old_name = group.name group.name = new_name group_list.store_groups() - window = window_list.get_window(group.group_id) + window = window_list.get_window(group.group_id) window.name = new_name message = f"Renamed group '{old_name}' to '{new_name}'." diff --git a/src/receiver/files.py b/src/receiver/files.py index d30180e..1ef4488 100644 --- a/src/receiver/files.py +++ b/src/receiver/files.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -27,33 +27,25 @@ from typing import Dict, Tuple import nacl.exceptions -from src.common.crypto import auth_and_decrypt, blake2b -from src.common.encoding import bytes_to_str +from src.common.crypto import auth_and_decrypt, blake2b +from src.common.encoding import bytes_to_str from src.common.exceptions import SoftError -from src.common.misc import decompress, ensure_dir, separate_headers, separate_trailer -from src.common.output import phase, print_on_previous_line -from src.common.statics import ( - DIR_RECV_FILES, - DONE, - ONION_SERVICE_PUBLIC_KEY_LENGTH, - ORIGIN_HEADER_LENGTH, - PADDED_UTF32_STR_LENGTH, - SYMMETRIC_KEY_LENGTH, - US_BYTE, -) +from src.common.misc import decompress, ensure_dir, separate_headers, separate_trailer +from src.common.output import phase, print_on_previous_line +from src.common.statics import (DIR_RECV_FILES, DONE, ONION_SERVICE_PUBLIC_KEY_LENGTH, ORIGIN_HEADER_LENGTH, + PADDED_UTF32_STR_LENGTH, SYMMETRIC_KEY_LENGTH, US_BYTE) if typing.TYPE_CHECKING: - from datetime import datetime + from datetime import datetime from src.common.db_contacts import ContactList from src.common.db_settings import Settings - from src.receiver.windows import WindowList + from src.receiver.windows import WindowList -def store_unique( - file_data: bytes, # File data to store - file_dir: str, # Directory to store file - file_name: str, # Preferred name for the file. -) -> str: +def store_unique(file_data: bytes, # File data to store + file_dir: str, # Directory to store file + file_name: str # Preferred name for the file. + ) -> str: """Store file under a unique filename. 
If file exists, add trailing counter .# with value as large as @@ -63,11 +55,11 @@ def store_unique( if os.path.isfile(file_dir + file_name): ctr = 1 - while os.path.isfile(file_dir + file_name + f".{ctr}"): + while os.path.isfile(file_dir + file_name + f'.{ctr}'): ctr += 1 - file_name += f".{ctr}" + file_name += f'.{ctr}' - with open(file_dir + file_name, "wb+") as f: + with open(file_dir + file_name, 'wb+') as f: f.write(file_data) f.flush() os.fsync(f.fileno()) @@ -75,17 +67,16 @@ def store_unique( return file_name -def process_assembled_file( - ts: "datetime", # Timestamp last received packet - payload: bytes, # File name and content - onion_pub_key: bytes, # Onion Service pubkey of sender - nick: str, # Nickname of sender - settings: "Settings", # Settings object - window_list: "WindowList", # WindowList object -) -> None: +def process_assembled_file(ts: 'datetime', # Timestamp last received packet + payload: bytes, # File name and content + onion_pub_key: bytes, # Onion Service pubkey of sender + nick: str, # Nickname of sender + settings: 'Settings', # Settings object + window_list: 'WindowList', # WindowList object + ) -> None: """Process received file assembly packets.""" try: - file_name_b, file_data = payload.split(US_BYTE, 1) # type: bytes, bytes + file_name_b, file_data = payload.split(US_BYTE, 1) except ValueError: raise SoftError("Error: Received file had an invalid structure.") @@ -94,7 +85,7 @@ def process_assembled_file( except UnicodeError: raise SoftError("Error: Received file name had an invalid encoding.") - if not file_name.isprintable() or not file_name or "/" in file_name: + if not file_name.isprintable() or not file_name or '/' in file_name: raise SoftError("Error: Received file had an invalid name.") file_ct, file_key = separate_trailer(file_data, SYMMETRIC_KEY_LENGTH) @@ -107,16 +98,15 @@ def process_assembled_file( ) -def decrypt_and_store_file( - ts: "datetime", - file_ct: bytes, - file_key: bytes, - file_name: str, - onion_pub_key: bytes, - nick: str, - window_list: "WindowList", - settings: "Settings", -) -> None: +def decrypt_and_store_file(ts: 'datetime', # Timestamp of received packet + file_ct: bytes, # File ciphertext + file_key: bytes, # File decryption key + file_name: str, # Name of the file + onion_pub_key: bytes, # Onion Service pubkey of sender + nick: str, # Nickname of sender + window_list: 'WindowList', # WindowList object + settings: 'Settings' # Settings object + ) -> None: """Decrypt and store file.""" try: file_pt = auth_and_decrypt(file_ct, file_key) @@ -128,33 +118,27 @@ def decrypt_and_store_file( except zlib.error: raise SoftError("Error: Decompression of file data failed.") - file_dir = f"{DIR_RECV_FILES}{nick}/" + file_dir = f'{DIR_RECV_FILES}{nick}/' final_name = store_unique(file_dc, file_dir, file_name) - message = f"Stored file from {nick} as '{final_name}'." + message = f"Stored file from {nick} as '{final_name}'." 
if settings.traffic_masking and window_list.active_win is not None: window = window_list.active_win else: window = window_list.get_window(onion_pub_key) - window.add_new(ts, message, onion_pub_key, output=True, event_msg=True) -def new_file( - ts: "datetime", # Timestamp of received_packet - packet: bytes, # Sender of file and file ciphertext - file_keys: Dict[bytes, bytes], # Dictionary for file decryption keys - file_buf: Dict[ - bytes, Tuple["datetime", bytes] - ], # Dictionary for cached file ciphertexts - contact_list: "ContactList", # ContactList object - window_list: "WindowList", # WindowList object - settings: "Settings", # Settings object -) -> None: +def new_file(ts: 'datetime', # Timestamp of received packet + packet: bytes, # Sender of file and file ciphertext + file_keys: Dict[bytes, bytes], # Dictionary for file decryption keys + file_buf: Dict[bytes, Tuple['datetime', bytes]], # Dictionary for cached file ciphertexts + contact_list: 'ContactList', # ContactList object + window_list: 'WindowList', # WindowList object + settings: 'Settings' # Settings object + ) -> None: """Validate received file and process or cache it.""" - onion_pub_key, _, file_ct = separate_headers( - packet, [ONION_SERVICE_PUBLIC_KEY_LENGTH, ORIGIN_HEADER_LENGTH] - ) + onion_pub_key, _, file_ct = separate_headers(packet, [ONION_SERVICE_PUBLIC_KEY_LENGTH, ORIGIN_HEADER_LENGTH]) if not contact_list.has_pub_key(onion_pub_key): raise SoftError("File from an unknown account.", output=False) @@ -163,37 +147,26 @@ def new_file( if not contact.file_reception: raise SoftError( - f"Alert! Discarded file from {contact.nick} as file reception for them is disabled.", - bold=True, - ) + f"Alert! Discarded file from {contact.nick} as file reception for them is disabled.", bold=True) k = onion_pub_key + blake2b(file_ct) # Dictionary key if k in file_keys: decryption_key = file_keys[k] - process_file( - ts, - onion_pub_key, - file_ct, - decryption_key, - contact_list, - window_list, - settings, - ) + process_file(ts, onion_pub_key, file_ct, decryption_key, contact_list, window_list, settings) file_keys.pop(k) else: file_buf[k] = (ts, file_ct) -def process_file( - ts: "datetime", # Timestamp of received_packet - onion_pub_key: bytes, # Onion Service pubkey of sender - file_ct: bytes, # File ciphertext - file_key: bytes, # File decryption key - contact_list: "ContactList", # ContactList object - window_list: "WindowList", # WindowList object - settings: "Settings", # Settings object -) -> None: +def process_file(ts: 'datetime', # Timestamp of received_packet + onion_pub_key: bytes, # Onion Service pubkey of sender + file_ct: bytes, # File ciphertext + file_key: bytes, # File decryption key + contact_list: 'ContactList', # ContactList object + window_list: 'WindowList', # WindowList object + settings: 'Settings' # Settings object + ) -> None: """Store file received from a contact.""" nick = contact_list.get_nick_by_pub_key(onion_pub_key) @@ -215,13 +188,13 @@ def process_file( except UnicodeError: raise SoftError(f"Error: Name of file from {nick} had an invalid encoding.") - if not file_name.isprintable() or not file_name or "/" in file_name: + if not file_name.isprintable() or not file_name or '/' in file_name: raise SoftError(f"Error: Name of file from {nick} was invalid.") - file_data = file_dc[PADDED_UTF32_STR_LENGTH:] - file_dir = f"{DIR_RECV_FILES}{nick}/" + file_data = file_dc[PADDED_UTF32_STR_LENGTH:] + file_dir = f'{DIR_RECV_FILES}{nick}/' final_name = store_unique(file_data, file_dir, file_name) - message = 
f"Stored file from {nick} as '{final_name}'." + message = f"Stored file from {nick} as '{final_name}'." if settings.traffic_masking and window_list.active_win is not None: window = window_list.active_win diff --git a/src/receiver/key_exchanges.py b/src/receiver/key_exchanges.py index 3ec59f3..93b9644 100644 --- a/src/receiver/key_exchanges.py +++ b/src/receiver/key_exchanges.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -28,53 +28,36 @@ import tkinter import typing from datetime import datetime -from typing import List, Tuple +from typing import List, Tuple import nacl.exceptions -from src.common.crypto import argon2_kdf, auth_and_decrypt, blake2b, csprng +from src.common.crypto import argon2_kdf, auth_and_decrypt, blake2b, csprng from src.common.db_masterkey import MasterKey -from src.common.encoding import b58encode, bytes_to_str, pub_key_to_short_address -from src.common.exceptions import SoftError -from src.common.input import get_b58_key -from src.common.misc import reset_terminal, separate_header, separate_headers -from src.common.output import m_print, phase, print_on_previous_line -from src.common.path import ask_path_gui -from src.common.statics import ( - ARGON2_PSK_MEMORY_COST, - ARGON2_PSK_PARALLELISM, - ARGON2_PSK_TIME_COST, - ARGON2_SALT_LENGTH, - B58_LOCAL_KEY, - CONFIRM_CODE_LENGTH, - DONE, - FINGERPRINT_LENGTH, - KEX_STATUS_HAS_RX_PSK, - KEX_STATUS_LOCAL_KEY, - KEX_STATUS_NONE, - KEX_STATUS_NO_RX_PSK, - LOCAL_NICK, - LOCAL_PUBKEY, - ONION_SERVICE_PUBLIC_KEY_LENGTH, - PSK_FILE_SIZE, - SYMMETRIC_KEY_LENGTH, - WIN_TYPE_CONTACT, - WIN_TYPE_GROUP, -) +from src.common.encoding import b58encode, bytes_to_str, pub_key_to_short_address +from src.common.exceptions import SoftError +from src.common.input import get_b58_key +from src.common.misc import reset_terminal, separate_header, separate_headers +from src.common.output import m_print, phase, print_on_previous_line +from src.common.path import ask_path_gui +from src.common.statics import (ARGON2_PSK_MEMORY_COST, ARGON2_PSK_PARALLELISM, ARGON2_PSK_TIME_COST, + ARGON2_SALT_LENGTH, B58_LOCAL_KEY, CONFIRM_CODE_LENGTH, DONE, FINGERPRINT_LENGTH, + KEX_STATUS_HAS_RX_PSK, KEX_STATUS_LOCAL_KEY, KEX_STATUS_NONE, KEX_STATUS_NO_RX_PSK, + LOCAL_NICK, LOCAL_PUBKEY, ONION_SERVICE_PUBLIC_KEY_LENGTH, PSK_FILE_SIZE, + SYMMETRIC_KEY_LENGTH, WIN_TYPE_CONTACT, WIN_TYPE_GROUP) if typing.TYPE_CHECKING: - from multiprocessing import Queue + from multiprocessing import Queue from src.common.db_contacts import ContactList - from src.common.db_keys import KeyList + from src.common.db_keys import KeyList from src.common.db_settings import Settings - from src.receiver.windows import WindowList + from src.receiver.windows import WindowList local_key_queue = Queue[Tuple[datetime, bytes]] # Local key - def protect_kdk(kdk: bytes) -> None: """Prevent leak of KDK via terminal history / clipboard.""" readline.clear_history() @@ -91,9 +74,9 @@ def protect_kdk(kdk: bytes) -> None: root.destroy() -def process_local_key_buffer( - kdk: bytes, l_queue: "local_key_queue", -) -> Tuple[datetime, bytes]: +def process_local_key_buffer(kdk: bytes, + l_queue: 'local_key_queue' + ) -> Tuple[datetime, bytes]: """Check if the kdk was for a packet further ahead in the queue.""" buffer = [] # type: List[Tuple[datetime, bytes]] while l_queue.qsize() > 0: @@ -106,10 +89,10 @@ def process_local_key_buffer( plaintext = auth_and_decrypt(tup[1], kdk) # 
If we reach this point, decryption was successful. - for unexamined in buffer[i + 1 :]: + for unexamined in buffer[i + 1:]: l_queue.put(unexamined) buffer = [] - ts = tup[0] + ts = tup[0] return ts, plaintext @@ -121,14 +104,13 @@ def process_local_key_buffer( raise SoftError("Error: Incorrect key decryption key.", delay=1) -def decrypt_local_key( - ts: "datetime", - packet: bytes, - kdk_hashes: List[bytes], - packet_hashes: List[bytes], - settings: "Settings", - l_queue: "local_key_queue", -) -> Tuple["datetime", bytes]: +def decrypt_local_key(ts: 'datetime', + packet: bytes, + kdk_hashes: List[bytes], + packet_hashes: List[bytes], + settings: 'Settings', + l_queue: 'local_key_queue' + ) -> Tuple['datetime', bytes]: """Decrypt local key packet.""" while True: kdk = get_b58_key(B58_LOCAL_KEY, settings) @@ -153,17 +135,16 @@ def decrypt_local_key( return ts, plaintext -def process_local_key( - ts: "datetime", - packet: bytes, - window_list: "WindowList", - contact_list: "ContactList", - key_list: "KeyList", - settings: "Settings", - kdk_hashes: List[bytes], - packet_hashes: List[bytes], - l_queue: "Queue[Tuple[datetime, bytes]]", -) -> None: +def process_local_key(ts: 'datetime', + packet: bytes, + window_list: 'WindowList', + contact_list: 'ContactList', + key_list: 'KeyList', + settings: 'Settings', + kdk_hashes: List[bytes], + packet_hashes: List[bytes], + l_queue: 'Queue[Tuple[datetime, bytes]]' + ) -> None: """Decrypt local key packet and add local contact/keyset.""" first_local_key = not key_list.has_local_keyset() @@ -173,41 +154,27 @@ def process_local_key( m_print("Local key setup", bold=True, head_clear=True, head=1, tail=1) - ts, plaintext = decrypt_local_key( - ts, packet, kdk_hashes, packet_hashes, settings, l_queue - ) + ts, plaintext = decrypt_local_key(ts, packet, kdk_hashes, packet_hashes, settings, l_queue) # Add local contact to contact list database - contact_list.add_contact( - LOCAL_PUBKEY, - LOCAL_NICK, - KEX_STATUS_LOCAL_KEY, - bytes(FINGERPRINT_LENGTH), - bytes(FINGERPRINT_LENGTH), - False, - False, - True, - ) + contact_list.add_contact(LOCAL_PUBKEY, + LOCAL_NICK, + KEX_STATUS_LOCAL_KEY, + bytes(FINGERPRINT_LENGTH), + bytes(FINGERPRINT_LENGTH), + False, False, True) tx_mk, tx_hk, c_code = separate_headers(plaintext, 2 * [SYMMETRIC_KEY_LENGTH]) # Add local keyset to keyset database - key_list.add_keyset( - onion_pub_key=LOCAL_PUBKEY, - tx_mk=tx_mk, - rx_mk=csprng(), - tx_hk=tx_hk, - rx_hk=csprng(), - ) + key_list.add_keyset(onion_pub_key=LOCAL_PUBKEY, + tx_mk=tx_mk, + rx_mk=csprng(), + tx_hk=tx_hk, + rx_hk=csprng()) - m_print( - [ - "Local key successfully installed.", - f"Confirmation code (to Transmitter): {c_code.hex()}", - ], - box=True, - head=1, - ) + m_print(["Local key successfully installed.", f"Confirmation code (to Transmitter): {c_code.hex()}"], + box=True, head=1) cmd_win = window_list.get_command_window() @@ -225,9 +192,10 @@ def process_local_key( raise SoftError("Local key setup aborted.", output=False) -def local_key_rdy( - ts: "datetime", window_list: "WindowList", contact_list: "ContactList" -) -> None: +def local_key_rdy(ts: 'datetime', + window_list: 'WindowList', + contact_list: 'ContactList' + ) -> None: """Clear local key bootstrap process from the screen.""" message = "Successfully completed the local key setup." 
cmd_win = window_list.get_command_window() @@ -236,10 +204,7 @@ def local_key_rdy( m_print(message, bold=True, tail_clear=True, delay=1) if contact_list.has_contacts(): - if window_list.active_win is not None and window_list.active_win.type in [ - WIN_TYPE_CONTACT, - WIN_TYPE_GROUP, - ]: + if window_list.active_win is not None and window_list.active_win.type in [WIN_TYPE_CONTACT, WIN_TYPE_GROUP]: window_list.active_win.redraw() else: m_print("Waiting for new contacts", bold=True, head=1, tail=1) @@ -247,36 +212,30 @@ def local_key_rdy( # ECDHE - -def key_ex_ecdhe( - packet: bytes, - ts: "datetime", - window_list: "WindowList", - contact_list: "ContactList", - key_list: "KeyList", - settings: "Settings", -) -> None: +def key_ex_ecdhe(packet: bytes, + ts: 'datetime', + window_list: 'WindowList', + contact_list: 'ContactList', + key_list: 'KeyList', + settings: 'Settings' + ) -> None: """Add contact and symmetric keys derived from X448 shared key.""" - onion_pub_key, tx_mk, rx_mk, tx_hk, rx_hk, nick_bytes = separate_headers( - packet, [ONION_SERVICE_PUBLIC_KEY_LENGTH] + 4 * [SYMMETRIC_KEY_LENGTH] - ) + onion_pub_key, tx_mk, rx_mk, tx_hk, rx_hk, nick_bytes \ + = separate_headers(packet, [ONION_SERVICE_PUBLIC_KEY_LENGTH] + 4*[SYMMETRIC_KEY_LENGTH]) try: nick = bytes_to_str(nick_bytes) except (struct.error, UnicodeError): raise SoftError("Error: Received invalid contact data") - contact_list.add_contact( - onion_pub_key, - nick, - bytes(FINGERPRINT_LENGTH), - bytes(FINGERPRINT_LENGTH), - KEX_STATUS_NONE, - settings.log_messages_by_default, - settings.accept_files_by_default, - settings.show_notifications_by_default, - ) + contact_list.add_contact(onion_pub_key, nick, + bytes(FINGERPRINT_LENGTH), + bytes(FINGERPRINT_LENGTH), + KEX_STATUS_NONE, + settings.log_messages_by_default, + settings.accept_files_by_default, + settings.show_notifications_by_default) key_list.add_keyset(onion_pub_key, tx_mk, rx_mk, tx_hk, rx_hk) @@ -290,51 +249,42 @@ def key_ex_ecdhe( # PSK - -def key_ex_psk_tx( - packet: bytes, - ts: "datetime", - window_list: "WindowList", - contact_list: "ContactList", - key_list: "KeyList", - settings: "Settings", -) -> None: +def key_ex_psk_tx(packet: bytes, + ts: 'datetime', + window_list: 'WindowList', + contact_list: 'ContactList', + key_list: 'KeyList', + settings: 'Settings' + ) -> None: """Add contact and Tx-PSKs.""" - onion_pub_key, tx_mk, _, tx_hk, _, nick_bytes = separate_headers( - packet, [ONION_SERVICE_PUBLIC_KEY_LENGTH] + 4 * [SYMMETRIC_KEY_LENGTH] - ) + + onion_pub_key, tx_mk, _, tx_hk, _, nick_bytes \ + = separate_headers(packet, [ONION_SERVICE_PUBLIC_KEY_LENGTH] + 4*[SYMMETRIC_KEY_LENGTH]) try: nick = bytes_to_str(nick_bytes) except (struct.error, UnicodeError): raise SoftError("Error: Received invalid contact data") - contact_list.add_contact( - onion_pub_key, - nick, - bytes(FINGERPRINT_LENGTH), - bytes(FINGERPRINT_LENGTH), - KEX_STATUS_NO_RX_PSK, - settings.log_messages_by_default, - settings.accept_files_by_default, - settings.show_notifications_by_default, - ) + contact_list.add_contact(onion_pub_key, nick, + bytes(FINGERPRINT_LENGTH), + bytes(FINGERPRINT_LENGTH), + KEX_STATUS_NO_RX_PSK, + settings.log_messages_by_default, + settings.accept_files_by_default, + settings.show_notifications_by_default) # The Rx-side keys are set as null-byte strings to indicate they have not # been added yet. The zero-keys do not allow existential forgeries as # `decrypt_assembly_packet`does not allow the use of zero-keys for decryption. 
- key_list.add_keyset( - onion_pub_key=onion_pub_key, - tx_mk=tx_mk, - rx_mk=bytes(SYMMETRIC_KEY_LENGTH), - tx_hk=tx_hk, - rx_hk=bytes(SYMMETRIC_KEY_LENGTH), - ) + key_list.add_keyset(onion_pub_key=onion_pub_key, + tx_mk=tx_mk, + rx_mk=bytes(SYMMETRIC_KEY_LENGTH), + tx_hk=tx_hk, + rx_hk=bytes(SYMMETRIC_KEY_LENGTH)) - c_code = blake2b(onion_pub_key, digest_size=CONFIRM_CODE_LENGTH) - message = ( - f"Added Tx-side PSK for {nick} ({pub_key_to_short_address(onion_pub_key)})." - ) + c_code = blake2b(onion_pub_key, digest_size=CONFIRM_CODE_LENGTH) + message = f"Added Tx-side PSK for {nick} ({pub_key_to_short_address(onion_pub_key)})." cmd_win = window_list.get_command_window() cmd_win.add_new(ts, message) @@ -347,13 +297,7 @@ def decrypt_rx_psk(ct_tag: bytes, salt: bytes) -> bytes: try: password = MasterKey.get_password("PSK password") phase("Deriving the key decryption key", head=2) - kdk = argon2_kdf( - password, - salt, - ARGON2_PSK_TIME_COST, - ARGON2_PSK_MEMORY_COST, - ARGON2_PSK_PARALLELISM, - ) + kdk = argon2_kdf(password, salt, ARGON2_PSK_TIME_COST, ARGON2_PSK_MEMORY_COST, ARGON2_PSK_PARALLELISM) psk = auth_and_decrypt(ct_tag, kdk) phase(DONE) return psk @@ -366,28 +310,25 @@ def decrypt_rx_psk(ct_tag: bytes, salt: bytes) -> bytes: raise SoftError("PSK import aborted.", head=2, delay=1, tail_clear=True) -def key_ex_psk_rx( - packet: bytes, - ts: "datetime", - window_list: "WindowList", - contact_list: "ContactList", - key_list: "KeyList", - settings: "Settings", -) -> None: +def key_ex_psk_rx(packet: bytes, + ts: 'datetime', + window_list: 'WindowList', + contact_list: 'ContactList', + key_list: 'KeyList', + settings: 'Settings' + ) -> None: """Import Rx-PSK of contact.""" c_code, onion_pub_key = separate_header(packet, CONFIRM_CODE_LENGTH) - short_addr = pub_key_to_short_address(onion_pub_key) + short_addr = pub_key_to_short_address(onion_pub_key) if not contact_list.has_pub_key(onion_pub_key): raise SoftError(f"Error: Unknown account '{short_addr}'.", head_clear=True) - contact = contact_list.get_contact_by_pub_key(onion_pub_key) - psk_file = ask_path_gui( - f"Select PSK for {contact.nick} ({short_addr})", settings, get_file=True - ) + contact = contact_list.get_contact_by_pub_key(onion_pub_key) + psk_file = ask_path_gui(f"Select PSK for {contact.nick} ({short_addr})", settings, get_file=True) try: - with open(psk_file, "rb") as f: + with open(psk_file, 'rb') as f: psk_data = f.read() except PermissionError: raise SoftError("Error: No read permission for the PSK file.") @@ -396,14 +337,13 @@ def key_ex_psk_rx( raise SoftError("Error: The PSK data in the file was invalid.", head_clear=True) salt, ct_tag = separate_header(psk_data, ARGON2_SALT_LENGTH) - - psk = decrypt_rx_psk(ct_tag, salt) + psk = decrypt_rx_psk(ct_tag, salt) rx_mk, rx_hk = separate_header(psk, SYMMETRIC_KEY_LENGTH) if any(k == bytes(SYMMETRIC_KEY_LENGTH) for k in [rx_mk, rx_hk]): raise SoftError("Error: Received invalid keys from contact.", head_clear=True) - keyset = key_list.get_keyset(onion_pub_key) + keyset = key_list.get_keyset(onion_pub_key) keyset.rx_mk = rx_mk keyset.rx_hk = rx_hk key_list.store_keys() @@ -415,27 +355,14 @@ def key_ex_psk_rx( # the program itself, and therefore trusted, but it's still good practice. subprocess.Popen(f"shred -n 3 -z -u {pipes.quote(psk_file)}", shell=True).wait() if os.path.isfile(psk_file): - m_print( - f"Warning! Overwriting of PSK ({psk_file}) failed. Press to continue.", - manual_proceed=True, - box=True, - ) + m_print(f"Warning! Overwriting of PSK ({psk_file}) failed. 
Press to continue.", + manual_proceed=True, box=True) message = f"Added Rx-side PSK for {contact.nick} ({short_addr})." cmd_win = window_list.get_command_window() cmd_win.add_new(ts, message) - m_print( - [ - message, - "", - "Warning!", - "Physically destroy the keyfile transmission media ", - "to ensure it does not steal data from this computer!", - "", - f"Confirmation code (to Transmitter): {c_code.hex()}", - ], - box=True, - head=1, - tail=1, - ) + m_print([message, '', "Warning!", + "Physically destroy the keyfile transmission media ", + "to ensure it does not steal data from this computer!", '', + f"Confirmation code (to Transmitter): {c_code.hex()}"], box=True, head=1, tail=1) diff --git a/src/receiver/messages.py b/src/receiver/messages.py index d7a8290..2adb4df 100644 --- a/src/receiver/messages.py +++ b/src/receiver/messages.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -24,53 +24,37 @@ import typing from typing import Dict -from src.common.db_logs import write_log_entry -from src.common.encoding import bytes_to_bool +from src.common.db_logs import write_log_entry +from src.common.encoding import bytes_to_bool from src.common.exceptions import SoftError -from src.common.misc import separate_header, separate_headers -from src.common.statics import ( - ASSEMBLY_PACKET_HEADER_LENGTH, - BLAKE2_DIGEST_LENGTH, - FILE, - FILE_KEY_HEADER, - GROUP_ID_LENGTH, - GROUP_MESSAGE_HEADER, - GROUP_MSG_ID_LENGTH, - LOCAL_PUBKEY, - MESSAGE, - MESSAGE_HEADER_LENGTH, - ONION_SERVICE_PUBLIC_KEY_LENGTH, - ORIGIN_CONTACT_HEADER, - ORIGIN_HEADER_LENGTH, - ORIGIN_USER_HEADER, - PLACEHOLDER_DATA, - PRIVATE_MESSAGE_HEADER, - SYMMETRIC_KEY_LENGTH, - WHISPER_FIELD_LENGTH, -) +from src.common.misc import separate_header, separate_headers +from src.common.statics import (ASSEMBLY_PACKET_HEADER_LENGTH, BLAKE2_DIGEST_LENGTH, FILE, FILE_KEY_HEADER, + GROUP_ID_LENGTH, GROUP_MESSAGE_HEADER, GROUP_MSG_ID_LENGTH, LOCAL_PUBKEY, MESSAGE, + MESSAGE_HEADER_LENGTH, ONION_SERVICE_PUBLIC_KEY_LENGTH, ORIGIN_CONTACT_HEADER, + ORIGIN_HEADER_LENGTH, ORIGIN_USER_HEADER, PLACEHOLDER_DATA, PRIVATE_MESSAGE_HEADER, + SYMMETRIC_KEY_LENGTH, WHISPER_FIELD_LENGTH) from src.receiver.packet import decrypt_assembly_packet if typing.TYPE_CHECKING: - from datetime import datetime - from src.common.database import MessageLog + from datetime import datetime + from src.common.database import MessageLog from src.common.db_contacts import ContactList - from src.common.db_groups import GroupList - from src.common.db_keys import KeyList + from src.common.db_groups import GroupList + from src.common.db_keys import KeyList from src.common.db_settings import Settings - from src.receiver.packet import Packet, PacketList - from src.receiver.windows import WindowList + from src.receiver.packet import Packet, PacketList + from src.receiver.windows import WindowList -def log_masking_packets( - onion_pub_key: bytes, # Onion address of associated contact - origin: bytes, # Origin of packet (user / contact) - logging: bool, # When True, message will be logged - settings: "Settings", # Settings object - packet: "Packet", # Packet object - message_log: "MessageLog", # MessageLog object - completed: bool = False, # When True, logs placeholder data for completed message -) -> None: +def log_masking_packets(onion_pub_key: bytes, # Onion address of associated contact + origin: bytes, # Origin of packet (user / contact) + logging: 
bool, # When True, message will be logged + settings: 'Settings', # Settings object + packet: 'Packet', # Packet object + message_log: 'MessageLog', # MessageLog object + completed: bool = False, # When True, logs placeholder data for completed message + ) -> None: """Add masking packets to log file. If logging and log file masking are enabled, this function will @@ -79,93 +63,67 @@ def log_masking_packets( communication that log file observation would otherwise reveal. """ if logging and settings.log_file_masking and (packet.log_masking_ctr or completed): - no_masking_packets = ( - len(packet.assembly_pt_list) if completed else packet.log_masking_ctr - ) + no_masking_packets = len(packet.assembly_pt_list) if completed else packet.log_masking_ctr for _ in range(no_masking_packets): write_log_entry(PLACEHOLDER_DATA, onion_pub_key, message_log, origin) packet.log_masking_ctr = 0 -def process_message_packet( - ts: "datetime", # Timestamp of received message packet - assembly_packet_ct: bytes, # Encrypted assembly packet - window_list: "WindowList", # WindowList object - packet_list: "PacketList", # PacketList object - contact_list: "ContactList", # ContactList object - key_list: "KeyList", # KeyList object - group_list: "GroupList", # GroupList object - settings: "Settings", # Settings object - file_keys: Dict[bytes, bytes], # Dictionary of file decryption keys - message_log: "MessageLog", # MessageLog object -) -> None: +def process_message_packet(ts: 'datetime', # Timestamp of received message packet + assembly_packet_ct: bytes, # Encrypted assembly packet + window_list: 'WindowList', # WindowList object + packet_list: 'PacketList', # PacketList object + contact_list: 'ContactList', # ContactList object + key_list: 'KeyList', # KeyList object + group_list: 'GroupList', # GroupList object + settings: 'Settings', # Settings object + file_keys: Dict[bytes, bytes], # Dictionary of file decryption keys + message_log: 'MessageLog', # MessageLog object + ) -> None: """Process received message packet.""" command_window = window_list.get_command_window() onion_pub_key, origin, assembly_packet_ct = separate_headers( - assembly_packet_ct, [ONION_SERVICE_PUBLIC_KEY_LENGTH, ORIGIN_HEADER_LENGTH] - ) + assembly_packet_ct, [ONION_SERVICE_PUBLIC_KEY_LENGTH, ORIGIN_HEADER_LENGTH]) if onion_pub_key == LOCAL_PUBKEY: - raise SoftError( - "Warning! Received packet masqueraded as a command.", window=command_window - ) + raise SoftError("Warning! 
Received packet masqueraded as a command.", window=command_window) + if origin not in [ORIGIN_USER_HEADER, ORIGIN_CONTACT_HEADER]: - raise SoftError( - "Error: Received packet had an invalid origin-header.", - window=command_window, - ) + raise SoftError("Error: Received packet had an invalid origin-header.", window=command_window) - assembly_packet = decrypt_assembly_packet( - assembly_packet_ct, onion_pub_key, origin, window_list, contact_list, key_list - ) + assembly_packet = decrypt_assembly_packet(assembly_packet_ct, onion_pub_key, origin, + window_list, contact_list, key_list) - p_type = ( - FILE if assembly_packet[:ASSEMBLY_PACKET_HEADER_LENGTH].isupper() else MESSAGE - ) - packet = packet_list.get_packet(onion_pub_key, origin, p_type) + p_type = (FILE if assembly_packet[:ASSEMBLY_PACKET_HEADER_LENGTH].isupper() else MESSAGE) + packet = packet_list.get_packet(onion_pub_key, origin, p_type) logging = contact_list.get_contact_by_pub_key(onion_pub_key).log_messages try: packet.add_packet(assembly_packet) except SoftError: - log_masking_packets( - onion_pub_key, origin, logging, settings, packet, message_log - ) + log_masking_packets(onion_pub_key, origin, logging, settings, packet, message_log) raise log_masking_packets(onion_pub_key, origin, logging, settings, packet, message_log) if packet.is_complete: - process_complete_message_packet( - ts, - onion_pub_key, - p_type, - origin, - logging, - packet, - window_list, - contact_list, - group_list, - settings, - message_log, - file_keys, - ) + process_complete_message_packet(ts, onion_pub_key, p_type, origin, logging, packet, window_list, + contact_list, group_list, settings, message_log, file_keys) -def process_complete_message_packet( - ts: "datetime", # Timestamp of received message packet - onion_pub_key: bytes, # Onion address of associated contact - p_type: str, # Packet type (file, message) - origin: bytes, # Origin of packet (user / contact) - logging: bool, # When True, message will be logged - packet: "Packet", # Packet object - window_list: "WindowList", # WindowList object - contact_list: "ContactList", # ContactList object - group_list: "GroupList", # GroupList object - settings: "Settings", # Settings object - message_log: "MessageLog", # MessageLog object - file_keys: Dict[bytes, bytes], # Dictionary of file decryption keys -) -> None: +def process_complete_message_packet(ts: 'datetime', # Timestamp of received message packet + onion_pub_key: bytes, # Onion address of associated contact + p_type: str, # Packet type (file, message) + origin: bytes, # Origin of packet (user / contact) + logging: bool, # When True, message will be logged + packet: 'Packet', # Packet object + window_list: 'WindowList', # WindowList object + contact_list: 'ContactList', # ContactList object + group_list: 'GroupList', # GroupList object + settings: 'Settings', # Settings object + message_log: 'MessageLog', # MessageLog object + file_keys: Dict[bytes, bytes] # Dictionary of file decryption keys + ) -> None: """Process complete message packet. 
The assembled message packet might contain a file if the sender @@ -174,52 +132,31 @@ def process_complete_message_packet( try: if p_type == FILE: packet.assemble_and_store_file(ts, onion_pub_key, window_list) - raise SoftError( - "File storage complete.", output=False - ) # Raising allows calling log_masking_packets + raise SoftError("File storage complete.", output=False) # Raising allows calling log_masking_packets if p_type == MESSAGE: - process_message( - ts, - onion_pub_key, - origin, - logging, - packet, - window_list, - contact_list, - group_list, - message_log, - file_keys, - ) + process_message(ts, onion_pub_key, origin, logging, packet, window_list, + contact_list, group_list, message_log, file_keys) except (SoftError, UnicodeError): - log_masking_packets( - onion_pub_key, - origin, - logging, - settings, - packet, - message_log, - completed=True, - ) + log_masking_packets(onion_pub_key, origin, logging, settings, packet, message_log, completed=True) raise finally: packet.clear_assembly_packets() -def process_message( - ts: "datetime", # Timestamp of received message packet - onion_pub_key: bytes, # Onion address of associated contact - origin: bytes, # Origin of message (user / contact) - logging: bool, # When True, message will be logged - packet: "Packet", # Packet object - window_list: "WindowList", # WindowList object - contact_list: "ContactList", # ContactList object - group_list: "GroupList", # GroupList object - message_log: "MessageLog", # MessageLog object - file_keys: Dict[bytes, bytes], # Dictionary of file decryption keys -) -> None: +def process_message(ts: 'datetime', # Timestamp of received message packet + onion_pub_key: bytes, # Onion address of associated contact + origin: bytes, # Origin of message (user / contact) + logging: bool, # When True, message will be logged + packet: 'Packet', # Packet object + window_list: 'WindowList', # WindowList object + contact_list: 'ContactList', # ContactList object + group_list: 'GroupList', # GroupList object + message_log: 'MessageLog', # MessageLog object + file_keys: Dict[bytes, bytes] # Dictionary of file decryption keys + ) -> None: """Process message packet. The received message might be a private or group message, or it @@ -231,33 +168,23 @@ def process_message( below. Thus, the sender should not trust a whisper message is never logged. 
""" - whisper_byte, header, assembled = separate_headers( - packet.assemble_message_packet(), [WHISPER_FIELD_LENGTH, MESSAGE_HEADER_LENGTH] - ) + whisper_byte, header, assembled = separate_headers(packet.assemble_message_packet(), + [WHISPER_FIELD_LENGTH, MESSAGE_HEADER_LENGTH]) if len(whisper_byte) != WHISPER_FIELD_LENGTH: raise SoftError("Error: Message from contact had an invalid whisper header.") whisper = bytes_to_bool(whisper_byte) if header == GROUP_MESSAGE_HEADER: - logging = process_group_message( - ts, assembled, onion_pub_key, origin, whisper, group_list, window_list - ) + logging = process_group_message(ts, assembled, onion_pub_key, origin, whisper, group_list, window_list) elif header == PRIVATE_MESSAGE_HEADER: window = window_list.get_window(onion_pub_key) - window.add_new( - ts, assembled.decode(), onion_pub_key, origin, output=True, whisper=whisper - ) + window.add_new(ts, assembled.decode(), onion_pub_key, origin, output=True, whisper=whisper) elif header == FILE_KEY_HEADER: - nick = process_file_key_message( - assembled, onion_pub_key, origin, contact_list, file_keys - ) - raise SoftError( - f"Received file decryption key from {nick}", - window=window_list.get_command_window(), - ) + nick = process_file_key_message(assembled, onion_pub_key, origin, contact_list, file_keys) + raise SoftError(f"Received file decryption key from {nick}", window=window_list.get_command_window()) else: raise SoftError("Error: Message from contact had an invalid header.") @@ -265,20 +192,20 @@ def process_message( # Logging if whisper: raise SoftError("Whisper message complete.", output=False) + if logging: for p in packet.assembly_pt_list: write_log_entry(p, onion_pub_key, message_log, origin) -def process_group_message( - ts: "datetime", # Timestamp of group message - assembled: bytes, # Group message and its headers - onion_pub_key: bytes, # Onion address of associated contact - origin: bytes, # Origin of group message (user / contact) - whisper: bool, # When True, message is not logged. - group_list: "GroupList", # GroupList object - window_list: "WindowList", # WindowList object -) -> bool: +def process_group_message(ts: 'datetime', # Timestamp of group message + assembled: bytes, # Group message and its headers + onion_pub_key: bytes, # Onion address of associated contact + origin: bytes, # Origin of group message (user / contact) + whisper: bool, # When True, message is not logged. + group_list: 'GroupList', # GroupList object + window_list: 'WindowList' # WindowList object + ) -> bool: """Process a group message.""" group_id, assembled = separate_header(assembled, GROUP_ID_LENGTH) if not group_list.has_group_id(group_id): @@ -303,32 +230,22 @@ def process_group_message( if origin == ORIGIN_USER_HEADER: if window.group_msg_id != group_msg_id: window.group_msg_id = group_msg_id - window.add_new( - ts, - group_message_str, - onion_pub_key, - origin, - output=True, - whisper=whisper, - ) + window.add_new(ts, group_message_str, onion_pub_key, origin, output=True, whisper=whisper) elif origin == ORIGIN_CONTACT_HEADER: - window.add_new( - ts, group_message_str, onion_pub_key, origin, output=True, whisper=whisper - ) + window.add_new(ts, group_message_str, onion_pub_key, origin, output=True, whisper=whisper) # Return the group's logging setting because it might be different # from the logging setting of the contact who sent group message. 
return group.log_messages -def process_file_key_message( - assembled: bytes, # File decryption key - onion_pub_key: bytes, # Onion address of associated contact - origin: bytes, # Origin of file key packet (user / contact) - contact_list: "ContactList", # ContactList object - file_keys: Dict[bytes, bytes], # Dictionary of file identifiers and decryption keys -) -> str: +def process_file_key_message(assembled: bytes, # File decryption key + onion_pub_key: bytes, # Onion address of associated contact + origin: bytes, # Origin of file key packet (user / contact) + contact_list: 'ContactList', # ContactList object + file_keys: Dict[bytes, bytes] # Dictionary of file identifiers and decryption keys + ) -> str: """Process received file key delivery message.""" if origin == ORIGIN_USER_HEADER: raise SoftError("File key message from the user.", output=False) diff --git a/src/receiver/output_loop.py b/src/receiver/output_loop.py index f2f9db1..ef824d9 100755 --- a/src/receiver/output_loop.py +++ b/src/receiver/output_loop.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -26,60 +26,53 @@ import typing from typing import Any, Dict, List, Tuple -from src.common.database import MessageLog +from src.common.database import MessageLog from src.common.exceptions import SoftError -from src.common.output import clear_screen -from src.common.statics import ( - COMMAND_DATAGRAM_HEADER, - EXIT_QUEUE, - FILE_DATAGRAM_HEADER, - LOCAL_KEY_DATAGRAM_HEADER, - MESSAGE_DATAGRAM_HEADER, - ONION_SERVICE_PUBLIC_KEY_LENGTH, - UNIT_TEST_QUEUE, -) +from src.common.output import clear_screen +from src.common.statics import (COMMAND_DATAGRAM_HEADER, EXIT_QUEUE, FILE_DATAGRAM_HEADER, LOCAL_KEY_DATAGRAM_HEADER, + MESSAGE_DATAGRAM_HEADER, ONION_SERVICE_PUBLIC_KEY_LENGTH, UNIT_TEST_QUEUE) -from src.receiver.commands import process_command -from src.receiver.files import new_file, process_file +from src.receiver.commands import process_command +from src.receiver.files import new_file, process_file from src.receiver.key_exchanges import process_local_key -from src.receiver.messages import process_message_packet -from src.receiver.packet import PacketList -from src.receiver.windows import WindowList +from src.receiver.messages import process_message_packet +from src.receiver.packet import PacketList +from src.receiver.windows import WindowList if typing.TYPE_CHECKING: - from datetime import datetime - from multiprocessing import Queue - from src.common.db_contacts import ContactList - from src.common.db_groups import GroupList - from src.common.db_keys import KeyList + from datetime import datetime + from multiprocessing import Queue + from src.common.db_contacts import ContactList + from src.common.db_groups import GroupList + from src.common.db_keys import KeyList from src.common.db_masterkey import MasterKey - from src.common.db_settings import Settings - from src.common.gateway import Gateway + from src.common.db_settings import Settings + from src.common.gateway import Gateway packet_buffer_type = Dict[bytes, List[Tuple[datetime, bytes]]] - file_buffer_type = Dict[bytes, Tuple[datetime, bytes]] - file_keys_type = Dict[bytes, bytes] - queue_dict = Dict[bytes, Queue[Any]] + file_buffer_type = Dict[bytes, Tuple[datetime, bytes]] + file_keys_type = Dict[bytes, bytes] + queue_dict = Dict[bytes, Queue[Any]] -def output_loop( - queues: Dict[bytes, "Queue[Any]"], - gateway: "Gateway", - settings: 
"Settings", - contact_list: "ContactList", - key_list: "KeyList", - group_list: "GroupList", - master_key: "MasterKey", - message_log: "MessageLog", - stdin_fd: int, - unit_test: bool = False, -) -> None: +def output_loop(queues: Dict[bytes, 'Queue[Any]'], + gateway: 'Gateway', + settings: 'Settings', + contact_list: 'ContactList', + key_list: 'KeyList', + group_list: 'GroupList', + master_key: 'MasterKey', + message_log: 'MessageLog', + stdin_fd: int, + unit_test: bool = False + ) -> None: """Process packets in message queues according to their priority.""" - sys.stdin = os.fdopen(stdin_fd) + sys.stdin = os.fdopen(stdin_fd) packet_buffer = dict() # type: packet_buffer_type - file_buffer = dict() # type: file_buffer_type - file_keys = dict() # type: file_keys_type - kdk_hashes = [] # type: List[bytes] + file_buffer = dict() # type: file_buffer_type + file_keys = dict() # type: file_keys_type + + kdk_hashes = [] # type: List[bytes] packet_hashes = [] # type: List[bytes] packet_list = PacketList(settings, contact_list) @@ -89,68 +82,28 @@ def output_loop( while True: try: # Local key packets - process_local_key_queue( - queues, - window_list, - contact_list, - key_list, - settings, - kdk_hashes, - packet_hashes, - ) + process_local_key_queue(queues, window_list, contact_list, key_list, settings, kdk_hashes, packet_hashes) # Commands - process_command_queue( - queues, - window_list, - contact_list, - group_list, - key_list, - settings, - packet_list, - master_key, - gateway, - ) + process_command_queue(queues, window_list, contact_list, group_list, + settings, key_list, packet_list, master_key, gateway) # File window refresh window_list.refresh_file_window_check() # Cached messages - process_cached_messages( - window_list, - contact_list, - group_list, - key_list, - settings, - packet_list, - message_log, - file_keys, - packet_buffer, - ) + process_cached_messages(window_list, contact_list, group_list, key_list, settings, + packet_list, message_log, file_keys, packet_buffer) # New messages - process_message_queue( - queues, - window_list, - contact_list, - group_list, - key_list, - settings, - packet_list, - message_log, - file_keys, - packet_buffer, - ) + process_message_queue(queues, window_list, contact_list, group_list, key_list, settings, + packet_list, message_log, file_keys, packet_buffer) # Cached files - process_cached_files( - window_list, contact_list, settings, file_keys, file_buffer - ) + process_cached_files(window_list, contact_list, settings, file_keys, file_buffer) # New files - process_file_queue( - queues, window_list, contact_list, settings, file_keys, file_buffer - ) + process_file_queue(queues, window_list, contact_list, settings, file_keys, file_buffer) time.sleep(0.01) @@ -161,15 +114,14 @@ def output_loop( pass -def process_local_key_queue( - queues: "queue_dict", - window_list: "WindowList", - contact_list: "ContactList", - key_list: "KeyList", - settings: "Settings", - kdk_hashes: List[bytes], - packet_hashes: List[bytes], -) -> None: +def process_local_key_queue(queues: 'queue_dict', + window_list: 'WindowList', + contact_list: 'ContactList', + key_list: 'KeyList', + settings: 'Settings', + kdk_hashes: List[bytes], + packet_hashes: List[bytes] + ) -> None: """Check local key queue for packets. This function also checks that local key is installed. 
@@ -178,155 +130,99 @@ def process_local_key_queue( if local_key_queue.qsize(): ts, packet = local_key_queue.get() - process_local_key( - ts, - packet, - window_list, - contact_list, - key_list, - settings, - kdk_hashes, - packet_hashes, - local_key_queue, - ) + process_local_key(ts, packet, window_list, contact_list, key_list, + settings, kdk_hashes, packet_hashes, local_key_queue) if not contact_list.has_local_contact(): time.sleep(0.1) raise SoftError("No local key", output=False) -def process_command_queue( - queues: "queue_dict", - window_list: "WindowList", - contact_list: "ContactList", - group_list: "GroupList", - key_list: "KeyList", - settings: "Settings", - packet_list: "PacketList", - master_key: "MasterKey", - gateway: "Gateway", -) -> None: +def process_command_queue(queues: 'queue_dict', + window_list: 'WindowList', + contact_list: 'ContactList', + group_list: 'GroupList', + settings: 'Settings', + key_list: 'KeyList', + packet_list: 'PacketList', + master_key: 'MasterKey', + gateway: 'Gateway' + ) -> None: """Check command queue for packets.""" command_queue = queues[COMMAND_DATAGRAM_HEADER] - exit_queue = queues[EXIT_QUEUE] + exit_queue = queues[EXIT_QUEUE] if command_queue.qsize(): ts, packet = command_queue.get() - process_command( - ts, - packet, - window_list, - packet_list, - contact_list, - key_list, - group_list, - settings, - master_key, - gateway, - exit_queue, - ) + process_command(ts, packet, window_list, packet_list, contact_list, key_list, + group_list, settings, master_key, gateway, exit_queue) -def process_cached_messages( - window_list: "WindowList", - contact_list: "ContactList", - group_list: "GroupList", - key_list: "KeyList", - settings: "Settings", - packet_list: "PacketList", - message_log: "MessageLog", - file_keys: "file_keys_type", - packet_buffer: "packet_buffer_type", -) -> None: +def process_cached_messages(window_list: 'WindowList', + contact_list: 'ContactList', + group_list: 'GroupList', + key_list: 'KeyList', + settings: 'Settings', + packet_list: 'PacketList', + message_log: 'MessageLog', + file_keys: 'file_keys_type', + packet_buffer: 'packet_buffer_type' + ) -> None: """Process cached message packets.""" for onion_pub_key in packet_buffer: - if ( - contact_list.has_pub_key(onion_pub_key) - and key_list.has_rx_mk(onion_pub_key) - and packet_buffer[onion_pub_key] - ): + if (contact_list.has_pub_key(onion_pub_key) + and key_list.has_rx_mk(onion_pub_key) + and packet_buffer[onion_pub_key]): ts, packet = packet_buffer[onion_pub_key].pop(0) - process_message_packet( - ts, - packet, - window_list, - packet_list, - contact_list, - key_list, - group_list, - settings, - file_keys, - message_log, - ) + process_message_packet(ts, packet, window_list, packet_list, contact_list, + key_list, group_list, settings, file_keys, message_log) raise SoftError("Cached message processing complete.", output=False) -def process_message_queue( - queues: "queue_dict", - window_list: "WindowList", - contact_list: "ContactList", - group_list: "GroupList", - key_list: "KeyList", - settings: "Settings", - packet_list: "PacketList", - message_log: "MessageLog", - file_keys: "file_keys_type", - packet_buffer: "packet_buffer_type", -) -> None: +def process_message_queue(queues: 'queue_dict', + window_list: 'WindowList', + contact_list: 'ContactList', + group_list: 'GroupList', + key_list: 'KeyList', + settings: 'Settings', + packet_list: 'PacketList', + message_log: 'MessageLog', + file_keys: 'file_keys_type', + packet_buffer: 'packet_buffer_type' + ) -> None: """Check 
message queue for packets.""" message_queue = queues[MESSAGE_DATAGRAM_HEADER] if message_queue.qsize(): - ts, packet = message_queue.get() + ts, packet = message_queue.get() onion_pub_key = packet[:ONION_SERVICE_PUBLIC_KEY_LENGTH] - if contact_list.has_pub_key(onion_pub_key) and key_list.has_rx_mk( - onion_pub_key - ): - process_message_packet( - ts, - packet, - window_list, - packet_list, - contact_list, - key_list, - group_list, - settings, - file_keys, - message_log, - ) + if contact_list.has_pub_key(onion_pub_key) and key_list.has_rx_mk(onion_pub_key): + process_message_packet(ts, packet, window_list, packet_list, contact_list, key_list, + group_list, settings, file_keys, message_log) else: packet_buffer.setdefault(onion_pub_key, []).append((ts, packet)) raise SoftError("Message processing complete.", output=False) -def process_cached_files( - window_list: "WindowList", - contact_list: "ContactList", - settings: "Settings", - file_keys: "file_keys_type", - file_buffer: "file_buffer_type", -) -> None: +def process_cached_files(window_list: 'WindowList', + contact_list: 'ContactList', + settings: 'Settings', + file_keys: 'file_keys_type', + file_buffer: 'file_buffer_type' + ) -> None: """Check if file key has been received for cached file packet.""" if file_buffer: for k in file_buffer: - key_to_remove = b"" + key_to_remove = b'' try: if k in file_keys: key_to_remove = k - ts_, file_ct = file_buffer[k] - dec_key = file_keys[k] + ts_, file_ct = file_buffer[k] + dec_key = file_keys[k] onion_pub_key = k[:ONION_SERVICE_PUBLIC_KEY_LENGTH] - process_file( - ts_, - onion_pub_key, - file_ct, - dec_key, - contact_list, - window_list, - settings, - ) + process_file(ts_, onion_pub_key, file_ct, dec_key, contact_list, window_list, settings) finally: if key_to_remove: file_buffer.pop(k) @@ -334,21 +230,17 @@ def process_cached_files( raise SoftError("Cached file processing complete.", output=False) -def process_file_queue( - queues: "queue_dict", - window_list: "WindowList", - contact_list: "ContactList", - settings: "Settings", - file_keys: "file_keys_type", - file_buffer: "file_buffer_type", -) -> None: +def process_file_queue(queues: 'queue_dict', + window_list: 'WindowList', + contact_list: 'ContactList', + settings: 'Settings', + file_keys: 'file_keys_type', + file_buffer: 'file_buffer_type' + ) -> None: """Check file queue for packets.""" file_queue = queues[FILE_DATAGRAM_HEADER] if file_queue.qsize(): ts, packet = file_queue.get() - new_file( - ts, packet, file_keys, file_buffer, contact_list, window_list, settings - ) - + new_file(ts, packet, file_keys, file_buffer, contact_list, window_list, settings) raise SoftError("File processing complete.", output=False) diff --git a/src/receiver/packet.py b/src/receiver/packet.py index aee51b1..51cd777 100644 --- a/src/receiver/packet.py +++ b/src/receiver/packet.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
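The src/receiver/packet.py hunks that follow reformat `decrypt_assembly_packet`, whose core step is catching the message key up with the sender's hash ratchet counter by hashing the key forward once per missed packet. A minimal standalone sketch of that key schedule, assuming `hashlib.blake2b` in place of TFC's `src.common.crypto.blake2b` wrapper, a 32-byte `SYMMETRIC_KEY_LENGTH`, and an 8-byte big-endian integer encoding:

```python
import hashlib
import struct

SYMMETRIC_KEY_LENGTH = 32  # assumed value of src.common.statics.SYMMETRIC_KEY_LENGTH


def int_to_bytes(value: int) -> bytes:
    """Encode an integer as 8 big-endian bytes (stand-in for src.common.encoding.int_to_bytes)."""
    return struct.pack('!Q', value)


def ratchet_message_key(message_key: bytes, stored_harac: int, offset: int) -> bytes:
    """Advance the message key by `offset` hash ratchet steps.

    Mirrors the catch-up loop in decrypt_assembly_packet: each step hashes
    the previous key together with the hash ratchet counter value.
    """
    for harac in range(stored_harac, stored_harac + offset):
        message_key = hashlib.blake2b(message_key + int_to_bytes(harac),
                                      digest_size=SYMMETRIC_KEY_LENGTH).digest()
    return message_key


if __name__ == '__main__':
    key = ratchet_message_key(bytes(SYMMETRIC_KEY_LENGTH), stored_harac=0, offset=3)
    print(key.hex())
```

Because each step is a one-way hash, compromise of the current message key does not reveal the keys of earlier packets.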
@@ -24,107 +24,64 @@ import typing import zlib from datetime import datetime, timedelta -from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, Sized +from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, Sized import nacl.exceptions -from src.common.crypto import auth_and_decrypt, blake2b, rm_padding_bytes -from src.common.encoding import bytes_to_int, int_to_bytes +from src.common.crypto import auth_and_decrypt, blake2b, rm_padding_bytes +from src.common.encoding import bytes_to_int, int_to_bytes from src.common.exceptions import SoftError -from src.common.input import yes -from src.common.misc import ( - decompress, - readable_size, - separate_header, - separate_headers, - separate_trailer, -) -from src.common.output import m_print -from src.common.statics import ( - ASSEMBLY_PACKET_HEADER_LENGTH, - BLAKE2_DIGEST_LENGTH, - COMMAND, - C_A_HEADER, - C_C_HEADER, - C_E_HEADER, - C_L_HEADER, - C_N_HEADER, - C_S_HEADER, - ENCODED_INTEGER_LENGTH, - FILE, - F_A_HEADER, - F_C_HEADER, - F_E_HEADER, - F_L_HEADER, - F_S_HEADER, - HARAC_CT_LENGTH, - HARAC_WARN_THRESHOLD, - LOCAL_PUBKEY, - MAX_MESSAGE_SIZE, - MESSAGE, - M_A_HEADER, - M_C_HEADER, - M_E_HEADER, - M_L_HEADER, - M_S_HEADER, - ORIGIN_CONTACT_HEADER, - ORIGIN_USER_HEADER, - P_N_HEADER, - RX, - SYMMETRIC_KEY_LENGTH, - TX, - US_BYTE, -) +from src.common.input import yes +from src.common.misc import decompress, readable_size, separate_header, separate_headers, separate_trailer +from src.common.output import m_print +from src.common.statics import (ASSEMBLY_PACKET_HEADER_LENGTH, BLAKE2_DIGEST_LENGTH, COMMAND, C_A_HEADER, C_C_HEADER, + C_E_HEADER, C_L_HEADER, C_N_HEADER, C_S_HEADER, ENCODED_INTEGER_LENGTH, FILE, + F_A_HEADER, F_C_HEADER, F_E_HEADER, F_L_HEADER, F_S_HEADER, HARAC_CT_LENGTH, + HARAC_WARN_THRESHOLD, LOCAL_PUBKEY, MAX_MESSAGE_SIZE, MESSAGE, M_A_HEADER, + M_C_HEADER, M_E_HEADER, M_L_HEADER, M_S_HEADER, ORIGIN_CONTACT_HEADER, + ORIGIN_USER_HEADER, P_N_HEADER, RX, SYMMETRIC_KEY_LENGTH, TX, US_BYTE) from src.receiver.files import process_assembled_file if typing.TYPE_CHECKING: from src.common.db_contacts import Contact, ContactList - from src.common.db_keys import KeyList + from src.common.db_keys import KeyList from src.common.db_settings import Settings - from src.receiver.windows import RxWindow, WindowList + from src.receiver.windows import RxWindow, WindowList -def process_offset( - offset: int, # Number of dropped packets - origin: bytes, # "to/from" preposition - direction: str, # Direction of packet - nick: str, # Nickname of associated contact - window: "RxWindow", # RxWindow object -) -> None: +def process_offset(offset: int, # Number of dropped packets + origin: bytes, # "to/from" preposition + direction: str, # Direction of packet + nick: str, # Nickname of associated contact + window: 'RxWindow' # RxWindow object + ) -> None: """Display warnings about increased offsets. If the offset has increased over the threshold, ask the user to confirm hash ratchet catch up. """ if offset > HARAC_WARN_THRESHOLD and origin == ORIGIN_CONTACT_HEADER: - m_print( - [ - f"Warning! {offset} packets from {nick} were not received.", - f"This might indicate that {offset} most recent packets were ", - f"lost during transmission, or that the contact is attempting ", - f"a DoS attack. You can wait for TFC to attempt to decrypt the ", - "packet, but it might take a very long time or even forever.", - ] - ) + m_print([f"Warning! 
{offset} packets from {nick} were not received.", + f"This might indicate that {offset} most recent packets were ", + f"lost during transmission, or that the contact is attempting ", + f"a DoS attack. You can wait for TFC to attempt to decrypt the ", + "packet, but it might take a very long time or even forever."]) if not yes("Proceed with the decryption?", abort=False, tail=1): raise SoftError(f"Dropped packet from {nick}.", window=window) elif offset: - m_print( - f"Warning! {offset} packet{'s' if offset > 1 else ''} {direction} {nick} were not received." - ) + m_print(f"Warning! {offset} packet{'s' if offset > 1 else ''} {direction} {nick} were not received.") -def decrypt_assembly_packet( - packet: bytes, # Assembly packet ciphertext - onion_pub_key: bytes, # Onion Service pubkey of associated contact - origin: bytes, # Direction of packet - window_list: "WindowList", # WindowList object - contact_list: "ContactList", # ContactList object - key_list: "KeyList", # Keylist object -) -> bytes: # Decrypted assembly packet +def decrypt_assembly_packet(packet: bytes, # Assembly packet ciphertext + onion_pub_key: bytes, # Onion Service pubkey of associated contact + origin: bytes, # Direction of packet + window_list: 'WindowList', # WindowList object + contact_list: 'ContactList', # ContactList object + key_list: 'KeyList' # Keylist object + ) -> bytes: # Decrypted assembly packet """Decrypt assembly packet from contact/local Transmitter. This function authenticates and decrypts incoming message and @@ -147,19 +104,19 @@ def decrypt_assembly_packet( let alone processed. """ ct_harac, ct_assemby_packet = separate_header(packet, header_length=HARAC_CT_LENGTH) - cmd_win = window_list.get_command_window() - command = onion_pub_key == LOCAL_PUBKEY + cmd_win = window_list.get_command_window() + command = onion_pub_key == LOCAL_PUBKEY - p_type = "command" if command else "packet" - direction = "from" if command or (origin == ORIGIN_CONTACT_HEADER) else "sent to" - nick = contact_list.get_nick_by_pub_key(onion_pub_key) + p_type = "command" if command else "packet" + direction = "from" if command or (origin == ORIGIN_CONTACT_HEADER) else "sent to" + nick = contact_list.get_nick_by_pub_key(onion_pub_key) # Load keys - keyset = key_list.get_keyset(onion_pub_key) + keyset = key_list.get_keyset(onion_pub_key) key_dir = TX if origin == ORIGIN_USER_HEADER else RX - header_key = getattr(keyset, f"{key_dir}_hk") # type: bytes - message_key = getattr(keyset, f"{key_dir}_mk") # type: bytes + header_key = getattr(keyset, f'{key_dir}_hk') # type: bytes + message_key = getattr(keyset, f'{key_dir}_mk') # type: bytes if any(k == bytes(SYMMETRIC_KEY_LENGTH) for k in [header_key, message_key]): raise SoftError("Warning! Loaded zero-key for packet decryption.") @@ -168,41 +125,30 @@ def decrypt_assembly_packet( try: harac_bytes = auth_and_decrypt(ct_harac, header_key) except nacl.exceptions.CryptoError: - raise SoftError( - f"Warning! Received {p_type} {direction} {nick} had an invalid hash ratchet MAC.", - window=cmd_win, - ) + raise SoftError(f"Warning! Received {p_type} {direction} {nick} had an invalid hash ratchet MAC.", + window=cmd_win) # Catch up with hash ratchet offset - purp_harac = bytes_to_int(harac_bytes) - stored_harac = getattr(keyset, f"{key_dir}_harac") - offset = purp_harac - stored_harac + purp_harac = bytes_to_int(harac_bytes) + stored_harac = getattr(keyset, f'{key_dir}_harac') + offset = purp_harac - stored_harac if offset < 0: - raise SoftError( - f"Warning! 
Received {p_type} {direction} {nick} had an expired hash ratchet counter.", - window=cmd_win, - ) + raise SoftError(f"Warning! Received {p_type} {direction} {nick} had an expired hash ratchet counter.", + window=cmd_win) process_offset(offset, origin, direction, nick, cmd_win) for harac in range(stored_harac, stored_harac + offset): - message_key = blake2b( - message_key + int_to_bytes(harac), digest_size=SYMMETRIC_KEY_LENGTH - ) + message_key = blake2b(message_key + int_to_bytes(harac), digest_size=SYMMETRIC_KEY_LENGTH) # Decrypt packet try: assembly_packet = auth_and_decrypt(ct_assemby_packet, message_key) except nacl.exceptions.CryptoError: - raise SoftError( - f"Warning! Received {p_type} {direction} {nick} had an invalid MAC.", - window=cmd_win, - ) + raise SoftError(f"Warning! Received {p_type} {direction} {nick} had an invalid MAC.", + window=cmd_win) # Update message key and harac - new_key = blake2b( - message_key + int_to_bytes(stored_harac + offset), - digest_size=SYMMETRIC_KEY_LENGTH, - ) + new_key = blake2b(message_key + int_to_bytes(stored_harac + offset), digest_size=SYMMETRIC_KEY_LENGTH) keyset.update_mk(key_dir, new_key, offset + 1) return assembly_packet @@ -211,51 +157,38 @@ def decrypt_assembly_packet( class Packet(object): """Packet objects collect and keep track of received assembly packets.""" - def __init__( - self, - onion_pub_key: bytes, # Public key of the contact associated with the packet <─┐ - origin: bytes, # Origin of packet (user, contact) <─┼─ Form packet UID - p_type: str, # Packet type (message, file, command) <─┘ - contact: "Contact", # Contact object of contact associated with the packet - settings: "Settings", # Settings object - ) -> None: + def __init__(self, + onion_pub_key: bytes, # Public key of the contact associated with the packet <─┐ + origin: bytes, # Origin of packet (user, contact) <─┼─ Form packet UID + p_type: str, # Packet type (message, file, command) <─┘ + contact: 'Contact', # Contact object of contact associated with the packet + settings: 'Settings' # Settings object + ) -> None: """Create a new Packet object.""" self.onion_pub_key = onion_pub_key - self.contact = contact - self.origin = origin - self.type = p_type - self.settings = settings + self.contact = contact + self.origin = origin + self.type = p_type + self.settings = settings # File transmission metadata self.packets = None # type: Optional[int] - self.time = None # type: Optional[str] - self.size = None # type: Optional[str] - self.name = None # type: Optional[str] + self.time = None # type: Optional[str] + self.size = None # type: Optional[str] + self.name = None # type: Optional[str] - self.sh = {MESSAGE: M_S_HEADER, FILE: F_S_HEADER, COMMAND: C_S_HEADER}[ - self.type - ] - self.lh = {MESSAGE: M_L_HEADER, FILE: F_L_HEADER, COMMAND: C_L_HEADER}[ - self.type - ] - self.ah = {MESSAGE: M_A_HEADER, FILE: F_A_HEADER, COMMAND: C_A_HEADER}[ - self.type - ] - self.eh = {MESSAGE: M_E_HEADER, FILE: F_E_HEADER, COMMAND: C_E_HEADER}[ - self.type - ] - self.ch = {MESSAGE: M_C_HEADER, FILE: F_C_HEADER, COMMAND: C_C_HEADER}[ - self.type - ] - self.nh = {MESSAGE: P_N_HEADER, FILE: P_N_HEADER, COMMAND: C_N_HEADER}[ - self.type - ] + self.sh = {MESSAGE: M_S_HEADER, FILE: F_S_HEADER, COMMAND: C_S_HEADER}[self.type] + self.lh = {MESSAGE: M_L_HEADER, FILE: F_L_HEADER, COMMAND: C_L_HEADER}[self.type] + self.ah = {MESSAGE: M_A_HEADER, FILE: F_A_HEADER, COMMAND: C_A_HEADER}[self.type] + self.eh = {MESSAGE: M_E_HEADER, FILE: F_E_HEADER, COMMAND: C_E_HEADER}[self.type] + self.ch = {MESSAGE: 
M_C_HEADER, FILE: F_C_HEADER, COMMAND: C_C_HEADER}[self.type] + self.nh = {MESSAGE: P_N_HEADER, FILE: P_N_HEADER, COMMAND: C_N_HEADER}[self.type] - self.log_masking_ctr = 0 # type: int + self.log_masking_ctr = 0 # type: int self.assembly_pt_list = [] # type: List[bytes] - self.log_ct_list = [] # type: List[bytes] - self.long_active = False - self.is_complete = False + self.log_ct_list = [] # type: List[bytes] + self.long_active = False + self.is_complete = False def add_masking_packet_to_log_file(self, increase: int = 1) -> None: """Increase `log_masking_ctr` for message and file packets.""" @@ -265,20 +198,20 @@ class Packet(object): def clear_file_metadata(self) -> None: """Clear file metadata.""" self.packets = None - self.time = None - self.size = None - self.name = None + self.time = None + self.size = None + self.name = None def clear_assembly_packets(self) -> None: """Clear packet state.""" self.assembly_pt_list = [] - self.log_ct_list = [] - self.long_active = False - self.is_complete = False + self.log_ct_list = [] + self.long_active = False + self.is_complete = False def new_file_packet(self) -> None: """New file transmission handling logic.""" - name = self.name + name = self.name was_active = self.long_active self.clear_file_metadata() self.clear_assembly_packets() @@ -289,16 +222,10 @@ class Packet(object): if not self.contact.file_reception: self.add_masking_packet_to_log_file() - raise SoftError( - f"Alert! File transmission from {self.contact.nick} but reception is disabled." - ) + raise SoftError(f"Alert! File transmission from {self.contact.nick} but reception is disabled.") if was_active: - m_print( - f"Alert! File '{name}' from {self.contact.nick} never completed.", - head=1, - tail=1, - ) + m_print(f"Alert! File '{name}' from {self.contact.nick} never completed.", head=1, tail=1) def check_long_packet(self) -> None: """Check if the long packet has permission to be extended.""" @@ -311,9 +238,10 @@ class Packet(object): self.clear_assembly_packets() raise SoftError("Alert! 
File reception disabled mid-transfer.") - def process_short_header( - self, packet: bytes, packet_ct: Optional[bytes] = None - ) -> None: + def process_short_header(self, + packet: bytes, + packet_ct: Optional[bytes] = None + ) -> None: """Process short packet.""" if self.long_active: self.add_masking_packet_to_log_file(increase=len(self.assembly_pt_list)) @@ -322,15 +250,16 @@ class Packet(object): self.new_file_packet() self.assembly_pt_list = [packet] - self.long_active = False - self.is_complete = True + self.long_active = False + self.is_complete = True if packet_ct is not None: self.log_ct_list = [packet_ct] - def process_long_header( - self, packet: bytes, packet_ct: Optional[bytes] = None - ) -> None: + def process_long_header(self, + packet: bytes, + packet_ct: Optional[bytes] = None + ) -> None: """Process first packet of long transmission.""" if self.long_active: self.add_masking_packet_to_log_file(increase=len(self.assembly_pt_list)) @@ -338,43 +267,33 @@ class Packet(object): if self.type == FILE: self.new_file_packet() try: - _, no_p_bytes, time_bytes, size_bytes, name_us_data = separate_headers( - packet, - [ASSEMBLY_PACKET_HEADER_LENGTH] + 3 * [ENCODED_INTEGER_LENGTH], - ) + _, no_p_bytes, time_bytes, size_bytes, name_us_data \ + = separate_headers(packet, [ASSEMBLY_PACKET_HEADER_LENGTH] + 3*[ENCODED_INTEGER_LENGTH]) - self.packets = bytes_to_int( - no_p_bytes - ) # added by transmitter.packet.split_to_assembly_packets - self.time = str(timedelta(seconds=bytes_to_int(time_bytes))) - self.size = readable_size(bytes_to_int(size_bytes)) - self.name = name_us_data.split(US_BYTE, 1)[0].decode() + self.packets = bytes_to_int(no_p_bytes) # added by transmitter.packet.split_to_assembly_packets + self.time = str(timedelta(seconds=bytes_to_int(time_bytes))) + self.size = readable_size(bytes_to_int(size_bytes)) + self.name = name_us_data.split(US_BYTE, 1)[0].decode() - m_print( - [ - f"Receiving file from {self.contact.nick}:", - f"{self.name} ({self.size})", - f"ETA {self.time} ({self.packets} packets)", - ], - bold=True, - head=1, - tail=1, - ) + m_print([f'Receiving file from {self.contact.nick}:', + f'{self.name} ({self.size})', + f'ETA {self.time} ({self.packets} packets)'], bold=True, head=1, tail=1) except (struct.error, UnicodeError, ValueError): self.add_masking_packet_to_log_file() raise SoftError("Error: Received file packet had an invalid header.") self.assembly_pt_list = [packet] - self.long_active = True - self.is_complete = False + self.long_active = True + self.is_complete = False if packet_ct is not None: self.log_ct_list = [packet_ct] - def process_append_header( - self, packet: bytes, packet_ct: Optional[bytes] = None - ) -> None: + def process_append_header(self, + packet: bytes, + packet_ct: Optional[bytes] = None + ) -> None: """Process consecutive packet(s) of long transmission.""" self.check_long_packet() self.assembly_pt_list.append(packet) @@ -382,9 +301,10 @@ class Packet(object): if packet_ct is not None: self.log_ct_list.append(packet_ct) - def process_end_header( - self, packet: bytes, packet_ct: Optional[bytes] = None - ) -> None: + def process_end_header(self, + packet: bytes, + packet_ct: Optional[bytes] = None + ) -> None: """Process last packet of long transmission.""" self.check_long_packet() self.assembly_pt_list.append(packet) @@ -396,11 +316,7 @@ class Packet(object): def abort_packet(self, cancel: bool = False) -> None: """Process cancel/noise packet.""" - if ( - self.type == FILE - and self.origin == ORIGIN_CONTACT_HEADER - and self.long_active 
- ): + if self.type == FILE and self.origin == ORIGIN_CONTACT_HEADER and self.long_active: if cancel: message = f"{self.contact.nick} cancelled file." else: @@ -418,31 +334,29 @@ class Packet(object): """Process traffic masking noise packet.""" self.abort_packet() - def add_packet(self, packet: bytes, packet_ct: Optional[bytes] = None) -> None: + def add_packet(self, + packet: bytes, + packet_ct: Optional[bytes] = None + ) -> None: """Add a new assembly packet to the object.""" try: - func_d = { - self.sh: self.process_short_header, - self.lh: self.process_long_header, - self.ah: self.process_append_header, - self.eh: self.process_end_header, - self.ch: self.process_cancel_header, - self.nh: self.process_noise_header, - } # type: Dict[bytes, Callable[[bytes, Optional[bytes]], None]] + func_d = {self.sh: self.process_short_header, + self.lh: self.process_long_header, + self.ah: self.process_append_header, + self.eh: self.process_end_header, + self.ch: self.process_cancel_header, + self.nh: self.process_noise_header + } # type: Dict[bytes, Callable[[bytes, Optional[bytes]], None]] func = func_d[packet[:ASSEMBLY_PACKET_HEADER_LENGTH]] except KeyError: # Erroneous headers are ignored but stored as placeholder data. self.add_masking_packet_to_log_file() - raise SoftError( - "Error: Received packet had an invalid assembly packet header." - ) + raise SoftError("Error: Received packet had an invalid assembly packet header.") func(packet, packet_ct) def assemble_message_packet(self) -> bytes: """Assemble message packet.""" - padded = b"".join( - [p[ASSEMBLY_PACKET_HEADER_LENGTH:] for p in self.assembly_pt_list] - ) + padded = b''.join([p[ASSEMBLY_PACKET_HEADER_LENGTH:] for p in self.assembly_pt_list]) payload = rm_padding_bytes(padded) if len(self.assembly_pt_list) > 1: @@ -457,27 +371,23 @@ class Packet(object): except zlib.error: raise SoftError("Error: Decompression of message failed.") - def assemble_and_store_file( - self, ts: "datetime", onion_pub_key: bytes, window_list: "WindowList" - ) -> None: + def assemble_and_store_file(self, + ts: 'datetime', + onion_pub_key: bytes, + window_list: 'WindowList' + ) -> None: """Assemble file packet and store it.""" - padded = b"".join( - [p[ASSEMBLY_PACKET_HEADER_LENGTH:] for p in self.assembly_pt_list] - ) + padded = b''.join([p[ASSEMBLY_PACKET_HEADER_LENGTH:] for p in self.assembly_pt_list]) payload = rm_padding_bytes(padded) - no_fields = 3 if len(self.assembly_pt_list) > 1 else 2 + no_fields = 3 if len(self.assembly_pt_list) > 1 else 2 *_, payload = separate_headers(payload, no_fields * [ENCODED_INTEGER_LENGTH]) - process_assembled_file( - ts, payload, onion_pub_key, self.contact.nick, self.settings, window_list - ) + process_assembled_file(ts, payload, onion_pub_key, self.contact.nick, self.settings, window_list) def assemble_command_packet(self) -> bytes: """Assemble command packet.""" - padded = b"".join( - [p[ASSEMBLY_PACKET_HEADER_LENGTH:] for p in self.assembly_pt_list] - ) + padded = b''.join([p[ASSEMBLY_PACKET_HEADER_LENGTH:] for p in self.assembly_pt_list]) payload = rm_padding_bytes(padded) if len(self.assembly_pt_list) > 1: @@ -494,11 +404,14 @@ class Packet(object): class PacketList(Iterable[Packet], Sized): """PacketList manages all file, message, and command packets.""" - def __init__(self, settings: "Settings", contact_list: "ContactList") -> None: + def __init__(self, + settings: 'Settings', + contact_list: 'ContactList' + ) -> None: """Create a new PacketList object.""" - self.settings = settings + self.settings = settings 
self.contact_list = contact_list - self.packets = [] # type: List[Packet] + self.packets = [] # type: List[Packet] def __iter__(self) -> Iterator[Packet]: """Iterate over packet list.""" @@ -508,21 +421,22 @@ class PacketList(Iterable[Packet], Sized): """Return number of packets in the packet list.""" return len(self.packets) - def has_packet(self, onion_pub_key: bytes, origin: bytes, p_type: str) -> bool: + def has_packet(self, + onion_pub_key: bytes, + origin: bytes, + p_type: str + ) -> bool: """Return True if a packet with matching selectors exists, else False.""" - return any( - p - for p in self.packets - if ( - p.onion_pub_key == onion_pub_key - and p.origin == origin - and p.type == p_type - ) - ) + return any(p for p in self.packets if (p.onion_pub_key == onion_pub_key + and p.origin == origin + and p.type == p_type)) - def get_packet( - self, onion_pub_key: bytes, origin: bytes, p_type: str, log_access: bool = False - ) -> Packet: + def get_packet(self, + onion_pub_key: bytes, + origin: bytes, + p_type: str, + log_access: bool = False + ) -> Packet: """Get packet based on Onion Service public key, origin, and type. If the packet does not exist, create it. @@ -533,16 +447,8 @@ class PacketList(Iterable[Packet], Sized): else: contact = self.contact_list.get_contact_by_pub_key(onion_pub_key) - self.packets.append( - Packet(onion_pub_key, origin, p_type, contact, self.settings) - ) + self.packets.append(Packet(onion_pub_key, origin, p_type, contact, self.settings)) - return next( - p - for p in self.packets - if ( - p.onion_pub_key == onion_pub_key - and p.origin == origin - and p.type == p_type - ) - ) + return next(p for p in self.packets if (p.onion_pub_key == onion_pub_key + and p.origin == origin + and p.type == p_type)) diff --git a/src/receiver/receiver_loop.py b/src/receiver/receiver_loop.py index e61d6fd..713a7e4 100755 --- a/src/receiver/receiver_loop.py +++ b/src/receiver/receiver_loop.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
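The receiver_loop hunk below splits each datagram into a header, a timestamp field, and a payload, and parses the timestamp as an integer formatted with "%Y%m%d%H%M%S%f". A minimal sketch of that timestamp round-trip; the field width is hypothetical (standing in for DATAGRAM_TIMESTAMP_LENGTH) and plain int.to_bytes/int.from_bytes stand in for the src.common.encoding helpers:

```python
from datetime import datetime

TIMESTAMP_FIELD_LENGTH = 10  # hypothetical width; TFC defines DATAGRAM_TIMESTAMP_LENGTH in src.common.statics


def encode_timestamp(ts: datetime) -> bytes:
    """Encode a datetime as a big-endian integer of the form YYYYMMDDHHMMSSffffff."""
    return int(ts.strftime("%Y%m%d%H%M%S%f")).to_bytes(TIMESTAMP_FIELD_LENGTH, "big")


def decode_timestamp(ts_bytes: bytes) -> datetime:
    """Decode the timestamp field the same way receiver_loop does with strptime."""
    return datetime.strptime(str(int.from_bytes(ts_bytes, "big")), "%Y%m%d%H%M%S%f")


if __name__ == "__main__":
    now = datetime.now()
    assert decode_timestamp(encode_timestamp(now)) == now
```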
@@ -24,36 +24,31 @@ import time import typing from datetime import datetime -from typing import Any, Dict +from typing import Any, Dict -from src.common.encoding import bytes_to_int +from src.common.encoding import bytes_to_int from src.common.exceptions import SoftError -from src.common.misc import ignored, separate_headers -from src.common.output import m_print -from src.common.statics import ( - COMMAND_DATAGRAM_HEADER, - DATAGRAM_HEADER_LENGTH, - DATAGRAM_TIMESTAMP_LENGTH, - FILE_DATAGRAM_HEADER, - GATEWAY_QUEUE, - LOCAL_KEY_DATAGRAM_HEADER, - MESSAGE_DATAGRAM_HEADER, -) +from src.common.misc import ignored, separate_headers +from src.common.output import m_print +from src.common.statics import (COMMAND_DATAGRAM_HEADER, DATAGRAM_HEADER_LENGTH, DATAGRAM_TIMESTAMP_LENGTH, + FILE_DATAGRAM_HEADER, GATEWAY_QUEUE, LOCAL_KEY_DATAGRAM_HEADER, + MESSAGE_DATAGRAM_HEADER) if typing.TYPE_CHECKING: - from multiprocessing import Queue + from multiprocessing import Queue from src.common.gateway import Gateway -def receiver_loop( - queues: Dict[bytes, "Queue[Any]"], gateway: "Gateway", unit_test: bool = False -) -> None: +def receiver_loop(queues: Dict[bytes, 'Queue[Any]'], + gateway: 'Gateway', + unit_test: bool = False + ) -> None: """Decode received packets and forward them to packet queues.""" gateway_queue = queues[GATEWAY_QUEUE] while True: with ignored(EOFError, KeyboardInterrupt): - if not gateway_queue.qsize(): + if gateway_queue.qsize() == 0: time.sleep(0.01) _, packet = gateway_queue.get() @@ -63,26 +58,16 @@ def receiver_loop( except SoftError: continue - header, ts_bytes, payload = separate_headers( - packet, [DATAGRAM_HEADER_LENGTH, DATAGRAM_TIMESTAMP_LENGTH] - ) + header, ts_bytes, payload = separate_headers(packet, [DATAGRAM_HEADER_LENGTH, DATAGRAM_TIMESTAMP_LENGTH]) try: ts = datetime.strptime(str(bytes_to_int(ts_bytes)), "%Y%m%d%H%M%S%f") except (ValueError, struct.error): - m_print( - "Error: Failed to decode timestamp in the received packet.", - head=1, - tail=1, - ) + m_print("Error: Failed to decode timestamp in the received packet.", head=1, tail=1) continue - if header in [ - MESSAGE_DATAGRAM_HEADER, - FILE_DATAGRAM_HEADER, - COMMAND_DATAGRAM_HEADER, - LOCAL_KEY_DATAGRAM_HEADER, - ]: + if header in [MESSAGE_DATAGRAM_HEADER, FILE_DATAGRAM_HEADER, + COMMAND_DATAGRAM_HEADER, LOCAL_KEY_DATAGRAM_HEADER]: queues[header].put((ts, payload)) if unit_test: diff --git a/src/receiver/windows.py b/src/receiver/windows.py index 23615d9..8901663 100644 --- a/src/receiver/windows.py +++ b/src/receiver/windows.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
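The src/receiver/windows.py hunks that follow restore the aligned layout of RxWindow, including the print path that wraps messages so continuation lines stay indented under the sender's handle. A small illustration of that textwrap technique, using shutil.get_terminal_size in place of TFC's get_terminal_width helper:

```python
import shutil
import textwrap


def print_with_handle(handle: str, message: str) -> None:
    """Wrap a message so continuation lines stay aligned under the handle."""
    width = shutil.get_terminal_size().columns
    wrapper = textwrap.TextWrapper(width=width,
                                   initial_indent=handle,
                                   subsequent_indent=len(handle) * ' ')
    print(wrapper.fill(message))


if __name__ == '__main__':
    print_with_handle("12:00:00.00 Alice: ", "a long message " * 20)
```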
@@ -25,41 +25,22 @@ import textwrap import typing from datetime import datetime -from typing import Any, Dict, Iterable, Iterator, List, Optional, Tuple +from typing import Any, Dict, Iterable, Iterator, List, Optional, Tuple -from src.common.encoding import ( - b58encode, - pub_key_to_onion_address, - pub_key_to_short_address, -) +from src.common.encoding import b58encode, pub_key_to_onion_address, pub_key_to_short_address from src.common.exceptions import SoftError -from src.common.misc import get_terminal_width -from src.common.output import clear_screen, m_print, print_on_previous_line -from src.common.statics import ( - BOLD_ON, - EVENT, - FILE, - FILE_TRANSFER_INDENT, - GROUP_ID_LENGTH, - GROUP_MSG_ID_LENGTH, - ME, - NORMAL_TEXT, - ONION_SERVICE_PUBLIC_KEY_LENGTH, - ORIGIN_CONTACT_HEADER, - ORIGIN_USER_HEADER, - WIN_TYPE_COMMAND, - WIN_TYPE_CONTACT, - WIN_TYPE_FILE, - WIN_TYPE_GROUP, - WIN_UID_FILE, - WIN_UID_COMMAND, -) +from src.common.misc import get_terminal_width +from src.common.output import clear_screen, m_print, print_on_previous_line +from src.common.statics import (BOLD_ON, EVENT, FILE, FILE_TRANSFER_INDENT, GROUP_ID_LENGTH, GROUP_MSG_ID_LENGTH, ME, + NORMAL_TEXT, ONION_SERVICE_PUBLIC_KEY_LENGTH, ORIGIN_CONTACT_HEADER, + ORIGIN_USER_HEADER, WIN_TYPE_COMMAND, WIN_TYPE_CONTACT, WIN_TYPE_FILE, + WIN_TYPE_GROUP, WIN_UID_FILE, WIN_UID_COMMAND) if typing.TYPE_CHECKING: from src.common.db_contacts import Contact, ContactList - from src.common.db_groups import GroupList + from src.common.db_groups import GroupList from src.common.db_settings import Settings - from src.receiver.packet import Packet, PacketList + from src.receiver.packet import Packet, PacketList MsgTuple = Tuple[datetime, str, bytes, bytes, bool, bool] @@ -71,51 +52,50 @@ class RxWindow(Iterable[MsgTuple]): their own windows, accessible with separate commands. 
""" - def __init__( - self, - uid: bytes, - contact_list: "ContactList", - group_list: "GroupList", - settings: "Settings", - packet_list: "PacketList", - ) -> None: + def __init__(self, + uid: bytes, + contact_list: 'ContactList', + group_list: 'GroupList', + settings: 'Settings', + packet_list: 'PacketList' + ) -> None: """Create a new RxWindow object.""" - self.uid = uid + self.uid = uid self.contact_list = contact_list - self.group_list = group_list - self.settings = settings - self.packet_list = packet_list + self.group_list = group_list + self.settings = settings + self.packet_list = packet_list - self.is_active = False - self.contact = None - self.group = None + self.is_active = False + self.contact = None + self.group = None self.group_msg_id = os.urandom(GROUP_MSG_ID_LENGTH) - self.window_contacts = [] # type: List[Contact] - self.message_log = [] # type: List[MsgTuple] - self.handle_dict = dict() # type: Dict[bytes, str] + self.window_contacts = [] # type: List[Contact] + self.message_log = [] # type: List[MsgTuple] + self.handle_dict = dict() # type: Dict[bytes, str] self.previous_msg_ts = datetime.now() self.unread_messages = 0 if self.uid == WIN_UID_COMMAND: - self.type = WIN_TYPE_COMMAND # type: str - self.name = self.type # type: str + self.type = WIN_TYPE_COMMAND # type: str + self.name = self.type # type: str self.window_contacts = [] elif self.uid == WIN_UID_FILE: - self.type = WIN_TYPE_FILE + self.type = WIN_TYPE_FILE self.packet_list = packet_list elif self.uid in self.contact_list.get_list_of_pub_keys(): - self.type = WIN_TYPE_CONTACT - self.contact = self.contact_list.get_contact_by_pub_key(uid) - self.name = self.contact.nick + self.type = WIN_TYPE_CONTACT + self.contact = self.contact_list.get_contact_by_pub_key(uid) + self.name = self.contact.nick self.window_contacts = [self.contact] elif self.uid in self.group_list.get_list_of_group_ids(): - self.type = WIN_TYPE_GROUP - self.group = self.group_list.get_group_by_id(self.uid) - self.name = self.group.name + self.type = WIN_TYPE_GROUP + self.group = self.group_list.get_group_by_id(self.uid) + self.name = self.group.name self.window_contacts = self.group.members else: @@ -138,19 +118,14 @@ class RxWindow(Iterable[MsgTuple]): def add_contacts(self, pub_keys: List[bytes]) -> None: """Add contact objects to the window.""" - self.window_contacts += [ - self.contact_list.get_contact_by_pub_key(k) - for k in pub_keys - if not self.has_contact(k) and self.contact_list.has_pub_key(k) - ] + self.window_contacts += [self.contact_list.get_contact_by_pub_key(k) for k in pub_keys + if not self.has_contact(k) and self.contact_list.has_pub_key(k)] def remove_contacts(self, pub_keys: List[bytes]) -> None: """Remove contact objects from the window.""" to_remove = set(pub_keys) & set([m.onion_pub_key for m in self.window_contacts]) if to_remove: - self.window_contacts = [ - c for c in self.window_contacts if c.onion_pub_key not in to_remove - ] + self.window_contacts = [c for c in self.window_contacts if c.onion_pub_key not in to_remove] def reset_window(self) -> None: """Reset the ephemeral message log of the window.""" @@ -183,40 +158,33 @@ class RxWindow(Iterable[MsgTuple]): for k in pub_keys: self.update_handle_dict(k) - def get_handle( - self, - time_stamp: "datetime", # Timestamp of message to be printed - onion_pub_key: bytes, # Onion Service public key of contact (used as lookup for handles) - origin: bytes, # Determines whether to use "Me" or nick of contact as handle - whisper: bool = False, # When True, displays (whisper) 
specifier next to handle - event_msg: bool = False, # When True, sets handle to "-!-" - ) -> str: # Handle to use + def get_handle(self, + time_stamp: 'datetime', # Timestamp of message to be printed + onion_pub_key: bytes, # Onion Service public key of contact (used as lookup for handles) + origin: bytes, # Determines whether to use "Me" or nick of contact as handle + whisper: bool = False, # When True, displays (whisper) specifier next to handle + event_msg: bool = False # When True, sets handle to "-!-" + ) -> str: # Handle to use """Returns indented handle complete with headers and trailers.""" - time_stamp_str = time_stamp.strftime("%H:%M:%S.%f")[:-4] + time_stamp_str = time_stamp.strftime('%H:%M:%S.%f')[:-4] if onion_pub_key == WIN_UID_COMMAND or event_msg: handle = EVENT - ending = " " + ending = ' ' else: - handle = ( - self.handle_dict[onion_pub_key] - if origin == ORIGIN_CONTACT_HEADER - else ME - ) + handle = self.handle_dict[onion_pub_key] if origin == ORIGIN_CONTACT_HEADER else ME handles = list(self.handle_dict.values()) + [ME] - indent = max(len(v) for v in handles) - len(handle) if self.is_active else 0 - handle = indent * " " + handle + indent = max(len(v) for v in handles) - len(handle) if self.is_active else 0 + handle = indent * ' ' + handle # Handle specifiers for messages to inactive window if not self.is_active: - handle += { - WIN_TYPE_GROUP: f" (group {self.name})", - WIN_TYPE_CONTACT: f" (private message)", - }.get(self.type, "") + handle += {WIN_TYPE_GROUP: f" (group {self.name})", + WIN_TYPE_CONTACT: f" (private message)"}.get(self.type, '') if whisper: handle += " (whisper)" - ending = ": " + ending = ': ' handle = f"{time_stamp_str} {handle}{ending}" @@ -232,83 +200,53 @@ class RxWindow(Iterable[MsgTuple]): handle = self.get_handle(ts, onion_pub_key, origin, whisper, event_msg) # Check if message content needs to be changed to privacy-preserving notification - if ( - not self.is_active - and not self.settings.new_message_notify_preview - and self.uid != WIN_UID_COMMAND - ): - trailer = "s" if self.unread_messages > 0 else "" - message = ( - BOLD_ON - + f"{self.unread_messages + 1} unread message{trailer}" - + NORMAL_TEXT - ) + if not self.is_active and not self.settings.new_message_notify_preview and self.uid != WIN_UID_COMMAND: + trailer = 's' if self.unread_messages > 0 else '' + message = BOLD_ON + f"{self.unread_messages + 1} unread message{trailer}" + NORMAL_TEXT # Wrap message - wrapper = textwrap.TextWrapper( - width=get_terminal_width(), - initial_indent=handle, - subsequent_indent=len(handle) * " ", - ) + wrapper = textwrap.TextWrapper(width=get_terminal_width(), + initial_indent=handle, + subsequent_indent=len(handle)*' ') wrapped = wrapper.fill(message) - if wrapped == "": + if wrapped == '': wrapped = handle # Add bolding unless export file is provided - bold_on, bold_off, f_name = ( - (BOLD_ON, NORMAL_TEXT, sys.stdout) if file is None else ("", "", file) - ) - wrapped = bold_on + wrapped[: len(handle)] + bold_off + wrapped[len(handle) :] + bold_on, bold_off, f_name = (BOLD_ON, NORMAL_TEXT, sys.stdout) if file is None else ('', '', file) + wrapped = bold_on + wrapped[:len(handle)] + bold_off + wrapped[len(handle):] if self.is_active: if self.previous_msg_ts.date() != ts.date(): - print( - bold_on + f"00:00 -!- Day changed to {str(ts.date())}" + bold_off, - file=f_name, - ) + print(bold_on + f"00:00 -!- Day changed to {str(ts.date())}" + bold_off, file=f_name) print(wrapped, file=f_name) else: if onion_pub_key != WIN_UID_COMMAND: 
self.unread_messages += 1 - if ( - ( - self.type == WIN_TYPE_CONTACT - and self.contact is not None - and self.contact.notifications - ) - or ( - self.type == WIN_TYPE_GROUP - and self.group is not None - and self.group.notifications - ) - or (self.type == WIN_TYPE_COMMAND) - ): + if (self.type == WIN_TYPE_CONTACT and self.contact is not None and self.contact.notifications) \ + or (self.type == WIN_TYPE_GROUP and self.group is not None and self.group.notifications) \ + or (self.type == WIN_TYPE_COMMAND): - lines = wrapped.split("\n") + lines = wrapped.split('\n') if len(lines) > 1: - print( - lines[0][:-1] + "…" - ) # Preview only first line of the long message + print(lines[0][:-1] + '…') # Preview only first line of the long message else: print(wrapped) - print_on_previous_line( - delay=self.settings.new_message_notify_duration, flush=True - ) + print_on_previous_line(delay=self.settings.new_message_notify_duration, flush=True) self.previous_msg_ts = ts - def add_new( - self, - timestamp: "datetime", # The timestamp of the received message - message: str, # The content of the message - onion_pub_key: bytes = WIN_UID_COMMAND, # The Onion Service public key of associated contact - origin: bytes = ORIGIN_USER_HEADER, # The direction of the message - output: bool = False, # When True, displays message while adding it to message_log - whisper: bool = False, # When True, displays message as whisper message - event_msg: bool = False, # When True, uses "-!-" as message handle - ) -> None: + def add_new(self, + timestamp: 'datetime', # The timestamp of the received message + message: str, # The content of the message + onion_pub_key: bytes = WIN_UID_COMMAND, # The Onion Service public key of associated contact + origin: bytes = ORIGIN_USER_HEADER, # The direction of the message + output: bool = False, # When True, displays message while adding it to message_log + whisper: bool = False, # When True, displays message as whisper message + event_msg: bool = False # When True, uses "-!-" as message handle + ) -> None: """Add message tuple to message log and optionally print it.""" self.update_handle_dict(onion_pub_key) @@ -319,7 +257,7 @@ class RxWindow(Iterable[MsgTuple]): def redraw(self, file: Any = None) -> None: """Print all messages received to the window.""" - old_messages = len(self.message_log) - self.unread_messages + old_messages = len(self.message_log) - self.unread_messages self.unread_messages = 0 if file is None: @@ -330,39 +268,26 @@ class RxWindow(Iterable[MsgTuple]): self.create_handle_dict(self.message_log) for i, msg_tuple in enumerate(self.message_log): if i == old_messages: - print( - "\n" - + " Unread Messages ".center(get_terminal_width(), "-") - + "\n" - ) + print('\n' + ' Unread Messages '.center(get_terminal_width(), '-') + '\n') self.print(msg_tuple, file) else: - m_print( - f"This window for {self.name} is currently empty.", - bold=True, - head=1, - tail=1, - ) + m_print(f"This window for {self.name} is currently empty.", bold=True, head=1, tail=1) def redraw_file_win(self) -> None: """Draw file transmission window progress bars.""" # Initialize columns - c1 = ["File name"] - c2 = ["Size"] - c3 = ["Sender"] - c4 = ["Complete"] + c1 = ['File name'] + c2 = ['Size'] + c3 = ['Sender'] + c4 = ['Complete'] # Populate columns with file transmission status data for p in self.packet_list: # type: Packet if p.type == FILE and len(p.assembly_pt_list) > 0: - if ( - p.name is not None - and p.assembly_pt_list is not None - and p.size is not None - and p.packets is not None - ): + if ( 
p.name is not None and p.assembly_pt_list is not None + and p.size is not None and p.packets is not None): c1.append(p.name) c2.append(p.size) @@ -370,62 +295,44 @@ class RxWindow(Iterable[MsgTuple]): c4.append(f"{len(p.assembly_pt_list) / p.packets * 100:.2f}%") if len(c1) <= 1: - m_print( - "No file transmissions currently in progress.", - bold=True, - head=1, - tail=1, - ) + m_print("No file transmissions currently in progress.", bold=True, head=1, tail=1) print_on_previous_line(reps=3, delay=0.1) return None # Calculate column widths - c1w, c2w, c3w, c4w = [ - max(len(v) for v in column) + FILE_TRANSFER_INDENT - for column in [c1, c2, c3, c4] - ] + c1w, c2w, c3w, c4w = [max(len(v) for v in column) + FILE_TRANSFER_INDENT for column in [c1, c2, c3, c4]] # Align columns by adding whitespace between fields of each line - lines = [ - f"{f1:{c1w}}{f2:{c2w}}{f3:{c3w}}{f4:{c4w}}" - for f1, f2, f3, f4 in zip(c1, c2, c3, c4) - ] + lines = [f'{f1:{c1w}}{f2:{c2w}}{f3:{c3w}}{f4:{c4w}}' for f1, f2, f3, f4 in zip(c1, c2, c3, c4)] # Add a terminal-wide line between the column names and the data - lines.insert(1, get_terminal_width() * "─") + lines.insert(1, get_terminal_width() * '─') # Print the file transfer list - print("\n" + "\n".join(lines) + "\n") - print_on_previous_line(reps=len(lines) + 2, delay=0.1) + print('\n' + '\n'.join(lines) + '\n') + print_on_previous_line(reps=len(lines)+2, delay=0.1) class WindowList(Iterable[RxWindow]): """WindowList manages a list of Window objects.""" - def __init__( - self, - settings: "Settings", - contact_list: "ContactList", - group_list: "GroupList", - packet_list: "PacketList", - ) -> None: + def __init__(self, + settings: 'Settings', + contact_list: 'ContactList', + group_list: 'GroupList', + packet_list: 'PacketList' + ) -> None: """Create a new WindowList object.""" - self.settings = settings + self.settings = settings self.contact_list = contact_list - self.group_list = group_list - self.packet_list = packet_list + self.group_list = group_list + self.packet_list = packet_list self.active_win = None # type: Optional[RxWindow] - self.windows = [ - RxWindow( - uid, self.contact_list, self.group_list, self.settings, self.packet_list - ) - for uid in ( - [WIN_UID_COMMAND, WIN_UID_FILE] - + self.contact_list.get_list_of_pub_keys() - + self.group_list.get_list_of_group_ids() - ) - ] + self.windows = [RxWindow(uid, self.contact_list, self.group_list, self.settings, self.packet_list) + for uid in ([WIN_UID_COMMAND, WIN_UID_FILE] + + self.contact_list.get_list_of_pub_keys() + + self.group_list.get_list_of_group_ids())] if self.contact_list.has_local_contact(): self.set_active_rx_window(WIN_UID_COMMAND) @@ -453,21 +360,13 @@ class WindowList(Iterable[RxWindow]): """Return list of group windows.""" return [w for w in self.windows if w.type == WIN_TYPE_GROUP] - def get_window(self, uid: bytes) -> "RxWindow": + def get_window(self, uid: bytes) -> 'RxWindow': """Return window that matches the specified UID. Create window if it does not exist. 
""" if not self.has_window(uid): - self.windows.append( - RxWindow( - uid, - self.contact_list, - self.group_list, - self.settings, - self.packet_list, - ) - ) + self.windows.append(RxWindow(uid, self.contact_list, self.group_list, self.settings, self.packet_list)) return next(w for w in self.windows if w.uid == uid) @@ -476,7 +375,7 @@ class WindowList(Iterable[RxWindow]): if self.active_win is not None and self.active_win.uid == WIN_UID_FILE: self.active_win.redraw_file_win() - def get_command_window(self) -> "RxWindow": + def get_command_window(self) -> 'RxWindow': """Return command window.""" return self.get_window(WIN_UID_COMMAND) @@ -484,7 +383,7 @@ class WindowList(Iterable[RxWindow]): """Select new active window.""" if self.active_win is not None: self.active_win.is_active = False - self.active_win = self.get_window(uid) + self.active_win = self.get_window(uid) self.active_win.is_active = True if self.active_win.uid == WIN_UID_FILE: diff --git a/src/relay/__init__.py b/src/relay/__init__.py index 6eb560e..833769a 100644 --- a/src/relay/__init__.py +++ b/src/relay/__init__.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. diff --git a/src/relay/client.py b/src/relay/client.py index b9c42a9..c5c178f 100644 --- a/src/relay/client.py +++ b/src/relay/client.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -24,81 +24,49 @@ import hashlib import time import typing -from datetime import datetime +from datetime import datetime from multiprocessing import Process, Queue -from typing import Any, Dict, List, Tuple +from typing import Any, Dict, List, Tuple import requests from cryptography.hazmat.primitives.asymmetric.x448 import X448PublicKey, X448PrivateKey -from src.common.encoding import ( - b58encode, - int_to_bytes, - onion_address_to_pub_key, - pub_key_to_onion_address, -) -from src.common.encoding import pub_key_to_short_address +from src.common.encoding import b58encode, int_to_bytes, onion_address_to_pub_key, pub_key_to_onion_address +from src.common.encoding import pub_key_to_short_address from src.common.exceptions import SoftError -from src.common.misc import ( - ignored, - separate_header, - split_byte_string, - validate_onion_addr, -) -from src.common.output import m_print, print_key, rp_print -from src.common.statics import ( - CLIENT_OFFLINE_THRESHOLD, - CONTACT_MGMT_QUEUE, - CONTACT_REQ_QUEUE, - C_REQ_MGMT_QUEUE, - C_REQ_STATE_QUEUE, - DATAGRAM_HEADER_LENGTH, - DST_MESSAGE_QUEUE, - FILE_DATAGRAM_HEADER, - GROUP_ID_LENGTH, - GROUP_MGMT_QUEUE, - GROUP_MSG_EXIT_GROUP_HEADER, - GROUP_MSG_INVITE_HEADER, - GROUP_MSG_JOIN_HEADER, - GROUP_MSG_MEMBER_ADD_HEADER, - GROUP_MSG_MEMBER_REM_HEADER, - GROUP_MSG_QUEUE, - MESSAGE_DATAGRAM_HEADER, - ONION_SERVICE_PUBLIC_KEY_LENGTH, - ORIGIN_CONTACT_HEADER, - PUBLIC_KEY_DATAGRAM_HEADER, - RELAY_CLIENT_MAX_DELAY, - RELAY_CLIENT_MIN_DELAY, - RP_ADD_CONTACT_HEADER, - RP_REMOVE_CONTACT_HEADER, - TFC_PUBLIC_KEY_LENGTH, - TOR_DATA_QUEUE, - UNIT_TEST_QUEUE, - URL_TOKEN_LENGTH, - URL_TOKEN_QUEUE, -) +from src.common.misc import ignored, separate_header, split_byte_string, validate_onion_addr +from src.common.output import m_print, print_key, rp_print +from src.common.statics import (ACCOUNT_SEND_QUEUE, + CLIENT_OFFLINE_THRESHOLD, CONTACT_MGMT_QUEUE, CONTACT_REQ_QUEUE, C_REQ_MGMT_QUEUE, + 
C_REQ_STATE_QUEUE, DATAGRAM_HEADER_LENGTH, DST_MESSAGE_QUEUE, + FILE_DATAGRAM_HEADER, GROUP_ID_LENGTH, GROUP_MGMT_QUEUE, + GROUP_MSG_EXIT_GROUP_HEADER, GROUP_MSG_INVITE_HEADER, GROUP_MSG_JOIN_HEADER, + GROUP_MSG_MEMBER_ADD_HEADER, GROUP_MSG_MEMBER_REM_HEADER, GROUP_MSG_QUEUE, + MESSAGE_DATAGRAM_HEADER, ONION_SERVICE_PUBLIC_KEY_LENGTH, + ORIGIN_CONTACT_HEADER, PUB_KEY_SEND_QUEUE, + PUBLIC_KEY_DATAGRAM_HEADER, RELAY_CLIENT_MAX_DELAY, RELAY_CLIENT_MIN_DELAY, + RP_ADD_CONTACT_HEADER, RP_REMOVE_CONTACT_HEADER, TFC_PUBLIC_KEY_LENGTH, + TOR_DATA_QUEUE, UNIT_TEST_QUEUE, URL_TOKEN_LENGTH, URL_TOKEN_QUEUE) if typing.TYPE_CHECKING: from src.common.gateway import Gateway - from requests.sessions import Session - + from requests.sessions import Session QueueDict = Dict[bytes, Queue[Any]] -def client_scheduler( - queues: "QueueDict", - gateway: "Gateway", - ut_private_key: X448PrivateKey, - unit_test: bool = False, -) -> None: +def client_scheduler(queues: 'QueueDict', + gateway: 'Gateway', + url_token_private_key: X448PrivateKey, + unit_test: bool = False + ) -> None: """Manage `client` processes.""" proc_dict = dict() # type: Dict[bytes, Process] # Wait for Tor port from `onion_service` process. while True: with ignored(EOFError, KeyboardInterrupt): - while not queues[TOR_DATA_QUEUE].qsize(): + while queues[TOR_DATA_QUEUE].qsize() == 0: time.sleep(0.1) tor_port, onion_addr_user = queues[TOR_DATA_QUEUE].get() break @@ -106,28 +74,16 @@ def client_scheduler( while True: with ignored(EOFError, KeyboardInterrupt): - while not queues[CONTACT_MGMT_QUEUE].qsize(): + while queues[CONTACT_MGMT_QUEUE].qsize() == 0: time.sleep(0.1) - command, ser_public_keys, is_existing_contact = queues[ - CONTACT_MGMT_QUEUE - ].get() # type: str, bytes, bool + command, ser_public_keys, is_existing_contact = queues[CONTACT_MGMT_QUEUE].get() - onion_pub_keys = split_byte_string( - ser_public_keys, ONION_SERVICE_PUBLIC_KEY_LENGTH - ) + onion_pub_keys = split_byte_string(ser_public_keys, ONION_SERVICE_PUBLIC_KEY_LENGTH) if command == RP_ADD_CONTACT_HEADER: - add_new_client_process( - gateway, - is_existing_contact, - onion_addr_user, - onion_pub_keys, - proc_dict, - queues, - tor_port, - ut_private_key, - ) + add_new_client_process(gateway, is_existing_contact, onion_addr_user, onion_pub_keys, + proc_dict, queues, tor_port, url_token_private_key) elif command == RP_REMOVE_CONTACT_HEADER: remove_client_process(onion_pub_keys, proc_dict) @@ -136,37 +92,27 @@ def client_scheduler( break -def add_new_client_process( - gateway: "Gateway", - is_existing_contact: bool, - onion_addr_user: str, - onion_pub_keys: List[bytes], - proc_dict: Dict[bytes, Process], - queues: "QueueDict", - tor_port: int, - url_token_private_key: X448PrivateKey, -) -> None: +def add_new_client_process(gateway: 'Gateway', + is_existing_contact: bool, + onion_addr_user: str, + onion_pub_keys: List[bytes], + proc_dict: Dict[bytes, Process], + queues: 'QueueDict', + tor_port: int, + url_token_private_key: X448PrivateKey + ) -> None: """Add new client process.""" for onion_pub_key in onion_pub_keys: if onion_pub_key not in proc_dict: - onion_addr_user = "" if is_existing_contact else onion_addr_user - proc_dict[onion_pub_key] = Process( - target=client, - args=( - onion_pub_key, - queues, - url_token_private_key, - tor_port, - gateway, - onion_addr_user, - ), - ) + onion_addr_user = '' if is_existing_contact else onion_addr_user + proc_dict[onion_pub_key] = Process(target=client, args=(onion_pub_key, queues, url_token_private_key, + tor_port, gateway, 
onion_addr_user)) proc_dict[onion_pub_key].start() -def remove_client_process( - onion_pub_keys: List[bytes], proc_dict: Dict[bytes, Process] -) -> None: +def remove_client_process(onion_pub_keys: List[bytes], + proc_dict: Dict[bytes, Process] + ) -> None: """Remove client process.""" for onion_pub_key in onion_pub_keys: if onion_pub_key in proc_dict: @@ -176,27 +122,24 @@ def remove_client_process( rp_print(f"Removed {pub_key_to_short_address(onion_pub_key)}", bold=True) -def client( - onion_pub_key: bytes, - queues: "QueueDict", - url_token_private_key: X448PrivateKey, - tor_port: str, - gateway: "Gateway", - onion_addr_user: str, - unit_test: bool = False, -) -> None: +def client(onion_pub_key: bytes, + queues: 'QueueDict', + url_token_private_key: X448PrivateKey, + tor_port: str, + gateway: 'Gateway', + onion_addr_user: str, + unit_test: bool = False + ) -> None: """Load packets from contact's Onion Service.""" - cached_pk = "" - short_addr = pub_key_to_short_address(onion_pub_key) - onion_addr = pub_key_to_onion_address(onion_pub_key) + cached_pk = '' + short_addr = pub_key_to_short_address(onion_pub_key) + onion_addr = pub_key_to_onion_address(onion_pub_key) check_delay = RELAY_CLIENT_MIN_DELAY - is_online = False + is_online = False - session = requests.session() - session.proxies = { - "http": f"socks5h://127.0.0.1:{tor_port}", - "https": f"socks5h://127.0.0.1:{tor_port}", - } + session = requests.session() + session.proxies = {'http': f'socks5h://127.0.0.1:{tor_port}', + 'https': f'socks5h://127.0.0.1:{tor_port}'} rp_print(f"Connecting to {short_addr}...", bold=True) @@ -210,42 +153,27 @@ def client( time.sleep(check_delay) url_token_public_key_hex = load_url_token(onion_addr, session) - is_online, check_delay = manage_contact_status( - url_token_public_key_hex, check_delay, is_online, short_addr - ) + is_online, check_delay = manage_contact_status(url_token_public_key_hex, + check_delay, is_online, short_addr) if not is_online: continue - url_token, cached_pk = update_url_token( - url_token_private_key, - url_token_public_key_hex, - cached_pk, - onion_pub_key, - queues, - ) + url_token, cached_pk = update_url_token(url_token_private_key, url_token_public_key_hex, + cached_pk, onion_pub_key, queues) - get_data_loop( - onion_addr, - url_token, - short_addr, - onion_pub_key, - queues, - session, - gateway, - ) + get_data_loop(onion_addr, url_token, short_addr, onion_pub_key, queues, session, gateway) if unit_test: break -def update_url_token( - ut_private_key: "X448PrivateKey", - ut_pubkey_hex: str, - cached_pk: str, - onion_pub_key: bytes, - queues: "QueueDict", -) -> Tuple[str, str]: +def update_url_token(url_token_private_key: 'X448PrivateKey', + ut_pubkey_hex: str, + cached_pk: str, + onion_pub_key: bytes, + queues: 'QueueDict' + ) -> Tuple[str, str]: """Update URL token for contact. When contact's URL token public key changes, update URL token. 
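The `update_url_token` hunks around this point keep the scheme the docstring describes: the Relay Program derives a per-contact URL token by running an X448 exchange against the contact's published public key and hashing the shared secret with BLAKE2b. The standalone sketch below is not part of the patch; it only illustrates that derivation with the same `cryptography` and `hashlib` calls, and the value 32 is an assumed placeholder for the URL_TOKEN_LENGTH constant from src.common.statics, whose definition is outside this diff.

import hashlib

from cryptography.hazmat.primitives.asymmetric.x448 import X448PrivateKey, X448PublicKey
from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat

URL_TOKEN_LENGTH = 32  # placeholder; the real constant lives in src.common.statics


def derive_url_token(private_key: X448PrivateKey, peer_public_key_bytes: bytes) -> str:
    """Derive a URL token from our X448 private key and the peer's raw public key."""
    shared_secret = private_key.exchange(X448PublicKey.from_public_bytes(peer_public_key_bytes))
    return hashlib.blake2b(shared_secret, digest_size=URL_TOKEN_LENGTH).hexdigest()


# Both endpoints compute the same hex digest, so only a peer that holds the matching
# X448 private key can produce a token the server will later accept.
alice_key, bob_key = X448PrivateKey.generate(), X448PrivateKey.generate()
alice_pub = alice_key.public_key().public_bytes(Encoding.Raw, PublicFormat.Raw)
bob_pub   = bob_key.public_key().public_bytes(Encoding.Raw, PublicFormat.Raw)
assert derive_url_token(alice_key, bob_pub) == derive_url_token(bob_key, alice_pub)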
@@ -256,21 +184,13 @@ def update_url_token( try: public_key = bytes.fromhex(ut_pubkey_hex) - if len(public_key) != TFC_PUBLIC_KEY_LENGTH or public_key == bytes( - TFC_PUBLIC_KEY_LENGTH - ): + if len(public_key) != TFC_PUBLIC_KEY_LENGTH or public_key == bytes(TFC_PUBLIC_KEY_LENGTH): raise ValueError - shared_secret = ut_private_key.exchange( - X448PublicKey.from_public_bytes(public_key) - ) - url_token = hashlib.blake2b( - shared_secret, digest_size=URL_TOKEN_LENGTH - ).hexdigest() + shared_secret = url_token_private_key.exchange(X448PublicKey.from_public_bytes(public_key)) + url_token = hashlib.blake2b(shared_secret, digest_size=URL_TOKEN_LENGTH).hexdigest() - queues[URL_TOKEN_QUEUE].put( - (onion_pub_key, url_token) - ) # Update Flask server's URL token for contact + queues[URL_TOKEN_QUEUE].put((onion_pub_key, url_token)) # Update Flask server's URL token for contact return url_token, ut_pubkey_hex @@ -278,9 +198,11 @@ def update_url_token( raise SoftError("URL token derivation failed.", output=False) -def manage_contact_status( - ut_pubkey_hex: str, check_delay: float, is_online: bool, short_addr: str -) -> Tuple[bool, float]: +def manage_contact_status(ut_pubkey_hex: str, + check_delay: float, + is_online: bool, + short_addr: str + ) -> Tuple[bool, float]: """Manage online status of contact based on availability of URL token's public key.""" if ut_pubkey_hex == "": if check_delay < RELAY_CLIENT_MAX_DELAY: @@ -298,52 +220,45 @@ def manage_contact_status( return is_online, check_delay -def load_url_token(onion_addr: str, session: "Session") -> str: +def load_url_token(onion_addr: str, session: 'Session') -> str: """Load URL token for contact.""" try: ut_pubkey_hex = session.get(f"http://{onion_addr}.onion/", timeout=5).text except requests.exceptions.RequestException: - ut_pubkey_hex = "" + ut_pubkey_hex = '' return ut_pubkey_hex -def send_contact_request( - onion_addr: str, onion_addr_user: str, session: "Session" -) -> None: +def send_contact_request(onion_addr: str, + onion_addr_user: str, + session: 'Session' + ) -> None: """Send contact request.""" while True: try: - reply = session.get( - f"http://{onion_addr}.onion/contact_request/{onion_addr_user}", - timeout=5, - ).text - if reply == "OK": + reply = session.get(f"http://{onion_addr}.onion/contact_request/{onion_addr_user}", timeout=5).text + if reply == 'OK': break except requests.exceptions.RequestException: time.sleep(RELAY_CLIENT_MIN_DELAY) -def get_data_loop( - onion_addr: str, - url_token: str, - short_addr: str, - onion_pub_key: bytes, - queues: "QueueDict", - session: "Session", - gateway: "Gateway", -) -> None: +def get_data_loop(onion_addr: str, + url_token: str, + short_addr: str, + onion_pub_key: bytes, + queues: 'QueueDict', + session: 'Session', + gateway: 'Gateway' + ) -> None: """Load TFC data from contact's Onion Service using valid URL token.""" while True: try: - check_files( - url_token, onion_pub_key, onion_addr, short_addr, session, queues - ) + check_for_files(url_token, onion_pub_key, onion_addr, short_addr, session, queues) try: - r = session.get( - f"http://{onion_addr}.onion/{url_token}/messages", stream=True - ) + r = session.get(f'http://{onion_addr}.onion/{url_token}/messages', stream=True) except requests.exceptions.RequestException: return None @@ -353,54 +268,34 @@ def get_data_loop( continue try: - header, payload = separate_header( - line, DATAGRAM_HEADER_LENGTH - ) # type: bytes, bytes - payload_bytes = base64.b85decode(payload) + header, payload = separate_header(line, DATAGRAM_HEADER_LENGTH) # 
type: bytes, bytes + payload_bytes = base64.b85decode(payload) except (UnicodeError, ValueError): continue - ts = datetime.now() - ts_bytes = int_to_bytes(int(ts.strftime("%Y%m%d%H%M%S%f")[:-4])) + ts = datetime.now() + ts_bytes = int_to_bytes(int(ts.strftime('%Y%m%d%H%M%S%f')[:-4])) - process_received_packet( - ts, - ts_bytes, - header, - payload_bytes, - onion_pub_key, - short_addr, - queues, - gateway, - ) + process_received_packet(ts, ts_bytes, header, payload_bytes, onion_pub_key, short_addr, queues, gateway) except requests.exceptions.RequestException: break -def check_files( - url_token: str, - onion_pub_key: bytes, - onion_addr: str, - short_addr: str, - session: "Session", - queues: "QueueDict", -) -> None: +def check_for_files(url_token: str, + onion_pub_key: bytes, + onion_addr: str, + short_addr: str, + session: 'Session', + queues: 'QueueDict' + ) -> None: """See if a file is available from contact..""" try: - file_data = session.get( - f"http://{onion_addr}.onion/{url_token}/files", stream=True - ).content + file_data = session.get(f"http://{onion_addr}.onion/{url_token}/files", stream=True).content if file_data: - ts = datetime.now() + ts = datetime.now() ts_bytes = int_to_bytes(int(ts.strftime("%Y%m%d%H%M%S%f")[:-4])) - packet = ( - FILE_DATAGRAM_HEADER - + ts_bytes - + onion_pub_key - + ORIGIN_CONTACT_HEADER - + file_data - ) + packet = FILE_DATAGRAM_HEADER + ts_bytes + onion_pub_key + ORIGIN_CONTACT_HEADER + file_data queues[DST_MESSAGE_QUEUE].put(packet) rp_print(f"File from contact {short_addr}", ts) @@ -408,42 +303,38 @@ def check_files( pass -def process_received_packet( - ts: "datetime", - ts_bytes: bytes, - header: bytes, - payload_bytes: bytes, - onion_pub_key: bytes, - short_addr: str, - queues: "QueueDict", - gateway: "Gateway", -) -> None: +def process_received_packet(ts: 'datetime', + ts_bytes: bytes, + header: bytes, + payload_bytes: bytes, + onion_pub_key: bytes, + short_addr: str, + queues: 'QueueDict', + gateway: 'Gateway' + ) -> None: """Process received packet.""" if header == PUBLIC_KEY_DATAGRAM_HEADER: if len(payload_bytes) == TFC_PUBLIC_KEY_LENGTH: msg = f"Received public key from {short_addr} at {ts.strftime('%b %d - %H:%M:%S.%f')[:-4]}:" print_key(msg, payload_bytes, gateway.settings, public_key=True) + queues[PUB_KEY_SEND_QUEUE].put((onion_pub_key, payload_bytes)) elif header == MESSAGE_DATAGRAM_HEADER: - queues[DST_MESSAGE_QUEUE].put( - header + ts_bytes + onion_pub_key + ORIGIN_CONTACT_HEADER + payload_bytes - ) + queues[DST_MESSAGE_QUEUE].put(header + ts_bytes + onion_pub_key + ORIGIN_CONTACT_HEADER + payload_bytes) rp_print(f"Message from contact {short_addr}", ts) - elif header in [ - GROUP_MSG_INVITE_HEADER, - GROUP_MSG_JOIN_HEADER, - GROUP_MSG_MEMBER_ADD_HEADER, - GROUP_MSG_MEMBER_REM_HEADER, - GROUP_MSG_EXIT_GROUP_HEADER, - ]: + elif header in [GROUP_MSG_INVITE_HEADER, + GROUP_MSG_JOIN_HEADER, + GROUP_MSG_MEMBER_ADD_HEADER, + GROUP_MSG_MEMBER_REM_HEADER, + GROUP_MSG_EXIT_GROUP_HEADER]: queues[GROUP_MSG_QUEUE].put((header, payload_bytes, short_addr)) else: rp_print(f"Received invalid packet from {short_addr}", ts, bold=True) -def g_msg_manager(queues: "QueueDict", unit_test: bool = False) -> None: +def g_msg_manager(queues: 'QueueDict', unit_test: bool = False) -> None: """Show group management messages according to contact list state. 
This process keeps track of existing contacts for whom there's a @@ -456,58 +347,40 @@ def g_msg_manager(queues: "QueueDict", unit_test: bool = False) -> None: while True: with ignored(EOFError, KeyboardInterrupt): - while not queues[GROUP_MSG_QUEUE].qsize(): + while queues[GROUP_MSG_QUEUE].qsize() == 0: time.sleep(0.01) header, payload, trunc_addr = queues[GROUP_MSG_QUEUE].get() - group_id, data = separate_header(payload, GROUP_ID_LENGTH) + group_id, data = separate_header(payload, GROUP_ID_LENGTH) if len(group_id) != GROUP_ID_LENGTH: continue group_id_hr = b58encode(group_id) - existing_contacts = update_list_of_existing_contacts( - group_management_queue, existing_contacts - ) + existing_contacts = update_list_of_existing_contacts(group_management_queue, existing_contacts) - # Handle group management messages - process_group_management_message( - data, existing_contacts, group_id_hr, header, trunc_addr - ) + process_group_management_message(data, existing_contacts, group_id_hr, header, trunc_addr) if unit_test and queues[UNIT_TEST_QUEUE].qsize() != 0: break -def process_group_management_message( - data: bytes, - existing_contacts: List[bytes], - group_id_hr: str, - header: bytes, - trunc_addr: str, -) -> None: +def process_group_management_message(data: bytes, + existing_contacts: List[bytes], + group_id_hr: str, + header: bytes, + trunc_addr: str + ) -> None: """Process group management message.""" - if header in [ - GROUP_MSG_INVITE_HEADER, - GROUP_MSG_JOIN_HEADER, - GROUP_MSG_MEMBER_ADD_HEADER, - GROUP_MSG_MEMBER_REM_HEADER, - ]: + if header in [GROUP_MSG_INVITE_HEADER, GROUP_MSG_JOIN_HEADER, + GROUP_MSG_MEMBER_ADD_HEADER, GROUP_MSG_MEMBER_REM_HEADER]: - pub_keys = split_byte_string(data, ONION_SERVICE_PUBLIC_KEY_LENGTH) + pub_keys = split_byte_string(data, ONION_SERVICE_PUBLIC_KEY_LENGTH) pub_key_length = ONION_SERVICE_PUBLIC_KEY_LENGTH - members = [k for k in pub_keys if len(k) == pub_key_length] - known = [ - f" * {pub_key_to_onion_address(m)}" - for m in members - if m in existing_contacts - ] - unknown = [ - f" * {pub_key_to_onion_address(m)}" - for m in members - if m not in existing_contacts - ] + members = [k for k in pub_keys if len(k) == pub_key_length ] + known = [f" * {pub_key_to_onion_address(m)}" for m in members if m in existing_contacts] + unknown = [f" * {pub_key_to_onion_address(m)}" for m in members if m not in existing_contacts] line_list = [] if known: @@ -516,66 +389,45 @@ def process_group_management_message( line_list.extend(["Unknown contacts"] + unknown) if header in [GROUP_MSG_INVITE_HEADER, GROUP_MSG_JOIN_HEADER]: - action = "invited you to" if header == GROUP_MSG_INVITE_HEADER else "joined" - postfix = " with" if members else "" - m_print( - [f"{trunc_addr} has {action} group {group_id_hr}{postfix}"] + line_list, - box=True, - ) + action = 'invited you to' if header == GROUP_MSG_INVITE_HEADER else 'joined' + postfix = ' with' if members else '' + m_print([f"{trunc_addr} has {action} group {group_id_hr}{postfix}"] + line_list, box=True) elif header in [GROUP_MSG_MEMBER_ADD_HEADER, GROUP_MSG_MEMBER_REM_HEADER]: if members: - action, p = ( - ("added", "to") - if header == GROUP_MSG_MEMBER_ADD_HEADER - else ("removed", "from") - ) - m_print( - [ - f"{trunc_addr} has {action} following members {p} group {group_id_hr}" - ] - + line_list, - box=True, - ) + action, p = ("added", "to") if header == GROUP_MSG_MEMBER_ADD_HEADER else ("removed", "from") + m_print([f"{trunc_addr} has {action} following members {p} group {group_id_hr}"] + line_list, box=True) elif 
header == GROUP_MSG_EXIT_GROUP_HEADER: - m_print( - [ - f"{trunc_addr} has left group {group_id_hr}", - "", - "Warning", - "Unless you remove the contact from the group, they", - "can still read messages you send to the group.", - ], - box=True, - ) + m_print([f"{trunc_addr} has left group {group_id_hr}", + '', "Warning", + "Unless you remove the contact from the group, they", + "can still read messages you send to the group."], box=True) -def c_req_manager(queues: "QueueDict", unit_test: bool = False) -> None: +def c_req_manager(queues: 'QueueDict', unit_test: bool = False) -> None: """Manage incoming contact requests.""" existing_contacts = [] # type: List[bytes] - contact_requests = [] # type: List[bytes] + contact_requests = [] # type: List[bytes] request_queue = queues[CONTACT_REQ_QUEUE] contact_queue = queues[C_REQ_MGMT_QUEUE] setting_queue = queues[C_REQ_STATE_QUEUE] + account_queue = queues[ACCOUNT_SEND_QUEUE] show_requests = True while True: with ignored(EOFError, KeyboardInterrupt): - while not request_queue.qsize(): + while request_queue.qsize() == 0: time.sleep(0.1) purp_onion_address = request_queue.get() - while setting_queue.qsize(): + while setting_queue.qsize() != 0: show_requests = setting_queue.get() - # Update list of existing contacts - existing_contacts = update_list_of_existing_contacts( - contact_queue, existing_contacts - ) + existing_contacts = update_list_of_existing_contacts(contact_queue, existing_contacts) - if validate_onion_addr(purp_onion_address) == "": + if validate_onion_addr(purp_onion_address) == '': onion_pub_key = onion_address_to_pub_key(purp_onion_address) if onion_pub_key in existing_contacts: continue @@ -583,29 +435,23 @@ def c_req_manager(queues: "QueueDict", unit_test: bool = False) -> None: continue if show_requests: - ts_fmt = datetime.now().strftime("%b %d - %H:%M:%S.%f")[:-4] - m_print( - [ - f"{ts_fmt} - New contact request from an unknown TFC account:", - purp_onion_address, - ], - box=True, - ) + ts = datetime.now().strftime('%b %d - %H:%M:%S.%f')[:-4] + m_print([f"{ts} - New contact request from an unknown TFC account:", purp_onion_address], box=True) + account_queue.put(purp_onion_address) + contact_requests.append(onion_pub_key) if unit_test and queues[UNIT_TEST_QUEUE].qsize() != 0: break -def update_list_of_existing_contacts( - contact_queue: "Queue[Any]", existing_contacts: List[bytes] -) -> List[bytes]: +def update_list_of_existing_contacts(contact_queue: 'Queue[Any]', + existing_contacts: List[bytes] + ) -> List[bytes]: """Update list of existing contacts.""" while contact_queue.qsize() > 0: command, ser_onion_pub_keys = contact_queue.get() - onion_pub_key_list = split_byte_string( - ser_onion_pub_keys, ONION_SERVICE_PUBLIC_KEY_LENGTH - ) + onion_pub_key_list = split_byte_string(ser_onion_pub_keys, ONION_SERVICE_PUBLIC_KEY_LENGTH) if command == RP_ADD_CONTACT_HEADER: existing_contacts = list(set(existing_contacts) | set(onion_pub_key_list)) diff --git a/src/relay/commands.py b/src/relay/commands.py index aed01a6..c467eea 100644 --- a/src/relay/commands.py +++ b/src/relay/commands.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -19,74 +19,43 @@ You should have received a copy of the GNU General Public License along with TFC. If not, see . 
""" -import os import serial -import sys import time import typing from typing import Any, Dict -from src.common.encoding import bytes_to_bool, bytes_to_int +from src.common.encoding import bytes_to_bool, bytes_to_int from src.common.exceptions import SoftError -from src.common.misc import ( - ignored, - reset_terminal, - separate_header, - separate_headers, - split_byte_string, -) -from src.common.output import clear_screen, m_print -from src.common.statics import ( - CONFIRM_CODE_LENGTH, - CONTACT_MGMT_QUEUE, - C_REQ_MGMT_QUEUE, - C_REQ_STATE_QUEUE, - ENCODED_BOOLEAN_LENGTH, - ENCODED_INTEGER_LENGTH, - EXIT, - GROUP_MGMT_QUEUE, - LOCAL_TESTING_PACKET_DELAY, - MAX_INT, - ONION_CLOSE_QUEUE, - ONION_KEY_QUEUE, - ONION_SERVICE_PRIVATE_KEY_LENGTH, - ONION_SERVICE_PUBLIC_KEY_LENGTH, - RP_ADD_CONTACT_HEADER, - RP_REMOVE_CONTACT_HEADER, - SRC_TO_RELAY_QUEUE, - UNENCRYPTED_ADD_EXISTING_CONTACT, - UNENCRYPTED_ADD_NEW_CONTACT, - UNENCRYPTED_BAUDRATE, - UNENCRYPTED_COMMAND_HEADER_LENGTH, - UNENCRYPTED_EC_RATIO, - UNENCRYPTED_EXIT_COMMAND, - UNENCRYPTED_MANAGE_CONTACT_REQ, - UNENCRYPTED_ONION_SERVICE_DATA, - UNENCRYPTED_REM_CONTACT, - UNENCRYPTED_SCREEN_CLEAR, - UNENCRYPTED_SCREEN_RESET, - UNENCRYPTED_WIPE_COMMAND, - WIPE, -) +from src.common.misc import ignored, reset_terminal, separate_header, separate_headers, split_byte_string +from src.common.output import clear_screen, m_print +from src.common.statics import (ACCOUNT_CHECK_QUEUE, CONFIRM_CODE_LENGTH, CONTACT_MGMT_QUEUE, C_REQ_MGMT_QUEUE, + C_REQ_STATE_QUEUE, ENCODED_BOOLEAN_LENGTH, ENCODED_INTEGER_LENGTH, EXIT, + GROUP_MGMT_QUEUE, LOCAL_TESTING_PACKET_DELAY, MAX_INT, ONION_CLOSE_QUEUE, + ONION_KEY_QUEUE, ONION_SERVICE_PRIVATE_KEY_LENGTH, ONION_SERVICE_PUBLIC_KEY_LENGTH, + PUB_KEY_CHECK_QUEUE, RP_ADD_CONTACT_HEADER, RP_REMOVE_CONTACT_HEADER, + SRC_TO_RELAY_QUEUE, UNENCRYPTED_ACCOUNT_CHECK, UNENCRYPTED_ADD_EXISTING_CONTACT, + UNENCRYPTED_ADD_NEW_CONTACT, UNENCRYPTED_BAUDRATE, UNENCRYPTED_COMMAND_HEADER_LENGTH, + UNENCRYPTED_EC_RATIO, UNENCRYPTED_EXIT_COMMAND, UNENCRYPTED_MANAGE_CONTACT_REQ, + UNENCRYPTED_ONION_SERVICE_DATA, UNENCRYPTED_PUBKEY_CHECK, UNENCRYPTED_REM_CONTACT, + UNENCRYPTED_SCREEN_CLEAR, UNENCRYPTED_SCREEN_RESET, UNENCRYPTED_WIPE_COMMAND, WIPE) if typing.TYPE_CHECKING: - from multiprocessing import Queue + from multiprocessing import Queue from src.common.gateway import Gateway - QueueDict = Dict[bytes, Queue[Any]] -def relay_command( - queues: "QueueDict", gateway: "Gateway", stdin_fd: int, unit_test: bool = False -) -> None: +def relay_command(queues: 'QueueDict', + gateway: 'Gateway', + unit_test: bool = False + ) -> None: """Process Relay Program commands.""" - sys.stdin = os.fdopen(stdin_fd) queue_from_src = queues[SRC_TO_RELAY_QUEUE] while True: with ignored(EOFError, KeyboardInterrupt, SoftError): - while not queue_from_src.qsize(): + while queue_from_src.qsize() == 0: time.sleep(0.01) command = queue_from_src.get() @@ -96,55 +65,59 @@ def relay_command( break -def process_command(command: bytes, gateway: "Gateway", queues: "QueueDict") -> None: +def process_command(command: bytes, + gateway: 'Gateway', + queues: 'QueueDict' + ) -> None: """Select function for received Relay Program command.""" header, command = separate_header(command, UNENCRYPTED_COMMAND_HEADER_LENGTH) # Keyword Function to run ( Parameters ) # --------------------------------------------------------------------------------- - function_d = { - UNENCRYPTED_SCREEN_CLEAR: (clear_windows, gateway,), - UNENCRYPTED_SCREEN_RESET: (reset_windows, gateway,), - 
UNENCRYPTED_EXIT_COMMAND: (exit_tfc, gateway, queues), - UNENCRYPTED_WIPE_COMMAND: (wipe, gateway, queues), - UNENCRYPTED_EC_RATIO: (change_ec_ratio, command, gateway,), - UNENCRYPTED_BAUDRATE: (change_baudrate, command, gateway,), - UNENCRYPTED_MANAGE_CONTACT_REQ: (manage_contact_req, command, queues), - UNENCRYPTED_ADD_NEW_CONTACT: (add_contact, command, False, queues), - UNENCRYPTED_ADD_EXISTING_CONTACT: (add_contact, command, True, queues), - UNENCRYPTED_REM_CONTACT: (remove_contact, command, queues), - UNENCRYPTED_ONION_SERVICE_DATA: (add_onion_data, command, queues), - } # type: Dict[bytes, Any] + function_d = {UNENCRYPTED_SCREEN_CLEAR: (clear_windows, gateway, ), + UNENCRYPTED_SCREEN_RESET: (reset_windows, gateway, ), + UNENCRYPTED_EXIT_COMMAND: (exit_tfc, gateway, queues), + UNENCRYPTED_WIPE_COMMAND: (wipe, gateway, queues), + UNENCRYPTED_EC_RATIO: (change_ec_ratio, command, gateway, ), + UNENCRYPTED_BAUDRATE: (change_baudrate, command, gateway, ), + UNENCRYPTED_MANAGE_CONTACT_REQ: (manage_contact_req, command, queues), + UNENCRYPTED_ADD_NEW_CONTACT: (add_contact, command, False, queues), + UNENCRYPTED_ADD_EXISTING_CONTACT: (add_contact, command, True, queues), + UNENCRYPTED_REM_CONTACT: (remove_contact, command, queues), + UNENCRYPTED_ONION_SERVICE_DATA: (add_onion_data, command, queues), + UNENCRYPTED_ACCOUNT_CHECK: (compare_accounts, command, queues), + UNENCRYPTED_PUBKEY_CHECK: (compare_pub_keys, command, queues) + } # type: Dict[bytes, Any] if header not in function_d: raise SoftError("Error: Received an invalid command.") - from_dict = function_d[header] - func = from_dict[0] + from_dict = function_d[header] + func = from_dict[0] parameters = from_dict[1:] func(*parameters) -def race_condition_delay(gateway: "Gateway") -> None: +def race_condition_delay(gateway: 'Gateway') -> None: """Prevent race condition with Receiver command.""" if gateway.settings.local_testing_mode: time.sleep(LOCAL_TESTING_PACKET_DELAY) time.sleep(gateway.settings.data_diode_sockets * 1.0) -def clear_windows(gateway: "Gateway") -> None: +def clear_windows(gateway: 'Gateway') -> None: """Clear Relay Program screen.""" race_condition_delay(gateway) clear_screen() -def reset_windows(gateway: "Gateway") -> None: +def reset_windows(gateway: 'Gateway') -> None: """Reset Relay Program screen.""" race_condition_delay(gateway) reset_terminal() -def exit_tfc(gateway: "Gateway", queues: "QueueDict") -> None: +def exit_tfc(gateway: 'Gateway', queues: 'QueueDict') -> None: """Exit TFC. The queue is read by @@ -154,7 +127,7 @@ def exit_tfc(gateway: "Gateway", queues: "QueueDict") -> None: queues[ONION_CLOSE_QUEUE].put(EXIT) -def wipe(gateway: "Gateway", queues: "QueueDict") -> None: +def wipe(gateway: 'Gateway', queues: 'QueueDict') -> None: """Reset terminal, wipe all user data and power off the system. No effective RAM overwriting tool currently exists, so as long as Source and @@ -170,16 +143,14 @@ def wipe(gateway: "Gateway", queues: "QueueDict") -> None: queues[ONION_CLOSE_QUEUE].put(WIPE) -def change_ec_ratio(command: bytes, gateway: "Gateway") -> None: +def change_ec_ratio(command: bytes, gateway: 'Gateway') -> None: """Change Relay Program's Reed-Solomon error correction ratio.""" try: value = int(command) if value < 0 or value > MAX_INT: raise ValueError except ValueError: - raise SoftError( - "Error: Received invalid EC ratio value from Transmitter Program." 
- ) + raise SoftError("Error: Received invalid EC ratio value from Transmitter Program.") m_print("Error correction ratio will change on restart.", head=1, tail=1) @@ -187,16 +158,14 @@ def change_ec_ratio(command: bytes, gateway: "Gateway") -> None: gateway.settings.store_settings() -def change_baudrate(command: bytes, gateway: "Gateway") -> None: +def change_baudrate(command: bytes, gateway: 'Gateway') -> None: """Change Relay Program's serial interface baud rate setting.""" try: value = int(command) if value not in serial.Serial.BAUDRATES: raise ValueError except ValueError: - raise SoftError( - "Error: Received invalid baud rate value from Transmitter Program." - ) + raise SoftError("Error: Received invalid baud rate value from Transmitter Program.") m_print("Baud rate will change on restart.", head=1, tail=1) @@ -204,21 +173,21 @@ def change_baudrate(command: bytes, gateway: "Gateway") -> None: gateway.settings.store_settings() -def manage_contact_req( - command: bytes, queues: "QueueDict", notify: bool = True -) -> None: +def manage_contact_req(command: bytes, + queues: 'QueueDict', + notify: bool = True + ) -> None: """Control whether contact requests are accepted.""" enabled = bytes_to_bool(command) if notify: - m_print( - f"Contact requests are have been {('enabled' if enabled else 'disabled')}.", - head=1, - tail=1, - ) + m_print(f"Contact requests are have been {('enabled' if enabled else 'disabled')}.", head=1, tail=1) queues[C_REQ_STATE_QUEUE].put(enabled) -def add_contact(command: bytes, existing: bool, queues: "QueueDict") -> None: +def add_contact(command: bytes, + existing: bool, + queues: 'QueueDict' + ) -> None: """Add clients to Relay Program. The queues are read by @@ -231,7 +200,7 @@ def add_contact(command: bytes, existing: bool, queues: "QueueDict") -> None: queues[C_REQ_MGMT_QUEUE].put((RP_ADD_CONTACT_HEADER, command)) -def remove_contact(command: bytes, queues: "QueueDict") -> None: +def remove_contact(command: bytes, queues: 'QueueDict') -> None: """Remove clients from Relay Program. The queues are read by @@ -244,7 +213,7 @@ def remove_contact(command: bytes, queues: "QueueDict") -> None: queues[C_REQ_MGMT_QUEUE].put((RP_REMOVE_CONTACT_HEADER, command)) -def add_onion_data(command: bytes, queues: "QueueDict") -> None: +def add_onion_data(command: bytes, queues: 'QueueDict') -> None: """Add Onion Service data. 
Separate onion service private key and public keys for @@ -253,25 +222,13 @@ def add_onion_data(command: bytes, queues: "QueueDict") -> None: The ONION_KEY_QUEUE is read by relay.onion.onion_service() """ - ( - os_private_key, - confirmation_code, - allow_req_byte, - no_pending_bytes, - ser_pub_keys, - ) = separate_headers( - command, - [ - ONION_SERVICE_PRIVATE_KEY_LENGTH, - CONFIRM_CODE_LENGTH, - ENCODED_BOOLEAN_LENGTH, - ENCODED_INTEGER_LENGTH, - ], - ) + os_private_key, confirmation_code, allow_req_byte, no_pending_bytes, ser_pub_keys \ + = separate_headers(command, [ONION_SERVICE_PRIVATE_KEY_LENGTH, CONFIRM_CODE_LENGTH, + ENCODED_BOOLEAN_LENGTH, ENCODED_INTEGER_LENGTH]) - no_pending = bytes_to_int(no_pending_bytes) - public_key_list = split_byte_string(ser_pub_keys, ONION_SERVICE_PUBLIC_KEY_LENGTH) - pending_public_keys = public_key_list[:no_pending] + no_pending = bytes_to_int(no_pending_bytes) + public_key_list = split_byte_string(ser_pub_keys, ONION_SERVICE_PUBLIC_KEY_LENGTH) + pending_public_keys = public_key_list[:no_pending] existing_public_keys = public_key_list[no_pending:] for onion_pub_key in pending_public_keys: @@ -281,3 +238,20 @@ def add_onion_data(command: bytes, queues: "QueueDict") -> None: manage_contact_req(allow_req_byte, queues, notify=False) queues[ONION_KEY_QUEUE].put((os_private_key, confirmation_code)) + + +def compare_accounts(command: bytes, queues: 'QueueDict') -> None: + """\ + Compare incorrectly typed account to what's available on Relay + Program. + """ + queues[ACCOUNT_CHECK_QUEUE].put(command.decode()) + + +def compare_pub_keys(command: bytes, queues: 'QueueDict') -> None: + """\ + Compare incorrectly typed public key to what's available on Relay + Program. + """ + account, incorrect_pub_key = separate_header(command, ONION_SERVICE_PUBLIC_KEY_LENGTH) + queues[PUB_KEY_CHECK_QUEUE].put((account, incorrect_pub_key)) diff --git a/src/relay/diffs.py b/src/relay/diffs.py new file mode 100644 index 0000000..e200bdb --- /dev/null +++ b/src/relay/diffs.py @@ -0,0 +1,229 @@ +#!/usr/bin/env python3.7 +# -*- coding: utf-8 -*- + +""" +TFC - Onion-routed, endpoint secure messaging system +Copyright (C) 2013-2020 Markus Ottela + +This file is part of TFC. + +TFC is free software: you can redistribute it and/or modify it under the terms +of the GNU General Public License as published by the Free Software Foundation, +either version 3 of the License, or (at your option) any later version. + +TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; +without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR +PURPOSE. See the GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with TFC. If not, see . 
+""" + +import difflib +import os +import sys +import time +import typing + +from multiprocessing import Queue +from typing import Any, Dict, List, Optional + +import tkinter + +from src.common.encoding import b58encode +from src.common.misc import ignored, split_string, validate_onion_addr +from src.common.output import m_print +from src.common.statics import (ACCOUNT_CHECK_QUEUE, ACCOUNT_RATIO_LIMIT, ACCOUNT_SEND_QUEUE, B58_PUBLIC_KEY_GUIDE, + ENCODED_B58_PUB_KEY_LENGTH, GUI_INPUT_QUEUE, PUB_KEY_CHECK_QUEUE, PUB_KEY_SEND_QUEUE, + USER_ACCOUNT_QUEUE) + +if typing.TYPE_CHECKING: + AccountQueue = Queue[Optional[str]] + QueueDict = Dict[bytes, Queue[Any]] + + +# Accounts + +class GetAccountFromUser(object): + """Get correct account of contact from the user via Tkinter prompt.""" + + def __init__(self, queue: 'AccountQueue', onion_address_user: str) -> None: + """Create new Tkinter input box.""" + self.queue = queue + self.onion_address_user = onion_address_user + + self.root = tkinter.Tk() + self.root.title("Contact account entry") + self.root.protocol("WM_DELETE_WINDOW", self.dismiss_window) + + self.error_label = tkinter.Label(self.root, text=None) + + self.instruction = tkinter.Text(self.root, height=3, width=54) + self.instruction.tag_configure('center', justify='center') + self.instruction.insert('1.0', "Could not determine the account being added.\n" # type: ignore + "Please paste the account here to see diffs\n" + "or press Cancel to dismiss this prompt.") + self.instruction.tag_add('center', '1.0', 'end') # type: ignore + self.instruction.grid(row=0, rowspan=2, columnspan=2) + + self.address_entry_box = tkinter.Entry(self.root, width=54) + self.address_entry_box.grid(row=2, columnspan=2) + + tkinter.Button(self.root, text='Cancel', command=self.dismiss_window).grid( row=4, column=0, sticky='NSEW') + tkinter.Button(self.root, text='Ok', command=self.evaluate_account).grid(row=4, column=1, sticky='NSEW') + + self.root.mainloop() + + def evaluate_account(self) -> None: + """Check if the input is a valid TFC account.""" + purp_acco = self.address_entry_box.get() # type: ignore + error_msg = validate_onion_addr(purp_acco, self.onion_address_user) + + if error_msg: + self.address_entry_box.delete(0, tkinter.END) + self.error_label.forget() + self.error_label.configure(text=error_msg, justify='center') + self.error_label.grid(row=3, columnspan=2, sticky='NSEW') + else: + self.queue.put(purp_acco) + self.root.destroy() + + def dismiss_window(self) -> None: + """Dismiss the account input window.""" + self.queue.put(None) + self.root.destroy() + + +def account_checker(queues: 'QueueDict', + stdin_fd: int, + unit_test: bool = False + ) -> None: + """\ + Display diffs between received TFC accounts and accounts + manually imported to Source Computer.""" + sys.stdin = os.fdopen(stdin_fd) + account_list = [] # type: List[str] + account_check_queue = queues[ACCOUNT_CHECK_QUEUE] + account_send_queue = queues[ACCOUNT_SEND_QUEUE] + account_input_queue = queues[GUI_INPUT_QUEUE] + + while queues[USER_ACCOUNT_QUEUE].qsize() == 0: + time.sleep(0.01) + onion_address_user = queues[USER_ACCOUNT_QUEUE].get() + + while True: + with ignored(EOFError, KeyboardInterrupt): + if account_send_queue.qsize() != 0: + account = account_send_queue.get() # type: Optional[str] + if account is not None and account not in account_list: + account_list.append(account) + continue + + if account_check_queue.qsize() != 0: + purp_account = account_check_queue.get() # type: str + + # Determine correct account + for account in 
account_list: + # Check if accounts are similar enough: + ratio = difflib.SequenceMatcher(a=account, b=purp_account).ratio() + if ratio >= ACCOUNT_RATIO_LIMIT: + break + else: + account = get_account_from_user(account_list, onion_address_user, account_input_queue) + + if account is not None: + show_value_diffs("account", account, purp_account, local_test=True) + + continue + time.sleep(0.01) + + if unit_test: + break + + +def get_account_from_user(account_list: List[str], + onion_address_user: str, + account_input_queue: 'AccountQueue' + ) -> Optional[str]: + """Get account from user.""" + GetAccountFromUser(account_input_queue, onion_address_user) + account = account_input_queue.get() + if account is not None and account not in account_list: + account_list.append(account) + return account + + +# Public keys + +def pub_key_checker(queues: 'QueueDict', + local_test: bool, + unit_test: bool = False + ) -> None: + """\ + Display diffs between received public keys and public keys + manually imported to Source Computer. + """ + pub_key_check_queue = queues[PUB_KEY_CHECK_QUEUE] + pub_key_send_queue = queues[PUB_KEY_SEND_QUEUE] + pub_key_dictionary = dict() + + while True: + with ignored(EOFError, KeyboardInterrupt): + if pub_key_send_queue.qsize() != 0: + account, pub_key = pub_key_send_queue.get() + pub_key_dictionary[account] = b58encode(pub_key, public_key=True) + continue + + if pub_key_check_queue.qsize() != 0: + purp_account, purp_pub_key = pub_key_check_queue.get() # type: bytes, bytes + + if purp_account in pub_key_dictionary: + purp_b58_pub_key = purp_pub_key.decode() + true_b58_pub_key = pub_key_dictionary[purp_account] + + show_value_diffs("public key", true_b58_pub_key, purp_b58_pub_key, local_test) + + time.sleep(0.01) + + if unit_test: + break + + +# Diffs + +def show_value_diffs(value_type: str, + true_value: str, + purp_value: str, + local_test: bool + ) -> None: + """Compare purported value with correct value.""" + # Pad with underscores to denote missing chars + while len(purp_value) < ENCODED_B58_PUB_KEY_LENGTH: + purp_value += '_' + + replace_l = '' + purported = '' + for c1, c2 in zip(purp_value, true_value): + if c1 == c2: + replace_l += ' ' + purported += c1 + else: + replace_l += '↓' + purported += c1 + + message_list = [f"Source Computer received an invalid {value_type}.", + "See arrows below that point to correct characters."] + + if local_test: + m_print(message_list + ['', purported, replace_l, true_value], box=True) + else: + purported = ' '.join(split_string(purported, item_len=7)) + replace_l = ' '.join(split_string(replace_l, item_len=7)) + true_value = ' '.join(split_string(true_value, item_len=7)) + + m_print(message_list + ['', + B58_PUBLIC_KEY_GUIDE, + purported, + replace_l, + true_value, + B58_PUBLIC_KEY_GUIDE], box=True) diff --git a/src/relay/onion.py b/src/relay/onion.py index ad8184b..9a88713 100644 --- a/src/relay/onion.py +++ b/src/relay/onion.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
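The `show_value_diffs` helper in the new src/relay/diffs.py above boils down to a character-by-character comparison: positions where the purported value matches the correct one get a space, mismatches get a '↓' that points at the correct character printed on the line below. A minimal sketch of that comparison follows (not part of the patch; the sample strings are made up):

def char_diff(purported: str, correct: str) -> str:
    """Return the purported value, an arrow line and the correct value as three stacked rows."""
    arrows = ''.join(' ' if p == c else '↓' for p, c in zip(purported, correct))
    return '\n'.join((purported, arrows, correct))


print(char_diff("4sci4gyx", "4sei4gvx"))
# 4sci4gyx
#   ↓   ↓
# 4sei4gvx

The patched function additionally pads short inputs with underscores and, outside local testing, regroups the three rows into seven-character blocks framed by B58_PUBLIC_KEY_GUIDE lines.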
@@ -38,23 +38,15 @@ import stem.process from stem.control import Controller -from src.common.encoding import pub_key_to_onion_address +from src.common.encoding import pub_key_to_onion_address from src.common.exceptions import CriticalError -from src.common.output import m_print, rp_print -from src.common.statics import ( - EXIT, - EXIT_QUEUE, - ONION_CLOSE_QUEUE, - ONION_KEY_QUEUE, - ONION_SERVICE_PRIVATE_KEY_LENGTH, - TOR_CONTROL_PORT, - TOR_DATA_QUEUE, - TOR_SOCKS_PORT, -) +from src.common.output import m_print, rp_print +from src.common.statics import (EXIT, EXIT_QUEUE, ONION_CLOSE_QUEUE, ONION_KEY_QUEUE, + ONION_SERVICE_PRIVATE_KEY_LENGTH, TOR_CONTROL_PORT, TOR_DATA_QUEUE, TOR_SOCKS_PORT, + USER_ACCOUNT_QUEUE) if typing.TYPE_CHECKING: from multiprocessing import Queue - QueueDict = Dict[bytes, Queue[Any]] @@ -65,7 +57,7 @@ def get_available_port(min_port: int, max_port: int) -> int: with socket.socket() as temp_sock: while True: try: - temp_sock.bind(("127.0.0.1", sys_rand.randint(min_port, max_port))) + temp_sock.bind(('127.0.0.1', sys_rand.randint(min_port, max_port))) break except OSError: pass @@ -82,12 +74,12 @@ class Tor(object): def __init__(self) -> None: self.tor_process = None # type: Optional[Any] - self.controller = None # type: Optional[Controller] + self.controller = None # type: Optional[Controller] @staticmethod def platform_is_tails() -> bool: """Return True if Relay Program is running on Tails.""" - with open("/etc/os-release") as f: + with open('/etc/os-release') as f: data = f.read() return 'TAILS_PRODUCT_NAME="Tails"' in data @@ -103,17 +95,15 @@ class Tor(object): return None tor_data_directory = tempfile.TemporaryDirectory() - tor_control_socket = os.path.join(tor_data_directory.name, "control_socket") + tor_control_socket = os.path.join(tor_data_directory.name, 'control_socket') - if not os.path.isfile("/usr/bin/tor"): + if not os.path.isfile('/usr/bin/tor'): raise CriticalError("Check that Tor is installed.") self.launch_tor_process(port, tor_control_socket, tor_data_directory) - start_ts = time.monotonic() - self.controller = stem.control.Controller.from_socket_file( - path=tor_control_socket - ) + start_ts = time.monotonic() + self.controller = stem.control.Controller.from_socket_file(path=tor_control_socket) self.controller.authenticate() while True: @@ -125,38 +115,35 @@ class Tor(object): raise CriticalError("Tor socket closed.") res_parts = shlex.split(response) - summary = res_parts[4].split("=")[1] + summary = res_parts[4].split('=')[1] - if summary == "Done": - tor_version = self.controller.get_version().version_str.split(" (")[0] + if summary == 'Done': + tor_version = self.controller.get_version().version_str.split(' (')[0] rp_print(f"Setup 70% - Tor {tor_version} is now running", bold=True) break if time.monotonic() - start_ts > 15: - start_ts = time.monotonic() - self.controller = stem.control.Controller.from_socket_file( - path=tor_control_socket - ) + start_ts = time.monotonic() + self.controller = stem.control.Controller.from_socket_file(path=tor_control_socket) self.controller.authenticate() - def launch_tor_process( - self, port: int, tor_control_socket: Union[bytes, str], tor_data_directory: Any - ) -> None: + def launch_tor_process(self, + port: int, + tor_control_socket: Union[bytes, str], + tor_data_directory: Any + ) -> None: """Launch Tor process.""" while True: try: self.tor_process = stem.process.launch_tor_with_config( - config={ - "DataDirectory": tor_data_directory.name, - "SocksPort": str(port), - "ControlSocket": 
tor_control_socket, - "AvoidDiskWrites": "1", - "Log": "notice stdout", - "GeoIPFile": "/usr/share/tor/geoip", - "GeoIPv6File ": "/usr/share/tor/geoip6", - }, - tor_cmd="/usr/bin/tor", - ) + config={"DataDirectory": tor_data_directory.name, + "SocksPort": str(port), + "ControlSocket": tor_control_socket, + "AvoidDiskWrites": "1", + "Log": "notice stdout", + "GeoIPFile": "/usr/share/tor/geoip", + "GeoIPv6File ": "/usr/share/tor/geoip6"}, + tor_cmd="/usr/bin/tor") break except OSError: @@ -189,18 +176,13 @@ def stem_compatible_ed25519_key_from_private_key(private_key: bytes) -> str: def encode_int(y: int) -> bytes: """Encode integer to 32-byte bytestring (little-endian format).""" bits = [(y >> i) & 1 for i in range(b)] - return b"".join( - [ - bytes([(sum([bits[i * 8 + j] << j for j in range(8)]))]) - for i in range(b // 8) - ] - ) + return b''.join([bytes([(sum([bits[i * 8 + j] << j for j in range(8)]))]) for i in range(b // 8)]) def expand_private_key(sk: bytes) -> bytes: """Expand private key to base64 blob.""" h = hashlib.sha512(sk).digest() a = 2 ** (b - 2) + sum(2 ** i * bit(h, i) for i in range(3, b - 2)) - k = b"".join([bytes([h[i]]) for i in range(b // 8, b // 4)]) + k = b''.join([bytes([h[i]]) for i in range(b // 8, b // 4)]) return encode_int(a) + k @@ -212,20 +194,20 @@ def stem_compatible_ed25519_key_from_private_key(private_key: bytes) -> str: return base64.b64encode(expanded_private_key).decode() -def onion_service(queues: Dict[bytes, "Queue[Any]"]) -> None: +def onion_service(queues: Dict[bytes, 'Queue[Any]']) -> None: """Manage the Tor Onion Service and control Tor via stem.""" rp_print("Setup 0% - Waiting for Onion Service configuration...", bold=True) - while not queues[ONION_KEY_QUEUE].qsize(): + while queues[ONION_KEY_QUEUE].qsize() == 0: time.sleep(0.1) private_key, c_code = queues[ONION_KEY_QUEUE].get() # type: bytes, bytes - public_key_user = bytes(nacl.signing.SigningKey(seed=private_key).verify_key) - onion_addr_user = pub_key_to_onion_address(public_key_user) + public_key_user = bytes(nacl.signing.SigningKey(seed=private_key).verify_key) + onion_addr_user = pub_key_to_onion_address(public_key_user) try: rp_print("Setup 10% - Launching Tor...", bold=True) tor_port = get_available_port(1000, 65535) - tor = Tor() + tor = Tor() tor.connect(tor_port) except (EOFError, KeyboardInterrupt): return @@ -236,26 +218,19 @@ def onion_service(queues: Dict[bytes, "Queue[Any]"]) -> None: try: rp_print("Setup 75% - Launching Onion Service...", bold=True) key_data = stem_compatible_ed25519_key_from_private_key(private_key) - response = tor.controller.create_ephemeral_hidden_service( - ports={80: 5000}, - key_type="ED25519-V3", - key_content=key_data, - await_publication=True, - ) + response = tor.controller.create_ephemeral_hidden_service(ports={80: 5000}, + key_type='ED25519-V3', + key_content=key_data, + await_publication=True) rp_print("Setup 100% - Onion Service is now published.", bold=True) - m_print( - [ - "Your TFC account is:", - onion_addr_user, - "", - f"Onion Service confirmation code (to Transmitter): {c_code.hex()}", - ], - box=True, - ) + m_print(["Your TFC account is:", + onion_addr_user, '', + f"Onion Service confirmation code (to Transmitter): {c_code.hex()}"], box=True) # Allow the client to start looking for contacts at this point. 
queues[TOR_DATA_QUEUE].put((tor_port, onion_addr_user)) + queues[USER_ACCOUNT_QUEUE].put(onion_addr_user) except (KeyboardInterrupt, stem.SocketClosed): tor.stop() @@ -264,7 +239,10 @@ def onion_service(queues: Dict[bytes, "Queue[Any]"]) -> None: monitor_queues(tor, response, queues) -def monitor_queues(tor: Tor, response: Any, queues: "QueueDict") -> None: +def monitor_queues(tor: Tor, + response: Any, + queues: 'QueueDict' + ) -> None: """Monitor queues for incoming packets.""" while True: try: @@ -273,22 +251,12 @@ def monitor_queues(tor: Tor, response: Any, queues: "QueueDict") -> None: if queues[ONION_KEY_QUEUE].qsize() > 0: _, c_code = queues[ONION_KEY_QUEUE].get() - m_print( - [ - "Onion Service is already running.", - "", - f"Onion Service confirmation code (to Transmitter): {c_code.hex()}", - ], - box=True, - ) + m_print(["Onion Service is already running.", '', + f"Onion Service confirmation code (to Transmitter): {c_code.hex()}"], box=True) if queues[ONION_CLOSE_QUEUE].qsize() > 0: command = queues[ONION_CLOSE_QUEUE].get() - if ( - not tor.platform_is_tails() - and command == EXIT - and tor.controller is not None - ): + if not tor.platform_is_tails() and command == EXIT and tor.controller is not None: tor.controller.remove_hidden_service(response.service_id) tor.stop() queues[EXIT_QUEUE].put(command) diff --git a/src/relay/server.py b/src/relay/server.py index 1aea860..e2a4fed 100644 --- a/src/relay/server.py +++ b/src/relay/server.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -23,30 +23,26 @@ import hmac import logging import typing -from io import BytesIO +from io import BytesIO from multiprocessing import Queue -from typing import Any, Dict, List, Optional +from typing import Any, Dict, List, Optional from flask import Flask, send_file -from src.common.misc import HideRunTime -from src.common.statics import ( - CONTACT_REQ_QUEUE, - F_TO_FLASK_QUEUE, - M_TO_FLASK_QUEUE, - URL_TOKEN_QUEUE, -) +from src.common.misc import HideRunTime +from src.common.statics import CONTACT_REQ_QUEUE, F_TO_FLASK_QUEUE, M_TO_FLASK_QUEUE, URL_TOKEN_QUEUE if typing.TYPE_CHECKING: - QueueDict = Dict[bytes, Queue[Any]] - PubKeyDict = Dict[str, bytes] + QueueDict = Dict[bytes, Queue[Any]] + PubKeyDict = Dict[str, bytes] MessageDict = Dict[bytes, List[str]] - FileDict = Dict[bytes, List[bytes]] + FileDict = Dict[bytes, List[bytes]] -def validate_url_token( - purp_url_token: str, queues: "QueueDict", pub_key_dict: "PubKeyDict" -) -> bool: +def validate_url_token(purp_url_token: str, + queues: 'QueueDict', + pub_key_dict: 'PubKeyDict' + ) -> bool: """Validate URL token using constant time comparison.""" # This context manager hides the duration of URL_TOKEN_QUEUE check as # well as the number of accounts in pub_key_dict when iterating over keys. @@ -76,9 +72,10 @@ def validate_url_token( return valid_url_token -def flask_server( - queues: "QueueDict", url_token_public_key: str, unit_test: bool = False -) -> Optional[Flask]: +def flask_server(queues: 'QueueDict', + url_token_public_key: str, + unit_test: bool = False + ) -> Optional[Flask]: """Run Flask web server for outgoing messages. This process runs Flask web server from where clients of contacts @@ -98,23 +95,23 @@ def flask_server( connection is strongly authenticated by the Onion Service domain name, that is, the TFC account pinned by the user. 
""" - app = Flask(__name__) - pub_key_dict = dict() # type: PubKeyDict - message_dict = dict() # type: MessageDict - file_dict = dict() # type: FileDict + app = Flask(__name__) + pub_key_dict = dict() # type: Dict[str, bytes] + message_dict = dict() # type: Dict[bytes, List[str]] + file_dict = dict() # type: Dict[bytes, List[bytes]] - @app.route("/") + @app.route('/') def index() -> str: """Return the URL token public key to contacts that know the .onion address.""" return url_token_public_key - @app.route("/contact_request/") + @app.route('/contact_request/') def contact_request(purp_onion_address: str) -> str: """Pass contact request to `c_req_manager`.""" queues[CONTACT_REQ_QUEUE].put(purp_onion_address) - return "OK" + return 'OK' - @app.route("//files/") + @app.route('//files/') def file_get(purp_url_token: str) -> Any: """Validate the URL token and return a queued file.""" return get_file(purp_url_token, queues, pub_key_dict, file_dict) @@ -126,7 +123,7 @@ def flask_server( # -------------------------------------------------------------------------- - log = logging.getLogger("werkzeug") + log = logging.getLogger('werkzeug') log.setLevel(logging.ERROR) if unit_test: @@ -136,15 +133,14 @@ def flask_server( return None -def get_message( - purp_url_token: str, - queues: "QueueDict", - pub_key_dict: "PubKeyDict", - message_dict: "MessageDict", -) -> str: +def get_message(purp_url_token: str, + queues: 'QueueDict', + pub_key_dict: 'PubKeyDict', + message_dict: 'MessageDict' + ) -> str: """Send queued messages to contact.""" if not validate_url_token(purp_url_token, queues, pub_key_dict): - return "" + return '' identified_onion_pub_key = pub_key_dict[purp_url_token] @@ -154,27 +150,21 @@ def get_message( packet, onion_pub_key = queues[M_TO_FLASK_QUEUE].get() message_dict.setdefault(onion_pub_key, []).append(packet) - if ( - identified_onion_pub_key in message_dict - and message_dict[identified_onion_pub_key] - ): - packets = "\n".join( - message_dict[identified_onion_pub_key] - ) # All messages for contact + if identified_onion_pub_key in message_dict and message_dict[identified_onion_pub_key]: + packets = '\n'.join(message_dict[identified_onion_pub_key]) # All messages for contact message_dict[identified_onion_pub_key] = [] return packets - return "" + return '' -def get_file( - purp_url_token: str, - queues: "QueueDict", - pub_key_dict: "PubKeyDict", - file_dict: "FileDict", -) -> Any: +def get_file(purp_url_token: str, + queues: 'QueueDict', + pub_key_dict: 'PubKeyDict', + file_dict: 'FileDict' + ) -> Any: """Send queued files to contact.""" if not validate_url_token(purp_url_token, queues, pub_key_dict): - return "" + return '' identified_onion_pub_key = pub_key_dict[purp_url_token] @@ -187,4 +177,4 @@ def get_file( mem.write(file_dict[identified_onion_pub_key].pop(0)) mem.seek(0) return send_file(mem, mimetype="application/octet-stream") - return "" + return '' diff --git a/src/relay/tcb.py b/src/relay/tcb.py index 4294953..c374601 100644 --- a/src/relay/tcb.py +++ b/src/relay/tcb.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
@@ -24,71 +24,50 @@ import typing from typing import Any, Dict, List, Tuple, Union -from src.common.encoding import bytes_to_int, pub_key_to_short_address -from src.common.encoding import int_to_bytes, b85encode +from src.common.encoding import bytes_to_int, pub_key_to_short_address +from src.common.encoding import int_to_bytes, b85encode from src.common.exceptions import SoftError -from src.common.misc import ignored, separate_header, split_byte_string -from src.common.output import rp_print -from src.common.statics import ( - COMMAND_DATAGRAM_HEADER, - DATAGRAM_HEADER_LENGTH, - DST_COMMAND_QUEUE, - DST_MESSAGE_QUEUE, - ENCODED_INTEGER_LENGTH, - FILE_DATAGRAM_HEADER, - F_TO_FLASK_QUEUE, - GATEWAY_QUEUE, - GROUP_ID_LENGTH, - GROUP_MSG_EXIT_GROUP_HEADER, - GROUP_MSG_INVITE_HEADER, - GROUP_MSG_JOIN_HEADER, - GROUP_MSG_MEMBER_ADD_HEADER, - GROUP_MSG_MEMBER_REM_HEADER, - LOCAL_KEY_DATAGRAM_HEADER, - MESSAGE_DATAGRAM_HEADER, - M_TO_FLASK_QUEUE, - ONION_SERVICE_PUBLIC_KEY_LENGTH, - ORIGIN_USER_HEADER, - PUBLIC_KEY_DATAGRAM_HEADER, - SRC_TO_RELAY_QUEUE, - UNENCRYPTED_DATAGRAM_HEADER, - UNIT_TEST_QUEUE, -) +from src.common.misc import ignored, separate_header, split_byte_string +from src.common.output import rp_print +from src.common.statics import (COMMAND_DATAGRAM_HEADER, DATAGRAM_HEADER_LENGTH, DST_COMMAND_QUEUE, + DST_MESSAGE_QUEUE, ENCODED_INTEGER_LENGTH, FILE_DATAGRAM_HEADER, F_TO_FLASK_QUEUE, + GATEWAY_QUEUE, GROUP_ID_LENGTH, GROUP_MSG_EXIT_GROUP_HEADER, GROUP_MSG_INVITE_HEADER, + GROUP_MSG_JOIN_HEADER, GROUP_MSG_MEMBER_ADD_HEADER, GROUP_MSG_MEMBER_REM_HEADER, + LOCAL_KEY_DATAGRAM_HEADER, MESSAGE_DATAGRAM_HEADER, M_TO_FLASK_QUEUE, + ONION_SERVICE_PUBLIC_KEY_LENGTH, ORIGIN_USER_HEADER, PUBLIC_KEY_DATAGRAM_HEADER, + SRC_TO_RELAY_QUEUE, UNENCRYPTED_DATAGRAM_HEADER, UNIT_TEST_QUEUE) if typing.TYPE_CHECKING: - from datetime import datetime - from multiprocessing import Queue + from datetime import datetime + from multiprocessing import Queue from src.common.gateway import Gateway - QueueDict = Dict[bytes, Queue[Any]] -def queue_to_flask( - packet: Union[bytes, str], - onion_pub_key: bytes, - flask_queue: "Queue[Tuple[Union[bytes, str], bytes]]", - ts: "datetime", - header: bytes, -) -> None: +def queue_to_flask(packet: Union[bytes, str], + onion_pub_key: bytes, + flask_queue: 'Queue[Tuple[Union[bytes, str], bytes]]', + ts: 'datetime', + header: bytes + ) -> None: """Put packet to flask queue and print message.""" - p_type = { - MESSAGE_DATAGRAM_HEADER: "Message ", - PUBLIC_KEY_DATAGRAM_HEADER: "Pub key ", - FILE_DATAGRAM_HEADER: "File ", - GROUP_MSG_INVITE_HEADER: "G invite ", - GROUP_MSG_JOIN_HEADER: "G join ", - GROUP_MSG_MEMBER_ADD_HEADER: "G add ", - GROUP_MSG_MEMBER_REM_HEADER: "G remove ", - GROUP_MSG_EXIT_GROUP_HEADER: "G exit ", - }[header] + p_type = {MESSAGE_DATAGRAM_HEADER: 'Message ', + PUBLIC_KEY_DATAGRAM_HEADER: 'Pub key ', + FILE_DATAGRAM_HEADER: 'File ', + GROUP_MSG_INVITE_HEADER: 'G invite ', + GROUP_MSG_JOIN_HEADER: 'G join ', + GROUP_MSG_MEMBER_ADD_HEADER: 'G add ', + GROUP_MSG_MEMBER_REM_HEADER: 'G remove ', + GROUP_MSG_EXIT_GROUP_HEADER: 'G exit '}[header] flask_queue.put((packet, onion_pub_key)) rp_print(f"{p_type} to contact {pub_key_to_short_address(onion_pub_key)}", ts) -def src_incoming( - queues: "QueueDict", gateway: "Gateway", unit_test: bool = False -) -> None: +def src_incoming(queues: 'QueueDict', + gateway: 'Gateway', + unit_test: bool = False + ) -> None: """\ Redirect datagrams received from Source Computer to appropriate queues. 
""" @@ -97,7 +76,7 @@ def src_incoming( while True: with ignored(EOFError, KeyboardInterrupt, SoftError): - ts, packet = load_packet_from_queue(queues, gateway) + ts, packet = load_packet_from_queue(queues, gateway) header, packet = separate_header(packet, DATAGRAM_HEADER_LENGTH) if header == UNENCRYPTED_DATAGRAM_HEADER: @@ -112,22 +91,20 @@ def src_incoming( elif header == FILE_DATAGRAM_HEADER: process_file_datagram(ts, packet, header, queues) - elif header in [ - GROUP_MSG_INVITE_HEADER, - GROUP_MSG_JOIN_HEADER, - GROUP_MSG_MEMBER_ADD_HEADER, - GROUP_MSG_MEMBER_REM_HEADER, - GROUP_MSG_EXIT_GROUP_HEADER, - ]: + elif header in [GROUP_MSG_INVITE_HEADER, + GROUP_MSG_JOIN_HEADER, + GROUP_MSG_MEMBER_ADD_HEADER, + GROUP_MSG_MEMBER_REM_HEADER, + GROUP_MSG_EXIT_GROUP_HEADER]: process_group_management_message(ts, packet, header, messages_to_flask) if unit_test: break -def load_packet_from_queue( - queues: "QueueDict", gateway: "Gateway" -) -> Tuple["datetime", bytes]: +def load_packet_from_queue(queues: 'QueueDict', + gateway: 'Gateway' + ) -> Tuple['datetime', bytes]: """Load packet from Source Computer. Perform error detection/correction. @@ -143,12 +120,14 @@ def load_packet_from_queue( return ts, packet -def process_command_datagram( - ts: "datetime", packet: bytes, header: bytes, queues: "QueueDict" -) -> None: +def process_command_datagram(ts: 'datetime', + packet: bytes, + header: bytes, + queues: 'QueueDict' + ) -> None: """Process command datagram.""" commands_to_dc = queues[DST_COMMAND_QUEUE] - ts_bytes = int_to_bytes(int(ts.strftime("%Y%m%d%H%M%S%f")[:-4])) + ts_bytes = int_to_bytes(int(ts.strftime("%Y%m%d%H%M%S%f")[:-4])) commands_to_dc.put(header + ts_bytes + packet) @@ -156,134 +135,114 @@ def process_command_datagram( rp_print(f"{p_type} to local Receiver", ts) -def process_message_datagram( - ts: "datetime", packet: bytes, header: bytes, queues: "QueueDict" -) -> None: +def process_message_datagram(ts: 'datetime', + packet: bytes, + header: bytes, + queues: 'QueueDict' + ) -> None: """Process message and public key datagram.""" - packets_to_dc = queues[DST_MESSAGE_QUEUE] + packets_to_dc = queues[DST_MESSAGE_QUEUE] messages_to_flask = queues[M_TO_FLASK_QUEUE] onion_pub_key, payload = separate_header(packet, ONION_SERVICE_PUBLIC_KEY_LENGTH) - packet_str = header.decode() + b85encode(payload) - ts_bytes = int_to_bytes(int(ts.strftime("%Y%m%d%H%M%S%f")[:-4])) + packet_str = header.decode() + b85encode(payload) + ts_bytes = int_to_bytes(int(ts.strftime("%Y%m%d%H%M%S%f")[:-4])) queue_to_flask(packet_str, onion_pub_key, messages_to_flask, ts, header) if header == MESSAGE_DATAGRAM_HEADER: - packets_to_dc.put( - header + ts_bytes + onion_pub_key + ORIGIN_USER_HEADER + payload - ) + packets_to_dc.put(header + ts_bytes + onion_pub_key + ORIGIN_USER_HEADER + payload) -def process_file_datagram( - ts: "datetime", packet: bytes, header: bytes, queues: "QueueDict" -) -> None: +def process_file_datagram(ts: 'datetime', + packet: bytes, + header: bytes, + queues: 'QueueDict' + ) -> None: """Process file datagram.""" - files_to_flask = queues[F_TO_FLASK_QUEUE] + files_to_flask = queues[F_TO_FLASK_QUEUE] no_contacts_b, payload = separate_header(packet, ENCODED_INTEGER_LENGTH) - no_contacts = bytes_to_int(no_contacts_b) - ser_accounts, file_ct = separate_header( - payload, no_contacts * ONION_SERVICE_PUBLIC_KEY_LENGTH - ) - pub_keys = split_byte_string(ser_accounts, item_len=ONION_SERVICE_PUBLIC_KEY_LENGTH) + no_contacts = bytes_to_int(no_contacts_b) + ser_accounts, file_ct = separate_header(payload, 
no_contacts * ONION_SERVICE_PUBLIC_KEY_LENGTH) + pub_keys = split_byte_string(ser_accounts, item_len=ONION_SERVICE_PUBLIC_KEY_LENGTH) for onion_pub_key in pub_keys: queue_to_flask(file_ct, onion_pub_key, files_to_flask, ts, header) -def process_group_management_message( - ts: "datetime", - packet: bytes, - header: bytes, - messages_to_flask: "Queue[Tuple[Union[bytes, str], bytes]]", -) -> None: +def process_group_management_message(ts: 'datetime', + packet: bytes, + header: bytes, + messages_to_flask: 'Queue[Tuple[Union[bytes, str], bytes]]' + ) -> None: """Parse and display group management message.""" - header_str = header.decode() + header_str = header.decode() group_id, packet = separate_header(packet, GROUP_ID_LENGTH) if header in [GROUP_MSG_INVITE_HEADER, GROUP_MSG_JOIN_HEADER]: pub_keys = split_byte_string(packet, ONION_SERVICE_PUBLIC_KEY_LENGTH) for onion_pub_key in pub_keys: - others = [k for k in pub_keys if k != onion_pub_key] - packet_str = header_str + b85encode(group_id + b"".join(others)) + others = [k for k in pub_keys if k != onion_pub_key] + packet_str = header_str + b85encode(group_id + b''.join(others)) queue_to_flask(packet_str, onion_pub_key, messages_to_flask, ts, header) elif header in [GROUP_MSG_MEMBER_ADD_HEADER, GROUP_MSG_MEMBER_REM_HEADER]: - first_list_len_b, packet = separate_header(packet, ENCODED_INTEGER_LENGTH) - first_list_length = bytes_to_int(first_list_len_b) - pub_keys = split_byte_string(packet, ONION_SERVICE_PUBLIC_KEY_LENGTH) + first_list_len_b, packet = separate_header(packet, ENCODED_INTEGER_LENGTH) + first_list_length = bytes_to_int(first_list_len_b) + pub_keys = split_byte_string(packet, ONION_SERVICE_PUBLIC_KEY_LENGTH) before_adding = remaining = pub_keys[:first_list_length] - new_in_group = removable = pub_keys[first_list_length:] + new_in_group = removable = pub_keys[first_list_length:] if header == GROUP_MSG_MEMBER_ADD_HEADER: - process_add_or_group_remove_member( - ts, - header, - header_str, - group_id, - messages_to_flask, - before_adding, - new_in_group, - ) + process_add_or_group_remove_member(ts, header, header_str, group_id, messages_to_flask, + before_adding, new_in_group) for onion_pub_key in new_in_group: - other_new = [k for k in new_in_group if k != onion_pub_key] - packet_str = GROUP_MSG_INVITE_HEADER.decode() + b85encode( - group_id + b"".join(other_new + before_adding) - ) + other_new = [k for k in new_in_group if k != onion_pub_key] + packet_str = (GROUP_MSG_INVITE_HEADER.decode() + + b85encode(group_id + b''.join(other_new + before_adding))) queue_to_flask(packet_str, onion_pub_key, messages_to_flask, ts, header) elif header == GROUP_MSG_MEMBER_REM_HEADER: - process_add_or_group_remove_member( - ts, - header, - header_str, - group_id, - messages_to_flask, - remaining, - removable, - ) + process_add_or_group_remove_member(ts, header, header_str, group_id, messages_to_flask, + remaining, removable) elif header == GROUP_MSG_EXIT_GROUP_HEADER: - process_group_exit_header( - ts, packet, header, header_str, group_id, messages_to_flask - ) + process_group_exit_header(ts, packet, header, header_str, group_id, messages_to_flask) -def process_add_or_group_remove_member( - ts: "datetime", - header: bytes, - header_str: str, - group_id: bytes, - messages_to_flask: "Queue[Tuple[Union[bytes, str], bytes]]", - remaining: List[bytes], - removable: List[bytes], -) -> None: +def process_add_or_group_remove_member(ts: 'datetime', + header: bytes, + header_str: str, + group_id: bytes, + messages_to_flask: 'Queue[Tuple[Union[bytes, str], 
bytes]]', + remaining: List[bytes], removable: List[bytes] + ) -> None: """Process group add or remove member packet.""" packet_str = header_str + b85encode(group_id + b"".join(removable)) for onion_pub_key in remaining: queue_to_flask(packet_str, onion_pub_key, messages_to_flask, ts, header) -def process_group_exit_header( - ts: "datetime", - packet: bytes, - header: bytes, - header_str: str, - group_id: bytes, - messages_to_flask: "Queue[Tuple[Union[bytes, str], bytes]]", -) -> None: +def process_group_exit_header(ts: 'datetime', + packet: bytes, + header: bytes, + header_str: str, + group_id: bytes, + messages_to_flask: 'Queue[Tuple[Union[bytes, str], bytes]]' + ) -> None: """Process group exit packet.""" - pub_keys = split_byte_string(packet, ONION_SERVICE_PUBLIC_KEY_LENGTH) + pub_keys = split_byte_string(packet, ONION_SERVICE_PUBLIC_KEY_LENGTH) packet_str = header_str + b85encode(group_id) for onion_pub_key in pub_keys: queue_to_flask(packet_str, onion_pub_key, messages_to_flask, ts, header) -def dst_outgoing( - queues: "QueueDict", gateway: "Gateway", unit_test: bool = False -) -> None: +def dst_outgoing(queues: 'QueueDict', + gateway: 'Gateway', + unit_test: bool = False + ) -> None: """Output packets from queues to Destination Computer. Commands (and local keys) to local Destination Computer have higher @@ -299,10 +258,10 @@ def dst_outgoing( if c_queue.qsize() == 0 and m_queue.qsize() == 0: time.sleep(0.01) - while c_queue.qsize(): + while c_queue.qsize() != 0: gateway.write(c_queue.get()) - if m_queue.qsize(): + if m_queue.qsize() != 0: gateway.write(m_queue.get()) if unit_test and queues[UNIT_TEST_QUEUE].qsize() > 0: diff --git a/src/transmitter/__init__.py b/src/transmitter/__init__.py index 6eb560e..833769a 100755 --- a/src/transmitter/__init__.py +++ b/src/transmitter/__init__.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. diff --git a/src/transmitter/commands.py b/src/transmitter/commands.py index 1e57772..ce29f06 100755 --- a/src/transmitter/commands.py +++ b/src/transmitter/commands.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
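
The relay-side processors above (process_command_datagram, process_message_datagram) both serialize the packet timestamp with int_to_bytes(int(ts.strftime("%Y%m%d%H%M%S%f")[:-4])), i.e. the datetime is flattened to a centisecond-precision integer before being prepended to the packet. A minimal sketch of that round trip, assuming int_to_bytes packs the value into eight big-endian bytes (ENCODED_INTEGER_LENGTH); the decode helper is illustrative only and not part of TFC:

    from datetime import datetime

    def int_to_bytes(value: int) -> bytes:
        """Assumed encoder: fixed-width 8-byte big-endian integer."""
        return value.to_bytes(8, byteorder="big")

    def encode_ts(ts: datetime) -> bytes:
        # "%Y%m%d%H%M%S%f" yields 20 digits; [:-4] drops the last four microsecond
        # digits, leaving YYYYMMDDHHMMSS plus two centisecond digits.
        return int_to_bytes(int(ts.strftime("%Y%m%d%H%M%S%f")[:-4]))

    def decode_ts(ts_bytes: bytes) -> datetime:
        # Hypothetical inverse for illustration: strptime right-pads %f to microseconds.
        return datetime.strptime(str(int.from_bytes(ts_bytes, "big")), "%Y%m%d%H%M%S%f")

    ts = datetime(2020, 1, 2, 3, 4, 5, 678900)
    assert decode_ts(encode_ts(ts)) == datetime(2020, 1, 2, 3, 4, 5, 670000)
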
@@ -26,277 +26,94 @@ import time import typing from multiprocessing import Queue -from typing import Any, Dict, List, Tuple, Union +from typing import Any, Dict, List, Tuple, Union -from src.common.db_logs import ( - access_logs, - change_log_db_key, - remove_logs, - replace_log_db, -) -from src.common.db_keys import KeyList -from src.common.encoding import ( - b58decode, - b58encode, - bool_to_bytes, - int_to_bytes, - onion_address_to_pub_key, -) +from src.common.db_logs import access_logs, change_log_db_key, remove_logs, replace_log_db +from src.common.db_keys import KeyList +from src.common.encoding import b58decode, b58encode, bool_to_bytes, int_to_bytes, onion_address_to_pub_key from src.common.exceptions import CriticalError, SoftError -from src.common.input import yes -from src.common.misc import ( - get_terminal_width, - ignored, - reset_terminal, - validate_onion_addr, -) -from src.common.output import clear_screen, m_print, phase, print_on_previous_line -from src.common.statics import ( - CH_MASTER_KEY, - CH_SETTING, - CLEAR, - CLEAR_SCREEN, - COMMAND_PACKET_QUEUE, - DONE, - EXIT_PROGRAM, - GROUP_ID_ENC_LENGTH, - KDB_HALT_ACK_HEADER, - KDB_M_KEY_CHANGE_HALT_HEADER, - KDB_UPDATE_SIZE_HEADER, - KEX_STATUS_UNVERIFIED, - KEX_STATUS_VERIFIED, - KEY_MANAGEMENT_QUEUE, - KEY_MGMT_ACK_QUEUE, - LOCAL_TESTING_PACKET_DELAY, - LOGFILE_MASKING_QUEUE, - LOG_DISPLAY, - LOG_EXPORT, - LOG_REMOVE, - MESSAGE, - ONION_ADDRESS_LENGTH, - RELAY_PACKET_QUEUE, - RESET_SCREEN, - RX, - SENDER_MODE_QUEUE, - TRAFFIC_MASKING_QUEUE, - TX, - UNENCRYPTED_BAUDRATE, - UNENCRYPTED_DATAGRAM_HEADER, - UNENCRYPTED_EC_RATIO, - UNENCRYPTED_EXIT_COMMAND, - UNENCRYPTED_MANAGE_CONTACT_REQ, - UNENCRYPTED_SCREEN_CLEAR, - UNENCRYPTED_SCREEN_RESET, - UNENCRYPTED_WIPE_COMMAND, - US_BYTE, - VERSION, - WIN_ACTIVITY, - WIN_SELECT, - WIN_TYPE_GROUP, - WIN_UID_FILE, - WIN_UID_COMMAND, - WIPE_USR_DATA, -) +from src.common.input import yes +from src.common.misc import get_terminal_width, ignored, reset_terminal, validate_onion_addr +from src.common.output import clear_screen, m_print, phase, print_on_previous_line +from src.common.statics import (CH_MASTER_KEY, CH_SETTING, CLEAR, CLEAR_SCREEN, COMMAND_PACKET_QUEUE, DONE, + EXIT_PROGRAM, GROUP_ID_ENC_LENGTH, KDB_HALT_ACK_HEADER, KDB_M_KEY_CHANGE_HALT_HEADER, + KDB_UPDATE_SIZE_HEADER, KEX_STATUS_UNVERIFIED, KEX_STATUS_VERIFIED, + KEY_MANAGEMENT_QUEUE, KEY_MGMT_ACK_QUEUE, LOCAL_TESTING_PACKET_DELAY, + LOGFILE_MASKING_QUEUE, LOG_DISPLAY, LOG_EXPORT, LOG_REMOVE, MESSAGE, + ONION_ADDRESS_LENGTH, RELAY_PACKET_QUEUE, RESET_SCREEN, RX, SENDER_MODE_QUEUE, + TRAFFIC_MASKING_QUEUE, TX, UNENCRYPTED_BAUDRATE, UNENCRYPTED_DATAGRAM_HEADER, + UNENCRYPTED_EC_RATIO, UNENCRYPTED_EXIT_COMMAND, UNENCRYPTED_MANAGE_CONTACT_REQ, + UNENCRYPTED_SCREEN_CLEAR, UNENCRYPTED_SCREEN_RESET, UNENCRYPTED_WIPE_COMMAND, + US_BYTE, VERSION, WIN_ACTIVITY, WIN_SELECT, WIN_TYPE_GROUP, WIN_UID_COMMAND, + WIN_UID_FILE, WIPE_USR_DATA) -from src.transmitter.commands_g import process_group_command -from src.transmitter.contact import ( - add_new_contact, - change_nick, - contact_setting, - remove_contact, -) -from src.transmitter.key_exchanges import ( - export_onion_service_data, - new_local_key, - rxp_load_psk, - verify_fingerprints, -) -from src.transmitter.packet import ( - cancel_packet, - queue_command, - queue_message, - queue_to_nc, -) -from src.transmitter.user_input import UserInput -from src.transmitter.windows import select_window +from src.transmitter.commands_g import process_group_command +from 
src.transmitter.contact import add_new_contact, change_nick, contact_setting, remove_contact +from src.transmitter.key_exchanges import export_onion_service_data, new_local_key, rxp_load_psk, verify_fingerprints +from src.transmitter.packet import cancel_packet, queue_command, queue_message, queue_to_nc +from src.transmitter.user_input import UserInput +from src.transmitter.windows import select_window if typing.TYPE_CHECKING: - from src.common.db_contacts import ContactList - from src.common.db_groups import GroupList + from src.common.db_contacts import ContactList + from src.common.db_groups import GroupList from src.common.db_masterkey import MasterKey - from src.common.db_onion import OnionService - from src.common.db_settings import Settings - from src.common.gateway import Gateway + from src.common.db_onion import OnionService + from src.common.db_settings import Settings + from src.common.gateway import Gateway from src.transmitter.windows import TxWindow - QueueDict = Dict[bytes, Queue[Any]] -def process_command( - user_input: "UserInput", - window: "TxWindow", - contact_list: "ContactList", - group_list: "GroupList", - settings: "Settings", - queues: "QueueDict", - master_key: "MasterKey", - onion_service: "OnionService", - gateway: "Gateway", -) -> None: +def process_command(user_input: 'UserInput', + window: 'TxWindow', + contact_list: 'ContactList', + group_list: 'GroupList', + settings: 'Settings', + queues: 'QueueDict', + master_key: 'MasterKey', + onion_service: 'OnionService', + gateway: 'Gateway' + ) -> None: """\ Select function based on the first keyword of the issued command, and pass relevant parameters to it. """ - - d = { - "about": (print_about,), - "add": ( - add_new_contact, - contact_list, - group_list, - settings, - queues, - onion_service, - ), - "cf": (cancel_packet, user_input, window, settings, queues), - "cm": (cancel_packet, user_input, window, settings, queues), - "clear": (clear_screens, user_input, window, settings, queues), - "cmd": (rxp_show_sys_win, user_input, window, settings, queues), - "connect": ( - send_onion_service_key, - contact_list, - settings, - onion_service, - gateway, - ), - "exit": (exit_tfc, settings, queues, gateway), - "export": ( - log_command, - user_input, - window, - contact_list, - group_list, - settings, - queues, - master_key, - ), - "fw": (rxp_show_sys_win, user_input, window, settings, queues), - "group": ( - process_group_command, - user_input, - contact_list, - group_list, - settings, - queues, - master_key, - ), - "help": (print_help, settings), - "history": ( - log_command, - user_input, - window, - contact_list, - group_list, - settings, - queues, - master_key, - ), - "localkey": (new_local_key, contact_list, settings, queues,), - "logging": ( - contact_setting, - user_input, - window, - contact_list, - group_list, - settings, - queues, - ), - "msg": ( - select_window, - user_input, - window, - settings, - queues, - onion_service, - gateway, - ), - "names": (print_recipients, contact_list, group_list,), - "nick": ( - change_nick, - user_input, - window, - contact_list, - group_list, - settings, - queues, - ), - "notify": ( - contact_setting, - user_input, - window, - contact_list, - group_list, - settings, - queues, - ), - "passwd": ( - change_master_key, - user_input, - contact_list, - group_list, - settings, - queues, - master_key, - onion_service, - ), - "psk": (rxp_load_psk, window, contact_list, settings, queues), - "reset": (clear_screens, user_input, window, settings, queues), - "rm": ( - remove_contact, - 
user_input, - window, - contact_list, - group_list, - settings, - queues, - master_key, - ), - "rmlogs": ( - remove_log, - user_input, - contact_list, - group_list, - settings, - queues, - master_key, - ), - "set": ( - change_setting, - user_input, - window, - contact_list, - group_list, - settings, - queues, - master_key, - gateway, - ), - "settings": (print_settings, settings, gateway), - "store": ( - contact_setting, - user_input, - window, - contact_list, - group_list, - settings, - queues, - ), - "unread": (rxp_display_unread, settings, queues), - "verify": (verify, window, contact_list), - "whisper": (whisper, user_input, window, settings, queues), - "whois": (whois, user_input, contact_list, group_list), - "wipe": (wipe, settings, queues, gateway), - } # type: Dict[str, Any] + # Keyword Function to run ( Parameters ) + # ----------------------------------------------------------------------------------------------------------------------------------------- + d = {'about': (print_about, ), + 'add': (add_new_contact, contact_list, group_list, settings, queues, onion_service ), + 'cf': (cancel_packet, user_input, window, settings, queues ), + 'cm': (cancel_packet, user_input, window, settings, queues ), + 'clear': (clear_screens, user_input, window, settings, queues ), + 'cmd': (rxp_show_sys_win, user_input, window, settings, queues ), + 'connect': (send_onion_service_key, contact_list, settings, onion_service, gateway), + 'exit': (exit_tfc, settings, queues, gateway), + 'export': (log_command, user_input, window, contact_list, group_list, settings, queues, master_key ), + 'fw': (rxp_show_sys_win, user_input, window, settings, queues ), + 'group': (process_group_command, user_input, contact_list, group_list, settings, queues, master_key ), + 'help': (print_help, settings ), + 'history': (log_command, user_input, window, contact_list, group_list, settings, queues, master_key ), + 'localkey': (new_local_key, contact_list, settings, queues, ), + 'logging': (contact_setting, user_input, window, contact_list, group_list, settings, queues ), + 'msg': (select_window, user_input, window, settings, queues, onion_service, gateway), + 'names': (print_recipients, contact_list, group_list, ), + 'nick': (change_nick, user_input, window, contact_list, group_list, settings, queues ), + 'notify': (contact_setting, user_input, window, contact_list, group_list, settings, queues ), + 'passwd': (change_master_key, user_input, contact_list, group_list, settings, queues, master_key, onion_service ), + 'psk': (rxp_load_psk, window, contact_list, settings, queues ), + 'reset': (clear_screens, user_input, window, settings, queues ), + 'rm': (remove_contact, user_input, window, contact_list, group_list, settings, queues, master_key ), + 'rmlogs': (remove_log, user_input, contact_list, group_list, settings, queues, master_key ), + 'set': (change_setting, user_input, window, contact_list, group_list, settings, queues, master_key, gateway), + 'settings': (print_settings, settings, gateway), + 'store': (contact_setting, user_input, window, contact_list, group_list, settings, queues ), + 'unread': (rxp_display_unread, settings, queues ), + 'verify': (verify, window, contact_list ), + 'whisper': (whisper, user_input, window, settings, queues ), + 'whois': (whois, user_input, contact_list, group_list ), + 'wipe': (wipe, settings, queues, gateway) + } # type: Dict[str, Any] try: cmd_key = user_input.plaintext.split()[0] @@ -308,7 +125,7 @@ def process_command( except KeyError: raise SoftError(f"Error: Invalid command 
'{cmd_key}'.", head_clear=True) - func = from_dict[0] + func = from_dict[0] parameters = from_dict[1:] func(*parameters) @@ -316,19 +133,16 @@ def process_command( def print_about() -> None: """Print URLs that direct to TFC's project site and documentation.""" clear_screen() - print( - f"\n Tinfoil Chat {VERSION}\n\n" - " Website: https://github.com/maqp/tfc/\n" - " Wikipage: https://github.com/maqp/tfc/wiki\n" - ) + print(f"\n Tinfoil Chat {VERSION}\n\n" + " Website: https://github.com/maqp/tfc/\n" + " Wikipage: https://github.com/maqp/tfc/wiki\n") -def clear_screens( - user_input: "UserInput", - window: "TxWindow", - settings: "Settings", - queues: "QueueDict", -) -> None: +def clear_screens(user_input: 'UserInput', + window: 'TxWindow', + settings: 'Settings', + queues: 'QueueDict' + ) -> None: """Clear/reset screen of Source, Destination, and Networked Computer. Only send an unencrypted command to Networked Computer if traffic @@ -358,12 +172,11 @@ def clear_screens( reset_terminal() -def rxp_show_sys_win( - user_input: "UserInput", - window: "TxWindow", - settings: "Settings", - queues: "QueueDict", -) -> None: +def rxp_show_sys_win(user_input: 'UserInput', + window: 'TxWindow', + settings: 'Settings', + queues: 'QueueDict', + ) -> None: """\ Display a system window on Receiver Program until the user presses Enter. @@ -376,18 +189,14 @@ def rxp_show_sys_win( progress of file transmission from contacts that have traffic masking enabled. """ - cmd = user_input.plaintext.split()[0] + cmd = user_input.plaintext.split()[0] win_uid = dict(cmd=WIN_UID_COMMAND, fw=WIN_UID_FILE)[cmd] command = WIN_SELECT + win_uid queue_command(command, settings, queues) try: - m_print( - f" returns Receiver to {window.name}'s window", - manual_proceed=True, - box=True, - ) + m_print(f" returns Receiver to {window.name}'s window", manual_proceed=True, box=True) except (EOFError, KeyboardInterrupt): pass @@ -397,7 +206,10 @@ def rxp_show_sys_win( queue_command(command, settings, queues) -def exit_tfc(settings: "Settings", queues: "QueueDict", gateway: "Gateway") -> None: +def exit_tfc(settings: 'Settings', + queues: 'QueueDict', + gateway: 'Gateway' + ) -> None: """Exit TFC on all three computers. To exit TFC as fast as possible, this function starts by clearing @@ -431,15 +243,14 @@ def exit_tfc(settings: "Settings", queues: "QueueDict", gateway: "Gateway") -> N queue_to_nc(relay_command, queues[RELAY_PACKET_QUEUE]) -def log_command( - user_input: "UserInput", - window: "TxWindow", - contact_list: "ContactList", - group_list: "GroupList", - settings: "Settings", - queues: "QueueDict", - master_key: "MasterKey", -) -> None: +def log_command(user_input: 'UserInput', + window: 'TxWindow', + contact_list: 'ContactList', + group_list: 'GroupList', + settings: 'Settings', + queues: 'QueueDict', + master_key: 'MasterKey' + ) -> None: """Display message logs or export them to plaintext file on TCBs. Transmitter Program processes sent, Receiver Program sent and @@ -454,10 +265,9 @@ def log_command( password to ensure no unauthorized user who gains momentary access to the system can the export logs from the database. 
""" - cmd = user_input.plaintext.split()[0] - export, header, _ = dict( - export=(True, LOG_EXPORT, "export"), history=(False, LOG_DISPLAY, "view") - )[cmd] + cmd = user_input.plaintext.split()[0] + export, header = dict(export =(True, LOG_EXPORT), + history=(False, LOG_DISPLAY))[cmd] try: msg_to_load = int(user_input.plaintext.split()[1]) @@ -471,40 +281,24 @@ def log_command( except struct.error: raise SoftError("Error: Invalid number of messages.", head_clear=True) - if export and not yes( - f"Export logs for '{window.name}' in plaintext?", abort=False - ): + if export and not yes(f"Export logs for '{window.name}' in plaintext?", abort=False): raise SoftError("Log file export aborted.", tail_clear=True, head=0, delay=1) - if settings.ask_password_for_log_access: - authenticated = master_key.authenticate_action() - else: - authenticated = True + authenticated = master_key.authenticate_action() if settings.ask_password_for_log_access else True if authenticated: queue_command(command, settings, queues) - access_logs( - window, - contact_list, - group_list, - settings, - master_key, - msg_to_load, - export=export, - ) + access_logs(window, contact_list, group_list, settings, master_key, msg_to_load, export=export) if export: - raise SoftError( - f"Exported log file of {window.type} '{window.name}'.", head_clear=True - ) + raise SoftError(f"Exported log file of {window.type} '{window.name}'.", head_clear=True) -def send_onion_service_key( - contact_list: "ContactList", - settings: "Settings", - onion_service: "OnionService", - gateway: "Gateway", -) -> None: +def send_onion_service_key(contact_list: 'ContactList', + settings: 'Settings', + onion_service: 'OnionService', + gateway: 'Gateway' + ) -> None: """Resend Onion Service key to Relay Program on Networked Computer. This command is used in cases where Relay Program had to be @@ -512,34 +306,20 @@ def send_onion_service_key( """ try: if settings.traffic_masking: - m_print( - [ - "Warning!", - "Exporting Onion Service data to Networked Computer ", - "during traffic masking can reveal to an adversary ", - "TFC is being used at the moment. You should only do ", - "this if you've had to restart the Relay Program.", - ], - bold=True, - head=1, - tail=1, - ) + m_print(["Warning!", + "Exporting Onion Service data to Networked Computer ", + "during traffic masking can reveal to an adversary ", + "TFC is being used at the moment. You should only do ", + "this if you've had to restart the Relay Program."], bold=True, head=1, tail=1) if not yes("Proceed with the Onion Service data export?", abort=False): - raise SoftError( - "Onion Service data export canceled.", - tail_clear=True, - delay=1, - head=0, - ) + raise SoftError("Onion Service data export canceled.", tail_clear=True, delay=1, head=0) export_onion_service_data(contact_list, settings, onion_service, gateway) except (EOFError, KeyboardInterrupt): - raise SoftError( - "Onion Service data export canceled.", tail_clear=True, delay=1, head=2 - ) + raise SoftError("Onion Service data export canceled.", tail_clear=True, delay=1, head=2) -def print_help(settings: "Settings") -> None: +def print_help(settings: 'Settings') -> None: """Print the list of commands.""" def help_printer(tuple_list: List[Union[Tuple[str, str, bool]]]) -> None: @@ -549,134 +329,109 @@ def print_help(settings: "Settings") -> None: Depending on whether traffic masking is enabled, some commands are either displayed or hidden. 
""" - len_longest_command = ( - max(len(t[0]) for t in tuple_list) + 1 - ) # Add one for spacing - wrapper = textwrap.TextWrapper( - width=max(1, terminal_width - len_longest_command) - ) + len_longest_command = max(len(t[0]) for t in tuple_list) + 1 # Add one for spacing + wrapper = textwrap.TextWrapper(width=max(1, terminal_width - len_longest_command)) for help_cmd, description, display in tuple_list: if not display: continue - desc_lines = wrapper.fill(description).split("\n") - desc_indent = (len_longest_command - len(help_cmd)) * " " + desc_lines = wrapper.fill(description).split('\n') + desc_indent = (len_longest_command - len(help_cmd)) * ' ' print(help_cmd + desc_indent + desc_lines[0]) # Print wrapped description lines with indent if len(desc_lines) > 1: for line in desc_lines[1:]: - print(len_longest_command * " " + line) - print("") + print(len_longest_command * ' ' + line) + print('') # ------------------------------------------------------------------------------------------------------------------ y_tm = settings.traffic_masking n_tm = not y_tm - common_commands = [ - ("/about", "Show links to project resources", True), - ("/add", "Add new contact", n_tm), - ("/cf", "Cancel file transmission to active contact/group", y_tm), - ("/cm", "Cancel message transmission to active contact/group", True), - ("/clear, ' '", "Clear TFC screens", True), - ("/cmd, '//'", "Display command window on Receiver", True), - ("/connect", "Resend Onion Service data to Relay", True), - ("/exit", "Exit TFC on all three computers", True), - ("/export (n)", "Export (n) messages from recipient's log file", True), - ("/file", "Send file to active contact/group", True), - ("/fw", "Display file reception window on Receiver", y_tm), - ("/help", "Display this list of commands", True), - ("/history (n)", "Print (n) messages from recipient's log file", True), - ("/localkey", "Generate new local key pair", n_tm), - ( - "/logging {on,off}(' all')", - "Change message log setting (for all contacts)", - True, - ), - ("/msg {A,N,G}", "Change recipient to Account, Nick, or Group", n_tm), - ("/names", "List contacts and groups", True), - ("/nick N", "Change nickname of active recipient/group to N", True), - ( - "/notify {on,off} (' all')", - "Change notification settings (for all contacts)", - True, - ), - ("/passwd {tx,rx}", "Change master password on target system", n_tm), - ("/psk", "Open PSK import dialog on Receiver", n_tm), - ("/reset", "Reset ephemeral session log for active window", True), - ("/rm {A,N}", "Remove contact specified by account A or nick N", n_tm), - ("/rmlogs {A,N}", "Remove log entries for account A or nick N", True), - ("/set S V", "Change setting S to value V", True), - ("/settings", "List setting names, values and descriptions", True), - ("/store {on,off} (' all')", "Change file reception (for all contacts)", True), - ("/unread, ' '", "List windows with unread messages on Receiver", True), - ("/verify", "Verify fingerprints with active contact", True), - ("/whisper M", "Send message M, asking it not to be logged", True), - ("/whois {A,N}", "Check which A corresponds to N or vice versa", True), - ("/wipe", "Wipe all TFC user data and power off systems", True), - ("Shift + PgUp/PgDn", "Scroll terminal up/down", True), - ] + common_commands = [("/about", "Show links to project resources", True), + ("/add", "Add new contact", n_tm), + ("/cf", "Cancel file transmission to active contact/group", y_tm), + ("/cm", "Cancel message transmission to active contact/group", True), + ("/clear, ' '", "Clear 
TFC screens", True), + ("/cmd, '//'", "Display command window on Receiver", True), + ("/connect", "Resend Onion Service data to Relay", True), + ("/exit", "Exit TFC on all three computers", True), + ("/export (n)", "Export (n) messages from recipient's log file", True), + ("/file", "Send file to active contact/group", True), + ("/fw", "Display file reception window on Receiver", y_tm), + ("/help", "Display this list of commands", True), + ("/history (n)", "Print (n) messages from recipient's log file", True), + ("/localkey", "Generate new local key pair", n_tm), + ("/logging {on,off}(' all')", "Change message log setting (for all contacts)", True), + ("/msg {A,N,G}", "Change recipient to Account, Nick, or Group", n_tm), + ("/names", "List contacts and groups", True), + ("/nick N", "Change nickname of active recipient/group to N", True), + ("/notify {on,off} (' all')", "Change notification settings (for all contacts)", True), + ("/passwd {tx,rx}", "Change master password on target system", n_tm), + ("/psk", "Open PSK import dialog on Receiver", n_tm), + ("/reset", "Reset ephemeral session log for active window", True), + ("/rm {A,N}", "Remove contact specified by account A or nick N", n_tm), + ("/rmlogs {A,N}", "Remove log entries for account A or nick N", True), + ("/set S V", "Change setting S to value V", True), + ("/settings", "List setting names, values and descriptions", True), + ("/store {on,off} (' all')", "Change file reception (for all contacts)", True), + ("/unread, ' '", "List windows with unread messages on Receiver", True), + ("/verify", "Verify fingerprints with active contact", True), + ("/whisper M", "Send message M, asking it not to be logged", True), + ("/whois {A,N}", "Check which A corresponds to N or vice versa", True), + ("/wipe", "Wipe all TFC user data and power off systems", True), + ("Shift + PgUp/PgDn", "Scroll terminal up/down", True)] - group_commands = [ - ("/group create G A₁..Aₙ", "Create group G and add accounts A₁..Aₙ", n_tm), - ( - "/group join ID G A₁..Aₙ", - "Join group ID, call it G and add accounts A₁..Aₙ", - n_tm, - ), - ("/group add G A₁..Aₙ", "Add accounts A₁..Aₙ to group G", n_tm), - ("/group rm G A₁..Aₙ", "Remove accounts A₁..Aₙ from group G", n_tm), - ("/group rm G", "Remove group G", n_tm), - ] + group_commands = [("/group create G A₁..Aₙ", "Create group G and add accounts A₁..Aₙ", n_tm), + ("/group join ID G A₁..Aₙ", "Join group ID, call it G and add accounts A₁..Aₙ", n_tm), + ("/group add G A₁..Aₙ", "Add accounts A₁..Aₙ to group G", n_tm), + ("/group rm G A₁..Aₙ", "Remove accounts A₁..Aₙ from group G", n_tm), + ("/group rm G", "Remove group G", n_tm)] terminal_width = get_terminal_width() clear_screen() print(textwrap.fill("List of commands:", width=terminal_width)) - print("") + print('') help_printer(common_commands) - print(terminal_width * "─") + print(terminal_width * '─') if settings.traffic_masking: - print("") + print('') else: print(textwrap.fill("Group management:", width=terminal_width)) - print("") + print('') help_printer(group_commands) - print(terminal_width * "─" + "\n") + print(terminal_width * '─' + '\n') -def print_recipients(contact_list: "ContactList", group_list: "GroupList") -> None: +def print_recipients(contact_list: 'ContactList', group_list: 'GroupList') -> None: """Print the list of contacts and groups.""" contact_list.print_contacts() group_list.print_groups() -def change_master_key( - user_input: "UserInput", - contact_list: "ContactList", - group_list: "GroupList", - settings: "Settings", - queues: "QueueDict", 
- master_key: "MasterKey", - onion_service: "OnionService", -) -> None: +def change_master_key(user_input: 'UserInput', + contact_list: 'ContactList', + group_list: 'GroupList', + settings: 'Settings', + queues: 'QueueDict', + master_key: 'MasterKey', + onion_service: 'OnionService' + ) -> None: """Change the master key on Transmitter/Receiver Program.""" if settings.traffic_masking: - raise SoftError( - "Error: Command is disabled during traffic masking.", head_clear=True - ) + raise SoftError("Error: Command is disabled during traffic masking.", head_clear=True) try: device = user_input.plaintext.split()[1].lower() except IndexError: - raise SoftError( - f"Error: No target-system ('{TX}' or '{RX}') specified.", head_clear=True - ) + raise SoftError(f"Error: No target-system ('{TX}' or '{RX}') specified.", head_clear=True) if device not in [TX, RX]: raise SoftError(f"Error: Invalid target system '{device}'.", head_clear=True) @@ -699,16 +454,14 @@ def change_master_key( old_master_key = master_key.master_key[:] # Create new master key but do not store new master key data into any database. - new_master_key = master_key.master_key = master_key.new_master_key( - replace=False - ) + new_master_key = master_key.master_key = master_key.new_master_key(replace=False) phase("Re-encrypting databases") # Update encryption keys for databases - contact_list.database.database_key = new_master_key - key_list.database.database_key = new_master_key - group_list.database.database_key = new_master_key - settings.database.database_key = new_master_key + contact_list.database.database_key = new_master_key + key_list.database.database_key = new_master_key + group_list.database.database_key = new_master_key + settings.database.database_key = new_master_key onion_service.database.database_key = new_master_key # Create temp databases for each database, do not replace original. 
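
The reformatted process_command table earlier in this hunk maps each command keyword to a tuple whose first element is the handler and whose remaining elements are the arguments it is invoked with (func = from_dict[0]; parameters = from_dict[1:]; func(*parameters)). A minimal standalone sketch of that dispatch pattern, with simplified stand-in handlers and ValueError in place of TFC's SoftError:

    from typing import Any, Dict

    def print_about() -> None:
        """Stand-in for a zero-argument handler."""
        print("about")

    def clear_screens(plaintext: str, window: str) -> None:
        """Stand-in for a handler that receives pre-bound state objects."""
        print(f"clearing window '{window}' for command '{plaintext}'")

    def dispatch_command(plaintext: str, window: str) -> None:
        """Select the handler by the first keyword, then call it with its bound parameters."""
        d = {'about': (print_about, ),
             'clear': (clear_screens, plaintext, window)}  # type: Dict[str, Any]

        try:
            cmd_key = plaintext.split()[0]
        except IndexError:
            raise ValueError("Error: Invalid command.")

        try:
            from_dict = d[cmd_key]
        except KeyError:
            raise ValueError(f"Error: Invalid command '{cmd_key}'.")

        func       = from_dict[0]
        parameters = from_dict[1:]
        func(*parameters)

    dispatch_command("clear", window="file_window")  # -> clearing window 'file_window' for command 'clear'
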
@@ -741,16 +494,10 @@ def change_master_key( wait_for_key_db_ack(new_master_key, queues) phase(DONE) - m_print( - "Master key successfully changed.", - bold=True, - tail_clear=True, - delay=1, - head=1, - ) + m_print("Master key successfully changed.", bold=True, tail_clear=True, delay=1, head=1) -def wait_for_key_db_halt(queues: "QueueDict") -> None: +def wait_for_key_db_halt(queues: 'QueueDict') -> None: """Wait for the key database to acknowledge it has halted output of packets.""" while not queues[KEY_MGMT_ACK_QUEUE].qsize(): time.sleep(0.001) @@ -758,7 +505,7 @@ def wait_for_key_db_halt(queues: "QueueDict") -> None: raise SoftError("Error: Key database returned wrong signal.") -def wait_for_key_db_ack(new_master_key: bytes, queues: "QueueDict") -> None: +def wait_for_key_db_ack(new_master_key: bytes, queues: 'QueueDict') -> None: """Wait for the key database to acknowledge it has replaced the master key.""" while not queues[KEY_MGMT_ACK_QUEUE].qsize(): time.sleep(0.001) @@ -766,14 +513,13 @@ def wait_for_key_db_ack(new_master_key: bytes, queues: "QueueDict") -> None: raise CriticalError("Key database failed to install new master key.") -def remove_log( - user_input: "UserInput", - contact_list: "ContactList", - group_list: "GroupList", - settings: "Settings", - queues: "QueueDict", - master_key: "MasterKey", -) -> None: +def remove_log(user_input: 'UserInput', + contact_list: 'ContactList', + group_list: 'GroupList', + settings: 'Settings', + queues: 'QueueDict', + master_key: 'MasterKey' + ) -> None: """Remove log entries for contact or group.""" try: selection = user_input.plaintext.split()[1] @@ -792,9 +538,10 @@ def remove_log( remove_logs(contact_list, group_list, settings, master_key, selector) -def determine_selector( - selection: str, contact_list: "ContactList", group_list: "GroupList" -) -> bytes: +def determine_selector(selection: str, + contact_list: 'ContactList', + group_list: 'GroupList' + ) -> bytes: """Determine selector (group ID or Onion Service public key).""" if selection in contact_list.contact_selectors(): selector = contact_list.get_contact_by_address_or_nick(selection).onion_pub_key @@ -819,16 +566,15 @@ def determine_selector( return selector -def change_setting( - user_input: "UserInput", - window: "TxWindow", - contact_list: "ContactList", - group_list: "GroupList", - settings: "Settings", - queues: "QueueDict", - master_key: "MasterKey", - gateway: "Gateway", -) -> None: +def change_setting(user_input: 'UserInput', + window: 'TxWindow', + contact_list: 'ContactList', + group_list: 'GroupList', + settings: 'Settings', + queues: 'QueueDict', + master_key: 'MasterKey', + gateway: 'Gateway' + ) -> None: """Change setting on Transmitter and Receiver Program.""" # Validate the KV-pair try: @@ -844,65 +590,43 @@ def change_setting( except IndexError: raise SoftError("Error: No value for setting specified.", head_clear=True) - relay_settings = dict( - serial_error_correction=UNENCRYPTED_EC_RATIO, - serial_baudrate=UNENCRYPTED_BAUDRATE, - allow_contact_requests=UNENCRYPTED_MANAGE_CONTACT_REQ, - ) # type: Dict[str, bytes] + relay_settings = dict(serial_error_correction=UNENCRYPTED_EC_RATIO, + serial_baudrate =UNENCRYPTED_BAUDRATE, + allow_contact_requests =UNENCRYPTED_MANAGE_CONTACT_REQ) # type: Dict[str, bytes] check_setting_change_conditions(setting, settings, relay_settings, master_key) - change_setting_value( - setting, - value, - relay_settings, - queues, - contact_list, - group_list, - settings, - gateway, - ) + change_setting_value(setting, value, 
relay_settings, queues, contact_list, group_list, settings, gateway) - propagate_setting_effects( - setting, queues, contact_list, group_list, settings, window - ) + propagate_setting_effects(setting, queues, contact_list, group_list, settings, window) -def check_setting_change_conditions( - setting: str, - settings: "Settings", - relay_settings: Dict[str, bytes], - master_key: "MasterKey", -) -> None: +def check_setting_change_conditions(setting: str, + settings: 'Settings', + relay_settings: Dict[str, bytes], + master_key: 'MasterKey' + ) -> None: """Check if the setting can be changed.""" - if settings.traffic_masking and ( - setting in relay_settings or setting == "max_number_of_contacts" - ): - raise SoftError( - "Error: Can't change this setting during traffic masking.", head_clear=True - ) + if settings.traffic_masking and (setting in relay_settings or setting == "max_number_of_contacts"): + raise SoftError("Error: Can't change this setting during traffic masking.", head_clear=True) if setting in ["use_serial_usb_adapter", "built_in_serial_interface"]: - raise SoftError( - "Error: Serial interface setting can only be changed manually.", - head_clear=True, - ) + raise SoftError("Error: Serial interface setting can only be changed manually.", head_clear=True) if setting == "ask_password_for_log_access": if not master_key.authenticate_action(): raise SoftError("Error: No permission to change setting.", head_clear=True) -def change_setting_value( - setting: str, - value: str, - relay_settings: Dict[str, bytes], - queues: "QueueDict", - contact_list: "ContactList", - group_list: "GroupList", - settings: "Settings", - gateway: "Gateway", -) -> None: +def change_setting_value(setting: str, + value: str, + relay_settings: Dict[str, bytes], + queues: 'QueueDict', + contact_list: 'ContactList', + group_list: 'GroupList', + settings: 'Settings', + gateway: 'Gateway' + ) -> None: """Change setting value in setting databases.""" if setting in gateway.settings.key_list: gateway.settings.change_setting(setting, value) @@ -914,46 +638,43 @@ def change_setting_value( queue_command(receiver_command, settings, queues) if setting in relay_settings: - if setting == "allow_contact_requests": + if setting == 'allow_contact_requests': value = bool_to_bytes(settings.allow_contact_requests).decode() - relay_command = ( - UNENCRYPTED_DATAGRAM_HEADER + relay_settings[setting] + value.encode() - ) + relay_command = UNENCRYPTED_DATAGRAM_HEADER + relay_settings[setting] + value.encode() queue_to_nc(relay_command, queues[RELAY_PACKET_QUEUE]) -def propagate_setting_effects( - setting: str, - queues: "QueueDict", - contact_list: "ContactList", - group_list: "GroupList", - settings: "Settings", - window: "TxWindow", -) -> None: +def propagate_setting_effects(setting: str, + queues: 'QueueDict', + contact_list: 'ContactList', + group_list: 'GroupList', + settings: 'Settings', + window: 'TxWindow' + ) -> None: """Propagate the effects of the setting.""" if setting == "max_number_of_contacts": contact_list.store_contacts() queues[KEY_MANAGEMENT_QUEUE].put((KDB_UPDATE_SIZE_HEADER, settings)) - if setting in ["max_number_of_group_members", "max_number_of_groups"]: + if setting in ['max_number_of_group_members', 'max_number_of_groups']: group_list.store_groups() - if setting == "traffic_masking": + if setting == 'traffic_masking': queues[SENDER_MODE_QUEUE].put(settings) queues[TRAFFIC_MASKING_QUEUE].put(settings.traffic_masking) window.deselect() - if setting == "log_file_masking": + if setting == 'log_file_masking': 
queues[LOGFILE_MASKING_QUEUE].put(settings.log_file_masking) -def print_settings(settings: "Settings", gateway: "Gateway") -> None: +def print_settings(settings: 'Settings', gateway: 'Gateway') -> None: """Print settings and gateway settings.""" settings.print_settings() gateway.settings.print_settings() -def rxp_display_unread(settings: "Settings", queues: "QueueDict") -> None: +def rxp_display_unread(settings: 'Settings', queues: 'QueueDict') -> None: """\ Display the list of windows that contain unread messages on Receiver Program. @@ -961,7 +682,7 @@ def rxp_display_unread(settings: "Settings", queues: "QueueDict") -> None: queue_command(WIN_ACTIVITY, settings, queues) -def verify(window: "TxWindow", contact_list: "ContactList") -> None: +def verify(window: 'TxWindow', contact_list: 'ContactList') -> None: """Verify fingerprints with contact.""" if window.type == WIN_TYPE_GROUP or window.contact is None: raise SoftError("Error: A group is selected.", head_clear=True) @@ -970,36 +691,24 @@ def verify(window: "TxWindow", contact_list: "ContactList") -> None: raise SoftError("Pre-shared keys have no fingerprints.", head_clear=True) try: - verified = verify_fingerprints( - window.contact.tx_fingerprint, window.contact.rx_fingerprint - ) + verified = verify_fingerprints(window.contact.tx_fingerprint, + window.contact.rx_fingerprint) except (EOFError, KeyboardInterrupt): - raise SoftError( - "Fingerprint verification aborted.", delay=1, head=2, tail_clear=True - ) + raise SoftError("Fingerprint verification aborted.", delay=1, head=2, tail_clear=True) - status_hr, status = { - True: ("Verified", KEX_STATUS_VERIFIED), - False: ("Unverified", KEX_STATUS_UNVERIFIED), - }[verified] + status_hr, status = {True: ("Verified", KEX_STATUS_VERIFIED), + False: ("Unverified", KEX_STATUS_UNVERIFIED)}[verified] window.contact.kex_status = status contact_list.store_contacts() - m_print( - f"Marked fingerprints with {window.name} as '{status_hr}'.", - bold=True, - tail_clear=True, - delay=1, - tail=1, - ) + m_print(f"Marked fingerprints with {window.name} as '{status_hr}'.", bold=True, tail_clear=True, delay=1, tail=1) -def whisper( - user_input: "UserInput", - window: "TxWindow", - settings: "Settings", - queues: "QueueDict", -) -> None: +def whisper(user_input: 'UserInput', + window: 'TxWindow', + settings: 'Settings', + queues: 'QueueDict', + ) -> None: """\ Send a message to the contact that overrides their enabled logging setting for that message. @@ -1009,23 +718,22 @@ def whisper( this feature can be used to send the message off-the-record. 
""" try: - message = user_input.plaintext.strip().split(" ", 1)[1] + message = user_input.plaintext.strip().split(' ', 1)[1] except IndexError: raise SoftError("Error: No whisper message specified.", head_clear=True) - queue_message( - user_input=UserInput(message, MESSAGE), - window=window, - settings=settings, - queues=queues, - whisper=True, - log_as_ph=True, - ) + queue_message(user_input=UserInput(message, MESSAGE), + window=window, + settings=settings, + queues=queues, + whisper=True, + log_as_ph=True) -def whois( - user_input: "UserInput", contact_list: "ContactList", group_list: "GroupList" -) -> None: +def whois(user_input: 'UserInput', + contact_list: 'ContactList', + group_list: 'GroupList' + ) -> None: """Do a lookup for a contact or group selector.""" try: selector = user_input.plaintext.split()[1] @@ -1034,47 +742,30 @@ def whois( # Contacts if selector in contact_list.get_list_of_addresses(): - m_print( - [ - f"Nick of '{selector}' is ", - f"{contact_list.get_contact_by_address_or_nick(selector).nick}", - ], - bold=True, - ) + m_print([f"Nick of '{selector}' is ", + f"{contact_list.get_contact_by_address_or_nick(selector).nick}"], bold=True) elif selector in contact_list.get_list_of_nicks(): - m_print( - [ - f"Account of '{selector}' is", - f"{contact_list.get_contact_by_address_or_nick(selector).onion_address}", - ], - bold=True, - ) + m_print([f"Account of '{selector}' is", + f"{contact_list.get_contact_by_address_or_nick(selector).onion_address}"], bold=True) # Groups elif selector in group_list.get_list_of_group_names(): - m_print( - [ - f"Group ID of group '{selector}' is", - f"{b58encode(group_list.get_group(selector).group_id)}", - ], - bold=True, - ) + m_print([f"Group ID of group '{selector}' is", + f"{b58encode(group_list.get_group(selector).group_id)}"], bold=True) elif selector in group_list.get_list_of_hr_group_ids(): - m_print( - [ - f"Name of group with ID '{selector}' is", - f"{group_list.get_group_by_id(b58decode(selector)).name}", - ], - bold=True, - ) + m_print([f"Name of group with ID '{selector}' is", + f"{group_list.get_group_by_id(b58decode(selector)).name}"], bold=True) else: raise SoftError("Error: Unknown selector.", head_clear=True) -def wipe(settings: "Settings", queues: "QueueDict", gateway: "Gateway") -> None: +def wipe(settings: 'Settings', + queues: 'QueueDict', + gateway: 'Gateway' + ) -> None: """\ Reset terminals, wipe all TFC user data from Source, Networked, and Destination Computer, and power all three systems off. @@ -1096,7 +787,7 @@ def wipe(settings: "Settings", queues: "QueueDict", gateway: "Gateway") -> None: clear_screen() for q in [COMMAND_PACKET_QUEUE, RELAY_PACKET_QUEUE]: - while queues[q].qsize(): + while queues[q].qsize() != 0: queues[q].get() queue_command(WIPE_USR_DATA, settings, queues) diff --git a/src/transmitter/commands_g.py b/src/transmitter/commands_g.py index e0c20b6..49bccc3 100644 --- a/src/transmitter/commands_g.py +++ b/src/transmitter/commands_g.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
@@ -24,123 +24,75 @@ import typing from typing import Callable, Dict, List, Optional, Tuple -from src.common.db_logs import remove_logs -from src.common.encoding import b58decode, int_to_bytes +from src.common.db_logs import remove_logs +from src.common.encoding import b58decode, int_to_bytes from src.common.exceptions import SoftError -from src.common.input import yes -from src.common.misc import ignored, validate_group_name -from src.common.output import group_management_print, m_print -from src.common.statics import ( - ADDED_MEMBERS, - ALREADY_MEMBER, - GROUP_ADD, - GROUP_CREATE, - GROUP_DELETE, - GROUP_ID_LENGTH, - GROUP_MSG_EXIT_GROUP_HEADER, - GROUP_MSG_INVITE_HEADER, - GROUP_MSG_JOIN_HEADER, - GROUP_MSG_MEMBER_ADD_HEADER, - GROUP_MSG_MEMBER_REM_HEADER, - GROUP_REMOVE, - GROUP_RENAME, - LOG_REMOVE, - NEW_GROUP, - NOT_IN_GROUP, - RELAY_PACKET_QUEUE, - REMOVED_MEMBERS, - UNKNOWN_ACCOUNTS, - US_BYTE, - WIN_TYPE_CONTACT, -) +from src.common.input import yes +from src.common.misc import ignored, validate_group_name +from src.common.output import group_management_print, m_print +from src.common.statics import (ADDED_MEMBERS, ALREADY_MEMBER, GROUP_ADD, GROUP_CREATE, GROUP_DELETE, + GROUP_ID_LENGTH, GROUP_MSG_EXIT_GROUP_HEADER, GROUP_MSG_INVITE_HEADER, + GROUP_MSG_JOIN_HEADER, GROUP_MSG_MEMBER_ADD_HEADER, GROUP_MSG_MEMBER_REM_HEADER, + GROUP_REMOVE, GROUP_RENAME, LOG_REMOVE, NEW_GROUP, NOT_IN_GROUP, RELAY_PACKET_QUEUE, + REMOVED_MEMBERS, UNKNOWN_ACCOUNTS, US_BYTE, WIN_TYPE_CONTACT) -from src.transmitter.packet import queue_command, queue_to_nc +from src.transmitter.packet import queue_command, queue_to_nc from src.transmitter.user_input import UserInput if typing.TYPE_CHECKING: - from multiprocessing import Queue - from src.common.db_contacts import ContactList - from src.common.db_groups import GroupList + from multiprocessing import Queue + from src.common.db_contacts import ContactList + from src.common.db_groups import GroupList from src.common.db_masterkey import MasterKey - from src.common.db_settings import Settings + from src.common.db_settings import Settings from src.transmitter.windows import TxWindow - QueueDict = Dict[bytes, Queue[bytes]] - FuncDict = Dict[ - str, - Callable[ - [ - str, - List[bytes], - ContactList, - GroupList, - Settings, - QueueDict, - MasterKey, - Optional[bytes], - ], - None, - ], - ] + FuncDict = (Dict[str, Callable[[str, + List[bytes], + ContactList, + GroupList, + Settings, + QueueDict, + MasterKey, + Optional[bytes]], + None]]) -def process_group_command( - user_input: "UserInput", - contact_list: "ContactList", - group_list: "GroupList", - settings: "Settings", - queues: "QueueDict", - master_key: "MasterKey", -) -> None: +def process_group_command(user_input: 'UserInput', + contact_list: 'ContactList', + group_list: 'GroupList', + settings: 'Settings', + queues: 'QueueDict', + master_key: 'MasterKey' + ) -> None: """Parse a group command and process it accordingly.""" if settings.traffic_masking: - raise SoftError( - "Error: Command is disabled during traffic masking.", head_clear=True - ) + raise SoftError("Error: Command is disabled during traffic masking.", head_clear=True) input_parameters = user_input.plaintext.split() # type: List[str] - command_type, group_id, group_name, purp_members = parse_group_command_parameters( - input_parameters, group_list - ) + command_type, group_id, group_name, purp_members = parse_group_command_parameters(input_parameters, group_list) # Swap specified strings to public keys selectors = 
contact_list.contact_selectors() - pub_keys = [ - contact_list.get_contact_by_address_or_nick(m).onion_pub_key - for m in purp_members - if m in selectors - ] + pub_keys = [contact_list.get_contact_by_address_or_nick(m).onion_pub_key for m in purp_members if m in selectors] + func_d = dict(create=group_create, join=group_create, add=group_add_member, rm=group_rm_member) # type: FuncDict + func = func_d[command_type] - func_d = dict( - create=group_create, join=group_create, add=group_add_member, rm=group_rm_member - ) # type: FuncDict - - func = func_d[command_type] - - func( - group_name, - pub_keys, - contact_list, - group_list, - settings, - queues, - master_key, - group_id, - ) + func(group_name, pub_keys, contact_list, group_list, settings, queues, master_key, group_id) print("") -def parse_group_command_parameters( - input_parameters: List[str], group_list: "GroupList" -) -> Tuple[str, Optional[bytes], str, List[str]]: +def parse_group_command_parameters(input_parameters: List[str], + group_list: 'GroupList' + ) -> Tuple[str, Optional[bytes], str, List[str]]: """Parse parameters for group command issued by the user.""" try: command_type = input_parameters[1] except IndexError: raise SoftError("Error: Invalid group command.", head_clear=True) - if command_type not in ["create", "join", "add", "rm"]: + if command_type not in ['create', 'join', 'add', 'rm']: raise SoftError("Error: Invalid group command.") group_id = validate_group_id(input_parameters, command_type, group_list) @@ -157,13 +109,13 @@ def parse_group_command_parameters( return command_type, group_id, group_name, purp_members -def validate_group_id( - input_parameters: List[str], command_type: str, group_list: "GroupList" -) -> Optional[bytes]: +def validate_group_id(input_parameters: List[str], + command_type: str, + group_list: 'GroupList' + ) -> Optional[bytes]: """Validate group ID for group command.""" group_id = None # type: Optional[bytes] - - if command_type == "join": + if command_type == 'join': try: group_id_s = input_parameters[2] except IndexError: @@ -174,23 +126,20 @@ def validate_group_id( raise SoftError("Error: Invalid group ID.", head_clear=True) if group_id in group_list.get_list_of_group_ids(): - raise SoftError( - "Error: Group with matching ID already exists.", head_clear=True - ) + raise SoftError("Error: Group with matching ID already exists.", head_clear=True) return group_id -def group_create( - group_name: str, - purp_members: List[bytes], - contact_list: "ContactList", - group_list: "GroupList", - settings: "Settings", - queues: "QueueDict", - _: "MasterKey", - group_id: Optional[bytes] = None, -) -> None: +def group_create(group_name: str, + purp_members: List[bytes], + contact_list: 'ContactList', + group_list: 'GroupList', + settings: 'Settings', + queues: 'QueueDict', + _: 'MasterKey', + group_id: Optional[bytes] = None + ) -> None: """Create a new group. Validate the group name and determine what members can be added. 
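
parse_group_command_parameters above splits the issued /group command into its type, optional group ID, group name and member list; per the help text earlier in this patch, only 'join' carries a Base58 group ID before the group name. A standalone sketch of that parameter layout (the helper name, its ValueError handling and the string return of the ID are illustrative; the real code b58-decodes and validates the ID and raises SoftError):

    from typing import List, Optional, Tuple

    def parse_group_command(plaintext: str) -> Tuple[str, Optional[str], str, List[str]]:
        """Split '/group <type> [ID] <name> <members...>' into its parts."""
        params = plaintext.split()          # e.g. ['/group', 'add', 'team', 'alice', 'bob']

        try:
            command_type = params[1]
        except IndexError:
            raise ValueError("Error: Invalid group command.")

        if command_type not in ['create', 'join', 'add', 'rm']:
            raise ValueError("Error: Invalid group command.")

        group_id     = params[2] if command_type == 'join' else None
        name_index   = 3         if command_type == 'join' else 2
        group_name   = params[name_index]
        purp_members = params[name_index + 1:]

        return command_type, group_id, group_name, purp_members

    print(parse_group_command("/group add team alice bob"))
    # ('add', None, 'team', ['alice', 'bob'])
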
@@ -199,23 +148,18 @@ def group_create( if error_msg: raise SoftError(error_msg, head_clear=True) - public_keys = set(contact_list.get_list_of_pub_keys()) + public_keys = set(contact_list.get_list_of_pub_keys()) purp_pub_keys = set(purp_members) - accepted = list(purp_pub_keys & public_keys) - rejected = list(purp_pub_keys - public_keys) + accepted = list(purp_pub_keys & public_keys) + rejected = list(purp_pub_keys - public_keys) if len(accepted) > settings.max_number_of_group_members: - raise SoftError( - f"Error: TFC settings only allow {settings.max_number_of_group_members} " - f"members per group.", - head_clear=True, - ) + raise SoftError(f"Error: TFC settings only allow {settings.max_number_of_group_members} members per group.", + head_clear=True) if len(group_list) == settings.max_number_of_groups: - raise SoftError( - f"Error: TFC settings only allow {settings.max_number_of_groups} groups.", - head_clear=True, - ) + raise SoftError(f"Error: TFC settings only allow {settings.max_number_of_groups} groups.", + head_clear=True) header = GROUP_MSG_INVITE_HEADER if group_id is None else GROUP_MSG_JOIN_HEADER @@ -225,166 +169,134 @@ def group_create( if group_id not in group_list.get_list_of_group_ids(): break - group_list.add_group( - group_name, - group_id, - settings.log_messages_by_default, - settings.show_notifications_by_default, - members=[contact_list.get_contact_by_pub_key(k) for k in accepted], - ) + group_list.add_group(group_name, + group_id, + settings.log_messages_by_default, + settings.show_notifications_by_default, + members=[contact_list.get_contact_by_pub_key(k) for k in accepted]) - command = ( - GROUP_CREATE + group_id + group_name.encode() + US_BYTE + b"".join(accepted) - ) + command = GROUP_CREATE + group_id + group_name.encode() + US_BYTE + b''.join(accepted) queue_command(command, settings, queues) - group_management_print(NEW_GROUP, accepted, contact_list, group_name) + group_management_print(NEW_GROUP, accepted, contact_list, group_name) group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name) if accepted: if yes("Publish the list of group members to participants?", abort=False): - create_packet = header + group_id + b"".join(accepted) + create_packet = header + group_id + b''.join(accepted) queue_to_nc(create_packet, queues[RELAY_PACKET_QUEUE]) else: m_print(f"Created an empty group '{group_name}'.", bold=True, head=1) -def group_add_member( - group_name: str, - purp_members: List["bytes"], - contact_list: "ContactList", - group_list: "GroupList", - settings: "Settings", - queues: "QueueDict", - master_key: "MasterKey", - _: Optional[bytes] = None, -) -> None: +def group_add_member(group_name: str, + purp_members: List['bytes'], + contact_list: 'ContactList', + group_list: 'GroupList', + settings: 'Settings', + queues: 'QueueDict', + master_key: 'MasterKey', + _: Optional[bytes] = None + ) -> None: """Add new member(s) to a specified group.""" if group_name not in group_list.get_list_of_group_names(): - if yes( - f"Group {group_name} was not found. Create new group?", abort=False, head=1 - ): - group_create( - group_name, - purp_members, - contact_list, - group_list, - settings, - queues, - master_key, - ) - return None - raise SoftError("Group creation aborted.", head=0, delay=1, tail_clear=True) + if not yes(f"Group {group_name} was not found. 
Create new group?", abort=False, head=1): + raise SoftError("Group creation aborted.", head=0, delay=1, tail_clear=True) + group_create(group_name, purp_members, contact_list, group_list, settings, queues, master_key) + return None - purp_pub_keys = set(purp_members) - pub_keys = set(contact_list.get_list_of_pub_keys()) - before_adding = set(group_list.get_group(group_name).get_list_of_member_pub_keys()) - ok_pub_keys_set = set(pub_keys & purp_pub_keys) + purp_pub_keys = set(purp_members) + pub_keys = set(contact_list.get_list_of_pub_keys()) + before_adding = set(group_list.get_group(group_name).get_list_of_member_pub_keys()) + ok_pub_keys_set = set(pub_keys & purp_pub_keys) new_in_group_set = set(ok_pub_keys_set - before_adding) end_assembly = list(before_adding | new_in_group_set) - rejected = list(purp_pub_keys - pub_keys) + rejected = list(purp_pub_keys - pub_keys) already_in_g = list(before_adding & purp_pub_keys) new_in_group = list(new_in_group_set) - ok_pub_keys = list(ok_pub_keys_set) + ok_pub_keys = list(ok_pub_keys_set) if len(end_assembly) > settings.max_number_of_group_members: - raise SoftError( - f"Error: TFC settings only allow {settings.max_number_of_group_members} " - f"members per group.", - head_clear=True, - ) + raise SoftError(f"Error: TFC settings only allow {settings.max_number_of_group_members} members per group.", + head_clear=True) group = group_list.get_group(group_name) group.add_members([contact_list.get_contact_by_pub_key(k) for k in new_in_group]) - command = GROUP_ADD + group.group_id + b"".join(ok_pub_keys) + command = GROUP_ADD + group.group_id + b''.join(ok_pub_keys) queue_command(command, settings, queues) - group_management_print(ADDED_MEMBERS, new_in_group, contact_list, group_name) - group_management_print(ALREADY_MEMBER, already_in_g, contact_list, group_name) - group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name) + group_management_print(ADDED_MEMBERS, new_in_group, contact_list, group_name) + group_management_print(ALREADY_MEMBER, already_in_g, contact_list, group_name) + group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name) if new_in_group: if yes("Publish the list of new members to involved?", abort=False): - add_packet = ( - GROUP_MSG_MEMBER_ADD_HEADER - + group.group_id - + int_to_bytes(len(before_adding)) - + b"".join(before_adding) - + b"".join(new_in_group) - ) + add_packet = (GROUP_MSG_MEMBER_ADD_HEADER + + group.group_id + + int_to_bytes(len(before_adding)) + + b''.join(before_adding) + + b''.join(new_in_group)) queue_to_nc(add_packet, queues[RELAY_PACKET_QUEUE]) -def group_rm_member( - group_name: str, - purp_members: List[bytes], - contact_list: "ContactList", - group_list: "GroupList", - settings: "Settings", - queues: "QueueDict", - master_key: "MasterKey", - _: Optional[bytes] = None, -) -> None: +def group_rm_member(group_name: str, + purp_members: List[bytes], + contact_list: 'ContactList', + group_list: 'GroupList', + settings: 'Settings', + queues: 'QueueDict', + master_key: 'MasterKey', + _: Optional[bytes] = None + ) -> None: """Remove member(s) from the specified group or remove the group itself.""" if not purp_members: - group_rm_group( - group_name, contact_list, group_list, settings, queues, master_key - ) + group_rm_group(group_name, contact_list, group_list, settings, queues, master_key) if group_name not in group_list.get_list_of_group_names(): raise SoftError(f"Group '{group_name}' does not exist.", head_clear=True) - purp_pub_keys = set(purp_members) - pub_keys = 
set(contact_list.get_list_of_pub_keys()) - before_removal = set(group_list.get_group(group_name).get_list_of_member_pub_keys()) - ok_pub_keys_set = set(purp_pub_keys & pub_keys) - removable_set = set(before_removal & ok_pub_keys_set) + purp_pub_keys = set(purp_members) + pub_keys = set(contact_list.get_list_of_pub_keys()) + before_removal = set(group_list.get_group(group_name).get_list_of_member_pub_keys()) + ok_pub_keys_set = set(purp_pub_keys & pub_keys) + removable_set = set(before_removal & ok_pub_keys_set) - remaining = list(before_removal - removable_set) + remaining = list(before_removal - removable_set) not_in_group = list(ok_pub_keys_set - before_removal) - rejected = list(purp_pub_keys - pub_keys) - removable = list(removable_set) - ok_pub_keys = list(ok_pub_keys_set) + rejected = list(purp_pub_keys - pub_keys) + removable = list(removable_set) + ok_pub_keys = list(ok_pub_keys_set) group = group_list.get_group(group_name) group.remove_members(removable) - command = GROUP_REMOVE + group.group_id + b"".join(ok_pub_keys) + command = GROUP_REMOVE + group.group_id + b''.join(ok_pub_keys) queue_command(command, settings, queues) - group_management_print(REMOVED_MEMBERS, removable, contact_list, group_name) - group_management_print(NOT_IN_GROUP, not_in_group, contact_list, group_name) - group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name) + group_management_print(REMOVED_MEMBERS, removable, contact_list, group_name) + group_management_print(NOT_IN_GROUP, not_in_group, contact_list, group_name) + group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name) - if ( - removable - and remaining - and yes( - "Publish the list of removed members to remaining members?", abort=False - ) - ): - rem_packet = ( - GROUP_MSG_MEMBER_REM_HEADER - + group.group_id - + int_to_bytes(len(remaining)) - + b"".join(remaining) - + b"".join(removable) - ) + if removable and remaining and yes("Publish the list of removed members to remaining members?", abort=False): + rem_packet = (GROUP_MSG_MEMBER_REM_HEADER + + group.group_id + + int_to_bytes(len(remaining)) + + b''.join(remaining) + + b''.join(removable)) queue_to_nc(rem_packet, queues[RELAY_PACKET_QUEUE]) -def group_rm_group( - group_name: str, - contact_list: "ContactList", - group_list: "GroupList", - settings: "Settings", - queues: "QueueDict", - master_key: "MasterKey", - _: Optional[bytes] = None, -) -> None: +def group_rm_group(group_name: str, + contact_list: 'ContactList', + group_list: 'GroupList', + settings: 'Settings', + queues: 'QueueDict', + master_key: 'MasterKey', + _: Optional[bytes] = None + ) -> None: """Remove the group with its members.""" if not yes(f"Remove group '{group_name}'?", abort=False): raise SoftError("Group removal aborted.", head=0, delay=1, tail_clear=True) @@ -410,35 +322,26 @@ def group_rm_group( raise SoftError(f"Transmitter has no group '{group_name}' to remove.") group = group_list.get_group(group_name) - if not group.empty() and yes( - "Notify members about leaving the group?", abort=False - ): - exit_packet = ( - GROUP_MSG_EXIT_GROUP_HEADER - + group.group_id - + b"".join(group.get_list_of_member_pub_keys()) - ) + if not group.empty() and yes("Notify members about leaving the group?", abort=False): + exit_packet = (GROUP_MSG_EXIT_GROUP_HEADER + + group.group_id + + b''.join(group.get_list_of_member_pub_keys())) queue_to_nc(exit_packet, queues[RELAY_PACKET_QUEUE]) group_list.remove_group_by_name(group_name) - raise SoftError( - f"Removed group '{group_name}'.", head=0, delay=1, 
tail_clear=True, bold=True - ) + raise SoftError(f"Removed group '{group_name}'.", head=0, delay=1, tail_clear=True, bold=True) -def group_rename( - new_name: str, - window: "TxWindow", - contact_list: "ContactList", - group_list: "GroupList", - settings: "Settings", - queues: "QueueDict", -) -> None: +def group_rename(new_name: str, + window: 'TxWindow', + contact_list: 'ContactList', + group_list: 'GroupList', + settings: 'Settings', + queues: 'QueueDict', + ) -> None: """Rename the active group.""" if window.type == WIN_TYPE_CONTACT or window.group is None: - raise SoftError( - "Error: Selected window is not a group window.", head_clear=True - ) + raise SoftError("Error: Selected window is not a group window.", head_clear=True) error_msg = validate_group_name(new_name, contact_list, group_list) if error_msg: @@ -447,10 +350,8 @@ def group_rename( command = GROUP_RENAME + window.uid + new_name.encode() queue_command(command, settings, queues) - old_name = window.group.name + old_name = window.group.name window.group.name = new_name group_list.store_groups() - raise SoftError( - f"Renamed group '{old_name}' to '{new_name}'.", delay=1, tail_clear=True - ) + raise SoftError(f"Renamed group '{old_name}' to '{new_name}'.", delay=1, tail_clear=True) diff --git a/src/transmitter/contact.py b/src/transmitter/contact.py index f6ade3d..27df5b0 100644 --- a/src/transmitter/contact.py +++ b/src/transmitter/contact.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -23,68 +23,40 @@ import typing from typing import Any, Dict -from src.common.db_logs import remove_logs -from src.common.encoding import onion_address_to_pub_key +from src.common.db_logs import remove_logs +from src.common.encoding import onion_address_to_pub_key from src.common.exceptions import SoftError -from src.common.input import box_input, yes -from src.common.misc import ( - ignored, - validate_key_exchange, - validate_nick, - validate_onion_addr, -) -from src.common.output import m_print -from src.common.statics import ( - ALL, - CH_FILE_RECV, - CH_LOGGING, - CH_NICKNAME, - CH_NOTIFY, - CONTACT_REM, - DISABLE, - ECDHE, - ENABLE, - KDB_REMOVE_ENTRY_HEADER, - KEY_MANAGEMENT_QUEUE, - LOGGING, - LOG_SETTING_QUEUE, - NOTIFY, - ONION_ADDRESS_LENGTH, - PSK, - RELAY_PACKET_QUEUE, - STORE, - TRUNC_ADDRESS_LENGTH, - UNENCRYPTED_ADD_NEW_CONTACT, - UNENCRYPTED_DATAGRAM_HEADER, - UNENCRYPTED_REM_CONTACT, - WIN_TYPE_CONTACT, - WIN_TYPE_GROUP, -) +from src.common.input import box_input, yes +from src.common.misc import ignored, validate_key_exchange, validate_nick, validate_onion_addr +from src.common.output import m_print, print_on_previous_line +from src.common.statics import (ALL, CH_FILE_RECV, CH_LOGGING, CH_NICKNAME, CH_NOTIFY, CONTACT_REM, DISABLE, ECDHE, + ENABLE, KDB_REMOVE_ENTRY_HEADER, KEY_MANAGEMENT_QUEUE, LOGGING, LOG_SETTING_QUEUE, + NOTIFY, ONION_ADDRESS_LENGTH, PSK, RELAY_PACKET_QUEUE, STORE, TRUNC_ADDRESS_LENGTH, + UNENCRYPTED_ACCOUNT_CHECK, UNENCRYPTED_ADD_NEW_CONTACT, UNENCRYPTED_DATAGRAM_HEADER, + UNENCRYPTED_REM_CONTACT, WIN_TYPE_CONTACT, WIN_TYPE_GROUP) -from src.transmitter.commands_g import group_rename +from src.transmitter.commands_g import group_rename from src.transmitter.key_exchanges import create_pre_shared_key, start_key_exchange -from src.transmitter.packet import queue_command, queue_to_nc +from src.transmitter.packet import queue_command, queue_to_nc if typing.TYPE_CHECKING: - from 
multiprocessing import Queue - from src.common.db_contacts import ContactList - from src.common.db_groups import GroupList - from src.common.db_masterkey import MasterKey - from src.common.db_onion import OnionService - from src.common.db_settings import Settings + from multiprocessing import Queue + from src.common.db_contacts import ContactList + from src.common.db_groups import GroupList + from src.common.db_masterkey import MasterKey + from src.common.db_onion import OnionService + from src.common.db_settings import Settings from src.transmitter.user_input import UserInput - from src.transmitter.windows import TxWindow - + from src.transmitter.windows import TxWindow QueueDict = Dict[bytes, Queue[Any]] -def add_new_contact( - contact_list: "ContactList", - group_list: "GroupList", - settings: "Settings", - queues: "QueueDict", - onion_service: "OnionService", -) -> None: +def add_new_contact(contact_list: 'ContactList', + group_list: 'GroupList', + settings: 'Settings', + queues: 'QueueDict', + onion_service: 'OnionService' + ) -> None: """Prompt for contact account details and initialize desired key exchange. This function requests the minimum amount of data about the @@ -103,90 +75,78 @@ def add_new_contact( """ try: if settings.traffic_masking: - raise SoftError( - "Error: Command is disabled during traffic masking.", head_clear=True - ) + raise SoftError("Error: Command is disabled during traffic masking.", head_clear=True) if len(contact_list) >= settings.max_number_of_contacts: - raise SoftError( - f"Error: TFC settings only allow {settings.max_number_of_contacts} accounts.", - head_clear=True, - ) + raise SoftError(f"Error: TFC settings only allow {settings.max_number_of_contacts} accounts.", + head_clear=True) m_print("Add new contact", head=1, bold=True, head_clear=True) - m_print( - [ - "Your TFC account is", - onion_service.user_onion_address, - "", - "Warning!", - "Anyone who knows this account", - "can see when your TFC is online", - ], - box=True, - ) + m_print(["Your TFC account is", + onion_service.user_onion_address, + '', "Warning!", + "Anyone who knows this account", + "can see when your TFC is online"], box=True) - contact_address = box_input( - "Contact account", - expected_len=ONION_ADDRESS_LENGTH, - validator=validate_onion_addr, - validator_args=onion_service.user_onion_address, - ).strip() - onion_pub_key = onion_address_to_pub_key(contact_address) + contact_address = get_onion_address_from_user(onion_service.user_onion_address, queues) + onion_pub_key = onion_address_to_pub_key(contact_address) - contact_nick = box_input( - "Contact nick", - expected_len=ONION_ADDRESS_LENGTH, # Limited to 255 but such long nick is unpractical. - validator=validate_nick, - validator_args=(contact_list, group_list, onion_pub_key), - ).strip() + contact_nick = box_input("Contact nick", + expected_len=ONION_ADDRESS_LENGTH, # Limited to 255 but such long nick is unpractical. 
+ validator=validate_nick, + validator_args=(contact_list, group_list, onion_pub_key)).strip() - key_exchange = box_input( - f"Key exchange ([{ECDHE}],PSK) ", - default=ECDHE, - expected_len=28, - validator=validate_key_exchange, - ).strip() + key_exchange = box_input(f"Key exchange ([{ECDHE}],PSK) ", + default=ECDHE, + expected_len=28, + validator=validate_key_exchange).strip() - relay_command = ( - UNENCRYPTED_DATAGRAM_HEADER + UNENCRYPTED_ADD_NEW_CONTACT + onion_pub_key - ) + relay_command = UNENCRYPTED_DATAGRAM_HEADER + UNENCRYPTED_ADD_NEW_CONTACT + onion_pub_key queue_to_nc(relay_command, queues[RELAY_PACKET_QUEUE]) if key_exchange.upper() in ECDHE: - start_key_exchange( - onion_pub_key, contact_nick, contact_list, settings, queues - ) + start_key_exchange(onion_pub_key, contact_nick, contact_list, settings, queues) elif key_exchange.upper() in PSK: - create_pre_shared_key( - onion_pub_key, - contact_nick, - contact_list, - settings, - onion_service, - queues, - ) + create_pre_shared_key(onion_pub_key, contact_nick, contact_list, settings, onion_service, queues) except (EOFError, KeyboardInterrupt): raise SoftError("Contact creation aborted.", head=2, delay=1, tail_clear=True) -def remove_contact( - user_input: "UserInput", - window: "TxWindow", - contact_list: "ContactList", - group_list: "GroupList", - settings: "Settings", - queues: "QueueDict", - master_key: "MasterKey", -) -> None: +def get_onion_address_from_user(onion_address_user: str, queues: 'QueueDict') -> str: + """Get contact's Onion Address from user.""" + while True: + onion_address_contact = box_input("Contact account", expected_len=ONION_ADDRESS_LENGTH) + error_msg = validate_onion_addr(onion_address_contact, onion_address_user) + + if error_msg: + m_print(error_msg, head=1) + print_on_previous_line(reps=5, delay=1) + + if error_msg not in ["Error: Invalid account length.", + "Error: Account must be in lower case.", + "Error: Can not add reserved account.", + "Error: Can not add own account."]: + relay_command = UNENCRYPTED_DATAGRAM_HEADER + UNENCRYPTED_ACCOUNT_CHECK + onion_address_contact.encode() + queue_to_nc(relay_command, queues[RELAY_PACKET_QUEUE]) + continue + + return onion_address_contact + + +def remove_contact(user_input: 'UserInput', + window: 'TxWindow', + contact_list: 'ContactList', + group_list: 'GroupList', + settings: 'Settings', + queues: 'QueueDict', + master_key: 'MasterKey' + ) -> None: """Remove contact from TFC.""" if settings.traffic_masking: - raise SoftError( - "Error: Command is disabled during traffic masking.", head_clear=True - ) + raise SoftError("Error: Command is disabled during traffic masking.", head_clear=True) try: selection = user_input.plaintext.split()[1] @@ -197,15 +157,11 @@ def remove_contact( raise SoftError("Removal of contact aborted.", head=0, delay=1, tail_clear=True) if selection in contact_list.contact_selectors(): - onion_pub_key = contact_list.get_contact_by_address_or_nick( - selection - ).onion_pub_key + onion_pub_key = contact_list.get_contact_by_address_or_nick(selection).onion_pub_key else: if validate_onion_addr(selection): - raise SoftError( - "Error: Invalid selection.", head=0, delay=1, tail_clear=True - ) + raise SoftError("Error: Invalid selection.", head=0, delay=1, tail_clear=True) onion_pub_key = onion_address_to_pub_key(selection) receiver_command = CONTACT_REM + onion_pub_key @@ -216,9 +172,7 @@ def remove_contact( queues[KEY_MANAGEMENT_QUEUE].put((KDB_REMOVE_ENTRY_HEADER, onion_pub_key)) - relay_command = ( - UNENCRYPTED_DATAGRAM_HEADER + 
UNENCRYPTED_REM_CONTACT + onion_pub_key - ) + relay_command = UNENCRYPTED_DATAGRAM_HEADER + UNENCRYPTED_REM_CONTACT + onion_pub_key queue_to_nc(relay_command, queues[RELAY_PACKET_QUEUE]) target = determine_target(selection, onion_pub_key, contact_list) @@ -229,13 +183,14 @@ def remove_contact( check_for_window_deselection(onion_pub_key, window, group_list) -def determine_target( - selection: str, onion_pub_key: bytes, contact_list: "ContactList" -) -> str: +def determine_target(selection: str, + onion_pub_key: bytes, + contact_list: 'ContactList' + ) -> str: """Determine name of the target that will be removed.""" if onion_pub_key in contact_list.get_list_of_pub_keys(): contact = contact_list.get_contact_by_pub_key(onion_pub_key) - target = f"{contact.nick} ({contact.short_address})" + target = f"{contact.nick} ({contact.short_address})" contact_list.remove_contact_by_pub_key(onion_pub_key) m_print(f"Removed {target} from contacts.", head=1, tail=1) else: @@ -245,9 +200,10 @@ def determine_target( return target -def check_for_window_deselection( - onion_pub_key: bytes, window: "TxWindow", group_list: "GroupList" -) -> None: +def check_for_window_deselection(onion_pub_key: bytes, + window: 'TxWindow', + group_list: 'GroupList' + ) -> None: """\ Check if the window should be deselected after contact is removed. """ @@ -268,14 +224,13 @@ def check_for_window_deselection( window.deselect() -def change_nick( - user_input: "UserInput", - window: "TxWindow", - contact_list: "ContactList", - group_list: "GroupList", - settings: "Settings", - queues: "QueueDict", -) -> None: +def change_nick(user_input: 'UserInput', + window: 'TxWindow', + contact_list: 'ContactList', + group_list: 'GroupList', + settings: 'Settings', + queues: 'QueueDict' + ) -> None: """Change nick of contact.""" try: nick = user_input.plaintext.split()[1] @@ -289,58 +244,55 @@ def change_nick( raise SoftError("Error: Window does not have contact.") onion_pub_key = window.contact.onion_pub_key - error_msg = validate_nick(nick, (contact_list, group_list, onion_pub_key)) + error_msg = validate_nick(nick, (contact_list, group_list, onion_pub_key)) if error_msg: raise SoftError(error_msg, head_clear=True) window.contact.nick = nick - window.name = nick + window.name = nick contact_list.store_contacts() command = CH_NICKNAME + onion_pub_key + nick.encode() queue_command(command, settings, queues) -def contact_setting( - user_input: "UserInput", - window: "TxWindow", - contact_list: "ContactList", - group_list: "GroupList", - settings: "Settings", - queues: "QueueDict", -) -> None: +def contact_setting(user_input: 'UserInput', + window: 'TxWindow', + contact_list: 'ContactList', + group_list: 'GroupList', + settings: 'Settings', + queues: 'QueueDict' + ) -> None: """\ Change logging, file reception, or notification setting of a group or (all) contact(s). 
""" try: parameters = user_input.plaintext.split() - cmd_key = parameters[0] - cmd_header = {LOGGING: CH_LOGGING, STORE: CH_FILE_RECV, NOTIFY: CH_NOTIFY}[ - cmd_key - ] + cmd_key = parameters[0] + cmd_header = {LOGGING: CH_LOGGING, + STORE: CH_FILE_RECV, + NOTIFY: CH_NOTIFY}[cmd_key] - setting, b_value = dict(on=(ENABLE, True), off=(DISABLE, False))[parameters[1]] + setting, b_value = dict(on=(ENABLE, True), + off=(DISABLE, False))[parameters[1]] except (IndexError, KeyError): raise SoftError("Error: Invalid command.", head_clear=True) # If second parameter 'all' is included, apply setting for all contacts and groups try: - win_uid = b"" + win_uid = b'' if parameters[2] == ALL: cmd_value = setting.upper() else: raise SoftError("Error: Invalid command.", head_clear=True) except IndexError: - win_uid = window.uid + win_uid = window.uid cmd_value = setting + win_uid if win_uid: - change_setting_for_selected_contact( - cmd_key, b_value, window, contact_list, group_list - ) - + change_setting_for_selected_contact(cmd_key, b_value, window, contact_list, group_list) else: change_setting_for_all_contacts(cmd_key, b_value, contact_list, group_list) @@ -356,21 +308,20 @@ def contact_setting( queue_command(command, settings, queues) -def change_setting_for_selected_contact( - cmd_key: str, - b_value: bool, - window: "TxWindow", - contact_list: "ContactList", - group_list: "GroupList", -) -> None: +def change_setting_for_selected_contact(cmd_key: str, + b_value: bool, + window: 'TxWindow', + contact_list: 'ContactList', + group_list: 'GroupList' + ) -> None: """Change setting for selected contact.""" if window.type == WIN_TYPE_CONTACT and window.contact is not None: if cmd_key == LOGGING: - window.contact.log_messages = b_value + window.contact.log_messages = b_value if cmd_key == STORE: window.contact.file_reception = b_value if cmd_key == NOTIFY: - window.contact.notifications = b_value + window.contact.notifications = b_value contact_list.store_contacts() if window.type == WIN_TYPE_GROUP and window.group is not None: @@ -384,24 +335,24 @@ def change_setting_for_selected_contact( group_list.store_groups() -def change_setting_for_all_contacts( - cmd_key: str, b_value: bool, contact_list: "ContactList", group_list: "GroupList" -) -> None: - """Change setting for all contacts.""" +def change_setting_for_all_contacts(cmd_key: str, + b_value: bool, + contact_list: 'ContactList', + group_list: 'GroupList' + ) -> None: + """Change settings for all contacts.""" for contact in contact_list: if cmd_key == LOGGING: - contact.log_messages = b_value + contact.log_messages = b_value if cmd_key == STORE: contact.file_reception = b_value if cmd_key == NOTIFY: - contact.notifications = b_value - + contact.notifications = b_value contact_list.store_contacts() for group in group_list: if cmd_key == LOGGING: - group.log_messages = b_value + group.log_messages = b_value if cmd_key == NOTIFY: group.notifications = b_value - group_list.store_groups() diff --git a/src/transmitter/files.py b/src/transmitter/files.py index 7fe088d..6143ee5 100755 --- a/src/transmitter/files.py +++ b/src/transmitter/files.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
@@ -26,22 +26,15 @@ import zlib from typing import Tuple -from src.common.crypto import byte_padding, csprng, encrypt_and_sign -from src.common.encoding import int_to_bytes +from src.common.crypto import byte_padding, csprng, encrypt_and_sign +from src.common.encoding import int_to_bytes from src.common.exceptions import SoftError -from src.common.misc import readable_size, split_byte_string -from src.common.statics import ( - COMPRESSION_LEVEL, - FILE_ETA_FIELD_LENGTH, - FILE_PACKET_CTR_LENGTH, - FILE_SIZE_FIELD_LENGTH, - PADDING_LENGTH, - TRAFFIC_MASKING_QUEUE_CHECK_DELAY, - US_BYTE, -) +from src.common.misc import readable_size, split_byte_string +from src.common.statics import (COMPRESSION_LEVEL, FILE_ETA_FIELD_LENGTH, FILE_PACKET_CTR_LENGTH, + FILE_SIZE_FIELD_LENGTH, PADDING_LENGTH, TRAFFIC_MASKING_QUEUE_CHECK_DELAY, US_BYTE) if typing.TYPE_CHECKING: - from src.common.db_settings import Settings + from src.common.db_settings import Settings from src.transmitter.windows import TxWindow @@ -52,34 +45,35 @@ class File(object): masking. """ - def __init__(self, path: str, window: "TxWindow", settings: "Settings") -> None: + def __init__(self, + path: str, + window: 'TxWindow', + settings: 'Settings' + ) -> None: """Load file data from specified path and add headers.""" - self.window = window + self.window = window self.settings = settings - self.name = self.get_name(path) - data = self.load_file_data(path) - size, self.size_hr = self.get_size(path) - processed = self.process_file_data(data) + self.name = self.get_name(path) + data = self.load_file_data(path) + size, self.size_hr = self.get_size(path) + processed = self.process_file_data(data) self.time_hr, self.plaintext = self.finalize(size, processed) @staticmethod def get_name(path: str) -> bytes: """Parse and validate file name.""" - name = (path.split("/")[-1]).encode() + name = (path.split('/')[-1]).encode() File.name_length_check(name) return name @staticmethod def name_length_check(name: bytes) -> None: """Ensure that file header fits the first packet.""" - full_header_length = ( - FILE_PACKET_CTR_LENGTH - + FILE_ETA_FIELD_LENGTH - + FILE_SIZE_FIELD_LENGTH - + len(name) - + len(US_BYTE) - ) + full_header_length = (FILE_PACKET_CTR_LENGTH + + FILE_ETA_FIELD_LENGTH + + FILE_SIZE_FIELD_LENGTH + + len(name) + len(US_BYTE)) if full_header_length >= PADDING_LENGTH: raise SoftError("Error: File name is too long.", head_clear=True) @@ -89,7 +83,7 @@ class File(object): """Load file name, size, and data from the specified path.""" if not os.path.isfile(path): raise SoftError("Error: File not found.", head_clear=True) - with open(path, "rb") as f: + with open(path, 'rb') as f: data = f.read() return data @@ -97,9 +91,9 @@ class File(object): def get_size(path: str) -> Tuple[bytes, str]: """Get size of file in bytes and in human readable form.""" byte_size = os.path.getsize(path) - if not byte_size: + if byte_size == 0: raise SoftError("Error: Target file is empty.", head_clear=True) - size = int_to_bytes(byte_size) + size = int_to_bytes(byte_size) size_hr = readable_size(byte_size) return size, size_hr @@ -113,27 +107,24 @@ class File(object): transmission. 
""" compressed = zlib.compress(data, level=COMPRESSION_LEVEL) - file_key = csprng() - processed = encrypt_and_sign(compressed, key=file_key) + file_key = csprng() + processed = encrypt_and_sign(compressed, key=file_key) processed += file_key return processed def finalize(self, size: bytes, processed: bytes) -> Tuple[str, bytes]: """Finalize packet and generate plaintext.""" - time_bytes, time_print = self.update_delivery_time( - self.name, size, processed, self.settings, self.window - ) - packet_data = time_bytes + size + self.name + US_BYTE + processed + time_bytes, time_print = self.update_delivery_time(self.name, size, processed, self.settings, self.window) + packet_data = time_bytes + size + self.name + US_BYTE + processed return time_print, packet_data @staticmethod - def update_delivery_time( - name: bytes, - size: bytes, - processed: bytes, - settings: "Settings", - window: "TxWindow", - ) -> Tuple[bytes, str]: + def update_delivery_time(name: bytes, + size: bytes, + processed: bytes, + settings: 'Settings', + window: 'TxWindow' + ) -> Tuple[bytes, str]: """Calculate transmission time. Transmission time depends on delay settings, file size and @@ -141,27 +132,29 @@ class File(object): """ time_bytes = bytes(FILE_ETA_FIELD_LENGTH) no_packets = File.count_number_of_packets(name, size, processed, time_bytes) - avg_delay = settings.tm_static_delay + (settings.tm_random_delay / 2) + avg_delay = settings.tm_static_delay + (settings.tm_random_delay / 2) - total_time = len(window) * no_packets * avg_delay + total_time = len(window) * no_packets * avg_delay total_time *= 2 # Accommodate command packets between file packets total_time += no_packets * TRAFFIC_MASKING_QUEUE_CHECK_DELAY # Update delivery time time_bytes = int_to_bytes(int(total_time)) - time_hr = str(datetime.timedelta(seconds=int(total_time))) + time_hr = str(datetime.timedelta(seconds=int(total_time))) return time_bytes, time_hr @staticmethod - def count_number_of_packets( - name: bytes, size: bytes, processed: bytes, time_bytes: bytes - ) -> int: + def count_number_of_packets(name: bytes, + size: bytes, + processed: bytes, + time_bytes: bytes + ) -> int: """Count number of packets needed for file delivery.""" packet_data = time_bytes + size + name + US_BYTE + processed if len(packet_data) < PADDING_LENGTH: return 1 packet_data += bytes(FILE_PACKET_CTR_LENGTH) - packet_data = byte_padding(packet_data) + packet_data = byte_padding(packet_data) return len(split_byte_string(packet_data, item_len=PADDING_LENGTH)) diff --git a/src/transmitter/input_loop.py b/src/transmitter/input_loop.py index 61ba17c..4fa6af7 100755 --- a/src/transmitter/input_loop.py +++ b/src/transmitter/input_loop.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
@@ -27,36 +27,35 @@ import typing from typing import Dict, NoReturn from src.common.exceptions import SoftError -from src.common.misc import get_tab_completer, ignored -from src.common.statics import COMMAND, FILE, MESSAGE +from src.common.misc import get_tab_completer, ignored +from src.common.statics import COMMAND, FILE, MESSAGE -from src.transmitter.commands import process_command -from src.transmitter.contact import add_new_contact +from src.transmitter.commands import process_command +from src.transmitter.contact import add_new_contact from src.transmitter.key_exchanges import export_onion_service_data, new_local_key -from src.transmitter.packet import queue_file, queue_message -from src.transmitter.user_input import get_input -from src.transmitter.windows import TxWindow +from src.transmitter.packet import queue_file, queue_message +from src.transmitter.user_input import get_input +from src.transmitter.windows import TxWindow if typing.TYPE_CHECKING: - from multiprocessing import Queue - from src.common.db_contacts import ContactList - from src.common.db_groups import GroupList + from multiprocessing import Queue + from src.common.db_contacts import ContactList + from src.common.db_groups import GroupList from src.common.db_masterkey import MasterKey - from src.common.db_onion import OnionService - from src.common.db_settings import Settings - from src.common.gateway import Gateway + from src.common.db_onion import OnionService + from src.common.db_settings import Settings + from src.common.gateway import Gateway -def input_loop( - queues: Dict[bytes, "Queue[bytes]"], - settings: "Settings", - gateway: "Gateway", - contact_list: "ContactList", - group_list: "GroupList", - master_key: "MasterKey", - onion_service: "OnionService", - stdin_fd: int, -) -> NoReturn: +def input_loop(queues: Dict[bytes, 'Queue[bytes]'], + settings: 'Settings', + gateway: 'Gateway', + contact_list: 'ContactList', + group_list: 'GroupList', + master_key: 'MasterKey', + onion_service: 'OnionService', + stdin_fd: int + ) -> NoReturn: """Get input from user and process it accordingly. Running this loop as a process allows handling different functions @@ -64,29 +63,23 @@ def input_loop( generation, separate from assembly packet output. 
""" sys.stdin = os.fdopen(stdin_fd) - window = TxWindow(contact_list, group_list) + window = TxWindow(contact_list, group_list) while True: with ignored(EOFError, SoftError, KeyboardInterrupt): - readline.set_completer( - get_tab_completer(contact_list, group_list, settings, gateway) - ) - readline.parse_and_bind("tab: complete") + readline.set_completer(get_tab_completer(contact_list, group_list, settings, gateway)) + readline.parse_and_bind('tab: complete') window.update_window(group_list) while not onion_service.is_delivered: - export_onion_service_data( - contact_list, settings, onion_service, gateway - ) + export_onion_service_data(contact_list, settings, onion_service, gateway) while not contact_list.has_local_contact(): new_local_key(contact_list, settings, queues) while not contact_list.has_contacts(): - add_new_contact( - contact_list, group_list, settings, queues, onion_service - ) + add_new_contact(contact_list, group_list, settings, queues, onion_service) while not window.is_selected(): window.select_tx_window(settings, queues, onion_service, gateway) @@ -101,13 +94,4 @@ def input_loop( elif user_input.type == COMMAND: process_command( - user_input, - window, - contact_list, - group_list, - settings, - queues, - master_key, - onion_service, - gateway, - ) + user_input, window, contact_list, group_list, settings, queues, master_key, onion_service, gateway) diff --git a/src/transmitter/key_exchanges.py b/src/transmitter/key_exchanges.py index 1f8eff9..d503443 100644 --- a/src/transmitter/key_exchanges.py +++ b/src/transmitter/key_exchanges.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -25,81 +25,41 @@ import typing from typing import Any, Dict -from src.common.crypto import argon2_kdf, blake2b, csprng, encrypt_and_sign, X448 +from src.common.crypto import argon2_kdf, blake2b, csprng, encrypt_and_sign, X448 from src.common.db_masterkey import MasterKey -from src.common.encoding import ( - bool_to_bytes, - int_to_bytes, - pub_key_to_short_address, - str_to_bytes, -) -from src.common.exceptions import SoftError -from src.common.input import ask_confirmation_code, get_b58_key, nc_bypass_msg, yes -from src.common.misc import reset_terminal -from src.common.output import ( - m_print, - phase, - print_fingerprint, - print_key, - print_on_previous_line, -) -from src.common.path import ask_path_gui -from src.common.statics import ( - ARGON2_PSK_MEMORY_COST, - ARGON2_PSK_PARALLELISM, - ARGON2_PSK_TIME_COST, - B58_PUBLIC_KEY, - CONFIRM_CODE_LENGTH, - DONE, - ECDHE, - FINGERPRINT_LENGTH, - KDB_ADD_ENTRY_HEADER, - KEX_STATUS_HAS_RX_PSK, - KEX_STATUS_LOCAL_KEY, - KEX_STATUS_NO_RX_PSK, - KEX_STATUS_PENDING, - KEX_STATUS_UNVERIFIED, - KEX_STATUS_VERIFIED, - KEY_EX_ECDHE, - KEY_EX_PSK_RX, - KEY_EX_PSK_TX, - KEY_MANAGEMENT_QUEUE, - LOCAL_KEY_DATAGRAM_HEADER, - LOCAL_KEY_RDY, - LOCAL_NICK, - LOCAL_PUBKEY, - NC_BYPASS_START, - NC_BYPASS_STOP, - PUBLIC_KEY_DATAGRAM_HEADER, - RELAY_PACKET_QUEUE, - TFC_PUBLIC_KEY_LENGTH, - UNENCRYPTED_DATAGRAM_HEADER, - UNENCRYPTED_ONION_SERVICE_DATA, - WIN_TYPE_GROUP, -) +from src.common.encoding import bool_to_bytes, int_to_bytes, pub_key_to_short_address, str_to_bytes, b58encode +from src.common.exceptions import SoftError +from src.common.input import ask_confirmation_code, get_b58_key, nc_bypass_msg, yes +from src.common.misc import reset_terminal, split_to_substrings +from src.common.output import m_print, phase, 
print_fingerprint, print_key, print_on_previous_line +from src.common.path import ask_path_gui +from src.common.statics import (ARGON2_PSK_MEMORY_COST, ARGON2_PSK_PARALLELISM, ARGON2_PSK_TIME_COST, + B58_PUBLIC_KEY, CONFIRM_CODE_LENGTH, DONE, ECDHE, FINGERPRINT_LENGTH, + KDB_ADD_ENTRY_HEADER, KEX_STATUS_HAS_RX_PSK, KEX_STATUS_LOCAL_KEY, + KEX_STATUS_NO_RX_PSK, KEX_STATUS_PENDING, KEX_STATUS_UNVERIFIED, + KEX_STATUS_VERIFIED, KEY_EX_ECDHE, KEY_EX_PSK_RX, KEY_EX_PSK_TX, + KEY_MANAGEMENT_QUEUE, LOCAL_KEY_DATAGRAM_HEADER, LOCAL_KEY_RDY, LOCAL_NICK, + LOCAL_PUBKEY, NC_BYPASS_START, NC_BYPASS_STOP, PUBLIC_KEY_DATAGRAM_HEADER, + RELAY_PACKET_QUEUE, TFC_PUBLIC_KEY_LENGTH, UNENCRYPTED_DATAGRAM_HEADER, + UNENCRYPTED_ONION_SERVICE_DATA, UNENCRYPTED_PUBKEY_CHECK, WIN_TYPE_GROUP, ENCODED_B58_KDK_LENGTH) from src.transmitter.packet import queue_command, queue_to_nc if typing.TYPE_CHECKING: - from multiprocessing import Queue - from src.common.db_contacts import Contact, ContactList - from src.common.db_onion import OnionService - from src.common.db_settings import Settings - from src.common.gateway import Gateway + from multiprocessing import Queue + from src.common.db_contacts import Contact, ContactList + from src.common.db_onion import OnionService + from src.common.db_settings import Settings + from src.common.gateway import Gateway from src.transmitter.windows import TxWindow - QueueDict = Dict[bytes, Queue[Any]] -# Onion Service - - -def export_onion_service_data( - contact_list: "ContactList", - settings: "Settings", - onion_service: "OnionService", - gateway: "Gateway", -) -> None: +def export_onion_service_data(contact_list: 'ContactList', + settings: 'Settings', + onion_service: 'OnionService', + gateway: 'Gateway' + ) -> None: """\ Send the Tor Onion Service's private key and list of Onion Service public keys of contacts to Relay Program on Networked Computer. 
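export_onion_service_data (the hunk that follows) packs the Onion Service private key, the confirmation code, the contact-request flag, and the contacts' public keys into a single unencrypted datagram for the Relay Program. A rough sketch of that layout; the header bytes, field lengths, and encoding helpers below are stand-ins, not TFC's statics or src.common.encoding functions.

    import os

    # Placeholder header bytes and lengths for illustration only.
    UNENCRYPTED_DATAGRAM_HEADER    = b'U'
    UNENCRYPTED_ONION_SERVICE_DATA = b'O'
    ONION_KEY_LEN, CONF_CODE_LEN   = 32, 1

    def int_to_bytes(i: int) -> bytes:
        """Stand-in for src.common.encoding.int_to_bytes."""
        return i.to_bytes(8, 'big')

    def bool_to_bytes(b: bool) -> bytes:
        """Stand-in for src.common.encoding.bool_to_bytes."""
        return b'\x01' if b else b'\x00'

    def build_export_datagram(private_key:    bytes,
                              conf_code:      bytes,
                              allow_requests: bool,
                              pending:        list,
                              existing:       list
                              ) -> bytes:
        """Header | private key | confirmation code | flag | contact data."""
        contact_data = int_to_bytes(len(pending)) + b''.join(pending) + b''.join(existing)
        return (UNENCRYPTED_DATAGRAM_HEADER
                + UNENCRYPTED_ONION_SERVICE_DATA
                + private_key
                + conf_code
                + bool_to_bytes(allow_requests)
                + contact_data)

    datagram = build_export_datagram(os.urandom(ONION_KEY_LEN),
                                     os.urandom(CONF_CODE_LEN),
                                     True,
                                     pending=[os.urandom(32)],
                                     existing=[os.urandom(32), os.urandom(32)])
    print(len(datagram))  # 2 + 32 + 1 + 1 + 8 + 3*32 = 140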
@@ -144,59 +104,51 @@ def export_onion_service_data( """ m_print("Onion Service setup", bold=True, head_clear=True, head=1, tail=1) - pending_contacts = b"".join(contact_list.get_list_of_pending_pub_keys()) - existing_contacts = b"".join(contact_list.get_list_of_existing_pub_keys()) - no_pending = int_to_bytes(len(contact_list.get_list_of_pending_pub_keys())) - contact_data = no_pending + pending_contacts + existing_contacts + pending_contacts = b''.join(contact_list.get_list_of_pending_pub_keys()) + existing_contacts = b''.join(contact_list.get_list_of_existing_pub_keys()) + no_pending = int_to_bytes(len(contact_list.get_list_of_pending_pub_keys())) + contact_data = no_pending + pending_contacts + existing_contacts - relay_command = ( - UNENCRYPTED_DATAGRAM_HEADER - + UNENCRYPTED_ONION_SERVICE_DATA - + onion_service.onion_private_key - + onion_service.conf_code - + bool_to_bytes(settings.allow_contact_requests) - + contact_data - ) + relay_command = (UNENCRYPTED_DATAGRAM_HEADER + + UNENCRYPTED_ONION_SERVICE_DATA + + onion_service.onion_private_key + + onion_service.conf_code + + bool_to_bytes(settings.allow_contact_requests) + + contact_data) deliver_onion_service_data(relay_command, onion_service, gateway) -def deliver_onion_service_data( - relay_command: bytes, onion_service: "OnionService", gateway: "Gateway" -) -> None: +def deliver_onion_service_data(relay_command: bytes, + onion_service: 'OnionService', + gateway: 'Gateway' + ) -> None: """Send Onion Service data to Replay Program on Networked Computer.""" gateway.write(relay_command) while True: - purp_code = ask_confirmation_code("Relay") + purp_code = ask_confirmation_code('Relay') if purp_code == onion_service.conf_code.hex(): onion_service.is_delivered = True onion_service.new_confirmation_code() break - if purp_code == "": + if purp_code == '': phase("Resending Onion Service data", head=2) gateway.write(relay_command) phase(DONE) print_on_previous_line(reps=5) else: - m_print( - [ - "Incorrect confirmation code. If Relay Program did not", - "receive Onion Service data, resend it by pressing .", - ], - head=1, - ) + m_print(["Incorrect confirmation code. If Relay Program did not", + "receive Onion Service data, resend it by pressing ."], head=1) print_on_previous_line(reps=5, delay=2) -# Local key - - -def new_local_key( - contact_list: "ContactList", settings: "Settings", queues: "QueueDict" -) -> None: +def new_local_key(contact_list: 'ContactList', + settings: 'Settings', + queues: 'QueueDict' + ) -> None: """Run local key exchange protocol. 
Local key encrypts commands and data sent from Source Computer to @@ -230,66 +182,52 @@ def new_local_key( """ try: if settings.traffic_masking and contact_list.has_local_contact(): - raise SoftError( - "Error: Command is disabled during traffic masking.", head_clear=True - ) + raise SoftError("Error: Command is disabled during traffic masking.", head_clear=True) m_print("Local key setup", bold=True, head_clear=True, head=1, tail=1) if not contact_list.has_local_contact(): time.sleep(0.5) - key = csprng() - hek = csprng() - kek = csprng() + key = csprng() + hek = csprng() + kek = csprng() c_code = os.urandom(CONFIRM_CODE_LENGTH) - local_key_packet = LOCAL_KEY_DATAGRAM_HEADER + encrypt_and_sign( - plaintext=key + hek + c_code, key=kek - ) + local_key_packet = LOCAL_KEY_DATAGRAM_HEADER + encrypt_and_sign(plaintext=key + hek + c_code, key=kek) deliver_local_key(local_key_packet, kek, c_code, settings, queues) # Add local contact to contact list database - contact_list.add_contact( - LOCAL_PUBKEY, - LOCAL_NICK, - bytes(FINGERPRINT_LENGTH), - bytes(FINGERPRINT_LENGTH), - KEX_STATUS_LOCAL_KEY, - False, - False, - False, - ) + contact_list.add_contact(LOCAL_PUBKEY, + LOCAL_NICK, + blake2b(b58encode(kek).encode()), + bytes(FINGERPRINT_LENGTH), + KEX_STATUS_LOCAL_KEY, + False, False, False) # Add local contact to keyset database - queues[KEY_MANAGEMENT_QUEUE].put( - (KDB_ADD_ENTRY_HEADER, LOCAL_PUBKEY, key, csprng(), hek, csprng()) - ) + queues[KEY_MANAGEMENT_QUEUE].put((KDB_ADD_ENTRY_HEADER, + LOCAL_PUBKEY, + key, csprng(), + hek, csprng())) # Notify Receiver that confirmation code was successfully entered queue_command(LOCAL_KEY_RDY, settings, queues) - m_print( - "Successfully completed the local key exchange.", - bold=True, - tail_clear=True, - delay=1, - head=1, - ) + m_print("Successfully completed the local key exchange.", bold=True, tail_clear=True, delay=1, head=1) reset_terminal() except (EOFError, KeyboardInterrupt): raise SoftError("Local key setup aborted.", tail_clear=True, delay=1, head=2) -def deliver_local_key( - local_key_packet: bytes, - kek: bytes, - c_code: bytes, - settings: "Settings", - queues: "QueueDict", -) -> None: +def deliver_local_key(local_key_packet: bytes, + kek: bytes, + c_code: bytes, + settings: 'Settings', + queues: 'QueueDict' + ) -> None: """Deliver encrypted local key to Destination Computer.""" nc_bypass_msg(NC_BYPASS_START, settings) queue_to_nc(local_key_packet, queues[RELAY_PACKET_QUEUE]) @@ -306,28 +244,19 @@ def deliver_local_key( phase(DONE) print_on_previous_line(reps=(9 if settings.local_testing_mode else 10)) else: - m_print( - [ - "Incorrect confirmation code. If Receiver did not receive", - "the encrypted local key, resend it by pressing .", - ], - head=1, - ) - print_on_previous_line( - reps=(9 if settings.local_testing_mode else 10), delay=2 - ) - + m_print(["Incorrect confirmation code. 
If Receiver did not receive", + "the encrypted local key, resend it by pressing ."], head=1) + print_on_previous_line(reps=(9 if settings.local_testing_mode else 10), delay=2) # ECDHE -def start_key_exchange( - onion_pub_key: bytes, # Public key of contact's v3 Onion Service - nick: str, # Contact's nickname - contact_list: "ContactList", # ContactList object - settings: "Settings", # Settings object - queues: "QueueDict", # Dictionary of multiprocessing queues -) -> None: +def start_key_exchange(onion_pub_key: bytes, # Public key of contact's v3 Onion Service + nick: str, # Contact's nickname + contact_list: 'ContactList', # ContactList object + settings: 'Settings', # Settings object + queues: 'QueueDict' # Dictionary of multiprocessing queues + ) -> None: """Start X448 key exchange with the recipient. This function first creates the X448 key pair. It then outputs the @@ -366,17 +295,12 @@ def start_key_exchange( mk = message key hk = header key """ if not contact_list.has_pub_key(onion_pub_key): - contact_list.add_contact( - onion_pub_key, - nick, - bytes(FINGERPRINT_LENGTH), - bytes(FINGERPRINT_LENGTH), - KEX_STATUS_PENDING, - settings.log_messages_by_default, - settings.accept_files_by_default, - settings.show_notifications_by_default, - ) - + contact_list.add_contact(onion_pub_key, nick, + bytes(FINGERPRINT_LENGTH), bytes(FINGERPRINT_LENGTH), + KEX_STATUS_PENDING, + settings.log_messages_by_default, + settings.accept_files_by_default, + settings.show_notifications_by_default) contact = contact_list.get_contact_by_pub_key(onion_pub_key) # Generate new private key or load cached private key @@ -386,80 +310,78 @@ def start_key_exchange( tfc_private_key_user = contact.tfc_private_key try: - tfc_public_key_user = X448.derive_public_key(tfc_private_key_user) - tfc_public_key_contact = exchange_public_keys( - onion_pub_key, tfc_public_key_user, contact, settings, queues - ) + tfc_public_key_user = X448.derive_public_key(tfc_private_key_user) + kdk_hash = contact_list.get_contact_by_pub_key(LOCAL_PUBKEY).tx_fingerprint + tfc_public_key_contact = exchange_public_keys(onion_pub_key, tfc_public_key_user, kdk_hash, contact, settings, queues) validate_contact_public_key(tfc_public_key_contact) dh_shared_key = X448.shared_key(tfc_private_key_user, tfc_public_key_contact) - tx_mk, rx_mk, tx_hk, rx_hk, tx_fp, rx_fp = X448.derive_keys( - dh_shared_key, tfc_public_key_user, tfc_public_key_contact - ) + tx_mk, rx_mk, tx_hk, rx_hk, tx_fp, rx_fp \ + = X448.derive_keys(dh_shared_key, tfc_public_key_user, tfc_public_key_contact) - kex_status = fingerprint_validation(tx_fp, rx_fp) + kex_status = validate_contact_fingerprint(tx_fp, rx_fp) - deliver_contact_data( - KEY_EX_ECDHE, - nick, - onion_pub_key, - tx_mk, - rx_mk, - tx_hk, - rx_hk, - queues, - settings, - ) + deliver_contact_data(KEY_EX_ECDHE, nick, onion_pub_key, tx_mk, rx_mk, tx_hk, rx_hk, queues, settings) # Store contact data into databases contact.tfc_private_key = None - contact.tx_fingerprint = tx_fp - contact.rx_fingerprint = rx_fp - contact.kex_status = kex_status + contact.tx_fingerprint = tx_fp + contact.rx_fingerprint = rx_fp + contact.kex_status = kex_status contact_list.store_contacts() - queues[KEY_MANAGEMENT_QUEUE].put( - (KDB_ADD_ENTRY_HEADER, onion_pub_key, tx_mk, rx_mk, tx_hk, rx_hk) - ) + queues[KEY_MANAGEMENT_QUEUE].put((KDB_ADD_ENTRY_HEADER, + onion_pub_key, + tx_mk, csprng(), + tx_hk, csprng())) - m_print( - f"Successfully added {nick}.", bold=True, tail_clear=True, delay=1, head=1 - ) + m_print(f"Successfully added {nick}.", 
bold=True, tail_clear=True, delay=1, head=1) except (EOFError, KeyboardInterrupt): contact.tfc_private_key = tfc_private_key_user raise SoftError("Key exchange interrupted.", tail_clear=True, delay=1, head=2) -def exchange_public_keys( - onion_pub_key: bytes, - tfc_public_key_user: bytes, - contact: "Contact", - settings: "Settings", - queues: "QueueDict", -) -> bytes: +def exchange_public_keys(onion_pub_key: bytes, + tfc_public_key_user: bytes, + kdk_hash: bytes, + contact: 'Contact', + settings: 'Settings', + queues: 'QueueDict', + ) -> bytes: """Exchange public keys with contact. This function outputs the user's public key and waits for user to enter the public key of the contact. If the User presses , the function will resend the users' public key to contact. """ + public_key_packet = PUBLIC_KEY_DATAGRAM_HEADER + onion_pub_key + tfc_public_key_user + queue_to_nc(public_key_packet, queues[RELAY_PACKET_QUEUE]) + while True: - public_key_packet = ( - PUBLIC_KEY_DATAGRAM_HEADER + onion_pub_key + tfc_public_key_user - ) - queue_to_nc(public_key_packet, queues[RELAY_PACKET_QUEUE]) + try: + tfc_public_key_contact = get_b58_key(B58_PUBLIC_KEY, settings, contact.short_address) + except ValueError as invalid_pub_key: + invalid_key = str(invalid_pub_key).encode() - tfc_public_key_contact = get_b58_key( - B58_PUBLIC_KEY, settings, contact.short_address - ) + # Do not send packet to Relay Program if the user has for some reason + # managed to embed the local key decryption key inside the public key. + substrings = split_to_substrings(invalid_key, ENCODED_B58_KDK_LENGTH) + safe_string = not any(blake2b(substring) == kdk_hash for substring in substrings) - if tfc_public_key_contact != b"": - break + if safe_string: + public_key_packet = (UNENCRYPTED_DATAGRAM_HEADER + UNENCRYPTED_PUBKEY_CHECK + onion_pub_key + invalid_key) + queue_to_nc(public_key_packet, queues[RELAY_PACKET_QUEUE]) + continue - return tfc_public_key_contact + if tfc_public_key_contact == b'': + public_key_packet = PUBLIC_KEY_DATAGRAM_HEADER + onion_pub_key + tfc_public_key_user + queue_to_nc(public_key_packet, queues[RELAY_PACKET_QUEUE]) + continue + + return tfc_public_key_contact def validate_contact_public_key(tfc_public_key_contact: bytes) -> None: @@ -471,34 +393,24 @@ def validate_contact_public_key(tfc_public_key_contact: bytes) -> None: `src.common.crypto` module. """ if len(tfc_public_key_contact) != TFC_PUBLIC_KEY_LENGTH: - m_print( - [ - "Warning!", - "Received invalid size public key.", - "Aborting key exchange for your safety.", - ], - bold=True, - tail=1, - ) + m_print(["Warning!", + "Received invalid size public key.", + "Aborting key exchange for your safety."], + bold=True, tail=1) raise SoftError("Error: Invalid public key length", output=False) if tfc_public_key_contact == bytes(TFC_PUBLIC_KEY_LENGTH): # The public key of contact is zero with negligible probability, # therefore we assume such key is malicious and attempts to set # the shared key to zero. - m_print( - [ - "Warning!", - "Received a malicious zero-public key.", - "Aborting key exchange for your safety.", - ], - bold=True, - tail=1, - ) + m_print(["Warning!", + "Received a malicious zero-public key.", + "Aborting key exchange for your safety."], + bold=True, tail=1) raise SoftError("Error: Zero public key", output=False) -def fingerprint_validation(tx_fp: bytes, rx_fp: bytes) -> bytes: +def validate_contact_fingerprint(tx_fp: bytes, rx_fp: bytes) -> bytes: """Validate or skip validation of contact fingerprint. 
This function prompts the user to verify the fingerprint of the contact. @@ -507,43 +419,28 @@ def fingerprint_validation(tx_fp: bytes, rx_fp: bytes) -> bytes: """ try: if not verify_fingerprints(tx_fp, rx_fp): - m_print( - [ - "Warning!", - "Possible man-in-the-middle attack detected.", - "Aborting key exchange for your safety.", - ], - bold=True, - tail=1, - ) + m_print(["Warning!", + "Possible man-in-the-middle attack detected.", + "Aborting key exchange for your safety."], bold=True, tail=1) raise SoftError("Error: Fingerprint mismatch", delay=2.5, output=False) kex_status = KEX_STATUS_VERIFIED except (EOFError, KeyboardInterrupt): - m_print( - [ - "Skipping fingerprint verification.", - "", - "Warning!", - "Man-in-the-middle attacks can not be detected", - "unless fingerprints are verified! To re-verify", - "the contact, use the command '/verify'.", - "", - "Press to continue.", - ], - manual_proceed=True, - box=True, - head=2, - tail=1, - ) + m_print(["Skipping fingerprint verification.", + '', "Warning!", + "Man-in-the-middle attacks can not be detected", + "unless fingerprints are verified! To re-verify", + "the contact, use the command '/verify'.", + '', "Press to continue."], + manual_proceed=True, box=True, head=2, tail=1) kex_status = KEX_STATUS_UNVERIFIED return kex_status -def verify_fingerprints( - tx_fp: bytes, rx_fp: bytes # User's fingerprint # Contact's fingerprint -) -> bool: # True if fingerprints match, else False +def verify_fingerprints(tx_fp: bytes, # User's fingerprint + rx_fp: bytes # Contact's fingerprint + ) -> bool: # True if fingerprints match, else False """\ Verify fingerprints over an authenticated out-of-band channel to detect MITM attacks against TFC's key exchange. @@ -562,16 +459,11 @@ def verify_fingerprints( authenticated channel it's possible to verify that the correct key was received from the network. """ - m_print( - "To verify received public key was not replaced by an attacker " - "call the contact over an end-to-end encrypted line, preferably Signal " - "(https://signal.org/). Make sure Signal's safety numbers have been " - "verified, and then verbally compare the key fingerprints below.", - head_clear=True, - max_width=49, - head=1, - tail=1, - ) + m_print("To verify received public key was not replaced by an attacker " + "call the contact over an end-to-end encrypted line, preferably Signal " + "(https://signal.org/). 
Make sure Signal's safety numbers have been " + "verified, and then verbally compare the key fingerprints below.", + head_clear=True, max_width=49, head=1, tail=1) print_fingerprint(tx_fp, " Your fingerprint (you read) ") print_fingerprint(rx_fp, "Purported fingerprint for contact (they read)") @@ -579,22 +471,19 @@ def verify_fingerprints( return yes("Is the contact's fingerprint correct?") -def deliver_contact_data( - header: bytes, # Key type (x448, PSK) - nick: str, # Contact's nickname - onion_pub_key: bytes, # Public key of contact's v3 Onion Service - tx_mk: bytes, # Message key for outgoing messages - rx_mk: bytes, # Message key for incoming messages - tx_hk: bytes, # Header key for outgoing messages - rx_hk: bytes, # Header key for incoming messages - queues: "QueueDict", # Dictionary of multiprocessing queues - settings: "Settings", # Settings object -) -> None: +def deliver_contact_data(header: bytes, # Key type (x448, PSK) + nick: str, # Contact's nickname + onion_pub_key: bytes, # Public key of contact's v3 Onion Service + tx_mk: bytes, # Message key for outgoing messages + rx_mk: bytes, # Message key for incoming messages + tx_hk: bytes, # Header key for outgoing messages + rx_hk: bytes, # Header key for incoming messages + queues: 'QueueDict', # Dictionary of multiprocessing queues + settings: 'Settings', # Settings object + ) -> None: """Deliver contact data to Destination Computer.""" - c_code = blake2b(onion_pub_key, digest_size=CONFIRM_CODE_LENGTH) - command = ( - header + onion_pub_key + tx_mk + rx_mk + tx_hk + rx_hk + str_to_bytes(nick) - ) + c_code = blake2b(onion_pub_key, digest_size=CONFIRM_CODE_LENGTH) + command = (header + onion_pub_key + tx_mk + rx_mk + tx_hk + rx_hk + str_to_bytes(nick)) queue_command(command, settings, queues) @@ -614,17 +503,13 @@ def deliver_contact_data( print_on_previous_line(reps=4, delay=2) -# PSK - - -def create_pre_shared_key( - onion_pub_key: bytes, # Public key of contact's v3 Onion Service - nick: str, # Nick of contact - contact_list: "ContactList", # Contact list object - settings: "Settings", # Settings object - onion_service: "OnionService", # OnionService object - queues: "QueueDict", # Dictionary of multiprocessing queues -) -> None: +def create_pre_shared_key(onion_pub_key: bytes, # Public key of contact's v3 Onion Service + nick: str, # Nick of contact + contact_list: 'ContactList', # Contact list object + settings: 'Settings', # Settings object + onion_service: 'OnionService', # OnionService object + queues: 'QueueDict' # Dictionary of multiprocessing queues + ) -> None: """Generate a new pre-shared key for manual key delivery. 
Pre-shared keys offer a low-tech solution against the slowly @@ -652,76 +537,50 @@ def create_pre_shared_key( try: tx_mk = csprng() tx_hk = csprng() - salt = csprng() + salt = csprng() password = MasterKey.new_password("password for PSK") phase("Deriving key encryption key", head=2) - kek = argon2_kdf( - password, - salt, - ARGON2_PSK_TIME_COST, - ARGON2_PSK_MEMORY_COST, - ARGON2_PSK_PARALLELISM, - ) + kek = argon2_kdf(password, salt, ARGON2_PSK_TIME_COST, ARGON2_PSK_MEMORY_COST, ARGON2_PSK_PARALLELISM) phase(DONE) ct_tag = encrypt_and_sign(tx_mk + tx_hk, key=kek) - store_keys_on_removable_drive( - ct_tag, salt, nick, onion_pub_key, onion_service, settings - ) + store_keys_on_removable_drive(ct_tag, salt, nick, onion_pub_key, onion_service, settings) - deliver_contact_data( - KEY_EX_PSK_TX, - nick, - onion_pub_key, - tx_mk, - csprng(), - tx_hk, - csprng(), - queues, - settings, - ) + deliver_contact_data(KEY_EX_PSK_TX, nick, onion_pub_key, tx_mk, csprng(), tx_hk, csprng(), queues, settings) - contact_list.add_contact( - onion_pub_key, - nick, - bytes(FINGERPRINT_LENGTH), - bytes(FINGERPRINT_LENGTH), - KEX_STATUS_NO_RX_PSK, - settings.log_messages_by_default, - settings.accept_files_by_default, - settings.show_notifications_by_default, - ) + contact_list.add_contact(onion_pub_key, nick, + bytes(FINGERPRINT_LENGTH), bytes(FINGERPRINT_LENGTH), + KEX_STATUS_NO_RX_PSK, + settings.log_messages_by_default, + settings.accept_files_by_default, + settings.show_notifications_by_default) - queues[KEY_MANAGEMENT_QUEUE].put( - (KDB_ADD_ENTRY_HEADER, onion_pub_key, tx_mk, csprng(), tx_hk, csprng()) - ) + queues[KEY_MANAGEMENT_QUEUE].put((KDB_ADD_ENTRY_HEADER, + onion_pub_key, + tx_mk, csprng(), + tx_hk, csprng())) - m_print( - f"Successfully added {nick}.", bold=True, tail_clear=True, delay=1, head=1 - ) + m_print(f"Successfully added {nick}.", bold=True, tail_clear=True, delay=1, head=1) except (EOFError, KeyboardInterrupt): raise SoftError("PSK generation aborted.", tail_clear=True, delay=1, head=2) -def store_keys_on_removable_drive( - ct_tag: bytes, # Encrypted PSK - salt: bytes, # Salt for PSK decryption key derivation - nick: str, # Contact's nickname - onion_pub_key: bytes, # Public key of contact's v3 Onion Service - onion_service: "OnionService", # OnionService object - settings: "Settings", # Settings object -) -> None: +def store_keys_on_removable_drive(ct_tag: bytes, # Encrypted PSK + salt: bytes, # Salt for PSK decryption key derivation + nick: str, # Contact's nickname + onion_pub_key: bytes, # Public key of contact's v3 Onion Service + onion_service: 'OnionService', # OnionService object + settings: 'Settings', # Settings object + ) -> None: """Store keys for contact on a removable media.""" while True: trunc_addr = pub_key_to_short_address(onion_pub_key) - store_d = ask_path_gui(f"Select removable media for {nick}", settings) - f_name = ( - f"{store_d}/{onion_service.user_short_address}.psk - Give to {trunc_addr}" - ) + store_d = ask_path_gui(f"Select removable media for {nick}", settings) + f_name = f"{store_d}/{onion_service.user_short_address}.psk - Give to {trunc_addr}" try: with open(f_name, "wb+") as f: @@ -730,50 +589,39 @@ def store_keys_on_removable_drive( os.fsync(f.fileno()) break except PermissionError: - m_print( - "Error: Did not have permission to write to the directory.", delay=0.5 - ) + m_print("Error: Did not have permission to write to the directory.", delay=0.5) continue -def rxp_load_psk( - window: "TxWindow", - contact_list: "ContactList", - settings: "Settings", - 
queues: "QueueDict", -) -> None: +def rxp_load_psk(window: 'TxWindow', + contact_list: 'ContactList', + settings: 'Settings', + queues: 'QueueDict', + ) -> None: """Send command to Receiver Program to load PSK for active contact.""" if settings.traffic_masking: - raise SoftError( - "Error: Command is disabled during traffic masking.", head_clear=True - ) + raise SoftError("Error: Command is disabled during traffic masking.", head_clear=True) if window.type == WIN_TYPE_GROUP or window.contact is None: raise SoftError("Error: Group is selected.", head_clear=True) if not contact_list.get_contact_by_pub_key(window.uid).uses_psk(): - raise SoftError( - f"Error: The current key was exchanged with {ECDHE}.", head_clear=True - ) + raise SoftError(f"Error: The current key was exchanged with {ECDHE}.", head_clear=True) - c_code = blake2b(window.uid, digest_size=CONFIRM_CODE_LENGTH) + c_code = blake2b(window.uid, digest_size=CONFIRM_CODE_LENGTH) command = KEY_EX_PSK_RX + c_code + window.uid queue_command(command, settings, queues) while True: try: - purp_code = ask_confirmation_code("Receiver") + purp_code = ask_confirmation_code('Receiver') if purp_code == c_code.hex(): window.contact.kex_status = KEX_STATUS_HAS_RX_PSK contact_list.store_contacts() - raise SoftError( - f"Removed PSK reminder for {window.name}.", tail_clear=True, delay=1 - ) + raise SoftError(f"Removed PSK reminder for {window.name}.", tail_clear=True, delay=1) m_print("Incorrect confirmation code.", head=1) print_on_previous_line(reps=4, delay=2) except (EOFError, KeyboardInterrupt): - raise SoftError( - "PSK verification aborted.", tail_clear=True, delay=1, head=2 - ) + raise SoftError("PSK verification aborted.", tail_clear=True, delay=1, head=2) diff --git a/src/transmitter/packet.py b/src/transmitter/packet.py index 47a368b..6d76662 100755 --- a/src/transmitter/packet.py +++ b/src/transmitter/packet.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
@@ -26,72 +26,40 @@ import zlib from typing import Any, Dict, List, Optional, Tuple, Union -from src.common.crypto import blake2b, byte_padding, csprng, encrypt_and_sign -from src.common.encoding import bool_to_bytes, int_to_bytes, str_to_bytes +from src.common.crypto import blake2b, byte_padding, csprng, encrypt_and_sign +from src.common.encoding import bool_to_bytes, int_to_bytes, str_to_bytes from src.common.exceptions import CriticalError, SoftError -from src.common.input import yes -from src.common.misc import split_byte_string -from src.common.output import m_print, phase, print_on_previous_line -from src.common.path import ask_path_gui -from src.common.statics import ( - ASSEMBLY_PACKET_LENGTH, - COMMAND, - COMMAND_DATAGRAM_HEADER, - COMMAND_PACKET_QUEUE, - COMPRESSION_LEVEL, - C_A_HEADER, - C_E_HEADER, - C_L_HEADER, - C_S_HEADER, - DONE, - FILE, - FILE_DATAGRAM_HEADER, - FILE_KEY_HEADER, - FILE_PACKET_CTR_LENGTH, - F_A_HEADER, - F_C_HEADER, - F_E_HEADER, - F_L_HEADER, - F_S_HEADER, - GROUP_MESSAGE_HEADER, - GROUP_MSG_ID_LENGTH, - LOCAL_PUBKEY, - MESSAGE, - MESSAGE_DATAGRAM_HEADER, - MESSAGE_PACKET_QUEUE, - M_A_HEADER, - M_C_HEADER, - M_E_HEADER, - M_L_HEADER, - M_S_HEADER, - PADDING_LENGTH, - PRIVATE_MESSAGE_HEADER, - RELAY_PACKET_QUEUE, - TM_COMMAND_PACKET_QUEUE, - TM_FILE_PACKET_QUEUE, - TM_MESSAGE_PACKET_QUEUE, - WIN_TYPE_GROUP, -) +from src.common.input import yes +from src.common.misc import split_byte_string +from src.common.output import m_print, phase, print_on_previous_line +from src.common.path import ask_path_gui +from src.common.statics import (ASSEMBLY_PACKET_LENGTH, COMMAND, COMMAND_DATAGRAM_HEADER, COMMAND_PACKET_QUEUE, + COMPRESSION_LEVEL, C_A_HEADER, C_E_HEADER, C_L_HEADER, C_S_HEADER, DONE, FILE, + FILE_DATAGRAM_HEADER, FILE_KEY_HEADER, FILE_PACKET_CTR_LENGTH, F_A_HEADER, + F_C_HEADER, F_E_HEADER, F_L_HEADER, F_S_HEADER, GROUP_MESSAGE_HEADER, + GROUP_MSG_ID_LENGTH, LOCAL_PUBKEY, MESSAGE, MESSAGE_DATAGRAM_HEADER, + MESSAGE_PACKET_QUEUE, M_A_HEADER, M_C_HEADER, M_E_HEADER, M_L_HEADER, M_S_HEADER, + PADDING_LENGTH, PRIVATE_MESSAGE_HEADER, RELAY_PACKET_QUEUE, TM_COMMAND_PACKET_QUEUE, + TM_FILE_PACKET_QUEUE, TM_MESSAGE_PACKET_QUEUE, WIN_TYPE_GROUP) -from src.transmitter.files import File +from src.transmitter.files import File from src.transmitter.window_mock import MockWindow -from src.transmitter.user_input import UserInput +from src.transmitter.user_input import UserInput if typing.TYPE_CHECKING: - from multiprocessing import Queue - from src.common.db_keys import KeyList + from multiprocessing import Queue + from src.common.db_keys import KeyList from src.common.db_masterkey import MasterKey - from src.common.db_settings import Settings - from src.common.gateway import Gateway + from src.common.db_settings import Settings + from src.common.gateway import Gateway from src.transmitter.windows import TxWindow - - QueueDict = Dict[bytes, Queue[Any]] - log_queue_data = Tuple[ - Optional[bytes], bytes, Optional[bool], Optional[bool], MasterKey - ] + QueueDict = Dict[bytes, Queue[Any]] + log_queue_data = Tuple[Optional[bytes], bytes, Optional[bool], Optional[bool], MasterKey] -def queue_to_nc(packet: bytes, nc_queue: "Queue[bytes]",) -> None: +def queue_to_nc(packet: bytes, + nc_queue: 'Queue[bytes]', + ) -> None: """Queue unencrypted command/exported file to Networked Computer. 
This function queues unencrypted packets intended for Relay Program @@ -102,22 +70,24 @@ def queue_to_nc(packet: bytes, nc_queue: "Queue[bytes]",) -> None: nc_queue.put(packet) -def queue_command(command: bytes, settings: "Settings", queues: "QueueDict") -> None: +def queue_command(command: bytes, + settings: 'Settings', + queues: 'QueueDict' + ) -> None: """Split command to assembly packets and queue them for sender_loop().""" assembly_packets = split_to_assembly_packets(command, COMMAND) queue_assembly_packets(assembly_packets, COMMAND, settings, queues) -def queue_message( - user_input: "UserInput", - window: Union["MockWindow", "TxWindow"], - settings: "Settings", - queues: "QueueDict", - header: bytes = b"", - whisper: bool = False, - log_as_ph: bool = False, -) -> None: +def queue_message(user_input: 'UserInput', + window: Union['MockWindow', 'TxWindow'], + settings: 'Settings', + queues: 'QueueDict', + header: bytes = b'', + whisper: bool = False, + log_as_ph: bool = False + ) -> None: """\ Prepend header to message, split the message into assembly packets, and queue the assembly packets. @@ -155,23 +125,20 @@ def queue_message( """ if not header: if window.type == WIN_TYPE_GROUP and window.group is not None: - header = ( - GROUP_MESSAGE_HEADER - + window.group.group_id - + os.urandom(GROUP_MSG_ID_LENGTH) - ) + header = GROUP_MESSAGE_HEADER + window.group.group_id + os.urandom(GROUP_MSG_ID_LENGTH) else: header = PRIVATE_MESSAGE_HEADER - payload = bool_to_bytes(whisper) + header + user_input.plaintext.encode() + payload = bool_to_bytes(whisper) + header + user_input.plaintext.encode() assembly_packets = split_to_assembly_packets(payload, MESSAGE) - queue_assembly_packets( - assembly_packets, MESSAGE, settings, queues, window, log_as_ph - ) + queue_assembly_packets(assembly_packets, MESSAGE, settings, queues, window, log_as_ph) -def queue_file(window: "TxWindow", settings: "Settings", queues: "QueueDict") -> None: +def queue_file(window: 'TxWindow', + settings: 'Settings', + queues: 'QueueDict' + ) -> None: """Ask file path and load file data. In TFC there are two ways to send a file. @@ -193,51 +160,35 @@ def queue_file(window: "TxWindow", settings: "Settings", queues: "QueueDict") -> """ path = ask_path_gui("Select file to send...", settings, get_file=True) - if path.endswith( - ( - "tx_contacts", - "tx_groups", - "tx_keys", - "tx_login_data", - "tx_settings", - "rx_contacts", - "rx_groups", - "rx_keys", - "rx_login_data", - "rx_settings", - "tx_serial_settings.json", - "nc_serial_settings.json", - "rx_serial_settings.json", - "tx_onion_db", - ) - ): + if path.endswith(('tx_contacts', 'tx_groups', 'tx_keys', 'tx_login_data', 'tx_settings', + 'rx_contacts', 'rx_groups', 'rx_keys', 'rx_login_data', 'rx_settings', + 'tx_serial_settings.json', 'nc_serial_settings.json', + 'rx_serial_settings.json', 'tx_onion_db')): raise SoftError("Error: Can't send TFC database.", head_clear=True) if not settings.traffic_masking: send_file(path, settings, queues, window) return - file = File(path, window, settings) + file = File(path, window, settings) assembly_packets = split_to_assembly_packets(file.plaintext, FILE) if settings.confirm_sent_files: try: - if not yes( - f"Send {file.name.decode()} ({file.size_hr}) to {window.type_print} {window.name} " - f"({len(assembly_packets)} packets, time: {file.time_hr})?" 
- ): + if not yes(f"Send {file.name.decode()} ({file.size_hr}) to {window.type_print} {window.name} " + f"({len(assembly_packets)} packets, time: {file.time_hr})?"): raise SoftError("File selection aborted.", head_clear=True) except (EOFError, KeyboardInterrupt): raise SoftError("File selection aborted.", head_clear=True) - queue_assembly_packets( - assembly_packets, FILE, settings, queues, window, log_as_ph=True - ) + queue_assembly_packets(assembly_packets, FILE, settings, queues, window, log_as_ph=True) -def send_file( - path: str, settings: "Settings", queues: "QueueDict", window: "TxWindow" -) -> None: +def send_file(path: str, + settings: 'Settings', + queues: 'QueueDict', + window: 'TxWindow' + ) -> None: """Send file to window members in a single transmission. This is the default mode for file transmission, used when traffic @@ -301,22 +252,20 @@ def send_file( another file instead. """ if settings.traffic_masking: - raise SoftError( - "Error: Command is disabled during traffic masking.", head_clear=True - ) + raise SoftError("Error: Command is disabled during traffic masking.", head_clear=True) - name = path.split("/")[-1] + name = path.split('/')[-1] data = bytearray() data.extend(str_to_bytes(name)) if not os.path.isfile(path): raise SoftError("Error: File not found.", head_clear=True) - if not os.path.getsize(path): + if os.path.getsize(path) == 0: raise SoftError("Error: Target file is empty.", head_clear=True) phase("Reading data") - with open(path, "rb") as f: + with open(path, 'rb') as f: data.extend(f.read()) phase(DONE) print_on_previous_line(flush=True) @@ -328,27 +277,25 @@ def send_file( phase("Encrypting data") file_key = csprng() - file_ct = encrypt_and_sign(comp, file_key) - ct_hash = blake2b(file_ct) + file_ct = encrypt_and_sign(comp, file_key) + ct_hash = blake2b(file_ct) phase(DONE) print_on_previous_line(flush=True) phase("Exporting data") - no_contacts = int_to_bytes(len(window)) - ser_contacts = b"".join([c.onion_pub_key for c in window]) - file_packet = FILE_DATAGRAM_HEADER + no_contacts + ser_contacts + file_ct + no_contacts = int_to_bytes(len(window)) + ser_contacts = b''.join([c.onion_pub_key for c in window]) + file_packet = FILE_DATAGRAM_HEADER + no_contacts + ser_contacts + file_ct queue_to_nc(file_packet, queues[RELAY_PACKET_QUEUE]) key_delivery_msg = base64.b85encode(ct_hash + file_key).decode() for contact in window: - queue_message( - user_input=UserInput(key_delivery_msg, MESSAGE), - window=MockWindow(contact.onion_pub_key, [contact]), - settings=settings, - queues=queues, - header=FILE_KEY_HEADER, - log_as_ph=True, - ) + queue_message(user_input=UserInput(key_delivery_msg, MESSAGE), + window =MockWindow(contact.onion_pub_key, [contact]), + settings =settings, + queues =queues, + header =FILE_KEY_HEADER, + log_as_ph =True) phase(DONE) print_on_previous_line(flush=True) m_print(f"Sent file '{name}' to {window.type_print} {window.name}.") @@ -388,7 +335,7 @@ def split_to_assembly_packets(payload: bytes, p_type: str) -> List[bytes]: payload = zlib.compress(payload, level=COMPRESSION_LEVEL) if len(payload) < PADDING_LENGTH: - padded = byte_padding(payload) + padded = byte_padding(payload) packet_list = [s_header + padded] else: @@ -409,23 +356,20 @@ def split_to_assembly_packets(payload: bytes, p_type: str) -> List[bytes]: if p_type == FILE: p_list[0] = int_to_bytes(len(p_list)) + p_list[0][FILE_PACKET_CTR_LENGTH:] - packet_list = ( - [l_header + p_list[0]] - + [a_header + p for p in p_list[1:-1]] - + [e_header + p_list[-1]] - ) + packet_list = 
([l_header + p_list[0]] + + [a_header + p for p in p_list[1:-1]] + + [e_header + p_list[-1]]) return packet_list -def queue_assembly_packets( - assembly_packet_list: List[bytes], - p_type: str, - settings: "Settings", - queues: "QueueDict", - window: Optional[Union["TxWindow", "MockWindow"]] = None, - log_as_ph: bool = False, -) -> None: +def queue_assembly_packets(assembly_packet_list: List[bytes], + p_type: str, + settings: 'Settings', + queues: 'QueueDict', + window: Optional[Union['TxWindow', 'MockWindow']] = None, + log_as_ph: bool = False + ) -> None: """Queue assembly packets for sender_loop(). This function is the last function on Transmitter Program's @@ -437,48 +381,29 @@ def queue_assembly_packets( if p_type in [MESSAGE, FILE] and window is not None: if settings.traffic_masking: - queue = ( - queues[TM_MESSAGE_PACKET_QUEUE] - if p_type == MESSAGE - else queues[TM_FILE_PACKET_QUEUE] - ) + queue = queues[TM_MESSAGE_PACKET_QUEUE] if p_type == MESSAGE else queues[TM_FILE_PACKET_QUEUE] for assembly_packet in assembly_packet_list: queue.put((assembly_packet, window.log_messages, log_as_ph)) else: queue = queues[MESSAGE_PACKET_QUEUE] for c in window: for assembly_packet in assembly_packet_list: - queue.put( - ( - assembly_packet, - c.onion_pub_key, - window.log_messages, - log_as_ph, - window.uid, - ) - ) + queue.put((assembly_packet, c.onion_pub_key, window.log_messages, log_as_ph, window.uid)) elif p_type == COMMAND: - queue = ( - queues[TM_COMMAND_PACKET_QUEUE] - if settings.traffic_masking - else queues[COMMAND_PACKET_QUEUE] - ) + queue = queues[TM_COMMAND_PACKET_QUEUE] if settings.traffic_masking else queues[COMMAND_PACKET_QUEUE] for assembly_packet in assembly_packet_list: queue.put(assembly_packet) -def send_packet( - key_list: "KeyList", # Key list object - gateway: "Gateway", # Gateway object - log_queue: "Queue[log_queue_data]", # Multiprocessing queue for logged messages - assembly_packet: bytes, # Padded plaintext assembly packet - onion_pub_key: Optional[bytes] = None, # Recipient v3 Onion Service address - log_messages: Optional[bool] = None, # When True, log the message assembly packet - log_as_ph: Optional[ - bool - ] = None, # When True, log assembly packet as placeholder data -) -> None: +def send_packet(key_list: 'KeyList', # Key list object + gateway: 'Gateway', # Gateway object + log_queue: 'Queue[log_queue_data]', # Multiprocessing queue for logged messages + assembly_packet: bytes, # Padded plaintext assembly packet + onion_pub_key: Optional[bytes] = None, # Recipient v3 Onion Service address + log_messages: Optional[bool] = None, # When True, log the message assembly packet + log_as_ph: Optional[bool] = None # When True, log assembly packet as placeholder data + ) -> None: """Encrypt and send assembly packet. The assembly packets are encrypted using a symmetric message key. 
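As a standalone illustration of the splitting scheme implemented by split_to_assembly_packets in the hunks above, the sketch below compresses a payload, pads it, and prefixes one-byte short/long/append/end headers. It is only an approximation for readability: the padding size and header byte values are assumed placeholders rather than TFC's actual constants, and the file-packet counter handling is omitted.

# Minimal sketch of the assembly-packet splitting idea; placeholder constants,
# not TFC's real headers, and no file-packet counter handling.
import os
import zlib
from typing import List

PADDING_LENGTH = 255                                             # Assumed padded-unit size.
S_HEADER, L_HEADER, A_HEADER, E_HEADER = b"a", b"b", b"c", b"d"  # Placeholder one-byte headers.


def byte_padding_sketch(data: bytes, size: int = PADDING_LENGTH) -> bytes:
    """Pad data to the next multiple of `size` (PKCS#7-style)."""
    pad_len = size - (len(data) % size)
    return data + pad_len * bytes([pad_len])


def split_payload_sketch(payload: bytes) -> List[bytes]:
    """Compress, pad, and split a payload into header-prefixed packets."""
    compressed = zlib.compress(payload, level=9)
    padded     = byte_padding_sketch(compressed)

    if len(compressed) < PADDING_LENGTH:                         # Payload fits in a single short packet.
        return [S_HEADER + padded]

    chunks = [padded[i:i + PADDING_LENGTH] for i in range(0, len(padded), PADDING_LENGTH)]
    return ([L_HEADER + chunks[0]]                               # First packet of a long transmission.
            + [A_HEADER + c for c in chunks[1:-1]]               # Appended packets.
            + [E_HEADER + chunks[-1]])                           # Final packet.


packets = split_payload_sketch(os.urandom(2000))                 # Incompressible data forces the long path.
assert all(len(p) == PADDING_LENGTH + 1 for p in packets)

Under these assumptions every assembly packet has the same fixed length regardless of message size, which is what allows the traffic-masking code later in this patch to emit uniform-looking output.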
@@ -517,25 +442,22 @@ def send_packet( keyset = key_list.get_keyset(onion_pub_key) header = MESSAGE_DATAGRAM_HEADER + onion_pub_key - harac_in_bytes = int_to_bytes(keyset.tx_harac) - encrypted_harac = encrypt_and_sign(harac_in_bytes, keyset.tx_hk) + harac_in_bytes = int_to_bytes(keyset.tx_harac) + encrypted_harac = encrypt_and_sign(harac_in_bytes, keyset.tx_hk) encrypted_message = encrypt_and_sign(assembly_packet, keyset.tx_mk) - encrypted_packet = header + encrypted_harac + encrypted_message + encrypted_packet = header + encrypted_harac + encrypted_message gateway.write(encrypted_packet) keyset.rotate_tx_mk() - log_queue.put( - (onion_pub_key, assembly_packet, log_messages, log_as_ph, key_list.master_key) - ) + log_queue.put((onion_pub_key, assembly_packet, log_messages, log_as_ph, key_list.master_key)) -def cancel_packet( - user_input: "UserInput", - window: "TxWindow", - settings: "Settings", - queues: "QueueDict", -) -> None: +def cancel_packet(user_input: 'UserInput', + window: 'TxWindow', + settings: 'Settings', + queues: 'QueueDict' + ) -> None: """Cancel sent message/file to contact/group. In cases where the assembly packets have not yet been encrypted or @@ -551,21 +473,14 @@ def cancel_packet( re-writing it in a compiled language (which is very bad for users' rights). """ - header, p_type = dict(cm=(M_C_HEADER, "messages"), cf=(F_C_HEADER, "files"))[ - user_input.plaintext - ] + header, p_type = dict(cm=(M_C_HEADER, 'messages'), + cf=(F_C_HEADER, 'files' ))[user_input.plaintext] if settings.traffic_masking: - queue = ( - queues[TM_MESSAGE_PACKET_QUEUE] - if header == M_C_HEADER - else queues[TM_FILE_PACKET_QUEUE] - ) + queue = queues[TM_MESSAGE_PACKET_QUEUE] if header == M_C_HEADER else queues[TM_FILE_PACKET_QUEUE] else: if header == F_C_HEADER: - raise SoftError( - "Files are only queued during traffic masking.", head_clear=True - ) + raise SoftError("Files are only queued during traffic masking.", head_clear=True) queue = queues[MESSAGE_PACKET_QUEUE] cancel_pt = header + bytes(PADDING_LENGTH) @@ -579,14 +494,13 @@ def cancel_packet( cancel_standard_packet(cancel, cancel_pt, log_as_ph, p_type, queue, window) -def cancel_standard_packet( - cancel: bool, - cancel_pt: bytes, - log_as_ph: bool, - p_type: str, - queue: "Queue[Any]", - window: "TxWindow", -) -> None: +def cancel_standard_packet(cancel: bool, + cancel_pt: bytes, + log_as_ph: bool, + p_type: str, + queue: 'Queue[Any]', + window: 'TxWindow' + ) -> None: """Cancel standard packet.""" p_buffer = [] while queue.qsize(): @@ -602,9 +516,7 @@ def cancel_standard_packet( # Put cancel packets for each window contact to queue first if cancel: for c in window: - queue.put( - (cancel_pt, c.onion_pub_key, c.log_messages, log_as_ph, window.uid) - ) + queue.put((cancel_pt, c.onion_pub_key, c.log_messages, log_as_ph, window.uid)) # Put buffered tuples back to the queue for p in p_buffer: @@ -616,9 +528,12 @@ def cancel_standard_packet( raise SoftError(message, head_clear=True) -def cancel_traffic_masking_packet( - cancel: bool, cancel_pt: bytes, log_as_ph: bool, p_type: str, queue: "Queue[Any]" -) -> None: +def cancel_traffic_masking_packet(cancel: bool, + cancel_pt: bytes, + log_as_ph: bool, + p_type: str, + queue: 'Queue[Any]' + ) -> None: """Cancel traffic masking packet.""" if queue.qsize(): cancel = True @@ -630,8 +545,4 @@ def cancel_traffic_masking_packet( queue.put((cancel_pt, log_messages, log_as_ph)) - m_print( - f"Cancelled queues {p_type}." 
if cancel else f"No {p_type} to cancel.", - head=1, - tail=1, - ) + m_print(f"Cancelled queues {p_type}." if cancel else f"No {p_type} to cancel.", head=1, tail=1) diff --git a/src/transmitter/sender_loop.py b/src/transmitter/sender_loop.py index f544d57..a3acf2a 100755 --- a/src/transmitter/sender_loop.py +++ b/src/transmitter/sender_loop.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -25,49 +25,31 @@ import typing from typing import Any, Dict, List, Optional, Tuple from src.common.exceptions import SoftError -from src.common.misc import HideRunTime, ignored -from src.common.statics import ( - COMMAND_PACKET_QUEUE, - DATAGRAM_HEADER_LENGTH, - EXIT, - EXIT_QUEUE, - KEY_MANAGEMENT_QUEUE, - LOG_PACKET_QUEUE, - MESSAGE_PACKET_QUEUE, - RELAY_PACKET_QUEUE, - SENDER_MODE_QUEUE, - TM_COMMAND_PACKET_QUEUE, - TM_FILE_PACKET_QUEUE, - TM_MESSAGE_PACKET_QUEUE, - TM_NOISE_COMMAND_QUEUE, - TM_NOISE_PACKET_QUEUE, - TRAFFIC_MASKING, - TRAFFIC_MASKING_QUEUE_CHECK_DELAY, - UNENCRYPTED_EXIT_COMMAND, - UNENCRYPTED_WIPE_COMMAND, - WINDOW_SELECT_QUEUE, - WIPE, -) +from src.common.misc import HideRunTime, ignored +from src.common.statics import (COMMAND_PACKET_QUEUE, DATAGRAM_HEADER_LENGTH, EXIT, EXIT_QUEUE, KEY_MANAGEMENT_QUEUE, + LOG_PACKET_QUEUE, MESSAGE_PACKET_QUEUE, RELAY_PACKET_QUEUE, SENDER_MODE_QUEUE, + TM_COMMAND_PACKET_QUEUE, TM_FILE_PACKET_QUEUE, TM_MESSAGE_PACKET_QUEUE, + TM_NOISE_COMMAND_QUEUE, TM_NOISE_PACKET_QUEUE, TRAFFIC_MASKING, + TRAFFIC_MASKING_QUEUE_CHECK_DELAY, UNENCRYPTED_EXIT_COMMAND, + UNENCRYPTED_WIPE_COMMAND, WINDOW_SELECT_QUEUE, WIPE) from src.transmitter.packet import send_packet if typing.TYPE_CHECKING: - from multiprocessing import Queue - from src.common.db_keys import KeyList + from multiprocessing import Queue + from src.common.db_keys import KeyList from src.common.db_settings import Settings - from src.common.gateway import Gateway - - QueueDict = Dict[bytes, Queue[Any]] + from src.common.gateway import Gateway + QueueDict = Dict[bytes, Queue[Any]] MessageBuffer = Dict[bytes, List[Tuple[bytes, bytes, bool, bool, bytes]]] -def sender_loop( - queues: "QueueDict", - settings: "Settings", - gateway: "Gateway", - key_list: "KeyList", - unit_test: bool = False, -) -> None: +def sender_loop(queues: 'QueueDict', + settings: 'Settings', + gateway: 'Gateway', + key_list: 'KeyList', + unit_test: bool = False + ) -> None: """Output packets from queues based on queue priority. Depending on traffic masking setting adjusted by the user, enable @@ -79,16 +61,16 @@ def sender_loop( if settings.traffic_masking: settings = traffic_masking_loop(queues, settings, gateway, key_list) else: - settings, m_buffer = standard_sender_loop( - queues, gateway, key_list, m_buffer - ) + settings, m_buffer = standard_sender_loop(queues, gateway, key_list, m_buffer) if unit_test: break -def traffic_masking_loop( - queues: "QueueDict", settings: "Settings", gateway: "Gateway", key_list: "KeyList", -) -> "Settings": +def traffic_masking_loop(queues: 'QueueDict', + settings: 'Settings', + gateway: 'Gateway', + key_list: 'KeyList', + ) -> 'Settings': """Run Transmitter Program in traffic masking mode. The traffic masking loop loads assembly packets from a set of queues. @@ -122,23 +104,23 @@ def traffic_masking_loop( reveals to Networked Computer when the user operates the Source Computer. 
""" - ws_queue = queues[WINDOW_SELECT_QUEUE] - m_queue = queues[TM_MESSAGE_PACKET_QUEUE] - f_queue = queues[TM_FILE_PACKET_QUEUE] - c_queue = queues[TM_COMMAND_PACKET_QUEUE] - np_queue = queues[TM_NOISE_PACKET_QUEUE] - nc_queue = queues[TM_NOISE_COMMAND_QUEUE] + ws_queue = queues[WINDOW_SELECT_QUEUE] + m_queue = queues[TM_MESSAGE_PACKET_QUEUE] + f_queue = queues[TM_FILE_PACKET_QUEUE] + c_queue = queues[TM_COMMAND_PACKET_QUEUE] + np_queue = queues[TM_NOISE_PACKET_QUEUE] + nc_queue = queues[TM_NOISE_COMMAND_QUEUE] log_queue = queues[LOG_PACKET_QUEUE] - sm_queue = queues[SENDER_MODE_QUEUE] + sm_queue = queues[SENDER_MODE_QUEUE] while True: with ignored(EOFError, KeyboardInterrupt): - while not ws_queue.qsize(): + while ws_queue.qsize() == 0: time.sleep(0.01) window_contacts = ws_queue.get() # Window selection command to Receiver Program. - while not c_queue.qsize(): + while c_queue.qsize() == 0: time.sleep(0.01) send_packet(key_list, gateway, log_queue, c_queue.get()) break @@ -150,38 +132,22 @@ def traffic_masking_loop( # Choosing element from list is constant time. # - # First queue we evaluate: if m_queue has data - # in it, False is evaluated as 0, and we load - # the first nested list. At that point we load - # from m_queue regardless of f_queue state. - # | - # v - queue = [[m_queue, m_queue], [f_queue, np_queue]][m_queue.qsize() == 0][ - f_queue.qsize() == 0 - ] # ^ - # | - # Second queue to evaluate. If m_queue has no data but f_queue has, - # the False is evaluated as 0 meaning f_queue (True as 1 and np_queue) + # First queue we evaluate: if m_queue has data Second to evaluate. If m_queue + # in it, False is evaluated as 0, and we load has no data but f_queue has, the + # the first nested list. At that point we load False is evaluated as 0 meaning + # from m_queue regardless of f_queue state. f_queue (True as 1 and np_queue) + # | | + # v v + queue = [[m_queue, m_queue], [f_queue, np_queue]][m_queue.qsize() == 0][f_queue.qsize() == 0] # Regardless of queue, each .get() returns a tuple with identical # amount of data: 256 bytes long bytestring and two booleans. - ( - assembly_packet, - log_messages, - _, - ) = queue.get() # type: bytes, bool, bool + assembly_packet, log_messages, log_as_ph = queue.get() # type: bytes, bool, bool for c in window_contacts: # Message/file assembly packet to window contact. with HideRunTime(settings, delay_type=TRAFFIC_MASKING): - send_packet( - key_list, - gateway, - log_queue, - assembly_packet, - c.onion_pub_key, - log_messages, - ) + send_packet(key_list, gateway, log_queue, assembly_packet, c.onion_pub_key, log_messages) # Send a command between each assembly packet for each contact. with HideRunTime(settings, delay_type=TRAFFIC_MASKING): @@ -197,14 +163,12 @@ def traffic_masking_loop( exit_packet_check(queues, gateway) # If traffic masking has been disabled, wait until queued messages are sent before returning. - if sm_queue.qsize() != 0 and all( - q.qsize() == 0 for q in (m_queue, f_queue, c_queue) - ): + if sm_queue.qsize() != 0 and all(q.qsize() == 0 for q in (m_queue, f_queue, c_queue)): settings = sm_queue.get() return settings -def exit_packet_check(queues: "QueueDict", gateway: "Gateway") -> None: +def exit_packet_check(queues: 'QueueDict', gateway: 'Gateway') -> None: """Check for unencrypted commands that close TFC. 
The relay packet queue is empty until the user is willing to reveal to @@ -224,12 +188,11 @@ def exit_packet_check(queues: "QueueDict", gateway: "Gateway") -> None: queues[EXIT_QUEUE].put(signal) -def standard_sender_loop( - queues: "QueueDict", - gateway: "Gateway", - key_list: "KeyList", - m_buffer: Optional["MessageBuffer"] = None, -) -> Tuple["Settings", "MessageBuffer"]: +def standard_sender_loop(queues: 'QueueDict', + gateway: 'Gateway', + key_list: 'KeyList', + m_buffer: Optional['MessageBuffer'] = None + ) -> Tuple['Settings', 'MessageBuffer']: """Run Transmitter program in standard send mode. The standard sender loop loads assembly packets from a set of queues. @@ -267,10 +230,10 @@ def standard_sender_loop( adds new keys for the contact. """ km_queue = queues[KEY_MANAGEMENT_QUEUE] - c_queue = queues[COMMAND_PACKET_QUEUE] + c_queue = queues[COMMAND_PACKET_QUEUE] rp_queue = queues[RELAY_PACKET_QUEUE] sm_queue = queues[SENDER_MODE_QUEUE] - m_queue = queues[MESSAGE_PACKET_QUEUE] + m_queue = queues[MESSAGE_PACKET_QUEUE] if m_buffer is None: m_buffer = dict() @@ -288,9 +251,7 @@ def standard_sender_loop( process_new_message(m_buffer, queues, key_list, gateway) # If traffic masking has been enabled, switch send mode when all queues are empty. - if sm_queue.qsize() != 0 and all( - q.qsize() == 0 for q in (km_queue, c_queue, rp_queue, m_queue) - ): + if sm_queue.qsize() != 0 and all(q.qsize() == 0 for q in (km_queue, c_queue, rp_queue, m_queue)): settings = sm_queue.get() return settings, m_buffer @@ -300,7 +261,7 @@ def standard_sender_loop( pass -def process_key_management_command(queues: "QueueDict", key_list: "KeyList") -> None: +def process_key_management_command(queues: 'QueueDict', key_list: 'KeyList') -> None: """Process key management command.""" km_queue = queues[KEY_MANAGEMENT_QUEUE] @@ -309,11 +270,12 @@ def process_key_management_command(queues: "QueueDict", key_list: "KeyList") -> SoftError("Key management command processing complete.", output=False) -def process_command( - queues: "QueueDict", key_list: "KeyList", gateway: "Gateway" -) -> None: +def process_command(queues: 'QueueDict', + key_list: 'KeyList', + gateway: 'Gateway' + ) -> None: """Process command.""" - c_queue = queues[COMMAND_PACKET_QUEUE] + c_queue = queues[COMMAND_PACKET_QUEUE] log_queue = queues[LOG_PACKET_QUEUE] if c_queue.qsize(): @@ -322,7 +284,7 @@ def process_command( SoftError("Command processing complete.", output=False) -def process_relay_packets(queues: "QueueDict", gateway: "Gateway") -> None: +def process_relay_packets(queues: 'QueueDict', gateway: 'Gateway') -> None: """Process packet to Relay Program on Networked Computer.""" rp_queue = queues[RELAY_PACKET_QUEUE] @@ -339,35 +301,31 @@ def process_relay_packets(queues: "QueueDict", gateway: "Gateway") -> None: SoftError("Relay packet processing complete.", output=False) -def process_buffered_messages( - m_buffer: "MessageBuffer", - queues: "QueueDict", - key_list: "KeyList", - gateway: "Gateway", -) -> None: +def process_buffered_messages(m_buffer: 'MessageBuffer', + queues: 'QueueDict', + key_list: 'KeyList', + gateway: 'Gateway' + ) -> None: """Process messages cached in `m_buffer`.""" log_queue = queues[LOG_PACKET_QUEUE] for onion_pub_key in m_buffer: if key_list.has_keyset(onion_pub_key) and m_buffer[onion_pub_key]: - send_packet( - key_list, gateway, log_queue, *m_buffer[onion_pub_key].pop(0)[:-1] - ) + send_packet(key_list, gateway, log_queue, *m_buffer[onion_pub_key].pop(0)[:-1]) raise SoftError("Buffered message processing complete.", 
output=False) -def process_new_message( - m_buffer: "MessageBuffer", - queues: "QueueDict", - key_list: "KeyList", - gateway: "Gateway", -) -> None: +def process_new_message(m_buffer: 'MessageBuffer', + queues: 'QueueDict', + key_list: 'KeyList', + gateway: 'Gateway' + ) -> None: """Process new message in message queue.""" - m_queue = queues[MESSAGE_PACKET_QUEUE] + m_queue = queues[MESSAGE_PACKET_QUEUE] log_queue = queues[LOG_PACKET_QUEUE] if m_queue.qsize(): - queue_data = m_queue.get() # type: Tuple[bytes, bytes, bool, bool, bytes] + queue_data = m_queue.get() # type: Tuple[bytes, bytes, bool, bool, bytes] onion_pub_key = queue_data[1] if key_list.has_keyset(onion_pub_key): diff --git a/src/transmitter/traffic_masking.py b/src/transmitter/traffic_masking.py index 92b4a8d..f109055 100755 --- a/src/transmitter/traffic_masking.py +++ b/src/transmitter/traffic_masking.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -24,47 +24,39 @@ import typing from typing import Any, Dict, Optional, Tuple, Union -from src.common.misc import ignored -from src.common.statics import ( - C_N_HEADER, - NOISE_PACKET_BUFFER, - PADDING_LENGTH, - P_N_HEADER, - TM_NOISE_COMMAND_QUEUE, - TM_NOISE_PACKET_QUEUE, -) +from src.common.misc import ignored +from src.common.statics import (C_N_HEADER, NOISE_PACKET_BUFFER, PADDING_LENGTH, P_N_HEADER, + TM_NOISE_COMMAND_QUEUE, TM_NOISE_PACKET_QUEUE) if typing.TYPE_CHECKING: - from multiprocessing import Queue + from multiprocessing import Queue from src.common.db_contacts import ContactList - QueueDict = Dict[bytes, Queue[Any]] -def noise_loop( - queues: "QueueDict", - contact_list: Optional["ContactList"] = None, - unit_test: bool = False, -) -> None: +def noise_loop(queues: 'QueueDict', + contact_list: Optional['ContactList'] = None, + unit_test: bool = False + ) -> None: """Generate noise packets for traffic masking. This process ensures noise packet / noise command queue always has noise assembly packets available. """ log_messages = True # This setting is ignored: settings.log_file_masking controls logging of noise packets. - log_as_ph = True + log_as_ph = True - header = C_N_HEADER if contact_list is None else P_N_HEADER + header = C_N_HEADER if contact_list is None else P_N_HEADER noise_assembly_packet = header + bytes(PADDING_LENGTH) if contact_list is None: # Noise command - queue = queues[TM_NOISE_COMMAND_QUEUE] + queue = queues[TM_NOISE_COMMAND_QUEUE] content = noise_assembly_packet # type: Union[bytes, Tuple[bytes, bool, bool]] else: # Noise packet - queue = queues[TM_NOISE_PACKET_QUEUE] + queue = queues[TM_NOISE_PACKET_QUEUE] content = (noise_assembly_packet, log_messages, log_as_ph) while True: diff --git a/src/transmitter/user_input.py b/src/transmitter/user_input.py index 3ca2cc1..69530e5 100755 --- a/src/transmitter/user_input.py +++ b/src/transmitter/user_input.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -21,21 +21,22 @@ along with TFC. If not, see . 
import typing -from src.common.output import print_on_previous_line +from src.common.output import print_on_previous_line from src.common.statics import COMMAND, FILE, MESSAGE, WIN_TYPE_GROUP if typing.TYPE_CHECKING: - from src.common.db_settings import Settings + from src.common.db_settings import Settings from src.transmitter.windows import TxWindow -def process_aliases(plaintext: str, settings: "Settings", window: "TxWindow") -> str: +def process_aliases(plaintext: str, + settings: 'Settings', + window: 'TxWindow' + ) -> str: """Check if plaintext is an alias for another command.""" - aliases = [ - (" ", "/unread"), - (" ", "/exit" if settings.double_space_exits else "/clear"), - ("//", "/cmd"), - ] + aliases = [(' ', '/unread' ), + (' ', '/exit' if settings.double_space_exits else '/clear'), + ('//', '/cmd' )] for a in aliases: if plaintext == a[0]: @@ -49,15 +50,15 @@ def process_aliases(plaintext: str, settings: "Settings", window: "TxWindow") -> return plaintext -def get_input(window: "TxWindow", settings: "Settings") -> "UserInput": +def get_input(window: 'TxWindow', settings: 'Settings') -> 'UserInput': """Read and process input from the user and determine its type.""" while True: try: plaintext = input(f"Msg to {window.type_print} {window.name}: ") - if plaintext in ["", "/"]: + if plaintext in ['', '/']: raise EOFError except (EOFError, KeyboardInterrupt): - print("") + print('') print_on_previous_line() continue @@ -66,20 +67,18 @@ def get_input(window: "TxWindow", settings: "Settings") -> "UserInput": # Determine plaintext type pt_type = MESSAGE - if plaintext == "/file": + if plaintext == '/file': pt_type = FILE - elif plaintext.startswith("/"): - plaintext = plaintext[len("/") :] - pt_type = COMMAND + elif plaintext.startswith('/'): + plaintext = plaintext[len('/'):] + pt_type = COMMAND # Check if the group was empty if pt_type in [MESSAGE, FILE] and window.type == WIN_TYPE_GROUP: if window.group is not None and window.group.empty(): print_on_previous_line() - print( - f"Msg to {window.type_print} {window.name}: Error: The group is empty." - ) + print(f"Msg to {window.type_print} {window.name}: Error: The group is empty.") print_on_previous_line(delay=0.5) continue @@ -99,4 +98,4 @@ class UserInput(object): def __init__(self, plaintext: str, type_: str) -> None: """Create a new UserInput object.""" self.plaintext = plaintext - self.type = type_ + self.type = type_ diff --git a/src/transmitter/window_mock.py b/src/transmitter/window_mock.py index e32b3c1..4cef581 100644 --- a/src/transmitter/window_mock.py +++ b/src/transmitter/window_mock.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -24,7 +24,7 @@ import typing from typing import Iterable, Iterator, List, Optional from src.common.db_contacts import Contact -from src.common.statics import WIN_TYPE_CONTACT +from src.common.statics import WIN_TYPE_CONTACT if typing.TYPE_CHECKING: from src.common.db_groups import Group @@ -36,14 +36,14 @@ class MockWindow(Iterable[Contact]): automatically generated group management and key delivery messages. 
""" - def __init__(self, uid: bytes, contacts: List["Contact"]) -> None: + def __init__(self, uid: bytes, contacts: List['Contact']) -> None: """Create a new MockWindow object.""" self.window_contacts = contacts - self.type = WIN_TYPE_CONTACT - self.group = None # type: Optional[Group] - self.name = None # type: Optional[str] - self.uid = uid - self.log_messages = self.window_contacts[0].log_messages + self.type = WIN_TYPE_CONTACT + self.group = None # type: Optional[Group] + self.name = None # type: Optional[str] + self.uid = uid + self.log_messages = self.window_contacts[0].log_messages def __iter__(self) -> Iterator[Contact]: """Iterate over contact objects in the window.""" diff --git a/src/transmitter/windows.py b/src/transmitter/windows.py index 28ec5ad..375b35d 100755 --- a/src/transmitter/windows.py +++ b/src/transmitter/windows.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -24,30 +24,23 @@ import typing from typing import Any, Dict, Iterable, Iterator, List, Optional, Sized from src.common.db_contacts import Contact -from src.common.exceptions import SoftError -from src.common.input import yes -from src.common.output import clear_screen -from src.common.statics import ( - KEX_STATUS_PENDING, - WINDOW_SELECT_QUEUE, - WIN_SELECT, - WIN_TYPE_CONTACT, - WIN_TYPE_GROUP, -) +from src.common.exceptions import SoftError +from src.common.input import yes +from src.common.output import clear_screen +from src.common.statics import KEX_STATUS_PENDING, WINDOW_SELECT_QUEUE, WIN_SELECT, WIN_TYPE_CONTACT, WIN_TYPE_GROUP -from src.transmitter.contact import add_new_contact +from src.transmitter.contact import add_new_contact from src.transmitter.key_exchanges import export_onion_service_data, start_key_exchange -from src.transmitter.packet import queue_command +from src.transmitter.packet import queue_command if typing.TYPE_CHECKING: - from multiprocessing import Queue - from src.common.db_contacts import ContactList - from src.common.db_groups import Group, GroupList - from src.common.db_onion import OnionService - from src.common.db_settings import Settings - from src.common.gateway import Gateway + from multiprocessing import Queue + from src.common.db_contacts import ContactList + from src.common.db_groups import Group, GroupList + from src.common.db_onion import OnionService + from src.common.db_settings import Settings + from src.common.gateway import Gateway from src.transmitter.user_input import UserInput - QueueDict = Dict[bytes, Queue[Any]] @@ -57,19 +50,22 @@ class TxWindow(Iterable[Contact], Sized): group). 
""" - def __init__(self, contact_list: "ContactList", group_list: "GroupList") -> None: + def __init__(self, + contact_list: 'ContactList', + group_list: 'GroupList' + ) -> None: """Create a new TxWindow object.""" - self.contact_list = contact_list - self.group_list = group_list - self.window_contacts = [] # type: List[Contact] - self.contact = None # type: Optional[Contact] - self.group = None # type: Optional[Group] - self.name = "" # type: str - self.uid = b"" # type: bytes - self.group_id = None # type: Optional[bytes] - self.log_messages = None # type: Optional[bool] - self.type = "" # type: str - self.type_print = None # type: Optional[str] + self.contact_list = contact_list + self.group_list = group_list + self.window_contacts = [] # type: List[Contact] + self.contact = None # type: Optional[Contact] + self.group = None # type: Optional[Group] + self.name = '' # type: str + self.uid = b'' # type: bytes + self.group_id = None # type: Optional[bytes] + self.log_messages = None # type: Optional[bool] + self.type = '' # type: str + self.type_print = None # type: Optional[str] def __iter__(self) -> Iterator[Contact]: """Iterate over Contact objects in the window.""" @@ -79,15 +75,14 @@ class TxWindow(Iterable[Contact], Sized): """Return the number of contacts in the window.""" return len(self.window_contacts) - def select_tx_window( - self, - settings: "Settings", # Settings object - queues: "QueueDict", # Dictionary of Queues - onion_service: "OnionService", # OnionService object - gateway: "Gateway", # Gateway object - selection: Optional[str] = None, # Selector for window - cmd: bool = False, # True when `/msg` command is used to switch window - ) -> None: + def select_tx_window(self, + settings: 'Settings', # Settings object + queues: 'QueueDict', # Dictionary of Queues + onion_service: 'OnionService', # OnionService object + gateway: 'Gateway', # Gateway object + selection: Optional[str] = None, # Selector for window + cmd: bool = False # True when `/msg` command is used to switch window + ) -> None: """Select specified window or ask the user to specify one.""" if selection is None: selection = self.select_recipient() @@ -99,9 +94,7 @@ class TxWindow(Iterable[Contact], Sized): self.select_contact(selection, cmd, queues, settings) elif selection.startswith("/"): - self.window_selection_command( - selection, settings, queues, onion_service, gateway - ) + self.window_selection_command(selection, settings, queues, onion_service, gateway) else: raise SoftError("Error: No contact/group was found.") @@ -120,91 +113,85 @@ class TxWindow(Iterable[Contact], Sized): self.group_list.print_groups() if self.contact_list.has_only_pending_contacts(): - print( - "\n'/connect' sends Onion Service/contact data to Relay" - "\n'/add' adds another contact." - "\n'/rm ' removes an existing contact.\n" - ) + print("\n'/connect' sends Onion Service/contact data to Relay" + "\n'/add' adds another contact." 
+ "\n'/rm ' removes an existing contact.\n") selection = input("Select recipient: ").strip() return selection - def select_contact( - self, selection: str, cmd: bool, queues: "QueueDict", settings: "Settings" - ) -> None: + def select_contact(self, + selection: str, + cmd: bool, + queues: 'QueueDict', + settings: 'Settings' + ) -> None: """Select contact.""" if cmd and settings.traffic_masking: contact = self.contact_list.get_contact_by_address_or_nick(selection) if contact.onion_pub_key != self.uid: - raise SoftError( - "Error: Can't change window during traffic masking.", - head_clear=True, - ) + raise SoftError("Error: Can't change window during traffic masking.", head_clear=True) self.contact = self.contact_list.get_contact_by_address_or_nick(selection) if self.contact.kex_status == KEX_STATUS_PENDING: - start_key_exchange( - self.contact.onion_pub_key, - self.contact.nick, - self.contact_list, - settings, - queues, - ) + start_key_exchange(self.contact.onion_pub_key, + self.contact.nick, + self.contact_list, + settings, queues) - self.group = None - self.group_id = None + self.group = None + self.group_id = None self.window_contacts = [self.contact] - self.name = self.contact.nick - self.uid = self.contact.onion_pub_key - self.log_messages = self.contact.log_messages - self.type = WIN_TYPE_CONTACT - self.type_print = "contact" + self.name = self.contact.nick + self.uid = self.contact.onion_pub_key + self.log_messages = self.contact.log_messages + self.type = WIN_TYPE_CONTACT + self.type_print = "contact" - def select_group(self, selection: str, cmd: bool, settings: "Settings") -> None: + def select_group(self, + selection: str, + cmd: bool, + settings: 'Settings' + ) -> None: """Select group.""" if cmd and settings.traffic_masking and selection != self.name: - raise SoftError( - "Error: Can't change window during traffic masking.", head_clear=True - ) + raise SoftError("Error: Can't change window during traffic masking.", head_clear=True) - self.contact = None - self.group = self.group_list.get_group(selection) + self.contact = None + self.group = self.group_list.get_group(selection) self.window_contacts = self.group.members - self.name = self.group.name - self.uid = self.group.group_id - self.group_id = self.group.group_id - self.log_messages = self.group.log_messages - self.type = WIN_TYPE_GROUP - self.type_print = "group" + self.name = self.group.name + self.uid = self.group.group_id + self.group_id = self.group.group_id + self.log_messages = self.group.log_messages + self.type = WIN_TYPE_GROUP + self.type_print = "group" - def window_selection_command( - self, - selection: str, - settings: "Settings", - queues: "QueueDict", - onion_service: "OnionService", - gateway: "Gateway", - ) -> None: + clear_screen() + + def window_selection_command(self, + selection: str, + settings: 'Settings', + queues: 'QueueDict', + onion_service: 'OnionService', + gateway: 'Gateway' + ) -> None: """Commands for adding and removing contacts from contact selection menu. In situations where only pending contacts are available and those contacts are not online, these commands prevent the user from not being able to add new contacts. 
""" - if selection == "/add": - add_new_contact( - self.contact_list, self.group_list, settings, queues, onion_service - ) + if selection == '/add': + add_new_contact(self.contact_list, self.group_list, settings, queues, onion_service) raise SoftError("New contact added.", output=False) - if selection == "/connect": - export_onion_service_data( - self.contact_list, settings, onion_service, gateway - ) + if selection == '/connect': + export_onion_service_data(self.contact_list, settings, onion_service, gateway) - elif selection.startswith("/rm"): + elif selection.startswith('/rm'): try: selection = selection.split()[1] except IndexError: @@ -214,9 +201,7 @@ class TxWindow(Iterable[Contact], Sized): raise SoftError("Removal of contact aborted.", head=0, delay=1) if selection in self.contact_list.contact_selectors(): - onion_pub_key = self.contact_list.get_contact_by_address_or_nick( - selection - ).onion_pub_key + onion_pub_key = self.contact_list.get_contact_by_address_or_nick(selection).onion_pub_key self.contact_list.remove_contact_by_pub_key(onion_pub_key) self.contact_list.store_contacts() raise SoftError(f"Removed contact '{selection}'.", delay=1) @@ -229,17 +214,17 @@ class TxWindow(Iterable[Contact], Sized): def deselect(self) -> None: """Deselect active window.""" self.window_contacts = [] - self.contact = None - self.group = None - self.name = "" - self.uid = b"" - self.log_messages = None - self.type = "" - self.type_print = None + self.contact = None + self.group = None + self.name = '' + self.uid = b'' + self.log_messages = None + self.type = '' + self.type_print = None def is_selected(self) -> bool: """Return True if a window is selected, else False.""" - return self.name != "" + return self.name != '' def update_log_messages(self) -> None: """Update window's logging setting.""" @@ -248,7 +233,7 @@ class TxWindow(Iterable[Contact], Sized): if self.type == WIN_TYPE_GROUP and self.group is not None: self.log_messages = self.group.log_messages - def update_window(self, group_list: "GroupList") -> None: + def update_window(self, group_list: 'GroupList') -> None: """Update window. Since previous input may have changed the window data, reload @@ -256,38 +241,31 @@ class TxWindow(Iterable[Contact], Sized): """ if self.type == WIN_TYPE_GROUP: if self.group_id is not None and group_list.has_group_id(self.group_id): - self.group = group_list.get_group_by_id(self.group_id) + self.group = group_list.get_group_by_id(self.group_id) self.window_contacts = self.group.members - self.name = self.group.name - self.uid = self.group.group_id + self.name = self.group.name + self.uid = self.group.group_id else: self.deselect() elif self.type == WIN_TYPE_CONTACT: - if self.contact is not None and self.contact_list.has_pub_key( - self.contact.onion_pub_key - ): + if self.contact is not None and self.contact_list.has_pub_key(self.contact.onion_pub_key): # Reload window contact in case keys were re-exchanged. 
- self.contact = self.contact_list.get_contact_by_pub_key( - self.contact.onion_pub_key - ) + self.contact = self.contact_list.get_contact_by_pub_key(self.contact.onion_pub_key) self.window_contacts = [self.contact] -def select_window( - user_input: "UserInput", - window: "TxWindow", - settings: "Settings", - queues: "QueueDict", - onion_service: "OnionService", - gateway: "Gateway", -) -> None: +def select_window(user_input: 'UserInput', + window: 'TxWindow', + settings: 'Settings', + queues: 'QueueDict', + onion_service: 'OnionService', + gateway: 'Gateway' + ) -> None: """Select a new window to send messages/files.""" try: selection = user_input.plaintext.split()[1] except (IndexError, TypeError): raise SoftError("Error: Invalid recipient.", head_clear=True) - window.select_tx_window( - settings, queues, onion_service, gateway, selection=selection, cmd=True - ) + window.select_tx_window(settings, queues, onion_service, gateway, selection=selection, cmd=True) diff --git a/tests/__init__.py b/tests/__init__.py index 6eb560e..833769a 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. diff --git a/tests/common/__init__.py b/tests/common/__init__.py index 6eb560e..833769a 100644 --- a/tests/common/__init__.py +++ b/tests/common/__init__.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. diff --git a/tests/common/test_crypto.py b/tests/common/test_crypto.py index bbc7e4a..6cfd66d 100644 --- a/tests/common/test_crypto.py +++ b/tests/common/test_crypto.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
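The test_crypto.py hunks that follow verify the blake2b wrapper against the official known-answer tests, parsing blank-line-separated entries whose tab-separated fields hold the hex-encoded message, key, and digest. As a rough, self-contained illustration of that style of check (the single entry below is synthetic, generated with hashlib itself, and the field names only mirror the layout the tests describe, not the official vector file):

# Rough sketch of a KAT-style keyed-BLAKE2b check; the entry is synthetic,
# not an official BLAKE2 test vector.
import hashlib
from typing import Iterator, Tuple


def parse_kat_entries(text: str) -> Iterator[Tuple[bytes, bytes, bytes]]:
    """Yield (message, key, digest) tuples from blank-line-separated entries."""
    for entry in text.strip().split("\n\n"):
        message, key, digest = [bytes.fromhex(line.split("\t")[1]) for line in entry.split("\n")]
        yield message, key, digest


def check_kat_entries(text: str) -> None:
    """Verify every entry against hashlib's keyed BLAKE2b."""
    for message, key, digest in parse_kat_entries(text):
        assert hashlib.blake2b(message, key=key, digest_size=64).digest() == digest


msg, key        = bytes(range(3)), 64 * b"\x42"
synthetic_entry = (f"in:\t{msg.hex()}\n"
                   f"key:\t{key.hex()}\n"
                   f"hash:\t{hashlib.blake2b(msg, key=key, digest_size=64).hexdigest()}")
check_kat_entries(synthetic_entry)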
@@ -26,9 +26,10 @@ import random import subprocess import unittest -from string import ascii_letters, digits -from unittest import mock +from string import ascii_letters, digits +from unittest import mock from unittest.mock import MagicMock +from typing import Callable import argon2 import nacl.exceptions @@ -36,38 +37,15 @@ import nacl.public import nacl.utils from cryptography.hazmat.primitives.asymmetric.x448 import X448PrivateKey -from cryptography.hazmat.primitives.serialization import ( - Encoding, - NoEncryption, - PrivateFormat, -) +from cryptography.hazmat.primitives.serialization import Encoding, NoEncryption, PrivateFormat -from src.common.crypto import ( - argon2_kdf, - auth_and_decrypt, - blake2b, - byte_padding, - check_kernel_version, - csprng, -) -from src.common.crypto import encrypt_and_sign, rm_padding_bytes, X448 -from src.common.statics import ( - ARGON2_MIN_MEMORY_COST, - ARGON2_MIN_PARALLELISM, - ARGON2_MIN_TIME_COST, - ARGON2_SALT_LENGTH, - BLAKE2_DIGEST_LENGTH, - BLAKE2_DIGEST_LENGTH_MAX, - BLAKE2_DIGEST_LENGTH_MIN, - BLAKE2_KEY_LENGTH_MAX, - BLAKE2_PERSON_LENGTH_MAX, - BLAKE2_SALT_LENGTH_MAX, - PADDING_LENGTH, - SYMMETRIC_KEY_LENGTH, - TFC_PRIVATE_KEY_LENGTH, - TFC_PUBLIC_KEY_LENGTH, - XCHACHA20_NONCE_LENGTH, -) +from src.common.crypto import argon2_kdf, auth_and_decrypt, blake2b, byte_padding, check_kernel_version, csprng +from src.common.crypto import encrypt_and_sign, rm_padding_bytes, X448 +from src.common.statics import (ARGON2_MIN_MEMORY_COST, ARGON2_MIN_PARALLELISM, ARGON2_MIN_TIME_COST, + ARGON2_SALT_LENGTH, BLAKE2_DIGEST_LENGTH, BLAKE2_DIGEST_LENGTH_MAX, + BLAKE2_DIGEST_LENGTH_MIN, BLAKE2_KEY_LENGTH_MAX, BLAKE2_PERSON_LENGTH_MAX, + BLAKE2_SALT_LENGTH_MAX, PADDING_LENGTH, SYMMETRIC_KEY_LENGTH, TFC_PRIVATE_KEY_LENGTH, + TFC_PUBLIC_KEY_LENGTH, XCHACHA20_NONCE_LENGTH) from tests.utils import cd_unit_test, cleanup @@ -94,39 +72,31 @@ class TestBLAKE2b(unittest.TestCase): """Pre-test actions.""" self.unit_test_dir = cd_unit_test() - kat_file_url = "https://raw.githubusercontent.com/BLAKE2/BLAKE2/master/testvectors/blake2b-kat.txt" - kat_file_name = "blake2b-kat.txt" + kat_file_url = 'https://raw.githubusercontent.com/BLAKE2/BLAKE2/master/testvectors/blake2b-kat.txt' + kat_file_name = 'blake2b-kat.txt' # Download the test vector file. - subprocess.Popen(f"wget {kat_file_url} -O {kat_file_name}", shell=True).wait() + subprocess.Popen(f'wget {kat_file_url} -O {kat_file_name}', shell=True).wait() # Read the test vector file. with open(kat_file_name) as f: file_data = f.read() # Verify the SHA256 hash of the test vector file. - self.assertEqual( - hashlib.sha256(file_data.encode()).hexdigest(), - "82fcb3cabe8ff6e1452849e3b2a26a3631f1e2b51beb62ffb537892d2b3e364f", - ) + self.assertEqual(hashlib.sha256(file_data.encode()).hexdigest(), + '82fcb3cabe8ff6e1452849e3b2a26a3631f1e2b51beb62ffb537892d2b3e364f') # Parse the test vectors to a list of tuples: [(message1, key1, digest1), (message2, key2, digest2), ...] self.test_vectors = [] - trimmed_data = file_data[ - 2:-1 - ] # Remove empty lines from the start and the end of the read data. - test_vectors = trimmed_data.split( - "\n\n" - ) # Each tuple of test vector values is separated by an empty line. + trimmed_data = file_data[2:-1] # Remove empty lines from the start and the end of the read data. + test_vectors = trimmed_data.split('\n\n') # Each tuple of test vector values is separated by an empty line. 
for test_vector in test_vectors: # Each value is hex-encoded, and has a tab-separated name # (in, key, hash) prepended to it that must be separated. - message, key, digest = [ - bytes.fromhex(line.split("\t")[1]) for line in test_vector.split("\n") - ] + message, key, digest = [bytes.fromhex(line.split('\t')[1]) for line in test_vector.split('\n')] self.test_vectors.append((message, key, digest)) @@ -136,9 +106,9 @@ class TestBLAKE2b(unittest.TestCase): # Verify that messages and digests are unique, and # that identical keys are used in every test vector. self.assertEqual(len(set(messages)), 256) - self.assertEqual(len(keys), 256) - self.assertEqual(len(set(keys)), 1) - self.assertEqual(len(set(digests)), 256) + self.assertEqual(len( keys), 256) + self.assertEqual(len(set(keys)), 1) + self.assertEqual(len(set(digests)), 256) def tearDown(self) -> None: """Post-test actions.""" @@ -146,9 +116,7 @@ class TestBLAKE2b(unittest.TestCase): def test_blake2b_using_the_official_known_answer_tests(self) -> None: for message, key, digest in self.test_vectors: - purported_digest = blake2b( - message, key, digest_size=BLAKE2_DIGEST_LENGTH_MAX - ) + purported_digest = blake2b(message, key, digest_size=BLAKE2_DIGEST_LENGTH_MAX) self.assertEqual(purported_digest, digest) @@ -157,65 +125,46 @@ class TestBLAKE2bWrapper(unittest.TestCase): These tests ensure the BLAKE2b implementation detects invalid parameters. """ + + def setUp(self) -> None: + """Pre-test actions.""" + self.test_string = b'test_string' def test_invalid_size_key_raises_critical_error(self) -> None: for invalid_key_length in [BLAKE2_KEY_LENGTH_MAX + 1, 1000]: with self.assertRaises(SystemExit): - blake2b(b"test_string", key=os.urandom(invalid_key_length)) + blake2b(self.test_string, key=os.urandom(invalid_key_length)) def test_invalid_size_salt_raises_critical_error(self) -> None: for invalid_salt_length in [BLAKE2_SALT_LENGTH_MAX + 1, 1000]: with self.assertRaises(SystemExit): - blake2b(b"test_string", salt=os.urandom(invalid_salt_length)) + blake2b(self.test_string, salt=os.urandom(invalid_salt_length)) def test_invalid_size_personalization_string_raises_critical_error(self) -> None: for invalid_person_length in [BLAKE2_PERSON_LENGTH_MAX + 1, 1000]: with self.assertRaises(SystemExit): - blake2b(b"test_string", person=os.urandom(invalid_person_length)) + blake2b(self.test_string, person=os.urandom(invalid_person_length)) def test_invalid_digest_size_raises_critical_error(self) -> None: - for invalid_digest_size in [ - -1, - BLAKE2_DIGEST_LENGTH_MIN - 1, - BLAKE2_DIGEST_LENGTH_MAX + 1, - 1000, - ]: + for invalid_digest_size in [-1, BLAKE2_DIGEST_LENGTH_MIN - 1, + BLAKE2_DIGEST_LENGTH_MAX + 1, 1000]: with self.assertRaises(SystemExit): - blake2b(b"test_string", digest_size=invalid_digest_size) + blake2b(self.test_string, digest_size=invalid_digest_size) - @mock.patch( - "hashlib.blake2b", - return_value=MagicMock( - digest=(MagicMock(side_effect=[BLAKE2_DIGEST_LENGTH * "a"])) - ), - ) - def test_invalid_blake2b_digest_type_raises_critical_error( - self, mock_blake2b - ) -> None: + @mock.patch("hashlib.blake2b", return_value=MagicMock(digest=(MagicMock(side_effect=[BLAKE2_DIGEST_LENGTH * "a"])))) + def test_invalid_blake2b_digest_type_raises_critical_error(self, mock_blake2b: MagicMock) -> None: with self.assertRaises(SystemExit): - blake2b(b"test_string") + blake2b(self.test_string) mock_blake2b.assert_called() - @mock.patch( - "hashlib.blake2b", - return_value=MagicMock( - digest=( - MagicMock( - side_effect=[ - (BLAKE2_DIGEST_LENGTH - 1) * 
b"a", - (BLAKE2_DIGEST_LENGTH + 1) * b"a", - ] - ) - ) - ), - ) - def test_invalid_size_blake2b_digest_raises_critical_error( - self, mock_blake2b - ) -> None: + @mock.patch('hashlib.blake2b', return_value=MagicMock(digest=( + MagicMock(side_effect=[(BLAKE2_DIGEST_LENGTH - 1)*b'a', + (BLAKE2_DIGEST_LENGTH + 1)*b'a'])))) + def test_invalid_size_blake2b_digest_raises_critical_error(self, mock_blake2b: MagicMock) -> None: with self.assertRaises(SystemExit): - blake2b(b"test_string") + blake2b(self.test_string) with self.assertRaises(SystemExit): - blake2b(b"test_string") + blake2b(self.test_string) mock_blake2b.assert_called() @@ -241,32 +190,30 @@ class TestArgon2KDF(unittest.TestCase): def setUp(self) -> None: """Pre-test actions.""" - self.unit_test_dir = cd_unit_test() + self.unit_test_dir = cd_unit_test() self.number_of_tests = 256 - file_url = "https://github.com/P-H-C/phc-winner-argon2/archive/master.zip" - file_name = "phc-winner-argon2-master.zip" + file_url = 'https://github.com/P-H-C/phc-winner-argon2/archive/master.zip' + file_name = 'phc-winner-argon2-master.zip' # Download the Argon2 command-line utility. - subprocess.Popen(f"wget {file_url} -O {file_name}", shell=True).wait() + subprocess.Popen(f'wget {file_url} -O {file_name}', shell=True).wait() # Verify the SHA256 hash of the zip-file containing the command-line utility. - with open(file_name, "rb") as f: + with open(file_name, 'rb') as f: file_data = f.read() - self.assertEqual( - hashlib.sha256(file_data).hexdigest(), - "2957db15d320b0970a34be9a6ef984b11b2296b1b1f8b051a47e35035c1bc7cf", - ) + self.assertEqual(hashlib.sha256(file_data).hexdigest(), + '2957db15d320b0970a34be9a6ef984b11b2296b1b1f8b051a47e35035c1bc7cf') # Unzip, compile, and test the command-line utility. - subprocess.Popen(f"unzip {file_name}", shell=True).wait() - os.chdir("phc-winner-argon2-master/") - subprocess.Popen("make", shell=True).wait() - subprocess.Popen("make test", shell=True).wait() + subprocess.Popen(f'unzip {file_name}', shell=True).wait() + os.chdir('phc-winner-argon2-master/') + subprocess.Popen('make', shell=True).wait() + subprocess.Popen('make test', shell=True).wait() def tearDown(self) -> None: """Post-test actions.""" - os.chdir("..") + os.chdir('..') cleanup(self.unit_test_dir) def test_argon2_cffi_using_the_official_command_line_utility(self) -> None: @@ -274,18 +221,18 @@ class TestArgon2KDF(unittest.TestCase): # Command-line utility's parameter limits. min_password_length = 1 max_password_length = 127 - min_salt_length = 8 - min_parallelism = 1 - max_parallelism = multiprocessing.cpu_count() - min_time_cost = 1 - min_memory_cost = 7 - min_key_length = 4 + min_salt_length = 8 + min_parallelism = 1 + max_parallelism = multiprocessing.cpu_count() + min_time_cost = 1 + min_memory_cost = 7 + min_key_length = 4 # Arbitrary limits set for the test. max_salt_length = 128 - max_time_cost = 3 + max_time_cost = 3 max_memory_cost = 15 - max_key_length = 64 + max_key_length = 64 sys_rand = random.SystemRandom() @@ -293,145 +240,81 @@ class TestArgon2KDF(unittest.TestCase): # Generate random parameters for the test. 
len_password = sys_rand.randint(min_password_length, max_password_length) - len_salt = sys_rand.randint(min_salt_length, max_salt_length) - parallelism = sys_rand.randint(min_parallelism, max_parallelism) - time_cost = sys_rand.randint(min_time_cost, max_time_cost) - memory_cost = sys_rand.randint(min_memory_cost, max_memory_cost) - key_length = sys_rand.randint(min_key_length, max_key_length) + len_salt = sys_rand.randint(min_salt_length, max_salt_length) + parallelism = sys_rand.randint(min_parallelism, max_parallelism) + time_cost = sys_rand.randint(min_time_cost, max_time_cost) + memory_cost = sys_rand.randint(min_memory_cost, max_memory_cost) + key_length = sys_rand.randint(min_key_length, max_key_length) - password = "".join( - [sys_rand.choice(ascii_letters + digits) for _ in range(len_password)] - ) - salt = "".join( - [sys_rand.choice(ascii_letters + digits) for _ in range(len_salt)] - ) + password = ''.join([sys_rand.choice(ascii_letters + digits) for _ in range(len_password)]) + salt = ''.join([sys_rand.choice(ascii_letters + digits) for _ in range(len_salt)]) # Generate a key test vector using the command-line utility. output = subprocess.check_output( f'echo -n "{password}" | ./argon2 {salt} ' - f"-t {time_cost} " - f"-m {memory_cost} " - f"-p {parallelism} " - f"-l {key_length} " - f"-id", - shell=True, - ).decode() # type: str + f'-t {time_cost} ' + f'-m {memory_cost} ' + f'-p {parallelism} ' + f'-l {key_length} ' + f'-id', + shell=True).decode() # type: str - key_test_vector = output.split("\n")[4].split("\t")[-1] + key_test_vector = output.split('\n')[4].split('\t')[-1] # Generate a key using the argon2_cffi library. - purported_key = argon2.low_level.hash_secret_raw( - secret=password.encode(), - salt=salt.encode(), - time_cost=time_cost, - memory_cost=2 ** memory_cost, - parallelism=parallelism, - hash_len=key_length, - type=argon2.Type.ID, - ).hex() + purported_key = argon2.low_level.hash_secret_raw(secret=password.encode(), + salt=salt.encode(), + time_cost=time_cost, + memory_cost=2**memory_cost, + parallelism=parallelism, + hash_len=key_length, + type=argon2.Type.ID).hex() self.assertEqual(purported_key, key_test_vector) class TestArgon2Wrapper(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.salt = os.urandom(ARGON2_SALT_LENGTH) + self.salt = os.urandom(ARGON2_SALT_LENGTH) + self.password = 'password' def test_invalid_length_salt_raises_critical_error(self) -> None: - invalid_salts = [ - salt_length * b"a" - for salt_length in [0, ARGON2_SALT_LENGTH - 1, ARGON2_SALT_LENGTH + 1, 1000] - ] + invalid_salts = [salt_length * b'a' for salt_length in [0, ARGON2_SALT_LENGTH-1, + ARGON2_SALT_LENGTH+1, 1000]] for invalid_salt in invalid_salts: with self.assertRaises(SystemExit): - argon2_kdf( - "password", - invalid_salt, - ARGON2_MIN_TIME_COST, - ARGON2_MIN_MEMORY_COST, - ARGON2_MIN_PARALLELISM, - ) + argon2_kdf(self.password, invalid_salt, + ARGON2_MIN_TIME_COST, ARGON2_MIN_MEMORY_COST, ARGON2_MIN_PARALLELISM) - @mock.patch( - "argon2.low_level.hash_secret_raw", - MagicMock(side_effect=[SYMMETRIC_KEY_LENGTH * "a"]), - ) + @mock.patch("argon2.low_level.hash_secret_raw", MagicMock(side_effect=[SYMMETRIC_KEY_LENGTH*'a'])) def test_invalid_type_key_from_argon2_raises_critical_error(self) -> None: with self.assertRaises(SystemExit): - argon2_kdf( - "password", - self.salt, - ARGON2_MIN_TIME_COST, - ARGON2_MIN_MEMORY_COST, - ARGON2_MIN_PARALLELISM, - ) + argon2_kdf(self.password, self.salt, ARGON2_MIN_TIME_COST, ARGON2_MIN_MEMORY_COST, 
ARGON2_MIN_PARALLELISM) - @mock.patch( - "argon2.low_level.hash_secret_raw", - MagicMock( - side_effect=[ - (SYMMETRIC_KEY_LENGTH - 1) * b"a", - (SYMMETRIC_KEY_LENGTH + 1) * b"a", - ] - ), - ) + @mock.patch("argon2.low_level.hash_secret_raw", MagicMock(side_effect=[(SYMMETRIC_KEY_LENGTH-1)*b'a', + (SYMMETRIC_KEY_LENGTH+1)*b'a'])) def test_invalid_size_key_from_argon2_raises_critical_error(self) -> None: with self.assertRaises(SystemExit): - argon2_kdf( - "password", - self.salt, - ARGON2_MIN_TIME_COST, - ARGON2_MIN_MEMORY_COST, - ARGON2_MIN_PARALLELISM, - ) + argon2_kdf(self.password, self.salt, ARGON2_MIN_TIME_COST, ARGON2_MIN_MEMORY_COST, ARGON2_MIN_PARALLELISM) with self.assertRaises(SystemExit): - argon2_kdf( - "password", - self.salt, - ARGON2_MIN_TIME_COST, - ARGON2_MIN_MEMORY_COST, - ARGON2_MIN_PARALLELISM, - ) + argon2_kdf(self.password, self.salt, ARGON2_MIN_TIME_COST, ARGON2_MIN_MEMORY_COST, ARGON2_MIN_PARALLELISM) def test_too_small_time_cost_raises_critical_error(self) -> None: with self.assertRaises(SystemExit): - argon2_kdf( - "password", - self.salt, - ARGON2_MIN_TIME_COST - 1, - ARGON2_MIN_MEMORY_COST, - ARGON2_MIN_PARALLELISM, - ) + argon2_kdf(self.password, self.salt, ARGON2_MIN_TIME_COST-1, ARGON2_MIN_MEMORY_COST, ARGON2_MIN_PARALLELISM) def test_too_small_memory_cost_raises_critical_error(self) -> None: with self.assertRaises(SystemExit): - argon2_kdf( - "password", - self.salt, - ARGON2_MIN_TIME_COST, - ARGON2_MIN_MEMORY_COST - 1, - ARGON2_MIN_PARALLELISM, - ) + argon2_kdf(self.password, self.salt, ARGON2_MIN_TIME_COST, ARGON2_MIN_MEMORY_COST-1, ARGON2_MIN_PARALLELISM) def test_too_small_parallelism_raises_critical_error(self) -> None: with self.assertRaises(SystemExit): - argon2_kdf( - "password", - self.salt, - ARGON2_MIN_TIME_COST, - ARGON2_MIN_MEMORY_COST, - ARGON2_MIN_PARALLELISM - 1, - ) + argon2_kdf(self.password, self.salt, ARGON2_MIN_TIME_COST, ARGON2_MIN_MEMORY_COST, ARGON2_MIN_PARALLELISM-1) def test_argon2_kdf_key_type_and_length(self) -> None: - key = argon2_kdf( - "password", - self.salt, - ARGON2_MIN_TIME_COST, - ARGON2_MIN_MEMORY_COST, - ARGON2_MIN_PARALLELISM, - ) + key = argon2_kdf(self.password, self.salt, ARGON2_MIN_TIME_COST, ARGON2_MIN_MEMORY_COST, ARGON2_MIN_PARALLELISM) self.assertIsInstance(key, bytes) self.assertEqual(len(key), SYMMETRIC_KEY_LENGTH) @@ -459,59 +342,44 @@ class TestX448(unittest.TestCase): https://github.com/openssl/openssl/blob/master/test/curve448_internal_test.c#L654 https://github.com/openssl/openssl/blob/master/test/curve448_internal_test.c#L668 """ - sk_alice = bytes.fromhex( - "9a8f4925d1519f5775cf46b04b5800d4ee9ee8bae8bc5565d498c28d" - "d9c9baf574a9419744897391006382a6f127ab1d9ac2d8c0a598726b" - ) + '9a8f4925d1519f5775cf46b04b5800d4ee9ee8bae8bc5565d498c28d' + 'd9c9baf574a9419744897391006382a6f127ab1d9ac2d8c0a598726b') pk_alice = bytes.fromhex( - "9b08f7cc31b7e3e67d22d5aea121074a273bd2b83de09c63faa73d2c" - "22c5d9bbc836647241d953d40c5b12da88120d53177f80e532c41fa0" - ) + '9b08f7cc31b7e3e67d22d5aea121074a273bd2b83de09c63faa73d2c' + '22c5d9bbc836647241d953d40c5b12da88120d53177f80e532c41fa0') sk_bob = bytes.fromhex( - "1c306a7ac2a0e2e0990b294470cba339e6453772b075811d8fad0d1d" - "6927c120bb5ee8972b0d3e21374c9c921b09d1b0366f10b65173992d" - ) + '1c306a7ac2a0e2e0990b294470cba339e6453772b075811d8fad0d1d' + '6927c120bb5ee8972b0d3e21374c9c921b09d1b0366f10b65173992d') pk_bob = bytes.fromhex( - "3eb7a829b0cd20f5bcfc0b599b6feccf6da4627107bdb0d4f345b430" - "27d8b972fc3e34fb4232a13ca706dcb57aec3dae07bdc1c67bf33609" - ) + 
'3eb7a829b0cd20f5bcfc0b599b6feccf6da4627107bdb0d4f345b430' + '27d8b972fc3e34fb4232a13ca706dcb57aec3dae07bdc1c67bf33609') shared_secret = bytes.fromhex( - "07fff4181ac6cc95ec1c16a94a0f74d12da232ce40a77552281d282b" - "b60c0b56fd2464c335543936521c24403085d59a449a5037514a879d" - ) + '07fff4181ac6cc95ec1c16a94a0f74d12da232ce40a77552281d282b' + 'b60c0b56fd2464c335543936521c24403085d59a449a5037514a879d') def test_generate_private_key_function_returns_private_key_object(self) -> None: self.assertIsInstance(X448.generate_private_key(), X448PrivateKey) def test_x448_private_key_size(self) -> None: - private_key_bytes = X448.generate_private_key().private_bytes( - encoding=Encoding.Raw, - format=PrivateFormat.Raw, - encryption_algorithm=NoEncryption(), - ) + private_key_bytes = X448.generate_private_key().private_bytes(encoding=Encoding.Raw, + format=PrivateFormat.Raw, + encryption_algorithm=NoEncryption()) self.assertEqual(len(private_key_bytes), TFC_PRIVATE_KEY_LENGTH) - def test_derive_public_key_returns_public_key_with_correct_type_and_size( - self, - ) -> None: + def test_derive_public_key_returns_public_key_with_correct_type_and_size(self) -> None: private_key = X448.generate_private_key() - public_key = X448.derive_public_key(private_key) + public_key = X448.derive_public_key(private_key) self.assertIsInstance(public_key, bytes) self.assertEqual(len(public_key), TFC_PUBLIC_KEY_LENGTH) def test_deriving_invalid_type_public_key_raises_critical_error(self) -> None: - private_key = MagicMock( - public_key=MagicMock( - return_value=MagicMock( - public_bytes=MagicMock(side_effect=[TFC_PUBLIC_KEY_LENGTH * "a"]) - ) - ) - ) + private_key = MagicMock(public_key=MagicMock(return_value=MagicMock( + public_bytes=MagicMock(side_effect=[TFC_PUBLIC_KEY_LENGTH * 'a'])))) with self.assertRaises(SystemExit): X448.derive_public_key(private_key) @@ -525,36 +393,18 @@ class TestX448(unittest.TestCase): [1] https://github.com/pyca/cryptography/blob/master/src/cryptography/hazmat/backends/openssl/x448.py#L58 """ - private_key = MagicMock( - public_key=MagicMock( - return_value=MagicMock( - public_bytes=MagicMock( - side_effect=[ - (TFC_PUBLIC_KEY_LENGTH - 1) * b"a", - (TFC_PUBLIC_KEY_LENGTH + 1) * b"a", - ] - ) - ) - ) - ) + private_key = MagicMock(public_key=MagicMock(return_value=MagicMock( + public_bytes=MagicMock(side_effect=[(TFC_PUBLIC_KEY_LENGTH-1) * b'a', + (TFC_PUBLIC_KEY_LENGTH+1) * b'a'])))) with self.assertRaises(SystemExit): X448.derive_public_key(private_key) with self.assertRaises(SystemExit): X448.derive_public_key(private_key) - def test_deriving_shared_secret_with_an_invalid_size_public_key_raises_critical_error( - self, - ) -> None: - private_key = X448.generate_private_key() - invalid_public_keys = [ - key_length * b"a" - for key_length in ( - 1, - TFC_PUBLIC_KEY_LENGTH - 1, - TFC_PUBLIC_KEY_LENGTH + 1, - 1000, - ) - ] + def test_deriving_shared_secret_with_an_invalid_size_public_key_raises_critical_error(self) -> None: + private_key = X448.generate_private_key() + invalid_public_keys = [key_length * b'a' for key_length in (1, TFC_PUBLIC_KEY_LENGTH-1, + TFC_PUBLIC_KEY_LENGTH+1, 1000)] for invalid_public_key in invalid_public_keys: with self.assertRaises(SystemExit): X448.shared_key(private_key, invalid_public_key) @@ -585,20 +435,20 @@ class TestX448(unittest.TestCase): def test_x448_with_the_official_test_vectors(self) -> None: sk_alice_ = X448PrivateKey.from_private_bytes(TestX448.sk_alice) - sk_bob_ = X448PrivateKey.from_private_bytes(TestX448.sk_bob) + sk_bob_ = 
X448PrivateKey.from_private_bytes(TestX448.sk_bob) self.assertEqual(X448.derive_public_key(sk_alice_), TestX448.pk_alice) - self.assertEqual(X448.derive_public_key(sk_bob_), TestX448.pk_bob) + self.assertEqual(X448.derive_public_key(sk_bob_), TestX448.pk_bob) shared_secret1 = X448.shared_key(sk_alice_, TestX448.pk_bob) - shared_secret2 = X448.shared_key(sk_bob_, TestX448.pk_alice) + shared_secret2 = X448.shared_key(sk_bob_, TestX448.pk_alice) self.assertEqual(shared_secret1, blake2b(TestX448.shared_secret)) self.assertEqual(shared_secret2, blake2b(TestX448.shared_secret)) def test_non_unique_keys_raise_critical_error(self) -> None: # Setup - shared_key = os.urandom(SYMMETRIC_KEY_LENGTH) + shared_key = os.urandom(SYMMETRIC_KEY_LENGTH) tx_public_key = os.urandom(TFC_PUBLIC_KEY_LENGTH) # Test @@ -607,7 +457,7 @@ class TestX448(unittest.TestCase): def test_x448_key_derivation(self) -> None: # Setup - shared_key = os.urandom(SYMMETRIC_KEY_LENGTH) + shared_key = os.urandom(SYMMETRIC_KEY_LENGTH) tx_public_key = os.urandom(TFC_PUBLIC_KEY_LENGTH) rx_public_key = os.urandom(TFC_PUBLIC_KEY_LENGTH) @@ -650,325 +500,122 @@ class TestXChaCha20Poly1305(unittest.TestCase): """ ietf_plaintext = bytes.fromhex( - "4c 61 64 69 65 73 20 61 6e 64 20 47 65 6e 74 6c" - "65 6d 65 6e 20 6f 66 20 74 68 65 20 63 6c 61 73" - "73 20 6f 66 20 27 39 39 3a 20 49 66 20 49 20 63" - "6f 75 6c 64 20 6f 66 66 65 72 20 79 6f 75 20 6f" - "6e 6c 79 20 6f 6e 65 20 74 69 70 20 66 6f 72 20" - "74 68 65 20 66 75 74 75 72 65 2c 20 73 75 6e 73" - "63 72 65 65 6e 20 77 6f 75 6c 64 20 62 65 20 69" - "74 2e" - ) + '4c 61 64 69 65 73 20 61 6e 64 20 47 65 6e 74 6c' + '65 6d 65 6e 20 6f 66 20 74 68 65 20 63 6c 61 73' + '73 20 6f 66 20 27 39 39 3a 20 49 66 20 49 20 63' + '6f 75 6c 64 20 6f 66 66 65 72 20 79 6f 75 20 6f' + '6e 6c 79 20 6f 6e 65 20 74 69 70 20 66 6f 72 20' + '74 68 65 20 66 75 74 75 72 65 2c 20 73 75 6e 73' + '63 72 65 65 6e 20 77 6f 75 6c 64 20 62 65 20 69' + '74 2e') - ietf_ad = bytes.fromhex("50 51 52 53 c0 c1 c2 c3 c4 c5 c6 c7") + ietf_ad = bytes.fromhex( + '50 51 52 53 c0 c1 c2 c3 c4 c5 c6 c7') ietf_key = bytes.fromhex( - "80 81 82 83 84 85 86 87 88 89 8a 8b 8c 8d 8e 8f" - "90 91 92 93 94 95 96 97 98 99 9a 9b 9c 9d 9e 9f" - ) + '80 81 82 83 84 85 86 87 88 89 8a 8b 8c 8d 8e 8f' + '90 91 92 93 94 95 96 97 98 99 9a 9b 9c 9d 9e 9f') ietf_nonce = bytes.fromhex( - "40 41 42 43 44 45 46 47 48 49 4a 4b 4c 4d 4e 4f" "50 51 52 53 54 55 56 57" - ) + '40 41 42 43 44 45 46 47 48 49 4a 4b 4c 4d 4e 4f' + '50 51 52 53 54 55 56 57') ietf_ciphertext = bytes.fromhex( - "bd 6d 17 9d 3e 83 d4 3b 95 76 57 94 93 c0 e9 39" - "57 2a 17 00 25 2b fa cc be d2 90 2c 21 39 6c bb" - "73 1c 7f 1b 0b 4a a6 44 0b f3 a8 2f 4e da 7e 39" - "ae 64 c6 70 8c 54 c2 16 cb 96 b7 2e 12 13 b4 52" - "2f 8c 9b a4 0d b5 d9 45 b1 1b 69 b9 82 c1 bb 9e" - "3f 3f ac 2b c3 69 48 8f 76 b2 38 35 65 d3 ff f9" - "21 f9 66 4c 97 63 7d a9 76 88 12 f6 15 c6 8b 13" - "b5 2e" - ) + 'bd 6d 17 9d 3e 83 d4 3b 95 76 57 94 93 c0 e9 39' + '57 2a 17 00 25 2b fa cc be d2 90 2c 21 39 6c bb' + '73 1c 7f 1b 0b 4a a6 44 0b f3 a8 2f 4e da 7e 39' + 'ae 64 c6 70 8c 54 c2 16 cb 96 b7 2e 12 13 b4 52' + '2f 8c 9b a4 0d b5 d9 45 b1 1b 69 b9 82 c1 bb 9e' + '3f 3f ac 2b c3 69 48 8f 76 b2 38 35 65 d3 ff f9' + '21 f9 66 4c 97 63 7d a9 76 88 12 f6 15 c6 8b 13' + 'b5 2e') ietf_tag = bytes.fromhex( - "c0:87:59:24:c1:c7:98:79:47:de:af:d8:78:0a:cf:49".replace(":", "") - ) + 'c0:87:59:24:c1:c7:98:79:47:de:af:d8:78:0a:cf:49'.replace(':', '')) nonce_ct_tag_ietf = ietf_nonce + ietf_ciphertext + ietf_tag # --- - 
libsodium_plaintext = ( - b"Ladies and Gentlemen of the class of '99: If I could offer you " + libsodium_plaintext = \ + b"Ladies and Gentlemen of the class of '99: If I could offer you " \ b"only one tip for the future, sunscreen would be it." - ) - libsodium_ad = bytes( - [0x50, 0x51, 0x52, 0x53, 0xC0, 0xC1, 0xC2, 0xC3, 0xC4, 0xC5, 0xC6, 0xC7] - ) + libsodium_ad = bytes([ + 0x50, 0x51, 0x52, 0x53, 0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7]) - libsodium_key = bytes( - [ - 0x80, - 0x81, - 0x82, - 0x83, - 0x84, - 0x85, - 0x86, - 0x87, - 0x88, - 0x89, - 0x8A, - 0x8B, - 0x8C, - 0x8D, - 0x8E, - 0x8F, - 0x90, - 0x91, - 0x92, - 0x93, - 0x94, - 0x95, - 0x96, - 0x97, - 0x98, - 0x99, - 0x9A, - 0x9B, - 0x9C, - 0x9D, - 0x9E, - 0x9F, - ] - ) + libsodium_key = bytes([ + 0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, + 0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f, + 0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, + 0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f]) - libsodium_nonce = bytes( - [ - 0x07, - 0x00, - 0x00, - 0x00, - 0x40, - 0x41, - 0x42, - 0x43, - 0x44, - 0x45, - 0x46, - 0x47, - 0x48, - 0x49, - 0x4A, - 0x4B, - 0x4C, - 0x4D, - 0x4E, - 0x4F, - 0x50, - 0x51, - 0x52, - 0x53, - ] - ) + libsodium_nonce = bytes([ + 0x07, 0x00, 0x00, 0x00, 0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47, + 0x48, 0x49, 0x4a, 0x4b, 0x4c, 0x4d, 0x4e, 0x4f, 0x50, 0x51, 0x52, 0x53]) - libsodium_ct_tag = bytes( - [ - 0xF8, - 0xEB, - 0xEA, - 0x48, - 0x75, - 0x04, - 0x40, - 0x66, - 0xFC, - 0x16, - 0x2A, - 0x06, - 0x04, - 0xE1, - 0x71, - 0xFE, - 0xEC, - 0xFB, - 0x3D, - 0x20, - 0x42, - 0x52, - 0x48, - 0x56, - 0x3B, - 0xCF, - 0xD5, - 0xA1, - 0x55, - 0xDC, - 0xC4, - 0x7B, - 0xBD, - 0xA7, - 0x0B, - 0x86, - 0xE5, - 0xAB, - 0x9B, - 0x55, - 0x00, - 0x2B, - 0xD1, - 0x27, - 0x4C, - 0x02, - 0xDB, - 0x35, - 0x32, - 0x1A, - 0xCD, - 0x7A, - 0xF8, - 0xB2, - 0xE2, - 0xD2, - 0x50, - 0x15, - 0xE1, - 0x36, - 0xB7, - 0x67, - 0x94, - 0x58, - 0xE9, - 0xF4, - 0x32, - 0x43, - 0xBF, - 0x71, - 0x9D, - 0x63, - 0x9B, - 0xAD, - 0xB5, - 0xFE, - 0xAC, - 0x03, - 0xF8, - 0x0A, - 0x19, - 0xA9, - 0x6E, - 0xF1, - 0x0C, - 0xB1, - 0xD1, - 0x53, - 0x33, - 0xA8, - 0x37, - 0xB9, - 0x09, - 0x46, - 0xBA, - 0x38, - 0x54, - 0xEE, - 0x74, - 0xDA, - 0x3F, - 0x25, - 0x85, - 0xEF, - 0xC7, - 0xE1, - 0xE1, - 0x70, - 0xE1, - 0x7E, - 0x15, - 0xE5, - 0x63, - 0xE7, - 0x76, - 0x01, - 0xF4, - 0xF8, - 0x5C, - 0xAF, - 0xA8, - 0xE5, - 0x87, - 0x76, - 0x14, - 0xE1, - 0x43, - 0xE6, - 0x84, - 0x20, - ] - ) + libsodium_ct_tag = bytes([ + 0xf8,0xeb,0xea,0x48,0x75,0x04,0x40,0x66 + ,0xfc,0x16,0x2a,0x06,0x04,0xe1,0x71,0xfe + ,0xec,0xfb,0x3d,0x20,0x42,0x52,0x48,0x56 + ,0x3b,0xcf,0xd5,0xa1,0x55,0xdc,0xc4,0x7b + ,0xbd,0xa7,0x0b,0x86,0xe5,0xab,0x9b,0x55 + ,0x00,0x2b,0xd1,0x27,0x4c,0x02,0xdb,0x35 + ,0x32,0x1a,0xcd,0x7a,0xf8,0xb2,0xe2,0xd2 + ,0x50,0x15,0xe1,0x36,0xb7,0x67,0x94,0x58 + ,0xe9,0xf4,0x32,0x43,0xbf,0x71,0x9d,0x63 + ,0x9b,0xad,0xb5,0xfe,0xac,0x03,0xf8,0x0a + ,0x19,0xa9,0x6e,0xf1,0x0c,0xb1,0xd1,0x53 + ,0x33,0xa8,0x37,0xb9,0x09,0x46,0xba,0x38 + ,0x54,0xee,0x74,0xda,0x3f,0x25,0x85,0xef + ,0xc7,0xe1,0xe1,0x70,0xe1,0x7e,0x15,0xe5 + ,0x63,0xe7,0x76,0x01,0xf4,0xf8,0x5c,0xaf + ,0xa8,0xe5,0x87,0x76,0x14,0xe1,0x43,0xe6 + ,0x84,0x20]) nonce_ct_tag_libsodium = libsodium_nonce + libsodium_ct_tag def setUp(self) -> None: """Pre-test actions.""" self.assertEqual(self.ietf_plaintext, self.libsodium_plaintext) - self.assertEqual(self.ietf_ad, self.libsodium_ad) - self.assertEqual(self.ietf_key, self.libsodium_key) + self.assertEqual(self.ietf_ad, self.libsodium_ad) + 
self.assertEqual(self.ietf_key, self.libsodium_key) - self.assertNotEqual(self.ietf_nonce, self.libsodium_nonce) + self.assertNotEqual(self.ietf_nonce, self.libsodium_nonce) self.assertNotEqual(self.nonce_ct_tag_ietf, self.nonce_ct_tag_libsodium) self.plaintext = self.ietf_plaintext - self.ad = self.ietf_ad - self.key = self.ietf_key + self.ad = self.ietf_ad + self.key = self.ietf_key - @mock.patch("src.common.crypto.csprng", side_effect=[ietf_nonce, libsodium_nonce]) - def test_encrypt_and_sign_with_the_official_test_vectors(self, mock_csprng) -> None: - self.assertEqual( - encrypt_and_sign(self.plaintext, self.key, self.ad), self.nonce_ct_tag_ietf - ) - self.assertEqual( - encrypt_and_sign(self.plaintext, self.key, self.ad), - self.nonce_ct_tag_libsodium, - ) + @mock.patch('src.common.crypto.csprng', side_effect=[ietf_nonce, libsodium_nonce]) + def test_encrypt_and_sign_with_the_official_test_vectors(self, mock_csprng: MagicMock) -> None: + self.assertEqual(encrypt_and_sign(self.plaintext, self.key, self.ad), self.nonce_ct_tag_ietf) + self.assertEqual(encrypt_and_sign(self.plaintext, self.key, self.ad), self.nonce_ct_tag_libsodium) mock_csprng.assert_called_with(XCHACHA20_NONCE_LENGTH) def test_auth_and_decrypt_with_the_official_test_vectors(self) -> None: - self.assertEqual( - auth_and_decrypt(self.nonce_ct_tag_ietf, self.key, ad=self.ad), - self.plaintext, - ) - self.assertEqual( - auth_and_decrypt(self.nonce_ct_tag_libsodium, self.key, ad=self.ad), - self.plaintext, - ) + self.assertEqual(auth_and_decrypt(self.nonce_ct_tag_ietf, self.key, ad=self.ad), self.plaintext) + self.assertEqual(auth_and_decrypt(self.nonce_ct_tag_libsodium, self.key, ad=self.ad), self.plaintext) def test_invalid_size_key_raises_critical_error(self) -> None: - invalid_keys = [ - key_length * b"a" - for key_length in [ - 1, - SYMMETRIC_KEY_LENGTH - 1, - SYMMETRIC_KEY_LENGTH + 1, - 1000, - ] - ] + invalid_keys = [key_length * b'a' for key_length in [1, SYMMETRIC_KEY_LENGTH-1, + SYMMETRIC_KEY_LENGTH+1, 1000]] for invalid_key in invalid_keys: with self.assertRaises(SystemExit): encrypt_and_sign(self.libsodium_plaintext, invalid_key) with self.assertRaises(SystemExit): auth_and_decrypt(self.nonce_ct_tag_ietf, invalid_key) - @mock.patch( - "src.common.crypto.csprng", return_value=(XCHACHA20_NONCE_LENGTH - 1) * b"a" - ) - def test_invalid_nonce_when_encrypting_raises_critical_error( - self, mock_csprng - ) -> None: + @mock.patch('src.common.crypto.csprng', return_value=(XCHACHA20_NONCE_LENGTH-1)*b'a') + def test_invalid_nonce_when_encrypting_raises_critical_error(self, mock_csprng: MagicMock) -> None: with self.assertRaises(SystemExit): encrypt_and_sign(self.plaintext, self.key) mock_csprng.assert_called_with(XCHACHA20_NONCE_LENGTH) def test_invalid_tag_in_data_from_database_raises_critical_error(self) -> None: with self.assertRaises(SystemExit): - auth_and_decrypt( - self.nonce_ct_tag_ietf, - key=bytes(SYMMETRIC_KEY_LENGTH), - database="path/database_filename", - ) + auth_and_decrypt(self.nonce_ct_tag_ietf, key=bytes(SYMMETRIC_KEY_LENGTH), database='path/database_filename') def test_invalid_tag_in_data_from_contact_raises_nacl_crypto_error(self) -> None: with self.assertRaises(nacl.exceptions.CryptoError): @@ -992,9 +639,9 @@ class TestBytePadding(unittest.TestCase): def test_length_of_the_padded_message_is_divisible_by_padding_size(self) -> None: padded_bytestring_lengths = set() - for message_length in range(4 * PADDING_LENGTH): + for message_length in range(4*PADDING_LENGTH): message = os.urandom(message_length) - 
padded = byte_padding(message) + padded = byte_padding(message) self.assertIsInstance(padded, bytes) self.assertEqual(len(padded) % PADDING_LENGTH, 0) @@ -1003,44 +650,25 @@ class TestBytePadding(unittest.TestCase): # Check that all messages were padded to multiples of # PADDING_LENGTH in the range of the loop above. - self.assertEqual( - padded_bytestring_lengths, - { - 1 * PADDING_LENGTH, - 2 * PADDING_LENGTH, - 3 * PADDING_LENGTH, - 4 * PADDING_LENGTH, - }, - ) + self.assertEqual(padded_bytestring_lengths, {1*PADDING_LENGTH, 2*PADDING_LENGTH, + 3*PADDING_LENGTH, 4*PADDING_LENGTH}) - @mock.patch( - "cryptography.hazmat.primitives.padding.PKCS7", - return_value=MagicMock( - padder=MagicMock( + @mock.patch('cryptography.hazmat.primitives.padding.PKCS7', return_value=MagicMock( - update=MagicMock(return_value=""), - finalize=MagicMock(return_value=(PADDING_LENGTH * "a")), - ) - ) - ), - ) - def test_invalid_padding_type_raises_critical_error(self, mock_padder) -> None: + padder=MagicMock(return_value=MagicMock( + update=MagicMock(return_value=''), + finalize=MagicMock(return_value=(PADDING_LENGTH*'a')))))) + def test_invalid_padding_type_raises_critical_error(self, mock_padder: MagicMock) -> None: with self.assertRaises(SystemExit): - byte_padding(b"test_string") + byte_padding(b'test_string') mock_padder.assert_called() - @mock.patch( - "cryptography.hazmat.primitives.padding.PKCS7", - return_value=MagicMock( - padder=MagicMock( + @mock.patch('cryptography.hazmat.primitives.padding.PKCS7', return_value=MagicMock( - update=MagicMock(return_value=b""), - finalize=MagicMock(return_value=(PADDING_LENGTH + 1) * b"a"), - ) - ) - ), - ) - def test_invalid_padding_size_raises_critical_error(self, mock_padder) -> None: + padder=MagicMock(return_value=MagicMock( + update=MagicMock(return_value=b''), + finalize=MagicMock(return_value=(PADDING_LENGTH+1)*b'a'))))) + def test_invalid_padding_size_raises_critical_error(self, mock_padder: MagicMock) -> None: """\ This test makes sure TFC detects if the length of the message padded by pyca/cryptography library is not correct. @@ -1048,25 +676,23 @@ class TestBytePadding(unittest.TestCase): with a message that has an incorrect length of 256 bytes. 
""" with self.assertRaises(SystemExit): - byte_padding(b"test_string") + byte_padding(b'test_string') mock_padder.assert_called() - def test_message_length_one_less_than_padding_size_does_not_add_a_dummy_block( - self, - ) -> None: - message = os.urandom(PADDING_LENGTH - 1) - padded = byte_padding(message) + def test_message_length_one_less_than_padding_size_does_not_add_a_dummy_block(self) -> None: + message = os.urandom(PADDING_LENGTH-1) + padded = byte_padding(message) self.assertEqual(len(padded), PADDING_LENGTH) def test_message_length_equal_to_padding_size_adds_a_dummy_block(self) -> None: message = os.urandom(PADDING_LENGTH) - padded = byte_padding(message) - self.assertEqual(len(padded), 2 * PADDING_LENGTH) + padded = byte_padding(message) + self.assertEqual(len(padded), 2*PADDING_LENGTH) def test_removal_of_padding_does_not_alter_the_original_message(self) -> None: - for message_length in range(4 * PADDING_LENGTH): + for message_length in range(4*PADDING_LENGTH): message = os.urandom(message_length) - padded = byte_padding(message) + padded = byte_padding(message) self.assertEqual(rm_padding_bytes(padded), message) @@ -1093,57 +719,44 @@ class TestCSPRNG(unittest.TestCase): (pp.72-126) of the BSI report: https://www.bsi.bund.de/SharedDocs/Downloads/EN/BSI/Publications/Studies/LinuxRNG/LinuxRNG_EN.pdf """ - - mock_entropy = XCHACHA20_NONCE_LENGTH * b"a" + mock_entropy = XCHACHA20_NONCE_LENGTH * b'a' def test_default_key_type_and_size(self) -> None: key = csprng() self.assertIsInstance(key, bytes) self.assertEqual(len(key), SYMMETRIC_KEY_LENGTH) - @mock.patch("os.getrandom", return_value=mock_entropy) - def test_function_calls_getrandom_with_correct_parameters_and_hashes_entropy_with_blake2b( - self, mock_getrandom - ) -> None: + @mock.patch('os.getrandom', return_value=mock_entropy) + def test_getrandom_called_with_correct_parameters_and_hashes_with_blake2b(self, mock_getrandom: MagicMock) -> None: key = csprng(XCHACHA20_NONCE_LENGTH) - self.assertEqual( - key, blake2b(self.mock_entropy, digest_size=XCHACHA20_NONCE_LENGTH) - ) + self.assertEqual(key, blake2b(self.mock_entropy, digest_size=XCHACHA20_NONCE_LENGTH)) mock_getrandom.assert_called_with(XCHACHA20_NONCE_LENGTH, flags=0) def test_function_returns_key_of_specified_size(self) -> None: - for key_size in range(BLAKE2_DIGEST_LENGTH_MIN, BLAKE2_DIGEST_LENGTH_MAX + 1): + for key_size in range(BLAKE2_DIGEST_LENGTH_MIN, BLAKE2_DIGEST_LENGTH_MAX+1): key = csprng(key_size) self.assertEqual(len(key), key_size) - @mock.patch("os.getrandom", return_value=SYMMETRIC_KEY_LENGTH * "a") - def test_invalid_entropy_type_from_getrandom_raises_critical_error(self, _) -> None: + @mock.patch('os.getrandom', return_value=SYMMETRIC_KEY_LENGTH*'a') + def test_invalid_entropy_type_from_getrandom_raises_critical_error(self, _: Callable[..., None]) -> None: with self.assertRaises(SystemExit): csprng() - def test_subceeding_hash_function_min_digest_size_raises_critical_error( - self, - ) -> None: + def test_subceeding_hash_function_min_digest_size_raises_critical_error(self) -> None: with self.assertRaises(SystemExit): - csprng(BLAKE2_DIGEST_LENGTH_MIN - 1) + csprng(BLAKE2_DIGEST_LENGTH_MIN-1) - def test_exceeding_hash_function_max_digest_size_raises_critical_error( - self, - ) -> None: + def test_exceeding_hash_function_max_digest_size_raises_critical_error(self) -> None: with self.assertRaises(SystemExit): - csprng(BLAKE2_DIGEST_LENGTH_MAX + 1) + csprng(BLAKE2_DIGEST_LENGTH_MAX+1) - @mock.patch("src.common.crypto.blake2b") - @mock.patch( - 
"os.getrandom", - side_effect=[ - (SYMMETRIC_KEY_LENGTH - 1) * b"a", - (SYMMETRIC_KEY_LENGTH + 1) * b"a", - ], - ) - def test_invalid_size_entropy_from_getrandom_raises_critical_error( - self, mock_getrandom, mock_blake2b - ) -> None: + @mock.patch('src.common.crypto.blake2b') + @mock.patch('os.getrandom', side_effect=[(SYMMETRIC_KEY_LENGTH-1) * b'a', + (SYMMETRIC_KEY_LENGTH+1) * b'a']) + def test_invalid_size_entropy_from_getrandom_raises_critical_error(self, + mock_getrandom: MagicMock, + mock_blake2b: MagicMock + ) -> None: with self.assertRaises(SystemExit): csprng() with self.assertRaises(SystemExit): @@ -1155,28 +768,22 @@ class TestCSPRNG(unittest.TestCase): class TestCheckKernelVersion(unittest.TestCase): - invalid_versions = ["3.9.11", "3.19.8", "4.16.0"] - valid_versions = ["4.17.0", "4.18.1", "5.0.0"] + invalid_versions = ['3.9.11', '3.19.8', '4.16.0'] + valid_versions = ['4.17.0', '4.18.1', '5.0.0'] - @mock.patch( - "os.uname", - side_effect=[["", "", f"{version}-0-generic"] for version in invalid_versions], - ) - def test_invalid_kernel_versions_raise_critical_error(self, mock_uname) -> None: + @mock.patch('os.uname', side_effect=[['', '', f'{version}-0-generic'] for version in invalid_versions]) + def test_invalid_kernel_versions_raise_critical_error(self, mock_uname: MagicMock) -> None: for _ in self.invalid_versions: with self.assertRaises(SystemExit): check_kernel_version() mock_uname.assert_called() - @mock.patch( - "os.uname", - side_effect=[["", "", f"{version}-0-generic"] for version in valid_versions], - ) - def test_valid_kernel_versions_return_none(self, mock_uname) -> None: + @mock.patch('os.uname', side_effect=[['', '', f'{version}-0-generic'] for version in valid_versions]) + def test_valid_kernel_versions_return_none(self, mock_uname: MagicMock) -> None: for _ in self.valid_versions: self.assertIsNone(check_kernel_version()) mock_uname.assert_called() -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/common/test_database.py b/tests/common/test_database.py index 41de9bf..5c6ab94 100644 --- a/tests/common/test_database.py +++ b/tests/common/test_database.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
@@ -23,36 +23,32 @@ import sqlite3 import os import unittest -from unittest import mock +from unittest import mock from unittest.mock import MagicMock -from src.common.crypto import auth_and_decrypt, blake2b, encrypt_and_sign +from src.common.crypto import auth_and_decrypt, blake2b, encrypt_and_sign from src.common.database import TFCDatabase, MessageLog, TFCUnencryptedDatabase -from src.common.statics import ( - DB_WRITE_RETRY_LIMIT, - DIR_USER_DATA, - MASTERKEY_DB_SIZE, - LOG_ENTRY_LENGTH, - SYMMETRIC_KEY_LENGTH, -) +from src.common.statics import (DB_WRITE_RETRY_LIMIT, DIR_USER_DATA, MASTERKEY_DB_SIZE, LOG_ENTRY_LENGTH, + SYMMETRIC_KEY_LENGTH) from tests.mock_classes import MasterKey, Settings -from tests.utils import cd_unit_test, cleanup, tamper_file +from tests.utils import cd_unit_test, cleanup, tamper_file class TestTFCDatabase(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" self.unit_test_dir = cd_unit_test() - self.database_name = "unittest_db" - self.master_key = MasterKey() - self.database = TFCDatabase(self.database_name, self.master_key) + self.database_name = 'unittest_db' + self.master_key = MasterKey() + self.database = TFCDatabase(self.database_name, self.master_key) def tearDown(self) -> None: """Post-test actions.""" cleanup(self.unit_test_dir) - @mock.patch("os.fsync", return_value=MagicMock) + @mock.patch('os.fsync', return_value=MagicMock) def test_write_to_file(self, mock_os_fsync) -> None: # Setup data = os.urandom(MASTERKEY_DB_SIZE) @@ -60,7 +56,7 @@ class TestTFCDatabase(unittest.TestCase): # Test self.assertIsNone(self.database.write_to_file(self.database_name, data)) - with open(self.database_name, "rb") as f: + with open(self.database_name, 'rb') as f: stored_data = f.read() self.assertEqual(data, stored_data) @@ -70,7 +66,7 @@ class TestTFCDatabase(unittest.TestCase): # Setup pt_bytes = os.urandom(MASTERKEY_DB_SIZE) ct_bytes = encrypt_and_sign(pt_bytes, self.master_key.master_key) - with open(self.database_name, "wb") as f: + with open(self.database_name, 'wb') as f: f.write(ct_bytes) # Test valid file content returns True. 
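The TFCDatabase tests that follow exercise a write–verify–replace pattern: ciphertext is written to a temporary file, fsynced, verified by authenticated decryption, and only then moved over the original database, with verification retried up to DB_WRITE_RETRY_LIMIT times before a critical error (SystemExit) is raised. A rough sketch of that control flow, with a caller-supplied verifier and hypothetical names; this is not TFC's implementation, and the retry-limit value below is an assumption.

import os
from typing import Callable

DB_WRITE_RETRY_LIMIT = 3   # assumed value; the tests only rely on the imported constant

def store_atomically(path: str, data: bytes, verify: Callable[[str], bool]) -> None:
    """Write data to a temp file, verify it, then atomically replace the database."""
    temp_path = path + '_temp'
    for _ in range(DB_WRITE_RETRY_LIMIT):
        with open(temp_path, 'wb') as f:       # write ciphertext to the temp file
            f.write(data)
            f.flush()
            os.fsync(f.fileno())               # the tests mock os.fsync at this step
        if verify(temp_path):                  # e.g. authenticated decryption succeeds
            os.replace(temp_path, path)        # temp file replaces the original database
            return
    raise SystemExit("Persistent database write failure.")  # the critical error the tests expect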
@@ -80,14 +76,10 @@ class TestTFCDatabase(unittest.TestCase): tamper_file(self.database_name, tamper_size=1) self.assertFalse(self.database.verify_file(self.database_name)) - def test_ensure_temp_write_raises_critical_error_after_exceeding_retry_limit( - self, - ) -> None: + def test_ensure_temp_write_raises_critical_error_after_exceeding_retry_limit(self) -> None: # Setup - orig_verify_file = self.database.verify_file - self.database.verify_file = MagicMock( - side_effect=DB_WRITE_RETRY_LIMIT * [False] - ) + orig_verify_file = self.database.verify_file + self.database.verify_file = MagicMock(side_effect=DB_WRITE_RETRY_LIMIT*[False]) # Test with self.assertRaises(SystemExit): @@ -98,10 +90,8 @@ class TestTFCDatabase(unittest.TestCase): def test_ensure_temp_write_succeeds_just_before_limit(self) -> None: # Setup - orig_verify_file = self.database.verify_file - self.database.verify_file = MagicMock( - side_effect=(DB_WRITE_RETRY_LIMIT - 1) * [False] + [True] - ) + orig_verify_file = self.database.verify_file + self.database.verify_file = MagicMock(side_effect=(DB_WRITE_RETRY_LIMIT-1)*[False] + [True]) # Test self.assertIsNone(self.database.store_database(os.urandom(MASTERKEY_DB_SIZE))) @@ -109,19 +99,17 @@ class TestTFCDatabase(unittest.TestCase): # Teardown self.database.verify_file = orig_verify_file - def test_store_database_encrypts_data_with_master_key_and_replaces_temp_file_and_original_file( - self, - ) -> None: + def test_store_database_encrypts_data_with_master_key_and_replaces_temp_file_and_original_file(self) -> None: # Setup pt_old = os.urandom(MASTERKEY_DB_SIZE) ct_old = encrypt_and_sign(pt_old, self.master_key.master_key) - with open(self.database_name, "wb") as f: + with open(self.database_name, 'wb') as f: f.write(ct_old) pt_new = os.urandom(MASTERKEY_DB_SIZE) ct_temp = os.urandom(MASTERKEY_DB_SIZE) - with open(self.database.database_temp, "wb") as f: + with open(self.database.database_temp, 'wb') as f: f.write(ct_temp) # Test @@ -129,7 +117,7 @@ class TestTFCDatabase(unittest.TestCase): self.assertIsNone(self.database.store_database(pt_new)) self.assertFalse(os.path.isfile(self.database.database_temp)) - with open(self.database_name, "rb") as f: + with open(self.database_name, 'rb') as f: purp_data = f.read() purp_pt = auth_and_decrypt(purp_data, self.master_key.master_key) self.assertEqual(purp_pt, pt_new) @@ -139,8 +127,8 @@ class TestTFCDatabase(unittest.TestCase): self.assertFalse(os.path.isfile(self.database.database_name)) self.assertFalse(os.path.isfile(self.database.database_temp)) - with open(self.database.database_temp, "wb") as f: - f.write(b"temp_file") + with open(self.database.database_temp, 'wb') as f: + f.write(b'temp_file') self.assertFalse(os.path.isfile(self.database.database_name)) self.assertTrue(os.path.isfile(self.database.database_temp)) @@ -155,11 +143,11 @@ class TestTFCDatabase(unittest.TestCase): # Setup pt_old = os.urandom(MASTERKEY_DB_SIZE) ct_old = encrypt_and_sign(pt_old, self.master_key.master_key) - with open(self.database_name, "wb") as f: + with open(self.database_name, 'wb') as f: f.write(ct_old) ct_temp = os.urandom(MASTERKEY_DB_SIZE) - with open(self.database.database_temp, "wb") as f: + with open(self.database.database_temp, 'wb') as f: f.write(ct_temp) # Test @@ -171,12 +159,12 @@ class TestTFCDatabase(unittest.TestCase): # Setup pt_old = os.urandom(MASTERKEY_DB_SIZE) ct_old = encrypt_and_sign(pt_old, self.master_key.master_key) - with open(self.database_name, "wb") as f: + with open(self.database_name, 'wb') as f: f.write(ct_old) 
pt_temp = os.urandom(MASTERKEY_DB_SIZE) ct_temp = encrypt_and_sign(pt_temp, self.master_key.master_key) - with open(self.database.database_temp, "wb") as f: + with open(self.database.database_temp, 'wb') as f: f.write(ct_temp) # Test @@ -186,17 +174,18 @@ class TestTFCDatabase(unittest.TestCase): class TestTFCUnencryptedDatabase(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" self.unit_test_dir = cd_unit_test() - self.database_name = "unittest_db" - self.database = TFCUnencryptedDatabase(self.database_name) + self.database_name = 'unittest_db' + self.database = TFCUnencryptedDatabase(self.database_name) def tearDown(self) -> None: """Post-test actions.""" cleanup(self.unit_test_dir) - @mock.patch("os.fsync", return_value=MagicMock) + @mock.patch('os.fsync', return_value=MagicMock) def test_write_to_file(self, mock_os_fsync) -> None: # Setup data = os.urandom(MASTERKEY_DB_SIZE) @@ -204,7 +193,7 @@ class TestTFCUnencryptedDatabase(unittest.TestCase): # Test self.assertIsNone(self.database.write_to_file(self.database_name, data)) - with open(self.database_name, "rb") as f: + with open(self.database_name, 'rb') as f: stored_data = f.read() self.assertEqual(data, stored_data) @@ -212,9 +201,9 @@ class TestTFCUnencryptedDatabase(unittest.TestCase): def test_verify_file(self) -> None: # Setup - data = os.urandom(MASTERKEY_DB_SIZE) + data = os.urandom(MASTERKEY_DB_SIZE) checksummed_data = data + blake2b(data) - with open(self.database_name, "wb") as f: + with open(self.database_name, 'wb') as f: f.write(checksummed_data) # Test valid file content returns True. @@ -224,14 +213,10 @@ class TestTFCUnencryptedDatabase(unittest.TestCase): tamper_file(self.database_name, tamper_size=1) self.assertFalse(self.database.verify_file(self.database_name)) - def test_ensure_temp_write_raises_critical_error_after_exceeding_retry_limit( - self, - ) -> None: + def test_ensure_temp_write_raises_critical_error_after_exceeding_retry_limit(self) -> None: # Setup - orig_verify_file = self.database.verify_file - self.database.verify_file = MagicMock( - side_effect=DB_WRITE_RETRY_LIMIT * [False] - ) + orig_verify_file = self.database.verify_file + self.database.verify_file = MagicMock(side_effect=DB_WRITE_RETRY_LIMIT*[False]) # Test with self.assertRaises(SystemExit): @@ -242,31 +227,25 @@ class TestTFCUnencryptedDatabase(unittest.TestCase): def test_ensure_temp_write_succeeds_just_before_limit(self) -> None: # Setup - orig_verify_file = self.database.verify_file - self.database.verify_file = MagicMock( - side_effect=(DB_WRITE_RETRY_LIMIT - 1) * [False] + [True] - ) + orig_verify_file = self.database.verify_file + self.database.verify_file = MagicMock(side_effect=(DB_WRITE_RETRY_LIMIT-1)*[False] + [True]) # Test - self.assertIsNone( - self.database.store_unencrypted_database(os.urandom(MASTERKEY_DB_SIZE)) - ) + self.assertIsNone(self.database.store_unencrypted_database(os.urandom(MASTERKEY_DB_SIZE))) # Teardown self.database.verify_file = orig_verify_file - def test_store_unencrypted_database_replaces_temp_file_and_original_file( - self, - ) -> None: + def test_store_unencrypted_database_replaces_temp_file_and_original_file(self) -> None: # Setup data_old = os.urandom(MASTERKEY_DB_SIZE) - with open(self.database_name, "wb") as f: + with open(self.database_name, 'wb') as f: f.write(data_old) data_new = os.urandom(MASTERKEY_DB_SIZE) data_temp = os.urandom(MASTERKEY_DB_SIZE) - with open(self.database.database_temp, "wb") as f: + with open(self.database.database_temp, 'wb') as f: f.write(data_temp) # 
Test @@ -274,7 +253,7 @@ class TestTFCUnencryptedDatabase(unittest.TestCase): self.assertIsNone(self.database.store_unencrypted_database(data_new)) self.assertFalse(os.path.isfile(self.database.database_temp)) - with open(self.database_name, "rb") as f: + with open(self.database_name, 'rb') as f: purp_data = f.read() self.assertEqual(purp_data, data_new + blake2b(data_new)) @@ -284,8 +263,8 @@ class TestTFCUnencryptedDatabase(unittest.TestCase): self.assertFalse(os.path.isfile(self.database.database_name)) self.assertFalse(os.path.isfile(self.database.database_temp)) - with open(self.database.database_temp, "wb") as f: - f.write(b"temp_file") + with open(self.database.database_temp, 'wb') as f: + f.write(b'temp_file') self.assertFalse(os.path.isfile(self.database.database_name)) self.assertTrue(os.path.isfile(self.database.database_temp)) @@ -297,10 +276,10 @@ class TestTFCUnencryptedDatabase(unittest.TestCase): self.assertTrue(os.path.isfile(self.database.database_name)) def test_loading_invalid_database_data_raises_critical_error(self) -> None: - data_old = os.urandom(MASTERKEY_DB_SIZE) + data_old = os.urandom(MASTERKEY_DB_SIZE) checksummed = data_old + blake2b(data_old) - with open(self.database_name, "wb") as f: + with open(self.database_name, 'wb') as f: f.write(checksummed) tamper_file(self.database_name, tamper_size=1) @@ -310,13 +289,13 @@ class TestTFCUnencryptedDatabase(unittest.TestCase): def test_load_database_ignores_invalid_temp_database(self) -> None: # Setup - data_old = os.urandom(MASTERKEY_DB_SIZE) + data_old = os.urandom(MASTERKEY_DB_SIZE) checksummed = data_old + blake2b(data_old) - with open(self.database_name, "wb") as f: + with open(self.database_name, 'wb') as f: f.write(checksummed) data_temp = os.urandom(MASTERKEY_DB_SIZE) - with open(self.database.database_temp, "wb") as f: + with open(self.database.database_temp, 'wb') as f: f.write(data_temp) # Test @@ -326,14 +305,14 @@ class TestTFCUnencryptedDatabase(unittest.TestCase): def test_load_database_prefers_valid_temp_database(self) -> None: # Setup - data_old = os.urandom(MASTERKEY_DB_SIZE) + data_old = os.urandom(MASTERKEY_DB_SIZE) checksummed_old = data_old + blake2b(data_old) - with open(self.database_name, "wb") as f: + with open(self.database_name, 'wb') as f: f.write(checksummed_old) - data_temp = os.urandom(MASTERKEY_DB_SIZE) + data_temp = os.urandom(MASTERKEY_DB_SIZE) checksummed_temp = data_temp + blake2b(data_temp) - with open(self.database.database_temp, "wb") as f: + with open(self.database.database_temp, 'wb') as f: f.write(checksummed_temp) # Test @@ -343,117 +322,120 @@ class TestTFCUnencryptedDatabase(unittest.TestCase): self.assertFalse(os.path.isfile(self.database.database_temp)) -class TestTFCLogDatabase(unittest.TestCase): +class TestMessageLog(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" self.unit_test_dir = cd_unit_test() - self.file_name = f"{DIR_USER_DATA}ut_logs" - self.temp_name = self.file_name + "_temp" - self.settings = Settings() - self.database_key = os.urandom(SYMMETRIC_KEY_LENGTH) - self.tfc_log_database = MessageLog(self.file_name, self.database_key) + self.file_name = f'{DIR_USER_DATA}ut_logs' + self.temp_name = self.file_name + '_temp' + self.settings = Settings() + self.database_key = os.urandom(SYMMETRIC_KEY_LENGTH) + self.message_log = MessageLog(self.file_name, self.database_key) def tearDown(self) -> None: """Post-test actions.""" cleanup(self.unit_test_dir) def test_empty_log_database_is_verified(self) -> None: - 
self.assertTrue(self.tfc_log_database.verify_file(self.file_name)) + self.assertTrue(self.message_log.verify_file(self.file_name)) def test_database_with_one_entry_is_verified(self) -> None: # Setup - test_entry = b"test_log_entry" - self.tfc_log_database.insert_log_entry(test_entry) + test_entry = b'test_log_entry' + self.message_log.insert_log_entry(test_entry) # Test - self.assertTrue(self.tfc_log_database.verify_file(self.file_name)) + self.assertTrue(self.message_log.verify_file(self.file_name)) + + def test_invalid_database_returns_false(self) -> None: + # Setup + self.message_log.c.execute("DROP TABLE log_entries") + self.message_log.conn.commit() + + # Test + self.assertFalse(self.message_log.verify_file(self.file_name)) def test_invalid_entry_returns_false(self) -> None: # Setup params = (os.urandom(LOG_ENTRY_LENGTH),) - self.tfc_log_database.c.execute( - f"""INSERT INTO log_entries (log_entry) VALUES (?)""", params - ) - self.tfc_log_database.conn.commit() + self.message_log.c.execute(f"""INSERT INTO log_entries (log_entry) VALUES (?)""", params) + self.message_log.conn.commit() # Test - self.assertFalse(self.tfc_log_database.verify_file(self.file_name)) + self.assertFalse(self.message_log.verify_file(self.file_name)) def test_table_creation(self) -> None: - self.assertIsInstance(self.tfc_log_database, MessageLog) + self.assertIsInstance(self.message_log, MessageLog) self.assertTrue(os.path.isfile(self.file_name)) def test_writing_to_log_database(self) -> None: data = os.urandom(LOG_ENTRY_LENGTH) - self.assertIsNone(self.tfc_log_database.insert_log_entry(data)) + self.assertIsNone(self.message_log.insert_log_entry(data)) def test_iterating_over_log_database(self) -> None: data = [os.urandom(LOG_ENTRY_LENGTH), os.urandom(LOG_ENTRY_LENGTH)] for entry in data: - self.assertIsNone(self.tfc_log_database.insert_log_entry(entry)) + self.assertIsNone(self.message_log.insert_log_entry(entry)) - for index, stored_entry in enumerate(self.tfc_log_database): + for index, stored_entry in enumerate(self.message_log): self.assertEqual(stored_entry, data[index]) def test_invalid_temp_database_is_not_loaded(self) -> None: log_file = MessageLog(self.file_name, database_key=self.database_key) tmp_file = MessageLog(self.temp_name, database_key=self.database_key) - log_file.insert_log_entry(b"a") - log_file.insert_log_entry(b"b") - log_file.insert_log_entry(b"c") - log_file.insert_log_entry(b"d") - log_file.insert_log_entry(b"e") + log_file.insert_log_entry(b'a') + log_file.insert_log_entry(b'b') + log_file.insert_log_entry(b'c') + log_file.insert_log_entry(b'd') + log_file.insert_log_entry(b'e') - tmp_file.insert_log_entry(b"a") - tmp_file.insert_log_entry(b"b") - tmp_file.c.execute( - f"""INSERT INTO log_entries (log_entry) VALUES (?)""", (b"c",) - ) + tmp_file.insert_log_entry(b'a') + tmp_file.insert_log_entry(b'b') + tmp_file.c.execute(f"""INSERT INTO log_entries (log_entry) VALUES (?)""", (b'c',)) tmp_file.conn.commit() - tmp_file.insert_log_entry(b"d") - tmp_file.insert_log_entry(b"e") + tmp_file.insert_log_entry(b'd') + tmp_file.insert_log_entry(b'e') self.assertTrue(os.path.isfile(self.temp_name)) log_file = MessageLog(self.file_name, database_key=self.database_key) - self.assertEqual(list(log_file), [b"a", b"b", b"c", b"d", b"e"]) + self.assertEqual(list(log_file), [b'a', b'b', b'c', b'd', b'e']) self.assertFalse(os.path.isfile(self.temp_name)) def test_valid_temp_database_is_loaded(self) -> None: log_file = MessageLog(self.file_name, database_key=self.database_key) tmp_file = 
MessageLog(self.temp_name, database_key=self.database_key) - log_file.insert_log_entry(b"a") - log_file.insert_log_entry(b"b") - log_file.insert_log_entry(b"c") - log_file.insert_log_entry(b"d") - log_file.insert_log_entry(b"e") + log_file.insert_log_entry(b'a') + log_file.insert_log_entry(b'b') + log_file.insert_log_entry(b'c') + log_file.insert_log_entry(b'd') + log_file.insert_log_entry(b'e') - tmp_file.insert_log_entry(b"f") - tmp_file.insert_log_entry(b"g") - tmp_file.insert_log_entry(b"h") - tmp_file.insert_log_entry(b"i") - tmp_file.insert_log_entry(b"j") + tmp_file.insert_log_entry(b'f') + tmp_file.insert_log_entry(b'g') + tmp_file.insert_log_entry(b'h') + tmp_file.insert_log_entry(b'i') + tmp_file.insert_log_entry(b'j') self.assertTrue(os.path.isfile(self.temp_name)) log_file = MessageLog(self.file_name, database_key=self.database_key) - self.assertEqual(list(log_file), [b"f", b"g", b"h", b"i", b"j"]) + self.assertEqual(list(log_file), [b'f', b'g', b'h', b'i', b'j']) self.assertFalse(os.path.isfile(self.temp_name)) def test_database_closing(self) -> None: - self.tfc_log_database.close_database() + self.message_log.close_database() # Test insertion would fail at this point with self.assertRaises(sqlite3.ProgrammingError): - self.tfc_log_database.c.execute( - f"""INSERT INTO log_entries (log_entry) VALUES (?)""", - (os.urandom(LOG_ENTRY_LENGTH),), - ) + self.message_log.c.execute(f"""INSERT INTO log_entries (log_entry) VALUES (?)""", + (os.urandom(LOG_ENTRY_LENGTH),)) # Test that TFC reopens closed database on write data = os.urandom(LOG_ENTRY_LENGTH) - self.assertIsNone(self.tfc_log_database.insert_log_entry(data)) + self.assertIsNone(self.message_log.insert_log_entry(data)) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/common/test_db_contacts.py b/tests/common/test_db_contacts.py index c7a3e02..9df3acd 100644 --- a/tests/common/test_db_contacts.py +++ b/tests/common/test_db_contacts.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -22,53 +22,30 @@ along with TFC. If not, see . 
import os import unittest -from src.common.crypto import encrypt_and_sign +from src.common.crypto import encrypt_and_sign from src.common.db_contacts import Contact, ContactList -from src.common.misc import ensure_dir -from src.common.statics import ( - CLEAR_ENTIRE_SCREEN, - CONTACT_LENGTH, - CURSOR_LEFT_UP_CORNER, - DIR_USER_DATA, - ECDHE, - FINGERPRINT_LENGTH, - KEX_STATUS_HAS_RX_PSK, - KEX_STATUS_LOCAL_KEY, - KEX_STATUS_NONE, - KEX_STATUS_NO_RX_PSK, - KEX_STATUS_PENDING, - KEX_STATUS_UNVERIFIED, - KEX_STATUS_VERIFIED, - LOCAL_ID, - POLY1305_TAG_LENGTH, - PSK, - XCHACHA20_NONCE_LENGTH, -) +from src.common.misc import ensure_dir +from src.common.statics import (CLEAR_ENTIRE_SCREEN, CONTACT_LENGTH, CURSOR_LEFT_UP_CORNER, DIR_USER_DATA, ECDHE, + FINGERPRINT_LENGTH, KEX_STATUS_HAS_RX_PSK, KEX_STATUS_LOCAL_KEY, KEX_STATUS_NONE, + KEX_STATUS_NO_RX_PSK, KEX_STATUS_PENDING, KEX_STATUS_UNVERIFIED, + KEX_STATUS_VERIFIED, LOCAL_ID, POLY1305_TAG_LENGTH, PSK, XCHACHA20_NONCE_LENGTH) from tests.mock_classes import create_contact, MasterKey, Settings -from tests.utils import ( - cd_unit_test, - cleanup, - nick_to_onion_address, - nick_to_pub_key, - tamper_file, - TFCTestCase, -) +from tests.utils import cd_unit_test, cleanup, nick_to_onion_address, nick_to_pub_key, tamper_file, TFCTestCase class TestContact(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.contact = Contact( - nick_to_pub_key("Bob"), - "Bob", - FINGERPRINT_LENGTH * b"\x01", - FINGERPRINT_LENGTH * b"\x02", - KEX_STATUS_UNVERIFIED, - log_messages=True, - file_reception=True, - notifications=True, - ) + self.contact = Contact(nick_to_pub_key('Bob'), + 'Bob', + FINGERPRINT_LENGTH * b'\x01', + FINGERPRINT_LENGTH * b'\x02', + KEX_STATUS_UNVERIFIED, + log_messages =True, + file_reception=True, + notifications =True) def test_contact_serialization_length_and_type(self) -> None: serialized = self.contact.serialize_c() @@ -80,28 +57,24 @@ class TestContact(unittest.TestCase): self.contact.kex_status = kex_status self.assertTrue(self.contact.uses_psk()) - for kex_status in [ - KEX_STATUS_NONE, - KEX_STATUS_PENDING, - KEX_STATUS_UNVERIFIED, - KEX_STATUS_VERIFIED, - KEX_STATUS_LOCAL_KEY, - ]: + for kex_status in [KEX_STATUS_NONE, KEX_STATUS_PENDING, KEX_STATUS_UNVERIFIED, + KEX_STATUS_VERIFIED, KEX_STATUS_LOCAL_KEY]: self.contact.kex_status = kex_status self.assertFalse(self.contact.uses_psk()) class TestContactList(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.unit_test_dir = cd_unit_test() - self.master_key = MasterKey() - self.settings = Settings() - self.file_name = f"{DIR_USER_DATA}{self.settings.software_operation}_contacts" - self.contact_list = ContactList(self.master_key, self.settings) - self.full_contact_list = ["Alice", "Bob", "Charlie", "David", "Eric", LOCAL_ID] + self.unit_test_dir = cd_unit_test() + self.master_key = MasterKey() + self.settings = Settings() + self.file_name = f'{DIR_USER_DATA}{self.settings.software_operation}_contacts' + self.contact_list = ContactList(self.master_key, self.settings) + self.full_contact_list = ['Alice', 'Bob', 'Charlie', 'David', 'Eric', LOCAL_ID] self.contact_list.contacts = list(map(create_contact, self.full_contact_list)) - self.real_contact_list = self.full_contact_list[:] + self.real_contact_list = self.full_contact_list[:] self.real_contact_list.remove(LOCAL_ID) def tearDown(self) -> None: @@ -112,42 +85,34 @@ class TestContactList(TFCTestCase): for c in self.contact_list: self.assertIsInstance(c, Contact) - def 
test_len_returns_the_number_of_contacts_and_excludes_the_local_key( - self, - ) -> None: - self.assertEqual(len(self.contact_list), len(self.real_contact_list)) + def test_len_returns_the_number_of_contacts_and_excludes_the_local_key(self) -> None: + self.assertEqual(len(self.contact_list), + len(self.real_contact_list)) def test_storing_and_loading_of_contacts(self) -> None: # Test store self.contact_list.store_contacts() - self.assertEqual( - os.path.getsize(self.file_name), - XCHACHA20_NONCE_LENGTH - + (self.settings.max_number_of_contacts + 1) * CONTACT_LENGTH - + POLY1305_TAG_LENGTH, - ) + self.assertEqual(os.path.getsize(self.file_name), + XCHACHA20_NONCE_LENGTH + + (self.settings.max_number_of_contacts + 1) * CONTACT_LENGTH + + POLY1305_TAG_LENGTH) # Test load contact_list2 = ContactList(self.master_key, self.settings) - self.assertEqual(len(contact_list2), len(self.real_contact_list)) + self.assertEqual(len(contact_list2), len(self.real_contact_list)) self.assertEqual(len(contact_list2.contacts), len(self.full_contact_list)) for c in contact_list2: self.assertIsInstance(c, Contact) def test_invalid_content_raises_critical_error(self) -> None: # Setup - invalid_data = b"a" - pt_bytes = b"".join( - [ - c.serialize_c() - for c in self.contact_list.contacts - + self.contact_list._dummy_contacts() - ] - ) - ct_bytes = encrypt_and_sign(pt_bytes + invalid_data, self.master_key.master_key) + invalid_data = b'a' + pt_bytes = b''.join([c.serialize_c() for c in self.contact_list.contacts + + self.contact_list._dummy_contacts()]) + ct_bytes = encrypt_and_sign(pt_bytes + invalid_data, self.master_key.master_key) ensure_dir(DIR_USER_DATA) - with open(self.file_name, "wb+") as f: + with open(self.file_name, 'wb+') as f: f.write(ct_bytes) # Test @@ -172,248 +137,184 @@ class TestContactList(TFCTestCase): def test_dummy_contacts(self) -> None: dummies = self.contact_list._dummy_contacts() - self.assertEqual( - len(dummies), - self.settings.max_number_of_contacts - len(self.real_contact_list), - ) + self.assertEqual(len(dummies), self.settings.max_number_of_contacts - len(self.real_contact_list)) for c in dummies: self.assertIsInstance(c, Contact) def test_add_contact(self) -> None: - tx_fingerprint = FINGERPRINT_LENGTH * b"\x03" - rx_fingerprint = FINGERPRINT_LENGTH * b"\x04" + tx_fingerprint = FINGERPRINT_LENGTH * b'\x03' + rx_fingerprint = FINGERPRINT_LENGTH * b'\x04' - self.assertIsNone( - self.contact_list.add_contact( - nick_to_pub_key("Faye"), - "Faye", - tx_fingerprint, - rx_fingerprint, - KEX_STATUS_UNVERIFIED, - self.settings.log_messages_by_default, - self.settings.accept_files_by_default, - self.settings.show_notifications_by_default, - ) - ) + self.assertIsNone(self.contact_list.add_contact(nick_to_pub_key('Faye'), + 'Faye', + tx_fingerprint, + rx_fingerprint, + KEX_STATUS_UNVERIFIED, + self.settings.log_messages_by_default, + self.settings.accept_files_by_default, + self.settings.show_notifications_by_default)) # Test new contact was stored by loading # the database from file to another object contact_list2 = ContactList(MasterKey(), Settings()) - faye = contact_list2.get_contact_by_pub_key(nick_to_pub_key("Faye")) + faye = contact_list2.get_contact_by_pub_key(nick_to_pub_key('Faye')) - self.assertEqual(len(self.contact_list), len(self.real_contact_list) + 1) + self.assertEqual(len(self.contact_list), len(self.real_contact_list)+1) self.assertIsInstance(faye, Contact) self.assertEqual(faye.tx_fingerprint, tx_fingerprint) self.assertEqual(faye.rx_fingerprint, rx_fingerprint) - 
self.assertEqual(faye.kex_status, KEX_STATUS_UNVERIFIED) + self.assertEqual(faye.kex_status, KEX_STATUS_UNVERIFIED) - self.assertEqual(faye.log_messages, self.settings.log_messages_by_default) + self.assertEqual(faye.log_messages, self.settings.log_messages_by_default) self.assertEqual(faye.file_reception, self.settings.accept_files_by_default) - self.assertEqual( - faye.notifications, self.settings.show_notifications_by_default - ) + self.assertEqual(faye.notifications, self.settings.show_notifications_by_default) def test_add_contact_that_replaces_an_existing_contact(self) -> None: - alice = self.contact_list.get_contact_by_pub_key(nick_to_pub_key("Alice")) - new_nick = "Alice2" - new_tx_fingerprint = FINGERPRINT_LENGTH * b"\x03" - new_rx_fingerprint = FINGERPRINT_LENGTH * b"\x04" + alice = self.contact_list.get_contact_by_pub_key(nick_to_pub_key('Alice')) + new_nick = 'Alice2' + new_tx_fingerprint = FINGERPRINT_LENGTH * b'\x03' + new_rx_fingerprint = FINGERPRINT_LENGTH * b'\x04' # Verify that existing nick, kex status and fingerprints are # different from those that will replace the existing data - self.assertNotEqual(alice.nick, new_nick) + self.assertNotEqual(alice.nick, new_nick) self.assertNotEqual(alice.tx_fingerprint, new_tx_fingerprint) self.assertNotEqual(alice.rx_fingerprint, new_rx_fingerprint) - self.assertNotEqual(alice.kex_status, KEX_STATUS_UNVERIFIED) + self.assertNotEqual(alice.kex_status, KEX_STATUS_UNVERIFIED) # Make sure each contact setting is opposite from default value - alice.log_messages = not self.settings.log_messages_by_default + alice.log_messages = not self.settings.log_messages_by_default alice.file_reception = not self.settings.accept_files_by_default - alice.notifications = not self.settings.show_notifications_by_default + alice.notifications = not self.settings.show_notifications_by_default # Replace the existing contact - self.assertIsNone( - self.contact_list.add_contact( - nick_to_pub_key("Alice"), - new_nick, - new_tx_fingerprint, - new_rx_fingerprint, - KEX_STATUS_UNVERIFIED, - self.settings.log_messages_by_default, - self.settings.accept_files_by_default, - self.settings.show_notifications_by_default, - ) - ) + self.assertIsNone(self.contact_list.add_contact(nick_to_pub_key('Alice'), + new_nick, + new_tx_fingerprint, + new_rx_fingerprint, + KEX_STATUS_UNVERIFIED, + self.settings.log_messages_by_default, + self.settings.accept_files_by_default, + self.settings.show_notifications_by_default)) # Load database to another object from # file to verify new contact was stored contact_list2 = ContactList(MasterKey(), Settings()) - alice = contact_list2.get_contact_by_pub_key(nick_to_pub_key("Alice")) + alice = contact_list2.get_contact_by_pub_key(nick_to_pub_key('Alice')) # Verify the content of loaded data self.assertEqual(len(contact_list2), len(self.real_contact_list)) self.assertIsInstance(alice, Contact) # Test replaced contact replaced nick, fingerprints and kex status - self.assertEqual(alice.nick, new_nick) + self.assertEqual(alice.nick, new_nick) self.assertEqual(alice.tx_fingerprint, new_tx_fingerprint) self.assertEqual(alice.rx_fingerprint, new_rx_fingerprint) - self.assertEqual(alice.kex_status, KEX_STATUS_UNVERIFIED) + self.assertEqual(alice.kex_status, KEX_STATUS_UNVERIFIED) # Test replaced contact kept settings set # to be opposite from default settings - self.assertNotEqual(alice.log_messages, self.settings.log_messages_by_default) + self.assertNotEqual(alice.log_messages, self.settings.log_messages_by_default) 
self.assertNotEqual(alice.file_reception, self.settings.accept_files_by_default) - self.assertNotEqual( - alice.notifications, self.settings.show_notifications_by_default - ) + self.assertNotEqual(alice.notifications, self.settings.show_notifications_by_default) def test_remove_contact_by_pub_key(self) -> None: # Verify both contacts exist - self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key("Bob"))) - self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key("Charlie"))) + self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key('Bob'))) + self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key('Charlie'))) - self.assertIsNone( - self.contact_list.remove_contact_by_pub_key(nick_to_pub_key("Bob")) - ) - self.assertFalse(self.contact_list.has_pub_key(nick_to_pub_key("Bob"))) - self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key("Charlie"))) + self.assertIsNone(self.contact_list.remove_contact_by_pub_key(nick_to_pub_key('Bob'))) + self.assertFalse(self.contact_list.has_pub_key(nick_to_pub_key('Bob'))) + self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key('Charlie'))) def test_remove_contact_by_address_or_nick(self) -> None: # Verify both contacts exist - self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key("Bob"))) - self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key("Charlie"))) + self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key('Bob'))) + self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key('Charlie'))) # Test removal with address - self.assertIsNone( - self.contact_list.remove_contact_by_address_or_nick( - nick_to_onion_address("Bob") - ) - ) - self.assertFalse(self.contact_list.has_pub_key(nick_to_pub_key("Bob"))) - self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key("Charlie"))) + self.assertIsNone(self.contact_list.remove_contact_by_address_or_nick(nick_to_onion_address('Bob'))) + self.assertFalse(self.contact_list.has_pub_key(nick_to_pub_key('Bob'))) + self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key('Charlie'))) # Test removal with nick - self.assertIsNone( - self.contact_list.remove_contact_by_address_or_nick("Charlie") - ) - self.assertFalse(self.contact_list.has_pub_key(nick_to_pub_key("Bob"))) - self.assertFalse(self.contact_list.has_pub_key(nick_to_pub_key("Charlie"))) + self.assertIsNone(self.contact_list.remove_contact_by_address_or_nick('Charlie')) + self.assertFalse(self.contact_list.has_pub_key(nick_to_pub_key('Bob'))) + self.assertFalse(self.contact_list.has_pub_key(nick_to_pub_key('Charlie'))) def test_get_contact_by_pub_key(self) -> None: - self.assertIs( - self.contact_list.get_contact_by_pub_key(nick_to_pub_key("Bob")), - self.contact_list.get_contact_by_address_or_nick("Bob"), - ) + self.assertIs(self.contact_list.get_contact_by_pub_key(nick_to_pub_key('Bob')), + self.contact_list.get_contact_by_address_or_nick('Bob')) - def test_get_contact_by_address_or_nick_returns_the_same_contact_object_with_address_and_nick( - self, - ) -> None: - for selector in [nick_to_onion_address("Bob"), "Bob"]: - self.assertIsInstance( - self.contact_list.get_contact_by_address_or_nick(selector), Contact - ) + def test_get_contact_by_address_or_nick_returns_the_same_contact_object_with_address_and_nick(self) -> None: + for selector in [nick_to_onion_address('Bob'), 'Bob']: + self.assertIsInstance(self.contact_list.get_contact_by_address_or_nick(selector), Contact) - self.assertIs( - self.contact_list.get_contact_by_address_or_nick("Bob"), - 
self.contact_list.get_contact_by_address_or_nick( - nick_to_onion_address("Bob") - ), - ) + self.assertIs(self.contact_list.get_contact_by_address_or_nick('Bob'), + self.contact_list.get_contact_by_address_or_nick(nick_to_onion_address('Bob'))) + + def test_get_nick_by_pub_key(self) -> None: + self.assertEqual(self.contact_list.get_nick_by_pub_key(nick_to_pub_key('Alice')), 'Alice') def test_get_list_of_contacts(self) -> None: - self.assertEqual( - len(self.contact_list.get_list_of_contacts()), len(self.real_contact_list) - ) + self.assertEqual(len(self.contact_list.get_list_of_contacts()), + len(self.real_contact_list)) for c in self.contact_list.get_list_of_contacts(): self.assertIsInstance(c, Contact) def test_get_list_of_addresses(self) -> None: - self.assertEqual( - self.contact_list.get_list_of_addresses(), - [ - nick_to_onion_address("Alice"), - nick_to_onion_address("Bob"), - nick_to_onion_address("Charlie"), - nick_to_onion_address("David"), - nick_to_onion_address("Eric"), - ], - ) + self.assertEqual(self.contact_list.get_list_of_addresses(), + [nick_to_onion_address('Alice'), + nick_to_onion_address('Bob'), + nick_to_onion_address('Charlie'), + nick_to_onion_address('David'), + nick_to_onion_address('Eric')]) def test_get_list_of_nicks(self) -> None: - self.assertEqual( - self.contact_list.get_list_of_nicks(), - ["Alice", "Bob", "Charlie", "David", "Eric"], - ) + self.assertEqual(self.contact_list.get_list_of_nicks(), + ['Alice', 'Bob', 'Charlie', 'David', 'Eric']) def test_get_list_of_pub_keys(self) -> None: - self.assertEqual( - self.contact_list.get_list_of_pub_keys(), - [ - nick_to_pub_key("Alice"), - nick_to_pub_key("Bob"), - nick_to_pub_key("Charlie"), - nick_to_pub_key("David"), - nick_to_pub_key("Eric"), - ], - ) + self.assertEqual(self.contact_list.get_list_of_pub_keys(), + [nick_to_pub_key('Alice'), + nick_to_pub_key('Bob'), + nick_to_pub_key('Charlie'), + nick_to_pub_key('David'), + nick_to_pub_key('Eric')]) def test_get_list_of_pending_pub_keys(self) -> None: # Set key exchange statuses to pending - for nick in ["Alice", "Bob"]: - contact = self.contact_list.get_contact_by_address_or_nick(nick) + for nick in ['Alice', 'Bob']: + contact = self.contact_list.get_contact_by_address_or_nick(nick) contact.kex_status = KEX_STATUS_PENDING # Test pending contacts are returned - self.assertEqual( - self.contact_list.get_list_of_pending_pub_keys(), - [nick_to_pub_key("Alice"), nick_to_pub_key("Bob")], - ) + self.assertEqual(self.contact_list.get_list_of_pending_pub_keys(), + [nick_to_pub_key('Alice'), + nick_to_pub_key('Bob')]) def test_get_list_of_existing_pub_keys(self) -> None: - self.contact_list.get_contact_by_address_or_nick( - "Alice" - ).kex_status = KEX_STATUS_UNVERIFIED - self.contact_list.get_contact_by_address_or_nick( - "Bob" - ).kex_status = KEX_STATUS_VERIFIED - self.contact_list.get_contact_by_address_or_nick( - "Charlie" - ).kex_status = KEX_STATUS_HAS_RX_PSK - self.contact_list.get_contact_by_address_or_nick( - "David" - ).kex_status = KEX_STATUS_NO_RX_PSK - self.contact_list.get_contact_by_address_or_nick( - "Eric" - ).kex_status = KEX_STATUS_PENDING + self.contact_list.get_contact_by_address_or_nick('Alice').kex_status = KEX_STATUS_UNVERIFIED + self.contact_list.get_contact_by_address_or_nick('Bob').kex_status = KEX_STATUS_VERIFIED + self.contact_list.get_contact_by_address_or_nick('Charlie').kex_status = KEX_STATUS_HAS_RX_PSK + self.contact_list.get_contact_by_address_or_nick('David').kex_status = KEX_STATUS_NO_RX_PSK + 
self.contact_list.get_contact_by_address_or_nick('Eric').kex_status = KEX_STATUS_PENDING - self.assertEqual( - self.contact_list.get_list_of_existing_pub_keys(), - [ - nick_to_pub_key("Alice"), - nick_to_pub_key("Bob"), - nick_to_pub_key("Charlie"), - nick_to_pub_key("David"), - ], - ) + self.assertEqual(self.contact_list.get_list_of_existing_pub_keys(), + [nick_to_pub_key('Alice'), + nick_to_pub_key('Bob'), + nick_to_pub_key('Charlie'), + nick_to_pub_key('David')]) def test_contact_selectors(self) -> None: - self.assertEqual( - self.contact_list.contact_selectors(), - [ - nick_to_onion_address("Alice"), - nick_to_onion_address("Bob"), - nick_to_onion_address("Charlie"), - nick_to_onion_address("David"), - nick_to_onion_address("Eric"), - "Alice", - "Bob", - "Charlie", - "David", - "Eric", - ], - ) + self.assertEqual(self.contact_list.contact_selectors(), + [nick_to_onion_address('Alice'), + nick_to_onion_address('Bob'), + nick_to_onion_address('Charlie'), + nick_to_onion_address('David'), + nick_to_onion_address('Eric'), + 'Alice', 'Bob', 'Charlie', 'David', 'Eric']) def test_has_contacts(self) -> None: self.assertTrue(self.contact_list.has_contacts()) @@ -427,18 +328,18 @@ class TestContactList(TFCTestCase): self.assertTrue(self.contact_list.has_only_pending_contacts()) # Change one from pending - alice = self.contact_list.get_contact_by_address_or_nick("Alice") + alice = self.contact_list.get_contact_by_address_or_nick('Alice') alice.kex_status = KEX_STATUS_UNVERIFIED self.assertFalse(self.contact_list.has_only_pending_contacts()) def test_has_pub_key(self) -> None: self.contact_list.contacts = [] - self.assertFalse(self.contact_list.has_pub_key(nick_to_pub_key("Bob"))) - self.assertFalse(self.contact_list.has_pub_key(nick_to_pub_key("Bob"))) + self.assertFalse(self.contact_list.has_pub_key(nick_to_pub_key('Bob'))) + self.assertFalse(self.contact_list.has_pub_key(nick_to_pub_key('Bob'))) - self.contact_list.contacts = list(map(create_contact, ["Bob", "Charlie"])) - self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key("Bob"))) - self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key("Charlie"))) + self.contact_list.contacts = list(map(create_contact, ['Bob', 'Charlie'])) + self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key('Bob'))) + self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key('Charlie'))) def test_has_local_contact(self) -> None: self.contact_list.contacts = [] @@ -449,34 +350,15 @@ class TestContactList(TFCTestCase): def test_print_contacts(self) -> None: self.contact_list.contacts.append(create_contact(LOCAL_ID)) - self.contact_list.get_contact_by_pub_key( - nick_to_pub_key("Alice") - ).log_messages = False - self.contact_list.get_contact_by_pub_key( - nick_to_pub_key("Alice") - ).kex_status = KEX_STATUS_PENDING - self.contact_list.get_contact_by_pub_key( - nick_to_pub_key("Bob") - ).notifications = False - self.contact_list.get_contact_by_pub_key( - nick_to_pub_key("Charlie") - ).kex_status = KEX_STATUS_UNVERIFIED - self.contact_list.get_contact_by_pub_key( - nick_to_pub_key("Bob") - ).file_reception = False - self.contact_list.get_contact_by_pub_key( - nick_to_pub_key("Bob") - ).kex_status = KEX_STATUS_VERIFIED - self.contact_list.get_contact_by_pub_key( - nick_to_pub_key("David") - ).rx_fingerprint = bytes(FINGERPRINT_LENGTH) - self.contact_list.get_contact_by_pub_key( - nick_to_pub_key("David") - ).kex_status = bytes(KEX_STATUS_NO_RX_PSK) - self.assert_prints( - CLEAR_ENTIRE_SCREEN - + CURSOR_LEFT_UP_CORNER - + f"""\ + 
self.contact_list.get_contact_by_pub_key(nick_to_pub_key('Alice')).log_messages = False + self.contact_list.get_contact_by_pub_key(nick_to_pub_key('Alice')).kex_status = KEX_STATUS_PENDING + self.contact_list.get_contact_by_pub_key(nick_to_pub_key('Bob')).notifications = False + self.contact_list.get_contact_by_pub_key(nick_to_pub_key('Charlie')).kex_status = KEX_STATUS_UNVERIFIED + self.contact_list.get_contact_by_pub_key(nick_to_pub_key('Bob')).file_reception = False + self.contact_list.get_contact_by_pub_key(nick_to_pub_key('Bob')).kex_status = KEX_STATUS_VERIFIED + self.contact_list.get_contact_by_pub_key(nick_to_pub_key('David')).rx_fingerprint = bytes(FINGERPRINT_LENGTH) + self.contact_list.get_contact_by_pub_key(nick_to_pub_key('David')).kex_status = bytes(KEX_STATUS_NO_RX_PSK) + self.assert_prints(CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER + f"""\ Contact Account Logging Notify Files Key Ex ──────────────────────────────────────────────────────────────────────────────── @@ -487,10 +369,8 @@ David u22uy Yes Yes Accept {PSK} (No contact key) Eric jszzy Yes Yes Accept {ECDHE} (Verified) -""", - self.contact_list.print_contacts, - ) +""", self.contact_list.print_contacts) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/common/test_db_groups.py b/tests/common/test_db_groups.py index 9b671cd..af2a05b 100644 --- a/tests/common/test_db_groups.py +++ b/tests/common/test_db_groups.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -22,47 +22,33 @@ along with TFC. If not, see . import os import unittest -from src.common.crypto import encrypt_and_sign +from src.common.crypto import encrypt_and_sign from src.common.db_contacts import Contact, ContactList -from src.common.db_groups import Group, GroupList -from src.common.encoding import b58encode -from src.common.misc import ensure_dir -from src.common.statics import ( - DIR_USER_DATA, - GROUP_DB_HEADER_LENGTH, - GROUP_ID_LENGTH, - GROUP_STATIC_LENGTH, - ONION_SERVICE_PUBLIC_KEY_LENGTH, - POLY1305_TAG_LENGTH, - XCHACHA20_NONCE_LENGTH, -) +from src.common.db_groups import Group, GroupList +from src.common.encoding import b58encode +from src.common.misc import ensure_dir +from src.common.statics import (DIR_USER_DATA, GROUP_DB_HEADER_LENGTH, GROUP_ID_LENGTH, GROUP_STATIC_LENGTH, + ONION_SERVICE_PUBLIC_KEY_LENGTH, POLY1305_TAG_LENGTH, XCHACHA20_NONCE_LENGTH) -from tests.mock_classes import ( - create_contact, - group_name_to_group_id, - MasterKey, - nick_to_pub_key, - Settings, -) -from tests.utils import cd_unit_test, cleanup, tamper_file, TFCTestCase +from tests.mock_classes import create_contact, group_name_to_group_id, MasterKey, nick_to_pub_key, Settings +from tests.utils import cd_unit_test, cleanup, tamper_file, TFCTestCase class TestGroup(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" self.unit_test_dir = cd_unit_test() - self.nicks = ["Alice", "Bob", "Charlie"] - members = list(map(create_contact, self.nicks)) - self.settings = Settings() - self.group = Group( - name="test_group", - group_id=group_name_to_group_id("test_group"), - log_messages=False, - notifications=False, - members=members, - settings=self.settings, - store_groups=lambda: None, - ) + self.nicks = ['Alice', 'Bob', 'Charlie'] + members = list(map(create_contact, self.nicks)) + self.settings = Settings() + self.group = Group(name ='test_group', + group_id 
=group_name_to_group_id('test_group'), + log_messages =False, + notifications=False, + members =members, + settings =self.settings, + store_groups =lambda: None) ensure_dir(DIR_USER_DATA) def tearDown(self) -> None: @@ -79,75 +65,51 @@ class TestGroup(unittest.TestCase): def test_group_serialization_length_and_type(self) -> None: serialized = self.group.serialize_g() self.assertIsInstance(serialized, bytes) - self.assertEqual( - len(serialized), - GROUP_STATIC_LENGTH - + ( - self.settings.max_number_of_group_members - * ONION_SERVICE_PUBLIC_KEY_LENGTH - ), - ) + self.assertEqual(len(serialized), GROUP_STATIC_LENGTH + (self.settings.max_number_of_group_members + * ONION_SERVICE_PUBLIC_KEY_LENGTH)) def test_add_members(self) -> None: # Test members to be added are not already in group - self.assertFalse(self.group.has_member(nick_to_pub_key("David"))) - self.assertFalse(self.group.has_member(nick_to_pub_key("Eric"))) + self.assertFalse(self.group.has_member(nick_to_pub_key('David'))) + self.assertFalse(self.group.has_member(nick_to_pub_key('Eric'))) - self.assertIsNone( - self.group.add_members( - list(map(create_contact, ["Alice", "David", "Eric"])) - ) - ) + self.assertIsNone(self.group.add_members(list(map(create_contact, ['Alice', 'David', 'Eric'])))) # Test new members were added - self.assertTrue(self.group.has_member(nick_to_pub_key("David"))) - self.assertTrue(self.group.has_member(nick_to_pub_key("Eric"))) + self.assertTrue(self.group.has_member(nick_to_pub_key('David'))) + self.assertTrue(self.group.has_member(nick_to_pub_key('Eric'))) # Test Alice was not added twice - self.assertEqual( - len(self.group), len(["Alice", "Bob", "Charlie", "David", "Eric"]) - ) + self.assertEqual(len(self.group), len(['Alice', 'Bob', 'Charlie', 'David', 'Eric'])) def test_remove_members(self) -> None: # Test members to be removed are part of group - self.assertTrue(self.group.has_member(nick_to_pub_key("Alice"))) - self.assertTrue(self.group.has_member(nick_to_pub_key("Bob"))) - self.assertTrue(self.group.has_member(nick_to_pub_key("Charlie"))) + self.assertTrue(self.group.has_member(nick_to_pub_key('Alice'))) + self.assertTrue(self.group.has_member(nick_to_pub_key('Bob'))) + self.assertTrue(self.group.has_member(nick_to_pub_key('Charlie'))) # Test first attempt to remove returns True (because Charlie was removed) - self.assertTrue( - self.group.remove_members( - [nick_to_pub_key("Charlie"), nick_to_pub_key("Unknown")] - ) - ) + self.assertTrue(self.group.remove_members([nick_to_pub_key('Charlie'), nick_to_pub_key('Unknown')])) # Test second attempt to remove returns False (because no-one was removed) - self.assertFalse( - self.group.remove_members( - [nick_to_pub_key("Charlie"), nick_to_pub_key("Unknown")] - ) - ) + self.assertFalse(self.group.remove_members([nick_to_pub_key('Charlie'), nick_to_pub_key('Unknown')])) # Test Charlie was removed - self.assertFalse(self.group.has_member(nick_to_pub_key("Charlie"))) + self.assertFalse(self.group.has_member(nick_to_pub_key('Charlie'))) # Test no other members were removed - self.assertTrue(self.group.has_member(nick_to_pub_key("Alice"))) - self.assertTrue(self.group.has_member(nick_to_pub_key("Bob"))) + self.assertTrue(self.group.has_member(nick_to_pub_key('Alice'))) + self.assertTrue(self.group.has_member(nick_to_pub_key('Bob'))) def test_get_list_of_member_pub_keys(self) -> None: - self.assertEqual( - first=self.group.get_list_of_member_pub_keys(), - second=[ - nick_to_pub_key("Alice"), - nick_to_pub_key("Bob"), - nick_to_pub_key("Charlie"), - ], - ) + 
self.assertEqual(first=self.group.get_list_of_member_pub_keys(), + second=[nick_to_pub_key('Alice'), + nick_to_pub_key('Bob'), + nick_to_pub_key('Charlie')]) def test_has_member(self) -> None: - self.assertTrue(self.group.has_member(nick_to_pub_key("Charlie"))) - self.assertFalse(self.group.has_member(nick_to_pub_key("David"))) + self.assertTrue(self.group.has_member(nick_to_pub_key('Charlie'))) + self.assertFalse(self.group.has_member(nick_to_pub_key('David'))) def test_has_members(self) -> None: self.assertFalse(self.group.empty()) @@ -156,62 +118,36 @@ class TestGroup(unittest.TestCase): class TestGroupList(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" self.unit_test_dir = cd_unit_test() - self.master_key = MasterKey() - self.settings = Settings() - self.file_name = f"{DIR_USER_DATA}{self.settings.software_operation}_groups" - self.contact_list = ContactList(self.master_key, self.settings) - self.group_list = GroupList(self.master_key, self.settings, self.contact_list) - self.nicks = [ - "Alice", - "Bob", - "Charlie", - "David", - "Eric", - "Fido", - "Guido", - "Heidi", - "Ivan", - "Joana", - "Karol", - ] - self.group_names = [ - "test_group_1", - "test_group_2", - "test_group_3", - "test_group_4", - "test_group_5", - "test_group_6", - "test_group_7", - "test_group_8", - "test_group_9", - "test_group_10", - "test_group_11", - ] - members = list(map(create_contact, self.nicks)) + self.master_key = MasterKey() + self.settings = Settings() + self.file_name = f'{DIR_USER_DATA}{self.settings.software_operation}_groups' + self.contact_list = ContactList(self.master_key, self.settings) + self.group_list = GroupList(self.master_key, self.settings, self.contact_list) + self.nicks = ['Alice', 'Bob', 'Charlie', 'David', 'Eric', + 'Fido', 'Guido', 'Heidi', 'Ivan', 'Joana', 'Karol'] + self.group_names = ['test_group_1', 'test_group_2', 'test_group_3', 'test_group_4', 'test_group_5', + 'test_group_6', 'test_group_7', 'test_group_8', 'test_group_9', 'test_group_10', + 'test_group_11'] + members = list(map(create_contact, self.nicks)) self.contact_list.contacts = members - self.group_list.groups = [ - Group( - name=name, - group_id=group_name_to_group_id(name), - log_messages=False, - notifications=False, - members=members, - settings=self.settings, - store_groups=self.group_list.store_groups, - ) - for name in self.group_names - ] + self.group_list.groups = \ + [Group(name =name, + group_id =group_name_to_group_id(name), + log_messages =False, + notifications=False, + members =members, + settings =self.settings, + store_groups =self.group_list.store_groups) + for name in self.group_names] - self.single_member_data_len = ( - GROUP_STATIC_LENGTH - + self.settings.max_number_of_group_members - * ONION_SERVICE_PUBLIC_KEY_LENGTH - ) + self.single_member_data_len = (GROUP_STATIC_LENGTH + + self.settings.max_number_of_group_members * ONION_SERVICE_PUBLIC_KEY_LENGTH) def tearDown(self) -> None: """Post-test actions.""" @@ -228,44 +164,37 @@ class TestGroupList(TFCTestCase): self.group_list.store_groups() self.assertTrue(os.path.isfile(self.file_name)) - self.assertEqual( - os.path.getsize(self.file_name), - XCHACHA20_NONCE_LENGTH - + GROUP_DB_HEADER_LENGTH - + self.settings.max_number_of_groups * self.single_member_data_len - + POLY1305_TAG_LENGTH, - ) + self.assertEqual(os.path.getsize(self.file_name), + XCHACHA20_NONCE_LENGTH + + GROUP_DB_HEADER_LENGTH + + self.settings.max_number_of_groups * self.single_member_data_len + + POLY1305_TAG_LENGTH) # Reduce setting values from 20 to 10 - 
self.settings.max_number_of_groups = 10 + self.settings.max_number_of_groups = 10 self.settings.max_number_of_group_members = 10 group_list2 = GroupList(self.master_key, self.settings, self.contact_list) self.assertEqual(len(group_list2), 11) # Check that `_load_groups()` increased setting values back to 20 so it fits the 11 groups - self.assertEqual(self.settings.max_number_of_groups, 20) + self.assertEqual(self.settings.max_number_of_groups, 20) self.assertEqual(self.settings.max_number_of_group_members, 20) # Check that removed contact from contact list updates group - self.contact_list.remove_contact_by_address_or_nick("Alice") + self.contact_list.remove_contact_by_address_or_nick('Alice') group_list3 = GroupList(self.master_key, self.settings, self.contact_list) - self.assertEqual(len(group_list3.get_group("test_group_1").members), 10) + self.assertEqual(len(group_list3.get_group('test_group_1').members), 10) def test_invalid_content_raises_critical_error(self) -> None: # Setup - invalid_data = b"a" - pt_bytes = self.group_list._generate_group_db_header() - pt_bytes += b"".join( - [ - g.serialize_g() - for g in (self.group_list.groups + self.group_list._dummy_groups()) - ] - ) - ct_bytes = encrypt_and_sign(pt_bytes + invalid_data, self.master_key.master_key) + invalid_data = b'a' + pt_bytes = self.group_list._generate_group_db_header() + pt_bytes += b''.join([g.serialize_g() for g in (self.group_list.groups + self.group_list._dummy_groups())]) + ct_bytes = encrypt_and_sign(pt_bytes + invalid_data, self.master_key.master_key) ensure_dir(DIR_USER_DATA) - with open(self.file_name, "wb+") as f: + with open(self.file_name, 'wb+') as f: f.write(ct_bytes) # Test @@ -276,9 +205,7 @@ class TestGroupList(TFCTestCase): self.group_list.store_groups() # Test reading works normally - self.assertIsInstance( - GroupList(self.master_key, self.settings, self.contact_list), GroupList - ) + self.assertIsInstance(GroupList(self.master_key, self.settings, self.contact_list), GroupList) # Test loading of the tampered database raises CriticalError tamper_file(self.file_name, tamper_size=1) @@ -286,26 +213,17 @@ class TestGroupList(TFCTestCase): GroupList(self.master_key, self.settings, self.contact_list) def test_check_db_settings(self) -> None: - self.assertFalse( - self.group_list._check_db_settings( - number_of_actual_groups=self.settings.max_number_of_groups, - members_in_largest_group=self.settings.max_number_of_group_members, - ) - ) + self.assertFalse(self.group_list._check_db_settings( + number_of_actual_groups=self.settings.max_number_of_groups, + members_in_largest_group=self.settings.max_number_of_group_members)) - self.assertTrue( - self.group_list._check_db_settings( - number_of_actual_groups=self.settings.max_number_of_groups + 1, - members_in_largest_group=self.settings.max_number_of_group_members, - ) - ) + self.assertTrue(self.group_list._check_db_settings( + number_of_actual_groups=self.settings.max_number_of_groups + 1, + members_in_largest_group=self.settings.max_number_of_group_members)) - self.assertTrue( - self.group_list._check_db_settings( - number_of_actual_groups=self.settings.max_number_of_groups, - members_in_largest_group=self.settings.max_number_of_group_members + 1, - ) - ) + self.assertTrue(self.group_list._check_db_settings( + number_of_actual_groups=self.settings.max_number_of_groups, + members_in_largest_group=self.settings.max_number_of_group_members + 1)) def test_generate_group_db_header(self) -> None: header = self.group_list._generate_group_db_header() @@ -319,111 
+237,88 @@ class TestGroupList(TFCTestCase): def test_dummy_groups(self) -> None: dummies = self.group_list._dummy_groups() - self.assertEqual( - len(dummies), self.settings.max_number_of_contacts - len(self.nicks) - ) + self.assertEqual(len(dummies), self.settings.max_number_of_contacts - len(self.nicks)) for g in dummies: self.assertIsInstance(g, Group) def test_add_group(self) -> None: - members = [create_contact("Laura")] - self.group_list.add_group( - "test_group_12", bytes(GROUP_ID_LENGTH), False, False, members - ) - self.group_list.add_group( - "test_group_12", bytes(GROUP_ID_LENGTH), False, True, members - ) - self.assertTrue(self.group_list.get_group("test_group_12").notifications) - self.assertEqual(len(self.group_list), len(self.group_names) + 1) + members = [create_contact('Laura')] + self.group_list.add_group('test_group_12', bytes(GROUP_ID_LENGTH), False, False, members) + self.group_list.add_group('test_group_12', bytes(GROUP_ID_LENGTH), False, True, members) + self.assertTrue(self.group_list.get_group('test_group_12').notifications) + self.assertEqual(len(self.group_list), len(self.group_names)+1) def test_remove_group_by_name(self) -> None: self.assertEqual(len(self.group_list), len(self.group_names)) # Remove non-existing group - self.assertIsNone(self.group_list.remove_group_by_name("test_group_12")) + self.assertIsNone(self.group_list.remove_group_by_name('test_group_12')) self.assertEqual(len(self.group_list), len(self.group_names)) # Remove existing group - self.assertIsNone(self.group_list.remove_group_by_name("test_group_11")) - self.assertEqual(len(self.group_list), len(self.group_names) - 1) + self.assertIsNone(self.group_list.remove_group_by_name('test_group_11')) + self.assertEqual(len(self.group_list), len(self.group_names)-1) def test_remove_group_by_id(self) -> None: self.assertEqual(len(self.group_list), len(self.group_names)) # Remove non-existing group - self.assertIsNone( - self.group_list.remove_group_by_id(group_name_to_group_id("test_group_12")) - ) + self.assertIsNone(self.group_list.remove_group_by_id(group_name_to_group_id('test_group_12'))) self.assertEqual(len(self.group_list), len(self.group_names)) # Remove existing group - self.assertIsNone( - self.group_list.remove_group_by_id(group_name_to_group_id("test_group_11")) - ) - self.assertEqual(len(self.group_list), len(self.group_names) - 1) + self.assertIsNone(self.group_list.remove_group_by_id(group_name_to_group_id('test_group_11'))) + self.assertEqual(len(self.group_list), len(self.group_names)-1) def test_get_group(self) -> None: - self.assertEqual(self.group_list.get_group("test_group_3").name, "test_group_3") + self.assertEqual(self.group_list.get_group('test_group_3').name, 'test_group_3') def test_get_group_by_id(self) -> None: - members = [create_contact("Laura")] + members = [create_contact('Laura')] group_id = os.urandom(GROUP_ID_LENGTH) - self.group_list.add_group("test_group_12", group_id, False, False, members) - self.assertEqual( - self.group_list.get_group_by_id(group_id).name, "test_group_12" - ) + self.group_list.add_group('test_group_12', group_id, False, False, members) + self.assertEqual(self.group_list.get_group_by_id(group_id).name, 'test_group_12') def test_get_list_of_group_names(self) -> None: self.assertEqual(self.group_list.get_list_of_group_names(), self.group_names) def test_get_list_of_group_ids(self) -> None: - self.assertEqual( - self.group_list.get_list_of_group_ids(), - list(map(group_name_to_group_id, self.group_names)), - ) + 
self.assertEqual(self.group_list.get_list_of_group_ids(), + list(map(group_name_to_group_id, self.group_names))) def test_get_list_of_hr_group_ids(self) -> None: - self.assertEqual( - self.group_list.get_list_of_hr_group_ids(), - [ - b58encode(gid) - for gid in list(map(group_name_to_group_id, self.group_names)) - ], - ) + self.assertEqual(self.group_list.get_list_of_hr_group_ids(), + [b58encode(gid) for gid in list(map(group_name_to_group_id, self.group_names))]) def test_get_group_members(self) -> None: - members = self.group_list.get_group_members( - group_name_to_group_id("test_group_1") - ) + members = self.group_list.get_group_members(group_name_to_group_id('test_group_1')) for c in members: self.assertIsInstance(c, Contact) def test_has_group(self) -> None: - self.assertTrue(self.group_list.has_group("test_group_11")) - self.assertFalse(self.group_list.has_group("test_group_12")) + self.assertTrue(self.group_list.has_group('test_group_11')) + self.assertFalse(self.group_list.has_group('test_group_12')) def test_has_group_id(self) -> None: - members = [create_contact("Laura")] + members = [create_contact('Laura')] group_id = os.urandom(GROUP_ID_LENGTH) self.assertFalse(self.group_list.has_group_id(group_id)) - self.group_list.add_group("test_group_12", group_id, False, False, members) + self.group_list.add_group('test_group_12', group_id, False, False, members) self.assertTrue(self.group_list.has_group_id(group_id)) def test_largest_group(self) -> None: self.assertEqual(self.group_list.largest_group(), len(self.nicks)) def test_print_group(self) -> None: - self.group_list.get_group("test_group_1").name = "group" - self.group_list.get_group("test_group_2").log_messages = True + self.group_list.get_group("test_group_1").name = "group" + self.group_list.get_group("test_group_2").log_messages = True self.group_list.get_group("test_group_3").notifications = True - self.group_list.get_group("test_group_4").log_messages = True + self.group_list.get_group("test_group_4").log_messages = True self.group_list.get_group("test_group_4").notifications = True - self.group_list.get_group("test_group_5").members = [] - self.group_list.get_group("test_group_6").members = list( - map(create_contact, ["Alice", "Bob", "Charlie", "David", "Eric", "Fido"]) - ) - self.assert_prints( - """\ + self.group_list.get_group("test_group_5").members = [] + self.group_list.get_group("test_group_6").members = list(map(create_contact, ['Alice', 'Bob', 'Charlie', + 'David', 'Eric', 'Fido'])) + self.assert_prints("""\ Group Group ID Logging Notify Members ──────────────────────────────────────────────────────────────────────────────── group 2drs4c4VcDdrP No No Alice, Bob, Charlie, @@ -477,10 +372,8 @@ test_group_11 2e6vAGmHmSEEJ No No Alice, Bob, Charlie, Joana, Karol -""", - self.group_list.print_groups, - ) +""", self.group_list.print_groups) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/common/test_db_keys.py b/tests/common/test_db_keys.py index 093d425..bf3ca51 100644 --- a/tests/common/test_db_keys.py +++ b/tests/common/test_db_keys.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
@@ -25,48 +25,33 @@ import time import unittest from unittest import mock +from typing import Any -from src.common.crypto import blake2b, encrypt_and_sign -from src.common.db_keys import KeyList, KeySet +from src.common.crypto import blake2b, encrypt_and_sign +from src.common.db_keys import KeyList, KeySet from src.common.encoding import int_to_bytes -from src.common.misc import ensure_dir -from src.common.statics import ( - DIR_USER_DATA, - INITIAL_HARAC, - KDB_ADD_ENTRY_HEADER, - KDB_HALT_ACK_HEADER, - KDB_M_KEY_CHANGE_HALT_HEADER, - KDB_REMOVE_ENTRY_HEADER, - KDB_UPDATE_SIZE_HEADER, - KEY_MANAGEMENT_QUEUE, - KEY_MGMT_ACK_QUEUE, - KEYSET_LENGTH, - LOCAL_ID, - LOCAL_PUBKEY, - POLY1305_TAG_LENGTH, - RX, - SYMMETRIC_KEY_LENGTH, - TX, - XCHACHA20_NONCE_LENGTH, -) +from src.common.misc import ensure_dir +from src.common.statics import (DIR_USER_DATA, INITIAL_HARAC, KDB_ADD_ENTRY_HEADER, KDB_HALT_ACK_HEADER, + KDB_M_KEY_CHANGE_HALT_HEADER, KDB_REMOVE_ENTRY_HEADER, KDB_UPDATE_SIZE_HEADER, + KEY_MANAGEMENT_QUEUE, KEY_MGMT_ACK_QUEUE, KEYSET_LENGTH, LOCAL_ID, LOCAL_PUBKEY, + POLY1305_TAG_LENGTH, RX, SYMMETRIC_KEY_LENGTH, TX, XCHACHA20_NONCE_LENGTH) from tests.mock_classes import create_keyset, MasterKey, nick_to_pub_key, Settings -from tests.utils import cd_unit_test, cleanup, tamper_file, gen_queue_dict +from tests.utils import cd_unit_test, cleanup, tamper_file, gen_queue_dict class TestKeySet(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.keyset = KeySet( - onion_pub_key=nick_to_pub_key("Alice"), - tx_mk=bytes(SYMMETRIC_KEY_LENGTH), - rx_mk=bytes(SYMMETRIC_KEY_LENGTH), - tx_hk=bytes(SYMMETRIC_KEY_LENGTH), - rx_hk=bytes(SYMMETRIC_KEY_LENGTH), - tx_harac=INITIAL_HARAC, - rx_harac=INITIAL_HARAC, - store_keys=lambda: None, - ) + self.keyset = KeySet(onion_pub_key=nick_to_pub_key('Alice'), + tx_mk=bytes(SYMMETRIC_KEY_LENGTH), + rx_mk=bytes(SYMMETRIC_KEY_LENGTH), + tx_hk=bytes(SYMMETRIC_KEY_LENGTH), + rx_hk=bytes(SYMMETRIC_KEY_LENGTH), + tx_harac=INITIAL_HARAC, + rx_harac=INITIAL_HARAC, + store_keys=lambda: None) def test_keyset_serialization_length_and_type(self) -> None: serialized = self.keyset.serialize_k() @@ -75,13 +60,8 @@ class TestKeySet(unittest.TestCase): def test_rotate_tx_mk(self) -> None: self.assertIsNone(self.keyset.rotate_tx_mk()) - self.assertEqual( - self.keyset.tx_mk, - blake2b( - bytes(SYMMETRIC_KEY_LENGTH) + int_to_bytes(INITIAL_HARAC), - digest_size=SYMMETRIC_KEY_LENGTH, - ), - ) + self.assertEqual(self.keyset.tx_mk, blake2b(bytes(SYMMETRIC_KEY_LENGTH) + int_to_bytes(INITIAL_HARAC), + digest_size=SYMMETRIC_KEY_LENGTH)) self.assertEqual(self.keyset.rx_mk, bytes(SYMMETRIC_KEY_LENGTH)) self.assertEqual(self.keyset.tx_hk, bytes(SYMMETRIC_KEY_LENGTH)) self.assertEqual(self.keyset.rx_hk, bytes(SYMMETRIC_KEY_LENGTH)) @@ -89,8 +69,8 @@ class TestKeySet(unittest.TestCase): self.assertEqual(self.keyset.rx_harac, INITIAL_HARAC) def test_update_tx_mk(self) -> None: - self.keyset.update_mk(TX, SYMMETRIC_KEY_LENGTH * b"\x01", 2) - self.assertEqual(self.keyset.tx_mk, SYMMETRIC_KEY_LENGTH * b"\x01") + self.keyset.update_mk(TX, SYMMETRIC_KEY_LENGTH * b'\x01', 2) + self.assertEqual(self.keyset.tx_mk, SYMMETRIC_KEY_LENGTH * b'\x01') self.assertEqual(self.keyset.rx_mk, bytes(SYMMETRIC_KEY_LENGTH)) self.assertEqual(self.keyset.tx_hk, bytes(SYMMETRIC_KEY_LENGTH)) self.assertEqual(self.keyset.rx_hk, bytes(SYMMETRIC_KEY_LENGTH)) @@ -98,33 +78,31 @@ class TestKeySet(unittest.TestCase): self.assertEqual(self.keyset.rx_harac, INITIAL_HARAC) def test_update_rx_mk(self) -> 
None: - self.keyset.update_mk(RX, SYMMETRIC_KEY_LENGTH * b"\x01", 2) + self.keyset.update_mk(RX, SYMMETRIC_KEY_LENGTH * b'\x01', 2) self.assertEqual(self.keyset.tx_mk, bytes(SYMMETRIC_KEY_LENGTH)) - self.assertEqual(self.keyset.rx_mk, SYMMETRIC_KEY_LENGTH * b"\x01") + self.assertEqual(self.keyset.rx_mk, SYMMETRIC_KEY_LENGTH * b'\x01') self.assertEqual(self.keyset.tx_hk, bytes(SYMMETRIC_KEY_LENGTH)) self.assertEqual(self.keyset.rx_hk, bytes(SYMMETRIC_KEY_LENGTH)) self.assertEqual(self.keyset.tx_harac, INITIAL_HARAC) self.assertEqual(self.keyset.rx_harac, 2) def test_invalid_direction_raises_critical_error(self) -> None: - invalid_direction = "sx" + invalid_direction = 'sx' with self.assertRaises(SystemExit): - self.keyset.update_mk(invalid_direction, SYMMETRIC_KEY_LENGTH * b"\x01", 2) + self.keyset.update_mk(invalid_direction, SYMMETRIC_KEY_LENGTH * b'\x01', 2) class TestKeyList(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.unit_test_dir = cd_unit_test() - self.master_key = MasterKey() - self.settings = Settings() - self.file_name = f"{DIR_USER_DATA}{self.settings.software_operation}_keys" - self.keylist = KeyList(self.master_key, self.settings) - self.full_contact_list = ["Alice", "Bob", "Charlie", LOCAL_ID] - self.keylist.keysets = [ - create_keyset(n, store_f=self.keylist.store_keys) - for n in self.full_contact_list - ] + self.unit_test_dir = cd_unit_test() + self.master_key = MasterKey() + self.settings = Settings() + self.file_name = f'{DIR_USER_DATA}{self.settings.software_operation}_keys' + self.keylist = KeyList(self.master_key, self.settings) + self.full_contact_list = ['Alice', 'Bob', 'Charlie', LOCAL_ID] + self.keylist.keysets = [create_keyset(n, store_f=self.keylist.store_keys) for n in self.full_contact_list] def tearDown(self) -> None: """Post-test actions.""" @@ -133,12 +111,10 @@ class TestKeyList(unittest.TestCase): def test_storing_and_loading_of_keysets(self) -> None: # Test store self.keylist.store_keys() - self.assertEqual( - os.path.getsize(self.file_name), - XCHACHA20_NONCE_LENGTH - + (self.settings.max_number_of_contacts + 1) * KEYSET_LENGTH - + POLY1305_TAG_LENGTH, - ) + self.assertEqual(os.path.getsize(self.file_name), + XCHACHA20_NONCE_LENGTH + + (self.settings.max_number_of_contacts+1) * KEYSET_LENGTH + + POLY1305_TAG_LENGTH) # Test load key_list2 = KeyList(MasterKey(), Settings()) @@ -157,17 +133,12 @@ class TestKeyList(unittest.TestCase): def test_invalid_content_raises_critical_error(self) -> None: # Setup - invalid_data = b"a" - pt_bytes = b"".join( - [ - k.serialize_k() - for k in self.keylist.keysets + self.keylist._dummy_keysets() - ] - ) - ct_bytes = encrypt_and_sign(pt_bytes + invalid_data, self.master_key.master_key) + invalid_data = b'a' + pt_bytes = b''.join([k.serialize_k() for k in self.keylist.keysets + self.keylist._dummy_keysets()]) + ct_bytes = encrypt_and_sign(pt_bytes + invalid_data, self.master_key.master_key) ensure_dir(DIR_USER_DATA) - with open(self.file_name, "wb+") as f: + with open(self.file_name, 'wb+') as f: f.write(ct_bytes) # Test @@ -181,24 +152,21 @@ class TestKeyList(unittest.TestCase): def test_dummy_keysets(self) -> None: dummies = self.keylist._dummy_keysets() - self.assertEqual( - len(dummies), - (self.settings.max_number_of_contacts + 1) - len(self.full_contact_list), - ) + self.assertEqual(len(dummies), (self.settings.max_number_of_contacts+1) - len(self.full_contact_list)) for c in dummies: self.assertIsInstance(c, KeySet) def test_add_keyset(self) -> None: - new_key = 
bytes(SYMMETRIC_KEY_LENGTH) + new_key = bytes(SYMMETRIC_KEY_LENGTH) self.keylist.keysets = [create_keyset(LOCAL_ID)] # Check that KeySet exists and that its keys are different self.assertNotEqual(self.keylist.keysets[0].rx_hk, new_key) # Replace existing KeySet - self.assertIsNone( - self.keylist.add_keyset(LOCAL_PUBKEY, new_key, new_key, new_key, new_key) - ) + self.assertIsNone(self.keylist.add_keyset(LOCAL_PUBKEY, + new_key, new_key, + new_key, new_key)) # Check that new KeySet replaced the old one self.assertEqual(self.keylist.keysets[0].onion_pub_key, LOCAL_PUBKEY) @@ -206,26 +174,25 @@ class TestKeyList(unittest.TestCase): def test_remove_keyset(self) -> None: # Test KeySet for Bob exists - self.assertTrue(self.keylist.has_keyset(nick_to_pub_key("Bob"))) + self.assertTrue(self.keylist.has_keyset(nick_to_pub_key('Bob'))) # Remove KeySet for Bob - self.assertIsNone(self.keylist.remove_keyset(nick_to_pub_key("Bob"))) + self.assertIsNone(self.keylist.remove_keyset(nick_to_pub_key('Bob'))) # Test KeySet was removed - self.assertFalse(self.keylist.has_keyset(nick_to_pub_key("Bob"))) + self.assertFalse(self.keylist.has_keyset(nick_to_pub_key('Bob'))) - @mock.patch("builtins.input", side_effect=["test_password"]) - def test_change_master_key(self, _) -> None: + @mock.patch('builtins.input', side_effect=['test_password']) + def test_change_master_key(self, _: Any) -> None: # Setup - key = SYMMETRIC_KEY_LENGTH * b"\x01" + key = SYMMETRIC_KEY_LENGTH * b'\x01' master_key2 = MasterKey(master_key=key) - queues = gen_queue_dict() + queues = gen_queue_dict() def queue_delayer() -> None: """Place packet to queue after timer runs out.""" time.sleep(0.1) queues[KEY_MANAGEMENT_QUEUE].put(master_key2.master_key) - threading.Thread(target=queue_delayer).start() # Test that new key is different from existing one @@ -247,58 +214,40 @@ class TestKeyList(unittest.TestCase): # Test self.assertEqual(os.path.getsize(self.file_name), 9016) - self.assertIsNone( - self.keylist.manage( - queues, KDB_UPDATE_SIZE_HEADER, Settings(max_number_of_contacts=100) - ) - ) + self.assertIsNone(self.keylist.manage(queues, KDB_UPDATE_SIZE_HEADER, Settings(max_number_of_contacts=100))) self.assertEqual(os.path.getsize(self.file_name), 17816) self.assertEqual(self.keylist.settings.max_number_of_contacts, 100) def test_get_keyset(self) -> None: - keyset = self.keylist.get_keyset(nick_to_pub_key("Alice")) + keyset = self.keylist.get_keyset(nick_to_pub_key('Alice')) self.assertIsInstance(keyset, KeySet) def test_get_list_of_pub_keys(self) -> None: - self.assertEqual( - self.keylist.get_list_of_pub_keys(), - [ - nick_to_pub_key("Alice"), - nick_to_pub_key("Bob"), - nick_to_pub_key("Charlie"), - ], - ) + self.assertEqual(self.keylist.get_list_of_pub_keys(), + [nick_to_pub_key("Alice"), + nick_to_pub_key("Bob"), + nick_to_pub_key("Charlie")]) def test_has_keyset(self) -> None: self.keylist.keysets = [] self.assertFalse(self.keylist.has_keyset(nick_to_pub_key("Alice"))) - self.keylist.keysets = [create_keyset("Alice")] + self.keylist.keysets = [create_keyset('Alice')] self.assertTrue(self.keylist.has_keyset(nick_to_pub_key("Alice"))) def test_has_rx_mk(self) -> None: - self.assertTrue(self.keylist.has_rx_mk(nick_to_pub_key("Bob"))) - self.keylist.get_keyset(nick_to_pub_key("Bob")).rx_mk = bytes( - SYMMETRIC_KEY_LENGTH - ) - self.keylist.get_keyset(nick_to_pub_key("Bob")).rx_hk = bytes( - SYMMETRIC_KEY_LENGTH - ) - self.assertFalse(self.keylist.has_rx_mk(nick_to_pub_key("Bob"))) + 
self.assertTrue(self.keylist.has_rx_mk(nick_to_pub_key('Bob'))) + self.keylist.get_keyset(nick_to_pub_key('Bob')).rx_mk = bytes(SYMMETRIC_KEY_LENGTH) + self.keylist.get_keyset(nick_to_pub_key('Bob')).rx_hk = bytes(SYMMETRIC_KEY_LENGTH) + self.assertFalse(self.keylist.has_rx_mk(nick_to_pub_key('Bob'))) def test_has_local_keyset(self) -> None: self.keylist.keysets = [] self.assertFalse(self.keylist.has_local_keyset()) - self.assertIsNone( - self.keylist.add_keyset( - LOCAL_PUBKEY, - bytes(SYMMETRIC_KEY_LENGTH), - bytes(SYMMETRIC_KEY_LENGTH), - bytes(SYMMETRIC_KEY_LENGTH), - bytes(SYMMETRIC_KEY_LENGTH), - ) - ) + self.assertIsNone(self.keylist.add_keyset(LOCAL_PUBKEY, + bytes(SYMMETRIC_KEY_LENGTH), bytes(SYMMETRIC_KEY_LENGTH), + bytes(SYMMETRIC_KEY_LENGTH), bytes(SYMMETRIC_KEY_LENGTH))) self.assertTrue(self.keylist.has_local_keyset()) def test_manage(self) -> None: @@ -306,32 +255,20 @@ class TestKeyList(unittest.TestCase): queues = gen_queue_dict() # Test that KeySet for David does not exist - self.assertFalse(self.keylist.has_keyset(nick_to_pub_key("David"))) + self.assertFalse(self.keylist.has_keyset(nick_to_pub_key('David'))) # Test adding KeySet - self.assertIsNone( - self.keylist.manage( - queues, - KDB_ADD_ENTRY_HEADER, - nick_to_pub_key("David"), - bytes(SYMMETRIC_KEY_LENGTH), - bytes(SYMMETRIC_KEY_LENGTH), - bytes(SYMMETRIC_KEY_LENGTH), - bytes(SYMMETRIC_KEY_LENGTH), - ) - ) - self.assertTrue(self.keylist.has_keyset(nick_to_pub_key("David"))) + self.assertIsNone(self.keylist.manage(queues, KDB_ADD_ENTRY_HEADER, nick_to_pub_key('David'), + bytes(SYMMETRIC_KEY_LENGTH), bytes(SYMMETRIC_KEY_LENGTH), + bytes(SYMMETRIC_KEY_LENGTH), bytes(SYMMETRIC_KEY_LENGTH))) + self.assertTrue(self.keylist.has_keyset(nick_to_pub_key('David'))) # Test removing KeySet - self.assertIsNone( - self.keylist.manage( - queues, KDB_REMOVE_ENTRY_HEADER, nick_to_pub_key("David") - ) - ) - self.assertFalse(self.keylist.has_keyset(nick_to_pub_key("David"))) + self.assertIsNone(self.keylist.manage(queues, KDB_REMOVE_ENTRY_HEADER, nick_to_pub_key('David'))) + self.assertFalse(self.keylist.has_keyset(nick_to_pub_key('David'))) # Test changing master key - new_key = SYMMETRIC_KEY_LENGTH * b"\x01" + new_key = SYMMETRIC_KEY_LENGTH * b'\x01' self.assertNotEqual(self.master_key.master_key, new_key) @@ -343,8 +280,8 @@ class TestKeyList(unittest.TestCase): # Test invalid KeyList management command raises Critical Error with self.assertRaises(SystemExit): - self.keylist.manage(queues, "invalid_key", None) + self.keylist.manage(queues, 'invalid_key', None) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/common/test_db_logs.py b/tests/common/test_db_logs.py index 29d9810..14ebf2e 100644 --- a/tests/common/test_db_logs.py +++ b/tests/common/test_db_logs.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
@@ -26,72 +26,36 @@ import time import unittest from unittest import mock +from typing import Any -from src.common.database import MessageLog +from src.common.database import MessageLog from src.common.db_contacts import ContactList -from src.common.db_logs import ( - access_logs, - change_log_db_key, - log_writer_loop, - remove_logs, - replace_log_db, - write_log_entry, -) -from src.common.encoding import bytes_to_timestamp -from src.common.statics import ( - CLEAR_ENTIRE_SCREEN, - CURSOR_LEFT_UP_CORNER, - C_S_HEADER, - DIR_USER_DATA, - EXIT, - F_S_HEADER, - GROUP_ID_LENGTH, - LOGFILE_MASKING_QUEUE, - LOG_ENTRY_LENGTH, - LOG_PACKET_QUEUE, - LOG_SETTING_QUEUE, - MESSAGE, - M_A_HEADER, - M_C_HEADER, - M_S_HEADER, - ORIGIN_CONTACT_HEADER, - PADDING_LENGTH, - P_N_HEADER, - RX, - SYMMETRIC_KEY_LENGTH, - TIMESTAMP_LENGTH, - TRAFFIC_MASKING_QUEUE, - TX, - UNIT_TEST_QUEUE, - WIN_TYPE_CONTACT, - WIN_TYPE_GROUP, -) +from src.common.db_logs import (access_logs, change_log_db_key, log_writer_loop, remove_logs, replace_log_db, + write_log_entry) +from src.common.encoding import bytes_to_timestamp +from src.common.statics import (CLEAR_ENTIRE_SCREEN, CURSOR_LEFT_UP_CORNER, C_S_HEADER, DIR_USER_DATA, EXIT, + F_S_HEADER, GROUP_ID_LENGTH, LOGFILE_MASKING_QUEUE, LOG_ENTRY_LENGTH, + LOG_PACKET_QUEUE, LOG_SETTING_QUEUE, MESSAGE, M_A_HEADER, M_C_HEADER, M_S_HEADER, + ORIGIN_CONTACT_HEADER, PADDING_LENGTH, P_N_HEADER, RX, SYMMETRIC_KEY_LENGTH, + TIMESTAMP_LENGTH, TRAFFIC_MASKING_QUEUE, TX, UNIT_TEST_QUEUE, WIN_TYPE_CONTACT, + WIN_TYPE_GROUP) from tests.mock_classes import create_contact, GroupList, MasterKey, RxWindow, Settings -from tests.utils import ( - assembly_packet_creator, - cd_unit_test, - cleanup, - group_name_to_group_id, - nick_to_pub_key, -) -from tests.utils import nick_to_short_address, tear_queues, TFCTestCase, gen_queue_dict +from tests.utils import assembly_packet_creator, cd_unit_test, cleanup, group_name_to_group_id, nick_to_pub_key +from tests.utils import nick_to_short_address, tear_queues, TFCTestCase, gen_queue_dict -TIMESTAMP_BYTES = bytes.fromhex("08ceae02") -STATIC_TIMESTAMP = bytes_to_timestamp(TIMESTAMP_BYTES).strftime("%H:%M:%S.%f")[ - :-TIMESTAMP_LENGTH -] -SLEEP_DELAY = 0.02 +TIMESTAMP_BYTES = bytes.fromhex('08ceae02') +STATIC_TIMESTAMP = bytes_to_timestamp(TIMESTAMP_BYTES).strftime('%H:%M:%S.%f')[:-TIMESTAMP_LENGTH] +SLEEP_DELAY = 0.02 class TestLogWriterLoop(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" self.unit_test_dir = cd_unit_test() - self.master_key = MasterKey() - self.message_log = MessageLog( - f"{DIR_USER_DATA}{TX}_logs", self.master_key.master_key - ) + self.master_key = MasterKey() + self.message_log = MessageLog(f'{DIR_USER_DATA}{TX}_logs', self.master_key.master_key) def tearDown(self) -> None: """Post-test actions.""" @@ -99,58 +63,25 @@ class TestLogWriterLoop(unittest.TestCase): def test_function_logs_normal_data(self) -> None: # Setup - settings = Settings() + settings = Settings() master_key = MasterKey() - queues = gen_queue_dict() + queues = gen_queue_dict() def queue_delayer() -> None: """Place messages to queue one at a time.""" - for p in [ - ( - nick_to_pub_key("Alice"), - M_S_HEADER + bytes(PADDING_LENGTH), - False, - False, - master_key, - ), - (None, C_S_HEADER + bytes(PADDING_LENGTH), True, False, master_key), - ( - nick_to_pub_key("Alice"), - P_N_HEADER + bytes(PADDING_LENGTH), - True, - True, - master_key, - ), - ( - nick_to_pub_key("Alice"), - F_S_HEADER + bytes(PADDING_LENGTH), - True, - True, - master_key, - ), - ( - 
nick_to_pub_key("Alice"), - M_S_HEADER + bytes(PADDING_LENGTH), - True, - False, - master_key, - ), - ]: + for p in [(nick_to_pub_key('Alice'), M_S_HEADER + bytes(PADDING_LENGTH), False, False, master_key), + (None, C_S_HEADER + bytes(PADDING_LENGTH), True, False, master_key), + (nick_to_pub_key('Alice'), P_N_HEADER + bytes(PADDING_LENGTH), True, True, master_key), + (nick_to_pub_key('Alice'), F_S_HEADER + bytes(PADDING_LENGTH), True, True, master_key), + (nick_to_pub_key('Alice'), M_S_HEADER + bytes(PADDING_LENGTH), True, False, master_key)]: queues[LOG_PACKET_QUEUE].put(p) time.sleep(SLEEP_DELAY) queues[UNIT_TEST_QUEUE].put(EXIT) time.sleep(SLEEP_DELAY) - queues[LOG_PACKET_QUEUE].put( - ( - nick_to_pub_key("Alice"), - M_S_HEADER + bytes(PADDING_LENGTH), - True, - False, - master_key, - ) - ) + queues[LOG_PACKET_QUEUE].put(( + nick_to_pub_key('Alice'), M_S_HEADER + bytes(PADDING_LENGTH), True, False, master_key)) time.sleep(SLEEP_DELAY) # Test @@ -162,38 +93,19 @@ class TestLogWriterLoop(unittest.TestCase): def test_function_logs_traffic_masking_data(self) -> None: # Setup - settings = Settings(log_file_masking=True, traffic_masking=False) + settings = Settings(log_file_masking=True, + traffic_masking=False) master_key = MasterKey() - queues = gen_queue_dict() + queues = gen_queue_dict() queues[TRAFFIC_MASKING_QUEUE].put(True) def queue_delayer() -> None: """Place messages to queue one at a time.""" - for p in [ - ( - nick_to_pub_key("Alice"), - M_S_HEADER + bytes(PADDING_LENGTH), - False, - False, - master_key, - ), - (None, C_S_HEADER + bytes(PADDING_LENGTH), True, False, master_key), - ( - nick_to_pub_key("Alice"), - F_S_HEADER + bytes(PADDING_LENGTH), - True, - True, - master_key, - ), - ( - nick_to_pub_key("Alice"), - M_S_HEADER + bytes(PADDING_LENGTH), - True, - False, - master_key, - ), - ]: + for p in [(nick_to_pub_key('Alice'), M_S_HEADER + bytes(PADDING_LENGTH), False, False, master_key), + (None, C_S_HEADER + bytes(PADDING_LENGTH), True, False, master_key), + (nick_to_pub_key('Alice'), F_S_HEADER + bytes(PADDING_LENGTH), True, True, master_key), + (nick_to_pub_key('Alice'), M_S_HEADER + bytes(PADDING_LENGTH), True, False, master_key)]: queues[LOG_PACKET_QUEUE].put(p) time.sleep(SLEEP_DELAY) @@ -201,50 +113,28 @@ class TestLogWriterLoop(unittest.TestCase): time.sleep(SLEEP_DELAY) queues[LOG_PACKET_QUEUE].put( - ( - nick_to_pub_key("Alice"), - P_N_HEADER + bytes(PADDING_LENGTH), - True, - True, - master_key, - ) - ) + (nick_to_pub_key('Alice'), P_N_HEADER + bytes(PADDING_LENGTH), True, True, master_key)) time.sleep(SLEEP_DELAY) # Test threading.Thread(target=queue_delayer).start() - self.assertIsNone( - log_writer_loop(queues, settings, self.message_log, unit_test=True) - ) + self.assertIsNone(log_writer_loop(queues, settings, self.message_log, unit_test=True)) # Teardown tear_queues(queues) def test_function_log_file_masking_queue_controls_log_file_masking(self) -> None: # Setup - settings = Settings(log_file_masking=False, traffic_masking=True) + settings = Settings(log_file_masking=False, + traffic_masking=True) master_key = MasterKey() - queues = gen_queue_dict() + queues = gen_queue_dict() def queue_delayer() -> None: """Place messages to queue one at a time.""" - for p in [ - (None, C_S_HEADER + bytes(PADDING_LENGTH), True, False, master_key), - ( - nick_to_pub_key("Alice"), - M_S_HEADER + bytes(PADDING_LENGTH), - False, - False, - master_key, - ), - ( - nick_to_pub_key("Alice"), - F_S_HEADER + bytes(PADDING_LENGTH), - True, - True, - master_key, - ), - ]: + for p in 
[(None, C_S_HEADER + bytes(PADDING_LENGTH), True, False, master_key), + (nick_to_pub_key('Alice'), M_S_HEADER + bytes(PADDING_LENGTH), False, False, master_key), + (nick_to_pub_key('Alice'), F_S_HEADER + bytes(PADDING_LENGTH), True, True, master_key)]: queues[LOG_PACKET_QUEUE].put(p) time.sleep(SLEEP_DELAY) @@ -254,61 +144,36 @@ class TestLogWriterLoop(unittest.TestCase): for _ in range(2): queues[LOG_PACKET_QUEUE].put( - ( - nick_to_pub_key("Alice"), - F_S_HEADER + bytes(PADDING_LENGTH), - True, - True, - master_key, - ) - ) + (nick_to_pub_key('Alice'), F_S_HEADER + bytes(PADDING_LENGTH), True, True, master_key)) time.sleep(SLEEP_DELAY) queues[UNIT_TEST_QUEUE].put(EXIT) time.sleep(SLEEP_DELAY) queues[LOG_PACKET_QUEUE].put( - ( - nick_to_pub_key("Alice"), - M_S_HEADER + bytes(PADDING_LENGTH), - True, - False, - master_key, - ) - ) + (nick_to_pub_key('Alice'), M_S_HEADER + bytes(PADDING_LENGTH), True, False, master_key)) time.sleep(SLEEP_DELAY) # Test threading.Thread(target=queue_delayer).start() - self.assertIsNone( - log_writer_loop(queues, settings, self.message_log, unit_test=True) - ) + self.assertIsNone(log_writer_loop(queues, settings, self.message_log, unit_test=True)) # Teardown tear_queues(queues) - def test_function_allows_control_of_noise_packets_based_on_log_setting_queue( - self, - ) -> None: + def test_function_allows_control_of_noise_packets_based_on_log_setting_queue(self) -> None: # Setup - settings = Settings(log_file_masking=True, traffic_masking=True) + settings = Settings(log_file_masking=True, + traffic_masking=True) master_key = MasterKey() - queues = gen_queue_dict() + queues = gen_queue_dict() - noise_tuple = ( - nick_to_pub_key("Alice"), - P_N_HEADER + bytes(PADDING_LENGTH), - True, - True, - master_key, - ) + noise_tuple = (nick_to_pub_key('Alice'), P_N_HEADER + bytes(PADDING_LENGTH), True, True, master_key) def queue_delayer() -> None: """Place packets to log into queue after delay.""" for _ in range(5): - queues[LOG_PACKET_QUEUE].put( - noise_tuple - ) # Not logged because logging_state is False by default + queues[LOG_PACKET_QUEUE].put(noise_tuple) # Not logged because logging_state is False by default time.sleep(SLEEP_DELAY) queues[LOG_SETTING_QUEUE].put(True) @@ -318,9 +183,7 @@ class TestLogWriterLoop(unittest.TestCase): queues[LOG_SETTING_QUEUE].put(False) for _ in range(3): - queues[LOG_PACKET_QUEUE].put( - noise_tuple - ) # Not logged because logging_state is False + queues[LOG_PACKET_QUEUE].put(noise_tuple) # Not logged because logging_state is False time.sleep(SLEEP_DELAY) queues[UNIT_TEST_QUEUE].put(EXIT) @@ -331,21 +194,20 @@ class TestLogWriterLoop(unittest.TestCase): # Test threading.Thread(target=queue_delayer).start() - self.assertIsNone( - log_writer_loop(queues, settings, self.message_log, unit_test=True) - ) + self.assertIsNone(log_writer_loop(queues, settings, self.message_log, unit_test=True)) # Teardown tear_queues(queues) class TestWriteLogEntry(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.unit_test_dir = cd_unit_test() - self.master_key = MasterKey() - self.settings = Settings() - self.log_file = f"{DIR_USER_DATA}{self.settings.software_operation}_logs" + self.unit_test_dir = cd_unit_test() + self.master_key = MasterKey() + self.settings = Settings() + self.log_file = f'{DIR_USER_DATA}{self.settings.software_operation}_logs' self.tfc_log_database = MessageLog(self.log_file, self.master_key.master_key) def tearDown(self) -> None: @@ -354,71 +216,58 @@ class TestWriteLogEntry(unittest.TestCase): def 
test_oversize_packet_raises_critical_error(self) -> None: # Setup - assembly_p = F_S_HEADER + bytes(PADDING_LENGTH) + b"a" + assembly_p = F_S_HEADER + bytes(PADDING_LENGTH) + b'a' # Test with self.assertRaises(SystemExit): - write_log_entry(assembly_p, nick_to_pub_key("Alice"), self.tfc_log_database) + write_log_entry(assembly_p, nick_to_pub_key('Alice'), self.tfc_log_database) def test_log_entry_is_concatenated(self) -> None: - for _ in range(5): + for i in range(5): assembly_p = F_S_HEADER + bytes(PADDING_LENGTH) - self.assertIsNone( - write_log_entry( - assembly_p, nick_to_pub_key("Alice"), self.tfc_log_database - ) - ) + self.assertIsNone(write_log_entry(assembly_p, nick_to_pub_key('Alice'), self.tfc_log_database)) class TestAccessHistoryAndPrintLogs(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.unit_test_dir = cd_unit_test() - self.master_key = MasterKey() - self.settings = Settings() - self.log_file = f"{DIR_USER_DATA}{self.settings.software_operation}_logs" + self.unit_test_dir = cd_unit_test() + self.master_key = MasterKey() + self.settings = Settings() + self.log_file = f'{DIR_USER_DATA}{self.settings.software_operation}_logs' self.tfc_log_database = MessageLog(self.log_file, self.master_key.master_key) - self.window = RxWindow( - type=WIN_TYPE_CONTACT, - uid=nick_to_pub_key("Alice"), - name="Alice", - type_print="contact", - ) + self.window = RxWindow(type=WIN_TYPE_CONTACT, + uid=nick_to_pub_key('Alice'), + name='Alice', + type_print='contact') - self.contact_list = ContactList(self.master_key, self.settings) - self.contact_list.contacts = list(map(create_contact, ["Alice", "Charlie"])) + self.contact_list = ContactList(self.master_key, self.settings) + self.contact_list.contacts = list(map(create_contact, ['Alice', 'Charlie'])) self.time = STATIC_TIMESTAMP - self.group_list = GroupList(groups=["test_group"]) - self.group = self.group_list.get_group("test_group") + self.group_list = GroupList(groups=['test_group']) + self.group = self.group_list.get_group('test_group') self.group.members = self.contact_list.contacts - self.args = ( - self.window, - self.contact_list, - self.group_list, - self.settings, - self.master_key, - ) + self.args = self.window, self.contact_list, self.group_list, self.settings, self.master_key - self.msg = ( - "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum consectetur purus quis" - " dapibus. Fusce venenatis lacus ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed bibendu" - "m velit maximus in. Aliquam ac metus risus. Sed cursus ornare luctus. Integer aliquet lectus id ma" - "ssa blandit imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget luctus nisl. Sed ut el" - "it iaculis, faucibus lacus eget, sodales magna. Nunc sed commodo arcu. In hac habitasse platea dic" - "tumst. Integer luctus aliquam justo, at vestibulum dolor iaculis ac. Etiam laoreet est eget odio r" - "utrum, vel malesuada lorem rhoncus. Cras finibus in neque eu euismod. Nulla facilisi. Nunc nec ali" - "quam quam, quis ullamcorper leo. Nunc egestas lectus eget est porttitor, in iaculis felis sceleris" - "que. In sem elit, fringilla id viverra commodo, sagittis varius purus. Pellentesque rutrum loborti" - "s neque a facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu." - ) + self.msg = ("Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum consectetur purus quis" + " dapibus. Fusce venenatis lacus ut rhoncus faucibus. 
Cras sollicitudin commodo sapien, sed bibendu" + "m velit maximus in. Aliquam ac metus risus. Sed cursus ornare luctus. Integer aliquet lectus id ma" + "ssa blandit imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget luctus nisl. Sed ut el" + "it iaculis, faucibus lacus eget, sodales magna. Nunc sed commodo arcu. In hac habitasse platea dic" + "tumst. Integer luctus aliquam justo, at vestibulum dolor iaculis ac. Etiam laoreet est eget odio r" + "utrum, vel malesuada lorem rhoncus. Cras finibus in neque eu euismod. Nulla facilisi. Nunc nec ali" + "quam quam, quis ullamcorper leo. Nunc egestas lectus eget est porttitor, in iaculis felis sceleris" + "que. In sem elit, fringilla id viverra commodo, sagittis varius purus. Pellentesque rutrum loborti" + "s neque a facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu.") def tearDown(self) -> None: """Post-test actions.""" cleanup(self.unit_test_dir) - def test_missing_log_file_raises_fr(self) -> None: + def test_missing_log_file_raises_se(self) -> None: # Setup os.remove(self.log_file) @@ -427,131 +276,91 @@ class TestAccessHistoryAndPrintLogs(TFCTestCase): def test_empty_log_file(self) -> None: # Setup - open(f"{DIR_USER_DATA}{self.settings.software_operation}_logs", "wb+").close() + open(f'{DIR_USER_DATA}{self.settings.software_operation}_logs', 'wb+').close() # Test - self.assert_se( - f"No logged messages for contact '{self.window.name}'.", - access_logs, - *self.args, - ) + self.assert_se(f"No logged messages for contact '{self.window.name}'.", access_logs, *self.args) - @mock.patch("struct.pack", return_value=TIMESTAMP_BYTES) - def test_display_short_private_message(self, _) -> None: + @mock.patch('struct.pack', return_value=TIMESTAMP_BYTES) + def test_display_short_private_message(self, _: Any) -> None: # Setup # Add a message from user (Bob) to different contact (Charlie). access_logs should not display this message. - for p in assembly_packet_creator(MESSAGE, "Hi Charlie"): - write_log_entry(p, nick_to_pub_key("Charlie"), self.tfc_log_database) + for p in assembly_packet_creator(MESSAGE, 'Hi Charlie'): + write_log_entry(p, nick_to_pub_key('Charlie'), self.tfc_log_database) # Add a message from contact Alice to user (Bob). - for p in assembly_packet_creator(MESSAGE, "Hi Bob"): - write_log_entry( - p, - nick_to_pub_key("Alice"), - self.tfc_log_database, - origin=ORIGIN_CONTACT_HEADER, - ) + for p in assembly_packet_creator(MESSAGE, 'Hi Bob'): + write_log_entry(p, nick_to_pub_key('Alice'), self.tfc_log_database, origin=ORIGIN_CONTACT_HEADER) # Add a message from user (Bob) to Alice. 
- for p in assembly_packet_creator(MESSAGE, "Hi Alice"): - write_log_entry(p, nick_to_pub_key("Alice"), self.tfc_log_database) + for p in assembly_packet_creator(MESSAGE, 'Hi Alice'): + write_log_entry(p, nick_to_pub_key('Alice'), self.tfc_log_database) # Test - self.assert_prints( - ( - CLEAR_ENTIRE_SCREEN - + CURSOR_LEFT_UP_CORNER - + f"""\ + self.assert_prints((CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER + f"""\ Log file of message(s) sent to contact Alice ════════════════════════════════════════════════════════════════════════════════ {self.time} Alice: Hi Bob {self.time} Me: Hi Alice -""" - ), - access_logs, - *self.args, - ) +"""), access_logs, *self.args) - @mock.patch("struct.pack", return_value=TIMESTAMP_BYTES) - def test_export_short_private_message(self, _) -> None: + @mock.patch('struct.pack', return_value=TIMESTAMP_BYTES) + def test_export_short_private_message(self, _: Any) -> None: # Setup # Test title displayed by the Receiver program. self.settings.software_operation = RX - self.log_file = f"{DIR_USER_DATA}{self.settings.software_operation}_logs" + self.log_file = f'{DIR_USER_DATA}{self.settings.software_operation}_logs' self.tfc_log_database = MessageLog(self.log_file, self.master_key.master_key) # Add a message from contact Alice to user (Bob). - for p in assembly_packet_creator(MESSAGE, "Hi Bob"): - write_log_entry( - p, - nick_to_pub_key("Alice"), - self.tfc_log_database, - origin=ORIGIN_CONTACT_HEADER, - ) + for p in assembly_packet_creator(MESSAGE, 'Hi Bob'): + write_log_entry(p, nick_to_pub_key('Alice'), self.tfc_log_database, origin=ORIGIN_CONTACT_HEADER) # Add a message from user (Bob) to Alice. - for p in assembly_packet_creator(MESSAGE, "Hi Alice"): - write_log_entry(p, nick_to_pub_key("Alice"), self.tfc_log_database) + for p in assembly_packet_creator(MESSAGE, 'Hi Alice'): + write_log_entry(p, nick_to_pub_key('Alice'), self.tfc_log_database) # Test self.assertIsNone(access_logs(*self.args, export=True)) with open("Receiver - Plaintext log (Alice)") as f: - self.assertEqual( - f.read(), - f"""\ + self.assertEqual(f.read(), f"""\ Log file of message(s) to/from contact Alice ════════════════════════════════════════════════════════════════════════════════ {self.time} Alice: Hi Bob {self.time} Me: Hi Alice -""", - ) +""") - @mock.patch("struct.pack", return_value=TIMESTAMP_BYTES) - def test_long_private_message(self, _) -> None: + @mock.patch('struct.pack', return_value=TIMESTAMP_BYTES) + def test_long_private_message(self, _: Any) -> None: # Setup # Add an assembly packet sequence sent to contact Alice containing cancel packet. access_logs should skip this. packets = assembly_packet_creator(MESSAGE, self.msg) packets = packets[2:] + [M_C_HEADER + bytes(PADDING_LENGTH)] for p in packets: - write_log_entry(p, nick_to_pub_key("Alice"), self.tfc_log_database) + write_log_entry(p, nick_to_pub_key('Alice'), self.tfc_log_database) # Add an orphaned 'append' assembly packet the function should skip. - write_log_entry( - M_A_HEADER + bytes(PADDING_LENGTH), - nick_to_pub_key("Alice"), - self.tfc_log_database, - ) + write_log_entry(M_A_HEADER + bytes(PADDING_LENGTH), nick_to_pub_key('Alice'), self.tfc_log_database) # Add a group message for a different group the function should skip. 
- for p in assembly_packet_creator( - MESSAGE, "This is a short message", group_id=GROUP_ID_LENGTH * b"1" - ): - write_log_entry(p, nick_to_pub_key("Alice"), self.tfc_log_database) + for p in assembly_packet_creator(MESSAGE, 'This is a short message', group_id=GROUP_ID_LENGTH * b'1'): + write_log_entry(p, nick_to_pub_key('Alice'), self.tfc_log_database) # Add a message from contact Alice to user (Bob). for p in assembly_packet_creator(MESSAGE, self.msg): - write_log_entry( - p, - nick_to_pub_key("Alice"), - self.tfc_log_database, - origin=ORIGIN_CONTACT_HEADER, - ) + write_log_entry(p, nick_to_pub_key('Alice'), self.tfc_log_database, origin=ORIGIN_CONTACT_HEADER) # Add a message from user (Bob) to Alice. for p in assembly_packet_creator(MESSAGE, self.msg): - write_log_entry(p, nick_to_pub_key("Alice"), self.tfc_log_database) + write_log_entry(p, nick_to_pub_key('Alice'), self.tfc_log_database) # Test - self.assert_prints( - ( - CLEAR_ENTIRE_SCREEN - + CURSOR_LEFT_UP_CORNER - + f"""\ + self.assert_prints((CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER + f"""\ Log file of message(s) sent to contact Alice ════════════════════════════════════════════════════════════════════════════════ {self.time} Alice: Lorem ipsum dolor sit amet, consectetur adipiscing elit. @@ -590,49 +399,27 @@ Log file of message(s) sent to contact Alice ac, venenatis arcu. -""" - ), - access_logs, - *self.args, - ) +"""), access_logs, *self.args) - @mock.patch("struct.pack", return_value=TIMESTAMP_BYTES) - def test_short_group_message(self, _) -> None: + @mock.patch('struct.pack', return_value=TIMESTAMP_BYTES) + def test_short_group_message(self, _: Any) -> None: # Setup - self.window = RxWindow( - type=WIN_TYPE_GROUP, - uid=group_name_to_group_id("test_group"), - name="test_group", - group=self.group, - type_print="group", - group_list=self.group_list, - ) + self.window = RxWindow(type=WIN_TYPE_GROUP, + uid=group_name_to_group_id('test_group'), + name='test_group', + group=self.group, + type_print='group', + group_list=self.group_list) # Add messages to Alice and Charlie. Add duplicate of outgoing message that should be skipped by access_logs. 
- for p in assembly_packet_creator( - MESSAGE, "This is a short message", group_id=self.window.uid - ): - write_log_entry(p, nick_to_pub_key("Alice"), self.tfc_log_database) - write_log_entry( - p, - nick_to_pub_key("Alice"), - self.tfc_log_database, - origin=ORIGIN_CONTACT_HEADER, - ) - write_log_entry(p, nick_to_pub_key("Charlie"), self.tfc_log_database) - write_log_entry( - p, - nick_to_pub_key("Charlie"), - self.tfc_log_database, - origin=ORIGIN_CONTACT_HEADER, - ) + for p in assembly_packet_creator(MESSAGE, 'This is a short message', group_id=self.window.uid): + write_log_entry(p, nick_to_pub_key('Alice'), self.tfc_log_database) + write_log_entry(p, nick_to_pub_key('Alice'), self.tfc_log_database, origin=ORIGIN_CONTACT_HEADER) + write_log_entry(p, nick_to_pub_key('Charlie'), self.tfc_log_database) + write_log_entry(p, nick_to_pub_key('Charlie'), self.tfc_log_database, origin=ORIGIN_CONTACT_HEADER) # Test - self.assert_prints( - ( - CLEAR_ENTIRE_SCREEN - + CURSOR_LEFT_UP_CORNER - + f"""\ + self.assert_prints((CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER + f"""\ Log file of message(s) sent to group test_group ════════════════════════════════════════════════════════════════════════════════ {self.time} Me: This is a short message @@ -640,84 +427,50 @@ Log file of message(s) sent to group test_group {self.time} Charlie: This is a short message -""" - ), - access_logs, - self.window, - self.contact_list, - self.group_list, - self.settings, - self.master_key, - ) +"""), access_logs, self.window, self.contact_list, self.group_list, self.settings, self.master_key) - @mock.patch("struct.pack", return_value=TIMESTAMP_BYTES) - def test_long_group_message(self, _) -> None: + @mock.patch('struct.pack', return_value=TIMESTAMP_BYTES) + def test_long_group_message(self, _: Any) -> None: # Setup # Test title displayed by the Receiver program. self.settings.software_operation = RX - self.log_file = f"{DIR_USER_DATA}{self.settings.software_operation}_logs" - self.tfc_log_database = MessageLog(self.log_file, self.master_key.master_key) + self.log_file = f'{DIR_USER_DATA}{self.settings.software_operation}_logs' + self.tfc_log_database = MessageLog(self.log_file, self.master_key.master_key) - self.window = RxWindow( - type=WIN_TYPE_GROUP, - uid=group_name_to_group_id("test_group"), - name="test_group", - group=self.group, - type_print="group", - ) + self.window = RxWindow(type=WIN_TYPE_GROUP, + uid=group_name_to_group_id('test_group'), + name='test_group', + group=self.group, + type_print='group') # Add an assembly packet sequence sent to contact Alice in group containing cancel packet. # Access_logs should skip this. - packets = assembly_packet_creator( - MESSAGE, self.msg, group_id=group_name_to_group_id("test_group") - ) + packets = assembly_packet_creator(MESSAGE, self.msg, group_id=group_name_to_group_id('test_group')) packets = packets[2:] + [M_C_HEADER + bytes(PADDING_LENGTH)] for p in packets: - write_log_entry(p, nick_to_pub_key("Alice"), self.tfc_log_database) + write_log_entry(p, nick_to_pub_key('Alice'), self.tfc_log_database) # Add an orphaned 'append' assembly packet. access_logs should skip this. - write_log_entry( - M_A_HEADER + bytes(PADDING_LENGTH), - nick_to_pub_key("Alice"), - self.tfc_log_database, - ) + write_log_entry(M_A_HEADER + bytes(PADDING_LENGTH), nick_to_pub_key('Alice'), self.tfc_log_database) # Add a private message. access_logs should skip this. 
- for p in assembly_packet_creator(MESSAGE, "This is a short private message"): - write_log_entry(p, nick_to_pub_key("Alice"), self.tfc_log_database) + for p in assembly_packet_creator(MESSAGE, 'This is a short private message'): + write_log_entry(p, nick_to_pub_key('Alice'), self.tfc_log_database) # Add a group message for a different group. access_logs should skip this. - for p in assembly_packet_creator( - MESSAGE, "This is a short group message", group_id=GROUP_ID_LENGTH * b"1" - ): - write_log_entry(p, nick_to_pub_key("Alice"), self.tfc_log_database) + for p in assembly_packet_creator(MESSAGE, 'This is a short group message', group_id=GROUP_ID_LENGTH * b'1'): + write_log_entry(p, nick_to_pub_key('Alice'), self.tfc_log_database) # Add messages to Alice and Charlie in group. # Add duplicate of outgoing message that should be skipped by access_logs. - for p in assembly_packet_creator( - MESSAGE, self.msg, group_id=group_name_to_group_id("test_group") - ): - write_log_entry(p, nick_to_pub_key("Alice"), self.tfc_log_database) - write_log_entry( - p, - nick_to_pub_key("Alice"), - self.tfc_log_database, - origin=ORIGIN_CONTACT_HEADER, - ) - write_log_entry(p, nick_to_pub_key("Charlie"), self.tfc_log_database) - write_log_entry( - p, - nick_to_pub_key("Charlie"), - self.tfc_log_database, - origin=ORIGIN_CONTACT_HEADER, - ) + for p in assembly_packet_creator(MESSAGE, self.msg, group_id=group_name_to_group_id('test_group')): + write_log_entry(p, nick_to_pub_key('Alice'), self.tfc_log_database) + write_log_entry(p, nick_to_pub_key('Alice'), self.tfc_log_database, origin=ORIGIN_CONTACT_HEADER) + write_log_entry(p, nick_to_pub_key('Charlie'), self.tfc_log_database) + write_log_entry(p, nick_to_pub_key('Charlie'), self.tfc_log_database, origin=ORIGIN_CONTACT_HEADER) # Test - self.assert_prints( - ( - CLEAR_ENTIRE_SCREEN - + CURSOR_LEFT_UP_CORNER - + f"""\ + self.assert_prints((CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER + f"""\ Log file of message(s) to/from group test_group ════════════════════════════════════════════════════════════════════════════════ {self.time} Me: Lorem ipsum dolor sit amet, consectetur adipiscing elit. @@ -773,288 +526,202 @@ Log file of message(s) to/from group test_group tortor placerat, aliquam dolor ac, venenatis arcu. 
-""" - ), - access_logs, - self.window, - self.contact_list, - self.group_list, - self.settings, - self.master_key, - ) +"""), access_logs, self.window, self.contact_list, self.group_list, self.settings, self.master_key) class TestReEncrypt(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.unit_test_dir = cd_unit_test() + self.unit_test_dir = cd_unit_test() self.old_master_key = MasterKey() self.new_master_key = MasterKey(master_key=os.urandom(SYMMETRIC_KEY_LENGTH)) - self.settings = Settings() - self.tmp_file_name = ( - f"{DIR_USER_DATA}{self.settings.software_operation}_logs_temp" - ) - self.time = STATIC_TIMESTAMP - self.log_file = f"{DIR_USER_DATA}{self.settings.software_operation}_logs" - self.message_log = MessageLog(self.log_file, self.old_master_key.master_key) + self.settings = Settings() + self.tmp_file_name = f"{DIR_USER_DATA}{self.settings.software_operation}_logs_temp" + self.time = STATIC_TIMESTAMP + self.log_file = f'{DIR_USER_DATA}{self.settings.software_operation}_logs' + self.message_log = MessageLog(self.log_file, self.old_master_key.master_key) def tearDown(self) -> None: """Post-test actions.""" cleanup(self.unit_test_dir) - def test_missing_log_database_raises_fr(self) -> None: + def test_missing_log_database_raises_se(self) -> None: # Setup os.remove(self.log_file) # Test - self.assert_se( - f"No log database available.", - change_log_db_key, - self.old_master_key.master_key, - self.new_master_key.master_key, - self.settings, - ) + self.assert_se(f"No log database available.", + change_log_db_key, self.old_master_key.master_key, self.new_master_key.master_key, self.settings) - @mock.patch("struct.pack", return_value=TIMESTAMP_BYTES) - def test_database_encryption_with_another_key(self, _) -> None: + @mock.patch('struct.pack', return_value=TIMESTAMP_BYTES) + def test_database_encryption_with_another_key(self, _: Any) -> None: # Setup - window = RxWindow( - type=WIN_TYPE_CONTACT, - uid=nick_to_pub_key("Alice"), - name="Alice", - type_print="contact", - ) - contact_list = ContactList(self.old_master_key, self.settings) - contact_list.contacts = [create_contact("Alice")] - group_list = GroupList() + window = RxWindow(type=WIN_TYPE_CONTACT, + uid=nick_to_pub_key('Alice'), + name='Alice', + type_print='contact') + contact_list = ContactList(self.old_master_key, self.settings) + contact_list.contacts = [create_contact('Alice')] + group_list = GroupList() # Create temp file that must be removed. temp_file_data = os.urandom(LOG_ENTRY_LENGTH) - with open(self.tmp_file_name, "wb+") as f: + with open(self.tmp_file_name, 'wb+') as f: f.write(temp_file_data) # Add a message from contact Alice to user (Bob). - for p in assembly_packet_creator(MESSAGE, "This is a short message"): - write_log_entry( - p, - nick_to_pub_key("Alice"), - self.message_log, - origin=ORIGIN_CONTACT_HEADER, - ) + for p in assembly_packet_creator(MESSAGE, 'This is a short message'): + write_log_entry(p, nick_to_pub_key('Alice'), self.message_log, origin=ORIGIN_CONTACT_HEADER) # Add a message from user (Bob) to Alice. - for p in assembly_packet_creator(MESSAGE, "This is a short message"): - write_log_entry(p, nick_to_pub_key("Alice"), self.message_log) + for p in assembly_packet_creator(MESSAGE, 'This is a short message'): + write_log_entry(p, nick_to_pub_key('Alice'), self.message_log) # Check logfile content. 
- message = ( - CLEAR_ENTIRE_SCREEN - + CURSOR_LEFT_UP_CORNER - + f"""\ + message = (CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER + f"""\ Log file of message(s) sent to contact Alice ════════════════════════════════════════════════════════════════════════════════ {self.time} Alice: This is a short message {self.time} Me: This is a short message -""" - ) +""") self.assertIsNone( - change_log_db_key( - self.old_master_key.master_key, - self.new_master_key.master_key, - self.settings, - ) - ) + change_log_db_key(self.old_master_key.master_key, self.new_master_key.master_key, self.settings)) - with open(self.tmp_file_name, "rb") as f: + with open(self.tmp_file_name, 'rb') as f: purp_temp_data = f.read() self.assertNotEqual(purp_temp_data, temp_file_data) # Test that decryption with new key is identical. replace_log_db(self.settings) - self.assert_prints( - message, - access_logs, - window, - contact_list, - group_list, - self.settings, - self.new_master_key, - ) + self.assert_prints(message, access_logs, window, contact_list, group_list, self.settings, self.new_master_key) # Test that temp file is removed. self.assertFalse(os.path.isfile(self.tmp_file_name)) class TestRemoveLog(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.unit_test_dir = cd_unit_test() - self.master_key = MasterKey() - self.settings = Settings() - self.time = STATIC_TIMESTAMP - self.contact_list = ContactList(self.master_key, self.settings) - self.group_list = GroupList(groups=["test_group"]) - self.file_name = f"{DIR_USER_DATA}{self.settings.software_operation}_logs" - self.tmp_file_name = self.file_name + "_temp" + self.unit_test_dir = cd_unit_test() + self.master_key = MasterKey() + self.settings = Settings() + self.time = STATIC_TIMESTAMP + self.contact_list = ContactList(self.master_key, self.settings) + self.group_list = GroupList(groups=['test_group']) + self.file_name = f'{DIR_USER_DATA}{self.settings.software_operation}_logs' + self.tmp_file_name = self.file_name + "_temp" self.tfc_log_database = MessageLog(self.file_name, self.master_key.master_key) - self.args = self.contact_list, self.group_list, self.settings, self.master_key + self.args = self.contact_list, self.group_list, self.settings, self.master_key - self.msg = ( - "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum consectetur purus quis" - " dapibus. Fusce venenatis lacus ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed bibendu" - "m velit maximus in. Aliquam ac metus risus. Sed cursus ornare luctus. Integer aliquet lectus id ma" - "ssa blandit imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget luctus nisl. Sed ut el" - "it iaculis, faucibus lacus eget, sodales magna. Nunc sed commodo arcu. In hac habitasse platea dic" - "tumst. Integer luctus aliquam justo, at vestibulum dolor iaculis ac. Etiam laoreet est eget odio r" - "utrum, vel malesuada lorem rhoncus. Cras finibus in neque eu euismod. Nulla facilisi. Nunc nec ali" - "quam quam, quis ullamcorper leo. Nunc egestas lectus eget est porttitor, in iaculis felis sceleris" - "que. In sem elit, fringilla id viverra commodo, sagittis varius purus. Pellentesque rutrum loborti" - "s neque a facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu." - ) + self.msg = ("Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum consectetur purus quis" + " dapibus. Fusce venenatis lacus ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed bibendu" + "m velit maximus in. Aliquam ac metus risus. 
Sed cursus ornare luctus. Integer aliquet lectus id ma" + "ssa blandit imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget luctus nisl. Sed ut el" + "it iaculis, faucibus lacus eget, sodales magna. Nunc sed commodo arcu. In hac habitasse platea dic" + "tumst. Integer luctus aliquam justo, at vestibulum dolor iaculis ac. Etiam laoreet est eget odio r" + "utrum, vel malesuada lorem rhoncus. Cras finibus in neque eu euismod. Nulla facilisi. Nunc nec ali" + "quam quam, quis ullamcorper leo. Nunc egestas lectus eget est porttitor, in iaculis felis sceleris" + "que. In sem elit, fringilla id viverra commodo, sagittis varius purus. Pellentesque rutrum loborti" + "s neque a facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu.") def tearDown(self) -> None: """Post-test actions.""" cleanup(self.unit_test_dir) - def test_missing_log_file_raises_fr(self) -> None: + def test_missing_log_file_raises_se(self) -> None: # Setup os.remove(self.file_name) # Test - self.assert_se( - "No log database available.", - remove_logs, - *self.args, - nick_to_pub_key("Alice"), - ) + self.assert_se("No log database available.", remove_logs, *self.args, nick_to_pub_key('Alice')) def test_removal_of_group_logs(self) -> None: # Setup short_msg = "Lorem ipsum dolor sit amet, consectetur adipiscing elit." # Add long message from user (Bob) to Alice and Charlie. These should be removed. - for p in assembly_packet_creator( - MESSAGE, self.msg, group_id=group_name_to_group_id("test_group") - ): - write_log_entry(p, nick_to_pub_key("Alice"), self.tfc_log_database) - write_log_entry(p, nick_to_pub_key("Charlie"), self.tfc_log_database) + for p in assembly_packet_creator(MESSAGE, self.msg, group_id=group_name_to_group_id('test_group')): + write_log_entry(p, nick_to_pub_key('Alice'), self.tfc_log_database) + write_log_entry(p, nick_to_pub_key('Charlie'), self.tfc_log_database) # Add short message from user (Bob) to Alice and Charlie. These should be removed. - for p in assembly_packet_creator( - MESSAGE, short_msg, group_id=group_name_to_group_id("test_group") - ): - write_log_entry(p, nick_to_pub_key("Alice"), self.tfc_log_database) - write_log_entry(p, nick_to_pub_key("Charlie"), self.tfc_log_database) + for p in assembly_packet_creator(MESSAGE, short_msg, group_id=group_name_to_group_id('test_group')): + write_log_entry(p, nick_to_pub_key('Alice'), self.tfc_log_database) + write_log_entry(p, nick_to_pub_key('Charlie'), self.tfc_log_database) # Add short message from user (Bob) to David. This should be kept. for p in assembly_packet_creator(MESSAGE, short_msg): - write_log_entry(p, nick_to_pub_key("David"), self.tfc_log_database) + write_log_entry(p, nick_to_pub_key('David'), self.tfc_log_database) # Add long message from user (Bob) to David. These should be kept. for p in assembly_packet_creator(MESSAGE, self.msg): - write_log_entry(p, nick_to_pub_key("David"), self.tfc_log_database) + write_log_entry(p, nick_to_pub_key('David'), self.tfc_log_database) # Add short message from user (Bob) to David in a group. This should be kept as group is different. - for p in assembly_packet_creator( - MESSAGE, short_msg, group_id=group_name_to_group_id("different_group") - ): - write_log_entry(p, nick_to_pub_key("David"), self.tfc_log_database) + for p in assembly_packet_creator(MESSAGE, short_msg, group_id=group_name_to_group_id('different_group')): + write_log_entry(p, nick_to_pub_key('David'), self.tfc_log_database) # Add an orphaned 'append' assembly packet. This should be removed as it's corrupted. 
- write_log_entry( - M_A_HEADER + bytes(PADDING_LENGTH), - nick_to_pub_key("Alice"), - self.tfc_log_database, - ) + write_log_entry(M_A_HEADER + bytes(PADDING_LENGTH), nick_to_pub_key('Alice'), self.tfc_log_database) # Add long message to group member David, canceled half-way. This should be removed as unviewable. - packets = assembly_packet_creator( - MESSAGE, self.msg, group_id=group_name_to_group_id("test_group") - ) + packets = assembly_packet_creator(MESSAGE, self.msg, group_id=group_name_to_group_id('test_group')) packets = packets[2:] + [M_C_HEADER + bytes(PADDING_LENGTH)] for p in packets: - write_log_entry(p, nick_to_pub_key("David"), self.tfc_log_database) + write_log_entry(p, nick_to_pub_key('David'), self.tfc_log_database) # Add long message to group member David, remove_logs should keep these as group is different. - for p in assembly_packet_creator( - MESSAGE, self.msg, group_id=group_name_to_group_id("different_group") - ): - write_log_entry(p, nick_to_pub_key("David"), self.tfc_log_database) + for p in assembly_packet_creator(MESSAGE, self.msg, group_id=group_name_to_group_id('different_group')): + write_log_entry(p, nick_to_pub_key('David'), self.tfc_log_database) # Test log entries were found. - self.assert_se( - "Removed log entries for group 'test_group'.", - remove_logs, - *self.args, - selector=group_name_to_group_id("test_group"), - ) + self.assert_se("Removed log entries for group 'test_group'.", + remove_logs, *self.args, selector=group_name_to_group_id('test_group')) # Test log entries were not found when removing group again. - self.assert_se( - "Found no log entries for group 'test_group'.", - remove_logs, - *self.args, - selector=group_name_to_group_id("test_group"), - ) + self.assert_se("Found no log entries for group 'test_group'.", + remove_logs, *self.args, selector=group_name_to_group_id('test_group')) def test_removal_of_contact_logs(self) -> None: # Setup short_msg = "Lorem ipsum dolor sit amet, consectetur adipiscing elit." # Create temp file that must be removed. - with open(self.tmp_file_name, "wb+") as f: + with open(self.tmp_file_name, 'wb+') as f: f.write(os.urandom(LOG_ENTRY_LENGTH)) # Add a long message sent to both Alice and Bob. for p in assembly_packet_creator(MESSAGE, self.msg): - write_log_entry(p, nick_to_pub_key("Alice"), self.tfc_log_database) - write_log_entry(p, nick_to_pub_key("Charlie"), self.tfc_log_database) + write_log_entry(p, nick_to_pub_key('Alice'), self.tfc_log_database) + write_log_entry(p, nick_to_pub_key('Charlie'), self.tfc_log_database) # Add a short message sent to both Alice and Bob. 
for p in assembly_packet_creator(MESSAGE, short_msg): - write_log_entry(p, nick_to_pub_key("Alice"), self.tfc_log_database) - write_log_entry(p, nick_to_pub_key("Charlie"), self.tfc_log_database) + write_log_entry(p, nick_to_pub_key('Alice'), self.tfc_log_database) + write_log_entry(p, nick_to_pub_key('Charlie'), self.tfc_log_database) # Test - self.assert_se( - f"Removed log entries for contact '{nick_to_short_address('Alice')}'.", - remove_logs, - *self.args, - selector=nick_to_pub_key("Alice"), - ) + self.assert_se(f"Removed log entries for contact '{nick_to_short_address('Alice')}'.", + remove_logs, *self.args, selector=nick_to_pub_key('Alice')) - self.assert_se( - f"Removed log entries for contact '{nick_to_short_address('Charlie')}'.", - remove_logs, - *self.args, - selector=nick_to_pub_key("Charlie"), - ) + self.assert_se(f"Removed log entries for contact '{nick_to_short_address('Charlie')}'.", + remove_logs, *self.args, selector=nick_to_pub_key('Charlie')) - self.assert_se( - f"Found no log entries for contact '{nick_to_short_address('Alice')}'.", - remove_logs, - *self.args, - selector=nick_to_pub_key("Alice"), - ) + self.assert_se(f"Found no log entries for contact '{nick_to_short_address('Alice')}'.", + remove_logs, *self.args, selector=nick_to_pub_key('Alice')) - self.contact_list.contacts = [create_contact("Alice")] + self.contact_list.contacts = [create_contact('Alice')] - self.assert_se( - f"Found no log entries for contact 'Alice'.", - remove_logs, - *self.args, - selector=nick_to_pub_key("Alice"), - ) + self.assert_se(f"Found no log entries for contact 'Alice'.", + remove_logs, *self.args, selector=nick_to_pub_key('Alice')) - self.assert_se( - f"Found no log entries for group '2e8b2Wns7dWjB'.", - remove_logs, - *self.args, - selector=group_name_to_group_id("searched_group"), - ) + self.assert_se(f"Found no log entries for group '2e8b2Wns7dWjB'.", + remove_logs, *self.args, selector=group_name_to_group_id('searched_group')) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/common/test_db_masterkey.py b/tests/common/test_db_masterkey.py index 0efc6c2..81b37a5 100644 --- a/tests/common/test_db_masterkey.py +++ b/tests/common/test_db_masterkey.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
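
The `*_raises_fr` → `*_raises_se` renames and the `self.assert_se(...)` calls in the log-database tests above track the renaming of TFC's recoverable exception to SoftError (the old `fr` suffix presumably stood for FunctionReturn); SoftError itself is exercised in tests/common/test_exceptions.py later in this diff. A minimal sketch of the kind of helper `assert_se` presumably is (the real helper is imported from tests/utils.py and may differ in detail):

    import unittest

    from src.common.exceptions import SoftError

    class TFCTestCase(unittest.TestCase):

        def assert_se(self, expected_message: str, func, *args, **kwargs) -> None:
            """Assert that calling func raises SoftError carrying expected_message."""
            with self.assertRaises(SoftError) as context:
                func(*args, **kwargs)
            self.assertEqual(context.exception.message, expected_message)
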
@@ -23,41 +23,33 @@ import os import os.path import unittest -from unittest import mock +from unittest import mock from unittest.mock import MagicMock +from typing import Any -from src.common.crypto import blake2b +from src.common.crypto import blake2b from src.common.db_masterkey import MasterKey -from src.common.misc import ensure_dir -from src.common.statics import ( - BLAKE2_DIGEST_LENGTH, - DIR_USER_DATA, - MASTERKEY_DB_SIZE, - PASSWORD_MIN_BIT_STRENGTH, - SYMMETRIC_KEY_LENGTH, - TX, -) +from src.common.misc import ensure_dir +from src.common.statics import (BLAKE2_DIGEST_LENGTH, DIR_USER_DATA, MASTERKEY_DB_SIZE, PASSWORD_MIN_BIT_STRENGTH, + SYMMETRIC_KEY_LENGTH, TX) + +from tests.utils import cd_unit_test, cleanup, TFCTestCase -from tests.utils import cd_unit_test, cleanup KL = SYMMETRIC_KEY_LENGTH -class TestMasterKey(unittest.TestCase): - input_list = [ - "password", - "different_password", # Invalid new password pair - "password", - "password", # Valid new password pair - "invalid_password", # Invalid login password - "password", - ] # Valid login password +class TestMasterKey(TFCTestCase): + input_list = ['password', 'different_password', # Invalid new password pair + 'password', 'password', # Valid new password pair + 'invalid_password', # Invalid login password + 'password'] # Valid login password def setUp(self) -> None: """Pre-test actions.""" self.unit_test_dir = cd_unit_test() - self.operation = TX - self.file_name = f"{DIR_USER_DATA}{self.operation}_login_data" + self.operation = TX + self.file_name = f"{DIR_USER_DATA}{self.operation}_login_data" def tearDown(self) -> None: """Post-test actions.""" @@ -66,100 +58,104 @@ class TestMasterKey(unittest.TestCase): def test_password_generation(self) -> None: bit_strength, password = MasterKey.generate_master_password() self.assertIsInstance(bit_strength, int) - self.assertIsInstance(password, str) + self.assertIsInstance(password, str) self.assertGreaterEqual(bit_strength, PASSWORD_MIN_BIT_STRENGTH) - self.assertEqual(len(password.split(" ")), 10) + self.assertEqual(len(password.split(' ')), 10) - @mock.patch("time.sleep", return_value=None) - def test_invalid_data_in_db_raises_critical_error(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_invalid_data_in_db_raises_critical_error(self, _: Any) -> None: for delta in [-1, 1]: # Setup ensure_dir(DIR_USER_DATA) data = os.urandom(MASTERKEY_DB_SIZE + delta) data += blake2b(data) - with open(self.file_name, "wb+") as f: + with open(self.file_name, 'wb+') as f: f.write(data) # Test with self.assertRaises(SystemExit): _ = MasterKey(self.operation, local_test=False) - @mock.patch("time.sleep", return_value=None) - def test_load_master_key_with_invalid_data_raises_critical_error(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_load_master_key_with_invalid_data_raises_critical_error(self, _: Any) -> None: # Setup ensure_dir(DIR_USER_DATA) data = os.urandom(MASTERKEY_DB_SIZE + BLAKE2_DIGEST_LENGTH) - with open(self.file_name, "wb+") as f: + with open(self.file_name, 'wb+') as f: f.write(data) # Test with self.assertRaises(SystemExit): _ = MasterKey(self.operation, local_test=False) - @mock.patch("src.common.db_masterkey.MIN_KEY_DERIVATION_TIME", 0.01) - @mock.patch("src.common.db_masterkey.MAX_KEY_DERIVATION_TIME", 0.1) - @mock.patch( - "os.popen", - return_value=MagicMock( - read=MagicMock( - return_value=MagicMock( - splitlines=MagicMock(return_value=["MemAvailable 10240"]) - ) - ) - ), - ) - @mock.patch("os.path.isfile", 
side_effect=[KeyboardInterrupt, False, True, False]) - @mock.patch("getpass.getpass", side_effect=input_list) - @mock.patch("time.sleep", return_value=None) - def test_master_key_generation_and_load(self, *_) -> None: + @mock.patch('src.common.db_masterkey.MIN_KEY_DERIVATION_TIME', 0.01) + @mock.patch('src.common.db_masterkey.MAX_KEY_DERIVATION_TIME', 0.1) + @mock.patch('os.popen', return_value=MagicMock( + read=MagicMock(return_value=MagicMock(splitlines=MagicMock(return_value=["MemAvailable 10240"]))))) + @mock.patch('os.path.isfile', side_effect=[KeyboardInterrupt, False, True, False]) + @mock.patch('getpass.getpass', side_effect=input_list) + @mock.patch('time.sleep', return_value=None) + def test_master_key_generation_and_load(self, *_: Any) -> None: with self.assertRaises(SystemExit): MasterKey(self.operation, local_test=True) master_key = MasterKey(self.operation, local_test=True) self.assertIsInstance(master_key.master_key, bytes) - self.assertEqual( - os.path.getsize(self.file_name), MASTERKEY_DB_SIZE + BLAKE2_DIGEST_LENGTH - ) + self.assertEqual(os.path.getsize(self.file_name), MASTERKEY_DB_SIZE + BLAKE2_DIGEST_LENGTH) master_key2 = MasterKey(self.operation, local_test=True) self.assertIsInstance(master_key2.master_key, bytes) self.assertEqual(master_key.master_key, master_key2.master_key) - @mock.patch("src.common.db_masterkey.MIN_KEY_DERIVATION_TIME", 0.01) - @mock.patch("src.common.db_masterkey.MAX_KEY_DERIVATION_TIME", 0.1) - @mock.patch("src.common.misc.reset_terminal", return_value=None) - @mock.patch( - "os.popen", - return_value=MagicMock( - read=MagicMock( - return_value=MagicMock( - splitlines=MagicMock(return_value=["MemAvailable 10240"]) - ) - ) - ), - ) - @mock.patch("getpass.getpass", side_effect=["generate"]) - @mock.patch("builtins.input", side_effect=[""]) - @mock.patch("time.sleep", return_value=None) - def test_new_masterkey_key_type(self, *_) -> None: + @mock.patch('src.common.db_masterkey.MIN_KEY_DERIVATION_TIME', 0.01) + @mock.patch('src.common.db_masterkey.MAX_KEY_DERIVATION_TIME', 0.1) + @mock.patch('os.popen', return_value=MagicMock( + read=MagicMock(return_value=MagicMock(splitlines=MagicMock(return_value=["MemAvailable 10240"]))))) + @mock.patch('os.path.isfile', side_effect=[False, True, False]) + @mock.patch('getpass.getpass', side_effect=4*['password']) + @mock.patch('time.sleep', return_value=None) + def test_database_data_caching_and_storage_on_command(self, *_: Any): + master_key = MasterKey(self.operation, local_test=True) + master_key.new_master_key(replace=False) + self.assertEqual(len(master_key.database_data), MASTERKEY_DB_SIZE) + master_key.replace_database_data() + self.assertIsNone(master_key.database_data) + self.assertTrue(os.path.isfile(self.file_name)) + + @mock.patch('src.common.db_masterkey.MIN_KEY_DERIVATION_TIME', 0.01) + @mock.patch('src.common.db_masterkey.MAX_KEY_DERIVATION_TIME', 0.1) + @mock.patch('os.popen', return_value=MagicMock( + read=MagicMock(return_value=MagicMock(splitlines=MagicMock(return_value=["MemAvailable 10240"]))))) + @mock.patch('getpass.getpass', side_effect=['generate']) + @mock.patch('builtins.input', side_effect=['']) + @mock.patch('os.system', return_value=None) + @mock.patch('time.sleep', return_value=None) + def test_password_generation(self, *_: Any) -> None: master_key = MasterKey(self.operation, local_test=True) self.assertIsInstance(master_key.master_key, bytes) - @mock.patch( - "src.common.db_masterkey.MasterKey.timed_key_derivation", - MagicMock( - side_effect=[(KL * b"a", 0.01)] - + 100 * [(KL * 
b"b", 5.0)] - + 2 * [(KL * b"a", 2.5)] - + [(KL * b"a", 3.0)] - ), - ) - @mock.patch("os.path.isfile", side_effect=[False, True]) - @mock.patch("getpass.getpass", side_effect=input_list) - @mock.patch("time.sleep", return_value=None) - def test_kd_binary_search(self, *_) -> None: + @mock.patch('src.common.db_masterkey.MasterKey.timed_key_derivation', + MagicMock(side_effect= [(KL*b'a', 0.01)] + + 100 * [(KL*b'b', 5.0)] + + 2 * [(KL*b'a', 2.5)] + + [(KL*b'a', 3.0)])) + @mock.patch('os.path.isfile', side_effect=[False, True]) + @mock.patch('getpass.getpass', side_effect=input_list) + @mock.patch('time.sleep', return_value=None) + def test_kd_binary_search(self, *_: Any) -> None: MasterKey(self.operation, local_test=True) + @mock.patch('src.common.db_masterkey.MIN_KEY_DERIVATION_TIME', 0.01) + @mock.patch('src.common.db_masterkey.MAX_KEY_DERIVATION_TIME', 0.1) + @mock.patch('os.popen', return_value=MagicMock( + read=MagicMock(return_value=MagicMock(splitlines=MagicMock(return_value=["MemAvailable 10240"]))))) + @mock.patch('getpass.getpass', side_effect=['password', 'password', KeyboardInterrupt, 'password', 'invalid_pwd']) + @mock.patch('time.sleep', return_value=None) + def test_authenticate_action(self, *_: Any) -> None: + master_key = MasterKey(self.operation, local_test=True) + self.assert_se("Authentication aborted.", master_key.authenticate_action) + self.assertTrue(master_key.authenticate_action()) -if __name__ == "__main__": + +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/common/test_db_onion.py b/tests/common/test_db_onion.py index c7fdadf..8da8ce7 100644 --- a/tests/common/test_db_onion.py +++ b/tests/common/test_db_onion.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
@@ -23,77 +23,66 @@ import os import unittest from unittest import mock +from typing import Any -from src.common.crypto import encrypt_and_sign +from src.common.crypto import encrypt_and_sign from src.common.db_onion import OnionService -from src.common.misc import ensure_dir, validate_onion_addr -from src.common.statics import ( - DIR_USER_DATA, - ONION_SERVICE_PRIVATE_KEY_LENGTH, - POLY1305_TAG_LENGTH, - TX, - XCHACHA20_NONCE_LENGTH, -) +from src.common.misc import ensure_dir, validate_onion_addr +from src.common.statics import (DIR_USER_DATA, ONION_SERVICE_PRIVATE_KEY_LENGTH, + POLY1305_TAG_LENGTH, TX, XCHACHA20_NONCE_LENGTH) from tests.mock_classes import MasterKey -from tests.utils import cd_unit_test, cleanup, tamper_file +from tests.utils import cd_unit_test, cleanup, tamper_file class TestOnionService(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" self.unit_test_dir = cd_unit_test() - self.master_key = MasterKey() - self.file_name = f"{DIR_USER_DATA}{TX}_onion_db" + self.master_key = MasterKey() + self.file_name = f"{DIR_USER_DATA}{TX}_onion_db" def tearDown(self) -> None: """Post-test actions.""" cleanup(self.unit_test_dir) - @mock.patch("time.sleep", return_value=None) - def test_onion_service_key_generation_and_load(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_onion_service_key_generation_and_load(self, _: Any) -> None: onion_service = OnionService(self.master_key) # Test new OnionService has valid attributes - self.assertIsInstance(onion_service.master_key, MasterKey) - self.assertIsInstance(onion_service.onion_private_key, bytes) + self.assertIsInstance(onion_service.master_key, MasterKey) + self.assertIsInstance(onion_service.onion_private_key, bytes) self.assertIsInstance(onion_service.user_onion_address, str) self.assertFalse(onion_service.is_delivered) - self.assertEqual(validate_onion_addr(onion_service.user_onion_address), "") + self.assertEqual(validate_onion_addr(onion_service.user_onion_address), '') # Test data is stored to a database self.assertTrue(os.path.isfile(self.file_name)) - self.assertEqual( - os.path.getsize(self.file_name), - XCHACHA20_NONCE_LENGTH - + ONION_SERVICE_PRIVATE_KEY_LENGTH - + POLY1305_TAG_LENGTH, - ) + self.assertEqual(os.path.getsize(self.file_name), + XCHACHA20_NONCE_LENGTH + ONION_SERVICE_PRIVATE_KEY_LENGTH + POLY1305_TAG_LENGTH) # Test data can be loaded from the database onion_service2 = OnionService(self.master_key) self.assertIsInstance(onion_service2.onion_private_key, bytes) - self.assertEqual( - onion_service.onion_private_key, onion_service2.onion_private_key - ) + self.assertEqual(onion_service.onion_private_key, onion_service2.onion_private_key) - @mock.patch("time.sleep", return_value=None) - def test_loading_invalid_onion_key_raises_critical_error(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_loading_invalid_onion_key_raises_critical_error(self, _: Any) -> None: # Setup - ct_bytes = encrypt_and_sign( - (ONION_SERVICE_PRIVATE_KEY_LENGTH + 1) * b"a", self.master_key.master_key - ) + ct_bytes = encrypt_and_sign((ONION_SERVICE_PRIVATE_KEY_LENGTH + 1) * b'a', self.master_key.master_key) ensure_dir(DIR_USER_DATA) - with open(f"{DIR_USER_DATA}{TX}_onion_db", "wb+") as f: + with open(f'{DIR_USER_DATA}{TX}_onion_db', 'wb+') as f: f.write(ct_bytes) # Test with self.assertRaises(SystemExit): OnionService(self.master_key) - @mock.patch("time.sleep", return_value=None) - def test_load_of_modified_database_raises_critical_error(self, _) -> None: + 
@mock.patch('time.sleep', return_value=None) + def test_load_of_modified_database_raises_critical_error(self, _: Any) -> None: # Write data to file OnionService(self.master_key) @@ -105,22 +94,17 @@ class TestOnionService(unittest.TestCase): with self.assertRaises(SystemExit): OnionService(self.master_key) - @mock.patch( - "os.getrandom", - side_effect=[ - 1 * b"a", # Initial confirmation code - 32 * b"a", # ed25519 key - 24 * b"a", # Nonce - 1 * b"b", - ], - ) # New confirmation code (different) - @mock.patch("time.sleep", return_value=None) - def test_confirmation_code_generation(self, *_) -> None: + @mock.patch('os.getrandom', side_effect=[ 1 * b'a', # Initial confirmation code + 32 * b'a', # ed25519 key + 24 * b'a', # Nonce + 1 * b'b']) # New confirmation code (different) + @mock.patch('time.sleep', return_value=None) + def test_confirmation_code_generation(self, *_: Any) -> None: onion_service = OnionService(self.master_key) - conf_code = onion_service.conf_code + conf_code = onion_service.conf_code onion_service.new_confirmation_code() self.assertNotEqual(conf_code, onion_service.conf_code) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/common/test_db_settings.py b/tests/common/test_db_settings.py index b6747b0..a0726d5 100644 --- a/tests/common/test_db_settings.py +++ b/tests/common/test_db_settings.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -23,47 +23,40 @@ import os.path import unittest from unittest import mock +from typing import Any from src.common.db_settings import Settings -from src.common.statics import ( - CLEAR_ENTIRE_SCREEN, - CURSOR_LEFT_UP_CORNER, - DIR_USER_DATA, - RX, - SETTING_LENGTH, - TX, -) +from src.common.statics import CLEAR_ENTIRE_SCREEN, CURSOR_LEFT_UP_CORNER, DIR_USER_DATA, RX, SETTING_LENGTH, TX from tests.mock_classes import ContactList, create_group, GroupList, MasterKey -from tests.utils import cd_unit_test, cleanup, tamper_file, TFCTestCase +from tests.utils import cd_unit_test, cleanup, tamper_file, TFCTestCase class TestSettings(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.unit_test_dir = cd_unit_test() - self.file_name = f"{DIR_USER_DATA}{TX}_settings" - self.master_key = MasterKey() - self.settings = Settings(self.master_key, operation=TX, local_test=False) - self.contact_list = ContactList(nicks=[f"contact_{n}" for n in range(18)]) - self.group_list = GroupList(groups=[f"group_{n}" for n in range(18)]) - self.group_list.groups[0] = create_group( - "group_0", [f"contact_{n}" for n in range(18)] - ) - self.args = self.contact_list, self.group_list + self.unit_test_dir = cd_unit_test() + self.file_name = f"{DIR_USER_DATA}{TX}_settings" + self.master_key = MasterKey() + self.settings = Settings(self.master_key, operation=TX, local_test=False) + self.contact_list = ContactList(nicks=[f'contact_{n}' for n in range(18)]) + self.group_list = GroupList(groups=[f'group_{n}' for n in range(18)]) + self.group_list.groups[0] = create_group('group_0', [f'contact_{n}' for n in range(18)]) + self.args = self.contact_list, self.group_list def tearDown(self) -> None: """Post-test actions.""" cleanup(self.unit_test_dir) def test_invalid_type_raises_critical_error_on_store(self) -> None: - self.settings.tm_random_delay = b"bytestring" + self.settings.tm_random_delay = b'bytestring' with self.assertRaises(SystemExit): self.settings.store_settings() 
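
The Onion Service test above (test_onion_service_key_generation_and_load) asserts that the key database on disk is exactly nonce + ciphertext + tag bytes long. A short sketch of that layout, assuming PyNaCl's libsodium bindings (the likely backend behind src.common.crypto's encrypt_and_sign) and the usual sizes for the statics (24-byte XChaCha20 nonce, 32-byte Ed25519 private key, 16-byte Poly1305 tag):

    import os

    from nacl.bindings import crypto_aead_xchacha20poly1305_ietf_encrypt

    key     = os.urandom(32)   # symmetric master key
    nonce   = os.urandom(24)   # XCHACHA20_NONCE_LENGTH
    private = os.urandom(32)   # ONION_SERVICE_PRIVATE_KEY_LENGTH

    # The ciphertext comes back with the 16-byte Poly1305 tag appended.
    ct_and_tag = crypto_aead_xchacha20poly1305_ietf_encrypt(private, None, nonce, key)
    print(len(nonce + ct_and_tag))   # 24 + 32 + 16 = 72 bytes on disk
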
def test_invalid_type_raises_critical_error_on_load(self) -> None: with self.assertRaises(SystemExit): - self.settings.nc_bypass_messages = b"bytestring" + self.settings.nc_bypass_messages = b'bytestring' self.settings.load_settings() def test_store_and_load_tx_settings(self) -> None: @@ -96,9 +89,7 @@ class TestSettings(TFCTestCase): self.settings.store_settings() # Test reading from database works normally - self.assertIsInstance( - Settings(self.master_key, operation=TX, local_test=False), Settings - ) + self.assertIsInstance(Settings(self.master_key, operation=TX, local_test=False), Settings) # Test loading of the tampered database raises CriticalError tamper_file(self.file_name, tamper_size=1) @@ -106,160 +97,63 @@ class TestSettings(TFCTestCase): Settings(self.master_key, operation=TX, local_test=False) def test_invalid_type_raises_critical_error_when_changing_settings(self) -> None: - self.settings.traffic_masking = b"bytestring" + self.settings.traffic_masking = b'bytestring' with self.assertRaises(SystemExit): - self.assertIsNone( - self.settings.change_setting("traffic_masking", "True", *self.args) - ) + self.assertIsNone(self.settings.change_setting('traffic_masking', 'True', *self.args)) def test_change_settings(self) -> None: - self.assert_se( - "Error: Invalid setting value 'Falsee'.", - self.settings.change_setting, - "disable_gui_dialog", - "Falsee", - *self.args, - ) - self.assert_se( - "Error: Invalid setting value '1.1'.", - self.settings.change_setting, - "max_number_of_group_members", - "1.1", - *self.args, - ) - self.assert_se( - "Error: Invalid setting value '18446744073709551616'.", - self.settings.change_setting, - "max_number_of_contacts", - str(2 ** 64), - *self.args, - ) - self.assert_se( - "Error: Invalid setting value '-1.1'.", - self.settings.change_setting, - "tm_static_delay", - "-1.1", - *self.args, - ) - self.assert_se( - "Error: Invalid setting value 'True'.", - self.settings.change_setting, - "tm_static_delay", - "True", - *self.args, - ) + self.assert_se("Error: Invalid setting value 'Falsee'.", + self.settings.change_setting, 'disable_gui_dialog', 'Falsee', *self.args) + self.assert_se("Error: Invalid setting value '1.1'.", + self.settings.change_setting, 'max_number_of_group_members', '1.1', *self.args) + self.assert_se("Error: Invalid setting value '18446744073709551616'.", + self.settings.change_setting, 'max_number_of_contacts', str(2 ** 64), *self.args) + self.assert_se("Error: Invalid setting value '-1.1'.", + self.settings.change_setting, 'tm_static_delay', '-1.1', *self.args) + self.assert_se("Error: Invalid setting value 'True'.", + self.settings.change_setting, 'tm_static_delay', 'True', *self.args) - self.assertIsNone( - self.settings.change_setting("traffic_masking", "True", *self.args) - ) - self.assertIsNone( - self.settings.change_setting( - "max_number_of_group_members", "100", *self.args - ) - ) + self.assertIsNone(self.settings.change_setting('traffic_masking', 'True', *self.args)) + self.assertIsNone(self.settings.change_setting('max_number_of_group_members', '100', *self.args)) - @mock.patch("builtins.input", side_effect=["No", "Yes"]) - def test_validate_key_value_pair(self, _) -> None: - self.assert_se( - "Error: Database padding settings must be divisible by 10.", - self.settings.validate_key_value_pair, - "max_number_of_group_members", - 0, - *self.args, - ) - self.assert_se( - "Error: Database padding settings must be divisible by 10.", - self.settings.validate_key_value_pair, - "max_number_of_group_members", - 18, - 
*self.args, - ) - self.assert_se( - "Error: Database padding settings must be divisible by 10.", - self.settings.validate_key_value_pair, - "max_number_of_groups", - 18, - *self.args, - ) - self.assert_se( - "Error: Database padding settings must be divisible by 10.", - self.settings.validate_key_value_pair, - "max_number_of_contacts", - 18, - *self.args, - ) - self.assert_se( - "Error: Can't set the max number of members lower than 20.", - self.settings.validate_key_value_pair, - "max_number_of_group_members", - 10, - *self.args, - ) - self.assert_se( - "Error: Can't set the max number of groups lower than 20.", - self.settings.validate_key_value_pair, - "max_number_of_groups", - 10, - *self.args, - ) - self.assert_se( - "Error: Can't set the max number of contacts lower than 20.", - self.settings.validate_key_value_pair, - "max_number_of_contacts", - 10, - *self.args, - ) - self.assert_se( - "Error: Too small value for message notify duration.", - self.settings.validate_key_value_pair, - "new_message_notify_duration", - 0.04, - *self.args, - ) - self.assert_se( - "Error: Can't set static delay lower than 0.1.", - self.settings.validate_key_value_pair, - "tm_static_delay", - 0.01, - *self.args, - ) - self.assert_se( - "Error: Can't set random delay lower than 0.1.", - self.settings.validate_key_value_pair, - "tm_random_delay", - 0.01, - *self.args, - ) - self.assert_se( - "Aborted traffic masking setting change.", - self.settings.validate_key_value_pair, - "tm_random_delay", - 0.1, - *self.args, - ) + @mock.patch('builtins.input', side_effect=['No', 'Yes']) + def test_validate_key_value_pair(self, _: Any) -> None: + self.assert_se("Error: Database padding settings must be divisible by 10.", + self.settings.validate_key_value_pair, 'max_number_of_group_members', 0, *self.args) + self.assert_se("Error: Database padding settings must be divisible by 10.", + self.settings.validate_key_value_pair, 'max_number_of_group_members', 18, *self.args) + self.assert_se("Error: Database padding settings must be divisible by 10.", + self.settings.validate_key_value_pair, 'max_number_of_groups', 18, *self.args) + self.assert_se("Error: Database padding settings must be divisible by 10.", + self.settings.validate_key_value_pair, 'max_number_of_contacts', 18, *self.args) + self.assert_se("Error: Can't set the max number of members lower than 20.", + self.settings.validate_key_value_pair, 'max_number_of_group_members', 10, *self.args) + self.assert_se("Error: Can't set the max number of groups lower than 20.", + self.settings.validate_key_value_pair, 'max_number_of_groups', 10, *self.args) + self.assert_se("Error: Can't set the max number of contacts lower than 20.", + self.settings.validate_key_value_pair, 'max_number_of_contacts', 10, *self.args) + self.assert_se("Error: Too small value for message notify duration.", + self.settings.validate_key_value_pair, 'new_message_notify_duration', 0.04, *self.args) + self.assert_se("Error: Can't set static delay lower than 0.1.", + self.settings.validate_key_value_pair, 'tm_static_delay', 0.01, *self.args) + self.assert_se("Error: Can't set random delay lower than 0.1.", + self.settings.validate_key_value_pair, 'tm_random_delay', 0.01, *self.args) + self.assert_se("Aborted traffic masking setting change.", + self.settings.validate_key_value_pair, 'tm_random_delay', 0.1, *self.args) - self.assertIsNone( - self.settings.validate_key_value_pair("serial_baudrate", 9600, *self.args) - ) - self.assertIsNone( - self.settings.validate_key_value_pair("tm_static_delay", 1, 
*self.args) - ) + self.assertIsNone(self.settings.validate_key_value_pair("serial_baudrate", 9600, *self.args)) + self.assertIsNone(self.settings.validate_key_value_pair("tm_static_delay", 1, *self.args)) - @mock.patch("shutil.get_terminal_size", return_value=(64, 64)) - def test_too_narrow_terminal_raises_fr_when_printing_settings(self, _) -> None: + @mock.patch('shutil.get_terminal_size', return_value=(64, 64)) + def test_too_narrow_terminal_raises_fr_when_printing_settings(self, _: Any) -> None: # Test - self.assert_se( - "Error: Screen width is too small.", self.settings.print_settings - ) + self.assert_se("Error: Screen width is too small.", self.settings.print_settings) def test_print_settings(self) -> None: self.settings.max_number_of_group_members = 30 - self.settings.log_messages_by_default = True - self.settings.tm_static_delay = 10.2 - self.assert_prints( - CLEAR_ENTIRE_SCREEN - + CURSOR_LEFT_UP_CORNER - + """\ + self.settings.log_messages_by_default = True + self.settings.tm_static_delay = 10.2 + self.assert_prints(CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER + """\ Setting name Current value Default value Description ──────────────────────────────────────────────────────────────────────────────── @@ -351,10 +245,8 @@ max_decompress_size 100000000 100000000 Max size decompressing file -""", - self.settings.print_settings, - ) +""", self.settings.print_settings) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/common/test_encoding.py b/tests/common/test_encoding.py index 4b8773a..d48d248 100644 --- a/tests/common/test_encoding.py +++ b/tests/common/test_encoding.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
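
The change_setting and validate_key_value_pair tests above pin down how Settings parses and range-checks user-supplied values. The following is a compact reconstruction of the parsing step those error strings imply, offered as an assumption about the real implementation in src.common.db_settings and shown only to make the expected failures easier to follow:

    from src.common.exceptions import SoftError

    def parse_setting_value(current_value, value_str: str):
        """Parse value_str according to the type of the setting's current value."""
        if isinstance(current_value, bool):       # Check bool before int: bool is an int subclass.
            if value_str.capitalize() not in ('True', 'False'):
                raise SoftError(f"Error: Invalid setting value '{value_str}'.")
            return value_str.capitalize() == 'True'
        if isinstance(current_value, int):        # Rejects '1.1' and values >= 2**64.
            if not value_str.isdigit() or int(value_str) >= 2 ** 64:
                raise SoftError(f"Error: Invalid setting value '{value_str}'.")
            return int(value_str)
        if isinstance(current_value, float):      # Rejects 'True' and negative values like '-1.1'.
            try:
                value = float(value_str)
            except ValueError:
                raise SoftError(f"Error: Invalid setting value '{value_str}'.") from None
            if value < 0.0:
                raise SoftError(f"Error: Invalid setting value '{value_str}'.")
            return value
        return value_str
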
@@ -25,70 +25,38 @@ import unittest from datetime import datetime -from src.common.encoding import ( - b58encode, - bool_to_bytes, - double_to_bytes, - str_to_bytes, - int_to_bytes, -) -from src.common.encoding import ( - b58decode, - bytes_to_bool, - bytes_to_double, - bytes_to_str, - bytes_to_int, -) -from src.common.encoding import ( - onion_address_to_pub_key, - unicode_padding, - pub_key_to_short_address, - b85encode, -) -from src.common.encoding import ( - pub_key_to_onion_address, - rm_padding_str, - bytes_to_timestamp, - b10encode, -) -from src.common.statics import ( - ENCODED_BOOLEAN_LENGTH, - ENCODED_FLOAT_LENGTH, - ENCODED_INTEGER_LENGTH, - FINGERPRINT_LENGTH, - ONION_SERVICE_PUBLIC_KEY_LENGTH, - PADDED_UTF32_STR_LENGTH, - PADDING_LENGTH, - SYMMETRIC_KEY_LENGTH, - TFC_PUBLIC_KEY_LENGTH, - TRUNC_ADDRESS_LENGTH, -) +from src.common.encoding import b58encode, bool_to_bytes, double_to_bytes, str_to_bytes, int_to_bytes +from src.common.encoding import b58decode, bytes_to_bool, bytes_to_double, bytes_to_str, bytes_to_int +from src.common.encoding import onion_address_to_pub_key, unicode_padding, pub_key_to_short_address, b85encode +from src.common.encoding import pub_key_to_onion_address, rm_padding_str, bytes_to_timestamp, b10encode +from src.common.statics import (ENCODED_BOOLEAN_LENGTH, ENCODED_FLOAT_LENGTH, ENCODED_INTEGER_LENGTH, + FINGERPRINT_LENGTH, ONION_SERVICE_PUBLIC_KEY_LENGTH, PADDED_UTF32_STR_LENGTH, + PADDING_LENGTH, SYMMETRIC_KEY_LENGTH, TFC_PUBLIC_KEY_LENGTH, TRUNC_ADDRESS_LENGTH) class TestBase58EncodeAndDecode(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.key = SYMMETRIC_KEY_LENGTH * b"\x01" + self.key = SYMMETRIC_KEY_LENGTH * b'\x01' def test_encoding_and_decoding_of_random_local_keys(self) -> None: for _ in range(100): - key = os.urandom(SYMMETRIC_KEY_LENGTH) + key = os.urandom(SYMMETRIC_KEY_LENGTH) encoded = b58encode(key) decoded = b58decode(encoded) self.assertEqual(key, decoded) def test_encoding_and_decoding_of_random_public_keys(self) -> None: for _ in range(100): - key = os.urandom(TFC_PUBLIC_KEY_LENGTH) - encoded = b58encode(key, public_key=True) + key = os.urandom(TFC_PUBLIC_KEY_LENGTH) + encoded = b58encode(key, public_key=True) decoded = b58decode(encoded, public_key=True) self.assertEqual(key, decoded) def test_invalid_decoding(self) -> None: - encoded = b58encode( - self.key - ) # 5HpjE2Hs7vjU4SN3YyPQCdhzCu92WoEeuE6PWNuiPyTu3ESGnzn - changed = encoded[:-1] + "a" + encoded = b58encode(self.key) # 5HpjE2Hs7vjU4SN3YyPQCdhzCu92WoEeuE6PWNuiPyTu3ESGnzn + changed = encoded[:-1] + 'a' with self.assertRaises(ValueError): b58decode(changed) @@ -106,9 +74,8 @@ class TestBase58EncodeAndDecode(unittest.TestCase): """Test vectors are available at https://en.bitcoin.it/wiki/Wallet_import_format """ - byte_key = bytes.fromhex( - "0C28FCA386C7A227600B2FE50B7CAE11" "EC86D3BF1FBE471BE89827E19D72AA1D" - ) + byte_key = bytes.fromhex("0C28FCA386C7A227600B2FE50B7CAE11" + "EC86D3BF1FBE471BE89827E19D72AA1D") b58_key = "5HueCGU8rMjxEXxiPuD5BDku4MkFqeZyd4dZ1jvhTVqvbTLvyTJ" @@ -117,99 +84,97 @@ class TestBase58EncodeAndDecode(unittest.TestCase): class TestBase85Encode(unittest.TestCase): + def test_b85encode(self) -> None: message = os.urandom(100) - self.assertEqual(b85encode(message), base64.b85encode(message).decode()) + self.assertEqual(b85encode(message), + base64.b85encode(message).decode()) class TestBase10Encode(unittest.TestCase): + def test_b10encode(self) -> None: - self.assertEqual( - b10encode(FINGERPRINT_LENGTH * b"a"), - 
"44046402572626160612103472728795008085361523578694645928734845681441465000289", - ) + self.assertEqual(b10encode(FINGERPRINT_LENGTH * b'a'), + '44046402572626160612103472728795008085361523578694645928734845681441465000289') class TestUnicodePadding(unittest.TestCase): + def test_padding(self) -> None: for s in range(0, PADDING_LENGTH): - string = s * "m" + string = s * 'm' padded = unicode_padding(string) self.assertEqual(len(padded), PADDING_LENGTH) # Verify removal of padding doesn't alter the string - self.assertEqual(string, padded[: -ord(padded[-1:])]) + self.assertEqual(string, padded[:-ord(padded[-1:])]) def test_oversize_msg_raises_critical_error(self) -> None: - for s in range(PADDING_LENGTH, PADDING_LENGTH + 1): + for s in range(PADDING_LENGTH, PADDING_LENGTH+1): with self.assertRaises(SystemExit): - unicode_padding(s * "m") + unicode_padding(s * 'm') class TestRmPaddingStr(unittest.TestCase): + def test_padding_removal(self) -> None: for i in range(0, 1000): - string = i * "m" + string = i * 'm' length = PADDING_LENGTH - (len(string) % PADDING_LENGTH) padded = string + length * chr(length) self.assertEqual(rm_padding_str(padded), string) class TestConversions(unittest.TestCase): + def test_conversion_back_and_forth(self) -> None: pub_key = os.urandom(SYMMETRIC_KEY_LENGTH) - self.assertEqual( - onion_address_to_pub_key(pub_key_to_onion_address(pub_key)), pub_key - ) + self.assertEqual(onion_address_to_pub_key(pub_key_to_onion_address(pub_key)), pub_key) def test_pub_key_to_short_addr(self) -> None: - self.assertEqual( - len(pub_key_to_short_address(bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH))), - TRUNC_ADDRESS_LENGTH, - ) + self.assertEqual(len(pub_key_to_short_address(bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH))), + TRUNC_ADDRESS_LENGTH) - self.assertIsInstance( - pub_key_to_short_address(bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH)), str - ) + self.assertIsInstance(pub_key_to_short_address(bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH)), str) def test_bool_to_bytes(self) -> None: - self.assertEqual(bool_to_bytes(False), b"\x00") - self.assertEqual(bool_to_bytes(True), b"\x01") + self.assertEqual( bool_to_bytes(False), b'\x00') + self.assertEqual( bool_to_bytes(True), b'\x01') self.assertEqual(len(bool_to_bytes(True)), ENCODED_BOOLEAN_LENGTH) def test_bytes_to_bool(self) -> None: - self.assertEqual(bytes_to_bool(b"\x00"), False) - self.assertEqual(bytes_to_bool(b"\x01"), True) + self.assertEqual(bytes_to_bool(b'\x00'), False) + self.assertEqual(bytes_to_bool(b'\x01'), True) def test_int_to_bytes(self) -> None: - self.assertEqual(int_to_bytes(1), b"\x00\x00\x00\x00\x00\x00\x00\x01") + self.assertEqual( int_to_bytes(1), b'\x00\x00\x00\x00\x00\x00\x00\x01') self.assertEqual(len(int_to_bytes(1)), ENCODED_INTEGER_LENGTH) def test_bytes_to_int(self) -> None: - self.assertEqual(bytes_to_int(b"\x00\x00\x00\x00\x00\x00\x00\x01"), 1) + self.assertEqual(bytes_to_int(b'\x00\x00\x00\x00\x00\x00\x00\x01'), 1) def test_double_to_bytes(self) -> None: - self.assertEqual(double_to_bytes(1.0), bytes.fromhex("000000000000f03f")) - self.assertEqual(double_to_bytes(1.1), bytes.fromhex("9a9999999999f13f")) + self.assertEqual( double_to_bytes(1.0), bytes.fromhex('000000000000f03f')) + self.assertEqual( double_to_bytes(1.1), bytes.fromhex('9a9999999999f13f')) self.assertEqual(len(double_to_bytes(1.1)), ENCODED_FLOAT_LENGTH) def test_bytes_to_double(self) -> None: - self.assertEqual(bytes_to_double(bytes.fromhex("000000000000f03f")), 1.0) - self.assertEqual(bytes_to_double(bytes.fromhex("9a9999999999f13f")), 1.1) + 
self.assertEqual(bytes_to_double(bytes.fromhex('000000000000f03f')), 1.0) + self.assertEqual(bytes_to_double(bytes.fromhex('9a9999999999f13f')), 1.1) def test_str_to_bytes(self) -> None: - encoded = str_to_bytes("test") + encoded = str_to_bytes('test') self.assertIsInstance(encoded, bytes) self.assertEqual(len(encoded), PADDED_UTF32_STR_LENGTH) def test_bytes_to_str(self) -> None: - encoded = str_to_bytes("test") - self.assertEqual(bytes_to_str(encoded), "test") + encoded = str_to_bytes('test') + self.assertEqual(bytes_to_str(encoded), 'test') def test_bytes_to_timestamp(self) -> None: - encoded = bytes.fromhex("00000000") + encoded = bytes.fromhex('00000000') self.assertIsInstance(bytes_to_timestamp(encoded), datetime) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/common/test_exceptions.py b/tests/common/test_exceptions.py index 59c7afd..6a4917d 100644 --- a/tests/common/test_exceptions.py +++ b/tests/common/test_exceptions.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -22,38 +22,41 @@ along with TFC. If not, see . import unittest from src.common.exceptions import CriticalError, SoftError, graceful_exit -from tests.mock_classes import RxWindow +from tests.mock_classes import RxWindow class TestCriticalError(unittest.TestCase): + def test_critical_error(self) -> None: with self.assertRaises(SystemExit): - CriticalError("test") + CriticalError('test') class TestSoftError(unittest.TestCase): + def test_function_return(self) -> None: - error = SoftError("test message") - self.assertEqual(error.message, "test message") + error = SoftError('test message') + self.assertEqual(error.message, 'test message') - error = SoftError("test message", head_clear=True) - self.assertEqual(error.message, "test message") + error = SoftError('test message', head_clear=True) + self.assertEqual(error.message, 'test message') - error = SoftError("test message", tail_clear=True) - self.assertEqual(error.message, "test message") + error = SoftError('test message', tail_clear=True) + self.assertEqual(error.message, 'test message') - error = SoftError("test message", window=RxWindow()) - self.assertEqual(error.message, "test message") + error = SoftError('test message', window=RxWindow()) + self.assertEqual(error.message, 'test message') class TestGracefulExit(unittest.TestCase): + def test_graceful_exit(self) -> None: with self.assertRaises(SystemExit): - graceful_exit("test message") - graceful_exit("test message", clear=False) - graceful_exit("test message", exit_code=1) - graceful_exit("test message", exit_code=2) + graceful_exit('test message') + graceful_exit('test message', clear=False) + graceful_exit('test message', exit_code=1) + graceful_exit('test message', exit_code=2) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/common/test_gateway.py b/tests/common/test_gateway.py index 020245f..c03dda5 100644 --- a/tests/common/test_gateway.py +++ b/tests/common/test_gateway.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
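
The TestBase58EncodeAndDecode case earlier in this diff checks b58encode against the classic Wallet Import Format vector from https://en.bitcoin.it/wiki/Wallet_import_format. For reference, a self-contained sketch of that construction (0x80 version byte, double SHA-256 checksum, Base58 alphabet); judging by the test vector, TFC's b58encode/b58decode in src.common.encoding implement the same checksum-and-version scheme internally:

    import hashlib

    B58_ALPHABET = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'

    def b58check_wif(private_key: bytes) -> str:
        """Encode a private key in Wallet Import Format (mainnet, uncompressed)."""
        data  = b'\x80' + private_key
        data += hashlib.sha256(hashlib.sha256(data).digest()).digest()[:4]   # 4-byte checksum
        number  = int.from_bytes(data, 'big')
        encoded = ''
        while number:
            number, digit = divmod(number, 58)
            encoded = B58_ALPHABET[digit] + encoded
        pad = len(data) - len(data.lstrip(b'\x00'))   # Leading zero bytes map to '1'.
        return pad * '1' + encoded

    key = bytes.fromhex('0C28FCA386C7A227600B2FE50B7CAE11EC86D3BF1FBE471BE89827E19D72AA1D')
    print(b58check_wif(key))   # 5HueCGU8rMjxEXxiPuD5BDku4MkFqeZyd4dZ1jvhTVqvbTLvyTJ
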
@@ -23,107 +23,88 @@ import os import unittest import socket -from datetime import datetime -from unittest import mock +from datetime import datetime +from unittest import mock from unittest.mock import MagicMock +from typing import Any from serial import SerialException -from src.common.crypto import blake2b -from src.common.gateway import gateway_loop, Gateway, GatewaySettings -from src.common.misc import ensure_dir +from src.common.crypto import blake2b +from src.common.gateway import gateway_loop, Gateway, GatewaySettings +from src.common.misc import ensure_dir from src.common.reed_solomon import RSCodec -from src.common.statics import ( - DIR_USER_DATA, - GATEWAY_QUEUE, - NC, - PACKET_CHECKSUM_LENGTH, - RX, - TX, -) +from src.common.statics import DIR_USER_DATA, GATEWAY_QUEUE, NC, PACKET_CHECKSUM_LENGTH, RX, TX from tests.mock_classes import Settings -from tests.utils import cd_unit_test, cleanup, gen_queue_dict, tear_queues, TFCTestCase +from tests.utils import cd_unit_test, cleanup, gen_queue_dict, tear_queues, TFCTestCase class TestGatewayLoop(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" self.unit_test_dir = cd_unit_test() - self.queues = gen_queue_dict() + self.queues = gen_queue_dict() def tearDown(self) -> None: """Post-test actions.""" cleanup(self.unit_test_dir) tear_queues(self.queues) - @mock.patch( - "multiprocessing.connection.Listener", - return_value=MagicMock( - accept=lambda: MagicMock(recv=MagicMock(return_value="message")) - ), - ) - def test_loop(self, _) -> None: + @mock.patch('multiprocessing.connection.Listener', + return_value=MagicMock(accept=lambda: MagicMock(recv=MagicMock(return_value='message')))) + def test_loop(self, _: Any) -> None: gateway = Gateway(operation=RX, local_test=True, dd_sockets=False) self.assertIsNone(gateway_loop(self.queues, gateway, unit_test=True)) data = self.queues[GATEWAY_QUEUE].get() self.assertIsInstance(data[0], datetime) - self.assertEqual(data[1], "message") + self.assertEqual(data[1], 'message') class TestGatewaySerial(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" self.unit_test_dir = cd_unit_test() - self.settings = Settings(session_usb_serial_adapter=True) + self.settings = Settings(session_usb_serial_adapter=True) def tearDown(self) -> None: """Post-test actions.""" cleanup(self.unit_test_dir) - @mock.patch("time.sleep", return_value=None) - @mock.patch("serial.Serial", return_value=MagicMock()) - @mock.patch("os.listdir", side_effect=[["ttyUSB0"], ["ttyUSB0"]]) - @mock.patch("builtins.input", side_effect=["Yes"]) - def test_search_and_establish_serial(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('serial.Serial', return_value=MagicMock()) + @mock.patch('os.listdir', side_effect=[['ttyUSB0'], ['ttyUSB0']]) + @mock.patch('builtins.input', side_effect=['Yes']) + def test_search_and_establish_serial(self, *_: Any) -> None: gateway = Gateway(operation=RX, local_test=False, dd_sockets=False) self.assertIsInstance(gateway.rs, RSCodec) self.assertIs(gateway.tx_serial, gateway.rx_serial) - @mock.patch("time.sleep", return_value=None) - @mock.patch("serial.Serial", side_effect=SerialException) - @mock.patch("os.listdir", side_effect=[["ttyUSB0"], ["ttyUSB0"]]) - @mock.patch("builtins.input", side_effect=["Yes"]) - def test_serialexception_during_establish_exists(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('serial.Serial', side_effect=SerialException) + @mock.patch('os.listdir', side_effect=[['ttyUSB0'], ['ttyUSB0']]) + 
@mock.patch('builtins.input', side_effect=['Yes']) + def test_serialexception_during_establish_exists(self, *_: Any) -> None: with self.assertRaises(SystemExit): Gateway(operation=RX, local_test=False, dd_sockets=False) - @mock.patch("time.sleep", return_value=None) - @mock.patch( - "serial.Serial", - return_value=MagicMock(write=MagicMock(side_effect=[SerialException, None])), - ) - @mock.patch("os.listdir", side_effect=[["ttyUSB0"], ["ttyUSB0"], ["ttyUSB0"]]) - @mock.patch("builtins.input", side_effect=["Yes"]) - def test_write_serial_(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('serial.Serial', return_value=MagicMock(write=MagicMock(side_effect=[SerialException, None]))) + @mock.patch('os.listdir', side_effect=[['ttyUSB0'], ['ttyUSB0'], ['ttyUSB0']]) + @mock.patch('builtins.input', side_effect=['Yes']) + def test_write_serial_(self, *_: Any) -> None: gateway = Gateway(operation=RX, local_test=False, dd_sockets=False) self.assertIsNone(gateway.write(b"message")) @mock.patch("time.sleep", return_value=None) - @mock.patch( - "serial.Serial", - return_value=MagicMock( - read_all=MagicMock( - side_effect=[KeyboardInterrupt, SerialException, b"", b"1", b"2", b""] - ) - ), - ) + @mock.patch("serial.Serial", return_value=MagicMock(read_all=MagicMock( + side_effect=[KeyboardInterrupt, SerialException, b"", b"1", b"2", b""]))) @mock.patch("os.listdir", side_effect=[["ttyUSB0"], ["ttyUSB0"], ["ttyUSB0"]]) @mock.patch("builtins.input", side_effect=["Yes"]) - def test_serial_uninitialized_serial_interface_for_read_raises_critical_error( - self, *_ - ) -> None: + def test_serial_uninitialized_serial_interface_for_read_raises_critical_error(self, *_) -> None: # Setup gateway = Gateway(operation=RX, local_test=False, dd_sockets=False) gateway.rx_serial = None @@ -132,13 +113,11 @@ class TestGatewaySerial(TFCTestCase): with self.assertRaises(SystemExit): gateway.read() - @mock.patch("time.sleep", return_value=None) - @mock.patch("os.listdir", side_effect=[["ttyUSB0"], ["ttyUSB0"], ["ttyUSB0"]]) @mock.patch("multiprocessing.connection.Listener", MagicMock()) + @mock.patch("time.sleep", return_value=None) + @mock.patch("os.listdir", side_effect=[["ttyUSB0"], ["ttyUSB0"], ["ttyUSB0"]]) @mock.patch("builtins.input", side_effect=["Yes"]) - def test_serial_uninitialized_socket_interface_for_read_raises_critical_error( - self, *_ - ) -> None: + def test_serial_uninitialized_socket_interface_for_read_raises_critical_error(self, *_) -> None: # Setup gateway = Gateway(operation=RX, local_test=True, dd_sockets=False) gateway.rx_socket = None @@ -147,15 +126,11 @@ class TestGatewaySerial(TFCTestCase): with self.assertRaises(SystemExit): gateway.read() + @mock.patch("multiprocessing.connection.Listener", return_value=MagicMock( + accept=MagicMock(return_value=MagicMock(recv=MagicMock(return_value=b"12"))))) @mock.patch("time.monotonic", side_effect=[1, 2, 3]) - @mock.patch("time.sleep", return_value=None) - @mock.patch( - "multiprocessing.connection.Listener", - return_value=MagicMock( - accept=MagicMock(return_value=MagicMock(recv=MagicMock(return_value=b"12"))) - ), - ) - @mock.patch("os.listdir", side_effect=[["ttyUSB0"], ["ttyUSB0"], ["ttyUSB0"]]) + @mock.patch("time.sleep", return_value=None) + @mock.patch("os.listdir", side_effect=[["ttyUSB0"], ["ttyUSB0"], ["ttyUSB0"]]) @mock.patch("builtins.input", side_effect=["Yes"]) def test_read_socket(self, *_) -> None: gateway = Gateway(operation=RX, local_test=True, dd_sockets=False) @@ -163,25 +138,19 @@ class 
TestGatewaySerial(TFCTestCase): self.assertEqual(data, b"12") @mock.patch("time.monotonic", side_effect=[1, 2, 3]) - @mock.patch("time.sleep", return_value=None) - @mock.patch( - "serial.Serial", - return_value=MagicMock( - read_all=MagicMock( - side_effect=[KeyboardInterrupt, SerialException, b"", b"1", b"2", b""] - ) - ), - ) - @mock.patch("os.listdir", side_effect=[["ttyUSB0"], ["ttyUSB0"], ["ttyUSB0"]]) + @mock.patch("time.sleep", return_value=None) + @mock.patch("serial.Serial", return_value=MagicMock( + read_all=MagicMock(side_effect=[KeyboardInterrupt, SerialException, b"", b"1", b"2", b""]))) + @mock.patch("os.listdir", side_effect=[["ttyUSB0"], ["ttyUSB0"], ["ttyUSB0"]]) @mock.patch("builtins.input", side_effect=["Yes"]) def test_read_serial(self, *_) -> None: gateway = Gateway(operation=RX, local_test=False, dd_sockets=False) data = gateway.read() self.assertEqual(data, b"12") - @mock.patch("time.sleep", return_value=None) - @mock.patch("serial.Serial", return_value=MagicMock()) - @mock.patch("os.listdir", side_effect=[["ttyUSB0"], ["ttyUSB0"]]) + @mock.patch("time.sleep", return_value=None) + @mock.patch("serial.Serial", return_value=MagicMock()) + @mock.patch("os.listdir", side_effect=[["ttyUSB0"], ["ttyUSB0"]]) @mock.patch("builtins.input", side_effect=["Yes"]) def test_add_error_correction(self, *_) -> None: gateway = Gateway(operation=RX, local_test=False, dd_sockets=False) @@ -190,7 +159,7 @@ class TestGatewaySerial(TFCTestCase): # Test BLAKE2b based checksum gateway.settings.session_serial_error_correction = 0 self.assertEqual( - gateway.add_error_correction(packet,), + gateway.add_error_correction(packet), packet + blake2b(packet, digest_size=PACKET_CHECKSUM_LENGTH), ) @@ -201,133 +170,109 @@ class TestGatewaySerial(TFCTestCase): gateway.add_error_correction(packet), gateway.rs.encode(packet) ) - @mock.patch("time.sleep", return_value=None) - @mock.patch("serial.Serial", return_value=MagicMock()) - @mock.patch("os.listdir", side_effect=[["ttyUSB0"], ["ttyUSB0"]]) - @mock.patch("builtins.input", side_effect=["Yes"]) - def test_detect_errors(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('serial.Serial', return_value=MagicMock()) + @mock.patch('os.listdir', side_effect=[['ttyUSB0'], ['ttyUSB0']]) + @mock.patch('builtins.input', side_effect=['Yes']) + def test_detect_errors(self, *_: Any) -> None: gateway = Gateway(operation=RX, local_test=False, dd_sockets=False) - packet = b"packet" + packet = b'packet' # Test BLAKE2b based checksum gateway.settings.session_serial_error_correction = 0 - self.assertEqual( - gateway.detect_errors(gateway.add_error_correction(packet)), packet - ) + self.assertEqual(gateway.detect_errors(gateway.add_error_correction(packet)), + packet) # Test unrecoverable error raises FR - self.assert_se( - "Warning! Received packet had an invalid checksum.", - gateway.detect_errors, - 300 * b"a", - ) + self.assert_se("Warning! 
Received packet had an invalid checksum.", + gateway.detect_errors, 300 * b'a') # Test Reed-Solomon erasure code gateway.settings.session_serial_error_correction = 5 gateway.rs = RSCodec(gateway.settings.session_serial_error_correction) - self.assertEqual( - gateway.detect_errors(gateway.add_error_correction(packet)), packet - ) + self.assertEqual(gateway.detect_errors(gateway.add_error_correction(packet)), + packet) # Test unrecoverable error raises FR - self.assert_se( - "Error: Reed-Solomon failed to correct errors in the received packet.", - gateway.detect_errors, - 300 * b"a", - ) + self.assert_se("Error: Reed-Solomon failed to correct errors in the received packet.", + gateway.detect_errors, 300 * b'a') - @mock.patch("time.sleep", return_value=None) - @mock.patch("serial.Serial", return_value=MagicMock()) - @mock.patch( - "os.listdir", - side_effect=[["ttyUSB0"], ["ttyUSB0"], [""], ["ttyUSB0"], ["ttyS0"], [""]], - ) - @mock.patch("builtins.input", side_effect=["Yes"]) - def test_search_serial_interfaces(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('serial.Serial', return_value=MagicMock()) + @mock.patch('os.listdir', side_effect=[['ttyUSB0'], ['ttyUSB0'], [''], ['ttyUSB0'], ['ttyS0'], ['']]) + @mock.patch('builtins.input', side_effect=['Yes']) + def test_search_serial_interfaces(self, *_: Any) -> None: gateway = Gateway(operation=RX, local_test=False, dd_sockets=False) interface = gateway.search_serial_interface() - self.assertEqual(interface, "/dev/ttyUSB0") + self.assertEqual(interface, '/dev/ttyUSB0') # Test unavailable system serial exits: gateway.settings.session_usb_serial_adapter = False interface = gateway.search_serial_interface() - self.assertEqual(interface, "/dev/ttyS0") + self.assertEqual(interface, '/dev/ttyS0') with self.assertRaises(SystemExit): gateway.search_serial_interface() - @mock.patch("time.sleep", return_value=None) - @mock.patch("multiprocessing.connection.Client", MagicMock()) - @mock.patch("multiprocessing.connection.Listener", MagicMock()) - def test_establish_local_testing_gateway(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('multiprocessing.connection.Client', MagicMock()) + @mock.patch('multiprocessing.connection.Listener', MagicMock()) + def test_establish_local_testing_gateway(self, *_: Any) -> None: gateway = Gateway(operation=NC, local_test=True, dd_sockets=False) self.assertIsInstance(gateway.rs, RSCodec) - @mock.patch("time.sleep", return_value=None) - @mock.patch( - "multiprocessing.connection.Client", MagicMock(side_effect=KeyboardInterrupt) - ) - def test_keyboard_interrupt_exits(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('multiprocessing.connection.Client', MagicMock(side_effect=KeyboardInterrupt)) + def test_keyboard_interrupt_exits(self, *_: Any) -> None: with self.assertRaises(SystemExit): Gateway(operation=TX, local_test=True, dd_sockets=False) - @mock.patch("time.sleep", return_value=None) - @mock.patch( - "multiprocessing.connection.Client", - MagicMock(side_effect=[socket.error, ConnectionRefusedError, MagicMock()]), - ) - def test_socket_client(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('multiprocessing.connection.Client', MagicMock( + side_effect=[socket.error, ConnectionRefusedError, MagicMock()])) + def test_socket_client(self, *_: Any) -> None: gateway = Gateway(operation=TX, local_test=True, dd_sockets=False) self.assertIsInstance(gateway, Gateway) - @mock.patch("time.sleep", 
return_value=None) - @mock.patch( - "multiprocessing.connection.Listener", - MagicMock(side_effect=[MagicMock(), KeyboardInterrupt]), - ) - def test_socket_server(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('multiprocessing.connection.Listener', MagicMock( + side_effect=[MagicMock(), KeyboardInterrupt])) + def test_socket_server(self, *_: Any) -> None: gateway = Gateway(operation=RX, local_test=True, dd_sockets=False) self.assertIsInstance(gateway, Gateway) with self.assertRaises(SystemExit): Gateway(operation=RX, local_test=True, dd_sockets=False) - @mock.patch("time.sleep", return_value=None) - @mock.patch( - "multiprocessing.connection.Listener", - return_value=MagicMock( - accept=lambda: MagicMock( - recv=MagicMock(side_effect=[KeyboardInterrupt, b"data", EOFError]) - ) - ), - ) - def test_local_testing_read(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('multiprocessing.connection.Listener', return_value=MagicMock( + accept=lambda: MagicMock(recv=MagicMock(side_effect=[KeyboardInterrupt, b'data', EOFError])))) + def test_local_testing_read(self, *_: Any) -> None: gateway = Gateway(operation=RX, local_test=True, dd_sockets=False) - self.assertEqual(gateway.read(), b"data") + self.assertEqual(gateway.read(), b'data') with self.assertRaises(SystemExit): gateway.read() - @mock.patch("time.sleep", return_value=None) - @mock.patch( - "multiprocessing.connection.Client", - return_value=MagicMock(send=MagicMock(side_effect=[None, BrokenPipeError])), - ) - def test_local_testing_write(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('multiprocessing.connection.Client', return_value=MagicMock( + send=MagicMock(side_effect=[None, BrokenPipeError]))) + def test_local_testing_write(self, *_: Any) -> None: gateway = Gateway(operation=TX, local_test=True, dd_sockets=False) - self.assertIsNone(gateway.write(b"data")) + self.assertIsNone(gateway.write(b'data')) with self.assertRaises(SystemExit): - gateway.write(b"data") + gateway.write(b'data') class TestGatewaySettings(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.unit_test_dir = cd_unit_test() + self.unit_test_dir = cd_unit_test() self.default_serialized = """\ { "serial_baudrate": 19200, @@ -340,88 +285,78 @@ class TestGatewaySettings(TFCTestCase): """Post-test actions.""" cleanup(self.unit_test_dir) - @mock.patch( - "os.listdir", side_effect=[["ttyUSB0"], ["ttyS0"], ["ttyUSB0"], ["ttyS0"]] - ) - @mock.patch("builtins.input", side_effect=["yes", "yes", "no", "no"]) - def test_gateway_setup(self, *_) -> None: + @mock.patch('os.listdir', side_effect=[['ttyUSB0'], ['ttyS0'], ['ttyUSB0'], ['ttyS0']]) + @mock.patch('builtins.input', side_effect=['yes', 'yes', 'no', 'no']) + def test_gateway_setup(self, *_: Any) -> None: settings = GatewaySettings(operation=TX, local_test=False, dd_sockets=True) self.assertIsNone(settings.setup()) def test_store_and_load_of_settings(self) -> None: settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True) - self.assertTrue(os.path.isfile(f"{DIR_USER_DATA}/{TX}_serial_settings.json")) + self.assertTrue(os.path.isfile(f'{DIR_USER_DATA}/{TX}_serial_settings.json')) - self.assertEqual(settings.serial_baudrate, 19200) + self.assertEqual(settings.serial_baudrate, 19200) self.assertEqual(settings.use_serial_usb_adapter, True) - settings.serial_baudrate = 115200 + settings.serial_baudrate = 115200 settings.use_serial_usb_adapter = False self.assertIsNone(settings.store_settings()) 
settings2 = GatewaySettings(operation=TX, local_test=True, dd_sockets=True) - self.assertEqual(settings2.serial_baudrate, 115200) + self.assertEqual(settings2.serial_baudrate, 115200) self.assertEqual(settings.use_serial_usb_adapter, False) def test_manually_edited_settings_are_loaded(self) -> None: # Setup ensure_dir(DIR_USER_DATA) - with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", "w+") as f: - f.write( - """\ + with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", 'w+') as f: + f.write("""\ { "serial_baudrate": 9600, "serial_error_correction": 1, "use_serial_usb_adapter": false, "built_in_serial_interface": "ttyS0" -}""" - ) +}""") # Test settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True) - self.assertEqual(settings.serial_baudrate, 9600) - self.assertEqual(settings.serial_error_correction, 1) - self.assertEqual(settings.use_serial_usb_adapter, False) - self.assertEqual(settings.built_in_serial_interface, "ttyS0") + self.assertEqual(settings.serial_baudrate, 9600) + self.assertEqual(settings.serial_error_correction, 1) + self.assertEqual(settings.use_serial_usb_adapter, False) + self.assertEqual(settings.built_in_serial_interface, 'ttyS0') - def test_missing_values_are_set_to_default_and_database_is_overwritten( - self, - ) -> None: + def test_missing_values_are_set_to_default_and_database_is_overwritten(self) -> None: # Setup ensure_dir(DIR_USER_DATA) - with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", "w+") as f: - f.write( - """\ + with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", 'w+') as f: + f.write("""\ { "serial_error_correction": 1, "use_serial_usb_adapter": false, "relay_usb_serial_adapter": false -}""" - ) +}""") # Test settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True) - self.assertEqual(settings.serial_baudrate, 19200) - self.assertEqual(settings.serial_error_correction, 1) - self.assertEqual(settings.use_serial_usb_adapter, False) - self.assertEqual(settings.built_in_serial_interface, "ttyS0") + self.assertEqual(settings.serial_baudrate, 19200) + self.assertEqual(settings.serial_error_correction, 1) + self.assertEqual(settings.use_serial_usb_adapter, False) + self.assertEqual(settings.built_in_serial_interface, 'ttyS0') def test_invalid_format_is_replaced_with_defaults(self) -> None: # Setup ensure_dir(DIR_USER_DATA) - with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", "w+") as f: - f.write( - """\ + with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", 'w+') as f: + f.write("""\ { "serial_error_correction": 5, "use_serial_usb_adapter": false, -}""" - ) +}""") # Test settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True) - self.assertEqual(settings.serial_baudrate, 19200) - self.assertEqual(settings.serial_error_correction, 5) - self.assertEqual(settings.use_serial_usb_adapter, True) - self.assertEqual(settings.built_in_serial_interface, "ttyS0") + self.assertEqual(settings.serial_baudrate, 19200) + self.assertEqual(settings.serial_error_correction, 5) + self.assertEqual(settings.use_serial_usb_adapter, True) + self.assertEqual(settings.built_in_serial_interface, 'ttyS0') with open(settings.file_name) as f: data = f.read() @@ -431,22 +366,20 @@ class TestGatewaySettings(TFCTestCase): def test_invalid_serial_baudrate_is_replaced_with_default(self) -> None: # Setup ensure_dir(DIR_USER_DATA) - with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", "w+") as f: - f.write( - """\ + with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", 'w+') as f: + f.write("""\ { "serial_baudrate": 19201, 
"serial_error_correction": 5, "use_serial_usb_adapter": true, "built_in_serial_interface": "ttyS0" -}""" - ) +}""") # Test settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True) - self.assertEqual(settings.serial_baudrate, 19200) - self.assertEqual(settings.serial_error_correction, 5) - self.assertEqual(settings.use_serial_usb_adapter, True) - self.assertEqual(settings.built_in_serial_interface, "ttyS0") + self.assertEqual(settings.serial_baudrate, 19200) + self.assertEqual(settings.serial_error_correction, 5) + self.assertEqual(settings.use_serial_usb_adapter, True) + self.assertEqual(settings.built_in_serial_interface, 'ttyS0') with open(settings.file_name) as f: data = f.read() @@ -456,22 +389,20 @@ class TestGatewaySettings(TFCTestCase): def test_invalid_serial_error_correction_is_replaced_with_default(self) -> None: # Setup ensure_dir(DIR_USER_DATA) - with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", "w+") as f: - f.write( - """\ + with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", 'w+') as f: + f.write("""\ { "serial_baudrate": 19200, "serial_error_correction": -1, "use_serial_usb_adapter": true, "built_in_serial_interface": "ttyS0" -}""" - ) +}""") # Test settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True) - self.assertEqual(settings.serial_baudrate, 19200) - self.assertEqual(settings.serial_error_correction, 5) - self.assertEqual(settings.use_serial_usb_adapter, True) - self.assertEqual(settings.built_in_serial_interface, "ttyS0") + self.assertEqual(settings.serial_baudrate, 19200) + self.assertEqual(settings.serial_error_correction, 5) + self.assertEqual(settings.use_serial_usb_adapter, True) + self.assertEqual(settings.built_in_serial_interface, 'ttyS0') with open(settings.file_name) as f: data = f.read() @@ -481,22 +412,20 @@ class TestGatewaySettings(TFCTestCase): def test_invalid_serial_interface_is_replaced_with_default(self) -> None: # Setup ensure_dir(DIR_USER_DATA) - with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", "w+") as f: - f.write( - """\ + with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", 'w+') as f: + f.write("""\ { "serial_baudrate": 19200, "serial_error_correction": 5, "use_serial_usb_adapter": true, "built_in_serial_interface": "does_not_exist" -}""" - ) +}""") # Test settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True) - self.assertEqual(settings.serial_baudrate, 19200) - self.assertEqual(settings.serial_error_correction, 5) - self.assertEqual(settings.use_serial_usb_adapter, True) - self.assertEqual(settings.built_in_serial_interface, "ttyS0") + self.assertEqual(settings.serial_baudrate, 19200) + self.assertEqual(settings.serial_error_correction, 5) + self.assertEqual(settings.use_serial_usb_adapter, True) + self.assertEqual(settings.built_in_serial_interface, 'ttyS0') with open(settings.file_name) as f: data = f.read() @@ -506,22 +435,20 @@ class TestGatewaySettings(TFCTestCase): def test_invalid_type_is_replaced_with_default(self) -> None: # Setup ensure_dir(DIR_USER_DATA) - with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", "w+") as f: - f.write( - """\ + with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", 'w+') as f: + f.write("""\ { "serial_baudrate": "115200", "serial_error_correction": "5", "use_serial_usb_adapter": "true", "built_in_serial_interface": true -}""" - ) +}""") # Test settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True) - self.assertEqual(settings.serial_baudrate, 19200) - self.assertEqual(settings.serial_error_correction, 5) - 
self.assertEqual(settings.use_serial_usb_adapter, True) - self.assertEqual(settings.built_in_serial_interface, "ttyS0") + self.assertEqual(settings.serial_baudrate, 19200) + self.assertEqual(settings.serial_error_correction, 5) + self.assertEqual(settings.use_serial_usb_adapter, True) + self.assertEqual(settings.built_in_serial_interface, 'ttyS0') with open(settings.file_name) as f: data = f.read() @@ -531,37 +458,33 @@ class TestGatewaySettings(TFCTestCase): def test_unknown_kv_pair_is_removed(self) -> None: # Setup ensure_dir(DIR_USER_DATA) - with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", "w+") as f: - f.write( - """\ + with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", 'w+') as f: + f.write("""\ { "serial_baudrate": 19200, "serial_error_correction": 5, "use_serial_usb_adapter": true, "built_in_serial_interface": "ttyS0", "this_should_not_be_here": 1 -}""" - ) +}""") # Test settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True) - self.assertEqual(settings.serial_baudrate, 19200) - self.assertEqual(settings.serial_error_correction, 5) - self.assertEqual(settings.use_serial_usb_adapter, True) - self.assertEqual(settings.built_in_serial_interface, "ttyS0") + self.assertEqual(settings.serial_baudrate, 19200) + self.assertEqual(settings.serial_error_correction, 5) + self.assertEqual(settings.use_serial_usb_adapter, True) + self.assertEqual(settings.built_in_serial_interface, 'ttyS0') with open(settings.file_name) as f: data = f.read() self.assertEqual(data, self.default_serialized) - @mock.patch( - "os.listdir", side_effect=[["ttyS0"], ["ttyUSB0"], ["ttyUSB0"], ["ttyS0"]] - ) - @mock.patch("builtins.input", side_effect=["Yes", "Yes", "No", "No"]) - def test_setup(self, *_) -> None: + @mock.patch('os.listdir', side_effect=[['ttyS0'], ['ttyUSB0'], ['ttyUSB0'], ['ttyS0']]) + @mock.patch('builtins.input', side_effect=['Yes', 'Yes', 'No', 'No']) + def test_setup(self, *_: Any) -> None: # Setup ensure_dir(DIR_USER_DATA) - with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", "w+") as f: + with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", 'w+') as f: f.write(self.default_serialized) settings = GatewaySettings(operation=TX, local_test=False, dd_sockets=True) @@ -570,90 +493,48 @@ class TestGatewaySettings(TFCTestCase): self.assertIsNone(settings.setup()) self.assertIsNone(settings.setup()) - @mock.patch("time.sleep", return_value=None) - def test_change_setting(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_change_setting(self, _: Any) -> None: settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True) - self.assert_se( - "Error: Invalid setting value 'Falsee'.", - settings.change_setting, - "serial_baudrate", - "Falsee", - ) - self.assert_se( - "Error: Invalid setting value '1.1'.", - settings.change_setting, - "serial_baudrate", - "1.1", - ) - self.assert_se( - "Error: Invalid setting value '18446744073709551616'.", - settings.change_setting, - "serial_baudrate", - str(2 ** 64), - ) - self.assert_se( - "Error: Invalid setting value 'Falsee'.", - settings.change_setting, - "use_serial_usb_adapter", - "Falsee", - ) + self.assert_se("Error: Invalid setting value 'Falsee'.", + settings.change_setting, 'serial_baudrate', 'Falsee') + self.assert_se("Error: Invalid setting value '1.1'.", + settings.change_setting, 'serial_baudrate', '1.1', ) + self.assert_se("Error: Invalid setting value '18446744073709551616'.", + settings.change_setting, 'serial_baudrate', str(2 ** 64)) + self.assert_se("Error: Invalid setting value 
'Falsee'.", + settings.change_setting, 'use_serial_usb_adapter', 'Falsee') - self.assertIsNone(settings.change_setting("serial_baudrate", "9600")) - self.assertEqual( - GatewaySettings( - operation=TX, local_test=True, dd_sockets=True - ).serial_baudrate, - 9600, - ) + self.assertIsNone(settings.change_setting('serial_baudrate', '9600')) + self.assertEqual(GatewaySettings(operation=TX, local_test=True, dd_sockets=True).serial_baudrate, 9600) - settings.serial_baudrate = b"bytestring" + settings.serial_baudrate = b'bytestring' with self.assertRaises(SystemExit): - settings.change_setting("serial_baudrate", "9600") + settings.change_setting('serial_baudrate', '9600') def test_validate_key_value_pair(self) -> None: settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True) - self.assert_se( - "Error: The specified baud rate is not supported.", - settings.validate_key_value_pair, - "serial_baudrate", - 0, - ) - self.assert_se( - "Error: The specified baud rate is not supported.", - settings.validate_key_value_pair, - "serial_baudrate", - 10, - ) - self.assert_se( - "Error: The specified baud rate is not supported.", - settings.validate_key_value_pair, - "serial_baudrate", - 9601, - ) - self.assert_se( - "Error: Invalid value for error correction ratio.", - settings.validate_key_value_pair, - "serial_error_correction", - -1, - ) + self.assert_se("Error: The specified baud rate is not supported.", + settings.validate_key_value_pair, 'serial_baudrate', 0) + self.assert_se("Error: The specified baud rate is not supported.", + settings.validate_key_value_pair, 'serial_baudrate', 10) + self.assert_se("Error: The specified baud rate is not supported.", + settings.validate_key_value_pair, 'serial_baudrate', 9601) + self.assert_se("Error: Invalid value for error correction ratio.", + settings.validate_key_value_pair, 'serial_error_correction', -1) - self.assertIsNone(settings.validate_key_value_pair("serial_baudrate", 9600)) - self.assertIsNone( - settings.validate_key_value_pair("serial_error_correction", 20) - ) - self.assertIsNone( - settings.validate_key_value_pair("use_serial_usb_adapter", True) - ) + self.assertIsNone(settings.validate_key_value_pair("serial_baudrate", 9600)) + self.assertIsNone(settings.validate_key_value_pair("serial_error_correction", 20)) + self.assertIsNone(settings.validate_key_value_pair("use_serial_usb_adapter", True)) - @mock.patch("shutil.get_terminal_size", return_value=(64, 64)) - def test_too_narrow_terminal_raises_fr_when_printing_settings(self, _) -> None: + @mock.patch('shutil.get_terminal_size', return_value=(64, 64)) + def test_too_narrow_terminal_raises_fr_when_printing_settings(self, _: Any) -> None: settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True) self.assert_se("Error: Screen width is too small.", settings.print_settings) def test_print_settings(self) -> None: settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True) - self.assert_prints( - """\ + self.assert_prints("""\ Serial interface setting Current value Default value Description ──────────────────────────────────────────────────────────────────────────────── @@ -668,10 +549,8 @@ serial_error_correction 5 5 Number of byte recover from -""", - settings.print_settings, - ) +""", settings.print_settings) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/common/test_input.py b/tests/common/test_input.py index a46c7db..3d522e8 100644 --- a/tests/common/test_input.py +++ b/tests/common/test_input.py 
@@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -22,146 +22,121 @@ along with TFC. If not, see . import unittest from unittest import mock +from typing import Any -from src.common.input import ( - ask_confirmation_code, - box_input, - get_b58_key, - nc_bypass_msg, - pwd_prompt, - yes, -) -from src.common.statics import ( - B58_LOCAL_KEY, - B58_PUBLIC_KEY, - NC_BYPASS_START, - NC_BYPASS_STOP, - SYMMETRIC_KEY_LENGTH, - TFC_PUBLIC_KEY_LENGTH, -) +from src.common.input import ask_confirmation_code, box_input, get_b58_key, nc_bypass_msg, pwd_prompt, yes +from src.common.statics import (B58_LOCAL_KEY, B58_PUBLIC_KEY, NC_BYPASS_START, NC_BYPASS_STOP, SYMMETRIC_KEY_LENGTH, + TFC_PUBLIC_KEY_LENGTH) from tests.mock_classes import Settings -from tests.utils import nick_to_short_address, VALID_ECDHE_PUB_KEY, VALID_LOCAL_KEY_KDK +from tests.utils import nick_to_short_address, VALID_ECDHE_PUB_KEY, VALID_LOCAL_KEY_KDK class TestAskConfirmationCode(unittest.TestCase): - confirmation_code = "ff" + confirmation_code = 'ff' - @mock.patch("builtins.input", return_value=confirmation_code) - def test_ask_confirmation_code(self, _) -> None: - self.assertEqual(ask_confirmation_code("Receiver"), self.confirmation_code) + @mock.patch('builtins.input', return_value=confirmation_code) + def test_ask_confirmation_code(self, _: Any) -> None: + self.assertEqual(ask_confirmation_code('Receiver'), self.confirmation_code) class TestBoxInput(unittest.TestCase): - @mock.patch("time.sleep", return_value=None) - @mock.patch( - "builtins.input", side_effect=["mock_input", "mock_input", "", "invalid", "ok"] - ) - def test_box_input(self, *_) -> None: - self.assertEqual(box_input("test title"), "mock_input") - self.assertEqual(box_input("test title", head=1, expected_len=20), "mock_input") - self.assertEqual( - box_input("test title", head=1, default="mock_input", expected_len=20), - "mock_input", - ) - self.assertEqual( - box_input( - "test title", - validator=lambda string, *_: "" if string == "ok" else "Error", - ), - "ok", - ) + + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', side_effect=['mock_input', 'mock_input', '', 'invalid', 'ok']) + def test_box_input(self, *_: Any) -> None: + self.assertEqual(box_input('test title'), 'mock_input') + self.assertEqual(box_input('test title', head=1, expected_len=20), 'mock_input') + self.assertEqual(box_input('test title', head=1, default='mock_input', expected_len=20), 'mock_input') + self.assertEqual(box_input('test title', validator=lambda string, *_: '' if string == 'ok' else 'Error'), 'ok') class TestGetB58Key(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" self.settings = Settings() - @mock.patch("time.sleep", return_value=None) - @mock.patch("shutil.get_terminal_size", return_value=[200, 200]) - @mock.patch( - "builtins.input", - side_effect=( - 2 * ["invalid", VALID_LOCAL_KEY_KDK[:-1], VALID_LOCAL_KEY_KDK] - + 2 * ["invalid", VALID_ECDHE_PUB_KEY[:-1], VALID_ECDHE_PUB_KEY] - ), - ) - def test_get_b58_key(self, *_) -> None: - for boolean in [True, False]: - self.settings.local_testing_mode = boolean - key = get_b58_key(B58_LOCAL_KEY, self.settings) + def test_invalid_key_type_raises_critical_error(self) -> None: + with self.assertRaises(SystemExit): + get_b58_key('invalid_key_type', self.settings) - self.assertIsInstance(key, bytes) - self.assertEqual(len(key), SYMMETRIC_KEY_LENGTH) - with 
self.assertRaises(SystemExit): - get_b58_key("invalid_key_type", self.settings) - - for boolean in [True, False]: - self.settings.local_testing_mode = boolean - key = get_b58_key( - B58_PUBLIC_KEY, self.settings, nick_to_short_address("Alice") - ) + @mock.patch('time.sleep', return_value=None) + @mock.patch('shutil.get_terminal_size', return_value=[200, 200]) + @mock.patch('builtins.input', side_effect=([51*'a', + VALID_LOCAL_KEY_KDK[:-1], + VALID_LOCAL_KEY_KDK+'a', + VALID_LOCAL_KEY_KDK])) + def test_get_b58_local_key(self, *_: Any) -> None: + key = get_b58_key(B58_LOCAL_KEY, self.settings) + self.assertIsInstance(key, bytes) + self.assertEqual(len(key), SYMMETRIC_KEY_LENGTH) + @mock.patch('time.sleep', return_value=None) + @mock.patch('shutil.get_terminal_size', return_value=[200, 200]) + @mock.patch('builtins.input', side_effect=(2*[VALID_ECDHE_PUB_KEY[:-1], + VALID_ECDHE_PUB_KEY+'a', + VALID_ECDHE_PUB_KEY, + 84*'a'])) + def test_get_b58_pub_key(self, *_: Any) -> None: + for local_testing in [True, False]: + self.settings.local_testing_mode = local_testing + key = get_b58_key(B58_PUBLIC_KEY, self.settings) self.assertIsInstance(key, bytes) self.assertEqual(len(key), TFC_PUBLIC_KEY_LENGTH) - with self.assertRaises(SystemExit): - get_b58_key("invalid_key_type", self.settings) + with self.assertRaises(ValueError): + get_b58_key(B58_PUBLIC_KEY, self.settings, nick_to_short_address('Alice')) - @mock.patch("builtins.input", return_value="") - @mock.patch("shutil.get_terminal_size", return_value=[200, 200]) - def test_empty_pub_key_returns_empty_bytes(self, *_) -> None: + @mock.patch('builtins.input', return_value='') + @mock.patch('shutil.get_terminal_size', return_value=[200, 200]) + def test_empty_pub_key_returns_empty_bytes(self, *_: Any) -> None: key = get_b58_key(B58_PUBLIC_KEY, self.settings) - self.assertEqual(key, b"") + self.assertEqual(key, b'') class TestNCBypassMsg(unittest.TestCase): - @mock.patch("builtins.input", return_value="") - def test_nc_bypass_msg(self, _) -> None: + + @mock.patch('builtins.input', return_value='') + def test_nc_bypass_msg(self, _: Any) -> None: settings = Settings(nc_bypass_messages=True) self.assertIsNone(nc_bypass_msg(NC_BYPASS_START, settings)) - self.assertIsNone(nc_bypass_msg(NC_BYPASS_STOP, settings)) + self.assertIsNone(nc_bypass_msg(NC_BYPASS_STOP, settings)) class TestPwdPrompt(unittest.TestCase): - @mock.patch("getpass.getpass", return_value="test_password") - def test_pwd_prompt(self, _) -> None: - self.assertEqual(pwd_prompt("test prompt"), "test_password") + + @mock.patch('getpass.getpass', return_value='test_password') + def test_pwd_prompt(self, _: Any) -> None: + self.assertEqual(pwd_prompt("test prompt"), 'test_password') class TestYes(unittest.TestCase): - @mock.patch( - "builtins.input", - side_effect=[ - "Invalid", - "", - "invalid", - "Y", - "YES", - "N", - "NO", - KeyboardInterrupt, - KeyboardInterrupt, - EOFError, - EOFError, - ], - ) - def test_yes(self, _) -> None: - self.assertTrue(yes("test prompt", head=1, tail=1)) - self.assertTrue(yes("test prompt")) - self.assertFalse(yes("test prompt", head=1, tail=1)) - self.assertFalse(yes("test prompt")) + @mock.patch('builtins.input', side_effect=['Invalid', '', 'invalid', 'Y', 'YES', 'N', 'NO', + KeyboardInterrupt, KeyboardInterrupt, EOFError, + EOFError, EOFError, KeyboardInterrupt]) + def test_yes(self, _: Any) -> None: + self.assertTrue(yes('test prompt', head=1, tail=1)) + self.assertTrue(yes('test prompt')) - self.assertTrue(yes("test prompt", head=1, tail=1, abort=True)) - 
self.assertFalse(yes("test prompt", abort=False)) + self.assertFalse(yes('test prompt', head=1, tail=1)) + self.assertFalse(yes('test prompt')) - self.assertTrue(yes("test prompt", head=1, tail=1, abort=True)) - self.assertFalse(yes("test prompt", abort=False)) + self.assertTrue(yes('test prompt', head=1, tail=1, abort=True)) + self.assertFalse(yes('test prompt', abort=False)) + + self.assertTrue(yes('test prompt', head=1, tail=1, abort=True)) + self.assertFalse(yes('test prompt', abort=False)) + + with self.assertRaises(EOFError): + self.assertFalse(yes('test prompt')) + + with self.assertRaises(KeyboardInterrupt): + self.assertFalse(yes('test prompt')) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/common/test_misc.py b/tests/common/test_misc.py index 9017987..23deb7c 100644 --- a/tests/common/test_misc.py +++ b/tests/common/test_misc.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -28,61 +28,27 @@ import unittest import zlib from multiprocessing import Process -from typing import Any -from unittest import mock +from unittest import mock +from typing import Any, NoReturn -from src.common.misc import ( - calculate_race_condition_delay, - decompress, - ensure_dir, - get_tab_complete_list, -) -from src.common.misc import ( - get_tab_completer, - get_terminal_height, - get_terminal_width, - ignored, - monitor_processes, -) -from src.common.misc import ( - process_arguments, - readable_size, - round_up, - separate_header, - separate_headers, -) -from src.common.misc import ( - separate_trailer, - split_string, - split_byte_string, - terminal_width_check, -) -from src.common.misc import ( - validate_group_name, - validate_key_exchange, - validate_onion_addr, - validate_nick, -) -from src.common.statics import ( - DIR_RECV_FILES, - DIR_USER_DATA, - DUMMY_GROUP, - ECDHE, - EXIT, - EXIT_QUEUE, - LOCAL_ID, - PADDING_LENGTH, - RX, - TAILS, - WIPE, -) +from unittest.mock import MagicMock + +from src.common.misc import calculate_race_condition_delay, decompress, ensure_dir, get_tab_complete_list +from src.common.misc import get_tab_completer, get_terminal_height, get_terminal_width, HideRunTime, ignored +from src.common.misc import monitor_processes, process_arguments, readable_size, reset_terminal, round_up +from src.common.misc import separate_header, separate_headers, separate_trailer, split_string, split_byte_string +from src.common.misc import split_to_substrings, terminal_width_check, validate_group_name, validate_key_exchange +from src.common.misc import validate_onion_addr, validate_nick +from src.common.statics import (DIR_RECV_FILES, DIR_USER_DATA, DUMMY_GROUP, ECDHE, EXIT, EXIT_QUEUE, LOCAL_ID, + PADDING_LENGTH, RESET, RX, TAILS, TRAFFIC_MASKING, WIPE) from tests.mock_classes import ContactList, Gateway, GroupList, Settings -from tests.utils import cd_unit_test, cleanup, gen_queue_dict, nick_to_onion_address -from tests.utils import nick_to_pub_key, tear_queues, TFCTestCase +from tests.utils import cd_unit_test, cleanup, gen_queue_dict, nick_to_onion_address +from tests.utils import nick_to_pub_key, tear_queues, TFCTestCase class TestCalculateRaceConditionDelay(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" self.settings = Settings() @@ -92,94 +58,84 @@ class TestCalculateRaceConditionDelay(unittest.TestCase): class TestDecompress(TFCTestCase): + def setUp(self) -> None: 
"""Pre-test actions.""" - self.settings = Settings() + self.settings = Settings() self.settings.max_decompress_size = 1000 def test_successful_decompression(self) -> None: # Setup - data = os.urandom(self.settings.max_decompress_size) + data = os.urandom(self.settings.max_decompress_size) compressed = zlib.compress(data) # Test - self.assertEqual( - decompress(compressed, self.settings.max_decompress_size), data - ) + self.assertEqual(decompress(compressed, self.settings.max_decompress_size), data) - def test_oversize_decompression_raises_fr(self) -> None: + def test_oversize_decompression_raises_se(self) -> None: # Setup - data = os.urandom(self.settings.max_decompress_size + 1) + data = os.urandom(self.settings.max_decompress_size + 1) compressed = zlib.compress(data) # Test - self.assert_se( - "Error: Decompression aborted due to possible zip bomb.", - decompress, - compressed, - self.settings.max_decompress_size, - ) + self.assert_se("Error: Decompression aborted due to possible zip bomb.", + decompress, compressed, self.settings.max_decompress_size) class TestEnsureDir(unittest.TestCase): + def tearDown(self) -> None: """Post-test actions.""" with ignored(OSError): - os.rmdir("test_dir/") + os.rmdir('test_dir/') def test_ensure_dir(self) -> None: - self.assertIsNone(ensure_dir("test_dir/")) - self.assertIsNone(ensure_dir("test_dir/")) - self.assertTrue(os.path.isdir("test_dir/")) + self.assertIsNone(ensure_dir('test_dir/')) + self.assertIsNone(ensure_dir('test_dir/')) + self.assertTrue(os.path.isdir('test_dir/')) class TestTabCompleteList(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.contact_list = ContactList(nicks=["Alice", "Bob"]) - self.group_list = GroupList(groups=["test_group"]) - self.settings = Settings(key_list=["key1", "key2"]) - self.gateway = Gateway() + self.contact_list = ContactList(nicks=['Alice', 'Bob']) + self.group_list = GroupList(groups=['test_group']) + self.settings = Settings(key_list=['key1', 'key2']) + self.gateway = Gateway() def test_get_tab_complete_list(self) -> None: - tab_complete_list = [a + " " for a in self.contact_list.get_list_of_addresses()] - tab_complete_list += [ - i + " " for i in self.group_list.get_list_of_hr_group_ids() - ] - tab_complete_list += [s + " " for s in self.settings.key_list] - tab_complete_list += [s + " " for s in self.gateway.settings.key_list] + tab_complete_list = [a + ' ' for a in self.contact_list.get_list_of_addresses()] + tab_complete_list += [i + ' ' for i in self.group_list.get_list_of_hr_group_ids()] + tab_complete_list += [s + ' ' for s in self.settings.key_list] + tab_complete_list += [s + ' ' for s in self.gateway.settings.key_list] - tc_list = get_tab_complete_list( - self.contact_list, self.group_list, self.settings, self.gateway - ) + tc_list = get_tab_complete_list(self.contact_list, self.group_list, self.settings, self.gateway) self.assertTrue(set(tab_complete_list) < set(tc_list)) - self.assertIsInstance( - get_tab_completer( - self.contact_list, self.group_list, self.settings, self.gateway - ), - types.FunctionType, - ) + self.assertIsInstance(get_tab_completer(self.contact_list, self.group_list, self.settings, self.gateway), + types.FunctionType) - completer = get_tab_completer( - self.contact_list, self.group_list, self.settings, self.gateway - ) - options = completer("a", state=0) + completer = get_tab_completer(self.contact_list, self.group_list, self.settings, self.gateway) + options = completer('a', state=0) - self.assertEqual(options, "all") - 
self.assertIsNone(completer("a", state=5)) + self.assertEqual(options, 'all') + self.assertIsNone(completer('a', state=5)) class TestGetTerminalHeight(unittest.TestCase): + def test_get_terminal_height(self) -> None: self.assertIsInstance(get_terminal_height(), int) class TestGetTerminalWidth(unittest.TestCase): + def test_get_terminal_width(self) -> None: self.assertIsInstance(get_terminal_width(), int) class TestIgnored(unittest.TestCase): + @staticmethod def func() -> None: """Mock function that raises exception.""" @@ -196,24 +152,25 @@ class TestIgnored(unittest.TestCase): class TestMonitorProcesses(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" self.unit_test_dir = cd_unit_test() - self.settings = Settings() + self.settings = Settings() def tearDown(self) -> None: """Post-test actions.""" cleanup(self.unit_test_dir) @staticmethod - def mock_process() -> None: + def mock_process() -> NoReturn: """Mock process that does not return.""" while True: time.sleep(0.01) - @mock.patch("time.sleep", return_value=None) + @mock.patch('time.sleep', return_value=None) def test_exit(self, *_) -> None: - queues = gen_queue_dict() + queues = gen_queue_dict() process_list = [Process(target=self.mock_process)] for p in process_list: @@ -223,7 +180,6 @@ class TestMonitorProcesses(TFCTestCase): """Place EXIT packet into queue after delay.""" time.sleep(0.01) queues[EXIT_QUEUE].put(EXIT) - threading.Thread(target=queue_delayer).start() with self.assertRaises(SystemExit): @@ -231,13 +187,14 @@ class TestMonitorProcesses(TFCTestCase): tear_queues(queues) - @mock.patch("time.sleep", return_value=None) - def test_dying_process(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + def test_dying_process(self, *_: Any) -> None: + def mock_process() -> None: """Function that returns after a moment.""" time.sleep(0.01) - queues = gen_queue_dict() + queues = gen_queue_dict() process_list = [Process(target=mock_process)] for p in process_list: @@ -248,10 +205,10 @@ class TestMonitorProcesses(TFCTestCase): tear_queues(queues) - @mock.patch("time.sleep", return_value=None) - @mock.patch("os.system", return_value=None) - def test_wipe(self, mock_os_system, *_) -> None: - queues = gen_queue_dict() + @mock.patch('time.sleep', return_value=None) + @mock.patch('os.system', return_value=None) + def test_wipe(self, mock_os_system, *_: Any) -> None: + queues = gen_queue_dict() process_list = [Process(target=self.mock_process)] os.mkdir(DIR_USER_DATA) @@ -266,22 +223,21 @@ class TestMonitorProcesses(TFCTestCase): """Place WIPE packet to queue after delay.""" time.sleep(0.01) queues[EXIT_QUEUE].put(WIPE) - threading.Thread(target=queue_delayer).start() with self.assertRaises(SystemExit): monitor_processes(process_list, RX, queues) self.assertFalse(os.path.isdir(DIR_USER_DATA)) self.assertFalse(os.path.isdir(DIR_RECV_FILES)) - mock_os_system.assert_called_with("systemctl poweroff") + mock_os_system.assert_called_with('systemctl poweroff') tear_queues(queues) - @mock.patch("time.sleep", return_value=None) - @mock.patch("os.system", return_value=None) - @mock.patch("builtins.open", mock.mock_open(read_data=TAILS)) - def test_wipe_tails(self, mock_os_system, *_) -> None: - queues = gen_queue_dict() + @mock.patch('time.sleep', return_value=None) + @mock.patch('os.system', return_value=None) + @mock.patch('builtins.open', mock.mock_open(read_data=TAILS)) + def test_wipe_tails(self, mock_os_system, *_: Any) -> None: + queues = gen_queue_dict() process_list = [Process(target=self.mock_process)] 
os.mkdir(DIR_USER_DATA) @@ -294,13 +250,12 @@ class TestMonitorProcesses(TFCTestCase): """Place WIPE packet to queue after delay.""" time.sleep(0.01) queues[EXIT_QUEUE].put(WIPE) - threading.Thread(target=queue_delayer).start() with self.assertRaises(SystemExit): monitor_processes(process_list, RX, queues) - mock_os_system.assert_called_with("systemctl poweroff") + mock_os_system.assert_called_with('systemctl poweroff') # Test that user data wasn't removed self.assertTrue(os.path.isdir(DIR_USER_DATA)) @@ -308,34 +263,34 @@ class TestMonitorProcesses(TFCTestCase): class TestProcessArguments(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" + class Args(object): + """Mock object for command line arguments.""" + + def __init__(self) -> None: + """Create new Args mock object.""" + self.operation = True + self.local_test = True + self.data_diode_sockets = True + class MockParser(object): """MockParse object.""" - def __init__(self, *_, **__) -> None: pass - def parse_args(self) -> Any: + @staticmethod + def parse_args() -> Args: """Return Args mock object.""" - - class Args(object): - """Mock object for command line arguments.""" - - def __init__(self) -> None: - """Create new Args mock object.""" - self.operation = True - self.local_test = True - self.data_diode_sockets = True - args = Args() return args - def add_argument(self, *_, **__) -> None: + def add_argument(self, *_: Any, **__: Any) -> None: """Mock function for adding argument.""" - self.o_argparse = argparse.ArgumentParser + self.o_argparse = argparse.ArgumentParser argparse.ArgumentParser = MockParser def tearDown(self) -> None: @@ -347,18 +302,28 @@ class TestProcessArguments(unittest.TestCase): class TestReadableSize(unittest.TestCase): + def test_readable_size(self) -> None: - sizes = ["", "K", "M", "G", "T", "P", "E", "Z", "Y"] + sizes = ['', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y'] for i in range(0, 9): size = readable_size(1024 ** i) - self.assertEqual(size, f"1.0{sizes[i]}B") + self.assertEqual(size, f'1.0{sizes[i]}B') + + +class TestResetTerminal(unittest.TestCase): + + @mock.patch('os.system', return_value=MagicMock(return_value=None)) + def test_reset_terminal(self, oss): + self.assertIsNone(reset_terminal()) + oss.assert_called_with(RESET) class TestRoundUp(unittest.TestCase): + def test_round_up(self) -> None: - self.assertEqual(round_up(1), 10) - self.assertEqual(round_up(5), 10) - self.assertEqual(round_up(8), 10) + self.assertEqual(round_up(1), 10) + self.assertEqual(round_up(5), 10) + self.assertEqual(round_up(8), 10) self.assertEqual(round_up(10), 10) self.assertEqual(round_up(11), 20) self.assertEqual(round_up(15), 20) @@ -368,259 +333,243 @@ class TestRoundUp(unittest.TestCase): class TestSplitString(unittest.TestCase): + def test_split_string(self) -> None: - self.assertEqual( - split_string("cypherpunk", 1), - ["c", "y", "p", "h", "e", "r", "p", "u", "n", "k"], - ) + self.assertEqual(split_string('cypherpunk', 1), ['c', + 'y', + 'p', + 'h', + 'e', + 'r', + 'p', + 'u', + 'n', + 'k']) - self.assertEqual(split_string("cypherpunk", 2), ["cy", "ph", "er", "pu", "nk"]) + self.assertEqual(split_string('cypherpunk', 2), ['cy', + 'ph', + 'er', + 'pu', + 'nk']) - self.assertEqual(split_string("cypherpunk", 3), ["cyp", "her", "pun", "k"]) + self.assertEqual(split_string('cypherpunk', 3), ['cyp', + 'her', + 'pun', + 'k']) - self.assertEqual(split_string("cypherpunk", 5), ["cyphe", "rpunk"]) + self.assertEqual(split_string('cypherpunk', 5), ['cyphe', + 'rpunk']) - 
self.assertEqual(split_string("cypherpunk", 10), ["cypherpunk"]) - self.assertEqual(split_string("cypherpunk", 15), ["cypherpunk"]) + self.assertEqual(split_string('cypherpunk', 10), ['cypherpunk']) + self.assertEqual(split_string('cypherpunk', 15), ['cypherpunk']) class TestSplitByteString(unittest.TestCase): + def test_split_byte_string(self) -> None: - self.assertEqual( - split_byte_string(b"cypherpunk", 1), - [b"c", b"y", b"p", b"h", b"e", b"r", b"p", b"u", b"n", b"k"], - ) + self.assertEqual(split_byte_string(b'cypherpunk', 1), [b'c', + b'y', + b'p', + b'h', + b'e', + b'r', + b'p', + b'u', + b'n', + b'k']) - self.assertEqual( - split_byte_string(b"cypherpunk", 2), [b"cy", b"ph", b"er", b"pu", b"nk"] - ) + self.assertEqual(split_byte_string(b'cypherpunk', 2), [b'cy', + b'ph', + b'er', + b'pu', + b'nk']) - self.assertEqual( - split_byte_string(b"cypherpunk", 3), [b"cyp", b"her", b"pun", b"k"] - ) + self.assertEqual(split_byte_string(b'cypherpunk', 3), [b'cyp', + b'her', + b'pun', + b'k']) - self.assertEqual(split_byte_string(b"cypherpunk", 5), [b"cyphe", b"rpunk"]) + self.assertEqual(split_byte_string(b'cypherpunk', 5), [b'cyphe', + b'rpunk']) - self.assertEqual(split_byte_string(b"cypherpunk", 10), [b"cypherpunk"]) - self.assertEqual(split_byte_string(b"cypherpunk", 15), [b"cypherpunk"]) + self.assertEqual(split_byte_string(b'cypherpunk', 10), [b'cypherpunk']) + self.assertEqual(split_byte_string(b'cypherpunk', 15), [b'cypherpunk']) class TestSeparateHeader(unittest.TestCase): + def test_separate_header(self) -> None: - self.assertEqual( - separate_header(b"cypherpunk", header_length=len(b"cypher")), - (b"cypher", b"punk"), - ) + self.assertEqual(separate_header(b"cypherpunk", header_length=len(b"cypher")), + (b"cypher", b"punk")) class TestSeparateHeaders(unittest.TestCase): + def test_separate_headers(self) -> None: - self.assertEqual( - separate_headers(b"cypherpunk", header_length_list=[1, 2, 3]), - [b"c", b"yp", b"her", b"punk"], - ) + self.assertEqual(separate_headers(b"cypherpunk", header_length_list=[1, 2, 3]), + [b"c", b"yp", b"her", b"punk"]) def test_too_small_string(self) -> None: - self.assertEqual( - separate_headers(b"cypherpunk", header_length_list=[1, 2, 10]), - [b"c", b"yp", b"herpunk", b""], - ) + self.assertEqual(separate_headers(b"cypherpunk", header_length_list=[1, 2, 10]), + [b"c", b"yp", b"herpunk", b""]) class TestSeparateTrailer(unittest.TestCase): + def test_separate_header(self) -> None: - self.assertEqual( - separate_trailer(b"cypherpunk", trailer_length=len(b"punk")), - (b"cypher", b"punk"), - ) + self.assertEqual(separate_trailer(b"cypherpunk", trailer_length=len(b"punk")), + (b"cypher", b"punk")) + + +class TestSplitToSubStrings(unittest.TestCase): + + def test_splitting(self) -> None: + test_string = b'cypherpunk' + + self.assertEqual(split_to_substrings(test_string, length=5), + [b'cyphe', + b'ypher', + b'pherp', + b'herpu', + b'erpun', + b'rpunk']) + + self.assertEqual(split_to_substrings(test_string, length=7), + [b'cypherp', + b'ypherpu', + b'pherpun', + b'herpunk']) + + self.assertEqual(split_to_substrings(test_string, length=len(test_string)), + [b'cypherpunk']) + + self.assertEqual(split_to_substrings(test_string, length=len(test_string)+1), + []) class TestTerminalWidthCheck(unittest.TestCase): - @mock.patch("time.sleep", return_value=None) - @mock.patch( - "shutil.get_terminal_size", side_effect=[[50, 50], [50, 50], [100, 100]] - ) - def test_width_check(self, *_) -> None: + + @mock.patch('time.sleep', return_value=None) + 
@mock.patch('shutil.get_terminal_size', side_effect=[[50, 50], [50, 50], [100, 100]]) + def test_width_check(self, *_: Any) -> None: self.assertIsNone(terminal_width_check(80)) class TestValidateOnionAddr(unittest.TestCase): + def test_validate_account(self) -> None: user_account = nick_to_onion_address("Bob") - self.assertEqual( - validate_onion_addr(nick_to_onion_address("Alice"), user_account), "" - ) - self.assertEqual( - validate_onion_addr(nick_to_onion_address("Bob"), user_account), - "Error: Can not add own account.", - ) - self.assertEqual( - validate_onion_addr( - nick_to_onion_address("Alice")[:-1] + "a", user_account - ), - "Checksum error - Check that the entered account is correct.", - ) - self.assertEqual( - validate_onion_addr( - nick_to_onion_address("Alice")[:-1] + "%", user_account - ), - "Error: Invalid account format.", - ) - self.assertEqual( - validate_onion_addr(nick_to_onion_address("Alice") + "a", user_account), - "Error: Invalid account format.", - ) - self.assertEqual( - validate_onion_addr( - nick_to_onion_address("Alice")[:-1] + "€", user_account - ), - "Error: Invalid account format.", - ) - self.assertEqual( - validate_onion_addr(LOCAL_ID, user_account), - "Error: Can not add reserved account.", - ) + self.assertEqual(validate_onion_addr(nick_to_onion_address("Alice") + 'a', user_account), + 'Error: Invalid account length.') + self.assertEqual(validate_onion_addr(nick_to_onion_address("Alice").upper(), user_account), + 'Error: Account must be in lower case.') + self.assertEqual(validate_onion_addr(nick_to_onion_address("Alice")[:-1] + 'a', user_account), + 'Checksum error - Check that the entered account is correct.') + self.assertEqual(validate_onion_addr(nick_to_onion_address("Alice")[:-1] + '%', user_account), + 'Error: Invalid account format.') + self.assertEqual(validate_onion_addr(LOCAL_ID, user_account), + 'Error: Can not add reserved account.') + self.assertEqual(validate_onion_addr(nick_to_onion_address("Bob"), user_account), + 'Error: Can not add own account.') + self.assertEqual(validate_onion_addr(nick_to_onion_address("Alice"), user_account), + '') class TestValidateGroupName(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.contact_list = ContactList(nicks=["Alice"]) - self.group_list = GroupList(groups=["test_group"]) + self.contact_list = ContactList(nicks=['Alice']) + self.group_list = GroupList(groups=['test_group']) def test_validate_group_name(self) -> None: - self.assertEqual( - validate_group_name("test_group\x1f", self.contact_list, self.group_list), - "Error: Group name must be printable.", - ) - self.assertEqual( - validate_group_name( - PADDING_LENGTH * "a", self.contact_list, self.group_list - ), - "Error: Group name must be less than 255 chars long.", - ) - self.assertEqual( - validate_group_name(DUMMY_GROUP, self.contact_list, self.group_list), - "Error: Group name cannot use the name reserved for database padding.", - ) - self.assertEqual( - validate_group_name( - nick_to_onion_address("Alice"), self.contact_list, self.group_list - ), - "Error: Group name cannot have the format of an account.", - ) - self.assertEqual( - validate_group_name("Alice", self.contact_list, self.group_list), - "Error: Group name cannot be a nick of contact.", - ) - self.assertEqual( - validate_group_name("test_group", self.contact_list, self.group_list), - "Error: Group with name 'test_group' already exists.", - ) - self.assertEqual( - validate_group_name("test_group2", self.contact_list, self.group_list), "" - ) + 
self.assertEqual(validate_group_name('test_group\x1f', self.contact_list, self.group_list), + "Error: Group name must be printable.") + self.assertEqual(validate_group_name(PADDING_LENGTH * 'a', self.contact_list, self.group_list), + "Error: Group name must be less than 255 chars long.") + self.assertEqual(validate_group_name(DUMMY_GROUP, self.contact_list, self.group_list), + "Error: Group name cannot use the name reserved for database padding.") + self.assertEqual(validate_group_name(nick_to_onion_address("Alice"), self.contact_list, self.group_list), + "Error: Group name cannot have the format of an account.") + self.assertEqual(validate_group_name('Alice', self.contact_list, self.group_list), + "Error: Group name cannot be a nick of contact.") + self.assertEqual(validate_group_name('test_group', self.contact_list, self.group_list), + "Error: Group with name 'test_group' already exists.") + self.assertEqual(validate_group_name('test_group2', self.contact_list, self.group_list), + '') class TestValidateKeyExchange(unittest.TestCase): + def test_validate_key_exchange(self) -> None: - self.assertEqual(validate_key_exchange(""), "Invalid key exchange selection.") - self.assertEqual(validate_key_exchange("x2"), "Invalid key exchange selection.") - self.assertEqual(validate_key_exchange("x"), "") - self.assertEqual(validate_key_exchange("X"), "") - self.assertEqual(validate_key_exchange(ECDHE), "") - self.assertEqual(validate_key_exchange(ECDHE.lower()), "") - self.assertEqual(validate_key_exchange("p"), "") - self.assertEqual(validate_key_exchange("P"), "") - self.assertEqual(validate_key_exchange("psk"), "") - self.assertEqual(validate_key_exchange("PSK"), "") + self.assertEqual(validate_key_exchange(''), 'Invalid key exchange selection.') + self.assertEqual(validate_key_exchange('x2'), 'Invalid key exchange selection.') + self.assertEqual(validate_key_exchange('x'), '') + self.assertEqual(validate_key_exchange('X'), '') + self.assertEqual(validate_key_exchange(ECDHE), '') + self.assertEqual(validate_key_exchange(ECDHE.lower()), '') + self.assertEqual(validate_key_exchange('p'), '') + self.assertEqual(validate_key_exchange('P'), '') + self.assertEqual(validate_key_exchange('psk'), '') + self.assertEqual(validate_key_exchange('PSK'), '') class TestValidateNick(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.contact_list = ContactList(nicks=["Alice", "Bob"]) - self.group_list = GroupList(groups=["test_group"]) + self.contact_list = ContactList(nicks=['Alice', 'Bob']) + self.group_list = GroupList(groups=['test_group']) def test_validate_nick(self) -> None: - self.assertEqual( - validate_nick( - "Alice_", (self.contact_list, self.group_list, nick_to_pub_key("Alice")) - ), - "", - ) - self.assertEqual( - validate_nick( - 254 * "a", - (self.contact_list, self.group_list, nick_to_pub_key("Alice")), - ), - "", - ) - self.assertEqual( - validate_nick( - 255 * "a", - (self.contact_list, self.group_list, nick_to_pub_key("Alice")), - ), - "Error: Nick must be shorter than 255 chars.", - ) - self.assertEqual( - validate_nick( - "\x01Alice", - (self.contact_list, self.group_list, nick_to_pub_key("Alice")), - ), - "Error: Nick must be printable.", - ) - self.assertEqual( - validate_nick( - "", (self.contact_list, self.group_list, nick_to_pub_key("Alice")) - ), - "Error: Nick cannot be empty.", - ) - self.assertEqual( - validate_nick( - "Me", (self.contact_list, self.group_list, nick_to_pub_key("Alice")) - ), - "Error: 'Me' is a reserved nick.", - ) - self.assertEqual( - 
validate_nick( - "-!-", (self.contact_list, self.group_list, nick_to_pub_key("Alice")) - ), - "Error: '-!-' is a reserved nick.", - ) - self.assertEqual( - validate_nick( - LOCAL_ID, (self.contact_list, self.group_list, nick_to_pub_key("Alice")) - ), - "Error: Nick cannot have the format of an account.", - ) - self.assertEqual( - validate_nick( - nick_to_onion_address("A"), - (self.contact_list, self.group_list, nick_to_pub_key("Alice")), - ), - "Error: Nick cannot have the format of an account.", - ) - self.assertEqual( - validate_nick( - "Bob", (self.contact_list, self.group_list, nick_to_pub_key("Alice")) - ), - "Error: Nick already in use.", - ) - self.assertEqual( - validate_nick( - "Alice", (self.contact_list, self.group_list, nick_to_pub_key("Alice")) - ), - "", - ) - self.assertEqual( - validate_nick( - "test_group", - (self.contact_list, self.group_list, nick_to_pub_key("Alice")), - ), - "Error: Nick cannot be a group name.", - ) + self.assertEqual(validate_nick("Alice_", (self.contact_list, self.group_list, nick_to_pub_key( + "Alice"))), '') + self.assertEqual(validate_nick(254 * "a", (self.contact_list, self.group_list, nick_to_pub_key( + "Alice"))), '') + self.assertEqual(validate_nick(255 * "a", (self.contact_list, self.group_list, nick_to_pub_key( + "Alice"))), 'Error: Nick must be shorter than 255 chars.') + self.assertEqual(validate_nick("\x01Alice", (self.contact_list, self.group_list, nick_to_pub_key( + "Alice"))), 'Error: Nick must be printable.') + self.assertEqual(validate_nick('', (self.contact_list, self.group_list, nick_to_pub_key( + "Alice"))), "Error: Nick cannot be empty.") + self.assertEqual(validate_nick('Me', (self.contact_list, self.group_list, nick_to_pub_key( + "Alice"))), "Error: 'Me' is a reserved nick.") + self.assertEqual(validate_nick('-!-', (self.contact_list, self.group_list, nick_to_pub_key( + "Alice"))), "Error: '-!-' is a reserved nick.") + self.assertEqual(validate_nick(LOCAL_ID, (self.contact_list, self.group_list, nick_to_pub_key( + "Alice"))), "Error: Nick cannot have the format of an account.") + self.assertEqual(validate_nick(nick_to_onion_address('A'), (self.contact_list, self.group_list, nick_to_pub_key( + "Alice"))), "Error: Nick cannot have the format of an account.") + self.assertEqual(validate_nick('Bob', (self.contact_list, self.group_list, nick_to_pub_key( + "Alice"))), 'Error: Nick already in use.') + self.assertEqual(validate_nick("Alice", (self.contact_list, self.group_list, nick_to_pub_key( + "Alice"))), '') + self.assertEqual(validate_nick("test_group", (self.contact_list, self.group_list, nick_to_pub_key( + "Alice"))), "Error: Nick cannot be a group name.") -if __name__ == "__main__": +class TestHideRunTime(unittest.TestCase): + + def setUp(self) -> None: + """Pre-test actions.""" + self.settings = Settings() + self.settings.tm_random_delay = 1 + self.settings.tm_static_delay = 1 + + def test_traffic_masking_delay(self) -> None: + start = time.monotonic() + with HideRunTime(self.settings, delay_type=TRAFFIC_MASKING): + pass + duration = time.monotonic() - start + self.assertTrue(duration > self.settings.tm_static_delay) + + def test_static_time(self) -> None: + start = time.monotonic() + with HideRunTime(self.settings, duration=1): + pass + duration = time.monotonic() - start + self.assertTrue(0.9 < duration < 1.1) + + +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/common/test_output.py b/tests/common/test_output.py index 52a7252..a67c304 100644 --- a/tests/common/test_output.py +++ 
b/tests/common/test_output.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -23,176 +23,104 @@ import unittest from datetime import datetime from unittest import mock +from typing import Any -from src.common.output import ( - clear_screen, - group_management_print, - m_print, - phase, - print_fingerprint, - print_key, -) -from src.common.output import ( - print_title, - print_on_previous_line, - print_spacing, - rp_print, -) -from src.common.statics import ( - ADDED_MEMBERS, - ALREADY_MEMBER, - BOLD_ON, - CLEAR_ENTIRE_LINE, - CLEAR_ENTIRE_SCREEN, - CURSOR_LEFT_UP_CORNER, - CURSOR_UP_ONE_LINE, - DONE, - FINGERPRINT_LENGTH, - NEW_GROUP, - NORMAL_TEXT, - NOT_IN_GROUP, - REMOVED_MEMBERS, - RX, - SYMMETRIC_KEY_LENGTH, - TX, - UNKNOWN_ACCOUNTS, - VERSION, -) +from src.common.output import clear_screen, group_management_print, m_print, phase, print_fingerprint, print_key +from src.common.output import print_title, print_on_previous_line, print_spacing, rp_print +from src.common.statics import (ADDED_MEMBERS, ALREADY_MEMBER, BOLD_ON, CLEAR_ENTIRE_LINE, CLEAR_ENTIRE_SCREEN, + CURSOR_LEFT_UP_CORNER, CURSOR_UP_ONE_LINE, DONE, FINGERPRINT_LENGTH, NEW_GROUP, + NORMAL_TEXT, NOT_IN_GROUP, REMOVED_MEMBERS, RX, SYMMETRIC_KEY_LENGTH, TX, + UNKNOWN_ACCOUNTS, VERSION) from tests.mock_classes import ContactList, nick_to_pub_key, Settings -from tests.utils import TFCTestCase +from tests.utils import TFCTestCase class TestClearScreen(TFCTestCase): + def test_clear_screen(self) -> None: self.assert_prints(CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER, clear_screen) class TestGroupManagementPrint(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.contact_list = ContactList(nicks=["Alice"]) - self.lines = [nick_to_pub_key("Alice"), nick_to_pub_key("Bob")] - self.group_name = "test_group" + self.contact_list = ContactList(nicks=['Alice']) + self.lines = [nick_to_pub_key('Alice'), nick_to_pub_key('Bob')] + self.group_name = 'test_group' def test_group_management_print(self) -> None: - group_management_print( - NEW_GROUP, self.lines, self.contact_list, self.group_name - ) - self.assert_prints( - """\ + group_management_print(NEW_GROUP, self.lines, self.contact_list, self.group_name) + self.assert_prints("""\ ┌──────────────────────────────────────────────────────────────┐ │ Created new group 'test_group' with following members: │ │ * Alice │ │ * zwp3dykiztmeils2u5eqjtdtx5x3kti5ktjthpkznku3ws5u5fq2bnad │ └──────────────────────────────────────────────────────────────┘ -""", - group_management_print, - NEW_GROUP, - self.lines, - self.contact_list, - self.group_name, - ) +""", group_management_print, NEW_GROUP, self.lines, self.contact_list, self.group_name) - self.assert_prints( - """\ + self.assert_prints("""\ ┌──────────────────────────────────────────────────────────────┐ │ Added following accounts to group 'test_group': │ │ * Alice │ │ * zwp3dykiztmeils2u5eqjtdtx5x3kti5ktjthpkznku3ws5u5fq2bnad │ └──────────────────────────────────────────────────────────────┘ -""", - group_management_print, - ADDED_MEMBERS, - self.lines, - self.contact_list, - self.group_name, - ) +""", group_management_print, ADDED_MEMBERS, self.lines, self.contact_list, self.group_name) - self.assert_prints( - """\ + self.assert_prints("""\ ┌──────────────────────────────────────────────────────────────┐ │ Following accounts were already in group 'test_group': │ │ * Alice │ │ * 
zwp3dykiztmeils2u5eqjtdtx5x3kti5ktjthpkznku3ws5u5fq2bnad │ └──────────────────────────────────────────────────────────────┘ -""", - group_management_print, - ALREADY_MEMBER, - self.lines, - self.contact_list, - self.group_name, - ) +""", group_management_print, ALREADY_MEMBER, self.lines, self.contact_list, self.group_name) - self.assert_prints( - """\ + self.assert_prints("""\ ┌──────────────────────────────────────────────────────────────┐ │ Removed following members from group 'test_group': │ │ * Alice │ │ * zwp3dykiztmeils2u5eqjtdtx5x3kti5ktjthpkznku3ws5u5fq2bnad │ └──────────────────────────────────────────────────────────────┘ -""", - group_management_print, - REMOVED_MEMBERS, - self.lines, - self.contact_list, - self.group_name, - ) +""", group_management_print, REMOVED_MEMBERS, self.lines, self.contact_list, self.group_name) - self.assert_prints( - """\ + self.assert_prints("""\ ┌──────────────────────────────────────────────────────────────┐ │ Following accounts were not in group 'test_group': │ │ * Alice │ │ * zwp3dykiztmeils2u5eqjtdtx5x3kti5ktjthpkznku3ws5u5fq2bnad │ └──────────────────────────────────────────────────────────────┘ -""", - group_management_print, - NOT_IN_GROUP, - self.lines, - self.contact_list, - self.group_name, - ) +""", group_management_print, NOT_IN_GROUP, self.lines, self.contact_list, self.group_name) - self.assert_prints( - """\ + self.assert_prints("""\ ┌──────────────────────────────────────────────────────────────┐ │ Following unknown accounts were ignored: │ │ * Alice │ │ * zwp3dykiztmeils2u5eqjtdtx5x3kti5ktjthpkznku3ws5u5fq2bnad │ └──────────────────────────────────────────────────────────────┘ -""", - group_management_print, - UNKNOWN_ACCOUNTS, - self.lines, - self.contact_list, - self.group_name, - ) +""", group_management_print, UNKNOWN_ACCOUNTS, self.lines, self.contact_list, self.group_name) class TestMPrint(TFCTestCase): - long_msg = ( - "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum consectetur purus quis" - " dapibus. Fusce venenatis lacus ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed bibendu" - "m velit maximus in. Aliquam ac metus risus. Sed cursus ornare luctus. Integer aliquet lectus id ma" - "ssa blandit imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget luctus nisl. Sed ut el" - "it iaculis, faucibus lacus eget, sodales magna. Nunc sed commodo arcu. In hac habitasse platea dic" - "tumst. Integer luctus aliquam justo, at vestibulum dolor iaculis ac. Etiam laoreet est eget odio r" - "utrum, vel malesuada lorem rhoncus. Cras finibus in neque eu euismod. Nulla facilisi. Nunc nec ali" - "quam quam, quis ullamcorper leo. Nunc egestas lectus eget est porttitor, in iaculis felis sceleris" - "que. In sem elit, fringilla id viverra commodo, sagittis varius purus. Pellentesque rutrum loborti" - "s neque a facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu." - ) + long_msg = ("Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum consectetur purus quis" + " dapibus. Fusce venenatis lacus ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed bibendu" + "m velit maximus in. Aliquam ac metus risus. Sed cursus ornare luctus. Integer aliquet lectus id ma" + "ssa blandit imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget luctus nisl. Sed ut el" + "it iaculis, faucibus lacus eget, sodales magna. Nunc sed commodo arcu. In hac habitasse platea dic" + "tumst. Integer luctus aliquam justo, at vestibulum dolor iaculis ac. 
Etiam laoreet est eget odio r" + "utrum, vel malesuada lorem rhoncus. Cras finibus in neque eu euismod. Nulla facilisi. Nunc nec ali" + "quam quam, quis ullamcorper leo. Nunc egestas lectus eget est porttitor, in iaculis felis sceleris" + "que. In sem elit, fringilla id viverra commodo, sagittis varius purus. Pellentesque rutrum loborti" + "s neque a facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu.") - @mock.patch("builtins.input", return_value="") - def test_m_print(self, _) -> None: + @mock.patch('builtins.input', return_value='') + def test_m_print(self, _: Any) -> None: self.assert_prints("Test message\n", m_print, ["Test message"], center=False) self.assert_prints("Test message\n", m_print, "Test message", center=False) def test_long_message(self) -> None: - self.assert_prints( - """\ + self.assert_prints("""\ Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum consectetur purus quis dapibus. Fusce venenatis lacus ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed bibendum velit maximus in. Aliquam ac @@ -206,14 +134,9 @@ aliquam quam, quis ullamcorper leo. Nunc egestas lectus eget est porttitor, in iaculis felis scelerisque. In sem elit, fringilla id viverra commodo, sagittis varius purus. Pellentesque rutrum lobortis neque a facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu. -""", - m_print, - TestMPrint.long_msg, - center=False, - ) +""", m_print, TestMPrint.long_msg, center=False) - self.assert_prints( - """\ + self.assert_prints("""\ ┌──────────────────────────────────────────────────────────────────────────────┐ │ Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum │ │ consectetur purus quis dapibus. Fusce venenatis lacus ut rhoncus faucibus. │ @@ -229,15 +152,9 @@ placerat, aliquam dolor ac, venenatis arcu. │ viverra commodo, sagittis varius purus. Pellentesque rutrum lobortis neque a │ │ facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu. │ └──────────────────────────────────────────────────────────────────────────────┘ -""", - m_print, - TestMPrint.long_msg, - center=False, - box=True, - ) +""", m_print, TestMPrint.long_msg, center=False, box=True) - self.assert_prints( - f"""\ + self.assert_prints(f"""\ {BOLD_ON}┌──────────────────────────────────────────────────────────────────────────────┐{NORMAL_TEXT} {BOLD_ON}│ Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum │{NORMAL_TEXT} {BOLD_ON}│ consectetur purus quis dapibus. Fusce venenatis lacus ut rhoncus faucibus. │{NORMAL_TEXT} @@ -253,90 +170,74 @@ placerat, aliquam dolor ac, venenatis arcu. {BOLD_ON}│ viverra commodo, sagittis varius purus. Pellentesque rutrum lobortis neque a │{NORMAL_TEXT} {BOLD_ON}│ facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu. 
│{NORMAL_TEXT} {BOLD_ON}└──────────────────────────────────────────────────────────────────────────────┘{NORMAL_TEXT} -""", - m_print, - TestMPrint.long_msg, - center=False, - box=True, - bold=True, - ) +""", m_print, TestMPrint.long_msg, center=False, box=True, bold=True) def test_multi_line(self) -> None: - self.assert_prints( - """\ + self.assert_prints("""\ ┌─────────┐ │ Test │ │ │ │ message │ └─────────┘ -""", - m_print, - ["Test", "", "message"], - box=True, - ) +""", m_print, ["Test", '', "message"], box=True) def test_head_and_tail(self) -> None: - self.assert_prints( - """\ + self.assert_prints("""\  ┌──────┐ │ Test │ └──────┘ -""", - m_print, - ["Test"], - box=True, - head_clear=True, - tail_clear=True, - head=2, - tail=1, - ) +""", m_print, ["Test"], box=True, head_clear=True, tail_clear=True, head=2, tail=1) def test_wrapping(self) -> None: - self.assert_prints( - """\ -┌──────────────────────────────────────────────────────────────────────────────┐ -│ short message │ -│ Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum │ -│ consectetur purus quis dapibus. Fusce venenatis lacus ut rhoncus faucibus. │ -│ Cras sollicitudin commodo sapien, sed bibendum velit maximus in. Aliquam ac │ -│ metus risus. Sed cursus ornare luctus. Integer aliquet lectus id massa │ -│ blandit imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget │ -│ luctus nisl. Sed ut elit iaculis, faucibus lacus eget, sodales magna. Nunc │ -│ sed commodo arcu. In hac habitasse platea dictumst. Integer luctus aliquam │ -│ justo, at vestibulum dolor iaculis ac. Etiam laoreet est eget odio rutrum, │ -│ vel malesuada lorem rhoncus. Cras finibus in neque eu euismod. Nulla │ -│ facilisi. Nunc nec aliquam quam, quis ullamcorper leo. Nunc egestas lectus │ -│ eget est porttitor, in iaculis felis scelerisque. In sem elit, fringilla id │ -│ viverra commodo, sagittis varius purus. Pellentesque rutrum lobortis neque a │ -│ facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu. │ -└──────────────────────────────────────────────────────────────────────────────┘ -""", - m_print, - ["short message", TestMPrint.long_msg], - box=True, - ) + self.assert_prints("""\ + ┌────────────────────────────────────────────────────┐ + │ short message │ + │ Lorem ipsum dolor sit amet, consectetur adipiscing │ + │ elit. Aenean condimentum consectetur purus quis │ + │ dapibus. Fusce venenatis lacus ut rhoncus │ + │ faucibus. Cras sollicitudin commodo sapien, sed │ + │ bibendum velit maximus in. Aliquam ac metus risus. │ + │ Sed cursus ornare luctus. Integer aliquet lectus │ + │ id massa blandit imperdiet. Ut sed massa eget quam │ + │ facilisis rutrum. Mauris eget luctus nisl. Sed ut │ + │ elit iaculis, faucibus lacus eget, sodales magna. │ + │ Nunc sed commodo arcu. In hac habitasse platea │ + │ dictumst. Integer luctus aliquam justo, at │ + │ vestibulum dolor iaculis ac. Etiam laoreet est │ + │ eget odio rutrum, vel malesuada lorem rhoncus. │ + │ Cras finibus in neque eu euismod. Nulla facilisi. │ + │ Nunc nec aliquam quam, quis ullamcorper leo. Nunc │ + │ egestas lectus eget est porttitor, in iaculis │ + │ felis scelerisque. In sem elit, fringilla id │ + │ viverra commodo, sagittis varius purus. │ + │ Pellentesque rutrum lobortis neque a facilisis. │ + │ Mauris id tortor placerat, aliquam dolor ac, │ + │ venenatis arcu. 
│ + └────────────────────────────────────────────────────┘ +""", m_print, ["short message", TestMPrint.long_msg], box=True, max_width = 50) - @mock.patch("builtins.input", return_value="") - def test_manual_proceed(self, _) -> None: + @mock.patch("builtins.input", return_value='') + def test_manual_proceed(self, _: Any) -> None: self.assertIsNone(m_print("test", manual_proceed=True)) class TestPhase(unittest.TestCase): - @mock.patch("time.sleep", return_value=None) - def test_phase(self, _) -> None: - self.assertIsNone(phase("Entering phase")) + + @mock.patch('time.sleep', return_value=None) + def test_phase(self, _: Any) -> None: + self.assertIsNone(phase('Entering phase')) self.assertIsNone(phase(DONE)) - self.assertIsNone(phase("Starting phase", head=1, offset=len("Finished"))) - self.assertIsNone(phase("Finished", done=True)) + self.assertIsNone(phase('Starting phase', head=1, offset=len("Finished"))) + self.assertIsNone(phase('Finished', done=True)) class TestPrintFingerprint(TFCTestCase): + def test_print_fingerprints(self) -> None: - self.assert_prints( - """\ + self.assert_prints("""\ ┌───────────────────────────────┐ │ Fingerprint for Alice │ │ │ @@ -344,117 +245,83 @@ class TestPrintFingerprint(TFCTestCase): │ 54936 03101 11892 94057 51231 │ │ 59374 09637 58434 47573 71137 │ └───────────────────────────────┘ \n""", - print_fingerprint, - FINGERPRINT_LENGTH * b"\x01", - "Fingerprint for Alice", - ) + print_fingerprint, FINGERPRINT_LENGTH * b'\x01', 'Fingerprint for Alice') class TestPrintKey(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" self.settings = Settings() def test_print_kdk(self) -> None: - self.assert_prints( - """\ + self.assert_prints("""\ ┌─────────────────────────────────────────────────────────────────────┐ │ Local key decryption key (to Receiver) │ │ A B C D E F G H I J K L M N O P Q │ │ 5Hp Hag T65 TZz G1P H3C Su6 3k8 Dbp vD8 s5i p4n EB3 kEs reA bua tmU │ └─────────────────────────────────────────────────────────────────────┘ \n""", - print_key, - "Local key decryption key (to Receiver)", - bytes(SYMMETRIC_KEY_LENGTH), - self.settings, - ) + print_key, "Local key decryption key (to Receiver)", + bytes(SYMMETRIC_KEY_LENGTH), self.settings) def test_print_kdk_local_testing(self) -> None: self.settings.local_testing_mode = True - self.assert_prints( - """\ + self.assert_prints("""\ ┌─────────────────────────────────────────────────────┐ │ Local key decryption key (to Receiver) │ │ 5HpHagT65TZzG1PH3CSu63k8DbpvD8s5ip4nEB3kEsreAbuatmU │ └─────────────────────────────────────────────────────┘ \n""", - print_key, - "Local key decryption key (to Receiver)", - bytes(SYMMETRIC_KEY_LENGTH), - self.settings, - ) + print_key, "Local key decryption key (to Receiver)", + bytes(SYMMETRIC_KEY_LENGTH), self.settings) class TestPrintTitle(TFCTestCase): + def test_print_tx_title(self) -> None: - self.assert_prints( - f"""\ + self.assert_prints(f"""\ {CLEAR_ENTIRE_SCREEN+CURSOR_LEFT_UP_CORNER} {BOLD_ON} TFC - Transmitter {VERSION} {NORMAL_TEXT}\n -""", - print_title, - TX, - ) +""", print_title, TX) def test_print_rx_title(self) -> None: - self.assert_prints( - f"""\ + self.assert_prints(f"""\ {CLEAR_ENTIRE_SCREEN+CURSOR_LEFT_UP_CORNER} {BOLD_ON} TFC - Receiver {VERSION} {NORMAL_TEXT}\n -""", - print_title, - RX, - ) +""", print_title, RX) class TestPrintOnPreviousLine(TFCTestCase): + def test_print_on_previous_line(self) -> None: - self.assert_prints( - CURSOR_UP_ONE_LINE + CLEAR_ENTIRE_LINE, print_on_previous_line - ) - self.assert_prints( - 2 * (CURSOR_UP_ONE_LINE 
+ CLEAR_ENTIRE_LINE), print_on_previous_line, reps=2 - ) - self.assert_prints( - 2 * (CURSOR_UP_ONE_LINE + CLEAR_ENTIRE_LINE), - print_on_previous_line, - reps=2, - flush=True, - ) + self.assert_prints(CURSOR_UP_ONE_LINE + CLEAR_ENTIRE_LINE, print_on_previous_line) + self.assert_prints(2 * (CURSOR_UP_ONE_LINE + CLEAR_ENTIRE_LINE), print_on_previous_line, reps=2) + self.assert_prints(2 * (CURSOR_UP_ONE_LINE + CLEAR_ENTIRE_LINE), print_on_previous_line, reps=2, flush=True) class TestPrintSpacing(TFCTestCase): + def test_print_spacing(self) -> None: for i in range(20): - self.assert_prints(i * "\n", print_spacing, i) + self.assert_prints(i * '\n', print_spacing, i) class TestRPPrint(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.ts = datetime.now() + self.ts = datetime.now() self.timestamp = self.ts.strftime("%b %d - %H:%M:%S.%f")[:-4] def test_bold_print(self) -> None: - self.assert_prints( - f"{BOLD_ON}{self.timestamp} - testMessage{NORMAL_TEXT}\n", - rp_print, - "testMessage", - self.ts, - bold=True, - ) + self.assert_prints(f"{BOLD_ON}{self.timestamp} - testMessage{NORMAL_TEXT}\n", + rp_print, "testMessage", self.ts, bold=True) def test_normal_print(self) -> None: - self.assert_prints( - f"{self.timestamp} - testMessage\n", - rp_print, - "testMessage", - self.ts, - bold=False, - ) + self.assert_prints(f"{self.timestamp} - testMessage\n", rp_print, "testMessage", self.ts, bold=False) def test_works_without_timestamp(self) -> None: self.assertIsNone(rp_print("testMessage")) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/common/test_path.py b/tests/common/test_path.py index 700d3d0..f374cae 100644 --- a/tests/common/test_path.py +++ b/tests/common/test_path.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
@@ -23,99 +23,77 @@ import os import _tkinter import unittest -from unittest import mock +from unittest import mock from unittest.mock import MagicMock +from typing import Any from src.common.path import ask_path_cli, ask_path_gui, Completer from tests.mock_classes import Settings -from tests.utils import cd_unit_test, cleanup, ignored, TFCTestCase +from tests.utils import cd_unit_test, cleanup, ignored, TFCTestCase class TestAskPathGui(TFCTestCase): - file_path = "/home/user/file.txt" - path = "/home/user/" + file_path = '/home/user/file.txt' + path = '/home/user/' def setUp(self) -> None: """Pre-test actions.""" self.settings = Settings() - @mock.patch("os.path.isfile", return_value=True) - @mock.patch("builtins.input", return_value=file_path) - def test_disabled_gui_uses_cli(self, *_) -> None: + @mock.patch('os.path.isfile', return_value=True) + @mock.patch('builtins.input', return_value=file_path) + def test_disabled_gui_uses_cli(self, *_: Any) -> None: self.settings.disable_gui_dialog = True - self.assertEqual( - ask_path_gui("prompt_msg", self.settings, get_file=True), self.file_path - ) + self.assertEqual(ask_path_gui('prompt_msg', self.settings, get_file=True), self.file_path) - @mock.patch("os.path.isfile", return_value=True) - @mock.patch("builtins.input", return_value=file_path) - @mock.patch("tkinter.filedialog.askopenfilename", side_effect=_tkinter.TclError) - def test_tcl_error_falls_back_to_cli(self, *_) -> None: - self.assertEqual( - ask_path_gui("prompt_msg", self.settings, get_file=True), self.file_path - ) + @mock.patch('os.path.isfile', return_value=True) + @mock.patch('builtins.input', return_value=file_path) + @mock.patch('tkinter.filedialog.askopenfilename', side_effect=_tkinter.TclError) + def test_tcl_error_falls_back_to_cli(self, *_: Any) -> None: + self.assertEqual(ask_path_gui('prompt_msg', self.settings, get_file=True), self.file_path) - @mock.patch("tkinter.Tk", return_value=MagicMock()) - @mock.patch("os.path.isfile", return_value=True) - @mock.patch("tkinter.filedialog.askopenfilename", return_value=file_path) - def test_get_path_to_file_gui(self, *_) -> None: - self.assertEqual( - ask_path_gui("path to file:", self.settings, get_file=True), self.file_path - ) + @mock.patch('tkinter.Tk', return_value=MagicMock()) + @mock.patch('os.path.isfile', return_value=True) + @mock.patch('tkinter.filedialog.askopenfilename', return_value=file_path) + def test_get_path_to_file_gui(self, *_: Any) -> None: + self.assertEqual(ask_path_gui('path to file:', self.settings, get_file=True), + self.file_path) - @unittest.skipIf( - "TRAVIS" in os.environ and os.environ["TRAVIS"] == "true", - "Skip as Travis has no $DISPLAY.", - ) - @mock.patch("tkinter.filedialog.askopenfilename", return_value="") - def test_no_path_to_file_raises_fr(self, _) -> None: - self.assert_se( - "File selection aborted.", ask_path_gui, "test message", self.settings, True - ) + @mock.patch('tkinter.Tk', return_value=MagicMock()) + @mock.patch('tkinter.filedialog.askopenfilename', return_value='') + def test_no_path_to_file_raises_se(self, *_: Any) -> None: + self.assert_se("File selection aborted.", ask_path_gui, 'test message', self.settings, True) - @unittest.skipIf( - "TRAVIS" in os.environ and os.environ["TRAVIS"] == "true", - "Skip as Travis has no $DISPLAY.", - ) - @mock.patch("tkinter.filedialog.askdirectory", return_value=path) - def test_get_path_gui(self, _) -> None: - self.assertEqual( - ask_path_gui("select path for file:", self.settings), self.path - ) + @mock.patch('tkinter.Tk', 
return_value=MagicMock()) + @mock.patch('tkinter.filedialog.askdirectory', return_value=path) + def test_get_path_gui(self, *_: Any) -> None: + self.assertEqual(ask_path_gui('select path for file:', self.settings), self.path) - @unittest.skipIf( - "TRAVIS" in os.environ and os.environ["TRAVIS"] == "true", - "Skip as Travis has no $DISPLAY.", - ) - @mock.patch("tkinter.filedialog.askdirectory", return_value="") - def test_no_path_raises_fr(self, _) -> None: - self.assert_se( - "Path selection aborted.", - ask_path_gui, - "test message", - self.settings, - False, - ) + @mock.patch('tkinter.Tk', return_value=MagicMock()) + @mock.patch('tkinter.filedialog.askdirectory', return_value='') + def test_no_path_raises_se(self, *_: Any) -> None: + self.assert_se("Path selection aborted.", ask_path_gui, 'test message', self.settings, False) class TestCompleter(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.cwd = os.getcwd() + self.cwd = os.getcwd() self.unit_test_dir = cd_unit_test() # Create test directory structure for the completer. - os.mkdir("outer") - os.chdir("outer/") - with open("file", "w+") as f: - f.write("text") - os.mkdir("middle") - os.chdir("middle/") - os.mkdir("inner") - os.chdir("..") - os.chdir("..") + os.mkdir('outer') + os.chdir('outer/') + with open('file', 'w+') as f: + f.write('text') + os.mkdir('middle') + os.chdir('middle/') + os.mkdir('inner') + os.chdir('..') + os.chdir('..') def tearDown(self) -> None: """Post-test actions.""" @@ -125,67 +103,52 @@ class TestCompleter(unittest.TestCase): def test_completer(self) -> None: # Test path completer = Completer(get_file=False) - self.assertEqual(completer.complete_path("outer/"), ["outer/middle/"]) - self.assertEqual(completer.path_complete(["/outer"]), []) - self.assertEqual(completer.path_complete(), ["./outer/"]) - self.assertEqual(completer.complete_path(""), ["outer/"]) - self.assertEqual( - completer.complete_path("outer/middle"), ["outer/middle/inner/"] - ) - self.assertEqual(completer.complete_path("outer/file"), ["outer/file "]) - self.assertNotEqual(completer.listdir("outer/"), []) + self.assertEqual(completer.complete_path('outer/'), ['outer/middle/']) + self.assertEqual(completer.path_complete(['/outer']), []) + self.assertEqual(completer.path_complete(), ['./outer/']) + self.assertEqual(completer.complete_path(''), ['outer/']) + self.assertEqual(completer.complete_path('outer/middle'), ['outer/middle/inner/']) + self.assertEqual(completer.complete_path('outer/file'), ['outer/file ']) + self.assertNotEqual(completer.listdir('outer/'), []) # Test file completer = Completer(get_file=True) - self.assertTrue(len(completer.complete_path("/bin/")) > 0) - self.assertTrue(completer.complete("", 0)) + self.assertTrue(len(completer.complete_path('/bin/')) > 0) + self.assertTrue(completer.complete('', 0)) class TestPath(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - with ignored(OSError): - os.mkdir("test_dir/") + with ignored(FileExistsError): + os.mkdir('test_dir/') def tearDown(self) -> None: """Post-test actions.""" with ignored(OSError): - os.remove("testfile") + os.remove('testfile') with ignored(OSError): - os.rmdir("test_dir/") + os.rmdir('test_dir/') - @mock.patch("time.sleep", return_value=None) - @mock.patch("os.path.isfile", side_effect=[False, True, True]) - @mock.patch( - "builtins.input", - side_effect=[ - "file1", - "file2", - "./test_dir", - "./testfile", - "", - "/home", - "/dir_that_does_not_exist", - "/bin/", - KeyboardInterrupt, - ], - ) - def 
test_ask_path_cli(self, *_) -> None: - self.assertEqual(ask_path_cli("path to file:", get_file=True), "file2") - self.assertEqual(ask_path_cli("prompt_msg"), "test_dir/") + @mock.patch('time.sleep', return_value=None) + @mock.patch('os.path.isfile', side_effect=[False, True, True]) + @mock.patch('builtins.input', side_effect=['file1', 'file2', './test_dir', './testfile', '', '/home', + '/dir_that_does_not_exist', '/bin/', KeyboardInterrupt]) + def test_ask_path_cli(self, *_: Any) -> None: + self.assertEqual(ask_path_cli('path to file:', get_file=True), 'file2') + self.assertEqual(ask_path_cli('prompt_msg'), 'test_dir/') - open("testfile", "a+").close() - self.assertEqual(ask_path_cli("prompt_msg", get_file=True), "testfile") + open('testfile', 'a+').close() + self.assertEqual(ask_path_cli('prompt_msg', get_file=True), 'testfile') - self.assert_se("File selection aborted.", ask_path_cli, "prompt_msg", True) + self.assert_se("File selection aborted.", ask_path_cli, 'prompt_msg', True) - self.assertEqual(ask_path_cli("prompt_msg"), "/home/") - self.assertEqual(ask_path_cli("prompt_msg"), "/bin/") + self.assertEqual(ask_path_cli('prompt_msg'), '/home/') + self.assertEqual(ask_path_cli('prompt_msg'), '/bin/') - self.assert_se( - "File path selection aborted.", ask_path_cli, "prompt_msg", False - ) + self.assert_se("File path selection aborted.", ask_path_cli, 'prompt_msg', False) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/common/test_reed_solomon.py b/tests/common/test_reed_solomon.py index cf0bb8e..8619857 100644 --- a/tests/common/test_reed_solomon.py +++ b/tests/common/test_reed_solomon.py @@ -14,113 +14,77 @@ https://github.com/tomerfiliba/reedsolomon/blob/master/LICENSE import unittest -from typing import Any, Generator, Iterator from random import sample +from typing import Dict, List, Union -from src.common.reed_solomon import ( - RSCodec, - ReedSolomonError, - find_prime_polys, - gf_add, - gf_div, - gf_mul, - gf_mult_nolut, - gf_mult_nolut_slow, - gf_neg, - gf_poly_mul, - gf_poly_mul_simple, - gf_poly_neg, - gf_sub, - init_tables, - itertools, - rs_check, - rs_correct_msg, - rs_correct_msg_nofsynd, - rs_encode_msg, - rs_generator_poly, - rs_generator_poly_all, - rs_simple_encode_msg, -) - - -class RandomSample(object): - """\ - RandomSample returns a set of random indexes - for the purpose of introducing errors to data. 
- """ - - def __init__(self, nn: int, num_errs: int) -> None: - """Create new RandomSample object.""" - self.nn = nn - self.num_errs = num_errs - - def __iter__(self) -> Iterator[int]: - """Get random indexes.""" - for i in sample(range(self.nn), self.num_errs): - index = i # type: int - yield index +from src.common.reed_solomon import (RSCodec, ReedSolomonError, find_prime_polys, gf_add, gf_div, gf_mul, gf_mult_nolut, + gf_mult_nolut_slow, gf_neg, gf_poly_mul, gf_poly_mul_simple, gf_poly_neg, gf_sub, + init_tables, itertools, rs_check, rs_correct_msg, rs_correct_msg_nofsynd, + rs_encode_msg, rs_generator_poly, rs_generator_poly_all, rs_simple_encode_msg) class TestReedSolomon(unittest.TestCase): + def test_simple(self) -> None: - rs = RSCodec() - msg = bytearray("hello world " * 10, "latin1") - enc = rs.encode(msg) + rs = RSCodec() + msg = bytearray("hello world " * 10, "latin1") + enc = rs.encode(msg) dec, dec_enc = rs.decode(enc) self.assertEqual(dec, msg) self.assertEqual(dec_enc, enc) def test_correction(self) -> None: - rs = RSCodec() - msg = bytearray("hello world " * 10, "latin1") - enc = rs.encode(msg) + rs = RSCodec() + msg = bytearray("hello world " * 10, "latin1") + enc = rs.encode(msg) rmsg, renc = rs.decode(enc) self.assertEqual(rmsg, msg) self.assertEqual(renc, enc) for i in [27, -3, -9, 7, 0]: - enc[i] = 99 - rmsg, _ = rs.decode(enc) + enc[i] = 99 + rmsg, renc = rs.decode(enc) self.assertEqual(rmsg, msg) enc[82] = 99 self.assertRaises(ReedSolomonError, rs.decode, enc) def test_check(self) -> None: - rs = RSCodec() - msg = bytearray("hello world " * 10, "latin1") - enc = rs.encode(msg) - _, renc = rs.decode(enc) + rs = RSCodec() + msg = bytearray("hello world " * 10, "latin1") + enc = rs.encode(msg) + rmsg, renc = rs.decode(enc) self.assertEqual(rs.check(enc), [True]) self.assertEqual(rs.check(renc), [True]) for i in [27, -3, -9, 7, 0]: - enc[i] = 99 - _, renc = rs.decode(enc) + enc[i] = 99 + rmsg, renc = rs.decode(enc) self.assertEqual(rs.check(enc), [False]) self.assertEqual(rs.check(renc), [True]) def test_long(self) -> None: - rs = RSCodec() - msg = bytearray("a" * 10000, "latin1") - enc = rs.encode(msg) + rs = RSCodec() + msg = bytearray("a" * 10000, "latin1") + enc = rs.encode(msg) dec, dec_enc = rs.decode(enc) self.assertEqual(dec, msg) self.assertEqual(dec_enc, enc) - enc2 = list(enc) - enc2[177] = 99 - enc2[2212] = 88 + enc2 = list(enc) + enc2[177] = 99 + enc2[2212] = 88 dec2, dec_enc2 = rs.decode(bytes(enc2)) self.assertEqual(dec2, msg) self.assertEqual(dec_enc2, enc) def test_prim_fcr_basic(self) -> None: - nn = 30 - kk = 18 - tt = nn - kk - rs = RSCodec(tt, fcr=120, prim=0x187) - hexencmsg = "00faa123555555c000000354064432" "c02800fe97c434e1ff5365cf8fafe4" - strf = str - encmsg = bytearray.fromhex(strf(hexencmsg)) - decmsg = encmsg[:kk] - tem = rs.encode(decmsg) + nn = 30 + kk = 18 + tt = nn - kk + rs = RSCodec(tt, fcr=120, prim=0x187) + hexencmsg = ('00faa123555555c000000354064432' + 'c02800fe97c434e1ff5365cf8fafe4') + strf = str + encmsg = bytearray.fromhex(strf(hexencmsg)) + decmsg = encmsg[:kk] + tem = rs.encode(decmsg) self.assertEqual(encmsg, tem, msg="encoded does not match expected") tdm, rtem = rs.decode(tem) @@ -131,55 +95,47 @@ class TestReedSolomon(unittest.TestCase): # Encoding and decoding intact message seem OK, so test errors numerrs = tt >> 1 # Inject tt/2 errors (expected to recover fully) - random_sample = RandomSample(nn, numerrs) - for i in random_sample: # Inject errors in random places - tem1[i] ^= 0xFF # flip all 8 bits + for i in 
sample(range(nn), numerrs): # inject errors in random places + tem1[i] ^= 0xff # flip all 8 bits tdm, _ = rs.decode(tem1) self.assertEqual(tdm, decmsg, msg="decoded with errors does not match original") tem1 = bytearray(tem) # Clone another copy numerrs += 1 # Inject tt/2 + 1 errors (expected to fail and detect it) - random_sample = RandomSample(nn, numerrs) - for i in random_sample: # Inject errors in random places - tem1[i] ^= 0xFF # Flip all 8 bits + for i in sample(range(nn), numerrs): # Inject errors in random places + tem1[i] ^= 0xff # Flip all 8 bits # If this fails, it means excessive errors not detected self.assertRaises(ReedSolomonError, rs.decode, tem1) def test_prim_fcr_long(self) -> None: - nn = 48 - kk = 34 - tt = nn - kk - rs = RSCodec(tt, fcr=120, prim=0x187) - hexencmsg = ( - "08faa123555555c000000354064432c0280e1b4d090cfc04" - "887400000003500000000e1985ff9c6b33066ca9f43d12e8" - ) - strf = str - encmsg = bytearray.fromhex(strf(hexencmsg)) - decmsg = encmsg[:kk] - tem = rs.encode(decmsg) + nn = 48 + kk = 34 + tt = nn - kk + rs = RSCodec(tt, fcr=120, prim=0x187) + hexencmsg = ('08faa123555555c000000354064432c0280e1b4d090cfc04' + '887400000003500000000e1985ff9c6b33066ca9f43d12e8') + strf = str + encmsg = bytearray.fromhex(strf(hexencmsg)) + decmsg = encmsg[:kk] + tem = rs.encode(decmsg) self.assertEqual(encmsg, tem, msg="encoded does not match expected") tdm, rtem = rs.decode(tem) self.assertEqual(tdm, decmsg, msg="decoded does not match original") self.assertEqual(rtem, tem, msg="decoded mesecc does not match original") - tem1 = bytearray(tem) + tem1 = bytearray(tem) numerrs = tt >> 1 - random_sample = RandomSample(nn, numerrs) - for i in random_sample: # Inject errors in random places - tem1[i] ^= 0xFF # flip all 8 bits + for i in sample(range(nn), numerrs): + tem1[i] ^= 0xff tdm, rtem = rs.decode(tem1) self.assertEqual(tdm, decmsg, msg="decoded with errors does not match original") - self.assertEqual( - rtem, tem, msg="decoded mesecc with errors does not match original" - ) + self.assertEqual(rtem, tem, msg="decoded mesecc with errors does not match original") - tem1 = bytearray(tem) + tem1 = bytearray(tem) numerrs += 1 - random_sample = RandomSample(nn, numerrs) - for i in random_sample: # Inject errors in random places - tem1[i] ^= 0xFF # flip all 8 bits + for i in sample(range(nn), numerrs): + tem1[i] ^= 0xff self.assertRaises(ReedSolomonError, rs.decode, tem1) def test_generator_poly(self) -> None: @@ -191,26 +147,22 @@ class TestReedSolomon(unittest.TestCase): k = 3 # Base 2 test - fcr = 120 + fcr = 120 generator = 2 - prim = 0x11D + prim = 0x11d init_tables(generator=generator, prim=prim) g = rs_generator_poly_all(n, fcr=fcr, generator=generator) - self.assertEqual( - list(g[n - k]), list(rs_generator_poly(n - k, fcr=fcr, generator=generator)) - ) - self.assertEqual(list(g[n - k]), [1, 106, 9, 105, 86, 5, 166, 76, 9]) + self.assertEqual(list(g[n-k]), list(rs_generator_poly(n-k, fcr=fcr, generator=generator))) + self.assertEqual(list(g[n-k]), [1, 106, 9, 105, 86, 5, 166, 76, 9]) # Base 3 test - fcr = 0 + fcr = 0 generator = 3 - prim = 0x11B + prim = 0x11b init_tables(generator=generator, prim=prim) g = rs_generator_poly_all(n, fcr=fcr, generator=generator) - self.assertEqual( - list(g[n - k]), list(rs_generator_poly(n - k, fcr=fcr, generator=generator)) - ) - self.assertEqual(list(g[n - k]), [1, 128, 13, 69, 36, 145, 199, 165, 30]) + self.assertEqual(list(g[n-k]), list(rs_generator_poly(n-k, fcr=fcr, generator=generator))) + self.assertEqual(list(g[n-k]), [1, 128, 
13, 69, 36, 145, 199, 165, 30]) def test_prime_poly_build(self) -> None: """\ @@ -218,2229 +170,218 @@ class TestReedSolomon(unittest.TestCase): different GFs (ie, GF(2^6) to GF(2^10)) and with different generators. """ - params = { - "count": 7, - "c_exp": [6, 7, 7, 8, 8, 9, 10], - "generator": [2, 2, 3, 2, 3, 2, 2], - "expected": [ - [67, 91, 97, 103, 109, 115], - [ - 131, - 137, - 143, - 145, - 157, - 167, - 171, - 185, - 191, - 193, - 203, - 211, - 213, - 229, - 239, - 241, - 247, - 253, - ], - [ - 131, - 137, - 143, - 145, - 157, - 167, - 171, - 185, - 191, - 193, - 203, - 211, - 213, - 229, - 239, - 241, - 247, - 253, - ], - [ - 285, - 299, - 301, - 333, - 351, - 355, - 357, - 361, - 369, - 391, - 397, - 425, - 451, - 463, - 487, - 501, - ], - [ - 283, - 313, - 319, - 333, - 351, - 355, - 357, - 361, - 375, - 397, - 415, - 419, - 425, - 451, - 501, - 505, - ], - [ - 529, - 539, - 545, - 557, - 563, - 601, - 607, - 617, - 623, - 631, - 637, - 647, - 661, - 675, - 677, - 687, - 695, - 701, - 719, - 721, - 731, - 757, - 761, - 787, - 789, - 799, - 803, - 817, - 827, - 847, - 859, - 865, - 875, - 877, - 883, - 895, - 901, - 911, - 949, - 953, - 967, - 971, - 973, - 981, - 985, - 995, - 1001, - 1019, - ], - [ - 1033, - 1051, - 1063, - 1069, - 1125, - 1135, - 1153, - 1163, - 1221, - 1239, - 1255, - 1267, - 1279, - 1293, - 1305, - 1315, - 1329, - 1341, - 1347, - 1367, - 1387, - 1413, - 1423, - 1431, - 1441, - 1479, - 1509, - 1527, - 1531, - 1555, - 1557, - 1573, - 1591, - 1603, - 1615, - 1627, - 1657, - 1663, - 1673, - 1717, - 1729, - 1747, - 1759, - 1789, - 1815, - 1821, - 1825, - 1849, - 1863, - 1869, - 1877, - 1881, - 1891, - 1917, - 1933, - 1939, - 1969, - 2011, - 2035, - 2041, - ], - ], - } + params = {"count": 7, + "c_exp": [6, 7, 7, 8, 8, 9, 10], + "generator": [2, 2, 3, 2, 3, 2, 2], + "expected": + [ + [67, 91, 97, 103, 109, 115], + [131, 137, 143, 145, 157, 167, 171, 185, 191, 193, 203, 211, 213, 229, 239, 241, 247, 253], + [131, 137, 143, 145, 157, 167, 171, 185, 191, 193, 203, 211, 213, 229, 239, 241, 247, 253], + [285, 299, 301, 333, 351, 355, 357, 361, 369, 391, 397, 425, 451, 463, 487, 501], + [283, 313, 319, 333, 351, 355, 357, 361, 375, 397, 415, 419, 425, 451, 501, 505], - for i in range(params["count"]): - self.assertEqual( - find_prime_polys( - generator=params["generator"][i], c_exp=params["c_exp"][i] - ), - params["expected"][i], - ) + [529, 539, 545, 557, 563, 601, 607, 617, 623, 631, 637, 647, 661, 675, 677, 687, + 695, 701, 719, 721, 731, 757, 761, 787, 789, 799, 803, 817, 827, 847, 859, 865, + 875, 877, 883, 895, 901, 911, 949, 953, 967, 971, 973, 981, 985, 995, 1001, 1019], + + [1033, 1051, 1063, 1069, 1125, 1135, 1153, 1163, 1221, 1239, 1255, 1267, 1279, 1293, 1305, + 1315, 1329, 1341, 1347, 1367, 1387, 1413, 1423, 1431, 1441, 1479, 1509, 1527, 1531, 1555, + 1557, 1573, 1591, 1603, 1615, 1627, 1657, 1663, 1673, 1717, 1729, 1747, 1759, 1789, 1815, + 1821, 1825, 1849, 1863, 1869, 1877, 1881, 1891, 1917, 1933, 1939, 1969, 2011, 2035, 2041] + ] + } + + for i in range(params['count']): + self.assertEqual(find_prime_polys(generator=params['generator'][i], + c_exp=params['c_exp'][i]), + params["expected"][i]) def test_init_tables(self) -> None: """\ Try if the look up table generator (galois field generator) works correctly for different parameters. 
""" - params = [[0x11D, 2, 8], [0x11B, 3, 8], [0xFD, 3, 7]] + params = [ + [0x11d, 2, 8], + [0x11b, 3, 8], + [0xfd, 3, 7] + ] expected = [ [ - [ - 0, - 0, - 1, - 25, - 2, - 50, - 26, - 198, - 3, - 223, - 51, - 238, - 27, - 104, - 199, - 75, - 4, - 100, - 224, - 14, - 52, - 141, - 239, - 129, - 28, - 193, - 105, - 248, - 200, - 8, - 76, - 113, - 5, - 138, - 101, - 47, - 225, - 36, - 15, - 33, - 53, - 147, - 142, - 218, - 240, - 18, - 130, - 69, - 29, - 181, - 194, - 125, - 106, - 39, - 249, - 185, - 201, - 154, - 9, - 120, - 77, - 228, - 114, - 166, - 6, - 191, - 139, - 98, - 102, - 221, - 48, - 253, - 226, - 152, - 37, - 179, - 16, - 145, - 34, - 136, - 54, - 208, - 148, - 206, - 143, - 150, - 219, - 189, - 241, - 210, - 19, - 92, - 131, - 56, - 70, - 64, - 30, - 66, - 182, - 163, - 195, - 72, - 126, - 110, - 107, - 58, - 40, - 84, - 250, - 133, - 186, - 61, - 202, - 94, - 155, - 159, - 10, - 21, - 121, - 43, - 78, - 212, - 229, - 172, - 115, - 243, - 167, - 87, - 7, - 112, - 192, - 247, - 140, - 128, - 99, - 13, - 103, - 74, - 222, - 237, - 49, - 197, - 254, - 24, - 227, - 165, - 153, - 119, - 38, - 184, - 180, - 124, - 17, - 68, - 146, - 217, - 35, - 32, - 137, - 46, - 55, - 63, - 209, - 91, - 149, - 188, - 207, - 205, - 144, - 135, - 151, - 178, - 220, - 252, - 190, - 97, - 242, - 86, - 211, - 171, - 20, - 42, - 93, - 158, - 132, - 60, - 57, - 83, - 71, - 109, - 65, - 162, - 31, - 45, - 67, - 216, - 183, - 123, - 164, - 118, - 196, - 23, - 73, - 236, - 127, - 12, - 111, - 246, - 108, - 161, - 59, - 82, - 41, - 157, - 85, - 170, - 251, - 96, - 134, - 177, - 187, - 204, - 62, - 90, - 203, - 89, - 95, - 176, - 156, - 169, - 160, - 81, - 11, - 245, - 22, - 235, - 122, - 117, - 44, - 215, - 79, - 174, - 213, - 233, - 230, - 231, - 173, - 232, - 116, - 214, - 244, - 234, - 168, - 80, - 88, - 175, - ], - [ - 1, - 2, - 4, - 8, - 16, - 32, - 64, - 128, - 29, - 58, - 116, - 232, - 205, - 135, - 19, - 38, - 76, - 152, - 45, - 90, - 180, - 117, - 234, - 201, - 143, - 3, - 6, - 12, - 24, - 48, - 96, - 192, - 157, - 39, - 78, - 156, - 37, - 74, - 148, - 53, - 106, - 212, - 181, - 119, - 238, - 193, - 159, - 35, - 70, - 140, - 5, - 10, - 20, - 40, - 80, - 160, - 93, - 186, - 105, - 210, - 185, - 111, - 222, - 161, - 95, - 190, - 97, - 194, - 153, - 47, - 94, - 188, - 101, - 202, - 137, - 15, - 30, - 60, - 120, - 240, - 253, - 231, - 211, - 187, - 107, - 214, - 177, - 127, - 254, - 225, - 223, - 163, - 91, - 182, - 113, - 226, - 217, - 175, - 67, - 134, - 17, - 34, - 68, - 136, - 13, - 26, - 52, - 104, - 208, - 189, - 103, - 206, - 129, - 31, - 62, - 124, - 248, - 237, - 199, - 147, - 59, - 118, - 236, - 197, - 151, - 51, - 102, - 204, - 133, - 23, - 46, - 92, - 184, - 109, - 218, - 169, - 79, - 158, - 33, - 66, - 132, - 21, - 42, - 84, - 168, - 77, - 154, - 41, - 82, - 164, - 85, - 170, - 73, - 146, - 57, - 114, - 228, - 213, - 183, - 115, - 230, - 209, - 191, - 99, - 198, - 145, - 63, - 126, - 252, - 229, - 215, - 179, - 123, - 246, - 241, - 255, - 227, - 219, - 171, - 75, - 150, - 49, - 98, - 196, - 149, - 55, - 110, - 220, - 165, - 87, - 174, - 65, - 130, - 25, - 50, - 100, - 200, - 141, - 7, - 14, - 28, - 56, - 112, - 224, - 221, - 167, - 83, - 166, - 81, - 162, - 89, - 178, - 121, - 242, - 249, - 239, - 195, - 155, - 43, - 86, - 172, - 69, - 138, - 9, - 18, - 36, - 72, - 144, - 61, - 122, - 244, - 245, - 247, - 243, - 251, - 235, - 203, - 139, - 11, - 22, - 44, - 88, - 176, - 125, - 250, - 233, - 207, - 131, - 27, - 54, - 108, - 216, - 173, - 71, - 142, - 1, - 2, - 4, - 8, - 16, - 32, - 64, - 
128, - 29, - 58, - 116, - 232, - 205, - 135, - 19, - 38, - 76, - 152, - 45, - 90, - 180, - 117, - 234, - 201, - 143, - 3, - 6, - 12, - 24, - 48, - 96, - 192, - 157, - 39, - 78, - 156, - 37, - 74, - 148, - 53, - 106, - 212, - 181, - 119, - 238, - 193, - 159, - 35, - 70, - 140, - 5, - 10, - 20, - 40, - 80, - 160, - 93, - 186, - 105, - 210, - 185, - 111, - 222, - 161, - 95, - 190, - 97, - 194, - 153, - 47, - 94, - 188, - 101, - 202, - 137, - 15, - 30, - 60, - 120, - 240, - 253, - 231, - 211, - 187, - 107, - 214, - 177, - 127, - 254, - 225, - 223, - 163, - 91, - 182, - 113, - 226, - 217, - 175, - 67, - 134, - 17, - 34, - 68, - 136, - 13, - 26, - 52, - 104, - 208, - 189, - 103, - 206, - 129, - 31, - 62, - 124, - 248, - 237, - 199, - 147, - 59, - 118, - 236, - 197, - 151, - 51, - 102, - 204, - 133, - 23, - 46, - 92, - 184, - 109, - 218, - 169, - 79, - 158, - 33, - 66, - 132, - 21, - 42, - 84, - 168, - 77, - 154, - 41, - 82, - 164, - 85, - 170, - 73, - 146, - 57, - 114, - 228, - 213, - 183, - 115, - 230, - 209, - 191, - 99, - 198, - 145, - 63, - 126, - 252, - 229, - 215, - 179, - 123, - 246, - 241, - 255, - 227, - 219, - 171, - 75, - 150, - 49, - 98, - 196, - 149, - 55, - 110, - 220, - 165, - 87, - 174, - 65, - 130, - 25, - 50, - 100, - 200, - 141, - 7, - 14, - 28, - 56, - 112, - 224, - 221, - 167, - 83, - 166, - 81, - 162, - 89, - 178, - 121, - 242, - 249, - 239, - 195, - 155, - 43, - 86, - 172, - 69, - 138, - 9, - 18, - 36, - 72, - 144, - 61, - 122, - 244, - 245, - 247, - 243, - 251, - 235, - 203, - 139, - 11, - 22, - 44, - 88, - 176, - 125, - 250, - 233, - 207, - 131, - 27, - 54, - 108, - 216, - 173, - 71, - 142, - ], - 255, + [0, 0, 1, 25, 2, 50, 26, 198, 3, 223, 51, 238, 27, 104, 199, 75, 4, 100, 224, 14, 52, + 141, 239, 129, 28, 193, 105, 248, 200, 8, 76, 113, 5, 138, 101, 47, 225, 36, 15, 33, + 53, 147, 142, 218, 240, 18, 130, 69, 29, 181, 194, 125, 106, 39, 249, 185, 201, 154, + 9, 120, 77, 228, 114, 166, 6, 191, 139, 98, 102, 221, 48, 253, 226, 152, 37, 179, + 16, 145, 34, 136, 54, 208, 148, 206, 143, 150, 219, 189, 241, 210, 19, 92, 131, 56, + 70, 64, 30, 66, 182, 163, 195, 72, 126, 110, 107, 58, 40, 84, 250, 133, 186, 61, 202, + 94, 155, 159, 10, 21, 121, 43, 78, 212, 229, 172, 115, 243, 167, 87, 7, 112, 192, + 247, 140, 128, 99, 13, 103, 74, 222, 237, 49, 197, 254, 24, 227, 165, 153, 119, 38, + 184, 180, 124, 17, 68, 146, 217, 35, 32, 137, 46, 55, 63, 209, 91, 149, 188, 207, + 205, 144, 135, 151, 178, 220, 252, 190, 97, 242, 86, 211, 171, 20, 42, 93, 158, 132, + 60, 57, 83, 71, 109, 65, 162, 31, 45, 67, 216, 183, 123, 164, 118, 196, 23, 73, 236, + 127, 12, 111, 246, 108, 161, 59, 82, 41, 157, 85, 170, 251, 96, 134, 177, 187, 204, + 62, 90, 203, 89, 95, 176, 156, 169, 160, 81, 11, 245, 22, 235, 122, 117, 44, 215, + 79, 174, 213, 233, 230, 231, 173, 232, 116, 214, 244, 234, 168, 80, 88, 175], + + [1, 2, 4, 8, 16, 32, 64, 128, 29, 58, 116, 232, 205, 135, 19, 38, 76, 152, 45, 90, + 180, 117, 234, 201, 143, 3, 6, 12, 24, 48, 96, 192, 157, 39, 78, 156, 37, 74, 148, + 53, 106, 212, 181, 119, 238, 193, 159, 35, 70, 140, 5, 10, 20, 40, 80, 160, 93, 186, + 105, 210, 185, 111, 222, 161, 95, 190, 97, 194, 153, 47, 94, 188, 101, 202, 137, 15, + 30, 60, 120, 240, 253, 231, 211, 187, 107, 214, 177, 127, 254, 225, 223, 163, 91, + 182, 113, 226, 217, 175, 67, 134, 17, 34, 68, 136, 13, 26, 52, 104, 208, 189, 103, + 206, 129, 31, 62, 124, 248, 237, 199, 147, 59, 118, 236, 197, 151, 51, 102, 204, 133, + 23, 46, 92, 184, 109, 218, 169, 79, 158, 33, 66, 132, 21, 42, 84, 168, 77, 154, 41, + 82, 164, 85, 
170, 73, 146, 57, 114, 228, 213, 183, 115, 230, 209, 191, 99, 198, 145, + 63, 126, 252, 229, 215, 179, 123, 246, 241, 255, 227, 219, 171, 75, 150, 49, 98, 196, + 149, 55, 110, 220, 165, 87, 174, 65, 130, 25, 50, 100, 200, 141, 7, 14, 28, 56, 112, + 224, 221, 167, 83, 166, 81, 162, 89, 178, 121, 242, 249, 239, 195, 155, 43, 86, 172, + 69, 138, 9, 18, 36, 72, 144, 61, 122, 244, 245, 247, 243, 251, 235, 203, 139, 11, 22, + 44, 88, 176, 125, 250, 233, 207, 131, 27, 54, 108, 216, 173, 71, 142, 1, 2, 4, 8, 16, + 32, 64, 128, 29, 58, 116, 232, 205, 135, 19, 38, 76, 152, 45, 90, 180, 117, 234, 201, + 143, 3, 6, 12, 24, 48, 96, 192, 157, 39, 78, 156, 37, 74, 148, 53, 106, 212, 181, + 119, 238, 193, 159, 35, 70, 140, 5, 10, 20, 40, 80, 160, 93, 186, 105, 210, 185, 111, + 222, 161, 95, 190, 97, 194, 153, 47, 94, 188, 101, 202, 137, 15, 30, 60, 120, 240, + 253, 231, 211, 187, 107, 214, 177, 127, 254, 225, 223, 163, 91, 182, 113, 226, 217, + 175, 67, 134, 17, 34, 68, 136, 13, 26, 52, 104, 208, 189, 103, 206, 129, 31, 62, 124, + 248, 237, 199, 147, 59, 118, 236, 197, 151, 51, 102, 204, 133, 23, 46, 92, 184, 109, + 218, 169, 79, 158, 33, 66, 132, 21, 42, 84, 168, 77, 154, 41, 82, 164, 85, 170, 73, + 146, 57, 114, 228, 213, 183, 115, 230, 209, 191, 99, 198, 145, 63, 126, 252, 229, + 215, 179, 123, 246, 241, 255, 227, 219, 171, 75, 150, 49, 98, 196, 149, 55, 110, 220, + 165, 87, 174, 65, 130, 25, 50, 100, 200, 141, 7, 14, 28, 56, 112, 224, 221, 167, 83, + 166, 81, 162, 89, 178, 121, 242, 249, 239, 195, 155, 43, 86, 172, 69, 138, 9, 18, 36, + 72, 144, 61, 122, 244, 245, 247, 243, 251, 235, 203, 139, 11, 22, 44, 88, 176, 125, + 250, 233, 207, 131, 27, 54, 108, 216, 173, 71, 142], + + 255 ], [ - [ - 0, - 0, - 25, - 1, - 50, - 2, - 26, - 198, - 75, - 199, - 27, - 104, - 51, - 238, - 223, - 3, - 100, - 4, - 224, - 14, - 52, - 141, - 129, - 239, - 76, - 113, - 8, - 200, - 248, - 105, - 28, - 193, - 125, - 194, - 29, - 181, - 249, - 185, - 39, - 106, - 77, - 228, - 166, - 114, - 154, - 201, - 9, - 120, - 101, - 47, - 138, - 5, - 33, - 15, - 225, - 36, - 18, - 240, - 130, - 69, - 53, - 147, - 218, - 142, - 150, - 143, - 219, - 189, - 54, - 208, - 206, - 148, - 19, - 92, - 210, - 241, - 64, - 70, - 131, - 56, - 102, - 221, - 253, - 48, - 191, - 6, - 139, - 98, - 179, - 37, - 226, - 152, - 34, - 136, - 145, - 16, - 126, - 110, - 72, - 195, - 163, - 182, - 30, - 66, - 58, - 107, - 40, - 84, - 250, - 133, - 61, - 186, - 43, - 121, - 10, - 21, - 155, - 159, - 94, - 202, - 78, - 212, - 172, - 229, - 243, - 115, - 167, - 87, - 175, - 88, - 168, - 80, - 244, - 234, - 214, - 116, - 79, - 174, - 233, - 213, - 231, - 230, - 173, - 232, - 44, - 215, - 117, - 122, - 235, - 22, - 11, - 245, - 89, - 203, - 95, - 176, - 156, - 169, - 81, - 160, - 127, - 12, - 246, - 111, - 23, - 196, - 73, - 236, - 216, - 67, - 31, - 45, - 164, - 118, - 123, - 183, - 204, - 187, - 62, - 90, - 251, - 96, - 177, - 134, - 59, - 82, - 161, - 108, - 170, - 85, - 41, - 157, - 151, - 178, - 135, - 144, - 97, - 190, - 220, - 252, - 188, - 149, - 207, - 205, - 55, - 63, - 91, - 209, - 83, - 57, - 132, - 60, - 65, - 162, - 109, - 71, - 20, - 42, - 158, - 93, - 86, - 242, - 211, - 171, - 68, - 17, - 146, - 217, - 35, - 32, - 46, - 137, - 180, - 124, - 184, - 38, - 119, - 153, - 227, - 165, - 103, - 74, - 237, - 222, - 197, - 49, - 254, - 24, - 13, - 99, - 140, - 128, - 192, - 247, - 112, - 7, - ], - [ - 1, - 3, - 5, - 15, - 17, - 51, - 85, - 255, - 26, - 46, - 114, - 150, - 161, - 248, - 19, - 53, - 95, - 225, - 56, - 72, - 216, - 115, - 149, - 164, - 247, 
- 2, - 6, - 10, - 30, - 34, - 102, - 170, - 229, - 52, - 92, - 228, - 55, - 89, - 235, - 38, - 106, - 190, - 217, - 112, - 144, - 171, - 230, - 49, - 83, - 245, - 4, - 12, - 20, - 60, - 68, - 204, - 79, - 209, - 104, - 184, - 211, - 110, - 178, - 205, - 76, - 212, - 103, - 169, - 224, - 59, - 77, - 215, - 98, - 166, - 241, - 8, - 24, - 40, - 120, - 136, - 131, - 158, - 185, - 208, - 107, - 189, - 220, - 127, - 129, - 152, - 179, - 206, - 73, - 219, - 118, - 154, - 181, - 196, - 87, - 249, - 16, - 48, - 80, - 240, - 11, - 29, - 39, - 105, - 187, - 214, - 97, - 163, - 254, - 25, - 43, - 125, - 135, - 146, - 173, - 236, - 47, - 113, - 147, - 174, - 233, - 32, - 96, - 160, - 251, - 22, - 58, - 78, - 210, - 109, - 183, - 194, - 93, - 231, - 50, - 86, - 250, - 21, - 63, - 65, - 195, - 94, - 226, - 61, - 71, - 201, - 64, - 192, - 91, - 237, - 44, - 116, - 156, - 191, - 218, - 117, - 159, - 186, - 213, - 100, - 172, - 239, - 42, - 126, - 130, - 157, - 188, - 223, - 122, - 142, - 137, - 128, - 155, - 182, - 193, - 88, - 232, - 35, - 101, - 175, - 234, - 37, - 111, - 177, - 200, - 67, - 197, - 84, - 252, - 31, - 33, - 99, - 165, - 244, - 7, - 9, - 27, - 45, - 119, - 153, - 176, - 203, - 70, - 202, - 69, - 207, - 74, - 222, - 121, - 139, - 134, - 145, - 168, - 227, - 62, - 66, - 198, - 81, - 243, - 14, - 18, - 54, - 90, - 238, - 41, - 123, - 141, - 140, - 143, - 138, - 133, - 148, - 167, - 242, - 13, - 23, - 57, - 75, - 221, - 124, - 132, - 151, - 162, - 253, - 28, - 36, - 108, - 180, - 199, - 82, - 246, - 1, - 3, - 5, - 15, - 17, - 51, - 85, - 255, - 26, - 46, - 114, - 150, - 161, - 248, - 19, - 53, - 95, - 225, - 56, - 72, - 216, - 115, - 149, - 164, - 247, - 2, - 6, - 10, - 30, - 34, - 102, - 170, - 229, - 52, - 92, - 228, - 55, - 89, - 235, - 38, - 106, - 190, - 217, - 112, - 144, - 171, - 230, - 49, - 83, - 245, - 4, - 12, - 20, - 60, - 68, - 204, - 79, - 209, - 104, - 184, - 211, - 110, - 178, - 205, - 76, - 212, - 103, - 169, - 224, - 59, - 77, - 215, - 98, - 166, - 241, - 8, - 24, - 40, - 120, - 136, - 131, - 158, - 185, - 208, - 107, - 189, - 220, - 127, - 129, - 152, - 179, - 206, - 73, - 219, - 118, - 154, - 181, - 196, - 87, - 249, - 16, - 48, - 80, - 240, - 11, - 29, - 39, - 105, - 187, - 214, - 97, - 163, - 254, - 25, - 43, - 125, - 135, - 146, - 173, - 236, - 47, - 113, - 147, - 174, - 233, - 32, - 96, - 160, - 251, - 22, - 58, - 78, - 210, - 109, - 183, - 194, - 93, - 231, - 50, - 86, - 250, - 21, - 63, - 65, - 195, - 94, - 226, - 61, - 71, - 201, - 64, - 192, - 91, - 237, - 44, - 116, - 156, - 191, - 218, - 117, - 159, - 186, - 213, - 100, - 172, - 239, - 42, - 126, - 130, - 157, - 188, - 223, - 122, - 142, - 137, - 128, - 155, - 182, - 193, - 88, - 232, - 35, - 101, - 175, - 234, - 37, - 111, - 177, - 200, - 67, - 197, - 84, - 252, - 31, - 33, - 99, - 165, - 244, - 7, - 9, - 27, - 45, - 119, - 153, - 176, - 203, - 70, - 202, - 69, - 207, - 74, - 222, - 121, - 139, - 134, - 145, - 168, - 227, - 62, - 66, - 198, - 81, - 243, - 14, - 18, - 54, - 90, - 238, - 41, - 123, - 141, - 140, - 143, - 138, - 133, - 148, - 167, - 242, - 13, - 23, - 57, - 75, - 221, - 124, - 132, - 151, - 162, - 253, - 28, - 36, - 108, - 180, - 199, - 82, - 246, - ], - 255, + [0, 0, 25, 1, 50, 2, 26, 198, 75, 199, 27, 104, 51, 238, 223, 3, 100, 4, 224, 14, 52, + 141, 129, 239, 76, 113, 8, 200, 248, 105, 28, 193, 125, 194, 29, 181, 249, 185, 39, + 106, 77, 228, 166, 114, 154, 201, 9, 120, 101, 47, 138, 5, 33, 15, 225, 36, 18, 240, + 130, 69, 53, 147, 218, 142, 150, 143, 219, 189, 54, 208, 206, 148, 19, 92, 210, 
241, + 64, 70, 131, 56, 102, 221, 253, 48, 191, 6, 139, 98, 179, 37, 226, 152, 34, 136, 145, + 16, 126, 110, 72, 195, 163, 182, 30, 66, 58, 107, 40, 84, 250, 133, 61, 186, 43, 121, + 10, 21, 155, 159, 94, 202, 78, 212, 172, 229, 243, 115, 167, 87, 175, 88, 168, 80, + 244, 234, 214, 116, 79, 174, 233, 213, 231, 230, 173, 232, 44, 215, 117, 122, 235, + 22, 11, 245, 89, 203, 95, 176, 156, 169, 81, 160, 127, 12, 246, 111, 23, 196, 73, + 236, 216, 67, 31, 45, 164, 118, 123, 183, 204, 187, 62, 90, 251, 96, 177, 134, 59, + 82, 161, 108, 170, 85, 41, 157, 151, 178, 135, 144, 97, 190, 220, 252, 188, 149, 207, + 205, 55, 63, 91, 209, 83, 57, 132, 60, 65, 162, 109, 71, 20, 42, 158, 93, 86, 242, + 211, 171, 68, 17, 146, 217, 35, 32, 46, 137, 180, 124, 184, 38, 119, 153, 227, 165, + 103, 74, 237, 222, 197, 49, 254, 24, 13, 99, 140, 128, 192, 247, 112, 7], + + [1, 3, 5, 15, 17, 51, 85, 255, 26, 46, 114, 150, 161, 248, 19, 53, 95, 225, 56, 72, + 216, 115, 149, 164, 247, 2, 6, 10, 30, 34, 102, 170, 229, 52, 92, 228, 55, 89, 235, + 38, 106, 190, 217, 112, 144, 171, 230, 49, 83, 245, 4, 12, 20, 60, 68, 204, 79, 209, + 104, 184, 211, 110, 178, 205, 76, 212, 103, 169, 224, 59, 77, 215, 98, 166, 241, 8, + 24, 40, 120, 136, 131, 158, 185, 208, 107, 189, 220, 127, 129, 152, 179, 206, 73, + 219, 118, 154, 181, 196, 87, 249, 16, 48, 80, 240, 11, 29, 39, 105, 187, 214, 97, + 163, 254, 25, 43, 125, 135, 146, 173, 236, 47, 113, 147, 174, 233, 32, 96, 160, 251, + 22, 58, 78, 210, 109, 183, 194, 93, 231, 50, 86, 250, 21, 63, 65, 195, 94, 226, 61, + 71, 201, 64, 192, 91, 237, 44, 116, 156, 191, 218, 117, 159, 186, 213, 100, 172, 239, + 42, 126, 130, 157, 188, 223, 122, 142, 137, 128, 155, 182, 193, 88, 232, 35, 101, + 175, 234, 37, 111, 177, 200, 67, 197, 84, 252, 31, 33, 99, 165, 244, 7, 9, 27, 45, + 119, 153, 176, 203, 70, 202, 69, 207, 74, 222, 121, 139, 134, 145, 168, 227, 62, 66, + 198, 81, 243, 14, 18, 54, 90, 238, 41, 123, 141, 140, 143, 138, 133, 148, 167, 242, + 13, 23, 57, 75, 221, 124, 132, 151, 162, 253, 28, 36, 108, 180, 199, 82, 246, 1, 3, + 5, 15, 17, 51, 85, 255, 26, 46, 114, 150, 161, 248, 19, 53, 95, 225, 56, 72, 216, + 115, 149, 164, 247, 2, 6, 10, 30, 34, 102, 170, 229, 52, 92, 228, 55, 89, 235, 38, + 106, 190, 217, 112, 144, 171, 230, 49, 83, 245, 4, 12, 20, 60, 68, 204, 79, 209, + 104, 184, 211, 110, 178, 205, 76, 212, 103, 169, 224, 59, 77, 215, 98, 166, 241, 8, + 24, 40, 120, 136, 131, 158, 185, 208, 107, 189, 220, 127, 129, 152, 179, 206, 73, + 219, 118, 154, 181, 196, 87, 249, 16, 48, 80, 240, 11, 29, 39, 105, 187, 214, 97, + 163, 254, 25, 43, 125, 135, 146, 173, 236, 47, 113, 147, 174, 233, 32, 96, 160, 251, + 22, 58, 78, 210, 109, 183, 194, 93, 231, 50, 86, 250, 21, 63, 65, 195, 94, 226, 61, + 71, 201, 64, 192, 91, 237, 44, 116, 156, 191, 218, 117, 159, 186, 213, 100, 172, + 239, 42, 126, 130, 157, 188, 223, 122, 142, 137, 128, 155, 182, 193, 88, 232, 35, + 101, 175, 234, 37, 111, 177, 200, 67, 197, 84, 252, 31, 33, 99, 165, 244, 7, 9, 27, + 45, 119, 153, 176, 203, 70, 202, 69, 207, 74, 222, 121, 139, 134, 145, 168, 227, 62, + 66, 198, 81, 243, 14, 18, 54, 90, 238, 41, 123, 141, 140, 143, 138, 133, 148, 167, + 242, 13, 23, 57, 75, 221, 124, 132, 151, 162, 253, 28, 36, 108, 180, 199, 82, 246], + + 255 ], [ - [ - 0, - 0, - 7, - 1, - 14, - 2, - 8, - 56, - 21, - 57, - 9, - 90, - 15, - 31, - 63, - 3, - 28, - 4, - 64, - 67, - 16, - 112, - 97, - 32, - 22, - 47, - 38, - 58, - 70, - 91, - 10, - 108, - 35, - 109, - 11, - 87, - 71, - 79, - 74, - 92, - 23, - 82, - 119, - 48, - 104, - 59, - 39, - 100, - 
29, - 19, - 54, - 5, - 45, - 68, - 65, - 95, - 77, - 33, - 98, - 117, - 17, - 43, - 115, - 113, - 42, - 114, - 116, - 76, - 18, - 53, - 94, - 44, - 78, - 73, - 86, - 34, - 81, - 118, - 99, - 103, - 30, - 62, - 89, - 20, - 126, - 6, - 55, - 13, - 111, - 96, - 66, - 27, - 46, - 37, - 107, - 69, - 36, - 106, - 26, - 110, - 61, - 88, - 12, - 125, - 52, - 93, - 75, - 41, - 72, - 85, - 102, - 80, - 84, - 101, - 40, - 51, - 105, - 25, - 124, - 60, - 24, - 123, - 50, - 83, - 122, - 49, - 120, - 121, - ], - [ - 1, - 3, - 5, - 15, - 17, - 51, - 85, - 2, - 6, - 10, - 30, - 34, - 102, - 87, - 4, - 12, - 20, - 60, - 68, - 49, - 83, - 8, - 24, - 40, - 120, - 117, - 98, - 91, - 16, - 48, - 80, - 13, - 23, - 57, - 75, - 32, - 96, - 93, - 26, - 46, - 114, - 107, - 64, - 61, - 71, - 52, - 92, - 25, - 43, - 125, - 122, - 115, - 104, - 69, - 50, - 86, - 7, - 9, - 27, - 45, - 119, - 100, - 81, - 14, - 18, - 54, - 90, - 19, - 53, - 95, - 28, - 36, - 108, - 73, - 38, - 106, - 67, - 56, - 72, - 37, - 111, - 76, - 41, - 123, - 112, - 109, - 74, - 35, - 101, - 82, - 11, - 29, - 39, - 105, - 70, - 55, - 89, - 22, - 58, - 78, - 47, - 113, - 110, - 79, - 44, - 116, - 97, - 94, - 31, - 33, - 99, - 88, - 21, - 63, - 65, - 62, - 66, - 59, - 77, - 42, - 126, - 127, - 124, - 121, - 118, - 103, - 84, - 1, - 3, - 5, - 15, - 17, - 51, - 85, - 2, - 6, - 10, - 30, - 34, - 102, - 87, - 4, - 12, - 20, - 60, - 68, - 49, - 83, - 8, - 24, - 40, - 120, - 117, - 98, - 91, - 16, - 48, - 80, - 13, - 23, - 57, - 75, - 32, - 96, - 93, - 26, - 46, - 114, - 107, - 64, - 61, - 71, - 52, - 92, - 25, - 43, - 125, - 122, - 115, - 104, - 69, - 50, - 86, - 7, - 9, - 27, - 45, - 119, - 100, - 81, - 14, - 18, - 54, - 90, - 19, - 53, - 95, - 28, - 36, - 108, - 73, - 38, - 106, - 67, - 56, - 72, - 37, - 111, - 76, - 41, - 123, - 112, - 109, - 74, - 35, - 101, - 82, - 11, - 29, - 39, - 105, - 70, - 55, - 89, - 22, - 58, - 78, - 47, - 113, - 110, - 79, - 44, - 116, - 97, - 94, - 31, - 33, - 99, - 88, - 21, - 63, - 65, - 62, - 66, - 59, - 77, - 42, - 126, - 127, - 124, - 121, - 118, - 103, - 84, - ], - 127, - ], + [0, 0, 7, 1, 14, 2, 8, 56, 21, 57, 9, 90, 15, 31, 63, 3, 28, 4, 64, 67, 16, 112, 97, + 32, 22, 47, 38, 58, 70, 91, 10, 108, 35, 109, 11, 87, 71, 79, 74, 92, 23, 82, 119, + 48, 104, 59, 39, 100, 29, 19, 54, 5, 45, 68, 65, 95, 77, 33, 98, 117, 17, 43, 115, + 113, 42, 114, 116, 76, 18, 53, 94, 44, 78, 73, 86, 34, 81, 118, 99, 103, 30, 62, 89, + 20, 126, 6, 55, 13, 111, 96, 66, 27, 46, 37, 107, 69, 36, 106, 26, 110, 61, 88, 12, + 125, 52, 93, 75, 41, 72, 85, 102, 80, 84, 101, 40, 51, 105, 25, 124, 60, 24, 123, 50, + 83, 122, 49, 120, 121], + + [1, 3, 5, 15, 17, 51, 85, 2, 6, 10, 30, 34, 102, 87, 4, 12, 20, 60, 68, 49, 83, 8, 24, + 40, 120, 117, 98, 91, 16, 48, 80, 13, 23, 57, 75, 32, 96, 93, 26, 46, 114, 107, 64, + 61, 71, 52, 92, 25, 43, 125, 122, 115, 104, 69, 50, 86, 7, 9, 27, 45, 119, 100, 81, + 14, 18, 54, 90, 19, 53, 95, 28, 36, 108, 73, 38, 106, 67, 56, 72, 37, 111, 76, 41, + 123, 112, 109, 74, 35, 101, 82, 11, 29, 39, 105, 70, 55, 89, 22, 58, 78, 47, 113, + 110, 79, 44, 116, 97, 94, 31, 33, 99, 88, 21, 63, 65, 62, 66, 59, 77, 42, 126, 127, + 124, 121, 118, 103, 84, 1, 3, 5, 15, 17, 51, 85, 2, 6, 10, 30, 34, 102, 87, 4, 12, + 20, 60, 68, 49, 83, 8, 24, 40, 120, 117, 98, 91, 16, 48, 80, 13, 23, 57, 75, 32, 96, + 93, 26, 46, 114, 107, 64, 61, 71, 52, 92, 25, 43, 125, 122, 115, 104, 69, 50, 86, 7, + 9, 27, 45, 119, 100, 81, 14, 18, 54, 90, 19, 53, 95, 28, 36, 108, 73, 38, 106, 67, + 56, 72, 37, 111, 76, 41, 123, 112, 109, 74, 35, 101, 82, 11, 
29, 39, 105, 70, 55, 89, + 22, 58, 78, 47, 113, 110, 79, 44, 116, 97, 94, 31, 33, 99, 88, 21, 63, 65, 62, 66, + 59, 77, 42, 126, 127, 124, 121, 118, 103, 84], + + 127 + ] ] for i, _ in enumerate(params): - p = params[i] + p = params[i] expected_log_t, expected_exp_t, expected_field_charac_t = expected[i] - log_t, exp_t, field_charac_t = init_tables( - prim=p[0], generator=p[1], c_exp=p[2] - ) + log_t, exp_t, field_charac_t = init_tables(prim=p[0], generator=p[1], c_exp=p[2]) self.assertEqual(field_charac_t, expected_field_charac_t) self.assertEqual(list(log_t), expected_log_t) self.assertEqual(list(exp_t), expected_exp_t) class TestBigReedSolomon(unittest.TestCase): + def test_find_prime_polys(self) -> None: self.assertEqual(find_prime_polys(c_exp=4), [19, 25]) - self.assertEqual( - find_prime_polys(), - [ - 285, - 299, - 301, - 333, - 351, - 355, - 357, - 361, - 369, - 391, - 397, - 425, - 451, - 463, - 487, - 501, - ], - ) + self.assertEqual(find_prime_polys(), + [285, 299, 301, 333, 351, 355, 357, 361, 369, 391, 397, 425, 451, 463, 487, 501]) self.assertEqual(find_prime_polys(fast_primes=True), [397, 463, 487]) self.assertEqual(find_prime_polys(c_exp=9, fast_primes=True, single=True), 557) def test_c_exp_9(self) -> None: - rsc = RSCodec(12, c_exp=9) + rsc = RSCodec(12, c_exp=9) rsc2 = RSCodec(12, nsize=511) self.assertEqual(rsc.c_exp, rsc2.c_exp) self.assertEqual(rsc.nsize, rsc2.nsize) - mes = "a" * ((511 - 12) * 2) - mesecc = rsc.encode(mes) - mesecc[2] = 1 - mesecc[-1] = 1 + mes = 'a'*((511-12)*2) + mesecc = rsc.encode(mes) + mesecc[2] = 1 + mesecc[-1] = 1 rmes, rmesecc = rsc.decode(mesecc) - self.assertEqual(rsc.check(mesecc), [False, False]) - self.assertEqual(rsc.check(rmesecc), [True, True]) - self.assertEqual([x for x in rmes], [ord(x) for x in mes]) + self.assertEqual(rsc.check(mesecc), [False, False]) + self.assertEqual(rsc.check(rmesecc), [True, True]) + self.assertEqual([x for x in rmes], [ord(x) for x in mes]) def test_c_exp_12(self) -> None: - rsc = RSCodec(12, c_exp=12) + rsc = RSCodec(12, c_exp=12) rsc2 = RSCodec(12, nsize=4095) self.assertEqual(rsc.c_exp, rsc2.c_exp) self.assertEqual(rsc.nsize, rsc2.nsize) - mes = "a" * (4095 - 12) - mesecc = rsc.encode(mes) - mesecc[2] = 1 - mesecc[-1] = 1 + mes = 'a'*(4095-12) + mesecc = rsc.encode(mes) + mesecc[2] = 1 + mesecc[-1] = 1 rmes, rmesecc = rsc.decode(mesecc) - self.assertEqual(rsc.check(mesecc), [False]) + self.assertEqual(rsc.check(mesecc), [False]) self.assertEqual(rsc.check(rmesecc), [True]) - self.assertEqual([x for x in rmes], [ord(x) for x in mes]) + self.assertEqual([x for x in rmes], [ord(x) for x in mes]) - @staticmethod - def test_multiple_rs_codec() -> None: + def test_multiple_rs_codec(self) -> None: """Test multiple RSCodec instances with different parameters.""" - mes = "A" * 30 - rs_256 = RSCodec(102) + mes = 'A' * 30 + rs_256 = RSCodec(102) rs_1024 = RSCodec(900, c_exp=10) bytearray(rs_1024.decode(rs_1024.encode(mes))[0]) rs_256.encode(mes) @@ -2461,20 +402,20 @@ class TestGFArithmetics(unittest.TestCase): b = 19 generator = 2 - prim = 0x11D + prim = 0x11d # Compare the LUT multiplication and noLUT. 
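# Illustrative sketch (not the vendored implementation): gf_mult_nolut is expected to
# behave like the following carry-less "Russian peasant" multiplication in GF(2^8),
# with intermediate products reduced modulo the primitive polynomial `prim` whenever
# they overflow the field. The assert reuses an expected value from the assertions
# below (2 * 200 == 141 under prim=0x11d); the function name is hypothetical.

def gf_mult_nolut_sketch(x: int, y: int, prim: int = 0x11d, field_charac_full: int = 256) -> int:
    """Multiply x and y in GF(2^8) without lookup tables."""
    result = 0
    while y:
        if y & 1:           # Add (XOR) x once for every set bit of y.
            result ^= x
        y >>= 1
        x <<= 1
        if prim > 0 and x & field_charac_full:
            x ^= prim       # Reduce modulo the primitive polynomial.
    return result

assert gf_mult_nolut_sketch(2, 200, prim=0x11d) == 141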
init_tables(prim=prim, generator=generator) self.assertEqual(gf_mul(a, b), gf_mult_nolut(a, b, prim=prim)) # More Galois Field multiplications - self.assertEqual(gf_mult_nolut(5, 6, prim=0x11B), 30) - self.assertEqual(gf_mult_nolut(3, 125, prim=0x11B), 135) - self.assertEqual(gf_mult_nolut(2, 200, prim=0x11D), 141) - self.assertEqual(gf_mult_nolut_slow(2, 200, prim=0x11D), 141) + self.assertEqual(gf_mult_nolut(5, 6, prim=0x11b), 30) + self.assertEqual(gf_mult_nolut(3, 125, prim=0x11b), 135) + self.assertEqual(gf_mult_nolut(2, 200, prim=0x11d), 141) + self.assertEqual(gf_mult_nolut_slow(2, 200, prim=0x11d), 141) # Multiplications in GF(2^7) - self.assertEqual(gf_mult_nolut(3, 125, prim=0xFD, field_charac_full=128), 122) + self.assertEqual(gf_mult_nolut(3, 125, prim=0xfd, field_charac_full=128), 122) # Multiplications outside of the finite field (we revert to # standard integer multiplications just to see if it works). @@ -2491,24 +432,24 @@ class TestGFArithmetics(unittest.TestCase): b = 19 # Addition and subtraction (they are the same in GF(2^p) - self.assertEqual(gf_add(0, 0), 0) - self.assertEqual(gf_add(0, 0), gf_sub(0, 0)) - self.assertEqual(gf_add(1, 0), 1) - self.assertEqual(gf_add(1, 0), gf_sub(1, 0)) - self.assertEqual(gf_add(0, 1), 1) - self.assertEqual(gf_add(0, 1), gf_sub(0, 1)) - self.assertEqual(gf_add(1, 1), 0) - self.assertEqual(gf_add(1, 1), gf_sub(1, 1)) - self.assertEqual(gf_add(a, b), 13) - self.assertEqual(gf_add(a, b), gf_sub(a, b)) - self.assertEqual(gf_add(0, b), b) - self.assertEqual(gf_add(0, b), gf_sub(0, b)) - self.assertEqual(gf_add(a, 0), a) - self.assertEqual(gf_add(a, 0), gf_sub(a, 0)) - self.assertEqual(gf_add(a, 1), (a + 1)) - self.assertEqual(gf_add(a, 1), gf_sub(a, 1)) - self.assertEqual(gf_add(1, a), (a + 1)) - self.assertEqual(gf_add(1, a), gf_sub(1, a)) + self.assertEqual(gf_add(0, 0), 0) + self.assertEqual(gf_add(0, 0), gf_sub(0, 0)) + self.assertEqual(gf_add(1, 0), 1) + self.assertEqual(gf_add(1, 0), gf_sub(1, 0)) + self.assertEqual(gf_add(0, 1), 1) + self.assertEqual(gf_add(0, 1), gf_sub(0, 1)) + self.assertEqual(gf_add(1, 1), 0) + self.assertEqual(gf_add(1, 1), gf_sub(1, 1)) + self.assertEqual(gf_add(a, b), 13) + self.assertEqual(gf_add(a, b), gf_sub(a, b)) + self.assertEqual(gf_add(0, b), b) + self.assertEqual(gf_add(0, b), gf_sub(0, b)) + self.assertEqual(gf_add(a, 0), a) + self.assertEqual(gf_add(a, 0), gf_sub(a, 0)) + self.assertEqual(gf_add(a, 1), (a+1)) + self.assertEqual(gf_add(a, 1), gf_sub(a, 1)) + self.assertEqual(gf_add(1, a), (a+1)) + self.assertEqual(gf_add(1, a), gf_sub(1, a)) self.assertEqual(gf_add(255, 1), 254) # Negation @@ -2516,11 +457,11 @@ class TestGFArithmetics(unittest.TestCase): self.assertEqual(gf_neg(b), b) # Division - self.assertEqual(gf_div(a, 1), a) + self.assertEqual(gf_div(a, 1), a) self.assertEqual(gf_div(12, 3), 4) - self.assertEqual(gf_div(a, b), 222) - self.assertEqual(gf_div(b, a), 25) - self.assertEqual(gf_div(0, a), 0) + self.assertEqual(gf_div(a, b), 222) + self.assertEqual(gf_div(b, a), 25) + self.assertEqual(gf_div(0, a), 0) self.assertRaises(ZeroDivisionError, gf_div, *[a, 0]) @@ -2531,8 +472,8 @@ class TestSimpleFuncs(unittest.TestCase): """ def test_gf_poly_mul_simple(self) -> None: - a = [1, 12, 14, 9] - b = [0, 23, 2, 15] + a = [1, 12, 14, 9] + b = [0, 23, 2, 15] self.assertEqual(gf_poly_mul(a, b), gf_poly_mul_simple(a, b)) def test_gf_poly_neg(self) -> None: @@ -2540,10 +481,11 @@ class TestSimpleFuncs(unittest.TestCase): self.assertEqual(gf_poly_neg(a), a) def test_rs_simple_encode_msg(self) -> 
None: - a = bytearray("hello world", "latin1") + a = bytearray("hello world", "latin1") nsym = 10 init_tables() - self.assertEqual(rs_simple_encode_msg(a, nsym), rs_encode_msg(a, nsym)) + self.assertEqual(rs_simple_encode_msg(a, nsym), + rs_encode_msg(a, nsym)) class TestRSCodecUniversalCrossValidation(unittest.TestCase): @@ -2554,51 +496,47 @@ class TestRSCodecUniversalCrossValidation(unittest.TestCase): """ def test_main(self) -> None: - def cartesian_product_dict_items(dicts) -> Generator[dict, Any, None]: + + def cartesian_product_dict_items(dicts: Dict[str, Union[List[int], List[List[Union[int, bool]]]]]): """Return dictionary of cartesian products.""" return (dict(zip(dicts, x)) for x in itertools.product(*dicts.values())) orig_mes = bytearray("hello world", "latin1") - n = len(orig_mes) * 2 - k = len(orig_mes) - istart = 0 + n = len(orig_mes)*2 + k = len(orig_mes) + istart = 0 - params = { - "count": 5, - "fcr": [120, 0, 1, 1, 1], - "prim": [0x187, 0x11D, 0x11B, 0xFD, 0xFD], - "generator": [2, 2, 3, 3, 2], - "c_exponent": [8, 8, 8, 7, 7], - } + params = {"count": 5, + "fcr": [120, 0, 1, 1, 1], + "prim": [0x187, 0x11d, 0x11b, 0xfd, 0xfd], + "generator": [2, 2, 3, 3, 2], + "c_exponent": [8, 8, 8, 7, 7]} cases = { - "errmode": [1, 2, 3, 4], - # Errata number (errors+erasures), erasures number and - # only_erasures: the last item is the value for only_erasures - # (True/False) - "erratasnb_errorsnb_onlyeras": [ - [8, 3, False], - [6, 5, False], - [5, 5, False], - [11, 0, True], - [11, 0, False], - [0, 0, False], - ], - } + "errmode": [1, 2, 3, 4], + # Errata number (errors+erasures), erasures number and + # only_erasures: the last item is the value for only_erasures + # (True/False) + "erratasnb_errorsnb_onlyeras": [[8, 3, False], + [6, 5, False], + [5, 5, False], + [11, 0, True], + [11, 0, False], + [0, 0, False]]} results_br = [] it = 0 for p in range(params["count"]): - fcr = params["fcr"][p] - prim = params["prim"][p] - generator = params["generator"][p] + fcr = params["fcr"][p] + prim = params["prim"][p] + generator = params["generator"][p] c_exponent = params["c_exponent"][p] for case in cartesian_product_dict_items(cases): - errmode = case["errmode"] - erratanb = case["erratasnb_errorsnb_onlyeras"][0] - errnb = case["erratasnb_errorsnb_onlyeras"][1] + errmode = case["errmode"] + erratanb = case["erratasnb_errorsnb_onlyeras"][0] + errnb = case["erratasnb_errorsnb_onlyeras"][1] only_erasures = case["erratasnb_errorsnb_onlyeras"][2] it += 1 @@ -2609,7 +547,7 @@ class TestRSCodecUniversalCrossValidation(unittest.TestCase): g = rs_generator_poly_all(n, fcr=fcr, generator=generator) # Encode the message - rmesecc = rs_encode_msg(orig_mes, n - k, gen=g[n - k]) + rmesecc = rs_encode_msg(orig_mes, n-k, gen=g[n-k]) # Make a copy of the original message to check later if # fully corrected (because the syndrome may be wrong @@ -2620,13 +558,13 @@ class TestRSCodecUniversalCrossValidation(unittest.TestCase): if erratanb > 0: if errmode == 1: - sl = slice(istart, istart + erratanb) + sl = slice(istart, istart+erratanb) elif errmode == 2: - sl = slice(-istart - erratanb - (n - k), -(n - k)) + sl = slice(-istart-erratanb-(n-k), -(n-k)) elif errmode == 3: - sl = slice(-istart - erratanb - 1, -1) + sl = slice(-istart-erratanb-1, -1) elif errmode == 4: - sl = slice(-istart - erratanb, None) + sl = slice(-istart-erratanb, None) else: raise ValueError @@ -2643,22 +581,18 @@ class TestRSCodecUniversalCrossValidation(unittest.TestCase): # Forney syndrome method. 
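            # Background on the two decoding paths exercised below: the Forney syndromes
            # fold the known erasure positions into the ordinary syndromes so that the
            # error-locator step only has to find the remaining, unknown error positions.
            # rs_correct_msg relies on that shortcut, while rs_correct_msg_nofsynd
            # (exercised right after it) decodes the same errata without it; both results
            # are cross-checked with rs_check and against the original message.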
try: - rmes, recc = rs_correct_msg( - rmesecc, - n - k, - fcr=fcr, - generator=generator, - erase_pos=erase_pos, - only_erasures=only_erasures, - ) + rmes, recc = rs_correct_msg(rmesecc, + n-k, + fcr=fcr, + generator=generator, + erase_pos=erase_pos, + only_erasures=only_erasures) # Check if correct by syndrome analysis (can be wrong). - results_br.append( - rs_check(rmes + recc, n - k, fcr=fcr, generator=generator) - ) + results_br.append(rs_check(rmes + recc, n-k, fcr=fcr, generator=generator)) # Check if correct by comparing to the original message (always correct). - results_br.append(rmesecc_orig == (rmes + recc)) + results_br.append(rmesecc_orig == (rmes+recc)) except ReedSolomonError: results_br.append(False) @@ -2666,18 +600,17 @@ class TestRSCodecUniversalCrossValidation(unittest.TestCase): # Without Forney syndrome method try: - rmes, recc = rs_correct_msg_nofsynd( - rmesecc, - n - k, - fcr=fcr, - generator=generator, - erase_pos=erase_pos, - only_erasures=only_erasures, - ) - results_br.append( - rs_check(rmes + recc, n - k, fcr=fcr, generator=generator) - ) - results_br.append(rmesecc_orig == (rmes + recc)) + rmes, recc = rs_correct_msg_nofsynd(rmesecc, + n-k, + fcr=fcr, + generator=generator, + erase_pos=erase_pos, + only_erasures=only_erasures) + results_br.append(rs_check(rmes + recc, + n-k, + fcr=fcr, + generator=generator)) + results_br.append(rmesecc_orig == (rmes+recc)) except ReedSolomonError: results_br.append(False) results_br.append(False) diff --git a/tests/common/test_statics.py b/tests/common/test_statics.py index e8e1081..1d42c35 100644 --- a/tests/common/test_statics.py +++ b/tests/common/test_statics.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -24,16 +24,13 @@ import unittest import src.common.statics from src.common.encoding import onion_address_to_pub_key -from src.common.misc import validate_onion_addr +from src.common.misc import validate_onion_addr class TestStatics(unittest.TestCase): + def test_uniqueness(self) -> None: - variable_list = [ - getattr(src.common.statics, i) - for i in dir(src.common.statics) - if not i.startswith("__") - ] + variable_list = [getattr(src.common.statics, i) for i in dir(src.common.statics) if not i.startswith('__')] variable_list = [v for v in variable_list if (isinstance(v, (bytes, str)))] # Debugger @@ -43,18 +40,11 @@ class TestStatics(unittest.TestCase): if variable == unique_variable: repeats += 1 if repeats > 1: - spacing = (3 - len(unique_variable)) * " " - print( - f"Setting value '{unique_variable}'{spacing} appeared in {repeats} variables: ", - end="", - ) - items = [ - i - for i in dir(src.common.statics) - if not i.startswith("__") - and getattr(src.common.statics, i) == unique_variable - ] - print(", ".join(items)) + spacing = (3 - len(unique_variable)) * ' ' + print(f"Setting value '{unique_variable}'{spacing} appeared in {repeats} variables: ", end='') + items = [i for i in dir(src.common.statics) + if not i.startswith('__') and getattr(src.common.statics, i) == unique_variable] + print(', '.join(items)) self.assertEqual(len(list(set(variable_list))), len(variable_list)) @@ -67,49 +57,38 @@ class TestStatics(unittest.TestCase): able to distinguish what type of entries (contacts or group logs) should be removed from the database. 
""" - self.assertNotEqual( - src.common.statics.ONION_SERVICE_PUBLIC_KEY_LENGTH, - src.common.statics.GROUP_ID_LENGTH, - ) + self.assertNotEqual(src.common.statics.ONION_SERVICE_PUBLIC_KEY_LENGTH, + src.common.statics.GROUP_ID_LENGTH) def test_reserved_accounts_are_valid(self) -> None: """\ Each used account placeholder should be a valid, but reserved account. """ - reserved_accounts = [ - src.common.statics.LOCAL_ID, - src.common.statics.DUMMY_CONTACT, - src.common.statics.DUMMY_MEMBER, - ] + reserved_accounts = [src.common.statics.LOCAL_ID, + src.common.statics.DUMMY_CONTACT, + src.common.statics.DUMMY_MEMBER] for account in reserved_accounts: - self.assertEqual( - validate_onion_addr(account), "Error: Can not add reserved account." - ) + self.assertEqual(validate_onion_addr(account), "Error: Can not add reserved account.") # Test each account is unique. - self.assertEqual(len(reserved_accounts), len(set(reserved_accounts))) + self.assertEqual(len(reserved_accounts), + len(set(reserved_accounts))) def test_local_pubkey(self) -> None: """Test that local key's reserved public key is valid.""" - self.assertEqual( - src.common.statics.LOCAL_PUBKEY, - onion_address_to_pub_key(src.common.statics.LOCAL_ID), - ) + self.assertEqual(src.common.statics.LOCAL_PUBKEY, + onion_address_to_pub_key(src.common.statics.LOCAL_ID)) - def test_group_management_header_length_matches_datagram_header_length( - self, - ) -> None: + def test_group_management_header_length_matches_datagram_header_length(self) -> None: """ As group management messages are handled as messages available to Relay Program, the header should be the same as any datagrams handled by the Relay program. """ - self.assertEqual( - src.common.statics.GROUP_MGMT_HEADER_LENGTH, - src.common.statics.DATAGRAM_HEADER_LENGTH, - ) + self.assertEqual(src.common.statics.GROUP_MGMT_HEADER_LENGTH, + src.common.statics.DATAGRAM_HEADER_LENGTH) def test_key_exchanges_start_with_different_letter(self) -> None: """ @@ -118,8 +97,9 @@ class TestStatics(unittest.TestCase): names would ever be set to something like PUBLIC and PSK that both start with P. """ - self.assertNotEqual(src.common.statics.ECDHE[:1], src.common.statics.PSK[:1]) + self.assertNotEqual(src.common.statics.ECDHE[:1], + src.common.statics.PSK[:1]) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/common/test_word_list.py b/tests/common/test_word_list.py index 181913f..0f9a513 100644 --- a/tests/common/test_word_list.py +++ b/tests/common/test_word_list.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -25,12 +25,15 @@ from src.common.word_list import eff_wordlist class TestWordList(unittest.TestCase): + def test_each_word_is_unique(self) -> None: - self.assertEqual(len(eff_wordlist), len(set(eff_wordlist))) + self.assertEqual(len(eff_wordlist), + len(set(eff_wordlist))) def test_word_list_length(self) -> None: - self.assertEqual(len(eff_wordlist), 7776) + self.assertEqual(len(eff_wordlist), + 7776) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main() diff --git a/tests/mock_classes.py b/tests/mock_classes.py index af63ed4..f9b9d83 100644 --- a/tests/mock_classes.py +++ b/tests/mock_classes.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
@@ -23,118 +23,81 @@ import getpass import time from datetime import datetime -from typing import Generator, Iterable, List, Optional, Sized +from typing import Any, Callable, Generator, Iterable, List, Sized import nacl.signing -from src.common.database import TFCUnencryptedDatabase -from src.common.db_contacts import Contact -from src.common.db_groups import Group -from src.common.db_keys import KeySet -from src.common.db_contacts import ContactList as OrigContactList -from src.common.db_groups import GroupList as OrigGroupList -from src.common.db_onion import OnionService as OrigOnionService -from src.common.db_keys import KeyList as OrigKeyList -from src.common.db_masterkey import MasterKey as OrigMasterKey -from src.common.gateway import Gateway as OrigGateway -from src.common.gateway import GatewaySettings as OrigGatewaySettings -from src.common.db_settings import Settings as OrigSettings -from src.common.encoding import pub_key_to_onion_address, pub_key_to_short_address -from src.common.misc import calculate_race_condition_delay +from src.common.database import TFCUnencryptedDatabase +from src.common.db_contacts import Contact +from src.common.db_groups import Group +from src.common.db_keys import KeySet +from src.common.db_contacts import ContactList as OrigContactList +from src.common.db_groups import GroupList as OrigGroupList +from src.common.db_onion import OnionService as OrigOnionService +from src.common.db_keys import KeyList as OrigKeyList +from src.common.db_masterkey import MasterKey as OrigMasterKey +from src.common.gateway import Gateway as OrigGateway +from src.common.gateway import GatewaySettings as OrigGatewaySettings +from src.common.db_settings import Settings as OrigSettings +from src.common.encoding import pub_key_to_onion_address, pub_key_to_short_address +from src.common.misc import calculate_race_condition_delay from src.common.reed_solomon import RSCodec -from src.common.statics import ( - DIR_USER_DATA, - FINGERPRINT_LENGTH, - INITIAL_HARAC, - KEX_STATUS_VERIFIED, - LOCAL_ID, - LOCAL_NICK, - LOCAL_PUBKEY, - ONION_SERVICE_PRIVATE_KEY_LENGTH, - SYMMETRIC_KEY_LENGTH, - TX, - WIN_UID_COMMAND, - WIN_TYPE_GROUP, -) +from src.common.statics import (DIR_USER_DATA, FINGERPRINT_LENGTH, INITIAL_HARAC, KEX_STATUS_VERIFIED, LOCAL_ID, + LOCAL_NICK, LOCAL_PUBKEY, ONION_SERVICE_PRIVATE_KEY_LENGTH, SYMMETRIC_KEY_LENGTH, + TX, WIN_TYPE_GROUP, WIN_UID_COMMAND) from src.transmitter.windows import TxWindow as OrigTxWindow -from src.receiver.packet import PacketList as OrigPacketList -from src.receiver.windows import RxWindow as OrigRxWindow +from src.receiver.packet import PacketList as OrigPacketList +from src.receiver.windows import RxWindow as OrigRxWindow from tests.utils import nick_to_pub_key, group_name_to_group_id -def create_contact( - nick: str, - tx_fingerprint: bytes = FINGERPRINT_LENGTH * b"\x01", - rx_fingerprint: bytes = FINGERPRINT_LENGTH * b"\x02", - kex_status: bytes = KEX_STATUS_VERIFIED, - log_messages: bool = True, - file_reception: bool = True, - notifications: bool = True, -) -> Contact: +def create_contact(nick: str, + tx_fingerprint: bytes = FINGERPRINT_LENGTH * b'\x01', + rx_fingerprint: bytes = FINGERPRINT_LENGTH * b'\x02', + kex_status: bytes = KEX_STATUS_VERIFIED, + log_messages: bool = True, + file_reception: bool = True, + notifications: bool = True + ) -> Contact: """Create a mock contact object.""" if nick == LOCAL_ID: pub_key = LOCAL_PUBKEY - nick = LOCAL_NICK + nick = LOCAL_NICK else: pub_key = nick_to_pub_key(nick) - return Contact( - 
pub_key, - nick, - tx_fingerprint, - rx_fingerprint, - kex_status, - log_messages, - file_reception, - notifications, - ) + return Contact(pub_key, nick, + tx_fingerprint, rx_fingerprint, kex_status, + log_messages, file_reception, notifications) -def create_group(name: str, nick_list: Optional[List[str]] = None): +def create_group(name: str, nick_list: List[str] = None) -> Group: """Create a mock group object.""" if nick_list is None: - nick_list = ["Alice", "Bob"] + nick_list = ['Alice', 'Bob'] settings = Settings() - members = [create_contact(n) for n in nick_list] - return Group( - name, - group_name_to_group_id(name), - False, - False, - members, - settings, - lambda: None, - ) + members = [create_contact(n) for n in nick_list] + return Group(name, group_name_to_group_id(name), False, False, members, settings, lambda: None) -def create_keyset( - nick, - tx_key=SYMMETRIC_KEY_LENGTH * b"\x01", - tx_hek=SYMMETRIC_KEY_LENGTH * b"\x01", - rx_key=SYMMETRIC_KEY_LENGTH * b"\x01", - rx_hek=SYMMETRIC_KEY_LENGTH * b"\x01", - tx_harac=INITIAL_HARAC, - rx_harac=INITIAL_HARAC, - store_f=None, -) -> KeySet: +def create_keyset(nick: str, + tx_key: bytes = SYMMETRIC_KEY_LENGTH * b'\x01', + tx_hek: bytes = SYMMETRIC_KEY_LENGTH * b'\x01', + rx_key: bytes = SYMMETRIC_KEY_LENGTH * b'\x01', + rx_hek: bytes = SYMMETRIC_KEY_LENGTH * b'\x01', + tx_harac: int = INITIAL_HARAC, + rx_harac: int = INITIAL_HARAC, + store_f: Callable[..., None] = None) -> KeySet: """Create a mock keyset object.""" pub_key = LOCAL_PUBKEY if nick == LOCAL_ID else nick_to_pub_key(nick) - return KeySet( - pub_key, - tx_key, - tx_hek, - rx_key, - rx_hek, - tx_harac, - rx_harac, - store_keys=lambda: None if store_f is None else store_f, - ) + return KeySet(pub_key, tx_key, tx_hek, rx_key, rx_hek, tx_harac, rx_harac, + store_keys=lambda: None if store_f is None else store_f) -def create_rx_window(nick="Alice") -> OrigRxWindow: +def create_rx_window(nick: str = 'Alice') -> OrigRxWindow: """Create a mock Rx-window object.""" pub_key = LOCAL_PUBKEY if nick == LOCAL_ID else nick_to_pub_key(nick) return RxWindow(uid=pub_key) @@ -146,8 +109,8 @@ class ContactList(OrigContactList, Iterable, Sized): def __init__(self, nicks=None, **kwargs) -> None: self.master_key = MasterKey() - self.settings = Settings() - self.contacts = [] if nicks is None else [create_contact(n) for n in nicks] + self.settings = Settings() + self.contacts = [] if nicks is None else [create_contact(n) for n in nicks] for key, value in kwargs.items(): setattr(self, key, value) @@ -168,26 +131,24 @@ class ContactList(OrigContactList, Iterable, Sized): class Gateway(OrigGateway): """Mock the object for unit testing.""" - def __init__(self, **kwargs) -> None: - self.packets = [] + def __init__(self, **kwargs: Any) -> None: + self.packets = [] self.settings = GatewaySettings(**kwargs) - self.rs = RSCodec(2 * self.settings.serial_error_correction) + self.rs = RSCodec(2 * self.settings.serial_error_correction) - def write(self, orig_packet: bytes) -> None: + def write(self, output: str) -> None: """Mock method.""" - self.packets.append(orig_packet) + self.packets.append(output) class GroupList(OrigGroupList, Iterable, Sized): """Mock the object for unit testing.""" - def __init__(self, groups=None, **kwargs) -> None: - self.master_key = MasterKey() - self.settings = Settings() - self.contact_list = ContactList() - self.groups = ( - [] if groups is None else [(create_group(g)) for g in groups] - ) # type: List[Group] + def __init__(self, groups: List[str] = None, **kwargs) -> None: + 
self.master_key = MasterKey() + self.settings = Settings() + self.contact_list = ContactList() + self.groups = [] if groups is None else [(create_group(g)) for g in groups] # type: List[Group] self.store_groups_called = False for key, value in kwargs.items(): @@ -215,10 +176,10 @@ class GroupList(OrigGroupList, Iterable, Sized): class KeyList(OrigKeyList): """Mock the object for unit testing.""" - def __init__(self, nicks=None, **kwargs) -> None: + def __init__(self, nicks: List[str] = None, **kwargs) -> None: self.master_key = MasterKey() - self.settings = Settings() - self.keysets = [] if nicks is None else [create_keyset(n) for n in nicks] + self.settings = Settings() + self.keysets = [] if nicks is None else [create_keyset(n) for n in nicks] self.store_keys_called = False @@ -236,36 +197,34 @@ class KeyList(OrigKeyList): class MasterKey(OrigMasterKey): """Mock the object for unit testing.""" - def __init__(self, **kwargs) -> None: + def __init__(self, **kwargs: Any) -> None: """Create new MasterKey mock object.""" self.local_test = False self.master_key = bytes(SYMMETRIC_KEY_LENGTH) - self.file_name = f"{DIR_USER_DATA}{TX}_login_data" - self.database = TFCUnencryptedDatabase(self.file_name) + self.file_name = f'{DIR_USER_DATA}{TX}_login_data' + self.database = TFCUnencryptedDatabase(self.file_name) for key, value in kwargs.items(): setattr(self, key, value) def load_master_key(self) -> bytes: """Create mock master key bytes.""" - if getpass.getpass() == "test_password": + if getpass.getpass() == 'test_password': return self.master_key - return SYMMETRIC_KEY_LENGTH * b"f" + return SYMMETRIC_KEY_LENGTH * b'f' class OnionService(OrigOnionService): """Mock the object for unit testing.""" - def __init__(self, **kwargs) -> None: + def __init__(self, **kwargs: Any) -> None: """Create new OnionService mock object.""" - self.onion_private_key = ONION_SERVICE_PRIVATE_KEY_LENGTH * b"a" - self.conf_code = b"a" - self.public_key = bytes( - nacl.signing.SigningKey(seed=self.onion_private_key).verify_key - ) + self.onion_private_key = ONION_SERVICE_PRIVATE_KEY_LENGTH*b'a' + self.conf_code = b'a' + self.public_key = bytes(nacl.signing.SigningKey(seed=self.onion_private_key).verify_key) self.user_onion_address = pub_key_to_onion_address(self.public_key) self.user_short_address = pub_key_to_short_address(self.public_key) - self.is_delivered = False + self.is_delivered = False for key, value in kwargs.items(): setattr(self, key, value) @@ -275,40 +234,40 @@ class OnionService(OrigOnionService): class Settings(OrigSettings): """Mock the object for unit testing.""" - def __init__(self, **kwargs) -> None: + def __init__(self, **kwargs: Any) -> None: """Create new Settings mock object.""" - self.disable_gui_dialog = False - self.max_number_of_group_members = 50 - self.max_number_of_groups = 50 - self.max_number_of_contacts = 50 - self.log_messages_by_default = False - self.accept_files_by_default = False + self.disable_gui_dialog = False + self.max_number_of_group_members = 50 + self.max_number_of_groups = 50 + self.max_number_of_contacts = 50 + self.log_messages_by_default = False + self.accept_files_by_default = False self.show_notifications_by_default = True - self.log_file_masking = False - self.ask_password_for_log_access = True + self.log_file_masking = False + self.ask_password_for_log_access = True # Transmitter settings self.nc_bypass_messages = False self.confirm_sent_files = True self.double_space_exits = False - self.traffic_masking = False - self.tm_static_delay = 2.0 - self.tm_random_delay = 
2.0 + self.traffic_masking = False + self.tm_static_delay = 2.0 + self.tm_random_delay = 2.0 # Relay settings self.allow_contact_requests = True # Receiver settings - self.new_message_notify_preview = False + self.new_message_notify_preview = False self.new_message_notify_duration = 1.0 - self.max_decompress_size = 100_000_000 + self.max_decompress_size = 100_000_000 - self.master_key = MasterKey() + self.master_key = MasterKey() self.software_operation = TX self.local_testing_mode = False self.all_keys = list(vars(self).keys()) - self.key_list = self.all_keys[: self.all_keys.index("master_key")] + self.key_list = self.all_keys[:self.all_keys.index('master_key')] self.defaults = {k: self.__dict__[k] for k in self.key_list} # Override defaults with specified kwargs @@ -322,7 +281,11 @@ class Settings(OrigSettings): """Mock method.""" @staticmethod - def validate_key_value_pair(key, value, contact_list, group_list) -> None: + def validate_key_value_pair(key: str, + value: str, + contact_list: 'ContactList', + group_list: 'GroupList' + ) -> None: """Mock method.""" @@ -330,31 +293,30 @@ class Settings(OrigSettings): class GatewaySettings(OrigGatewaySettings): """Mock the object for unit testing.""" - def __init__(self, **kwargs) -> None: + def __init__(self, **kwargs: Any) -> None: """Create new GatewaySettings mock object.""" - self.serial_baudrate = 19200 - self.serial_error_correction = 5 - self.use_serial_usb_adapter = True - self.built_in_serial_interface = "ttyS0" + self.serial_baudrate = 19200 + self.serial_error_correction = 5 + self.use_serial_usb_adapter = True + self.built_in_serial_interface = 'ttyS0' self.software_operation = TX self.local_testing_mode = False self.data_diode_sockets = False self.all_keys = list(vars(self).keys()) - self.key_list = self.all_keys[: self.all_keys.index("software_operation")] + self.key_list = self.all_keys[:self.all_keys.index('software_operation')] self.defaults = {k: self.__dict__[k] for k in self.key_list} self.session_serial_error_correction = self.serial_error_correction - self.session_serial_baudrate = self.serial_baudrate - self.session_usb_serial_adapter = self.use_serial_usb_adapter + self.session_serial_baudrate = self.serial_baudrate + self.session_usb_serial_adapter = self.use_serial_usb_adapter self.tx_inter_packet_delay = 0.0 - self.rx_receive_timeout = 0.0 + self.rx_receive_timeout = 0.0 - self.race_condition_delay = calculate_race_condition_delay( - self.session_serial_error_correction, self.serial_baudrate - ) + self.race_condition_delay = calculate_race_condition_delay(self.session_serial_error_correction, + self.serial_baudrate) # Override defaults with specified kwargs for key, value in kwargs.items(): @@ -370,18 +332,18 @@ class GatewaySettings(OrigGatewaySettings): class TxWindow(OrigTxWindow): """Mock the object for unit testing.""" - def __init__(self, **kwargs) -> None: + def __init__(self, **kwargs: Any) -> None: """Create new TxWindow mock object.""" - self.contact_list = ContactList() - self.group_list = GroupList() + self.contact_list = ContactList() + self.group_list = GroupList() self.window_contacts = [] - self.group = None - self.contact = None - self.name = None - self.type = None - self.uid = None - self.group_id = None - self.imc_name = None + self.group = None + self.contact = None + self.name = None + self.type = None + self.uid = None + self.group_id = None + self.imc_name = None for key, value in kwargs.items(): setattr(self, key, value) @@ -389,10 +351,10 @@ class TxWindow(OrigTxWindow): class 
UserInput(object): """Mock the object for unit testing.""" - def __init__(self, plaintext=None, **kwargs) -> None: + def __init__(self, plaintext: str = None, **kwargs: Any) -> None: """Create new UserInput mock object.""" self.plaintext = plaintext - self.type = None + self.type = None for key, value in kwargs.items(): setattr(self, key, value) @@ -401,25 +363,25 @@ class UserInput(object): class Packet(object): """Mock the object for unit testing.""" - def __init__(self, **kwargs) -> None: + def __init__(self, **kwargs: Any) -> None: """Create new Pack mock object.""" - self.account = None - self.contact = None - self.origin = None - self.type = None - self.settings = None - self.f_name = None - self.f_size = None - self.f_packets = None - self.f_eta = None - self.lt_active = False - self.is_complete = False + self.account = None + self.contact = None + self.origin = None + self.type = None + self.settings = None + self.f_name = None + self.f_size = None + self.f_packets = None + self.f_eta = None + self.lt_active = False + self.is_complete = False self.assembly_pt_list = [] - self.payload = None # Unittest mock return value + self.payload = None # Unittest mock return value for key, value in kwargs.items(): setattr(self, key, value) - def add_packet(self, packet) -> None: + def add_packet(self, packet: bytes) -> None: """Mock method.""" def assemble_message_packet(self) -> None: @@ -438,10 +400,10 @@ class Packet(object): class PacketList(OrigPacketList): """Mock the object for unit testing.""" - def __init__(self, **kwargs) -> None: - self.settings = Settings() + def __init__(self, **kwargs: Any) -> None: + self.settings = Settings() self.contact_list = ContactList() - self.packets = [] + self.packets = [] for key, value in kwargs.items(): setattr(self, key, value) @@ -450,26 +412,26 @@ class PacketList(OrigPacketList): class RxWindow(OrigRxWindow): """Mock the object for unit testing.""" - def __init__(self, **kwargs) -> None: - self.uid = None + def __init__(self, **kwargs: Any) -> None: + self.uid = None self.contact_list = ContactList() - self.group_list = GroupList() - self.settings = Settings() - self.packet_list = PacketList() + self.group_list = GroupList() + self.settings = Settings() + self.packet_list = PacketList() - self.is_active = False + self.is_active = False self.group_timestamp = time.time() * 1000 - self.group = None + self.group = None self.window_contacts = [] - self.message_log = [] - self.handle_dict = dict() + self.message_log = [] + self.handle_dict = dict() self.previous_msg_ts = datetime.now() self.unread_messages = 0 - self.type = None + self.type = None self.type_print = None - self.name = None + self.name = None for key, value in kwargs.items(): setattr(self, key, value) @@ -478,13 +440,13 @@ class RxWindow(OrigRxWindow): class WindowList(object): """Mock the object for unit testing.""" - def __init__(self, nicks=None, **kwargs) -> None: + def __init__(self, nicks: List[str] = None, **kwargs: Any) -> None: """Create new WindowList mock object.""" self.contact_list = ContactList() - self.group_list = GroupList() - self.packet_list = PacketList() - self.settings = Settings() - self.windows = [] if nicks is None else [create_rx_window(n) for n in nicks] + self.group_list = GroupList() + self.packet_list = PacketList() + self.settings = Settings() + self.windows = [] if nicks is None else [create_rx_window(n) for n in nicks] self.active_win = None for key, value in kwargs.items(): @@ -493,21 +455,21 @@ class WindowList(object): def __len__(self) -> int: 
return len(self.windows) - def __iter__(self) -> None: + def __iter__(self) -> RxWindow: yield from self.windows def group_windows(self) -> List[RxWindow]: """Mock method.""" return [w for w in self.windows if w.type == WIN_TYPE_GROUP] - def set_active_rx_window(self, name) -> None: + def set_active_rx_window(self, name: bytes) -> None: """Mock method.""" if self.active_win is not None: self.active_win.is_active = False - self.active_win = self.get_window(name) + self.active_win = self.get_window(name) self.active_win.is_active = True - def has_window(self, name) -> bool: + def has_window(self, name: bytes) -> bool: """Mock method.""" return name in self.get_list_of_window_names() @@ -519,24 +481,20 @@ class WindowList(object): """Mock method.""" return self.get_window(WIN_UID_COMMAND) - def remove_window(self, uid: str) -> None: + def remove_window(self, uid: bytes) -> None: """Mock method.""" for i, w in enumerate(self.windows): if uid == w.uid: del self.windows[i] break - def get_window(self, uid) -> RxWindow: + def get_window(self, uid: bytes) -> RxWindow: """Mock method.""" if not self.has_window(uid): - self.windows.append( - RxWindow( - uid=uid, - contact_list=self.contact_list, - group_list=self.group_list, - settings=self.settings, - packet_list=self.packet_list, - ) - ) + self.windows.append(RxWindow(uid=uid, + contact_list=self.contact_list, + group_list =self.group_list, + settings =self.settings, + packet_list =self.packet_list)) return next(w for w in self.windows if w.uid == uid) diff --git a/tests/receiver/__init__.py b/tests/receiver/__init__.py index 6eb560e..833769a 100644 --- a/tests/receiver/__init__.py +++ b/tests/receiver/__init__.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. diff --git a/tests/receiver/test_commands.py b/tests/receiver/test_commands.py index cc5cc9b..2d6f8e0 100644 --- a/tests/receiver/test_commands.py +++ b/tests/receiver/test_commands.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
@@ -23,213 +23,131 @@ import os import struct import unittest -from datetime import datetime +from datetime import datetime from multiprocessing import Queue -from unittest import mock -from unittest.mock import MagicMock +from unittest import mock +from unittest.mock import MagicMock from src.common.database import MessageLog, TFCDatabase -from src.common.db_logs import write_log_entry +from src.common.db_logs import write_log_entry from src.common.encoding import int_to_bytes -from src.common.statics import ( - CH_FILE_RECV, - CH_LOGGING, - CH_NOTIFY, - CLEAR_ENTIRE_LINE, - COMMAND, - CURSOR_UP_ONE_LINE, - C_L_HEADER, - DIR_USER_DATA, - DISABLE, - ENABLE, - F_S_HEADER, - LOCAL_ID, - LOCAL_PUBKEY, - LOG_REMOVE, - MESSAGE, - ORIGIN_CONTACT_HEADER, - PADDING_LENGTH, - RESET, - RX, - SYMMETRIC_KEY_LENGTH, - US_BYTE, - WIN_TYPE_CONTACT, - WIN_TYPE_GROUP, - WIN_UID_FILE, - WIPE, -) +from src.common.statics import (CH_FILE_RECV, CH_LOGGING, CH_NOTIFY, CLEAR_ENTIRE_LINE, CLEAR_SCREEN, COMMAND, + CURSOR_UP_ONE_LINE, C_L_HEADER, DIR_USER_DATA, DISABLE, ENABLE, F_S_HEADER, LOCAL_ID, + LOCAL_PUBKEY, MESSAGE, ORIGIN_CONTACT_HEADER, PADDING_LENGTH, RESET, RX, + SYMMETRIC_KEY_LENGTH, US_BYTE, WIN_TYPE_CONTACT, WIN_TYPE_GROUP, WIN_UID_FILE, WIPE) -from src.receiver.packet import PacketList -from src.receiver.commands import ( - ch_contact_s, - ch_master_key, - ch_nick, - ch_setting, - contact_rem, - exit_tfc, - log_command, -) -from src.receiver.commands import ( - process_command, - remove_log, - reset_screen, - win_activity, - win_select, - wipe, -) +from src.receiver.packet import PacketList +from src.receiver.commands import ch_contact_s, ch_master_key, ch_nick, ch_setting, contact_rem, exit_tfc, log_command +from src.receiver.commands import process_command, remove_log, reset_screen, win_activity, win_select, wipe -from tests.mock_classes import ( - ContactList, - Gateway, - group_name_to_group_id, - GroupList, - KeyList, - MasterKey, -) +from tests.mock_classes import ContactList, Gateway, group_name_to_group_id, GroupList, KeyList, MasterKey from tests.mock_classes import nick_to_pub_key, RxWindow, Settings, WindowList -from tests.utils import ( - assembly_packet_creator, - cd_unit_test, - cleanup, - ignored, - nick_to_short_address, -) -from tests.utils import tear_queue, TFCTestCase +from tests.utils import assembly_packet_creator, cd_unit_test, cleanup, ignored, nick_to_short_address +from tests.utils import tear_queue, TFCTestCase class TestProcessCommand(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" self.unit_test_dir = cd_unit_test() - self.ts = datetime.now() - self.settings = Settings() - self.master_key = MasterKey() - self.group_list = GroupList() - self.exit_queue = Queue() - self.gateway = Gateway() - self.window_list = WindowList(nicks=[LOCAL_ID]) - self.contact_list = ContactList(nicks=[LOCAL_ID]) - self.packet_list = PacketList(self.settings, self.contact_list) - self.key_list = KeyList(nicks=[LOCAL_ID]) - self.key_set = self.key_list.get_keyset(LOCAL_PUBKEY) + self.ts = datetime.now() + self.settings = Settings() + self.master_key = MasterKey() + self.group_list = GroupList() + self.exit_queue = Queue() + self.gateway = Gateway() + self.window_list = WindowList(nicks=[LOCAL_ID]) + self.contact_list = ContactList(nicks=[LOCAL_ID]) + self.packet_list = PacketList(self.settings, self.contact_list) + self.key_list = KeyList(nicks=[LOCAL_ID]) + self.key_set = self.key_list.get_keyset(LOCAL_PUBKEY) - self.args = ( - self.window_list, - self.packet_list, - 
self.contact_list, - self.key_list, - self.group_list, - self.settings, - self.master_key, - self.gateway, - self.exit_queue, - ) + self.args = (self.window_list, self.packet_list, self.contact_list, self.key_list, self.group_list, + self.settings, self.master_key, self.gateway, self.exit_queue) def tearDown(self) -> None: """Post-test actions.""" cleanup(self.unit_test_dir) tear_queue(self.exit_queue) - def test_incomplete_command_raises_fr(self) -> None: - packet = assembly_packet_creator( - COMMAND, b"test_command", s_header_override=C_L_HEADER, encrypt_packet=True - )[0] - self.assert_se( - "Incomplete command.", process_command, self.ts, packet, *self.args - ) + def test_incomplete_command_raises_se(self) -> None: + packet = assembly_packet_creator(COMMAND, b'test_command', s_header_override=C_L_HEADER, encrypt_packet=True)[0] + self.assert_se("Incomplete command.", process_command, self.ts, packet, *self.args) def test_invalid_command_header(self) -> None: - packet = assembly_packet_creator( - COMMAND, b"invalid_header", encrypt_packet=True - )[0] - self.assert_se( - "Error: Received an invalid command.", - process_command, - self.ts, - packet, - *self.args, - ) + packet = assembly_packet_creator(COMMAND, b'invalid_header', encrypt_packet=True)[0] + self.assert_se("Error: Received an invalid command.", process_command, self.ts, packet, *self.args) def test_process_command(self) -> None: - packet = assembly_packet_creator(COMMAND, LOG_REMOVE, encrypt_packet=True)[0] - self.assert_se( - f"No log database available.", process_command, self.ts, packet, *self.args - ) - + packet = assembly_packet_creator(COMMAND, CLEAR_SCREEN, encrypt_packet=True)[0] + self.assert_se(f"Command completed.", process_command, self.ts, packet, *self.args) class TestWinActivity(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.window_list = WindowList() - self.window_list.windows = [ - RxWindow(name="Alice", unread_messages=4), - RxWindow(name="Bob", unread_messages=15), - ] + self.window_list = WindowList() + self.window_list.windows = [RxWindow(name='Alice', unread_messages=4), + RxWindow(name='Bob', unread_messages=15)] - @mock.patch("time.sleep", return_value=None) + @mock.patch('time.sleep', return_value=None) def test_function(self, _) -> None: - self.assert_prints( - f"""\ + self.assert_prints(f"""\ ┌─────────────────┐ │ Window activity │ │ Alice: 4 │ │ Bob: 15 │ └─────────────────┘ -{5*(CURSOR_UP_ONE_LINE+CLEAR_ENTIRE_LINE)}""", - win_activity, - self.window_list, - ) +{5*(CURSOR_UP_ONE_LINE+CLEAR_ENTIRE_LINE)}""", win_activity, self.window_list) class TestWinSelect(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.window_list = WindowList() - self.window_list.windows = [ - RxWindow(uid=nick_to_pub_key("Alice"), name="Alice"), - RxWindow(uid=nick_to_pub_key("Bob"), name="Bob"), - ] + self.window_list = WindowList() + self.window_list.windows = [RxWindow(uid=nick_to_pub_key("Alice"), name='Alice'), + RxWindow(uid=nick_to_pub_key("Bob"), name='Bob')] def test_window_selection(self) -> None: self.assertIsNone(win_select(nick_to_pub_key("Alice"), self.window_list)) - self.assertEqual(self.window_list.active_win.name, "Alice") + self.assertEqual(self.window_list.active_win.name, 'Alice') self.assertIsNone(win_select(nick_to_pub_key("Bob"), self.window_list)) - self.assertEqual(self.window_list.active_win.name, "Bob") + self.assertEqual(self.window_list.active_win.name, 'Bob') self.assertIsNone(win_select(WIN_UID_FILE, self.window_list)) 
self.assertEqual(self.window_list.active_win.uid, WIN_UID_FILE) class TestResetScreen(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.cmd_data = nick_to_pub_key("Alice") - self.window_list = WindowList() - self.window_list.windows = [ - RxWindow(uid=nick_to_pub_key("Alice"), name="Alice"), - RxWindow(uid=nick_to_pub_key("Bob"), name="Bob"), - ] - self.window = self.window_list.get_window(nick_to_pub_key("Alice")) - self.window.message_log = [ - (datetime.now(), "Hi Bob", nick_to_pub_key("Alice"), ORIGIN_CONTACT_HEADER) - ] + self.cmd_data = nick_to_pub_key("Alice") + self.window_list = WindowList() + self.window_list.windows = [RxWindow(uid=nick_to_pub_key("Alice"), name='Alice'), + RxWindow(uid=nick_to_pub_key("Bob"), name='Bob')] + self.window = self.window_list.get_window(nick_to_pub_key("Alice")) + self.window.message_log = [(datetime.now(), 'Hi Bob', nick_to_pub_key("Alice"), ORIGIN_CONTACT_HEADER)] - @mock.patch("os.system", return_value=None) - def test_screen_reset(self, mock_os_system) -> None: + @mock.patch('os.system', return_value=None, create_autospec=True) + def test_screen_reset(self, reset) -> None: # Ensure there is a message to be removed from the ephemeral message log self.assertEqual(len(self.window.message_log), 1) reset_screen(self.cmd_data, self.window_list) # Test that screen is reset by the command - mock_os_system.assert_called_with(RESET) + reset.assert_called_with(RESET) # Test that the ephemeral message log is empty after the command self.assertEqual(len(self.window.message_log), 0) class TestExitTFC(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" self.exit_queue = Queue() @@ -244,171 +162,124 @@ class TestExitTFC(unittest.TestCase): class TestLogCommand(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.unit_test_dir = cd_unit_test() - self.cmd_data = int_to_bytes(1) + nick_to_pub_key("Bob") - self.ts = datetime.now() - self.window_list = WindowList(nicks=["Alice", "Bob"]) - self.window = self.window_list.get_window(nick_to_pub_key("Bob")) + self.unit_test_dir = cd_unit_test() + self.cmd_data = int_to_bytes(1) + nick_to_pub_key("Bob") + self.ts = datetime.now() + self.window_list = WindowList(nicks=['Alice', 'Bob']) + self.window = self.window_list.get_window(nick_to_pub_key("Bob")) self.window.type_print = WIN_TYPE_CONTACT - self.window.name = "Bob" - self.window.type = WIN_TYPE_CONTACT - self.contact_list = ContactList(nicks=["Alice", "Bob"]) - self.group_list = GroupList() - self.settings = Settings(software_operation=RX) - self.master_key = MasterKey(operation=RX, local_test=True) - self.args = ( - self.ts, - self.window_list, - self.contact_list, - self.group_list, - self.settings, - self.master_key, - ) - self.log_file = f"{DIR_USER_DATA}{self.settings.software_operation}_logs" - self.tfc_log_database = MessageLog(self.log_file, self.master_key.master_key) + self.window.name = 'Bob' + self.window.type = WIN_TYPE_CONTACT + self.contact_list = ContactList(nicks=['Alice', 'Bob']) + self.group_list = GroupList() + self.settings = Settings(software_operation=RX) + self.master_key = MasterKey(operation=RX, local_test=True) + self.args = (self.ts, self.window_list, self.contact_list, + self.group_list, self.settings, self.master_key) + self.log_file = f'{DIR_USER_DATA}{self.settings.software_operation}_logs' + self.tfc_log_database = MessageLog(self.log_file, self.master_key.master_key) - time_float = struct.unpack(" None: """Post-test actions.""" cleanup(self.unit_test_dir) with 
ignored(OSError): - os.remove("Receiver - Plaintext log (None)") + os.remove('Receiver - Plaintext log (None)') def test_print(self) -> None: # Setup os.remove(self.log_file) # Test - self.assert_se( - f"No log database available.", log_command, self.cmd_data, *self.args - ) + self.assert_se(f"No log database available.", log_command, self.cmd_data, *self.args) - @mock.patch("struct.pack", return_value=bytes.fromhex("08ceae02")) + @mock.patch('struct.pack', return_value=bytes.fromhex('08ceae02')) def test_export(self, _) -> None: # Setup - for p in assembly_packet_creator(MESSAGE, "A short message"): - write_log_entry( - p, - nick_to_pub_key("Bob"), - self.tfc_log_database, - origin=ORIGIN_CONTACT_HEADER, - ) + for p in assembly_packet_creator(MESSAGE, 'A short message'): + write_log_entry(p, nick_to_pub_key("Bob"), self.tfc_log_database, origin=ORIGIN_CONTACT_HEADER) # Test self.assertIsNone(log_command(self.cmd_data, *self.args)) - with open("Receiver - Plaintext log (Bob)") as f: + with open('Receiver - Plaintext log (Bob)') as f: data = f.read() - self.assertEqual( - data, - f"""\ + self.assertEqual(data, f"""\ Log file of 1 most recent message(s) to/from contact Bob ════════════════════════════════════════════════════════════════════════════════ {self.time} Bob: A short message -""", - ) +""") class TestRemoveLog(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" self.unit_test_dir = cd_unit_test() - self.win_name = nick_to_pub_key("Alice") - self.contact_list = ContactList() - self.group_list = GroupList() - self.settings = Settings() - self.master_key = MasterKey() + self.win_name = nick_to_pub_key("Alice") + self.contact_list = ContactList() + self.group_list = GroupList() + self.settings = Settings() + self.master_key = MasterKey() def tearDown(self) -> None: """Post-test actions.""" cleanup(self.unit_test_dir) def test_remove_log_file(self) -> None: - self.assert_se( - f"No log database available.", - remove_log, - self.win_name, - self.contact_list, - self.group_list, - self.settings, - self.master_key, - ) + self.assert_se(f"No log database available.", + remove_log, self.win_name, self.contact_list, self.group_list, self.settings, self.master_key) class TestChMasterKey(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.unit_test_dir = cd_unit_test() - self.ts = datetime.now() - self.master_key = MasterKey() - self.settings = Settings() - self.contact_list = ContactList(nicks=[LOCAL_ID]) - self.window_list = WindowList(nicks=[LOCAL_ID]) - self.group_list = GroupList() - self.key_list = KeyList() - self.args = ( - self.ts, - self.window_list, - self.contact_list, - self.group_list, - self.key_list, - self.settings, - self.master_key, - ) - self.log_file = f"{DIR_USER_DATA}{self.settings.software_operation}_logs" + self.unit_test_dir = cd_unit_test() + self.ts = datetime.now() + self.master_key = MasterKey() + self.settings = Settings() + self.contact_list = ContactList(nicks=[LOCAL_ID]) + self.window_list = WindowList(nicks=[LOCAL_ID]) + self.group_list = GroupList() + self.key_list = KeyList() + self.args = (self.ts, self.window_list, self.contact_list, self.group_list, + self.key_list, self.settings, self.master_key) + self.log_file = f'{DIR_USER_DATA}{self.settings.software_operation}_logs' self.tfc_log_database = MessageLog(self.log_file, self.master_key.master_key) def tearDown(self) -> None: """Post-test actions.""" cleanup(self.unit_test_dir) - @mock.patch("src.common.db_masterkey.MIN_KEY_DERIVATION_TIME", 0.1) - 
@mock.patch("src.common.db_masterkey.MIN_KEY_DERIVATION_TIME", 1.0) - @mock.patch( - "os.popen", - return_value=MagicMock( - read=MagicMock( - return_value=MagicMock( - splitlines=MagicMock(return_value=["MemAvailable 10240"]) - ) - ) - ), - ) - @mock.patch("multiprocessing.cpu_count", return_value=1) - @mock.patch("getpass.getpass", side_effect=["test_password", "a", "a"]) - @mock.patch("time.sleep", return_value=None) + @mock.patch('src.common.db_masterkey.MIN_KEY_DERIVATION_TIME', 0.1) + @mock.patch('src.common.db_masterkey.MIN_KEY_DERIVATION_TIME', 1.0) + @mock.patch('os.popen', return_value=MagicMock( + read=MagicMock(return_value=MagicMock(splitlines=MagicMock(return_value=["MemAvailable 10240"]))))) + @mock.patch('multiprocessing.cpu_count', return_value=1) + @mock.patch('getpass.getpass', side_effect=['test_password', 'a', 'a']) + @mock.patch('time.sleep', return_value=None) def test_master_key_change(self, *_) -> None: # Setup - write_log_entry( - F_S_HEADER + bytes(PADDING_LENGTH), - nick_to_pub_key("Alice"), - self.tfc_log_database, - ) + write_log_entry(F_S_HEADER + bytes(PADDING_LENGTH), nick_to_pub_key("Alice"), self.tfc_log_database) - self.contact_list.file_name = f"{DIR_USER_DATA}{RX}_contacts" - self.group_list.file_name = f"{DIR_USER_DATA}{RX}_groups" - self.key_list.file_name = f"{DIR_USER_DATA}{RX}_keys" - self.settings.file_name = f"{DIR_USER_DATA}{RX}_settings" + self.contact_list.file_name = f'{DIR_USER_DATA}{RX}_contacts' + self.group_list.file_name = f'{DIR_USER_DATA}{RX}_groups' + self.key_list.file_name = f'{DIR_USER_DATA}{RX}_keys' + self.settings.file_name = f'{DIR_USER_DATA}{RX}_settings' - self.contact_list.database = TFCDatabase( - self.contact_list.file_name, self.contact_list.master_key - ) - self.group_list.database = TFCDatabase( - self.group_list.file_name, self.group_list.master_key - ) - self.key_list.database = TFCDatabase( - self.key_list.file_name, self.group_list.master_key - ) - self.settings.database = TFCDatabase( - self.settings.file_name, self.settings.master_key - ) + self.contact_list.database = TFCDatabase(self.contact_list.file_name, self.contact_list.master_key) + self.group_list.database = TFCDatabase(self.group_list.file_name, self.group_list.master_key) + self.key_list.database = TFCDatabase(self.key_list.file_name, self.group_list.master_key) + self.settings.database = TFCDatabase(self.settings.file_name, self.settings.master_key) orig_cl_rd = self.contact_list.database.replace_database orig_gl_rd = self.group_list.database.replace_database @@ -416,9 +287,9 @@ class TestChMasterKey(TFCTestCase): orig_st_rd = self.settings.database.replace_database self.contact_list.database.replace_database = lambda: None - self.group_list.database.replace_database = lambda: None - self.key_list.database.replace_database = lambda: None - self.settings.database.replace_database = lambda: None + self.group_list.database.replace_database = lambda: None + self.key_list.database.replace_database = lambda: None + self.settings.database.replace_database = lambda: None # Test self.assertEqual(self.master_key.master_key, bytes(SYMMETRIC_KEY_LENGTH)) @@ -427,181 +298,142 @@ class TestChMasterKey(TFCTestCase): # Teardown self.contact_list.database.replace_database = orig_cl_rd - self.group_list.database.replace_database = orig_gl_rd - self.key_list.database.replace_database = orig_kl_rd - self.settings.database.replace_database = orig_st_rd + self.group_list.database.replace_database = orig_gl_rd + self.key_list.database.replace_database = orig_kl_rd + 
self.settings.database.replace_database = orig_st_rd - @mock.patch("src.common.db_masterkey.MIN_KEY_DERIVATION_TIME", 0.1) - @mock.patch("src.common.db_masterkey.MIN_KEY_DERIVATION_TIME", 1.0) - @mock.patch( - "os.popen", - return_value=MagicMock( - read=MagicMock( - return_value=MagicMock( - splitlines=MagicMock(return_value=["MemAvailable 10240"]) - ) - ) - ), - ) - @mock.patch("multiprocessing.cpu_count", return_value=1) - @mock.patch("getpass.getpass", return_value="a") - @mock.patch("time.sleep", return_value=None) + @mock.patch('src.common.db_masterkey.MIN_KEY_DERIVATION_TIME', 0.1) + @mock.patch('src.common.db_masterkey.MIN_KEY_DERIVATION_TIME', 1.0) + @mock.patch('os.popen', return_value=MagicMock( + read=MagicMock(return_value=MagicMock(splitlines=MagicMock(return_value=["MemAvailable 10240"]))))) + @mock.patch('multiprocessing.cpu_count', return_value=1) + @mock.patch('getpass.getpass', return_value='a') + @mock.patch('time.sleep', return_value=None) def test_invalid_password_raises_function_return(self, *_) -> None: self.assertEqual(self.master_key.master_key, bytes(SYMMETRIC_KEY_LENGTH)) self.assert_se("Error: Invalid password.", ch_master_key, *self.args) - @mock.patch("getpass.getpass", return_value="a") - @mock.patch("time.sleep", return_value=None) - @mock.patch("os.getrandom", side_effect=KeyboardInterrupt) - def test_keyboard_interrupt_raises_fr(self, *_) -> None: + @mock.patch('getpass.getpass', return_value='a') + @mock.patch('time.sleep', return_value=None) + @mock.patch('os.getrandom', side_effect=KeyboardInterrupt) + def test_keyboard_interrupt_raises_se(self, *_) -> None: self.assert_se("Error: Invalid password.", ch_master_key, *self.args) class TestChNick(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.ts = datetime.now() - self.contact_list = ContactList(nicks=["Alice"]) - self.window_list = WindowList(contact_list=self.contact_list) - self.group_list = GroupList() - self.args = self.ts, self.window_list, self.contact_list - self.window = self.window_list.get_window(nick_to_pub_key("Alice")) - self.window.type = WIN_TYPE_CONTACT + self.ts = datetime.now() + self.contact_list = ContactList(nicks=['Alice']) + self.window_list = WindowList(contact_list=self.contact_list) + self.group_list = GroupList() + self.args = self.ts, self.window_list, self.contact_list + self.window = self.window_list.get_window(nick_to_pub_key("Alice")) + self.window.type = WIN_TYPE_CONTACT - def test_unknown_account_raises_fr(self) -> None: + def test_unknown_account_raises_se(self) -> None: # Setup - cmd_data = nick_to_pub_key("Bob") + b"Bob_" + cmd_data = nick_to_pub_key("Bob") + b'Bob_' # Test - trunc_addr = nick_to_short_address("Bob") - self.assert_se( - f"Error: Receiver has no contact '{trunc_addr}' to rename.", - ch_nick, - cmd_data, - *self.args, - ) + trunc_addr = nick_to_short_address('Bob') + self.assert_se(f"Error: Receiver has no contact '{trunc_addr}' to rename.", ch_nick, cmd_data, *self.args) def test_nick_change(self) -> None: # Setup - cmd_data = nick_to_pub_key("Alice") + b"Alice_" + cmd_data = nick_to_pub_key("Alice") + b'Alice_' # Test self.assertIsNone(ch_nick(cmd_data, *self.args)) - self.assertEqual( - self.contact_list.get_contact_by_pub_key(nick_to_pub_key("Alice")).nick, - "Alice_", - ) - self.assertEqual(self.window.name, "Alice_") + self.assertEqual(self.contact_list.get_contact_by_pub_key(nick_to_pub_key("Alice")).nick, 'Alice_') + self.assertEqual(self.window.name, 'Alice_') class TestChSetting(TFCTestCase): + def setUp(self) -> None: 
"""Pre-test actions.""" - self.ts = datetime.now() - self.window_list = WindowList() + self.ts = datetime.now() + self.window_list = WindowList() self.contact_list = ContactList() - self.group_list = GroupList() - self.key_list = KeyList() - self.settings = Settings() - self.gateway = Gateway() - self.args = ( - self.ts, - self.window_list, - self.contact_list, - self.group_list, - self.key_list, - self.settings, - self.gateway, - ) + self.group_list = GroupList() + self.key_list = KeyList() + self.settings = Settings() + self.gateway = Gateway() + self.args = (self.ts, self.window_list, self.contact_list, self.group_list, + self.key_list, self.settings, self.gateway) - def test_invalid_data_raises_fr(self) -> None: + def test_invalid_data_raises_se(self) -> None: # Setup - self.settings.key_list = [""] + self.settings.key_list = [''] # Test - cmd_data = b"setting" + b"True" - self.assert_se( - "Error: Received invalid setting data.", ch_setting, cmd_data, *self.args - ) + cmd_data = b'setting' + b'True' + self.assert_se("Error: Received invalid setting data.", ch_setting, cmd_data, *self.args) - def test_invalid_setting_raises_fr(self) -> None: + def test_invalid_setting_raises_se(self) -> None: # Setup - self.settings.key_list = [""] + self.settings.key_list = [''] # Test - cmd_data = b"setting" + US_BYTE + b"True" - self.assert_se( - "Error: Invalid setting 'setting'.", ch_setting, cmd_data, *self.args - ) + cmd_data = b'setting' + US_BYTE + b'True' + self.assert_se("Error: Invalid setting 'setting'.", ch_setting, cmd_data, *self.args) def test_databases(self) -> None: # Setup - self.settings.key_list = [ - "max_number_of_group_members", - "max_number_of_contacts", - ] + self.settings.key_list = ['max_number_of_group_members', 'max_number_of_contacts'] # Test - cmd_data = b"max_number_of_group_members" + US_BYTE + b"30" + cmd_data = b'max_number_of_group_members' + US_BYTE + b'30' self.assertIsNone(ch_setting(cmd_data, *self.args)) - cmd_data = b"max_number_of_contacts" + US_BYTE + b"30" + cmd_data = b'max_number_of_contacts' + US_BYTE + b'30' self.assertIsNone(ch_setting(cmd_data, *self.args)) def test_change_gateway_setting(self) -> None: # Setup - self.settings.key_list = [ - "max_number_of_group_members", - "max_number_of_contacts", - ] + self.settings.key_list = ['max_number_of_group_members', 'max_number_of_contacts'] # Test - cmd_data = b"serial_baudrate" + US_BYTE + b"115200" + cmd_data = b'serial_baudrate' + US_BYTE + b'115200' self.assertIsNone(ch_setting(cmd_data, *self.args)) class TestChContactSetting(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.ts = datetime.fromtimestamp(1502750000) - self.contact_list = ContactList(nicks=["Alice", "Bob"]) - self.group_list = GroupList(groups=["test_group", "test_group2"]) - self.window_list = WindowList( - contact_list=self.contact_list, group_list=self.group_list - ) - self.args = self.ts, self.window_list, self.contact_list, self.group_list + self.ts = datetime.fromtimestamp(1502750000) + self.contact_list = ContactList(nicks=['Alice', 'Bob']) + self.group_list = GroupList(groups=['test_group', 'test_group2']) + self.window_list = WindowList(contact_list=self.contact_list, + group_list=self.group_list) + self.args = self.ts, self.window_list, self.contact_list, self.group_list - def test_invalid_window_raises_fr(self) -> None: + def test_invalid_window_raises_se(self) -> None: # Setup - cmd_data = ENABLE + nick_to_pub_key("Bob") - header = CH_LOGGING - self.contact_list = ContactList(nicks=["Alice"]) - 
self.window_list = WindowList( - contact_list=self.contact_list, group_list=self.group_list - ) + cmd_data = ENABLE + nick_to_pub_key("Bob") + header = CH_LOGGING + self.contact_list = ContactList(nicks=['Alice']) + self.window_list = WindowList(contact_list=self.contact_list, + group_list=self.group_list) # Test - self.assert_se( - f"Error: Found no window for '{nick_to_short_address('Bob')}'.", - ch_contact_s, - cmd_data, - *self.args, - header, - ) + self.assert_se(f"Error: Found no window for '{nick_to_short_address('Bob')}'.", + ch_contact_s, cmd_data, *self.args, header) def test_setting_change_contact(self) -> None: # Setup - self.window = self.window_list.get_window(nick_to_pub_key("Bob")) - self.window.type = WIN_TYPE_CONTACT - self.window.type_print = "contact" + self.window = self.window_list.get_window(nick_to_pub_key("Bob")) + self.window.type = WIN_TYPE_CONTACT + self.window.type_print = 'contact' self.window.window_contacts = self.contact_list.contacts - bob = self.contact_list.get_contact_by_address_or_nick("Bob") + bob = self.contact_list.get_contact_by_address_or_nick("Bob") # Test - for attr, header in [ - ("log_messages", CH_LOGGING), - ("notifications", CH_NOTIFY), - ("file_reception", CH_FILE_RECV), - ]: + for attr, header in [('log_messages', CH_LOGGING), + ('notifications', CH_NOTIFY), + ('file_reception', CH_FILE_RECV)]: for s in [ENABLE, ENABLE, DISABLE, DISABLE]: cmd_data = s + nick_to_pub_key("Bob") self.assertIsNone(ch_contact_s(cmd_data, *self.args, header)) @@ -609,44 +441,37 @@ class TestChContactSetting(TFCTestCase): def test_setting_change_group(self) -> None: # Setup - self.window = self.window_list.get_window(group_name_to_group_id("test_group")) - self.window.type = WIN_TYPE_GROUP - self.window.type_print = "group" - self.window.window_contacts = self.group_list.get_group("test_group").members + self.window = self.window_list.get_window(group_name_to_group_id('test_group')) + self.window.type = WIN_TYPE_GROUP + self.window.type_print = 'group' + self.window.window_contacts = self.group_list.get_group('test_group').members # Test - for attr, header in [ - ("log_messages", CH_LOGGING), - ("notifications", CH_NOTIFY), - ("file_reception", CH_FILE_RECV), - ]: + for attr, header in [('log_messages', CH_LOGGING), + ('notifications', CH_NOTIFY), + ('file_reception', CH_FILE_RECV)]: for s in [ENABLE, ENABLE, DISABLE, DISABLE]: - cmd_data = s + group_name_to_group_id("test_group") + cmd_data = s + group_name_to_group_id('test_group') self.assertIsNone(ch_contact_s(cmd_data, *self.args, header)) if header in [CH_LOGGING, CH_NOTIFY]: - self.assertEqual( - self.group_list.get_group("test_group").__getattribute__(attr), - (s == ENABLE), - ) + self.assertEqual(self.group_list.get_group('test_group').__getattribute__(attr), (s == ENABLE)) if header == CH_FILE_RECV: - for m in self.group_list.get_group("test_group").members: + for m in self.group_list.get_group('test_group').members: self.assertEqual(m.file_reception, (s == ENABLE)) def test_setting_change_all(self) -> None: # Setup - self.window = self.window_list.get_window(nick_to_pub_key("Bob")) - self.window.type = WIN_TYPE_CONTACT - self.window.type_print = "contact" + self.window = self.window_list.get_window(nick_to_pub_key("Bob")) + self.window.type = WIN_TYPE_CONTACT + self.window.type_print = 'contact' self.window.window_contacts = self.contact_list.contacts # Test - for attr, header in [ - ("log_messages", CH_LOGGING), - ("notifications", CH_NOTIFY), - ("file_reception", CH_FILE_RECV), - ]: + for attr, 
header in [('log_messages', CH_LOGGING), + ('notifications', CH_NOTIFY), + ('file_reception', CH_FILE_RECV)]: for s in [ENABLE, ENABLE, DISABLE, DISABLE]: cmd_data = s.upper() + US_BYTE self.assertIsNone(ch_contact_s(cmd_data, *self.args, header)) @@ -663,57 +488,42 @@ class TestChContactSetting(TFCTestCase): class TestContactRemove(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" self.unit_test_dir = cd_unit_test() - self.ts = datetime.now() - self.window_list = WindowList() - self.cmd_data = nick_to_pub_key("Bob") - self.settings = Settings() - self.master_key = MasterKey() - self.args = self.cmd_data, self.ts, self.window_list + self.ts = datetime.now() + self.window_list = WindowList() + self.cmd_data = nick_to_pub_key("Bob") + self.settings = Settings() + self.master_key = MasterKey() + self.args = self.cmd_data, self.ts, self.window_list def tearDown(self) -> None: """Post-test actions.""" cleanup(self.unit_test_dir) - def test_no_contact_raises_fr(self) -> None: + def test_no_contact_raises_se(self) -> None: # Setup - contact_list = ContactList(nicks=["Alice"]) - group_list = GroupList(groups=[]) - key_list = KeyList(nicks=["Alice"]) + contact_list = ContactList(nicks=['Alice']) + group_list = GroupList(groups=[]) + key_list = KeyList(nicks=['Alice']) # Test - self.assert_se( - f"Receiver has no account '{nick_to_short_address('Bob')}' to remove.", - contact_rem, - *self.args, - contact_list, - group_list, - key_list, - self.settings, - self.master_key, - ) + self.assert_se(f"Receiver has no account '{nick_to_short_address('Bob')}' to remove.", + contact_rem, *self.args, contact_list, group_list, key_list, self.settings, self.master_key) def test_successful_removal(self) -> None: # Setup - contact_list = ContactList(nicks=["Alice", "Bob"]) - contact = contact_list.get_contact_by_address_or_nick("Bob") - group_list = GroupList(groups=["test_group", "test_group2"]) - key_list = KeyList(nicks=["Alice", "Bob"]) + contact_list = ContactList(nicks=['Alice', 'Bob']) + contact = contact_list.get_contact_by_address_or_nick("Bob") + group_list = GroupList(groups=['test_group', 'test_group2']) + key_list = KeyList(nicks=['Alice', 'Bob']) self.window_list.windows = [RxWindow(type=WIN_TYPE_GROUP)] # Test - self.assert_se( - "No log database available.", - contact_rem, - *self.args, - contact_list, - group_list, - key_list, - self.settings, - self.master_key, - ) + self.assert_se("No log database available.", + contact_rem, *self.args, contact_list, group_list, key_list, self.settings, self.master_key) self.assertFalse(contact_list.has_pub_key(nick_to_pub_key("Bob"))) self.assertFalse(key_list.has_keyset(nick_to_pub_key("Bob"))) for g in group_list: @@ -721,6 +531,7 @@ class TestContactRemove(TFCTestCase): class TestWipe(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" self.exit_queue = Queue() @@ -729,11 +540,11 @@ class TestWipe(unittest.TestCase): """Post-test actions.""" tear_queue(self.exit_queue) - @mock.patch("src.common.misc.reset_terminal", return_value=None) + @mock.patch('os.system', return_value=None) def test_wipe_command(self, _) -> None: self.assertIsNone(wipe(self.exit_queue)) self.assertEqual(self.exit_queue.get(), WIPE) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/receiver/test_commands_g.py b/tests/receiver/test_commands_g.py index 0ae6dca..394c9ae 100644 --- a/tests/receiver/test_commands_g.py +++ b/tests/receiver/test_commands_g.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint 
secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -24,169 +24,101 @@ import unittest from src.common.statics import US_BYTE -from src.receiver.commands_g import ( - group_add, - group_create, - group_delete, - group_remove, - group_rename, -) +from src.receiver.commands_g import group_add, group_create, group_delete, group_remove, group_rename -from tests.mock_classes import ( - Contact, - ContactList, - GroupList, - RxWindow, - Settings, - WindowList, -) -from tests.utils import ( - group_name_to_group_id, - nick_to_pub_key, - TFCTestCase, - UNDECODABLE_UNICODE, -) +from tests.mock_classes import Contact, ContactList, GroupList, RxWindow, Settings, WindowList +from tests.utils import group_name_to_group_id, nick_to_pub_key, TFCTestCase, UNDECODABLE_UNICODE class TestGroupCreate(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.ts = datetime.datetime.now() - self.settings = Settings() + self.ts = datetime.datetime.now() + self.settings = Settings() self.window_list = WindowList() - self.group_id = group_name_to_group_id("test_group") + self.group_id = group_name_to_group_id('test_group') - def test_too_many_purp_accounts_raises_fr(self) -> None: + def test_too_many_purp_accounts_raises_se(self) -> None: # Setup - create_list = [nick_to_pub_key(str(n)) for n in range(51)] - cmd_data = self.group_id + b"test_group" + US_BYTE + b"".join(create_list) - group_list = GroupList(groups=["test_group"]) - contact_list = ContactList(nicks=[str(n) for n in range(51)]) - group = group_list.get_group("test_group") + create_list = [nick_to_pub_key(str(n)) for n in range(51)] + cmd_data = self.group_id + b'test_group' + US_BYTE + b''.join(create_list) + group_list = GroupList(groups=['test_group']) + contact_list = ContactList(nicks=[str(n) for n in range(51)]) + group = group_list.get_group('test_group') group.members = contact_list.contacts # Test - self.assert_se( - "Error: TFC settings only allow 50 members per group.", - group_create, - cmd_data, - self.ts, - self.window_list, - contact_list, - group_list, - self.settings, - ) + self.assert_se("Error: TFC settings only allow 50 members per group.", + group_create, cmd_data, self.ts, self.window_list, contact_list, group_list, self.settings) - def test_full_group_list_raises_fr(self) -> None: + def test_full_group_list_raises_se(self) -> None: # Setup - cmd_data = self.group_id + b"test_group" + US_BYTE + nick_to_pub_key("51") - group_list = GroupList(groups=[f"test_group_{n}" for n in range(50)]) - contact_list = ContactList(nicks=["Alice"]) + cmd_data = self.group_id + b'test_group' + US_BYTE + nick_to_pub_key('51') + group_list = GroupList(groups=[f"test_group_{n}" for n in range(50)]) + contact_list = ContactList(nicks=['Alice']) # Test - self.assert_se( - "Error: TFC settings only allow 50 groups.", - group_create, - cmd_data, - self.ts, - self.window_list, - contact_list, - group_list, - self.settings, - ) + self.assert_se("Error: TFC settings only allow 50 groups.", + group_create, cmd_data, self.ts, self.window_list, contact_list, group_list, self.settings) def test_successful_group_creation(self) -> None: # Setup - group_list = GroupList(groups=["test_group"]) - cmd_data = ( - group_name_to_group_id("test_group") - + b"test_group2" - + US_BYTE - + nick_to_pub_key("Bob") - ) - contact_list = ContactList(nicks=["Alice", "Bob"]) - window_list = WindowList( - nicks=["Alice", "Bob"], - contact_list=contact_list, - group_lis=group_list, - 
packet_list=None, - settings=Settings, - ) + group_list = GroupList(groups=['test_group']) + cmd_data = group_name_to_group_id('test_group') + b'test_group2' + US_BYTE + nick_to_pub_key('Bob') + contact_list = ContactList(nicks=['Alice', 'Bob']) + window_list = WindowList(nicks =['Alice', 'Bob'], + contact_list=contact_list, + group_lis =group_list, + packet_list =None, + settings =Settings) # Test - self.assertIsNone( - group_create( - cmd_data, self.ts, window_list, contact_list, group_list, self.settings - ) - ) - self.assertEqual(len(group_list.get_group("test_group")), 2) + self.assertIsNone(group_create(cmd_data, self.ts, window_list, contact_list, group_list, self.settings)) + self.assertEqual(len(group_list.get_group('test_group')), 2) class TestGroupAdd(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.ts = datetime.datetime.now() - self.settings = Settings() + self.ts = datetime.datetime.now() + self.settings = Settings() self.window_list = WindowList() - def test_too_large_final_member_list_raises_fr(self) -> None: + def test_too_large_final_member_list_raises_se(self) -> None: # Setup - group_list = GroupList(groups=["test_group"]) - contact_list = ContactList(nicks=[str(n) for n in range(51)]) - group = group_list.get_group("test_group") + group_list = GroupList(groups=['test_group']) + contact_list = ContactList(nicks=[str(n) for n in range(51)]) + group = group_list.get_group('test_group') group.members = contact_list.contacts[:50] - cmd_data = group_name_to_group_id("test_group") + nick_to_pub_key("50") + cmd_data = group_name_to_group_id('test_group') + nick_to_pub_key('50') # Test - self.assert_se( - "Error: TFC settings only allow 50 members per group.", - group_add, - cmd_data, - self.ts, - self.window_list, - contact_list, - group_list, - self.settings, - ) + self.assert_se("Error: TFC settings only allow 50 members per group.", + group_add, cmd_data, self.ts, self.window_list, contact_list, group_list, self.settings) - def test_unknown_group_id_raises_fr(self) -> None: + def test_unknown_group_id_raises_se(self) -> None: # Setup - group_list = GroupList(groups=["test_group"]) + group_list = GroupList(groups=['test_group']) contact_list = ContactList(nicks=[str(n) for n in range(21)]) - cmd_data = group_name_to_group_id("test_group2") + nick_to_pub_key("50") + cmd_data = group_name_to_group_id('test_group2') + nick_to_pub_key('50') # Test - self.assert_se( - "Error: No group with ID '2e7mHQznTMsP6' found.", - group_add, - cmd_data, - self.ts, - self.window_list, - contact_list, - group_list, - self.settings, - ) + self.assert_se("Error: No group with ID '2e7mHQznTMsP6' found.", + group_add, cmd_data, self.ts, self.window_list, contact_list, group_list, self.settings) def test_successful_group_add(self) -> None: # Setup - contact_list = ContactList(nicks=[str(n) for n in range(21)]) - group_lst = GroupList(groups=["test_group"]) - group = group_lst.get_group("test_group") + contact_list = ContactList(nicks=[str(n) for n in range(21)]) + group_lst = GroupList(groups=['test_group']) + group = group_lst.get_group('test_group') group.members = contact_list.contacts[:19] - cmd_data = group_name_to_group_id("test_group") + nick_to_pub_key("20") + cmd_data = group_name_to_group_id('test_group') + nick_to_pub_key('20') # Test - self.assertIsNone( - group_add( - cmd_data, - self.ts, - self.window_list, - contact_list, - group_lst, - self.settings, - ) - ) + self.assertIsNone(group_add(cmd_data, self.ts, self.window_list, contact_list, group_lst, 
self.settings)) - group2 = group_lst.get_group("test_group") + group2 = group_lst.get_group('test_group') self.assertEqual(len(group2), 20) for c in group2: @@ -194,142 +126,101 @@ class TestGroupAdd(TFCTestCase): class TestGroupRemove(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.ts = datetime.datetime.now() - self.window_list = WindowList() - self.contact_list = ContactList(nicks=[f"contact_{n}" for n in range(21)]) - self.group_list = GroupList(groups=["test_group"]) - self.group = self.group_list.get_group("test_group") + self.ts = datetime.datetime.now() + self.window_list = WindowList() + self.contact_list = ContactList(nicks=[f"contact_{n}" for n in range(21)]) + self.group_list = GroupList(groups=['test_group']) + self.group = self.group_list.get_group('test_group') self.group.members = self.contact_list.contacts[:19] - self.settings = Settings() + self.settings = Settings() - def test_unknown_group_id_raises_fr(self) -> None: + def test_unknown_group_id_raises_se(self) -> None: # Setup - group_list = GroupList(groups=["test_group"]) + group_list = GroupList(groups=['test_group']) contact_list = ContactList(nicks=[str(n) for n in range(21)]) - cmd_data = group_name_to_group_id("test_group2") + nick_to_pub_key("20") + cmd_data = group_name_to_group_id('test_group2') + nick_to_pub_key('20') # Test - self.assert_se( - "Error: No group with ID '2e7mHQznTMsP6' found.", - group_remove, - cmd_data, - self.ts, - self.window_list, - contact_list, - group_list, - ) + self.assert_se("Error: No group with ID '2e7mHQznTMsP6' found.", + group_remove, cmd_data, self.ts, self.window_list, contact_list, group_list) def test_successful_member_removal(self) -> None: - self.cmd_data = group_name_to_group_id("test_group") + b"".join( - [nick_to_pub_key("contact_18"), nick_to_pub_key("contact_20")] - ) - self.assertIsNone( - group_remove( - self.cmd_data, - self.ts, - self.window_list, - self.contact_list, - self.group_list, - ) - ) + self.cmd_data = group_name_to_group_id('test_group') + b''.join([nick_to_pub_key('contact_18'), + nick_to_pub_key('contact_20')]) + self.assertIsNone(group_remove(self.cmd_data, self.ts, self.window_list, self.contact_list, self.group_list)) class TestGroupDelete(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.ts = datetime.datetime.now() + self.ts = datetime.datetime.now() self.window_list = WindowList() - self.group_list = GroupList(groups=["test_group"]) + self.group_list = GroupList(groups=['test_group']) - def test_missing_group_raises_fr(self) -> None: - cmd_data = group_name_to_group_id("test_group2") - self.assert_se( - "Error: No group with ID '2e7mHQznTMsP6' found.", - group_delete, - cmd_data, - self.ts, - self.window_list, - self.group_list, - ) + def test_missing_group_raises_se(self) -> None: + cmd_data = group_name_to_group_id('test_group2') + self.assert_se("Error: No group with ID '2e7mHQznTMsP6' found.", + group_delete, cmd_data, self.ts, self.window_list, self.group_list) - def test_unknown_group_id_raises_fr(self) -> None: + def test_unknown_group_id_raises_se(self) -> None: # Setup - group_list = GroupList(groups=["test_group"]) - cmd_data = group_name_to_group_id("test_group2") + group_list = GroupList(groups=['test_group']) + cmd_data = group_name_to_group_id('test_group2') # Test - self.assert_se( - "Error: No group with ID '2e7mHQznTMsP6' found.", - group_delete, - cmd_data, - self.ts, - self.window_list, - group_list, - ) + self.assert_se("Error: No group with ID '2e7mHQznTMsP6' found.", + 
group_delete, cmd_data, self.ts, self.window_list, group_list) def test_successful_remove(self) -> None: - cmd_data = group_name_to_group_id("test_group") - self.assertIsNone( - group_delete(cmd_data, self.ts, self.window_list, self.group_list) - ) + cmd_data = group_name_to_group_id('test_group') + self.assertIsNone(group_delete(cmd_data, self.ts, self.window_list, self.group_list)) self.assertEqual(len(self.group_list.groups), 0) class TestGroupRename(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.ts = datetime.datetime.now() - self.group_list = GroupList(groups=["test_group"]) - self.window_list = WindowList() - self.window = RxWindow() + self.ts = datetime.datetime.now() + self.group_list = GroupList(groups=['test_group']) + self.window_list = WindowList() + self.window = RxWindow() self.window_list.windows = [self.window] - self.contact_list = ContactList(nicks=["alice"]) - self.args = self.ts, self.window_list, self.contact_list, self.group_list + self.contact_list = ContactList(nicks=['alice']) + self.args = self.ts, self.window_list, self.contact_list, self.group_list - def test_missing_group_id_raises_fr(self) -> None: + def test_missing_group_id_raises_se(self) -> None: # Setup - cmd_data = group_name_to_group_id("test_group2") + b"new_name" + cmd_data = group_name_to_group_id('test_group2') + b'new_name' # Test - self.assert_se( - "Error: No group with ID '2e7mHQznTMsP6' found.", - group_rename, - cmd_data, - *self.args, - ) + self.assert_se("Error: No group with ID '2e7mHQznTMsP6' found.", group_rename, cmd_data, *self.args) - def test_invalid_group_name_encoding_raises_fr(self) -> None: + def test_invalid_group_name_encoding_raises_se(self) -> None: # Setup - cmd_data = ( - group_name_to_group_id("test_group") + b"new_name" + UNDECODABLE_UNICODE - ) + cmd_data = group_name_to_group_id('test_group') + b'new_name' + UNDECODABLE_UNICODE # Test - self.assert_se( - "Error: New name for group 'test_group' was invalid.", - group_rename, - cmd_data, - *self.args, - ) + self.assert_se("Error: New name for group 'test_group' was invalid.", group_rename, cmd_data, *self.args) - def test_invalid_group_name_raises_fr(self) -> None: + def test_invalid_group_name_raises_se(self) -> None: # Setup - cmd_data = group_name_to_group_id("test_group") + b"new_name\x1f" + cmd_data = group_name_to_group_id('test_group') + b'new_name\x1f' # Test - self.assert_se( - "Error: Group name must be printable.", group_rename, cmd_data, *self.args - ) + self.assert_se("Error: Group name must be printable.", group_rename, cmd_data, *self.args) def test_valid_group_name_change(self) -> None: # Setup - cmd_data = group_name_to_group_id("test_group") + b"new_name" + cmd_data = group_name_to_group_id('test_group') + b'new_name' # Test self.assertIsNone(group_rename(cmd_data, *self.args)) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/receiver/test_files.py b/tests/receiver/test_files.py index 236ddfc..55a39b8 100644 --- a/tests/receiver/test_files.py +++ b/tests/receiver/test_files.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
@@ -25,402 +25,278 @@ import zlib from datetime import datetime from unittest import mock +from typing import Any -from src.common.crypto import blake2b, encrypt_and_sign +from src.common.crypto import blake2b, encrypt_and_sign from src.common.encoding import str_to_bytes -from src.common.statics import ( - COMPRESSION_LEVEL, - DIR_RECV_FILES, - ORIGIN_CONTACT_HEADER, - SYMMETRIC_KEY_LENGTH, - US_BYTE, -) +from src.common.statics import COMPRESSION_LEVEL, DIR_RECV_FILES, ORIGIN_CONTACT_HEADER, SYMMETRIC_KEY_LENGTH, US_BYTE -from src.receiver.files import ( - new_file, - process_assembled_file, - process_file, - store_unique, -) +from src.receiver.files import new_file, process_assembled_file, process_file, store_unique from tests.mock_classes import ContactList, Settings, WindowList -from tests.utils import ( - cd_unit_test, - cleanup, - nick_to_pub_key, - TFCTestCase, - UNDECODABLE_UNICODE, -) +from tests.utils import cd_unit_test, cleanup, nick_to_pub_key, TFCTestCase, UNDECODABLE_UNICODE class TestStoreUnique(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" self.unit_test_dir = cd_unit_test() - self.file_data = os.urandom(100) - self.file_dir = "test_dir/" - self.file_name = "test_file" + self.file_data = os.urandom(100) + self.file_dir = 'test_dir/' + self.file_name = 'test_file' def tearDown(self) -> None: """Post-test actions.""" cleanup(self.unit_test_dir) def test_each_file_is_store_with_unique_name(self) -> None: - self.assertEqual( - store_unique(self.file_data, self.file_dir, self.file_name), "test_file" - ) - self.assertEqual( - store_unique(self.file_data, self.file_dir, self.file_name), "test_file.1" - ) - self.assertEqual( - store_unique(self.file_data, self.file_dir, self.file_name), "test_file.2" - ) + self.assertEqual(store_unique(self.file_data, self.file_dir, self.file_name), 'test_file') + self.assertEqual(store_unique(self.file_data, self.file_dir, self.file_name), 'test_file.1') + self.assertEqual(store_unique(self.file_data, self.file_dir, self.file_name), 'test_file.2') class ProcessAssembledFile(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" self.unit_test_dir = cd_unit_test() - self.ts = datetime.now() - self.onion_pub_key = nick_to_pub_key("Alice") - self.nick = "Alice" - self.settings = Settings() - self.window_list = WindowList(nick=["Alice", "Bob"]) - self.key = os.urandom(SYMMETRIC_KEY_LENGTH) - self.args = self.onion_pub_key, self.nick, self.settings, self.window_list + self.ts = datetime.now() + self.onion_pub_key = nick_to_pub_key('Alice') + self.nick = 'Alice' + self.settings = Settings() + self.window_list = WindowList(nick=['Alice', 'Bob']) + self.key = os.urandom(SYMMETRIC_KEY_LENGTH) + self.args = self.onion_pub_key, self.nick, self.settings, self.window_list def tearDown(self) -> None: """Post-test actions.""" cleanup(self.unit_test_dir) - def test_invalid_structure_raises_fr(self) -> None: + def test_invalid_structure_raises_se(self) -> None: # Setup - payload = b"testfile.txt" + payload = b'testfile.txt' # Test - self.assert_se( - "Error: Received file had an invalid structure.", - process_assembled_file, - self.ts, - payload, - *self.args, - ) + self.assert_se("Error: Received file had an invalid structure.", + process_assembled_file, self.ts, payload, *self.args) - def test_invalid_encoding_raises_fr(self) -> None: + def test_invalid_encoding_raises_se(self) -> None: # Setup - payload = UNDECODABLE_UNICODE + US_BYTE + b"file_data" + payload = UNDECODABLE_UNICODE + US_BYTE + b'file_data' # Test - 
self.assert_se( - "Error: Received file name had an invalid encoding.", - process_assembled_file, - self.ts, - payload, - *self.args, - ) + self.assert_se("Error: Received file name had an invalid encoding.", + process_assembled_file, self.ts, payload, *self.args) - def test_invalid_name_raises_fr(self) -> None: + def test_invalid_name_raises_se(self) -> None: # Setup - payload = b"\x01filename" + US_BYTE + b"file_data" + payload = b'\x01filename' + US_BYTE + b'file_data' # Test - self.assert_se( - "Error: Received file had an invalid name.", - process_assembled_file, - self.ts, - payload, - *self.args, - ) + self.assert_se("Error: Received file had an invalid name.", + process_assembled_file, self.ts, payload, *self.args) - def test_slash_in_file_name_raises_fr(self) -> None: + def test_slash_in_file_name_raises_se(self) -> None: # Setup - payload = b"file/name" + US_BYTE + b"file_data" + payload = b'file/name' + US_BYTE + b'file_data' # Test - self.assert_se( - "Error: Received file had an invalid name.", - process_assembled_file, - self.ts, - payload, - *self.args, - ) + self.assert_se("Error: Received file had an invalid name.", + process_assembled_file, self.ts, payload, *self.args) - def test_invalid_key_raises_fr(self) -> None: + def test_invalid_key_raises_se(self) -> None: # Setup - payload = b"testfile.txt" + US_BYTE + b"file_data" + payload = b'testfile.txt' + US_BYTE + b'file_data' # Test - self.assert_se( - "Error: Received file had an invalid key.", - process_assembled_file, - self.ts, - payload, - *self.args, - ) + self.assert_se("Error: Received file had an invalid key.", + process_assembled_file, self.ts, payload, *self.args) - def test_decryption_fail_raises_fr(self) -> None: + def test_decryption_fail_raises_se(self) -> None: # Setup - file_data = encrypt_and_sign(b"file_data", self.key)[::-1] - payload = b"testfile.txt" + US_BYTE + file_data + file_data = encrypt_and_sign(b'file_data', self.key)[::-1] + payload = b'testfile.txt' + US_BYTE + file_data # Test - self.assert_se( - "Error: Decryption of file data failed.", - process_assembled_file, - self.ts, - payload, - *self.args, - ) + self.assert_se("Error: Decryption of file data failed.", + process_assembled_file, self.ts, payload, *self.args) - def test_invalid_compression_raises_fr(self) -> None: + def test_invalid_compression_raises_se(self) -> None: # Setup - compressed = zlib.compress(b"file_data", level=COMPRESSION_LEVEL)[::-1] - file_data = encrypt_and_sign(compressed, self.key) + self.key - payload = b"testfile.txt" + US_BYTE + file_data + compressed = zlib.compress(b'file_data', level=COMPRESSION_LEVEL)[::-1] + file_data = encrypt_and_sign(compressed, self.key) + self.key + payload = b'testfile.txt' + US_BYTE + file_data # Test - self.assert_se( - "Error: Decompression of file data failed.", - process_assembled_file, - self.ts, - payload, - *self.args, - ) + self.assert_se("Error: Decompression of file data failed.", + process_assembled_file, self.ts, payload, *self.args) def test_successful_reception(self) -> None: # Setup - compressed = zlib.compress(b"file_data", level=COMPRESSION_LEVEL) - file_data = encrypt_and_sign(compressed, self.key) + self.key - payload = b"testfile.txt" + US_BYTE + file_data + compressed = zlib.compress(b'file_data', level=COMPRESSION_LEVEL) + file_data = encrypt_and_sign(compressed, self.key) + self.key + payload = b'testfile.txt' + US_BYTE + file_data # Test self.assertIsNone(process_assembled_file(self.ts, payload, *self.args)) - 
self.assertTrue(os.path.isfile(f"{DIR_RECV_FILES}Alice/testfile.txt")) + self.assertTrue(os.path.isfile(f'{DIR_RECV_FILES}Alice/testfile.txt')) def test_successful_reception_during_traffic_masking(self) -> None: # Setup self.settings.traffic_masking = True - self.window_list.active_win = self.window_list.get_window( - nick_to_pub_key("Bob") - ) + self.window_list.active_win = self.window_list.get_window(nick_to_pub_key('Bob')) - compressed = zlib.compress(b"file_data", level=COMPRESSION_LEVEL) - file_data = encrypt_and_sign(compressed, self.key) + self.key - payload = b"testfile.txt" + US_BYTE + file_data + compressed = zlib.compress(b'file_data', level=COMPRESSION_LEVEL) + file_data = encrypt_and_sign(compressed, self.key) + self.key + payload = b'testfile.txt' + US_BYTE + file_data # Test self.assertIsNone(process_assembled_file(self.ts, payload, *self.args)) - self.assertEqual( - self.window_list.get_window(nick_to_pub_key("Bob")).message_log[0][1], - "Stored file from Alice as 'testfile.txt'.", - ) - self.assertTrue(os.path.isfile(f"{DIR_RECV_FILES}Alice/testfile.txt")) + self.assertEqual(self.window_list.get_window(nick_to_pub_key('Bob')).message_log[0][1], + "Stored file from Alice as 'testfile.txt'.") + self.assertTrue(os.path.isfile(f'{DIR_RECV_FILES}Alice/testfile.txt')) class TestNewFile(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" self.unit_test_dir = cd_unit_test() - self.ts = datetime.now() - self.packet = b"" - self.file_keys = dict() - self.file_buf = dict() - self.contact_list = ContactList(nicks=["Alice"]) - self.window_list = WindowList() - self.file_key = SYMMETRIC_KEY_LENGTH * b"a" - self.settings = Settings() - self.compressed = zlib.compress( - str_to_bytes("test_file.txt") + b"file_data", level=COMPRESSION_LEVEL - ) - self.args = ( - self.file_keys, - self.file_buf, - self.contact_list, - self.window_list, - self.settings, - ) + self.ts = datetime.now() + self.packet = b'' + self.file_keys = dict() + self.file_buf = dict() + self.contact_list = ContactList(nicks=['Alice']) + self.window_list = WindowList() + self.file_key = SYMMETRIC_KEY_LENGTH*b'a' + self.settings = Settings() + self.compressed = zlib.compress(str_to_bytes("test_file.txt") + b'file_data', level=COMPRESSION_LEVEL) + self.args = self.file_keys, self.file_buf, self.contact_list, self.window_list, self.settings def tearDown(self) -> None: """Post-test actions.""" cleanup(self.unit_test_dir) - def test_unknown_account_raises_fr(self) -> None: + def test_unknown_account_raises_se(self) -> None: # Setup file_ct = encrypt_and_sign(self.compressed, self.file_key) - packet = nick_to_pub_key("Bob") + ORIGIN_CONTACT_HEADER + file_ct + packet = nick_to_pub_key('Bob') + ORIGIN_CONTACT_HEADER + file_ct # Test - self.assert_se( - "File from an unknown account.", new_file, self.ts, packet, *self.args - ) + self.assert_se("File from an unknown account.", new_file, self.ts, packet, *self.args) - def test_disabled_file_reception_raises_fr(self) -> None: + def test_disabled_file_reception_raises_se(self) -> None: # Setup file_ct = encrypt_and_sign(self.compressed, self.file_key) - packet = nick_to_pub_key("Alice") + ORIGIN_CONTACT_HEADER + file_ct - self.contact_list.get_contact_by_address_or_nick("Alice").file_reception = False + packet = nick_to_pub_key('Alice') + ORIGIN_CONTACT_HEADER + file_ct + self.contact_list.get_contact_by_address_or_nick('Alice').file_reception = False # Test - self.assert_se( - "Alert! 
Discarded file from Alice as file reception for them is disabled.", - new_file, - self.ts, - packet, - *self.args, - ) + self.assert_se("Alert! Discarded file from Alice as file reception for them is disabled.", + new_file, self.ts, packet, *self.args) def test_valid_file_without_key_is_cached(self) -> None: # Setup - file_ct = encrypt_and_sign(self.compressed, self.file_key) + file_ct = encrypt_and_sign(self.compressed, self.file_key) file_hash = blake2b(file_ct) - packet = nick_to_pub_key("Alice") + ORIGIN_CONTACT_HEADER + file_ct + packet = nick_to_pub_key('Alice') + ORIGIN_CONTACT_HEADER + file_ct # Test self.assertIsNone(new_file(self.ts, packet, *self.args)) - self.assertEqual( - self.file_buf[nick_to_pub_key("Alice") + file_hash], (self.ts, file_ct) - ) + self.assertEqual(self.file_buf[nick_to_pub_key('Alice') + file_hash], (self.ts, file_ct)) - @mock.patch("time.sleep", return_value=None) - def test_valid_file_with_key_is_processed(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_valid_file_with_key_is_processed(self, _: Any) -> None: # Setup - file_ct = encrypt_and_sign(self.compressed, self.file_key) - file_hash = blake2b(file_ct) - packet = nick_to_pub_key("Alice") + ORIGIN_CONTACT_HEADER + file_ct - self.file_keys = {(nick_to_pub_key("Alice") + file_hash): self.file_key} - self.args = ( - self.file_keys, - self.file_buf, - self.contact_list, - self.window_list, - self.settings, - ) + file_ct = encrypt_and_sign(self.compressed, self.file_key) + file_hash = blake2b(file_ct) + packet = nick_to_pub_key('Alice') + ORIGIN_CONTACT_HEADER + file_ct + self.file_keys = {(nick_to_pub_key('Alice') + file_hash): self.file_key} + self.args = self.file_keys, self.file_buf, self.contact_list, self.window_list, self.settings # Test self.assertIsNone(new_file(self.ts, packet, *self.args)) class TestProcessFile(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" self.unit_test_dir = cd_unit_test() - self.ts = datetime.now() - self.account = nick_to_pub_key("Alice") - self.file_key = SYMMETRIC_KEY_LENGTH * b"a" - self.file_ct = encrypt_and_sign(50 * b"a", key=self.file_key) - self.contact_list = ContactList(nicks=["Alice"]) - self.window_list = WindowList() - self.settings = Settings() - self.args = self.file_key, self.contact_list, self.window_list, self.settings + self.ts = datetime.now() + self.account = nick_to_pub_key('Alice') + self.file_key = SYMMETRIC_KEY_LENGTH*b'a' + self.file_ct = encrypt_and_sign(50 * b'a', key=self.file_key) + self.contact_list = ContactList(nicks=['Alice']) + self.window_list = WindowList() + self.settings = Settings() + self.args = self.file_key, self.contact_list, self.window_list, self.settings def tearDown(self) -> None: """Post-test actions.""" cleanup(self.unit_test_dir) - def test_invalid_key_raises_fr(self) -> None: - self.file_key = SYMMETRIC_KEY_LENGTH * b"f" - self.args = self.file_key, self.contact_list, self.window_list, self.settings - self.assert_se( - "Error: Decryption key for file from Alice was invalid.", - process_file, - self.ts, - self.account, - self.file_ct, - *self.args, - ) + def test_invalid_key_raises_se(self) -> None: + self.file_key = SYMMETRIC_KEY_LENGTH * b'f' + self.args = self.file_key, self.contact_list, self.window_list, self.settings + self.assert_se("Error: Decryption key for file from Alice was invalid.", + process_file, self.ts, self.account, self.file_ct, *self.args) - def test_invalid_compression_raises_fr(self) -> None: - compressed = zlib.compress(b"file_data", 
level=COMPRESSION_LEVEL)[::-1] - file_data = encrypt_and_sign(compressed, self.file_key) + def test_invalid_compression_raises_se(self) -> None: + compressed = zlib.compress(b'file_data', level=COMPRESSION_LEVEL)[::-1] + file_data = encrypt_and_sign(compressed, self.file_key) - self.assert_se( - "Error: Failed to decompress file from Alice.", - process_file, - self.ts, - self.account, - file_data, - *self.args, - ) + self.assert_se("Error: Failed to decompress file from Alice.", + process_file, self.ts, self.account, file_data, *self.args) - @mock.patch("time.sleep", return_value=None) - def test_invalid_file_name_raises_fr(self, _) -> None: - compressed = zlib.compress( - UNDECODABLE_UNICODE + b"file_data", level=COMPRESSION_LEVEL - ) - file_data = encrypt_and_sign(compressed, self.file_key) + @mock.patch('time.sleep', return_value=None) + def test_invalid_file_name_raises_se(self, _: Any) -> None: + compressed = zlib.compress(UNDECODABLE_UNICODE + b'file_data', level=COMPRESSION_LEVEL) + file_data = encrypt_and_sign(compressed, self.file_key) - self.assert_se( - "Error: Name of file from Alice had an invalid encoding.", - process_file, - self.ts, - self.account, - file_data, - *self.args, - ) + self.assert_se("Error: Name of file from Alice had an invalid encoding.", + process_file, self.ts, self.account, file_data, *self.args) - @mock.patch("time.sleep", return_value=None) - def test_non_printable_name_raises_fr(self, _) -> None: - compressed = zlib.compress( - str_to_bytes("file\x01") + b"file_data", level=COMPRESSION_LEVEL - ) - file_data = encrypt_and_sign(compressed, self.file_key) + @mock.patch('time.sleep', return_value=None) + def test_non_printable_name_raises_se(self, _: Any) -> None: + compressed = zlib.compress(str_to_bytes("file\x01") + b'file_data', level=COMPRESSION_LEVEL) + file_data = encrypt_and_sign(compressed, self.file_key) - self.assert_se( - "Error: Name of file from Alice was invalid.", - process_file, - self.ts, - self.account, - file_data, - *self.args, - ) + self.assert_se("Error: Name of file from Alice was invalid.", + process_file, self.ts, self.account, file_data, *self.args) - @mock.patch("time.sleep", return_value=None) - def test_slash_in_name_raises_fr(self, _) -> None: - compressed = zlib.compress( - str_to_bytes("Alice/file.txt") + b"file_data", level=COMPRESSION_LEVEL - ) - file_data = encrypt_and_sign(compressed, self.file_key) + @mock.patch('time.sleep', return_value=None) + def test_slash_in_name_raises_se(self, _: Any) -> None: + compressed = zlib.compress(str_to_bytes("Alice/file.txt") + b'file_data', level=COMPRESSION_LEVEL) + file_data = encrypt_and_sign(compressed, self.file_key) - self.assert_se( - "Error: Name of file from Alice was invalid.", - process_file, - self.ts, - self.account, - file_data, - *self.args, - ) + self.assert_se("Error: Name of file from Alice was invalid.", + process_file, self.ts, self.account, file_data, *self.args) - @mock.patch("time.sleep", return_value=None) - def test_successful_storage_of_file(self, _) -> None: - compressed = zlib.compress( - str_to_bytes("test_file.txt") + b"file_data", level=COMPRESSION_LEVEL - ) - file_data = encrypt_and_sign(compressed, self.file_key) + @mock.patch('time.sleep', return_value=None) + def test_successful_storage_of_file(self, _: Any) -> None: + compressed = zlib.compress(str_to_bytes("test_file.txt") + b'file_data', level=COMPRESSION_LEVEL) + file_data = encrypt_and_sign(compressed, self.file_key) self.assertIsNone(process_file(self.ts, self.account, file_data, *self.args)) - 
@mock.patch("time.sleep", return_value=None) - def test_successful_storage_during_traffic_masking(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_successful_storage_during_traffic_masking(self, _: Any) -> None: # Setup self.settings.traffic_masking = True - self.window_list.active_win = self.window_list.get_window( - nick_to_pub_key("Bob") - ) + self.window_list.active_win = self.window_list.get_window(nick_to_pub_key('Bob')) - compressed = zlib.compress( - str_to_bytes("testfile.txt") + b"file_data", level=COMPRESSION_LEVEL - ) - file_data = encrypt_and_sign(compressed, self.file_key) + compressed = zlib.compress(str_to_bytes("testfile.txt") + b'file_data', level=COMPRESSION_LEVEL) + file_data = encrypt_and_sign(compressed, self.file_key) self.assertIsNone(process_file(self.ts, self.account, file_data, *self.args)) - self.assertEqual( - self.window_list.get_window(nick_to_pub_key("Bob")).message_log[0][1], - "Stored file from Alice as 'testfile.txt'.", - ) + self.assertEqual(self.window_list.get_window(nick_to_pub_key('Bob')).message_log[0][1], + "Stored file from Alice as 'testfile.txt'.") - self.assertTrue(os.path.isfile(f"{DIR_RECV_FILES}Alice/testfile.txt")) + self.assertTrue(os.path.isfile(f'{DIR_RECV_FILES}Alice/testfile.txt')) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/receiver/test_key_exchanges.py b/tests/receiver/test_key_exchanges.py index 633caf7..0b93a15 100644 --- a/tests/receiver/test_key_exchanges.py +++ b/tests/receiver/test_key_exchanges.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -25,270 +25,159 @@ import unittest from multiprocessing import Queue -from datetime import datetime -from unittest import mock +from datetime import datetime +from unittest import mock from unittest.mock import MagicMock +from typing import Any -from src.common.crypto import argon2_kdf, encrypt_and_sign -from src.common.encoding import b58encode, str_to_bytes +from src.common.crypto import argon2_kdf, encrypt_and_sign +from src.common.encoding import b58encode, str_to_bytes from src.common.exceptions import SoftError -from src.common.statics import ( - ARGON2_SALT_LENGTH, - BOLD_ON, - CLEAR_ENTIRE_SCREEN, - CONFIRM_CODE_LENGTH, - CURSOR_LEFT_UP_CORNER, - FINGERPRINT_LENGTH, - LOCAL_ID, - NORMAL_TEXT, - PSK_FILE_SIZE, - SYMMETRIC_KEY_LENGTH, - WIN_TYPE_CONTACT, - WIN_UID_COMMAND, - XCHACHA20_NONCE_LENGTH, -) +from src.common.statics import (ARGON2_SALT_LENGTH, BOLD_ON, CLEAR_ENTIRE_SCREEN, CONFIRM_CODE_LENGTH, + CURSOR_LEFT_UP_CORNER, FINGERPRINT_LENGTH, LOCAL_ID, NORMAL_TEXT, PSK_FILE_SIZE, + SYMMETRIC_KEY_LENGTH, WIN_TYPE_CONTACT, WIN_UID_COMMAND, XCHACHA20_NONCE_LENGTH) -from src.receiver.key_exchanges import ( - key_ex_ecdhe, - key_ex_psk_rx, - key_ex_psk_tx, - local_key_rdy, - process_local_key, -) +from src.receiver.key_exchanges import key_ex_ecdhe, key_ex_psk_rx, key_ex_psk_tx, local_key_rdy, process_local_key -from tests.mock_classes import ( - Contact, - ContactList, - KeyList, - KeySet, - Settings, - WindowList, -) -from tests.utils import ( - cd_unit_test, - cleanup, - nick_to_short_address, - nick_to_pub_key, - tear_queue, - TFCTestCase, -) -from tests.utils import UNDECODABLE_UNICODE +from tests.mock_classes import Contact, ContactList, KeyList, KeySet, Settings, WindowList +from tests.utils import cd_unit_test, cleanup, nick_to_short_address, nick_to_pub_key, 
tear_queue, TFCTestCase +from tests.utils import UNDECODABLE_UNICODE class TestProcessLocalKey(TFCTestCase): - kek = os.urandom(SYMMETRIC_KEY_LENGTH) + kek = os.urandom(SYMMETRIC_KEY_LENGTH) new_kek = os.urandom(SYMMETRIC_KEY_LENGTH) def setUp(self) -> None: """Pre-test actions.""" - self.contact_list = ContactList(nicks=[LOCAL_ID, "Alice"]) - self.key_list = KeyList(nicks=[LOCAL_ID, "Alice"]) - self.window_list = WindowList(nicks=[LOCAL_ID, "Alice"]) - self.settings = Settings() - self.ts = datetime.now() - self.kdk_hashes = list() + self.contact_list = ContactList(nicks=[LOCAL_ID, 'Alice']) + self.key_list = KeyList( nicks=[LOCAL_ID, 'Alice']) + self.window_list = WindowList( nicks=[LOCAL_ID, 'Alice']) + self.settings = Settings() + self.ts = datetime.now() + self.kdk_hashes = list() self.packet_hashes = list() - self.l_queue = Queue() - self.key = os.urandom(SYMMETRIC_KEY_LENGTH) - self.hek = os.urandom(SYMMETRIC_KEY_LENGTH) - self.conf_code = os.urandom(CONFIRM_CODE_LENGTH) - self.packet = encrypt_and_sign( - self.key + self.hek + self.conf_code, key=self.kek - ) - self.args = ( - self.window_list, - self.contact_list, - self.key_list, - self.settings, - self.kdk_hashes, - self.packet_hashes, - self.l_queue, - ) + self.l_queue = Queue() + self.key = os.urandom(SYMMETRIC_KEY_LENGTH) + self.hek = os.urandom(SYMMETRIC_KEY_LENGTH) + self.conf_code = os.urandom(CONFIRM_CODE_LENGTH) + self.packet = encrypt_and_sign(self.key + self.hek + self.conf_code, key=self.kek) + self.args = (self.window_list, self.contact_list, self.key_list, self.settings, + self.kdk_hashes, self.packet_hashes, self.l_queue) def tearDown(self) -> None: """Post-test actions.""" tear_queue(self.l_queue) - @mock.patch("tkinter.Tk", return_value=MagicMock()) - @mock.patch("time.sleep", return_value=None) - @mock.patch( - "builtins.input", - return_value="5KfgdgUvseWfNkoUPWSvxMPNStu5wBBxyjz1zpZtLEjk7ZvwEAT", - ) - def test_invalid_decryption_key_raises_fr(self, *_) -> None: + @mock.patch('tkinter.Tk', return_value=MagicMock()) + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', return_value='5KfgdgUvseWfNkoUPWSvxMPNStu5wBBxyjz1zpZtLEjk7ZvwEAT') + def test_invalid_decryption_key_raises_se(self, *_: Any) -> None: # Setup - packet = b"" + packet = b'' self.key_list.keysets = [] # Test - self.assert_se( - "Error: Incorrect key decryption key.", - process_local_key, - self.ts, - packet, - *self.args, - ) + self.assert_se("Error: Incorrect key decryption key.", process_local_key, self.ts, packet, *self.args) @mock.patch("src.common.misc.reset_terminal", return_value=None) - @mock.patch("tkinter.Tk", return_value=MagicMock()) - @mock.patch("time.sleep", return_value=None) - @mock.patch( - "builtins.input", - side_effect=[ - "5KfgdgUvseWfNkoUPWSvxMPNStu5wBBxyjz1zpZtLEjk7ZvwEAT", - b58encode(kek), - ], - ) - def test_successful_local_key_processing_with_existing_local_key(self, *_) -> None: - self.assert_se( - "Error: Incorrect key decryption key.", - process_local_key, - self.ts, - self.packet, - *self.args, - ) - self.assert_se( - "Added new local key.", process_local_key, self.ts, self.packet, *self.args - ) + @mock.patch('tkinter.Tk', return_value=MagicMock()) + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', side_effect=['5KfgdgUvseWfNkoUPWSvxMPNStu5wBBxyjz1zpZtLEjk7ZvwEAT', b58encode(kek)]) + def test_successful_local_key_processing_with_existing_local_key(self, *_: Any) -> None: + self.assert_se("Error: Incorrect key decryption key.", process_local_key, self.ts, 
self.packet, *self.args) + self.assert_se("Added new local key.", process_local_key, self.ts, self.packet, *self.args) @mock.patch("src.common.misc.reset_terminal", return_value=None) - @mock.patch("tkinter.Tk", return_value=MagicMock()) - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", return_value=b58encode(kek)) - def test_successful_local_key_processing_existing_bootstrap(self, *_) -> None: + @mock.patch('tkinter.Tk', return_value=MagicMock()) + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', return_value=b58encode(kek)) + def test_successful_local_key_processing_existing_bootstrap(self, *_: Any) -> None: # Setup self.key_list.keysets = [] # Test - self.assert_se( - "Added new local key.", process_local_key, self.ts, self.packet, *self.args - ) + self.assert_se("Added new local key.", process_local_key, self.ts, self.packet, *self.args) self.assertEqual(self.window_list.active_win.uid, WIN_UID_COMMAND) - @mock.patch("tkinter.Tk", return_value=MagicMock()) - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", side_effect=KeyboardInterrupt) - def test_keyboard_interrupt_raises_fr(self, *_) -> None: + @mock.patch('tkinter.Tk', return_value=MagicMock()) + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', side_effect=KeyboardInterrupt) + def test_keyboard_interrupt_raises_se(self, *_: Any) -> None: # Setup - self.window_list.active_win = self.window_list.get_window( - nick_to_pub_key("Alice") - ) + self.window_list.active_win = self.window_list.get_window(nick_to_pub_key('Alice')) # Test - self.assert_se( - "Local key setup aborted.", - process_local_key, - self.ts, - bytes(SYMMETRIC_KEY_LENGTH), - *self.args, - ) + self.assert_se("Local key setup aborted.", process_local_key, self.ts, bytes(SYMMETRIC_KEY_LENGTH), *self.args) @mock.patch("src.common.misc.reset_terminal", return_value=None) - @mock.patch("tkinter.Tk", return_value=MagicMock()) - @mock.patch("time.sleep", return_value=None) - @mock.patch( - "builtins.input", - side_effect=[ - b58encode(kek), - b58encode(kek), - b58encode(kek), - b58encode(new_kek), - ], - ) - def test_old_local_key_packet_raises_fr(self, *_) -> None: + @mock.patch('tkinter.Tk', return_value=MagicMock()) + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', side_effect=[b58encode(kek), b58encode(kek), b58encode(kek), b58encode(new_kek)]) + def test_old_local_key_packet_raises_se(self, *_: Any) -> None: # Setup self.key_list.keysets = [] - new_key = os.urandom(SYMMETRIC_KEY_LENGTH) - new_hek = os.urandom(SYMMETRIC_KEY_LENGTH) - new_conf_code = os.urandom(CONFIRM_CODE_LENGTH) - new_packet = encrypt_and_sign( - new_key + new_hek + new_conf_code, key=self.new_kek - ) + new_key = os.urandom(SYMMETRIC_KEY_LENGTH) + new_hek = os.urandom(SYMMETRIC_KEY_LENGTH) + new_conf_code = os.urandom(CONFIRM_CODE_LENGTH) + new_packet = encrypt_and_sign(new_key + new_hek + new_conf_code, key=self.new_kek) # Test - self.assert_se( - "Added new local key.", process_local_key, self.ts, self.packet, *self.args - ) - self.assert_se( - "Error: Received old local key packet.", - process_local_key, - self.ts, - self.packet, - *self.args, - ) - self.assert_se( - "Added new local key.", process_local_key, self.ts, new_packet, *self.args - ) + self.assert_se("Added new local key.", process_local_key, self.ts, self.packet, *self.args) + self.assert_se("Error: Received old local key packet.", process_local_key, self.ts, self.packet, *self.args) + 
self.assert_se("Added new local key.", process_local_key, self.ts, new_packet, *self.args) @mock.patch("src.common.misc.reset_terminal", return_value=None) - @mock.patch( - "tkinter.Tk", - side_effect=[ - MagicMock( - clipboard_get=MagicMock(return_value=b58encode(new_kek)), - clipboard_clear=MagicMock(side_effect=[tkinter.TclError]), - ) - ], - ) - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", side_effect=[b58encode(new_kek)]) - def test_loading_local_key_from_queue(self, *_) -> None: + @mock.patch('tkinter.Tk', side_effect=[MagicMock(clipboard_get =MagicMock(return_value=b58encode(new_kek)), + clipboard_clear=MagicMock(side_effect=[tkinter.TclError]))]) + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', side_effect=[b58encode(new_kek)]) + def test_loading_local_key_from_queue(self, *_: Any) -> None: # Setup self.key_list.keysets = [] - new_key = os.urandom(SYMMETRIC_KEY_LENGTH) - new_hek = os.urandom(SYMMETRIC_KEY_LENGTH) - new_conf_code = os.urandom(CONFIRM_CODE_LENGTH) - new_packet = encrypt_and_sign( - new_key + new_hek + new_conf_code, key=self.new_kek - ) - next_packet = os.urandom(len(new_packet)) - first_packet = os.urandom(len(new_packet)) + new_key = os.urandom(SYMMETRIC_KEY_LENGTH) + new_hek = os.urandom(SYMMETRIC_KEY_LENGTH) + new_conf_code = os.urandom(CONFIRM_CODE_LENGTH) + new_packet = encrypt_and_sign(new_key + new_hek + new_conf_code, key=self.new_kek) + next_packet = os.urandom(len(new_packet)) + first_packet = os.urandom(len(new_packet)) self.l_queue.put((datetime.now(), first_packet)) self.l_queue.put((datetime.now(), new_packet)) self.l_queue.put((datetime.now(), next_packet)) # Test self.assertEqual(self.l_queue.qsize(), 3) - self.assert_se( - "Added new local key.", process_local_key, self.ts, self.packet, *self.args - ) + self.assert_se("Added new local key.", process_local_key, self.ts, self.packet, *self.args) self.assertEqual(self.l_queue.qsize(), 1) class TestLocalKeyRdy(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" self.ts = datetime.fromtimestamp(1502750000) - @mock.patch("time.sleep", return_value=None) - def test_local_key_installed_no_contacts(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_local_key_installed_no_contacts(self, _: Any) -> None: # Setup - self.window_list = WindowList(nicks=[LOCAL_ID]) + self.window_list = WindowList(nicks=[LOCAL_ID]) self.contact_list = ContactList(nicks=[LOCAL_ID]) # Test - self.assert_prints( - f"""\ + self.assert_prints(f"""\ {BOLD_ON} Successfully completed the local key setup. 
{NORMAL_TEXT} {CLEAR_ENTIRE_SCREEN+CURSOR_LEFT_UP_CORNER} {BOLD_ON} Waiting for new contacts {NORMAL_TEXT} -""", - local_key_rdy, - self.ts, - self.window_list, - self.contact_list, - ) +""", local_key_rdy, self.ts, self.window_list, self.contact_list) - @mock.patch("time.sleep", return_value=None) - def test_local_key_installed_existing_contact(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_local_key_installed_existing_contact(self, _: Any) -> None: # Setup - self.window_list = WindowList(nicks=[LOCAL_ID, "Alice"]) - self.contact_list = ContactList(nicks=[LOCAL_ID, "Alice"]) - self.window_list.active_win = self.window_list.get_window( - nick_to_pub_key("Alice") - ) + self.window_list = WindowList(nicks=[LOCAL_ID, 'Alice']) + self.contact_list = ContactList(nicks=[LOCAL_ID, 'Alice']) + self.window_list.active_win = self.window_list.get_window(nick_to_pub_key('Alice')) self.window_list.active_win.type = WIN_TYPE_CONTACT # Test @@ -296,138 +185,102 @@ class TestLocalKeyRdy(TFCTestCase): class TestKeyExECDHE(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.ts = datetime.fromtimestamp(1502750000) - self.window_list = WindowList(nicks=[LOCAL_ID]) + self.ts = datetime.fromtimestamp(1502750000) + self.window_list = WindowList(nicks=[LOCAL_ID]) self.contact_list = ContactList() - self.key_list = KeyList() - self.settings = Settings() - self.packet = ( - nick_to_pub_key("Alice") - + SYMMETRIC_KEY_LENGTH * b"\x01" - + SYMMETRIC_KEY_LENGTH * b"\x02" - + SYMMETRIC_KEY_LENGTH * b"\x03" - + SYMMETRIC_KEY_LENGTH * b"\x04" - + str_to_bytes("Alice") - ) - self.args = ( - self.packet, - self.ts, - self.window_list, - self.contact_list, - self.key_list, - self.settings, - ) + self.key_list = KeyList() + self.settings = Settings() + self.packet = (nick_to_pub_key("Alice") + + SYMMETRIC_KEY_LENGTH * b'\x01' + + SYMMETRIC_KEY_LENGTH * b'\x02' + + SYMMETRIC_KEY_LENGTH * b'\x03' + + SYMMETRIC_KEY_LENGTH * b'\x04' + + str_to_bytes('Alice')) + self.args = self.packet, self.ts, self.window_list, self.contact_list, self.key_list, self.settings - @mock.patch("time.sleep", return_value=None) - def test_invalid_nick_raises_fr(self, _) -> None: - self.packet = ( - nick_to_pub_key("Alice") - + SYMMETRIC_KEY_LENGTH * b"\x01" - + SYMMETRIC_KEY_LENGTH * b"\x02" - + SYMMETRIC_KEY_LENGTH * b"\x03" - + SYMMETRIC_KEY_LENGTH * b"\x04" - + UNDECODABLE_UNICODE - ) - self.args = ( - self.packet, - self.ts, - self.window_list, - self.contact_list, - self.key_list, - self.settings, - ) + @mock.patch('time.sleep', return_value=None) + def test_invalid_nick_raises_se(self, _: Any) -> None: + self.packet = (nick_to_pub_key("Alice") + + SYMMETRIC_KEY_LENGTH * b'\x01' + + SYMMETRIC_KEY_LENGTH * b'\x02' + + SYMMETRIC_KEY_LENGTH * b'\x03' + + SYMMETRIC_KEY_LENGTH * b'\x04' + + UNDECODABLE_UNICODE) + self.args = self.packet, self.ts, self.window_list, self.contact_list, self.key_list, self.settings self.assert_se("Error: Received invalid contact data", key_ex_ecdhe, *self.args) - @mock.patch("time.sleep", return_value=None) - def test_add_ecdhe_keys(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_add_ecdhe_keys(self, _: Any) -> None: self.assertIsNone(key_ex_ecdhe(*self.args)) keyset = self.key_list.get_keyset(nick_to_pub_key("Alice")) self.assertIsInstance(keyset, KeySet) self.assertEqual(keyset.onion_pub_key, nick_to_pub_key("Alice")) - self.assertEqual(keyset.tx_mk, SYMMETRIC_KEY_LENGTH * b"\x01") - self.assertEqual(keyset.rx_mk, SYMMETRIC_KEY_LENGTH * b"\x02") 
- self.assertEqual(keyset.tx_hk, SYMMETRIC_KEY_LENGTH * b"\x03") - self.assertEqual(keyset.rx_hk, SYMMETRIC_KEY_LENGTH * b"\x04") + self.assertEqual(keyset.tx_mk, SYMMETRIC_KEY_LENGTH * b'\x01') + self.assertEqual(keyset.rx_mk, SYMMETRIC_KEY_LENGTH * b'\x02') + self.assertEqual(keyset.tx_hk, SYMMETRIC_KEY_LENGTH * b'\x03') + self.assertEqual(keyset.rx_hk, SYMMETRIC_KEY_LENGTH * b'\x04') contact = self.contact_list.get_contact_by_pub_key(nick_to_pub_key("Alice")) self.assertIsInstance(contact, Contact) self.assertEqual(contact.onion_pub_key, nick_to_pub_key("Alice")) - self.assertEqual(contact.nick, "Alice") + self.assertEqual(contact.nick, 'Alice') self.assertEqual(contact.rx_fingerprint, bytes(FINGERPRINT_LENGTH)) self.assertEqual(contact.tx_fingerprint, bytes(FINGERPRINT_LENGTH)) class TestKeyExPSKTx(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.ts = datetime.fromtimestamp(1502750000) - self.window_list = WindowList(nicks=[LOCAL_ID]) + self.ts = datetime.fromtimestamp(1502750000) + self.window_list = WindowList(nicks=[LOCAL_ID]) self.contact_list = ContactList() - self.key_list = KeyList() - self.settings = Settings() - self.packet = ( - nick_to_pub_key("Alice") - + SYMMETRIC_KEY_LENGTH * b"\x01" - + bytes(SYMMETRIC_KEY_LENGTH) - + SYMMETRIC_KEY_LENGTH * b"\x02" - + bytes(SYMMETRIC_KEY_LENGTH) - + str_to_bytes("Alice") - ) - self.args = ( - self.packet, - self.ts, - self.window_list, - self.contact_list, - self.key_list, - self.settings, - ) + self.key_list = KeyList() + self.settings = Settings() + self.packet = (nick_to_pub_key("Alice") + + SYMMETRIC_KEY_LENGTH * b'\x01' + + bytes(SYMMETRIC_KEY_LENGTH) + + SYMMETRIC_KEY_LENGTH * b'\x02' + + bytes(SYMMETRIC_KEY_LENGTH) + + str_to_bytes('Alice')) + self.args = self.packet, self.ts, self.window_list, self.contact_list, self.key_list, self.settings - @mock.patch("time.sleep", return_value=None) - def test_invalid_nick_raises_fr(self, _) -> None: - self.packet = ( - nick_to_pub_key("Alice") - + SYMMETRIC_KEY_LENGTH * b"\x01" - + bytes(SYMMETRIC_KEY_LENGTH) - + SYMMETRIC_KEY_LENGTH * b"\x02" - + bytes(SYMMETRIC_KEY_LENGTH) - + UNDECODABLE_UNICODE - ) - self.args = ( - self.packet, - self.ts, - self.window_list, - self.contact_list, - self.key_list, - self.settings, - ) + @mock.patch('time.sleep', return_value=None) + def test_invalid_nick_raises_se(self, _: Any) -> None: + self.packet = (nick_to_pub_key("Alice") + + SYMMETRIC_KEY_LENGTH * b'\x01' + + bytes(SYMMETRIC_KEY_LENGTH) + + SYMMETRIC_KEY_LENGTH * b'\x02' + + bytes(SYMMETRIC_KEY_LENGTH) + + UNDECODABLE_UNICODE) + self.args = self.packet, self.ts, self.window_list, self.contact_list, self.key_list, self.settings - self.assert_se( - "Error: Received invalid contact data", key_ex_psk_tx, *self.args - ) + self.assert_se("Error: Received invalid contact data", key_ex_psk_tx, *self.args) - @mock.patch("time.sleep", return_value=None) - def test_add_psk_tx_keys(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_add_psk_tx_keys(self, _: Any) -> None: self.assertIsNone(key_ex_psk_tx(*self.args)) keyset = self.key_list.get_keyset(nick_to_pub_key("Alice")) self.assertIsInstance(keyset, KeySet) self.assertEqual(keyset.onion_pub_key, nick_to_pub_key("Alice")) - self.assertEqual(keyset.tx_mk, SYMMETRIC_KEY_LENGTH * b"\x01") - self.assertEqual(keyset.rx_mk, bytes(SYMMETRIC_KEY_LENGTH)) - self.assertEqual(keyset.tx_hk, SYMMETRIC_KEY_LENGTH * b"\x02") - self.assertEqual(keyset.rx_hk, bytes(SYMMETRIC_KEY_LENGTH)) + self.assertEqual(keyset.tx_mk, 
SYMMETRIC_KEY_LENGTH * b'\x01') + self.assertEqual(keyset.rx_mk, bytes(SYMMETRIC_KEY_LENGTH)) + self.assertEqual(keyset.tx_hk, SYMMETRIC_KEY_LENGTH * b'\x02') + self.assertEqual(keyset.rx_hk, bytes(SYMMETRIC_KEY_LENGTH)) contact = self.contact_list.get_contact_by_pub_key(nick_to_pub_key("Alice")) self.assertIsInstance(contact, Contact) - self.assertEqual(contact.onion_pub_key, nick_to_pub_key("Alice")) - self.assertEqual(contact.nick, "Alice") + self.assertEqual(contact.onion_pub_key, nick_to_pub_key("Alice")) + self.assertEqual(contact.nick, 'Alice') self.assertEqual(contact.tx_fingerprint, bytes(FINGERPRINT_LENGTH)) self.assertEqual(contact.rx_fingerprint, bytes(FINGERPRINT_LENGTH)) @@ -439,115 +292,93 @@ class TestKeyExPSKRx(TFCTestCase): def setUp(self) -> None: """Pre-test actions.""" self.unit_test_dir = cd_unit_test() - self.packet = b"\x00" + nick_to_pub_key("Alice") - self.ts = datetime.now() - self.window_list = WindowList(nicks=["Alice", LOCAL_ID]) - self.contact_list = ContactList(nicks=["Alice", LOCAL_ID]) - self.key_list = KeyList(nicks=["Alice", LOCAL_ID]) - self.settings = Settings(disable_gui_dialog=True) - self.file_name = self.file_name - self.args = ( - self.packet, - self.ts, - self.window_list, - self.contact_list, - self.key_list, - self.settings, - ) + self.packet = b'\x00' + nick_to_pub_key("Alice") + self.ts = datetime.now() + self.window_list = WindowList( nicks=['Alice', LOCAL_ID]) + self.contact_list = ContactList(nicks=['Alice', LOCAL_ID]) + self.key_list = KeyList( nicks=['Alice', LOCAL_ID]) + self.settings = Settings(disable_gui_dialog=True) + self.file_name = self.file_name + self.args = self.packet, self.ts, self.window_list, self.contact_list, self.key_list, self.settings def tearDown(self) -> None: """Post-test actions.""" cleanup(self.unit_test_dir) - def test_unknown_account_raises_fr(self) -> None: - self.assert_se( - f"Error: Unknown account '{nick_to_short_address('Bob')}'.", - key_ex_psk_rx, - b"\x00" + nick_to_pub_key("Bob"), - self.ts, - self.window_list, - self.contact_list, - self.key_list, - self.settings, - ) + def test_unknown_account_raises_se(self) -> None: + self.assert_se(f"Error: Unknown account '{nick_to_short_address('Bob')}'.", + key_ex_psk_rx, b'\x00' + nick_to_pub_key("Bob"), + self.ts, self.window_list, self.contact_list, self.key_list, self.settings) - @mock.patch("builtins.input", return_value=file_name) - def test_invalid_psk_data_raises_fr(self, _) -> None: + @mock.patch('builtins.input', return_value=file_name) + def test_invalid_psk_data_raises_se(self, _: Any) -> None: # Setup - with open(self.file_name, "wb+") as f: + with open(self.file_name, 'wb+') as f: f.write(os.urandom(135)) # Test - self.assert_se( - "Error: The PSK data in the file was invalid.", key_ex_psk_rx, *self.args - ) + self.assert_se("Error: The PSK data in the file was invalid.", key_ex_psk_rx, *self.args) - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", return_value=file_name) - def test_permission_error_raises_fr(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', return_value=file_name) + def test_permission_error_raises_se(self, *_: Any) -> None: # Setup - with open(self.file_name, "wb+") as f: + with open(self.file_name, 'wb+') as f: f.write(os.urandom(PSK_FILE_SIZE)) # Test error_raised = False try: - with mock.patch("builtins.open", side_effect=PermissionError): + with mock.patch('builtins.open', side_effect=PermissionError): key_ex_psk_rx(*self.args) except SoftError as 
inst: error_raised = True - self.assertEqual( - "Error: No read permission for the PSK file.", inst.message - ) + self.assertEqual("Error: No read permission for the PSK file.", inst.message) self.assertTrue(error_raised) - @mock.patch("src.receiver.key_exchanges.ARGON2_PSK_PARALLELISM", 1) - @mock.patch("src.receiver.key_exchanges.ARGON2_PSK_TIME_COST", 1) - @mock.patch("src.receiver.key_exchanges.ARGON2_PSK_MEMORY_COST", 100) - @mock.patch("getpass.getpass", side_effect=["invalid", "password"]) - @mock.patch("time.sleep", return_value=None) - @mock.patch("os.urandom", side_effect=[bytes(XCHACHA20_NONCE_LENGTH)]) - @mock.patch("builtins.input", return_value=file_name) - def test_invalid_keys_raise_fr(self, *_) -> None: + @mock.patch('src.receiver.key_exchanges.ARGON2_PSK_PARALLELISM', 1) + @mock.patch('src.receiver.key_exchanges.ARGON2_PSK_TIME_COST', 1) + @mock.patch('src.receiver.key_exchanges.ARGON2_PSK_MEMORY_COST', 100) + @mock.patch('getpass.getpass', side_effect=['invalid', 'password']) + @mock.patch('time.sleep', return_value=None) + @mock.patch('os.urandom', side_effect=[bytes(XCHACHA20_NONCE_LENGTH)]) + @mock.patch('builtins.input', return_value=file_name) + def test_invalid_keys_raise_se(self, *_: Any) -> None: # Setup - keyset = self.key_list.get_keyset(nick_to_pub_key("Alice")) + keyset = self.key_list.get_keyset(nick_to_pub_key("Alice")) keyset.rx_mk = bytes(SYMMETRIC_KEY_LENGTH) keyset.rx_hk = bytes(SYMMETRIC_KEY_LENGTH) - salt = bytes(ARGON2_SALT_LENGTH) + salt = bytes(ARGON2_SALT_LENGTH) rx_key = bytes(SYMMETRIC_KEY_LENGTH) rx_hek = bytes(SYMMETRIC_KEY_LENGTH) - kek = argon2_kdf("password", salt, time_cost=1, memory_cost=100, parallelism=1) + kek = argon2_kdf('password', salt, time_cost=1, memory_cost=100, parallelism=1) ct_tag = encrypt_and_sign(rx_key + rx_hek, key=kek) - with open(self.file_name, "wb+") as f: + with open(self.file_name, 'wb+') as f: f.write(salt + ct_tag) # Test - self.assert_se( - "Error: Received invalid keys from contact.", key_ex_psk_rx, *self.args - ) + self.assert_se("Error: Received invalid keys from contact.", key_ex_psk_rx, *self.args) - @mock.patch("src.receiver.key_exchanges.ARGON2_PSK_PARALLELISM", 1) - @mock.patch("src.receiver.key_exchanges.ARGON2_PSK_TIME_COST", 1) - @mock.patch("src.receiver.key_exchanges.ARGON2_PSK_MEMORY_COST", 100) - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", return_value=file_name) - @mock.patch("getpass.getpass", return_value="test_password") - def test_valid_psk(self, *_) -> None: + @mock.patch('src.receiver.key_exchanges.ARGON2_PSK_PARALLELISM', 1) + @mock.patch('src.receiver.key_exchanges.ARGON2_PSK_TIME_COST', 1) + @mock.patch('src.receiver.key_exchanges.ARGON2_PSK_MEMORY_COST', 100) + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', return_value=file_name) + @mock.patch('getpass.getpass', return_value='test_password') + def test_valid_psk(self, *_: Any) -> None: # Setup - keyset = self.key_list.get_keyset(nick_to_pub_key("Alice")) + keyset = self.key_list.get_keyset(nick_to_pub_key("Alice")) keyset.rx_mk = bytes(SYMMETRIC_KEY_LENGTH) keyset.rx_hk = bytes(SYMMETRIC_KEY_LENGTH) - salt = os.urandom(ARGON2_SALT_LENGTH) - rx_key = os.urandom(SYMMETRIC_KEY_LENGTH) - rx_hek = os.urandom(SYMMETRIC_KEY_LENGTH) - kek = argon2_kdf( - "test_password", salt, time_cost=1, memory_cost=100, parallelism=1 - ) - ct_tag = encrypt_and_sign(rx_key + rx_hek, key=kek) + salt = os.urandom(ARGON2_SALT_LENGTH) + rx_key = os.urandom(SYMMETRIC_KEY_LENGTH) + rx_hek = 
os.urandom(SYMMETRIC_KEY_LENGTH) + kek = argon2_kdf('test_password', salt, time_cost=1, memory_cost=100, parallelism=1) + ct_tag = encrypt_and_sign(rx_key + rx_hek, key=kek) - with open(self.file_name, "wb+") as f: + with open(self.file_name, 'wb+') as f: f.write(salt + ct_tag) # Test @@ -557,28 +388,26 @@ class TestKeyExPSKRx(TFCTestCase): self.assertEqual(keyset.rx_mk, rx_key) self.assertEqual(keyset.rx_hk, rx_hek) - @mock.patch("src.receiver.key_exchanges.ARGON2_PSK_PARALLELISM", 1) - @mock.patch("src.receiver.key_exchanges.ARGON2_PSK_TIME_COST", 1) - @mock.patch("src.receiver.key_exchanges.ARGON2_PSK_MEMORY_COST", 100) - @mock.patch("subprocess.Popen") - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", side_effect=[file_name, ""]) - @mock.patch("getpass.getpass", return_value="test_password") - def test_valid_psk_overwrite_failure(self, *_) -> None: + @mock.patch('src.receiver.key_exchanges.ARGON2_PSK_PARALLELISM', 1) + @mock.patch('src.receiver.key_exchanges.ARGON2_PSK_TIME_COST', 1) + @mock.patch('src.receiver.key_exchanges.ARGON2_PSK_MEMORY_COST', 100) + @mock.patch('subprocess.Popen') + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', side_effect=[file_name, '']) + @mock.patch('getpass.getpass', return_value='test_password') + def test_valid_psk_overwrite_failure(self, *_: Any) -> None: # Setup - keyset = self.key_list.get_keyset(nick_to_pub_key("Alice")) + keyset = self.key_list.get_keyset(nick_to_pub_key("Alice")) keyset.rx_mk = bytes(SYMMETRIC_KEY_LENGTH) keyset.rx_hk = bytes(SYMMETRIC_KEY_LENGTH) - salt = os.urandom(ARGON2_SALT_LENGTH) + salt = os.urandom(ARGON2_SALT_LENGTH) rx_key = os.urandom(SYMMETRIC_KEY_LENGTH) rx_hek = os.urandom(SYMMETRIC_KEY_LENGTH) - kek = argon2_kdf( - "test_password", salt, time_cost=1, memory_cost=100, parallelism=1 - ) + kek = argon2_kdf('test_password', salt, time_cost=1, memory_cost=100, parallelism=1) ct_tag = encrypt_and_sign(rx_key + rx_hek, key=kek) - with open(self.file_name, "wb+") as f: + with open(self.file_name, 'wb+') as f: f.write(salt + ct_tag) # Test @@ -588,18 +417,19 @@ class TestKeyExPSKRx(TFCTestCase): self.assertEqual(keyset.rx_mk, rx_key) self.assertEqual(keyset.rx_hk, rx_hek) - @mock.patch("src.receiver.key_exchanges.ARGON2_PSK_TIME_COST", 1) - @mock.patch("src.receiver.key_exchanges.ARGON2_PSK_MEMORY_COST", 100) - @mock.patch("subprocess.Popen") - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", side_effect=[file_name, ""]) - @mock.patch("getpass.getpass", side_effect=[KeyboardInterrupt]) - def test_valid_psk_keyboard_interrupt_raises_fr(self, *_) -> None: - with open(self.file_name, "wb+") as f: + @mock.patch('src.receiver.key_exchanges.ARGON2_PSK_TIME_COST', 1) + @mock.patch('src.receiver.key_exchanges.ARGON2_PSK_MEMORY_COST', 100) + @mock.patch('subprocess.Popen') + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', side_effect=[file_name, '']) + @mock.patch('getpass.getpass', side_effect=[KeyboardInterrupt]) + def test_valid_psk_keyboard_interrupt_raises_se(self, *_: Any) -> None: + with open(self.file_name, 'wb+') as f: f.write(bytes(PSK_FILE_SIZE)) - self.assert_se("PSK import aborted.", key_ex_psk_rx, *self.args) + self.assert_se("PSK import aborted.", + key_ex_psk_rx, *self.args) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/receiver/test_messages.py b/tests/receiver/test_messages.py index 5d20f37..8bed0f2 100644 --- a/tests/receiver/test_messages.py +++ 
b/tests/receiver/test_messages.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -24,91 +24,61 @@ import unittest from datetime import datetime from unittest import mock +from typing import Any from src.common.database import MessageLog from src.common.encoding import bool_to_bytes -from src.common.misc import ensure_dir -from src.common.statics import ( - BLAKE2_DIGEST_LENGTH, - DIR_USER_DATA, - FILE, - FILE_KEY_HEADER, - GROUP_ID_LENGTH, - LOCAL_ID, - LOCAL_PUBKEY, - MESSAGE, - MESSAGE_LENGTH, - ORIGIN_CONTACT_HEADER, - ORIGIN_USER_HEADER, - SYMMETRIC_KEY_LENGTH, -) +from src.common.misc import ensure_dir +from src.common.statics import (BLAKE2_DIGEST_LENGTH, DIR_USER_DATA, FILE, FILE_KEY_HEADER, GROUP_ID_LENGTH, LOCAL_ID, + LOCAL_PUBKEY, MESSAGE, MESSAGE_LENGTH, ORIGIN_CONTACT_HEADER, ORIGIN_USER_HEADER, + SYMMETRIC_KEY_LENGTH) from src.receiver.messages import process_message_packet -from src.receiver.packet import PacketList -from src.receiver.windows import WindowList +from src.receiver.packet import PacketList +from src.receiver.windows import WindowList from tests.mock_classes import ContactList, GroupList, KeyList, MasterKey, Settings -from tests.utils import ( - assembly_packet_creator, - cd_unit_test, - cleanup, - group_name_to_group_id, -) -from tests.utils import nick_to_pub_key, TFCTestCase +from tests.utils import assembly_packet_creator, cd_unit_test, cleanup, group_name_to_group_id +from tests.utils import nick_to_pub_key, TFCTestCase -class TestProcessMessage(TFCTestCase): +class TestProcessMessagePacket(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" self.unit_test_dir = cd_unit_test() - self.msg = ( - "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum consectetur purus quis" - " dapibus. Fusce venenatis lacus ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed bibendu" - "m velit maximus in. Aliquam ac metus risus. Sed cursus ornare luctus. Integer aliquet lectus id ma" - "ssa blandit imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget luctus nisl. Sed ut el" - "it iaculis, faucibus lacus eget, sodales magna. Nunc sed commodo arcu. In hac habitasse platea dic" - "tumst. Integer luctus aliquam justo, at vestibulum dolor iaculis ac. Etiam laoreet est eget odio r" - "utrum, vel malesuada lorem rhoncus. Cras finibus in neque eu euismod. Nulla facilisi. Nunc nec ali" - "quam quam, quis ullamcorper leo. Nunc egestas lectus eget est porttitor, in iaculis felis sceleris" - "que. In sem elit, fringilla id viverra commodo, sagittis varius purus. Pellentesque rutrum loborti" - "s neque a facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu." - ) + self.msg = ("Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum consectetur purus quis" + " dapibus. Fusce venenatis lacus ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed bibendu" + "m velit maximus in. Aliquam ac metus risus. Sed cursus ornare luctus. Integer aliquet lectus id ma" + "ssa blandit imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget luctus nisl. Sed ut el" + "it iaculis, faucibus lacus eget, sodales magna. Nunc sed commodo arcu. In hac habitasse platea dic" + "tumst. Integer luctus aliquam justo, at vestibulum dolor iaculis ac. Etiam laoreet est eget odio r" + "utrum, vel malesuada lorem rhoncus. Cras finibus in neque eu euismod. Nulla facilisi. 
Nunc nec ali" + "quam quam, quis ullamcorper leo. Nunc egestas lectus eget est porttitor, in iaculis felis sceleris" + "que. In sem elit, fringilla id viverra commodo, sagittis varius purus. Pellentesque rutrum loborti" + "s neque a facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu.") - self.ts = datetime.now() + self.ts = datetime.now() self.master_key = MasterKey() - self.settings = Settings(log_file_masking=True) - self.file_name = f"{DIR_USER_DATA}{self.settings.software_operation}_logs" + self.settings = Settings(log_file_masking=True) + self.file_name = f'{DIR_USER_DATA}{self.settings.software_operation}_logs' - self.contact_list = ContactList(nicks=["Alice", "Bob", "Charlie", LOCAL_ID]) - self.key_list = KeyList(nicks=["Alice", "Bob", "Charlie", LOCAL_ID]) - self.group_list = GroupList(groups=["test_group"]) - self.packet_list = PacketList( - contact_list=self.contact_list, settings=self.settings - ) - self.window_list = WindowList( - contact_list=self.contact_list, - settings=self.settings, - group_list=self.group_list, - packet_list=self.packet_list, - ) - self.group_id = group_name_to_group_id("test_group") - self.file_keys = dict() + self.contact_list = ContactList(nicks=['Alice', 'Bob', 'Charlie', LOCAL_ID]) + self.key_list = KeyList( nicks=['Alice', 'Bob', 'Charlie', LOCAL_ID]) + self.group_list = GroupList( groups=['test_group']) + self.packet_list = PacketList(contact_list=self.contact_list, settings=self.settings) + self.window_list = WindowList(contact_list=self.contact_list, settings=self.settings, + group_list=self.group_list, packet_list=self.packet_list) + self.group_id = group_name_to_group_id('test_group') + self.file_keys = dict() - self.log_file = f"{DIR_USER_DATA}{self.settings.software_operation}_logs" + self.log_file = f'{DIR_USER_DATA}{self.settings.software_operation}_logs' self.tfc_log_database = MessageLog(self.log_file, self.master_key.master_key) - self.group_list.get_group("test_group").log_messages = True - self.args = ( - self.window_list, - self.packet_list, - self.contact_list, - self.key_list, - self.group_list, - self.settings, - self.file_keys, - self.tfc_log_database, - ) + self.group_list.get_group('test_group').log_messages = True + self.args = (self.window_list, self.packet_list, self.contact_list, self.key_list, + self.group_list, self.settings, self.file_keys, self.tfc_log_database) ensure_dir(DIR_USER_DATA) @@ -117,411 +87,239 @@ class TestProcessMessage(TFCTestCase): cleanup(self.unit_test_dir) # Invalid packets - @mock.patch("time.sleep", return_value=None) - def test_invalid_origin_header_raises_fr(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_invalid_origin_header_raises_se(self, _: Any) -> None: # Setup - invalid_origin_header = b"e" - packet = ( - nick_to_pub_key("Alice") + invalid_origin_header + MESSAGE_LENGTH * b"m" - ) + invalid_origin_header = b'e' + packet = nick_to_pub_key('Alice') + invalid_origin_header + MESSAGE_LENGTH * b'm' # Test - self.assert_se( - "Error: Received packet had an invalid origin-header.", - process_message_packet, - self.ts, - packet, - *self.args, - ) + self.assert_se("Error: Received packet had an invalid origin-header.", + process_message_packet, self.ts, packet, *self.args) - @mock.patch("time.sleep", return_value=None) - def test_masqueraded_command_raises_fr(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_masqueraded_command_raises_se(self, _: Any) -> None: for origin_header in [ORIGIN_USER_HEADER, ORIGIN_CONTACT_HEADER]: # 
Setup - packet = LOCAL_PUBKEY + origin_header + MESSAGE_LENGTH * b"m" + packet = LOCAL_PUBKEY + origin_header + MESSAGE_LENGTH * b'm' # Test - self.assert_se( - "Warning! Received packet masqueraded as a command.", - process_message_packet, - self.ts, - packet, - *self.args, - ) + self.assert_se("Warning! Received packet masqueraded as a command.", + process_message_packet, self.ts, packet, *self.args) # Private messages - @mock.patch("time.sleep", return_value=None) - def test_private_msg_from_contact(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_private_msg_from_contact(self, _: Any) -> None: # Setup - assembly_ct_list = assembly_packet_creator( - MESSAGE, - self.msg, - origin_header=ORIGIN_CONTACT_HEADER, - encrypt_packet=True, - onion_pub_key=nick_to_pub_key("Alice"), - ) + assembly_ct_list = assembly_packet_creator(MESSAGE, self.msg, origin_header=ORIGIN_CONTACT_HEADER, + encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice')) # Test for p in assembly_ct_list: self.assertIsNone(process_message_packet(self.ts, p, *self.args)) - @mock.patch("time.sleep", return_value=None) - def test_private_msg_from_user(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_private_msg_from_user(self, _: Any) -> None: # Setup - assembly_ct_list = assembly_packet_creator( - MESSAGE, - self.msg, - origin_header=ORIGIN_USER_HEADER, - encrypt_packet=True, - onion_pub_key=nick_to_pub_key("Alice"), - ) + assembly_ct_list = assembly_packet_creator(MESSAGE, self.msg, origin_header=ORIGIN_USER_HEADER, + encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice')) # Test for p in assembly_ct_list: self.assertIsNone(process_message_packet(self.ts, p, *self.args)) # Whispered messages - @mock.patch("time.sleep", return_value=None) - def test_whisper_msg_from_contact(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_whisper_msg_from_contact(self, _: Any) -> None: # Setup - assembly_ct_list = assembly_packet_creator( - MESSAGE, - self.msg, - origin_header=ORIGIN_CONTACT_HEADER, - encrypt_packet=True, - onion_pub_key=nick_to_pub_key("Alice"), - whisper_header=bool_to_bytes(True), - ) + assembly_ct_list = assembly_packet_creator(MESSAGE, self.msg, origin_header=ORIGIN_CONTACT_HEADER, + encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'), + whisper_header=bool_to_bytes(True)) # Test for p in assembly_ct_list[:-1]: self.assertIsNone(process_message_packet(self.ts, p, *self.args)) for p in assembly_ct_list[-1:]: - self.assert_se( - "Whisper message complete.", - process_message_packet, - self.ts, - p, - *self.args, - ) + self.assert_se("Whisper message complete.", + process_message_packet, self.ts, p, *self.args) - @mock.patch("time.sleep", return_value=None) - def test_whisper_msg_from_user(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_whisper_msg_from_user(self, _: Any) -> None: # Setup - assembly_ct_list = assembly_packet_creator( - MESSAGE, - self.msg, - origin_header=ORIGIN_USER_HEADER, - encrypt_packet=True, - onion_pub_key=nick_to_pub_key("Alice"), - whisper_header=bool_to_bytes(True), - ) + assembly_ct_list = assembly_packet_creator(MESSAGE, self.msg, origin_header=ORIGIN_USER_HEADER, + encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'), + whisper_header=bool_to_bytes(True)) # Test for p in assembly_ct_list[:-1]: self.assertIsNone(process_message_packet(self.ts, p, *self.args)) for p in assembly_ct_list[-1:]: - self.assert_se( - "Whisper message complete.", - process_message_packet, - 
self.ts, - p, - *self.args, - ) + self.assert_se("Whisper message complete.", process_message_packet, self.ts, p, *self.args) - @mock.patch("time.sleep", return_value=None) - def test_empty_whisper_msg_from_user(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_empty_whisper_msg_from_user(self, _: Any) -> None: # Setup - assembly_ct_list = assembly_packet_creator( - MESSAGE, - "", - origin_header=ORIGIN_USER_HEADER, - encrypt_packet=True, - onion_pub_key=nick_to_pub_key("Alice"), - whisper_header=bool_to_bytes(True), - ) + assembly_ct_list = assembly_packet_creator(MESSAGE, '', origin_header=ORIGIN_USER_HEADER, + encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'), + whisper_header=bool_to_bytes(True)) # Test for p in assembly_ct_list[:-1]: self.assertIsNone(process_message_packet(self.ts, p, *self.args)) for p in assembly_ct_list[-1:]: - self.assert_se( - "Whisper message complete.", - process_message_packet, - self.ts, - p, - *self.args, - ) + self.assert_se("Whisper message complete.", process_message_packet, self.ts, p, *self.args) # File key messages - @mock.patch("time.sleep", return_value=None) - def test_user_origin_raises_fr(self, _) -> None: - assembly_ct_list = assembly_packet_creator( - MESSAGE, - " ", - origin_header=ORIGIN_USER_HEADER, - encrypt_packet=True, - onion_pub_key=nick_to_pub_key("Alice"), - message_header=FILE_KEY_HEADER, - ) + @mock.patch('time.sleep', return_value=None) + def test_user_origin_raises_se(self, _: Any) -> None: + assembly_ct_list = assembly_packet_creator(MESSAGE, ' ', origin_header=ORIGIN_USER_HEADER, + encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'), + message_header=FILE_KEY_HEADER) for p in assembly_ct_list[-1:]: - self.assert_se( - "File key message from the user.", - process_message_packet, - self.ts, - p, - *self.args, - ) + self.assert_se("File key message from the user.", process_message_packet, self.ts, p, *self.args) - @mock.patch("time.sleep", return_value=None) - def test_invalid_file_key_data_raises_fr(self, _) -> None: - assembly_ct_list = assembly_packet_creator( - MESSAGE, - " ", - origin_header=ORIGIN_CONTACT_HEADER, - encrypt_packet=True, - onion_pub_key=nick_to_pub_key("Alice"), - message_header=FILE_KEY_HEADER, - ) + @mock.patch('time.sleep', return_value=None) + def test_invalid_file_key_data_raises_se(self, _: Any) -> None: + assembly_ct_list = assembly_packet_creator(MESSAGE, ' ', origin_header=ORIGIN_CONTACT_HEADER, + encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'), + message_header=FILE_KEY_HEADER) for p in assembly_ct_list[-1:]: - self.assert_se( - "Error: Received an invalid file key message.", - process_message_packet, - self.ts, - p, - *self.args, - ) + self.assert_se("Error: Received an invalid file key message.", + process_message_packet, self.ts, p, *self.args) - @mock.patch("time.sleep", return_value=None) - def test_too_large_file_key_data_raises_fr(self, _) -> None: - assembly_ct_list = assembly_packet_creator( - MESSAGE, - base64.b85encode( - BLAKE2_DIGEST_LENGTH * b"a" + SYMMETRIC_KEY_LENGTH * b"b" + b"a" - ).decode(), - origin_header=ORIGIN_CONTACT_HEADER, - encrypt_packet=True, - onion_pub_key=nick_to_pub_key("Alice"), - message_header=FILE_KEY_HEADER, - ) + @mock.patch('time.sleep', return_value=None) + def test_too_large_file_key_data_raises_se(self, _: Any) -> None: + assembly_ct_list = assembly_packet_creator(MESSAGE, base64.b85encode(BLAKE2_DIGEST_LENGTH * b'a' + + SYMMETRIC_KEY_LENGTH * b'b' + + b'a').decode(), + 
origin_header=ORIGIN_CONTACT_HEADER, + encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'), + message_header=FILE_KEY_HEADER) for p in assembly_ct_list[-1:]: - self.assert_se( - "Error: Received an invalid file key message.", - process_message_packet, - self.ts, - p, - *self.args, - ) + self.assert_se("Error: Received an invalid file key message.", + process_message_packet, self.ts, p, *self.args) - @mock.patch("time.sleep", return_value=None) - def test_valid_file_key_message(self, _) -> None: - assembly_ct_list = assembly_packet_creator( - MESSAGE, - base64.b85encode( - BLAKE2_DIGEST_LENGTH * b"a" + SYMMETRIC_KEY_LENGTH * b"b" - ).decode(), - origin_header=ORIGIN_CONTACT_HEADER, - encrypt_packet=True, - onion_pub_key=nick_to_pub_key("Alice"), - message_header=FILE_KEY_HEADER, - ) + @mock.patch('time.sleep', return_value=None) + def test_valid_file_key_message(self, _: Any) -> None: + assembly_ct_list = assembly_packet_creator(MESSAGE, base64.b85encode(BLAKE2_DIGEST_LENGTH * b'a' + + SYMMETRIC_KEY_LENGTH * b'b').decode(), + origin_header=ORIGIN_CONTACT_HEADER, + encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'), + message_header=FILE_KEY_HEADER) for p in assembly_ct_list[-1:]: - self.assert_se( - "Received file decryption key from Alice", - process_message_packet, - self.ts, - p, - *self.args, - ) + self.assert_se("Received file decryption key from Alice", + process_message_packet, self.ts, p, *self.args) # Group messages - @mock.patch("time.sleep", return_value=None) - def test_invalid_message_header_raises_fr(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_invalid_message_header_raises_se(self, _: Any) -> None: # Setup - assembly_ct_list = assembly_packet_creator( - MESSAGE, - "test_message", - origin_header=ORIGIN_CONTACT_HEADER, - encrypt_packet=True, - onion_pub_key=nick_to_pub_key("Alice"), - message_header=b"Z", - ) + assembly_ct_list = assembly_packet_creator(MESSAGE, 'test_message', origin_header=ORIGIN_CONTACT_HEADER, + encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'), + message_header=b'Z') # Test - self.assert_se( - "Error: Message from contact had an invalid header.", - process_message_packet, - self.ts, - assembly_ct_list[0], - *self.args, - ) + self.assert_se("Error: Message from contact had an invalid header.", + process_message_packet, self.ts, assembly_ct_list[0], *self.args) - @mock.patch("time.sleep", return_value=None) - def test_invalid_window_raises_fr(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_invalid_window_raises_se(self, _: Any) -> None: # Setup - assembly_ct_list = assembly_packet_creator( - MESSAGE, - "test_message", - origin_header=ORIGIN_CONTACT_HEADER, - encrypt_packet=True, - onion_pub_key=nick_to_pub_key("Alice"), - group_id=self.group_id, - ) + assembly_ct_list = assembly_packet_creator(MESSAGE, 'test_message', origin_header=ORIGIN_CONTACT_HEADER, + encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'), + group_id=self.group_id) - self.group_list.get_group("test_group").group_id = GROUP_ID_LENGTH * b"a" + self.group_list.get_group('test_group').group_id = GROUP_ID_LENGTH * b'a' # Test - self.assert_se( - "Error: Received message to an unknown group.", - process_message_packet, - self.ts, - assembly_ct_list[0], - *self.args, - ) + self.assert_se("Error: Received message to an unknown group.", + process_message_packet, self.ts, assembly_ct_list[0], *self.args) - @mock.patch("time.sleep", return_value=None) - def test_invalid_message_raises_fr(self, _) -> None: + 
@mock.patch('time.sleep', return_value=None) + def test_invalid_message_raises_se(self, _: Any) -> None: # Setup - assembly_ct_list = assembly_packet_creator( - MESSAGE, - " ", - origin_header=ORIGIN_CONTACT_HEADER, - encrypt_packet=True, - onion_pub_key=nick_to_pub_key("Alice"), - group_id=self.group_id, - tamper_plaintext=True, - ) + assembly_ct_list = assembly_packet_creator(MESSAGE, ' ', origin_header=ORIGIN_CONTACT_HEADER, + encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'), + group_id=self.group_id, tamper_plaintext=True) # Test - self.assert_se( - "Error: Received an invalid group message.", - process_message_packet, - self.ts, - assembly_ct_list[0], - *self.args, - ) + self.assert_se("Error: Received an invalid group message.", + process_message_packet, self.ts, assembly_ct_list[0], *self.args) - @mock.patch("time.sleep", return_value=None) - def test_invalid_whisper_header_raises_fr(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_invalid_whisper_header_raises_se(self, _: Any) -> None: # Setup - assembly_ct_list = assembly_packet_creator( - MESSAGE, - "", - origin_header=ORIGIN_CONTACT_HEADER, - encrypt_packet=True, - onion_pub_key=nick_to_pub_key("Alice"), - whisper_header=b"", - message_header=b"", - ) + assembly_ct_list = assembly_packet_creator(MESSAGE, '', origin_header=ORIGIN_CONTACT_HEADER, + encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'), + whisper_header=b'', message_header=b'') # Test - self.assert_se( - "Error: Message from contact had an invalid whisper header.", - process_message_packet, - self.ts, - assembly_ct_list[0], - *self.args, - ) + self.assert_se("Error: Message from contact had an invalid whisper header.", + process_message_packet, self.ts, assembly_ct_list[0], *self.args) - @mock.patch("time.sleep", return_value=None) - def test_contact_not_in_group_raises_fr(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_contact_not_in_group_raises_se(self, _: Any) -> None: # Setup - assembly_ct_list = assembly_packet_creator( - MESSAGE, - "test_message", - origin_header=ORIGIN_CONTACT_HEADER, - encrypt_packet=True, - group_id=self.group_id, - onion_pub_key=nick_to_pub_key("Charlie"), - ) + assembly_ct_list = assembly_packet_creator(MESSAGE, 'test_message', origin_header=ORIGIN_CONTACT_HEADER, + encrypt_packet=True, group_id=self.group_id, + onion_pub_key=nick_to_pub_key('Charlie')) # Test - self.assert_se( - "Error: Account is not a member of the group.", - process_message_packet, - self.ts, - assembly_ct_list[0], - *self.args, - ) + self.assert_se("Error: Account is not a member of the group.", + process_message_packet, self.ts, assembly_ct_list[0], *self.args) - @mock.patch("time.sleep", return_value=None) - def test_normal_group_msg_from_contact(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_normal_group_msg_from_contact(self, _: Any) -> None: # Setup - assembly_ct_list = assembly_packet_creator( - MESSAGE, - self.msg, - origin_header=ORIGIN_CONTACT_HEADER, - group_id=self.group_id, - encrypt_packet=True, - onion_pub_key=nick_to_pub_key("Alice"), - ) + assembly_ct_list = assembly_packet_creator(MESSAGE, self.msg, origin_header=ORIGIN_CONTACT_HEADER, + group_id=self.group_id, encrypt_packet=True, + onion_pub_key=nick_to_pub_key('Alice')) for p in assembly_ct_list: self.assertIsNone(process_message_packet(self.ts, p, *self.args)) - @mock.patch("time.sleep", return_value=None) - def test_normal_group_msg_from_user(self, _) -> None: + @mock.patch('time.sleep', 
return_value=None) + def test_normal_group_msg_from_user(self, _: Any) -> None: # Setup - assembly_ct_list = assembly_packet_creator( - MESSAGE, - self.msg, - origin_header=ORIGIN_USER_HEADER, - group_id=self.group_id, - encrypt_packet=True, - onion_pub_key=nick_to_pub_key("Alice"), - ) + assembly_ct_list = assembly_packet_creator(MESSAGE, self.msg, origin_header=ORIGIN_USER_HEADER, + group_id=self.group_id, encrypt_packet=True, + onion_pub_key=nick_to_pub_key('Alice')) for p in assembly_ct_list: self.assertIsNone(process_message_packet(self.ts, p, *self.args)) # Files - @mock.patch("time.sleep", return_value=None) - def test_file(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_file(self, _: Any) -> None: # Setup - assembly_ct_list = assembly_packet_creator( - FILE, - origin_header=ORIGIN_CONTACT_HEADER, - encrypt_packet=True, - onion_pub_key=nick_to_pub_key("Alice"), - ) + assembly_ct_list = assembly_packet_creator(FILE, origin_header=ORIGIN_CONTACT_HEADER, + encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice')) # Test for p in assembly_ct_list[:-1]: self.assertIsNone(process_message_packet(self.ts, p, *self.args)) for p in assembly_ct_list[-1:]: - self.assert_se( - "File storage complete.", process_message_packet, self.ts, p, *self.args - ) + self.assert_se("File storage complete.", + process_message_packet, self.ts, p, *self.args) - @mock.patch("time.sleep", return_value=None) - def test_file_when_reception_is_disabled(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_file_when_reception_is_disabled(self, _: Any) -> None: # Setup - assembly_ct_list = assembly_packet_creator( - FILE, - origin_header=ORIGIN_CONTACT_HEADER, - encrypt_packet=True, - onion_pub_key=nick_to_pub_key("Alice"), - ) + assembly_ct_list = assembly_packet_creator(FILE, origin_header=ORIGIN_CONTACT_HEADER, + encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice')) - self.contact_list.get_contact_by_pub_key( - nick_to_pub_key("Alice") - ).file_reception = False + self.contact_list.get_contact_by_pub_key(nick_to_pub_key('Alice')).file_reception = False # Test - self.assert_se( - "Alert! File transmission from Alice but reception is disabled.", - process_message_packet, - self.ts, - assembly_ct_list[0], - *self.args, - ) + self.assert_se("Alert! 
File transmission from Alice but reception is disabled.", + process_message_packet, self.ts, assembly_ct_list[0], *self.args) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/receiver/test_output_loop.py b/tests/receiver/test_output_loop.py index 72ade9a..e447d45 100644 --- a/tests/receiver/test_output_loop.py +++ b/tests/receiver/test_output_loop.py @@ -25,127 +25,88 @@ import threading import time import unittest -from typing import Tuple -from unittest import mock +from typing import Tuple +from unittest import mock from unittest.mock import MagicMock -from src.common.crypto import blake2b, encrypt_and_sign +from src.common.crypto import blake2b, encrypt_and_sign from src.common.database import MessageLog from src.common.encoding import b58encode, bool_to_bytes, int_to_bytes, str_to_bytes -from src.common.statics import ( - CH_FILE_RECV, - COMMAND, - COMMAND_DATAGRAM_HEADER, - CONFIRM_CODE_LENGTH, - DIR_USER_DATA, - ENABLE, - EXIT, - FILE_DATAGRAM_HEADER, - FILE_KEY_HEADER, - INITIAL_HARAC, - KEY_EX_ECDHE, - LOCAL_KEY_DATAGRAM_HEADER, - MESSAGE, - MESSAGE_DATAGRAM_HEADER, - ORIGIN_CONTACT_HEADER, - PRIVATE_MESSAGE_HEADER, - SYMMETRIC_KEY_LENGTH, - UNIT_TEST_QUEUE, - US_BYTE, - WIN_SELECT, - WIN_UID_COMMAND, - WIN_UID_FILE, -) +from src.common.statics import (CH_FILE_RECV, COMMAND, COMMAND_DATAGRAM_HEADER, CONFIRM_CODE_LENGTH, DIR_USER_DATA, + ENABLE, EXIT, FILE_DATAGRAM_HEADER, FILE_KEY_HEADER, INITIAL_HARAC, KEY_EX_ECDHE, + LOCAL_KEY_DATAGRAM_HEADER, MESSAGE, MESSAGE_DATAGRAM_HEADER, ORIGIN_CONTACT_HEADER, + PRIVATE_MESSAGE_HEADER, SYMMETRIC_KEY_LENGTH, UNIT_TEST_QUEUE, US_BYTE, WIN_SELECT, + WIN_UID_COMMAND, WIN_UID_FILE) from src.transmitter.packet import split_to_assembly_packets from src.receiver.output_loop import output_loop -from tests.mock_classes import ( - ContactList, - Gateway, - GroupList, - KeyList, - MasterKey, - nick_to_pub_key, - Settings, -) -from tests.utils import cd_unit_test, cleanup, gen_queue_dict, tear_queues +from tests.mock_classes import ContactList, Gateway, GroupList, KeyList, MasterKey, nick_to_pub_key, Settings +from tests.utils import cd_unit_test, cleanup, gen_queue_dict, tear_queues def rotate_key(key: bytes, harac: int) -> Tuple[bytes, int]: """Move to next key in hash ratchet.""" - return ( - blake2b(key + int_to_bytes(harac), digest_size=SYMMETRIC_KEY_LENGTH), - harac + 1, - ) + return blake2b(key + int_to_bytes(harac), digest_size=SYMMETRIC_KEY_LENGTH), harac + 1 class TestOutputLoop(unittest.TestCase): def setUp(self) -> None: """Pre-test actions.""" self.unit_test_dir = cd_unit_test() - self.o_sleep = time.sleep - time.sleep = lambda _: None + self.o_sleep = time.sleep + time.sleep = lambda _: None def tearDown(self) -> None: """Post-test actions.""" time.sleep = self.o_sleep cleanup(self.unit_test_dir) - @mock.patch("os.system", return_value=None) - @mock.patch("tkinter.Tk", return_value=MagicMock()) - @mock.patch( - "builtins.input", - side_effect=[ - b58encode(SYMMETRIC_KEY_LENGTH * b"a"), - bytes(CONFIRM_CODE_LENGTH).hex(), - b58encode(SYMMETRIC_KEY_LENGTH * b"a", public_key=True), - ], - ) + @mock.patch("os.system", return_value=None) + @mock.patch("tkinter.Tk", return_value=MagicMock()) + @mock.patch("builtins.input", side_effect=[b58encode(SYMMETRIC_KEY_LENGTH * b"a"), + bytes(CONFIRM_CODE_LENGTH).hex(), + b58encode(SYMMETRIC_KEY_LENGTH * b"a", public_key=True)]) def test_loop(self, *_) -> None: # Setup - queues = gen_queue_dict() - kek = SYMMETRIC_KEY_LENGTH * b"a" - conf_code = 
bytes(1) + queues = gen_queue_dict() + kek = SYMMETRIC_KEY_LENGTH * b"a" + conf_code = bytes(1) tx_pub_key = nick_to_pub_key("Bob") - o_sleep = self.o_sleep + o_sleep = self.o_sleep test_delay = 0.3 def queue_packet(mk, hk, tx_harac, packet, onion_pub_key=None) -> None: """Create encrypted datagram.""" if onion_pub_key is None: header = b"" - queue = queues[COMMAND_DATAGRAM_HEADER] + queue = queues[COMMAND_DATAGRAM_HEADER] packet = split_to_assembly_packets(packet, COMMAND)[0] else: header = onion_pub_key + ORIGIN_CONTACT_HEADER - queue = queues[MESSAGE_DATAGRAM_HEADER] + queue = queues[MESSAGE_DATAGRAM_HEADER] packet = split_to_assembly_packets(packet, MESSAGE)[0] - encrypted_harac = encrypt_and_sign(int_to_bytes(tx_harac), hk) + encrypted_harac = encrypt_and_sign(int_to_bytes(tx_harac), hk) encrypted_message = encrypt_and_sign(packet, mk) - encrypted_packet = header + encrypted_harac + encrypted_message + encrypted_packet = header + encrypted_harac + encrypted_message queue.put((datetime.datetime.now(), encrypted_packet)) def queue_delayer() -> None: """Place datagrams into queue after delay.""" o_sleep(test_delay) local_harac = INITIAL_HARAC - tx_harac = INITIAL_HARAC - local_hek = SYMMETRIC_KEY_LENGTH * b"a" - file_key = SYMMETRIC_KEY_LENGTH * b"b" - local_key = SYMMETRIC_KEY_LENGTH * b"a" - tx_mk = SYMMETRIC_KEY_LENGTH * b"a" - tx_hk = SYMMETRIC_KEY_LENGTH * b"a" + tx_harac = INITIAL_HARAC + local_hek = SYMMETRIC_KEY_LENGTH * b"a" + file_key = SYMMETRIC_KEY_LENGTH * b"b" + local_key = SYMMETRIC_KEY_LENGTH * b"a" + tx_mk = SYMMETRIC_KEY_LENGTH * b"a" + tx_hk = SYMMETRIC_KEY_LENGTH * b"a" # Queue local key packet - local_key_packet = encrypt_and_sign( - local_key + local_hek + conf_code, key=kek - ) - queues[LOCAL_KEY_DATAGRAM_HEADER].put( - (datetime.datetime.now(), local_key_packet) - ) + local_key_packet = encrypt_and_sign(local_key + local_hek + conf_code, key=kek) + queues[LOCAL_KEY_DATAGRAM_HEADER].put((datetime.datetime.now(), local_key_packet)) o_sleep(test_delay) # Select file window @@ -161,34 +122,28 @@ class TestOutputLoop(unittest.TestCase): o_sleep(test_delay) # A message that goes to buffer - queue_packet( - tx_mk, - tx_hk, - tx_harac, - bool_to_bytes(False) + PRIVATE_MESSAGE_HEADER + b"Hi Bob", - tx_pub_key, - ) + queue_packet(tx_mk, + tx_hk, + tx_harac, + bool_to_bytes(False) + PRIVATE_MESSAGE_HEADER + b"Hi Bob", + tx_pub_key) tx_mk, tx_harac = rotate_key(tx_mk, tx_harac) # ECDHE keyset for Bob - command = ( - KEY_EX_ECDHE - + nick_to_pub_key("Bob") - + (4 * SYMMETRIC_KEY_LENGTH * b"a") - + str_to_bytes("Bob") - ) + command = (KEY_EX_ECDHE + + nick_to_pub_key("Bob") + + (4 * SYMMETRIC_KEY_LENGTH * b"a") + + str_to_bytes("Bob")) queue_packet(local_key, tx_hk, local_harac, command) local_key, local_harac = rotate_key(local_key, local_harac) o_sleep(test_delay) # Message for Bob - queue_packet( - tx_mk, - tx_hk, - tx_harac, - bool_to_bytes(False) + PRIVATE_MESSAGE_HEADER + b"Hi Bob", - tx_pub_key, - ) + queue_packet(tx_mk, + tx_hk, + tx_harac, + bool_to_bytes(False) + PRIVATE_MESSAGE_HEADER + b"Hi Bob", + tx_pub_key) tx_mk, tx_harac = rotate_key(tx_mk, tx_harac) o_sleep(test_delay) @@ -198,22 +153,18 @@ class TestOutputLoop(unittest.TestCase): o_sleep(test_delay) # File packet from Bob - ct = encrypt_and_sign(b"test", file_key) + ct = encrypt_and_sign(b"test", file_key) f_hash = blake2b(ct) packet = nick_to_pub_key("Bob") + ORIGIN_CONTACT_HEADER + ct queues[FILE_DATAGRAM_HEADER].put((datetime.datetime.now(), packet)) o_sleep(test_delay) # File key packet from Bob - 
queue_packet( - tx_mk, - tx_hk, - tx_harac, - bool_to_bytes(False) - + FILE_KEY_HEADER - + base64.b85encode(f_hash + file_key), - tx_pub_key, - ) + queue_packet(tx_mk, + tx_hk, + tx_harac, + bool_to_bytes(False) + FILE_KEY_HEADER + base64.b85encode(f_hash + file_key), + tx_pub_key) o_sleep(test_delay) # Queue exit message to break the loop @@ -224,26 +175,12 @@ class TestOutputLoop(unittest.TestCase): threading.Thread(target=queue_delayer).start() # Test - master_key = MasterKey() - settings = Settings() - message_log = MessageLog( - f"{DIR_USER_DATA}{settings.software_operation}_logs", master_key.master_key - ) + master_key = MasterKey() + settings = Settings() + message_log = MessageLog(f"{DIR_USER_DATA}{settings.software_operation}_logs", master_key.master_key) - self.assertIsNone( - output_loop( - queues, - Gateway(), - settings, - ContactList(), - KeyList(), - GroupList(), - master_key, - message_log, - stdin_fd=1, - unit_test=True, - ) - ) + self.assertIsNone(output_loop(queues, Gateway(), settings, ContactList(), KeyList(), GroupList(), + master_key, message_log, stdin_fd=1, unit_test=True,)) # Teardown tear_queues(queues) diff --git a/tests/receiver/test_packet.py b/tests/receiver/test_packet.py index 43a86d2..a752edf 100644 --- a/tests/receiver/test_packet.py +++ b/tests/receiver/test_packet.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -25,349 +25,219 @@ import zlib from datetime import datetime from unittest import mock +from typing import Any -from src.common.crypto import byte_padding, encrypt_and_sign +from src.common.crypto import byte_padding, encrypt_and_sign from src.common.encoding import int_to_bytes -from src.common.statics import ( - COMMAND, - COMPRESSION_LEVEL, - DIR_RECV_FILES, - FILE, - F_C_HEADER, - LOCAL_ID, - MESSAGE, - M_A_HEADER, - M_E_HEADER, - ORIGIN_CONTACT_HEADER, - ORIGIN_USER_HEADER, - PADDING_LENGTH, - PRIVATE_MESSAGE_HEADER, - P_N_HEADER, - SYMMETRIC_KEY_LENGTH, - US_BYTE, -) +from src.common.statics import (COMMAND, COMPRESSION_LEVEL, DIR_RECV_FILES, FILE, F_C_HEADER, LOCAL_ID, MESSAGE, + M_A_HEADER, M_E_HEADER, ORIGIN_CONTACT_HEADER, ORIGIN_USER_HEADER, PADDING_LENGTH, + PRIVATE_MESSAGE_HEADER, P_N_HEADER, SYMMETRIC_KEY_LENGTH, US_BYTE) from src.transmitter.packet import split_to_assembly_packets from src.receiver.packet import decrypt_assembly_packet, Packet, PacketList -from tests.mock_classes import ( - ContactList, - create_contact, - KeyList, - Settings, - WindowList, -) -from tests.utils import ( - assembly_packet_creator, - cd_unit_test, - cleanup, - nick_to_pub_key, - TFCTestCase, -) -from tests.utils import UNDECODABLE_UNICODE +from tests.mock_classes import ContactList, create_contact, KeyList, Settings, WindowList +from tests.utils import assembly_packet_creator, cd_unit_test, cleanup, nick_to_pub_key, TFCTestCase +from tests.utils import UNDECODABLE_UNICODE class TestDecryptAssemblyPacket(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" self.onion_pub_key = nick_to_pub_key("Alice") - self.origin = ORIGIN_CONTACT_HEADER - self.window_list = WindowList(nicks=["Alice", LOCAL_ID]) - self.contact_list = ContactList(nicks=["Alice", LOCAL_ID]) - self.key_list = KeyList(nicks=["Alice", LOCAL_ID]) - self.keyset = self.key_list.get_keyset(nick_to_pub_key("Alice")) - self.args = ( - self.onion_pub_key, - self.origin, - self.window_list, - self.contact_list, - self.key_list, - ) + self.origin = 
ORIGIN_CONTACT_HEADER + self.window_list = WindowList(nicks=['Alice', LOCAL_ID]) + self.contact_list = ContactList(nicks=['Alice', LOCAL_ID]) + self.key_list = KeyList(nicks=['Alice', LOCAL_ID]) + self.keyset = self.key_list.get_keyset(nick_to_pub_key("Alice")) + self.args = self.onion_pub_key, self.origin, self.window_list, self.contact_list, self.key_list - def test_decryption_with_zero_rx_key_raises_fr(self) -> None: + def test_decryption_with_zero_rx_key_raises_se(self) -> None: # Setup - keyset = self.key_list.get_keyset(nick_to_pub_key("Alice")) + keyset = self.key_list.get_keyset(nick_to_pub_key("Alice")) keyset.rx_mk = bytes(SYMMETRIC_KEY_LENGTH) - packet = assembly_packet_creator( - MESSAGE, payload="Test message", encrypt_packet=True - )[0] + packet = assembly_packet_creator(MESSAGE, payload="Test message", encrypt_packet=True)[0] # Test - self.assert_se( - "Warning! Loaded zero-key for packet decryption.", - decrypt_assembly_packet, - packet, - *self.args, - ) + self.assert_se("Warning! Loaded zero-key for packet decryption.", + decrypt_assembly_packet, packet, *self.args) - def test_invalid_harac_ct_raises_fr(self) -> None: - packet = assembly_packet_creator( - MESSAGE, payload="Test message", encrypt_packet=True, tamper_harac=True - )[0] - self.assert_se( - "Warning! Received packet from Alice had an invalid hash ratchet MAC.", - decrypt_assembly_packet, - packet, - *self.args, - ) + def test_invalid_harac_ct_raises_se(self) -> None: + packet = assembly_packet_creator(MESSAGE, payload="Test message", encrypt_packet=True, tamper_harac=True)[0] + self.assert_se("Warning! Received packet from Alice had an invalid hash ratchet MAC.", + decrypt_assembly_packet, packet, *self.args) - def test_decryption_with_zero_rx_hek_raises_fr(self) -> None: + def test_decryption_with_zero_rx_hek_raises_se(self) -> None: # Setup - keyset = self.key_list.get_keyset(nick_to_pub_key("Alice")) + keyset = self.key_list.get_keyset(nick_to_pub_key("Alice")) keyset.rx_hk = bytes(SYMMETRIC_KEY_LENGTH) - packet = assembly_packet_creator( - MESSAGE, payload="Test message", encrypt_packet=True - )[0] + packet = assembly_packet_creator(MESSAGE, payload="Test message", encrypt_packet=True)[0] # Test - self.assert_se( - "Warning! Loaded zero-key for packet decryption.", - decrypt_assembly_packet, - packet, - *self.args, - ) + self.assert_se("Warning! Loaded zero-key for packet decryption.", decrypt_assembly_packet, packet, *self.args) - def test_expired_harac_raises_fr(self) -> None: + def test_expired_harac_raises_se(self) -> None: # Setup self.keyset.rx_harac = 1 # Test - packet = assembly_packet_creator( - MESSAGE, payload="Test message", encrypt_packet=True, harac=0 - )[0] - self.assert_se( - "Warning! Received packet from Alice had an expired hash ratchet counter.", - decrypt_assembly_packet, - packet, - *self.args, - ) + packet = assembly_packet_creator(MESSAGE, payload="Test message", encrypt_packet=True, harac=0)[0] + self.assert_se("Warning! 
Received packet from Alice had an expired hash ratchet counter.", + decrypt_assembly_packet, packet, *self.args) - @mock.patch("builtins.input", return_value="No") - def test_harac_dos_can_be_interrupted(self, _) -> None: - packet = assembly_packet_creator( - MESSAGE, payload="Test message", encrypt_packet=True, harac=100_001 - )[0] - self.assert_se( - "Dropped packet from Alice.", decrypt_assembly_packet, packet, *self.args - ) + @mock.patch('builtins.input', return_value='No') + def test_harac_dos_can_be_interrupted(self, _: Any) -> None: + packet = assembly_packet_creator(MESSAGE, payload="Test message", encrypt_packet=True, harac=100_001)[0] + self.assert_se("Dropped packet from Alice.", + decrypt_assembly_packet, packet, *self.args) - def test_invalid_packet_ct_raises_fr(self) -> None: - packet = assembly_packet_creator( - MESSAGE, payload="Test message", encrypt_packet=True, tamper_message=True - )[0] - self.assert_se( - "Warning! Received packet from Alice had an invalid MAC.", - decrypt_assembly_packet, - packet, - *self.args, - ) + def test_invalid_packet_ct_raises_se(self) -> None: + packet = assembly_packet_creator(MESSAGE, payload="Test message", encrypt_packet=True, tamper_message=True)[0] + self.assert_se("Warning! Received packet from Alice had an invalid MAC.", + decrypt_assembly_packet, packet, *self.args) def test_successful_packet_decryption(self) -> None: - packet = assembly_packet_creator( - MESSAGE, payload="Test message", encrypt_packet=True - )[0] - self.assertEqual( - decrypt_assembly_packet(packet, *self.args), - assembly_packet_creator(MESSAGE, payload="Test message")[0], - ) + packet = assembly_packet_creator(MESSAGE, payload="Test message", encrypt_packet=True)[0] + self.assertEqual(decrypt_assembly_packet(packet, *self.args), + assembly_packet_creator(MESSAGE, payload="Test message")[0]) def test_successful_packet_decryption_with_offset(self) -> None: - packet = assembly_packet_creator( - MESSAGE, payload="Test message", encrypt_packet=True, message_number=3 - )[0] - self.assertEqual( - decrypt_assembly_packet(packet, *self.args), - assembly_packet_creator(MESSAGE, payload="Test message", message_number=3)[ - 0 - ], - ) + packet = assembly_packet_creator(MESSAGE, payload="Test message", encrypt_packet=True, message_number=3)[0] + self.assertEqual(decrypt_assembly_packet(packet, *self.args), + assembly_packet_creator(MESSAGE, payload="Test message", message_number=3)[0]) def test_successful_command_decryption(self) -> None: - packet = assembly_packet_creator( - COMMAND, payload=b"command_data", encrypt_packet=True - )[0] - self.assertEqual( - decrypt_assembly_packet(packet, *self.args), - assembly_packet_creator(COMMAND, payload=b"command_data")[0], - ) + packet = assembly_packet_creator(COMMAND, payload=b"command_data", encrypt_packet=True)[0] + self.assertEqual(decrypt_assembly_packet(packet, *self.args), + assembly_packet_creator(COMMAND, payload=b"command_data")[0]) class TestPacket(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" self.short_msg = "Lorem ipsum dolor sit amet, consectetur adipiscing elit" - self.msg = ( - "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum consectetur purus quis" - " dapibus. Fusce venenatis lacus ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed bibendu" - "m velit maximus in. Aliquam ac metus risus. Sed cursus ornare luctus. Integer aliquet lectus id ma" - "ssa blandit imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget luctus nisl. 
Sed ut el" - "it iaculis, faucibus lacus eget, sodales magna. Nunc sed commodo arcu. In hac habitasse platea dic" - "tumst. Integer luctus aliquam justo, at vestibulum dolor iaculis ac. Etiam laoreet est eget odio r" - "utrum, vel malesuada lorem rhoncus. Cras finibus in neque eu euismod. Nulla facilisi. Nunc nec ali" - "quam quam, quis ullamcorper leo. Nunc egestas lectus eget est porttitor, in iaculis felis sceleris" - "que. In sem elit, fringilla id viverra commodo, sagittis varius purus. Pellentesque rutrum loborti" - "s neque a facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu." - ) + self.msg = ("Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum consectetur purus quis" + " dapibus. Fusce venenatis lacus ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed bibendu" + "m velit maximus in. Aliquam ac metus risus. Sed cursus ornare luctus. Integer aliquet lectus id ma" + "ssa blandit imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget luctus nisl. Sed ut el" + "it iaculis, faucibus lacus eget, sodales magna. Nunc sed commodo arcu. In hac habitasse platea dic" + "tumst. Integer luctus aliquam justo, at vestibulum dolor iaculis ac. Etiam laoreet est eget odio r" + "utrum, vel malesuada lorem rhoncus. Cras finibus in neque eu euismod. Nulla facilisi. Nunc nec ali" + "quam quam, quis ullamcorper leo. Nunc egestas lectus eget est porttitor, in iaculis felis sceleris" + "que. In sem elit, fringilla id viverra commodo, sagittis varius purus. Pellentesque rutrum loborti" + "s neque a facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu.") - self.unit_test_dir = cd_unit_test() - self.ts = datetime.now() - self.contact = create_contact("Alice") - self.settings = Settings(log_file_masking=True) - self.onion_pub_key = nick_to_pub_key("Alice") - self.window_list = WindowList() - self.whisper_header = b"\x00" + self.unit_test_dir = cd_unit_test() + self.ts = datetime.now() + self.contact = create_contact('Alice') + self.settings = Settings(log_file_masking=True) + self.onion_pub_key = nick_to_pub_key('Alice') + self.window_list = WindowList() + self.whisper_header = b'\x00' - compressed = zlib.compress(b"file_data", level=COMPRESSION_LEVEL) - file_key = os.urandom(SYMMETRIC_KEY_LENGTH) - encrypted = encrypt_and_sign(compressed, key=file_key) - encrypted += file_key - self.short_f_data = ( - int_to_bytes(1) + int_to_bytes(2) + b"testfile.txt" + US_BYTE + encrypted - ) + compressed = zlib.compress(b'file_data', level=COMPRESSION_LEVEL) + file_key = os.urandom(SYMMETRIC_KEY_LENGTH) + encrypted = encrypt_and_sign(compressed, key=file_key) + encrypted += file_key + self.short_f_data = (int_to_bytes(1) + int_to_bytes(2) + b'testfile.txt' + US_BYTE + encrypted) def tearDown(self) -> None: """Post-test actions.""" cleanup(self.unit_test_dir) - def test_invalid_assembly_packet_header_raises_fr(self) -> None: + def test_invalid_assembly_packet_header_raises_se(self) -> None: # Setup - packet = Packet( - self.onion_pub_key, - ORIGIN_CONTACT_HEADER, - MESSAGE, - self.contact, - self.settings, - ) - a_packet = assembly_packet_creator( - MESSAGE, payload=self.short_msg, s_header_override=b"i" - )[0] + packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, MESSAGE, self.contact, self.settings) + a_packet = assembly_packet_creator(MESSAGE, payload=self.short_msg, s_header_override=b'i')[0] # Test - self.assert_se( - "Error: Received packet had an invalid assembly packet header.", - packet.add_packet, - a_packet, - ) + 
self.assert_se("Error: Received packet had an invalid assembly packet header.", packet.add_packet, a_packet) self.assertEqual(packet.log_masking_ctr, 1) - def test_missing_start_packet_raises_fr(self) -> None: + def test_missing_start_packet_raises_se(self) -> None: # Setup - packet = Packet( - self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE, self.contact, self.settings - ) + packet = Packet(self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE, self.contact, self.settings) # Test for header in [M_A_HEADER, M_E_HEADER]: - self.assert_se( - "Missing start packet.", - packet.add_packet, - header + bytes(PADDING_LENGTH), - ) + self.assert_se("Missing start packet.", packet.add_packet, header + bytes(PADDING_LENGTH)) self.assertEqual(packet.log_masking_ctr, 2) def test_short_message(self) -> None: # Setup - packet = Packet( - self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE, self.contact, self.settings - ) + packet = Packet(self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE, self.contact, self.settings) packet_list = assembly_packet_creator(MESSAGE, self.short_msg) for p in packet_list: - packet.add_packet(p, packet_ct=b"test_ct") + packet.add_packet(p, packet_ct=b'test_ct') # Test - self.assertEqual( - packet.assemble_message_packet(), - self.whisper_header + PRIVATE_MESSAGE_HEADER + self.short_msg.encode(), - ) - self.assertEqual(packet.log_ct_list, [b"test_ct"]) + self.assertEqual(packet.assemble_message_packet(), + self.whisper_header + PRIVATE_MESSAGE_HEADER + self.short_msg.encode()) + self.assertEqual(packet.log_ct_list, [b'test_ct']) - def test_compression_error_raises_fr(self) -> None: + def test_compression_error_raises_se(self) -> None: # Setup - packet = Packet( - self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE, self.contact, self.settings - ) - packet_list = assembly_packet_creator( - MESSAGE, self.short_msg, tamper_compression=True - ) + packet = Packet(self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE, self.contact, self.settings) + packet_list = assembly_packet_creator(MESSAGE, self.short_msg, tamper_compression=True) for p in packet_list: packet.add_packet(p) # Test - self.assert_se( - "Error: Decompression of message failed.", packet.assemble_message_packet - ) + self.assert_se("Error: Decompression of message failed.", packet.assemble_message_packet) def test_long_message(self) -> None: # Setup - packet = Packet( - self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE, self.contact, self.settings - ) + packet = Packet(self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE, self.contact, self.settings) packet_list = assembly_packet_creator(MESSAGE, self.msg) for p in packet_list: - packet.add_packet(p, packet_ct=b"test_ct") + packet.add_packet(p, packet_ct=b'test_ct') # Test message = packet.assemble_message_packet() - self.assertEqual( - message, self.whisper_header + PRIVATE_MESSAGE_HEADER + self.msg.encode() - ) - self.assertEqual(packet.log_ct_list, 3 * [b"test_ct"]) + self.assertEqual(message, self.whisper_header + PRIVATE_MESSAGE_HEADER + self.msg.encode()) + self.assertEqual(packet.log_ct_list, 3 * [b'test_ct']) - def test_decryption_error_raises_fr(self) -> None: + def test_decryption_error_raises_se(self) -> None: # Setup - packet = Packet( - self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE, self.contact, self.settings - ) + packet = Packet(self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE, self.contact, self.settings) packet_list = assembly_packet_creator(MESSAGE, self.msg, tamper_ciphertext=True) for p in packet_list: packet.add_packet(p) # Test - self.assert_se( - "Error: Decryption of 
message failed.", packet.assemble_message_packet - ) + self.assert_se("Error: Decryption of message failed.", packet.assemble_message_packet) def test_short_file(self) -> None: # Setup packets = split_to_assembly_packets(self.short_f_data, FILE) # Test - self.assertFalse(os.path.isfile(f"{DIR_RECV_FILES}Alice/testfile.txt")) - self.assertFalse(os.path.isfile(f"{DIR_RECV_FILES}Alice/testfile.txt.1")) + self.assertFalse(os.path.isfile(f'{DIR_RECV_FILES}Alice/testfile.txt')) + self.assertFalse(os.path.isfile(f'{DIR_RECV_FILES}Alice/testfile.txt.1')) - packet = Packet( - self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings - ) + packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings) packet.long_active = True for p in packets: packet.add_packet(p) - self.assertIsNone( - packet.assemble_and_store_file( - self.ts, self.onion_pub_key, self.window_list - ) - ) - self.assertTrue(os.path.isfile(f"{DIR_RECV_FILES}Alice/testfile.txt")) + self.assertIsNone(packet.assemble_and_store_file(self.ts, self.onion_pub_key, self.window_list)) + self.assertTrue(os.path.isfile(f'{DIR_RECV_FILES}Alice/testfile.txt')) for p in packets: packet.add_packet(p) - self.assertIsNone( - packet.assemble_and_store_file( - self.ts, self.onion_pub_key, self.window_list - ) - ) - self.assertTrue(os.path.isfile(f"{DIR_RECV_FILES}Alice/testfile.txt.1")) + self.assertIsNone(packet.assemble_and_store_file(self.ts, self.onion_pub_key, self.window_list)) + self.assertTrue(os.path.isfile(f'{DIR_RECV_FILES}Alice/testfile.txt.1')) - def test_short_file_from_user_raises_fr(self) -> None: + def test_short_file_from_user_raises_se(self) -> None: # Setup - packet = Packet( - self.onion_pub_key, ORIGIN_USER_HEADER, FILE, self.contact, self.settings - ) + packet = Packet(self.onion_pub_key, ORIGIN_USER_HEADER, FILE, self.contact, self.settings) packets = split_to_assembly_packets(self.short_f_data, FILE) # Test @@ -375,50 +245,36 @@ class TestPacket(TFCTestCase): self.assert_se("Ignored file from the user.", packet.add_packet, p) self.assertEqual(packet.log_masking_ctr, 1) - def test_unauthorized_file_from_contact_raises_fr(self) -> None: + def test_unauthorized_file_from_contact_raises_se(self) -> None: # Setup self.contact.file_reception = False - packet = Packet( - self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings - ) + packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings) packets = split_to_assembly_packets(self.short_f_data, FILE) # Test for p in packets: - self.assert_se( - "Alert! File transmission from Alice but reception is disabled.", - packet.add_packet, - p, - ) + self.assert_se("Alert! 
File transmission from Alice but reception is disabled.", packet.add_packet, p) self.assertEqual(packet.log_masking_ctr, 1) def test_long_file(self) -> None: # Setup - packet = Packet( - self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings - ) + packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings) packet.long_active = True - packet_list = assembly_packet_creator(FILE) + packet_list = assembly_packet_creator(FILE) for p in packet_list: packet.add_packet(p) # Test - self.assertIsNone( - packet.assemble_and_store_file( - self.ts, self.onion_pub_key, self.window_list - ) - ) - self.assertEqual(os.path.getsize(f"{DIR_RECV_FILES}Alice/test_file.txt"), 10000) + self.assertIsNone(packet.assemble_and_store_file(self.ts, self.onion_pub_key, self.window_list)) + self.assertEqual(os.path.getsize(f'{DIR_RECV_FILES}Alice/test_file.txt'), 10000) def test_disabled_file_reception_raises_fr_with_append_packet(self) -> None: # Setup - packet = Packet( - self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings - ) + packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings) packet.long_active = True - packet_list = assembly_packet_creator(FILE) + packet_list = assembly_packet_creator(FILE) for p in packet_list[:2]: self.assertIsNone(packet.add_packet(p)) @@ -426,11 +282,7 @@ class TestPacket(TFCTestCase): packet.contact.file_reception = False # Test - self.assert_se( - "Alert! File reception disabled mid-transfer.", - packet.add_packet, - packet_list[2], - ) + self.assert_se("Alert! File reception disabled mid-transfer.", packet.add_packet, packet_list[2]) for p in packet_list[3:]: self.assert_se("Missing start packet.", packet.add_packet, p) @@ -439,11 +291,9 @@ class TestPacket(TFCTestCase): def test_disabled_file_reception_raises_fr_with_end_packet(self) -> None: # Setup - packet = Packet( - self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings - ) + packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings) packet.long_active = True - packet_list = assembly_packet_creator(FILE) + packet_list = assembly_packet_creator(FILE) for p in packet_list[:-1]: self.assertIsNone(packet.add_packet(p)) @@ -452,59 +302,42 @@ class TestPacket(TFCTestCase): # Test for p in packet_list[-1:]: - self.assert_se( - "Alert! File reception disabled mid-transfer.", packet.add_packet, p - ) + self.assert_se("Alert! File reception disabled mid-transfer.", packet.add_packet, p) self.assertEqual(packet.log_masking_ctr, len(packet_list)) - def test_long_file_from_user_raises_fr(self) -> None: + def test_long_file_from_user_raises_se(self) -> None: # Setup - packet = Packet( - self.onion_pub_key, ORIGIN_USER_HEADER, FILE, self.contact, self.settings - ) + packet = Packet(self.onion_pub_key, ORIGIN_USER_HEADER, FILE, self.contact, self.settings) packet_list = assembly_packet_creator(FILE) # Test self.assert_se("Ignored file from the user.", packet.add_packet, packet_list[0]) self.assertEqual(packet.log_masking_ctr, 1) - def test_unauthorized_long_file_raises_fr(self) -> None: + def test_unauthorized_long_file_raises_se(self) -> None: # Setup self.contact.file_reception = False - packet = Packet( - self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings - ) + packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings) packet_list = assembly_packet_creator(FILE) # Test - self.assert_se( - "Alert! 
File transmission from Alice but reception is disabled.", - packet.add_packet, - packet_list[0], - ) + self.assert_se("Alert! File transmission from Alice but reception is disabled.", + packet.add_packet, packet_list[0]) self.assertEqual(packet.log_masking_ctr, 1) - def test_invalid_long_file_header_raises_fr(self) -> None: + def test_invalid_long_file_header_raises_se(self) -> None: # Setup - packet = Packet( - self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings - ) + packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings) packet_list = assembly_packet_creator(FILE, file_name=UNDECODABLE_UNICODE) # Test - self.assert_se( - "Error: Received file packet had an invalid header.", - packet.add_packet, - packet_list[0], - ) + self.assert_se("Error: Received file packet had an invalid header.", packet.add_packet, packet_list[0]) self.assertEqual(packet.log_masking_ctr, 1) def test_contact_canceled_file(self) -> None: # Setup - packet = Packet( - self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings - ) + packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings) packet_list = assembly_packet_creator(FILE)[:20] packet_list.append(byte_padding(F_C_HEADER)) # Add cancel packet @@ -512,18 +345,14 @@ class TestPacket(TFCTestCase): packet.add_packet(p) # Test - self.assertEqual( - len(packet.assembly_pt_list), 0 - ) # Cancel packet empties packet list + self.assertEqual(len(packet.assembly_pt_list), 0) # Cancel packet empties packet list self.assertFalse(packet.long_active) self.assertFalse(packet.is_complete) self.assertEqual(packet.log_masking_ctr, len(packet_list)) def test_noise_packet_interrupts_file(self) -> None: # Setup - packet = Packet( - self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings - ) + packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings) packet_list = assembly_packet_creator(FILE)[:20] packet_list.append(byte_padding(P_N_HEADER)) # Add noise packet @@ -531,33 +360,27 @@ class TestPacket(TFCTestCase): packet.add_packet(p) # Test - self.assertEqual( - len(packet.assembly_pt_list), 0 - ) # Noise packet empties packet list + self.assertEqual(len(packet.assembly_pt_list), 0) # Noise packet empties packet list self.assertFalse(packet.long_active) self.assertFalse(packet.is_complete) self.assertEqual(packet.log_masking_ctr, len(packet_list)) def test_short_command(self) -> None: # Setup - packet = Packet( - LOCAL_ID, ORIGIN_CONTACT_HEADER, COMMAND, self.contact, self.settings - ) - packets = assembly_packet_creator(COMMAND, b"test_command") + packet = Packet(LOCAL_ID, ORIGIN_CONTACT_HEADER, COMMAND, self.contact, self.settings) + packets = assembly_packet_creator(COMMAND, b'test_command') for p in packets: packet.add_packet(p) # Test - self.assertEqual(packet.assemble_command_packet(), b"test_command") + self.assertEqual(packet.assemble_command_packet(), b'test_command') self.assertEqual(packet.log_masking_ctr, 0) def test_long_command(self) -> None: # Setup - packet = Packet( - LOCAL_ID, ORIGIN_CONTACT_HEADER, COMMAND, self.contact, self.settings - ) - command = 500 * b"test_command" + packet = Packet(LOCAL_ID, ORIGIN_CONTACT_HEADER, COMMAND, self.contact, self.settings) + command = 500 * b'test_command' packets = assembly_packet_creator(COMMAND, command) for p in packets: @@ -567,58 +390,42 @@ class TestPacket(TFCTestCase): self.assertEqual(packet.assemble_command_packet(), command) 
self.assertEqual(packet.log_masking_ctr, 0) - def test_long_command_hash_mismatch_raises_fr(self) -> None: + def test_long_command_hash_mismatch_raises_se(self) -> None: # Setup - packet = Packet( - LOCAL_ID, ORIGIN_CONTACT_HEADER, COMMAND, self.contact, self.settings - ) - packet_list = assembly_packet_creator( - COMMAND, os.urandom(500), tamper_cmd_hash=True - ) + packet = Packet(LOCAL_ID, ORIGIN_CONTACT_HEADER, COMMAND, self.contact, self.settings) + packet_list = assembly_packet_creator(COMMAND, os.urandom(500), tamper_cmd_hash=True) for p in packet_list: packet.add_packet(p) # Test - self.assert_se( - "Error: Received an invalid command.", packet.assemble_command_packet - ) + self.assert_se("Error: Received an invalid command.", packet.assemble_command_packet) self.assertEqual(packet.log_masking_ctr, 0) - def test_long_command_compression_error_raises_fr(self) -> None: + def test_long_command_compression_error_raises_se(self) -> None: # Setup - packet = Packet( - LOCAL_ID, ORIGIN_CONTACT_HEADER, COMMAND, self.contact, self.settings - ) - packet_list = assembly_packet_creator( - COMMAND, os.urandom(500), tamper_compression=True - ) + packet = Packet(LOCAL_ID, ORIGIN_CONTACT_HEADER, COMMAND, self.contact, self.settings) + packet_list = assembly_packet_creator(COMMAND, os.urandom(500), tamper_compression=True) for p in packet_list: packet.add_packet(p) # Test - self.assert_se( - "Error: Decompression of command failed.", packet.assemble_command_packet - ) + self.assert_se("Error: Decompression of command failed.", packet.assemble_command_packet) self.assertEqual(packet.log_masking_ctr, 0) class TestPacketList(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.contact_list = ContactList(nicks=["Alice", "Bob"]) - self.settings = Settings() - self.onion_pub_key = nick_to_pub_key("Alice") - packet = Packet( - self.onion_pub_key, - ORIGIN_CONTACT_HEADER, - MESSAGE, - self.contact_list.get_contact_by_address_or_nick("Alice"), - self.settings, - ) + self.contact_list = ContactList(nicks=['Alice', 'Bob']) + self.settings = Settings() + self.onion_pub_key = nick_to_pub_key('Alice') + packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, MESSAGE, + self.contact_list.get_contact_by_address_or_nick('Alice'), self.settings) - self.packet_list = PacketList(self.settings, self.contact_list) + self.packet_list = PacketList(self.settings, self.contact_list) self.packet_list.packets = [packet] def test_packet_list_iterates_over_contact_objects(self) -> None: @@ -629,30 +436,30 @@ class TestPacketList(unittest.TestCase): self.assertEqual(len(self.packet_list), 1) def test_has_packet(self) -> None: - self.assertTrue( - self.packet_list.has_packet( - self.onion_pub_key, ORIGIN_CONTACT_HEADER, MESSAGE - ) - ) - self.assertFalse( - self.packet_list.has_packet(self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE) - ) + self.assertTrue(self.packet_list.has_packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, MESSAGE)) + self.assertFalse(self.packet_list.has_packet(self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE)) def test_get_packet(self) -> None: - packet = self.packet_list.get_packet( - self.onion_pub_key, ORIGIN_CONTACT_HEADER, MESSAGE - ) + packet = self.packet_list.get_packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, MESSAGE) self.assertEqual(packet.onion_pub_key, self.onion_pub_key) self.assertEqual(packet.origin, ORIGIN_CONTACT_HEADER) self.assertEqual(packet.type, MESSAGE) - packet = self.packet_list.get_packet( - self.onion_pub_key, ORIGIN_CONTACT_HEADER, MESSAGE - ) + packet = 
self.packet_list.get_packet(nick_to_pub_key('Bob'), ORIGIN_CONTACT_HEADER, MESSAGE) + self.assertEqual(packet.onion_pub_key, nick_to_pub_key('Bob')) + self.assertEqual(packet.origin, ORIGIN_CONTACT_HEADER) + self.assertEqual(packet.type, MESSAGE) + + packet = self.packet_list.get_packet(nick_to_pub_key('Charlie'), ORIGIN_CONTACT_HEADER, MESSAGE, log_access=True) + self.assertEqual(packet.onion_pub_key, nick_to_pub_key('Charlie')) + self.assertEqual(packet.origin, ORIGIN_CONTACT_HEADER) + self.assertEqual(packet.type, MESSAGE) + + packet = self.packet_list.get_packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, MESSAGE) self.assertEqual(packet.onion_pub_key, self.onion_pub_key) self.assertEqual(packet.origin, ORIGIN_CONTACT_HEADER) self.assertEqual(packet.type, MESSAGE) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/receiver/test_receiver_loop.py b/tests/receiver/test_receiver_loop.py index 3c50f43..ddb1548 100644 --- a/tests/receiver/test_receiver_loop.py +++ b/tests/receiver/test_receiver_loop.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -23,60 +23,48 @@ import threading import time import unittest -from datetime import datetime +from datetime import datetime from multiprocessing import Queue -from src.common.encoding import int_to_bytes +from src.common.encoding import int_to_bytes from src.common.reed_solomon import RSCodec -from src.common.statics import ( - COMMAND_DATAGRAM_HEADER, - FILE_DATAGRAM_HEADER, - GATEWAY_QUEUE, - LOCAL_KEY_DATAGRAM_HEADER, - MESSAGE_DATAGRAM_HEADER, - ONION_SERVICE_PUBLIC_KEY_LENGTH, -) +from src.common.statics import (COMMAND_DATAGRAM_HEADER, FILE_DATAGRAM_HEADER, GATEWAY_QUEUE, + LOCAL_KEY_DATAGRAM_HEADER, MESSAGE_DATAGRAM_HEADER, + ONION_SERVICE_PUBLIC_KEY_LENGTH) from src.receiver.receiver_loop import receiver_loop from tests.mock_classes import Gateway -from tests.utils import tear_queue +from tests.utils import tear_queue class TestReceiverLoop(unittest.TestCase): + def test_receiver_loop(self) -> None: # Setup gateway = Gateway(local_test=False) - rs = RSCodec(2 * gateway.settings.serial_error_correction) - queues = { - MESSAGE_DATAGRAM_HEADER: Queue(), - FILE_DATAGRAM_HEADER: Queue(), - COMMAND_DATAGRAM_HEADER: Queue(), - LOCAL_KEY_DATAGRAM_HEADER: Queue(), - } + rs = RSCodec(2 * gateway.settings.serial_error_correction) + queues = {MESSAGE_DATAGRAM_HEADER: Queue(), + FILE_DATAGRAM_HEADER: Queue(), + COMMAND_DATAGRAM_HEADER: Queue(), + LOCAL_KEY_DATAGRAM_HEADER: Queue()} all_q = dict(queues) all_q.update({GATEWAY_QUEUE: Queue()}) - ts = datetime.now() - ts_bytes = int_to_bytes(int(ts.strftime("%Y%m%d%H%M%S%f")[:-4])) + ts = datetime.now() + ts_bytes = int_to_bytes(int(ts.strftime('%Y%m%d%H%M%S%f')[:-4])) for key in queues: - packet = key + ts_bytes + bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH) - encoded = rs.encode(packet) - broken_p = ( - key - + bytes.fromhex("df9005313af4136d") - + bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH) - ) - broken_p += rs.encode(b"a") + packet = key + ts_bytes + bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH) + encoded = rs.encode(packet) + broken_p = key + bytes.fromhex('df9005313af4136d') + bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH) + broken_p += rs.encode(b'a') def queue_delayer() -> None: """Place datagrams into queue after delay.""" time.sleep(0.01) - all_q[GATEWAY_QUEUE].put( - (datetime.now(), rs.encode(8 * b"1" + b"undecodable")) - ) + 
all_q[GATEWAY_QUEUE].put((datetime.now(), rs.encode(8 * b'1' + b'undecodable'))) all_q[GATEWAY_QUEUE].put((datetime.now(), broken_p)) all_q[GATEWAY_QUEUE].put((datetime.now(), encoded)) @@ -91,5 +79,5 @@ class TestReceiverLoop(unittest.TestCase): tear_queue(queues[key]) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/receiver/test_windows.py b/tests/receiver/test_windows.py index 545e6ec..0d17a07 100644 --- a/tests/receiver/test_windows.py +++ b/tests/receiver/test_windows.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -22,70 +22,38 @@ along with TFC. If not, see . import unittest from datetime import datetime +from typing import Any from unittest import mock -from src.common.statics import ( - BOLD_ON, - CLEAR_ENTIRE_LINE, - CLEAR_ENTIRE_SCREEN, - CURSOR_LEFT_UP_CORNER, - CURSOR_UP_ONE_LINE, - FILE, - GROUP_ID_LENGTH, - LOCAL_ID, - NORMAL_TEXT, - ONION_SERVICE_PUBLIC_KEY_LENGTH, - ORIGIN_CONTACT_HEADER, - ORIGIN_USER_HEADER, - WIN_TYPE_COMMAND, - WIN_TYPE_CONTACT, - WIN_TYPE_FILE, - WIN_TYPE_GROUP, - WIN_UID_COMMAND, - WIN_UID_FILE, -) +from src.common.statics import (BOLD_ON, CLEAR_ENTIRE_LINE, CLEAR_ENTIRE_SCREEN, CURSOR_LEFT_UP_CORNER, + CURSOR_UP_ONE_LINE, FILE, GROUP_ID_LENGTH, LOCAL_ID, NORMAL_TEXT, + ONION_SERVICE_PUBLIC_KEY_LENGTH, ORIGIN_CONTACT_HEADER, ORIGIN_USER_HEADER, + WIN_TYPE_COMMAND, WIN_TYPE_CONTACT, WIN_TYPE_FILE, WIN_TYPE_GROUP, WIN_UID_COMMAND, + WIN_UID_FILE) from src.receiver.windows import RxWindow, WindowList -from tests.mock_classes import ( - create_contact, - ContactList, - GroupList, - Packet, - PacketList, - Settings, -) -from tests.utils import ( - group_name_to_group_id, - nick_to_pub_key, - nick_to_short_address, - TFCTestCase, -) +from tests.mock_classes import create_contact, ContactList, GroupList, Packet, PacketList, Settings +from tests.utils import group_name_to_group_id, nick_to_pub_key, nick_to_short_address, TFCTestCase class TestRxWindow(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.contact_list = ContactList(nicks=["Alice", "Bob", "Charlie", LOCAL_ID]) - self.group_list = GroupList(groups=["test_group", "test_group2"]) - self.settings = Settings() - self.packet_list = PacketList() - self.ts = datetime.fromtimestamp(1502750000) - self.time = self.ts.strftime("%H:%M:%S.%f")[:-4] + self.contact_list = ContactList(nicks=['Alice', 'Bob', 'Charlie', LOCAL_ID]) + self.group_list = GroupList(groups=['test_group', 'test_group2']) + self.settings = Settings() + self.packet_list = PacketList() + self.ts = datetime.fromtimestamp(1502750000) + self.time = self.ts.strftime('%H:%M:%S.%f')[:-4] - group = self.group_list.get_group("test_group") - group.members = list( - map( - self.contact_list.get_contact_by_address_or_nick, - ["Alice", "Bob", "Charlie"], - ) - ) + group = self.group_list.get_group('test_group') + group.members = list(map(self.contact_list.get_contact_by_address_or_nick, ['Alice', 'Bob', 'Charlie'])) def create_window(self, uid: bytes): """Create new RxWindow object.""" - return RxWindow( - uid, self.contact_list, self.group_list, self.settings, self.packet_list - ) + return RxWindow(uid, self.contact_list, self.group_list, self.settings, self.packet_list) def test_command_window_creation(self) -> None: window = self.create_window(WIN_UID_COMMAND) @@ -99,608 +67,346 @@ class TestRxWindow(TFCTestCase): def 
test_contact_window_creation(self) -> None: window = self.create_window(nick_to_pub_key("Alice")) self.assertEqual(window.type, WIN_TYPE_CONTACT) - self.assertEqual( - window.window_contacts[0].onion_pub_key, nick_to_pub_key("Alice") - ) - self.assertEqual(window.name, "Alice") + self.assertEqual(window.window_contacts[0].onion_pub_key, nick_to_pub_key("Alice")) + self.assertEqual(window.name, 'Alice') def test_group_window_creation(self) -> None: - window = self.create_window(group_name_to_group_id("test_group")) + window = self.create_window(group_name_to_group_id('test_group')) self.assertEqual(window.type, WIN_TYPE_GROUP) - self.assertEqual( - window.window_contacts[0].onion_pub_key, nick_to_pub_key("Alice") - ) - self.assertEqual(window.name, "test_group") + self.assertEqual(window.window_contacts[0].onion_pub_key, nick_to_pub_key("Alice")) + self.assertEqual(window.name, 'test_group') - def test_invalid_uid_raises_fr(self) -> None: - self.assert_se( - "Invalid window 'mfqwcylbmfqwcylbmfqwcylbmfqwcylbmfqwcylbmfqwcylbmfqwbfad'.", - self.create_window, - ONION_SERVICE_PUBLIC_KEY_LENGTH * b"a", - ) + def test_invalid_uid_raises_se(self) -> None: + self.assert_se("Invalid window 'mfqwcylbmfqwcylbmfqwcylbmfqwcylbmfqwcylbmfqwcylbmfqwbfad'.", + self.create_window, ONION_SERVICE_PUBLIC_KEY_LENGTH * b'a') - self.assert_se( - "Invalid window '2dnAMoWNfTXAJ'.", - self.create_window, - GROUP_ID_LENGTH * b"a", - ) + self.assert_se("Invalid window '2dnAMoWNfTXAJ'.", + self.create_window, GROUP_ID_LENGTH * b'a') - self.assert_se( - "Invalid window ''.", self.create_window, b"bad_uid" - ) + self.assert_se("Invalid window ''.", + self.create_window, b'bad_uid') def test_window_iterates_over_message_tuples(self) -> None: # Setup - window = self.create_window(nick_to_pub_key("Alice")) - window.message_log = 5 * [ - ( - datetime.now(), - "Lorem ipsum", - nick_to_pub_key("Alice"), - ORIGIN_CONTACT_HEADER, - False, - False, - ) - ] + window = self.create_window(nick_to_pub_key("Alice")) + window.message_log = 5*[(datetime.now(), 'Lorem ipsum', nick_to_pub_key("Alice"), + ORIGIN_CONTACT_HEADER, False, False)] # Test for mt in window: - self.assertEqual( - mt[1:], - ( - "Lorem ipsum", - nick_to_pub_key("Alice"), - ORIGIN_CONTACT_HEADER, - False, - False, - ), - ) + self.assertEqual(mt[1:], + ("Lorem ipsum", nick_to_pub_key("Alice"), ORIGIN_CONTACT_HEADER, False, False)) def test_len_returns_number_of_messages_in_window(self) -> None: # Setup - window = self.create_window(nick_to_pub_key("Alice")) - window.message_log = 5 * [ - ( - datetime.now(), - "Lorem ipsum", - nick_to_pub_key("Alice"), - ORIGIN_CONTACT_HEADER, - False, - False, - ) - ] + window = self.create_window(nick_to_pub_key("Alice")) + window.message_log = 5*[(datetime.now(), "Lorem ipsum", nick_to_pub_key("Alice"), + ORIGIN_CONTACT_HEADER, False, False)] # Test self.assertEqual(len(window), 5) def test_remove_contacts(self) -> None: # Setup - window = self.create_window(group_name_to_group_id("test_group")) + window = self.create_window(group_name_to_group_id('test_group')) # Test self.assertEqual(len(window.window_contacts), 3) - self.assertIsNone( - window.remove_contacts( - [ - nick_to_pub_key("Alice"), - nick_to_pub_key("Bob"), - nick_to_pub_key("DoesNotExist"), - ] - ) - ) + self.assertIsNone(window.remove_contacts([nick_to_pub_key("Alice"), + nick_to_pub_key("Bob"), + nick_to_pub_key("DoesNotExist")])) self.assertEqual(len(window.window_contacts), 1) def test_add_contacts(self) -> None: # Setup - window = 
self.create_window(group_name_to_group_id("test_group")) - window.window_contacts = [ - self.contact_list.get_contact_by_address_or_nick("Alice") - ] + window = self.create_window(group_name_to_group_id('test_group')) + window.window_contacts = [self.contact_list.get_contact_by_address_or_nick('Alice')] # Test - self.assertIsNone( - window.add_contacts( - [ - nick_to_pub_key("Alice"), - nick_to_pub_key("Bob"), - nick_to_pub_key("DoesNotExist"), - ] - ) - ) + self.assertIsNone(window.add_contacts([nick_to_pub_key("Alice"), + nick_to_pub_key("Bob"), + nick_to_pub_key("DoesNotExist")])) self.assertEqual(len(window.window_contacts), 2) def test_reset_window(self) -> None: # Setup - window = self.create_window(group_name_to_group_id("test_group")) - window.message_log = [ - ( - datetime.now(), - "Hi everybody", - nick_to_pub_key("Alice"), - ORIGIN_USER_HEADER, - False, - False, - ), - ( - datetime.now(), - "Hi David", - nick_to_pub_key("Alice"), - ORIGIN_CONTACT_HEADER, - False, - False, - ), - ( - datetime.now(), - "Hi David", - nick_to_pub_key("Bob"), - ORIGIN_CONTACT_HEADER, - False, - False, - ), - ] + window = self.create_window(group_name_to_group_id('test_group')) + window.message_log = \ + [(datetime.now(), "Hi everybody", nick_to_pub_key("Alice"), ORIGIN_USER_HEADER, False, False), + (datetime.now(), "Hi David", nick_to_pub_key("Alice"), ORIGIN_CONTACT_HEADER, False, False), + (datetime.now(), "Hi David", nick_to_pub_key("Bob"), ORIGIN_CONTACT_HEADER, False, False)] # Test self.assertIsNone(window.reset_window()) self.assertEqual(len(window), 0) def test_has_contact(self) -> None: - window = self.create_window(group_name_to_group_id("test_group")) + window = self.create_window(group_name_to_group_id('test_group')) self.assertTrue(window.has_contact(nick_to_pub_key("Alice"))) self.assertFalse(window.has_contact(nick_to_pub_key("DoesNotExist"))) def test_create_handle_dict(self) -> None: # Setup - window = self.create_window(group_name_to_group_id("test_group")) - message_log = [ - ( - datetime.now(), - "Lorem ipsum", - nick_to_pub_key("Alice"), - ORIGIN_CONTACT_HEADER, - False, - False, - ), - ( - datetime.now(), - "Lorem ipsum", - nick_to_pub_key("Bob"), - ORIGIN_USER_HEADER, - False, - False, - ), - ( - datetime.now(), - "Lorem ipsum", - nick_to_pub_key("Charlie"), - ORIGIN_CONTACT_HEADER, - False, - False, - ), - ( - datetime.now(), - "Lorem ipsum", - nick_to_pub_key("Charlie"), - ORIGIN_CONTACT_HEADER, - True, - False, - ), - ( - datetime.now(), - "Lorem ipsum", - nick_to_pub_key("Charlie"), - ORIGIN_CONTACT_HEADER, - False, - False, - ), - ( - datetime.now(), - "Lorem ipsum", - nick_to_pub_key("David"), - ORIGIN_CONTACT_HEADER, - False, - False, - ), - ( - datetime.now(), - "Lorem ipsum", - nick_to_pub_key("Eric"), - ORIGIN_CONTACT_HEADER, - False, - False, - ), - ] + window = self.create_window(group_name_to_group_id('test_group')) + message_log = [(datetime.now(), "Lorem ipsum", nick_to_pub_key("Alice"), ORIGIN_CONTACT_HEADER, False, False), + (datetime.now(), "Lorem ipsum", nick_to_pub_key("Bob"), ORIGIN_USER_HEADER, False, False), + (datetime.now(), "Lorem ipsum", nick_to_pub_key("Charlie"), ORIGIN_CONTACT_HEADER, False, False), + (datetime.now(), "Lorem ipsum", nick_to_pub_key("Charlie"), ORIGIN_CONTACT_HEADER, True, False), + (datetime.now(), "Lorem ipsum", nick_to_pub_key("Charlie"), ORIGIN_CONTACT_HEADER, False, False), + (datetime.now(), "Lorem ipsum", nick_to_pub_key("David"), ORIGIN_CONTACT_HEADER, False, False), + (datetime.now(), "Lorem ipsum", nick_to_pub_key("Eric"), 
ORIGIN_CONTACT_HEADER, False, False)] # Test self.assertIsNone(window.create_handle_dict(message_log)) - self.assertEqual( - window.handle_dict, - { - nick_to_pub_key("Alice"): "Alice", - nick_to_pub_key("Bob"): "Bob", - nick_to_pub_key("Charlie"): "Charlie", - nick_to_pub_key("David"): nick_to_short_address("David"), - nick_to_pub_key("Eric"): nick_to_short_address("Eric"), - }, - ) + self.assertEqual(window.handle_dict, {nick_to_pub_key("Alice"): 'Alice', + nick_to_pub_key("Bob"): 'Bob', + nick_to_pub_key("Charlie"): 'Charlie', + nick_to_pub_key("David"): nick_to_short_address("David"), + nick_to_pub_key("Eric"): nick_to_short_address("Eric")}) def test_get_command_handle(self) -> None: # Setup - window = self.create_window(WIN_UID_COMMAND) + window = self.create_window(WIN_UID_COMMAND) window.is_active = True # Test - self.assertEqual( - window.get_handle(self.ts, WIN_UID_COMMAND, ORIGIN_USER_HEADER), - f"{self.time} -!- ", - ) + self.assertEqual(window.get_handle(self.ts, WIN_UID_COMMAND, ORIGIN_USER_HEADER), f"{self.time} -!- ") def test_get_contact_handle(self) -> None: # Setup - window = self.create_window(nick_to_pub_key("Alice")) - window.is_active = True - window.handle_dict = {nick_to_pub_key("Alice"): "Alice"} + window = self.create_window(nick_to_pub_key("Alice")) + window.is_active = True + window.handle_dict = {nick_to_pub_key("Alice"): 'Alice'} # Test - self.assertEqual( - window.get_handle(self.ts, nick_to_pub_key("Alice"), ORIGIN_USER_HEADER), - f"{self.time} Me: ", - ) - self.assertEqual( - window.get_handle(self.ts, nick_to_pub_key("Alice"), ORIGIN_CONTACT_HEADER), - f"{self.time} Alice: ", - ) + self.assertEqual(window.get_handle(self.ts, nick_to_pub_key("Alice"), ORIGIN_USER_HEADER), + f"{self.time} Me: ") + self.assertEqual(window.get_handle(self.ts, nick_to_pub_key("Alice"), ORIGIN_CONTACT_HEADER), + f"{self.time} Alice: ") window.is_active = False - self.assertEqual( - window.get_handle(self.ts, nick_to_pub_key("Alice"), ORIGIN_USER_HEADER), - f"{self.time} Me (private message): ", - ) - self.assertEqual( - window.get_handle(self.ts, nick_to_pub_key("Alice"), ORIGIN_CONTACT_HEADER), - f"{self.time} Alice (private message): ", - ) + self.assertEqual(window.get_handle(self.ts, nick_to_pub_key("Alice"), ORIGIN_USER_HEADER), + f"{self.time} Me (private message): ") + self.assertEqual(window.get_handle(self.ts, nick_to_pub_key("Alice"), ORIGIN_CONTACT_HEADER), + f"{self.time} Alice (private message): ") def test_get_group_contact_handle(self) -> None: # Setup - window = self.create_window(group_name_to_group_id("test_group")) - window.is_active = True - window.handle_dict = { - nick_to_pub_key("Alice"): "Alice", - nick_to_pub_key("Charlie"): "Charlie", - nick_to_pub_key("David"): nick_to_short_address("David"), - nick_to_pub_key("Eric"): nick_to_short_address("Eric"), - } + window = self.create_window(group_name_to_group_id('test_group')) + window.is_active = True + window.handle_dict = {nick_to_pub_key("Alice"): 'Alice', + nick_to_pub_key("Charlie"): 'Charlie', + nick_to_pub_key("David"): nick_to_short_address("David"), + nick_to_pub_key("Eric"): nick_to_short_address("Eric")} # Test - self.assertEqual( - window.get_handle(self.ts, nick_to_pub_key("Alice"), ORIGIN_USER_HEADER), - f"{self.time} Me: ", - ) - self.assertEqual( - window.get_handle( - self.ts, nick_to_pub_key("Charlie"), ORIGIN_CONTACT_HEADER - ), - f"{self.time} Charlie: ", - ) + self.assertEqual(window.get_handle(self.ts, nick_to_pub_key("Alice"), ORIGIN_USER_HEADER), + f"{self.time} Me: ") + 
self.assertEqual(window.get_handle(self.ts, nick_to_pub_key("Charlie"), ORIGIN_CONTACT_HEADER), + f"{self.time} Charlie: ") window.is_active = False - self.assertEqual( - window.get_handle(self.ts, nick_to_pub_key("Alice"), ORIGIN_USER_HEADER), - f"{self.time} Me (group test_group): ", - ) - self.assertEqual( - window.get_handle( - self.ts, nick_to_pub_key("Charlie"), ORIGIN_CONTACT_HEADER - ), - f"{self.time} Charlie (group test_group): ", - ) + self.assertEqual(window.get_handle(self.ts, nick_to_pub_key("Alice"), ORIGIN_USER_HEADER), + f"{self.time} Me (group test_group): ") + self.assertEqual(window.get_handle(self.ts, nick_to_pub_key("Charlie"), ORIGIN_CONTACT_HEADER), + f"{self.time} Charlie (group test_group): ") - @mock.patch("time.sleep", return_value=None) - def test_print_to_inactive_window_preview_on_short_message(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_print_to_inactive_window_preview_on_short_message(self, _: Any) -> None: # Setup - window = self.create_window(nick_to_pub_key("Alice")) - window.handle_dict = {nick_to_pub_key("Alice"): "Alice"} - window.is_active = False - window.settings = Settings(new_message_notify_preview=True) - msg_tuple = ( - self.ts, - "Hi Bob", - nick_to_pub_key("Bob"), - ORIGIN_USER_HEADER, - False, - False, - ) + window = self.create_window(nick_to_pub_key("Alice")) + window.handle_dict = {nick_to_pub_key("Alice"): 'Alice'} + window.is_active = False + window.settings = Settings(new_message_notify_preview=True) + msg_tuple = (self.ts, "Hi Bob", nick_to_pub_key("Bob"), ORIGIN_USER_HEADER, False, False) # Test - self.assert_prints( - f"{BOLD_ON}{self.time} Me (private message): {NORMAL_TEXT}" - f"Hi Bob\n{CURSOR_UP_ONE_LINE}{CLEAR_ENTIRE_LINE}", - window.print, - msg_tuple, - ) + self.assert_prints(f"{BOLD_ON}{self.time} Me (private message): {NORMAL_TEXT}" + f"Hi Bob\n{CURSOR_UP_ONE_LINE}{CLEAR_ENTIRE_LINE}", + window.print, msg_tuple) - @mock.patch("time.sleep", return_value=None) - def test_print_to_inactive_window_preview_on_long_message(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_print_to_inactive_window_preview_on_long_message(self, _: Any) -> None: # Setup - window = self.create_window(nick_to_pub_key("Alice")) - window.is_active = False - window.handle_dict = {nick_to_pub_key("Alice"): "Alice"} - window.settings = Settings(new_message_notify_preview=True) - long_message = ( - "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Quisque consequat libero et lao" - "reet egestas. Aliquam a arcu malesuada, elementum metus eget, elementum mi. Vestibulum i" - "d arcu sem. Ut sodales odio sed viverra mollis. Praesent gravida ante tellus, pellentesq" - "ue venenatis massa placerat quis. Nullam in magna porta, hendrerit sem vel, dictum ipsum" - ". Ut sagittis, ipsum ut bibendum ornare, ex lorem congue metus, vel posuere metus nulla " - "at augue." - ) - msg_tuple = ( - self.ts, - long_message, - nick_to_pub_key("Bob"), - ORIGIN_USER_HEADER, - False, - False, - ) + window = self.create_window(nick_to_pub_key("Alice")) + window.is_active = False + window.handle_dict = {nick_to_pub_key("Alice"): 'Alice'} + window.settings = Settings(new_message_notify_preview=True) + long_message = ("Lorem ipsum dolor sit amet, consectetur adipiscing elit. Quisque consequat libero et lao" + "reet egestas. Aliquam a arcu malesuada, elementum metus eget, elementum mi. Vestibulum i" + "d arcu sem. Ut sodales odio sed viverra mollis. 
Praesent gravida ante tellus, pellentesq" + "ue venenatis massa placerat quis. Nullam in magna porta, hendrerit sem vel, dictum ipsum" + ". Ut sagittis, ipsum ut bibendum ornare, ex lorem congue metus, vel posuere metus nulla " + "at augue.") + msg_tuple = (self.ts, long_message, nick_to_pub_key("Bob"), ORIGIN_USER_HEADER, False, False) # Test - self.assert_prints( - f"{BOLD_ON}{self.time} Me (private message): {NORMAL_TEXT}Lorem ipsum dolor sit " - f"amet, consectetu…\n{CURSOR_UP_ONE_LINE}{CLEAR_ENTIRE_LINE}", - window.print, - msg_tuple, - ) + self.assert_prints(f"{BOLD_ON}{self.time} Me (private message): {NORMAL_TEXT}Lorem ipsum dolor sit " + f"amet, consectetu…\n{CURSOR_UP_ONE_LINE}{CLEAR_ENTIRE_LINE}", + window.print, msg_tuple) - @mock.patch("time.sleep", return_value=None) - def test_print_to_inactive_window_preview_off(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_print_to_inactive_window_preview_off(self, _: Any) -> None: # Setup - window = self.create_window(nick_to_pub_key("Alice")) - window.is_active = False - window.handle_dict = {nick_to_pub_key("Alice"): "Alice"} - window.settings = Settings(new_message_notify_preview=False) - msg_tuple = ( - self.ts, - "Hi Bob", - nick_to_pub_key("Bob"), - ORIGIN_USER_HEADER, - False, - False, - ) + window = self.create_window(nick_to_pub_key("Alice")) + window.is_active = False + window.handle_dict = {nick_to_pub_key("Alice"): 'Alice'} + window.settings = Settings(new_message_notify_preview=False) + msg_tuple = (self.ts, "Hi Bob", nick_to_pub_key("Bob"), ORIGIN_USER_HEADER, False, False) # Test self.assert_prints( f"{BOLD_ON}{self.time} Me (private message): {NORMAL_TEXT}{BOLD_ON}1 unread message{NORMAL_TEXT}\n" - f"{CURSOR_UP_ONE_LINE}{CLEAR_ENTIRE_LINE}", - window.print, - msg_tuple, - ) + f"{CURSOR_UP_ONE_LINE}{CLEAR_ENTIRE_LINE}", window.print, msg_tuple) def test_print_to_active_window_no_date_change(self) -> None: # Setup - window = self.create_window(nick_to_pub_key("Alice")) + window = self.create_window(nick_to_pub_key("Alice")) window.previous_msg_ts = datetime.fromtimestamp(1502750000) - window.is_active = True - window.handle_dict = {nick_to_pub_key("Bob"): "Bob"} - window.settings = Settings(new_message_notify_preview=False) - msg_tuple = ( - self.ts, - "Hi Alice", - nick_to_pub_key("Bob"), - ORIGIN_CONTACT_HEADER, - False, - False, - ) + window.is_active = True + window.handle_dict = {nick_to_pub_key("Bob"): 'Bob'} + window.settings = Settings(new_message_notify_preview=False) + msg_tuple = (self.ts, "Hi Alice", nick_to_pub_key("Bob"), ORIGIN_CONTACT_HEADER, False, False) # Test - self.assert_prints( - f"{BOLD_ON}{self.time} Bob: {NORMAL_TEXT}Hi Alice\n", - window.print, - msg_tuple, - ) + self.assert_prints(f"{BOLD_ON}{self.time} Bob: {NORMAL_TEXT}Hi Alice\n", + window.print, msg_tuple) def test_print_to_active_window_with_date_change_and_whisper(self) -> None: # Setup - window = self.create_window(nick_to_pub_key("Alice")) + window = self.create_window(nick_to_pub_key("Alice")) window.previous_msg_ts = datetime.fromtimestamp(1501750000) - window.is_active = True - window.handle_dict = {nick_to_pub_key("Bob"): "Bob"} - window.settings = Settings(new_message_notify_preview=False) - msg_tuple = ( - self.ts, - "Hi Alice", - nick_to_pub_key("Bob"), - ORIGIN_CONTACT_HEADER, - True, - False, - ) - self.time = self.ts.strftime("%H:%M:%S.%f")[:-4] + window.is_active = True + window.handle_dict = {nick_to_pub_key("Bob"): 'Bob'} + window.settings = Settings(new_message_notify_preview=False) + msg_tuple = 
(self.ts, "Hi Alice", nick_to_pub_key("Bob"), ORIGIN_CONTACT_HEADER, True, False) + self.time = self.ts.strftime('%H:%M:%S.%f')[:-4] # Test - self.assert_prints( - f"""\ + self.assert_prints(f"""\ {BOLD_ON}00:00 -!- Day changed to 2017-08-15{NORMAL_TEXT} {BOLD_ON}{self.time} Bob (whisper): {NORMAL_TEXT}Hi Alice -""", - window.print, - msg_tuple, - ) +""", window.print, msg_tuple) - def test_print_to_active_window_with_date_change_and_whisper_empty_message( - self, - ) -> None: + def test_print_to_active_window_with_date_change_and_whisper_empty_message(self) -> None: # Setup - window = self.create_window(nick_to_pub_key("Alice")) + window = self.create_window(nick_to_pub_key("Alice")) window.previous_msg_ts = datetime.fromtimestamp(1501750000) - window.is_active = True - window.handle_dict = {nick_to_pub_key("Bob"): "Bob"} - window.settings = Settings(new_message_notify_preview=False) - msg_tuple = ( - self.ts, - " ", - nick_to_pub_key("Bob"), - ORIGIN_CONTACT_HEADER, - True, - False, - ) + window.is_active = True + window.handle_dict = {nick_to_pub_key("Bob"): 'Bob'} + window.settings = Settings(new_message_notify_preview=False) + msg_tuple = (self.ts, " ", nick_to_pub_key("Bob"), ORIGIN_CONTACT_HEADER, True, False) # Test - self.assert_prints( - f"""\ + self.assert_prints(f"""\ {BOLD_ON}00:00 -!- Day changed to 2017-08-15{NORMAL_TEXT} {BOLD_ON}{self.time} Bob (whisper): {NORMAL_TEXT} -""", - window.print, - msg_tuple, - ) +""", window.print, msg_tuple) - @mock.patch("time.sleep", return_value=None) - def test_print_new(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_print_new(self, _: Any) -> None: # Setup window = self.create_window(nick_to_pub_key("Alice")) # Test - self.assertIsNone( - window.add_new( - self.ts, - "Hi Alice", - nick_to_pub_key("Bob"), - ORIGIN_CONTACT_HEADER, - output=True, - ) - ) + self.assertIsNone(window.add_new(self.ts, "Hi Alice", nick_to_pub_key("Bob"), + ORIGIN_CONTACT_HEADER, output=True)) self.assertEqual(len(window.message_log), 1) - self.assertEqual(window.handle_dict[nick_to_pub_key("Bob")], "Bob") + self.assertEqual(window.handle_dict[nick_to_pub_key("Bob")], 'Bob') def test_redraw_message_window(self) -> None: # Setup - window = self.create_window(nick_to_pub_key("Alice")) - window.is_active = True - window.message_log = [ - ( - self.ts, - "Hi Alice", - nick_to_pub_key("Bob"), - ORIGIN_CONTACT_HEADER, - False, - False, - ) - ] + window = self.create_window(nick_to_pub_key("Alice")) + window.is_active = True + window.message_log = [(self.ts, "Hi Alice", nick_to_pub_key("Bob"), ORIGIN_CONTACT_HEADER, False, False)] window.unread_messages = 1 # Test - self.assert_prints( - f"""\ + self.assert_prints(f"""\ {CLEAR_ENTIRE_SCREEN}{CURSOR_LEFT_UP_CORNER} ------------------------------- Unread Messages -------------------------------- {BOLD_ON}{self.time} Bob: {NORMAL_TEXT}Hi Alice -""", - window.redraw, - ) +""", window.redraw) self.assertEqual(window.unread_messages, 0) def test_redraw_empty_window(self) -> None: # Setup - window = self.create_window(nick_to_pub_key("Alice")) - window.is_active = True + window = self.create_window(nick_to_pub_key("Alice")) + window.is_active = True window.message_log = [] # Test - self.assert_prints( - f"""\ + self.assert_prints(f"""\ {CLEAR_ENTIRE_SCREEN}{CURSOR_LEFT_UP_CORNER} {BOLD_ON} This window for Alice is currently empty. 
{NORMAL_TEXT}\n -""", - window.redraw, - ) +""", window.redraw) - @mock.patch("time.sleep", return_value=None) - def test_redraw_file_win(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_redraw_file_win(self, _: Any) -> None: # Setup - self.packet_list.packets = [ - Packet( - type=FILE, - name="testfile.txt", - assembly_pt_list=5 * [b"a"], - packets=10, - size="100.0KB", - contact=create_contact("Bob"), - ), - Packet( - type=FILE, - name="testfile2.txt", - assembly_pt_list=7 * [b"a"], - packets=100, - size="15.0KB", - contact=create_contact("Charlie"), - ), - ] + self.packet_list.packets = [Packet(type=FILE, + name='testfile.txt', + assembly_pt_list=5*[b'a'], + packets=10, + size="100.0KB", + contact=create_contact('Bob')), + Packet(type=FILE, + name='testfile2.txt', + assembly_pt_list=7 * [b'a'], + packets=100, + size="15.0KB", + contact=create_contact('Charlie'))] # Test window = self.create_window(WIN_UID_FILE) - self.assert_prints( - f"""\ + self.assert_prints(f"""\ File name Size Sender Complete ──────────────────────────────────────────────────────────────────────────────── testfile.txt 100.0KB Bob 50.00% testfile2.txt 15.0KB Charlie 7.00% -{6*(CURSOR_UP_ONE_LINE+CLEAR_ENTIRE_LINE)}""", - window.redraw_file_win, - ) +{6*(CURSOR_UP_ONE_LINE+CLEAR_ENTIRE_LINE)}""", window.redraw_file_win) - @mock.patch("time.sleep", return_value=None) - def test_redraw_empty_file_win(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_redraw_empty_file_win(self, _: Any) -> None: # Setup self.packet_list.packet_l = [] # Test window = self.create_window(WIN_UID_FILE) - self.assert_prints( - f"""\ + self.assert_prints(f"""\ {BOLD_ON} No file transmissions currently in progress. {NORMAL_TEXT} -{3*(CURSOR_UP_ONE_LINE+CLEAR_ENTIRE_LINE)}""", - window.redraw_file_win, - ) +{3*(CURSOR_UP_ONE_LINE+CLEAR_ENTIRE_LINE)}""", window.redraw_file_win) class TestWindowList(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.settings = Settings() - self.contact_list = ContactList(nicks=["Alice", "Bob", "Charlie", LOCAL_ID]) - self.group_list = GroupList(groups=["test_group", "test_group2"]) - self.packet_list = PacketList() + self.settings = Settings() + self.contact_list = ContactList(nicks=['Alice', 'Bob', 'Charlie', LOCAL_ID]) + self.group_list = GroupList(groups=['test_group', 'test_group2']) + self.packet_list = PacketList() - group = self.group_list.get_group("test_group") - group.members = list( - map( - self.contact_list.get_contact_by_address_or_nick, - ["Alice", "Bob", "Charlie"], - ) - ) + group = self.group_list.get_group('test_group') + group.members = list(map(self.contact_list.get_contact_by_address_or_nick, ['Alice', 'Bob', 'Charlie'])) - self.window_list = WindowList( - self.settings, self.contact_list, self.group_list, self.packet_list - ) + self.window_list = WindowList(self.settings, self.contact_list, self.group_list, self.packet_list) - def create_window(self, uid) -> RxWindow: + def create_window(self, uid: bytes) -> RxWindow: """Create new RxWindow object.""" - return RxWindow( - uid, self.contact_list, self.group_list, self.settings, self.packet_list - ) + return RxWindow(uid, self.contact_list, self.group_list, self.settings, self.packet_list) def test_active_win_is_none_if_local_key_is_not_present(self) -> None: # Setup self.contact_list.contacts = [] # Test - window_list = WindowList( - self.settings, self.contact_list, self.group_list, self.packet_list - ) + window_list = WindowList(self.settings, self.contact_list, 
self.group_list, self.packet_list) self.assertEqual(window_list.active_win, None) def test_active_win_is_command_win_if_local_key_is_present(self) -> None: @@ -719,10 +425,8 @@ class TestWindowList(TFCTestCase): def test_group_windows(self) -> None: # Setup - self.window_list.windows = [ - self.create_window(group_name_to_group_id(g)) - for g in ["test_group", "test_group2"] - ] + self.window_list.windows = [self.create_window(group_name_to_group_id(g)) for g in ['test_group', + 'test_group2']] # Test for g in self.window_list.get_group_windows(): @@ -730,144 +434,105 @@ class TestWindowList(TFCTestCase): def test_has_window(self) -> None: # Setup - self.window_list.windows = [ - self.create_window(group_name_to_group_id(g)) - for g in ["test_group", "test_group2"] - ] + self.window_list.windows = [self.create_window(group_name_to_group_id(g)) for g in ['test_group', + 'test_group2']] # Test - self.assertTrue( - self.window_list.has_window(group_name_to_group_id("test_group")) - ) - self.assertTrue( - self.window_list.has_window(group_name_to_group_id("test_group2")) - ) - self.assertFalse( - self.window_list.has_window(group_name_to_group_id("test_group3")) - ) + self.assertTrue(self.window_list.has_window(group_name_to_group_id('test_group'))) + self.assertTrue(self.window_list.has_window(group_name_to_group_id('test_group2'))) + self.assertFalse(self.window_list.has_window(group_name_to_group_id('test_group3'))) def test_remove_window(self) -> None: # Setup - self.window_list.windows = [ - self.create_window(group_name_to_group_id(g)) - for g in ["test_group", "test_group2"] - ] + self.window_list.windows = [self.create_window(group_name_to_group_id(g)) for g in ['test_group', + 'test_group2']] # Test self.assertEqual(len(self.window_list), 2) - self.assertIsNone( - self.window_list.remove_window(group_name_to_group_id("test_group3")) - ) + self.assertIsNone(self.window_list.remove_window(group_name_to_group_id('test_group3'))) self.assertEqual(len(self.window_list), 2) - self.assertIsNone( - self.window_list.remove_window(group_name_to_group_id("test_group2")) - ) + self.assertIsNone(self.window_list.remove_window(group_name_to_group_id('test_group2'))) self.assertEqual(len(self.window_list), 1) def test_select_rx_window(self) -> None: # Setup - self.window_list.windows = [ - self.create_window(group_name_to_group_id(g)) - for g in ["test_group", "test_group2"] - ] - tg_win = self.window_list.windows[0] - tg2_win = self.window_list.windows[1] - tg_win.is_active = True + self.window_list.windows = [self.create_window(group_name_to_group_id(g)) for g in ['test_group', + 'test_group2']] + tg_win = self.window_list.windows[0] + tg2_win = self.window_list.windows[1] + tg_win.is_active = True self.window_list.active_win = tg_win # Test - self.assert_prints( - f"""{CLEAR_ENTIRE_SCREEN}{CURSOR_LEFT_UP_CORNER} + self.assert_prints(f"""{CLEAR_ENTIRE_SCREEN}{CURSOR_LEFT_UP_CORNER} {BOLD_ON} This window for test_group2 is currently empty. 
{NORMAL_TEXT} -""", - self.window_list.set_active_rx_window, - group_name_to_group_id("test_group2"), - ) +""", self.window_list.set_active_rx_window, group_name_to_group_id('test_group2')) self.assertFalse(tg_win.is_active) self.assertTrue(tg2_win.is_active) - @mock.patch("time.sleep", return_value=None) - def test_select_rx_file_window(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_select_rx_file_window(self, _: Any) -> None: # Setup - self.window_list.windows = [self.create_window(WIN_UID_FILE)] - self.window_list.windows += [ - self.create_window(group_name_to_group_id(g)) - for g in ["test_group", "test_group2"] - ] - tg_win = self.window_list.get_window(group_name_to_group_id("test_group")) - tg_win.is_active = True + self.window_list.windows = [self.create_window(WIN_UID_FILE)] + self.window_list.windows += [self.create_window(group_name_to_group_id(g)) for g in ['test_group', + 'test_group2']] + tg_win = self.window_list.get_window(group_name_to_group_id('test_group')) + tg_win.is_active = True self.window_list.active_win = tg_win - self.packet_list.packets = [ - Packet( - type=FILE, - name="testfile.txt", - assembly_pt_list=5 * [b"a"], - packets=10, - size="100.0KB", - contact=create_contact("Bob"), - ) - ] + self.packet_list.packets = [Packet(type=FILE, + name='testfile.txt', + assembly_pt_list=5 * [b'a'], + packets=10, + size="100.0KB", + contact=create_contact('Bob'))] # Test - self.assert_prints( - f"""\ + self.assert_prints(f"""\ File name Size Sender Complete ──────────────────────────────────────────────────────────────────────────────── testfile.txt 100.0KB Bob 50.00% -{5*(CURSOR_UP_ONE_LINE+CLEAR_ENTIRE_LINE)}""", - self.window_list.set_active_rx_window, - WIN_UID_FILE, - ) +{5*(CURSOR_UP_ONE_LINE+CLEAR_ENTIRE_LINE)}""", self.window_list.set_active_rx_window, WIN_UID_FILE) self.assertFalse(tg_win.is_active) self.assertTrue(self.window_list.get_window(WIN_UID_FILE).is_active) + def test_refresh_file_window_check(self) -> None: + # Setup + self.window_list.active_win.uid = WIN_UID_FILE + + # Test + self.assertIsNone(self.window_list.refresh_file_window_check()) + def test_get_command_window(self) -> None: # Setup - self.window_list.windows = [ - self.create_window(uid) - for uid in [ - group_name_to_group_id("test_group"), - group_name_to_group_id("test_group2"), - WIN_UID_FILE, - WIN_UID_COMMAND, - ] - ] + self.window_list.windows = [self.create_window(uid) for uid in [group_name_to_group_id('test_group'), + group_name_to_group_id('test_group2'), + WIN_UID_FILE, + WIN_UID_COMMAND]] # Test self.assertEqual(self.window_list.get_command_window().uid, WIN_UID_COMMAND) def test_get_non_existing_window(self) -> None: # Setup - self.window_list.windows = [ - self.create_window(uid) - for uid in [ - group_name_to_group_id("test_group"), - WIN_UID_FILE, - WIN_UID_COMMAND, - ] - ] + self.window_list.windows = [self.create_window(uid) for uid in [group_name_to_group_id('test_group'), + WIN_UID_FILE, + WIN_UID_COMMAND]] # Test existing window - self.assertTrue( - self.window_list.has_window(group_name_to_group_id("test_group")) - ) - window = self.window_list.get_window(group_name_to_group_id("test_group")) - self.assertEqual(window.uid, group_name_to_group_id("test_group")) + self.assertTrue(self.window_list.has_window(group_name_to_group_id('test_group'))) + window = self.window_list.get_window( group_name_to_group_id('test_group')) + self.assertEqual(window.uid, group_name_to_group_id('test_group')) # Test non-existing window - self.assertFalse( - 
self.window_list.has_window(group_name_to_group_id("test_group2")) - ) - window2 = self.window_list.get_window(group_name_to_group_id("test_group2")) - self.assertEqual(window2.uid, group_name_to_group_id("test_group2")) - self.assertTrue( - self.window_list.has_window(group_name_to_group_id("test_group2")) - ) + self.assertFalse(self.window_list.has_window(group_name_to_group_id('test_group2'))) + window2 = self.window_list.get_window( group_name_to_group_id('test_group2')) + self.assertEqual(window2.uid, group_name_to_group_id('test_group2')) + self.assertTrue(self.window_list.has_window( group_name_to_group_id('test_group2'))) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/relay/__init__.py b/tests/relay/__init__.py index 6eb560e..833769a 100644 --- a/tests/relay/__init__.py +++ b/tests/relay/__init__.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. diff --git a/tests/relay/test_client.py b/tests/relay/test_client.py index 543f1f2..cbdda17 100644 --- a/tests/relay/test_client.py +++ b/tests/relay/test_client.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -25,67 +25,37 @@ import time import unittest from unittest import mock -from typing import Any, List +from typing import Any, List import requests -from src.common.crypto import X448 +from src.common.crypto import X448 from src.common.db_onion import pub_key_to_onion_address, pub_key_to_short_address -from src.common.statics import ( - CONTACT_MGMT_QUEUE, - CONTACT_REQ_QUEUE, - C_REQ_MGMT_QUEUE, - C_REQ_STATE_QUEUE, - DST_MESSAGE_QUEUE, - EXIT, - GROUP_ID_LENGTH, - GROUP_MGMT_QUEUE, - GROUP_MSG_EXIT_GROUP_HEADER, - GROUP_MSG_INVITE_HEADER, - GROUP_MSG_JOIN_HEADER, - GROUP_MSG_MEMBER_ADD_HEADER, - GROUP_MSG_MEMBER_REM_HEADER, - GROUP_MSG_QUEUE, - MESSAGE_DATAGRAM_HEADER, - ONION_SERVICE_PUBLIC_KEY_LENGTH, - PUBLIC_KEY_DATAGRAM_HEADER, - RP_ADD_CONTACT_HEADER, - RP_REMOVE_CONTACT_HEADER, - TFC_PUBLIC_KEY_LENGTH, - TOR_DATA_QUEUE, - UNIT_TEST_QUEUE, - URL_TOKEN_QUEUE, -) +from src.common.statics import (CONTACT_MGMT_QUEUE, CONTACT_REQ_QUEUE, C_REQ_MGMT_QUEUE, C_REQ_STATE_QUEUE, + DST_MESSAGE_QUEUE, EXIT, GROUP_ID_LENGTH, GROUP_MGMT_QUEUE, + GROUP_MSG_EXIT_GROUP_HEADER, GROUP_MSG_INVITE_HEADER, GROUP_MSG_JOIN_HEADER, + GROUP_MSG_MEMBER_ADD_HEADER, GROUP_MSG_MEMBER_REM_HEADER, GROUP_MSG_QUEUE, + MESSAGE_DATAGRAM_HEADER, ONION_SERVICE_PUBLIC_KEY_LENGTH, PUBLIC_KEY_DATAGRAM_HEADER, + RP_ADD_CONTACT_HEADER, RP_REMOVE_CONTACT_HEADER, TFC_PUBLIC_KEY_LENGTH, TOR_DATA_QUEUE, + UNIT_TEST_QUEUE, URL_TOKEN_QUEUE) -from src.relay.client import ( - c_req_manager, - client, - client_scheduler, - g_msg_manager, - get_data_loop, -) +from src.relay.client import c_req_manager, client, client_scheduler, g_msg_manager, get_data_loop from tests.mock_classes import Gateway -from tests.utils import ( - gen_queue_dict, - nick_to_onion_address, - nick_to_pub_key, - tear_queues, -) +from tests.utils import gen_queue_dict, nick_to_onion_address, nick_to_pub_key, tear_queues class TestClient(unittest.TestCase): - ut_private_key = X448.generate_private_key() - ut_public_key = X448.derive_public_key(ut_private_key) - ut = X448.shared_key(ut_private_key, ut_public_key).hex() + url_token_private_key = X448.generate_private_key() + url_token_public_key = 
X448.derive_public_key(url_token_private_key) + url_token = X448.shared_key(url_token_private_key, url_token_public_key).hex() class MockResponse(object): """Mock Response object.""" - - def __init__(self, text: str) -> None: + def __init__(self, text) -> None: """Create new MockResponse object.""" - self.text = text + self.text = text self.content = text class MockSession(object): @@ -95,10 +65,10 @@ class TestClient(unittest.TestCase): """Create new MockSession object.""" self.proxies = dict() self.timeout = None - self.url = None + self.url = None self.test_no = 0 - def get(self, url: str, timeout: int = 0, stream: bool = False): + def get(self, url, timeout=0, stream=False) -> Any: """Mock .get() method.""" self.timeout = timeout @@ -107,17 +77,15 @@ class TestClient(unittest.TestCase): if stream: (_ for _ in ()).throw(requests.exceptions.RequestException) - if url.startswith( - "http://hpcrayuxhrcy2wtpfwgwjibderrvjll6azfr4tqat3eka2m2gbb55bid.onion/" - ): + if url.startswith("http://hpcrayuxhrcy2wtpfwgwjibderrvjll6azfr4tqat3eka2m2gbb55bid.onion/"): - if not self.test_no: + if self.test_no == 0: self.test_no += 1 (_ for _ in ()).throw(requests.exceptions.RequestException) if self.test_no == 1: self.test_no += 1 - return TestClient.MockResponse("OK") + return TestClient.MockResponse('OK') # Test function recovers from RequestException. if self.test_no == 2: @@ -127,19 +95,17 @@ class TestClient(unittest.TestCase): # Test function recovers from invalid public key. if self.test_no == 3: self.test_no += 1 - return TestClient.MockResponse( - ((ONION_SERVICE_PUBLIC_KEY_LENGTH - 1) * b"a").hex() - ) + return TestClient.MockResponse(((ONION_SERVICE_PUBLIC_KEY_LENGTH-1)*b'a').hex()) # Test client prints online/offline messages. if self.test_no < 10: self.test_no += 1 - return TestClient.MockResponse("") + return TestClient.MockResponse('') # Test valid public key moves function to `get_data_loop`. 
if self.test_no == 10: self.test_no += 1 - return TestClient.MockResponse(TestClient.ut_public_key.hex()) + return TestClient.MockResponse(TestClient.url_token_public_key.hex()) @staticmethod def mock_session() -> MockSession: @@ -148,8 +114,8 @@ class TestClient(unittest.TestCase): def setUp(self) -> None: """Pre-test actions.""" - self.o_session = requests.session - self.queues = gen_queue_dict() + self.o_session = requests.session + self.queues = gen_queue_dict() requests.session = TestClient.mock_session def tearDown(self) -> None: @@ -157,48 +123,33 @@ class TestClient(unittest.TestCase): requests.session = self.o_session tear_queues(self.queues) - @mock.patch("time.sleep", return_value=None) + @mock.patch('time.sleep', return_value=None) def test_client(self, _) -> None: - onion_pub_key = nick_to_pub_key("Alice") - onion_address = nick_to_onion_address("Alice") - tor_port = "1337" - settings = Gateway() - sk = TestClient.ut_private_key - self.assertIsNone( - client( - onion_pub_key, - self.queues, - sk, - tor_port, - settings, - onion_address, - unit_test=True, - ) - ) - self.assertEqual( - self.queues[URL_TOKEN_QUEUE].get(), (onion_pub_key, TestClient.ut) - ) + onion_pub_key = nick_to_pub_key('Alice') + onion_address = nick_to_onion_address('Alice') + tor_port = '1337' + settings = Gateway() + sk = TestClient.url_token_private_key + self.assertIsNone(client(onion_pub_key, self.queues, sk, tor_port, settings, onion_address, unit_test=True)) + self.assertEqual(self.queues[URL_TOKEN_QUEUE].get(), (onion_pub_key, TestClient.url_token)) class TestGetDataLoop(unittest.TestCase): - url_token_private_key_user = X448.generate_private_key() - url_token_public_key_user = X448.derive_public_key(url_token_private_key_user) + url_token_private_key_user = X448.generate_private_key() + url_token_public_key_user = X448.derive_public_key(url_token_private_key_user) url_token_public_key_contact = X448.derive_public_key(X448.generate_private_key()) - url_token = X448.shared_key( - url_token_private_key_user, url_token_public_key_contact - ).hex() + url_token = X448.shared_key(url_token_private_key_user, url_token_public_key_contact).hex() class MockResponse(object): """Mock Response object.""" - def __init__(self) -> None: self.test_no = 0 def iter_lines(self) -> List[bytes]: """Return data depending test number.""" self.test_no += 1 - message = b"" + message = b'' # Empty message if self.test_no == 1: @@ -206,23 +157,19 @@ class TestGetDataLoop(unittest.TestCase): # Invalid message elif self.test_no == 2: - message = MESSAGE_DATAGRAM_HEADER + b"\x1f" + message = MESSAGE_DATAGRAM_HEADER + b'\x1f' # Valid message elif self.test_no == 3: - message = MESSAGE_DATAGRAM_HEADER + base64.b85encode(b"test") + b"\n" + message = MESSAGE_DATAGRAM_HEADER + base64.b85encode(b'test') + b'\n' # Invalid public key elif self.test_no == 4: - message = PUBLIC_KEY_DATAGRAM_HEADER + base64.b85encode( - (TFC_PUBLIC_KEY_LENGTH - 1) * b"\x01" - ) + message = PUBLIC_KEY_DATAGRAM_HEADER + base64.b85encode((TFC_PUBLIC_KEY_LENGTH-1) * b'\x01') # Valid public key elif self.test_no == 5: - message = PUBLIC_KEY_DATAGRAM_HEADER + base64.b85encode( - TFC_PUBLIC_KEY_LENGTH * b"\x01" - ) + message = PUBLIC_KEY_DATAGRAM_HEADER + base64.b85encode(TFC_PUBLIC_KEY_LENGTH * b'\x01') # Group management headers elif self.test_no == 6: @@ -242,13 +189,13 @@ class TestGetDataLoop(unittest.TestCase): # Invalid header elif self.test_no == 11: - message = b"\x1f" + message = b'\x1f' # RequestException (no remaining data) elif self.test_no == 12: (_ 
for _ in ()).throw(requests.exceptions.RequestException) - return message.split(b"\n") + return message.split(b'\n') class MockFileResponse(object): """MockFileResponse object.""" @@ -261,21 +208,19 @@ class TestGetDataLoop(unittest.TestCase): def __init__(self) -> None: """Create new Session object.""" - self.proxies = dict() - self.timeout = None - self.url = None - self.stream = False - self.test_no = 0 - self.response = TestGetDataLoop.MockResponse() + self.proxies = dict() + self.timeout = None + self.url = None + self.stream = False + self.test_no = 0 + self.response = TestGetDataLoop.MockResponse() self.url_token = TestGetDataLoop.url_token - self.onion_url = ( - "http://aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaam2dqd.onion" - ) + self.onion_url = 'http://aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaam2dqd.onion' def get(self, url: str, timeout: int = 0, stream: bool = False) -> Any: """Return data depending on what test is in question.""" - self.stream = stream + self.stream = stream self.timeout = timeout if url == f"{self.onion_url}/{self.url_token}/messages": @@ -292,9 +237,9 @@ class TestGetDataLoop(unittest.TestCase): elif url == f"{self.onion_url}/{self.url_token}/files": # Test file data is received - if not self.test_no: + if self.test_no == 0: self.test_no += 1 - return TestGetDataLoop.MockFileResponse(b"test") + return TestGetDataLoop.MockFileResponse(b'test') # Test function recovers from RequestException. if self.test_no > 1: @@ -307,8 +252,8 @@ class TestGetDataLoop(unittest.TestCase): def setUp(self) -> None: """Pre-test actions.""" - self.o_session = requests.session - self.queues = gen_queue_dict() + self.o_session = requests.session + self.queues = gen_queue_dict() requests.session = TestGetDataLoop.mock_session def tearDown(self) -> None: @@ -319,43 +264,24 @@ class TestGetDataLoop(unittest.TestCase): def test_get_data_loop(self) -> None: onion_pub_key = bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH) - settings = Gateway() - onion_addr = pub_key_to_onion_address(bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH)) - short_addr = pub_key_to_short_address(bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH)) - url_token = TestGetDataLoop.url_token - session = TestGetDataLoop.mock_session() + settings = Gateway() + onion_addr = pub_key_to_onion_address(bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH)) + short_addr = pub_key_to_short_address(bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH)) + url_token = TestGetDataLoop.url_token + session = TestGetDataLoop.mock_session() - self.assertIsNone( - get_data_loop( - onion_addr, - url_token, - short_addr, - onion_pub_key, - self.queues, - session, - settings, - ) - ) + self.assertIsNone(get_data_loop(onion_addr, url_token, short_addr, + onion_pub_key, self.queues, session, settings)) - self.assertIsNone( - get_data_loop( - onion_addr, - url_token, - short_addr, - onion_pub_key, - self.queues, - session, - settings, - ) - ) + self.assertIsNone(get_data_loop(onion_addr, url_token, short_addr, + onion_pub_key, self.queues, session, settings)) self.assertEqual(self.queues[DST_MESSAGE_QUEUE].qsize(), 2) # Message and file - self.assertEqual( - self.queues[GROUP_MSG_QUEUE].qsize(), 5 - ) # 5 group management messages + self.assertEqual(self.queues[GROUP_MSG_QUEUE].qsize(), 5) # 5 group management messages class TestGroupManager(unittest.TestCase): + def test_group_manager(self) -> None: queues = gen_queue_dict() @@ -365,61 +291,38 @@ class TestGroupManager(unittest.TestCase): time.sleep(0.1) # Test function recovers from incorrect group ID size - 
queues[GROUP_MSG_QUEUE].put( - ( - GROUP_MSG_EXIT_GROUP_HEADER, - bytes((GROUP_ID_LENGTH - 1)), - pub_key_to_short_address(bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH)), - ) - ) + queues[GROUP_MSG_QUEUE].put(( + GROUP_MSG_EXIT_GROUP_HEADER, + bytes((GROUP_ID_LENGTH - 1)), + pub_key_to_short_address(bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH)) + )) # Test group invite for added and removed contacts - queues[GROUP_MGMT_QUEUE].put( - ( - RP_ADD_CONTACT_HEADER, - nick_to_pub_key("Alice") + nick_to_pub_key("Bob"), - ) - ) - queues[GROUP_MGMT_QUEUE].put( - (RP_REMOVE_CONTACT_HEADER, nick_to_pub_key("Alice")) - ) + queues[GROUP_MGMT_QUEUE].put((RP_ADD_CONTACT_HEADER, nick_to_pub_key('Alice') + nick_to_pub_key('Bob'))) + queues[GROUP_MGMT_QUEUE].put((RP_REMOVE_CONTACT_HEADER, nick_to_pub_key('Alice'))) - for header in [ - GROUP_MSG_INVITE_HEADER, - GROUP_MSG_JOIN_HEADER, - GROUP_MSG_MEMBER_ADD_HEADER, - GROUP_MSG_MEMBER_REM_HEADER, - ]: + for header in [GROUP_MSG_INVITE_HEADER, GROUP_MSG_JOIN_HEADER, + GROUP_MSG_MEMBER_ADD_HEADER, GROUP_MSG_MEMBER_REM_HEADER]: queues[GROUP_MSG_QUEUE].put( - ( - header, - bytes(GROUP_ID_LENGTH) - + nick_to_pub_key("Bob") - + nick_to_pub_key("Charlie"), - pub_key_to_short_address( - bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH) - ), - ) - ) + (header, + bytes(GROUP_ID_LENGTH) + nick_to_pub_key('Bob') + nick_to_pub_key('Charlie'), + pub_key_to_short_address(bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH)) + )) queues[GROUP_MSG_QUEUE].put( - ( - GROUP_MSG_EXIT_GROUP_HEADER, - bytes(GROUP_ID_LENGTH), - pub_key_to_short_address(bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH)), - ) - ) + (GROUP_MSG_EXIT_GROUP_HEADER, + bytes(GROUP_ID_LENGTH), + pub_key_to_short_address(bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH)) + )) # Exit test time.sleep(0.2) queues[UNIT_TEST_QUEUE].put(EXIT) queues[GROUP_MSG_QUEUE].put( - ( - GROUP_MSG_EXIT_GROUP_HEADER, - bytes(GROUP_ID_LENGTH), - pub_key_to_short_address(bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH)), - ) - ) + (GROUP_MSG_EXIT_GROUP_HEADER, + bytes(GROUP_ID_LENGTH), + pub_key_to_short_address(bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH)) + )) # Test threading.Thread(target=queue_delayer).start() @@ -428,30 +331,22 @@ class TestGroupManager(unittest.TestCase): class TestClientScheduler(unittest.TestCase): + def test_client_scheduler(self) -> None: - queues = gen_queue_dict() - gateway = Gateway() + queues = gen_queue_dict() + gateway = Gateway() server_private_key = X448.generate_private_key() def queue_delayer() -> None: """Place messages to queue one at a time.""" time.sleep(0.1) - queues[TOR_DATA_QUEUE].put(("1234", nick_to_onion_address("Alice"))) + queues[TOR_DATA_QUEUE].put( + ('1234', nick_to_onion_address('Alice'))) queues[CONTACT_MGMT_QUEUE].put( - ( - RP_ADD_CONTACT_HEADER, - b"".join([nick_to_pub_key("Alice"), nick_to_pub_key("Bob")]), - True, - ) - ) + (RP_ADD_CONTACT_HEADER, b''.join([nick_to_pub_key('Alice'), nick_to_pub_key('Bob')]), True)) time.sleep(0.1) queues[CONTACT_MGMT_QUEUE].put( - ( - RP_REMOVE_CONTACT_HEADER, - b"".join([nick_to_pub_key("Alice"), nick_to_pub_key("Bob")]), - True, - ) - ) + (RP_REMOVE_CONTACT_HEADER, b''.join([nick_to_pub_key('Alice'), nick_to_pub_key('Bob')]), True)) time.sleep(0.1) queues[UNIT_TEST_QUEUE].put(EXIT) time.sleep(0.1) @@ -459,13 +354,12 @@ class TestClientScheduler(unittest.TestCase): threading.Thread(target=queue_delayer).start() - self.assertIsNone( - client_scheduler(queues, gateway, server_private_key, unit_test=True) - ) + self.assertIsNone(client_scheduler(queues, gateway, server_private_key, unit_test=True)) 
tear_queues(queues) class TestContactRequestManager(unittest.TestCase): + def test_contact_request_manager(self) -> None: queues = gen_queue_dict() @@ -474,29 +368,23 @@ class TestContactRequestManager(unittest.TestCase): """Place messages to queue one at a time.""" time.sleep(0.1) queues[C_REQ_MGMT_QUEUE].put( - ( - RP_ADD_CONTACT_HEADER, - b"".join(list(map(nick_to_pub_key, ["Alice", "Bob"]))), - ) - ) + (RP_ADD_CONTACT_HEADER, b''.join(list(map(nick_to_pub_key, ['Alice', 'Bob']))))) time.sleep(0.1) # Test that request from Alice does not appear - queues[CONTACT_REQ_QUEUE].put((nick_to_onion_address("Alice"))) + queues[CONTACT_REQ_QUEUE].put((nick_to_onion_address('Alice'))) time.sleep(0.1) # Test that request from Charlie appears - queues[CONTACT_REQ_QUEUE].put((nick_to_onion_address("Charlie"))) + queues[CONTACT_REQ_QUEUE].put((nick_to_onion_address('Charlie'))) time.sleep(0.1) # Test that another request from Charlie does not appear - queues[CONTACT_REQ_QUEUE].put((nick_to_onion_address("Charlie"))) + queues[CONTACT_REQ_QUEUE].put((nick_to_onion_address('Charlie'))) time.sleep(0.1) # Remove Alice - queues[C_REQ_MGMT_QUEUE].put( - (RP_REMOVE_CONTACT_HEADER, nick_to_pub_key("Alice")) - ) + queues[C_REQ_MGMT_QUEUE].put((RP_REMOVE_CONTACT_HEADER, nick_to_pub_key('Alice'))) time.sleep(0.1) # Load settings from queue @@ -504,17 +392,17 @@ class TestContactRequestManager(unittest.TestCase): queues[C_REQ_STATE_QUEUE].put(True) # Test that request from Alice is accepted - queues[CONTACT_REQ_QUEUE].put((nick_to_onion_address("Alice"))) + queues[CONTACT_REQ_QUEUE].put((nick_to_onion_address('Alice'))) time.sleep(0.1) # Exit test queues[UNIT_TEST_QUEUE].put(EXIT) - queues[CONTACT_REQ_QUEUE].put(nick_to_pub_key("Charlie")) + queues[CONTACT_REQ_QUEUE].put(nick_to_pub_key('Charlie')) threading.Thread(target=queue_delayer).start() self.assertIsNone(c_req_manager(queues, unit_test=True)) tear_queues(queues) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/relay/test_commands.py b/tests/relay/test_commands.py index ad006eb..1c246c9 100644 --- a/tests/relay/test_commands.py +++ b/tests/relay/test_commands.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
@@ -27,129 +27,99 @@ from unittest import mock from unittest.mock import MagicMock -from src.common.encoding import int_to_bytes -from src.common.statics import ( - CLEAR_ENTIRE_SCREEN, - CONTACT_MGMT_QUEUE, - CURSOR_LEFT_UP_CORNER, - C_REQ_MGMT_QUEUE, - C_REQ_STATE_QUEUE, - EXIT, - GROUP_MGMT_QUEUE, - LOCAL_TESTING_PACKET_DELAY, - ONION_CLOSE_QUEUE, - ONION_KEY_QUEUE, - ONION_SERVICE_PRIVATE_KEY_LENGTH, - RP_ADD_CONTACT_HEADER, - RP_REMOVE_CONTACT_HEADER, - SRC_TO_RELAY_QUEUE, - UNENCRYPTED_SCREEN_CLEAR, - WIPE, -) +from src.common.encoding import b58encode, int_to_bytes +from src.common.statics import (ACCOUNT_CHECK_QUEUE, CLEAR_ENTIRE_SCREEN, CONTACT_MGMT_QUEUE, CURSOR_LEFT_UP_CORNER, + C_REQ_MGMT_QUEUE, C_REQ_STATE_QUEUE, EXIT, GROUP_MGMT_QUEUE, + LOCAL_TESTING_PACKET_DELAY, ONION_CLOSE_QUEUE, ONION_KEY_QUEUE, + ONION_SERVICE_PRIVATE_KEY_LENGTH, ONION_SERVICE_PUBLIC_KEY_LENGTH, + PUB_KEY_CHECK_QUEUE, RP_ADD_CONTACT_HEADER, RP_REMOVE_CONTACT_HEADER, SRC_TO_RELAY_QUEUE, + TFC_PUBLIC_KEY_LENGTH, UNENCRYPTED_SCREEN_CLEAR, WIPE) -from src.relay.commands import ( - add_contact, - add_onion_data, - change_baudrate, - change_ec_ratio, - clear_windows, - exit_tfc, -) -from src.relay.commands import ( - manage_contact_req, - process_command, - race_condition_delay, - relay_command, - remove_contact, -) -from src.relay.commands import reset_windows, wipe +from src.relay.commands import add_contact, add_onion_data, change_baudrate, change_ec_ratio, clear_windows +from src.relay.commands import compare_accounts, compare_pub_keys, exit_tfc, manage_contact_req, process_command +from src.relay.commands import race_condition_delay, relay_command, remove_contact, reset_windows, wipe from tests.mock_classes import Gateway, nick_to_pub_key -from tests.utils import gen_queue_dict, tear_queues, TFCTestCase +from tests.utils import gen_queue_dict, tear_queues, TFCTestCase class TestRelayCommand(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" self.gateway = Gateway() - self.queues = gen_queue_dict() + self.queues = gen_queue_dict() self.gateway.settings.race_condition_delay = 0.0 def tearDown(self) -> None: """Post-test actions.""" tear_queues(self.queues) - @mock.patch("sys.stdin", MagicMock()) - @mock.patch("os.fdopen", MagicMock()) + @mock.patch('sys.stdin', MagicMock()) + @mock.patch('os.fdopen', MagicMock()) def test_packet_reading(self, *_) -> None: + def queue_delayer() -> None: """Place packet into queue after delay.""" time.sleep(0.1) self.queues[SRC_TO_RELAY_QUEUE].put(UNENCRYPTED_SCREEN_CLEAR) threading.Thread(target=queue_delayer).start() - self.assertIsNone( - relay_command(self.queues, self.gateway, stdin_fd=1, unit_test=True) - ) + self.assertIsNone(relay_command(self.queues, self.gateway, unit_test=True)) class TestProcessCommand(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" self.gateway = Gateway() - self.queues = gen_queue_dict() + self.queues = gen_queue_dict() def tearDown(self) -> None: """Post-test actions.""" tear_queues(self.queues) def test_invalid_key(self) -> None: - self.assert_se( - "Error: Received an invalid command.", - process_command, - b"INVALID", - self.gateway, - self.queues, - ) + self.assert_se("Error: Received an invalid command.", process_command, b'INVALID', self.gateway, self.queues) class TestRaceConditionDelay(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.gateway = Gateway(local_testing_mode=True, data_diode_sockets=True) + self.gateway = Gateway(local_testing_mode=True, + 
data_diode_sockets=True) - @mock.patch("time.sleep", return_value=None) + @mock.patch('time.sleep', return_value=None) def test_delay(self, mock_sleep) -> None: self.assertIsNone(race_condition_delay(self.gateway)) - self.assertEqual( - mock_sleep.call_args_list, - [mock.call(LOCAL_TESTING_PACKET_DELAY), mock.call(1.0)], - ) + self.assertEqual(mock_sleep.call_args_list, [mock.call(LOCAL_TESTING_PACKET_DELAY), mock.call(1.0)]) class TestClearWindows(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" self.gateway = Gateway(race_condition_delay=0.0) def test_clear_display(self) -> None: - self.assert_prints( - CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER, clear_windows, self.gateway - ) + self.assert_prints(CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER, clear_windows, self.gateway) class TestResetWindows(TFCTestCase): - @mock.patch("src.common.misc.reset_terminal", return_value=None) + + @mock.patch('os.system', return_value=None) def test_reset_display(self, _) -> None: self.gateway = Gateway(race_condition_delay=0.0) self.assertIsNone(reset_windows(self.gateway)) class TestExitTFC(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" self.gateway = Gateway(race_condition_delay=0.0) - self.queues = gen_queue_dict() + self.queues = gen_queue_dict() def tearDown(self) -> None: """Post-test actions.""" @@ -161,74 +131,62 @@ class TestExitTFC(unittest.TestCase): class TestChangeECRatio(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" self.gateway = Gateway() - def test_non_digit_value_raises_fr(self) -> None: - self.assert_se( - "Error: Received invalid EC ratio value from Transmitter Program.", - change_ec_ratio, - b"a", - self.gateway, - ) + def test_non_digit_value_raises_se(self) -> None: + self.assert_se("Error: Received invalid EC ratio value from Transmitter Program.", + change_ec_ratio, b'a', self.gateway) - def test_invalid_digit_value_raises_fr(self) -> None: - self.assert_se( - "Error: Received invalid EC ratio value from Transmitter Program.", - change_ec_ratio, - b"-1", - self.gateway, - ) + def test_invalid_digit_value_raises_se(self) -> None: + self.assert_se("Error: Received invalid EC ratio value from Transmitter Program.", + change_ec_ratio, b'-1', self.gateway) def test_change_value(self) -> None: - self.assertIsNone(change_ec_ratio(b"3", self.gateway)) + self.assertIsNone(change_ec_ratio(b'3', self.gateway)) self.assertEqual(self.gateway.settings.serial_error_correction, 3) class TestChangeBaudrate(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" self.gateway = Gateway() - def test_non_digit_value_raises_fr(self) -> None: - self.assert_se( - "Error: Received invalid baud rate value from Transmitter Program.", - change_baudrate, - b"a", - self.gateway, - ) + def test_non_digit_value_raises_se(self) -> None: + self.assert_se("Error: Received invalid baud rate value from Transmitter Program.", + change_baudrate, b'a', self.gateway) - def test_invalid_digit_value_raises_fr(self) -> None: - self.assert_se( - "Error: Received invalid baud rate value from Transmitter Program.", - change_baudrate, - b"1300", - self.gateway, - ) + def test_invalid_digit_value_raises_se(self) -> None: + self.assert_se("Error: Received invalid baud rate value from Transmitter Program.", + change_baudrate, b'1300', self.gateway) def test_change_value(self) -> None: - self.assertIsNone(change_baudrate(b"9600", self.gateway)) + self.assertIsNone(change_baudrate(b'9600', self.gateway)) self.assertEqual(self.gateway.settings.serial_baudrate, 9600) 
class TestWipe(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" self.gateway = Gateway(race_condition_delay=0.0) - self.queues = gen_queue_dict() + self.queues = gen_queue_dict() def tearDown(self) -> None: """Post-test actions.""" tear_queues(self.queues) - @mock.patch("src.common.misc.reset_terminal", return_value=None) + @mock.patch('os.system', return_value=None) def test_wipe_command(self, _) -> None: self.assertIsNone(wipe(self.gateway, self.queues)) self.assertEqual(self.queues[ONION_CLOSE_QUEUE].get(), WIPE) class TestManageContactReq(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" self.queues = gen_queue_dict() @@ -238,14 +196,15 @@ class TestManageContactReq(unittest.TestCase): tear_queues(self.queues) def test_setting_management(self) -> None: - manage_contact_req(b"\x01", self.queues) + manage_contact_req(b'\x01', self.queues) self.assertTrue(self.queues[C_REQ_STATE_QUEUE].get()) - manage_contact_req(b"\x00", self.queues) + manage_contact_req(b'\x00', self.queues) self.assertFalse(self.queues[C_REQ_STATE_QUEUE].get()) class TestAddContact(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" self.queues = gen_queue_dict() @@ -255,30 +214,20 @@ class TestAddContact(unittest.TestCase): tear_queues(self.queues) def test_add_contact(self) -> None: - command = b"".join([nick_to_pub_key("Alice"), nick_to_pub_key("Bob")]) + command = b''.join([nick_to_pub_key('Alice'), nick_to_pub_key('Bob')]) self.assertIsNone(add_contact(command, True, self.queues)) self.assertEqual(self.queues[CONTACT_MGMT_QUEUE].qsize(), 1) for q in [GROUP_MGMT_QUEUE, C_REQ_MGMT_QUEUE]: command = self.queues[q].get() - self.assertEqual( - command, - ( - RP_ADD_CONTACT_HEADER, - b"".join([nick_to_pub_key("Alice"), nick_to_pub_key("Bob")]), - ), - ) - self.assertEqual( - self.queues[CONTACT_MGMT_QUEUE].get(), - ( - RP_ADD_CONTACT_HEADER, - b"".join(list(map(nick_to_pub_key, ["Alice", "Bob"]))), - True, - ), - ) + self.assertEqual(command, + (RP_ADD_CONTACT_HEADER, b''.join([nick_to_pub_key('Alice'), nick_to_pub_key('Bob')]))) + self.assertEqual(self.queues[CONTACT_MGMT_QUEUE].get(), + (RP_ADD_CONTACT_HEADER, b''.join(list(map(nick_to_pub_key, ['Alice', 'Bob']))), True)) class TestRemContact(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" self.queues = gen_queue_dict() @@ -288,31 +237,24 @@ class TestRemContact(unittest.TestCase): tear_queues(self.queues) def test_add_contact(self) -> None: - command = b"".join([nick_to_pub_key("Alice"), nick_to_pub_key("Bob")]) + command = b''.join([nick_to_pub_key('Alice'), nick_to_pub_key('Bob')]) self.assertIsNone(remove_contact(command, self.queues)) self.assertEqual(self.queues[CONTACT_MGMT_QUEUE].qsize(), 1) - self.assertEqual( - self.queues[CONTACT_MGMT_QUEUE].get(), - ( - RP_REMOVE_CONTACT_HEADER, - b"".join([nick_to_pub_key("Alice"), nick_to_pub_key("Bob")]), - False, - ), - ) + self.assertEqual(self.queues[CONTACT_MGMT_QUEUE].get(), + (RP_REMOVE_CONTACT_HEADER, + b''.join([nick_to_pub_key('Alice'), nick_to_pub_key('Bob')]), + False) + ) for q in [GROUP_MGMT_QUEUE, C_REQ_MGMT_QUEUE]: command = self.queues[q].get() - self.assertEqual( - command, - ( - RP_REMOVE_CONTACT_HEADER, - b"".join([nick_to_pub_key("Alice"), nick_to_pub_key("Bob")]), - ), - ) + self.assertEqual(command, (RP_REMOVE_CONTACT_HEADER, + b''.join([nick_to_pub_key('Alice'), nick_to_pub_key('Bob')]))) class TestAddOnionKey(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" self.queues = gen_queue_dict() @@ 
-322,21 +264,52 @@ class TestAddOnionKey(unittest.TestCase): tear_queues(self.queues) def test_add_contact(self) -> None: - command = ( - ONION_SERVICE_PRIVATE_KEY_LENGTH * b"a" - + b"b" - + b"\x01" - + int_to_bytes(1) - + nick_to_pub_key("Alice") - + nick_to_pub_key("Bob") - ) + command = (ONION_SERVICE_PRIVATE_KEY_LENGTH * b'a' + + b'b' + + b'\x01' + + int_to_bytes(1) + + nick_to_pub_key('Alice') + + nick_to_pub_key('Bob')) self.assertIsNone(add_onion_data(command, self.queues)) self.assertEqual(self.queues[ONION_KEY_QUEUE].qsize(), 1) - self.assertEqual( - self.queues[ONION_KEY_QUEUE].get(), - (ONION_SERVICE_PRIVATE_KEY_LENGTH * b"a", b"b"), - ) + self.assertEqual(self.queues[ONION_KEY_QUEUE].get(), (ONION_SERVICE_PRIVATE_KEY_LENGTH * b'a', b'b')) -if __name__ == "__main__": +class TestCompareAccounts(unittest.TestCase): + + def setUp(self) -> None: + """Pre-test actions.""" + self.queues = gen_queue_dict() + + def tearDown(self) -> None: + """Post-test actions.""" + tear_queues(self.queues) + + def test_compare_accounts(self): + account = b58encode(TFC_PUBLIC_KEY_LENGTH*b'a').encode() + compare_accounts(account, self.queues) + self.assertEqual(self.queues[ACCOUNT_CHECK_QUEUE].get(), account.decode()) + + +class TestComparePubKeys(unittest.TestCase): + + def setUp(self) -> None: + """Pre-test actions.""" + self.queues = gen_queue_dict() + + def tearDown(self) -> None: + """Post-test actions.""" + tear_queues(self.queues) + + def test_compare_pub_keys(self): + # Setup + onion_pub_key = ONION_SERVICE_PUBLIC_KEY_LENGTH * b'a' + invalid_pub_key = b58encode(TFC_PUBLIC_KEY_LENGTH * b'a').encode() + + # Test + compare_pub_keys(onion_pub_key + invalid_pub_key, self.queues) + self.assertEqual(self.queues[PUB_KEY_CHECK_QUEUE].get(), (onion_pub_key, invalid_pub_key)) + + +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/relay/test_diffs.py b/tests/relay/test_diffs.py new file mode 100644 index 0000000..192dbf4 --- /dev/null +++ b/tests/relay/test_diffs.py @@ -0,0 +1,178 @@ +#!/usr/bin/env python3.7 +# -*- coding: utf-8 -*- + +""" +TFC - Onion-routed, endpoint secure messaging system +Copyright (C) 2013-2020 Markus Ottela + +This file is part of TFC. + +TFC is free software: you can redistribute it and/or modify it under the terms +of the GNU General Public License as published by the Free Software Foundation, +either version 3 of the License, or (at your option) any later version. + +TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; +without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR +PURPOSE. See the GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with TFC. If not, see . 
+""" + +import time +import threading +import unittest + +from typing import Any +from unittest import mock +from unittest.mock import MagicMock + +from src.common.encoding import b58encode +from src.common.statics import (ACCOUNT_CHECK_QUEUE, ACCOUNT_SEND_QUEUE, GUI_INPUT_QUEUE, PUB_KEY_CHECK_QUEUE, + PUB_KEY_SEND_QUEUE, TFC_PUBLIC_KEY_LENGTH, USER_ACCOUNT_QUEUE) + +from src.relay.diffs import account_checker, GetAccountFromUser, pub_key_checker, show_value_diffs + +from tests.utils import gen_queue_dict, nick_to_pub_key, nick_to_onion_address, tear_queues, TFCTestCase + + +class TestGetAccountFromUser(unittest.TestCase): + + def setUp(self) -> None: + """Pre-test actions.""" + self.queues = gen_queue_dict() + + def tearDown(self) -> None: + """Post-test actions.""" + tear_queues(self.queues) + + @mock.patch('tkinter.Tk', MagicMock()) + @mock.patch('tkinter.Entry.get', side_effect=[nick_to_onion_address('Alice'), + nick_to_onion_address('Bob')]) + def test_input(self, *_: Any) -> None: + self.queue = self.queues[GUI_INPUT_QUEUE] + app = GetAccountFromUser(self.queue, nick_to_onion_address('Alice')) + self.assertIsNone(app.evaluate_account()) + self.assertIsNone(app.evaluate_account()) + self.assertIsNone(app.dismiss_window()) + self.assertEqual(self.queue.get(), nick_to_onion_address('Bob')) + + +class TestAccountChecker(unittest.TestCase): + + + def setUp(self) -> None: + """Pre-test actions.""" + self.queues = gen_queue_dict() + + def tearDown(self) -> None: + """Post-test actions.""" + tear_queues(self.queues) + + @mock.patch('src.relay.diffs.GetAccountFromUser', return_value=MagicMock(return_value=None)) + def test_account_checker(self, *_: Any) -> None: + # Setup + user_account = b58encode(nick_to_pub_key('Alice')) + account = b58encode(nick_to_pub_key('Bob')) + unknown_account = b58encode(nick_to_pub_key('Charlie')) + invalid_account1 = account[:-1] + 'c' + invalid_account2 = unknown_account[:-1] + 'c' + + def queue_delayer() -> None: + """Place messages to queue one at a time.""" + time.sleep(0.05) + self.queues[USER_ACCOUNT_QUEUE].put(user_account) + threading.Thread(target=queue_delayer).start() + + self.queues[GUI_INPUT_QUEUE].put(unknown_account) + self.queues[ACCOUNT_SEND_QUEUE].put(invalid_account1) + self.queues[ACCOUNT_CHECK_QUEUE].put(account) + self.queues[ACCOUNT_CHECK_QUEUE].put(invalid_account2) + + # Test + with mock.patch('time.sleep', lambda _: None): + self.assertIsNone(account_checker(self.queues, stdin_fd=1, unit_test=True)) + + +class TestPubKeyChecker(unittest.TestCase): + + def setUp(self) -> None: + """Pre-test actions.""" + self.queues = gen_queue_dict() + + def tearDown(self) -> None: + """Post-test actions.""" + tear_queues(self.queues) + + @mock.patch('shutil.get_terminal_size', return_value=[200, 200]) + def test_pub_key_checker(self, _: Any) -> None: + # Setup + public_key = TFC_PUBLIC_KEY_LENGTH*b'a' + invalid_public_key = b58encode(public_key, public_key=True)[:-1] + 'a' + account = nick_to_pub_key('Bob') + + for local_test in [True, False]: + self.queues[PUB_KEY_SEND_QUEUE].put((account, public_key)) + self.queues[PUB_KEY_CHECK_QUEUE].put((account, invalid_public_key.encode())) + + # Test + self.assertIsNone(pub_key_checker(self.queues, local_test=local_test, unit_test=True)) + self.assertIsNone(pub_key_checker(self.queues, local_test=local_test, unit_test=True)) + + + +class TestShowValueDiffs(TFCTestCase): + + @mock.patch('shutil.get_terminal_size', return_value=[110, 110]) + def test_show_public_key_diffs(self, _: Any) -> None: + + 
self.assert_prints("""\ + ┌──────────────────────────────────────────────────────────────────────────────────────┐ + │ Source Computer received an invalid public key. │ + │ See arrows below that point to correct characters. │ + │ │ + │ 4EEue4P8vkwzjAEnxiUw9s4ibVA3YVWvzshd6tCQp67qjqda7n93SCtM8Z24tVFd8ZuS9Kt5kecghuajaneR │ + │ ↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓ ↓↓↓↓↓↓↓ ↓↓↓↓↓↓↓↓↓↓↓↓↓ ↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓ │ + │ 4EEjKap9yReFo8SdSKPhUgsQgsKD19nJBrhiBuDmcB7yzucbYMaGtpQF8de99KHWLqWtohzLKWtqTv9HG5Fb │ + └──────────────────────────────────────────────────────────────────────────────────────┘ +""", show_value_diffs, 'public key', + b58encode(TFC_PUBLIC_KEY_LENGTH*b'a', public_key=True), + b58encode(TFC_PUBLIC_KEY_LENGTH*b'b', public_key=True), + local_test=True) + + self.assert_prints("""\ + ┌─────────────────────────────────────────────────────────────────────────────────────────────────┐ + │ Source Computer received an invalid public key. │ + │ See arrows below that point to correct characters. │ + │ │ + │ A B C D E F G H I J K L │ + │ 4EEue4P 8vkwzjA EnxiUw9 s4ibVA3 YVWvzsh d6tCQp6 7qjqda7 n93SCtM 8Z24tVF d8ZuS9K t5kecgh uajaneR │ + │ ↓↓↓↓ ↓↓↓↓↓↓↓ ↓↓↓↓↓↓↓ ↓↓↓↓↓↓↓ ↓↓↓↓↓↓ ↓↓↓↓↓↓↓ ↓↓↓↓↓↓ ↓↓↓↓↓↓↓ ↓↓↓↓↓↓ ↓↓↓↓↓↓↓ ↓↓↓↓↓↓↓ ↓↓↓↓↓↓↓ │ + │ 4EEjKap 9yReFo8 SdSKPhU gsQgsKD 19nJBrh iBuDmcB 7yzucbY MaGtpQF 8de99KH WLqWtoh zLKWtqT v9HG5Fb │ + │ A B C D E F G H I J K L │ + └─────────────────────────────────────────────────────────────────────────────────────────────────┘ +""", show_value_diffs, 'public key', + b58encode(TFC_PUBLIC_KEY_LENGTH*b'a', public_key=True), + b58encode(TFC_PUBLIC_KEY_LENGTH*b'b', public_key=True), + local_test=False) + + @mock.patch('shutil.get_terminal_size', return_value=[80, 80]) + def test_show_account_diffs(self, _: Any) -> None: + + self.assert_prints("""\ + ┌──────────────────────────────────────────────────────────┐ + │ Source Computer received an invalid account. │ + │ See arrows below that point to correct characters. │ + │ │ + │ zwp3dykiztmeils2u5eqjtdtx5x3kti5ktjthpkznku3ws5u5fq2bnad │ + │ ↓↓↓↓↓ ↓↓↓↓↓↓↓↓↓↓↓↓↓↓ ↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓ │ + │ hpcrayuxhrcy2wtpfwgwjibderrvjll6azfr4tqat3eka2m2gbb55bid │ + └──────────────────────────────────────────────────────────┘ +""", show_value_diffs, 'account', + nick_to_onion_address('Alice'), + nick_to_onion_address('Bob'), + local_test=True) + + +if __name__ == '__main__': + unittest.main(exit=False) diff --git a/tests/relay/test_onion.py b/tests/relay/test_onion.py index 4ac482c..e215bb9 100644 --- a/tests/relay/test_onion.py +++ b/tests/relay/test_onion.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
@@ -24,28 +24,17 @@ import threading import time import unittest -from unittest import mock +from unittest import mock from unittest.mock import MagicMock +from typing import Any import stem.control -from src.common.misc import validate_onion_addr -from src.common.statics import ( - EXIT, - EXIT_QUEUE, - ONION_CLOSE_QUEUE, - ONION_KEY_QUEUE, - ONION_SERVICE_PRIVATE_KEY_LENGTH, - TOR_DATA_QUEUE, - TOR_SOCKS_PORT, -) +from src.common.misc import validate_onion_addr +from src.common.statics import (EXIT, EXIT_QUEUE, ONION_CLOSE_QUEUE, ONION_KEY_QUEUE, ONION_SERVICE_PRIVATE_KEY_LENGTH, + TOR_DATA_QUEUE, TOR_SOCKS_PORT) -from src.relay.onion import ( - get_available_port, - onion_service, - stem_compatible_ed25519_key_from_private_key, - Tor, -) +from src.relay.onion import get_available_port, onion_service, stem_compatible_ed25519_key_from_private_key, Tor from tests.utils import gen_queue_dict, tear_queues @@ -63,90 +52,58 @@ class TestGetAvailablePort(unittest.TestCase): class TestTor(unittest.TestCase): - @mock.patch("time.sleep", return_value=None) - @mock.patch("os.path.isfile", return_value=False) - def test_missing_binary_raises_critical_error(self, *_) -> None: + + @mock.patch('time.sleep', return_value=None) + @mock.patch('os.path.isfile', return_value=False) + def test_missing_binary_raises_critical_error(self, *_: Any) -> None: tor = Tor() with self.assertRaises(SystemExit): tor.connect(1234) - @mock.patch("time.sleep", return_value=None) - @mock.patch( - "stem.process.launch_tor_with_config", - side_effect=[MagicMock(), OSError, MagicMock()], - ) - @mock.patch( - "stem.control.Controller.from_socket_file", - return_value=MagicMock( - get_info=MagicMock( - side_effect=[ - 'NOTICE BOOTSTRAP PROGRESS=100 TAG=done SUMMARY="Done"', - stem.SocketClosed, - ] - ) - ), - ) - def test_closed_socket_raises_critical_error(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('stem.process.launch_tor_with_config', side_effect=[MagicMock(), OSError, MagicMock()]) + @mock.patch('stem.control.Controller.from_socket_file', return_value=MagicMock(get_info=MagicMock( + side_effect=['NOTICE BOOTSTRAP PROGRESS=100 TAG=done SUMMARY="Done"', stem.SocketClosed]))) + def test_closed_socket_raises_critical_error(self, *_: Any) -> None: tor = Tor() self.assertIsNone(tor.connect(1234)) with self.assertRaises(SystemExit): tor.connect(1234) - @mock.patch("time.sleep", return_value=None) - @mock.patch("time.monotonic", side_effect=[1, 20, 30, 40]) - @mock.patch( - "stem.control.Controller.from_socket_file", - return_value=MagicMock( - get_info=MagicMock( - side_effect=[ - 'NOTICE BOOTSTRAP PROGRESS=100 TAG=done SUMMARY="Nope"', - 'NOTICE BOOTSTRAP PROGRESS=100 TAG=done SUMMARY="Done"', - ] - ) - ), - ) - @mock.patch( - "stem.process.launch_tor_with_config", - return_value=MagicMock(poll=lambda: False), - ) - def test_timeout_restarts_tor(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('time.monotonic', side_effect=[1, 20, 30, 40]) + @mock.patch('stem.control.Controller.from_socket_file', return_value=MagicMock(get_info=MagicMock( + side_effect=['NOTICE BOOTSTRAP PROGRESS=100 TAG=done SUMMARY="Nope"', + 'NOTICE BOOTSTRAP PROGRESS=100 TAG=done SUMMARY="Done"']))) + @mock.patch('stem.process.launch_tor_with_config', return_value=MagicMock(poll=lambda: False)) + def test_timeout_restarts_tor(self, *_: Any) -> None: tor = Tor() self.assertIsNone(tor.connect(1234)) tor.stop() class TestTorKeyExpansion(unittest.TestCase): + def 
test_invalid_key_size_raises_critical_error(self) -> None: for ks in [ks for ks in range(64) if ks != ONION_SERVICE_PRIVATE_KEY_LENGTH]: with self.assertRaises(SystemExit): stem_compatible_ed25519_key_from_private_key(os.urandom(ks)) def test_valid_key_size(self) -> None: - self.assertEqual( - stem_compatible_ed25519_key_from_private_key( - bytes(ONION_SERVICE_PRIVATE_KEY_LENGTH) - ), - "UEatwduoOIZ7K7v90MNCPli1eXC1JnqQ9XlgkkqH8VYKaoXqpkLayDVCS118jWN8AECMenPaZyt/SYUhQgtt0w==", - ) + self.assertEqual(stem_compatible_ed25519_key_from_private_key(bytes(ONION_SERVICE_PRIVATE_KEY_LENGTH)), + 'UEatwduoOIZ7K7v90MNCPli1eXC1JnqQ9XlgkkqH8VYKaoXqpkLayDVCS118jWN8AECMenPaZyt/SYUhQgtt0w==') class TestOnionService(unittest.TestCase): - @mock.patch( - "shlex.split", - return_value=[ - "NOTICE", - "BOOTSTRAP", - "PROGRESS=100", - "TAG=done", - "SUMMARY=Done", - ], - ) - @mock.patch("stem.control.Controller.from_socket_file", return_value=MagicMock()) - @mock.patch("src.relay.onion.get_available_port", side_effect=KeyboardInterrupt) - def test_returns_with_keyboard_interrupt(self, *_) -> None: + + @mock.patch('shlex.split', return_value=['NOTICE', 'BOOTSTRAP', 'PROGRESS=100', + 'TAG=done', 'SUMMARY=Done']) + @mock.patch('stem.control.Controller.from_socket_file', return_value=MagicMock()) + @mock.patch('src.relay.onion.get_available_port', side_effect=KeyboardInterrupt) + def test_returns_with_keyboard_interrupt(self, *_: Any) -> None: # Setup queues = gen_queue_dict() - queues[ONION_KEY_QUEUE].put((bytes(ONION_SERVICE_PRIVATE_KEY_LENGTH), b"\x01")) + queues[ONION_KEY_QUEUE].put((bytes(ONION_SERVICE_PRIVATE_KEY_LENGTH), b'\x01')) # Test self.assertIsNone(onion_service(queues)) @@ -154,31 +111,19 @@ class TestOnionService(unittest.TestCase): # Teardown tear_queues(queues) - @mock.patch( - "shlex.split", - return_value=[ - "NOTICE", - "BOOTSTRAP", - "PROGRESS=100", - "TAG=done", - "SUMMARY=Done", - ], - ) - @mock.patch("stem.control.Controller.from_socket_file", return_value=MagicMock()) - @mock.patch("stem.process.launch_tor_with_config", return_value=MagicMock()) - def test_onion_service(self, *_) -> None: + @mock.patch('shlex.split', return_value=['NOTICE', 'BOOTSTRAP', 'PROGRESS=100', + 'TAG=done', 'SUMMARY=Done']) + @mock.patch('stem.control.Controller.from_socket_file', return_value=MagicMock()) + @mock.patch('stem.process.launch_tor_with_config', return_value=MagicMock()) + def test_onion_service(self, *_: Any) -> None: # Setup queues = gen_queue_dict() def queue_delayer() -> None: """Place Onion Service data into queue after delay.""" time.sleep(0.5) - queues[ONION_KEY_QUEUE].put( - (bytes(ONION_SERVICE_PRIVATE_KEY_LENGTH), b"\x01") - ) - queues[ONION_KEY_QUEUE].put( - (bytes(ONION_SERVICE_PRIVATE_KEY_LENGTH), b"\x01") - ) + queues[ONION_KEY_QUEUE].put((bytes(ONION_SERVICE_PRIVATE_KEY_LENGTH), b'\x01')) + queues[ONION_KEY_QUEUE].put((bytes(ONION_SERVICE_PRIVATE_KEY_LENGTH), b'\x01')) time.sleep(0.1) queues[ONION_CLOSE_QUEUE].put(EXIT) @@ -190,30 +135,22 @@ class TestOnionService(unittest.TestCase): port, address = queues[TOR_DATA_QUEUE].get() self.assertIsInstance(port, int) - self.assertEqual(validate_onion_addr(address), "") + self.assertEqual(validate_onion_addr(address), '') self.assertEqual(queues[EXIT_QUEUE].get(), EXIT) # Teardown tear_queues(queues) - @mock.patch("time.sleep", return_value=None) - @mock.patch( - "shlex.split", - return_value=[ - "NOTICE", - "BOOTSTRAP", - "PROGRESS=100", - "TAG=done", - "SUMMARY=Done", - ], - ) - @mock.patch("shutil.get_terminal_size", 
side_effect=[stem.SocketClosed]) - @mock.patch("stem.control.Controller.from_socket_file", return_value=MagicMock()) - @mock.patch("stem.process.launch_tor_with_config", return_value=MagicMock()) - def test_exception_during_onion_service_setup_returns(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('shlex.split', return_value=['NOTICE', 'BOOTSTRAP', 'PROGRESS=100', + 'TAG=done', 'SUMMARY=Done']) + @mock.patch('shutil.get_terminal_size', side_effect=[stem.SocketClosed]) + @mock.patch('stem.control.Controller.from_socket_file', return_value=MagicMock()) + @mock.patch('stem.process.launch_tor_with_config', return_value=MagicMock()) + def test_exception_during_onion_service_setup_returns(self, *_: Any) -> None: # Setup queues = gen_queue_dict() - queues[ONION_KEY_QUEUE].put((bytes(ONION_SERVICE_PRIVATE_KEY_LENGTH), b"\x01")) + queues[ONION_KEY_QUEUE].put((bytes(ONION_SERVICE_PRIVATE_KEY_LENGTH), b'\x01')) # Test self.assertIsNone(onion_service(queues)) @@ -221,30 +158,18 @@ class TestOnionService(unittest.TestCase): # Teardown tear_queues(queues) - @mock.patch( - "time.sleep", - side_effect=[None, None, KeyboardInterrupt, stem.SocketClosed, None], - ) - @mock.patch( - "shlex.split", - return_value=[ - "NOTICE", - "BOOTSTRAP", - "PROGRESS=100", - "TAG=done", - "SUMMARY=Done", - ], - ) - @mock.patch("stem.control.Controller.from_socket_file", return_value=MagicMock()) - @mock.patch("stem.process.launch_tor_with_config", return_value=MagicMock()) - def test_socket_closed_returns(self, *_) -> None: + @mock.patch('time.sleep', side_effect=[None, None, KeyboardInterrupt, stem.SocketClosed, None]) + @mock.patch('shlex.split', return_value=['NOTICE', 'BOOTSTRAP', 'PROGRESS=100', 'TAG=done', 'SUMMARY=Done']) + @mock.patch('stem.control.Controller.from_socket_file', return_value=MagicMock()) + @mock.patch('stem.process.launch_tor_with_config', return_value=MagicMock()) + def test_socket_closed_returns(self, *_: Any) -> None: # Setup queues = gen_queue_dict() controller = stem.control.Controller controller.create_ephemeral_hidden_service = MagicMock() - queues[ONION_KEY_QUEUE].put((bytes(ONION_SERVICE_PRIVATE_KEY_LENGTH), b"\x01")) + queues[ONION_KEY_QUEUE].put((bytes(ONION_SERVICE_PRIVATE_KEY_LENGTH), b'\x01')) # Test self.assertIsNone(onion_service(queues)) @@ -252,24 +177,24 @@ class TestOnionService(unittest.TestCase): # Teardown tear_queues(queues) - @mock.patch("stem.control.Controller.from_port", MagicMock()) - @mock.patch("builtins.open", mock.mock_open(read_data='TAILS_PRODUCT_NAME="Tails"')) - def test_no_tor_process_is_created_when_tails_is_used(self, *_) -> None: + @mock.patch('stem.control.Controller.from_port', MagicMock()) + @mock.patch('builtins.open', mock.mock_open(read_data='TAILS_PRODUCT_NAME="Tails"')) + def test_no_tor_process_is_created_when_tails_is_used(self, *_: Any) -> None: tor = Tor() self.assertIsNone(tor.connect(1234)) self.assertIsNone(tor.tor_process) - @mock.patch("time.sleep", return_value=None) - def test_missing_tor_controller_raises_critical_error(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + def test_missing_tor_controller_raises_critical_error(self, *_: Any) -> None: # Setup - queues = gen_queue_dict() + queues = gen_queue_dict() orig_tor_connect = Tor.connect - Tor.connect = MagicMock(return_value=None) + Tor.connect = MagicMock(return_value=None) controller = stem.control.Controller controller.create_ephemeral_hidden_service = MagicMock() - queues[ONION_KEY_QUEUE].put((bytes(ONION_SERVICE_PRIVATE_KEY_LENGTH), 
b"\x01")) + queues[ONION_KEY_QUEUE].put((bytes(ONION_SERVICE_PRIVATE_KEY_LENGTH), b'\x01')) # Test with self.assertRaises(SystemExit): @@ -280,5 +205,5 @@ class TestOnionService(unittest.TestCase): Tor.connect = orig_tor_connect -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/relay/test_server.py b/tests/relay/test_server.py index 7095aa0..0e51f21 100644 --- a/tests/relay/test_server.py +++ b/tests/relay/test_server.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -21,84 +21,75 @@ along with TFC. If not, see . import unittest -from src.common.crypto import X448 -from src.common.statics import ( - CONTACT_REQ_QUEUE, - F_TO_FLASK_QUEUE, - M_TO_FLASK_QUEUE, - URL_TOKEN_QUEUE, -) +from src.common.crypto import X448 +from src.common.statics import CONTACT_REQ_QUEUE, F_TO_FLASK_QUEUE, M_TO_FLASK_QUEUE, URL_TOKEN_QUEUE from src.relay.server import flask_server -from tests.utils import ( - gen_queue_dict, - nick_to_onion_address, - nick_to_pub_key, - tear_queues, -) +from tests.utils import gen_queue_dict, nick_to_onion_address, nick_to_pub_key, tear_queues class TestFlaskServer(unittest.TestCase): + def test_flask_server(self) -> None: # Setup - queues = gen_queue_dict() + queues = gen_queue_dict() url_token_private_key = X448.generate_private_key() - url_token_public_key = X448.derive_public_key(url_token_private_key).hex() - ut = "a450987345098723459870234509827340598273405983274234098723490285" - ut_old = "a450987345098723459870234509827340598273405983274234098723490286" - ut_invalid = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" - onion_pub_key = nick_to_pub_key("Alice") - onion_address = nick_to_onion_address("Alice") - packet1 = "packet1" - packet2 = "packet2" - packet3 = b"packet3" + url_token_public_key = X448.derive_public_key(url_token_private_key).hex() + url_token = 'a450987345098723459870234509827340598273405983274234098723490285' + url_token_old = 'a450987345098723459870234509827340598273405983274234098723490286' + url_token_invalid = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + onion_pub_key = nick_to_pub_key('Alice') + onion_address = nick_to_onion_address('Alice') + packet1 = "packet1" + packet2 = "packet2" + packet3 = b"packet3" # Test app = flask_server(queues, url_token_public_key, unit_test=True) with app.test_client() as c: # Test root domain returns public key of server. 
- resp = c.get("/") + resp = c.get('/') self.assertEqual(resp.data, url_token_public_key.encode()) - resp = c.get(f"/contact_request/{onion_address}") - self.assertEqual(b"OK", resp.data) + resp = c.get(f'/contact_request/{onion_address}') + self.assertEqual(b'OK', resp.data) self.assertEqual(queues[CONTACT_REQ_QUEUE].qsize(), 1) # Test invalid URL token returns empty response - resp = c.get(f"/{ut_invalid}/messages/") - self.assertEqual(b"", resp.data) - resp = c.get(f"/{ut_invalid}/files/") - self.assertEqual(b"", resp.data) + resp = c.get(f'/{url_token_invalid}/messages/') + self.assertEqual(b'', resp.data) + resp = c.get(f'/{url_token_invalid}/files/') + self.assertEqual(b'', resp.data) # Test valid URL token returns all queued messages - queues[URL_TOKEN_QUEUE].put((onion_pub_key, ut_old)) - queues[URL_TOKEN_QUEUE].put((onion_pub_key, ut)) + queues[URL_TOKEN_QUEUE].put((onion_pub_key, url_token_old)) + queues[URL_TOKEN_QUEUE].put((onion_pub_key, url_token)) queues[M_TO_FLASK_QUEUE].put((packet1, onion_pub_key)) queues[M_TO_FLASK_QUEUE].put((packet2, onion_pub_key)) queues[F_TO_FLASK_QUEUE].put((packet3, onion_pub_key)) with app.test_client() as c: - resp = c.get(f"/{ut}/messages/") - self.assertEqual(b"packet1\npacket2", resp.data) + resp = c.get(f'/{url_token}/messages/') + self.assertEqual(b'packet1\npacket2', resp.data) with app.test_client() as c: - resp = c.get(f"/{ut}/files/") - self.assertEqual(b"packet3", resp.data) + resp = c.get(f'/{url_token}/files/') + self.assertEqual(b'packet3', resp.data) # Test valid URL token returns nothing as queues are empty with app.test_client() as c: - resp = c.get(f"/{ut}/messages/") - self.assertEqual(b"", resp.data) + resp = c.get(f'/{url_token}/messages/') + self.assertEqual(b'', resp.data) with app.test_client() as c: - resp = c.get(f"/{ut}/files/") - self.assertEqual(b"", resp.data) + resp = c.get(f'/{url_token}/files/') + self.assertEqual(b'', resp.data) # Teardown tear_queues(queues) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/relay/test_tcb.py b/tests/relay/test_tcb.py index cfea4e0..596b822 100644 --- a/tests/relay/test_tcb.py +++ b/tests/relay/test_tcb.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
@@ -26,48 +26,33 @@ import unittest from datetime import datetime from unittest import mock -from src.common.encoding import int_to_bytes +from src.common.encoding import int_to_bytes from src.common.reed_solomon import RSCodec -from src.common.statics import ( - COMMAND_DATAGRAM_HEADER, - DST_COMMAND_QUEUE, - DST_MESSAGE_QUEUE, - EXIT, - FILE_DATAGRAM_HEADER, - F_TO_FLASK_QUEUE, - GATEWAY_QUEUE, - GROUP_ID_LENGTH, - GROUP_MSG_EXIT_GROUP_HEADER, - GROUP_MSG_INVITE_HEADER, - GROUP_MSG_JOIN_HEADER, - GROUP_MSG_MEMBER_ADD_HEADER, - GROUP_MSG_MEMBER_REM_HEADER, - LOCAL_KEY_DATAGRAM_HEADER, - MESSAGE_DATAGRAM_HEADER, - M_TO_FLASK_QUEUE, - PUBLIC_KEY_DATAGRAM_HEADER, - SRC_TO_RELAY_QUEUE, - TFC_PUBLIC_KEY_LENGTH, - UNENCRYPTED_DATAGRAM_HEADER, - UNIT_TEST_QUEUE, -) +from src.common.statics import (COMMAND_DATAGRAM_HEADER, DST_COMMAND_QUEUE, DST_MESSAGE_QUEUE, EXIT, + FILE_DATAGRAM_HEADER, F_TO_FLASK_QUEUE, GATEWAY_QUEUE, GROUP_ID_LENGTH, + GROUP_MSG_EXIT_GROUP_HEADER, GROUP_MSG_INVITE_HEADER, GROUP_MSG_JOIN_HEADER, + GROUP_MSG_MEMBER_ADD_HEADER, GROUP_MSG_MEMBER_REM_HEADER, + LOCAL_KEY_DATAGRAM_HEADER, MESSAGE_DATAGRAM_HEADER, M_TO_FLASK_QUEUE, + PUBLIC_KEY_DATAGRAM_HEADER, SRC_TO_RELAY_QUEUE, TFC_PUBLIC_KEY_LENGTH, + UNENCRYPTED_DATAGRAM_HEADER, UNIT_TEST_QUEUE) from src.relay.tcb import dst_outgoing, src_incoming from tests.mock_classes import Gateway, nick_to_pub_key, Settings -from tests.utils import cd_unit_test, cleanup, gen_queue_dict, tear_queues +from tests.utils import cd_unit_test, cleanup, gen_queue_dict, tear_queues class TestSRCIncoming(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.settings = Settings() + self.settings = Settings() self.unit_test_dir = cd_unit_test() - self.gateway = Gateway() - self.rs = RSCodec(2 * self.gateway.settings.serial_error_correction) - self.ts = datetime.now() - self.queues = gen_queue_dict() - self.args = self.queues, self.gateway + self.gateway = Gateway() + self.rs = RSCodec(2 * self.gateway.settings.serial_error_correction) + self.ts = datetime.now() + self.queues = gen_queue_dict() + self.args = self.queues, self.gateway def tearDown(self) -> None: """Post-test actions.""" @@ -80,8 +65,8 @@ class TestSRCIncoming(unittest.TestCase): def test_unencrypted_datagram(self) -> None: # Setup - packet = self.create_packet(UNENCRYPTED_DATAGRAM_HEADER + b"test") - self.queues[GATEWAY_QUEUE].put((self.ts, 640 * b"a")) + packet = self.create_packet(UNENCRYPTED_DATAGRAM_HEADER + b'test') + self.queues[GATEWAY_QUEUE].put((self.ts, 640 * b'a')) self.queues[GATEWAY_QUEUE].put((self.ts, packet)) # Test @@ -90,7 +75,7 @@ class TestSRCIncoming(unittest.TestCase): def test_local_key_datagram(self) -> None: # Setup - packet = self.create_packet(LOCAL_KEY_DATAGRAM_HEADER + b"test") + packet = self.create_packet(LOCAL_KEY_DATAGRAM_HEADER + b'test') def queue_delayer() -> None: """Place packet into queue after delay.""" @@ -105,7 +90,7 @@ class TestSRCIncoming(unittest.TestCase): def test_command_datagram(self) -> None: # Setup - packet = self.create_packet(COMMAND_DATAGRAM_HEADER + b"test") + packet = self.create_packet(COMMAND_DATAGRAM_HEADER + b'test') self.queues[GATEWAY_QUEUE].put((self.ts, packet)) # Test @@ -114,23 +99,17 @@ class TestSRCIncoming(unittest.TestCase): def test_message_datagram(self) -> None: # Setup - packet = self.create_packet( - MESSAGE_DATAGRAM_HEADER + 344 * b"a" + nick_to_pub_key("bob") - ) + packet = self.create_packet(MESSAGE_DATAGRAM_HEADER + 344 * b'a' + nick_to_pub_key('bob')) 
self.queues[GATEWAY_QUEUE].put((self.ts, packet)) # Test self.assertIsNone(src_incoming(*self.args, unit_test=True)) - self.assertEqual(self.queues[M_TO_FLASK_QUEUE].qsize(), 1) + self.assertEqual(self.queues[M_TO_FLASK_QUEUE].qsize(), 1) self.assertEqual(self.queues[DST_MESSAGE_QUEUE].qsize(), 1) def test_public_key_datagram(self) -> None: # Setup - packet = self.create_packet( - PUBLIC_KEY_DATAGRAM_HEADER - + nick_to_pub_key("bob") - + TFC_PUBLIC_KEY_LENGTH * b"a" - ) + packet = self.create_packet(PUBLIC_KEY_DATAGRAM_HEADER + nick_to_pub_key('bob') + TFC_PUBLIC_KEY_LENGTH * b'a') self.queues[GATEWAY_QUEUE].put((self.ts, packet)) # Test @@ -139,103 +118,92 @@ class TestSRCIncoming(unittest.TestCase): def test_file_datagram(self) -> None: # Setup - packet = self.create_packet( - FILE_DATAGRAM_HEADER - + int_to_bytes(2) - + nick_to_pub_key("Alice") - + nick_to_pub_key("Bob") - + 200 * b"a" - ) + packet = self.create_packet(FILE_DATAGRAM_HEADER + + int_to_bytes(2) + + nick_to_pub_key('Alice') + + nick_to_pub_key('Bob') + + 200 * b'a') self.queues[GATEWAY_QUEUE].put((self.ts, packet)) # Test self.assertIsNone(src_incoming(*self.args, unit_test=True)) self.assertEqual(self.queues[DST_MESSAGE_QUEUE].qsize(), 0) - self.assertEqual(self.queues[F_TO_FLASK_QUEUE].qsize(), 2) + self.assertEqual(self.queues[F_TO_FLASK_QUEUE].qsize(), 2) def test_group_invitation_datagram(self) -> None: # Setup - packet = self.create_packet( - GROUP_MSG_INVITE_HEADER - + bytes(GROUP_ID_LENGTH) - + nick_to_pub_key("Alice") - + nick_to_pub_key("Bob") - ) + packet = self.create_packet(GROUP_MSG_INVITE_HEADER + + bytes(GROUP_ID_LENGTH) + + nick_to_pub_key('Alice') + + nick_to_pub_key('Bob')) self.queues[GATEWAY_QUEUE].put((self.ts, packet)) # Test self.assertIsNone(src_incoming(*self.args, unit_test=True)) self.assertEqual(self.queues[DST_MESSAGE_QUEUE].qsize(), 0) - self.assertEqual(self.queues[M_TO_FLASK_QUEUE].qsize(), 2) + self.assertEqual(self.queues[M_TO_FLASK_QUEUE].qsize(), 2) def test_group_join_datagram(self) -> None: # Setup - packet = self.create_packet( - GROUP_MSG_JOIN_HEADER - + bytes(GROUP_ID_LENGTH) - + nick_to_pub_key("Alice") - + nick_to_pub_key("Bob") - ) + packet = self.create_packet(GROUP_MSG_JOIN_HEADER + + bytes(GROUP_ID_LENGTH) + + nick_to_pub_key('Alice') + + nick_to_pub_key('Bob')) self.queues[GATEWAY_QUEUE].put((self.ts, packet)) # Test self.assertIsNone(src_incoming(*self.args, unit_test=True)) self.assertEqual(self.queues[DST_MESSAGE_QUEUE].qsize(), 0) - self.assertEqual(self.queues[M_TO_FLASK_QUEUE].qsize(), 2) + self.assertEqual(self.queues[M_TO_FLASK_QUEUE].qsize(), 2) def test_group_add_datagram(self) -> None: # Setup - packet = self.create_packet( - GROUP_MSG_MEMBER_ADD_HEADER - + bytes(GROUP_ID_LENGTH) - + int_to_bytes(1) - + nick_to_pub_key("Alice") - + nick_to_pub_key("Bob") - ) + packet = self.create_packet(GROUP_MSG_MEMBER_ADD_HEADER + + bytes(GROUP_ID_LENGTH) + + int_to_bytes(1) + + nick_to_pub_key('Alice') + + nick_to_pub_key('Bob')) self.queues[GATEWAY_QUEUE].put((self.ts, packet)) # Test self.assertIsNone(src_incoming(*self.args, unit_test=True)) self.assertEqual(self.queues[DST_MESSAGE_QUEUE].qsize(), 0) - self.assertEqual(self.queues[M_TO_FLASK_QUEUE].qsize(), 2) + self.assertEqual(self.queues[M_TO_FLASK_QUEUE].qsize(), 2) def test_group_remove_datagram(self) -> None: # Setup - packet = self.create_packet( - GROUP_MSG_MEMBER_REM_HEADER - + bytes(GROUP_ID_LENGTH) - + int_to_bytes(2) - + nick_to_pub_key("Alice") - + nick_to_pub_key("Bob") - ) + packet = 
self.create_packet(GROUP_MSG_MEMBER_REM_HEADER + + bytes(GROUP_ID_LENGTH) + + int_to_bytes(2) + + nick_to_pub_key('Alice') + + nick_to_pub_key('Bob')) self.queues[GATEWAY_QUEUE].put((self.ts, packet)) # Test self.assertIsNone(src_incoming(*self.args, unit_test=True)) self.assertEqual(self.queues[DST_MESSAGE_QUEUE].qsize(), 0) - self.assertEqual(self.queues[M_TO_FLASK_QUEUE].qsize(), 2) + self.assertEqual(self.queues[M_TO_FLASK_QUEUE].qsize(), 2) def test_group_exit_datagram(self) -> None: # Setup - packet = self.create_packet( - GROUP_MSG_EXIT_GROUP_HEADER - + bytes(GROUP_ID_LENGTH) - + nick_to_pub_key("Alice") - + nick_to_pub_key("Bob") - ) + packet = self.create_packet(GROUP_MSG_EXIT_GROUP_HEADER + + bytes(GROUP_ID_LENGTH) + + nick_to_pub_key('Alice') + + nick_to_pub_key('Bob')) self.queues[GATEWAY_QUEUE].put((self.ts, packet)) # Test self.assertIsNone(src_incoming(*self.args, unit_test=True)) self.assertEqual(self.queues[DST_MESSAGE_QUEUE].qsize(), 0) - self.assertEqual(self.queues[M_TO_FLASK_QUEUE].qsize(), 2) + self.assertEqual(self.queues[M_TO_FLASK_QUEUE].qsize(), 2) class TestDSTOutGoing(unittest.TestCase): + def test_loop(self) -> None: # Setup - packet = b"test_packet" - queues = gen_queue_dict() + packet = b'test_packet' + queues = gen_queue_dict() gateway = Gateway() def queue_delayer() -> None: @@ -251,7 +219,7 @@ class TestDSTOutGoing(unittest.TestCase): # Test side_effects = [EOFError, KeyboardInterrupt, None] + [None] * 100_000 - with unittest.mock.patch("time.sleep", side_effect=side_effects): + with unittest.mock.patch('time.sleep', side_effect=side_effects): self.assertIsNone(dst_outgoing(queues, gateway, unit_test=True)) self.assertEqual(packet, gateway.packets[0]) @@ -259,5 +227,5 @@ class TestDSTOutGoing(unittest.TestCase): tear_queues(queues) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/test_dd.py b/tests/test_dd.py index 1e73e34..4bbb2da 100644 --- a/tests/test_dd.py +++ b/tests/test_dd.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
@@ -25,21 +25,12 @@ import time import unittest from multiprocessing import Queue -from unittest import mock -from unittest.mock import MagicMock +from unittest import mock +from unittest.mock import MagicMock +from typing import Any -from src.common.statics import ( - DATA_FLOW, - DST_LISTEN_SOCKET, - EXIT, - EXIT_QUEUE, - IDLE, - NCDCLR, - NCDCRL, - RP_LISTEN_SOCKET, - SCNCLR, - SCNCRL, -) +from src.common.statics import (DATA_FLOW, DST_LISTEN_SOCKET, EXIT, EXIT_QUEUE, IDLE, + NCDCLR, NCDCRL, RP_LISTEN_SOCKET, SCNCLR, SCNCRL) from dd import animate, draw_frame, main, process_arguments, rx_loop, tx_loop @@ -47,111 +38,80 @@ from tests.utils import tear_queue, TFCTestCase class TestDrawFrame(TFCTestCase): + def test_left_to_right_oriented_data_diode_frames(self) -> None: for argv in [SCNCLR, NCDCRL]: - self.assert_prints( - """\ + self.assert_prints("""\ \n\n\n\n\n\n\n\n Data flow → ────╮ ╭──── Tx │ > │ Rx ────╯ ╰──── -""", - draw_frame, - argv, - DATA_FLOW, - high=True, - ) +""", draw_frame, argv, DATA_FLOW, high=True) - self.assert_prints( - """\ + self.assert_prints("""\ \n\n\n\n\n\n\n\n Data flow → ────╮ ╭──── Tx │ │ Rx ────╯ ╰──── -""", - draw_frame, - argv, - DATA_FLOW, - high=False, - ) +""", draw_frame, argv, DATA_FLOW, high=False) - self.assert_prints( - """\ + self.assert_prints("""\ \n\n\n\n\n\n\n\n Idle ────╮ ╭──── Tx │ │ Rx ────╯ ╰──── -""", - draw_frame, - argv, - IDLE, - ) +""", draw_frame, argv, IDLE) def test_right_to_left_oriented_data_diode_frames(self) -> None: for argv in [SCNCRL, NCDCLR]: - self.assert_prints( - """\ + self.assert_prints("""\ \n\n\n\n\n\n\n\n Data flow ← ────╮ ╭──── Rx │ < │ Tx ────╯ ╰──── -""", - draw_frame, - argv, - DATA_FLOW, - high=True, - ) +""", draw_frame, argv, DATA_FLOW, high=True) - self.assert_prints( - """\ + self.assert_prints("""\ \n\n\n\n\n\n\n\n Data flow ← ────╮ ╭──── Rx │ │ Tx ────╯ ╰──── -""", - draw_frame, - argv, - DATA_FLOW, - high=False, - ) +""", draw_frame, argv, DATA_FLOW, high=False) - self.assert_prints( - """\ + self.assert_prints("""\ \n\n\n\n\n\n\n\n Idle ────╮ ╭──── Rx │ │ Tx ────╯ ╰──── -""", - draw_frame, - argv, - IDLE, - ) +""", draw_frame, argv, IDLE) class TestAnimate(unittest.TestCase): - @mock.patch("time.sleep", return_value=MagicMock) - def test_animate(self, _) -> None: + + @mock.patch('time.sleep', return_value=MagicMock) + def test_animate(self, _: Any) -> None: for arg in [SCNCLR, NCDCLR, SCNCRL, NCDCRL]: self.assertIsNone(animate(arg)) class TestRxLoop(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" self.queue = Queue() @@ -160,34 +120,21 @@ class TestRxLoop(unittest.TestCase): """Post-test actions.""" tear_queue(self.queue) - @mock.patch( - "multiprocessing.connection.Listener", - return_value=MagicMock( - accept=MagicMock( - return_value=MagicMock( - recv=MagicMock( - side_effect=[ - b"test_data", - b"test_data", - KeyboardInterrupt, - EOFError, - ] - ) - ) - ) - ), - ) - def test_rx_loop(self, _) -> None: + @mock.patch('multiprocessing.connection.Listener', return_value=MagicMock( + accept=MagicMock(return_value=MagicMock( + recv=MagicMock(side_effect=[b'test_data', b'test_data', KeyboardInterrupt, EOFError]))))) + def test_rx_loop(self, _: Any) -> None: with self.assertRaises(SystemExit): rx_loop(self.queue, RP_LISTEN_SOCKET) self.assertEqual(self.queue.qsize(), 2) - while self.queue.qsize(): - self.assertEqual(self.queue.get(), b"test_data") + while self.queue.qsize() != 0: + self.assertEqual(self.queue.get(), b'test_data') class TestTxLoop(unittest.TestCase): + def 
setUp(self) -> None: """Pre-test actions.""" self.o_sleep = time.sleep @@ -196,20 +143,16 @@ class TestTxLoop(unittest.TestCase): """Post-test actions.""" time.sleep = self.o_sleep - @mock.patch("time.sleep", lambda _: None) - @mock.patch( - "multiprocessing.connection.Client", - side_effect=[socket.error, MagicMock(send=MagicMock)], - ) - def test_tx_loop(self, *_) -> None: + @mock.patch('time.sleep', lambda _: None) + @mock.patch('multiprocessing.connection.Client', side_effect=[socket.error, MagicMock(send=MagicMock)]) + def test_tx_loop(self, *_: Any) -> None: # Setup queue = Queue() def queue_delayer() -> None: """Place packet to queue after timer runs out.""" self.o_sleep(0.1) - queue.put(b"test_packet") - + queue.put(b'test_packet') threading.Thread(target=queue_delayer).start() # Test @@ -220,22 +163,24 @@ class TestTxLoop(unittest.TestCase): class TestProcessArguments(unittest.TestCase): - def test_invalid_arguments_exit(self, *_) -> None: - for argument in ["", "invalid"]: - with mock.patch("sys.argv", ["dd.py", argument]): + + def test_invalid_arguments_exit(self, *_: Any) -> None: + for argument in ['', 'invalid']: + with mock.patch('sys.argv', ['dd.py', argument]): with self.assertRaises(SystemExit): process_arguments() - def test_valid_arguments(self, *_) -> None: + def test_valid_arguments(self, *_: Any) -> None: for argument in [SCNCLR, SCNCRL, NCDCLR, NCDCRL]: - with mock.patch("sys.argv", ["dd.py", argument]): + with mock.patch('sys.argv', ['dd.py', argument]): arg, input_socket, output_socket = process_arguments() self.assertEqual(arg, argument) - self.assertIsInstance(input_socket, int) + self.assertIsInstance(input_socket, int) self.assertIsInstance(output_socket, int) class TestMain(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" self.queue = Queue() @@ -244,9 +189,9 @@ class TestMain(unittest.TestCase): """Post-test actions.""" tear_queue(self.queue) - @mock.patch("time.sleep", lambda _: None) - @mock.patch("sys.argv", ["dd.py", SCNCLR]) - def test_main(self, *_) -> None: + @mock.patch('time.sleep', lambda _: None) + @mock.patch('sys.argv', ['dd.py', SCNCLR]) + def test_main(self, *_: Any) -> None: # Setup queues = {EXIT_QUEUE: self.queue} @@ -254,7 +199,6 @@ class TestMain(unittest.TestCase): """Place packet to queue after timer runs out.""" time.sleep(0.1) queues[EXIT_QUEUE].put(EXIT) - threading.Thread(target=queue_delayer).start() # Test @@ -262,5 +206,5 @@ class TestMain(unittest.TestCase): main(queues=queues) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/transmitter/__init__.py b/tests/transmitter/__init__.py index 6eb560e..833769a 100644 --- a/tests/transmitter/__init__.py +++ b/tests/transmitter/__init__.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. diff --git a/tests/transmitter/test_commands.py b/tests/transmitter/test_commands.py index 27f2ed7..6dffac5 100644 --- a/tests/transmitter/test_commands.py +++ b/tests/transmitter/test_commands.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
@@ -24,249 +24,172 @@ import time import unittest from multiprocessing import Process -from unittest import mock -from unittest.mock import MagicMock +from unittest import mock +from unittest.mock import MagicMock +from typing import Any from src.common.database import TFCDatabase, MessageLog -from src.common.db_logs import write_log_entry -from src.common.db_masterkey import MasterKey as OriginalMasterKey +from src.common.db_logs import write_log_entry from src.common.encoding import bool_to_bytes -from src.common.statics import ( - BOLD_ON, - CLEAR_ENTIRE_SCREEN, - COMMAND_PACKET_QUEUE, - CURSOR_LEFT_UP_CORNER, - DIR_USER_DATA, - KEY_MGMT_ACK_QUEUE, - KEX_STATUS_NO_RX_PSK, - KEX_STATUS_UNVERIFIED, - KEX_STATUS_VERIFIED, - KEY_MANAGEMENT_QUEUE, - LOGFILE_MASKING_QUEUE, - MESSAGE, - MESSAGE_PACKET_QUEUE, - M_S_HEADER, - NORMAL_TEXT, - PADDING_LENGTH, - PRIVATE_MESSAGE_HEADER, - RELAY_PACKET_QUEUE, - RESET, - SENDER_MODE_QUEUE, - TM_COMMAND_PACKET_QUEUE, - TRAFFIC_MASKING_QUEUE, - TX, - UNENCRYPTED_DATAGRAM_HEADER, - UNENCRYPTED_WIPE_COMMAND, - VERSION, - WIN_TYPE_CONTACT, - WIN_TYPE_GROUP, - KDB_HALT_ACK_HEADER, - KDB_M_KEY_CHANGE_HALT_HEADER, -) +from src.common.db_masterkey import MasterKey as OrigMasterKey +from src.common.statics import (BOLD_ON, CLEAR_ENTIRE_SCREEN, COMMAND_PACKET_QUEUE, CURSOR_LEFT_UP_CORNER, + DIR_USER_DATA, KEY_MGMT_ACK_QUEUE, KEX_STATUS_NO_RX_PSK, KEX_STATUS_UNVERIFIED, + KEX_STATUS_VERIFIED, KEY_MANAGEMENT_QUEUE, LOGFILE_MASKING_QUEUE, MESSAGE, + MESSAGE_PACKET_QUEUE, M_S_HEADER, NORMAL_TEXT, PADDING_LENGTH, + PRIVATE_MESSAGE_HEADER, RELAY_PACKET_QUEUE, RESET, SENDER_MODE_QUEUE, + TM_COMMAND_PACKET_QUEUE, TRAFFIC_MASKING_QUEUE, TX, UNENCRYPTED_DATAGRAM_HEADER, + UNENCRYPTED_WIPE_COMMAND, VERSION, WIN_TYPE_CONTACT, WIN_TYPE_GROUP, + KDB_HALT_ACK_HEADER, KDB_M_KEY_CHANGE_HALT_HEADER) -from src.transmitter.commands import ( - change_master_key, - change_setting, - clear_screens, - exit_tfc, - log_command, -) -from src.transmitter.commands import ( - print_about, - print_help, - print_recipients, - print_settings, - process_command, -) -from src.transmitter.commands import ( - remove_log, - rxp_display_unread, - rxp_show_sys_win, - send_onion_service_key, - verify, -) +from src.transmitter.commands import change_master_key, change_setting, clear_screens, exit_tfc, log_command +from src.transmitter.commands import print_about, print_help, print_recipients, print_settings, process_command +from src.transmitter.commands import remove_log, rxp_display_unread, rxp_show_sys_win, send_onion_service_key, verify from src.transmitter.commands import whisper, whois, wipe -from src.transmitter.packet import split_to_assembly_packets +from src.transmitter.packet import split_to_assembly_packets -from tests.mock_classes import ( - ContactList, - create_contact, - Gateway, - GroupList, - MasterKey, - OnionService, - Settings, -) +from tests.mock_classes import ContactList, create_contact, Gateway, GroupList, MasterKey, OnionService, Settings from tests.mock_classes import TxWindow, UserInput -from tests.utils import ( - assembly_packet_creator, - cd_unit_test, - cleanup, - group_name_to_group_id, -) -from tests.utils import ( - gen_queue_dict, - nick_to_onion_address, - nick_to_pub_key, - tear_queues, - TFCTestCase, -) +from tests.utils import assembly_packet_creator, cd_unit_test, cleanup, group_name_to_group_id +from tests.utils import gen_queue_dict, nick_to_onion_address, nick_to_pub_key, tear_queues, TFCTestCase class TestProcessCommand(TFCTestCase): + def setUp(self) -> None: 
"""Pre-test actions.""" - self.window = TxWindow() - self.contact_list = ContactList() - self.group_list = GroupList() - self.settings = Settings() - self.queues = gen_queue_dict() - self.master_key = MasterKey() + self.window = TxWindow() + self.contact_list = ContactList() + self.group_list = GroupList() + self.settings = Settings() + self.queues = gen_queue_dict() + self.master_key = MasterKey() self.onion_service = OnionService() - self.gateway = Gateway() - self.args = ( - self.window, - self.contact_list, - self.group_list, - self.settings, - self.queues, - self.master_key, - self.onion_service, - self.gateway, - ) + self.gateway = Gateway() + self.args = (self.window, self.contact_list, self.group_list, self.settings, + self.queues, self.master_key, self.onion_service, self.gateway) def tearDown(self) -> None: """Post-test actions.""" tear_queues(self.queues) def test_valid_command(self) -> None: - self.assertIsNone(process_command(UserInput("about"), *self.args)) + self.assertIsNone(process_command(UserInput('about'), *self.args)) def test_invalid_command(self) -> None: - self.assert_se( - "Error: Invalid command 'abou'.", - process_command, - UserInput("abou"), - *self.args, - ) + self.assert_se("Error: Invalid command 'abou'.", process_command, UserInput('abou'), *self.args) def test_empty_command(self) -> None: - self.assert_se( - "Error: Invalid command.", process_command, UserInput(" "), *self.args - ) + self.assert_se("Error: Invalid command.", process_command, UserInput(' '), *self.args) class TestPrintAbout(TFCTestCase): + def test_print_about(self) -> None: - self.assert_prints( - CLEAR_ENTIRE_SCREEN - + CURSOR_LEFT_UP_CORNER - + f"""\ + self.assert_prints(CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER + f"""\ Tinfoil Chat {VERSION} Website: https://github.com/maqp/tfc/ Wikipage: https://github.com/maqp/tfc/wiki -""", - print_about, - ) +""", print_about) class TestClearScreens(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.window = TxWindow(uid=nick_to_pub_key("Alice")) + self.window = TxWindow(uid=nick_to_pub_key('Alice')) self.settings = Settings() - self.queues = gen_queue_dict() - self.args = self.window, self.settings, self.queues + self.queues = gen_queue_dict() + self.args = self.window, self.settings, self.queues def tearDown(self) -> None: """Post-test actions.""" tear_queues(self.queues) - @mock.patch("os.system", return_value=None) + @mock.patch('os.system', return_value=None) def test_clear_screens(self, _) -> None: - self.assertIsNone(clear_screens(UserInput("clear"), *self.args)) + self.assertIsNone(clear_screens(UserInput('clear'), *self.args)) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1) - self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1) + self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1) - @mock.patch("os.system", return_value=None) + @mock.patch('os.system', return_value=None) def test_no_relay_clear_cmd_when_traffic_masking_is_enabled(self, _) -> None: # Setup self.settings.traffic_masking = True # Test - self.assertIsNone(clear_screens(UserInput("clear"), *self.args)) + self.assertIsNone(clear_screens(UserInput('clear'), *self.args)) self.assertEqual(self.queues[TM_COMMAND_PACKET_QUEUE].qsize(), 1) - self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 0) + self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 0) - @mock.patch("os.system", return_value=None) + @mock.patch('os.system', return_value=None) def test_reset_screens(self, mock_os_system) -> None: - 
self.assertIsNone(clear_screens(UserInput("reset"), *self.args)) + self.assertIsNone(clear_screens(UserInput('reset'), *self.args)) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1) - self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1) + self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1) mock_os_system.assert_called_with(RESET) - @mock.patch("os.system", return_value=None) - def test_no_relay_reset_cmd_when_traffic_masking_is_enabled( - self, mock_os_system - ) -> None: + @mock.patch('os.system', return_value=None) + def test_no_relay_reset_cmd_when_traffic_masking_is_enabled(self, mock_os_system: MagicMock) -> None: # Setup self.settings.traffic_masking = True # Test - self.assertIsNone(clear_screens(UserInput("reset"), *self.args)) + self.assertIsNone(clear_screens(UserInput('reset'), *self.args)) self.assertEqual(self.queues[TM_COMMAND_PACKET_QUEUE].qsize(), 1) - self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 0) + self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 0) mock_os_system.assert_called_with(RESET) class TestRXPShowSysWin(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.window = TxWindow(name="Alice", uid=nick_to_pub_key("Alice")) + self.window = TxWindow(name='Alice', uid=nick_to_pub_key('Alice')) self.settings = Settings() - self.queues = gen_queue_dict() - self.args = self.window, self.settings, self.queues + self.queues = gen_queue_dict() + self.args = self.window, self.settings, self.queues def tearDown(self) -> None: """Post-test actions.""" tear_queues(self.queues) - @mock.patch("builtins.input", side_effect=["", EOFError, KeyboardInterrupt]) - def test_cmd_window(self, _) -> None: - self.assertIsNone(rxp_show_sys_win(UserInput(plaintext="cmd"), *self.args)) + @mock.patch('builtins.input', side_effect=['', EOFError, KeyboardInterrupt]) + def test_cmd_window(self, _: Any) -> None: + self.assertIsNone(rxp_show_sys_win(UserInput(plaintext='cmd'), *self.args)) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 2) - self.assertIsNone(rxp_show_sys_win(UserInput(plaintext="cmd"), *self.args)) + self.assertIsNone(rxp_show_sys_win(UserInput(plaintext='cmd'), *self.args)) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 4) - self.assertIsNone(rxp_show_sys_win(UserInput(plaintext="cmd"), *self.args)) + self.assertIsNone(rxp_show_sys_win(UserInput(plaintext='cmd'), *self.args)) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 6) - @mock.patch("builtins.input", side_effect=["", EOFError, KeyboardInterrupt]) - def test_file_window(self, _) -> None: - self.assertIsNone(rxp_show_sys_win(UserInput(plaintext="fw"), *self.args)) + @mock.patch('builtins.input', side_effect=['', EOFError, KeyboardInterrupt]) + def test_file_window(self, _: Any) -> None: + self.assertIsNone(rxp_show_sys_win(UserInput(plaintext='fw'), *self.args)) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 2) - self.assertIsNone(rxp_show_sys_win(UserInput(plaintext="fw"), *self.args)) + self.assertIsNone(rxp_show_sys_win(UserInput(plaintext='fw'), *self.args)) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 4) - self.assertIsNone(rxp_show_sys_win(UserInput(plaintext="fw"), *self.args)) + self.assertIsNone(rxp_show_sys_win(UserInput(plaintext='fw'), *self.args)) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 6) class TestExitTFC(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" self.settings = Settings(local_testing_mode=True) - self.queues = 
gen_queue_dict() - self.gateway = Gateway(data_diode_sockets=True) - self.args = self.settings, self.queues, self.gateway + self.queues = gen_queue_dict() + self.gateway = Gateway(data_diode_sockets=True) + self.args = self.settings, self.queues, self.gateway def tearDown(self) -> None: """Post-test actions.""" tear_queues(self.queues) - @mock.patch("time.sleep", return_value=None) - def test_exit_tfc_local_test(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_exit_tfc_local_test(self, _: Any) -> None: # Setup for _ in range(2): self.queues[COMMAND_PACKET_QUEUE].put("dummy command") @@ -274,10 +197,10 @@ class TestExitTFC(unittest.TestCase): # Test self.assertIsNone(exit_tfc(*self.args)) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1) - self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1) + self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1) - @mock.patch("time.sleep", return_value=None) - def test_exit_tfc(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_exit_tfc(self, _: Any) -> None: # Setup self.settings.local_testing_mode = False for _ in range(2): @@ -286,28 +209,24 @@ class TestExitTFC(unittest.TestCase): # Test self.assertIsNone(exit_tfc(*self.args)) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1) - self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1) + self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1) class TestLogCommand(TFCTestCase): - def setUp(self) -> None: + + @mock.patch("getpass.getpass", return_value='test_password') + def setUp(self, _: Any) -> None: """Pre-test actions.""" - self.unit_test_dir = cd_unit_test() - self.window = TxWindow(name="Alice", uid=nick_to_pub_key("Alice")) - self.contact_list = ContactList() - self.group_list = GroupList() - self.settings = Settings() - self.queues = gen_queue_dict() - self.master_key = MasterKey() - self.args = ( - self.window, - self.contact_list, - self.group_list, - self.settings, - self.queues, - self.master_key, - ) - self.log_file = f"{DIR_USER_DATA}{self.settings.software_operation}_logs" + self.unit_test_dir = cd_unit_test() + self.window = TxWindow(name='Alice', uid=nick_to_pub_key('Alice')) + self.contact_list = ContactList() + self.group_list = GroupList() + self.settings = Settings() + self.queues = gen_queue_dict() + self.master_key = MasterKey() + self.args = (self.window, self.contact_list, self.group_list, + self.settings, self.queues, self.master_key) + self.log_file = f'{DIR_USER_DATA}{self.settings.software_operation}_logs' self.tfc_log_database = MessageLog(self.log_file, self.master_key.master_key) def tearDown(self) -> None: @@ -316,25 +235,17 @@ class TestLogCommand(TFCTestCase): tear_queues(self.queues) def test_invalid_export(self) -> None: - self.assert_se( - "Error: Invalid number of messages.", - log_command, - UserInput("history a"), - *self.args, - ) + self.assert_se("Error: Invalid number of messages.", + log_command, UserInput("history a"), *self.args) - @mock.patch("getpass.getpass", return_value="test_password") - def test_log_printing(self, _) -> None: + @mock.patch("getpass.getpass", return_value='test_password') + def test_log_printing(self, _: Any) -> None: # Setup os.remove(self.log_file) # Test - self.assert_se( - f"No log database available.", - log_command, - UserInput("history 4"), - *self.args, - ) + self.assert_se(f"No log database available.", + log_command, UserInput("history 4"), *self.args) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1) def 
test_log_printing_when_no_password_is_asked(self) -> None: @@ -343,185 +254,128 @@ class TestLogCommand(TFCTestCase): os.remove(self.log_file) # Test - self.assert_se( - f"No log database available.", - log_command, - UserInput("history 4"), - *self.args, - ) + self.assert_se(f"No log database available.", + log_command, UserInput("history 4"), *self.args) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1) - @mock.patch("getpass.getpass", return_value="test_password") - def test_log_printing_all(self, _) -> None: + @mock.patch("getpass.getpass", return_value='test_password') + def test_log_printing_all(self, _: Any) -> None: # Setup os.remove(self.log_file) # Test - self.assert_se( - f"No log database available.", log_command, UserInput("history"), *self.args - ) + self.assert_se(f"No log database available.", + log_command, UserInput("history"), *self.args) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1) - def test_invalid_number_raises_fr(self) -> None: - self.assert_se( - "Error: Invalid number of messages.", - log_command, - UserInput("history a"), - *self.args, - ) + def test_invalid_number_raises_se(self) -> None: + self.assert_se("Error: Invalid number of messages.", + log_command, UserInput('history a'), *self.args) - def test_too_high_number_raises_fr(self) -> None: - self.assert_se( - "Error: Invalid number of messages.", - log_command, - UserInput("history 94857634985763454345"), - *self.args, - ) + def test_too_high_number_raises_se(self) -> None: + self.assert_se("Error: Invalid number of messages.", + log_command, UserInput('history 94857634985763454345'), *self.args) - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", return_value="No") - def test_user_abort_raises_fr(self, *_) -> None: - self.assert_se( - "Log file export aborted.", log_command, UserInput("export"), *self.args - ) + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', return_value='No') + def test_user_abort_raises_se(self, *_: Any) -> None: + self.assert_se("Log file export aborted.", + log_command, UserInput('export'), *self.args) - @mock.patch("src.common.db_masterkey.MIN_KEY_DERIVATION_TIME", 0.1) - @mock.patch("src.common.db_masterkey.MAX_KEY_DERIVATION_TIME", 1.0) - @mock.patch( - "os.popen", - return_value=MagicMock( - read=MagicMock( - return_value=MagicMock( - splitlines=MagicMock(return_value=["MemAvailable 10240"]) - ) - ) - ), - ) + @mock.patch('src.common.db_masterkey.MIN_KEY_DERIVATION_TIME', 0.1) + @mock.patch('src.common.db_masterkey.MAX_KEY_DERIVATION_TIME', 1.0) + @mock.patch('os.popen', return_value=MagicMock( + read=MagicMock(return_value=MagicMock(splitlines=MagicMock(return_value=["MemAvailable 10240"]))))) @mock.patch("multiprocessing.cpu_count", return_value=1) - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", return_value="Yes") - @mock.patch( - "getpass.getpass", - side_effect=["test_password", "test_password", KeyboardInterrupt], - ) - def test_keyboard_interrupt_raises_fr(self, *_) -> None: - self.master_key = OriginalMasterKey(operation=TX, local_test=True) - self.assert_se( - "Authentication aborted.", log_command, UserInput("export"), *self.args - ) + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', return_value='Yes') + @mock.patch('getpass.getpass', side_effect=['test_password', 'test_password', KeyboardInterrupt]) + def test_keyboard_interrupt_raises_se(self, *_: Any) -> None: + self.master_key = OrigMasterKey(operation=TX, local_test=True) 
+ self.assert_se("Authentication aborted.", + log_command, UserInput('export'), *self.args) - @mock.patch("src.common.db_masterkey.MIN_KEY_DERIVATION_TIME", 0.1) - @mock.patch("src.common.db_masterkey.MAX_KEY_DERIVATION_TIME", 1.0) - @mock.patch( - "os.popen", - return_value=MagicMock( - read=MagicMock( - return_value=MagicMock( - splitlines=MagicMock(return_value=["MemAvailable 10240"]) - ) - ) - ), - ) + @mock.patch('src.common.db_masterkey.MIN_KEY_DERIVATION_TIME', 0.1) + @mock.patch('src.common.db_masterkey.MAX_KEY_DERIVATION_TIME', 1.0) + @mock.patch('os.popen', return_value=MagicMock( + read=MagicMock(return_value=MagicMock(splitlines=MagicMock(return_value=["MemAvailable 10240"]))))) @mock.patch("multiprocessing.cpu_count", return_value=1) - @mock.patch( - "getpass.getpass", - side_effect=3 * ["test_password"] + ["invalid_password"] + ["test_password"], - ) - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", return_value="Yes") - def test_successful_export_command(self, *_) -> None: + @mock.patch("getpass.getpass", side_effect=3*['test_password'] + ['invalid_password'] + ['test_password']) + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', return_value='Yes') + def test_successful_export_command(self, *_: Any) -> None: # Setup - self.master_key = OriginalMasterKey(operation=TX, local_test=True) + self.master_key = OrigMasterKey(operation=TX, local_test=True) self.window.type = WIN_TYPE_CONTACT - self.window.uid = nick_to_pub_key("Alice") - whisper_header = bool_to_bytes(False) - packet = split_to_assembly_packets( - whisper_header + PRIVATE_MESSAGE_HEADER + b"test", MESSAGE - )[0] + self.window.uid = nick_to_pub_key('Alice') + whisper_header = bool_to_bytes(False) + packet = split_to_assembly_packets(whisper_header + PRIVATE_MESSAGE_HEADER + b'test', MESSAGE)[0] self.tfc_log_database.database_key = self.master_key.master_key - write_log_entry(packet, nick_to_pub_key("Alice"), self.tfc_log_database) + write_log_entry(packet, nick_to_pub_key('Alice'), self.tfc_log_database) # Test - for command in ["export", "export 1"]: - self.assert_se( - f"Exported log file of contact 'Alice'.", - log_command, - UserInput(command), - self.window, - ContactList(nicks=["Alice"]), - self.group_list, - self.settings, - self.queues, - self.master_key, - ) + for command in ['export', 'export 1']: + self.assert_se(f"Exported log file of contact 'Alice'.", + log_command, UserInput(command), self.window, ContactList(nicks=['Alice']), + self.group_list, self.settings, self.queues, self.master_key) class TestSendOnionServiceKey(TFCTestCase): - confirmation_code = b"a" + confirmation_code = b'a' def setUp(self) -> None: """Pre-test actions.""" - self.contact_list = ContactList() - self.settings = Settings() + self.contact_list = ContactList() + self.settings = Settings() self.onion_service = OnionService() - self.gateway = Gateway() - self.args = self.contact_list, self.settings, self.onion_service, self.gateway + self.gateway = Gateway() + self.args = self.contact_list, self.settings, self.onion_service, self.gateway - @mock.patch("time.sleep", return_value=None) - @mock.patch("os.urandom", return_value=confirmation_code) - @mock.patch("builtins.input", side_effect=["Yes", confirmation_code.hex()]) - def test_onion_service_key_delivery_traffic_masking(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('os.urandom', return_value=confirmation_code) + @mock.patch('builtins.input', side_effect=['Yes', confirmation_code.hex()]) + 
def test_onion_service_key_delivery_traffic_masking(self, *_: Any) -> None: self.assertIsNone(send_onion_service_key(*self.args)) self.assertEqual(len(self.gateway.packets), 1) - @mock.patch("time.sleep", return_value=None) - @mock.patch("os.urandom", return_value=confirmation_code) - @mock.patch("builtins.input", side_effect=[KeyboardInterrupt, "No"]) - def test_onion_service_key_delivery_traffic_masking_abort(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('os.urandom', return_value=confirmation_code) + @mock.patch('builtins.input', side_effect=[KeyboardInterrupt, 'No']) + def test_onion_service_key_delivery_traffic_masking_abort(self, *_: Any) -> None: # Setup self.settings.traffic_masking = True # Test for _ in range(2): - self.assert_se( - "Onion Service data export canceled.", - send_onion_service_key, - *self.args, - ) + self.assert_se("Onion Service data export canceled.", send_onion_service_key, *self.args) - @mock.patch("os.urandom", return_value=confirmation_code) - @mock.patch("builtins.input", return_value=confirmation_code.hex()) - def test_onion_service_key_delivery(self, *_) -> None: + @mock.patch('os.urandom', return_value=confirmation_code) + @mock.patch('builtins.input', return_value=confirmation_code.hex()) + def test_onion_service_key_delivery(self, *_: Any) -> None: self.assertIsNone(send_onion_service_key(*self.args)) self.assertEqual(len(self.gateway.packets), 1) - @mock.patch("time.sleep", return_value=None) - @mock.patch("os.urandom", return_value=confirmation_code) - @mock.patch("builtins.input", side_effect=[EOFError, KeyboardInterrupt]) - def test_onion_service_key_delivery_cancel(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('os.urandom', return_value=confirmation_code) + @mock.patch('builtins.input', side_effect=[EOFError, KeyboardInterrupt]) + def test_onion_service_key_delivery_cancel(self, *_: Any) -> None: for _ in range(2): - self.assert_se( - "Onion Service data export canceled.", - send_onion_service_key, - *self.args, - ) + self.assert_se("Onion Service data export canceled.", send_onion_service_key, *self.args) class TestPrintHelp(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.settings = Settings() + self.settings = Settings() self.settings.traffic_masking = False - @mock.patch("shutil.get_terminal_size", return_value=[60, 60]) - def test_print_normal(self, _) -> None: - self.assert_prints( - CLEAR_ENTIRE_SCREEN - + CURSOR_LEFT_UP_CORNER - + """\ + @mock.patch('shutil.get_terminal_size', return_value=[60, 60]) + def test_print_normal(self, _: Any) -> None: + self.assert_prints(CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER + """\ List of commands: /about Show links to project resources @@ -605,18 +459,12 @@ Group management: /group rm G Remove group G ──────────────────────────────────────────────────────────── -""", - print_help, - self.settings, - ) +""", print_help, self.settings) - @mock.patch("shutil.get_terminal_size", return_value=[80, 80]) - def test_print_during_traffic_masking(self, _) -> None: + @mock.patch('shutil.get_terminal_size', return_value=[80, 80]) + def test_print_during_traffic_masking(self, _: Any) -> None: self.settings.traffic_masking = True - self.assert_prints( - CLEAR_ENTIRE_SCREEN - + CURSOR_LEFT_UP_CORNER - + """\ + self.assert_prints(CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER + """\ List of commands: /about Show links to project resources @@ -648,48 +496,39 @@ List of commands: Shift + PgUp/PgDn Scroll terminal up/down 
──────────────────────────────────────────────────────────────────────────────── -""", - print_help, - self.settings, - ) +""", print_help, self.settings) class TestPrintRecipients(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.contact_list = ContactList(nicks=["Alice", "Bob"]) - self.group_list = GroupList(groups=["test_group", "test_group_2"]) - self.args = self.contact_list, self.group_list + self.contact_list = ContactList(nicks=['Alice', 'Bob']) + self.group_list = GroupList(groups=['test_group', 'test_group_2']) + self.args = self.contact_list, self.group_list def test_printing(self) -> None: self.assertIsNone(print_recipients(*self.args)) class TestChangeMasterKey(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.unit_test_dir = cd_unit_test() - self.contact_list = ContactList() - self.group_list = GroupList() - self.settings = Settings() - self.queues = gen_queue_dict() - self.master_key = MasterKey() - self.file_name = f"{DIR_USER_DATA}/unittest" - self.log_file = f"{DIR_USER_DATA}{self.settings.software_operation}_logs" + self.unit_test_dir = cd_unit_test() + self.contact_list = ContactList() + self.group_list = GroupList() + self.settings = Settings() + self.queues = gen_queue_dict() + self.master_key = MasterKey() + self.file_name = f'{DIR_USER_DATA}/unittest' + self.log_file = f'{DIR_USER_DATA}{self.settings.software_operation}_logs' self.tfc_log_database = MessageLog(self.log_file, self.master_key.master_key) - self.onion_service = OnionService( - master_key=self.master_key, - file_name=self.file_name, - database=TFCDatabase(self.file_name, self.master_key), - ) - self.args = ( - self.contact_list, - self.group_list, - self.settings, - self.queues, - self.master_key, - self.onion_service, - ) + self.onion_service = OnionService(master_key=self.master_key, + file_name=self.file_name, + database=TFCDatabase(self.file_name, self.master_key)) + self.args = (self.contact_list, self.group_list, self.settings, + self.queues, self.master_key, self.onion_service) def tearDown(self) -> None: """Post-test actions.""" @@ -701,127 +540,85 @@ class TestChangeMasterKey(TFCTestCase): self.settings.traffic_masking = True # Test - self.assert_se( - "Error: Command is disabled during traffic masking.", - change_master_key, - UserInput(), - *self.args, - ) + self.assert_se("Error: Command is disabled during traffic masking.", + change_master_key, UserInput(), *self.args) - def test_missing_target_sys_raises_fr(self) -> None: - self.assert_se( - "Error: No target-system ('tx' or 'rx') specified.", - change_master_key, - UserInput("passwd "), - *self.args, - ) + def test_missing_target_sys_raises_se(self) -> None: + self.assert_se("Error: No target-system ('tx' or 'rx') specified.", + change_master_key, UserInput("passwd "), *self.args) - @mock.patch("getpass.getpass", return_value="test_password") - def test_invalid_target_sys_raises_fr(self, _) -> None: - self.assert_se( - "Error: Invalid target system 't'.", - change_master_key, - UserInput("passwd t"), - *self.args, - ) + @mock.patch('getpass.getpass', return_value='test_password') + def test_invalid_target_sys_raises_se(self, _: Any) -> None: + self.assert_se("Error: Invalid target system 't'.", + change_master_key, UserInput("passwd t"), *self.args) - @mock.patch("src.common.db_keys.KeyList", return_value=MagicMock()) - @mock.patch( - "os.popen", - return_value=MagicMock(read=MagicMock(return_value="foo\nMemAvailable 200")), - ) - @mock.patch("getpass.getpass", side_effect=["test_password", "a", 
"a"]) - @mock.patch("time.sleep", return_value=None) - @mock.patch("src.common.db_masterkey.MIN_KEY_DERIVATION_TIME", 0.01) - def test_invalid_response_from_key_db_raises_fr(self, *_) -> None: + @mock.patch('src.common.db_keys.KeyList', return_value=MagicMock()) + @mock.patch('os.popen', return_value=MagicMock(read=MagicMock(return_value='foo\nMemAvailable 200'))) + @mock.patch('getpass.getpass', side_effect=['test_password', 'a', 'a']) + @mock.patch('time.sleep', return_value=None) + @mock.patch('src.common.db_masterkey.MIN_KEY_DERIVATION_TIME', 0.01) + def test_invalid_response_from_key_db_raises_se(self, *_: Any) -> None: # Setup def mock_sender_loop() -> None: """Mock sender loop key management functionality.""" while self.queues[KEY_MANAGEMENT_QUEUE].empty(): time.sleep(0.1) - if ( - self.queues[KEY_MANAGEMENT_QUEUE].get()[0] - == KDB_M_KEY_CHANGE_HALT_HEADER - ): - self.queues[KEY_MGMT_ACK_QUEUE].put("WRONG_HEADER") + if self.queues[KEY_MANAGEMENT_QUEUE].get()[0] == KDB_M_KEY_CHANGE_HALT_HEADER: + self.queues[KEY_MGMT_ACK_QUEUE].put('WRONG_HEADER') p = Process(target=mock_sender_loop, args=()) p.start() # Test - self.assert_se( - "Error: Key database returned wrong signal.", - change_master_key, - UserInput("passwd tx"), - *self.args, - ) + self.assert_se("Error: Key database returned wrong signal.", + change_master_key, UserInput("passwd tx"), *self.args) # Teardown p.terminate() - @mock.patch("src.common.db_keys.KeyList", return_value=MagicMock()) - @mock.patch( - "os.popen", - return_value=MagicMock(read=MagicMock(return_value="foo\nMemAvailable 200")), - ) - @mock.patch("getpass.getpass", side_effect=["test_password", "a", "a"]) - @mock.patch("time.sleep", return_value=None) - @mock.patch("src.common.db_masterkey.MIN_KEY_DERIVATION_TIME", 0.01) - def test_transmitter_command_raises_system_exit_if_key_database_returns_invalid_master_key( - self, *_ - ) -> None: + @mock.patch('src.common.db_keys.KeyList', return_value=MagicMock()) + @mock.patch('os.popen', return_value=MagicMock(read=MagicMock(return_value='foo\nMemAvailable 200'))) + @mock.patch('getpass.getpass', side_effect=['test_password', 'a', 'a']) + @mock.patch('time.sleep', return_value=None) + @mock.patch('src.common.db_masterkey.MIN_KEY_DERIVATION_TIME', 0.01) + def test_transmitter_command_raises_system_exit_if_key_database_returns_invalid_master_key(self, *_: Any) -> None: # Setup def mock_sender_loop() -> None: """Mock sender loop key management functionality.""" while self.queues[KEY_MANAGEMENT_QUEUE].empty(): time.sleep(0.1) - if ( - self.queues[KEY_MANAGEMENT_QUEUE].get()[0] - == KDB_M_KEY_CHANGE_HALT_HEADER - ): + if self.queues[KEY_MANAGEMENT_QUEUE].get()[0] == KDB_M_KEY_CHANGE_HALT_HEADER: self.queues[KEY_MGMT_ACK_QUEUE].put(KDB_HALT_ACK_HEADER) while self.queues[KEY_MANAGEMENT_QUEUE].empty(): time.sleep(0.1) _ = self.queues[KEY_MANAGEMENT_QUEUE].get() - self.queues[KEY_MGMT_ACK_QUEUE].put(b"invalid_master_key") + self.queues[KEY_MGMT_ACK_QUEUE].put(b'invalid_master_key') p = Process(target=mock_sender_loop, args=()) p.start() - self.contact_list.file_name = f"{DIR_USER_DATA}{TX}_contacts" - self.group_list.file_name = f"{DIR_USER_DATA}{TX}_groups" - self.settings.file_name = f"{DIR_USER_DATA}{TX}_settings" - self.onion_service.file_name = f"{DIR_USER_DATA}{TX}_onion_db" + self.contact_list.file_name = f'{DIR_USER_DATA}{TX}_contacts' + self.group_list.file_name = f'{DIR_USER_DATA}{TX}_groups' + self.settings.file_name = f'{DIR_USER_DATA}{TX}_settings' + self.onion_service.file_name = 
f'{DIR_USER_DATA}{TX}_onion_db' - self.contact_list.database = TFCDatabase( - self.contact_list.file_name, self.contact_list.master_key - ) - self.group_list.database = TFCDatabase( - self.group_list.file_name, self.group_list.master_key - ) - self.settings.database = TFCDatabase( - self.settings.file_name, self.settings.master_key - ) - self.onion_service.database = TFCDatabase( - self.onion_service.file_name, self.onion_service.master_key - ) + self.contact_list.database = TFCDatabase(self.contact_list.file_name, self.contact_list.master_key) + self.group_list.database = TFCDatabase(self.group_list.file_name, self.group_list.master_key) + self.settings.database = TFCDatabase(self.settings.file_name, self.settings.master_key) + self.onion_service.database = TFCDatabase(self.onion_service.file_name, self.onion_service.master_key) orig_cl_rd = self.contact_list.database.replace_database orig_gl_rd = self.group_list.database.replace_database orig_st_rd = self.settings.database.replace_database orig_os_rd = self.onion_service.database.replace_database - self.contact_list.database.replace_database = lambda: None - self.group_list.database.replace_database = lambda: None - self.settings.database.replace_database = lambda: None + self.contact_list.database.replace_database = lambda: None + self.group_list.database.replace_database = lambda: None + self.settings.database.replace_database = lambda: None self.onion_service.database.replace_database = lambda: None - write_log_entry( - M_S_HEADER + PADDING_LENGTH * b"a", - nick_to_pub_key("Alice"), - self.tfc_log_database, - ) + write_log_entry(M_S_HEADER + PADDING_LENGTH * b'a', nick_to_pub_key('Alice'), self.tfc_log_database) # Test with self.assertRaises(SystemExit): @@ -830,29 +627,23 @@ class TestChangeMasterKey(TFCTestCase): # Teardown p.terminate() - self.contact_list.database.replace_database = orig_cl_rd - self.group_list.database.replace_database = orig_gl_rd - self.settings.database.replace_database = orig_st_rd + self.contact_list.database.replace_database = orig_cl_rd + self.group_list.database.replace_database = orig_gl_rd + self.settings.database.replace_database = orig_st_rd self.onion_service.database.replace_database = orig_os_rd - @mock.patch("src.common.db_keys.KeyList", return_value=MagicMock()) - @mock.patch( - "os.popen", - return_value=MagicMock(read=MagicMock(return_value="foo\nMemAvailable 200")), - ) - @mock.patch("getpass.getpass", side_effect=["test_password", "a", "a"]) - @mock.patch("time.sleep", return_value=None) - @mock.patch("src.common.db_masterkey.MIN_KEY_DERIVATION_TIME", 0.01) - def test_transmitter_command(self, *_) -> None: + @mock.patch('src.common.db_keys.KeyList', return_value=MagicMock()) + @mock.patch('os.popen', return_value=MagicMock(read=MagicMock(return_value='foo\nMemAvailable 200'))) + @mock.patch('getpass.getpass', side_effect=['test_password', 'a', 'a']) + @mock.patch('time.sleep', return_value=None) + @mock.patch('src.common.db_masterkey.MIN_KEY_DERIVATION_TIME', 0.01) + def test_transmitter_command(self, *_: Any) -> None: # Setup def mock_sender_loop() -> None: """Mock sender loop key management functionality.""" while self.queues[KEY_MANAGEMENT_QUEUE].empty(): time.sleep(0.1) - if ( - self.queues[KEY_MANAGEMENT_QUEUE].get()[0] - == KDB_M_KEY_CHANGE_HALT_HEADER - ): + if self.queues[KEY_MANAGEMENT_QUEUE].get()[0] == KDB_M_KEY_CHANGE_HALT_HEADER: self.queues[KEY_MGMT_ACK_QUEUE].put(KDB_HALT_ACK_HEADER) while self.queues[KEY_MANAGEMENT_QUEUE].empty(): @@ -863,48 +654,36 @@ class 
TestChangeMasterKey(TFCTestCase): p = Process(target=mock_sender_loop, args=()) p.start() - self.contact_list.file_name = f"{DIR_USER_DATA}{TX}_contacts" - self.group_list.file_name = f"{DIR_USER_DATA}{TX}_groups" - self.settings.file_name = f"{DIR_USER_DATA}{TX}_settings" - self.onion_service.file_name = f"{DIR_USER_DATA}{TX}_onion_db" + self.contact_list.file_name = f'{DIR_USER_DATA}{TX}_contacts' + self.group_list.file_name = f'{DIR_USER_DATA}{TX}_groups' + self.settings.file_name = f'{DIR_USER_DATA}{TX}_settings' + self.onion_service.file_name = f'{DIR_USER_DATA}{TX}_onion_db' - self.contact_list.database = TFCDatabase( - self.contact_list.file_name, self.contact_list.master_key - ) - self.group_list.database = TFCDatabase( - self.group_list.file_name, self.group_list.master_key - ) - self.settings.database = TFCDatabase( - self.settings.file_name, self.settings.master_key - ) - self.onion_service.database = TFCDatabase( - self.onion_service.file_name, self.onion_service.master_key - ) + self.contact_list.database = TFCDatabase(self.contact_list.file_name, self.contact_list.master_key) + self.group_list.database = TFCDatabase(self.group_list.file_name, self.group_list.master_key) + self.settings.database = TFCDatabase(self.settings.file_name, self.settings.master_key) + self.onion_service.database = TFCDatabase(self.onion_service.file_name, self.onion_service.master_key) orig_cl_rd = self.contact_list.database.replace_database orig_gl_rd = self.group_list.database.replace_database orig_st_rd = self.settings.database.replace_database orig_os_rd = self.onion_service.database.replace_database - self.contact_list.database.replace_database = lambda: None - self.group_list.database.replace_database = lambda: None - self.settings.database.replace_database = lambda: None + self.contact_list.database.replace_database = lambda: None + self.group_list.database.replace_database = lambda: None + self.settings.database.replace_database = lambda: None self.onion_service.database.replace_database = lambda: None - write_log_entry( - M_S_HEADER + PADDING_LENGTH * b"a", - nick_to_pub_key("Alice"), - self.tfc_log_database, - ) + write_log_entry(M_S_HEADER + PADDING_LENGTH * b'a', nick_to_pub_key('Alice'), self.tfc_log_database) # Test self.assertIsNone(change_master_key(UserInput("passwd tx"), *self.args)) p.terminate() # Teardown - self.contact_list.database.replace_database = orig_cl_rd - self.group_list.database.replace_database = orig_gl_rd - self.settings.database.replace_database = orig_st_rd + self.contact_list.database.replace_database = orig_cl_rd + self.group_list.database.replace_database = orig_gl_rd + self.settings.database.replace_database = orig_st_rd self.onion_service.database.replace_database = orig_os_rd def test_receiver_command(self) -> None: @@ -912,35 +691,25 @@ class TestChangeMasterKey(TFCTestCase): self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1) self.assertEqual(self.queues[KEY_MANAGEMENT_QUEUE].qsize(), 0) - @mock.patch("time.sleep", return_value=None) - @mock.patch("getpass.getpass", side_effect=KeyboardInterrupt) - def test_keyboard_interrupt_raises_fr(self, *_) -> None: - self.assert_se( - "Authentication aborted.", - change_master_key, - UserInput("passwd tx"), - *self.args, - ) + @mock.patch('time.sleep', return_value=None) + @mock.patch('getpass.getpass', side_effect=KeyboardInterrupt) + def test_keyboard_interrupt_raises_se(self, *_: Any) -> None: + self.assert_se("Authentication aborted.", change_master_key, UserInput("passwd tx"), *self.args) class 
TestRemoveLog(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.unit_test_dir = cd_unit_test() - self.contact_list = ContactList(nicks=["Alice"]) - self.group_list = GroupList(groups=["test_group"]) - self.settings = Settings() - self.queues = gen_queue_dict() - self.master_key = MasterKey() - self.file_name = f"{DIR_USER_DATA}{self.settings.software_operation}_logs" - self.args = ( - self.contact_list, - self.group_list, - self.settings, - self.queues, - self.master_key, - ) - self.log_file = f"{DIR_USER_DATA}{self.settings.software_operation}_logs" + self.unit_test_dir = cd_unit_test() + self.contact_list = ContactList(nicks=['Alice']) + self.group_list = GroupList(groups=['test_group']) + self.settings = Settings() + self.queues = gen_queue_dict() + self.master_key = MasterKey() + self.file_name = f'{DIR_USER_DATA}{self.settings.software_operation}_logs' + self.args = self.contact_list, self.group_list, self.settings, self.queues, self.master_key + self.log_file = f'{DIR_USER_DATA}{self.settings.software_operation}_logs' self.tfc_log_database = MessageLog(self.log_file, self.master_key.master_key) def tearDown(self) -> None: @@ -948,292 +717,179 @@ class TestRemoveLog(TFCTestCase): tear_queues(self.queues) cleanup(self.unit_test_dir) - def test_missing_contact_raises_fr(self) -> None: - self.assert_se( - "Error: No contact/group specified.", remove_log, UserInput(""), *self.args - ) + def test_missing_contact_raises_se(self) -> None: + self.assert_se("Error: No contact/group specified.", + remove_log, UserInput(''), *self.args) - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", return_value="No") - def test_no_aborts_removal(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', return_value='No') + def test_no_aborts_removal(self, *_: Any) -> None: # Setup - self.assertIsNone( - write_log_entry( - M_S_HEADER + PADDING_LENGTH * b"a", - nick_to_pub_key("Alice"), - self.tfc_log_database, - ) - ) + self.assertIsNone(write_log_entry(M_S_HEADER + PADDING_LENGTH * b'a', nick_to_pub_key('Alice'), + self.tfc_log_database)) # Test - self.assert_se( - "Log file removal aborted.", - remove_log, - UserInput("/rmlogs Alice"), - *self.args, - ) + self.assert_se("Log file removal aborted.", + remove_log, UserInput('/rmlogs Alice'), *self.args) - @mock.patch("shutil.get_terminal_size", return_value=[150, 150]) - @mock.patch("builtins.input", return_value="Yes") - def test_removal_with_invalid_account_raises_fr(self, *_) -> None: - self.assert_se( - "Error: Invalid account.", - remove_log, - UserInput(f'/rmlogs {nick_to_onion_address("Alice")[:-1] + "a"}'), - *self.args, - ) + @mock.patch('shutil.get_terminal_size', return_value=[150, 150]) + @mock.patch('builtins.input', return_value='Yes') + def test_removal_with_invalid_account_raises_se(self, *_: Any) -> None: + self.assert_se("Error: Invalid account.", + remove_log, UserInput(f'/rmlogs {nick_to_onion_address("Alice")[:-1] + "a"}'), *self.args) - @mock.patch("builtins.input", return_value="Yes") - def test_invalid_group_id_raises_fr(self, _) -> None: - self.assert_se( - "Error: Invalid group ID.", - remove_log, - UserInput(f'/rmlogs {group_name_to_group_id("test_group")[:-1] + b"a"}'), - *self.args, - ) + @mock.patch('builtins.input', return_value='Yes') + def test_invalid_group_id_raises_se(self, _: Any) -> None: + self.assert_se("Error: Invalid group ID.", + remove_log, UserInput(f'/rmlogs {group_name_to_group_id("test_group")[:-1] + b"a"}'), *self.args) - 
@mock.patch("builtins.input", return_value="Yes") - def test_log_remove_with_nick(self, _) -> None: + @mock.patch('builtins.input', return_value='Yes') + def test_log_remove_with_nick(self, _: Any) -> None: # Setup - write_log_entry( - M_S_HEADER + PADDING_LENGTH * b"a", - nick_to_pub_key("Alice"), - self.tfc_log_database, - ) + write_log_entry(M_S_HEADER + PADDING_LENGTH * b'a', nick_to_pub_key("Alice"), self.tfc_log_database) # Test - self.assert_se( - "Removed log entries for contact 'Alice'.", - remove_log, - UserInput("/rmlogs Alice"), - *self.args, - ) + self.assert_se("Removed log entries for contact 'Alice'.", + remove_log, UserInput('/rmlogs Alice'), *self.args) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1) - @mock.patch("shutil.get_terminal_size", return_value=[150, 150]) - @mock.patch("builtins.input", return_value="Yes") - def test_log_remove_with_onion_address(self, *_) -> None: + @mock.patch('shutil.get_terminal_size', return_value=[150, 150]) + @mock.patch('builtins.input', return_value='Yes') + def test_log_remove_with_onion_address(self, *_: Any) -> None: # Setup - write_log_entry( - M_S_HEADER + PADDING_LENGTH * b"a", - nick_to_pub_key("Alice"), - self.tfc_log_database, - ) + write_log_entry(M_S_HEADER + PADDING_LENGTH * b'a', nick_to_pub_key("Alice"), self.tfc_log_database) # Test - self.assert_se( - "Removed log entries for contact 'Alice'.", - remove_log, - UserInput(f'/rmlogs {nick_to_onion_address("Alice")}'), - *self.args, - ) + self.assert_se("Removed log entries for contact 'Alice'.", + remove_log, UserInput(f'/rmlogs {nick_to_onion_address("Alice")}'), *self.args) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1) - @mock.patch("shutil.get_terminal_size", return_value=[150, 150]) - @mock.patch("builtins.input", return_value="Yes") - def test_log_remove_with_unknown_onion_address(self, *_) -> None: + @mock.patch('shutil.get_terminal_size', return_value=[150, 150]) + @mock.patch('builtins.input', return_value='Yes') + def test_log_remove_with_unknown_onion_address(self, *_: Any) -> None: # Setup - write_log_entry( - M_S_HEADER + PADDING_LENGTH * b"a", - nick_to_pub_key("Alice"), - self.tfc_log_database, - ) + write_log_entry(M_S_HEADER + PADDING_LENGTH * b'a', nick_to_pub_key("Alice"), self.tfc_log_database) # Test - self.assert_se( - "Found no log entries for contact 'w5sm3'.", - remove_log, - UserInput(f'/rmlogs {nick_to_onion_address("Unknown")}'), - *self.args, - ) + self.assert_se("Found no log entries for contact 'w5sm3'.", + remove_log, UserInput(f'/rmlogs {nick_to_onion_address("Unknown")}'), *self.args) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1) - @mock.patch("builtins.input", return_value="Yes") - def test_log_remove_with_group_name(self, _) -> None: + @mock.patch('builtins.input', return_value='Yes') + def test_log_remove_with_group_name(self, _: Any) -> None: # Setup - for p in assembly_packet_creator( - MESSAGE, - "This is a short group message", - group_id=group_name_to_group_id("test_group"), - ): - write_log_entry(p, nick_to_pub_key("Alice"), self.tfc_log_database) + for p in assembly_packet_creator(MESSAGE, 'This is a short group message', + group_id=group_name_to_group_id('test_group')): + write_log_entry(p, nick_to_pub_key('Alice'), self.tfc_log_database) # Test - self.assert_se( - "Removed log entries for group 'test_group'.", - remove_log, - UserInput(f"/rmlogs test_group"), - *self.args, - ) + self.assert_se("Removed log entries for group 'test_group'.", + remove_log, UserInput(f'/rmlogs 
test_group'), *self.args) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1) - @mock.patch("builtins.input", return_value="Yes") - def test_unknown_selector_raises_fr(self, _) -> None: + @mock.patch('builtins.input', return_value='Yes') + def test_unknown_selector_raises_se(self, _: Any) -> None: # Setup - write_log_entry( - M_S_HEADER + PADDING_LENGTH * b"a", - nick_to_pub_key("Alice"), - self.tfc_log_database, - ) + write_log_entry(M_S_HEADER + PADDING_LENGTH * b'a', nick_to_pub_key("Alice"), self.tfc_log_database) # Test - self.assert_se( - "Error: Unknown selector.", - remove_log, - UserInput(f"/rmlogs unknown"), - *self.args, - ) + self.assert_se("Error: Unknown selector.", remove_log, UserInput(f'/rmlogs unknown'), *self.args) class TestChangeSetting(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.window = TxWindow() + self.window = TxWindow() self.contact_list = ContactList() - self.group_list = GroupList() - self.settings = Settings() - self.queues = gen_queue_dict() - self.master_key = MasterKey() - self.gateway = Gateway() - self.args = ( - self.window, - self.contact_list, - self.group_list, - self.settings, - self.queues, - self.master_key, - self.gateway, - ) + self.group_list = GroupList() + self.settings = Settings() + self.queues = gen_queue_dict() + self.master_key = MasterKey() + self.gateway = Gateway() + self.args = (self.window, self.contact_list, self.group_list, + self.settings, self.queues, self.master_key, self.gateway) def tearDown(self) -> None: """Post-test actions.""" tear_queues(self.queues) - def test_missing_setting_raises_fr(self) -> None: - self.assert_se( - "Error: No setting specified.", change_setting, UserInput("set"), *self.args - ) + def test_missing_setting_raises_se(self) -> None: + self.assert_se("Error: No setting specified.", + change_setting, UserInput('set'), *self.args) - def test_invalid_setting_raises_fr(self) -> None: - self.assert_se( - "Error: Invalid setting 'e_correction_ratia'.", - change_setting, - UserInput("set e_correction_ratia true"), - *self.args, - ) + def test_invalid_setting_raises_se(self) -> None: + self.assert_se("Error: Invalid setting 'e_correction_ratia'.", + change_setting, UserInput("set e_correction_ratia true"), *self.args) - def test_missing_value_raises_fr(self) -> None: - self.assert_se( - "Error: No value for setting specified.", - change_setting, - UserInput("set serial_error_correction"), - *self.args, - ) + def test_missing_value_raises_se(self) -> None: + self.assert_se("Error: No value for setting specified.", + change_setting, UserInput("set serial_error_correction"), *self.args) - def test_serial_settings_raise_fr(self) -> None: - self.assert_se( - "Error: Serial interface setting can only be changed manually.", - change_setting, - UserInput("set use_serial_usb_adapter True"), - *self.args, - ) + def test_serial_settings_raise_se(self) -> None: + self.assert_se("Error: Serial interface setting can only be changed manually.", + change_setting, UserInput("set use_serial_usb_adapter True"), *self.args) - self.assert_se( - "Error: Serial interface setting can only be changed manually.", - change_setting, - UserInput("set built_in_serial_interface Truej"), - *self.args, - ) + self.assert_se("Error: Serial interface setting can only be changed manually.", + change_setting, UserInput("set built_in_serial_interface Truej"), *self.args) - @mock.patch("time.sleep", return_value=None) - @mock.patch("getpass.getpass", side_effect=[KeyboardInterrupt]) - def 
test_changing_ask_password_for_log_access_asks_for_password(self, *_) -> None: - self.assert_se( - "Authentication aborted.", - change_setting, - UserInput("set ask_password_for_log_access False"), - *self.args, - ) + @mock.patch('time.sleep', return_value=None) + @mock.patch('getpass.getpass', side_effect=[KeyboardInterrupt]) + def test_changing_ask_password_for_log_access_asks_for_password(self, *_: Any) -> None: + self.assert_se("Authentication aborted.", + change_setting, UserInput("set ask_password_for_log_access False"), *self.args) - @mock.patch("time.sleep", return_value=None) - @mock.patch("getpass.getpass", return_value="invalid_password") - def test_invalid_password_raises_function_return(self, *_) -> None: - self.assert_se( - "Error: No permission to change setting.", - change_setting, - UserInput("set ask_password_for_log_access False"), - *self.args, - ) + @mock.patch('time.sleep', return_value=None) + @mock.patch('getpass.getpass', return_value='invalid_password') + def test_invalid_password_raises_function_return(self, *_: Any) -> None: + self.assert_se("Error: No permission to change setting.", + change_setting, UserInput("set ask_password_for_log_access False"), *self.args) def test_relay_commands_raise_fr_when_traffic_masking_is_enabled(self) -> None: # Setup self.settings.traffic_masking = True # Test - key_list = [ - "serial_error_correction", - "serial_baudrate", - "allow_contact_requests", - ] - for key, value in zip(key_list, ["5", "5", "True"]): - self.assert_se( - "Error: Can't change this setting during traffic masking.", - change_setting, - UserInput(f"set {key} {value}"), - *self.args, - ) + key_list = ['serial_error_correction', 'serial_baudrate', 'allow_contact_requests'] + for key, value in zip(key_list, ['5', '5', 'True']): + self.assert_se("Error: Can't change this setting during traffic masking.", + change_setting, UserInput(f"set {key} {value}"), *self.args) def test_individual_settings(self) -> None: - self.assertIsNone( - change_setting(UserInput("set serial_error_correction 5"), *self.args) - ) + self.assertIsNone(change_setting(UserInput("set serial_error_correction 5"), *self.args)) self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1) - self.assertIsNone( - change_setting(UserInput("set serial_baudrate 9600"), *self.args) - ) + self.assertIsNone(change_setting(UserInput("set serial_baudrate 9600"), *self.args)) self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 2) - self.assertIsNone( - change_setting(UserInput("set allow_contact_requests True"), *self.args) - ) + self.assertIsNone(change_setting(UserInput("set allow_contact_requests True"), *self.args)) self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 3) - self.assertIsNone( - change_setting(UserInput("set traffic_masking True"), *self.args) - ) + self.assertIsNone(change_setting(UserInput("set traffic_masking True"), *self.args)) self.assertIsInstance(self.queues[SENDER_MODE_QUEUE].get(), Settings) self.assertTrue(self.queues[TRAFFIC_MASKING_QUEUE].get()) self.settings.traffic_masking = False - self.assertIsNone( - change_setting(UserInput("set max_number_of_group_members 100"), *self.args) - ) + self.assertIsNone(change_setting(UserInput("set max_number_of_group_members 100"), *self.args)) self.assertTrue(self.group_list.store_groups_called) self.group_list.store_groups_called = False - self.assertIsNone( - change_setting(UserInput("set max_number_of_groups 100"), *self.args) - ) + self.assertIsNone(change_setting(UserInput("set max_number_of_groups 100"), *self.args)) 
self.assertTrue(self.group_list.store_groups_called) self.group_list.store_groups_called = False - self.assertIsNone( - change_setting(UserInput("set max_number_of_contacts 100"), *self.args) - ) + self.assertIsNone(change_setting(UserInput("set max_number_of_contacts 100"), *self.args)) self.assertEqual(self.queues[KEY_MANAGEMENT_QUEUE].qsize(), 1) - self.assertIsNone( - change_setting(UserInput("set log_file_masking True"), *self.args) - ) + self.assertIsNone(change_setting(UserInput("set log_file_masking True"), *self.args)) self.assertTrue(self.queues[LOGFILE_MASKING_QUEUE].get()) class TestPrintSettings(TFCTestCase): + def test_print_settings(self) -> None: - self.assert_prints( - f"""\ + self.assert_prints(f"""\ {CLEAR_ENTIRE_SCREEN}{CURSOR_LEFT_UP_CORNER} Setting name Current value Default value Description ──────────────────────────────────────────────────────────────────────────────── @@ -1339,14 +995,11 @@ serial_error_correction 5 5 Number of byte recover from -""", - print_settings, - Settings(), - Gateway(), - ) +""", print_settings, Settings(), Gateway()) class TestRxPDisplayUnread(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" self.queues = gen_queue_dict() @@ -1361,31 +1014,30 @@ class TestRxPDisplayUnread(unittest.TestCase): class TestVerify(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.window = TxWindow( - uid=nick_to_pub_key("Alice"), - name="Alice", - window_contacts=[create_contact("test_group")], - log_messages=True, - type=WIN_TYPE_CONTACT, - ) - self.contact_list = ContactList(nicks=["Alice"]) - self.contact = self.contact_list.get_contact_by_address_or_nick("Alice") + self.window = TxWindow(uid=nick_to_pub_key("Alice"), + name='Alice', + window_contacts=[create_contact('test_group')], + log_messages=True, + type=WIN_TYPE_CONTACT) + self.contact_list = ContactList(nicks=['Alice']) + self.contact = self.contact_list.get_contact_by_address_or_nick('Alice') self.window.contact = self.contact - self.args = self.window, self.contact_list + self.args = self.window, self.contact_list - def test_active_group_raises_fr(self) -> None: + def test_active_group_raises_se(self) -> None: self.window.type = WIN_TYPE_GROUP self.assert_se("Error: A group is selected.", verify, *self.args) - def test_psk_raises_fr(self) -> None: + def test_psk_raises_se(self) -> None: self.contact.kex_status = KEX_STATUS_NO_RX_PSK self.assert_se("Pre-shared keys have no fingerprints.", verify, *self.args) - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", side_effect=["No", "Yes"]) - def test_fingerprint_check(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', side_effect=['No', 'Yes']) + def test_fingerprint_check(self, *_: Any) -> None: self.contact.kex_status = KEX_STATUS_VERIFIED self.assertIsNone(verify(*self.args)) @@ -1394,131 +1046,101 @@ class TestVerify(TFCTestCase): self.assertIsNone(verify(*self.args)) self.assertEqual(self.contact.kex_status, KEX_STATUS_VERIFIED) - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", side_effect=KeyboardInterrupt) - def test_keyboard_interrupt_raises_fr(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', side_effect=KeyboardInterrupt) + def test_keyboard_interrupt_raises_se(self, *_: Any) -> None: self.contact.kex_status = KEX_STATUS_VERIFIED self.assert_se("Fingerprint verification aborted.", verify, *self.args) self.assertEqual(self.contact.kex_status, 
KEX_STATUS_VERIFIED) class TestWhisper(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.window = TxWindow( - uid=nick_to_pub_key("Alice"), - name="Alice", - window_contacts=[create_contact("Alice")], - log_messages=True, - ) + self.window = TxWindow(uid=nick_to_pub_key("Alice"), + name='Alice', + window_contacts=[create_contact('Alice')], + log_messages=True) self.settings = Settings() - self.queues = gen_queue_dict() - self.args = self.window, self.settings, self.queues + self.queues = gen_queue_dict() + self.args = self.window, self.settings, self.queues - def test_empty_input_raises_fr(self) -> None: - self.assert_se( - "Error: No whisper message specified.", - whisper, - UserInput("whisper"), - *self.args, - ) + def test_empty_input_raises_se(self) -> None: + self.assert_se("Error: No whisper message specified.", + whisper, UserInput("whisper"), *self.args) def test_whisper(self) -> None: - self.assertIsNone( - whisper( - UserInput("whisper This message ought not to be logged."), *self.args - ) - ) + self.assertIsNone(whisper(UserInput("whisper This message ought not to be logged."), *self.args)) - message, pub_key, logging, log_as_ph, win_uid = self.queues[ - MESSAGE_PACKET_QUEUE - ].get() - self.assertIsInstance(message, bytes) + message, pub_key, logging, log_as_ph, win_uid = self.queues[MESSAGE_PACKET_QUEUE].get() self.assertEqual(pub_key, nick_to_pub_key("Alice")) self.assertTrue(logging) self.assertTrue(log_as_ph) - self.assertEqual(win_uid, nick_to_pub_key("Alice")) class TestWhois(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.contact_list = ContactList(nicks=["Alice"]) - self.group_list = GroupList(groups=["test_group"]) - self.args = self.contact_list, self.group_list + self.contact_list = ContactList(nicks=['Alice']) + self.group_list = GroupList(groups=['test_group']) + self.args = self.contact_list, self.group_list - def test_missing_selector_raises_fr(self) -> None: - self.assert_se( - "Error: No account or nick specified.", - whois, - UserInput("whois"), - *self.args, - ) + def test_missing_selector_raises_se(self) -> None: + self.assert_se("Error: No account or nick specified.", whois, UserInput("whois"), *self.args) - def test_unknown_account_raises_fr(self) -> None: - self.assert_se( - "Error: Unknown selector.", whois, UserInput("whois alice"), *self.args - ) + def test_unknown_account_raises_se(self) -> None: + self.assert_se("Error: Unknown selector.", whois, UserInput("whois alice"), *self.args) def test_nick_from_account(self) -> None: self.assert_prints( f"""\ {BOLD_ON} Nick of 'hpcrayuxhrcy2wtpfwgwjibderrvjll6azfr4tqat3eka2m2gbb55bid' is {NORMAL_TEXT} {BOLD_ON} Alice {NORMAL_TEXT}\n""", - whois, - UserInput("whois hpcrayuxhrcy2wtpfwgwjibderrvjll6azfr4tqat3eka2m2gbb55bid"), - *self.args, - ) + whois, UserInput("whois hpcrayuxhrcy2wtpfwgwjibderrvjll6azfr4tqat3eka2m2gbb55bid"), *self.args) def test_account_from_nick(self) -> None: self.assert_prints( f"""\ {BOLD_ON} Account of 'Alice' is {NORMAL_TEXT} {BOLD_ON} hpcrayuxhrcy2wtpfwgwjibderrvjll6azfr4tqat3eka2m2gbb55bid {NORMAL_TEXT}\n""", - whois, - UserInput("whois Alice"), - *self.args, - ) + whois, UserInput("whois Alice"), *self.args) def test_group_id_from_group_name(self) -> None: self.assert_prints( f"""\ {BOLD_ON} Group ID of group 'test_group' is {NORMAL_TEXT} {BOLD_ON} 2dbCCptB9UGo9 {NORMAL_TEXT}\n""", - whois, - UserInput(f"whois test_group"), - *self.args, - ) + whois, UserInput(f"whois test_group"), *self.args) def test_group_name_from_group_id(self) -> 
None: self.assert_prints( f"""\ {BOLD_ON} Name of group with ID '2dbCCptB9UGo9' is {NORMAL_TEXT} {BOLD_ON} test_group {NORMAL_TEXT}\n""", - whois, - UserInput("whois 2dbCCptB9UGo9"), - *self.args, - ) + whois, UserInput("whois 2dbCCptB9UGo9"), *self.args) class TestWipe(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" self.settings = Settings() - self.queues = gen_queue_dict() - self.gateway = Gateway() - self.args = self.settings, self.queues, self.gateway + self.queues = gen_queue_dict() + self.gateway = Gateway() + self.args = self.settings, self.queues, self.gateway - @mock.patch("builtins.input", return_value="No") - def test_no_raises_fr(self, _) -> None: + @mock.patch('builtins.input', return_value='No') + def test_no_raises_se(self, _: Any) -> None: self.assert_se("Wipe command aborted.", wipe, *self.args) - @mock.patch("src.common.misc.reset_terminal", return_value=None) - @mock.patch("builtins.input", return_value="Yes") - @mock.patch("time.sleep", return_value=None) - def test_wipe_local_testing(self, *_) -> None: + @mock.patch('os.system', return_value=None) + @mock.patch('builtins.input', return_value='Yes') + @mock.patch('time.sleep', return_value=None) + def test_wipe_local_testing(self, *_: Any) -> None: # Setup - self.settings.local_testing_mode = True + self.settings.local_testing_mode = True self.gateway.settings.data_diode_sockets = True for _ in range(2): self.queues[COMMAND_PACKET_QUEUE].put("dummy command") @@ -1529,10 +1151,10 @@ class TestWipe(TFCTestCase): wipe_packet = UNENCRYPTED_DATAGRAM_HEADER + UNENCRYPTED_WIPE_COMMAND self.assertTrue(self.queues[RELAY_PACKET_QUEUE].get().startswith(wipe_packet)) - @mock.patch("src.common.misc.reset_terminal", return_value=None) - @mock.patch("builtins.input", return_value="Yes") - @mock.patch("time.sleep", return_value=None) - def test_wipe(self, *_) -> None: + @mock.patch('os.system', return_value=None) + @mock.patch('builtins.input', return_value='Yes') + @mock.patch('time.sleep', return_value=None) + def test_wipe(self, *_: Any) -> None: # Setup for _ in range(2): self.queues[COMMAND_PACKET_QUEUE].put("dummy command") @@ -1544,5 +1166,5 @@ class TestWipe(TFCTestCase): self.assertTrue(self.queues[RELAY_PACKET_QUEUE].get().startswith(wipe_packet)) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/transmitter/test_commands_g.py b/tests/transmitter/test_commands_g.py index 6bb664b..4a06ca6 100644 --- a/tests/transmitter/test_commands_g.py +++ b/tests/transmitter/test_commands_g.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -22,59 +22,29 @@ along with TFC. If not, see . 
import unittest from unittest import mock +from typing import Any from src.common.encoding import b58encode -from src.common.statics import ( - COMMAND_PACKET_QUEUE, - GROUP_ID_LENGTH, - RELAY_PACKET_QUEUE, - WIN_TYPE_CONTACT, - WIN_TYPE_GROUP, -) +from src.common.statics import (COMMAND_PACKET_QUEUE, GROUP_ID_LENGTH, RELAY_PACKET_QUEUE, + WIN_TYPE_CONTACT, WIN_TYPE_GROUP) -from src.transmitter.commands_g import ( - group_add_member, - group_create, - group_rm_group, - group_rm_member, -) +from src.transmitter.commands_g import group_add_member, group_create, group_rm_group, group_rm_member from src.transmitter.commands_g import process_group_command, group_rename -from tests.mock_classes import ( - create_group, - Contact, - ContactList, - GroupList, - MasterKey, - Settings, - UserInput, - TxWindow, -) -from tests.utils import ( - cd_unit_test, - cleanup, - gen_queue_dict, - nick_to_pub_key, - tear_queues, - TFCTestCase, -) +from tests.mock_classes import create_group, Contact, ContactList, GroupList, MasterKey, Settings, UserInput, TxWindow +from tests.utils import cd_unit_test, cleanup, gen_queue_dict, nick_to_pub_key, tear_queues, TFCTestCase class TestProcessGroupCommand(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.contact_list = ContactList(nicks=["Alice"]) - self.group_list = GroupList() - self.settings = Settings() - self.queues = gen_queue_dict() - self.master_key = MasterKey() - self.args = ( - self.contact_list, - self.group_list, - self.settings, - self.queues, - self.settings, - ) + self.contact_list = ContactList(nicks=['Alice']) + self.group_list = GroupList() + self.settings = Settings() + self.queues = gen_queue_dict() + self.master_key = MasterKey() + self.args = self.contact_list, self.group_list, self.settings, self.queues, self.settings def tearDown(self) -> None: """Post-test actions.""" @@ -85,84 +55,43 @@ class TestProcessGroupCommand(TFCTestCase): self.settings.traffic_masking = True # Test - self.assert_se( - "Error: Command is disabled during traffic masking.", - process_group_command, - UserInput(), - *self.args, - ) + self.assert_se("Error: Command is disabled during traffic masking.", + process_group_command, UserInput(), *self.args) - def test_invalid_command_raises_fr(self) -> None: - self.assert_se( - "Error: Invalid group command.", - process_group_command, - UserInput("group "), - *self.args, - ) + def test_invalid_command_raises_se(self) -> None: + self.assert_se("Error: Invalid group command.", process_group_command, UserInput('group '), *self.args) - def test_invalid_command_parameters_raises_fr(self) -> None: - self.assert_se( - "Error: Invalid group command.", - process_group_command, - UserInput("group bad"), - *self.args, - ) + def test_invalid_command_parameters_raises_se(self) -> None: + self.assert_se("Error: Invalid group command.", process_group_command, UserInput('group bad'), *self.args) - def test_missing_group_id_raises_fr(self) -> None: - self.assert_se( - "Error: No group ID specified.", - process_group_command, - UserInput("group join "), - *self.args, - ) + def test_missing_group_id_raises_se(self) -> None: + self.assert_se("Error: No group ID specified.", process_group_command, UserInput('group join '), *self.args) - def test_invalid_group_id_raises_fr(self) -> None: - self.assert_se( - "Error: Invalid group ID.", - process_group_command, - UserInput("group join invalid"), - *self.args, - ) + def test_invalid_group_id_raises_se(self) -> None: + self.assert_se("Error: Invalid group ID.", 
process_group_command, UserInput('group join invalid'), *self.args) - def test_missing_name_raises_fr(self) -> None: - self.assert_se( - "Error: No group name specified.", - process_group_command, - UserInput("group create "), - *self.args, - ) + def test_missing_name_raises_se(self) -> None: + self.assert_se("Error: No group name specified.", process_group_command, UserInput('group create '), *self.args) - @mock.patch("builtins.input", return_value="Yes") - @mock.patch("os.urandom", return_value=GROUP_ID_LENGTH * b"a") - def test_successful_command(self, *_) -> None: - self.assertIsNone( - process_group_command(UserInput("group create team Alice"), *self.args) - ) + @mock.patch('builtins.input', return_value='Yes') + @mock.patch('os.urandom', return_value=GROUP_ID_LENGTH*b'a') + def test_successful_command(self, *_: Any) -> None: + self.assertIsNone(process_group_command(UserInput('group create team Alice'), *self.args)) user_input = UserInput(f"group join {b58encode(GROUP_ID_LENGTH*b'a')} team2") - self.assert_se( - "Error: Group with matching ID already exists.", - process_group_command, - user_input, - *self.args, - ) + self.assert_se("Error: Group with matching ID already exists.", process_group_command, user_input, *self.args) class TestGroupCreate(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.contact_list = ContactList(nicks=["Alice", "Bob"]) - self.group_list = GroupList() - self.settings = Settings() - self.queues = gen_queue_dict() - self.master_key = MasterKey() + self.contact_list = ContactList(nicks=['Alice', 'Bob']) + self.group_list = GroupList() + self.settings = Settings() + self.queues = gen_queue_dict() + self.master_key = MasterKey() self.account_list = None - self.args = ( - self.contact_list, - self.group_list, - self.settings, - self.queues, - self.settings, - ) + self.args = self.contact_list, self.group_list, self.settings, self.queues, self.settings def tearDown(self) -> None: """Post-test actions.""" @@ -170,91 +99,63 @@ class TestGroupCreate(TFCTestCase): def configure_groups(self, no_contacts: int) -> None: """Configure group list.""" - self.contact_list = ContactList(nicks=[str(n) for n in range(no_contacts)]) - self.group_list = GroupList(groups=["test_group"]) - self.group = self.group_list.get_group("test_group") + self.contact_list = ContactList(nicks=[str(n) for n in range(no_contacts)]) + self.group_list = GroupList(groups=['test_group']) + self.group = self.group_list.get_group('test_group') self.group.members = self.contact_list.contacts - self.account_list = [nick_to_pub_key(str(n)) for n in range(no_contacts)] + self.account_list = [nick_to_pub_key(str(n)) for n in range(no_contacts)] - def test_invalid_group_name_raises_fr(self) -> None: + def test_invalid_group_name_raises_se(self) -> None: # Setup self.configure_groups(no_contacts=21) # Test - self.assert_se( - "Error: Group name must be printable.", - group_create, - "test_group\x1f", - self.account_list, - *self.args, - ) + self.assert_se("Error: Group name must be printable.", + group_create, 'test_group\x1f', self.account_list, *self.args) - def test_too_many_purp_accounts_raises_fr(self) -> None: + def test_too_many_purp_accounts_raises_se(self) -> None: # Setup self.configure_groups(no_contacts=60) # Test cl_str = [nick_to_pub_key(str(n)) for n in range(51)] - self.assert_se( - "Error: TFC settings only allow 50 members per group.", - group_create, - "test_group_50", - cl_str, - self.contact_list, - self.group_list, - self.settings, - self.queues, - self.master_key, 
- ) + self.assert_se("Error: TFC settings only allow 50 members per group.", + group_create, 'test_group_50', cl_str, + self.contact_list, self.group_list, self.settings, self.queues, self.master_key) - def test_full_group_list_raises_fr(self) -> None: + def test_full_group_list_raises_se(self) -> None: # Setup self.group_list = GroupList(groups=[f"testgroup_{n}" for n in range(50)]) # Test - self.assert_se( - "Error: TFC settings only allow 50 groups.", - group_create, - "testgroup_50", - [nick_to_pub_key("Alice")], - self.contact_list, - self.group_list, - self.settings, - self.queues, - self.master_key, - ) + self.assert_se("Error: TFC settings only allow 50 groups.", + group_create, 'testgroup_50', [nick_to_pub_key("Alice")], + self.contact_list, self.group_list, self.settings, self.queues, self.master_key) - @mock.patch("builtins.input", return_value="Yes") - def test_successful_group_creation(self, _) -> None: + @mock.patch('builtins.input', return_value='Yes') + def test_successful_group_creation(self, _: Any) -> None: # Test - self.assertIsNone( - group_create("test_group_2", [nick_to_pub_key("Alice")], *self.args) - ) + self.assertIsNone(group_create('test_group_2', [nick_to_pub_key("Alice")], *self.args)) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1) - self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1) + self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1) def test_successful_empty_group_creation(self) -> None: - self.assertIsNone(group_create("test_group_2", [], *self.args)) + self.assertIsNone(group_create('test_group_2', [], *self.args)) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1) - self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 0) + self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 0) class TestGroupAddMember(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.user_input = UserInput() - self.contact_list = ContactList(nicks=["Alice", "Bob"]) - self.group_list = GroupList() - self.settings = Settings() - self.queues = gen_queue_dict() - self.master_key = MasterKey() - self.args = ( - self.contact_list, - self.group_list, - self.settings, - self.queues, - self.settings, - ) + self.user_input = UserInput() + self.contact_list = ContactList(nicks=['Alice', 'Bob']) + self.group_list = GroupList() + self.settings = Settings() + self.queues = gen_queue_dict() + self.master_key = MasterKey() + self.args = self.contact_list, self.group_list, self.settings, self.queues, self.settings def tearDown(self) -> None: """Post-test actions.""" @@ -262,69 +163,45 @@ class TestGroupAddMember(TFCTestCase): def configure_groups(self, no_contacts: int) -> None: """Configure group database.""" - self.contact_list = ContactList(nicks=[str(n) for n in range(no_contacts)]) - self.group_list = GroupList(groups=["test_group"]) - self.group = self.group_list.get_group("test_group") + self.contact_list = ContactList(nicks=[str(n) for n in range(no_contacts)]) + self.group_list = GroupList(groups=['test_group']) + self.group = self.group_list.get_group('test_group') self.group.members = self.contact_list.contacts - self.account_list = [nick_to_pub_key(str(n)) for n in range(no_contacts)] + self.account_list = [nick_to_pub_key(str(n)) for n in range(no_contacts)] - @mock.patch("builtins.input", return_value="Yes") - def test_new_group_is_created_if_specified_group_does_not_exist_and_user_chooses_yes( - self, _ - ) -> None: - self.assertIsNone(group_add_member("test_group", [], *self.args)) + 
@mock.patch('builtins.input', return_value='Yes') + def test_new_group_is_created_if_specified_group_does_not_exist_and_user_chooses_yes(self, _: Any) -> None: + self.assertIsNone(group_add_member('test_group', [], *self.args)) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1) - self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 0) + self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 0) - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", return_value="No") - def test_raises_fr_if_specified_group_does_not_exist_and_user_chooses_no( - self, *_ - ) -> None: - self.assert_se( - "Group creation aborted.", group_add_member, "test_group", [], *self.args - ) + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', return_value='No') + def test_raises_fr_if_specified_group_does_not_exist_and_user_chooses_no(self, *_: Any) -> None: + self.assert_se("Group creation aborted.", group_add_member, 'test_group', [], *self.args) - def test_too_large_final_member_list_raises_fr(self) -> None: + def test_too_large_final_member_list_raises_se(self) -> None: # Setup - contact_list = ContactList(nicks=[str(n) for n in range(51)]) - group_list = GroupList(groups=["testgroup"]) - group = group_list.get_group("testgroup") + contact_list = ContactList(nicks=[str(n) for n in range(51)]) + group_list = GroupList(groups=['testgroup']) + group = group_list.get_group('testgroup') group.members = contact_list.contacts[:49] # Test m_to_add = [nick_to_pub_key("49"), nick_to_pub_key("50")] - self.assert_se( - "Error: TFC settings only allow 50 members per group.", - group_add_member, - "testgroup", - m_to_add, - contact_list, - group_list, - self.settings, - self.queues, - self.master_key, - ) + self.assert_se("Error: TFC settings only allow 50 members per group.", group_add_member, + 'testgroup', m_to_add, contact_list, group_list, self.settings, self.queues, self.master_key) - @mock.patch("builtins.input", return_value="Yes") - def test_successful_group_add(self, _) -> None: + @mock.patch('builtins.input', return_value='Yes') + def test_successful_group_add(self, _: Any) -> None: # Setup self.configure_groups(no_contacts=51) self.group.members = self.contact_list.contacts[:49] # Test - self.assertIsNone( - group_add_member( - "test_group", - [nick_to_pub_key("49")], - self.contact_list, - self.group_list, - self.settings, - self.queues, - self.master_key, - ) - ) - group2 = self.group_list.get_group("test_group") + self.assertIsNone(group_add_member('test_group', [nick_to_pub_key("49")], self.contact_list, + self.group_list, self.settings, self.queues, self.master_key)) + group2 = self.group_list.get_group('test_group') self.assertEqual(len(group2), 50) for c in group2: @@ -335,177 +212,125 @@ class TestGroupAddMember(TFCTestCase): class TestGroupRmMember(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" self.unit_test_dir = cd_unit_test() - self.user_input = UserInput() - self.contact_list = ContactList(nicks=["Alice", "Bob"]) - self.group_list = GroupList(groups=["test_group"]) - self.settings = Settings() - self.queues = gen_queue_dict() - self.master_key = MasterKey() - self.args = ( - self.contact_list, - self.group_list, - self.settings, - self.queues, - self.settings, - ) + self.user_input = UserInput() + self.contact_list = ContactList(nicks=['Alice', 'Bob']) + self.group_list = GroupList(groups=["test_group"]) + self.settings = Settings() + self.queues = gen_queue_dict() + self.master_key = MasterKey() + self.args = 
self.contact_list, self.group_list, self.settings, self.queues, self.settings def tearDown(self) -> None: """Post-test actions.""" cleanup(self.unit_test_dir) tear_queues(self.queues) - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", return_value="Yes") - def test_no_accounts_removes_group(self, *_) -> None: - self.assert_se( - "Removed group 'test_group'.", group_rm_member, "test_group", [], *self.args - ) + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', return_value='Yes') + def test_no_accounts_removes_group(self, *_: Any) -> None: + self.assert_se("Removed group 'test_group'.", group_rm_member, 'test_group', [], *self.args) - @mock.patch("builtins.input", return_value="Yes") - def test_remove_members_from_unknown_group(self, _) -> None: - self.assert_se( - "Group 'test_group_2' does not exist.", - group_rm_member, - "test_group_2", - [nick_to_pub_key("Alice")], - *self.args, - ) + @mock.patch('builtins.input', return_value='Yes') + def test_remove_members_from_unknown_group(self, _: Any) -> None: + self.assert_se("Group 'test_group_2' does not exist.", + group_rm_member, 'test_group_2', [nick_to_pub_key("Alice")], *self.args) - @mock.patch("builtins.input", return_value="Yes") - def test_successful_group_remove(self, _) -> None: - self.assertIsNone( - group_rm_member("test_group", [nick_to_pub_key("Alice")], *self.args) - ) + @mock.patch('builtins.input', return_value='Yes') + def test_successful_group_remove(self, _: Any) -> None: + self.assertIsNone(group_rm_member('test_group', [nick_to_pub_key("Alice")], *self.args)) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1) self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1) class TestGroupRmGroup(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" self.unit_test_dir = cd_unit_test() - self.user_input = UserInput() - self.contact_list = ContactList(nicks=["Alice", "Bob"]) - self.group_list = GroupList(groups=["test_group"]) - self.settings = Settings() - self.queues = gen_queue_dict() - self.master_key = MasterKey() - self.args = ( - self.contact_list, - self.group_list, - self.settings, - self.queues, - self.settings, - ) + self.user_input = UserInput() + self.contact_list = ContactList(nicks=['Alice', 'Bob']) + self.group_list = GroupList(groups=['test_group']) + self.settings = Settings() + self.queues = gen_queue_dict() + self.master_key = MasterKey() + self.args = self.contact_list, self.group_list, self.settings, self.queues, self.settings def tearDown(self) -> None: """Post-test actions.""" cleanup(self.unit_test_dir) tear_queues(self.queues) - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", return_value="No") - def test_cancel_of_remove_raises_fr(self, *_) -> None: - self.assert_se( - "Group removal aborted.", group_rm_group, "test_group", *self.args - ) + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', return_value='No') + def test_cancel_of_remove_raises_se(self, *_: Any) -> None: + self.assert_se("Group removal aborted.", group_rm_group, 'test_group', *self.args) - @mock.patch("builtins.input", return_value="Yes") - def test_remove_group_not_on_transmitter_raises_fr(self, _) -> None: + @mock.patch('builtins.input', return_value='Yes') + def test_remove_group_not_on_transmitter_raises_se(self, _: Any) -> None: unknown_group_id = b58encode(bytes(GROUP_ID_LENGTH)) - self.assert_se( - "Transmitter has no group '2dVseX46KS9Sp' to remove.", - group_rm_group, - unknown_group_id, - 
*self.args, - ) + self.assert_se("Transmitter has no group '2dVseX46KS9Sp' to remove.", + group_rm_group, unknown_group_id, *self.args) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 2) - @mock.patch("builtins.input", return_value="Yes") - def test_invalid_group_id_raises_fr(self, _) -> None: + @mock.patch('builtins.input', return_value='Yes') + def test_invalid_group_id_raises_se(self, _: Any) -> None: invalid_group_id = b58encode(bytes(GROUP_ID_LENGTH))[:-1] - self.assert_se( - "Error: Invalid group name/ID.", - group_rm_group, - invalid_group_id, - *self.args, - ) + self.assert_se("Error: Invalid group name/ID.", group_rm_group, invalid_group_id, *self.args) - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", return_value="Yes") - def test_remove_group_and_notify(self, *_) -> None: - self.assert_se( - "Removed group 'test_group'.", group_rm_group, "test_group", *self.args - ) + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', return_value='Yes') + def test_remove_group_and_notify(self, *_: Any) -> None: + self.assert_se("Removed group 'test_group'.", group_rm_group, 'test_group', *self.args) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 2) - self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1) + self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1) class TestGroupRename(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.queues = gen_queue_dict() - self.settings = Settings() + self.queues = gen_queue_dict() + self.settings = Settings() self.contact_list = ContactList() - self.group_list = GroupList(groups=["test_group"]) - self.window = TxWindow() - self.args = ( - self.window, - self.contact_list, - self.group_list, - self.settings, - self.queues, - ) + self.group_list = GroupList(groups=['test_group']) + self.window = TxWindow() + self.args = self.window, self.contact_list, self.group_list, self.settings, self.queues def tearDown(self) -> None: """Post-test actions.""" tear_queues(self.queues) - def test_contact_window_raises_fr(self) -> None: + def test_contact_window_raises_se(self) -> None: # Setup self.window.type = WIN_TYPE_CONTACT # Test - self.assert_se( - "Error: Selected window is not a group window.", - group_rename, - "window", - *self.args, - ) + self.assert_se("Error: Selected window is not a group window.", group_rename, "window", *self.args) - def test_invalid_group_name_raises_fr(self) -> None: + def test_invalid_group_name_raises_se(self) -> None: # Setup - self.window.type = WIN_TYPE_GROUP - self.window.group = self.group_list.get_group("test_group") + self.window.type = WIN_TYPE_GROUP + self.window.group = self.group_list.get_group('test_group') # Test - self.assert_se( - "Error: Group name must be printable.", - group_rename, - "window\x1f", - *self.args, - ) + self.assert_se("Error: Group name must be printable.", group_rename, "window\x1f", *self.args) - @mock.patch("time.sleep", return_value=None) - def test_successful_group_change(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_successful_group_change(self, _: Any) -> None: # Setup - group = create_group("test_group") - self.window.type = WIN_TYPE_GROUP - self.window.uid = group.group_id + group = create_group('test_group') + self.window.type = WIN_TYPE_GROUP + self.window.uid = group.group_id self.window.group = group # Test - self.assert_se( - "Renamed group 'test_group' to 'window'.", - group_rename, - "window", - *self.args, - ) + self.assert_se("Renamed group 
'test_group' to 'window'.", group_rename, "window", *self.args) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/transmitter/test_contact.py b/tests/transmitter/test_contact.py index 6eddbb9..d21a66a 100644 --- a/tests/transmitter/test_contact.py +++ b/tests/transmitter/test_contact.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -23,235 +23,177 @@ import os import unittest from unittest import mock +from typing import Any -from src.common.crypto import blake2b -from src.common.statics import ( - COMMAND_PACKET_QUEUE, - CONFIRM_CODE_LENGTH, - FINGERPRINT_LENGTH, - KDB_REMOVE_ENTRY_HEADER, - KEY_MANAGEMENT_QUEUE, - LOG_SETTING_QUEUE, - RELAY_PACKET_QUEUE, - TM_COMMAND_PACKET_QUEUE, - WIN_TYPE_CONTACT, - WIN_TYPE_GROUP, -) +from src.common.crypto import blake2b +from src.common.statics import (COMMAND_PACKET_QUEUE, CONFIRM_CODE_LENGTH, FINGERPRINT_LENGTH, KDB_REMOVE_ENTRY_HEADER, + KEY_MANAGEMENT_QUEUE, LOCAL_ID, LOG_SETTING_QUEUE, RELAY_PACKET_QUEUE, + TM_COMMAND_PACKET_QUEUE, WIN_TYPE_CONTACT, WIN_TYPE_GROUP) -from src.transmitter.contact import ( - add_new_contact, - change_nick, - contact_setting, - remove_contact, -) +from src.transmitter.contact import add_new_contact, change_nick, contact_setting, get_onion_address_from_user +from src.transmitter.contact import remove_contact -from tests.mock_classes import ( - ContactList, - create_contact, - create_group, - Group, - GroupList, - MasterKey, - OnionService, -) +from tests.mock_classes import ContactList, create_contact, create_group, Group, GroupList, MasterKey, OnionService from tests.mock_classes import Settings, TxWindow, UserInput -from tests.utils import ( - cd_unit_test, - cleanup, - gen_queue_dict, - group_name_to_group_id, - ignored, -) -from tests.utils import ( - nick_to_onion_address, - nick_to_pub_key, - tear_queues, - TFCTestCase, - VALID_ECDHE_PUB_KEY, -) +from tests.utils import cd_unit_test, cleanup, gen_queue_dict, group_name_to_group_id, ignored +from tests.utils import nick_to_onion_address, nick_to_pub_key, tear_queues, TFCTestCase, VALID_ECDHE_PUB_KEY class TestAddNewContact(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.contact_list = ContactList() - self.group_list = GroupList() - self.settings = Settings(disable_gui_dialog=True) - self.queues = gen_queue_dict() + self.contact_list = ContactList(nicks=[LOCAL_ID]) + self.group_list = GroupList() + self.settings = Settings(disable_gui_dialog=True) + self.queues = gen_queue_dict() self.onion_service = OnionService() - self.args = ( - self.contact_list, - self.group_list, - self.settings, - self.queues, - self.onion_service, - ) + self.args = self.contact_list, self.group_list, self.settings, self.queues, self.onion_service def tearDown(self) -> None: """Post-test actions.""" with ignored(OSError): - os.remove(f"v4dkh.psk - Give to hpcra") + os.remove(f'v4dkh.psk - Give to hpcra') tear_queues(self.queues) - def test_adding_new_contact_during_traffic_masking_raises_fr(self) -> None: + def test_adding_new_contact_during_traffic_masking_raises_se(self) -> None: # Setup self.settings.traffic_masking = True # Test - self.assert_se( - "Error: Command is disabled during traffic masking.", - add_new_contact, - *self.args, - ) + self.assert_se("Error: Command is disabled during traffic masking.", 
add_new_contact, *self.args) - def test_contact_list_full_raises_fr(self) -> None: + def test_contact_list_full_raises_se(self) -> None: # Setup - contact_list = ContactList(nicks=[str(n) for n in range(50)]) + contact_list = ContactList(nicks=[str(n) for n in range(50)]) self.contact_list.contacts = contact_list.contacts # Test - self.assert_se( - "Error: TFC settings only allow 50 accounts.", add_new_contact, *self.args - ) + self.assert_se("Error: TFC settings only allow 50 accounts.", add_new_contact, *self.args) - @mock.patch( - "builtins.input", - side_effect=[ - nick_to_onion_address("Bob"), - "Bob", - "", - VALID_ECDHE_PUB_KEY, - "Yes", - blake2b(nick_to_pub_key("Bob"), digest_size=CONFIRM_CODE_LENGTH).hex(), - ], - ) - @mock.patch("shutil.get_terminal_size", return_value=[200, 200]) - @mock.patch("time.sleep", return_value=None) - def test_default_nick_ecdhe(self, *_) -> None: + @mock.patch('builtins.input', side_effect=[nick_to_onion_address("Bob"), 'Bob', '', VALID_ECDHE_PUB_KEY, 'Yes', + blake2b(nick_to_pub_key('Bob'), digest_size=CONFIRM_CODE_LENGTH).hex()]) + @mock.patch('shutil.get_terminal_size', return_value=[200, 200]) + @mock.patch('time.sleep', return_value=None) + def test_default_nick_ecdhe(self, *_: Any) -> None: self.assertIsNone(add_new_contact(*self.args)) contact = self.contact_list.get_contact_by_address_or_nick("Bob") - self.assertEqual(contact.nick, "Bob") + self.assertEqual(contact.nick, 'Bob') self.assertNotEqual(contact.tx_fingerprint, bytes(FINGERPRINT_LENGTH)) - @mock.patch("src.transmitter.key_exchanges.ARGON2_PSK_MEMORY_COST", 200) - @mock.patch("src.common.statics.MIN_KEY_DERIVATION_TIME", 0.1) - @mock.patch("src.common.statics.MAX_KEY_DERIVATION_TIME", 1.0) - @mock.patch( - "builtins.input", - side_effect=[ - nick_to_onion_address("Alice"), - "Alice_", - "psk", - ".", - "", - "ff", - "fc", - ], - ) - @mock.patch("getpass.getpass", return_value="test_password") - @mock.patch("time.sleep", return_value=None) - def test_standard_nick_psk_kex(self, *_) -> None: - self.onion_service.account = nick_to_onion_address("Bob").encode() + @mock.patch('src.transmitter.key_exchanges.ARGON2_PSK_MEMORY_COST', 200) + @mock.patch('src.common.statics.MIN_KEY_DERIVATION_TIME', 0.1) + @mock.patch('src.common.statics.MAX_KEY_DERIVATION_TIME', 1.0) + @mock.patch('builtins.input', side_effect=[nick_to_onion_address("Alice"), 'Alice_', 'psk', '.', '', 'ff', 'fc']) + @mock.patch('getpass.getpass', return_value='test_password') + @mock.patch('time.sleep', return_value=None) + def test_standard_nick_psk_kex(self, *_: Any) -> None: + self.onion_service.account = nick_to_onion_address('Bob').encode() self.assertIsNone(add_new_contact(*self.args)) contact = self.contact_list.get_contact_by_pub_key(nick_to_pub_key("Alice")) - self.assertEqual(contact.nick, "Alice_") + self.assertEqual(contact.nick, 'Alice_') self.assertEqual(contact.tx_fingerprint, bytes(FINGERPRINT_LENGTH)) - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", side_effect=KeyboardInterrupt) - def test_keyboard_interrupt_raises_fr(self, *_) -> None: - self.assert_se("Contact creation aborted.", add_new_contact, *self.args) + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', side_effect=KeyboardInterrupt) + def test_keyboard_interrupt_raises_se(self, *_: Any) -> None: + self.assert_se('Contact creation aborted.', add_new_contact, *self.args) + + +class TestGetOnionAddressFromUser(unittest.TestCase): + + def setUp(self) -> None: + """Pre-test actions.""" + 
self.queues = gen_queue_dict() + + def tearDown(self) -> None: + """Post-test actions.""" + tear_queues(self.queues) + + @mock.patch('builtins.input', side_effect=[nick_to_onion_address('Alice')[:-1]+'a', + nick_to_onion_address('Bob')]) + def test_invalid_onion_address_from_user_gets_sent_to_relay_program(self, _: Any) -> None: + onion_address_user = nick_to_onion_address('Alice') + + self.assertEqual(get_onion_address_from_user(onion_address_user, self.queues), + nick_to_onion_address('Bob')) + self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1) + + @mock.patch('builtins.input', side_effect=[nick_to_onion_address('Alice'), + nick_to_onion_address('Bob')]) + def test_user_or_valid_onion_address_from_user_does_not_get_sent_to_relay_program(self, _: Any) -> None: + onion_address_user = nick_to_onion_address('Alice') + + self.assertEqual(get_onion_address_from_user(onion_address_user, self.queues), + nick_to_onion_address('Bob')) + self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 0) class TestRemoveContact(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" self.unit_test_dir = cd_unit_test() - self.contact_list = ContactList(nicks=["Alice"]) - self.group_list = GroupList(groups=["test_group"]) - self.settings = Settings() - self.queues = gen_queue_dict() - self.master_key = MasterKey() - self.pub_key = nick_to_pub_key("Alice") - self.args = ( - self.contact_list, - self.group_list, - self.settings, - self.queues, - self.master_key, - ) + self.contact_list = ContactList(nicks=['Alice']) + self.group_list = GroupList(groups=['test_group']) + self.settings = Settings() + self.queues = gen_queue_dict() + self.master_key = MasterKey() + self.pub_key = nick_to_pub_key('Alice') + self.args = self.contact_list, self.group_list, self.settings, self.queues, self.master_key def tearDown(self) -> None: """Post-test actions.""" cleanup(self.unit_test_dir) tear_queues(self.queues) - def test_contact_removal_during_traffic_masking_raises_fr(self) -> None: + def test_contact_removal_during_traffic_masking_raises_se(self) -> None: # Setup self.settings.traffic_masking = True # Test - self.assert_se( - "Error: Command is disabled during traffic masking.", - remove_contact, - UserInput(), - None, - *self.args, - ) + self.assert_se("Error: Command is disabled during traffic masking.", + remove_contact, UserInput(), None, *self.args) - def test_missing_account_raises_fr(self) -> None: - self.assert_se( - "Error: No account specified.", - remove_contact, - UserInput("rm "), - None, - *self.args, - ) + def test_missing_account_raises_se(self) -> None: + self.assert_se("Error: No account specified.", remove_contact, UserInput('rm '), None, *self.args) - @mock.patch("time.sleep", return_value=None) - @mock.patch("shutil.get_terminal_size", return_value=[150, 150]) - @mock.patch("builtins.input", return_value="Yes") - def test_invalid_account_raises_fr(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('shutil.get_terminal_size', return_value=[150, 150]) + @mock.patch('builtins.input', return_value='Yes') + def test_invalid_account_raises_se(self, *_: Any) -> None: # Setup user_input = UserInput(f'rm {nick_to_onion_address("Alice")[:-1]}') - window = TxWindow( - window_contacts=[self.contact_list.get_contact_by_address_or_nick("Alice")], - type=WIN_TYPE_CONTACT, - uid=self.pub_key, - ) + window = TxWindow(window_contacts=[self.contact_list.get_contact_by_address_or_nick('Alice')], + type=WIN_TYPE_CONTACT, + uid=self.pub_key) # Test - self.assert_se( - "Error: 
Invalid selection.", remove_contact, user_input, window, *self.args - ) + self.assert_se("Error: Invalid selection.", remove_contact, user_input, window, *self.args) - @mock.patch("time.sleep", return_value=None) - @mock.patch("shutil.get_terminal_size", return_value=[150, 150]) - @mock.patch("builtins.input", return_value="No") - def test_user_abort_raises_fr(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('shutil.get_terminal_size', return_value=[150, 150]) + @mock.patch('builtins.input', return_value='No') + def test_user_abort_raises_se(self, *_: Any) -> None: # Setup user_input = UserInput(f'rm {nick_to_onion_address("Alice")}') # Test - self.assert_se( - "Removal of contact aborted.", remove_contact, user_input, None, *self.args - ) + self.assert_se("Removal of contact aborted.", remove_contact, user_input, None, *self.args) - @mock.patch("builtins.input", return_value="Yes") - def test_successful_removal_of_contact(self, _) -> None: + @mock.patch('builtins.input', return_value='Yes') + def test_successful_removal_of_contact(self, _: Any) -> None: # Setup - window = TxWindow( - window_contacts=[self.contact_list.get_contact_by_address_or_nick("Alice")], - type=WIN_TYPE_CONTACT, - uid=self.pub_key, - ) + window = TxWindow(window_contacts=[self.contact_list.get_contact_by_address_or_nick('Alice')], + type=WIN_TYPE_CONTACT, + uid=self.pub_key) # Test for g in self.group_list: self.assertIsInstance(g, Group) self.assertTrue(g.has_member(self.pub_key)) - self.assertIsNone(remove_contact(UserInput("rm Alice"), window, *self.args)) + self.assertIsNone(remove_contact(UserInput('rm Alice'), window, *self.args)) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1) km_data = self.queues[KEY_MANAGEMENT_QUEUE].get() @@ -262,18 +204,16 @@ class TestRemoveContact(TFCTestCase): self.assertIsInstance(g, Group) self.assertFalse(g.has_member(self.pub_key)) - @mock.patch("builtins.input", return_value="Yes") - def test_successful_removal_of_last_member_of_active_group(self, _) -> None: + @mock.patch('builtins.input', return_value='Yes') + def test_successful_removal_of_last_member_of_active_group(self, _: Any) -> None: # Setup - user_input = UserInput("rm Alice") - window = TxWindow( - window_contacts=[self.contact_list.get_contact_by_address_or_nick("Alice")], - type=WIN_TYPE_GROUP, - name="test_group", - ) - group = self.group_list.get_group("test_group") + user_input = UserInput('rm Alice') + window = TxWindow(window_contacts=[self.contact_list.get_contact_by_address_or_nick("Alice")], + type=WIN_TYPE_GROUP, + name='test_group') + group = self.group_list.get_group('test_group') group.members = [self.contact_list.get_contact_by_address_or_nick("Alice")] - pub_key = nick_to_pub_key("Alice") + pub_key = nick_to_pub_key('Alice') # Test for g in self.group_list: @@ -293,173 +233,139 @@ class TestRemoveContact(TFCTestCase): km_data = self.queues[KEY_MANAGEMENT_QUEUE].get() self.assertEqual(km_data, (KDB_REMOVE_ENTRY_HEADER, pub_key)) - @mock.patch("shutil.get_terminal_size", return_value=[150, 150]) - @mock.patch("builtins.input", return_value="Yes") - def test_no_contact_found_on_transmitter(self, *_) -> None: + @mock.patch('shutil.get_terminal_size', return_value=[150, 150]) + @mock.patch('builtins.input', return_value='Yes') + def test_no_contact_found_on_transmitter(self, *_: Any) -> None: # Setup - user_input = UserInput(f'rm {nick_to_onion_address("Charlie")}') - contact_list = ContactList(nicks=["Bob"]) - window = TxWindow( - 
window_contact=[contact_list.get_contact_by_address_or_nick("Bob")], - type=WIN_TYPE_GROUP, - ) + user_input = UserInput(f'rm {nick_to_onion_address("Charlie")}') + contact_list = ContactList(nicks=['Bob']) + window = TxWindow(window_contact=[contact_list.get_contact_by_address_or_nick('Bob')], + type=WIN_TYPE_GROUP) # Test self.assertIsNone(remove_contact(user_input, window, *self.args)) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1) - self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1) + self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1) command_packet = self.queues[COMMAND_PACKET_QUEUE].get() self.assertIsInstance(command_packet, bytes) class TestChangeNick(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.contact_list = ContactList(nicks=["Alice"]) - self.group_list = GroupList() - self.settings = Settings() - self.queues = gen_queue_dict() - self.args = self.contact_list, self.group_list, self.settings, self.queues + self.contact_list = ContactList(nicks=['Alice']) + self.group_list = GroupList() + self.settings = Settings() + self.queues = gen_queue_dict() + self.args = self.contact_list, self.group_list, self.settings, self.queues def tearDown(self) -> None: """Post-test actions.""" tear_queues(self.queues) - def test_missing_nick_raises_fr(self) -> None: - self.assert_se( - "Error: No nick specified.", - change_nick, - UserInput("nick "), - TxWindow(type=WIN_TYPE_CONTACT), - *self.args, - ) + def test_missing_nick_raises_se(self) -> None: + self.assert_se("Error: No nick specified.", + change_nick, UserInput("nick "), TxWindow(type=WIN_TYPE_CONTACT), *self.args) - def test_invalid_nick_raises_fr(self) -> None: + def test_invalid_nick_raises_se(self) -> None: # Setup - window = TxWindow(type=WIN_TYPE_CONTACT, contact=create_contact("Bob")) + window = TxWindow(type=WIN_TYPE_CONTACT, + contact=create_contact('Bob')) # Test - self.assert_se( - "Error: Nick must be printable.", - change_nick, - UserInput("nick Alice\x01"), - window, - *self.args, - ) + self.assert_se("Error: Nick must be printable.", + change_nick, UserInput("nick Alice\x01"), window, *self.args) - def test_no_contact_raises_fr(self) -> None: + def test_no_contact_raises_se(self) -> None: # Setup - window = TxWindow(type=WIN_TYPE_CONTACT, contact=create_contact("Bob")) + window = TxWindow(type=WIN_TYPE_CONTACT, + contact=create_contact('Bob')) window.contact = None # Test - self.assert_se( - "Error: Window does not have contact.", - change_nick, - UserInput("nick Alice\x01"), - window, - *self.args, - ) + self.assert_se("Error: Window does not have contact.", + change_nick, UserInput("nick Alice\x01"), window, *self.args) def test_successful_nick_change(self) -> None: # Setup - window = TxWindow( - name="Alice", - type=WIN_TYPE_CONTACT, - contact=self.contact_list.get_contact_by_address_or_nick("Alice"), - ) + window = TxWindow(name='Alice', + type=WIN_TYPE_CONTACT, + contact=self.contact_list.get_contact_by_address_or_nick('Alice')) # Test self.assertIsNone(change_nick(UserInput("nick Alice_"), window, *self.args)) - self.assertEqual( - self.contact_list.get_contact_by_pub_key(nick_to_pub_key("Alice")).nick, - "Alice_", - ) + self.assertEqual(self.contact_list.get_contact_by_pub_key(nick_to_pub_key('Alice')).nick, 'Alice_') - @mock.patch("time.sleep", return_value=None) - def test_successful_group_nick_change(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_successful_group_nick_change(self, _: Any) -> None: # Setup - group = 
create_group("test_group") + group = create_group('test_group') user_input = UserInput("nick group2") - window = TxWindow( - name="test_group", type=WIN_TYPE_GROUP, group=group, uid=group.group_id - ) + window = TxWindow(name ='test_group', + type =WIN_TYPE_GROUP, + group=group, + uid =group.group_id) # Test - self.assert_se( - "Renamed group 'test_group' to 'group2'.", - change_nick, - user_input, - window, - *self.args, - ) - self.assertEqual(window.group.name, "group2") + self.assert_se("Renamed group 'test_group' to 'group2'.", change_nick, user_input, window, *self.args) + self.assertEqual(window.group.name, 'group2') class TestContactSetting(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.contact_list = ContactList(nicks=["Alice", "Bob"]) - self.group_list = GroupList(groups=["test_group"]) - self.settings = Settings() - self.queues = gen_queue_dict() - self.pub_key = nick_to_pub_key("Alice") - self.args = self.contact_list, self.group_list, self.settings, self.queues + self.contact_list = ContactList(nicks=['Alice', 'Bob']) + self.group_list = GroupList(groups=['test_group']) + self.settings = Settings() + self.queues = gen_queue_dict() + self.pub_key = nick_to_pub_key("Alice") + self.args = self.contact_list, self.group_list, self.settings, self.queues def tearDown(self) -> None: """Post-test actions.""" tear_queues(self.queues) - def test_invalid_command_raises_fr(self) -> None: - self.assert_se( - "Error: Invalid command.", - contact_setting, - UserInput("loging on"), - None, - *self.args, - ) + def test_invalid_command_raises_se(self) -> None: + self.assert_se("Error: Invalid command.", contact_setting, UserInput('loging on'), None, *self.args) - def test_missing_parameter_raises_fr(self) -> None: - self.assert_se( - "Error: Invalid command.", contact_setting, UserInput(""), None, *self.args - ) + def test_missing_parameter_raises_se(self) -> None: + self.assert_se("Error: Invalid command.", contact_setting, UserInput(''), None, *self.args) - def test_invalid_extra_parameter_raises_fr(self) -> None: - self.assert_se( - "Error: Invalid command.", - contact_setting, - UserInput("logging on al"), - None, - *self.args, - ) + def test_invalid_extra_parameter_raises_se(self) -> None: + self.assert_se("Error: Invalid command.", contact_setting, UserInput('logging on al'), None, *self.args) def test_enable_logging_for_user(self) -> None: # Setup - contact = self.contact_list.get_contact_by_address_or_nick("Alice") + contact = self.contact_list.get_contact_by_address_or_nick('Alice') contact.log_messages = False - window = TxWindow(uid=self.pub_key, type=WIN_TYPE_CONTACT, contact=contact) + window = TxWindow(uid=self.pub_key, + type=WIN_TYPE_CONTACT, + contact=contact) # Test self.assertFalse(contact.log_messages) - self.assertIsNone(contact_setting(UserInput("logging on"), window, *self.args)) + self.assertIsNone(contact_setting(UserInput('logging on'), window, *self.args)) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1) - self.assertEqual(self.queues[LOG_SETTING_QUEUE].qsize(), 0) + self.assertEqual(self.queues[LOG_SETTING_QUEUE].qsize(), 0) self.assertTrue(contact.log_messages) def test_enable_logging_for_user_during_traffic_masking(self) -> None: # Setup - contact = self.contact_list.get_contact_by_address_or_nick("Alice") + contact = self.contact_list.get_contact_by_address_or_nick('Alice') contact.log_messages = False - window = TxWindow( - uid=self.pub_key, type=WIN_TYPE_CONTACT, contact=contact, log_messages=False - ) + window = 
TxWindow(uid=self.pub_key, + type=WIN_TYPE_CONTACT, + contact=contact, + log_messages=False) self.settings.traffic_masking = True # Test self.assertFalse(contact.log_messages) self.assertFalse(window.log_messages) - self.assertIsNone(contact_setting(UserInput("logging on"), window, *self.args)) + self.assertIsNone(contact_setting(UserInput('logging on'), window, *self.args)) self.assertEqual(self.queues[TM_COMMAND_PACKET_QUEUE].qsize(), 1) self.assertTrue(self.queues[LOG_SETTING_QUEUE].get()) @@ -468,29 +374,25 @@ class TestContactSetting(TFCTestCase): def test_enable_logging_for_group(self) -> None: # Setup - group = self.group_list.get_group("test_group") + group = self.group_list.get_group('test_group') group.log_messages = False - window = TxWindow( - uid=group_name_to_group_id("test_group"), - type=WIN_TYPE_GROUP, - group=group, - window_contacts=group.members, - ) + window = TxWindow(uid=group_name_to_group_id('test_group'), + type=WIN_TYPE_GROUP, + group=group, + window_contacts=group.members) # Test self.assertFalse(group.log_messages) - self.assertIsNone(contact_setting(UserInput("logging on"), window, *self.args)) + self.assertIsNone(contact_setting(UserInput('logging on'), window, *self.args)) self.assertTrue(group.log_messages) def test_enable_logging_for_all_users(self) -> None: # Setup contact = self.contact_list.get_contact_by_address_or_nick("Alice") - window = TxWindow( - uid=self.pub_key, - type=WIN_TYPE_CONTACT, - contact=contact, - window_contacts=[contact], - ) + window = TxWindow(uid=self.pub_key, + type=WIN_TYPE_CONTACT, + contact=contact, + window_contacts=[contact]) for c in self.contact_list: c.log_messages = False @@ -503,9 +405,7 @@ class TestContactSetting(TFCTestCase): for g in self.group_list: self.assertFalse(g.log_messages) - self.assertIsNone( - contact_setting(UserInput("logging on all"), window, *self.args) - ) + self.assertIsNone(contact_setting(UserInput('logging on all'), window, *self.args)) for c in self.contact_list: self.assertTrue(c.log_messages) @@ -514,45 +414,39 @@ class TestContactSetting(TFCTestCase): def test_disable_logging_for_user(self) -> None: # Setup - contact = self.contact_list.get_contact_by_address_or_nick("Alice") + contact = self.contact_list.get_contact_by_address_or_nick('Alice') contact.log_messages = True - window = TxWindow( - uid=self.pub_key, - type=WIN_TYPE_CONTACT, - contact=contact, - window_contacts=[contact], - ) + window = TxWindow(uid=self.pub_key, + type=WIN_TYPE_CONTACT, + contact=contact, + window_contacts=[contact]) # Test self.assertTrue(contact.log_messages) - self.assertIsNone(contact_setting(UserInput("logging off"), window, *self.args)) + self.assertIsNone(contact_setting(UserInput('logging off'), window, *self.args)) self.assertFalse(contact.log_messages) def test_disable_logging_for_group(self) -> None: # Setup - group = self.group_list.get_group("test_group") + group = self.group_list.get_group('test_group') group.log_messages = True - window = TxWindow( - uid=group_name_to_group_id("test_group"), - type=WIN_TYPE_GROUP, - group=group, - window_contacts=group.members, - ) + window = TxWindow(uid=group_name_to_group_id('test_group'), + type=WIN_TYPE_GROUP, + group=group, + window_contacts=group.members) # Test self.assertTrue(group.log_messages) - self.assertIsNone(contact_setting(UserInput("logging off"), window, *self.args)) + self.assertIsNone(contact_setting(UserInput('logging off'), window, *self.args)) self.assertFalse(group.log_messages) def test_disable_logging_for_all_users(self) -> None: # Setup 
contact = self.contact_list.get_contact_by_address_or_nick("Alice") - window = TxWindow( - uid=self.pub_key, - type=WIN_TYPE_CONTACT, - contact=contact, - window_contacts=[contact], - ) + window = TxWindow(uid=self.pub_key, + type=WIN_TYPE_CONTACT, + contact=contact, + window_contacts=[contact]) for c in self.contact_list: c.log_messages = True @@ -565,9 +459,7 @@ class TestContactSetting(TFCTestCase): for g in self.group_list: self.assertTrue(g.log_messages) - self.assertIsNone( - contact_setting(UserInput("logging off all"), window, *self.args) - ) + self.assertIsNone(contact_setting(UserInput('logging off all'), window, *self.args)) for c in self.contact_list: self.assertFalse(c.log_messages) @@ -576,29 +468,25 @@ class TestContactSetting(TFCTestCase): def test_enable_file_reception_for_user(self) -> None: # Setup - contact = self.contact_list.get_contact_by_address_or_nick("Alice") + contact = self.contact_list.get_contact_by_address_or_nick('Alice') contact.file_reception = False - window = TxWindow( - uid=self.pub_key, - type=WIN_TYPE_CONTACT, - contact=contact, - window_contacts=[contact], - ) + window = TxWindow(uid=self.pub_key, + type=WIN_TYPE_CONTACT, + contact=contact, + window_contacts=[contact]) # Test self.assertFalse(contact.file_reception) - self.assertIsNone(contact_setting(UserInput("store on"), window, *self.args)) + self.assertIsNone(contact_setting(UserInput('store on'), window, *self.args)) self.assertTrue(contact.file_reception) def test_enable_file_reception_for_group(self) -> None: # Setup - group = self.group_list.get_group("test_group") - window = TxWindow( - uid=group_name_to_group_id("test_group"), - type=WIN_TYPE_GROUP, - group=group, - window_contacts=group.members, - ) + group = self.group_list.get_group('test_group') + window = TxWindow(uid=group_name_to_group_id('test_group'), + type=WIN_TYPE_GROUP, + group=group, + window_contacts=group.members) for m in group: m.file_reception = False @@ -606,19 +494,17 @@ class TestContactSetting(TFCTestCase): # Test for m in group: self.assertFalse(m.file_reception) - self.assertIsNone(contact_setting(UserInput("store on"), window, *self.args)) + self.assertIsNone(contact_setting(UserInput('store on'), window, *self.args)) for m in group: self.assertTrue(m.file_reception) def test_enable_file_reception_for_all_users(self) -> None: # Setup contact = self.contact_list.get_contact_by_address_or_nick("Alice") - window = TxWindow( - uid=self.pub_key, - type=WIN_TYPE_CONTACT, - contact=contact, - window_contacts=[contact], - ) + window = TxWindow(uid=self.pub_key, + type=WIN_TYPE_CONTACT, + contact=contact, + window_contacts=[contact]) for c in self.contact_list: c.file_reception = False @@ -627,37 +513,31 @@ class TestContactSetting(TFCTestCase): for c in self.contact_list: self.assertFalse(c.file_reception) - self.assertIsNone( - contact_setting(UserInput("store on all"), window, *self.args) - ) + self.assertIsNone(contact_setting(UserInput('store on all'), window, *self.args)) for c in self.contact_list: self.assertTrue(c.file_reception) def test_disable_file_reception_for_user(self) -> None: # Setup - contact = self.contact_list.get_contact_by_address_or_nick("Alice") + contact = self.contact_list.get_contact_by_address_or_nick('Alice') contact.file_reception = True - window = TxWindow( - uid=self.pub_key, - type=WIN_TYPE_CONTACT, - contact=contact, - window_contacts=[contact], - ) + window = TxWindow(uid=self.pub_key, + type=WIN_TYPE_CONTACT, + contact=contact, + window_contacts=[contact]) # Test 
self.assertTrue(contact.file_reception) - self.assertIsNone(contact_setting(UserInput("store off"), window, *self.args)) + self.assertIsNone(contact_setting(UserInput('store off'), window, *self.args)) self.assertFalse(contact.file_reception) def test_disable_file_reception_for_group(self) -> None: # Setup - group = self.group_list.get_group("test_group") - window = TxWindow( - uid=group_name_to_group_id("test_group"), - type=WIN_TYPE_GROUP, - group=group, - window_contacts=group.members, - ) + group = self.group_list.get_group('test_group') + window = TxWindow(uid=group_name_to_group_id('test_group'), + type=WIN_TYPE_GROUP, + group=group, + window_contacts=group.members) for m in group: m.file_reception = True @@ -666,19 +546,17 @@ class TestContactSetting(TFCTestCase): for m in group: self.assertTrue(m.file_reception) - self.assertIsNone(contact_setting(UserInput("store off"), window, *self.args)) + self.assertIsNone(contact_setting(UserInput('store off'), window, *self.args)) for m in group: self.assertFalse(m.file_reception) def test_disable_file_reception_for_all_users(self) -> None: # Setup contact = self.contact_list.get_contact_by_address_or_nick("Alice") - window = TxWindow( - uid=self.pub_key, - type=WIN_TYPE_CONTACT, - contact=contact, - window_contacts=[contact], - ) + window = TxWindow(uid=self.pub_key, + type=WIN_TYPE_CONTACT, + contact=contact, + window_contacts=[contact]) for c in self.contact_list: c.file_reception = True @@ -686,34 +564,32 @@ class TestContactSetting(TFCTestCase): # Test for c in self.contact_list: self.assertTrue(c.file_reception) - self.assertIsNone( - contact_setting(UserInput("store off all"), window, *self.args) - ) + self.assertIsNone(contact_setting(UserInput('store off all'), window, *self.args)) for c in self.contact_list: self.assertFalse(c.file_reception) def test_enable_notifications_for_user(self) -> None: # Setup - contact = self.contact_list.get_contact_by_address_or_nick("Alice") + contact = self.contact_list.get_contact_by_address_or_nick('Alice') contact.notifications = False - window = TxWindow(uid=self.pub_key, type=WIN_TYPE_CONTACT, contact=contact) + window = TxWindow(uid=self.pub_key, + type=WIN_TYPE_CONTACT, + contact=contact) # Test self.assertFalse(contact.notifications) - self.assertIsNone(contact_setting(UserInput("notify on"), window, *self.args)) + self.assertIsNone(contact_setting(UserInput('notify on'), window, *self.args)) self.assertTrue(contact.notifications) def test_enable_notifications_for_group(self) -> None: # Setup - user_input = UserInput("notify on") - group = self.group_list.get_group("test_group") + user_input = UserInput('notify on') + group = self.group_list.get_group('test_group') group.notifications = False - window = TxWindow( - uid=group_name_to_group_id("test_group"), - type=WIN_TYPE_GROUP, - group=group, - window_contacts=group.members, - ) + window = TxWindow(uid=group_name_to_group_id('test_group'), + type=WIN_TYPE_GROUP, + group=group, + window_contacts=group.members) # Test self.assertFalse(group.notifications) @@ -723,12 +599,10 @@ class TestContactSetting(TFCTestCase): def test_enable_notifications_for_all_users(self) -> None: # Setup contact = self.contact_list.get_contact_by_address_or_nick("Alice") - window = TxWindow( - uid=self.pub_key, - type=WIN_TYPE_CONTACT, - contact=contact, - window_contacts=[contact], - ) + window = TxWindow(uid=self.pub_key, + type=WIN_TYPE_CONTACT, + contact=contact, + window_contacts=[contact]) for c in self.contact_list: c.notifications = False @@ -741,9 +615,7 @@ 
class TestContactSetting(TFCTestCase): for g in self.group_list: self.assertFalse(g.notifications) - self.assertIsNone( - contact_setting(UserInput("notify on all"), window, *self.args) - ) + self.assertIsNone(contact_setting(UserInput('notify on all'), window, *self.args)) for c in self.contact_list: self.assertTrue(c.notifications) @@ -752,45 +624,39 @@ class TestContactSetting(TFCTestCase): def test_disable_notifications_for_user(self) -> None: # Setup - contact = self.contact_list.get_contact_by_address_or_nick("Alice") + contact = self.contact_list.get_contact_by_address_or_nick('Alice') contact.notifications = True - window = TxWindow( - uid=self.pub_key, - type=WIN_TYPE_CONTACT, - contact=contact, - window_contacts=[contact], - ) + window = TxWindow(uid=self.pub_key, + type=WIN_TYPE_CONTACT, + contact=contact, + window_contacts=[contact]) # Test self.assertTrue(contact.notifications) - self.assertIsNone(contact_setting(UserInput("notify off"), window, *self.args)) + self.assertIsNone(contact_setting(UserInput('notify off'), window, *self.args)) self.assertFalse(contact.notifications) def test_disable_notifications_for_group(self) -> None: # Setup - group = self.group_list.get_group("test_group") + group = self.group_list.get_group('test_group') group.notifications = True - window = TxWindow( - uid=group_name_to_group_id("test_group"), - type=WIN_TYPE_GROUP, - group=group, - window_contacts=group.members, - ) + window = TxWindow(uid=group_name_to_group_id('test_group'), + type=WIN_TYPE_GROUP, + group=group, + window_contacts=group.members) # Test self.assertTrue(group.notifications) - self.assertIsNone(contact_setting(UserInput("notify off"), window, *self.args)) + self.assertIsNone(contact_setting(UserInput('notify off'), window, *self.args)) self.assertFalse(group.notifications) def test_disable_notifications_for_all_users(self) -> None: # Setup contact = self.contact_list.get_contact_by_address_or_nick("Alice") - window = TxWindow( - uid=self.pub_key, - type=WIN_TYPE_CONTACT, - contact=contact, - window_contacts=[contact], - ) + window = TxWindow(uid=self.pub_key, + type=WIN_TYPE_CONTACT, + contact=contact, + window_contacts=[contact]) for c in self.contact_list: c.notifications = True @@ -803,9 +669,7 @@ class TestContactSetting(TFCTestCase): for g in self.group_list: self.assertTrue(g.notifications) - self.assertIsNone( - contact_setting(UserInput("notify off all"), window, *self.args) - ) + self.assertIsNone(contact_setting(UserInput('notify off all'), window, *self.args)) for c in self.contact_list: self.assertFalse(c.notifications) @@ -813,5 +677,5 @@ class TestContactSetting(TFCTestCase): self.assertFalse(g.notifications) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/transmitter/test_files.py b/tests/transmitter/test_files.py index 9d3071b..433c8d9 100644 --- a/tests/transmitter/test_files.py +++ b/tests/transmitter/test_files.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
@@ -25,79 +25,78 @@ import unittest from src.transmitter.files import File from tests.mock_classes import create_contact, Settings, TxWindow -from tests.utils import cd_unit_test, cleanup, TFCTestCase +from tests.utils import cd_unit_test, cleanup, TFCTestCase class TestFile(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" self.unit_test_dir = cd_unit_test() - self.window = TxWindow() - self.settings = Settings() - self.args = self.window, self.settings + self.window = TxWindow() + self.settings = Settings() + self.args = self.window, self.settings def tearDown(self) -> None: """Post-test actions.""" cleanup(self.unit_test_dir) - def test_missing_file_raises_fr(self) -> None: - self.assert_se("Error: File not found.", File, "./testfile.txt", *self.args) + def test_missing_file_raises_se(self) -> None: + self.assert_se("Error: File not found.", File, './testfile.txt', *self.args) - def test_empty_file_raises_fr(self) -> None: + def test_empty_file_raises_se(self) -> None: # Setup - with open("testfile.txt", "wb+") as f: - f.write(b"") + with open('testfile.txt', 'wb+') as f: + f.write(b'') # Test - self.assert_se( - "Error: Target file is empty.", File, "./testfile.txt", *self.args - ) + self.assert_se("Error: Target file is empty.", File, './testfile.txt', *self.args) - def test_oversize_filename_raises_fr(self) -> None: + def test_oversize_filename_raises_se(self) -> None: # Setup - f_name = 250 * "a" + ".txt" - with open(f_name, "wb+") as f: - f.write(b"a") + f_name = 250 * 'a' + '.txt' + with open(f_name, 'wb+') as f: + f.write(b'a') # Test - self.assert_se("Error: File name is too long.", File, f"./{f_name}", *self.args) + self.assert_se("Error: File name is too long.", File, f'./{f_name}', *self.args) def test_small_file(self) -> None: # Setup input_data = os.urandom(5) - with open("testfile.txt", "wb+") as f: + with open('testfile.txt', 'wb+') as f: f.write(input_data) - self.settings.traffic_masking = True + self.settings.traffic_masking = True self.settings.multi_packet_random_delay = True # Test - file = File("./testfile.txt", *self.args) + file = File('./testfile.txt', *self.args) - self.assertEqual(file.name, b"testfile.txt") - self.assertEqual(file.size_hr, "5.0B") + self.assertEqual(file.name, b'testfile.txt') + self.assertEqual(file.size_hr, '5.0B') self.assertEqual(len(file.plaintext), 114) self.assertIsInstance(file.plaintext, bytes) def test_large_file_and_local_testing(self) -> None: # Setup input_data = os.urandom(2000) - with open("testfile.txt", "wb+") as f: + with open('testfile.txt', 'wb+') as f: f.write(input_data) self.settings.multi_packet_random_delay = True - self.settings.local_testing_mode = True - self.window.window_contacts = [create_contact(c) for c in ["Alice", "Bob"]] + self.settings.local_testing_mode = True + self.window.window_contacts = [create_contact(c) for c in ['Alice', 'Bob']] # Test - file = File("./testfile.txt", *self.args) + file = File('./testfile.txt', *self.args) - self.assertEqual(file.name, b"testfile.txt") + self.assertEqual(file.name, b'testfile.txt') self.assertEqual(len(file.plaintext), 2112) - self.assertEqual(file.size_hr, "2.0KB") + self.assertEqual(file.size_hr, '2.0KB') self.assertIsInstance(file.plaintext, bytes) - self.assertEqual(file.time_hr, "0:01:48") + self.assertEqual(file.time_hr, '0:01:48') -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/transmitter/test_input_loop.py b/tests/transmitter/test_input_loop.py index 09c11dc..b713622 100644 --- 
a/tests/transmitter/test_input_loop.py +++ b/tests/transmitter/test_input_loop.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -21,88 +21,64 @@ along with TFC. If not, see . import unittest -from unittest import mock +from unittest import mock from unittest.mock import MagicMock +from typing import Any -from src.common.crypto import blake2b +from src.common.crypto import blake2b from src.common.statics import CONFIRM_CODE_LENGTH from src.transmitter.input_loop import input_loop -from tests.mock_classes import ( - ContactList, - Gateway, - GroupList, - MasterKey, - OnionService, - Settings, -) -from tests.utils import ( - gen_queue_dict, - nick_to_onion_address, - nick_to_pub_key, - tear_queues, - VALID_ECDHE_PUB_KEY, -) +from tests.mock_classes import ContactList, Gateway, GroupList, MasterKey, OnionService, Settings +from tests.utils import gen_queue_dict, nick_to_onion_address, nick_to_pub_key, tear_queues, VALID_ECDHE_PUB_KEY class TestInputLoop(unittest.TestCase): - conf_code = blake2b(nick_to_pub_key("Alice"), digest_size=CONFIRM_CODE_LENGTH).hex() - input_list = [ - "61", # Enter Relay confirmation code - "61", # Enter Receiver confirmation code - nick_to_onion_address("Alice"), # Enter rx-account for new contact - "Alice", # Enter nick for contact - "", # Enter to default for ECDHE - VALID_ECDHE_PUB_KEY, # Enter public key for contact - "Yes", # Accept key fingerprints for Alice - conf_code, # Confirmation code - "Alice", # Select Alice as the recipient - "Test", # Send test message - "/file", # Open file selection prompt - "", # Give empty string to abort - "/exit", - ] # Enter exit command + conf_code = blake2b(nick_to_pub_key('Alice'), digest_size=CONFIRM_CODE_LENGTH).hex() + input_list = ['61', # Enter Relay confirmation code + '61', # Enter Receiver confirmation code + nick_to_onion_address("Alice"), # Enter rx-account for new contact + 'Alice', # Enter nick for contact + '', # Enter to default for ECDHE + VALID_ECDHE_PUB_KEY, # Enter public key for contact + 'Yes', # Accept key fingerprints for Alice + conf_code, # Confirmation code + 'Alice', # Select Alice as the recipient + 'Test', # Send test message + '/file', # Open file selection prompt + '', # Give empty string to abort + '/exit'] # Enter exit command def setUp(self) -> None: """Pre-test actions.""" - self.settings = Settings(disable_gui_dialog=True) - self.gateway = Gateway() - self.contact_list = ContactList() - self.group_list = GroupList() - self.master_key = MasterKey() + self.settings = Settings(disable_gui_dialog=True) + self.gateway = Gateway() + self.contact_list = ContactList() + self.group_list = GroupList() + self.master_key = MasterKey() self.onion_service = OnionService() - self.queues = gen_queue_dict() + self.queues = gen_queue_dict() def tearDown(self) -> None: """Post-test actions.""" tear_queues(self.queues) - @mock.patch("builtins.input", side_effect=input_list) - @mock.patch("os.fdopen", MagicMock()) - @mock.patch("os.getrandom", lambda n, flags: n * b"a") - @mock.patch("os.urandom", lambda n: n * b"a") - @mock.patch("shutil.get_terminal_size", return_value=[200, 200]) - @mock.patch("src.transmitter.commands.exit_tfc", side_effect=SystemExit) - @mock.patch("sys.stdin", MagicMock()) - @mock.patch("time.sleep", return_value=None) - @mock.patch("src.common.misc.reset_terminal", return_value=None) - def test_input_loop_functions(self, *_) -> None: + 
@mock.patch('builtins.input', side_effect=input_list) + @mock.patch('os.fdopen', MagicMock()) + @mock.patch('os.getrandom', lambda n, flags: n*b'a') + @mock.patch('os.urandom', lambda n: n*b'a') + @mock.patch('shutil.get_terminal_size', return_value=[200, 200]) + @mock.patch('src.transmitter.commands.exit_tfc', side_effect=SystemExit) + @mock.patch('sys.stdin', MagicMock()) + @mock.patch('time.sleep', return_value=None) + @mock.patch('os.system', return_value=None) + def test_input_loop_functions(self, *_: Any) -> None: with self.assertRaises(SystemExit): - self.assertIsNone( - input_loop( - self.queues, - self.settings, - self.gateway, - self.contact_list, - self.group_list, - self.master_key, - self.onion_service, - stdin_fd=1, - ) - ) + self.assertIsNone(input_loop(self.queues, self.settings, self.gateway, self.contact_list, + self.group_list, self.master_key, self.onion_service, stdin_fd=1)) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/transmitter/test_key_exchanges.py b/tests/transmitter/test_key_exchanges.py index 9244946..feff025 100644 --- a/tests/transmitter/test_key_exchanges.py +++ b/tests/transmitter/test_key_exchanges.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -23,129 +23,82 @@ import os import unittest from unittest import mock +from typing import Any -from src.common.crypto import blake2b +from src.common.crypto import blake2b from src.common.encoding import b58encode -from src.common.statics import ( - COMMAND_PACKET_QUEUE, - CONFIRM_CODE_LENGTH, - ECDHE, - FINGERPRINT_LENGTH, - KDB_ADD_ENTRY_HEADER, - KEX_STATUS_HAS_RX_PSK, - KEX_STATUS_NO_RX_PSK, - KEX_STATUS_PENDING, - KEX_STATUS_UNVERIFIED, - KEX_STATUS_VERIFIED, - KEY_MANAGEMENT_QUEUE, - LOCAL_ID, - LOCAL_NICK, - LOCAL_PUBKEY, - RELAY_PACKET_QUEUE, - SYMMETRIC_KEY_LENGTH, - TFC_PUBLIC_KEY_LENGTH, - WIN_TYPE_CONTACT, - WIN_TYPE_GROUP, - XCHACHA20_NONCE_LENGTH, -) +from src.common.statics import (COMMAND_PACKET_QUEUE, CONFIRM_CODE_LENGTH, ECDHE, FINGERPRINT_LENGTH, + KDB_ADD_ENTRY_HEADER, KEX_STATUS_HAS_RX_PSK, KEX_STATUS_NO_RX_PSK, KEX_STATUS_PENDING, + KEX_STATUS_UNVERIFIED, KEX_STATUS_VERIFIED, KEY_MANAGEMENT_QUEUE, LOCAL_ID, LOCAL_NICK, + LOCAL_PUBKEY, RELAY_PACKET_QUEUE, SYMMETRIC_KEY_LENGTH, TFC_PUBLIC_KEY_LENGTH, + WIN_TYPE_CONTACT, WIN_TYPE_GROUP, XCHACHA20_NONCE_LENGTH) -from src.transmitter.key_exchanges import ( - create_pre_shared_key, - export_onion_service_data, - new_local_key, -) -from src.transmitter.key_exchanges import ( - rxp_load_psk, - start_key_exchange, - verify_fingerprints, -) +from src.transmitter.key_exchanges import create_pre_shared_key, export_onion_service_data, new_local_key +from src.transmitter.key_exchanges import rxp_load_psk, start_key_exchange, verify_fingerprints -from tests.mock_classes import ( - ContactList, - create_contact, - Gateway, - OnionService, - Settings, - TxWindow, -) -from tests.utils import cd_unit_test, cleanup, gen_queue_dict, ignored, nick_to_pub_key -from tests.utils import ( - nick_to_short_address, - tear_queues, - TFCTestCase, - VALID_ECDHE_PUB_KEY, -) +from tests.mock_classes import ContactList, create_contact, Gateway, OnionService, Settings, TxWindow +from tests.utils import cd_unit_test, cleanup, gen_queue_dict, ignored, nick_to_pub_key +from tests.utils import nick_to_short_address, tear_queues, TFCTestCase, VALID_ECDHE_PUB_KEY class 
TestOnionService(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.contact_list = ContactList() - self.settings = Settings() + self.contact_list = ContactList() + self.settings = Settings() self.onion_service = OnionService() - self.queues = gen_queue_dict() - self.gateway = Gateway() + self.queues = gen_queue_dict() + self.gateway = Gateway() - @mock.patch("os.urandom", side_effect=[b"a"]) - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", side_effect=["invalid_cc", "", "61"]) - def test_onion_service_delivery(self, *_) -> None: - self.assertIsNone( - export_onion_service_data( - self.contact_list, self.settings, self.onion_service, self.gateway - ) - ) + @mock.patch('os.urandom', side_effect=[b'a']) + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', side_effect=['invalid_cc', '', '61']) + def test_onion_service_delivery(self, *_: Any) -> None: + self.assertIsNone(export_onion_service_data(self.contact_list, self.settings, self.onion_service, self.gateway)) self.assertEqual(len(self.gateway.packets), 2) class TestLocalKey(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" self.contact_list = ContactList() - self.settings = Settings() - self.queues = gen_queue_dict() - self.args = self.contact_list, self.settings, self.queues + self.settings = Settings() + self.queues = gen_queue_dict() + self.args = self.contact_list, self.settings, self.queues def tearDown(self) -> None: """Post-test actions.""" tear_queues(self.queues) - def test_new_local_key_when_traffic_masking_is_enabled_raises_fr(self) -> None: + def test_new_local_key_when_traffic_masking_is_enabled_raises_se(self) -> None: self.settings.traffic_masking = True - self.contact_list.contacts = [create_contact(LOCAL_ID)] - self.assert_se( - "Error: Command is disabled during traffic masking.", - new_local_key, - *self.args, - ) + self.contact_list.contacts = [create_contact(LOCAL_ID)] + self.assert_se("Error: Command is disabled during traffic masking.", new_local_key, *self.args) - @mock.patch("src.common.misc.reset_terminal", return_value=None) - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", side_effect=["bad", "", "61"]) - @mock.patch( - "os.getrandom", - side_effect=[ - SYMMETRIC_KEY_LENGTH * b"a", - SYMMETRIC_KEY_LENGTH * b"a", - SYMMETRIC_KEY_LENGTH * b"a", - XCHACHA20_NONCE_LENGTH * b"a", - SYMMETRIC_KEY_LENGTH * b"a", - SYMMETRIC_KEY_LENGTH * b"a", - ], - ) - @mock.patch("os.urandom", return_value=CONFIRM_CODE_LENGTH * b"a") - def test_new_local_key(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', side_effect=['bad', '', '61']) + @mock.patch('os.getrandom', side_effect=[SYMMETRIC_KEY_LENGTH*b'a', + SYMMETRIC_KEY_LENGTH*b'a', + SYMMETRIC_KEY_LENGTH*b'a', + XCHACHA20_NONCE_LENGTH*b'a', + SYMMETRIC_KEY_LENGTH*b'a', + SYMMETRIC_KEY_LENGTH*b'a']) + @mock.patch('os.urandom', return_value=CONFIRM_CODE_LENGTH*b'a') + @mock.patch('os.system', return_value=None) + def test_new_local_key(self, *_: Any) -> None: # Setup self.settings.nc_bypass_messages = False - self.settings.traffic_masking = False + self.settings.traffic_masking = False # Test self.assertIsNone(new_local_key(*self.args)) local_contact = self.contact_list.get_contact_by_pub_key(LOCAL_PUBKEY) - self.assertEqual(local_contact.onion_pub_key, LOCAL_PUBKEY) - self.assertEqual(local_contact.nick, LOCAL_NICK) - self.assertEqual(local_contact.tx_fingerprint, bytes(FINGERPRINT_LENGTH)) + 
self.assertEqual(local_contact.onion_pub_key, LOCAL_PUBKEY) + self.assertEqual(local_contact.nick, LOCAL_NICK) + self.assertEqual(local_contact.tx_fingerprint, blake2b(b58encode(blake2b(SYMMETRIC_KEY_LENGTH*b'a')).encode())) self.assertEqual(local_contact.rx_fingerprint, bytes(FINGERPRINT_LENGTH)) self.assertFalse(local_contact.log_messages) self.assertFalse(local_contact.file_reception) @@ -153,9 +106,7 @@ class TestLocalKey(TFCTestCase): self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1) - cmd, account, tx_key, rx_key, tx_hek, rx_hek = self.queues[ - KEY_MANAGEMENT_QUEUE - ].get() + cmd, account, tx_key, rx_key, tx_hek, rx_hek = self.queues[KEY_MANAGEMENT_QUEUE].get() self.assertEqual(cmd, KDB_ADD_ENTRY_HEADER) self.assertEqual(account, LOCAL_PUBKEY) @@ -163,111 +114,75 @@ class TestLocalKey(TFCTestCase): self.assertIsInstance(key, bytes) self.assertEqual(len(key), SYMMETRIC_KEY_LENGTH) - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", side_effect=KeyboardInterrupt) - @mock.patch("os.getrandom", lambda x, flags: x * b"a") - def test_keyboard_interrupt_raises_fr(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', side_effect=KeyboardInterrupt) + @mock.patch('os.getrandom', lambda x, flags: x * b'a') + def test_keyboard_interrupt_raises_se(self, *_: Any) -> None: self.assert_se("Local key setup aborted.", new_local_key, *self.args) class TestVerifyFingerprints(unittest.TestCase): - @mock.patch("builtins.input", return_value="Yes") - def test_correct_fingerprint(self, _) -> None: - self.assertTrue( - verify_fingerprints(bytes(FINGERPRINT_LENGTH), bytes(FINGERPRINT_LENGTH)) - ) - @mock.patch("builtins.input", return_value="No") - def test_incorrect_fingerprint(self, _) -> None: - self.assertFalse( - verify_fingerprints(bytes(FINGERPRINT_LENGTH), bytes(FINGERPRINT_LENGTH)) - ) + @mock.patch('builtins.input', return_value='Yes') + def test_correct_fingerprint(self, _: Any) -> None: + self.assertTrue(verify_fingerprints(bytes(FINGERPRINT_LENGTH), bytes(FINGERPRINT_LENGTH))) + + @mock.patch('builtins.input', return_value='No') + def test_incorrect_fingerprint(self, _: Any) -> None: + self.assertFalse(verify_fingerprints(bytes(FINGERPRINT_LENGTH), bytes(FINGERPRINT_LENGTH))) class TestKeyExchange(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.contact_list = ContactList() - self.settings = Settings() - self.queues = gen_queue_dict() - self.args = self.contact_list, self.settings, self.queues + self.contact_list = ContactList(nicks=[LOCAL_ID]) + self.settings = Settings() + self.queues = gen_queue_dict() + self.args = self.contact_list, self.settings, self.queues def tearDown(self) -> None: """Post-test actions.""" tear_queues(self.queues) - @mock.patch("shutil.get_terminal_size", return_value=[200, 200]) - @mock.patch( - "builtins.input", - return_value=b58encode(bytes(TFC_PUBLIC_KEY_LENGTH), public_key=True), - ) - def test_zero_public_key_raises_fr(self, *_) -> None: - self.assert_se( - "Error: Zero public key", - start_key_exchange, - nick_to_pub_key("Alice"), - "Alice", - *self.args, - ) + @mock.patch('shutil.get_terminal_size', return_value=[200, 200]) + @mock.patch('builtins.input', return_value=b58encode(bytes(TFC_PUBLIC_KEY_LENGTH), public_key=True)) + def test_zero_public_key_raises_se(self, *_: Any) -> None: + self.assert_se("Error: Zero public key", start_key_exchange, nick_to_pub_key("Alice"), 'Alice', *self.args) - @mock.patch("shutil.get_terminal_size", return_value=[200, 
200]) - @mock.patch( - "builtins.input", - return_value=b58encode((TFC_PUBLIC_KEY_LENGTH - 1) * b"a", public_key=True), - ) - def test_invalid_public_key_length_raises_fr(self, *_) -> None: - self.assert_se( - "Error: Invalid public key length", - start_key_exchange, - nick_to_pub_key("Alice"), - "Alice", - *self.args, - ) + @mock.patch('shutil.get_terminal_size', return_value=[200, 200]) + @mock.patch('builtins.input', return_value=b58encode((TFC_PUBLIC_KEY_LENGTH-1)*b'a', public_key=True)) + def test_invalid_public_key_length_raises_se(self, *_: Any) -> None: + self.assert_se("Error: Invalid public key length", + start_key_exchange, nick_to_pub_key("Alice"), 'Alice', *self.args) - @mock.patch( - "builtins.input", - side_effect=[ - "", # Empty message should resend key - VALID_ECDHE_PUB_KEY[:-1], # Short key should fail - VALID_ECDHE_PUB_KEY + "a", # Long key should fail - VALID_ECDHE_PUB_KEY[:-1] + "a", # Invalid key should fail - VALID_ECDHE_PUB_KEY, # Correct key - "No", - ], - ) # Fingerprint mismatch) - @mock.patch("time.sleep", return_value=None) - @mock.patch("shutil.get_terminal_size", return_value=[200, 200]) - def test_fingerprint_mismatch_raises_fr(self, *_) -> None: - self.assert_se( - "Error: Fingerprint mismatch", - start_key_exchange, - nick_to_pub_key("Alice"), - "Alice", - *self.args, - ) + @mock.patch('builtins.input', side_effect=['', # Empty message should resend key + VALID_ECDHE_PUB_KEY[:-1], # Short key should fail + VALID_ECDHE_PUB_KEY + 'a', # Long key should fail + VALID_ECDHE_PUB_KEY[:-1] + 'a', # Invalid key should fail + VALID_ECDHE_PUB_KEY, # Correct key + 'No']) # Fingerprint mismatch) + @mock.patch('time.sleep', return_value=None) + @mock.patch('shutil.get_terminal_size', return_value=[200, 200]) + def test_fingerprint_mismatch_raises_se(self, *_: Any) -> None: + self.assert_se("Error: Fingerprint mismatch", start_key_exchange, nick_to_pub_key("Alice"), 'Alice', *self.args) - @mock.patch( - "builtins.input", - side_effect=[ - "", # Resend public key - VALID_ECDHE_PUB_KEY, # Correct key - "Yes", # Fingerprint match - "", # Resend contact data - "ff", # Invalid confirmation code - blake2b(nick_to_pub_key("Alice"), digest_size=CONFIRM_CODE_LENGTH).hex(), - ], - ) - @mock.patch("shutil.get_terminal_size", return_value=[200, 200]) - @mock.patch("time.sleep", return_value=None) - def test_successful_exchange(self, *_) -> None: - self.assertIsNone( - start_key_exchange(nick_to_pub_key("Alice"), "Alice", *self.args) - ) + @mock.patch('builtins.input', side_effect=['', # Resend public key + VALID_ECDHE_PUB_KEY, # Correct key + 'Yes', # Fingerprint match + '', # Resend contact data + 'ff', # Invalid confirmation code + blake2b(nick_to_pub_key('Alice'), digest_size=CONFIRM_CODE_LENGTH).hex() + ]) + @mock.patch('shutil.get_terminal_size', return_value=[200, 200]) + @mock.patch('time.sleep', return_value=None) + def test_successful_exchange(self, *_: Any) -> None: + self.assertIsNone(start_key_exchange(nick_to_pub_key("Alice"), 'Alice', *self.args)) contact = self.contact_list.get_contact_by_pub_key(nick_to_pub_key("Alice")) - self.assertEqual(contact.onion_pub_key, nick_to_pub_key("Alice")) - self.assertEqual(contact.nick, "Alice") - self.assertEqual(contact.kex_status, KEX_STATUS_VERIFIED) + self.assertEqual(contact.onion_pub_key, nick_to_pub_key("Alice")) + self.assertEqual(contact.nick, 'Alice') + self.assertEqual(contact.kex_status, KEX_STATUS_VERIFIED) self.assertIsInstance(contact.tx_fingerprint, bytes) self.assertIsInstance(contact.rx_fingerprint, bytes) 
self.assertEqual(len(contact.tx_fingerprint), FINGERPRINT_LENGTH) @@ -277,126 +192,99 @@ class TestKeyExchange(TFCTestCase): self.assertTrue(contact.notifications) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 2) - self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 2) + self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 2) - cmd, account, tx_key, rx_key, tx_hek, rx_hek = self.queues[ - KEY_MANAGEMENT_QUEUE - ].get() + cmd, account, tx_key, rx_key, tx_hek, rx_hek = self.queues[KEY_MANAGEMENT_QUEUE].get() - self.assertEqual(cmd, KDB_ADD_ENTRY_HEADER) - self.assertEqual(account, nick_to_pub_key("Alice")) + self.assertEqual(cmd, KDB_ADD_ENTRY_HEADER) + self.assertEqual(account, nick_to_pub_key("Alice")) self.assertEqual(len(tx_key), SYMMETRIC_KEY_LENGTH) for key in [tx_key, rx_key, tx_hek, rx_hek]: self.assertIsInstance(key, bytes) self.assertEqual(len(key), SYMMETRIC_KEY_LENGTH) - @mock.patch( - "builtins.input", - side_effect=[ - "", # Resend public key - VALID_ECDHE_PUB_KEY, # Correct key - KeyboardInterrupt, # Skip fingerprint verification - "", # Manual proceed for warning message - blake2b(nick_to_pub_key("Alice"), digest_size=CONFIRM_CODE_LENGTH).hex(), - ], - ) - @mock.patch("time.sleep", return_value=None) - @mock.patch("shutil.get_terminal_size", return_value=[200, 200]) - def test_successful_exchange_skip_fingerprint_verification(self, *_) -> None: - self.assertIsNone( - start_key_exchange(nick_to_pub_key("Alice"), "Alice", *self.args) - ) + @mock.patch('builtins.input', side_effect=['', # Resend public key + VALID_ECDHE_PUB_KEY, # Correct key + KeyboardInterrupt, # Skip fingerprint verification + '', # Manual proceed for warning message + blake2b(nick_to_pub_key('Alice'), + digest_size=CONFIRM_CODE_LENGTH).hex()]) + @mock.patch('time.sleep', return_value=None) + @mock.patch('shutil.get_terminal_size', return_value=[200, 200]) + def test_successful_exchange_skip_fingerprint_verification(self, *_: Any) -> None: + self.assertIsNone(start_key_exchange(nick_to_pub_key("Alice"), 'Alice', *self.args)) contact = self.contact_list.get_contact_by_pub_key(nick_to_pub_key("Alice")) self.assertEqual(contact.onion_pub_key, nick_to_pub_key("Alice")) - self.assertEqual(contact.nick, "Alice") - self.assertEqual(contact.kex_status, KEX_STATUS_UNVERIFIED) + self.assertEqual(contact.nick, 'Alice') + self.assertEqual(contact.kex_status, KEX_STATUS_UNVERIFIED) - @mock.patch( - "os.getrandom", - side_effect=[SYMMETRIC_KEY_LENGTH * b"a", SYMMETRIC_KEY_LENGTH * b"a"], - ) - @mock.patch( - "builtins.input", - side_effect=[ - KeyboardInterrupt, - VALID_ECDHE_PUB_KEY, - "Yes", - blake2b(nick_to_pub_key("Alice"), digest_size=CONFIRM_CODE_LENGTH).hex(), - ], - ) - @mock.patch("time.sleep", return_value=None) - @mock.patch("shutil.get_terminal_size", return_value=[200, 200]) - def test_successful_exchange_with_previous_key(self, *_) -> None: + @mock.patch('os.getrandom', side_effect=[SYMMETRIC_KEY_LENGTH * b'a', + SYMMETRIC_KEY_LENGTH * b'a']) + @mock.patch('builtins.input', side_effect=[KeyboardInterrupt, + VALID_ECDHE_PUB_KEY, + 'Yes', + blake2b(nick_to_pub_key('Alice'), + digest_size=CONFIRM_CODE_LENGTH).hex()]) + @mock.patch('time.sleep', return_value=None) + @mock.patch('shutil.get_terminal_size', return_value=[200, 200]) + def test_successful_exchange_with_previous_key(self, *_: Any) -> None: # Test caching of private key - self.assert_se( - "Key exchange interrupted.", - start_key_exchange, - nick_to_pub_key("Alice"), - "Alice", - *self.args, - ) + self.assert_se("Key exchange 
interrupted.", start_key_exchange, nick_to_pub_key('Alice'), 'Alice', *self.args) - alice = self.contact_list.get_contact_by_address_or_nick("Alice") + alice = self.contact_list.get_contact_by_address_or_nick('Alice') self.assertEqual(alice.kex_status, KEX_STATUS_PENDING) # Test re-using private key - self.assertIsNone( - start_key_exchange(nick_to_pub_key("Alice"), "Alice", *self.args) - ) + self.assertIsNone(start_key_exchange(nick_to_pub_key('Alice'), 'Alice', *self.args)) self.assertIsNone(alice.tfc_private_key) self.assertEqual(alice.kex_status, KEX_STATUS_VERIFIED) class TestPSK(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" self.unit_test_dir = cd_unit_test() - self.contact_list = ContactList() - self.settings = Settings(disable_gui_dialog=True) - self.queues = gen_queue_dict() + self.contact_list = ContactList() + self.settings = Settings(disable_gui_dialog=True) + self.queues = gen_queue_dict() self.onion_service = OnionService() - self.args = self.contact_list, self.settings, self.onion_service, self.queues + self.args = self.contact_list, self.settings, self.onion_service, self.queues def tearDown(self) -> None: """Post-test actions.""" cleanup(self.unit_test_dir) with ignored(OSError): - os.remove( - f"{self.onion_service.user_short_address}.psk - Give to {nick_to_short_address('Alice')}" - ) + os.remove(f"{self.onion_service.user_short_address}.psk - Give to {nick_to_short_address('Alice')}") tear_queues(self.queues) - @mock.patch("builtins.input", side_effect=["/root/", ".", "fc"]) - @mock.patch("time.sleep", return_value=None) - @mock.patch("getpass.getpass", return_value="test_password") - @mock.patch("src.transmitter.key_exchanges.ARGON2_PSK_MEMORY_COST", 1000) - @mock.patch("src.transmitter.key_exchanges.ARGON2_PSK_TIME_COST", 1) - def test_psk_creation(self, *_) -> None: - self.assertIsNone( - create_pre_shared_key(nick_to_pub_key("Alice"), "Alice", *self.args) - ) + @mock.patch('builtins.input', side_effect=['/root/', '.', 'fc']) + @mock.patch('time.sleep', return_value=None) + @mock.patch('getpass.getpass', return_value='test_password') + @mock.patch('src.transmitter.key_exchanges.ARGON2_PSK_MEMORY_COST', 1000) + @mock.patch('src.transmitter.key_exchanges.ARGON2_PSK_TIME_COST', 1) + def test_psk_creation(self, *_: Any) -> None: + self.assertIsNone(create_pre_shared_key(nick_to_pub_key("Alice"), 'Alice', *self.args)) contact = self.contact_list.get_contact_by_pub_key(nick_to_pub_key("Alice")) - self.assertEqual(contact.onion_pub_key, nick_to_pub_key("Alice")) - self.assertEqual(contact.nick, "Alice") + self.assertEqual(contact.onion_pub_key, nick_to_pub_key("Alice")) + self.assertEqual(contact.nick, 'Alice') self.assertEqual(contact.tx_fingerprint, bytes(FINGERPRINT_LENGTH)) self.assertEqual(contact.rx_fingerprint, bytes(FINGERPRINT_LENGTH)) - self.assertEqual(contact.kex_status, KEX_STATUS_NO_RX_PSK) + self.assertEqual(contact.kex_status, KEX_STATUS_NO_RX_PSK) self.assertFalse(contact.log_messages) self.assertFalse(contact.file_reception) self.assertTrue(contact.notifications) - cmd, account, tx_key, rx_key, tx_hek, rx_hek = self.queues[ - KEY_MANAGEMENT_QUEUE - ].get() + cmd, account, tx_key, rx_key, tx_hek, rx_hek = self.queues[KEY_MANAGEMENT_QUEUE].get() - self.assertEqual(cmd, KDB_ADD_ENTRY_HEADER) + self.assertEqual(cmd, KDB_ADD_ENTRY_HEADER) self.assertEqual(account, nick_to_pub_key("Alice")) for key in [tx_key, rx_key, tx_hek, rx_hek]: @@ -404,30 +292,22 @@ class TestPSK(TFCTestCase): self.assertEqual(len(key), SYMMETRIC_KEY_LENGTH) 
self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1) - self.assertTrue( - os.path.isfile( - f"{self.onion_service.user_short_address}.psk - Give to {nick_to_short_address('Alice')}" - ) - ) + self.assertTrue(os.path.isfile( + f"{self.onion_service.user_short_address}.psk - Give to {nick_to_short_address('Alice')}")) - @mock.patch("time.sleep", return_value=None) - @mock.patch("getpass.getpass", side_effect=KeyboardInterrupt) - def test_keyboard_interrupt_raises_fr(self, *_) -> None: - self.assert_se( - "PSK generation aborted.", - create_pre_shared_key, - nick_to_pub_key("Alice"), - "Alice", - *self.args, - ) + @mock.patch('time.sleep', return_value=None) + @mock.patch('getpass.getpass', side_effect=KeyboardInterrupt) + def test_keyboard_interrupt_raises_se(self, *_: Any) -> None: + self.assert_se("PSK generation aborted.", create_pre_shared_key, nick_to_pub_key("Alice"), 'Alice', *self.args) class TestReceiverLoadPSK(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" self.settings = Settings() - self.queues = gen_queue_dict() - self.args = self.settings, self.queues + self.queues = gen_queue_dict() + self.args = self.settings, self.queues def tearDown(self) -> None: """Post-test actions.""" @@ -438,87 +318,59 @@ class TestReceiverLoadPSK(TFCTestCase): self.settings.traffic_masking = True # Test - self.assert_se( - "Error: Command is disabled during traffic masking.", - rxp_load_psk, - None, - None, - *self.args, - ) + self.assert_se("Error: Command is disabled during traffic masking.", rxp_load_psk, None, None, *self.args) - def test_active_group_raises_fr(self) -> None: + def test_active_group_raises_se(self) -> None: # Setup window = TxWindow(type=WIN_TYPE_GROUP) # Test - self.assert_se( - "Error: Group is selected.", rxp_load_psk, window, None, *self.args - ) + self.assert_se("Error: Group is selected.", rxp_load_psk, window, None, *self.args) - def test_ecdhe_key_raises_fr(self) -> None: + def test_ecdhe_key_raises_se(self) -> None: # Setup - contact = create_contact("Alice") + contact = create_contact('Alice') contact_list = ContactList(contacts=[contact]) - window = TxWindow( - type=WIN_TYPE_CONTACT, uid=nick_to_pub_key("Alice"), contact=contact - ) + window = TxWindow(type=WIN_TYPE_CONTACT, + uid=nick_to_pub_key("Alice"), + contact=contact) # Test - self.assert_se( - f"Error: The current key was exchanged with {ECDHE}.", - rxp_load_psk, - window, - contact_list, - *self.args, - ) + self.assert_se(f"Error: The current key was exchanged with {ECDHE}.", + rxp_load_psk, window, contact_list, *self.args) - @mock.patch("src.transmitter.key_exchanges.ARGON2_PSK_MEMORY_COST", 1000) - @mock.patch("src.transmitter.key_exchanges.ARGON2_PSK_TIME_COST", 0.01) - @mock.patch("time.sleep", return_value=None) - @mock.patch( - "builtins.input", - side_effect=[ - b"0".hex(), - blake2b(nick_to_pub_key("Alice"), digest_size=CONFIRM_CODE_LENGTH).hex(), - ], - ) - def test_successful_command(self, *_) -> None: + @mock.patch('src.transmitter.key_exchanges.ARGON2_PSK_MEMORY_COST', 1000) + @mock.patch('src.transmitter.key_exchanges.ARGON2_PSK_TIME_COST', 0.01) + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', side_effect=[b'0'.hex(), blake2b(nick_to_pub_key('Alice'), + digest_size=CONFIRM_CODE_LENGTH).hex()]) + def test_successful_command(self, *_: Any) -> None: # Setup - contact = create_contact("Alice", kex_status=KEX_STATUS_NO_RX_PSK) + contact = create_contact('Alice', kex_status=KEX_STATUS_NO_RX_PSK) contact_list = ContactList(contacts=[contact]) - 
window = TxWindow( - type=WIN_TYPE_CONTACT, - name="Alice", - uid=nick_to_pub_key("Alice"), - contact=contact, - ) + window = TxWindow(type=WIN_TYPE_CONTACT, + name='Alice', + uid=nick_to_pub_key("Alice"), + contact=contact) # Test - self.assert_se( - "Removed PSK reminder for Alice.", - rxp_load_psk, - window, - contact_list, - *self.args, - ) + self.assert_se("Removed PSK reminder for Alice.", rxp_load_psk, window, contact_list, *self.args) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1) self.assertEqual(contact.kex_status, KEX_STATUS_HAS_RX_PSK) - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", side_effect=KeyboardInterrupt) - def test_keyboard_interrupt_raises_fr(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', side_effect=KeyboardInterrupt) + def test_keyboard_interrupt_raises_se(self, *_: Any) -> None: # Setup - contact = create_contact("Alice", kex_status=KEX_STATUS_NO_RX_PSK) + contact = create_contact('Alice', kex_status=KEX_STATUS_NO_RX_PSK) contact_list = ContactList(contacts=[contact]) - window = TxWindow( - type=WIN_TYPE_CONTACT, uid=nick_to_pub_key("Alice"), contact=contact - ) + window = TxWindow(type=WIN_TYPE_CONTACT, + uid=nick_to_pub_key("Alice"), + contact=contact) # Test - self.assert_se( - "PSK verification aborted.", rxp_load_psk, window, contact_list, *self.args - ) + self.assert_se("PSK verification aborted.", rxp_load_psk, window, contact_list, *self.args) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/transmitter/test_packet.py b/tests/transmitter/test_packet.py index c382238..d5cd1ee 100644 --- a/tests/transmitter/test_packet.py +++ b/tests/transmitter/test_packet.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
@@ -25,78 +25,31 @@ import time import unittest from multiprocessing import Queue -from unittest import mock +from unittest import mock +from typing import Any -from src.common.statics import ( - ASSEMBLY_PACKET_LENGTH, - COMMAND, - COMMAND_PACKET_QUEUE, - C_A_HEADER, - C_E_HEADER, - C_L_HEADER, - C_S_HEADER, - FILE, - F_A_HEADER, - F_E_HEADER, - F_L_HEADER, - F_S_HEADER, - GROUP_MSG_INVITE_HEADER, - LOCAL_ID, - MESSAGE, - MESSAGE_PACKET_QUEUE, - M_A_HEADER, - M_E_HEADER, - M_L_HEADER, - M_S_HEADER, - RELAY_PACKET_QUEUE, - SYMMETRIC_KEY_LENGTH, - TM_COMMAND_PACKET_QUEUE, - TM_FILE_PACKET_QUEUE, - TM_MESSAGE_PACKET_QUEUE, - WIN_TYPE_CONTACT, - WIN_TYPE_GROUP, -) +from src.common.statics import (ASSEMBLY_PACKET_LENGTH, COMMAND, COMMAND_PACKET_QUEUE, C_A_HEADER, C_E_HEADER, + C_L_HEADER, C_S_HEADER, FILE, F_A_HEADER, F_E_HEADER, F_L_HEADER, F_S_HEADER, + GROUP_MSG_INVITE_HEADER, LOCAL_ID, MESSAGE, MESSAGE_PACKET_QUEUE, M_A_HEADER, + M_E_HEADER, M_L_HEADER, M_S_HEADER, RELAY_PACKET_QUEUE, SYMMETRIC_KEY_LENGTH, + TM_COMMAND_PACKET_QUEUE, TM_FILE_PACKET_QUEUE, TM_MESSAGE_PACKET_QUEUE, + WIN_TYPE_CONTACT, WIN_TYPE_GROUP) -from src.transmitter.packet import ( - cancel_packet, - queue_command, - queue_file, - queue_message, - queue_assembly_packets, -) +from src.transmitter.packet import cancel_packet, queue_command, queue_file, queue_message, queue_assembly_packets from src.transmitter.packet import send_file, send_packet, split_to_assembly_packets -from tests.mock_classes import ( - create_contact, - create_group, - create_keyset, - Gateway, - ContactList, - KeyList, -) -from tests.mock_classes import ( - nick_to_pub_key, - OnionService, - Settings, - TxWindow, - UserInput, -) -from tests.utils import ( - cd_unit_test, - cleanup, - gen_queue_dict, - tear_queue, - tear_queues, - TFCTestCase, -) +from tests.mock_classes import create_contact, create_group, create_keyset, Gateway, ContactList, KeyList +from tests.mock_classes import nick_to_pub_key, OnionService, Settings, TxWindow, UserInput +from tests.utils import cd_unit_test, cleanup, gen_queue_dict, tear_queue, tear_queues, TFCTestCase class TestQueueMessage(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.queues = gen_queue_dict() + self.queues = gen_queue_dict() self.settings = Settings() - self.args = self.settings, self.queues + self.args = self.settings, self.queues def tearDown(self) -> None: """Post-test actions.""" @@ -104,9 +57,9 @@ class TestQueueMessage(unittest.TestCase): def test_private_message_header(self) -> None: # Setup - user_input = UserInput(plaintext="Test message", type=MESSAGE) - window = TxWindow(log_messages=True) - window.window_contacts = [create_contact("Alice")] + user_input = UserInput(plaintext='Test message', type=MESSAGE) + window = TxWindow(log_messages=True) + window.window_contacts = [create_contact('Alice')] # Test self.assertIsNone(queue_message(user_input, window, *self.args)) @@ -114,14 +67,12 @@ class TestQueueMessage(unittest.TestCase): def test_group_message_header(self) -> None: # Setup - user_input = UserInput(plaintext="Test message", type=MESSAGE) - window = TxWindow( - name="test_group", - type=WIN_TYPE_GROUP, - group=create_group("test_group"), - log_messages=True, - ) - window.window_contacts = [create_contact("Alice")] + user_input = UserInput(plaintext='Test message', type=MESSAGE) + window = TxWindow(name='test_group', + type=WIN_TYPE_GROUP, + group=create_group('test_group'), + log_messages=True) + window.window_contacts = [create_contact('Alice')] # Test 
self.assertIsNone(queue_message(user_input, window, *self.args)) @@ -129,180 +80,143 @@ class TestQueueMessage(unittest.TestCase): def test_group_management_message_header(self) -> None: # Setup - user_input = UserInput(plaintext="Test message", type=MESSAGE) - window = TxWindow(log_messages=True) - window.window_contacts = [create_contact("Alice")] + user_input = UserInput(plaintext='Test message', type=MESSAGE) + window = TxWindow(log_messages=True) + window.window_contacts = [create_contact('Alice')] # Test - self.assertIsNone( - queue_message( - user_input, window, *self.args, header=GROUP_MSG_INVITE_HEADER - ) - ) + self.assertIsNone(queue_message(user_input, window, *self.args, header=GROUP_MSG_INVITE_HEADER)) self.assertEqual(self.queues[MESSAGE_PACKET_QUEUE].qsize(), 1) class TestSendFile(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" self.unit_test_dir = cd_unit_test() - self.settings = Settings() - self.queues = gen_queue_dict() - self.window = TxWindow() + self.settings = Settings() + self.queues = gen_queue_dict() + self.window = TxWindow() self.onion_service = OnionService() - self.contact_list = ContactList(nicks=["Alice", "Bob", "Charlie"]) - self.args = self.settings, self.queues, self.window + self.contact_list = ContactList(nicks=['Alice', 'Bob', 'Charlie']) + self.args = self.settings, self.queues, self.window def tearDown(self) -> None: """Post-test actions.""" cleanup(self.unit_test_dir) tear_queues(self.queues) - def test_traffic_masking_raises_fr(self) -> None: + def test_traffic_masking_raises_se(self) -> None: self.settings.traffic_masking = True - self.assert_se( - "Error: Command is disabled during traffic masking.", - send_file, - "testfile.txt", - *self.args - ) + self.assert_se("Error: Command is disabled during traffic masking.", send_file, "testfile.txt", *self.args) - def test_missing_file_raises_fr(self) -> None: + def test_missing_file_raises_se(self) -> None: self.assert_se("Error: File not found.", send_file, "testfile.txt", *self.args) - def test_empty_file_raises_fr(self) -> None: + def test_empty_file_raises_se(self) -> None: # Setup - open("testfile.txt", "wb+").close() + open('testfile.txt', 'wb+').close() # Test - self.assert_se( - "Error: Target file is empty.", send_file, "testfile.txt", *self.args - ) + self.assert_se("Error: Target file is empty.", send_file, "testfile.txt", *self.args) - @mock.patch("time.sleep", return_value=None) - def test_file_transmission_to_contact(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_file_transmission_to_contact(self, _: Any) -> None: # Setup - self.window.window_contacts = [ - self.contact_list.get_contact_by_address_or_nick("Alice") - ] - self.window.type_print = "contact" + self.window.window_contacts = [self.contact_list.get_contact_by_address_or_nick('Alice')] + self.window.type_print = 'contact' input_data = os.urandom(5) - with open("testfile.txt", "wb+") as f: + with open('testfile.txt', 'wb+') as f: f.write(input_data) # Test self.assertIsNone(send_file("testfile.txt", *self.args)) self.assertEqual(self.queues[MESSAGE_PACKET_QUEUE].qsize(), 1) - self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1) + self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1) - @mock.patch("time.sleep", return_value=None) - def test_file_transmission_to_group(self, _) -> None: + @mock.patch('time.sleep', return_value=None) + def test_file_transmission_to_group(self, _: Any) -> None: # Setup - self.window.window_contacts = [ - 
self.contact_list.get_contact_by_address_or_nick("Alice"), - self.contact_list.get_contact_by_address_or_nick("Bob"), - ] - self.window.type_print = "group" + self.window.window_contacts = [self.contact_list.get_contact_by_address_or_nick('Alice'), + self.contact_list.get_contact_by_address_or_nick('Bob')] + self.window.type_print = 'group' input_data = os.urandom(5) - with open("testfile.txt", "wb+") as f: + with open('testfile.txt', 'wb+') as f: f.write(input_data) self.assertIsNone(send_file("testfile.txt", *self.args)) self.assertEqual(self.queues[MESSAGE_PACKET_QUEUE].qsize(), 2) - self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1) + self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1) class TestQueueFile(TFCTestCase): - file_list = ( - "tx_contacts", - "tx_groups", - "tx_keys", - "tx_login_data", - "tx_settings", - "rx_contacts", - "rx_groups", - "rx_keys", - "rx_login_data", - "rx_settings", - "tx_serial_settings.json", - "nc_serial_settings.json", - "rx_serial_settings.json", - "tx_onion_db", - ) + file_list = ('tx_contacts', 'tx_groups', 'tx_keys', 'tx_login_data', 'tx_settings', + 'rx_contacts', 'rx_groups', 'rx_keys', 'rx_login_data', 'rx_settings', + 'tx_serial_settings.json', 'nc_serial_settings.json', + 'rx_serial_settings.json', 'tx_onion_db') def setUp(self) -> None: """Pre-test actions.""" self.unit_test_dir = cd_unit_test() - self.queues = gen_queue_dict() + self.queues = gen_queue_dict() def tearDown(self) -> None: """Post-test actions.""" cleanup(self.unit_test_dir) tear_queues(self.queues) - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", side_effect=file_list) - def test_tfc_database_raises_fr(self, *_) -> None: - window = TxWindow( - name="Alice", - type=WIN_TYPE_CONTACT, - type_print="contact", - uid=nick_to_pub_key("Alice"), - ) - settings = Settings(traffic_masking=True, disable_gui_dialog=True) + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', side_effect=file_list) + def test_tfc_database_raises_se(self, *_: Any) -> None: + window = TxWindow(name='Alice', + type=WIN_TYPE_CONTACT, + type_print='contact', + uid=nick_to_pub_key("Alice")) + settings = Settings(traffic_masking=True, + disable_gui_dialog=True) for file in self.file_list: - with open(file, "wb+") as f: - f.write(b"a") + with open(file, 'wb+') as f: + f.write(b'a') - self.assert_se( - "Error: Can't send TFC database.", - queue_file, - window, - settings, - self.queues, - ) + self.assert_se("Error: Can't send TFC database.", queue_file, window, settings, self.queues) - @mock.patch("shutil.get_terminal_size", return_value=[150, 150]) - @mock.patch("builtins.input", side_effect=["./testfile.txt", "No"]) - def test_aborted_file(self, *_) -> None: + @mock.patch('shutil.get_terminal_size', return_value=[150, 150]) + @mock.patch('builtins.input', side_effect=['./testfile.txt', 'No']) + def test_aborted_file(self, *_: Any) -> None: # Setup input_data = os.urandom(5) - with open("testfile.txt", "wb+") as f: + with open('testfile.txt', 'wb+') as f: f.write(input_data) - window = TxWindow( - name="Alice", - type=WIN_TYPE_CONTACT, - type_print="contact", - uid=nick_to_pub_key("Alice"), - ) - settings = Settings(traffic_masking=True, disable_gui_dialog=True) + window = TxWindow(name='Alice', + type=WIN_TYPE_CONTACT, + type_print='contact', + uid=nick_to_pub_key("Alice")) + settings = Settings(traffic_masking=True, + disable_gui_dialog=True) # Test - self.assert_se( - "File selection aborted.", queue_file, window, settings, self.queues - ) + 
self.assert_se("File selection aborted.", queue_file, window, settings, self.queues) - @mock.patch("shutil.get_terminal_size", return_value=[150, 150]) - @mock.patch("builtins.input", side_effect=["./testfile.txt", "Yes"]) - def test_file_queue_short_traffic_masking(self, *_) -> None: + @mock.patch('shutil.get_terminal_size', return_value=[150, 150]) + @mock.patch('builtins.input', side_effect=['./testfile.txt', 'Yes']) + def test_file_queue_short_traffic_masking(self, *_: Any) -> None: # Setup input_data = os.urandom(5) - with open("testfile.txt", "wb+") as f: + with open('testfile.txt', 'wb+') as f: f.write(input_data) - window = TxWindow( - name="Alice", - type=WIN_TYPE_CONTACT, - type_print="contact", - uid=nick_to_pub_key("Alice"), - log_messages=True, - ) - settings = Settings(traffic_masking=True, disable_gui_dialog=True) + window = TxWindow(name='Alice', + type=WIN_TYPE_CONTACT, + type_print='contact', + uid=nick_to_pub_key("Alice"), + log_messages=True) + settings = Settings(traffic_masking=True, + disable_gui_dialog=True) # Test self.assertIsNone(queue_file(window, settings, self.queues)) @@ -313,69 +227,60 @@ class TestQueueFile(TFCTestCase): self.assertTrue(log_messages) self.assertTrue(log_as_ph) - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", side_effect=["./testfile.txt", "Yes"]) - def test_file_queue_long_normal(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', side_effect=['./testfile.txt', 'Yes']) + def test_file_queue_long_normal(self, *_: Any) -> None: # Setup input_data = os.urandom(2000) - with open("testfile.txt", "wb+") as f: + with open('testfile.txt', 'wb+') as f: f.write(input_data) - window = TxWindow( - name="Alice", - type=WIN_TYPE_CONTACT, - type_print="contact", - uid=nick_to_pub_key("Alice"), - window_contacts=[create_contact("Alice")], - log_messages=True, - ) - settings = Settings( - traffic_masking=False, - disable_gui_dialog=True, - confirm_sent_files=True, - multi_packet_random_delay=True, - ) + window = TxWindow(name='Alice', + type=WIN_TYPE_CONTACT, + type_print='contact', + uid=nick_to_pub_key("Alice"), + window_contacts=[create_contact('Alice')], + log_messages=True) + settings = Settings(traffic_masking=False, + disable_gui_dialog=True, + confirm_sent_files=True, + multi_packet_random_delay=True) # Test self.assertIsNone(queue_file(window, settings, self.queues)) self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1) - @mock.patch("shutil.get_terminal_size", return_value=[150, 150]) - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", side_effect=["./testfile.txt", KeyboardInterrupt]) - def test_keyboard_interrupt_raises_fr(self, *_) -> None: + @mock.patch('shutil.get_terminal_size', return_value=[150, 150]) + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', side_effect=['./testfile.txt', KeyboardInterrupt]) + def test_keyboard_interrupt_raises_se(self, *_: Any) -> None: # Setup input_data = os.urandom(2000) - with open("testfile.txt", "wb+") as f: + with open('testfile.txt', 'wb+') as f: f.write(input_data) - window = TxWindow( - name="Alice", - type=WIN_TYPE_CONTACT, - type_print="contact", - uid=nick_to_pub_key("Alice"), - window_contacts=[create_contact("Alice")], - log_messages=True, - ) - settings = Settings( - traffic_masking=True, - disable_gui_dialog=True, - confirm_sent_files=True, - multi_packet_random_delay=True, - ) + window = TxWindow(name='Alice', + type=WIN_TYPE_CONTACT, + type_print='contact', 
+ uid=nick_to_pub_key("Alice"), + window_contacts=[create_contact('Alice')], + log_messages=True) + settings = Settings(traffic_masking=True, + disable_gui_dialog=True, + confirm_sent_files=True, + multi_packet_random_delay=True) # Test - self.assert_se( - "File selection aborted.", queue_file, window, settings, self.queues - ) + self.assert_se("File selection aborted.", queue_file, window, settings, self.queues) self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 0) class TestQueueCommand(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" self.settings = Settings() - self.queues = gen_queue_dict() + self.queues = gen_queue_dict() def tearDown(self) -> None: """Post-test actions.""" @@ -388,8 +293,9 @@ class TestQueueCommand(unittest.TestCase): class TestSplitToAssemblyPackets(unittest.TestCase): + def test_short_message(self) -> None: - packet_list = split_to_assembly_packets(b"Short message", MESSAGE) + packet_list = split_to_assembly_packets(b'Short message', MESSAGE) self.assertEqual(len(packet_list), 1) self.assertTrue(packet_list[0].startswith(M_S_HEADER)) @@ -409,9 +315,7 @@ class TestSplitToAssemblyPackets(unittest.TestCase): def test_long_file(self) -> None: packet_list = split_to_assembly_packets(os.urandom(800), FILE) self.assertEqual(len(packet_list), 4) - self.assertTrue( - packet_list[0].startswith(F_L_HEADER + b"\x00\x00\x00\x00\x00\x00\x00\x04") - ) + self.assertTrue(packet_list[0].startswith(F_L_HEADER + b'\x00\x00\x00\x00\x00\x00\x00\x04')) self.assertTrue(packet_list[1].startswith(F_A_HEADER)) self.assertTrue(packet_list[2].startswith(F_A_HEADER)) self.assertTrue(packet_list[3].startswith(F_E_HEADER)) @@ -431,21 +335,23 @@ class TestSplitToAssemblyPackets(unittest.TestCase): class TestQueueAssemblyPackets(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" self.settings = Settings() - self.queues = gen_queue_dict() - self.window = TxWindow(uid=nick_to_pub_key("Alice"), log_messages=True) - self.window.window_contacts = [create_contact("Alice")] - self.args = self.settings, self.queues, self.window - + self.queues = gen_queue_dict() + self.window = TxWindow(uid=nick_to_pub_key("Alice"), + log_messages=True) + self.window.window_contacts = [create_contact('Alice')] + self.args = self.settings, self.queues, self.window + def tearDown(self) -> None: """Post-test actions.""" tear_queues(self.queues) def test_queue_message_traffic_masking(self) -> None: # Setup - packet_list = split_to_assembly_packets(os.urandom(200), MESSAGE) + packet_list = split_to_assembly_packets(os.urandom(200), MESSAGE) self.settings.traffic_masking = True # Test @@ -464,9 +370,7 @@ class TestQueueAssemblyPackets(unittest.TestCase): self.assertIsNone(queue_assembly_packets(packet_list, MESSAGE, *self.args)) self.assertEqual(self.queues[MESSAGE_PACKET_QUEUE].qsize(), 1) - packet, pub_key, log_setting, log_as_ph, win_uid = self.queues[ - MESSAGE_PACKET_QUEUE - ].get() + packet, pub_key, log_setting, log_as_ph, win_uid = self.queues[MESSAGE_PACKET_QUEUE].get() self.assertIsInstance(packet, bytes) self.assertEqual(pub_key, nick_to_pub_key("Alice")) self.assertEqual(win_uid, nick_to_pub_key("Alice")) @@ -475,7 +379,7 @@ class TestQueueAssemblyPackets(unittest.TestCase): def test_queue_file_traffic_masking(self) -> None: # Setup - packet_list = split_to_assembly_packets(os.urandom(200), FILE) + packet_list = split_to_assembly_packets(os.urandom(200), FILE) self.settings.traffic_masking = True # Test @@ -488,7 +392,7 @@ class 
TestQueueAssemblyPackets(unittest.TestCase): def test_queue_command_traffic_masking(self) -> None: # Setup - packet_list = split_to_assembly_packets(os.urandom(200), COMMAND) + packet_list = split_to_assembly_packets(os.urandom(200), COMMAND) self.settings.traffic_masking = True # Test @@ -499,8 +403,8 @@ class TestQueueAssemblyPackets(unittest.TestCase): def test_queue_command_traffic_masking_no_window(self) -> None: # Setup - self.window = None - packet_list = split_to_assembly_packets(os.urandom(200), COMMAND) + self.window = None + packet_list = split_to_assembly_packets(os.urandom(200), COMMAND) self.settings.traffic_masking = True # Test @@ -538,10 +442,10 @@ class TestSendPacket(unittest.TestCase): def setUp(self) -> None: """Pre-test actions.""" - self.l_queue = Queue() - self.key_list = KeyList(nicks=["Alice"]) - self.settings = Settings() - self.gateway = Gateway() + self.l_queue = Queue() + self.key_list = KeyList(nicks=['Alice']) + self.settings = Settings() + self.gateway = Gateway() self.onion_service = OnionService() def tearDown(self) -> None: @@ -553,69 +457,37 @@ class TestSendPacket(unittest.TestCase): pub_key = nick_to_pub_key("Alice") for msg_len in range(1, 256): with self.assertRaises(SystemExit): - send_packet( - self.key_list, - self.gateway, - self.l_queue, - bytes(msg_len), - pub_key, - True, - ) + send_packet(self.key_list, self.gateway, self.l_queue, bytes(msg_len), pub_key, True) for msg_len in range(257, 300): with self.assertRaises(SystemExit): - send_packet( - self.key_list, - self.gateway, - self.l_queue, - bytes(msg_len), - pub_key, - True, - ) + send_packet(self.key_list, self.gateway, self.l_queue, bytes(msg_len), pub_key, True) def test_invalid_harac_raises_raises_struct_error(self) -> None: # Check that in the case where an internal error caused bytestring (possible key material) to end up in hash # ratchet value, the system raises some error that prevents the output of packet. In this case the, error comes # from the unsuccessful encoding of hash ratchet counter. 
- for harac_len in range(1, 33): - key_list = KeyList() - key_list.keysets = [ - create_keyset( - "Alice", - tx_key=SYMMETRIC_KEY_LENGTH * b"\x02", - tx_harac=harac_len * b"k", - ) - ] + for msg_len in range(1, 33): + key_list = KeyList() + key_list.keysets = [create_keyset('Alice', + tx_key=SYMMETRIC_KEY_LENGTH * b'\x02', + tx_harac=msg_len * b'k')] # <-- Intentional type error with self.assertRaises(struct.error): - send_packet( - key_list, - self.gateway, - self.l_queue, - bytes(ASSEMBLY_PACKET_LENGTH), - nick_to_pub_key("Alice"), - True, - ) + send_packet(key_list, self.gateway, self.l_queue, + bytes(ASSEMBLY_PACKET_LENGTH), nick_to_pub_key("Alice"), True) def test_valid_message_packet(self) -> None: # Setup - gateway = Gateway(serial_error_correction=5) - key_list = KeyList(master_key=bytes(SYMMETRIC_KEY_LENGTH)) - key_list.keysets = [ - create_keyset("Alice", tx_key=SYMMETRIC_KEY_LENGTH * b"\x02", tx_harac=8) - ] + gateway = Gateway(serial_error_correction=5) + key_list = KeyList(master_key=bytes(SYMMETRIC_KEY_LENGTH)) + key_list.keysets = [create_keyset('Alice', + tx_key=SYMMETRIC_KEY_LENGTH * b'\x02', + tx_harac=8)] # Test - self.assertIsNone( - send_packet( - key_list, - gateway, - self.l_queue, - bytes(ASSEMBLY_PACKET_LENGTH), - nick_to_pub_key("Alice"), - True, - ) - ) + self.assertIsNone(send_packet(key_list, gateway, self.l_queue, + bytes(ASSEMBLY_PACKET_LENGTH), nick_to_pub_key("Alice"), True)) self.assertEqual(len(gateway.packets), 1) time.sleep(0.01) self.assertFalse(self.l_queue.empty()) @@ -629,21 +501,19 @@ class TestSendPacket(unittest.TestCase): compromise plaintext. """ # Setup - key_list = KeyList(master_key=bytes(SYMMETRIC_KEY_LENGTH)) + key_list = KeyList(master_key=bytes(SYMMETRIC_KEY_LENGTH)) key_list.keysets = [create_keyset(LOCAL_ID)] # Test - self.assertIsNone( - send_packet( - key_list, self.gateway, self.l_queue, bytes(ASSEMBLY_PACKET_LENGTH) - ) - ) + self.assertIsNone(send_packet(key_list, self.gateway, self.l_queue, + bytes(ASSEMBLY_PACKET_LENGTH))) self.assertEqual(len(self.gateway.packets), 1) self.assertEqual(len(self.gateway.packets[0]), 345) self.assertEqual(self.l_queue.qsize(), 1) class TestCancelPacket(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" self.queues = gen_queue_dict() @@ -654,115 +524,67 @@ class TestCancelPacket(TFCTestCase): def test_cancel_message_during_normal(self) -> None: # Setup - user_input = UserInput("cm") - settings = Settings() - window = TxWindow( - name="Alice", - type=WIN_TYPE_CONTACT, - type_print="contact", - uid=nick_to_pub_key("Alice"), - ) - window.window_contacts = [create_contact("Alice")] + user_input = UserInput('cm') + settings = Settings() + window = TxWindow(name='Alice', + type=WIN_TYPE_CONTACT, + type_print='contact', + uid=nick_to_pub_key("Alice")) + window.window_contacts = [create_contact('Alice')] self.queues[MESSAGE_PACKET_QUEUE].put( - ( - "test_message1", - nick_to_pub_key("Alice"), - False, - False, - nick_to_pub_key("Alice"), - ) - ) + ('test_message1', nick_to_pub_key("Alice"), False, False, nick_to_pub_key("Alice"))) self.queues[MESSAGE_PACKET_QUEUE].put( - ( - "test_message2", - nick_to_pub_key("Charlie"), - False, - False, - nick_to_pub_key("Charlie"), - ) - ) + ('test_message2', nick_to_pub_key("Charlie"), False, False, nick_to_pub_key("Charlie"))) self.queues[MESSAGE_PACKET_QUEUE].put( - ( - "test_message3", - nick_to_pub_key("Alice"), - False, - False, - nick_to_pub_key("Alice"), - ) - ) + ('test_message3', nick_to_pub_key("Alice"), False, False, nick_to_pub_key("Alice"))) 
# Test - self.assert_se( - "Cancelled queued messages to contact Alice.", - cancel_packet, - user_input, - window, - settings, - self.queues, - ) + self.assert_se("Cancelled queued messages to contact Alice.", + cancel_packet, user_input, window, settings, self.queues) self.assertEqual(self.queues[MESSAGE_PACKET_QUEUE].qsize(), 2) def test_cancel_group_message_during_normal(self) -> None: # Setup - user_input = UserInput("cm") - settings = Settings() - window = TxWindow( - name="test_group", type=WIN_TYPE_GROUP, type_print="group", uid="test_group" - ) - window.window_contacts = [create_contact("Alice")] + user_input = UserInput('cm') + settings = Settings() + window = TxWindow(name='test_group', + type=WIN_TYPE_GROUP, + type_print='group', + uid='test_group') + window.window_contacts = [create_contact('Alice')] - self.queues[MESSAGE_PACKET_QUEUE].put( - ("test_message1", nick_to_pub_key("Alice"), False, False, "test_group") - ) - self.queues[MESSAGE_PACKET_QUEUE].put( - ("test_message2", nick_to_pub_key("Alice"), False, False, "test_group") - ) + self.queues[MESSAGE_PACKET_QUEUE].put(('test_message1', nick_to_pub_key("Alice"), False, False, 'test_group')) + self.queues[MESSAGE_PACKET_QUEUE].put(('test_message2', nick_to_pub_key("Alice"), False, False, 'test_group')) # Test - self.assert_se( - "Cancelled queued messages to group test_group.", - cancel_packet, - user_input, - window, - settings, - self.queues, - ) + self.assert_se("Cancelled queued messages to group test_group.", + cancel_packet, user_input, window, settings, self.queues) self.assertEqual(self.queues[MESSAGE_PACKET_QUEUE].qsize(), 1) # Cancel packet def test_cancel_message_during_traffic_masking(self) -> None: # Setup - user_input = UserInput("cm") - settings = Settings(traffic_masking=True) - window = TxWindow() - window.window_contacts = [create_contact("Alice")] + user_input = UserInput('cm') + settings = Settings(traffic_masking=True) + window = TxWindow() + window.window_contacts = [create_contact('Alice')] - self.queues[TM_MESSAGE_PACKET_QUEUE].put( - ("test_message1", {nick_to_pub_key("Alice"): False}) - ) - self.queues[TM_MESSAGE_PACKET_QUEUE].put( - ("test_message2", {nick_to_pub_key("Alice"): False}) - ) + self.queues[TM_MESSAGE_PACKET_QUEUE].put(('test_message1', {nick_to_pub_key("Alice"): False})) + self.queues[TM_MESSAGE_PACKET_QUEUE].put(('test_message2', {nick_to_pub_key("Alice"): False})) # Test self.assertIsNone(cancel_packet(user_input, window, settings, self.queues)) - self.assertEqual( - self.queues[TM_MESSAGE_PACKET_QUEUE].qsize(), 1 - ) # Cancel packet in queue + self.assertEqual(self.queues[TM_MESSAGE_PACKET_QUEUE].qsize(), 1) # Cancel packet in queue def test_cancel_file_during_traffic_masking(self) -> None: # Setup - user_input = UserInput("cf") - settings = Settings(traffic_masking=True) - window = TxWindow() - window.window_contacts = [create_contact("Alice")] + user_input = UserInput('cf') + settings = Settings(traffic_masking=True) + window = TxWindow() + window.window_contacts = [create_contact('Alice')] - self.queues[TM_FILE_PACKET_QUEUE].put( - ("testfile1", {nick_to_pub_key("Alice"): False}) - ) - self.queues[TM_FILE_PACKET_QUEUE].put( - ("testfile2", {nick_to_pub_key("Alice"): False}) - ) + self.queues[TM_FILE_PACKET_QUEUE].put(('testfile1', {nick_to_pub_key("Alice"): False})) + self.queues[TM_FILE_PACKET_QUEUE].put(('testfile2', {nick_to_pub_key("Alice"): False})) # Test self.assertIsNone(cancel_packet(user_input, window, settings, self.queues)) @@ -770,37 +592,27 @@ class 
TestCancelPacket(TFCTestCase): def test_cancel_file_during_normal(self) -> None: # Setup - user_input = UserInput("cf") - settings = Settings() - window = TxWindow( - name="Alice", - type=WIN_TYPE_CONTACT, - type_print="contact", - uid=nick_to_pub_key("Alice"), - ) - window.window_contacts = [create_contact("Alice")] + user_input = UserInput('cf') + settings = Settings() + window = TxWindow(name='Alice', + type=WIN_TYPE_CONTACT, + type_print='contact', + uid=nick_to_pub_key("Alice")) + window.window_contacts = [create_contact('Alice')] # Test - self.assert_se( - "Files are only queued during traffic masking.", - cancel_packet, - user_input, - window, - settings, - self.queues, - ) + self.assert_se('Files are only queued during traffic masking.', + cancel_packet, user_input, window, settings, self.queues) def test_cancel_file_when_nothing_to_cancel(self) -> None: # Setup - user_input = UserInput("cf") - settings = Settings(traffic_masking=True) - window = TxWindow( - name="Alice", - type=WIN_TYPE_CONTACT, - type_print="contact", - uid=nick_to_pub_key("Alice"), - ) - window.window_contacts = [create_contact("Alice")] + user_input = UserInput('cf') + settings = Settings(traffic_masking=True) + window = TxWindow(name='Alice', + type=WIN_TYPE_CONTACT, + type_print='contact', + uid=nick_to_pub_key("Alice")) + window.window_contacts = [create_contact('Alice')] # Test self.assertIsNone(cancel_packet(user_input, window, settings, self.queues)) @@ -808,27 +620,19 @@ class TestCancelPacket(TFCTestCase): def test_cancel_message_when_nothing_to_cancel(self) -> None: # Setup - user_input = UserInput("cm") - settings = Settings() - window = TxWindow( - name="Alice", - type=WIN_TYPE_CONTACT, - type_print="contact", - uid=nick_to_pub_key("Alice"), - ) - window.window_contacts = [create_contact("Alice")] + user_input = UserInput('cm') + settings = Settings() + window = TxWindow(name='Alice', + type=WIN_TYPE_CONTACT, + type_print='contact', + uid=nick_to_pub_key("Alice")) + window.window_contacts = [create_contact('Alice')] # Test - self.assert_se( - "No messages queued for contact Alice.", - cancel_packet, - user_input, - window, - settings, - self.queues, - ) + self.assert_se("No messages queued for contact Alice.", + cancel_packet, user_input, window, settings, self.queues) self.assertEqual(self.queues[TM_FILE_PACKET_QUEUE].qsize(), 0) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/transmitter/test_sender_loop.py b/tests/transmitter/test_sender_loop.py index 2f46c81..572016e 100644 --- a/tests/transmitter/test_sender_loop.py +++ b/tests/transmitter/test_sender_loop.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
@@ -23,120 +23,78 @@ import threading import time import unittest -from src.common.statics import ( - C_N_HEADER, - EXIT_QUEUE, - KDB_ADD_ENTRY_HEADER, - KEY_MANAGEMENT_QUEUE, - LOCAL_ID, - LOCAL_PUBKEY, - PADDING_LENGTH, - PUBLIC_KEY_DATAGRAM_HEADER, - P_N_HEADER, - RELAY_PACKET_QUEUE, - SENDER_MODE_QUEUE, - SYMMETRIC_KEY_LENGTH, - TFC_PUBLIC_KEY_LENGTH, - TM_NOISE_COMMAND_QUEUE, - TM_NOISE_PACKET_QUEUE, - UNENCRYPTED_DATAGRAM_HEADER, - UNENCRYPTED_EXIT_COMMAND, - UNENCRYPTED_WIPE_COMMAND, - WINDOW_SELECT_QUEUE, -) +from src.common.statics import (C_N_HEADER, EXIT_QUEUE, KDB_ADD_ENTRY_HEADER, KEY_MANAGEMENT_QUEUE, LOCAL_ID, + LOCAL_PUBKEY, PADDING_LENGTH, PUBLIC_KEY_DATAGRAM_HEADER, P_N_HEADER, + RELAY_PACKET_QUEUE, SENDER_MODE_QUEUE, SYMMETRIC_KEY_LENGTH, TFC_PUBLIC_KEY_LENGTH, + TM_NOISE_COMMAND_QUEUE, TM_NOISE_PACKET_QUEUE, UNENCRYPTED_DATAGRAM_HEADER, + UNENCRYPTED_EXIT_COMMAND, UNENCRYPTED_WIPE_COMMAND, WINDOW_SELECT_QUEUE) -from src.transmitter.commands import queue_command -from src.transmitter.packet import queue_message, queue_to_nc -from src.transmitter.sender_loop import ( - sender_loop, - standard_sender_loop, - traffic_masking_loop, -) +from src.transmitter.commands import queue_command +from src.transmitter.packet import queue_message, queue_to_nc +from src.transmitter.sender_loop import sender_loop, standard_sender_loop, traffic_masking_loop -from tests.mock_classes import ( - ContactList, - Gateway, - KeyList, - nick_to_pub_key, - Settings, - TxWindow, - UserInput, -) -from tests.utils import gen_queue_dict, tear_queues +from tests.mock_classes import ContactList, Gateway, KeyList, nick_to_pub_key, Settings, TxWindow, UserInput +from tests.utils import gen_queue_dict, tear_queues class TestSenderLoop(unittest.TestCase): + def test_loops(self) -> None: - queues = gen_queue_dict() - window = TxWindow(log_messages=True) - settings = Settings( - traffic_masking=True, tm_static_delay=0.001, tm_random_delay=0.001 - ) - gateway = Gateway() - key_list = KeyList(nicks=["Bob", LOCAL_ID]) # Output Bob as existing contact + queues = gen_queue_dict() + window = TxWindow(log_messages=True) + settings = Settings(traffic_masking=True, + tm_static_delay=0.001, + tm_random_delay=0.001) + gateway = Gateway() + key_list = KeyList(nicks=['Bob', LOCAL_ID]) # Output Bob as existing contact queues[TM_NOISE_COMMAND_QUEUE].put((C_N_HEADER + bytes(PADDING_LENGTH))) - queues[TM_NOISE_PACKET_QUEUE].put( - (P_N_HEADER + bytes(PADDING_LENGTH), True, True) - ) + queues[TM_NOISE_PACKET_QUEUE].put((P_N_HEADER + bytes(PADDING_LENGTH), True, True)) queues[WINDOW_SELECT_QUEUE].put(window.window_contacts) queues[SENDER_MODE_QUEUE].put(settings) - queue_command(b"test", settings, queues) # Output command - self.assertIsNone( - sender_loop(queues, settings, gateway, key_list, unit_test=True) - ) + queue_command(b'test', settings, queues) # Output command + self.assertIsNone(sender_loop(queues, settings, gateway, key_list, unit_test=True)) self.assertEqual(len(gateway.packets), 1) settings.traffic_masking = False queues[SENDER_MODE_QUEUE].put(settings) - self.assertIsNone( - sender_loop(queues, settings, gateway, key_list, unit_test=True) - ) # Output Alice & Bob again + self.assertIsNone(sender_loop(queues, settings, gateway, key_list, unit_test=True)) # Output Alice & Bob again self.assertEqual(len(gateway.packets), 1) class TestTrafficMaskingLoop(unittest.TestCase): + def test_loop(self) -> None: # Setup - queues = gen_queue_dict() - settings = Settings( - traffic_masking=True, tm_static_delay=0.001, 
tm_random_delay=0.001 - ) - gateway = Gateway() - key_list = KeyList(nicks=["Alice", LOCAL_ID]) - window = TxWindow(log_messages=True) - contact_list = ContactList(nicks=["Alice", LOCAL_ID]) - window.contact_list = contact_list - window.window_contacts = [contact_list.get_contact_by_address_or_nick("Alice")] - user_input = UserInput(plaintext="test") + queues = gen_queue_dict() + settings = Settings(traffic_masking=True, + tm_static_delay=0.001, + tm_random_delay=0.001) + gateway = Gateway() + key_list = KeyList(nicks=['Alice', LOCAL_ID]) + window = TxWindow(log_messages=True) + contact_list = ContactList(nicks=['Alice', LOCAL_ID]) + window.contact_list = contact_list + window.window_contacts = [contact_list.get_contact_by_address_or_nick('Alice')] + user_input = UserInput(plaintext='test') def queue_delayer() -> None: """Place packets to queue after delay.""" time.sleep(0.01) queues[WINDOW_SELECT_QUEUE].put(window.window_contacts) time.sleep(0.01) - queue_command(b"test", settings, queues) # 1 - queue_message(user_input, window, settings, queues) # 2 - queue_message(user_input, window, settings, queues) # 3 - queue_command(b"test", settings, queues) # 4 - queues[TM_NOISE_COMMAND_QUEUE].put( - (C_N_HEADER + bytes(PADDING_LENGTH)) - ) # 5 - queue_to_nc( - UNENCRYPTED_DATAGRAM_HEADER + UNENCRYPTED_EXIT_COMMAND, - queues[RELAY_PACKET_QUEUE], - ) # 6 - queue_to_nc( - UNENCRYPTED_DATAGRAM_HEADER + UNENCRYPTED_WIPE_COMMAND, - queues[RELAY_PACKET_QUEUE], - ) # 7 + queue_command(b'test', settings, queues) # 1 + queue_message(user_input, window, settings, queues) # 2 + queue_message(user_input, window, settings, queues) # 3 + queue_command(b'test', settings, queues) # 4 + queues[TM_NOISE_COMMAND_QUEUE].put((C_N_HEADER + bytes(PADDING_LENGTH))) # 5 + queue_to_nc(UNENCRYPTED_DATAGRAM_HEADER + UNENCRYPTED_EXIT_COMMAND, queues[RELAY_PACKET_QUEUE]) # 6 + queue_to_nc(UNENCRYPTED_DATAGRAM_HEADER + UNENCRYPTED_WIPE_COMMAND, queues[RELAY_PACKET_QUEUE]) # 7 queues[SENDER_MODE_QUEUE].put(settings) # Test threading.Thread(target=queue_delayer).start() - self.assertIsInstance( - traffic_masking_loop(queues, settings, gateway, key_list), Settings - ) + self.assertIsInstance(traffic_masking_loop(queues, settings, gateway, key_list), Settings) self.assertEqual(len(gateway.packets), 7) # Teardown @@ -144,56 +102,40 @@ class TestTrafficMaskingLoop(unittest.TestCase): class TestStandardSenderLoop(unittest.TestCase): + def test_loop(self) -> None: # Setup - queues = gen_queue_dict() - settings = Settings(traffic_masking=False) - gateway = Gateway() - key_list = KeyList() - window = TxWindow(log_messages=True) - contact_list = ContactList(nicks=["Alice", LOCAL_ID]) - window.contact_list = contact_list - window.window_contacts = [contact_list.get_contact_by_address_or_nick("Alice")] - user_input = UserInput(plaintext="test") + queues = gen_queue_dict() + settings = Settings(traffic_masking=False) + gateway = Gateway() + key_list = KeyList() + window = TxWindow(log_messages=True) + contact_list = ContactList(nicks=['Alice', LOCAL_ID]) + window.contact_list = contact_list + window.window_contacts = [contact_list.get_contact_by_address_or_nick('Alice')] + user_input = UserInput(plaintext='test') delay = 0.015 def queue_delayer() -> None: """Place datagrams into queue after delay.""" time.sleep(delay) - queue_command(b"test", settings, queues) + queue_command(b'test', settings, queues) time.sleep(delay) - queue_to_nc( - PUBLIC_KEY_DATAGRAM_HEADER - + TFC_PUBLIC_KEY_LENGTH * b"a" - + nick_to_pub_key("Alice"), # 1 - 
queues[RELAY_PACKET_QUEUE], - ) + queue_to_nc(PUBLIC_KEY_DATAGRAM_HEADER + TFC_PUBLIC_KEY_LENGTH * b'a' + nick_to_pub_key('Alice'), # 1 + queues[RELAY_PACKET_QUEUE]) time.sleep(delay) - queue_to_nc( - UNENCRYPTED_DATAGRAM_HEADER + UNENCRYPTED_WIPE_COMMAND, - queues[RELAY_PACKET_QUEUE], - ) # 2 + queue_to_nc(UNENCRYPTED_DATAGRAM_HEADER + UNENCRYPTED_WIPE_COMMAND, queues[RELAY_PACKET_QUEUE]) # 2 time.sleep(delay) - queue_to_nc( - UNENCRYPTED_DATAGRAM_HEADER + UNENCRYPTED_EXIT_COMMAND, - queues[RELAY_PACKET_QUEUE], - ) # 3 + queue_to_nc(UNENCRYPTED_DATAGRAM_HEADER + UNENCRYPTED_EXIT_COMMAND, queues[RELAY_PACKET_QUEUE]) # 3 time.sleep(delay) - queues[KEY_MANAGEMENT_QUEUE].put( - ( - KDB_ADD_ENTRY_HEADER, - LOCAL_PUBKEY, # 4 - SYMMETRIC_KEY_LENGTH * b"a", - SYMMETRIC_KEY_LENGTH * b"a", - SYMMETRIC_KEY_LENGTH * b"a", - SYMMETRIC_KEY_LENGTH * b"a", - ) - ) + queues[KEY_MANAGEMENT_QUEUE].put((KDB_ADD_ENTRY_HEADER, LOCAL_PUBKEY, # 4 + SYMMETRIC_KEY_LENGTH * b'a', SYMMETRIC_KEY_LENGTH * b'a', + SYMMETRIC_KEY_LENGTH * b'a', SYMMETRIC_KEY_LENGTH * b'a')) time.sleep(delay) queue_message(user_input, window, settings, queues) # 5 @@ -202,16 +144,9 @@ class TestStandardSenderLoop(unittest.TestCase): queue_message(user_input, window, settings, queues) # 6 time.sleep(delay) - queues[KEY_MANAGEMENT_QUEUE].put( - ( - KDB_ADD_ENTRY_HEADER, - nick_to_pub_key("Alice"), - SYMMETRIC_KEY_LENGTH * b"a", - SYMMETRIC_KEY_LENGTH * b"a", - SYMMETRIC_KEY_LENGTH * b"a", - SYMMETRIC_KEY_LENGTH * b"a", - ) - ) + queues[KEY_MANAGEMENT_QUEUE].put((KDB_ADD_ENTRY_HEADER, nick_to_pub_key('Alice'), + SYMMETRIC_KEY_LENGTH * b'a', SYMMETRIC_KEY_LENGTH * b'a', + SYMMETRIC_KEY_LENGTH * b'a', SYMMETRIC_KEY_LENGTH * b'a')) time.sleep(delay) queue_message(user_input, window, settings, queues) # 7 @@ -227,13 +162,13 @@ class TestStandardSenderLoop(unittest.TestCase): # Test settings, m_buffer = standard_sender_loop(queues, gateway, key_list) self.assertIsInstance(settings, Settings) - self.assertEqual(m_buffer, {nick_to_pub_key("Alice"): []}) - self.assertEqual(len(gateway.packets), 8) + self.assertEqual(m_buffer, {nick_to_pub_key('Alice'): []}) + self.assertEqual(len(gateway.packets), 8) self.assertEqual(queues[EXIT_QUEUE].qsize(), 2) # Teardown tear_queues(queues) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/transmitter/test_traffic_masking.py b/tests/transmitter/test_traffic_masking.py index 5d9e3b6..7c541d4 100644 --- a/tests/transmitter/test_traffic_masking.py +++ b/tests/transmitter/test_traffic_masking.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -19,52 +19,23 @@ You should have received a copy of the GNU General Public License along with TFC. If not, see . 
""" -import time import unittest -from src.common.misc import HideRunTime -from src.common.statics import ( - C_N_HEADER, - PADDING_LENGTH, - PLACEHOLDER_DATA, - TM_NOISE_COMMAND_QUEUE, - TM_NOISE_PACKET_QUEUE, - TRAFFIC_MASKING, -) +from src.common.statics import (C_N_HEADER, PADDING_LENGTH, PLACEHOLDER_DATA, TM_NOISE_COMMAND_QUEUE, + TM_NOISE_PACKET_QUEUE) from src.transmitter.traffic_masking import noise_loop -from tests.mock_classes import ContactList, Settings -from tests.utils import gen_queue_dict, tear_queues - - -class TestHideRunTime(unittest.TestCase): - def setUp(self) -> None: - """Pre-test actions.""" - self.settings = Settings() - self.settings.tm_random_delay = 1 - self.settings.tm_static_delay = 1 - - def test_traffic_masking_delay(self) -> None: - start = time.monotonic() - with HideRunTime(self.settings, delay_type=TRAFFIC_MASKING): - pass - duration = time.monotonic() - start - self.assertTrue(duration > self.settings.tm_static_delay) - - def test_static_time(self) -> None: - start = time.monotonic() - with HideRunTime(self.settings, duration=1): - pass - duration = time.monotonic() - start - self.assertTrue(0.9 < duration < 1.1) +from tests.mock_classes import ContactList +from tests.utils import gen_queue_dict, tear_queues class TestNoiseLoop(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.queues = gen_queue_dict() - self.contact_list = ContactList(nicks=["Alice"]) + self.queues = gen_queue_dict() + self.contact_list = ContactList(nicks=['Alice']) def tearDown(self) -> None: """Post-test actions.""" @@ -83,5 +54,5 @@ class TestNoiseLoop(unittest.TestCase): self.assertTrue(log_as_ph) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/transmitter/test_user_input.py b/tests/transmitter/test_user_input.py index cdc691d..844280e 100644 --- a/tests/transmitter/test_user_input.py +++ b/tests/transmitter/test_user_input.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -22,90 +22,90 @@ along with TFC. If not, see . 
import unittest from unittest import mock +from typing import Any -from src.common.statics import COMMAND, FILE, MESSAGE, WIN_TYPE_CONTACT, WIN_TYPE_GROUP +from src.common.statics import COMMAND, FILE, MESSAGE, WIN_TYPE_CONTACT, WIN_TYPE_GROUP from src.transmitter.user_input import get_input, process_aliases, UserInput -from tests.mock_classes import create_contact, create_group, Settings, TxWindow +from tests.mock_classes import create_contact, create_group, Settings, TxWindow class TestProcessAliases(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" self.settings = Settings() - self.window = TxWindow( - name="Alice", - type=WIN_TYPE_CONTACT, - type_print="contact", - window_contacts=[create_contact("Alice")], - ) + self.window = TxWindow(name='Alice', + type=WIN_TYPE_CONTACT, + type_print='contact', + window_contacts=[create_contact('Alice')]) def test_unread_shortcut(self) -> None: - self.assertEqual(process_aliases(" ", self.settings, self.window), "/unread") + self.assertEqual(process_aliases(' ', self.settings, self.window), '/unread') def test_clear_shortcut(self) -> None: - self.assertEqual(process_aliases(" ", self.settings, self.window), "/clear") + self.assertEqual(process_aliases(' ', self.settings, self.window), '/clear') def test_exit_shortcut(self) -> None: # Setup self.settings.double_space_exits = True # Test - self.assertEqual(process_aliases(" ", self.settings, self.window), "/exit") + self.assertEqual(process_aliases(' ', self.settings, self.window), '/exit') def test_cmd_shortcut(self) -> None: - self.assertEqual(process_aliases("//", self.settings, self.window), "/cmd") + self.assertEqual(process_aliases('//', self.settings, self.window), '/cmd') class TestGetInput(unittest.TestCase): + def setUp(self) -> None: """Pre-test actions.""" self.settings = Settings() - self.window = TxWindow( - name="Alice", - type=WIN_TYPE_CONTACT, - type_print="contact", - window_contacts=[create_contact("Alice")], - ) - self.window.group = create_group("test_group") + self.window = TxWindow(name='Alice', + type=WIN_TYPE_CONTACT, + type_print='contact', + window_contacts=[create_contact('Alice')]) + self.window.group = create_group('test_group') - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", side_effect=["/", "", "test_message"]) - def test_message(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', side_effect=['/', '', 'test_message']) + def test_message(self, *_: Any) -> None: user_input = get_input(self.window, self.settings) - self.assertEqual(user_input.plaintext, "test_message") + self.assertEqual(user_input.plaintext, 'test_message') self.assertEqual(user_input.type, MESSAGE) - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", side_effect=["/", "", "test_message", "/clear"]) - def test_message_and_command_to_empty_group(self, *_) -> None: - self.window.type = WIN_TYPE_GROUP + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', side_effect=['/', '', 'test_message', '/clear']) + def test_message_and_command_to_empty_group(self, *_: Any) -> None: + self.window.type = WIN_TYPE_GROUP self.window.window_contacts = [] - self.window.group.members = [] + self.window.group.members = [] user_input = get_input(self.window, self.settings) - self.assertEqual(user_input.plaintext, "clear") + self.assertEqual(user_input.plaintext, 'clear') self.assertEqual(user_input.type, COMMAND) - @mock.patch("time.sleep", return_value=None) - 
@mock.patch("builtins.input", return_value="/file") - def test_file(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', return_value='/file') + def test_file(self, *_: Any) -> None: user_input = get_input(self.window, self.settings) - self.assertEqual(user_input.plaintext, "/file") + self.assertEqual(user_input.plaintext, '/file') self.assertEqual(user_input.type, FILE) - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", return_value="/clear") - def test_command(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', return_value='/clear') + def test_command(self, *_: Any) -> None: user_input = get_input(self.window, self.settings) - self.assertEqual(user_input.plaintext, "clear") + self.assertEqual(user_input.plaintext, 'clear') self.assertEqual(user_input.type, COMMAND) class TestUserInput(unittest.TestCase): + def test_user_input(self) -> None: - user_input = UserInput("test_plaintext", FILE) - self.assertEqual(user_input.plaintext, "test_plaintext") + user_input = UserInput('test_plaintext', FILE) + self.assertEqual(user_input.plaintext, 'test_plaintext') self.assertEqual(user_input.type, FILE) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/transmitter/test_window_mock.py b/tests/transmitter/test_window_mock.py new file mode 100644 index 0000000..edc25da --- /dev/null +++ b/tests/transmitter/test_window_mock.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python3.7 +# -*- coding: utf-8 -*- + +""" +TFC - Onion-routed, endpoint secure messaging system +Copyright (C) 2013-2020 Markus Ottela + +This file is part of TFC. + +TFC is free software: you can redistribute it and/or modify it under the terms +of the GNU General Public License as published by the Free Software Foundation, +either version 3 of the License, or (at your option) any later version. + +TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; +without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR +PURPOSE. See the GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with TFC. If not, see . +""" + +import unittest + +from src.transmitter.window_mock import MockWindow + +from tests.mock_classes import create_contact, Contact +from tests.utils import nick_to_pub_key + + +class TestMockWindow(unittest.TestCase): + + def setUp(self) -> None: + """Pre-test actions.""" + self.window = MockWindow(nick_to_pub_key("Alice"), contacts=[create_contact(n) for n in ['Alice', 'Bob']]) + + def test_window_iterates_over_contacts(self) -> None: + for c in self.window: + self.assertIsInstance(c, Contact) diff --git a/tests/transmitter/test_windows.py b/tests/transmitter/test_windows.py index 509d80a..d8fe15f 100644 --- a/tests/transmitter/test_windows.py +++ b/tests/transmitter/test_windows.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -22,66 +22,35 @@ along with TFC. If not, see . 
import unittest from unittest import mock +from typing import Any -from src.common.crypto import blake2b +from src.common.crypto import blake2b from src.common.db_contacts import Contact -from src.common.statics import ( - COMMAND_PACKET_QUEUE, - CONFIRM_CODE_LENGTH, - KEX_STATUS_PENDING, - KEX_STATUS_VERIFIED, - WINDOW_SELECT_QUEUE, - WIN_TYPE_CONTACT, - WIN_TYPE_GROUP, -) +from src.common.statics import (COMMAND_PACKET_QUEUE, CONFIRM_CODE_LENGTH, KEX_STATUS_PENDING, KEX_STATUS_VERIFIED, + LOCAL_ID, WINDOW_SELECT_QUEUE, WIN_TYPE_CONTACT, WIN_TYPE_GROUP) from src.transmitter.windows import select_window, TxWindow -from src.transmitter.window_mock import MockWindow - -from tests.mock_classes import ( - ContactList, - create_contact, - Gateway, - GroupList, - OnionService, - Settings, - UserInput, -) -from tests.utils import ( - gen_queue_dict, - group_name_to_group_id, - nick_to_onion_address, - nick_to_pub_key, -) -from tests.utils import tear_queues, TFCTestCase, VALID_ECDHE_PUB_KEY -class TestMockWindow(unittest.TestCase): - def setUp(self) -> None: - """Pre-test actions.""" - self.window = MockWindow( - nick_to_pub_key("Alice"), - contacts=[create_contact(n) for n in ["Alice", "Bob"]], - ) - - def test_window_iterates_over_contacts(self) -> None: - for c in self.window: - self.assertIsInstance(c, Contact) +from tests.mock_classes import ContactList, create_contact, Gateway, GroupList, OnionService, Settings, UserInput +from tests.utils import gen_queue_dict, group_name_to_group_id, nick_to_onion_address, nick_to_pub_key +from tests.utils import tear_queues, TFCTestCase, VALID_ECDHE_PUB_KEY class TestTxWindow(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.contact_list = ContactList(["Alice", "Bob"]) - self.group_list = GroupList(groups=["test_group", "test_group_2"]) - self.window = TxWindow(self.contact_list, self.group_list) - self.window.group = self.group_list.get_group("test_group") - self.window.type = WIN_TYPE_GROUP - self.settings = Settings() - self.queues = gen_queue_dict() + self.contact_list = ContactList(['Alice', 'Bob', LOCAL_ID]) + self.group_list = GroupList(groups=['test_group', 'test_group_2']) + self.window = TxWindow(self.contact_list, self.group_list) + self.window.group = self.group_list.get_group('test_group') + self.window.type = WIN_TYPE_GROUP + self.settings = Settings() + self.queues = gen_queue_dict() self.onion_service = OnionService() - self.gateway = Gateway() - self.args = self.settings, self.queues, self.onion_service, self.gateway + self.gateway = Gateway() + self.args = self.settings, self.queues, self.onion_service, self.gateway def tearDown(self) -> None: """Post-test actions.""" @@ -97,89 +66,69 @@ class TestTxWindow(TFCTestCase): def test_len_returns_number_of_contacts_in_window(self) -> None: # Setup - self.window.window_contacts = self.contact_list.contacts + self.window.window_contacts = [self.contact_list.get_contact_by_pub_key(nick_to_pub_key('Alice')), + self.contact_list.get_contact_by_pub_key(nick_to_pub_key('Bob'))] # Test self.assertEqual(len(self.window), 2) - def test_group_window_change_during_traffic_masking_raises_fr(self) -> None: + def test_group_window_change_during_traffic_masking_raises_se(self) -> None: # Setup self.settings.traffic_masking = True - self.window.uid = "test_group" + self.window.uid = 'test_group' # Test - self.assert_se( - "Error: Can't change window during traffic masking.", - self.window.select_tx_window, - *self.args, - selection="test_group_2", - cmd=True, - ) + self.assert_se("Error: 
Can't change window during traffic masking.", + self.window.select_tx_window, *self.args, selection='test_group_2', cmd=True) - def test_contact_window_change_during_traffic_masking_raises_fr(self) -> None: + def test_contact_window_change_during_traffic_masking_raises_se(self) -> None: # Setup self.settings.traffic_masking = True - self.window.uid = nick_to_pub_key("Alice") + self.window.uid = nick_to_pub_key("Alice") # Test - self.assert_se( - "Error: Can't change window during traffic masking.", - self.window.select_tx_window, - *self.args, - selection=nick_to_onion_address("Bob"), - cmd=True, - ) + self.assert_se("Error: Can't change window during traffic masking.", + self.window.select_tx_window, *self.args, selection=nick_to_onion_address("Bob"), cmd=True) def test_contact_window_reload_during_traffic_masking(self) -> None: # Setup self.settings.traffic_masking = True - self.window.uid = nick_to_pub_key("Alice") + self.window.uid = nick_to_pub_key("Alice") # Test - self.assertIsNone( - self.window.select_tx_window( - *self.args, selection=nick_to_onion_address("Alice"), cmd=True - ) - ) + self.assertIsNone(self.window.select_tx_window(*self.args, selection=nick_to_onion_address("Alice"), cmd=True)) self.assertEqual(self.window.uid, nick_to_pub_key("Alice")) def test_group_window_reload_during_traffic_masking(self) -> None: # Setup self.settings.traffic_masking = True - self.window.name = "test_group" - self.window.uid = group_name_to_group_id("test_group") + self.window.name = 'test_group' + self.window.uid = group_name_to_group_id('test_group') # Test - self.assertIsNone( - self.window.select_tx_window(*self.args, selection="test_group", cmd=True) - ) - self.assertEqual(self.window.uid, group_name_to_group_id("test_group")) + self.assertIsNone(self.window.select_tx_window(*self.args, selection='test_group', cmd=True)) + self.assertEqual(self.window.uid, group_name_to_group_id('test_group')) - def test_invalid_selection_raises_fr(self) -> None: + def test_invalid_selection_raises_se(self) -> None: # Setup self.window.uid = nick_to_pub_key("Alice") # Test - self.assert_se( - "Error: No contact/group was found.", - self.window.select_tx_window, - *self.args, - selection=nick_to_onion_address("Charlie"), - cmd=True, - ) + self.assert_se("Error: No contact/group was found.", + self.window.select_tx_window, *self.args, selection=nick_to_onion_address("Charlie"), cmd=True) - @mock.patch("builtins.input", return_value=nick_to_onion_address("Bob")) - def test_window_selection_during_traffic_masking(self, *_) -> None: + @mock.patch('builtins.input', return_value=nick_to_onion_address("Bob")) + def test_window_selection_during_traffic_masking(self, *_: Any) -> None: # Setup self.settings.traffic_masking = True - self.window.uid = None + self.window.uid = None # Test self.assertIsNone(self.window.select_tx_window(*self.args)) self.assertEqual(self.queues[WINDOW_SELECT_QUEUE].qsize(), 1) - @mock.patch("builtins.input", return_value=nick_to_onion_address("Bob")) - def test_contact_window_selection_from_input(self, *_) -> None: + @mock.patch('builtins.input', return_value=nick_to_onion_address("Bob")) + def test_contact_window_selection_from_input(self, *_: Any) -> None: # Setup self.window.uid = None @@ -191,28 +140,26 @@ class TestTxWindow(TFCTestCase): # Setup self.window.uid = None - self.assertIsNone( - self.window.select_tx_window(*self.args, selection="test_group", cmd=True) - ) - self.assertEqual(self.window.uid, group_name_to_group_id("test_group")) + 
self.assertIsNone(self.window.select_tx_window(*self.args, selection='test_group', cmd=True)) + self.assertEqual(self.window.uid, group_name_to_group_id('test_group')) def test_deselect_window(self) -> None: # Setup self.window.window_contacts = self.contact_list.contacts - self.window.contact = self.contact_list.get_contact_by_address_or_nick("Bob") - self.window.name = "Bob" - self.window.type = WIN_TYPE_CONTACT - self.window.uid = nick_to_pub_key("Bob") + self.window.contact = self.contact_list.get_contact_by_address_or_nick("Bob") + self.window.name = 'Bob' + self.window.type = WIN_TYPE_CONTACT + self.window.uid = nick_to_pub_key("Bob") # Test self.assertIsNone(self.window.deselect()) self.assertIsNone(self.window.contact) - self.assertEqual(self.window.name, "") - self.assertEqual(self.window.type, "") - self.assertEqual(self.window.uid, b"") + self.assertEqual(self.window.name, '') + self.assertEqual(self.window.type, '') + self.assertEqual(self.window.uid, b'') def test_is_selected(self) -> None: - self.window.name = "" + self.window.name = '' self.assertFalse(self.window.is_selected()) self.window.name = nick_to_pub_key("Bob") @@ -220,9 +167,9 @@ class TestTxWindow(TFCTestCase): def test_update_log_messages_for_contact(self) -> None: # Setup - self.window.type = WIN_TYPE_CONTACT - self.window.log_messages = None - self.window.contact = self.contact_list.get_contact_by_address_or_nick("Alice") + self.window.type = WIN_TYPE_CONTACT + self.window.log_messages = None + self.window.contact = self.contact_list.get_contact_by_address_or_nick('Alice') self.window.contact.log_messages = False # Test @@ -231,9 +178,9 @@ class TestTxWindow(TFCTestCase): def test_update_log_messages_for_group(self) -> None: # Setup - self.window.type = WIN_TYPE_GROUP - self.window.log_messages = None - self.window.group = self.group_list.get_group("test_group") + self.window.type = WIN_TYPE_GROUP + self.window.log_messages = None + self.window.group = self.group_list.get_group('test_group') self.window.group.log_messages = False # Test @@ -243,180 +190,147 @@ class TestTxWindow(TFCTestCase): def test_update_group_win_members_if_group_is_available(self) -> None: # Setup self.window.window_contacts = [] - self.window.group = None - self.window.group_id = group_name_to_group_id("test_group") - self.window.name = "test_group" - self.window.type = WIN_TYPE_GROUP + self.window.group = None + self.window.group_id = group_name_to_group_id('test_group') + self.window.name = 'test_group' + self.window.type = WIN_TYPE_GROUP # Test self.assertIsNone(self.window.update_window(self.group_list)) - self.assertEqual(self.window.group, self.group_list.get_group("test_group")) + self.assertEqual(self.window.group, self.group_list.get_group('test_group')) self.assertEqual(self.window.window_contacts, self.window.group.members) def test_window_contact_is_reloaded_when_contact_is_active(self) -> None: # Setup - self.window.type = WIN_TYPE_CONTACT - self.window.contact = create_contact("Alice") + self.window.type = WIN_TYPE_CONTACT + self.window.contact = create_contact('Alice') self.window.window_contacts = [self.window.contact] - self.assertIsNot( - self.window.contact, - self.window.contact_list.get_contact_by_pub_key(nick_to_pub_key("Alice")), - ) - self.assertIsNot( - self.window.window_contacts[0], - self.window.contact_list.get_contact_by_pub_key(nick_to_pub_key("Alice")), - ) + self.assertIsNot(self.window.contact, + self.window.contact_list.get_contact_by_pub_key(nick_to_pub_key('Alice'))) + 
self.assertIsNot(self.window.window_contacts[0], + self.window.contact_list.get_contact_by_pub_key(nick_to_pub_key('Alice'))) # Test self.assertIsNone(self.window.update_window(self.group_list)) - self.assertIs( - self.window.contact, - self.window.contact_list.get_contact_by_pub_key(nick_to_pub_key("Alice")), - ) - self.assertIs( - self.window.window_contacts[0], - self.window.contact_list.get_contact_by_pub_key(nick_to_pub_key("Alice")), - ) + self.assertIs(self.window.contact, + self.window.contact_list.get_contact_by_pub_key(nick_to_pub_key('Alice'))) + self.assertIs(self.window.window_contacts[0], + self.window.contact_list.get_contact_by_pub_key(nick_to_pub_key('Alice'))) def test_deactivate_window_if_group_is_not_available(self) -> None: # Setup self.window.window_contacts = [] - self.window.group = None - self.window.name = "test_group_3" - self.window.type = WIN_TYPE_GROUP + self.window.group = None + self.window.name = 'test_group_3' + self.window.type = WIN_TYPE_GROUP # Test self.assertIsNone(self.window.update_window(self.group_list)) self.assertIsNone(self.window.contact) - self.assertEqual(self.window.name, "") - self.assertEqual(self.window.type, "") - self.assertEqual(self.window.uid, b"") + self.assertEqual(self.window.name, '') + self.assertEqual(self.window.type, '') + self.assertEqual(self.window.uid, b'') - @mock.patch("time.sleep", return_value=None) - @mock.patch( - "builtins.input", - side_effect=[ - "Alice", - VALID_ECDHE_PUB_KEY, - "yes", - blake2b(nick_to_pub_key("Alice"), digest_size=CONFIRM_CODE_LENGTH).hex(), - ], - ) - @mock.patch("shutil.get_terminal_size", return_value=[200, 200]) - def test_selecting_pending_contact_starts_key_exchange(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', side_effect=['Alice', + VALID_ECDHE_PUB_KEY, + 'yes', + blake2b(nick_to_pub_key('Alice'), + digest_size=CONFIRM_CODE_LENGTH).hex()]) + @mock.patch('shutil.get_terminal_size', return_value=[200, 200]) + def test_selecting_pending_contact_starts_key_exchange(self, *_: Any) -> None: # Setup - alice = self.contact_list.get_contact_by_address_or_nick("Alice") - bob = self.contact_list.get_contact_by_address_or_nick("Bob") + alice = self.contact_list.get_contact_by_address_or_nick('Alice') + bob = self.contact_list.get_contact_by_address_or_nick('Bob') alice.kex_status = KEX_STATUS_PENDING - bob.kex_status = KEX_STATUS_PENDING + bob.kex_status = KEX_STATUS_PENDING # Test self.assertIsNone(self.window.select_tx_window(*self.args)) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 2) - self.assertEqual(self.queues[WINDOW_SELECT_QUEUE].qsize(), 0) + self.assertEqual(self.queues[WINDOW_SELECT_QUEUE].qsize(), 0) self.assertEqual(alice.kex_status, KEX_STATUS_VERIFIED) - @mock.patch("time.sleep", return_value=None) - @mock.patch( - "builtins.input", - side_effect=[ - "/add", - nick_to_onion_address("Alice"), - "Alice", - "", - VALID_ECDHE_PUB_KEY, - "yes", - blake2b(nick_to_pub_key("Alice"), digest_size=CONFIRM_CODE_LENGTH).hex(), - ], - ) - @mock.patch("shutil.get_terminal_size", return_value=[200, 200]) - def test_adding_new_contact_from_contact_selection(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', side_effect=['/add', + nick_to_onion_address('Alice'), + 'Alice', + '', + VALID_ECDHE_PUB_KEY, + 'yes', + blake2b(nick_to_pub_key('Alice'), + digest_size=CONFIRM_CODE_LENGTH).hex()]) + @mock.patch('shutil.get_terminal_size', return_value=[200, 200]) + def 
test_adding_new_contact_from_contact_selection(self, *_: Any) -> None: # Setup - alice = self.contact_list.get_contact_by_address_or_nick("Alice") + alice = self.contact_list.get_contact_by_address_or_nick('Alice') alice.kex_status = KEX_STATUS_PENDING # Test - self.assert_se("New contact added.", self.window.select_tx_window, *self.args) + self.assert_se('New contact added.', + self.window.select_tx_window, *self.args) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1) - self.assertEqual(self.queues[WINDOW_SELECT_QUEUE].qsize(), 0) + self.assertEqual(self.queues[WINDOW_SELECT_QUEUE].qsize(), 0) self.assertEqual(alice.kex_status, KEX_STATUS_VERIFIED) - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", side_effect=["/rm "]) - def test_missing_account_when_removing_raises_fr(self, *_) -> None: - self.assert_se( - "Error: No account specified.", self.window.select_tx_window, *self.args - ) + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', side_effect=['/rm ']) + def test_missing_account_when_removing_raises_se(self, *_: Any) -> None: + self.assert_se("Error: No account specified.", self.window.select_tx_window, *self.args) - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", side_effect=["/rm Charlie", "yes"]) - def test_unknown_account_when_removing_raises_fr(self, *_) -> None: - self.assert_se( - "Error: Unknown contact 'Charlie'.", - self.window.select_tx_window, - *self.args, - ) + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', side_effect=['/rm Charlie', 'yes']) + def test_unknown_account_when_removing_raises_se(self, *_: Any) -> None: + self.assert_se("Error: Unknown contact 'Charlie'.", self.window.select_tx_window, *self.args) - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", side_effect=["/rm Alice", "no"]) - def test_abort_removal_of_contact_form_contact_selection(self, *_) -> None: - self.assert_se( - "Removal of contact aborted.", self.window.select_tx_window, *self.args - ) + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', side_effect=['/rm Alice', 'no']) + def test_abort_removal_of_contact_form_contact_selection(self, *_: Any) -> None: + self.assert_se("Removal of contact aborted.", self.window.select_tx_window, *self.args) - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", side_effect=["/rm Alice", "yes"]) - def test_removing_pending_contact_from_contact_selection(self, *_) -> None: - self.assert_se( - "Removed contact 'Alice'.", self.window.select_tx_window, *self.args - ) + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', side_effect=['/rm Alice', 'yes']) + def test_removing_pending_contact_from_contact_selection(self, *_: Any) -> None: + self.assert_se("Removed contact 'Alice'.", self.window.select_tx_window, *self.args) - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", side_effect=["/connect", b"a".hex()]) - def test_sending_onion_service_data_from_contact_selection(self, *_) -> None: + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', side_effect=['/connect', b'a'.hex()]) + def test_sending_onion_service_data_from_contact_selection(self, *_: Any) -> None: self.assertIsNone(self.window.select_tx_window(*self.args)) self.assertEqual(len(self.gateway.packets), 1) - @mock.patch("time.sleep", return_value=None) - @mock.patch("builtins.input", side_effect=["/help"]) - def 
test_invalid_command_raises_fr(self, *_) -> None: - self.assert_se( - "Error: Invalid command.", self.window.select_tx_window, *self.args - ) + @mock.patch('time.sleep', return_value=None) + @mock.patch('builtins.input', side_effect=['/help']) + def test_invalid_command_raises_se(self, *_: Any) -> None: + self.assert_se("Error: Invalid command.", self.window.select_tx_window, *self.args) class TestSelectWindow(TFCTestCase): + def setUp(self) -> None: """Pre-test actions.""" - self.contact_list = ContactList(nicks=["Alice"]) - self.group_list = GroupList() - self.user_input = UserInput() - self.window = TxWindow(self.contact_list, self.group_list) - self.settings = Settings() - self.queues = gen_queue_dict() + self.contact_list = ContactList(nicks=['Alice']) + self.group_list = GroupList() + self.user_input = UserInput() + self.window = TxWindow(self.contact_list, self.group_list) + self.settings = Settings() + self.queues = gen_queue_dict() self.onion_service = OnionService() - self.gateway = Gateway() - self.args = ( - self.user_input, - self.window, - self.settings, - self.queues, - self.onion_service, - self.gateway, - ) + self.gateway = Gateway() + self.args = self.user_input, self.window, self.settings, self.queues, self.onion_service, self.gateway def tearDown(self) -> None: """Post-test actions.""" tear_queues(self.queues) - def test_invalid_selection_raises_fr(self) -> None: + def test_invalid_selection_raises_se(self) -> None: # Setup - self.user_input.plaintext = "msg" + self.user_input.plaintext = 'msg' self.assert_se("Error: Invalid recipient.", select_window, *self.args) # Test self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 0) - self.assertEqual(self.queues[WINDOW_SELECT_QUEUE].qsize(), 0) + self.assertEqual(self.queues[WINDOW_SELECT_QUEUE].qsize(), 0) def test_window_selection(self) -> None: # Setup @@ -425,8 +339,8 @@ class TestSelectWindow(TFCTestCase): # Test self.assertIsNone(select_window(*self.args)) self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1) - self.assertEqual(self.queues[WINDOW_SELECT_QUEUE].qsize(), 0) + self.assertEqual(self.queues[WINDOW_SELECT_QUEUE].qsize(), 0) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main(exit=False) diff --git a/tests/utils.py b/tests/utils.py index 9dc8dac..f3fbc72 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. 
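Note on the pattern used by the renamed `test_*_raises_se` cases above: `builtins.input` is patched with a `side_effect` list so that each prompt consumes the next canned answer, and `assert_se` then verifies that `select_tx_window` raises a `SoftError` carrying the expected message. A minimal, self-contained sketch of that input-mocking pattern follows; `ExampleError` and `prompt_and_confirm` are illustrative stand-ins and not part of TFC.

import unittest
from unittest import mock


class ExampleError(Exception):
    """Stand-in for TFC's SoftError (illustrative only)."""


def prompt_and_confirm() -> str:
    """Read a nick and a confirmation code, raising ExampleError on a bad code."""
    nick = input('Nick: ')
    code = input('Confirmation code: ')
    if code != 'yes':
        raise ExampleError('Error: Invalid confirmation code.')
    return nick


class TestPromptAndConfirm(unittest.TestCase):

    @mock.patch('builtins.input', side_effect=['Alice', 'yes'])
    def test_valid_code_returns_nick(self, *_) -> None:
        # Each call to input() returns the next item of side_effect.
        self.assertEqual(prompt_and_confirm(), 'Alice')

    @mock.patch('builtins.input', side_effect=['Alice', 'no'])
    def test_invalid_code_raises_error(self, *_) -> None:
        # Mirrors assert_se: check both the exception type and its message.
        with self.assertRaises(ExampleError) as cm:
            prompt_and_confirm()
        self.assertEqual(str(cm.exception), 'Error: Invalid confirmation code.')


if __name__ == '__main__':
    unittest.main(exit=False)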
@@ -26,86 +26,36 @@ import shutil import unittest import zlib -from contextlib import contextmanager, redirect_stdout +from contextlib import contextmanager, redirect_stdout from multiprocessing import Queue -from typing import Any, Callable, Dict, List, Union +from typing import Any, Callable, Dict, List, Union -from src.common.crypto import blake2b, byte_padding, csprng, encrypt_and_sign -from src.common.encoding import int_to_bytes, pub_key_to_onion_address -from src.common.misc import split_byte_string +from src.common.crypto import blake2b, byte_padding, csprng, encrypt_and_sign +from src.common.encoding import int_to_bytes, pub_key_to_onion_address +from src.common.misc import split_byte_string from src.common.exceptions import SoftError -from src.common.statics import ( - COMMAND, - COMMAND_DATAGRAM_HEADER, - COMMAND_PACKET_QUEUE, - COMPRESSION_LEVEL, - CONTACT_MGMT_QUEUE, - CONTACT_REQ_QUEUE, - C_A_HEADER, - C_E_HEADER, - C_L_HEADER, - C_REQ_MGMT_QUEUE, - C_REQ_STATE_QUEUE, - C_S_HEADER, - DST_COMMAND_QUEUE, - DST_MESSAGE_QUEUE, - EXIT_QUEUE, - FILE, - FILE_DATAGRAM_HEADER, - FILE_PACKET_CTR_LENGTH, - F_A_HEADER, - F_E_HEADER, - F_L_HEADER, - F_S_HEADER, - F_TO_FLASK_QUEUE, - GATEWAY_QUEUE, - GROUP_ID_LENGTH, - GROUP_MESSAGE_HEADER, - GROUP_MGMT_QUEUE, - GROUP_MSG_ID_LENGTH, - GROUP_MSG_QUEUE, - INITIAL_HARAC, - KDB_M_KEY_CHANGE_HALT_HEADER, - KEY_MANAGEMENT_QUEUE, - KEY_MGMT_ACK_QUEUE, - LOCAL_KEY_DATAGRAM_HEADER, - LOGFILE_MASKING_QUEUE, - LOG_PACKET_QUEUE, - LOG_SETTING_QUEUE, - MESSAGE, - MESSAGE_DATAGRAM_HEADER, - MESSAGE_PACKET_QUEUE, - M_A_HEADER, - M_E_HEADER, - M_L_HEADER, - M_S_HEADER, - M_TO_FLASK_QUEUE, - ONION_CLOSE_QUEUE, - ONION_KEY_QUEUE, - PADDING_LENGTH, - PRIVATE_MESSAGE_HEADER, - RELAY_PACKET_QUEUE, - SENDER_MODE_QUEUE, - SRC_TO_RELAY_QUEUE, - SYMMETRIC_KEY_LENGTH, - TM_COMMAND_PACKET_QUEUE, - TM_FILE_PACKET_QUEUE, - TM_MESSAGE_PACKET_QUEUE, - TM_NOISE_COMMAND_QUEUE, - TM_NOISE_PACKET_QUEUE, - TOR_DATA_QUEUE, - TRAFFIC_MASKING_QUEUE, - TRUNC_ADDRESS_LENGTH, - UNIT_TEST_QUEUE, - URL_TOKEN_QUEUE, - US_BYTE, - WINDOW_SELECT_QUEUE, -) +from src.common.statics import (ACCOUNT_CHECK_QUEUE, ACCOUNT_SEND_QUEUE, COMMAND, COMMAND_DATAGRAM_HEADER, + COMMAND_PACKET_QUEUE, COMPRESSION_LEVEL, CONTACT_MGMT_QUEUE, CONTACT_REQ_QUEUE, + C_A_HEADER, C_E_HEADER, C_L_HEADER, C_REQ_MGMT_QUEUE, C_REQ_STATE_QUEUE, C_S_HEADER, + DST_COMMAND_QUEUE, DST_MESSAGE_QUEUE, EXIT_QUEUE, FILE, FILE_DATAGRAM_HEADER, + FILE_PACKET_CTR_LENGTH, F_A_HEADER, F_E_HEADER, F_L_HEADER, F_S_HEADER, + F_TO_FLASK_QUEUE, GATEWAY_QUEUE, GROUP_ID_LENGTH, GROUP_MESSAGE_HEADER, + GROUP_MGMT_QUEUE, GROUP_MSG_ID_LENGTH, GROUP_MSG_QUEUE, GUI_INPUT_QUEUE, + INITIAL_HARAC, KDB_M_KEY_CHANGE_HALT_HEADER, KEY_MANAGEMENT_QUEUE, + KEY_MGMT_ACK_QUEUE, LOCAL_KEY_DATAGRAM_HEADER, LOGFILE_MASKING_QUEUE, + LOG_PACKET_QUEUE, LOG_SETTING_QUEUE, MESSAGE, MESSAGE_DATAGRAM_HEADER, + MESSAGE_PACKET_QUEUE, M_A_HEADER, M_E_HEADER, M_L_HEADER, M_S_HEADER, + M_TO_FLASK_QUEUE, ONION_CLOSE_QUEUE, ONION_KEY_QUEUE, PADDING_LENGTH, + PRIVATE_MESSAGE_HEADER, PUB_KEY_CHECK_QUEUE, PUB_KEY_SEND_QUEUE,RELAY_PACKET_QUEUE, + SENDER_MODE_QUEUE, SRC_TO_RELAY_QUEUE, SYMMETRIC_KEY_LENGTH, TM_COMMAND_PACKET_QUEUE, + TM_FILE_PACKET_QUEUE, TM_MESSAGE_PACKET_QUEUE, TM_NOISE_COMMAND_QUEUE, + TM_NOISE_PACKET_QUEUE, TOR_DATA_QUEUE, TRAFFIC_MASKING_QUEUE, TRUNC_ADDRESS_LENGTH, + UNIT_TEST_QUEUE, URL_TOKEN_QUEUE, US_BYTE, USER_ACCOUNT_QUEUE, WINDOW_SELECT_QUEUE) -UNDECODABLE_UNICODE = bytes.fromhex("3f264d4189d7a091") -VALID_ECDHE_PUB_KEY = 
"4EcuqaDddsdsucgBX2PY2qR8hReAaeSN2ohJB9w5Cvq6BQjDaPPgzSvW932aHiosT42SKJGu2PpS1Za3Xrao" -VALID_LOCAL_KEY_KDK = "5HueCGU8rMjxEXxiPuD5BDku4MkFqeZyd4dZ1jvhTVqvbTLvyTJ" +UNDECODABLE_UNICODE = bytes.fromhex('3f264d4189d7a091') +VALID_ECDHE_PUB_KEY = '4EcuqaDddsdsucgBX2PY2qR8hReAaeSN2ohJB9w5Cvq6BQjDaPPgzSvW932aHiosT42SKJGu2PpS1Za3Xrao' +VALID_LOCAL_KEY_KDK = '5HueCGU8rMjxEXxiPuD5BDku4MkFqeZyd4dZ1jvhTVqvbTLvyTJ' def nick_to_pub_key(nick: str) -> bytes: @@ -129,7 +79,8 @@ def group_name_to_group_id(name: str) -> bytes: class TFCTestCase(unittest.TestCase): - def assert_se(self, msg, func, *args, **kwargs) -> None: + + def assert_se(self, msg: str, func: Callable[..., Any], *args: Any, **kwargs: Any) -> None: """\ Check that SoftError error is raised and that a specific message is displayed. @@ -143,7 +94,7 @@ class TFCTestCase(unittest.TestCase): self.assertTrue(e_raised) - def assert_prints(self, msg, func, *args, **kwargs) -> None: + def assert_prints(self, msg: str, func: Callable[..., Any], *args: Any, **kwargs: Any) -> None: """Check that specific message is printed by function.""" f = io.StringIO() with redirect_stdout(f): @@ -177,8 +128,8 @@ def cd_unit_test() -> str: def cleanup(name) -> None: """Remove unit test related directory.""" - os.chdir("..") - shutil.rmtree(f"{name}/") + os.chdir('..') + shutil.rmtree(f'{name}/') def func_that_raises(exception: Any) -> Callable: @@ -189,7 +140,7 @@ def func_that_raises(exception: Any) -> Callable: def tamper_file(file_name: str, tamper_size: int) -> None: """Change `tamper_size` bytes in file `file_name`.""" - with open(file_name, "rb") as f: + with open(file_name, 'rb') as f: data = f.read() while True: @@ -198,18 +149,18 @@ def tamper_file(file_name: str, tamper_size: int) -> None: break new_data = tampered_bytes + data[tamper_size:] - with open(file_name, "wb") as f: + with open(file_name, 'wb') as f: f.write(new_data) -def tear_queue(queue: "Queue"): +def tear_queue(queue: 'Queue') -> None: """Empty and close multiprocessing queue.""" - while queue.qsize(): + while queue.qsize() != 0: queue.get() queue.close() -def tear_queues(queues: Dict[bytes, "Queue"]): +def tear_queues(queues: Dict[bytes, 'Queue']) -> None: """Empty and close multiprocessing queues.""" for q in queues: tear_queue(queues[q]) @@ -221,46 +172,49 @@ def tamper_last_byte(byte_string: bytes) -> bytes: def assembly_packet_creator( - # --- Payload creation --- - # Common settings - packet_type: str, # Packet type (MESSAGE, FILE, or COMMAND, do not use tampered values) - payload: Union[ - bytes, str - ] = None, # Payload message content (Plaintext message (str), file data (bytes), or command (bytes)) - inner_key: bytes = None, # Key for inner encryption layer - tamper_ciphertext: bool = False, # When True, tampers with the inner layer of encryption to make it undecryptable - # Message packet parameters - message_header: bytes = None, # Message header (PRIVATE/GROUP_MESSAGE_HEADER, FILE_KEY_HEADER, or tamper byte) - tamper_plaintext: bool = False, # When true, replaces plaintext with undecodable bytestring. 
- group_id: bytes = None, # When specified, creates message for group (4 byte random string) - group_msg_id: bytes = None, # The group message id (16 byte random string) - whisper_header: bytes = b"\x00", # Define whisper-header (b'\x00' = False, b'\x01' = True, b'\x02' = tampered) - # File packet parameters - create_zip_bomb: bool = False, # When True, creates large enough ciphertext to trigger zip bomb protection - tamper_compression: bool = False, # When True, tampers with compression to make decompression impossible - packet_time: bytes = None, # Allows overriding the 8-byte packet time header - packet_size: bytes = None, # Allows overriding the 8-byte packet size header - file_name: bytes = None, # Name of the file (allows e.g. injection of invalid file names) - omit_header_delim: bool = False, # When True, omits the file_name<>file_data delimiter. - # --- Assembly packet splitting --- - s_header_override: bytes = None, # Allows overriding the `short packet` assembly packet header - l_header_override: bytes = None, # Allows overriding the `start of long packet` assembly packet header - a_header_override: bytes = None, # Allows overriding the `appended long packet` assembly packet header - e_header_override: bytes = None, # Allows overriding the `last packet of long packet` assembly packet header - tamper_cmd_hash: bool = False, # When True, tampers with the command hash to make it undecryptable - no_padding: bool = False, # When True, does not add padding to assembly packet. - split_length: int = PADDING_LENGTH, # Allows configuring the length to which assembly packets are split - # --- Packet encryption --- - encrypt_packet: bool = False, # When True, encrypts packet into set of datagrams starting with key (32*b'\x01') - message_number: int = 0, # Determines the message key and harac for message - harac: int = INITIAL_HARAC, # Allows choosing the hash ratchet counter for packet encryption - message_key: bytes = None, # Allows choosing the message key to encrypt message with - header_key: bytes = None, # Allows choosing the header key for hash ratchet encryption - tamper_harac: bool = False, # When True, tampers with the MAC of encrypted harac - tamper_message: bool = False, # When True, tampers with the MAC of encrypted message - onion_pub_key: bytes = b"", # Defines the contact public key to use with datagram creation - origin_header: bytes = b"", # Allows editing the origin header -) -> List[bytes]: + # --- Payload creation --- + + # Common settings + packet_type: str, # Packet type (MESSAGE, FILE, or COMMAND, do not use tampered values) + payload: Union[bytes, str] = None, # Payload message content (Plaintext message (str), file data (bytes), or command (bytes)) + inner_key: bytes = None, # Key for inner encryption layer + tamper_ciphertext: bool = False, # When True, tampers with the inner layer of encryption to make it undecryptable + + # Message packet parameters + message_header: bytes = None, # Message header (PRIVATE_MESSAGE_HEADER, GROUP_MESSAGE_HEADER, FILE_KEY_HEADER, or tamper byte) + tamper_plaintext: bool = False, # When true, replaces plaintext with undecodable bytestring. 
+ group_id: bytes = None, # When specified, creates message for group (4 byte random string) + group_msg_id: bytes = None, # The group message id (16 byte random string) + whisper_header: bytes = b'\x00', # Define whisper-header (b'\x00' for False, b'\x01' for True, others for tampering) + + # File packet parameters + create_zip_bomb: bool = False, # When True, creates large enough ciphertext to trigger zip bomb protection + tamper_compression: bool = False, # When True, tampers with compression to make decompression impossible + packet_time: bytes = None, # Allows overriding the 8-byte packet time header + packet_size: bytes = None, # Allows overriding the 8-byte packet size header + file_name: bytes = None, # Name of the file (allows e.g. injection of invalid file names) + omit_header_delim: bool = False, # When True, omits the file_name<>file_data delimiter. + + # --- Assembly packet splitting --- + s_header_override: bytes = None, # Allows overriding the `short packet` assembly packet header + l_header_override: bytes = None, # Allows overriding the `start of long packet` assembly packet header + a_header_override: bytes = None, # Allows overriding the `appended long packet` assembly packet header + e_header_override: bytes = None, # Allows overriding the `last packet of long packet` assembly packet header + tamper_cmd_hash: bool = False, # When True, tampers with the command hash to make it undecryptable + no_padding: bool = False, # When True, does not add padding to assembly packet. + split_length: int = PADDING_LENGTH, # Allows configuring the length to which assembly packets are split + + # --- Packet encryption --- + encrypt_packet: bool = False, # When True, encrypts packet into set of datagrams starting with default key (32*b'\x01') + message_number: int = 0, # Determines the message key and harac for message + harac: int = INITIAL_HARAC, # Allows choosing the hash ratchet counter for packet encryption + message_key: bytes = None, # Allows choosing the message key to encrypt message with + header_key: bytes = None, # Allows choosing the header key for hash ratchet encryption + tamper_harac: bool = False, # When True, tampers with the MAC of encrypted harac + tamper_message: bool = False, # When True, tampers with the MAC of encrypted message + onion_pub_key: bytes = b'', # Defines the contact public key to use with datagram creation + origin_header: bytes = b'', # Allows editing the origin header + ) -> List[bytes]: """Create assembly packet list and optionally encrypt it to create datagram list.""" # ------------------------------------------------------------------------------------------------------------------ @@ -269,14 +223,11 @@ def assembly_packet_creator( if packet_type == MESSAGE: - if not isinstance(payload, str): - raise SystemExit("Invalid payload type.") + assert isinstance(payload, str) if message_header is None: if group_id is not None: - group_msg_id_bytes = ( - bytes(GROUP_MSG_ID_LENGTH) if group_msg_id is None else group_msg_id - ) + group_msg_id_bytes = bytes(GROUP_MSG_ID_LENGTH) if group_msg_id is None else group_msg_id header = GROUP_MESSAGE_HEADER + group_id + group_msg_id_bytes else: header = PRIVATE_MESSAGE_HEADER @@ -291,25 +242,21 @@ def assembly_packet_creator( elif packet_type == FILE: # Create packets for traffic masking file transmission - file_data_size = 100_000_001 if create_zip_bomb else 10_000 - payload_bytes = os.urandom(file_data_size) if payload is None else payload + file_data_size = 100_000_001 if create_zip_bomb else 10_000 + 
payload_bytes = os.urandom(file_data_size) if payload is None else payload - compressed = zlib.compress(payload_bytes, level=COMPRESSION_LEVEL) - compressed = compressed if not tamper_compression else compressed[::-1] - file_key_bytes = ( - os.urandom(SYMMETRIC_KEY_LENGTH) if inner_key is None else inner_key - ) + compressed = zlib.compress(payload_bytes, level=COMPRESSION_LEVEL) + compressed = compressed if not tamper_compression else compressed[::-1] + file_key_bytes = os.urandom(SYMMETRIC_KEY_LENGTH) if inner_key is None else inner_key - ciphertext = encrypt_and_sign(compressed, key=file_key_bytes) - ciphertext = ciphertext if not tamper_ciphertext else ciphertext[::-1] - ct_with_key = ciphertext + file_key_bytes + ciphertext = encrypt_and_sign(compressed, key=file_key_bytes) + ciphertext = ciphertext if not tamper_ciphertext else ciphertext[::-1] + ct_with_key = ciphertext + file_key_bytes - time_bytes = int_to_bytes(2) if packet_time is None else packet_time - size_bytes = ( - int_to_bytes(file_data_size) if packet_size is None else packet_size - ) - file_name_bytes = b"test_file.txt" if file_name is None else file_name - delimiter = US_BYTE if not omit_header_delim else b"" + time_bytes = int_to_bytes(2) if packet_time is None else packet_time + size_bytes = int_to_bytes(file_data_size) if packet_size is None else packet_size + file_name_bytes = b'test_file.txt' if file_name is None else file_name + delimiter = US_BYTE if not omit_header_delim else b'' payload = time_bytes + size_bytes + file_name_bytes + delimiter + ct_with_key @@ -335,17 +282,17 @@ def assembly_packet_creator( if packet_type in [MESSAGE, COMMAND]: compressed = zlib.compress(payload, level=COMPRESSION_LEVEL) - payload = compressed if not tamper_compression else compressed[::-1] + payload = compressed if not tamper_compression else compressed[::-1] if len(payload) < PADDING_LENGTH: - padded = byte_padding(payload) + padded = byte_padding(payload) packet_list = [s_header + padded] else: if packet_type == MESSAGE: - msg_key = csprng() if inner_key is None else inner_key - payload = encrypt_and_sign(payload, msg_key) - payload = payload if not tamper_ciphertext else payload[::-1] + msg_key = csprng() if inner_key is None else inner_key + payload = encrypt_and_sign(payload, msg_key) + payload = payload if not tamper_ciphertext else payload[::-1] payload += msg_key elif packet_type == FILE: @@ -354,7 +301,7 @@ def assembly_packet_creator( elif packet_type == COMMAND: command_hash = blake2b(payload) command_hash = command_hash if not tamper_cmd_hash else command_hash[::-1] - payload += command_hash + payload += command_hash padded = payload if no_padding else byte_padding(payload) p_list = split_byte_string(padded, item_len=split_length) @@ -362,11 +309,9 @@ def assembly_packet_creator( if packet_type == FILE: p_list[0] = int_to_bytes(len(p_list)) + p_list[0][FILE_PACKET_CTR_LENGTH:] - packet_list = ( - [l_header + p_list[0]] - + [a_header + p for p in p_list[1:-1]] - + [e_header + p_list[-1]] - ) + packet_list = ([l_header + p_list[0]] + + [a_header + p for p in p_list[1:-1]] + + [e_header + p_list[-1]]) if not encrypt_packet: return packet_list @@ -375,40 +320,28 @@ def assembly_packet_creator( # | Encrypt assembly packets to create datagrams | # ------------------------------------------------------------------------------------------------------------------ - message_key = SYMMETRIC_KEY_LENGTH * b"\x01" if message_key is None else message_key - header_key = SYMMETRIC_KEY_LENGTH * b"\x01" if header_key is None else 
header_key + message_key = SYMMETRIC_KEY_LENGTH * b'\x01' if message_key is None else message_key + header_key = SYMMETRIC_KEY_LENGTH * b'\x01' if header_key is None else header_key for _ in range(message_number): - message_key = blake2b( - message_key + int_to_bytes(harac), digest_size=SYMMETRIC_KEY_LENGTH - ) - harac += 1 + message_key = blake2b(message_key + int_to_bytes(harac), digest_size=SYMMETRIC_KEY_LENGTH) + harac += 1 assembly_ct_list = [] for packet in packet_list: - harac_in_bytes = int_to_bytes(harac) - encrypted_harac = encrypt_and_sign(harac_in_bytes, header_key) - encrypted_message = encrypt_and_sign(packet, message_key) + harac_in_bytes = int_to_bytes(harac) + encrypted_harac = encrypt_and_sign(harac_in_bytes, header_key) + encrypted_message = encrypt_and_sign(packet, message_key) - encrypted_harac = ( - encrypted_harac if not tamper_harac else tamper_last_byte(encrypted_harac) - ) - encrypted_message = ( - encrypted_message - if not tamper_message - else tamper_last_byte(encrypted_message) - ) + encrypted_harac = encrypted_harac if not tamper_harac else tamper_last_byte(encrypted_harac) + encrypted_message = encrypted_message if not tamper_message else tamper_last_byte(encrypted_message) - encrypted_packet = ( - onion_pub_key + origin_header + encrypted_harac + encrypted_message - ) + encrypted_packet = onion_pub_key + origin_header + encrypted_harac + encrypted_message assembly_ct_list.append(encrypted_packet) - message_key = blake2b( - message_key + int_to_bytes(harac), digest_size=SYMMETRIC_KEY_LENGTH - ) + message_key = blake2b(message_key + int_to_bytes(harac), digest_size=SYMMETRIC_KEY_LENGTH) harac += 1 return assembly_ct_list @@ -416,61 +349,59 @@ def assembly_packet_creator( def gen_queue_dict() -> Dict[bytes, Queue]: """Create dictionary that has all the queues used by TFC processes.""" - transmitter_queues = [ - MESSAGE_PACKET_QUEUE, - COMMAND_PACKET_QUEUE, - TM_MESSAGE_PACKET_QUEUE, - TM_FILE_PACKET_QUEUE, - TM_COMMAND_PACKET_QUEUE, - TM_NOISE_PACKET_QUEUE, - TM_NOISE_COMMAND_QUEUE, - RELAY_PACKET_QUEUE, - LOG_PACKET_QUEUE, - LOG_SETTING_QUEUE, - TRAFFIC_MASKING_QUEUE, - LOGFILE_MASKING_QUEUE, - KDB_M_KEY_CHANGE_HALT_HEADER, - KEY_MANAGEMENT_QUEUE, - KEY_MGMT_ACK_QUEUE, - SENDER_MODE_QUEUE, - WINDOW_SELECT_QUEUE, - EXIT_QUEUE, - ] + transmitter_queues = [MESSAGE_PACKET_QUEUE, + COMMAND_PACKET_QUEUE, + TM_MESSAGE_PACKET_QUEUE, + TM_FILE_PACKET_QUEUE, + TM_COMMAND_PACKET_QUEUE, + TM_NOISE_PACKET_QUEUE, + TM_NOISE_COMMAND_QUEUE, + RELAY_PACKET_QUEUE, + LOG_PACKET_QUEUE, + LOG_SETTING_QUEUE, + TRAFFIC_MASKING_QUEUE, + LOGFILE_MASKING_QUEUE, + KDB_M_KEY_CHANGE_HALT_HEADER, + KEY_MANAGEMENT_QUEUE, + KEY_MGMT_ACK_QUEUE, + SENDER_MODE_QUEUE, + WINDOW_SELECT_QUEUE, + EXIT_QUEUE] - receiver_queues = [ - GATEWAY_QUEUE, - LOCAL_KEY_DATAGRAM_HEADER, - MESSAGE_DATAGRAM_HEADER, - FILE_DATAGRAM_HEADER, - COMMAND_DATAGRAM_HEADER, - EXIT_QUEUE, - ] + receiver_queues = [GATEWAY_QUEUE, + LOCAL_KEY_DATAGRAM_HEADER, + MESSAGE_DATAGRAM_HEADER, + FILE_DATAGRAM_HEADER, + COMMAND_DATAGRAM_HEADER, + EXIT_QUEUE] - relay_queues = [ - GATEWAY_QUEUE, - DST_MESSAGE_QUEUE, - M_TO_FLASK_QUEUE, - F_TO_FLASK_QUEUE, - SRC_TO_RELAY_QUEUE, - DST_COMMAND_QUEUE, - CONTACT_MGMT_QUEUE, - C_REQ_STATE_QUEUE, - URL_TOKEN_QUEUE, - GROUP_MSG_QUEUE, - CONTACT_REQ_QUEUE, - C_REQ_MGMT_QUEUE, - GROUP_MGMT_QUEUE, - ONION_CLOSE_QUEUE, - ONION_KEY_QUEUE, - TOR_DATA_QUEUE, - EXIT_QUEUE, - ] + relay_queues = [GATEWAY_QUEUE, + DST_MESSAGE_QUEUE, + M_TO_FLASK_QUEUE, + F_TO_FLASK_QUEUE, + SRC_TO_RELAY_QUEUE, + 
DST_COMMAND_QUEUE, + CONTACT_MGMT_QUEUE, + C_REQ_STATE_QUEUE, + URL_TOKEN_QUEUE, + GROUP_MSG_QUEUE, + CONTACT_REQ_QUEUE, + C_REQ_MGMT_QUEUE, + GROUP_MGMT_QUEUE, + ONION_CLOSE_QUEUE, + ONION_KEY_QUEUE, + TOR_DATA_QUEUE, + EXIT_QUEUE, + ACCOUNT_CHECK_QUEUE, + ACCOUNT_SEND_QUEUE, + USER_ACCOUNT_QUEUE, + PUB_KEY_CHECK_QUEUE, + PUB_KEY_SEND_QUEUE, + GUI_INPUT_QUEUE] unit_test_queue = [UNIT_TEST_QUEUE] - queue_list = set( - transmitter_queues + receiver_queues + relay_queues + unit_test_queue - ) + queue_list = set(transmitter_queues + receiver_queues + relay_queues + unit_test_queue) queue_dict = dict() for q in queue_list: diff --git a/tfc.py b/tfc.py index 8dc7ad6..3b3b768 100755 --- a/tfc.py +++ b/tfc.py @@ -3,7 +3,7 @@ """ TFC - Onion-routed, endpoint secure messaging system -Copyright (C) 2013-2019 Markus Ottela +Copyright (C) 2013-2020 Markus Ottela This file is part of TFC. @@ -23,53 +23,33 @@ import os import sys from multiprocessing import Process, Queue -from typing import Any, Dict +from typing import Any, Dict -from src.common.crypto import check_kernel_version -from src.common.database import MessageLog -from src.common.db_contacts import ContactList -from src.common.db_groups import GroupList -from src.common.db_keys import KeyList -from src.common.db_logs import log_writer_loop +from src.common.crypto import check_kernel_version +from src.common.database import MessageLog +from src.common.db_contacts import ContactList +from src.common.db_groups import GroupList +from src.common.db_keys import KeyList +from src.common.db_logs import log_writer_loop from src.common.db_masterkey import MasterKey -from src.common.db_onion import OnionService -from src.common.db_settings import Settings -from src.common.gateway import Gateway, gateway_loop -from src.common.misc import ensure_dir, monitor_processes, process_arguments -from src.common.output import print_title -from src.common.statics import ( - COMMAND_DATAGRAM_HEADER, - COMMAND_PACKET_QUEUE, - DIR_TFC, - EXIT_QUEUE, - DIR_USER_DATA, - FILE_DATAGRAM_HEADER, - GATEWAY_QUEUE, - KEY_MANAGEMENT_QUEUE, - KEY_MGMT_ACK_QUEUE, - LOCAL_KEY_DATAGRAM_HEADER, - LOGFILE_MASKING_QUEUE, - LOG_PACKET_QUEUE, - LOG_SETTING_QUEUE, - MESSAGE_DATAGRAM_HEADER, - MESSAGE_PACKET_QUEUE, - RELAY_PACKET_QUEUE, - SENDER_MODE_QUEUE, - TM_COMMAND_PACKET_QUEUE, - TM_FILE_PACKET_QUEUE, - TM_MESSAGE_PACKET_QUEUE, - TM_NOISE_COMMAND_QUEUE, - TM_NOISE_PACKET_QUEUE, - TRAFFIC_MASKING_QUEUE, - TX, - WINDOW_SELECT_QUEUE, -) +from src.common.db_onion import OnionService +from src.common.db_settings import Settings +from src.common.gateway import Gateway, gateway_loop +from src.common.misc import ensure_dir, monitor_processes, process_arguments +from src.common.output import print_title +from src.common.statics import (COMMAND_DATAGRAM_HEADER, COMMAND_PACKET_QUEUE, DIR_TFC, EXIT_QUEUE, DIR_USER_DATA, + FILE_DATAGRAM_HEADER, GATEWAY_QUEUE, KEY_MANAGEMENT_QUEUE, KEY_MGMT_ACK_QUEUE, + LOCAL_KEY_DATAGRAM_HEADER, LOGFILE_MASKING_QUEUE, LOG_PACKET_QUEUE, + LOG_SETTING_QUEUE, MESSAGE_DATAGRAM_HEADER, MESSAGE_PACKET_QUEUE, + RELAY_PACKET_QUEUE, SENDER_MODE_QUEUE, TM_COMMAND_PACKET_QUEUE, + TM_FILE_PACKET_QUEUE, TM_MESSAGE_PACKET_QUEUE, TM_NOISE_COMMAND_QUEUE, + TM_NOISE_PACKET_QUEUE, TRAFFIC_MASKING_QUEUE, TX, WINDOW_SELECT_QUEUE) -from src.transmitter.input_loop import input_loop -from src.transmitter.sender_loop import sender_loop +from src.transmitter.input_loop import input_loop +from src.transmitter.sender_loop import sender_loop from src.transmitter.traffic_masking import 
noise_loop -from src.receiver.output_loop import output_loop +from src.receiver.output_loop import output_loop from src.receiver.receiver_loop import receiver_loop @@ -121,87 +101,56 @@ def main() -> None: print_title(operation) - master_key = MasterKey(operation, local_test) - gateway = Gateway(operation, local_test, data_diode_sockets) - settings = Settings(master_key, operation, local_test) + master_key = MasterKey( operation, local_test) + gateway = Gateway( operation, local_test, data_diode_sockets) + settings = Settings( master_key, operation, local_test) contact_list = ContactList(master_key, settings) - key_list = KeyList(master_key, settings) - group_list = GroupList(master_key, settings, contact_list) - message_log = MessageLog( - f"{DIR_USER_DATA}{settings.software_operation}_logs", master_key.master_key - ) + key_list = KeyList( master_key, settings) + group_list = GroupList( master_key, settings, contact_list) + message_log = MessageLog(f'{DIR_USER_DATA}{settings.software_operation}_logs', master_key.master_key) if settings.software_operation == TX: onion_service = OnionService(master_key) - queues = { - MESSAGE_PACKET_QUEUE: Queue(), # Standard messages - COMMAND_PACKET_QUEUE: Queue(), # Standard commands - TM_MESSAGE_PACKET_QUEUE: Queue(), # Traffic masking messages - TM_FILE_PACKET_QUEUE: Queue(), # Traffic masking files - TM_COMMAND_PACKET_QUEUE: Queue(), # Traffic masking commands - TM_NOISE_PACKET_QUEUE: Queue(), # Traffic masking noise packets - TM_NOISE_COMMAND_QUEUE: Queue(), # Traffic masking noise commands - RELAY_PACKET_QUEUE: Queue(), # Unencrypted datagrams to Networked Computer - LOG_PACKET_QUEUE: Queue(), # `log_writer_loop` assembly packets to be logged - LOG_SETTING_QUEUE: Queue(), # `log_writer_loop` logging state management between noise packets - TRAFFIC_MASKING_QUEUE: Queue(), # `log_writer_loop` traffic masking setting management commands - LOGFILE_MASKING_QUEUE: Queue(), # `log_writer_loop` logfile masking setting management commands - KEY_MANAGEMENT_QUEUE: Queue(), # `sender_loop` key database management commands - KEY_MGMT_ACK_QUEUE: Queue(), # `sender_loop` key management ACK messages to `input_loop` - SENDER_MODE_QUEUE: Queue(), # `sender_loop` default/traffic masking mode switch commands - WINDOW_SELECT_QUEUE: Queue(), # `sender_loop` window selection commands during traffic masking - EXIT_QUEUE: Queue(), # EXIT/WIPE signal from `input_loop` to `main` - } # type: Dict[bytes, Queue[Any]] + queues = {MESSAGE_PACKET_QUEUE: Queue(), # Standard messages + COMMAND_PACKET_QUEUE: Queue(), # Standard commands + TM_MESSAGE_PACKET_QUEUE: Queue(), # Traffic masking messages + TM_FILE_PACKET_QUEUE: Queue(), # Traffic masking files + TM_COMMAND_PACKET_QUEUE: Queue(), # Traffic masking commands + TM_NOISE_PACKET_QUEUE: Queue(), # Traffic masking noise packets + TM_NOISE_COMMAND_QUEUE: Queue(), # Traffic masking noise commands + RELAY_PACKET_QUEUE: Queue(), # Unencrypted datagrams to Networked Computer + LOG_PACKET_QUEUE: Queue(), # `log_writer_loop` assembly packets to be logged + LOG_SETTING_QUEUE: Queue(), # `log_writer_loop` logging state management between noise packets + TRAFFIC_MASKING_QUEUE: Queue(), # `log_writer_loop` traffic masking setting management commands + LOGFILE_MASKING_QUEUE: Queue(), # `log_writer_loop` logfile masking setting management commands + KEY_MANAGEMENT_QUEUE: Queue(), # `sender_loop` key database management commands + KEY_MGMT_ACK_QUEUE: Queue(), # `sender_loop` key management ACK messages to `input_loop` + SENDER_MODE_QUEUE: 
Queue(), # `sender_loop` default/traffic masking mode switch commands + WINDOW_SELECT_QUEUE: Queue(), # `sender_loop` window selection commands during traffic masking + EXIT_QUEUE: Queue() # EXIT/WIPE signal from `input_loop` to `main` + } # type: Dict[bytes, Queue[Any]] - process_list = [ - Process( - target=input_loop, - args=( - queues, - settings, - gateway, - contact_list, - group_list, - master_key, - onion_service, - sys.stdin.fileno(), - ), - ), - Process(target=sender_loop, args=(queues, settings, gateway, key_list)), - Process(target=log_writer_loop, args=(queues, settings, message_log)), - Process(target=noise_loop, args=(queues, contact_list)), - Process(target=noise_loop, args=(queues,)), - ] + process_list = [Process(target=input_loop, args=(queues, settings, gateway, contact_list, group_list, + master_key, onion_service, sys.stdin.fileno())), + Process(target=sender_loop, args=(queues, settings, gateway, key_list)), + Process(target=log_writer_loop, args=(queues, settings, message_log)), + Process(target=noise_loop, args=(queues, contact_list)), + Process(target=noise_loop, args=(queues,))] else: - queues = { - GATEWAY_QUEUE: Queue(), # Buffer for incoming datagrams - LOCAL_KEY_DATAGRAM_HEADER: Queue(), # Local key datagrams - MESSAGE_DATAGRAM_HEADER: Queue(), # Message datagrams - FILE_DATAGRAM_HEADER: Queue(), # File datagrams - COMMAND_DATAGRAM_HEADER: Queue(), # Command datagrams - EXIT_QUEUE: Queue(), # EXIT/WIPE signal from `output_loop` to `main` - } + queues = {GATEWAY_QUEUE: Queue(), # Buffer for incoming datagrams + LOCAL_KEY_DATAGRAM_HEADER: Queue(), # Local key datagrams + MESSAGE_DATAGRAM_HEADER: Queue(), # Message datagrams + FILE_DATAGRAM_HEADER: Queue(), # File datagrams + COMMAND_DATAGRAM_HEADER: Queue(), # Command datagrams + EXIT_QUEUE: Queue() # EXIT/WIPE signal from `output_loop` to `main` + } - process_list = [ - Process(target=gateway_loop, args=(queues, gateway)), - Process(target=receiver_loop, args=(queues, gateway)), - Process( - target=output_loop, - args=( - queues, - gateway, - settings, - contact_list, - key_list, - group_list, - master_key, - message_log, - sys.stdin.fileno(), - ), - ), - ] + process_list = [Process(target=gateway_loop, args=(queues, gateway)), + Process(target=receiver_loop, args=(queues, gateway)), + Process(target=output_loop, args=(queues, gateway, settings, contact_list, key_list, + group_list, master_key, message_log, sys.stdin.fileno()))] for p in process_list: p.start() @@ -209,5 +158,5 @@ def main() -> None: monitor_processes(process_list, settings.software_operation, queues) -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/uninstall.sh b/uninstall.sh index a93a49c..466a15e 100644 --- a/uninstall.sh +++ b/uninstall.sh @@ -1,7 +1,7 @@ #!/usr/bin/env bash # TFC - Onion-routed, endpoint secure messaging system -# Copyright (C) 2013-2019 Markus Ottela +# Copyright (C) 2013-2020 Markus Ottela # # This file is part of TFC. #
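The tfc.py hunks above only reflow TFC's process wiring without changing its behaviour: a dictionary of multiprocessing queues keyed by header constants is handed to every worker Process, and main() starts the workers and then monitors them. Below is a minimal, self-contained sketch of that queue-dictionary pattern; the MESSAGE_QUEUE/EXIT_QUEUE keys and the echo worker are hypothetical stand-ins, not TFC's actual queue constants or loops.

from multiprocessing import Process, Queue
from typing import Any, Dict

MESSAGE_QUEUE = b'message_queue'  # Hypothetical key: payloads for the worker
EXIT_QUEUE    = b'exit_queue'     # Hypothetical key: exit signal back to `main`


def echo_loop(queues: Dict[bytes, 'Queue[Any]']) -> None:
    """Forward one message from MESSAGE_QUEUE to EXIT_QUEUE, then return."""
    queues[EXIT_QUEUE].put(queues[MESSAGE_QUEUE].get())


def main() -> None:
    queues = {MESSAGE_QUEUE: Queue(),  # Messages for the worker process
              EXIT_QUEUE:    Queue()   # Exit signal from worker to `main`
              }  # type: Dict[bytes, Queue]

    process_list = [Process(target=echo_loop, args=(queues,))]

    for p in process_list:
        p.start()

    queues[MESSAGE_QUEUE].put('hello')
    print(queues[EXIT_QUEUE].get())  # Blocks until the worker echoes the message

    for p in process_list:
        p.join()


if __name__ == '__main__':
    main()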