diff --git a/.coveragerc b/.coveragerc
index 74613d3..a3eb218 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -4,24 +4,11 @@
# Regexes for lines to exclude from consideration
exclude_lines =
+ # Debugging code for third-party modules
+ pragma: no cover
+
# TYPE_CHECKING is True only during type checking
if typing.TYPE_CHECKING:
- # Ignore catchers for KeyboardInterrupt (^C) and EOF (^D) signals from user:
- except EOFError
- except KeyboardInterrupt:
- except \(EOFError, KeyboardInterrupt\):
- except \(FunctionReturn, KeyboardInterrupt\):
-
- # Ignore errors specific to gateway libraries
- except SerialException:
- except socket.error
- except ConnectionRefusedError:
-
- # Ignore lines for Settings database testing that
- # can not be mocked without overwriting user data
- if operation == RX:
-
-omit =
- # Since dbus is not available for python3.6, it is currently not possible to test nh/pidgin.py
- src/nh/pidgin.py
+ # Ignore Flask server init under standard operation
+ else: \# not unittest
diff --git a/.travis.yml b/.travis.yml
index 3c15f75..0e7da63 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -3,13 +3,23 @@ language: python
python:
- '3.6'
+dist: xenial
+sudo: required
+
before_install:
- - sudo apt install python3-tk
- - export TZ=Europe/Helsinki
+ - echo "deb https://deb.torproject.org/torproject.org xenial main" | sudo tee -a /etc/apt/sources.list.d/torproject.list
+ - echo "deb-src https://deb.torproject.org/torproject.org xenial main" | sudo tee -a /etc/apt/sources.list.d/torproject.list
+ - echo "deb https://deb.torproject.org/torproject.org tor-nightly-master-xenial main" | sudo tee -a /etc/apt/sources.list.d/torproject.list
+ - echo "deb-src https://deb.torproject.org/torproject.org tor-nightly-master-xenial main" | sudo tee -a /etc/apt/sources.list.d/torproject.list
+ - gpg --keyserver hkp://keys.gnupg.net --recv-keys A3C4F0F979CAA22CDBA8F512EE8CBC9E886DDD89
+ - gpg --export A3C4F0F979CAA22CDBA8F512EE8CBC9E886DDD89 | sudo apt-key add -
+ - sudo apt update
+ - sudo apt install python3-setuptools python3-tk tor -y
install:
- pip install pytest pytest-cov pyyaml coveralls
- - pip install -r requirements.txt --require-hashes
+ - pip install -r requirements.txt --require-hashes
+ - pip install -r requirements-relay.txt --require-hashes
script:
- py.test --cov=src tests/
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..f288702
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,674 @@
+ GNU GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc.
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users. We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors. You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights. Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received. You must make sure that they, too, receive
+or can get the source code. And you must show them these terms so they
+know their rights.
+
+ Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+ For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software. For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+ Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so. This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software. The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable. Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products. If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+ Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary. To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Use with the GNU Affero General Public License.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+    along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+ This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+ The GNU General Public License does not permit incorporating your program
+into proprietary programs. If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License. But first, please read
+<https://www.gnu.org/licenses/why-not-lgpl.html>.
diff --git a/LICENSE-3RD-PARTY b/LICENSE-3RD-PARTY
new file mode 100644
index 0000000..95e1dda
--- /dev/null
+++ b/LICENSE-3RD-PARTY
@@ -0,0 +1,2906 @@
+This file contains the third-party licenses for libraries and software downloaded
+by the TFC installer.
+
+-----------------------------------------------------------------------------
+ BSD 3-Clause License
+
+ applies to:
+ - The pySerial library, Copyright © 2001-2016, Chris Liechti
+ (https://github.com/pyserial/pyserial)
+
+ - The Cryptography library, Copyright © Individual contributors
+ (https://github.com/pyca/cryptography)
+
+ - The pycparser library, Copyright © 2008-2017, Eli Bendersky
+ (https://github.com/eliben/pycparser)
+
+ - The Six library, Copyright © 2010-2017, Benjamin Peterson
+ (https://pypi.org/project/six/)
+
+ - The PySocks library, Copyright © 2006, Dan-Haim
+ (https://github.com/Anorov/PySocks)
+
+ - The IDNA library, Copyright © 2013-2019, Kim Davies
+ (https://github.com/kjd/idna)
+
+ - The Flask web application framework, Copyright © 2010, the Pallets team
+ (https://github.com/pallets/flask)
+
+ - The Click library, Copyright © 2010, the Pallets team
+ (https://github.com/pallets/click)
+
+ - The It's Dangerous library, Copyright © 2011, the Pallets team
+ (https://github.com/pallets/itsdangerous)
+
+ - The MarkupSafe library, Copyright © 2010, the Pallets team
+ (https://github.com/pallets/markupsafe)
+
+ - The Werkzeug library, Copyright © 2007, the Pallets team
+ (https://github.com/pallets/werkzeug)
+
+ - The Jinja2 library, Copyright © 2009, the Jinja Team, see AUTHORS for more details.
+ (https://github.com/pallets/jinja)
+
+ - The Tor application, Copyright (c) 2001-2004, Roger Dingledine
+ Copyright (c) 2004-2006, Roger Dingledine, Nick Mathewson
+ Copyright (c) 2007-2019, The Tor Project, Inc.
+ (https://torproject.org)
+ (See 3rd party licences at https://gitweb.torproject.org/tor.git/tree/LICENSE)
+
+- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+* Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+-----------------------------------------------------------------------------
+ MIT License
+
+ applies to:
+ - The Argon2 library, Copyright © 2015, Hynek Schlawack
+ (https://github.com/hynek/argon2_cffi)
+
+ - The src.common.encoding Base58 implementation, Copyright © 2015, David Keijser
+ (https://github.com/keis/base58)
+
+ - The cffi library, Copyright © 2012-2019, Armin Rigo, Maciej Fijalkowski
+ (https://bitbucket.org/cffi/cffi/overview)
+
+ - The urllib3 library, Copyright © 2008-2016, Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+ (https://github.com/urllib3/urllib3)
+
+ - The virtualenv tool, Copyright © 2007, Ian Bicking and Contributors
+ Copyright © 2009, Ian Bicking, The Open Planning Project
+ Copyright © 2011-2016, The virtualenv developers
+ (https://github.com/pypa/virtualenv)
+
+ - The Pip tool, Copyright (c) 2008-2019 The pip developers (see AUTHORS.txt file)
+ (https://github.com/pypa/pip)
+
+ - The Setuptools library, Copyright (C) 2016 Jason R Coombs
+ (https://github.com/pypa/setuptools)
+
+- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+
+-----------------------------------------------------------------------------
+ ISC License
+
+ applies to:
+ - The libsodium library, Copyright © 2013-2019, Frank Denis
+ (https://github.com/jedisct1/libsodium)
+
+- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+
+-----------------------------------------------------------------------------
+ GNU GENERAL PUBLIC LICENSE
+ Version 2, June 1991
+
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ applies to:
+ - The net-tools application, Copyright 1996 © Bernd Eckenfels
+ (https://sourceforge.net/projects/net-tools/)
+
+ - The Terminator application, Copyright © Stephen Boddy, Chris Jones
+ (https://gnometerminator.blogspot.com/p/introduction.html)
+
+ - The Git application, Copyright © Linus Torvalds
+ (https://git.kernel.org/pub/scm/git/git.git/tree/)
+
+- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+
+ Preamble
+
+ The licenses for most software are designed to take away your
+freedom to share and change it. By contrast, the GNU General Public
+License is intended to guarantee your freedom to share and change free
+software--to make sure the software is free for all its users. This
+General Public License applies to most of the Free Software
+Foundation's software and to any other program whose authors commit to
+using it. (Some other Free Software Foundation software is covered by
+the GNU Lesser General Public License instead.) You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+this service if you wish), that you receive source code or can get it
+if you want it, that you can change the software or use pieces of it
+in new free programs; and that you know you can do these things.
+
+ To protect your rights, we need to make restrictions that forbid
+anyone to deny you these rights or to ask you to surrender the rights.
+These restrictions translate to certain responsibilities for you if you
+distribute copies of the software, or if you modify it.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must give the recipients all the rights that
+you have. You must make sure that they, too, receive or can get the
+source code. And you must show them these terms so they know their
+rights.
+
+ We protect your rights with two steps: (1) copyright the software, and
+(2) offer you this license which gives you legal permission to copy,
+distribute and/or modify the software.
+
+ Also, for each author's protection and ours, we want to make certain
+that everyone understands that there is no warranty for this free
+software. If the software is modified by someone else and passed on, we
+want its recipients to know that what they have is not the original, so
+that any problems introduced by others will not reflect on the original
+authors' reputations.
+
+ Finally, any free program is threatened constantly by software
+patents. We wish to avoid the danger that redistributors of a free
+program will individually obtain patent licenses, in effect making the
+program proprietary. To prevent this, we have made it clear that any
+patent must be licensed for everyone's free use or not licensed at all.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ GNU GENERAL PUBLIC LICENSE
+ TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+ 0. This License applies to any program or other work which contains
+a notice placed by the copyright holder saying it may be distributed
+under the terms of this General Public License. The "Program", below,
+refers to any such program or work, and a "work based on the Program"
+means either the Program or any derivative work under copyright law:
+that is to say, a work containing the Program or a portion of it,
+either verbatim or with modifications and/or translated into another
+language. (Hereinafter, translation is included without limitation in
+the term "modification".) Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope. The act of
+running the Program is not restricted, and the output from the Program
+is covered only if its contents constitute a work based on the
+Program (independent of having been made by running the Program).
+Whether that is true depends on what the Program does.
+
+ 1. You may copy and distribute verbatim copies of the Program's
+source code as you receive it, in any medium, provided that you
+conspicuously and appropriately publish on each copy an appropriate
+copyright notice and disclaimer of warranty; keep intact all the
+notices that refer to this License and to the absence of any warranty;
+and give any other recipients of the Program a copy of this License
+along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and
+you may at your option offer warranty protection in exchange for a fee.
+
+ 2. You may modify your copy or copies of the Program or any portion
+of it, thus forming a work based on the Program, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+ a) You must cause the modified files to carry prominent notices
+ stating that you changed the files and the date of any change.
+
+ b) You must cause any work that you distribute or publish, that in
+ whole or in part contains or is derived from the Program or any
+ part thereof, to be licensed as a whole at no charge to all third
+ parties under the terms of this License.
+
+ c) If the modified program normally reads commands interactively
+ when run, you must cause it, when started running for such
+ interactive use in the most ordinary way, to print or display an
+ announcement including an appropriate copyright notice and a
+ notice that there is no warranty (or else, saying that you provide
+ a warranty) and that users may redistribute the program under
+ these conditions, and telling the user how to view a copy of this
+ License. (Exception: if the Program itself is interactive but
+ does not normally print such an announcement, your work based on
+ the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole. If
+identifiable sections of that work are not derived from the Program,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works. But when you
+distribute the same sections as part of a whole which is a work based
+on the Program, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program
+with the Program (or with a work based on the Program) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+ 3. You may copy and distribute the Program (or a work based on it,
+under Section 2) in object code or executable form under the terms of
+Sections 1 and 2 above provided that you also do one of the following:
+
+ a) Accompany it with the complete corresponding machine-readable
+ source code, which must be distributed under the terms of Sections
+ 1 and 2 above on a medium customarily used for software interchange; or,
+
+ b) Accompany it with a written offer, valid for at least three
+ years, to give any third party, for a charge no more than your
+ cost of physically performing source distribution, a complete
+ machine-readable copy of the corresponding source code, to be
+ distributed under the terms of Sections 1 and 2 above on a medium
+ customarily used for software interchange; or,
+
+ c) Accompany it with the information you received as to the offer
+ to distribute corresponding source code. (This alternative is
+ allowed only for noncommercial distribution and only if you
+ received the program in object code or executable form with such
+ an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for
+making modifications to it. For an executable work, complete source
+code means all the source code for all modules it contains, plus any
+associated interface definition files, plus the scripts used to
+control compilation and installation of the executable. However, as a
+special exception, the source code distributed need not include
+anything that is normally distributed (in either source or binary
+form) with the major components (compiler, kernel, and so on) of the
+operating system on which the executable runs, unless that component
+itself accompanies the executable.
+
+If distribution of executable or object code is made by offering
+access to copy from a designated place, then offering equivalent
+access to copy the source code from the same place counts as
+distribution of the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+ 4. You may not copy, modify, sublicense, or distribute the Program
+except as expressly provided under this License. Any attempt
+otherwise to copy, modify, sublicense or distribute the Program is
+void, and will automatically terminate your rights under this License.
+However, parties who have received copies, or rights, from you under
+this License will not have their licenses terminated so long as such
+parties remain in full compliance.
+
+ 5. You are not required to accept this License, since you have not
+signed it. However, nothing else grants you permission to modify or
+distribute the Program or its derivative works. These actions are
+prohibited by law if you do not accept this License. Therefore, by
+modifying or distributing the Program (or any work based on the
+Program), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Program or works based on it.
+
+ 6. Each time you redistribute the Program (or any work based on the
+Program), the recipient automatically receives a license from the
+original licensor to copy, distribute or modify the Program subject to
+these terms and conditions. You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties to
+this License.
+
+ 7. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Program at all. For example, if a patent
+license would not permit royalty-free redistribution of the Program by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under
+any particular circumstance, the balance of the section is intended to
+apply and the section as a whole is intended to apply in other
+circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system, which is
+implemented by public license practices. Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+ 8. If the distribution and/or use of the Program is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Program under this License
+may add an explicit geographical distribution limitation excluding
+those countries, so that distribution is permitted only in or among
+countries not thus excluded. In such case, this License incorporates
+the limitation as if written in the body of this License.
+
+ 9. The Free Software Foundation may publish revised and/or new versions
+of the General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Program
+specifies a version number of this License which applies to it and "any
+later version", you have the option of following the terms and conditions
+either of that version or of any later version published by the Free
+Software Foundation. If the Program does not specify a version number of
+this License, you may choose any version ever published by the Free Software
+Foundation.
+
+ 10. If you wish to incorporate parts of the Program into other free
+programs whose distribution conditions are different, write to the author
+to ask for permission. For software which is copyrighted by the Free
+Software Foundation, write to the Free Software Foundation; we sometimes
+make exceptions for this. Our decision will be guided by the two goals
+of preserving the free status of all derivatives of our free software and
+of promoting the sharing and reuse of software generally.
+
+ NO WARRANTY
+
+ 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
+FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
+OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
+PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
+OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
+TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
+PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
+REPAIR OR CORRECTION.
+
+ 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
+REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
+INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
+OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
+TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
+YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
+PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGES.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 2 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License along
+ with this program; if not, write to the Free Software Foundation, Inc.,
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this
+when it starts in an interactive mode:
+
+ Gnomovision version 69, Copyright (C) year name of author
+ Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, the commands you use may
+be called something other than `show w' and `show c'; they could even be
+mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the program, if
+necessary. Here is a sample; alter the names:
+
+ Yoyodyne, Inc., hereby disclaims all copyright interest in the program
+ `Gnomovision' (which makes passes at compilers) written by James Hacker.
+
+  <signature of Ty Coon>, 1 April 1989
+ Ty Coon, President of Vice
+
+This General Public License does not permit incorporating your program into
+proprietary programs. If your program is a subroutine library, you may
+consider it more useful to permit linking proprietary applications with the
+library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.
+
+
+-----------------------------------------------------------------------------
+ GNU GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ applies to:
+ - The src.relay.onion Tor class, Copyright © 2014-2019, Micah Lee
+ (https://github.com/micahflee/onionshare)
+
+- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+
+ Preamble
+
+ The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users. We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors. You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights. Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received. You must make sure that they, too, receive
+or can get the source code. And you must show them these terms so they
+know their rights.
+
+ Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+ For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software. For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+ Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so. This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software. The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable. Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products. If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+ Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary. To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Use with the GNU Affero General Public License.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+
+ Copyright (C) <year>  <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+ This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+ The GNU General Public License does not permit incorporating your program
+into proprietary programs. If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License. But first, please read
+<https://www.gnu.org/licenses/why-not-lgpl.html>.
+
+
+-----------------------------------------------------------------------------
+ Apache License
+ Version 2.0, January 2004
+ https://www.apache.org/licenses/
+
+ applies to:
+ - The PyNaCl library, Copyright © 2013, Donald Stufft and individual contributors
+ (https://github.com/pyca/pynacl)
+
+ - The Requests library, Copyright © 2019, Kenneth Reitz
+ (https://github.com/requests/requests)
+
+- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+
+-----------------------------------------------------------------------------
+ Mozilla Public License Version 2.0
+
+ applies to:
+ - The Certifi library, Copyright (c) Kenneth Reitz
+ (https://certifi.io/en/latest/)
+
+- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+
+1. Definitions
+--------------
+
+1.1. "Contributor"
+ means each individual or legal entity that creates, contributes to
+ the creation of, or owns Covered Software.
+
+1.2. "Contributor Version"
+ means the combination of the Contributions of others (if any) used
+ by a Contributor and that particular Contributor's Contribution.
+
+1.3. "Contribution"
+ means Covered Software of a particular Contributor.
+
+1.4. "Covered Software"
+ means Source Code Form to which the initial Contributor has attached
+ the notice in Exhibit A, the Executable Form of such Source Code
+ Form, and Modifications of such Source Code Form, in each case
+ including portions thereof.
+
+1.5. "Incompatible With Secondary Licenses"
+ means
+
+ (a) that the initial Contributor has attached the notice described
+ in Exhibit B to the Covered Software; or
+
+ (b) that the Covered Software was made available under the terms of
+ version 1.1 or earlier of the License, but not also under the
+ terms of a Secondary License.
+
+1.6. "Executable Form"
+ means any form of the work other than Source Code Form.
+
+1.7. "Larger Work"
+ means a work that combines Covered Software with other material, in
+ a separate file or files, that is not Covered Software.
+
+1.8. "License"
+ means this document.
+
+1.9. "Licensable"
+ means having the right to grant, to the maximum extent possible,
+ whether at the time of the initial grant or subsequently, any and
+ all of the rights conveyed by this License.
+
+1.10. "Modifications"
+ means any of the following:
+
+ (a) any file in Source Code Form that results from an addition to,
+ deletion from, or modification of the contents of Covered
+ Software; or
+
+ (b) any new file in Source Code Form that contains any Covered
+ Software.
+
+1.11. "Patent Claims" of a Contributor
+ means any patent claim(s), including without limitation, method,
+ process, and apparatus claims, in any patent Licensable by such
+ Contributor that would be infringed, but for the grant of the
+ License, by the making, using, selling, offering for sale, having
+ made, import, or transfer of either its Contributions or its
+ Contributor Version.
+
+1.12. "Secondary License"
+ means either the GNU General Public License, Version 2.0, the GNU
+ Lesser General Public License, Version 2.1, the GNU Affero General
+ Public License, Version 3.0, or any later versions of those
+ licenses.
+
+1.13. "Source Code Form"
+ means the form of the work preferred for making modifications.
+
+1.14. "You" (or "Your")
+ means an individual or a legal entity exercising rights under this
+ License. For legal entities, "You" includes any entity that
+ controls, is controlled by, or is under common control with You. For
+ purposes of this definition, "control" means (a) the power, direct
+ or indirect, to cause the direction or management of such entity,
+ whether by contract or otherwise, or (b) ownership of more than
+ fifty percent (50%) of the outstanding shares or beneficial
+ ownership of such entity.
+
+2. License Grants and Conditions
+--------------------------------
+
+2.1. Grants
+
+Each Contributor hereby grants You a world-wide, royalty-free,
+non-exclusive license:
+
+(a) under intellectual property rights (other than patent or trademark)
+ Licensable by such Contributor to use, reproduce, make available,
+ modify, display, perform, distribute, and otherwise exploit its
+ Contributions, either on an unmodified basis, with Modifications, or
+ as part of a Larger Work; and
+
+(b) under Patent Claims of such Contributor to make, use, sell, offer
+ for sale, have made, import, and otherwise transfer either its
+ Contributions or its Contributor Version.
+
+2.2. Effective Date
+
+The licenses granted in Section 2.1 with respect to any Contribution
+become effective for each Contribution on the date the Contributor first
+distributes such Contribution.
+
+2.3. Limitations on Grant Scope
+
+The licenses granted in this Section 2 are the only rights granted under
+this License. No additional rights or licenses will be implied from the
+distribution or licensing of Covered Software under this License.
+Notwithstanding Section 2.1(b) above, no patent license is granted by a
+Contributor:
+
+(a) for any code that a Contributor has removed from Covered Software;
+ or
+
+(b) for infringements caused by: (i) Your and any other third party's
+ modifications of Covered Software, or (ii) the combination of its
+ Contributions with other software (except as part of its Contributor
+ Version); or
+
+(c) under Patent Claims infringed by Covered Software in the absence of
+ its Contributions.
+
+This License does not grant any rights in the trademarks, service marks,
+or logos of any Contributor (except as may be necessary to comply with
+the notice requirements in Section 3.4).
+
+2.4. Subsequent Licenses
+
+No Contributor makes additional grants as a result of Your choice to
+distribute the Covered Software under a subsequent version of this
+License (see Section 10.2) or under the terms of a Secondary License (if
+permitted under the terms of Section 3.3).
+
+2.5. Representation
+
+Each Contributor represents that the Contributor believes its
+Contributions are its original creation(s) or it has sufficient rights
+to grant the rights to its Contributions conveyed by this License.
+
+2.6. Fair Use
+
+This License is not intended to limit any rights You have under
+applicable copyright doctrines of fair use, fair dealing, or other
+equivalents.
+
+2.7. Conditions
+
+Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
+in Section 2.1.
+
+3. Responsibilities
+-------------------
+
+3.1. Distribution of Source Form
+
+All distribution of Covered Software in Source Code Form, including any
+Modifications that You create or to which You contribute, must be under
+the terms of this License. You must inform recipients that the Source
+Code Form of the Covered Software is governed by the terms of this
+License, and how they can obtain a copy of this License. You may not
+attempt to alter or restrict the recipients' rights in the Source Code
+Form.
+
+3.2. Distribution of Executable Form
+
+If You distribute Covered Software in Executable Form then:
+
+(a) such Covered Software must also be made available in Source Code
+ Form, as described in Section 3.1, and You must inform recipients of
+ the Executable Form how they can obtain a copy of such Source Code
+ Form by reasonable means in a timely manner, at a charge no more
+ than the cost of distribution to the recipient; and
+
+(b) You may distribute such Executable Form under the terms of this
+ License, or sublicense it under different terms, provided that the
+ license for the Executable Form does not attempt to limit or alter
+ the recipients' rights in the Source Code Form under this License.
+
+3.3. Distribution of a Larger Work
+
+You may create and distribute a Larger Work under terms of Your choice,
+provided that You also comply with the requirements of this License for
+the Covered Software. If the Larger Work is a combination of Covered
+Software with a work governed by one or more Secondary Licenses, and the
+Covered Software is not Incompatible With Secondary Licenses, this
+License permits You to additionally distribute such Covered Software
+under the terms of such Secondary License(s), so that the recipient of
+the Larger Work may, at their option, further distribute the Covered
+Software under the terms of either this License or such Secondary
+License(s).
+
+3.4. Notices
+
+You may not remove or alter the substance of any license notices
+(including copyright notices, patent notices, disclaimers of warranty,
+or limitations of liability) contained within the Source Code Form of
+the Covered Software, except that You may alter any license notices to
+the extent required to remedy known factual inaccuracies.
+
+3.5. Application of Additional Terms
+
+You may choose to offer, and to charge a fee for, warranty, support,
+indemnity or liability obligations to one or more recipients of Covered
+Software. However, You may do so only on Your own behalf, and not on
+behalf of any Contributor. You must make it absolutely clear that any
+such warranty, support, indemnity, or liability obligation is offered by
+You alone, and You hereby agree to indemnify every Contributor for any
+liability incurred by such Contributor as a result of warranty, support,
+indemnity or liability terms You offer. You may include additional
+disclaimers of warranty and limitations of liability specific to any
+jurisdiction.
+
+4. Inability to Comply Due to Statute or Regulation
+---------------------------------------------------
+
+If it is impossible for You to comply with any of the terms of this
+License with respect to some or all of the Covered Software due to
+statute, judicial order, or regulation then You must: (a) comply with
+the terms of this License to the maximum extent possible; and (b)
+describe the limitations and the code they affect. Such description must
+be placed in a text file included with all distributions of the Covered
+Software under this License. Except to the extent prohibited by statute
+or regulation, such description must be sufficiently detailed for a
+recipient of ordinary skill to be able to understand it.
+
+5. Termination
+--------------
+
+5.1. The rights granted under this License will terminate automatically
+if You fail to comply with any of its terms. However, if You become
+compliant, then the rights granted under this License from a particular
+Contributor are reinstated (a) provisionally, unless and until such
+Contributor explicitly and finally terminates Your grants, and (b) on an
+ongoing basis, if such Contributor fails to notify You of the
+non-compliance by some reasonable means prior to 60 days after You have
+come back into compliance. Moreover, Your grants from a particular
+Contributor are reinstated on an ongoing basis if such Contributor
+notifies You of the non-compliance by some reasonable means, this is the
+first time You have received notice of non-compliance with this License
+from such Contributor, and You become compliant prior to 30 days after
+Your receipt of the notice.
+
+5.2. If You initiate litigation against any entity by asserting a patent
+infringement claim (excluding declaratory judgment actions,
+counter-claims, and cross-claims) alleging that a Contributor Version
+directly or indirectly infringes any patent, then the rights granted to
+You by any and all Contributors for the Covered Software under Section
+2.1 of this License shall terminate.
+
+5.3. In the event of termination under Sections 5.1 or 5.2 above, all
+end user license agreements (excluding distributors and resellers) which
+have been validly granted by You or Your distributors under this License
+prior to termination shall survive termination.
+
+************************************************************************
+* *
+* 6. Disclaimer of Warranty *
+* ------------------------- *
+* *
+* Covered Software is provided under this License on an "as is" *
+* basis, without warranty of any kind, either expressed, implied, or *
+* statutory, including, without limitation, warranties that the *
+* Covered Software is free of defects, merchantable, fit for a *
+* particular purpose or non-infringing. The entire risk as to the *
+* quality and performance of the Covered Software is with You. *
+* Should any Covered Software prove defective in any respect, You *
+* (not any Contributor) assume the cost of any necessary servicing, *
+* repair, or correction. This disclaimer of warranty constitutes an *
+* essential part of this License. No use of any Covered Software is *
+* authorized under this License except under this disclaimer. *
+* *
+************************************************************************
+
+************************************************************************
+* *
+* 7. Limitation of Liability *
+* -------------------------- *
+* *
+* Under no circumstances and under no legal theory, whether tort *
+* (including negligence), contract, or otherwise, shall any *
+* Contributor, or anyone who distributes Covered Software as *
+* permitted above, be liable to You for any direct, indirect, *
+* special, incidental, or consequential damages of any character *
+* including, without limitation, damages for lost profits, loss of *
+* goodwill, work stoppage, computer failure or malfunction, or any *
+* and all other commercial damages or losses, even if such party *
+* shall have been informed of the possibility of such damages. This *
+* limitation of liability shall not apply to liability for death or *
+* personal injury resulting from such party's negligence to the *
+* extent applicable law prohibits such limitation. Some *
+* jurisdictions do not allow the exclusion or limitation of *
+* incidental or consequential damages, so this exclusion and *
+* limitation may not apply to You. *
+* *
+************************************************************************
+
+8. Litigation
+-------------
+
+Any litigation relating to this License may be brought only in the
+courts of a jurisdiction where the defendant maintains its principal
+place of business and such litigation shall be governed by laws of that
+jurisdiction, without reference to its conflict-of-law provisions.
+Nothing in this Section shall prevent a party's ability to bring
+cross-claims or counter-claims.
+
+9. Miscellaneous
+----------------
+
+This License represents the complete agreement concerning the subject
+matter hereof. If any provision of this License is held to be
+unenforceable, such provision shall be reformed only to the extent
+necessary to make it enforceable. Any law or regulation which provides
+that the language of a contract shall be construed against the drafter
+shall not be used to construe this License against a Contributor.
+
+10. Versions of the License
+---------------------------
+
+10.1. New Versions
+
+Mozilla Foundation is the license steward. Except as provided in Section
+10.3, no one other than the license steward has the right to modify or
+publish new versions of this License. Each version will be given a
+distinguishing version number.
+
+10.2. Effect of New Versions
+
+You may distribute the Covered Software under the terms of the version
+of the License under which You originally received the Covered Software,
+or under the terms of any subsequent version published by the license
+steward.
+
+10.3. Modified Versions
+
+If you create software not governed by this License, and you want to
+create a new license for such software, you may create and use a
+modified version of this License if you rename the license and remove
+any references to the name of the license steward (except to note that
+such modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary
+Licenses
+
+If You choose to distribute Source Code Form that is Incompatible With
+Secondary Licenses under the terms of this version of the License, the
+notice described in Exhibit B of this License must be attached.
+
+Exhibit A - Source Code Form License Notice
+-------------------------------------------
+
+ This Source Code Form is subject to the terms of the Mozilla Public
+ License, v. 2.0. If a copy of the MPL was not distributed with this
+ file, You can obtain one at https://www.mozilla.org/en-US/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular
+file, then You may include the notice in a location (such as a LICENSE
+file in a relevant directory) where a recipient would be likely to look
+for such a notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - "Incompatible With Secondary Licenses" Notice
+---------------------------------------------------------
+
+ This Source Code Form is "Incompatible With Secondary Licenses", as
+ defined by the Mozilla Public License, v. 2.0.
+
+
+-----------------------------------------------------------------------------
+ GNU LESSER GENERAL PUBLIC LICENSE
+ Version 2.1, February 1999
+
+ Copyright (C) 1991, 1999 Free Software Foundation, Inc.
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ applies to:
+ - The Chardet library, Copyright (c) Daniel Blanchard
+ (https://github.com/chardet/chardet)
+
+- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+
+[This is the first released version of the Lesser GPL. It also counts
+ as the successor of the GNU Library Public License, version 2, hence
+ the version number 2.1.]
+
+ Preamble
+
+ The licenses for most software are designed to take away your
+freedom to share and change it. By contrast, the GNU General Public
+Licenses are intended to guarantee your freedom to share and change
+free software--to make sure the software is free for all its users.
+
+ This license, the Lesser General Public License, applies to some
+specially designated software packages--typically libraries--of the
+Free Software Foundation and other authors who decide to use it. You
+can use it too, but we suggest you first think carefully about whether
+this license or the ordinary General Public License is the better
+strategy to use in any particular case, based on the explanations below.
+
+ When we speak of free software, we are referring to freedom of use,
+not price. Our General Public Licenses are designed to make sure that
+you have the freedom to distribute copies of free software (and charge
+for this service if you wish); that you receive source code or can get
+it if you want it; that you can change the software and use pieces of
+it in new free programs; and that you are informed that you can do
+these things.
+
+ To protect your rights, we need to make restrictions that forbid
+distributors to deny you these rights or to ask you to surrender these
+rights. These restrictions translate to certain responsibilities for
+you if you distribute copies of the library or if you modify it.
+
+ For example, if you distribute copies of the library, whether gratis
+or for a fee, you must give the recipients all the rights that we gave
+you. You must make sure that they, too, receive or can get the source
+code. If you link other code with the library, you must provide
+complete object files to the recipients, so that they can relink them
+with the library after making changes to the library and recompiling
+it. And you must show them these terms so they know their rights.
+
+ We protect your rights with a two-step method: (1) we copyright the
+library, and (2) we offer you this license, which gives you legal
+permission to copy, distribute and/or modify the library.
+
+ To protect each distributor, we want to make it very clear that
+there is no warranty for the free library. Also, if the library is
+modified by someone else and passed on, the recipients should know
+that what they have is not the original version, so that the original
+author's reputation will not be affected by problems that might be
+introduced by others.
+
+ Finally, software patents pose a constant threat to the existence of
+any free program. We wish to make sure that a company cannot
+effectively restrict the users of a free program by obtaining a
+restrictive license from a patent holder. Therefore, we insist that
+any patent license obtained for a version of the library must be
+consistent with the full freedom of use specified in this license.
+
+ Most GNU software, including some libraries, is covered by the
+ordinary GNU General Public License. This license, the GNU Lesser
+General Public License, applies to certain designated libraries, and
+is quite different from the ordinary General Public License. We use
+this license for certain libraries in order to permit linking those
+libraries into non-free programs.
+
+ When a program is linked with a library, whether statically or using
+a shared library, the combination of the two is legally speaking a
+combined work, a derivative of the original library. The ordinary
+General Public License therefore permits such linking only if the
+entire combination fits its criteria of freedom. The Lesser General
+Public License permits more lax criteria for linking other code with
+the library.
+
+ We call this license the "Lesser" General Public License because it
+does Less to protect the user's freedom than the ordinary General
+Public License. It also provides other free software developers Less
+of an advantage over competing non-free programs. These disadvantages
+are the reason we use the ordinary General Public License for many
+libraries. However, the Lesser license provides advantages in certain
+special circumstances.
+
+ For example, on rare occasions, there may be a special need to
+encourage the widest possible use of a certain library, so that it becomes
+a de-facto standard. To achieve this, non-free programs must be
+allowed to use the library. A more frequent case is that a free
+library does the same job as widely used non-free libraries. In this
+case, there is little to gain by limiting the free library to free
+software only, so we use the Lesser General Public License.
+
+ In other cases, permission to use a particular library in non-free
+programs enables a greater number of people to use a large body of
+free software. For example, permission to use the GNU C Library in
+non-free programs enables many more people to use the whole GNU
+operating system, as well as its variant, the GNU/Linux operating
+system.
+
+ Although the Lesser General Public License is Less protective of the
+users' freedom, it does ensure that the user of a program that is
+linked with the Library has the freedom and the wherewithal to run
+that program using a modified version of the Library.
+
+ The precise terms and conditions for copying, distribution and
+modification follow. Pay close attention to the difference between a
+"work based on the library" and a "work that uses the library". The
+former contains code derived from the library, whereas the latter must
+be combined with the library in order to run.
+
+ GNU LESSER GENERAL PUBLIC LICENSE
+ TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+ 0. This License Agreement applies to any software library or other
+program which contains a notice placed by the copyright holder or
+other authorized party saying it may be distributed under the terms of
+this Lesser General Public License (also called "this License").
+Each licensee is addressed as "you".
+
+ A "library" means a collection of software functions and/or data
+prepared so as to be conveniently linked with application programs
+(which use some of those functions and data) to form executables.
+
+ The "Library", below, refers to any such software library or work
+which has been distributed under these terms. A "work based on the
+Library" means either the Library or any derivative work under
+copyright law: that is to say, a work containing the Library or a
+portion of it, either verbatim or with modifications and/or translated
+straightforwardly into another language. (Hereinafter, translation is
+included without limitation in the term "modification".)
+
+ "Source code" for a work means the preferred form of the work for
+making modifications to it. For a library, complete source code means
+all the source code for all modules it contains, plus any associated
+interface definition files, plus the scripts used to control compilation
+and installation of the library.
+
+ Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope. The act of
+running a program using the Library is not restricted, and output from
+such a program is covered only if its contents constitute a work based
+on the Library (independent of the use of the Library in a tool for
+writing it). Whether that is true depends on what the Library does
+and what the program that uses the Library does.
+
+ 1. You may copy and distribute verbatim copies of the Library's
+complete source code as you receive it, in any medium, provided that
+you conspicuously and appropriately publish on each copy an
+appropriate copyright notice and disclaimer of warranty; keep intact
+all the notices that refer to this License and to the absence of any
+warranty; and distribute a copy of this License along with the
+Library.
+
+ You may charge a fee for the physical act of transferring a copy,
+and you may at your option offer warranty protection in exchange for a
+fee.
+
+ 2. You may modify your copy or copies of the Library or any portion
+of it, thus forming a work based on the Library, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+ a) The modified work must itself be a software library.
+
+ b) You must cause the files modified to carry prominent notices
+ stating that you changed the files and the date of any change.
+
+ c) You must cause the whole of the work to be licensed at no
+ charge to all third parties under the terms of this License.
+
+ d) If a facility in the modified Library refers to a function or a
+ table of data to be supplied by an application program that uses
+ the facility, other than as an argument passed when the facility
+ is invoked, then you must make a good faith effort to ensure that,
+ in the event an application does not supply such function or
+ table, the facility still operates, and performs whatever part of
+ its purpose remains meaningful.
+
+ (For example, a function in a library to compute square roots has
+ a purpose that is entirely well-defined independent of the
+ application. Therefore, Subsection 2d requires that any
+ application-supplied function or table used by this function must
+ be optional: if the application does not supply it, the square
+ root function must still compute square roots.)
+
+These requirements apply to the modified work as a whole. If
+identifiable sections of that work are not derived from the Library,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works. But when you
+distribute the same sections as part of a whole which is a work based
+on the Library, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote
+it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Library.
+
+In addition, mere aggregation of another work not based on the Library
+with the Library (or with a work based on the Library) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+ 3. You may opt to apply the terms of the ordinary GNU General Public
+License instead of this License to a given copy of the Library. To do
+this, you must alter all the notices that refer to this License, so
+that they refer to the ordinary GNU General Public License, version 2,
+instead of to this License. (If a newer version than version 2 of the
+ordinary GNU General Public License has appeared, then you can specify
+that version instead if you wish.) Do not make any other change in
+these notices.
+
+ Once this change is made in a given copy, it is irreversible for
+that copy, so the ordinary GNU General Public License applies to all
+subsequent copies and derivative works made from that copy.
+
+ This option is useful when you wish to copy part of the code of
+the Library into a program that is not a library.
+
+ 4. You may copy and distribute the Library (or a portion or
+derivative of it, under Section 2) in object code or executable form
+under the terms of Sections 1 and 2 above provided that you accompany
+it with the complete corresponding machine-readable source code, which
+must be distributed under the terms of Sections 1 and 2 above on a
+medium customarily used for software interchange.
+
+ If distribution of object code is made by offering access to copy
+from a designated place, then offering equivalent access to copy the
+source code from the same place satisfies the requirement to
+distribute the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+ 5. A program that contains no derivative of any portion of the
+Library, but is designed to work with the Library by being compiled or
+linked with it, is called a "work that uses the Library". Such a
+work, in isolation, is not a derivative work of the Library, and
+therefore falls outside the scope of this License.
+
+ However, linking a "work that uses the Library" with the Library
+creates an executable that is a derivative of the Library (because it
+contains portions of the Library), rather than a "work that uses the
+library". The executable is therefore covered by this License.
+Section 6 states terms for distribution of such executables.
+
+ When a "work that uses the Library" uses material from a header file
+that is part of the Library, the object code for the work may be a
+derivative work of the Library even though the source code is not.
+Whether this is true is especially significant if the work can be
+linked without the Library, or if the work is itself a library. The
+threshold for this to be true is not precisely defined by law.
+
+ If such an object file uses only numerical parameters, data
+structure layouts and accessors, and small macros and small inline
+functions (ten lines or less in length), then the use of the object
+file is unrestricted, regardless of whether it is legally a derivative
+work. (Executables containing this object code plus portions of the
+Library will still fall under Section 6.)
+
+ Otherwise, if the work is a derivative of the Library, you may
+distribute the object code for the work under the terms of Section 6.
+Any executables containing that work also fall under Section 6,
+whether or not they are linked directly with the Library itself.
+
+ 6. As an exception to the Sections above, you may also combine or
+link a "work that uses the Library" with the Library to produce a
+work containing portions of the Library, and distribute that work
+under terms of your choice, provided that the terms permit
+modification of the work for the customer's own use and reverse
+engineering for debugging such modifications.
+
+ You must give prominent notice with each copy of the work that the
+Library is used in it and that the Library and its use are covered by
+this License. You must supply a copy of this License. If the work
+during execution displays copyright notices, you must include the
+copyright notice for the Library among them, as well as a reference
+directing the user to the copy of this License. Also, you must do one
+of these things:
+
+ a) Accompany the work with the complete corresponding
+ machine-readable source code for the Library including whatever
+ changes were used in the work (which must be distributed under
+ Sections 1 and 2 above); and, if the work is an executable linked
+ with the Library, with the complete machine-readable "work that
+ uses the Library", as object code and/or source code, so that the
+ user can modify the Library and then relink to produce a modified
+ executable containing the modified Library. (It is understood
+ that the user who changes the contents of definitions files in the
+ Library will not necessarily be able to recompile the application
+ to use the modified definitions.)
+
+ b) Use a suitable shared library mechanism for linking with the
+ Library. A suitable mechanism is one that (1) uses at run time a
+ copy of the library already present on the user's computer system,
+ rather than copying library functions into the executable, and (2)
+ will operate properly with a modified version of the library, if
+ the user installs one, as long as the modified version is
+ interface-compatible with the version that the work was made with.
+
+ c) Accompany the work with a written offer, valid for at
+ least three years, to give the same user the materials
+ specified in Subsection 6a, above, for a charge no more
+ than the cost of performing this distribution.
+
+ d) If distribution of the work is made by offering access to copy
+ from a designated place, offer equivalent access to copy the above
+ specified materials from the same place.
+
+ e) Verify that the user has already received a copy of these
+ materials or that you have already sent this user a copy.
+
+ For an executable, the required form of the "work that uses the
+Library" must include any data and utility programs needed for
+reproducing the executable from it. However, as a special exception,
+the materials to be distributed need not include anything that is
+normally distributed (in either source or binary form) with the major
+components (compiler, kernel, and so on) of the operating system on
+which the executable runs, unless that component itself accompanies
+the executable.
+
+ It may happen that this requirement contradicts the license
+restrictions of other proprietary libraries that do not normally
+accompany the operating system. Such a contradiction means you cannot
+use both them and the Library together in an executable that you
+distribute.
+
+ 7. You may place library facilities that are a work based on the
+Library side-by-side in a single library together with other library
+facilities not covered by this License, and distribute such a combined
+library, provided that the separate distribution of the work based on
+the Library and of the other library facilities is otherwise
+permitted, and provided that you do these two things:
+
+ a) Accompany the combined library with a copy of the same work
+ based on the Library, uncombined with any other library
+ facilities. This must be distributed under the terms of the
+ Sections above.
+
+ b) Give prominent notice with the combined library of the fact
+ that part of it is a work based on the Library, and explaining
+ where to find the accompanying uncombined form of the same work.
+
+ 8. You may not copy, modify, sublicense, link with, or distribute
+the Library except as expressly provided under this License. Any
+attempt otherwise to copy, modify, sublicense, link with, or
+distribute the Library is void, and will automatically terminate your
+rights under this License. However, parties who have received copies,
+or rights, from you under this License will not have their licenses
+terminated so long as such parties remain in full compliance.
+
+ 9. You are not required to accept this License, since you have not
+signed it. However, nothing else grants you permission to modify or
+distribute the Library or its derivative works. These actions are
+prohibited by law if you do not accept this License. Therefore, by
+modifying or distributing the Library (or any work based on the
+Library), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Library or works based on it.
+
+ 10. Each time you redistribute the Library (or any work based on the
+Library), the recipient automatically receives a license from the
+original licensor to copy, distribute, link with or modify the Library
+subject to these terms and conditions. You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties with
+this License.
+
+ 11. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Library at all. For example, if a patent
+license would not permit royalty-free redistribution of the Library by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Library.
+
+If any portion of this section is held invalid or unenforceable under any
+particular circumstance, the balance of the section is intended to apply,
+and the section as a whole is intended to apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system which is
+implemented by public license practices. Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+ 12. If the distribution and/or use of the Library is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Library under this License may add
+an explicit geographical distribution limitation excluding those countries,
+so that distribution is permitted only in or among countries not thus
+excluded. In such case, this License incorporates the limitation as if
+written in the body of this License.
+
+ 13. The Free Software Foundation may publish revised and/or new
+versions of the Lesser General Public License from time to time.
+Such new versions will be similar in spirit to the present version,
+but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library
+specifies a version number of this License which applies to it and
+"any later version", you have the option of following the terms and
+conditions either of that version or of any later version published by
+the Free Software Foundation. If the Library does not specify a
+license version number, you may choose any version ever published by
+the Free Software Foundation.
+
+ 14. If you wish to incorporate parts of the Library into other free
+programs whose distribution conditions are incompatible with these,
+write to the author to ask for permission. For software which is
+copyrighted by the Free Software Foundation, write to the Free
+Software Foundation; we sometimes make exceptions for this. Our
+decision will be guided by the two goals of preserving the free status
+of all derivatives of our free software and of promoting the sharing
+and reuse of software generally.
+
+ NO WARRANTY
+
+ 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
+WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
+EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
+OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
+KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
+LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
+THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
+WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
+AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
+FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
+CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
+LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
+RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
+FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
+SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+DAMAGES.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Libraries
+
+ If you develop a new library, and you want it to be of the greatest
+possible use to the public, we recommend making it free software that
+everyone can redistribute and change. You can do so by permitting
+redistribution under these terms (or, alternatively, under the terms of the
+ordinary General Public License).
+
+ To apply these terms, attach the following notices to the library. It is
+safest to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least the
+"copyright" line and a pointer to where the full notice is found.
+
+
+    <one line to give the library's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
+ USA
+
+Also add information on how to contact you by electronic and paper mail.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the library, if
+necessary. Here is a sample; alter the names:
+
+ Yoyodyne, Inc., hereby disclaims all copyright interest in the
+ library `Frob' (a library for tweaking knobs) written by James Random
+ Hacker.
+
+  <signature of Ty Coon>, 1 April 1990
+ Ty Coon, President of Vice
+
+That's all there is to it!
+
+
+-----------------------------------------------------------------------------
+ GNU LESSER GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ applies to:
+ - The Stem library, Copyright 2011-2017, Damian Johnson and The Tor Project
+ (https://stem.torproject.org/)
+
+- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+
+ This version of the GNU Lesser General Public License incorporates
+the terms and conditions of version 3 of the GNU General Public
+License, supplemented by the additional permissions listed below.
+
+ 0. Additional Definitions.
+
+ As used herein, "this License" refers to version 3 of the GNU Lesser
+General Public License, and the "GNU GPL" refers to version 3 of the GNU
+General Public License.
+
+ "The Library" refers to a covered work governed by this License,
+other than an Application or a Combined Work as defined below.
+
+ An "Application" is any work that makes use of an interface provided
+by the Library, but which is not otherwise based on the Library.
+Defining a subclass of a class defined by the Library is deemed a mode
+of using an interface provided by the Library.
+
+ A "Combined Work" is a work produced by combining or linking an
+Application with the Library. The particular version of the Library
+with which the Combined Work was made is also called the "Linked
+Version".
+
+ The "Minimal Corresponding Source" for a Combined Work means the
+Corresponding Source for the Combined Work, excluding any source code
+for portions of the Combined Work that, considered in isolation, are
+based on the Application, and not on the Linked Version.
+
+ The "Corresponding Application Code" for a Combined Work means the
+object code and/or source code for the Application, including any data
+and utility programs needed for reproducing the Combined Work from the
+Application, but excluding the System Libraries of the Combined Work.
+
+ 1. Exception to Section 3 of the GNU GPL.
+
+ You may convey a covered work under sections 3 and 4 of this License
+without being bound by section 3 of the GNU GPL.
+
+ 2. Conveying Modified Versions.
+
+ If you modify a copy of the Library, and, in your modifications, a
+facility refers to a function or data to be supplied by an Application
+that uses the facility (other than as an argument passed when the
+facility is invoked), then you may convey a copy of the modified
+version:
+
+ a) under this License, provided that you make a good faith effort to
+ ensure that, in the event an Application does not supply the
+ function or data, the facility still operates, and performs
+ whatever part of its purpose remains meaningful, or
+
+ b) under the GNU GPL, with none of the additional permissions of
+ this License applicable to that copy.
+
+ 3. Object Code Incorporating Material from Library Header Files.
+
+ The object code form of an Application may incorporate material from
+a header file that is part of the Library. You may convey such object
+code under terms of your choice, provided that, if the incorporated
+material is not limited to numerical parameters, data structure
+layouts and accessors, or small macros, inline functions and templates
+(ten or fewer lines in length), you do both of the following:
+
+ a) Give prominent notice with each copy of the object code that the
+ Library is used in it and that the Library and its use are
+ covered by this License.
+
+ b) Accompany the object code with a copy of the GNU GPL and this license
+ document.
+
+ 4. Combined Works.
+
+ You may convey a Combined Work under terms of your choice that,
+taken together, effectively do not restrict modification of the
+portions of the Library contained in the Combined Work and reverse
+engineering for debugging such modifications, if you also do each of
+the following:
+
+ a) Give prominent notice with each copy of the Combined Work that
+ the Library is used in it and that the Library and its use are
+ covered by this License.
+
+ b) Accompany the Combined Work with a copy of the GNU GPL and this license
+ document.
+
+ c) For a Combined Work that displays copyright notices during
+ execution, include the copyright notice for the Library among
+ these notices, as well as a reference directing the user to the
+ copies of the GNU GPL and this license document.
+
+ d) Do one of the following:
+
+ 0) Convey the Minimal Corresponding Source under the terms of this
+ License, and the Corresponding Application Code in a form
+ suitable for, and under terms that permit, the user to
+ recombine or relink the Application with a modified version of
+ the Linked Version to produce a modified Combined Work, in the
+ manner specified by section 6 of the GNU GPL for conveying
+ Corresponding Source.
+
+ 1) Use a suitable shared library mechanism for linking with the
+ Library. A suitable mechanism is one that (a) uses at run time
+ a copy of the Library already present on the user's computer
+ system, and (b) will operate properly with a modified version
+ of the Library that is interface-compatible with the Linked
+ Version.
+
+ e) Provide Installation Information, but only if you would otherwise
+ be required to provide such information under section 6 of the
+ GNU GPL, and only to the extent that such information is
+ necessary to install and execute a modified version of the
+ Combined Work produced by recombining or relinking the
+ Application with a modified version of the Linked Version. (If
+ you use option 4d0, the Installation Information must accompany
+ the Minimal Corresponding Source and Corresponding Application
+ Code. If you use option 4d1, you must provide the Installation
+ Information in the manner specified by section 6 of the GNU GPL
+ for conveying Corresponding Source.)
+
+ 5. Combined Libraries.
+
+ You may place library facilities that are a work based on the
+Library side by side in a single library together with other library
+facilities that are not Applications and are not covered by this
+License, and convey such a combined library under terms of your
+choice, if you do both of the following:
+
+ a) Accompany the combined library with a copy of the same work based
+ on the Library, uncombined with any other library facilities,
+ conveyed under the terms of this License.
+
+ b) Give prominent notice with the combined library that part of it
+ is a work based on the Library, and explaining where to find the
+ accompanying uncombined form of the same work.
+
+ 6. Revised Versions of the GNU Lesser General Public License.
+
+ The Free Software Foundation may publish revised and/or new versions
+of the GNU Lesser General Public License from time to time. Such new
+versions will be similar in spirit to the present version, but may
+differ in detail to address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Library as you received it specifies that a certain numbered version
+of the GNU Lesser General Public License "or any later version"
+applies to it, you have the option of following the terms and
+conditions either of that published version or of any later version
+published by the Free Software Foundation. If the Library as you
+received it does not specify a version number of the GNU Lesser
+General Public License, you may choose any version of the GNU Lesser
+General Public License ever published by the Free Software Foundation.
+
+ If the Library as you received it specifies that a proxy can decide
+whether future versions of the GNU Lesser General Public License shall
+apply, that proxy's public statement of acceptance of any version is
+permanent authorization for you to choose that version for the
+Library.
+
+
+-----------------------------------------------------------------------------
+ Public domain unlicense
+
+ Note: This unlicense is meant only as a description
+
+ applies to:
+ - The Reed-Solomon erasure code library by Tomer Filiba, Stephen Larroque
+ (https://github.com/lrq3000/reedsolomon/)
+ (https://github.com/tomerfiliba/reedsolomon)
+
+ The original implementation is based on the tutorial at
+ https://en.wikiversity.org/wiki/Reed%E2%80%93Solomon_codes_for_coders
+
+- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+
+This is free and unencumbered software released into the public domain.
+
+Anyone is free to copy, modify, publish, use, compile, sell, or
+distribute this software, either in source code form or as a compiled
+binary, for any purpose, commercial or non-commercial, and by any
+means.
+
+In jurisdictions that recognize copyright laws, the author or authors
+of this software dedicate any and all copyright interest in the
+software to the public domain. We make this dedication for the benefit
+of the public at large and to the detriment of our heirs and
+successors. We intend this dedication to be an overt act of
+relinquishment in perpetuity of all present and future rights to this
+software under copyright law.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
+
+For more information, please refer to <https://unlicense.org/>
+
+
+-----------------------------------------------------------------------------
+ GNU Free Documentation License
+ Version 1.3, 3 November 2008
+
+Copyright © 2000, 2001, 2002, 2007, 2008 Free Software Foundation, Inc.
+
+
+ applies to:
+ - The RS-232 data diode documentation and schematics, Copyright © 2006, Douglas W. Jones and Tom Bowersox
+ (https://homepage.cs.uiowa.edu/~jones/voting/diode/RS232tech.pdf)
+
+ - The TTL data diode documentation and schematics, Copyright © 2016, Sancho_P (pseudonym, real name unknown)
+ (https://imgur.com/a/5Cv19)
+
+- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+
+Everyone is permitted to copy and distribute verbatim copies of this license
+document, but changing it is not allowed.
+
+### 0. PREAMBLE
+
+The purpose of this License is to make a manual, textbook, or other functional
+and useful document "free" in the sense of freedom: to assure everyone the
+effective freedom to copy and redistribute it, with or without modifying it,
+either commercially or noncommercially. Secondarily, this License preserves for
+the author and publisher a way to get credit for their work, while not being
+considered responsible for modifications made by others.
+
+This License is a kind of "copyleft", which means that derivative works of the
+document must themselves be free in the same sense. It complements the GNU
+General Public License, which is a copyleft license designed for free software.
+
+We have designed this License in order to use it for manuals for free software,
+because free software needs free documentation: a free program should come with
+manuals providing the same freedoms that the software does. But this License is
+not limited to software manuals; it can be used for any textual work, regardless
+of subject matter or whether it is published as a printed book. We recommend
+this License principally for works whose purpose is instruction or reference.
+
+### 1. APPLICABILITY AND DEFINITIONS
+
+This License applies to any manual or other work, in any medium, that contains a
+notice placed by the copyright holder saying it can be distributed under the
+terms of this License. Such a notice grants a world-wide, royalty-free license,
+unlimited in duration, to use that work under the conditions stated herein. The
+"Document", below, refers to any such manual or work. Any member of the public
+is a licensee, and is addressed as "you". You accept the license if you copy,
+modify or distribute the work in a way requiring permission under copyright law.
+
+A "Modified Version" of the Document means any work containing the Document or a
+portion of it, either copied verbatim, or with modifications and/or translated
+into another language.
+
+A "Secondary Section" is a named appendix or a front-matter section of the
+Document that deals exclusively with the relationship of the publishers or
+authors of the Document to the Document's overall subject (or to related
+matters) and contains nothing that could fall directly within that overall
+subject. (Thus, if the Document is in part a textbook of mathematics, a
+Secondary Section may not explain any mathematics.) The relationship could be a
+matter of historical connection with the subject or with related matters, or of
+legal, commercial, philosophical, ethical or political position regarding them.
+
+The "Invariant Sections" are certain Secondary Sections whose titles are
+designated, as being those of Invariant Sections, in the notice that says that
+the Document is released under this License. If a section does not fit the above
+definition of Secondary then it is not allowed to be designated as Invariant.
+The Document may contain zero Invariant Sections. If the Document does not
+identify any Invariant Sections then there are none.
+
+The "Cover Texts" are certain short passages of text that are listed, as
+Front-Cover Texts or Back-Cover Texts, in the notice that says that the Document
+is released under this License. A Front-Cover Text may be at most 5 words, and a
+Back-Cover Text may be at most 25 words.
+
+A "Transparent" copy of the Document means a machine-readable copy, represented
+in a format whose specification is available to the general public, that is
+suitable for revising the document straightforwardly with generic text editors
+or (for images composed of pixels) generic paint programs or (for drawings) some
+widely available drawing editor, and that is suitable for input to text
+formatters or for automatic translation to a variety of formats suitable for
+input to text formatters. A copy made in an otherwise Transparent file format
+whose markup, or absence of markup, has been arranged to thwart or discourage
+subsequent modification by readers is not Transparent. An image format is not
+Transparent if used for any substantial amount of text. A copy that is not
+"Transparent" is called "Opaque".
+
+Examples of suitable formats for Transparent copies include plain ASCII without
+markup, Texinfo input format, LaTeX input format, SGML or XML using a publicly
+available DTD, and standard-conforming simple HTML, PostScript or PDF designed
+for human modification. Examples of transparent image formats include PNG, XCF
+and JPG. Opaque formats include proprietary formats that can be read and edited
+only by proprietary word processors, SGML or XML for which the DTD and/or
+processing tools are not generally available, and the machine-generated HTML,
+PostScript or PDF produced by some word processors for output purposes only.
+
+The "Title Page" means, for a printed book, the title page itself, plus such
+following pages as are needed to hold, legibly, the material this License
+requires to appear in the title page. For works in formats which do not have any
+title page as such, "Title Page" means the text near the most prominent
+appearance of the work's title, preceding the beginning of the body of the text.
+
+The "publisher" means any person or entity that distributes copies of the
+Document to the public.
+
+A section "Entitled XYZ" means a named subunit of the Document whose title
+either is precisely XYZ or contains XYZ in parentheses following text that
+translates XYZ in another language. (Here XYZ stands for a specific section name
+mentioned below, such as "Acknowledgements", "Dedications", "Endorsements", or
+"History".) To "Preserve the Title" of such a section when you modify the
+Document means that it remains a section "Entitled XYZ" according to this
+definition.
+
+The Document may include Warranty Disclaimers next to the notice which states
+that this License applies to the Document. These Warranty Disclaimers are
+considered to be included by reference in this License, but only as regards
+disclaiming warranties: any other implication that these Warranty Disclaimers
+may have is void and has no effect on the meaning of this License.
+
+### 2. VERBATIM COPYING
+
+You may copy and distribute the Document in any medium, either commercially or
+noncommercially, provided that this License, the copyright notices, and the
+license notice saying this License applies to the Document are reproduced in all
+copies, and that you add no other conditions whatsoever to those of this
+License. You may not use technical measures to obstruct or control the reading
+or further copying of the copies you make or distribute. However, you may accept
+compensation in exchange for copies. If you distribute a large enough number of
+copies you must also follow the conditions in section 3.
+
+You may also lend copies, under the same conditions stated above, and you may
+publicly display copies.
+
+### 3. COPYING IN QUANTITY
+
+If you publish printed copies (or copies in media that commonly have printed
+covers) of the Document, numbering more than 100, and the Document's license
+notice requires Cover Texts, you must enclose the copies in covers that carry,
+clearly and legibly, all these Cover Texts: Front-Cover Texts on the front
+cover, and Back-Cover Texts on the back cover. Both covers must also clearly and
+legibly identify you as the publisher of these copies. The front cover must
+present the full title with all words of the title equally prominent and
+visible. You may add other material on the covers in addition. Copying with
+changes limited to the covers, as long as they preserve the title of the
+Document and satisfy these conditions, can be treated as verbatim copying in
+other respects.
+
+If the required texts for either cover are too voluminous to fit legibly, you
+should put the first ones listed (as many as fit reasonably) on the actual
+cover, and continue the rest onto adjacent pages.
+
+If you publish or distribute Opaque copies of the Document numbering more than
+100, you must either include a machine-readable Transparent copy along with each
+Opaque copy, or state in or with each Opaque copy a computer-network location
+from which the general network-using public has access to download using
+public-standard network protocols a complete Transparent copy of the Document,
+free of added material. If you use the latter option, you must take reasonably
+prudent steps, when you begin distribution of Opaque copies in quantity, to
+ensure that this Transparent copy will remain thus accessible at the stated
+location until at least one year after the last time you distribute an Opaque
+copy (directly or through your agents or retailers) of that edition to the
+public.
+
+It is requested, but not required, that you contact the authors of the Document
+well before redistributing any large number of copies, to give them a chance to
+provide you with an updated version of the Document.
+
+### 4. MODIFICATIONS
+
+You may copy and distribute a Modified Version of the Document under the
+conditions of sections 2 and 3 above, provided that you release the Modified
+Version under precisely this License, with the Modified Version filling the role
+of the Document, thus licensing distribution and modification of the Modified
+Version to whoever possesses a copy of it. In addition, you must do these things
+in the Modified Version:
+
+* A. Use in the Title Page (and on the covers, if any) a title distinct from
+that of the Document, and from those of previous versions (which should, if
+there were any, be listed in the History section of the Document). You may use
+the same title as a previous version if the original publisher of that version
+gives permission.
+
+* B. List on the Title Page, as authors, one or more persons or entities
+responsible for authorship of the modifications in the Modified Version,
+together with at least five of the principal authors of the Document (all of
+its principal authors, if it has fewer than five), unless they release you from
+this requirement.
+
+* C. State on the Title page the name of the publisher of the Modified Version,
+as the publisher.
+
+* D. Preserve all the copyright notices of the Document.
+
+* E. Add an appropriate copyright notice for your modifications adjacent to the
+other copyright notices.
+
+* F. Include, immediately after the copyright notices, a license notice giving
+the public permission to use the Modified Version under the terms of this
+License, in the form shown in the Addendum below.
+
+* G. Preserve in that license notice the full lists of Invariant Sections and
+required Cover Texts given in the Document's license notice.
+
+* H. Include an unaltered copy of this License.
+
+* I. Preserve the section Entitled "History", Preserve its Title, and add to it
+an item stating at least the title, year, new authors, and publisher of the
+Modified Version as given on the Title Page. If there is no section Entitled
+"History" in the Document, create one stating the title, year, authors, and
+publisher of the Document as given on its Title Page, then add an item
+describing the Modified Version as stated in the previous sentence.
+
+* J. Preserve the network location, if any, given in the Document for public
+access to a Transparent copy of the Document, and likewise the network locations
+given in the Document for previous versions it was based on. These may be placed
+in the "History" section. You may omit a network location for a work that was
+published at least four years before the Document itself, or if the original
+publisher of the version it refers to gives permission.
+
+* K. For any section Entitled "Acknowledgements" or "Dedications", Preserve the
+Title of the section, and preserve in the section all the substance and tone of
+each of the contributor acknowledgements and/or dedications given therein.
+
+* L. Preserve all the Invariant Sections of the Document, unaltered in their
+text and in their titles. Section numbers or the equivalent are not considered
+part of the section titles.
+
+* M. Delete any section Entitled "Endorsements". Such a section may not be
+included in the Modified Version.
+
+* N. Do not retitle any existing section to be Entitled "Endorsements" or to
+conflict in title with any Invariant Section.
+
+* O. Preserve any Warranty Disclaimers.
+
+If the Modified Version includes new front-matter sections or appendices that
+qualify as Secondary Sections and contain no material copied from the Document,
+you may at your option designate some or all of these sections as invariant. To
+do this, add their titles to the list of Invariant Sections in the Modified
+Version's license notice. These titles must be distinct from any other section
+titles.
+
+You may add a section Entitled "Endorsements", provided it contains nothing but
+endorsements of your Modified Version by various parties—for example, statements
+of peer review or that the text has been approved by an organization as the
+authoritative definition of a standard.
+
+You may add a passage of up to five words as a Front-Cover Text, and a passage
+of up to 25 words as a Back-Cover Text, to the end of the list of Cover Texts in
+the Modified Version. Only one passage of Front-Cover Text and one of Back-Cover
+Text may be added by (or through arrangements made by) any one entity. If the
+Document already includes a cover text for the same cover, previously added by
+you or by arrangement made by the same entity you are acting on behalf of, you
+may not add another; but you may replace the old one, on explicit permission
+from the previous publisher that added the old one.
+
+The author(s) and publisher(s) of the Document do not by this License give
+permission to use their names for publicity for or to assert or imply
+endorsement of any Modified Version.
+
+### 5. COMBINING DOCUMENTS
+
+You may combine the Document with other documents released under this License,
+under the terms defined in section 4 above for modified versions, provided that
+you include in the combination all of the Invariant Sections of all of the
+original documents, unmodified, and list them all as Invariant Sections of your
+combined work in its license notice, and that you preserve all their Warranty
+Disclaimers.
+
+The combined work need only contain one copy of this License, and multiple
+identical Invariant Sections may be replaced with a single copy. If there are
+multiple Invariant Sections with the same name but different contents, make the
+title of each such section unique by adding at the end of it, in parentheses,
+the name of the original author or publisher of that section if known, or else a
+unique number. Make the same adjustment to the section titles in the list of
+Invariant Sections in the license notice of the combined work.
+
+In the combination, you must combine any sections Entitled "History" in the
+various original documents, forming one section Entitled "History"; likewise
+combine any sections Entitled "Acknowledgements", and any sections Entitled
+"Dedications". You must delete all sections Entitled "Endorsements".
+
+### 6. COLLECTIONS OF DOCUMENTS
+
+You may make a collection consisting of the Document and other documents
+released under this License, and replace the individual copies of this License
+in the various documents with a single copy that is included in the collection,
+provided that you follow the rules of this License for verbatim copying of each
+of the documents in all other respects.
+
+You may extract a single document from such a collection, and distribute it
+individually under this License, provided you insert a copy of this License into
+the extracted document, and follow this License in all other respects regarding
+verbatim copying of that document.
+
+### 7. AGGREGATION WITH INDEPENDENT WORKS
+
+A compilation of the Document or its derivatives with other separate and
+independent documents or works, in or on a volume of a storage or distribution
+medium, is called an "aggregate" if the copyright resulting from the compilation
+is not used to limit the legal rights of the compilation's users beyond what the
+individual works permit. When the Document is included in an aggregate, this
+License does not apply to the other works in the aggregate which are not
+themselves derivative works of the Document.
+
+If the Cover Text requirement of section 3 is applicable to these copies of the
+Document, then if the Document is less than one half of the entire aggregate,
+the Document's Cover Texts may be placed on covers that bracket the Document
+within the aggregate, or the electronic equivalent of covers if the Document is
+in electronic form. Otherwise they must appear on printed covers that bracket
+the whole aggregate.
+
+### 8. TRANSLATION
+
+Translation is considered a kind of modification, so you may distribute
+translations of the Document under the terms of section 4. Replacing Invariant
+Sections with translations requires special permission from their copyright
+holders, but you may include translations of some or all Invariant Sections in
+addition to the original versions of these Invariant Sections. You may include a
+translation of this License, and all the license notices in the Document, and
+any Warranty Disclaimers, provided that you also include the original English
+version of this License and the original versions of those notices and
+disclaimers. In case of a disagreement between the translation and the original
+version of this License or a notice or disclaimer, the original version will
+prevail.
+
+If a section in the Document is Entitled "Acknowledgements", "Dedications", or
+"History", the requirement (section 4) to Preserve its Title (section 1) will
+typically require changing the actual title.
+
+### 9. TERMINATION
+
+You may not copy, modify, sublicense, or distribute the Document except as
+expressly provided under this License. Any attempt otherwise to copy, modify,
+sublicense, or distribute it is void, and will automatically terminate your
+rights under this License.
+
+However, if you cease all violation of this License, then your license from a
+particular copyright holder is reinstated (a) provisionally, unless and until
+the copyright holder explicitly and finally terminates your license, and (b)
+permanently, if the copyright holder fails to notify you of the violation by
+some reasonable means prior to 60 days after the cessation.
+
+Moreover, your license from a particular copyright holder is reinstated
+permanently if the copyright holder notifies you of the violation by some
+reasonable means, this is the first time you have received notice of violation
+of this License (for any work) from that copyright holder, and you cure the
+violation prior to 30 days after your receipt of the notice.
+
+Termination of your rights under this section does not terminate the licenses of
+parties who have received copies or rights from you under this License. If your
+rights have been terminated and not permanently reinstated, receipt of a copy of
+some or all of the same material does not give you any rights to use it.
+
+### 10. FUTURE REVISIONS OF THIS LICENSE
+
+The Free Software Foundation may publish new, revised versions of the GNU Free
+Documentation License from time to time. Such new versions will be similar in
+spirit to the present version, but may differ in detail to address new problems
+or concerns. See https://www.gnu.org/licenses/.
+
+Each version of the License is given a distinguishing version number. If the
+Document specifies that a particular numbered version of this License "or any
+later version" applies to it, you have the option of following the terms and
+conditions either of that specified version or of any later version that has
+been published (not as a draft) by the Free Software Foundation. If the Document
+does not specify a version number of this License, you may choose any version
+ever published (not as a draft) by the Free Software Foundation. If the Document
+specifies that a proxy can decide which future versions of this License can be
+used, that proxy's public statement of acceptance of a version permanently
+authorizes you to choose that version for the Document.
+
+### 11. RELICENSING
+
+"Massive Multiauthor Collaboration Site" (or "MMC Site") means any World Wide
+Web server that publishes copyrightable works and also provides prominent
+facilities for anybody to edit those works. A public wiki that anybody can edit
+is an example of such a server. A "Massive Multiauthor Collaboration" (or "MMC")
+contained in the site means any set of copyrightable works thus published on the
+MMC site.
+
+"CC-BY-SA" means the Creative Commons Attribution-Share Alike 3.0 license
+published by Creative Commons Corporation, a not-for-profit corporation with a
+principal place of business in San Francisco, California, as well as future
+copyleft versions of that license published by that same organization.
+
+"Incorporate" means to publish or republish a Document, in whole or in part,
+as part of another Document.
+
+An MMC is "eligible for relicensing" if it is licensed under this License, and
+if all works that were first published under this License somewhere other than
+this MMC, and subsequently incorporated in whole or in part into the MMC, (1)
+had no cover texts or invariant sections, and (2) were thus incorporated prior
+to November 1, 2008.
+
+The operator of an MMC Site may republish an MMC contained in the site under
+CC-BY-SA on the same site at any time before August 1, 2009, provided the MMC is
+eligible for relicensing.
+
+ADDENDUM: How to use this License for your documents
+
+To use this License in a document you have written, include a copy of the
+License in the document and put the following copyright and license notices just
+after the title page:
+
+ Copyright (C) YEAR YOUR NAME.
+ Permission is granted to copy, distribute and/or modify this document
+ under the terms of the GNU Free Documentation License, Version 1.3
+ or any later version published by the Free Software Foundation;
+ with no Invariant Sections, no Front-Cover Texts, and no Back-Cover Texts.
+ A copy of the license is included in the section entitled "GNU
+ Free Documentation License".
+
+If you have Invariant Sections, Front-Cover Texts and Back-Cover Texts, replace
+the "with … Texts." line with this:
+
+ with the Invariant Sections being LIST THEIR TITLES, with the
+ Front-Cover Texts being LIST, and with the Back-Cover Texts being LIST.
+
+If you have Invariant Sections without Cover Texts, or some other combination of
+the three, merge those two alternatives to suit the situation.
+
+If your document contains nontrivial examples of program code, we recommend
+releasing these examples in parallel under your choice of free software license,
+such as the GNU General Public License, to permit their use in free software.
diff --git a/LICENSE.md b/LICENSE.md
deleted file mode 100644
index 7085ad0..0000000
--- a/LICENSE.md
+++ /dev/null
@@ -1,864 +0,0 @@
-# Licenses
-
-### TFC
-
-TFC 1.17.08 Copyright (C) 2013-2017 Markus Ottela
-
-This program is free software: you can redistribute it and/or modify it under
-the terms of the GNU General Public License as published by the Free Software
-Foundation, either version 3 of the License, or (at your option) any later
-version.
-
-This program is distributed in the hope that it will be useful, but WITHOUT ANY
-WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
-PARTICULAR PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License along with
-this program. If not, see http://www.gnu.org/licenses/.
-
-(You can find the license at the bottom of this file)
-
-#### TFC Documentation including white paper and GitHub wiki are released under
-GNU Free Documentation License 1.3
-
-## Third Party licenses
-
-
-### TTL Data diode
-
-Copyrights for the schematics of the TTL data diode presented in the
-documentation belong to pseudonym Sancho_P and are published under GNU Free
-Documentation License v1.3.
-
-
-### RS-232 Data diode
-
-Copyrights for the schematics of the RS-232 Data Diode presented in the
-documentation belong to Douglas W. Jones and are published used under GNU Free
-Documentation License. URL to original work:
-http://homepage.cs.uiowa.edu/~jones/voting/diode/RS232tech.pdf
-
-
-### Base58
-
-The Base58 implementation is used and modified under MIT license
-https://github.com/keis/base58
-
-
-### PySerial
-
-The PySerial library is used under BSD-3-Clause license
-https://github.com/pyserial/pyserial/blob/master/LICENSE.txt
-
-
-### Reed-Solomon erasure code
-
-The Reed Solomon erasure code library has been released to the public domain.
-
-License: https://github.com/tomerfiliba/reedsolomon/blob/master/LICENSE
-
-Original python implementation:
-https://github.com/tomerfiliba/reedsolomon/blob/master/reedsolo.py
-
-Implementation is based on tutorial at
-http://en.wikiversity.org/wiki/Reed%E2%80%93Solomon_codes_for_coders
-
-
-### Argon2_cffi
-
-The Argon2 library is used under MIT license
-https://github.com/hynek/argon2_cffi/blob/master/LICENSE
-
-
-### PyNaCl
-
-The PyNaCl library is licensed under Apache License 2.0 and is compatible with
-GNU GPLv3 license:
-
-Version 2.0, January 2004 http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-Definitions.
-
-"License" shall mean the terms and conditions for use, reproduction, and
-distribution as defined by Sections 1 through 9 of this document.
-
-"Licensor" shall mean the copyright owner or entity authorized by the copyright
-owner that is granting the License.
-
-"Legal Entity" shall mean the union of the acting entity and all other entities
-that control, are controlled by, or are under common control with that entity.
-For the purposes of this definition, "control" means (i) the power, direct or
-indirect, to cause the direction or management of such entity, whether by
-contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the
-outstanding shares, or (iii) beneficial ownership of such entity.
-
-"You" (or "Your") shall mean an individual or Legal Entity exercising
-permissions granted by this License.
-
-"Source" form shall mean the preferred form for making modifications, including
-but not limited to software source code, documentation source, and
-configuration files.
-
-"Object" form shall mean any form resulting from mechanical transformation or
-translation of a Source form, including but not limited to compiled object
-code, generated documentation, and conversions to other media types.
-
-"Work" shall mean the work of authorship, whether in Source or Object form,
-made available under the License, as indicated by a copyright notice that is
-included in or attached to the work (an example is provided in the Appendix
-below).
-
-"Derivative Works" shall mean any work, whether in Source or Object form, that
-is based on (or derived from) the Work and for which the editorial revisions,
-annotations, elaborations, or other modifications represent, as a whole, an
-original work of authorship. For the purposes of this License, Derivative Works
-shall not include works that remain separable from, or merely link (or bind by
-name) to the interfaces of, the Work and Derivative Works thereof.
-
-"Contribution" shall mean any work of authorship, including the original
-version of the Work and any modifications or additions to that Work or
-Derivative Works thereof, that is intentionally submitted to Licensor for
-inclusion in the Work by the copyright owner or by an individual or Legal
-Entity authorized to submit on behalf of the copyright owner. For the purposes
-of this definition, "submitted" means any form of electronic, verbal, or
-written communication sent to the Licensor or its representatives, including
-but not limited to communication on electronic mailing lists, source code
-control systems, and issue tracking systems that are managed by, or on behalf
-of, the Licensor for the purpose of discussing and improving the Work, but
-excluding communication that is conspicuously marked or otherwise designated
-in writing by the copyright owner as "Not a Contribution."
-
-"Contributor" shall mean Licensor and any individual or Legal Entity on behalf
-of whom a Contribution has been received by Licensor and subsequently
-incorporated within the Work.
-
-Grant of Copyright License. Subject to the terms and conditions of this
-License, each Contributor hereby grants to You a perpetual, worldwide,
-non-exclusive, no-charge, royalty-free, irrevocable copyright license to
-reproduce, prepare Derivative Works of, publicly display, publicly perform,
-sublicense, and distribute the Work and such Derivative Works in Source or
-Object form.
-
-Grant of Patent License. Subject to the terms and conditions of this License,
-each Contributor hereby grants to You a perpetual, worldwide, non-exclusive,
-no-charge, royalty-free, irrevocable (except as stated in this section) patent
-license to make, have made, use, offer to sell, sell, import, and otherwise
-transfer the Work, where such license applies only to those patent claims
-licensable by such Contributor that are necessarily infringed by their
-Contribution(s) alone or by combination of their Contribution(s) with the Work
-to which such Contribution(s) was submitted. If You institute patent litigation
-against any entity (including a cross-claim or counterclaim in a lawsuit)
-alleging that the Work or a Contribution incorporated within the Work
-constitutes direct or contributory patent infringement, then any patent
-licenses granted to You under this License for that Work shall terminate as of
-the date such litigation is filed.
-
-Redistribution. You may reproduce and distribute copies of the Work or
-Derivative Works thereof in any medium, with or without modifications, and in
-Source or Object form, provided that You meet the following conditions:
-
-(a) You must give any other recipients of the Work or Derivative Works a copy
-of this License; and
-
-(b) You must cause any modified files to carry prominent notices stating that
-You changed the files; and
-
-(c) You must retain, in the Source form of any Derivative Works that You
-distribute, all copyright, patent, trademark, and attribution notices from the
-Source form of the Work, excluding those notices that do not pertain to any
-part of the Derivative Works; and
-
-(d) If the Work includes a "NOTICE" text file as part of its distribution, then
-any Derivative Works that You distribute must include a readable copy of the
-attribution notices contained within such NOTICE file, excluding those notices
-that do not pertain to any part of the Derivative Works, in at least one of the
-following places: within a NOTICE text file distributed as part of the
-Derivative Works; within the Source form or documentation, if provided along
-with the Derivative Works; or, within a display generated by the Derivative
-Works, if and wherever such third-party notices normally appear. The contents
-of the NOTICE file are for informational purposes only and do not modify the
-License. You may add Your own attribution notices within Derivative Works that
-You distribute, alongside or as an addendum to the NOTICE text from the Work,
-provided that such additional attribution notices cannot be construed as
-modifying the License.
-
-You may add Your own copyright statement to Your modifications and may provide
-additional or different license terms and conditions for use, reproduction, or
-distribution of Your modifications, or for any such Derivative Works as a
-whole, provided Your use, reproduction, and distribution of the Work otherwise
-complies with the conditions stated in this License.
-
-Submission of Contributions. Unless You explicitly state otherwise, any
-Contribution intentionally submitted for inclusion in the Work by You to the
-Licensor shall be under the terms and conditions of this License, without any
-additional terms or conditions. Notwithstanding the above, nothing herein shall
-supersede or modify the terms of any separate license agreement you may have
-executed with Licensor regarding such Contributions.
-
-Trademarks. This License does not grant permission to use the trade names,
-trademarks, service marks, or product names of the Licensor, except as required
-for reasonable and customary use in describing the origin of the Work and
-reproducing the content of the NOTICE file.
-
-Disclaimer of Warranty. Unless required by applicable law or agreed to in
-writing, Licensor provides the Work (and each Contributor provides its
-Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied, including, without limitation, any warranties
-or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-PARTICULAR PURPOSE. You are solely responsible for determining the
-appropriateness of using or redistributing the Work and assume any risks
-associated with Your exercise of permissions under this License.
-
-Limitation of Liability. In no event and under no legal theory, whether in tort
-(including negligence), contract, or otherwise, unless required by applicable
-law (such as deliberate and grossly negligent acts) or agreed to in writing,
-shall any Contributor be liable to You for damages, including any direct,
-indirect, special, incidental, or consequential damages of any character
-arising as a result of this License or out of the use or inability to use the
-Work (including but not limited to damages for loss of goodwill, work stoppage,
-computer failure or malfunction, or any and all other commercial damages or
-losses), even if such Contributor has been advised of the possibility of such
-damages.
-
-Accepting Warranty or Additional Liability. While redistributing the Work or
-Derivative Works thereof, You may choose to offer, and charge a fee for,
-acceptance of support, warranty, indemnity, or other liability obligations
-and/or rights consistent with this License. However, in accepting such
-obligations, You may act only on Your own behalf and on Your sole
-responsibility, not on behalf of any other Contributor, and only if You agree
-to indemnify, defend, and hold each Contributor harmless for any liability
-incurred by, or claims asserted against, such Contributor by reason of your
-accepting any such warranty or additional liability.
-
-
-# GNU GENERAL PUBLIC LICENSE
-
-## Version 3, 29 June 2007
-
-Copyright © 2007 Free Software Foundation, Inc. http://fsf.org/
-
-Everyone is permitted to copy and distribute verbatim copies of this license
-document, but changing it is not allowed.
-
-Preamble
-
-The GNU General Public License is a free, copyleft license for software and
-other kinds of works.
-
-The licenses for most software and other practical works are designed to take
-away your freedom to share and change the works. By contrast, the GNU General
-Public License is intended to guarantee your freedom to share and change all
-versions of a program--to make sure it remains free software for all its users.
-We, the Free Software Foundation, use the GNU General Public License for most
-of our software; it applies also to any other work released this way by its
-authors. You can apply it to your programs, too.
-
-When we speak of free software, we are referring to freedom, not price. Our
-General Public Licenses are designed to make sure that you have the freedom
-to distribute copies of free software (and charge for them if you wish), that
-you receive source code or can get it if you want it, that you can change the
-software or use pieces of it in new free programs, and that you know you can
-do these things.
-
-To protect your rights, we need to prevent others from denying you these rights
-or asking you to surrender the rights. Therefore, you have certain
-responsibilities if you distribute copies of the software, or if you modify it:
-responsibilities to respect the freedom of others.
-
-For example, if you distribute copies of such a program, whether gratis or for
-a fee, you must pass on to the recipients the same freedoms that you received.
-You must make sure that they, too, receive or can get the source code. And you
-must show them these terms so they know their rights.
-
-Developers that use the GNU GPL protect your rights with two steps: (1) assert
-copyright on the software, and (2) offer you this License giving you legal
-permission to copy, distribute and/or modify it.
-
-For the developers' and authors' protection, the GPL clearly explains that
-there is no warranty for this free software. For both users' and authors' sake,
-the GPL requires that modified versions be marked as changed, so that their
-problems will not be attributed erroneously to authors of previous versions.
-
-Some devices are designed to deny users access to install or run modified
-versions of the software inside them, although the manufacturer can do so. This
-is fundamentally incompatible with the aim of protecting users' freedom to
-change the software. The systematic pattern of such abuse occurs in the area of
-products for individuals to use, which is precisely where it is most
-unacceptable. Therefore, we have designed this version of the GPL to prohibit
-the practice for those products. If such problems arise substantially in other
-domains, we stand ready to extend this provision to those domains in future
-versions of the GPL, as needed to protect the freedom of users.
-
-Finally, every program is threatened constantly by software patents. States
-should not allow patents to restrict development and use of software on
-general-purpose computers, but in those that do, we wish to avoid the special
-danger that patents applied to a free program could make it effectively
-proprietary. To prevent this, the GPL assures that patents cannot be used to
-render the program non-free.
-
-The precise terms and conditions for copying, distribution and modification
-follow.
-
-### TERMS AND CONDITIONS
-
-Definitions.
-
-“This License” refers to version 3 of the GNU General Public License.
-
-“Copyright” also means copyright-like laws that apply to other kinds of works,
-such as semiconductor masks.
-
-“The Program” refers to any copyrightable work licensed under this License.
-Each licensee is addressed as “you”. “Licensees” and “recipients” may be
-individuals or organizations.
-
-To “modify” a work means to copy from or adapt all or part of the work in a
-fashion requiring copyright permission, other than the making of an exact copy.
-The resulting work is called a “modified version” of the earlier work or a work
-“based on” the earlier work.
-
-A “covered work” means either the unmodified Program or a work based on the
-Program.
-
-To “propagate” a work means to do anything with it that, without permission,
-would make you directly or secondarily liable for infringement under applicable
-copyright law, except executing it on a computer or modifying a private copy.
-Propagation includes copying, distribution (with or without modification),
-making available to the public, and in some countries other activities as well.
-
-To “convey” a work means any kind of propagation that enables other parties to
-make or receive copies. Mere interaction with a user through a computer
-network, with no transfer of a copy, is not conveying.
-
-An interactive user interface displays “Appropriate Legal Notices” to the
-extent that it includes a convenient and prominently visible feature that (1)
-displays an appropriate copyright notice, and (2) tells the user that there is
-no warranty for the work (except to the extent that warranties are provided),
-that licensees may convey the work under this License, and how to view a copy
-of this License. If the interface presents a list of user commands or options,
-such as a menu, a prominent item in the list meets this criterion.
-
-Source Code.
-
-The “source code” for a work means the preferred form of the work for making
-modifications to it. “Object code” means any non-source form of a work.
-
-A “Standard Interface” means an interface that either is an official standard
-defined by a recognized standards body, or, in the case of interfaces specified
-for a particular programming language, one that is widely used among developers
-working in that language.
-
-The “System Libraries” of an executable work include anything, other than the
-work as a whole, that (a) is included in the normal form of packaging a Major
-Component, but which is not part of that Major Component, and (b) serves only
-to enable use of the work with that Major Component, or to implement a Standard
-Interface for which an implementation is available to the public in source code
-form. A “Major Component”, in this context, means a major essential component
-(kernel, window system, and so on) of the specific operating system (if any) on
-which the executable work runs, or a compiler used to produce the work, or an
-object code interpreter used to run it.
-
-The “Corresponding Source” for a work in object code form means all the source
-code needed to generate, install, and (for an executable work) run the object
-code and to modify the work, including scripts to control those activities.
-However, it does not include the work's System Libraries, or general-purpose
-tools or generally available free programs which are used unmodified in
-performing those activities but which are not part of the work. For example,
-Corresponding Source includes interface definition files associated with source
-files for the work, and the source code for shared libraries and dynamically
-linked subprograms that the work is specifically designed to require, such as
-by intimate data communication or control flow between those subprograms and
-other parts of the work.
-
-The Corresponding Source need not include anything that users can regenerate
-automatically from other parts of the Corresponding Source.
-
-The Corresponding Source for a work in source code form is that same work.
-
-Basic Permissions.
-
-All rights granted under this License are granted for the term of copyright on
-the Program, and are irrevocable provided the stated conditions are met. This
-License explicitly affirms your unlimited permission to run the unmodified
-Program. The output from running a covered work is covered by this License only
-if the output, given its content, constitutes a covered work. This License
-acknowledges your rights of fair use or other equivalent, as provided by
-copyright law.
-
-You may make, run and propagate covered works that you do not convey, without
-conditions so long as your license otherwise remains in force. You may convey
-covered works to others for the sole purpose of having them make modifications
-exclusively for you, or provide you with facilities for running those works,
-provided that you comply with the terms of this License in conveying all
-material for which you do not control copyright. Those thus making or running
-the covered works for you must do so exclusively on your behalf, under your
-direction and control, on terms that prohibit them from making any copies of
-your copyrighted material outside their relationship with you.
-
-Conveying under any other circumstances is permitted solely under the
-conditions stated below. Sublicensing is not allowed; section 10 makes it
-unnecessary.
-
-Protecting Users' Legal Rights From Anti-Circumvention Law.
-
-No covered work shall be deemed part of an effective technological measure
-under any applicable law fulfilling obligations under article 11 of the WIPO
-copyright treaty adopted on 20 December 1996, or similar laws prohibiting or
-restricting circumvention of such measures.
-
-When you convey a covered work, you waive any legal power to forbid
-circumvention of technological measures to the extent such circumvention is
-effected by exercising rights under this License with respect to the covered
-work, and you disclaim any intention to limit operation or modification of the
-work as a means of enforcing, against the work's users, your or third parties'
-legal rights to forbid circumvention of technological measures.
-
-Conveying Verbatim Copies.
-
-You may convey verbatim copies of the Program's source code as you receive it,
-in any medium, provided that you conspicuously and appropriately publish on
-each copy an appropriate copyright notice; keep intact all notices stating that
-this License and any non-permissive terms added in accord with section 7 apply
-to the code; keep intact all notices of the absence of any warranty; and give
-all recipients a copy of this License along with the Program.
-
-You may charge any price or no price for each copy that you convey, and you may
-offer support or warranty protection for a fee.
-
-Conveying Modified Source Versions.
-
-You may convey a work based on the Program, or the modifications to produce it
-from the Program, in the form of source code under the terms of section 4,
-provided that you also meet all of these conditions:
-
-a) The work must carry prominent notices stating that you modified it, and
- giving a relevant date.
-
-b) The work must carry prominent notices stating that it is released under this
- License and any conditions added under section 7. This requirement modifies
- the requirement in section 4 to “keep intact all notices”.
-
-c) You must license the entire work, as a whole, under this License to anyone
- who comes into possession of a copy. This License will therefore apply,
- along with any applicable section 7 additional terms, to the whole of the
- work, and all its parts, regardless of how they are packaged. This License
- gives no permission to license the work in any other way, but it does not
- invalidate such permission if you have separately received it.
-
-d) If the work has interactive user interfaces, each must display Appropriate
- Legal Notices; however, if the Program has interactive interfaces that do
- not display Appropriate Legal Notices, your work need not make them do so.
-
-A compilation of a covered work with other separate and independent works,
-which are not by their nature extensions of the covered work, and which are not
-combined with it such as to form a larger program, in or on a volume of a
-storage or distribution medium, is called an “aggregate” if the compilation and
-its resulting copyright are not used to limit the access or legal rights of the
-compilation's users beyond what the individual works permit. Inclusion of a
-covered work in an aggregate does not cause this License to apply to the other
-parts of the aggregate.
-
-Conveying Non-Source Forms.
-
-You may convey a covered work in object code form under the terms of sections 4
-and 5, provided that you also convey the machine-readable Corresponding Source
-under the terms of this License, in one of these ways:
-
-a) Convey the object code in, or embodied in, a physical product (including a
- physical distribution medium), accompanied by the Corresponding Source fixed
- on a durable physical medium customarily used for software interchange.
-
-b) Convey the object code in, or embodied in, a physical product (including a
- physical distribution medium), accompanied by a written offer, valid for at
- least three years and valid for as long as you offer spare parts or customer
- support for that product model, to give anyone who possesses the object code
- either (1) a copy of the Corresponding Source for all the software in the
- product that is covered by this License, on a durable physical medium
- customarily used for software interchange, for a price no more than your
- reasonable cost of physically performing this conveying of source, or (2)
- access to copy the Corresponding Source from a network server at no charge.
-
-c) Convey individual copies of the object code with a copy of the written offer
- to provide the Corresponding Source. This alternative is allowed only
- occasionally and noncommercially, and only if you received the object code
- with such an offer, in accord with subsection 6b.
-
-d) Convey the object code by offering access from a designated place (gratis or
- for a charge), and offer equivalent access to the Corresponding Source in
- the same way through the same place at no further charge. You need not
- require recipients to copy the Corresponding Source along with the object
- code. If the place to copy the object code is a network server, the
- Corresponding Source may be on a different server (operated by you or a
- third party) that supports equivalent copying facilities, provided you
- maintain clear directions next to the object code saying where to find the
- Corresponding Source. Regardless of what server hosts the Corresponding
- Source, you remain obligated to ensure that it is available for as long as
- needed to satisfy these requirements.
-
-e) Convey the object code using peer-to-peer transmission, provided
- you inform other peers where the object code and Corresponding
- Source of the work are being offered to the general public at no
- charge under subsection 6d.
-
-A separable portion of the object code, whose source code is excluded from the
-Corresponding Source as a System Library, need not be included in conveying the
-object code work.
-
-A “User Product” is either (1) a “consumer product”, which means any tangible
-personal property which is normally used for personal, family, or household
-purposes, or (2) anything designed or sold for incorporation into a dwelling.
-In determining whether a product is a consumer product, doubtful cases shall
-be resolved in favor of coverage. For a particular product received by a
-particular user, “normally used” refers to a typical or common use of that
-class of product, regardless of the status of the particular user or of the way
-in which the particular user actually uses, or expects or is expected to use,
-the product. A product is a consumer product regardless of whether the product
-has substantial commercial, industrial or non-consumer uses, unless such uses
-represent the only significant mode of use of the product.
-
-“Installation Information” for a User Product means any methods, procedures,
-authorization keys, or other information required to install and execute
-modified versions of a covered work in that User Product from a modified
-version of its Corresponding Source. The information must suffice to ensure
-that the continued functioning of the modified object code is in no case
-prevented or interfered with solely because modification has been made.
-
-If you convey an object code work under this section in, or with, or
-specifically for use in, a User Product, and the conveying occurs as part of a
-transaction in which the right of possession and use of the User Product is
-transferred to the recipient in perpetuity or for a fixed term (regardless of
-how the transaction is characterized), the Corresponding Source conveyed under
-this section must be accompanied by the Installation Information. But this
-requirement does not apply if neither you nor any third party retains the
-ability to install modified object code on the User Product (for example, the
-work has been installed in ROM).
-
-The requirement to provide Installation Information does not include a
-requirement to continue to provide support service, warranty, or updates for a
-work that has been modified or installed by the recipient, or for the User
-Product in which it has been modified or installed. Access to a network may be
-denied when the modification itself materially and adversely affects the
-operation of the network or violates the rules and protocols for communication
-across the network.
-
-Corresponding Source conveyed, and Installation Information provided, in accord
-with this section must be in a format that is publicly documented (and with an
-implementation available to the public in source code form), and must require
-no special password or key for unpacking, reading or copying.
-
-Additional Terms.
-
-“Additional permissions” are terms that supplement the terms of this License by
-making exceptions from one or more of its conditions. Additional permissions
-that are applicable to the entire Program shall be treated as though they were
-included in this License, to the extent that they are valid under applicable
-law. If additional permissions apply only to part of the Program, that part may
-be used separately under those permissions, but the entire Program remains
-governed by this License without regard to the additional permissions.
-
-When you convey a copy of a covered work, you may at your option remove any
-additional permissions from that copy, or from any part of it. (Additional
-permissions may be written to require their own removal in certain cases when
-you modify the work.) You may place additional permissions on material, added
-by you to a covered work, for which you have or can give appropriate copyright
-permission.
-
-Notwithstanding any other provision of this License, for material you add to a
-covered work, you may (if authorized by the copyright holders of that material)
-supplement the terms of this License with terms:
-
-a) Disclaiming warranty or limiting liability differently from the terms of
- sections 15 and 16 of this License; or
-
-b) Requiring preservation of specified reasonable legal notices or author
- attributions in that material or in the Appropriate Legal Notices displayed
- by works containing it; or
-
-c) Prohibiting misrepresentation of the origin of that material, or requiring
- that modified versions of such material be marked in reasonable ways as
- different from the original version; or
-
-d) Limiting the use for publicity purposes of names of licensors or authors of
- the material; or
-
-e) Declining to grant rights under trademark law for use of some
- trade names, trademarks, or service marks; or
-
-f) Requiring indemnification of licensors and authors of that material by
- anyone who conveys the material (or modified versions of it) with
- contractual assumptions of liability to the recipient, for any liability
- that these contractual assumptions directly impose on those licensors and
- authors.
-
-All other non-permissive additional terms are considered “further restrictions”
-within the meaning of section 10. If the Program as you received it, or any
-part of it, contains a notice stating that it is governed by this License along
-with a term that is a further restriction, you may remove that term. If a
-license document contains a further restriction but permits relicensing or
-conveying under this License, you may add to a covered work material governed
-by the terms of that license document, provided that the further restriction
-does not survive such relicensing or conveying.
-
-If you add terms to a covered work in accord with this section, you must place,
- in the relevant source files, a statement of the additional terms that apply
- to those files, or a notice indicating where to find the applicable terms.
-
-Additional terms, permissive or non-permissive, may be stated in the form of a
-separately written license, or stated as exceptions; the above requirements
-apply either way.
-
-Termination.
-
-You may not propagate or modify a covered work except as expressly provided
-under this License. Any attempt otherwise to propagate or modify it is void,
-and will automatically terminate your rights under this License (including any
-patent licenses granted under the third paragraph of section 11).
-
-However, if you cease all violation of this License, then your license from a
-particular copyright holder is reinstated (a) provisionally, unless and until
-the copyright holder explicitly and finally terminates your license, and (b)
-permanently, if the copyright holder fails to notify you of the violation by
-some reasonable means prior to 60 days after the cessation.
-
-Moreover, your license from a particular copyright holder is reinstated
-permanently if the copyright holder notifies you of the violation by some
-reasonable means, this is the first time you have received notice of violation
-of this License (for any work) from that copyright holder, and you cure the
-violation prior to 30 days after your receipt of the notice.
-
-Termination of your rights under this section does not terminate the licenses
-of parties who have received copies or rights from you under this License. If
-your rights have been terminated and not permanently reinstated, you do not
-qualify to receive new licenses for the same material under section 10.
-
-Acceptance Not Required for Having Copies.
-
-You are not required to accept this License in order to receive or run a copy
-of the Program. Ancillary propagation of a covered work occurring solely as a
-consequence of using peer-to-peer transmission to receive a copy likewise does
-not require acceptance. However, nothing other than this License grants you
-permission to propagate or modify any covered work. These actions infringe
-copyright if you do not accept this License. Therefore, by modifying or
-propagating a covered work, you indicate your acceptance of this License to do
-so.
-
-Automatic Licensing of Downstream Recipients.
-
-Each time you convey a covered work, the recipient automatically receives a
-license from the original licensors, to run, modify and propagate that work,
-subject to this License. You are not responsible for enforcing compliance by
-third parties with this License.
-
-An “entity transaction” is a transaction transferring control of an
-organization, or substantially all assets of one, or subdividing an
-organization, or merging organizations. If propagation of a covered work
-results from an entity transaction, each party to that transaction who receives
-a copy of the work also receives whatever licenses to the work the party's
-predecessor in interest had or could give under the previous paragraph, plus a
-right to possession of the Corresponding Source of the work from the
-predecessor in interest, if the predecessor has it or can get it with
-reasonable efforts.
-
-You may not impose any further restrictions on the exercise of the rights
-granted or affirmed under this License. For example, you may not impose a
-license fee, royalty, or other charge for exercise of rights granted under this
-License, and you may not initiate litigation (including a cross-claim or
-counterclaim in a lawsuit) alleging that any patent claim is infringed by
-making, using, selling, offering for sale, or importing the Program or any
-portion of it.
-
-Patents.
-
-A “contributor” is a copyright holder who authorizes use under this License of
-the Program or a work on which the Program is based. The work thus licensed is
-called the contributor's “contributor version”.
-
-A contributor's “essential patent claims” are all patent claims owned or
-controlled by the contributor, whether already acquired or hereafter acquired,
-that would be infringed by some manner, permitted by this License, of making,
-using, or selling its contributor version, but do not include claims that would
-be infringed only as a consequence of further modification of the contributor
-version. For purposes of this definition, “control” includes the right to grant
-patent sublicenses in a manner consistent with the requirements of this
-License.
-
-Each contributor grants you a non-exclusive, worldwide, royalty-free patent
-license under the contributor's essential patent claims, to make, use, sell,
-offer for sale, import and otherwise run, modify and propagate the contents of
-its contributor version.
-
-In the following three paragraphs, a “patent license” is any express agreement
-or commitment, however denominated, not to enforce a patent (such as an express
-permission to practice a patent or covenant not to sue for patent infringement).
-To “grant” such a patent license to a party means to make such an agreement or
-commitment not to enforce a patent against the party.
-
-If you convey a covered work, knowingly relying on a patent license, and the
-Corresponding Source of the work is not available for anyone to copy, free of
-charge and under the terms of this License, through a publicly available
-network server or other readily accessible means, then you must either (1)
-cause the Corresponding Source to be so available, or (2) arrange to deprive
-yourself of the benefit of the patent license for this particular work, or (3)
-arrange, in a manner consistent with the requirements of this License, to
-extend the patent license to downstream recipients. “Knowingly relying” means
-you have actual knowledge that, but for the patent license, your conveying the
-covered work in a country, or your recipient's use of the covered work in a
-country, would infringe one or more identifiable patents in that country that
-you have reason to believe are valid.
-
-If, pursuant to or in connection with a single transaction or arrangement, you
-convey, or propagate by procuring conveyance of, a covered work, and grant a
-patent license to some of the parties receiving the covered work authorizing
-them to use, propagate, modify or convey a specific copy of the covered work,
-then the patent license you grant is automatically extended to all recipients
-of the covered work and works based on it.
-
-A patent license is “discriminatory” if it does not include within the scope of
-its coverage, prohibits the exercise of, or is conditioned on the non-exercise
-of one or more of the rights that are specifically granted under this License.
-You may not convey a covered work if you are a party to an arrangement with a
-third party that is in the business of distributing software, under which you
-make payment to the third party based on the extent of your activity of
-conveying the work, and under which the third party grants, to any of the
-parties who would receive the covered work from you, a discriminatory patent
-license (a) in connection with copies of the covered work conveyed by you (or
-copies made from those copies), or (b) primarily for and in connection with
-specific products or compilations that contain the covered work, unless you
-entered into that arrangement, or that patent license was granted, prior to
-28 March 2007.
-
-Nothing in this License shall be construed as excluding or limiting any implied
-license or other defenses to infringement that may otherwise be available to
-you under applicable patent law.
-
-No Surrender of Others' Freedom.
-
-If conditions are imposed on you (whether by court order, agreement or
-otherwise) that contradict the conditions of this License, they do not excuse
-you from the conditions of this License. If you cannot convey a covered work so
-as to satisfy simultaneously your obligations under this License and any other
-pertinent obligations, then as a consequence you may not convey it at all. For
-example, if you agree to terms that obligate you to collect a royalty for
-further conveying from those to whom you convey the Program, the only way you
-could satisfy both those terms and this License would be to refrain entirely
-from conveying the Program.
-
-Use with the GNU Affero General Public License.
-
-Notwithstanding any other provision of this License, you have permission to
-link or combine any covered work with a work licensed under version 3 of the
-GNU Affero General Public License into a single combined work, and to convey
-the resulting work. The terms of this License will continue to apply to the
-part which is the covered work, but the special requirements of the GNU Affero
-General Public License, section 13, concerning interaction through a network
-will apply to the combination as such.
-
-Revised Versions of this License.
-
-The Free Software Foundation may publish revised and/or new versions of the GNU
-General Public License from time to time. Such new versions will be similar in
-spirit to the present version, but may differ in detail to address new problems
-or concerns.
-
-Each version is given a distinguishing version number. If the Program specifies
-that a certain numbered version of the GNU General Public License “or any later
-version” applies to it, you have the option of following the terms and
-conditions either of that numbered version or of any later version published by
-the Free Software Foundation. If the Program does not specify a version number
-of the GNU General Public License, you may choose any version ever published by
-the Free Software Foundation.
-
-If the Program specifies that a proxy can decide which future versions of the
-GNU General Public License can be used, that proxy's public statement of
-acceptance of a version permanently authorizes you to choose that version for
-the Program.
-
-Later license versions may give you additional or different permissions.
-However, no additional obligations are imposed on any author or copyright
-holder as a result of your choosing to follow a later version.
-
-Disclaimer of Warranty.
-
-THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE
-LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER
-PARTIES PROVIDE THE PROGRAM “AS IS” WITHOUT WARRANTY OF ANY KIND, EITHER
-EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE
-QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE
-DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR
-CORRECTION.
-
-Limitation of Liability.
-
-IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY
-COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS
-PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL,
-INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE
-THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED
-INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE
-PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY
-HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
-
-Interpretation of Sections 15 and 16.
-
-If the disclaimer of warranty and limitation of liability provided above cannot
-be given local legal effect according to their terms, reviewing courts shall
-apply local law that most closely approximates an absolute waiver of all civil
-liability in connection with the Program, unless a warranty or assumption of
-liability accompanies a copy of the Program in return for a fee.
-
-END OF TERMS AND CONDITIONS
-
-How to Apply These Terms to Your New Programs
-
-If you develop a new program, and you want it to be of the greatest possible
-use to the public, the best way to achieve this is to make it free software
-which everyone can redistribute and change under these terms.
-
-To do so, attach the following notices to the program. It is safest to attach
-them to the start of each source file to most effectively state the exclusion
-of warranty; and each file should have at least the “copyright” line and a
-pointer to where the full notice is found.
-
-
-Copyright (C)
-
-This program is free software: you can redistribute it and/or modify it under
-the terms of the GNU General Public License as published by the Free Software
-Foundation, either version 3 of the License, or (at your option) any later
-version.
-
-This program is distributed in the hope that it will be useful, but WITHOUT ANY
-WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
-PARTICULAR PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License along with
-this program. If not, see <https://www.gnu.org/licenses/>.
-
-Also add information on how to contact you by electronic and paper mail.
-
-If the program does terminal interaction, make it output a short notice like
-this when it starts in an interactive mode:
-
- <program> Copyright (C) <year> <name of author> This program comes with
-ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and
-you are welcome to redistribute it under certain conditions; type `show c' for
-details.
-
-The hypothetical commands `show w' and `show c' should show the appropriate parts
-of the General Public License. Of course, your program's commands might be
-different; for a GUI interface, you would use an “about box”.
-
-You should also get your employer (if you work as a programmer) or school, if
-any, to sign a “copyright disclaimer” for the program, if necessary. For more
-information on this, and how to apply and follow the GNU GPL, see
-http://www.gnu.org/licenses/.
-
-The GNU General Public License does not permit incorporating your program into
-proprietary programs. If your program is a subroutine library, you may consider
-it more useful to permit linking proprietary applications with the library. If
-this is what you want to do, use the GNU Lesser General Public License instead
-of this License. But first, please read
-http://www.gnu.org/philosophy/why-not-lgpl.html.
diff --git a/README.md b/README.md
index 546f6b3..8fdce8a 100755
--- a/README.md
+++ b/README.md
@@ -2,116 +2,138 @@
### Tinfoil Chat
-[![Build Status](https://travis-ci.org/maqp/tfc.svg?branch=master)](https://travis-ci.org/maqp/tfc) [![Coverage Status](https://coveralls.io/repos/github/maqp/tfc/badge.svg?branch=master)](https://coveralls.io/github/maqp/tfc?branch=master)
+[![Build Status](https://travis-ci.org/maqp/tfc.svg?branch=master)](https://travis-ci.org/maqp/tfc)
+[![Coverage Status](https://coveralls.io/repos/github/maqp/tfc/badge.svg?branch=master)](https://coveralls.io/github/maqp/tfc?branch=master)
-Tinfoil Chat (TFC) is a high assurance encrypted messaging system that
-operates on top of existing IM clients. The
-[free and open source software](https://www.gnu.org/philosophy/free-sw.html)
-is used together with free hardware to protect users from
+Tinfoil Chat (TFC) is a
+[FOSS](https://www.gnu.org/philosophy/free-sw.html)+[FHD](https://www.gnu.org/philosophy/free-hardware-designs.en.html)
+messaging system that relies on high assurance hardware architecture to protect
+users from
[passive eavesdropping](https://en.wikipedia.org/wiki/Upstream_collection),
[active MITM attacks](https://en.wikipedia.org/wiki/Man-in-the-middle_attack)
-and [remote CNE](https://www.youtube.com/watch?v=3euYBPlX9LM) practised by
-organized crime and nation state attackers.
-
-[XSalsa20](https://cr.yp.to/snuffle/salsafamily-20071225.pdf)
-encryption and
-[Poly1305-AES](https://cr.yp.to/mac/poly1305-20050329.pdf)
-MACs provide
-[end-to-end encrypted](https://en.wikipedia.org/wiki/End-to-end_encryption)
-communication with
-[deniable authentication](https://en.wikipedia.org/wiki/Deniable_encryption#Deniable_authentication):
-Symmetric keys are either pre-shared, or exchanged using
-[X25519](https://cr.yp.to/ecdh/curve25519-20060209.pdf),
-the base-10 fingerprints of which are verified via out-of-band channel. TFC provides
-per-packet forward secrecy with
-[hash ratchet](https://en.wikipedia.org/wiki/Double_Ratchet_Algorithm)
-the KDF of which chains
-[SHA3-256](http://keccak.noekeon.org/Keccak-implementation-3.2.pdf),
-[Blake2s](https://blake2.net/blake2_20130129.pdf)
and
-[SHA256](http://www.iwar.org.uk/comsec/resources/cipher/sha256-384-512.pdf).
+[remote exfiltration](https://www.youtube.com/watch?v=3euYBPlX9LM)
+(=hacking) practised by organized crime and nation state actors.
-The software is used in hardware configuration that provides strong endpoint
-security: Encryption and decryption are separated on two isolated computers.
-The split
+##### State-of-the-art cryptography
+TFC uses
+[XChaCha20](https://cr.yp.to/chacha/chacha-20080128.pdf)-[Poly1305](https://cr.yp.to/mac/poly1305-20050329.pdf)
+[end-to-end encryption](https://en.wikipedia.org/wiki/End-to-end_encryption)
+with
+[deniable authentication](https://en.wikipedia.org/wiki/Deniable_encryption#Deniable_authentication).
+The symmetric keys are either
+[pre-shared](https://en.wikipedia.org/wiki/Pre-shared_key),
+or exchanged using
+[X448](https://eprint.iacr.org/2015/625.pdf),
+the base-10
+[fingerprints](https://en.wikipedia.org/wiki/Public_key_fingerprint)
+of which are verified via out-of-band channel. TFC provides per-message
+[forward secrecy](https://en.wikipedia.org/wiki/Forward_secrecy)
+with
+[BLAKE2b](https://blake2.net/blake2.pdf)
+based
+[hash ratchet](https://en.wikipedia.org/wiki/Double_Ratchet_Algorithm).
+All persistent user data is encrypted locally using XChaCha20-Poly1305, the key
+of which is derived from password and salt using
+[Argon2d](https://github.com/P-H-C/phc-winner-argon2/blob/master/argon2-specs.pdf).
+Key generation of TFC relies on Linux kernel's
+[getrandom()](https://manpages.debian.org/testing/manpages-dev/getrandom.2.en.html),
+a syscall for its ChaCha20 based CSPRNG.
+
+##### First messaging system with endpoint security
+The software is used in hardware configuration that provides strong
+[endpoint security](https://en.wikipedia.org/wiki/Endpoint_security):
+Encryption and decryption are separated on two isolated computers. The split
[TCB](https://en.wikipedia.org/wiki/Trusted_computing_base)
-interacts with a third, networked computer through unidirectional
-[serial](https://en.wikipedia.org/wiki/Universal_asynchronous_receiver/transmitter)
-interfaces. Direction of data flow is enforced with free hardware design
-[data diodes](https://en.wikipedia.org/wiki/Unidirectional_network);
-Lack of bidirectional channels to isolated computers prevents insertion of malware
-to the encrypting computer and exfiltration of keys and plaintexts from the
-decrypting computer -- even with exploits against
-[zero-day vulnerabilities](https://en.wikipedia.org/wiki/Zero-day_(computing))
-in software and operating systems running on the TCB halves.
+interacts with a third, Networked Computer, through unidirectional
+[serial](https://en.wikipedia.org/wiki/Universal_asynchronous_receiver/transmitter)
+interfaces. The direction of data flow is enforced with free hardware design
+[data diodes](https://en.wikipedia.org/wiki/Unidirectional_network),
+technology the certified implementations of which are typically found in
+critical infrastructure protection and government networks where classification
+level of data varies.
-TFC supports multiple IM accounts per user to hide the social graph of
-communicating parties, even during end-to-end encrypted group conversations.
-
-TFC allows a group or two parties to defeat metadata about quantity and
-schedule of communication with traffic masking, where messages and background
-file transmission is inserted into a constant stream of encrypted noise traffic.
+##### Anonymous by design
+TFC routes all communication through next generation
+[Tor](https://www.torproject.org/about/overview.html.en)
+([v3](https://trac.torproject.org/projects/tor/wiki/doc/NextGenOnions))
+[Onion Services](https://www.torproject.org/docs/onion-services)
+to hide metadata about real-life identity and geolocation of users, when and how
+much they communicate, the social graph of the users and the fact TFC is
+running. TFC also features a traffic masking mode that hides the type, quantity,
+and schedule of communication, even if the Networked Computer is compromised.
### How it works
-![](https://cs.helsinki.fi/u/oottela/tfcwiki/tfc_overview.jpg)
+![](https://www.cs.helsinki.fi/u/oottela/wiki/readme/how_it_works.png)
+[System overview](https://www.cs.helsinki.fi/u/oottela/wiki/readme/how_it_works.png)
-TFC uses three computers per endpoint. Alice enters her messages and commands
-to Transmitter program running on her transmitter computer (TxM), a TCB
-separated from network. The Transmitter program encrypts and signs plaintext
-data and relays the ciphertext from TxM to her networked computer (NH) trough a
-serial interface and a hardware data diode.
+TFC uses three computers per endpoint: Source Computer, Networked Computer, and
+Destination Computer.
-Messages and commands received to NH are relayed to IM client (Pidgin or
-Finch), and to Alice's receiver computer (RxM) via another serial interface and
-data diode. The Receiver program on Alice's RxM authenticates, decrypts and
-processes the received messages and commands.
+Alice enters messages and commands to Transmitter Program running on her Source
+Computer. Transmitter Program encrypts and signs plaintext data and relays the
+ciphertexts from Source Computer to her Networked Computer through a serial
+interface and a hardware data diode.
-The IM client sends the packet either directly or through Tor network to IM
-server, that then forwards it directly (or again through Tor) to Bob.
+Relay Program on Alice's Networked Computer relays commands and copies of
+outgoing messages to her Destination Computer via the serial interface and data
+diode. Receiver Program on Alice's Destination Computer authenticates, decrypts
+and processes the received message/command.
-IM client on Bob's NH forwards packet to nh.py plugin program, that then
-forwards it to Bob's RxM (again through serial interface and data diode).
-Bob's Receiver program on his RxM then authenticates, decrypts, and processes
-the packet.
+Alice's Relay Program shares messages and files to Bob over Tor Onion Service.
+The web client of Bob's Relay Program fetches the ciphertext from Alice's Onion
+Service and forwards it to his Destination Computer (again through a serial
+interface and data diode). Bob's Receiver Program then authenticates, decrypts
+and processes the received message/file.
-When Bob responds, he will type the message to his transmitter computer and in
-the end, Alice reads the message from her receiver computer.
+When Bob responds, he will type his message to his Source Computer, and after a
+mirrored process, Alice reads the message from her Destination Computer.
-### Why keys can not be exfiltrated
+### Why keys and plaintexts cannot be exfiltrated
-1. Malware that exploits an unknown vulnerability in RxM can infiltrate the
-system, but is unable to exfiltrate keys or plaintexts, as data diode prevents
-all outbound traffic.
+TFC is designed to combine the
+[classical and alternative data diode models](https://en.wikipedia.org/wiki/Unidirectional_network#Applications)
+to provide hardware enforced endpoint security:
-2. Malware can not infiltrate TxM as data diode prevents all inbound traffic.
-The only data input to TxM is the public key of contact (e.g.
-`5J8 C2h AVE Wv2 cGz oSd oQv Nkm 9tu ABP qwt Kz8 ou4 xvA HGx HUh sJC`),
-which is manually typed by the user.
+1. The Destination Computer uses the classical data diode model. It is designed
+to receive data from the insecure Networked Computer while preventing the export
+of any data back to the Networked Computer. Not even malware on Destination
+Computer can exfiltrate keys or plaintexts as the data diode prevents all
+outbound traffic.
-3. The NH is assumed to be compromised: all sensitive data that passes through
-it is always encrypted and signed.
+2. The Source Computer uses the alternative data diode model that is designed to
+allow the export of data to the Networked Computer. The data diode protects the
+Source Computer from attacks by physically preventing all inbound traffic. To
+allow key exchanges, the short elliptic-curve public keys are input manually by
+the user.
-![](https://cs.helsinki.fi/u/oottela/tfcwiki/tfc_attacks.jpg)
+3. The Networked Computer is assumed to be compromised. All sensitive data that
+passes through it is encrypted and signed with no exceptions.
+![](https://www.cs.helsinki.fi/u/oottela/wiki/readme/attacks.png)
+[Exfiltration security](https://www.cs.helsinki.fi/u/oottela/wiki/readme/attacks.png)
+
+#### Data diode
Optical repeater inside the
-[optocoupler](https://en.wikipedia.org/wiki/Opto-isolator)
-of the data diode (below) enforces direction of data transmission with the laws
-of physics.
+[optocouplers](https://en.wikipedia.org/wiki/Opto-isolator)
+of the data diode (below) enforce direction of data transmission with the
+fundamental laws of physics.
-![](https://www.cs.helsinki.fi/u/oottela/tfcwiki/ttl_dd_pb/23.jpg)
+![](https://www.cs.helsinki.fi/u/oottela/wiki/readme/readme_dd.jpg)
+[TFC data diode](https://www.cs.helsinki.fi/u/oottela/wiki/readme/readme_dd.jpg)
### Supported Operating Systems
-#### TxM and RxM
-- *buntu 17.04 (64-bit)
+#### Source/Destination Computer
+- *buntu 18.04 (or newer)
-#### NH
-- Tails 3.1
-- *buntu 17.04 (64-bit)
+#### Networked Computer
+- Tails (Debian Buster or newer)
+- *buntu 18.04 (or newer)
### More information
@@ -127,4 +149,4 @@ Software
[Installation](https://github.com/maqp/tfc/wiki/Installation)
[How to use](https://github.com/maqp/tfc/wiki/How-to-use)
-[Update Log](https://github.com/maqp/tfc/wiki/Update-Log)
+[Update log](https://github.com/maqp/tfc/wiki/Update-Log)
diff --git a/dd.py b/dd.py
index 1ec8317..06d7a27 100644
--- a/dd.py
+++ b/dd.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,7 +16,7 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see <http://www.gnu.org/licenses/>.
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
import multiprocessing.connection
@@ -26,55 +27,77 @@ import time
from multiprocessing import Process, Queue
from typing import Tuple
-from src.common.misc import get_terminal_height, ignored
-from src.common.output import c_print, clear_screen
+from src.common.misc import get_terminal_height, get_terminal_width, ignored, monitor_processes
+from src.common.output import clear_screen
from src.common.statics import *
-def draw_frame(argv: str, message: str, high: bool) -> None:
- """Draw data diode animation frame.
+def draw_frame(argv: str, # Arguments for simulator position/orientation
+ message: str, # Status message to print
+ high: bool = False # Determines the signal's state (high/low)
+ ) -> None:
+ """Draw a data diode animation frame."""
+ l, r, blink, arrow = dict(scnclr=('Tx', 'Rx', '>', '→'),
+ scncrl=('Rx', 'Tx', '<', '←'),
+ ncdclr=('Rx', 'Tx', '<', '←'),
+ ncdcrl=('Tx', 'Rx', '>', '→'))[argv]
- :param argv: Arguments for simulator position/orientation
- :param message: Status message to print
- :param high: Determines signal's state (high/low)
- :return: None
- """
- l, r, symbol, arrow = dict(txnhlr=('Tx', 'Rx', '>', '→'),
- nhrxrl=('Tx', 'Rx', '>', '→'),
- txnhrl=('Rx', 'Tx', '<', '←'),
- nhrxlr=('Rx', 'Tx', '<', '←'))[argv]
-
- arrow = ' ' if message == 'Idle' else arrow
- blink = symbol if high else ' '
+ arrow = arrow if message != 'Idle' else ' '
+ blink = blink if high else ' '
offset_from_center = 4
print(((get_terminal_height() // 2) - offset_from_center) * '\n')
+ terminal_width = get_terminal_width()
+
+ def c_print(msg: str) -> None:
+ """Print string in the center of the screen."""
+ print(msg.center(terminal_width))
+
c_print(message)
c_print(arrow)
- c_print( "─────╮ " + ' ' + " ╭─────" )
- c_print(f" {l} │ " + blink + f" │ {r} ")
- c_print( "─────╯ " + ' ' + " ╰─────" )
+ c_print( "────╮ " + ' ' + " ╭────" )
+ c_print(f" {l} │ " + blink + f" │ {r} ")
+ c_print( "────╯ " + ' ' + " ╰────" )
def animate(argv: str) -> None:
- """Animate the data diode."""
+ """Animate the data diode transmission indicator."""
animation_length = 16
for i in range(animation_length):
clear_screen()
draw_frame(argv, 'Data flow', high=(i % 2 == 0))
time.sleep(0.04)
clear_screen()
- draw_frame(argv, 'Idle', high=False)
+ draw_frame(argv, 'Idle')
-def tx_loop(io_queue: 'Queue', output_socket: int, argv: str) -> None:
- """Loop that sends packets to receiving program."""
- draw_frame(argv, 'Idle', high=False)
+def rx_loop(io_queue: 'Queue', # Queue through which to push datagrams through
+ input_socket: int # Socket number for Transmitter/Relay Program
+ ) -> None:
+ """Read datagrams from a transmitting program."""
+ listener = multiprocessing.connection.Listener((LOCALHOST, input_socket))
+ interface = listener.accept()
while True:
try:
- interface = multiprocessing.connection.Client(('localhost', output_socket))
+ io_queue.put(interface.recv())
+ except KeyboardInterrupt:
+ pass
+ except EOFError:
+ sys.exit(0)
+
+
+def tx_loop(io_queue: 'Queue', # Queue through which to push datagrams through
+ output_socket: int, # Socket number for Relay/Receiver Program
+ argv: str # Arguments for simulator position/orientation
+ ) -> None:
+ """Send queued datagrams to a receiving program."""
+ draw_frame(argv, 'Idle')
+
+ while True:
+ try:
+ interface = multiprocessing.connection.Client((LOCALHOST, output_socket))
break
except socket.error:
time.sleep(0.01)
@@ -87,29 +110,14 @@ def tx_loop(io_queue: 'Queue', output_socket: int, argv: str) -> None:
interface.send(io_queue.get())
-def rx_loop(io_queue: 'Queue', input_socket: int) -> None:
- """Loop that reads packets from transmitting program."""
- listener = multiprocessing.connection.Listener(('localhost', input_socket))
- interface = listener.accept()
-
- while True:
- time.sleep(0.01)
- try:
- io_queue.put(interface.recv())
- except KeyboardInterrupt:
- pass
- except EOFError:
- sys.exit(0)
-
-
def process_arguments() -> Tuple[str, int, int]:
"""Load simulator settings from command line arguments."""
try:
argv = str(sys.argv[1])
- input_socket, output_socket = dict(txnhlr=(TXM_DD_LISTEN_SOCKET, NH_LISTEN_SOCKET),
- txnhrl=(TXM_DD_LISTEN_SOCKET, NH_LISTEN_SOCKET),
- nhrxlr=(RXM_DD_LISTEN_SOCKET, RXM_LISTEN_SOCKET),
- nhrxrl=(RXM_DD_LISTEN_SOCKET, RXM_LISTEN_SOCKET))[argv]
+ input_socket, output_socket = dict(scnclr=(SRC_DD_LISTEN_SOCKET, RP_LISTEN_SOCKET),
+ scncrl=(SRC_DD_LISTEN_SOCKET, RP_LISTEN_SOCKET),
+ ncdclr=(DST_DD_LISTEN_SOCKET, DST_LISTEN_SOCKET),
+ ncdcrl=(DST_DD_LISTEN_SOCKET, DST_LISTEN_SOCKET))[argv]
return argv, input_socket, output_socket
@@ -117,32 +125,47 @@ def process_arguments() -> Tuple[str, int, int]:
clear_screen()
print("\nUsage: python3.6 dd.py [OPTION]\n\n"
"\nMandatory arguments"
- "\n txnhlr Simulate data diode between TxM and NH (left to right)"
- "\n txnhrl Simulate data diode between TxM and NH (right to left)"
- "\n nhrxlr Simulate data diode between NH and RxM (left to right)"
- "\n nhrxrl Simulate data diode between NH and RxM (right to left)")
+ "\n Argument Simulate data diodes between..."
+ "\n scnclr Source Computer and Networked Computer (left to right)"
+ "\n scncrl Source Computer and Networked Computer (right to left)"
+ "\n ncdclr Networked Computer and Destination Computer (left to right)"
+ "\n ncdcrl Networked Computer and Destination Computer (right to left)")
sys.exit(1)
def main() -> None:
- """Read argument from command line and launch processes."""
- time.sleep(0.5)
+ """
+ Read argument from the command line and launch the data diode simulator.
+
+ This application is the data diode simulator program used to
+ visualize data transfer inside the data diode #1 between the Source
+ Computer and the Networked Computer, or data transfer inside the
+ data diode #2 between the Networked Computer and the Destination
+ Computer. The local testing terminal multiplexer configurations that
+ use data diode simulators run two instances of this program.
+
+ The visualization is done with an indicator ('<' or '>') that blinks
+ when data passes from one program to another. The data diode
+ simulator does not provide any of the security properties to the
+ endpoint that the hardware data diodes do.
+
+ The visualization is designed to make data transfer between programs
+ slower than is the case with actual serial interfaces. This allows
+ the user to track the movement of data from one program to another
+ with their eyes.
+ """
+ time.sleep(0.5) # Wait for terminal multiplexer size to stabilize
+
argv, input_socket, output_socket = process_arguments()
- io_queue = Queue()
- process_list = [Process(target=tx_loop, args=(io_queue, output_socket, argv)),
- Process(target=rx_loop, args=(io_queue, input_socket ))]
+ io_queue = Queue() # type: Queue
+ process_list = [Process(target=rx_loop, args=(io_queue, input_socket )),
+ Process(target=tx_loop, args=(io_queue, output_socket, argv))]
for p in process_list:
p.start()
- while True:
- with ignored(EOFError, KeyboardInterrupt):
- time.sleep(0.1)
- if not all([p.is_alive() for p in process_list]):
- for p in process_list:
- p.terminate()
- sys.exit(0)
+ monitor_processes(process_list, NC, {EXIT_QUEUE: Queue()}, error_exit_code=0)
if __name__ == '__main__':
diff --git a/install.sh b/install.sh
index 40079e4..05b4e2d 100644
--- a/install.sh
+++ b/install.sh
@@ -1,6 +1,7 @@
#!/usr/bin/env bash
-# Copyright (C) 2013-2017 Markus Ottela
+# TFC - Onion-routed, endpoint secure messaging system
+# Copyright (C) 2013-2019 Markus Ottela
#
# This file is part of TFC.
#
@@ -13,180 +14,497 @@
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
-# along with TFC. If not, see .
+# along with TFC. If not, see .
dl_verify () {
- if ! [ -z "$2" ]; then
- mkdir -p $2 2>/dev/null
- fi
+ # Download a TFC file from the GitHub repository and authenticate it
+ # by comparing its SHA512 hash against the hash pinned in this
+ # installer file.
- wget https://raw.githubusercontent.com/maqp/tfc/master/$2$3 -q -O $2$3
+ torify wget https://raw.githubusercontent.com/maqp/tfc/master/$2$3 -q
- if sha512sum $2$3 | grep -Eo '^\w+' | cmp -s <(echo "$1")
- then
- echo Valid SHA512 hash for file $2$3
+ # Check the SHA512 hash of the downloaded file
+ if sha512sum $3 | grep -Eo '^\w+' | cmp -s <(echo "$1"); then
+ if [[ ${sudo_pwd} ]]; then
+ echo ${sudo_pwd} | sudo -S mkdir --parents /opt/tfc/$2
+ echo ${sudo_pwd} | sudo -S mv $3 /opt/tfc/$2
+ echo ${sudo_pwd} | sudo -S chown root /opt/tfc/$2$3
+ echo ${sudo_pwd} | sudo -S chmod 644 /opt/tfc/$2$3
else
- echo Error: $2$3 had invalid SHA512 hash
+ sudo mkdir --parents /opt/tfc/$2
+ sudo mv $3 /opt/tfc/$2
+ sudo chown root /opt/tfc/$2$3
+ sudo chmod 644 /opt/tfc/$2$3
+ fi
+
+ # Check the SHA512 hash of the moved file
+ if sha512sum /opt/tfc/$2$3 | grep -Eo '^\w+' | cmp -s <(echo "$1"); then
+ echo OK - Pinned SHA512 hash matched file /opt/tfc/$2$3
+ else
+ echo Error: /opt/tfc/$2$3 had invalid SHA512 hash
exit 1
+ fi
+
+ else
+ echo Error: $3 had invalid SHA512 hash
+ exit 1
fi
}
download_common () {
-dl_verify f91061cbff71f74b65f3dc1df5420d95a6a0f152e7fbda1aa8be1cccbad37966310b8e89f087a4bb0da8ef3b3e1d0af87c1210b2f930b0a43b90b59e74dfb1ed '' LICENSE.md
+dl_verify d361e5e8201481c6346ee6a886592c51265112be550d5224f1a7a6e116255c2f1ab8788df579d9b8372ed7bfd19bac4b6e70e00b472642966ab5b319b99a2686 '' LICENSE
+dl_verify 04bc1b0bf748da3f3a69fda001a36b7e8ed36901fa976d6b9a4da0847bb0dcaf20cdeb884065ecb45b80bd520df9a4ebda2c69154696c63d9260a249219ae68a '' LICENSE-3RD-PARTY
dl_verify 6d93d5513f66389778262031cbba95e1e38138edaec66ced278db2c2897573247d1de749cf85362ec715355c5dfa5c276c8a07a394fd5cf9b45c7a7ae6249a66 '' tfc.png
dl_verify cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e src/ __init__.py
dl_verify cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e src/common/ __init__.py
-dl_verify 094943d26876c8e494e4ffbdaff57557004150193876b6131010e86ce098f3178bf4b813710ac176f361c42582f9b91b96a6526461b39e9080873dc4f8fd792e src/common/ crypto.py
-dl_verify b4407e85a84d6e070b252f2c1c91268005d1ae6f69c9309723d2564d89b585e558fa80b7a8f1f52cc7d40e6595c3395cb5b68e3594af9d3e720a4a31ee8ba592 src/common/ db_contacts.py
-dl_verify 1cc269c493969ccf98ef51a89895d0f279efdcf0e5c89c2e2e384e0cc7f1fea425566bc619e02ff0ed5ab3d28c3bd9bad93652f08f088c2915cfc3d28cd00d76 src/common/ db_groups.py
-dl_verify 0c27e847aee638883928f4437adb8077de2a9444e7f06f48c45ec17e46bda43d8434934b8a04cfc6cfb4006554b5578cfba402f9a4ef96f7329a33d26fc0ac39 src/common/ db_keys.py
-dl_verify a38dd34dd681dc7993623921010d5e50ecee5192cd45e37db25a90ebe1e58c1a44864d95b11a607021773d6fe2578f1ac9eb287bfe6d5004a816f88770ab2b6b src/common/ db_logs.py
-dl_verify 1516e939ff34838586389b4f920d310d79d09baa7173ef3a5a844d5982d747f4a120be9ac977189fd94d6b97792bb5e52ec78478781ecaa55d2643226a05fdd0 src/common/ db_masterkey.py
-dl_verify c9ddfc92ec0043e3253950dd5d0b551bd5b92bc1c5b12aac14b99274e73d891dc10bc4081b9eae71f50af30a52d31507fef5ca309d9e6043aa93fd1dba5ff441 src/common/ db_settings.py
-dl_verify a3911e2e60e31154f40d548edc7470c1ed963f4225e0005eba9499dd7b752879b5fd65fae983b513b0d76523b5a7cd3b9744721213a27f4e844a6c797e7780a0 src/common/ encoding.py
-dl_verify f67c414fea948fd9b81bf8a53158b159085a34bae562d74cb2aa56fa317b65323b92a3a2d787377900cdecb65a1af8c224a9c7efd3969c377149284fd8a5882f src/common/ exceptions.py
-dl_verify be34431336fb68429a9f6ec8603b9a475104a2e0c15b3c4beac63a50d2c4024863d769c7b8d154872afc80a0b8d82635448c29c89b40edcc74595db28a7364d4 src/common/ gateway.py
-dl_verify aa1f94542fc78d4a9dd7212d02e4cf710ecbef1edc31662445e6682469e32059e5c3047fe512f751354c869fe9cb03bb3126ca987d7d1570ca9dacc1870ec759 src/common/ input.py
-dl_verify 27b562f0d9083aa906465e9ece1817a3a03cf6980a9262ad1fc855e1989491d331871d41530919ee1cd35db8564f54b3c44492b6ef90f2836a2c3a8404f5b3d2 src/common/ misc.py
-dl_verify 87e62112217263d4eda7d0a2a0cfdc0a3a698be136e650f3e32c7ffae7450706d059dc307abc40a1ce2b225c718ef34cca9ceaff1dcb51e28a2eb0972b9122cf src/common/ output.py
-dl_verify 20a7ec5b54834c54fdaf889bb6261165b630f0f801a7055cab347d26e58cdde16d27d84ff0b437a318bdc5a12c575ee6e7f1d7d3c3897140f3c5ef1f75019f94 src/common/ path.py
-dl_verify adea6b33ff23f9fe34539d38b3eb602b3a1075d92d9b8c5fdb4f12ebdf06fdcf6833edb3d94f91c4c0a2d160e0d152594aed776310cbd7cb5f2baf1579edd21d src/common/ reed_solomon.py
-dl_verify 71f9221ad6ac787f1ee391487d5f14a31518c496e164022b83eac293d8e717751f1240449206b8f7cdee06fa625f407a32ba2add823f63d4b5eda073eb141308 src/common/ statics.py
+dl_verify 003915a43670bbb3185e045de1d9cede67160d9da0a24a72650862e978106c451d94a2da4aa2e1d161315db7575251933b80881294f33f195531c75462bbcf9c src/common/ crypto.py
+dl_verify 0dfae6aa49c399983a990ca672e24eef9aa3ed7782686dd6c78ab8041023650e195304a07d40b934ea6f73bb46189529983de4093144ffdef40e718263232365 src/common/ db_contacts.py
+dl_verify 49ebf5dff5f34a373dccfaa0a8152e5bea11e6c3afc997d4c83d45b19351b62e0138555647c2ca796faf3cfc946f16d779af4ef9938b5ebffafa9ab155761696 src/common/ db_groups.py
+dl_verify 157bc8b1cfea322118b880d9bcc76b695405668af718276246c334f76226781a55779da4adcea571472bfcc7ced2cdd908d49e181268707b16ef71ff4c8ff833 src/common/ db_keys.py
+dl_verify 04cc3f2816b903d82e7baaa0bc9e406d7058c27537e8d07db67882a88deb4289fdff84150eb0dd1806721bf0ae1dd7f2757b916670eff6d1c122c660ac6d4ba2 src/common/ db_logs.py
+dl_verify 8d53e7348abf71aa1e054e5e852e171e58ed409c394213d97edc392f016c38ce43ed67090d3623aaa5a3f335992fd5b0681cfb6b3170b639c2fa0e80a62af3a4 src/common/ db_masterkey.py
+dl_verify 907c8997158a160b71bb964191848db42260a201e80b61133be1e7c7a650604792164499b85eaa4e84c58a7bc1598aff6ed10fda8442d60eb7f939d9de7f09c8 src/common/ db_onion.py
+dl_verify 83b2a6d36de528106202eebccc50ca412fc4f0b6d0e5566c8f5e42e25dd18c67ae1b65cf4c19d3824123c59a23d6258e8af739c3d9147f2be04813c7ede3761d src/common/ db_settings.py
+dl_verify 88f628cef1973cf0c9a9c8661a527570e01311efbbb6903760abec2b7ff6f4f42b3ff0e00c020d7b1912d66ac647b59b502942199334a83bb9d9dddc2a70c943 src/common/ encoding.py
+dl_verify 0e3e6a40928ab781dbbca03f2378a14d6390444b13e85392ea4bdfb8e58ae63f25d6f55b2637f6749e463844784ea9242db5d18291e891ee88776d4c14498060 src/common/ exceptions.py
+dl_verify 77b810f709739543dc40b1d1fbafb2a95d1c1772b929d3a4247c32e20b9bb40039c900ff4967c4b41118567463e59b7523fbbbf993b34251e46c60b8588f34ab src/common/ gateway.py
+dl_verify 42742ab0e0f6f61bd6b8d7d32644a98e526fa7fd0fd7ed8e790c25e365874d77a6611849c168649160b84774059675a066dd0711db59ed41ffc449790fb5ffa0 src/common/ input.py
+dl_verify 18efc508382167d3259c2eb2b8adcddda280c7dbc73e3b958a10cf4895c6eb8e7d4407bc4dc0ee1d0ab7cc974a609786649491874e72b4c31ad45b34d6e91be3 src/common/ misc.py
+dl_verify f47308851d7f239237ed2ae82dd1e7cf92921c83bfb89ad44d976ebc0c78db722203c92a93b8b668c6fab6baeca8db207016ca401d4c548f505972d9aaa76b83 src/common/ output.py
+dl_verify dc5fdd0f8262815386896e91e08324cda4aa27b5829d8f114e00128eb8e341c3d648ef2522f8eb5b413907975b1270771f60f9f6cdf0ddfaf01f288ba2768e14 src/common/ path.py
+dl_verify f80a9906b7de273cec5ca32df80048a70ea95e7877cd093e50f9a8357c2459e5cffb9257c15bf0b44b5475cdd5aaf94eeec903cc72114210e19ac12f139e87f3 src/common/ reed_solomon.py
+dl_verify 421fa2ec82f35a384baf5f5a4000afa4701e814ff28b4e8fa45478226cbf2f9272854ddf171def4ad7a489a77531457b9b6d62b68c4417b26b026e0ee6e521e8 src/common/ statics.py
}
-download_nh () {
-dl_verify 27a60f6f2c4024c41ae11669d6695662b47aa0b1efb21c6cc0af19a20ad66c6e8a34ac57db1558f1d5e84300d43618b72542bb80c3b0aa309fadeacaae14f339 '' nh.py
-dl_verify 569f3baa7ad3589f8c95f9ae1c00f2fe19e4031b04f31e68536fb924b19d433adfeff788a6eeb21a4960e44d2f575eaa7479de268ca2333781d4de618295156f '' requirements-nh.txt
+download_relay () {
+dl_verify 9ff2e54072e9cd9a87d167961bb5dd299caa035f634c08223262cda562faf9407ec09435c63e9cce7cb4121a6273ae0300835334e03f859df3e7f85b367d685c '' relay.py
+dl_verify ddcefcf52d992f9027b530471a213e224382db5fbb516cc8dee73d519e40110f9fcca1de834a34e226c8621a96870f546b9a6b2f0e937b11fd8cd35198589e8b '' requirements-relay.txt
-dl_verify 3444adc5cd050351bc975397da22a04becefc49a69234bd9d6b41f2333feb5cf0a31765ad6c832f69280120d159e2792dba3d9ed0fd269e0b8e04ec053c2095d launchers/ TFC-NH.desktop
-dl_verify 8138bb15be64281c35310a711a136d6953985a0819bc5e47c1b224a848c70a01a4f60bb56e04724a919b1f84a4adfe5bf090109ace48d294f19349c051d3e443 launchers/ TFC-NH-Tails.desktop
+dl_verify f2b23d37a3753a906492fcb3e84df42b62bed660f568a0a5503b188f140fa91f86b6efa733b653fceff650168934e2f3f1174c892e7c28712eda7676b076dab8 launchers/ TFC-RP.desktop
+dl_verify a86f3ac28bbd902dfec74451034c68c01e74bbe6b6ec609014329fba17cc1224dc34942b103620109ef19336daa72e50dae1a0b25a1a2720445863427724d544 launchers/ TFC-RP-Tails.desktop
-dl_verify cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e src/nh/ __init__.py
-dl_verify 5cfc25f56763c4ce96013eb3062ab62646f1a9300a8c596d83e4d7bb4e08754bcee4179301290489ab667ba2229d9a599767e2271f081d0035e4cf0783eddc95 src/nh/ commands.py
-dl_verify 98c53fb80482e1941d74ce34b222c9457f4d2a346f352f7624f3e6376843598b3b2a3ad1136c3f6fc9e4df2e42f372d7470dcde2c8ada40b4cef896ae8ed61a5 src/nh/ gateway.py
-dl_verify 4c293c3abd62aa0997014423d1b145df144247e834a552a1172a4c06e3dad487ac9c7c0ee56de74c29a4f89a538902206dfda62b8a105e47acb22b842d98f55e src/nh/ misc.py
-dl_verify 93c7d4ec6f80e46b5a46a404a5eb676d8efd1700e74fdd06a65bc823fb566a6eee63bccd6da520e56bb54310089aebbffb12483a6c908c66348a4f34c13d600e src/nh/ pidgin.py
-dl_verify 97a8d945ebf88708180186f6a7c19cf3bba314da656b46dae2a1fbbeaeda143fd3f31d2ba9ed1981960bd8b04c1143a4b580643595d394f9bdf8ecb560d33d10 src/nh/ settings.py
-dl_verify d83d3b0f1157e60589c7428f33091c2239e910e410c94e3254fcbaea8cffbe8a783cc7175dc6230fb10525d17f6056579810100ba0600f0d4a5127bfd4ee0dd2 src/nh/ tcb.py
+dl_verify cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e src/relay/ __init__.py
+dl_verify d009954abc9fa78350f721458071aeec78b6cd8773db588626a248f0756d1e39b32a8c8c58c370e87e9e4eb63f0ea150a427ad2b92b641c8fd71117933059db8 src/relay/ client.py
+dl_verify 02c764d58ef8d02f95050cec41aa41fa90938ea08e0107ed49d3ae73357115b48f23f291dfc238ec3e45b12a705089b5c2ad3a1b30f27abb0a4c7498271161a3 src/relay/ commands.py
+dl_verify fa7350a1dafe7e27638cb505a30e43815e157b08fc26b700f15633ab34f8ac3ad782a4396cc6b9aba3b59cd48d2e37b6f72befcafbd14772e135bc40fc080050 src/relay/ onion.py
+dl_verify fe666032c2448d87355931bef235085039087b701b7b79a74b23f663d06b78264686c53800729f8a4197bf419076d76d1fe3ae74afa9141180035a6b807f0bb5 src/relay/ server.py
+dl_verify 380a78c8c0918e33fb6be39a4c51f51a93aa35b0cf320370d6fb892b5dade920e8ca4e4fe9d319c0a0cdc5b3a97f609fdee392b2b41175379200b1d793b75593 src/relay/ tcb.py
}
download_tcb () {
-dl_verify ba9fc6dad29b91a78d58f6a7c430e42eb75363d14de69668d293041bf36bb5eea0666007535c8f5a122e0a72d0da7122ff45d8e6c081c9ccacdaeeb47cb93b44 '' tfc.py
-dl_verify c2f6afa281f91b88da85668dcfe0cade4af01927ac748ee1dc76c6f160149742980b3d6996c7d04e7fbbf5abca8f79100fd746e71187990d972f4b1aa2c1bf63 '' requirements.txt
+dl_verify cec2bc228cd3ef6190ea5637e95b0d65ea821fc159ebb2441f8420af0cdf440b964bdffd8e0791a77ab48081f5b6345a59134db4b8e2752062d7c7f4348a4f0f '' tfc.py
+dl_verify 0711aabf9c0a60f6bd4afec9f272ab1dd7e85f1a92ee03b02395f65ed51f130d594d82565df98888dbf3e0bd6dfa30159f8bd1afed9b5ed3b9c6df2766b99793 '' requirements.txt
-dl_verify cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e src/tx/ __init__.py
-dl_verify 19c6542e34e58fa8504193d71435c2f06fbb4d5d770342fcc37a27acf401aa05857733a6e364ade4cea1407762fe7340c0e4cd9d3031daf8943a13d14b1e92f1 src/tx/ commands.py
-dl_verify 63bf0e11f46d8e5544e091110fd24e1241ddd650daa9cf76c39ed7db43a7062dc252a6b37ef26d55fb875fbc51314b47d23c98176d4fc1bf51fafef7a1f69763 src/tx/ commands_g.py
-dl_verify e660fc6368a430a82a8a2d0e38bd4e8aaf94bc0ac5fc6b2c63eceb58f1579ce75ac3cb83382202e929da76fe3617d553732d1798beaded4f52ce0bf7e53b75bc src/tx/ contact.py
-dl_verify d215e8983de808526cf9b76b0d299b7cc93a1cb15316113930028fbb0cf66bde51daa57a1e7ef6cfbd9f65e515553631943e142ab78ab89b78571f8612355b51 src/tx/ files.py
-dl_verify 4f0fe9684e1aa9caf665fcfa037e7ccba61c9e4385621178912e2875e1a28fed72b9fc48581782dab3c25c29e0cb38bfed2906b2e19179b43a8b35da72656112 src/tx/ input_loop.py
-dl_verify 69a90b3e908769821c419ac80779d0b09401103e4b8f79a0bf444fda8f6a20d0c559679f1595869c4bfa569631211f1297141ada7e91b1c3d28ce804961e00f4 src/tx/ key_exchanges.py
-dl_verify c782cdeda0faf946a4c97924668697a479d7d60051988e96bb4e62bf0e1ef82bfc982b8fb3465e5371b446d3f042b1c54a32a31393ea64764d281abac95850d9 src/tx/ packet.py
-dl_verify 05e76b6d62e694d1f887853ed987a770debf44acf8da12091f9a4f614a8a26c5771593d14f53beeafb7f684d56e0ecaa000f3a73bb69342cb6667f9758b56c9d src/tx/ sender_loop.py
-dl_verify afcf71e6d407bc7ef391e795441c3343fd2f172f2636fd1b06ffbadb8d0d38368007be9d8e69916a02679f576407200e836c1eaddf0dd3255d8dc073993d07b1 src/tx/ traffic_masking.py
-dl_verify c806320893ecd097ed5f8d14619cb453315fc369d0c081ef40d48cbdce46630fcd3006bd11d8712c0f6d89d7468b674e78b50257048a3a99180093f0a361615f src/tx/ user_input.py
-dl_verify 827ecad844d1fb3709b81e59f6f1ad88362a3140517a8a5d36506415e1494d554d00e2dc1dc7cc65db06d09a1182acb1150b939fcffdcd0939e70229de03f3bc src/tx/ windows.py
+dl_verify cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e src/transmitter/ __init__.py
+dl_verify f91c0f616555725e0d2a4d8e2ee2bf39e1ebc4cbdf0a2547f4e4b5e4f1ee88743273cffb422a43dff98ba42772b18ceb4c270628f933392e27fa5cd6cae991ce src/transmitter/ commands.py
+dl_verify f7cf493506a19b9732ae9f780aeb131342a47644632fcf88f0df01f0bda88252fdbad37a4b80e87f97e57feb50079ac2e5194598d745163846e30fdd6d32fe60 src/transmitter/ commands_g.py
+dl_verify a1b6af28645df531be3a670375ce3a3da1a48b279d646f04b3c14cfbdf7006060955f33595a2963f98a495ec16dfe969325842495d8fbfae5f93e1459ed047c4 src/transmitter/ contact.py
+dl_verify 184c35a32a3858893c67622a21fc7fdbd88bc61f82d4b655ad26ef008563cdb31430a3b713b92c98ea8d983ebadd0db6f9de3f9b1c07ac3dce4cf405aedf21ae src/transmitter/ files.py
+dl_verify 019c178982f89b93ba69d26e60625a868380ac102b10351ac42c4d1321a45dd7186694d86028371185a096cce2e2bbe2d68210552439e34c3d5166f67b3578ee src/transmitter/ input_loop.py
+dl_verify 742fba91ebd67dca247d03df4cf1820fc6b07e6966449282d7c4019f48cc902dc8dfc4120be9fdd6e61a4f00dd7753a08565a1b04395bc347064631d957c9d82 src/transmitter/ key_exchanges.py
+dl_verify a59619b239b747298cc676a53aa6f87a9ef6511f5e84ec9e8a8e323c65ab5e9234cb7878bd25d2e763d5f74b8ff9fe395035637b8340a5fd525c3dc5ccbf7223 src/transmitter/ packet.py
+dl_verify c2f77f8d3ebf12c3816c5876cd748dc4d7e9cd11fe8305d247783df510685a9f7a6157762d8c80afda55572dcae5fe60c9f39d5ec599a64d40928a09dd789c35 src/transmitter/ sender_loop.py
+dl_verify 5d42f94bf6a6a4b70c3059fd827449af5b0e169095d8c50b37a922d70955bf79058adc10da77ebb79fb565830168dccb774547b6af513b7c866faf786da7c324 src/transmitter/ traffic_masking.py
+dl_verify 22e8ba63c1391233612155099f5f9017d33918180f35c2552e31213862c76e3048d552f193f9cd3e4e9a240c0ef9bef4eabefe70b37e911553afeceede1133ca src/transmitter/ user_input.py
+dl_verify 39a7b3e4457d9aa6d53cb53d38c3ed9adbd9e3250008b4e79b5a174b9227fd0fac6dad30e6e9b8fe3d635b25b2d4dfc049804df48d04f5dfcc1016b2e0a42577 src/transmitter/ windows.py
-dl_verify cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e src/rx/ __init__.py
-dl_verify 04f23a236a7f8b5c43a532ef2b3278202a17b026a47b6d1f880a6fb2e775824aff3be78a14167905c955f98a01239bd1c5e63cd08566dc759fe259a4b0c6a74a src/rx/ commands.py
-dl_verify eb307d3b780dd90ab2618909707c4cd56db829dc94d49408c4a6b84f46292f395927fde0d36451c90a595fbf948cbcb3f1aa8676ca5658d6b113a3b45f2216db src/rx/ commands_g.py
-dl_verify ede3aa62af2b120078f12bbdf7d21364484652c5204817436e30cc5af70ba73fba68a6a7cfd08f43734f6c5778e710508674f7a9653d4b51922460ba1cbec796 src/rx/ files.py
-dl_verify 835f6f673b7bc1785b8c311f21aebc7ffab1a4570152f3888d13e00d763c66c81b5a77f602e7488962737c6b675beeda0bb347dfb1d11af51ea036be8932398d src/rx/ key_exchanges.py
-dl_verify c06e19c1fc279346d8454eed45fc9d2f6c1b3c561d9b9b45957b145f23ca9ba016cef51d1fad4fadabd9669c6ab4443679ac98630194073294c1ee20afc725de src/rx/ messages.py
-dl_verify 425e9bbd17c13f62732687cc798e7fd49159d5f5a291ee4ff292dd45a65bdc8146f2a90c0d4abe7fb28baea855c396335832c484a0c753067db4fa7974cce651 src/rx/ output_loop.py
-dl_verify 5f7d66daedb0cf60737a14fe428e3f420b66a08ae7c5b63135d11e17a1f3e11ce43f50d54516249fe7a065b69a17082ee81297f7f4a8c4c9a1f26918575c8dbc src/rx/ packet.py
-dl_verify 9f5f9ddf01af12e43cbb7d8423bff2cdaa4a6d3848f1ba9e1e2bbb20da08221b84de4538700c642fdcfa3637db6ad03cd2f7dfe04e67544559b8e4cc96608e61 src/rx/ receiver_loop.py
-dl_verify d26e949e7fa57b43a6489e3fe01e2bc26f7e7dfa8ec99915afd2f54f7a3e2a1e86ac16f3d95642e80ae431e35f933a07244d8ca49b3861aad6bcf462dcf2791a src/rx/ windows.py
+dl_verify cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e src/receiver/ __init__.py
+dl_verify 35b035f2794b5d7618eeafd91781246a0100bac9ff6a1f643b16068d5b2dc2946c799e91beba77d94e4118f99d6d6653974ebd5d4008133131f3bf44a7a190fb src/receiver/ commands.py
+dl_verify 09f921aaaeae96ee6e9ff787990864ba491d4f8b10c613ab2a01f74c00b62d570270323ea2f5dc08befd8aa7bf4be0c609f8dca1862e4465e521b8016dff14da src/receiver/ commands_g.py
+dl_verify 7b1d45caf3faf28c484d7d8d0c96ff9ba6e840682b002e438eac620904d3ca39483009a079d300489d80e22025ba301fa483f235193de5b55a62e9dedb25967f src/receiver/ files.py
+dl_verify eab31c334f09930f1167b15fae4d0126711d6fb0efbe5b8ca9e6e49bdbf0a9ca90279be6d2cd0080d588cf15d83686ba895ee60dc6a2bb2cba0f8ed8005c99eb src/receiver/ key_exchanges.py
+dl_verify 2894c847fe3f69a829ed7d8e7933b4c5f97355a0d99df7125cee17fffdca9c8740b17aa512513ae02f8f70443d3143f26baea268ace7a197609f6b47b17360b7 src/receiver/ messages.py
+dl_verify 57ebdf412723b5ab4f683afeda55f771ef6ef81fde5a18f05c470bca5262f9ff5eefd04a3648f12f749cec58a25fa62e6dfb1c35e3d03082c3ea464ef98168b1 src/receiver/ output_loop.py
+dl_verify 3b84dbe9faffeab8b1d5953619e38aefc278ce4e603fd63beaee878af7b5daff46b8ed053ad56f11db164b1a3f5b694c6704c66588386b06db697281c9f81bbf src/receiver/ packet.py
+dl_verify 1e5240d346a016b154faf877199227edf76e027d75e1e921f2024c5dd1d0a40c1de7e9197077786a21474a4bbf2c305d290214aacdea50f5abaeb39963ca08a6 src/receiver/ receiver_loop.py
+dl_verify e84a92fa500492af0cc16038fd388c74c387334898b870e57bc599d1b95da85b579d50ba403cdfc82ce8d4d5765fc59e772796d54faa914d0b5874150428d762 src/receiver/ windows.py
}
download_common_tests () {
dl_verify cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e tests/ __init__.py
-dl_verify 9cba0c6eb96f5e827a669312c2c8d4d52b24ca5133d294ab946fca8d508b71f898328487ec8213af639a61fcf7fee8fef3102c5f1341cd4c588289a03e820003 tests/ mock_classes.py
-dl_verify c6432382c52a7665bf2da5ff4c6e502d46b0d29f7d8eeab2feacd77e4e4bd954227c57f9baf1251feb0f4d6923380fe64a38ca8d12d0d7cbb2b8d34c5b803b5a tests/ utils.py
+dl_verify c20421e2293f058df4e03dee49e609b51fc1d39e69b4c44dd7580f88a5b2bf0729261167cb69fb0ff81b3838e3edca0e408c5c6410e4d43d06d6c0aa1ef6f805 tests/ mock_classes.py
+dl_verify 2acdcd76d44caa417e9d1b3439816c4f07f763258b8240aa165a1dc0c948d68c4d4d5ac5e0ff7c02a0abc594e3d23883463a9578455749c92769fea8ee81490d tests/ utils.py
dl_verify cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e tests/common/ __init__.py
-dl_verify 52c111cc9a956354f5f5a317cff4209003481f4f8bf3c248df529c4925202780c0c2fea3a3fe2289a2651d82c9bcbc8a2801141f2b5b2a8d4ba1b74943de6587 tests/common/ test_crypto.py
-dl_verify 8e1b790d9143a7d2decd5dab97826cc3fdf85c071da95340da7a4fdc862d94099408675ad7422c8d105e988aa39eb5b5ef1a39fce9be5a6ae6877fd820e1f899 tests/common/ test_db_contacts.py
-dl_verify 8190d1525f5f603293f30a07d2e8e15becad13094458d6b3e75a8f45bf7751019ed9fea8df9b366c09bef083d3eb1b4bf0e3c165912069ddfa862f86107cd420 tests/common/ test_db_groups.py
-dl_verify e11f05a0193bfa013c487ff4b646f8f54b5b3ac71e136d69d38d4e572afffd0849ce3f4b0c1639b77f6506c33e6f13c65ca5b4b3f3e8a421a17f89fe2113141f tests/common/ test_db_keys.py
-dl_verify 32e6b8562a758eaa29c9e32720434915c7b32a5815203b2b4d11acd81cd9b3669e88ee41d660681d2fb7015f9f4346919e74c901a50bb8202a4f93ba316b0b3d tests/common/ test_db_logs.py
-dl_verify e5c0fd0fcff438b92933e81389053b3d5a4440d0b37d5e9744a96c6a8cf5c14169ae90a2714d5490f4f920b0335235d9d5cd6f42e806698333a0ef2821b56e92 tests/common/ test_db_masterkey.py
-dl_verify 19233b6f6aa19e50f36d8ca595e93b8a782c20a9f6076e966da8a7c5619ff33a0b8b02a93d16903ecc873930e0a263a79edc4a2c85e39aeaac81279ba1a65d0e tests/common/ test_db_settings.py
-dl_verify 4472f5528c6c9c60b4c4dbbc6c41dbe19734710be37b9ffdb27081c84fe308230c4e5b0180c006fdf47e75bb05050e41958df25b6feb752fb7951141bd59c6fa tests/common/ test_encoding.py
-dl_verify aad18d42e5366223a88d14e809f8897cf4f989de5e7115b1b5052675b134d9e5bfe30c21bef2cc8d5150385dbb029350f1ce36d388fffbb184b8872014209acb tests/common/ test_exceptions.py
-dl_verify 12f791c529dc447c6940049e3b9b44cfd3847c25089864820677e23446ed72d212bdf1dcd849bf80d0ebb1a438337730e5fab395b1f183b98190e49575391038 tests/common/ test_gateway.py
-dl_verify 01df5269c6189a55bbed7e5894aa126d5e16d16f6b945160e63c929b397f06ef238b3a4be8fa3d5431567d1b62a0d4eb86faa320cb6df9dcfed971d98df936da tests/common/ test_input.py
-dl_verify 029cc1f4cd983c32a4b2ee0b78c0f3f9e40ed3ff417ed323927325a582d5e77c52c2ca48e3ea38471fbe431d87a4e35355de0a6b17e2cb6331d04a25ecda1358 tests/common/ test_misc.py
-dl_verify 7ca3a76b69a96e33ce8ef0404bbed696f3c82d63cc8940e25763ec241e7d8be2cf033c54d28a193bed911b3646bf4c111450a30d90f25af347a323e3018da04c tests/common/ test_output.py
-dl_verify a17d3bd4fc7b44216a2c59789fb9322a4cdee52c9763dd8f7cc59908c42b500db51aab4681b7372fcfbe6a152055bf823073797b3f94275791b1c56f2a363395 tests/common/ test_path.py
-dl_verify bdea73b00b14b8de136112e9c6e1257aca971a704bf0a104e3aefd1014a0d94ce0cd941a2568e058b27202ec595476692c22ac1244d626759965b8242fa3ea74 tests/common/ test_reed_solomon.py
-dl_verify 946812a0c4e368b349b31622ddd21ed863cd2feeec1ff145c45a96a5953a47c5865eade0fbe391510cfd116fa35d9f8253e4314187884762e3ae3000dcbc9db3 tests/common/ test_statics.py
+dl_verify b62eeed36733c4ddcbb657cf7b2b37737f2a1b0b5d11c7720cb13703f09a99ccb0ead2a379caeff073955a31a5ae123342c925d93bbdd3338cfc8e4efb83fa38 tests/common/ test_crypto.py
+dl_verify 7c222cc89248f09992def8fa30c32a9c98a9188c0b30af5f352eeef7b1932bdbf070a87879b47fe09c5cb6f19ad69038f3f8e906479773987e3f47908119f444 tests/common/ test_db_contacts.py
+dl_verify cb8e18ba393d05e89c635d9ee22f0a15bc3a2039c68c85cc0e3eafe6d5855601b0c00473d6284bb33c4f88184932f2413793e185e5478e6cb456976bc79ad790 tests/common/ test_db_groups.py
+dl_verify b894e5719bbf666b2e86f911b422c857c8e3795b527e346e510ff636c8b9733607c8e4115168584fba3fd6144d64b53b85f65cbba18b21c7dd80ff6e0de2a271 tests/common/ test_db_keys.py
+dl_verify ed68245632dcab1a0ff63aa18408514a8c902ffdaa509ee5f9ae6a4f4b57fc11d64d5a4b70cc2884b8f428afb2ee23a586ba0595ad9b921f66b735ae90f257a2 tests/common/ test_db_logs.py
+dl_verify 4e7436d7316d56f50f604a900eddc6427bb2fe348073848b1d7845484f51739686c781935118a18bdc52d7848a46f24909ea630306c46f518ec9b72768c3f648 tests/common/ test_db_masterkey.py
+dl_verify 9eb4af866f9e5f1561401a3b62f924e8133464dfc3bb06f5e17dc18f2c09b785133ad38cf45d6d218ef7c5eadad4207d53ad6492e82754753ed568884ba4d383 tests/common/ test_db_onion.py
+dl_verify 58ed5e733ac373a6c3d69ff7218207a60b9e4138a549da1a9de158d770f5b2514d7042e4ec7feed86966388523ace278797535a77be926f34c406ac3bc4e96ce tests/common/ test_db_settings.py
+dl_verify a2036517d264bbaf2db9683e573000fa222067c6a8e3e72337e5b31c6554c1c33259f885540aad73f2cc454f8d0ef289df9557106e43ca4504fbad447c7e4c04 tests/common/ test_encoding.py
+dl_verify 3dea267fa9b4361890f374157b137c9f76946f3289f4faf4b293814f26f9769fb202ec98c6fd044891b2a51a3bb69f67fec46022210ebaf27f7270e9dfc779eb tests/common/ test_exceptions.py
+dl_verify 3d2d5077bc946a1327c64598a3d7bb30786a6ccb089f5fc67330b05a3d867c46deb0d5cec593927782e1bfbf7efe74678f6aa4b62a3306ba33fa406537ee6499 tests/common/ test_gateway.py
+dl_verify dad966ace979c486134dd3146a50eb2d26054984ca8fcad203d61bf9ae804db04664df21e8293e307fbfe9c331cb59a06a46626fb36f445f50ef0fba63b5d93d tests/common/ test_input.py
+dl_verify 23d4ddd293defa5ac3dd4eada0e8e9263203c51d9d0260d370a362557f93bb74dbfff75620463e4c046db3350b54ee75889398c58be16df8dcffb928220815a9 tests/common/ test_misc.py
+dl_verify d595d7b6c0e05f1c99a89f8dc2e662eff4127f0ad0b807156a4e6f42c9113e33302c00b311e9fdfcfce20e1fea331da02bbeb41a7c44d8e05795317711da8225 tests/common/ test_output.py
+dl_verify 4a38809c9afad404b563cbaffe89d9a23b9785ab246c71136b9bb2c802f7b1039ad375580a3076ba671f97beb48bb3f51a6bded4f8179d3c5b8f73899101cd9b tests/common/ test_path.py
+dl_verify 1e320f69f236daed5f0fb2e6fda4b5b533dd628fff7db0ee8a6b405efe3c24138a43f24b45693017219cd885779f5ae57d3523d264e077ba9d3b9d2027b95d9c tests/common/ test_reed_solomon.py
+dl_verify 223f66cbb3ff0567eba27b66c3be30bd292b6ab1405ea52af79e4adafc87901212998576665bfee5e40e9ece7cc0d369179945be903ae36e5016942cf8c7fd2b tests/common/ test_statics.py
}
-download_nh_tests () {
-dl_verify cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e tests/nh/ __init__.py
-dl_verify 8a3b29d367987feae53c62a08fa3523a2e1fd032d9043f445244a9fd4026f73476daaf5fb9dbfe732b7bbfc5b4b0495f1566bb4cced9d41854a7128ccb802097 tests/nh/ test_commands.py
-dl_verify 045f61820b739ad86d475a460788f27a92cfcf651ad4b4d4e798f6f3f4672e3e10fee2941057c919dac23fd1231df06b78f6be3e3a749e7b9d51504ec49044a2 tests/nh/ test_gateway.py
-dl_verify 512ad346e350713bd551447e1c305d25d038a6c1a6faaf2a9880c52352255bcf5b057c89148804ec495cd5d996b832f7d139691ef9a3fc3fd65b927a3548aee9 tests/nh/ test_misc.py
-dl_verify a32e36680caa2bbcb841369062996d1a1656c13c5eca6bdd75f15841a5123c6a90bf65b85acfc3d8536a888b4e41a1b591a2b44b3b871cb3f0ebe50b63509b1d tests/nh/ test_settings.py
-dl_verify 825f26a6baf24fc650a9e3dfc09a2361b1000e48b754273c2b0321b7c01f08f71ebb40bf1617f948ba13bec925158b8f1db974003aa8ef3363ad69f4fd88e843 tests/nh/ test_tcb.py
+download_relay_tests () {
+dl_verify cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e tests/relay/ __init__.py
+dl_verify 9d132ad47baca57c5ce8d7f07222b6c778aec697c190c48b82c86c4eb8588de1935f2309994c05bcdfd44fe2d8d85d20980520aa22771f3846e5ce89ac68a232 tests/relay/ test_client.py
+dl_verify 2431fd853a9a0089a3837f1e20455c2d58d96722d5b803fe9e3dc9aa09a3e5fbffa3b0fa9e3e723d81a2aa2abd6b19275777ba6eb541ec1b403854260dd14591 tests/relay/ test_commands.py
+dl_verify b64b8cef7f1c4699e34344b6c6ba255d6ead3e8f4765dfd5fb88d2a676962a7d8231d261f68d3399d9eb65196ea0cefb31e6800aa6cc6662dcf0fd927be8c1a4 tests/relay/ test_onion.py
+dl_verify 42e494245869a5e652fe6bdcf5e21d1a0299c9ad7485d075fe7cf1d2d53118b444d8563bbea837316f00cbfea31117d569cf4e8694443ab5b50f606369aec987 tests/relay/ test_server.py
+dl_verify 54c3026e797e75c46ca1d1493f6a396643948f707f1bc8ad377b7c625fda39d4e0fa6b0ec0fe39149ef0250568caf954e22ae8ebe7e7ac00ca8802ffbc6ae324 tests/relay/ test_tcb.py
}
download_tcb_tests () {
-dl_verify cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e tests/tx/ __init__.py
-dl_verify 6eed2a31a017772c767907be7b5c825207d25d394d4517c856a882265de84c50f56ae60020e8983c2269d356fc73286bffe414c28d894d851d05bb45c3ef79f5 tests/tx/ test_commands.py
-dl_verify 8be45e9c005d6ddb89d0d8a1dc3477c39e13e5b95dfac1d38f94f45a886ee0af64f9b95bf25ee26b1ad2085fbd285237b68145dba916fc56844dbb740ba0d52c tests/tx/ test_commands_g.py
-dl_verify b9a27910eba3f09b09c5d88c41ec95629ec0a8cfae8cd393bbabe5ffb699b5a1db98bca825fbf320eae48c8fd9125a7d2dc64e94c992dbd4799d7f00ad0a34b0 tests/tx/ test_contact.py
-dl_verify 2b15f293950ce0961e2975a20b60e7dc7e5668507941ce01bcb9147799c2b4f72a1ee35206e58f4e9d3f40f6ff758e0206c3bd6eb428c2d504aded8c254792f7 tests/tx/ test_files.py
-dl_verify a6e64b203c0c0b5a7d09e1a41e2faccaa6eeaadfd108117f1895c7120e833b79ac73166cd516c13fa9a2cf31d0196e4e2215a3d9100e26255eb57be738478efd tests/tx/ test_input_loop.py
-dl_verify 783a0d0b6fc3b04abfe474b4e5829dce333bc727fe9a2dd570b37ac63dfaa0426e71b24d0b02a5254a1e2711943bb0d61516297cf3a872bd55d57728fcaf6d84 tests/tx/ test_key_exchanges.py
-dl_verify 485f6ea31486b6aeceb7c6359bfb46c4a107f2f971b84c3bc36eeddf6cbec0dbbe730ca5109673d66dda61bf1ccb24dfb3f15575dfc0279b6adb6a1c504a2ce4 tests/tx/ test_packet.py
-dl_verify 3967b417f32779187a9dff95187a63dc02a7c8dc314f92c029351c9be180344e560574007566050dac58b4c3f066ac9e3e11ea8047b61801f8530808d4d55ed8 tests/tx/ test_sender_loop.py
-dl_verify dc783f22c8e0e48430269ef5001c7e4c361a3b555b5e48a9cff136007534f4c093f1d1cfe2b55751adc1c9145d6de08e2cd21332c75e2533d50c2fda70060d21 tests/tx/ test_traffic_masking.py
-dl_verify 35774f4d935ba91600b11b73b75aa12605a64297914cfd2eba793d3ebaaf4cc6ad48d8e8ffed43a37d3dd5054bf134b9e7cae693ef7d7232d02c9a0e5b54386d tests/tx/ test_user_input.py
-dl_verify ba9abe1222c4bf409c00e5cbbcdcfb28753f3c0b85e52aa89e45c81a2831a461cff6ec16d1ebc7690419b6d02bf220de0ac6b30b7eabd0c040fa571fc4e61f9f tests/tx/ test_windows.py
+dl_verify cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e tests/transmitter/ __init__.py
+dl_verify 3bdb8fd64bb2b4070da025e0187e434b5178b645fb08ec822bdd732bac3824316a8d13ded95e5e7bf754dddda5ea1f5805b6c2a3b46e8100509d3f5b32d18278 tests/transmitter/ test_commands.py
+dl_verify c2429b5ffc32aa4a6377fef726553d7c731672367cb4eaa338c0a2099b3fe0455fa8a79c4b86afd9077a53422403649bc1fcf7540e4f996dc0890819c34d9135 tests/transmitter/ test_commands_g.py
+dl_verify 3baaa1dc6dff7771f6167d699a81c6cb14f7b0ea307b83797d342a95b21f89d9f2c21e54feac0474f61174a1c708b3f02bc0e3a6b0b504bda8c03cdd16e5fefe tests/transmitter/ test_contact.py
+dl_verify 3d86131dfd775aea2ea7c0500759befac8a5d7fe35f590974b2af56da42929db927c0bd86a352a38412fbb79c2bff09d33271b26ebd9aead1bf2b702918cc02a tests/transmitter/ test_files.py
+dl_verify 3bc9c3275353f49516fdb2bc9d9a86286c121f085d5382980e118b0ea123da9b9829edeb172448416f30955c9a1c1c3704f36cfa4700ced86c33009e362d0b69 tests/transmitter/ test_input_loop.py
+dl_verify 284fefc2a4986948a5ee4de1f935482b43011347b5454ab685f4a79a1036d1bf0518db536381dfddf706318bb44b584db37cfbf8fa07aac1b631a278dfe298d7 tests/transmitter/ test_key_exchanges.py
+dl_verify 0c16f45ad9fda006b58a45a7c9a4b9777cf05d08f59c9207addbc27936c29a6aa2aa59146f0ef32fb883a5e24211c5dbdfbf5ad9cf9b72e999e599e9eda0d2ef tests/transmitter/ test_packet.py
+dl_verify 49aa0e761771893e8bc057c8e305eb8b5e7103df9a31c80eba333db739f0b2c521eca59901f35bf2e319360902c8be12b112a29948461b73662554bdf55bf6d4 tests/transmitter/ test_sender_loop.py
+dl_verify fd4d6cf68a4e555a60caf8efc6ebc6747990ed1c582036c6cc92012c5af82b49b32c42398bf822fda8257e84c822bdb8158260164a8774aea72723ddbe99e639 tests/transmitter/ test_traffic_masking.py
+dl_verify b71f7d8e3ce943dca2516f730c9919633f40568af905ac32e05b126e06f2c968c9b0b795cfad81a696511cd07534a0593ef1c9b5d5299ab88b2aff32b9059b64 tests/transmitter/ test_user_input.py
+dl_verify 5be56563cab2c9007b6be7ff767778e3fb0df1d3374174d6b6ef7dc6d66b0c692cd798a0a77f156c3eb1ad979a3b532b681db97c4d1948ff8f85cd4a1fa2d51d tests/transmitter/ test_windows.py
-dl_verify cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e tests/rx/ __init__.py
-dl_verify 8c4aa1d4e7df0228172c38e375682f5cdd32fd918168620a61a98d17d0ce79f30215e7793be29390e0a6a51c5daf26a2d80db56458b5d02524f7878a2849c5bd tests/rx/ test_commands.py
-dl_verify 467a91fa2161c172506036ba36f8f31cbcf1b9aa1a91f1e7aef2727e3113edae8b24b26488b82b1ba1d4d00411e79944568b8d9c9e2d7e22c3b30ce759ab0137 tests/rx/ test_commands_g.py
-dl_verify 081ff658de5c46327ea840038e44d1d1dd5682d31950145affc8f2536e2c06ab779f672db779a555a75a2bed9a1e323117e07bf89d20d5f2ba06a09dedd87e8f tests/rx/ test_files.py
-dl_verify 7c0d97bfd5dca727ee36573cdc1b5683077524ff28236e01d8b011da8d51c09988985b76e054c2cdebf6a95fd2e68a14d7a976f1c03a1a39ab9d2a3672e89143 tests/rx/ test_key_exchanges.py
-dl_verify aef0fe0e208ce91002924ec2d103c4575079ca3c72544774ba904e44f99ae78aa13cb242a61f2b1fa7c5e7ab8095b0836d17ce276e888792dcdc2b34b8603339 tests/rx/ test_messages.py
-dl_verify b6a33ed791e6daab20ee10f304390a8bc890a984c1bf1bec4a57d04741797cfc242d1f1067a0a2854f4daf35fb1302d652fc5ed17749884b5424d700ffb32642 tests/rx/ test_output_loop.py
-dl_verify 8dbd77abca3bdab031f5a2e16d5789c2359088c9817a53188a4d6b6b45d4bce087e0ec872810401f35d6cdb170b3052dc27f826e4906ab3f41bb71e49fcfb29e tests/rx/ test_packet.py
-dl_verify 6b87bc6c6beaf421c8f9f27ec6ced2d3248efb7b7cd966646b41a486d82d7665f7d2bb2879e1b6baf84fdf77dbef1eba565adcafd8228e7dde5919f8a12e47d1 tests/rx/ test_receiver_loop.py
-dl_verify 96e8ad84c9cce083d8a5a85b928a2c78d4b336739a894fdfb69abdef880dbe0fc72f05515393ad576d86250d32f4fc93b65f657c5f7dd7d4aa4c7c2e8b24b62f tests/rx/ test_windows.py
+dl_verify cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e tests/receiver/ __init__.py
+dl_verify d80af580f76c3c58d72828ab190a055a03f7e74ae17ccbaa2f70dd94e01b7efd85888ac51eefed94d6671027660a8080600f2e1e908bd77622c36ba258a8936e tests/receiver/ test_commands.py
+dl_verify dce0fe6cd05915f1a0450259a08e9935b077f9b3af61f315812834811a5c82095b72bea5e4b283fd2b8285e86f8ee4897d43f42a99261767b77841deb471d980 tests/receiver/ test_commands_g.py
+dl_verify eb86007ca9b0cfeb4d364b1fb53409443c8b9f95770979c471b8462c1c41205b96afd357670a9cd5949e8360b738d9284a9e726ee6ab89e09a0306b105f1a720 tests/receiver/ test_files.py
+dl_verify 01bf3274c675b8cbe6379f8fb1883e0d4ed6c69d164b2c6a44794786d21f2604efc262b34372dfb581607655e6e1e73c178660d3e97f4f2c9bdfb11e4166b2fd tests/receiver/ test_key_exchanges.py
+dl_verify 7b9d27497d5765739ee435c02a379e792ad510dd893ff0d3871a7d3f97d196274921a2d26fa656edb5e7974a390155e7c1914135d3e1b6a82ed8f94d46263b66 tests/receiver/ test_messages.py
+dl_verify affbd5bccd0fcd87bb50e13b497b1ba3c29ccec954fa53f62bff1a28baa7b35376f614fb54c922ed4605a37f6aa1463efff43a6267619b04a605a2181222e873 tests/receiver/ test_output_loop.py
+dl_verify da34f5bdcd8b108b45e955d545954de32c9d8959c26e9d2e3104106139fb2fec69aabd6d5d127beacef7a09ee4f16aab0a92ee7d76b0fa6cd199e56032c12257 tests/receiver/ test_packet.py
+dl_verify 717722763a41267929b6038abe859eececee20e68497d0f3c04268b6b8274a04e39e3f8d37d0928c8459c7ef52478176c933d8ec8b2bd0b93ff952a9b92b86f4 tests/receiver/ test_receiver_loop.py
+dl_verify e6df26dc7b829b8536e454b99c6c448330fc5cff3ff12a5ebc70103a5fb15ab4fcb8fcb785e27201228b6f50ec610ef214bee4f2d5ff35995b4f00ae23217bc0 tests/receiver/ test_windows.py
}
download_local_test_specific () {
-dl_verify b42135363e8ba718e76756496de34a5fad0510162677eeaa584b083342e20c91732b6589bc6c14a7951f100b52f1612634a3c640857276edabccf423daecc705 launchers/ config
-dl_verify 17c83b0fe035fe4412531e06e795e6d5b2aa97ea1827e3c7249f9746067cf1f6c7d2351cbd291851fa91d27c565409e66f0e01ec432b040a74123fa4f1631611 launchers/ TFC-local-test.desktop
-dl_verify 1defc149fec09999ab424b68c768b8aa43dc171a49016cff069f01c096542d2c3092124e95d4a140f72f7ba9098e9c148eb2297688771eb2404b204a9f88131b '' dd.py
+dl_verify dec90e113335d3274d87c3e12dda5a3205df57bd10c1e0532ecad34409520ce0596db21e989478836d4a0ea44da8c42902d2d8f05c9ad027a5560b4d0d5b9f13 '' dd.py
+
+dl_verify 2f426d4d971d67ebf2f59b54fb31cff1a3e2567e343bfa1b3e638b8e0dffed5d0c3cac1f33229b98c302fee0cca3cc43567c2c615b5249a2db6d444e89e5fc70 launchers/ config
+dl_verify 5d5351dd24d7afd4dc717835cfffee718fca707133127d1826ae099c66b0bddd878d104c1ad43546c8157807c984bd26b562e455fe219c1a00cf49df6bb73009 launchers/ TFC-local-test.desktop
}
download_tcb_specific () {
-dl_verify f4f46d0d44234c094f566e88cc257d07399ee9552ff203181ca415ea2265b091bf14adf570122be7253b3d7fe22cac71f476b2d1fce5a6263f3c3cc7aaa2e8dc launchers/ TFC-TxM.desktop
-dl_verify f3c0f471e8046cda7e66c153403c76ea55558bc06e2ee574f300b7507fa81bd2f8e5542ef342b4329f9cb6aee0d050ef4cad43170fbb2f36ac69358e74c035f5 launchers/ TFC-RxM.desktop
+dl_verify 883d8df82240d840a215a4a946ba3a15def11b9c50f659e84bdb3543e484fed3e520c471cc10301743d38a7560c2672f1cfd22efa99de495685a90b8559db4ee launchers/ TFC-TxP.desktop
+dl_verify c10fb76486ada483cfdd9e351b6d9b89907ae6ccccb32cf4299bc4e67ba565aac7b05a2d62a89c0146a1783c9d0616ee3c9a9660173a98ca6b03f72c3fbe6202 launchers/ TFC-RxP.desktop
}
-activate_nh_venv () {
- . $HOME/tfc/venv_nh/bin/activate
+download_dev_specific () {
+dl_verify 2865708ab24c3ceeaf0a6ec382fb7c331fdee52af55a111c1afb862a336dd757d597f91b94267da009eb74bbc77d01bf78824474fa6f0aa820cd8c62ddb72138 '' requirements-dev.txt
}
-activate_tfc_venv () {
- . $HOME/tfc/venv_tfc/bin/activate
+download_venv () {
+dl_verify f74b9aeb3a17ef86782afb8c2f621709801631430423d13025310809e6d14ffecb3805ee600cd3740287105b7a0e0726f8ced202e7b55be7bf5b79240e34d35d '' requirements-venv.txt
+}
+
+
+install_tcb () {
+ create_install_dir
+ dpkg_check
+
+ sudo torify apt update
+ sudo torify apt install libssl-dev python3-pip python3-setuptools python3-tk net-tools -y
+
+ download_venv
+ download_common
+ download_tcb
+ download_tcb_specific
+ #download_common_tests
+ #download_tcb_tests
+
+ create_user_data_dir
+ cd $HOME/tfc/
+
+ torify pip3 download -r /opt/tfc/requirements-venv.txt --require-hashes
+ torify pip3 download -r /opt/tfc/requirements.txt --require-hashes
+
+ kill_network
+
+ pip3 install setuptools-40.6.3-py2.py3-none-any.whl
+ pip3 install virtualenv-16.2.0-py2.py3-none-any.whl
+ sudo python3 -m virtualenv /opt/tfc/venv_tcb --system-site-packages --never-download
+
+ . /opt/tfc/venv_tcb/bin/activate
+ sudo pip3 install six-1.12.0-py2.py3-none-any.whl
+ sudo pip3 install pycparser-2.19.tar.gz
+ sudo pip3 install cffi-1.11.5-cp36-cp36m-manylinux1_x86_64.whl
+ sudo pip3 install argon2_cffi-19.1.0-cp34-abi3-manylinux1_x86_64.whl
+ sudo pip3 install PyNaCl-1.3.0-cp34-abi3-manylinux1_x86_64.whl
+ sudo pip3 install pyserial-3.4-py2.py3-none-any.whl
+ sudo pip3 install asn1crypto-0.24.0-py2.py3-none-any.whl
+ sudo pip3 install cryptography-2.5-cp34-abi3-manylinux1_x86_64.whl
+ deactivate
+
+ sudo mv /opt/tfc/tfc.png /usr/share/pixmaps/
+ sudo mv /opt/tfc/launchers/TFC-TxP.desktop /usr/share/applications/
+ sudo mv /opt/tfc/launchers/TFC-RxP.desktop /usr/share/applications/
+
+ sudo rm -r /opt/tfc/launchers/
+ sudo rm /opt/tfc/requirements.txt
+ sudo rm /opt/tfc/requirements-venv.txt
+
+ rm $HOME/tfc/setuptools-40.6.3-py2.py3-none-any.whl
+ rm $HOME/tfc/virtualenv-16.2.0-py2.py3-none-any.whl
+ rm $HOME/tfc/six-1.12.0-py2.py3-none-any.whl
+ rm $HOME/tfc/pycparser-2.19.tar.gz
+ rm $HOME/tfc/cffi-1.11.5-cp36-cp36m-manylinux1_x86_64.whl
+ rm $HOME/tfc/argon2_cffi-19.1.0-cp34-abi3-manylinux1_x86_64.whl
+ rm $HOME/tfc/PyNaCl-1.3.0-cp34-abi3-manylinux1_x86_64.whl
+ rm $HOME/tfc/pyserial-3.4-py2.py3-none-any.whl
+ rm $HOME/tfc/asn1crypto-0.24.0-py2.py3-none-any.whl
+ rm $HOME/tfc/cryptography-2.5-cp34-abi3-manylinux1_x86_64.whl
+
+ add_serial_permissions
+
+ install_complete "Installation of TFC on this device is now complete."
+}
+
+
+install_local_test () {
+ create_install_dir
+ dpkg_check
+
+ tor_dependencies
+ sudo torify apt update
+ sudo torify apt install libssl-dev python3-pip python3-setuptools python3-tk tor deb.torproject.org-keyring terminator -y
+
+ download_venv
+ download_common
+ download_tcb
+ download_relay
+ download_local_test_specific
+ #download_common_tests
+ #download_tcb_tests
+ #download_relay_tests
+
+ torify pip3 install -r /opt/tfc/requirements-venv.txt --require-hashes
+ sudo python3 -m virtualenv /opt/tfc/venv_tfc --system-site-packages
+
+ . /opt/tfc/venv_tfc/bin/activate
+ sudo torify pip3 install -r /opt/tfc/requirements.txt --require-hashes
+ sudo torify pip3 install -r /opt/tfc/requirements-relay.txt --require-hashes
+ deactivate
+
+ sudo mv /opt/tfc/tfc.png /usr/share/pixmaps/
+ sudo mv /opt/tfc/launchers/TFC-local-test.desktop /usr/share/applications/
+
+ create_terminator_config "/opt/tfc/launchers/config"
+
+ sudo rm -r /opt/tfc/launchers/
+ sudo rm /opt/tfc/requirements.txt
+ sudo rm /opt/tfc/requirements-relay.txt
+ sudo rm /opt/tfc/requirements-venv.txt
+
+ install_complete "Installation of TFC for local testing is now complete."
+}
+
+
+install_developer () {
+ dpkg_check
+
+ tor_dependencies
+ sudo torify apt update
+ sudo torify apt install git libssl-dev python3-pip python3-setuptools python3-tk tor deb.torproject.org-keyring terminator -y
+
+ cd $HOME
+ torify git clone https://github.com/maqp/tfc.git
+ cd $HOME/tfc/
+
+ torify pip3 install -r requirements-venv.txt --require-hashes
+ python3.6 -m virtualenv venv_tfc --system-site-packages
+
+    . $HOME/tfc/venv_tfc/bin/activate
+ torify pip3 install -r requirements.txt --require-hashes
+ torify pip3 install -r requirements-relay.txt --require-hashes
+ torify pip3 install -r requirements-dev.txt
+ deactivate
+
+ sudo cp $HOME/tfc/launchers/TFC-local-test.desktop /usr/share/applications/
+ sudo cp $HOME/tfc/tfc.png /usr/share/pixmaps/
+
+ create_terminator_config "$HOME/tfc/launchers/config"
+
+ chmod a+rwx -R $HOME/tfc/
+
+ add_serial_permissions
+
+ install_complete "Installation of the TFC dev environment is now complete."
+}
+
+
+install_relay_ubuntu () {
+ create_install_dir
+ dpkg_check
+
+ tor_dependencies
+ sudo torify apt update
+ sudo torify apt install libssl-dev python3-pip python3-setuptools tor deb.torproject.org-keyring -y
+
+ download_venv
+ download_common
+ download_relay
+ #download_common_tests
+ #download_relay_tests
+
+ torify pip3 install -r /opt/tfc/requirements-venv.txt --require-hashes
+ sudo python3.6 -m virtualenv /opt/tfc/venv_relay --system-site-packages
+
+ . /opt/tfc/venv_relay/bin/activate
+ sudo torify pip3 install -r /opt/tfc/requirements-relay.txt --require-hashes
+ deactivate
+
+ sudo mv /opt/tfc/tfc.png /usr/share/pixmaps/
+ sudo mv /opt/tfc/launchers/TFC-RP.desktop /usr/share/applications/
+
+ sudo rm -r /opt/tfc/launchers/
+ sudo rm /opt/tfc/requirements-venv.txt
+ sudo rm /opt/tfc/requirements-relay.txt
+
+ add_serial_permissions
+
+ install_complete "Installation of the TFC Relay configuration is now complete."
+}
+
+
+install_relay_tails () {
+ check_tails_tor_version
+
+ # Cache password so that Debian doesn't keep asking
+ # for it during install (it won't be stored on disk).
+ read_sudo_pwd
+ create_install_dir
+
+ echo ${sudo_pwd} | sudo -S apt update
+ echo ${sudo_pwd} | sudo -S apt install libssl-dev python3-pip python3-setuptools -y
+
+ download_common
+ download_relay
+ #download_common_tests
+ #download_relay_tests
+
+ create_user_data_dir
+ cd $HOME/tfc/
+
+ torify pip3 download -r /opt/tfc/requirements-relay.txt --require-hashes
+
+ # Pyserial
+ echo ${sudo_pwd} | sudo -S python3.6 -m pip install pyserial-3.4-py2.py3-none-any.whl
+
+ # Stem
+ echo ${sudo_pwd} | sudo -S python3.6 -m pip install stem-1.7.1.tar.gz
+
+ # PySocks
+ echo ${sudo_pwd} | sudo -S python3.6 -m pip install PySocks-1.6.8.tar.gz
+
+ # Requests
+ echo ${sudo_pwd} | sudo -S python3.6 -m pip install urllib3-1.24.1-py2.py3-none-any.whl
+ echo ${sudo_pwd} | sudo -S python3.6 -m pip install idna-2.8-py2.py3-none-any.whl
+ echo ${sudo_pwd} | sudo -S python3.6 -m pip install chardet-3.0.4-py2.py3-none-any.whl
+ echo ${sudo_pwd} | sudo -S python3.6 -m pip install certifi-2018.11.29-py2.py3-none-any.whl
+ echo ${sudo_pwd} | sudo -S python3.6 -m pip install requests-2.21.0-py2.py3-none-any.whl
+
+ # Flask
+ echo ${sudo_pwd} | sudo -S python3.6 -m pip install Werkzeug-0.14.1-py2.py3-none-any.whl
+ echo ${sudo_pwd} | sudo -S python3.6 -m pip install MarkupSafe-1.1.0-cp36-cp36m-manylinux1_x86_64.whl
+ echo ${sudo_pwd} | sudo -S python3.6 -m pip install Jinja2-2.10-py2.py3-none-any.whl
+ echo ${sudo_pwd} | sudo -S python3.6 -m pip install itsdangerous-1.1.0-py2.py3-none-any.whl
+ echo ${sudo_pwd} | sudo -S python3.6 -m pip install Click-7.0-py2.py3-none-any.whl
+ echo ${sudo_pwd} | sudo -S python3.6 -m pip install Flask-1.0.2-py2.py3-none-any.whl
+
+ # Cryptography
+ echo ${sudo_pwd} | sudo -S python3.6 -m pip install six-1.12.0-py2.py3-none-any.whl
+ echo ${sudo_pwd} | sudo -S python3.6 -m pip install asn1crypto-0.24.0-py2.py3-none-any.whl
+ echo ${sudo_pwd} | sudo -S python3.6 -m pip install pycparser-2.19.tar.gz
+ echo ${sudo_pwd} | sudo -S python3.6 -m pip install cffi-1.11.5-cp36-cp36m-manylinux1_x86_64.whl
+ echo ${sudo_pwd} | sudo -S python3.6 -m pip install cryptography-2.5-cp34-abi3-manylinux1_x86_64.whl
+
+ cd $HOME
+ rm -r $HOME/tfc
+
+ echo ${sudo_pwd} | sudo -S mv /opt/tfc/tfc.png /usr/share/pixmaps/
+ echo ${sudo_pwd} | sudo -S mv /opt/tfc/launchers/TFC-RP-Tails.desktop /usr/share/applications/
+
+ echo ${sudo_pwd} | sudo -S rm -r /opt/tfc/launchers/
+ echo ${sudo_pwd} | sudo -S rm /opt/tfc/requirements-relay.txt
+
+ install_complete "Installation of the TFC Relay configuration is now complete."
+}
+
+
+install_relay () {
+ if [[ "$(lsb_release -a 2>/dev/null | grep Tails)" ]]; then
+ install_relay_tails
+ else
+ install_relay_ubuntu
+ fi
+}
+
+
+read_sudo_pwd () {
+ read -s -p "[sudo] password for ${USER}: " sudo_pwd
+ until (echo ${sudo_pwd} | sudo -S echo '' 2>/dev/null)
+ do
+ echo -e '\nSorry, try again.'
+ read -s -p "[sudo] password for ${USER}: " sudo_pwd
+ done
+ echo
+}
+
+
+check_tails_tor_version () {
+ included=($(tor --version |awk '{print $3}' |head -c 5))
+ required="0.3.5"
+
+ if ! [[ "$(printf '%s\n' "$required" "$included" | sort -V | head -n1)" = "$required" ]]; then
+ clear
+ echo -e "\nError: This Tails includes Tor $included but Tor $required is required. Exiting.\n" 1>&2
+ exit 1
+ fi
+}
+
+
+tor_dependencies () {
+ available=($(apt-cache policy tor |grep Candidate | awk '{print $2}' |head -c 5))
+ required="0.3.5"
+
+ if ! [[ "$(printf '%s\n' "$required" "$available" | sort -V | head -n1)" = "$required" ]]; then
+ # If repository does not provide 0.3.5, default to 0.3.5 experimental.
+        sudo rm /etc/apt/sources.list.d/torproject.list 2>/dev/null || true
+
+ if [[ -f /etc/upstream-release/lsb-release ]]; then
+ # Linux Mint etc.
+ codename=($(cat /etc/upstream-release/lsb-release |grep DISTRIB_CODENAME |cut -c 18-))
+ else
+ # *buntu
+ codename=($(lsb_release -a 2>/dev/null |grep Codename |awk '{print $2}'))
+ fi
+
+ url="https://deb.torproject.org/torproject.org"
+
+ echo "deb ${url} ${codename} main" | sudo tee -a /etc/apt/sources.list.d/torproject.list
+ echo "deb-src ${url} ${codename} main" | sudo tee -a /etc/apt/sources.list.d/torproject.list
+        echo "deb ${url} tor-nightly-master-${codename} main" | sudo tee -a /etc/apt/sources.list.d/torproject.list
+        echo "deb-src ${url} tor-nightly-master-${codename} main" | sudo tee -a /etc/apt/sources.list.d/torproject.list
+
+ # SKS Keyservers' Onion Service URL is verifiable via https://sks-keyservers.net/overview-of-pools.php
+ gpg --keyserver hkp://jirk5u4osbsr34t5.onion --recv-keys A3C4F0F979CAA22CDBA8F512EE8CBC9E886DDD89
+ gpg --export A3C4F0F979CAA22CDBA8F512EE8CBC9E886DDD89 | sudo apt-key add -
+ fi
}
@@ -196,188 +514,126 @@ kill_network () {
done
clear
- echo -e "\nThis computer needs to be airgapped. The installer has"\
- "\ndisabled network interfaces as a first line of defense."
-
- read -n 1 -s -p "\nDisconnect Ethernet cable now and press any key to continue the installation."
+ c_echo ''
+ c_echo " This computer needs to be air gapped. The installer has "
+ c_echo "disabled network interfaces as the first line of defense."
+ c_echo ''
+ c_echo "Disconnect the Ethernet cable and press any key to continue."
+ read -n 1 -s -p ''
echo -e '\n'
}
-install_tcb () {
- sudo apt update
- sudo apt install python3-pip python3-tk python3.6 python3.6-dev libffi-dev net-tools -y
-
- download_common
- download_tcb
- download_tcb_specific
- # download_common_tests
- # download_tcb_tests
-
- python3.6 -m pip download -r requirements.txt --require-hashes
-
- kill_network
-
- python3.6 -m pip install virtualenv-15.1.0-py2.py3-none-any.whl
- python3.6 -m virtualenv --system-site-packages venv_tfc
-
- activate_tfc_venv
- python3.6 -m pip install six-1.10.0-py2.py3-none-any.whl
- python3.6 -m pip install pycparser-2.18.tar.gz
- python3.6 -m pip install cffi-1.10.0-cp36-cp36m-manylinux1_x86_64.whl
- python3.6 -m pip install argon2_cffi-16.3.0-cp36-cp36m-manylinux1_x86_64.whl
- python3.6 -m pip install PyNaCl-1.1.2-cp36-cp36m-manylinux1_x86_64.whl
- python3.6 -m pip install pyserial-3.4-py2.py3-none-any.whl
- deactivate
-
- sudo mv $HOME/tfc/tfc.png /usr/share/pixmaps/
- sudo mv $HOME/tfc/launchers/TFC-TxM.desktop /usr/share/applications/
- sudo mv $HOME/tfc/launchers/TFC-RxM.desktop /usr/share/applications/
-
- chmod a+rwx -R $HOME/tfc/
-
- rm -r $HOME/tfc/launchers/
- rm $HOME/tfc/requirements.txt
- rm $HOME/tfc/virtualenv-15.1.0-py2.py3-none-any.whl
- rm $HOME/tfc/six-1.10.0-py2.py3-none-any.whl
- rm $HOME/tfc/pycparser-2.18.tar.gz
- rm $HOME/tfc/cffi-1.10.0-cp36-cp36m-manylinux1_x86_64.whl
- rm $HOME/tfc/argon2_cffi-16.3.0-cp36-cp36m-manylinux1_x86_64.whl
- rm $HOME/tfc/PyNaCl-1.1.2-cp36-cp36m-manylinux1_x86_64.whl
- rm $HOME/tfc/pyserial-3.4-py2.py3-none-any.whl
-
- sudo adduser $USER dialout
-
+add_serial_permissions () {
clear
- echo -e "\nInstallation of TFC on this device is now complete."\
- "\nReboot the computer to update serial port use rights.\n"
-}
+ c_echo ''
+ c_echo "Setting serial permissions. If available, please connect the"
+ c_echo "USB-to-serial/TTL adapter now and press any key to continue."
+ read -n 1 -s -p ''
+ echo -e '\n'
+ sleep 3 # Wait for USB serial interfaces to register
+ # Add user to the dialout group to allow serial access after reboot
+ sudo adduser ${USER} dialout
-install_local_test () {
- sudo apt update
- sudo apt install python3-pip python3-tk python3.6 python3.6-dev libffi-dev pidgin pidgin-otr terminator -y
+ # Add temporary permissions for serial interfaces until reboot
+ arr=($(ls /sys/class/tty | grep USB)) || true
+ for i in "${arr[@]}"; do
+ sudo chmod 666 /dev/${i}
+ done
- download_common
- download_tcb
- download_nh
- download_local_test_specific
- # download_common_tests
- # download_tcb_tests
- # download_nh_tests
-
- python3.5 -m pip install virtualenv
- python3.6 -m pip install virtualenv
- python3.5 -m virtualenv --system-site-packages venv_nh
- python3.6 -m virtualenv --system-site-packages venv_tfc
-
- activate_nh_venv
- python3.5 -m pip install -r requirements-nh.txt --require-hashes
- deactivate
-
- activate_tfc_venv
- python3.6 -m pip install -r requirements.txt --require-hashes
- deactivate
-
- sudo mv $HOME/tfc/tfc.png /usr/share/pixmaps/
- sudo mv $HOME/tfc/launchers/TFC-local-test.desktop /usr/share/applications/
-
- mkdir -p $HOME/.config/terminator 2>/dev/null
- if [ -f $HOME/.config/terminator/config ]; then
- mv $HOME/.config/terminator/config "$HOME/.config/terminator/config_backup_at_$(date +%Y-%m-%d_%H-%M-%S)" 2>/dev/null
+ if [[ -e /dev/ttyS0 ]]; then
+ sudo chmod 666 /dev/ttyS0
fi
- mv $HOME/tfc/launchers/config $HOME/.config/terminator/config
- sudo chown $USER -R $HOME/.config/terminator/
-
- chmod a+rwx -R $HOME/tfc/
-
- rm -r $HOME/tfc/launchers/
- rm $HOME/tfc/requirements.txt
- rm $HOME/tfc/requirements-nh.txt
-
- clear
- echo -e "\nInstallation of TFC for local testing is now complete.\n"
}
-install_nh_ubuntu () {
- sudo apt update
- sudo apt install python3-pip python3-tk pidgin pidgin-otr -y
-
- download_common
- download_nh
- # download_common_tests
- # download_nh_tests
-
- python3.5 -m pip install virtualenv
- python3.5 -m virtualenv --system-site-packages venv_nh
-
- activate_nh_venv
- python3.5 -m pip install -r requirements-nh.txt --require-hashes
- deactivate
-
- sudo mv $HOME/tfc/tfc.png /usr/share/pixmaps/
- sudo mv $HOME/tfc/launchers/TFC-NH.desktop /usr/share/applications/
-
- chmod a+rwx -R $HOME/tfc/
-
- rm -r $HOME/tfc/launchers/
- rm $HOME/tfc/requirements-nh.txt
-
- sudo adduser $USER dialout
-
- clear
- echo -e "\nInstallation of NH configuration is now complete."\
- "\nReboot the computer to update serial port use rights.\n"
+c_echo () {
+ # Justify printed text to center of terminal
+ printf "%*s\n" $(( ( $(echo $1 | wc -c ) + 80 ) / 2 )) "$1"
}
-install_nh_tails () {
- sudo apt update
- sudo apt install python3-tk
+create_install_dir () {
+ if [[ ${sudo_pwd} ]]; then
+ # Tails
+ if [[ -d "/opt/tfc" ]]; then
+ echo ${sudo_pwd} | sudo -S rm -r /opt/tfc
+ fi
+ echo ${sudo_pwd} | sudo -S mkdir -p /opt/tfc 2>/dev/null
- download_common
- download_nh
- # download_common_tests
- # download_nh_tests
-
- sudo mv tfc.png /usr/share/pixmaps/
- sudo mv $HOME/tfc/launchers/TFC-NH-Tails.desktop /usr/share/applications/
-
- chmod a+rwx -R $HOME/tfc/
-
- rm -r $HOME/tfc/launchers/
- rm $HOME/tfc/requirements-nh.txt
-
- clear
- echo -e "\nInstallation of NH configuration is now complete.\n"
- # Tails user is already in dialout group so no restart is required.
-}
-
-
-install_nh () {
- if [ "$(lsb_release -a 2>/dev/null | grep Tails)" ]; then
- install_nh_tails
else
- install_nh_ubuntu
+ # *buntu
+ if [[ -d "/opt/tfc" ]]; then
+ sudo rm -r /opt/tfc
+ fi
+ sudo mkdir -p /opt/tfc 2>/dev/null
fi
}
-architecture_check () {
- if ! [ "$(uname -m 2>/dev/null | grep x86_64)" ]; then
- echo -e "\nError: Invalid system architecture. Exiting.\n" 1>&2
- exit 1
+create_user_data_dir () {
+ if [[ -d "$HOME/tfc" ]]; then
+ mv $HOME/tfc tfc_backup_at_$(date +%Y-%m-%d_%H-%M-%S)
+ fi
+ mkdir -p $HOME/tfc 2>/dev/null
+}
+
+
+create_terminator_config () {
+ mkdir -p $HOME/.config/terminator 2>/dev/null
+ if [[ -f $HOME/.config/terminator/config ]]; then
+
+ backup_file="$HOME/.config/terminator/config_backup_at_$(date +%Y-%m-%d_%H-%M-%S)"
+ mv $HOME/.config/terminator/config ${backup_file} 2>/dev/null
+
+ clear
+ c_echo ''
+ c_echo "NOTICE"
+ c_echo "An existing configuration file for the Terminator"
+ c_echo "application was found and backed up into"
+ c_echo ''
+ c_echo "${backup_file}"
+ c_echo ''
+ c_echo "Press any key to continue."
+ read -n 1 -s -p ''
+ echo ''
+ fi
+
+ cp $1 $HOME/.config/terminator/config
+ sudo chown ${USER} -R $HOME/.config/terminator/
+ modify_terminator_font_size
+}
+
+
+modify_terminator_font_size () {
+ width=$(get_screen_width)
+ # Defaults in terminator config file are for 1920 pixels wide screens
+ if (( $width < 1600 )); then
+ sed -i -e 's/font = Monospace 11/font = Monospace 8/g' $HOME/.config/terminator/config # Normal config
+ sed -i -e 's/font = Monospace 10.5/font = Monospace 7/g' $HOME/.config/terminator/config # Data Diode config
+ elif (( $width < 1920 )); then
+ sed -i -e 's/font = Monospace 11/font = Monospace 9/g' $HOME/.config/terminator/config # Normal config
+ sed -i -e 's/font = Monospace 10.5/font = Monospace 8.5/g' $HOME/.config/terminator/config # Data Diode config
fi
}
-root_check() {
- if [[ !$EUID -ne 0 ]]; then
- clear
- echo -e "\nError: This installer must not be run as root.\n" 1>&2
- exit 1
- fi
+get_screen_width () {
+ xdpyinfo | grep dimensions | sed -r 's/^[^0-9]*([0-9]+).*$/\1/'
+}
+
+
+install_complete () {
+ clear
+ c_echo ''
+ c_echo "$*"
+ c_echo ''
+ c_echo "Press any key to close the installer."
+ read -n 1 -s -p ''
+ echo ''
+
+ kill -9 $PPID
}
@@ -386,16 +642,17 @@ dpkg_check () {
tput sc
while sudo fuser /var/lib/dpkg/lock >/dev/null 2>&1 ; do
case $(($i % 4)) in
- 0 ) j="-" ;;
- 1 ) j="\\" ;;
- 2 ) j="|" ;;
- 3 ) j="/" ;;
+ 0 ) j="." ;;
+ 1 ) j="o" ;;
+ 2 ) j="O" ;;
+ 3 ) j="o" ;;
esac
tput rc
- echo -en "\r[$j] Waiting for other software managers to finish..."
+ echo -en "\rWaiting for other software managers to finish..$j"
sleep 0.5
((i=i+1))
done
+ echo ''
}
@@ -403,32 +660,40 @@ arg_error () {
clear
echo -e "\nUsage: bash install.sh [OPTION]\n"
echo "Mandatory arguments"
- echo " tcb Install TxM/RxM configuration (Ubuntu 17.04 64-bit)"
- echo " nh Install NH configuration (Ubuntu 17.04 64-bit / Tails 3.0+)"
- echo -e " lt local testing mode (Ubuntu 17.04 64-bit)\n"
+ echo " tcb Install Transmitter/Receiver Program (*buntu 18.04+)"
+ echo " relay Install Relay Program (*buntu 18.04+ / Tails (Debian Buster+))"
+ echo -e " local Install insecure local testing mode (*buntu 18.04+)\n"
exit 1
}
-create_install_dir () {
- if [ -d "$HOME/tfc" ]; then
- mv $HOME/tfc tfc_backup_at_$(date +%Y-%m-%d_%H-%M-%S)
+root_check() {
+    if [[ $EUID -eq 0 ]]; then
+ clear
+ echo -e "\nError: This installer must not be run as root. Exiting.\n" 1>&2
+ exit 1
+ fi
+}
+
+
+architecture_check () {
+ if ! [[ "$(uname -m 2>/dev/null | grep x86_64)" ]]; then
+ clear
+ echo -e "\nError: Invalid system architecture. Exiting.\n" 1>&2
+ exit 1
fi
- mkdir -p $HOME/tfc 2>/dev/null
}
set -e
architecture_check
root_check
-dpkg_check
-
-create_install_dir
-cd $HOME/tfc/
+sudo_pwd='';
case $1 in
- tcb ) install_tcb;;
- nh ) install_nh;;
- lt ) install_local_test;;
- * ) arg_error;;
+ tcb ) install_tcb;;
+ relay ) install_relay;;
+ local ) install_local_test;;
+ dev ) install_developer;;
+ * ) arg_error;;
esac
diff --git a/install.sh.asc b/install.sh.asc
deleted file mode 100644
index 5c91c7f..0000000
--- a/install.sh.asc
+++ /dev/null
@@ -1,17 +0,0 @@
------BEGIN PGP SIGNATURE-----
-Version: GnuPG v1
-
-iQIcBAABAgAGBQJX431gAAoJENJrq8gPhjL09I8QAK23lNDZvRrWiqipHTV8+RIB
-n7MYk69FjgWnbBwLBlqfBrlGiNu5sE0j7yLGZrPUmKJv5s4exKY9Aw8iz+IDK85r
-z6a4Ag84hnBwbkGwf/4qVmHFZUfvPFUgRUbPPH/PvB+N8pJbhF90UgaWcNGEJQCi
-+jBMUcP9MEUcnUOA5oPwa7U9SfNim9daQEBcwvHiAJwM6kfVqv1ZY8IlhqwpT43x
-6IQhhzJSzIwyZR/v1ZVNsGtGd/V33iELaJNscS81dvt8zuv8t3hPc34ea7UCu4Kp
-16mdzpzApawN4cwH2CGQomBSkECI7Lo9MMl969w39LXxpq3Y8lvkFyULy1Mi34Fu
-BBDzvdsOH4uLFnUML7Y0jn72xU+nsSzN7YYxRqdd+pSkNvv0jSvc/nzocCkPinBU
-50toZu0fco21wAjcRaqQ487jfLBNdXvqJ6Shnb0FYl3t4YyKqLWSXnQLnQschEww
-tFQ1AlnK1hG7kvdYOMhdFt/02E8/+ANuyavLixDSrOdyAwSeKdG3f6qKyI638izN
-P4yF3FNdswxjXHaf1skVN0d27OUc1lezAinOWKbj0PtTQtH/tWccOvVqKStV1xiz
-MUP4AX7g4M8V2QgBhDgMZFlqj9fUuqo94ZdmGGoNXeRgKybRmm32GPqll/4M2c0M
-2UwA3ijKZWN3fji1jzSt
-=OB/+
------END PGP SIGNATURE-----
diff --git a/launchers/TFC-NH-Tails.desktop b/launchers/TFC-NH-Tails.desktop
deleted file mode 100755
index 509c643..0000000
--- a/launchers/TFC-NH-Tails.desktop
+++ /dev/null
@@ -1,8 +0,0 @@
-[Desktop Entry]
-Version=1.17.08
-Name=TFC-NH
-Exec=gnome-terminal -x bash -c "cd $HOME/tfc && python3.5 'nh.py' || bash"
-Icon=tfc.png
-Terminal=false
-Type=Application
-Categories=Network;Messaging;Security;
\ No newline at end of file
diff --git a/launchers/TFC-NH.desktop b/launchers/TFC-NH.desktop
deleted file mode 100755
index 2235e96..0000000
--- a/launchers/TFC-NH.desktop
+++ /dev/null
@@ -1,8 +0,0 @@
-[Desktop Entry]
-Version=1.17.08
-Name=TFC-NH
-Exec=gnome-terminal --disable-factory -x bash -c "cd $HOME/tfc && source venv_nh/bin/activate && python3.5 'nh.py' && deactivate || bash"
-Icon=tfc.png
-Terminal=false
-Type=Application
-Categories=Network;Messaging;Security;
\ No newline at end of file
diff --git a/launchers/TFC-RP-Tails.desktop b/launchers/TFC-RP-Tails.desktop
new file mode 100755
index 0000000..5e89361
--- /dev/null
+++ b/launchers/TFC-RP-Tails.desktop
@@ -0,0 +1,8 @@
+[Desktop Entry]
+Version=1.19.01
+Name=TFC-Relay
+Exec=gnome-terminal -x bash -c "cd /opt/tfc && python3.5 'relay.py' || bash"
+Icon=tfc.png
+Terminal=false
+Type=Application
+Categories=Network;Messaging;Security;
\ No newline at end of file
diff --git a/launchers/TFC-RP.desktop b/launchers/TFC-RP.desktop
new file mode 100755
index 0000000..e276ffe
--- /dev/null
+++ b/launchers/TFC-RP.desktop
@@ -0,0 +1,8 @@
+[Desktop Entry]
+Version=1.19.01
+Name=TFC-Relay
+Exec=gnome-terminal --disable-factory -x bash -c "cd /opt/tfc && source venv_relay/bin/activate && python3.6 'relay.py' && deactivate || bash"
+Icon=tfc.png
+Terminal=false
+Type=Application
+Categories=Network;Messaging;Security;
\ No newline at end of file
diff --git a/launchers/TFC-RxM.desktop b/launchers/TFC-RxM.desktop
deleted file mode 100755
index 7a8c2b7..0000000
--- a/launchers/TFC-RxM.desktop
+++ /dev/null
@@ -1,8 +0,0 @@
-[Desktop Entry]
-Version=1.17.08
-Name=TFC-RxM
-Exec=gnome-terminal --disable-factory --maximize -x bash -c "cd $HOME/tfc && source venv_tfc/bin/activate && python3.6 'tfc.py' -rx && deactivate || bash"
-Icon=tfc.png
-Terminal=false
-Type=Application
-Categories=Network;Messaging;Security;
\ No newline at end of file
diff --git a/launchers/TFC-RxP.desktop b/launchers/TFC-RxP.desktop
new file mode 100755
index 0000000..5f4e493
--- /dev/null
+++ b/launchers/TFC-RxP.desktop
@@ -0,0 +1,8 @@
+[Desktop Entry]
+Version=1.19.01
+Name=TFC-Receiver
+Exec=gnome-terminal --disable-factory --maximize -x bash -c "cd /opt/tfc && source venv_tcb/bin/activate && python3.6 'tfc.py' -r && deactivate || bash"
+Icon=tfc.png
+Terminal=false
+Type=Application
+Categories=Network;Messaging;Security;
\ No newline at end of file
diff --git a/launchers/TFC-TxM.desktop b/launchers/TFC-TxM.desktop
deleted file mode 100755
index 31059c9..0000000
--- a/launchers/TFC-TxM.desktop
+++ /dev/null
@@ -1,8 +0,0 @@
-[Desktop Entry]
-Version=1.17.08
-Name=TFC-TxM
-Exec=gnome-terminal --disable-factory --maximize -x bash -c "cd $HOME/tfc && source venv_tfc/bin/activate && python3.6 'tfc.py' && deactivate || bash"
-Icon=tfc.png
-Terminal=false
-Type=Application
-Categories=Network;Messaging;Security;
\ No newline at end of file
diff --git a/launchers/TFC-TxP.desktop b/launchers/TFC-TxP.desktop
new file mode 100755
index 0000000..294cfbb
--- /dev/null
+++ b/launchers/TFC-TxP.desktop
@@ -0,0 +1,8 @@
+[Desktop Entry]
+Version=1.19.01
+Name=TFC-Transmitter
+Exec=gnome-terminal --disable-factory --maximize -x bash -c "cd /opt/tfc && source venv_tcb/bin/activate && python3.6 'tfc.py' && deactivate || bash"
+Icon=tfc.png
+Terminal=false
+Type=Application
+Categories=Network;Messaging;Security;
\ No newline at end of file
diff --git a/launchers/TFC-local-test.desktop b/launchers/TFC-local-test.desktop
index 269c591..ca6c8f5 100755
--- a/launchers/TFC-local-test.desktop
+++ b/launchers/TFC-local-test.desktop
@@ -1,8 +1,8 @@
[Desktop Entry]
-Version=1.17.08
+Version=1.19.01
Name=TFC-LR
Comment=Local testing
-Exec=terminator -m -p tfc -l tfc-lr
+Exec=terminator -m -u -p tfc -l tfc-lr
Icon=tfc.png
Terminal=false
Type=Application
@@ -11,15 +11,15 @@ Actions=TFC-RL;TFC-DD-LR;TFC-DD-RL
[Desktop Action TFC-RL]
Name=TFC-RL
-Exec=terminator -m -p tfc -l tfc-rl
+Exec=terminator -m -u -p tfc -l tfc-rl
OnlyShowIn=Unity;
[Desktop Action TFC-DD-LR]
Name=TFC-DD-LR
-Exec=terminator -m -p tfc -l tfc-dd-lr
+Exec=terminator -m -u -p tfc-dd -l tfc-dd-lr
OnlyShowIn=Unity;
[Desktop Action TFC-DD-RL]
Name=TFC-DD-RL
-Exec=terminator -m -p tfc -l tfc-dd-rl
+Exec=terminator -m -u -p tfc-dd -l tfc-dd-rl
OnlyShowIn=Unity;
\ No newline at end of file
diff --git a/launchers/config b/launchers/config
index 4154965..feec00b 100644
--- a/launchers/config
+++ b/launchers/config
@@ -24,7 +24,7 @@
[[[child1]]]
order = 0
parent = root
- ratio = 0.5
+ ratio = 0.585
type = HPaned
[[[child2]]]
order = 0
@@ -32,22 +32,22 @@
ratio = 0.5
type = VPaned
- [[[txm_emulator]]]
- command = cd $HOME/tfc/ && source venv_tfc/bin/activate && python3.6 tfc.py -l && deactivate || bash
+ [[[source_computer_emulator]]]
+ command = cd /opt/tfc/ && source venv_tfc/bin/activate && python3.6 tfc.py -l && deactivate || bash
directory = ""
order = 1
parent = child2
profile = tfc
type = Terminal
- [[[rxm_emulator]]]
- command = cd $HOME/tfc/ && source venv_tfc/bin/activate && python3.6 tfc.py -l -rx && deactivate || bash
+ [[[destination_computer_emulator]]]
+ command = cd /opt/tfc/ && source venv_tfc/bin/activate && python3.6 tfc.py -l -r && deactivate || bash
directory = ""
order = 0
parent = child2
profile = tfc
type = Terminal
- [[[nh_emulator]]]
- command = cd $HOME/tfc/ && source venv_nh/bin/activate && python3.5 nh.py -l && deactivate || bash
+ [[[networked_computer_emulator]]]
+ command = cd /opt/tfc/ && source venv_tfc/bin/activate && python3.6 relay.py -l && deactivate || bash
directory = ""
order = 1
parent = child1
@@ -67,7 +67,7 @@
[[[child1]]]
order = 0
parent = root
- ratio = 0.5
+ ratio = 0.415
type = HPaned
[[[child2]]]
order = 1
@@ -75,22 +75,22 @@
ratio = 0.5
type = VPaned
- [[[txm_emulator]]]
- command = cd $HOME/tfc/ && source venv_tfc/bin/activate && python3.6 tfc.py -l && deactivate || bash
+ [[[source_computer_emulator]]]
+ command = cd /opt/tfc/ && source venv_tfc/bin/activate && python3.6 tfc.py -l && deactivate || bash
directory = ""
order = 1
parent = child2
profile = tfc
type = Terminal
- [[[rxm_emulator]]]
- command = cd $HOME/tfc/ && source venv_tfc/bin/activate && python3.6 tfc.py -l -rx && deactivate || bash
+ [[[destination_computer_emulator]]]
+ command = cd /opt/tfc/ && source venv_tfc/bin/activate && python3.6 tfc.py -l -r && deactivate || bash
directory = ""
order = 0
parent = child2
profile = tfc
type = Terminal
- [[[nh_emulator]]]
- command = cd $HOME/tfc/ && source venv_nh/bin/activate && python3.5 nh.py -l && deactivate || bash
+ [[[networked_computer_emulator]]]
+ command = cd /opt/tfc/ && source venv_tfc/bin/activate && python3.6 relay.py -l && deactivate || bash
directory = ""
order = 0
parent = child1
@@ -110,7 +110,7 @@
[[[child1]]]
order = 0
parent = root
- ratio = 0.45
+ ratio = 0.545
type = HPaned
[[[child2]]]
order = 0
@@ -120,7 +120,7 @@
[[[child3]]]
order = 1
parent = child1
- ratio = 0.18
+ ratio = 0.14
type = HPaned
[[[child4]]]
order = 0
@@ -128,44 +128,44 @@
ratio = 0.5
type = VPaned
- [[[txm_emulator]]]
- command = cd $HOME/tfc/ && source venv_tfc/bin/activate && python3.6 tfc.py -l -d && deactivate || bash
+ [[[source_computer_emulator]]]
+ command = cd /opt/tfc/ && source venv_tfc/bin/activate && python3.6 tfc.py -l -d && deactivate || bash
directory = ""
order = 1
parent = child2
- profile = tfc
+ profile = tfc-dd
type = Terminal
- [[[rxm_emulator]]]
- command = cd $HOME/tfc/ && source venv_tfc/bin/activate && python3.6 tfc.py -l -rx && deactivate || bash
+ [[[destination_computer_emulator]]]
+ command = cd /opt/tfc/ && source venv_tfc/bin/activate && python3.6 tfc.py -l -r && deactivate || bash
directory = ""
order = 0
parent = child2
- profile = tfc
+ profile = tfc-dd
type = Terminal
- [[[nh_emulator]]]
- command = cd $HOME/tfc/ && source venv_nh/bin/activate && python3.5 nh.py -l -d && deactivate || bash
+ [[[networked_computer_emulator]]]
+ command = cd /opt/tfc/ && source venv_tfc/bin/activate && python3.6 relay.py -l -d && deactivate || bash
directory = ""
order = 1
parent = child3
- profile = tfc
+ profile = tfc-dd
type = Terminal
- [[[txm_dd_emulator]]]
- command = cd $HOME/tfc/ && python3.6 dd.py txnhlr
+ [[[source_computer_dd_emulator]]]
+ command = cd /opt/tfc/ && python3.6 dd.py scnclr
directory = ""
order = 1
parent = child4
- profile = tfc
+ profile = tfc-dd
type = Terminal
- [[[rxm_dd_emulator]]]
- command = cd $HOME/tfc/ && python3.6 dd.py nhrxlr
+ [[[destination_computer_dd_emulator]]]
+ command = cd /opt/tfc/ && python3.6 dd.py ncdclr
directory = ""
order = 0
parent = child4
- profile = tfc
+ profile = tfc-dd
type = Terminal
-
+
[[tfc-dd-rl]]
[[[root]]]
fullscreen = False
@@ -178,12 +178,12 @@
[[[child1]]]
order = 0
parent = root
- ratio = 0.55
+ ratio = 0.451
type = HPaned
[[[child2]]]
order = 0
parent = child1
- ratio = 0.82
+ ratio = 0.867
type = HPaned
[[[child3]]]
order = 1
@@ -196,41 +196,41 @@
ratio = 0.5
type = VPaned
- [[[txm_emulator]]]
- command = cd $HOME/tfc/ && source venv_tfc/bin/activate && python3.6 tfc.py -l -d && deactivate || bash
+ [[[source_computer_emulator]]]
+ command = cd /opt/tfc/ && source venv_tfc/bin/activate && python3.6 tfc.py -l -d && deactivate || bash
directory = ""
order = 1
parent = child4
- profile = tfc
+ profile = tfc-dd
type = Terminal
- [[[rxm_emulator]]]
- command = cd $HOME/tfc/ && source venv_tfc/bin/activate && python3.6 tfc.py -l -rx && deactivate || bash
+ [[[destination_computer_emulator]]]
+ command = cd /opt/tfc/ && source venv_tfc/bin/activate && python3.6 tfc.py -l -r && deactivate || bash
directory = ""
order = 0
parent = child4
- profile = tfc
+ profile = tfc-dd
type = Terminal
- [[[nh_emulator]]]
- command = cd $HOME/tfc/ && source venv_nh/bin/activate && python3.5 nh.py -l -d && deactivate || bash
+ [[[networked_computer_emulator]]]
+ command = cd /opt/tfc/ && source venv_tfc/bin/activate && python3.6 relay.py -l -d && deactivate || bash
directory = ""
order = 0
parent = child2
- profile = tfc
+ profile = tfc-dd
type = Terminal
- [[[txm_dd_emulator]]]
- command = cd $HOME/tfc/ && python3.6 dd.py txnhrl
+ [[[source_computer_dd_emulator]]]
+ command = cd /opt/tfc/ && python3.6 dd.py scncrl
directory = ""
order = 1
parent = child3
- profile = tfc
+ profile = tfc-dd
type = Terminal
- [[[rxm_dd_emulator]]]
- command = cd $HOME/tfc/ && python3.6 dd.py nhrxrl
+ [[[destination_computer_dd_emulator]]]
+ command = cd /opt/tfc/ && python3.6 dd.py ncdcrl
directory = ""
order = 0
parent = child3
- profile = tfc
+ profile = tfc-dd
type = Terminal
@@ -246,5 +246,19 @@
[[tfc]]
background_color = "#3c3f41"
background_image = None
+ use_system_font = False
+ font = Monospace 11
foreground_color = "#a1b6bd"
- show_titlebar = False
\ No newline at end of file
+ scrollback_infinite = True
+ show_titlebar = False
+ scrollbar_position = hidden
+
+ [[tfc-dd]]
+ background_color = "#3c3f41"
+ background_image = None
+ use_system_font = False
+ font = Monospace 10.5
+ foreground_color = "#a1b6bd"
+ scrollback_infinite = True
+ show_titlebar = False
+ scrollbar_position = hidden
diff --git a/nh.py b/nh.py
deleted file mode 100644
index 2e0575c..0000000
--- a/nh.py
+++ /dev/null
@@ -1,93 +0,0 @@
-#!/usr/bin/env python3.5
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see <https://www.gnu.org/licenses/>.
-"""
-
-import os
-import subprocess
-import sys
-import time
-
-from multiprocessing import Process, Queue
-
-from src.common.misc import ignored
-from src.common.output import c_print, clear_screen
-from src.common.statics import *
-
-from src.nh.commands import nh_command
-from src.nh.gateway import Gateway, gateway_loop
-from src.nh.misc import process_arguments
-from src.nh.pidgin import ensure_im_connection, im_command, im_incoming, im_outgoing
-from src.nh.settings import Settings
-from src.nh.tcb import rxm_outgoing, txm_incoming
-
-
-def main() -> None:
- """Load settings, establish gateway and initialize processes."""
- settings = Settings(*process_arguments())
- gateway = Gateway(settings)
-
- clear_screen()
- c_print(TFC, head=1, tail=1)
-
- ensure_im_connection()
-
- queues = {TXM_INCOMING_QUEUE: Queue(), # Packets from gateway to 'txm_incoming' process
- RXM_OUTGOING_QUEUE: Queue(), # Packets from TxM/IM client to RxM
- TXM_TO_IM_QUEUE: Queue(), # Packets from TxM to IM client
- TXM_TO_NH_QUEUE: Queue(), # Commands from TxM to NH
- TXM_TO_RXM_QUEUE: Queue(), # Commands from TxM to RxM
- NH_TO_IM_QUEUE: Queue(), # Commands from NH to IM client
- EXIT_QUEUE: Queue()} # Signal for normal exit
-
- process_list = [Process(target=gateway_loop, args=(queues, gateway )),
- Process(target=txm_incoming, args=(queues, settings )),
- Process(target=rxm_outgoing, args=(queues, settings, gateway )),
- Process(target=im_incoming, args=(queues, )),
- Process(target=im_outgoing, args=(queues, settings )),
- Process(target=im_command, args=(queues, )),
- Process(target=nh_command, args=(queues, settings, sys.stdin.fileno()))]
-
- for p in process_list:
- p.start()
-
- while True:
- with ignored(EOFError, KeyboardInterrupt):
- time.sleep(0.1)
- if not all([p.is_alive() for p in process_list]):
- for p in process_list:
- p.terminate()
- sys.exit(1)
-
- if not queues[EXIT_QUEUE].empty():
- command = queues[EXIT_QUEUE].get()
- for p in process_list:
- p.terminate()
- if command == WIPE:
- if TAILS in subprocess.check_output('lsb_release -a', shell=True):
- os.system('sudo poweroff')
- else:
- subprocess.Popen("find {} -name '{}*' -type f -exec shred -n 3 -z -u {{}} \;".format(DIR_USER_DATA, NH), shell=True).wait()
- subprocess.Popen("find {} -type f -exec shred -n 3 -z -u {{}} \;".format('$HOME/.purple/'), shell=True).wait()
- os.system('poweroff')
- else:
- sys.exit(0)
-
-
-if __name__ == '__main__':
- main()
diff --git a/relay.py b/relay.py
new file mode 100644
index 0000000..4f04e2a
--- /dev/null
+++ b/relay.py
@@ -0,0 +1,182 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import os
+import sys
+
+from multiprocessing import Process, Queue
+from typing import Dict
+
+from cryptography.hazmat.primitives.asymmetric.x448 import X448PrivateKey
+from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat
+
+from src.common.gateway import Gateway, gateway_loop
+from src.common.misc import ensure_dir, monitor_processes, process_arguments
+from src.common.output import print_title
+from src.common.statics import *
+
+from src.relay.client import c_req_manager, client_manager, g_msg_manager
+from src.relay.commands import relay_command
+from src.relay.onion import onion_service
+from src.relay.server import flask_server
+from src.relay.tcb import dst_outgoing, src_incoming
+
+
+def main() -> None:
+ """Load persistent settings and launch the Relay Program.
+
+ This function loads settings from the settings database and launches
+ processes for the Relay Program. It then monitors the EXIT_QUEUE for
+ EXIT/WIPE signals and each process in case one of them dies.
+
+ If you're reading this code to get the big picture on how TFC works,
+ start by looking at `tfc.py` for Transmitter Program functionality.
+ After you have reviewed the Transmitter Program's code, revisit the
+ code of this program.
+
+ The Relay Program operates multiple processes to enable real time IO
+ between multiple data sources and destinations.
+
+ Symbols:
+ process_name denotes the name of the process
+
+ ─>, <─, ↑, ↓ denotes the direction of data passed from one
+ process to another
+
+ (Description) denotes the description of data passed from one
+ process to another
+
+ ┈, ┊ denotes the link between a description and path
+ of data matching the description
+
+ ▶|, |◀ denotes the gateways where the direction of data
+ flow is enforced with hardware data diodes
+
+
+ Relay Program (Networked Computer)
+ ┏━━ ━━ ━━ ━━ ━━ ━━ ━━ ━━ ━━ ━━ ━━ ━━ ━━ ━━ ━━ ━━ ━━ ━━ ━━┓
+ ┃ ┃
+ (Contact management commands)
+ ┃ ┌─────────────────────────────┬─────────────────────┐ ┃
+ | | ↓
+ ┃ | ┌─────> relay_command ┌───> c_req_manager ┃
+ | │ │ |
+ ┃ | │ (Onion Service┈│ |┈(Contact requests) ┃
+ | │ private key) │ |
+ ┃ | │ ↓ | ┃
+ | │ onion_service ───────────────────────────> client on contact's
+ ┃ | (Relay Program┈│ ↑ ┊ ┃ Networked Computer
+ | commands) │ │┈(Outgoing msg/file/public key)
+ ┃ | │ │ ┃
+ Source ───▶|─────(── gateway_loop ─> src_incoming ─> flask_server <─┐
+ Computer ┃ | | | ┃
+ | | |
+ ┃ | (Local keys, commands | | ┃
+ | and copies of messages)┄| |
+ ┃ | ┊ ↓ | ┃
+ Destination <──|◀─────(───────────────────── dst_outgoing |
+ Computer ┃ | ┊ ↑ | ┃
+ ├──> g_msg_manager ┊ │ |
+ ┃ | ↑ ┊ │ | ┃
+ | (Group┈│ (Incoming┈│ (URL token)┈|
+ ┃ | management │ messages) │ | ┃
+ │ messages) │ │ |
+ ┃ ↓ │ │ | ┃
+ client_mgr │ │ |
+ ┃ └─> client ───────────┴─────────────────────┘ ┃
+ ↑
+ ┃ │ ┃
+ └──────────────────────────────────────────────────────────── flask_server on
+ ┃ ┊ ┃ contact's Networked
+ (Incoming message/file/public key/group management message) Computer
+ ┃ ┃
+ ┗━━ ━━ ━━ ━━ ━━ ━━ ━━ ━━ ━━ ━━ ━━ ━━ ━━ ━━ ━━ ━━ ━━ ━━ ━━┛
+
+
+ The image above gives a rough overview of the structure of the Relay
+ Program. The Relay Program acts as a protocol converter that reads
+ datagrams from the Source Computer. Outgoing message/file/public key
+ datagrams are made available in the user's Tor v3 Onion Service.
+ Copies of sent message datagrams as well as datagrams from contacts'
+ Onion Services are forwarded to the Destination Computer.
+ The Relay-to-Relay encrypted datagrams from contacts such as contact
+ requests, public keys and group management messages are displayed by
+ the Relay Program.
+
+ Outgoing message datagrams are loaded by contacts from the user's
+ Flask web server. To request messages intended for them, each
+ contact uses a contact-specific URL token to load the messages.
+ The URL token is the X448 shared secret derived from the per-session
+ ephemeral X448 values of the two conversing parties. The private
+ value stays on the Relay Program -- the public value is obtained by
+ connecting to the root domain of contact's Onion Service.
+ """
+ working_dir = f'{os.getenv("HOME")}/{DIR_TFC}'
+ ensure_dir(working_dir)
+ os.chdir(working_dir)
+
+ _, local_test, data_diode_sockets = process_arguments()
+ gateway = Gateway(NC, local_test, data_diode_sockets)
+
+ print_title(NC)
+
+ url_token_private_key = X448PrivateKey.generate()
+ url_token_public_key = url_token_private_key.public_key().public_bytes(encoding=Encoding.Raw,
+ format=PublicFormat.Raw).hex()
+
+ queues = \
+ {GATEWAY_QUEUE: Queue(), # All datagrams from `gateway_loop` to `src_incoming`
+ DST_MESSAGE_QUEUE: Queue(), # Message datagrams from `src_incoming`/`client` to `dst_outgoing`
+ M_TO_FLASK_QUEUE: Queue(), # Message/pubkey datagrams from `src_incoming` to `flask_server`
+ F_TO_FLASK_QUEUE: Queue(), # File datagrams from `src_incoming` to `flask_server`
+ SRC_TO_RELAY_QUEUE: Queue(), # Command datagrams from `src_incoming` to `relay_command`
+ DST_COMMAND_QUEUE: Queue(), # Command datagrams from `src_incoming` to `dst_outgoing`
+ CONTACT_KEY_QUEUE: Queue(), # Contact management commands from `relay_command` to `client_manager`
+ C_REQ_MGR_QUEUE: Queue(), # Contact requests management from `relay_command` to `c_req_manager`
+ URL_TOKEN_QUEUE: Queue(), # URL tokens from `client` to `flask_server`
+ GROUP_MSG_QUEUE: Queue(), # Group management messages from `client` to `g_msg_manager`
+ CONTACT_REQ_QUEUE: Queue(), # Contact requests from `flask_server` to `c_req_manager`
+ F_REQ_MGMT_QUEUE: Queue(), # Contact list management from `relay_command` to `c_req_manager`
+ GROUP_MGMT_QUEUE: Queue(), # Contact list management from `relay_command` to `g_msg_manager`
+ ONION_CLOSE_QUEUE: Queue(), # Onion Service close command from `relay_command` to `onion_service`
+ ONION_KEY_QUEUE: Queue(), # Onion Service private key from `relay_command` to `onion_service`
+ TOR_DATA_QUEUE: Queue(), # Open port for Tor from `onion_service` to `client_manager`
+ EXIT_QUEUE: Queue() # EXIT/WIPE signal from `relay_command` to `main`
+ } # type: Dict[bytes, Queue]
+
+ process_list = [Process(target=gateway_loop, args=(queues, gateway )),
+ Process(target=src_incoming, args=(queues, gateway )),
+ Process(target=dst_outgoing, args=(queues, gateway )),
+ Process(target=client_manager, args=(queues, gateway, url_token_private_key)),
+ Process(target=g_msg_manager, args=(queues, )),
+ Process(target=c_req_manager, args=(queues, )),
+ Process(target=flask_server, args=(queues, url_token_public_key)),
+ Process(target=onion_service, args=(queues, )),
+ Process(target=relay_command, args=(queues, gateway, sys.stdin.fileno()) )]
+
+ for p in process_list:
+ p.start()
+
+ monitor_processes(process_list, NC, queues)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/requirements-dev.txt b/requirements-dev.txt
new file mode 100644
index 0000000..c74152a
--- /dev/null
+++ b/requirements-dev.txt
@@ -0,0 +1,7 @@
+# Static type checking tool
+mypy
+
+# Unit test tools
+pytest
+pytest-cov
+pytest-xdist
diff --git a/requirements-nh.txt b/requirements-nh.txt
deleted file mode 100644
index 057248d..0000000
--- a/requirements-nh.txt
+++ /dev/null
@@ -1 +0,0 @@
-pyserial==3.4 --hash=sha512:8333ac2843fd136d5d0d63b527b37866f7d18afc3bb33c4938b63af077492aeb118eb32a89ac78547f14d59a2adb1e5d00728728275de62317da48dadf6cdff9
\ No newline at end of file
diff --git a/requirements-relay.txt b/requirements-relay.txt
new file mode 100644
index 0000000..d8ebe86
--- /dev/null
+++ b/requirements-relay.txt
@@ -0,0 +1,32 @@
+# Sub-dependencies are listed below dependencies
+
+# Pyserial (Connects the Source/Destination Computer to the Networked Computer)
+pyserial==3.4 --hash=sha512:8333ac2843fd136d5d0d63b527b37866f7d18afc3bb33c4938b63af077492aeb118eb32a89ac78547f14d59a2adb1e5d00728728275de62317da48dadf6cdff9
+
+# Stem (Connects to Tor and manages Onion Services)
+stem==1.7.1 --hash=sha512:a275f59bba650cb5bb151cf53fb1dd820334f9abbeae1a25e64502adc854c7f54c51bc3d6c1656b595d142fc0695ffad53aab3c57bc285421c1f4f10c9c3db4c
+
+# PySocks (Routes requests library through SOCKS5 proxy making Onion Service connections possible)
+pysocks==1.6.8 --hash=sha512:9b544cf11464142a5f347cd5688b48422249363a425ccf3887117152f2f1969713674c4bba714242432ae85f3d62e03edeb9cb7b73ebd225ed3b47b3da6896d5
+
+# Requests (Connects to the contact's Tor Onion Service)
+requests==2.21.0 --hash=sha512:f5db1cc049948a8cc38d1c3c2de9f997bc99b65b88bd2e052be62a8c2934773d33f471ce86d8cdcacc2e651b1545d88cc571ace62154a6ccb285a19c83836483
+certifi==2018.11.29 --hash=sha512:6f6cb73ec56d85ffc62eddd506c44fa597dfd3a7b74bad7f301482cad47c79d0ab7a3a390905ae46fe2a49f1007f6a1c33c41987ce769f9b5a1ea5fa773ea4eb
+chardet==3.0.4 --hash=sha512:bfae58c8ea19c87cc9c9bf3d0b6146bfdb3630346bd954fe8e9f7da1f09da1fc0d6943ff04802798a665ea3b610ee2d65658ce84fe5a89f9e93625ea396a17f4
+idna==2.8 --hash=sha512:fb07dbec1de86efbad82a4f73d98123c59b083c1f1277445204bef75de99ca200377ad2f1db8924ae79b31b3dd984891c87d0a6344ec4d07a0ddbbbc655821a3
+urllib3==1.24.1 --hash=sha512:fdba3d58539eb31dff22cdfad91536587db3ce575af4f4c803758211dbec46944e6cf9d5459d22da620c49a36fe3ca1ae2067c741bb3f643e7b548c4abfb0d7f
+
+# Flask (Onion Service web server that serves TFC public keys and ciphertexts to contacts)
+flask==1.0.2 --hash=sha512:0cca42400dc1019eb8c9fae32460967f64880f05627bdcb06c8df0ef0f7cc2d791c2a96ab6313bca10120a6f785aa0ccdad093e6ab3d7e997ed354fd432257e7
+click==7.0 --hash=sha512:6b30987349df7c45c5f41cff9076ed45b178b444fca1ab1965f4ae33d1631522ce0a2868392c736666e83672b8b20e9503ae9ce5016dce3fa8f77bc8a3674130
+itsdangerous==1.1.0 --hash=sha512:891c294867f705eb9c66274bd04ac5d93140d6e9beea6cbf9a44e7f9c13c0e2efa3554bdf56620712759a5cd579e112a782d25f3f91ba9419d60b2b4d2bc5b7c
+jinja2==2.10 --hash=sha512:672c1a112f76f399600a069c5ee882d5fdf065ff25f6b729ec12a266d7ef6f638c26d5cc680db7b3a375d9e1ae7323aed3c2a49eb03fc39dd1a1ca8b0d658b63
+markupsafe==1.1.0 --hash=sha512:103e80f9307ebb46178aad44d8d0fe36cfc019656ecb0249767d2cd249e8fbfc48ee9b2a5d7f25845312662ccf8b09dbee0a93f5ff573883eb40ec4511c89959
+werkzeug==0.14.1 --hash=sha512:0fa694cd71fa83d4a178e9f831fa9784c26e42feb5987e390ed88eb60ea2f829da5795206983236e3442ee1479dd4ca587d26dcb074a881d6d1b055bfc493c56
+
+# Cryptography (Handles URL token derivation)
+cryptography==2.5 --hash=sha512:820b591f3c838f86ee59e027986511abd3eb537bf8f5f4d2d499ab950a128bd2960c138616f0a6c36408fc72d6eefc27a14fddab9c5a6f4118e6bbad5e9d9d7f
+asn1crypto==0.24.0 --hash=sha512:8d9bc344981079ac6c00e71e161c34b6f403e575bbfe1ad06e30a3bcb33e0db317bdcb7aed2d18d510cb1b3ee340a649f7f77a00d271fcf3cc388e6655b67533
+cffi==1.11.5 --hash=sha512:32631c8a407f77c4580e75122a79d2f14fbc90ea958ecd9ff0a01c83280aec8b48ac202fc55c1d4aaf09975c9d1b8c21858666076ab554a71577c7a89236e87f
+pycparser==2.19 --hash=sha512:7f830e1c9066ee2d297a55e2bf6db4bf6447b6d9da0145d11a88c3bb98505755fb7986eafa6e06ae0b7680838f5e5d6a6d188245ca5ad45c2a727587bac93ab5
+six==1.12.0 --hash=sha512:326574c7542110d2cd8071136a36a6cffc7637ba948b55e0abb7f30f3821843073223301ecbec1d48b8361b0d7ccb338725eeb0424696efedc3f6bd2a23331d3
diff --git a/requirements-venv.txt b/requirements-venv.txt
new file mode 100644
index 0000000..1699c74
--- /dev/null
+++ b/requirements-venv.txt
@@ -0,0 +1,5 @@
+# Sub-dependencies are listed below dependencies
+
+# Virtual environment (Used to create an isolated Python environment for TFC dependencies)
+virtualenv==16.2.0 --hash=sha512:d08800652cf3c2a695971b54be32ded4bccd6b0223b8586c6e2348b8f60be2df7f47aed693f20e106fba11267819c81b7f7a5c3a75f89e36740c6639274a9a50
+setuptools==40.6.3 --hash=sha512:bdbd2079d053409838690709389fa09cb498ee055c829e622d57c0b07069b0ec5065c64f5f76994c27fc8563ad47cd08eef843240539744223f5371b4d2daf1a
diff --git a/requirements.txt b/requirements.txt
index e7dcd1f..d86e87c 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,11 +1,25 @@
-pyserial==3.4 --hash=sha512:8333ac2843fd136d5d0d63b527b37866f7d18afc3bb33c4938b63af077492aeb118eb32a89ac78547f14d59a2adb1e5d00728728275de62317da48dadf6cdff9
-virtualenv==15.1.0 --hash=sha512:9988af801d9ad15c3f9831489ee9b49b54388e8349be201e7f7db3f2f1e59d033d3117f12e2f1909d65f052c5f1eacd87a894c6f7f703d770add3a0179e95863
+# Sub-dependencies are listed below dependencies
-# Argon2
-six==1.10.0 --hash=sha512:a41b40b720c5267e4a47ffb98cdc79238831b4fbc0b20abb125504881b73ae38d5ef0215ee91f0d3582e7887244346e45da9410195d023105fccd96239f0ee95
-argon2_cffi==16.3.0 --hash=sha512:0198e9d9c438a4472ee44d73737cace6e15229daca6a82425f67832db79631e9fe56e64bbce68dd06c07de7b408c864df4c1d2e99e7b5729c93391c7f3e72327
+# Pyserial (Connects the Source/Destination Computer to the Networked Computer)
+pyserial==3.4 --hash=sha512:8333ac2843fd136d5d0d63b527b37866f7d18afc3bb33c4938b63af077492aeb118eb32a89ac78547f14d59a2adb1e5d00728728275de62317da48dadf6cdff9
-# PyNaCl
-pycparser==2.18 --hash=sha512:4754e4e7556d21da328bf7dbabf72f940c9b18f1457260d48208033b05e576919f45ab399e86ea49e82120116980d7d6f53e8b959d21b7b03a3b5bbea3672f13
-cffi==1.10.0 --hash=sha512:b2d3b0ff8c2c750cd405d2fd88555dff10e1d1d4a01a8a0ad636b4e1c9220bc2070e23619a70f0422d8d5b15f88f61fed129f27280520f7208c52df3fc133ec5
-PyNaCl==1.1.2 --hash=sha512:05148abb695b79edc118d646aa227a17ba636d07b253ac366c2d9cf7643e1e09c08daa6ffa2d81f9a1156f3446fd9ce770919b17c9205783f843fa176f993c1c
\ No newline at end of file
+# Argon2 (Derives keys that protect persistent user data)
+argon2_cffi==19.1.0 --hash=sha512:77b17303a5d22fc35ac4771be5c710627c80ed7d6bf6705f70015197dbbc2b699ad6af0604b4517d1afd2f6d153058150a5d2933d38e4b4ca741e4ac560ddf72
+cffi==1.11.5 --hash=sha512:32631c8a407f77c4580e75122a79d2f14fbc90ea958ecd9ff0a01c83280aec8b48ac202fc55c1d4aaf09975c9d1b8c21858666076ab554a71577c7a89236e87f
+pycparser==2.19 --hash=sha512:7f830e1c9066ee2d297a55e2bf6db4bf6447b6d9da0145d11a88c3bb98505755fb7986eafa6e06ae0b7680838f5e5d6a6d188245ca5ad45c2a727587bac93ab5
+six==1.12.0 --hash=sha512:326574c7542110d2cd8071136a36a6cffc7637ba948b55e0abb7f30f3821843073223301ecbec1d48b8361b0d7ccb338725eeb0424696efedc3f6bd2a23331d3
+
+# PyNaCl (Handles TCB-side XChaCha20-Poly1305 symmetric encryption)
+PyNaCl==1.3.0 --hash=sha512:c4017c38b026a5c531b15839b8d61d1fae9907ba1960c2f97f4cd67fe0827729346d5186a6d6927ba84f64b4cbfdece12b287aa7750a039f4160831be871cea3
+# Duplicate sub-dependencies
+# cffi==1.11.5 --hash=sha512:32631c8a407f77c4580e75122a79d2f14fbc90ea958ecd9ff0a01c83280aec8b48ac202fc55c1d4aaf09975c9d1b8c21858666076ab554a71577c7a89236e87f
+# pycparser==2.19 --hash=sha512:7f830e1c9066ee2d297a55e2bf6db4bf6447b6d9da0145d11a88c3bb98505755fb7986eafa6e06ae0b7680838f5e5d6a6d188245ca5ad45c2a727587bac93ab5
+# six==1.12.0 --hash=sha512:326574c7542110d2cd8071136a36a6cffc7637ba948b55e0abb7f30f3821843073223301ecbec1d48b8361b0d7ccb338725eeb0424696efedc3f6bd2a23331d3
+
+# Cryptography (Handles TCB-side X448 key exchange)
+cryptography==2.5 --hash=sha512:820b591f3c838f86ee59e027986511abd3eb537bf8f5f4d2d499ab950a128bd2960c138616f0a6c36408fc72d6eefc27a14fddab9c5a6f4118e6bbad5e9d9d7f
+asn1crypto==0.24.0 --hash=sha512:8d9bc344981079ac6c00e71e161c34b6f403e575bbfe1ad06e30a3bcb33e0db317bdcb7aed2d18d510cb1b3ee340a649f7f77a00d271fcf3cc388e6655b67533
+# Duplicate sub-dependencies
+# cffi==1.11.5 --hash=sha512:32631c8a407f77c4580e75122a79d2f14fbc90ea958ecd9ff0a01c83280aec8b48ac202fc55c1d4aaf09975c9d1b8c21858666076ab554a71577c7a89236e87f
+# pycparser==2.19 --hash=sha512:7f830e1c9066ee2d297a55e2bf6db4bf6447b6d9da0145d11a88c3bb98505755fb7986eafa6e06ae0b7680838f5e5d6a6d188245ca5ad45c2a727587bac93ab5
+# six==1.12.0 --hash=sha512:326574c7542110d2cd8071136a36a6cffc7637ba948b55e0abb7f30f3821843073223301ecbec1d48b8361b0d7ccb338725eeb0424696efedc3f6bd2a23331d3
diff --git a/src/common/crypto.py b/src/common/crypto.py
index 774fe60..4ddcd74 100755
--- a/src/common/crypto.py
+++ b/src/common/crypto.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,256 +16,462 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see <https://www.gnu.org/licenses/>.
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+
+---
+
+This module contains TFC's cryptographic functions. Most algorithms are
+based on the ChaCha20 stream cipher by Daniel J. Bernstein (djb).
+
+X448
+ChaCha20
+├─ Linux kernel CSPRNG
+├─ XChaCha20-Poly1305 (IETF) AEAD
+└─ BLAKE2b cryptographic hash function
+ └─ Argon2d key derivation function
"""
import hashlib
-import multiprocessing
import os
-from typing import Tuple
-
import argon2
-import nacl.encoding
+import nacl.bindings
import nacl.exceptions
-import nacl.public
import nacl.secret
import nacl.utils
+from cryptography.hazmat.primitives.asymmetric.x448 import X448PrivateKey, X448PublicKey
+from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat
+
from src.common.exceptions import CriticalError
-from src.common.misc import ignored
-from src.common.output import c_print, clear_screen, phase, print_on_previous_line
+from src.common.misc import ignored, separate_header
+from src.common.output import m_print, phase, print_on_previous_line
from src.common.statics import *
-def sha3_256(message: bytes) -> bytes:
- """Generate SHA3-256 digest from message."""
- return hashlib.sha3_256(message).digest()
+def blake2b(message: bytes, # Message to hash
+ key: bytes = b'', # Key for keyed hashing
+ salt: bytes = b'', # Salt for randomized hashing
+ person: bytes = b'', # Personalization string
+ digest_size: int = BLAKE2_DIGEST_LENGTH # Length of the digest
+ ) -> bytes: # The BLAKE2b digest
+ """Generate BLAKE2b digest (i.e. cryptographic hash) of a message.
+ BLAKE2 is the successor of SHA3-finalist BLAKE*, designed by
+ Jean-Philippe Aumasson, Samuel Neves, Zooko Wilcox-O'Hearn and
+ Christian Winnerlein. The hash function is based on the ChaCha20
+ stream cipher, designed by djb.
-def blake2s(message: bytes, key: bytes = b'') -> bytes:
- """Generate Blake2s digest from message."""
- return hashlib.blake2s(message, key=key).digest()
+ * BLAKE was designed by Jean-Philippe Aumasson, Luca Henzen,
+ Willi Meier, and Raphael C.-W. Phan.
+ For more details, see
+ https://blake2.net/
+ https://leastauthority.com/blog/BLAKE2-harder-better-faster-stronger-than-MD5/
-def sha256(message: bytes) -> bytes:
- """Generate SHA256 digest from message."""
- return hashlib.sha256(message).digest()
+ The reasons for using BLAKE2b in TFC include
+ o BLAKE received* more in-depth cryptanalysis than Keccak (SHA3):
-def hash_chain(message: bytes) -> bytes:
- """Mix several hash functions to distribute trust.
+ "Keccak received a significant amount of cryptanalysis,
+ although not quite the depth of analysis applied to BLAKE,
+ Grøstl, or Skein."
+ (https://nvlpubs.nist.gov/nistpubs/ir/2012/NIST.IR.7896.pdf # p. 13)
- This construction remains secure in case a weakness is discovered
- in one of the hash functions (e.g. insecure algorithm that is not
- unpredictable or that has weak preimage resistance, or if the
- algorithm is badly implemented).
+ * https://blake2.net/#cr
- In case where the implementation is malicious, this construction
- forces stateless implementations -- that try to compromise mixing
- phase -- to guess it's position in the construction, which will
- eventually lead to key state mismatch and thus detection.
+ o BLAKE shares design elements with SHA-2 that has 16 years of
+ cryptanalysis behind it.
+ (https://en.wikipedia.org/wiki/SHA-2#Cryptanalysis_and_validation)
+
+ o 128-bit collision/preimage/second-preimage resistance against
+ Grover's algorithm running on a quantum Turing machine.
+
+ o The algorithm is bundled in Python3.6's hashlib.
+
+ o Compared to SHA3-256, the algorithm runs faster on CPUs which
+ means better hash ratchet performance.
+
+ o Compared to SHA3-256, the algorithm runs slower on ASICs which
+ means attacks by high-budget adversaries are slower.
+
+ Note that while the default length of BLAKE2b (the implementation
+ optimized for AMD64 systems) digest is 512 bits, the digest size is
+ truncated to 256 bits for the use in TFC.
+
+ The correctness of the BLAKE2b implementation* is tested by TFC unit
+ tests. The testing is done in limited scope by using an official KAT.
+
+ * https://github.com/python/cpython/tree/3.6/Modules/_blake2
+ https://github.com/python/cpython/blob/3.6/Lib/hashlib.py
"""
- d1 = sha3_256(blake2s(sha256(message)))
- d2 = sha3_256(sha256(blake2s(message)))
-
- d3 = blake2s(sha3_256(sha256(message)))
- d4 = blake2s(sha256(sha3_256(message)))
-
- d5 = sha256(blake2s(sha3_256(message)))
- d6 = sha256(sha3_256(blake2s(message)))
-
- d7 = sha3_256(message)
- d8 = blake2s(message)
- d9 = sha256(message)
-
- # Mixing phase
- x1 = xor(d1, d2)
- x2 = xor(x1, d3)
- x3 = xor(x2, d4)
- x4 = xor(x3, d5)
- x5 = xor(x4, d6)
- x6 = xor(x5, d7)
- x7 = xor(x6, d8)
- x8 = xor(x7, d9)
-
- return x8
+ return hashlib.blake2b(message, digest_size=digest_size, key=key, salt=salt, person=person).digest()
-def argon2_kdf(password: str,
- salt: bytes,
- rounds: int = ARGON2_ROUNDS,
- memory: int = ARGON2_MIN_MEMORY,
- parallelism: int = None,
- local_test: bool = False) -> Tuple[bytes, int]:
- """Derive key from password and salt using Argon2d (PHC winner).
+def argon2_kdf(password: str, # Password to derive the key from
+ salt: bytes, # Salt to derive the key from
+ rounds: int = ARGON2_ROUNDS, # Number of iterations
+ memory: int = ARGON2_MIN_MEMORY, # Amount of memory to use (in bytes)
+ parallelism: int = 1 # Number of threads to use
+ ) -> bytes: # The derived key
+ """Derive an encryption key from password and salt using Argon2d.
- :param password: Password to derive key from
- :param salt: Salt to derive key from
- :param rounds: Number of iterations
- :param memory: Memory usage
- :param parallelism: Number of threads to use
- :param local_test: When True, splits parallelism to half
- :return: Derived key, amount of memory and number of threads used
+ Argon2 is a key derivation function (KDF) designed by Alex Biryukov,
+ Daniel Dinu, and Dmitry Khovratovich from the University of
+ Luxembourg. The algorithm is the winner of the 2015 Password Hashing
+ Competition (PHC).
+
+ For more details, see
+ https://password-hashing.net/
+ https://github.com/P-H-C/phc-winner-argon2/blob/master/argon2-specs.pdf
+ https://en.wikipedia.org/wiki/Argon2
+
+ The purpose of the KDF is to stretch a password into a 256-bit key.
+ Argon2 features a slow, memory-hard hash function that consumes
+ computational resources of an attacker that attempts a dictionary
+ or a brute force attack. The accompanied 256-bit salt prevents
+ rainbow-table attacks, forcing each attack to take place against an
+ individual (physically compromised) TFC-endpoint, or PSK
+ transmission media.
+
+ The used Argon2 version is Argon2d that uses data-dependent memory
+ access, which maximizes security against time-memory trade-off
+ (TMTO) attacks at the risk of side-channel attacks. The IETF
+ recommends using Argon2id (that is side-channel resistant and almost
+ as secure as Argon2d against TMTO attacks) **except** when there is
+ a reason to prefer Argon2d (or Argon2i). The reason TFC uses Argon2d
+ is key derivation only takes place on Source and Destination
+ Computer. As these computers are connected to the Networked Computer
+ only via a data diode, they do not leak any information via
+ side-channels to the adversary. The expected attacks are against
+ physically compromised data storage devices where the encrypted data
+ is at rest. In such situation, Argon2d is the most secure option.
+
+ The correctness of the Argon2d implementation* is tested by TFC unit
+ tests. The testing is done in limited scope by using an official KAT.
+
+ * https://github.com/P-H-C/phc-winner-argon2
+ https://github.com/hynek/argon2_cffi
"""
- assert len(salt) == ARGON2_SALT_LEN
-
- if parallelism is None:
- parallelism = multiprocessing.cpu_count()
- if local_test:
- parallelism = max(1, parallelism // 2)
+ assert len(salt) == ARGON2_SALT_LENGTH
key = argon2.low_level.hash_secret_raw(secret=password.encode(),
salt=salt,
time_cost=rounds,
memory_cost=memory,
parallelism=parallelism,
- hash_len=KEY_LENGTH,
- type=argon2.Type.D)
- return key, parallelism
+ hash_len=SYMMETRIC_KEY_LENGTH,
+ type=argon2.Type.D) # type: bytes
+ return key
-def encrypt_and_sign(plaintext: bytes, key: bytes) -> bytes:
- """Encrypt plaintext with XSalsa20-Poly1305.
-
- :param plaintext: Plaintext to encrypt
- :param key: 32-byte key
- :return: Ciphertext + tag
+class X448(object):
"""
- assert len(key) == KEY_LENGTH
+ X448 is the Diffie-Hellman function for Curve448-Goldilocks, a
+ state-of-the-art elliptic curve designed by Mike Hamburg in 2014:
+ https://eprint.iacr.org/2015/625.pdf
- secret_box = nacl.secret.SecretBox(key)
- nonce = nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE)
- return bytes(secret_box.encrypt(plaintext, nonce))
+ The reasons for using X448 in TFC include
+ o It meets the criterion for a safe curve.
+ (https://safecurves.cr.yp.to/)
-def auth_and_decrypt(nonce_ct_tag: bytes,
- key: bytes,
- soft_e: bool = False) -> bytes:
- """Authenticate and decrypt XSalsa20-Poly1305 ciphertext.
+ o NIST has announced X448 will be included in the SP 800-186.
+ (https://csrc.nist.gov/News/2017/Transition-Plans-for-Key-Establishment-Schemes)
- :param nonce_ct_tag: Nonce, ciphertext and tag
- :param key: 32-byte key
- :param soft_e: When True, raises soft error
- :return: Plaintext
+ o It provides conservative 224 bits of symmetric security.
+
+ o It is immune against invalid curve attacks: Its public keys do
+ not require validation as long as the public key is not zero.
+
+ o Its public keys are reasonably short (84 Base58 chars) to be
+ manually typed from Networked Computer to Source Computer.
+
+ The correctness of the X448 implementation* is tested by TFC unit
+ tests. The testing is done in limited scope by using official test
+ vectors.
+
+ * https://github.com/openssl/openssl/tree/OpenSSL_1_1_1-stable/crypto/ec/curve448
+ https://github.com/pyca/cryptography/blob/master/src/cryptography/hazmat/primitives/asymmetric/x448.py
"""
- assert len(key) == KEY_LENGTH
+ @staticmethod
+ def generate_private_key() -> 'X448PrivateKey':
+ """Generate the X448 private key.
+
+ The size of the private key is 56 bytes (448 bits).
+ """
+ return X448PrivateKey.generate()
+
+ @staticmethod
+ def derive_public_key(private_key: 'X448PrivateKey') -> bytes:
+ """Derive public key from X448 private key."""
+ public_key = private_key.public_key().public_bytes(encoding=Encoding.Raw,
+ format=PublicFormat.Raw) # type: bytes
+ return public_key
+
+ @staticmethod
+ def shared_key(private_key: 'X448PrivateKey', public_key: bytes) -> bytes:
+ """Derive the X448 shared key.
+
+ Since the shared secret is zero if contact's public key is zero,
+ this function asserts the public key is a valid non-zero
+ bytestring.
+
+ Because the raw bits of the X448 shared secret might not be
+ uniformly distributed in the keyspace (i.e. bits might have bias
+ towards 0 or 1), the raw shared secret is passed through BLAKE2b
+ CSPRF to ensure uniformly random shared key.
+ """
+ assert len(public_key) == TFC_PUBLIC_KEY_LENGTH
+ assert public_key != bytes(TFC_PUBLIC_KEY_LENGTH)
+
+ shared_secret = private_key.exchange(X448PublicKey.from_public_bytes(public_key))
+ return blake2b(shared_secret, digest_size=SYMMETRIC_KEY_LENGTH)
+
+
+def encrypt_and_sign(plaintext: bytes, # Plaintext to encrypt
+ key: bytes, # 32-byte symmetric key
+ ad: bytes = b'' # Associated data
+ ) -> bytes: # Nonce + ciphertext + tag
+ """Encrypt plaintext with XChaCha20-Poly1305.
+
+ ChaCha20 is a stream cipher published by Daniel J. Bernstein (djb)
+ in 2008. The algorithm is an improved version of Salsa20 -- another
+ stream cipher by djb -- selected by ECRYPT into the eSTREAM
+ portfolio in 2008. The improvement in question is, ChaCha20
+ increases the per-round diffusion compared to Salsa20 while
+ maintaining or increasing speed.
+
+ For more details, see
+ https://cr.yp.to/chacha/chacha-20080128.pdf
+ https://en.wikipedia.org/wiki/Salsa20#ChaCha_variant
+
+ The Poly1305 is a Wegman-Carter Message Authentication Code also
+ designed by djb. The MAC is provably secure if ChaCha20 is secure.
+ The 128-bit tag space ensures the attacker's advantage to create an
+ existential forgery is negligible.
+
+ For more details, see
+ https://cr.yp.to/mac.html
+
+ The version used in TFC is the XChaCha20-Poly1305-IETF*, a variant
+ of the ChaCha20-Poly1305-IETF (RFC 7539**). Quoting libsodium, the
+ XChaCha20 (=eXtended-nonce ChaCha20) variant allows encryption of
+ ~2^64 bytes per message, encryption of up to 2^64 messages per key,
+ and safe use of random nonces due to the 192-bit nonce space***.
+
+ * https://tools.ietf.org/html/draft-arciszewski-xchacha-00
+ ** https://tools.ietf.org/html/rfc7539
+ *** https://download.libsodium.org/doc/secret-key_cryptography/aead/chacha20-poly1305#variants
+
+ The reasons for using XChaCha20-Poly1305 in TFC include
+
+ o The Salsa20 algorithm has 14 years of cryptanalysis behind it.
+ (https://en.wikipedia.org/wiki/Salsa20#Cryptanalysis_of_Salsa20)
+
+ o The increased diffusion over the well-received Salsa20.
+
+ o The algorithm is much faster compared to AES (in cases where
+ the CPU and/or implementation does not support AES-NI).
+
+ o Security against cache-timing attacks on all CPUs (unlike AES
+ on CPUs without AES-NI).
+
+ o The good name of djb.
+
+ The correctness of the XChaCha20-Poly1305 implementation* is tested
+ by TFC unit tests. The testing is done in limited scope by using
+ libsodium and IETF test vectors.
+
+ * https://github.com/jedisct1/libsodium/tree/master/src/libsodium/crypto_aead/xchacha20poly1305/sodium
+ https://github.com/pyca/pynacl/blob/master/src/nacl/bindings/crypto_aead.py
+ """
+ assert len(key) == SYMMETRIC_KEY_LENGTH
+
+ nonce = csprng(XCHACHA20_NONCE_LENGTH)
+ ct_tag = nacl.bindings.crypto_aead_xchacha20poly1305_ietf_encrypt(plaintext, ad, nonce, key) # type: bytes
+
+ return nonce + ct_tag
+
+
+def auth_and_decrypt(nonce_ct_tag: bytes, # Nonce + ciphertext + tag
+ key: bytes, # 32-byte symmetric key
+ database: str = '', # When provided, gracefully exits TFC when the tag is invalid
+ ad: bytes = b'' # Associated data
+ ) -> bytes: # Plaintext
+ """Authenticate and decrypt XChaCha20-Poly1305 ciphertext.
+
+ The Poly1305 tag is checked using constant time `sodium_memcmp`:
+ https://download.libsodium.org/doc/helpers#constant-time-test-for-equality
+
+ When TFC decrypts ciphertext from an untrusted source (i.e., a
+ contact), no `database` parameter is provided. In such situation, if
+ the tag of the untrusted ciphertext is invalid, TFC discards the
+ ciphertext and recovers appropriately.
+
+ When TFC decrypts ciphertext from a trusted source (i.e., a
+ database), the `database` parameter is provided, so the function
+ knows which database is in question. In case the authentication
+ fails due to invalid tag, the data is assumed to be either tampered
+ or corrupted. TFC will in such case gracefully exit to avoid
+ processing the unsafe data and warn the user in which database the
+ issue was detected.
+ """
+ assert len(key) == SYMMETRIC_KEY_LENGTH
+
+ nonce, ct_tag = separate_header(nonce_ct_tag, XCHACHA20_NONCE_LENGTH)
try:
- secret_box = nacl.secret.SecretBox(key)
- return secret_box.decrypt(nonce_ct_tag)
+ plaintext = nacl.bindings.crypto_aead_xchacha20poly1305_ietf_decrypt(ct_tag, ad, nonce, key) # type: bytes
+ return plaintext
except nacl.exceptions.CryptoError:
- if not soft_e:
- raise CriticalError("Ciphertext MAC fail.")
+ if database:
+ raise CriticalError(f"Authentication of data in database '{database}' failed.")
raise
-def byte_padding(string: bytes) -> bytes:
- """Pad byte string to next 255 bytes.
+def byte_padding(bytestring: bytes # Bytestring to be padded
+ ) -> bytes: # Padded bytestring
+ """Pad bytestring to next 255 bytes.
- Padding of output messages hides plaintext length and contributes
- to traffic flow confidentiality when traffic masking is enabled.
+ TFC adds padding to messages it outputs. The padding ensures each
+ assembly packet has a constant length. When traffic masking is
+ disabled, because of padding the packet length reveals only the
+ maximum length of the compressed message.
- :param string: String to be padded
- :return: Padded string
+ When traffic masking is enabled, the padding contributes to traffic
+ flow confidentiality: During traffic masking, TFC will output a
+ constant stream of padded packets at constant intervals that hides
+ metadata about message length (i.e., the adversary won't be able to
+ distinguish when transmission of packet or series of packets starts
+ and stops), as well as the type (message/file) of transferred data.
+
+ TFC uses PKCS #7 padding scheme described in RFC 2315 and RFC 5652:
+ https://tools.ietf.org/html/rfc2315#section-10.3
+ https://tools.ietf.org/html/rfc5652#section-6.3
+
+ For a better explanation, see
+ https://en.wikipedia.org/wiki/Padding_(cryptography)#PKCS#5_and_PKCS#7
"""
- length = PADDING_LEN - (len(string) % PADDING_LEN)
- string += length * bytes([length])
+ padding_len = PADDING_LENGTH - (len(bytestring) % PADDING_LENGTH)
+ bytestring += padding_len * bytes([padding_len])
- assert len(string) % PADDING_LEN == 0
+ assert len(bytestring) % PADDING_LENGTH == 0
- return string
+ return bytestring
-def rm_padding_bytes(string: bytes) -> bytes:
+def rm_padding_bytes(bytestring: bytes # Bytestring from which padding is removed
+ ) -> bytes: # Bytestring without padding
"""Remove padding from plaintext.
- The length of padding is determined by the ord-value
- of last character that is always part of padding.
-
- :param string: String from which padding is removed
- :return: String without padding
+ The length of padding is determined by the ord-value of the last
+ byte that is always part of the padding.
"""
- return string[:-ord(string[-1:])]
+ length = ord(bytestring[-1:])
+ return bytestring[:-length]
-def xor(string1: bytes, string2: bytes) -> bytes:
- """XOR two byte strings together."""
- if len(string1) != len(string2):
- raise CriticalError("String length mismatch.")
+def csprng(key_length: int = SYMMETRIC_KEY_LENGTH) -> bytes:
+ """Generate a cryptographically secure random key.
- return b''.join([bytes([b1 ^ b2]) for b1, b2 in zip(string1, string2)])
+ The default key length is 256 bits.
+ The key is generated by the Linux kernel's cryptographically secure
+ pseudo-random number generator (CSPRNG).
-def csprng() -> bytes:
- """Generate a cryptographically secure, 256-bit random key.
+ Since Python 3.6.0, `os.urandom` is a wrapper for best available
+ CSPRNG. The 3.17 and earlier versions of Linux kernel do not support
+ the GETRANDOM call, and Python 3.6's `os.urandom` will in those
+ cases fall back to non-blocking `/dev/urandom` that is not secure on
+ live distros as they have low entropy at the start of the session.
- Key is generated with kernel CSPRNG, the output of which is further
- compressed with hash_chain. This increases preimage resistance that
- protects the internal state of the entropy pool. Additional hashing
- is done as per the recommendation of djb:
+ TFC uses `os.getrandom(n, flags=0)` explicitly. This forces use of
+ recent enough Python interpreter (3.6.0 or later) and limits Linux
+ kernel version to 3.17 or later.* The flag 0 will block urandom if
+ the internal state of the CSPRNG has less than 128 bits of entropy.
+ See PEP 524 for more details:
+ https://www.python.org/dev/peps/pep-0524/
+
+ * The `/dev/urandom` was redesigned around ChaCha20 in the version
+ 4.8 of Linux kernel (https://lwn.net/Articles/686033/), so as a
+ good practice TFC runs the `check_kernel_version` to ensure only
+ the new design of the CSPRNG is used.
+
+ Quoting PEP 524:
+ "The os.getrandom() is a thin wrapper on the getrandom()
+ syscall/C function and so inherit of its behaviour. For
+ example, on Linux, it can return less bytes than
+ requested if the syscall is interrupted by a signal."
+
+ However, quoting (https://lwn.net/Articles/606141/) on GETRANDOM:
+ "--reads of 256 bytes or less from /dev/urandom are guaranteed to
+ return the full request once that device has been initialized."
+
+ Since the largest key generated in TFC is the 56-byte X448 private
+ key, GETRANDOM is guaranteed to always work. As a good practice
+ however, TFC asserts that the length of the obtained entropy is
+ correct.
+
+ The output of GETRANDOM is further compressed with BLAKE2b. The
+ preimage resistance of the hash function protects the internal
+ state of the entropy pool just in case some user decides to modify
+ the source to accept pre-4.8 Linux Kernel that has no backtracking
+ protection. Another reason for the hashing is that it's recommended by djb:
https://media.ccc.de/v/32c3-7210-pqchacks#video&t=1116
- Since Python3.6.0, os.urandom is a wrapper for best available
- CSPRNG. The 3.17 and earlier versions of Linux kernel do not support
- the GETRANDOM call, and Python3.6's os.urandom will in those cases
- fallback to non-blocking /dev/urandom that is not secure on live
- distros as they have low entropy at the start of the session.
-
- TFC uses os.getrandom(32, flags=0) explicitly. This forces use of
- recent enough Python interpreter (3.6 or later) and limits Linux
- kernel version to 3.17 or later.* The flag 0 will block urandom if
- internal state of CSPRNG has less than 128 bits of entropy.
-
- * Since kernel 4.8, ChaCha20 has replaced SHA-1 as the compressor
- for /dev/urandom. As a good practice, TFC runs the
- check_kernel_version to ensure minimum version is actually 4.8,
- not 3.17.
-
- :return: Cryptographically secure 256-bit random key
+ Since BLAKE2b only produces 1..64 byte digests, its use limits the
+ size of the key to 64 bytes. This is not a problem for TFC because
+ again, the largest key it generates is the 56-byte X448 private key.
"""
- # As Travis CI lacks GETRANDOM syscall, fallback to urandom.
- if 'TRAVIS' in os.environ and os.environ['TRAVIS'] == 'true':
- entropy = os.urandom(KEY_LENGTH)
- else:
- entropy = os.getrandom(KEY_LENGTH, flags=0)
+ assert key_length <= BLAKE2_DIGEST_LENGTH_MAX
- assert len(entropy) == KEY_LENGTH
+ entropy = os.getrandom(key_length, flags=0)
+ assert len(entropy) == key_length
- return hash_chain(entropy)
+ compressed = blake2b(entropy, digest_size=key_length)
+ assert len(compressed) == key_length
+
+ return compressed
def check_kernel_entropy() -> None:
- """Wait until Kernel CSPRNG is sufficiently seeded.
+ """Wait until the kernel CSPRNG is sufficiently seeded.
- Wait until entropy_avail file states that system has at least 512
- bits of entropy. The headroom allows room for error in accuracy of
- entropy collector's entropy estimator; As long as input has at least
- 4 bits per byte of actual entropy, kernel CSPRNG will be sufficiently
- seeded when it generates 256-bit keys.
+ Wait until the `entropy_avail` file states that kernel entropy pool
+ has at least 512 bits of entropy. The waiting ensures the ChaCha20
+ CSPRNG is fully seeded (i.e., it has the maximum of 384 bits of
+ entropy) when it generates keys. The same entropy threshold is used
+ by the GETRANDOM syscall in random.c:
+ #define CRNG_INIT_CNT_THRESH (2*CHACHA20_KEY_SIZE)
+
+ For more information on the kernel CSPRNG threshold, see
+ https://security.stackexchange.com/a/175771/123524
+ https://crypto.stackexchange.com/a/56377
"""
- clear_screen()
- phase("Waiting for Kernel CSPRNG entropy pool to fill up", head=1)
+ message = "Waiting for kernel CSPRNG entropy pool to fill up"
+ phase(message, head=1)
ent_avail = 0
while ent_avail < ENTROPY_THRESHOLD:
with ignored(EOFError, KeyboardInterrupt):
with open('/proc/sys/kernel/random/entropy_avail') as f:
- value = f.read()
- ent_avail = int(value.strip())
- c_print(f"{ent_avail}/{ENTROPY_THRESHOLD}")
+ ent_avail = int(f.read().strip())
+ m_print(f"{ent_avail}/{ENTROPY_THRESHOLD}")
print_on_previous_line(delay=0.1)
print_on_previous_line()
- phase("Waiting for Kernel CSPRNG entropy pool to fill up")
+ phase(message)
phase(DONE)
def check_kernel_version() -> None:
"""Check that the Linux kernel version is at least 4.8.
- This check ensures that TFC only runs on Linux kernels that use
- the new ChaCha20 based CSPRNG: https://lkml.org/lkml/2016/7/25/43
+ This check ensures that TFC only runs on Linux kernels that use the
+ new ChaCha20 based CSPRNG that among many things, adds backtracking
+ protection:
+ https://lkml.org/lkml/2016/7/25/43
"""
major_v, minor_v = [int(i) for i in os.uname()[2].split('.')[:2]]
diff --git a/src/common/db_contacts.py b/src/common/db_contacts.py
index 1b66a4c..77f6690 100755
--- a/src/common/db_contacts.py
+++ b/src/common/db_contacts.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,67 +16,193 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
import os
import typing
-from typing import Generator, Iterable, List, Sized
+from typing import Generator, Iterable, List, Optional, Sized
from src.common.crypto import auth_and_decrypt, encrypt_and_sign
-from src.common.encoding import bool_to_bytes, str_to_bytes
-from src.common.encoding import bytes_to_bool, bytes_to_str
-from src.common.misc import ensure_dir, get_terminal_width, split_byte_string
+from src.common.encoding import bool_to_bytes, pub_key_to_onion_address, str_to_bytes, pub_key_to_short_address
+from src.common.encoding import bytes_to_bool, onion_address_to_pub_key, bytes_to_str
+from src.common.misc import ensure_dir, get_terminal_width, separate_headers, split_byte_string
from src.common.output import clear_screen
from src.common.statics import *
if typing.TYPE_CHECKING:
from src.common.db_masterkey import MasterKey
from src.common.db_settings import Settings
+ from cryptography.hazmat.primitives.asymmetric.x448 import X448PrivateKey
class Contact(object):
"""\
- Contact object collects data related to
- contact that is not part of key rotation.
+ Contact object contains contact data not related to key management
+ and hash ratchet state:
+
+ onion_pub_key: The public key of the contact's v3 Tor Onion
+ Service. The Relay Program on user's Networked
+ Computer uses this public key to anonymously
+ discover the Onion Service and to authenticate the
+ end-to-end encryption used between Relay Computers.
+ Since Relay Program might run on an amnesic distro
+ like Tails, the Transmitter and Receiver Programs
+ handle long-term storage of the contact's Onion
+ Service public key. All `onion_pub_key` variables
+ across the codebase refer to the public key of a
+ contact (never that of the user).
+
+ nick: As per Zooko's triangle and Stiegler's Petname
+ Systems, .onion names (i.e., TFC accounts) cannot
+ be global, secure and memorable at the same time*.
+ To deal with hard to remember accounts, in TFC
+ contacts (and groups) are managed mostly with
+ nicknames assigned by the user. The nickname must
+ be unique among both contacts and groups so that
+ single command `/msg <nick>` can select a
+ specific contact or group. Some nicknames are
+ reserved so that messages from contacts cannot be
+ confused with system messages of Receiver Program.
+ Nicknames also have a length limit of 254 chars.
+
+ * https://trac.torproject.org/projects/tor/wiki/doc/HiddenServiceNames#Whyare.onionnamescreatedthatway
+
+ TFC stores the 32-byte public key fingerprints of the ECDHE key
+ exchange into the contact database. These values allow the user to
+ verify at any time no MITM attack took place during the key
+ exchange. When PSKs are used, a null-byte string is used as a
+ placeholder value.
+
+ tx_fingerprint: The user's fingerprint. This fingerprint is derived
+ from the user's public key which means it's
+ automatically authentic. During verification over
+ an authenticated channel, the user reads this value
+ to the contact out loud.
+
+ rx_fingerprint: The purported fingerprint for the contact. This
+ fingerprint depends on the public key received from
+ the insecure network and therefore, it shouldn't be
+ trusted implicitly. During verification over an
+ authenticated channel, the contact reads their
+ `tx_fingerprint` to the user out loud, and the user
+ then compares it to this purported value.
+
+ kex_status: This byte remembers the key exchange status of the
+ contact.
+
+ TFC stores the contact-specific settings to the contact database:
+
+ log_messages: This setting defines whether the Receiver Program
+ on Destination Computer writes the assembly packets
+ of a successfully received message into a log file.
+ When logging is enabled, Transmitter Program will
+ also log assembly packets of sent messages to its
+ log file.
+
+ file_reception: This setting defines whether the Receiver Program
+ accepts files sent by the contact. The setting has
+ no effect on user's Transmitter Program.
+
+ notifications: This setting defines whether, in situations where
+ some other window is active, the Receiver Program
+ displays a notification about the contact sending a
+ new message to their window. The setting has no
+ effect on user's Transmitter Program.
+
+ tfc_private_key: This value is an ephemerally stored private key
+ for situations where the user interrupts the key
+ exchange. The purpose of the value is to prevent
+ the user from generating different ECDHE values
+ when re-selecting the contact to continue the key
+ exchange. Note that once a shared key is derived
+ from this private key (and contact's public key),
+ it is discarded. New private key will thus be
+ generated if the users decide to exchange new keys
+ with each other.
"""
def __init__(self,
- rx_account: str,
- tx_account: str,
+ onion_pub_key: bytes,
nick: str,
tx_fingerprint: bytes,
rx_fingerprint: bytes,
+ kex_status: bytes,
log_messages: bool,
file_reception: bool,
- notifications: bool) -> None:
- """Create a new Contact object."""
- self.rx_account = rx_account
- self.tx_account = tx_account
- self.nick = nick
- self.tx_fingerprint = tx_fingerprint
- self.rx_fingerprint = rx_fingerprint
- self.log_messages = log_messages
- self.file_reception = file_reception
- self.notifications = notifications
+ notifications: bool
+ ) -> None:
+ """Create a new Contact object.
+
+ `self.short_address` is a truncated version of the account used
+ to identify TFC account in printed messages.
+ """
+ self.onion_pub_key = onion_pub_key
+ self.nick = nick
+ self.tx_fingerprint = tx_fingerprint
+ self.rx_fingerprint = rx_fingerprint
+ self.kex_status = kex_status
+ self.log_messages = log_messages
+ self.file_reception = file_reception
+ self.notifications = notifications
+ self.onion_address = pub_key_to_onion_address(self.onion_pub_key)
+ self.short_address = pub_key_to_short_address(self.onion_pub_key)
+ self.tfc_private_key = None # type: Optional[X448PrivateKey]
def serialize_c(self) -> bytes:
- """Return contact data as constant length byte string."""
- return (str_to_bytes(self.rx_account)
- + str_to_bytes(self.tx_account)
- + str_to_bytes(self.nick)
+ """Return contact data as a constant length byte string.
+
+ This function serializes the contact's data into a byte string
+ that has the exact length of 3*32 + 4*1 + 1024 = 1124 bytes. The
+ length is guaranteed regardless of the content or length of the
+ attributes' values, including the contact's nickname. The
+ purpose of the constant length serialization is to hide any
+ metadata about the contact the ciphertext length of the contact
+ database would reveal.
+ """
+ return (self.onion_pub_key
+ self.tx_fingerprint
+ self.rx_fingerprint
+ + self.kex_status
+ bool_to_bytes(self.log_messages)
+ bool_to_bytes(self.file_reception)
- + bool_to_bytes(self.notifications))
+ + bool_to_bytes(self.notifications)
+ + str_to_bytes(self.nick))
+
+ def uses_psk(self) -> bool:
+ """\
+ Return True if the user and the contact are using pre-shared
+ keys (PSKs), else False.
+
+ When the user sets up pre-shared keys with the contact, the key
+ exchange status can only have two specific values (that remember
+ whether the PSK of the contact has been imported). That fact can
+ be used to determine whether the keys with contact were
+ pre-shared.
+ """
+ return self.kex_status in [KEX_STATUS_NO_RX_PSK, KEX_STATUS_HAS_RX_PSK]
class ContactList(Iterable, Sized):
"""\
- ContactList object manages list of contact
- objects and the encrypted contact database.
+ ContactList object manages TFC's Contact objects and the storage of
+ the objects in an encrypted database.
+
+ The main purpose of this object is to manage the `self.contacts`
+ list that contains TFC's contacts. The database is stored on disk
+ in encrypted form. Prior to encryption, the database is padded with
+ dummy contacts. The dummy contacts hide the number of actual
+ contacts that would otherwise be revealed by the size of the
+ encrypted database. As long as the user has less than 50 contacts,
+ the database will effectively hide the actual number of contacts.
+ The maximum number of contacts (and thus the size of the database)
+    can be changed by editing the `max_number_of_contacts` setting. This
+    can, however, in theory, reveal to a physical attacker that the
+    user has more than 50 contacts.
+
+ The ContactList object also provides handy methods with human-
+ readable names for making queries to the database.
"""
def __init__(self, master_key: 'MasterKey', settings: 'Settings') -> None:
@@ -84,159 +211,288 @@ class ContactList(Iterable, Sized):
self.settings = settings
self.contacts = [] # type: List[Contact]
self.dummy_contact = self.generate_dummy_contact()
- self.dummy_id = self.dummy_contact.rx_account.encode('utf-32')
self.file_name = f'{DIR_USER_DATA}{settings.software_operation}_contacts'
ensure_dir(DIR_USER_DATA)
if os.path.isfile(self.file_name):
- self.load_contacts()
+ self._load_contacts()
else:
self.store_contacts()
def __iter__(self) -> Generator:
- """Iterate over contacts in contact list."""
+ """Iterate over Contact objects in `self.contacts`."""
yield from self.contacts
def __len__(self) -> int:
- """Return number of contacts in contact list."""
- return len(self.contacts)
+ """Return the number of contacts in `self.contacts`.
+
+ The Contact object that represents the local key is left out of
+ the calculation.
+ """
+ return len(self.get_list_of_contacts())
def store_contacts(self) -> None:
- """Write contacts to encrypted database."""
- contacts = self.contacts + [self.dummy_contact] * (self.settings.max_number_of_contacts - len(self.contacts))
- pt_bytes = b''.join([c.serialize_c() for c in contacts])
+ """Write the list of contacts to an encrypted database.
+
+ This function will first create a list of contacts and dummy
+ contacts. It will then serialize every Contact object on that
+ list and join the constant length byte strings to form the
+ plaintext that will be encrypted and stored in the database.
+
+ By default, TFC has a maximum number of 50 contacts. In
+ addition, the database stores the contact that represents the
+ local key (used to encrypt commands from Transmitter to Receiver
+ Program). The plaintext length of 51 serialized contacts is
+    51*1124 = 57324 bytes. The ciphertext includes a 24-byte nonce
+    and a 16-byte tag, so the size of the final database is 57364
+    bytes.
+ """
+ pt_bytes = b''.join([c.serialize_c() for c in self.contacts + self._dummy_contacts()])
ct_bytes = encrypt_and_sign(pt_bytes, self.master_key.master_key)
ensure_dir(DIR_USER_DATA)
with open(self.file_name, 'wb+') as f:
f.write(ct_bytes)
- def load_contacts(self) -> None:
- """Load contacts from encrypted database."""
+ def _load_contacts(self) -> None:
+ """Load contacts from the encrypted database.
+
+ This function first reads and decrypts the database content. It
+ then splits the plaintext into a list of 1124-byte blocks: each
+ block contains the serialized data of one contact. Next, the
+ function will remove from the list all dummy contacts (that
+ start with dummy contact's public key). The function will then
+ populate the `self.contacts` list with Contact objects, the data
+ of which is sliced and decoded from the dummy-free blocks.
+ """
with open(self.file_name, 'rb') as f:
ct_bytes = f.read()
- pt_bytes = auth_and_decrypt(ct_bytes, self.master_key.master_key)
- entries = split_byte_string(pt_bytes, item_len=CONTACT_LENGTH)
- contacts = [e for e in entries if not e.startswith(self.dummy_id)]
+ pt_bytes = auth_and_decrypt(ct_bytes, self.master_key.master_key, database=self.file_name)
+ blocks = split_byte_string(pt_bytes, item_len=CONTACT_LENGTH)
+ df_blocks = [b for b in blocks if not b.startswith(self.dummy_contact.onion_pub_key)]
- for c in contacts:
- assert len(c) == CONTACT_LENGTH
+ for block in df_blocks:
+ assert len(block) == CONTACT_LENGTH
- self.contacts.append(Contact(rx_account =bytes_to_str( c[ 0:1024]),
- tx_account =bytes_to_str( c[1024:2048]),
- nick =bytes_to_str( c[2048:3072]),
- tx_fingerprint= c[3072:3104],
- rx_fingerprint= c[3104:3136],
- log_messages =bytes_to_bool(c[3136:3137]),
- file_reception=bytes_to_bool(c[3137:3138]),
- notifications =bytes_to_bool(c[3138:3139])))
+ (onion_pub_key, tx_fingerprint, rx_fingerprint, kex_status_byte,
+ log_messages_byte, file_reception_byte, notifications_byte,
+ nick_bytes) = separate_headers(block,
+ [ONION_SERVICE_PUBLIC_KEY_LENGTH]
+ + 2*[FINGERPRINT_LENGTH]
+ + 4*[ENCODED_BOOLEAN_LENGTH])
+
+ self.contacts.append(Contact(onion_pub_key =onion_pub_key,
+ tx_fingerprint=tx_fingerprint,
+ rx_fingerprint=rx_fingerprint,
+ kex_status =kex_status_byte,
+ log_messages =bytes_to_bool(log_messages_byte),
+ file_reception=bytes_to_bool(file_reception_byte),
+ notifications =bytes_to_bool(notifications_byte),
+ nick =bytes_to_str(nick_bytes)))
@staticmethod
def generate_dummy_contact() -> Contact:
- """Generate a dummy contact."""
- return Contact(rx_account =DUMMY_CONTACT,
- tx_account =DUMMY_STR,
- nick =DUMMY_STR,
- tx_fingerprint=bytes(FINGERPRINT_LEN),
- rx_fingerprint=bytes(FINGERPRINT_LEN),
+ """Generate a dummy Contact object.
+
+ The dummy contact simplifies the code around the constant length
+ serialization when the data is stored to, or read from the
+ database.
+ """
+ return Contact(onion_pub_key =onion_address_to_pub_key(DUMMY_CONTACT),
+ nick =DUMMY_NICK,
+ tx_fingerprint=bytes(FINGERPRINT_LENGTH),
+ rx_fingerprint=bytes(FINGERPRINT_LENGTH),
+ kex_status =KEX_STATUS_NONE,
log_messages =False,
file_reception=False,
notifications =False)
+ def _dummy_contacts(self) -> List[Contact]:
+ """\
+ Generate a list of dummy contacts for database padding.
+
+ The number of dummy contacts depends on the number of actual
+ contacts.
+
+ The additional contact (+1) is the local contact used to
+ represent the presence of the local key on Transmitter Program's
+ `input_loop` process side that does not have access to the
+ KeyList database that contains the local key.
+ """
+ number_of_contacts_to_store = self.settings.max_number_of_contacts + 1
+ number_of_dummies = number_of_contacts_to_store - len(self.contacts)
+ return [self.dummy_contact] * number_of_dummies
+
def add_contact(self,
- rx_account: str,
- tx_account: str,
+ onion_pub_key: bytes,
nick: str,
tx_fingerprint: bytes,
rx_fingerprint: bytes,
+ kex_status: bytes,
log_messages: bool,
file_reception: bool,
- notifications: bool) -> None:
- """Add new contact to contact list, write changes to database."""
- if self.has_contact(rx_account):
- self.remove_contact(rx_account)
+ notifications: bool
+ ) -> None:
+ """\
+ Add a new contact to `self.contacts` list and write changes to
+ the database.
- self.contacts.append(Contact(rx_account, tx_account, nick,
- tx_fingerprint, rx_fingerprint,
- log_messages, file_reception, notifications))
+ Because TFC's hardware separation prevents automated DH-ratchet,
+ the only way for the users to re-negotiate new keys is to start
+ a new session by re-adding the contact. If the contact is
+ re-added, TFC will need to remove the existing Contact object
+ before adding the new one. In such case, TFC will update the
+ nick, kex status, and fingerprints, but it will keep the old
+ logging, file reception, and notification settings of the
+ contact (as opposed to using the defaults determined by TFC's
+ Settings object).
+ """
+ if self.has_pub_key(onion_pub_key):
+ current_contact = self.get_contact_by_pub_key(onion_pub_key)
+ log_messages = current_contact.log_messages
+ file_reception = current_contact.file_reception
+ notifications = current_contact.notifications
+ self.remove_contact_by_pub_key(onion_pub_key)
+
+ self.contacts.append(Contact(onion_pub_key,
+ nick,
+ tx_fingerprint,
+ rx_fingerprint,
+ kex_status,
+ log_messages,
+ file_reception,
+ notifications))
self.store_contacts()
- def remove_contact(self, selector: str) -> None:
- """\
- Remove account based on account/nick,
- update database file if necessary.
+ def remove_contact_by_pub_key(self, onion_pub_key: bytes) -> None:
+ """Remove the contact that has a matching Onion Service public key.
+
+ If the contact was found and removed, write changes to the database.
"""
for i, c in enumerate(self.contacts):
- if selector in [c.rx_account, c.nick]:
+ if c.onion_pub_key == onion_pub_key:
del self.contacts[i]
self.store_contacts()
break
- def get_contact(self, selector: str) -> Contact:
- """Get contact from list based on UID (account name or nick)."""
- return next(c for c in self.contacts if selector in [c.rx_account, c.nick])
+ def remove_contact_by_address_or_nick(self, selector: str) -> None:
+ """Remove the contact that has a matching nick or Onion Service address.
+
+ If the contact was found and removed, write changes to the database.
+ """
+ for i, c in enumerate(self.contacts):
+ if selector in [c.onion_address, c.nick]:
+ del self.contacts[i]
+ self.store_contacts()
+ break
+
+ def get_contact_by_pub_key(self, onion_pub_key: bytes) -> Contact:
+ """\
+ Return the Contact object from `self.contacts` list that has the
+ matching Onion Service public key.
+ """
+ return next(c for c in self.contacts if onion_pub_key == c.onion_pub_key)
+
+ def get_contact_by_address_or_nick(self, selector: str) -> Contact:
+ """\
+ Return the Contact object from `self.contacts` list that has the
+ matching nick or Onion Service address.
+ """
+ return next(c for c in self.contacts if selector in [c.onion_address, c.nick])
def get_list_of_contacts(self) -> List[Contact]:
- """Return list of contacts."""
- return [c for c in self.contacts if c.rx_account != LOCAL_ID]
+ """Return list of Contact objects in `self.contacts` list."""
+ return [c for c in self.contacts if c.onion_address != LOCAL_ID]
- def get_list_of_accounts(self) -> List[str]:
- """Return list of accounts."""
- return [c.rx_account for c in self.contacts if c.rx_account != LOCAL_ID]
+ def get_list_of_addresses(self) -> List[str]:
+ """Return list of contacts' TFC accounts."""
+ return [c.onion_address for c in self.contacts if c.onion_address != LOCAL_ID]
def get_list_of_nicks(self) -> List[str]:
- """Return list of nicks."""
- return [c.nick for c in self.contacts if c.nick != LOCAL_ID]
+ """Return list of contacts' nicks."""
+ return [c.nick for c in self.contacts if c.onion_address != LOCAL_ID]
- def get_list_of_users_accounts(self) -> List[str]:
- """Return list of user's accounts."""
- return list(set([c.tx_account for c in self.contacts if c.tx_account != LOCAL_ID]))
+ def get_list_of_pub_keys(self) -> List[bytes]:
+ """Return list of contacts' public keys."""
+ return [c.onion_pub_key for c in self.contacts if c.onion_address != LOCAL_ID]
+
+ def get_list_of_pending_pub_keys(self) -> List[bytes]:
+ """Return list of public keys for contacts that haven't completed key exchange yet."""
+ return [c.onion_pub_key for c in self.contacts if c.kex_status == KEX_STATUS_PENDING]
+
+ def get_list_of_existing_pub_keys(self) -> List[bytes]:
+ """Return list of public keys for contacts with whom key exchange has been completed."""
+ return [c.onion_pub_key for c in self.get_list_of_contacts()
+ if c.kex_status in [KEX_STATUS_UNVERIFIED, KEX_STATUS_VERIFIED,
+ KEX_STATUS_HAS_RX_PSK, KEX_STATUS_NO_RX_PSK]]
def contact_selectors(self) -> List[str]:
- """Return list of UIDs contacts can be selected with."""
- return self.get_list_of_accounts() + self.get_list_of_nicks()
+ """Return list of string-type UIDs that can be used to select a contact."""
+ return self.get_list_of_addresses() + self.get_list_of_nicks()
def has_contacts(self) -> bool:
- """Return True if contact list has any contacts, else False."""
- return any(self.get_list_of_accounts())
+ """Return True if ContactList has any contacts, else False."""
+ return any(self.get_list_of_contacts())
- def has_contact(self, selector: str) -> bool:
- """Return True if contact with account/nick exists, else False."""
- return selector in self.contact_selectors()
+ def has_only_pending_contacts(self) -> bool:
+ """Return True if ContactList only has pending contacts, else False."""
+ return all(c.kex_status == KEX_STATUS_PENDING for c in self.get_list_of_contacts())
+
+ def has_pub_key(self, onion_pub_key: bytes) -> bool:
+ """Return True if contact with public key exists, else False."""
+ return onion_pub_key in self.get_list_of_pub_keys()
def has_local_contact(self) -> bool:
- """Return True if local key exists, else False."""
- return any(c.rx_account == LOCAL_ID for c in self.contacts)
+ """Return True if the local key has been exchanged, else False."""
+ return any(c.onion_address == LOCAL_ID for c in self.contacts)
def print_contacts(self) -> None:
- """Print list of contacts."""
- # Columns
- c1 = ['Contact']
- c2 = ['Logging']
- c3 = ['Notify']
- c4 = ['Files ']
- c5 = ['Key Ex']
- c6 = ['Account']
+ """Print the list of contacts.
+ Neatly printed contact list allows easy contact management:
+ It allows the user to check active logging, file reception and
+ notification settings, as well as what key exchange was used
+    and what is the state of that key exchange. The contact list
+    also shows which nick each account displayed by the Relay Program
+    corresponds to.
+ """
+ # Initialize columns
+ c1 = ['Contact']
+ c2 = ['Account']
+ c3 = ['Logging']
+ c4 = ['Notify']
+ c5 = ['Files ']
+ c6 = ['Key Ex']
+
+ # Key exchange status dictionary
+ kex_dict = {KEX_STATUS_PENDING: f"{ECDHE} (Pending)",
+ KEX_STATUS_UNVERIFIED: f"{ECDHE} (Unverified)",
+ KEX_STATUS_VERIFIED: f"{ECDHE} (Verified)",
+ KEX_STATUS_NO_RX_PSK: f"{PSK} (No contact key)",
+ KEX_STATUS_HAS_RX_PSK: PSK
+ }
+
+ # Populate columns with contact data
for c in self.get_list_of_contacts():
c1.append(c.nick)
- c2.append('Yes' if c.log_messages else 'No')
- c3.append('Yes' if c.notifications else 'No')
- c4.append('Accept' if c.file_reception else 'Reject')
- c5.append('PSK' if c.tx_fingerprint == bytes(FINGERPRINT_LEN) else 'X25519')
- c6.append(c.rx_account)
+ c2.append(c.short_address)
+ c3.append('Yes' if c.log_messages else 'No')
+ c4.append('Yes' if c.notifications else 'No')
+ c5.append('Accept' if c.file_reception else 'Reject')
+ c6.append(kex_dict[c.kex_status])
- lst = []
- for nick, log_setting, notify_setting, file_rcv_setting, key_exchange, account in zip(c1, c2, c3, c4, c5, c6):
- lst.append('{0:{1}} {2:{3}} {4:{5}} {6:{7}} {8:{9}} {10}'.format(
- nick, max(len(v) for v in c1) + CONTACT_LIST_INDENT,
- log_setting, max(len(v) for v in c2) + CONTACT_LIST_INDENT,
- notify_setting, max(len(v) for v in c3) + CONTACT_LIST_INDENT,
- file_rcv_setting, max(len(v) for v in c4) + CONTACT_LIST_INDENT,
- key_exchange, max(len(v) for v in c5) + CONTACT_LIST_INDENT,
- account, max(len(v) for v in c6) + CONTACT_LIST_INDENT))
+ # Calculate column widths
+        c1w, c2w, c3w, c4w, c5w = [max(len(v) for v in column) + CONTACT_LIST_INDENT
+                                   for column in [c1, c2, c3, c4, c5]]
- lst.insert(1, get_terminal_width() * '─')
+ # Align columns by adding whitespace between fields of each line
+ lines = [f'{f1:{c1w}}{f2:{c2w}}{f3:{c3w}}{f4:{c4w}}{f5:{c5w}}{f6}'
+ for f1, f2, f3, f4, f5, f6 in zip(c1, c2, c3, c4, c5, c6)]
+
+ # Add a terminal-wide line between the column names and the data
+ lines.insert(1, get_terminal_width() * '─')
+
+ # Print the contact list
clear_screen()
- print('\n' + '\n'.join(lst) + '\n\n')
+ print('\n' + '\n'.join(lines) + '\n\n')
diff --git a/src/common/db_groups.py b/src/common/db_groups.py
index 17b562a..eae246a 100755
--- a/src/common/db_groups.py
+++ b/src/common/db_groups.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,7 +16,7 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see <http://www.gnu.org/licenses/>.
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
import os
@@ -25,9 +26,10 @@ import typing
from typing import Callable, Generator, Iterable, List, Sized
from src.common.crypto import auth_and_decrypt, encrypt_and_sign
-from src.common.encoding import bool_to_bytes, int_to_bytes, str_to_bytes
+from src.common.encoding import bool_to_bytes, int_to_bytes, str_to_bytes, onion_address_to_pub_key, b58encode
from src.common.encoding import bytes_to_bool, bytes_to_int, bytes_to_str
-from src.common.misc import ensure_dir, get_terminal_width, round_up, split_byte_string
+from src.common.misc import ensure_dir, get_terminal_width, round_up, separate_header, separate_headers
+from src.common.misc import split_byte_string
from src.common.statics import *
if typing.TYPE_CHECKING:
@@ -38,19 +40,73 @@ if typing.TYPE_CHECKING:
class Group(Iterable, Sized):
"""\
- Group object contains a list of contact objects
- (group members) and settings related to the group.
+ Group object contains a list of Contact objects (group members) and
+ settings related to the group:
+
+ name: In TFC, groups are identified by random group IDs
+ that are hard to remember. Groups are therefore
+ managed mostly with names assigned by the user. The
+ name of the group must be unique among group names
+ and nicknames of contacts. This way a single command
+ `/msg ` can select the specified contact
+ or group. Some group names are reserved, e.g., for
+ database padding. Group names also have a length
+ limit of 254 chars.
+
+ group_id: Group ID is a random 4-byte value used to identify a
+ group among user's peers. To prevent data leakage
+ from Destination Computer via group IDs, the
+ received group management messages are displayed by
+ the Relay Program on Networked Computer. Since group
+ ID must be considered public information, they are
+ random. For more details on Destination Computer
+ exfiltration attacks, refer to TFC's documentation
+ regarding Security Design. Identification of groups
+ via a separate group ID allows the user to choose the
+ name for the group which is useful because users do
+ not need to take into account what names their
+ contacts have chosen for their groups.
+
+ log_messages: This setting defines whether the Receiver Program
+ writes the assembly packets of a successfully
+ received group message into a log file. When logging
+ is enabled, Transmitter Program will also log
+ assembly packets of sent group messages to its log
+ file.
+
+ notifications: This setting defines whether in situations where some
+ other window is active the Receiver Program displays
+ a notification about a group member sending a new
+ message to the group's window. The setting has no
+ effect on user's Transmitter Program.
+
+ members: Manually managed list of Contact objects that the
+ user accepts as members of their side of the group.
+ The Transmitter Program of user multicasts messages
+ to these contacts when the group is active. The
+                   Receiver Program of the user accepts messages from these
+                   contacts to the group's window when the contact sends
+                   the user a message that contains the group ID in its
+                   header.
"""
def __init__(self,
name: str,
+ group_id: bytes,
log_messages: bool,
notifications: bool,
members: List['Contact'],
settings: 'Settings',
- store_groups: Callable) -> None:
- """Create a new Group object."""
+ store_groups: Callable
+ ) -> None:
+ """Create a new Group object.
+
+ The `self.store_groups` is a reference to the method of the
+ parent object GroupList that stores the list of groups into an
+ encrypted database.
+ """
self.name = name
+ self.group_id = group_id
self.log_messages = log_messages
self.notifications = notifications
self.members = members
@@ -58,67 +114,99 @@ class Group(Iterable, Sized):
self.store_groups = store_groups
def __iter__(self) -> Generator:
- """Iterate over members in group."""
+ """Iterate over members (Contact objects) in the Group object."""
yield from self.members
def __len__(self) -> int:
- """Return number of members in group."""
+ """Return the number of members in the Group object."""
return len(self.members)
def serialize_g(self) -> bytes:
- """Return group data as constant length byte string."""
- name = str_to_bytes(self.name)
- log_messages = bool_to_bytes(self.log_messages)
- notifications = bool_to_bytes(self.notifications)
- members = self.get_list_of_member_accounts()
- num_of_dummies = self.settings.max_number_of_group_members - len(self.members)
- members += num_of_dummies * [DUMMY_MEMBER]
- member_bytes = b''.join([str_to_bytes(m) for m in members])
+ """Return group data as a constant length bytestring.
- return name + log_messages + notifications + member_bytes
+ This function serializes the group's data into a bytestring
+ that always has a constant length. The exact length depends on
+ the attribute `max_number_of_group_members` of TFC's Settings
+ object. With the default setting of 50 members per group, the
+ length of the serialized data is
+ 1024 + 4 + 2*1 + 50*32 = 2630 bytes
+ The purpose of the constant length serialization is to hide any
+ metadata the ciphertext length of the group database could
+ reveal.
+ """
+ members = self.get_list_of_member_pub_keys()
+ number_of_dummies = self.settings.max_number_of_group_members - len(self.members)
+ members += number_of_dummies * [onion_address_to_pub_key(DUMMY_MEMBER)]
+ member_bytes = b''.join(members)
+
+ return (str_to_bytes(self.name)
+ + self.group_id
+ + bool_to_bytes(self.log_messages)
+ + bool_to_bytes(self.notifications)
+ + member_bytes)
def add_members(self, contacts: List['Contact']) -> None:
- """Add list of contact objects to group."""
- for c in contacts:
- if c.rx_account not in self.get_list_of_member_accounts():
- self.members.append(c)
+ """Add a list of Contact objects to the group."""
+ pre_existing = self.get_list_of_member_pub_keys()
+ self.members.extend((c for c in contacts if c.onion_pub_key not in pre_existing))
self.store_groups()
- def remove_members(self, accounts: List[str]) -> bool:
- """Remove contact objects from group."""
- to_remove = set(accounts) & set(self.get_list_of_member_accounts())
+ def remove_members(self, pub_keys: List[bytes]) -> bool:
+ """Remove a list of Contact objects from the group.
+
+ Return True if the member(s) were removed, else False.
+ """
+ to_remove = set(pub_keys) & set(self.get_list_of_member_pub_keys())
if to_remove:
- self.members = [m for m in self.members if m.rx_account not in to_remove]
+ self.members = [m for m in self.members if m.onion_pub_key not in to_remove]
self.store_groups()
return any(to_remove)
- def get_list_of_member_accounts(self) -> List[str]:
- """Return list of members' rx_accounts."""
- return [m.rx_account for m in self.members]
+ def get_list_of_member_pub_keys(self) -> List[bytes]:
+ """Return list of members' public keys."""
+ return [m.onion_pub_key for m in self.members]
- def get_list_of_member_nicks(self) -> List[str]:
- """Return list of members' nicks."""
- return [m.nick for m in self.members]
+ def has_member(self, onion_pub_key: bytes) -> bool:
+ """Return True if a member with Onion public key is in the group, else False."""
+ return any(m.onion_pub_key == onion_pub_key for m in self.members)
- def has_member(self, account: str) -> bool:
- """Return True if specified account is in group, else False."""
- return any(m.rx_account == account for m in self.members)
-
- def has_members(self) -> bool:
- """Return True if group has contact objects, else False."""
- return any(self.members)
+ def empty(self) -> bool:
+ """Return True if the group is empty, else False."""
+ return not any(self.members)
class GroupList(Iterable, Sized):
"""\
- GroupList object manages list of group
- objects and encrypted group database.
+ GroupList object manages TFC's Group objects and the storage of the
+ objects in an encrypted database.
+
+ The main purpose of this object is to manage the `self.groups`-list
+ that contains TFC's groups. The database is stored on disk in
+ encrypted form. Prior to encryption, the database is padded with
+ dummy groups. Because each group might have a different number of
+ members, each group is also padded with dummy members. The dummy
+ groups and members hide the actual number of groups and members that
+ could otherwise be revealed by the size of the encrypted database.
+
+ As long as the user sticks to default settings that limits TFC's
+ group database to 50 groups and 50 members per group, the database
+ will effectively hide the actual number of groups and number of
+ members in them. The maximum number of groups and number of members
+ per group can be changed by editing the `max_number_of_groups` and
+ `max_number_of_group_members` settings respectively. Deviating from
+ the default settings can, however, in theory, reveal to a physical
+ attacker, the user has more than 50 groups or more than 50 members
+ in a group.
+
+ The GroupList object also provides handy methods with human-readable
+ names for making queries to the database.
"""
def __init__(self,
master_key: 'MasterKey',
settings: 'Settings',
- contact_list: 'ContactList') -> None:
+ contact_list: 'ContactList'
+ ) -> None:
"""Create a new GroupList object."""
self.master_key = master_key
self.settings = settings
@@ -128,197 +216,316 @@ class GroupList(Iterable, Sized):
ensure_dir(DIR_USER_DATA)
if os.path.isfile(self.file_name):
- self.load_groups()
+ self._load_groups()
else:
self.store_groups()
def __iter__(self) -> Generator:
- """Iterate over list of groups."""
+ """Iterate over Group objects in `self.groups`."""
yield from self.groups
def __len__(self) -> int:
- """Return number of groups."""
+ """Return the number of Group objects in `self.groups`."""
return len(self.groups)
def store_groups(self) -> None:
- """Write groups to encrypted database."""
- groups = self.groups + [self.generate_dummy_group()] * (self.settings.max_number_of_groups - len(self.groups))
- pt_bytes = self.generate_group_db_header()
- pt_bytes += b''.join([g.serialize_g() for g in groups])
+ """Write the list of groups to an encrypted database.
+
+ This function will first generate a header that stores
+ information about the group database content and padding at the
+ moment of calling. Next, the function will serialize every Group
+ object (including dummy groups) to form the constant length
+ plaintext that will be encrypted and stored in the database.
+
+ By default, TFC has a maximum number of 50 groups with 50
+ members. In addition, the group database stores the header that
+ contains four 8-byte values. The database plaintext length with
+ 50 groups, each with 50 members is
+ 4*8 + 50*( 1024 + 4 + 2*1 + 50*32)
+ = 32 + 50*2630
+ = 131532 bytes.
+
+ The ciphertext includes a 24-byte nonce and a 16-byte tag, so
+ the size of the final database is 131572 bytes.
+ """
+ pt_bytes = self._generate_group_db_header()
+ pt_bytes += b''.join([g.serialize_g() for g in (self.groups + self._dummy_groups())])
ct_bytes = encrypt_and_sign(pt_bytes, self.master_key.master_key)
ensure_dir(DIR_USER_DATA)
with open(self.file_name, 'wb+') as f:
f.write(ct_bytes)
- def load_groups(self) -> None:
- """Load groups from encrypted database."""
+ def _load_groups(self) -> None:
+ """Load groups from the encrypted database.
+
+ The function first reads, authenticates and decrypts the group
+ database data. Next, it slices and decodes the header values
+ that help the function to properly de-serialize the database
+ content. The function then removes dummy groups based on header
+ data. Next, the function updates the group database settings if
+ necessary. It then splits group data based on header data into
+ blocks, which are further sliced, and processed if necessary, to
+ obtain data required to create Group objects. Finally, if
+ needed, the function will update the group database content.
+ """
with open(self.file_name, 'rb') as f:
ct_bytes = f.read()
-
- pt_bytes = auth_and_decrypt(ct_bytes, self.master_key.master_key)
- update_db = False
+ pt_bytes = auth_and_decrypt(ct_bytes, self.master_key.master_key, database=self.file_name)
# Slice and decode headers
- padding_for_group_db = bytes_to_int(pt_bytes[0:8])
- padding_for_members = bytes_to_int(pt_bytes[8:16])
- number_of_actual_groups = bytes_to_int(pt_bytes[16:24])
- largest_group = bytes_to_int(pt_bytes[24:32])
+ group_db_headers, pt_bytes = separate_header(pt_bytes, GROUP_DB_HEADER_LENGTH)
- if number_of_actual_groups > self.settings.max_number_of_groups:
- self.settings.max_number_of_groups = round_up(number_of_actual_groups)
- self.settings.store_settings()
- update_db = True
- print("Group database had {} groups. Increased max number of groups to {}."
- .format(number_of_actual_groups, self.settings.max_number_of_groups))
+ padding_for_group_db, padding_for_members, number_of_groups, members_in_largest_group \
+ = list(map(bytes_to_int, split_byte_string(group_db_headers, ENCODED_INTEGER_LENGTH)))
- if largest_group > self.settings.max_number_of_group_members:
- self.settings.max_number_of_group_members = round_up(largest_group)
- self.settings.store_settings()
- update_db = True
- print("A group in group database had {} members. Increased max size of groups to {}."
- .format(largest_group, self.settings.max_number_of_group_members))
+ # Slice dummy groups
+ bytes_per_group = GROUP_STATIC_LENGTH + padding_for_members * ONION_SERVICE_PUBLIC_KEY_LENGTH
+ dummy_data_len = (padding_for_group_db - number_of_groups) * bytes_per_group
+ group_data = pt_bytes[:-dummy_data_len]
- group_name_field = 1
- string_fields_in_group = padding_for_members + group_name_field
- bytes_per_group = string_fields_in_group * PADDED_UTF32_STR_LEN + 2 * BOOLEAN_SETTING_LEN
+ update_db = self._check_db_settings(number_of_groups, members_in_largest_group)
+ blocks = split_byte_string(group_data, item_len=bytes_per_group)
- # Remove group header and dummy groups
- dummy_group_data = (padding_for_group_db - number_of_actual_groups) * bytes_per_group
- group_data = pt_bytes[GROUP_DB_HEADER_LEN:-dummy_group_data]
+ all_pub_keys = self.contact_list.get_list_of_pub_keys()
+ dummy_pub_key = onion_address_to_pub_key(DUMMY_MEMBER)
- groups = split_byte_string(group_data, item_len=bytes_per_group)
+ # Deserialize group objects
+ for block in blocks:
+ assert len(block) == bytes_per_group
- for g in groups:
- assert len(g) == bytes_per_group
+ name_bytes, group_id, log_messages_byte, notification_byte, ser_pub_keys \
+ = separate_headers(block, [PADDED_UTF32_STR_LENGTH, GROUP_ID_LENGTH] + 2*[ENCODED_BOOLEAN_LENGTH])
- name = bytes_to_str( g[ 0:1024])
- log_messages = bytes_to_bool( g[1024:1025])
- notifications = bytes_to_bool( g[1025:1026])
- members_bytes = split_byte_string(g[1026:], item_len=PADDED_UTF32_STR_LEN)
- members_w_dummies = [bytes_to_str(m) for m in members_bytes]
- members = [m for m in members_w_dummies if m != DUMMY_MEMBER]
+ pub_key_list = split_byte_string(ser_pub_keys, item_len=ONION_SERVICE_PUBLIC_KEY_LENGTH)
+ group_pub_keys = [k for k in pub_key_list if k != dummy_pub_key]
+ group_members = [self.contact_list.get_contact_by_pub_key(k) for k in group_pub_keys if k in all_pub_keys]
- # Load contacts based on stored rx_account
- group_members = [self.contact_list.get_contact(m) for m in members if self.contact_list.has_contact(m)]
+ self.groups.append(Group(name =bytes_to_str(name_bytes),
+ group_id =group_id,
+ log_messages =bytes_to_bool(log_messages_byte),
+ notifications=bytes_to_bool(notification_byte),
+ members =group_members,
+ settings =self.settings,
+ store_groups =self.store_groups))
- # Update group database if any member has been removed from contact database
- if not all(m in self.contact_list.get_list_of_accounts() for m in members):
- update_db = True
-
- self.groups.append(Group(name, log_messages, notifications, group_members, self.settings, self.store_groups))
+ update_db |= set(all_pub_keys) > set(group_pub_keys)
if update_db:
self.store_groups()
- def generate_group_db_header(self) -> bytes:
+ def _check_db_settings(self,
+ number_of_actual_groups: int,
+ members_in_largest_group: int
+ ) -> bool:
+ """\
+ Adjust TFC's settings automatically if loaded group database was
+ stored using larger database setting values.
+
+ If settings had to be adjusted, return True so
+ `self._load_groups` knows to write changes to a new database.
+ """
+ update_db = False
+
+ if number_of_actual_groups > self.settings.max_number_of_groups:
+ self.settings.max_number_of_groups = round_up(number_of_actual_groups)
+ update_db = True
+
+ if members_in_largest_group > self.settings.max_number_of_group_members:
+ self.settings.max_number_of_group_members = round_up(members_in_largest_group)
+ update_db = True
+
+ if update_db:
+ self.settings.store_settings()
+
+ return update_db
+
+ def _generate_group_db_header(self) -> bytes:
"""Generate group database metadata header.
- padding_for_group_db helps define how many groups are actually in the database.
+ This function produces a 32-byte bytestring that contains four
+ values that allow the Transmitter or Receiver program to
+ properly de-serialize the database content:
- padding_for_members defines to how many members each group is padded to.
+ `max_number_of_groups` helps slice off dummy groups when
+ loading the database.
- number_of_actual_groups helps define how many groups are actually in the database.
- Also allows TFC to automatically adjust the minimum
- settings for number of groups. This is needed e.g. in cases
- where the group database is swapped to a backup that has
- different number of groups than TFC's settings expect.
+ `max_number_of_group_members` helps split dummy free group data
+ into proper length blocks that can
+ be further sliced and decoded to
+ data used to build Group objects.
- largest_group helps TFC to automatically adjust minimum setting for max
- number of members in each group (e.g. in cases like the one
- described above).
+ `len(self.groups)` helps slice off dummy groups when
+ loading the database. It also
+ allows TFC to automatically adjust
+ the max_number_of_groups setting.
+ The value is needed, e.g., in
+ cases where the group database is
+ swapped to a backup that has a
+ different number of groups than
+ TFC's settings expect.
+
+ `self.largest_group()` helps TFC to automatically adjust
+ the max_number_of_group_members
+ setting (e.g., in cases like the
+ one described above).
"""
return b''.join(list(map(int_to_bytes, [self.settings.max_number_of_groups,
self.settings.max_number_of_group_members,
len(self.groups),
self.largest_group()])))
- def generate_dummy_group(self) -> 'Group':
- """Generate a dummy group."""
+ def _generate_dummy_group(self) -> 'Group':
+ """Generate a dummy Group object.
+
+ The dummy group simplifies the code around the constant length
+ serialization when the data is stored to, or read from the
+ database.
+ """
+ dummy_member = self.contact_list.generate_dummy_contact()
+
return Group(name =DUMMY_GROUP,
+ group_id =bytes(GROUP_ID_LENGTH),
log_messages =False,
notifications=False,
- members =self.settings.max_number_of_group_members * [self.contact_list.generate_dummy_contact()],
+ members =self.settings.max_number_of_group_members * [dummy_member],
settings =self.settings,
store_groups =lambda: None)
+ def _dummy_groups(self) -> List[Group]:
+ """Generate a proper size list of dummy groups for database padding."""
+ number_of_dummies = self.settings.max_number_of_groups - len(self.groups)
+ dummy_group = self._generate_dummy_group()
+ return [dummy_group] * number_of_dummies
+
def add_group(self,
name: str,
+ group_id: bytes,
log_messages: bool,
notifications: bool,
members: List['Contact']) -> None:
- """Add a new group to group list."""
+ """Add a new group to `self.groups` and write changes to the database."""
if self.has_group(name):
- self.remove_group(name)
+ self.remove_group_by_name(name)
- self.groups.append(Group(name, log_messages, notifications, members, self.settings, self.store_groups))
+ self.groups.append(Group(name,
+ group_id,
+ log_messages,
+ notifications,
+ members,
+ self.settings,
+ self.store_groups))
self.store_groups()
- def remove_group(self, name: str) -> None:
- """Remove group from group list."""
+ def remove_group_by_name(self, name: str) -> None:
+ """Remove the specified group from the group list.
+
+ If a group with the matching name was found and removed, write
+ changes to the database.
+ """
for i, g in enumerate(self.groups):
if g.name == name:
del self.groups[i]
self.store_groups()
break
+ def remove_group_by_id(self, group_id: bytes) -> None:
+ """Remove the specified group from the group list.
+
+ If a group with the matching group ID was found and removed,
+ write changes to the database.
+ """
+ for i, g in enumerate(self.groups):
+ if g.group_id == group_id:
+ del self.groups[i]
+ self.store_groups()
+ break
+
+ def get_group(self, name: str) -> Group:
+ """Return Group object based on its name."""
+ return next(g for g in self.groups if g.name == name)
+
+ def get_group_by_id(self, group_id: bytes) -> Group:
+ """Return Group object based on its group ID."""
+ return next(g for g in self.groups if g.group_id == group_id)
+
def get_list_of_group_names(self) -> List[str]:
"""Return list of group names."""
return [g.name for g in self.groups]
- def get_group(self, name: str) -> Group:
- """Return group object based on it's name."""
- return next(g for g in self.groups if g.name == name)
+ def get_list_of_group_ids(self) -> List[bytes]:
+ """Return list of group IDs."""
+ return [g.group_id for g in self.groups]
- def get_group_members(self, name: str) -> List['Contact']:
- """Return list of group members."""
- return self.get_group(name).members
+ def get_list_of_hr_group_ids(self) -> List[str]:
+ """Return list of human readable (B58 encoded) group IDs."""
+ return [b58encode(g.group_id) for g in self.groups]
+
+ def get_group_members(self, group_id: bytes) -> List['Contact']:
+ """Return list of group members (Contact objects)."""
+ return self.get_group_by_id(group_id).members
def has_group(self, name: str) -> bool:
- """Return True if group list has group with specified name, else False."""
- return any([g.name == name for g in self.groups])
+ """Return True if group list has a group with the specified name, else False."""
+ return any(g.name == name for g in self.groups)
- def has_groups(self) -> bool:
- """Return True if group list has groups, else False."""
- return any(self.groups)
+ def has_group_id(self, group_id: bytes) -> bool:
+ """Return True if group list has a group with the specified group ID, else False."""
+ return any(g.group_id == group_id for g in self.groups)
def largest_group(self) -> int:
- """Return size of group with most members."""
+ """Return size of the group that has the most members."""
return max([0] + [len(g) for g in self.groups])
def print_groups(self) -> None:
- """Print list of groups."""
- # Columns
- c1 = ['Group ']
- c2 = ['Logging']
- c3 = ['Notify' ]
- c4 = ['Members']
+ """Print list of groups.
+ Neatly printed group list allows easy group management and it
+ also allows the user to check active logging and notification
+ setting, as well as what group ID Relay Program shows
+ corresponds to what group, and which contacts are in the group.
+ """
+ # Initialize columns
+ c1 = ['Group' ]
+ c2 = ['Group ID']
+ c3 = ['Logging ']
+ c4 = ['Notify' ]
+ c5 = ['Members' ]
+
+ # Populate columns with group data that has only a single line
for g in self.groups:
c1.append(g.name)
- c2.append('Yes' if g.log_messages else 'No')
- c3.append('Yes' if g.notifications else 'No')
+ c2.append(b58encode(g.group_id))
+ c3.append('Yes' if g.log_messages else 'No')
+ c4.append('Yes' if g.notifications else 'No')
- if g.has_members():
- m_indent = max(len(g.name) for g in self.groups) + 28
- m_string = ', '.join(sorted([m.nick for m in g.members]))
- wrapper = textwrap.TextWrapper(width=max(1, (get_terminal_width() - m_indent)))
- mem_lines = wrapper.fill(m_string).split('\n')
- f_string = mem_lines[0] + '\n'
+ # Calculate the width of single-line columns
+ c1w, c2w, c3w, c4w = [max(len(v) for v in column) + CONTACT_LIST_INDENT for column in [c1, c2, c3, c4]]
- for l in mem_lines[1:]:
- f_string += m_indent * ' ' + l + '\n'
- c4.append(f_string)
+ # Create a wrapper for Members-column
+ wrapped_members_line_indent = c1w + c2w + c3w + c4w
+ members_column_width = max(1, get_terminal_width() - wrapped_members_line_indent)
+ wrapper = textwrap.TextWrapper(width=members_column_width)
+
+ # Populate the Members-column
+ for g in self.groups:
+ if g.empty():
+ c5.append("\n")
else:
- c4.append("\n")
+ comma_separated_nicks = ', '.join(sorted([m.nick for m in g.members]))
+ members_column_lines = wrapper.fill(comma_separated_nicks).split('\n')
- lst = []
- for name, log_setting, notify_setting, members in zip(c1, c2, c3, c4):
- lst.append('{0:{1}} {2:{3}} {4:{5}} {6}'.format(
- name, max(len(v) for v in c1) + CONTACT_LIST_INDENT,
- log_setting, max(len(v) for v in c2) + CONTACT_LIST_INDENT,
- notify_setting, max(len(v) for v in c3) + CONTACT_LIST_INDENT,
- members))
+ final_str = members_column_lines[0] + '\n'
+ for line in members_column_lines[1:]:
+ final_str += wrapped_members_line_indent * ' ' + line + '\n'
- lst.insert(1, get_terminal_width() * '─')
- print('\n'.join(lst) + '\n')
+ c5.append(final_str)
+
+ # Align columns by adding whitespace between fields of each line
+ lines = [f'{f1:{c1w}}{f2:{c2w}}{f3:{c3w}}{f4:{c4w}}{f5}' for f1, f2, f3, f4, f5 in zip(c1, c2, c3, c4, c5)]
+
+ # Add a terminal-wide line between the column names and the data
+ lines.insert(1, get_terminal_width() * '─')
+
+ # Print the group list
+ print('\n'.join(lines) + '\n')
diff --git a/src/common/db_keys.py b/src/common/db_keys.py
index 1d39709..015ba40 100644
--- a/src/common/db_keys.py
+++ b/src/common/db_keys.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,7 +16,7 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see <http://www.gnu.org/licenses/>.
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
import os
@@ -23,11 +24,11 @@ import typing
from typing import Any, Callable, List
-from src.common.crypto import auth_and_decrypt, encrypt_and_sign, hash_chain
+from src.common.crypto import auth_and_decrypt, blake2b, csprng, encrypt_and_sign
+from src.common.encoding import int_to_bytes, onion_address_to_pub_key
+from src.common.encoding import bytes_to_int
from src.common.exceptions import CriticalError
-from src.common.encoding import str_to_bytes, int_to_bytes
-from src.common.encoding import bytes_to_str, bytes_to_int
-from src.common.misc import ensure_dir, split_byte_string
+from src.common.misc import ensure_dir, separate_headers, split_byte_string
from src.common.statics import *
if typing.TYPE_CHECKING:
@@ -37,69 +38,114 @@ if typing.TYPE_CHECKING:
class KeySet(object):
"""\
- KeySet object handles frequently changing
- keys and hash ratchet counters of contacts.
+ KeySet object contains frequently changing keys and hash ratchet
+ counters of contacts:
+
+ onion_pub_key: The public key that corresponds to the contact's v3
+ Tor Onion Service address. Used to uniquely identify
+ the KeySet object.
+
+ tx_mk: Forward secret message key for sent messages.
+
+ rx_mk: Forward secret message key for received messages.
+ Used only by the Receiver Program.
+
+ tx_hk: Static header key used to encrypt and sign the hash
+ ratchet counter provided along the encrypted
+ assembly packet.
+
+ rx_hk: Static header key used to authenticate and decrypt
+ the hash ratchet counter of received messages. Used
+ only by the Receiver Program.
+
+ tx_harac: The hash ratchet counter for sent messages.
+
+ rx_harac: The hash ratchet counter for received messages. Used
+ only by the Receiver Program.
"""
def __init__(self,
- rx_account: str,
- tx_key: bytes,
- rx_key: bytes,
- tx_hek: bytes,
- rx_hek: bytes,
- tx_harac: int,
- rx_harac: int,
- store_keys: Callable) -> None:
+ onion_pub_key: bytes,
+ tx_mk: bytes,
+ rx_mk: bytes,
+ tx_hk: bytes,
+ rx_hk: bytes,
+ tx_harac: int,
+ rx_harac: int,
+ store_keys: Callable
+ ) -> None:
"""Create a new KeySet object.
- :param rx_account: UID for each recipient
- :param tx_key: Forward secret message key for sent messages
- :param rx_key: Forward secret message key for received messages (RxM only)
- :param tx_hek: Static header key for hash ratchet counter of sent messages
- :param rx_hek: Static header key for hash ratchet counter of received messages (RxM only)
- :param tx_harac: Hash ratchet counter for sent messages
- :param rx_harac: Hash ratchet counter for received messages (RxM only)
- :param store_keys: Reference to KeyLists's method that writes all keys to db
+ The `self.store_keys` is a reference to the method of the parent
+ object KeyList that stores the list of KeySet objects into an
+ encrypted database.
"""
- self.rx_account = rx_account
- self.tx_key = tx_key
- self.rx_key = rx_key
- self.tx_hek = tx_hek
- self.rx_hek = rx_hek
- self.tx_harac = tx_harac
- self.rx_harac = rx_harac
- self.store_keys = store_keys
+ self.onion_pub_key = onion_pub_key
+ self.tx_mk = tx_mk
+ self.rx_mk = rx_mk
+ self.tx_hk = tx_hk
+ self.rx_hk = rx_hk
+ self.tx_harac = tx_harac
+ self.rx_harac = rx_harac
+ self.store_keys = store_keys
def serialize_k(self) -> bytes:
- """Return keyset data as constant length byte string."""
- return (str_to_bytes(self.rx_account)
- + self.tx_key
- + self.rx_key
- + self.tx_hek
- + self.rx_hek
+ """Return KeySet data as a constant length byte string.
+
+ This function serializes the KeySet's data into a byte string
+ that has the exact length of 32 + 4*32 + 2*8 = 176 bytes. The
+ length is guaranteed regardless of the content of the
+ attributes' values. The purpose of the constant length
+ serialization is to hide any metadata about the KeySet database
+ the ciphertext length of the key database would reveal.
+ """
+ return (self.onion_pub_key
+ + self.tx_mk
+ + self.rx_mk
+ + self.tx_hk
+ + self.rx_hk
+ int_to_bytes(self.tx_harac)
+ int_to_bytes(self.rx_harac))
- def rotate_tx_key(self) -> None:
+ def rotate_tx_mk(self) -> None:
"""\
- Update TxM side tx-key and harac (provides
- forward secrecy for sent messages).
+ Update Transmitter Program's tx-message key and tx-harac.
+
+ Replacing the key with its hash provides per-message forward
+ secrecy for sent messages. The hash ratchet used is also known
+ as the SCIMP Ratchet[1], and it is widely used, e.g., as part of
+ Signal's Double Ratchet[2].
+
+ To ensure the hash ratchet does not fall into a short cycle of
+ keys, the harac (that is a non-repeating value) is used as an
+ additional input when deriving the next key.
+
+ [1] (pp. 17-18) https://netzpolitik.org/wp-upload/SCIMP-paper.pdf
+ [2] https://signal.org/blog/advanced-ratcheting/
"""
- self.tx_key = hash_chain(self.tx_key)
+ self.tx_mk = blake2b(self.tx_mk + int_to_bytes(self.tx_harac), digest_size=SYMMETRIC_KEY_LENGTH)
self.tx_harac += 1
self.store_keys()
- def update_key(self, direction: str, key: bytes, offset: int) -> None:
- """\
- Update RxM side tx/rx-key and harac (provides
- forward secrecy for received messages).
+ def update_mk(self,
+ direction: str,
+ key: bytes,
+ offset: int
+ ) -> None:
+ """Update Receiver Program's tx/rx-message key and tx/rx-harac.
+
+ This method provides per-message forward secrecy for received
+ messages. Due to the possibility of dropped packets, the
+ Receiver Program might have to jump over some key values and
+ ratchet counter states. Therefore, the increase done by this
+ function is not linear like in the case of `rotate_tx_mk`.
"""
if direction == TX:
- self.tx_key = key
+ self.tx_mk = key
self.tx_harac += offset
self.store_keys()
elif direction == RX:
- self.rx_key = key
+ self.rx_mk = key
self.rx_harac += offset
self.store_keys()
else:
@@ -108,12 +154,22 @@ class KeySet(object):
class KeyList(object):
"""\
- KeyList object manages list of KeySet
- objects and encrypted keyset database.
+ KeyList object manages TFC's KeySet objects and the storage of the
+ objects in an encrypted database.
- The keyset database is separated from contact database as traffic
+ The main purpose of this object is to manage the `self.keysets`-list
+ that contains TFC's keys. The database is stored on disk in
+ encrypted form. Prior to encryption, the database is padded with
+ dummy KeySets. The dummy KeySets hide the number of actual KeySets
+ and thus the number of contacts, that would otherwise be revealed by
+ the size of the encrypted database. As long as the user has less
+ than 50 contacts, the database will effectively hide the actual
+ number of contacts.
+
+ The KeySet database is separated from contact database as traffic
masking needs to update keys frequently with no risk of read/write
- queue blocking that occurs e.g. when new nick is being stored.
+ queue blocking that occurs, e.g., when an updated nick of contact is
+ being stored in the database.
"""
def __init__(self, master_key: 'MasterKey', settings: 'Settings') -> None:
@@ -122,114 +178,190 @@ class KeyList(object):
self.settings = settings
self.keysets = [] # type: List[KeySet]
self.dummy_keyset = self.generate_dummy_keyset()
- self.dummy_id = self.dummy_keyset.rx_account.encode('utf-32')
+ self.dummy_id = self.dummy_keyset.onion_pub_key
self.file_name = f'{DIR_USER_DATA}{settings.software_operation}_keys'
ensure_dir(DIR_USER_DATA)
if os.path.isfile(self.file_name):
- self.load_keys()
+ self._load_keys()
else:
self.store_keys()
def store_keys(self) -> None:
- """Write keys to encrypted database."""
- keysets = self.keysets + [self.dummy_keyset] * (self.settings.max_number_of_contacts - len(self.keysets))
- pt_bytes = b''.join([k.serialize_k() for k in keysets])
+ """Write the list of KeySet objects to an encrypted database.
+
+ This function will first create a list of KeySets and dummy
+ KeySets. It will then serialize every KeySet object on that list
+ and join the constant length byte strings to form the plaintext
+ that will be encrypted and stored in the database.
+
+ By default, TFC has a maximum number of 50 contacts. In
+ addition, the database stores the KeySet used to encrypt
+ commands from Transmitter to Receiver Program. The plaintext
+ length of 51 serialized KeySets is 51*176 = 8976 bytes. The
+ ciphertext includes a 24-byte nonce and a 16-byte tag, so the
+ size of the final database is 9016 bytes.
+ """
+ pt_bytes = b''.join([k.serialize_k() for k in self.keysets + self._dummy_keysets()])
ct_bytes = encrypt_and_sign(pt_bytes, self.master_key.master_key)
ensure_dir(DIR_USER_DATA)
with open(self.file_name, 'wb+') as f:
f.write(ct_bytes)
- def load_keys(self) -> None:
- """Load keys from encrypted database."""
+ def _load_keys(self) -> None:
+ """Load KeySets from the encrypted database.
+
+ This function first reads and decrypts the database content. It
+ then splits the plaintext into a list of 176-byte blocks. Each
+ block contains the serialized data of one KeySet. Next, the
+ function will remove from the list all dummy KeySets (that start
+ with the `dummy_id` byte string). The function will then
+ populate the `self.keysets` list with KeySet objects, the data
+ of which is sliced and decoded from the dummy-free blocks.
+ """
with open(self.file_name, 'rb') as f:
ct_bytes = f.read()
- pt_bytes = auth_and_decrypt(ct_bytes, self.master_key.master_key)
- entries = split_byte_string(pt_bytes, item_len=KEYSET_LENGTH)
- keysets = [e for e in entries if not e.startswith(self.dummy_id)]
+ pt_bytes = auth_and_decrypt(ct_bytes, self.master_key.master_key, database=self.file_name)
+ blocks = split_byte_string(pt_bytes, item_len=KEYSET_LENGTH)
+ df_blocks = [b for b in blocks if not b.startswith(self.dummy_id)]
- for k in keysets:
- assert len(k) == KEYSET_LENGTH
+ for block in df_blocks:
+ assert len(block) == KEYSET_LENGTH
- self.keysets.append(KeySet(rx_account=bytes_to_str(k[ 0:1024]),
- tx_key = k[1024:1056],
- rx_key = k[1056:1088],
- tx_hek = k[1088:1120],
- rx_hek = k[1120:1152],
- tx_harac =bytes_to_int(k[1152:1160]),
- rx_harac =bytes_to_int(k[1160:1168]),
+ onion_pub_key, tx_mk, rx_mk, tx_hk, rx_hk, tx_harac_bytes, rx_harac_bytes \
+ = separate_headers(block, [ONION_SERVICE_PUBLIC_KEY_LENGTH] + 4*[SYMMETRIC_KEY_LENGTH] + [HARAC_LENGTH])
+
+ self.keysets.append(KeySet(onion_pub_key=onion_pub_key,
+ tx_mk=tx_mk,
+ rx_mk=rx_mk,
+ tx_hk=tx_hk,
+ rx_hk=rx_hk,
+ tx_harac=bytes_to_int(tx_harac_bytes),
+ rx_harac=bytes_to_int(rx_harac_bytes),
store_keys=self.store_keys))
- def change_master_key(self, master_key: 'MasterKey') -> None:
- """Change master key and encrypt database with new key."""
- self.master_key = master_key
- self.store_keys()
-
@staticmethod
def generate_dummy_keyset() -> 'KeySet':
- """Generate dummy keyset."""
- return KeySet(rx_account=DUMMY_CONTACT,
- tx_key =bytes(KEY_LENGTH),
- rx_key =bytes(KEY_LENGTH),
- tx_hek =bytes(KEY_LENGTH),
- rx_hek =bytes(KEY_LENGTH),
- tx_harac =INITIAL_HARAC,
- rx_harac =INITIAL_HARAC,
+ """Generate a dummy KeySet object.
+
+ The dummy KeySet simplifies the code around the constant length
+ serialization when the data is stored to, or read from the
+ database.
+
+ In case the dummy keyset would ever be loaded accidentally, it
+ uses a set of random keys to prevent decryption by eavesdropper.
+ """
+ return KeySet(onion_pub_key=onion_address_to_pub_key(DUMMY_CONTACT),
+ tx_mk=csprng(),
+ rx_mk=csprng(),
+ tx_hk=csprng(),
+ rx_hk=csprng(),
+ tx_harac=INITIAL_HARAC,
+ rx_harac=INITIAL_HARAC,
store_keys=lambda: None)
- def add_keyset(self,
- rx_account: str,
- tx_key: bytes,
- rx_key: bytes,
- tx_hek: bytes,
- rx_hek: bytes) -> None:
- """Add new keyset to key list and write changes to database."""
- if self.has_keyset(rx_account):
- self.remove_keyset(rx_account)
+ def _dummy_keysets(self) -> List[KeySet]:
+ """\
+ Generate a proper size list of dummy KeySets for database
+ padding.
- self.keysets.append(KeySet(rx_account,
- tx_key, rx_key,
- tx_hek, rx_hek,
- INITIAL_HARAC, INITIAL_HARAC,
- self.store_keys))
+ The additional contact (+1) is the local key.
+ """
+ number_of_contacts_to_store = self.settings.max_number_of_contacts + 1
+ number_of_dummies = number_of_contacts_to_store - len(self.keysets)
+ return [self.dummy_keyset] * number_of_dummies
+
+ def add_keyset(self,
+ onion_pub_key: bytes,
+ tx_mk: bytes,
+ rx_mk: bytes,
+ tx_hk: bytes,
+ rx_hk: bytes) -> None:
+ """\
+ Add a new KeySet to `self.keysets` list and write changes to the
+ database.
+ """
+ if self.has_keyset(onion_pub_key):
+ self.remove_keyset(onion_pub_key)
+
+ self.keysets.append(KeySet(onion_pub_key=onion_pub_key,
+ tx_mk=tx_mk,
+ rx_mk=rx_mk,
+ tx_hk=tx_hk,
+ rx_hk=rx_hk,
+ tx_harac=INITIAL_HARAC,
+ rx_harac=INITIAL_HARAC,
+ store_keys=self.store_keys))
self.store_keys()
- def remove_keyset(self, name: str) -> None:
+ def remove_keyset(self, onion_pub_key: bytes) -> None:
"""\
- Remove keyset from keys based on account
- and write changes to database.
+ Remove KeySet from `self.keysets` based on Onion Service public key.
+
+ If the KeySet was found and removed, write changes to the database.
"""
for i, k in enumerate(self.keysets):
- if name == k.rx_account:
+ if k.onion_pub_key == onion_pub_key:
del self.keysets[i]
self.store_keys()
break
- def get_keyset(self, account: str) -> KeySet:
- """Load keyset from list based on unique account name."""
- return next(k for k in self.keysets if account == k.rx_account)
+ def change_master_key(self, master_key: 'MasterKey') -> None:
+ """Change the master key and encrypt the database with the new key."""
+ self.master_key = master_key
+ self.store_keys()
- def has_keyset(self, account: str) -> bool:
- """Return True if keyset for account exists, else False."""
- return any(account == k.rx_account for k in self.keysets)
+ def update_database(self, settings: 'Settings') -> None:
+ """Update settings and database size."""
+ self.settings = settings
+ self.store_keys()
- def has_rx_key(self, account: str) -> bool:
- """Return True if keyset has rx-key, else False."""
- return self.get_keyset(account).rx_key != bytes(KEY_LENGTH)
+ def get_keyset(self, onion_pub_key: bytes) -> KeySet:
+ """\
+ Return KeySet object from `self.keysets`-list that matches the
+ Onion Service public key used as the selector.
+ """
+ return next(k for k in self.keysets if k.onion_pub_key == onion_pub_key)
- def has_local_key(self) -> bool:
- """Return True if local key exists, else False."""
- return any(k.rx_account == LOCAL_ID for k in self.keysets)
+ def get_list_of_pub_keys(self) -> List[bytes]:
+ """Return list of Onion Service public keys for KeySets."""
+ return [k.onion_pub_key for k in self.keysets if k.onion_pub_key != LOCAL_PUBKEY]
+
+ def has_keyset(self, onion_pub_key: bytes) -> bool:
+ """Return True if KeySet with matching Onion Service public key exists, else False."""
+ return any(onion_pub_key == k.onion_pub_key for k in self.keysets)
+
+ def has_rx_mk(self, onion_pub_key: bytes) -> bool:
+ """\
+ Return True if KeySet with matching Onion Service public key has
+ rx-message key, else False.
+
+ When the PSK key exchange option is selected, the KeySet for
+ newly created contact on Receiver Program is a null-byte string.
+ This default value indicates the PSK of contact has not yet been
+ imported.
+ """
+ return self.get_keyset(onion_pub_key).rx_mk != bytes(SYMMETRIC_KEY_LENGTH)
+
+ def has_local_keyset(self) -> bool:
+ """Return True if local KeySet object exists, else False."""
+ return any(k.onion_pub_key == LOCAL_PUBKEY for k in self.keysets)
def manage(self, command: str, *params: Any) -> None:
- """Manage keyset database based on data received from km_queue."""
+ """Manage KeyList based on a command.
+
+ The command is delivered from `input_process` to `sender_loop`
+ process via the `KEY_MANAGEMENT_QUEUE`.
+ """
if command == KDB_ADD_ENTRY_HEADER:
self.add_keyset(*params)
elif command == KDB_REMOVE_ENTRY_HEADER:
self.remove_keyset(*params)
elif command == KDB_CHANGE_MASTER_KEY_HEADER:
self.change_master_key(*params)
+ elif command == KDB_UPDATE_SIZE_HEADER:
+ self.update_database(*params)
else:
raise CriticalError("Invalid KeyList management command.")
diff --git a/src/common/db_logs.py b/src/common/db_logs.py
index 35478e5..50eccfa 100644
--- a/src/common/db_logs.py
+++ b/src/common/db_logs.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,30 +16,28 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see <http://www.gnu.org/licenses/>.
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
import os.path
-import re
import struct
import sys
import textwrap
import time
import typing
-import zlib
-from collections import defaultdict
-from datetime import datetime
-from typing import DefaultDict, Dict, List, Tuple, Union
+from datetime import datetime
+from typing import Dict, IO, List, Tuple, Union
-from src.common.crypto import auth_and_decrypt, encrypt_and_sign, rm_padding_bytes
+from src.common.crypto import auth_and_decrypt, encrypt_and_sign
+from src.common.encoding import b58encode, bytes_to_bool, bytes_to_timestamp, pub_key_to_short_address
from src.common.exceptions import FunctionReturn
-from src.common.encoding import bytes_to_str, str_to_bytes
-from src.common.misc import ensure_dir, get_terminal_width, ignored
-from src.common.output import c_print, clear_screen
+from src.common.misc import ensure_dir, get_terminal_width, ignored, separate_header, separate_headers
+from src.common.output import clear_screen
from src.common.statics import *
-from src.rx.windows import RxWindow
+from src.receiver.packet import PacketList
+from src.receiver.windows import RxWindow
if typing.TYPE_CHECKING:
from multiprocessing import Queue
@@ -46,76 +45,128 @@ if typing.TYPE_CHECKING:
from src.common.db_groups import GroupList
from src.common.db_masterkey import MasterKey
from src.common.db_settings import Settings
- from src.tx.windows import TxWindow
+ from src.transmitter.windows import TxWindow
+
+MsgTuple = Tuple[datetime, str, bytes, bytes, bool, bool]
-def log_writer_loop(queues: Dict[bytes, 'Queue'], unittest: bool = False) -> None:
- """Read log data from queue and write entry to log database.
+def log_writer_loop(queues: Dict[bytes, 'Queue'], # Dictionary of queues
+ settings: 'Settings', # Settings object
+ unittest: bool = False # When True, exits the loop when UNITTEST_QUEUE is no longer empty.
+ ) -> None:
+ """Write assembly packets to log database.
- When traffic masking is enabled, this process separates writing to
- logfile from sender_loop to prevent IO delays (caused by access to
- logfile) from revealing metadata about when communication takes place.
+ When traffic masking is enabled, the fact this loop is run as a
+ separate process, means the rate at which `sender_loop` outputs
+ packets is not altered by i/o delays (caused by access to the log
+ file). This hides metadata about when communication takes place,
+ even from an adversary performing timing attacks from within the
+ Networked Computer of the user.
"""
- queue = queues[LOG_PACKET_QUEUE]
+ log_packet_queue = queues[LOG_PACKET_QUEUE]
+ log_setting_queue = queues[LOG_SETTING_QUEUE]
+ traffic_masking_queue = queues[TRAFFIC_MASKING_QUEUE]
+ logfile_masking_queue = queues[LOGFILE_MASKING_QUEUE]
+
+ logging_state = False
+ logfile_masking = settings.log_file_masking
+ traffic_masking = settings.traffic_masking
while True:
with ignored(EOFError, KeyboardInterrupt):
- while queue.qsize() == 0:
+
+ while log_packet_queue.qsize() == 0:
time.sleep(0.01)
- log_packet, log_as_ph, packet, rx_account, settings, master_key = queue.get()
+ if traffic_masking_queue.qsize() != 0:
+ traffic_masking = traffic_masking_queue.get()
+ if logfile_masking_queue.qsize() != 0:
+ logfile_masking = logfile_masking_queue.get()
- if rx_account is None or not log_packet:
+ onion_pub_key, assembly_packet, log_messages, log_as_ph, master_key = log_packet_queue.get()
+
+ # Detect and ignore commands.
+ if onion_pub_key is None:
continue
- header = bytes([packet[0]])
+ # `logging_state` retains the logging setting for noise packets
+ # that do not know the log setting of the window. To prevent
+ # logging of noise packets in situation where logging has
+ # been disabled, but no new message assembly packet carrying
+ # the logging setting is received, the LOG_SETTING_QUEUE
+ # is checked for up-to-date logging setting for every
+ # received noise packet.
+ if assembly_packet[:ASSEMBLY_PACKET_HEADER_LENGTH] == P_N_HEADER:
+ if log_setting_queue.qsize() != 0:
+ logging_state = log_setting_queue.get()
+ else:
+ logging_state = log_messages
- if header == P_N_HEADER or header.isupper() or log_as_ph:
- packet = PLACEHOLDER_DATA
- if not (settings.session_traffic_masking and settings.logfile_masking):
+ # Detect if we are going to log the packet at all.
+ if not logging_state:
+ continue
+
+ # Only noise packets, whisper-messages, file key delivery
+ # packets and file assembly packets have `log_as_ph` enabled.
+ # These packets are stored as placeholder data to hide
+ # metadata revealed by the differences in log file size vs
+ # the number of sent assembly packets.
+ if log_as_ph:
+
+ # It's pointless to hide number of messages in the log
+ # file if that information is revealed by observing the
+ # Networked Computer when traffic masking is disabled.
+ if not traffic_masking:
continue
- write_log_entry(packet, rx_account, settings, master_key)
+ # If traffic masking is enabled, log file masking might
+ # still be unnecessary if the user does not care to hide
+ # the tiny amount of metadata (total amount of
+ # communication) from a physical attacker. This after
+ # all consumes 333 bytes of disk space per noise packet.
+ # So finally we check that the user has opted in for log
+ # file masking.
+ if not logfile_masking:
+ continue
+
+ assembly_packet = PLACEHOLDER_DATA
+
+ write_log_entry(assembly_packet, onion_pub_key, settings, master_key)
if unittest and queues[UNITTEST_QUEUE].qsize() != 0:
break
-def write_log_entry(assembly_packet: bytes,
- account: str,
- settings: 'Settings',
- master_key: 'MasterKey',
- origin: bytes = ORIGIN_USER_HEADER) -> None:
- """Add assembly packet to encrypted logfile.
+def write_log_entry(assembly_packet: bytes, # Assembly packet to log
+ onion_pub_key: bytes, # Onion Service public key of the associated contact
+ settings: 'Settings', # Settings object
+ master_key: 'MasterKey', # Master key object
+ origin: bytes = ORIGIN_USER_HEADER # The direction of logged packet
+ ) -> None:
+ """Add an assembly packet to the encrypted log database.
- This method of logging allows reconstruction of conversation while
- protecting the metadata about the length of messages other logfile
- formats would reveal.
+ Logging assembly packets allows reconstruction of conversation while
+ protecting metadata about the length of messages alternative log
+ file formats could reveal.
- TxM can only log sent messages. This is not useful for recalling
- conversations but serves an important role in audit of recipient's
- RxM-side logs, where malware could have substituted logged data.
+ Transmitter Program can only log sent messages. This is not useful
+ for recalling conversations but it makes it possible to audit
+ recipient's Destination Computer-side logs, where malware could have
+ substituted content of the sent messages.
- Files are not content produced or accessed by TFC, thus keeping a
- copy of file data in log database is pointless and potentially
- dangerous if user thinks they have deleted the file from their
- system. However, from the perspective of metadata, having a
- difference in number of logged packets when compared to number of
- output packets could reveal additional metadata about file
- transmission. To solve both issues, TFC only logs placeholder data.
-
- :param assembly_packet: Assembly packet to log
- :param account: Recipient's account (UID)
- :param settings: Settings object
- :param master_key: Master key object
- :param origin: Direction of logged packet
- :return: None
+ Files are not produced or accessed by TFC. Thus, keeping a copy of
+ file data in the log database is pointless and potentially dangerous,
+ because the user should be right to assume deleting the file from
+ `received_files` directory is enough. However, from the perspective
+ of metadata, a difference between the number of logged packets and
+ the number of output packets could reveal additional metadata about
+ communication. Thus, during traffic masking, if
+ `settings.log_file_masking` is enabled, instead of file data, TFC
+ writes placeholder data to the log database.
"""
- encoded_account = str_to_bytes(account)
- unix_timestamp = int(time.time())
-    timestamp_bytes = struct.pack('<L', unix_timestamp)
+def get_logfile(file_name: str) -> IO:
+ """Load file descriptor for log database."""
+ ensure_dir(DIR_USER_DATA)
+ if not os.path.isfile(file_name):
+ raise FunctionReturn("No log database available.")
+ return open(file_name, 'rb')
+
+
def access_logs(window: Union['TxWindow', 'RxWindow'],
contact_list: 'ContactList',
group_list: 'GroupList',
settings: 'Settings',
master_key: 'MasterKey',
msg_to_load: int = 0,
- export: bool = False) -> None:
+ export: bool = False
+ ) -> None:
"""\
- Decrypt 'msg_to_load' last messages from
- log database and display/export it.
- """
- ensure_dir(DIR_USER_DATA)
- file_name = f'{DIR_USER_DATA}{settings.software_operation}_logs'
- if not os.path.isfile(file_name):
- raise FunctionReturn(f"Error: Could not find log database.")
+ Load 'msg_to_load' last messages from log database and display or
+ export them.
- log_file = open(file_name, 'rb')
- ts_message_list = [] # type: List[Tuple['datetime', str, str, bytes, bool]]
- assembly_p_buf = defaultdict(list) # type: DefaultDict[str, List[bytes]]
- group_msg_id = b''
+ The default value of zero for `msg_to_load` means all messages for
+ the window will be retrieved from the log database.
+ """
+ file_name = f'{DIR_USER_DATA}{settings.software_operation}_logs'
+ log_file = get_logfile(file_name)
+ packet_list = PacketList(settings, contact_list)
+ message_log = [] # type: List[MsgTuple]
+ group_msg_id = b''
for ct in iter(lambda: log_file.read(LOG_ENTRY_LENGTH), b''):
- pt = auth_and_decrypt(ct, key=master_key.master_key)
- account = bytes_to_str(pt[0:1024])
+ plaintext = auth_and_decrypt(ct, master_key.master_key, database=file_name)
- if window.type == WIN_TYPE_CONTACT and window.uid != account:
+ onion_pub_key, timestamp, origin, assembly_packet = separate_headers(plaintext,
+ [ONION_SERVICE_PUBLIC_KEY_LENGTH,
+ TIMESTAMP_LENGTH,
+ ORIGIN_HEADER_LENGTH])
+ if window.type == WIN_TYPE_CONTACT and onion_pub_key != window.uid:
continue
-        time_stamp = datetime.fromtimestamp(struct.unpack('<L', pt[1024:1028])[0])
-               settings:        'Settings') -> None:
- """Print list of logged messages to screen."""
+def print_logs(message_list: List[MsgTuple],
+ export: bool,
+ msg_to_load: int,
+ window: Union['TxWindow', 'RxWindow'],
+ contact_list: 'ContactList',
+ group_list: 'GroupList',
+ settings: 'Settings'
+ ) -> None:
+ """Print list of logged messages to screen or export them to file."""
terminal_width = get_terminal_width()
-
- system, m_dir = dict(tx=("TxM", "sent to"),
- rx=("RxM", "to/from"),
- ut=("UtM", "to/from"))[settings.software_operation]
+ system, m_dir = {TX: ("Transmitter", "sent to"),
+ RX: ("Receiver", "to/from")}[settings.software_operation]
f_name = open(f"{system} - Plaintext log ({window.name})", 'w+') if export else sys.stdout
subset = '' if msg_to_load == 0 else f"{msg_to_load} most recent "
- title = textwrap.fill(f"Logfile of {subset}message{'' if msg_to_load == 1 else 's'} {m_dir} {window.name}", terminal_width)
+ title = textwrap.fill(f"Log file of {subset}message(s) {m_dir} {window.type} {window.name}", terminal_width)
- log_window = RxWindow(window.uid, contact_list, group_list, settings)
+ packet_list = PacketList(settings, contact_list)
+ log_window = RxWindow(window.uid, contact_list, group_list, settings, packet_list)
log_window.is_active = True
- log_window.message_log = ts_message_list
+ log_window.message_log = message_list
- if ts_message_list:
+ if message_list:
if not export:
clear_screen()
- print(title + '\n' + terminal_width * '═', file=f_name)
- log_window.redraw( file=f_name)
- print("\n", file=f_name)
+ print(title, file=f_name)
+ print(terminal_width * '═', file=f_name)
+ log_window.redraw( file=f_name)
+ print("\n", file=f_name)
else:
- raise FunctionReturn(f"No logged messages for '{window.uid}'")
+ raise FunctionReturn(f"No logged messages for {window.type} '{window.name}'.", head_clear=True)
if export:
f_name.close()
-def re_encrypt(previous_key: bytes, new_key: bytes, settings: 'Settings') -> None:
- """Re-encrypt log database with new master key."""
+def change_log_db_key(previous_key: bytes,
+ new_key: bytes,
+ settings: 'Settings'
+ ) -> None:
+ """Re-encrypt log database with a new master key."""
ensure_dir(DIR_USER_DATA)
file_name = f'{DIR_USER_DATA}{settings.software_operation}_logs'
- temp_name = f'{DIR_USER_DATA}{settings.software_operation}_logs_temp'
+ temp_name = f'{file_name}_temp'
if not os.path.isfile(file_name):
- raise FunctionReturn(f"Error: Could not find log database.")
+ raise FunctionReturn("Error: Could not find log database.")
if os.path.isfile(temp_name):
os.remove(temp_name)
@@ -260,9 +305,9 @@ def re_encrypt(previous_key: bytes, new_key: bytes, settings: 'Settings') -> Non
f_old = open(file_name, 'rb')
f_new = open(temp_name, 'ab+')
- for ct_old in iter(lambda: f_old.read(LOG_ENTRY_LENGTH), b''):
- pt_new = auth_and_decrypt(ct_old, key=previous_key)
- f_new.write(encrypt_and_sign(pt_new, key=new_key))
+ for ct in iter(lambda: f_old.read(LOG_ENTRY_LENGTH), b''):
+ pt = auth_and_decrypt(ct, key=previous_key, database=file_name)
+ f_new.write(encrypt_and_sign(pt, key=new_key))
f_old.close()
f_new.close()
@@ -271,115 +316,85 @@ def re_encrypt(previous_key: bytes, new_key: bytes, settings: 'Settings') -> Non
os.rename(temp_name, file_name)
-def remove_logs(selector: str,
- settings: 'Settings',
- master_key: 'MasterKey') -> None:
- """Remove log entries for selector (group name / account).
+def remove_logs(contact_list: 'ContactList',
+ group_list: 'GroupList',
+ settings: 'Settings',
+ master_key: 'MasterKey',
+ selector: bytes
+ ) -> None:
+ """\
+ Remove log entries for selector (public key of an account/group ID).
- If selector is a contact, all messages sent to and received from
- the contact are removed. If selector is a group, only messages
- for that group are removed.
+ If the selector is a public key, all messages (both the private
+ conversation and any associated group messages) sent to and received
+ from the associated contact are removed. If the selector is a group
+ ID, only messages for group determined by that group ID are removed.
"""
ensure_dir(DIR_USER_DATA)
- file_name = f'{DIR_USER_DATA}{settings.software_operation}_logs'
- if not os.path.isfile(file_name):
- raise FunctionReturn(f"Error: Could not find log database.")
-
- log_file = open(file_name, 'rb')
- ct_to_keep = [] # type: List[bytes]
- maybe_keep_buf = defaultdict(list) # type: DefaultDict[str, List[bytes]]
- assembly_p_buf = defaultdict(list) # type: DefaultDict[str, List[bytes]]
- removed = False
- window_type = WIN_TYPE_CONTACT if re.match(ACCOUNT_FORMAT, selector) else WIN_TYPE_GROUP
+ file_name = f'{DIR_USER_DATA}{settings.software_operation}_logs'
+ temp_name = f'{file_name}_temp'
+ log_file = get_logfile(file_name)
+ packet_list = PacketList(settings, contact_list)
+ ct_to_keep = [] # type: List[bytes]
+ removed = False
+ contact = len(selector) == ONION_SERVICE_PUBLIC_KEY_LENGTH
for ct in iter(lambda: log_file.read(LOG_ENTRY_LENGTH), b''):
- pt = auth_and_decrypt(ct, key=master_key.master_key)
- account = bytes_to_str(pt[0:1024])
+ plaintext = auth_and_decrypt(ct, master_key.master_key, database=file_name)
- if window_type == WIN_TYPE_CONTACT:
- if selector == account:
+ onion_pub_key, _, origin, assembly_packet = separate_headers(plaintext, [ONION_SERVICE_PUBLIC_KEY_LENGTH,
+ TIMESTAMP_LENGTH,
+ ORIGIN_HEADER_LENGTH])
+ if contact:
+ if onion_pub_key == selector:
removed = True
- continue
else:
ct_to_keep.append(ct)
- # To remove messages for specific group, messages in log database must
- # be assembled to reveal their group name. Assembly packets' ciphertexts are
- # buffered to 'maybe_keep_buf', from where they will be moved to 'ct_to_keep'
- # if their associated group name differs from the one selected for log removal.
- elif window_type == WIN_TYPE_GROUP:
- origin = pt[1028:1029]
- assembly_header = pt[1029:1030]
- assembly_pt = pt[1030:1325]
- key = origin.decode() + account
+ else: # Group
+ packet = packet_list.get_packet(onion_pub_key, origin, MESSAGE, log_access=True)
+ try:
+ packet.add_packet(assembly_packet, ct)
+ except FunctionReturn:
+ continue
+ if not packet.is_complete:
+ continue
- if assembly_header == M_C_HEADER:
- # Since log database is being altered anyway, also discard
- # sequences of assembly packets that end in cancel packet.
- assembly_p_buf.pop(key, None)
- maybe_keep_buf.pop(key, None)
+ _, header, message = separate_headers(packet.assemble_message_packet(), [WHISPER_FIELD_LENGTH,
+ MESSAGE_HEADER_LENGTH])
- elif assembly_header == M_L_HEADER:
- maybe_keep_buf[key] = [ct]
- assembly_p_buf[key] = [assembly_pt]
+ if header == PRIVATE_MESSAGE_HEADER:
+ ct_to_keep.extend(packet.log_ct_list)
+ packet.clear_assembly_packets()
- elif assembly_header == M_A_HEADER:
- if key not in assembly_p_buf:
- continue
- maybe_keep_buf[key].append(ct)
- assembly_p_buf[key].append(assembly_pt)
-
- elif assembly_header in [M_S_HEADER, M_E_HEADER]:
-
- if assembly_header == M_S_HEADER:
- maybe_keep_buf[key] = [ct]
- depadded = rm_padding_bytes(assembly_pt)
- decompressed = zlib.decompress(depadded)
+ elif header == GROUP_MESSAGE_HEADER:
+ group_id, _ = separate_header(message, GROUP_ID_LENGTH)
+ if group_id == selector:
+ removed = True
else:
- if key not in assembly_p_buf:
- continue
- maybe_keep_buf[key].append(ct)
- assembly_p_buf[key].append(assembly_pt)
-
- buffered_pt = b''.join(assembly_p_buf.pop(key))
- inner_layer = rm_padding_bytes(buffered_pt)
- decrypted = auth_and_decrypt(nonce_ct_tag=inner_layer[:-KEY_LENGTH],
- key =inner_layer[-KEY_LENGTH:])
- decompressed = zlib.decompress(decrypted)
-
- # The message is assembled by this point. We thus know if the
- # long message was a group message, and if it's to be removed.
- header = decompressed[:1]
-
- if header == PRIVATE_MESSAGE_HEADER:
- ct_to_keep.extend(maybe_keep_buf.pop(key))
-
- elif header == GROUP_MESSAGE_HEADER:
- group_name, *_ = [f.decode() for f in decompressed[1+GROUP_MSG_ID_LEN:].split(US_BYTE)] # type: Tuple[str, Union[str, List[str]]]
- if group_name == selector:
- removed = True
- else:
- ct_to_keep.extend(maybe_keep_buf[key])
- maybe_keep_buf.pop(key)
-
- elif header in [GROUP_MSG_INVITEJOIN_HEADER, GROUP_MSG_MEMBER_ADD_HEADER,
- GROUP_MSG_MEMBER_REM_HEADER, GROUP_MSG_EXIT_GROUP_HEADER]:
- group_name, *_ = [f.decode() for f in decompressed[1:].split(US_BYTE)]
- if group_name == selector:
- removed = True
- else:
- ct_to_keep.extend(maybe_keep_buf[key])
- maybe_keep_buf.pop(key)
+ ct_to_keep.extend(packet.log_ct_list)
+ packet.clear_assembly_packets()
log_file.close()
- with open(file_name, 'wb+') as f:
+ if os.path.isfile(temp_name):
+ os.remove(temp_name)
+
+ with open(temp_name, 'wb+') as f:
if ct_to_keep:
f.write(b''.join(ct_to_keep))
- w_type = {WIN_TYPE_GROUP: 'group', WIN_TYPE_CONTACT: 'contact'}[window_type]
+ os.remove(file_name)
+ os.rename(temp_name, file_name)
- if not removed:
- raise FunctionReturn(f"Found no log entries for {w_type} '{selector}'")
+ try:
+ name = contact_list.get_contact_by_pub_key(selector).nick \
+ if contact else group_list.get_group_by_id(selector).name
+ except StopIteration:
+ name = pub_key_to_short_address(selector) \
+ if contact else b58encode(selector)
- c_print(f"Removed log entries for {w_type} '{selector}'", head=1, tail=1)
+ action = "Removed" if removed else "Found no"
+ win_type = "contact" if contact else "group"
+
+ raise FunctionReturn(f"{action} log entries for {win_type} '{name}'.")
diff --git a/src/common/db_masterkey.py b/src/common/db_masterkey.py
index 957a1f5..9c5e4bd 100755
--- a/src/common/db_masterkey.py
+++ b/src/common/db_masterkey.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,107 +16,145 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see <http://www.gnu.org/licenses/>.
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
+import multiprocessing
import os.path
import time
-from src.common.crypto import argon2_kdf, csprng, hash_chain
-from src.common.encoding import int_to_bytes, bytes_to_int
-from src.common.exceptions import graceful_exit
+from src.common.crypto import argon2_kdf, blake2b, csprng
+from src.common.encoding import bytes_to_int, int_to_bytes
+from src.common.exceptions import CriticalError, graceful_exit
from src.common.input import pwd_prompt
-from src.common.misc import ensure_dir
-from src.common.output import c_print, clear_screen, phase, print_on_previous_line
+from src.common.misc import ensure_dir, separate_headers
+from src.common.output import clear_screen, m_print, phase, print_on_previous_line
from src.common.statics import *
class MasterKey(object):
"""\
- MasterKey object manages the 32-byte
- master key and methods related to it.
+ MasterKey object manages the 32-byte master key and methods related
+ to it. Master key is the key that protects all data written on disk.
"""
def __init__(self, operation: str, local_test: bool) -> None:
"""Create a new MasterKey object."""
- self.master_key = None # type: bytes
self.file_name = f'{DIR_USER_DATA}{operation}_login_data'
self.local_test = local_test
+ ensure_dir(DIR_USER_DATA)
try:
if os.path.isfile(self.file_name):
- self.load_master_key()
+ self.master_key = self.load_master_key()
else:
- self.new_master_key()
- except KeyboardInterrupt:
+ self.master_key = self.new_master_key()
+ except (EOFError, KeyboardInterrupt):
graceful_exit()
- def new_master_key(self) -> None:
- """Create a new master key from salt and password."""
+ def new_master_key(self) -> bytes:
+ """Create a new master key from password and salt.
+
+ The generated master key depends on a 256-bit salt and the
+ password entered by the user. Additional computational strength
+ is added by the slow hash function (Argon2d). This method
+ automatically tweaks the Argon2 memory parameter so that key
+ derivation on used hardware takes at least three seconds. The
+ more cores and the faster each core is, the more security a
+ given password provides.
+
+ The preimage resistance of BLAKE2b prevents derivation of master
+ key from the stored hash, and Argon2d ensures brute force and
+ dictionary attacks against the master password are painfully
+ slow even with GPUs/ASICs/FPGAs, as long as the password is
+ sufficiently strong.
+
+ The salt does not need additional protection as the security it
+ provides depends on the salt space in relation to the number of
+ attacked targets (i.e. if two or more physically compromised
+ systems happen to share the same salt, the attacker can speed up
+ the attack against those systems with time-memory-trade-off
+ attack).
+
+ A 256-bit salt ensures that even in a group of 4.8*10^29 users,
+ the probability that two users share the same salt is just
+ 10^(-18).*
+ * https://en.wikipedia.org/wiki/Birthday_attack
+ """
password = MasterKey.new_password()
- salt = csprng()
- rounds = ARGON2_ROUNDS
+ salt = csprng(ARGON2_SALT_LENGTH)
memory = ARGON2_MIN_MEMORY
+ parallelism = multiprocessing.cpu_count()
+ if self.local_test:
+ parallelism = max(1, parallelism // 2)
+
phase("Deriving master key", head=2)
while True:
time_start = time.monotonic()
- master_key, parallellism = argon2_kdf(password, salt, rounds, memory=memory, local_test=self.local_test)
- time_final = time.monotonic() - time_start
+ master_key = argon2_kdf(password, salt, ARGON2_ROUNDS, memory, parallelism)
+ kd_time = time.monotonic() - time_start
- if time_final > 3.0:
- self.master_key = master_key
- ensure_dir(f'{DIR_USER_DATA}/')
+ if kd_time < MIN_KEY_DERIVATION_TIME:
+ memory *= 2
+ else:
+ ensure_dir(DIR_USER_DATA)
with open(self.file_name, 'wb+') as f:
f.write(salt
- + hash_chain(self.master_key)
- + int_to_bytes(rounds)
+ + blake2b(master_key)
+ + int_to_bytes(memory)
- + int_to_bytes(parallellism))
+ + int_to_bytes(parallelism))
phase(DONE)
- break
- else:
- memory *= 2
+ return master_key
- def load_master_key(self) -> None:
- """Derive master key from password and salt."""
+ def load_master_key(self) -> bytes:
+ """Derive the master key from password and salt.
+
+ Load the salt, hash, and key derivation settings from the login
+ database. Derive the purported master key from the salt and
+ entered password. If the BLAKE2b hash of derived master key
+ matches the hash in the login database, accept the derived
+ master key.
+ """
with open(self.file_name, 'rb') as f:
data = f.read()
- salt = data[0:32]
- key_hash = data[32:64]
- rounds = bytes_to_int(data[64:72])
- memory = bytes_to_int(data[72:80])
- parallelism = bytes_to_int(data[80:88])
+
+ if len(data) != MASTERKEY_DB_SIZE:
+ raise CriticalError(f"Invalid {self.file_name} database size.")
+
+ salt, key_hash, memory_bytes, parallelism_bytes \
+ = separate_headers(data, [ARGON2_SALT_LENGTH, BLAKE2_DIGEST_LENGTH, ENCODED_INTEGER_LENGTH])
+
+ memory = bytes_to_int(memory_bytes)
+ parallelism = bytes_to_int(parallelism_bytes)
while True:
password = MasterKey.get_password()
- phase("Deriving master key", head=2, offset=16)
- purp_key, _ = argon2_kdf(password, salt, rounds, memory, parallelism)
+ phase("Deriving master key", head=2, offset=len("Password correct"))
+ purp_key = argon2_kdf(password, salt, ARGON2_ROUNDS, memory, parallelism)
- if hash_chain(purp_key) == key_hash:
- self.master_key = purp_key
- phase("Password correct", done=True)
- clear_screen(delay=0.5)
- break
+ if blake2b(purp_key) == key_hash:
+ phase("Password correct", done=True, delay=1)
+ clear_screen()
+ return purp_key
else:
- phase("Invalid password", done=True)
- print_on_previous_line(reps=5, delay=1)
+ phase("Invalid password", done=True, delay=1)
+ print_on_previous_line(reps=5)
@classmethod
def new_password(cls, purpose: str = "master password") -> str:
- """Prompt user to enter and confirm a new password."""
+ """Prompt the user to enter and confirm a new password."""
password_1 = pwd_prompt(f"Enter a new {purpose}: ")
- password_2 = pwd_prompt(f"Confirm the {purpose}: ", second=True)
+ password_2 = pwd_prompt(f"Confirm the {purpose}: ", repeat=True)
if password_1 == password_2:
return password_1
else:
- c_print("Error: Passwords did not match. Try again.", head=1, tail=1)
- time.sleep(1)
- print_on_previous_line(reps=7)
+ m_print("Error: Passwords did not match. Try again.", head=1, tail=1)
+ print_on_previous_line(delay=1, reps=7)
return cls.new_password(purpose)
@classmethod
def get_password(cls, purpose: str = "master password") -> str:
- """Prompt user to enter a password."""
+ """Prompt the user to enter a password."""
return pwd_prompt(f"Enter {purpose}: ")
diff --git a/src/common/db_onion.py b/src/common/db_onion.py
new file mode 100644
index 0000000..1ebfac8
--- /dev/null
+++ b/src/common/db_onion.py
@@ -0,0 +1,102 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import os
+import typing
+
+import nacl.signing
+
+from src.common.crypto import auth_and_decrypt, csprng, encrypt_and_sign
+from src.common.encoding import pub_key_to_onion_address, pub_key_to_short_address
+from src.common.misc import ensure_dir
+from src.common.output import phase
+from src.common.statics import *
+
+if typing.TYPE_CHECKING:
+ from src.common.db_masterkey import MasterKey
+
+
+class OnionService(object):
+ """\
+ OnionService object manages the persistent Ed25519 key used
+ to create a v3 Tor Onion Service on the Networked Computer.
+
+ The reason the key is generated by Transmitter Program on Source
+ Computer, is this ensures that even when Networked Computer runs an
+ amnesic Linux distribution like Tails, the long term private
+ signing key is not lost between sessions.
+
+ The private key for Onion Service can not be kept as protected as
+ TFC's other private message/header keys (that never leave
+ Source/Destination computer). This is however OK, as the Onion
+ Service private key is only as secure as the networked endpoint
+ anyway.
+ """
+
+ def __init__(self, master_key: 'MasterKey') -> None:
+ """Create a new OnionService object."""
+ self.master_key = master_key
+ self.file_name = f'{DIR_USER_DATA}{TX}_onion_db'
+ self.is_delivered = False
+ self.conf_code = csprng(CONFIRM_CODE_LENGTH)
+
+ ensure_dir(DIR_USER_DATA)
+ if os.path.isfile(self.file_name):
+ self.onion_private_key = self.load_onion_service_private_key()
+ else:
+ self.onion_private_key = self.new_onion_service_private_key()
+ self.store_onion_service_private_key()
+
+ assert len(self.onion_private_key) == ONION_SERVICE_PRIVATE_KEY_LENGTH
+
+ self.public_key = bytes(nacl.signing.SigningKey(seed=self.onion_private_key).verify_key)
+
+ self.user_onion_address = pub_key_to_onion_address(self.public_key)
+ self.user_short_address = pub_key_to_short_address(self.public_key)
+
+ @staticmethod
+ def new_onion_service_private_key() -> bytes:
+ """Generate a new Onion Service private key and store it."""
+ phase("Generate Tor OS key")
+ onion_private_key = csprng(ONION_SERVICE_PRIVATE_KEY_LENGTH)
+ phase(DONE)
+ return onion_private_key
+
+ def store_onion_service_private_key(self) -> None:
+ """Store Onion Service private key to an encrypted database."""
+ ct_bytes = encrypt_and_sign(self.onion_private_key, self.master_key.master_key)
+
+ ensure_dir(DIR_USER_DATA)
+ with open(self.file_name, 'wb+') as f:
+ f.write(ct_bytes)
+
+ def load_onion_service_private_key(self) -> bytes:
+ """Load the Onion Service private key from the encrypted database."""
+ with open(self.file_name, 'rb') as f:
+ ct_bytes = f.read()
+
+ onion_private_key = auth_and_decrypt(ct_bytes, self.master_key.master_key, database=self.file_name)
+
+ return onion_private_key
+
+ def new_confirmation_code(self) -> None:
+ """Generate new confirmation code for Onion Service data."""
+ self.conf_code = csprng(CONFIRM_CODE_LENGTH)
diff --git a/src/common/db_settings.py b/src/common/db_settings.py
index 70b2f9c..d3a9a5b 100755
--- a/src/common/db_settings.py
+++ b/src/common/db_settings.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,11 +16,10 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see <http://www.gnu.org/licenses/>.
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
import os
-import serial
import textwrap
import typing
@@ -29,10 +29,9 @@ from src.common.crypto import auth_and_decrypt, encrypt_and_sign
from src.common.encoding import bool_to_bytes, double_to_bytes, int_to_bytes
from src.common.encoding import bytes_to_bool, bytes_to_double, bytes_to_int
from src.common.exceptions import CriticalError, FunctionReturn
-from src.common.misc import calculate_race_condition_delay, calculate_serial_delays
-from src.common.misc import ensure_dir, get_terminal_width, round_up
from src.common.input import yes
-from src.common.output import c_print, clear_screen
+from src.common.misc import ensure_dir, get_terminal_width, round_up
+from src.common.output import clear_screen, m_print
from src.common.statics import *
if typing.TYPE_CHECKING:
@@ -43,101 +42,84 @@ if typing.TYPE_CHECKING:
class Settings(object):
"""\
- Settings object stores all user adjustable
- settings under an encrypted database.
+ Settings object stores user adjustable settings (excluding those
+ related to serial interface) under an encrypted database.
"""
def __init__(self,
- master_key: 'MasterKey',
- operation: str,
- local_test: bool,
- dd_sockets: bool) -> None:
+ master_key: 'MasterKey', # MasterKey object
+ operation: str, # Operation mode of the program (Tx or Rx)
+ local_test: bool, # Local testing setting from command-line argument
+ ) -> None:
"""Create a new Settings object.
- The settings below are altered from within the program itself.
- Changes made to the default settings are stored in encrypted
- settings database.
-
- :param master_key: MasterKey object
- :param operation: Operation mode of the program (tx or rx)
- :param local_test: Setting value passed from command-line argument
- :param dd_sockets: Setting value passed from command-line argument
+ The settings below are defaults, and are only to be altered from
+ within the program itself. Changes made to the default settings
+ are stored in the encrypted settings database, from which they
+ are loaded when the program starts.
"""
# Common settings
self.disable_gui_dialog = False
- self.max_number_of_group_members = 20
- self.max_number_of_groups = 20
- self.max_number_of_contacts = 20
- self.serial_baudrate = 19200
- self.serial_error_correction = 5
+ self.max_number_of_group_members = 50
+ self.max_number_of_groups = 50
+ self.max_number_of_contacts = 50
self.log_messages_by_default = False
self.accept_files_by_default = False
self.show_notifications_by_default = True
- self.logfile_masking = False
+ self.log_file_masking = False
# Transmitter settings
- self.txm_usb_serial_adapter = True
- self.nh_bypass_messages = True
- self.confirm_sent_files = True
- self.double_space_exits = False
- self.traffic_masking = False
- self.traffic_masking_static_delay = 2.0
- self.traffic_masking_random_delay = 2.0
- self.multi_packet_random_delay = False
- self.max_duration_of_random_delay = 10.0
+ self.nc_bypass_messages = False
+ self.confirm_sent_files = True
+ self.double_space_exits = False
+ self.traffic_masking = False
+ self.tm_static_delay = 2.0
+ self.tm_random_delay = 2.0
+
+ # Relay Settings
+ self.allow_contact_requests = True
# Receiver settings
- self.rxm_usb_serial_adapter = True
- self.new_message_notify_preview = False
- self.new_message_notify_duration = 1.0
+ self.new_message_notify_preview = False
+ self.new_message_notify_duration = 1.0
+ self.max_decompress_size = 100_000_000
self.master_key = master_key
self.software_operation = operation
self.local_testing_mode = local_test
- self.data_diode_sockets = dd_sockets
self.file_name = f'{DIR_USER_DATA}{operation}_settings'
- self.key_list = list(vars(self).keys())
- self.key_list = self.key_list[:self.key_list.index('master_key')]
+ self.all_keys = list(vars(self).keys())
+ self.key_list = self.all_keys[:self.all_keys.index('master_key')]
self.defaults = {k: self.__dict__[k] for k in self.key_list}
ensure_dir(DIR_USER_DATA)
if os.path.isfile(self.file_name):
self.load_settings()
- if operation == RX:
- # TxM is unable to send serial interface type changing command if
- # RxM looks for the type of adapter user doesn't have available.
- # Therefore setup() is run every time the Receiver program starts.
- self.setup()
else:
- self.setup()
- self.store_settings()
-
- # Following settings change only when program is restarted
- self.session_serial_error_correction = self.serial_error_correction
- self.session_serial_baudrate = self.serial_baudrate
- self.session_traffic_masking = self.traffic_masking
- self.session_usb_serial_adapter = self.rxm_usb_serial_adapter if operation == RX else self.txm_usb_serial_adapter
- self.race_condition_delay = calculate_race_condition_delay(self, txm=True)
-
- self.rxm_receive_timeout, self.txm_inter_packet_delay = calculate_serial_delays(self.session_serial_baudrate)
+ self.store_settings()
def store_settings(self) -> None:
- """Store settings to encrypted database."""
+ """Store settings to an encrypted database.
+
+ The plaintext in the encrypted database is a constant
+ length bytestring regardless of stored setting values.
+ """
attribute_list = [self.__getattribute__(k) for k in self.key_list]
- pt_bytes = b''
+ bytes_lst = []
for a in attribute_list:
if isinstance(a, bool):
- pt_bytes += bool_to_bytes(a)
+ bytes_lst.append(bool_to_bytes(a))
elif isinstance(a, int):
- pt_bytes += int_to_bytes(a)
+ bytes_lst.append(int_to_bytes(a))
elif isinstance(a, float):
- pt_bytes += double_to_bytes(a)
+ bytes_lst.append(double_to_bytes(a))
else:
raise CriticalError("Invalid attribute type in settings.")
+ pt_bytes = b''.join(bytes_lst)
ct_bytes = encrypt_and_sign(pt_bytes, self.master_key.master_key)
ensure_dir(DIR_USER_DATA)
@@ -145,11 +127,11 @@ class Settings(object):
f.write(ct_bytes)
def load_settings(self) -> None:
- """Load settings from encrypted database."""
+ """Load settings from the encrypted database."""
with open(self.file_name, 'rb') as f:
ct_bytes = f.read()
- pt_bytes = auth_and_decrypt(ct_bytes, self.master_key.master_key)
+ pt_bytes = auth_and_decrypt(ct_bytes, self.master_key.master_key, database=self.file_name)
# Update settings based on plaintext byte string content
for key in self.key_list:
@@ -158,15 +140,15 @@ class Settings(object):
if isinstance(attribute, bool):
value = bytes_to_bool(pt_bytes[0]) # type: Union[bool, int, float]
- pt_bytes = pt_bytes[BOOLEAN_SETTING_LEN:]
+ pt_bytes = pt_bytes[ENCODED_BOOLEAN_LENGTH:]
elif isinstance(attribute, int):
- value = bytes_to_int(pt_bytes[:INTEGER_SETTING_LEN])
- pt_bytes = pt_bytes[INTEGER_SETTING_LEN:]
+ value = bytes_to_int(pt_bytes[:ENCODED_INTEGER_LENGTH])
+ pt_bytes = pt_bytes[ENCODED_INTEGER_LENGTH:]
elif isinstance(attribute, float):
- value = bytes_to_double(pt_bytes[:FLOAT_SETTING_LEN])
- pt_bytes = pt_bytes[FLOAT_SETTING_LEN:]
+ value = bytes_to_double(pt_bytes[:ENCODED_FLOAT_LENGTH])
+ pt_bytes = pt_bytes[ENCODED_FLOAT_LENGTH:]
else:
raise CriticalError("Invalid data type in settings default values.")
@@ -174,34 +156,33 @@ class Settings(object):
setattr(self, key, value)
def change_setting(self,
- key: str,
- value_str: str,
+ key: str, # Name of the setting
+ value_str: str, # Value of the setting
contact_list: 'ContactList',
- group_list: 'GroupList') -> None:
+ group_list: 'GroupList'
+ ) -> None:
"""Parse, update and store new setting value."""
attribute = self.__getattribute__(key)
try:
if isinstance(attribute, bool):
- value_ = value_str.lower()
- if value_ not in ['true', 'false']:
- raise ValueError
- value = (value_ == 'true') # type: Union[bool, int, float]
+ value = dict(true=True, false=False)[value_str.lower()] # type: Union[bool, int, float]
elif isinstance(attribute, int):
value = int(value_str)
- if value < 0 or value > 2**64-1:
+ if value < 0 or value > MAX_INT:
raise ValueError
elif isinstance(attribute, float):
value = float(value_str)
if value < 0.0:
raise ValueError
+
else:
raise CriticalError("Invalid attribute type in settings.")
- except ValueError:
- raise FunctionReturn(f"Error: Invalid value '{value_str}'")
+ except (KeyError, ValueError):
+ raise FunctionReturn(f"Error: Invalid value '{value_str}'.", head_clear=True)
self.validate_key_value_pair(key, value, contact_list, group_list)
@@ -209,60 +190,52 @@ class Settings(object):
self.store_settings()
@staticmethod
- def validate_key_value_pair(key: str,
- value: Union[int, float, bool],
+ def validate_key_value_pair(key: str, # Name of the setting
+ value: Union[int, float, bool], # Value of the setting
contact_list: 'ContactList',
- group_list: 'GroupList') -> None:
- """\
- Perform further evaluation on settings
- the values of which have restrictions.
- """
+ group_list: 'GroupList'
+ ) -> None:
+ """Evaluate values for settings that have further restrictions."""
if key in ['max_number_of_group_members', 'max_number_of_groups', 'max_number_of_contacts']:
if value % 10 != 0 or value == 0:
- raise FunctionReturn("Error: Database padding settings must be divisible by 10.")
+ raise FunctionReturn("Error: Database padding settings must be divisible by 10.", head_clear=True)
if key == 'max_number_of_group_members':
min_size = round_up(group_list.largest_group())
if value < min_size:
- raise FunctionReturn(f"Error: Can't set max number of members lower than {min_size}.")
+ raise FunctionReturn(
+ f"Error: Can't set the max number of members lower than {min_size}.", head_clear=True)
if key == 'max_number_of_groups':
min_size = round_up(len(group_list))
if value < min_size:
- raise FunctionReturn(f"Error: Can't set max number of groups lower than {min_size}.")
+ raise FunctionReturn(
+ f"Error: Can't set the max number of groups lower than {min_size}.", head_clear=True)
if key == 'max_number_of_contacts':
min_size = round_up(len(contact_list))
if value < min_size:
- raise FunctionReturn(f"Error: Can't set max number of contacts lower than {min_size}.")
-
- if key == 'serial_baudrate':
- if value not in serial.Serial().BAUDRATES:
- raise FunctionReturn("Error: Specified baud rate is not supported.")
- c_print("Baud rate will change on restart.", head=1, tail=1)
-
- if key == 'serial_error_correction':
- if value < 1:
- raise FunctionReturn("Error: Invalid value for error correction ratio.")
- c_print("Error correction ratio will change on restart.", head=1, tail=1)
+ raise FunctionReturn(
+ f"Error: Can't set the max number of contacts lower than {min_size}.", head_clear=True)
if key == 'new_message_notify_duration' and value < 0.05:
- raise FunctionReturn("Error: Too small value for message notify duration.")
+ raise FunctionReturn("Error: Too small value for message notify duration.", head_clear=True)
- if key in ['rxm_usb_serial_adapter', 'txm_usb_serial_adapter']:
- c_print("Interface will change on restart.", head=1, tail=1)
+ if key in ['tm_static_delay', 'tm_random_delay']:
- if key in ['traffic_masking', 'traffic_masking_static_delay', 'traffic_masking_random_delay']:
- c_print("Traffic masking setting will change on restart.", head=1, tail=1)
+ for key_, name, min_setting in [('tm_static_delay', 'static', TRAFFIC_MASKING_MIN_STATIC_DELAY),
+ ('tm_random_delay', 'random', TRAFFIC_MASKING_MIN_RANDOM_DELAY)]:
+ if key == key_ and value < min_setting:
+ raise FunctionReturn(f"Error: Can't set {name} delay lower than {min_setting}.", head_clear=True)
- def setup(self) -> None:
- """Prompt user to enter initial settings."""
- clear_screen()
- if not self.local_testing_mode:
- if self.software_operation == TX:
- self.txm_usb_serial_adapter = yes("Does TxM use USB-to-serial/TTL adapter?", head=1, tail=1)
- else:
- self.rxm_usb_serial_adapter = yes("Does RxM use USB-to-serial/TTL adapter?", head=1, tail=1)
+ if contact_list.settings.software_operation == TX:
+ m_print(["WARNING!", "Changing traffic masking delay can make your endpoint and traffic look unique!"],
+ bold=True, head=1, tail=1)
+
+ if not yes("Proceed anyway?"):
+ raise FunctionReturn("Aborted traffic masking setting change.", head_clear=True)
+
+ m_print("Traffic masking setting will change on restart.", head=1, tail=1)
def print_settings(self) -> None:
"""\
@@ -271,32 +244,30 @@ class Settings(object):
"""
desc_d = {
# Common settings
- "disable_gui_dialog": "True replaces Tkinter dialogs with CLI prompts",
- "max_number_of_group_members": "Max members in group (TxM/RxM must have the same value)",
- "max_number_of_groups": "Max number of groups (TxM/RxM must have the same value)",
- "max_number_of_contacts": "Max number of contacts (TxM/RxM must have the same value)",
- "serial_baudrate": "The speed of serial interface in bauds per second",
- "serial_error_correction": "Number of byte errors serial datagrams can recover from",
+ "disable_gui_dialog": "True replaces GUI dialogs with CLI prompts",
+ "max_number_of_group_members": "Maximum number of members in a group",
+ "max_number_of_groups": "Maximum number of groups",
+ "max_number_of_contacts": "Maximum number of contacts",
"log_messages_by_default": "Default logging setting for new contacts/groups",
"accept_files_by_default": "Default file reception setting for new contacts",
"show_notifications_by_default": "Default message notification setting for new contacts/groups",
- "logfile_masking": "True hides real size of logfile during traffic masking",
+ "log_file_masking": "True hides real size of log file during traffic masking",
# Transmitter settings
- "txm_usb_serial_adapter": "False uses system's integrated serial interface",
- "nh_bypass_messages": "False removes NH bypass interrupt messages",
+ "nc_bypass_messages": "False removes Networked Computer bypass interrupt messages",
"confirm_sent_files": "False sends files without asking for confirmation",
"double_space_exits": "True exits, False clears screen with double space command",
"traffic_masking": "True enables traffic masking to hide metadata",
- "traffic_masking_static_delay": "Static delay between traffic masking packets",
- "traffic_masking_random_delay": "Max random delay for traffic masking timing obfuscation",
- "multi_packet_random_delay": "True adds IM server spam guard evading delay",
- "max_duration_of_random_delay": "Maximum time for random spam guard evasion delay",
+ "tm_static_delay": "The static delay between traffic masking packets",
+ "tm_random_delay": "Max random delay for traffic masking timing obfuscation",
+
+ # Relay settings
+ "allow_contact_requests": "When False, does not show TFC contact requests",
# Receiver settings
- "rxm_usb_serial_adapter": "False uses system's integrated serial interface",
- "new_message_notify_preview": "When True, shows preview of received message",
- "new_message_notify_duration": "Number of seconds new message notification appears"}
+ "new_message_notify_preview": "When True, shows a preview of the received message",
+ "new_message_notify_duration": "Number of seconds new message notification appears",
+ "max_decompress_size": "Max size Receiver accepts when decompressing file"}
# Columns
c1 = ['Setting name']
@@ -304,38 +275,40 @@ class Settings(object):
c3 = ['Default value']
c4 = ['Description']
- terminal_width = get_terminal_width()
- desc_line_indent = 64
+ terminal_width = get_terminal_width()
+ description_indent = 64
- if terminal_width < desc_line_indent + 1:
- raise FunctionReturn("Error: Screen width is too small.")
+ if terminal_width < description_indent + 1:
+ raise FunctionReturn("Error: Screen width is too small.", head_clear=True)
+ # Populate columns with setting data
for key in self.defaults:
c1.append(key)
c2.append(str(self.__getattribute__(key)))
c3.append(str(self.defaults[key]))
description = desc_d[key]
- wrapper = textwrap.TextWrapper(width=max(1, (terminal_width - desc_line_indent)))
+ wrapper = textwrap.TextWrapper(width=max(1, (terminal_width - description_indent)))
desc_lines = wrapper.fill(description).split('\n')
desc_string = desc_lines[0]
- for l in desc_lines[1:]:
- desc_string += '\n' + desc_line_indent * ' ' + l
+ for line in desc_lines[1:]:
+ desc_string += '\n' + description_indent * ' ' + line
if len(desc_lines) > 1:
desc_string += '\n'
c4.append(desc_string)
- lst = []
- for name, current, default, description in zip(c1, c2, c3, c4):
- lst.append('{0:{1}} {2:{3}} {4:{5}} {6}'.format(
- name, max(len(v) for v in c1) + SETTINGS_INDENT,
- current, max(len(v) for v in c2) + SETTINGS_INDENT,
- default, max(len(v) for v in c3) + SETTINGS_INDENT,
- description))
+ # Calculate column widths
+ c1w, c2w, c3w = [max(len(v) for v in column) + SETTINGS_INDENT for column in [c1, c2, c3]]
- lst.insert(1, get_terminal_width() * '─')
+ # Align columns by adding whitespace between fields of each line
+ lines = [f'{f1:{c1w}} {f2:{c2w}} {f3:{c3w}} {f4}' for f1, f2, f3, f4 in zip(c1, c2, c3, c4)]
+
+ # Add a terminal-wide line between the column names and the data
+ lines.insert(1, get_terminal_width() * '─')
+
+ # Print the settings
clear_screen()
- print('\n' + '\n'.join(lst) + '\n')
+ print('\n' + '\n'.join(lines))
diff --git a/src/common/encoding.py b/src/common/encoding.py
index 6d8a44f..7c64351 100755
--- a/src/common/encoding.py
+++ b/src/common/encoding.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,36 +16,43 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
+import base64
import hashlib
import struct
-from typing import List, Union
+from datetime import datetime
+from typing import List, Union
from src.common.statics import *
def sha256d(message: bytes) -> bytes:
"""Chain SHA256 twice for Bitcoin WIF format."""
- return hashlib.sha256(hashlib.sha256(message).digest()).digest()
+ return hashlib.sha256(
+ hashlib.sha256(message).digest()
+ ).digest()
-def b58encode(byte_string: bytes, file_key: bool = False) -> str:
- """Encode byte string to checksummed Base58 string.
+def b58encode(byte_string: bytes, public_key: bool = False) -> str:
+ """Encode byte string to check-summed Base58 string.
- This format is exactly the same as Bitcoin's Wallet
- Import Format for mainnet/testnet addresses.
+ This format is exactly the same as Bitcoin's Wallet Import Format
+ (WIF) for mainnet and testnet addresses.
https://en.bitcoin.it/wiki/Wallet_import_format
"""
b58_alphabet = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
- net_id = b'\xef' if file_key else b'\x80'
+
+ mainnet_header = b'\x80'
+ testnet_header = b'\xef'
+ net_id = testnet_header if public_key else mainnet_header
byte_string = net_id + byte_string
- byte_string += sha256d(byte_string)[:B58_CHKSUM_LEN]
+ byte_string += sha256d(byte_string)[:B58_CHECKSUM_LENGTH]
- orig_len = len(byte_string)
+ original_len = len(byte_string)
byte_string = byte_string.lstrip(b'\x00')
new_len = len(byte_string)
@@ -58,15 +66,19 @@ def b58encode(byte_string: bytes, file_key: bool = False) -> str:
acc, mod = divmod(acc, 58)
encoded += b58_alphabet[mod]
- return (encoded + (orig_len - new_len) * '1')[::-1]
+ return (encoded + (original_len - new_len) * b58_alphabet[0])[::-1]
-def b58decode(string: str, file_key: bool = False) -> bytes:
- """Decode a Base58-encoded string and verify checksum."""
+def b58decode(string: str, public_key: bool = False) -> bytes:
+ """Decode a Base58-encoded string and verify the checksum."""
b58_alphabet = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
+ mainnet_header = b'\x80'
+ testnet_header = b'\xef'
+ net_id = testnet_header if public_key else mainnet_header
+
orig_len = len(string)
- string = string.lstrip('1')
+ string = string.lstrip(b58_alphabet[0])
new_len = len(string)
p, acc = 1, 0
@@ -81,65 +93,103 @@ def b58decode(string: str, file_key: bool = False) -> bytes:
decoded_ = (bytes(decoded) + (orig_len - new_len) * b'\x00')[::-1] # type: Union[bytes, List[int]]
- if sha256d(bytes(decoded_[:-B58_CHKSUM_LEN]))[:B58_CHKSUM_LEN] != decoded_[-B58_CHKSUM_LEN:]:
+ if sha256d(bytes(decoded_[:-B58_CHECKSUM_LENGTH]))[:B58_CHECKSUM_LENGTH] != decoded_[-B58_CHECKSUM_LENGTH:]:
raise ValueError
- net_id = b'\xef' if file_key else b'\x80'
- if decoded_[:1] != net_id:
+ if decoded_[:len(net_id)] != net_id:
raise ValueError
- return bytes(decoded_[1:-B58_CHKSUM_LEN])
+ return bytes(decoded_[len(net_id):-B58_CHECKSUM_LENGTH])
+
+
+def b85encode(data: bytes) -> str:
+ """Encode byte string with base85.
+
+ The encoding is slightly more inefficient, but allows variable
+ length transmissions when used together with a delimiter char.
+ """
+ return base64.b85encode(data).decode()
+
+
+def b10encode(fingerprint: bytes) -> str:
+ """Encode bytestring in base10.
+
+ Base10 encoding is used in fingerprint comparison to allow distinct
+ communication:
+
+ Base64 has 75% efficiency, but encoding is bad as the user might
+ confuse uppercase I with lower case l, 0 with O, etc.
+
+ Base58 has 73% efficiency and removes the problem of Base64
+ explained above, but works only when manually typing
+ strings because the user has to take time to explain which
+ letters were capitalized etc.
+
+ Base16 has 50% efficiency and removes the capitalization problem
+ with Base58 but the choice is bad as '3', 'b', 'c', 'd'
+ and 'e' are hard to distinguish in the English language
+ (fingerprints are usually read aloud over off band call).
+
+ Base10 has 41% efficiency but natural languages have evolved in a
+ way that makes a clear distinction between the way different numbers
+ are pronounced: reading them is faster and less error-prone.
+ Compliments to Signal/WA developers for discovering this.
+ https://signal.org/blog/safety-number-updates/
+ """
+ return str(int(fingerprint.hex(), base=16))
# Database unicode string padding
def unicode_padding(string: str) -> str:
- """Pad unicode string to 255 chars.
+ """Pad Unicode string to 255 chars.
- Database fields are padded with unicode chars and then encoded
+ Database fields are padded with Unicode chars and then encoded
with UTF-32 to hide the metadata about plaintext field length.
-
- :param string: String to be padded
- :return: Padded string
"""
- assert len(string) < PADDING_LEN
+ assert len(string) < PADDING_LENGTH
- length = PADDING_LEN - (len(string) % PADDING_LEN)
+ length = PADDING_LENGTH - (len(string) % PADDING_LENGTH)
string += length * chr(length)
- assert len(string) == PADDING_LEN
+ assert len(string) == PADDING_LENGTH
return string
def rm_padding_str(string: str) -> str:
- """Remove padding from plaintext.
-
- :param string: String from which padding is removed
- :return: String without padding
- """
+ """Remove padding from plaintext."""
return string[:-ord(string[-1:])]
# Database constant length encoding
+def onion_address_to_pub_key(account: str) -> bytes:
+ """Encode TFC account to a public key byte string.
+
+ The public key is the most compact possible representation of a TFC
+ account, so it is useful when storing the address into databases.
+ """
+ return base64.b32decode(account.upper())[:-(ONION_ADDRESS_CHECKSUM_LENGTH + ONION_SERVICE_VERSION_LENGTH)]
+
+
def bool_to_bytes(boolean: bool) -> bytes:
- """Convert boolean value to 1-byte byte string."""
+ """Convert boolean value to a 1-byte byte string."""
return bytes([boolean])
def int_to_bytes(integer: int) -> bytes:
- """Convert integer to 8-byte byte string."""
+ """Convert integer to an 8-byte byte string."""
return struct.pack('!Q', integer)
def double_to_bytes(double_: float) -> bytes:
- """Convert double to 8-byte byte string."""
+ """Convert double to an 8-byte byte string."""
return struct.pack('d', double_)
def str_to_bytes(string: str) -> bytes:
- """Pad string with unicode chars and encode it with UTF-32.
+ """Pad string with Unicode chars and encode it with UTF-32.
Length of padded string is 255 * 4 + 4 (BOM) = 1024 bytes.
"""
@@ -148,26 +198,53 @@ def str_to_bytes(string: str) -> bytes:
# Decoding
+def pub_key_to_onion_address(public_key: bytes) -> str:
+ """Decode public key byte string to TFC account.
+
+ This decoding is exactly the same process as conversion of Ed25519
+ public key of v3 Onion Service into service ID:
+ https://gitweb.torproject.org/torspec.git/tree/rend-spec-v3.txt#n2019
+ """
+ checksum = hashlib.sha3_256(ONION_ADDRESS_CHECKSUM_ID
+ + public_key
+ + ONION_SERVICE_VERSION
+ ).digest()[:ONION_ADDRESS_CHECKSUM_LENGTH]
+
+ return base64.b32encode(public_key + checksum + ONION_SERVICE_VERSION).lower().decode()
+
+
+def pub_key_to_short_address(public_key: bytes) -> str:
+ """Decode public key to TFC account and truncate it."""
+ return pub_key_to_onion_address(public_key)[:TRUNC_ADDRESS_LENGTH]
+
+
def bytes_to_bool(byte_string: Union[bytes, int]) -> bool:
- """Convert 1-byte byte string to boolean value."""
+ """Convert 1-byte byte string to a boolean value."""
if isinstance(byte_string, bytes):
byte_string = byte_string[0]
return bool(byte_string)
def bytes_to_int(byte_string: bytes) -> int:
- """Convert 8-byte byte string to integer."""
- return struct.unpack('!Q', byte_string)[0]
+ """Convert 8-byte byte string to an integer."""
+ int_format = struct.unpack('!Q', byte_string)[0] # type: int
+ return int_format
def bytes_to_double(byte_string: bytes) -> float:
"""Convert 8-byte byte string to double."""
- return struct.unpack('d', byte_string)[0]
+ float_format = struct.unpack('d', byte_string)[0] # type: float
+ return float_format
def bytes_to_str(byte_string: bytes) -> str:
- """Convert 1024-byte byte string to unicode string.
+ """Convert 1024-byte byte string to Unicode string.
- Decode byte string with UTF-32 and remove unicode padding.
+ Decode byte string with UTF-32 and remove Unicode padding.
"""
return rm_padding_str(byte_string.decode('utf-32'))
+
+
+def bytes_to_timestamp(byte_string: bytes) -> datetime:
+    """Convert 4-byte byte string to datetime object."""
+    return datetime.fromtimestamp(struct.unpack('<L', byte_string)[0])
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
import inspect
import sys
-import time
import typing
from datetime import datetime
+from typing import Optional
-from src.common.output import c_print, clear_screen
+from src.common.output import clear_screen, m_print
+from src.common.statics import *
if typing.TYPE_CHECKING:
- from src.rx.windows import RxWindow
+ from src.receiver.windows import RxWindow
class CriticalError(Exception):
- """A variety of errors during which TFC should gracefully exit."""
+ """A severe exception that requires TFC to gracefully exit."""
- def __init__(self, error_message: str) -> None:
- graceful_exit("Critical error in function '{}':\n{}"
- .format(inspect.stack()[1][3], error_message), clear=False, exit_code=1)
+ def __init__(self, error_message: str, exit_code: int = 1) -> None:
+ """A severe exception that requires TFC to gracefully exit."""
+ graceful_exit(f"Critical error in function '{inspect.stack()[1][3]}':\n{error_message}",
+ clear=False, exit_code=exit_code)
class FunctionReturn(Exception):
@@ -44,33 +47,40 @@ class FunctionReturn(Exception):
def __init__(self,
message: str,
- output: bool = True,
- delay: float = 0,
- window: 'RxWindow' = None,
- head: int = 1,
- tail: int = 1,
- head_clear: bool = False,
- tail_clear: bool = False) -> None:
+ window: Optional['RxWindow'] = None, # The window to include the message in
+ output: bool = True, # When False, doesn't print message when adding it to window
+ bold: bool = False, # When True, prints the message in bold
+ head_clear: bool = False, # When True, clears the screen before printing message
+ tail_clear: bool = False, # When True, clears the screen after message (needs delay)
+ delay: float = 0, # The delay before continuing
+ head: int = 1, # The number of new-lines to print before the message
+ tail: int = 1, # The number of new-lines to print after message
+ ) -> None:
+ """Print return message and return to exception handler function."""
self.message = message
if window is None:
if output:
- if head_clear:
- clear_screen()
- c_print(self.message, head=head, tail=tail)
- time.sleep(delay)
- if tail_clear:
- clear_screen()
+ m_print(self.message,
+ bold=bold,
+ head_clear=head_clear,
+ tail_clear=tail_clear,
+ delay=delay,
+ head=head,
+ tail=tail)
else:
window.add_new(datetime.now(), self.message, output=output)
-def graceful_exit(message: str ='', clear: bool = True, exit_code: int = 0) -> None:
+def graceful_exit(message: str ='', # Exit message to print
+ clear: bool = True, # When False, does not clear screen before printing message
+ exit_code: int = 0 # Value returned to parent process
+ ) -> None:
"""Display a message and exit TFC."""
if clear:
clear_screen()
if message:
print('\n' + message)
- print("\nExiting TFC.\n")
+ print(f"\nExiting {TFC}.\n")
sys.exit(exit_code)
diff --git a/src/common/gateway.py b/src/common/gateway.py
index 2f0ea54..63760b7 100644
--- a/src/common/gateway.py
+++ b/src/common/gateway.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,166 +16,567 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
+import hashlib
+import json
import multiprocessing.connection
+import os
import os.path
import serial
import socket
+import textwrap
import time
import typing
-from serial.serialutil import SerialException
-from typing import Any, Dict, Union
+from datetime import datetime
+from typing import Dict, Optional, Tuple, Union
-from src.common.exceptions import CriticalError, graceful_exit
-from src.common.misc import ignored
-from src.common.output import phase, print_on_previous_line
-from src.common.statics import *
+from serial.serialutil import SerialException
+
+from src.common.exceptions import CriticalError, FunctionReturn, graceful_exit
+from src.common.input import yes
+from src.common.misc import calculate_race_condition_delay, ensure_dir, ignored, get_terminal_width
+from src.common.misc import separate_trailer
+from src.common.output import m_print, phase, print_on_previous_line
+from src.common.reed_solomon import ReedSolomonError, RSCodec
+from src.common.statics import *
if typing.TYPE_CHECKING:
- from multiprocessing import Queue
- from src.common.db_settings import Settings
+ from multiprocessing import Queue
def gateway_loop(queues: Dict[bytes, 'Queue'],
gateway: 'Gateway',
- unittest: bool = False) -> None:
- """Loop that loads data from NH side gateway to RxM."""
+ unittest: bool = False
+ ) -> None:
+ """Load data from serial interface or socket into a queue.
+
+ Also place the current timestamp to queue to be delivered to the
+ Receiver Program. The timestamp is used both to notify when the sent
+ message was received by Relay Program, and as part of a commitment
+ scheme: For more information, see the section on "Covert channel
+ based on user interaction" under TFC's Security Design wiki article.
+ """
queue = queues[GATEWAY_QUEUE]
while True:
with ignored(EOFError, KeyboardInterrupt):
- queue.put(gateway.read())
+ queue.put((datetime.now(), gateway.read()))
if unittest:
break
class Gateway(object):
- """Gateway object is a wrapper for interfaces that connect TxM/RxM with NH."""
+ """\
+ Gateway object is a wrapper for interfaces that connect
+ Source/Destination Computer with the Networked computer.
+ """
- def __init__(self, settings: 'Settings') -> None:
+ def __init__(self,
+ operation: str,
+ local_test: bool,
+ dd_sockets: bool
+ ) -> None:
"""Create a new Gateway object."""
- self.settings = settings
- self.interface = None # type: Union[Any]
+ self.settings = GatewaySettings(operation, local_test, dd_sockets)
+ self.tx_serial = None # type: serial.Serial
+ self.rx_serial = None # type: serial.Serial
+ self.rx_socket = None # type: Optional[multiprocessing.connection.Connection]
+ self.tx_socket = None # type: Optional[multiprocessing.connection.Connection]
- # Set True when serial adapter is initially found so that further
- # serial interface searches know to announce disconnection.
+ # Initialize Reed-Solomon erasure code handler
+ self.rs = RSCodec(2 * self.settings.session_serial_error_correction)
+
+ # Set True when the serial interface is initially found so that
+ # further interface searches know to announce disconnection.
self.init_found = False
if self.settings.local_testing_mode:
- if self.settings.software_operation == TX:
+ if self.settings.software_operation in [TX, NC]:
self.client_establish_socket()
- else:
+ if self.settings.software_operation in [NC, RX]:
self.server_establish_socket()
else:
self.establish_serial()
- def write(self, packet: bytes) -> None:
- """Output data via socket/serial interface."""
- if self.settings.local_testing_mode:
- self.interface.send(packet)
+ def establish_serial(self) -> None:
+ """Create a new Serial object.
+
+ By setting the Serial object's timeout to 0, the method
+ `Serial().read_all()` will return 0..N bytes where N is the serial
+ interface buffer size (496 bytes for FTDI FT232R for example).
+ This is not enough for large packets. However, in this case,
+ `read_all` will return
+ a) immediately when the buffer is full
+ b) if no bytes are received during the time it would take
+ to transmit the next byte of the datagram.
+
+ This type of behaviour allows us to read 0..N bytes from the
+ serial interface at a time, and add them to a bytearray buffer.
+
+ In our implementation below, if the receiver side stops
+ receiving data when it calls `read_all`, it starts a timer that
+ is evaluated with every subsequent call of `read_all` that
+ returns an empty string. If the timer exceeds the
+ `settings.rx_receive_timeout` value (twice the time it takes to
+ send the next byte with given baud rate), the gateway object
+ will return the received packet.
+
+ The timeout timer is triggered intentionally by the transmitter
+ side Gateway object, that after each transmission sleeps for
+ `settings.tx_inter_packet_delay` seconds. This value is set to
+ twice the length of `settings.rx_receive_timeout`, or four times
+ the time it takes to send one byte with given baud rate.
+ """
+ try:
+ serial_interface = self.search_serial_interface()
+ baudrate = self.settings.session_serial_baudrate
+ self.tx_serial = self.rx_serial = serial.Serial(serial_interface, baudrate, timeout=0)
+ except SerialException:
+ raise CriticalError("SerialException. Ensure $USER is in the dialout group by restarting this computer.")
+
+ def write(self, orig_packet: bytes) -> None:
+ """Add error correction data and output data via socket/serial interface.
+
+ After outputting the packet via serial, sleep long enough to
+ trigger the Rx-side timeout timer, or if local testing is
+ enabled, add slight delay to simulate that introduced by the
+ serial interface.
+ """
+ packet = self.add_error_correction(orig_packet)
+
+ if self.settings.local_testing_mode and self.tx_socket is not None:
+ try:
+ self.tx_socket.send(packet)
+ time.sleep(LOCAL_TESTING_PACKET_DELAY)
+ except BrokenPipeError:
+ raise CriticalError("Relay IPC server disconnected.", exit_code=0)
else:
try:
- self.interface.write(packet)
- self.interface.flush()
- time.sleep(self.settings.txm_inter_packet_delay)
+ self.tx_serial.write(packet)
+ self.tx_serial.flush()
+ time.sleep(self.settings.tx_inter_packet_delay)
except SerialException:
self.establish_serial()
- self.write(packet)
+ self.write(orig_packet)
def read(self) -> bytes:
- """Read data via socket/serial interface."""
- if self.settings.local_testing_mode:
+ """Read data via socket/serial interface.
+
+ Read 0..N bytes from serial interface, where N is the buffer
+ size of the serial interface. Once `read_buffer` has data, and
+ the interface hasn't returned data long enough for the timer to
+ exceed the timeout value, return received data.
+ """
+ if self.settings.local_testing_mode and self.rx_socket is not None:
while True:
try:
- return self.interface.recv()
+ packet = self.rx_socket.recv() # type: bytes
+ return packet
except KeyboardInterrupt:
pass
except EOFError:
- raise CriticalError("IPC client disconnected.")
+ raise CriticalError("Relay IPC client disconnected.", exit_code=0)
else:
while True:
try:
start_time = 0.0
read_buffer = bytearray()
while True:
- read = self.interface.read(1000)
+ read = self.rx_serial.read_all()
if read:
start_time = time.monotonic()
read_buffer.extend(read)
else:
if read_buffer:
delta = time.monotonic() - start_time
- if delta > self.settings.rxm_receive_timeout:
+ if delta > self.settings.rx_receive_timeout:
return bytes(read_buffer)
else:
- time.sleep(0.001)
+ time.sleep(0.0001)
- except KeyboardInterrupt:
+ except (EOFError, KeyboardInterrupt):
pass
- except SerialException:
+ except (OSError, SerialException):
self.establish_serial()
- self.read()
- def server_establish_socket(self) -> None:
- """Establish IPC server."""
- listener = multiprocessing.connection.Listener(('localhost', RXM_LISTEN_SOCKET))
- self.interface = listener.accept()
+ def add_error_correction(self, packet: bytes) -> bytes:
+ """Add error correction to packet that will be output.
- def client_establish_socket(self) -> None:
- """Establish IPC client."""
- try:
- phase("Waiting for connection to NH", offset=11)
- while True:
- try:
- socket_number = TXM_DD_LISTEN_SOCKET if self.settings.data_diode_sockets else NH_LISTEN_SOCKET
- self.interface = multiprocessing.connection.Client(('localhost', socket_number))
- phase("Established", done=True)
- break
- except socket.error:
- time.sleep(0.1)
+ If the error correction setting is set to 1 or higher, TFC adds
+ Reed-Solomon erasure codes to detect and correct errors during
+ transmission over the serial interface. For more information on
+ Reed-Solomon, see
+ https://en.wikipedia.org/wiki/Reed%E2%80%93Solomon_error_correction
+ https://www.cs.cmu.edu/~guyb/realworld/reedsolomon/reed_solomon_codes.html
- except KeyboardInterrupt:
- graceful_exit()
+ If error correction is set to 0, errors are only detected. This
+ is done by using a BLAKE2b based, 128-bit checksum.
+ """
+ if self.settings.session_serial_error_correction:
+ packet = self.rs.encode(packet)
+ else:
+ packet = packet + hashlib.blake2b(packet, digest_size=PACKET_CHECKSUM_LENGTH).digest()
+ return packet
- def establish_serial(self) -> None:
- """Create a new Serial object."""
- try:
- serial_nh = self.search_serial_interface()
- self.interface = serial.Serial(serial_nh, self.settings.session_serial_baudrate, timeout=0)
- except SerialException:
- raise CriticalError("SerialException. Ensure $USER is in the dialout group.")
+ def detect_errors(self, packet: bytes) -> bytes:
+ """Handle received packet error detection and/or correction."""
+ if self.settings.session_serial_error_correction:
+ try:
+ packet, _ = self.rs.decode(packet)
+ return bytes(packet)
+ except ReedSolomonError:
+ raise FunctionReturn("Error: Reed-Solomon failed to correct errors in the received packet.", bold=True)
+ else:
+ packet, checksum = separate_trailer(packet, PACKET_CHECKSUM_LENGTH)
+ if hashlib.blake2b(packet, digest_size=PACKET_CHECKSUM_LENGTH).digest() != checksum:
+ raise FunctionReturn("Warning! Received packet had an invalid checksum.", bold=True)
+ return packet
def search_serial_interface(self) -> str:
- """Search for serial interface."""
+ """Search for a serial interface."""
if self.settings.session_usb_serial_adapter:
search_announced = False
if not self.init_found:
- print_on_previous_line()
- phase("Searching for USB-to-serial interface")
+ phase("Searching for USB-to-serial interface", offset=len('Found'))
while True:
- time.sleep(0.1)
- for f in sorted(os.listdir('/dev')):
+ for f in sorted(os.listdir('/dev/')):
if f.startswith('ttyUSB'):
if self.init_found:
- time.sleep(1.5)
+ time.sleep(1)
phase('Found', done=True)
if self.init_found:
print_on_previous_line(reps=2)
self.init_found = True
return f'/dev/{f}'
else:
- if not search_announced:
- if self.init_found:
- phase("Serial adapter disconnected. Waiting for interface", head=1)
+ time.sleep(0.1)
+ if self.init_found and not search_announced:
+ phase("Serial adapter disconnected. Waiting for interface", head=1, offset=len('Found'))
search_announced = True
else:
- f = 'ttyS0'
- if f in sorted(os.listdir('/dev/')):
- return f'/dev/{f}'
- raise CriticalError(f"Error: /dev/{f} was not found.")
+ if self.settings.built_in_serial_interface in sorted(os.listdir('/dev/')):
+ return f'/dev/{self.settings.built_in_serial_interface}'
+ raise CriticalError(f"Error: /dev/{self.settings.built_in_serial_interface} was not found.")
+
+ # Local testing
+
+ def server_establish_socket(self) -> None:
+ """Initialize the receiver (IPC server).
+
+ The multiprocessing connection during local test does not
+ utilize authentication keys* because a MITM-attack against the
+ connection requires endpoint compromise, and in such situation,
+ MITM attack is not nearly as effective as key/screen logging or
+ RAM dump.
+
+ * https://docs.python.org/3/library/multiprocessing.html#authentication-keys
+
+ Similar to the case of standard mode of operation, all sensitive
+ data that passes through the socket/serial interface and Relay
+ Program is encrypted. A MITM attack between the sockets could of
+ course be used to e.g. inject public keys, but like with all key
+ exchanges, that would only work if the user neglects fingerprint
+ verification.
+
+ Another reason why the authentication key is useless, is the key
+ needs to be pre-shared. This means there's two ways to share it:
+
+ 1) Hard-code the key to source file from where malware could
+ read it.
+
+ 2) Force the user to manually copy the PSK from one program
+ to another. This would change the workflow that the local
+ test configuration tries to simulate.
+
+ To conclude, the local test configuration should never be used
+ under a threat model where endpoint security is of importance.
+ """
+ try:
+ socket_number = RP_LISTEN_SOCKET if self.settings.software_operation == NC else DST_LISTEN_SOCKET
+ listener = multiprocessing.connection.Listener((LOCALHOST, socket_number))
+ self.rx_socket = listener.accept()
+ except KeyboardInterrupt:
+ graceful_exit()
+
+ def client_establish_socket(self) -> None:
+ """Initialize the transmitter (IPC client)."""
+ try:
+ target = RXP if self.settings.software_operation == NC else RP
+ phase(f"Connecting to {target}")
+ while True:
+ try:
+ if self.settings.software_operation == TX:
+ socket_number = SRC_DD_LISTEN_SOCKET if self.settings.data_diode_sockets else RP_LISTEN_SOCKET
+ else:
+ socket_number = DST_DD_LISTEN_SOCKET if self.settings.data_diode_sockets else DST_LISTEN_SOCKET
+
+ try:
+ self.tx_socket = multiprocessing.connection.Client((LOCALHOST, socket_number))
+ except ConnectionRefusedError:
+ time.sleep(0.1)
+ continue
+
+ phase(DONE)
+ break
+
+ except socket.error:
+ time.sleep(0.1)
+
+ except KeyboardInterrupt:
+ graceful_exit()
+
+
+class GatewaySettings(object):
+ """\
+ Gateway settings store settings for serial interface in an
+ unencrypted JSON database.
+
+ The reason these settings are in plaintext is it protects the system
+ from inconsistent state of serial settings: Would the user reconfigure
+ their serial settings, and would the setting altering packet to
+ Receiver Program drop, Relay Program could in some situations no
+ longer communicate with the Receiver Program.
+
+ Serial interface settings are not sensitive enough to justify the
+ inconvenience of encrypting the setting values.
+ """
+
+ def __init__(self,
+ operation: str,
+ local_test: bool,
+ dd_sockets: bool
+ ) -> None:
+ """Create a new Settings object.
+
+ The settings below are altered from within the program itself.
+ Changes made to the default settings are stored in the JSON
+ file under $HOME/tfc/user_data from where, if needed, they can
+ be manually altered by the user.
+ """
+ self.serial_baudrate = 19200
+ self.serial_error_correction = 5
+ self.use_serial_usb_adapter = True
+ self.built_in_serial_interface = 'ttyS0'
+
+ self.software_operation = operation
+ self.local_testing_mode = local_test
+ self.data_diode_sockets = dd_sockets
+
+ self.all_keys = list(vars(self).keys())
+ self.key_list = self.all_keys[:self.all_keys.index('software_operation')]
+ self.defaults = {k: self.__dict__[k] for k in self.key_list}
+
+ self.file_name = f'{DIR_USER_DATA}{self.software_operation}_serial_settings.json'
+
+ ensure_dir(DIR_USER_DATA)
+ if os.path.isfile(self.file_name):
+ self.load_settings()
+ else:
+ self.setup()
+ self.store_settings()
+
+ self.session_serial_baudrate = self.serial_baudrate
+ self.session_serial_error_correction = self.serial_error_correction
+ self.session_usb_serial_adapter = self.use_serial_usb_adapter
+
+ self.tx_inter_packet_delay, self.rx_receive_timeout = self.calculate_serial_delays(self.session_serial_baudrate)
+
+ self.race_condition_delay = calculate_race_condition_delay(self.session_serial_error_correction,
+ self.serial_baudrate)
+
+ @classmethod
+ def calculate_serial_delays(cls, baud_rate: int) -> Tuple[float, float]:
+ """Calculate the inter-packet delay and receive timeout.
+
+ Although this calculation mainly depends on the baud rate, a
+ minimal value will be set for rx_receive_timeout. This is to
+ ensure high baud rates do not cause issues by having shorter
+ delays than what the `time.sleep()` resolution allows.
+ """
+ bytes_per_sec = baud_rate / BAUDS_PER_BYTE
+ byte_travel_t = 1 / bytes_per_sec
+
+ rx_receive_timeout = max(2 * byte_travel_t, SERIAL_RX_MIN_TIMEOUT)
+ tx_inter_packet_delay = 2 * rx_receive_timeout
+
+ return tx_inter_packet_delay, rx_receive_timeout
+
+ def setup(self) -> None:
+ """Prompt the user to enter initial serial interface setting.
+
+ Ensure that the serial interface is available before proceeding.
+ """
+ if not self.local_testing_mode:
+ name = {TX: TXP, NC: RP, RX: RXP}[self.software_operation]
+
+ self.use_serial_usb_adapter = yes(f"Use USB-to-serial/TTL adapter for {name} Computer?", head=1, tail=1)
+
+ if self.use_serial_usb_adapter:
+ for f in sorted(os.listdir('/dev/')):
+ if f.startswith('ttyUSB'):
+ return None
+ else:
+ m_print("Error: USB-to-serial/TTL adapter not found.")
+ self.setup()
+ else:
+ if self.built_in_serial_interface in sorted(os.listdir('/dev/')):
+ return None
+ else:
+ m_print(f"Error: Serial interface /dev/{self.built_in_serial_interface} not found.")
+ self.setup()
+
+ def store_settings(self) -> None:
+ """Store serial settings in JSON format."""
+ serialized = json.dumps(self, default=(lambda o: {k: self.__dict__[k] for k in self.key_list}), indent=4)
+ with open(self.file_name, 'w+') as f:
+ f.write(serialized)
+
+ def invalid_setting(self, key: str, json_dict: Dict[str, Union[bool, int, str]]) -> None:
+ """Notify about setting an invalid value to default value."""
+ m_print([f"Error: Invalid value '{json_dict[key]}' for setting '{key}' in '{self.file_name}'.",
+ f"The value has been set to default ({self.defaults[key]})."], head=1, tail=1)
+ setattr(self, key, self.defaults[key])
+
+ def load_settings(self) -> None:
+ """Load and validate JSON settings for serial interface."""
+ with open(self.file_name) as f:
+ try:
+ json_dict = json.load(f)
+ except json.decoder.JSONDecodeError:
+ os.remove(self.file_name)
+ self.store_settings()
+ print(f"\nError: Invalid JSON format in '{self.file_name}'."
+ "\nSerial interface settings have been set to default values.\n")
+ return None
+
+ # Check for missing setting
+ for key in self.key_list:
+ if key not in json_dict:
+ m_print([f"Error: Missing setting '{key}' in '{self.file_name}'.",
+ f"The value has been set to default ({self.defaults[key]})."], head=1, tail=1)
+ setattr(self, key, self.defaults[key])
+ continue
+
+ # Closer inspection of each setting value
+ if key == 'serial_baudrate' and json_dict[key] not in serial.Serial().BAUDRATES:
+ self.invalid_setting(key, json_dict)
+ continue
+
+ elif key == 'serial_error_correction' and (not isinstance(json_dict[key], int) or json_dict[key] < 0):
+ self.invalid_setting(key, json_dict)
+ continue
+
+ elif key == 'use_serial_usb_adapter':
+ if not isinstance(json_dict[key], bool):
+ self.invalid_setting(key, json_dict)
+ continue
+
+ elif key == 'built_in_serial_interface':
+ if not isinstance(json_dict[key], str):
+ self.invalid_setting(key, json_dict)
+ continue
+ if not any(json_dict[key] == f for f in os.listdir('/sys/class/tty')):
+ self.invalid_setting(key, json_dict)
+ continue
+
+ setattr(self, key, json_dict[key])
+
+ # Store after loading to add missing, to replace invalid settings,
+ # and to remove settings that do not belong in the JSON file.
+ self.store_settings()
+
+ def change_setting(self, key: str, value_str: str) -> None:
+ """Parse, update and store new setting value."""
+ attribute = self.__getattribute__(key)
+ try:
+ if isinstance(attribute, bool):
+ value = dict(true=True, false=False)[value_str.lower()] # type: Union[bool, int]
+
+ elif isinstance(attribute, int):
+ value = int(value_str)
+ if value < 0 or value > MAX_INT:
+ raise ValueError
+
+ else:
+ raise CriticalError("Invalid attribute type in settings.")
+
+ except (KeyError, ValueError):
+ raise FunctionReturn(f"Error: Invalid value '{value_str}'.", delay=1, tail_clear=True)
+
+ self.validate_key_value_pair(key, value)
+
+ setattr(self, key, value)
+ self.store_settings()
+
+ @staticmethod
+ def validate_key_value_pair(key: str, value: Union[int, bool]) -> None:
+ """\
+ Perform further evaluation on settings the values of which have
+ restrictions.
+ """
+ if key == 'serial_baudrate':
+ if value not in serial.Serial().BAUDRATES:
+ raise FunctionReturn("Error: The specified baud rate is not supported.")
+ m_print("Baud rate will change on restart.", head=1, tail=1)
+
+ if key == 'serial_error_correction':
+ if value < 0:
+ raise FunctionReturn("Error: Invalid value for error correction ratio.")
+ m_print("Error correction ratio will change on restart.", head=1, tail=1)
+
+ def print_settings(self) -> None:
+ """\
+ Print list of settings, their current and
+ default values, and setting descriptions.
+ """
+ desc_d = {"serial_baudrate": "The speed of serial interface in bauds per second",
+ "serial_error_correction": "Number of byte errors serial datagrams can recover from"}
+
+ # Columns
+ c1 = ['Serial interface setting']
+ c2 = ['Current value']
+ c3 = ['Default value']
+ c4 = ['Description']
+
+ terminal_width = get_terminal_width()
+ description_indent = 64
+
+ if terminal_width < description_indent + 1:
+ raise FunctionReturn("Error: Screen width is too small.")
+
+ # Populate columns with setting data
+ for key in desc_d:
+ c1.append(key)
+ c2.append(str(self.__getattribute__(key)))
+ c3.append(str(self.defaults[key]))
+
+ description = desc_d[key]
+ wrapper = textwrap.TextWrapper(width=max(1, (terminal_width - description_indent)))
+ desc_lines = wrapper.fill(description).split('\n')
+ desc_string = desc_lines[0]
+
+ for line in desc_lines[1:]:
+ desc_string += '\n' + description_indent * ' ' + line
+
+ if len(desc_lines) > 1:
+ desc_string += '\n'
+
+ c4.append(desc_string)
+
+ # Calculate column widths
+ c1w, c2w, c3w = [max(len(v) for v in column) + SETTINGS_INDENT for column in [c1, c2, c3]]
+
+ # Align columns by adding whitespace between fields of each line
+ lines = [f'{f1:{c1w}} {f2:{c2w}} {f3:{c3w}} {f4}' for f1, f2, f3, f4 in zip(c1, c2, c3, c4)]
+
+ # Add a terminal-wide line between the column names and the data
+ lines.insert(1, get_terminal_width() * '─')
+
+ # Print the settings
+ print('\n' + '\n'.join(lines) + '\n')
diff --git a/src/common/input.py b/src/common/input.py
index 39efda0..db66913 100644
--- a/src/common/input.py
+++ b/src/common/input.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,195 +16,181 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see <http://www.gnu.org/licenses/>.
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
import getpass
import typing
-from typing import Any, Callable
+from typing import Any, Callable, Optional
from src.common.encoding import b58decode
from src.common.exceptions import CriticalError
-from src.common.misc import get_terminal_width
-from src.common.output import box_print, c_print, clear_screen, message_printer, print_on_previous_line
+from src.common.misc import get_terminal_width, terminal_width_check
+from src.common.output import clear_screen, m_print, print_on_previous_line, print_spacing
from src.common.statics import *
if typing.TYPE_CHECKING:
from src.common.db_settings import Settings
-def ask_confirmation_code() -> str:
+def ask_confirmation_code(source: str # The system the confirmation code is displayed by
+ ) -> str: # The confirmation code entered by the user
"""\
- Ask user to input confirmation code from RxM
- to verify that local key has been installed.
-
- Input box accommodates room for the 'resend' command.
+ Ask the user to input confirmation code from Source Computer to
+ verify local key has been installed.
"""
- title = "Enter confirmation code (from RxM): "
- space = len(' resend ')
+ title = f"Enter confirmation code (from {source}): "
+ input_space = len(' ff ')
- upper_line = ('┌' + (len(title) + space) * '─' + '┐')
- title_line = ('│' + title + space * ' ' + '│')
- lower_line = ('└' + (len(title) + space) * '─' + '┘')
+ upper_line = ('┌' + (len(title) + input_space) * '─' + '┐')
+ title_line = ('│' + title + input_space * ' ' + '│')
+ lower_line = ('└' + (len(title) + input_space) * '─' + '┘')
terminal_w = get_terminal_width()
upper_line = upper_line.center(terminal_w)
title_line = title_line.center(terminal_w)
lower_line = lower_line.center(terminal_w)
+ terminal_width_check(len(upper_line))
+
print(upper_line)
print(title_line)
print(lower_line)
print(3 * CURSOR_UP_ONE_LINE)
indent = title_line.find('│')
- return input(indent * ' ' + '│ {}'.format(title))
+ return input(indent * ' ' + f'│ {title}')
-def box_input(message: str,
- default: str = '',
- head: int = 0,
- tail: int = 1,
- expected_len: int = 0,
- validator: Callable = None,
- validator_args: Any = None,
- key_input: bool = False) -> str:
- """Display boxed input prompt with title.
+def box_input(message: str, # Input prompt message
+ default: str = '', # Default return value
+ head: int = 0, # Number of new lines to print before the input
+ tail: int = 1, # Number of new lines to print after input
+ expected_len: int = 0, # Expected length of the input
+ key_type: str = '', # When specified, sets input width
+ guide: bool = False, # When True, prints the guide for key
+ validator: Optional[Callable] = None, # Input validator function
+ validator_args: Optional[Any] = None # Arguments required by the validator
+ ) -> str: # Input from user
+ """Display boxed input prompt with the title."""
+ print_spacing(head)
- :param message: Input prompt message
- :param default: Default return value
- :param head: Number of new lines to print before input
- :param tail: Number of new lines to print after input
- :param expected_len Expected length of input
- :param validator: Input validator function
- :param validator_args: Arguments required by the validator
- :param key_input: When True, prints key input position guide
- :return: Input from user
- """
- for _ in range(head):
+ terminal_width = get_terminal_width()
+
+ if key_type:
+ key_guide = {B58_LOCAL_KEY: B58_LOCAL_KEY_GUIDE,
+ B58_PUBLIC_KEY: B58_PUBLIC_KEY_GUIDE}.get(key_type, '')
+ if guide:
+ inner_spc = len(key_guide) + 2
+ else:
+ inner_spc = (86 if key_type == B58_PUBLIC_KEY else 53)
+ else:
+ key_guide = ''
+ inner_spc = terminal_width - 2 if expected_len == 0 else expected_len + 2
+
+ upper_line = '┌' + inner_spc * '─' + '┐'
+ guide_line = '│ ' + key_guide + ' │'
+ input_line = '│' + inner_spc * ' ' + '│'
+ lower_line = '└' + inner_spc * '─' + '┘'
+ box_indent = (terminal_width - len(upper_line)) // 2 * ' '
+
+ terminal_width_check(len(upper_line))
+
+ print(box_indent + upper_line)
+ if guide:
+ print(box_indent + guide_line)
+ print(box_indent + input_line)
+ print(box_indent + lower_line)
+ print((5 if guide else 4) * CURSOR_UP_ONE_LINE)
+ print(box_indent + '┌─┤' + message + '├')
+ if guide:
print('')
- terminal_w = get_terminal_width()
- input_len = terminal_w - 2 if expected_len == 0 else expected_len + 2
-
- if key_input:
- input_len += 2
-
- input_top_line = '┌' + input_len * '─' + '┐'
- key_pos_guide = '│ ' + ' '.join('ABCDEFGHIJKLMNOPQ') + ' │'
- input_line = '│' + input_len * ' ' + '│'
- input_bot_line = '└' + input_len * '─' + '┘'
-
- input_line_indent = (terminal_w - len(input_line)) // 2
- input_box_indent = input_line_indent * ' '
-
- print(input_box_indent + input_top_line)
- if key_input:
- print(input_box_indent + key_pos_guide)
- print(input_box_indent + input_line)
- print(input_box_indent + input_bot_line)
- print((5 if key_input else 4) * CURSOR_UP_ONE_LINE)
- print(input_box_indent + '┌─┤' + message + '├')
- if key_input:
- print('')
-
- user_input = input(input_box_indent + '│ ')
+ user_input = input(box_indent + '│ ')
if user_input == '':
print(2 * CURSOR_UP_ONE_LINE)
- print(input_box_indent + '│ {}'.format(default))
+ print(box_indent + '│ ' + default)
user_input = default
if validator is not None:
error_msg = validator(user_input, validator_args)
if error_msg:
- c_print("Error: {}".format(error_msg), head=1)
- print_on_previous_line(reps=4, delay=1.5)
- return box_input(message, default, head, tail, expected_len, validator, validator_args)
+ m_print(error_msg, head=1)
+ print_on_previous_line(reps=4, delay=1)
+ return box_input(message, default, head, tail, expected_len, key_type, guide, validator, validator_args)
- for _ in range(tail):
- print('')
+ print_spacing(tail)
return user_input
-def get_b58_key(key_type: str, settings: 'Settings') -> bytes:
- """Ask user to input Base58 encoded public key from RxM.
-
- For file keys, use testnet address format instead to
- prevent file injected via import from accidentally
- being decrypted with public key from adversary.
- """
- if key_type == B58_PUB_KEY:
+def get_b58_key(key_type: str, # The type of Base58 key to be entered
+ settings: 'Settings', # Settings object
+ short_address: str = '' # The contact's short Onion address
+ ) -> bytes: # The B58 decoded key
+ """Ask the user to input a Base58 encoded key."""
+ if key_type == B58_PUBLIC_KEY:
clear_screen()
- c_print("Import public key from RxM", head=1, tail=1)
- c_print("WARNING")
- message_printer("Outside specific requests TxM (this computer) "
- "makes, you must never copy any data from "
- "NH/RxM to TxM. Doing so could infect TxM, that "
- "could then later covertly transmit private "
- "keys/messages to attacker.", head=1, tail=1)
- message_printer("You can resend your public key by typing 'resend'", tail=1)
- box_msg = "Enter contact's public key from RxM"
+ m_print(f"{ECDHE} key exchange", head=1, tail=1, bold=True)
+        m_print("If needed, resend your public key to the contact by pressing <Enter>", tail=1)
+
+ box_msg = f"Enter public key of {short_address} (from Relay)"
elif key_type == B58_LOCAL_KEY:
- box_msg = "Enter local key decryption key from TxM"
- elif key_type == B58_FILE_KEY:
- box_msg = "Enter file decryption key"
+ box_msg = "Enter local key decryption key (from Transmitter)"
else:
raise CriticalError("Invalid key type")
while True:
- if settings.local_testing_mode or key_type == B58_FILE_KEY:
- pub_key = box_input(box_msg, expected_len=51)
- small = True
- else:
- pub_key = box_input(box_msg, expected_len=65, key_input=True)
- small = False
- pub_key = ''.join(pub_key.split())
+ rx_pk = box_input(box_msg, key_type=key_type, guide=not settings.local_testing_mode)
+ rx_pk = ''.join(rx_pk.split())
- if key_type == B58_PUB_KEY and pub_key == RESEND:
- return pub_key.encode()
+ if key_type == B58_PUBLIC_KEY and rx_pk == '':
+ return rx_pk.encode()
try:
- return b58decode(pub_key, file_key=(key_type==B58_FILE_KEY))
+ return b58decode(rx_pk, public_key=(key_type == B58_PUBLIC_KEY))
except ValueError:
- c_print("Checksum error - Check that entered key is correct.", head=1)
- print_on_previous_line(reps=5 if small else 6, delay=1.5)
+ m_print("Checksum error - Check that the entered key is correct.")
+ print_on_previous_line(reps=(4 if settings.local_testing_mode else 5), delay=1)
-def nh_bypass_msg(key: str, settings: 'Settings') -> None:
- """Print messages about bypassing NH.
+def nc_bypass_msg(key: str, settings: 'Settings') -> None:
+ """Print messages about bypassing Networked Computer.
- During ciphertext delivery of local key exchange, NH bypass messages
- tell user when to bypass and remove bypass of NH. This makes initial
- key bootstrap more secure in case key decryption key input is not safe.
+ During ciphertext delivery of local key exchange, these bypass
+ messages tell the user when to bypass and remove bypass of Networked
+ Computer. Bypass of Networked Computer makes initial bootstrap more
+ secure by denying remote attacker the access to the encrypted local
+ key. Without the ciphertext, e.g. a visually collected local key
+ decryption key is useless.
"""
-    m = {NH_BYPASS_START: "Bypass NH if needed. Press <Enter> to send local key.",
-         NH_BYPASS_STOP:  "Remove bypass of NH. Press <Enter> to continue."}
+    m = {NC_BYPASS_START: "Bypass Networked Computer if needed. Press <Enter> to send local key.",
+         NC_BYPASS_STOP:  "Remove bypass of Networked Computer. Press <Enter> to continue."}
- if settings.nh_bypass_messages:
- box_print(m[key], manual_proceed=True, head=(1 if key == NH_BYPASS_STOP else 0))
+ if settings.nc_bypass_messages:
+ m_print(m[key], manual_proceed=True, box=True, head=(1 if key == NC_BYPASS_STOP else 0))
-def pwd_prompt(message: str, second: bool = False) -> str:
- """Prompt user to enter a password.
+def pwd_prompt(message: str, # Prompt message
+ repeat: bool = False # When True, prints corner chars for the second box
+ ) -> str: # Password from user
+ """Prompt the user to enter a password.
- :param message: Prompt message
- :param second: When True, prints corner chars for second box
- :return: Password from user
+ The getpass library ensures the password is not echoed on screen
+ when it is typed.
"""
- l, r = {False: ('┌', '┐'),
- True: ('├', '┤')}[second]
+ l, r = ('├', '┤') if repeat else ('┌', '┐')
- upper_line = ( l + (len(message) + 3) * '─' + r )
- title_line = ('│' + message + 3 * ' ' + '│')
- lower_line = ('└' + (len(message) + 3) * '─' + '┘')
+ terminal_w = get_terminal_width()
+ input_space = len(' c ') # `c` is where the caret sits
- terminal_w = get_terminal_width()
- upper_line = upper_line.center(terminal_w)
- title_line = title_line.center(terminal_w)
- lower_line = lower_line.center(terminal_w)
+ upper_line = ( l + (len(message) + input_space) * '─' + r ).center(terminal_w)
+ title_line = ('│' + message + input_space * ' ' + '│').center(terminal_w)
+ lower_line = ('└' + (len(message) + input_space) * '─' + '┘').center(terminal_w)
+
+ terminal_width_check(len(upper_line))
print(upper_line)
print(title_line)
@@ -211,27 +198,25 @@ def pwd_prompt(message: str, second: bool = False) -> str:
print(3 * CURSOR_UP_ONE_LINE)
indent = title_line.find('│')
- user_input = getpass.getpass(indent * ' ' + '│ {}'.format(message))
+ user_input = getpass.getpass(indent * ' ' + f'│ {message}')
return user_input
-def yes(prompt: str, head: int = 0, tail: int = 0) -> bool:
- """Prompt user a question that is answered with yes / no.
+def yes(prompt: str, # Question to be asked
+ abort: Optional[bool] = None, # Determines the return value of ^C and ^D
+ head: int = 0, # Number of new lines to print before prompt
+ tail: int = 0 # Number of new lines to print after prompt
+ ) -> bool: # True/False depending on input
+ """Prompt the user a question that is answered with yes/no."""
+ print_spacing(head)
- :param prompt: Question to be asked
- :param head: Number of new lines to print before prompt
- :param tail: Number of new lines to print after prompt
- :return: True if user types 'y' or 'yes'
- False if user types 'n' or 'no'
- """
- for _ in range(head):
- print('')
+ prompt = f"{prompt} (y/n): "
+ input_space = len(' yes ')
- prompt = "{} (y/n): ".format(prompt)
- upper_line = ('┌' + (len(prompt) + 5) * '─' + '┐')
- title_line = ('│' + prompt + 5 * ' ' + '│')
- lower_line = ('└' + (len(prompt) + 5) * '─' + '┘')
+ upper_line = ('┌' + (len(prompt) + input_space) * '─' + '┐')
+ title_line = ('│' + prompt + input_space * ' ' + '│')
+ lower_line = ('└' + (len(prompt) + input_space) * '─' + '┘')
terminal_w = get_terminal_width()
upper_line = upper_line.center(terminal_w)
@@ -240,25 +225,33 @@ def yes(prompt: str, head: int = 0, tail: int = 0) -> bool:
indent = title_line.find('│')
+ terminal_width_check(len(upper_line))
+
print(upper_line)
while True:
print(title_line)
print(lower_line)
print(3 * CURSOR_UP_ONE_LINE)
- user_input = input(indent * ' ' + '│ {}'.format(prompt))
+
+ try:
+ user_input = input(indent * ' ' + f'│ {prompt}')
+ except (EOFError, KeyboardInterrupt):
+ if abort is None:
+ raise
+ print('')
+ user_input = 'y' if abort else 'n'
+
print_on_previous_line()
if user_input == '':
continue
if user_input.lower() in ['y', 'yes']:
- print(indent * ' ' + '│ {}Yes │\n'.format(prompt))
- for _ in range(tail):
- print('')
+ print(indent * ' ' + f'│ {prompt}Yes │\n')
+ print_spacing(tail)
return True
elif user_input.lower() in ['n', 'no']:
- print(indent * ' ' + '│ {}No │\n'.format(prompt))
- for _ in range(tail):
- print('')
+ print(indent * ' ' + f'│ {prompt}No │\n')
+ print_spacing(tail)
return False
diff --git a/src/common/misc.py b/src/common/misc.py
index d01c4bf..d6f8a66 100755
--- a/src/common/misc.py
+++ b/src/common/misc.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,18 +16,25 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see <http://www.gnu.org/licenses/>.
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
import argparse
+import base64
+import binascii
+import hashlib
import math
import os
-import re
import shutil
+import subprocess
+import sys
+import time
import typing
+import zlib
-from contextlib import contextmanager
-from typing import Any, Callable, Generator, List, Tuple, Union
+from contextlib import contextmanager
+from typing import Any, Callable, Dict, Generator, List, Tuple, Union
+from multiprocessing import Process, Queue
from src.common.reed_solomon import RSCodec
from src.common.statics import *
@@ -35,139 +43,214 @@ if typing.TYPE_CHECKING:
from src.common.db_contacts import ContactList
from src.common.db_groups import GroupList
from src.common.db_settings import Settings
- from src.nh.settings import Settings as NHSettings
+ from src.common.gateway import Gateway
-def calculate_race_condition_delay(settings: Union['Settings', 'NHSettings'], txm: bool = False) -> float:
- """Calculate NH race condition delay.
+def calculate_race_condition_delay(serial_error_correction: int,
+ serial_baudrate: int
+ ) -> float:
+ """\
+ Calculate the delay required to prevent Relay Program race condition.
- This value is the max time it takes for NH to deliver
- command received from TxM all the way to RxM.
+ When Transmitter Program outputs a command to exit or wipe data,
+ Relay program will also receive a copy of the command. If Relay
+ Program acts on the command too early, Receiver Program will not
+ receive the exit/wipe command at all.
- :param settings: Settings object
- :param txm: When True, allocate time for command delivery from TxM to NH
- :return: Time to wait to prevent race condition
+ This program calculates the delay Transmitter Program should wait
+ before outputting command for Relay Program, to ensure Receiver
+ Program has received the encrypted command.
"""
- rs = RSCodec(2 * settings.session_serial_error_correction)
- max_account_length = 254
- max_message_length = PACKET_LENGTH + 2 * max_account_length
- command_length = 365*2 if txm else 365
- max_bytes = (len(rs.encode(os.urandom(max_message_length)))
- + len(rs.encode(os.urandom(command_length))))
+ rs = RSCodec(2 * serial_error_correction)
+ message_length = PACKET_LENGTH + ONION_ADDRESS_LENGTH
+ enc_msg_length = len(rs.encode(os.urandom(message_length)))
+ enc_cmd_length = len(rs.encode(os.urandom(COMMAND_LENGTH)))
+ max_bytes = enc_msg_length + (2 * enc_cmd_length)
- return (max_bytes * BAUDS_PER_BYTE) / settings.serial_baudrate
+ return (max_bytes * BAUDS_PER_BYTE) / serial_baudrate
-def calculate_serial_delays(session_serial_baudrate: int) -> Tuple[float, float]:
- """Calculate transmission delay and receive timeout."""
- bytes_per_sec = session_serial_baudrate / BAUDS_PER_BYTE
- byte_travel_t = 1 / bytes_per_sec
+def decompress(data: bytes, # Data to be decompressed
+ max_size: int # The maximum size of decompressed data.
+ ) -> bytes: # Decompressed data
+ """Decompress received data.
- rxm_receive_timeout = max(2 * byte_travel_t, 0.02)
- txm_inter_packet_delay = 2 * rxm_receive_timeout
+ The decompressed data has a maximum size, designed to prevent zip
+ bombs from filling the drive of an unsuspecting user.
+ """
+ from src.common.exceptions import FunctionReturn # Avoid circular import
- return rxm_receive_timeout, txm_inter_packet_delay
+ dec = zlib.decompressobj()
+ data = dec.decompress(data, max_size)
+ if dec.unconsumed_tail:
+ raise FunctionReturn("Error: Decompression aborted due to possible zip bomb.")
+ del dec
+
+ return data
def ensure_dir(directory: str) -> None:
- """Ensure directory exists."""
+ """Ensure directory exists.
+
+ This function is run before checking a database exists in the
+ specified directory, or before storing data into a directory.
+ It prevents errors in case user has for some reason removed
+ the directory.
+ """
name = os.path.dirname(directory)
if not os.path.exists(name):
- os.makedirs(name)
+ with ignored(FileExistsError):
+ os.makedirs(name)
def get_tab_complete_list(contact_list: 'ContactList',
group_list: 'GroupList',
- settings: 'Settings') -> List[str]:
+ settings: 'Settings',
+ gateway: 'Gateway'
+ ) -> List[str]:
"""Return a list of tab-complete words."""
- tc_list = ['about', 'add ', 'all', 'clear', 'cmd', 'create ', 'exit', 'export ',
- 'false', 'file', 'fingerprints', 'group ', 'help', 'history ', 'join ', 'localkey',
- 'logging ', 'msg ', 'names', 'nick ', 'notify ', 'passwd ', 'psk', 'reset',
- 'rm', 'rmlogs ', 'set ', 'settings', 'store ', 'true', 'unread', 'whisper ']
+ commands = ['about', 'add ', 'clear', 'cmd', 'connect', 'exit', 'export ', 'file', 'group ', 'help', 'history ',
+ 'localkey', 'logging ', 'msg ', 'names', 'nick ', 'notify ', 'passwd ', 'psk', 'reset', 'rmlogs ',
+ 'set ', 'settings', 'store ', 'unread', 'verify', 'whisper ', 'whois ']
- tc_list += [(c + ' ') for c in contact_list.get_list_of_accounts()]
+ tc_list = ['all', 'create ', 'false', 'False', 'join ', 'true', 'True']
+ tc_list += commands
+ tc_list += [(a + ' ') for a in contact_list.get_list_of_addresses()]
tc_list += [(n + ' ') for n in contact_list.get_list_of_nicks()]
- tc_list += [(u + ' ') for u in contact_list.get_list_of_users_accounts()]
tc_list += [(g + ' ') for g in group_list.get_list_of_group_names()]
+ tc_list += [(i + ' ') for i in group_list.get_list_of_hr_group_ids()]
tc_list += [(s + ' ') for s in settings.key_list]
+ tc_list += [(s + ' ') for s in gateway.settings.key_list]
return tc_list
def get_tab_completer(contact_list: 'ContactList',
group_list: 'GroupList',
- settings: 'Settings') -> Callable:
- """Return tab completer object."""
+ settings: 'Settings',
+ gateway: 'Gateway'
+ ) -> Callable:
+ """Return the tab completer object."""
- def tab_complete(text, state) -> List[str]:
- """Return tab_complete options."""
- tab_complete_list = get_tab_complete_list(contact_list, group_list, settings)
- options = [t for t in tab_complete_list if t.startswith(text)]
+ def tab_complete(text: str, state: Any) -> List[str]:
+ """Return tab-complete options."""
+ tab_complete_list = get_tab_complete_list(contact_list, group_list, settings, gateway)
+ options = [t for t in tab_complete_list if t.startswith(text)] # type: List[str]
with ignored(IndexError):
- return options[state]
+ tc = options[state] # type: List[str]
+ return tc
return tab_complete
def get_terminal_height() -> int:
- """Return height of terminal."""
- return int(shutil.get_terminal_size()[1])
+ """Return the height of the terminal."""
+ return shutil.get_terminal_size()[1]
def get_terminal_width() -> int:
- """Return width of terminal."""
+ """Return the width of the terminal."""
return shutil.get_terminal_size()[0]
@contextmanager
def ignored(*exceptions: Any) -> Generator:
- """Ignore exception."""
+ """Ignore an exception."""
try:
yield
except exceptions:
pass
-def process_arguments() -> Tuple[str, bool, bool]:
- """Load TxM/RxM startup settings from command line arguments."""
- parser = argparse.ArgumentParser('python3.6 tfc.py',
- usage='%(prog)s [OPTION]',
- description='')
+def monitor_processes(process_list: List[Process],
+ software_operation: str,
+ queues: Dict[bytes, Queue],
+ error_exit_code: int = 1
+ ) -> None:
+ """Monitor the status of `process_list` and EXIT_QUEUE.
- parser.add_argument('-rx',
+ This function monitors a list of processes. If one of them dies, it
+ terminates the rest and closes TFC with exit code 1.
+
+ If EXIT or WIPE signal is received to EXIT_QUEUE, the function
+ terminates running processes and closes the program with exit code 0
+ or overwrites existing user data and powers the system off.
+ """
+ while True:
+ with ignored(EOFError, KeyboardInterrupt):
+ time.sleep(0.1)
+ if not all([p.is_alive() for p in process_list]):
+ for p in process_list:
+ p.terminate()
+ sys.exit(error_exit_code)
+
+ if queues[EXIT_QUEUE].qsize() > 0:
+ command = queues[EXIT_QUEUE].get()
+
+ for p in process_list:
+ p.terminate()
+
+ if command == EXIT:
+ sys.exit(0)
+
+ if command == WIPE:
+ if TAILS not in subprocess.check_output('lsb_release -a', shell=True):
+ if software_operation == RX:
+ subprocess.Popen("find {} -type f -exec shred -n 3 -z -u {{}} \;"
+ .format(DIR_RECV_FILES), shell=True).wait()
+
+ subprocess.Popen("find {} -name '{}*' -type f -exec shred -n 3 -z -u {{}} \;"
+ .format(DIR_USER_DATA, software_operation), shell=True).wait()
+
+ for d in [DIR_USER_DATA, DIR_RECV_FILES]:
+ with ignored(FileNotFoundError):
+ shutil.rmtree(d)
+ os.system(POWEROFF)
+
+
+def process_arguments() -> Tuple[str, bool, bool]:
+ """Load program-specific settings from command line arguments.
+
+ The arguments are determined by the desktop entries and in the
+ Terminator configuration file for local testing. The descriptions
+ here are provided for the sake of completeness.
+ """
+ parser = argparse.ArgumentParser(f'python3.6 {sys.argv[0]}',
+ usage='%(prog)s [OPTION]',
+ epilog='Full documentation at: <https://github.com/maqp/tfc/wiki>')
+
+ parser.add_argument('-r',
action='store_true',
default=False,
dest='operation',
- help="Run RxM side program")
+ help="run Receiver instead of Transmitter Program")
parser.add_argument('-l',
action='store_true',
default=False,
dest='local_test',
- help="Enable local testing mode")
+ help="enable local testing mode")
parser.add_argument('-d',
action='store_true',
default=False,
- dest='dd_sockets',
- help="Data diode simulator socket configuration for local testing")
+ dest='data_diode_sockets',
+ help="use data diode simulator sockets during local testing mode")
- args = parser.parse_args()
- operation = RX if args.operation else TX
- local_test = args.local_test
- dd_sockets = args.dd_sockets
+ args = parser.parse_args()
+ operation = RX if args.operation else TX
- return operation, local_test, dd_sockets
+ return operation, args.local_test, args.data_diode_sockets
def readable_size(size: int) -> str:
- """Convert file size from bytes to human readable form."""
+ """Convert file size from bytes to a human-readable form."""
f_size = float(size)
for unit in ['', 'K', 'M', 'G', 'T', 'P', 'E', 'Z']:
if abs(f_size) < 1024.0:
- return '{:3.1f}{}B'.format(f_size, unit)
+ return f'{f_size:3.1f}{unit}B'
f_size /= 1024.0
- return '{:3.1f}YB'.format(f_size)
+ return f'{f_size:3.1f}YB'
def round_up(value: Union[int, float]) -> int:
@@ -175,110 +258,178 @@ def round_up(value: Union[int, float]) -> int:
return int(math.ceil(value / 10.0)) * 10
-def split_byte_string(string: bytes, item_len: int) -> List[bytes]:
- """Split byte string into list of specific length substrings.
+def split_byte_string(bytestring: bytes, # Bytestring to split
+ item_len: int # Length of each substring
+ ) -> List[bytes]: # List of substrings
+ """Split a bytestring into a list of specific length substrings."""
+ return [bytestring[i:i + item_len] for i in range(0, len(bytestring), item_len)]
- :param string: String to split
- :param item_len: Length of list items
- :return: String split to list
- """
+
+def split_string(string: str, # String to split
+ item_len: int # Length of each substring
+ ) -> List[str]: # List of substrings
+ """Split a string into a list of specific length substrings."""
return [string[i:i + item_len] for i in range(0, len(string), item_len)]
-def split_string(string: str, item_len: int) -> List[str]:
- """Split string into list of specific length substrings.
+def separate_header(bytestring: bytes, # Bytestring to slice
+ header_length: int # Number of header bytes to separate
+ ) -> Tuple[bytes, bytes]: # Header and payload
+ """Separate `header_length` first bytes from a bytestring."""
+ return bytestring[:header_length], bytestring[header_length:]
- :param string: String to split
- :param item_len: Length of list items
- :return: String split to list
+
+def separate_headers(bytestring: bytes, # Bytestring to slice
+ header_length_list: List[int], # List of header lengths
+ ) -> List[bytes]: # List of separated headers and the remaining payload
+ """Separate a list of headers from bytestring.
+
+ Length of each header is determined in the `header_length_list`.
"""
- return [string[i:i + item_len] for i in range(0, len(string), item_len)]
+ fields = []
+ for header_length in header_length_list:
+ field, bytestring = separate_header(bytestring, header_length)
+ fields.append(field)
+ fields.append(bytestring)
+
+ return fields
-def validate_account(account: str, *_: Any) -> str:
- """Validate account name.
+def separate_trailer(bytestring: bytes, # Bytestring to slice
+ trailer_length: int # Number of trailer bytes to separate
+ ) -> Tuple[bytes, bytes]: # Payload and trailer
+ """Separate `trailer_length` last bytes from a bytestring.
- :param account: Account name to validate
- :param _: Unused arguments
- :return: Error message if validation failed, else empty string
+ This saves space and makes trailer separation more readable.
"""
+ return bytestring[:-trailer_length], bytestring[-trailer_length:]
+
+
+def terminal_width_check(minimum_width: int) -> None:
+ """Wait until the user resizes their terminal to the specified width."""
+ if get_terminal_width() < minimum_width:
+ print("Please make the terminal wider.")
+ while get_terminal_width() < minimum_width:
+ time.sleep(0.1)
+ time.sleep(0.1)
+ print(2*CURSOR_UP_ONE_LINE)
+
+
+def validate_onion_addr(onion_address_contact: str, # Onion Service address of the contact
+ onion_address_user: str = '' # Onion Service address of the user
+ ) -> str: # Error message if validation failed, else ''
+ """Validate a v3 Onion Service address."""
error_msg = ''
- # Length limited by database's unicode padding
- if len(account) >= PADDING_LEN:
- error_msg = "Account must be shorter than {} chars.".format(PADDING_LEN)
+ try:
+ decoded = base64.b32decode(onion_address_contact.upper())
- if not re.match(ACCOUNT_FORMAT, account):
- error_msg = "Invalid account format."
+ public_key, checksum, version \
+ = separate_headers(decoded, [ONION_SERVICE_PUBLIC_KEY_LENGTH, ONION_ADDRESS_CHECKSUM_LENGTH])
- # Avoid delimiter char collision in output packets
- if not account.isprintable():
- error_msg = "Account must be printable."
+ if checksum != hashlib.sha3_256(ONION_ADDRESS_CHECKSUM_ID
+ + public_key
+ + version
+ ).digest()[:ONION_ADDRESS_CHECKSUM_LENGTH]:
+ error_msg = "Checksum error - Check that the entered account is correct."
+
+ except (binascii.Error, ValueError):
+ return "Error: Invalid account format."
+
+ if onion_address_contact in (LOCAL_ID, DUMMY_CONTACT, DUMMY_MEMBER) or public_key == LOCAL_PUBKEY:
+ error_msg = "Error: Can not add reserved account."
+
+ if onion_address_user and onion_address_contact == onion_address_user:
+ error_msg = "Error: Can not add own account."
return error_msg
-def validate_key_exchange(key_ex: str, *_: Any) -> str:
- """Validate specified key exchange.
-
- :param key_ex: Key exchange selection to validate
- :param _: Unused arguments
- :return: Error message if validation failed, else empty string
- """
+def validate_group_name(group_name: str, # Name of the group
+ contact_list: 'ContactList', # ContactList object
+ group_list: 'GroupList' # GroupList object
+ ) -> str: # Error message if validation failed, else empty string
+ """Validate the specified group name."""
error_msg = ''
- if key_ex.lower() not in ['x', 'x25519', 'p', 'psk']:
+ # Avoids collision with delimiters
+ if not group_name.isprintable():
+ error_msg = "Error: Group name must be printable."
+
+ # Length is limited by database's Unicode padding
+ if len(group_name) >= PADDING_LENGTH:
+ error_msg = f"Error: Group name must be less than {PADDING_LENGTH} chars long."
+
+ if group_name == DUMMY_GROUP:
+ error_msg = "Error: Group name cannot use the name reserved for database padding."
+
+ if not validate_onion_addr(group_name):
+ error_msg = "Error: Group name cannot have the format of an account."
+
+ if group_name in contact_list.get_list_of_nicks():
+ error_msg = "Error: Group name cannot be a nick of contact."
+
+ if group_name in group_list.get_list_of_group_names():
+ error_msg = f"Error: Group with name '{group_name}' already exists."
+
+ return error_msg
+
+
+def validate_key_exchange(key_ex: str, # Key exchange selection to validate
+ *_: Any # Unused arguments
+ ) -> str: # Error message if validation failed, else empty string
+ """Validate the specified key exchange."""
+ error_msg = ''
+
+ if key_ex.upper() not in [ECDHE, ECDHE[:1], PSK, PSK[:1]]:
error_msg = "Invalid key exchange selection."
return error_msg
-def validate_nick(nick: str, args: Tuple['ContactList', 'GroupList', str]) -> str:
- """Validate nickname for account.
-
- :param nick: Nick to validate
- :param args: Contact list and group list databases
- :return: Error message if validation failed, else empty string
- """
- contact_list, group_list, account = args
+def validate_nick(nick: str, # Nick to validate
+ args: Tuple['ContactList', 'GroupList', bytes] # Contact list, group list, and contact's Onion public key
+ ) -> str: # Error message if validation failed, else ''
+ """Validate the specified nickname."""
+ contact_list, group_list, onion_pub_key = args
error_msg = ''
- # Length limited by database's unicode padding
- if len(nick) >= PADDING_LEN:
- error_msg = "Nick must be shorter than {} chars.".format(PADDING_LEN)
+ # Length is limited by database's Unicode padding
+ if len(nick) >= PADDING_LENGTH:
+ error_msg = f"Error: Nick must be shorter than {PADDING_LENGTH} chars."
# Avoid delimiter char collision in output packets
if not nick.isprintable():
- error_msg = "Nick must be printable."
+ error_msg = "Error: Nick must be printable."
if nick == '':
- error_msg = "Nick can't be empty."
+ error_msg = "Error: Nick cannot be empty."
- # RxM displays sent messages under 'Me'
- if nick.lower() == 'me':
- error_msg = "'Me' is a reserved nick."
+ # Receiver displays sent messages under 'Me'
+ if nick.lower() == ME.lower():
+ error_msg = f"Error: '{ME}' is a reserved nick."
- # RxM displays system notifications under '-!-'
- if nick.lower() == '-!-':
- error_msg = "'-!-' is a reserved nick."
+ # Receiver displays system notifications under reserved notification symbol
+ if nick == EVENT:
+ error_msg = f"Error: '{EVENT}' is a reserved nick."
# Ensure that nicks, accounts and group names are UIDs in recipient selection
- if nick == 'local':
- error_msg = "Nick can't refer to local keyfile."
+ if validate_onion_addr(nick) == '': # If no error message was received, nick had format of account
+ error_msg = "Error: Nick cannot have the format of an account."
- if re.match(ACCOUNT_FORMAT, nick):
- error_msg = "Nick can't have format of an account."
+ if nick in (LOCAL_ID, DUMMY_CONTACT, DUMMY_MEMBER):
+ error_msg = "Error: Nick cannot have the format of an account."
if nick in contact_list.get_list_of_nicks():
- error_msg = "Nick already in use."
+ error_msg = "Error: Nick already in use."
- # Allow if nick matches the account the key is being re-exchanged for
- if contact_list.has_contact(account):
- if nick == contact_list.get_contact(account).nick:
+ # Allow existing nick if it matches the account being replaced.
+ if contact_list.has_pub_key(onion_pub_key):
+ if nick == contact_list.get_contact_by_pub_key(onion_pub_key).nick:
error_msg = ''
if nick in group_list.get_list_of_group_names():
- error_msg = "Nick can't be a group name."
+ error_msg = "Error: Nick cannot be a group name."
return error_msg
diff --git a/src/common/output.py b/src/common/output.py
index 51399e9..860f2c6 100644
--- a/src/common/output.py
+++ b/src/common/output.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,98 +16,40 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see <http://www.gnu.org/licenses/>.
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
-import binascii
import textwrap
import time
import typing
import sys
-from typing import List, Union
+from datetime import datetime
+from typing import List, Optional, Union
-from src.common.encoding import b58encode
+from src.common.encoding import b10encode, b58encode, pub_key_to_onion_address
from src.common.misc import get_terminal_width, split_string
from src.common.statics import *
if typing.TYPE_CHECKING:
from src.common.db_contacts import ContactList
from src.common.db_settings import Settings
-
-
-def box_print(msg_list: Union[str, list],
- manual_proceed: bool = False,
- head: int = 0,
- tail: int = 0) -> None:
- """Print message inside a box.
-
- :param msg_list: List of lines to print
- :param manual_proceed: Wait for user input before continuing
- :param head: Number of new lines to print before box
- :param tail: Number of new lines to print after box
- :return: None
- """
- for _ in range(head):
- print('')
-
- if isinstance(msg_list, str):
- msg_list = [msg_list]
-
- len_widest = max(len(m) for m in msg_list)
- msg_list = ['{:^{}}'.format(m, len_widest) for m in msg_list]
-
- top_line = '┌' + (len(msg_list[0]) + 2) * '─' + '┐'
- bot_line = '└' + (len(msg_list[0]) + 2) * '─' + '┘'
- msg_list = ['│ {} │'.format(m) for m in msg_list]
-
- terminal_w = get_terminal_width()
- top_line = top_line.center(terminal_w)
- msg_list = [m.center(terminal_w) for m in msg_list]
- bot_line = bot_line.center(terminal_w)
-
- print(top_line)
- for m in msg_list:
- print(m)
- print(bot_line)
-
- for _ in range(tail):
- print('')
-
- if manual_proceed:
- input('')
- print_on_previous_line()
-
-
-def c_print(string: str, head: int = 0, tail: int = 0) -> None:
- """Print string to center of screen.
-
- :param string: String to print
- :param head: Number of new lines to print before string
- :param tail: Number of new lines to print after string
- :return: None
- """
- for _ in range(head):
- print('')
-
- print(string.center(get_terminal_width()))
-
- for _ in range(tail):
- print('')
+ from src.common.gateway import GatewaySettings as GWSettings
def clear_screen(delay: float = 0.0) -> None:
- """Clear terminal window."""
+ """Clear the terminal window."""
time.sleep(delay)
sys.stdout.write(CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER)
sys.stdout.flush()
-def group_management_print(key: str,
- members: List[str],
- contact_list: 'ContactList',
- group_name: str = '') -> None:
- """List purported member status during group management."""
+def group_management_print(key: str, # Group management message identifier
+ members: List[bytes], # List of members' Onion public keys
+ contact_list: 'ContactList', # ContactList object
+ group_name: str = '' # Name of the group
+ ) -> None:
+ """Print group management command results."""
m = {NEW_GROUP: "Created new group '{}' with following members:".format(group_name),
ADDED_MEMBERS: "Added following accounts to group '{}':" .format(group_name),
ALREADY_MEMBER: "Following accounts were already in group '{}':".format(group_name),
@@ -115,155 +58,194 @@ def group_management_print(key: str,
UNKNOWN_ACCOUNTS: "Following unknown accounts were ignored:"}[key]
if members:
- m_list = ([contact_list.get_contact(m).nick for m in members if contact_list.has_contact(m)]
- + [m for m in members if not contact_list.has_contact(m)])
+ m_list = ([contact_list.get_contact_by_pub_key(m).nick for m in members if contact_list.has_pub_key(m)]
+ + [pub_key_to_onion_address(m) for m in members if not contact_list.has_pub_key(m)])
just_len = max(len(m) for m in m_list)
- justified = [m] + [" * {}".format(m.ljust(just_len)) for m in m_list]
- box_print(justified, head=1, tail=1)
+ justified = [m] + [f" * {m.ljust(just_len)}" for m in m_list]
+ m_print(justified, box=True)
-def message_printer(message: str, head: int = 0, tail: int = 0) -> None:
- """Print long message in the middle of the screen.
+def m_print(msg_list: Union[str, list], # List of lines to print
+ manual_proceed: bool = False, # Wait for user input before continuing
+ bold: bool = False, # When True, prints the message in bold style
+ center: bool = True, # When False, does not center message
+ box: bool = False, # When True, prints a box around the message
+ head_clear: bool = False, # When True, clears screen before printing message
+ tail_clear: bool = False, # When True, clears screen after printing message (requires delay)
+ delay: float = 0, # Delay before continuing
+ max_width: int = 0, # Maximum width of message
+ head: int = 0, # Number of new lines to print before the message
+ tail: int = 0, # Number of new lines to print after the message
+ ) -> None:
+ """Print message to screen.
- :param message: Message to print
- :param head: Number of new lines to print before message
- :param tail: Number of new lines to print after message
- :return: None
+ The message automatically wraps if the terminal is too narrow to
+ display the message.
"""
- for _ in range(head):
- print('')
+ if isinstance(msg_list, str):
+ msg_list = [msg_list]
- line_list = (textwrap.fill(message, min(49, (get_terminal_width() - 6))).split('\n'))
- for l in line_list:
- c_print(l)
+ terminal_width = get_terminal_width()
+ len_widest_msg = max(len(m) for m in msg_list)
+ spc_around_msg = 4 if box else 2
+ max_msg_width = terminal_width - spc_around_msg
- for _ in range(tail):
- print('')
+ if max_width:
+ max_msg_width = min(max_width, max_msg_width)
+
+ # Split any message too wide on separate lines
+ if len_widest_msg > max_msg_width:
+ new_msg_list = []
+ for msg in msg_list:
+ if len(msg) > max_msg_width:
+ new_msg_list.extend(textwrap.fill(msg, max_msg_width).split('\n'))
+ else:
+ new_msg_list.append(msg)
+
+ msg_list = new_msg_list
+ len_widest_msg = max(len(m) for m in msg_list)
+
+ if box or center:
+ # Insert whitespace around every line to make them equally long
+ msg_list = [f'{m:^{len_widest_msg}}' for m in msg_list]
+
+ if box:
+ # Add box chars around the message
+ msg_list = [f'│ {m} │' for m in msg_list]
+ msg_list.insert(0, '┌' + (len_widest_msg + 2) * '─' + '┐')
+ msg_list.append( '└' + (len_widest_msg + 2) * '─' + '┘')
+
+ # Print the message
+ if head_clear:
+ clear_screen()
+ print_spacing(head)
+
+ for message in msg_list:
+ if center:
+ message = message.center(terminal_width)
+ if bold:
+ message = BOLD_ON + message + NORMAL_TEXT
+ print(message)
+
+ print_spacing(tail)
+ time.sleep(delay)
+ if tail_clear:
+ clear_screen()
+
+ # Check if message needs to be manually dismissed
+ if manual_proceed:
+ input('')
+ print_on_previous_line()
-def phase(string: str,
- done: bool = False,
- head: int = 0,
- offset: int = 2) -> None:
- """Print name of next phase.
+def phase(string: str, # Description of the phase
+ done: bool = False, # When True, uses string as the phase completion message
+ head: int = 0, # Number of inserted new lines before print
+ offset: int = 4, # Offset of phase string from center to left
+ delay: float = 0.5 # Duration of phase completion message
+ ) -> None:
+ """Print the name of the next phase.
- Message about completion will be printed on same line.
-
- :param string: String to be printed
- :param done: When True, allows custom string to notify completion
- :param head: Number of inserted new lines before print
- :param offset: Offset of message from center to left
- :return: None
+ The notification of completion of the phase is printed on the same
+ line as the phase message.
"""
- for _ in range(head):
- print('')
+ print_spacing(head)
if string == DONE or done:
print(string)
- time.sleep(0.5)
+ time.sleep(delay)
else:
- string = '{}... '.format(string)
- indent = ((get_terminal_width() - (len(string) + offset)) // 2) * ' '
+ string += '... '
+ indent = ((get_terminal_width() - (len(string) + offset)) // 2) * ' '
print(indent + string, end='', flush=True)
-def print_fingerprint(fp: bytes, msg: str = '') -> None:
- """Print formatted message and fingerprint inside box.
+def print_fingerprint(fp: bytes, # Contact's fingerprint
+ msg: str = '' # Title message
+ ) -> None:
+ """Print a formatted message and fingerprint inside the box.
- :param fp: Contact's fingerprint
- :param msg: Title message
- :return: None
+ Truncate fingerprint for clean layout with three rows that have
+ five groups of five numbers. The resulting fingerprint has
+ 249.15 bits of entropy which is more than the symmetric security
+ of X448.
"""
-
- def base10encode(fingerprint: bytes) -> str:
- """Encode fingerprint to decimals for distinct communication.
-
- Base64 has 75% efficiency but encoding is bad as user might
- confuse upper case I with lower case l, 0 with O etc.
-
- Base58 has 73% efficiency and removes the problem of Base64
- explained above, but works only when manually typing
- strings because user has to take time to explain which
- letters were capitalized etc.
-
- Base16 has 50% efficiency and removes the capitalisation problem
- with Base58 but the choice is bad as '3', 'b', 'c', 'd'
- and 'e' are hard to distinguish in English language
- (fingerprints are usually read aloud over off band call).
-
- Base10 has 41% efficiency but as languages have evolved in a
- way that makes clear distinction between the way different
- numbers are pronounced: reading them is faster and less
- error prone. Compliments to OWS/WA developers for
- discovering this.
-
- Truncate fingerprint for clean layout with three rows that each
- have five groups of five numbers. The resulting fingerprint has
- 249.15 bits of entropy.
- """
- hex_representation = binascii.hexlify(fingerprint)
- dec_representation = str(int(hex_representation, base=16))
- return dec_representation[:75]
-
p_lst = [msg, ''] if msg else []
- parts = split_string(base10encode(fp), item_len=25)
- p_lst += [' '.join(p[i:i + 5] for i in range(0, len(p), 5)) for p in parts]
+ b10fp = b10encode(fp)[:(3*5*5)]
+ parts = split_string(b10fp, item_len=(5*5))
+ p_lst += [' '.join(split_string(p, item_len=5)) for p in parts]
- box_print(p_lst)
+ m_print(p_lst, box=True)
-def print_key(message: str,
- key_bytes: bytes,
- settings: 'Settings',
- no_split: bool = False,
- file_key: bool = False) -> None:
- """Print symmetric key.
+def print_key(message: str, # Instructive message
+ key_bytes: bytes, # 32-byte key to be displayed
+ settings: Union['Settings', 'GWSettings'], # Settings object
+ public_key: bool = False # When True, uses Testnet address WIF format
+ ) -> None:
+ """Print a symmetric key in WIF format.
- If local testing is not enabled, this function will add spacing in the
- middle of the key to help user keep track of typing progress. The ideal
- substring length in Cowan's `focus of attention` is four digits:
+ If local testing is not enabled, this function adds spacing in the
+ middle of the key, as well as guide letters to help the user keep
+ track of typing progress:
- https://en.wikipedia.org/wiki/Working_memory#Working_memory_as_part_of_long-term_memory
+ Local key encryption keys:
- The 51 char KDK is however not divisible by 4, and remembering which
- symbols are letters and if they are capitalized is harder than remembering
- just digits. 51 is divisible by 3. The 17 segments are displayed with guide
- letter A..Q to help keep track when typing:
+ A B C D E F G H I J K L M N O P Q
+ 5Ka 52G yNz vjF nM4 2jw Duu rWo 7di zgi Y8g iiy yGd 78L cCx mwQ mWV
- A B C D E F G H I J K L M N O P Q
- 5Ka 52G yNz vjF nM4 2jw Duu rWo 7di zgi Y8g iiy yGd 78L cCx mwQ mWV
+ X448 public keys:
- :param message: Message to print
- :param key_bytes: Decryption key
- :param settings: Settings object
- :param no_split: When True, does not split decryption key to chunks
- :param file_key When True, uses testnet address format
- :return: None
+ A B C D E F G H I J K L
+ 4EcuqaD ddsdsuc gBX2PY2 qR8hReA aeSN2oh JB9w5Cv q6BQjDa PPgzSvW 932aHio sT42SKJ Gu2PpS1 Za3Xrao
"""
- b58key = b58encode(key_bytes, file_key)
- if settings.local_testing_mode or no_split:
- box_print([message, b58key])
+ b58key = b58encode(key_bytes, public_key)
+ if settings.local_testing_mode:
+ m_print([message, b58key], box=True)
else:
- box_print([message,
- ' '.join('ABCDEFGHIJKLMNOPQ'),
- ' '.join(split_string(b58key, item_len=3))])
+ guide, chunk_len = (B58_PUBLIC_KEY_GUIDE, 7) if public_key else (B58_LOCAL_KEY_GUIDE, 3)
+
+ key = ' '.join(split_string(b58key, item_len=chunk_len))
+ m_print([message, guide, key], box=True)
-def print_on_previous_line(reps: int = 1,
- delay: float = 0.0,
- flush: bool = False) -> None:
- """Next message will be printed on upper line.
+def print_title(operation: str) -> None:
+ """Print the TFC title."""
+ operation_name = {TX: TXP, RX: RXP, NC: RP}[operation]
+ m_print(f"{TFC} - {operation_name} {VERSION}", bold=True, head_clear=True, head=1, tail=1)
- :param reps: Number of times to repeat action
- :param delay: Time to sleep before clearing lines above
- :param flush: Flush stdout when true
- :return: None
- """
+
+def print_on_previous_line(reps: int = 1, # Number of times to repeat the action
+ delay: float = 0.0, # Time to sleep before clearing lines above
+ flush: bool = False # Flush stdout when true
+ ) -> None:
+ """Next message is printed on upper line."""
time.sleep(delay)
for _ in range(reps):
sys.stdout.write(CURSOR_UP_ONE_LINE + CLEAR_ENTIRE_LINE)
if flush:
sys.stdout.flush()
+
+
+def print_spacing(count: int = 0) -> None:
+ """Print `count` many new-lines."""
+ for _ in range(count):
+ print()
+
+
+def rp_print(message: str, # Message to print
+ ts: Optional['datetime'] = None, # Timestamp for displayed event
+ bold: bool = False # When True, prints the message in bold style
+ ) -> None:
+ """Print an event in Relay Program."""
+ if ts is None:
+ ts = datetime.now()
+ ts_fmt = ts.strftime('%b %d - %H:%M:%S.%f')[:-4]
+
+ if bold:
+ print(f"{BOLD_ON}{ts_fmt} - {message}{NORMAL_TEXT}")
+ else:
+ print(f"{ts_fmt} - {message}")
diff --git a/src/common/path.py b/src/common/path.py
index 2f96576..636ab49 100644
--- a/src/common/path.py
+++ b/src/common/path.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,52 +16,47 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
+along with TFC. If not, see .
"""
import os
import readline
-import time
import _tkinter
import typing
-from tkinter import filedialog, Tk
-from typing import Union
+from typing import Any, List, Optional
+
+import tkinter
+from tkinter import filedialog
from src.common.exceptions import FunctionReturn
-from src.common.output import c_print, print_on_previous_line
+from src.common.output import m_print, print_on_previous_line
if typing.TYPE_CHECKING:
from src.common.db_settings import Settings
- from src.nh.settings import Settings as nhSettings
-def ask_path_gui(prompt_msg: str,
- settings: Union['Settings', 'nhSettings'],
- get_file: bool = False) -> str:
- """Prompt (file) path with Tkinter / CLI prompt.
-
- :param prompt_msg: Directory selection prompt
- :param settings: Settings object
- :param get_file: When True, prompts for path to file instead of directory
- :return: Selected directory / file
- """
+def ask_path_gui(prompt_msg: str, # Directory selection prompt
+ settings: 'Settings', # Settings object
+ get_file: bool = False # When True, prompts for a path to file instead of a directory
+ ) -> str: # Selected directory or file
+ """Prompt (file) path with Tkinter / CLI prompt."""
try:
if settings.disable_gui_dialog:
raise _tkinter.TclError
- root = Tk()
+ root = tkinter.Tk()
root.withdraw()
if get_file:
- file_path = filedialog.askopenfilename(title=prompt_msg)
+ file_path = filedialog.askopenfilename(title=prompt_msg) # type: str
else:
file_path = filedialog.askdirectory(title=prompt_msg)
root.destroy()
if not file_path:
- raise FunctionReturn(("File" if get_file else "Path") + " selection aborted.")
+ raise FunctionReturn(("File" if get_file else "Path") + " selection aborted.", head_clear=True)
return file_path
@@ -71,12 +67,12 @@ def ask_path_gui(prompt_msg: str,
class Completer(object):
"""readline tab-completer for paths and files."""
- def __init__(self, get_file):
+ def __init__(self, get_file: bool) -> None:
"""Create new completer object."""
self.get_file = get_file
- def listdir(self, root):
- """List directory 'root' appending the path separator to subdirs."""
+ def listdir(self, root: str) -> Any:
+ """List directory 'root' appending the path separator to sub-dirs."""
res = []
for name in os.listdir(root):
path = os.path.join(root, name)
@@ -88,18 +84,18 @@ class Completer(object):
res.append(name)
return res
- def complete_path(self, path=None):
- """Perform completion of filesystem path."""
+ def complete_path(self, path: Optional[str] = None) -> Any:
+ """Perform completion of the filesystem path."""
if not path:
return self.listdir('.')
- dirname, rest = os.path.split(path)
- tmp = dirname if dirname else '.'
- res = [os.path.join(dirname, p) for p in self.listdir(tmp) if p.startswith(rest)]
+ dir_name, rest = os.path.split(path)
+ tmp = dir_name if dir_name else '.'
+ matches = [os.path.join(dir_name, p) for p in self.listdir(tmp) if p.startswith(rest)]
# More than one match, or single match which does not exist (typo)
- if len(res) > 1 or not os.path.exists(path):
- return res
+ if len(matches) > 1 or not os.path.exists(path):
+ return matches
# Resolved to a single directory: return list of files below it
if os.path.isdir(path):
@@ -108,75 +104,81 @@ class Completer(object):
# Exact file match terminates this completion
return [path + ' ']
- def path_complete(self, args=None):
- """Return list of directories from current directory."""
+ def path_complete(self, args: Optional[List[str]] = None) -> Any:
+ """Return the list of directories from the current directory."""
if not args:
return self.complete_path('.')
# Treat the last arg as a path and complete it
return self.complete_path(args[-1])
- def complete(self, _, state):
+ def complete(self, _: str, state: int) -> Any:
"""Generic readline completion entry point."""
line = readline.get_line_buffer().split()
return self.path_complete(line)[state]
-def ask_path_cli(prompt_msg: str, get_file: bool = False) -> str:
+def ask_path_cli(prompt_msg: str, # File selection prompt
+ get_file: bool = False # When True, prompts for a file instead of a directory
+ ) -> str: # Selected directory or file
"""\
- Prompt file location / store dir for
- file with tab-complete supported CLI.
-
- :param prompt_msg: File selection prompt
- :param get_file: When True, prompts for file instead of directory
- :return: Selected directory
+ Prompt file location or store directory for a file with tab-complete
+ supported CLI.
"""
- comp = Completer(get_file)
readline.set_completer_delims(' \t\n;')
readline.parse_and_bind('tab: complete')
- readline.set_completer(comp.complete)
+ readline.set_completer(Completer(get_file).complete)
print('')
if get_file:
- while True:
- try:
- path_to_file = input(prompt_msg + ": ")
-
- if not path_to_file:
- print_on_previous_line()
- raise KeyboardInterrupt
-
- if os.path.isfile(path_to_file):
- if path_to_file.startswith('./'):
- path_to_file = path_to_file[2:]
- print('')
- return path_to_file
-
- c_print("File selection error.", head=1, tail=1)
- time.sleep(1.5)
- print_on_previous_line(reps=4)
-
- except KeyboardInterrupt:
- print_on_previous_line()
- raise FunctionReturn("File selection aborted.")
-
+ return cli_get_file(prompt_msg)
else:
- while True:
- try:
- directory = input(prompt_msg + ": ")
+ return cli_get_path(prompt_msg)
- if directory.startswith('./'):
- directory = directory[2:]
- if not directory.endswith(os.sep):
- directory += os.sep
+def cli_get_file(prompt_msg: str) -> str:
+ """Ask the user to specify file to load."""
+ while True:
+ try:
+ path_to_file = input(prompt_msg + ": ")
- if not os.path.isdir(directory):
- c_print("Error: Invalid directory.", head=1, tail=1)
- print_on_previous_line(reps=4, delay=1.5)
- continue
+ if not path_to_file:
+ print_on_previous_line()
+ raise KeyboardInterrupt
- return directory
+ if os.path.isfile(path_to_file):
+ if path_to_file.startswith('./'):
+ path_to_file = path_to_file[len('./'):]
+ print('')
+ return path_to_file
- except KeyboardInterrupt:
- raise FunctionReturn("File path selection aborted.")
+ m_print("File selection error.", head=1, tail=1)
+ print_on_previous_line(reps=4, delay=1)
+
+ except (EOFError, KeyboardInterrupt):
+ print_on_previous_line()
+ raise FunctionReturn("File selection aborted.", head_clear=True)
+
+
+def cli_get_path(prompt_msg: str) -> str:
+ """Ask the user to specify path for file."""
+ while True:
+ try:
+ directory = input(prompt_msg + ": ")
+
+ if directory.startswith('./'):
+ directory = directory[len('./'):]
+
+ if not directory.endswith(os.sep):
+ directory += os.sep
+
+ if not os.path.isdir(directory):
+ m_print("Error: Invalid directory.", head=1, tail=1)
+ print_on_previous_line(reps=4, delay=1)
+ continue
+
+ return directory
+
+ except (EOFError, KeyboardInterrupt):
+ print_on_previous_line()
+ raise FunctionReturn("File path selection aborted.", head_clear=True)
diff --git a/src/common/reed_solomon.py b/src/common/reed_solomon.py
old mode 100755
new mode 100644
index d7f62fd..aa2070a
--- a/src/common/reed_solomon.py
+++ b/src/common/reed_solomon.py
@@ -1,123 +1,506 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3.6
# -*- coding: utf-8 -*-
"""
# Copyright (c) 2012-2015 Tomer Filiba
# Copyright (c) 2015 rotorgit
-# Copyright (c) 2015 Stephen Larroque
+# Copyright (c) 2015-2017 Stephen Larroque
-The code below is edited and used under public domain license:
+The Reed Solomon erasure code library has been released to the public domain.
+
+https://github.com/lrq3000/reedsolomon/blob/master/LICENSE
https://github.com/tomerfiliba/reedsolomon/blob/master/LICENSE
-The comments/unused code have been intentionally removed. Original code is at
-https://github.com/tomerfiliba/reedsolomon/blob/master/reedsolo.py
+Reed Solomon
+============
+
+A pure-python universal errors-and-erasures Reed-Solomon Codec
+ https://en.wikipedia.org/wiki/Reed%E2%80%93Solomon_error_correction
+
+based on the wonderful tutorial at
+ https://en.wikiversity.org/wiki/Reed%E2%80%93Solomon_codes_for_coders
+ written by "Bobmath" and "LRQ3000".
+
+The code of wikiversity is here consolidated into a nice API with
+exceptions handling. The algorithm can correct up to 2*e+v <= nsym,
+where e is the number of errors, v the number of erasures and
+nsym = n-k = the number of ECC (error correction code) symbols. This
+means that you can either correct exactly floor(nsym/2) errors, or nsym
+erasures (errors where you know the position), and a combination of
+both errors and erasures. The code should work on pretty much any
+reasonable version of python (2.4-3.5), but I'm only testing on 2.7-3.4.
+
+.. note::
+ The codec is universal, meaning that it can decode any message
+ encoded by another RS encoder as long as you provide the correct
+ parameters. Note however that if you use higher fields
+ (i.e., bigger c_exp), the algorithms will be slower, first because
+ we cannot then use the optimized bytearray() structure but only
+ array.array('i', ...), and also because Reed-Solomon's complexity is
+ quadratic (both in encoding and decoding), so this means that the
+ longer your messages, the longer it will take to encode/decode
+ (quadratically!).
+
+ The algorithm itself can handle messages up to (2^c_exp)-1 symbols,
+ including the ECC symbols, and each symbol can have a value of up to
+ (2^c_exp)-1 (indeed, both the message length and the maximum value
+ for one character is constrained by the same mathematical reason).
+ By default, we use the field GF(2^8), which means that you are
+ limited to values between 0 and 255 (perfect to represent a single
+ hexadecimal symbol on computers, so you can encode any binary
+ stream) and limited to messages+ecc of maximum length 255. However,
+ you can "chunk" longer messages to fit them into the message length
+ limit. The ``RSCodec`` class will automatically apply chunking, by
+ splitting longer messages into chunks and encode/decode them
+ separately; it shouldn't make a difference from an API perspective
+ (i.e., from your POV).
+::
+
+ # Initialization
+ # >>> from reedsolo import RSCodec
+ # >>> rsc = RSCodec(10) # 10 ecc symbols
+
+ # # Encoding
+ # >>> rsc.encode([1,2,3,4])
+ # b'\x01\x02\x03\x04,\x9d\x1c+=\xf8h\xfa\x98M'
+ # >>> rsc.encode(bytearray([1,2,3,4]))
+ # bytearray(b'\x01\x02\x03\x04,\x9d\x1c+=\xf8h\xfa\x98M')
+ # >>> rsc.encode(b'hello world')
+ # b'hello world\xed%T\xc4\xfd\xfd\x89\xf3\xa8\xaa'
+ # # Note that chunking is supported transparently to encode any string length.
+
+ # # Decoding (repairing)
+ # >>> rsc.decode(b'hello world\xed%T\xc4\xfd\xfd\x89\xf3\xa8\xaa')[0]
+ # b'hello world'
+ # >>> rsc.decode(b'heXlo worXd\xed%T\xc4\xfdX\x89\xf3\xa8\xaa')[0] # 3 errors
+ # b'hello world'
+ # >>> rsc.decode(b'hXXXo worXd\xed%T\xc4\xfdX\x89\xf3\xa8\xaa')[0] # 5 errors
+ # b'hello world'
+ # >>> rsc.decode(b'hXXXo worXd\xed%T\xc4\xfdXX\xf3\xa8\xaa')[0] # 6 errors - fail
+ # Traceback (most recent call last):
+ # ...
+ # ReedSolomonError: Could not locate error
+
+ # >>> rsc = RSCodec(12) # using 2 more ecc symbols (to correct max 6 errors or 12 erasures)
+ # >>> rsc.encode(b'hello world')
+ # b'hello world?Ay\xb2\xbc\xdc\x01q\xb9\xe3\xe2='
+ # >>> rsc.decode(b'hello worXXXXy\xb2XX\x01q\xb9\xe3\xe2=')[0] # 6 errors - ok
+ # b'hello world'
+ # >>> rsc.decode(b'helXXXXXXXXXXy\xb2XX\x01q\xb9\xe3\xe2=', erase_pos=[3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 15, 16])[0]
+ # b'hello world'
+
+ # Checking
+ >> rsc.check(b'hello worXXXXy\xb2XX\x01q\xb9\xe3\xe2=')
+ [False]
+ >> rmes, rmesecc = rsc.decode(b'hello worXXXXy\xb2XX\x01q\xb9\xe3\xe2=')
+ >> rsc.check(rmesecc)
+ [True]
+
+ # To use longer chunks or bigger values than 255 (may be very slow)
+ >> rsc = RSCodec(12, nsize=4095) # always use a power of 2 minus 1
+ >> rsc = RSCodec(12, c_exp=12) # alternative way to set nsize=4095
+ >> mes = 'a' * (4095-12)
+ >> mesecc = rsc.encode(mes)
+ >> mesecc[2] = 1
+ >> mesecc[-1] = 1
+ >> rmes, rmesecc = rsc.decode(mesecc)
+ >> rsc.check(mesecc)
+ [False]
+ >> rsc.check(rmesecc)
+ [True]
+
+ If you want full control, you can skip the API and directly use the
+ library as-is. Here's how:
+
+ First you need to init the precomputed tables:
+ >> import reedsolo as rs
+ >> rs.init_tables(0x11d)
+
+ Pro tip: if you get the error: ValueError: byte must be in
+ range(0, 256), please check that your prime polynomial is correct
+ for your field.
+
+ Pro tip2: by default, you can only encode messages of max length
+ and max symbol value = 256. If you want to encode bigger messages,
+ please use the following (where c_exp is the exponent of your
+ Galois Field, e.g., 12 = max length 2^12 = 4096):
+ >> prim = rs.find_prime_polys(c_exp=12, fast_primes=True, single=True)
+ >> rs.init_tables(c_exp=12, prim=prim)
+
+ Let's define our RS message and ecc size:
+ >> n = 255 # length of total message+ecc
+ >> nsym = 12 # length of ecc
+ >> mes = "a" * (n-nsym) # generate a sample message
+
+ To optimize, you can precompute the generator polynomial:
+ >> gen = rs.rs_generator_poly_all(n)
+
+ Then to encode:
+ >> mesecc = rs.rs_encode_msg(mes, nsym, gen=gen[nsym])
+
+ Let's tamper our message:
+ >> mesecc[1] = 0
+
+ To decode:
+ >> rmes, recc = rs.rs_correct_msg(mesecc, nsym, erase_pos=erase_pos)
+
+ Note that both the message and the ecc are corrected (if possible
+ of course).
+
+ Pro tip: if you know a few erasures positions, you can specify them
+ in a list `erase_pos` to double the repair power. But you can also
+ just specify an empty list.
+
+ If the decoding fails, it will normally automatically check and
+ raise a ReedSolomonError exception that you can handle. However
+ if you want to manually check if the repaired message is correct,
+ you can do so:
+ >> rs.rs_check(rmes + recc, nsym)
+
+ Note: if you want to use multiple Reed-Solomon with different
+ parameters, you need to backup the globals and restore them before
+ calling reedsolo functions:
+
+ >> rs.init_tables()
+ >> global gf_log, gf_exp, field_charac
+ >> bak_gf_log, bak_gf_exp, bak_field_charac = gf_log, gf_exp, field_charac
+
+ Then at anytime, you can do:
+ >> global gf_log, gf_exp, field_charac
+ >> gf_log, gf_exp, field_charac = bak_gf_log, bak_gf_exp, bak_field_charac
+ >> mesecc = rs.rs_encode_msg(mes, nsym)
+ >> rmes, recc = rs.rs_correct_msg(mesecc, nsym)
+
+ The globals backup is not necessary if you use RSCodec, it will be
+ automatically managed. Read the source code's comments for more info
+ about how it works, and for the various parameters you can setup if
+ you need to interface with other RS codecs.
+
+TO DO IMPORTANT: try to keep the same convention for the ordering of
+polynomials inside lists throughout the code and functions (because
+for now, there are a lot of list reversing in order to make it work,
+you never know the order of a polynomial, i.e., if the first coefficient
+is the major degree or the constant term...).
"""
import itertools
+import math
import shutil
+from array import array
+from typing import Any, Dict, Generator, List, Optional, Tuple, Union
+
class ReedSolomonError(Exception):
- """Reed solomon error stub."""
+ """Reed-Solomon exception stub."""
pass
-gf_exp = bytearray([1] * 512)
-gf_log = bytearray(256)
+
+"""
+For efficiency, gf_exp[] has size 2*GF_SIZE, so that a simple
+multiplication of two numbers can be resolved without calling % 255.
+For more info on how to generate this extended exponentiation table,
+see paper:
+ "Fast software implementation of finite field operations",
+ Cheng Huang and Lihao Xu
+ Washington University in St. Louis, Tech. Rep (2003).
+"""
+_bytearray = bytearray # type: Any
+gf_exp = _bytearray([1] * 512)
+gf_log = _bytearray(256)
field_charac = int(2 ** 8 - 1)
-def find_prime_polys(generator=2, c_exp=8):
- """\
- Compute the list of prime polynomials for the given
- generator and galois field characteristic exponent.
+# Galois Field elements maths
+
+def rwh_primes1(n: int) -> List[int]:
+ """Returns a list of primes < n
+ https://stackoverflow.com/questions/2068372/fastest-way-to-list-all-primes-below-n/3035188#3035188
"""
- root_charac = 2
+ sieve = [True] * int(n / 2)
+ for i in range(3, int(n ** 0.5) + 1, 2):
+ if sieve[int(i / 2)]:
+ sieve[int((i * i) / 2)::i] = [False] * int((n - i * i - 1) / (2 * i) + 1)
+ return [2] + [2 * i + 1 for i in range(1, int(n / 2)) if sieve[i]]
+
+
+def find_prime_polys(generator: int = 2,
+ c_exp: int = 8,
+ fast_primes: bool = False,
+ single: bool = False
+ ) -> Any:
+ """
+ Compute the list of prime polynomials for the given generator and
+ Galois Field characteristic exponent.
+
+ fast_primes will output less results but will be significantly faster.
+ Single will output the first prime polynomial found, so if all you
+ want is to just find one prime polynomial to generate the LUT for
+ Reed-Solomon to work, then just use that.
+
+ A prime polynomial (necessarily irreducible) is necessary to reduce
+ the multiplications in the Galois Field, so as to avoid overflows.
+
+ Why do we need a "prime polynomial"? Can't we just reduce modulo 255
+ (for GF(2^8) for example)? Because we need the values to be unique.
+
+ For example: if the generator (alpha) = 2 and c_exp = 8 (GF(2^8) == GF(256)),
+ then the generated Galois Field (0, 1, α, α^1, α^2, ..., α^(p-1))
+ will be: 0, 1, 2, 4, 8, 16, etc. However,
+ upon reaching 128, the next value will be doubled (i.e., next power of
+ 2), which will give 256. Then we must reduce, because we have
+ overflowed above the maximum value of 255. But, if we modulo 255,
+ this will generate 256 == 1. Then 2, 4, 8, 16, etc. giving us a
+ repeating pattern of numbers. This is very bad, as it's then not
+ anymore a bijection (i.e., a non-zero value doesn't have a unique
+ index). That's why we can't just modulo 255, but we need another
+ number above 255, which is called the prime polynomial.
+ # Why so much hassle? Because we are using precomputed look-up
+ tables for multiplication: instead of multiplying a*b, we precompute
+ alpha^a, alpha^b and alpha^(a+b), so that we can just use our lookup
+ table at alpha^(a+b) and get our result. But just like in our
+ original field we had 0,1,2,...,p-1 distinct unique values, in our
+ "LUT" field using alpha we must have unique distinct values (we
+ don't care that they are different from the original field as long
+ as they are unique and distinct). That's why we need to avoid
+ duplicated values, and to avoid duplicated values we need to use a
+ prime irreducible polynomial.
+
+ # Here is implemented a brute-force approach to find all these prime
+ polynomials, by generating every possible prime polynomials (i.e.,
+ every integers between field_charac+1 and field_charac*2), and then
+ we build the whole Galois Field, and we reject the candidate prime
+ polynomial if it duplicates even one value or if it generates a
+ value above field_charac (i.e., cause an overflow).
+
+ Note that this algorithm is slow if the field is too big (above 12),
+ because it's an exhaustive search algorithm. There are probabilistic
+ approaches, and almost surely prime approaches, but there is no
+ deterministic polynomial time algorithm to find irreducible monic
+ polynomials. More info can be found at:
+ https://people.mpi-inf.mpg.de/~csaha/lectures/lec9.pdf
+
+ Another faster algorithm may be found at
+ "Finding irreducible polynomials over finite fields."
+ Adleman, Leonard M., and Hendrik W. Lenstra.
+
+ Proceedings of the eighteenth annual
+ ACM Symposium on Theory of computing. ACM, 1986.
+ """
+
+ # Prepare the finite field characteristic (2^p - 1), this
+ # also represent the maximum possible value in this field
+ root_charac = 2 # we're in GF(2)
field_charac_ = int(root_charac ** c_exp - 1)
field_charac_next = int(root_charac ** (c_exp + 1) - 1)
- prim_candidates = range(field_charac_ + 2, field_charac_next, root_charac)
+ if fast_primes:
+ # Generate maybe prime polynomials and
+ # check later if they really are irreducible
+ prim_candidates = rwh_primes1(field_charac_next)
+ prim_candidates = [x for x in prim_candidates if x > field_charac_] # filter out too small primes
+ else:
+ # try each possible prime polynomial, but skip even numbers
+ # (because divisible by 2 so necessarily not irreducible)
+ prim_candidates = list(range(field_charac_ + 2, field_charac_next, root_charac))
+ # Start of the main loop
correct_primes = []
+
+ # try potential candidates primitive irreducible polys
for prim in prim_candidates:
- seen = bytearray(field_charac_ + 1)
- conflict = False
- x = 1
+ # memory variable to indicate if a value was already generated
+ # in the field (value at index x is set to 1) or not (set to
+ # 0 by default)
+ seen = _bytearray(field_charac_ + 1)
+ conflict = False # flag to know if there was at least one conflict
+
+ # Second loop, build the whole Galois Field
+ x = 1
for i in range(field_charac_):
+ # Compute the next value in the field
+ # (i.e., the next power of alpha/generator)
x = gf_mult_nolut(x, generator, prim, field_charac_ + 1)
+
+ # Rejection criterion: if the value overflowed (above
+ # field_charac) or is a duplicate of a previously generated
+ # power of alpha, then we reject this polynomial (not prime)
if x > field_charac_ or seen[x] == 1:
conflict = True
break
+
+ # Else we flag this value as seen (to maybe detect future
+ # duplicates), and we continue onto the next power of alpha
else:
seen[x] = 1
+ # End of the second loop: if there's no conflict (no overflow
+ # nor duplicated value), this is a prime polynomial!
if not conflict:
correct_primes.append(prim)
+ if single:
+ return prim
+ # Return the list of all prime polynomials.
return correct_primes
+ # You can use the following to print the hexadecimal representation
+ # of each prime polynomial: print [hex(i) for i in correct_primes]
-def init_tables(prim=0x11d, generator=2, c_exp=8):
+
+def init_tables(prim: int = 0x11d,
+ generator: int = 2,
+ c_exp: int = 8
+ ) -> List[Union[Any, Any, int]]:
"""\
Precompute the logarithm and anti-log tables for faster computation
later, using the provided primitive polynomial. These tables are
used for multiplication/division since addition/substraction are
- simple XOR operations inside GF of characteristic 2. The basic idea
- is quite simple: since b**(log_b(x), log_b(y)) == x * y given any
- number b (the base or generator of the logarithm), then we can use
- any number b to precompute logarithm and anti-log (exponentiation)
- tables to use for multiplying two numbers x and y.
+ simple XOR operations inside GF of characteristic 2.
+ The basic idea is quite simple: since b**(log_b(x), log_b(y)) == x * y
+ given any number b (the base or generator of the logarithm), then we
+ can use any number b to precompute logarithm and anti-log
+ (exponentiation) tables to use for multiplying two numbers x and y.
That's why when we use a different base/generator number, the log
and anti-log tables are drastically different, but the resulting
computations are the same given any such tables.
-
- For more information, see
+ For more info, see
https://en.wikipedia.org/wiki/Finite_field_arithmetic#Implementation_tricks
+
+ Generator is the generator number (the "increment" that will be used
+ to walk through the field by multiplication, this must be a prime
+ number). This is basically the base of the logarithm/anti-log tables.
+ Also often noted "alpha" in academic books.
+
+ Prim is the primitive/prime (binary) polynomial and must be
+ irreducible (i.e., it can't be represented as the product of two smaller
+ polynomials). It's a polynomial in the binary sense: each bit is a
+ coefficient, but in fact it's an integer between field_charac+1 and
+ field_charac*2, and not a list of gf values. The prime polynomial
+ will be used to reduce the overflows back into the range of the
+ Galois Field without duplicating values (all values should be
+ unique). See the function find_prime_polys() and:
+ https://research.swtch.com/field and https://www.pclviewer.com/rs2/galois.html
+
+ Note that the choice of generator or prime polynomial doesn't matter
+ very much: any two finite fields of size p^n have identical
+ structure, even if they give the individual elements different names
+ (i.e., the coefficients of the codeword will be different, but the
+ final result will be the same: you can always correct as many
+ errors/erasures with any choice for those parameters). That's why it
+ makes sense to refer to all the finite fields, and all decoders
+ based on Reed-Solomon, of size p^n as one concept: GF(p^n). It can
+ however impact sensibly the speed (because some parameters will
+ generate sparser tables).
+
+ c_exp is the exponent for the field's characteristic GF(2^c_exp)
"""
+ # Redefine _bytearray() in case we need to
+ # support integers or messages of length > 256
+ global _bytearray
+ if c_exp <= 8:
+ _bytearray = bytearray
+ else:
+ def _bytearray(obj: Any = 0, encoding: str = "latin-1") -> array:
+ """Fake bytearray replacement, supporting int values above 255"""
+ # always use Latin-1 and not UTF8 because Latin-1 maps the
+ # first 256 characters to their byte value equivalents. UTF8
+ # may mangle your data (particularly at value 128).
+ if isinstance(obj, str): # obj is a string, convert to list of ints
+ obj = obj.encode(encoding)
+ obj = [int(c) for c in obj]
+
+ # Compatibility with list preallocation bytearray(int)
+ elif isinstance(obj, int):
+ obj = [0] * obj
+
+ # Else obj is a list of int, it's ok
+ return array("i", obj)
+
+ # Init global tables
global gf_exp, gf_log, field_charac
field_charac = int(2 ** c_exp - 1)
- gf_exp = bytearray(field_charac * 2)
- gf_log = bytearray(field_charac + 1)
- x = 1
- for i in range(field_charac):
- gf_exp[i] = x
- gf_log[x] = i
- x = fg_mult_nolut(x, generator, prim, field_charac + 1)
+ gf_exp = _bytearray(field_charac * 2)
+ # Anti-log (exponential) table. The first two
+ # elements will always be [GF256int(1), generator]
+ # log table, log[0] is impossible and thus unused
+ gf_log = _bytearray(field_charac + 1)
+
+ # For each possible value in the Galois Field 2^8, we will
+ # pre-compute the logarithm and anti-logarithm (exponential) of this
+ # value To do that, we generate the Galois Field F(2^p) by building
+ # a list starting with the element 0 followed by the (p-1)
+ # successive powers of the generator α : 1, α, α^1, α^2, ..., α^(p-1).
+ x = 1
+
+ # We could skip index 255 which is equal to index 0 because of modulo:
+ # g^255==g^0 but either way, this does not change the later outputs
+ # (i.e., the ecc symbols will be the same either way).
+ for i in range(field_charac):
+ gf_exp[i] = x # compute anti-log for this value and store it in a table
+ gf_log[x] = i # compute log at the same time
+ x = gf_mult_nolut(x, generator, prim, field_charac + 1)
+
+ # If you use only generator==2 or a power of 2, you can use the
+ # following which is faster than gf_mult_noLUT():
+ # x <<= 1 # multiply by 2 (change 1 by another number y to
+ # multiply by a power of 2^y)
+ # if x & 0x100: # similar to x >= 256, but a lot faster
+ # (because 0x100 == 256)
+ # x ^= prim # subtract the primary polynomial from the current
+ # value (instead of 255, so that we get a unique set made of
+ # coprime numbers); this is the core of the tables generation
+
+ # Optimization: double the size of the anti-log table so that we
+ # don't need to mod 255 to stay inside the bounds (because we will
+ # mainly use this table for the multiplication of two GF numbers,
+ # no more).
for i in range(field_charac, field_charac * 2):
gf_exp[i] = gf_exp[i - field_charac]
- return [gf_log, gf_exp]
+ return [gf_log, gf_exp, field_charac]
-def gf_sub(x, y):
+def gf_add(x: int, y: int) -> int:
return x ^ y
-def gf_inverse(x):
+def gf_sub(x: int, y: int) -> int:
+ # In binary Galois Field, subtraction is
+ # just the same as addition (since we mod 2)
+ return x ^ y
+
+
+def gf_neg(x: int) -> int:
+ return x
+
+
+def gf_inverse(x: int) -> int:
+ # gf_inverse(x) == gf_div(1, x)
return gf_exp[field_charac - gf_log[x]]
-def gf_mul(x, y):
+def gf_mul(x: int, y: int) -> int:
if x == 0 or y == 0:
return 0
return gf_exp[(gf_log[x] + gf_log[y]) % field_charac]
-def gf_div(x, y):
+def gf_div(x: int, y: int) -> int:
if y == 0:
- raise ReedSolomonError("Divider was zero.")
+ raise ZeroDivisionError()
if x == 0:
return 0
return gf_exp[(gf_log[x] + field_charac - gf_log[y]) % field_charac]
-def gf_pow(x, power):
+def gf_pow(x: int, power: int) -> int:
return gf_exp[(gf_log[x] * power) % field_charac]
-def gf_mult_nolut_slow(x, y, prim=0):
+def gf_mult_nolut_slow(x: int, y: int, prim: int = 0) -> int:
"""\
Multiplication in Galois Fields without using a precomputed look-up
table (and thus it's slower) by using the standard carry-less
@@ -125,7 +508,8 @@ def gf_mult_nolut_slow(x, y, prim=0):
polynomial.
"""
- def cl_mult(x_, y_):
+ # Define bitwise carry-less operations as inner functions
+ def cl_mult(x_: int, y_: int) -> int:
"""Bitwise carry-less multiplication on integers"""
z = 0
i = 0
@@ -135,125 +519,225 @@ def gf_mult_nolut_slow(x, y, prim=0):
i += 1
return z
- def bit_length(n):
+ def bit_length(n: int) -> int:
"""\
Compute the position of the most significant bit
(1) of an integer. Equivalent to int.bit_length()
"""
bits = 0
- while n >> bits: bits += 1
+ while n >> bits:
+ bits += 1
return bits
- def cl_div(dividend, divisor=None):
+ def cl_div(dividend: int, divisor: int) -> int:
"""\
Bitwise carry-less long division on
- integers and returns the remainder.
+ integers and returns the remainder
"""
-
+ # Compute the position of the most
+ # significant bit for each integers
dl1 = bit_length(dividend)
dl2 = bit_length(divisor)
- if dl1 < dl2:
+ # If the dividend is smaller than the divisor, just exit
+ if dl1 < dl2: # pragma: no cover
return dividend
+ # Else, align the most significant 1 of the divisor to the
+ # most significant 1 of the dividend (by shifting the divisor)
for i in range(dl1 - dl2, -1, -1):
+ # Check that the dividend is divisible (useless for the
+ # first iteration but important for the next ones)
if dividend & (1 << i + dl2 - 1):
+ # If divisible, then shift the divisor to align the most
+                # significant bits and XOR (carry-less subtraction)
dividend ^= divisor << i
return dividend
+ # --- Main GF multiplication routine ---
+
+ # Multiply the gf numbers
result = cl_mult(x, y)
+
+ # Then do a modular reduction (i.e., remainder from the division) with
+ # an irreducible primitive polynomial so that it stays inside GF bounds
if prim > 0:
result = cl_div(result, prim)
return result
-def fg_mult_nolut(x, y, prim=0, field_charac_full=256, carryless=True):
+def gf_mult_nolut(x: int,
+ y: int,
+ prim: int = 0,
+ field_charac_full: int = 256,
+ carryless: bool = True
+ ) -> int:
"""\
- Galois Field integer multiplication using Russian Peasant Multiplication
- algorithm (faster than the standard multiplication + modular reduction).
- If prim is 0 and carryless=False, then the function produces the result
- for a standard integers multiplication (no carry-less arithmetics nor
- modular reduction).
+ Galois Field integer multiplication using Russian Peasant
+ Multiplication algorithm (faster than the standard multiplication
+ + modular reduction). If prim is 0 and carryless=False, then the
+ function produces the result for a standard integers multiplication
+ (no carry-less arithmetics nor modular reduction).
"""
r = 0
- while y:
+ while y: # while y is above 0
if y & 1:
+ # y is odd, then add the corresponding x to r (the sum of
+ # all x's corresponding to odd y's will give the final
+ # product). Note that since we're in GF(2), the addition is
+ # in fact an XOR (very important because in GF(2) the
+ # multiplication and additions are carry-less, thus it
+ # changes the result!).
r = r ^ x if carryless else r + x
- y >>= 1
- x <<= 1
+ y >>= 1 # equivalent to y // 2
+ x <<= 1 # equivalent to x*2
if prim > 0 and x & field_charac_full:
+ # GF modulo: if x >= 256 then apply modular reduction using
+ # the primitive polynomial (we just substract, but since the
+ # primitive number can be above 256 then we directly XOR).
x ^= prim
return r
-def gf_mult_nolut(x, y, prim=0, field_charac_full=256, carryless=True):
- """\
- Galois Field integer multiplication using Russian Peasant
- Multiplication algorithm (faster than the standard
- multiplication + modular reduction).
+# Galois Field polynomials maths
- If prim is 0 and carryless=False, then the function produces the
- result for a standard integers multiplication (no carry-less
- arithmetics nor modular reduction)."""
-
- r = 0
- while y:
- if y & 1: r = r ^ x if carryless else r + x
- y >>= 1
- x <<= 1
- if prim > 0 and x & field_charac_full: x ^= prim
- return r
+def gf_poly_scale(p: bytes, x: int) -> Any:
+ return _bytearray([gf_mul(p[i], x) for i in range(len(p))])
-def gf_poly_scale(p, x):
- return bytearray([gf_mul(p[i], x) for i in range(len(p))])
+def gf_poly_add(p: bytes, q: Any) -> Any:
+ r = _bytearray(max(len(p), len(q)))
-
-def gf_poly_add(p, q):
- r = bytearray(max(len(p), len(q)))
r[len(r) - len(p):len(r)] = p
+ # for i in range(len(p)):
+ # r[i + len(r) - len(p)] = p[i]
for i in range(len(q)):
r[i + len(r) - len(q)] ^= q[i]
return r
-def gf_poly_mul(p, q):
+def gf_poly_mul(p: Any,
+ q: List[Any]
+ ) -> Any:
"""\
Multiply two polynomials, inside Galois Field (but the procedure
is generic). Optimized function by precomputation of log.
"""
- r = bytearray(len(p) + len(q) - 1)
+ # Pre-allocate the result array
+ r = _bytearray(len(p) + len(q) - 1)
+
+ # Precompute the logarithm of p
lp = [gf_log[p[i]] for i in range(len(p))]
+
+ # Compute the polynomial multiplication (just like the
+ # outer product of two vectors, we multiply each
+ # coefficients of p with all coefficients of q)
for j in range(len(q)):
+ # Optimization: load the coefficient once
qj = q[j]
+ # log(0) is undefined, we need to check that
if qj != 0:
+ # Optimization: precache the logarithm
+ # of the current coefficient of q
lq = gf_log[qj]
for i in range(len(p)):
+ # log(0) is undefined, need to check that...
if p[i] != 0:
+ # Equivalent to:
+ # r[i + j] = gf_add(r[i+j], gf_mul(p[i], q[j]))
r[i + j] ^= gf_exp[lp[i] + lq]
return r
-def gf_poly_div(dividend, divisor):
- """\
- Fast polynomial division by using Extended Synthetic Division and
- optimized for GF(2^p) computations (doesn't work with standard
- polynomials outside of this galois field).
+def gf_poly_mul_simple(p: List[int],
+ q: List[int]
+ ) -> bytearray:
+ """Multiply two polynomials, inside Galois Field
+
+ Simple equivalent way of multiplying two polynomials
+ without precomputation, but thus it's slower
"""
- msg_out = bytearray(dividend)
+ # Pre-allocate the result array
+ r = _bytearray(len(p) + len(q) - 1)
+
+ # Compute the polynomial multiplication (just like the outer product
+ # of two vectors, we multiply each coefficients of p with all
+ # coefficients of q)
+ for j in range(len(q)):
+ for i in range(len(p)):
+ # equivalent to: r[i + j] = gf_add(r[i+j], gf_mul(p[i], q[j]))
+ # -- you can see it's your usual polynomial multiplication
+ r[i + j] ^= gf_mul(p[i], q[j])
+ return r
+
+
+def gf_poly_neg(poly: List[int]) -> List[int]:
+ """\
+ Returns the polynomial with all coefficients negated. In GF(2^p),
+ negation does not change the coefficient, so we return the
+ polynomial as-is.
+ """
+ return poly
+
+
+def gf_poly_div(dividend: Any,
+ divisor: Any
+ ) -> Tuple[Any, Any]:
+ """Fast polynomial division by using Extended Synthetic Division and
+ optimized for GF(2^p) computations (doesn't work with standard
+ polynomials outside of this Galois Field).
+
+ CAUTION: this function expects polynomials to follow the opposite
+ convention at decoding: the terms must go from the biggest to lowest
+ degree (while most other functions here expect a list from lowest to
+ biggest degree). eg: 1 + 2x + 5x^2 = [5, 2, 1], NOT [1, 2, 5]
+ """
+ # Copy the dividend list and pad with 0
+ # where the ecc bytes will be computed
+ msg_out = _bytearray(dividend)
+
+ # normalizer = divisor[0] # precomputing for performance
for i in range(len(dividend) - (len(divisor) - 1)):
- coef = msg_out[i]
+ # For general polynomial division (when polynomials are
+ # non-monic), the usual way of using synthetic division is to
+ # divide the divisor g(x) with its leading coefficient (call it
+ # a). In this implementation, this means we need to compute:
+ # coef = msg_out[i] / gen[0]. For more info, see
+ # https://en.wikipedia.org/wiki/Synthetic_division
+ # msg_out[i] /= normalizer
+ coef = msg_out[i] # precaching
+
+ # log(0) is undefined, so we need to avoid that case explicitly
+ # (and it's also a good optimization). In fact if you remove it,
+ # it should still work because gf_mul() will take care of the
+ # condition. But it's still a good practice to put the condition
+ # here.
if coef != 0:
+ # In synthetic division, we always skip the first coefficient
+ # of the divisor, because it's only used to normalize the
+ # dividend coefficient
for j in range(1, len(divisor)):
+ # log(0) is undefined
if divisor[j] != 0:
+ # Equivalent to the more mathematically correct (but
+ # XORing directly is faster):
+ # msg_out[i + j] += -divisor[j] * coef
msg_out[i + j] ^= gf_mul(divisor[j], coef)
+ # The resulting msg_out contains both the quotient and the remainder,
+ # the remainder being the size of the divisor (the remainder has
+ # necessarily the same degree as the divisor -- not length but
+ # degree == length-1 -- since it's what we couldn't divide from the
+ # dividend), so we compute the index where this separation is, and
+ # return the quotient and remainder.
separator = -(len(divisor) - 1)
+
+ # Return quotient, remainder.
return msg_out[:separator], msg_out[separator:]
-def gf_poly_eval(poly, x):
+def gf_poly_eval(poly: Any, x: int) -> Any:
"""\
Evaluates a polynomial in GF(2^p) given the value for x.
This is based on Horner's scheme for maximum efficiency.
@@ -264,153 +748,434 @@ def gf_poly_eval(poly, x):
return y
-def rs_generator_poly(nsym, fcr=0, generator=2):
+# Reed-Solomon encoding
+
+def rs_generator_poly(nsym: int,
+ fcr: int = 0,
+ generator: int = 2
+ ) -> bytearray:
"""\
Generate an irreducible generator polynomial
(necessary to encode a message into Reed-Solomon)
"""
- g = bytearray([1])
+ g = _bytearray([1])
for i in range(nsym):
g = gf_poly_mul(g, [1, gf_pow(generator, i + fcr)])
return g
-def rs_generator_poly_all(max_nsym, fcr=0, generator=2):
+def rs_generator_poly_all(max_nsym: int,
+ fcr: int = 0,
+ generator: int = 2
+ ) -> Dict[int, Any]:
"""\
Generate all irreducible generator polynomials up to max_nsym
- (usually you can use n, the length of the message+ecc). Very
- useful to reduce processing time if you want to encode using
- variable schemes and nsym rates.
+ (usually you can use n, the length of the message+ecc). Very useful
+ to reduce processing time if you want to encode using variable
+ schemes and nsym rates.
"""
- g_all = dict()
- g_all[0] = g_all[1] = [1]
+ g_all = {0: _bytearray([1]), 1: _bytearray([1])}
for nsym in range(max_nsym):
g_all[nsym] = rs_generator_poly(nsym, fcr, generator)
return g_all
-def rs_encode_msg(msg_in, nsym, fcr=0, generator=2, gen=None):
+def rs_simple_encode_msg(msg_in: bytearray,
+ nsym: int,
+ fcr: int = 0,
+ generator: int = 2
+ ) -> bytearray:
+ """\
+ Simple Reed-Solomon encoding (mainly an example for you to
+ understand how it works, because it's slower than the in-lined
+ function below)
+ """
+ global field_charac
+
+ if (len(msg_in) + nsym) > field_charac: # pragma: no cover
+ raise ValueError("Message is too long (%i when max is %i)"
+ % (len(msg_in) + nsym, field_charac))
+
+ gen = rs_generator_poly(nsym, fcr, generator)
+
+ # Pad the message, then divide it by
+ # the irreducible generator polynomial
+ _, remainder = gf_poly_div(msg_in + _bytearray(len(gen) - 1), gen)
+
+ # The remainder is our RS code! Just append it to our original
+ # message to get our full codeword (this represents a polynomial
+ # of max 256 terms)
+ msg_out = msg_in + remainder
+
+ # Return the codeword
+ return msg_out
+
+
+def rs_encode_msg(msg_in: bytes,
+ nsym: int,
+ fcr: int = 0,
+ generator: int = 2,
+ gen: Any = None
+ ) -> bytearray:
"""\
Reed-Solomon main encoding function, using polynomial division
(Extended Synthetic Division, the fastest algorithm available to my
- knowledge), better explained at http://research.swtch.com/field
+ knowledge), better explained at https://research.swtch.com/field
"""
global field_charac
- if (len(msg_in) + nsym) > field_charac:
- raise ValueError("Message is too long ({} when max is {})"
- .format(len(msg_in) + nsym, field_charac))
-
+ if (len(msg_in) + nsym) > field_charac: # pragma: no cover
+ raise ValueError("Message is too long (%i when max is %i)"
+ % (len(msg_in) + nsym, field_charac))
if gen is None:
gen = rs_generator_poly(nsym, fcr, generator)
+ msg_in = _bytearray(msg_in)
- msg_in = bytearray(msg_in)
- msg_out = bytearray(msg_in) + bytes(bytearray(len(gen) - 1))
- lgen = bytearray([gf_log[gen[j]] for j in range(len(gen))])
+ # init msg_out with the values inside msg_in and pad with
+ # len(gen)-1 bytes (which is the number of ecc symbols).
+ msg_out = _bytearray(msg_in) + _bytearray(len(gen) - 1)
+ # Precompute the logarithm of every items in the generator
+ lgen = _bytearray([gf_log[gen[j]] for j in range(len(gen))])
+
+ # Extended synthetic division main loop
+ # Fastest implementation with PyPy (but the Cython
+ # version in creedsolo.pyx is about 2x faster)
for i in range(len(msg_in)):
+ # Note that it's msg_out here, not msg_in. Thus, we reuse the
+ # updated value at each iteration (this is how Synthetic Division
+ # works: instead of storing in a temporary register the
+ # intermediate values, we directly commit them to the output).
coef = msg_out[i]
+ # coef = gf_mul(msg_out[i], gf_inverse(gen[0])) # for general
+ # polynomial division (when polynomials are non-monic), the
+ # usual way of using synthetic division is to divide the divisor
+ # g(x) with its leading coefficient (call it a). In this
+        # implementation, this means we need to compute:
+ # coef = msg_out[i] / gen[0]
+
+ # log(0) is undefined, so we need to manually check for this
+ # case. There's no need to check the divisor here because we
+ # know it can't be 0 since we generated it.
if coef != 0:
- lcoef = gf_log[coef]
+ lcoef = gf_log[coef] # precaching
+
+ # In synthetic division, we always skip the first
+ # coefficient of the divisor, because it's only used to
+ # normalize the dividend coefficient (which is here useless
+ # since the divisor, the generator polynomial, is always
+ # monic)
for j in range(1, len(gen)):
+ # If gen[j] != 0: # log(0) is undefined so we need to
+ # check that, but it slows things down in fact and it's
+ # useless in our case (Reed-Solomon encoding) since we
+ # know that all coefficients in the generator are not 0
+
+ # Optimization: equivalent to gf_mul(gen[j], msg_out[i])
+                # and we just subtract it from msg_out[i+j] (but since we
+ # are in GF256, it's equivalent to an addition and to an
+ # XOR). In other words, this is simply a
+ # "multiply-accumulate operation"
msg_out[i + j] ^= gf_exp[lcoef + lgen[j]]
+ # Recopy the original message bytes (overwrites
+ # the part where the quotient was computed)
+
+ # Equivalent to c = mprime - b, where
+ # mprime is msg_in padded with [0]*nsym
msg_out[:len(msg_in)] = msg_in
return msg_out
-def rs_calc_syndromes(msg, nsym, fcr=0, generator=2):
+# Reed-Solomon decoding
+
+def rs_calc_syndromes(msg: bytearray,
+ nsym: int,
+ fcr: int = 0,
+ generator: int = 2
+ ) -> List[int]:
"""\
Given the received codeword msg and the number of error correcting
symbols (nsym), computes the syndromes polynomial. Mathematically,
it's essentially equivalent to a Fourier Transform (Chien search
being the inverse).
+
+ Note the "[0] +" : we add a 0 coefficient for the lowest degree (the
+ constant). This effectively shifts the syndrome, and will shift
+ every computations depending on the syndromes (such as the errors
+ locator polynomial, errors evaluator polynomial, etc. but not the
+ errors positions).
+
+ This is not necessary as anyway syndromes are defined such as there
+ are only non-zero coefficients (the only 0 is the shift of the
+ constant here) and subsequent computations will/must account for the
+ shift by skipping the first iteration (e.g., the often seen
+ range(1, n-k+1)), but you can also avoid prepending the 0 coeff and
+ adapt every subsequent computations to start from 0 instead of 1.
"""
- return [0] + [gf_poly_eval(msg, gf_pow(generator, i + fcr))
- for i in range(nsym)]
+ return [0] + [gf_poly_eval(msg, gf_pow(generator, i + fcr)) for i in range(nsym)]
-def rs_correct_errata(msg_in, synd, err_pos, fcr=0, generator=2):
+def rs_correct_errata(msg_in: bytearray,
+ synd: List[int],
+ err_pos: List[int],
+ fcr: int = 0,
+ generator: int = 2
+ ) -> bytearray:
"""\
- Forney algorithm, computes the values
- (error magnitude) to correct in_msg.
+ Forney algorithm, computes the values (error
+ magnitude) to correct the input message.
+
+ err_pos is a list of the positions of the errors/erasures/errata
"""
global field_charac
- msg = bytearray(msg_in)
+ msg = _bytearray(msg_in)
+ # Calculate errata locator polynomial to correct both errors and
+ # erasures (by combining the errors positions given by the error
+ # locator polynomial found by BM with the erasures positions given
+ # by caller).
+
+ # Need to convert the positions to coefficients degrees for the
+ # errata locator algorithm to work (e.g. instead of [0, 1, 2] it
+ # will become [len(msg)-1, len(msg)-2, len(msg) -3])
coef_pos = [len(msg) - 1 - p for p in err_pos]
err_loc = rs_find_errata_locator(coef_pos, generator)
+
+ # Calculate errata evaluator polynomial (often
+ # called Omega or Gamma in academic papers)
err_eval = rs_find_error_evaluator(synd[::-1], err_loc, len(err_loc) - 1)[::-1]
- x = []
+ # Second part of Chien search to get the error location polynomial X
+ # from the error positions in err_pos (the roots of the error
+ # locator polynomial, i.e., where it evaluates to 0)
+ x = [] # will store the position of the errors
for i in range(len(coef_pos)):
- l = field_charac - coef_pos[i]
- x.append(gf_pow(generator, -l))
+ pos = field_charac - coef_pos[i]
+ x.append(gf_pow(generator, -pos))
- e_ = bytearray(len(msg))
+    # Forney algorithm: compute the error magnitudes. `e` will store
+    # the values that need to be corrected (subtracted) in the message
+    # containing errors. This is sometimes called the error magnitude
+    # polynomial.
+ e = _bytearray(len(msg))
xlength = len(x)
- for i, Xi in enumerate(x):
- xi_inv = gf_inverse(Xi)
+ for i, xi in enumerate(x):
+ xi_inv = gf_inverse(xi)
+
+ # Compute the formal derivative of the error locator polynomial
+ # (see Blahut, Algebraic codes for data transmission, pp 196-197).
+ # The formal derivative of the errata locator is used as the
+ # denominator of the Forney Algorithm, which simply says that
+ # the ith error value is given by error_evaluator(gf_inverse(Xi))
+ # / error_locator_derivative(gf_inverse(Xi)). See Blahut,
+ # Algebraic codes for data transmission, pp 196-197.
err_loc_prime_tmp = []
for j in range(xlength):
if j != i:
err_loc_prime_tmp.append(gf_sub(1, gf_mul(xi_inv, x[j])))
+ # Compute the product, which is the denominator of
+ # the Forney algorithm (errata locator derivative).
err_loc_prime = 1
for coef in err_loc_prime_tmp:
err_loc_prime = gf_mul(err_loc_prime, coef)
+ # Equivalent to:
+ # err_loc_prime = functools.reduce(gf_mul, err_loc_prime_tmp, 1)
- y = gf_poly_eval(err_eval[::-1], xi_inv)
- y = gf_mul(gf_pow(Xi, 1 - fcr), y)
- magnitude = gf_div(y, err_loc_prime)
- e_[err_pos[i]] = magnitude
+ # Compute y (evaluation of the errata evaluator polynomial)
+ # This is a more faithful translation of the theoretical equation
+ # contrary to the old Forney method. Here it is exactly
+ # copy/pasted from the included presentation decoding_rs.pdf:
+ # Yl = omega(Xl.inverse()) / prod(1 - Xj*Xl.inverse()) for j in len(X)
+ # (in the paper it's for j in s, but it's useless when
+ # len(X) < s because we compute neutral terms 1 for nothing, and
+ # wrong when correcting more than s erasures or erasures+errors
+ # since it prevents computing all required terms).
+
+ # Thus here this method works with erasures too because firstly
+ # we fixed the equation to be like the theoretical one (don't
+ # know why it was modified in _old_forney(), if it's an
+ # optimization, it doesn't enhance anything), and secondly
+ # because we removed the product bound on s, which prevented
+ # computing errors and erasures above the s=(n-k)//2 bound.
+
+ # Numerator of the Forney algorithm (errata evaluator evaluated)
+ y = gf_poly_eval(err_eval[::-1], xi_inv)
+ y = gf_mul(gf_pow(xi, 1 - fcr), y) # adjust to fcr parameter
+
+ # Compute the magnitude
+
+ # Magnitude value of the error, calculated by the Forney
+ # algorithm (an equation in fact): Dividing the errata evaluator
+ # with the errata locator derivative gives us the errata
+ # magnitude (i.e., value to repair) the i'th symbol
+ magnitude = gf_div(y, err_loc_prime)
+
+ # Store the magnitude for this error into the magnitude polynomial
+ e[err_pos[i]] = magnitude
+
+ # Apply the correction of values to get our message corrected!
+ # Note that the ecc bytes also gets corrected! This isn't the
+ # Forney algorithm, we just apply the result of decoding here.
+ msg = gf_poly_add(msg, e)
+
+ # Equivalent to Ci = Ri - Ei where Ci is the correct message, Ri the
+ # received (senseword) message, and Ei the errata magnitudes (minus
+ # is replaced by XOR since it's equivalent in GF(2^p)). So in fact
+ # here we subtract from the received message the error's magnitude,
+ # which logically corrects the value to what it should be.
- msg = gf_poly_add(msg, e_)
return msg
-def rs_find_error_locator(synd, nsym, erase_loc=None, erase_count=0):
+def rs_find_error_locator(synd: List[int],
+ nsym: int,
+ erase_loc: Optional[bytearray] = None,
+ erase_count: int = 0
+ ) -> List[int]:
"""\
Find error/errata locator and evaluator
- polynomials with Berlekamp-Massey algorithm.
+ polynomials with Berlekamp-Massey algorithm
"""
- if erase_loc:
- err_loc = bytearray(erase_loc)
- old_loc = bytearray(erase_loc)
- else:
- err_loc = bytearray([1])
- old_loc = bytearray([1])
+ # The idea is that BM will iteratively estimate the error locator
+ # polynomial. To do this, it will compute a Discrepancy term called
+ # Delta, which will tell us if the error locator polynomial needs an
+ # update or not (hence why it's called discrepancy: it tells us when
+ # we are getting off board from the correct value).
+ # Init the polynomials
+
+ # If the erasure locator polynomial is supplied, we init with its
+ # value, so that we include erasures in the final locator polynomial
+ if erase_loc:
+ err_loc = _bytearray(erase_loc)
+ old_loc = _bytearray(erase_loc)
+ else:
+ # This is the main variable we want to fill, also called Sigma
+ # in other notations or more formally the errors/errata locator
+ # polynomial.
+ err_loc = _bytearray([1])
+
+ # BM is an iterative algorithm, and we need the errata locator
+ # polynomial of the previous iteration in order to update other
+ # necessary variables.
+ old_loc = _bytearray([1])
+
+ # L = 0
+ # Update flag variable, not needed here because we use an
+ # alternative equivalent way of checking if update is needed (but
+ # using the flag could potentially be faster depending on if using
+ # length(list) is taking linear time in your language, here in
+    # Python it's constant, so it's as fast).
+
+ # Fix the syndrome shifting: when computing the syndrome, some
+ # implementations may prepend a 0 coefficient for the lowest degree
+ # term (the constant). This is a case of syndrome shifting, thus the
+ # syndrome will be bigger than the number of ecc symbols (I don't
+ # know what purpose serves this shifting). If that's the case, then
+ # we need to account for the syndrome shifting when we use the
+ # syndrome such as inside BM, by skipping those prepended
+ # coefficients. Another way to detect the shifting is to detect the
+ # 0 coefficients: by definition, a syndrome does not contain any 0
+ # coefficient (except if there are no errors/erasures, in this case
+ # they are all 0). This however doesn't work with the modified
+ # Forney syndrome, which set to 0 the coefficients corresponding to
+ # erasures, leaving only the coefficients corresponding to errors.
synd_shift = 0
if len(synd) > nsym:
synd_shift = len(synd) - nsym
+ # Generally: nsym-erase_count == len(synd), except when you input a
+ # partial erase_loc and using the full syndrome instead of the
+ # Forney syndrome, in which case nsym-erase_count is more correct
+ # (len(synd) will fail badly with IndexError).
for i in range(nsym - erase_count):
+
+ # If an erasures locator polynomial was provided to init the
+ # errors locator polynomial, then we must skip the first
+ # erase_count iterations (not the last iterations, this is very
+ # important!)
if erase_loc:
- k_ = erase_count + i + synd_shift
+ k = erase_count + i + synd_shift
+
+ # If erasures locator is not provided, then either there's no
+ # erasures to account or we use the Forney syndromes, so we
+ # don't need to use erase_count nor erase_loc (the erasures have
+ # been trimmed out of the Forney syndromes).
else:
- k_ = i + synd_shift
+ k = i + synd_shift
- delta = synd[k_]
+ # Compute the discrepancy Delta
+
+ # Here is the close-to-the-books operation to compute the
+ # discrepancy Delta: it's a simple polynomial multiplication of
+ # error locator with the syndromes, and then we get the Kth
+ # element. delta = gf_poly_mul(err_loc[::-1], synd)[k]
+ # theoretically it should be gf_poly_add(synd[::-1], [1])[::-1]
+ # instead of just synd, but it seems it's not absolutely
+ # necessary to correctly decode. But this can be optimized:
+ # Since we only need the K'th element, we don't need to compute
+ # the polynomial multiplication for any other element but the
+ # K'th. Thus to optimize, we compute the polymul only at the item
+ # we need, skipping the rest (avoiding a nested loop, thus we
+ # are linear time instead of quadratic). This optimization is
+ # actually described in several figures of the book
+ # "Algebraic codes for data transmission"
+ # Blahut, Richard E., 2003, Cambridge university press.
+ delta = synd[k]
for j in range(1, len(err_loc)):
- delta ^= gf_mul(err_loc[-(j + 1)], synd[k_ - j])
- old_loc += bytearray([0])
+ # delta is also called discrepancy. Here we do a partial
+ # polynomial multiplication (i.e., we compute the polynomial
+ # multiplication only for the term of degree k). Should be
+ # equivalent to brownanrs.polynomial.mul_at().
+ delta ^= gf_mul(err_loc[-(j + 1)], synd[k - j])
- if delta != 0:
+ # Shift polynomials to compute the next degree
+ old_loc += _bytearray([0])
+
+ # Iteratively estimate the errata locator and evaluator polynomials
+ if delta != 0: # Update only if there's a discrepancy
+ # Rule B (rule A is implicitly defined because rule A just
+ # says that we skip any modification for this iteration)
if len(old_loc) > len(err_loc):
+ # `2*L <= k+erase_count` is equivalent to
+ # `len(old_loc) > len(err_loc)` as long as L is
+                # correctly computed. Compute the errata locator
+                # polynomial Sigma.
new_loc = gf_poly_scale(old_loc, delta)
+
+ # Effectively we are doing err_loc * 1/delta = err_loc // delta
old_loc = gf_poly_scale(err_loc, gf_inverse(delta))
err_loc = new_loc
+
+ # Update the update flag
+ # L = k - L # the update flag L is tricky: in Blahut's
+ # schema, it's mandatory to use `L = k - L - erase_count`
+ # (and indeed in a previous draft of this function, if
+ # you forgot to do `- erase_count` it would lead to
+ # correcting only 2*(errors+erasures) <= (n-k) instead
+ # of 2*errors+erasures <= (n-k)), but in this latest
+ # draft, this will lead to a wrong decoding in some
+ # cases where it should correctly decode! Thus you
+ # should try with and without `- erase_count` to update
+ # L on your own implementation and see which one works
+ # OK without producing wrong decoding failures.
+
+ # Update with the discrepancy
err_loc = gf_poly_add(err_loc, gf_poly_scale(old_loc, delta))
+    # Check that the result is correct, i.e., that there are not too
+    # many errors to correct. Drop leading 0s, else errs will not be
+    # of the correct size.
err_loc = list(itertools.dropwhile(lambda x: x == 0, err_loc))
errs = len(err_loc) - 1
- if (errs - erase_count) * 2 + erase_count > nsym:
+ if (errs - erase_count) * 2 + erase_count > nsym: # pragma: no cover
raise ReedSolomonError("Too many errors to correct")
return err_loc
-def rs_find_errata_locator(e_pos, generator=2):
+def rs_find_errata_locator(e_pos: List[int],
+ generator: int = 2
+ ) -> List[int]:
"""\
Compute the erasures/errors/errata locator polynomial from the
erasures/errors/errata positions (the positions must be relative to
@@ -419,33 +1184,54 @@ def rs_find_errata_locator(e_pos, generator=2):
here the string positions are [1, 4], but the coefficients are
reversed since the ecc characters are placed as the first
coefficients of the polynomial, thus the coefficients of the erased
- characters are n-1 - [1, 4] = [18, 15] = erasures_loc to be specified
- as an argument.
+ characters are n-1 - [1, 4] = [18, 15] = erasures_loc to be
+ specified as an argument.
+
+ See:
+ http://ocw.usu.edu/Electrical_and_Computer_Engineering/Error_Control_Coding/lecture7.pdf
+ and
+ Blahut, Richard E. "Transform techniques for error control codes."
+ IBM Journal of Research and development 23.3 (1979): 299-315.
+ http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.92.600&rep=rep1&type=pdf
+ and also a MatLab implementation here:
+ https://www.mathworks.com/matlabcentral/fileexchange/23567-reed-solomon-errors-and-erasures-decoder
"""
- e_loc = [1]
+
+ # Just to init because we will multiply, so it must be 1 so that
+ # the multiplication starts correctly without nulling any term.
+ e_loc = [1] # type: List[int]
terminal_width = shutil.get_terminal_size()[0]
def c_print(string: str) -> None:
+ """Print to middle of the screen."""
print(string.center(terminal_width))
if len(e_pos) > 0:
print('')
for s in ["Warning! Reed-Solomon erasure code",
"detected and corrected {} errors in ".format(len(e_pos)),
- "received packet. This might indicate",
+ "a received packet. This might indicate",
"bad connection, an eminent adapter or",
"data diode HW failure or that serial",
- "interface's baudrate is set too high."]:
+ "interface's baud rate is set too high."]:
c_print(s)
print('')
+ # erasures_loc is very simple to compute:
+ # erasures_loc = prod(1 - x*alpha**i) for i in erasures_pos and
+ # where alpha is the alpha chosen to evaluate polynomials (here in
+ # this library it's gf(3)). To generate c*x where c is a constant,
+ # we simply generate a Polynomial([c, 0]) where 0 is the constant
+ # and c is positioned to be the coefficient for x^1.
for i in e_pos:
- e_loc = gf_poly_mul(e_loc, gf_poly_add([1], [gf_pow(generator, i), 0]))
+ e_loc = gf_poly_mul(e_loc, gf_poly_add(_bytearray([1]), [gf_pow(generator, i), 0]))
return e_loc
-def rs_find_error_evaluator(synd, err_loc, nsym):
+def rs_find_error_evaluator(synd: List[int],
+ err_loc: List[int],
+ nsym: int) -> bytearray:
"""\
Compute the error (or erasures if you supply sigma=erasures locator
polynomial, or errata) evaluator polynomial Omega from the syndrome
@@ -455,78 +1241,290 @@ def rs_find_error_evaluator(synd, err_loc, nsym):
Omega afterwards using this method, or just ensure that Omega
computed by BM is correct given Sigma.
"""
+ # Omega(x) = [ Synd(x) * Error_loc(x) ] mod x^(n-k+1)
+ # first multiply syndromes * errata_locator, then do a polynomial
+ # division to truncate the polynomial to the required length
_, remainder = gf_poly_div(gf_poly_mul(synd, err_loc), ([1] + [0] * (nsym + 1)))
+
+ # Faster way that is equivalent:
+ # First multiply the syndromes with the errata locator polynomial
+ # remainder = gf_poly_mul(synd, err_loc)
+ #
+ # then divide by a polynomial of the length we want, which is
+ # equivalent to slicing the list (which represents the polynomial)
+ # remainder = remainder[len(remainder)-(nsym+1):]
return remainder
-def rs_find_errors(err_loc, nmess, generator=2):
+def rs_find_errors(err_loc: Any,
+ nmess: int,
+ generator: int = 2
+ ) -> List[int]:
"""\
- Find the roots (ie, where evaluation = zero) of error polynomial by
- bruteforce trial, this is a sort of Chien's search (but less
+ Find the roots (i.e., where evaluation = zero) of error polynomial by
+ brute-force trial, this is a sort of Chien's search (but less
efficient, Chien's search is a way to evaluate the polynomial such
that each evaluation only takes constant time).
"""
errs = len(err_loc) - 1
err_pos = []
+
+ # Normally we should try all 2^8 possible values, but here
+ # we optimize to just check the interesting symbols
for i in range(nmess):
if gf_poly_eval(err_loc, gf_pow(generator, i)) == 0:
+ # It's a 0? Bingo, it's a root of the error locator
+ # polynomial, in other terms this is the location of an error
err_pos.append(nmess - 1 - i)
+ # Sanity check: the number of errors/errata positions found should
+ # be exactly the same as the length of the errata locator polynomial
if len(err_pos) != errs:
- raise ReedSolomonError("Too many (or few) errors found by Chien "
- "search for the errata locator polynomial!")
+        # (TODO) to decode messages+ecc with length n > 255, we may try
+ # to use a brute-force approach: the correct positions ARE in the
+ # final array j, but the problem is because we are above the
+ # Galois Field's range, there is a wraparound so that for
+ # example if j should be [0, 1, 2, 3], we will also get
+ # [255, 256, 257, 258] (because 258 % 255 == 3, same for the
+ # other values), so we can't discriminate. The issue is that
+ # fixing any errs_nb errors among those will always give a
+ # correct output message (in the sense that the syndrome will be
+ # all 0), so we may not even be able to check if that's correct
+ # or not, so I'm not sure the brute-force approach may even be
+ # possible.
+        raise ReedSolomonError("Too many (or few) errors found by Chien"
+                               " search for the errata locator polynomial!")
return err_pos
-def rs_forney_syndromes(synd, pos, nmess, generator=2):
+def rs_forney_syndromes(synd: List[int],
+ pos: List[int],
+ nmess: int,
+ generator: int = 2
+ ) -> list:
+    """\
+    Compute the Forney syndromes, which are modified syndromes used to
+    compute only errors (erasures are trimmed out). Do not confuse this
+    with the Forney algorithm, which allows correcting the message
+    based on the location of errors.
+    """
+
+ # Prepare the coefficient degree positions
+ # (instead of the erasures positions)
erase_pos_reversed = [nmess - 1 - p for p in pos]
+
+    # Optimized method, all operations are in-lined: make a copy and
+    # trim the first coefficient which is always 0 by definition
fsynd = list(synd[1:])
for i in range(len(pos)):
x = gf_pow(generator, erase_pos_reversed[i])
for j in range(len(fsynd) - 1):
fsynd[j] = gf_mul(fsynd[j], x) ^ fsynd[j + 1]
+ # fsynd.pop() # useless? it doesn't change the
+ # results of computations to leave it there
+
+ # Theoretical way of computing the modified Forney syndromes:
+ # fsynd = (erase_loc * synd) % x^(n-k) -- although the trimming by
+ # using x^(n-k) is maybe not necessary as many books do not even
+ # mention it (and it works without trimming)
+ # See
+ # Shao, H. M., Truong, T. K., Deutsch, L. J., & Reed, I. S.
+ # (1986, April). A single chip VLSI Reed-Solomon decoder.
+ #
+ # In Acoustics, Speech, and Signal Processing
+ # IEEE International Conference on ICASSP'86.
+ # (Vol. 11, pp. 2151-2154). IEEE.ISO 690
+ #
+ # Computing the erasures locator polynomial
+ # erase_loc = rs_find_errata_locator(erase_pos_reversed, generator=generator)
+ #
+ # then multiply with the syndrome to get the untrimmed forney syndrome
+ # fsynd = gf_poly_mul(erase_loc[::-1], synd[1:])
+ #
+ # then trim the first erase_pos coefficients which are useless.
+ # Seems to be not necessary, but this reduces the computation time
+ # later in BM (thus it's an optimization).
+ # fsynd = fsynd[len(pos):]
return fsynd
-def rs_correct_msg(msg_in, nsym, fcr=0, generator=2, erase_pos=None, only_erasures=False):
- """Reed-Solomon main decoding function."""
+def rs_correct_msg(msg_in: bytearray,
+ nsym: int,
+ fcr: int = 0,
+ generator: int = 2,
+ erase_pos: Optional[List[int]] = None,
+ only_erasures: bool = False
+ ) -> Tuple[Any, Any]:
+    """Reed-Solomon main decoding function."""
global field_charac
- if len(msg_in) > field_charac:
- raise ValueError("Message is too long ({} when max is {})"
- .format(len(msg_in), field_charac))
+ if len(msg_in) > field_charac: # pragma: no cover
+ # Note that it is in fact possible to encode/decode messages
+ # that are longer than field_charac, but because this will be
+ # above the field, this will generate more error positions
+ # during Chien Search than it should, because this will generate
+    # duplicate values, which should normally be prevented thanks
+ # to the prime polynomial reduction (e.g., because it can't
+ # discriminate between error at position 1 or 256, both being
+ # exactly equal under Galois Field 2^8). So it's really not
+ # advised to do it, but it's possible (but then you're not
+ # guaranteed to be able to correct any error/erasure on symbols
+ # with a position above the length of field_charac -- if you
+ # really need a bigger message without chunking, then you should
+ # better enlarge c_exp so that you get a bigger field).
+ raise ValueError("Message is too long (%i when max is %i)"
+ % (len(msg_in), field_charac))
- msg_out = bytearray(msg_in)
- if erase_pos is None:
+ msg_out = _bytearray(msg_in) # copy of message
+
+ # Erasures: set them to null bytes for easier decoding (but this is
+ # not necessary, they will be corrected anyway, but debugging will
+ # be easier with null bytes because the error locator polynomial
+ # values will only depend on the errors locations, not their values).
+ if erase_pos is None: # pragma: no cover
erase_pos = []
else:
for e_pos in erase_pos:
msg_out[e_pos] = 0
- if len(erase_pos) > nsym:
+ # Check if there are too many erasures to correct (beyond the
+ # Singleton bound).
+ if len(erase_pos) > nsym: # pragma: no cover
raise ReedSolomonError("Too many erasures to correct")
+
+ # Prepare the syndrome polynomial using only errors (i.e., errors
+ # = characters that were either replaced by null byte or changed to
+ # another character, but we don't know their positions).
synd = rs_calc_syndromes(msg_out, nsym, fcr, generator)
+ # Check if there's any error/erasure in the input codeword. If not
+ # (all syndromes coefficients are 0), then just return the codeword
+ # as-is.
if max(synd) == 0:
- return msg_out[:-nsym], msg_out[-nsym:]
+ return msg_out[:-nsym], msg_out[-nsym:] # no errors
+ # Find errors locations
if only_erasures:
- err_pos = []
+ err_pos = [] # type: List[int]
else:
- fsynd = rs_forney_syndromes(synd, erase_pos, len(msg_out), generator)
- err_loc = rs_find_error_locator(fsynd, nsym, erase_count=len(erase_pos))
- err_pos = rs_find_errors(err_loc[::-1], len(msg_out), generator)
+ # Compute the Forney syndromes, which hide the erasures from the
+ # original syndrome (so that BM will just have to deal with
+ # errors, not erasures).
+ fsynd = rs_forney_syndromes(synd, erase_pos, len(msg_out), generator)
- if err_pos is None:
+ # Compute the error locator polynomial using Berlekamp-Massey.
+ err_loc = rs_find_error_locator(fsynd, nsym, erase_count=len(erase_pos))
+
+ # Locate the message errors using Chien search (or brute-force search).
+ err_pos = rs_find_errors(err_loc[::-1], len(msg_out), generator)
+ if err_pos is None: # pragma: no cover
raise ReedSolomonError("Could not locate error")
- msg_out = rs_correct_errata(msg_out, synd, (erase_pos + err_pos), fcr, generator)
- synd = rs_calc_syndromes(msg_out, nsym, fcr, generator)
+ # Find errors values and apply them to correct the message compute
+ # errata evaluator and errata magnitude polynomials, then correct
+ # errors and erasures.
+
+ # Note that we here use the original syndrome, not the Forney
+ # syndrome (because we will correct both errors and erasures,
+ # so we need the full syndrome).
+ msg_out = rs_correct_errata(msg_out, synd, erase_pos + err_pos, fcr, generator)
+
+ # Check if the final message is fully repaired.
+ synd = rs_calc_syndromes(msg_out, nsym, fcr, generator)
if max(synd) > 0:
raise ReedSolomonError("Could not correct message")
+
+ # Return the successfully decoded message. Also return the corrected
+ # ecc block so that the user can check().
return msg_out[:-nsym], msg_out[-nsym:]
-def rs_check(msg, nsym, fcr=0, generator=2):
+def rs_correct_msg_nofsynd(msg_in: bytearray,
+ nsym: int,
+ fcr: int = 0,
+ generator: int = 2,
+ erase_pos: Optional[List[int]] = None,
+ only_erasures: bool = False
+ ) -> Tuple[Any, Any]:
+ """\
+ Reed-Solomon main decoding function, without using the modified
+ Forney syndromes.
+ """
+ global field_charac
+ if len(msg_in) > field_charac: # pragma: no cover
+ raise ValueError("Message is too long (%i when max is %i)"
+ % (len(msg_in), field_charac))
+
+ msg_out = _bytearray(msg_in) # copy of message
+
+ # Erasures: set them to null bytes for easier decoding (but this is
+ # not necessary, they will be corrected anyway, but debugging will
+ # be easier with null bytes because the error locator polynomial
+ # values will only depend on the errors locations, not their values).
+ if erase_pos is None: # pragma: no cover
+ erase_pos = []
+ else:
+ for e_pos in erase_pos:
+ msg_out[e_pos] = 0
+
+ # Check if there are too many erasures.
+ if len(erase_pos) > nsym: # pragma: no cover
+ raise ReedSolomonError("Too many erasures to correct")
+
+ # Prepare the syndrome polynomial using only errors (i.e.,
+ # errors = characters that were either replaced by null byte or
+ # changed to another character, but we don't know their positions).
+ synd = rs_calc_syndromes(msg_out, nsym, fcr, generator)
+
+ # Check if there's any error/erasure in the input codeword. If not
+ # (all syndromes coefficients are 0), then just return the codeword
+ # as-is.
+ if max(synd) == 0:
+ return msg_out[:-nsym], msg_out[-nsym:] # no errors
+
+ # Prepare erasures locator and evaluator polynomials.
+ erase_loc = bytearray()
+
+ # erase_eval = None
+ erase_count = 0
+ if erase_pos:
+ erase_count = len(erase_pos)
+ erase_pos_reversed = [len(msg_out) - 1 - eras for eras in erase_pos]
+ erase_loc = bytearray(rs_find_errata_locator(erase_pos_reversed, generator=generator))
+
+ # Prepare errors/errata locator polynomial
+ if only_erasures:
+ err_loc = erase_loc[::-1]
+ else:
+ err_loc = bytearray(rs_find_error_locator(synd, nsym, erase_loc=erase_loc, erase_count=erase_count))
+ err_loc = err_loc[::-1]
+
+ # Locate the message errors
+
+ # Find the roots of the errata locator polynomial (i.e., the
+ # positions of the errors/errata).
+ err_pos = rs_find_errors(err_loc, len(msg_out), generator)
+ if err_pos is None: # pragma: no cover
+ raise ReedSolomonError("Could not locate error")
+
+ # Compute errata evaluator and errata magnitude polynomials, then
+ # correct errors and erasures.
+ msg_out = rs_correct_errata(msg_out, synd, err_pos, fcr=fcr, generator=generator)
+
+ # Check if the final message is fully repaired.
+ synd = rs_calc_syndromes(msg_out, nsym, fcr, generator)
+ if max(synd) > 0: # pragma: no cover
+ raise ReedSolomonError("Could not correct message")
+
+ # Return the successfully decoded message. Also return the corrected
+ # ecc block so that the user can check.
+ return msg_out[:-nsym], msg_out[-nsym:]
+
+
+def rs_check(msg: bytearray,
+ nsym: int,
+ fcr: int = 0,
+ generator: int = 2
+ ) -> bool:
"""\
Returns true if the message + ecc has no error of false otherwise
(may not always catch a wrong decoding or a wrong message,
@@ -551,41 +1549,191 @@ class RSCodec(object):
specify different primitive polynomial and non-zero first
consecutive root (fcr). For UAT/ADSB use, set fcr=120 and prim=0x187
when instantiating the class; leaving them out will default for
- previous values (0 and 0x11d)
+ previous values (0 and 0x11d).
"""
- def __init__(self, nsym=10, nsize=255, fcr=0, prim=0x11d, generator=2, c_exp=8):
+ def __init__(self,
+ nsym: int = 10,
+ nsize: int = 255,
+ fcr: int = 0,
+ prim: int = 0x11d,
+ generator: int = 2,
+ c_exp: int = 8,
+ single_gen: bool = True
+ ) -> None:
"""\
- Initialize the Reed-Solomon codec. Note that different parameters
- change the internal values (the ecc symbols, look-up table values,
- etc) but not the output result (whether your message can be
- repaired or not, there is no influence of the parameters).
- """
- self.nsym = nsym
- self.nsize = nsize
- self.fcr = fcr
- self.prim = prim
- self.generator = generator
- self.c_exp = c_exp
- init_tables(prim, generator, c_exp)
+ Initialize the Reed-Solomon codec. Note that different
+ parameters change the internal values (the ecc symbols, look-up
+ table values, etc) but not the output result (whether your
+ message can be repaired or not, there is no influence of the
+ parameters).
- def encode(self, data):
+        nsym       : number of ecc symbols (you can repair nsym/2 errors
+                     and nsym erasures).
+ nsize : maximum length of each chunk. If higher than 255,
+ will use a higher Galois Field, but the algorithm's
+ complexity and computational cost will raise
+ quadratically...
+ single_gen : if you want to use the same RSCodec for different
+ nsym parameters (but nsize the same), then set
+ single_gen = False.
+ """
+
+ # Auto-setup if Galois Field or message length is different than
+ # default (exponent 8).
+
+ # If nsize (chunk size) is larger than the Galois Field, we
+ # resize the Galois Field.
+ if nsize > 255 and c_exp <= 8:
+ # Get the next closest power of two
+ c_exp = int(math.log(2 ** (math.floor(math.log(nsize) / math.log(2)) + 1), 2))
+
+ # prim was not correctly defined, find one
+ if c_exp != 8 and prim == 0x11d:
+ prim = find_prime_polys(generator=generator, c_exp=c_exp, fast_primes=True, single=True)
+ if nsize == 255: # Resize chunk size if not set
+ nsize = int(2 ** c_exp - 1)
+
+ # Memorize variables
+
+        # Number of ecc symbols (i.e., the repairing rate will be
+        # r=(nsym/2)/nsize, so for example if you have nsym=5 and
+        # nsize=10, you have a rate r=0.25, so you can correct up to
+        # 25% errors (or exactly 2 symbols out of 10), and 50%
+        # erasures (5 symbols out of 10).
+ self.nsym = nsym
+
+ # Maximum length of one chunk (i.e., message + ecc symbols after
+ # encoding, for the message alone it's nsize-nsym)
+ self.nsize = nsize
+
+ # First consecutive root, can be any value between 0 and (2**c_exp)-1
+ self.fcr = fcr
+
+ # Prime irreducible polynomial, use find_prime_polys() to find a prime poly
+ self.prim = prim
+
+ # Generator integer, must be prime
+ self.generator = generator
+
+ # Exponent of the field's characteristic. This both defines the
+ # maximum value per symbol and the maximum length of one chunk.
+ # By default it's GF(2^8), do not change if you're not sure what
+ # it means.
+ self.c_exp = c_exp
+
+ # Initialize the look-up tables for easy
+ # and quick multiplication/division
+ self.gf_log, self.gf_exp, self.field_charac = init_tables(prim, generator, c_exp)
+
+ # Pre-compute the generator polynomials
+ if single_gen:
+ self.gen = {nsym: rs_generator_poly(nsym, fcr=fcr, generator=generator)}
+ else: # pragma: no cover
+ self.gen = rs_generator_poly_all(nsize, fcr=fcr, generator=generator)
+
+ @staticmethod
+ def chunk(data: bytes,
+ chunk_size: int
+ ) -> Generator:
+ """Split a long message into chunks"""
+ for i in range(0, len(data), chunk_size):
+ # Split the long message in a chunk.
+ chunk = data[i:i + chunk_size]
+ yield chunk
+
+ def encode(self,
+ data_: Union[bytes, str],
+ nsym: Optional[int] = None
+ ) -> bytearray:
"""\
- Encode a message (ie, add the ecc symbols) using Reed-Solomon,
+ Encode a message (i.e., add the ecc symbols) using Reed-Solomon,
whatever the length of the message because we use chunking.
"""
- chunk_size = self.nsize - self.nsym
- enc = bytearray()
+ # Restore precomputed tables (allow to use multiple RSCodec in
+ # one script).
+ global gf_log, gf_exp, field_charac
+ gf_log, gf_exp, field_charac = self.gf_log, self.gf_exp, self.field_charac
- for i in range(0, len(data), chunk_size):
- chunk = data[i:i + chunk_size]
- enc.extend(rs_encode_msg(chunk, self.nsym, fcr=self.fcr, generator=self.generator))
+ if not nsym:
+ nsym = self.nsym
+
+ if isinstance(data_, str):
+ data = _bytearray(data_)
+ else:
+ data = data_
+ enc = _bytearray()
+ for chunk in self.chunk(data, self.nsize - self.nsym):
+ enc.extend(rs_encode_msg(chunk, self.nsym, fcr=self.fcr, generator=self.generator, gen=self.gen[nsym]))
return enc
- def decode(self, data):
- """Repair a message, whatever its size is, by using chunking."""
- dec = bytearray()
- for i in range(0, len(data), self.nsize):
- chunk = data[i:i + self.nsize]
- dec.extend(rs_correct_msg(chunk, self.nsym, fcr=self.fcr, generator=self.generator, only_erasures=False)[0])
- return dec
+ def decode(self,
+ data: Any,
+ nsym: Optional[int] = None,
+ erase_pos: Optional[List[int]] = None,
+ only_erasures: bool = False
+ ) -> Tuple[bytearray, bytearray]:
+ """\
+ Repair a message, whatever its size is, by using chunking. May
+ return a wrong result if number of errors > nsym. Note that it
+ returns a couple of vars: the repaired messages, and the
+ repaired messages+ecc (useful for checking).
+
+ Usage: rmes, rmesecc = RSCodec.decode(data).
+ """
+ # erase_pos is a list of positions where you know (or greatly
+ # suspect at least) there is an erasure (i.e., wrong character but
+ # you know it's at this position). Just input the list of all
+ # positions you know there are errors, and this method will
+ # automatically split the erasures positions to attach to the
+ # corresponding data chunk.
+
+ # Restore precomputed tables (allow to use multiple RSCodec in
+ # one script)
+ global gf_log, gf_exp, field_charac
+ gf_log, gf_exp, field_charac = self.gf_log, self.gf_exp, self.field_charac
+
+ if not nsym:
+ nsym = self.nsym
+
+ if isinstance(data, str): # pragma: no cover
+ data = _bytearray(data)
+ dec = _bytearray()
+ dec_full = _bytearray()
+ for chunk in self.chunk(data, self.nsize):
+ # Extract the erasures for this chunk
+ e_pos = [] # type: List[int]
+ if erase_pos: # pragma: no cover
+ # First extract the erasures for this chunk
+ # (all erasures below the maximum chunk length)
+ e_pos = [x for x in erase_pos if x <= self.nsize]
+
+ # Then remove the extract erasures from the big list and
+ # also decrement all subsequent positions values by
+ # nsize (the current chunk's size) so as to prepare the
+ # correct alignment for the next iteration
+ erase_pos = [x - (self.nsize + 1) for x in erase_pos if x > self.nsize]
+
+ # Decode/repair this chunk!
+ rmes, recc = rs_correct_msg(chunk, nsym, fcr=self.fcr, generator=self.generator,
+ erase_pos=e_pos, only_erasures=only_erasures)
+ dec.extend(rmes)
+ dec_full.extend(rmes + recc)
+ return dec, dec_full
+
+ def check(self,
+ data: bytearray,
+ nsym: Optional[int] = None
+ ) -> List[bool]:
+ """\
+ Check if a message+ecc stream is not corrupted (or fully repaired).
+ Note: may return a wrong result if number of errors > nsym.
+ """
+ if not nsym:
+ nsym = self.nsym
+ if isinstance(data, str): # pragma: no cover
+ data = _bytearray(data)
+ check = []
+ for chunk in self.chunk(data, self.nsize):
+ check.append(rs_check(chunk, nsym, fcr=self.fcr, generator=self.generator))
+ return check
diff --git a/src/common/statics.py b/src/common/statics.py
index 26ae670..8545853 100644
--- a/src/common/statics.py
+++ b/src/common/statics.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,32 +16,44 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
"""Program details"""
TFC = 'TFC'
-VERSION = '1.17.08'
+TXP = 'Transmitter'
+RXP = 'Receiver'
+RP = 'Relay'
+VERSION = '1.19.01'
-"""Identifiers"""
-LOCAL_ID = 'local_id'
-DUMMY_CONTACT = 'dummy_contact'
-DUMMY_USER = 'dummy_user'
-DUMMY_STR = 'dummy_str'
-DUMMY_MEMBER = 'dummy_member'
+"""Identifiers
+
+Placeholder accounts for databases need to be valid v3 Onion addresses.
+"""
+LOCAL_ID = 'localidlocalidlocalidlocalidlocalidlocalidlocalidloj7uyd'
+LOCAL_PUBKEY = b'[\x84\x05\xa0kp\x80\xb4\rn\x10\x16\x81\xad\xc2\x02\xd05\xb8@Z\x06\xb7\x08\x0b@\xd6\xe1\x01h\x1a\xdc'
+LOCAL_NICK = 'local Source Computer'
+DUMMY_CONTACT = 'dummycontactdummycontactdummycontactdummycontactdumhsiid'
+DUMMY_MEMBER = 'dummymemberdummymemberdummymemberdummymemberdummymedakad'
+DUMMY_NICK = 'dummy_nick'
DUMMY_GROUP = 'dummy_group'
TX = 'tx'
RX = 'rx'
-NH = 'nh'
+NC = 'nc'
TAILS = b'Tails'
-"""Window identifiers (string)"""
-WIN_TYPE_COMMAND = 'win_type_command'
-WIN_TYPE_FILE = 'win_type_file'
-WIN_TYPE_CONTACT = 'win_type_contact'
-WIN_TYPE_GROUP = 'win_type_group'
+"""Window identifiers"""
+WIN_TYPE_COMMAND = 'system messages'
+WIN_TYPE_FILE = 'incoming files'
+WIN_TYPE_CONTACT = 'contact'
+WIN_TYPE_GROUP = 'group'
+
+
+"""Window UIDs"""
+WIN_UID_LOCAL = b'win_uid_local'
+WIN_UID_FILE = b'win_uid_file'
"""Packet types"""
@@ -59,14 +72,18 @@ UNKNOWN_ACCOUNTS = 'unknown_accounts'
"""Base58 key types"""
-B58_PUB_KEY = 'b58_pub_key'
-B58_LOCAL_KEY = 'b58_local_key'
-B58_FILE_KEY = 'b58_file_key'
+B58_PUBLIC_KEY = 'b58_public_key'
+B58_LOCAL_KEY = 'b58_local_key'
+
+
+"""Key input guides"""
+B58_PUBLIC_KEY_GUIDE = '   A   B   C   D   E   F   G   H   I   J   K   L  '
+B58_LOCAL_KEY_GUIDE = ' A B C D E F G H I J K L M N O P Q '
"""Key exchange types"""
-X25519 = 'x25519'
-PSK = 'psk'
+ECDHE = 'X448'
+PSK = 'PSK'
"""Contact setting types"""
@@ -76,30 +93,32 @@ NOTIFY = 'notify'
"""Command identifiers"""
-CLEAR = 'clear'
-RESET = 'reset'
-
+CLEAR = 'clear'
+RESET = 'reset'
+POWEROFF = 'poweroff'
"""Contact setting management"""
-ENABLE = b'es'
-DISABLE = b'ds'
-ALL = 'all'
+CONTACT_SETTING_HEADER_LENGTH = 2
+ENABLE = b'es'
+DISABLE = b'ds'
+ALL = 'all'
-"""NH bypass states"""
-NH_BYPASS_START = 'nh_bypass_start'
-NH_BYPASS_STOP = 'nh_bypass_stop'
-RESEND = 'resend'
+"""Networked Computer bypass states"""
+NC_BYPASS_START = 'nc_bypass_start'
+NC_BYPASS_STOP = 'nc_bypass_stop'
-"""Phase messages"""
-DONE = 'DONE'
+"""Status messages"""
+DONE = 'DONE'
+EVENT = '-!-'
+ME = 'Me'
"""VT100 codes
-VT100 codes are used to control printing to terminals. These
-make building functions like text box drawers possible.
+VT100 codes are used to control printing to the terminal. These make
+building functions like textbox drawers possible.
"""
CURSOR_UP_ONE_LINE = '\x1b[1A'
CURSOR_RIGHT_ONE_COLUMN = '\x1b[1C'
@@ -112,182 +131,247 @@ NORMAL_TEXT = '\033[0m'
"""Separators
-Separator byte/char is a non-printable byte used
-to separate fields in serialized data structures.
+Separator byte is a non-printable byte used to separate fields in
+serialized data structures.
"""
US_BYTE = b'\x1f'
-US_STR = '\x1f'
"""Datagram headers
These headers are prepended to datagrams that are transmitted over
-Serial or over the network. They tell receiving device what type of
-packet is in question.
+serial or over the network. They tell the receiving device what type of
+datagram is in question.
-Local key packets are only accepted by NH from local TxM. Even if NH is
-compromised, the worst case scenario is a denial of service attack
-where RxM receives new local keys. As user does not know the correct
-decryption key, they would have to manually cancel packets.
+Datagrams with local key header contain the encrypted local key, used to
+encrypt commands and data transferred between local Source and
+Destination computers. Packets with the header are only accepted by the
+Relay Program when they originate from the user's Source Computer. Even
+if the Networked Computer is compromised and the local key datagram is
+injected to the Destination Computer, the injected key could not be
+accepted by the user as they don't know the decryption key for it. The
+worst case scenario is a DoS attack where the Receiver Program receives
+new local keys continuously. Such an attack would, however, reveal to
+the user that they are under a sophisticated attack, and that their
+Networked Computer has been compromised.
-Public keys are delivered from contact all the way to RxM provided they
-are of correct format.
+Datagrams with Public key header contain TCB-level public keys that
+originate from the sender's Source Computer, and are displayed by the
+recipient's Networked Computer, from where they are manually typed to
+the recipient's Destination Computer.
-Message and command packet headers tell RxM whether to parse trailing
-fields that determine which XSalsa20-Poly1305 decryption keys it should
-load. Contacts can alter their packets to deliver COMMAND_PACKET_HEADER
-header, but NH will by design drop them and even if it somehow couldn't,
-RxM would drop the packet after MAC verification of encrypted harac
-fails.
+Message and command type datagrams tell the Receiver Program whether to
+parse the trailing fields that determine which XChaCha20-Poly1305
+decryption keys it should load. Contacts can of course try to alter
+their datagrams to contain a COMMAND_DATAGRAM_HEADER header, but Relay
+Program will by design drop them. Even if a compromised Networked
+Computer injects such a datagram to Destination Computer, the Receiver
+Program will drop the datagram when the MAC verification of the
+encrypted hash ratchet counter value fails.
-Unencrypted packet headers are intended to notify NH that the packet
-is intended for it. These commands are not delivered to RxM, but a
-standard encrypted command is sent to RxM before any unencrypted command
-is sent to NH. During traffic masking connection, unencrypted commands
-are disabled to hide the quantity and schedule of communication even if
-NH is compromised and monitoring the user. Unencrypted commands do not
-cause issues in security because if adversary can compromise NH to the
-point it can issue commands to NH, they could DoS NH anyway.
+File type datagram contains an encrypted file that the Receiver Program
+caches until its decryption key arrives from the sender inside a
+special, automated key delivery message.
-File CT headers are for file export from TxM to NH and in receiving end,
-import from NH to RxM.
+Unencrypted type datagrams contain commands intended for the Relay
+Program. These commands are in some cases preceded by an encrypted
+version of the command, that the Relay Program forwards to Receiver
+Program on Destination Computer. The unencrypted Relay commands are
+disabled during traffic masking to hide the quantity and schedule of
+communication even from the Networked Computer (in case it's compromised
+and monitoring the user). The fact these commands are unencrypted does
+not cause security issues, because if an adversary can compromise the
+Networked Computer to the point it can issue commands to the Relay
+Program, they could DoS the Relay Program, and thus TFC, anyway.
"""
-LOCAL_KEY_PACKET_HEADER = b'L'
-PUBLIC_KEY_PACKET_HEADER = b'P'
-MESSAGE_PACKET_HEADER = b'M'
-COMMAND_PACKET_HEADER = b'Y'
-UNENCRYPTED_PACKET_HEADER = b'U'
-EXPORTED_FILE_HEADER = b'O'
-IMPORTED_FILE_HEADER = b'I'
+DATAGRAM_TIMESTAMP_LENGTH = 8
+DATAGRAM_HEADER_LENGTH = 1
+LOCAL_KEY_DATAGRAM_HEADER = b'L'
+PUBLIC_KEY_DATAGRAM_HEADER = b'P'
+MESSAGE_DATAGRAM_HEADER = b'M'
+COMMAND_DATAGRAM_HEADER = b'K'
+FILE_DATAGRAM_HEADER = b'F'
+UNENCRYPTED_DATAGRAM_HEADER = b'U'
+
+
+"""Group management headers
+
+Group management datagrams are automatic messages that the
+Transmitter Program recommends the user to send when they make changes
+to the member list of a group, or when they add or remove groups. These
+messages are displayed by the Relay Program.
+"""
+GROUP_ID_LENGTH = 4
+GROUP_ID_ENC_LENGTH = 13
+GROUP_MSG_ID_LENGTH = 16
+GROUP_MGMT_HEADER_LENGTH = 1
+GROUP_MSG_INVITE_HEADER = b'I'
+GROUP_MSG_JOIN_HEADER = b'J'
+GROUP_MSG_MEMBER_ADD_HEADER = b'N'
+GROUP_MSG_MEMBER_REM_HEADER = b'R'
+GROUP_MSG_EXIT_GROUP_HEADER = b'X'
"""Assembly packet headers
-These one byte assembly packet headers are not part of the padded
+These one-byte assembly packet headers are not part of the padded
message parsed from assembly packets. They are however the very first
-plaintext byte, prepended to every padded assembly packet delivered to
-recipient or local RxM. They deliver information about if and when to
-process the packet and when to drop previously collected assembly
-packets.
+plaintext byte, prepended to every padded assembly packet that is
+delivered to the recipient/local Destination Computer. The header
+delivers the information about if and when to assemble the packet,
+as well as when to drop any previously collected assembly packets.
"""
+FILE_PACKET_CTR_LENGTH = 8
+ASSEMBLY_PACKET_HEADER_LENGTH = 1
+
M_S_HEADER = b'a' # Short message packet
-M_L_HEADER = b'b' # First packet of multi-packet message
+M_L_HEADER = b'b' # First packet of multi-packet message
M_A_HEADER = b'c' # Appended packet of multi-packet message
-M_E_HEADER = b'd' # Last packet of multi-packet message
-M_C_HEADER = b'e' # Cancelled multi-packet message
+M_E_HEADER = b'd' # Last packet of multi-packet message
+M_C_HEADER = b'e' # Cancelled multi-packet message
P_N_HEADER = b'f' # Noise message packet
F_S_HEADER = b'A' # Short file packet
-F_L_HEADER = b'B' # First packet of multi-packet file
+F_L_HEADER = b'B' # First packet of multi-packet file
F_A_HEADER = b'C' # Appended packet of multi-packet file
-F_E_HEADER = b'D' # Last packet of multi-packet file
-F_C_HEADER = b'E' # Cancelled multi-packet file
+F_E_HEADER = b'D' # Last packet of multi-packet file
+F_C_HEADER = b'E' # Cancelled multi-packet file
C_S_HEADER = b'0' # Short command packet
-C_L_HEADER = b'1' # First packet of multi-packet command
+C_L_HEADER = b'1' # First packet of multi-packet command
C_A_HEADER = b'2' # Appended packet of multi-packet command
-C_E_HEADER = b'3' # Last packet of multi-packet command
-C_C_HEADER = b'4' # Cancelled multi-packet command (not implemented)
+C_E_HEADER = b'3' # Last packet of multi-packet command
+C_C_HEADER = b'4' # Cancelled multi-packet command (reserved but not in use)
C_N_HEADER = b'5' # Noise command packet
"""Unencrypted command headers
-These two-byte headers are only used to control NH. These commands will
-not be used during traffic masking to hide when TFC is being used. These
-commands are not encrypted because if attacker is able to inject
-commands from within NH, they could also access any keys stored on NH.
+These two-byte headers are only used to control the Relay Program on
+Networked Computer. These commands will not be used during traffic
+masking, as they would reveal when TFC is being used. These commands do
+not require encryption, because if an attacker can compromise the
+Networked Computer to the point it could inject commands to Relay
+Program, it could most likely also access any decryption keys used by
+the Relay Program.
"""
-UNENCRYPTED_SCREEN_CLEAR = b'UC'
-UNENCRYPTED_SCREEN_RESET = b'UR'
-UNENCRYPTED_EXIT_COMMAND = b'UX'
-UNENCRYPTED_IMPORT_COMMAND = b'UI'
-UNENCRYPTED_EC_RATIO = b'UE'
-UNENCRYPTED_BAUDRATE = b'UB'
-UNENCRYPTED_GUI_DIALOG = b'UD'
-UNENCRYPTED_WIPE_COMMAND = b'UW'
+UNENCRYPTED_COMMAND_HEADER_LENGTH = 2
+UNENCRYPTED_SCREEN_CLEAR = b'UC'
+UNENCRYPTED_SCREEN_RESET = b'UR'
+UNENCRYPTED_EXIT_COMMAND = b'UX'
+UNENCRYPTED_EC_RATIO = b'UE'
+UNENCRYPTED_BAUDRATE = b'UB'
+UNENCRYPTED_WIPE_COMMAND = b'UW'
+UNENCRYPTED_ADD_NEW_CONTACT = b'UN'
+UNENCRYPTED_ADD_EXISTING_CONTACT = b'UA'
+UNENCRYPTED_REM_CONTACT = b'UD'
+UNENCRYPTED_ONION_SERVICE_DATA = b'UO'
+UNENCRYPTED_MANAGE_CONTACT_REQ = b'UM'
"""Encrypted command headers
-These two-byte headers are prepended to each command delivered to local
-RxM. The header is evaluated after RxM has received all assembly packets
-of one transmission. These headers tell RxM to what function the command
-must be redirected to.
+These two-byte headers determine the type of command for Receiver
+Program on local Destination Computer. The header is evaluated after the
+Receiver Program has received all assembly packets and assembled the
+command. These headers tell the Receiver Program to which function the
+provided parameters (if any) must be redirected.
"""
-LOCAL_KEY_INSTALLED_HEADER = b'LI'
-SHOW_WINDOW_ACTIVITY_HEADER = b'SA'
-WINDOW_SELECT_HEADER = b'WS'
-CLEAR_SCREEN_HEADER = b'SC'
-RESET_SCREEN_HEADER = b'SR'
-EXIT_PROGRAM_HEADER = b'EX'
-LOG_DISPLAY_HEADER = b'LD'
-LOG_EXPORT_HEADER = b'LE'
-LOG_REMOVE_HEADER = b'LR'
-CHANGE_MASTER_K_HEADER = b'MK'
-CHANGE_NICK_HEADER = b'NC'
-CHANGE_SETTING_HEADER = b'CS'
-CHANGE_LOGGING_HEADER = b'CL'
-CHANGE_FILE_R_HEADER = b'CF'
-CHANGE_NOTIFY_HEADER = b'CN'
-GROUP_CREATE_HEADER = b'GC'
-GROUP_ADD_HEADER = b'GA'
-GROUP_REMOVE_M_HEADER = b'GR'
-GROUP_DELETE_HEADER = b'GD'
-KEY_EX_X25519_HEADER = b'KE'
-KEY_EX_PSK_TX_HEADER = b'KT'
-KEY_EX_PSK_RX_HEADER = b'KR'
-CONTACT_REMOVE_HEADER = b'CR'
-WIPE_USER_DATA_HEADER = b'WD'
+ENCRYPTED_COMMAND_HEADER_LENGTH = 2
+LOCAL_KEY_RDY = b'LI'
+WIN_ACTIVITY = b'SA'
+WIN_SELECT = b'WS'
+CLEAR_SCREEN = b'SC'
+RESET_SCREEN = b'SR'
+EXIT_PROGRAM = b'EX'
+LOG_DISPLAY = b'LD'
+LOG_EXPORT = b'LE'
+LOG_REMOVE = b'LR'
+CH_MASTER_KEY = b'MK'
+CH_NICKNAME = b'NC'
+CH_SETTING = b'CS'
+CH_LOGGING = b'CL'
+CH_FILE_RECV = b'CF'
+CH_NOTIFY = b'CN'
+GROUP_CREATE = b'GC'
+GROUP_ADD = b'GA'
+GROUP_REMOVE = b'GR'
+GROUP_DELETE = b'GD'
+GROUP_RENAME = b'GN'
+KEY_EX_ECDHE = b'KE'
+KEY_EX_PSK_TX = b'KT'
+KEY_EX_PSK_RX = b'KR'
+CONTACT_REM = b'CR'
+WIPE_USR_DATA = b'WD'
"""Origin headers
-This one byte header notifies RxM whether the account
-included in the packet is the source or destination.
+This one-byte header tells the Relay and Receiver Programs whether the
+account included in the packet is the source or the destination of the
+transmission. The user origin header is used when the Relay Program
+forwards the message packets from user's Source Computer to user's
+Destination Computer. The contact origin header is used when the program
+forwards packets that are loaded from servers of contacts to the user's
+Destination Computer.
+
+On Destination Computer, the Receiver Program uses the origin header to
+determine which unidirectional keys it should load to decrypt the
+datagram payload.
"""
+ORIGIN_HEADER_LENGTH = 1
ORIGIN_USER_HEADER = b'o'
ORIGIN_CONTACT_HEADER = b'i'
"""Message headers
-This one byte header will be prepended to each plaintext message prior
-to padding and splitting the message. It will be evaluated once RxM has
-received all assembly packets. It allows RxM to detect whether the
-message should be displayed on private or group window. This does not
-allow spoofing of messages in unauthorized group windows, because the
-(group configuration managed personally by the recipient) white lists
-accounts who are authorized to display the message under the group
-window.
+This one-byte header will be prepended to each plaintext message before
+padding and splitting the message. It will be evaluated once the Receiver
+Program has received all assembly packets and assembled the message.
-Whisper message header is message with "sender based control". Unless
-contact is malicious, these messages are not logged.
+The private and group message headers allow the Receiver Program to
+determine whether the message should be displayed in a private or in a
+group window. This does not allow re-direction of messages to
+unauthorized group windows, because TFC's manually managed group
+configuration is also a whitelist for accounts that are authorized to
+display messages under the group's window.
+
+Messages with the whisper message header have "sender-based control".
+Unless the contact maliciously alters their Receiver Program's behavior,
+whispered messages are not logged regardless of in-program controlled
+settings.
+
+Messages with file key header contain the hash of the file ciphertext
+that was sent to the user earlier. It also contains the symmetric
+decryption key for that file.
"""
+MESSAGE_HEADER_LENGTH = 1
+WHISPER_FIELD_LENGTH = 1
PRIVATE_MESSAGE_HEADER = b'p'
GROUP_MESSAGE_HEADER = b'g'
-WHISPER_MESSAGE_HEADER = b'w'
-
-
-"""Group management headers
-
-Group messages are automatically parsed messages that TxM recommends
-user to send when they make changes to group members or add/remove
-groups. These messages are displayed temporarily on whatever active
-window and later in command window.
-"""
-GROUP_MSG_INVITEJOIN_HEADER = b'T'
-GROUP_MSG_MEMBER_ADD_HEADER = b'N'
-GROUP_MSG_MEMBER_REM_HEADER = b'R'
-GROUP_MSG_EXIT_GROUP_HEADER = b'X'
+FILE_KEY_HEADER = b'k'
"""Delays
-Traffic masking packet queue check delay ensures that
-the lookup time for packet queue is obfuscated.
+Traffic masking packet queue check delay ensures that the lookup time
+for the packet queue is obfuscated.
+
+The local testing packet delay is an arbitrary delay that simulates the
+slight delay caused by data transmission over a serial interface.
+
+The Relay client delays are values that determine the delays between
+checking the online status of the contact (and the state of their
+ephemeral URL token public key).
"""
TRAFFIC_MASKING_QUEUE_CHECK_DELAY = 0.1
+TRAFFIC_MASKING_MIN_STATIC_DELAY = 0.1
+TRAFFIC_MASKING_MIN_RANDOM_DELAY = 0.1
+LOCAL_TESTING_PACKET_DELAY = 0.1
+RELAY_CLIENT_MAX_DELAY = 16
+RELAY_CLIENT_MIN_DELAY = 0.125
+CLIENT_OFFLINE_THRESHOLD = 4.0
"""Constant time delay types"""
@@ -296,144 +380,202 @@ TRAFFIC_MASKING = 'traffic_masking'
"""Default folders"""
-DIR_USER_DATA = 'user_data/'
-DIR_RX_FILES = 'received_files/'
-DIR_IMPORTED = 'imported_files/'
+DIR_USER_DATA = 'user_data/'
+DIR_RECV_FILES = 'received_files/'
+DIR_TFC = 'tfc/'
-"""Regular expressions
-
-These are used to specify exact format of some inputs.
-"""
-ACCOUNT_FORMAT = '(^.[^/:,]*@.[^/:,]*\.[^/:,]*.$)' # @.
+"""Key exchange status states"""
+KEX_STATUS_NONE = b'\xa0'
+KEX_STATUS_PENDING = b'\xa1'
+KEX_STATUS_UNVERIFIED = b'\xa2'
+KEX_STATUS_VERIFIED = b'\xa3'
+KEX_STATUS_NO_RX_PSK = b'\xa4'
+KEX_STATUS_HAS_RX_PSK = b'\xa5'
+KEX_STATUS_LOCAL_KEY = b'\xa6'
"""Queue dictionary keys"""
-
# Common
EXIT_QUEUE = b'exit'
GATEWAY_QUEUE = b'gateway'
-UNITTEST_QUEUE = b'unittest_queue'
+UNITTEST_QUEUE = b'unittest'
# Transmitter
-MESSAGE_PACKET_QUEUE = b'message_packet'
-FILE_PACKET_QUEUE = b'file_packet'
-COMMAND_PACKET_QUEUE = b'command_packet'
-NH_PACKET_QUEUE = b'nh_packet'
-LOG_PACKET_QUEUE = b'log_packet'
-NOISE_PACKET_QUEUE = b'noise_packet'
-NOISE_COMMAND_QUEUE = b'noise_command'
-KEY_MANAGEMENT_QUEUE = b'key_management'
-WINDOW_SELECT_QUEUE = b'window_select'
+MESSAGE_PACKET_QUEUE = b'message_packet'
+COMMAND_PACKET_QUEUE = b'command_packet'
+TM_MESSAGE_PACKET_QUEUE = b'tm_message_packet'
+TM_FILE_PACKET_QUEUE = b'tm_file_packet'
+TM_COMMAND_PACKET_QUEUE = b'tm_command_packet'
+TM_NOISE_PACKET_QUEUE = b'tm_noise_packet'
+TM_NOISE_COMMAND_QUEUE = b'tm_noise_command'
+RELAY_PACKET_QUEUE = b'relay_packet'
+LOG_PACKET_QUEUE = b'log_packet'
+LOG_SETTING_QUEUE = b'log_setting'
+TRAFFIC_MASKING_QUEUE = b'traffic_masking'
+LOGFILE_MASKING_QUEUE = b'logfile_masking'
+KEY_MANAGEMENT_QUEUE = b'key_management'
+SENDER_MODE_QUEUE = b'sender_mode'
+WINDOW_SELECT_QUEUE = b'window_select'
-# NH
-TXM_INCOMING_QUEUE = b'txm_incoming'
-RXM_OUTGOING_QUEUE = b'rxm_outgoing'
-TXM_TO_IM_QUEUE = b'txm_to_im'
-TXM_TO_NH_QUEUE = b'txm_to_nh'
-TXM_TO_RXM_QUEUE = b'txm_to_rxm'
-NH_TO_IM_QUEUE = b'nh_to_im'
+# Relay
+DST_COMMAND_QUEUE = b'dst_command'
+DST_MESSAGE_QUEUE = b'dst_message'
+M_TO_FLASK_QUEUE = b'm_to_flask'
+F_TO_FLASK_QUEUE = b'f_to_flask'
+SRC_TO_RELAY_QUEUE = b'src_to_relay'
+URL_TOKEN_QUEUE = b'url_token'
+GROUP_MGMT_QUEUE = b'group_mgmt'
+GROUP_MSG_QUEUE = b'group_msg'
+CONTACT_REQ_QUEUE = b'contact_req'
+F_REQ_MGMT_QUEUE = b'f_req_mgmt'
+CONTACT_KEY_QUEUE = b'contact_key'
+C_REQ_MGR_QUEUE = b'c_req_mgr'
+ONION_KEY_QUEUE = b'onion_key'
+ONION_CLOSE_QUEUE = b'close_onion'
+TOR_DATA_QUEUE = b'tor_data'
"""Queue signals"""
KDB_ADD_ENTRY_HEADER = 'ADD'
KDB_REMOVE_ENTRY_HEADER = 'REM'
KDB_CHANGE_MASTER_KEY_HEADER = 'KEY'
+KDB_UPDATE_SIZE_HEADER = 'STO'
+RP_ADD_CONTACT_HEADER = 'RAC'
+RP_REMOVE_CONTACT_HEADER = 'RRC'
EXIT = 'EXIT'
WIPE = 'WIPE'
-"""Static values
+"""Static values"""
-These values are not settings but descriptive integer values.
-"""
+# Serial interface
+BAUDS_PER_BYTE = 10
+SERIAL_RX_MIN_TIMEOUT = 0.05
+
+# CLI indents
+CONTACT_LIST_INDENT = 4
+FILE_TRANSFER_INDENT = 4
+SETTINGS_INDENT = 2
+
+# Compression
+COMPRESSION_LEVEL = 9
+MAX_MESSAGE_SIZE = 100_000 # bytes
+
+# Traffic masking
+NOISE_PACKET_BUFFER = 100
+
+# Local testing
+LOCALHOST = 'localhost'
+SRC_DD_LISTEN_SOCKET = 5005
+RP_LISTEN_SOCKET = 5006
+DST_DD_LISTEN_SOCKET = 5007
+DST_LISTEN_SOCKET = 5008
+
+# Field lengths
+ENCODED_BOOLEAN_LENGTH = 1
+ENCODED_BYTE_LENGTH = 1
+TIMESTAMP_LENGTH = 4
+ENCODED_INTEGER_LENGTH = 8
+ENCODED_FLOAT_LENGTH = 8
+FILE_ETA_FIELD_LENGTH = 8
+FILE_SIZE_FIELD_LENGTH = 8
+GROUP_DB_HEADER_LENGTH = 32
+PADDED_UTF32_STR_LENGTH = 1024
+CONFIRM_CODE_LENGTH = 1
+PACKET_CHECKSUM_LENGTH = 16
+
+# Onion address format
+ONION_ADDRESS_CHECKSUM_ID = b".onion checksum"
+ONION_SERVICE_VERSION = b'\x03'
+ONION_SERVICE_VERSION_LENGTH = 1
+ONION_ADDRESS_CHECKSUM_LENGTH = 2
+ONION_ADDRESS_LENGTH = 56
# Misc
-BAUDS_PER_BYTE = 10
-COMPRESSION_LEVEL = 9
-ENTROPY_THRESHOLD = 512
+MAX_INT = 2 ** 64 - 1
+B58_CHECKSUM_LENGTH = 4
+TRUNC_ADDRESS_LENGTH = 5
+
+# Key derivation
+ARGON2_SALT_LENGTH = 32
+ARGON2_ROUNDS = 25
+ARGON2_MIN_MEMORY = 64000 # bytes
+MIN_KEY_DERIVATION_TIME = 3.0 # seconds
+
+# Cryptographic field sizes
+TFC_PRIVATE_KEY_LENGTH = 56
+TFC_PUBLIC_KEY_LENGTH = 56
+FINGERPRINT_LENGTH = 32
+ONION_SERVICE_PRIVATE_KEY_LENGTH = 32
+ONION_SERVICE_PUBLIC_KEY_LENGTH = 32
+XCHACHA20_NONCE_LENGTH = 24
+SYMMETRIC_KEY_LENGTH = 32
+POLY1305_TAG_LENGTH = 16
+BLAKE2_DIGEST_LENGTH = 32
+BLAKE2_DIGEST_LENGTH_MAX = 64
+ENTROPY_THRESHOLD = 512
+HARAC_LENGTH = 8
+PADDING_LENGTH = 255
# Forward secrecy
INITIAL_HARAC = 0
-HARAC_WARN_THRESHOLD = 1000
-
-# CLI indents
-CONTACT_LIST_INDENT = 4
-SETTINGS_INDENT = 2
-
-# Local testing
-TXM_DD_LISTEN_SOCKET = 5000
-NH_LISTEN_SOCKET = 5001
-RXM_DD_LISTEN_SOCKET = 5002
-RXM_LISTEN_SOCKET = 5003
-LOCAL_TESTING_PACKET_DELAY = 0.1
-
-# Field lengths
-BOOLEAN_SETTING_LEN = 1
-ORIGIN_HEADER_LEN = 1
-TIMESTAMP_LEN = 4
-INTEGER_SETTING_LEN = 8
-FLOAT_SETTING_LEN = 8
-FILE_PACKET_CTR_LEN = 8
-FILE_ETA_FIELD_LEN = 8
-FILE_SIZE_FIELD_LEN = 8
-GROUP_MSG_ID_LEN = 16
-GROUP_DB_HEADER_LEN = 32
-PADDED_UTF32_STR_LEN = 1024
-
-ARGON2_SALT_LEN = 32
-ARGON2_ROUNDS = 25
-ARGON2_MIN_MEMORY = 64000
-XSALSA20_NONCE_LEN = 24
-POLY1305_TAG_LEN = 16
-
-FINGERPRINT_LEN = 32
-KEY_LENGTH = 32
-HARAC_LEN = 8
-B58_CHKSUM_LEN = 4
-
-PADDING_LEN = 255
-ASSEMBLY_PACKET_LEN = 256
+HARAC_WARN_THRESHOLD = 100_000
# Special messages
-PLACEHOLDER_DATA = P_N_HEADER + bytes(PADDING_LEN)
-
+PLACEHOLDER_DATA = P_N_HEADER + bytes(PADDING_LENGTH)
# Field lengths
-MESSAGE_LENGTH = (XSALSA20_NONCE_LEN
- + HARAC_LEN
- + POLY1305_TAG_LEN
+ASSEMBLY_PACKET_LENGTH = ASSEMBLY_PACKET_HEADER_LENGTH + PADDING_LENGTH
- + XSALSA20_NONCE_LEN
- + ASSEMBLY_PACKET_LEN
- + POLY1305_TAG_LEN)
+HARAC_CT_LENGTH = (XCHACHA20_NONCE_LENGTH
+ + HARAC_LENGTH
+ + POLY1305_TAG_LENGTH)
-PACKET_LENGTH = (len(MESSAGE_PACKET_HEADER)
- + MESSAGE_LENGTH
- + ORIGIN_HEADER_LEN)
+ASSEMBLY_PACKET_CT_LENGTH = (XCHACHA20_NONCE_LENGTH
+ + ASSEMBLY_PACKET_LENGTH
+ + POLY1305_TAG_LENGTH)
-CONTACT_LENGTH = (3*PADDED_UTF32_STR_LEN
- + 2*FINGERPRINT_LEN
- + 3*BOOLEAN_SETTING_LEN)
+MESSAGE_LENGTH = HARAC_CT_LENGTH + ASSEMBLY_PACKET_CT_LENGTH
-KEYSET_LENGTH = (PADDED_UTF32_STR_LEN
- + 4*KEY_LENGTH
- + 2*HARAC_LEN)
+COMMAND_LENGTH = (DATAGRAM_HEADER_LENGTH
+ + MESSAGE_LENGTH)
-PSK_FILE_SIZE = (XSALSA20_NONCE_LEN
- + ARGON2_SALT_LEN
- + 2*KEY_LENGTH
- + POLY1305_TAG_LEN)
+PACKET_LENGTH = (DATAGRAM_HEADER_LENGTH
+ + MESSAGE_LENGTH
+ + ORIGIN_HEADER_LENGTH)
-LOG_ENTRY_LENGTH = (XSALSA20_NONCE_LEN
- + PADDED_UTF32_STR_LEN
- + TIMESTAMP_LEN
- + ORIGIN_HEADER_LEN
- + ASSEMBLY_PACKET_LEN
- + POLY1305_TAG_LEN)
+GROUP_STATIC_LENGTH = (PADDED_UTF32_STR_LENGTH
+ + GROUP_ID_LENGTH
+ + 2 * ENCODED_BOOLEAN_LENGTH)
-SETTING_LENGTH = (XSALSA20_NONCE_LEN
- + 5*INTEGER_SETTING_LEN
- + 4*FLOAT_SETTING_LEN
- + 13*BOOLEAN_SETTING_LEN
- + POLY1305_TAG_LEN)
+CONTACT_LENGTH = (ONION_SERVICE_PUBLIC_KEY_LENGTH
+ + 2 * FINGERPRINT_LENGTH
+ + 4 * ENCODED_BOOLEAN_LENGTH
+ + PADDED_UTF32_STR_LENGTH)
+
+KEYSET_LENGTH = (ONION_SERVICE_PUBLIC_KEY_LENGTH
+ + 4 * SYMMETRIC_KEY_LENGTH
+ + 2 * HARAC_LENGTH)
+
+PSK_FILE_SIZE = (XCHACHA20_NONCE_LENGTH
+ + ARGON2_SALT_LENGTH
+ + 2 * SYMMETRIC_KEY_LENGTH
+ + POLY1305_TAG_LENGTH)
+
+LOG_ENTRY_LENGTH = (XCHACHA20_NONCE_LENGTH
+ + ONION_SERVICE_PUBLIC_KEY_LENGTH
+ + TIMESTAMP_LENGTH
+ + ORIGIN_HEADER_LENGTH
+ + ASSEMBLY_PACKET_LENGTH
+ + POLY1305_TAG_LENGTH)
+
+MASTERKEY_DB_SIZE = (ARGON2_SALT_LENGTH
+ + BLAKE2_DIGEST_LENGTH
+ + 2 * ENCODED_INTEGER_LENGTH)
+
+SETTING_LENGTH = (XCHACHA20_NONCE_LENGTH
+ + 4 * ENCODED_INTEGER_LENGTH
+ + 3 * ENCODED_FLOAT_LENGTH
+ + 11 * ENCODED_BOOLEAN_LENGTH
+ + POLY1305_TAG_LENGTH)
diff --git a/src/nh/commands.py b/src/nh/commands.py
deleted file mode 100644
index e08fdad..0000000
--- a/src/nh/commands.py
+++ /dev/null
@@ -1,173 +0,0 @@
-#!/usr/bin/env python3.5
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
-"""
-
-import os
-import serial
-import sys
-import time
-import typing
-
-from typing import Any, Dict
-
-from src.common.exceptions import FunctionReturn
-from src.common.misc import ignored
-from src.common.output import c_print, clear_screen
-from src.common.path import ask_path_gui
-from src.common.statics import *
-
-if typing.TYPE_CHECKING:
- from multiprocessing import Queue
- from src.nh.settings import Settings
-
-
-def nh_command(queues: Dict[bytes, 'Queue'],
- settings: 'Settings',
- stdin_fd: int,
- unittest: bool = False) -> None:
- """Loop that processes NH side commands."""
- sys.stdin = os.fdopen(stdin_fd)
- queue_from_txm = queues[TXM_TO_NH_QUEUE]
-
- while True:
- with ignored(EOFError, FunctionReturn, KeyboardInterrupt):
- while queue_from_txm.qsize() == 0:
- time.sleep(0.01)
-
- command = queue_from_txm.get()
- process_command(settings, command, queues)
-
- if unittest:
- break
-
-
-def process_command(settings: 'Settings', command: bytes, queues: Dict[bytes, 'Queue']) -> None:
- """Process received command."""
- # Keyword Function to run ( Parameters )
- # -----------------------------------------------------------------------------------------------
- function_d = {UNENCRYPTED_SCREEN_CLEAR: (clear_windows, settings, command, queues[NH_TO_IM_QUEUE] ),
- UNENCRYPTED_SCREEN_RESET: (reset_windows, settings, command, queues[NH_TO_IM_QUEUE] ),
- UNENCRYPTED_EXIT_COMMAND: (exit_tfc, settings, queues[EXIT_QUEUE] ),
- UNENCRYPTED_WIPE_COMMAND: (wipe, settings, queues[EXIT_QUEUE] ),
- UNENCRYPTED_IMPORT_COMMAND: (rxm_import, settings, queues[RXM_OUTGOING_QUEUE] ),
- UNENCRYPTED_EC_RATIO: (change_ec_ratio, settings, command ),
- UNENCRYPTED_BAUDRATE: (change_baudrate, settings, command ),
- UNENCRYPTED_GUI_DIALOG: (change_gui_dialog, settings, command )} # type: Dict[bytes, Any]
-
- header = command[:2]
-
- if header not in function_d:
- raise FunctionReturn("Error: Received an invalid command.")
-
- from_dict = function_d[header]
- func = from_dict[0]
- parameters = from_dict[1:]
- func(*parameters)
-
-
-def race_condition_delay(settings: 'Settings') -> None:
- """Handle race condition with RxM command notification."""
- if settings.local_testing_mode:
- time.sleep(0.1)
- if settings.data_diode_sockets:
- time.sleep(1)
-
-def clear_windows(settings: 'Settings', command: bytes, queue_to_im: 'Queue') -> None:
- """Clear NH screen and IM client window."""
- race_condition_delay(settings)
- queue_to_im.put(command)
- clear_screen()
-
-
-def reset_windows(settings: 'Settings', command: bytes, queue_to_im: 'Queue') -> None:
- """Reset NH screen and clear IM client window."""
- race_condition_delay(settings)
- queue_to_im.put(command)
- os.system('reset')
-
-
-def exit_tfc(settings: 'Settings', queue_exit: 'Queue') -> None:
- """Exit TFC."""
- race_condition_delay(settings)
- queue_exit.put(EXIT)
-
-
-def rxm_import(settings: 'Settings', queue_to_rxm: 'Queue') -> None:
- """Import encrypted file to RxM."""
- f_path = ask_path_gui("Select file to import...", settings, get_file=True)
- with open(f_path, 'rb') as f:
- f_data = f.read()
- queue_to_rxm.put(IMPORTED_FILE_HEADER + f_data)
-
-
-def change_ec_ratio(settings: 'Settings', command: bytes) -> None:
- """Change Reed-Solomon erasure code correction ratio setting on NH."""
- try:
- value = int(command[2:])
- if value < 1 or value > 2 ** 64 - 1:
- raise ValueError
- except ValueError:
- raise FunctionReturn("Error: Received invalid EC ratio value from TxM.")
-
- settings.serial_error_correction = value
- settings.store_settings()
- c_print("Error correction ratio will change on restart.", head=1, tail=1)
-
-
-def change_baudrate(settings: 'Settings', command: bytes) -> None:
- """Change serial interface baud rate setting on NH."""
- try:
- value = int(command[2:])
- if value not in serial.Serial.BAUDRATES:
- raise ValueError
- except ValueError:
- raise FunctionReturn("Error: Received invalid baud rate value from TxM.")
-
- settings.serial_baudrate = value
- settings.store_settings()
- c_print("Baud rate will change on restart.", head=1, tail=1)
-
-
-def change_gui_dialog(settings: 'Settings', command: bytes) -> None:
- """Change file selection (GUI/CLI prompt) setting on NH."""
- try:
- value_bytes = command[2:].lower()
- if value_bytes not in [b'true', b'false']:
- raise ValueError
- value = (value_bytes == b'true')
- except ValueError:
- raise FunctionReturn("Error: Received invalid GUI dialog setting value from TxM.")
-
- settings.disable_gui_dialog = value
- settings.store_settings()
-
- c_print("Changed setting disable_gui_dialog to {}.".format(value), head=1, tail=1)
-
-
-def wipe(settings: 'Settings', queue_exit: 'Queue') -> None:
- """Reset terminal, wipe all user data from NH and power off system.
-
- No effective RAM overwriting tool currently exists, so as long as TxM/RxM
- use FDE and DDR3 memory, recovery of user data becomes impossible very fast:
-
- https://www1.cs.fau.de/filepool/projects/coldboot/fares_coldboot.pdf
- """
- os.system('reset')
- race_condition_delay(settings)
- queue_exit.put(WIPE)
diff --git a/src/nh/gateway.py b/src/nh/gateway.py
deleted file mode 100644
index 86208d0..0000000
--- a/src/nh/gateway.py
+++ /dev/null
@@ -1,169 +0,0 @@
-#!/usr/bin/env python3.5
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
-"""
-
-import multiprocessing.connection
-import os.path
-import serial
-import time
-import typing
-
-from serial.serialutil import SerialException
-from typing import Any, Dict
-
-from src.common.exceptions import CriticalError, graceful_exit
-from src.common.misc import ignored
-from src.common.output import phase, print_on_previous_line
-from src.common.statics import *
-
-if typing.TYPE_CHECKING:
- from multiprocessing import Queue
- from src.nh.settings import Settings
-
-
-def gateway_loop(queues: Dict[bytes, 'Queue'],
- gateway: 'Gateway',
- unittest: bool = False) -> None:
- """Loop that loads data from TxM side gateway to NH."""
- queue = queues[TXM_INCOMING_QUEUE]
-
- while True:
- with ignored(EOFError, KeyboardInterrupt):
- queue.put(gateway.read())
- if unittest:
- break
-
-
-class Gateway(object):
- """Gateway object is a wrapper for interfaces that connect NH with TxM/RxM."""
-
- def __init__(self, settings: 'Settings') -> None:
- """Create a new Gateway object."""
- self.settings = settings
- self.txm_interface = None # type: Any
- self.rxm_interface = None # type: Any
-
- # Set True when serial adapter is initially found so that further
- # serial interface searches know to announce disconnection.
- self.init_found = False
-
- if settings.local_testing_mode:
- self.establish_socket()
- else:
- self.txm_interface = self.rxm_interface = self.establish_serial()
-
- def write(self, packet: bytes) -> None:
- """Output data via socket/serial interface."""
- if self.settings.local_testing_mode:
- self.rxm_interface.send(packet)
- else:
- try:
- self.rxm_interface.write(packet)
- self.rxm_interface.flush()
- time.sleep(self.settings.transmit_delay)
- except SerialException:
- self.rxm_interface = self.establish_serial()
- self.write(packet)
-
- def read(self) -> bytes:
- """Read data via socket/serial interface."""
- if self.settings.local_testing_mode:
- while True:
- try:
- return self.txm_interface.recv()
- except KeyboardInterrupt:
- pass
- except EOFError:
- graceful_exit("IPC client disconnected.")
- else:
- while True:
- try:
- start_time = 0.0
- read_buffer = bytearray()
- while True:
- read = self.txm_interface.read(1000)
- if read:
- start_time = time.monotonic()
- read_buffer.extend(read)
- else:
- if read_buffer:
- delta = time.monotonic() - start_time
- if delta > self.settings.receive_timeout:
- return bytes(read_buffer)
- else:
- time.sleep(0.001)
-
- except KeyboardInterrupt:
- pass
- except SerialException:
- self.txm_interface = self.establish_serial()
- self.read()
-
- def establish_socket(self) -> None:
- """Establish local testing socket connections."""
- listener = multiprocessing.connection.Listener(('localhost', NH_LISTEN_SOCKET))
- self.txm_interface = listener.accept()
-
- while True:
- try:
- rxm_socket = RXM_DD_LISTEN_SOCKET if self.settings.data_diode_sockets else RXM_LISTEN_SOCKET
- self.rxm_interface = multiprocessing.connection.Client(('localhost', rxm_socket))
- break
- except ConnectionRefusedError:
- time.sleep(0.1)
-
- def establish_serial(self) -> Any:
- """Create a new Serial object."""
- try:
- serial_nh = self.search_serial_interface()
- return serial.Serial(serial_nh, self.settings.session_serial_baudrate, timeout=0)
- except SerialException:
- graceful_exit("SerialException. Ensure $USER is in the dialout group.")
-
- def search_serial_interface(self) -> str:
- """Search for serial interface."""
- if self.settings.serial_usb_adapter:
- search_announced = False
-
- if not self.init_found:
- print_on_previous_line()
- phase("Searching for USB-to-serial interface")
-
- while True:
- time.sleep(0.1)
- for f in sorted(os.listdir('/dev')):
- if f.startswith('ttyUSB'):
- if self.init_found:
- time.sleep(1.5)
- phase('Found', done=True)
- if self.init_found:
- print_on_previous_line(reps=2)
- self.init_found = True
- return '/dev/{}'.format(f)
- else:
- if not search_announced:
- if self.init_found:
- phase("Serial adapter disconnected. Waiting for interface", head=1)
- search_announced = True
-
- else:
- f = 'ttyS0'
- if f in sorted(os.listdir('/dev/')):
- return '/dev/{}'.format(f)
- raise CriticalError("Error: /dev/{} was not found.".format(f))
diff --git a/src/nh/misc.py b/src/nh/misc.py
deleted file mode 100644
index e6f6bd2..0000000
--- a/src/nh/misc.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env python3.5
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see <http://www.gnu.org/licenses/>.
-"""
-
-import argparse
-
-from typing import Tuple
-
-
-def process_arguments() -> Tuple[bool, bool]:
- """Define nh.py settings from arguments passed from command line."""
- parser = argparse.ArgumentParser("python3.6 nh.py",
- usage="%(prog)s [OPTION]",
- description="More options inside nh.py")
-
- parser.add_argument('-l',
- action='store_true',
- default=False,
- dest='local_test',
- help="Enable local testing mode")
-
- parser.add_argument('-d',
- action='store_true',
- default=False,
- dest='dd_sockets',
- help="Enable data diode simulator sockets")
-
- args = parser.parse_args()
-
- return args.local_test, args.dd_sockets
diff --git a/src/nh/pidgin.py b/src/nh/pidgin.py
deleted file mode 100644
index 5bba828..0000000
--- a/src/nh/pidgin.py
+++ /dev/null
@@ -1,174 +0,0 @@
-#!/usr/bin/env python3.5
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see <http://www.gnu.org/licenses/>.
-"""
-
-import base64
-import dbus
-import dbus.exceptions
-import time
-import typing
-
-from datetime import datetime
-from typing import Any, Dict, Tuple
-
-from dbus.mainloop.glib import DBusGMainLoop
-from gi.repository import GObject
-
-from src.common.misc import ignored
-from src.common.output import box_print, c_print, clear_screen, phase
-from src.common.statics import *
-
-if typing.TYPE_CHECKING:
- from multiprocessing import Queue
- from src.nh.settings import Settings
-
-
-def ensure_im_connection() -> None:
- """\
- Check that nh.py has connection to Pidgin
- before launching other processes.
- """
- phase("Waiting for enabled account in Pidgin", offset=1)
-
- while True:
- try:
- bus = dbus.SessionBus(private=True)
- obj = bus.get_object("im.pidgin.purple.PurpleService", "/im/pidgin/purple/PurpleObject")
- purple = dbus.Interface(obj, "im.pidgin.purple.PurpleInterface")
-
- while not purple.PurpleAccountsGetAllActive():
- time.sleep(0.01)
- phase('OK', done=True)
-
- accounts = []
- for a in purple.PurpleAccountsGetAllActive():
- accounts.append(purple.PurpleAccountGetUsername(a)[:-1])
-
- just_len = len(max(accounts, key=len))
- justified = ["Active accounts in Pidgin:"] + ["* {}".format(a.ljust(just_len)) for a in accounts]
- box_print(justified, head=1, tail=1)
- return None
-
- except (IndexError, dbus.exceptions.DBusException):
- continue
- except (EOFError, KeyboardInterrupt):
- clear_screen()
- exit()
-
-
-def im_command(queues: Dict[bytes, 'Queue']) -> None:
- """Loop that executes commands on IM client."""
- bus = dbus.SessionBus(private=True)
- obj = bus.get_object("im.pidgin.purple.PurpleService", "/im/pidgin/purple/PurpleObject")
- purple = dbus.Interface(obj, "im.pidgin.purple.PurpleInterface")
- account = purple.PurpleAccountsGetAllActive()[0]
- queue = queues[NH_TO_IM_QUEUE]
-
- while True:
- with ignored(dbus.exceptions.DBusException, EOFError, KeyboardInterrupt):
- while queue.qsize() == 0:
- time.sleep(0.01)
-
- command = queue.get()
-
- if command[:2] in [UNENCRYPTED_SCREEN_CLEAR, UNENCRYPTED_SCREEN_RESET]:
- contact = command[2:]
- new_conv = purple.PurpleConversationNew(1, account, contact)
- purple.PurpleConversationClearMessageHistory(new_conv)
-
-
-def im_incoming(queues: Dict[bytes, 'Queue']) -> None:
- """Loop that maintains signal receiver process."""
-
- def pidgin_to_rxm(account: str, sender: str, message: str, *_: Any) -> None:
- """Signal receiver process that receives packets from Pidgin."""
- sender = sender.split('/')[0]
- ts = datetime.now().strftime("%m-%d / %H:%M:%S")
- d_bus = dbus.SessionBus(private=True)
- obj = d_bus.get_object("im.pidgin.purple.PurpleService", "/im/pidgin/purple/PurpleObject")
- purple = dbus.Interface(obj, "im.pidgin.purple.PurpleInterface")
-
- user = ''
- for a in purple.PurpleAccountsGetAllActive():
- if a == account:
- user = purple.PurpleAccountGetUsername(a)[:-1]
-
- if not message.startswith(TFC):
- return None
-
- try:
- __, header, payload = message.split('|') # type: Tuple[str, str, str]
- except ValueError:
- return None
-
- if header.encode() == PUBLIC_KEY_PACKET_HEADER:
- print("{} - pub key {} > {} > RxM".format(ts, sender, user))
-
- elif header.encode() == MESSAGE_PACKET_HEADER:
- print("{} - message {} > {} > RxM".format(ts, sender, user))
-
- else:
- print("Received invalid packet from {}".format(sender))
- return None
-
- decoded = base64.b64decode(payload)
- packet = header.encode() + decoded + ORIGIN_CONTACT_HEADER + sender.encode()
- queues[RXM_OUTGOING_QUEUE].put(packet)
-
- while True:
- with ignored(dbus.exceptions.DBusException, EOFError, KeyboardInterrupt):
- bus = dbus.SessionBus(private=True, mainloop=DBusGMainLoop())
- bus.add_signal_receiver(pidgin_to_rxm, dbus_interface="im.pidgin.purple.PurpleInterface", signal_name="ReceivedImMsg")
- GObject.MainLoop().run()
-
-
-def im_outgoing(queues: Dict[bytes, 'Queue'], settings: 'Settings') -> None:
- """\
- Loop that outputs messages and public keys from
- queue and sends them to contacts over Pidgin.
- """
- bus = dbus.SessionBus(private=True)
- obj = bus.get_object("im.pidgin.purple.PurpleService", "/im/pidgin/purple/PurpleObject")
- purple = dbus.Interface(obj, "im.pidgin.purple.PurpleInterface")
- queue = queues[TXM_TO_IM_QUEUE]
-
- while True:
- with ignored(dbus.exceptions.DBusException, EOFError, KeyboardInterrupt):
- while queue.qsize() == 0:
- time.sleep(0.01)
-
- header, payload, user, contact = queue.get()
-
- b64_str = base64.b64encode(payload).decode()
- payload = '|'.join([TFC, header.decode(), b64_str])
- user = user.decode()
- contact = contact.decode()
-
- user_found = False
- for u in purple.PurpleAccountsGetAllActive():
- if user == purple.PurpleAccountGetUsername(u)[:-1]:
- user_found = True
- if settings.relay_to_im_client:
- new_conv = purple.PurpleConversationNew(1, u, contact)
- sel_conv = purple.PurpleConvIm(new_conv)
- purple.PurpleConvImSend(sel_conv, payload)
- continue
-
- if not user_found:
- c_print("Error: No user {} found.".format(user), head=1, tail=1)
diff --git a/src/nh/settings.py b/src/nh/settings.py
deleted file mode 100644
index 6049ddd..0000000
--- a/src/nh/settings.py
+++ /dev/null
@@ -1,95 +0,0 @@
-#!/usr/bin/env python3.5
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see <http://www.gnu.org/licenses/>.
-"""
-
-import os.path
-
-from src.common.encoding import bool_to_bytes, int_to_bytes
-from src.common.encoding import bytes_to_bool, bytes_to_int
-from src.common.input import yes
-from src.common.misc import calculate_race_condition_delay, calculate_serial_delays, ensure_dir
-from src.common.statics import *
-
-
-class Settings(object):
- """Settings object stores NH side persistent settings.
-
- NH-side settings are not encrypted because NH is assumed to be in
- control of the adversary. Encryption would require password and
- because some users might use same password for NH and TxM/RxM,
- sensitive passwords might leak to remote attacker who might later
- physically compromise the endpoint.
- """
-
- def __init__(self, local_testing: bool, dd_sockets: bool, operation=NH) -> None:
- # Fixed settings
- self.relay_to_im_client = True # False stops forwarding messages to IM client
-
- # Controllable settings
- self.serial_usb_adapter = True # False uses system's integrated serial interface
- self.disable_gui_dialog = False # True replaces Tkinter dialogs with CLI prompts
- self.serial_baudrate = 19200 # The speed of serial interface in bauds per second
- self.serial_error_correction = 5 # Number of byte errors serial datagrams can recover from
-
- self.software_operation = operation
- self.file_name = '{}{}_settings'.format(DIR_USER_DATA, operation)
-
- # Settings from launcher / CLI arguments
- self.local_testing_mode = local_testing
- self.data_diode_sockets = dd_sockets
-
- ensure_dir(DIR_USER_DATA)
- if os.path.isfile(self.file_name):
- self.load_settings()
- else:
- self.setup()
- self.store_settings()
-
- # Following settings change only when program is restarted
- self.session_serial_error_correction = self.serial_error_correction
- self.session_serial_baudrate = self.serial_baudrate
- self.race_condition_delay = calculate_race_condition_delay(self)
-
- self.receive_timeout, self.transmit_delay = calculate_serial_delays(self.session_serial_baudrate)
-
- def store_settings(self) -> None:
- """Store persistent settings to file."""
- setting_data = int_to_bytes(self.serial_baudrate)
- setting_data += int_to_bytes(self.serial_error_correction)
- setting_data += bool_to_bytes(self.serial_usb_adapter)
- setting_data += bool_to_bytes(self.disable_gui_dialog)
-
- ensure_dir(DIR_USER_DATA)
- with open(self.file_name, 'wb+') as f:
- f.write(setting_data)
-
- def load_settings(self) -> None:
- """Load persistent settings from file."""
- with open(self.file_name, 'rb') as f:
- settings = f.read()
-
- self.serial_baudrate = bytes_to_int(settings[0:8])
- self.serial_error_correction = bytes_to_int(settings[8:16])
- self.serial_usb_adapter = bytes_to_bool(settings[16:17])
- self.disable_gui_dialog = bytes_to_bool(settings[17:18])
-
- def setup(self) -> None:
- """Prompt user to enter initial settings."""
- if not self.local_testing_mode:
- self.serial_usb_adapter = yes("Does NH use USB-to-serial/TTL adapter?", tail=1)
diff --git a/src/nh/tcb.py b/src/nh/tcb.py
deleted file mode 100644
index 5fc2241..0000000
--- a/src/nh/tcb.py
+++ /dev/null
@@ -1,130 +0,0 @@
-#!/usr/bin/env python3.5
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see <http://www.gnu.org/licenses/>.
-"""
-
-import os
-import time
-import typing
-
-from datetime import datetime
-from typing import Dict
-
-from src.common.misc import ignored
-from src.common.output import box_print
-from src.common.reed_solomon import ReedSolomonError, RSCodec
-from src.common.statics import *
-
-if typing.TYPE_CHECKING:
- from multiprocessing import Queue
- from src.nh.gateway import Gateway
- from src.nh.settings import Settings
-
-
-def txm_incoming(queues: Dict[bytes, 'Queue'],
- settings: 'Settings',
- unittest: bool = False) -> None:
- """Loop that places messages received from TxM to appropriate queues."""
- rs = RSCodec(2 * settings.session_serial_error_correction)
-
- q_to_tip = queues[TXM_INCOMING_QUEUE]
- m_to_rxm = queues[RXM_OUTGOING_QUEUE]
- c_to_rxm = queues[TXM_TO_RXM_QUEUE]
- q_to_im = queues[TXM_TO_IM_QUEUE]
- q_to_nh = queues[TXM_TO_NH_QUEUE]
-
- while True:
- with ignored(EOFError, KeyboardInterrupt):
- while q_to_tip.qsize() == 0:
- time.sleep(0.01)
-
- packet = q_to_tip.get()
-
- try:
- packet = bytes(rs.decode(packet))
- except ReedSolomonError:
- box_print("Warning! Failed to correct errors in received packet.", head=1, tail=1)
- continue
-
- ts = datetime.now().strftime("%m-%d / %H:%M:%S")
- header = packet[:1]
-
- if header == UNENCRYPTED_PACKET_HEADER:
- q_to_nh.put(packet[1:])
-
- elif header in [LOCAL_KEY_PACKET_HEADER, COMMAND_PACKET_HEADER]:
- p_type = 'local key' if header == LOCAL_KEY_PACKET_HEADER else 'command'
- print("{} - {} TxM > RxM".format(ts, p_type))
- c_to_rxm.put(packet)
-
- elif header in [MESSAGE_PACKET_HEADER, PUBLIC_KEY_PACKET_HEADER]:
- payload_len, p_type = {PUBLIC_KEY_PACKET_HEADER: (KEY_LENGTH, 'pub key'),
- MESSAGE_PACKET_HEADER: (MESSAGE_LENGTH, 'message')}[header]
- payload = packet[1:1 + payload_len]
- trailer = packet[1 + payload_len:]
- user, contact = trailer.split(US_BYTE)
-
- print("{} - {} TxM > {} > {}".format(ts, p_type, user.decode(), contact.decode()))
- q_to_im.put((header, payload, user, contact))
- m_to_rxm.put(header + payload + ORIGIN_USER_HEADER + contact)
-
- elif header == EXPORTED_FILE_HEADER:
- payload = packet[1:]
-
- file_name = os.urandom(8).hex()
- while os.path.isfile(file_name):
- file_name = os.urandom(8).hex()
-
- with open(file_name, 'wb+') as f:
- f.write(payload)
- print("{} - Exported file from TxM as {}".format(ts, file_name))
-
- if unittest:
- break
-
-
-def rxm_outgoing(queues: Dict[bytes, 'Queue'],
- settings: 'Settings',
- gateway: 'Gateway',
- unittest: bool = False) -> None:
- """Loop that outputs packets from queues to RxM.
-
- Commands (and local keys) from TxM to RxM have higher priority
- than messages and public keys from contacts. This prevents
- contact from doing DoS on RxM by filling queue with packets.
- """
- rs = RSCodec(2 * settings.session_serial_error_correction)
- c_queue = queues[TXM_TO_RXM_QUEUE]
- m_queue = queues[RXM_OUTGOING_QUEUE]
-
- while True:
- try:
- time.sleep(0.01)
-
- while c_queue.qsize() != 0:
- packet = rs.encode(bytearray(c_queue.get()))
- gateway.write(packet)
-
- if m_queue.qsize() != 0:
- packet = rs.encode(bytearray(m_queue.get()))
- gateway.write(packet)
-
- if unittest:
- break
- except (EOFError, KeyboardInterrupt):
- pass
diff --git a/src/nh/__init__.py b/src/receiver/__init__.py
old mode 100644
new mode 100755
similarity index 100%
rename from src/nh/__init__.py
rename to src/receiver/__init__.py
diff --git a/src/receiver/commands.py b/src/receiver/commands.py
new file mode 100644
index 0000000..e6977c4
--- /dev/null
+++ b/src/receiver/commands.py
@@ -0,0 +1,389 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import os
+import typing
+
+from typing import Any, Dict, Union
+
+from src.common.db_logs import access_logs, change_log_db_key, remove_logs
+from src.common.encoding import bytes_to_int, pub_key_to_short_address
+from src.common.exceptions import FunctionReturn
+from src.common.misc import ensure_dir, separate_header
+from src.common.output import clear_screen, m_print, phase, print_on_previous_line
+from src.common.statics import *
+
+from src.receiver.commands_g import group_add, group_create, group_delete, group_remove, group_rename
+from src.receiver.key_exchanges import key_ex_ecdhe, key_ex_psk_rx, key_ex_psk_tx, local_key_rdy
+from src.receiver.packet import decrypt_assembly_packet
+
+if typing.TYPE_CHECKING:
+ from datetime import datetime
+ from multiprocessing import Queue
+ from src.common.db_contacts import Contact, ContactList
+ from src.common.db_groups import Group, GroupList
+ from src.common.db_keys import KeyList
+ from src.common.db_masterkey import MasterKey
+ from src.common.db_settings import Settings
+ from src.common.gateway import Gateway
+ from src.receiver.packet import PacketList
+ from src.receiver.windows import WindowList
+
+
+def process_command(ts: 'datetime',
+ assembly_ct: bytes,
+ window_list: 'WindowList',
+ packet_list: 'PacketList',
+ contact_list: 'ContactList',
+ key_list: 'KeyList',
+ group_list: 'GroupList',
+ settings: 'Settings',
+ master_key: 'MasterKey',
+ gateway: 'Gateway',
+ exit_queue: 'Queue'
+ ) -> None:
+ """Decrypt command assembly packet and process command."""
+ assembly_packet = decrypt_assembly_packet(assembly_ct, LOCAL_PUBKEY, ORIGIN_USER_HEADER,
+ window_list, contact_list, key_list)
+
+ cmd_packet = packet_list.get_packet(LOCAL_PUBKEY, ORIGIN_USER_HEADER, COMMAND)
+ cmd_packet.add_packet(assembly_packet)
+
+ if not cmd_packet.is_complete:
+ raise FunctionReturn("Incomplete command.", output=False)
+
+ header, cmd = separate_header(cmd_packet.assemble_command_packet(), ENCRYPTED_COMMAND_HEADER_LENGTH)
+ no = None
+
+ # Keyword Function to run ( Parameters )
+ # --------------------------------------------------------------------------------------------------------------
+ d = {LOCAL_KEY_RDY: (local_key_rdy, ts, window_list, contact_list ),
+ WIN_ACTIVITY: (win_activity, window_list ),
+ WIN_SELECT: (win_select, cmd, window_list ),
+ CLEAR_SCREEN: (clear_screen, ),
+ RESET_SCREEN: (reset_screen, cmd, window_list ),
+ EXIT_PROGRAM: (exit_tfc, exit_queue),
+ LOG_DISPLAY: (log_command, cmd, no, window_list, contact_list, group_list, settings, master_key),
+ LOG_EXPORT: (log_command, cmd, ts, window_list, contact_list, group_list, settings, master_key),
+ LOG_REMOVE: (remove_log, cmd, contact_list, group_list, settings, master_key),
+ CH_MASTER_KEY: (ch_master_key, ts, window_list, contact_list, group_list, key_list, settings, master_key),
+ CH_NICKNAME: (ch_nick, cmd, ts, window_list, contact_list, ),
+ CH_SETTING: (ch_setting, cmd, ts, window_list, contact_list, group_list, key_list, settings, gateway ),
+ CH_LOGGING: (ch_contact_s, cmd, ts, window_list, contact_list, group_list, header ),
+ CH_FILE_RECV: (ch_contact_s, cmd, ts, window_list, contact_list, group_list, header ),
+ CH_NOTIFY: (ch_contact_s, cmd, ts, window_list, contact_list, group_list, header ),
+ GROUP_CREATE: (group_create, cmd, ts, window_list, contact_list, group_list, settings ),
+ GROUP_ADD: (group_add, cmd, ts, window_list, contact_list, group_list, settings ),
+ GROUP_REMOVE: (group_remove, cmd, ts, window_list, contact_list, group_list ),
+ GROUP_DELETE: (group_delete, cmd, ts, window_list, group_list ),
+ GROUP_RENAME: (group_rename, cmd, ts, window_list, contact_list, group_list ),
+ KEY_EX_ECDHE: (key_ex_ecdhe, cmd, ts, window_list, contact_list, key_list, settings ),
+ KEY_EX_PSK_TX: (key_ex_psk_tx, cmd, ts, window_list, contact_list, key_list, settings ),
+ KEY_EX_PSK_RX: (key_ex_psk_rx, cmd, ts, window_list, contact_list, key_list, settings ),
+ CONTACT_REM: (contact_rem, cmd, ts, window_list, contact_list, group_list, key_list, settings, master_key),
+ WIPE_USR_DATA: (wipe, exit_queue)
+ } # type: Dict[bytes, Any]
+
+ try:
+ from_dict = d[header]
+ except KeyError:
+ raise FunctionReturn("Error: Received an invalid command.")
+
+ func = from_dict[0]
+ parameters = from_dict[1:]
+ func(*parameters)
+
+
+def win_activity(window_list: 'WindowList') -> None:
+ """Show number of unread messages in each window."""
+ unread_wins = [w for w in window_list if (w.uid != WIN_UID_LOCAL and w.unread_messages > 0)]
+ print_list = ["Window activity"] if unread_wins else ["No window activity"]
+ print_list += [f"{w.name}: {w.unread_messages}" for w in unread_wins]
+
+ m_print(print_list, box=True)
+ print_on_previous_line(reps=(len(print_list) + 2), delay=1)
+
+
+def win_select(window_uid: bytes, window_list: 'WindowList') -> None:
+ """Select window specified by the Transmitter Program."""
+ if window_uid == WIN_UID_FILE:
+ clear_screen()
+ window_list.set_active_rx_window(window_uid)
+
+
+def reset_screen(win_uid: bytes, window_list: 'WindowList') -> None:
+ """Reset window specified by the Transmitter Program."""
+ window = window_list.get_window(win_uid)
+ window.reset_window()
+ os.system(RESET)
+
+
+def exit_tfc(exit_queue: 'Queue') -> None:
+ """Exit TFC."""
+ exit_queue.put(EXIT)
+
+
+def log_command(cmd_data: bytes,
+ ts: 'datetime',
+ window_list: 'WindowList',
+ contact_list: 'ContactList',
+ group_list: 'GroupList',
+ settings: 'Settings',
+ master_key: 'MasterKey'
+ ) -> None:
+ """Display or export log file for the active window."""
+ export = ts is not None
+ ser_no_msg, uid = separate_header(cmd_data, ENCODED_INTEGER_LENGTH)
+ no_messages = bytes_to_int(ser_no_msg)
+ window = window_list.get_window(uid)
+ access_logs(window, contact_list, group_list, settings, master_key, msg_to_load=no_messages, export=export)
+
+ if export:
+ local_win = window_list.get_local_window()
+ local_win.add_new(ts, f"Exported log file of {window.type} '{window.name}'.", output=True)
+
+
+def remove_log(cmd_data: bytes,
+ contact_list: 'ContactList',
+ group_list: 'GroupList',
+ settings: 'Settings',
+ master_key: 'MasterKey'
+ ) -> None:
+ """Remove log entries for contact or group."""
+ remove_logs(contact_list, group_list, settings, master_key, selector=cmd_data)
+
+
+def ch_master_key(ts: 'datetime',
+ window_list: 'WindowList',
+ contact_list: 'ContactList',
+ group_list: 'GroupList',
+ key_list: 'KeyList',
+ settings: 'Settings',
+ master_key: 'MasterKey'
+ ) -> None:
+ """Prompt the user for a new master password and derive a new master key from that."""
+ try:
+ old_master_key = master_key.master_key[:]
+ master_key.master_key = master_key.new_master_key()
+
+ phase("Re-encrypting databases")
+
+ ensure_dir(DIR_USER_DATA)
+ file_name = f'{DIR_USER_DATA}{settings.software_operation}_logs'
+ if os.path.isfile(file_name):
+ change_log_db_key(old_master_key, master_key.master_key, settings)
+
+ key_list.store_keys()
+ settings.store_settings()
+ contact_list.store_contacts()
+ group_list.store_groups()
+
+ phase(DONE)
+ m_print("Master password successfully changed.", bold=True, tail_clear=True, delay=1, head=1)
+
+ local_win = window_list.get_local_window()
+ local_win.add_new(ts, "Changed Receiver master password.")
+
+ except (EOFError, KeyboardInterrupt):
+ raise FunctionReturn("Password change aborted.", tail_clear=True, delay=1, head=2)
+
+
+def ch_nick(cmd_data: bytes,
+ ts: 'datetime',
+ window_list: 'WindowList',
+ contact_list: 'ContactList'
+ ) -> None:
+ """Change nickname of contact."""
+ onion_pub_key, nick_bytes = separate_header(cmd_data, header_length=ONION_SERVICE_PUBLIC_KEY_LENGTH)
+ nick = nick_bytes.decode()
+ short_addr = pub_key_to_short_address(onion_pub_key)
+
+ try:
+ contact = contact_list.get_contact_by_pub_key(onion_pub_key)
+ except StopIteration:
+ raise FunctionReturn(f"Error: Receiver has no contact '{short_addr}' to rename.")
+
+ contact.nick = nick
+ contact_list.store_contacts()
+
+ window = window_list.get_window(onion_pub_key)
+ window.name = nick
+ window.handle_dict[onion_pub_key] = nick
+
+ if window.type == WIN_TYPE_CONTACT:
+ window.redraw()
+
+ cmd_win = window_list.get_local_window()
+ cmd_win.add_new(ts, f"Changed {short_addr} nick to '{nick}'.", output=True)
+
+
+def ch_setting(cmd_data: bytes,
+ ts: 'datetime',
+ window_list: 'WindowList',
+ contact_list: 'ContactList',
+ group_list: 'GroupList',
+ key_list: 'KeyList',
+ settings: 'Settings',
+ gateway: 'Gateway'
+ ) -> None:
+ """Change TFC setting."""
+ try:
+ setting, value = [f.decode() for f in cmd_data.split(US_BYTE)]
+ except ValueError:
+ raise FunctionReturn("Error: Received invalid setting data.")
+
+ if setting in settings.key_list:
+ settings.change_setting(setting, value, contact_list, group_list)
+ elif setting in gateway.settings.key_list:
+ gateway.settings.change_setting(setting, value)
+ else:
+ raise FunctionReturn(f"Error: Invalid setting '{setting}'.")
+
+ local_win = window_list.get_local_window()
+ local_win.add_new(ts, f"Changed setting '{setting}' to '{value}'.", output=True)
+
+ if setting == 'max_number_of_contacts':
+ contact_list.store_contacts()
+ key_list.store_keys()
+ if setting in ['max_number_of_group_members', 'max_number_of_groups']:
+ group_list.store_groups()
+
+
+def ch_contact_s(cmd_data: bytes,
+ ts: 'datetime',
+ window_list: 'WindowList',
+ contact_list: 'ContactList',
+ group_list: 'GroupList',
+ header: bytes
+ ) -> None:
+ """Change contact/group related setting."""
+ setting, win_uid = separate_header(cmd_data, CONTACT_SETTING_HEADER_LENGTH)
+ attr, desc, file_cmd = {CH_LOGGING: ('log_messages', "Logging of messages", False),
+ CH_FILE_RECV: ('file_reception', "Reception of files", True),
+ CH_NOTIFY: ('notifications', "Message notifications", False)}[header]
+
+ action, b_value = {ENABLE: ('enabled', True),
+ DISABLE: ('disabled', False)}[setting.lower()]
+
+ if setting.isupper():
+ # Change settings for all contacts (and groups)
+ enabled = [getattr(c, attr) for c in contact_list.get_list_of_contacts()]
+ enabled += [getattr(g, attr) for g in group_list] if not file_cmd else []
+ status = "was already" if ((all(enabled) and b_value) or (not any(enabled) and not b_value)) else "has been"
+ specifier = "every "
+ w_type = "contact"
+ w_name = "." if file_cmd else " and group."
+
+ # Set values
+ for c in contact_list.get_list_of_contacts():
+ setattr(c, attr, b_value)
+ contact_list.store_contacts()
+
+ if not file_cmd:
+ for g in group_list:
+ setattr(g, attr, b_value)
+ group_list.store_groups()
+
+ else:
+ # Change setting for contacts in specified window
+ if not window_list.has_window(win_uid):
+ raise FunctionReturn(f"Error: Found no window for '{pub_key_to_short_address(win_uid)}'.")
+
+ window = window_list.get_window(win_uid)
+ group_window = window.type == WIN_TYPE_GROUP
+ contact_window = window.type == WIN_TYPE_CONTACT
+
+ if contact_window:
+ target = contact_list.get_contact_by_pub_key(win_uid) # type: Union[Contact, Group]
+ else:
+ target = group_list.get_group_by_id(win_uid)
+
+ if file_cmd:
+ enabled = [getattr(m, attr) for m in window.window_contacts]
+ changed = not all(enabled) if b_value else any(enabled)
+ else:
+ changed = getattr(target, attr) != b_value
+
+ status = "has been" if changed else "was already"
+ specifier = "members in " if (file_cmd and group_window) else ''
+ w_type = window.type
+ w_name = f" {window.name}."
+
+ # Set values
+ if contact_window or (group_window and file_cmd):
+ for c in window.window_contacts:
+ setattr(c, attr, b_value)
+ contact_list.store_contacts()
+
+ elif group_window:
+ setattr(group_list.get_group_by_id(win_uid), attr, b_value)
+ group_list.store_groups()
+
+ message = f"{desc} {status} {action} for {specifier}{w_type}{w_name}"
+ local_win = window_list.get_local_window()
+ local_win.add_new(ts, message, output=True)
+
+
+def contact_rem(onion_pub_key: bytes,
+ ts: 'datetime',
+ window_list: 'WindowList',
+ contact_list: 'ContactList',
+ group_list: 'GroupList',
+ key_list: 'KeyList',
+ settings: 'Settings',
+ master_key: 'MasterKey'
+ ) -> None:
+ """Remove contact from Receiver Program."""
+ key_list.remove_keyset(onion_pub_key)
+ window_list.remove_window(onion_pub_key)
+ short_addr = pub_key_to_short_address(onion_pub_key)
+
+ try:
+ contact = contact_list.get_contact_by_pub_key(onion_pub_key)
+ except StopIteration:
+ raise FunctionReturn(f"Receiver has no account '{short_addr}' to remove.")
+
+ nick = contact.nick
+ in_group = any([g.remove_members([onion_pub_key]) for g in group_list])
+
+ contact_list.remove_contact_by_pub_key(onion_pub_key)
+
+ message = f"Removed {nick} ({short_addr}) from contacts{' and groups' if in_group else ''}."
+ m_print(message, bold=True, head=1, tail=1)
+
+ local_win = window_list.get_local_window()
+ local_win.add_new(ts, message)
+
+ remove_logs(contact_list, group_list, settings, master_key, onion_pub_key)
+
+
+def wipe(exit_queue: 'Queue') -> None:
+ """\
+ Reset terminals, wipe all TFC user data on Destination Computer and
+ power off the system.
+
+ No effective RAM overwriting tool currently exists, so as long as
+ Source and Destination Computers use FDE and DDR3 memory, recovery
+ of user data becomes impossible very fast:
+ https://www1.cs.fau.de/filepool/projects/coldboot/fares_coldboot.pdf
+ """
+ os.system(RESET)
+ exit_queue.put(WIPE)
diff --git a/src/receiver/commands_g.py b/src/receiver/commands_g.py
new file mode 100644
index 0000000..b90e634
--- /dev/null
+++ b/src/receiver/commands_g.py
@@ -0,0 +1,218 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import typing
+
+from src.common.encoding import b58encode
+from src.common.exceptions import FunctionReturn
+from src.common.misc import separate_header, split_byte_string, validate_group_name
+from src.common.output import group_management_print, m_print
+from src.common.statics import *
+
+if typing.TYPE_CHECKING:
+ from datetime import datetime
+ from src.common.db_contacts import ContactList
+ from src.common.db_groups import GroupList
+ from src.common.db_settings import Settings
+ from src.receiver.windows import WindowList
+
+
+def group_create(cmd_data: bytes,
+                 ts: 'datetime',
+                 window_list: 'WindowList',
+                 contact_list: 'ContactList',
+                 group_list: 'GroupList',
+                 settings: 'Settings'
+                 ) -> None:
+    """Create a new group.
+
+    `cmd_data` contains the group ID, the group name, and the serialized
+    list of member public keys. Raises FunctionReturn when the member
+    count or the number of groups would exceed the limits in Settings.
+    """
+    group_id, variable_len_data = separate_header(cmd_data, GROUP_ID_LENGTH)
+    # Group name and member list are separated by the unit separator byte.
+    group_name_bytes, ser_members = variable_len_data.split(US_BYTE, 1)
+    group_name = group_name_bytes.decode()
+
+    # Accept only members that are already known contacts; report the rest.
+    purp_pub_keys = set(split_byte_string(ser_members, ONION_SERVICE_PUBLIC_KEY_LENGTH))
+    pub_keys = set(contact_list.get_list_of_pub_keys())
+    accepted = list(purp_pub_keys & pub_keys)
+    rejected = list(purp_pub_keys - pub_keys)
+
+    if len(accepted) > settings.max_number_of_group_members:
+        raise FunctionReturn(f"Error: TFC settings only allow {settings.max_number_of_group_members} "
+                             f"members per group.")
+
+    if len(group_list) == settings.max_number_of_groups:
+        raise FunctionReturn(f"Error: TFC settings only allow {settings.max_number_of_groups} groups.")
+
+    accepted_contacts = [contact_list.get_contact_by_pub_key(k) for k in accepted]
+    group_list.add_group(group_name,
+                         group_id,
+                         settings.log_messages_by_default,
+                         settings.show_notifications_by_default,
+                         accepted_contacts)
+
+    # Reset the group window's ephemeral state to match the new member list.
+    group = group_list.get_group(group_name)
+    window = window_list.get_window(group.group_id)
+    window.window_contacts = accepted_contacts
+    window.message_log = []
+    window.unread_messages = 0
+    window.create_handle_dict()
+
+    group_management_print(NEW_GROUP, accepted, contact_list, group_name)
+    group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name)
+
+    local_win = window_list.get_window(WIN_UID_LOCAL)
+    local_win.add_new(ts, f"Created new group {group_name}.")
+
+
+def group_add(cmd_data: bytes,
+              ts: 'datetime',
+              window_list: 'WindowList',
+              contact_list: 'ContactList',
+              group_list: 'GroupList',
+              settings: 'Settings'
+              ) -> None:
+    """Add member(s) to group.
+
+    `cmd_data` contains the group ID followed by the serialized list of
+    member public keys to add. Raises FunctionReturn when the group ID
+    is unknown or the resulting member count would exceed the limit in
+    Settings.
+    """
+    group_id, ser_members = separate_header(cmd_data, GROUP_ID_LENGTH)
+    purp_pub_keys = set(split_byte_string(ser_members, ONION_SERVICE_PUBLIC_KEY_LENGTH))
+
+    try:
+        group_name = group_list.get_group_by_id(group_id).name
+    except StopIteration:
+        raise FunctionReturn(f"Error: No group with ID '{b58encode(group_id)}' found.")
+
+    # Partition the purported members into genuinely new members, accounts
+    # already in the group, and accounts that are not known contacts.
+    pub_keys = set(contact_list.get_list_of_pub_keys())
+    before_adding = set(group_list.get_group(group_name).get_list_of_member_pub_keys())
+    ok_accounts = set(pub_keys & purp_pub_keys)
+    new_in_group_set = set(ok_accounts - before_adding)
+
+    end_assembly = list(before_adding | new_in_group_set)
+    already_in_g = list(purp_pub_keys & before_adding)
+    rejected = list(purp_pub_keys - pub_keys)
+    new_in_group = list(new_in_group_set)
+
+    if len(end_assembly) > settings.max_number_of_group_members:
+        raise FunctionReturn(f"Error: TFC settings only allow {settings.max_number_of_group_members} "
+                             f"members per group.")
+
+    group = group_list.get_group(group_name)
+    group.add_members([contact_list.get_contact_by_pub_key(k) for k in new_in_group])
+
+    # Keep the group's window in sync with the extended member list.
+    window = window_list.get_window(group.group_id)
+    window.add_contacts(new_in_group)
+    window.create_handle_dict()
+
+    group_management_print(ADDED_MEMBERS, new_in_group, contact_list, group_name)
+    group_management_print(ALREADY_MEMBER, already_in_g, contact_list, group_name)
+    group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name)
+
+    local_win = window_list.get_window(WIN_UID_LOCAL)
+    local_win.add_new(ts, f"Added members to group {group_name}.")
+
+
+def group_remove(cmd_data: bytes,
+                 ts: 'datetime',
+                 window_list: 'WindowList',
+                 contact_list: 'ContactList',
+                 group_list: 'GroupList'
+                 ) -> None:
+    """Remove member(s) from the group.
+
+    `cmd_data` contains the group ID followed by the serialized list of
+    member public keys to remove. Raises FunctionReturn when the group
+    ID is unknown.
+    """
+    group_id, ser_members = separate_header(cmd_data, GROUP_ID_LENGTH)
+    purp_pub_keys = set(split_byte_string(ser_members, ONION_SERVICE_PUBLIC_KEY_LENGTH))
+
+    try:
+        group_name = group_list.get_group_by_id(group_id).name
+    except StopIteration:
+        raise FunctionReturn(f"Error: No group with ID '{b58encode(group_id)}' found.")
+
+    # Partition the purported members into removable members, known accounts
+    # that are not in the group, and accounts that are not known contacts.
+    pub_keys = set(contact_list.get_list_of_pub_keys())
+    before_removal = set(group_list.get_group(group_name).get_list_of_member_pub_keys())
+    ok_accounts_set = set(purp_pub_keys & pub_keys)
+    removable_set = set(before_removal & ok_accounts_set)
+
+    not_in_group = list(ok_accounts_set - before_removal)
+    rejected = list(purp_pub_keys - pub_keys)
+    removable = list(removable_set)
+
+    group = group_list.get_group(group_name)
+    group.remove_members(removable)
+
+    # Keep the group's window in sync with the reduced member list.
+    window = window_list.get_window(group.group_id)
+    window.remove_contacts(removable)
+
+    group_management_print(REMOVED_MEMBERS, removable, contact_list, group_name)
+    group_management_print(NOT_IN_GROUP, not_in_group, contact_list, group_name)
+    group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name)
+
+    local_win = window_list.get_window(WIN_UID_LOCAL)
+    local_win.add_new(ts, f"Removed members from group {group_name}.")
+
+
+def group_delete(group_id: bytes,
+ ts: 'datetime',
+ window_list: 'WindowList',
+ group_list: 'GroupList'
+ ) -> None:
+ """Remove the group."""
+ if not group_list.has_group_id(group_id):
+ raise FunctionReturn(f"Error: No group with ID '{b58encode(group_id)}' found.")
+
+ name = group_list.get_group_by_id(group_id).name
+ window_list.remove_window(group_id)
+ group_list.remove_group_by_id(group_id)
+
+ message = f"Removed group '{name}'."
+ m_print(message, bold=True, head=1, tail=1)
+
+ local_win = window_list.get_window(WIN_UID_LOCAL)
+ local_win.add_new(ts, message)
+
+
+def group_rename(cmd_data: bytes,
+                 ts: 'datetime',
+                 window_list: 'WindowList',
+                 contact_list: 'ContactList',
+                 group_list: 'GroupList'
+                 ) -> None:
+    """Rename the group.
+
+    `cmd_data` contains the group ID followed by the UTF-8 encoded new
+    name. Raises FunctionReturn when the group ID is unknown, the new
+    name has invalid encoding, or it fails group-name validation.
+    """
+    group_id, new_name_bytes = separate_header(cmd_data, GROUP_ID_LENGTH)
+
+    try:
+        group = group_list.get_group_by_id(group_id)
+    except StopIteration:
+        raise FunctionReturn(f"Error: No group with ID '{b58encode(group_id)}' found.")
+
+    try:
+        new_name = new_name_bytes.decode()
+    except UnicodeError:
+        raise FunctionReturn(f"Error: New name for group '{group.name}' was invalid.")
+
+    # validate_group_name returns a non-empty error string on failure.
+    error_msg = validate_group_name(new_name, contact_list, group_list)
+    if error_msg:
+        raise FunctionReturn(error_msg)
+
+    old_name = group.name
+    group.name = new_name
+    group_list.store_groups()
+
+    # Keep the group window's title in sync with the database.
+    window = window_list.get_window(group.group_id)
+    window.name = new_name
+
+    message = f"Renamed group '{old_name}' to '{new_name}'."
+    local_win = window_list.get_window(WIN_UID_LOCAL)
+    local_win.add_new(ts, message, output=True)
diff --git a/src/receiver/files.py b/src/receiver/files.py
new file mode 100644
index 0000000..c878083
--- /dev/null
+++ b/src/receiver/files.py
@@ -0,0 +1,185 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import os.path
+import typing
+import zlib
+
+from typing import Dict, Tuple
+
+import nacl.exceptions
+
+from src.common.crypto import auth_and_decrypt, blake2b
+from src.common.encoding import bytes_to_str
+from src.common.exceptions import FunctionReturn
+from src.common.misc import decompress, ensure_dir, separate_headers, separate_trailer
+from src.common.output import phase, print_on_previous_line
+from src.common.statics import *
+
+if typing.TYPE_CHECKING:
+ from datetime import datetime
+ from src.common.db_contacts import ContactList
+ from src.common.db_settings import Settings
+ from src.receiver.windows import WindowList
+
+
+def store_unique(file_data: bytes, # File data to store
+ file_dir: str, # Directory to store file
+ file_name: str # Preferred name for the file.
+ ) -> str:
+ """Store file under a unique filename.
+
+ If file exists, add trailing counter .# with value as large as
+ needed to ensure existing file is not overwritten.
+ """
+ ensure_dir(file_dir)
+
+ if os.path.isfile(file_dir + file_name):
+ ctr = 1
+ while os.path.isfile(file_dir + file_name + f'.{ctr}'):
+ ctr += 1
+ file_name += f'.{ctr}'
+
+ with open(file_dir + file_name, 'wb+') as f:
+ f.write(file_data)
+
+ return file_name
+
+
+def process_assembled_file(ts: 'datetime',             # Timestamp last received packet
+                           payload: bytes,             # File name and content
+                           onion_pub_key: bytes,       # Onion Service pubkey of sender
+                           nick: str,                  # Nickname of sender
+                           settings: 'Settings',       # Settings object
+                           window_list: 'WindowList',  # WindowList object
+                           ) -> None:
+    """Process received file assembly packets.
+
+    Validates the file name, splits off the appended per-file symmetric
+    key, decrypts and decompresses the file, and stores it under the
+    sender's receive directory. Raises FunctionReturn on any structural,
+    encoding, cryptographic or decompression error.
+    """
+    try:
+        file_name_b, file_data = payload.split(US_BYTE, 1)
+    except ValueError:
+        raise FunctionReturn("Error: Received file had an invalid structure.")
+
+    try:
+        file_name = file_name_b.decode()
+    except UnicodeError:
+        raise FunctionReturn("Error: Received file name had invalid encoding.")
+
+    # Reject non-printable names and names containing '/' (path traversal).
+    if not file_name.isprintable() or not file_name or '/' in file_name:
+        raise FunctionReturn("Error: Received file had an invalid name.")
+
+    # The symmetric file key travels appended to the file ciphertext.
+    file_ct, file_key = separate_trailer(file_data, SYMMETRIC_KEY_LENGTH)
+
+    if len(file_key) != SYMMETRIC_KEY_LENGTH:
+        raise FunctionReturn("Error: Received file had an invalid key.")
+
+    try:
+        file_pt = auth_and_decrypt(file_ct, file_key)
+    except nacl.exceptions.CryptoError:
+        raise FunctionReturn("Error: Decryption of file data failed.")
+
+    try:
+        file_dc = decompress(file_pt, settings.max_decompress_size)
+    except zlib.error:
+        raise FunctionReturn("Error: Decompression of file data failed.")
+
+    file_dir = f'{DIR_RECV_FILES}{nick}/'
+    final_name = store_unique(file_dc, file_dir, file_name)
+
+    message = f"Stored file from {nick} as '{final_name}'."
+    # NOTE(review): during traffic masking the notification goes to the
+    # active window instead of the sender's window — presumably to avoid
+    # switching windows mid-session; confirm against caller behavior.
+    if settings.traffic_masking and window_list.active_win is not None:
+        window = window_list.active_win
+    else:
+        window = window_list.get_window(onion_pub_key)
+    window.add_new(ts, message, onion_pub_key, output=True, event_msg=True)
+
+
+def new_file(ts: 'datetime',
+             packet: bytes,                                    # Sender of file and file ciphertext
+             file_keys: Dict[bytes, bytes],                    # Dictionary for file decryption keys
+             file_buf: Dict[bytes, Tuple['datetime', bytes]],  # Dictionary for cached file ciphertexts
+             contact_list: 'ContactList',                      # ContactList object
+             window_list: 'WindowList',                        # WindowList object
+             settings: 'Settings'                              # Settings object
+             ) -> None:
+    """Validate received file and process or cache it.
+
+    Files are keyed by sender pubkey + BLAKE2b hash of the ciphertext.
+    If the matching decryption key has already been delivered (present
+    in `file_keys`), the file is processed immediately and the key is
+    discarded; otherwise the ciphertext is cached in `file_buf` until
+    the key delivery message arrives.
+    """
+    onion_pub_key, _, file_ct = separate_headers(packet, [ONION_SERVICE_PUBLIC_KEY_LENGTH, ORIGIN_HEADER_LENGTH])
+
+    if not contact_list.has_pub_key(onion_pub_key):
+        raise FunctionReturn("File from an unknown account.", output=False)
+
+    nick = contact_list.get_contact_by_pub_key(onion_pub_key).nick
+    # Respect the per-contact file reception toggle.
+    if not contact_list.get_contact_by_pub_key(onion_pub_key).file_reception:
+        raise FunctionReturn(f"Alert! Discarded file from {nick} as file reception for them is disabled.", bold=True)
+
+    k = onion_pub_key + blake2b(file_ct)  # Dictionary key
+
+    if k in file_keys:
+        decryption_key = file_keys[k]
+        process_file(ts, onion_pub_key, file_ct, decryption_key, contact_list, window_list, settings)
+        file_keys.pop(k)
+    else:
+        file_buf[k] = (ts, file_ct)
+
+
+def process_file(ts: 'datetime',          # Timestamp of received_packet
+                 onion_pub_key: bytes,    # Onion Service pubkey of sender
+                 file_ct: bytes,          # File ciphertext
+                 file_key: bytes,         # File decryption key
+                 contact_list: 'ContactList',  # ContactList object
+                 window_list: 'WindowList',    # WindowList object
+                 settings: 'Settings'          # Settings object
+                 ) -> None:
+    """Store file received from a contact.
+
+    Decrypts and decompresses the ciphertext, extracts the padded UTF-32
+    file name from the head of the plaintext, validates it, and stores
+    the remaining data under the sender's receive directory. Raises
+    FunctionReturn on any cryptographic, decompression, or naming error.
+    """
+    nick = contact_list.get_contact_by_pub_key(onion_pub_key).nick
+
+    phase("Processing received file", head=1)
+    try:
+        file_pt = auth_and_decrypt(file_ct, file_key)
+    except nacl.exceptions.CryptoError:
+        raise FunctionReturn(f"Error: Decryption key for file from {nick} was invalid.")
+
+    try:
+        file_dc = decompress(file_pt, settings.max_decompress_size)
+    except zlib.error:
+        raise FunctionReturn(f"Error: Failed to decompress file from {nick}.")
+    phase(DONE)
+    print_on_previous_line(reps=2)
+
+    # The plaintext starts with a fixed-length padded UTF-32 file name.
+    try:
+        file_name = bytes_to_str(file_dc[:PADDED_UTF32_STR_LENGTH])
+    except UnicodeError:
+        raise FunctionReturn(f"Error: Name of file from {nick} had invalid encoding.")
+
+    # Reject non-printable names and names containing '/' (path traversal).
+    if not file_name.isprintable() or not file_name or '/' in file_name:
+        raise FunctionReturn(f"Error: Name of file from {nick} was invalid.")
+
+    f_data = file_dc[PADDED_UTF32_STR_LENGTH:]
+
+    file_dir = f'{DIR_RECV_FILES}{nick}/'
+    final_name = store_unique(f_data, file_dir, file_name)
+    message = f"Stored file from {nick} as '{final_name}'."
+
+    # NOTE(review): during traffic masking the notification goes to the
+    # active window instead of the sender's window — confirm intent.
+    if settings.traffic_masking and window_list.active_win is not None:
+        window = window_list.active_win
+    else:
+        window = window_list.get_window(onion_pub_key)
+
+    window.add_new(ts, message, onion_pub_key, output=True, event_msg=True)
diff --git a/src/receiver/key_exchanges.py b/src/receiver/key_exchanges.py
new file mode 100644
index 0000000..66abf33
--- /dev/null
+++ b/src/receiver/key_exchanges.py
@@ -0,0 +1,334 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import os.path
+import pipes
+import readline
+import struct
+import subprocess
+import tkinter
+import typing
+
+from typing import List, Tuple
+
+import nacl.exceptions
+
+from src.common.crypto import argon2_kdf, auth_and_decrypt, blake2b, csprng
+from src.common.db_masterkey import MasterKey
+from src.common.encoding import b58encode, bytes_to_str, pub_key_to_short_address
+from src.common.exceptions import FunctionReturn
+from src.common.input import get_b58_key
+from src.common.misc import separate_header, separate_headers
+from src.common.output import m_print, phase, print_on_previous_line
+from src.common.path import ask_path_gui
+from src.common.statics import *
+
+if typing.TYPE_CHECKING:
+ from datetime import datetime
+ from multiprocessing import Queue
+ from src.common.db_contacts import ContactList
+ from src.common.db_keys import KeyList
+ from src.common.db_settings import Settings
+ from src.receiver.windows import WindowList
+
+
+# Local key
+
+def process_local_key(ts: 'datetime',
+ packet: bytes,
+ window_list: 'WindowList',
+ contact_list: 'ContactList',
+ key_list: 'KeyList',
+ settings: 'Settings',
+ kdk_hashes: List[bytes],
+ packet_hashes: List[bytes],
+ l_queue: 'Queue'
+ ) -> None:
+ """Decrypt local key packet and add local contact/keyset."""
+ bootstrap = not key_list.has_local_keyset()
+ try:
+ packet_hash = blake2b(packet)
+
+ # Check if the packet is an old one
+ if packet_hash in packet_hashes:
+ raise FunctionReturn("Error: Received old local key packet.", output=False)
+
+ while True:
+ m_print("Local key setup", bold=True, head_clear=True, head=1, tail=1)
+ kdk = get_b58_key(B58_LOCAL_KEY, settings)
+ kdk_hash = blake2b(kdk)
+
+ try:
+ plaintext = auth_and_decrypt(packet, kdk)
+ break
+ except nacl.exceptions.CryptoError:
+ # Check if key was an old one
+ if kdk_hash in kdk_hashes:
+ m_print("Error: Entered an old local key decryption key.", delay=1)
+ continue
+
+ # Check if the kdk was for a packet further ahead in the queue
+ buffer = [] # type: List[Tuple[datetime, bytes]]
+ while l_queue.qsize() > 0:
+ tup = l_queue.get() # type: Tuple[datetime, bytes]
+ if tup not in buffer:
+ buffer.append(tup)
+
+ for i, tup in enumerate(buffer):
+ try:
+ plaintext = auth_and_decrypt(tup[1], kdk)
+
+ # If we reach this point, decryption was successful.
+ for unexamined in buffer[i+1:]:
+ l_queue.put(unexamined)
+ buffer = []
+ ts = tup[0]
+ break
+
+ except nacl.exceptions.CryptoError:
+ continue
+ else:
+ # Finished the buffer without finding local key CT
+ # for the kdk. Maybe the kdk is from another session.
+ raise FunctionReturn("Error: Incorrect key decryption key.", delay=1)
+
+ break
+
+ # Add local contact to contact list database
+ contact_list.add_contact(LOCAL_PUBKEY,
+ LOCAL_NICK,
+ KEX_STATUS_LOCAL_KEY,
+ bytes(FINGERPRINT_LENGTH),
+ bytes(FINGERPRINT_LENGTH),
+ False, False, True)
+
+ tx_mk, tx_hk, c_code = separate_headers(plaintext, 2 * [SYMMETRIC_KEY_LENGTH])
+
+ # Add local keyset to keyset database
+ key_list.add_keyset(onion_pub_key=LOCAL_PUBKEY,
+ tx_mk=tx_mk,
+ rx_mk=csprng(),
+ tx_hk=tx_hk,
+ rx_hk=csprng())
+
+ # Cache hashes needed to recognize reissued local key packets and key decryption keys.
+ packet_hashes.append(packet_hash)
+ kdk_hashes.append(kdk_hash)
+
+ # Prevent leak of KDK via terminal history / clipboard
+ readline.clear_history()
+ os.system(RESET)
+ root = tkinter.Tk()
+ root.withdraw()
+ try:
+ if root.clipboard_get() == b58encode(kdk):
+ root.clipboard_clear()
+ except tkinter.TclError:
+ pass
+ root.destroy()
+
+ m_print(["Local key successfully installed.",
+ f"Confirmation code (to Transmitter): {c_code.hex()}"], box=True, head=1)
+
+ local_win = window_list.get_local_window()
+ local_win.add_new(ts, "Added new local key.")
+
+ if bootstrap:
+ window_list.active_win = local_win
+
+ except (EOFError, KeyboardInterrupt):
+ m_print("Local key setup aborted.", bold=True, tail_clear=True, delay=1, head=2)
+
+ if window_list.active_win is not None and not bootstrap:
+ window_list.active_win.redraw()
+
+ raise FunctionReturn("Local key setup aborted.", output=False)
+
+
+def local_key_rdy(ts: 'datetime',
+ window_list: 'WindowList',
+ contact_list: 'ContactList') -> None:
+ """Clear local key bootstrap process from the screen."""
+ message = "Successfully completed the local key setup."
+ local_win = window_list.get_local_window()
+ local_win.add_new(ts, message)
+
+ m_print(message, bold=True, tail_clear=True, delay=1)
+
+ if contact_list.has_contacts():
+ if window_list.active_win is not None and window_list.active_win.type in [WIN_TYPE_CONTACT, WIN_TYPE_GROUP]:
+ window_list.active_win.redraw()
+ else:
+ m_print("Waiting for new contacts", bold=True, head=1, tail=1)
+
+
+# ECDHE
+
+def key_ex_ecdhe(packet: bytes,
+                 ts: 'datetime',
+                 window_list: 'WindowList',
+                 contact_list: 'ContactList',
+                 key_list: 'KeyList',
+                 settings: 'Settings'
+                 ) -> None:
+    """Add contact and symmetric keys derived from X448 shared key.
+
+    `packet` contains the contact's Onion Service public key, the four
+    derived symmetric keys (tx/rx message keys and tx/rx header keys),
+    and the encoded nickname. Raises FunctionReturn when the nickname
+    fails to decode.
+    """
+
+    onion_pub_key, tx_mk, rx_mk, tx_hk, rx_hk, nick_bytes \
+        = separate_headers(packet, [ONION_SERVICE_PUBLIC_KEY_LENGTH] + 4*[SYMMETRIC_KEY_LENGTH])
+
+    try:
+        nick = bytes_to_str(nick_bytes)
+    except (struct.error, UnicodeError):
+        raise FunctionReturn("Error: Received invalid contact data")
+
+    # Fingerprints are stored as placeholder zero-bytes at this point.
+    contact_list.add_contact(onion_pub_key, nick,
+                             bytes(FINGERPRINT_LENGTH),
+                             bytes(FINGERPRINT_LENGTH),
+                             KEX_STATUS_NONE,
+                             settings.log_messages_by_default,
+                             settings.accept_files_by_default,
+                             settings.show_notifications_by_default)
+
+    key_list.add_keyset(onion_pub_key, tx_mk, rx_mk, tx_hk, rx_hk)
+
+    message = f"Successfully added {nick}."
+    local_win = window_list.get_local_window()
+    local_win.add_new(ts, message)
+
+    # Short confirmation code the user relays back to Transmitter Program.
+    c_code = blake2b(onion_pub_key, digest_size=CONFIRM_CODE_LENGTH)
+    m_print([message, f"Confirmation code (to Transmitter): {c_code.hex()}"], box=True)
+
+
+# PSK
+
+def key_ex_psk_tx(packet: bytes,
+                  ts: 'datetime',
+                  window_list: 'WindowList',
+                  contact_list: 'ContactList',
+                  key_list: 'KeyList',
+                  settings: 'Settings'
+                  ) -> None:
+    """Add contact and Tx-PSKs.
+
+    Only the Tx-side keys are installed here; the Rx-side keys arrive
+    later via the PSK file import (`key_ex_psk_rx`). Raises
+    FunctionReturn when the nickname fails to decode.
+    """
+
+    onion_pub_key, tx_mk, _, tx_hk, _, nick_bytes \
+        = separate_headers(packet, [ONION_SERVICE_PUBLIC_KEY_LENGTH] + 4*[SYMMETRIC_KEY_LENGTH])
+
+    try:
+        nick = bytes_to_str(nick_bytes)
+    except (struct.error, UnicodeError):
+        raise FunctionReturn("Error: Received invalid contact data")
+
+    # Fingerprints are stored as placeholder zero-bytes at this point.
+    contact_list.add_contact(onion_pub_key, nick,
+                             bytes(FINGERPRINT_LENGTH),
+                             bytes(FINGERPRINT_LENGTH),
+                             KEX_STATUS_NO_RX_PSK,
+                             settings.log_messages_by_default,
+                             settings.accept_files_by_default,
+                             settings.show_notifications_by_default)
+
+    # The Rx-side keys are set as null-byte strings to indicate they have not
+    # been added yet. The zero-keys do not allow existential forgeries as
+    # `decrypt_assembly_packet` does not allow the use of zero-keys for decryption.
+    key_list.add_keyset(onion_pub_key=onion_pub_key,
+                        tx_mk=tx_mk,
+                        rx_mk=bytes(SYMMETRIC_KEY_LENGTH),
+                        tx_hk=tx_hk,
+                        rx_hk=bytes(SYMMETRIC_KEY_LENGTH))
+
+    message = f"Added Tx-side PSK for {nick} ({pub_key_to_short_address(onion_pub_key)})."
+    local_win = window_list.get_local_window()
+    local_win.add_new(ts, message)
+
+    m_print(message, bold=True, tail_clear=True, delay=1)
+
+
+def key_ex_psk_rx(packet: bytes,
+                  ts: 'datetime',
+                  window_list: 'WindowList',
+                  contact_list: 'ContactList',
+                  key_list: 'KeyList',
+                  settings: 'Settings'
+                  ) -> None:
+    """Import Rx-PSK of contact.
+
+    Asks the user to locate the PSK file, derives the key decryption key
+    from a password with Argon2, decrypts and installs the Rx-side
+    message and header keys, then shreds the PSK file from disk.
+    """
+    c_code, onion_pub_key = separate_header(packet, CONFIRM_CODE_LENGTH)
+    short_addr = pub_key_to_short_address(onion_pub_key)
+
+    if not contact_list.has_pub_key(onion_pub_key):
+        raise FunctionReturn(f"Error: Unknown account '{short_addr}'.", head_clear=True)
+
+    contact = contact_list.get_contact_by_pub_key(onion_pub_key)
+    psk_file = ask_path_gui(f"Select PSK for {contact.nick} ({short_addr})", settings, get_file=True)
+
+    try:
+        with open(psk_file, 'rb') as f:
+            psk_data = f.read()
+    except PermissionError:
+        raise FunctionReturn("Error: No read permission for the PSK file.")
+
+    if len(psk_data) != PSK_FILE_SIZE:
+        raise FunctionReturn("Error: The PSK data in the file was invalid.", head_clear=True)
+
+    # PSK file layout: Argon2 salt followed by ciphertext + tag.
+    salt, ct_tag = separate_header(psk_data, ARGON2_SALT_LENGTH)
+
+    # Retry password entry until decryption succeeds or the user aborts.
+    while True:
+        try:
+            password = MasterKey.get_password("PSK password")
+            phase("Deriving the key decryption key", head=2)
+            kdk = argon2_kdf(password, salt, rounds=ARGON2_ROUNDS, memory=ARGON2_MIN_MEMORY)
+            psk = auth_and_decrypt(ct_tag, kdk)
+            phase(DONE)
+            break
+
+        except nacl.exceptions.CryptoError:
+            print_on_previous_line()
+            m_print("Invalid password. Try again.", head=1)
+            print_on_previous_line(reps=5, delay=1)
+        except (EOFError, KeyboardInterrupt):
+            raise FunctionReturn("PSK import aborted.", head=2, delay=1, tail_clear=True)
+
+    rx_mk, rx_hk = separate_header(psk, SYMMETRIC_KEY_LENGTH)
+
+    # Zero-keys are reserved as "not yet installed" markers; refuse them.
+    if any(k == bytes(SYMMETRIC_KEY_LENGTH) for k in [rx_mk, rx_hk]):
+        raise FunctionReturn("Error: Received invalid keys from contact.", head_clear=True)
+
+    contact.kex_status = KEX_STATUS_HAS_RX_PSK
+    contact_list.store_contacts()
+
+    keyset = key_list.get_keyset(onion_pub_key)
+    keyset.rx_mk = rx_mk
+    keyset.rx_hk = rx_hk
+    key_list.store_keys()
+
+    # Pipes protects against shell injection. Source of command's parameter is
+    # the program itself, and therefore trusted, but it's still good practice.
+    subprocess.Popen(f"shred -n 3 -z -u {pipes.quote(psk_file)}", shell=True).wait()
+    if os.path.isfile(psk_file):
+        m_print(f"Warning! Overwriting of PSK ({psk_file}) failed. Press  to continue.",
+                manual_proceed=True, box=True)
+
+    message = f"Added Rx-side PSK for {contact.nick} ({short_addr})."
+    local_win = window_list.get_local_window()
+    local_win.add_new(ts, message)
+
+    m_print([message, '', "Warning!",
+             "Physically destroy the keyfile transmission media ",
+             "to ensure it does not steal data from this computer!", '',
+             f"Confirmation code (to Transmitter): {c_code.hex()}"], box=True, head=1, tail=1)
diff --git a/src/receiver/messages.py b/src/receiver/messages.py
new file mode 100644
index 0000000..bdf521f
--- /dev/null
+++ b/src/receiver/messages.py
@@ -0,0 +1,203 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import base64
+import typing
+
+from typing import Dict
+
+from src.common.db_logs import write_log_entry
+from src.common.encoding import bytes_to_bool
+from src.common.exceptions import FunctionReturn
+from src.common.misc import separate_header, separate_headers
+from src.common.statics import *
+
+from src.receiver.packet import decrypt_assembly_packet
+
+if typing.TYPE_CHECKING:
+ from datetime import datetime
+ from src.common.db_contacts import ContactList
+ from src.common.db_groups import GroupList
+ from src.common.db_keys import KeyList
+ from src.common.db_masterkey import MasterKey
+ from src.common.db_settings import Settings
+ from src.receiver.packet import PacketList
+ from src.receiver.windows import WindowList
+
+
+def process_message(ts: 'datetime',
+                    assembly_packet_ct: bytes,
+                    window_list: 'WindowList',
+                    packet_list: 'PacketList',
+                    contact_list: 'ContactList',
+                    key_list: 'KeyList',
+                    group_list: 'GroupList',
+                    settings: 'Settings',
+                    master_key: 'MasterKey',
+                    file_keys: Dict[bytes, bytes]
+                    ) -> None:
+    """Process received private / group message.
+
+    Decrypts the assembly packet, buffers it until the long transmission
+    completes, then dispatches the assembled payload by its header
+    (group message, private message, or file key delivery). Writes log
+    entries — or placeholder packets when log file masking is enabled —
+    for the processed packets.
+    """
+    local_window = window_list.get_local_window()
+
+    onion_pub_key, origin, assembly_packet_ct \
+        = separate_headers(assembly_packet_ct, [ONION_SERVICE_PUBLIC_KEY_LENGTH, ORIGIN_HEADER_LENGTH])
+
+    # Messages must never use the local (command) pubkey, and the origin
+    # header must identify either the user or the contact.
+    if onion_pub_key == LOCAL_PUBKEY:
+        raise FunctionReturn("Warning! Received packet masqueraded as a command.", window=local_window)
+    if origin not in [ORIGIN_USER_HEADER, ORIGIN_CONTACT_HEADER]:
+        raise FunctionReturn("Error: Received packet had an invalid origin-header.", window=local_window)
+
+    assembly_packet = decrypt_assembly_packet(assembly_packet_ct, onion_pub_key, origin,
+                                              window_list, contact_list, key_list)
+
+    # Upper-case assembly packet headers denote file packets.
+    p_type = FILE if assembly_packet[:ASSEMBLY_PACKET_HEADER_LENGTH].isupper() else MESSAGE
+    packet = packet_list.get_packet(onion_pub_key, origin, p_type)
+    logging = contact_list.get_contact_by_pub_key(onion_pub_key).log_messages
+
+    def log_masking_packets(completed: bool = False) -> None:
+        """Add masking packets to log file.
+
+        If logging and log file masking are enabled, this function will
+        in case of erroneous transmissions, store the correct number of
+        placeholder data packets to log file to hide the quantity of
+        communication that log file observation would otherwise reveal.
+        """
+        if logging and settings.log_file_masking and (packet.log_masking_ctr or completed):
+            no_masking_packets = len(packet.assembly_pt_list) if completed else packet.log_masking_ctr
+            for _ in range(no_masking_packets):
+                write_log_entry(PLACEHOLDER_DATA, onion_pub_key, settings, master_key, origin)
+        packet.log_masking_ctr = 0
+
+    try:
+        packet.add_packet(assembly_packet)
+    except FunctionReturn:
+        log_masking_packets()
+        raise
+    log_masking_packets()
+
+    # Wait for the remaining assembly packets of a long transmission.
+    if not packet.is_complete:
+        return None
+
+    try:
+        if p_type == FILE:
+            packet.assemble_and_store_file(ts, onion_pub_key, window_list)
+            raise FunctionReturn("File storage complete.", output=False)  # Raising allows calling log_masking_packets
+
+        elif p_type == MESSAGE:
+            whisper_byte, header, assembled = separate_headers(packet.assemble_message_packet(),
+                                                               [WHISPER_FIELD_LENGTH, MESSAGE_HEADER_LENGTH])
+            if len(whisper_byte) != WHISPER_FIELD_LENGTH:
+                raise FunctionReturn("Error: Message from contact had an invalid whisper header.")
+
+            # Whisper messages are displayed but never written to the log.
+            whisper = bytes_to_bool(whisper_byte)
+
+            if header == GROUP_MESSAGE_HEADER:
+                logging = process_group_message(assembled, ts, onion_pub_key, origin, whisper, group_list, window_list)
+
+            elif header == PRIVATE_MESSAGE_HEADER:
+                window = window_list.get_window(onion_pub_key)
+                window.add_new(ts, assembled.decode(), onion_pub_key, origin, output=True, whisper=whisper)
+
+            elif header == FILE_KEY_HEADER:
+                nick = process_file_key_message(assembled, onion_pub_key, origin, contact_list, file_keys)
+                raise FunctionReturn(f"Received file decryption key from {nick}", window=local_window)
+
+            else:
+                raise FunctionReturn("Error: Message from contact had an invalid header.")
+
+            if whisper:
+                raise FunctionReturn("Whisper message complete.", output=False)
+
+            if logging:
+                for p in packet.assembly_pt_list:
+                    write_log_entry(p, onion_pub_key, settings, master_key, origin)
+
+    except (FunctionReturn, UnicodeError):
+        log_masking_packets(completed=True)
+        raise
+    finally:
+        packet.clear_assembly_packets()
+
+
+def process_group_message(assembled: bytes,
+                          ts: 'datetime',
+                          onion_pub_key: bytes,
+                          origin: bytes,
+                          whisper: bool,
+                          group_list: 'GroupList',
+                          window_list: 'WindowList'
+                          ) -> bool:
+    """Process a group message.
+
+    Returns the group's log_messages setting so the caller knows whether
+    to log the message. Raises FunctionReturn for unknown groups,
+    non-member senders, and undecodable message contents.
+    """
+    group_id, assembled = separate_header(assembled, GROUP_ID_LENGTH)
+    if not group_list.has_group_id(group_id):
+        raise FunctionReturn("Error: Received message to an unknown group.", output=False)
+
+    group = group_list.get_group_by_id(group_id)
+    if not group.has_member(onion_pub_key):
+        raise FunctionReturn("Error: Account is not a member of the group.", output=False)
+
+    group_msg_id, group_message = separate_header(assembled, GROUP_MSG_ID_LENGTH)
+
+    try:
+        group_message_str = group_message.decode()
+    except UnicodeError:
+        raise FunctionReturn("Error: Received an invalid group message.")
+
+    window = window_list.get_window(group.group_id)
+
+    # All copies of group messages the user sends to members contain
+    # the same message ID. This allows the Receiver Program to ignore
+    # duplicates of outgoing messages sent by the user to each member.
+    if origin == ORIGIN_USER_HEADER:
+        if window.group_msg_id != group_msg_id:
+            window.group_msg_id = group_msg_id
+            window.add_new(ts, group_message_str, onion_pub_key, origin, output=True, whisper=whisper)
+
+    elif origin == ORIGIN_CONTACT_HEADER:
+        window.add_new(ts, group_message_str, onion_pub_key, origin, output=True, whisper=whisper)
+
+    return group.log_messages
+
+
+def process_file_key_message(assembled: bytes,
+ onion_pub_key: bytes,
+ origin: bytes,
+ contact_list: 'ContactList',
+ file_keys: Dict[bytes, bytes]
+ ) -> str:
+ """Process received file key delivery message."""
+ if origin == ORIGIN_USER_HEADER:
+ raise FunctionReturn("File key message from the user.", output=False)
+
+ try:
+ decoded = base64.b85decode(assembled)
+ except ValueError:
+ raise FunctionReturn("Error: Received an invalid file key message.")
+
+ ct_hash, file_key = separate_header(decoded, BLAKE2_DIGEST_LENGTH)
+
+ if len(ct_hash) != BLAKE2_DIGEST_LENGTH or len(file_key) != SYMMETRIC_KEY_LENGTH:
+ raise FunctionReturn("Error: Received an invalid file key message.")
+
+ file_keys[onion_pub_key + ct_hash] = file_key
+ nick = contact_list.get_contact_by_pub_key(onion_pub_key).nick
+
+ return nick
diff --git a/src/receiver/output_loop.py b/src/receiver/output_loop.py
new file mode 100755
index 0000000..807e3ab
--- /dev/null
+++ b/src/receiver/output_loop.py
@@ -0,0 +1,153 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see .
+"""
+
+import os
+import sys
+import time
+import typing
+
+from typing import Dict, List, Tuple
+
+from src.common.exceptions import FunctionReturn
+from src.common.output import clear_screen
+from src.common.statics import *
+
+from src.receiver.commands import process_command
+from src.receiver.files import new_file, process_file
+from src.receiver.key_exchanges import process_local_key
+from src.receiver.messages import process_message
+from src.receiver.packet import PacketList
+from src.receiver.windows import WindowList
+
+if typing.TYPE_CHECKING:
+ from datetime import datetime
+ from multiprocessing import Queue
+ from src.common.db_contacts import ContactList
+ from src.common.db_groups import GroupList
+ from src.common.db_keys import KeyList
+ from src.common.db_masterkey import MasterKey
+ from src.common.db_settings import Settings
+ from src.common.gateway import Gateway
+
+
+def output_loop(queues:       Dict[bytes, 'Queue'],
+                gateway:      'Gateway',
+                settings:     'Settings',
+                contact_list: 'ContactList',
+                key_list:     'KeyList',
+                group_list:   'GroupList',
+                master_key:   'MasterKey',
+                stdin_fd:     int,
+                unittest:     bool = False
+                ) -> None:
+    """Process packets in message queues according to their priority.
+
+    Priority order per iteration: local key packets, commands, buffered
+    messages whose keys have arrived, new messages, buffered files whose
+    keys have arrived, new files. Runs until the process is terminated
+    (or, under unit tests, until UNITTEST_QUEUE receives an item).
+    """
+    l_queue = queues[LOCAL_KEY_DATAGRAM_HEADER]
+    m_queue = queues[MESSAGE_DATAGRAM_HEADER]
+    f_queue = queues[FILE_DATAGRAM_HEADER]
+    c_queue = queues[COMMAND_DATAGRAM_HEADER]
+    e_queue = queues[EXIT_QUEUE]
+
+    # Re-open stdin in this child process so interactive prompts work.
+    sys.stdin  = os.fdopen(stdin_fd)
+    packet_buf = dict()  # type: Dict[bytes, List[Tuple[datetime, bytes]]]  # Messages waiting for contact/keys
+    file_buf   = dict()  # type: Dict[bytes, Tuple[datetime, bytes]]        # File ciphertexts waiting for file keys
+    file_keys  = dict()  # type: Dict[bytes, bytes]                         # Delivered file decryption keys
+
+    kdk_hashes    = []  # type: List[bytes]  # Hashes of used local key decryption keys
+    packet_hashes = []  # type: List[bytes]  # Hashes of processed local key packets
+
+    packet_list = PacketList(settings, contact_list)
+    window_list = WindowList(settings, contact_list, group_list, packet_list)
+
+    clear_screen()
+    while True:
+        try:
+            # Local key packets have the highest priority: nothing else can
+            # be decrypted before the local key has been delivered.
+            if l_queue.qsize() != 0:
+                ts, packet = l_queue.get()
+                process_local_key(ts, packet, window_list, contact_list, key_list,
+                                  settings, kdk_hashes, packet_hashes, l_queue)
+                continue
+
+            if not contact_list.has_local_contact():
+                time.sleep(0.1)
+                continue
+
+            # Commands
+            if c_queue.qsize() != 0:
+                ts, packet = c_queue.get()
+                process_command(ts, packet, window_list, packet_list, contact_list, key_list,
+                                group_list, settings, master_key, gateway, e_queue)
+                continue
+
+            # File window refresh
+            if window_list.active_win is not None and window_list.active_win.uid == WIN_UID_FILE:
+                window_list.active_win.redraw_file_win()
+
+            # Cached message packets: deliver one buffered packet per
+            # iteration once the sender's contact entry and rx-key exist.
+            for onion_pub_key in packet_buf:
+                if (contact_list.has_pub_key(onion_pub_key)
+                        and key_list.has_rx_mk(onion_pub_key)
+                        and packet_buf[onion_pub_key]):
+                    ts, packet = packet_buf[onion_pub_key].pop(0)
+                    process_message(ts, packet, window_list, packet_list, contact_list, key_list,
+                                    group_list, settings, master_key, file_keys)
+                    continue
+
+            # New messages
+            if m_queue.qsize() != 0:
+                ts, packet    = m_queue.get()
+                onion_pub_key = packet[:ONION_SERVICE_PUBLIC_KEY_LENGTH]
+
+                # Process immediately if keys exist, otherwise buffer until
+                # the contact has been added / key exchange has completed.
+                if contact_list.has_pub_key(onion_pub_key) and key_list.has_rx_mk(onion_pub_key):
+                    process_message(ts, packet, window_list, packet_list, contact_list, key_list,
+                                    group_list, settings, master_key, file_keys)
+                else:
+                    packet_buf.setdefault(onion_pub_key, []).append((ts, packet))
+                continue
+
+            # Cached files: decrypt a buffered file ciphertext once its key
+            # has been delivered. The `finally` block removes the processed
+            # entry even if `process_file` raises, and `break` prevents
+            # further iteration over the mutated dict.
+            if file_buf:
+                for k in file_buf:
+                    key_to_remove = b''
+                    try:
+                        if k in file_keys:
+                            key_to_remove = k
+                            ts_, file_ct  = file_buf[k]
+                            dec_key       = file_keys[k]
+                            onion_pub_key = k[:ONION_SERVICE_PUBLIC_KEY_LENGTH]
+                            process_file(ts_, onion_pub_key, file_ct, dec_key, contact_list, window_list, settings)
+                    finally:
+                        if key_to_remove:
+                            file_buf.pop(k)
+                            file_keys.pop(k)
+                            break
+
+            # New files
+            if f_queue.qsize() != 0:
+                ts, packet = f_queue.get()
+                new_file(ts, packet, file_keys, file_buf, contact_list, window_list, settings)
+
+            time.sleep(0.01)
+
+            if unittest and queues[UNITTEST_QUEUE].qsize() != 0:
+                break
+
+        # Deliberate catch-all for this loop's expected failure modes:
+        # FunctionReturn aborts a single packet's processing, KeyError
+        # covers missing dict entries, and ^C must not kill the loop.
+        except (FunctionReturn, KeyError, KeyboardInterrupt):
+            pass
diff --git a/src/receiver/packet.py b/src/receiver/packet.py
new file mode 100644
index 0000000..df3520e
--- /dev/null
+++ b/src/receiver/packet.py
@@ -0,0 +1,429 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see .
+"""
+
+import struct
+import typing
+import zlib
+
+from datetime import datetime, timedelta
+from typing import Any, Callable, Dict, Generator, Iterable, List, Optional, Sized
+
+import nacl.exceptions
+
+from src.common.crypto import auth_and_decrypt, blake2b, rm_padding_bytes
+from src.common.encoding import bytes_to_int, int_to_bytes
+from src.common.exceptions import FunctionReturn
+from src.common.input import yes
+from src.common.misc import decompress, readable_size, separate_header, separate_headers, separate_trailer
+from src.common.output import m_print
+from src.common.statics import *
+
+from src.receiver.files import process_assembled_file
+
+if typing.TYPE_CHECKING:
+ from src.common.db_contacts import Contact, ContactList
+ from src.common.db_keys import KeyList
+ from src.common.db_settings import Settings
+ from src.receiver.windows import RxWindow, WindowList
+
+
+def process_offset(offset:    int,       # Number of dropped packets
+                   origin:    bytes,     # "to/from" preposition
+                   direction: str,       # Direction of packet
+                   nick:      str,       # Nickname of associated contact
+                   window:    'RxWindow' # RxWindow object
+                   ) -> None:
+    """Display warnings about increased offsets.
+
+    If the offset has increased over the threshold, ask the user to
+    confirm hash ratchet catch up. Declining raises FunctionReturn,
+    which drops the packet; a small non-zero offset only prints a
+    warning about the missed packets.
+    """
+    # Large offsets are only suspicious for incoming packets: catching up
+    # requires one BLAKE2b call per missed packet, so a huge offset could
+    # be a CPU-exhaustion DoS attempt by the contact.
+    if offset > HARAC_WARN_THRESHOLD and origin == ORIGIN_CONTACT_HEADER:
+        m_print([f"Warning! {offset} packets from {nick} were not received.",
+                 f"This might indicate that {offset} most recent packets were ",
+                 f"lost during transmission, or that the contact is attempting ",
+                 f"a DoS attack. You can wait for TFC to attempt to decrypt the ",
+                 "packet, but it might take a very long time or even forever."])
+
+        if not yes("Proceed with the decryption?", abort=False, tail=1):
+            raise FunctionReturn(f"Dropped packet from {nick}.", window=window)
+
+    elif offset:
+        m_print(f"Warning! {offset} packet{'s' if offset > 1 else ''} {direction} {nick} were not received.")
+
+
+def decrypt_assembly_packet(packet:        bytes,          # Assembly packet ciphertext
+                            onion_pub_key: bytes,          # Onion Service pubkey of associated contact
+                            origin:        bytes,          # Direction of packet
+                            window_list:   'WindowList',   # WindowList object
+                            contact_list:  'ContactList',  # ContactList object
+                            key_list:      'KeyList'       # Keylist object
+                            ) -> bytes:                    # Decrypted assembly packet
+    """Decrypt assembly packet from contact/local Transmitter.
+
+    The packet consists of an encrypted hash ratchet counter (harac)
+    followed by the encrypted assembly packet. The harac is decrypted
+    with the static header key; the message key is then ratcheted
+    forward to the purported counter value before decrypting the
+    payload. On success the stored message key and harac are advanced
+    past the used key so it can never be reused.
+
+    Raises FunctionReturn on zero keys, invalid MACs, or an expired
+    (replayed) harac.
+    """
+    ct_harac, ct_assemby_packet = separate_header(packet, header_length=HARAC_CT_LENGTH)
+    local_window                = window_list.get_local_window()
+    command                     = onion_pub_key == LOCAL_PUBKEY
+
+    p_type    = "command" if command                                      else "packet"
+    direction = "from"    if command or (origin == ORIGIN_CONTACT_HEADER) else "sent to"
+    nick      = contact_list.get_contact_by_pub_key(onion_pub_key).nick
+
+    # Load keys: user-originated copies use the tx-keys, contact packets the rx-keys.
+    keyset  = key_list.get_keyset(onion_pub_key)
+    key_dir = TX if origin == ORIGIN_USER_HEADER else RX
+
+    header_key  = getattr(keyset, f'{key_dir}_hk')  # type: bytes
+    message_key = getattr(keyset, f'{key_dir}_mk')  # type: bytes
+
+    # A zero key means the keyset is a placeholder/wiped entry; refusing to
+    # decrypt with it prevents trivially forgeable packets.
+    if any(k == bytes(SYMMETRIC_KEY_LENGTH) for k in [header_key, message_key]):
+        raise FunctionReturn("Warning! Loaded zero-key for packet decryption.")
+
+    # Decrypt hash ratchet counter
+    try:
+        harac_bytes = auth_and_decrypt(ct_harac, header_key)
+    except nacl.exceptions.CryptoError:
+        raise FunctionReturn(
+            f"Warning! Received {p_type} {direction} {nick} had an invalid hash ratchet MAC.", window=local_window)
+
+    # Catch up with hash ratchet offset
+    purp_harac   = bytes_to_int(harac_bytes)
+    stored_harac = getattr(keyset, f'{key_dir}_harac')
+    offset       = purp_harac - stored_harac
+    # A negative offset means the counter has already been used: reject to
+    # prevent replay of old packets.
+    if offset < 0:
+        raise FunctionReturn(
+            f"Warning! Received {p_type} {direction} {nick} had an expired hash ratchet counter.", window=local_window)
+
+    # May raise FunctionReturn if the user declines a large catch-up.
+    process_offset(offset, origin, direction, nick, local_window)
+    # Ratchet the message key forward one BLAKE2b step per missed packet.
+    for harac in range(stored_harac, stored_harac + offset):
+        message_key = blake2b(message_key + int_to_bytes(harac), digest_size=SYMMETRIC_KEY_LENGTH)
+
+    # Decrypt packet
+    try:
+        assembly_packet = auth_and_decrypt(ct_assemby_packet, message_key)
+    except nacl.exceptions.CryptoError:
+        raise FunctionReturn(f"Warning! Received {p_type} {direction} {nick} had an invalid MAC.", window=local_window)
+
+    # Update message key and harac: store the *next* key so the used one is
+    # immediately destroyed (forward secrecy).
+    keyset.update_mk(key_dir,
+                     blake2b(message_key + int_to_bytes(stored_harac + offset), digest_size=SYMMETRIC_KEY_LENGTH),
+                     offset + 1)
+
+    return assembly_packet
+
+
+class Packet(object):
+    """Packet objects collect and keep track of received assembly packets.
+
+    One Packet object exists per (contact, origin, type) triple; it
+    accumulates short/long/append/end assembly packets until a complete
+    logical message, file, or command can be assembled.
+    """
+
+    def __init__(self,
+                 onion_pub_key: bytes,      # Public key of the contact associated with the packet <─┐
+                 origin:        bytes,      # Origin of packet (user, contact)                     <─┼─ Form packet UID
+                 p_type:        str,        # Packet type (message, file, command)                 <─┘
+                 contact:       'Contact',  # Contact object of contact associated with the packet
+                 settings:      'Settings'  # Settings object
+                 ) -> None:
+        """Create a new Packet object."""
+        self.onion_pub_key = onion_pub_key
+        self.contact       = contact
+        self.origin        = origin
+        self.type          = p_type
+        self.settings      = settings
+
+        # File transmission metadata (set by process_long_header for files)
+        self.packets = None  # type: Optional[int]   # Expected number of assembly packets
+        self.time    = None  # type: Optional[str]   # Estimated transfer time
+        self.size    = None  # type: Optional[str]   # Human-readable file size
+        self.name    = None  # type: Optional[str]   # File name
+
+        # Assembly packet headers for this packet type:
+        # short / long / append / end / cancel / noise.
+        self.sh = {MESSAGE: M_S_HEADER, FILE: F_S_HEADER, COMMAND: C_S_HEADER}[self.type]
+        self.lh = {MESSAGE: M_L_HEADER, FILE: F_L_HEADER, COMMAND: C_L_HEADER}[self.type]
+        self.ah = {MESSAGE: M_A_HEADER, FILE: F_A_HEADER, COMMAND: C_A_HEADER}[self.type]
+        self.eh = {MESSAGE: M_E_HEADER, FILE: F_E_HEADER, COMMAND: C_E_HEADER}[self.type]
+        self.ch = {MESSAGE: M_C_HEADER, FILE: F_C_HEADER, COMMAND: C_C_HEADER}[self.type]
+        self.nh = {MESSAGE: P_N_HEADER, FILE: P_N_HEADER, COMMAND: C_N_HEADER}[self.type]
+
+        self.log_masking_ctr  = 0      # type: int          # Placeholder entries owed to the log file
+        self.assembly_pt_list = []     # type: List[bytes]  # Received assembly packets (plaintext)
+        self.log_ct_list      = []     # type: List[bytes]  # Ciphertexts of received packets (for logging)
+        self.long_active      = False  # True while a multi-packet transmission is in progress
+        self.is_complete      = False  # True once all assembly packets have been received
+
+    def add_masking_packet_to_log_file(self, increase: int = 1) -> None:
+        """Increase `log_masking_ctr` for message and file packets.
+
+        Discarded packets must still produce placeholder log entries so
+        traffic masking does not leak how many real packets were logged.
+        """
+        if self.type in [MESSAGE, FILE]:
+            self.log_masking_ctr += increase
+
+    def clear_file_metadata(self) -> None:
+        """Clear file metadata."""
+        self.packets = None
+        self.time    = None
+        self.size    = None
+        self.name    = None
+
+    def clear_assembly_packets(self) -> None:
+        """Clear packet state."""
+        self.assembly_pt_list = []
+        self.log_ct_list      = []
+        self.long_active      = False
+        self.is_complete      = False
+
+    def new_file_packet(self) -> None:
+        """New file transmission handling logic.
+
+        Resets any in-progress transfer and rejects files from the user
+        or from contacts for whom file reception is disabled.
+        """
+        name       = self.name
+        was_active = self.long_active
+        self.clear_file_metadata()
+        self.clear_assembly_packets()
+
+        if self.origin == ORIGIN_USER_HEADER:
+            self.add_masking_packet_to_log_file()
+            raise FunctionReturn("Ignored file from the user.", output=False)
+
+        if not self.contact.file_reception:
+            self.add_masking_packet_to_log_file()
+            raise FunctionReturn(f"Alert! File transmission from {self.contact.nick} but reception is disabled.")
+
+        # A new start packet implicitly aborts any unfinished transfer.
+        if was_active:
+            m_print(f"Alert! File '{name}' from {self.contact.nick} never completed.", head=1, tail=1)
+
+    def check_long_packet(self) -> None:
+        """Check if the long packet has permission to be extended."""
+        # Append/end packets without a preceding start packet are dropped.
+        if not self.long_active:
+            self.add_masking_packet_to_log_file()
+            raise FunctionReturn("Missing start packet.", output=False)
+
+        # File reception can be toggled off mid-transfer; discard the
+        # partial transfer and log placeholders for every dropped packet.
+        if self.type == FILE and not self.contact.file_reception:
+            self.add_masking_packet_to_log_file(increase=len(self.assembly_pt_list) + 1)
+            self.clear_assembly_packets()
+            raise FunctionReturn("Alert! File reception disabled mid-transfer.")
+
+    def process_short_header(self,
+                             packet:    bytes,
+                             packet_ct: Optional[bytes] = None
+                             ) -> None:
+        """Process short packet (complete transmission in one packet)."""
+        if self.long_active:
+            self.add_masking_packet_to_log_file(increase=len(self.assembly_pt_list))
+
+        if self.type == FILE:
+            self.new_file_packet()
+            # Strip the two integer metadata fields from short file packets,
+            # keeping the assembly packet header in place.
+            sh, _, packet = separate_headers(packet, [ASSEMBLY_PACKET_HEADER_LENGTH] + [2*ENCODED_INTEGER_LENGTH])
+            packet        = sh + packet
+
+        self.assembly_pt_list = [packet]
+        self.long_active      = False
+        self.is_complete      = True
+
+        if packet_ct is not None:
+            self.log_ct_list = [packet_ct]
+
+    def process_long_header(self,
+                            packet:    bytes,
+                            packet_ct: Optional[bytes] = None
+                            ) -> None:
+        """Process first packet of long transmission."""
+        if self.long_active:
+            self.add_masking_packet_to_log_file(increase=len(self.assembly_pt_list))
+
+        if self.type == FILE:
+            self.new_file_packet()
+            try:
+                # Long file packets carry packet count, ETA, and size metadata
+                # between the header and the payload.
+                lh, no_p_bytes, time_bytes, size_bytes, packet \
+                    = separate_headers(packet, [ASSEMBLY_PACKET_HEADER_LENGTH] + 3*[ENCODED_INTEGER_LENGTH])
+
+                self.packets = bytes_to_int(no_p_bytes)  # added by transmitter.packet.split_to_assembly_packets
+                self.time    = str(timedelta(seconds=bytes_to_int(time_bytes)))
+                self.size    = readable_size(bytes_to_int(size_bytes))
+                self.name    = packet.split(US_BYTE)[0].decode()
+                packet       = lh + packet
+
+                m_print([f'Receiving file from {self.contact.nick}:',
+                         f'{self.name} ({self.size})',
+                         f'ETA {self.time} ({self.packets} packets)'], bold=True)
+
+            except (struct.error, UnicodeError, ValueError):
+                self.add_masking_packet_to_log_file()
+                raise FunctionReturn("Error: Received file packet had an invalid header.")
+
+        self.assembly_pt_list = [packet]
+        self.long_active      = True
+        self.is_complete      = False
+
+        if packet_ct is not None:
+            self.log_ct_list = [packet_ct]
+
+    def process_append_header(self,
+                              packet:    bytes,
+                              packet_ct: Optional[bytes] = None
+                              ) -> None:
+        """Process consecutive packet(s) of long transmission."""
+        self.check_long_packet()
+        self.assembly_pt_list.append(packet)
+
+        if packet_ct is not None:
+            self.log_ct_list.append(packet_ct)
+
+    def process_end_header(self,
+                           packet:    bytes,
+                           packet_ct: Optional[bytes] = None
+                           ) -> None:
+        """Process last packet of long transmission."""
+        self.check_long_packet()
+        self.assembly_pt_list.append(packet)
+        self.long_active = False
+        self.is_complete = True
+
+        if packet_ct is not None:
+            self.log_ct_list.append(packet_ct)
+
+    def abort_packet(self, cancel: bool = False) -> None:
+        """Process cancel/noise packet.
+
+        Either aborts an in-progress incoming file transfer (printing a
+        notification) or simply clears state for noise packets.
+        """
+        if self.type == FILE and self.origin == ORIGIN_CONTACT_HEADER and self.long_active:
+            if cancel:
+                message = f"{self.contact.nick} cancelled file."
+            else:
+                message = f"Alert! File '{self.name}' from {self.contact.nick} never completed."
+            m_print(message, head=1, tail=1)
+            self.clear_file_metadata()
+        # +1 accounts for the cancel/noise packet itself.
+        self.add_masking_packet_to_log_file(increase=len(self.assembly_pt_list) + 1)
+        self.clear_assembly_packets()
+
+    def process_cancel_header(self, *_: Any) -> None:
+        """Process cancel packet for long transmission."""
+        self.abort_packet(cancel=True)
+
+    def process_noise_header(self, *_: Any) -> None:
+        """Process traffic masking noise packet."""
+        self.abort_packet()
+
+    def add_packet(self,
+                   packet:    bytes,
+                   packet_ct: Optional[bytes] = None
+                   ) -> None:
+        """Add a new assembly packet to the object.
+
+        Dispatches on the assembly packet header byte to the matching
+        process_* handler; unknown headers are rejected but still
+        counted for log masking.
+        """
+        try:
+            func_d = {self.sh: self.process_short_header,
+                      self.lh: self.process_long_header,
+                      self.ah: self.process_append_header,
+                      self.eh: self.process_end_header,
+                      self.ch: self.process_cancel_header,
+                      self.nh: self.process_noise_header
+                      }  # type: Dict[bytes, Callable]
+            func = func_d[packet[:ASSEMBLY_PACKET_HEADER_LENGTH]]
+        except KeyError:
+            # Erroneous headers are ignored but stored as placeholder data.
+            self.add_masking_packet_to_log_file()
+            raise FunctionReturn("Error: Received packet had an invalid assembly packet header.")
+        func(packet, packet_ct)
+
+    def assemble_message_packet(self) -> bytes:
+        """Assemble message packet.
+
+        Concatenates the payloads, strips padding, and for multi-packet
+        messages decrypts the inner ciphertext with the key appended by
+        the Transmitter. Returns the decompressed plaintext.
+        """
+        padded  = b''.join([p[ASSEMBLY_PACKET_HEADER_LENGTH:] for p in self.assembly_pt_list])
+        payload = rm_padding_bytes(padded)
+
+        if len(self.assembly_pt_list) > 1:
+            msg_ct, msg_key = separate_trailer(payload, SYMMETRIC_KEY_LENGTH)
+            try:
+                payload = auth_and_decrypt(msg_ct, msg_key)
+            except nacl.exceptions.CryptoError:
+                raise FunctionReturn("Error: Decryption of message failed.")
+
+        try:
+            return decompress(payload, MAX_MESSAGE_SIZE)
+        except zlib.error:
+            raise FunctionReturn("Error: Decompression of message failed.")
+
+    def assemble_and_store_file(self,
+                                ts:            'datetime',
+                                onion_pub_key: bytes,
+                                window_list:   'WindowList'
+                                ) -> None:
+        """Assemble file packet and store it."""
+        padded  = b''.join([p[ASSEMBLY_PACKET_HEADER_LENGTH:] for p in self.assembly_pt_list])
+        payload = rm_padding_bytes(padded)
+
+        process_assembled_file(ts, payload, onion_pub_key, self.contact.nick, self.settings, window_list)
+
+    def assemble_command_packet(self) -> bytes:
+        """Assemble command packet.
+
+        Multi-packet commands carry a BLAKE2b digest trailer that is
+        verified before decompression (commands are not inner-encrypted).
+        """
+        padded  = b''.join([p[ASSEMBLY_PACKET_HEADER_LENGTH:] for p in self.assembly_pt_list])
+        payload = rm_padding_bytes(padded)
+
+        if len(self.assembly_pt_list) > 1:
+            payload, cmd_hash = separate_trailer(payload, BLAKE2_DIGEST_LENGTH)
+            if blake2b(payload) != cmd_hash:
+                raise FunctionReturn("Error: Received an invalid command.")
+
+        try:
+            return decompress(payload, self.settings.max_decompress_size)
+        except zlib.error:
+            raise FunctionReturn("Error: Decompression of command failed.")
+
+
+class PacketList(Iterable, Sized):
+    """PacketList manages all file, message, and command packets.
+
+    Packet objects are created lazily on first access and are keyed by
+    the (onion_pub_key, origin, p_type) triple.
+    """
+
+    def __init__(self,
+                 settings:     'Settings',
+                 contact_list: 'ContactList'
+                 ) -> None:
+        """Create a new PacketList object."""
+        self.settings     = settings
+        self.contact_list = contact_list
+        self.packets      = []  # type: List[Packet]
+
+    def __iter__(self) -> Generator:
+        """Iterate over packet list."""
+        yield from self.packets
+
+    def __len__(self) -> int:
+        """Return number of packets in the packet list."""
+        return len(self.packets)
+
+    def has_packet(self,
+                   onion_pub_key: bytes,
+                   origin:        bytes,
+                   p_type:        str
+                   ) -> bool:
+        """Return True if a packet with matching selectors exists, else False."""
+        return any(p for p in self.packets if (p.onion_pub_key == onion_pub_key
+                                               and p.origin == origin
+                                               and p.type == p_type))
+
+    def get_packet(self,
+                   onion_pub_key: bytes,
+                   origin:        bytes,
+                   p_type:        str,
+                   log_access:    bool = False
+                   ) -> Packet:
+        """Get packet based on Onion Service public key, origin, and type.
+
+        If the packet does not exist, create it. When accessed from log
+        file processing (`log_access`), a dummy contact is used so
+        packets from since-removed contacts can still be handled.
+        """
+        if not self.has_packet(onion_pub_key, origin, p_type):
+            if log_access:
+                contact = self.contact_list.generate_dummy_contact()
+            else:
+                contact = self.contact_list.get_contact_by_pub_key(onion_pub_key)
+
+            self.packets.append(Packet(onion_pub_key, origin, p_type, contact, self.settings))
+
+        return next(p for p in self.packets if (p.onion_pub_key == onion_pub_key
+                                                and p.origin == origin
+                                                and p.type == p_type))
diff --git a/src/receiver/receiver_loop.py b/src/receiver/receiver_loop.py
new file mode 100755
index 0000000..4ff0489
--- /dev/null
+++ b/src/receiver/receiver_loop.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see .
+"""
+
+import struct
+import time
+import typing
+
+from datetime import datetime
+from typing import Dict
+
+from src.common.encoding import bytes_to_int
+from src.common.exceptions import FunctionReturn
+from src.common.misc import ignored, separate_headers
+from src.common.output import m_print
+from src.common.statics import *
+
+if typing.TYPE_CHECKING:
+ from multiprocessing import Queue
+ from src.common.gateway import Gateway
+
+
+def receiver_loop(queues:   Dict[bytes, 'Queue'],
+                  gateway:  'Gateway',
+                  unittest: bool = False
+                  ) -> None:
+    """Decode received packets and forward them to packet queues.
+
+    Runs as a dedicated process: validates datagram error detection
+    codes, parses the timestamp header, and routes each datagram to the
+    queue matching its header byte.
+    """
+    gateway_queue = queues[GATEWAY_QUEUE]
+
+    while True:
+        with ignored(EOFError, KeyboardInterrupt):
+            # Brief sleep before the blocking get() keeps the loop from
+            # spinning when the queue is empty.
+            if gateway_queue.qsize() == 0:
+                time.sleep(0.01)
+
+            _, packet = gateway_queue.get()
+
+            # Drop packets whose error-detection check fails.
+            try:
+                packet = gateway.detect_errors(packet)
+            except FunctionReturn:
+                continue
+
+            header, ts_bytes, payload = separate_headers(packet, [DATAGRAM_HEADER_LENGTH, DATAGRAM_TIMESTAMP_LENGTH])
+
+            # Timestamp is an integer-encoded %Y%m%d%H%M%S%f string set by Relay Program.
+            try:
+                ts = datetime.strptime(str(bytes_to_int(ts_bytes)), "%Y%m%d%H%M%S%f")
+            except (ValueError, struct.error):
+                m_print("Error: Failed to decode timestamp in the received packet.", head=1, tail=1)
+                continue
+
+            # Route to the per-type queue; unknown headers are silently dropped.
+            if header in [MESSAGE_DATAGRAM_HEADER, FILE_DATAGRAM_HEADER,
+                          COMMAND_DATAGRAM_HEADER, LOCAL_KEY_DATAGRAM_HEADER]:
+                queues[header].put((ts, payload))
+
+            if unittest:
+                break
diff --git a/src/receiver/windows.py b/src/receiver/windows.py
new file mode 100644
index 0000000..0440429
--- /dev/null
+++ b/src/receiver/windows.py
@@ -0,0 +1,373 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see .
+"""
+
+import os
+import sys
+import textwrap
+import typing
+
+from datetime import datetime
+from typing import Any, Dict, Generator, Iterable, List, Optional, Tuple
+
+from src.common.encoding import pub_key_to_short_address
+from src.common.exceptions import FunctionReturn
+from src.common.misc import get_terminal_width
+from src.common.output import clear_screen, m_print, print_on_previous_line
+from src.common.statics import *
+
+if typing.TYPE_CHECKING:
+ from src.common.db_contacts import Contact, ContactList
+ from src.common.db_groups import GroupList
+ from src.common.db_settings import Settings
+ from src.receiver.packet import PacketList
+
+MsgTuple = Tuple[datetime, str, bytes, bytes, bool, bool]
+
+
+class RxWindow(Iterable):
+ """RxWindow is an ephemeral message log for contact or group.
+
+ In addition, command history and file transfers have
+ their own windows, accessible with separate commands.
+ """
+
+ def __init__(self,
+ uid: bytes,
+ contact_list: 'ContactList',
+ group_list: 'GroupList',
+ settings: 'Settings',
+ packet_list: 'PacketList'
+ ) -> None:
+ """Create a new RxWindow object."""
+ self.uid = uid
+ self.contact_list = contact_list
+ self.group_list = group_list
+ self.settings = settings
+ self.packet_list = packet_list
+
+ self.is_active = False
+ self.contact = None
+ self.group = None
+ self.group_msg_id = os.urandom(GROUP_MSG_ID_LENGTH)
+
+ self.window_contacts = [] # type: List[Contact]
+ self.message_log = [] # type: List[MsgTuple]
+ self.handle_dict = dict() # type: Dict[bytes, str]
+ self.previous_msg_ts = datetime.now()
+ self.unread_messages = 0
+
+ if self.uid == WIN_UID_LOCAL:
+ self.type = WIN_TYPE_COMMAND
+ self.name = self.type
+ self.window_contacts = []
+
+ elif self.uid == WIN_UID_FILE:
+ self.type = WIN_TYPE_FILE
+ self.packet_list = packet_list
+
+ elif self.uid in self.contact_list.get_list_of_pub_keys():
+ self.type = WIN_TYPE_CONTACT
+ self.contact = self.contact_list.get_contact_by_pub_key(uid)
+ self.name = self.contact.nick
+ self.window_contacts = [self.contact]
+
+ elif self.uid in self.group_list.get_list_of_group_ids():
+ self.type = WIN_TYPE_GROUP
+ self.group = self.group_list.get_group_by_id(self.uid)
+ self.name = self.group.name
+ self.window_contacts = self.group.members
+
+ else:
+ raise FunctionReturn(f"Invalid window '{uid}'.")
+
+ def __iter__(self) -> Generator:
+ """Iterate over window's message log."""
+ yield from self.message_log
+
+ def __len__(self) -> int:
+ """Return number of message tuples in the message log."""
+ return len(self.message_log)
+
+ def add_contacts(self, pub_keys: List[bytes]) -> None:
+ """Add contact objects to the window."""
+ self.window_contacts += [self.contact_list.get_contact_by_pub_key(k) for k in pub_keys
+ if not self.has_contact(k) and self.contact_list.has_pub_key(k)]
+
+ def remove_contacts(self, pub_keys: List[bytes]) -> None:
+ """Remove contact objects from the window."""
+ to_remove = set(pub_keys) & set([m.onion_pub_key for m in self.window_contacts])
+ if to_remove:
+ self.window_contacts = [c for c in self.window_contacts if c.onion_pub_key not in to_remove]
+
+ def reset_window(self) -> None:
+ """Reset the ephemeral message log of the window."""
+ self.message_log = []
+
+ def has_contact(self, onion_pub_key: bytes) -> bool:
+ """\
+ Return True if contact with the specified public key is in the
+ window, else False.
+ """
+ return any(onion_pub_key == c.onion_pub_key for c in self.window_contacts)
+
+ def update_handle_dict(self, pub_key: bytes) -> None:
+ """Update handle for public key in `handle_dict`."""
+ if self.contact_list.has_pub_key(pub_key):
+ self.handle_dict[pub_key] = self.contact_list.get_contact_by_pub_key(pub_key).nick
+ else:
+ self.handle_dict[pub_key] = pub_key_to_short_address(pub_key)
+
+ def create_handle_dict(self, message_log: Optional[List[MsgTuple]] = None) -> None:
+ """Pre-generate {account: handle} dictionary.
+
+ Pre-generation allows `self.print()` to indent accounts and
+ nicks without having to loop over the entire message list for
+ every message to determine the amount of require indent.
+ """
+ pub_keys = set(c.onion_pub_key for c in self.window_contacts)
+ if message_log is not None:
+ pub_keys |= set(tup[2] for tup in message_log)
+ for k in pub_keys:
+ self.update_handle_dict(k)
+
+ def get_handle(self,
+ time_stamp: 'datetime', # Timestamp of message to be printed
+ onion_pub_key: bytes, # Onion Service public key of contact (used as lookup for handles)
+ origin: bytes, # Determines whether to use "Me" or nick of contact as handle
+ whisper: bool = False, # When True, displays (whisper) specifier next to handle
+ event_msg: bool = False # When True, sets handle to "-!-"
+ ) -> str: # Handle to use
+ """Returns indented handle complete with headers and trailers."""
+ time_stamp_str = time_stamp.strftime('%H:%M:%S.%f')[:-4]
+
+ if onion_pub_key == WIN_UID_LOCAL or event_msg:
+ handle = EVENT
+ ending = ' '
+ else:
+ handle = self.handle_dict[onion_pub_key] if origin == ORIGIN_CONTACT_HEADER else ME
+ handles = list(self.handle_dict.values()) + [ME]
+ indent = max(len(v) for v in handles) - len(handle) if self.is_active else 0
+ handle = indent * ' ' + handle
+
+ # Handle specifiers for messages to inactive window
+ if not self.is_active:
+ handle += {WIN_TYPE_GROUP: f" (group {self.name})",
+ WIN_TYPE_CONTACT: f" (private message)"}.get(self.type, '')
+ if whisper:
+ handle += " (whisper)"
+
+ ending = ': '
+
+ handle = f"{time_stamp_str} {handle}{ending}"
+
+ return handle
+
+ def print(self, msg_tuple: MsgTuple, file: Any = None) -> None:
+ """Print a new message to the window."""
+
+ # Unpack tuple
+ ts, message, onion_pub_key, origin, whisper, event_msg = msg_tuple
+
+ # Determine handle
+ handle = self.get_handle(ts, onion_pub_key, origin, whisper, event_msg)
+
+ # Check if message content needs to be changed to privacy-preserving notification
+ if not self.is_active and not self.settings.new_message_notify_preview and self.uid != WIN_UID_LOCAL:
+ trailer = 's' if self.unread_messages > 0 else ''
+ message = BOLD_ON + f"{self.unread_messages + 1} unread message{trailer}" + NORMAL_TEXT
+
+ # Wrap message
+ wrapper = textwrap.TextWrapper(width=get_terminal_width(),
+ initial_indent=handle,
+ subsequent_indent=len(handle)*' ')
+ wrapped = wrapper.fill(message)
+ if wrapped == '':
+ wrapped = handle
+
+ # Add bolding unless export file is provided
+ bold_on, bold_off, f_name = (BOLD_ON, NORMAL_TEXT, sys.stdout) if file is None else ('', '', file)
+ wrapped = bold_on + wrapped[:len(handle)] + bold_off + wrapped[len(handle):]
+
+ if self.is_active:
+ if self.previous_msg_ts.date() != ts.date():
+ print(bold_on + f"00:00 -!- Day changed to {str(ts.date())}" + bold_off, file=f_name)
+ print(wrapped, file=f_name)
+
+ else:
+ if onion_pub_key != WIN_UID_LOCAL:
+ self.unread_messages += 1
+
+ if (self.type == WIN_TYPE_CONTACT and self.contact is not None and self.contact.notifications) \
+ or (self.type == WIN_TYPE_GROUP and self.group is not None and self.group.notifications) \
+ or (self.type == WIN_TYPE_COMMAND):
+
+ lines = wrapped.split('\n')
+ if len(lines) > 1:
+ print(lines[0][:-1] + '…') # Preview only first line of the long message
+ else:
+ print(wrapped)
+ print_on_previous_line(delay=self.settings.new_message_notify_duration, flush=True)
+
+ self.previous_msg_ts = ts
+
+ def add_new(self,
+ timestamp: 'datetime', # The timestamp of the received message
+ message: str, # The content of the message
+ onion_pub_key: bytes = WIN_UID_LOCAL, # The Onion Service public key of associated contact
+ origin: bytes = ORIGIN_USER_HEADER, # The direction of the message
+ output: bool = False, # When True, displays message while adding it to message_log
+ whisper: bool = False, # When True, displays message as whisper message
+ event_msg: bool = False # When True, uses "-!-" as message handle
+ ) -> None:
+ """Add message tuple to message log and optionally print it."""
+
+ self.update_handle_dict(onion_pub_key)
+
+ msg_tuple = (timestamp, message, onion_pub_key, origin, whisper, event_msg)
+ self.message_log.append(msg_tuple)
+ if output:
+ self.print(msg_tuple)
+
+ def redraw(self, file: Any = None) -> None:
+ """Print all messages received to the window."""
+ old_messages = len(self.message_log) - self.unread_messages
+ self.unread_messages = 0
+
+ if file is None:
+ clear_screen()
+
+ if self.message_log:
+ self.previous_msg_ts = self.message_log[-1][0]
+ self.create_handle_dict(self.message_log)
+ for i, msg_tuple in enumerate(self.message_log):
+ if i == old_messages:
+ print('\n' + ' Unread Messages '.center(get_terminal_width(), '-') + '\n')
+ self.print(msg_tuple, file)
+ else:
+ m_print(f"This window for {self.name} is currently empty.", bold=True, head=1, tail=1)
+
    def redraw_file_win(self) -> None:
        """Draw file transmission window progress bars.

        Builds a four-column table (file name, size, sender, completion
        percentage) of all file packets currently being assembled, then
        prints it and moves the cursor back above the table so the next
        call refreshes it in place.
        """
        # Initialize columns (first element of each list is the column header)
        c1 = ['File name']
        c2 = ['Size']
        c3 = ['Sender']
        c4 = ['Complete']

        # Populate columns with file transmission status data
        for i, p in enumerate(self.packet_list):
            if p.type == FILE and len(p.assembly_pt_list) > 0:
                c1.append(p.name)
                c2.append(p.size)
                c3.append(p.contact.nick)
                # Completion = received assembly packets / expected packets
                c4.append(f"{len(p.assembly_pt_list) / p.packets * 100:.2f}%")

        # Only the header row exists: no transfer is in progress.
        if not len(c1) > 1:
            m_print("No file transmissions currently in progress.", bold=True, head=1, tail=1)
            print_on_previous_line(reps=3, delay=0.1)
            return None

        # Calculate column widths (widest cell per column plus fixed indent)
        c1w, c2w, c3w, c4w = [max(len(v) for v in column) + FILE_TRANSFER_INDENT for column in [c1, c2, c3, c4]]

        # Align columns by adding whitespace between fields of each line
        lines = [f'{f1:{c1w}}{f2:{c2w}}{f3:{c3w}}{f4:{c4w}}' for f1, f2, f3, f4 in zip(c1, c2, c3, c4)]

        # Add a terminal-wide line between the column names and the data
        lines.insert(1, get_terminal_width() * '─')

        # Print the file transfer list, then rewind the cursor so the
        # table can be redrawn over itself by the next call.
        print('\n' + '\n'.join(lines) + '\n')
        print_on_previous_line(reps=len(lines)+2, delay=0.1)
+
+
class WindowList(Iterable):
    """WindowList manages a list of Window objects."""

    def __init__(self,
                 settings:     'Settings',
                 contact_list: 'ContactList',
                 group_list:   'GroupList',
                 packet_list:  'PacketList'
                 ) -> None:
        """Create a new WindowList object.

        One RxWindow is pre-created for the command window, the file
        window, and every known contact and group.
        """
        self.settings     = settings
        self.contact_list = contact_list
        self.group_list   = group_list
        self.packet_list  = packet_list

        self.active_win = None  # type: Optional[RxWindow]

        uids  = [WIN_UID_LOCAL, WIN_UID_FILE]
        uids += self.contact_list.get_list_of_pub_keys()
        uids += self.group_list.get_list_of_group_ids()
        self.windows = [RxWindow(uid, self.contact_list, self.group_list,
                                 self.settings, self.packet_list)
                        for uid in uids]

        if self.contact_list.has_local_contact():
            self.set_active_rx_window(WIN_UID_LOCAL)

    def __iter__(self) -> Generator:
        """Iterate over window list."""
        for window in self.windows:
            yield window

    def __len__(self) -> int:
        """Return number of windows in the window list."""
        return len(self.windows)

    def has_window(self, uid: bytes) -> bool:
        """Return True if a window with matching UID exists, else False."""
        for window in self.windows:
            if window.uid == uid:
                return True
        return False

    def remove_window(self, uid: bytes) -> None:
        """Remove window based on its UID."""
        for window in self.windows:
            if window.uid == uid:
                self.windows.remove(window)
                break

    def get_group_windows(self) -> List[RxWindow]:
        """Return list of group windows."""
        return [window for window in self.windows if window.type == WIN_TYPE_GROUP]

    def get_window(self, uid: bytes) -> 'RxWindow':
        """Return window that matches the specified UID.

        Create window if it does not exist.
        """
        if not self.has_window(uid):
            new_window = RxWindow(uid, self.contact_list, self.group_list,
                                  self.settings, self.packet_list)
            self.windows.append(new_window)

        return next(window for window in self.windows if window.uid == uid)

    def get_local_window(self) -> 'RxWindow':
        """Return command window."""
        return self.get_window(WIN_UID_LOCAL)

    def set_active_rx_window(self, uid: bytes) -> None:
        """Select new active window and redraw it."""
        if self.active_win is not None:
            self.active_win.is_active = False

        self.active_win           = self.get_window(uid)
        self.active_win.is_active = True

        if self.active_win.uid == WIN_UID_FILE:
            self.active_win.redraw_file_win()
        else:
            self.active_win.redraw()
diff --git a/src/rx/__init__.py b/src/relay/__init__.py
old mode 100755
new mode 100644
similarity index 100%
rename from src/rx/__init__.py
rename to src/relay/__init__.py
diff --git a/src/relay/client.py b/src/relay/client.py
new file mode 100644
index 0000000..a707ac8
--- /dev/null
+++ b/src/relay/client.py
@@ -0,0 +1,355 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import base64
+import hashlib
+import time
+import typing
+
+from datetime import datetime
+from multiprocessing import Process, Queue
+from typing import Dict, List
+
+import requests
+
+from cryptography.hazmat.primitives.asymmetric.x448 import X448PublicKey, X448PrivateKey
+
+from src.common.encoding import b58encode, int_to_bytes, onion_address_to_pub_key, pub_key_to_onion_address
+from src.common.encoding import pub_key_to_short_address
+from src.common.misc import ignored, separate_header, split_byte_string, validate_onion_addr
+from src.common.output import m_print, print_key, rp_print
+from src.common.statics import *
+
+if typing.TYPE_CHECKING:
+ from src.common.gateway import Gateway
+ from requests.sessions import Session
+ QueueDict = Dict[bytes, Queue]
+
+
def client_manager(queues:                'QueueDict',
                   gateway:               'Gateway',
                   url_token_private_key: X448PrivateKey,
                   unittest:              bool = False
                   ) -> None:
    """Manage `client` processes.

    Spawns one `client` process per added contact and terminates the
    process again when the contact is removed. Add/remove commands
    arrive from CONTACT_KEY_QUEUE as
    (command, serialized public keys, is_existing_contact) tuples.
    """
    proc_dict = dict()  # type: Dict[bytes, Process]  # One client process per Onion Service public key

    # Wait for Tor port from `onion_service` process.
    while True:
        with ignored(EOFError, KeyboardInterrupt):
            while queues[TOR_DATA_QUEUE].qsize() == 0:
                time.sleep(0.1)
            tor_port, onion_addr_user = queues[TOR_DATA_QUEUE].get()
            break

    while True:
        with ignored(EOFError, KeyboardInterrupt):

            # Block until the next add/remove command arrives.
            while queues[CONTACT_KEY_QUEUE].qsize() == 0:
                time.sleep(0.1)

            command, ser_public_keys, is_existing_contact = queues[CONTACT_KEY_QUEUE].get()

            onion_pub_keys = split_byte_string(ser_public_keys, ONION_SERVICE_PUBLIC_KEY_LENGTH)

            if command == RP_ADD_CONTACT_HEADER:
                for onion_pub_key in onion_pub_keys:
                    if onion_pub_key not in proc_dict:
                        # The user's own address is passed only for new contacts so their
                        # client sends a contact request to the peer.
                        # NOTE(review): this rebinds `onion_addr_user` to '' permanently once
                        # an existing contact is processed — confirm later new contacts
                        # are still expected to receive the address.
                        onion_addr_user = '' if is_existing_contact else onion_addr_user
                        proc_dict[onion_pub_key] = Process(target=client, args=(onion_pub_key, queues,
                                                                                url_token_private_key, tor_port,
                                                                                gateway, onion_addr_user))
                        proc_dict[onion_pub_key].start()

            elif command == RP_REMOVE_CONTACT_HEADER:
                for onion_pub_key in onion_pub_keys:
                    if onion_pub_key in proc_dict:
                        process = proc_dict[onion_pub_key]  # type: Process
                        process.terminate()
                        proc_dict.pop(onion_pub_key)
                        rp_print(f"Removed {pub_key_to_short_address(onion_pub_key)}", bold=True)

            # Allow tests to stop the otherwise infinite loop.
            if unittest and queues[UNITTEST_QUEUE].qsize() != 0:
                break
+
+
def client(onion_pub_key:         bytes,
           queues:                'QueueDict',
           url_token_private_key: X448PrivateKey,
           tor_port:              str,
           gateway:               'Gateway',
           onion_addr_user:       str,
           unittest:              bool = False
           ) -> None:
    """Load packets from contact's Onion Service.

    Polls the contact's Onion Service over a Tor SOCKS proxy: first for
    their URL token public key (which doubles as an online/offline
    indicator), then — once a URL token has been derived — for actual
    TFC data via `get_data_loop`.
    """
    url_token   = ''
    cached_pk   = ''                                      # Last URL token public key seen from the contact
    short_addr  = pub_key_to_short_address(onion_pub_key)
    onion_addr  = pub_key_to_onion_address(onion_pub_key)
    check_delay = RELAY_CLIENT_MIN_DELAY                  # Poll interval; doubles while the contact is unreachable
    is_online   = False

    session         = requests.session()
    session.proxies = {'http':  f'socks5h://127.0.0.1:{tor_port}',
                       'https': f'socks5h://127.0.0.1:{tor_port}'}

    rp_print(f"Connecting to {short_addr}...", bold=True)

    # When Transmitter Program sends contact under UNENCRYPTED_ADD_EXISTING_CONTACT, this function
    # receives user's own Onion address: That way it knows to request the contact to add them:
    if onion_addr_user:
        while True:
            try:
                reply = session.get(f'http://{onion_addr}.onion/contact_request/{onion_addr_user}', timeout=45).text
                if reply == "OK":
                    break
            except requests.exceptions.RequestException:
                # Contact unreachable; retry after a short delay.
                time.sleep(RELAY_CLIENT_MIN_DELAY)

    while True:
        with ignored(EOFError, KeyboardInterrupt):
            time.sleep(check_delay)

            # Obtain URL token
            # ----------------

            # Load URL token public key from contact's Onion Service root domain
            try:
                url_token_public_key_hex = session.get(f'http://{onion_addr}.onion/', timeout=45).text
            except requests.exceptions.RequestException:
                url_token_public_key_hex = ''

            # Manage online status of contact based on availability of URL token's public key
            if url_token_public_key_hex == '':
                # Unreachable: back off exponentially up to RELAY_CLIENT_MAX_DELAY, and
                # announce offline status once the delay exceeds the threshold.
                if check_delay < RELAY_CLIENT_MAX_DELAY:
                    check_delay *= 2
                if check_delay > CLIENT_OFFLINE_THRESHOLD and is_online:
                    is_online = False
                    rp_print(f"{short_addr} is now offline", bold=True)
                continue
            else:
                # Reachable: reset the poll interval and announce online status once.
                check_delay = RELAY_CLIENT_MIN_DELAY
                if not is_online:
                    is_online = True
                    rp_print(f"{short_addr} is now online", bold=True)

            # When contact's URL token public key changes, update URL token
            if url_token_public_key_hex != cached_pk:
                try:
                    public_key = bytes.fromhex(url_token_public_key_hex)
                    # Reject keys of wrong length and the all-zero key.
                    assert len(public_key) == TFC_PUBLIC_KEY_LENGTH
                    assert public_key != bytes(TFC_PUBLIC_KEY_LENGTH)

                    # URL token = BLAKE2b hash of the X448 shared secret.
                    shared_secret = url_token_private_key.exchange(X448PublicKey.from_public_bytes(public_key))
                    url_token     = hashlib.blake2b(shared_secret, digest_size=SYMMETRIC_KEY_LENGTH).hexdigest()
                except (AssertionError, TypeError, ValueError):
                    continue  # Invalid key data: keep polling without updating the token.

                cached_pk = url_token_public_key_hex                     # Update client's URL token public key
                queues[URL_TOKEN_QUEUE].put((onion_pub_key, url_token))  # Update Flask server's URL token for contact

            # Load TFC data with URL token
            # ----------------------------

            get_data_loop(onion_addr, url_token, short_addr, onion_pub_key, queues, session, gateway)

            if unittest:
                break
+
+
def get_data_loop(onion_addr:    str,
                  url_token:     str,
                  short_addr:    str,
                  onion_pub_key: bytes,
                  queues:        'QueueDict',
                  session:       'Session',
                  gateway:       'Gateway') -> None:
    """Load TFC data from contact's Onion Service using valid URL token.

    Alternates between the contact's `/files` and `/messages` endpoints
    under the secret URL token path. Returns (to the caller's polling
    loop) when the contact becomes unreachable.
    """
    while True:
        try:
            # See if a file is available
            try:
                file_data = session.get(f'http://{onion_addr}.onion/{url_token}/files', stream=True).content
                if file_data:
                    # Timestamp is encoded with centisecond precision (microseconds truncated).
                    ts       = datetime.now()
                    ts_bytes = int_to_bytes(int(ts.strftime('%Y%m%d%H%M%S%f')[:-4]))
                    packet   = FILE_DATAGRAM_HEADER + ts_bytes + onion_pub_key + ORIGIN_CONTACT_HEADER + file_data
                    queues[DST_MESSAGE_QUEUE].put(packet)
                    rp_print(f"File from contact {short_addr}", ts)

            except requests.exceptions.RequestException:
                pass  # File fetch failure is non-fatal; still try the message endpoint.

            # See if messages are available
            try:
                r = session.get(f'http://{onion_addr}.onion/{url_token}/messages', stream=True)
            except requests.exceptions.RequestException:
                return None

            for line in r.iter_lines():  # Iterates over newline-separated datagrams

                if not line:
                    continue

                try:
                    header, payload = separate_header(line, DATAGRAM_HEADER_LENGTH)  # type: bytes, bytes
                    payload_bytes   = base64.b85decode(payload)
                except (UnicodeError, ValueError):
                    continue  # Skip datagrams with malformed Base85 payloads.

                ts       = datetime.now()
                ts_bytes = int_to_bytes(int(ts.strftime('%Y%m%d%H%M%S%f')[:-4]))

                if header == PUBLIC_KEY_DATAGRAM_HEADER:
                    # Public keys are displayed to the user, not forwarded to Destination Computer.
                    if len(payload_bytes) == TFC_PUBLIC_KEY_LENGTH:
                        msg = f"Received public key from {short_addr} at {ts.strftime('%b %d - %H:%M:%S.%f')[:-4]}:"
                        print_key(msg, payload_bytes, gateway.settings, public_key=True)

                elif header == MESSAGE_DATAGRAM_HEADER:
                    queues[DST_MESSAGE_QUEUE].put(header + ts_bytes + onion_pub_key
                                                  + ORIGIN_CONTACT_HEADER + payload_bytes)
                    rp_print(f"Message from contact {short_addr}", ts)

                elif header in [GROUP_MSG_INVITE_HEADER, GROUP_MSG_JOIN_HEADER,
                                GROUP_MSG_MEMBER_ADD_HEADER, GROUP_MSG_MEMBER_REM_HEADER,
                                GROUP_MSG_EXIT_GROUP_HEADER]:
                    # Group management messages are handled locally by `g_msg_manager`.
                    queues[GROUP_MSG_QUEUE].put((header, payload_bytes, short_addr))

                else:
                    rp_print(f"Received invalid packet from {short_addr}", ts, bold=True)


        except requests.exceptions.RequestException:
            break
+
+
def g_msg_manager(queues: 'QueueDict', unittest: bool = False) -> None:
    """Show group management messages according to contact list state.

    This process keeps track of existing contacts for whom there's a
    page_loader process. When a group management message from a contact
    is received, existing contacts are displayed under "known contacts",
    and non-existing contacts are displayed under "unknown contacts".
    """
    existing_contacts = []  # type: List[bytes]

    while True:
        with ignored(EOFError, KeyboardInterrupt):
            while queues[GROUP_MSG_QUEUE].qsize() == 0:
                time.sleep(0.01)

            header, payload, trunc_addr = queues[GROUP_MSG_QUEUE].get()
            group_id, data              = separate_header(payload, GROUP_ID_LENGTH)

            # Discard truncated payloads that cannot contain a full group ID.
            if len(group_id) != GROUP_ID_LENGTH:
                continue
            group_id_hr = b58encode(group_id)

            # Update list of existing contacts (drain all pending add/remove commands first)
            while queues[GROUP_MGMT_QUEUE].qsize() > 0:
                command, ser_onion_pub_keys = queues[GROUP_MGMT_QUEUE].get()
                onion_pub_key_list          = split_byte_string(ser_onion_pub_keys, ONION_SERVICE_PUBLIC_KEY_LENGTH)

                if command == RP_ADD_CONTACT_HEADER:
                    existing_contacts = list(set(existing_contacts) | set(onion_pub_key_list))
                elif command == RP_REMOVE_CONTACT_HEADER:
                    existing_contacts = list(set(existing_contacts) - set(onion_pub_key_list))

            # Handle group management messages
            if header in [GROUP_MSG_INVITE_HEADER, GROUP_MSG_JOIN_HEADER,
                          GROUP_MSG_MEMBER_ADD_HEADER, GROUP_MSG_MEMBER_REM_HEADER]:

                pub_keys       = split_byte_string(data, ONION_SERVICE_PUBLIC_KEY_LENGTH)
                pub_key_length = ONION_SERVICE_PUBLIC_KEY_LENGTH

                # Partition valid-length member keys into known and unknown contacts.
                members = [k                                    for k in pub_keys if len(k) == pub_key_length]
                known   = [f"  * {pub_key_to_onion_address(m)}" for m in members  if m in     existing_contacts]
                unknown = [f"  * {pub_key_to_onion_address(m)}" for m in members  if m not in existing_contacts]

                line_list = []
                if known:
                    line_list.extend(["Known contacts"] + known)
                if unknown:
                    line_list.extend(["Unknown contacts"] + unknown)

                if header in [GROUP_MSG_INVITE_HEADER, GROUP_MSG_JOIN_HEADER]:
                    action  = 'invited you to' if header == GROUP_MSG_INVITE_HEADER else 'joined'
                    postfix = ' with' if members else ''
                    m_print([f"{trunc_addr} has {action} group {group_id_hr}{postfix}"] + line_list, box=True)

                elif header in [GROUP_MSG_MEMBER_ADD_HEADER, GROUP_MSG_MEMBER_REM_HEADER]:
                    if members:
                        action, p = ("added", "to") if header == GROUP_MSG_MEMBER_ADD_HEADER else ("removed", "from")
                        m_print([f"{trunc_addr} has {action} following members {p} group {group_id_hr}"]
                                + line_list, box=True)

            elif header == GROUP_MSG_EXIT_GROUP_HEADER:
                m_print([f"{trunc_addr} has left group {group_id_hr}",
                         '', "Warning",
                         "Unless you remove the contact from the group, they",
                         "can still read messages you send to the group."], box=True)

            # Allow tests to stop the otherwise infinite loop.
            if unittest and queues[UNITTEST_QUEUE].qsize() != 0:
                break
+
+
def c_req_manager(queues: 'QueueDict', unittest: bool = False) -> None:
    """Manage incoming contact requests.

    Displays a notification for each unknown account that sends a
    contact request, at most once per account, and only while request
    display is enabled via C_REQ_MGR_QUEUE (see `manage_contact_req`).
    """
    existing_contacts = []  # type: List[bytes]
    contact_requests  = []  # type: List[bytes]  # Accounts already shown to the user

    packet_queue  = queues[CONTACT_REQ_QUEUE]
    contact_queue = queues[F_REQ_MGMT_QUEUE]
    setting_queue = queues[C_REQ_MGR_QUEUE]
    show_requests = True

    while True:
        with ignored(EOFError, KeyboardInterrupt):
            while packet_queue.qsize() == 0:
                time.sleep(0.1)
            purp_onion_address = packet_queue.get()

            # Apply the most recent show/hide setting.
            while setting_queue.qsize() != 0:
                show_requests = setting_queue.get()

            # Update list of existing contacts
            while contact_queue.qsize() > 0:
                command, ser_onion_pub_keys = contact_queue.get()
                onion_pub_key_list          = split_byte_string(ser_onion_pub_keys, ONION_SERVICE_PUBLIC_KEY_LENGTH)

                if command == RP_ADD_CONTACT_HEADER:
                    existing_contacts = list(set(existing_contacts) | set(onion_pub_key_list))
                elif command == RP_REMOVE_CONTACT_HEADER:
                    existing_contacts = list(set(existing_contacts) - set(onion_pub_key_list))

            # Empty string from the validator means the address is well-formed.
            if validate_onion_addr(purp_onion_address) == '':
                onion_pub_key = onion_address_to_pub_key(purp_onion_address)
                if onion_pub_key in existing_contacts:
                    continue  # Already a contact; nothing to announce.
                if onion_pub_key in contact_requests:
                    continue  # Request already displayed once.

                if show_requests:
                    m_print(["New contact request from an unknown TFC account:", purp_onion_address], box=True)
                contact_requests.append(onion_pub_key)

            # Allow tests to stop the otherwise infinite loop.
            if unittest and queues[UNITTEST_QUEUE].qsize() != 0:
                break
diff --git a/src/relay/commands.py b/src/relay/commands.py
new file mode 100644
index 0000000..7f34f1e
--- /dev/null
+++ b/src/relay/commands.py
@@ -0,0 +1,231 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import os
+import serial
+import sys
+import time
+import typing
+
+from typing import Any, Dict
+
+from src.common.encoding import bytes_to_bool, bytes_to_int
+from src.common.exceptions import FunctionReturn
+from src.common.misc import ignored, separate_header, separate_headers, split_byte_string
+from src.common.output import clear_screen, m_print
+from src.common.statics import *
+
+if typing.TYPE_CHECKING:
+ from multiprocessing import Queue
+ from src.common.gateway import Gateway
+ QueueDict = Dict[bytes, Queue]
+
def relay_command(queues:   'QueueDict',
                  gateway:  'Gateway',
                  stdin_fd: int,
                  unittest: bool = False
                  ) -> None:
    """Process Relay Program commands.

    Reads commands forwarded by Source Computer from SRC_TO_RELAY_QUEUE
    and dispatches each to `process_command`. `stdin_fd` is re-opened
    because this runs as a separate process without inherited stdin.
    """
    sys.stdin      = os.fdopen(stdin_fd)
    queue_from_src = queues[SRC_TO_RELAY_QUEUE]

    while True:
        # FunctionReturn from a handler aborts that command but keeps the loop alive.
        with ignored(EOFError, FunctionReturn, KeyboardInterrupt):
            while queue_from_src.qsize() == 0:
                time.sleep(0.01)

            command = queue_from_src.get()
            process_command(command, gateway, queues)

            if unittest:
                break
+
+
def process_command(command: bytes,
                    gateway: 'Gateway',
                    queues:  'QueueDict'
                    ) -> None:
    """Select and run the handler for a received Relay Program command.

    Raises FunctionReturn if the command header is unknown.
    """
    header, command = separate_header(command, UNENCRYPTED_COMMAND_HEADER_LENGTH)

    #           Keyword                           Handler             Parameters
    #           ------------------------------------------------------------------------------
    dispatch = {UNENCRYPTED_SCREEN_CLEAR:         (clear_windows,      gateway),
                UNENCRYPTED_SCREEN_RESET:         (reset_windows,      gateway),
                UNENCRYPTED_EXIT_COMMAND:         (exit_tfc,           gateway, queues),
                UNENCRYPTED_WIPE_COMMAND:         (wipe,               gateway, queues),
                UNENCRYPTED_EC_RATIO:             (change_ec_ratio,    command, gateway),
                UNENCRYPTED_BAUDRATE:             (change_baudrate,    command, gateway),
                UNENCRYPTED_MANAGE_CONTACT_REQ:   (manage_contact_req, command, queues),
                UNENCRYPTED_ADD_NEW_CONTACT:      (add_contact,        command, False, queues),
                UNENCRYPTED_ADD_EXISTING_CONTACT: (add_contact,        command, True,  queues),
                UNENCRYPTED_REM_CONTACT:          (remove_contact,     command, queues),
                UNENCRYPTED_ONION_SERVICE_DATA:   (add_onion_data,     command, queues)
                }  # type: Dict[bytes, Any]

    if header not in dispatch:
        raise FunctionReturn("Error: Received an invalid command.")

    # First element is the handler, the rest are its arguments.
    handler, *arguments = dispatch[header]
    handler(*arguments)
+
+
def race_condition_delay(gateway: 'Gateway') -> None:
    """Sleep briefly so the Receiver Program processes its copy of the command first.

    The delay is applied only in local testing mode, where both programs
    run on the same machine and would otherwise race.
    """
    if not gateway.settings.local_testing_mode:
        return
    time.sleep(LOCAL_TESTING_PACKET_DELAY)
    time.sleep(gateway.settings.data_diode_sockets * 1.0)
+
+
def clear_windows(gateway: 'Gateway') -> None:
    """Clear Relay Program screen.

    Delays first so the Receiver Program's screen clears at the same time.
    """
    race_condition_delay(gateway)
    clear_screen()
+
+
def reset_windows(gateway: 'Gateway') -> None:
    """Reset Relay Program screen.

    Unlike `clear_windows`, this resets the terminal itself, which also
    wipes the scrollback buffer.
    """
    race_condition_delay(gateway)
    os.system(RESET)
+
+
def exit_tfc(gateway: 'Gateway', queues: 'QueueDict') -> None:
    """Exit TFC.

    Puts the EXIT command into ONION_CLOSE_QUEUE, which is read by
        relay.onion.onion_service()
    """
    race_condition_delay(gateway)
    queues[ONION_CLOSE_QUEUE].put(EXIT)
+
+
def wipe(gateway: 'Gateway', queues: 'QueueDict') -> None:
    """Reset terminal, wipe all user data and power off the system.

    No effective RAM overwriting tool currently exists, so as long as Source and
    Destination Computers use FDE and DDR3 memory, recovery of user data becomes
    impossible very fast:
        https://www1.cs.fau.de/filepool/projects/coldboot/fares_coldboot.pdf

    Puts the WIPE command into ONION_CLOSE_QUEUE, which is read by
        relay.onion.onion_service()
    """
    # Reset the terminal before the delay so on-screen data disappears immediately.
    os.system(RESET)
    race_condition_delay(gateway)
    queues[ONION_CLOSE_QUEUE].put(WIPE)
+
+
def change_ec_ratio(command: bytes, gateway: 'Gateway') -> None:
    """Change Relay Program's Reed-Solomon error correction ratio.

    `command` contains the new ratio as an ASCII-encoded integer.
    Raises FunctionReturn when the value is not a non-negative integer
    within the allowed range.
    """
    try:
        ratio = int(command)
        if not 0 <= ratio <= MAX_INT:
            raise ValueError
    except ValueError:
        raise FunctionReturn("Error: Received invalid EC ratio value from Transmitter Program.")

    m_print("Error correction ratio will change on restart.", head=1, tail=1)

    # Persist the new value; it takes effect the next time the gateway is opened.
    gateway.settings.serial_error_correction = ratio
    gateway.settings.store_settings()
+
+
def change_baudrate(command: bytes, gateway: 'Gateway') -> None:
    """Change Relay Program's serial interface baud rate setting.

    `command` contains the new rate as an ASCII-encoded integer. Raises
    FunctionReturn when the value is not one of PySerial's standard
    baud rates.
    """
    try:
        rate = int(command)
        if rate not in serial.Serial.BAUDRATES:  # Only standard PySerial rates are accepted
            raise ValueError
    except ValueError:
        raise FunctionReturn("Error: Received invalid baud rate value from Transmitter Program.")

    m_print("Baud rate will change on restart.", head=1, tail=1)

    # Persist the new value; it takes effect the next time the gateway is opened.
    gateway.settings.serial_baudrate = rate
    gateway.settings.store_settings()
+
+
def manage_contact_req(command: bytes,
                       queues:  'QueueDict',
                       notify:  bool = True) -> None:
    """Control whether contact requests are accepted.

    `command` contains an encoded boolean. The setting is forwarded via
    C_REQ_MGR_QUEUE, which is read by
        relay.client.c_req_manager()
    """
    enabled = bytes_to_bool(command)
    if notify:
        # Fixed notification grammar ("are have been" -> "have been").
        m_print(f"Contact requests have been {('enabled' if enabled else 'disabled')}.", head=1, tail=1)
    queues[C_REQ_MGR_QUEUE].put(enabled)
+
+
def add_contact(command:  bytes,
                existing: bool,
                queues:   'QueueDict'
                ) -> None:
    """Add clients to Relay Program.

    The queues are read by
        relay.client.client_manager() (CONTACT_KEY_QUEUE),
        relay.client.g_msg_manager()  (GROUP_MGMT_QUEUE) and
        relay.client.c_req_manager()  (F_REQ_MGMT_QUEUE)
    """
    # Only the client manager needs to know whether the contact already existed.
    queues[CONTACT_KEY_QUEUE].put((RP_ADD_CONTACT_HEADER, command, existing))
    for queue_key in (GROUP_MGMT_QUEUE, F_REQ_MGMT_QUEUE):
        queues[queue_key].put((RP_ADD_CONTACT_HEADER, command))
+
+
def remove_contact(command: bytes, queues: 'QueueDict') -> None:
    """Remove clients from Relay Program.

    The queues are read by
        relay.client.client_manager() (CONTACT_KEY_QUEUE),
        relay.client.g_msg_manager()  (GROUP_MGMT_QUEUE) and
        relay.client.c_req_manager()  (F_REQ_MGMT_QUEUE)
    """
    # The third field (is_existing_contact) is irrelevant for removals.
    queues[CONTACT_KEY_QUEUE].put((RP_REMOVE_CONTACT_HEADER, command, False))
    for queue_key in (GROUP_MGMT_QUEUE, F_REQ_MGMT_QUEUE):
        queues[queue_key].put((RP_REMOVE_CONTACT_HEADER, command))
+
+
def add_onion_data(command: bytes, queues: 'QueueDict') -> None:
    """Add Onion Service data.

    Separate onion service private key and public keys for
    pending/existing contacts and add them as contacts.

    The ONION_KEY_QUEUE is read by
        relay.onion.onion_service()
    """
    # Payload layout: private key | confirmation code | allow-requests flag
    #                 | number of pending contacts | serialized public keys.
    os_private_key, confirmation_code, allow_req_byte, no_pending_bytes, ser_pub_keys \
        = separate_headers(command, [ONION_SERVICE_PRIVATE_KEY_LENGTH, CONFIRM_CODE_LENGTH,
                                     ENCODED_BOOLEAN_LENGTH, ENCODED_INTEGER_LENGTH])

    # The first `no_pending` keys belong to contacts that have not yet added the user back.
    no_pending           = bytes_to_int(no_pending_bytes)
    public_key_list      = split_byte_string(ser_pub_keys, ONION_SERVICE_PUBLIC_KEY_LENGTH)
    pending_public_keys  = public_key_list[:no_pending]
    existing_public_keys = public_key_list[no_pending:]

    for onion_pub_key in pending_public_keys:
        add_contact(onion_pub_key, False, queues)
    for onion_pub_key in existing_public_keys:
        add_contact(onion_pub_key, True, queues)

    # Apply the contact request setting silently, then hand the key material
    # to the onion_service process.
    manage_contact_req(allow_req_byte, queues, notify=False)
    queues[ONION_KEY_QUEUE].put((os_private_key, confirmation_code))
diff --git a/src/relay/onion.py b/src/relay/onion.py
new file mode 100644
index 0000000..b87e85f
--- /dev/null
+++ b/src/relay/onion.py
@@ -0,0 +1,230 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import base64
+import hashlib
+import os
+import random
+import shlex
+import socket
+import subprocess
+import tempfile
+import time
+
+from multiprocessing import Queue
+from typing import Any, Dict
+
+import nacl.signing
+
+import stem.control
+import stem.process
+
+from src.common.encoding import pub_key_to_onion_address
+from src.common.exceptions import CriticalError
+from src.common.output import m_print, rp_print
+from src.common.statics import *
+
+
def get_available_port(min_port: int, max_port: int) -> str:
    """Find a random available localhost port within the given range.

    Binds a throwaway socket to random ports until one succeeds, then
    returns that port number. The port is released when the socket
    closes, so another process could in principle claim it before the
    caller does (TOCTOU); callers tolerate that by retrying.
    """
    with socket.socket() as temp_sock:
        while True:
            try:
                temp_sock.bind(('127.0.0.1', random.randint(min_port, max_port)))
                break
            except OSError:
                pass  # Port is taken or not permitted; pick another one.
        _, port = temp_sock.getsockname()  # type: str, int
    # getsockname() returns the port as an int; convert it to match the
    # declared return type (callers interpolate it into proxy URLs and
    # pass it to Tor's SocksPort option as a string).
    return str(port)
+
+
class Tor(object):
    """Tor class manages the starting and stopping of Tor client."""

    def __init__(self) -> None:
        # `tor_process` is the launched Tor subprocess handle; `controller`
        # is the stem controller attached to its control socket.
        self.tor_process = None  # type: Any
        self.controller  = None  # type: Any

    def connect(self, port: str) -> None:
        """Launch Tor as a subprocess.

        Starts Tor with a throwaway data directory, attaches a stem
        controller over a Unix control socket, and blocks until Tor
        reports its bootstrap phase as 'Done'.
        """
        tor_data_directory = tempfile.TemporaryDirectory()
        tor_control_socket = os.path.join(tor_data_directory.name, 'control_socket')

        if not os.path.isfile('/usr/bin/tor'):
            raise CriticalError("Check that Tor is installed.")

        # Retry indefinitely: stem raises OSError when Tor's launch times out.
        while True:
            try:
                self.tor_process = stem.process.launch_tor_with_config(
                    config={'DataDirectory':   tor_data_directory.name,
                            'SocksPort':       str(port),
                            'ControlSocket':   tor_control_socket,
                            'AvoidDiskWrites': '1',
                            'Log':             'notice stdout',
                            'GeoIPFile':       '/usr/share/tor/geoip',
                            # NOTE(review): the trailing space in 'GeoIPv6File ' looks
                            # like a typo in the option name — confirm Tor accepts it.
                            'GeoIPv6File ':    '/usr/share/tor/geoip6'},
                    tor_cmd='/usr/bin/tor')
                break

            except OSError:
                pass  # Tor timed out. Try again.

        start_ts        = time.monotonic()
        self.controller = stem.control.Controller.from_socket_file(path=tor_control_socket)
        self.controller.authenticate()

        # Poll the bootstrap phase until Tor finishes connecting to the network.
        while True:
            time.sleep(0.1)

            try:
                response = self.controller.get_info("status/bootstrap-phase")
            except stem.SocketClosed:
                raise CriticalError("Tor socket closed.")

            res_parts = shlex.split(response)
            summary   = res_parts[4].split('=')[1]

            if summary == 'Done':
                tor_version = self.controller.get_version().version_str.split(' (')[0]
                rp_print(f"Setup 70% - Tor {tor_version} is now running", bold=True)
                break

            # If bootstrapping stalls for over 15 seconds, re-attach the controller.
            if time.monotonic() - start_ts > 15:
                start_ts        = time.monotonic()
                self.controller = stem.control.Controller.from_socket_file(path=tor_control_socket)
                self.controller.authenticate()

    def stop(self) -> None:
        """Stop the Tor subprocess.

        Tries a graceful terminate first, then kills the process if it
        is still running after a short grace period.
        """
        if self.tor_process:
            self.tor_process.terminate()
            time.sleep(0.1)
            if not self.tor_process.poll():
                self.tor_process.kill()
+
+
def stem_compatible_ed25519_key_from_private_key(private_key: bytes) -> str:
    """Tor's custom encoding format for v3 Onion Service private keys.

    This code is based on Tor's testing code at
    https://github.com/torproject/tor/blob/8e84968ffbf6d284e8a877ddcde6ded40b3f5681/src/test/ed25519_exts_ref.py#L48
    """
    b = 256

    def expand_private_key(sk: bytes) -> bytes:
        """Expand the seed into the (clamped scalar || PRF prefix) blob Tor expects."""
        h = hashlib.sha512(sk).digest()

        # Standard Ed25519 scalar clamping of the lower hash half, read
        # little-endian: clear bits 0-2 and 255, then set bit 254.
        a = int.from_bytes(h[:b // 8], 'little')
        a &= (1 << (b - 2)) - 8
        a |= 1 << (b - 2)

        # The upper hash half is carried over verbatim as the PRF prefix.
        prefix = h[b // 8:b // 4]
        assert len(prefix) == ONION_SERVICE_PRIVATE_KEY_LENGTH

        return a.to_bytes(b // 8, 'little') + prefix

    return base64.b64encode(expand_private_key(private_key)).decode()
+
+
def kill_background_tor() -> None:
    """Kill any open TFC-related Tor instances left open.

    Copies of Tor might stay open in cases where the user has closed the
    application from Terminator's close window ((x) button).
    """
    try:
        pids = subprocess.check_output("ps aux |grep '[t]fc/tor' | awk '{print $2}' 2>/dev/null", shell=True)
    except subprocess.SubprocessError:
        return  # Could not list processes; nothing we can do.

    for pid in pids.split(b'\n'):
        if not pid.strip():
            continue  # `split` yields an empty entry after the trailing newline.
        try:
            subprocess.Popen("kill {}".format(int(pid)), shell=True).wait()
        except ValueError:
            # Skip malformed entries instead of aborting the remaining kills
            # (the original try/except wrapped the whole loop, so one bad
            # line left later Tor instances running).
            pass
+
+
def onion_service(queues: Dict[bytes, 'Queue']) -> None:
    """Manage the Tor Onion Service and control Tor via stem.

    Waits for the Onion Service private key from Transmitter Program
    (ONION_KEY_QUEUE, filled by relay.commands.add_onion_data), launches
    Tor, publishes the user's v3 Onion Service, and then idles until an
    EXIT/WIPE command arrives via ONION_CLOSE_QUEUE.
    """
    # Clean up Tor processes left behind by a previous unclean shutdown.
    kill_background_tor()

    rp_print("Setup  0% - Waiting for Onion Service configuration...", bold=True)
    while queues[ONION_KEY_QUEUE].qsize() == 0:
        time.sleep(0.1)

    private_key, c_code = queues[ONION_KEY_QUEUE].get()  # type: bytes, bytes
    # The Ed25519 verify key doubles as the user's Onion Service public key.
    public_key_user     = bytes(nacl.signing.SigningKey(seed=private_key).verify_key)
    onion_addr_user     = pub_key_to_onion_address(public_key_user)

    try:
        rp_print("Setup 10% - Launching Tor...", bold=True)
        tor_port = get_available_port(1000, 65535)
        tor      = Tor()
        tor.connect(tor_port)
    except (EOFError, KeyboardInterrupt):
        return

    try:
        rp_print("Setup 75% - Launching Onion Service...", bold=True)
        key_data = stem_compatible_ed25519_key_from_private_key(private_key)
        # Map public port 80 of the Onion Service to the local Flask server on port 5000.
        response = tor.controller.create_ephemeral_hidden_service(ports={80: 5000},
                                                                  key_type='ED25519-V3',
                                                                  key_content=key_data,
                                                                  await_publication=True)
        rp_print("Setup 100% - Onion Service is now published.", bold=True)

        m_print(["Your TFC account is:",
                 onion_addr_user, '',
                 f"Onion Service confirmation code (to Transmitter): {c_code.hex()}"], box=True)

        # Allow the client to start looking for contacts at this point.
        queues[TOR_DATA_QUEUE].put((tor_port, onion_addr_user))

    except (KeyboardInterrupt, stem.SocketClosed):
        tor.stop()
        return

    # Idle loop: keep the service alive until shutdown is requested.
    while True:
        try:
            time.sleep(0.1)

            if queues[ONION_KEY_QUEUE].qsize() > 0:
                queues[ONION_KEY_QUEUE].get()  # Discard re-sent private keys

            if queues[ONION_CLOSE_QUEUE].qsize() > 0:
                # Forward EXIT/WIPE to the main process, then tear down the
                # hidden service and the Tor subprocess.
                command = queues[ONION_CLOSE_QUEUE].get()
                queues[EXIT_QUEUE].put(command)
                tor.controller.remove_hidden_service(response.service_id)
                tor.stop()
                break

        except (EOFError, KeyboardInterrupt):
            pass
        except stem.SocketClosed:
            # Tor died underneath us: clean up and exit the loop.
            tor.controller.remove_hidden_service(response.service_id)
            tor.stop()
            break
diff --git a/src/relay/server.py b/src/relay/server.py
new file mode 100644
index 0000000..51e716c
--- /dev/null
+++ b/src/relay/server.py
@@ -0,0 +1,176 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import hmac
+import logging
+import threading
+import time
+import typing
+
+from io import BytesIO
+from multiprocessing import Queue
+from typing import Any, Dict, List, Optional
+
+from flask import Flask, send_file
+
+from src.common.statics import *
+
+if typing.TYPE_CHECKING:
+ QueueDict = Dict[bytes, Queue]
+
+
+def flask_server(queues: 'QueueDict',
+ url_token_public_key: str,
+ unittest: bool = False
+ ) -> Optional[Flask]:
+ """Run Flask web server for outgoing messages.
+
+ This process runs Flask web server from where clients of contacts
+ can load messages sent to them. Making such requests requires the
+ clients know the secret path, that is, the X448 shared secret
+ derived from Relay Program's private key, and the public key
+ obtained from the Onion Service of the contact.
+
+ Note that this private key does not handle E2EE of messages, it only
+ manages E2EE sessions between Relay Programs of conversing parties.
+ It prevents anyone without the Relay Program's ephemeral private key
+ from requesting ciphertexts from the user.
+
+ The connection between the requests client and Flask server is
+ end-to-end encrypted: No Tor relay between them can see the content
+ of the traffic; With Onion Services, there is no exit node. The
+ connection is strongly authenticated by the Onion Service domain
+ name, that is, the TFC account pinned by the user.
+ """
+ app = Flask(__name__)
+ pub_key_dict = dict() # type: Dict[str, bytes]
+ message_dict = dict() # type: Dict[bytes, List[str]]
+ file_dict = dict() # type: Dict[bytes, List[bytes]]
+
+ class HideRunTime(object):
+ """Context manager that hides function runtime.
+
+ By joining a thread that sleeps for a longer time than it takes
+ for the function to run, this context manager hides the actual
+ running time of the function.
+ """
+
+ def __init__(self, length: float = 0.0) -> None:
+ self.length = length
+
+ def __enter__(self) -> None:
+ self.timer = threading.Thread(target=time.sleep, args=(self.length,))
+ self.timer.start()
+
+ def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
+ self.timer.join()
+
+ def validate_url_token(purp_url_token: str) -> bool:
+ """Validate URL token using constant time comparison."""
+
+ # This context manager hides the duration of URL_TOKEN_QUEUE check as
+ # well as the number of accounts in pub_key_dict when iterating over keys.
+ with HideRunTime(0.01):
+
+ # Check if the client has derived new URL token for contact(s).
+ # If yes, add the url tokens to pub_key_dict to have up-to-date
+ # information about whether the purported URL tokens are valid.
+ while queues[URL_TOKEN_QUEUE].qsize() > 0:
+ onion_pub_key, url_token = queues[URL_TOKEN_QUEUE].get()
+
+ # Delete old URL token for contact when their URL token pub key changes.
+ for ut in list(pub_key_dict.keys()):
+ if pub_key_dict[ut] == onion_pub_key:
+ del pub_key_dict[ut]
+
+ pub_key_dict[url_token] = onion_pub_key
+
+ # Here we OR the result of constant time comparison with initial
+ # False. ORing is also a constant time operation that returns
+ # True if a matching shared secret was found in pub_key_dict.
+ valid_url_token = False
+ for url_token in pub_key_dict:
+ valid_url_token |= hmac.compare_digest(purp_url_token, url_token)
+
+ return valid_url_token
+
+ @app.route('/')
+ def index() -> str:
+ """Return the URL token public key to contacts that know the .onion address."""
+ return url_token_public_key
+
+    @app.route('/contact_request/<purp_onion_address>')
+ def contact_request(purp_onion_address: str) -> str:
+ """Pass contact request to `c_req_manager`."""
+ queues[CONTACT_REQ_QUEUE].put(purp_onion_address)
+ return 'OK'
+
+    @app.route('/<purp_url_token>/files/')
+ def file_get(purp_url_token: str) -> Any:
+ """Validate the URL token and return a queued file."""
+ if not validate_url_token(purp_url_token):
+ return ''
+
+ identified_onion_pub_key = pub_key_dict[purp_url_token]
+
+ while queues[F_TO_FLASK_QUEUE].qsize() != 0:
+ packet, onion_pub_key = queues[F_TO_FLASK_QUEUE].get()
+ file_dict.setdefault(onion_pub_key, []).append(packet)
+
+ if identified_onion_pub_key in file_dict and file_dict[identified_onion_pub_key]:
+ mem = BytesIO()
+ mem.write(file_dict[identified_onion_pub_key].pop(0))
+ mem.seek(0)
+ return send_file(mem, mimetype='application/octet-stream')
+ else:
+ return ''
+
+    @app.route('/<purp_url_token>/messages/')
+ def contacts_url(purp_url_token: str) -> str:
+ """Validate the URL token and return queued messages."""
+ if not validate_url_token(purp_url_token):
+ return ''
+
+ identified_onion_pub_key = pub_key_dict[purp_url_token]
+
+ # Load outgoing messages for all contacts,
+ # return the oldest message for contact
+ while queues[M_TO_FLASK_QUEUE].qsize() != 0:
+ packet, onion_pub_key = queues[M_TO_FLASK_QUEUE].get()
+ message_dict.setdefault(onion_pub_key, []).append(packet)
+
+ if identified_onion_pub_key in message_dict and message_dict[identified_onion_pub_key]:
+ packets = '\n'.join(message_dict[identified_onion_pub_key]) # All messages for contact
+ message_dict[identified_onion_pub_key] = []
+ return packets
+ else:
+ return ''
+
+ # --------------------------------------------------------------------------
+
+ log = logging.getLogger('werkzeug')
+ log.setLevel(logging.ERROR)
+
+ if unittest:
+ return app
+ else: # not unittest
+ app.run()
+ return None
diff --git a/src/relay/tcb.py b/src/relay/tcb.py
new file mode 100644
index 0000000..06cb5f8
--- /dev/null
+++ b/src/relay/tcb.py
@@ -0,0 +1,197 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import time
+import typing
+
+from typing import Dict, Union
+
+from src.common.encoding import bytes_to_int, pub_key_to_short_address
+from src.common.encoding import int_to_bytes, b85encode
+from src.common.exceptions import FunctionReturn
+from src.common.misc import ignored, separate_header, split_byte_string
+from src.common.output import rp_print
+from src.common.statics import *
+
+if typing.TYPE_CHECKING:
+ from datetime import datetime
+ from multiprocessing import Queue
+ from src.common.gateway import Gateway
+ QueueDict = Dict[bytes, Queue]
+
+
+def queue_to_flask(packet: Union[bytes, str],
+ onion_pub_key: bytes,
+ flask_queue: 'Queue',
+ ts: 'datetime',
+ header: bytes
+ ) -> None:
+ """Put packet to flask queue and print message."""
+ p_type = {MESSAGE_DATAGRAM_HEADER: 'Message ',
+ PUBLIC_KEY_DATAGRAM_HEADER: 'Pub key ',
+ FILE_DATAGRAM_HEADER: 'File ',
+ GROUP_MSG_INVITE_HEADER: 'G invite ',
+ GROUP_MSG_JOIN_HEADER: 'G join ',
+ GROUP_MSG_MEMBER_ADD_HEADER: 'G add ',
+ GROUP_MSG_MEMBER_REM_HEADER: 'G remove ',
+ GROUP_MSG_EXIT_GROUP_HEADER: 'G exit '}[header]
+
+ flask_queue.put((packet, onion_pub_key))
+ rp_print(f"{p_type} to contact {pub_key_to_short_address(onion_pub_key)}", ts)
+
+
+def src_incoming(queues: 'QueueDict',
+ gateway: 'Gateway',
+ unittest: bool = False
+ ) -> None:
+ """\
+ Redirect messages received from Source Computer to appropriate queues.
+ """
+ packets_from_sc = queues[GATEWAY_QUEUE]
+ packets_to_dc = queues[DST_MESSAGE_QUEUE]
+ commands_to_dc = queues[DST_COMMAND_QUEUE]
+ messages_to_flask = queues[M_TO_FLASK_QUEUE]
+ files_to_flask = queues[F_TO_FLASK_QUEUE]
+ commands_to_relay = queues[SRC_TO_RELAY_QUEUE]
+
+ while True:
+ with ignored(EOFError, KeyboardInterrupt):
+ while packets_from_sc.qsize() == 0:
+ time.sleep(0.01)
+
+ ts, packet = packets_from_sc.get() # type: datetime, bytes
+ ts_bytes = int_to_bytes(int(ts.strftime('%Y%m%d%H%M%S%f')[:-4]))
+
+ try:
+ packet = gateway.detect_errors(packet)
+ except FunctionReturn:
+ continue
+
+ header, packet = separate_header(packet, DATAGRAM_HEADER_LENGTH)
+
+ if header == UNENCRYPTED_DATAGRAM_HEADER:
+ commands_to_relay.put(packet)
+
+ elif header in [COMMAND_DATAGRAM_HEADER, LOCAL_KEY_DATAGRAM_HEADER]:
+ commands_to_dc.put(header + ts_bytes + packet)
+ p_type = 'Command ' if header == COMMAND_DATAGRAM_HEADER else 'Local key'
+ rp_print(f"{p_type} to local Receiver", ts)
+
+ elif header in [MESSAGE_DATAGRAM_HEADER, PUBLIC_KEY_DATAGRAM_HEADER]:
+ onion_pub_key, payload = separate_header(packet, ONION_SERVICE_PUBLIC_KEY_LENGTH)
+ packet_str = header.decode() + b85encode(payload)
+ queue_to_flask(packet_str, onion_pub_key, messages_to_flask, ts, header)
+ if header == MESSAGE_DATAGRAM_HEADER:
+ packets_to_dc.put(header + ts_bytes + onion_pub_key + ORIGIN_USER_HEADER + payload)
+
+ elif header == FILE_DATAGRAM_HEADER:
+ no_contacts_b, payload = separate_header(packet, ENCODED_INTEGER_LENGTH)
+ no_contacts = bytes_to_int(no_contacts_b)
+ ser_accounts, file_ct = separate_header(payload, no_contacts * ONION_SERVICE_PUBLIC_KEY_LENGTH)
+ pub_keys = split_byte_string(ser_accounts, item_len=ONION_SERVICE_PUBLIC_KEY_LENGTH)
+ for onion_pub_key in pub_keys:
+ queue_to_flask(file_ct, onion_pub_key, files_to_flask, ts, header)
+
+ elif header in [GROUP_MSG_INVITE_HEADER, GROUP_MSG_JOIN_HEADER,
+ GROUP_MSG_MEMBER_ADD_HEADER, GROUP_MSG_MEMBER_REM_HEADER,
+ GROUP_MSG_EXIT_GROUP_HEADER]:
+ process_group_management_message(ts, packet, header, messages_to_flask)
+
+ if unittest:
+ break
+
+
+def process_group_management_message(ts: 'datetime',
+ packet: bytes,
+ header: bytes,
+ messages_to_flask: 'Queue') -> None:
+ """Parse and display group management message."""
+ header_str = header.decode()
+ group_id, packet = separate_header(packet, GROUP_ID_LENGTH)
+
+ if header in [GROUP_MSG_INVITE_HEADER, GROUP_MSG_JOIN_HEADER]:
+ pub_keys = split_byte_string(packet, ONION_SERVICE_PUBLIC_KEY_LENGTH)
+ for onion_pub_key in pub_keys:
+ others = [k for k in pub_keys if k != onion_pub_key]
+ packet_str = header_str + b85encode(group_id + b''.join(others))
+ queue_to_flask(packet_str, onion_pub_key, messages_to_flask, ts, header)
+
+ elif header in [GROUP_MSG_MEMBER_ADD_HEADER, GROUP_MSG_MEMBER_REM_HEADER]:
+ first_list_len_b, packet = separate_header(packet, ENCODED_INTEGER_LENGTH)
+ first_list_length = bytes_to_int(first_list_len_b)
+ pub_keys = split_byte_string(packet, ONION_SERVICE_PUBLIC_KEY_LENGTH)
+ before_adding = remaining = pub_keys[:first_list_length]
+ new_in_group = removable = pub_keys[first_list_length:]
+
+ if header == GROUP_MSG_MEMBER_ADD_HEADER:
+
+ packet_str = GROUP_MSG_MEMBER_ADD_HEADER.decode() + b85encode(group_id + b''.join(new_in_group))
+ for onion_pub_key in before_adding:
+ queue_to_flask(packet_str, onion_pub_key, messages_to_flask, ts, header)
+
+ for onion_pub_key in new_in_group:
+ other_new = [k for k in new_in_group if k != onion_pub_key]
+ packet_str = (GROUP_MSG_INVITE_HEADER.decode()
+ + b85encode(group_id + b''.join(other_new + before_adding)))
+ queue_to_flask(packet_str, onion_pub_key, messages_to_flask, ts, header)
+
+ elif header == GROUP_MSG_MEMBER_REM_HEADER:
+ packet_str = header_str + b85encode(group_id + b''.join(removable))
+ for onion_pub_key in remaining:
+ queue_to_flask(packet_str, onion_pub_key, messages_to_flask, ts, header)
+
+ elif header == GROUP_MSG_EXIT_GROUP_HEADER:
+ pub_keys = split_byte_string(packet, ONION_SERVICE_PUBLIC_KEY_LENGTH)
+ packet_str = header_str + b85encode(group_id)
+ for onion_pub_key in pub_keys:
+ queue_to_flask(packet_str, onion_pub_key, messages_to_flask, ts, header)
+
+
+def dst_outgoing(queues: 'QueueDict',
+ gateway: 'Gateway',
+ unittest: bool = False
+ ) -> None:
+ """Output packets from queues to Destination Computer.
+
+ Commands (and local keys) to local Destination Computer have higher
+ priority than messages and public keys from contacts. Prioritization
+ prevents contact from doing DoS on Receiver Program by filling the
+ queue with packets.
+ """
+ c_queue = queues[DST_COMMAND_QUEUE]
+ m_queue = queues[DST_MESSAGE_QUEUE]
+
+ while True:
+ try:
+ if c_queue.qsize() == 0 and m_queue.qsize() == 0:
+ time.sleep(0.01)
+
+ while c_queue.qsize() != 0:
+ gateway.write(c_queue.get())
+
+ if m_queue.qsize() != 0:
+ gateway.write(m_queue.get())
+
+ if unittest and queues[UNITTEST_QUEUE].qsize() > 0:
+ break
+
+ except (EOFError, KeyboardInterrupt):
+ pass
diff --git a/src/rx/commands.py b/src/rx/commands.py
deleted file mode 100644
index 57d1dc4..0000000
--- a/src/rx/commands.py
+++ /dev/null
@@ -1,357 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see <https://www.gnu.org/licenses/>.
-"""
-
-import os
-import typing
-
-from typing import Any, Dict, Union
-
-from src.common.db_logs import access_logs, re_encrypt, remove_logs
-from src.common.encoding import bytes_to_int
-from src.common.exceptions import FunctionReturn
-from src.common.misc import ensure_dir
-from src.common.output import box_print, clear_screen, phase, print_on_previous_line
-from src.common.statics import *
-
-from src.rx.commands_g import group_add_member, group_create, group_rm_member, remove_group
-from src.rx.key_exchanges import add_psk_tx_keys, add_x25519_keys, import_psk_rx_keys, local_key_installed
-from src.rx.packet import decrypt_assembly_packet
-
-if typing.TYPE_CHECKING:
- from datetime import datetime
- from multiprocessing import Queue
- from src.common.db_contacts import Contact, ContactList
- from src.common.db_groups import Group, GroupList
- from src.common.db_keys import KeyList
- from src.common.db_masterkey import MasterKey
- from src.common.db_settings import Settings
- from src.rx.packet import PacketList
- from src.rx.windows import WindowList
-
-
-def process_command(ts: 'datetime',
- assembly_ct: bytes,
- window_list: 'WindowList',
- packet_list: 'PacketList',
- contact_list: 'ContactList',
- key_list: 'KeyList',
- group_list: 'GroupList',
- settings: 'Settings',
- master_key: 'MasterKey',
- pubkey_buf: Dict[str, bytes],
- exit_queue: 'Queue') -> None:
- """Decrypt command assembly packet and process command."""
- assembly_packet, account, origin = decrypt_assembly_packet(assembly_ct, window_list, contact_list, key_list)
-
- cmd_packet = packet_list.get_packet(account, origin, COMMAND)
- cmd_packet.add_packet(assembly_packet)
-
- if not cmd_packet.is_complete:
- raise FunctionReturn("Incomplete command.", output=False)
-
- command = cmd_packet.assemble_command_packet()
- header = command[:2]
- cmd_data = command[2:]
-
- # Keyword Function to run ( Parameters )
- # -----------------------------------------------------------------------------------------------------------------------------------------
- d = {LOCAL_KEY_INSTALLED_HEADER: (local_key_installed, ts, window_list, contact_list ),
- SHOW_WINDOW_ACTIVITY_HEADER: (show_win_activity, window_list ),
- WINDOW_SELECT_HEADER: (select_win_cmd, cmd_data, window_list ),
- CLEAR_SCREEN_HEADER: (clear_active_window, ),
- RESET_SCREEN_HEADER: (reset_active_window, cmd_data, window_list ),
- EXIT_PROGRAM_HEADER: (exit_tfc, exit_queue),
- LOG_DISPLAY_HEADER: (log_command, cmd_data, None, window_list, contact_list, group_list, settings, master_key),
- LOG_EXPORT_HEADER: (log_command, cmd_data, ts, window_list, contact_list, group_list, settings, master_key),
- LOG_REMOVE_HEADER: (remove_log, cmd_data, settings, master_key),
- CHANGE_MASTER_K_HEADER: (change_master_key, ts, window_list, contact_list, group_list, key_list, settings, master_key),
- CHANGE_NICK_HEADER: (change_nick, cmd_data, ts, window_list, contact_list, ),
- CHANGE_SETTING_HEADER: (change_setting, cmd_data, ts, window_list, contact_list, group_list, settings, ),
- CHANGE_LOGGING_HEADER: (contact_setting, cmd_data, ts, window_list, contact_list, group_list, header ),
- CHANGE_FILE_R_HEADER: (contact_setting, cmd_data, ts, window_list, contact_list, group_list, header ),
- CHANGE_NOTIFY_HEADER: (contact_setting, cmd_data, ts, window_list, contact_list, group_list, header ),
- GROUP_CREATE_HEADER: (group_create, cmd_data, ts, window_list, contact_list, group_list, settings ),
- GROUP_ADD_HEADER: (group_add_member, cmd_data, ts, window_list, contact_list, group_list, settings ),
- GROUP_REMOVE_M_HEADER: (group_rm_member, cmd_data, ts, window_list, contact_list, group_list, ),
- GROUP_DELETE_HEADER: (remove_group, cmd_data, ts, window_list, group_list, ),
- KEY_EX_X25519_HEADER: (add_x25519_keys, cmd_data, ts, window_list, contact_list, key_list, settings, pubkey_buf),
- KEY_EX_PSK_TX_HEADER: (add_psk_tx_keys, cmd_data, ts, window_list, contact_list, key_list, settings, pubkey_buf),
- KEY_EX_PSK_RX_HEADER: (import_psk_rx_keys, cmd_data, ts, window_list, contact_list, key_list, settings ),
- CONTACT_REMOVE_HEADER: (remove_contact, cmd_data, ts, window_list, contact_list, group_list, key_list, ),
- WIPE_USER_DATA_HEADER: (wipe, exit_queue)} # type: Dict[bytes, Any]
-
- try:
- from_dict = d[header]
- except KeyError:
- raise FunctionReturn("Error: Received an invalid command.")
-
- func = from_dict[0]
- parameters = from_dict[1:]
- func(*parameters)
-
-
-def show_win_activity(window_list: 'WindowList') -> None:
- """Show number of unread messages in each window."""
- unread_wins = [w for w in window_list if (w.uid != LOCAL_ID and w.unread_messages > 0)]
- print_list = ["Window activity"] if unread_wins else ["No window activity"]
- print_list += [f"{w.name}: {w.unread_messages}" for w in unread_wins]
-
- box_print(print_list)
- print_on_previous_line(reps=(len(print_list) + 2), delay=1.5)
-
-
-def select_win_cmd(cmd_data: bytes, window_list: 'WindowList') -> None:
- """Select window specified by TxM."""
- window_uid = cmd_data.decode()
- if window_uid == WIN_TYPE_FILE:
- clear_screen()
- window_list.select_rx_window(window_uid)
-
-
-def clear_active_window() -> None:
- """Clear active screen."""
- clear_screen()
-
-
-def reset_active_window(cmd_data: bytes, window_list: 'WindowList') -> None:
- """Reset window specified by TxM."""
- uid = cmd_data.decode()
- window = window_list.get_window(uid)
- window.reset_window()
- os.system('reset')
-
-
-def exit_tfc(exit_queue: 'Queue') -> None:
- """Exit TFC."""
- exit_queue.put(EXIT)
-
-
-def log_command(cmd_data: bytes,
- ts: 'datetime',
- window_list: 'WindowList',
- contact_list: 'ContactList',
- group_list: 'GroupList',
- settings: 'Settings',
- master_key: 'MasterKey') -> None:
- """Display or export logfile for active window."""
- export = ts is not None
- win_uid, no_msg_bytes = cmd_data.split(US_BYTE)
- no_messages = bytes_to_int(no_msg_bytes)
- window = window_list.get_window(win_uid.decode())
- access_logs(window, contact_list, group_list, settings, master_key, msg_to_load=no_messages, export=export)
-
- if export:
- local_win = window_list.get_window(LOCAL_ID)
- local_win.add_new(ts, f"Exported logfile of {window.type_print} {window.name}.", output=True)
-
-
-def remove_log(cmd_data: bytes,
- settings: 'Settings',
- master_key: 'MasterKey') -> None:
- """Remove log entries for contact."""
- window_name = cmd_data.decode()
- remove_logs(window_name, settings, master_key)
-
-
-def change_master_key(ts: 'datetime',
- window_list: 'WindowList',
- contact_list: 'ContactList',
- group_list: 'GroupList',
- key_list: 'KeyList',
- settings: 'Settings',
- master_key: 'MasterKey') -> None:
- """Prompt user for new master password and derive new master key from that."""
- try:
- old_master_key = master_key.master_key[:]
- master_key.new_master_key()
-
- phase("Re-encrypting databases")
-
- ensure_dir(DIR_USER_DATA)
- file_name = f'{DIR_USER_DATA}{settings.software_operation}_logs'
- if os.path.isfile(file_name):
- re_encrypt(old_master_key, master_key.master_key, settings)
-
- key_list.store_keys()
- settings.store_settings()
- contact_list.store_contacts()
- group_list.store_groups()
-
- phase(DONE)
- box_print("Master key successfully changed.", head=1)
- clear_screen(delay=1.5)
-
- local_win = window_list.get_window(LOCAL_ID)
- local_win.add_new(ts, "Changed RxM master key.")
-
- except KeyboardInterrupt:
- raise FunctionReturn("Password change aborted.", delay=1, head=3, tail_clear=True)
-
-
-def change_nick(cmd_data: bytes,
- ts: 'datetime',
- window_list: 'WindowList',
- contact_list: 'ContactList') -> None:
- """Change contact nick."""
- account, nick = [f.decode() for f in cmd_data.split(US_BYTE)]
-
- window = window_list.get_window(account)
- window.name = nick
-
- window.handle_dict[account] = (contact_list.get_contact(account).nick
- if contact_list.has_contact(account) else account)
-
- contact_list.get_contact(account).nick = nick
- contact_list.store_contacts()
-
- cmd_win = window_list.get_local_window()
- cmd_win.add_new(ts, f"Changed {account} nick to '{nick}'", output=True)
-
-
-def change_setting(cmd_data: bytes,
- ts: 'datetime',
- window_list: 'WindowList',
- contact_list: 'ContactList',
- group_list: 'GroupList',
- settings: 'Settings') -> None:
- """Change TFC setting."""
- setting, value = [f.decode() for f in cmd_data.split(US_BYTE)]
-
- if setting not in settings.key_list:
- raise FunctionReturn(f"Error: Invalid setting '{setting}'")
-
- settings.change_setting(setting, value, contact_list, group_list)
-
- local_win = window_list.get_local_window()
- local_win.add_new(ts, f"Changed setting {setting} to '{value}'", output=True)
-
-
-def contact_setting(cmd_data: bytes,
- ts: 'datetime',
- window_list: 'WindowList',
- contact_list: 'ContactList',
- group_list: 'GroupList',
- header: bytes) -> None:
- """Change contact/group related setting."""
- setting, win_uid = [f.decode() for f in cmd_data.split(US_BYTE)]
-
- attr, desc, file_cmd = {CHANGE_LOGGING_HEADER: ('log_messages', 'Logging of messages', False),
- CHANGE_FILE_R_HEADER: ('file_reception', 'Reception of files', True ),
- CHANGE_NOTIFY_HEADER: ('notifications', 'Message notifications', False)}[header]
-
- action, b_value = {ENABLE: ('enable', True),
- DISABLE: ('disable', False)}[setting.lower().encode()]
-
- if setting.isupper():
- # Change settings for all contacts (and groups)
- enabled = [getattr(c, attr) for c in contact_list.get_list_of_contacts()]
- enabled += [getattr(g, attr) for g in group_list] if not file_cmd else []
- status = "was already" if (( all(enabled) and b_value)
- or (not any(enabled) and not b_value)) else 'has been'
- specifier = 'every '
- w_type = 'contact'
- w_name = '.' if file_cmd else ' and group.'
-
- # Set values
- for c in contact_list.get_list_of_contacts():
- setattr(c, attr, b_value)
- contact_list.store_contacts()
-
- if not file_cmd:
- for g in group_list:
- setattr(g, attr, b_value)
- group_list.store_groups()
-
- else:
- # Change setting for contacts in specified window
- if not window_list.has_window(win_uid):
- raise FunctionReturn(f"Error: Found no window for '{win_uid}'")
- window = window_list.get_window(win_uid)
- group_window = window.type == WIN_TYPE_GROUP
- contact_window = window.type == WIN_TYPE_CONTACT
-
- if contact_window:
- target = contact_list.get_contact(win_uid) # type: Union[Contact, Group]
- else:
- target = group_list.get_group(win_uid)
-
- if file_cmd:
- enabled = [getattr(m, attr) for m in window.window_contacts]
- changed = not all(enabled) if b_value else any(enabled)
- else:
- changed = getattr(target, attr) != b_value
- status = "has been" if changed else "was already"
- specifier = 'members in ' if (file_cmd and group_window) else ''
- w_type = window.type_print
- w_name = f" {window.name}."
-
- # Set values
- if contact_window or (group_window and file_cmd):
- for c in window.window_contacts:
- setattr(c, attr, b_value)
- contact_list.store_contacts()
-
- elif window.type == WIN_TYPE_GROUP:
- setattr(group_list.get_group(win_uid), attr, b_value)
- group_list.store_groups()
-
- message = f"{desc} {status} {action}d for {specifier}{w_type}{w_name}"
- local_win = window_list.get_window(LOCAL_ID)
- local_win.add_new(ts, message, output=True)
-
-
-def remove_contact(cmd_data: bytes,
- ts: 'datetime',
- window_list: 'WindowList',
- contact_list: 'ContactList',
- group_list: 'GroupList',
- key_list: 'KeyList') -> None:
- """Remove contact from RxM."""
- rx_account = cmd_data.decode()
-
- key_list.remove_keyset(rx_account)
- window_list.remove_window(rx_account)
-
- if not contact_list.has_contact(rx_account):
- raise FunctionReturn(f"RxM has no account '{rx_account}' to remove.")
-
- nick = contact_list.get_contact(rx_account).nick
- contact_list.remove_contact(rx_account)
-
- message = f"Removed {nick} from contacts."
- box_print(message, head=1, tail=1)
-
- local_win = window_list.get_local_window()
- local_win.add_new(ts, message)
-
- if any([g.remove_members([rx_account]) for g in group_list]):
- box_print(f"Removed {rx_account} from group(s).", tail=1)
-
-
-def wipe(exit_queue: 'Queue') -> None:
- """Reset terminals, wipe all user data on RxM and power off system.
-
- No effective RAM overwriting tool currently exists, so as long as TxM/RxM
- use FDE and DDR3 memory, recovery of user data becomes impossible very fast:
-
- https://www1.cs.fau.de/filepool/projects/coldboot/fares_coldboot.pdf
- """
- os.system('reset')
- exit_queue.put(WIPE)
diff --git a/src/rx/commands_g.py b/src/rx/commands_g.py
deleted file mode 100644
index 6499388..0000000
--- a/src/rx/commands_g.py
+++ /dev/null
@@ -1,165 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see <https://www.gnu.org/licenses/>.
-"""
-
-import typing
-
-from src.common.exceptions import FunctionReturn
-from src.common.output import box_print, group_management_print
-from src.common.statics import *
-
-if typing.TYPE_CHECKING:
- from datetime import datetime
- from src.common.db_contacts import ContactList
- from src.common.db_groups import GroupList
- from src.common.db_settings import Settings
- from src.rx.windows import WindowList
-
-
-def group_create(cmd_data: bytes,
- ts: 'datetime',
- window_list: 'WindowList',
- contact_list: 'ContactList',
- group_list: 'GroupList',
- settings: 'Settings') -> None:
- """Create a new group."""
- fields = [f.decode() for f in cmd_data.split(US_BYTE)]
- group_name = fields[0]
-
- purp_accounts = set(fields[1:])
- accounts = set(contact_list.get_list_of_accounts())
- accepted = list(accounts & purp_accounts)
- rejected = list(purp_accounts - accounts)
-
- if len(accepted) > settings.max_number_of_group_members:
- raise FunctionReturn(f"Error: TFC settings only allow {settings.max_number_of_group_members} members per group.")
-
- if len(group_list) == settings.max_number_of_groups:
- raise FunctionReturn(f"Error: TFC settings only allow {settings.max_number_of_groups} groups.")
-
- accepted_contacts = [contact_list.get_contact(c) for c in accepted]
- group_list.add_group(group_name,
- settings.log_messages_by_default,
- settings.show_notifications_by_default,
- accepted_contacts)
-
- window = window_list.get_window(group_name)
- window.window_contacts = accepted_contacts
- window.message_log = []
- window.unread_messages = 0
- window.create_handle_dict()
-
- group_management_print(NEW_GROUP, accepted, contact_list, group_name)
- group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name)
-
- local_win = window_list.get_window(LOCAL_ID)
- local_win.add_new(ts, f"Created new group {group_name}.")
-
-
-def group_add_member(cmd_data: bytes,
- ts: 'datetime',
- window_list: 'WindowList',
- contact_list: 'ContactList',
- group_list: 'GroupList',
- settings: 'Settings') -> None:
- """Add member(s) to group."""
- fields = [f.decode() for f in cmd_data.split(US_BYTE)]
- group_name = fields[0]
-
- purp_accounts = set(fields[1:])
- accounts = set(contact_list.get_list_of_accounts())
- before_adding = set(group_list.get_group(group_name).get_list_of_member_accounts())
- ok_accounts = set(accounts & purp_accounts)
- new_in_group_set = set(ok_accounts - before_adding)
-
- end_assembly = list(before_adding | new_in_group_set)
- rejected = list(purp_accounts - accounts)
- already_in_g = list(before_adding & purp_accounts)
- new_in_group = list(new_in_group_set)
-
- if len(end_assembly) > settings.max_number_of_group_members:
- raise FunctionReturn(f"Error: TFC settings only allow {settings.max_number_of_group_members} members per group.")
-
- group = group_list.get_group(group_name)
- group.add_members([contact_list.get_contact(a) for a in new_in_group])
-
- window = window_list.get_window(group_name)
- window.add_contacts(new_in_group)
- window.create_handle_dict()
-
- group_management_print(ADDED_MEMBERS, new_in_group, contact_list, group_name)
- group_management_print(ALREADY_MEMBER, already_in_g, contact_list, group_name)
- group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name)
-
- local_win = window_list.get_window(LOCAL_ID)
- local_win.add_new(ts, f"Added members to group {group_name}.")
-
-
-def group_rm_member(cmd_data: bytes,
- ts: 'datetime',
- window_list: 'WindowList',
- contact_list: 'ContactList',
- group_list: 'GroupList') -> None:
- """Remove member(s) from group."""
- fields = [f.decode() for f in cmd_data.split(US_BYTE)]
- group_name = fields[0]
-
- purp_accounts = set(fields[1:])
- accounts = set(contact_list.get_list_of_accounts())
- before_removal = set(group_list.get_group(group_name).get_list_of_member_accounts())
- ok_accounts_set = set(purp_accounts & accounts)
- removable_set = set(before_removal & ok_accounts_set)
-
- not_in_group = list(ok_accounts_set - before_removal)
- rejected = list(purp_accounts - accounts)
- removable = list(removable_set)
-
- group = group_list.get_group(group_name)
- group.remove_members(removable)
-
- window = window_list.get_window(group_name)
- window.remove_contacts(removable)
-
- group_management_print(REMOVED_MEMBERS, removable, contact_list, group_name)
- group_management_print(NOT_IN_GROUP, not_in_group, contact_list, group_name)
- group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name)
-
- local_win = window_list.get_window(LOCAL_ID)
- local_win.add_new(ts, f"Removed members from group {group_name}.")
-
-
-def remove_group(cmd_data: bytes,
- ts: 'datetime',
- window_list: 'WindowList',
- group_list: 'GroupList') -> None:
- """Remove group."""
- group_name = cmd_data.decode()
-
- window_list.remove_window(group_name)
-
- if group_name not in group_list.get_list_of_group_names():
- raise FunctionReturn(f"RxM has no group '{group_name}' to remove.")
-
- group_list.remove_group(group_name)
-
- message = f"Removed group {group_name}."
- box_print(message, head=1, tail=1)
-
- local_win = window_list.get_window(LOCAL_ID)
- local_win.add_new(ts, message)
diff --git a/src/rx/files.py b/src/rx/files.py
deleted file mode 100644
index bb3e8b1..0000000
--- a/src/rx/files.py
+++ /dev/null
@@ -1,150 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
-"""
-
-import base64
-import binascii
-import os.path
-import typing
-import zlib
-
-import nacl.exceptions
-
-from src.common.crypto import auth_and_decrypt
-from src.common.encoding import bytes_to_str
-from src.common.exceptions import FunctionReturn
-from src.common.input import get_b58_key
-from src.common.misc import ensure_dir
-from src.common.output import box_print, c_print, phase, print_on_previous_line
-from src.common.statics import *
-
-if typing.TYPE_CHECKING:
- from datetime import datetime
- from src.common.db_settings import Settings
- from src.rx.windows import WindowList
-
-
-def store_unique(f_data: bytes, f_dir: str, f_name: str) -> str:
- """Store file under unique filename.
-
- Add trailing counter .# to duplicate files.
- """
- ensure_dir(f_dir)
-
- if os.path.isfile(f_dir + f_name):
- ctr = 1
- while os.path.isfile(f_dir + f_name + f'.{ctr}'):
- ctr += 1
- f_name += f'.{ctr}'
-
- with open(f_dir + f_name, 'wb+') as f:
- f.write(f_data)
-
- return f_name
-
-
-def process_received_file(payload: bytes, nick: str) -> None:
- """Process received file assembly packets."""
- try:
- f_name_b, f_data = payload.split(US_BYTE)
- except ValueError:
- raise FunctionReturn("Error: Received file had invalid structure.")
-
- try:
- f_name = f_name_b.decode()
- except UnicodeError:
- raise FunctionReturn("Error: Received file name had invalid encoding.")
-
- if not f_name.isprintable() or not f_name:
- raise FunctionReturn("Error: Received file had an invalid name.")
-
- try:
- f_data = base64.b85decode(f_data)
- except (binascii.Error, ValueError):
- raise FunctionReturn("Error: Received file had invalid encoding.")
-
- file_ct = f_data[:-KEY_LENGTH]
- file_key = f_data[-KEY_LENGTH:]
- if len(file_key) != KEY_LENGTH:
- raise FunctionReturn("Error: Received file had an invalid key.")
-
- try:
- file_pt = auth_and_decrypt(file_ct, file_key, soft_e=True)
- except nacl.exceptions.CryptoError:
- raise FunctionReturn("Error: Decryption of file data failed.")
-
- try:
- file_dc = zlib.decompress(file_pt)
- except zlib.error:
- raise FunctionReturn("Error: Decompression of file data failed.")
-
- file_dir = f'{DIR_RX_FILES}{nick}/'
- final_name = store_unique(file_dc, file_dir, f_name)
- box_print(f"Stored file from {nick} as '{final_name}'")
-
-
-def process_imported_file(ts: 'datetime',
- packet: bytes,
- window_list: 'WindowList',
- settings: 'Settings'):
- """Decrypt and store imported file."""
- while True:
- try:
- print('')
- key = get_b58_key(B58_FILE_KEY, settings)
- except KeyboardInterrupt:
- raise FunctionReturn("File import aborted.", head=2)
-
- try:
- phase("Decrypting file", head=1)
- file_pt = auth_and_decrypt(packet[1:], key, soft_e=True)
- phase(DONE)
- break
- except (nacl.exceptions.CryptoError, nacl.exceptions.ValueError):
- phase('ERROR', done=True)
- c_print("Invalid decryption key. Try again.")
- print_on_previous_line(reps=7, delay=1.5)
- except KeyboardInterrupt:
- phase('ABORT', done=True)
- raise FunctionReturn("File import aborted.")
-
- try:
- phase("Decompressing file")
- file_dc = zlib.decompress(file_pt)
- phase(DONE)
- except zlib.error:
- phase('ERROR', done=True)
- raise FunctionReturn("Error: Decompression of file data failed.")
-
- try:
- f_name = bytes_to_str(file_dc[:PADDED_UTF32_STR_LEN])
- except UnicodeError:
- raise FunctionReturn("Error: Received file name had invalid encoding.")
-
- if not f_name.isprintable() or not f_name:
- raise FunctionReturn("Error: Received file had an invalid name.")
-
- f_data = file_dc[PADDED_UTF32_STR_LEN:]
- final_name = store_unique(f_data, DIR_IMPORTED, f_name)
-
- message = f"Stored imported file as '{final_name}'"
- box_print(message, head=1)
-
- local_win = window_list.get_local_window()
- local_win.add_new(ts, message)
diff --git a/src/rx/key_exchanges.py b/src/rx/key_exchanges.py
deleted file mode 100644
index 2beb2d2..0000000
--- a/src/rx/key_exchanges.py
+++ /dev/null
@@ -1,286 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
-"""
-
-import os.path
-import pipes
-import subprocess
-import typing
-
-from typing import Dict
-
-import nacl.exceptions
-
-from src.common.crypto import argon2_kdf, auth_and_decrypt, csprng
-from src.common.db_masterkey import MasterKey
-from src.common.encoding import b58encode
-from src.common.exceptions import FunctionReturn
-from src.common.input import get_b58_key
-from src.common.misc import split_string
-from src.common.output import box_print, c_print, clear_screen, phase, print_key, print_on_previous_line
-from src.common.path import ask_path_gui
-from src.common.statics import *
-
-if typing.TYPE_CHECKING:
- from datetime import datetime
- from src.common.db_contacts import ContactList
- from src.common.db_keys import KeyList
- from src.common.db_settings import Settings
- from src.rx.windows import WindowList
-
-
-# Local key
-
-def process_local_key(ts: 'datetime',
- packet: bytes,
- window_list: 'WindowList',
- contact_list: 'ContactList',
- key_list: 'KeyList',
- settings: 'Settings') -> None:
- """Decrypt local key packet and add local contact/keyset."""
- bootstrap = not key_list.has_local_key()
-
- try:
- while True:
- clear_screen()
- box_print("Received encrypted local key", tail=1)
- kdk = get_b58_key(B58_LOCAL_KEY, settings)
-
- try:
- pt = auth_and_decrypt(packet[1:], key=kdk, soft_e=True)
- break
- except nacl.exceptions.CryptoError:
- if bootstrap:
- raise FunctionReturn("Error: Incorrect key decryption key.", delay=1.5)
- c_print("Incorrect key decryption key.", head=1)
- clear_screen(delay=1.5)
-
- key = pt[0:32]
- hek = pt[32:64]
- conf_code = pt[64:65]
-
- # Add local contact to contact list database
- contact_list.add_contact(LOCAL_ID, LOCAL_ID, LOCAL_ID,
- bytes(FINGERPRINT_LEN), bytes(FINGERPRINT_LEN),
- False, False, True)
-
- # Add local keyset to keyset database
- key_list.add_keyset(rx_account=LOCAL_ID,
- tx_key=key,
- rx_key=csprng(),
- tx_hek=hek,
- rx_hek=csprng())
-
- box_print(f"Confirmation code for TxM: {conf_code.hex()}", head=1)
-
- local_win = window_list.get_local_window()
- local_win.add_new(ts, "Added new local key.")
-
- if bootstrap:
- window_list.active_win = local_win
-
- except KeyboardInterrupt:
- raise FunctionReturn("Local key setup aborted.", delay=1, head=3, tail_clear=True)
-
-
-def local_key_installed(ts: 'datetime',
- window_list: 'WindowList',
- contact_list: 'ContactList') -> None:
- """Clear local key bootstrap process from screen."""
- message = "Successfully completed local key exchange."
- local_win = window_list.get_window(LOCAL_ID)
- local_win.add_new(ts, message)
-
- box_print(message)
- clear_screen(delay=1)
-
- if not contact_list.has_contacts():
- c_print("Waiting for new contacts", head=1, tail=1)
-
-
-# X25519
-
-def process_public_key(ts: 'datetime',
- packet: bytes,
- window_list: 'WindowList',
- settings: 'Settings',
- pubkey_buf: Dict[str, bytes]) -> None:
- """Display contact's public key and add it to buffer."""
- pub_key = packet[1:33]
- origin = packet[33:34]
-
- try:
- account = packet[34:].decode()
- except UnicodeError:
- raise FunctionReturn("Error! Account for received public key had invalid encoding.")
-
- if origin not in [ORIGIN_CONTACT_HEADER, ORIGIN_USER_HEADER]:
- raise FunctionReturn("Error! Received public key had an invalid origin header.")
-
- if origin == ORIGIN_CONTACT_HEADER:
- pubkey_buf[account] = pub_key
- print_key(f"Received public key from {account}:", pub_key, settings)
-
- local_win = window_list.get_local_window()
- pub_key_b58 = ' '.join(split_string(b58encode(pub_key), item_len=(51 if settings.local_testing_mode else 3)))
- local_win.add_new(ts, f"Received public key from {account}: {pub_key_b58}")
-
- elif origin == ORIGIN_USER_HEADER and account in pubkey_buf:
- clear_screen()
- print_key(f"Public key for {account}:", pubkey_buf[account], settings)
-
-
-def add_x25519_keys(packet: bytes,
- ts: 'datetime',
- window_list: 'WindowList',
- contact_list: 'ContactList',
- key_list: 'KeyList',
- settings: 'Settings',
- pubkey_buf: Dict[str, bytes]) -> None:
- """Add contact and their X25519 keys."""
- tx_key = packet[0:32]
- tx_hek = packet[32:64]
- rx_key = packet[64:96]
- rx_hek = packet[96:128]
-
- account, nick = [f.decode() for f in packet[128:].split(US_BYTE)]
-
- contact_list.add_contact(account, DUMMY_USER, nick,
- bytes(FINGERPRINT_LEN),
- bytes(FINGERPRINT_LEN),
- settings.log_messages_by_default,
- settings.accept_files_by_default,
- settings.show_notifications_by_default)
-
- key_list.add_keyset(account, tx_key, rx_key, tx_hek, rx_hek)
-
- pubkey_buf.pop(account, None)
-
- message = f"Added X25519 keys for {nick} ({account})."
- local_win = window_list.get_window(LOCAL_ID)
- local_win.add_new(ts, message)
-
- box_print(message)
- clear_screen(delay=1)
-
-
-# PSK
-
-def add_psk_tx_keys(cmd_data: bytes,
- ts: 'datetime',
- window_list: 'WindowList',
- contact_list: 'ContactList',
- key_list: 'KeyList',
- settings: 'Settings',
- pubkey_buf: Dict[str, bytes]) -> None:
- """Add contact and Tx-PSKs."""
- tx_key = cmd_data[0:32]
- tx_hek = cmd_data[32:64]
-
- account, nick = [f.decode() for f in cmd_data[64:].split(US_BYTE)]
-
- contact_list.add_contact(account, DUMMY_USER, nick,
- bytes(FINGERPRINT_LEN), bytes(FINGERPRINT_LEN),
- settings.log_messages_by_default,
- settings.accept_files_by_default,
- settings.show_notifications_by_default)
-
- # The Rx-side keys are set as null-byte strings to indicate they have not
- # been added yet. This does not allow existential forgeries as
- # decrypt_assembly_packet does not allow use of zero-keys for decryption.
- key_list.add_keyset(account,
- tx_key=tx_key,
- rx_key=bytes(KEY_LENGTH),
- tx_hek=tx_hek,
- rx_hek=bytes(KEY_LENGTH))
-
- pubkey_buf.pop(account, None)
-
- message = f"Added Tx-PSK for {nick} ({account})."
- local_win = window_list.get_window(LOCAL_ID)
- local_win.add_new(ts, message)
-
- box_print(message)
- clear_screen(delay=1)
-
-
-def import_psk_rx_keys(cmd_data: bytes,
- ts: 'datetime',
- window_list: 'WindowList',
- contact_list: 'ContactList',
- key_list: 'KeyList',
- settings: 'Settings') -> None:
- """Import Rx-PSK of contact."""
- account = cmd_data.decode()
-
- if not contact_list.has_contact(account):
- raise FunctionReturn(f"Error: Unknown account '{account}'")
-
- contact = contact_list.get_contact(account)
- psk_file = ask_path_gui(f"Select PSK for {contact.nick}", settings, get_file=True)
-
- with open(psk_file, 'rb') as f:
- psk_data = f.read()
-
- if len(psk_data) != PSK_FILE_SIZE:
- raise FunctionReturn("Error: Invalid PSK data in file.")
-
- salt = psk_data[:ARGON2_SALT_LEN]
- ct_tag = psk_data[ARGON2_SALT_LEN:]
-
- while True:
- try:
- password = MasterKey.get_password("PSK password")
- phase("Deriving key decryption key", head=2)
- kdk, _ = argon2_kdf(password, salt, parallelism=1)
- psk_pt = auth_and_decrypt(ct_tag, key=kdk, soft_e=True)
- phase(DONE)
- break
-
- except nacl.exceptions.CryptoError:
- print_on_previous_line()
- c_print("Invalid password. Try again.", head=1)
- print_on_previous_line(reps=5, delay=1.5)
- except KeyboardInterrupt:
- raise FunctionReturn("PSK import aborted.", head=2)
-
- rx_key = psk_pt[0:32]
- rx_hek = psk_pt[32:64]
-
- if any(k == bytes(KEY_LENGTH) for k in [rx_key, rx_hek]):
- raise FunctionReturn("Error: Received invalid keys from contact.")
-
- keyset = key_list.get_keyset(account)
- keyset.rx_key = rx_key
- keyset.rx_hek = rx_hek
- key_list.store_keys()
-
- # Pipes protects against shell injection. Source of command's parameter
- # is user's own RxM and therefore trusted, but it's still good practice.
- subprocess.Popen(f"shred -n 3 -z -u {pipes.quote(psk_file)}", shell=True).wait()
- if os.path.isfile(psk_file):
- box_print(f"Warning! Overwriting of PSK ({psk_file}) failed. Press to continue.", manual_proceed=True)
-
- local_win = window_list.get_local_window()
- message = f"Added Rx-PSK for {contact.nick} ({account})."
- local_win.add_new(ts, message)
-
- box_print([message, '', "Warning!",
- "Physically destroy the keyfile transmission ",
- "media to ensure that no data escapes RxM!"], head=1, tail=1)
diff --git a/src/rx/messages.py b/src/rx/messages.py
deleted file mode 100644
index 1427e95..0000000
--- a/src/rx/messages.py
+++ /dev/null
@@ -1,225 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
-"""
-
-import typing
-
-from typing import Any, List, Tuple
-
-from src.common.db_logs import write_log_entry
-from src.common.exceptions import FunctionReturn
-from src.common.output import box_print
-from src.common.statics import *
-
-from src.rx.packet import decrypt_assembly_packet
-
-if typing.TYPE_CHECKING:
- from datetime import datetime
- from src.common.db_contacts import ContactList
- from src.common.db_groups import GroupList
- from src.common.db_keys import KeyList
- from src.common.db_masterkey import MasterKey
- from src.common.db_settings import Settings
- from src.rx.packet import PacketList
- from src.rx.windows import WindowList
-
-
-def process_message(ts: 'datetime',
- assembly_packet_ct: bytes,
- window_list: 'WindowList',
- packet_list: 'PacketList',
- contact_list: 'ContactList',
- key_list: 'KeyList',
- group_list: 'GroupList',
- settings: 'Settings',
- master_key: 'MasterKey') -> None:
- """Process received private / group message.
-
- Group management messages have automatic formatting and window
- redirection based on group configuration managed by user.
- """
- assembly_packet, account, origin = decrypt_assembly_packet(assembly_packet_ct, window_list, contact_list, key_list)
-
- p_type = FILE if assembly_packet[:1].isupper() else MESSAGE
- packet = packet_list.get_packet(account, origin, p_type)
- logging = contact_list.get_contact(account).log_messages
-
- def log_masking_packets(completed: bool = False) -> None:
- """Add masking packets to log file.
-
- If logging and logfile masking are enabled this function will
- in case of erroneous transmissions, store the correct number
- of placeholder data packets to log file to hide quantity of
- communication that log file observation would reveal.
- """
- if logging and settings.logfile_masking and (packet.log_masking_ctr or completed):
- iterator = packet.assembly_pt_list if completed else range(packet.log_masking_ctr) # type: Any
- for _ in iterator:
- write_log_entry(PLACEHOLDER_DATA, account, settings, master_key, origin)
- packet.log_masking_ctr = 0
-
- try:
- packet.add_packet(assembly_packet)
- except FunctionReturn:
- log_masking_packets()
- raise
- log_masking_packets()
-
- if not packet.is_complete:
- return None
-
- try:
- if p_type == FILE:
- packet.assemble_and_store_file()
- # Raise FunctionReturn for packets stored as placeholder data.
- raise FunctionReturn("File storage complete.", output=False)
-
- elif p_type == MESSAGE:
- assembled = packet.assemble_message_packet()
- header = assembled[:1]
- assembled = assembled[1:]
-
- if header == GROUP_MESSAGE_HEADER:
- logging = process_group_message(assembled, ts, account, origin, group_list, window_list)
-
- elif header == PRIVATE_MESSAGE_HEADER:
- window = window_list.get_window(account)
- window.add_new(ts, assembled.decode(), account, origin, output=True)
-
- elif header == WHISPER_MESSAGE_HEADER:
- window = window_list.get_window(account)
- window.add_new(ts, assembled.decode(), account, origin, output=True, whisper=True)
- raise FunctionReturn("Key message message complete.", output=False)
-
- else:
- process_group_management_message(header, assembled, ts, account, origin, contact_list, group_list, window_list)
- raise FunctionReturn("Group management message complete.", output=False)
-
- if logging:
- for p in packet.assembly_pt_list:
- write_log_entry(p, account, settings, master_key, origin)
-
- except (FunctionReturn, UnicodeError):
- log_masking_packets(completed=True)
- raise
- finally:
- packet.clear_assembly_packets()
-
-
-def process_group_message(assembled: bytes,
- ts: 'datetime',
- account: str,
- origin: bytes,
- group_list: 'GroupList',
- window_list: 'WindowList') -> bool:
- """Process a group message."""
- group_msg_id = assembled[:GROUP_MSG_ID_LEN]
- group_packet = assembled[GROUP_MSG_ID_LEN:]
-
- try:
- group_name, group_message = [f.decode() for f in group_packet.split(US_BYTE)]
- except (IndexError, UnicodeError):
- raise FunctionReturn("Error: Received an invalid group message.")
-
- if not group_list.has_group(group_name):
- raise FunctionReturn("Error: Received message to unknown group.", output=False)
-
- group = group_list.get_group(group_name)
- window = window_list.get_window(group_name)
-
- if not group.has_member(account):
- raise FunctionReturn("Error: Account is not member of group.", output=False)
-
- # All copies of group messages user sends to members contain same UNIX timestamp.
- # This allows RxM to ignore copies of outgoing messages sent by the user.
- if origin == ORIGIN_USER_HEADER:
- if window.group_msg_id != group_msg_id:
- window.group_msg_id = group_msg_id
- window.add_new(ts, group_message, account, origin, output=True)
-
- elif origin == ORIGIN_CONTACT_HEADER:
- window.add_new(ts, group_message, account, origin, output=True)
-
- return group_list.get_group(group_name).log_messages
-
-
-def process_group_management_message(header: bytes,
- assembled: bytes,
- ts: 'datetime',
- account: str,
- origin: bytes,
- contact_list: 'ContactList',
- group_list: 'GroupList',
- window_list: 'WindowList') -> None:
- """Process group management message."""
- local_win = window_list.get_local_window()
- nick = contact_list.get_contact(account).nick
-
- try:
- group_name, *members = [f.decode() for f in assembled.split(US_BYTE)]
- except UnicodeError:
- raise FunctionReturn("Error: Received group management message had invalid encoding.")
-
- if origin == ORIGIN_USER_HEADER:
- raise FunctionReturn("Ignored group management message from user.", output=False)
-
- account_in_group = group_list.has_group(group_name) and group_list.get_group(group_name).has_member(account)
-
- def get_members() -> Tuple[List[str], str]:
- known = [contact_list.get_contact(m).nick for m in members if contact_list.has_contact(m)]
- unknown = [ m for m in members if not contact_list.has_contact(m)]
- just_len = len(max(known + unknown, key=len))
- listed_m_ = [f" * {m.ljust(just_len)}" for m in (known + unknown)]
- joined_m_ = ", ".join(known + unknown)
- return listed_m_, joined_m_
-
- if header == GROUP_MSG_INVITEJOIN_HEADER:
- lw_msg = f"{nick} has {'joined' if account_in_group else 'invited you to'} group '{group_name}'"
- message = [lw_msg]
- if members:
- listed_m, joined_m = get_members()
- message[0] += " with following members:"
- message += listed_m
- lw_msg += " with members " + joined_m
-
- box_print(message, head=1, tail=1)
- local_win.add_new(ts, lw_msg)
-
- elif header in [GROUP_MSG_MEMBER_ADD_HEADER, GROUP_MSG_MEMBER_REM_HEADER]:
- if account_in_group:
- action = {GROUP_MSG_MEMBER_ADD_HEADER: "added following member(s) to",
- GROUP_MSG_MEMBER_REM_HEADER: "removed following member(s) from"}[header]
- lw_msg = f"{nick} has {action} group {group_name}: "
- message = [lw_msg]
- if members:
- listed_m, joined_m = get_members()
- message += listed_m
- lw_msg += joined_m
-
- box_print(message, head=1, tail=1)
- local_win.add_new(ts, lw_msg)
-
- elif header == GROUP_MSG_EXIT_GROUP_HEADER:
- if account_in_group:
- box_print([f"{nick} has left group {group_name}.", '', "Warning",
- "Unless you remove the contact from the group, they",
- "can still read messages you send to the group."],
- head=1, tail=1)
- else:
- raise FunctionReturn("Error: Message from contact had an invalid header.")
diff --git a/src/rx/output_loop.py b/src/rx/output_loop.py
deleted file mode 100755
index cf00d38..0000000
--- a/src/rx/output_loop.py
+++ /dev/null
@@ -1,123 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
-"""
-
-import os
-import sys
-import time
-import typing
-
-from typing import Dict, List, Tuple
-
-from src.common.exceptions import FunctionReturn
-from src.common.output import clear_screen
-from src.common.statics import *
-
-from src.rx.commands import process_command
-from src.rx.files import process_imported_file
-from src.rx.key_exchanges import process_local_key, process_public_key
-from src.rx.messages import process_message
-from src.rx.packet import PacketList
-from src.rx.windows import WindowList
-
-if typing.TYPE_CHECKING:
- from datetime import datetime
- from multiprocessing import Queue
- from src.common.db_contacts import ContactList
- from src.common.db_groups import GroupList
- from src.common.db_keys import KeyList
- from src.common.db_masterkey import MasterKey
- from src.common.db_settings import Settings
-
-
-def output_loop(queues: Dict[bytes, 'Queue'],
- settings: 'Settings',
- contact_list: 'ContactList',
- key_list: 'KeyList',
- group_list: 'GroupList',
- master_key: 'MasterKey',
- stdin_fd: int,
- unittest: bool = False) -> None:
- """Process received packets according to their priority."""
- l_queue = queues[LOCAL_KEY_PACKET_HEADER]
- p_queue = queues[PUBLIC_KEY_PACKET_HEADER]
- m_queue = queues[MESSAGE_PACKET_HEADER]
- c_queue = queues[COMMAND_PACKET_HEADER]
- i_queue = queues[IMPORTED_FILE_HEADER]
- e_queue = queues[EXIT_QUEUE]
-
- sys.stdin = os.fdopen(stdin_fd)
- packet_buf = dict() # type: Dict[str, List[Tuple[datetime, bytes]]]
- pubkey_buf = dict() # type: Dict[str, bytes]
- packet_list = PacketList(settings, contact_list)
- window_list = WindowList(settings, contact_list, group_list, packet_list)
-
- clear_screen()
- while True:
- try:
- if l_queue.qsize() != 0:
- ts, packet = l_queue.get()
- process_local_key(ts, packet, window_list, contact_list, key_list, settings)
-
- if not contact_list.has_local_contact():
- time.sleep(0.01)
- continue
-
- if c_queue.qsize() != 0:
- ts, packet = c_queue.get()
- process_command(ts, packet, window_list, packet_list, contact_list, key_list, group_list, settings, master_key, pubkey_buf, e_queue)
- continue
-
- if p_queue.qsize() != 0:
- ts, packet = p_queue.get()
- process_public_key(ts, packet, window_list, settings, pubkey_buf)
- continue
-
- if window_list.active_win is not None and window_list.active_win.uid == WIN_TYPE_FILE:
- window_list.active_win.redraw_file_win()
-
- # Prioritize buffered messages
- for rx_account in packet_buf:
- if contact_list.has_contact(rx_account) and key_list.has_rx_key(rx_account) and packet_buf[rx_account]:
- ts, packet = packet_buf[rx_account].pop(0)
- process_message(ts, packet, window_list, packet_list, contact_list, key_list, group_list, settings, master_key)
- continue
-
- if m_queue.qsize() != 0:
- ts, packet = m_queue.get()
- rx_account = packet[PACKET_LENGTH:].decode()
-
- if contact_list.has_contact(rx_account) and key_list.has_rx_key(rx_account):
- process_message(ts, packet, window_list, packet_list, contact_list, key_list, group_list, settings, master_key)
- else:
- packet_buf.setdefault(rx_account, []).append((ts, packet))
- continue
-
- if i_queue.qsize() != 0:
- ts, packet = i_queue.get()
- process_imported_file(ts, packet, window_list, settings)
- continue
-
- time.sleep(0.01)
-
- if unittest and queues[UNITTEST_QUEUE].qsize() != 0:
- break
-
- except (FunctionReturn, KeyboardInterrupt):
- pass
diff --git a/src/rx/packet.py b/src/rx/packet.py
deleted file mode 100644
index 73f3200..0000000
--- a/src/rx/packet.py
+++ /dev/null
@@ -1,385 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
-"""
-
-import datetime
-import struct
-import typing
-import zlib
-
-from typing import Callable, Dict, Generator, Iterable, List, Sized, Tuple
-
-import nacl.exceptions
-
-from src.common.crypto import auth_and_decrypt, hash_chain, rm_padding_bytes
-from src.common.encoding import bytes_to_int
-from src.common.exceptions import FunctionReturn
-from src.common.input import yes
-from src.common.misc import readable_size
-from src.common.output import box_print, c_print
-from src.common.statics import *
-
-from src.rx.files import process_received_file
-
-if typing.TYPE_CHECKING:
- from src.common.db_contacts import Contact, ContactList
- from src.common.db_keys import KeyList
- from src.common.db_settings import Settings
- from src.rx.windows import RxWindow, WindowList
-
-
-def get_packet_values(packet: bytes,
- window: 'RxWindow',
- contact_list: 'ContactList') -> Tuple[bytes, str, str, str, str, str]:
- """Load packet-related variables."""
- if packet[:1] == COMMAND_PACKET_HEADER:
- origin = ORIGIN_USER_HEADER
- direction = "from"
- key_dir = TX
- p_type = "command"
- account = LOCAL_ID
- nick = "local TxM"
- else:
- origin = packet[345:346]
- if origin not in [ORIGIN_USER_HEADER, ORIGIN_CONTACT_HEADER]:
- raise FunctionReturn("Error: Received packet had an invalid origin-header.", window=window)
-
- direction, key_dir = ("sent to", TX) if origin == ORIGIN_USER_HEADER else ("from", RX)
- p_type = "packet"
- account = packet[346:].decode()
- nick = contact_list.get_contact(account).nick
-
- if account == LOCAL_ID:
- raise FunctionReturn("Warning! Received packet masqueraded as command.", window=window)
-
- return origin, direction, key_dir, p_type, account, nick
-
-
-def process_offset(offset: int,
- origin: bytes,
- direction: str,
- nick: str,
- window: 'RxWindow') -> None:
- """Display warnings about increased offsets.
-
- If offset has increased over threshold, ask
- the user to confirm hash ratchet catch up.
- """
- if offset > HARAC_WARN_THRESHOLD and origin == ORIGIN_CONTACT_HEADER:
- box_print([f"Warning! {offset} packets from {nick} were not received.",
- f"This might indicate that {offset} most recent packets were ",
- f"lost during transmission, or that the contact is attempting ",
- f"a DoS attack. You can wait for TFC to attempt to decrypt the ",
- "packet, but it might take a very long time or even forever."])
- if not yes("Proceed with the decryption?", tail=1):
- raise FunctionReturn(f"Dropped packet from {nick}.", window=window)
- elif offset:
- box_print(f"Warning! {offset} packet{'s' if offset > 1 else ''} {direction} {nick} were not received.")
-
-
-def decrypt_assembly_packet(packet: bytes,
- window_list: 'WindowList',
- contact_list: 'ContactList',
- key_list: 'KeyList') -> Tuple[bytes, str, bytes]:
- """Decrypt assembly packet from contact/local TxM."""
- enc_harac = packet[1:49]
- enc_msg = packet[49:345]
- window = window_list.get_local_window()
-
- origin, direction, key_dir, p_type, account, nick = get_packet_values(packet, window, contact_list)
-
- # Load keys
- keyset = key_list.get_keyset(account)
- header_key = getattr(keyset, f'{key_dir}_hek')
- message_key = getattr(keyset, f'{key_dir}_key')
-
- if any(k == bytes(KEY_LENGTH) for k in [header_key, message_key]):
- raise FunctionReturn("Warning! Loaded zero-key for packet decryption.")
-
- # Decrypt hash ratchet counter
- try:
- harac_bytes = auth_and_decrypt(enc_harac, header_key, soft_e=True)
- except nacl.exceptions.CryptoError:
- raise FunctionReturn(f"Warning! Received {p_type} {direction} {nick} had an invalid hash ratchet MAC.", window=window)
-
- # Catch up with hash ratchet offset
- purp_harac = bytes_to_int(harac_bytes)
- stored_harac = getattr(keyset, f'{key_dir}_harac')
- offset = purp_harac - stored_harac
- if offset < 0:
- raise FunctionReturn(f"Warning! Received {p_type} {direction} {nick} had an expired hash ratchet counter.", window=window)
-
- process_offset(offset, origin, direction, nick, window)
- for _ in range(offset):
- message_key = hash_chain(message_key)
-
- # Decrypt packet
- try:
- assembly_packet = auth_and_decrypt(enc_msg, message_key, soft_e=True)
- except nacl.exceptions.CryptoError:
- raise FunctionReturn(f"Warning! Received {p_type} {direction} {nick} had an invalid MAC.", window=window)
-
- # Update keys in database
- keyset.update_key(key_dir, hash_chain(message_key), offset + 1)
-
- return assembly_packet, account, origin
-
-
-class Packet(object):
- """Packet objects collect and keep track of received assembly packets."""
-
- def __init__(self,
- account: str,
- contact: 'Contact',
- origin: bytes,
- p_type: str,
- settings: 'Settings') -> None:
- """Create a new Packet object."""
- self.account = account
- self.contact = contact
- self.origin = origin
- self.type = p_type
- self.settings = settings
-
- # File transmission metadata
- self.packets = None # type: int
- self.time = None # type: str
- self.size = None # type: str
- self.name = None # type: str
-
- self.sh = dict(message=M_S_HEADER, file=F_S_HEADER, command=C_S_HEADER)[self.type]
- self.lh = dict(message=M_L_HEADER, file=F_L_HEADER, command=C_L_HEADER)[self.type]
- self.ah = dict(message=M_A_HEADER, file=F_A_HEADER, command=C_A_HEADER)[self.type]
- self.eh = dict(message=M_E_HEADER, file=F_E_HEADER, command=C_E_HEADER)[self.type]
- self.ch = dict(message=M_C_HEADER, file=F_C_HEADER, command=C_C_HEADER)[self.type]
- self.nh = dict(message=P_N_HEADER, file=P_N_HEADER, command=C_N_HEADER)[self.type]
-
- self.assembly_pt_list = [] # type: List[bytes]
- self.log_masking_ctr = 0 # type: int
- self.long_active = False
- self.is_complete = False
-
- def add_masking_packet_to_logfile(self, increase: int = 1) -> None:
- """Increase log_masking_ctr for message and file packets."""
- if self.type in [MESSAGE, FILE]:
- self.log_masking_ctr += increase
-
- def clear_file_metadata(self) -> None:
- """Clear file metadata."""
- self.packets = None
- self.time = None
- self.size = None
- self.name = None
-
- def clear_assembly_packets(self) -> None:
- """Clear packet state."""
- self.assembly_pt_list = []
- self.long_active = False
- self.is_complete = False
-
- def new_file_packet(self) -> None:
- """New file transmission handling logic."""
- name = self.name
- was_active = self.long_active
- self.clear_file_metadata()
- self.clear_assembly_packets()
-
- if self.origin == ORIGIN_USER_HEADER:
- self.add_masking_packet_to_logfile()
- raise FunctionReturn("Ignored file from user.", output=False)
-
- if not self.contact.file_reception:
- self.add_masking_packet_to_logfile()
- raise FunctionReturn(f"Alert! File transmission from {self.contact.nick} but reception is disabled.")
-
- if was_active:
- c_print(f"Alert! File '{name}' from {self.contact.nick} never completed.", head=1, tail=1)
-
- def check_long_packet(self):
- """Check if long packet has permission to be extended."""
- if not self.long_active:
- self.add_masking_packet_to_logfile()
- raise FunctionReturn("Missing start packet.", output=False)
-
- if self.type == FILE and not self.contact.file_reception:
- self.add_masking_packet_to_logfile(increase=len(self.assembly_pt_list) + 1)
- self.clear_assembly_packets()
- raise FunctionReturn("Alert! File reception disabled mid-transfer.")
-
- def process_short_header(self, packet: bytes) -> None:
- """Process short packet."""
- if self.long_active:
- self.add_masking_packet_to_logfile(increase=len(self.assembly_pt_list))
-
- if self.type == FILE:
- self.new_file_packet()
- packet = self.sh + packet[17:]
-
- self.assembly_pt_list = [packet]
- self.long_active = False
- self.is_complete = True
-
- def process_long_header(self, packet: bytes) -> None:
- """Process first packet of long transmission."""
- if self.long_active:
- self.add_masking_packet_to_logfile(increase=len(self.assembly_pt_list))
-
- if self.type == FILE:
- self.new_file_packet()
- try:
- self.packets = bytes_to_int(packet[1:9])
- self.time = str(datetime.timedelta(seconds=bytes_to_int(packet[9:17])))
- self.size = readable_size(bytes_to_int(packet[17:25]))
- self.name = packet[25:].split(US_BYTE)[0].decode()
- packet = self.lh + packet[25:]
-
- box_print([f'Receiving file from {self.contact.nick}:',
- f'{self.name} ({self.size})',
- f'ETA {self.time} ({self.packets} packets)'])
-
- except (struct.error, UnicodeError, ValueError):
- self.add_masking_packet_to_logfile()
- raise FunctionReturn("Error: Received file packet had an invalid header.")
-
- self.assembly_pt_list = [packet]
- self.long_active = True
- self.is_complete = False
-
- def process_append_header(self, packet: bytes) -> None:
- """Process consecutive packet(s) of long transmission."""
- self.check_long_packet()
- self.assembly_pt_list.append(packet)
-
- def process_end_header(self, packet: bytes) -> None:
- """Process last packet of long transmission."""
- self.check_long_packet()
- self.assembly_pt_list.append(packet)
- self.long_active = False
- self.is_complete = True
-
- def abort_packet(self, message: str) -> None:
- """Process cancel/noise packet."""
- if self.type == FILE and self.origin == ORIGIN_CONTACT_HEADER and self.long_active:
- c_print(message, head=1, tail=1)
- self.clear_file_metadata()
- self.add_masking_packet_to_logfile(increase=len(self.assembly_pt_list) + 1)
- self.clear_assembly_packets()
-
- def process_cancel_header(self, _: bytes) -> None:
- """Process cancel packet for long transmission."""
- self.abort_packet(f"{self.contact.nick} cancelled file.")
-
- def process_noise_header(self, _: bytes) -> None:
- """Process traffic masking noise packet."""
- self.abort_packet(f"Alert! File '{self.name}' from {self.contact.nick} never completed.")
-
- def add_packet(self, packet: bytes) -> None:
- """Add a new assembly packet to the object."""
- try:
- func_d = {self.sh: self.process_short_header,
- self.lh: self.process_long_header,
- self.ah: self.process_append_header,
- self.eh: self.process_end_header,
- self.ch: self.process_cancel_header,
- self.nh: self.process_noise_header} # type: Dict[bytes, Callable]
- func = func_d[packet[:1]]
- except KeyError:
- # Erroneous headers are ignored, but stored as placeholder data.
- self.add_masking_packet_to_logfile()
- raise FunctionReturn("Error: Received packet had an invalid assembly packet header.")
- func(packet)
-
- def assemble_message_packet(self) -> bytes:
- """Assemble message packet."""
- padded = b''.join([p[1:] for p in self.assembly_pt_list])
- payload = rm_padding_bytes(padded)
-
- if len(self.assembly_pt_list) > 1:
- msg_ct = payload[:-KEY_LENGTH]
- msg_key = payload[-KEY_LENGTH:]
-
- try:
- payload = auth_and_decrypt(msg_ct, msg_key, soft_e=True)
- except (nacl.exceptions.CryptoError, nacl.exceptions.ValueError):
- raise FunctionReturn("Error: Decryption of message failed.")
-
- try:
- return zlib.decompress(payload)
- except zlib.error:
- raise FunctionReturn("Error: Decompression of message failed.")
-
- def assemble_and_store_file(self) -> None:
- """Assemble file packet and store it."""
- padded = b''.join([p[1:] for p in self.assembly_pt_list])
- payload = rm_padding_bytes(padded)
-
- process_received_file(payload, self.contact.nick)
-
- def assemble_command_packet(self) -> bytes:
- """Assemble command packet."""
- padded = b''.join([p[1:] for p in self.assembly_pt_list])
- payload = rm_padding_bytes(padded)
-
- if len(self.assembly_pt_list) > 1:
- cmd_hash = payload[-KEY_LENGTH:]
- payload = payload[:-KEY_LENGTH]
- if hash_chain(payload) != cmd_hash:
- raise FunctionReturn("Error: Received an invalid command.")
-
- try:
- return zlib.decompress(payload)
- except zlib.error:
- raise FunctionReturn("Error: Decompression of command failed.")
-
-
-class PacketList(Iterable, Sized):
- """PacketList manages all file, message, and command packets."""
-
- def __init__(self, settings: 'Settings', contact_list: 'ContactList') -> None:
- """Create a new PacketList object."""
- self.settings = settings
- self.contact_list = contact_list
- self.packets = [] # type: List[Packet]
-
- def __iter__(self) -> Generator:
- """Iterate over packet list."""
- yield from self.packets
-
- def __len__(self) -> int:
- """Return number of packets in packet list."""
- return len(self.packets)
-
- def has_packet(self, account: str, origin: bytes, p_type: str) -> bool:
- """Return True if packet with matching selectors exists, else False."""
- return any(p for p in self.packets if (p.account == account
- and p.origin == origin
- and p.type == p_type))
-
- def get_packet(self, account: str, origin: bytes, p_type: str) -> Packet:
- """Get packet based on account, origin and type.
-
- If packet does not exist, create it.
- """
- if not self.has_packet(account, origin, p_type):
- contact = self.contact_list.get_contact(account)
- self.packets.append(Packet(account, contact, origin, p_type, self.settings))
-
- return next(p for p in self.packets if (p.account == account
- and p.origin == origin
- and p.type == p_type))
diff --git a/src/rx/receiver_loop.py b/src/rx/receiver_loop.py
deleted file mode 100755
index 5f29e5c..0000000
--- a/src/rx/receiver_loop.py
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see <https://www.gnu.org/licenses/>.
-"""
-
-import time
-import typing
-
-from datetime import datetime
-from typing import Dict
-
-from src.common.misc import ignored
-from src.common.output import box_print
-from src.common.reed_solomon import ReedSolomonError, RSCodec
-from src.common.statics import *
-
-if typing.TYPE_CHECKING:
- from multiprocessing import Queue
- from src.common.db_settings import Settings
-
-
-def receiver_loop(queues: Dict[bytes, 'Queue'],
- settings: 'Settings',
- unittest: bool = False) -> None:
- """Decode received packets and forward them to packet queues.
-
- This function also determines the timestamp for received message.
- """
- rs = RSCodec(2 * settings.session_serial_error_correction)
- gw_queue = queues[GATEWAY_QUEUE]
-
- while True:
- with ignored(EOFError, KeyboardInterrupt):
- if gw_queue.qsize() == 0:
- time.sleep(0.01)
-
- packet = gw_queue.get()
- timestamp = datetime.now()
-
- try:
- packet = bytes(rs.decode(packet))
- except ReedSolomonError:
- box_print("Error: Failed to correct errors in received packet.", head=1, tail=1)
- continue
-
- p_header = packet[:1]
- if p_header in [PUBLIC_KEY_PACKET_HEADER, MESSAGE_PACKET_HEADER,
- LOCAL_KEY_PACKET_HEADER, COMMAND_PACKET_HEADER,
- IMPORTED_FILE_HEADER]:
- queues[p_header].put((timestamp, packet))
-
- if unittest:
- break
diff --git a/src/rx/windows.py b/src/rx/windows.py
deleted file mode 100644
index 39474c6..0000000
--- a/src/rx/windows.py
+++ /dev/null
@@ -1,327 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see <https://www.gnu.org/licenses/>.
-"""
-
-import os
-import sys
-import textwrap
-import typing
-
-from datetime import datetime
-from typing import Dict, Generator, Iterable, List, Tuple
-
-from src.common.exceptions import FunctionReturn
-from src.common.misc import get_terminal_width
-from src.common.output import c_print, clear_screen, print_on_previous_line
-from src.common.statics import *
-
-if typing.TYPE_CHECKING:
- from src.common.db_contacts import Contact, ContactList
- from src.common.db_groups import GroupList
- from src.common.db_settings import Settings
- from src.rx.packet import PacketList
-
-
-class RxWindow(Iterable):
- """RxWindow is an ephemeral message log for contact or group.
-
- In addition, command history and file transfers have
- their own windows, accessible with separate commands.
- """
-
- def __init__(self,
- uid: str,
- contact_list: 'ContactList',
- group_list: 'GroupList',
- settings: 'Settings',
- packet_list: 'PacketList' = None) -> None:
- """Create a new RxWindow object."""
- self.uid = uid
- self.contact_list = contact_list
- self.group_list = group_list
- self.settings = settings
- self.packet_list = packet_list
-
- self.is_active = False
- self.group_msg_id = os.urandom(GROUP_MSG_ID_LEN)
-
- self.window_contacts = [] # type: List[Contact]
- self.message_log = [] # type: List[Tuple[datetime, str, str, bytes, bool]]
- self.handle_dict = dict() # type: Dict[str, str]
- self.previous_msg_ts = datetime.now()
- self.unread_messages = 0
-
- if self.uid == LOCAL_ID:
- self.type = WIN_TYPE_COMMAND
- self.type_print = 'system messages'
- self.window_contacts = [self.contact_list.get_contact(LOCAL_ID)]
- self.name = self.type_print
-
- elif self.uid == WIN_TYPE_FILE:
- self.type = WIN_TYPE_FILE
- self.packet_list = packet_list
-
- elif self.uid in self.contact_list.get_list_of_accounts():
- self.type = WIN_TYPE_CONTACT
- self.type_print = 'contact'
- self.window_contacts = [self.contact_list.get_contact(uid)]
- self.name = self.contact_list.get_contact(uid).nick
-
- elif self.uid in self.group_list.get_list_of_group_names():
- self.type = WIN_TYPE_GROUP
- self.type_print = 'group'
- self.window_contacts = self.group_list.get_group_members(self.uid)
- self.name = self.group_list.get_group(self.uid).name
-
- else:
- raise FunctionReturn(f"Invalid window '{uid}'")
-
- def __len__(self) -> int:
- """Return number of message tuples in message log."""
- return len(self.message_log)
-
- def __iter__(self) -> Generator:
- """Iterate over window's message log."""
- yield from self.message_log
-
- def add_contacts(self, accounts: List[str]) -> None:
- """Add contact objects to window."""
- self.window_contacts += [self.contact_list.get_contact(a) for a in accounts
- if not self.has_contact(a) and self.contact_list.has_contact(a)]
-
- def remove_contacts(self, accounts: List[str]) -> None:
- """Remove contact objects from window."""
- to_remove = set(accounts) & set([m.rx_account for m in self.window_contacts])
- if to_remove:
- self.window_contacts = [c for c in self.window_contacts if c.rx_account not in to_remove]
-
- def reset_window(self) -> None:
- """Reset window."""
- self.message_log = []
-
- def has_contact(self, account: str) -> bool:
- """Return True if contact with specified account is in window, else False."""
- return any(c.rx_account == account for c in self.window_contacts)
-
- def create_handle_dict(self, message_log: List[Tuple['datetime', str, str, bytes, bool]] = None) -> None:
- """Pre-generate {account: handle} dictionary.
-
- This allows `self.print()` to indent accounts and nicks without
- having to loop over entire message list for every message.
- """
- accounts = set(c.rx_account for c in self.window_contacts)
- if message_log is not None:
- accounts |= set(a for ts, ma, a, o, w in message_log)
- for a in accounts:
- self.handle_dict[a] = self.contact_list.get_contact(a).nick if self.contact_list.has_contact(a) else a
-
- def get_handle(self, time_stamp: 'datetime', account: str, origin: bytes, whisper: bool=False) -> str:
- """Returns indented handle complete with headers and trailers."""
- if self.type == WIN_TYPE_COMMAND:
- handle = "-!- "
- else:
- handle = self.handle_dict[account] if origin == ORIGIN_CONTACT_HEADER else "Me"
- handles = list(self.handle_dict.values()) + ["Me"]
- indent = len(max(handles, key=len)) - len(handle) if self.is_active else 0
- handle = indent * ' ' + handle
-
- handle = time_stamp.strftime('%H:%M') + ' ' + handle
-
- if not self.is_active:
- handle += {WIN_TYPE_GROUP: f" (group {self.name})",
- WIN_TYPE_CONTACT: f" (private message)" }.get(self.type, '')
-
- if self.type != WIN_TYPE_COMMAND:
- if whisper:
- handle += " (whisper)"
- handle += ": "
-
- return handle
-
- def print(self, msg_tuple: Tuple['datetime', str, str, bytes, bool], file=None) -> None:
- """Print new message to window."""
- bold_on, bold_off, f_name = (BOLD_ON, NORMAL_TEXT, sys.stdout) if file is None else ('', '', file)
- ts, message, account, origin, whisper = msg_tuple
-
- if not self.is_active and not self.settings.new_message_notify_preview and self.type != WIN_TYPE_COMMAND:
- message = BOLD_ON + f"{self.unread_messages + 1} unread message{'s' if self.unread_messages > 1 else ''}" + NORMAL_TEXT
-
- handle = self.get_handle(ts, account, origin, whisper)
- wrapper = textwrap.TextWrapper(get_terminal_width(), initial_indent=handle, subsequent_indent=len(handle)*' ')
- wrapped = wrapper.fill(message)
- if wrapped == '':
- wrapped = handle
- wrapped = bold_on + wrapped[:len(handle)] + bold_off + wrapped[len(handle):]
-
- if self.is_active:
- if self.previous_msg_ts.date() != ts.date():
- print(bold_on + f"00:00 -!- Day changed to {str(ts.date())}" + bold_off, file=f_name)
- print(wrapped, file=f_name)
-
- else:
- self.unread_messages += 1
- if (self.type == WIN_TYPE_CONTACT and self.contact_list.get_contact(account).notifications) \
- or (self.type == WIN_TYPE_GROUP and self.group_list.get_group(self.uid).notifications) \
- or (self.type == WIN_TYPE_COMMAND):
-
- if len(wrapped.split('\n')) > 1:
- # Preview only first line of long message
- print(wrapped.split('\n')[0][:-3] + "...")
- else:
- print(wrapped)
- print_on_previous_line(delay=self.settings.new_message_notify_duration, flush=True)
-
- self.previous_msg_ts = ts
-
- def add_new(self,
- timestamp: 'datetime',
- message: str,
- account: str = LOCAL_ID,
- origin: bytes = ORIGIN_USER_HEADER,
- output: bool = False,
- whisper: bool = False) -> None:
- """Add message tuple to message log and optionally print it."""
- msg_tuple = (timestamp, message, account, origin, whisper)
- self.message_log.append(msg_tuple)
-
- self.handle_dict[account] = (self.contact_list.get_contact(account).nick
- if self.contact_list.has_contact(account) else account)
- if output:
- self.print(msg_tuple)
-
- def redraw(self, file=None) -> None:
- """Print all messages received to window."""
- self.unread_messages = 0
-
- if file is None:
- clear_screen()
-
- if self.message_log:
- self.previous_msg_ts = self.message_log[0][0]
- self.create_handle_dict(self.message_log)
- for msg_tuple in self.message_log:
- self.print(msg_tuple, file)
- else:
- c_print(f"This window for {self.name} is currently empty.", head=1, tail=1)
-
- def redraw_file_win(self) -> None:
- """Draw file transmission window progress bars."""
- # Columns
- c1 = ['File name']
- c2 = ['Size']
- c3 = ['Sender']
- c4 = ['Complete']
-
- for i, p in enumerate(self.packet_list):
- if p.type == FILE and len(p.assembly_pt_list) > 0:
- c1.append(p.name)
- c2.append(p.size)
- c3.append(p.contact.nick)
- c4.append(f"{len(p.assembly_pt_list) / p.packets * 100:.2f}%")
-
- if not len(c1) > 1:
- c_print("No file transmissions currently in progress.", head=1, tail=1)
- print_on_previous_line(reps=3, delay=0.1)
- return None
-
- lst = []
- for name, size, sender, percent, in zip(c1, c2, c3, c4):
- lst.append('{0:{1}} {2:{3}} {4:{5}} {6:{7}}'.format(
- name, max(len(v) for v in c1) + CONTACT_LIST_INDENT,
- size, max(len(v) for v in c2) + CONTACT_LIST_INDENT,
- sender, max(len(v) for v in c3) + CONTACT_LIST_INDENT,
- percent, max(len(v) for v in c4) + CONTACT_LIST_INDENT))
-
- lst.insert(1, get_terminal_width() * '─')
-
- print('\n' + '\n'.join(lst) + '\n')
- print_on_previous_line(reps=len(lst)+2, delay=0.1)
-
-
-class WindowList(Iterable):
- """WindowList manages a list of Window objects."""
-
- def __init__(self,
- settings: 'Settings',
- contact_list: 'ContactList',
- group_list: 'GroupList',
- packet_list: 'PacketList') -> None:
- """Create a new WindowList object."""
- self.settings = settings
- self.contact_list = contact_list
- self.group_list = group_list
- self.packet_list = packet_list
-
- self.active_win = None # type: RxWindow
- self.windows = [RxWindow(uid, self.contact_list, self.group_list, self.settings, self.packet_list)
- for uid in ([WIN_TYPE_FILE]
- + self.contact_list.get_list_of_accounts()
- + self.group_list.get_list_of_group_names())]
-
- if self.contact_list.has_local_contact():
- self.select_rx_window(LOCAL_ID)
-
- def __len__(self) -> int:
- """Return number of windows in window list."""
- return len(self.windows)
-
- def __iter__(self) -> Generator:
- """Iterate over window list."""
- yield from self.windows
-
- def get_group_windows(self) -> List[RxWindow]:
- """Return list of group windows."""
- return [w for w in self.windows if w.type == WIN_TYPE_GROUP]
-
- def has_window(self, uid: str) -> bool:
- """Return True if window with matching UID exists, else False."""
- return uid in [w.uid for w in self.windows]
-
- def remove_window(self, uid: str) -> None:
- """Remove window based on it's UID."""
- for i, w in enumerate(self.windows):
- if uid == w.uid:
- del self.windows[i]
- break
-
- def select_rx_window(self, uid: str) -> None:
- """Select new active window."""
- if self.active_win is not None:
- self.active_win.is_active = False
- self.active_win = self.get_window(uid)
- self.active_win.is_active = True
-
- if self.active_win.type == WIN_TYPE_FILE:
- self.active_win.redraw_file_win()
- else:
- self.active_win.redraw()
-
- def get_local_window(self) -> 'RxWindow':
- """Return command window."""
- return self.get_window(LOCAL_ID)
-
- def get_window(self, uid: str) -> 'RxWindow':
- """Return window that matches the specified UID.
-
- Create window if it does not exist.
- """
- if not self.has_window(uid):
- self.windows.append(RxWindow(uid, self.contact_list, self.group_list, self.settings, self.packet_list))
-
- return next(w for w in self.windows if w.uid == uid)
diff --git a/src/tx/__init__.py b/src/transmitter/__init__.py
similarity index 100%
rename from src/tx/__init__.py
rename to src/transmitter/__init__.py
diff --git a/src/transmitter/commands.py b/src/transmitter/commands.py
new file mode 100755
index 0000000..61a9b89
--- /dev/null
+++ b/src/transmitter/commands.py
@@ -0,0 +1,691 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import os
+import readline
+import struct
+import textwrap
+import time
+import typing
+
+from multiprocessing import Queue
+from typing import Any, Dict, List, Tuple, Union
+
+from src.common.db_logs import access_logs, change_log_db_key, remove_logs
+from src.common.encoding import b58decode, b58encode, bool_to_bytes, int_to_bytes, onion_address_to_pub_key
+from src.common.exceptions import FunctionReturn
+from src.common.input import yes
+from src.common.misc import ensure_dir, get_terminal_width, validate_onion_addr
+from src.common.output import clear_screen, m_print, phase, print_on_previous_line
+from src.common.statics import *
+
+from src.transmitter.commands_g import process_group_command
+from src.transmitter.contact import add_new_contact, change_nick, contact_setting, remove_contact
+from src.transmitter.key_exchanges import export_onion_service_data, new_local_key, rxp_load_psk, verify_fingerprints
+from src.transmitter.packet import cancel_packet, queue_command, queue_message, queue_to_nc
+from src.transmitter.user_input import UserInput
+from src.transmitter.windows import select_window
+
+if typing.TYPE_CHECKING:
+ from src.common.db_contacts import ContactList
+ from src.common.db_groups import GroupList
+ from src.common.db_masterkey import MasterKey
+ from src.common.db_onion import OnionService
+ from src.common.db_settings import Settings
+ from src.common.gateway import Gateway
+ from src.transmitter.windows import TxWindow
+ QueueDict = Dict[bytes, Queue]
+
+
+def process_command(user_input: 'UserInput',
+ window: 'TxWindow',
+ contact_list: 'ContactList',
+ group_list: 'GroupList',
+ settings: 'Settings',
+ queues: 'QueueDict',
+ master_key: 'MasterKey',
+ onion_service: 'OnionService',
+ gateway: 'Gateway'
+ ) -> None:
+ """\
+ Select function based on the first keyword of the
+ issued command, and pass relevant parameters to it.
+ """
+ # Keyword Function to run ( Parameters )
+ # -----------------------------------------------------------------------------------------------------------------------------------------
+ d = {'about': (print_about, ),
+ 'add': (add_new_contact, contact_list, group_list, settings, queues, onion_service ),
+ 'cf': (cancel_packet, user_input, window, settings, queues ),
+ 'cm': (cancel_packet, user_input, window, settings, queues ),
+ 'clear': (clear_screens, user_input, window, settings, queues ),
+ 'cmd': (rxp_show_sys_win, user_input, window, settings, queues ),
+ 'connect': (send_onion_service_key, contact_list, settings, onion_service, gateway),
+ 'exit': (exit_tfc, settings, queues, gateway),
+ 'export': (log_command, user_input, window, contact_list, group_list, settings, queues, master_key ),
+ 'fw': (rxp_show_sys_win, user_input, window, settings, queues ),
+ 'group': (process_group_command, user_input, contact_list, group_list, settings, queues, master_key ),
+ 'help': (print_help, settings ),
+ 'history': (log_command, user_input, window, contact_list, group_list, settings, queues, master_key ),
+ 'localkey': (new_local_key, contact_list, settings, queues, ),
+ 'logging': (contact_setting, user_input, window, contact_list, group_list, settings, queues ),
+ 'msg': (select_window, user_input, window, settings, queues, onion_service, gateway),
+ 'names': (print_recipients, contact_list, group_list, ),
+ 'nick': (change_nick, user_input, window, contact_list, group_list, settings, queues ),
+ 'notify': (contact_setting, user_input, window, contact_list, group_list, settings, queues ),
+ 'passwd': (change_master_key, user_input, contact_list, group_list, settings, queues, master_key, onion_service ),
+ 'psk': (rxp_load_psk, window, contact_list, settings, queues ),
+ 'reset': (clear_screens, user_input, window, settings, queues ),
+ 'rm': (remove_contact, user_input, window, contact_list, group_list, settings, queues, master_key ),
+ 'rmlogs': (remove_log, user_input, contact_list, group_list, settings, queues, master_key ),
+ 'set': (change_setting, user_input, window, contact_list, group_list, settings, queues, gateway),
+ 'settings': (print_settings, settings, gateway),
+ 'store': (contact_setting, user_input, window, contact_list, group_list, settings, queues ),
+ 'unread': (rxp_display_unread, settings, queues ),
+ 'verify': (verify, window, contact_list ),
+ 'whisper': (whisper, user_input, window, settings, queues ),
+ 'whois': (whois, user_input, contact_list, group_list ),
+ 'wipe': (wipe, settings, queues, gateway)
+ } # type: Dict[str, Any]
+
+ try:
+ cmd_key = user_input.plaintext.split()[0]
+ except (IndexError, UnboundLocalError):
+ raise FunctionReturn("Error: Invalid command.", head_clear=True)
+
+ try:
+ from_dict = d[cmd_key]
+ except KeyError:
+ raise FunctionReturn(f"Error: Invalid command '{cmd_key}'.", head_clear=True)
+
+ func = from_dict[0]
+ parameters = from_dict[1:]
+ func(*parameters)
+
+
+def print_about() -> None:
+ """Print URLs that direct to TFC's project site and documentation."""
+ clear_screen()
+ print(f"\n Tinfoil Chat {VERSION}\n\n"
+ " Website: https://github.com/maqp/tfc/\n"
+ " Wikipage: https://github.com/maqp/tfc/wiki\n")
+
+
+def clear_screens(user_input: 'UserInput',
+ window: 'TxWindow',
+ settings: 'Settings',
+ queues: 'QueueDict'
+ ) -> None:
+ """Clear/reset screen of Source, Destination, and Networked Computer.
+
+ Only send an unencrypted command to Networked Computer if traffic
+ masking is disabled.
+
+ With clear command, sending only the command header is enough.
+ However, as reset command removes the ephemeral message log on
+ Receiver Program, Transmitter Program must define the window to
+ reset (in case, e.g., previous window selection command packet
+ dropped, and active window state is inconsistent between the
+ TCB programs).
+ """
+ clear = user_input.plaintext.split()[0] == CLEAR
+
+ command = CLEAR_SCREEN if clear else RESET_SCREEN + window.uid
+ queue_command(command, settings, queues)
+
+ clear_screen()
+
+ if not settings.traffic_masking:
+ pt_cmd = UNENCRYPTED_SCREEN_CLEAR if clear else UNENCRYPTED_SCREEN_RESET
+ packet = UNENCRYPTED_DATAGRAM_HEADER + pt_cmd
+ queue_to_nc(packet, queues[RELAY_PACKET_QUEUE])
+
+ if not clear:
+ readline.clear_history()
+ os.system(RESET)
+
+
+def rxp_show_sys_win(user_input: 'UserInput',
+ window: 'TxWindow',
+ settings: 'Settings',
+ queues: 'QueueDict',
+ ) -> None:
+ """\
+ Display a system window on Receiver Program until the user presses
+ Enter.
+
+ Receiver Program has a dedicated window, WIN_UID_LOCAL, for system
+ messages that shows information about received commands, status
+ messages etc.
+
+ Receiver Program also has another window, WIN_UID_FILE, that shows
+ progress of file transmission from contacts that have traffic
+ masking enabled.
+ """
+ cmd = user_input.plaintext.split()[0]
+ win_uid = dict(cmd=WIN_UID_LOCAL, fw=WIN_UID_FILE)[cmd]
+
+ command = WIN_SELECT + win_uid
+ queue_command(command, settings, queues)
+
+ try:
+ m_print(f" returns Receiver to {window.name}'s window", manual_proceed=True, box=True)
+ except (EOFError, KeyboardInterrupt):
+ pass
+
+ print_on_previous_line(reps=4, flush=True)
+
+ command = WIN_SELECT + window.uid
+ queue_command(command, settings, queues)
+
+
+def exit_tfc(settings: 'Settings',
+ queues: 'QueueDict',
+ gateway: 'Gateway'
+ ) -> None:
+ """Exit TFC on all three computers.
+
+ To exit TFC as fast as possible, this function starts by clearing
+ all command queues before sending the exit command to Receiver
+ Program. It then sends an unencrypted exit command to Relay Program
+ on Networked Computer. As the `sender_loop` process loads the
+ unencrypted exit command from queue, it detects the user's
+ intention, and after outputting the packet, sends the EXIT signal to
+ Transmitter Program's main() method that's running the
+ `monitor_processes` loop. Upon receiving the EXIT signal,
+ `monitor_processes` kills all Transmitter Program's processes and
+ exits the program.
+
+ During local testing, this function adds some delays to prevent TFC
+ programs from dying when sockets disconnect.
+ """
+ for q in [COMMAND_PACKET_QUEUE, RELAY_PACKET_QUEUE]:
+ while queues[q].qsize() > 0:
+ queues[q].get()
+
+ queue_command(EXIT_PROGRAM, settings, queues)
+
+ if not settings.traffic_masking:
+ if settings.local_testing_mode:
+ time.sleep(LOCAL_TESTING_PACKET_DELAY)
+ time.sleep(gateway.settings.data_diode_sockets * 1.5)
+ else:
+ time.sleep(gateway.settings.race_condition_delay)
+
+ relay_command = UNENCRYPTED_DATAGRAM_HEADER + UNENCRYPTED_EXIT_COMMAND
+ queue_to_nc(relay_command, queues[RELAY_PACKET_QUEUE])
+
+
+def log_command(user_input: 'UserInput',
+ window: 'TxWindow',
+ contact_list: 'ContactList',
+ group_list: 'GroupList',
+ settings: 'Settings',
+ queues: 'QueueDict',
+ master_key: 'MasterKey'
+ ) -> None:
+ """Display message logs or export them to plaintext file on TCBs.
+
+ Transmitter Program processes sent, Receiver Program sent and
+ received, messages of all participants in the active window.
+ """
+ cmd = user_input.plaintext.split()[0]
+ export, header = dict(export =(True, LOG_EXPORT),
+ history=(False, LOG_DISPLAY))[cmd]
+
+ try:
+ msg_to_load = int(user_input.plaintext.split()[1])
+ except ValueError:
+ raise FunctionReturn("Error: Invalid number of messages.", head_clear=True)
+ except IndexError:
+ msg_to_load = 0
+
+ try:
+ command = header + int_to_bytes(msg_to_load) + window.uid
+ except struct.error:
+ raise FunctionReturn("Error: Invalid number of messages.", head_clear=True)
+
+ if export:
+ if not yes(f"Export logs for '{window.name}' in plaintext?", abort=False):
+ raise FunctionReturn("Log file export aborted.", tail_clear=True, head=0, delay=1)
+
+ queue_command(command, settings, queues)
+
+ access_logs(window, contact_list, group_list, settings, master_key, msg_to_load, export)
+
+ if export:
+ raise FunctionReturn(f"Exported log file of {window.type} '{window.name}'.", head_clear=True)
+
+
+def send_onion_service_key(contact_list: 'ContactList',
+ settings: 'Settings',
+ onion_service: 'OnionService',
+ gateway: 'Gateway'
+ ) -> None:
+ """Resend Onion Service key to Relay Program on Networked Computer.
+
+ This command is used in cases where Relay Program had to be
+ restarted for some reason (e.g. due to system updates).
+ """
+ # Exporting during traffic masking deviates from the constant traffic
+ # pattern, so warn the user and require explicit confirmation first.
+ try:
+ if settings.traffic_masking:
+ m_print(["Warning!",
+ "Exporting Onion Service data to Networked Computer ",
+ "during traffic masking can reveal to an adversary ",
+ "TFC is being used at the moment. You should only do ",
+ "this if you've had to restart the Relay Program."], bold=True, head=1, tail=1)
+ if not yes("Proceed with the Onion Service data export?", abort=False):
+ raise FunctionReturn("Onion Service data export canceled.", tail_clear=True, delay=1, head=0)
+
+ export_onion_service_data(contact_list, settings, onion_service, gateway)
+ # ^C/^D pressed during any of the prompts aborts the export cleanly.
+ except (EOFError, KeyboardInterrupt):
+ raise FunctionReturn("Onion Service data export canceled.", tail_clear=True, delay=1, head=2)
+
+
+def print_help(settings: 'Settings') -> None:
+ """Print the list of commands."""
+
+ def help_printer(tuple_list: List[Tuple[str, str, bool]]) -> None:
+ """Print list of commands and their descriptions.
+
+ Style in which commands are printed depends on terminal width.
+ Depending on whether traffic masking is enabled, some commands
+ are either displayed or hidden.
+ """
+ len_longest_command = max(len(t[0]) for t in tuple_list) + 1 # Add one for spacing
+ wrapper = textwrap.TextWrapper(width=max(1, terminal_width - len_longest_command))
+
+ for help_cmd, description, display in tuple_list:
+ # The third tuple member gates visibility (traffic-masking dependent).
+ if not display:
+ continue
+
+ desc_lines = wrapper.fill(description).split('\n')
+ desc_indent = (len_longest_command - len(help_cmd)) * ' '
+
+ print(help_cmd + desc_indent + desc_lines[0])
+
+ # Print wrapped description lines with indent
+ if len(desc_lines) > 1:
+ for line in desc_lines[1:]:
+ print(len_longest_command * ' ' + line)
+ print('')
+
+ # ------------------------------------------------------------------------------------------------------------------
+
+ # y_tm: shown only when traffic masking is on; n_tm: only when it is off.
+ y_tm = settings.traffic_masking
+ n_tm = not settings.traffic_masking
+
+ common_commands = [("/about", "Show links to project resources", True),
+ ("/add", "Add new contact", n_tm),
+ ("/cf", "Cancel file transmission to active contact/group", y_tm),
+ ("/cm", "Cancel message transmission to active contact/group", True),
+ ("/clear, ' '", "Clear TFC screens", True),
+ ("/cmd, '//'", "Display command window on Receiver", True),
+ ("/connect", "Resend Onion Service data to Relay", True),
+ ("/exit", "Exit TFC on all three computers", True),
+ ("/export (n)", "Export (n) messages from recipient's log file", True),
+ ("/file", "Send file to active contact/group", True),
+ ("/fw", "Display file reception window on Receiver", y_tm),
+ ("/help", "Display this list of commands", True),
+ ("/history (n)", "Print (n) messages from recipient's log file", True),
+ ("/localkey", "Generate new local key pair", n_tm),
+ ("/logging {on,off}(' all')", "Change message log setting (for all contacts)", True),
+ ("/msg {A,N,G}", "Change recipient to Account, Nick, or Group", n_tm),
+ ("/names", "List contacts and groups", True),
+ ("/nick N", "Change nickname of active recipient/group to N", True),
+ ("/notify {on,off} (' all')", "Change notification settings (for all contacts)", True),
+ ("/passwd {tx,rx}", "Change master password on target system", n_tm),
+ ("/psk", "Open PSK import dialog on Receiver", n_tm),
+ ("/reset", "Reset ephemeral session log for active window", True),
+ ("/rm {A,N}", "Remove contact specified by account A or nick N", n_tm),
+ ("/rmlogs {A,N}", "Remove log entries for account A or nick N", True),
+ ("/set S V", "Change setting S to value V", True),
+ ("/settings", "List setting names, values and descriptions", True),
+ ("/store {on,off} (' all')", "Change file reception (for all contacts)", True),
+ ("/unread, ' '", "List windows with unread messages on Receiver", True),
+ ("/verify", "Verify fingerprints with active contact", True),
+ ("/whisper M", "Send message M, asking it not to be logged", True),
+ ("/whois {A,N}", "Check which A corresponds to N or vice versa", True),
+ ("/wipe", "Wipe all TFC user data and power off systems", True),
+ ("Shift + PgUp/PgDn", "Scroll terminal up/down", True)]
+
+ group_commands = [("/group create G A₁..Aₙ", "Create group G and add accounts A₁..Aₙ", n_tm),
+ ("/group join ID G A₁..Aₙ", "Join group ID, call it G and add accounts A₁..Aₙ", n_tm),
+ ("/group add G A₁..Aₙ", "Add accounts A₁..Aₙ to group G", n_tm),
+ ("/group rm G A₁..Aₙ", "Remove accounts A₁..Aₙ from group G", n_tm),
+ ("/group rm G", "Remove group G", n_tm)]
+
+ terminal_width = get_terminal_width()
+
+ clear_screen()
+
+ print(textwrap.fill("List of commands:", width=terminal_width))
+ print('')
+ help_printer(common_commands)
+ print(terminal_width * '─')
+
+ # Group management commands are all disabled during traffic masking,
+ # so the whole section is skipped in that mode.
+ if settings.traffic_masking:
+ print('')
+ else:
+ print(textwrap.fill("Group management:", width=terminal_width))
+ print('')
+ help_printer(group_commands)
+ print(terminal_width * '─' + '\n')
+
+
+def print_recipients(contact_list: 'ContactList', group_list: 'GroupList') -> None:
+ """Print the list of contacts and groups."""
+ # Contacts are listed first, followed by groups.
+ contact_list.print_contacts()
+ group_list.print_groups()
+
+
+def change_master_key(user_input: 'UserInput',
+ contact_list: 'ContactList',
+ group_list: 'GroupList',
+ settings: 'Settings',
+ queues: 'QueueDict',
+ master_key: 'MasterKey',
+ onion_service: 'OnionService'
+ ) -> None:
+ """Change the master key on Transmitter/Receiver Program."""
+ try:
+ if settings.traffic_masking:
+ raise FunctionReturn("Error: Command is disabled during traffic masking.", head_clear=True)
+
+ # Second parameter selects the target system ('tx' or 'rx').
+ try:
+ device = user_input.plaintext.split()[1].lower()
+ except IndexError:
+ raise FunctionReturn(f"Error: No target-system ('{TX}' or '{RX}') specified.", head_clear=True)
+
+ if device not in [TX, RX]:
+ raise FunctionReturn(f"Error: Invalid target system '{device}'.", head_clear=True)
+
+ # RX key change is delegated to Receiver Program via an encrypted command.
+ if device == RX:
+ queue_command(CH_MASTER_KEY, settings, queues)
+ return None
+
+ # Keep a copy of the old key: the log database is re-encrypted from
+ # the old key to the new one below.
+ old_master_key = master_key.master_key[:]
+ new_master_key = master_key.master_key = master_key.new_master_key()
+
+ phase("Re-encrypting databases")
+
+ # Inform the key management process about the new master key.
+ queues[KEY_MANAGEMENT_QUEUE].put((KDB_CHANGE_MASTER_KEY_HEADER, master_key))
+
+ ensure_dir(DIR_USER_DATA)
+ if os.path.isfile(f'{DIR_USER_DATA}{settings.software_operation}_logs'):
+ change_log_db_key(old_master_key, new_master_key, settings)
+
+ # NOTE(review): the store calls below presumably re-encrypt each
+ # database under the updated master key object — confirm.
+ contact_list.store_contacts()
+ group_list.store_groups()
+ settings.store_settings()
+ onion_service.store_onion_service_private_key()
+
+ phase(DONE)
+ m_print("Master key successfully changed.", bold=True, tail_clear=True, delay=1, head=1)
+
+ except (EOFError, KeyboardInterrupt):
+ raise FunctionReturn("Password change aborted.", tail_clear=True, delay=1, head=2)
+
+
+def remove_log(user_input: 'UserInput',
+ contact_list: 'ContactList',
+ group_list: 'GroupList',
+ settings: 'Settings',
+ queues: 'QueueDict',
+ master_key: 'MasterKey'
+ ) -> None:
+ """Remove log entries for contact or group."""
+ try:
+ selection = user_input.plaintext.split()[1]
+ except IndexError:
+ raise FunctionReturn("Error: No contact/group specified.", head_clear=True)
+
+ # Log removal is irreversible; require explicit confirmation.
+ if not yes(f"Remove logs for {selection}?", abort=False, head=1):
+ raise FunctionReturn("Log file removal aborted.", tail_clear=True, delay=1, head=0)
+
+ # Determine selector (group ID or Onion Service public key) from command parameters
+ if selection in contact_list.contact_selectors():
+ selector = contact_list.get_contact_by_address_or_nick(selection).onion_pub_key
+
+ elif selection in group_list.get_list_of_group_names():
+ selector = group_list.get_group(selection).group_id
+
+ elif len(selection) == ONION_ADDRESS_LENGTH:
+ # validate_onion_addr returns a truthy (error) value for invalid addresses.
+ if validate_onion_addr(selection):
+ raise FunctionReturn("Error: Invalid account.", head_clear=True)
+ selector = onion_address_to_pub_key(selection)
+
+ elif len(selection) == GROUP_ID_ENC_LENGTH:
+ try:
+ selector = b58decode(selection)
+ except ValueError:
+ raise FunctionReturn("Error: Invalid group ID.", head_clear=True)
+
+ else:
+ raise FunctionReturn("Error: Unknown selector.", head_clear=True)
+
+ # Remove logs that match the selector
+ # The Receiver is told first, then local logs are removed.
+ command = LOG_REMOVE + selector
+ queue_command(command, settings, queues)
+
+ remove_logs(contact_list, group_list, settings, master_key, selector)
+
+
+def change_setting(user_input: 'UserInput',
+ window: 'TxWindow',
+ contact_list: 'ContactList',
+ group_list: 'GroupList',
+ settings: 'Settings',
+ queues: 'QueueDict',
+ gateway: 'Gateway'
+ ) -> None:
+ """Change setting on Transmitter and Receiver Program."""
+ # Validate the KV-pair
+ try:
+ setting = user_input.plaintext.split()[1]
+ except IndexError:
+ raise FunctionReturn("Error: No setting specified.", head_clear=True)
+
+ # A setting may belong either to TFC itself or to the gateway (serial/socket).
+ if setting not in (settings.key_list + gateway.settings.key_list):
+ raise FunctionReturn(f"Error: Invalid setting '{setting}'.", head_clear=True)
+
+ try:
+ value = user_input.plaintext.split()[2]
+ except IndexError:
+ raise FunctionReturn("Error: No value for setting specified.", head_clear=True)
+
+ # Check if the setting can be changed
+ relay_settings = dict(serial_error_correction=UNENCRYPTED_EC_RATIO,
+ serial_baudrate =UNENCRYPTED_BAUDRATE,
+ allow_contact_requests =UNENCRYPTED_MANAGE_CONTACT_REQ)
+ if settings.traffic_masking and (setting in relay_settings or setting == 'max_number_of_contacts'):
+ raise FunctionReturn("Error: Can't change this setting during traffic masking.", head_clear=True)
+
+ if setting in ['use_serial_usb_adapter', 'built_in_serial_interface']:
+ raise FunctionReturn("Error: Serial interface setting can only be changed manually.", head_clear=True)
+
+ # Change the setting
+ if setting in gateway.settings.key_list:
+ gateway.settings.change_setting(setting, value)
+ else:
+ settings.change_setting(setting, value, contact_list, group_list)
+
+ receiver_command = CH_SETTING + setting.encode() + US_BYTE + value.encode()
+ queue_command(receiver_command, settings, queues)
+
+ # Relay Program settings are forwarded as unencrypted datagrams; the
+ # boolean is re-encoded from the now-validated stored value.
+ if setting in relay_settings:
+ if setting == 'allow_contact_requests':
+ value = bool_to_bytes(settings.allow_contact_requests).decode()
+ relay_command = UNENCRYPTED_DATAGRAM_HEADER + relay_settings[setting] + value.encode()
+ queue_to_nc(relay_command, queues[RELAY_PACKET_QUEUE])
+
+ # Propagate the effects of the setting
+ if setting == 'max_number_of_contacts':
+ contact_list.store_contacts()
+ queues[KEY_MANAGEMENT_QUEUE].put((KDB_UPDATE_SIZE_HEADER, settings))
+
+ if setting in ['max_number_of_group_members', 'max_number_of_groups']:
+ group_list.store_groups()
+
+ if setting == 'traffic_masking':
+ queues[SENDER_MODE_QUEUE].put(settings)
+ queues[TRAFFIC_MASKING_QUEUE].put(settings.traffic_masking)
+ # Force the user to reselect a window under the new sending mode.
+ window.deselect()
+
+ if setting == 'log_file_masking':
+ queues[LOGFILE_MASKING_QUEUE].put(settings.log_file_masking)
+
+
+def print_settings(settings: 'Settings',
+ gateway: 'Gateway') -> None:
+ """Print settings and gateway settings."""
+ # TFC settings first, then the gateway (serial/socket) settings.
+ settings.print_settings()
+ gateway.settings.print_settings()
+
+
+def rxp_display_unread(settings: 'Settings', queues: 'QueueDict') -> None:
+ """\
+ Display the list of windows that contain unread messages on Receiver
+ Program.
+ """
+ # Pure delegation: the WIN_ACTIVITY command is handled by the Receiver.
+ queue_command(WIN_ACTIVITY, settings, queues)
+
+
+def verify(window: 'TxWindow', contact_list: 'ContactList') -> None:
+ """Verify fingerprints with contact."""
+ # Fingerprint verification only makes sense for a single contact.
+ if window.type == WIN_TYPE_GROUP or window.contact is None:
+ raise FunctionReturn("Error: A group is selected.", head_clear=True)
+
+ # PSK contacts have no X448 public keys, hence no fingerprints.
+ if window.contact.uses_psk():
+ raise FunctionReturn("Pre-shared keys have no fingerprints.", head_clear=True)
+
+ try:
+ verified = verify_fingerprints(window.contact.tx_fingerprint,
+ window.contact.rx_fingerprint)
+ except (EOFError, KeyboardInterrupt):
+ raise FunctionReturn("Fingerprint verification aborted.", delay=1, head=2, tail_clear=True)
+
+ # Map the user's verdict to the kex status stored in the contact database.
+ status_hr, status = {True: ("Verified", KEX_STATUS_VERIFIED),
+ False: ("Unverified", KEX_STATUS_UNVERIFIED)}[verified]
+
+ window.contact.kex_status = status
+ contact_list.store_contacts()
+ m_print(f"Marked fingerprints with {window.name} as '{status_hr}'.", bold=True, tail_clear=True, delay=1, tail=1)
+
+
+def whisper(user_input: 'UserInput',
+ window: 'TxWindow',
+ settings: 'Settings',
+ queues: 'QueueDict',
+ ) -> None:
+ """\
+ Send a message to the contact that overrides their enabled logging
+ setting for that message.
+
+ The functionality of this feature is impossible to enforce, but if
+ the recipient can be trusted and they do not modify their client,
+ this feature can be used to send the message off-the-record.
+ """
+ # split(' ', 1) keeps everything after the command word intact,
+ # including any spaces inside the message itself.
+ try:
+ message = user_input.plaintext.strip().split(' ', 1)[1]
+ except IndexError:
+ raise FunctionReturn("Error: No whisper message specified.", head_clear=True)
+
+ # NOTE(review): log_as_ph presumably logs a placeholder instead of the
+ # plaintext on the local side — confirm against queue_message.
+ queue_message(user_input=UserInput(message, MESSAGE),
+ window=window,
+ settings=settings,
+ queues=queues,
+ whisper=True,
+ log_as_ph=True)
+
+
+def whois(user_input: 'UserInput',
+ contact_list: 'ContactList',
+ group_list: 'GroupList'
+ ) -> None:
+ """Do a lookup for a contact or group selector.
+
+ The selector may be a contact's account address or nick, or a
+ group's name or Base58 group ID; each lookup prints its counterpart.
+ """
+ try:
+ selector = user_input.plaintext.split()[1]
+ except IndexError:
+ raise FunctionReturn("Error: No account or nick specified.", head_clear=True)
+
+ # Contacts
+ if selector in contact_list.get_list_of_addresses():
+ m_print([f"Nick of '{selector}' is ",
+ f"{contact_list.get_contact_by_address_or_nick(selector).nick}"], bold=True)
+
+ elif selector in contact_list.get_list_of_nicks():
+ m_print([f"Account of '{selector}' is",
+ f"{contact_list.get_contact_by_address_or_nick(selector).onion_address}"], bold=True)
+
+ # Groups
+ elif selector in group_list.get_list_of_group_names():
+ m_print([f"Group ID of group '{selector}' is",
+ f"{b58encode(group_list.get_group(selector).group_id)}"], bold=True)
+
+ elif selector in group_list.get_list_of_hr_group_ids():
+ m_print([f"Name of group with ID '{selector}' is",
+ f"{group_list.get_group_by_id(b58decode(selector)).name}"], bold=True)
+
+ else:
+ raise FunctionReturn("Error: Unknown selector.", head_clear=True)
+
+
+def wipe(settings: 'Settings',
+ queues: 'QueueDict',
+ gateway: 'Gateway'
+ ) -> None:
+ """\
+ Reset terminals, wipe all TFC user data from Source, Networked, and
+ Destination Computer, and power all three systems off.
+
+ The purpose of the wipe command is to provide additional protection
+ against physical attackers, e.g. in situation where a dissident gets
+ a knock on their door. By overwriting and deleting user data the
+ program prevents access to encrypted databases. Additional security
+ should be sought with full disk encryption (FDE).
+
+ Unfortunately, no effective tool for overwriting RAM currently exists.
+ However, as long as Source and Destination Computers use FDE and
+ DDR3 memory, recovery of sensitive data becomes impossible very fast:
+ https://www1.cs.fau.de/filepool/projects/coldboot/fares_coldboot.pdf
+ """
+ if not yes("Wipe all user data and power off systems?", abort=False):
+ raise FunctionReturn("Wipe command aborted.", head_clear=True)
+
+ clear_screen()
+
+ # Drain pending packets so the wipe command is delivered first.
+ for q in [COMMAND_PACKET_QUEUE, RELAY_PACKET_QUEUE]:
+ while queues[q].qsize() != 0:
+ queues[q].get()
+
+ queue_command(WIPE_USR_DATA, settings, queues)
+
+ # NOTE(review): the delays appear to give Receiver time to act on the
+ # wipe command before Relay is told to wipe (larger delays under local
+ # testing where data diode sockets are simulated) — confirm.
+ if not settings.traffic_masking:
+ if settings.local_testing_mode:
+ time.sleep(0.8)
+ time.sleep(gateway.settings.data_diode_sockets * 2.2)
+ else:
+ time.sleep(gateway.settings.race_condition_delay)
+
+ relay_command = UNENCRYPTED_DATAGRAM_HEADER + UNENCRYPTED_WIPE_COMMAND
+ queue_to_nc(relay_command, queues[RELAY_PACKET_QUEUE])
+
+ # Finally reset the local terminal (RESET is presumably a terminal
+ # reset command defined in src.common.statics — confirm).
+ os.system(RESET)
diff --git a/src/transmitter/commands_g.py b/src/transmitter/commands_g.py
new file mode 100644
index 0000000..5aaf9cf
--- /dev/null
+++ b/src/transmitter/commands_g.py
@@ -0,0 +1,327 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import os
+import typing
+
+from typing import Callable, Dict, List, Optional
+
+from src.common.db_logs import remove_logs
+from src.common.encoding import b58decode, int_to_bytes
+from src.common.exceptions import FunctionReturn
+from src.common.input import yes
+from src.common.misc import ignored, validate_group_name
+from src.common.output import group_management_print, m_print
+from src.common.statics import *
+
+from src.transmitter.packet import queue_command, queue_to_nc
+from src.transmitter.user_input import UserInput
+
+if typing.TYPE_CHECKING:
+ from multiprocessing import Queue
+ from src.common.db_contacts import ContactList
+ from src.common.db_groups import GroupList
+ from src.common.db_masterkey import MasterKey
+ from src.common.db_settings import Settings
+ from src.transmitter.windows import TxWindow
+ QueueDict = Dict[bytes, Queue]
+
+
+def process_group_command(user_input: 'UserInput',
+ contact_list: 'ContactList',
+ group_list: 'GroupList',
+ settings: 'Settings',
+ queues: 'QueueDict',
+ master_key: 'MasterKey'
+ ) -> None:
+ """Parse a group command and process it accordingly."""
+ if settings.traffic_masking:
+ raise FunctionReturn("Error: Command is disabled during traffic masking.", head_clear=True)
+
+ input_parameters = user_input.plaintext.split() # type: List[str]
+
+ try:
+ command_type = input_parameters[1]
+ except IndexError:
+ raise FunctionReturn("Error: Invalid group command.", head_clear=True)
+
+ if command_type not in ['create', 'join', 'add', 'rm']:
+ raise FunctionReturn("Error: Invalid group command.")
+
+ # 'join' takes an existing Base58 group ID as an extra third parameter,
+ # which shifts the indices of the group name and member list below.
+ group_id = None # type: Optional[bytes]
+ if command_type == 'join':
+ try:
+ group_id_s = input_parameters[2]
+ except IndexError:
+ raise FunctionReturn("Error: No group ID specified.", head_clear=True)
+ try:
+ group_id = b58decode(group_id_s)
+ except ValueError:
+ raise FunctionReturn("Error: Invalid group ID.", head_clear=True)
+
+ if group_id in group_list.get_list_of_group_ids():
+ raise FunctionReturn("Error: Group with matching ID already exists.", head_clear=True)
+
+ try:
+ name_index = 3 if command_type == 'join' else 2
+ group_name = input_parameters[name_index]
+ except IndexError:
+ raise FunctionReturn("Error: No group name specified.", head_clear=True)
+
+ member_index = 4 if command_type == 'join' else 3
+ purp_members = input_parameters[member_index:]
+
+ # Swap specified strings to public keys
+ selectors = contact_list.contact_selectors()
+ pub_keys = [contact_list.get_contact_by_address_or_nick(m).onion_pub_key for m in purp_members if m in selectors]
+
+ # 'join' reuses group_create; the two are distinguished inside it by
+ # the non-None group_id argument.
+ func = dict(create=group_create,
+ join =group_create,
+ add =group_add_member,
+ rm =group_rm_member)[command_type] # type: Callable
+
+ func(group_name, pub_keys, contact_list, group_list, settings, queues, master_key, group_id)
+ print('')
+
+
+def group_create(group_name: str,
+ purp_members: List[bytes],
+ contact_list: 'ContactList',
+ group_list: 'GroupList',
+ settings: 'Settings',
+ queues: 'QueueDict',
+ _: 'MasterKey',
+ group_id: Optional[bytes] = None
+ ) -> None:
+ """Create a new group.
+
+ Validate the group name and determine what members can be added.
+ A non-None group_id means the user is joining an existing group
+ rather than creating a new one.
+ """
+ error_msg = validate_group_name(group_name, contact_list, group_list)
+ if error_msg:
+ raise FunctionReturn(error_msg, head_clear=True)
+
+ # Only known contacts can become members; unknown selectors are reported.
+ public_keys = set(contact_list.get_list_of_pub_keys())
+ purp_pub_keys = set(purp_members)
+ accepted = list(purp_pub_keys & public_keys)
+ rejected = list(purp_pub_keys - public_keys)
+
+ if len(accepted) > settings.max_number_of_group_members:
+ raise FunctionReturn(f"Error: TFC settings only allow {settings.max_number_of_group_members} "
+ f"members per group.", head_clear=True)
+
+ if len(group_list) == settings.max_number_of_groups:
+ raise FunctionReturn(f"Error: TFC settings only allow {settings.max_number_of_groups} groups.", head_clear=True)
+
+ header = GROUP_MSG_INVITE_HEADER if group_id is None else GROUP_MSG_JOIN_HEADER
+
+ # For new groups, loop until a group ID not already in use is drawn.
+ if group_id is None:
+ while True:
+ group_id = os.urandom(GROUP_ID_LENGTH)
+ if group_id not in group_list.get_list_of_group_ids():
+ break
+
+ group_list.add_group(group_name,
+ group_id,
+ settings.log_messages_by_default,
+ settings.show_notifications_by_default,
+ members=[contact_list.get_contact_by_pub_key(k) for k in accepted])
+
+ command = GROUP_CREATE + group_id + group_name.encode() + US_BYTE + b''.join(accepted)
+ queue_command(command, settings, queues)
+
+ group_management_print(NEW_GROUP, accepted, contact_list, group_name)
+ group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name)
+
+ # Publishing the member list to participants is optional and leaks
+ # group membership to them, hence the confirmation prompt.
+ if accepted:
+ if yes("Publish the list of group members to participants?", abort=False):
+ create_packet = header + group_id + b''.join(accepted)
+ queue_to_nc(create_packet, queues[RELAY_PACKET_QUEUE])
+
+ else:
+ m_print(f"Created an empty group '{group_name}'.", bold=True, head=1)
+
+
+def group_add_member(group_name: str,
+ purp_members: List[bytes],
+ contact_list: 'ContactList',
+ group_list: 'GroupList',
+ settings: 'Settings',
+ queues: 'QueueDict',
+ master_key: 'MasterKey',
+ _: Optional[bytes] = None
+ ) -> None:
+ """Add new member(s) to a specified group.
+
+ If the group does not exist, offer to create it instead.
+ """
+ if group_name not in group_list.get_list_of_group_names():
+ if yes(f"Group {group_name} was not found. Create new group?", abort=False, head=1):
+ group_create(group_name, purp_members, contact_list, group_list, settings, queues, master_key)
+ return None
+ else:
+ raise FunctionReturn("Group creation aborted.", head=0, delay=1, tail_clear=True)
+
+ # Set algebra: accept only known contacts, then separate members that
+ # are genuinely new from those already in the group.
+ purp_pub_keys = set(purp_members)
+ pub_keys = set(contact_list.get_list_of_pub_keys())
+ before_adding = set(group_list.get_group(group_name).get_list_of_member_pub_keys())
+ ok_pub_keys_set = set(pub_keys & purp_pub_keys)
+ new_in_group_set = set(ok_pub_keys_set - before_adding)
+
+ end_assembly = list(before_adding | new_in_group_set)
+ rejected = list(purp_pub_keys - pub_keys)
+ already_in_g = list(before_adding & purp_pub_keys)
+ new_in_group = list(new_in_group_set)
+ ok_pub_keys = list(ok_pub_keys_set)
+
+ if len(end_assembly) > settings.max_number_of_group_members:
+ raise FunctionReturn(f"Error: TFC settings only allow {settings.max_number_of_group_members} "
+ f"members per group.", head_clear=True)
+
+ group = group_list.get_group(group_name)
+ group.add_members([contact_list.get_contact_by_pub_key(k) for k in new_in_group])
+
+ command = GROUP_ADD + group.group_id + b''.join(ok_pub_keys)
+ queue_command(command, settings, queues)
+
+ group_management_print(ADDED_MEMBERS, new_in_group, contact_list, group_name)
+ group_management_print(ALREADY_MEMBER, already_in_g, contact_list, group_name)
+ group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name)
+
+ # Optionally notify involved parties; the packet carries the count of
+ # existing members so recipients can split the two key lists.
+ if new_in_group:
+ if yes("Publish the list of new members to involved?", abort=False):
+ add_packet = (GROUP_MSG_MEMBER_ADD_HEADER
+ + group.group_id
+ + int_to_bytes(len(before_adding))
+ + b''.join(before_adding)
+ + b''.join(new_in_group))
+ queue_to_nc(add_packet, queues[RELAY_PACKET_QUEUE])
+
+
+def group_rm_member(group_name: str,
+ purp_members: List[bytes],
+ contact_list: 'ContactList',
+ group_list: 'GroupList',
+ settings: 'Settings',
+ queues: 'QueueDict',
+ master_key: 'MasterKey',
+ _: Optional[bytes] = None
+ ) -> None:
+ """Remove member(s) from the specified group or remove the group itself."""
+ # With no members listed, remove the whole group. group_rm_group raises
+ # FunctionReturn on every path, so execution never continues past it.
+ if not purp_members:
+ group_rm_group(group_name, contact_list, group_list, settings, queues, master_key)
+
+ if group_name not in group_list.get_list_of_group_names():
+ raise FunctionReturn(f"Group '{group_name}' does not exist.", head_clear=True)
+
+ # Set algebra: only known contacts that are currently members can be
+ # removed; the rest are reported per category below.
+ purp_pub_keys = set(purp_members)
+ pub_keys = set(contact_list.get_list_of_pub_keys())
+ before_removal = set(group_list.get_group(group_name).get_list_of_member_pub_keys())
+ ok_pub_keys_set = set(purp_pub_keys & pub_keys)
+ removable_set = set(before_removal & ok_pub_keys_set)
+
+ remaining = list(before_removal - removable_set)
+ not_in_group = list(ok_pub_keys_set - before_removal)
+ rejected = list(purp_pub_keys - pub_keys)
+ removable = list(removable_set)
+ ok_pub_keys = list(ok_pub_keys_set)
+
+ group = group_list.get_group(group_name)
+ group.remove_members(removable)
+
+ command = GROUP_REMOVE + group.group_id + b''.join(ok_pub_keys)
+ queue_command(command, settings, queues)
+
+ group_management_print(REMOVED_MEMBERS, removable, contact_list, group_name)
+ group_management_print(NOT_IN_GROUP, not_in_group, contact_list, group_name)
+ group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name)
+
+ # Notify remaining members only if someone was actually removed and
+ # someone remains to notify.
+ if removable and remaining and yes("Publish the list of removed members to remaining members?", abort=False):
+ rem_packet = (GROUP_MSG_MEMBER_REM_HEADER
+ + group.group_id
+ + int_to_bytes(len(remaining))
+ + b''.join(remaining)
+ + b''.join(removable))
+ queue_to_nc(rem_packet, queues[RELAY_PACKET_QUEUE])
+
+
+def group_rm_group(group_name: str,
+ contact_list: 'ContactList',
+ group_list: 'GroupList',
+ settings: 'Settings',
+ queues: 'QueueDict',
+ master_key: 'MasterKey'
+ ) -> None:
+ """Remove the group with its members.
+
+ Always terminates by raising FunctionReturn.
+ """
+ if not yes(f"Remove group '{group_name}'?", abort=False):
+ raise FunctionReturn("Group removal aborted.", head=0, delay=1, tail_clear=True)
+
+ # The selector may also be a Base58 group ID of a group the
+ # Transmitter does not have locally.
+ if group_name in group_list.get_list_of_group_names():
+ group_id = group_list.get_group(group_name).group_id
+ else:
+ try:
+ group_id = b58decode(group_name)
+ except ValueError:
+ raise FunctionReturn("Error: Invalid group name/ID.", head_clear=True)
+
+ # Tell the Receiver to remove both the group's logs and the group itself.
+ command = LOG_REMOVE + group_id
+ queue_command(command, settings, queues)
+
+ command = GROUP_DELETE + group_id
+ queue_command(command, settings, queues)
+
+ if group_list.has_group(group_name):
+ # Local log removal is best-effort: missing logs are not an error here.
+ with ignored(FunctionReturn):
+ remove_logs(contact_list, group_list, settings, master_key, group_id)
+ else:
+ raise FunctionReturn(f"Transmitter has no group '{group_name}' to remove.")
+
+ group = group_list.get_group(group_name)
+ if not group.empty() and yes("Notify members about leaving the group?", abort=False):
+ exit_packet = (GROUP_MSG_EXIT_GROUP_HEADER
+ + group.group_id
+ + b''.join(group.get_list_of_member_pub_keys()))
+ queue_to_nc(exit_packet, queues[RELAY_PACKET_QUEUE])
+
+ group_list.remove_group_by_name(group_name)
+ raise FunctionReturn(f"Removed group '{group_name}'.", head=0, delay=1, tail_clear=True, bold=True)
+
+
+def rename_group(new_name: str,
+ window: 'TxWindow',
+ contact_list: 'ContactList',
+ group_list: 'GroupList',
+ settings: 'Settings',
+ queues: 'QueueDict',
+ ) -> None:
+ """Rename the active group.
+
+ Always terminates by raising FunctionReturn.
+ """
+ if window.type == WIN_TYPE_CONTACT or window.group is None:
+ raise FunctionReturn("Error: Selected window is not a group window.", head_clear=True)
+
+ # The new name must pass the same validation as at group creation.
+ error_msg = validate_group_name(new_name, contact_list, group_list)
+ if error_msg:
+ raise FunctionReturn(error_msg, head_clear=True)
+
+ command = GROUP_RENAME + window.uid + new_name.encode()
+ queue_command(command, settings, queues)
+
+ old_name = window.group.name
+ window.group.name = new_name
+ group_list.store_groups()
+
+ raise FunctionReturn(f"Renamed group '{old_name}' to '{new_name}'.", delay=1, tail_clear=True)
diff --git a/src/transmitter/contact.py b/src/transmitter/contact.py
new file mode 100644
index 0000000..04cd65a
--- /dev/null
+++ b/src/transmitter/contact.py
@@ -0,0 +1,287 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import typing
+
+from typing import Dict
+
+from src.common.db_logs import remove_logs
+from src.common.encoding import onion_address_to_pub_key
+from src.common.exceptions import FunctionReturn
+from src.common.input import box_input, yes
+from src.common.misc import ignored, validate_key_exchange, validate_nick, validate_onion_addr
+from src.common.output import m_print
+from src.common.statics import *
+
+from src.transmitter.commands_g import rename_group
+from src.transmitter.key_exchanges import create_pre_shared_key, start_key_exchange
+from src.transmitter.packet import queue_command, queue_to_nc
+
+if typing.TYPE_CHECKING:
+ from multiprocessing import Queue
+ from src.common.db_contacts import ContactList
+ from src.common.db_groups import GroupList
+ from src.common.db_masterkey import MasterKey
+ from src.common.db_onion import OnionService
+ from src.common.db_settings import Settings
+ from src.transmitter.user_input import UserInput
+ from src.transmitter.windows import TxWindow
+ QueueDict = Dict[bytes, Queue]
+
+
+def add_new_contact(contact_list: 'ContactList',
+                    group_list: 'GroupList',
+                    settings: 'Settings',
+                    queues: 'QueueDict',
+                    onion_service: 'OnionService'
+                    ) -> None:
+    """Prompt for contact account details and initialize desired key exchange.
+
+    This function requests as little data about the recipient as
+    possible. The TFC account of contact is the same as the Onion URL
+    of contact's v3 Tor Onion Service. Since the accounts are random
+    and hard to remember, the user has to choose a nickname for their
+    contact. Finally, the user must select the key exchange method:
+    ECDHE for convenience in a pre-quantum world, or PSK for situations
+    where physical key exchange is possible, and ciphertext must remain
+    secure even after sufficient QTMs are available to adversaries.
+
+    Before starting the key exchange, Transmitter Program exports the
+    public key of contact's Onion Service to Relay Program on their
+    Networked Computer so that a connection to the contact can be
+    established.
+    """
+    try:
+        if settings.traffic_masking:
+            raise FunctionReturn("Error: Command is disabled during traffic masking.", head_clear=True)
+
+        if len(contact_list) >= settings.max_number_of_contacts:
+            raise FunctionReturn(f"Error: TFC settings only allow {settings.max_number_of_contacts} accounts.",
+                                 head_clear=True)
+
+        m_print("Add new contact", head=1, bold=True, head_clear=True)
+
+        m_print(["Your TFC account is",
+                 onion_service.user_onion_address,
+                 '', "Warning!",
+                 "Anyone who knows this account",
+                 "can see when your TFC is online"], box=True)
+
+        contact_address = box_input("Contact account",
+                                    expected_len=ONION_ADDRESS_LENGTH,
+                                    validator=validate_onion_addr,
+                                    validator_args=onion_service.user_onion_address).strip()
+        onion_pub_key = onion_address_to_pub_key(contact_address)
+
+        contact_nick = box_input("Contact nick",
+                                 expected_len=ONION_ADDRESS_LENGTH,  # Limited to 255 but such a long nick is impractical.
+                                 validator=validate_nick,
+                                 validator_args=(contact_list, group_list, onion_pub_key)).strip()
+
+        key_exchange = box_input(f"Key exchange ([{ECDHE}],PSK) ",
+                                 default=ECDHE,
+                                 expected_len=28,
+                                 validator=validate_key_exchange).strip()
+
+        # Tell Relay Program to start looking for the contact's Onion Service.
+        relay_command = UNENCRYPTED_DATAGRAM_HEADER + UNENCRYPTED_ADD_NEW_CONTACT + onion_pub_key
+        queue_to_nc(relay_command, queues[RELAY_PACKET_QUEUE])
+
+        # NOTE(review): `in` performs a substring match here (e.g. plain
+        # 'E' would select ECDHE); presumably validate_key_exchange above
+        # constrains accepted inputs -- confirm.
+        if key_exchange.upper() in ECDHE:
+            start_key_exchange(onion_pub_key, contact_nick, contact_list, settings, queues)
+
+        elif key_exchange.upper() in PSK:
+            create_pre_shared_key(onion_pub_key, contact_nick, contact_list, settings, onion_service, queues)
+
+    except (EOFError, KeyboardInterrupt):
+        raise FunctionReturn("Contact creation aborted.", head=2, delay=1, tail_clear=True)
+
+
+def remove_contact(user_input: 'UserInput',
+                   window: 'TxWindow',
+                   contact_list: 'ContactList',
+                   group_list: 'GroupList',
+                   settings: 'Settings',
+                   queues: 'QueueDict',
+                   master_key: 'MasterKey') -> None:
+    """Remove contact from TFC.
+
+    Removal commands are delivered to the Receiver and Relay Programs
+    even when the account is not in the local contact database, so the
+    databases of all three programs stay in sync.
+    """
+    if settings.traffic_masking:
+        raise FunctionReturn("Error: Command is disabled during traffic masking.", head_clear=True)
+
+    try:
+        selection = user_input.plaintext.split()[1]
+    except IndexError:
+        raise FunctionReturn("Error: No account specified.", head_clear=True)
+
+    if not yes(f"Remove contact '{selection}'?", abort=False, head=1):
+        raise FunctionReturn("Removal of contact aborted.", head=0, delay=1, tail_clear=True)
+
+    # The selection can be the nick/account of an existing contact, or a
+    # raw TFC account (Onion address) not present in the database.
+    if selection in contact_list.contact_selectors():
+        onion_pub_key = contact_list.get_contact_by_address_or_nick(selection).onion_pub_key
+
+    else:
+        if validate_onion_addr(selection):
+            raise FunctionReturn("Error: Invalid selection.", head=0, delay=1, tail_clear=True)
+        else:
+            onion_pub_key = onion_address_to_pub_key(selection)
+
+    # Tell the Receiver Program to remove the contact.
+    receiver_command = CONTACT_REM + onion_pub_key
+    queue_command(receiver_command, settings, queues)
+
+    # Best-effort removal of logged messages for the contact; ignore the
+    # FunctionReturn remove_logs raises (e.g. when there is nothing to
+    # remove -- NOTE(review): confirm against remove_logs).
+    with ignored(FunctionReturn):
+        remove_logs(contact_list, group_list, settings, master_key, onion_pub_key)
+
+    # Remove the contact's entry from the key database.
+    queues[KEY_MANAGEMENT_QUEUE].put((KDB_REMOVE_ENTRY_HEADER, onion_pub_key))
+
+    # Tell the Relay Program to drop the connection to the contact.
+    relay_command = UNENCRYPTED_DATAGRAM_HEADER + UNENCRYPTED_REM_CONTACT + onion_pub_key
+    queue_to_nc(relay_command, queues[RELAY_PACKET_QUEUE])
+
+    if onion_pub_key in contact_list.get_list_of_pub_keys():
+        contact = contact_list.get_contact_by_pub_key(onion_pub_key)
+        target = f"{contact.nick} ({contact.short_address})"
+        contact_list.remove_contact_by_pub_key(onion_pub_key)
+        m_print(f"Removed {target} from contacts.", head=1, tail=1)
+    else:
+        target = f"{selection[:TRUNC_ADDRESS_LENGTH]}"
+        m_print(f"Transmitter has no {target} to remove.", head=1, tail=1)
+
+    # Also drop the contact from every group that contained them.
+    if any([g.remove_members([onion_pub_key]) for g in group_list]):
+        m_print(f"Removed {target} from group(s).", tail=1)
+
+    if window.type == WIN_TYPE_CONTACT:
+        if onion_pub_key == window.uid:
+            window.deselect()
+
+    if window.type == WIN_TYPE_GROUP:
+        for c in window:
+            if c.onion_pub_key == onion_pub_key:
+                window.update_window(group_list)
+
+                # If the last member of the group is removed, deselect
+                # the group. Deselection is not done in
+                # update_group_win_members because it would prevent
+                # selecting the empty group for group related commands
+                # such as notifications.
+                if not window.window_contacts:
+                    window.deselect()
+
+
+def change_nick(user_input: 'UserInput',
+                window: 'TxWindow',
+                contact_list: 'ContactList',
+                group_list: 'GroupList',
+                settings: 'Settings',
+                queues: 'QueueDict') -> None:
+    """Change nick of contact (or, for group windows, rename the group)."""
+    try:
+        nick = user_input.plaintext.split()[1]
+    except IndexError:
+        raise FunctionReturn("Error: No nick specified.", head_clear=True)
+
+    # In a group window this command renames the group instead.
+    # rename_group always terminates with FunctionReturn, so execution
+    # never continues past this call for group windows.
+    if window.type == WIN_TYPE_GROUP:
+        rename_group(nick, window, contact_list, group_list, settings, queues)
+
+    assert window.contact is not None  # Narrow Optional for the type checker
+
+    onion_pub_key = window.contact.onion_pub_key
+    error_msg = validate_nick(nick, (contact_list, group_list, onion_pub_key))
+    if error_msg:
+        raise FunctionReturn(error_msg, head_clear=True)
+
+    window.contact.nick = nick
+    window.name = nick
+    contact_list.store_contacts()
+
+    # Tell the Receiver Program to update the contact's nick.
+    command = CH_NICKNAME + onion_pub_key + nick.encode()
+    queue_command(command, settings, queues)
+
+
+def contact_setting(user_input: 'UserInput',
+                    window: 'TxWindow',
+                    contact_list: 'ContactList',
+                    group_list: 'GroupList',
+                    settings: 'Settings',
+                    queues: 'QueueDict'
+                    ) -> None:
+    """\
+    Change logging, file reception, or notification setting of a group
+    or (all) contact(s).
+    """
+    try:
+        # Map the command word to the Receiver-side command header, and
+        # the on/off argument to the (wire value, boolean) pair. A
+        # missing or unknown parameter raises IndexError/KeyError.
+        parameters = user_input.plaintext.split()
+        cmd_key = parameters[0]
+        cmd_header = {LOGGING: CH_LOGGING,
+                      STORE: CH_FILE_RECV,
+                      NOTIFY: CH_NOTIFY}[cmd_key]
+
+        setting, b_value = dict(on=(ENABLE, True),
+                                off=(DISABLE, False))[parameters[1]]
+
+    except (IndexError, KeyError):
+        raise FunctionReturn("Error: Invalid command.", head_clear=True)
+
+    # If second parameter 'all' is included, apply setting for all contacts and groups
+    try:
+        win_uid = b''
+        if parameters[2] == ALL:
+            cmd_value = setting.upper()
+        else:
+            raise FunctionReturn("Error: Invalid command.", head_clear=True)
+    except IndexError:
+        # No third parameter: apply the setting to the active window only.
+        win_uid = window.uid
+        cmd_value = setting + win_uid
+
+    if win_uid:
+        # Apply to the selected window (single contact or single group).
+        if window.type == WIN_TYPE_CONTACT and window.contact is not None:
+            if cmd_key == LOGGING: window.contact.log_messages = b_value
+            if cmd_key == STORE: window.contact.file_reception = b_value
+            if cmd_key == NOTIFY: window.contact.notifications = b_value
+            contact_list.store_contacts()
+
+        if window.type == WIN_TYPE_GROUP and window.group is not None:
+            if cmd_key == LOGGING: window.group.log_messages = b_value
+            if cmd_key == STORE:
+                # File reception is a per-contact attribute, so apply it
+                # to every member of the group.
+                for c in window:
+                    c.file_reception = b_value
+            if cmd_key == NOTIFY: window.group.notifications = b_value
+            group_list.store_groups()
+
+    else:
+        # 'all' was given: update every contact and every group.
+        for contact in contact_list:
+            if cmd_key == LOGGING: contact.log_messages = b_value
+            if cmd_key == STORE: contact.file_reception = b_value
+            if cmd_key == NOTIFY: contact.notifications = b_value
+        contact_list.store_contacts()
+
+        for group in group_list:
+            if cmd_key == LOGGING: group.log_messages = b_value
+            if cmd_key == NOTIFY: group.notifications = b_value
+        group_list.store_groups()
+
+    command = cmd_header + cmd_value
+
+    if settings.traffic_masking and cmd_key == LOGGING:
+        # Send `log_writer_loop` the new logging setting that is loaded
+        # when the next noise packet is loaded from `noise_packet_loop`.
+        queues[LOG_SETTING_QUEUE].put(b_value)
+
+    window.update_log_messages()
+
+    queue_command(command, settings, queues)
diff --git a/src/transmitter/files.py b/src/transmitter/files.py
new file mode 100755
index 0000000..f6177d6
--- /dev/null
+++ b/src/transmitter/files.py
@@ -0,0 +1,159 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import datetime
+import os
+import typing
+import zlib
+
+from typing import Tuple
+
+from src.common.crypto import byte_padding, csprng, encrypt_and_sign
+from src.common.encoding import int_to_bytes
+from src.common.exceptions import FunctionReturn
+from src.common.misc import readable_size, split_byte_string
+from src.common.statics import *
+
+if typing.TYPE_CHECKING:
+ from src.common.db_settings import Settings
+ from src.transmitter.windows import TxWindow
+
+
+class File(object):
+    """File object wraps methods around file data/header processing.
+
+    The File object is only used when sending a file during traffic
+    masking.
+    """
+
+    def __init__(self,
+                 path: str,
+                 window: 'TxWindow',
+                 settings: 'Settings'
+                 ) -> None:
+        """Load file data from specified path and add headers."""
+        self.window = window
+        self.settings = settings
+
+        self.name = self.get_name(path)
+        data = self.load_file_data(path)
+        size, self.size_hr = self.get_size(path)
+        processed = self.process_file_data(data)
+        self.time_hr, self.plaintext = self.finalize(size, processed)
+
+    @staticmethod
+    def get_name(path: str) -> bytes:
+        """Parse and validate file name (the last component of the path)."""
+        name = (path.split('/')[-1]).encode()
+        File.name_length_check(name)
+        return name
+
+    @staticmethod
+    def name_length_check(name: bytes) -> None:
+        """Ensure that file header fits the first packet."""
+        full_header_length = (FILE_PACKET_CTR_LENGTH
+                              + FILE_ETA_FIELD_LENGTH
+                              + FILE_SIZE_FIELD_LENGTH
+                              + len(name) + len(US_BYTE))
+
+        if full_header_length >= PADDING_LENGTH:
+            raise FunctionReturn("Error: File name is too long.", head_clear=True)
+
+    @staticmethod
+    def load_file_data(path: str) -> bytes:
+        """Load file name, size, and data from the specified path."""
+        if not os.path.isfile(path):
+            raise FunctionReturn("Error: File not found.", head_clear=True)
+        with open(path, 'rb') as f:
+            data = f.read()
+        return data
+
+    @staticmethod
+    def get_size(path: str) -> Tuple[bytes, str]:
+        """Get size of file in bytes and in human readable form."""
+        byte_size = os.path.getsize(path)
+        if byte_size == 0:
+            raise FunctionReturn("Error: Target file is empty.", head_clear=True)
+        size = int_to_bytes(byte_size)
+        size_hr = readable_size(byte_size)
+
+        return size, size_hr
+
+    @staticmethod
+    def process_file_data(data: bytes) -> bytes:
+        """Compress, encrypt and encode file data.
+
+        Compress file to reduce data transmission time. Add an inner
+        layer of encryption to provide sender-based control over partial
+        transmission.
+        """
+        compressed = zlib.compress(data, level=COMPRESSION_LEVEL)
+        file_key = csprng()
+        processed = encrypt_and_sign(compressed, key=file_key)
+        # The inner key travels at the end of the ciphertext: if the
+        # sender cuts the transmission short, the recipient never
+        # receives the key needed to decrypt the partial file.
+        processed += file_key
+        return processed
+
+    def finalize(self, size: bytes, processed: bytes) -> Tuple[str, bytes]:
+        """Finalize packet and generate plaintext."""
+        time_bytes, time_print = self.update_delivery_time(self.name, size, processed, self.settings, self.window)
+        packet_data = time_bytes + size + self.name + US_BYTE + processed
+        return time_print, packet_data
+
+    @staticmethod
+    def update_delivery_time(name: bytes,
+                             size: bytes,
+                             processed: bytes,
+                             settings: 'Settings',
+                             window: 'TxWindow'
+                             ) -> Tuple[bytes, str]:
+        """Calculate transmission time.
+
+        Transmission time depends on delay settings, file size and
+        number of members if the recipient is a group.
+        """
+        # Placeholder ETA bytes are used to estimate the packet count
+        # before the actual ETA is known.
+        time_bytes = bytes(FILE_ETA_FIELD_LENGTH)
+        no_packets = File.count_number_of_packets(name, size, processed, time_bytes)
+        avg_delay = settings.tm_static_delay + (settings.tm_random_delay / 2)
+
+        total_time = len(window) * no_packets * avg_delay
+        total_time *= 2  # Accommodate command packets between file packets
+        total_time += no_packets * TRAFFIC_MASKING_QUEUE_CHECK_DELAY
+
+        # Update delivery time
+        time_bytes = int_to_bytes(int(total_time))
+        time_hr = str(datetime.timedelta(seconds=int(total_time)))
+
+        return time_bytes, time_hr
+
+    @staticmethod
+    def count_number_of_packets(name: bytes,
+                                size: bytes,
+                                processed: bytes,
+                                time_bytes: bytes
+                                ) -> int:
+        """Count number of packets needed for file delivery."""
+        packet_data = time_bytes + size + name + US_BYTE + processed
+        if len(packet_data) < PADDING_LENGTH:
+            return 1
+        else:
+            packet_data += bytes(FILE_PACKET_CTR_LENGTH)
+            packet_data = byte_padding(packet_data)
+            return len(split_byte_string(packet_data, item_len=PADDING_LENGTH))
diff --git a/src/tx/input_loop.py b/src/transmitter/input_loop.py
similarity index 53%
rename from src/tx/input_loop.py
rename to src/transmitter/input_loop.py
index bac566a..7d3e052 100755
--- a/src/tx/input_loop.py
+++ b/src/transmitter/input_loop.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,7 +16,7 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
import os
@@ -23,69 +24,74 @@ import readline
import sys
import typing
-from typing import Dict
+from typing import Dict, NoReturn
from src.common.exceptions import FunctionReturn
from src.common.misc import get_tab_completer, ignored
from src.common.statics import *
-from src.tx.commands import process_command
-from src.tx.contact import add_new_contact
-from src.tx.key_exchanges import new_local_key
-from src.tx.packet import queue_file, queue_message
-from src.tx.user_input import get_input
-from src.tx.windows import TxWindow
+from src.transmitter.commands import process_command
+from src.transmitter.contact import add_new_contact
+from src.transmitter.key_exchanges import export_onion_service_data, new_local_key
+from src.transmitter.packet import queue_file, queue_message
+from src.transmitter.user_input import get_input
+from src.transmitter.windows import TxWindow
if typing.TYPE_CHECKING:
from multiprocessing import Queue
from src.common.db_contacts import ContactList
from src.common.db_groups import GroupList
from src.common.db_masterkey import MasterKey
+ from src.common.db_onion import OnionService
from src.common.db_settings import Settings
from src.common.gateway import Gateway
-def input_loop(queues: Dict[bytes, 'Queue'],
- settings: 'Settings',
- gateway: 'Gateway',
- contact_list: 'ContactList',
- group_list: 'GroupList',
- master_key: 'MasterKey',
- stdin_fd: int) -> None:
+def input_loop(queues: Dict[bytes, 'Queue'],
+ settings: 'Settings',
+ gateway: 'Gateway',
+ contact_list: 'ContactList',
+ group_list: 'GroupList',
+ master_key: 'MasterKey',
+ onion_service: 'OnionService',
+ stdin_fd: int
+ ) -> NoReturn:
"""Get input from user and process it accordingly.
- Tx side of TFC runs two processes -- input and sender loop -- separate
- from one another. This allows prioritized output of queued assembly
- packets. input_loop handles Tx-side functions excluding assembly packet
- encryption, output and logging, and hash ratchet key/counter updates in
- key_list database.
+ Running this loop as a process allows handling different functions
+ including inputs, key exchanges, file loading and assembly packet
+ generation, separate from assembly packet output.
"""
sys.stdin = os.fdopen(stdin_fd)
window = TxWindow(contact_list, group_list)
while True:
with ignored(EOFError, FunctionReturn, KeyboardInterrupt):
- readline.set_completer(get_tab_completer(contact_list, group_list, settings))
+ readline.set_completer(get_tab_completer(contact_list, group_list, settings, gateway))
readline.parse_and_bind('tab: complete')
- window.update_group_win_members(group_list)
+ window.update_window(group_list)
+
+ while not onion_service.is_delivered:
+ export_onion_service_data(contact_list, settings, onion_service, gateway)
while not contact_list.has_local_contact():
new_local_key(contact_list, settings, queues)
while not contact_list.has_contacts():
- add_new_contact(contact_list, group_list, settings, queues)
+ add_new_contact(contact_list, group_list, settings, queues, onion_service)
while not window.is_selected():
- window.select_tx_window(settings, queues)
+ window.select_tx_window(settings, queues, onion_service, gateway)
user_input = get_input(window, settings)
if user_input.type == MESSAGE:
- queue_message(user_input, window, settings, queues[MESSAGE_PACKET_QUEUE])
+ queue_message(user_input, window, settings, queues)
elif user_input.type == FILE:
- queue_file(window, settings, queues[FILE_PACKET_QUEUE], gateway)
+ queue_file(window, settings, queues)
elif user_input.type == COMMAND:
- process_command(user_input, window, settings, queues, contact_list, group_list, master_key)
+ process_command(
+ user_input, window, contact_list, group_list, settings, queues, master_key, onion_service, gateway)
diff --git a/src/transmitter/key_exchanges.py b/src/transmitter/key_exchanges.py
new file mode 100644
index 0000000..b3d3a6b
--- /dev/null
+++ b/src/transmitter/key_exchanges.py
@@ -0,0 +1,543 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import os
+import time
+import typing
+
+from typing import Dict
+
+from src.common.crypto import argon2_kdf, blake2b, csprng, encrypt_and_sign, X448
+from src.common.db_masterkey import MasterKey
+from src.common.encoding import bool_to_bytes, int_to_bytes, pub_key_to_short_address, str_to_bytes
+from src.common.exceptions import FunctionReturn
+from src.common.input import ask_confirmation_code, get_b58_key, nc_bypass_msg, yes
+from src.common.output import m_print, phase, print_fingerprint, print_key, print_on_previous_line
+from src.common.path import ask_path_gui
+from src.common.statics import *
+
+from src.transmitter.packet import queue_command, queue_to_nc
+
+if typing.TYPE_CHECKING:
+ from multiprocessing import Queue
+ from src.common.db_contacts import ContactList
+ from src.common.db_onion import OnionService
+ from src.common.db_settings import Settings
+ from src.common.gateway import Gateway
+ from src.transmitter.windows import TxWindow
+ QueueDict = Dict[bytes, Queue]
+
+
+def export_onion_service_data(contact_list: 'ContactList',
+                              settings: 'Settings',
+                              onion_service: 'OnionService',
+                              gateway: 'Gateway'
+                              ) -> None:
+    """\
+    Send the Tor Onion Service's private key and list of Onion Service
+    public keys of contacts to Relay Program on Networked Computer.
+
+    This private key is not intended to be used by the Transmitter
+    Program. Because the Networked Computer we are exporting it to
+    might not store data, we use the trusted Source Computer to generate
+    the private key and store it safely. The private key is needed by
+    Tor on Networked Computer to start the Onion Service.
+
+    Exporting this private key does not endanger message confidentiality
+    because TFC uses a separate key exchange with separate private key
+    to create the symmetric keys that protect the messages. That private
+    key is never exported to the Networked Computer.
+
+    Access to this key does not give the user any information other
+    than the v3 Onion Address. However, if they have compromised Relay
+    Program to gain access to the key, they can see its public part
+    anyway.
+
+    This key is used by Tor to sign Diffie-Hellman public keys used when
+    clients of contacts establish a secure connection to the Onion
+    Service. This key can't be used to decrypt traffic retrospectively.
+
+    The worst possible case in the situation of key compromise is that
+    the key allows the attacker to start their own copy of the user's
+    Onion Service.
+
+    This does not allow impersonating the user, however, because the
+    attacker is not in possession of keys that allow them to create
+    valid ciphertexts. Even if they inject TFC public keys to conduct a
+    MITM attack, that attack will be detected during fingerprint
+    comparison.
+
+    In addition to the private key, the Onion Service data packet also
+    transmits the list of Onion Service public keys of existing and
+    pending contacts to the Relay Program, as well as the setting that
+    determines whether contact requests are allowed. Bundling all this
+    data in a single packet is great in the sense that a single
+    confirmation code can be used to ensure that Relay Program has all
+    the information necessary to perform its duties.
+    """
+    m_print("Onion Service setup", bold=True, head_clear=True, head=1, tail=1)
+
+    # Pending contacts come first, prefixed with their count so the
+    # Relay Program can split the two lists.
+    pending_contacts = b''.join(contact_list.get_list_of_pending_pub_keys())
+    existing_contacts = b''.join(contact_list.get_list_of_existing_pub_keys())
+    no_pending = int_to_bytes(len(contact_list.get_list_of_pending_pub_keys()))
+    contact_data = no_pending + pending_contacts + existing_contacts
+
+    relay_command = (UNENCRYPTED_DATAGRAM_HEADER
+                     + UNENCRYPTED_ONION_SERVICE_DATA
+                     + onion_service.onion_private_key
+                     + onion_service.conf_code
+                     + bool_to_bytes(settings.allow_contact_requests)
+                     + contact_data)
+
+    gateway.write(relay_command)
+
+    while True:
+        purp_code = ask_confirmation_code('Relay')
+
+        if purp_code == onion_service.conf_code.hex():
+            # Correct code: mark delivery done and rotate the code so it
+            # cannot be replayed.
+            onion_service.is_delivered = True
+            onion_service.new_confirmation_code()
+            break
+
+        elif purp_code == '':
+            # Empty input resends the Onion Service data packet.
+            phase("Resending Onion Service data", head=2)
+            gateway.write(relay_command)
+            phase(DONE)
+            print_on_previous_line(reps=5)
+
+        else:
+            # NOTE(review): the message below ends "pressing ." -- likely
+            # "<Enter>" was stripped as markup; confirm against upstream.
+            m_print(["Incorrect confirmation code. If Relay Program did not",
+                     "receive Onion Service data, resend it by pressing ."], head=1)
+            print_on_previous_line(reps=5, delay=2)
+
+
+def new_local_key(contact_list: 'ContactList',
+                  settings: 'Settings',
+                  queues: 'QueueDict'
+                  ) -> None:
+    """Run local key exchange protocol.
+
+    Local key encrypts commands and data sent from Source Computer to
+    user's Destination Computer. The key is delivered to Destination
+    Computer in a packet encrypted with an ephemeral, symmetric, key
+    encryption key.
+
+    The check-summed Base58 format key decryption key is typed to
+    Receiver Program manually. This prevents local key leak in the
+    following scenarios:
+
+      1. CT is intercepted by an adversary on compromised Networked
+         Computer, but no visual eavesdropping takes place.
+
+      2. CT is not intercepted by an adversary on Networked Computer,
+         but visual eavesdropping records key decryption key.
+
+      3. CT is delivered from Source Computer to Destination Computer
+         directly (bypassing compromised Networked Computer), and
+         visual eavesdropping records key decryption key.
+
+    Once the correct key decryption key is entered to Receiver Program,
+    it will display the 2-hexadecimal confirmation code generated by
+    the Transmitter Program. The code will be entered back to
+    Transmitter Program to confirm the user has successfully delivered
+    the key decryption key.
+
+    The protocol is completed with Transmitter Program sending
+    LOCAL_KEY_RDY signal to the Receiver Program, that then moves to
+    wait for public keys from contact.
+    """
+    try:
+        if settings.traffic_masking and contact_list.has_local_contact():
+            raise FunctionReturn("Error: Command is disabled during traffic masking.", head_clear=True)
+
+        m_print("Local key setup", bold=True, head_clear=True, head=1, tail=1)
+
+        if not contact_list.has_local_contact():
+            # NOTE(review): presumably a brief pause for UI pacing on
+            # first run -- confirm intent.
+            time.sleep(0.5)
+
+        # key: local key, hek: header key, kek: the ephemeral key
+        # encryption key whose Base58 form is typed into the Receiver
+        # Program (see docstring).
+        key = csprng()
+        hek = csprng()
+        kek = csprng()
+        c_code = os.urandom(CONFIRM_CODE_LENGTH)
+
+        local_key_packet = LOCAL_KEY_DATAGRAM_HEADER + encrypt_and_sign(plaintext=key + hek + c_code, key=kek)
+
+        # Deliver local key to Destination computer
+        nc_bypass_msg(NC_BYPASS_START, settings)
+        queue_to_nc(local_key_packet, queues[RELAY_PACKET_QUEUE])
+        while True:
+            print_key("Local key decryption key (to Receiver)", kek, settings)
+            purp_code = ask_confirmation_code('Receiver')
+            if purp_code == c_code.hex():
+                nc_bypass_msg(NC_BYPASS_STOP, settings)
+                break
+            elif purp_code == '':
+                # Empty input resends the encrypted local key packet.
+                phase("Resending local key", head=2)
+                queue_to_nc(local_key_packet, queues[RELAY_PACKET_QUEUE])
+                phase(DONE)
+                print_on_previous_line(reps=(9 if settings.local_testing_mode else 10))
+            else:
+                m_print(["Incorrect confirmation code. If Receiver did not receive",
+                         "the encrypted local key, resend it by pressing ."], head=1)
+                print_on_previous_line(reps=(9 if settings.local_testing_mode else 10), delay=2)
+
+        # Add local contact to contact list database
+        contact_list.add_contact(LOCAL_PUBKEY,
+                                 LOCAL_NICK,
+                                 bytes(FINGERPRINT_LENGTH),
+                                 bytes(FINGERPRINT_LENGTH),
+                                 KEX_STATUS_LOCAL_KEY,
+                                 False, False, False)
+
+        # Add local contact to keyset database
+        queues[KEY_MANAGEMENT_QUEUE].put((KDB_ADD_ENTRY_HEADER,
+                                          LOCAL_PUBKEY,
+                                          key, csprng(),
+                                          hek, csprng()))
+
+        # Notify Receiver that confirmation code was successfully entered
+        queue_command(LOCAL_KEY_RDY, settings, queues)
+
+        m_print("Successfully completed the local key exchange.", bold=True, tail_clear=True, delay=1, head=1)
+        os.system(RESET)
+
+    except (EOFError, KeyboardInterrupt):
+        raise FunctionReturn("Local key setup aborted.", tail_clear=True, delay=1, head=2)
+
+
+def verify_fingerprints(tx_fp: bytes,  # User's fingerprint
+                        rx_fp: bytes   # Contact's fingerprint
+                        ) -> bool:     # True if fingerprints match, else False
+    """\
+    Verify fingerprints over an authenticated out-of-band channel to
+    detect MITM attacks against TFC's key exchange.
+
+    MITM or man-in-the-middle attack is an attack against an inherent
+    problem in cryptography:
+
+    Cryptography is math, nothing more. During key exchange public keys
+    are just very large numbers. There is no way to tell by looking if a
+    number (received from an untrusted network / Networked Computer) is
+    the same number the contact generated.
+
+    Public key fingerprints are values designed to be compared by humans
+    either visually or audibly (or sometimes by using semi-automatic
+    means such as QR-codes). By comparing the fingerprint over an
+    authenticated channel it's possible to verify that the correct key
+    was received from the network.
+    """
+    m_print("To verify received public key was not replaced by an attacker "
+            "call the contact over an end-to-end encrypted line, preferably Signal "
+            "(https://signal.org/). Make sure Signal's safety numbers have been "
+            "verified, and then verbally compare the key fingerprints below.",
+            head_clear=True, max_width=49, head=1, tail=1)
+
+    print_fingerprint(tx_fp, " Your fingerprint (you read) ")
+    print_fingerprint(rx_fp, "Purported fingerprint for contact (they read)")
+
+    return yes("Is the contact's fingerprint correct?")
+
+
+def start_key_exchange(onion_pub_key: bytes, # Public key of contact's v3 Onion Service
+ nick: str, # Contact's nickname
+ contact_list: 'ContactList', # Contact list object
+ settings: 'Settings', # Settings object
+ queues: 'QueueDict' # Dictionary of multiprocessing queues
+ ) -> None:
+ """Start X448 key exchange with the recipient.
+
+ This function first creates the X448 key pair. It then outputs the
+ public key to Relay Program on Networked Computer, that passes the
+ public key to contact's Relay Program. When Contact's public key
+ reaches the user's Relay Program, the user will manually copy the
+ key into their Transmitter Program.
+
+ The X448 shared secret is used to create unidirectional message and
+ header keys, that will be used in forward secret communication. This
+ is followed by the fingerprint verification where the user manually
+ authenticates the public key.
+
+ Once the fingerprint has been accepted, this function will add the
+ contact/key data to contact/key databases, and export that data to
+ the Receiver Program on Destination Computer. The transmission is
+ encrypted with the local key.
+
+ ---
+
+ TFC provides proactive security by making fingerprint verification
+ part of the key exchange. This prevents the situation where the
+ users don't know about the feature, and thus helps minimize the risk
+ of MITM attack.
+
+ The fingerprints can be skipped by pressing Ctrl+C. This feature is
+ not advertised however, because verifying fingerprints the only
+ strong way to be sure TFC is not under MITM attack. When
+ verification is skipped, TFC marks the contact's X448 keys as
+ "Unverified". The fingerprints can later be verified with the
+ `/verify` command: answering `yes` to the question on whether the
+ fingerprints match, marks the X448 keys as "Verified".
+
+ Variable naming:
+ tx = user's key rx = contact's key fp = fingerprint
+ mk = message key hk = header key
+ """
+ if not contact_list.has_pub_key(onion_pub_key):
+ contact_list.add_contact(onion_pub_key, nick,
+ bytes(FINGERPRINT_LENGTH), bytes(FINGERPRINT_LENGTH),
+ KEX_STATUS_PENDING,
+ settings.log_messages_by_default,
+ settings.accept_files_by_default,
+ settings.show_notifications_by_default)
+ contact = contact_list.get_contact_by_pub_key(onion_pub_key)
+
+ # Generate new private key or load cached private key
+ if contact.tfc_private_key is None:
+ tfc_private_key_user = X448.generate_private_key()
+ else:
+ tfc_private_key_user = contact.tfc_private_key
+
+ try:
+ tfc_public_key_user = X448.derive_public_key(tfc_private_key_user)
+
+ # Import public key of contact
+ while True:
+ public_key_packet = PUBLIC_KEY_DATAGRAM_HEADER + onion_pub_key + tfc_public_key_user
+ queue_to_nc(public_key_packet, queues[RELAY_PACKET_QUEUE])
+
+ tfc_public_key_contact = get_b58_key(B58_PUBLIC_KEY, settings, contact.short_address)
+ if tfc_public_key_contact != b'':
+ break
+
+ # Validate public key of contact
+ if len(tfc_public_key_contact) != TFC_PUBLIC_KEY_LENGTH:
+ m_print(["Warning!",
+ "Received invalid size public key.",
+ "Aborting key exchange for your safety."], bold=True, tail=1)
+ raise FunctionReturn("Error: Invalid public key length", output=False)
+
+ if tfc_public_key_contact == bytes(TFC_PUBLIC_KEY_LENGTH):
+ # The public key of contact is zero with negligible probability,
+ # therefore we assume such key is malicious and attempts to set
+ # the shared key to zero.
+ m_print(["Warning!",
+ "Received a malicious zero-public key.",
+ "Aborting key exchange for your safety."], bold=True, tail=1)
+ raise FunctionReturn("Error: Zero public key", output=False)
+
+ # Derive shared key
+ dh_shared_key = X448.shared_key(tfc_private_key_user, tfc_public_key_contact)
+
+ # Domain separate unidirectional keys from shared key by using public
+ # keys as message and the context variable as personalization string.
+ tx_mk = blake2b(tfc_public_key_contact, dh_shared_key, person=b'message_key', digest_size=SYMMETRIC_KEY_LENGTH)
+ rx_mk = blake2b(tfc_public_key_user, dh_shared_key, person=b'message_key', digest_size=SYMMETRIC_KEY_LENGTH)
+ tx_hk = blake2b(tfc_public_key_contact, dh_shared_key, person=b'header_key', digest_size=SYMMETRIC_KEY_LENGTH)
+ rx_hk = blake2b(tfc_public_key_user, dh_shared_key, person=b'header_key', digest_size=SYMMETRIC_KEY_LENGTH)
+
+ # Domain separate fingerprints of public keys by using the
+ # shared secret as key and the context variable as
+ # personalization string. This way entities who might monitor
+ # fingerprint verification channel are unable to correlate
+ # spoken values with public keys that they might see on RAM or
+ # screen of Networked Computer: Public keys can not be derived
+ # from the fingerprints due to preimage resistance of BLAKE2b,
+ # and fingerprints can not be derived from public key without
+ # the X448 shared secret. Using the context variable ensures
+ # fingerprints are distinct from derived message and header keys.
+ tx_fp = blake2b(tfc_public_key_user, dh_shared_key, person=b'fingerprint', digest_size=FINGERPRINT_LENGTH)
+ rx_fp = blake2b(tfc_public_key_contact, dh_shared_key, person=b'fingerprint', digest_size=FINGERPRINT_LENGTH)
+
+ # Verify fingerprints
+ try:
+ if not verify_fingerprints(tx_fp, rx_fp):
+ m_print(["Warning!",
+ "Possible man-in-the-middle attack detected.",
+ "Aborting key exchange for your safety."], bold=True, tail=1)
+ raise FunctionReturn("Error: Fingerprint mismatch", delay=2.5, output=False)
+ kex_status = KEX_STATUS_VERIFIED
+
+ except (EOFError, KeyboardInterrupt):
+ m_print(["Skipping fingerprint verification.",
+ '', "Warning!",
+ "Man-in-the-middle attacks can not be detected",
+ "unless fingerprints are verified! To re-verify",
+ "the contact, use the command '/verify'.",
+                     '', "Press <Enter> to continue."],
+ manual_proceed=True, box=True, head=2)
+ kex_status = KEX_STATUS_UNVERIFIED
+
+ # Send keys to the Receiver Program
+ c_code = blake2b(onion_pub_key, digest_size=CONFIRM_CODE_LENGTH)
+ command = (KEY_EX_ECDHE
+ + onion_pub_key
+ + tx_mk + rx_mk
+ + tx_hk + rx_hk
+ + str_to_bytes(nick))
+
+ queue_command(command, settings, queues)
+
+ while True:
+ purp_code = ask_confirmation_code('Receiver')
+ if purp_code == c_code.hex():
+ break
+
+ elif purp_code == '':
+ phase("Resending contact data", head=2)
+ queue_command(command, settings, queues)
+ phase(DONE)
+ print_on_previous_line(reps=5)
+
+ else:
+ m_print("Incorrect confirmation code.", head=1)
+ print_on_previous_line(reps=4, delay=2)
+
+ # Store contact data into databases
+ contact.tfc_private_key = None
+ contact.tx_fingerprint = tx_fp
+ contact.rx_fingerprint = rx_fp
+ contact.kex_status = kex_status
+ contact_list.store_contacts()
+
+ queues[KEY_MANAGEMENT_QUEUE].put((KDB_ADD_ENTRY_HEADER,
+ onion_pub_key,
+ tx_mk, csprng(),
+ tx_hk, csprng()))
+
+ m_print(f"Successfully added {nick}.", bold=True, tail_clear=True, delay=1, head=1)
+
+ except (EOFError, KeyboardInterrupt):
+ contact.tfc_private_key = tfc_private_key_user
+ raise FunctionReturn("Key exchange interrupted.", tail_clear=True, delay=1, head=2)
+
+
+def create_pre_shared_key(onion_pub_key: bytes, # Public key of contact's v3 Onion Service
+ nick: str, # Nick of contact
+ contact_list: 'ContactList', # Contact list object
+ settings: 'Settings', # Settings object
+ onion_service: 'OnionService', # OnionService object
+ queues: 'QueueDict' # Dictionary of multiprocessing queues
+ ) -> None:
+ """Generate a new pre-shared key for manual key delivery.
+
+ Pre-shared keys offer a low-tech solution against the slowly
+ emerging threat of quantum computers. PSKs are less convenient and
+ not usable in every scenario, but until a quantum-safe key exchange
+ algorithm with reasonably short keys is standardized, TFC can't
+ provide a better alternative against quantum computers.
+
+ The generated keys are protected by a key encryption key, derived
+ from a 256-bit salt and a password (that is to be shared with the
+ recipient) using Argon2d key derivation function.
+
+ The encrypted message and header keys are stored together with salt
+ on a removable media. This media must be a never-before-used device
+ from sealed packaging. Re-using an old device might infect Source
+ Computer, and the malware could either copy sensitive data on that
+ removable media, or Source Computer might start transmitting the
+ sensitive data covertly over the serial interface to malware on
+ Networked Computer.
+
+ Once the key has been exported to the clean drive, contact data and
+ keys are exported to the Receiver Program on Destination computer.
+ The transmission is encrypted with the local key.
+ """
+ try:
+ tx_mk = csprng()
+ tx_hk = csprng()
+ salt = csprng()
+
+ password = MasterKey.new_password("password for PSK")
+
+ phase("Deriving key encryption key", head=2)
+ kek = argon2_kdf(password, salt, rounds=ARGON2_ROUNDS, memory=ARGON2_MIN_MEMORY)
+ phase(DONE)
+
+ ct_tag = encrypt_and_sign(tx_mk + tx_hk, key=kek)
+
+ while True:
+ trunc_addr = pub_key_to_short_address(onion_pub_key)
+ store_d = ask_path_gui(f"Select removable media for {nick}", settings)
+ f_name = f"{store_d}/{onion_service.user_short_address}.psk - Give to {trunc_addr}"
+ try:
+ with open(f_name, 'wb+') as f:
+ f.write(salt + ct_tag)
+ break
+ except PermissionError:
+ m_print("Error: Did not have permission to write to the directory.", delay=0.5)
+ continue
+
+ command = (KEY_EX_PSK_TX
+ + onion_pub_key
+ + tx_mk + csprng()
+ + tx_hk + csprng()
+ + str_to_bytes(nick))
+
+ queue_command(command, settings, queues)
+
+ contact_list.add_contact(onion_pub_key, nick,
+ bytes(FINGERPRINT_LENGTH), bytes(FINGERPRINT_LENGTH),
+ KEX_STATUS_NO_RX_PSK,
+ settings.log_messages_by_default,
+ settings.accept_files_by_default,
+ settings.show_notifications_by_default)
+
+ queues[KEY_MANAGEMENT_QUEUE].put((KDB_ADD_ENTRY_HEADER,
+ onion_pub_key,
+ tx_mk, csprng(),
+ tx_hk, csprng()))
+
+ m_print(f"Successfully added {nick}.", bold=True, tail_clear=True, delay=1, head=1)
+
+ except (EOFError, KeyboardInterrupt):
+ raise FunctionReturn("PSK generation aborted.", tail_clear=True, delay=1, head=2)
+
+
+def rxp_load_psk(window: 'TxWindow',
+ contact_list: 'ContactList',
+ settings: 'Settings',
+ queues: 'QueueDict',
+ ) -> None:
+ """Send command to Receiver Program to load PSK for active contact."""
+ if settings.traffic_masking:
+ raise FunctionReturn("Error: Command is disabled during traffic masking.", head_clear=True)
+
+ if window.type == WIN_TYPE_GROUP or window.contact is None:
+ raise FunctionReturn("Error: Group is selected.", head_clear=True)
+
+ if not contact_list.get_contact_by_pub_key(window.uid).uses_psk():
+ raise FunctionReturn(f"Error: The current key was exchanged with {ECDHE}.", head_clear=True)
+
+ c_code = blake2b(window.uid, digest_size=CONFIRM_CODE_LENGTH)
+ command = KEY_EX_PSK_RX + c_code + window.uid
+ queue_command(command, settings, queues)
+
+ while True:
+ try:
+ purp_code = ask_confirmation_code('Receiver')
+ if purp_code == c_code.hex():
+ window.contact.kex_status = KEX_STATUS_HAS_RX_PSK
+ contact_list.store_contacts()
+ raise FunctionReturn(f"Removed PSK reminder for {window.name}.", tail_clear=True, delay=1)
+ else:
+ m_print("Incorrect confirmation code.", head=1)
+ print_on_previous_line(reps=4, delay=2)
+ except (EOFError, KeyboardInterrupt):
+ raise FunctionReturn("PSK verification aborted.", tail_clear=True, delay=1, head=2)
diff --git a/src/transmitter/packet.py b/src/transmitter/packet.py
new file mode 100755
index 0000000..c053faf
--- /dev/null
+++ b/src/transmitter/packet.py
@@ -0,0 +1,521 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import base64
+import os
+import typing
+import zlib
+
+from typing import Dict, List, Optional, Union
+
+from src.common.crypto import blake2b, byte_padding, csprng, encrypt_and_sign
+from src.common.encoding import bool_to_bytes, int_to_bytes, str_to_bytes
+from src.common.exceptions import CriticalError, FunctionReturn
+from src.common.input import yes
+from src.common.misc import split_byte_string
+from src.common.output import m_print, phase, print_on_previous_line
+from src.common.path import ask_path_gui
+from src.common.statics import *
+
+from src.transmitter.files import File
+from src.transmitter.user_input import UserInput
+
+if typing.TYPE_CHECKING:
+ from multiprocessing import Queue
+ from src.common.db_keys import KeyList
+ from src.common.db_settings import Settings
+ from src.common.gateway import Gateway
+ from src.transmitter.windows import TxWindow, MockWindow
+ QueueDict = Dict[bytes, Queue]
+
+
+def queue_to_nc(packet: bytes,
+ nc_queue: 'Queue',
+ ) -> None:
+ """Queue unencrypted command/exported file to Networked Computer.
+
+ This function queues unencrypted packets intended for Relay Program
+ on Networked Computer. These packets are processed in the order of
+ priority by the `sender_loop` process of src.transmitter.sender_loop
+ module.
+ """
+ nc_queue.put(packet)
+
+
+def queue_command(command: bytes,
+ settings: 'Settings',
+ queues: 'QueueDict'
+ ) -> None:
+ """Split command to assembly packets and queue them for sender_loop()."""
+ assembly_packets = split_to_assembly_packets(command, COMMAND)
+
+ queue_assembly_packets(assembly_packets, COMMAND, settings, queues)
+
+
+def queue_message(user_input: 'UserInput',
+ window: Union['MockWindow', 'TxWindow'],
+ settings: 'Settings',
+ queues: 'QueueDict',
+ header: bytes = b'',
+ whisper: bool = False,
+ log_as_ph: bool = False
+ ) -> None:
+ """\
+ Prepend header to message, split the message into assembly packets,
+ and queue the assembly packets.
+
+ In this function the Transmitter Program adds the headers that allow
+ the recipient's Receiver Program to redirect the received message to
+ the correct window.
+
+ Each message packet starts with a 1 byte whisper-header that
+ determines whether the packet should be logged by the recipient. For
+    private messages no additional information aside from the
+ PRIVATE_MESSAGE_HEADER -- that informs the Receiver Program to use
+ sender's window -- is required.
+
+ For group messages, the GROUP_MESSAGE_HEADER tells the Receiver
+ Program that the header is followed by two additional headers:
+
+ 1) 4-byte Group ID that tells to what group the message was
+ intended to. If the Receiver Program has not whitelisted the
+ group ID, the group message will be ignored. The group ID
+ space was chosen so that the birthday bound is at 65536
+ because it's unlikely a user will ever have that many groups.
+
+ 2) 16-byte group message ID. This random ID is not important for
+ the receiver. Instead, it is used by the sender's Receiver
+ Program to detect what group messages are copies sent to other
+ members of the group (these will be ignored from ephemeral and
+ persistent message log). The message ID space was chosen so
+ that the birthday bound is 2^64 (the same as the hash ratchet
+ counter space).
+
+ Once the headers are determined, the message is split into assembly
+ packets, that are then queued for encryption and transmission by the
+ `sender_loop` process.
+ """
+ if not header:
+ if window.type == WIN_TYPE_GROUP and window.group is not None:
+ header = GROUP_MESSAGE_HEADER + window.group.group_id + os.urandom(GROUP_MSG_ID_LENGTH)
+ else:
+ header = PRIVATE_MESSAGE_HEADER
+
+ payload = bool_to_bytes(whisper) + header + user_input.plaintext.encode()
+ assembly_packets = split_to_assembly_packets(payload, MESSAGE)
+
+ queue_assembly_packets(assembly_packets, MESSAGE, settings, queues, window, log_as_ph)
+
+
+def queue_file(window: 'TxWindow',
+ settings: 'Settings',
+ queues: 'QueueDict'
+ ) -> None:
+ """Ask file path and load file data.
+
+ In TFC there are two ways to send a file.
+
+ For traffic masking, the file is loaded and sent inside normal
+ messages using assembly packet headers dedicated for file
+ transmission. This transmission is much slower, so the File object
+ will determine metadata about the transmission's estimated transfer
+ time, number of packets and the name and size of file. This
+ information is inserted to the first assembly packet so that the
+ recipient can observe the transmission progress from file transfer
+ window.
+
+ When traffic masking is disabled, file transmission is much faster
+ as the file is only encrypted and transferred over serial once
+ before the Relay Program multi-casts the ciphertext to each
+ specified recipient. See the send_file docstring (below) for more
+ details.
+ """
+ path = ask_path_gui("Select file to send...", settings, get_file=True)
+
+ if path.endswith(('tx_contacts', 'tx_groups', 'tx_keys', 'tx_login_data', 'tx_settings',
+ 'rx_contacts', 'rx_groups', 'rx_keys', 'rx_login_data', 'rx_settings',
+ 'tx_serial_settings.json', 'nc_serial_settings.json',
+ 'rx_serial_settings.json', 'tx_onion_db')):
+ raise FunctionReturn("Error: Can't send TFC database.", head_clear=True)
+
+ if not settings.traffic_masking:
+ send_file(path, settings, queues, window)
+ return
+
+ file = File(path, window, settings)
+ assembly_packets = split_to_assembly_packets(file.plaintext, FILE)
+
+ if settings.confirm_sent_files:
+ try:
+ if not yes(f"Send {file.name.decode()} ({file.size_hr}) to {window.type_print} {window.name} "
+ f"({len(assembly_packets)} packets, time: {file.time_hr})?"):
+ raise FunctionReturn("File selection aborted.", head_clear=True)
+ except (EOFError, KeyboardInterrupt):
+ raise FunctionReturn("File selection aborted.", head_clear=True)
+
+ queue_assembly_packets(assembly_packets, FILE, settings, queues, window, log_as_ph=True)
+
+
+def send_file(path: str,
+ settings: 'Settings',
+ queues: 'QueueDict',
+ window: 'TxWindow'
+ ) -> None:
+ """Send file to window members in a single transmission.
+
+ This is the default mode for file transmission, used when traffic
+ masking is not enabled. The file is loaded and compressed before it
+ is encrypted. The encrypted file is then exported to Networked
+ Computer along with a list of Onion Service public keys (members in
+ window) of all recipients to whom the Relay Program will multi-cast
+    the file.
+
+ Once the file ciphertext has been exported, this function will
+ multi-cast the file decryption key to each recipient inside an
+ automated key delivery message that uses a special FILE_KEY_HEADER
+ in place of standard PRIVATE_MESSAGE_HEADER. To know for which file
+ ciphertext the key is for, an identifier must be added to the key
+ delivery message. The identifier in this case is the BLAKE2b digest
+    of the ciphertext itself. The reason for using the digest as the
+    identifier is that it authenticates both the ciphertext and its origin.
+ To understand this, consider the following attack scenario:
+
+ Let the file ciphertext identifier be just a random 32-byte value "ID".
+
+ 1) Alice sends Bob and Chuck (a malicious common peer) a file
+ ciphertext and identifier CT|ID (where | denotes concatenation).
+
+ 2) Chuck who has compromised Bob's Networked Computer interdicts the
+ CT|ID from Alice.
+
+ 3) Chuck decrypts CT in his end, makes edits to the plaintext PT to
+ create PT'.
+
+ 4) Chuck re-encrypts PT' with the same symmetric key to produce CT'.
+
+ 5) Chuck re-uses the ID and produces CT'|ID.
+
+ 6) Chuck uploads the CT'|ID to Bob's Networked Computer and replaces
+ the interdicted CT|ID with it.
+
+    7) When Bob's Receiver Program receives the automated key delivery
+ message from Alice, his Receiver program uses the bundled ID to
+ identify the key is for CT'.
+
+ 8) Bob's Receiver decrypts CT' using the newly received key and
+ obtains Chuck's PT', that appears to come from Alice.
+
+ Now, consider a situation where the ID is instead calculated
+ ID = BLAKE2b(CT), if Chuck edits the PT, the CT' will by definition
+ be different from CT, and the BLAKE2b digest will also be different.
+ In order to make Bob decrypt CT', Chuck needs to also change the
+ hash in Alice's key delivery message, which means Chuck needs to
+ create an existential forgery of the TFC message. Since the Poly1305
+ tag prevents this, the calculated ID is enough to authenticate the
+ ciphertext.
+
+ If Chuck attempts to send their own key delivery message, Chuck's
+ own Onion Service public key used to identify the TFC message key
+ (decryption key for the key delivery message) will be permanently
+ associated with the file hash, so if they inject a file CT, and Bob
+ has decided to enable file reception for Chuck, the file CT will
+ appear to come from Chuck, and not from Alice. From the perspective
+ of Bob, it's as if Chuck had dropped Alice's file and sent him
+ another file instead.
+ """
+ from src.transmitter.windows import MockWindow # Avoid circular import
+
+ if settings.traffic_masking:
+ raise FunctionReturn("Error: Command is disabled during traffic masking.", head_clear=True)
+
+ name = path.split('/')[-1]
+ data = bytearray()
+ data.extend(str_to_bytes(name))
+
+ if not os.path.isfile(path):
+ raise FunctionReturn("Error: File not found.", head_clear=True)
+
+ if os.path.getsize(path) == 0:
+ raise FunctionReturn("Error: Target file is empty.", head_clear=True)
+
+ phase("Reading data")
+ with open(path, 'rb') as f:
+ data.extend(f.read())
+ phase(DONE)
+ print_on_previous_line(flush=True)
+
+ phase("Compressing data")
+ comp = bytes(zlib.compress(bytes(data), level=COMPRESSION_LEVEL))
+ phase(DONE)
+ print_on_previous_line(flush=True)
+
+ phase("Encrypting data")
+ file_key = csprng()
+ file_ct = encrypt_and_sign(comp, file_key)
+ ct_hash = blake2b(file_ct)
+ phase(DONE)
+ print_on_previous_line(flush=True)
+
+ phase("Exporting data")
+ no_contacts = int_to_bytes(len(window))
+ ser_contacts = b''.join([c.onion_pub_key for c in window])
+ file_packet = FILE_DATAGRAM_HEADER + no_contacts + ser_contacts + file_ct
+ queue_to_nc(file_packet, queues[RELAY_PACKET_QUEUE])
+
+ key_delivery_msg = base64.b85encode(ct_hash + file_key).decode()
+ for contact in window:
+ queue_message(user_input=UserInput(key_delivery_msg, MESSAGE),
+ window =MockWindow(contact.onion_pub_key, [contact]),
+ settings =settings,
+ queues =queues,
+ header =FILE_KEY_HEADER,
+ log_as_ph =True)
+ phase(DONE)
+ print_on_previous_line(flush=True)
+ m_print(f"Sent file '{name}' to {window.type_print} {window.name}.")
+
+
+def split_to_assembly_packets(payload: bytes, p_type: str) -> List[bytes]:
+ """Split payload to assembly packets.
+
+ Messages and commands are compressed to reduce transmission time.
+ Files directed to this function during traffic masking have been
+ compressed at an earlier point.
+
+ If the compressed message cannot be sent over one packet, it is
+ split into multiple assembly packets. Long messages are encrypted
+ with an inner layer of XChaCha20-Poly1305 to provide sender based
+ control over partially transmitted data. Regardless of packet size,
+ files always have an inner layer of encryption, and it is added
+ before the file data is passed to this function. Commands do not
+ need sender-based control, so they are only delivered with a hash
+ that makes integrity check easy.
+
+ First assembly packet in file transmission is prepended with an
+ 8-byte packet counter header that tells the sender and receiver how
+ many packets the file transmission requires.
+
+ Each assembly packet is prepended with a header that tells the
+ Receiver Program if the packet is a short (single packet)
+ transmission or if it's the start packet, a continuation packet, or
+ the last packet of a multi-packet transmission.
+ """
+ s_header = {MESSAGE: M_S_HEADER, FILE: F_S_HEADER, COMMAND: C_S_HEADER}[p_type]
+ l_header = {MESSAGE: M_L_HEADER, FILE: F_L_HEADER, COMMAND: C_L_HEADER}[p_type]
+ a_header = {MESSAGE: M_A_HEADER, FILE: F_A_HEADER, COMMAND: C_A_HEADER}[p_type]
+ e_header = {MESSAGE: M_E_HEADER, FILE: F_E_HEADER, COMMAND: C_E_HEADER}[p_type]
+
+ if p_type in [MESSAGE, COMMAND]:
+ payload = zlib.compress(payload, level=COMPRESSION_LEVEL)
+
+ if len(payload) < PADDING_LENGTH:
+ padded = byte_padding(payload)
+ packet_list = [s_header + padded]
+
+ else:
+ if p_type == MESSAGE:
+ msg_key = csprng()
+ payload = encrypt_and_sign(payload, msg_key)
+ payload += msg_key
+
+ elif p_type == FILE:
+ payload = bytes(FILE_PACKET_CTR_LENGTH) + payload
+
+ elif p_type == COMMAND:
+ payload += blake2b(payload)
+
+ padded = byte_padding(payload)
+ p_list = split_byte_string(padded, item_len=PADDING_LENGTH)
+
+ if p_type == FILE:
+ p_list[0] = int_to_bytes(len(p_list)) + p_list[0][FILE_PACKET_CTR_LENGTH:]
+
+ packet_list = ([l_header + p_list[0]] +
+ [a_header + p for p in p_list[1:-1]] +
+ [e_header + p_list[-1]])
+
+ return packet_list
+
+
+def queue_assembly_packets(assembly_packet_list: List[bytes],
+ p_type: str,
+ settings: 'Settings',
+ queues: 'QueueDict',
+ window: Optional[Union['TxWindow', 'MockWindow']] = None,
+ log_as_ph: bool = False
+ ) -> None:
+ """Queue assembly packets for sender_loop().
+
+ This function is the last function on Transmitter Program's
+ `input_loop` process. It feeds the assembly packets to
+ multiprocessing queues along with metadata required for transmission
+ and message logging. The data put into these queues is read by the
+ `sender_loop` process in src.transmitter.sender_loop module.
+ """
+ if p_type in [MESSAGE, FILE] and window is not None:
+
+ if settings.traffic_masking:
+ queue = queues[TM_MESSAGE_PACKET_QUEUE] if p_type == MESSAGE else queues[TM_FILE_PACKET_QUEUE]
+ for assembly_packet in assembly_packet_list:
+ queue.put((assembly_packet, window.log_messages, log_as_ph))
+ else:
+ queue = queues[MESSAGE_PACKET_QUEUE]
+ for c in window:
+ for assembly_packet in assembly_packet_list:
+ queue.put((assembly_packet, c.onion_pub_key, window.log_messages, log_as_ph, window.uid))
+
+ elif p_type == COMMAND:
+ queue = queues[TM_COMMAND_PACKET_QUEUE] if settings.traffic_masking else queues[COMMAND_PACKET_QUEUE]
+ for assembly_packet in assembly_packet_list:
+ queue.put(assembly_packet)
+
+
+def send_packet(key_list: 'KeyList', # Key list object
+ gateway: 'Gateway', # Gateway object
+ log_queue: 'Queue', # Multiprocessing queue for logged messages
+ assembly_packet: bytes, # Padded plaintext assembly packet
+ onion_pub_key: Optional[bytes] = None, # Recipient v3 Onion Service address
+ log_messages: Optional[bool] = None, # When True, log the message assembly packet
+ log_as_ph: Optional[bool] = None # When True, log assembly packet as placeholder data
+ ) -> None:
+ """Encrypt and send assembly packet.
+
+ The assembly packets are encrypted using a symmetric message key.
+ TFC provides forward secrecy via a hash ratchet, meaning previous
+    message key is replaced by its BLAKE2b hash. The preimage
+ resistance of the hash function prevents retrospective decryption of
+ ciphertexts in cases of physical compromise.
+
+ The hash ratchet state (the number of times initial message key has
+ been passed through BLAKE2b) is delivered to recipient inside the
+ hash ratchet counter. This counter is encrypted with a static
+ symmetric key called the header key.
+
+ The encrypted assembly packet and encrypted harac are prepended with
+ datagram headers that tell if the encrypted assembly packet is a
+ command or a message. Packets with MESSAGE_DATAGRAM_HEADER also
+ contain a second header, which is the public key of the recipient's
+ Onion Service. This allows the ciphertext to be requested from Relay
+ Program's server by the correct contact.
+
+ Once the encrypted_packet has been output, the hash ratchet advances
+ to the next state, and the assembly packet is pushed to log_queue,
+ which is read by the `log_writer_loop` process (that can be found
+ at src.common.db_logs). This approach prevents IO delays caused by
+ `input_loop` reading the log file from affecting the `sender_loop`
+ process, which could reveal schedule information under traffic
+ masking mode.
+ """
+ if len(assembly_packet) != ASSEMBLY_PACKET_LENGTH:
+ raise CriticalError("Invalid assembly packet PT length.")
+
+ if onion_pub_key is None:
+ keyset = key_list.get_keyset(LOCAL_PUBKEY)
+ header = COMMAND_DATAGRAM_HEADER
+ else:
+ keyset = key_list.get_keyset(onion_pub_key)
+ header = MESSAGE_DATAGRAM_HEADER + onion_pub_key
+
+ harac_in_bytes = int_to_bytes(keyset.tx_harac)
+ encrypted_harac = encrypt_and_sign(harac_in_bytes, keyset.tx_hk)
+ encrypted_message = encrypt_and_sign(assembly_packet, keyset.tx_mk)
+ encrypted_packet = header + encrypted_harac + encrypted_message
+ gateway.write(encrypted_packet)
+
+ keyset.rotate_tx_mk()
+
+ log_queue.put((onion_pub_key, assembly_packet, log_messages, log_as_ph, key_list.master_key))
+
+
+def cancel_packet(user_input: 'UserInput',
+ window: 'TxWindow',
+ settings: 'Settings',
+ queues: 'QueueDict'
+ ) -> None:
+ """Cancel sent message/file to contact/group.
+
+ In cases where the assembly packets have not yet been encrypted or
+ output to Networked Computer, the queued messages or files to active
+ window can be cancelled. Any single-packet message and file this
+ function removes from the queue/transfer buffer are unavailable to
+ recipient. However, in the case of multi-packet transmissions, if
+ only the last assembly packet is cancelled, the recipient might
+ obtain large enough section of the key that protects the inner
+ encryption layer to allow them to brute force the rest of the key,
+ and thus, decryption of the packet. There is simply no way to
+ prevent this kind of attack without making TFC proprietary and
+ re-writing it in a compiled language (which is very bad for users'
+ rights).
+ """
+ header, p_type = dict(cm=(M_C_HEADER, 'messages'),
+ cf=(F_C_HEADER, 'files' ))[user_input.plaintext]
+
+ if settings.traffic_masking:
+ queue = queues[TM_MESSAGE_PACKET_QUEUE] if header == M_C_HEADER else queues[TM_FILE_PACKET_QUEUE]
+ else:
+ if header == F_C_HEADER:
+ raise FunctionReturn("Files are only queued during traffic masking.", head_clear=True)
+ queue = queues[MESSAGE_PACKET_QUEUE]
+
+ cancel_pt = header + bytes(PADDING_LENGTH)
+ log_as_ph = False # Never log cancel assembly packets as placeholder data
+
+ cancel = False
+ if settings.traffic_masking:
+ if queue.qsize() != 0:
+ cancel = True
+
+ # Get most recent log_messages setting status in queue
+ log_messages = False
+ while queue.qsize() != 0:
+ log_messages = queue.get()[1]
+
+ queue.put((cancel_pt, log_messages, log_as_ph))
+
+        m_print(f"Cancelled queued {p_type}." if cancel else f"No {p_type} to cancel.", head=1, tail=1)
+
+ else:
+ p_buffer = []
+ while queue.qsize() != 0:
+ queue_data = queue.get()
+ window_uid = queue_data[4]
+
+ # Put messages unrelated to the active window into the buffer
+ if window_uid != window.uid:
+ p_buffer.append(queue_data)
+ else:
+ cancel = True
+
+ # Put cancel packets for each window contact to queue first
+ if cancel:
+ for c in window:
+ queue.put((cancel_pt, c.onion_pub_key, c.log_messages, log_as_ph, window.uid))
+
+ # Put buffered tuples back to the queue
+ for p in p_buffer:
+ queue.put(p)
+
+ if cancel:
+ message = f"Cancelled queued {p_type} to {window.type_print} {window.name}."
+ else:
+ message = f"No {p_type} queued for {window.type_print} {window.name}."
+
+ raise FunctionReturn(message, head_clear=True)
diff --git a/src/transmitter/sender_loop.py b/src/transmitter/sender_loop.py
new file mode 100755
index 0000000..29b8bac
--- /dev/null
+++ b/src/transmitter/sender_loop.py
@@ -0,0 +1,276 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import time
+import typing
+
+from typing import Dict, List, Optional, Tuple
+
+from src.common.misc import ignored
+from src.common.statics import *
+
+from src.transmitter.packet import send_packet
+from src.transmitter.traffic_masking import HideRunTime
+
+if typing.TYPE_CHECKING:
+ from multiprocessing import Queue
+ from src.common.db_keys import KeyList
+ from src.common.db_settings import Settings
+ from src.common.gateway import Gateway
+ from src.common.db_settings import Settings
+ QueueDict = Dict[bytes, Queue]
+ Message_buffer = Dict[bytes, List[Tuple[bytes, bytes, bool, bool, bytes]]]
+
+
def sender_loop(queues:   'QueueDict',
                settings: 'Settings',
                gateway:  'Gateway',
                key_list: 'KeyList',
                unittest: bool = False
                ) -> None:
    """Output packets from queues based on queue priority.

    Depending on the traffic masking setting adjusted by the user,
    each round is delegated either to the traffic masking loop or to
    the standard sender loop. Whichever loop runs returns the
    up-to-date Settings object when the user switches modes, so the
    new mode takes effect on the next round.
    """
    m_buffer = {}  # type: Message_buffer

    running = True
    while running:
        if settings.traffic_masking:
            settings = traffic_masking_loop(queues, settings, gateway, key_list)
        else:
            settings, m_buffer = standard_sender_loop(queues, gateway, key_list, m_buffer)

        # During unit testing, perform a single round only.
        running = not unittest
+
+
def traffic_masking_loop(queues:   'QueueDict',
                         settings: 'Settings',
                         gateway:  'Gateway',
                         key_list: 'KeyList',
                         ) -> 'Settings':
    """Run Transmitter Program in traffic masking mode.

    The traffic masking loop loads assembly packets from a set of queues.
    As Python's multiprocessing lacks priority queues, several queues are
    prioritized based on their status.

    Files are only transmitted when messages are not being output: This
    is because file transmission is usually very slow and the user might
    need to send messages in the meantime. Command datagrams are output
    from Source Computer between each message datagram. The frequency in
    output allows commands to take effect as soon as possible but this
    unfortunately slows down message/file delivery by half. Each contact
    in the window is cycled in order.

    When this loop is active, making changes to the recipient list is
    prevented to protect the user from accidentally revealing the use of
    TFC.

    The traffic is masked the following way: If both m_queue and f_queue
    are empty, a noise assembly packet is loaded from np_queue. If no
    command packet is available in c_queue, a noise command packet is
    loaded from nc_queue. Both noise queues are filled with independent
    processes that ensure both noise queues always have packets to
    output.

    TFC does its best to hide the assembly packet loading times and
    encryption duration by using constant time context manager with
    CSPRNG spawned jitter, constant time queue status lookup and constant
    time XChaCha20 cipher. However, since TFC is written in a high-level
    language, it is impossible to guarantee Source Computer never
    reveals to Networked Computer when the user operates the Source
    Computer.

    Returns the updated Settings object when traffic masking is disabled
    by the user, so `sender_loop` can switch to the standard sender loop.
    """
    ws_queue  = queues[WINDOW_SELECT_QUEUE]      # Contacts of the window selected by the user
    m_queue   = queues[TM_MESSAGE_PACKET_QUEUE]  # Message assembly packets
    f_queue   = queues[TM_FILE_PACKET_QUEUE]     # File assembly packets
    c_queue   = queues[TM_COMMAND_PACKET_QUEUE]  # Command assembly packets
    np_queue  = queues[TM_NOISE_PACKET_QUEUE]    # Noise assembly packets (kept non-empty by noise_loop)
    nc_queue  = queues[TM_NOISE_COMMAND_QUEUE]   # Noise command packets (kept non-empty by noise_loop)
    rp_queue  = queues[RELAY_PACKET_QUEUE]       # Unencrypted packets for the Relay Program
    log_queue = queues[LOG_PACKET_QUEUE]         # Assembly packets to be logged
    sm_queue  = queues[SENDER_MODE_QUEUE]        # Updated Settings object when the user switches mode

    # Block until the user has selected a window: nothing is output
    # before that, so no traffic pattern is generated yet.
    while True:
        with ignored(EOFError, KeyboardInterrupt):
            while ws_queue.qsize() == 0:
                time.sleep(0.01)
            window_contacts = ws_queue.get()

            # Window selection command to Receiver Program.
            while c_queue.qsize() == 0:
                time.sleep(0.01)
            send_packet(key_list, gateway, log_queue, c_queue.get())
            break

    while True:
        with ignored(EOFError, KeyboardInterrupt):
            # Load message/file assembly packet.
            with HideRunTime(settings, duration=TRAFFIC_MASKING_QUEUE_CHECK_DELAY):

                # Choosing element from list is constant time.
                #
                #  First queue we evaluate: if m_queue has data   Second to evaluate. If m_queue
                #  in it, False is evaluated as 0, and we load    has no data but f_queue has, the
                #  the first nested list. At that point we load   False is evaluated as 0 meaning
                #  from m_queue regardless of f_queue state.      f_queue (True as 1 and np_queue)
                #                              |                                  |
                #                              v                                  v
                queue = [[m_queue, m_queue], [f_queue, np_queue]][m_queue.qsize() == 0][f_queue.qsize() == 0]

                # Regardless of queue, each .get() returns a tuple with identical
                # amount of data: 256 bytes long bytestring and two booleans.
                assembly_packet, log_messages, log_as_ph = queue.get()  # type: bytes, bool, bool

            for c in window_contacts:
                # Message/file assembly packet to window contact.
                with HideRunTime(settings, delay_type=TRAFFIC_MASKING):
                    send_packet(key_list, gateway, log_queue, assembly_packet, c.onion_pub_key, log_messages)

                # Send a command between each assembly packet for each contact.
                with HideRunTime(settings, delay_type=TRAFFIC_MASKING):

                    # Choosing element from list is constant time.
                    queue = [c_queue, nc_queue][c_queue.qsize() == 0]

                    # Each loaded command and noise command is a 256 long bytestring.
                    command = queue.get()  # type: bytes

                    send_packet(key_list, gateway, log_queue, command)

            # The two queues below are empty until the user is willing to reveal to
            # Networked Computer they are either disabling Traffic masking or exiting
            # TFC. Until that happens, queue status check takes constant time.

            # Check for unencrypted commands that close TFC.
            if rp_queue.qsize() != 0:
                packet  = rp_queue.get()
                command = packet[DATAGRAM_HEADER_LENGTH:]
                if command in [UNENCRYPTED_EXIT_COMMAND, UNENCRYPTED_WIPE_COMMAND]:
                    gateway.write(packet)
                    queues[EXIT_QUEUE].put(command)

            # If traffic masking has been disabled, move all packets to standard_sender_loop queues.
            # Returning only after m/f/c queues drain ensures no queued packet is lost in the switch.
            if sm_queue.qsize() != 0 and all(q.qsize() == 0 for q in (m_queue, f_queue, c_queue)):
                settings = queues[SENDER_MODE_QUEUE].get()
                return settings
+
+
def standard_sender_loop(queues:   'QueueDict',
                         gateway:  'Gateway',
                         key_list: 'KeyList',
                         m_buffer: Optional['Message_buffer'] = None
                         ) -> Tuple['Settings', 'Message_buffer']:
    """Run Transmitter program in standard send mode.

    The standard sender loop loads assembly packets from a set of queues.
    As Python's multiprocessing lacks priority queues, several queues are
    prioritized based on their status:

    KEY_MANAGEMENT_QUEUE has the highest priority. This is to ensure the
    no queued message/command is encrypted with expired keyset.

    COMMAND_PACKET_QUEUE has the second highest priority, to ensure
    commands are issued swiftly to Receiver program. Some commands like
    screen clearing might need to be issued quickly.

    RELAY_PACKET_QUEUE has third highest priority. These are still
    commands but since Relay Program does not handle sensitive data,
    issuing commands to that devices does not take priority.

    Buffered messages have fourth highest priority. This ensures that if
    for whatever reason the keyset is removed, buffered messages do not
    get lost. Packets are loaded from the buffer in FIFO basis ensuring
    packets arrive to the recipient in order.

    MESSAGE_PACKET_QUEUE has fifth highest priority. Any buffered
    messages need to arrive earlier, thus new messages must be
    prioritized after the buffered ones.

    SENDER_MODE_QUEUE has sixth highest priority. This prevents outgoing
    packets from being left in the queues used by this loop. This queue
    returns up-to-date settings object for `sender_loop` parent loop,
    that in turn uses it to start `traffic_masking_loop`.

    Along with settings, this function returns the m_buffer status so that
    assembly packets that could not have been sent due to missing key
    can be output later, if the user resumes to standard_sender_loop and
    adds new keys for the contact.
    """
    km_queue  = queues[KEY_MANAGEMENT_QUEUE]  # Keyset management instructions
    c_queue   = queues[COMMAND_PACKET_QUEUE]  # Commands for the Receiver Program
    rp_queue  = queues[RELAY_PACKET_QUEUE]    # Unencrypted packets for the Relay Program
    m_queue   = queues[MESSAGE_PACKET_QUEUE]  # Message assembly packets
    sm_queue  = queues[SENDER_MODE_QUEUE]     # Updated Settings object when the user switches mode
    log_queue = queues[LOG_PACKET_QUEUE]      # Assembly packets to be logged

    if m_buffer is None:
        m_buffer = dict()

    while True:
        with ignored(EOFError, KeyboardInterrupt):
            # Keyset management first, so no packet below is encrypted
            # with an expired keyset.
            if km_queue.qsize() != 0:
                key_list.manage(*km_queue.get())
                continue

            # Commands to Receiver
            if c_queue.qsize() != 0:
                if key_list.has_local_keyset():
                    send_packet(key_list, gateway, log_queue, c_queue.get())
                continue

            # Commands/files to Networked Computer
            if rp_queue.qsize() != 0:
                packet = rp_queue.get()
                gateway.write(packet)

                command = packet[DATAGRAM_HEADER_LENGTH:]
                if command in [UNENCRYPTED_EXIT_COMMAND, UNENCRYPTED_WIPE_COMMAND]:
                    # Brief delays give the Networked Computer time to receive
                    # the packet before this program exits/wipes (each factor
                    # is 0 or 1 depending on the gateway configuration).
                    time.sleep(gateway.settings.local_testing_mode * 0.1)
                    time.sleep(gateway.settings.data_diode_sockets * 1.5)
                    signal = WIPE if command == UNENCRYPTED_WIPE_COMMAND else EXIT
                    queues[EXIT_QUEUE].put(signal)
                continue

            # Buffered messages
            for onion_pub_key in m_buffer:
                if key_list.has_keyset(onion_pub_key) and m_buffer[onion_pub_key]:
                    # The trailing element of each buffered tuple (window uid)
                    # is not an argument of send_packet, hence the [:-1].
                    send_packet(key_list, gateway, log_queue, *m_buffer[onion_pub_key].pop(0)[:-1])
                    # NOTE(review): this `continue` restarts the `for` loop, not
                    # the outer `while` loop — confirm whether higher-priority
                    # queues were meant to be re-checked after each buffered packet.
                    continue

            # New messages
            if m_queue.qsize() != 0:
                queue_data    = m_queue.get()  # type: Tuple[bytes, bytes, bool, bool, bytes]
                onion_pub_key = queue_data[1]

                if key_list.has_keyset(onion_pub_key):
                    send_packet(key_list, gateway, log_queue, *queue_data[:-1])
                else:
                    # No keyset for the contact yet: buffer the packet (FIFO
                    # per contact) until keys become available.
                    m_buffer.setdefault(onion_pub_key, []).append(queue_data)
                continue

            # If traffic masking has been enabled, switch send mode when all queues are empty.
            if sm_queue.qsize() != 0 and all(q.qsize() == 0 for q in (km_queue, c_queue, rp_queue, m_queue)):
                settings = sm_queue.get()
                return settings, m_buffer

            time.sleep(0.01)
diff --git a/src/transmitter/traffic_masking.py b/src/transmitter/traffic_masking.py
new file mode 100755
index 0000000..dd2d247
--- /dev/null
+++ b/src/transmitter/traffic_masking.py
@@ -0,0 +1,104 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import random
+import threading
+import time
+import typing
+
+from typing import Any, Dict, Optional, Tuple, Union
+
+from src.common.misc import ignored
+from src.common.statics import *
+
+if typing.TYPE_CHECKING:
+ from multiprocessing import Queue
+ from src.common.db_contacts import ContactList
+ from src.common.db_settings import Settings
+ QueueDict = Dict[bytes, Queue]
+
+
class HideRunTime(object):
    """Runtime hiding time context manager.

    By joining a thread that sleeps for a longer time than it takes for
    the function to run, this context manager hides the actual running
    time of the function.

    Note that random.SystemRandom() uses the Kernel CSPRNG (/dev/urandom),
    not Python's weak PRNG based on Mersenne Twister:
        https://docs.python.org/3/library/random.html#random.SystemRandom
    """

    def __init__(self,
                 settings:   'Settings',
                 delay_type: str   = STATIC,
                 duration:   float = 0.0
                 ) -> None:
        """Create a new HideRunTime context manager.

        `delay_type` selects how the enforced minimum runtime is chosen:
          TRAFFIC_MASKING -- static delay from settings plus CSPRNG jitter
          STATIC          -- the fixed `duration` argument
        """
        # Default to zero so an unrecognized delay_type degrades to a
        # no-op context manager instead of raising AttributeError in
        # __enter__ (previously self.length was left unset).
        self.length = 0.0  # type: float

        if delay_type == TRAFFIC_MASKING:
            self.length  = settings.tm_static_delay
            self.length += random.SystemRandom().uniform(0, settings.tm_random_delay)

        elif delay_type == STATIC:
            self.length = duration

    def __enter__(self) -> None:
        """Start the timer thread that enforces the minimum runtime."""
        self.timer = threading.Thread(target=time.sleep, args=(self.length,))
        self.timer.start()

    def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
        """Block until the timer thread has slept for the full length."""
        self.timer.join()
+
+
def noise_loop(queues:       'QueueDict',
               contact_list: Optional['ContactList'] = None,
               unittest:     bool = False
               ) -> None:
    """Generate noise packets for traffic masking.

    This process keeps the noise packet / noise command queue topped up
    with noise assembly packets at all times.
    """
    # This setting is ignored: settings.log_file_masking controls logging of noise packets.
    log_messages = True
    log_as_ph    = True

    if contact_list is None:
        # Noise commands for the Receiver Program.
        header = C_N_HEADER
        queue  = queues[TM_NOISE_COMMAND_QUEUE]
    else:
        # Noise packets for contacts.
        header = P_N_HEADER
        queue  = queues[TM_NOISE_PACKET_QUEUE]

    noise_assembly_packet = header + bytes(PADDING_LENGTH)

    if contact_list is None:
        content = noise_assembly_packet  # type: Union[bytes, Tuple[bytes, bool, bool]]
    else:
        content = (noise_assembly_packet, log_messages, log_as_ph)

    while True:
        with ignored(EOFError, KeyboardInterrupt):
            while queue.qsize() < NOISE_PACKET_BUFFER:
                queue.put(content)
            time.sleep(0.1)

            if unittest:
                break
diff --git a/src/tx/user_input.py b/src/transmitter/user_input.py
similarity index 67%
rename from src/tx/user_input.py
rename to src/transmitter/user_input.py
index fd203e1..cd73820 100755
--- a/src/tx/user_input.py
+++ b/src/transmitter/user_input.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,7 +16,7 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
import typing
@@ -24,11 +25,14 @@ from src.common.output import print_on_previous_line
from src.common.statics import *
if typing.TYPE_CHECKING:
- from src.common.db_settings import Settings
- from src.tx.windows import TxWindow
+ from src.common.db_settings import Settings
+ from src.transmitter.windows import TxWindow
-def process_aliases(plaintext: str, settings: 'Settings', window: 'TxWindow') -> str:
+def process_aliases(plaintext: str,
+ settings: 'Settings',
+ window: 'TxWindow'
+ ) -> str:
"""Check if plaintext is an alias for another command."""
aliases = [(' ', '/unread' ),
(' ', '/exit' if settings.double_space_exits else '/clear'),
@@ -37,6 +41,8 @@ def process_aliases(plaintext: str, settings: 'Settings', window: 'TxWindow') ->
for a in aliases:
if plaintext == a[0]:
plaintext = a[1]
+
+ # Replace what the user typed
print_on_previous_line()
print(f"Msg to {window.type_print} {window.name}: {plaintext}")
break
@@ -45,7 +51,7 @@ def process_aliases(plaintext: str, settings: 'Settings', window: 'TxWindow') ->
def get_input(window: 'TxWindow', settings: 'Settings') -> 'UserInput':
- """Read and process input from user."""
+ """Read and process input from the user and determine its type."""
while True:
try:
plaintext = input(f"Msg to {window.type_print} {window.name}: ")
@@ -60,18 +66,21 @@ def get_input(window: 'TxWindow', settings: 'Settings') -> 'UserInput':
# Determine plaintext type
pt_type = MESSAGE
+
if plaintext == '/file':
pt_type = FILE
+
elif plaintext.startswith('/'):
- plaintext = plaintext[1:]
+ plaintext = plaintext[len('/'):]
pt_type = COMMAND
- # Check if group was empty
- if pt_type in [MESSAGE, FILE] and window.type == WIN_TYPE_GROUP and not window.group.has_members():
- print_on_previous_line()
- print(f"Msg to {window.type_print} {window.name}: Error: Group is empty.")
- print_on_previous_line(delay=0.5)
- continue
+ # Check if the group was empty
+ if pt_type in [MESSAGE, FILE] and window.type == WIN_TYPE_GROUP:
+ if window.group is not None and window.group.empty():
+ print_on_previous_line()
+ print(f"Msg to {window.type_print} {window.name}: Error: The group is empty.")
+ print_on_previous_line(delay=0.5)
+ continue
return UserInput(plaintext, pt_type)
@@ -80,10 +89,10 @@ class UserInput(object):
"""UserInput objects are messages, files or commands.
The type of created UserInput object is determined based on input
- by user. Commands start with slash, but as files are a special case
- of command, /file commands are interpreted as file type. The 'type'
- attribute allows tx_loop to determine what function should process
- the user input.
+ by the user. Commands start with a slash, but as files are a special
+ case of a command, /file commands are interpreted as the file type.
+ The 'type' attribute allows tx_loop to determine what function
+ should process the user input.
"""
def __init__(self, plaintext: str, type_: str) -> None:
diff --git a/src/transmitter/windows.py b/src/transmitter/windows.py
new file mode 100755
index 0000000..57d9554
--- /dev/null
+++ b/src/transmitter/windows.py
@@ -0,0 +1,265 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import typing
+
+from typing import Dict, Generator, Iterable, List, Optional, Sized
+
+from src.common.exceptions import FunctionReturn
+from src.common.input import yes
+from src.common.output import clear_screen
+from src.common.statics import *
+
+from src.transmitter.contact import add_new_contact
+from src.transmitter.key_exchanges import export_onion_service_data, start_key_exchange
+from src.transmitter.packet import queue_command
+
+if typing.TYPE_CHECKING:
+ from multiprocessing import Queue
+ from src.common.db_contacts import Contact, ContactList
+ from src.common.db_groups import Group, GroupList
+ from src.common.db_onion import OnionService
+ from src.common.db_settings import Settings
+ from src.common.gateway import Gateway
+ from src.transmitter.user_input import UserInput
+ QueueDict = Dict[bytes, Queue]
+
+
class MockWindow(Iterable):
    """\
    Mock window simplifies queueing of message assembly packets for
    automatically generated group management and key delivery messages.
    """

    def __init__(self, uid: bytes, contacts: List['Contact']) -> None:
        """Create a new MockWindow object."""
        self.uid             = uid
        self.window_contacts = contacts
        self.log_messages    = contacts[0].log_messages
        self.type            = WIN_TYPE_CONTACT
        self.group           = None  # type: Optional[Group]
        self.name            = None  # type: Optional[str]

    def __iter__(self) -> Generator:
        """Iterate over contact objects in the window."""
        for contact in self.window_contacts:
            yield contact
+
+
class TxWindow(Iterable, Sized):
    """\
    TxWindow object contains data about the active recipient (contact or
    group).
    """

    def __init__(self,
                 contact_list: 'ContactList',
                 group_list:   'GroupList'
                 ) -> None:
        """Create a new TxWindow object."""
        self.contact_list    = contact_list
        self.group_list      = group_list
        self.window_contacts = []    # type: List[Contact]
        self.contact         = None  # type: Optional[Contact]
        self.group           = None  # type: Optional[Group]
        self.name            = ''    # type: str
        self.uid             = b''   # type: bytes
        self.group_id        = None  # type: Optional[bytes]
        self.log_messages    = None  # type: Optional[bool]
        self.type            = ''    # type: str
        self.type_print      = None  # type: Optional[str]

    def __iter__(self) -> Generator:
        """Iterate over Contact objects in the window."""
        yield from self.window_contacts

    def __len__(self) -> int:
        """Return the number of contacts in the window."""
        return len(self.window_contacts)

    def select_tx_window(self,
                         settings:      'Settings',            # Settings object
                         queues:        'QueueDict',           # Dictionary of Queues
                         onion_service: 'OnionService',        # OnionService object
                         gateway:       'Gateway',             # Gateway object
                         selection:     Optional[str] = None,  # Selector for window
                         cmd:           bool          = False  # True when `/msg` command is used to switch window
                         ) -> None:
        """Select specified window or ask the user to specify one."""
        if selection is None:
            # No selector given: list known contacts/groups and prompt the user.
            self.contact_list.print_contacts()
            self.group_list.print_groups()

            if self.contact_list.has_only_pending_contacts():
                # The `<nick>` placeholder below was lost to markup stripping
                # in the previous revision; restored here.
                print("\n'/connect'   sends Onion Service/contact data to Relay"
                      "\n'/add'       adds another contact."
                      "\n'/rm <nick>' removes an existing contact.\n")

            selection = input("Select recipient: ").strip()

        if selection in self.group_list.get_list_of_group_names():
            # Changing the window during traffic masking would reveal use
            # of TFC to the Networked Computer, so it is refused.
            if cmd and settings.traffic_masking and selection != self.name:
                raise FunctionReturn("Error: Can't change window during traffic masking.", head_clear=True)

            self.contact         = None
            self.group           = self.group_list.get_group(selection)
            self.window_contacts = self.group.members
            self.name            = self.group.name
            self.uid             = self.group.group_id
            self.group_id        = self.group.group_id
            self.log_messages    = self.group.log_messages
            self.type            = WIN_TYPE_GROUP
            self.type_print      = 'group'

        elif selection in self.contact_list.contact_selectors():
            if cmd and settings.traffic_masking:
                contact = self.contact_list.get_contact_by_address_or_nick(selection)
                if contact.onion_pub_key != self.uid:
                    raise FunctionReturn("Error: Can't change window during traffic masking.", head_clear=True)

            self.contact = self.contact_list.get_contact_by_address_or_nick(selection)

            # Start the key exchange if one has not yet been performed
            # with the selected contact.
            if self.contact.kex_status == KEX_STATUS_PENDING:
                start_key_exchange(self.contact.onion_pub_key,
                                   self.contact.nick,
                                   self.contact_list,
                                   settings, queues)

            self.group           = None
            self.group_id        = None
            self.window_contacts = [self.contact]
            self.name            = self.contact.nick
            self.uid             = self.contact.onion_pub_key
            self.log_messages    = self.contact.log_messages
            self.type            = WIN_TYPE_CONTACT
            self.type_print      = 'contact'

        elif selection.startswith('/'):
            # The selector is a command for the selection menu itself.
            self.window_selection_command(selection, settings, queues, onion_service, gateway)

        else:
            raise FunctionReturn("Error: No contact/group was found.")

        if settings.traffic_masking:
            # Inform the sender loop about the contacts of the active window.
            queues[WINDOW_SELECT_QUEUE].put(self.window_contacts)

        # Notify the Receiver Program about the window change.
        packet = WIN_SELECT + self.uid
        queue_command(packet, settings, queues)

        clear_screen()

    def window_selection_command(self,
                                 selection:     str,
                                 settings:      'Settings',
                                 queues:        'QueueDict',
                                 onion_service: 'OnionService',
                                 gateway:       'Gateway'
                                 ) -> None:
        """Commands for adding and removing contacts from contact selection menu.

        In situations where only pending contacts are available and
        those contacts are not online, these commands prevent the user
        from not being able to add new contacts.
        """
        if selection == '/add':
            add_new_contact(self.contact_list, self.group_list, settings, queues, onion_service)
            raise FunctionReturn("New contact added.", output=False)

        elif selection == '/connect':
            export_onion_service_data(self.contact_list, settings, onion_service, gateway)

        elif selection.startswith('/rm'):
            try:
                selection = selection.split()[1]
            except IndexError:
                raise FunctionReturn("Error: No account specified.", delay=1)

            if not yes(f"Remove contact '{selection}'?", abort=False, head=1):
                raise FunctionReturn("Removal of contact aborted.", head=0, delay=1)

            if selection in self.contact_list.contact_selectors():
                onion_pub_key = self.contact_list.get_contact_by_address_or_nick(selection).onion_pub_key
                self.contact_list.remove_contact_by_pub_key(onion_pub_key)
                self.contact_list.store_contacts()
                raise FunctionReturn(f"Removed contact '{selection}'.", delay=1)
            else:
                raise FunctionReturn(f"Error: Unknown contact '{selection}'.", delay=1)

        else:
            raise FunctionReturn("Error: Invalid command.", delay=1)

    def deselect(self) -> None:
        """Deselect active window and reset the window state.

        The type comments match the Optional types declared in __init__;
        the previous non-Optional comments were invalid for None values.
        """
        self.window_contacts = []
        self.contact         = None  # type: Optional[Contact]
        self.group           = None  # type: Optional[Group]
        self.name            = ''    # type: str
        self.uid             = b''   # type: bytes
        self.log_messages    = None  # type: Optional[bool]
        self.type            = ''    # type: str
        self.type_print      = None  # type: Optional[str]

    def is_selected(self) -> bool:
        """Return True if a window is selected, else False."""
        return self.name != ''

    def update_log_messages(self) -> None:
        """Update window's logging setting from the active contact/group."""
        if self.type == WIN_TYPE_CONTACT and self.contact is not None:
            self.log_messages = self.contact.log_messages
        if self.type == WIN_TYPE_GROUP and self.group is not None:
            self.log_messages = self.group.log_messages

    def update_window(self, group_list: 'GroupList') -> None:
        """Update window.

        Since previous input may have changed the window data, reload
        window data before prompting for UserInput.
        """
        if self.type == WIN_TYPE_GROUP:
            if self.group_id is not None and group_list.has_group_id(self.group_id):
                self.group           = group_list.get_group_by_id(self.group_id)
                self.window_contacts = self.group.members
                self.name            = self.group.name
                self.uid             = self.group.group_id
            else:
                # The group no longer exists: fall back to no selection.
                self.deselect()

        elif self.type == WIN_TYPE_CONTACT:
            if self.contact is not None and self.contact_list.has_pub_key(self.contact.onion_pub_key):
                # Reload window contact in case keys were re-exchanged.
                self.contact         = self.contact_list.get_contact_by_pub_key(self.contact.onion_pub_key)
                self.window_contacts = [self.contact]
+
+
def select_window(user_input:    'UserInput',
                  window:        'TxWindow',
                  settings:      'Settings',
                  queues:        'QueueDict',
                  onion_service: 'OnionService',
                  gateway:       'Gateway'
                  ) -> None:
    """Select a new window to send messages/files."""
    # The recipient is the second whitespace-separated token of the command.
    try:
        tokens    = user_input.plaintext.split()
        selection = tokens[1]
    except (IndexError, TypeError):
        raise FunctionReturn("Error: Invalid recipient.", head_clear=True)

    window.select_tx_window(settings, queues, onion_service, gateway, selection=selection, cmd=True)
diff --git a/src/tx/commands.py b/src/tx/commands.py
deleted file mode 100755
index 2794670..0000000
--- a/src/tx/commands.py
+++ /dev/null
@@ -1,537 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
-"""
-
-import os
-import struct
-import textwrap
-import time
-import typing
-import zlib
-
-from multiprocessing import Queue
-from typing import Any, Dict, List, Tuple, Union
-
-from src.common.crypto import csprng, encrypt_and_sign
-from src.common.db_logs import access_logs, re_encrypt, remove_logs
-from src.common.encoding import int_to_bytes, str_to_bytes
-from src.common.exceptions import FunctionReturn
-from src.common.input import yes
-from src.common.misc import ensure_dir, get_terminal_width
-from src.common.output import box_print, clear_screen, phase, print_key, print_on_previous_line
-from src.common.path import ask_path_gui
-from src.common.statics import *
-
-from src.tx.commands_g import process_group_command
-from src.tx.contact import add_new_contact, change_nick, contact_setting, show_fingerprints, remove_contact
-from src.tx.key_exchanges import new_local_key, rxm_load_psk
-from src.tx.packet import cancel_packet, queue_command, queue_message, queue_to_nh
-from src.tx.user_input import UserInput
-from src.tx.windows import select_window
-
-if typing.TYPE_CHECKING:
- from src.common.db_contacts import ContactList
- from src.common.db_groups import GroupList
- from src.common.db_masterkey import MasterKey
- from src.common.db_settings import Settings
- from src.tx.windows import TxWindow
-
-
-def process_command(user_input: 'UserInput',
- window: 'TxWindow',
- settings: 'Settings',
- queues: Dict[bytes, 'Queue'],
- contact_list: 'ContactList',
- group_list: 'GroupList',
- master_key: 'MasterKey') -> None:
- """\
- Select function based on first keyword of issued
- command and pass relevant parameters to it.
- """
- c = COMMAND_PACKET_QUEUE
- m = MESSAGE_PACKET_QUEUE
- n = NH_PACKET_QUEUE
-
- # Keyword Function to run ( Parameters )
- # ------------------------------------------------------------------------------------------------------------------------
- d = {'about': (print_about, ),
- 'add': (add_new_contact, contact_list, group_list, settings, queues ),
- 'clear': (clear_screens, user_input, window, settings, queues ),
- 'cmd': (rxm_show_sys_win, user_input, window, settings, queues[c] ),
- 'cm': (cancel_packet, user_input, window, settings, queues ),
- 'cf': (cancel_packet, user_input, window, settings, queues ),
- 'exit': (exit_tfc, settings, queues ),
- 'export': (log_command, user_input, window, contact_list, group_list, settings, queues[c], master_key),
- 'fingerprints': (show_fingerprints, window ),
- 'fe': (export_file, settings, queues[n] ),
- 'fi': (import_file, settings, queues[n] ),
- 'fw': (rxm_show_sys_win, user_input, window, settings, queues[c] ),
- 'group': (process_group_command, user_input, contact_list, group_list, settings, queues, master_key),
- 'help': (print_help, settings ),
- 'history': (log_command, user_input, window, contact_list, group_list, settings, queues[c], master_key),
- 'localkey': (new_local_key, contact_list, settings, queues, ),
- 'logging': (contact_setting, user_input, window, contact_list, group_list, settings, queues[c] ),
- 'msg': (select_window, user_input, window, settings, queues ),
- 'names': (print_recipients, contact_list, group_list, ),
- 'nick': (change_nick, user_input, window, contact_list, group_list, settings, queues[c] ),
- 'notify': (contact_setting, user_input, window, contact_list, group_list, settings, queues[c] ),
- 'passwd': (change_master_key, user_input, contact_list, group_list, settings, queues, master_key),
- 'psk': (rxm_load_psk, window, contact_list, settings, queues[c] ),
- 'reset': (clear_screens, user_input, window, settings, queues ),
- 'rm': (remove_contact, user_input, window, contact_list, group_list, settings, queues, master_key),
- 'rmlogs': (remove_log, user_input, contact_list, settings, queues[c], master_key),
- 'set': (change_setting, user_input, contact_list, group_list, settings, queues ),
- 'settings': (settings.print_settings, ),
- 'store': (contact_setting, user_input, window, contact_list, group_list, settings, queues[c] ),
- 'unread': (rxm_display_unread, settings, queues[c] ),
- 'whisper': (whisper, user_input, window, settings, queues[m] ),
- 'wipe': (wipe, settings, queues )} # type: Dict[str, Any]
-
- try:
- cmd_key = user_input.plaintext.split()[0]
- from_dict = d[cmd_key]
- except KeyError:
- raise FunctionReturn(f"Error: Invalid command '{cmd_key}'")
- except (IndexError, UnboundLocalError):
- raise FunctionReturn(f"Error: Invalid command.")
-
- func = from_dict[0]
- parameters = from_dict[1:]
- func(*parameters)
-
-
-def print_about() -> None:
- """Print URLs that direct to TFC's project site and documentation."""
- clear_screen()
- print(f"\n Tinfoil Chat {VERSION}\n\n"
- " Website: https://github.com/maqp/tfc/\n"
- " Wikipage: https://github.com/maqp/tfc/wiki\n"
- " White paper: https://cs.helsinki.fi/u/oottela/tfc.pdf\n")
-
-
-def clear_screens(user_input: 'UserInput',
- window: 'TxWindow',
- settings: 'Settings',
- queues: Dict[bytes, 'Queue']) -> None:
- """Clear/reset TxM, RxM and NH screens.
-
- Only send unencrypted command to NH if traffic masking is disabled and
- if some related IM account can be bound to active window.
-
- Since reset command removes ephemeral message log on RxM, TxM decides
- the window to reset (in case e.g. previous window selection command
- packet dropped and active window state is inconsistent between TxM/RxM).
- """
- cmd = user_input.plaintext.split()[0]
-
- command = CLEAR_SCREEN_HEADER if cmd == CLEAR else RESET_SCREEN_HEADER + window.uid.encode()
- queue_command(command, settings, queues[COMMAND_PACKET_QUEUE])
-
- clear_screen()
-
- if not settings.session_traffic_masking and window.imc_name is not None:
- im_window = window.imc_name.encode()
- pt_cmd = UNENCRYPTED_SCREEN_CLEAR if cmd == CLEAR else UNENCRYPTED_SCREEN_RESET
- packet = UNENCRYPTED_PACKET_HEADER + pt_cmd + im_window
- queue_to_nh(packet, settings, queues[NH_PACKET_QUEUE])
-
- if cmd == RESET:
- os.system('reset')
-
-
-def rxm_show_sys_win(user_input: 'UserInput',
- window: 'TxWindow',
- settings: 'Settings',
- c_queue: 'Queue') -> None:
- """Display system window on RxM until user presses Enter."""
- cmd = user_input.plaintext.split()[0]
- win_name = dict(cmd=LOCAL_ID, fw=WIN_TYPE_FILE)[cmd]
-
- command = WINDOW_SELECT_HEADER + win_name.encode()
- queue_command(command, settings, c_queue)
-
- box_print(f" returns RxM to {window.name}'s window", manual_proceed=True)
- print_on_previous_line(reps=4, flush=True)
-
- command = WINDOW_SELECT_HEADER + window.uid.encode()
- queue_command(command, settings, c_queue)
-
-
-def exit_tfc(settings: 'Settings', queues: Dict[bytes, 'Queue']) -> None:
- """Exit TFC on TxM/RxM/NH."""
- for q in [COMMAND_PACKET_QUEUE, NH_PACKET_QUEUE]:
- while queues[q].qsize() != 0:
- queues[q].get()
-
- queue_command(EXIT_PROGRAM_HEADER, settings, queues[COMMAND_PACKET_QUEUE])
-
- if not settings.session_traffic_masking:
- if settings.local_testing_mode:
- time.sleep(0.8)
- if settings.data_diode_sockets:
- time.sleep(2.2)
- else:
- time.sleep(settings.race_condition_delay)
-
- queue_to_nh(UNENCRYPTED_PACKET_HEADER + UNENCRYPTED_EXIT_COMMAND, settings, queues[NH_PACKET_QUEUE])
-
-
-def log_command(user_input: 'UserInput',
- window: 'TxWindow',
- contact_list: 'ContactList',
- group_list: 'GroupList',
- settings: 'Settings',
- c_queue: 'Queue',
- master_key: 'MasterKey') -> None:
- """Display message logs or export them to plaintext file on TxM/RxM.
-
- TxM processes sent messages, RxM processes sent and
- received messages for all participants in active window.
- """
- cmd = user_input.plaintext.split()[0]
-
- export, header = dict(export =(True, LOG_EXPORT_HEADER),
- history=(False, LOG_DISPLAY_HEADER))[cmd]
-
- try:
- msg_to_load = int(user_input.plaintext.split()[1])
- except ValueError:
- raise FunctionReturn("Error: Invalid number of messages.")
- except IndexError:
- msg_to_load = 0
-
- if export and not yes(f"Export logs for '{window.name}' in plaintext?", head=1, tail=1):
- raise FunctionReturn("Logfile export aborted.")
-
- try:
- command = header + window.uid.encode() + US_BYTE + int_to_bytes(msg_to_load)
- except struct.error:
- raise FunctionReturn("Error: Invalid number of messages.")
-
- queue_command(command, settings, c_queue)
-
- access_logs(window, contact_list, group_list, settings, master_key, msg_to_load, export)
-
-
-def export_file(settings: 'Settings', nh_queue: 'Queue') -> None:
- """Encrypt and export file to NH.
-
- This is a faster method to send large files. It is used together
- with file import (/fi) command that uploads ciphertext to RxM for
- RxM-side decryption. Key is generated automatically so that bad
- passwords selected by users do not affect security of ciphertexts.
- """
- if settings.session_traffic_masking:
- raise FunctionReturn("Error: Command is disabled during traffic masking.")
-
- path = ask_path_gui("Select file to export...", settings, get_file=True)
- name = path.split('/')[-1]
- data = bytearray()
- data.extend(str_to_bytes(name))
-
- if not os.path.isfile(path):
- raise FunctionReturn("Error: File not found.")
-
- if os.path.getsize(path) == 0:
- raise FunctionReturn("Error: Target file is empty.")
-
- phase("Reading data")
- with open(path, 'rb') as f:
- data.extend(f.read())
- phase(DONE)
-
- phase("Compressing data")
- comp = bytes(zlib.compress(bytes(data), level=COMPRESSION_LEVEL))
- phase(DONE)
-
- phase("Encrypting data")
- file_key = csprng()
- file_ct = encrypt_and_sign(comp, key=file_key)
- phase(DONE)
-
- phase("Exporting data")
- queue_to_nh(EXPORTED_FILE_HEADER + file_ct, settings, nh_queue)
- phase(DONE)
-
- print_key(f"Decryption key for file '{name}':", file_key, settings, no_split=True, file_key=True)
-
-
-def import_file(settings: 'Settings', nh_queue: 'Queue') -> None:
- """\
- Send unencrypted command to NH that tells it to open
- RxM upload prompt for received (exported) file.
- """
- if settings.session_traffic_masking:
- raise FunctionReturn("Error: Command is disabled during traffic masking.")
-
- queue_to_nh(UNENCRYPTED_PACKET_HEADER + UNENCRYPTED_IMPORT_COMMAND, settings, nh_queue)
-
-
-def print_help(settings: 'Settings') -> None:
- """Print the list of commands."""
-
- def help_printer(tuple_list: List[Union[Tuple[str, str, bool]]]) -> None:
- """Print list of commands.
-
- Style depends on terminal width and settings.
- """
- len_longest_command = max(len(t[0]) for t in tuple_list) + 1 # Add one for spacing
-
- for help_cmd, description, display in tuple_list:
- if not display:
- continue
-
- wrapper = textwrap.TextWrapper(width=max(1, terminal_width - len_longest_command))
- desc_lines = wrapper.fill(description).split('\n')
- desc_indent = (len_longest_command - len(help_cmd)) * ' '
-
- print(help_cmd + desc_indent + desc_lines[0])
-
- # Print wrapped description lines with indent
- if len(desc_lines) > 1:
- for line in desc_lines[1:]:
- print(len_longest_command * ' ' + line)
- print('')
-
- notm = not settings.session_traffic_masking
- common = [("/about", "Show links to project resources", True),
- ("/add", "Add new contact", notm),
- ("/cf", "Cancel file transmission to active contact/group", True),
- ("/cm", "Cancel message transmission to active contact/group", True),
- ("/clear, ' '", "Clear screens from TxM, RxM and IM client", True),
- ("/cmd, '//'", "Display command window on RxM", True),
- ("/exit", "Exit TFC on TxM, NH and RxM", True),
- ("/export (n)", "Export (n) messages from recipient's logfile", True),
- ("/file", "Send file to active contact/group", True),
- ("/fingerprints", "Print public key fingerprints of user and contact", True),
- ("/fe", "Encrypt and export file to NH", notm),
- ("/fi", "Import file from NH to RxM", notm),
- ("/fw", "Display file reception window on RxM", True),
- ("/help", "Display this list of commands", True),
- ("/history (n)", "Print (n) messages from recipient's logfile", True),
- ("/localkey", "Generate new local key pair", notm),
- ("/logging {on,off}(' all')", "Change message log setting (for all contacts)", True),
- ("/msg {A,N}", "Change active recipient to account A or nick N", notm),
- ("/names", "List contacts and groups", True),
- ("/nick N", "Change nickname of active recipient to N", True),
- ("/notify {on,off} (' all')", "Change notification settings (for all contacts)", True),
- ("/passwd {tx,rx}", "Change master password on TxM/RxM", notm),
- ("/psk", "Open PSK import dialog on RxM", notm),
- ("/reset", "Reset ephemeral session log on TxM/RxM/IM client", True),
- ("/rm {A,N}", "Remove account A or nick N from TxM and RxM", notm),
- ("/rmlogs {A,N}", "Remove log entries for A/N on TxM and RxM", True),
- ("/set S V", "Change setting S to value V on TxM/RxM(/NH)", True),
- ("/settings", "List setting names, values and descriptions", True),
- ("/store {on,off} (' all')", "Change file reception (for all contacts)", True),
- ("/unread, ' '", "List windows with unread messages on RxM", True),
- ("/whisper M", "Send message M, asking it not to be logged", True),
- ("/wipe", "Wipe all TFC/IM user data and power off systems", True),
- ("Shift + PgUp/PgDn", "Scroll terminal up/down", True),]
-
- groupc = [("/group create G A₁ .. Aₙ ", "Create group G and add accounts A₁ .. Aₙ", notm),
- ("/group add G A₁ .. Aₙ", "Add accounts A₁ .. Aₙ to group G", notm),
- ("/group rm G A₁ .. Aₙ", "Remove accounts A₁ .. Aₙ from group G", notm),
- ("/group rm G", "Remove group G", notm)]
-
- terminal_width = get_terminal_width()
-
- clear_screen()
-
- print(textwrap.fill("List of commands:", width=terminal_width))
- print('')
- help_printer(common)
- print(terminal_width * '─')
-
- if settings.session_traffic_masking:
- print('')
- else:
- print("Group management:\n")
- help_printer(groupc)
- print(terminal_width * '─' + '\n')
-
-
-def print_recipients(contact_list: 'ContactList', group_list: 'GroupList') -> None:
- """Print list of contacts and groups."""
- contact_list.print_contacts()
- group_list.print_groups()
-
-
-def change_master_key(user_input: 'UserInput',
- contact_list: 'ContactList',
- group_list: 'GroupList',
- settings: 'Settings',
- queues: Dict[bytes, 'Queue'],
- master_key: 'MasterKey') -> None:
- """Change master key on TxM/RxM."""
- try:
- if settings.session_traffic_masking:
- raise FunctionReturn("Error: Command is disabled during traffic masking.")
-
- try:
- device = user_input.plaintext.split()[1].lower()
- except IndexError:
- raise FunctionReturn("Error: No target system specified.")
-
- if device not in [TX, RX]:
- raise FunctionReturn("Error: Invalid target system.")
-
- if device == RX:
- queue_command(CHANGE_MASTER_K_HEADER, settings, queues[COMMAND_PACKET_QUEUE])
- return None
-
- old_master_key = master_key.master_key[:]
- master_key.new_master_key()
- new_master_key = master_key.master_key
-
- phase("Re-encrypting databases")
-
- queues[KEY_MANAGEMENT_QUEUE].put((KDB_CHANGE_MASTER_KEY_HEADER, master_key))
-
- ensure_dir(DIR_USER_DATA)
- file_name = f'{DIR_USER_DATA}{settings.software_operation}_logs'
- if os.path.isfile(file_name):
- re_encrypt(old_master_key, new_master_key, settings)
-
- settings.store_settings()
- contact_list.store_contacts()
- group_list.store_groups()
-
- phase(DONE)
- box_print("Master key successfully changed.", head=1)
- clear_screen(delay=1.5)
-
- except KeyboardInterrupt:
- raise FunctionReturn("Password change aborted.", delay=1, head=3, tail_clear=True)
-
-
-def remove_log(user_input: 'UserInput',
- contact_list: 'ContactList',
- settings: 'Settings',
- c_queue: 'Queue',
- master_key: 'MasterKey') -> None:
- """Remove log entries for contact."""
- try:
- selection = user_input.plaintext.split()[1]
- except IndexError:
- raise FunctionReturn("Error: No contact/group specified.")
-
- if not yes(f"Remove logs for {selection}?", head=1):
- raise FunctionReturn("Logfile removal aborted.")
-
- # Swap specified nick to rx_account
- if selection in contact_list.get_list_of_nicks():
- selection = contact_list.get_contact(selection).rx_account
-
- command = LOG_REMOVE_HEADER + selection.encode()
- queue_command(command, settings, c_queue)
-
- remove_logs(selection, settings, master_key)
-
-
-def change_setting(user_input: 'UserInput',
- contact_list: 'ContactList',
- group_list: 'GroupList',
- settings: 'Settings',
- queues: Dict[bytes, 'Queue']) -> None:
- """Change setting on TxM / RxM."""
- try:
- setting = user_input.plaintext.split()[1]
- except IndexError:
- raise FunctionReturn("Error: No setting specified.")
-
- if setting not in settings.key_list:
- raise FunctionReturn(f"Error: Invalid setting '{setting}'")
-
- try:
- value = user_input.plaintext.split()[2]
- except IndexError:
- raise FunctionReturn("Error: No value for setting specified.")
-
- pt_cmd = dict(serial_error_correction=UNENCRYPTED_EC_RATIO,
- serial_baudrate =UNENCRYPTED_BAUDRATE,
- disable_gui_dialog =UNENCRYPTED_GUI_DIALOG)
-
- if setting in pt_cmd:
- if settings.session_traffic_masking:
- raise FunctionReturn("Error: Can't change this setting during traffic masking.")
-
- settings.change_setting(setting, value, contact_list, group_list)
-
- command = CHANGE_SETTING_HEADER + setting.encode() + US_BYTE + value.encode()
- queue_command(command, settings, queues[COMMAND_PACKET_QUEUE])
-
- if setting in pt_cmd:
- packet = UNENCRYPTED_PACKET_HEADER + pt_cmd[setting] + value.encode()
- queue_to_nh(packet, settings, queues[NH_PACKET_QUEUE])
-
-
-def rxm_display_unread(settings: 'Settings', c_queue: 'Queue') -> None:
- """Temporarily display list of windows with unread messages on RxM."""
- queue_command(SHOW_WINDOW_ACTIVITY_HEADER, settings, c_queue)
-
-
-def whisper(user_input: 'UserInput', window: 'TxWindow', settings: 'Settings', m_queue: 'Queue') -> None:
- """Send a message to contact that overrides enabled logging setting.
-
- The functionality of this feature is impossible to enforce, but if
- the recipient can be trusted, it can be used to send keys for to be
- imported files as well as off-the-record messages, without worrying
- they are stored into log files, ruining forward secrecy for imported
- (and later deleted) files.
- """
- message = user_input.plaintext[len('whisper '):]
-
- queue_message(user_input=UserInput(message, MESSAGE),
- window =window,
- settings =settings,
- m_queue =m_queue,
- header =WHISPER_MESSAGE_HEADER,
- log_as_ph =True)
-
-
-def wipe(settings: 'Settings', queues: Dict[bytes, 'Queue']) -> None:
- """Reset terminals, wipe all user data from TxM/RxM/NH and power off systems.
-
- No effective RAM overwriting tool currently exists, so as long as TxM/RxM
- use FDE and DDR3 memory, recovery of user data becomes impossible very fast:
-
- https://www1.cs.fau.de/filepool/projects/coldboot/fares_coldboot.pdf
- """
- if not yes("Wipe all user data and power off systems?"):
- raise FunctionReturn("Wipe command aborted.")
-
- clear_screen()
-
- for q in [COMMAND_PACKET_QUEUE, NH_PACKET_QUEUE]:
- while queues[q].qsize() != 0:
- queues[q].get()
-
- queue_command(WIPE_USER_DATA_HEADER, settings, queues[COMMAND_PACKET_QUEUE])
-
- if not settings.session_traffic_masking:
- if settings.local_testing_mode:
- time.sleep(0.8)
- if settings.data_diode_sockets:
- time.sleep(2.2)
- else:
- time.sleep(settings.race_condition_delay)
-
- queue_to_nh(UNENCRYPTED_PACKET_HEADER + UNENCRYPTED_WIPE_COMMAND, settings, queues[NH_PACKET_QUEUE])
-
- os.system('reset')
diff --git a/src/tx/commands_g.py b/src/tx/commands_g.py
deleted file mode 100644
index 29af352..0000000
--- a/src/tx/commands_g.py
+++ /dev/null
@@ -1,301 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
-"""
-
-import re
-import typing
-
-from typing import Callable, Dict, List
-
-from src.common.db_logs import remove_logs
-from src.common.exceptions import FunctionReturn
-from src.common.input import yes
-from src.common.misc import ignored
-from src.common.output import box_print, group_management_print
-from src.common.statics import *
-
-from src.tx.user_input import UserInput
-from src.tx.packet import queue_command, queue_message
-from src.tx.windows import MockWindow
-
-if typing.TYPE_CHECKING:
- from multiprocessing import Queue
- from src.common.db_contacts import ContactList
- from src.common.db_groups import GroupList
- from src.common.db_masterkey import MasterKey
- from src.common.db_settings import Settings
-
-
-def process_group_command(user_input: 'UserInput',
- contact_list: 'ContactList',
- group_list: 'GroupList',
- settings: 'Settings',
- queues: Dict[bytes, 'Queue'],
- master_key: 'MasterKey') -> None:
- """Parse group command and process it accordingly."""
- if settings.session_traffic_masking:
- raise FunctionReturn("Error: Command is disabled during traffic masking.")
-
- try:
- command_type = user_input.plaintext.split()[1] # type: str
- except IndexError:
- raise FunctionReturn("Error: Invalid group command.")
-
- if command_type not in ['create', 'add', 'rm', 'join']:
- raise FunctionReturn("Error: Invalid group command.")
-
- try:
- group_name = user_input.plaintext.split()[2] # type: str
- except IndexError:
- raise FunctionReturn("Error: No group name specified.")
-
- purp_members = user_input.plaintext.split()[3:] # type: List[str]
-
- # Swap specified nicks to rx_accounts
- for i, m in enumerate(purp_members):
- if m in contact_list.get_list_of_nicks():
- purp_members[i] = contact_list.get_contact(m).rx_account
-
- func_d = dict(create=group_create,
- join =group_create,
- add =group_add_member,
- rm =group_rm_member) # type: Dict[str, Callable]
-
- func = func_d[command_type]
- func(group_name, purp_members, group_list, contact_list, settings, queues, master_key)
- print('')
-
-
-def validate_group_name(group_name: str, contact_list: 'ContactList', group_list: 'GroupList') -> None:
- """Check that group name is valid."""
- # Avoids collision with delimiters
- if not group_name.isprintable():
- raise FunctionReturn("Error: Group name must be printable.")
-
- # Length limited by database's unicode padding
- if len(group_name) >= PADDING_LEN:
- raise FunctionReturn("Error: Group name must be less than 255 chars long.")
-
- if group_name == DUMMY_GROUP:
- raise FunctionReturn("Error: Group name can't use name reserved for database padding.")
-
- if re.match(ACCOUNT_FORMAT, group_name):
- raise FunctionReturn("Error: Group name can't have format of an account.")
-
- if group_name in contact_list.get_list_of_nicks():
- raise FunctionReturn("Error: Group name can't be nick of contact.")
-
- if group_name in group_list.get_list_of_group_names():
- if not yes(f"Group with name '{group_name}' already exists. Overwrite?", head=1):
- raise FunctionReturn("Group creation aborted.")
-
-
-def group_create(group_name: str,
- purp_members: List[str],
- group_list: 'GroupList',
- contact_list: 'ContactList',
- settings: 'Settings',
- queues: Dict[bytes, 'Queue'],
- _: 'MasterKey') -> None:
- """Create a new group.
-
- Validate group name and determine what members that can be added.
- """
- validate_group_name(group_name, contact_list, group_list)
-
- accounts = set(contact_list.get_list_of_accounts())
- purp_accounts = set(purp_members)
- accepted = list(accounts & purp_accounts)
- rejected = list(purp_accounts - accounts)
-
- if len(accepted) > settings.max_number_of_group_members:
- raise FunctionReturn(f"Error: TFC settings only allow {settings.max_number_of_group_members} members per group.")
-
- if len(group_list) == settings.max_number_of_groups:
- raise FunctionReturn(f"Error: TFC settings only allow {settings.max_number_of_groups} groups.")
-
- group_list.add_group(group_name,
- settings.log_messages_by_default,
- settings.show_notifications_by_default,
- members=[contact_list.get_contact(c) for c in accepted])
-
- fields = [f.encode() for f in ([group_name] + accepted)]
- command = GROUP_CREATE_HEADER + US_BYTE.join(fields)
- queue_command(command, settings, queues[COMMAND_PACKET_QUEUE])
-
- group_management_print(NEW_GROUP, accepted, contact_list, group_name)
- group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name)
-
- if accepted:
- if yes("Publish list of group members to participants?"):
- for member in accepted:
- m_list = [m for m in accepted if m != member]
- queue_message(user_input=UserInput(US_STR.join([group_name] + m_list), MESSAGE),
- window =MockWindow(member, [contact_list.get_contact(member)]),
- settings =settings,
- m_queue =queues[MESSAGE_PACKET_QUEUE],
- header =GROUP_MSG_INVITEJOIN_HEADER,
- log_as_ph =True)
- else:
- box_print(f"Created an empty group '{group_name}'", head=1)
-
-
-def group_add_member(group_name: str,
- purp_members: List['str'],
- group_list: 'GroupList',
- contact_list: 'ContactList',
- settings: 'Settings',
- queues: Dict[bytes, 'Queue'],
- master_key: 'MasterKey') -> None:
- """Add new member(s) to group."""
- if group_name not in group_list.get_list_of_group_names():
- if yes(f"Group {group_name} was not found. Create new group?", head=1):
- group_create(group_name, purp_members, group_list, contact_list, settings, queues, master_key)
- return None
- else:
- raise FunctionReturn("Group creation aborted.")
-
- purp_accounts = set(purp_members)
- accounts = set(contact_list.get_list_of_accounts())
- before_adding = set(group_list.get_group(group_name).get_list_of_member_accounts())
- ok_accounts_set = set(accounts & purp_accounts)
- new_in_group_set = set(ok_accounts_set - before_adding)
-
- end_assembly = list(before_adding | new_in_group_set)
- rejected = list(purp_accounts - accounts)
- already_in_g = list(before_adding & purp_accounts)
- new_in_group = list(new_in_group_set)
- ok_accounts = list(ok_accounts_set)
-
- if len(end_assembly) > settings.max_number_of_group_members:
- raise FunctionReturn(f"Error: TFC settings only allow {settings.max_number_of_group_members} members per group.")
-
- group = group_list.get_group(group_name)
- group.add_members([contact_list.get_contact(a) for a in new_in_group])
-
- fields = [f.encode() for f in ([group_name] + ok_accounts)]
- command = GROUP_ADD_HEADER + US_BYTE.join(fields)
- queue_command(command, settings, queues[COMMAND_PACKET_QUEUE])
-
- group_management_print(ADDED_MEMBERS, new_in_group, contact_list, group_name)
- group_management_print(ALREADY_MEMBER, already_in_g, contact_list, group_name)
- group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name)
-
- if new_in_group:
- if yes("Publish new list of members to involved?"):
- for member in before_adding:
- queue_message(user_input=UserInput(US_STR.join([group_name] + new_in_group), MESSAGE),
- window =MockWindow(member, [contact_list.get_contact(member)]),
- settings =settings,
- m_queue =queues[MESSAGE_PACKET_QUEUE],
- header =GROUP_MSG_MEMBER_ADD_HEADER,
- log_as_ph =True)
-
- for member in new_in_group:
- m_list = [m for m in end_assembly if m != member]
- queue_message(user_input=UserInput(US_STR.join([group_name] + m_list), MESSAGE),
- window =MockWindow(member, [contact_list.get_contact(member)]),
- settings =settings,
- m_queue =queues[MESSAGE_PACKET_QUEUE],
- header =GROUP_MSG_INVITEJOIN_HEADER,
- log_as_ph =True)
-
-
-def group_rm_member(group_name: str,
- purp_members: List[str],
- group_list: 'GroupList',
- contact_list: 'ContactList',
- settings: 'Settings',
- queues: Dict[bytes, 'Queue'],
- master_key: 'MasterKey') -> None:
- """Remove member(s) from group or group itself."""
- if not purp_members:
- group_rm_group(group_name, group_list, settings, queues, master_key)
-
- if group_name not in group_list.get_list_of_group_names():
- raise FunctionReturn(f"Group '{group_name}' does not exist.")
-
- purp_accounts = set(purp_members)
- accounts = set(contact_list.get_list_of_accounts())
- before_removal = set(group_list.get_group(group_name).get_list_of_member_accounts())
- ok_accounts_set = set(purp_accounts & accounts)
- removable_set = set(before_removal & ok_accounts_set)
-
- end_assembly = list(before_removal - removable_set)
- not_in_group = list(ok_accounts_set - before_removal)
- rejected = list(purp_accounts - accounts)
- removable = list(removable_set)
- ok_accounts = list(ok_accounts_set)
-
- group = group_list.get_group(group_name)
- group.remove_members(removable)
-
- fields = [f.encode() for f in ([group_name] + ok_accounts)]
- command = GROUP_REMOVE_M_HEADER + US_BYTE.join(fields)
- queue_command(command, settings, queues[COMMAND_PACKET_QUEUE])
-
- group_management_print(REMOVED_MEMBERS, removable, contact_list, group_name)
- group_management_print(NOT_IN_GROUP, not_in_group, contact_list, group_name)
- group_management_print(UNKNOWN_ACCOUNTS, rejected, contact_list, group_name)
-
- if removable and end_assembly and yes("Publish list of removed members to remaining members?"):
- for member in end_assembly:
- queue_message(user_input=UserInput(US_STR.join([group_name] + removable), MESSAGE),
- window =MockWindow(member, [contact_list.get_contact(member)]),
- settings =settings,
- m_queue =queues[MESSAGE_PACKET_QUEUE],
- header =GROUP_MSG_MEMBER_REM_HEADER,
- log_as_ph =True)
-
-
-def group_rm_group(group_name: str,
- group_list: 'GroupList',
- settings: 'Settings',
- queues: Dict[bytes, 'Queue'],
- master_key: 'MasterKey'):
- """Remove group with it's members."""
- if not yes(f"Remove group '{group_name}'?", head=1):
- raise FunctionReturn("Group removal aborted.")
-
- rm_logs = yes("Also remove logs for the group?", head=1)
-
- command = GROUP_DELETE_HEADER + group_name.encode()
- queue_command(command, settings, queues[COMMAND_PACKET_QUEUE])
-
- if rm_logs:
- command = LOG_REMOVE_HEADER + group_name.encode()
- queue_command(command, settings, queues[COMMAND_PACKET_QUEUE])
- with ignored(FunctionReturn):
- remove_logs(group_name, settings, master_key)
-
- if group_name not in group_list.get_list_of_group_names():
- raise FunctionReturn(f"TxM has no group '{group_name}' to remove.")
-
- group = group_list.get_group(group_name)
- if group.has_members() and yes("Notify members about leaving the group?"):
- for member in group:
- queue_message(user_input=UserInput(group_name, MESSAGE),
- window =MockWindow(member.rx_account, [member]),
- settings =settings,
- m_queue =queues[MESSAGE_PACKET_QUEUE],
- header =GROUP_MSG_EXIT_GROUP_HEADER,
- log_as_ph =True)
-
- group_list.remove_group(group_name)
- raise FunctionReturn(f"Removed group '{group_name}'")
diff --git a/src/tx/contact.py b/src/tx/contact.py
deleted file mode 100644
index 5a590a7..0000000
--- a/src/tx/contact.py
+++ /dev/null
@@ -1,253 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
-"""
-
-import typing
-
-from typing import Dict
-
-from src.common.db_logs import remove_logs
-from src.common.exceptions import FunctionReturn
-from src.common.input import box_input, yes
-from src.common.misc import ignored, validate_account, validate_key_exchange, validate_nick
-from src.common.output import box_print, c_print, clear_screen, print_fingerprint
-from src.common.statics import *
-
-from src.tx.key_exchanges import create_pre_shared_key, start_key_exchange
-from src.tx.packet import queue_command
-
-if typing.TYPE_CHECKING:
- from multiprocessing import Queue
- from src.common.db_contacts import ContactList
- from src.common.db_groups import GroupList
- from src.common.db_masterkey import MasterKey
- from src.common.db_settings import Settings
- from src.tx.user_input import UserInput
- from src.tx.windows import TxWindow
-
-
-def add_new_contact(contact_list: 'ContactList',
- group_list: 'GroupList',
- settings: 'Settings',
- queues: Dict[bytes, 'Queue']) -> None:
- """Prompt for contact account details and initialize desired key exchange."""
- try:
- if settings.session_traffic_masking:
- raise FunctionReturn("Error: Command is disabled during traffic masking.")
-
- if len(contact_list) >= settings.max_number_of_contacts:
- raise FunctionReturn(f"Error: TFC settings only allow {settings.max_number_of_contacts} accounts.")
-
- clear_screen()
- c_print("Add new contact", head=1)
-
- contact_account = box_input("Contact account", validator=validate_account).strip()
- user_account = box_input("Your account", validator=validate_account).strip()
- default_nick = contact_account.split('@')[0].capitalize()
- contact_nick = box_input(f"Contact nick [{default_nick}]", default=default_nick, validator=validate_nick,
- validator_args=(contact_list, group_list, contact_account)).strip()
- key_exchange = box_input("Key exchange ([X25519],PSK) ", default=X25519, validator=validate_key_exchange).strip()
-
- if key_exchange.lower() in X25519:
- start_key_exchange(contact_account, user_account, contact_nick, contact_list, settings, queues)
-
- elif key_exchange.lower() in PSK:
- create_pre_shared_key(contact_account, user_account, contact_nick, contact_list, settings, queues)
-
- except KeyboardInterrupt:
- raise FunctionReturn("Contact creation aborted.", head_clear=True)
-
-
-def remove_contact(user_input: 'UserInput',
- window: 'TxWindow',
- contact_list: 'ContactList',
- group_list: 'GroupList',
- settings: 'Settings',
- queues: Dict[bytes, 'Queue'],
- master_key: 'MasterKey') -> None:
- """Remove contact on TxM/RxM."""
- if settings.session_traffic_masking:
- raise FunctionReturn("Error: Command is disabled during traffic masking.")
-
- try:
- selection = user_input.plaintext.split()[1]
- except IndexError:
- raise FunctionReturn("Error: No account specified.")
-
- if not yes(f"Remove {selection} completely?", head=1):
- raise FunctionReturn("Removal of contact aborted.")
-
- rm_logs = yes(f"Also remove logs for {selection}?", head=1)
-
- # Load account if selector was nick
- if selection in contact_list.get_list_of_nicks():
- selection = contact_list.get_contact(selection).rx_account
-
- packet = CONTACT_REMOVE_HEADER + selection.encode()
- queue_command(packet, settings, queues[COMMAND_PACKET_QUEUE])
-
- if rm_logs:
- packet = LOG_REMOVE_HEADER + selection.encode()
- queue_command(packet, settings, queues[COMMAND_PACKET_QUEUE])
- with ignored(FunctionReturn):
- remove_logs(selection, settings, master_key)
-
- queues[KEY_MANAGEMENT_QUEUE].put((KDB_REMOVE_ENTRY_HEADER, selection))
-
- if selection in contact_list.get_list_of_accounts():
- contact_list.remove_contact(selection)
- box_print(f"Removed {selection} from contacts.", head=1, tail=1)
- else:
- box_print(f"TxM has no {selection} to remove.", head=1, tail=1)
-
- if any([g.remove_members([selection]) for g in group_list]):
- box_print(f"Removed {selection} from group(s).", tail=1)
-
- if window.type == WIN_TYPE_CONTACT:
- if selection == window.uid:
- window.deselect_window()
-
- if window.type == WIN_TYPE_GROUP:
- for c in window:
- if selection == c.rx_account:
- window.update_group_win_members(group_list)
-
- # If last member from group is removed, deselect group.
- # Deselection is not done in update_group_win_members
- # because it would prevent selecting the empty group
- # for group related commands such as notifications.
- if not window.window_contacts:
- window.deselect_window()
-
-
-def change_nick(user_input: 'UserInput',
- window: 'TxWindow',
- contact_list: 'ContactList',
- group_list: 'GroupList',
- settings: 'Settings',
- c_queue: 'Queue') -> None:
- """Change nick of contact."""
- if window.type == WIN_TYPE_GROUP:
- raise FunctionReturn("Error: Group is selected.")
-
- try:
- nick = user_input.plaintext.split()[1]
- except IndexError:
- raise FunctionReturn("Error: No nick specified.")
-
- rx_account = window.contact.rx_account
- error_msg = validate_nick(nick, (contact_list, group_list, rx_account))
- if error_msg:
- raise FunctionReturn(error_msg)
-
- window.contact.nick = nick
- window.name = nick
- contact_list.store_contacts()
-
- packet = CHANGE_NICK_HEADER + rx_account.encode() + US_BYTE + nick.encode()
- queue_command(packet, settings, c_queue)
-
-
-def contact_setting(user_input: 'UserInput',
- window: 'TxWindow',
- contact_list: 'ContactList',
- group_list: 'GroupList',
- settings: 'Settings',
- c_queue: 'Queue') -> None:
- """\
- Change logging, file reception, or received message
- notification setting of group or (all) contact(s).
- """
- try:
- parameters = user_input.plaintext.split()
- cmd_key = parameters[0]
- cmd_header = {LOGGING: CHANGE_LOGGING_HEADER,
- STORE: CHANGE_FILE_R_HEADER,
- NOTIFY: CHANGE_NOTIFY_HEADER}[cmd_key]
-
- s_value, b_value = dict(on =(ENABLE, True),
- off=(DISABLE, False))[parameters[1]]
-
- except (IndexError, KeyError):
- raise FunctionReturn("Error: Invalid command.")
-
- # If second parameter 'all' is included, apply setting for all contacts and groups
- try:
- target = b''
- if parameters[2] == ALL:
- cmd_value = s_value.upper() + US_BYTE
- else:
- raise FunctionReturn("Error: Invalid command.")
- except IndexError:
- target = window.uid.encode()
- cmd_value = s_value + US_BYTE + target
-
- if target:
- if window.type == WIN_TYPE_CONTACT:
- if cmd_key == LOGGING: window.contact.log_messages = b_value
- if cmd_key == STORE: window.contact.file_reception = b_value
- if cmd_key == NOTIFY: window.contact.notifications = b_value
- contact_list.store_contacts()
-
- if window.type == WIN_TYPE_GROUP:
- if cmd_key == LOGGING: window.group.log_messages = b_value
- if cmd_key == STORE:
- for c in window:
- c.file_reception = b_value
- if cmd_key == NOTIFY: window.group.notifications = b_value
- group_list.store_groups()
-
- else:
- for contact in contact_list:
- if cmd_key == LOGGING: contact.log_messages = b_value
- if cmd_key == STORE: contact.file_reception = b_value
- if cmd_key == NOTIFY: contact.notifications = b_value
- contact_list.store_contacts()
-
- for group in group_list:
- if cmd_key == LOGGING: group.log_messages = b_value
- if cmd_key == NOTIFY: group.notifications = b_value
- group_list.store_groups()
-
- packet = cmd_header + cmd_value
-
- if settings.session_traffic_masking and cmd_key == LOGGING:
- window.update_log_messages()
- queue_command(packet, settings, c_queue, window)
- else:
- window.update_log_messages()
- queue_command(packet, settings, c_queue)
-
-
-def show_fingerprints(window: 'TxWindow') -> None:
- """Print domain separated fingerprints of public keys on TxM.
-
- Comparison of fingerprints over authenticated channel can be
- used to verify users are not under man-in-the-middle attack.
- """
- if window.type == WIN_TYPE_GROUP:
- raise FunctionReturn('Group is selected.')
-
- if window.contact.tx_fingerprint == bytes(FINGERPRINT_LEN):
- raise FunctionReturn(f"Pre-shared keys have no fingerprints.")
-
- clear_screen()
- print_fingerprint(window.contact.tx_fingerprint, " Your fingerprint (you read) ")
- print_fingerprint(window.contact.rx_fingerprint, "Contact's fingerprint (they read)")
- print('')
diff --git a/src/tx/files.py b/src/tx/files.py
deleted file mode 100755
index d901a70..0000000
--- a/src/tx/files.py
+++ /dev/null
@@ -1,161 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of .
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
-"""
-
-import base64
-import datetime
-import os
-import typing
-import zlib
-
-from src.common.crypto import byte_padding, csprng, encrypt_and_sign
-from src.common.encoding import int_to_bytes
-from src.common.exceptions import FunctionReturn
-from src.common.misc import readable_size, split_byte_string
-from src.common.reed_solomon import RSCodec
-from src.common.statics import *
-
-if typing.TYPE_CHECKING:
- from src.common.db_settings import Settings
- from src.common.gateway import Gateway
- from src.tx.windows import TxWindow
-
-
-class File(object):
- """File object wraps methods around file data/header processing."""
-
- def __init__(self,
- path: str,
- window: 'TxWindow',
- settings: 'Settings',
- gateway: 'Gateway') -> None:
- """Load file data from specified path and add headers."""
- self.path = path
- self.window = window
- self.settings = settings
- self.gateway = gateway
-
- self.name = None # type: bytes
- self.size = None # type: bytes
- self.data = None # type: bytes
-
- self.time_bytes = bytes(FILE_ETA_FIELD_LEN)
- self.time_print = ''
- self.size_print = ''
- self.plaintext = b''
-
- self.load_file_data()
- self.process_file_data()
- self.finalize()
-
- def load_file_data(self) -> None:
- """Load file name, size and data from specified path."""
- if not os.path.isfile(self.path):
- raise FunctionReturn("Error: File not found.")
-
- self.name = (self.path.split('/')[-1]).encode()
- self.name_length_check()
-
- byte_size = os.path.getsize(self.path)
- if byte_size == 0:
- raise FunctionReturn("Error: Target file is empty.")
- self.size = int_to_bytes(byte_size)
- self.size_print = readable_size(byte_size)
-
- with open(self.path, 'rb') as f:
- self.data = f.read()
-
- def process_file_data(self) -> None:
- """Compress, encrypt and encode file data.
-
- Compress file to reduce data transmission time. Add inner
- layer of encryption to provide sender-based control over
- partial transmission. Encode data with Base85. This prevents
- inner ciphertext from colliding with file header delimiters.
- """
- compressed = zlib.compress(self.data, level=COMPRESSION_LEVEL)
-
- file_key = csprng()
- encrypted = encrypt_and_sign(compressed, key=file_key)
- encrypted += file_key
-
- self.data = base64.b85encode(encrypted)
-
- def finalize(self) -> None:
- """Finalize packet and generate plaintext."""
- self.update_delivery_time()
- self.plaintext = self.time_bytes + self.size + self.name + US_BYTE + self.data
-
- def name_length_check(self) -> None:
- """Ensure that file header fits the first packet."""
- header = bytes(FILE_PACKET_CTR_LEN + FILE_ETA_FIELD_LEN + FILE_SIZE_FIELD_LEN)
- header += self.name + US_BYTE
- if len(header) >= PADDING_LEN:
- raise FunctionReturn("Error: File name is too long.")
-
- def count_number_of_packets(self) -> int:
- """Count number of packets needed for file delivery."""
- packet_data = self.time_bytes + self.size + self.name + US_BYTE + self.data
- if len(packet_data) < PADDING_LEN:
- return 1
- else:
- packet_data += bytes(FILE_PACKET_CTR_LEN)
- packet_data = byte_padding(packet_data)
- return len(split_byte_string(packet_data, item_len=PADDING_LEN))
-
- def update_delivery_time(self) -> None:
- """Calculate transmission time.
-
- Transmission time is based on average delays and settings.
- """
- no_packets = self.count_number_of_packets()
-
- if self.settings.session_traffic_masking:
- avg_delay = self.settings.traffic_masking_static_delay + (self.settings.traffic_masking_random_delay / 2)
- if self.settings.multi_packet_random_delay:
- avg_delay += (self.settings.max_duration_of_random_delay / 2)
-
- total_time = len(self.window) * no_packets * avg_delay
- total_time *= 2 # Accommodate command packets between file packets
- total_time += no_packets * TRAFFIC_MASKING_QUEUE_CHECK_DELAY
-
- else:
- # Determine total data to be transmitted over serial
- rs = RSCodec(2 * self.settings.session_serial_error_correction)
- total_data = 0
- for c in self.window:
- data = os.urandom(PACKET_LENGTH) + c.rx_account.encode() + c.tx_account.encode()
- enc_data = rs.encode(data)
- total_data += no_packets * len(enc_data)
-
- # Determine time required to send all data
- total_time = 0.0
- if self.settings.local_testing_mode:
- total_time += no_packets * LOCAL_TESTING_PACKET_DELAY
- else:
- total_bauds = total_data * BAUDS_PER_BYTE
- total_time += total_bauds / self.settings.session_serial_baudrate
- total_time += no_packets * self.settings.txm_inter_packet_delay
-
- if self.settings.multi_packet_random_delay:
- total_time += no_packets * (self.settings.max_duration_of_random_delay / 2)
-
- # Update delivery time
- self.time_bytes = int_to_bytes(int(total_time))
- self.time_print = str(datetime.timedelta(seconds=int(total_time)))
diff --git a/src/tx/key_exchanges.py b/src/tx/key_exchanges.py
deleted file mode 100644
index f5d1b27..0000000
--- a/src/tx/key_exchanges.py
+++ /dev/null
@@ -1,331 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
-"""
-
-import os
-import time
-import typing
-
-from typing import Dict
-
-import nacl.bindings
-import nacl.encoding
-import nacl.public
-
-from src.common.crypto import argon2_kdf, csprng, encrypt_and_sign, hash_chain
-from src.common.db_masterkey import MasterKey
-from src.common.exceptions import FunctionReturn
-from src.common.input import ask_confirmation_code, get_b58_key, nh_bypass_msg, yes
-from src.common.output import box_print, c_print, clear_screen, message_printer, print_key
-from src.common.output import phase, print_fingerprint, print_on_previous_line
-from src.common.path import ask_path_gui
-from src.common.statics import *
-
-from src.tx.packet import queue_command, queue_to_nh
-
-if typing.TYPE_CHECKING:
- from multiprocessing import Queue
- from src.common.db_contacts import ContactList
- from src.common.db_settings import Settings
- from src.tx.windows import TxWindow
-
-
-def new_local_key(contact_list: 'ContactList',
- settings: 'Settings',
- queues: Dict[bytes, 'Queue']) -> None:
- """Run Tx-side local key exchange protocol.
-
- Local key encrypts commands and data sent from TxM to RxM. The key is
- delivered to RxM in packet encrypted with an ephemeral symmetric key.
-
- The checksummed Base58 format key decryption key is typed on RxM manually.
- This prevents local key leak in following scenarios:
-
- 1. CT is intercepted by adversary on compromised NH but no visual
- eavesdropping takes place.
- 2. CT is not intercepted by adversary on NH but visual eavesdropping
- records decryption key.
- 3. CT is delivered from TxM to RxM (compromised NH is bypassed) and
- visual eavesdropping records decryption key.
-
- Once correct key decryption key is entered on RxM, Receiver program will
- display the 1-byte confirmation code generated by Transmitter program.
- The code will be entered on TxM to confirm user has successfully delivered
- the key decryption key.
-
- The protocol is completed with Transmitter program sending an ACK message
- to Receiver program, that then moves to wait for public keys from contact.
- """
- try:
- if settings.session_traffic_masking and contact_list.has_local_contact:
- raise FunctionReturn("Error: Command is disabled during traffic masking.")
-
- clear_screen()
- c_print("Local key setup", head=1, tail=1)
-
- c_code = os.urandom(1)
- key = csprng()
- hek = csprng()
- kek = csprng()
- packet = LOCAL_KEY_PACKET_HEADER + encrypt_and_sign(key + hek + c_code, key=kek)
-
- nh_bypass_msg(NH_BYPASS_START, settings)
- queue_to_nh(packet, settings, queues[NH_PACKET_QUEUE])
-
- while True:
- print_key("Local key decryption key (to RxM)", kek, settings)
- purp_code = ask_confirmation_code()
- if purp_code == c_code.hex():
- break
- elif purp_code == RESEND:
- phase("Resending local key", head=2)
- queue_to_nh(packet, settings, queues[NH_PACKET_QUEUE])
- phase(DONE)
- print_on_previous_line(reps=(9 if settings.local_testing_mode else 10))
- else:
- box_print(["Incorrect confirmation code. If RxM did not receive",
- "encrypted local key, resend it by typing 'resend'."], head=1)
- print_on_previous_line(reps=(11 if settings.local_testing_mode else 12), delay=2)
-
- nh_bypass_msg(NH_BYPASS_STOP, settings)
-
- # Add local contact to contact list database
- contact_list.add_contact(LOCAL_ID, LOCAL_ID, LOCAL_ID,
- bytes(FINGERPRINT_LEN), bytes(FINGERPRINT_LEN),
- False, False, False)
-
- # Add local contact to keyset database
- queues[KEY_MANAGEMENT_QUEUE].put((KDB_ADD_ENTRY_HEADER, LOCAL_ID,
- key, csprng(),
- hek, csprng()))
-
- # Notify RxM that confirmation code was successfully entered
- queue_command(LOCAL_KEY_INSTALLED_HEADER, settings, queues[COMMAND_PACKET_QUEUE])
-
- box_print("Successfully added a new local key.")
- clear_screen(delay=1)
-
- except KeyboardInterrupt:
- raise FunctionReturn("Local key setup aborted.", delay=1, head=3, tail_clear=True)
-
-
-def verify_fingerprints(tx_fp: bytes, rx_fp: bytes) -> bool:
- """\
- Verify fingerprints over out-of-band channel to
- detect MITM attacks against TFC's key exchange.
-
- :param tx_fp: User's fingerprint
- :param rx_fp: Contact's fingerprint
- :return: True if fingerprints match, else False
- """
- clear_screen()
-
- message_printer("To verify received public key was not replaced by attacker in network, "
- "call the contact over end-to-end encrypted line, preferably Signal "
- "(https://signal.org/). Make sure Signal's safety numbers have been "
- "verified, and then verbally compare the key fingerprints below.", head=1, tail=1)
-
- print_fingerprint(tx_fp, " Your fingerprint (you read) ")
- print_fingerprint(rx_fp, "Purported fingerprint for contact (they read)")
-
- return yes("Is the contact's fingerprint correct?")
-
-
-def start_key_exchange(account: str,
- user: str,
- nick: str,
- contact_list: 'ContactList',
- settings: 'Settings',
- queues: Dict[bytes, 'Queue']) -> None:
- """Start X25519 key exchange with recipient.
-
- Variable naming:
-
- tx = user's key rx = contact's key
- sk = private (secret) key pk = public key
- key = message key hek = header key
- dh_ssk = X25519 shared secret
-
- :param account: The contact's account name (e.g. alice@jabber.org)
- :param user: The user's account name (e.g. bob@jabber.org)
- :param nick: Contact's nickname
- :param contact_list: Contact list object
- :param settings: Settings object
- :param queues: Dictionary of multiprocessing queues
- :return: None
- """
- try:
- tx_sk = nacl.public.PrivateKey(csprng())
- tx_pk = bytes(tx_sk.public_key)
-
- while True:
- queue_to_nh(PUBLIC_KEY_PACKET_HEADER
- + tx_pk
- + user.encode()
- + US_BYTE
- + account.encode(),
- settings, queues[NH_PACKET_QUEUE])
-
- rx_pk = get_b58_key(B58_PUB_KEY, settings)
-
- if rx_pk != RESEND.encode():
- break
-
- if rx_pk == bytes(KEY_LENGTH):
- # Public key is zero with negligible probability, therefore we
- # assume such key is malicious and attempts to either result in
- # zero shared key (pointless considering implementation), or to
- # DoS the key exchange as libsodium does not accept zero keys.
- box_print(["Warning!",
- "Received a malicious public key from network.",
- "Aborting key exchange for your safety."], tail=1)
- raise FunctionReturn("Error: Zero public key", output=False)
-
- dh_box = nacl.public.Box(tx_sk, nacl.public.PublicKey(rx_pk))
- dh_ssk = dh_box.shared_key()
-
- # Domain separate each key with key-type specific context variable
- # and with public keys that both clients know which way to place.
- tx_key = hash_chain(dh_ssk + rx_pk + b'message_key')
- rx_key = hash_chain(dh_ssk + tx_pk + b'message_key')
-
- tx_hek = hash_chain(dh_ssk + rx_pk + b'header_key')
- rx_hek = hash_chain(dh_ssk + tx_pk + b'header_key')
-
- # Domain separate fingerprints of public keys by using the shared
- # secret as salt. This way entities who might monitor fingerprint
- # verification channel are unable to correlate spoken values with
- # public keys that transit through a compromised IM server. This
- # protects against de-anonymization of IM accounts in cases where
- # clients connect to the compromised server via Tor. The preimage
- # resistance of hash chain protects the shared secret from leaking.
- tx_fp = hash_chain(dh_ssk + tx_pk + b'fingerprint')
- rx_fp = hash_chain(dh_ssk + rx_pk + b'fingerprint')
-
- if not verify_fingerprints(tx_fp, rx_fp):
- box_print(["Warning!",
- "Possible man-in-the-middle attack detected.",
- "Aborting key exchange for your safety."], tail=1)
- raise FunctionReturn("Error: Fingerprint mismatch", output=False)
-
- packet = KEY_EX_X25519_HEADER \
- + tx_key + tx_hek \
- + rx_key + rx_hek \
- + account.encode() + US_BYTE + nick.encode()
-
- queue_command(packet, settings, queues[COMMAND_PACKET_QUEUE])
-
- contact_list.add_contact(account, user, nick,
- tx_fp, rx_fp,
- settings.log_messages_by_default,
- settings.accept_files_by_default,
- settings.show_notifications_by_default)
-
- # Use random values as Rx-keys to prevent decryption if they're accidentally used.
- queues[KEY_MANAGEMENT_QUEUE].put((KDB_ADD_ENTRY_HEADER, account,
- tx_key, csprng(),
- tx_hek, csprng()))
-
- box_print(f"Successfully added {nick}.")
- clear_screen(delay=1)
-
- except KeyboardInterrupt:
- raise FunctionReturn("Key exchange aborted.", delay=1, head=2, tail_clear=True)
-
-
-def create_pre_shared_key(account: str,
- user: str,
- nick: str,
- contact_list: 'ContactList',
- settings: 'Settings',
- queues: Dict[bytes, 'Queue']) -> None:
- """Generate new pre-shared key for manual key delivery.
-
- :param account: The contact's account name (e.g. alice@jabber.org)
- :param user: The user's account name (e.g. bob@jabber.org)
- :param nick: Nick of contact
- :param contact_list: Contact list object
- :param settings: Settings object
- :param queues: Dictionary of multiprocessing queues
- :return: None
- """
- try:
- tx_key = csprng()
- tx_hek = csprng()
- salt = csprng()
- password = MasterKey.new_password("password for PSK")
-
- phase("Deriving key encryption key", head=2)
- kek, _ = argon2_kdf(password, salt, parallelism=1)
- phase(DONE)
-
- ct_tag = encrypt_and_sign(tx_key + tx_hek, key=kek)
-
- while True:
- store_d = ask_path_gui(f"Select removable media for {nick}", settings)
- f_name = f"{store_d}/{user}.psk - Give to {account}"
- try:
- with open(f_name, 'wb+') as f:
- f.write(salt + ct_tag)
- break
- except PermissionError:
- c_print("Error: Did not have permission to write to directory.")
- time.sleep(0.5)
- continue
-
- packet = KEY_EX_PSK_TX_HEADER \
- + tx_key \
- + tx_hek \
- + account.encode() + US_BYTE + nick.encode()
-
- queue_command(packet, settings, queues[COMMAND_PACKET_QUEUE])
-
- contact_list.add_contact(account, user, nick,
- bytes(FINGERPRINT_LEN), bytes(FINGERPRINT_LEN),
- settings.log_messages_by_default,
- settings.accept_files_by_default,
- settings.show_notifications_by_default)
-
- queues[KEY_MANAGEMENT_QUEUE].put((KDB_ADD_ENTRY_HEADER, account,
- tx_key, csprng(),
- tx_hek, csprng()))
-
- box_print(f"Successfully added {nick}.", head=1)
- clear_screen(delay=1)
-
- except KeyboardInterrupt:
- raise FunctionReturn("PSK generation aborted.", delay=1, head=2, tail_clear=True)
-
-
-def rxm_load_psk(window: 'TxWindow',
- contact_list: 'ContactList',
- settings: 'Settings',
- c_queue: 'Queue') -> None:
- """Load PSK for selected contact on RxM."""
- if settings.session_traffic_masking:
- raise FunctionReturn("Error: Command is disabled during traffic masking.")
-
- if window.type == WIN_TYPE_GROUP:
- raise FunctionReturn("Error: Group is selected.")
-
- if contact_list.get_contact(window.uid).tx_fingerprint != bytes(FINGERPRINT_LEN):
- raise FunctionReturn("Error: Current key was exchanged with X25519.")
-
- packet = KEY_EX_PSK_RX_HEADER + window.uid.encode()
- queue_command(packet, settings, c_queue)
diff --git a/src/tx/packet.py b/src/tx/packet.py
deleted file mode 100755
index 11ad435..0000000
--- a/src/tx/packet.py
+++ /dev/null
@@ -1,312 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
-"""
-
-import os
-import random
-import time
-import typing
-import zlib
-
-from typing import Dict, List, Union
-
-from src.common.crypto import byte_padding, csprng, encrypt_and_sign, hash_chain
-from src.common.encoding import int_to_bytes
-from src.common.exceptions import CriticalError, FunctionReturn
-from src.common.input import yes
-from src.common.misc import split_byte_string
-from src.common.output import c_print
-from src.common.path import ask_path_gui
-from src.common.reed_solomon import RSCodec
-from src.common.statics import *
-
-from src.tx.files import File
-
-if typing.TYPE_CHECKING:
- from multiprocessing import Queue
- from src.common.db_keys import KeyList
- from src.common.db_settings import Settings
- from src.common.gateway import Gateway
- from src.tx.user_input import UserInput
- from src.tx.windows import MockWindow, TxWindow
-
-
-def queue_message(user_input: 'UserInput',
- window: Union['MockWindow', 'TxWindow'],
- settings: 'Settings',
- m_queue: 'Queue',
- header: bytes = b'',
- log_as_ph: bool = False) -> None:
- """Prepend header, split to assembly packets and queue them."""
- if not header:
- if window.type == WIN_TYPE_GROUP:
- group_msg_id = os.urandom(GROUP_MSG_ID_LEN)
- header = GROUP_MESSAGE_HEADER + group_msg_id + window.name.encode() + US_BYTE
- else:
- header = PRIVATE_MESSAGE_HEADER
-
- payload = header + user_input.plaintext.encode()
- packet_list = split_to_assembly_packets(payload, MESSAGE)
-
- queue_packets(packet_list, MESSAGE, settings, m_queue, window, log_as_ph)
-
-
-def queue_file(window: 'TxWindow',
- settings: 'Settings',
- f_queue: 'Queue',
- gateway: 'Gateway') -> None:
- """Ask file path and load file data."""
- path = ask_path_gui("Select file to send...", settings, get_file=True)
- file = File(path, window, settings, gateway)
-
- packet_list = split_to_assembly_packets(file.plaintext, FILE)
-
- if settings.confirm_sent_files:
- try:
- if not yes(f"Send {file.name.decode()} ({file.size_print}) to {window.type_print} {window.name} "
- f"({len(packet_list)} packets, time: {file.time_print})?"):
- raise FunctionReturn("File selection aborted.")
- except KeyboardInterrupt:
- raise FunctionReturn("File selection aborted.", head=3)
-
- queue_packets(packet_list, FILE, settings, f_queue, window, log_as_ph=True)
-
-
-def queue_command(command: bytes,
- settings: 'Settings',
- c_queue: 'Queue',
- window: 'TxWindow' = None) -> None:
- """Split command to assembly packets and queue them for sender_loop()."""
- packet_list = split_to_assembly_packets(command, COMMAND)
-
- queue_packets(packet_list, COMMAND, settings, c_queue, window)
-
-
-def queue_to_nh(packet: bytes,
- settings: 'Settings',
- nh_queue: 'Queue',
- delay: bool = False) -> None:
- """Queue unencrypted command/exported file to NH."""
- nh_queue.put((packet, delay, settings))
-
-
-def split_to_assembly_packets(payload: bytes, p_type: str) -> List[bytes]:
- """Split payload to assembly packets.
-
- Messages and commands are compressed to reduce transmission time.
- Files have been compressed at earlier phase, before B85 encoding.
-
- If the compressed message can not be sent over one packet, it is
- split into multiple assembly packets with headers. Long messages
- are encrypted with inner layer of XSalsa20-Poly1305 to provide
- sender based control over partially transmitted data. Regardless
- of packet size, files always have an inner layer of encryption,
- and it is added in earlier phase. Commands do not need
- sender-based control, so they are only delivered with hash that
- makes integrity check easy.
-
- First assembly packet in file transmission is prepended with 8-byte
- packet counter that tells sender and receiver how many packets the
- file transmission requires.
- """
- s_header = {MESSAGE: M_S_HEADER, FILE: F_S_HEADER, COMMAND: C_S_HEADER}[p_type]
- l_header = {MESSAGE: M_L_HEADER, FILE: F_L_HEADER, COMMAND: C_L_HEADER}[p_type]
- a_header = {MESSAGE: M_A_HEADER, FILE: F_A_HEADER, COMMAND: C_A_HEADER}[p_type]
- e_header = {MESSAGE: M_E_HEADER, FILE: F_E_HEADER, COMMAND: C_E_HEADER}[p_type]
-
- if p_type in [MESSAGE, COMMAND]:
- payload = zlib.compress(payload, level=COMPRESSION_LEVEL)
-
- if len(payload) < PADDING_LEN:
- padded = byte_padding(payload)
- packet_list = [s_header + padded]
-
- else:
- if p_type == MESSAGE:
- msg_key = csprng()
- payload = encrypt_and_sign(payload, msg_key)
- payload += msg_key
-
- elif p_type == FILE:
- payload = bytes(FILE_PACKET_CTR_LEN) + payload
-
- elif p_type == COMMAND:
- payload += hash_chain(payload)
-
- padded = byte_padding(payload)
- p_list = split_byte_string(padded, item_len=PADDING_LEN)
-
- if p_type == FILE:
- p_list[0] = int_to_bytes(len(p_list)) + p_list[0][FILE_PACKET_CTR_LEN:]
-
- packet_list = ([l_header + p_list[0]] +
- [a_header + p for p in p_list[1:-1]] +
- [e_header + p_list[-1]])
-
- return packet_list
-
-
-def queue_packets(packet_list: List[bytes],
- p_type: str,
- settings: 'Settings',
- queue: 'Queue',
- window: Union['MockWindow', 'TxWindow'] = None,
- log_as_ph: bool = False) -> None:
- """Queue assembly packets for sender_loop()."""
- if p_type in [MESSAGE, FILE] and window is not None:
-
- if settings.session_traffic_masking:
- for p in packet_list:
- queue.put((p, window.log_messages, log_as_ph))
- else:
- for c in window:
- for p in packet_list:
- queue.put((p, settings, c.rx_account, c.tx_account, window.log_messages, log_as_ph, window.uid))
-
- elif p_type == COMMAND:
- if settings.session_traffic_masking:
- for p in packet_list:
- if window is None:
- log_setting = None
- else:
- log_setting = window.log_messages
- queue.put((p, log_setting))
- else:
- for p in packet_list:
- queue.put((p, settings))
-
-
-def send_packet(key_list: 'KeyList',
- gateway: 'Gateway',
- log_queue: 'Queue',
- packet: bytes,
- settings: 'Settings',
- rx_account: str = None,
- tx_account: str = None,
- logging: bool = None,
- log_as_ph: bool = None) -> None:
- """Encrypt and send assembly packet.
-
- :param packet: Padded plaintext assembly packet
- :param key_list: Key list object
- :param settings: Settings object
- :param gateway: Gateway object
- :param log_queue: Multiprocessing queue for logged messages
- :param rx_account: Recipient account
- :param tx_account: Sender's account associated with recipient's account
- :param logging: When True, log the assembly packet
- :param log_as_ph: When True, log assembly packet as placeholder data
- :return: None
- """
- if len(packet) != ASSEMBLY_PACKET_LEN:
- raise CriticalError("Invalid assembly packet PT length.")
-
- if rx_account is None:
- keyset = key_list.get_keyset(LOCAL_ID)
- header = COMMAND_PACKET_HEADER
- trailer = b''
- else:
- keyset = key_list.get_keyset(rx_account)
- header = MESSAGE_PACKET_HEADER
- trailer = tx_account.encode() + US_BYTE + rx_account.encode()
-
- harac_in_bytes = int_to_bytes(keyset.tx_harac)
- encrypted_harac = encrypt_and_sign(harac_in_bytes, keyset.tx_hek)
- encrypted_message = encrypt_and_sign(packet, keyset.tx_key)
- encrypted_packet = header + encrypted_harac + encrypted_message + trailer
- transmit(encrypted_packet, settings, gateway)
-
- keyset.rotate_tx_key()
-
- log_queue.put((logging, log_as_ph, packet, rx_account, settings, key_list.master_key))
-
-
-def transmit(packet: bytes,
- settings: 'Settings',
- gateway: 'Gateway',
- delay: bool = True) -> None:
- """Add Reed-Solomon erasure code and output packet via gateway.
-
- Note that random.SystemRandom() uses Kernel CSPRNG (/dev/urandom),
- not Python's weak RNG based on Mersenne Twister:
- https://docs.python.org/2/library/random.html#random.SystemRandom
- """
- rs = RSCodec(2 * settings.session_serial_error_correction)
- packet = rs.encode(packet)
- gateway.write(packet)
-
- if settings.local_testing_mode:
- time.sleep(LOCAL_TESTING_PACKET_DELAY)
-
- if not settings.session_traffic_masking:
- if settings.multi_packet_random_delay and delay:
- random_delay = random.SystemRandom().uniform(0, settings.max_duration_of_random_delay)
- time.sleep(random_delay)
-
-
-def cancel_packet(user_input: 'UserInput',
- window: 'TxWindow',
- settings: 'Settings',
- queues: Dict[bytes, 'Queue']) -> None:
- """Cancel sent message/file to contact/group."""
-
- queue, header, p_type = dict(cm=(queues[MESSAGE_PACKET_QUEUE], M_C_HEADER, 'messages'),
- cf=(queues[FILE_PACKET_QUEUE], F_C_HEADER, 'files' ))[user_input.plaintext]
-
- cancel_pt = header + bytes(PADDING_LEN)
-
- cancel = False
- if settings.session_traffic_masking:
- if queue.qsize() != 0:
- cancel = True
- while queue.qsize() != 0:
- queue.get()
- log_m_dictionary = dict((c.rx_account, c.log_messages) for c in window)
- queue.put((cancel_pt, log_m_dictionary, True))
-
- message = f"Cancelled queues {p_type}." if cancel else f"No {p_type} to cancel."
- c_print(message, head=1, tail=1)
-
- else:
- p_buffer = []
- while queue.qsize() != 0:
- q_data = queue.get()
- win_uid = q_data[6]
-
- # Put messages unrelated to active window into buffer
- if win_uid != window.uid:
- p_buffer.append(q_data)
- else:
- cancel = True
-
- # Put cancel packets for each window contact to queue first
- if cancel:
- for c in window:
- queue.put((cancel_pt, settings, c.rx_account, c.tx_account, c.log_messages, window.uid))
-
- # Put buffered tuples back to queue
- for p in p_buffer:
- queue.put(p)
-
- if cancel:
- message = f"Cancelled queued {p_type} to {window.type_print} {window.name}."
- else:
- message = f"No {p_type} queued for {window.type_print} {window.name}."
-
- c_print(message, head=1, tail=1)
diff --git a/src/tx/sender_loop.py b/src/tx/sender_loop.py
deleted file mode 100755
index 5ca48a4..0000000
--- a/src/tx/sender_loop.py
+++ /dev/null
@@ -1,189 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see <https://www.gnu.org/licenses/>.
-"""
-
-import time
-import typing
-
-from typing import Dict, List, Tuple
-
-from src.common.misc import ignored
-from src.common.statics import *
-
-from src.tx.packet import send_packet, transmit
-from src.tx.traffic_masking import ConstantTime
-
-if typing.TYPE_CHECKING:
- from multiprocessing import Queue
- from src.common.db_keys import KeyList
- from src.common.db_settings import Settings
- from src.common.gateway import Gateway
- from src.common.db_settings import Settings
-
-
-def sender_loop(queues: Dict[bytes, 'Queue'],
- settings: 'Settings',
- gateway: 'Gateway',
- key_list: 'KeyList',
- unittest: bool = False) -> None:
- """Output packets from queues based on queue priority.
-
- Sender loop loads assembly packets from a set of queues. As
- Python's multiprocessing lacks priority queues, several queues are
- prioritized based on their status. Whether or not traffic masking
- is enabled, files are only transmitted when no messages are being
- output. This is because file transmission is usually very slow and
- user might need to send messages in the meantime. When traffic
- masking is disabled, commands take highest priority as they are not
- output all the time. When traffic masking is enabled, commands are
- output between each output message packet. This allows commands to
- take effect as soon as possible but slows down message/file
- delivery by half. Each contact in window is cycled in order.
-
- Making changes to recipient list during use is prevented to protect
- user from accidentally revealing use of TFC. When traffic masking
- is enabled, if no packets are available in either m_queue or f_queue,
- a noise assembly packet is loaded from np_queue. If no command packet
- is available in c_queue, a noise command packet is loaded from
- nc_queue. TFC does its best to hide the loading times and encryption
- duration by using constant time context manager with CSPRNG spawned
- jitter, constant time queue status lookup, and constant time XSalsa20
- cipher. However, since TFC is written in a high-level language,
- it is impossible to guarantee TxM never reveals its user-operation
- schedule to NH.
- """
- m_queue = queues[MESSAGE_PACKET_QUEUE]
- f_queue = queues[FILE_PACKET_QUEUE]
- c_queue = queues[COMMAND_PACKET_QUEUE]
- n_queue = queues[NH_PACKET_QUEUE]
- l_queue = queues[LOG_PACKET_QUEUE]
- km_queue = queues[KEY_MANAGEMENT_QUEUE]
- np_queue = queues[NOISE_PACKET_QUEUE]
- nc_queue = queues[NOISE_COMMAND_QUEUE]
- ws_queue = queues[WINDOW_SELECT_QUEUE]
-
- m_buffer = dict() # type: Dict[str, List[Tuple[bytes, Settings, str, str, bool]]]
- f_buffer = dict() # type: Dict[str, List[Tuple[bytes, Settings, str, str, bool]]]
-
- if settings.session_traffic_masking:
-
- while ws_queue.qsize() == 0:
- time.sleep(0.01)
-
- window, log_messages = ws_queue.get()
-
- while True:
- with ignored(EOFError, KeyboardInterrupt):
- with ConstantTime(settings, length=TRAFFIC_MASKING_QUEUE_CHECK_DELAY):
- queue = [[m_queue, m_queue], [f_queue, np_queue]][m_queue.qsize()==0][f_queue.qsize()==0]
-
- packet, lm, log_as_ph = queue.get()
-
- if lm is not None: # Ignores None sent by noise_packet_loop that does not alter log setting
- log_messages = lm
-
- for c in window:
-
- with ConstantTime(settings, d_type=TRAFFIC_MASKING):
- send_packet(key_list, gateway, l_queue, packet, settings, c.rx_account, c.tx_account, log_messages, log_as_ph)
-
- with ConstantTime(settings, d_type=TRAFFIC_MASKING):
- queue = [c_queue, nc_queue][c_queue.qsize()==0]
- command, lm = queue.get()
-
- if lm is not None: # Log setting is only updated with 'logging' command
- log_messages = lm
-
- send_packet(key_list, gateway, l_queue, command, settings)
-
- if n_queue.qsize() != 0:
- packet, delay, settings = n_queue.get()
- transmit(packet, settings, gateway, delay)
- if packet[1:] == UNENCRYPTED_EXIT_COMMAND:
- queues[EXIT_QUEUE].put(EXIT)
- elif packet[1:] == UNENCRYPTED_WIPE_COMMAND:
- queues[EXIT_QUEUE].put(WIPE)
-
- if unittest:
- break
-
- else:
- while True:
- try:
- if km_queue.qsize() != 0:
- key_list.manage(*km_queue.get())
- continue
-
- # Commands to RxM
- if c_queue.qsize() != 0:
- if key_list.has_local_key():
- send_packet(key_list, gateway, l_queue, *c_queue.get())
- continue
-
- # Commands/exported files to NH
- if n_queue.qsize() != 0:
- packet, delay, settings = n_queue.get()
- transmit(packet, settings, gateway, delay)
-
- if packet[1:] == UNENCRYPTED_EXIT_COMMAND:
- queues[EXIT_QUEUE].put(EXIT)
- elif packet[1:] == UNENCRYPTED_WIPE_COMMAND:
- queues[EXIT_QUEUE].put(WIPE)
- continue
-
- # Buffered messages
- for rx_account in m_buffer:
- if key_list.has_keyset(rx_account) and m_buffer[rx_account]:
- send_packet(key_list, gateway, l_queue, *m_buffer[rx_account].pop(0)[:-1]) # Strip window UID as it's only used to cancel packets
- continue
-
- # New messages
- if m_queue.qsize() != 0:
- q_data = m_queue.get()
- rx_account = q_data[2]
-
- if key_list.has_keyset(rx_account):
- send_packet(key_list, gateway, l_queue, *q_data[:-1])
- else:
- m_buffer.setdefault(rx_account, []).append(q_data)
- continue
-
- # Buffered files
- for rx_account in m_buffer:
- if key_list.has_keyset(rx_account) and f_buffer[rx_account]:
- send_packet(key_list, gateway, l_queue, *f_buffer[rx_account].pop(0)[:-1])
- continue
-
- # New files
- if f_queue.qsize() != 0:
- q_data = f_queue.get()
- rx_account = q_data[2]
-
- if key_list.has_keyset(rx_account):
- send_packet(key_list, gateway, l_queue, *q_data[:-1])
- else:
- f_buffer.setdefault(rx_account, []).append(q_data)
-
- if unittest and queues[UNITTEST_QUEUE].qsize() != 0:
- break
-
- time.sleep(0.01)
-
- except (EOFError, KeyboardInterrupt):
- pass
diff --git a/src/tx/traffic_masking.py b/src/tx/traffic_masking.py
deleted file mode 100755
index 033d3b2..0000000
--- a/src/tx/traffic_masking.py
+++ /dev/null
@@ -1,90 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see <https://www.gnu.org/licenses/>.
-"""
-
-import random
-import threading
-import time
-import typing
-
-from typing import Tuple, Union
-
-from src.common.misc import ignored
-from src.common.statics import *
-
-if typing.TYPE_CHECKING:
- from multiprocessing import Queue
- from src.common.db_contacts import ContactList
- from src.common.db_settings import Settings
-
-
-class ConstantTime(object):
- """Constant time context manager.
-
- By joining a thread that sleeps for longer time than it takes for
- the function to run, this context manager hides the actual running
- time of the function.
-
- Note that random.SystemRandom() uses Kernel CSPRNG (/dev/urandom),
- not Python's weak RNG based on Mersenne Twister:
- https://docs.python.org/2/library/random.html#random.SystemRandom
- """
-
- def __init__(self,
- settings: 'Settings',
- d_type: str = STATIC,
- length: float = 0.0) -> None:
-
- if d_type == TRAFFIC_MASKING:
- self.length = settings.traffic_masking_static_delay
- self.length += random.SystemRandom().uniform(0, settings.traffic_masking_random_delay)
- if settings.multi_packet_random_delay:
- self.length += random.SystemRandom().uniform(0, settings.max_duration_of_random_delay)
-
- elif d_type == STATIC:
- self.length = length
-
- def __enter__(self) -> None:
- self.timer = threading.Thread(target=time.sleep, args=(self.length,))
- self.timer.start()
-
- def __exit__(self, exc_type, exc_value, traceback) -> None:
- self.timer.join()
-
-
-def noise_loop(header: bytes,
- queue: 'Queue',
- contact_list: 'ContactList' = None,
- unittest: bool = False) -> None:
- """Generate noise packets and keep noise queues filled."""
- packet = header + bytes(PADDING_LEN)
-
- if contact_list is None:
- content = (packet, None) # type: Union[Tuple[bytes, None], Tuple[bytes, None, bool]]
- else:
- content = (packet, None, True)
-
- while True:
- with ignored(EOFError, KeyboardInterrupt):
- while queue.qsize() < 100:
- queue.put(content)
- time.sleep(0.1)
-
- if unittest:
- break
diff --git a/src/tx/windows.py b/src/tx/windows.py
deleted file mode 100755
index e61bb6a..0000000
--- a/src/tx/windows.py
+++ /dev/null
@@ -1,181 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see <https://www.gnu.org/licenses/>.
-"""
-
-import typing
-
-from typing import Dict, Generator, Iterable, List, Sized
-
-from src.common.exceptions import FunctionReturn
-from src.common.output import clear_screen
-from src.common.statics import *
-
-from src.tx.packet import queue_command
-
-if typing.TYPE_CHECKING:
- from multiprocessing import Queue
- from src.common.db_contacts import Contact, ContactList
- from src.common.db_groups import Group, GroupList
- from src.common.db_settings import Settings
- from src.tx.user_input import UserInput
-
-
-class MockWindow(Iterable):
- """Mock window simplifies queueing of message assembly packets."""
-
- def __init__(self, uid: str, contacts: List['Contact']) -> None:
- """Create new mock window."""
- self.uid = uid
- self.window_contacts = contacts
- self.log_messages = self.window_contacts[0].log_messages
- self.type = WIN_TYPE_CONTACT
- self.group = None # type: Group
- self.name = None # type: str
-
- def __iter__(self) -> Generator:
- """Iterate over contact objects in window."""
- yield from self.window_contacts
-
-
-class TxWindow(Iterable, Sized):
- """
- TxWindow objects manages ephemeral communications
- data associated with selected contact or group.
- """
-
- def __init__(self,
- contact_list: 'ContactList',
- group_list: 'GroupList') -> None:
- """Create a new TxWindow object."""
- self.contact_list = contact_list
- self.group_list = group_list
- self.window_contacts = [] # type: List[Contact]
- self.group = None # type: Group
- self.contact = None # type: Contact
- self.name = None # type: str
- self.type = None # type: str
- self.type_print = None # type: str
- self.uid = None # type: str
- self.imc_name = None # type: str
- self.log_messages = None # type: bool
-
- def __iter__(self) -> Generator:
- """Iterate over Contact objects in window."""
- yield from self.window_contacts
-
- def __len__(self) -> int:
- """Return the number of contacts in window."""
- return len(self.window_contacts)
-
- def select_tx_window(self,
- settings: 'Settings',
- queues: Dict[bytes, 'Queue'],
- selection: str = None,
- cmd: bool = False) -> None:
- """Select specified window or ask the user to specify one."""
- if selection is None:
- self.contact_list.print_contacts()
- self.group_list.print_groups()
- selection = input("Select recipient: ").strip()
-
- if selection in self.group_list.get_list_of_group_names():
- if cmd and settings.session_traffic_masking and selection != self.uid:
- raise FunctionReturn("Error: Can't change window during traffic masking.")
-
- self.group = self.group_list.get_group(selection)
- self.window_contacts = self.group.members
- self.name = self.group.name
- self.uid = self.name
- self.log_messages = self.group.log_messages
- self.type = WIN_TYPE_GROUP
- self.type_print = 'group'
-
- if self.window_contacts:
- self.imc_name = self.window_contacts[0].rx_account
-
- elif selection in self.contact_list.contact_selectors():
- if cmd and settings.session_traffic_masking:
- contact = self.contact_list.get_contact(selection)
- if contact.rx_account != self.uid:
- raise FunctionReturn("Error: Can't change window during traffic masking.")
-
- self.contact = self.contact_list.get_contact(selection)
- self.window_contacts = [self.contact]
- self.name = self.contact.nick
- self.uid = self.contact.rx_account
- self.imc_name = self.contact.rx_account
- self.log_messages = self.contact.log_messages
- self.type = WIN_TYPE_CONTACT
- self.type_print = 'contact'
-
- else:
- raise FunctionReturn("Error: No contact/group was found.")
-
- if settings.session_traffic_masking and not cmd:
- queues[WINDOW_SELECT_QUEUE].put((self.window_contacts, self.log_messages))
-
- packet = WINDOW_SELECT_HEADER + self.uid.encode()
- queue_command(packet, settings, queues[COMMAND_PACKET_QUEUE])
-
- clear_screen()
-
- def deselect_window(self) -> None:
- """Deselect active window."""
- self.window_contacts = []
- self.group = None # type: Group
- self.contact = None # type: Contact
- self.name = None # type: str
- self.type = None # type: str
- self.uid = None # type: str
- self.imc_name = None # type: str
-
- def is_selected(self) -> bool:
- """Return True if window is selected, else False."""
- return self.name is not None
-
- def update_log_messages(self) -> None:
- """Update window's logging setting."""
- if self.type == WIN_TYPE_CONTACT:
- self.log_messages = self.contact.log_messages
- if self.type == WIN_TYPE_GROUP:
- self.log_messages = self.group.log_messages
-
- def update_group_win_members(self, group_list: 'GroupList') -> None:
- """Update window's group members list."""
- if self.type == WIN_TYPE_GROUP:
- if group_list.has_group(self.name):
- self.group = group_list.get_group(self.name)
- self.window_contacts = self.group.members
- if self.window_contacts:
- self.imc_name = self.window_contacts[0].rx_account
- else:
- self.deselect_window()
-
-
-def select_window(user_input: 'UserInput',
- window: 'TxWindow',
- settings: 'Settings',
- queues: Dict[bytes, 'Queue']) -> None:
- """Select new window to send messages/files to."""
- try:
- selection = user_input.plaintext.split()[1]
- except (IndexError, TypeError):
- raise FunctionReturn("Error: Invalid recipient.")
-
- window.select_tx_window(settings, queues, selection, cmd=True)
diff --git a/tests/common/test_crypto.py b/tests/common/test_crypto.py
index b9ecd0d..b474273 100644
--- a/tests/common/test_crypto.py
+++ b/tests/common/test_crypto.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,344 +16,325 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see <https://www.gnu.org/licenses/>.
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
-import binascii
import multiprocessing
import os
import unittest
-import nacl.bindings
+from unittest import mock
+
+import argon2
import nacl.exceptions
import nacl.public
import nacl.utils
-import argon2
+from cryptography.hazmat.primitives.asymmetric.x448 import X448PrivateKey
-from src.common.crypto import sha3_256, blake2s, sha256, hash_chain, argon2_kdf
-from src.common.crypto import encrypt_and_sign, auth_and_decrypt
-from src.common.crypto import byte_padding, rm_padding_bytes, xor
-from src.common.crypto import csprng, check_kernel_entropy, check_kernel_version
+from src.common.crypto import argon2_kdf, auth_and_decrypt, blake2b, byte_padding, check_kernel_entropy
+from src.common.crypto import check_kernel_version, csprng, encrypt_and_sign, rm_padding_bytes, X448
from src.common.statics import *
-class TestSHA3256(unittest.TestCase):
+class TestBLAKE2b(unittest.TestCase):
- def test_SHA3_256_KAT(self):
- """Run sanity check with official SHA3-256 KAT:
- csrc.nist.gov/groups/ST/toolkit/documents/Examples/SHA3-256_Msg0.pdf
+ def test_blake2b_kat(self):
+ """Run sanity check with an official BLAKE2b KAT:
+ https://github.com/BLAKE2/BLAKE2/blob/master/testvectors/blake2b-kat.txt#L259
+
+ in: 000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f
+ 202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f
+
+ key: 000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f
+ 202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f
+
+ hash: 65676d800617972fbd87e4b9514e1c67402b7a331096d3bfac22f1abb95374ab
+ c942f16e9ab0ead33b87c91968a6e509e119ff07787b3ef483e1dcdccf6e3022
"""
- self.assertEqual(sha3_256(b''),
- binascii.unhexlify('a7ffc6f8bf1ed76651c14756a061d662'
- 'f580ff4de43b49fa82d80a4b80f8434a'))
+ message = key = bytes.fromhex(
+ '000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f'
+ '202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f')
+ digest = bytes.fromhex(
+ '65676d800617972fbd87e4b9514e1c67402b7a331096d3bfac22f1abb95374ab'
+ 'c942f16e9ab0ead33b87c91968a6e509e119ff07787b3ef483e1dcdccf6e3022')
-
-class TestBlake2s(unittest.TestCase):
-
- def test_blake2s_KAT(self):
- """Run sanity check with official Blake2s KAT:
- https://github.com/BLAKE2/BLAKE2/blob/master/testvectors/blake2s-kat.txt#L131
-
- in: 000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f
- key: 000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f
- hash: c03bc642b20959cbe133a0303e0c1abff3e31ec8e1a328ec8565c36decff5265
- """
- message = key = binascii.unhexlify('000102030405060708090a0b0c0d0e0f'
- '101112131415161718191a1b1c1d1e1f')
-
- self.assertEqual(blake2s(message, key),
- binascii.unhexlify('c03bc642b20959cbe133a0303e0c1abf'
- 'f3e31ec8e1a328ec8565c36decff5265'))
-
-
-class TestSHA256(unittest.TestCase):
-
- def test_SHA256_KAT(self):
- """Run sanity check with official SHA256 KAT:
- http://csrc.nist.gov/groups/ST/toolkit/documents/Examples/SHA_All.pdf // page 14
- """
- self.assertEqual(sha256(b'abc'),
- binascii.unhexlify('ba7816bf8f01cfea414140de5dae2223'
- 'b00361a396177a9cb410ff61f20015ad'))
-
-
-class TestHashChain(unittest.TestCase):
-
- def test_chain(self):
- """Sanity check after verifying function. No official test vectors exist."""
- self.assertEqual(hash_chain(bytes(32)),
- binascii.unhexlify('8d8c36497eb93a6355112e253f705a32'
- '85f3e2d82b9ac29461cd8d4f764e5d41'))
+ self.assertEqual(blake2b(message, key, digest_size=len(digest)),
+ digest)
class TestArgon2KDF(unittest.TestCase):
- def test_Argon2_KAT(self):
- """The official Argon2 implementation is at
- https://github.com/P-H-C/phc-winner-argon2#command-line-utility
+ def test_argon2d_kat(self):
+ """Run sanity check with an official Argon2 KAT:
- To re-produce the test vector, run
+ The official Argon2 implementation is at
+ https://github.com/P-H-C/phc-winner-argon2#command-line-utility
+
+ To reproduce the test vector, run
$ wget https://github.com/P-H-C/phc-winner-argon2/archive/master.zip
- $ unzip master.zip
+ $ unzip master.zip
$ cd phc-winner-argon2-master/
$ make
$ echo -n "password" | ./argon2 somesalt -t 1 -m 16 -p 4 -l 32 -d
Expected output
- Type: Argon2d
- Iterations: 1
- Memory: 65536 KiB
+ Type: Argon2d
+ Iterations: 1
+ Memory: 65536 KiB
Parallelism: 4
Hash: 7e12cb75695277c0ab974e4ae943b87da08e36dd065aca8de3ca009125ae8953
Encoded: $argon2d$v=19$m=65536,t=1,p=4$c29tZXNhbHQ$fhLLdWlSd8Crl05K6UO4faCONt0GWsqN48oAkSWuiVM
0.231 seconds
Verification ok
"""
- key = argon2.low_level.hash_secret_raw(secret=b'password', salt=b'somesalt', time_cost=1,
- memory_cost=65536, parallelism=4, hash_len=32, type=argon2.Type.D)
- self.assertEqual(binascii.hexlify(key), b'7e12cb75695277c0ab974e4ae943b87da08e36dd065aca8de3ca009125ae8953')
+ key = argon2.low_level.hash_secret_raw(secret=b'password',
+ salt=b'somesalt',
+ time_cost=1,
+ memory_cost=65536,
+ parallelism=4,
+ hash_len=32,
+ type=argon2.Type.D)
- def test_argon2_kdf(self):
- key, parallelism = argon2_kdf('password', ARGON2_SALT_LEN*b'a')
+ self.assertEqual(key.hex(), '7e12cb75695277c0ab974e4ae943b87da08e36dd065aca8de3ca009125ae8953')
+
+ def test_argon2d_kdf(self):
+ key = argon2_kdf('password', ARGON2_SALT_LENGTH*b'a', rounds=1, memory=100)
self.assertIsInstance(key, bytes)
- self.assertEqual(len(key), KEY_LENGTH)
- self.assertEqual(parallelism, multiprocessing.cpu_count())
+ self.assertEqual(len(key), SYMMETRIC_KEY_LENGTH)
- def test_argon2_kdf_local_testing(self):
- key, parallelism = argon2_kdf('password', ARGON2_SALT_LEN*b'a', local_test=True)
- self.assertIsInstance(key, bytes)
- self.assertEqual(len(key), KEY_LENGTH)
- self.assertEqual(parallelism, max(multiprocessing.cpu_count()//2, 1))
+ def test_invalid_salt_length_raises_assertion_error(self):
+ for salt_length in [v for v in range(1000) if v != ARGON2_SALT_LENGTH]:
+ with self.assertRaises(AssertionError):
+ argon2_kdf('password', salt_length * b'a')
-class TestXSalsa20Poly1305(unittest.TestCase):
- """Test vectors:
- https://cr.yp.to/highspeed/naclcrypto-20090310.pdf // page 35
+class TestX448(unittest.TestCase):
"""
- key_tv = binascii.unhexlify('1b27556473e985d4'
- '62cd51197a9a46c7'
- '6009549eac6474f2'
- '06c4ee0844f68389')
+ X448 test vectors
+ https://tools.ietf.org/html/rfc7748#section-6.2
+ """
+ sk_alice = bytes.fromhex(
+ '9a8f4925d1519f5775cf46b04b5800d4ee9ee8bae8bc5565d498c28d'
+ 'd9c9baf574a9419744897391006382a6f127ab1d9ac2d8c0a598726b')
- nonce_tv = binascii.unhexlify('69696ee955b62b73'
- 'cd62bda875fc73d6'
- '8219e0036b7a0b37')
+ pk_alice = bytes.fromhex(
+ '9b08f7cc31b7e3e67d22d5aea121074a273bd2b83de09c63faa73d2c'
+ '22c5d9bbc836647241d953d40c5b12da88120d53177f80e532c41fa0')
- pt_tv = binascii.unhexlify('be075fc53c81f2d5'
- 'cf141316ebeb0c7b'
- '5228c52a4c62cbd4'
- '4b66849b64244ffc'
- 'e5ecbaaf33bd751a'
- '1ac728d45e6c6129'
- '6cdc3c01233561f4'
- '1db66cce314adb31'
- '0e3be8250c46f06d'
- 'ceea3a7fa1348057'
- 'e2f6556ad6b1318a'
- '024a838f21af1fde'
- '048977eb48f59ffd'
- '4924ca1c60902e52'
- 'f0a089bc76897040'
- 'e082f93776384864'
- '5e0705')
+ sk_bob = bytes.fromhex(
+ '1c306a7ac2a0e2e0990b294470cba339e6453772b075811d8fad0d1d'
+ '6927c120bb5ee8972b0d3e21374c9c921b09d1b0366f10b65173992d')
- ct_tv = binascii.unhexlify('f3ffc7703f9400e5'
- '2a7dfb4b3d3305d9'
- '8e993b9f48681273'
- 'c29650ba32fc76ce'
- '48332ea7164d96a4'
- '476fb8c531a1186a'
- 'c0dfc17c98dce87b'
- '4da7f011ec48c972'
- '71d2c20f9b928fe2'
- '270d6fb863d51738'
- 'b48eeee314a7cc8a'
- 'b932164548e526ae'
- '90224368517acfea'
- 'bd6bb3732bc0e9da'
- '99832b61ca01b6de'
- '56244a9e88d5f9b3'
- '7973f622a43d14a6'
- '599b1f654cb45a74'
- 'e355a5')
+ pk_bob = bytes.fromhex(
+ '3eb7a829b0cd20f5bcfc0b599b6feccf6da4627107bdb0d4f345b430'
+ '27d8b972fc3e34fb4232a13ca706dcb57aec3dae07bdc1c67bf33609')
- def test_encrypt_and_sign_with_kat(self):
- """Test encryption with official test vectors."""
- # Setup
- o_nacl_utils_random = nacl.utils.random
- nacl.utils.random = lambda _: self.nonce_tv
+ shared_secret = bytes.fromhex(
+ '07fff4181ac6cc95ec1c16a94a0f74d12da232ce40a77552281d282b'
+ 'b60c0b56fd2464c335543936521c24403085d59a449a5037514a879d')
- # Test
- self.assertEqual(encrypt_and_sign(self.pt_tv, self.key_tv), self.nonce_tv + self.ct_tv)
+ def test_private_key_generation(self):
+ self.assertIsInstance(X448.generate_private_key(), X448PrivateKey)
- # Teardown
- nacl.utils.random = o_nacl_utils_random
+ def test_x448(self):
+ sk_alice_ = X448PrivateKey.from_private_bytes(TestX448.sk_alice)
+ sk_bob_ = X448PrivateKey.from_private_bytes(TestX448.sk_bob)
- def test_auth_and_decrypt_with_kat(self):
- """Test decryption with official test vectors."""
- self.assertEqual(auth_and_decrypt(self.nonce_tv + self.ct_tv, self.key_tv), self.pt_tv)
+ self.assertEqual(X448.derive_public_key(sk_alice_), TestX448.pk_alice)
+ self.assertEqual(X448.derive_public_key(sk_bob_), TestX448.pk_bob)
- def test_invalid_decryption_raises_critical_error(self):
+ shared_secret1 = X448.shared_key(sk_alice_, TestX448.pk_bob)
+ shared_secret2 = X448.shared_key(sk_bob_, TestX448.pk_alice)
+
+ self.assertEqual(shared_secret1, blake2b(TestX448.shared_secret))
+ self.assertEqual(shared_secret2, blake2b(TestX448.shared_secret))
+
+
+class TestXChaCha20Poly1305(unittest.TestCase):
+ """Libsodium test vectors:
+ Message: https://github.com/jedisct1/libsodium/blob/master/test/default/aead_xchacha20poly1305.c#L22
+ Ad: https://github.com/jedisct1/libsodium/blob/master/test/default/aead_xchacha20poly1305.c#L28
+ Nonce: https://github.com/jedisct1/libsodium/blob/master/test/default/aead_xchacha20poly1305.c#L25
+ Key: https://github.com/jedisct1/libsodium/blob/master/test/default/aead_xchacha20poly1305.c#L14
+ CT+tag: https://github.com/jedisct1/libsodium/blob/master/test/default/aead_xchacha20poly1305.exp#L1
+
+ IETF test vectors:
+ https://tools.ietf.org/html/draft-arciszewski-xchacha-02#appendix-A.1
+ """
+ plaintext = \
+ b"Ladies and Gentlemen of the class of '99: If I could offer you " \
+ b"only one tip for the future, sunscreen would be it."
+
+ ad = bytes.fromhex(
+ '50515253c0c1c2c3c4c5c6c7')
+
+ nonce = bytes.fromhex(
+ '070000004041424344454647'
+ '48494a4b4c4d4e4f50515253')
+
+ key = bytes.fromhex(
+ '8081828384858687'
+ '88898a8b8c8d8e8f'
+ '9091929394959697'
+ '98999a9b9c9d9e9f')
+
+ ct_tag = bytes.fromhex(
+ 'f8ebea4875044066'
+ 'fc162a0604e171fe'
+ 'ecfb3d2042524856'
+ '3bcfd5a155dcc47b'
+ 'bda70b86e5ab9b55'
+ '002bd1274c02db35'
+ '321acd7af8b2e2d2'
+ '5015e136b7679458'
+ 'e9f43243bf719d63'
+ '9badb5feac03f80a'
+ '19a96ef10cb1d153'
+ '33a837b90946ba38'
+ '54ee74da3f2585ef'
+ 'c7e1e170e17e15e5'
+ '63e77601f4f85caf'
+ 'a8e5877614e143e6'
+ '8420')
+
+ nonce_ct_tag = nonce + ct_tag
+
+ # ---
+
+ ietf_nonce = bytes.fromhex(
+ "404142434445464748494a4b4c4d4e4f"
+ "5051525354555657")
+
+ ietf_ct = bytes.fromhex(
+ "bd6d179d3e83d43b9576579493c0e939"
+ "572a1700252bfaccbed2902c21396cbb"
+ "731c7f1b0b4aa6440bf3a82f4eda7e39"
+ "ae64c6708c54c216cb96b72e1213b452"
+ "2f8c9ba40db5d945b11b69b982c1bb9e"
+ "3f3fac2bc369488f76b2383565d3fff9"
+ "21f9664c97637da9768812f615c68b13"
+ "b52e")
+
+ ietf_tag = bytes.fromhex(
+ "c0875924c1c7987947deafd8780acf49")
+
+ ietf_nonce_ct_tag = ietf_nonce + ietf_ct + ietf_tag
+
+ @mock.patch('src.common.crypto.csprng', side_effect=[nonce, ietf_nonce])
+ def test_encrypt_and_sign_with_official_test_vectors(self, mock_csprng):
+ self.assertEqual(encrypt_and_sign(self.plaintext, self.key, self.ad),
+ self.nonce_ct_tag)
+
+ self.assertEqual(encrypt_and_sign(self.plaintext, self.key, self.ad),
+ self.ietf_nonce_ct_tag)
+
+ mock_csprng.assert_called_with(XCHACHA20_NONCE_LENGTH)
+
+ def test_auth_and_decrypt_with_official_test_vectors(self):
+ self.assertEqual(auth_and_decrypt(self.nonce_ct_tag, self.key, ad=self.ad),
+ self.plaintext)
+
+ self.assertEqual(auth_and_decrypt(self.ietf_nonce_ct_tag, self.key, ad=self.ad),
+ self.plaintext)
+
+ def test_database_decryption_error_raises_critical_error(self):
with self.assertRaises(SystemExit):
- self.assertEqual(auth_and_decrypt(self.nonce_tv + self.ct_tv, key=bytes(KEY_LENGTH)), self.pt_tv)
+ auth_and_decrypt(self.nonce_ct_tag, key=bytes(SYMMETRIC_KEY_LENGTH), database='path/database_filename')
- def test_invalid_decryption_raises_soft_error(self):
+ def test_error_in_decryption_of_data_from_contact_raises_nacl_crypto_error(self):
with self.assertRaises(nacl.exceptions.CryptoError):
- self.assertEqual(auth_and_decrypt(self.nonce_tv + self.ct_tv, key=bytes(KEY_LENGTH), soft_e=True), self.pt_tv)
+ auth_and_decrypt(self.nonce_ct_tag, key=bytes(SYMMETRIC_KEY_LENGTH))
class TestBytePadding(unittest.TestCase):
- def test_padding(self):
- for s in range(0, PADDING_LEN):
- string = s * b'm'
+ def test_padding_length_is_divisible_by_packet_length(self):
+ for length in range(1000):
+ string = length * b'm'
padded = byte_padding(string)
- self.assertEqual(len(padded), PADDING_LEN)
+ self.assertIsInstance(padded, bytes)
+ self.assertEqual(len(padded) % PADDING_LENGTH, 0)
- # Verify removal of padding doesn't alter the string
- self.assertEqual(string, padded[:-ord(padded[-1:])])
-
- for s in range(PADDING_LEN, 1000):
- string = s * b'm'
- padded = byte_padding(string)
- self.assertEqual(len(padded) % PADDING_LEN, 0)
- self.assertEqual(string, padded[:-ord(padded[-1:])])
+ def test_packet_length_equal_to_padding_size_adds_dummy_block(self):
+ string = PADDING_LENGTH * b'm'
+ padded = byte_padding(string)
+ self.assertEqual(len(padded), 2*PADDING_LENGTH)
class TestRmPaddingBytes(unittest.TestCase):
- def test_padding_removal(self):
- for i in range(0, 1000):
- string = os.urandom(i)
- length = PADDING_LEN - (len(string) % PADDING_LEN)
- padded = string + length * bytes([length])
+ def test_removal_of_padding_does_not_alter_original_string(self):
+ for length in range(1000):
+ string = os.urandom(length)
+ padded = byte_padding(string)
self.assertEqual(rm_padding_bytes(padded), string)
-class TestXOR(unittest.TestCase):
-
- def test_length_mismatch_raises_critical_error(self):
- with self.assertRaises(SystemExit):
- xor(bytes(32), bytes(31))
-
- def test_xor_of_byte_strings(self):
- b1 = b'\x00\x01\x00\x01\x01'
- b2 = b'\x00\x00\x01\x01\x02'
- b3 = b'\x00\x01\x01\x00\x03'
-
- self.assertEqual(xor(b2, b3), b1)
- self.assertEqual(xor(b3, b2), b1)
- self.assertEqual(xor(b1, b3), b2)
- self.assertEqual(xor(b3, b1), b2)
- self.assertEqual(xor(b1, b2), b3)
- self.assertEqual(xor(b2, b1), b3)
-
-
class TestCSPRNG(unittest.TestCase):
- def test_travis_mock(self):
- # Setup
- o_environ = os.environ
- os.environ = dict(TRAVIS='true')
-
- # Test
- self.assertEqual(len(csprng()), KEY_LENGTH)
- self.assertIsInstance(csprng(), bytes)
-
- # Teardown
- os.environ = o_environ
+ entropy = SYMMETRIC_KEY_LENGTH * b'a'
def test_key_generation(self):
- self.assertEqual(len(csprng()), KEY_LENGTH)
- self.assertIsInstance(csprng(), bytes)
+ key = csprng()
+ self.assertEqual(len(key), SYMMETRIC_KEY_LENGTH)
+ self.assertIsInstance(key, bytes)
+
+ @mock.patch('os.getrandom', return_value=entropy)
+ def test_function_calls_getrandom_with_correct_parameters_and_hashes_with_blake2b(self, mock_get_random):
+ key = csprng()
+ mock_get_random.assert_called_with(SYMMETRIC_KEY_LENGTH, flags=0)
+ self.assertEqual(key, blake2b(self.entropy))
+
+ def test_function_returns_specified_amount_of_entropy(self):
+ for key_size in [16, 24, 32, 56, 64]:
+ key = csprng(key_size)
+ self.assertEqual(len(key), key_size)
+
+ def test_exceeding_hash_function_max_digest_size_raises_assertion_error(self):
+ with self.assertRaises(AssertionError):
+ csprng(BLAKE2_DIGEST_LENGTH_MAX + 1)
class TestCheckKernelEntropy(unittest.TestCase):
- def test_entropy_collection(self):
- self.assertIsNone(check_kernel_entropy())
+ @mock.patch('time.sleep', return_value=None)
+ def test_large_enough_entropy_pool_state_returns_none(self, _):
+ with mock.patch('builtins.open', mock.mock_open(read_data=str(ENTROPY_THRESHOLD))):
+ self.assertIsNone(check_kernel_entropy())
+ with mock.patch('builtins.open', mock.mock_open(read_data=str(ENTROPY_THRESHOLD+1))):
+ self.assertIsNone(check_kernel_entropy())
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_insufficient_entropy_pool_state_does_not_return(self, _):
+ with unittest.mock.patch('builtins.open', unittest.mock.mock_open(read_data=str(ENTROPY_THRESHOLD-1))):
+ p = multiprocessing.Process(target=check_kernel_entropy)
+ try:
+ p.start()
+ p.join(timeout=0.1)
+ self.assertTrue(p.is_alive())
+ finally:
+ p.terminate()
+ p.join()
+ self.assertFalse(p.is_alive())
class TestCheckKernelVersion(unittest.TestCase):
- def setUp(self):
- self.o_uname = os.uname
-
- def tearDown(self):
- os.uname = self.o_uname
-
- def test_invalid_kernel_versions_raise_critical_error(self):
- for version in ['3.9.0-52-generic', '4.7.0-52-generic']:
- os.uname = lambda: ['', '', version]
+ invalid_versions = ['3.9.11', '3.19.8', '4.7.10']
+ valid_versions = ['4.8.1', '4.10.1', '5.0.0']
+ @mock.patch('os.uname', side_effect=[['', '', f'{i}-0-generic'] for i in invalid_versions])
+ def test_invalid_kernel_versions_raise_critical_error(self, _):
+ for _ in self.invalid_versions:
with self.assertRaises(SystemExit):
check_kernel_version()
- def test_valid_kernel_versions(self):
- for version in ['4.8.0-52-generic', '4.10.0-52-generic', '5.0.0-52-generic']:
- os.uname = lambda: ['', '', version]
-
+ @mock.patch('os.uname', side_effect=[['', '', f'{v}-0-generic'] for v in valid_versions])
+ def test_valid_kernel_versions(self, _):
+ for _ in self.valid_versions:
self.assertIsNone(check_kernel_version())
-class TestX25519(unittest.TestCase):
- """\
- This test does not utilize functions in src.common.crypto
- module, but tests PyNaCl's X25519 used in key exchanges.
-
- Test vectors for X25519
-
- https://tools.ietf.org/html/rfc7748#section-6.1
-
- Alice's private key, a:
- 77076d0a7318a57d3c16c17251b26645df4c2f87ebc0992ab177fba51db92c2a
- Alice's public key, X25519(a, 9):
- 8520f0098930a754748b7ddcb43ef75a0dbf3a0d26381af4eba4a98eaa9b4e6a
- Bob's private key, b:
- 5dab087e624a8a4b79e17f8b83800ee66f3bb1292618b6fd1c2f8b27ff88e0eb
- Bob's public key, X25519(b, 9):
- de9edb7d7b7dc1b4d35b61c2ece435373f8343c85b78674dadfc7e146f882b4f
- Their shared secret, K:
- 4a5d9d5ba4ce2de1728e3bf480350f25e07e21c947d19e3376f09b3c1e161742
-
- Quoting PyNaCl tests:
- "Since libNaCl/libsodium shared key generation adds an HSalsa20
- key derivation pass on the raw shared Diffie-Hellman key, which
- is not exposed by itself, we just check the shared key for equality."
-
- TOFU style, unofficial KAT / sanity check shared secret test vector is
- 1b27556473e985d462cd51197a9a46c76009549eac6474f206c4ee0844f68389
- """
-
- def test_x25519(self):
- # Setup
- tv_sk_a = binascii.unhexlify('77076d0a7318a57d3c16c17251b26645df4c2f87ebc0992ab177fba51db92c2a')
- tv_pk_a = binascii.unhexlify('8520f0098930a754748b7ddcb43ef75a0dbf3a0d26381af4eba4a98eaa9b4e6a')
- tv_sk_b = binascii.unhexlify('5dab087e624a8a4b79e17f8b83800ee66f3bb1292618b6fd1c2f8b27ff88e0eb')
- tv_pk_b = binascii.unhexlify('de9edb7d7b7dc1b4d35b61c2ece435373f8343c85b78674dadfc7e146f882b4f')
- ssk = binascii.unhexlify('1b27556473e985d462cd51197a9a46c76009549eac6474f206c4ee0844f68389')
-
- # Generate known key pair for Alice
- sk_alice = nacl.public.PrivateKey(tv_sk_a)
- self.assertEqual(sk_alice._private_key, tv_sk_a)
- self.assertEqual(bytes(sk_alice.public_key), tv_pk_a)
-
- # Generate known key pair for Bob
- sk_bob = nacl.public.PrivateKey(tv_sk_b)
- self.assertEqual(sk_bob._private_key, tv_sk_b)
- self.assertEqual(bytes(sk_bob.public_key), tv_pk_b)
-
- # Test shared secrets are equal
- dh_box_a = nacl.public.Box(sk_alice, sk_bob.public_key)
- dh_ssk_a = dh_box_a.shared_key()
-
- dh_box_b = nacl.public.Box(sk_bob, sk_alice.public_key)
- dh_ssk_b = dh_box_b.shared_key()
-
- self.assertEqual(dh_ssk_a, ssk)
- self.assertEqual(dh_ssk_b, ssk)
-
-
if __name__ == '__main__':
unittest.main(exit=False)
diff --git a/tests/common/test_db_contacts.py b/tests/common/test_db_contacts.py
index 621648e..70a812f 100644
--- a/tests/common/test_db_contacts.py
+++ b/tests/common/test_db_contacts.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,7 +16,7 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see <http://www.gnu.org/licenses/>.
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
import os
@@ -25,159 +26,321 @@ from src.common.db_contacts import Contact, ContactList
from src.common.statics import *
from tests.mock_classes import create_contact, MasterKey, Settings
-from tests.utils import cleanup, TFCTestCase
+from tests.utils import cd_unittest, cleanup, nick_to_onion_address, nick_to_pub_key, tamper_file, TFCTestCase
class TestContact(unittest.TestCase):
+ def setUp(self):
+ self.contact = Contact(nick_to_pub_key('Bob'),
+ 'Bob',
+ FINGERPRINT_LENGTH * b'\x01',
+ FINGERPRINT_LENGTH * b'\x02',
+ KEX_STATUS_UNVERIFIED,
+ log_messages =True,
+ file_reception=True,
+ notifications =True)
+
def test_contact_serialization_length_and_type(self):
- serialized = create_contact().serialize_c()
+ serialized = self.contact.serialize_c()
self.assertEqual(len(serialized), CONTACT_LENGTH)
self.assertIsInstance(serialized, bytes)
+ def test_uses_psk(self):
+ for kex_status in [KEX_STATUS_NO_RX_PSK, KEX_STATUS_HAS_RX_PSK]:
+ self.contact.kex_status = kex_status
+ self.assertTrue(self.contact.uses_psk())
+
+ for kex_status in [KEX_STATUS_NONE, KEX_STATUS_PENDING, KEX_STATUS_UNVERIFIED,
+ KEX_STATUS_VERIFIED, KEX_STATUS_LOCAL_KEY]:
+ self.contact.kex_status = kex_status
+ self.assertFalse(self.contact.uses_psk())
+
class TestContactList(TFCTestCase):
def setUp(self):
+ self.unittest_dir = cd_unittest()
self.master_key = MasterKey()
self.settings = Settings()
+ self.file_name = f'{DIR_USER_DATA}{self.settings.software_operation}_contacts'
self.contact_list = ContactList(self.master_key, self.settings)
- self.contact_list.contacts = list(map(create_contact, ['Alice', 'Benny', 'Charlie', 'David', 'Eric']))
+ self.full_contact_list = ['Alice', 'Bob', 'Charlie', 'David', 'Eric', LOCAL_ID]
+ self.contact_list.contacts = list(map(create_contact, self.full_contact_list))
+ self.real_contact_list = self.full_contact_list[:]
+ self.real_contact_list.remove(LOCAL_ID)
def tearDown(self):
- cleanup()
+ cleanup(self.unittest_dir)
def test_contact_list_iterates_over_contact_objects(self):
for c in self.contact_list:
self.assertIsInstance(c, Contact)
- def test_len_returns_number_of_contacts(self):
- self.assertEqual(len(self.contact_list), 5)
+ def test_len_returns_the_number_of_contacts_and_excludes_the_local_key(self):
+ self.assertEqual(len(self.contact_list),
+ len(self.real_contact_list))
def test_storing_and_loading_of_contacts(self):
# Test store
self.contact_list.store_contacts()
- self.assertTrue(os.path.isfile(f'{DIR_USER_DATA}ut_contacts'))
- self.assertEqual(os.path.getsize(f'{DIR_USER_DATA}ut_contacts'),
- XSALSA20_NONCE_LEN
- + self.settings.max_number_of_contacts * CONTACT_LENGTH
- + POLY1305_TAG_LEN)
+ self.assertEqual(os.path.getsize(self.file_name),
+ XCHACHA20_NONCE_LENGTH
+ + (self.settings.max_number_of_contacts + 1) * CONTACT_LENGTH
+ + POLY1305_TAG_LENGTH)
# Test load
contact_list2 = ContactList(self.master_key, self.settings)
- self.assertEqual(len(contact_list2), 5)
+ self.assertEqual(len(contact_list2), len(self.real_contact_list))
+ self.assertEqual(len(contact_list2.contacts), len(self.full_contact_list))
for c in contact_list2:
self.assertIsInstance(c, Contact)
+ def test_load_of_modified_database_raises_critical_error(self):
+ self.contact_list.store_contacts()
+
+ # Test reading works normally
+ self.assertIsInstance(ContactList(self.master_key, self.settings), ContactList)
+
+ # Test loading of tampered database raises CriticalError
+ tamper_file(self.file_name, tamper_size=1)
+ with self.assertRaises(SystemExit):
+ ContactList(self.master_key, self.settings)
+
def test_generate_dummy_contact(self):
dummy_contact = ContactList.generate_dummy_contact()
self.assertIsInstance(dummy_contact, Contact)
self.assertEqual(len(dummy_contact.serialize_c()), CONTACT_LENGTH)
+ def test_dummy_contacts(self):
+ dummies = self.contact_list._dummy_contacts()
+ self.assertEqual(len(dummies), self.settings.max_number_of_contacts - len(self.real_contact_list))
+ for c in dummies:
+ self.assertIsInstance(c, Contact)
+
def test_add_contact(self):
- self.assertIsNone(self.contact_list.add_contact(f'faye@jabber.org', 'bob@jabber.org', f'Faye',
- FINGERPRINT_LEN * b'\x03',
- FINGERPRINT_LEN * b'\x04',
- True, True, True))
+ tx_fingerprint = FINGERPRINT_LENGTH * b'\x03'
+ rx_fingerprint = FINGERPRINT_LENGTH * b'\x04'
+ self.assertIsNone(self.contact_list.add_contact(nick_to_pub_key('Faye'),
+ 'Faye',
+ tx_fingerprint,
+ rx_fingerprint,
+ KEX_STATUS_UNVERIFIED,
+ self.settings.log_messages_by_default,
+ self.settings.accept_files_by_default,
+ self.settings.show_notifications_by_default))
+
+ # Test new contact was stored by loading
+ # the database from file to another object
contact_list2 = ContactList(MasterKey(), Settings())
- c_alice = contact_list2.get_contact('Alice')
- c_faye = contact_list2.get_contact('Faye')
+ faye = contact_list2.get_contact_by_pub_key(nick_to_pub_key('Faye'))
- self.assertEqual(len(self.contact_list), 6)
- self.assertIsInstance(c_alice, Contact)
- self.assertEqual(c_alice.tx_fingerprint, FINGERPRINT_LEN * b'\x01')
- self.assertEqual(c_faye.tx_fingerprint, FINGERPRINT_LEN * b'\x03')
+ self.assertEqual(len(self.contact_list), len(self.real_contact_list)+1)
+ self.assertIsInstance(faye, Contact)
- def test_replace_existing_contact(self):
- c_alice = self.contact_list.get_contact('Alice')
- self.assertEqual(c_alice.tx_fingerprint, FINGERPRINT_LEN * b'\x01')
+ self.assertEqual(faye.tx_fingerprint, tx_fingerprint)
+ self.assertEqual(faye.rx_fingerprint, rx_fingerprint)
+ self.assertEqual(faye.kex_status, KEX_STATUS_UNVERIFIED)
- self.assertIsNone(self.contact_list.add_contact(f'alice@jabber.org', 'bob@jabber.org', f'Alice',
- FINGERPRINT_LEN * b'\x03',
- FINGERPRINT_LEN * b'\x04',
- True, True, True))
+ self.assertEqual(faye.log_messages, self.settings.log_messages_by_default)
+ self.assertEqual(faye.file_reception, self.settings.accept_files_by_default)
+ self.assertEqual(faye.notifications, self.settings.show_notifications_by_default)
+ def test_add_contact_that_replaces_an_existing_contact(self):
+ alice = self.contact_list.get_contact_by_pub_key(nick_to_pub_key('Alice'))
+ new_nick = 'Alice2'
+ new_tx_fingerprint = FINGERPRINT_LENGTH * b'\x03'
+ new_rx_fingerprint = FINGERPRINT_LENGTH * b'\x04'
+
+ # Verify that existing nick, kex status and fingerprints are
+ # different from those that will replace the existing data
+ self.assertNotEqual(alice.nick, new_nick)
+ self.assertNotEqual(alice.tx_fingerprint, new_tx_fingerprint)
+ self.assertNotEqual(alice.rx_fingerprint, new_rx_fingerprint)
+ self.assertNotEqual(alice.kex_status, KEX_STATUS_UNVERIFIED)
+
+ # Make sure each contact setting is opposite from default value
+ alice.log_messages = not self.settings.log_messages_by_default
+ alice.file_reception = not self.settings.accept_files_by_default
+ alice.notifications = not self.settings.show_notifications_by_default
+
+ # Replace the existing contact
+ self.assertIsNone(self.contact_list.add_contact(nick_to_pub_key('Alice'),
+ new_nick,
+ new_tx_fingerprint,
+ new_rx_fingerprint,
+ KEX_STATUS_UNVERIFIED,
+ self.settings.log_messages_by_default,
+ self.settings.accept_files_by_default,
+ self.settings.show_notifications_by_default))
+
+ # Load database to another object from
+ # file to verify new contact was stored
contact_list2 = ContactList(MasterKey(), Settings())
- c_alice = contact_list2.get_contact('Alice')
+ alice = contact_list2.get_contact_by_pub_key(nick_to_pub_key('Alice'))
- self.assertEqual(len(self.contact_list), 5)
- self.assertIsInstance(c_alice, Contact)
- self.assertEqual(c_alice.tx_fingerprint, FINGERPRINT_LEN * b'\x03')
+ # Verify the content of loaded data
+ self.assertEqual(len(contact_list2), len(self.real_contact_list))
+ self.assertIsInstance(alice, Contact)
- def test_remove_contact(self):
- self.assertTrue(self.contact_list.has_contact('Benny'))
- self.assertTrue(self.contact_list.has_contact('Charlie'))
+ # Test replaced contact replaced nick, fingerprints and kex status
+ self.assertEqual(alice.nick, new_nick)
+ self.assertEqual(alice.tx_fingerprint, new_tx_fingerprint)
+ self.assertEqual(alice.rx_fingerprint, new_rx_fingerprint)
+ self.assertEqual(alice.kex_status, KEX_STATUS_UNVERIFIED)
- self.contact_list.remove_contact('benny@jabber.org')
- self.assertFalse(self.contact_list.has_contact('Benny'))
+ # Test replaced contact kept settings set
+ # to be opposite from default settings
+ self.assertNotEqual(alice.log_messages, self.settings.log_messages_by_default)
+ self.assertNotEqual(alice.file_reception, self.settings.accept_files_by_default)
+ self.assertNotEqual(alice.notifications, self.settings.show_notifications_by_default)
- self.contact_list.remove_contact('Charlie')
- self.assertFalse(self.contact_list.has_contact('Charlie'))
+ def test_remove_contact_by_pub_key(self):
+ # Verify both contacts exist
+ self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key('Bob')))
+ self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key('Charlie')))
- def test_get_contact(self):
- for selector in ['benny@jabber.org', 'Benny']:
- contact = self.contact_list.get_contact(selector)
- self.assertIsInstance(contact, Contact)
- self.assertEqual(contact.rx_account, 'benny@jabber.org')
+ self.assertIsNone(self.contact_list.remove_contact_by_pub_key(nick_to_pub_key('Bob')))
+ self.assertFalse(self.contact_list.has_pub_key(nick_to_pub_key('Bob')))
+ self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key('Charlie')))
+
+ def test_remove_contact_by_address_or_nick(self):
+ # Verify both contacts exist
+ self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key('Bob')))
+ self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key('Charlie')))
+
+ # Test removal with address
+ self.assertIsNone(self.contact_list.remove_contact_by_address_or_nick(nick_to_onion_address('Bob')))
+ self.assertFalse(self.contact_list.has_pub_key(nick_to_pub_key('Bob')))
+ self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key('Charlie')))
+
+ # Test removal with nick
+ self.assertIsNone(self.contact_list.remove_contact_by_address_or_nick('Charlie'))
+ self.assertFalse(self.contact_list.has_pub_key(nick_to_pub_key('Bob')))
+ self.assertFalse(self.contact_list.has_pub_key(nick_to_pub_key('Charlie')))
+
+ def test_get_contact_by_pub_key(self):
+ self.assertIs(self.contact_list.get_contact_by_pub_key(nick_to_pub_key('Bob')),
+ self.contact_list.get_contact_by_address_or_nick('Bob'))
+
+ def test_get_contact_by_address_or_nick_returns_same_contact_with_address_and_nick(self):
+ for selector in [nick_to_onion_address('Bob'), 'Bob']:
+ self.assertIsInstance(self.contact_list.get_contact_by_address_or_nick(selector), Contact)
+
+ self.assertIs(self.contact_list.get_contact_by_address_or_nick('Bob'),
+ self.contact_list.get_contact_by_address_or_nick(nick_to_onion_address('Bob')))
def test_get_list_of_contacts(self):
+ self.assertEqual(len(self.contact_list.get_list_of_contacts()),
+ len(self.real_contact_list))
for c in self.contact_list.get_list_of_contacts():
self.assertIsInstance(c, Contact)
- def test_get_list_of_accounts(self):
- self.assertEqual(self.contact_list.get_list_of_accounts(),
- ['alice@jabber.org', 'benny@jabber.org',
- 'charlie@jabber.org', 'david@jabber.org',
- 'eric@jabber.org'])
+ def test_get_list_of_addresses(self):
+ self.assertEqual(self.contact_list.get_list_of_addresses(),
+ [nick_to_onion_address('Alice'),
+ nick_to_onion_address('Bob'),
+ nick_to_onion_address('Charlie'),
+ nick_to_onion_address('David'),
+ nick_to_onion_address('Eric')])
def test_get_list_of_nicks(self):
self.assertEqual(self.contact_list.get_list_of_nicks(),
- ['Alice', 'Benny', 'Charlie', 'David', 'Eric'])
+ ['Alice', 'Bob', 'Charlie', 'David', 'Eric'])
- def test_get_list_of_users_accounts(self):
- self.assertEqual(self.contact_list.get_list_of_users_accounts(), ['user@jabber.org'])
+ def test_get_list_of_pub_keys(self):
+ self.assertEqual(self.contact_list.get_list_of_pub_keys(),
+ [nick_to_pub_key('Alice'),
+ nick_to_pub_key('Bob'),
+ nick_to_pub_key('Charlie'),
+ nick_to_pub_key('David'),
+ nick_to_pub_key('Eric')])
+
+ def test_get_list_of_pending_pub_keys(self):
+ # Set key exchange statuses to pending
+ for nick in ['Alice', 'Bob']:
+ contact = self.contact_list.get_contact_by_address_or_nick(nick)
+ contact.kex_status = KEX_STATUS_PENDING
+
+ # Test pending contacts are returned
+ self.assertEqual(self.contact_list.get_list_of_pending_pub_keys(),
+ [nick_to_pub_key('Alice'),
+ nick_to_pub_key('Bob')])
+
+ def test_get_list_of_existing_pub_keys(self):
+ self.contact_list.get_contact_by_address_or_nick('Alice').kex_status = KEX_STATUS_UNVERIFIED
+ self.contact_list.get_contact_by_address_or_nick('Bob').kex_status = KEX_STATUS_VERIFIED
+ self.contact_list.get_contact_by_address_or_nick('Charlie').kex_status = KEX_STATUS_HAS_RX_PSK
+ self.contact_list.get_contact_by_address_or_nick('David').kex_status = KEX_STATUS_NO_RX_PSK
+ self.contact_list.get_contact_by_address_or_nick('Eric').kex_status = KEX_STATUS_PENDING
+
+ self.assertEqual(self.contact_list.get_list_of_existing_pub_keys(),
+ [nick_to_pub_key('Alice'),
+ nick_to_pub_key('Bob'),
+ nick_to_pub_key('Charlie'),
+ nick_to_pub_key('David')])
def test_contact_selectors(self):
self.assertEqual(self.contact_list.contact_selectors(),
- ['alice@jabber.org', 'benny@jabber.org', 'charlie@jabber.org',
- 'david@jabber.org', 'eric@jabber.org',
- 'Alice', 'Benny', 'Charlie', 'David', 'Eric'])
+ [nick_to_onion_address('Alice'),
+ nick_to_onion_address('Bob'),
+ nick_to_onion_address('Charlie'),
+ nick_to_onion_address('David'),
+ nick_to_onion_address('Eric'),
+ 'Alice', 'Bob', 'Charlie', 'David', 'Eric'])
def test_has_contacts(self):
self.assertTrue(self.contact_list.has_contacts())
self.contact_list.contacts = []
self.assertFalse(self.contact_list.has_contacts())
- def test_has_contact(self):
+ def test_has_only_pending_contacts(self):
+ # Change all to pending
+ for contact in self.contact_list.get_list_of_contacts():
+ contact.kex_status = KEX_STATUS_PENDING
+ self.assertTrue(self.contact_list.has_only_pending_contacts())
+
+ # Change one from pending
+ alice = self.contact_list.get_contact_by_address_or_nick('Alice')
+ alice.kex_status = KEX_STATUS_UNVERIFIED
+ self.assertFalse(self.contact_list.has_only_pending_contacts())
+
+ def test_has_pub_key(self):
self.contact_list.contacts = []
- self.assertFalse(self.contact_list.has_contact('Benny'))
- self.assertFalse(self.contact_list.has_contact('bob@jabber.org'))
+ self.assertFalse(self.contact_list.has_pub_key(nick_to_pub_key('Bob')))
+ self.assertFalse(self.contact_list.has_pub_key(nick_to_pub_key('Bob')))
self.contact_list.contacts = list(map(create_contact, ['Bob', 'Charlie']))
- self.assertTrue(self.contact_list.has_contact('Bob'))
- self.assertTrue(self.contact_list.has_contact('charlie@jabber.org'))
+ self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key('Bob')))
+ self.assertTrue(self.contact_list.has_pub_key(nick_to_pub_key('Charlie')))
def test_has_local_contact(self):
+ self.contact_list.contacts = []
self.assertFalse(self.contact_list.has_local_contact())
- self.contact_list.contacts.append(create_contact(LOCAL_ID))
+
+ self.contact_list.contacts = [create_contact(LOCAL_ID)]
self.assertTrue(self.contact_list.has_local_contact())
- def test_contact_printing(self):
+ def test_print_contacts(self):
self.contact_list.contacts.append(create_contact(LOCAL_ID))
- self.contact_list.get_contact('Alice').log_messages = False
- self.contact_list.get_contact('Benny').notifications = False
- self.contact_list.get_contact('Charlie').file_reception = False
- self.contact_list.get_contact('David').tx_fingerprint = bytes(FINGERPRINT_LEN)
- self.assertPrints(CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER + """\
+ self.contact_list.get_contact_by_pub_key(nick_to_pub_key('Alice')).log_messages = False
+ self.contact_list.get_contact_by_pub_key(nick_to_pub_key('Alice')).kex_status = KEX_STATUS_PENDING
+ self.contact_list.get_contact_by_pub_key(nick_to_pub_key('Bob')).notifications = False
+ self.contact_list.get_contact_by_pub_key(nick_to_pub_key('Charlie')).kex_status = KEX_STATUS_UNVERIFIED
+ self.contact_list.get_contact_by_pub_key(nick_to_pub_key('Bob')).file_reception = False
+ self.contact_list.get_contact_by_pub_key(nick_to_pub_key('Bob')).kex_status = KEX_STATUS_VERIFIED
+ self.contact_list.get_contact_by_pub_key(nick_to_pub_key('David')).rx_fingerprint = bytes(FINGERPRINT_LENGTH)
+ self.contact_list.get_contact_by_pub_key(nick_to_pub_key('David')).kex_status = bytes(KEX_STATUS_NO_RX_PSK)
+ self.assert_prints(CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER + f"""\
-Contact Logging Notify Files Key Ex Account
+Contact Account Logging Notify Files Key Ex
────────────────────────────────────────────────────────────────────────────────
-Alice No Yes Accept X25519 alice@jabber.org
-Benny Yes No Accept X25519 benny@jabber.org
-Charlie Yes Yes Reject X25519 charlie@jabber.org
-David Yes Yes Accept PSK david@jabber.org
-Eric Yes Yes Accept X25519 eric@jabber.org
+Alice hpcra No Yes Accept {ECDHE} (Pending)
+Bob zwp3d Yes No Reject {ECDHE} (Verified)
+Charlie n2a3c Yes Yes Accept {ECDHE} (Unverified)
+David u22uy Yes Yes Accept {PSK} (No contact key)
+Eric jszzy Yes Yes Accept {ECDHE} (Verified)
""", self.contact_list.print_contacts)
diff --git a/tests/common/test_db_groups.py b/tests/common/test_db_groups.py
index 2680297..228bc56 100644
--- a/tests/common/test_db_groups.py
+++ b/tests/common/test_db_groups.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,7 +16,7 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see <http://www.gnu.org/licenses/>.
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
import os
@@ -23,223 +24,331 @@ import unittest
from src.common.db_contacts import Contact, ContactList
from src.common.db_groups import Group, GroupList
+from src.common.encoding import b58encode
+from src.common.misc import ensure_dir
from src.common.statics import *
-from tests.mock_classes import create_contact, MasterKey, Settings
-from tests.utils import cleanup, TFCTestCase
+from tests.mock_classes import create_contact, group_name_to_group_id, MasterKey, nick_to_pub_key, Settings
+from tests.utils import cd_unittest, cleanup, tamper_file, TFCTestCase
class TestGroup(unittest.TestCase):
def setUp(self):
- members = list(map(create_contact, ['Alice', 'Bob', 'Charlie']))
- self.settings = Settings()
- self.group = Group('testgroup', False, False, members, self.settings, lambda: None)
+ self.unittest_dir = cd_unittest()
+ self.nicks = ['Alice', 'Bob', 'Charlie']
+ members = list(map(create_contact, self.nicks))
+ self.settings = Settings()
+ self.group = Group(name ='test_group',
+ group_id =group_name_to_group_id('test_group'),
+ log_messages =False,
+ notifications=False,
+ members =members,
+ settings =self.settings,
+ store_groups =lambda: None)
+ ensure_dir(DIR_USER_DATA)
def tearDown(self):
- cleanup()
+ cleanup(self.unittest_dir)
def test_group_iterates_over_contact_objects(self):
for c in self.group:
self.assertIsInstance(c, Contact)
- def test_len_returns_number_of_members(self):
- self.assertEqual(len(self.group), 3)
+ def test_len_returns_the_number_of_members(self):
+ self.assertEqual(len(self.group), len(self.nicks))
- def test_serialize_g(self):
+ def test_group_serialization_length_and_type(self):
serialized = self.group.serialize_g()
self.assertIsInstance(serialized, bytes)
- self.assertEqual(len(serialized),
- PADDED_UTF32_STR_LEN
- + (2 * BOOLEAN_SETTING_LEN)
- + (self.settings.max_number_of_group_members * PADDED_UTF32_STR_LEN))
+ self.assertEqual(len(serialized), GROUP_STATIC_LENGTH + (self.settings.max_number_of_group_members
+ * ONION_SERVICE_PUBLIC_KEY_LENGTH))
def test_add_members(self):
- self.group.members = []
- self.assertFalse(self.group.has_member('david@jabber.org'))
- self.assertFalse(self.group.has_member('eric@jabber.org'))
+ # Test members to be added are not already in group
+ self.assertFalse(self.group.has_member(nick_to_pub_key('David')))
+ self.assertFalse(self.group.has_member(nick_to_pub_key('Eric')))
- self.group.add_members([create_contact(n) for n in ['David', 'Eric']])
- self.assertTrue(self.group.has_member('david@jabber.org'))
- self.assertTrue(self.group.has_member('eric@jabber.org'))
+ self.assertIsNone(self.group.add_members(list(map(create_contact, ['Alice', 'David', 'Eric']))))
+
+ # Test new members were added
+ self.assertTrue(self.group.has_member(nick_to_pub_key('David')))
+ self.assertTrue(self.group.has_member(nick_to_pub_key('Eric')))
+
+ # Test Alice was not added twice
+ self.assertEqual(len(self.group), len(['Alice', 'Bob', 'Charlie', 'David', 'Eric']))
def test_remove_members(self):
- self.assertTrue(self.group.has_member('alice@jabber.org'))
- self.assertTrue(self.group.has_member('bob@jabber.org'))
- self.assertTrue(self.group.has_member('charlie@jabber.org'))
+ # Test members to be removed are part of group
+ self.assertTrue(self.group.has_member(nick_to_pub_key('Alice')))
+ self.assertTrue(self.group.has_member(nick_to_pub_key('Bob')))
+ self.assertTrue(self.group.has_member(nick_to_pub_key('Charlie')))
- self.assertTrue(self.group.remove_members(['charlie@jabber.org', 'eric@jabber.org']))
- self.assertFalse(self.group.remove_members(['charlie@jabber.org', 'eric@jabber.org']))
+ # Test first attempt to remove returns True (because Charlie was removed)
+ self.assertTrue(self.group.remove_members([nick_to_pub_key('Charlie'), nick_to_pub_key('Unknown')]))
- self.assertTrue(self.group.has_member('alice@jabber.org'))
- self.assertTrue(self.group.has_member('bob@jabber.org'))
- self.assertFalse(self.group.has_member('charlie@jabber.org'))
+ # Test second attempt to remove returns False (because no-one was removed)
+ self.assertFalse(self.group.remove_members([nick_to_pub_key('Charlie'), nick_to_pub_key('Unknown')]))
- def test_get_list_of_member_accounts(self):
- self.assertEqual(self.group.get_list_of_member_accounts(),
- ['alice@jabber.org', 'bob@jabber.org', 'charlie@jabber.org'])
+ # Test Charlie was removed
+ self.assertFalse(self.group.has_member(nick_to_pub_key('Charlie')))
- def test_get_list_of_member_nicks(self):
- self.assertEqual(self.group.get_list_of_member_nicks(), ['Alice', 'Bob', 'Charlie'])
+ # Test no other members were removed
+ self.assertTrue(self.group.has_member(nick_to_pub_key('Alice')))
+ self.assertTrue(self.group.has_member(nick_to_pub_key('Bob')))
+
+ def test_get_list_of_member_pub_keys(self):
+ self.assertEqual(first=self.group.get_list_of_member_pub_keys(),
+ second=[nick_to_pub_key('Alice'),
+ nick_to_pub_key('Bob'),
+ nick_to_pub_key('Charlie')])
def test_has_member(self):
- self.assertTrue(self.group.has_member('charlie@jabber.org'))
- self.assertFalse(self.group.has_member('david@jabber.org'))
+ self.assertTrue(self.group.has_member(nick_to_pub_key('Charlie')))
+ self.assertFalse(self.group.has_member(nick_to_pub_key('David')))
def test_has_members(self):
- self.assertTrue(self.group.has_members())
+ self.assertFalse(self.group.empty())
self.group.members = []
- self.assertFalse(self.group.has_members())
+ self.assertTrue(self.group.empty())
class TestGroupList(TFCTestCase):
def setUp(self):
+ self.unittest_dir = cd_unittest()
self.master_key = MasterKey()
self.settings = Settings()
+ self.file_name = f'{DIR_USER_DATA}{self.settings.software_operation}_groups'
self.contact_list = ContactList(self.master_key, self.settings)
self.group_list = GroupList(self.master_key, self.settings, self.contact_list)
- members = [create_contact(n) for n in ['Alice', 'Bob', 'Charlie', 'David', 'Eric',
- 'Fido', 'Guido', 'Heidi', 'Ivan', 'Joana', 'Karol']]
+ self.nicks = ['Alice', 'Bob', 'Charlie', 'David', 'Eric',
+ 'Fido', 'Guido', 'Heidi', 'Ivan', 'Joana', 'Karol']
+ self.group_names = ['test_group_1', 'test_group_2', 'test_group_3', 'test_group_4', 'test_group_5',
+ 'test_group_6', 'test_group_7', 'test_group_8', 'test_group_9', 'test_group_10',
+ 'test_group_11']
+ members = list(map(create_contact, self.nicks))
+
self.contact_list.contacts = members
- groups = [Group(n, False, False, members, self.settings, self.group_list.store_groups)
- for n in ['testgroup_1', 'testgroup_2', 'testgroup_3', 'testgroup_4', 'testgroup_5',
- 'testgroup_6', 'testgroup_7', 'testgroup_8', 'testgroup_9', 'testgroup_10',
- 'testgroup_11']]
+ self.group_list.groups = \
+ [Group(name =name,
+ group_id =group_name_to_group_id(name),
+ log_messages =False,
+ notifications=False,
+ members =members,
+ settings =self.settings,
+ store_groups =self.group_list.store_groups)
+ for name in self.group_names]
- self.group_list.groups = groups
- self.group_list.store_groups()
-
- self.single_member_data = (PADDED_UTF32_STR_LEN
- + (2 * BOOLEAN_SETTING_LEN)
- + (self.settings.max_number_of_group_members * PADDED_UTF32_STR_LEN))
+ self.single_member_data_len = (GROUP_STATIC_LENGTH
+ + self.settings.max_number_of_group_members * ONION_SERVICE_PUBLIC_KEY_LENGTH)
def tearDown(self):
- cleanup()
+ cleanup(self.unittest_dir)
def test_group_list_iterates_over_group_objects(self):
for g in self.group_list:
self.assertIsInstance(g, Group)
- def test_len_returns_number_of_groups(self):
- self.assertEqual(len(self.group_list), 11)
+ def test_len_returns_the_number_of_groups(self):
+ self.assertEqual(len(self.group_list), len(self.group_names))
- def test_database_size(self):
- self.assertTrue(os.path.isfile(f'{DIR_USER_DATA}ut_groups'))
- self.assertEqual(os.path.getsize(f'{DIR_USER_DATA}ut_groups'),
- XSALSA20_NONCE_LEN
- + GROUP_DB_HEADER_LEN
- + self.settings.max_number_of_groups * self.single_member_data
- + POLY1305_TAG_LEN)
+ def test_storing_and_loading_of_groups(self):
+ self.group_list.store_groups()
+ self.assertTrue(os.path.isfile(self.file_name))
+ self.assertEqual(os.path.getsize(self.file_name),
+ XCHACHA20_NONCE_LENGTH
+ + GROUP_DB_HEADER_LENGTH
+ + self.settings.max_number_of_groups * self.single_member_data_len
+ + POLY1305_TAG_LENGTH)
+
+ # Reduce setting values from 20 to 10
self.settings.max_number_of_groups = 10
self.settings.max_number_of_group_members = 10
group_list2 = GroupList(self.master_key, self.settings, self.contact_list)
self.assertEqual(len(group_list2), 11)
- # Check that load_groups() function increases setting values with larger db
- self.assertEqual(self.settings.max_number_of_groups, 20)
+ # Check that `_load_groups()` increased setting values back to 20 so it fits the 11 groups
+ self.assertEqual(self.settings.max_number_of_groups, 20)
self.assertEqual(self.settings.max_number_of_group_members, 20)
# Check that removed contact from contact list updates group
- self.contact_list.remove_contact('Alice')
+ self.contact_list.remove_contact_by_address_or_nick('Alice')
group_list3 = GroupList(self.master_key, self.settings, self.contact_list)
- self.assertEqual(len(group_list3.get_group('testgroup_1').members), 10)
+ self.assertEqual(len(group_list3.get_group('test_group_1').members), 10)
- group_list4 = GroupList(self.master_key, self.settings, self.contact_list)
- self.assertEqual(len(group_list4.get_group('testgroup_2').members), 10)
+ def test_load_of_modified_database_raises_critical_error(self):
+ self.group_list.store_groups()
+
+ # Test reading works normally
+ self.assertIsInstance(GroupList(self.master_key, self.settings, self.contact_list), GroupList)
+
+ # Test loading of the tampered database raises CriticalError
+ tamper_file(self.file_name, tamper_size=1)
+ with self.assertRaises(SystemExit):
+ GroupList(self.master_key, self.settings, self.contact_list)
+
+ def test_check_db_settings(self):
+ self.assertFalse(self.group_list._check_db_settings(
+ number_of_actual_groups=self.settings.max_number_of_groups,
+ members_in_largest_group=self.settings.max_number_of_group_members))
+
+ self.assertTrue(self.group_list._check_db_settings(
+ number_of_actual_groups=self.settings.max_number_of_groups + 1,
+ members_in_largest_group=self.settings.max_number_of_group_members))
+
+ self.assertTrue(self.group_list._check_db_settings(
+ number_of_actual_groups=self.settings.max_number_of_groups,
+ members_in_largest_group=self.settings.max_number_of_group_members + 1))
def test_generate_group_db_header(self):
- header = self.group_list.generate_group_db_header()
- self.assertEqual(len(header), GROUP_DB_HEADER_LEN)
+ header = self.group_list._generate_group_db_header()
+ self.assertEqual(len(header), GROUP_DB_HEADER_LENGTH)
self.assertIsInstance(header, bytes)
def test_generate_dummy_group(self):
- dummy_group = self.group_list.generate_dummy_group()
- self.assertEqual(len(dummy_group.serialize_g()), self.single_member_data)
+ dummy_group = self.group_list._generate_dummy_group()
self.assertIsInstance(dummy_group, Group)
+ self.assertEqual(len(dummy_group.serialize_g()), self.single_member_data_len)
+
+ def test_dummy_groups(self):
+ dummies = self.group_list._dummy_groups()
+ self.assertEqual(len(dummies), self.settings.max_number_of_contacts - len(self.nicks))
+ for g in dummies:
+ self.assertIsInstance(g, Group)
def test_add_group(self):
members = [create_contact('Laura')]
- self.group_list.add_group('testgroup_12', False, False, members)
- self.group_list.add_group('testgroup_12', False, True, members)
- self.assertTrue(self.group_list.get_group('testgroup_12').notifications)
- self.assertEqual(len(self.group_list), 12)
+ self.group_list.add_group('test_group_12', bytes(GROUP_ID_LENGTH), False, False, members)
+ self.group_list.add_group('test_group_12', bytes(GROUP_ID_LENGTH), False, True, members)
+ self.assertTrue(self.group_list.get_group('test_group_12').notifications)
+ self.assertEqual(len(self.group_list), len(self.group_names)+1)
- def test_remove_group(self):
- self.assertEqual(len(self.group_list), 11)
+ def test_remove_group_by_name(self):
+ self.assertEqual(len(self.group_list), len(self.group_names))
- self.assertIsNone(self.group_list.remove_group('testgroup_12'))
- self.assertEqual(len(self.group_list), 11)
+ # Remove non-existing group
+ self.assertIsNone(self.group_list.remove_group_by_name('test_group_12'))
+ self.assertEqual(len(self.group_list), len(self.group_names))
- self.assertIsNone(self.group_list.remove_group('testgroup_11'))
- self.assertEqual(len(self.group_list), 10)
+ # Remove existing group
+ self.assertIsNone(self.group_list.remove_group_by_name('test_group_11'))
+ self.assertEqual(len(self.group_list), len(self.group_names)-1)
- def test_get_list_of_group_names(self):
- g_names = ['testgroup_1', 'testgroup_2', 'testgroup_3', 'testgroup_4', 'testgroup_5', 'testgroup_6',
- 'testgroup_7', 'testgroup_8', 'testgroup_9', 'testgroup_10', 'testgroup_11']
- self.assertEqual(self.group_list.get_list_of_group_names(), g_names)
+ def test_remove_group_by_id(self):
+ self.assertEqual(len(self.group_list), len(self.group_names))
+
+ # Remove non-existing group
+ self.assertIsNone(self.group_list.remove_group_by_id(group_name_to_group_id('test_group_12')))
+ self.assertEqual(len(self.group_list), len(self.group_names))
+
+ # Remove existing group
+ self.assertIsNone(self.group_list.remove_group_by_id(group_name_to_group_id('test_group_11')))
+ self.assertEqual(len(self.group_list), len(self.group_names)-1)
def test_get_group(self):
- self.assertEqual(self.group_list.get_group('testgroup_3').name, 'testgroup_3')
+ self.assertEqual(self.group_list.get_group('test_group_3').name, 'test_group_3')
+
+ def test_get_group_by_id(self):
+ members = [create_contact('Laura')]
+ group_id = os.urandom(GROUP_ID_LENGTH)
+ self.group_list.add_group('test_group_12', group_id, False, False, members)
+ self.assertEqual(self.group_list.get_group_by_id(group_id).name, 'test_group_12')
+
+ def test_get_list_of_group_names(self):
+ self.assertEqual(self.group_list.get_list_of_group_names(), self.group_names)
+
+ def test_get_list_of_group_ids(self):
+ self.assertEqual(self.group_list.get_list_of_group_ids(),
+ list(map(group_name_to_group_id, self.group_names)))
+
+ def test_get_list_of_hr_group_ids(self):
+ self.assertEqual(self.group_list.get_list_of_hr_group_ids(),
+ [b58encode(gid) for gid in list(map(group_name_to_group_id, self.group_names))])
def test_get_group_members(self):
- members = self.group_list.get_group_members('testgroup_1')
+ members = self.group_list.get_group_members(group_name_to_group_id('test_group_1'))
for c in members:
self.assertIsInstance(c, Contact)
def test_has_group(self):
- self.assertTrue(self.group_list.has_group('testgroup_11'))
- self.assertFalse(self.group_list.has_group('testgroup_12'))
+ self.assertTrue(self.group_list.has_group('test_group_11'))
+ self.assertFalse(self.group_list.has_group('test_group_12'))
- def test_has_groups(self):
- self.assertTrue(self.group_list.has_groups())
- self.group_list.groups = []
- self.assertFalse(self.group_list.has_groups())
+ def test_has_group_id(self):
+ members = [create_contact('Laura')]
+ group_id = os.urandom(GROUP_ID_LENGTH)
+ self.assertFalse(self.group_list.has_group_id(group_id))
+ self.group_list.add_group('test_group_12', group_id, False, False, members)
+ self.assertTrue(self.group_list.has_group_id(group_id))
def test_largest_group(self):
- self.assertEqual(self.group_list.largest_group(), 11)
+ self.assertEqual(self.group_list.largest_group(), len(self.nicks))
def test_print_group(self):
- self.group_list.get_group("testgroup_1").log_messages = True
- self.group_list.get_group("testgroup_2").notifications = True
- self.group_list.get_group("testgroup_3").members = []
- self.assertPrints("""\
-Group Logging Notify Members
+ self.group_list.get_group("test_group_1").name = "group"
+ self.group_list.get_group("test_group_2").log_messages = True
+ self.group_list.get_group("test_group_3").notifications = True
+ self.group_list.get_group("test_group_4").log_messages = True
+ self.group_list.get_group("test_group_4").notifications = True
+ self.group_list.get_group("test_group_5").members = []
+ self.group_list.get_group("test_group_6").members = list(map(create_contact, ['Alice', 'Bob', 'Charlie',
+ 'David', 'Eric', 'Fido']))
+ self.assert_prints("""\
+Group Group ID Logging Notify Members
────────────────────────────────────────────────────────────────────────────────
-testgroup_1 Yes No Alice, Bob, Charlie, David, Eric, Fido,
- Guido, Heidi, Ivan, Joana, Karol
+group 2drs4c4VcDdrP No No Alice, Bob, Charlie,
+ David, Eric, Fido,
+ Guido, Heidi, Ivan,
+ Joana, Karol
-testgroup_2 No Yes Alice, Bob, Charlie, David, Eric, Fido,
- Guido, Heidi, Ivan, Joana, Karol
+test_group_2 2dnGTyhkThmPi Yes No Alice, Bob, Charlie,
+ David, Eric, Fido,
+ Guido, Heidi, Ivan,
+ Joana, Karol
-testgroup_3 No No
+test_group_3 2df7s3LZhwLDw No Yes Alice, Bob, Charlie,
+ David, Eric, Fido,
+ Guido, Heidi, Ivan,
+ Joana, Karol
-testgroup_4 No No Alice, Bob, Charlie, David, Eric, Fido,
- Guido, Heidi, Ivan, Joana, Karol
+test_group_4 2djy3XwUQVR8q Yes Yes Alice, Bob, Charlie,
+ David, Eric, Fido,
+ Guido, Heidi, Ivan,
+ Joana, Karol
-testgroup_5 No No Alice, Bob, Charlie, David, Eric, Fido,
- Guido, Heidi, Ivan, Joana, Karol
+test_group_5 2dvbcgnjiLLMo No No
-testgroup_6 No No Alice, Bob, Charlie, David, Eric, Fido,
- Guido, Heidi, Ivan, Joana, Karol
+test_group_6 2dwBRWAqWKHWv No No Alice, Bob, Charlie,
+ David, Eric, Fido
-testgroup_7 No No Alice, Bob, Charlie, David, Eric, Fido,
- Guido, Heidi, Ivan, Joana, Karol
+test_group_7 2eDPg5BAM6qF4 No No Alice, Bob, Charlie,
+ David, Eric, Fido,
+ Guido, Heidi, Ivan,
+ Joana, Karol
-testgroup_8 No No Alice, Bob, Charlie, David, Eric, Fido,
- Guido, Heidi, Ivan, Joana, Karol
+test_group_8 2dqdayy5TJKcf No No Alice, Bob, Charlie,
+ David, Eric, Fido,
+ Guido, Heidi, Ivan,
+ Joana, Karol
-testgroup_9 No No Alice, Bob, Charlie, David, Eric, Fido,
- Guido, Heidi, Ivan, Joana, Karol
+test_group_9 2e45bLYvSX3C8 No No Alice, Bob, Charlie,
+ David, Eric, Fido,
+ Guido, Heidi, Ivan,
+ Joana, Karol
-testgroup_10 No No Alice, Bob, Charlie, David, Eric, Fido,
- Guido, Heidi, Ivan, Joana, Karol
+test_group_10 2dgkncX9xRibh No No Alice, Bob, Charlie,
+ David, Eric, Fido,
+ Guido, Heidi, Ivan,
+ Joana, Karol
-testgroup_11 No No Alice, Bob, Charlie, David, Eric, Fido,
- Guido, Heidi, Ivan, Joana, Karol
+test_group_11 2e6vAGmHmSEEJ No No Alice, Bob, Charlie,
+ David, Eric, Fido,
+ Guido, Heidi, Ivan,
+ Joana, Karol
""", self.group_list.print_groups)
diff --git a/tests/common/test_db_keys.py b/tests/common/test_db_keys.py
index 588d78e..bbd2762 100644
--- a/tests/common/test_db_keys.py
+++ b/tests/common/test_db_keys.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,134 +16,227 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see <http://www.gnu.org/licenses/>.
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
import os.path
import unittest
-from src.common.crypto import hash_chain
-from src.common.db_keys import KeyList, KeySet
-from src.common.statics import *
+from src.common.crypto import blake2b
+from src.common.db_keys import KeyList, KeySet
+from src.common.encoding import int_to_bytes
+from src.common.statics import *
-from tests.mock_classes import create_keyset, MasterKey, Settings
-from tests.utils import cleanup
+from tests.mock_classes import create_keyset, MasterKey, nick_to_pub_key, Settings
+from tests.utils import cd_unittest, cleanup, tamper_file
class TestKeySet(unittest.TestCase):
def setUp(self):
- self.keyset = KeySet('alice@jabber.org',
- KEY_LENGTH * b'\x00',
- KEY_LENGTH * b'\x00',
- KEY_LENGTH * b'\x00',
- KEY_LENGTH * b'\x00',
- 0, 0, lambda: None)
+ self.keyset = KeySet(onion_pub_key=nick_to_pub_key('Alice'),
+ tx_mk=bytes(SYMMETRIC_KEY_LENGTH),
+ rx_mk=bytes(SYMMETRIC_KEY_LENGTH),
+ tx_hk=bytes(SYMMETRIC_KEY_LENGTH),
+ rx_hk=bytes(SYMMETRIC_KEY_LENGTH),
+ tx_harac=INITIAL_HARAC,
+ rx_harac=INITIAL_HARAC,
+ store_keys=lambda: None)
def test_keyset_serialization_length_and_type(self):
serialized = self.keyset.serialize_k()
self.assertEqual(len(serialized), KEYSET_LENGTH)
self.assertIsInstance(serialized, bytes)
- def test_rotate_tx_key(self):
- self.assertIsNone(self.keyset.rotate_tx_key())
- self.assertEqual(self.keyset.tx_key, hash_chain(KEY_LENGTH * b'\x00'))
+ def test_rotate_tx_mk(self):
+ self.assertIsNone(self.keyset.rotate_tx_mk())
+ self.assertEqual(self.keyset.tx_mk, blake2b(bytes(SYMMETRIC_KEY_LENGTH) + int_to_bytes(INITIAL_HARAC),
+ digest_size=SYMMETRIC_KEY_LENGTH))
+ self.assertEqual(self.keyset.rx_mk, bytes(SYMMETRIC_KEY_LENGTH))
+ self.assertEqual(self.keyset.tx_hk, bytes(SYMMETRIC_KEY_LENGTH))
+ self.assertEqual(self.keyset.rx_hk, bytes(SYMMETRIC_KEY_LENGTH))
self.assertEqual(self.keyset.tx_harac, 1)
+ self.assertEqual(self.keyset.rx_harac, INITIAL_HARAC)
- def test_update_tx_key(self):
- self.keyset.update_key(TX, KEY_LENGTH * b'\x01', 2)
- self.assertEqual(self.keyset.tx_key, KEY_LENGTH * b'\x01')
- self.assertEqual(self.keyset.rx_key, KEY_LENGTH * b'\x00')
- self.assertEqual(self.keyset.tx_hek, KEY_LENGTH * b'\x00')
- self.assertEqual(self.keyset.rx_hek, KEY_LENGTH * b'\x00')
+ def test_update_tx_mk(self):
+ self.keyset.update_mk(TX, SYMMETRIC_KEY_LENGTH * b'\x01', 2)
+ self.assertEqual(self.keyset.tx_mk, SYMMETRIC_KEY_LENGTH * b'\x01')
+ self.assertEqual(self.keyset.rx_mk, bytes(SYMMETRIC_KEY_LENGTH))
+ self.assertEqual(self.keyset.tx_hk, bytes(SYMMETRIC_KEY_LENGTH))
+ self.assertEqual(self.keyset.rx_hk, bytes(SYMMETRIC_KEY_LENGTH))
self.assertEqual(self.keyset.tx_harac, 2)
+ self.assertEqual(self.keyset.rx_harac, INITIAL_HARAC)
- def test_update_rx_key(self):
- self.keyset.update_key(RX, KEY_LENGTH * b'\x01', 2)
- self.assertEqual(self.keyset.tx_key, KEY_LENGTH * b'\x00')
- self.assertEqual(self.keyset.rx_key, KEY_LENGTH * b'\x01')
- self.assertEqual(self.keyset.tx_hek, KEY_LENGTH * b'\x00')
- self.assertEqual(self.keyset.rx_hek, KEY_LENGTH * b'\x00')
+ def test_update_rx_mk(self):
+ self.keyset.update_mk(RX, SYMMETRIC_KEY_LENGTH * b'\x01', 2)
+ self.assertEqual(self.keyset.tx_mk, bytes(SYMMETRIC_KEY_LENGTH))
+ self.assertEqual(self.keyset.rx_mk, SYMMETRIC_KEY_LENGTH * b'\x01')
+ self.assertEqual(self.keyset.tx_hk, bytes(SYMMETRIC_KEY_LENGTH))
+ self.assertEqual(self.keyset.rx_hk, bytes(SYMMETRIC_KEY_LENGTH))
+ self.assertEqual(self.keyset.tx_harac, INITIAL_HARAC)
self.assertEqual(self.keyset.rx_harac, 2)
def test_invalid_direction_raises_critical_error(self):
+ invalid_direction = 'sx'
with self.assertRaises(SystemExit):
- self.keyset.update_key('sx', KEY_LENGTH * b'\x01', 2)
+ self.keyset.update_mk(invalid_direction, SYMMETRIC_KEY_LENGTH * b'\x01', 2)
class TestKeyList(unittest.TestCase):
def setUp(self):
- self.master_key = MasterKey()
- self.settings = Settings()
- self.keylist = KeyList(MasterKey(), Settings())
- self.keylist.keysets = [create_keyset(n, store_f=self.keylist.store_keys) for n in ['Alice', 'Bob', 'Charlie']]
- self.keylist.store_keys()
+ self.unittest_dir = cd_unittest()
+ self.master_key = MasterKey()
+ self.settings = Settings()
+ self.file_name = f'{DIR_USER_DATA}{self.settings.software_operation}_keys'
+ self.keylist = KeyList(self.master_key, self.settings)
+ self.full_contact_list = ['Alice', 'Bob', 'Charlie', LOCAL_ID]
+ self.keylist.keysets = [create_keyset(n, store_f=self.keylist.store_keys) for n in self.full_contact_list]
def tearDown(self):
- cleanup()
+ cleanup(self.unittest_dir)
def test_storing_and_loading_of_keysets(self):
- # Test Store
- self.assertTrue(os.path.isfile(f'{DIR_USER_DATA}ut_keys'))
- self.assertEqual(os.path.getsize(f'{DIR_USER_DATA}ut_keys'),
- XSALSA20_NONCE_LEN
- + self.settings.max_number_of_contacts * KEYSET_LENGTH
- + POLY1305_TAG_LEN)
+ # Test store
+ self.keylist.store_keys()
+ self.assertEqual(os.path.getsize(self.file_name),
+ XCHACHA20_NONCE_LENGTH
+ + (self.settings.max_number_of_contacts+1) * KEYSET_LENGTH
+ + POLY1305_TAG_LENGTH)
# Test load
- keylist2 = KeyList(MasterKey(), Settings())
- self.assertEqual(len(keylist2.keysets), 3)
+ key_list2 = KeyList(MasterKey(), Settings())
+ self.assertEqual(len(key_list2.keysets), len(self.full_contact_list))
- def test_change_master_key(self):
- key = KEY_LENGTH * b'\x01'
- masterkey2 = MasterKey(master_key=key)
- self.keylist.change_master_key(masterkey2)
- self.assertEqual(self.keylist.master_key.master_key, key)
+ def test_load_of_modified_database_raises_critical_error(self):
+ self.keylist.store_keys()
+
+ # Test reading works normally
+ self.assertIsInstance(KeyList(self.master_key, self.settings), KeyList)
+
+ # Test loading of the tampered database raises CriticalError
+ tamper_file(self.file_name, tamper_size=1)
+ with self.assertRaises(SystemExit):
+ KeyList(self.master_key, self.settings)
def test_generate_dummy_keyset(self):
dummy_keyset = self.keylist.generate_dummy_keyset()
self.assertEqual(len(dummy_keyset.serialize_k()), KEYSET_LENGTH)
self.assertIsInstance(dummy_keyset, KeySet)
+ def test_dummy_keysets(self):
+ dummies = self.keylist._dummy_keysets()
+ self.assertEqual(len(dummies), (self.settings.max_number_of_contacts+1) - len(self.full_contact_list))
+ for c in dummies:
+ self.assertIsInstance(c, KeySet)
+
+ def test_add_keyset(self):
+ new_key = bytes(SYMMETRIC_KEY_LENGTH)
+ self.keylist.keysets = [create_keyset(LOCAL_ID)]
+
+ # Check that KeySet exists and that its keys are different
+ self.assertNotEqual(self.keylist.keysets[0].rx_hk, new_key)
+
+ # Replace existing KeySet
+ self.assertIsNone(self.keylist.add_keyset(LOCAL_PUBKEY,
+ new_key, new_key,
+ new_key, new_key))
+
+ # Check that new KeySet replaced the old one
+ self.assertEqual(self.keylist.keysets[0].onion_pub_key, LOCAL_PUBKEY)
+ self.assertEqual(self.keylist.keysets[0].rx_hk, new_key)
+
+ def test_remove_keyset(self):
+ # Test KeySet for Bob exists
+ self.assertTrue(self.keylist.has_keyset(nick_to_pub_key('Bob')))
+
+ # Remove KeySet for Bob
+ self.assertIsNone(self.keylist.remove_keyset(nick_to_pub_key('Bob')))
+
+ # Test KeySet was removed
+ self.assertFalse(self.keylist.has_keyset(nick_to_pub_key('Bob')))
+
+ def test_change_master_key(self):
+ key = SYMMETRIC_KEY_LENGTH * b'\x01'
+ master_key2 = MasterKey(master_key=key)
+
+ # Test that new key is different from existing one
+ self.assertNotEqual(key, self.master_key.master_key)
+
+ # Change master key
+ self.assertIsNone(self.keylist.change_master_key(master_key2))
+
+ # Test that master key has changed
+ self.assertEqual(self.keylist.master_key.master_key, key)
+
+ # Test that loading of the database with new key succeeds
+ self.assertIsInstance(KeyList(master_key2, self.settings), KeyList)
+
+ def test_update_database(self):
+ self.assertEqual(os.path.getsize(self.file_name), 9016)
+ self.assertIsNone(self.keylist.manage(KDB_UPDATE_SIZE_HEADER, Settings(max_number_of_contacts=100)))
+ self.assertEqual(os.path.getsize(self.file_name), 17816)
+ self.assertEqual(self.keylist.settings.max_number_of_contacts, 100)
+
def test_get_keyset(self):
- keyset = self.keylist.get_keyset('alice@jabber.org')
+ keyset = self.keylist.get_keyset(nick_to_pub_key('Alice'))
self.assertIsInstance(keyset, KeySet)
- def test_has_local_key_and_add_keyset(self):
- self.assertFalse(self.keylist.has_local_key())
- self.assertIsNone(self.keylist.add_keyset(LOCAL_ID,
- bytes(KEY_LENGTH), bytes(KEY_LENGTH),
- bytes(KEY_LENGTH), bytes(KEY_LENGTH)))
- self.assertIsNone(self.keylist.add_keyset(LOCAL_ID,
- bytes(KEY_LENGTH), bytes(KEY_LENGTH),
- bytes(KEY_LENGTH), bytes(KEY_LENGTH)))
- self.assertTrue(self.keylist.has_local_key())
+ def test_get_list_of_pub_keys(self):
+ self.assertEqual(self.keylist.get_list_of_pub_keys(),
+ [nick_to_pub_key("Alice"),
+ nick_to_pub_key("Bob"),
+ nick_to_pub_key("Charlie")])
- def test_has_keyset_and_remove_keyset(self):
- self.assertTrue(self.keylist.has_keyset('bob@jabber.org'))
- self.assertIsNone(self.keylist.remove_keyset('bob@jabber.org'))
- self.assertFalse(self.keylist.has_keyset('bob@jabber.org'))
+ def test_has_keyset(self):
+ self.keylist.keysets = []
+ self.assertFalse(self.keylist.has_keyset(nick_to_pub_key("Alice")))
- def test_has_rx_key(self):
- self.assertTrue(self.keylist.has_rx_key('bob@jabber.org'))
- self.keylist.get_keyset('bob@jabber.org').rx_key = bytes(KEY_LENGTH)
- self.keylist.get_keyset('bob@jabber.org').rx_hek = bytes(KEY_LENGTH)
- self.assertFalse(self.keylist.has_rx_key('bob@jabber.org'))
+ self.keylist.keysets = [create_keyset('Alice')]
+ self.assertTrue(self.keylist.has_keyset(nick_to_pub_key("Alice")))
- def test_manage_keylist(self):
- self.assertFalse(self.keylist.has_keyset('david@jabber.org'))
- self.assertIsNone(self.keylist.manage(KDB_ADD_ENTRY_HEADER, 'david@jabber.org',
- bytes(KEY_LENGTH), bytes(KEY_LENGTH),
- bytes(KEY_LENGTH), bytes(KEY_LENGTH)))
- self.assertTrue(self.keylist.has_keyset('david@jabber.org'))
+ def test_has_rx_mk(self):
+ self.assertTrue(self.keylist.has_rx_mk(nick_to_pub_key('Bob')))
+ self.keylist.get_keyset(nick_to_pub_key('Bob')).rx_mk = bytes(SYMMETRIC_KEY_LENGTH)
+ self.keylist.get_keyset(nick_to_pub_key('Bob')).rx_hk = bytes(SYMMETRIC_KEY_LENGTH)
+ self.assertFalse(self.keylist.has_rx_mk(nick_to_pub_key('Bob')))
- self.assertIsNone(self.keylist.manage(KDB_REMOVE_ENTRY_HEADER, 'david@jabber.org'))
- self.assertFalse(self.keylist.has_keyset('david@jabber.org'))
+ def test_has_local_keyset(self):
+ self.keylist.keysets = []
+ self.assertFalse(self.keylist.has_local_keyset())
- self.assertIsNone(self.keylist.manage(KDB_CHANGE_MASTER_KEY_HEADER, MasterKey(master_key=KEY_LENGTH * b'\x01')))
- self.assertEqual(self.keylist.master_key.master_key, KEY_LENGTH * b'\x01')
+ self.assertIsNone(self.keylist.add_keyset(LOCAL_PUBKEY,
+ bytes(SYMMETRIC_KEY_LENGTH), bytes(SYMMETRIC_KEY_LENGTH),
+ bytes(SYMMETRIC_KEY_LENGTH), bytes(SYMMETRIC_KEY_LENGTH)))
+ self.assertTrue(self.keylist.has_local_keyset())
+ def test_manage(self):
+ # Test that KeySet for David does not exist
+ self.assertFalse(self.keylist.has_keyset(nick_to_pub_key('David')))
+
+ # Test adding KeySet
+ self.assertIsNone(self.keylist.manage(KDB_ADD_ENTRY_HEADER, nick_to_pub_key('David'),
+ bytes(SYMMETRIC_KEY_LENGTH), bytes(SYMMETRIC_KEY_LENGTH),
+ bytes(SYMMETRIC_KEY_LENGTH), bytes(SYMMETRIC_KEY_LENGTH)))
+ self.assertTrue(self.keylist.has_keyset(nick_to_pub_key('David')))
+
+ # Test removing KeySet
+ self.assertIsNone(self.keylist.manage(KDB_REMOVE_ENTRY_HEADER, nick_to_pub_key('David')))
+ self.assertFalse(self.keylist.has_keyset(nick_to_pub_key('David')))
+
+ # Test changing master key
+ new_key = SYMMETRIC_KEY_LENGTH * b'\x01'
+
+ self.assertNotEqual(self.master_key.master_key, new_key)
+ self.assertIsNone(self.keylist.manage(KDB_CHANGE_MASTER_KEY_HEADER, MasterKey(master_key=new_key)))
+ self.assertEqual(self.keylist.master_key.master_key, new_key)
+
+ # Test updating key_database with new settings changes database size.
+ self.assertEqual(os.path.getsize(self.file_name), 9016)
+ self.assertIsNone(self.keylist.manage(KDB_UPDATE_SIZE_HEADER, Settings(max_number_of_contacts=100)))
+ self.assertEqual(os.path.getsize(self.file_name), 17816)
+
+ # Test invalid KeyList management command raises Critical Error
with self.assertRaises(SystemExit):
self.keylist.manage('invalid_key', None)
diff --git a/tests/common/test_db_logs.py b/tests/common/test_db_logs.py
index b4db5cc..9e5087b 100644
--- a/tests/common/test_db_logs.py
+++ b/tests/common/test_db_logs.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,138 +16,219 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see <http://www.gnu.org/licenses/>.
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
-import binascii
+import os
import os.path
-import time
-import struct
import threading
+import time
import unittest
-from datetime import datetime
-from multiprocessing import Queue
+from unittest import mock
from src.common.db_contacts import ContactList
-from src.common.db_logs import access_logs, log_writer_loop, re_encrypt, remove_logs, write_log_entry
+from src.common.db_logs import access_logs, change_log_db_key, log_writer_loop, remove_logs, write_log_entry
+from src.common.encoding import bytes_to_timestamp
from src.common.statics import *
from tests.mock_classes import create_contact, GroupList, MasterKey, RxWindow, Settings
-from tests.utils import assembly_packet_creator, cleanup, ignored, TFCTestCase
+from tests.utils import assembly_packet_creator, cd_unittest, cleanup, group_name_to_group_id, nick_to_pub_key
+from tests.utils import nick_to_short_address, tear_queues, TFCTestCase, gen_queue_dict
+
+TIMESTAMP_BYTES = bytes.fromhex('08ceae02')
+STATIC_TIMESTAMP = bytes_to_timestamp(TIMESTAMP_BYTES).strftime('%H:%M:%S.%f')[:-TIMESTAMP_LENGTH]
class TestLogWriterLoop(unittest.TestCase):
+ def setUp(self):
+ self.unittest_dir = cd_unittest()
+
def tearDown(self):
- cleanup()
+ cleanup(self.unittest_dir)
def test_function_logs_normal_data(self):
# Setup
settings = Settings()
master_key = MasterKey()
- queues = {LOG_PACKET_QUEUE: Queue(),
- UNITTEST_QUEUE: Queue()}
+ queues = gen_queue_dict()
def queue_delayer():
"""Place messages to queue one at a time."""
- for p in [(False, False, M_S_HEADER + bytes(PADDING_LEN), 'alice@jabber.org', settings, master_key), # Do not log message (boolean)
- (True, False, C_S_HEADER + bytes(PADDING_LEN), None, settings, master_key), # Do not log command
- (True, True, P_N_HEADER + bytes(PADDING_LEN), 'alice@jabber.org', settings, master_key), # Do not log noise packet
- (True, True, F_S_HEADER + bytes(PADDING_LEN), 'alice@jabber.org', settings, master_key), # Do not log file packet
- (True, False, M_S_HEADER + bytes(PADDING_LEN), 'alice@jabber.org', settings, master_key)]: # Log message (boolean)
-
- time.sleep(0.1)
+ for p in [(nick_to_pub_key('Alice'), M_S_HEADER + bytes(PADDING_LENGTH), False, False, master_key),
+ (None, C_S_HEADER + bytes(PADDING_LENGTH), True, False, master_key),
+ (nick_to_pub_key('Alice'), P_N_HEADER + bytes(PADDING_LENGTH), True, True, master_key),
+ (nick_to_pub_key('Alice'), F_S_HEADER + bytes(PADDING_LENGTH), True, True, master_key),
+ (nick_to_pub_key('Alice'), M_S_HEADER + bytes(PADDING_LENGTH), True, False, master_key)]:
queues[LOG_PACKET_QUEUE].put(p)
- time.sleep(0.1)
+ time.sleep(0.02)
+
queues[UNITTEST_QUEUE].put(EXIT)
- time.sleep(0.1)
- queues[LOG_PACKET_QUEUE].put((True, False, M_S_HEADER + bytes(PADDING_LEN), 'alice@jabber.org', settings, master_key)) # Log message (boolean)
+ time.sleep(0.02)
+
+ queues[LOG_PACKET_QUEUE].put((
+ nick_to_pub_key('Alice'), M_S_HEADER + bytes(PADDING_LENGTH), True, False, master_key))
+ time.sleep(0.02)
# Test
threading.Thread(target=queue_delayer).start()
- log_writer_loop(queues, unittest=True)
-
- self.assertEqual(os.path.getsize(f'{DIR_USER_DATA}ut_logs'), 2*LOG_ENTRY_LENGTH)
+ log_writer_loop(queues, settings, unittest=True)
+ self.assertEqual(os.path.getsize(f'{DIR_USER_DATA}{settings.software_operation}_logs'), 2*LOG_ENTRY_LENGTH)
# Teardown
- for key in queues:
- while not queues[key].empty():
- queues[key].get()
- time.sleep(0.1)
- queues[key].close()
+ tear_queues(queues)
def test_function_logs_traffic_masking_data(self):
# Setup
- settings = Settings(log_file_placeholder_data=False,
- logfile_masking=True,
- session_traffic_masking=True)
+ settings = Settings(log_file_masking=True,
+ traffic_masking=False)
master_key = MasterKey()
- queues = {LOG_PACKET_QUEUE: Queue(),
- UNITTEST_QUEUE: Queue()}
+ queues = gen_queue_dict()
+
+ queues[TRAFFIC_MASKING_QUEUE].put(True)
def queue_delayer():
"""Place messages to queue one at a time."""
- for p in [(False, False, M_S_HEADER + bytes(PADDING_LEN), 'alice@jabber.org', settings, master_key), # Do not log message (boolean)
- (True, False, C_S_HEADER + bytes(PADDING_LEN), None, settings, master_key), # Do not log command
- (True, True, F_S_HEADER + bytes(PADDING_LEN), 'alice@jabber.org', settings, master_key), # Log placeholder data
- (True, False, M_S_HEADER + bytes(PADDING_LEN), 'alice@jabber.org', settings, master_key)]: # Log message (boolean)
- time.sleep(0.1)
+ for p in [(nick_to_pub_key('Alice'), M_S_HEADER + bytes(PADDING_LENGTH), False, False, master_key),
+ (None, C_S_HEADER + bytes(PADDING_LENGTH), True, False, master_key),
+ (nick_to_pub_key('Alice'), F_S_HEADER + bytes(PADDING_LENGTH), True, True, master_key),
+ (nick_to_pub_key('Alice'), M_S_HEADER + bytes(PADDING_LENGTH), True, False, master_key)]:
queues[LOG_PACKET_QUEUE].put(p)
- time.sleep(0.1)
+ time.sleep(0.02)
+
queues[UNITTEST_QUEUE].put(EXIT)
- time.sleep(0.1)
- queues[LOG_PACKET_QUEUE].put((True, True, P_N_HEADER + bytes(PADDING_LEN), 'alice@jabber.org', settings, master_key)) # Log noise packet
+ time.sleep(0.02)
+
+ queues[LOG_PACKET_QUEUE].put(
+ (nick_to_pub_key('Alice'), P_N_HEADER + bytes(PADDING_LENGTH), True, True, master_key))
+ time.sleep(0.02)
# Test
threading.Thread(target=queue_delayer).start()
- log_writer_loop(queues, unittest=True)
-
- self.assertEqual(os.path.getsize(f'{DIR_USER_DATA}ut_logs'), 3*LOG_ENTRY_LENGTH)
+ log_writer_loop(queues, settings, unittest=True)
+ self.assertEqual(os.path.getsize(f'{DIR_USER_DATA}{settings.software_operation}_logs'), 3*LOG_ENTRY_LENGTH)
# Teardown
- for key in queues:
- while not queues[key].empty():
- queues[key].get()
- time.sleep(0.1)
- queues[key].close()
+ tear_queues(queues)
+
+ def test_function_log_file_masking_queue_controls_log_file_masking(self):
+ # Setup
+ settings = Settings(log_file_masking=False,
+ traffic_masking=True)
+ master_key = MasterKey()
+ queues = gen_queue_dict()
+
+ def queue_delayer():
+ """Place messages to queue one at a time."""
+ for p in [(None, C_S_HEADER + bytes(PADDING_LENGTH), True, False, master_key),
+ (nick_to_pub_key('Alice'), M_S_HEADER + bytes(PADDING_LENGTH), False, False, master_key),
+ (nick_to_pub_key('Alice'), F_S_HEADER + bytes(PADDING_LENGTH), True, True, master_key)]:
+
+ queues[LOG_PACKET_QUEUE].put(p)
+ time.sleep(0.02)
+
+ queues[LOGFILE_MASKING_QUEUE].put(True) # Start logging noise packets
+ time.sleep(0.02)
+
+ for _ in range(2):
+ queues[LOG_PACKET_QUEUE].put(
+ (nick_to_pub_key('Alice'), F_S_HEADER + bytes(PADDING_LENGTH), True, True, master_key))
+ time.sleep(0.02)
+
+ queues[UNITTEST_QUEUE].put(EXIT)
+ time.sleep(0.02)
+
+ queues[LOG_PACKET_QUEUE].put(
+ (nick_to_pub_key('Alice'), M_S_HEADER + bytes(PADDING_LENGTH), True, False, master_key))
+ time.sleep(0.02)
+
+ # Test
+ threading.Thread(target=queue_delayer).start()
+ log_writer_loop(queues, settings, unittest=True)
+ self.assertEqual(os.path.getsize(f'{DIR_USER_DATA}{settings.software_operation}_logs'), 3*LOG_ENTRY_LENGTH)
+
+ # Teardown
+ tear_queues(queues)
+
+ def test_function_allows_control_of_noise_packets_based_on_log_setting_queue(self):
+ # Setup
+ settings = Settings(log_file_masking=True,
+ traffic_masking=True)
+ master_key = MasterKey()
+ queues = gen_queue_dict()
+
+ noise_tuple = (nick_to_pub_key('Alice'), P_N_HEADER + bytes(PADDING_LENGTH), True, True, master_key)
+
+ def queue_delayer():
+ """Place packets to log into queue after delay."""
+ for _ in range(5):
+ queues[LOG_PACKET_QUEUE].put(noise_tuple) # Not logged because logging_state is False by default
+ time.sleep(0.02)
+
+ queues[LOG_SETTING_QUEUE].put(True)
+ for _ in range(2):
+ queues[LOG_PACKET_QUEUE].put(noise_tuple) # Log two packets
+ time.sleep(0.02)
+
+ queues[LOG_SETTING_QUEUE].put(False)
+ for _ in range(3):
+ queues[LOG_PACKET_QUEUE].put(noise_tuple) # Not logged because logging_state is False
+ time.sleep(0.02)
+
+ queues[UNITTEST_QUEUE].put(EXIT)
+
+ queues[LOG_SETTING_QUEUE].put(True)
+ queues[LOG_PACKET_QUEUE].put(noise_tuple) # Log third packet
+
+ # Test
+ threading.Thread(target=queue_delayer).start()
+
+ log_writer_loop(queues, settings, unittest=True)
+ self.assertEqual(os.path.getsize(f'{DIR_USER_DATA}{settings.software_operation}_logs'), 3*LOG_ENTRY_LENGTH)
+
+ # Teardown
+ tear_queues(queues)
class TestWriteLogEntry(unittest.TestCase):
def setUp(self):
- self.masterkey = MasterKey()
- self.settings = Settings()
+ self.unittest_dir = cd_unittest()
+ self.master_key = MasterKey()
+ self.settings = Settings()
+ self.log_file = f'{DIR_USER_DATA}{self.settings.software_operation}_logs'
def tearDown(self):
- cleanup()
+ cleanup(self.unittest_dir)
def test_log_entry_is_concatenated(self):
- self.assertIsNone(write_log_entry(F_S_HEADER + bytes(PADDING_LEN), 'alice@jabber.org', self.settings, self.masterkey))
- self.assertTrue(os.path.getsize(f'{DIR_USER_DATA}ut_logs'), LOG_ENTRY_LENGTH)
-
- self.assertIsNone(write_log_entry(F_S_HEADER + bytes(PADDING_LEN), 'alice@jabber.org', self.settings, self.masterkey))
- self.assertTrue(os.path.getsize(f'{DIR_USER_DATA}ut_logs'), 2*LOG_ENTRY_LENGTH)
+ for i in range(5):
+ assembly_p = F_S_HEADER + bytes(PADDING_LENGTH)
+ self.assertIsNone(write_log_entry(assembly_p, nick_to_pub_key('Alice'), self.settings, self.master_key))
+ self.assertTrue(os.path.getsize(self.log_file), (i+1)*LOG_ENTRY_LENGTH)
class TestAccessHistoryAndPrintLogs(TFCTestCase):
def setUp(self):
- self.masterkey = MasterKey()
- self.settings = Settings()
- self.window = RxWindow(type=WIN_TYPE_CONTACT, uid='alice@jabber.org', name='Alice')
+ self.unittest_dir = cd_unittest()
+ self.master_key = MasterKey()
+ self.settings = Settings()
+ self.window = RxWindow(type=WIN_TYPE_CONTACT,
+ uid=nick_to_pub_key('Alice'),
+ name='Alice',
+ type_print='contact')
- self.contact_list = ContactList(self.masterkey, self.settings)
+ self.contact_list = ContactList(self.master_key, self.settings)
self.contact_list.contacts = list(map(create_contact, ['Alice', 'Charlie']))
- self.time = datetime.fromtimestamp(struct.unpack('
+
-"""), access_logs, self.window, self.contact_list, self.group_list, self.settings, self.masterkey)
+"""), access_logs, *self.args)
- def test_export_short_private_message(self):
+ @mock.patch('struct.pack', return_value=TIMESTAMP_BYTES)
+ def test_export_short_private_message(self, _):
# Setup
- for p in assembly_packet_creator(MESSAGE, b'Hi Bob'):
- write_log_entry(p, 'alice@jabber.org', self.settings, self.masterkey, origin=ORIGIN_CONTACT_HEADER)
- for p in assembly_packet_creator(MESSAGE, b'Hi Alice'):
- write_log_entry(p, 'alice@jabber.org', self.settings, self.masterkey)
+ # Test title displayed by the Receiver program.
+ self.settings.software_operation = RX
+
+ # Add a message from contact Alice to user (Bob).
+ for p in assembly_packet_creator(MESSAGE, 'Hi Bob'):
+ write_log_entry(p, nick_to_pub_key('Alice'), self.settings, self.master_key, origin=ORIGIN_CONTACT_HEADER)
+
+ # Add a message from user (Bob) to Alice.
+ for p in assembly_packet_creator(MESSAGE, 'Hi Alice'):
+ write_log_entry(p, nick_to_pub_key('Alice'), self.settings, self.master_key)
# Test
- self.assertIsNone(access_logs(self.window, self.contact_list, self.group_list, self.settings, self.masterkey, export=True))
+ self.assertIsNone(access_logs(*self.args, export=True))
- with open("UtM - Plaintext log (Alice)") as f:
- exported_log = f.read()
- self.assertEqual(exported_log, f"""\
-Logfile of messages to/from Alice
+ with open("Receiver - Plaintext log (Alice)") as f:
+ self.assertEqual(f.read(), f"""\
+Log file of message(s) to/from contact Alice
════════════════════════════════════════════════════════════════════════════════
{self.time} Alice: Hi Bob
{self.time} Me: Hi Alice
-
+
""")
- def test_long_private_message(self):
+ @mock.patch('struct.pack', return_value=TIMESTAMP_BYTES)
+ def test_long_private_message(self, _):
# Setup
- # Add an assembly packet sequence for contact containing cancel packet that the function should skip
+ # Add an assembly packet sequence sent to contact Alice containing cancel packet. access_logs should skip this.
packets = assembly_packet_creator(MESSAGE, self.msg)
- packets = packets[2:] + [M_C_HEADER + bytes(PADDING_LEN)]
+ packets = packets[2:] + [M_C_HEADER + bytes(PADDING_LENGTH)]
for p in packets:
- write_log_entry(p, 'alice@jabber.org', self.settings, self.masterkey)
+ write_log_entry(p, nick_to_pub_key('Alice'), self.settings, self.master_key)
- # Add an orphaned 'append' assembly packet that the function should skip
- write_log_entry(M_A_HEADER + bytes(PADDING_LEN), 'alice@jabber.org', self.settings, self.masterkey)
+ # Add an orphaned 'append' assembly packet the function should skip.
+ write_log_entry(M_A_HEADER + bytes(PADDING_LENGTH), nick_to_pub_key('Alice'), self.settings, self.master_key)
- # Add a group message that the function should skip
- for p in assembly_packet_creator(MESSAGE, b'This is a short message', group_name='test_group'):
- write_log_entry(p, 'alice@jabber.org', self.settings, self.masterkey)
+ # Add a group message for a different group the function should skip.
+ for p in assembly_packet_creator(MESSAGE, 'This is a short message', group_id=GROUP_ID_LENGTH * b'1'):
+ write_log_entry(p, nick_to_pub_key('Alice'), self.settings, self.master_key)
- # Add normal messages for contact and user that should be displayed
+ # Add a message from contact Alice to user (Bob).
for p in assembly_packet_creator(MESSAGE, self.msg):
- write_log_entry(p, 'alice@jabber.org', self.settings, self.masterkey, origin=ORIGIN_CONTACT_HEADER)
+ write_log_entry(p, nick_to_pub_key('Alice'), self.settings, self.master_key, origin=ORIGIN_CONTACT_HEADER)
+
+ # Add a message from user (Bob) to Alice.
for p in assembly_packet_creator(MESSAGE, self.msg):
- write_log_entry(p, 'alice@jabber.org', self.settings, self.masterkey)
+ write_log_entry(p, nick_to_pub_key('Alice'), self.settings, self.master_key)
# Test
- self.assertPrints((CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER + f"""\
-Logfile of messages to/from Alice
+ self.assert_prints((CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER + f"""\
+Log file of message(s) sent to contact Alice
════════════════════════════════════════════════════════════════════════════════
-{self.time} Alice: Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean
- condimentum consectetur purus quis dapibus. Fusce venenatis lacus
- ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed bibendum
- velit maximus in. Aliquam ac metus risus. Sed cursus ornare luctus.
- Integer aliquet lectus id massa blandit imperdiet. Ut sed massa
- eget quam facilisis rutrum. Mauris eget luctus nisl. Sed ut elit
- iaculis, faucibus lacus eget, sodales magna. Nunc sed commodo arcu.
- In hac habitasse platea dictumst. Integer luctus aliquam justo, at
- vestibulum dolor iaculis ac. Etiam laoreet est eget odio rutrum,
- vel malesuada lorem rhoncus. Cras finibus in neque eu euismod.
- Nulla facilisi. Nunc nec aliquam quam, quis ullamcorper leo. Nunc
- egestas lectus eget est porttitor, in iaculis felis scelerisque. In
- sem elit, fringilla id viverra commodo, sagittis varius purus.
- Pellentesque rutrum lobortis neque a facilisis. Mauris id tortor
- placerat, aliquam dolor ac, venenatis arcu.
-{self.time} Me: Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean
- condimentum consectetur purus quis dapibus. Fusce venenatis lacus
- ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed bibendum
- velit maximus in. Aliquam ac metus risus. Sed cursus ornare luctus.
- Integer aliquet lectus id massa blandit imperdiet. Ut sed massa
- eget quam facilisis rutrum. Mauris eget luctus nisl. Sed ut elit
- iaculis, faucibus lacus eget, sodales magna. Nunc sed commodo arcu.
- In hac habitasse platea dictumst. Integer luctus aliquam justo, at
- vestibulum dolor iaculis ac. Etiam laoreet est eget odio rutrum,
- vel malesuada lorem rhoncus. Cras finibus in neque eu euismod.
- Nulla facilisi. Nunc nec aliquam quam, quis ullamcorper leo. Nunc
- egestas lectus eget est porttitor, in iaculis felis scelerisque. In
- sem elit, fringilla id viverra commodo, sagittis varius purus.
- Pellentesque rutrum lobortis neque a facilisis. Mauris id tortor
- placerat, aliquam dolor ac, venenatis arcu.
-
+{self.time} Alice: Lorem ipsum dolor sit amet, consectetur adipiscing elit.
+ Aenean condimentum consectetur purus quis dapibus. Fusce
+ venenatis lacus ut rhoncus faucibus. Cras sollicitudin
+ commodo sapien, sed bibendum velit maximus in. Aliquam ac
+ metus risus. Sed cursus ornare luctus. Integer aliquet lectus
+ id massa blandit imperdiet. Ut sed massa eget quam facilisis
+ rutrum. Mauris eget luctus nisl. Sed ut elit iaculis,
+ faucibus lacus eget, sodales magna. Nunc sed commodo arcu. In
+ hac habitasse platea dictumst. Integer luctus aliquam justo,
+ at vestibulum dolor iaculis ac. Etiam laoreet est eget odio
+ rutrum, vel malesuada lorem rhoncus. Cras finibus in neque eu
+ euismod. Nulla facilisi. Nunc nec aliquam quam, quis
+ ullamcorper leo. Nunc egestas lectus eget est porttitor, in
+ iaculis felis scelerisque. In sem elit, fringilla id viverra
+ commodo, sagittis varius purus. Pellentesque rutrum lobortis
+ neque a facilisis. Mauris id tortor placerat, aliquam dolor
+ ac, venenatis arcu.
+{self.time} Me: Lorem ipsum dolor sit amet, consectetur adipiscing elit.
+ Aenean condimentum consectetur purus quis dapibus. Fusce
+ venenatis lacus ut rhoncus faucibus. Cras sollicitudin
+ commodo sapien, sed bibendum velit maximus in. Aliquam ac
+ metus risus. Sed cursus ornare luctus. Integer aliquet lectus
+ id massa blandit imperdiet. Ut sed massa eget quam facilisis
+ rutrum. Mauris eget luctus nisl. Sed ut elit iaculis,
+ faucibus lacus eget, sodales magna. Nunc sed commodo arcu. In
+ hac habitasse platea dictumst. Integer luctus aliquam justo,
+ at vestibulum dolor iaculis ac. Etiam laoreet est eget odio
+ rutrum, vel malesuada lorem rhoncus. Cras finibus in neque eu
+ euismod. Nulla facilisi. Nunc nec aliquam quam, quis
+ ullamcorper leo. Nunc egestas lectus eget est porttitor, in
+ iaculis felis scelerisque. In sem elit, fringilla id viverra
+ commodo, sagittis varius purus. Pellentesque rutrum lobortis
+ neque a facilisis. Mauris id tortor placerat, aliquam dolor
+ ac, venenatis arcu.
+
-"""), access_logs, self.window, self.contact_list, self.group_list, self.settings, self.masterkey)
+"""), access_logs, *self.args)
- def test_short_group_message(self):
+ @mock.patch('struct.pack', return_value=TIMESTAMP_BYTES)
+ def test_short_group_message(self, _):
# Setup
- self.window = RxWindow(type=WIN_TYPE_GROUP, uid='test_group', name='test_group')
+ self.window = RxWindow(type=WIN_TYPE_GROUP,
+ uid=group_name_to_group_id('test_group'),
+ name='test_group',
+ group=self.group,
+ type_print='group',
+ group_list=self.group_list)
- for p in assembly_packet_creator(MESSAGE, b'This is a short message', group_name='test_group'):
- write_log_entry(p, 'alice@jabber.org', self.settings, self.masterkey)
- write_log_entry(p, 'alice@jabber.org', self.settings, self.masterkey, origin=ORIGIN_CONTACT_HEADER)
- write_log_entry(p, 'charlie@jabber.org', self.settings, self.masterkey)
- write_log_entry(p, 'charlie@jabber.org', self.settings, self.masterkey, origin=ORIGIN_CONTACT_HEADER)
+ # Add messages to Alice and Charlie. Add duplicate of outgoing message that should be skipped by access_logs.
+ for p in assembly_packet_creator(MESSAGE, 'This is a short message', group_id=self.window.uid):
+ write_log_entry(p, nick_to_pub_key('Alice'), self.settings, self.master_key)
+ write_log_entry(p, nick_to_pub_key('Alice'), self.settings, self.master_key, origin=ORIGIN_CONTACT_HEADER)
+ write_log_entry(p, nick_to_pub_key('Charlie'), self.settings, self.master_key)
+ write_log_entry(p, nick_to_pub_key('Charlie'), self.settings, self.master_key, origin=ORIGIN_CONTACT_HEADER)
# Test
- self.assertPrints((CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER + f"""\
-Logfile of messages to/from test_group
+ self.assert_prints((CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER + f"""\
+Log file of message(s) sent to group test_group
════════════════════════════════════════════════════════════════════════════════
{self.time} Me: This is a short message
{self.time} Alice: This is a short message
{self.time} Charlie: This is a short message
-
+
-"""), access_logs, self.window, self.contact_list, self.group_list, self.settings, self.masterkey)
+"""), access_logs, self.window, self.contact_list, self.group_list, self.settings, self.master_key)
- def test_long_group_message(self):
+ @mock.patch('struct.pack', return_value=TIMESTAMP_BYTES)
+ def test_long_group_message(self, _):
# Setup
- self.window = RxWindow(type=WIN_TYPE_GROUP, uid='test_group', name='test_group')
+ # Test title displayed by the Receiver program.
+ self.settings.software_operation = RX
- # Add an assembly packet sequence for contact containing cancel packet that the function should skip
- packets = assembly_packet_creator(MESSAGE, self.msg)
- packets = packets[2:] + [M_C_HEADER + bytes(PADDING_LEN)]
+ self.window = RxWindow(type=WIN_TYPE_GROUP,
+ uid=group_name_to_group_id('test_group'),
+ name='test_group',
+ group=self.group,
+ type_print='group')
+
+ # Add an assembly packet sequence sent to contact Alice in group containing cancel packet.
+ # Access_logs should skip this.
+ packets = assembly_packet_creator(MESSAGE, self.msg, group_id=group_name_to_group_id('test_group'))
+ packets = packets[2:] + [M_C_HEADER + bytes(PADDING_LENGTH)]
for p in packets:
- write_log_entry(p, 'alice@jabber.org', self.settings, self.masterkey)
+ write_log_entry(p, nick_to_pub_key('Alice'), self.settings, self.master_key)
- # Add an orphaned 'append' assembly packet that the function should skip
- write_log_entry(M_A_HEADER + bytes(PADDING_LEN), 'alice@jabber.org', self.settings, self.masterkey)
+ # Add an orphaned 'append' assembly packet. access_logs should skip this.
+ write_log_entry(M_A_HEADER + bytes(PADDING_LENGTH), nick_to_pub_key('Alice'), self.settings, self.master_key)
- # Add a private message that the function should skip
- for p in assembly_packet_creator(MESSAGE, b'This is a short message'):
- write_log_entry(p, 'alice@jabber.org', self.settings, self.masterkey)
+ # Add a private message. access_logs should skip this.
+ for p in assembly_packet_creator(MESSAGE, 'This is a short private message'):
+ write_log_entry(p, nick_to_pub_key('Alice'), self.settings, self.master_key)
- # Add a group management message that the function should skip
- message = US_BYTE.join([b'test_group', b'alice@jabber.org'])
- for p in assembly_packet_creator(MESSAGE, message, header=GROUP_MSG_INVITEJOIN_HEADER):
- write_log_entry(p, 'alice@jabber.org', self.settings, self.masterkey)
+ # Add a group message for a different group. access_logs should skip this.
+ for p in assembly_packet_creator(MESSAGE, 'This is a short group message', group_id=GROUP_ID_LENGTH * b'1'):
+ write_log_entry(p, nick_to_pub_key('Alice'), self.settings, self.master_key)
- # Add a group message that the function should skip
- for p in assembly_packet_creator(MESSAGE, b'This is a short message', group_name='different_group'):
- write_log_entry(p, 'alice@jabber.org', self.settings, self.masterkey)
-
- for p in assembly_packet_creator(MESSAGE, self.msg, group_name='test_group'):
- write_log_entry(p, 'alice@jabber.org', self.settings, self.masterkey)
- write_log_entry(p, 'alice@jabber.org', self.settings, self.masterkey, origin=ORIGIN_CONTACT_HEADER)
- write_log_entry(p, 'charlie@jabber.org', self.settings, self.masterkey)
- write_log_entry(p, 'charlie@jabber.org', self.settings, self.masterkey, origin=ORIGIN_CONTACT_HEADER)
+ # Add messages to Alice and Charlie in group.
+ # Add duplicate of outgoing message that should be skipped by access_logs.
+ for p in assembly_packet_creator(MESSAGE, self.msg, group_id=group_name_to_group_id('test_group')):
+ write_log_entry(p, nick_to_pub_key('Alice'), self.settings, self.master_key)
+ write_log_entry(p, nick_to_pub_key('Alice'), self.settings, self.master_key, origin=ORIGIN_CONTACT_HEADER)
+ write_log_entry(p, nick_to_pub_key('Charlie'), self.settings, self.master_key)
+ write_log_entry(p, nick_to_pub_key('Charlie'), self.settings, self.master_key, origin=ORIGIN_CONTACT_HEADER)
# Test
- access_logs(self.window, self.contact_list, self.group_list, self.settings, self.masterkey)
- self.assertPrints((CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER + f"""\
-Logfile of messages to/from test_group
+ self.assert_prints((CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER + f"""\
+Log file of message(s) to/from group test_group
════════════════════════════════════════════════════════════════════════════════
-{self.time} Me: Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean
- condimentum consectetur purus quis dapibus. Fusce venenatis lacus
- ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed
- bibendum velit maximus in. Aliquam ac metus risus. Sed cursus
- ornare luctus. Integer aliquet lectus id massa blandit imperdiet.
- Ut sed massa eget quam facilisis rutrum. Mauris eget luctus nisl.
- Sed ut elit iaculis, faucibus lacus eget, sodales magna. Nunc sed
- commodo arcu. In hac habitasse platea dictumst. Integer luctus
- aliquam justo, at vestibulum dolor iaculis ac. Etiam laoreet est
- eget odio rutrum, vel malesuada lorem rhoncus. Cras finibus in
- neque eu euismod. Nulla facilisi. Nunc nec aliquam quam, quis
- ullamcorper leo. Nunc egestas lectus eget est porttitor, in
- iaculis felis scelerisque. In sem elit, fringilla id viverra
- commodo, sagittis varius purus. Pellentesque rutrum lobortis
- neque a facilisis. Mauris id tortor placerat, aliquam dolor ac,
- venenatis arcu.
-{self.time} Alice: Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean
- condimentum consectetur purus quis dapibus. Fusce venenatis lacus
- ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed
- bibendum velit maximus in. Aliquam ac metus risus. Sed cursus
- ornare luctus. Integer aliquet lectus id massa blandit imperdiet.
- Ut sed massa eget quam facilisis rutrum. Mauris eget luctus nisl.
- Sed ut elit iaculis, faucibus lacus eget, sodales magna. Nunc sed
- commodo arcu. In hac habitasse platea dictumst. Integer luctus
- aliquam justo, at vestibulum dolor iaculis ac. Etiam laoreet est
- eget odio rutrum, vel malesuada lorem rhoncus. Cras finibus in
- neque eu euismod. Nulla facilisi. Nunc nec aliquam quam, quis
- ullamcorper leo. Nunc egestas lectus eget est porttitor, in
- iaculis felis scelerisque. In sem elit, fringilla id viverra
- commodo, sagittis varius purus. Pellentesque rutrum lobortis
- neque a facilisis. Mauris id tortor placerat, aliquam dolor ac,
- venenatis arcu.
-{self.time} Charlie: Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean
- condimentum consectetur purus quis dapibus. Fusce venenatis lacus
- ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed
- bibendum velit maximus in. Aliquam ac metus risus. Sed cursus
- ornare luctus. Integer aliquet lectus id massa blandit imperdiet.
- Ut sed massa eget quam facilisis rutrum. Mauris eget luctus nisl.
- Sed ut elit iaculis, faucibus lacus eget, sodales magna. Nunc sed
- commodo arcu. In hac habitasse platea dictumst. Integer luctus
- aliquam justo, at vestibulum dolor iaculis ac. Etiam laoreet est
- eget odio rutrum, vel malesuada lorem rhoncus. Cras finibus in
- neque eu euismod. Nulla facilisi. Nunc nec aliquam quam, quis
- ullamcorper leo. Nunc egestas lectus eget est porttitor, in
- iaculis felis scelerisque. In sem elit, fringilla id viverra
- commodo, sagittis varius purus. Pellentesque rutrum lobortis
- neque a facilisis. Mauris id tortor placerat, aliquam dolor ac,
- venenatis arcu.
-
+{self.time} Me: Lorem ipsum dolor sit amet, consectetur adipiscing elit.
+ Aenean condimentum consectetur purus quis dapibus. Fusce
+ venenatis lacus ut rhoncus faucibus. Cras sollicitudin
+ commodo sapien, sed bibendum velit maximus in. Aliquam ac
+ metus risus. Sed cursus ornare luctus. Integer aliquet
+ lectus id massa blandit imperdiet. Ut sed massa eget quam
+ facilisis rutrum. Mauris eget luctus nisl. Sed ut elit
+ iaculis, faucibus lacus eget, sodales magna. Nunc sed
+ commodo arcu. In hac habitasse platea dictumst. Integer
+ luctus aliquam justo, at vestibulum dolor iaculis ac. Etiam
+ laoreet est eget odio rutrum, vel malesuada lorem rhoncus.
+ Cras finibus in neque eu euismod. Nulla facilisi. Nunc nec
+ aliquam quam, quis ullamcorper leo. Nunc egestas lectus
+ eget est porttitor, in iaculis felis scelerisque. In sem
+ elit, fringilla id viverra commodo, sagittis varius purus.
+ Pellentesque rutrum lobortis neque a facilisis. Mauris id
+ tortor placerat, aliquam dolor ac, venenatis arcu.
+{self.time} Alice: Lorem ipsum dolor sit amet, consectetur adipiscing elit.
+ Aenean condimentum consectetur purus quis dapibus. Fusce
+ venenatis lacus ut rhoncus faucibus. Cras sollicitudin
+ commodo sapien, sed bibendum velit maximus in. Aliquam ac
+ metus risus. Sed cursus ornare luctus. Integer aliquet
+ lectus id massa blandit imperdiet. Ut sed massa eget quam
+ facilisis rutrum. Mauris eget luctus nisl. Sed ut elit
+ iaculis, faucibus lacus eget, sodales magna. Nunc sed
+ commodo arcu. In hac habitasse platea dictumst. Integer
+ luctus aliquam justo, at vestibulum dolor iaculis ac. Etiam
+ laoreet est eget odio rutrum, vel malesuada lorem rhoncus.
+ Cras finibus in neque eu euismod. Nulla facilisi. Nunc nec
+ aliquam quam, quis ullamcorper leo. Nunc egestas lectus
+ eget est porttitor, in iaculis felis scelerisque. In sem
+ elit, fringilla id viverra commodo, sagittis varius purus.
+ Pellentesque rutrum lobortis neque a facilisis. Mauris id
+ tortor placerat, aliquam dolor ac, venenatis arcu.
+{self.time} Charlie: Lorem ipsum dolor sit amet, consectetur adipiscing elit.
+ Aenean condimentum consectetur purus quis dapibus. Fusce
+ venenatis lacus ut rhoncus faucibus. Cras sollicitudin
+ commodo sapien, sed bibendum velit maximus in. Aliquam ac
+ metus risus. Sed cursus ornare luctus. Integer aliquet
+ lectus id massa blandit imperdiet. Ut sed massa eget quam
+ facilisis rutrum. Mauris eget luctus nisl. Sed ut elit
+ iaculis, faucibus lacus eget, sodales magna. Nunc sed
+ commodo arcu. In hac habitasse platea dictumst. Integer
+ luctus aliquam justo, at vestibulum dolor iaculis ac. Etiam
+ laoreet est eget odio rutrum, vel malesuada lorem rhoncus.
+ Cras finibus in neque eu euismod. Nulla facilisi. Nunc nec
+ aliquam quam, quis ullamcorper leo. Nunc egestas lectus
+ eget est porttitor, in iaculis felis scelerisque. In sem
+ elit, fringilla id viverra commodo, sagittis varius purus.
+ Pellentesque rutrum lobortis neque a facilisis. Mauris id
+ tortor placerat, aliquam dolor ac, venenatis arcu.
+
-"""), access_logs, self.window, self.contact_list, self.group_list, self.settings, self.masterkey)
+"""), access_logs, self.window, self.contact_list, self.group_list, self.settings, self.master_key)
class TestReEncrypt(TFCTestCase):
def setUp(self):
+ self.unittest_dir = cd_unittest()
self.old_key = MasterKey()
- self.new_key = MasterKey(master_key=os.urandom(32))
+ self.new_key = MasterKey(master_key=os.urandom(SYMMETRIC_KEY_LENGTH))
self.settings = Settings()
- self.o_struct_pack = struct.pack
- self.time = datetime.fromtimestamp(struct.unpack('
+
-"""), access_logs, window, contact_list, group_list, self.settings, self.old_key)
+""")
+ self.assert_prints(message, access_logs, window, contact_list, group_list, self.settings, self.old_key)
- self.assertIsNone(re_encrypt(self.old_key.master_key, self.new_key.master_key, self.settings))
+ self.assertIsNone(change_log_db_key(self.old_key.master_key, self.new_key.master_key, self.settings))
- # Test that decryption works with new key
- self.assertPrints((CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER + f"""\
-Logfile of messages to/from Alice
-════════════════════════════════════════════════════════════════════════════════
-{self.time} Alice: This is a short message
-{self.time} Me: This is a short message
-
+ # Test that decryption with new key is identical.
+ self.assert_prints(message, access_logs, window, contact_list, group_list, self.settings, self.new_key)
-"""), access_logs, window, contact_list, group_list, self.settings, self.new_key)
-
- # Test that temp file is removed
- self.assertFalse(os.path.isfile("user_data/ut_logs_temp"))
+ # Test that temp file is removed.
+ self.assertFalse(os.path.isfile(self.tmp_file_name))
class TestRemoveLog(TFCTestCase):
def setUp(self):
- self.masterkey = MasterKey()
- self.settings = Settings()
- self.time = datetime.fromtimestamp(struct.unpack('.
+along with TFC. If not, see .
"""
-import getpass
+import os
import os.path
import unittest
+from unittest import mock
+
from src.common.db_masterkey import MasterKey
+from src.common.misc import ensure_dir
from src.common.statics import *
-from tests.utils import cleanup
+from tests.utils import cd_unittest, cleanup
class TestMasterKey(unittest.TestCase):
+ input_list = ['password', 'different_password', # Invalid new password pair
+ 'password', 'password', # Valid new password pair
+ 'invalid_password', # Invalid login password
+ 'password'] # Valid login password
def setUp(self):
- self.o_get_password = getpass.getpass
-
- input_list = ['invalid_password', 'test_password', # Invalid new password pair
- 'test_password', 'test_password', # Valid new password pair
- 'invalid_password', # Invalid login password
- 'test_password'] # Valid login password
- gen = iter(input_list)
- getpass.getpass = lambda _: str(next(gen))
+ self.unittest_dir = cd_unittest()
+ self.operation = TX
+ self.file_name = f"{DIR_USER_DATA}{self.operation}_login_data"
def tearDown(self):
- getpass.getpass = self.o_get_password
- cleanup()
+ cleanup(self.unittest_dir)
- def test_master_key_generation_and_load(self):
- masterkey = MasterKey('ut', local_test=False)
- self.assertIsInstance(masterkey.master_key, bytes)
+ @mock.patch('time.sleep', return_value=None)
+ def test_invalid_data_in_db_raises_critical_error(self, _):
+ for delta in [-1, 1]:
+ ensure_dir(DIR_USER_DATA)
+ with open(self.file_name, 'wb+') as f:
+ f.write(os.urandom(MASTERKEY_DB_SIZE + delta))
- os.path.isfile(f"{DIR_USER_DATA}ut_login_data")
- self.assertEqual(os.path.getsize(f"{DIR_USER_DATA}ut_login_data"), ARGON2_SALT_LEN + KEY_LENGTH + 3*INTEGER_SETTING_LEN)
+ with self.assertRaises(SystemExit):
+ _ = MasterKey(self.operation, local_test=False)
- masterkey = MasterKey('ut', local_test=False)
- self.assertIsInstance(masterkey.master_key, bytes)
+ @mock.patch('src.common.db_masterkey.ARGON2_MIN_MEMORY', 100)
+ @mock.patch('src.common.db_masterkey.ARGON2_ROUNDS', 1)
+ @mock.patch('src.common.db_masterkey.MIN_KEY_DERIVATION_TIME', 0.1)
+ @mock.patch('os.path.isfile', side_effect=[KeyboardInterrupt, False, True])
+ @mock.patch('getpass.getpass', side_effect=input_list)
+ @mock.patch('time.sleep', return_value=None)
+ def test_master_key_generation_and_load(self, *_):
+ with self.assertRaises(SystemExit):
+ MasterKey(self.operation, local_test=True)
+
+ master_key = MasterKey(self.operation, local_test=True)
+ self.assertIsInstance(master_key.master_key, bytes)
+ self.assertEqual(os.path.getsize(self.file_name), MASTERKEY_DB_SIZE)
+
+ master_key2 = MasterKey(self.operation, local_test=True)
+ self.assertIsInstance(master_key2.master_key, bytes)
+ self.assertEqual(master_key.master_key, master_key2.master_key)
if __name__ == '__main__':
diff --git a/tests/common/test_db_onion.py b/tests/common/test_db_onion.py
new file mode 100644
index 0000000..860ec9a
--- /dev/null
+++ b/tests/common/test_db_onion.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import os
+import unittest
+
+from unittest import mock
+
+from src.common.db_onion import OnionService
+from src.common.misc import validate_onion_addr
+from src.common.statics import *
+
+from tests.mock_classes import MasterKey
+from tests.utils import cd_unittest, cleanup, tamper_file
+
+
+class TestOnionService(unittest.TestCase):
+
+ def setUp(self):
+ self.unittest_dir = cd_unittest()
+ self.master_key = MasterKey()
+ self.file_name = f"{DIR_USER_DATA}{TX}_onion_db"
+
+ def tearDown(self):
+ cleanup(self.unittest_dir)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_onion_service_key_generation_and_load(self, _):
+ onion_service = OnionService(self.master_key)
+
+ # Test new OnionService has valid attributes
+ self.assertIsInstance(onion_service.master_key, MasterKey)
+ self.assertIsInstance(onion_service.onion_private_key, bytes)
+ self.assertIsInstance(onion_service.user_onion_address, str)
+ self.assertFalse(onion_service.is_delivered)
+ self.assertEqual(validate_onion_addr(onion_service.user_onion_address), '')
+
+ # Test data is stored to a database
+ self.assertTrue(os.path.isfile(self.file_name))
+ self.assertEqual(os.path.getsize(self.file_name),
+ XCHACHA20_NONCE_LENGTH + ONION_SERVICE_PRIVATE_KEY_LENGTH + POLY1305_TAG_LENGTH)
+
+ # Test data can be loaded from the database
+ onion_service2 = OnionService(self.master_key)
+ self.assertIsInstance(onion_service2.onion_private_key, bytes)
+ self.assertEqual(onion_service.onion_private_key, onion_service2.onion_private_key)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_load_of_modified_database_raises_critical_error(self, _):
+ # Write data to file
+ OnionService(self.master_key)
+
+ # Test reading works normally
+ self.assertIsInstance(OnionService(self.master_key), OnionService)
+
+ # Test loading of the tampered database raises CriticalError
+ tamper_file(self.file_name, tamper_size=1)
+ with self.assertRaises(SystemExit):
+ OnionService(self.master_key)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_confirmation_code_generation(self, _):
+ onion_service = OnionService(self.master_key)
+ conf_code = onion_service.conf_code
+ onion_service.new_confirmation_code()
+ self.assertNotEqual(conf_code, onion_service.conf_code)
+
+
+if __name__ == '__main__':
+ unittest.main(exit=False)
diff --git a/tests/common/test_db_settings.py b/tests/common/test_db_settings.py
index e21891c..70026d6 100644
--- a/tests/common/test_db_settings.py
+++ b/tests/common/test_db_settings.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,149 +16,157 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see <http://www.gnu.org/licenses/>.
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
-import builtins
import os.path
-import shutil
import unittest
+from unittest import mock
+
from src.common.db_settings import Settings
from src.common.statics import *
-from tests.mock_classes import create_group, ContactList, GroupList, MasterKey
-from tests.utils import cleanup, TFCTestCase
+from tests.mock_classes import ContactList, create_group, GroupList, MasterKey
+from tests.utils import cd_unittest, cleanup, tamper_file, TFCTestCase
class TestSettings(TFCTestCase):
def setUp(self):
- self.o_input = builtins.input
- builtins.input = lambda _: 'yes'
- self.masterkey = MasterKey()
- self.settings = Settings(self.masterkey, operation='ut', local_test=False, dd_sockets=False)
- self.contact_list = ContactList(nicks=['contact_{}'.format(n) for n in range(18)])
- self.group_list = GroupList(groups=['group_{}'.format(n) for n in range(18)])
- self.group_list.groups[0] = create_group('group_0', ['contact_{}'.format(n) for n in range(18)])
+ self.unittest_dir = cd_unittest()
+ self.file_name = f"{DIR_USER_DATA}{TX}_settings"
+ self.master_key = MasterKey()
+ self.settings = Settings(self.master_key, operation=TX, local_test=False)
+ self.contact_list = ContactList(nicks=[f'contact_{n}' for n in range(18)])
+ self.group_list = GroupList(groups=[f'group_{n}' for n in range(18)])
+ self.group_list.groups[0] = create_group('group_0', [f'contact_{n}' for n in range(18)])
+ self.args = self.contact_list, self.group_list
def tearDown(self):
- cleanup()
- builtins.input = self.o_input
+ cleanup(self.unittest_dir)
def test_invalid_type_raises_critical_error_on_store(self):
- self.settings.serial_error_correction = b'bytestring'
+ self.settings.tm_random_delay = b'bytestring'
with self.assertRaises(SystemExit):
self.settings.store_settings()
def test_invalid_type_raises_critical_error_on_load(self):
with self.assertRaises(SystemExit):
- self.settings.nh_bypass_messages = b'bytestring'
+ self.settings.nc_bypass_messages = b'bytestring'
self.settings.load_settings()
- def test_store_and_load_settings(self):
+ def test_store_and_load_tx_settings(self):
# Test store
self.assertFalse(self.settings.disable_gui_dialog)
self.settings.disable_gui_dialog = True
self.settings.store_settings()
- self.assertEqual(os.path.getsize(f"{DIR_USER_DATA}ut_settings"), SETTING_LENGTH)
+ self.assertEqual(os.path.getsize(self.file_name), SETTING_LENGTH)
# Test load
- settings2 = Settings(self.masterkey, 'ut', False, False)
+ settings2 = Settings(self.master_key, TX, False)
self.assertTrue(settings2.disable_gui_dialog)
+ def test_store_and_load_rx_settings(self):
+ # Setup
+ self.settings = Settings(self.master_key, operation=RX, local_test=False)
+
+ # Test store
+ self.assertFalse(self.settings.disable_gui_dialog)
+ self.settings.disable_gui_dialog = True
+ self.settings.store_settings()
+ self.assertEqual(os.path.getsize(self.file_name), SETTING_LENGTH)
+
+ # Test load
+ settings2 = Settings(self.master_key, RX, False)
+ self.assertTrue(settings2.disable_gui_dialog)
+
+ def test_load_of_modified_database_raises_critical_error(self):
+ # Store settings to database
+ self.settings.store_settings()
+
+ # Test reading from database works normally
+ self.assertIsInstance(Settings(self.master_key, operation=TX, local_test=False), Settings)
+
+ # Test loading of the tampered database raises CriticalError
+ tamper_file(self.file_name, tamper_size=1)
+ with self.assertRaises(SystemExit):
+ Settings(self.master_key, operation=TX, local_test=False)
+
def test_invalid_type_raises_critical_error_when_changing_settings(self):
self.settings.traffic_masking = b'bytestring'
with self.assertRaises(SystemExit):
- self.assertIsNone(self.settings.change_setting('traffic_masking', 'True', self.contact_list, self.group_list))
+ self.assertIsNone(self.settings.change_setting('traffic_masking', 'True', *self.args))
def test_change_settings(self):
- self.assertFR("Error: Invalid value 'Falsee'", self.settings.change_setting, 'disable_gui_dialog', 'Falsee', self.contact_list, self.group_list)
- self.assertFR("Error: Invalid value '1.1'", self.settings.change_setting, 'max_number_of_group_members', '1.1', self.contact_list, self.group_list)
- self.assertFR("Error: Invalid value '-1.1'", self.settings.change_setting, 'max_duration_of_random_delay', '-1.1', self.contact_list, self.group_list)
- self.assertFR("Error: Invalid value '18446744073709551616'", self.settings.change_setting, 'serial_error_correction', str(2 ** 64), self.contact_list, self.group_list)
- self.assertFR("Error: Invalid value 'True'", self.settings.change_setting, 'traffic_masking_static_delay', 'True', self.contact_list, self.group_list)
+ self.assert_fr("Error: Invalid value 'Falsee'.",
+ self.settings.change_setting, 'disable_gui_dialog', 'Falsee', *self.args)
+ self.assert_fr("Error: Invalid value '1.1'.",
+ self.settings.change_setting, 'max_number_of_group_members', '1.1', *self.args)
+ self.assert_fr("Error: Invalid value '18446744073709551616'.",
+ self.settings.change_setting, 'max_number_of_contacts', str(2 ** 64), *self.args)
+ self.assert_fr("Error: Invalid value '-1.1'.",
+ self.settings.change_setting, 'tm_static_delay', '-1.1', *self.args)
+ self.assert_fr("Error: Invalid value 'True'.",
+ self.settings.change_setting, 'tm_static_delay', 'True', *self.args)
- self.assertIsNone(self.settings.change_setting('serial_error_correction', '10', self.contact_list, self.group_list))
- self.assertIsNone(self.settings.change_setting('rxm_usb_serial_adapter', 'True', self.contact_list, self.group_list))
- self.assertIsNone(self.settings.change_setting('traffic_masking', 'True', self.contact_list, self.group_list))
+ self.assertIsNone(self.settings.change_setting('traffic_masking', 'True', *self.args))
+ self.assertIsNone(self.settings.change_setting('max_number_of_group_members', '100', *self.args))
- def test_validate_key_value_pair(self):
- self.assertFR("Error: Database padding settings must be divisible by 10.", self.settings.validate_key_value_pair, 'max_number_of_group_members', 0, self.contact_list, self.group_list)
- self.assertFR("Error: Database padding settings must be divisible by 10.", self.settings.validate_key_value_pair, 'max_number_of_group_members', 18, self.contact_list, self.group_list)
- self.assertFR("Error: Database padding settings must be divisible by 10.", self.settings.validate_key_value_pair, 'max_number_of_groups', 18, self.contact_list, self.group_list)
- self.assertFR("Error: Database padding settings must be divisible by 10.", self.settings.validate_key_value_pair, 'max_number_of_contacts', 18, self.contact_list, self.group_list)
- self.assertFR("Error: Can't set max number of members lower than 20.", self.settings.validate_key_value_pair, 'max_number_of_group_members', 10, self.contact_list, self.group_list)
- self.assertFR("Error: Can't set max number of groups lower than 20.", self.settings.validate_key_value_pair, 'max_number_of_groups', 10, self.contact_list, self.group_list)
- self.assertFR("Error: Can't set max number of contacts lower than 20.", self.settings.validate_key_value_pair, 'max_number_of_contacts', 10, self.contact_list, self.group_list)
- self.assertFR("Error: Specified baud rate is not supported.", self.settings.validate_key_value_pair, 'serial_baudrate', 10, self.contact_list, self.group_list)
- self.assertFR("Error: Invalid value for error correction ratio.", self.settings.validate_key_value_pair, 'serial_error_correction', 0, self.contact_list, self.group_list)
- self.assertFR("Error: Invalid value for error correction ratio.", self.settings.validate_key_value_pair, 'serial_error_correction', -1, self.contact_list, self.group_list)
- self.assertFR("Error: Too small value for message notify duration.", self.settings.validate_key_value_pair, 'new_message_notify_duration', 0.04, self.contact_list, self.group_list)
+ @mock.patch('builtins.input', side_effect=['No', 'Yes'])
+ def test_validate_key_value_pair(self, _):
+ self.assert_fr("Error: Database padding settings must be divisible by 10.",
+ self.settings.validate_key_value_pair, 'max_number_of_group_members', 0, *self.args)
+ self.assert_fr("Error: Database padding settings must be divisible by 10.",
+ self.settings.validate_key_value_pair, 'max_number_of_group_members', 18, *self.args)
+ self.assert_fr("Error: Database padding settings must be divisible by 10.",
+ self.settings.validate_key_value_pair, 'max_number_of_groups', 18, *self.args)
+ self.assert_fr("Error: Database padding settings must be divisible by 10.",
+ self.settings.validate_key_value_pair, 'max_number_of_contacts', 18, *self.args)
+ self.assert_fr("Error: Can't set the max number of members lower than 20.",
+ self.settings.validate_key_value_pair, 'max_number_of_group_members', 10, *self.args)
+ self.assert_fr("Error: Can't set the max number of groups lower than 20.",
+ self.settings.validate_key_value_pair, 'max_number_of_groups', 10, *self.args)
+ self.assert_fr("Error: Can't set the max number of contacts lower than 20.",
+ self.settings.validate_key_value_pair, 'max_number_of_contacts', 10, *self.args)
+ self.assert_fr("Error: Too small value for message notify duration.",
+ self.settings.validate_key_value_pair, 'new_message_notify_duration', 0.04, *self.args)
+ self.assert_fr("Error: Can't set static delay lower than 0.1.",
+ self.settings.validate_key_value_pair, 'tm_static_delay', 0.01, *self.args)
+ self.assert_fr("Error: Can't set random delay lower than 0.1.",
+ self.settings.validate_key_value_pair, 'tm_random_delay', 0.01, *self.args)
+ self.assert_fr("Aborted traffic masking setting change.",
+ self.settings.validate_key_value_pair, 'tm_random_delay', 0.1, *self.args)
- self.assertIsNone(self.settings.validate_key_value_pair("serial_baudrate", 9600, self.contact_list, self.group_list))
-
- def test_too_narrow_terminal_raises_fr_when_printing_settings(self):
- # Setup
- o_get_terminal_size = shutil.get_terminal_size
- shutil.get_terminal_size = lambda: [64, 64]
+ self.assertIsNone(self.settings.validate_key_value_pair("serial_baudrate", 9600, *self.args))
+ self.assertIsNone(self.settings.validate_key_value_pair("tm_static_delay", 1, *self.args))
+ @mock.patch('shutil.get_terminal_size', return_value=(64, 64))
+ def test_too_narrow_terminal_raises_fr_when_printing_settings(self, _):
# Test
- self.assertFR("Error: Screen width is too small.", self.settings.print_settings)
-
- # Teardown
- shutil.get_terminal_size = o_get_terminal_size
-
- def test_setup(self):
- # Setup
- builtins.input = lambda _: 'No'
-
- # Test
- self.settings.software_operation = TX
- self.settings.setup()
- self.assertFalse(self.settings.txm_usb_serial_adapter)
-
- self.settings.software_operation = RX
- self.settings.setup()
- self.assertFalse(self.settings.rxm_usb_serial_adapter)
+ self.assert_fr("Error: Screen width is too small.", self.settings.print_settings)
def test_print_settings(self):
- self.settings.max_number_of_group_members = 30
- self.settings.log_messages_by_default = True
- self.settings.traffic_masking_static_delay = 10.2
- self.assertPrints(CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER + """\
+ self.settings.max_number_of_group_members = 30
+ self.settings.log_messages_by_default = True
+ self.settings.tm_static_delay = 10.2
+ self.assert_prints(CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER + """\
Setting name Current value Default value Description
────────────────────────────────────────────────────────────────────────────────
disable_gui_dialog False False True replaces
- Tkinter dialogs
- with CLI prompts
+ GUI dialogs with
+ CLI prompts
-max_number_of_group_members 30 20 Max members in
- group (TxM/RxM
- must have the
- same value)
+max_number_of_group_members 30 50 Maximum number
+ of members in a
+ group
-max_number_of_groups 20 20 Max number of
- groups (TxM/RxM
- must have the
- same value)
+max_number_of_groups 50 50 Maximum number
+ of groups
-max_number_of_contacts 20 20 Max number of
- contacts
- (TxM/RxM must
- have the same
- value)
-
-serial_baudrate 19200 19200 The speed of
- serial interface
- in bauds per
- second
-
-serial_error_correction 5 5 Number of byte
- errors serial
- datagrams can
- recover from
+max_number_of_contacts 50 50 Maximum number
+ of contacts
log_messages_by_default True False Default logging
setting for new
@@ -173,18 +182,15 @@ show_notifications_by_default True True Default message
setting for new
contacts/groups
-logfile_masking False False True hides real
- size of logfile
+log_file_masking False False True hides real
+ size of log file
during traffic
masking
-txm_usb_serial_adapter True True False uses
- system's
- integrated
- serial interface
-
-nh_bypass_messages True True False removes NH
- bypass interrupt
+nc_bypass_messages False False False removes
+ Networked
+ Computer bypass
+ interrupt
messages
confirm_sent_files True True False sends
@@ -202,32 +208,21 @@ traffic_masking False False True enables
traffic masking
to hide metadata
-traffic_masking_static_delay 10.2 2.0 Static delay
+tm_static_delay 10.2 2.0 The static delay
between traffic
masking packets
-traffic_masking_random_delay 2.0 2.0 Max random delay
+tm_random_delay 2.0 2.0 Max random delay
for traffic
masking timing
obfuscation
-multi_packet_random_delay False False True adds IM
- server spam
- guard evading
- delay
-
-max_duration_of_random_delay 10.0 10.0 Maximum time for
- random spam
- guard evasion
- delay
-
-rxm_usb_serial_adapter True True False uses
- system's
- integrated
- serial interface
+allow_contact_requests True True When False, does
+ not show TFC
+ contact requests
new_message_notify_preview False False When True, shows
- preview of
+ a preview of the
received message
new_message_notify_duration 1.0 1.0 Number of
@@ -236,6 +231,11 @@ new_message_notify_duration 1.0 1.0 Number of
notification
appears
+max_decompress_size 100000000 100000000 Max size
+ Receiver accepts
+ when
+ decompressing
+ file
""", self.settings.print_settings)
diff --git a/tests/common/test_encoding.py b/tests/common/test_encoding.py
index 665c38d..f345f98 100644
--- a/tests/common/test_encoding.py
+++ b/tests/common/test_encoding.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,61 +16,63 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see <http://www.gnu.org/licenses/>.
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
-import binascii
+import base64
import os
import unittest
-from src.common.encoding import b58encode, bool_to_bytes, double_to_bytes, str_to_bytes, int_to_bytes, unicode_padding
-from src.common.encoding import b58decode, bytes_to_bool, bytes_to_double, bytes_to_str, bytes_to_int, rm_padding_str
+from datetime import datetime
+
+from src.common.encoding import b58encode, bool_to_bytes, double_to_bytes, str_to_bytes, int_to_bytes
+from src.common.encoding import b58decode, bytes_to_bool, bytes_to_double, bytes_to_str, bytes_to_int
+from src.common.encoding import onion_address_to_pub_key, unicode_padding, pub_key_to_short_address, b85encode
+from src.common.encoding import pub_key_to_onion_address, rm_padding_str, bytes_to_timestamp, b10encode
from src.common.statics import *
class TestBase58EncodeAndDecode(unittest.TestCase):
- def test_encoding_and_decoding_of_random_keys(self):
- for _ in range(1000):
- key = os.urandom(KEY_LENGTH)
+ def setUp(self):
+ self.key = SYMMETRIC_KEY_LENGTH * b'\x01'
+
+ def test_encoding_and_decoding_of_random_local_keys(self):
+ for _ in range(100):
+ key = os.urandom(SYMMETRIC_KEY_LENGTH)
encoded = b58encode(key)
decoded = b58decode(encoded)
self.assertEqual(key, decoded)
- def test_encoding_and_decoding_of_random_file_keys(self):
- for _ in range(1000):
- key = os.urandom(KEY_LENGTH)
- encoded = b58encode(key, file_key=True)
- decoded = b58decode(encoded, file_key=True)
+ def test_encoding_and_decoding_of_random_public_keys(self):
+ for _ in range(100):
+ key = os.urandom(TFC_PUBLIC_KEY_LENGTH)
+ encoded = b58encode(key, public_key=True)
+ decoded = b58decode(encoded, public_key=True)
self.assertEqual(key, decoded)
def test_invalid_decoding(self):
- key = KEY_LENGTH * b'\x01'
- encoded = b58encode(key) # 5HpjE2Hs7vjU4SN3YyPQCdhzCu92WoEeuE6PWNuiPyTu3ESGnzn
+ encoded = b58encode(self.key) # 5HpjE2Hs7vjU4SN3YyPQCdhzCu92WoEeuE6PWNuiPyTu3ESGnzn
changed = encoded[:-1] + 'a'
with self.assertRaises(ValueError):
b58decode(changed)
- def test_public_keys_raise_value_error_when_expecting_file_key(self):
- public_key = KEY_LENGTH * b'\x01'
- b58_pub_key = b58encode(public_key)
-
+ def test_public_keys_raise_value_error_when_expecting_local_key(self):
+ b58_pub_key = b58encode(self.key)
with self.assertRaises(ValueError):
- b58decode(b58_pub_key, file_key=True)
-
- def test_file_keys_raise_value_error_when_expecting_public_key(self):
- file_key = KEY_LENGTH * b'\x01'
- b58_file_key = b58encode(file_key, file_key=True)
+ b58decode(b58_pub_key, public_key=True)
+ def test_local_keys_raise_value_error_when_expecting_public_key(self):
+ b58_file_key = b58encode(self.key, public_key=True)
with self.assertRaises(ValueError):
b58decode(b58_file_key)
- def test_Bitcoin_WIF_test_vectors(self):
+ def test_bitcoin_wif_test_vectors(self):
"""Test vectors are available at
https://en.bitcoin.it/wiki/Wallet_import_format
"""
- byte_key = binascii.unhexlify("0C28FCA386C7A227600B2FE50B7CAE11"
- "EC86D3BF1FBE471BE89827E19D72AA1D")
+ byte_key = bytes.fromhex("0C28FCA386C7A227600B2FE50B7CAE11"
+ "EC86D3BF1FBE471BE89827E19D72AA1D")
b58_key = "5HueCGU8rMjxEXxiPuD5BDku4MkFqeZyd4dZ1jvhTVqvbTLvyTJ"
@@ -77,19 +80,34 @@ class TestBase58EncodeAndDecode(unittest.TestCase):
self.assertEqual(b58decode(b58_key), byte_key)
+class TestBase85Encode(unittest.TestCase):
+
+ def test_b85encode(self):
+ message = os.urandom(100)
+ self.assertEqual(b85encode(message),
+ base64.b85encode(message).decode())
+
+
+class TestBase10Encode(unittest.TestCase):
+
+ def test_b10encode(self):
+ self.assertEqual(b10encode(FINGERPRINT_LENGTH * b'a'),
+ '44046402572626160612103472728795008085361523578694645928734845681441465000289')
+
+
class TestUnicodePadding(unittest.TestCase):
def test_padding(self):
- for s in range(0, PADDING_LEN):
+ for s in range(0, PADDING_LENGTH):
string = s * 'm'
padded = unicode_padding(string)
- self.assertEqual(len(padded), PADDING_LEN)
+ self.assertEqual(len(padded), PADDING_LENGTH)
# Verify removal of padding doesn't alter the string
self.assertEqual(string, padded[:-ord(padded[-1:])])
def test_oversize_msg_raises_assertion_error(self):
- for s in range(PADDING_LEN, 260):
+ for s in range(PADDING_LENGTH, 260):
with self.assertRaises(AssertionError):
unicode_padding(s * 'm')
@@ -99,47 +117,61 @@ class TestRmPaddingStr(unittest.TestCase):
def test_padding_removal(self):
for i in range(0, 1000):
string = i * 'm'
- length = PADDING_LEN - (len(string) % PADDING_LEN)
+ length = PADDING_LENGTH - (len(string) % PADDING_LENGTH)
padded = string + length * chr(length)
self.assertEqual(rm_padding_str(padded), string)
class TestConversions(unittest.TestCase):
+ def test_conversion_back_and_forth(self):
+ pub_key = os.urandom(SYMMETRIC_KEY_LENGTH)
+ self.assertEqual(onion_address_to_pub_key(pub_key_to_onion_address(pub_key)), pub_key)
+
+ def test_pub_key_to_short_addr(self):
+ self.assertEqual(len(pub_key_to_short_address(bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH))),
+ TRUNC_ADDRESS_LENGTH)
+
+ self.assertIsInstance(pub_key_to_short_address(bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH)), str)
+
def test_bool_to_bytes(self):
- self.assertEqual(bool_to_bytes(False), b'\x00')
- self.assertEqual(bool_to_bytes(True), b'\x01')
- self.assertEqual(len(bool_to_bytes(True)), BOOLEAN_SETTING_LEN)
+ self.assertEqual( bool_to_bytes(False), b'\x00')
+ self.assertEqual( bool_to_bytes(True), b'\x01')
+ self.assertEqual(len(bool_to_bytes(True)), ENCODED_BOOLEAN_LENGTH)
def test_bytes_to_bool(self):
self.assertEqual(bytes_to_bool(b'\x00'), False)
self.assertEqual(bytes_to_bool(b'\x01'), True)
def test_int_to_bytes(self):
- self.assertEqual(int_to_bytes(1), b'\x00\x00\x00\x00\x00\x00\x00\x01')
- self.assertEqual(len(int_to_bytes(1)), INTEGER_SETTING_LEN)
+ self.assertEqual( int_to_bytes(1), b'\x00\x00\x00\x00\x00\x00\x00\x01')
+ self.assertEqual(len(int_to_bytes(1)), ENCODED_INTEGER_LENGTH)
def test_bytes_to_int(self):
self.assertEqual(bytes_to_int(b'\x00\x00\x00\x00\x00\x00\x00\x01'), 1)
def test_double_to_bytes(self):
- self.assertEqual(double_to_bytes(1.0), binascii.unhexlify('000000000000f03f'))
- self.assertEqual(double_to_bytes(1.1), binascii.unhexlify('9a9999999999f13f'))
- self.assertEqual(len(double_to_bytes(1.1)), FLOAT_SETTING_LEN)
+ self.assertEqual( double_to_bytes(1.0), bytes.fromhex('000000000000f03f'))
+ self.assertEqual( double_to_bytes(1.1), bytes.fromhex('9a9999999999f13f'))
+ self.assertEqual(len(double_to_bytes(1.1)), ENCODED_FLOAT_LENGTH)
def test_bytes_to_double(self):
- self.assertEqual(bytes_to_double(binascii.unhexlify('000000000000f03f')), 1.0)
- self.assertEqual(bytes_to_double(binascii.unhexlify('9a9999999999f13f')), 1.1)
+ self.assertEqual(bytes_to_double(bytes.fromhex('000000000000f03f')), 1.0)
+ self.assertEqual(bytes_to_double(bytes.fromhex('9a9999999999f13f')), 1.1)
def test_str_to_bytes(self):
encoded = str_to_bytes('test')
self.assertIsInstance(encoded, bytes)
- self.assertEqual(len(encoded), PADDED_UTF32_STR_LEN)
+ self.assertEqual(len(encoded), PADDED_UTF32_STR_LENGTH)
def test_bytes_to_str(self):
encoded = str_to_bytes('test')
self.assertEqual(bytes_to_str(encoded), 'test')
+ def test_bytes_to_timestamp(self):
+ encoded = bytes.fromhex('00000000')
+ self.assertIsInstance(bytes_to_timestamp(encoded), datetime)
+
if __name__ == '__main__':
unittest.main(exit=False)
diff --git a/tests/common/test_exceptions.py b/tests/common/test_exceptions.py
index bd87c9b..320d3b9 100644
--- a/tests/common/test_exceptions.py
+++ b/tests/common/test_exceptions.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,7 +16,7 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see <http://www.gnu.org/licenses/>.
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
import unittest
@@ -51,8 +52,8 @@ class TestGracefulExit(unittest.TestCase):
def test_graceful_exit(self):
with self.assertRaises(SystemExit):
- graceful_exit('test message', clear=True)
graceful_exit('test message')
+ graceful_exit('test message', clear=False)
graceful_exit('test message', exit_code=1)
graceful_exit('test message', exit_code=2)
diff --git a/tests/common/test_gateway.py b/tests/common/test_gateway.py
index a5ebeba..aa2d8f3 100644
--- a/tests/common/test_gateway.py
+++ b/tests/common/test_gateway.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,149 +16,492 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see <http://www.gnu.org/licenses/>.
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
-import multiprocessing.connection
import os
-import serial
-import time
import unittest
+import socket
-from multiprocessing import Queue
+from datetime import datetime
+from unittest import mock
+from unittest.mock import MagicMock
-from src.common.gateway import gateway_loop, Gateway
-from src.common.statics import *
+from serial import SerialException
+
+from src.common.crypto import blake2b
+from src.common.gateway import gateway_loop, Gateway, GatewaySettings
+from src.common.misc import ensure_dir
+from src.common.reed_solomon import RSCodec
+from src.common.statics import *
from tests.mock_classes import Settings
-from tests.mock_classes import Gateway as MockGateway
+from tests.utils import cd_unittest, cleanup, gen_queue_dict, tear_queues, TFCTestCase
class TestGatewayLoop(unittest.TestCase):
def setUp(self):
- self.queues = {GATEWAY_QUEUE: Queue()}
- self.gateway = MockGateway()
- self.gateway.read = lambda: "read_data"
+ self.unittest_dir = cd_unittest()
+ self.queues = gen_queue_dict()
def tearDown(self):
- while not self.queues[GATEWAY_QUEUE].empty():
- self.queues[GATEWAY_QUEUE].get()
- time.sleep(0.1)
- self.queues[GATEWAY_QUEUE].close()
+ cleanup(self.unittest_dir)
+ tear_queues(self.queues)
- def test_loop(self):
- self.assertIsNone(gateway_loop(self.queues, self.gateway, unittest=True))
- self.assertEqual(self.queues[GATEWAY_QUEUE].get(), "read_data")
+ @mock.patch('multiprocessing.connection.Listener',
+ return_value=MagicMock(accept=lambda: MagicMock(recv=MagicMock(return_value='message'))))
+ def test_loop(self, _):
+ gateway = Gateway(operation=RX, local_test=True, dd_sockets=False)
+ self.assertIsNone(gateway_loop(self.queues, gateway, unittest=True))
+
+ data = self.queues[GATEWAY_QUEUE].get()
+ self.assertIsInstance(data[0], datetime)
+ self.assertEqual(data[1], 'message')
-class TestGatewaySerial(unittest.TestCase):
-
- class MockSerial(object):
-
- def __init__(self, iface_name, baudrate, timeout):
- self.iface = iface_name
- self.baudrate = baudrate
- self.timeout = timeout
- self.written = []
- output_list = [b'', bytearray(b'a'), bytearray(b'b'), b'']
- self.gen = iter(output_list)
-
- def write(self, output):
- self.written.append(output)
-
- def read(self, _):
- time.sleep(0.1)
- return next(self.gen)
-
- def flush(self):
- pass
+class TestGatewaySerial(TFCTestCase):
def setUp(self):
- self.settings = Settings(session_usb_serial_adapter=True)
- self.o_listdir = os.listdir
- self.o_serial = serial.Serial
-
- input_list = ['ttyUSB0', 'ttyS0', 'ttyUSB0', 'ttyS0', 'ttyUSB0']
- gen = iter(input_list)
- os.listdir = lambda _: [next(gen)]
- serial.Serial = TestGatewaySerial.MockSerial
- self.gateway = Gateway(self.settings)
+ self.unittest_dir = cd_unittest()
+ self.settings = Settings(session_usb_serial_adapter=True)
def tearDown(self):
- os.listdir = self.o_listdir
- serial.Serial = self.o_serial
+ cleanup(self.unittest_dir)
- def test_serial(self):
- self.assertIsNone(self.gateway.write(b'test'))
- self.assertEqual(self.gateway.search_serial_interface(), '/dev/ttyUSB0')
- self.assertEqual(self.gateway.read(), b'ab')
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('serial.Serial', return_value=MagicMock())
+ @mock.patch('os.listdir', side_effect=[['ttyUSB0'], ['ttyUSB0']])
+ @mock.patch('builtins.input', side_effect=['Yes'])
+ def test_search_and_establish_serial(self, *_):
+ gateway = Gateway(operation=RX, local_test=False, dd_sockets=False)
+ self.assertIsInstance(gateway.rs, RSCodec)
+ self.assertIs(gateway.tx_serial, gateway.rx_serial)
- self.gateway.settings.session_usb_serial_adapter = False
- self.assertEqual(self.gateway.search_serial_interface(), '/dev/ttyS0')
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('serial.Serial', side_effect=SerialException)
+ @mock.patch('os.listdir', side_effect=[['ttyUSB0'], ['ttyUSB0']])
+ @mock.patch('builtins.input', side_effect=['Yes'])
+    def test_serialexception_during_establish_exits(self, *_):
+ with self.assertRaises(SystemExit):
+ Gateway(operation=RX, local_test=False, dd_sockets=False)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('serial.Serial', return_value=MagicMock(write=MagicMock(side_effect=[SerialException, None])))
+ @mock.patch('os.listdir', side_effect=[['ttyUSB0'], ['ttyUSB0'], ['ttyUSB0']])
+ @mock.patch('builtins.input', side_effect=['Yes'])
+    def test_write_serial(self, *_):
+ gateway = Gateway(operation=RX, local_test=False, dd_sockets=False)
+ self.assertIsNone(gateway.write(b"message"))
+
+ @mock.patch('time.monotonic', side_effect=[1, 2, 3])
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('serial.Serial', return_value=MagicMock(
+ read_all=MagicMock(side_effect=[KeyboardInterrupt, SerialException, b'', b'1', b'2', b''])))
+ @mock.patch('os.listdir', side_effect=[['ttyUSB0'], ['ttyUSB0'], ['ttyUSB0']])
+ @mock.patch('builtins.input', side_effect=['Yes'])
+ def test_read_serial(self, *_):
+ gateway = Gateway(operation=RX, local_test=False, dd_sockets=False)
+ data = gateway.read()
+ self.assertEqual(data, b'12')
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('serial.Serial', return_value=MagicMock())
+ @mock.patch('os.listdir', side_effect=[['ttyUSB0'], ['ttyUSB0']])
+ @mock.patch('builtins.input', side_effect=['Yes'])
+ def test_add_error_correction(self, *_):
+ gateway = Gateway(operation=RX, local_test=False, dd_sockets=False)
+ packet = b'packet'
+
+ # Test BLAKE2b based checksum
+ gateway.settings.session_serial_error_correction = 0
+        self.assertEqual(gateway.add_error_correction(packet),
+ packet + blake2b(packet, digest_size=PACKET_CHECKSUM_LENGTH))
+
+ # Test Reed-Solomon erasure code
+ gateway.settings.session_serial_error_correction = 5
+ gateway.rs = RSCodec(gateway.settings.session_serial_error_correction)
+ self.assertEqual(gateway.add_error_correction(packet),
+ gateway.rs.encode(packet))
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('serial.Serial', return_value=MagicMock())
+ @mock.patch('os.listdir', side_effect=[['ttyUSB0'], ['ttyUSB0']])
+ @mock.patch('builtins.input', side_effect=['Yes'])
+ def test_detect_errors(self, *_):
+ gateway = Gateway(operation=RX, local_test=False, dd_sockets=False)
+ packet = b'packet'
+
+ # Test BLAKE2b based checksum
+ gateway.settings.session_serial_error_correction = 0
+ self.assertEqual(gateway.detect_errors(gateway.add_error_correction(packet)),
+ packet)
+
+ # Test unrecoverable error raises FR
+ self.assert_fr("Warning! Received packet had an invalid checksum.",
+ gateway.detect_errors, 300 * b'a')
+
+ # Test Reed-Solomon erasure code
+ gateway.settings.session_serial_error_correction = 5
+ gateway.rs = RSCodec(gateway.settings.session_serial_error_correction)
+ self.assertEqual(gateway.detect_errors(gateway.add_error_correction(packet)),
+ packet)
+
+ # Test unrecoverable error raises FR
+ self.assert_fr("Error: Reed-Solomon failed to correct errors in the received packet.",
+ gateway.detect_errors, 300 * b'a')
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('serial.Serial', return_value=MagicMock())
+ @mock.patch('os.listdir', side_effect=[['ttyUSB0'], ['ttyUSB0'], [''], ['ttyUSB0'], ['ttyS0'], ['']])
+ @mock.patch('builtins.input', side_effect=['Yes'])
+ def test_search_serial_interfaces(self, *_):
+ gateway = Gateway(operation=RX, local_test=False, dd_sockets=False)
+
+ interface = gateway.search_serial_interface()
+ self.assertEqual(interface, '/dev/ttyUSB0')
+
+ # Test unavailable system serial exits:
+ gateway.settings.session_usb_serial_adapter = False
+
+ interface = gateway.search_serial_interface()
+ self.assertEqual(interface, '/dev/ttyS0')
with self.assertRaises(SystemExit):
- self.gateway.search_serial_interface()
+ gateway.search_serial_interface()
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('multiprocessing.connection.Client', MagicMock())
+ @mock.patch('multiprocessing.connection.Listener', MagicMock())
+ def test_establish_local_testing_gateway(self, *_):
+ gateway = Gateway(operation=NC, local_test=True, dd_sockets=False)
+ self.assertIsInstance(gateway.rs, RSCodec)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('multiprocessing.connection.Client', MagicMock(side_effect=KeyboardInterrupt))
+ def test_keyboard_interrupt_exits(self, *_):
+ with self.assertRaises(SystemExit):
+ Gateway(operation=TX, local_test=True, dd_sockets=False)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('multiprocessing.connection.Client', MagicMock(
+ side_effect=[socket.error, ConnectionRefusedError, MagicMock()]))
+ def test_socket_client(self, *_):
+ gateway = Gateway(operation=TX, local_test=True, dd_sockets=False)
+ self.assertIsInstance(gateway, Gateway)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('multiprocessing.connection.Listener', MagicMock(
+ side_effect=[MagicMock(), KeyboardInterrupt]))
+ def test_socket_server(self, *_):
+ gateway = Gateway(operation=RX, local_test=True, dd_sockets=False)
+ self.assertIsInstance(gateway, Gateway)
+
+ with self.assertRaises(SystemExit):
+ Gateway(operation=RX, local_test=True, dd_sockets=False)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('multiprocessing.connection.Listener', return_value=MagicMock(
+ accept=lambda: MagicMock(recv=MagicMock(side_effect=[KeyboardInterrupt, b'data', EOFError]))))
+ def test_local_testing_read(self, *_):
+ gateway = Gateway(operation=RX, local_test=True, dd_sockets=False)
+ self.assertEqual(gateway.read(), b'data')
+
+ with self.assertRaises(SystemExit):
+ gateway.read()
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('multiprocessing.connection.Client', return_value=MagicMock(
+ send=MagicMock(side_effect=[None, BrokenPipeError])))
+ def test_local_testing_write(self, *_):
+ gateway = Gateway(operation=TX, local_test=True, dd_sockets=False)
+
+ self.assertIsNone(gateway.write(b'data'))
+
+ with self.assertRaises(SystemExit):
+ gateway.write(b'data')
-class TestMultiProcessingClient(unittest.TestCase):
-
- class MockMultiprocessingClient(object):
-
- def __init__(self, args):
- self.hostname = args[0]
- self.socket_no = args[1]
- self.written = []
-
- def send(self, output):
- self.written.append(output)
+class TestGatewaySettings(TFCTestCase):
def setUp(self):
- self.settings = Settings(software_operation=TX,
- local_testing_mode=True)
- multiprocessing.connection.Client = TestMultiProcessingClient.MockMultiprocessingClient
- self.gateway = Gateway(self.settings)
+ self.unittest_dir = cd_unittest()
+ self.default_serialized = """\
+{
+ "serial_baudrate": 19200,
+ "serial_error_correction": 5,
+ "use_serial_usb_adapter": true,
+ "built_in_serial_interface": "ttyS0"
+}"""
- def test_socket(self):
- self.assertEqual(self.gateway.interface.socket_no, NH_LISTEN_SOCKET)
- self.assertEqual(self.gateway.interface.hostname, 'localhost')
- self.assertIsNone(self.gateway.write(b'test'))
- self.assertEqual(self.gateway.interface.written[0], b'test')
+ def tearDown(self):
+ cleanup(self.unittest_dir)
+
+ @mock.patch('os.listdir', side_effect=[['ttyUSB0'], ['ttyS0'], ['ttyUSB0'], ['ttyS0']])
+ @mock.patch('builtins.input', side_effect=['yes', 'yes', 'no', 'no'])
+ def test_gateway_setup(self, *_):
+ settings = GatewaySettings(operation=TX, local_test=False, dd_sockets=True)
+ self.assertIsNone(settings.setup())
+
+ def test_store_and_load_of_settings(self):
+ settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True)
+        self.assertTrue(os.path.isfile(f'{DIR_USER_DATA}{TX}_serial_settings.json'))
+
+ self.assertEqual(settings.serial_baudrate, 19200)
+ self.assertEqual(settings.use_serial_usb_adapter, True)
+ settings.serial_baudrate = 115200
+ settings.use_serial_usb_adapter = False
+
+ self.assertIsNone(settings.store_settings())
+ settings2 = GatewaySettings(operation=TX, local_test=True, dd_sockets=True)
+
+ self.assertEqual(settings2.serial_baudrate, 115200)
+        self.assertEqual(settings2.use_serial_usb_adapter, False)
+
+ def test_manually_edited_settings_are_loaded(self):
+ # Setup
+ ensure_dir(DIR_USER_DATA)
+ with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", 'w+') as f:
+ f.write("""\
+{
+ "serial_baudrate": 9600,
+ "serial_error_correction": 1,
+ "use_serial_usb_adapter": false,
+ "built_in_serial_interface": "ttyS0"
+}""")
+ # Test
+ settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True)
+ self.assertEqual(settings.serial_baudrate, 9600)
+ self.assertEqual(settings.serial_error_correction, 1)
+ self.assertEqual(settings.use_serial_usb_adapter, False)
+ self.assertEqual(settings.built_in_serial_interface, 'ttyS0')
+
+ def test_missing_values_are_set_to_default_and_database_is_overwritten(self):
+ # Setup
+ ensure_dir(DIR_USER_DATA)
+ with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", 'w+') as f:
+ f.write("""\
+{
+ "serial_error_correction": 1,
+ "use_serial_usb_adapter": false,
+ "relay_usb_serial_adapter": false
+}""")
+ # Test
+ settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True)
+ self.assertEqual(settings.serial_baudrate, 19200)
+ self.assertEqual(settings.serial_error_correction, 1)
+ self.assertEqual(settings.use_serial_usb_adapter, False)
+ self.assertEqual(settings.built_in_serial_interface, 'ttyS0')
+
+ def test_invalid_format_is_replaced_with_defaults(self):
+ # Setup
+ ensure_dir(DIR_USER_DATA)
+ with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", 'w+') as f:
+ f.write("""\
+{
+ "serial_error_correction": 5,
+ "use_serial_usb_adapter": false,
+
+}""")
+ # Test
+ settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True)
+ self.assertEqual(settings.serial_baudrate, 19200)
+ self.assertEqual(settings.serial_error_correction, 5)
+ self.assertEqual(settings.use_serial_usb_adapter, True)
+ self.assertEqual(settings.built_in_serial_interface, 'ttyS0')
+
+ with open(settings.file_name) as f:
+ data = f.read()
+
+ self.assertEqual(data, self.default_serialized)
+
+ def test_invalid_serial_baudrate_is_replaced_with_default(self):
+ # Setup
+ ensure_dir(DIR_USER_DATA)
+ with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", 'w+') as f:
+ f.write("""\
+{
+ "serial_baudrate": 19201,
+ "serial_error_correction": 5,
+ "use_serial_usb_adapter": true,
+ "built_in_serial_interface": "ttyS0"
+}""")
+ # Test
+ settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True)
+ self.assertEqual(settings.serial_baudrate, 19200)
+ self.assertEqual(settings.serial_error_correction, 5)
+ self.assertEqual(settings.use_serial_usb_adapter, True)
+ self.assertEqual(settings.built_in_serial_interface, 'ttyS0')
+
+ with open(settings.file_name) as f:
+ data = f.read()
+
+ self.assertEqual(data, self.default_serialized)
+
+ def test_invalid_serial_error_correction_is_replaced_with_default(self):
+ # Setup
+ ensure_dir(DIR_USER_DATA)
+ with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", 'w+') as f:
+ f.write("""\
+{
+ "serial_baudrate": 19200,
+ "serial_error_correction": -1,
+ "use_serial_usb_adapter": true,
+ "built_in_serial_interface": "ttyS0"
+}""")
+ # Test
+ settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True)
+ self.assertEqual(settings.serial_baudrate, 19200)
+ self.assertEqual(settings.serial_error_correction, 5)
+ self.assertEqual(settings.use_serial_usb_adapter, True)
+ self.assertEqual(settings.built_in_serial_interface, 'ttyS0')
+
+ with open(settings.file_name) as f:
+ data = f.read()
+
+ self.assertEqual(data, self.default_serialized)
+
+ def test_invalid_serial_interface_is_replaced_with_default(self):
+ # Setup
+ ensure_dir(DIR_USER_DATA)
+ with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", 'w+') as f:
+ f.write("""\
+{
+ "serial_baudrate": 19200,
+ "serial_error_correction": 5,
+ "use_serial_usb_adapter": true,
+ "built_in_serial_interface": "does_not_exist"
+}""")
+ # Test
+ settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True)
+ self.assertEqual(settings.serial_baudrate, 19200)
+ self.assertEqual(settings.serial_error_correction, 5)
+ self.assertEqual(settings.use_serial_usb_adapter, True)
+ self.assertEqual(settings.built_in_serial_interface, 'ttyS0')
+
+ with open(settings.file_name) as f:
+ data = f.read()
+
+ self.assertEqual(data, self.default_serialized)
+
+ def test_invalid_type_is_replaced_with_default(self):
+ # Setup
+ ensure_dir(DIR_USER_DATA)
+ with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", 'w+') as f:
+ f.write("""\
+{
+ "serial_baudrate": "115200",
+ "serial_error_correction": "5",
+ "use_serial_usb_adapter": "true",
+ "built_in_serial_interface": true
+}""")
+ # Test
+ settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True)
+ self.assertEqual(settings.serial_baudrate, 19200)
+ self.assertEqual(settings.serial_error_correction, 5)
+ self.assertEqual(settings.use_serial_usb_adapter, True)
+ self.assertEqual(settings.built_in_serial_interface, 'ttyS0')
+
+ with open(settings.file_name) as f:
+ data = f.read()
+
+ self.assertEqual(data, self.default_serialized)
+
+ def test_unknown_kv_pair_is_removed(self):
+ # Setup
+ ensure_dir(DIR_USER_DATA)
+ with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", 'w+') as f:
+ f.write("""\
+{
+ "serial_baudrate": 19200,
+ "serial_error_correction": 5,
+ "use_serial_usb_adapter": true,
+ "built_in_serial_interface": "ttyS0",
+ "this_should_not_be_here": 1
+}""")
+ # Test
+ settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True)
+ self.assertEqual(settings.serial_baudrate, 19200)
+ self.assertEqual(settings.serial_error_correction, 5)
+ self.assertEqual(settings.use_serial_usb_adapter, True)
+ self.assertEqual(settings.built_in_serial_interface, 'ttyS0')
+
+ with open(settings.file_name) as f:
+ data = f.read()
+
+ self.assertEqual(data, self.default_serialized)
+
+ @mock.patch('os.listdir', side_effect=[['ttyS0'], ['ttyUSB0'], ['ttyUSB0'], ['ttyS0']])
+ @mock.patch('builtins.input', side_effect=['Yes', 'Yes', 'No', 'No'])
+ def test_setup(self, *_):
+ # Setup
+ ensure_dir(DIR_USER_DATA)
+ with open(f"{DIR_USER_DATA}{TX}_serial_settings.json", 'w+') as f:
+ f.write(self.default_serialized)
+
+ settings = GatewaySettings(operation=TX, local_test=False, dd_sockets=True)
+
+ # Test
+ self.assertIsNone(settings.setup())
+ self.assertIsNone(settings.setup())
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_change_setting(self, _):
+ settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True)
+ self.assert_fr("Error: Invalid value 'Falsee'.",
+ settings.change_setting, 'serial_baudrate', 'Falsee')
+ self.assert_fr("Error: Invalid value '1.1'.",
+                       settings.change_setting, 'serial_baudrate', '1.1')
+ self.assert_fr("Error: Invalid value '18446744073709551616'.",
+ settings.change_setting, 'serial_baudrate', str(2 ** 64))
+ self.assert_fr("Error: Invalid value 'Falsee'.",
+ settings.change_setting, 'use_serial_usb_adapter', 'Falsee')
+
+ self.assertIsNone(settings.change_setting('serial_baudrate', '9600'))
+ self.assertEqual(GatewaySettings(operation=TX, local_test=True, dd_sockets=True).serial_baudrate, 9600)
+
+ settings.serial_baudrate = b'bytestring'
+ with self.assertRaises(SystemExit):
+ settings.change_setting('serial_baudrate', '9600')
+
+ def test_validate_key_value_pair(self):
+ settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True)
+ self.assert_fr("Error: The specified baud rate is not supported.",
+ settings.validate_key_value_pair, 'serial_baudrate', 0)
+ self.assert_fr("Error: The specified baud rate is not supported.",
+ settings.validate_key_value_pair, 'serial_baudrate', 10)
+ self.assert_fr("Error: The specified baud rate is not supported.",
+ settings.validate_key_value_pair, 'serial_baudrate', 9601)
+ self.assert_fr("Error: Invalid value for error correction ratio.",
+ settings.validate_key_value_pair, 'serial_error_correction', -1)
+
+ self.assertIsNone(settings.validate_key_value_pair("serial_baudrate", 9600))
+ self.assertIsNone(settings.validate_key_value_pair("serial_error_correction", 20))
+ self.assertIsNone(settings.validate_key_value_pair("use_serial_usb_adapter", True))
+
+ @mock.patch('shutil.get_terminal_size', return_value=(64, 64))
+ def test_too_narrow_terminal_raises_fr_when_printing_settings(self, _):
+ settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True)
+ self.assert_fr("Error: Screen width is too small.", settings.print_settings)
+
+ def test_print_settings(self):
+ settings = GatewaySettings(operation=TX, local_test=True, dd_sockets=True)
+ self.assert_prints("""\
+
+Serial interface setting Current value Default value Description
+────────────────────────────────────────────────────────────────────────────────
+serial_baudrate 19200 19200 The speed of
+ serial interface
+ in bauds per
+ second
+
+serial_error_correction 5 5 Number of byte
+ errors serial
+ datagrams can
+ recover from
-class TestMultiProcessingServer(unittest.TestCase):
-
- class MockMultiprocessingListener(object):
-
- def __init__(self, args):
- self.hostname = args[0]
- self.socket_no = args[1]
- self.written = []
-
- def accept(self):
-
- class Interface(object):
-
- def __init__(self, hostname, socket_no):
- self.hostname = hostname
- self.socket_no = socket_no
-
- @staticmethod
- def recv():
- return b'mock_message'
-
- return Interface(self.hostname, self.socket_no)
-
- def setUp(self):
- self.settings = Settings(software_operation=RX,
- local_testing_mode=True)
- multiprocessing.connection.Listener = TestMultiProcessingServer.MockMultiprocessingListener
- self.gateway = Gateway(self.settings)
-
- def test_listener(self):
- self.assertEqual(self.gateway.interface.socket_no, RXM_LISTEN_SOCKET)
- self.assertEqual(self.gateway.interface.hostname, 'localhost')
- self.assertEqual(self.gateway.read(), b'mock_message')
+""", settings.print_settings)
if __name__ == '__main__':
diff --git a/tests/common/test_input.py b/tests/common/test_input.py
index 79118bc..6c1e4f8 100644
--- a/tests/common/test_input.py
+++ b/tests/common/test_input.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,143 +16,110 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
+along with TFC. If not, see .
"""
-import builtins
-import getpass
import unittest
-from src.common.input import ask_confirmation_code, box_input, get_b58_key, nh_bypass_msg, pwd_prompt, yes
+from unittest import mock
+
+from src.common.input import ask_confirmation_code, box_input, get_b58_key, nc_bypass_msg, pwd_prompt, yes
from src.common.statics import *
from tests.mock_classes import Settings
+from tests.utils import nick_to_short_address, VALID_ECDHE_PUB_KEY, VALID_LOCAL_KEY_KDK
class TestAskConfirmationCode(unittest.TestCase):
- def setUp(self):
- self.o_input = builtins.input
- builtins.input = lambda _: 'ff'
+ confirmation_code = 'ff'
- def tearDown(self):
- builtins.input = self.o_input
-
- def test_ask_confirmation_code(self):
- self.assertEqual(ask_confirmation_code(), 'ff')
+ @mock.patch('builtins.input', return_value=confirmation_code)
+ def test_ask_confirmation_code(self, _):
+ self.assertEqual(ask_confirmation_code('Receiver'), self.confirmation_code)
class TestBoxInput(unittest.TestCase):
- def setUp(self):
- self.o_input = builtins.input
- input_list = ['mock_input', 'mock_input', '', 'bad', 'ok']
- gen = iter(input_list)
- builtins.input = lambda _: str(next(gen))
- self.mock_validator = lambda string, *_: '' if string == 'ok' else 'Error'
-
- def tearDown(self):
- builtins.input = self.o_input
-
- def test_box_input(self):
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', side_effect=['mock_input', 'mock_input', '', 'invalid', 'ok'])
+ def test_box_input(self, *_):
self.assertEqual(box_input('test title'), 'mock_input')
self.assertEqual(box_input('test title', head=1, expected_len=20), 'mock_input')
self.assertEqual(box_input('test title', head=1, default='mock_input', expected_len=20), 'mock_input')
- self.assertEqual(box_input('test title', validator=self.mock_validator), 'ok')
+ self.assertEqual(box_input('test title', validator=lambda string, *_: '' if string == 'ok' else 'Error'), 'ok')
class TestGetB58Key(unittest.TestCase):
def setUp(self):
- self.o_input = builtins.input
self.settings = Settings()
- def tearDown(self):
- builtins.input = self.o_input
-
- def test_get_b58_key(self):
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('shutil.get_terminal_size', return_value=[200, 200])
+ @mock.patch('builtins.input', side_effect=(2*['invalid', VALID_LOCAL_KEY_KDK[:-1], VALID_LOCAL_KEY_KDK] +
+ 2*['invalid', VALID_ECDHE_PUB_KEY[:-1], VALID_ECDHE_PUB_KEY]))
+ def test_get_b58_key(self, *_):
for boolean in [True, False]:
self.settings.local_testing_mode = boolean
- for key_type in [B58_PUB_KEY, B58_LOCAL_KEY]:
- input_list = ["bad",
- "5HueCGU8rMjxEXxiPuD5BDku4MkFqeZyd4dZ1jvhTVqvbTLvyTa",
- "5HueCGU8rMjxEXxiPuD5BDku4MkFqeZyd4dZ1jvhTVqvbTLvyTJ"]
- gen = iter(input_list)
- builtins.input = lambda _: str(next(gen))
- key = get_b58_key(key_type, self.settings)
+ key = get_b58_key(B58_LOCAL_KEY, self.settings)
- self.assertIsInstance(key, bytes)
- self.assertEqual(len(key), KEY_LENGTH)
+ self.assertIsInstance(key, bytes)
+ self.assertEqual(len(key), SYMMETRIC_KEY_LENGTH)
with self.assertRaises(SystemExit):
- get_b58_key('invalid_keytype', self.settings)
+ get_b58_key('invalid_key_type', self.settings)
for boolean in [True, False]:
self.settings.local_testing_mode = boolean
- for key_type in [B58_FILE_KEY]:
- input_list = ["bad",
- "91avARGdfge8E4tZfYLoxeJ5sGBdNJQH4kvjJoQFacbgwi1C2Ga",
- "91avARGdfge8E4tZfYLoxeJ5sGBdNJQH4kvjJoQFacbgwi1C2GD"]
- gen = iter(input_list)
- builtins.input = lambda _: str(next(gen))
- key = get_b58_key(key_type, self.settings)
+ key = get_b58_key(B58_PUBLIC_KEY, self.settings, nick_to_short_address('Alice'))
- self.assertIsInstance(key, bytes)
- self.assertEqual(len(key), KEY_LENGTH)
+ self.assertIsInstance(key, bytes)
+ self.assertEqual(len(key), TFC_PUBLIC_KEY_LENGTH)
with self.assertRaises(SystemExit):
- get_b58_key('invalid_keytype', self.settings)
+ get_b58_key('invalid_key_type', self.settings)
+
+ @mock.patch('builtins.input', return_value='')
+ @mock.patch('shutil.get_terminal_size', return_value=[200, 200])
+ def test_empty_pub_key_returns_empty_bytes(self, *_):
+ key = get_b58_key(B58_PUBLIC_KEY, self.settings)
+ self.assertEqual(key, b'')
-class TestNHBypassMsg(unittest.TestCase):
+class TestNCBypassMsg(unittest.TestCase):
- def setUp(self):
- self.o_input = builtins.input
- self.settings = Settings()
- builtins.input = lambda _: ''
-
- def tearDown(self):
- builtins.input = self.o_input
-
- def test_nh_bypass_msg(self):
- self.assertIsNone(nh_bypass_msg(NH_BYPASS_START, self.settings))
- self.assertIsNone(nh_bypass_msg(NH_BYPASS_STOP, self.settings))
+ @mock.patch('builtins.input', return_value='')
+ def test_nc_bypass_msg(self, _):
+ settings = Settings(nc_bypass_messages=True)
+ self.assertIsNone(nc_bypass_msg(NC_BYPASS_START, settings))
+ self.assertIsNone(nc_bypass_msg(NC_BYPASS_STOP, settings))
class TestPwdPrompt(unittest.TestCase):
- def setUp(self):
- self.o_input = builtins.input
- self.o_getpass = getpass.getpass
- getpass.getpass = lambda x: 'testpwd'
-
- def tearDown(self):
- builtins.input = self.o_input
- getpass.getpass = self.o_getpass
-
- def test_pwd_prompt(self):
- self.assertEqual(pwd_prompt("test prompt"), 'testpwd')
+ @mock.patch('getpass.getpass', return_value='test_password')
+ def test_pwd_prompt(self, _):
+ self.assertEqual(pwd_prompt("test prompt"), 'test_password')
class TestYes(unittest.TestCase):
- def setUp(self):
- self.o_input = builtins.input
- self.o_getpass = getpass.getpass
- input_list = ['BAD', '', 'bad', 'Y', 'YES', 'N', 'NO']
- gen = iter(input_list)
- builtins.input = lambda _: str(next(gen))
-
- def tearDown(self):
- builtins.input = self.o_input
- getpass.getpass = self.o_getpass
-
- def test_yes(self):
+ @mock.patch('builtins.input', side_effect=['Invalid', '', 'invalid', 'Y', 'YES', 'N', 'NO',
+ KeyboardInterrupt, KeyboardInterrupt, EOFError, EOFError])
+ def test_yes(self, _):
self.assertTrue(yes('test prompt', head=1, tail=1))
self.assertTrue(yes('test prompt'))
+
self.assertFalse(yes('test prompt', head=1, tail=1))
self.assertFalse(yes('test prompt'))
+ self.assertTrue(yes('test prompt', head=1, tail=1, abort=True))
+ self.assertFalse(yes('test prompt', abort=False))
+
+ self.assertTrue(yes('test prompt', head=1, tail=1, abort=True))
+ self.assertFalse(yes('test prompt', abort=False))
+
if __name__ == '__main__':
unittest.main(exit=False)
diff --git a/tests/common/test_misc.py b/tests/common/test_misc.py
index c049940..436d272 100644
--- a/tests/common/test_misc.py
+++ b/tests/common/test_misc.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,21 +16,63 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
+along with TFC. If not, see .
"""
import argparse
import os
+import threading
+import time
import types
import unittest
+import zlib
-from src.common.misc import ensure_dir, get_tab_complete_list, get_tab_completer, get_terminal_height
-from src.common.misc import get_terminal_width, ignored, process_arguments, readable_size, round_up, split_string
-from src.common.misc import split_byte_string, validate_account, validate_key_exchange, validate_nick
+from multiprocessing import Process
+from unittest import mock
+
+from src.common.misc import calculate_race_condition_delay, decompress, ensure_dir, get_tab_complete_list
+from src.common.misc import get_tab_completer, get_terminal_height, get_terminal_width, ignored, monitor_processes
+from src.common.misc import process_arguments, readable_size, round_up, separate_header, separate_headers
+from src.common.misc import separate_trailer, split_string, split_byte_string, terminal_width_check
+from src.common.misc import validate_group_name, validate_key_exchange, validate_onion_addr, validate_nick
from src.common.statics import *
-from tests.mock_classes import ContactList, GroupList, Settings
-from tests.utils import ignored
+from tests.mock_classes import ContactList, Gateway, GroupList, Settings
+from tests.utils import cd_unittest, cleanup, gen_queue_dict, ignored, nick_to_onion_address
+from tests.utils import nick_to_pub_key, tear_queues, TFCTestCase
+
+
+class TestCalculateRaceConditionDelay(unittest.TestCase):
+
+ def setUp(self):
+ self.settings = Settings()
+
+ def test_race_condition_delay_calculation(self):
+ self.assertIsInstance(calculate_race_condition_delay(5, 9600), float)
+
+
+class TestDecompress(TFCTestCase):
+
+ def setUp(self):
+ self.settings = Settings()
+ self.settings.max_decompress_size = 1000
+
+ def test_successful_decompression(self):
+ # Setup
+ data = os.urandom(self.settings.max_decompress_size)
+ compressed = zlib.compress(data)
+
+ # Test
+ self.assertEqual(decompress(compressed, self.settings.max_decompress_size), data)
+
+ def test_oversize_decompression_raises_fr(self):
+ # Setup
+ data = os.urandom(self.settings.max_decompress_size + 1)
+ compressed = zlib.compress(data)
+
+ # Test
+ self.assert_fr("Error: Decompression aborted due to possible zip bomb.",
+ decompress, compressed, self.settings.max_decompress_size)
class TestEnsureDir(unittest.TestCase):
@@ -39,6 +82,7 @@ class TestEnsureDir(unittest.TestCase):
os.rmdir('test_dir/')
def test_ensure_dir(self):
+ self.assertIsNone(ensure_dir('test_dir/'))
self.assertIsNone(ensure_dir('test_dir/'))
self.assertTrue(os.path.isdir('test_dir/'))
@@ -47,23 +91,25 @@ class TestTabCompleteList(unittest.TestCase):
def setUp(self):
self.contact_list = ContactList(nicks=['Alice', 'Bob'])
- self.group_list = GroupList(groups=['testgroup'])
+ self.group_list = GroupList(groups=['test_group'])
self.settings = Settings(key_list=['key1', 'key2'])
+ self.gateway = Gateway()
def test_get_tab_complete_list(self):
- tab_complete_list = ['about', 'add ', 'all', 'clear', 'cmd', 'create ', 'exit', 'export ', 'false', 'file',
- 'fingerprints', 'group ', 'help', 'history ', 'join ', 'localkey', 'logging ', 'msg ', 'names',
- 'nick ', 'notify ', 'passwd ', 'psk', 'reset', 'rm', 'rmlogs ', 'set ', 'settings',
- 'store ', 'true', 'unread', 'key1 ', 'key2 ', 'alice@jabber.org ', 'user@jabber.org ',
- 'Alice ', 'bob@jabber.org ', 'Bob ', 'testgroup ', 'whisper ']
+ tab_complete_list = [a + ' ' for a in self.contact_list.get_list_of_addresses()]
+ tab_complete_list += [i + ' ' for i in self.group_list.get_list_of_hr_group_ids()]
+ tab_complete_list += [s + ' ' for s in self.settings.key_list]
+ tab_complete_list += [s + ' ' for s in self.gateway.settings.key_list]
- self.assertEqual(set(get_tab_complete_list(self.contact_list, self.group_list, self.settings)), set(tab_complete_list))
- self.assertIsInstance(get_tab_completer(self.contact_list, self.group_list, self.settings), types.FunctionType)
+ tc_list = get_tab_complete_list(self.contact_list, self.group_list, self.settings, self.gateway)
+ self.assertTrue(set(tab_complete_list) < set(tc_list))
+ self.assertIsInstance(get_tab_completer(self.contact_list, self.group_list, self.settings, self.gateway),
+ types.FunctionType)
- completer = get_tab_completer(self.contact_list, self.group_list, self.settings)
+ completer = get_tab_completer(self.contact_list, self.group_list, self.settings, self.gateway)
options = completer('a', state=0)
- self.assertEqual(options, 'about')
+ self.assertEqual(options, 'all')
self.assertIsNone(completer('a', state=5))
@@ -83,6 +129,7 @@ class TestIgnored(unittest.TestCase):
@staticmethod
def func():
+ """Mock function that raises exception."""
raise KeyboardInterrupt
def test_ignored_contextmanager(self):
@@ -95,23 +142,138 @@ class TestIgnored(unittest.TestCase):
self.assertFalse(raised)
+class TestMonitorProcesses(TFCTestCase):
+
+ def setUp(self):
+ self.settings = Settings()
+ self.unittest_dir = cd_unittest()
+
+ def tearDown(self):
+ cleanup(self.unittest_dir)
+
+ @staticmethod
+ def mock_process():
+ """Mock process that does not return."""
+ while True:
+ time.sleep(0.01)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_exit(self, *_):
+ queues = gen_queue_dict()
+ process_list = [Process(target=self.mock_process)]
+
+ for p in process_list:
+ p.start()
+
+ def queue_delayer():
+ """Place EXIT packet into queue after delay."""
+ time.sleep(0.01)
+ queues[EXIT_QUEUE].put(EXIT)
+ threading.Thread(target=queue_delayer).start()
+
+ with self.assertRaises(SystemExit):
+ monitor_processes(process_list, RX, queues)
+
+ tear_queues(queues)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_dying_process(self, *_):
+
+ def mock_process():
+ """Function that returns after a moment."""
+ time.sleep(0.01)
+
+ queues = gen_queue_dict()
+ process_list = [Process(target=mock_process)]
+
+ for p in process_list:
+ p.start()
+
+ with self.assertRaises(SystemExit):
+ monitor_processes(process_list, RX, queues)
+
+ tear_queues(queues)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('os.system', return_value=None)
+ def test_wipe(self, mock_os_system, *_):
+ queues = gen_queue_dict()
+ process_list = [Process(target=self.mock_process)]
+
+ os.mkdir(DIR_USER_DATA)
+ os.mkdir(DIR_RECV_FILES)
+ self.assertTrue(os.path.isdir(DIR_USER_DATA))
+ self.assertTrue(os.path.isdir(DIR_RECV_FILES))
+
+ for p in process_list:
+ p.start()
+
+ def queue_delayer():
+ """Place WIPE packet to queue after delay."""
+ time.sleep(0.01)
+ queues[EXIT_QUEUE].put(WIPE)
+ threading.Thread(target=queue_delayer).start()
+
+ with self.assertRaises(SystemExit):
+ monitor_processes(process_list, RX, queues)
+ self.assertFalse(os.path.isdir(DIR_USER_DATA))
+ self.assertFalse(os.path.isdir(DIR_RECV_FILES))
+ mock_os_system.assert_called_with('poweroff')
+
+ tear_queues(queues)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('os.system', return_value=None)
+ @mock.patch('subprocess.check_output', lambda *popenargs, timeout=None, **kwargs: TAILS)
+ def test_wipe_tails(self, mock_os_system, *_):
+ queues = gen_queue_dict()
+ process_list = [Process(target=self.mock_process)]
+
+ os.mkdir(DIR_USER_DATA)
+ self.assertTrue(os.path.isdir(DIR_USER_DATA))
+
+ for p in process_list:
+ p.start()
+
+ def queue_delayer():
+ """Place WIPE packet to queue after delay."""
+ time.sleep(0.01)
+ queues[EXIT_QUEUE].put(WIPE)
+ threading.Thread(target=queue_delayer).start()
+
+ with self.assertRaises(SystemExit):
+ monitor_processes(process_list, RX, queues)
+
+ mock_os_system.assert_called_with('poweroff')
+
+ # Test that user data wasn't removed
+ self.assertTrue(os.path.isdir(DIR_USER_DATA))
+ tear_queues(queues)
+
+
class TestProcessArguments(unittest.TestCase):
def setUp(self):
class MockParser(object):
+ """MockParse object."""
def __init__(self, *_, **__):
pass
def parse_args(self):
+ """Return Args mock object."""
+
class Args(object):
+ """Mock object for command line arguments."""
def __init__(self):
- self.operation = True
- self.local_test = True
- self.dd_sockets = True
+ """Create new Args mock object."""
+ self.operation = True
+ self.local_test = True
+ self.data_diode_sockets = True
args = Args()
return args
def add_argument(self, *_, **__):
+ """Mock function for adding argument."""
pass
self.o_argparse = argparse.ArgumentParser
@@ -150,70 +312,189 @@ class TestRoundUp(unittest.TestCase):
class TestSplitString(unittest.TestCase):
def test_split_string(self):
- self.assertEqual(split_string('teststring', 1), ['t', 'e', 's', 't', 's', 't', 'r', 'i', 'n', 'g'])
- self.assertEqual(split_string('teststring', 2), ['te', 'st', 'st', 'ri', 'ng'])
- self.assertEqual(split_string('teststring', 3), ['tes', 'tst', 'rin', 'g'])
- self.assertEqual(split_string('teststring', 5), ['tests', 'tring'])
- self.assertEqual(split_string('teststring', 10), ['teststring'])
- self.assertEqual(split_string('teststring', 15), ['teststring'])
+ self.assertEqual(split_string('cypherpunk', 1), ['c',
+ 'y',
+ 'p',
+ 'h',
+ 'e',
+ 'r',
+ 'p',
+ 'u',
+ 'n',
+ 'k'])
+
+ self.assertEqual(split_string('cypherpunk', 2), ['cy',
+ 'ph',
+ 'er',
+ 'pu',
+ 'nk'])
+
+ self.assertEqual(split_string('cypherpunk', 3), ['cyp',
+ 'her',
+ 'pun',
+ 'k'])
+
+ self.assertEqual(split_string('cypherpunk', 5), ['cyphe',
+ 'rpunk'])
+
+ self.assertEqual(split_string('cypherpunk', 10), ['cypherpunk'])
+ self.assertEqual(split_string('cypherpunk', 15), ['cypherpunk'])
class TestSplitByteString(unittest.TestCase):
def test_split_byte_string(self):
- self.assertEqual(split_byte_string(b'teststring', 1), [b't', b'e', b's', b't', b's', b't', b'r', b'i', b'n', b'g'])
- self.assertEqual(split_byte_string(b'teststring', 2), [b'te', b'st', b'st', b'ri', b'ng'])
- self.assertEqual(split_byte_string(b'teststring', 3), [b'tes', b'tst', b'rin', b'g'])
- self.assertEqual(split_byte_string(b'teststring', 5), [b'tests', b'tring'])
- self.assertEqual(split_byte_string(b'teststring', 10), [b'teststring'])
- self.assertEqual(split_byte_string(b'teststring', 15), [b'teststring'])
+ self.assertEqual(split_byte_string(b'cypherpunk', 1), [b'c',
+ b'y',
+ b'p',
+ b'h',
+ b'e',
+ b'r',
+ b'p',
+ b'u',
+ b'n',
+ b'k'])
+
+ self.assertEqual(split_byte_string(b'cypherpunk', 2), [b'cy',
+ b'ph',
+ b'er',
+ b'pu',
+ b'nk'])
+
+ self.assertEqual(split_byte_string(b'cypherpunk', 3), [b'cyp',
+ b'her',
+ b'pun',
+ b'k'])
+
+ self.assertEqual(split_byte_string(b'cypherpunk', 5), [b'cyphe',
+ b'rpunk'])
+
+ self.assertEqual(split_byte_string(b'cypherpunk', 10), [b'cypherpunk'])
+ self.assertEqual(split_byte_string(b'cypherpunk', 15), [b'cypherpunk'])
-class TestValidateAccount(unittest.TestCase):
+class TestSeparateHeader(unittest.TestCase):
+
+ def test_separate_header(self):
+ self.assertEqual(separate_header(b"cypherpunk", header_length=len(b"cypher")),
+ (b"cypher", b"punk"))
+
+
+class TestSeparateHeaders(unittest.TestCase):
+
+ def test_separate_headers(self):
+ self.assertEqual(separate_headers(b"cypherpunk", header_length_list=[1, 2, 3]),
+ [b"c", b"yp", b"her", b"punk"])
+
+ def test_too_small_string(self):
+ self.assertEqual(separate_headers(b"cypherpunk", header_length_list=[1, 2, 10]),
+ [b"c", b"yp", b"herpunk", b""])
+
+
+class TestSeparateTrailer(unittest.TestCase):
+
+ def test_separate_header(self):
+ self.assertEqual(separate_trailer(b"cypherpunk", trailer_length=len(b"punk")),
+ (b"cypher", b"punk"))
+
+
+class TestTerminalWidthCheck(unittest.TestCase):
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('shutil.get_terminal_size', side_effect=[[50, 50], [50, 50], [100, 100]])
+ def test_width_check(self, *_):
+ self.assertIsNone(terminal_width_check(80))
+
+
+class TestValidateOnionAddr(unittest.TestCase):
def test_validate_account(self):
- self.assertEqual(validate_account(248 * 'a' + '@a.com'), '')
- self.assertEqual(validate_account(249 * 'a' + '@a.com'), "Account must be shorter than 255 chars.")
- self.assertEqual(validate_account(250 * 'a' + '@a.com'), "Account must be shorter than 255 chars.")
- self.assertEqual(validate_account('bob@jabberorg'), "Invalid account format.")
- self.assertEqual(validate_account('bobjabber.org'), "Invalid account format.")
- self.assertEqual(validate_account('\x1fbobjabber.org'), "Account must be printable.")
+ user_account = nick_to_onion_address("Bob")
+ self.assertEqual(validate_onion_addr(nick_to_onion_address("Alice"), user_account),
+ '')
+ self.assertEqual(validate_onion_addr(nick_to_onion_address("Bob"), user_account),
+ 'Error: Can not add own account.')
+ self.assertEqual(validate_onion_addr(nick_to_onion_address("Alice")[:-1] + 'a', user_account),
+ 'Checksum error - Check that the entered account is correct.')
+ self.assertEqual(validate_onion_addr(nick_to_onion_address("Alice")[:-1] + '%', user_account),
+ 'Error: Invalid account format.')
+ self.assertEqual(validate_onion_addr(nick_to_onion_address("Alice") + 'a', user_account),
+ 'Error: Invalid account format.')
+ self.assertEqual(validate_onion_addr(nick_to_onion_address("Alice")[:-1] + '€', user_account),
+ 'Error: Invalid account format.')
+ self.assertEqual(validate_onion_addr(LOCAL_ID, user_account),
+ 'Error: Can not add reserved account.')
+
+
+class TestValidateGroupName(unittest.TestCase):
+
+ def setUp(self):
+ self.contact_list = ContactList(nicks=['Alice'])
+ self.group_list = GroupList(groups=['test_group'])
+
+ def test_validate_group_name(self):
+ self.assertEqual(validate_group_name('test_group\x1f', self.contact_list, self.group_list),
+ "Error: Group name must be printable.")
+ self.assertEqual(validate_group_name(PADDING_LENGTH * 'a', self.contact_list, self.group_list),
+ "Error: Group name must be less than 255 chars long.")
+ self.assertEqual(validate_group_name(DUMMY_GROUP, self.contact_list, self.group_list),
+ "Error: Group name cannot use the name reserved for database padding.")
+ self.assertEqual(validate_group_name(nick_to_onion_address("Alice"), self.contact_list, self.group_list),
+ "Error: Group name cannot have the format of an account.")
+ self.assertEqual(validate_group_name('Alice', self.contact_list, self.group_list),
+ "Error: Group name cannot be a nick of contact.")
+ self.assertEqual(validate_group_name('test_group', self.contact_list, self.group_list),
+ "Error: Group with name 'test_group' already exists.")
+ self.assertEqual(validate_group_name('test_group2', self.contact_list, self.group_list),
+ '')
class TestValidateKeyExchange(unittest.TestCase):
def test_validate_key_exchange(self):
- self.assertEqual(validate_key_exchange(''), 'Invalid key exchange selection.')
- self.assertEqual(validate_key_exchange('x2'), 'Invalid key exchange selection.')
- self.assertEqual(validate_key_exchange('x'), '')
- self.assertEqual(validate_key_exchange('X'), '')
- self.assertEqual(validate_key_exchange('x25519'), '')
- self.assertEqual(validate_key_exchange('X25519'), '')
- self.assertEqual(validate_key_exchange('p'), '')
- self.assertEqual(validate_key_exchange('P'), '')
- self.assertEqual(validate_key_exchange('psk'), '')
- self.assertEqual(validate_key_exchange('PSK'), '')
+ self.assertEqual(validate_key_exchange(''), 'Invalid key exchange selection.')
+ self.assertEqual(validate_key_exchange('x2'), 'Invalid key exchange selection.')
+ self.assertEqual(validate_key_exchange('x'), '')
+ self.assertEqual(validate_key_exchange('X'), '')
+ self.assertEqual(validate_key_exchange(ECDHE), '')
+ self.assertEqual(validate_key_exchange(ECDHE.lower()), '')
+ self.assertEqual(validate_key_exchange('p'), '')
+ self.assertEqual(validate_key_exchange('P'), '')
+ self.assertEqual(validate_key_exchange('psk'), '')
+ self.assertEqual(validate_key_exchange('PSK'), '')
class TestValidateNick(unittest.TestCase):
def setUp(self):
self.contact_list = ContactList(nicks=['Alice', 'Bob'])
- self.group_list = GroupList(groups=['testgroup'])
+ self.group_list = GroupList(groups=['test_group'])
def test_validate_nick(self):
- self.assertEqual(validate_nick("Alice_", (self.contact_list, self.group_list, 'alice@jabber.org')), '')
- self.assertEqual(validate_nick(254*"a", (self.contact_list, self.group_list, 'alice@jabber.org')), '')
- self.assertEqual(validate_nick(255*"a", (self.contact_list, self.group_list, 'alice@jabber.org')), 'Nick must be shorter than 255 chars.')
- self.assertEqual(validate_nick("\x01Alice", (self.contact_list, self.group_list, 'alice@jabber.org')), 'Nick must be printable.')
- self.assertEqual(validate_nick('', (self.contact_list, self.group_list, 'alice@jabber.org')), "Nick can't be empty.")
- self.assertEqual(validate_nick('Me', (self.contact_list, self.group_list, 'alice@jabber.org')), "'Me' is a reserved nick.")
- self.assertEqual(validate_nick('-!-', (self.contact_list, self.group_list, 'alice@jabber.org')), "'-!-' is a reserved nick.")
- self.assertEqual(validate_nick('local', (self.contact_list, self.group_list, 'alice@jabber.org')), "Nick can't refer to local keyfile.")
- self.assertEqual(validate_nick('a@b.org', (self.contact_list, self.group_list, 'alice@jabber.org')), "Nick can't have format of an account.")
- self.assertEqual(validate_nick('Bob', (self.contact_list, self.group_list, 'alice@jabber.org')), 'Nick already in use.')
- self.assertEqual(validate_nick("Alice", (self.contact_list, self.group_list, 'alice@jabber.org')), '')
- self.assertEqual(validate_nick("testgroup", (self.contact_list, self.group_list, 'alice@jabber.org')), "Nick can't be a group name.")
+ self.assertEqual(validate_nick("Alice_", (self.contact_list, self.group_list, nick_to_pub_key(
+ "Alice"))), '')
+ self.assertEqual(validate_nick(254 * "a", (self.contact_list, self.group_list, nick_to_pub_key(
+ "Alice"))), '')
+ self.assertEqual(validate_nick(255 * "a", (self.contact_list, self.group_list, nick_to_pub_key(
+ "Alice"))), 'Error: Nick must be shorter than 255 chars.')
+ self.assertEqual(validate_nick("\x01Alice", (self.contact_list, self.group_list, nick_to_pub_key(
+ "Alice"))), 'Error: Nick must be printable.')
+ self.assertEqual(validate_nick('', (self.contact_list, self.group_list, nick_to_pub_key(
+ "Alice"))), "Error: Nick cannot be empty.")
+ self.assertEqual(validate_nick('Me', (self.contact_list, self.group_list, nick_to_pub_key(
+ "Alice"))), "Error: 'Me' is a reserved nick.")
+ self.assertEqual(validate_nick('-!-', (self.contact_list, self.group_list, nick_to_pub_key(
+ "Alice"))), "Error: '-!-' is a reserved nick.")
+ self.assertEqual(validate_nick(LOCAL_ID, (self.contact_list, self.group_list, nick_to_pub_key(
+ "Alice"))), "Error: Nick cannot have the format of an account.")
+ self.assertEqual(validate_nick(nick_to_onion_address('A'), (self.contact_list, self.group_list, nick_to_pub_key(
+ "Alice"))), "Error: Nick cannot have the format of an account.")
+ self.assertEqual(validate_nick('Bob', (self.contact_list, self.group_list, nick_to_pub_key(
+ "Alice"))), 'Error: Nick already in use.')
+ self.assertEqual(validate_nick("Alice", (self.contact_list, self.group_list, nick_to_pub_key(
+ "Alice"))), '')
+ self.assertEqual(validate_nick("test_group", (self.contact_list, self.group_list, nick_to_pub_key(
+ "Alice"))), "Error: Nick cannot be a group name.")
if __name__ == '__main__':
diff --git a/tests/common/test_output.py b/tests/common/test_output.py
index 0340429..0fd0a50 100644
--- a/tests/common/test_output.py
+++ b/tests/common/test_output.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,158 +16,205 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see <http://www.gnu.org/licenses/>.
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
-import builtins
import unittest
-from src.common.output import box_print, c_print, clear_screen, group_management_print, message_printer
-from src.common.output import phase, print_fingerprint, print_key, print_on_previous_line
+from datetime import datetime
+from unittest import mock
+
+from src.common.output import clear_screen, group_management_print, m_print, phase, print_fingerprint, print_key
+from src.common.output import print_title, print_on_previous_line, print_spacing, rp_print
from src.common.statics import *
-from tests.mock_classes import ContactList, Settings
+from tests.mock_classes import ContactList, nick_to_pub_key, Settings
from tests.utils import TFCTestCase
-class TestBoxPrint(TFCTestCase):
-
- def setUp(self):
- self.o_input = builtins.input
- builtins.input = lambda _: ''
-
- def tearDown(self):
- builtins.input = self.o_input
-
- def test_box_print(self):
- self.assertIsNone(box_print("Test message", manual_proceed=True))
- self.assertPrints("""
- ┌──────────────┐
- │ Test message │
- └──────────────┘ \n
-""", box_print, "Test message", head=1, tail=1)
-
- self.assertPrints("""
- ┌─────────────────┐
- │ Test message │
- │ │
- │ Another message │
- └─────────────────┘ \n
-""", box_print, ["Test message", '', "Another message"], head=1, tail=1)
-
-
-class TestCPrint(TFCTestCase):
-
- def test_c_print(self):
- self.assertPrints("""
- Test message \n
-""", c_print, 'Test message', head=1, tail=1)
-
-
class TestClearScreen(TFCTestCase):
def test_clear_screen(self):
- self.assertPrints(CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER, clear_screen)
+ self.assert_prints(CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER, clear_screen)
class TestGroupManagementPrint(TFCTestCase):
def setUp(self):
self.contact_list = ContactList(nicks=['Alice'])
+ self.lines = [nick_to_pub_key('Alice'), nick_to_pub_key('Bob')]
+ self.group_name = 'test_group'
def test_group_management_print(self):
- self.assertPrints("""
- ┌───────────────────────────────────────────────────────┐
- │ Created new group 'testgroup' with following members: │
- │ * Alice │
- │ * bob@jabber.org │
- └───────────────────────────────────────────────────────┘ \n
-""", group_management_print, NEW_GROUP, ['alice@jabber.org', 'bob@jabber.org'], self.contact_list, group_name='testgroup')
+ group_management_print(NEW_GROUP, self.lines, self.contact_list, self.group_name)
+ self.assert_prints("""\
+ ┌──────────────────────────────────────────────────────────────┐
+ │ Created new group 'test_group' with following members: │
+ │ * Alice │
+ │ * zwp3dykiztmeils2u5eqjtdtx5x3kti5ktjthpkznku3ws5u5fq2bnad │
+ └──────────────────────────────────────────────────────────────┘
+""", group_management_print, NEW_GROUP, self.lines, self.contact_list, self.group_name)
- self.assertPrints("""
- ┌────────────────────────────────────────────────┐
- │ Added following accounts to group 'testgroup': │
- │ * Alice │
- │ * bob@jabber.org │
- └────────────────────────────────────────────────┘ \n
-""", group_management_print, ADDED_MEMBERS, ['alice@jabber.org', 'bob@jabber.org'], self.contact_list, group_name='testgroup')
+ self.assert_prints("""\
+ ┌──────────────────────────────────────────────────────────────┐
+ │ Added following accounts to group 'test_group': │
+ │ * Alice │
+ │ * zwp3dykiztmeils2u5eqjtdtx5x3kti5ktjthpkznku3ws5u5fq2bnad │
+ └──────────────────────────────────────────────────────────────┘
+""", group_management_print, ADDED_MEMBERS, self.lines, self.contact_list, self.group_name)
- self.assertPrints("""
- ┌───────────────────────────────────────────────────────┐
- │ Following accounts were already in group 'testgroup': │
- │ * Alice │
- │ * bob@jabber.org │
- └───────────────────────────────────────────────────────┘ \n
-""", group_management_print, ALREADY_MEMBER, ['alice@jabber.org', 'bob@jabber.org'], self.contact_list, group_name='testgroup')
+ self.assert_prints("""\
+ ┌──────────────────────────────────────────────────────────────┐
+ │ Following accounts were already in group 'test_group': │
+ │ * Alice │
+ │ * zwp3dykiztmeils2u5eqjtdtx5x3kti5ktjthpkznku3ws5u5fq2bnad │
+ └──────────────────────────────────────────────────────────────┘
+""", group_management_print, ALREADY_MEMBER, self.lines, self.contact_list, self.group_name)
- self.assertPrints("""
- ┌───────────────────────────────────────────────────┐
- │ Removed following members from group 'testgroup': │
- │ * Alice │
- │ * bob@jabber.org │
- └───────────────────────────────────────────────────┘ \n
-""", group_management_print, REMOVED_MEMBERS, ['alice@jabber.org', 'bob@jabber.org'], self.contact_list, group_name='testgroup')
+ self.assert_prints("""\
+ ┌──────────────────────────────────────────────────────────────┐
+ │ Removed following members from group 'test_group': │
+ │ * Alice │
+ │ * zwp3dykiztmeils2u5eqjtdtx5x3kti5ktjthpkznku3ws5u5fq2bnad │
+ └──────────────────────────────────────────────────────────────┘
+""", group_management_print, REMOVED_MEMBERS, self.lines, self.contact_list, self.group_name)
- self.assertPrints("""
- ┌───────────────────────────────────────────────────┐
- │ Following accounts were not in group 'testgroup': │
- │ * Alice │
- │ * bob@jabber.org │
- └───────────────────────────────────────────────────┘ \n
-""", group_management_print, NOT_IN_GROUP, ['alice@jabber.org', 'bob@jabber.org'], self.contact_list, group_name='testgroup')
+ self.assert_prints("""\
+ ┌──────────────────────────────────────────────────────────────┐
+ │ Following accounts were not in group 'test_group': │
+ │ * Alice │
+ │ * zwp3dykiztmeils2u5eqjtdtx5x3kti5ktjthpkznku3ws5u5fq2bnad │
+ └──────────────────────────────────────────────────────────────┘
+""", group_management_print, NOT_IN_GROUP, self.lines, self.contact_list, self.group_name)
- self.assertPrints("""
- ┌──────────────────────────────────────────┐
- │ Following unknown accounts were ignored: │
- │ * Alice │
- │ * bob@jabber.org │
- └──────────────────────────────────────────┘ \n
-""", group_management_print, UNKNOWN_ACCOUNTS, ['alice@jabber.org', 'bob@jabber.org'], self.contact_list, group_name='testgroup')
+ self.assert_prints("""\
+ ┌──────────────────────────────────────────────────────────────┐
+ │ Following unknown accounts were ignored: │
+ │ * Alice │
+ │ * zwp3dykiztmeils2u5eqjtdtx5x3kti5ktjthpkznku3ws5u5fq2bnad │
+ └──────────────────────────────────────────────────────────────┘
+""", group_management_print, UNKNOWN_ACCOUNTS, self.lines, self.contact_list, self.group_name)
-class TestMessagePrinter(TFCTestCase):
+class TestMPrint(TFCTestCase):
- def test_message_printer(self):
- long_msg = ("Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum consectetur purus quis"
- " dapibus. Fusce venenatis lacus ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed bibendu"
- "m velit maximus in. Aliquam ac metus risus. Sed cursus ornare luctus. Integer aliquet lectus id ma"
- "ssa blandit imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget luctus nisl. Sed ut el"
- "it iaculis, faucibus lacus eget, sodales magna. Nunc sed commodo arcu. In hac habitasse platea dic"
- "tumst. Integer luctus aliquam justo, at vestibulum dolor iaculis ac. Etiam laoreet est eget odio r"
- "utrum, vel malesuada lorem rhoncus. Cras finibus in neque eu euismod. Nulla facilisi. Nunc nec ali"
- "quam quam, quis ullamcorper leo. Nunc egestas lectus eget est porttitor, in iaculis felis sceleris"
- "que. In sem elit, fringilla id viverra commodo, sagittis varius purus. Pellentesque rutrum loborti"
- "s neque a facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu.")
+ long_msg = ("Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum consectetur purus quis"
+ " dapibus. Fusce venenatis lacus ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed bibendu"
+ "m velit maximus in. Aliquam ac metus risus. Sed cursus ornare luctus. Integer aliquet lectus id ma"
+ "ssa blandit imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget luctus nisl. Sed ut el"
+ "it iaculis, faucibus lacus eget, sodales magna. Nunc sed commodo arcu. In hac habitasse platea dic"
+ "tumst. Integer luctus aliquam justo, at vestibulum dolor iaculis ac. Etiam laoreet est eget odio r"
+ "utrum, vel malesuada lorem rhoncus. Cras finibus in neque eu euismod. Nulla facilisi. Nunc nec ali"
+ "quam quam, quis ullamcorper leo. Nunc egestas lectus eget est porttitor, in iaculis felis sceleris"
+ "que. In sem elit, fringilla id viverra commodo, sagittis varius purus. Pellentesque rutrum loborti"
+ "s neque a facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu.")
- self.assertPrints("""\
+ @mock.patch('builtins.input', return_value='')
+ def test_m_print(self, _):
+ self.assert_prints("Test message\n", m_print, ["Test message"], center=False)
+ self.assert_prints("Test message\n", m_print, "Test message", center=False)
- Lorem ipsum dolor sit amet, consectetur
- adipiscing elit. Aenean condimentum consectetur
- purus quis dapibus. Fusce venenatis lacus ut
- rhoncus faucibus. Cras sollicitudin commodo
- sapien, sed bibendum velit maximus in. Aliquam ac
- metus risus. Sed cursus ornare luctus. Integer
- aliquet lectus id massa blandit imperdiet. Ut sed
- massa eget quam facilisis rutrum. Mauris eget
- luctus nisl. Sed ut elit iaculis, faucibus lacus
- eget, sodales magna. Nunc sed commodo arcu. In
- hac habitasse platea dictumst. Integer luctus
- aliquam justo, at vestibulum dolor iaculis ac.
- Etiam laoreet est eget odio rutrum, vel malesuada
- lorem rhoncus. Cras finibus in neque eu euismod.
- Nulla facilisi. Nunc nec aliquam quam, quis
- ullamcorper leo. Nunc egestas lectus eget est
- porttitor, in iaculis felis scelerisque. In sem
- elit, fringilla id viverra commodo, sagittis
- varius purus. Pellentesque rutrum lobortis neque
- a facilisis. Mauris id tortor placerat, aliquam
- dolor ac, venenatis arcu. \n
-""", message_printer, long_msg, head=1, tail=1)
+ def test_long_message(self):
+ self.assert_prints("""\
+Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum
+consectetur purus quis dapibus. Fusce venenatis lacus ut rhoncus faucibus.
+Cras sollicitudin commodo sapien, sed bibendum velit maximus in. Aliquam ac
+metus risus. Sed cursus ornare luctus. Integer aliquet lectus id massa blandit
+imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget luctus nisl.
+Sed ut elit iaculis, faucibus lacus eget, sodales magna. Nunc sed commodo
+arcu. In hac habitasse platea dictumst. Integer luctus aliquam justo, at
+vestibulum dolor iaculis ac. Etiam laoreet est eget odio rutrum, vel malesuada
+lorem rhoncus. Cras finibus in neque eu euismod. Nulla facilisi. Nunc nec
+aliquam quam, quis ullamcorper leo. Nunc egestas lectus eget est porttitor, in
+iaculis felis scelerisque. In sem elit, fringilla id viverra commodo, sagittis
+varius purus. Pellentesque rutrum lobortis neque a facilisis. Mauris id tortor
+placerat, aliquam dolor ac, venenatis arcu.
+""", m_print, TestMPrint.long_msg, center=False)
+
+ self.assert_prints("""\
+┌──────────────────────────────────────────────────────────────────────────────┐
+│ Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum │
+│ consectetur purus quis dapibus. Fusce venenatis lacus ut rhoncus faucibus. │
+│ Cras sollicitudin commodo sapien, sed bibendum velit maximus in. Aliquam ac │
+│ metus risus. Sed cursus ornare luctus. Integer aliquet lectus id massa │
+│ blandit imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget │
+│ luctus nisl. Sed ut elit iaculis, faucibus lacus eget, sodales magna. Nunc │
+│ sed commodo arcu. In hac habitasse platea dictumst. Integer luctus aliquam │
+│ justo, at vestibulum dolor iaculis ac. Etiam laoreet est eget odio rutrum, │
+│ vel malesuada lorem rhoncus. Cras finibus in neque eu euismod. Nulla │
+│ facilisi. Nunc nec aliquam quam, quis ullamcorper leo. Nunc egestas lectus │
+│ eget est porttitor, in iaculis felis scelerisque. In sem elit, fringilla id │
+│ viverra commodo, sagittis varius purus. Pellentesque rutrum lobortis neque a │
+│ facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu. │
+└──────────────────────────────────────────────────────────────────────────────┘
+""", m_print, TestMPrint.long_msg, center=False, box=True)
+
+ self.assert_prints(f"""\
+{BOLD_ON}┌──────────────────────────────────────────────────────────────────────────────┐{NORMAL_TEXT}
+{BOLD_ON}│ Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum │{NORMAL_TEXT}
+{BOLD_ON}│ consectetur purus quis dapibus. Fusce venenatis lacus ut rhoncus faucibus. │{NORMAL_TEXT}
+{BOLD_ON}│ Cras sollicitudin commodo sapien, sed bibendum velit maximus in. Aliquam ac │{NORMAL_TEXT}
+{BOLD_ON}│ metus risus. Sed cursus ornare luctus. Integer aliquet lectus id massa │{NORMAL_TEXT}
+{BOLD_ON}│ blandit imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget │{NORMAL_TEXT}
+{BOLD_ON}│ luctus nisl. Sed ut elit iaculis, faucibus lacus eget, sodales magna. Nunc │{NORMAL_TEXT}
+{BOLD_ON}│ sed commodo arcu. In hac habitasse platea dictumst. Integer luctus aliquam │{NORMAL_TEXT}
+{BOLD_ON}│ justo, at vestibulum dolor iaculis ac. Etiam laoreet est eget odio rutrum, │{NORMAL_TEXT}
+{BOLD_ON}│ vel malesuada lorem rhoncus. Cras finibus in neque eu euismod. Nulla │{NORMAL_TEXT}
+{BOLD_ON}│ facilisi. Nunc nec aliquam quam, quis ullamcorper leo. Nunc egestas lectus │{NORMAL_TEXT}
+{BOLD_ON}│ eget est porttitor, in iaculis felis scelerisque. In sem elit, fringilla id │{NORMAL_TEXT}
+{BOLD_ON}│ viverra commodo, sagittis varius purus. Pellentesque rutrum lobortis neque a │{NORMAL_TEXT}
+{BOLD_ON}│ facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu. │{NORMAL_TEXT}
+{BOLD_ON}└──────────────────────────────────────────────────────────────────────────────┘{NORMAL_TEXT}
+""", m_print, TestMPrint.long_msg, center=False, box=True, bold=True)
+
+ def test_multi_line(self):
+ self.assert_prints("""\
+ ┌─────────┐
+ │ Test │
+ │ │
+ │ message │
+ └─────────┘
+""", m_print, ["Test", '', "message"], box=True)
+
+ def test_head_and_tail(self):
+ self.assert_prints("""\
+[2J[H
+
+ ┌──────┐
+ │ Test │
+ └──────┘
+
+[2J[H""", m_print, ["Test"], box=True, head_clear=True, tail_clear=True, head=2, tail=1)
+
+ def test_wrapping(self):
+ self.assert_prints("""\
+┌──────────────────────────────────────────────────────────────────────────────┐
+│ short message │
+│ Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum │
+│ consectetur purus quis dapibus. Fusce venenatis lacus ut rhoncus faucibus. │
+│ Cras sollicitudin commodo sapien, sed bibendum velit maximus in. Aliquam ac │
+│ metus risus. Sed cursus ornare luctus. Integer aliquet lectus id massa │
+│ blandit imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget │
+│ luctus nisl. Sed ut elit iaculis, faucibus lacus eget, sodales magna. Nunc │
+│ sed commodo arcu. In hac habitasse platea dictumst. Integer luctus aliquam │
+│ justo, at vestibulum dolor iaculis ac. Etiam laoreet est eget odio rutrum, │
+│ vel malesuada lorem rhoncus. Cras finibus in neque eu euismod. Nulla │
+│ facilisi. Nunc nec aliquam quam, quis ullamcorper leo. Nunc egestas lectus │
+│ eget est porttitor, in iaculis felis scelerisque. In sem elit, fringilla id │
+│ viverra commodo, sagittis varius purus. Pellentesque rutrum lobortis neque a │
+│ facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu. │
+└──────────────────────────────────────────────────────────────────────────────┘
+""", m_print, ["short message", TestMPrint.long_msg], box=True)
+
+ @mock.patch("builtins.input", return_value='')
+ def test_manual_proceed(self, _):
+ self.assertIsNone(m_print("test", manual_proceed=True))
class TestPhase(unittest.TestCase):
- def test_phase(self):
+ @mock.patch('time.sleep', return_value=None)
+ def test_phase(self, _):
self.assertIsNone(phase('Entering phase'))
self.assertIsNone(phase(DONE))
self.assertIsNone(phase('Starting phase', head=1, offset=len("Finished")))
@@ -176,7 +224,7 @@ class TestPhase(unittest.TestCase):
class TestPrintFingerprint(TFCTestCase):
def test_print_fingerprints(self):
- self.assertPrints("""\
+ self.assert_prints("""\
┌───────────────────────────────┐
│ Fingerprint for Alice │
│ │
@@ -184,7 +232,7 @@ class TestPrintFingerprint(TFCTestCase):
│ 54936 03101 11892 94057 51231 │
│ 59374 09637 58434 47573 71137 │
└───────────────────────────────┘ \n""",
- print_fingerprint, FINGERPRINT_LEN * b'\x01', 'Fingerprint for Alice')
+ print_fingerprint, FINGERPRINT_LENGTH * b'\x01', 'Fingerprint for Alice')
class TestPrintKey(TFCTestCase):
@@ -193,29 +241,71 @@ class TestPrintKey(TFCTestCase):
self.settings = Settings()
def test_print_kdk(self):
- self.assertPrints("""\
+ self.assert_prints("""\
┌─────────────────────────────────────────────────────────────────────┐
- │ Local key decryption key (to RxM) │
+ │ Local key decryption key (to Receiver) │
│ A B C D E F G H I J K L M N O P Q │
│ 5Hp Hag T65 TZz G1P H3C Su6 3k8 Dbp vD8 s5i p4n EB3 kEs reA bua tmU │
- └─────────────────────────────────────────────────────────────────────┘
-""", print_key, "Local key decryption key (to RxM)", bytes(32), self.settings)
+ └─────────────────────────────────────────────────────────────────────┘ \n""",
+ print_key, "Local key decryption key (to Receiver)",
+ bytes(SYMMETRIC_KEY_LENGTH), self.settings)
+ def test_print_kdk_local_testing(self):
self.settings.local_testing_mode = True
- self.assertPrints("""\
+ self.assert_prints("""\
┌─────────────────────────────────────────────────────┐
- │ Local key decryption key (to RxM) │
+ │ Local key decryption key (to Receiver) │
│ 5HpHagT65TZzG1PH3CSu63k8DbpvD8s5ip4nEB3kEsreAbuatmU │
- └─────────────────────────────────────────────────────┘
-""", print_key, "Local key decryption key (to RxM)", bytes(32), self.settings)
+ └─────────────────────────────────────────────────────┘ \n""",
+ print_key, "Local key decryption key (to Receiver)",
+ bytes(SYMMETRIC_KEY_LENGTH), self.settings)
+
+
+class TestPrintTitle(TFCTestCase):
+
+ def test_print_tx_title(self):
+ self.assert_prints(f"""\
+{CLEAR_ENTIRE_SCREEN+CURSOR_LEFT_UP_CORNER}
+{BOLD_ON} TFC - Transmitter {VERSION} {NORMAL_TEXT}\n
+""", print_title, TX)
+
+ def test_print_rx_title(self):
+ self.assert_prints(f"""\
+{CLEAR_ENTIRE_SCREEN+CURSOR_LEFT_UP_CORNER}
+{BOLD_ON} TFC - Receiver {VERSION} {NORMAL_TEXT}\n
+""", print_title, RX)
class TestPrintOnPreviousLine(TFCTestCase):
def test_print_on_previous_line(self):
- self.assertPrints(CURSOR_UP_ONE_LINE + CLEAR_ENTIRE_LINE, print_on_previous_line)
- self.assertPrints(2*(CURSOR_UP_ONE_LINE + CLEAR_ENTIRE_LINE), print_on_previous_line, reps=2)
- self.assertPrints(2*(CURSOR_UP_ONE_LINE + CLEAR_ENTIRE_LINE), print_on_previous_line, reps=2, flush=True)
+ self.assert_prints(CURSOR_UP_ONE_LINE + CLEAR_ENTIRE_LINE, print_on_previous_line)
+ self.assert_prints(2 * (CURSOR_UP_ONE_LINE + CLEAR_ENTIRE_LINE), print_on_previous_line, reps=2)
+ self.assert_prints(2 * (CURSOR_UP_ONE_LINE + CLEAR_ENTIRE_LINE), print_on_previous_line, reps=2, flush=True)
+
+
+class TestPrintSpacing(TFCTestCase):
+
+ def test_print_spacing(self):
+ for i in range(20):
+ self.assert_prints(i * '\n', print_spacing, i)
+
+
+class TestRPPrint(TFCTestCase):
+
+ def setUp(self):
+ self.ts = datetime.now()
+ self.timestamp = self.ts.strftime("%b %d - %H:%M:%S.%f")[:-4]
+
+ def test_bold_print(self):
+ self.assert_prints(f"{BOLD_ON}{self.timestamp} - testMessage{NORMAL_TEXT}\n",
+ rp_print, "testMessage", self.ts, bold=True)
+
+ def test_normal_print(self):
+ self.assert_prints(f"{self.timestamp} - testMessage\n", rp_print, "testMessage", self.ts, bold=False)
+
+ def test_works_without_timestamp(self):
+ self.assertIsNone(rp_print("testMessage"))
if __name__ == '__main__':
diff --git a/tests/common/test_path.py b/tests/common/test_path.py
index b5ca832..1a912f5 100644
--- a/tests/common/test_path.py
+++ b/tests/common/test_path.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,15 +16,15 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see <http://www.gnu.org/licenses/>.
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
-import builtins
import os
import _tkinter
import unittest
-from tkinter import filedialog
+from unittest import mock
+from unittest.mock import MagicMock
from src.common.path import ask_path_cli, ask_path_gui, Completer
@@ -33,64 +34,42 @@ from tests.utils import ignored, TFCTestCase
class TestAskPathGui(TFCTestCase):
+ file_path = '/home/user/file.txt'
+ path = '/home/user/'
+
def setUp(self):
- self.o_aof = filedialog.askopenfilename
- self.o_ad = filedialog.askdirectory
- self.o_input = builtins.input
self.settings = Settings()
- def tearDown(self):
- filedialog.askopenfilename = self.o_aof
- filedialog.askdirectory = self.o_ad
- builtins.input = self.o_input
-
- def test_disabled_gui_uses_cli(self):
- # Setup
+ @mock.patch('os.path.isfile', return_value=True)
+ @mock.patch('builtins.input', return_value=file_path)
+ def test_disabled_gui_uses_cli(self, *_):
self.settings.disable_gui_dialog = True
- builtins.input = lambda _: '/bin/mv'
+ self.assertEqual(ask_path_gui('prompt_msg', self.settings, get_file=True), self.file_path)
- # Test
- self.assertEqual(ask_path_gui('prompt_msg', self.settings, get_file=True), '/bin/mv')
+ @mock.patch('os.path.isfile', return_value=True)
+ @mock.patch('builtins.input', return_value=file_path)
+ @mock.patch('tkinter.filedialog.askopenfilename', side_effect=_tkinter.TclError)
+ def test_tcl_error_falls_back_to_cli(self, *_):
+ self.assertEqual(ask_path_gui('prompt_msg', self.settings, get_file=True), self.file_path)
- def test_tcl_error_falls_back_to_cli(self):
- # Setup
- builtins.input = lambda _: '/bin/mv'
- filedialog.askopenfilename = lambda title: (_ for _ in ()).throw(_tkinter.TclError)
+ @mock.patch('tkinter.Tk', return_value=MagicMock())
+ @mock.patch('os.path.isfile', return_value=True)
+ @mock.patch('tkinter.filedialog.askopenfilename', return_value=file_path)
+ def test_get_path_to_file_gui(self, *_):
+ self.assertEqual(ask_path_gui('path to file:', self.settings, get_file=True),
+ self.file_path)
- # Test
- self.assertEqual(ask_path_gui('prompt_msg', self.settings, get_file=True), '/bin/mv')
+ @mock.patch('tkinter.filedialog.askopenfilename', return_value='')
+ def test_no_path_to_file_raises_fr(self, _):
+ self.assert_fr("File selection aborted.", ask_path_gui, 'test message', self.settings, True)
- @unittest.skipIf("TRAVIS" in os.environ and os.environ["TRAVIS"] == "true", "Skip as Travis has no $DISPLAY.")
- def test_get_path_to_file_gui(self):
- # Setup
- filedialog.askopenfilename = lambda title: 'test_path_to_file'
+ @mock.patch('tkinter.filedialog.askdirectory', return_value=path)
+ def test_get_path_gui(self, _):
+ self.assertEqual(ask_path_gui('select path for file:', self.settings), self.path)
- # Test
- self.assertEqual(ask_path_gui('test message', self.settings, get_file=True), 'test_path_to_file')
-
- @unittest.skipIf("TRAVIS" in os.environ and os.environ["TRAVIS"] == "true", "Skip as Travis has no $DISPLAY.")
- def test_no_path_to_file_raises_fr(self):
- # Setup
- filedialog.askopenfilename = lambda title: ''
-
- # Test
- self.assertFR("File selection aborted.", ask_path_gui, 'test message', self.settings, True)
-
- @unittest.skipIf("TRAVIS" in os.environ and os.environ["TRAVIS"] == "true", "Skip as Travis has no $DISPLAY.")
- def test_get_path_gui(self):
- # Setup
- filedialog.askdirectory = lambda title: 'test_path'
-
- # Test
- self.assertEqual(ask_path_gui('test message', self.settings, get_file=False), 'test_path')
-
- @unittest.skipIf("TRAVIS" in os.environ and os.environ["TRAVIS"] == "true", "Skip as Travis has no $DISPLAY.")
- def test_no_path_raises_fr(self):
- # Setup
- filedialog.askdirectory = lambda title: ''
-
- # Test
- self.assertFR("Path selection aborted.", ask_path_gui, 'test message', self.settings, False)
+ @mock.patch('tkinter.filedialog.askdirectory', return_value='')
+ def test_no_path_raises_fr(self, _):
+ self.assert_fr("Path selection aborted.", ask_path_gui, 'test message', self.settings, False)
class TestCompleter(unittest.TestCase):
@@ -106,7 +85,7 @@ class TestCompleter(unittest.TestCase):
# Test path
completer = Completer(get_file=False)
self.assertEqual(completer.complete_path('/bin/'), [])
- self.assertEqual(completer.path_complete('/bin'), [])
+ self.assertEqual(completer.path_complete(['/bin']), [])
self.assertEqual(completer.path_complete(), [])
self.assertEqual(completer.complete_path(''), [])
self.assertEqual(completer.complete_path('/bin/sh'), ['/bin/sh '])
@@ -115,49 +94,39 @@ class TestCompleter(unittest.TestCase):
# Test file
completer = Completer(get_file=True)
self.assertTrue(len(completer.complete_path('/bin/')) > 0)
- self.assertTrue(completer.complete(0, 0))
+ self.assertTrue(completer.complete('', 0))
class TestPath(TFCTestCase):
def setUp(self):
- self.o_input = builtins.input
- input_list = ['/dev/zero', '/bin/mv', './testdir', './testfile']
- gen = iter(input_list)
- builtins.input = lambda _: str(next(gen))
-
with ignored(FileExistsError):
- os.mkdir('testdir/')
+ os.mkdir('test_dir/')
def tearDown(self):
- builtins.input = self.o_input
-
with ignored(OSError):
os.remove('testfile')
-
with ignored(OSError):
- os.rmdir('testdir/')
+ os.rmdir('test_dir/')
- def test_ask_path_cli(self):
- self.assertEqual(ask_path_cli('prompt_msg', get_file=True), '/bin/mv')
- self.assertEqual(ask_path_cli('prompt_msg'), 'testdir/')
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('os.path.isfile', side_effect=[False, True, True])
+ @mock.patch('builtins.input', side_effect=['file1', 'file2', './test_dir', './testfile', '', '/home',
+ '/dir_that_does_not_exist', '/bin/', KeyboardInterrupt])
+ def test_ask_path_cli(self, *_):
+ self.assertEqual(ask_path_cli('path to file:', get_file=True), 'file2')
+ self.assertEqual(ask_path_cli('prompt_msg'), 'test_dir/')
open('testfile', 'a+').close()
self.assertEqual(ask_path_cli('prompt_msg', get_file=True), 'testfile')
- builtins.input = lambda _: ''
- self.assertFR("File selection aborted.", ask_path_cli, 'prompt_msg', True)
-
- builtins.input = lambda _: '/home/'
- self.assertEqual(ask_path_cli('prompt_msg'), '/home/')
-
- input_list = ['/home', '/dir_that_does_not_exist', '/bin/']
- gen = iter(input_list)
- builtins.input = lambda _: str(next(gen))
+ self.assert_fr("File selection aborted.", ask_path_cli, 'prompt_msg', True)
self.assertEqual(ask_path_cli('prompt_msg'), '/home/')
self.assertEqual(ask_path_cli('prompt_msg'), '/bin/')
+ self.assert_fr("File path selection aborted.", ask_path_cli, 'prompt_msg', False)
+
if __name__ == '__main__':
unittest.main(exit=False)
diff --git a/tests/common/test_reed_solomon.py b/tests/common/test_reed_solomon.py
index 0f9ade3..c2238b2 100644
--- a/tests/common/test_reed_solomon.py
+++ b/tests/common/test_reed_solomon.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3.6
# -*- coding: utf-8 -*-
"""
@@ -6,11 +6,10 @@
# Copyright (c) 2015 rotorgit
# Copyright (c) 2015 Stephen Larroque
-The code below is edited and used under public domain license:
-https://github.com/tomerfiliba/reedsolomon/blob/master/LICENSE
+The Reed Solomon erasure code library has been released to the public domain.
-The comments/unused code have been intentionally removed. Original code is at
-https://github.com/tomerfiliba/reedsolomon/blob/master/tests/test_reedsolo.py
+https://github.com/lrq3000/reedsolomon/blob/master/LICENSE
+https://github.com/tomerfiliba/reedsolomon/blob/master/LICENSE
"""
import unittest
@@ -23,64 +22,84 @@ from src.common.reed_solomon import *
class TestReedSolomon(unittest.TestCase):
def test_simple(self):
- rs = RSCodec()
- msg = bytearray("hello world " * 10, "latin1")
- enc = rs.encode(msg)
- dec = rs.decode(enc)
+ rs = RSCodec()
+ msg = bytearray("hello world " * 10, "latin1")
+ enc = rs.encode(msg)
+ dec, dec_enc = rs.decode(enc)
self.assertEqual(dec, msg)
+ self.assertEqual(dec_enc, enc)
def test_correction(self):
- rs = RSCodec()
- msg = bytearray("hello world " * 10, "latin1")
- enc = rs.encode(msg)
- self.assertEqual(rs.decode(enc), msg)
-
+ rs = RSCodec()
+ msg = bytearray("hello world " * 10, "latin1")
+ enc = rs.encode(msg)
+ rmsg, renc = rs.decode(enc)
+ self.assertEqual(rmsg, msg)
+ self.assertEqual(renc, enc)
for i in [27, -3, -9, 7, 0]:
- enc[i] = 99
- self.assertEqual(rs.decode(enc), msg)
-
+ enc[i] = 99
+ rmsg, renc = rs.decode(enc)
+ self.assertEqual(rmsg, msg)
enc[82] = 99
self.assertRaises(ReedSolomonError, rs.decode, enc)
- def test_long(self):
- rs = RSCodec()
- msg = bytearray("a" * 10000, "latin1")
- enc = rs.encode(msg)
- dec = rs.decode(enc)
- self.assertEqual(dec, msg)
+ def test_check(self):
+ rs = RSCodec()
+ msg = bytearray("hello world " * 10, "latin1")
+ enc = rs.encode(msg)
+ rmsg, renc = rs.decode(enc)
+ self.assertEqual(rs.check(enc), [True])
+ self.assertEqual(rs.check(renc), [True])
+ for i in [27, -3, -9, 7, 0]:
+ enc[i] = 99
+ rmsg, renc = rs.decode(enc)
+ self.assertEqual(rs.check(enc), [False])
+ self.assertEqual(rs.check(renc), [True])
- enc[177] = 99
- enc[2212] = 88
- dec2 = rs.decode(enc)
+ def test_long(self):
+ rs = RSCodec()
+ msg = bytearray("a" * 10000, "latin1")
+ enc = rs.encode(msg)
+ dec, dec_enc = rs.decode(enc)
+ self.assertEqual(dec, msg)
+ self.assertEqual(dec_enc, enc)
+ enc2 = list(enc)
+ enc2[177] = 99
+ enc2[2212] = 88
+ dec2, dec_enc2 = rs.decode(enc2)
self.assertEqual(dec2, msg)
+ self.assertEqual(dec_enc2, enc)
def test_prim_fcr_basic(self):
nn = 30
kk = 18
tt = nn - kk
rs = RSCodec(tt, fcr=120, prim=0x187)
- hexencmsg = '00faa123555555c000000354064432c02800fe97c434e1ff5365cf8fafe4'
- encmsg = bytearray.fromhex(str(hexencmsg))
+ hexencmsg = '00faa123555555c000000354064432' \
+ 'c02800fe97c434e1ff5365cf8fafe4'
+ strf = str
+ encmsg = bytearray.fromhex(strf(hexencmsg))
decmsg = encmsg[:kk]
tem = rs.encode(decmsg)
self.assertEqual(encmsg, tem, msg="encoded does not match expected")
- tdm = rs.decode(tem)
+ tdm, rtem = rs.decode(tem)
self.assertEqual(tdm, decmsg, msg="decoded does not match original")
+ self.assertEqual(rtem, tem, msg="decoded mesecc does not match original")
- tem1 = bytearray(tem) # clone a copy
+ tem1 = bytearray(tem) # Clone a copy
# Encoding and decoding intact message seem OK, so test errors
numerrs = tt >> 1 # Inject tt/2 errors (expected to recover fully)
for i in sample(range(nn), numerrs): # inject errors in random places
- tem1[i] ^= 0xff # Flip all 8 bits
- tdm = rs.decode(tem1)
+ tem1[i] ^= 0xff # flip all 8 bits
+ tdm, _ = rs.decode(tem1)
self.assertEqual(tdm, decmsg, msg="decoded with errors does not match original")
- tem1 = bytearray(tem) # clone another copy
- numerrs += 1 # inject tt/2 + 1 errors (expected to fail and detect it)
- for i in sample(range(nn), numerrs): # inject errors in random places
- tem1[i] ^= 0xff # flip all 8 bits
+ tem1 = bytearray(tem) # Clone another copy
+ numerrs += 1 # Inject tt/2 + 1 errors (expected to fail and detect it)
+ for i in sample(range(nn), numerrs): # Inject errors in random places
+ tem1[i] ^= 0xff # Flip all 8 bits
# If this fails, it means excessive errors not detected
self.assertRaises(ReedSolomonError, rs.decode, tem1)
@@ -91,21 +110,23 @@ class TestReedSolomon(unittest.TestCase):
rs = RSCodec(tt, fcr=120, prim=0x187)
hexencmsg = '08faa123555555c000000354064432c0280e1b4d090cfc04' \
'887400000003500000000e1985ff9c6b33066ca9f43d12e8'
-
- encmsg = bytearray.fromhex(str(hexencmsg))
- decmsg = encmsg[:kk]
- tem = rs.encode(decmsg)
+ strf = str
+ encmsg = bytearray.fromhex(strf(hexencmsg))
+ decmsg = encmsg[:kk]
+ tem = rs.encode(decmsg)
self.assertEqual(encmsg, tem, msg="encoded does not match expected")
- tdm = rs.decode(tem)
+ tdm, rtem = rs.decode(tem)
self.assertEqual(tdm, decmsg, msg="decoded does not match original")
+ self.assertEqual(rtem, tem, msg="decoded mesecc does not match original")
tem1 = bytearray(tem)
numerrs = tt >> 1
for i in sample(range(nn), numerrs):
tem1[i] ^= 0xff
- tdm = rs.decode(tem1)
+ tdm, rtem = rs.decode(tem1)
self.assertEqual(tdm, decmsg, msg="decoded with errors does not match original")
+ self.assertEqual(rtem, tem, msg="decoded mesecc with errors does not match original")
tem1 = bytearray(tem)
numerrs += 1
@@ -115,8 +136,8 @@ class TestReedSolomon(unittest.TestCase):
def test_generator_poly(self):
"""\
- Test if generator poly finder is working correctly and if the
- all generators poly finder does output the same result.
+ Test if generator poly finder is working correctly and if
+ the all generators poly finder does output the same result
"""
n = 11
k = 3
@@ -127,8 +148,8 @@ class TestReedSolomon(unittest.TestCase):
prim = 0x11d
init_tables(generator=generator, prim=prim)
g = rs_generator_poly_all(n, fcr=fcr, generator=generator)
- self.assertEqual(list(g[n - k]), list(rs_generator_poly(n - k, fcr=fcr, generator=generator)))
- self.assertEqual(list(g[n - k]), [1, 106, 9, 105, 86, 5, 166, 76, 9])
+ self.assertEqual(list(g[n-k]), list(rs_generator_poly(n-k, fcr=fcr, generator=generator)))
+ self.assertEqual(list(g[n-k]), [1, 106, 9, 105, 86, 5, 166, 76, 9])
# Base 3 test
fcr = 0
@@ -136,155 +157,242 @@ class TestReedSolomon(unittest.TestCase):
prim = 0x11b
init_tables(generator=generator, prim=prim)
g = rs_generator_poly_all(n, fcr=fcr, generator=generator)
- self.assertEqual(list(g[n - k]), list(rs_generator_poly(n - k, fcr=fcr, generator=generator)))
- self.assertEqual(list(g[n - k]), [1, 128, 13, 69, 36, 145, 199, 165, 30])
+ self.assertEqual(list(g[n-k]), list(rs_generator_poly(n-k, fcr=fcr, generator=generator)))
+ self.assertEqual(list(g[n-k]), [1, 128, 13, 69, 36, 145, 199, 165, 30])
def test_prime_poly_build(self):
"""\
- Try if the prime polynomials finder works correctly for different
- GFs (ie, GF(2^6) to GF(2^10)) and with different generators.
+ Try if the prime polynomials finder works correctly for
+ different GFs (ie, GF(2^6) to GF(2^10)) and with different
+ generators.
"""
- params = {"count": 7,
- "c_exp": [6, 7, 7, 8, 8, 9, 10],
+ params = {"count": 7,
+ "c_exp": [6, 7, 7, 8, 8, 9, 10],
"generator": [2, 2, 3, 2, 3, 2, 2],
- "expected": [
- [67, 91, 97, 103, 109, 115],
- [131, 137, 143, 145, 157, 167, 171, 185, 191, 193, 203, 211, 213, 229, 239, 241, 247, 253],
- [131, 137, 143, 145, 157, 167, 171, 185, 191, 193, 203, 211, 213, 229, 239, 241, 247, 253],
- [285, 299, 301, 333, 351, 355, 357, 361, 369, 391, 397, 425, 451, 463, 487, 501],
- [283, 313, 319, 333, 351, 355, 357, 361, 375, 397, 415, 419, 425, 451, 501, 505],
- [529, 539, 545, 557, 563, 601, 607, 617, 623, 631, 637, 647, 661, 675, 677, 687, 695, 701, 719,
- 721, 731, 757, 761, 787, 789, 799, 803, 817, 827, 847, 859, 865, 875, 877, 883, 895, 901, 911,
- 949, 953, 967, 971, 973, 981, 985, 995, 1001, 1019],
- [1033, 1051, 1063, 1069, 1125, 1135, 1153, 1163, 1221, 1239, 1255, 1267, 1279, 1293, 1305, 1315,
- 1329, 1341, 1347, 1367, 1387, 1413, 1423, 1431, 1441, 1479, 1509, 1527, 1531, 1555, 1557, 1573,
- 1591, 1603, 1615, 1627, 1657, 1663, 1673, 1717, 1729, 1747, 1759, 1789, 1815, 1821, 1825, 1849,
- 1863, 1869, 1877, 1881, 1891, 1917, 1933, 1939, 1969, 2011, 2035, 2041]
- ]
+ "expected":
+ [
+ [67, 91, 97, 103, 109, 115],
+ [131, 137, 143, 145, 157, 167, 171, 185, 191, 193, 203, 211, 213, 229, 239, 241, 247, 253],
+ [131, 137, 143, 145, 157, 167, 171, 185, 191, 193, 203, 211, 213, 229, 239, 241, 247, 253],
+ [285, 299, 301, 333, 351, 355, 357, 361, 369, 391, 397, 425, 451, 463, 487, 501],
+ [283, 313, 319, 333, 351, 355, 357, 361, 375, 397, 415, 419, 425, 451, 501, 505],
+
+ [529, 539, 545, 557, 563, 601, 607, 617, 623, 631, 637, 647, 661, 675, 677, 687,
+ 695, 701, 719, 721, 731, 757, 761, 787, 789, 799, 803, 817, 827, 847, 859, 865,
+ 875, 877, 883, 895, 901, 911, 949, 953, 967, 971, 973, 981, 985, 995, 1001, 1019],
+
+ [1033, 1051, 1063, 1069, 1125, 1135, 1153, 1163, 1221, 1239, 1255, 1267, 1279, 1293, 1305,
+ 1315, 1329, 1341, 1347, 1367, 1387, 1413, 1423, 1431, 1441, 1479, 1509, 1527, 1531, 1555,
+ 1557, 1573, 1591, 1603, 1615, 1627, 1657, 1663, 1673, 1717, 1729, 1747, 1759, 1789, 1815,
+ 1821, 1825, 1849, 1863, 1869, 1877, 1881, 1891, 1917, 1933, 1939, 1969, 2011, 2035, 2041]
+ ]
}
for i in range(params['count']):
- self.assertEqual(find_prime_polys(generator=params['generator'][i], c_exp=params['c_exp'][i]), params["expected"][i])
+ self.assertEqual(find_prime_polys(generator=params['generator'][i],
+ c_exp=params['c_exp'][i]),
+ params["expected"][i])
def test_init_tables(self):
"""\
- Try if the look up table generator (galois field
- generator) works correctly for different parameters.
+ Try if the look up table generator (galois field generator)
+ works correctly for different parameters.
"""
params = [
- [0x11d, 2, 8],
- [0x11b, 3, 8],
- [0xfd, 3, 7]
+ [0x11d, 2, 8],
+ [0x11b, 3, 8],
+ [0xfd, 3, 7]
+ ]
+ expected = [
+ [
+ [0, 0, 1, 25, 2, 50, 26, 198, 3, 223, 51, 238, 27, 104, 199, 75, 4, 100, 224, 14, 52,
+ 141, 239, 129, 28, 193, 105, 248, 200, 8, 76, 113, 5, 138, 101, 47, 225, 36, 15, 33,
+ 53, 147, 142, 218, 240, 18, 130, 69, 29, 181, 194, 125, 106, 39, 249, 185, 201, 154,
+ 9, 120, 77, 228, 114, 166, 6, 191, 139, 98, 102, 221, 48, 253, 226, 152, 37, 179,
+ 16, 145, 34, 136, 54, 208, 148, 206, 143, 150, 219, 189, 241, 210, 19, 92, 131, 56,
+ 70, 64, 30, 66, 182, 163, 195, 72, 126, 110, 107, 58, 40, 84, 250, 133, 186, 61, 202,
+ 94, 155, 159, 10, 21, 121, 43, 78, 212, 229, 172, 115, 243, 167, 87, 7, 112, 192,
+ 247, 140, 128, 99, 13, 103, 74, 222, 237, 49, 197, 254, 24, 227, 165, 153, 119, 38,
+ 184, 180, 124, 17, 68, 146, 217, 35, 32, 137, 46, 55, 63, 209, 91, 149, 188, 207,
+ 205, 144, 135, 151, 178, 220, 252, 190, 97, 242, 86, 211, 171, 20, 42, 93, 158, 132,
+ 60, 57, 83, 71, 109, 65, 162, 31, 45, 67, 216, 183, 123, 164, 118, 196, 23, 73, 236,
+ 127, 12, 111, 246, 108, 161, 59, 82, 41, 157, 85, 170, 251, 96, 134, 177, 187, 204,
+ 62, 90, 203, 89, 95, 176, 156, 169, 160, 81, 11, 245, 22, 235, 122, 117, 44, 215,
+ 79, 174, 213, 233, 230, 231, 173, 232, 116, 214, 244, 234, 168, 80, 88, 175],
+
+ [1, 2, 4, 8, 16, 32, 64, 128, 29, 58, 116, 232, 205, 135, 19, 38, 76, 152, 45, 90,
+ 180, 117, 234, 201, 143, 3, 6, 12, 24, 48, 96, 192, 157, 39, 78, 156, 37, 74, 148,
+ 53, 106, 212, 181, 119, 238, 193, 159, 35, 70, 140, 5, 10, 20, 40, 80, 160, 93, 186,
+ 105, 210, 185, 111, 222, 161, 95, 190, 97, 194, 153, 47, 94, 188, 101, 202, 137, 15,
+ 30, 60, 120, 240, 253, 231, 211, 187, 107, 214, 177, 127, 254, 225, 223, 163, 91,
+ 182, 113, 226, 217, 175, 67, 134, 17, 34, 68, 136, 13, 26, 52, 104, 208, 189, 103,
+ 206, 129, 31, 62, 124, 248, 237, 199, 147, 59, 118, 236, 197, 151, 51, 102, 204, 133,
+ 23, 46, 92, 184, 109, 218, 169, 79, 158, 33, 66, 132, 21, 42, 84, 168, 77, 154, 41,
+ 82, 164, 85, 170, 73, 146, 57, 114, 228, 213, 183, 115, 230, 209, 191, 99, 198, 145,
+ 63, 126, 252, 229, 215, 179, 123, 246, 241, 255, 227, 219, 171, 75, 150, 49, 98, 196,
+ 149, 55, 110, 220, 165, 87, 174, 65, 130, 25, 50, 100, 200, 141, 7, 14, 28, 56, 112,
+ 224, 221, 167, 83, 166, 81, 162, 89, 178, 121, 242, 249, 239, 195, 155, 43, 86, 172,
+ 69, 138, 9, 18, 36, 72, 144, 61, 122, 244, 245, 247, 243, 251, 235, 203, 139, 11, 22,
+ 44, 88, 176, 125, 250, 233, 207, 131, 27, 54, 108, 216, 173, 71, 142, 1, 2, 4, 8, 16,
+ 32, 64, 128, 29, 58, 116, 232, 205, 135, 19, 38, 76, 152, 45, 90, 180, 117, 234, 201,
+ 143, 3, 6, 12, 24, 48, 96, 192, 157, 39, 78, 156, 37, 74, 148, 53, 106, 212, 181,
+ 119, 238, 193, 159, 35, 70, 140, 5, 10, 20, 40, 80, 160, 93, 186, 105, 210, 185, 111,
+ 222, 161, 95, 190, 97, 194, 153, 47, 94, 188, 101, 202, 137, 15, 30, 60, 120, 240,
+ 253, 231, 211, 187, 107, 214, 177, 127, 254, 225, 223, 163, 91, 182, 113, 226, 217,
+ 175, 67, 134, 17, 34, 68, 136, 13, 26, 52, 104, 208, 189, 103, 206, 129, 31, 62, 124,
+ 248, 237, 199, 147, 59, 118, 236, 197, 151, 51, 102, 204, 133, 23, 46, 92, 184, 109,
+ 218, 169, 79, 158, 33, 66, 132, 21, 42, 84, 168, 77, 154, 41, 82, 164, 85, 170, 73,
+ 146, 57, 114, 228, 213, 183, 115, 230, 209, 191, 99, 198, 145, 63, 126, 252, 229,
+ 215, 179, 123, 246, 241, 255, 227, 219, 171, 75, 150, 49, 98, 196, 149, 55, 110, 220,
+ 165, 87, 174, 65, 130, 25, 50, 100, 200, 141, 7, 14, 28, 56, 112, 224, 221, 167, 83,
+ 166, 81, 162, 89, 178, 121, 242, 249, 239, 195, 155, 43, 86, 172, 69, 138, 9, 18, 36,
+ 72, 144, 61, 122, 244, 245, 247, 243, 251, 235, 203, 139, 11, 22, 44, 88, 176, 125,
+ 250, 233, 207, 131, 27, 54, 108, 216, 173, 71, 142],
+
+ 255
+ ],
+ [
+ [0, 0, 25, 1, 50, 2, 26, 198, 75, 199, 27, 104, 51, 238, 223, 3, 100, 4, 224, 14, 52,
+ 141, 129, 239, 76, 113, 8, 200, 248, 105, 28, 193, 125, 194, 29, 181, 249, 185, 39,
+ 106, 77, 228, 166, 114, 154, 201, 9, 120, 101, 47, 138, 5, 33, 15, 225, 36, 18, 240,
+ 130, 69, 53, 147, 218, 142, 150, 143, 219, 189, 54, 208, 206, 148, 19, 92, 210, 241,
+ 64, 70, 131, 56, 102, 221, 253, 48, 191, 6, 139, 98, 179, 37, 226, 152, 34, 136, 145,
+ 16, 126, 110, 72, 195, 163, 182, 30, 66, 58, 107, 40, 84, 250, 133, 61, 186, 43, 121,
+ 10, 21, 155, 159, 94, 202, 78, 212, 172, 229, 243, 115, 167, 87, 175, 88, 168, 80,
+ 244, 234, 214, 116, 79, 174, 233, 213, 231, 230, 173, 232, 44, 215, 117, 122, 235,
+ 22, 11, 245, 89, 203, 95, 176, 156, 169, 81, 160, 127, 12, 246, 111, 23, 196, 73,
+ 236, 216, 67, 31, 45, 164, 118, 123, 183, 204, 187, 62, 90, 251, 96, 177, 134, 59,
+ 82, 161, 108, 170, 85, 41, 157, 151, 178, 135, 144, 97, 190, 220, 252, 188, 149, 207,
+ 205, 55, 63, 91, 209, 83, 57, 132, 60, 65, 162, 109, 71, 20, 42, 158, 93, 86, 242,
+ 211, 171, 68, 17, 146, 217, 35, 32, 46, 137, 180, 124, 184, 38, 119, 153, 227, 165,
+ 103, 74, 237, 222, 197, 49, 254, 24, 13, 99, 140, 128, 192, 247, 112, 7],
+
+ [1, 3, 5, 15, 17, 51, 85, 255, 26, 46, 114, 150, 161, 248, 19, 53, 95, 225, 56, 72,
+ 216, 115, 149, 164, 247, 2, 6, 10, 30, 34, 102, 170, 229, 52, 92, 228, 55, 89, 235,
+ 38, 106, 190, 217, 112, 144, 171, 230, 49, 83, 245, 4, 12, 20, 60, 68, 204, 79, 209,
+ 104, 184, 211, 110, 178, 205, 76, 212, 103, 169, 224, 59, 77, 215, 98, 166, 241, 8,
+ 24, 40, 120, 136, 131, 158, 185, 208, 107, 189, 220, 127, 129, 152, 179, 206, 73,
+ 219, 118, 154, 181, 196, 87, 249, 16, 48, 80, 240, 11, 29, 39, 105, 187, 214, 97,
+ 163, 254, 25, 43, 125, 135, 146, 173, 236, 47, 113, 147, 174, 233, 32, 96, 160, 251,
+ 22, 58, 78, 210, 109, 183, 194, 93, 231, 50, 86, 250, 21, 63, 65, 195, 94, 226, 61,
+ 71, 201, 64, 192, 91, 237, 44, 116, 156, 191, 218, 117, 159, 186, 213, 100, 172, 239,
+ 42, 126, 130, 157, 188, 223, 122, 142, 137, 128, 155, 182, 193, 88, 232, 35, 101,
+ 175, 234, 37, 111, 177, 200, 67, 197, 84, 252, 31, 33, 99, 165, 244, 7, 9, 27, 45,
+ 119, 153, 176, 203, 70, 202, 69, 207, 74, 222, 121, 139, 134, 145, 168, 227, 62, 66,
+ 198, 81, 243, 14, 18, 54, 90, 238, 41, 123, 141, 140, 143, 138, 133, 148, 167, 242,
+ 13, 23, 57, 75, 221, 124, 132, 151, 162, 253, 28, 36, 108, 180, 199, 82, 246, 1, 3,
+ 5, 15, 17, 51, 85, 255, 26, 46, 114, 150, 161, 248, 19, 53, 95, 225, 56, 72, 216,
+ 115, 149, 164, 247, 2, 6, 10, 30, 34, 102, 170, 229, 52, 92, 228, 55, 89, 235, 38,
+ 106, 190, 217, 112, 144, 171, 230, 49, 83, 245, 4, 12, 20, 60, 68, 204, 79, 209,
+ 104, 184, 211, 110, 178, 205, 76, 212, 103, 169, 224, 59, 77, 215, 98, 166, 241, 8,
+ 24, 40, 120, 136, 131, 158, 185, 208, 107, 189, 220, 127, 129, 152, 179, 206, 73,
+ 219, 118, 154, 181, 196, 87, 249, 16, 48, 80, 240, 11, 29, 39, 105, 187, 214, 97,
+ 163, 254, 25, 43, 125, 135, 146, 173, 236, 47, 113, 147, 174, 233, 32, 96, 160, 251,
+ 22, 58, 78, 210, 109, 183, 194, 93, 231, 50, 86, 250, 21, 63, 65, 195, 94, 226, 61,
+ 71, 201, 64, 192, 91, 237, 44, 116, 156, 191, 218, 117, 159, 186, 213, 100, 172,
+ 239, 42, 126, 130, 157, 188, 223, 122, 142, 137, 128, 155, 182, 193, 88, 232, 35,
+ 101, 175, 234, 37, 111, 177, 200, 67, 197, 84, 252, 31, 33, 99, 165, 244, 7, 9, 27,
+ 45, 119, 153, 176, 203, 70, 202, 69, 207, 74, 222, 121, 139, 134, 145, 168, 227, 62,
+ 66, 198, 81, 243, 14, 18, 54, 90, 238, 41, 123, 141, 140, 143, 138, 133, 148, 167,
+ 242, 13, 23, 57, 75, 221, 124, 132, 151, 162, 253, 28, 36, 108, 180, 199, 82, 246],
+
+ 255
+ ],
+ [
+ [0, 0, 7, 1, 14, 2, 8, 56, 21, 57, 9, 90, 15, 31, 63, 3, 28, 4, 64, 67, 16, 112, 97,
+ 32, 22, 47, 38, 58, 70, 91, 10, 108, 35, 109, 11, 87, 71, 79, 74, 92, 23, 82, 119,
+ 48, 104, 59, 39, 100, 29, 19, 54, 5, 45, 68, 65, 95, 77, 33, 98, 117, 17, 43, 115,
+ 113, 42, 114, 116, 76, 18, 53, 94, 44, 78, 73, 86, 34, 81, 118, 99, 103, 30, 62, 89,
+ 20, 126, 6, 55, 13, 111, 96, 66, 27, 46, 37, 107, 69, 36, 106, 26, 110, 61, 88, 12,
+ 125, 52, 93, 75, 41, 72, 85, 102, 80, 84, 101, 40, 51, 105, 25, 124, 60, 24, 123, 50,
+ 83, 122, 49, 120, 121],
+
+ [1, 3, 5, 15, 17, 51, 85, 2, 6, 10, 30, 34, 102, 87, 4, 12, 20, 60, 68, 49, 83, 8, 24,
+ 40, 120, 117, 98, 91, 16, 48, 80, 13, 23, 57, 75, 32, 96, 93, 26, 46, 114, 107, 64,
+ 61, 71, 52, 92, 25, 43, 125, 122, 115, 104, 69, 50, 86, 7, 9, 27, 45, 119, 100, 81,
+ 14, 18, 54, 90, 19, 53, 95, 28, 36, 108, 73, 38, 106, 67, 56, 72, 37, 111, 76, 41,
+ 123, 112, 109, 74, 35, 101, 82, 11, 29, 39, 105, 70, 55, 89, 22, 58, 78, 47, 113,
+ 110, 79, 44, 116, 97, 94, 31, 33, 99, 88, 21, 63, 65, 62, 66, 59, 77, 42, 126, 127,
+ 124, 121, 118, 103, 84, 1, 3, 5, 15, 17, 51, 85, 2, 6, 10, 30, 34, 102, 87, 4, 12,
+ 20, 60, 68, 49, 83, 8, 24, 40, 120, 117, 98, 91, 16, 48, 80, 13, 23, 57, 75, 32, 96,
+ 93, 26, 46, 114, 107, 64, 61, 71, 52, 92, 25, 43, 125, 122, 115, 104, 69, 50, 86, 7,
+ 9, 27, 45, 119, 100, 81, 14, 18, 54, 90, 19, 53, 95, 28, 36, 108, 73, 38, 106, 67,
+ 56, 72, 37, 111, 76, 41, 123, 112, 109, 74, 35, 101, 82, 11, 29, 39, 105, 70, 55, 89,
+ 22, 58, 78, 47, 113, 110, 79, 44, 116, 97, 94, 31, 33, 99, 88, 21, 63, 65, 62, 66,
+ 59, 77, 42, 126, 127, 124, 121, 118, 103, 84],
+
+ 127
+ ]
]
- expected = [[[0, 0, 1, 25, 2, 50, 26, 198, 3, 223, 51, 238, 27, 104, 199, 75, 4, 100, 224, 14, 52, 141, 239,
- 129, 28, 193, 105, 248, 200, 8, 76, 113, 5, 138, 101, 47, 225, 36, 15, 33, 53, 147, 142, 218, 240,
- 18, 130, 69, 29, 181, 194, 125, 106, 39, 249, 185, 201, 154, 9, 120, 77, 228, 114, 166, 6, 191,
- 139, 98, 102, 221, 48, 253, 226, 152, 37, 179, 16, 145, 34, 136, 54, 208, 148, 206, 143, 150, 219,
- 189, 241, 210, 19, 92, 131, 56, 70, 64, 30, 66, 182, 163, 195, 72, 126, 110, 107, 58, 40, 84, 250,
- 133, 186, 61, 202, 94, 155, 159, 10, 21, 121, 43, 78, 212, 229, 172, 115, 243, 167, 87, 7, 112,
- 192, 247, 140, 128, 99, 13, 103, 74, 222, 237, 49, 197, 254, 24, 227, 165, 153, 119, 38, 184, 180,
- 124, 17, 68, 146, 217, 35, 32, 137, 46, 55, 63, 209, 91, 149, 188, 207, 205, 144, 135, 151, 178,
- 220, 252, 190, 97, 242, 86, 211, 171, 20, 42, 93, 158, 132, 60, 57, 83, 71, 109, 65, 162, 31, 45,
- 67, 216, 183, 123, 164, 118, 196, 23, 73, 236, 127, 12, 111, 246, 108, 161, 59, 82, 41, 157, 85,
- 170, 251, 96, 134, 177, 187, 204, 62, 90, 203, 89, 95, 176, 156, 169, 160, 81, 11, 245, 22, 235,
- 122, 117, 44, 215, 79, 174, 213, 233, 230, 231, 173, 232, 116, 214, 244, 234, 168, 80, 88, 175],
- [1, 2, 4, 8, 16, 32, 64, 128, 29, 58, 116, 232, 205, 135, 19, 38, 76, 152, 45, 90, 180, 117, 234,
- 201, 143, 3, 6, 12, 24, 48, 96, 192, 157, 39, 78, 156, 37, 74, 148, 53, 106, 212, 181, 119, 238,
- 193, 159, 35, 70, 140, 5, 10, 20, 40, 80, 160, 93, 186, 105, 210, 185, 111, 222, 161, 95, 190, 97,
- 194, 153, 47, 94, 188, 101, 202, 137, 15, 30, 60, 120, 240, 253, 231, 211, 187, 107, 214, 177,
- 127, 254, 225, 223, 163, 91, 182, 113, 226, 217, 175, 67, 134, 17, 34, 68, 136, 13, 26, 52, 104,
- 208, 189, 103, 206, 129, 31, 62, 124, 248, 237, 199, 147, 59, 118, 236, 197, 151, 51, 102, 204,
- 133, 23, 46, 92, 184, 109, 218, 169, 79, 158, 33, 66, 132, 21, 42, 84, 168, 77, 154, 41, 82, 164,
- 85, 170, 73, 146, 57, 114, 228, 213, 183, 115, 230, 209, 191, 99, 198, 145, 63, 126, 252, 229,
- 215, 179, 123, 246, 241, 255, 227, 219, 171, 75, 150, 49, 98, 196, 149, 55, 110, 220, 165, 87,
- 174, 65, 130, 25, 50, 100, 200, 141, 7, 14, 28, 56, 112, 224, 221, 167, 83, 166, 81, 162, 89, 178,
- 121, 242, 249, 239, 195, 155, 43, 86, 172, 69, 138, 9, 18, 36, 72, 144, 61, 122, 244, 245, 247,
- 243, 251, 235, 203, 139, 11, 22, 44, 88, 176, 125, 250, 233, 207, 131, 27, 54, 108, 216, 173, 71,
- 142, 1, 2, 4, 8, 16, 32, 64, 128, 29, 58, 116, 232, 205, 135, 19, 38, 76, 152, 45, 90, 180, 117,
- 234, 201, 143, 3, 6, 12, 24, 48, 96, 192, 157, 39, 78, 156, 37, 74, 148, 53, 106, 212, 181, 119,
- 238, 193, 159, 35, 70, 140, 5, 10, 20, 40, 80, 160, 93, 186, 105, 210, 185, 111, 222, 161, 95,
- 190, 97, 194, 153, 47, 94, 188, 101, 202, 137, 15, 30, 60, 120, 240, 253, 231, 211, 187, 107, 214,
- 177, 127, 254, 225, 223, 163, 91, 182, 113, 226, 217, 175, 67, 134, 17, 34, 68, 136, 13, 26, 52,
- 104, 208, 189, 103, 206, 129, 31, 62, 124, 248, 237, 199, 147, 59, 118, 236, 197, 151, 51, 102,
- 204, 133, 23, 46, 92, 184, 109, 218, 169, 79, 158, 33, 66, 132, 21, 42, 84, 168, 77, 154, 41, 82,
- 164, 85, 170, 73, 146, 57, 114, 228, 213, 183, 115, 230, 209, 191, 99, 198, 145, 63, 126, 252,
- 229, 215, 179, 123, 246, 241, 255, 227, 219, 171, 75, 150, 49, 98, 196, 149, 55, 110, 220, 165,
- 87, 174, 65, 130, 25, 50, 100, 200, 141, 7, 14, 28, 56, 112, 224, 221, 167, 83, 166, 81, 162, 89,
- 178, 121, 242, 249, 239, 195, 155, 43, 86, 172, 69, 138, 9, 18, 36, 72, 144, 61, 122, 244, 245,
- 247, 243, 251, 235, 203, 139, 11, 22, 44, 88, 176, 125, 250, 233, 207, 131, 27, 54, 108, 216, 173,
- 71, 142]], [
- [0, 0, 25, 1, 50, 2, 26, 198, 75, 199, 27, 104, 51, 238, 223, 3, 100, 4, 224, 14, 52, 141, 129,
- 239, 76, 113, 8, 200, 248, 105, 28, 193, 125, 194, 29, 181, 249, 185, 39, 106, 77, 228, 166,
- 114, 154, 201, 9, 120, 101, 47, 138, 5, 33, 15, 225, 36, 18, 240, 130, 69, 53, 147, 218, 142,
- 150, 143, 219, 189, 54, 208, 206, 148, 19, 92, 210, 241, 64, 70, 131, 56, 102, 221, 253, 48,
- 191, 6, 139, 98, 179, 37, 226, 152, 34, 136, 145, 16, 126, 110, 72, 195, 163, 182, 30, 66, 58,
- 107, 40, 84, 250, 133, 61, 186, 43, 121, 10, 21, 155, 159, 94, 202, 78, 212, 172, 229, 243,
- 115, 167, 87, 175, 88, 168, 80, 244, 234, 214, 116, 79, 174, 233, 213, 231, 230, 173, 232, 44,
- 215, 117, 122, 235, 22, 11, 245, 89, 203, 95, 176, 156, 169, 81, 160, 127, 12, 246, 111, 23,
- 196, 73, 236, 216, 67, 31, 45, 164, 118, 123, 183, 204, 187, 62, 90, 251, 96, 177, 134, 59, 82,
- 161, 108, 170, 85, 41, 157, 151, 178, 135, 144, 97, 190, 220, 252, 188, 149, 207, 205, 55, 63,
- 91, 209, 83, 57, 132, 60, 65, 162, 109, 71, 20, 42, 158, 93, 86, 242, 211, 171, 68, 17, 146,
- 217, 35, 32, 46, 137, 180, 124, 184, 38, 119, 153, 227, 165, 103, 74, 237, 222, 197, 49, 254,
- 24, 13, 99, 140, 128, 192, 247, 112, 7],
- [1, 3, 5, 15, 17, 51, 85, 255, 26, 46, 114, 150, 161, 248, 19, 53, 95, 225, 56, 72, 216, 115,
- 149, 164, 247, 2, 6, 10, 30, 34, 102, 170, 229, 52, 92, 228, 55, 89, 235, 38, 106, 190, 217,
- 112, 144, 171, 230, 49, 83, 245, 4, 12, 20, 60, 68, 204, 79, 209, 104, 184, 211, 110, 178, 205,
- 76, 212, 103, 169, 224, 59, 77, 215, 98, 166, 241, 8, 24, 40, 120, 136, 131, 158, 185, 208,
- 107, 189, 220, 127, 129, 152, 179, 206, 73, 219, 118, 154, 181, 196, 87, 249, 16, 48, 80, 240,
- 11, 29, 39, 105, 187, 214, 97, 163, 254, 25, 43, 125, 135, 146, 173, 236, 47, 113, 147, 174,
- 233, 32, 96, 160, 251, 22, 58, 78, 210, 109, 183, 194, 93, 231, 50, 86, 250, 21, 63, 65, 195,
- 94, 226, 61, 71, 201, 64, 192, 91, 237, 44, 116, 156, 191, 218, 117, 159, 186, 213, 100, 172,
- 239, 42, 126, 130, 157, 188, 223, 122, 142, 137, 128, 155, 182, 193, 88, 232, 35, 101, 175,
- 234, 37, 111, 177, 200, 67, 197, 84, 252, 31, 33, 99, 165, 244, 7, 9, 27, 45, 119, 153, 176,
- 203, 70, 202, 69, 207, 74, 222, 121, 139, 134, 145, 168, 227, 62, 66, 198, 81, 243, 14, 18, 54,
- 90, 238, 41, 123, 141, 140, 143, 138, 133, 148, 167, 242, 13, 23, 57, 75, 221, 124, 132, 151,
- 162, 253, 28, 36, 108, 180, 199, 82, 246, 1, 3, 5, 15, 17, 51, 85, 255, 26, 46, 114, 150, 161,
- 248, 19, 53, 95, 225, 56, 72, 216, 115, 149, 164, 247, 2, 6, 10, 30, 34, 102, 170, 229, 52, 92,
- 228, 55, 89, 235, 38, 106, 190, 217, 112, 144, 171, 230, 49, 83, 245, 4, 12, 20, 60, 68, 204,
- 79, 209, 104, 184, 211, 110, 178, 205, 76, 212, 103, 169, 224, 59, 77, 215, 98, 166, 241, 8,
- 24, 40, 120, 136, 131, 158, 185, 208, 107, 189, 220, 127, 129, 152, 179, 206, 73, 219, 118,
- 154, 181, 196, 87, 249, 16, 48, 80, 240, 11, 29, 39, 105, 187, 214, 97, 163, 254, 25, 43, 125,
- 135, 146, 173, 236, 47, 113, 147, 174, 233, 32, 96, 160, 251, 22, 58, 78, 210, 109, 183, 194,
- 93, 231, 50, 86, 250, 21, 63, 65, 195, 94, 226, 61, 71, 201, 64, 192, 91, 237, 44, 116, 156,
- 191, 218, 117, 159, 186, 213, 100, 172, 239, 42, 126, 130, 157, 188, 223, 122, 142, 137, 128,
- 155, 182, 193, 88, 232, 35, 101, 175, 234, 37, 111, 177, 200, 67, 197, 84, 252, 31, 33, 99,
- 165, 244, 7, 9, 27, 45, 119, 153, 176, 203, 70, 202, 69, 207, 74, 222, 121, 139, 134, 145, 168,
- 227, 62, 66, 198, 81, 243, 14, 18, 54, 90, 238, 41, 123, 141, 140, 143, 138, 133, 148, 167,
- 242, 13, 23, 57, 75, 221, 124, 132, 151, 162, 253, 28, 36, 108, 180, 199, 82, 246]], [
- [0, 0, 7, 1, 14, 2, 8, 56, 21, 57, 9, 90, 15, 31, 63, 3, 28, 4, 64, 67, 16, 112, 97, 32, 22, 47,
- 38, 58, 70, 91, 10, 108, 35, 109, 11, 87, 71, 79, 74, 92, 23, 82, 119, 48, 104, 59, 39, 100,
- 29, 19, 54, 5, 45, 68, 65, 95, 77, 33, 98, 117, 17, 43, 115, 113, 42, 114, 116, 76, 18, 53, 94,
- 44, 78, 73, 86, 34, 81, 118, 99, 103, 30, 62, 89, 20, 126, 6, 55, 13, 111, 96, 66, 27, 46, 37,
- 107, 69, 36, 106, 26, 110, 61, 88, 12, 125, 52, 93, 75, 41, 72, 85, 102, 80, 84, 101, 40, 51,
- 105, 25, 124, 60, 24, 123, 50, 83, 122, 49, 120, 121],
- [1, 3, 5, 15, 17, 51, 85, 2, 6, 10, 30, 34, 102, 87, 4, 12, 20, 60, 68, 49, 83, 8, 24, 40, 120,
- 117, 98, 91, 16, 48, 80, 13, 23, 57, 75, 32, 96, 93, 26, 46, 114, 107, 64, 61, 71, 52, 92, 25,
- 43, 125, 122, 115, 104, 69, 50, 86, 7, 9, 27, 45, 119, 100, 81, 14, 18, 54, 90, 19, 53, 95, 28,
- 36, 108, 73, 38, 106, 67, 56, 72, 37, 111, 76, 41, 123, 112, 109, 74, 35, 101, 82, 11, 29, 39,
- 105, 70, 55, 89, 22, 58, 78, 47, 113, 110, 79, 44, 116, 97, 94, 31, 33, 99, 88, 21, 63, 65, 62,
- 66, 59, 77, 42, 126, 127, 124, 121, 118, 103, 84, 1, 3, 5, 15, 17, 51, 85, 2, 6, 10, 30, 34,
- 102, 87, 4, 12, 20, 60, 68, 49, 83, 8, 24, 40, 120, 117, 98, 91, 16, 48, 80, 13, 23, 57, 75,
- 32, 96, 93, 26, 46, 114, 107, 64, 61, 71, 52, 92, 25, 43, 125, 122, 115, 104, 69, 50, 86, 7, 9,
- 27, 45, 119, 100, 81, 14, 18, 54, 90, 19, 53, 95, 28, 36, 108, 73, 38, 106, 67, 56, 72, 37,
- 111, 76, 41, 123, 112, 109, 74, 35, 101, 82, 11, 29, 39, 105, 70, 55, 89, 22, 58, 78, 47, 113,
- 110, 79, 44, 116, 97, 94, 31, 33, 99, 88, 21, 63, 65, 62, 66, 59, 77, 42, 126, 127, 124, 121,
- 118, 103, 84]]]
for i in range(len(params)):
- p = params[i]
- expected_log_t, expected_exp_t = expected[i]
- log_t, exp_t = init_tables(prim=p[0], generator=p[1], c_exp=p[2])
+ p = params[i]
+ expected_log_t, expected_exp_t, expected_field_charac_t = expected[i]
+ log_t, exp_t, field_charac_t = init_tables(prim=p[0], generator=p[1], c_exp=p[2])
+ self.assertEqual(field_charac_t, expected_field_charac_t)
self.assertEqual(list(log_t), expected_log_t)
self.assertEqual(list(exp_t), expected_exp_t)
+class TestBigReedSolomon(unittest.TestCase):
+
+ def test_find_prime_polys(self):
+ self.assertEqual(find_prime_polys(c_exp=4), [19, 25])
+ self.assertEqual(find_prime_polys(),
+ [285, 299, 301, 333, 351, 355, 357, 361, 369, 391, 397, 425, 451, 463, 487, 501])
+
+ self.assertEqual(find_prime_polys(fast_primes=True), [397, 463, 487])
+ self.assertEqual(find_prime_polys(c_exp=9, fast_primes=True, single=True), 557)
+
+ def test_c_exp_9(self):
+ rsc = RSCodec(12, c_exp=9)
+ rsc2 = RSCodec(12, nsize=511)
+ self.assertEqual(rsc.c_exp, rsc2.c_exp)
+ self.assertEqual(rsc.nsize, rsc2.nsize)
+
+ mes = 'a'*((511-12)*2)
+ mesecc = rsc.encode(mes)
+ mesecc[2] = 1
+ mesecc[-1] = 1
+ rmes, rmesecc = rsc.decode(mesecc)
+ self.assertEqual(rsc.check(mesecc), [False, False])
+ self.assertEqual(rsc.check(rmesecc), [True, True])
+ self.assertEqual([x for x in rmes], [ord(x) for x in mes])
+
+ def test_c_exp_12(self):
+ rsc = RSCodec(12, c_exp=12)
+ rsc2 = RSCodec(12, nsize=4095)
+ self.assertEqual(rsc.c_exp, rsc2.c_exp)
+ self.assertEqual(rsc.nsize, rsc2.nsize)
+
+ mes = 'a'*(4095-12)
+ mesecc = rsc.encode(mes)
+ mesecc[2] = 1
+ mesecc[-1] = 1
+ rmes, rmesecc = rsc.decode(mesecc)
+ self.assertEqual(rsc.check(mesecc), [False])
+ self.assertEqual(rsc.check(rmesecc), [True])
+ self.assertEqual([x for x in rmes], [ord(x) for x in mes])
+
+ def test_multiple_rs_codec(self):
+ """Test multiple RSCodec instances with different parameters."""
+ mes = 'A' * 30
+ rs_256 = RSCodec(102)
+ rs_1024 = RSCodec(900, c_exp=10)
+ bytearray(rs_1024.decode(rs_1024.encode(mes))[0])
+ rs_256.encode(mes)
+ rs_1024.encode(mes)
+ bytearray(rs_256.decode(rs_256.encode(mes))[0])
+ # At this point, there should not have been any exception raised!
+
+
class TestGFArithmetics(unittest.TestCase):
"""Test Galois Field arithmetics."""
def test_multiply_nolut(self):
"""\
Try to multiply without look-up tables
- (necessary to build the look-up tables!).
+ (necessary to build the look-up tables!)
"""
a = 30
b = 19
@@ -292,22 +400,88 @@ class TestGFArithmetics(unittest.TestCase):
generator = 2
prim = 0x11d
- # Compare the LUT multiplication and noLUT
+ # Compare the LUT multiplication and noLUT.
init_tables(prim=prim, generator=generator)
self.assertEqual(gf_mul(a, b), gf_mult_nolut(a, b, prim=prim))
# More Galois Field multiplications
- self.assertEqual(gf_mult_nolut(5, 6, prim=0x11b), 30)
+ self.assertEqual(gf_mult_nolut(5, 6, prim=0x11b), 30)
self.assertEqual(gf_mult_nolut(3, 125, prim=0x11b), 135)
self.assertEqual(gf_mult_nolut(2, 200, prim=0x11d), 141)
self.assertEqual(gf_mult_nolut_slow(2, 200, prim=0x11d), 141)
# Multiplications in GF(2^7)
- self.assertEqual(gf_mult_nolut(3, 125, prim=0xfd, field_charac_full=128), 122)
+ self.assertEqual(gf_mult_nolut(3, 125, prim=0xfd, field_charac_full=128), 122)
- # Multiplications outside of the finite field (we revert to standard integer multiplications just to see if it works)
+ # Multiplications outside of the finite field (we revert to
+ # standard integer multiplications just to see if it works).
self.assertEqual(gf_mult_nolut(3, 125, carryless=False), 375)
- self.assertEqual(gf_mult_nolut_slow(4, 125), 500) # the second method, just to check that everything's alright
+
+ # The second method, just to check that everything's alright
+ self.assertEqual(gf_mult_nolut_slow(4, 125), 500)
+
+ def test_gf_operations(self):
+ """Try various Galois Field 2 operations"""
+ init_tables()
+
+ a = 30
+ b = 19
+
+ # Addition and subtraction (they are the same in GF(2^p))
+ self.assertEqual(gf_add(0, 0), 0)
+ self.assertEqual(gf_add(0, 0), gf_sub(0, 0))
+ self.assertEqual(gf_add(1, 0), 1)
+ self.assertEqual(gf_add(1, 0), gf_sub(1, 0))
+ self.assertEqual(gf_add(0, 1), 1)
+ self.assertEqual(gf_add(0, 1), gf_sub(0, 1))
+ self.assertEqual(gf_add(1, 1), 0)
+ self.assertEqual(gf_add(1, 1), gf_sub(1, 1))
+ self.assertEqual(gf_add(a, b), 13)
+ self.assertEqual(gf_add(a, b), gf_sub(a, b))
+ self.assertEqual(gf_add(0, b), b)
+ self.assertEqual(gf_add(0, b), gf_sub(0, b))
+ self.assertEqual(gf_add(a, 0), a)
+ self.assertEqual(gf_add(a, 0), gf_sub(a, 0))
+ self.assertEqual(gf_add(a, 1), (a+1))
+ self.assertEqual(gf_add(a, 1), gf_sub(a, 1))
+ self.assertEqual(gf_add(1, a), (a+1))
+ self.assertEqual(gf_add(1, a), gf_sub(1, a))
+ self.assertEqual(gf_add(255, 1), 254)
+
+ # Negation
+ self.assertEqual(gf_neg(a), a)
+ self.assertEqual(gf_neg(b), b)
+
+ # Division
+ self.assertEqual(gf_div(a, 1), a)
+ self.assertEqual(gf_div(12, 3), 4)
+ self.assertEqual(gf_div(a, b), 222)
+ self.assertEqual(gf_div(b, a), 25)
+ self.assertEqual(gf_div(0, a), 0)
+ self.assertRaises(ZeroDivisionError, gf_div, *[a, 0])
+
+
+class TestSimpleFuncs(unittest.TestCase):
+ """\
+ Test simple functions and see if the results
+ are equivalent with optimized functions
+ """
+
+ def test_gf_poly_mul_simple(self):
+ a = [1, 12, 14, 9]
+ b = [0, 23, 2, 15]
+ self.assertEqual(gf_poly_mul(a, b), gf_poly_mul_simple(a, b))
+
+ def test_gf_poly_neg(self):
+ a = [1, 12, 14, 9]
+ self.assertEqual(gf_poly_neg(a), a)
+
+ def test_rs_simple_encode_msg(self):
+ a = bytearray("hello world", "latin1")
+ nsym = 10
+ init_tables()
+ self.assertEqual(rs_simple_encode_msg(a, nsym),
+ rs_encode_msg(a, nsym))
class TestRSCodecUniversalCrossValidation(unittest.TestCase):
@@ -318,128 +492,124 @@ class TestRSCodecUniversalCrossValidation(unittest.TestCase):
"""
def test_main(self):
+
def cartesian_product_dict_items(dicts):
+ """Return a generator of Cartesian-product dictionaries."""
return (dict(zip(dicts, x)) for x in itertools.product(*dicts.values()))
- # If one or more tests don't pass, you can enable
- # this flag to True to get verbose output to debug
- debugg = False
-
orig_mes = bytearray("hello world", "latin1")
- n = len(orig_mes) * 2
- k = len(orig_mes)
- nsym = n - k
- istart = 0
+ n = len(orig_mes)*2
+ k = len(orig_mes)
+ istart = 0
+
+ params = {"count": 5,
+ "fcr": [120, 0, 1, 1, 1],
+ "prim": [0x187, 0x11d, 0x11b, 0xfd, 0xfd],
+ "generator": [2, 2, 3, 3, 2],
+ "c_exponent": [8, 8, 8, 7, 7]}
- params = {"count": 5,
- "fcr": [120, 0, 1, 1, 1],
- "prim": [0x187, 0x11d, 0x11b, 0xfd, 0xfd],
- "generator": [2, 2, 3, 3, 2],
- "c_exponent": [8, 8, 8, 7, 7],
- }
cases = {
- "errmode": [1, 2, 3, 4],
- "erratasnb_errorsnb_onlyeras": [[8, 3, False], [6, 5, False], [5, 5, False],
- [11, 0, True], [11, 0, False], [0, 0, False]],
- # errata number (errors+erasures), erasures number and only_erasures:
- # the last item is the value for only_erasures (True/False)
- }
-
- ############################$
+ "errmode": [1, 2, 3, 4],
+ # Errata number (errors+erasures), erasures number and
+ # only_erasures: the last item is the value for only_erasures
+ # (True/False)
+ "erratasnb_errorsnb_onlyeras": [[8, 3, False],
+ [6, 5, False],
+ [5, 5, False],
+ [11, 0, True],
+ [11, 0, False],
+ [0, 0, False]]}
results_br = []
it = 0
for p in range(params["count"]):
- fcr = params["fcr"][p]
- prim = params["prim"][p]
- generator = params["generator"][p]
+ fcr = params["fcr"][p]
+ prim = params["prim"][p]
+ generator = params["generator"][p]
c_exponent = params["c_exponent"][p]
for case in cartesian_product_dict_items(cases):
- errmode = case["errmode"]
- erratanb = case["erratasnb_errorsnb_onlyeras"][0]
- errnb = case["erratasnb_errorsnb_onlyeras"][1]
+ errmode = case["errmode"]
+ erratanb = case["erratasnb_errorsnb_onlyeras"][0]
+ errnb = case["erratasnb_errorsnb_onlyeras"][1]
only_erasures = case["erratasnb_errorsnb_onlyeras"][2]
it += 1
- if debugg:
- print("it ", it)
- print("param", p)
- print(case)
-
# Reed-Solomon
+
# Init the RS codec
init_tables(generator=generator, prim=prim, c_exp=c_exponent)
g = rs_generator_poly_all(n, fcr=fcr, generator=generator)
# Encode the message
- rmesecc = rs_encode_msg(orig_mes, n - k, gen=g[n - k])
- rmesecc_orig = rmesecc[
- :] # make a copy of the original message to check later if fully corrected (because the syndrome may be wrong sometimes)
- # Tamper the message
+ rmesecc = rs_encode_msg(orig_mes, n-k, gen=g[n-k])
+
+ # Make a copy of the original message to check later if
+ # fully corrected (because the syndrome may be wrong
+ # sometimes).
+ rmesecc_orig = rmesecc[:]
+
+ # Tamper the message.
+
if erratanb > 0:
if errmode == 1:
- sl = slice(istart, istart + erratanb)
+ sl = slice(istart, istart+erratanb)
elif errmode == 2:
- sl = slice(-istart - erratanb - (n - k), -(n - k))
+ sl = slice(-istart-erratanb-(n-k), -(n-k))
elif errmode == 3:
- sl = slice(-istart - erratanb - 1, -1)
+ sl = slice(-istart-erratanb-1, -1)
elif errmode == 4:
- sl = slice(-istart - erratanb, None)
- if debugg:
- print("Removed slice:", list(rmesecc[sl]), rmesecc[sl])
- rmesecc[sl] = [0] * erratanb # replace with null bytes
+ sl = slice(-istart-erratanb, None)
+ else:
+ raise ValueError
- # Generate the erasures positions (if any)
+ rmesecc[sl] = [0] * erratanb # Replace with null bytes.
+
+ # Generate the erasures positions (if any).
erase_pos = [x for x in range(len(rmesecc)) if rmesecc[x] == 0]
- # Remove the errors positions (must not be known by definition)
- if errnb > 0: erase_pos = erase_pos[:-errnb]
- if debugg:
- print("erase_pos", erase_pos)
- print("coef_pos", [len(rmesecc) - 1 - pos for pos in erase_pos])
- print("Errata total: ", erratanb - errnb + errnb * 2, " -- Correctable? ",
- (erratanb - errnb + errnb * 2 <= nsym))
- # Decoding the corrupted codeword
- # -- Forney syndrome method
+ # Remove the errors positions (must not be known by definition).
+ if errnb > 0:
+ erase_pos = erase_pos[:-errnb]
+
+ # Decoding the corrupted codeword.
+
+ # Forney syndrome method.
try:
- rmes, recc = rs_correct_msg(rmesecc, n - k, fcr=fcr, generator=generator,
- erase_pos=erase_pos, only_erasures=only_erasures)
+ rmes, recc = rs_correct_msg(rmesecc,
+ n-k,
+ fcr=fcr,
+ generator=generator,
+ erase_pos=erase_pos,
+ only_erasures=only_erasures)
- # Check if correct by syndrome analysis (can be wrong)
- results_br.append(rs_check(rmes + recc, n - k, fcr=fcr, generator=generator))
+ # Check if correct by syndrome analysis (can be wrong).
+ results_br.append(rs_check(rmes + recc, n-k, fcr=fcr, generator=generator))
- # Check if correct by comparing to the original message (always correct)
- results_br.append(rmesecc_orig == (rmes + recc))
- if debugg and not rs_check(rmes + recc, n - k, fcr=fcr, generator=generator) or not (
- rmesecc_orig == (rmes + recc)): raise ReedSolomonError("False!!!!!")
+ # Check if correct by comparing to the original message (always correct).
+ results_br.append(rmesecc_orig == (rmes+recc))
- except ReedSolomonError as exc:
- results_br.append(False)
- results_br.append(False)
- if debugg:
- print("====")
- print("ERROR! Details:")
- print("param", p)
- print(case)
- print(erase_pos)
- print("original_msg", rmesecc_orig)
- print("tampered_msg", rmesecc)
- print("decoded_msg", rmes + recc)
- print("checks: ", rs_check(rmes + recc, n - k, fcr=fcr, generator=generator), rmesecc_orig == (rmes + recc))
- print("====")
- raise exc
-
- # -- Without Forney syndrome method
- try:
- results_br.append(rs_check(rmes + recc, n - k, fcr=fcr, generator=generator))
- results_br.append(rmesecc_orig == (rmes + recc))
except ReedSolomonError:
results_br.append(False)
results_br.append(False)
- if debugg: print("-----")
+ # Without Forney syndrome method
+ try:
+ rmes, recc = rs_correct_msg_nofsynd(rmesecc,
+ n-k,
+ fcr=fcr,
+ generator=generator,
+ erase_pos=erase_pos,
+ only_erasures=only_erasures)
+ results_br.append(rs_check(rmes + recc,
+ n-k,
+ fcr=fcr,
+ generator=generator))
+ results_br.append(rmesecc_orig == (rmes+recc))
+ except ReedSolomonError:
+ results_br.append(False)
+ results_br.append(False)
self.assertTrue(results_br.count(True) == len(results_br))
diff --git a/tests/common/test_statics.py b/tests/common/test_statics.py
index 64eb1f5..adbd894 100644
--- a/tests/common/test_statics.py
+++ b/tests/common/test_statics.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,18 +16,21 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
+along with TFC. If not, see .
"""
import unittest
import src.common.statics
+from src.common.encoding import onion_address_to_pub_key
+from src.common.misc import validate_onion_addr
+
class TestStatics(unittest.TestCase):
def test_uniqueness(self):
- variable_list = [getattr(src.common.statics, item) for item in dir(src.common.statics) if not item.startswith("__")]
+ variable_list = [getattr(src.common.statics, i) for i in dir(src.common.statics) if not i.startswith('__')]
variable_list = [v for v in variable_list if (isinstance(v, str) or isinstance(v, bytes))]
# Debugger
@@ -39,11 +43,63 @@ class TestStatics(unittest.TestCase):
spacing = (3 - len(unique_variable)) * ' '
print(f"Setting value '{unique_variable}'{spacing} appeared in {repeats} variables: ", end='')
items = [i for i in dir(src.common.statics)
- if not i.startswith("__") and getattr(src.common.statics, i) == unique_variable]
+ if not i.startswith('__') and getattr(src.common.statics, i) == unique_variable]
print(', '.join(items))
self.assertEqual(len(list(set(variable_list))), len(variable_list))
+ def test_group_id_length_is_not_same_as_onion_service_pub_key_length(self):
+ """\
+ In current implementation, `src.common.db_logs.remove_logs`
+ determines the type of data to be removed from the length of
+ provided `selector` parameter. If group ID length is set to same
+ length as Onion Service public keys, the function is no longer
+ able to distinguish what type of entries (contacts or group
+ logs) should be removed from the database.
+ """
+ self.assertNotEqual(src.common.statics.ONION_SERVICE_PUBLIC_KEY_LENGTH,
+ src.common.statics.GROUP_ID_LENGTH)
+
+ def test_reserved_accounts_are_valid(self):
+ """\
+ Each used account placeholder should be a valid, but reserved
+ account.
+ """
+ reserved_accounts = [src.common.statics.LOCAL_ID,
+ src.common.statics.DUMMY_CONTACT,
+ src.common.statics.DUMMY_MEMBER]
+
+ for account in reserved_accounts:
+ self.assertEqual(validate_onion_addr(account), "Error: Can not add reserved account.")
+
+ # Test each account is unique.
+ self.assertEqual(len(reserved_accounts),
+ len(set(reserved_accounts)))
+
+ def test_local_pubkey(self):
+ """Test that local key's reserved public key is valid."""
+ self.assertEqual(src.common.statics.LOCAL_PUBKEY,
+ onion_address_to_pub_key(src.common.statics.LOCAL_ID))
+
+ def test_group_management_header_length_matches_datagram_header_length(self):
+ """
+ As group management messages are handled as messages available
+ to Relay Program, the header should be the same as any datagrams
+ handled by the Relay program.
+ """
+ self.assertEqual(src.common.statics.GROUP_MGMT_HEADER_LENGTH,
+ src.common.statics.DATAGRAM_HEADER_LENGTH)
+
+ def test_key_exchanges_start_with_different_letter(self):
+ """
+ Key exchange can be selected by entering just X to represent
+ X448 or P to represent PSK. This test detects if selection
+ names would ever be set to something like PUBLIC and PSK
+ that both start with P.
+ """
+ self.assertNotEqual(src.common.statics.ECDHE[:1],
+ src.common.statics.PSK[:1])
+
if __name__ == '__main__':
unittest.main(exit=False)
diff --git a/tests/mock_classes.py b/tests/mock_classes.py
index df31611..730760b 100644
--- a/tests/mock_classes.py
+++ b/tests/mock_classes.py
@@ -1,7 +1,8 @@
#!/usr/bin/env python3.6
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -14,77 +15,91 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
+along with TFC. If not, see .
"""
import time
from datetime import datetime
-from typing import Iterable, Sized
+from typing import Generator, Iterable, List, Sized
+
+import nacl.signing
from src.common.db_contacts import Contact
from src.common.db_groups import Group
from src.common.db_keys import KeySet
-from src.common.db_contacts import ContactList as OrigContactList
-from src.common.db_groups import GroupList as OrigGroupList
-from src.common.db_keys import KeyList as OrigKeyList
-from src.common.db_masterkey import MasterKey as OrigMasterKey
-from src.common.db_settings import Settings as OrigSettings
+from src.common.db_contacts import ContactList as OrigContactList
+from src.common.db_groups import GroupList as OrigGroupList
+from src.common.db_onion import OnionService as OrigOnionService
+from src.common.db_keys import KeyList as OrigKeyList
+from src.common.db_masterkey import MasterKey as OrigMasterKey
+from src.common.gateway import Gateway as OrigGateway
+from src.common.gateway import GatewaySettings as OrigGatewaySettings
+from src.common.db_settings import Settings as OrigSettings
+from src.common.encoding import pub_key_to_onion_address, pub_key_to_short_address
+from src.common.misc import calculate_race_condition_delay
+from src.common.reed_solomon import RSCodec
from src.common.statics import *
-from src.tx.windows import TxWindow as OrigTxWindow
+from src.transmitter.windows import TxWindow as OrigTxWindow
-from src.rx.packet import PacketList as OrigPacketList
-from src.rx.windows import RxWindow as OrigRxWindow
+from src.receiver.packet import PacketList as OrigPacketList
+from src.receiver.windows import RxWindow as OrigRxWindow
+
+from tests.utils import nick_to_pub_key, group_name_to_group_id
-def create_contact(nick ='Alice',
- user ='user',
- txfp =FINGERPRINT_LEN * b'\x01',
- rxfp =FINGERPRINT_LEN * b'\x02',
+def create_contact(nick,
+ tx_fingerprint=FINGERPRINT_LENGTH * b'\x01',
+ rx_fingerprint=FINGERPRINT_LENGTH * b'\x02',
+ kex_status =KEX_STATUS_VERIFIED,
log_messages =True,
file_reception=True,
notifications =True):
- """Create mock contact object."""
- account = LOCAL_ID if nick == LOCAL_ID else f'{nick.lower()}@jabber.org'
- user = LOCAL_ID if nick == LOCAL_ID else f'{user.lower()}@jabber.org'
- return Contact(account, user, nick,
- txfp, rxfp,
+ """Create a mock contact object."""
+ if nick == LOCAL_ID:
+ pub_key = LOCAL_PUBKEY
+ nick = LOCAL_NICK
+ else:
+ pub_key = nick_to_pub_key(nick)
+
+ return Contact(pub_key, nick,
+ tx_fingerprint, rx_fingerprint, kex_status,
log_messages, file_reception, notifications)
-def create_group(name='testgroup', nick_list=None):
- """Create mock group object."""
+def create_group(name, nick_list=None):
+ """Create a mock group object."""
if nick_list is None:
nick_list = ['Alice', 'Bob']
settings = Settings()
- store_f = lambda: None
members = [create_contact(n) for n in nick_list]
- return Group(name, False, False, members, settings, store_f)
+ return Group(name, group_name_to_group_id(name), False, False, members, settings, lambda: None)
-def create_keyset(nick ='Alice',
- tx_key =KEY_LENGTH * b'\x01',
- tx_hek =KEY_LENGTH * b'\x01',
- rx_key =KEY_LENGTH * b'\x01',
- rx_hek =KEY_LENGTH * b'\x01',
+def create_keyset(nick,
+ tx_key=SYMMETRIC_KEY_LENGTH * b'\x01',
+ tx_hek=SYMMETRIC_KEY_LENGTH * b'\x01',
+ rx_key=SYMMETRIC_KEY_LENGTH * b'\x01',
+ rx_hek=SYMMETRIC_KEY_LENGTH * b'\x01',
tx_harac=INITIAL_HARAC,
rx_harac=INITIAL_HARAC,
- store_f =None):
- """Create mock keyset object."""
- account = LOCAL_ID if nick == LOCAL_ID else f'{nick.lower()}@jabber.org'
- store_f = lambda: None if store_f is None else store_f
- return KeySet(account, tx_key, tx_hek, rx_key, rx_hek, tx_harac, rx_harac, store_f)
+ store_f=None):
+ """Create a mock keyset object."""
+ pub_key = LOCAL_PUBKEY if nick == LOCAL_ID else nick_to_pub_key(nick)
+ return KeySet(pub_key, tx_key, tx_hek, rx_key, rx_hek, tx_harac, rx_harac,
+ store_keys=lambda: None if store_f is None else store_f)
def create_rx_window(nick='Alice'):
- account = LOCAL_ID if nick == LOCAL_ID else f'{nick.lower()}@jabber.org'
- return RxWindow(uid=account)
+ """Create a mock Rx-window object."""
+ pub_key = LOCAL_PUBKEY if nick == LOCAL_ID else nick_to_pub_key(nick)
+ return RxWindow(uid=pub_key)
# Common
class ContactList(OrigContactList, Iterable, Sized):
- """Mock object for unittesting."""
+ """Mock the object for unittesting."""
def __init__(self, nicks=None, **kwargs):
self.master_key = MasterKey()
@@ -94,148 +109,224 @@ class ContactList(OrigContactList, Iterable, Sized):
for key, value in kwargs.items():
setattr(self, key, value)
+ def __iter__(self) -> Generator:
+ yield from self.contacts
+
def store_contacts(self):
+ """Mock method."""
pass
def load_contacts(self):
+ """Mock method."""
pass
def print_contacts(self):
+ """Mock method."""
pass
-class Gateway(object):
- """Mock object for unittesting."""
+class Gateway(OrigGateway):
+ """Mock the object for unittesting."""
def __init__(self, **kwargs):
- self.packets = []
- for key, value in kwargs.items():
- setattr(self, key, value)
+ self.packets = []
+ self.settings = GatewaySettings(**kwargs)
+ self.rs = RSCodec(2 * self.settings.serial_error_correction)
def write(self, output):
+ """Mock method."""
self.packets.append(output)
class GroupList(OrigGroupList, Iterable, Sized):
- """Mock object for unittesting."""
+ """Mock the object for unittesting."""
- def __init__(self, groups = None, **kwargs):
+ def __init__(self, groups=None, **kwargs):
self.master_key = MasterKey()
self.settings = Settings()
self.contact_list = ContactList()
- self.groups = [] if groups is None else [(create_group(g)) for g in groups]
+ self.groups = [] if groups is None else [(create_group(g)) for g in groups] # type: List[Group]
+ self.store_groups_called = False
for key, value in kwargs.items():
setattr(self, key, value)
+ def __iter__(self) -> Generator:
+ """Mock method."""
+ yield from self.groups
+
+ def __len__(self) -> int:
+ """Mock method."""
+ return len(self.groups)
+
def store_groups(self):
- pass
+ """Mock method."""
+ self.store_groups_called = True
def load_groups(self):
+ """Mock method."""
pass
def print_groups(self):
+ """Mock method."""
pass
class KeyList(OrigKeyList):
- """Mock object for unittesting."""
+ """Mock the object for unittesting."""
def __init__(self, nicks=None, **kwargs):
self.master_key = MasterKey()
self.settings = Settings()
self.keysets = [] if nicks is None else [create_keyset(n) for n in nicks]
+ self.store_keys_called = False
+
for key, value in kwargs.items():
setattr(self, key, value)
def store_keys(self):
- pass
+ """Mock method."""
+ self.store_keys_called = True
def load_keys(self):
+ """Mock method."""
pass
class MasterKey(OrigMasterKey):
- """Mock object for unittesting."""
+ """Mock the object for unittesting."""
def __init__(self, **kwargs):
+ """Create new MasterKey mock object."""
self.local_test = False
- self.master_key = bytes(KEY_LENGTH)
- self.file_name = f'{DIR_USER_DATA}ut_login_data'
+ self.master_key = bytes(SYMMETRIC_KEY_LENGTH)
+ self.file_name = f'{DIR_USER_DATA}{TX}_login_data'
for key, value in kwargs.items():
setattr(self, key, value)
-# TxM
-class Settings(OrigSettings):
- """Mock object for unittesting."""
+class OnionService(OrigOnionService):
+ """Mock the object for unittesting."""
def __init__(self, **kwargs):
+ """Create new OnionService mock object."""
+ self.onion_private_key = ONION_SERVICE_PRIVATE_KEY_LENGTH*b'a'
+ self.conf_code = b'a'
+ self.public_key = bytes(nacl.signing.SigningKey(seed=self.onion_private_key).verify_key)
+ self.user_onion_address = pub_key_to_onion_address(self.public_key)
+ self.user_short_address = pub_key_to_short_address(self.public_key)
+ self.is_delivered = False
+
+ for key, value in kwargs.items():
+ setattr(self, key, value)
+
+
+# Transmitter Program
+class Settings(OrigSettings):
+ """Mock the object for unittesting."""
+
+ def __init__(self, **kwargs):
+ """Create new Settings mock object."""
self.disable_gui_dialog = False
- self.max_number_of_group_members = 20
- self.max_number_of_groups = 20
- self.max_number_of_contacts = 20
- self.serial_baudrate = 19200
- self.serial_error_correction = 5
+ self.max_number_of_group_members = 50
+ self.max_number_of_groups = 50
+ self.max_number_of_contacts = 50
self.log_messages_by_default = False
self.accept_files_by_default = False
self.show_notifications_by_default = True
- self.logfile_masking = False
+ self.log_file_masking = False
# Transmitter settings
- self.txm_usb_serial_adapter = True
- self.nh_bypass_messages = True
- self.confirm_sent_files = True
- self.double_space_exits = False
- self.traffic_masking = False
- self.traffic_masking_static_delay = 2.0
- self.traffic_masking_random_delay = 2.0
- self.multi_packet_random_delay = False
- self.max_duration_of_random_delay = 10.0
+ self.nc_bypass_messages = False
+ self.confirm_sent_files = True
+ self.double_space_exits = False
+ self.traffic_masking = False
+ self.tm_static_delay = 2.0
+ self.tm_random_delay = 2.0
+
+ # Relay settings
+ self.allow_contact_requests = True
# Receiver settings
- self.rxm_usb_serial_adapter = True
self.new_message_notify_preview = False
self.new_message_notify_duration = 1.0
+ self.max_decompress_size = 100_000_000
self.master_key = MasterKey()
- self.software_operation = 'ut'
+ self.software_operation = TX
self.local_testing_mode = False
- self.data_diode_sockets = False
- self.session_serial_error_correction = self.serial_error_correction
- self.session_serial_baudrate = self.serial_baudrate
- self.session_traffic_masking = self.traffic_masking
- self.session_usb_serial_adapter = None
- self.transmit_delay = 0.0
- self.receive_timeout = 0.0
- self.txm_inter_packet_delay = 0.0
- self.rxm_receive_timeout = 0.0
+ self.all_keys = list(vars(self).keys())
+ self.key_list = self.all_keys[:self.all_keys.index('master_key')]
+ self.defaults = {k: self.__dict__[k] for k in self.key_list}
# Override defaults with specified kwargs
for key, value in kwargs.items():
setattr(self, key, value)
def store_settings(self):
+ """Mock method."""
pass
def load_settings(self):
+ """Mock method."""
pass
@staticmethod
def validate_key_value_pair(key, value, contact_list, group_list):
+ """Mock method."""
pass
- def print_settings(self):
+
+# Transmitter Program
+class GatewaySettings(OrigGatewaySettings):
+ """Mock the object for unittesting."""
+
+ def __init__(self, **kwargs):
+ """Create new GatewaySettings mock object."""
+ self.serial_baudrate = 19200
+ self.serial_error_correction = 5
+ self.use_serial_usb_adapter = True
+ self.built_in_serial_interface = 'ttyS0'
+
+ self.software_operation = TX
+ self.local_testing_mode = False
+ self.data_diode_sockets = False
+
+ self.all_keys = list(vars(self).keys())
+ self.key_list = self.all_keys[:self.all_keys.index('software_operation')]
+ self.defaults = {k: self.__dict__[k] for k in self.key_list}
+
+ self.session_serial_error_correction = self.serial_error_correction
+ self.session_serial_baudrate = self.serial_baudrate
+ self.session_usb_serial_adapter = self.use_serial_usb_adapter
+
+ self.tx_inter_packet_delay = 0.0
+ self.rx_receive_timeout = 0.0
+
+ self.race_condition_delay = calculate_race_condition_delay(self.session_serial_error_correction,
+ self.serial_baudrate)
+
+ # Override defaults with specified kwargs
+ for key, value in kwargs.items():
+ setattr(self, key, value)
+
+ def store_settings(self):
+ """Mock method."""
+ pass
+
+ def load_settings(self):
+ """Mock method."""
pass
class TxWindow(OrigTxWindow):
- """Mock object for unittesting."""
+ """Mock the object for unittesting."""
def __init__(self, **kwargs):
+ """Create new TxWindow mock object."""
self.contact_list = ContactList()
self.group_list = GroupList()
self.window_contacts = []
@@ -244,26 +335,29 @@ class TxWindow(OrigTxWindow):
self.name = None
self.type = None
self.uid = None
+ self.group_id = None
self.imc_name = None
for key, value in kwargs.items():
setattr(self, key, value)
class UserInput(object):
- """Mock object for unittesting."""
+ """Mock the object for unittesting."""
def __init__(self, plaintext=None, **kwargs):
+ """Create new UserInput mock object."""
self.plaintext = plaintext
self.type = None
for key, value in kwargs.items():
setattr(self, key, value)
-# RxM
+# Receiver Program
class Packet(object):
- """Mock object for unittesting."""
+ """Mock the object for unittesting."""
def __init__(self, **kwargs):
+        """Create new Packet mock object."""
self.account = None
self.contact = None
self.origin = None
@@ -281,20 +375,24 @@ class Packet(object):
setattr(self, key, value)
def add_packet(self, packet):
+ """Mock method."""
pass
def assemble_message_packet(self):
+ """Mock method."""
return self.payload
def assemble_and_store_file(self):
+ """Mock method."""
return self.payload
def assemble_command_packet(self):
+ """Mock method."""
return self.payload
class PacketList(OrigPacketList):
- """Mock object for unittesting."""
+ """Mock the object for unittesting."""
def __init__(self, **kwargs):
self.settings = Settings()
@@ -306,7 +404,7 @@ class PacketList(OrigPacketList):
class RxWindow(OrigRxWindow):
- """Mock object for unittesting."""
+ """Mock the object for unittesting."""
def __init__(self, **kwargs):
self.uid = None
@@ -317,6 +415,7 @@ class RxWindow(OrigRxWindow):
self.is_active = False
self.group_timestamp = time.time() * 1000
+ self.group = None
self.window_contacts = []
self.message_log = []
@@ -324,18 +423,19 @@ class RxWindow(OrigRxWindow):
self.previous_msg_ts = datetime.now()
self.unread_messages = 0
- self.type = None
- self.type_print = None
- self.name = None
+ self.type = None
+ self.type_print = None
+ self.name = None
for key, value in kwargs.items():
setattr(self, key, value)
class WindowList(object):
- """Mock object for unittesting."""
+ """Mock the object for unittesting."""
def __init__(self, nicks=None, **kwargs):
+ """Create new WindowList mock object."""
self.contact_list = ContactList()
self.group_list = GroupList()
self.packet_list = PacketList()
@@ -353,30 +453,37 @@ class WindowList(object):
yield from self.windows
def group_windows(self):
+ """Mock method."""
return [w for w in self.windows if w.type == WIN_TYPE_GROUP]
- def select_rx_window(self, name):
+ def set_active_rx_window(self, name):
+ """Mock method."""
if self.active_win is not None:
self.active_win.is_active = False
self.active_win = self.get_window(name)
self.active_win.is_active = True
def has_window(self, name):
+ """Mock method."""
return name in self.get_list_of_window_names()
def get_list_of_window_names(self):
+ """Mock method."""
return [w.uid for w in self.windows]
def get_local_window(self):
- return self.get_window(LOCAL_ID)
+ """Mock method."""
+ return self.get_window(WIN_UID_LOCAL)
def remove_window(self, uid: str) -> None:
+ """Mock method."""
for i, w in enumerate(self.windows):
if uid == w.uid:
del self.windows[i]
break
def get_window(self, uid):
+ """Mock method."""
if not self.has_window(uid):
self.windows.append(RxWindow(uid=uid,
contact_list=self.contact_list,
diff --git a/tests/nh/test_commands.py b/tests/nh/test_commands.py
deleted file mode 100644
index a398d3a..0000000
--- a/tests/nh/test_commands.py
+++ /dev/null
@@ -1,255 +0,0 @@
-#!/usr/bin/env python3.5
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see <https://www.gnu.org/licenses/>.
-"""
-
-import os
-import threading
-import time
-import unittest
-
-from multiprocessing import Queue
-from tkinter import filedialog
-
-from src.common.statics import *
-
-from src.nh.commands import nh_command, process_command, race_condition_delay, clear_windows, reset_windows
-from src.nh.commands import exit_tfc, rxm_import, change_ec_ratio, change_baudrate, change_gui_dialog, wipe
-
-from tests.mock_classes import Settings
-from tests.utils import ignored, TFCTestCase
-
-
-class TestNHCommand(unittest.TestCase):
-
- def setUp(self):
- self.settings = Settings(race_condition_delay=0.0)
- self.queues = {TXM_TO_NH_QUEUE: Queue(),
- NH_TO_IM_QUEUE: Queue(),
- EXIT_QUEUE: Queue(),
- RXM_OUTGOING_QUEUE: Queue()}
-
- def tearDown(self):
- for key in self.queues:
- while not self.queues[key]:
- self.queues[key].get()
- time.sleep(0.1)
- self.queues[key].close()
-
- def test_packet_reading(self):
-
- def queue_delayer():
- time.sleep(0.1)
- self.queues[TXM_TO_NH_QUEUE].put(UNENCRYPTED_SCREEN_CLEAR)
-
- threading.Thread(target=queue_delayer).start()
- self.assertIsNone(nh_command(self.queues, self.settings, stdin_fd=1, unittest=True))
- self.assertEqual(self.queues[NH_TO_IM_QUEUE].qsize(), 1)
-
-
-class TestProcessCommand(TFCTestCase):
-
- def setUp(self):
- self.settings = Settings()
- self.queues = {TXM_TO_NH_QUEUE: Queue(),
- NH_TO_IM_QUEUE: Queue(),
- EXIT_QUEUE: Queue(),
- RXM_OUTGOING_QUEUE: Queue()}
-
- def tearDown(self):
- for key in self.queues:
- while not self.queues[key]:
- self.queues[key].get()
- time.sleep(0.1)
- self.queues[key].close()
-
- def test_invalid_key(self):
- self.assertFR("Error: Received an invalid command.",
- process_command, self.settings, b'INVALID', self.queues)
-
-
-class TestRaceConditionDelay(unittest.TestCase):
-
- def setUp(self):
- self.settings = Settings(local_testing_mode=True,
- data_diode_sockets=True)
-
- def test_delay(self):
- start_time = time.monotonic()
- self.assertIsNone(race_condition_delay(self.settings))
- duration = time.monotonic() - start_time
- self.assertTrue(duration > 1)
-
-
-class TestClearWindows(TFCTestCase):
-
- def setUp(self):
- self.settings = Settings(race_condition_delay=0.0)
- self.queue_to_im = Queue()
-
- def tearDown(self):
- while not self.queue_to_im.empty():
- self.queue_to_im.get()
- time.sleep(0.1)
- self.queue_to_im.close()
-
- def test_clear_display(self):
- self.assertPrints(CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER,
- clear_windows, self.settings, UNENCRYPTED_SCREEN_CLEAR, self.queue_to_im)
-
- self.assertEqual(self.queue_to_im.get(), UNENCRYPTED_SCREEN_CLEAR)
-
-
-class TestResetWindows(TFCTestCase):
-
- def setUp(self):
- self.settings = Settings(race_condition_delay=0.0)
- self.queue_to_im = Queue()
-
- def tearDown(self):
- while not self.queue_to_im.empty():
- self.queue_to_im.get()
- time.sleep(0.1)
- self.queue_to_im.close()
-
- def test_reset_display(self):
- self.assertIsNone(reset_windows(self.settings, UNENCRYPTED_SCREEN_RESET, self.queue_to_im))
- self.assertEqual(self.queue_to_im.get(), UNENCRYPTED_SCREEN_RESET)
-
-
-class TestExitTFC(unittest.TestCase):
-
- def setUp(self):
- self.settings = Settings(race_condition_delay=0.0)
- self.queue_exit = Queue()
-
- def tearDown(self):
- while not self.queue_exit.empty():
- self.queue_exit.get()
- time.sleep(0.1)
- self.queue_exit.close()
-
- def test_exit_tfc(self):
- self.assertIsNone(exit_tfc(self.settings, self.queue_exit))
- self.assertEqual(self.queue_exit.get(), EXIT)
-
-
-class TestRxMImport(unittest.TestCase):
-
- def setUp(self):
- with open('testfile', 'wb+') as f:
- f.write(5000*b'a')
-
- self.queue_to_rxm = Queue()
- self.o_tkfd = filedialog.askopenfilename
- filedialog.askopenfilename = lambda title: 'testfile'
- self.settings = Settings(local_testing_mode=True)
-
- def tearDown(self):
- with ignored(OSError):
- os.remove('testfile')
-
- filedialog.askopenfilename = self.o_tkfd
-
- while not self.queue_to_rxm.empty():
- self.queue_to_rxm.get()
- time.sleep(0.1)
- self.queue_to_rxm.close()
-
- @unittest.skipIf("TRAVIS" in os.environ and os.environ["TRAVIS"] == "true", "Skip as Travis has no $DISPLAY.")
- def test_rxm_import(self):
- self.assertIsNone(rxm_import(self.settings, self.queue_to_rxm))
- time.sleep(0.1)
- self.assertEqual(self.queue_to_rxm.get(), IMPORTED_FILE_HEADER + 5000 * b'a')
-
-
-class TestChangeECRatio(TFCTestCase):
-
- def setUp(self):
- self.settings = Settings()
-
- def test_non_digit_value_raises_fr(self):
- self.assertFR("Error: Received invalid EC ratio value from TxM.",
- change_ec_ratio, self.settings, UNENCRYPTED_EC_RATIO + b'a')
-
- def test_invalid_digit_value_raises_fr(self):
- self.assertFR("Error: Received invalid EC ratio value from TxM.",
- change_ec_ratio, self.settings, UNENCRYPTED_EC_RATIO + b'0')
-
- def test_change_value(self):
- self.assertIsNone(change_ec_ratio(self.settings, UNENCRYPTED_EC_RATIO + b'3'))
- self.assertEqual(self.settings.serial_error_correction, 3)
-
-
-class TestChangeBaudrate(TFCTestCase):
-
- def setUp(self):
- self.settings = Settings()
-
- def test_non_digit_value_raises_fr(self):
- self.assertFR("Error: Received invalid baud rate value from TxM.",
- change_baudrate, self.settings, UNENCRYPTED_BAUDRATE + b'a')
-
- def test_invalid_digit_value_raises_fr(self):
- self.assertFR("Error: Received invalid baud rate value from TxM.",
- change_baudrate, self.settings, UNENCRYPTED_BAUDRATE + b'1300')
-
- def test_change_value(self):
- self.assertIsNone(change_baudrate(self.settings, UNENCRYPTED_BAUDRATE + b'9600'))
- self.assertEqual(self.settings.serial_baudrate, 9600)
-
-
-class TestChangeGUIDialog(TFCTestCase):
-
- def setUp(self):
- self.settings = Settings()
-
- def test_invalid_value_raises_fr(self):
- self.assertFR("Error: Received invalid GUI dialog setting value from TxM.",
- change_gui_dialog, self.settings, UNENCRYPTED_GUI_DIALOG + b'invalid')
-
- def test_enable_gui_dialog_setting(self):
- # Setup
- self.settings.disable_gui_dialog = False
-
- # Test
- self.assertIsNone(change_gui_dialog(self.settings, UNENCRYPTED_GUI_DIALOG + b'true'))
- self.assertTrue(self.settings.disable_gui_dialog)
-
- def test_disable_gui_dialog_setting(self):
- # Setup
- self.settings.disable_gui_dialog = True
-
- # Test
- self.assertIsNone(change_gui_dialog(self.settings, UNENCRYPTED_GUI_DIALOG + b'false'))
- self.assertFalse(self.settings.disable_gui_dialog)
-
-
-class TestWipe(unittest.TestCase):
-
- def setUp(self):
- self.settings = Settings(race_condition_delay=0.0)
- self.queue_exit = Queue()
-
- def test_wipe_command(self):
- self.assertIsNone(wipe(self.settings, self.queue_exit))
- self.assertEqual(self.queue_exit.get(), WIPE)
-
-
-if __name__ == '__main__':
- unittest.main(exit=False)
diff --git a/tests/nh/test_gateway.py b/tests/nh/test_gateway.py
deleted file mode 100644
index f9da07f..0000000
--- a/tests/nh/test_gateway.py
+++ /dev/null
@@ -1,155 +0,0 @@
-#!/usr/bin/env python3.5
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see <https://www.gnu.org/licenses/>.
-"""
-
-import multiprocessing.connection
-import os
-import serial
-import time
-import unittest
-
-from multiprocessing import Queue
-
-from src.common.statics import *
-
-from src.nh.gateway import gateway_loop, Gateway
-
-from tests.mock_classes import Settings
-from tests.mock_classes import Gateway as MockGateway
-
-
-class TestGatewayLoop(unittest.TestCase):
-
- def setUp(self):
- self.queues = {TXM_INCOMING_QUEUE: Queue()}
- self.gateway = MockGateway()
- self.gateway.read = lambda: "read_data"
-
- def tearDown(self):
- while not self.queues[TXM_INCOMING_QUEUE].empty():
- self.queues[TXM_INCOMING_QUEUE].get()
- time.sleep(0.1)
- self.queues[TXM_INCOMING_QUEUE].close()
-
- def test_loop(self):
- self.assertIsNone(gateway_loop(self.queues, self.gateway, unittest=True))
- self.assertEqual(self.queues[TXM_INCOMING_QUEUE].get(), "read_data")
-
-
-class TestGateway(unittest.TestCase):
-
- class MockSerial(object):
-
- def __init__(self, iface_name, baudrate, timeout):
- self.iface = iface_name
- self.baudrate = baudrate
- self.timeout = timeout
- self.written = []
-
- output_list = [b'', bytearray(b'a'), bytearray(b'b'), b'']
- self.gen = iter(output_list)
-
- def write(self, output):
- self.written.append(output)
-
- def read(self, _):
- time.sleep(0.1)
- return next(self.gen)
-
- def flush(self):
- pass
-
- def setUp(self):
- self.o_listdir = os.listdir
- self.o_serial = serial.Serial
-
- settings = Settings(serial_usb_adapter=True)
- input_list = ['ttyUSB0', 'ttyS0', 'ttyUSB0', 'ttyS0', 'ttyUSB0']
- gen = iter(input_list)
- os.listdir = lambda _: [str(next(gen))]
- serial.Serial = TestGateway.MockSerial
- self.gateway = Gateway(settings)
-
- def tearDown(self):
- os.listdir = self.o_listdir
- serial.Serial = self.o_serial
-
- def test_serial(self):
- self.assertIsNone(self.gateway.write(b'test'))
- self.assertEqual(self.gateway.search_serial_interface(), '/dev/ttyUSB0')
- self.assertEqual(self.gateway.read(), b'ab')
-
- self.gateway.settings.serial_usb_adapter = False
- self.assertEqual(self.gateway.search_serial_interface(), '/dev/ttyS0')
-
- with self.assertRaises(SystemExit):
- self.gateway.search_serial_interface()
-
-
-class TestEstablishSocket(unittest.TestCase):
-
- class MockMultiprocessingListener(object):
-
- def __init__(self, args):
- self.hostname = args[0]
- self.socket_no = args[1]
- self.written = []
-
- def accept(self):
-
- class Interface(object):
-
- def __init__(self, hostname, socket_no):
- self.hostname = hostname
- self.socket_no = socket_no
-
- @staticmethod
- def recv():
- return b'mock_message'
-
- return Interface(self.hostname, self.socket_no)
-
- class MockMultiprocessingClient(object):
-
- def __init__(self, args):
- self.hostname = args[0]
- self.socket_no = args[1]
- self.written = []
-
- def send(self, output):
- self.written.append(output)
-
- def setUp(self):
- self.settings = Settings(local_testing_mode=True)
- multiprocessing.connection.Client = TestEstablishSocket.MockMultiprocessingClient
- multiprocessing.connection.Listener = TestEstablishSocket.MockMultiprocessingListener
-
- def test_socket(self):
- gateway = Gateway(self.settings)
- self.assertEqual(gateway.txm_interface.socket_no, NH_LISTEN_SOCKET)
- self.assertEqual(gateway.rxm_interface.socket_no, RXM_LISTEN_SOCKET)
- self.assertEqual(gateway.txm_interface.hostname, 'localhost')
- self.assertEqual(gateway.rxm_interface.hostname, 'localhost')
- self.assertIsNone(gateway.write(b'test'))
- self.assertEqual(gateway.rxm_interface.written[0], b'test')
- self.assertEqual(gateway.read(), b'mock_message')
-
-
-if __name__ == '__main__':
- unittest.main(exit=False)
diff --git a/tests/nh/test_misc.py b/tests/nh/test_misc.py
deleted file mode 100644
index 8268a7c..0000000
--- a/tests/nh/test_misc.py
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/usr/bin/env python3.5
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see <https://www.gnu.org/licenses/>.
-"""
-
-import argparse
-import unittest
-
-from src.nh.misc import process_arguments
-
-from tests.utils import TFCTestCase
-
-
-class TestMisc(TFCTestCase):
-
- class MockParser(object):
- def __init__(self, *_, **__):
- pass
-
- def parse_args(self):
- class Args(object):
- def __init__(self):
- self.local_test = True
- self.dd_sockets = True
-
- args = Args()
- return args
-
- def add_argument(self, *_, **__):
- pass
-
- def setUp(self):
- self.o_argparse = argparse.ArgumentParser
- argparse.ArgumentParser = TestMisc.MockParser
-
- def tearDown(self):
- argparse.ArgumentParser = self.o_argparse
-
- def test_process_arguments(self):
- self.assertEqual(process_arguments(), (True, True))
-
-
-if __name__ == '__main__':
- unittest.main(exit=False)
diff --git a/tests/nh/test_settings.py b/tests/nh/test_settings.py
deleted file mode 100644
index 661cc03..0000000
--- a/tests/nh/test_settings.py
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/usr/bin/env python3.5
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see <https://www.gnu.org/licenses/>.
-"""
-
-import builtins
-import os
-import unittest
-
-from src.common.statics import *
-
-from src.nh.settings import Settings
-
-from tests.utils import cleanup
-
-
-class TestSettings(unittest.TestCase):
-
- def setUp(self):
- self.o_input = builtins.input
- builtins.input = lambda _: 'yes'
-
- def tearDown(self):
- cleanup()
- builtins.input = self.o_input
-
- def test_store_and_load_settings(self):
- # Test store
- settings = Settings(False, False, 'ut')
- settings.disable_gui_dialog = True
- settings.store_settings()
- self.assertEqual(os.path.getsize(f"{DIR_USER_DATA}ut_settings"), 2*INTEGER_SETTING_LEN + 2*BOOLEAN_SETTING_LEN)
-
- # Test load
- settings2 = Settings(False, False, 'ut')
- self.assertTrue(settings2.disable_gui_dialog)
-
-
-if __name__ == '__main__':
- unittest.main(exit=False)
diff --git a/tests/nh/test_tcb.py b/tests/nh/test_tcb.py
deleted file mode 100644
index fe601c6..0000000
--- a/tests/nh/test_tcb.py
+++ /dev/null
@@ -1,179 +0,0 @@
-#!/usr/bin/env python3.5
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see <https://www.gnu.org/licenses/>.
-"""
-
-import os
-import threading
-import time
-import unittest
-
-from multiprocessing import Queue
-
-from src.common.reed_solomon import RSCodec
-from src.common.statics import *
-
-from src.nh.tcb import rxm_outgoing, txm_incoming
-
-from tests.mock_classes import Settings, Gateway
-from tests.utils import ignored
-
-
-class TestTxMIncoming(unittest.TestCase):
-
- def setUp(self):
- self.settings = Settings()
- self.rs = RSCodec(2 * self.settings.serial_error_correction)
- self.o_urandom = os.urandom
- self.queues = {TXM_INCOMING_QUEUE: Queue(),
- RXM_OUTGOING_QUEUE: Queue(),
- TXM_TO_IM_QUEUE: Queue(),
- TXM_TO_NH_QUEUE: Queue(),
- TXM_TO_RXM_QUEUE: Queue(),
- NH_TO_IM_QUEUE: Queue(),
- EXIT_QUEUE: Queue()}
-
- def tearDown(self):
- os.urandom = self.o_urandom
-
- for key in self.queues:
- while not self.queues[key].empty():
- self.queues[key].get()
- time.sleep(0.1)
- self.queues[key].close()
-
- for f in [8*'61', 8*'62']:
- with ignored(OSError):
- os.remove(f)
-
- def test_unencrypted_packet(self):
- # Setup
- packet = self.rs.encode(UNENCRYPTED_PACKET_HEADER + b'test')
- self.queues[TXM_INCOMING_QUEUE].put(640 * b'a')
- self.queues[TXM_INCOMING_QUEUE].put(packet)
- time.sleep(0.1)
-
- # Test
- self.assertIsNone(txm_incoming(self.queues, self.settings, unittest=True))
- time.sleep(0.1)
- self.assertEqual(self.queues[TXM_TO_NH_QUEUE].qsize(), 1)
-
- def test_local_key_packet(self):
- # Setup
- packet = self.rs.encode(LOCAL_KEY_PACKET_HEADER + b'test')
-
- def queue_delayer():
- time.sleep(0.1)
- self.queues[TXM_INCOMING_QUEUE].put(packet)
-
- threading.Thread(target=queue_delayer).start()
-
- # Test
- self.assertIsNone(txm_incoming(self.queues, self.settings, unittest=True))
- time.sleep(0.1)
- self.assertEqual(self.queues[TXM_TO_RXM_QUEUE].qsize(), 1)
-
- def test_command_packet(self):
- # Setup
- packet = self.rs.encode(COMMAND_PACKET_HEADER + b'test')
- self.queues[TXM_INCOMING_QUEUE].put(packet)
- time.sleep(0.1)
-
- # Test
- self.assertIsNone(txm_incoming(self.queues, self.settings, unittest=True))
- time.sleep(0.1)
- self.assertEqual(self.queues[TXM_TO_RXM_QUEUE].qsize(), 1)
-
- def test_message_packet(self):
- # Setup
- packet = self.rs.encode(MESSAGE_PACKET_HEADER + 344 * b'a'
- + b'bob@jabber.org' + US_BYTE + b'alice@jabber.org')
- self.queues[TXM_INCOMING_QUEUE].put(packet)
- time.sleep(0.1)
-
- # Test
- self.assertIsNone(txm_incoming(self.queues, self.settings, unittest=True))
- time.sleep(0.1)
- self.assertEqual(self.queues[TXM_TO_IM_QUEUE].qsize(), 1)
- self.assertEqual(self.queues[RXM_OUTGOING_QUEUE].qsize(), 1)
-
- def test_public_key_packet(self):
- # Setup
- packet = self.rs.encode(PUBLIC_KEY_PACKET_HEADER + KEY_LENGTH * b'a'
- + b'bob@jabber.org' + US_BYTE + b'alice@jabber.org')
- self.queues[TXM_INCOMING_QUEUE].put(packet)
- time.sleep(0.1)
-
- # Test
- self.assertIsNone(txm_incoming(self.queues, self.settings, unittest=True))
- time.sleep(0.1)
- self.assertEqual(self.queues[RXM_OUTGOING_QUEUE].qsize(), 1)
- self.assertEqual(self.queues[TXM_TO_IM_QUEUE].qsize(), 1)
-
- def test_exported_file_packet(self):
- # Setup
- open(8*'61', 'w+').close()
-
- packet = self.rs.encode(EXPORTED_FILE_HEADER + 500 * b'a')
- output_list = [8*b'a', 8*b'b']
- gen = iter(output_list)
- os.urandom = lambda _: next(gen)
-
- self.queues[TXM_INCOMING_QUEUE].put(packet)
- time.sleep(0.1)
-
- # Test
- self.assertIsNone(txm_incoming(self.queues, self.settings, unittest=True))
- self.assertTrue(os.path.isfile(8*'62'))
-
-
-class TestRxMOutGoing(unittest.TestCase):
-
- def setUp(self):
- self.settings = Settings()
- self.gateway = Gateway()
- self.rs = RSCodec(2 * self.settings.serial_error_correction)
- self.queues = {TXM_INCOMING_QUEUE: Queue(),
- RXM_OUTGOING_QUEUE: Queue(),
- TXM_TO_IM_QUEUE: Queue(),
- TXM_TO_NH_QUEUE: Queue(),
- TXM_TO_RXM_QUEUE: Queue(),
- NH_TO_IM_QUEUE: Queue(),
- EXIT_QUEUE: Queue()}
-
- def tearDown(self):
- for k in self.queues:
- while not self.queues[k].empty():
- self.queues[k].get()
- time.sleep(0.1)
- self.queues[k].close()
-
- def test_loop(self):
- # Setup
- packet = b'testpacket'
- self.queues[TXM_TO_RXM_QUEUE].put(packet)
- self.queues[RXM_OUTGOING_QUEUE].put(packet)
- time.sleep(0.1)
-
- # Test
- self.assertIsNone(rxm_outgoing(self.queues, self.settings, self.gateway, unittest=True))
- self.assertEqual(packet, self.rs.decode(self.gateway.packets[0]))
-
-
-if __name__ == '__main__':
- unittest.main(exit=False)
diff --git a/tests/nh/__init__.py b/tests/receiver/__init__.py
similarity index 100%
rename from tests/nh/__init__.py
rename to tests/receiver/__init__.py
diff --git a/tests/receiver/test_commands.py b/tests/receiver/test_commands.py
new file mode 100644
index 0000000..d68eaf6
--- /dev/null
+++ b/tests/receiver/test_commands.py
@@ -0,0 +1,472 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import os
+import struct
+import unittest
+
+from datetime import datetime
+from multiprocessing import Queue
+from unittest import mock
+
+from src.common.db_logs import write_log_entry
+from src.common.encoding import int_to_bytes
+from src.common.statics import *
+
+from src.receiver.packet import PacketList
+from src.receiver.commands import ch_contact_s, ch_master_key, ch_nick, ch_setting, contact_rem, exit_tfc, log_command
+from src.receiver.commands import process_command, remove_log, reset_screen, win_activity, win_select, wipe
+
+from tests.mock_classes import ContactList, Gateway, group_name_to_group_id, GroupList, KeyList, MasterKey
+from tests.mock_classes import nick_to_pub_key, RxWindow, Settings, WindowList
+from tests.utils import assembly_packet_creator, cd_unittest, cleanup, ignored, nick_to_short_address, tear_queue
+from tests.utils import TFCTestCase
+
+
+class TestProcessCommand(TFCTestCase):
+
+ def setUp(self):
+ self.unittest_dir = cd_unittest()
+ self.ts = datetime.now()
+ self.settings = Settings()
+ self.master_key = MasterKey()
+ self.group_list = GroupList()
+ self.exit_queue = Queue()
+ self.gateway = Gateway()
+ self.window_list = WindowList(nicks=[LOCAL_ID])
+ self.contact_list = ContactList(nicks=[LOCAL_ID])
+ self.packet_list = PacketList(self.settings, self.contact_list)
+ self.key_list = KeyList(nicks=[LOCAL_ID])
+ self.key_set = self.key_list.get_keyset(LOCAL_PUBKEY)
+
+ self.args = (self.window_list, self.packet_list, self.contact_list, self.key_list, self.group_list,
+ self.settings, self.master_key, self.gateway, self.exit_queue)
+
+ def tearDown(self):
+ cleanup(self.unittest_dir)
+
+ def test_incomplete_command_raises_fr(self):
+ packet = assembly_packet_creator(COMMAND, b'test_command', s_header_override=C_L_HEADER, encrypt_packet=True)[0]
+ self.assert_fr("Incomplete command.", process_command, self.ts, packet, *self.args)
+
+ def test_invalid_command_header(self):
+ packet = assembly_packet_creator(COMMAND, b'invalid_header', encrypt_packet=True)[0]
+ self.assert_fr("Error: Received an invalid command.", process_command, self.ts, packet, *self.args)
+
+ def test_process_command(self):
+ packet = assembly_packet_creator(COMMAND, LOG_REMOVE, encrypt_packet=True)[0]
+ self.assert_fr(f"No log database available.", process_command, self.ts, packet, *self.args)
+
+
+class TestWinActivity(TFCTestCase):
+
+ def setUp(self):
+ self.window_list = WindowList()
+ self.window_list.windows = [RxWindow(name='Alice', unread_messages=4),
+ RxWindow(name='Bob', unread_messages=15)]
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_function(self, _):
+ self.assert_prints(f"""\
+ ┌─────────────────┐
+ │ Window activity │
+ │ Alice: 4 │
+ │ Bob: 15 │
+ └─────────────────┘
+{5*(CURSOR_UP_ONE_LINE+CLEAR_ENTIRE_LINE)}""", win_activity, self.window_list)
+
+
+class TestWinSelect(unittest.TestCase):
+
+ def setUp(self):
+ self.window_list = WindowList()
+ self.window_list.windows = [RxWindow(uid=nick_to_pub_key("Alice"), name='Alice'),
+ RxWindow(uid=nick_to_pub_key("Bob"), name='Bob')]
+
+ def test_window_selection(self):
+ self.assertIsNone(win_select(nick_to_pub_key("Alice"), self.window_list))
+ self.assertEqual(self.window_list.active_win.name, 'Alice')
+
+ self.assertIsNone(win_select(nick_to_pub_key("Bob"), self.window_list))
+ self.assertEqual(self.window_list.active_win.name, 'Bob')
+
+ self.assertIsNone(win_select(WIN_UID_FILE, self.window_list))
+ self.assertEqual(self.window_list.active_win.uid, WIN_UID_FILE)
+
+
+class TestResetScreen(unittest.TestCase):
+
+ def setUp(self):
+ self.cmd_data = nick_to_pub_key("Alice")
+ self.window_list = WindowList()
+ self.window_list.windows = [RxWindow(uid=nick_to_pub_key("Alice"), name='Alice'),
+ RxWindow(uid=nick_to_pub_key("Bob"), name='Bob')]
+ self.window = self.window_list.get_window(nick_to_pub_key("Alice"))
+ self.window.message_log = [(datetime.now(), 'Hi Bob', nick_to_pub_key("Alice"), ORIGIN_CONTACT_HEADER)]
+
+ @mock.patch('os.system', return_value=None, create_autospec=True)
+ def test_screen_reset(self, reset):
+ # Ensure there is a message to be removed from the ephemeral message log
+ self.assertEqual(len(self.window.message_log), 1)
+
+ reset_screen(self.cmd_data, self.window_list)
+
+ # Test that screen is reset by the command
+ reset.assert_called_with(RESET)
+
+ # Test that the ephemeral message log is empty after the command
+ self.assertEqual(len(self.window.message_log), 0)
+
+
+class TestExitTFC(unittest.TestCase):
+
+ def setUp(self):
+ self.exit_queue = Queue()
+
+ def tearDown(self):
+ tear_queue(self.exit_queue)
+
+ def test_function(self):
+ self.assertIsNone(exit_tfc(self.exit_queue))
+ self.assertEqual(self.exit_queue.qsize(), 1)
+
+
+class TestLogCommand(TFCTestCase):
+
+ def setUp(self):
+ self.unittest_dir = cd_unittest()
+ self.cmd_data = int_to_bytes(1) + nick_to_pub_key("Bob")
+ self.ts = datetime.now()
+ self.window_list = WindowList(nicks=['Alice', 'Bob'])
+ self.window = self.window_list.get_window(nick_to_pub_key("Bob"))
+ self.window.type_print = 'contact'
+ self.window.name = 'Bob'
+ self.window.type = WIN_TYPE_CONTACT
+ self.contact_list = ContactList(nicks=['Alice', 'Bob'])
+ self.group_list = GroupList()
+ self.settings = Settings()
+ self.master_key = MasterKey()
+ self.args = (self.ts, self.window_list, self.contact_list,
+ self.group_list, self.settings, self.master_key)
+
+ time_float = struct.unpack('
+
+""")
+
+
+class TestRemoveLog(TFCTestCase):
+
+ def setUp(self):
+ self.unittest_dir = cd_unittest()
+ self.win_name = nick_to_pub_key("Alice")
+ self.contact_list = ContactList()
+ self.group_list = GroupList()
+ self.settings = Settings()
+ self.master_key = MasterKey()
+
+ def tearDown(self):
+ cleanup(self.unittest_dir)
+
+ def test_remove_log_file(self):
+ self.assert_fr(f"No log database available.",
+ remove_log, self.win_name, self.contact_list, self.group_list, self.settings, self.master_key)
+
+
+class TestChMasterKey(TFCTestCase):
+    """Tests for the ch_master_key command handler."""
+
+    def setUp(self):
+        self.unittest_dir = cd_unittest()
+        self.ts = datetime.now()
+        self.master_key = MasterKey()
+        self.settings = Settings()
+        self.contact_list = ContactList(nicks=[LOCAL_ID])
+        self.window_list = WindowList(nicks=[LOCAL_ID])
+        self.group_list = GroupList()
+        self.key_list = KeyList()
+        self.args = (self.ts, self.window_list, self.contact_list, self.group_list,
+                     self.key_list, self.settings, self.master_key)
+
+    def tearDown(self):
+        cleanup(self.unittest_dir)
+
+    # Argon2 parameters are patched down so key derivation stays fast in tests.
+    @mock.patch('getpass.getpass', return_value='a')
+    @mock.patch('time.sleep', return_value=None)
+    @mock.patch('src.common.db_masterkey.ARGON2_MIN_MEMORY', 1000)
+    @mock.patch('src.common.db_masterkey.MIN_KEY_DERIVATION_TIME', 0.01)
+    def test_master_key_change(self, *_):
+        # Setup
+        write_log_entry(F_S_HEADER + bytes(PADDING_LENGTH), nick_to_pub_key("Alice"), self.settings, self.master_key)
+
+        # Test
+        self.assertEqual(self.master_key.master_key, bytes(SYMMETRIC_KEY_LENGTH))
+        self.assertIsNone(ch_master_key(*self.args))
+        self.assertNotEqual(self.master_key.master_key, bytes(SYMMETRIC_KEY_LENGTH))
+
+    @mock.patch('getpass.getpass', return_value='a')
+    @mock.patch('time.sleep', return_value=None)
+    @mock.patch('os.getrandom', side_effect=KeyboardInterrupt)
+    def test_keyboard_interrupt_raises_fr(self, *_):
+        self.assert_fr("Password change aborted.", ch_master_key, *self.args)
+
+
+class TestChNick(TFCTestCase):
+    """Tests for the ch_nick command handler."""
+
+    def setUp(self):
+        self.ts = datetime.now()
+        self.contact_list = ContactList(nicks=['Alice'])
+        self.window_list = WindowList(contact_list=self.contact_list)
+        self.group_list = GroupList()
+        self.args = self.ts, self.window_list, self.contact_list
+        self.window = self.window_list.get_window(nick_to_pub_key("Alice"))
+        self.window.type = WIN_TYPE_CONTACT
+
+    def test_unknown_account_raises_fr(self):
+        # Setup
+        cmd_data = nick_to_pub_key("Bob") + b'Bob_'
+
+        # Test
+        trunc_addr = nick_to_short_address('Bob')
+        self.assert_fr(f"Error: Receiver has no contact '{trunc_addr}' to rename.", ch_nick, cmd_data, *self.args)
+
+    def test_nick_change(self):
+        # Setup
+        cmd_data = nick_to_pub_key("Alice") + b'Alice_'
+
+        # Test: nick changes in the contact database and in the window name.
+        self.assertIsNone(ch_nick(cmd_data, *self.args))
+        self.assertEqual(self.contact_list.get_contact_by_pub_key(nick_to_pub_key("Alice")).nick, 'Alice_')
+        self.assertEqual(self.window.name, 'Alice_')
+
+
+class TestChSetting(TFCTestCase):
+    """Tests for the ch_setting command handler."""
+
+    def setUp(self):
+        self.ts = datetime.now()
+        self.window_list = WindowList()
+        self.contact_list = ContactList()
+        self.group_list = GroupList()
+        self.key_list = KeyList()
+        self.settings = Settings()
+        self.gateway = Gateway()
+        self.args = (self.ts, self.window_list, self.contact_list, self.group_list,
+                     self.key_list, self.settings, self.gateway)
+
+    def test_invalid_data_raises_fr(self):
+        # Setup
+        self.settings.key_list = ['']
+
+        # Test: missing US_BYTE separator between key and value is invalid.
+        cmd_data = b'setting' + b'True'
+        self.assert_fr("Error: Received invalid setting data.", ch_setting, cmd_data, *self.args)
+
+    def test_invalid_setting_raises_fr(self):
+        # Setup
+        self.settings.key_list = ['']
+
+        # Test: well-formed data but unknown setting name.
+        cmd_data = b'setting' + US_BYTE + b'True'
+        self.assert_fr("Error: Invalid setting 'setting'.", ch_setting, cmd_data, *self.args)
+
+    def test_databases(self):
+        # Setup
+        self.settings.key_list = ['max_number_of_group_members', 'max_number_of_contacts']
+
+        # Test
+        cmd_data = b'max_number_of_group_members' + US_BYTE + b'30'
+        self.assertIsNone(ch_setting(cmd_data, *self.args))
+
+        cmd_data = b'max_number_of_contacts' + US_BYTE + b'30'
+        self.assertIsNone(ch_setting(cmd_data, *self.args))
+
+    def test_change_gateway_setting(self):
+        # Setup
+        self.settings.key_list = ['max_number_of_group_members', 'max_number_of_contacts']
+
+        # Test: serial_baudrate is handled by the Gateway settings, not Settings.
+        cmd_data = b'serial_baudrate' + US_BYTE + b'115200'
+        self.assertIsNone(ch_setting(cmd_data, *self.args))
+
+
+class TestChContactSetting(TFCTestCase):
+    """Tests for the ch_contact_s command handler (logging/notify/file settings)."""
+
+    def setUp(self):
+        self.ts = datetime.fromtimestamp(1502750000)
+        self.contact_list = ContactList(nicks=['Alice', 'Bob'])
+        self.group_list = GroupList(groups=['test_group', 'test_group2'])
+        self.window_list = WindowList(contact_list=self.contact_list,
+                                      group_list=self.group_list)
+        self.args = self.ts, self.window_list, self.contact_list, self.group_list
+
+    def test_invalid_window_raises_fr(self):
+        # Setup: rebuild the window list without Bob so his window is missing.
+        cmd_data = ENABLE + nick_to_pub_key("Bob")
+        header = CH_LOGGING
+        self.contact_list = ContactList(nicks=['Alice'])
+        self.window_list = WindowList(contact_list=self.contact_list,
+                                      group_list=self.group_list)
+        # Test
+        self.assert_fr(f"Error: Found no window for '{nick_to_short_address('Bob')}'.",
+                       ch_contact_s, cmd_data, *self.args, header)
+
+    def test_setting_change_contact(self):
+        # Setup
+        self.window = self.window_list.get_window(nick_to_pub_key("Bob"))
+        self.window.type = WIN_TYPE_CONTACT
+        self.window.type_print = 'contact'
+        self.window.window_contacts = self.contact_list.contacts
+        bob = self.contact_list.get_contact_by_address_or_nick("Bob")
+
+        # Test: enabling/disabling twice in a row verifies idempotence.
+        for attr, header in [('log_messages', CH_LOGGING),
+                             ('notifications', CH_NOTIFY),
+                             ('file_reception', CH_FILE_RECV)]:
+            for s in [ENABLE, ENABLE, DISABLE, DISABLE]:
+                cmd_data = s + nick_to_pub_key("Bob")
+                self.assertIsNone(ch_contact_s(cmd_data, *self.args, header))
+                self.assertEqual(bob.__getattribute__(attr), (s == ENABLE))
+
+    def test_setting_change_group(self):
+        # Setup
+        self.window = self.window_list.get_window(group_name_to_group_id('test_group'))
+        self.window.type = WIN_TYPE_GROUP
+        self.window.type_print = 'group'
+        self.window.window_contacts = self.group_list.get_group('test_group').members
+
+        # Test
+        for attr, header in [('log_messages', CH_LOGGING),
+                             ('notifications', CH_NOTIFY),
+                             ('file_reception', CH_FILE_RECV)]:
+            for s in [ENABLE, ENABLE, DISABLE, DISABLE]:
+                cmd_data = s + group_name_to_group_id('test_group')
+                self.assertIsNone(ch_contact_s(cmd_data, *self.args, header))
+
+                # Logging/notifications are group-level attributes...
+                if header in [CH_LOGGING, CH_NOTIFY]:
+                    self.assertEqual(self.group_list.get_group('test_group').__getattribute__(attr), (s == ENABLE))
+
+                # ...but file reception is applied per member.
+                if header == CH_FILE_RECV:
+                    for m in self.group_list.get_group('test_group').members:
+                        self.assertEqual(m.file_reception, (s == ENABLE))
+
+    def test_setting_change_all(self):
+        # Setup
+        self.window = self.window_list.get_window(nick_to_pub_key("Bob"))
+        self.window.type = WIN_TYPE_CONTACT
+        self.window.type_print = 'contact'
+        self.window.window_contacts = self.contact_list.contacts
+
+        # Test: upper-cased command byte means "apply to all contacts/groups".
+        for attr, header in [('log_messages', CH_LOGGING),
+                             ('notifications', CH_NOTIFY),
+                             ('file_reception', CH_FILE_RECV)]:
+            for s in [ENABLE, ENABLE, DISABLE, DISABLE]:
+                cmd_data = s.upper() + US_BYTE
+                self.assertIsNone(ch_contact_s(cmd_data, *self.args, header))
+
+                if header in [CH_LOGGING, CH_NOTIFY]:
+                    for c in self.contact_list.get_list_of_contacts():
+                        self.assertEqual(c.__getattribute__(attr), (s == ENABLE))
+                    for g in self.group_list.groups:
+                        self.assertEqual(g.__getattribute__(attr), (s == ENABLE))
+
+                if header == CH_FILE_RECV:
+                    for c in self.contact_list.get_list_of_contacts():
+                        self.assertEqual(c.__getattribute__(attr), (s == ENABLE))
+
+
+class TestContactRemove(TFCTestCase):
+    """Tests for the contact_rem command handler."""
+
+    def setUp(self):
+        self.unittest_dir = cd_unittest()
+        self.ts = datetime.now()
+        self.window_list = WindowList()
+        self.cmd_data = nick_to_pub_key("Bob")
+        self.settings = Settings()
+        self.master_key = MasterKey()
+        self.args = self.cmd_data, self.ts, self.window_list
+
+    def tearDown(self):
+        cleanup(self.unittest_dir)
+
+    def test_no_contact_raises_fr(self):
+        # Setup
+        contact_list = ContactList(nicks=['Alice'])
+        group_list = GroupList(groups=[])
+        key_list = KeyList(nicks=['Alice'])
+
+        # Test
+        self.assert_fr(f"Receiver has no account '{nick_to_short_address('Bob')}' to remove.",
+                       contact_rem, *self.args, contact_list, group_list, key_list, self.settings, self.master_key)
+
+    def test_successful_removal(self):
+        # Setup
+        contact_list = ContactList(nicks=['Alice', 'Bob'])
+        contact = contact_list.get_contact_by_address_or_nick("Bob")
+        group_list = GroupList(groups=['test_group', 'test_group2'])
+        key_list = KeyList(nicks=['Alice', 'Bob'])
+        self.window_list.windows = [RxWindow(type=WIN_TYPE_GROUP)]
+
+        # Test: the final FR about the missing log database is expected, but
+        # the contact, its keyset, and its group memberships must already be gone.
+        self.assert_fr("No log database available.",
+                       contact_rem, *self.args, contact_list, group_list, key_list, self.settings, self.master_key)
+        self.assertFalse(contact_list.has_pub_key(nick_to_pub_key("Bob")))
+        self.assertFalse(key_list.has_keyset(nick_to_pub_key("Bob")))
+        for g in group_list:
+            self.assertFalse(contact in g.members)
+
+
+class TestWipe(unittest.TestCase):
+    """Tests for the wipe command handler."""
+
+    @mock.patch('os.system', return_value=None)
+    def test_wipe_command(self, _):
+        # The wipe command queues the WIPE signal for the exit handler.
+        exit_queue = Queue()
+        self.assertIsNone(wipe(exit_queue))
+        self.assertEqual(exit_queue.get(), WIPE)
+
+
+if __name__ == '__main__':
+ unittest.main(exit=False)
diff --git a/tests/receiver/test_commands_g.py b/tests/receiver/test_commands_g.py
new file mode 100644
index 0000000..8051927
--- /dev/null
+++ b/tests/receiver/test_commands_g.py
@@ -0,0 +1,220 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import datetime
+import unittest
+
+from src.common.statics import *
+from src.receiver.commands_g import group_add, group_create, group_delete, group_remove, group_rename
+
+from tests.mock_classes import Contact, ContactList, GroupList, RxWindow, Settings, WindowList
+from tests.utils import group_name_to_group_id, nick_to_pub_key, TFCTestCase, UNDECODABLE_UNICODE
+
+
+class TestGroupCreate(TFCTestCase):
+
+ def setUp(self):
+ self.ts = datetime.datetime.now()
+ self.settings = Settings()
+ self.window_list = WindowList()
+ self.group_id = group_name_to_group_id('test_group')
+
+ def test_too_many_purp_accounts_raises_fr(self):
+ # Setup
+ create_list = [nick_to_pub_key(str(n)) for n in range(51)]
+ cmd_data = self.group_id + b'test_group' + US_BYTE + b''.join(create_list)
+ group_list = GroupList(groups=['test_group'])
+ contact_list = ContactList(nicks=[str(n) for n in range(51)])
+ group = group_list.get_group('test_group')
+ group.members = contact_list.contacts
+
+ # Test
+ self.assert_fr("Error: TFC settings only allow 50 members per group.",
+ group_create, cmd_data, self.ts, self.window_list, contact_list, group_list, self.settings)
+
+ def test_full_group_list_raises_fr(self):
+ # Setup
+ cmd_data = self.group_id + b'test_group' + US_BYTE + nick_to_pub_key('51')
+ group_list = GroupList(groups=[f"test_group_{n}" for n in range(50)])
+ contact_list = ContactList(nicks=['Alice'])
+
+ # Test
+ self.assert_fr("Error: TFC settings only allow 50 groups.",
+ group_create, cmd_data, self.ts, self.window_list, contact_list, group_list, self.settings)
+
+ def test_successful_group_creation(self):
+ # Setup
+ group_list = GroupList(groups=['test_group'])
+ cmd_data = group_name_to_group_id('test_group') + b'test_group2' + US_BYTE + nick_to_pub_key('Bob')
+ contact_list = ContactList(nicks=['Alice', 'Bob'])
+ window_list = WindowList(nicks =['Alice', 'Bob'],
+ contact_list=contact_list,
+ group_lis =group_list,
+ packet_list =None,
+ settings =Settings)
+ # Test
+ self.assertIsNone(group_create(cmd_data, self.ts, window_list, contact_list, group_list, self.settings))
+ self.assertEqual(len(group_list.get_group('test_group')), 2)
+
+
+class TestGroupAdd(TFCTestCase):
+    """Tests for the group_add command handler."""
+
+    def setUp(self):
+        self.ts = datetime.datetime.now()
+        self.settings = Settings()
+        self.window_list = WindowList()
+
+    def test_too_large_final_member_list_raises_fr(self):
+        # Setup: group is already at the 50-member cap before the add.
+        group_list = GroupList(groups=['test_group'])
+        contact_list = ContactList(nicks=[str(n) for n in range(51)])
+        group = group_list.get_group('test_group')
+        group.members = contact_list.contacts[:50]
+        cmd_data = group_name_to_group_id('test_group') + nick_to_pub_key('50')
+
+        # Test
+        self.assert_fr("Error: TFC settings only allow 50 members per group.",
+                       group_add, cmd_data, self.ts, self.window_list, contact_list, group_list, self.settings)
+
+    def test_unknown_group_id_raises_fr(self):
+        # Setup
+        group_list = GroupList(groups=['test_group'])
+        contact_list = ContactList(nicks=[str(n) for n in range(21)])
+        cmd_data = group_name_to_group_id('test_group2') + nick_to_pub_key('50')
+
+        # Test
+        self.assert_fr("Error: No group with ID '2e7mHQznTMsP6' found.",
+                       group_add, cmd_data, self.ts, self.window_list, contact_list, group_list, self.settings)
+
+    def test_successful_group_add(self):
+        # Setup: 19 members plus one added contact makes 20.
+        contact_list = ContactList(nicks=[str(n) for n in range(21)])
+        group_lst = GroupList(groups=['test_group'])
+        group = group_lst.get_group('test_group')
+        group.members = contact_list.contacts[:19]
+        cmd_data = group_name_to_group_id('test_group') + nick_to_pub_key('20')
+
+        # Test
+        self.assertIsNone(group_add(cmd_data, self.ts, self.window_list, contact_list, group_lst, self.settings))
+
+        group2 = group_lst.get_group('test_group')
+        self.assertEqual(len(group2), 20)
+
+        for c in group2:
+            self.assertIsInstance(c, Contact)
+
+
+class TestGroupRemove(TFCTestCase):
+    """Tests for the group_remove command handler."""
+
+    def setUp(self):
+        self.ts = datetime.datetime.now()
+        self.window_list = WindowList()
+        self.contact_list = ContactList(nicks=[f"contact_{n}" for n in range(21)])
+        self.group_list = GroupList(groups=['test_group'])
+        self.group = self.group_list.get_group('test_group')
+        self.group.members = self.contact_list.contacts[:19]
+        self.settings = Settings()
+
+    def test_unknown_group_id_raises_fr(self):
+        # Setup
+        group_list = GroupList(groups=['test_group'])
+        contact_list = ContactList(nicks=[str(n) for n in range(21)])
+        cmd_data = group_name_to_group_id('test_group2') + nick_to_pub_key('20')
+
+        # Test
+        self.assert_fr("Error: No group with ID '2e7mHQznTMsP6' found.",
+                       group_remove, cmd_data, self.ts, self.window_list, contact_list, group_list)
+
+    def test_successful_member_removal(self):
+        # contact_18 is a member, contact_20 is not; removal must tolerate both.
+        self.cmd_data = group_name_to_group_id('test_group') + b''.join([nick_to_pub_key('contact_18'),
+                                                                         nick_to_pub_key('contact_20')])
+        self.assertIsNone(group_remove(self.cmd_data, self.ts, self.window_list, self.contact_list, self.group_list))
+
+
+class TestGroupDelete(TFCTestCase):
+    """Tests for the group_delete command handler."""
+
+    def setUp(self):
+        self.ts = datetime.datetime.now()
+        self.window_list = WindowList()
+        self.group_list = GroupList(groups=['test_group'])
+
+    # NOTE(review): this test and test_unknown_group_id_raises_fr below assert
+    # the exact same behavior — one of the two is redundant.
+    def test_missing_group_raises_fr(self):
+        cmd_data = group_name_to_group_id('test_group2')
+        self.assert_fr("Error: No group with ID '2e7mHQznTMsP6' found.",
+                       group_delete, cmd_data, self.ts, self.window_list, self.group_list)
+
+    def test_unknown_group_id_raises_fr(self):
+        # Setup
+        group_list = GroupList(groups=['test_group'])
+        cmd_data = group_name_to_group_id('test_group2')
+
+        # Test
+        self.assert_fr("Error: No group with ID '2e7mHQznTMsP6' found.",
+                       group_delete, cmd_data, self.ts, self.window_list, group_list)
+
+    def test_successful_remove(self):
+        cmd_data = group_name_to_group_id('test_group')
+        self.assertIsNone(group_delete(cmd_data, self.ts, self.window_list, self.group_list))
+        self.assertEqual(len(self.group_list.groups), 0)
+
+
+class TestGroupRename(TFCTestCase):
+    """Tests for the group_rename command handler."""
+
+    def setUp(self):
+        self.ts = datetime.datetime.now()
+        self.group_list = GroupList(groups=['test_group'])
+        self.window_list = WindowList()
+        self.window = RxWindow()
+        self.window_list.windows = [self.window]
+        self.contact_list = ContactList(nicks=['alice'])
+        self.args = self.ts, self.window_list, self.contact_list, self.group_list
+
+    def test_missing_group_id_raises_fr(self):
+        # Setup
+        cmd_data = group_name_to_group_id('test_group2') + b'new_name'
+
+        # Test
+        self.assert_fr("Error: No group with ID '2e7mHQznTMsP6' found.", group_rename, cmd_data, *self.args)
+
+    def test_invalid_group_name_encoding_raises_fr(self):
+        # Setup: append bytes that are not valid UTF-8.
+        cmd_data = group_name_to_group_id('test_group') + b'new_name' + UNDECODABLE_UNICODE
+
+        # Test
+        self.assert_fr("Error: New name for group 'test_group' was invalid.", group_rename, cmd_data, *self.args)
+
+    def test_invalid_group_name_raises_fr(self):
+        # Setup: \x1f is a non-printable control character.
+        cmd_data = group_name_to_group_id('test_group') + b'new_name\x1f'
+
+        # Test
+        self.assert_fr("Error: Group name must be printable.", group_rename, cmd_data, *self.args)
+
+    def test_valid_group_name_change(self):
+        # Setup
+        cmd_data = group_name_to_group_id('test_group') + b'new_name'
+
+        # Test
+        self.assertIsNone(group_rename(cmd_data, *self.args))
+
+
+if __name__ == '__main__':
+ unittest.main(exit=False)
diff --git a/tests/receiver/test_files.py b/tests/receiver/test_files.py
new file mode 100644
index 0000000..dee634d
--- /dev/null
+++ b/tests/receiver/test_files.py
@@ -0,0 +1,293 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import os
+import unittest
+import zlib
+
+from datetime import datetime
+from unittest import mock
+
+from src.common.crypto import blake2b, encrypt_and_sign
+from src.common.encoding import str_to_bytes
+from src.common.statics import *
+
+from src.receiver.files import new_file, process_assembled_file, process_file, store_unique
+
+from tests.mock_classes import ContactList, Settings, WindowList
+from tests.utils import cd_unittest, cleanup, nick_to_pub_key, TFCTestCase, UNDECODABLE_UNICODE
+
+
+class TestStoreUnique(unittest.TestCase):
+    """Tests for store_unique (collision-free file storage)."""
+
+    def setUp(self):
+        self.unittest_dir = cd_unittest()
+        self.file_data = os.urandom(100)
+        self.file_dir = 'test_dir/'
+        self.file_name = 'test_file'
+
+    def tearDown(self):
+        cleanup(self.unittest_dir)
+
+    def test_each_file_is_store_with_unique_name(self):
+        # Name collisions are resolved by appending an incrementing suffix.
+        self.assertEqual(store_unique(self.file_data, self.file_dir, self.file_name), 'test_file')
+        self.assertEqual(store_unique(self.file_data, self.file_dir, self.file_name), 'test_file.1')
+        self.assertEqual(store_unique(self.file_data, self.file_dir, self.file_name), 'test_file.2')
+
+
+class ProcessAssembledFile(TFCTestCase):
+
+ def setUp(self):
+ self.unittest_dir = cd_unittest()
+ self.ts = datetime.now()
+ self.onion_pub_key = nick_to_pub_key('Alice')
+ self.nick = 'Alice'
+ self.settings = Settings()
+ self.window_list = WindowList(nick=['Alice', 'Bob'])
+ self.key = os.urandom(SYMMETRIC_KEY_LENGTH)
+ self.args = self.onion_pub_key, self.nick, self.settings, self.window_list
+
+ def tearDown(self):
+ cleanup(self.unittest_dir)
+
+ def test_invalid_structure_raises_fr(self):
+ # Setup
+ payload = b'testfile.txt'
+
+ # Test
+ self.assert_fr("Error: Received file had an invalid structure.",
+ process_assembled_file, self.ts, payload, *self.args)
+
+ def test_invalid_encoding_raises_fr(self):
+ # Setup
+ payload = UNDECODABLE_UNICODE + US_BYTE + b'file_data'
+
+ # Test
+ self.assert_fr("Error: Received file name had invalid encoding.",
+ process_assembled_file, self.ts, payload, *self.args)
+
+ def test_invalid_name_raises_fr(self):
+ # Setup
+ payload = b'\x01filename' + US_BYTE + b'file_data'
+
+ # Test
+ self.assert_fr("Error: Received file had an invalid name.",
+ process_assembled_file, self.ts, payload, *self.args)
+
+ def test_slash_in_file_name_raises_fr(self):
+ # Setup
+ payload = b'file/name' + US_BYTE + b'file_data'
+
+ # Test
+ self.assert_fr("Error: Received file had an invalid name.",
+ process_assembled_file, self.ts, payload, *self.args)
+
+ def test_invalid_key_raises_fr(self):
+ # Setup
+ payload = b'testfile.txt' + US_BYTE + b'file_data'
+
+ # Test
+ self.assert_fr("Error: Received file had an invalid key.",
+ process_assembled_file, self.ts, payload, *self.args)
+
+ def test_decryption_fail_raises_fr(self):
+ # Setup
+ file_data = encrypt_and_sign(b'file_data', self.key)[::-1]
+ payload = b'testfile.txt' + US_BYTE + file_data
+
+ # Test
+ self.assert_fr("Error: Decryption of file data failed.",
+ process_assembled_file, self.ts, payload, *self.args)
+
+ def test_invalid_compression_raises_fr(self):
+ # Setup
+ compressed = zlib.compress(b'file_data', level=COMPRESSION_LEVEL)[::-1]
+ file_data = encrypt_and_sign(compressed, self.key) + self.key
+ payload = b'testfile.txt' + US_BYTE + file_data
+
+ # Test
+ self.assert_fr("Error: Decompression of file data failed.",
+ process_assembled_file, self.ts, payload, *self.args)
+
+ def test_successful_reception(self):
+ # Setup
+ compressed = zlib.compress(b'file_data', level=COMPRESSION_LEVEL)
+ file_data = encrypt_and_sign(compressed, self.key) + self.key
+ payload = b'testfile.txt' + US_BYTE + file_data
+
+ # Test
+ self.assertIsNone(process_assembled_file(self.ts, payload, *self.args))
+ self.assertTrue(os.path.isfile(f'{DIR_RECV_FILES}Alice/testfile.txt'))
+
+ def test_successful_reception_during_traffic_masking(self):
+ # Setup
+ self.settings.traffic_masking = True
+ self.window_list.active_win = self.window_list.get_window(nick_to_pub_key('Bob'))
+
+ compressed = zlib.compress(b'file_data', level=COMPRESSION_LEVEL)
+ file_data = encrypt_and_sign(compressed, self.key) + self.key
+ payload = b'testfile.txt' + US_BYTE + file_data
+
+ # Test
+ self.assertIsNone(process_assembled_file(self.ts, payload, *self.args))
+ self.assertEqual(self.window_list.get_window(nick_to_pub_key('Bob')).message_log[0][1],
+ "Stored file from Alice as 'testfile.txt'.")
+ self.assertTrue(os.path.isfile(f'{DIR_RECV_FILES}Alice/testfile.txt'))
+
+
+class TestNewFile(TFCTestCase):
+    """Tests for the new_file packet handler."""
+
+    def setUp(self):
+        self.unittest_dir = cd_unittest()
+        self.ts = datetime.now()
+        self.packet = b''
+        self.file_keys = dict()
+        self.file_buf = dict()
+        self.contact_list = ContactList(nicks=['Alice'])
+        self.window_list = WindowList()
+        self.file_key = SYMMETRIC_KEY_LENGTH*b'a'
+        self.settings = Settings()
+        self.compressed = zlib.compress(str_to_bytes("test_file.txt") + b'file_data', level=COMPRESSION_LEVEL)
+        self.args = self.file_keys, self.file_buf, self.contact_list, self.window_list, self.settings
+
+    def tearDown(self):
+        cleanup(self.unittest_dir)
+
+    def test_unknown_account_raises_fr(self):
+        # Setup: Bob is not in the contact list.
+        file_ct = encrypt_and_sign(self.compressed, self.file_key)
+        packet = nick_to_pub_key('Bob') + ORIGIN_CONTACT_HEADER + file_ct
+
+        # Test
+        self.assert_fr("File from an unknown account.", new_file, self.ts, packet, *self.args)
+
+    def test_disabled_file_reception_raises_fr(self):
+        # Setup
+        file_ct = encrypt_and_sign(self.compressed, self.file_key)
+        packet = nick_to_pub_key('Alice') + ORIGIN_CONTACT_HEADER + file_ct
+        self.contact_list.get_contact_by_address_or_nick('Alice').file_reception = False
+
+        # Test
+        self.assert_fr("Alert! Discarded file from Alice as file reception for them is disabled.",
+                       new_file, self.ts, packet, *self.args)
+
+    def test_valid_file_without_key_is_cached(self):
+        # Setup: no decryption key yet, so the ciphertext is buffered by hash.
+        file_ct = encrypt_and_sign(self.compressed, self.file_key)
+        file_hash = blake2b(file_ct)
+        packet = nick_to_pub_key('Alice') + ORIGIN_CONTACT_HEADER + file_ct
+
+        # Test
+        self.assertIsNone(new_file(self.ts, packet, *self.args))
+        self.assertEqual(self.file_buf[nick_to_pub_key('Alice') + file_hash], (self.ts, file_ct))
+
+    @mock.patch('time.sleep', return_value=None)
+    def test_valid_file_with_key_is_processed(self, _):
+        # Setup: pre-delivered key means the file is decrypted immediately.
+        file_ct = encrypt_and_sign(self.compressed, self.file_key)
+        file_hash = blake2b(file_ct)
+        packet = nick_to_pub_key('Alice') + ORIGIN_CONTACT_HEADER + file_ct
+        self.file_keys = {(nick_to_pub_key('Alice') + file_hash): self.file_key}
+        self.args = self.file_keys, self.file_buf, self.contact_list, self.window_list, self.settings
+
+        # Test
+        self.assertIsNone(new_file(self.ts, packet, *self.args))
+
+
+class TestProcessFile(TFCTestCase):
+    """Tests for the process_file helper (decrypt, decompress, store)."""
+
+    def setUp(self):
+        self.unittest_dir = cd_unittest()
+        self.ts = datetime.now()
+        self.account = nick_to_pub_key('Alice')
+        self.file_key = SYMMETRIC_KEY_LENGTH*b'a'
+        self.file_ct = encrypt_and_sign(50 * b'a', key=self.file_key)
+        self.contact_list = ContactList(nicks=['Alice'])
+        self.window_list = WindowList()
+        self.settings = Settings()
+        self.args = self.file_key, self.contact_list, self.window_list, self.settings
+
+    def tearDown(self):
+        cleanup(self.unittest_dir)
+
+    def test_invalid_key_raises_fr(self):
+        # Wrong key: ciphertext was sealed under b'a'*32, not b'f'*32.
+        self.file_key = SYMMETRIC_KEY_LENGTH * b'f'
+        self.args = self.file_key, self.contact_list, self.window_list, self.settings
+        self.assert_fr("Error: Decryption key for file from Alice was invalid.",
+                       process_file, self.ts, self.account, self.file_ct, *self.args)
+
+    def test_invalid_compression_raises_fr(self):
+        # Reversed zlib stream decrypts fine but fails to decompress.
+        compressed = zlib.compress(b'file_data', level=COMPRESSION_LEVEL)[::-1]
+        file_data = encrypt_and_sign(compressed, self.file_key)
+
+        self.assert_fr("Error: Failed to decompress file from Alice.",
+                       process_file, self.ts, self.account, file_data, *self.args)
+
+    @mock.patch('time.sleep', return_value=None)
+    def test_invalid_file_name_raises_fr(self, _):
+        # File name bytes are not valid UTF-8.
+        compressed = zlib.compress(UNDECODABLE_UNICODE + b'file_data', level=COMPRESSION_LEVEL)
+        file_data = encrypt_and_sign(compressed, self.file_key)
+
+        self.assert_fr("Error: Name of file from Alice had invalid encoding.",
+                       process_file, self.ts, self.account, file_data, *self.args)
+
+    @mock.patch('time.sleep', return_value=None)
+    def test_non_printable_name_raises_fr(self, _):
+        compressed = zlib.compress(str_to_bytes("file\x01") + b'file_data', level=COMPRESSION_LEVEL)
+        file_data = encrypt_and_sign(compressed, self.file_key)
+
+        self.assert_fr("Error: Name of file from Alice was invalid.",
+                       process_file, self.ts, self.account, file_data, *self.args)
+
+    @mock.patch('time.sleep', return_value=None)
+    def test_slash_in_name_raises_fr(self, _):
+        # Slash in a name could escape the per-contact storage directory.
+        compressed = zlib.compress(str_to_bytes("Alice/file.txt") + b'file_data', level=COMPRESSION_LEVEL)
+        file_data = encrypt_and_sign(compressed, self.file_key)
+
+        self.assert_fr("Error: Name of file from Alice was invalid.",
+                       process_file, self.ts, self.account, file_data, *self.args)
+
+    @mock.patch('time.sleep', return_value=None)
+    def test_successful_storage_of_file(self, _):
+        compressed = zlib.compress(str_to_bytes("test_file.txt") + b'file_data', level=COMPRESSION_LEVEL)
+        file_data = encrypt_and_sign(compressed, self.file_key)
+
+        self.assertIsNone(process_file(self.ts, self.account, file_data, *self.args))
+
+    @mock.patch('time.sleep', return_value=None)
+    def test_successful_storage_during_traffic_masking(self, _):
+        # Setup: under traffic masking the notice goes to the active window.
+        self.settings.traffic_masking = True
+        self.window_list.active_win = self.window_list.get_window(nick_to_pub_key('Bob'))
+
+        compressed = zlib.compress(str_to_bytes("testfile.txt") + b'file_data', level=COMPRESSION_LEVEL)
+        file_data = encrypt_and_sign(compressed, self.file_key)
+
+        self.assertIsNone(process_file(self.ts, self.account, file_data, *self.args))
+
+        self.assertEqual(self.window_list.get_window(nick_to_pub_key('Bob')).message_log[0][1],
+                         "Stored file from Alice as 'testfile.txt'.")
+
+        self.assertTrue(os.path.isfile(f'{DIR_RECV_FILES}Alice/testfile.txt'))
+
+
+if __name__ == '__main__':
+ unittest.main(exit=False)
diff --git a/tests/receiver/test_key_exchanges.py b/tests/receiver/test_key_exchanges.py
new file mode 100644
index 0000000..bc0bf1c
--- /dev/null
+++ b/tests/receiver/test_key_exchanges.py
@@ -0,0 +1,420 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import os
+import tkinter
+import unittest
+
+from multiprocessing import Queue
+
+from datetime import datetime
+from unittest import mock
+from unittest.mock import MagicMock
+
+from src.common.crypto import argon2_kdf, encrypt_and_sign
+from src.common.encoding import b58encode, str_to_bytes
+from src.common.exceptions import FunctionReturn
+from src.common.statics import *
+
+from src.receiver.key_exchanges import key_ex_ecdhe, key_ex_psk_rx, key_ex_psk_tx, local_key_rdy, process_local_key
+
+from tests.mock_classes import Contact, ContactList, KeyList, KeySet, Settings, WindowList
+from tests.utils import cd_unittest, cleanup, nick_to_short_address, nick_to_pub_key, tear_queue, TFCTestCase
+from tests.utils import UNDECODABLE_UNICODE
+
+
+class TestProcessLocalKey(TFCTestCase):
+
+ kek = os.urandom(SYMMETRIC_KEY_LENGTH)
+ new_kek = os.urandom(SYMMETRIC_KEY_LENGTH)
+
+ def setUp(self):
+ self.contact_list = ContactList(nicks=[LOCAL_ID, 'Alice'])
+ self.key_list = KeyList( nicks=[LOCAL_ID, 'Alice'])
+ self.window_list = WindowList( nicks=[LOCAL_ID, 'Alice'])
+ self.settings = Settings()
+ self.ts = datetime.now()
+ self.kdk_hashes = list()
+ self.packet_hashes = list()
+ self.l_queue = Queue()
+ self.key = os.urandom(SYMMETRIC_KEY_LENGTH)
+ self.hek = os.urandom(SYMMETRIC_KEY_LENGTH)
+ self.conf_code = os.urandom(CONFIRM_CODE_LENGTH)
+ self.packet = encrypt_and_sign(self.key + self.hek + self.conf_code, key=self.kek)
+ self.args = (self.window_list, self.contact_list, self.key_list, self.settings,
+ self.kdk_hashes, self.packet_hashes, self.l_queue)
+
+ def tearDown(self):
+ tear_queue(self.l_queue)
+
+ @mock.patch('tkinter.Tk', return_value=MagicMock())
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', return_value='5KfgdgUvseWfNkoUPWSvxMPNStu5wBBxyjz1zpZtLEjk7ZvwEAT')
+ def test_invalid_decryption_key_raises_fr(self, *_):
+ # Setup
+ packet = b''
+ self.key_list.keysets = []
+
+ # Test
+ self.assert_fr("Error: Incorrect key decryption key.", process_local_key, self.ts, packet, *self.args)
+
+ @mock.patch('tkinter.Tk', return_value=MagicMock())
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', side_effect=['5KfgdgUvseWfNkoUPWSvxMPNStu5wBBxyjz1zpZtLEjk7ZvwEAT', b58encode(kek)])
+ def test_successful_local_key_processing_with_existing_local_key(self, *_):
+ self.assert_fr("Error: Incorrect key decryption key.", process_local_key, self.ts, self.packet, *self.args)
+ self.assertIsNone(process_local_key(self.ts, self.packet, *self.args))
+
+ @mock.patch('tkinter.Tk', return_value=MagicMock())
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', return_value=b58encode(kek))
+ def test_successful_local_key_processing_existing_bootstrap(self, *_):
+ # Setup
+ self.key_list.keysets = []
+
+ # Test
+ self.assertIsNone(process_local_key(self.ts, self.packet, *self.args))
+ self.assertEqual(self.window_list.active_win.uid, WIN_UID_LOCAL)
+
+ @mock.patch('tkinter.Tk', return_value=MagicMock())
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', side_effect=KeyboardInterrupt)
+ def test_keyboard_interrupt_raises_fr(self, *_):
+ # Setup
+ self.window_list.active_win = self.window_list.get_window(nick_to_pub_key('Alice'))
+
+ # Test
+ self.assert_fr("Local key setup aborted.", process_local_key, self.ts, bytes(SYMMETRIC_KEY_LENGTH), *self.args)
+
+ @mock.patch('tkinter.Tk', return_value=MagicMock())
+ @mock.patch('os.system', return_value=None)
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', side_effect=[b58encode(kek), b58encode(kek), b58encode(kek), b58encode(new_kek)])
+ def test_old_local_key_packet_raises_fr(self, *_):
+ # Setup
+ self.key_list.keysets = []
+ new_key = os.urandom(SYMMETRIC_KEY_LENGTH)
+ new_hek = os.urandom(SYMMETRIC_KEY_LENGTH)
+ new_conf_code = os.urandom(CONFIRM_CODE_LENGTH)
+ new_packet = encrypt_and_sign(new_key + new_hek + new_conf_code, key=self.new_kek)
+
+ # Test
+ self.assertIsNone(process_local_key(self.ts, self.packet, *self.args))
+ self.assert_fr("Error: Received old local key packet.", process_local_key, self.ts, self.packet, *self.args)
+ self.assertIsNone(process_local_key(self.ts, new_packet, *self.args))
+
+ @mock.patch('tkinter.Tk', side_effect=[MagicMock(clipboard_get =MagicMock(return_value=b58encode(new_kek)),
+ clipboard_clear=MagicMock(side_effect=[tkinter.TclError]))])
+ @mock.patch('os.system', return_value=None)
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', side_effect=[b58encode(new_kek)])
+ def test_loading_local_key_from_queue(self, *_):
+ # Setup
+ self.key_list.keysets = []
+ new_key = os.urandom(SYMMETRIC_KEY_LENGTH)
+ new_hek = os.urandom(SYMMETRIC_KEY_LENGTH)
+ new_conf_code = os.urandom(CONFIRM_CODE_LENGTH)
+ new_packet = encrypt_and_sign(new_key + new_hek + new_conf_code, key=self.new_kek)
+ next_packet = os.urandom(len(new_packet))
+ first_packet = os.urandom(len(new_packet))
+ self.l_queue.put((datetime.now(), first_packet))
+ self.l_queue.put((datetime.now(), new_packet))
+ self.l_queue.put((datetime.now(), next_packet))
+
+ # Test
+ self.assertEqual(self.l_queue.qsize(), 3)
+ self.assertIsNone(process_local_key(self.ts, self.packet, *self.args))
+ self.assertEqual(self.l_queue.qsize(), 1)
+
+
+class TestLocalKeyRdy(TFCTestCase):
+
+ def setUp(self):
+ self.ts = datetime.fromtimestamp(1502750000)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_local_key_installed_no_contacts(self, _):
+ # Setup
+ self.window_list = WindowList(nicks=[LOCAL_ID])
+ self.contact_list = ContactList(nicks=[LOCAL_ID])
+
+ # Test
+ self.assert_prints(f"""\
+{BOLD_ON} Successfully completed the local key setup. {NORMAL_TEXT}
+{CLEAR_ENTIRE_SCREEN+CURSOR_LEFT_UP_CORNER}
+{BOLD_ON} Waiting for new contacts {NORMAL_TEXT}
+
+""", local_key_rdy, self.ts, self.window_list, self.contact_list)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_local_key_installed_existing_contact(self, _):
+ # Setup
+ self.window_list = WindowList(nicks=[LOCAL_ID, 'Alice'])
+ self.contact_list = ContactList(nicks=[LOCAL_ID, 'Alice'])
+ self.window_list.active_win = self.window_list.get_window(nick_to_pub_key('Alice'))
+ self.window_list.active_win.type = WIN_TYPE_CONTACT
+
+ # Test
+ self.assertIsNone(local_key_rdy(self.ts, self.window_list, self.contact_list))
+
+
+class TestKeyExECDHE(TFCTestCase):
+
+ def setUp(self):
+ self.ts = datetime.fromtimestamp(1502750000)
+ self.window_list = WindowList(nicks=[LOCAL_ID])
+ self.contact_list = ContactList()
+ self.key_list = KeyList()
+ self.settings = Settings()
+ self.packet = (nick_to_pub_key("Alice")
+ + SYMMETRIC_KEY_LENGTH * b'\x01'
+ + SYMMETRIC_KEY_LENGTH * b'\x02'
+ + SYMMETRIC_KEY_LENGTH * b'\x03'
+ + SYMMETRIC_KEY_LENGTH * b'\x04'
+ + str_to_bytes('Alice'))
+ self.args = self.packet, self.ts, self.window_list, self.contact_list, self.key_list, self.settings
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_invalid_nick_raises_fr(self, _):
+ self.packet = (nick_to_pub_key("Alice")
+ + SYMMETRIC_KEY_LENGTH * b'\x01'
+ + SYMMETRIC_KEY_LENGTH * b'\x02'
+ + SYMMETRIC_KEY_LENGTH * b'\x03'
+ + SYMMETRIC_KEY_LENGTH * b'\x04'
+ + UNDECODABLE_UNICODE)
+ self.args = self.packet, self.ts, self.window_list, self.contact_list, self.key_list, self.settings
+
+ self.assert_fr("Error: Received invalid contact data", key_ex_ecdhe, *self.args)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_add_ecdhe_keys(self, _):
+ self.assertIsNone(key_ex_ecdhe(*self.args))
+
+ keyset = self.key_list.get_keyset(nick_to_pub_key("Alice"))
+ self.assertIsInstance(keyset, KeySet)
+
+ self.assertEqual(keyset.onion_pub_key, nick_to_pub_key("Alice"))
+ self.assertEqual(keyset.tx_mk, SYMMETRIC_KEY_LENGTH * b'\x01')
+ self.assertEqual(keyset.rx_mk, SYMMETRIC_KEY_LENGTH * b'\x02')
+ self.assertEqual(keyset.tx_hk, SYMMETRIC_KEY_LENGTH * b'\x03')
+ self.assertEqual(keyset.rx_hk, SYMMETRIC_KEY_LENGTH * b'\x04')
+
+ contact = self.contact_list.get_contact_by_pub_key(nick_to_pub_key("Alice"))
+ self.assertIsInstance(contact, Contact)
+ self.assertEqual(contact.onion_pub_key, nick_to_pub_key("Alice"))
+ self.assertEqual(contact.nick, 'Alice')
+ self.assertEqual(contact.rx_fingerprint, bytes(FINGERPRINT_LENGTH))
+ self.assertEqual(contact.tx_fingerprint, bytes(FINGERPRINT_LENGTH))
+
+
+class TestKeyExPSKTx(TFCTestCase):
+
+ def setUp(self):
+ self.ts = datetime.fromtimestamp(1502750000)
+ self.window_list = WindowList(nicks=[LOCAL_ID])
+ self.contact_list = ContactList()
+ self.key_list = KeyList()
+ self.settings = Settings()
+ self.packet = (nick_to_pub_key("Alice")
+ + SYMMETRIC_KEY_LENGTH * b'\x01'
+ + bytes(SYMMETRIC_KEY_LENGTH)
+ + SYMMETRIC_KEY_LENGTH * b'\x02'
+ + bytes(SYMMETRIC_KEY_LENGTH)
+ + str_to_bytes('Alice'))
+ self.args = self.packet, self.ts, self.window_list, self.contact_list, self.key_list, self.settings
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_invalid_nick_raises_fr(self, _):
+ self.packet = (nick_to_pub_key("Alice")
+ + SYMMETRIC_KEY_LENGTH * b'\x01'
+ + bytes(SYMMETRIC_KEY_LENGTH)
+ + SYMMETRIC_KEY_LENGTH * b'\x02'
+ + bytes(SYMMETRIC_KEY_LENGTH)
+ + UNDECODABLE_UNICODE)
+ self.args = self.packet, self.ts, self.window_list, self.contact_list, self.key_list, self.settings
+
+ self.assert_fr("Error: Received invalid contact data", key_ex_psk_tx, *self.args)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_add_psk_tx_keys(self, _):
+ self.assertIsNone(key_ex_psk_tx(*self.args))
+
+ keyset = self.key_list.get_keyset(nick_to_pub_key("Alice"))
+ self.assertIsInstance(keyset, KeySet)
+
+ self.assertEqual(keyset.onion_pub_key, nick_to_pub_key("Alice"))
+ self.assertEqual(keyset.tx_mk, SYMMETRIC_KEY_LENGTH * b'\x01')
+ self.assertEqual(keyset.rx_mk, bytes(SYMMETRIC_KEY_LENGTH))
+ self.assertEqual(keyset.tx_hk, SYMMETRIC_KEY_LENGTH * b'\x02')
+ self.assertEqual(keyset.rx_hk, bytes(SYMMETRIC_KEY_LENGTH))
+
+ contact = self.contact_list.get_contact_by_pub_key(nick_to_pub_key("Alice"))
+ self.assertIsInstance(contact, Contact)
+
+ self.assertEqual(contact.onion_pub_key, nick_to_pub_key("Alice"))
+ self.assertEqual(contact.nick, 'Alice')
+ self.assertEqual(contact.tx_fingerprint, bytes(FINGERPRINT_LENGTH))
+ self.assertEqual(contact.rx_fingerprint, bytes(FINGERPRINT_LENGTH))
+
+
+class TestKeyExPSKRx(TFCTestCase):
+
+ file_name = f"{nick_to_short_address('User')}.psk - give to {nick_to_short_address('Alice')}"
+
+ def setUp(self):
+ self.unittest_dir = cd_unittest()
+ self.packet = b'\x00' + nick_to_pub_key("Alice")
+ self.ts = datetime.now()
+ self.window_list = WindowList( nicks=['Alice', LOCAL_ID])
+ self.contact_list = ContactList(nicks=['Alice', LOCAL_ID])
+ self.key_list = KeyList( nicks=['Alice', LOCAL_ID])
+ self.settings = Settings(disable_gui_dialog=True)
+ self.file_name = self.file_name
+ self.args = self.packet, self.ts, self.window_list, self.contact_list, self.key_list, self.settings
+
+ def tearDown(self):
+ cleanup(self.unittest_dir)
+
+ def test_unknown_account_raises_fr(self):
+ self.assert_fr(f"Error: Unknown account '{nick_to_short_address('Bob')}'.",
+ key_ex_psk_rx, b'\x00' + nick_to_pub_key("Bob"),
+ self.ts, self.window_list, self.contact_list, self.key_list, self.settings)
+
+ @mock.patch('builtins.input', return_value=file_name)
+ def test_invalid_psk_data_raises_fr(self, _):
+ # Setup
+ with open(self.file_name, 'wb+') as f:
+ f.write(os.urandom(135))
+
+ # Test
+ self.assert_fr("Error: The PSK data in the file was invalid.", key_ex_psk_rx, *self.args)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', return_value=file_name)
+ def test_permission_error_raises_fr(self, *_):
+ # Setup
+ with open(self.file_name, 'wb+') as f:
+ f.write(os.urandom(PSK_FILE_SIZE))
+
+ # Test
+ e_raised = False
+ try:
+ with mock.patch('builtins.open', side_effect=PermissionError):
+ key_ex_psk_rx(*self.args)
+ except FunctionReturn as inst:
+ e_raised = True
+ self.assertEqual("Error: No read permission for the PSK file.", inst.message)
+ self.assertTrue(e_raised)
+
+ @mock.patch('src.receiver.key_exchanges.ARGON2_ROUNDS', 1)
+ @mock.patch('src.receiver.key_exchanges.ARGON2_MIN_MEMORY', 100)
+ @mock.patch('getpass.getpass', side_effect=['invalid', 'password'])
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('os.urandom', side_effect=[bytes(XCHACHA20_NONCE_LENGTH)])
+ @mock.patch('builtins.input', return_value=file_name)
+ def test_invalid_keys_raise_fr(self, *_):
+ # Setup
+ keyset = self.key_list.get_keyset(nick_to_pub_key("Alice"))
+ keyset.rx_mk = bytes(SYMMETRIC_KEY_LENGTH)
+ keyset.rx_hk = bytes(SYMMETRIC_KEY_LENGTH)
+
+ salt = bytes(ARGON2_SALT_LENGTH)
+ rx_key = bytes(SYMMETRIC_KEY_LENGTH)
+ rx_hek = bytes(SYMMETRIC_KEY_LENGTH)
+ kek = argon2_kdf('password', salt, rounds=1, memory=100)
+ ct_tag = encrypt_and_sign(rx_key + rx_hek, key=kek)
+
+ with open(self.file_name, 'wb+') as f:
+ f.write(salt + ct_tag)
+
+ # Test
+ self.assert_fr("Error: Received invalid keys from contact.", key_ex_psk_rx, *self.args)
+
+ @mock.patch('src.receiver.key_exchanges.ARGON2_ROUNDS', 1)
+ @mock.patch('src.receiver.key_exchanges.ARGON2_MIN_MEMORY', 100)
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', return_value=file_name)
+ @mock.patch('getpass.getpass', return_value='test_password')
+ def test_valid_psk(self, *_):
+ # Setup
+ keyset = self.key_list.get_keyset(nick_to_pub_key("Alice"))
+ keyset.rx_mk = bytes(SYMMETRIC_KEY_LENGTH)
+ keyset.rx_hk = bytes(SYMMETRIC_KEY_LENGTH)
+ salt = os.urandom(ARGON2_SALT_LENGTH)
+ rx_key = os.urandom(SYMMETRIC_KEY_LENGTH)
+ rx_hek = os.urandom(SYMMETRIC_KEY_LENGTH)
+ kek = argon2_kdf('test_password', salt, rounds=1, memory=100)
+ ct_tag = encrypt_and_sign(rx_key + rx_hek, key=kek)
+
+ with open(self.file_name, 'wb+') as f:
+ f.write(salt + ct_tag)
+
+ # Test
+ self.assertTrue(os.path.isfile(self.file_name))
+ self.assertIsNone(key_ex_psk_rx(*self.args))
+ self.assertFalse(os.path.isfile(self.file_name))
+ self.assertEqual(keyset.rx_mk, rx_key)
+ self.assertEqual(keyset.rx_hk, rx_hek)
+
+ @mock.patch('src.receiver.key_exchanges.ARGON2_ROUNDS', 1)
+ @mock.patch('src.receiver.key_exchanges.ARGON2_MIN_MEMORY', 100)
+ @mock.patch('subprocess.Popen')
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', side_effect=[file_name, ''])
+ @mock.patch('getpass.getpass', return_value='test_password')
+ def test_valid_psk_overwrite_failure(self, *_):
+ # Setup
+ keyset = self.key_list.get_keyset(nick_to_pub_key("Alice"))
+ keyset.rx_mk = bytes(SYMMETRIC_KEY_LENGTH)
+ keyset.rx_hk = bytes(SYMMETRIC_KEY_LENGTH)
+
+ salt = os.urandom(ARGON2_SALT_LENGTH)
+ rx_key = os.urandom(SYMMETRIC_KEY_LENGTH)
+ rx_hek = os.urandom(SYMMETRIC_KEY_LENGTH)
+ kek = argon2_kdf('test_password', salt, rounds=1, memory=100)
+ ct_tag = encrypt_and_sign(rx_key + rx_hek, key=kek)
+
+ with open(self.file_name, 'wb+') as f:
+ f.write(salt + ct_tag)
+
+ # Test
+ self.assertTrue(os.path.isfile(self.file_name))
+ self.assertIsNone(key_ex_psk_rx(*self.args))
+ self.assertTrue(os.path.isfile(self.file_name))
+ self.assertEqual(keyset.rx_mk, rx_key)
+ self.assertEqual(keyset.rx_hk, rx_hek)
+
+ @mock.patch('src.receiver.key_exchanges.ARGON2_ROUNDS', 1)
+ @mock.patch('src.receiver.key_exchanges.ARGON2_MIN_MEMORY', 100)
+ @mock.patch('subprocess.Popen')
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', side_effect=[file_name, ''])
+ @mock.patch('getpass.getpass', side_effect=[KeyboardInterrupt])
+ def test_valid_psk_keyboard_interrupt_raises_fr(self, *_):
+ with open(self.file_name, 'wb+') as f:
+ f.write(bytes(PSK_FILE_SIZE))
+
+ self.assert_fr("PSK import aborted.",
+ key_ex_psk_rx, *self.args)
+
+
+if __name__ == '__main__':
+ unittest.main(exit=False)
diff --git a/tests/receiver/test_messages.py b/tests/receiver/test_messages.py
new file mode 100644
index 0000000..0828afc
--- /dev/null
+++ b/tests/receiver/test_messages.py
@@ -0,0 +1,342 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import base64
+import os
+import unittest
+
+from datetime import datetime
+from unittest import mock
+
+from src.common.encoding import bool_to_bytes
+from src.common.misc import ensure_dir
+from src.common.statics import *
+
+from src.receiver.messages import process_message
+from src.receiver.packet import PacketList
+from src.receiver.windows import WindowList
+
+from tests.mock_classes import ContactList, GroupList, KeyList, MasterKey, Settings
+from tests.utils import assembly_packet_creator, cd_unittest, cleanup, group_name_to_group_id
+from tests.utils import nick_to_pub_key, TFCTestCase
+
+
+class TestProcessMessage(TFCTestCase):
+
+ def setUp(self):
+ self.unittest_dir = cd_unittest()
+
+ self.msg = ("Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum consectetur purus quis"
+ " dapibus. Fusce venenatis lacus ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed bibendu"
+ "m velit maximus in. Aliquam ac metus risus. Sed cursus ornare luctus. Integer aliquet lectus id ma"
+ "ssa blandit imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget luctus nisl. Sed ut el"
+ "it iaculis, faucibus lacus eget, sodales magna. Nunc sed commodo arcu. In hac habitasse platea dic"
+ "tumst. Integer luctus aliquam justo, at vestibulum dolor iaculis ac. Etiam laoreet est eget odio r"
+ "utrum, vel malesuada lorem rhoncus. Cras finibus in neque eu euismod. Nulla facilisi. Nunc nec ali"
+ "quam quam, quis ullamcorper leo. Nunc egestas lectus eget est porttitor, in iaculis felis sceleris"
+ "que. In sem elit, fringilla id viverra commodo, sagittis varius purus. Pellentesque rutrum loborti"
+ "s neque a facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu.")
+
+ self.ts = datetime.now()
+ self.master_key = MasterKey()
+ self.settings = Settings(log_file_masking=True)
+ self.file_name = f'{DIR_USER_DATA}{self.settings.software_operation}_logs'
+
+ self.contact_list = ContactList(nicks=['Alice', 'Bob', 'Charlie', LOCAL_ID])
+ self.key_list = KeyList( nicks=['Alice', 'Bob', 'Charlie', LOCAL_ID])
+ self.group_list = GroupList( groups=['test_group'])
+ self.packet_list = PacketList(contact_list=self.contact_list, settings=self.settings)
+ self.window_list = WindowList(contact_list=self.contact_list, settings=self.settings,
+ group_list=self.group_list, packet_list=self.packet_list)
+ self.group_id = group_name_to_group_id('test_group')
+ self.file_keys = dict()
+
+ self.group_list.get_group('test_group').log_messages = True
+ self.args = (self.window_list, self.packet_list, self.contact_list, self.key_list,
+ self.group_list, self.settings, self.master_key, self.file_keys)
+
+ ensure_dir(DIR_USER_DATA)
+
+ def tearDown(self):
+ cleanup(self.unittest_dir)
+
+ # Invalid packets
+ @mock.patch('time.sleep', return_value=None)
+ def test_invalid_origin_header_raises_fr(self, _):
+ # Setup
+ invalid_origin_header = b'e'
+ packet = nick_to_pub_key('Alice') + invalid_origin_header + MESSAGE_LENGTH * b'm'
+
+ # Test
+ self.assert_fr("Error: Received packet had an invalid origin-header.",
+ process_message, self.ts, packet, *self.args)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_masqueraded_command_raises_fr(self, _):
+ for origin_header in [ORIGIN_USER_HEADER, ORIGIN_CONTACT_HEADER]:
+ # Setup
+ packet = LOCAL_PUBKEY + origin_header + MESSAGE_LENGTH * b'm'
+
+ # Test
+ self.assert_fr("Warning! Received packet masqueraded as a command.",
+ process_message, self.ts, packet, *self.args)
+
+ # Private messages
+ @mock.patch('time.sleep', return_value=None)
+ def test_private_msg_from_contact(self, _):
+ # Setup
+ assembly_ct_list = assembly_packet_creator(MESSAGE, self.msg, origin_header=ORIGIN_CONTACT_HEADER,
+ encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'))
+
+ # Test
+ for p in assembly_ct_list:
+ self.assertIsNone(process_message(self.ts, p, *self.args))
+
+ self.assertEqual(os.path.getsize(self.file_name), len(assembly_ct_list)*LOG_ENTRY_LENGTH)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_private_msg_from_user(self, _):
+ # Setup
+ assembly_ct_list = assembly_packet_creator(MESSAGE, self.msg, origin_header=ORIGIN_USER_HEADER,
+ encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'))
+
+ # Test
+ for p in assembly_ct_list:
+ self.assertIsNone(process_message(self.ts, p, *self.args))
+
+ self.assertEqual(os.path.getsize(self.file_name), len(assembly_ct_list) * LOG_ENTRY_LENGTH)
+
+ # Whispered messages
+ @mock.patch('time.sleep', return_value=None)
+ def test_whisper_msg_from_contact(self, _):
+ # Setup
+ assembly_ct_list = assembly_packet_creator(MESSAGE, self.msg, origin_header=ORIGIN_CONTACT_HEADER,
+ encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'),
+ whisper_header=bool_to_bytes(True))
+
+ # Test
+ for p in assembly_ct_list[:-1]:
+ self.assertIsNone(process_message(self.ts, p, *self.args))
+
+ for p in assembly_ct_list[-1:]:
+ self.assert_fr("Whisper message complete.",
+ process_message, self.ts, p, *self.args)
+
+ self.assertEqual(os.path.getsize(self.file_name), len(assembly_ct_list)*LOG_ENTRY_LENGTH)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_whisper_msg_from_user(self, _):
+ # Setup
+ assembly_ct_list = assembly_packet_creator(MESSAGE, self.msg, origin_header=ORIGIN_USER_HEADER,
+ encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'),
+ whisper_header=bool_to_bytes(True))
+ # Test
+ for p in assembly_ct_list[:-1]:
+ self.assertIsNone(process_message(self.ts, p, *self.args))
+
+ for p in assembly_ct_list[-1:]:
+ self.assert_fr("Whisper message complete.", process_message, self.ts, p, *self.args)
+
+ self.assertEqual(os.path.getsize(self.file_name), len(assembly_ct_list)*LOG_ENTRY_LENGTH)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_empty_whisper_msg_from_user(self, _):
+ # Setup
+ assembly_ct_list = assembly_packet_creator(MESSAGE, '', origin_header=ORIGIN_USER_HEADER,
+ encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'),
+ whisper_header=bool_to_bytes(True))
+ # Test
+ for p in assembly_ct_list[:-1]:
+ self.assertIsNone(process_message(self.ts, p, *self.args))
+
+ for p in assembly_ct_list[-1:]:
+ self.assert_fr("Whisper message complete.", process_message, self.ts, p, *self.args)
+
+ self.assertEqual(os.path.getsize(self.file_name), len(assembly_ct_list)*LOG_ENTRY_LENGTH)
+
+ # File key messages
+ @mock.patch('time.sleep', return_value=None)
+ def test_user_origin_raises_fr(self, _):
+ assembly_ct_list = assembly_packet_creator(MESSAGE, ' ', origin_header=ORIGIN_USER_HEADER,
+ encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'),
+ message_header=FILE_KEY_HEADER)
+
+ for p in assembly_ct_list[-1:]:
+ self.assert_fr("File key message from the user.", process_message, self.ts, p, *self.args)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_invalid_file_key_data_raises_fr(self, _):
+ assembly_ct_list = assembly_packet_creator(MESSAGE, ' ', origin_header=ORIGIN_CONTACT_HEADER,
+ encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'),
+ message_header=FILE_KEY_HEADER)
+
+ for p in assembly_ct_list[-1:]:
+ self.assert_fr("Error: Received an invalid file key message.", process_message, self.ts, p, *self.args)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_too_large_file_key_data_raises_fr(self, _):
+ assembly_ct_list = assembly_packet_creator(MESSAGE, base64.b85encode(BLAKE2_DIGEST_LENGTH * b'a'
+ + SYMMETRIC_KEY_LENGTH * b'b'
+ + b'a').decode(),
+ origin_header=ORIGIN_CONTACT_HEADER,
+ encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'),
+ message_header=FILE_KEY_HEADER)
+
+ for p in assembly_ct_list[-1:]:
+ self.assert_fr("Error: Received an invalid file key message.", process_message, self.ts, p, *self.args)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_valid_file_key_message(self, _):
+ assembly_ct_list = assembly_packet_creator(MESSAGE, base64.b85encode(BLAKE2_DIGEST_LENGTH * b'a'
+ + SYMMETRIC_KEY_LENGTH * b'b').decode(),
+ origin_header=ORIGIN_CONTACT_HEADER,
+ encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'),
+ message_header=FILE_KEY_HEADER)
+ for p in assembly_ct_list[-1:]:
+ self.assert_fr("Received file decryption key from Alice", process_message, self.ts, p, *self.args)
+
+ # Group messages
+ @mock.patch('time.sleep', return_value=None)
+ def test_invalid_message_header_raises_fr(self, _):
+ # Setup
+ assembly_ct_list = assembly_packet_creator(MESSAGE, 'test_message', origin_header=ORIGIN_CONTACT_HEADER,
+ encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'),
+ message_header=b'Z')
+
+ # Test
+ self.assert_fr("Error: Message from contact had an invalid header.",
+ process_message, self.ts, assembly_ct_list[0], *self.args)
+
+ self.assertEqual(os.path.getsize(self.file_name), LOG_ENTRY_LENGTH)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_invalid_window_raises_fr(self, _):
+ # Setup
+ assembly_ct_list = assembly_packet_creator(MESSAGE, 'test_message', origin_header=ORIGIN_CONTACT_HEADER,
+ encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'),
+ group_id=self.group_id)
+
+ self.group_list.get_group('test_group').group_id = GROUP_ID_LENGTH * b'a'
+
+ # Test
+ self.assert_fr("Error: Received message to an unknown group.",
+ process_message, self.ts, assembly_ct_list[0], *self.args)
+
+ self.assertEqual(os.path.getsize(self.file_name), LOG_ENTRY_LENGTH)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_invalid_message_raises_fr(self, _):
+ # Setup
+ assembly_ct_list = assembly_packet_creator(MESSAGE, ' ', origin_header=ORIGIN_CONTACT_HEADER,
+ encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'),
+ group_id=self.group_id, tamper_plaintext=True)
+
+ # Test
+ self.assert_fr("Error: Received an invalid group message.",
+ process_message, self.ts, assembly_ct_list[0], *self.args)
+
+ self.assertEqual(os.path.getsize(self.file_name), LOG_ENTRY_LENGTH)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_invalid_whisper_header_raises_fr(self, _):
+ # Setup
+ assembly_ct_list = assembly_packet_creator(MESSAGE, '', origin_header=ORIGIN_CONTACT_HEADER,
+ encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'),
+ whisper_header=b'', message_header=b'')
+
+ # Test
+ self.assert_fr("Error: Message from contact had an invalid whisper header.",
+ process_message, self.ts, assembly_ct_list[0], *self.args)
+
+ self.assertEqual(os.path.getsize(self.file_name), LOG_ENTRY_LENGTH)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_contact_not_in_group_raises_fr(self, _):
+ # Setup
+
+ assembly_ct_list = assembly_packet_creator(MESSAGE, 'test_message', origin_header=ORIGIN_CONTACT_HEADER,
+ encrypt_packet=True, group_id=self.group_id,
+ onion_pub_key=nick_to_pub_key('Charlie'))
+
+ # Test
+ self.assert_fr("Error: Account is not a member of the group.",
+ process_message, self.ts, assembly_ct_list[0], *self.args)
+
+ self.assertEqual(os.path.getsize(self.file_name), LOG_ENTRY_LENGTH)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_normal_group_msg_from_contact(self, _):
+ # Setup
+ assembly_ct_list = assembly_packet_creator(MESSAGE, self.msg, origin_header=ORIGIN_CONTACT_HEADER,
+ group_id=self.group_id, encrypt_packet=True,
+ onion_pub_key=nick_to_pub_key('Alice'))
+
+ for p in assembly_ct_list:
+ self.assertIsNone(process_message(self.ts, p, *self.args))
+
+ self.assertEqual(os.path.getsize(self.file_name), len(assembly_ct_list)*LOG_ENTRY_LENGTH)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_normal_group_msg_from_user(self, _):
+ # Setup
+ assembly_ct_list = assembly_packet_creator(MESSAGE, self.msg, origin_header=ORIGIN_USER_HEADER,
+ group_id=self.group_id, encrypt_packet=True,
+ onion_pub_key=nick_to_pub_key('Alice'))
+
+ for p in assembly_ct_list:
+ self.assertIsNone(process_message(self.ts, p, *self.args))
+
+ self.assertEqual(os.path.getsize(self.file_name), len(assembly_ct_list)*LOG_ENTRY_LENGTH)
+
+ # Files
+ @mock.patch('time.sleep', return_value=None)
+ def test_file(self, _):
+ # Setup
+ assembly_ct_list = assembly_packet_creator(FILE, origin_header=ORIGIN_CONTACT_HEADER,
+ encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'))
+
+ # Test
+ for p in assembly_ct_list[:-1]:
+ self.assertIsNone(process_message(self.ts, p, *self.args))
+
+ for p in assembly_ct_list[-1:]:
+ self.assert_fr("File storage complete.",
+ process_message, self.ts, p, *self.args)
+
+ self.assertEqual(os.path.getsize(self.file_name), len(assembly_ct_list)*LOG_ENTRY_LENGTH)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_file_when_reception_is_disabled(self, _):
+ # Setup
+ assembly_ct_list = assembly_packet_creator(FILE, origin_header=ORIGIN_CONTACT_HEADER,
+ encrypt_packet=True, onion_pub_key=nick_to_pub_key('Alice'))
+
+ self.contact_list.get_contact_by_pub_key(nick_to_pub_key('Alice')).file_reception = False
+
+ # Test
+ self.assert_fr("Alert! File transmission from Alice but reception is disabled.",
+ process_message, self.ts, assembly_ct_list[0], *self.args)
+
+ self.assertEqual(os.path.getsize(self.file_name), LOG_ENTRY_LENGTH)
+
+
+if __name__ == '__main__':
+ unittest.main(exit=False)
diff --git a/tests/receiver/test_output_loop.py b/tests/receiver/test_output_loop.py
new file mode 100644
index 0000000..a3b0224
--- /dev/null
+++ b/tests/receiver/test_output_loop.py
@@ -0,0 +1,164 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import base64
+import datetime
+import threading
+import time
+import unittest
+
+from typing import Tuple
+from unittest import mock
+from unittest.mock import MagicMock
+
+from src.common.crypto import blake2b, encrypt_and_sign
+from src.common.encoding import b58encode, bool_to_bytes, int_to_bytes, str_to_bytes
+from src.common.statics import *
+
+from src.transmitter.packet import split_to_assembly_packets
+
+from src.receiver.output_loop import output_loop
+
+from tests.mock_classes import ContactList, Gateway, GroupList, KeyList, MasterKey, nick_to_pub_key, Settings
+from tests.utils import gen_queue_dict, tear_queues
+
+
+def rotate_key(key: bytes, harac: int) -> Tuple[bytes, int]:
+ """Move to next key in hash ratchet."""
+ return blake2b(key + int_to_bytes(harac), digest_size=SYMMETRIC_KEY_LENGTH), harac + 1
+
+
+class TestOutputLoop(unittest.TestCase):
+
+ def setUp(self):
+ self.o_sleep = time.sleep
+ time.sleep = lambda _: None
+
+ def tearDown(self):
+ time.sleep = self.o_sleep
+
+ @mock.patch('tkinter.Tk', return_value=MagicMock())
+ @mock.patch('os.system', return_value=None)
+ @mock.patch('builtins.input', side_effect=[b58encode(SYMMETRIC_KEY_LENGTH*b'a'),
+ bytes(CONFIRM_CODE_LENGTH).hex(),
+ b58encode(SYMMETRIC_KEY_LENGTH*b'a', public_key=True)])
+ def test_loop(self, *_):
+ # Setup
+ queues = gen_queue_dict()
+ kek = SYMMETRIC_KEY_LENGTH * b'a'
+ conf_code = bytes(1)
+ tx_pub_key = nick_to_pub_key('Bob')
+ o_sleep = self.o_sleep
+ test_delay = 0.1
+
+ def queue_packet(mk, hk, tx_harac, packet, onion_pub_key=None):
+ """Create encrypted datagram."""
+ if onion_pub_key is None:
+ header = b''
+ queue = queues[COMMAND_DATAGRAM_HEADER]
+ packet = split_to_assembly_packets(packet, COMMAND)[0]
+ else:
+ header = onion_pub_key + ORIGIN_CONTACT_HEADER
+ queue = queues[MESSAGE_DATAGRAM_HEADER]
+ packet = split_to_assembly_packets(packet, MESSAGE)[0]
+
+ encrypted_harac = encrypt_and_sign(int_to_bytes(tx_harac), hk)
+ encrypted_message = encrypt_and_sign(packet, mk)
+ encrypted_packet = header + encrypted_harac + encrypted_message
+ queue.put((datetime.datetime.now(), encrypted_packet))
+
+ def queue_delayer():
+ """Place datagrams into queue after delay."""
+ o_sleep(test_delay)
+ local_harac = INITIAL_HARAC
+ tx_harac = INITIAL_HARAC
+ local_hek = SYMMETRIC_KEY_LENGTH * b'a'
+ file_key = SYMMETRIC_KEY_LENGTH * b'b'
+ local_key = SYMMETRIC_KEY_LENGTH * b'a'
+ tx_mk = SYMMETRIC_KEY_LENGTH * b'a'
+ tx_hk = SYMMETRIC_KEY_LENGTH * b'a'
+
+ # Queue local key packet
+ local_key_packet = encrypt_and_sign(local_key + local_hek + conf_code, key=kek)
+ queues[LOCAL_KEY_DATAGRAM_HEADER].put((datetime.datetime.now(), local_key_packet))
+ o_sleep(test_delay)
+
+ # Select file window
+ command = WIN_SELECT + WIN_UID_FILE
+ queue_packet(local_key, tx_hk, local_harac, command)
+ local_key, local_harac = rotate_key(local_key, local_harac)
+ o_sleep(test_delay)
+
+ # Select local window
+ command = WIN_SELECT + WIN_UID_LOCAL
+ queue_packet(local_key, tx_hk, local_harac, command)
+ local_key, local_harac = rotate_key(local_key, local_harac)
+ o_sleep(test_delay)
+
+ # A message that goes to buffer
+ queue_packet(tx_mk, tx_hk, tx_harac, bool_to_bytes(False) + PRIVATE_MESSAGE_HEADER + b'Hi Bob', tx_pub_key)
+ tx_mk, tx_harac = rotate_key(tx_mk, tx_harac)
+
+ # ECDHE keyset for Bob
+ command = KEY_EX_ECDHE + nick_to_pub_key("Bob") + (4 * SYMMETRIC_KEY_LENGTH * b'a') + str_to_bytes('Bob')
+ queue_packet(local_key, tx_hk, local_harac, command)
+ local_key, local_harac = rotate_key(local_key, local_harac)
+ o_sleep(test_delay)
+
+ # Message for Bob
+ queue_packet(tx_mk, tx_hk, tx_harac, bool_to_bytes(False) + PRIVATE_MESSAGE_HEADER + b'Hi Bob', tx_pub_key)
+ tx_mk, tx_harac = rotate_key(tx_mk, tx_harac)
+ o_sleep(test_delay)
+
+ # Enable file reception for Bob
+ command = CH_FILE_RECV + ENABLE.upper() + US_BYTE
+ queue_packet(local_key, tx_hk, local_harac, command)
+ o_sleep(test_delay)
+
+ # File packet from Bob
+ ct = encrypt_and_sign(b'test', file_key)
+ f_hash = blake2b(ct)
+ packet = nick_to_pub_key('Bob') + ORIGIN_CONTACT_HEADER + ct
+ queues[FILE_DATAGRAM_HEADER].put((datetime.datetime.now(), packet))
+ o_sleep(test_delay)
+
+ # File key packet from Bob
+ queue_packet(tx_mk, tx_hk, tx_harac, bool_to_bytes(False)
+ + FILE_KEY_HEADER + base64.b85encode(f_hash + file_key), tx_pub_key)
+ o_sleep(test_delay)
+
+ # Queue exit message to break the loop
+ o_sleep(0.5)
+ queues[UNITTEST_QUEUE].put(EXIT)
+ o_sleep(test_delay)
+
+ threading.Thread(target=queue_delayer).start()
+
+ # Test
+ self.assertIsNone(output_loop(queues, Gateway(), Settings(), ContactList(), KeyList(),
+ GroupList(), MasterKey(), stdin_fd=1, unittest=True))
+
+ # Teardown
+ tear_queues(queues)
+
+
+if __name__ == '__main__':
+ unittest.main(exit=False)
diff --git a/tests/receiver/test_packet.py b/tests/receiver/test_packet.py
new file mode 100644
index 0000000..833bb35
--- /dev/null
+++ b/tests/receiver/test_packet.py
@@ -0,0 +1,448 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import os
+import unittest
+import zlib
+
+from datetime import datetime
+from unittest import mock
+
+from src.common.crypto import byte_padding, encrypt_and_sign
+from src.common.encoding import int_to_bytes
+from src.common.statics import *
+
+from src.transmitter.packet import split_to_assembly_packets
+
+from src.receiver.packet import decrypt_assembly_packet, Packet, PacketList
+
+from tests.mock_classes import ContactList, create_contact, KeyList, Settings, WindowList
+from tests.utils import assembly_packet_creator, cd_unittest, cleanup, nick_to_pub_key, TFCTestCase
+from tests.utils import UNDECODABLE_UNICODE
+
+
+class TestDecryptAssemblyPacket(TFCTestCase):
+
+ def setUp(self):
+ self.onion_pub_key = nick_to_pub_key("Alice")
+ self.origin = ORIGIN_CONTACT_HEADER
+ self.window_list = WindowList(nicks=['Alice', LOCAL_ID])
+ self.contact_list = ContactList(nicks=['Alice', LOCAL_ID])
+ self.key_list = KeyList(nicks=['Alice', LOCAL_ID])
+ self.keyset = self.key_list.get_keyset(nick_to_pub_key("Alice"))
+ self.args = self.onion_pub_key, self.origin, self.window_list, self.contact_list, self.key_list
+
+ def test_decryption_with_zero_rx_key_raises_fr(self):
+ # Setup
+ keyset = self.key_list.get_keyset(nick_to_pub_key("Alice"))
+ keyset.rx_mk = bytes(SYMMETRIC_KEY_LENGTH)
+ packet = assembly_packet_creator(MESSAGE, payload="Test message", encrypt_packet=True)[0]
+
+ # Test
+ self.assert_fr("Warning! Loaded zero-key for packet decryption.",
+ decrypt_assembly_packet, packet, *self.args)
+
+ def test_invalid_harac_ct_raises_fr(self):
+ packet = assembly_packet_creator(MESSAGE, payload="Test message", encrypt_packet=True, tamper_harac=True)[0]
+ self.assert_fr("Warning! Received packet from Alice had an invalid hash ratchet MAC.",
+ decrypt_assembly_packet, packet, *self.args)
+
+ def test_decryption_with_zero_rx_hek_raises_fr(self):
+ # Setup
+ keyset = self.key_list.get_keyset(nick_to_pub_key("Alice"))
+ keyset.rx_hk = bytes(SYMMETRIC_KEY_LENGTH)
+ packet = assembly_packet_creator(MESSAGE, payload="Test message", encrypt_packet=True)[0]
+
+ # Test
+ self.assert_fr("Warning! Loaded zero-key for packet decryption.", decrypt_assembly_packet, packet, *self.args)
+
+ def test_expired_harac_raises_fr(self):
+ # Setup
+ self.keyset.rx_harac = 1
+
+ # Test
+ packet = assembly_packet_creator(MESSAGE, payload="Test message", encrypt_packet=True, harac=0)[0]
+ self.assert_fr("Warning! Received packet from Alice had an expired hash ratchet counter.",
+ decrypt_assembly_packet, packet, *self.args)
+
+ @mock.patch('builtins.input', return_value='No')
+ def test_harac_dos_can_be_interrupted(self, _):
+ packet = assembly_packet_creator(MESSAGE, payload="Test message", encrypt_packet=True, harac=100_001)[0]
+ self.assert_fr("Dropped packet from Alice.",
+ decrypt_assembly_packet, packet, *self.args)
+
+ def test_invalid_packet_ct_raises_fr(self):
+ packet = assembly_packet_creator(MESSAGE, payload="Test message", encrypt_packet=True, tamper_message=True)[0]
+ self.assert_fr("Warning! Received packet from Alice had an invalid MAC.",
+ decrypt_assembly_packet, packet, *self.args)
+
+ def test_successful_packet_decryption(self):
+ packet = assembly_packet_creator(MESSAGE, payload="Test message", encrypt_packet=True)[0]
+ self.assertEqual(decrypt_assembly_packet(packet, *self.args),
+ assembly_packet_creator(MESSAGE, payload="Test message")[0])
+
+ def test_successful_packet_decryption_with_offset(self):
+ packet = assembly_packet_creator(MESSAGE, payload="Test message", encrypt_packet=True, message_number=3)[0]
+ self.assertEqual(decrypt_assembly_packet(packet, *self.args),
+ assembly_packet_creator(MESSAGE, payload="Test message", message_number=3)[0])
+
+ def test_successful_command_decryption(self):
+ packet = assembly_packet_creator(COMMAND, payload=b"command_data", encrypt_packet=True)[0]
+ self.assertEqual(decrypt_assembly_packet(packet, *self.args),
+ assembly_packet_creator(COMMAND, payload=b"command_data")[0])
+
+
+class TestPacket(TFCTestCase):
+
+ def setUp(self):
+ self.short_msg = "Lorem ipsum dolor sit amet, consectetur adipiscing elit"
+ self.msg = ("Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum consectetur purus quis"
+ " dapibus. Fusce venenatis lacus ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed bibendu"
+ "m velit maximus in. Aliquam ac metus risus. Sed cursus ornare luctus. Integer aliquet lectus id ma"
+ "ssa blandit imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget luctus nisl. Sed ut el"
+ "it iaculis, faucibus lacus eget, sodales magna. Nunc sed commodo arcu. In hac habitasse platea dic"
+ "tumst. Integer luctus aliquam justo, at vestibulum dolor iaculis ac. Etiam laoreet est eget odio r"
+ "utrum, vel malesuada lorem rhoncus. Cras finibus in neque eu euismod. Nulla facilisi. Nunc nec ali"
+ "quam quam, quis ullamcorper leo. Nunc egestas lectus eget est porttitor, in iaculis felis sceleris"
+ "que. In sem elit, fringilla id viverra commodo, sagittis varius purus. Pellentesque rutrum loborti"
+ "s neque a facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu.")
+
+ self.unittest_dir = cd_unittest()
+ self.ts = datetime.now()
+ self.contact = create_contact('Alice')
+ self.settings = Settings(log_file_masking=True)
+ self.onion_pub_key = nick_to_pub_key('Alice')
+ self.window_list = WindowList()
+ self.whisper_header = b'\x00'
+
+ compressed = zlib.compress(b'file_data', level=COMPRESSION_LEVEL)
+ file_key = os.urandom(SYMMETRIC_KEY_LENGTH)
+ encrypted = encrypt_and_sign(compressed, key=file_key)
+ encrypted += file_key
+ self.short_f_data = (int_to_bytes(1) + int_to_bytes(2) + b'testfile.txt' + US_BYTE + encrypted)
+
+ def tearDown(self):
+ cleanup(self.unittest_dir)
+
+ def test_invalid_assembly_packet_header_raises_fr(self):
+ # Setup
+ packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, MESSAGE, self.contact, self.settings)
+ a_packet = assembly_packet_creator(MESSAGE, payload=self.short_msg, s_header_override=b'i')[0]
+
+ # Test
+ self.assert_fr("Error: Received packet had an invalid assembly packet header.", packet.add_packet, a_packet)
+ self.assertEqual(packet.log_masking_ctr, 1)
+
+ def test_missing_start_packet_raises_fr(self):
+ # Setup
+ packet = Packet(self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE, self.contact, self.settings)
+
+ # Test
+ for header in [M_A_HEADER, M_E_HEADER]:
+ self.assert_fr("Missing start packet.", packet.add_packet, header + bytes(PADDING_LENGTH))
+ self.assertEqual(packet.log_masking_ctr, 2)
+
+ def test_short_message(self):
+ # Setup
+ packet = Packet(self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE, self.contact, self.settings)
+ packet_list = assembly_packet_creator(MESSAGE, self.short_msg)
+
+ for p in packet_list:
+ packet.add_packet(p, packet_ct=b'test_ct')
+
+ # Test
+ self.assertEqual(packet.assemble_message_packet(),
+ self.whisper_header + PRIVATE_MESSAGE_HEADER + self.short_msg.encode())
+ self.assertEqual(packet.log_ct_list, [b'test_ct'])
+
+ def test_compression_error_raises_fr(self):
+ # Setup
+ packet = Packet(self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE, self.contact, self.settings)
+ packet_list = assembly_packet_creator(MESSAGE, self.short_msg, tamper_compression=True)
+
+ for p in packet_list:
+ packet.add_packet(p)
+
+ # Test
+ self.assert_fr("Error: Decompression of message failed.", packet.assemble_message_packet)
+
+ def test_long_message(self):
+ # Setup
+ packet = Packet(self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE, self.contact, self.settings)
+ packet_list = assembly_packet_creator(MESSAGE, self.msg)
+
+ for p in packet_list:
+ packet.add_packet(p, packet_ct=b'test_ct')
+
+ # Test
+ message = packet.assemble_message_packet()
+ self.assertEqual(message, self.whisper_header + PRIVATE_MESSAGE_HEADER + self.msg.encode())
+ self.assertEqual(packet.log_ct_list, 3 * [b'test_ct'])
+
+ def test_decryption_error_raises_fr(self):
+ # Setup
+ packet = Packet(self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE, self.contact, self.settings)
+ packet_list = assembly_packet_creator(MESSAGE, self.msg, tamper_ciphertext=True)
+
+ for p in packet_list:
+ packet.add_packet(p)
+
+ # Test
+ self.assert_fr("Error: Decryption of message failed.", packet.assemble_message_packet)
+
+ def test_short_file(self):
+ # Setup
+ packets = split_to_assembly_packets(self.short_f_data, FILE)
+
+ # Test
+ self.assertFalse(os.path.isfile(f'{DIR_RECV_FILES}Alice/testfile.txt'))
+ self.assertFalse(os.path.isfile(f'{DIR_RECV_FILES}Alice/testfile.txt.1'))
+
+ packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings)
+ packet.long_active = True
+
+ for p in packets:
+ packet.add_packet(p)
+ self.assertIsNone(packet.assemble_and_store_file(self.ts, self.onion_pub_key, self.window_list))
+ self.assertTrue(os.path.isfile(f'{DIR_RECV_FILES}Alice/testfile.txt'))
+
+ for p in packets:
+ packet.add_packet(p)
+ self.assertIsNone(packet.assemble_and_store_file(self.ts, self.onion_pub_key, self.window_list))
+ self.assertTrue(os.path.isfile(f'{DIR_RECV_FILES}Alice/testfile.txt.1'))
+
+ def test_short_file_from_user_raises_fr(self):
+ # Setup
+ packet = Packet(self.onion_pub_key, ORIGIN_USER_HEADER, FILE, self.contact, self.settings)
+ packets = split_to_assembly_packets(self.short_f_data, FILE)
+
+ # Test
+ for p in packets:
+ self.assert_fr("Ignored file from the user.", packet.add_packet, p)
+ self.assertEqual(packet.log_masking_ctr, 1)
+
+ def test_unauthorized_file_from_contact_raises_fr(self):
+ # Setup
+ self.contact.file_reception = False
+
+ packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings)
+ packets = split_to_assembly_packets(self.short_f_data, FILE)
+
+ # Test
+ for p in packets:
+ self.assert_fr("Alert! File transmission from Alice but reception is disabled.", packet.add_packet, p)
+ self.assertEqual(packet.log_masking_ctr, 1)
+
+ def test_long_file(self):
+ # Setup
+ packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings)
+ packet.long_active = True
+ packet_list = assembly_packet_creator(FILE)
+
+ for p in packet_list:
+ packet.add_packet(p)
+
+ # Test
+ self.assertIsNone(packet.assemble_and_store_file(self.ts, self.onion_pub_key, self.window_list))
+ self.assertEqual(os.path.getsize(f'{DIR_RECV_FILES}Alice/test_file.txt'), 10000)
+
+ def test_disabled_file_reception_raises_fr_with_append_packet(self):
+ # Setup
+ packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings)
+ packet.long_active = True
+ packet_list = assembly_packet_creator(FILE)
+
+ for p in packet_list[:2]:
+ self.assertIsNone(packet.add_packet(p))
+
+ packet.contact.file_reception = False
+
+ # Test
+ self.assert_fr("Alert! File reception disabled mid-transfer.", packet.add_packet, packet_list[2])
+
+ for p in packet_list[3:]:
+ self.assert_fr("Missing start packet.", packet.add_packet, p)
+
+ self.assertEqual(packet.log_masking_ctr, len(packet_list))
+
+ def test_disabled_file_reception_raises_fr_with_end_packet(self):
+ # Setup
+ packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings)
+ packet.long_active = True
+ packet_list = assembly_packet_creator(FILE)
+
+ for p in packet_list[:-1]:
+ self.assertIsNone(packet.add_packet(p))
+
+ packet.contact.file_reception = False
+
+ # Test
+ for p in packet_list[-1:]:
+ self.assert_fr("Alert! File reception disabled mid-transfer.", packet.add_packet, p)
+ self.assertEqual(packet.log_masking_ctr, len(packet_list))
+
+ def test_long_file_from_user_raises_fr(self):
+ # Setup
+ packet = Packet(self.onion_pub_key, ORIGIN_USER_HEADER, FILE, self.contact, self.settings)
+ packet_list = assembly_packet_creator(FILE)
+
+ # Test
+ self.assert_fr("Ignored file from the user.", packet.add_packet, packet_list[0])
+ self.assertEqual(packet.log_masking_ctr, 1)
+
+ def test_unauthorized_long_file_raises_fr(self):
+ # Setup
+ self.contact.file_reception = False
+
+ packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings)
+ packet_list = assembly_packet_creator(FILE)
+
+ # Test
+ self.assert_fr("Alert! File transmission from Alice but reception is disabled.",
+ packet.add_packet, packet_list[0])
+ self.assertEqual(packet.log_masking_ctr, 1)
+
+ def test_invalid_long_file_header_raises_fr(self):
+ # Setup
+ packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings)
+ packet_list = assembly_packet_creator(FILE, file_name=UNDECODABLE_UNICODE)
+
+ # Test
+ self.assert_fr("Error: Received file packet had an invalid header.", packet.add_packet, packet_list[0])
+ self.assertEqual(packet.log_masking_ctr, 1)
+
+ def test_contact_canceled_file(self):
+ # Setup
+ packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings)
+ packet_list = assembly_packet_creator(FILE)[:20]
+ packet_list.append(byte_padding(F_C_HEADER)) # Add cancel packet
+
+ for p in packet_list:
+ packet.add_packet(p)
+
+ # Test
+ self.assertEqual(len(packet.assembly_pt_list), 0) # Cancel packet empties packet list
+ self.assertFalse(packet.long_active)
+ self.assertFalse(packet.is_complete)
+ self.assertEqual(packet.log_masking_ctr, len(packet_list))
+
+ def test_noise_packet_interrupts_file(self):
+ # Setup
+ packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, FILE, self.contact, self.settings)
+ packet_list = assembly_packet_creator(FILE)[:20]
+ packet_list.append(byte_padding(P_N_HEADER)) # Add noise packet
+
+ for p in packet_list:
+ packet.add_packet(p)
+
+ # Test
+ self.assertEqual(len(packet.assembly_pt_list), 0) # Noise packet empties packet list
+ self.assertFalse(packet.long_active)
+ self.assertFalse(packet.is_complete)
+ self.assertEqual(packet.log_masking_ctr, len(packet_list))
+
+ def test_short_command(self):
+ # Setup
+ packet = Packet(LOCAL_ID, ORIGIN_CONTACT_HEADER, COMMAND, self.contact, self.settings)
+ packets = assembly_packet_creator(COMMAND, b'test_command')
+
+ for p in packets:
+ packet.add_packet(p)
+
+ # Test
+ self.assertEqual(packet.assemble_command_packet(), b'test_command')
+ self.assertEqual(packet.log_masking_ctr, 0)
+
+ def test_long_command(self):
+ # Setup
+ packet = Packet(LOCAL_ID, ORIGIN_CONTACT_HEADER, COMMAND, self.contact, self.settings)
+ command = 500*b'test_command'
+ packets = assembly_packet_creator(COMMAND, command)
+
+ for p in packets:
+ packet.add_packet(p)
+
+ # Test
+ self.assertEqual(packet.assemble_command_packet(), command)
+ self.assertEqual(packet.log_masking_ctr, 0)
+
+ def test_long_command_hash_mismatch_raises_fr(self):
+ # Setup
+ packet = Packet(LOCAL_ID, ORIGIN_CONTACT_HEADER, COMMAND, self.contact, self.settings)
+ packet_list = assembly_packet_creator(COMMAND, os.urandom(500), tamper_cmd_hash=True)
+
+ for p in packet_list:
+ packet.add_packet(p)
+
+ # Test
+ self.assert_fr("Error: Received an invalid command.", packet.assemble_command_packet)
+ self.assertEqual(packet.log_masking_ctr, 0)
+
+ def test_long_command_compression_error_raises_fr(self):
+ # Setup
+ packet = Packet(LOCAL_ID, ORIGIN_CONTACT_HEADER, COMMAND, self.contact, self.settings)
+ packet_list = assembly_packet_creator(COMMAND, os.urandom(500), tamper_compression=True)
+
+ for p in packet_list:
+ packet.add_packet(p)
+
+ # Test
+ self.assert_fr("Error: Decompression of command failed.", packet.assemble_command_packet)
+ self.assertEqual(packet.log_masking_ctr, 0)
+
+
+class TestPacketList(unittest.TestCase):
+
+ def setUp(self):
+ self.contact_list = ContactList(nicks=['Alice', 'Bob'])
+ self.settings = Settings()
+ self.onion_pub_key = nick_to_pub_key('Alice')
+ packet = Packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, MESSAGE,
+ self.contact_list.get_contact_by_address_or_nick('Alice'), self.settings)
+
+ self.packet_list = PacketList(self.settings, self.contact_list)
+ self.packet_list.packets = [packet]
+
+ def test_packet_list_iterates_over_contact_objects(self):
+ for p in self.packet_list:
+ self.assertIsInstance(p, Packet)
+
+ def test_len_returns_number_of_contacts(self):
+ self.assertEqual(len(self.packet_list), 1)
+
+ def test_has_packet(self):
+ self.assertTrue(self.packet_list.has_packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, MESSAGE))
+ self.assertFalse(self.packet_list.has_packet(self.onion_pub_key, ORIGIN_USER_HEADER, MESSAGE))
+
+ def test_get_packet(self):
+ packet = self.packet_list.get_packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, MESSAGE)
+ self.assertEqual(packet.onion_pub_key, self.onion_pub_key)
+ self.assertEqual(packet.origin, ORIGIN_CONTACT_HEADER)
+ self.assertEqual(packet.type, MESSAGE)
+
+ packet = self.packet_list.get_packet(self.onion_pub_key, ORIGIN_CONTACT_HEADER, MESSAGE)
+ self.assertEqual(packet.onion_pub_key, self.onion_pub_key)
+ self.assertEqual(packet.origin, ORIGIN_CONTACT_HEADER)
+ self.assertEqual(packet.type, MESSAGE)
+
+
+if __name__ == '__main__':
+ unittest.main(exit=False)
diff --git a/tests/receiver/test_receiver_loop.py b/tests/receiver/test_receiver_loop.py
new file mode 100644
index 0000000..fd5706a
--- /dev/null
+++ b/tests/receiver/test_receiver_loop.py
@@ -0,0 +1,81 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import threading
+import time
+import unittest
+
+from datetime import datetime
+from multiprocessing import Queue
+
+from src.common.encoding import int_to_bytes
+from src.common.reed_solomon import RSCodec
+from src.common.statics import *
+
+from src.receiver.receiver_loop import receiver_loop
+
+from tests.mock_classes import Gateway
+from tests.utils import tear_queue
+
+
+class TestReceiverLoop(unittest.TestCase):
+
+ def test_receiver_loop(self):
+ # Setup
+ gateway = Gateway(local_test=False)
+ rs = RSCodec(2 * gateway.settings.serial_error_correction)
+ queues = {MESSAGE_DATAGRAM_HEADER: Queue(),
+ FILE_DATAGRAM_HEADER: Queue(),
+ COMMAND_DATAGRAM_HEADER: Queue(),
+ LOCAL_KEY_DATAGRAM_HEADER: Queue()}
+
+ all_q = dict(queues)
+ all_q.update({GATEWAY_QUEUE: Queue()})
+
+ ts = datetime.now()
+ ts_bytes = int_to_bytes(int(ts.strftime('%Y%m%d%H%M%S%f')[:-4]))
+
+ for key in queues:
+ packet = key + ts_bytes + bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH)
+ encoded = rs.encode(packet)
+ broken_p = key + bytes.fromhex('df9005313af4136d') + bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH)
+ broken_p += rs.encode(b'a')
+
+ def queue_delayer():
+ """Place datagrams into queue after delay."""
+ time.sleep(0.01)
+ all_q[GATEWAY_QUEUE].put((datetime.now(), rs.encode(8 * b'1' + b'undecodable')))
+ all_q[GATEWAY_QUEUE].put((datetime.now(), broken_p))
+ all_q[GATEWAY_QUEUE].put((datetime.now(), encoded))
+
+ threading.Thread(target=queue_delayer).start()
+
+ # Test
+ self.assertIsNone(receiver_loop(all_q, gateway, unittest=True))
+ time.sleep(0.01)
+ self.assertEqual(queues[key].qsize(), 1)
+
+ # Teardown
+ tear_queue(queues[key])
+
+
+if __name__ == '__main__':
+ unittest.main(exit=False)
diff --git a/tests/receiver/test_windows.py b/tests/receiver/test_windows.py
new file mode 100644
index 0000000..8631b7c
--- /dev/null
+++ b/tests/receiver/test_windows.py
@@ -0,0 +1,517 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import unittest
+
+from datetime import datetime
+from unittest import mock
+
+from src.common.statics import *
+
+from src.receiver.windows import RxWindow, WindowList
+
+from tests.mock_classes import create_contact, ContactList, GroupList, Packet, PacketList, Settings
+from tests.utils import group_name_to_group_id, nick_to_pub_key, nick_to_short_address, TFCTestCase
+
+
+class TestRxWindow(TFCTestCase):
+
+ def setUp(self):
+ self.contact_list = ContactList(nicks=['Alice', 'Bob', 'Charlie', LOCAL_ID])
+ self.group_list = GroupList(groups=['test_group', 'test_group2'])
+ self.settings = Settings()
+ self.packet_list = PacketList()
+ self.ts = datetime.fromtimestamp(1502750000)
+ self.time = self.ts.strftime('%H:%M:%S.%f')[:-4]
+
+ group = self.group_list.get_group('test_group')
+ group.members = list(map(self.contact_list.get_contact_by_address_or_nick, ['Alice', 'Bob', 'Charlie']))
+
+ def create_window(self, uid: bytes):
+ """Create new RxWindow object."""
+ return RxWindow(uid, self.contact_list, self.group_list, self.settings, self.packet_list)
+
+ def test_command_window_creation(self):
+ window = self.create_window(WIN_UID_LOCAL)
+ self.assertEqual(window.type, WIN_TYPE_COMMAND)
+ self.assertEqual(window.name, WIN_TYPE_COMMAND)
+
+ def test_file_window_creation(self):
+ window = self.create_window(WIN_UID_FILE)
+ self.assertEqual(window.type, WIN_TYPE_FILE)
+
+ def test_contact_window_creation(self):
+ window = self.create_window(nick_to_pub_key("Alice"))
+ self.assertEqual(window.type, WIN_TYPE_CONTACT)
+ self.assertEqual(window.window_contacts[0].onion_pub_key, nick_to_pub_key("Alice"))
+ self.assertEqual(window.name, 'Alice')
+
+ def test_group_window_creation(self):
+ window = self.create_window(group_name_to_group_id('test_group'))
+ self.assertEqual(window.type, WIN_TYPE_GROUP)
+ self.assertEqual(window.window_contacts[0].onion_pub_key, nick_to_pub_key("Alice"))
+ self.assertEqual(window.name, 'test_group')
+
+ def test_invalid_uid_raises_fr(self):
+ self.assert_fr("Invalid window 'bad_uid'.", self.create_window, 'bad_uid')
+
+ def test_window_iterates_over_message_tuples(self):
+ # Setup
+ window = self.create_window(nick_to_pub_key("Alice"))
+ window.message_log = 5*[(datetime.now(), 'Lorem ipsum', nick_to_pub_key("Alice"),
+ ORIGIN_CONTACT_HEADER, False, False)]
+
+ # Test
+ for mt in window:
+ self.assertEqual(mt[1:],
+ ("Lorem ipsum", nick_to_pub_key("Alice"), ORIGIN_CONTACT_HEADER, False, False))
+
+ def test_len_returns_number_of_messages_in_window(self):
+ # Setup
+ window = self.create_window(nick_to_pub_key("Alice"))
+ window.message_log = 5*[(datetime.now(), "Lorem ipsum", nick_to_pub_key("Alice"),
+ ORIGIN_CONTACT_HEADER, False, False)]
+
+ # Test
+ self.assertEqual(len(window), 5)
+
+ def test_remove_contacts(self):
+ # Setup
+ window = self.create_window(group_name_to_group_id('test_group'))
+
+ # Test
+ self.assertEqual(len(window.window_contacts), 3)
+ self.assertIsNone(window.remove_contacts([nick_to_pub_key("Alice"),
+ nick_to_pub_key("Bob"),
+ nick_to_pub_key("DoesNotExist")]))
+ self.assertEqual(len(window.window_contacts), 1)
+
+ def test_add_contacts(self):
+ # Setup
+ window = self.create_window(group_name_to_group_id('test_group'))
+ window.window_contacts = [self.contact_list.get_contact_by_address_or_nick('Alice')]
+
+ # Test
+ self.assertIsNone(window.add_contacts([nick_to_pub_key("Alice"),
+ nick_to_pub_key("Bob"),
+ nick_to_pub_key("DoesNotExist")]))
+ self.assertEqual(len(window.window_contacts), 2)
+
+ def test_reset_window(self):
+ # Setup
+ window = self.create_window(group_name_to_group_id('test_group'))
+ window.message_log = \
+ [(datetime.now(), "Hi everybody", nick_to_pub_key("Alice"), ORIGIN_USER_HEADER, False, False),
+ (datetime.now(), "Hi David", nick_to_pub_key("Alice"), ORIGIN_CONTACT_HEADER, False, False),
+ (datetime.now(), "Hi David", nick_to_pub_key("Bob"), ORIGIN_CONTACT_HEADER, False, False)]
+
+ # Test
+ self.assertIsNone(window.reset_window())
+ self.assertEqual(len(window), 0)
+
+ def test_has_contact(self):
+ window = self.create_window(group_name_to_group_id('test_group'))
+ self.assertTrue(window.has_contact(nick_to_pub_key("Alice")))
+ self.assertFalse(window.has_contact(nick_to_pub_key("DoesNotExist")))
+
+ def test_create_handle_dict(self):
+ # Setup
+ window = self.create_window(group_name_to_group_id('test_group'))
+ message_log = [(datetime.now(), "Lorem ipsum", nick_to_pub_key("Alice"), ORIGIN_CONTACT_HEADER, False, False),
+ (datetime.now(), "Lorem ipsum", nick_to_pub_key("Bob"), ORIGIN_USER_HEADER, False, False),
+ (datetime.now(), "Lorem ipsum", nick_to_pub_key("Charlie"), ORIGIN_CONTACT_HEADER, False, False),
+ (datetime.now(), "Lorem ipsum", nick_to_pub_key("Charlie"), ORIGIN_CONTACT_HEADER, True, False),
+ (datetime.now(), "Lorem ipsum", nick_to_pub_key("Charlie"), ORIGIN_CONTACT_HEADER, False, False),
+ (datetime.now(), "Lorem ipsum", nick_to_pub_key("David"), ORIGIN_CONTACT_HEADER, False, False),
+ (datetime.now(), "Lorem ipsum", nick_to_pub_key("Eric"), ORIGIN_CONTACT_HEADER, False, False)]
+
+ # Test
+ self.assertIsNone(window.create_handle_dict(message_log))
+ self.assertEqual(window.handle_dict, {nick_to_pub_key("Alice"): 'Alice',
+ nick_to_pub_key("Bob"): 'Bob',
+ nick_to_pub_key("Charlie"): 'Charlie',
+ nick_to_pub_key("David"): nick_to_short_address("David"),
+ nick_to_pub_key("Eric"): nick_to_short_address("Eric")})
+
+ def test_get_command_handle(self):
+ # Setup
+ window = self.create_window(WIN_UID_LOCAL)
+ window.is_active = True
+
+ # Test
+ self.assertEqual(window.get_handle(self.ts, WIN_UID_LOCAL, ORIGIN_USER_HEADER), f"{self.time} -!- ")
+
+ def test_get_contact_handle(self):
+ # Setup
+ window = self.create_window(nick_to_pub_key("Alice"))
+ window.is_active = True
+ window.handle_dict = {nick_to_pub_key("Alice"): 'Alice'}
+
+ # Test
+ self.assertEqual(window.get_handle(self.ts, nick_to_pub_key("Alice"), ORIGIN_USER_HEADER),
+ f"{self.time} Me: ")
+ self.assertEqual(window.get_handle(self.ts, nick_to_pub_key("Alice"), ORIGIN_CONTACT_HEADER),
+ f"{self.time} Alice: ")
+
+ window.is_active = False
+ self.assertEqual(window.get_handle(self.ts, nick_to_pub_key("Alice"), ORIGIN_USER_HEADER),
+ f"{self.time} Me (private message): ")
+ self.assertEqual(window.get_handle(self.ts, nick_to_pub_key("Alice"), ORIGIN_CONTACT_HEADER),
+ f"{self.time} Alice (private message): ")
+
+ def test_get_group_contact_handle(self):
+ # Setup
+ window = self.create_window(group_name_to_group_id('test_group'))
+ window.is_active = True
+ window.handle_dict = {nick_to_pub_key("Alice"): 'Alice',
+ nick_to_pub_key("Charlie"): 'Charlie',
+ nick_to_pub_key("David"): nick_to_short_address("David"),
+ nick_to_pub_key("Eric"): nick_to_short_address("Eric")}
+
+ # Test
+ self.assertEqual(window.get_handle(self.ts, nick_to_pub_key("Alice"), ORIGIN_USER_HEADER),
+ f"{self.time} Me: ")
+ self.assertEqual(window.get_handle(self.ts, nick_to_pub_key("Charlie"), ORIGIN_CONTACT_HEADER),
+ f"{self.time} Charlie: ")
+
+ window.is_active = False
+ self.assertEqual(window.get_handle(self.ts, nick_to_pub_key("Alice"), ORIGIN_USER_HEADER),
+ f"{self.time} Me (group test_group): ")
+ self.assertEqual(window.get_handle(self.ts, nick_to_pub_key("Charlie"), ORIGIN_CONTACT_HEADER),
+ f"{self.time} Charlie (group test_group): ")
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_print_to_inactive_window_preview_on_short_message(self, _):
+ # Setup
+ window = self.create_window(nick_to_pub_key("Alice"))
+ window.handle_dict = {nick_to_pub_key("Alice"): 'Alice'}
+ window.is_active = False
+ window.settings = Settings(new_message_notify_preview=True)
+ msg_tuple = (self.ts, "Hi Bob", nick_to_pub_key("Bob"), ORIGIN_USER_HEADER, False, False)
+
+ # Test
+ self.assert_prints(f"{BOLD_ON}{self.time} Me (private message): {NORMAL_TEXT}"
+ f"Hi Bob\n{CURSOR_UP_ONE_LINE}{CLEAR_ENTIRE_LINE}",
+ window.print, msg_tuple)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_print_to_inactive_window_preview_on_long_message(self, _):
+ # Setup
+ window = self.create_window(nick_to_pub_key("Alice"))
+ window.is_active = False
+ window.handle_dict = {nick_to_pub_key("Alice"): 'Alice'}
+ window.settings = Settings(new_message_notify_preview=True)
+ long_message = ("Lorem ipsum dolor sit amet, consectetur adipiscing elit. Quisque consequat libero et lao"
+ "reet egestas. Aliquam a arcu malesuada, elementum metus eget, elementum mi. Vestibulum i"
+ "d arcu sem. Ut sodales odio sed viverra mollis. Praesent gravida ante tellus, pellentesq"
+ "ue venenatis massa placerat quis. Nullam in magna porta, hendrerit sem vel, dictum ipsum"
+ ". Ut sagittis, ipsum ut bibendum ornare, ex lorem congue metus, vel posuere metus nulla "
+ "at augue.")
+ msg_tuple = (self.ts, long_message, nick_to_pub_key("Bob"), ORIGIN_USER_HEADER, False, False)
+
+ # Test
+ self.assert_prints(f"{BOLD_ON}{self.time} Me (private message): {NORMAL_TEXT}Lorem ipsum dolor sit "
+ f"amet, consectetu…\n{CURSOR_UP_ONE_LINE}{CLEAR_ENTIRE_LINE}",
+ window.print, msg_tuple)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_print_to_inactive_window_preview_off(self, _):
+ # Setup
+ window = self.create_window(nick_to_pub_key("Alice"))
+ window.is_active = False
+ window.handle_dict = {nick_to_pub_key("Alice"): 'Alice'}
+ window.settings = Settings(new_message_notify_preview=False)
+ msg_tuple = (self.ts, "Hi Bob", nick_to_pub_key("Bob"), ORIGIN_USER_HEADER, False, False)
+
+ # Test
+ self.assert_prints(
+ f"{BOLD_ON}{self.time} Me (private message): {NORMAL_TEXT}{BOLD_ON}1 unread message{NORMAL_TEXT}\n"
+ f"{CURSOR_UP_ONE_LINE}{CLEAR_ENTIRE_LINE}", window.print, msg_tuple)
+
+ def test_print_to_active_window_no_date_change(self):
+ # Setup
+ window = self.create_window(nick_to_pub_key("Alice"))
+ window.previous_msg_ts = datetime.fromtimestamp(1502750000)
+ window.is_active = True
+ window.handle_dict = {nick_to_pub_key("Bob"): 'Bob'}
+ window.settings = Settings(new_message_notify_preview=False)
+ msg_tuple = (self.ts, "Hi Alice", nick_to_pub_key("Bob"), ORIGIN_CONTACT_HEADER, False, False)
+
+ # Test
+ self.assert_prints(f"{BOLD_ON}{self.time} Bob: {NORMAL_TEXT}Hi Alice\n",
+ window.print, msg_tuple)
+
+ def test_print_to_active_window_with_date_change_and_whisper(self):
+ # Setup
+ window = self.create_window(nick_to_pub_key("Alice"))
+ window.previous_msg_ts = datetime.fromtimestamp(1501750000)
+ window.is_active = True
+ window.handle_dict = {nick_to_pub_key("Bob"): 'Bob'}
+ window.settings = Settings(new_message_notify_preview=False)
+ msg_tuple = (self.ts, "Hi Alice", nick_to_pub_key("Bob"), ORIGIN_CONTACT_HEADER, True, False)
+ self.time = self.ts.strftime('%H:%M:%S.%f')[:-4]
+
+ # Test
+ self.assert_prints(f"""\
+{BOLD_ON}00:00 -!- Day changed to 2017-08-15{NORMAL_TEXT}
+{BOLD_ON}{self.time} Bob (whisper): {NORMAL_TEXT}Hi Alice
+""", window.print, msg_tuple)
+
+ def test_print_to_active_window_with_date_change_and_whisper_empty_message(self):
+ # Setup
+ window = self.create_window(nick_to_pub_key("Alice"))
+ window.previous_msg_ts = datetime.fromtimestamp(1501750000)
+ window.is_active = True
+ window.handle_dict = {nick_to_pub_key("Bob"): 'Bob'}
+ window.settings = Settings(new_message_notify_preview=False)
+ msg_tuple = (self.ts, " ", nick_to_pub_key("Bob"), ORIGIN_CONTACT_HEADER, True, False)
+
+ # Test
+ self.assert_prints(f"""\
+{BOLD_ON}00:00 -!- Day changed to 2017-08-15{NORMAL_TEXT}
+{BOLD_ON}{self.time} Bob (whisper): {NORMAL_TEXT}
+""", window.print, msg_tuple)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_print_new(self, _):
+ # Setup
+ window = self.create_window(nick_to_pub_key("Alice"))
+
+ # Test
+ self.assertIsNone(window.add_new(self.ts, "Hi Alice", nick_to_pub_key("Bob"),
+ ORIGIN_CONTACT_HEADER, output=True))
+ self.assertEqual(len(window.message_log), 1)
+ self.assertEqual(window.handle_dict[nick_to_pub_key("Bob")], 'Bob')
+
+ def test_redraw_message_window(self):
+ # Setup
+ window = self.create_window(nick_to_pub_key("Alice"))
+ window.is_active = True
+ window.message_log = [(self.ts, "Hi Alice", nick_to_pub_key("Bob"), ORIGIN_CONTACT_HEADER, False, False)]
+ window.unread_messages = 1
+
+ # Test
+ self.assert_prints(f"""\
+{CLEAR_ENTIRE_SCREEN}{CURSOR_LEFT_UP_CORNER}
+------------------------------- Unread Messages --------------------------------
+
+{BOLD_ON}{self.time} Bob: {NORMAL_TEXT}Hi Alice
+""", window.redraw)
+ self.assertEqual(window.unread_messages, 0)
+
+ def test_redraw_empty_window(self):
+ # Setup
+ window = self.create_window(nick_to_pub_key("Alice"))
+ window.is_active = True
+ window.message_log = []
+
+ # Test
+ self.assert_prints(f"""\
+{CLEAR_ENTIRE_SCREEN}{CURSOR_LEFT_UP_CORNER}
+{BOLD_ON} This window for Alice is currently empty. {NORMAL_TEXT}\n
+""", window.redraw)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_redraw_file_win(self, _):
+ # Setup
+ self.packet_list.packets = [Packet(type=FILE,
+ name='testfile.txt',
+ assembly_pt_list=5*[b'a'],
+ packets=10,
+ size="100.0KB",
+ contact=create_contact('Bob')),
+ Packet(type=FILE,
+ name='testfile2.txt',
+ assembly_pt_list=7 * [b'a'],
+ packets=100,
+ size="15.0KB",
+ contact=create_contact('Charlie'))]
+
+ # Test
+ window = self.create_window(WIN_UID_FILE)
+ self.assert_prints(f"""\
+
+File name Size Sender Complete
+────────────────────────────────────────────────────────────────────────────────
+testfile.txt 100.0KB Bob 50.00%
+testfile2.txt 15.0KB Charlie 7.00%
+
+{6*(CURSOR_UP_ONE_LINE+CLEAR_ENTIRE_LINE)}""", window.redraw_file_win)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_redraw_empty_file_win(self, _):
+ # Setup
+ self.packet_list.packet_l = []
+
+ # Test
+ window = self.create_window(WIN_UID_FILE)
+ self.assert_prints(f"""\
+
+{BOLD_ON} No file transmissions currently in progress. {NORMAL_TEXT}
+
+{3*(CURSOR_UP_ONE_LINE+CLEAR_ENTIRE_LINE)}""", window.redraw_file_win)
+
+
+class TestWindowList(TFCTestCase):
+
+ def setUp(self):
+ self.settings = Settings()
+ self.contact_list = ContactList(nicks=['Alice', 'Bob', 'Charlie', LOCAL_ID])
+ self.group_list = GroupList(groups=['test_group', 'test_group2'])
+ self.packet_list = PacketList()
+
+ group = self.group_list.get_group('test_group')
+ group.members = list(map(self.contact_list.get_contact_by_address_or_nick, ['Alice', 'Bob', 'Charlie']))
+
+ self.window_list = WindowList(self.settings, self.contact_list, self.group_list, self.packet_list)
+
+ def create_window(self, uid):
+ """Create new RxWindow object."""
+ return RxWindow(uid, self.contact_list, self.group_list, self.settings, self.packet_list)
+
+ def test_active_win_is_none_if_local_key_is_not_present(self):
+ # Setup
+ self.contact_list.contacts = []
+
+ # Test
+ window_list = WindowList(self.settings, self.contact_list, self.group_list, self.packet_list)
+ self.assertEqual(window_list.active_win, None)
+
+ def test_active_win_is_command_win_if_local_key_is_present(self):
+ # Setup
+ self.contact_list.contacts = [create_contact(LOCAL_ID)]
+
+ # Test
+ self.assertEqual(self.window_list.active_win.uid, WIN_UID_LOCAL)
+
+ def test_window_list_iterates_over_windows(self):
+ for w in self.window_list:
+ self.assertIsInstance(w, RxWindow)
+
+ def test_len_returns_number_of_windows(self):
+ self.assertEqual(len(self.window_list), 7)
+
+ def test_group_windows(self):
+ # Setup
+ self.window_list.windows = [self.create_window(group_name_to_group_id(g)) for g in ['test_group',
+ 'test_group2']]
+
+ # Test
+ for g in self.window_list.get_group_windows():
+ self.assertEqual(g.type, WIN_TYPE_GROUP)
+
+ def test_has_window(self):
+ # Setup
+ self.window_list.windows = [self.create_window(group_name_to_group_id(g)) for g in ['test_group',
+ 'test_group2']]
+
+ # Test
+ self.assertTrue(self.window_list.has_window(group_name_to_group_id('test_group')))
+ self.assertTrue(self.window_list.has_window(group_name_to_group_id('test_group2')))
+ self.assertFalse(self.window_list.has_window(group_name_to_group_id('test_group3')))
+
+ def test_remove_window(self):
+ # Setup
+ self.window_list.windows = [self.create_window(group_name_to_group_id(g)) for g in ['test_group',
+ 'test_group2']]
+
+ # Test
+ self.assertEqual(len(self.window_list), 2)
+ self.assertIsNone(self.window_list.remove_window(group_name_to_group_id('test_group3')))
+ self.assertEqual(len(self.window_list), 2)
+ self.assertIsNone(self.window_list.remove_window(group_name_to_group_id('test_group2')))
+ self.assertEqual(len(self.window_list), 1)
+
+ def test_select_rx_window(self):
+ # Setup
+ self.window_list.windows = [self.create_window(group_name_to_group_id(g)) for g in ['test_group',
+ 'test_group2']]
+ tg_win = self.window_list.windows[0]
+ tg2_win = self.window_list.windows[1]
+ tg_win.is_active = True
+ self.window_list.active_win = tg_win
+
+ # Test
+ self.assert_prints(f"""{CLEAR_ENTIRE_SCREEN}{CURSOR_LEFT_UP_CORNER}
+{BOLD_ON} This window for test_group2 is currently empty. {NORMAL_TEXT}
+
+""", self.window_list.set_active_rx_window, group_name_to_group_id('test_group2'))
+ self.assertFalse(tg_win.is_active)
+ self.assertTrue(tg2_win.is_active)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_select_rx_file_window(self, _):
+ # Setup
+ self.window_list.windows = [self.create_window(WIN_UID_FILE)]
+ self.window_list.windows += [self.create_window(group_name_to_group_id(g)) for g in ['test_group',
+ 'test_group2']]
+ tg_win = self.window_list.get_window(group_name_to_group_id('test_group'))
+ tg_win.is_active = True
+ self.window_list.active_win = tg_win
+ self.packet_list.packets = [Packet(type=FILE,
+ name='testfile.txt',
+ assembly_pt_list=5 * [b'a'],
+ packets=10,
+ size="100.0KB",
+ contact=create_contact('Bob'))]
+
+ # Test
+ self.assert_prints(f"""\
+
+File name Size Sender Complete
+────────────────────────────────────────────────────────────────────────────────
+testfile.txt 100.0KB Bob 50.00%
+
+{5*(CURSOR_UP_ONE_LINE+CLEAR_ENTIRE_LINE)}""", self.window_list.set_active_rx_window, WIN_UID_FILE)
+
+ self.assertFalse(tg_win.is_active)
+ self.assertTrue(self.window_list.get_window(WIN_UID_FILE).is_active)
+
+ def test_get_local_window(self):
+ # Setup
+ self.window_list.windows = [self.create_window(uid) for uid in [group_name_to_group_id('test_group'),
+ group_name_to_group_id('test_group2'),
+ WIN_UID_FILE,
+ WIN_UID_LOCAL]]
+
+ # Test
+ self.assertEqual(self.window_list.get_local_window().uid, WIN_UID_LOCAL)
+
+ def test_get_non_existing_window(self):
+ # Setup
+ self.window_list.windows = [self.create_window(uid) for uid in [group_name_to_group_id('test_group'),
+ WIN_UID_FILE,
+ WIN_UID_LOCAL]]
+
+ # Test existing window
+ self.assertTrue(self.window_list.has_window(group_name_to_group_id('test_group')))
+        window = self.window_list.get_window(group_name_to_group_id('test_group'))
+ self.assertEqual(window.uid, group_name_to_group_id('test_group'))
+
+ # Test non-existing window
+ self.assertFalse(self.window_list.has_window(group_name_to_group_id('test_group2')))
+        window2 = self.window_list.get_window(group_name_to_group_id('test_group2'))
+ self.assertEqual(window2.uid, group_name_to_group_id('test_group2'))
+        self.assertTrue(self.window_list.has_window(group_name_to_group_id('test_group2')))
+
+
+if __name__ == '__main__':
+ unittest.main(exit=False)
diff --git a/tests/rx/__init__.py b/tests/relay/__init__.py
similarity index 100%
rename from tests/rx/__init__.py
rename to tests/relay/__init__.py
diff --git a/tests/relay/test_client.py b/tests/relay/test_client.py
new file mode 100644
index 0000000..bb1ea31
--- /dev/null
+++ b/tests/relay/test_client.py
@@ -0,0 +1,398 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import base64
+import threading
+import time
+import unittest
+
+from unittest import mock
+from typing import Any
+
+import requests
+
+from src.common.crypto import X448
+from src.common.db_onion import pub_key_to_onion_address, pub_key_to_short_address
+from src.common.statics import *
+
+from src.relay.client import c_req_manager, client, client_manager, g_msg_manager, get_data_loop
+
+from tests.mock_classes import Gateway
+from tests.utils import gen_queue_dict, nick_to_onion_address, nick_to_pub_key, tear_queues
+
+
+class TestClient(unittest.TestCase):
+
+ url_token_private_key = X448.generate_private_key()
+ url_token_public_key = X448.derive_public_key(url_token_private_key)
+ url_token = X448.shared_key(url_token_private_key, url_token_public_key).hex()
+
+ class MockResponse(object):
+ """Mock Response object."""
+ def __init__(self, text):
+ """Create new MockResponse object."""
+ self.text = text
+ self.content = text
+
+ class MockSession(object):
+ """Mock Session object."""
+
+ def __init__(self):
+ """Create new MockSession object."""
+ self.proxies = dict()
+ self.timeout = None
+ self.url = None
+ self.test_no = 0
+
+ def get(self, url, timeout=0, stream=False):
+ """Mock .get() method."""
+
+ self.timeout = timeout
+
+ # When we reach `get_data_loop` that loads stream, throw exception to close the test.
+ if stream:
+ (_ for _ in ()).throw(requests.exceptions.RequestException)
+
+ if url.startswith("http://hpcrayuxhrcy2wtpfwgwjibderrvjll6azfr4tqat3eka2m2gbb55bid.onion/"):
+
+ if self.test_no == 0:
+ self.test_no += 1
+ (_ for _ in ()).throw(requests.exceptions.RequestException)
+
+ if self.test_no == 1:
+ self.test_no += 1
+ return TestClient.MockResponse('OK')
+
+ # Test function recovers from RequestException.
+ if self.test_no == 2:
+ self.test_no += 1
+ (_ for _ in ()).throw(requests.exceptions.RequestException)
+
+ # Test function recovers from invalid public key.
+ if self.test_no == 3:
+ self.test_no += 1
+ return TestClient.MockResponse(((ONION_SERVICE_PUBLIC_KEY_LENGTH-1)*b'a').hex())
+
+ # Test client prints online/offline messages.
+ elif self.test_no < 10:
+ self.test_no += 1
+ return TestClient.MockResponse('')
+
+ # Test valid public key moves function to `get_data_loop`.
+ elif self.test_no == 10:
+ self.test_no += 1
+ return TestClient.MockResponse(TestClient.url_token_public_key.hex())
+
+ @staticmethod
+ def mock_session():
+ """Return MockSession object."""
+ return TestClient.MockSession()
+
+ def setUp(self):
+ self.o_session = requests.session
+ self.queues = gen_queue_dict()
+ requests.session = TestClient.mock_session
+
+ def tearDown(self):
+ requests.session = self.o_session
+ tear_queues(self.queues)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_client(self, _):
+ onion_pub_key = nick_to_pub_key('Alice')
+ onion_address = nick_to_onion_address('Alice')
+ tor_port = '1337'
+ settings = Gateway()
+ sk = TestClient.url_token_private_key
+ self.assertIsNone(client(onion_pub_key, self.queues, sk, tor_port, settings, onion_address, unittest=True))
+ self.assertEqual(self.queues[URL_TOKEN_QUEUE].get(), (onion_pub_key, TestClient.url_token))
+
+
+class TestGetDataLoop(unittest.TestCase):
+
+ url_token_private_key_user = X448.generate_private_key()
+ url_token_public_key_user = X448.derive_public_key(url_token_private_key_user)
+ url_token_public_key_contact = X448.derive_public_key(X448.generate_private_key())
+ url_token = X448.shared_key(url_token_private_key_user, url_token_public_key_contact).hex()
+
+ class MockResponse(object):
+ """Mock Response object."""
+ def __init__(self):
+ self.test_no = 0
+
+ def iter_lines(self):
+ """Return data depending test number."""
+ self.test_no += 1
+ message = b''
+
+ # Empty message
+ if self.test_no == 1:
+ pass
+
+ # Invalid message
+ elif self.test_no == 2:
+ message = MESSAGE_DATAGRAM_HEADER + b'\x1f'
+
+ # Valid message
+ elif self.test_no == 3:
+ message = MESSAGE_DATAGRAM_HEADER + base64.b85encode(b'test') + b'\n'
+
+ # Invalid public key
+ elif self.test_no == 4:
+ message = PUBLIC_KEY_DATAGRAM_HEADER + base64.b85encode((TFC_PUBLIC_KEY_LENGTH-1) * b'\x01')
+
+ # Valid public key
+ elif self.test_no == 5:
+ message = PUBLIC_KEY_DATAGRAM_HEADER + base64.b85encode(TFC_PUBLIC_KEY_LENGTH * b'\x01')
+
+ # Group management headers
+ elif self.test_no == 6:
+ message = GROUP_MSG_INVITE_HEADER
+
+ elif self.test_no == 7:
+ message = GROUP_MSG_JOIN_HEADER
+
+ elif self.test_no == 8:
+ message = GROUP_MSG_MEMBER_ADD_HEADER
+
+ elif self.test_no == 9:
+ message = GROUP_MSG_MEMBER_REM_HEADER
+
+ elif self.test_no == 10:
+ message = GROUP_MSG_EXIT_GROUP_HEADER
+
+ # Invalid header
+ elif self.test_no == 11:
+ message = b'\x1f'
+
+ # RequestException (no remaining data)
+ elif self.test_no == 12:
+ (_ for _ in ()).throw(requests.exceptions.RequestException)
+
+ return message.split(b'\n')
+
+ class MockFileResponse(object):
+ """MockFileResponse object."""
+
+ def __init__(self, content):
+ self.content = content
+
+ class Session(object):
+ """Mock session object."""
+
+ def __init__(self) -> None:
+ """Create new Session object."""
+ self.proxies = dict()
+ self.timeout = None
+ self.url = None
+ self.stream = False
+ self.test_no = 0
+ self.response = TestGetDataLoop.MockResponse()
+ self.url_token = TestGetDataLoop.url_token
+ self.onion_url = 'http://aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaam2dqd.onion'
+
+ def get(self, url: str, timeout: int = 0, stream: bool = False) -> Any:
+ """Return data depending on what test is in question."""
+
+ self.stream = stream
+ self.timeout = timeout
+
+ if url == f"{self.onion_url}/{self.url_token}/messages":
+
+ # Test function recovers from RequestException.
+ if self.test_no == 1:
+ self.test_no += 1
+ (_ for _ in ()).throw(requests.exceptions.RequestException)
+
+ if self.test_no >= 2:
+ self.test_no += 1
+ return self.response
+
+ elif url == f"{self.onion_url}/{self.url_token}/files":
+
+ # Test file data is received
+ if self.test_no == 0:
+ self.test_no += 1
+ return TestGetDataLoop.MockFileResponse(b'test')
+
+ # Test function recovers from RequestException.
+ if self.test_no > 1:
+ (_ for _ in ()).throw(requests.exceptions.RequestException)
+
+ @staticmethod
+ def mock_session() -> Session:
+ """Return mock Session object."""
+ return TestGetDataLoop.Session()
+
+ def setUp(self):
+ self.o_session = requests.session
+ self.queues = gen_queue_dict()
+ requests.session = TestGetDataLoop.mock_session
+
+ def tearDown(self):
+ requests.session = self.o_session
+ tear_queues(self.queues)
+
+ def test_get_data_loop(self):
+
+ onion_pub_key = bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH)
+ settings = Gateway()
+ onion_addr = pub_key_to_onion_address(bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH))
+ short_addr = pub_key_to_short_address(bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH))
+ url_token = TestGetDataLoop.url_token
+ session = TestGetDataLoop.mock_session()
+
+ self.assertIsNone(get_data_loop(onion_addr, url_token, short_addr,
+ onion_pub_key, self.queues, session, settings))
+
+ self.assertIsNone(get_data_loop(onion_addr, url_token, short_addr,
+ onion_pub_key, self.queues, session, settings))
+
+ self.assertEqual(self.queues[DST_MESSAGE_QUEUE].qsize(), 2) # Message and file
+ self.assertEqual(self.queues[GROUP_MSG_QUEUE].qsize(), 5) # 5 group management messages
+
+
+class TestGroupManager(unittest.TestCase):
+
+ def test_group_manager(self):
+
+ queues = gen_queue_dict()
+
+ def queue_delayer():
+ """Place messages to queue one at a time."""
+ time.sleep(0.1)
+
+ # Test function recovers from incorrect group ID size
+ queues[GROUP_MSG_QUEUE].put((
+ GROUP_MSG_EXIT_GROUP_HEADER,
+ bytes((GROUP_ID_LENGTH - 1)),
+ pub_key_to_short_address(bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH))
+ ))
+
+ # Test group invite for added and removed contacts
+ queues[GROUP_MGMT_QUEUE].put((RP_ADD_CONTACT_HEADER, nick_to_pub_key('Alice') + nick_to_pub_key('Bob')))
+ queues[GROUP_MGMT_QUEUE].put((RP_REMOVE_CONTACT_HEADER, nick_to_pub_key('Alice')))
+
+ for header in [GROUP_MSG_INVITE_HEADER, GROUP_MSG_JOIN_HEADER,
+ GROUP_MSG_MEMBER_ADD_HEADER, GROUP_MSG_MEMBER_REM_HEADER]:
+ queues[GROUP_MSG_QUEUE].put(
+ (header,
+ bytes(GROUP_ID_LENGTH) + nick_to_pub_key('Bob') + nick_to_pub_key('Charlie'),
+ pub_key_to_short_address(bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH))
+ ))
+
+ queues[GROUP_MSG_QUEUE].put(
+ (GROUP_MSG_EXIT_GROUP_HEADER,
+ bytes(GROUP_ID_LENGTH),
+ pub_key_to_short_address(bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH))
+ ))
+
+ # Exit test
+ time.sleep(0.2)
+ queues[UNITTEST_QUEUE].put(EXIT)
+ queues[GROUP_MSG_QUEUE].put(
+ (GROUP_MSG_EXIT_GROUP_HEADER,
+ bytes(GROUP_ID_LENGTH),
+ pub_key_to_short_address(bytes(ONION_SERVICE_PUBLIC_KEY_LENGTH))
+ ))
+
+ # Test
+ threading.Thread(target=queue_delayer).start()
+ self.assertIsNone(g_msg_manager(queues, unittest=True))
+ tear_queues(queues)
+
+
+class TestClientManager(unittest.TestCase):
+
+ def test_client_manager(self):
+ queues = gen_queue_dict()
+ gateway = Gateway()
+ server_private_key = X448.generate_private_key()
+
+ def queue_delayer():
+ """Place messages to queue one at a time."""
+ time.sleep(0.1)
+ queues[TOR_DATA_QUEUE].put(
+ ('1234', nick_to_onion_address('Alice')))
+ queues[CONTACT_KEY_QUEUE].put(
+ (RP_ADD_CONTACT_HEADER, b''.join([nick_to_pub_key('Alice'), nick_to_pub_key('Bob')]), True))
+ time.sleep(0.1)
+ queues[CONTACT_KEY_QUEUE].put(
+ (RP_REMOVE_CONTACT_HEADER, b''.join([nick_to_pub_key('Alice'), nick_to_pub_key('Bob')]), True))
+ time.sleep(0.1)
+ queues[UNITTEST_QUEUE].put(EXIT)
+ time.sleep(0.1)
+ queues[CONTACT_KEY_QUEUE].put((EXIT, EXIT, EXIT))
+
+ threading.Thread(target=queue_delayer).start()
+
+ self.assertIsNone(client_manager(queues, gateway, server_private_key, unittest=True))
+ tear_queues(queues)
+
+
+class TestContactRequestManager(unittest.TestCase):
+
+ def test_contact_request_manager(self):
+
+ queues = gen_queue_dict()
+
+ def queue_delayer():
+ """Place messages to queue one at a time."""
+ time.sleep(0.1)
+ queues[F_REQ_MGMT_QUEUE].put(
+ (RP_ADD_CONTACT_HEADER, b''.join(list(map(nick_to_pub_key, ['Alice', 'Bob'])))))
+ time.sleep(0.1)
+
+ # Test that request from Alice does not appear
+ queues[CONTACT_REQ_QUEUE].put((nick_to_onion_address('Alice')))
+ time.sleep(0.1)
+
+ # Test that request from Charlie appears
+ queues[CONTACT_REQ_QUEUE].put((nick_to_onion_address('Charlie')))
+ time.sleep(0.1)
+
+ # Test that another request from Charlie does not appear
+ queues[CONTACT_REQ_QUEUE].put((nick_to_onion_address('Charlie')))
+ time.sleep(0.1)
+
+ # Remove Alice
+ queues[F_REQ_MGMT_QUEUE].put((RP_REMOVE_CONTACT_HEADER, nick_to_pub_key('Alice')))
+ time.sleep(0.1)
+
+ # Load settings from queue
+ queues[C_REQ_MGR_QUEUE].put(False)
+ queues[C_REQ_MGR_QUEUE].put(True)
+
+ # Test that request from Alice is accepted
+ queues[CONTACT_REQ_QUEUE].put((nick_to_onion_address('Alice')))
+ time.sleep(0.1)
+
+ # Exit test
+ queues[UNITTEST_QUEUE].put(EXIT)
+ queues[CONTACT_REQ_QUEUE].put(nick_to_pub_key('Charlie'))
+
+ threading.Thread(target=queue_delayer).start()
+ self.assertIsNone(c_req_manager(queues, unittest=True))
+ tear_queues(queues)
+
+
+if __name__ == '__main__':
+ unittest.main(exit=False)
diff --git a/tests/relay/test_commands.py b/tests/relay/test_commands.py
new file mode 100644
index 0000000..a80ef5a
--- /dev/null
+++ b/tests/relay/test_commands.py
@@ -0,0 +1,254 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import threading
+import time
+import unittest
+
+from unittest import mock
+
+from unittest.mock import MagicMock
+
+from src.common.encoding import int_to_bytes
+from src.common.statics import *
+
+from src.relay.commands import add_contact, add_onion_data, change_baudrate, change_ec_ratio, clear_windows, exit_tfc
+from src.relay.commands import manage_contact_req, process_command, race_condition_delay, relay_command, remove_contact
+from src.relay.commands import reset_windows, wipe
+
+from tests.mock_classes import Gateway, nick_to_pub_key
+from tests.utils import gen_queue_dict, tear_queues, TFCTestCase
+
+
+class TestRelayCommand(unittest.TestCase):
+
+ def setUp(self):
+ self.gateway = Gateway()
+ self.queues = gen_queue_dict()
+ self.gateway.settings.race_condition_delay = 0.0
+
+ def tearDown(self):
+ tear_queues(self.queues)
+
+ @mock.patch('sys.stdin', MagicMock())
+ @mock.patch('os.fdopen', MagicMock())
+ def test_packet_reading(self, *_):
+
+ def queue_delayer():
+ """Place packet into queue after delay."""
+ time.sleep(0.1)
+ self.queues[SRC_TO_RELAY_QUEUE].put(UNENCRYPTED_SCREEN_CLEAR)
+
+ threading.Thread(target=queue_delayer).start()
+ self.assertIsNone(relay_command(self.queues, self.gateway, stdin_fd=1, unittest=True))
+
+
+class TestProcessCommand(TFCTestCase):
+
+ def setUp(self):
+ self.gateway = Gateway()
+ self.queues = gen_queue_dict()
+
+ def tearDown(self):
+ tear_queues(self.queues)
+
+ def test_invalid_key(self):
+ self.assert_fr("Error: Received an invalid command.", process_command, b'INVALID', self.gateway, self.queues)
+
+
+class TestRaceConditionDelay(unittest.TestCase):
+
+ def setUp(self):
+ self.gateway = Gateway(local_testing_mode=True,
+ data_diode_sockets=True)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_delay(self, mock_sleep):
+ self.assertIsNone(race_condition_delay(self.gateway))
+ self.assertEqual(mock_sleep.call_args_list, [mock.call(LOCAL_TESTING_PACKET_DELAY), mock.call(1.0)])
+
+
+class TestClearWindows(TFCTestCase):
+
+ def setUp(self):
+ self.gateway = Gateway(race_condition_delay=0.0)
+
+ def test_clear_display(self):
+ self.assert_prints(CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER, clear_windows, self.gateway)
+
+
+class TestResetWindows(TFCTestCase):
+
+ @mock.patch('os.system', return_value=None)
+ def test_reset_display(self, _):
+ self.gateway = Gateway(race_condition_delay=0.0)
+ self.assertIsNone(reset_windows(self.gateway))
+
+
+class TestExitTFC(unittest.TestCase):
+
+ def setUp(self):
+ self.gateway = Gateway(race_condition_delay=0.0)
+ self.queues = gen_queue_dict()
+
+ def tearDown(self):
+ tear_queues(self.queues)
+
+ def test_exit_tfc(self):
+ self.assertIsNone(exit_tfc(self.gateway, self.queues))
+ self.assertEqual(self.queues[ONION_CLOSE_QUEUE].get(), EXIT)
+
+
+class TestChangeECRatio(TFCTestCase):
+
+ def setUp(self):
+ self.gateway = Gateway()
+
+ def test_non_digit_value_raises_fr(self):
+ self.assert_fr("Error: Received invalid EC ratio value from Transmitter Program.",
+ change_ec_ratio, b'a', self.gateway)
+
+ def test_invalid_digit_value_raises_fr(self):
+ self.assert_fr("Error: Received invalid EC ratio value from Transmitter Program.",
+ change_ec_ratio, b'-1', self.gateway)
+
+ def test_change_value(self):
+ self.assertIsNone(change_ec_ratio(b'3', self.gateway))
+ self.assertEqual(self.gateway.settings.serial_error_correction, 3)
+
+
+class TestChangeBaudrate(TFCTestCase):
+
+ def setUp(self):
+ self.gateway = Gateway()
+
+ def test_non_digit_value_raises_fr(self):
+ self.assert_fr("Error: Received invalid baud rate value from Transmitter Program.",
+ change_baudrate, b'a', self.gateway)
+
+ def test_invalid_digit_value_raises_fr(self):
+ self.assert_fr("Error: Received invalid baud rate value from Transmitter Program.",
+ change_baudrate, b'1300', self.gateway)
+
+ def test_change_value(self):
+ self.assertIsNone(change_baudrate(b'9600', self.gateway))
+ self.assertEqual(self.gateway.settings.serial_baudrate, 9600)
+
+
+class TestWipe(unittest.TestCase):
+
+ def setUp(self):
+ self.gateway = Gateway(race_condition_delay=0.0)
+ self.queues = gen_queue_dict()
+
+ def tearDown(self):
+ tear_queues(self.queues)
+
+ @mock.patch('os.system', return_value=None)
+ def test_wipe_command(self, _):
+ self.assertIsNone(wipe(self.gateway, self.queues))
+ self.assertEqual(self.queues[ONION_CLOSE_QUEUE].get(), WIPE)
+
+
+class TestManageContactReq(unittest.TestCase):
+
+ def setUp(self):
+ self.queues = gen_queue_dict()
+
+ def tearDown(self):
+ tear_queues(self.queues)
+
+ def test_setting_management(self):
+ manage_contact_req(b'\x01', self.queues)
+ self.assertTrue(self.queues[C_REQ_MGR_QUEUE].get())
+
+ manage_contact_req(b'\x00', self.queues)
+ self.assertFalse(self.queues[C_REQ_MGR_QUEUE].get())
+
+
+class TestAddContact(unittest.TestCase):
+
+ def setUp(self):
+ self.queues = gen_queue_dict()
+
+ def tearDown(self):
+ tear_queues(self.queues)
+
+ def test_add_contact(self):
+ command = b''.join([nick_to_pub_key('Alice'), nick_to_pub_key('Bob')])
+
+ self.assertIsNone(add_contact(command, True, self.queues))
+ self.assertEqual(self.queues[CONTACT_KEY_QUEUE].qsize(), 1)
+ for q in [GROUP_MGMT_QUEUE, F_REQ_MGMT_QUEUE]:
+ command = self.queues[q].get()
+ self.assertEqual(command,
+ (RP_ADD_CONTACT_HEADER, b''.join([nick_to_pub_key('Alice'), nick_to_pub_key('Bob')])))
+ self.assertEqual(self.queues[CONTACT_KEY_QUEUE].get(),
+ (RP_ADD_CONTACT_HEADER, b''.join(list(map(nick_to_pub_key, ['Alice', 'Bob']))), True))
+
+
+class TestRemContact(unittest.TestCase):
+
+ def setUp(self):
+ self.queues = gen_queue_dict()
+
+ def tearDown(self):
+ tear_queues(self.queues)
+
+ def test_add_contact(self):
+ command = b''.join([nick_to_pub_key('Alice'), nick_to_pub_key('Bob')])
+
+ self.assertIsNone(remove_contact(command, self.queues))
+ self.assertEqual(self.queues[CONTACT_KEY_QUEUE].qsize(), 1)
+ self.assertEqual(self.queues[CONTACT_KEY_QUEUE].get(),
+ (RP_REMOVE_CONTACT_HEADER,
+ b''.join([nick_to_pub_key('Alice'), nick_to_pub_key('Bob')]),
+ False)
+ )
+
+ for q in [GROUP_MGMT_QUEUE, F_REQ_MGMT_QUEUE]:
+ command = self.queues[q].get()
+ self.assertEqual(command, (RP_REMOVE_CONTACT_HEADER,
+ b''.join([nick_to_pub_key('Alice'), nick_to_pub_key('Bob')])))
+
+
+class TestAddOnionKey(unittest.TestCase):
+
+ def setUp(self):
+ self.queues = gen_queue_dict()
+
+ def tearDown(self):
+ tear_queues(self.queues)
+
+ def test_add_contact(self):
+ command = (ONION_SERVICE_PRIVATE_KEY_LENGTH * b'a'
+ + b'b'
+ + b'\x01'
+ + int_to_bytes(1)
+ + nick_to_pub_key('Alice')
+ + nick_to_pub_key('Bob'))
+ self.assertIsNone(add_onion_data(command, self.queues))
+ self.assertEqual(self.queues[ONION_KEY_QUEUE].qsize(), 1)
+ self.assertEqual(self.queues[ONION_KEY_QUEUE].get(), (ONION_SERVICE_PRIVATE_KEY_LENGTH * b'a', b'b'))
+
+
+if __name__ == '__main__':
+ unittest.main(exit=False)
diff --git a/tests/relay/test_onion.py b/tests/relay/test_onion.py
new file mode 100644
index 0000000..a461c7f
--- /dev/null
+++ b/tests/relay/test_onion.py
@@ -0,0 +1,141 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import threading
+import time
+import unittest
+
+from unittest import mock
+from unittest.mock import MagicMock
+
+import stem.control
+
+from src.common.misc import validate_onion_addr
+from src.common.statics import *
+
+from src.relay.onion import get_available_port, onion_service, Tor
+
+from tests.utils import gen_queue_dict
+
+
+class TestGetAvailablePort(unittest.TestCase):
+
+ @mock.patch('random.randint', side_effect=[OSError, 1234])
+ def test_get_available_port(self, _):
+ port = get_available_port(1000, 65535)
+ self.assertEqual(port, 1234)
+
+
+class TestTor(unittest.TestCase):
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('os.path.isfile', return_value=False)
+ def test_missing_binary_raises_critical_error(self, *_):
+ tor = Tor()
+ with self.assertRaises(SystemExit):
+ tor.connect('1234')
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('stem.process.launch_tor_with_config', side_effect=[MagicMock(), OSError, MagicMock()])
+ @mock.patch('stem.control.Controller.from_socket_file', return_value=MagicMock(get_info=MagicMock(
+ side_effect=['NOTICE BOOTSTRAP PROGRESS=100 TAG=done SUMMARY="Done"', stem.SocketClosed])))
+ def test_closed_socket_raises_critical_error(self, *_):
+ tor = Tor()
+ self.assertIsNone(tor.connect('1234'))
+ with self.assertRaises(SystemExit):
+ tor.connect('1234')
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('time.monotonic', side_effect=[1, 20, 30, 40])
+ @mock.patch('stem.control.Controller.from_socket_file', return_value=MagicMock(get_info=MagicMock(
+ side_effect=['NOTICE BOOTSTRAP PROGRESS=100 TAG=done SUMMARY="Nope"',
+ 'NOTICE BOOTSTRAP PROGRESS=100 TAG=done SUMMARY="Done"'])))
+ @mock.patch('stem.process.launch_tor_with_config', return_value=MagicMock(poll=lambda: False))
+ def test_timeout_restarts_tor(self, *_):
+ tor = Tor()
+ self.assertIsNone(tor.connect('1234'))
+ tor.stop()
+
+
+class TestOnionService(unittest.TestCase):
+
+ @mock.patch('shlex.split', return_value=['NOTICE', 'BOOTSTRAP', 'PROGRESS=100',
+ 'TAG=done', 'SUMMARY=Done'])
+ @mock.patch('stem.control.Controller.from_socket_file', return_value=MagicMock())
+ @mock.patch('src.relay.onion.get_available_port', side_effect=KeyboardInterrupt)
+ def test_returns_with_keyboard_interrupt(self, *_):
+ queues = gen_queue_dict()
+ queues[ONION_KEY_QUEUE].put((bytes(ONION_SERVICE_PRIVATE_KEY_LENGTH), b'\x01'))
+ self.assertIsNone(onion_service(queues))
+
+ @mock.patch('shlex.split', return_value=['NOTICE', 'BOOTSTRAP', 'PROGRESS=100',
+ 'TAG=done', 'SUMMARY=Done'])
+ @mock.patch('stem.control.Controller.from_socket_file', return_value=MagicMock())
+ @mock.patch('stem.process.launch_tor_with_config', return_value=MagicMock())
+ def test_onion_service(self, *_):
+ queues = gen_queue_dict()
+
+ def queue_delayer():
+ """Place Onion Service data into queue after delay."""
+ time.sleep(0.5)
+ queues[ONION_KEY_QUEUE].put((bytes(ONION_SERVICE_PRIVATE_KEY_LENGTH), b'\x01'))
+ queues[ONION_KEY_QUEUE].put((bytes(ONION_SERVICE_PRIVATE_KEY_LENGTH), b'\x01'))
+ time.sleep(0.1)
+ queues[ONION_CLOSE_QUEUE].put(EXIT)
+
+ threading.Thread(target=queue_delayer).start()
+
+ with mock.patch("time.sleep", return_value=None):
+ self.assertIsNone(onion_service(queues))
+
+ port, address = queues[TOR_DATA_QUEUE].get()
+ self.assertIsInstance(port, int)
+ self.assertEqual(validate_onion_addr(address), '')
+ self.assertEqual(queues[EXIT_QUEUE].get(), EXIT)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('shlex.split', return_value=['NOTICE', 'BOOTSTRAP', 'PROGRESS=100',
+ 'TAG=done', 'SUMMARY=Done'])
+ @mock.patch('shutil.get_terminal_size', side_effect=[stem.SocketClosed])
+ @mock.patch('stem.control.Controller.from_socket_file', return_value=MagicMock())
+ @mock.patch('stem.process.launch_tor_with_config', return_value=MagicMock())
+ def test_exception_during_onion_service_setup_returns(self, *_):
+ queues = gen_queue_dict()
+ queues[ONION_KEY_QUEUE].put((bytes(ONION_SERVICE_PRIVATE_KEY_LENGTH), b'\x01'))
+ self.assertIsNone(onion_service(queues))
+
+ @mock.patch('time.sleep', side_effect=[None, None, KeyboardInterrupt, stem.SocketClosed, None])
+ @mock.patch('shlex.split', return_value=['NOTICE', 'BOOTSTRAP', 'PROGRESS=100', 'TAG=done', 'SUMMARY=Done'])
+ @mock.patch('stem.control.Controller.from_socket_file', return_value=MagicMock())
+ @mock.patch('stem.process.launch_tor_with_config', return_value=MagicMock())
+ def test_socket_closed_returns(self, *_):
+ queues = gen_queue_dict()
+
+ controller = stem.control.Controller
+ controller.create_ephemeral_hidden_service = MagicMock()
+
+ queues[ONION_KEY_QUEUE].put((bytes(ONION_SERVICE_PRIVATE_KEY_LENGTH), b'\x01'))
+
+ self.assertIsNone(onion_service(queues))
+
+
+if __name__ == '__main__':
+ unittest.main(exit=False)
diff --git a/tests/relay/test_server.py b/tests/relay/test_server.py
new file mode 100644
index 0000000..95cac5b
--- /dev/null
+++ b/tests/relay/test_server.py
@@ -0,0 +1,92 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import unittest
+
+from src.common.crypto import X448
+from src.common.statics import *
+
+from src.relay.server import flask_server
+
+from tests.utils import gen_queue_dict, nick_to_onion_address, nick_to_pub_key
+
+
+class TestFlaskServer(unittest.TestCase):
+
+ def test_flask_server(self):
+ # Setup
+ queues = gen_queue_dict()
+ url_token_private_key = X448.generate_private_key()
+ url_token_public_key = X448.derive_public_key(url_token_private_key).hex()
+ url_token = 'a450987345098723459870234509827340598273405983274234098723490285'
+ url_token_old = 'a450987345098723459870234509827340598273405983274234098723490286'
+ url_token_invalid = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
+ onion_pub_key = nick_to_pub_key('Alice')
+ onion_address = nick_to_onion_address('Alice')
+ packet1 = "packet1"
+ packet2 = "packet2"
+ packet3 = b"packet3"
+
+ # Test
+ app = flask_server(queues, url_token_public_key, unittest=True)
+
+ with app.test_client() as c:
+ # Test root domain returns public key of server.
+ resp = c.get('/')
+ self.assertEqual(resp.data, url_token_public_key.encode())
+
+ resp = c.get(f'/contact_request/{onion_address}')
+ self.assertEqual(b'OK', resp.data)
+ self.assertEqual(queues[CONTACT_REQ_QUEUE].qsize(), 1)
+
+ # Test invalid URL token returns empty response
+ resp = c.get(f'/{url_token_invalid}/messages/')
+ self.assertEqual(b'', resp.data)
+ resp = c.get(f'/{url_token_invalid}/files/')
+ self.assertEqual(b'', resp.data)
+
+ # Test valid URL token returns all queued messages
+ queues[URL_TOKEN_QUEUE].put((onion_pub_key, url_token_old))
+ queues[URL_TOKEN_QUEUE].put((onion_pub_key, url_token))
+ queues[M_TO_FLASK_QUEUE].put((packet1, onion_pub_key))
+ queues[M_TO_FLASK_QUEUE].put((packet2, onion_pub_key))
+ queues[F_TO_FLASK_QUEUE].put((packet3, onion_pub_key))
+
+ with app.test_client() as c:
+ resp = c.get(f'/{url_token}/messages/')
+ self.assertEqual(b'packet1\npacket2', resp.data)
+
+ with app.test_client() as c:
+ resp = c.get(f'/{url_token}/files/')
+ self.assertEqual(b'packet3', resp.data)
+
+ # Test valid URL token returns nothing as queues are empty
+ with app.test_client() as c:
+ resp = c.get(f'/{url_token}/messages/')
+ self.assertEqual(b'', resp.data)
+
+ with app.test_client() as c:
+ resp = c.get(f'/{url_token}/files/')
+ self.assertEqual(b'', resp.data)
+
+
+if __name__ == '__main__':
+ unittest.main(exit=False)
diff --git a/tests/relay/test_tcb.py b/tests/relay/test_tcb.py
new file mode 100644
index 0000000..8f61dd2
--- /dev/null
+++ b/tests/relay/test_tcb.py
@@ -0,0 +1,222 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import threading
+import time
+import unittest
+
+from datetime import datetime
+from unittest import mock
+
+from src.common.encoding import int_to_bytes
+from src.common.reed_solomon import RSCodec
+from src.common.statics import *
+
+from src.relay.tcb import dst_outgoing, src_incoming
+
+from tests.mock_classes import Gateway, nick_to_pub_key, Settings
+from tests.utils import cd_unittest, cleanup, gen_queue_dict, tear_queues
+
+
+class TestSRCIncoming(unittest.TestCase):
+
+ def setUp(self):
+ self.settings = Settings()
+ self.unittest_dir = cd_unittest()
+ self.gateway = Gateway()
+ self.rs = RSCodec(2 * self.gateway.settings.serial_error_correction)
+ self.ts = datetime.now()
+ self.queues = gen_queue_dict()
+ self.args = self.queues, self.gateway
+
+ def tearDown(self):
+ tear_queues(self.queues)
+ cleanup(self.unittest_dir)
+
+ def create_packet(self, packet: bytes):
+ """Create Reed-Solomon encoded packet"""
+ return self.rs.encode(packet)
+
+ def test_unencrypted_datagram(self):
+ # Setup
+ packet = self.create_packet(UNENCRYPTED_DATAGRAM_HEADER + b'test')
+ self.queues[GATEWAY_QUEUE].put((self.ts, 640 * b'a'))
+ self.queues[GATEWAY_QUEUE].put((self.ts, packet))
+
+ # Test
+ self.assertIsNone(src_incoming(*self.args, unittest=True))
+ self.assertEqual(self.queues[SRC_TO_RELAY_QUEUE].qsize(), 1)
+
+ def test_local_key_datagram(self):
+ # Setup
+ packet = self.create_packet(LOCAL_KEY_DATAGRAM_HEADER + b'test')
+
+ def queue_delayer():
+ """Place packet into queue after delay."""
+ time.sleep(0.01)
+ self.queues[GATEWAY_QUEUE].put((self.ts, packet))
+
+ threading.Thread(target=queue_delayer).start()
+
+ # Test
+ self.assertIsNone(src_incoming(*self.args, unittest=True))
+ self.assertEqual(self.queues[DST_COMMAND_QUEUE].qsize(), 1)
+
+ def test_command_datagram(self):
+ # Setup
+ packet = self.create_packet(COMMAND_DATAGRAM_HEADER + b'test')
+ self.queues[GATEWAY_QUEUE].put((self.ts, packet))
+
+ # Test
+ self.assertIsNone(src_incoming(*self.args, unittest=True))
+ self.assertEqual(self.queues[DST_COMMAND_QUEUE].qsize(), 1)
+
+ def test_message_datagram(self):
+ # Setup
+ packet = self.create_packet(MESSAGE_DATAGRAM_HEADER + 344 * b'a' + nick_to_pub_key('bob'))
+ self.queues[GATEWAY_QUEUE].put((self.ts, packet))
+
+ # Test
+ self.assertIsNone(src_incoming(*self.args, unittest=True))
+ self.assertEqual(self.queues[M_TO_FLASK_QUEUE].qsize(), 1)
+ self.assertEqual(self.queues[DST_MESSAGE_QUEUE].qsize(), 1)
+
+ def test_public_key_datagram(self):
+ # Setup
+ packet = self.create_packet(PUBLIC_KEY_DATAGRAM_HEADER + nick_to_pub_key('bob') + TFC_PUBLIC_KEY_LENGTH * b'a')
+ self.queues[GATEWAY_QUEUE].put((self.ts, packet))
+
+ # Test
+ self.assertIsNone(src_incoming(*self.args, unittest=True))
+ self.assertEqual(self.queues[M_TO_FLASK_QUEUE].qsize(), 1)
+
+ def test_file_datagram(self):
+ # Setup
+ packet = self.create_packet(FILE_DATAGRAM_HEADER
+ + int_to_bytes(2)
+ + nick_to_pub_key('Alice')
+ + nick_to_pub_key('Bob')
+ + 200 * b'a')
+ self.queues[GATEWAY_QUEUE].put((self.ts, packet))
+
+ # Test
+ self.assertIsNone(src_incoming(*self.args, unittest=True))
+ self.assertEqual(self.queues[DST_MESSAGE_QUEUE].qsize(), 0)
+ self.assertEqual(self.queues[F_TO_FLASK_QUEUE].qsize(), 2)
+
+ def test_group_invitation_datagram(self):
+ # Setup
+ packet = self.create_packet(GROUP_MSG_INVITE_HEADER
+ + bytes(GROUP_ID_LENGTH)
+ + nick_to_pub_key('Alice')
+ + nick_to_pub_key('Bob'))
+ self.queues[GATEWAY_QUEUE].put((self.ts, packet))
+
+ # Test
+ self.assertIsNone(src_incoming(*self.args, unittest=True))
+ self.assertEqual(self.queues[DST_MESSAGE_QUEUE].qsize(), 0)
+ self.assertEqual(self.queues[M_TO_FLASK_QUEUE].qsize(), 2)
+
+ def test_group_join_datagram(self):
+ # Setup
+ packet = self.create_packet(GROUP_MSG_JOIN_HEADER
+ + bytes(GROUP_ID_LENGTH)
+ + nick_to_pub_key('Alice')
+ + nick_to_pub_key('Bob'))
+ self.queues[GATEWAY_QUEUE].put((self.ts, packet))
+
+ # Test
+ self.assertIsNone(src_incoming(*self.args, unittest=True))
+ self.assertEqual(self.queues[DST_MESSAGE_QUEUE].qsize(), 0)
+ self.assertEqual(self.queues[M_TO_FLASK_QUEUE].qsize(), 2)
+
+ def test_group_add_datagram(self):
+ # Setup
+ packet = self.create_packet(GROUP_MSG_MEMBER_ADD_HEADER
+ + bytes(GROUP_ID_LENGTH)
+ + int_to_bytes(1)
+ + nick_to_pub_key('Alice')
+ + nick_to_pub_key('Bob'))
+ self.queues[GATEWAY_QUEUE].put((self.ts, packet))
+
+ # Test
+ self.assertIsNone(src_incoming(*self.args, unittest=True))
+ self.assertEqual(self.queues[DST_MESSAGE_QUEUE].qsize(), 0)
+ self.assertEqual(self.queues[M_TO_FLASK_QUEUE].qsize(), 2)
+
+ def test_group_remove_datagram(self):
+ # Setup
+ packet = self.create_packet(GROUP_MSG_MEMBER_REM_HEADER
+ + bytes(GROUP_ID_LENGTH)
+ + int_to_bytes(2)
+ + nick_to_pub_key('Alice')
+ + nick_to_pub_key('Bob'))
+ self.queues[GATEWAY_QUEUE].put((self.ts, packet))
+
+ # Test
+ self.assertIsNone(src_incoming(*self.args, unittest=True))
+ self.assertEqual(self.queues[DST_MESSAGE_QUEUE].qsize(), 0)
+ self.assertEqual(self.queues[M_TO_FLASK_QUEUE].qsize(), 2)
+
+ def test_group_exit_datagram(self):
+ # Setup
+ packet = self.create_packet(GROUP_MSG_EXIT_GROUP_HEADER
+ + bytes(GROUP_ID_LENGTH)
+ + nick_to_pub_key('Alice')
+ + nick_to_pub_key('Bob'))
+ self.queues[GATEWAY_QUEUE].put((self.ts, packet))
+
+ # Test
+ self.assertIsNone(src_incoming(*self.args, unittest=True))
+ self.assertEqual(self.queues[DST_MESSAGE_QUEUE].qsize(), 0)
+ self.assertEqual(self.queues[M_TO_FLASK_QUEUE].qsize(), 2)
+
+
+class TestDSTOutGoing(unittest.TestCase):
+
+ def test_loop(self):
+ # Setup
+ packet = b'test_packet'
+ queues = gen_queue_dict()
+ gateway = Gateway()
+
+ def queue_delayer():
+ """Place packets into queue after delay."""
+ time.sleep(0.01)
+ queues[DST_COMMAND_QUEUE].put(packet)
+ queues[DST_MESSAGE_QUEUE].put(packet)
+ time.sleep(0.01)
+ queues[UNITTEST_QUEUE].put(EXIT)
+
+ threading.Thread(target=queue_delayer).start()
+
+ # Test
+ side_effects = [EOFError, KeyboardInterrupt, None] + [None] * 100_000
+ with unittest.mock.patch('time.sleep', side_effect=side_effects):
+ self.assertIsNone(dst_outgoing(queues, gateway, unittest=True))
+ self.assertEqual(packet, gateway.packets[0])
+
+ # Teardown
+ tear_queues(queues)
+
+
+if __name__ == '__main__':
+ unittest.main(exit=False)
diff --git a/tests/rx/test_commands.py b/tests/rx/test_commands.py
deleted file mode 100644
index 7ed6af0..0000000
--- a/tests/rx/test_commands.py
+++ /dev/null
@@ -1,424 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see <https://www.gnu.org/licenses/>.
-"""
-
-import binascii
-import getpass
-import os
-import struct
-import time
-import unittest
-import zlib
-
-from datetime import datetime
-from multiprocessing import Queue
-
-from src.common.crypto import byte_padding, encrypt_and_sign
-from src.common.db_logs import write_log_entry
-from src.common.encoding import int_to_bytes
-from src.common.statics import *
-
-from src.rx.packet import PacketList
-from src.rx.commands import change_master_key, change_nick, change_setting, clear_active_window, contact_setting, exit_tfc, log_command
-from src.rx.commands import process_command, remove_contact, remove_log, reset_active_window, select_win_cmd, show_win_activity, wipe
-
-from tests.mock_classes import ContactList, GroupList, KeyList, MasterKey, RxWindow, Settings, WindowList
-from tests.utils import assembly_packet_creator, cleanup, ignored, TFCTestCase
-
-
-class TestProcessCommand(TFCTestCase):
-
- def setUp(self):
- self.ts = datetime.now()
- self.settings = Settings()
- self.master_key = MasterKey()
- self.group_list = GroupList()
- self.exit_queue = Queue()
- self.pubkey_buf = dict()
- self.window_list = WindowList(nicks=[LOCAL_ID])
- self.contact_list = ContactList(nicks=[LOCAL_ID])
- self.packet_list = PacketList(self.settings, self.contact_list)
- self.key_list = KeyList(nicks=[LOCAL_ID])
- self.key_set = self.key_list.get_keyset(LOCAL_ID)
- self.key_set.rx_key = bytes(KEY_LENGTH)
- self.key_set.rx_hek = bytes(KEY_LENGTH)
- self.key_set.tx_harac = 1
- self.key_set.rx_harac = 1
-
- def create_packet(self, data, header=C_S_HEADER):
- payload = zlib.compress(data, level=COMPRESSION_LEVEL)
- packet = header + byte_padding(payload)
- harac_in_bytes = int_to_bytes(self.key_set.tx_harac)
- encrypted_harac = encrypt_and_sign(harac_in_bytes, self.key_set.tx_hek)
- encrypted_message = encrypt_and_sign(packet, self.key_set.tx_key)
-
- return COMMAND_PACKET_HEADER + encrypted_harac + encrypted_message
-
- def test_incomplete_command_raises_fr(self):
- self.assertFR("Incomplete command.",
- process_command,
- self.ts, self.create_packet(b'ZZ', header=C_L_HEADER), self.window_list, self.packet_list,
- self.contact_list, self.key_list, self.group_list, self.settings,
- self.master_key, self.pubkey_buf, self.exit_queue)
-
- def test_invalid_command_header(self):
- self.assertFR("Error: Received an invalid command.",
- process_command,
- self.ts, self.create_packet(b'ZZ'), self.window_list, self.packet_list,
- self.contact_list, self.key_list, self.group_list, self.settings,
- self.master_key, self.pubkey_buf, self.exit_queue)
-
- def test_process_command(self):
- self.assertFR(f"Error: Could not find log database.",
- process_command,
- self.ts, self.create_packet(LOG_REMOVE_HEADER), self.window_list, self.packet_list,
- self.contact_list, self.key_list, self.group_list, self.settings,
- self.master_key, self.pubkey_buf, self.exit_queue)
-
-
-class TestShowWinActivity(TFCTestCase):
-
- def setUp(self):
- self.window_list = WindowList()
- self.window_list.windows = [RxWindow(name='Alice', unread_messages=4),
- RxWindow(name='Bob', unread_messages=15)]
-
- def test_function(self):
- self.assertPrints(f"""\
- ┌─────────────────┐
- │ Window activity │
- │ Alice: 4 │
- │ Bob: 15 │
- └─────────────────┘
-{5*(CURSOR_UP_ONE_LINE+CLEAR_ENTIRE_LINE)}""", show_win_activity, self.window_list)
-
-
-class TestSelectSystemWindows(unittest.TestCase):
-
- def setUp(self):
- self.window_list = WindowList()
- self.window_list.windows = [RxWindow(uid='alice@jabber.org', name='Alice'),
- RxWindow(uid='bob@jabber.org', name='Bob')]
-
- def test_window_selection(self):
- self.assertIsNone(select_win_cmd(b'alice@jabber.org', self.window_list))
- self.assertEqual(self.window_list.active_win.name, 'Alice')
-
- self.assertIsNone(select_win_cmd(b'bob@jabber.org', self.window_list))
- self.assertEqual(self.window_list.active_win.name, 'Bob')
-
- self.assertIsNone(select_win_cmd(WIN_TYPE_FILE.encode(), self.window_list))
- self.assertEqual(self.window_list.active_win.uid, WIN_TYPE_FILE)
-
-
-class TestClearActiveWindow(TFCTestCase):
-
- def test_function(self):
- self.assertPrints(CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER, clear_active_window)
-
-
-class TestResetActiveWindow(unittest.TestCase):
-
- def setUp(self):
- self.cmd_data = b'alice@jabber.org'
- self.window_list = WindowList()
- self.window_list.windows = [RxWindow(uid='alice@jabber.org', name='Alice'),
- RxWindow(uid='bob@jabber.org', name='Bob')]
- self.window = self.window_list.get_window('alice@jabber.org')
- self.window.message_log = [(datetime.now(), 'Hi Bob', 'alice@jabber.org', ORIGIN_CONTACT_HEADER)]
-
- def test_screen_reset(self):
- self.assertEqual(len(self.window.message_log), 1)
- self.assertIsNone(reset_active_window(self.cmd_data, self.window_list))
- self.assertEqual(len(self.window.message_log), 0)
-
-
-class TestExitTFC(unittest.TestCase):
-
- def setUp(self):
- self.exit_queue = Queue()
-
- def tearDown(self):
- while not self.exit_queue.empty():
- self.exit_queue.get()
- time.sleep(0.1)
- self.exit_queue.close()
-
- def test_function(self):
- self.assertIsNone(exit_tfc(self.exit_queue))
- time.sleep(0.1)
- self.assertEqual(self.exit_queue.qsize(), 1)
-
-
-class TestLogCommand(TFCTestCase):
-
- def setUp(self):
- self.cmd_data = b'alice@jabber.org' + US_BYTE + int_to_bytes(1)
- self.ts = datetime.now()
- self.window_list = WindowList(nicks=['Alice', 'Bob'])
- self.window = self.window_list.get_window('alice@jabber.org')
- self.window.type_print = 'contact'
- self.contact_list = ContactList(nicks=['Alice', 'Bob'])
- self.group_list = GroupList()
- self.settings = Settings()
- self.master_key = MasterKey()
-
- self.time = datetime.fromtimestamp(struct.unpack('
-
-""")
-
-
-class TestRemoveLog(TFCTestCase):
-
- def setUp(self):
- self.win_name = b'alice@jabber.org'
- self.settings = Settings()
- self.master_key = MasterKey()
-
- def test_remove_logfile(self):
- self.assertFR(f"Error: Could not find log database.",
- remove_log, self.win_name, self.settings, self.master_key)
-
-
-class TestChangeMasterKey(unittest.TestCase):
-
- def setUp(self):
- self.o_getpass = getpass.getpass
- self.ts = datetime.now()
- self.master_key = MasterKey()
- self.settings = Settings()
- self.contact_list = ContactList(nicks=[LOCAL_ID])
- self.window_list = WindowList(nicks=[LOCAL_ID])
- self.group_list = GroupList()
- self.key_list = KeyList()
- getpass.getpass = lambda _: 'a'
-
- def tearDown(self):
- getpass.getpass = self.o_getpass
- cleanup()
-
- def test_master_key_change(self):
- # Setup
- write_log_entry(F_S_HEADER + bytes(PADDING_LEN), 'alice@jabber.org', self.settings, self.master_key)
-
- # Test
- self.assertEqual(self.master_key.master_key, bytes(KEY_LENGTH))
- self.assertIsNone(change_master_key(self.ts, self.window_list, self.contact_list, self.group_list,
- self.key_list, self.settings, self.master_key))
- self.assertNotEqual(self.master_key.master_key, bytes(KEY_LENGTH))
-
-
-class TestChangeNick(TFCTestCase):
-
- def setUp(self):
- self.ts = datetime.now()
- self.contact_list = ContactList(nicks=['Alice'])
- self.window_list = WindowList(contact_list=self.contact_list)
- self.group_list = GroupList()
-
- def test_nick_change(self):
- # Setup
- cmd_data = b'alice@jabber.org' + US_BYTE + b'Alice_'
-
- # Test
- self.assertIsNone(change_nick(cmd_data, self.ts, self.window_list, self.contact_list))
- self.assertEqual(self.contact_list.get_contact('alice@jabber.org').nick, 'Alice_')
- self.assertEqual(self.window_list.get_window('alice@jabber.org').name, 'Alice_')
-
-
-class TestChangeSetting(TFCTestCase):
-
- def setUp(self):
- self.ts = datetime.now()
- self.window_list = WindowList()
- self.contact_list = ContactList()
- self.group_list = GroupList()
-
- def test_invalid_setting_raises_r(self):
- # Setup
- cmd_data = b'setting' + US_BYTE + b'True'
- settings = Settings(key_list=[''])
-
- # Test
- self.assertFR("Error: Invalid setting 'setting'",
- change_setting, cmd_data, self.ts, self.window_list, self.contact_list, self.group_list, settings)
-
- def test_valid_setting_change(self):
- # Setup
- cmd_data = b'serial_error_correction' + US_BYTE + b'5'
- settings = Settings(key_list=['serial_error_correction'])
-
- # Test
- self.assertIsNone(change_setting(cmd_data, self.ts, self.window_list, self.contact_list, self.group_list, settings))
-
-
-class TestContactSetting(TFCTestCase):
-
- def setUp(self):
- self.ts = datetime.fromtimestamp(1502750000)
- self.contact_list = ContactList(nicks=['Alice', 'Bob'])
- self.group_list = GroupList(groups=['test_group', 'test_group2'])
- self.window_list = WindowList(contact_list=self.contact_list,
- group_list=self.group_list)
-
- def test_invalid_window_raises_fr(self):
- # Setup
- cmd_data = ENABLE + US_BYTE + b'bob@jabber.org'
- header = CHANGE_LOGGING_HEADER
- self.contact_list = ContactList(nicks=['Alice'])
- self.window_list = WindowList(contact_list=self.contact_list,
- group_list=self.group_list)
- # Test
- self.assertFR("Error: Found no window for 'bob@jabber.org'",
- contact_setting, cmd_data, self.ts, self.window_list, self.contact_list, self.group_list, header)
-
- def test_setting_change_contact(self):
- # Setup
- self.window = self.window_list.get_window('bob@jabber.org')
- self.window.type = WIN_TYPE_CONTACT
- self.window.type_print = 'contact'
- self.window.window_contacts = self.contact_list.contacts
-
- # Test
- for attr, header in [('log_messages', CHANGE_LOGGING_HEADER), ('notifications', CHANGE_NOTIFY_HEADER), ('file_reception', CHANGE_FILE_R_HEADER)]:
- for s in [ENABLE, ENABLE, DISABLE, DISABLE]:
- cmd_data = s + US_BYTE + b'bob@jabber.org'
- self.assertIsNone(contact_setting(cmd_data, self.ts, self.window_list, self.contact_list, self.group_list, header))
- self.assertEqual(self.contact_list.get_contact('bob@jabber.org').__getattribute__(attr), (s==ENABLE))
-
- def test_setting_change_group(self):
- # Setup
- self.window = self.window_list.get_window('test_group')
- self.window.type = WIN_TYPE_GROUP
- self.window.type_print = 'group'
- self.window.window_contacts = self.group_list.get_group('test_group').members
-
- # Test
- for attr, header in [('log_messages', CHANGE_LOGGING_HEADER), ('notifications', CHANGE_NOTIFY_HEADER), ('file_reception', CHANGE_FILE_R_HEADER)]:
- for s in [ENABLE, ENABLE, DISABLE, DISABLE]:
- cmd_data = s + US_BYTE + b'test_group'
- self.assertIsNone(contact_setting(cmd_data, self.ts, self.window_list, self.contact_list, self.group_list, header))
-
- if header in [CHANGE_LOGGING_HEADER, CHANGE_NOTIFY_HEADER]:
- self.assertEqual(self.group_list.get_group('test_group').__getattribute__(attr), (s==ENABLE))
-
- if header == CHANGE_FILE_R_HEADER:
- for m in self.group_list.get_group('test_group').members:
- self.assertEqual(m.file_reception, (s==ENABLE))
-
- def test_setting_change_all(self):
- # Setup
- self.window = self.window_list.get_window('bob@jabber.org')
- self.window.type = WIN_TYPE_CONTACT
- self.window.type_print = 'contact'
- self.window.window_contacts = self.contact_list.contacts
-
- # Test
- for attr, header in [('log_messages', CHANGE_LOGGING_HEADER), ('notifications', CHANGE_NOTIFY_HEADER), ('file_reception', CHANGE_FILE_R_HEADER)]:
- for s in [ENABLE, ENABLE, DISABLE, DISABLE]:
- cmd_data = s.upper() + US_BYTE
- self.assertIsNone(contact_setting(cmd_data, self.ts, self.window_list, self.contact_list, self.group_list, header))
-
- if header in [CHANGE_LOGGING_HEADER, CHANGE_NOTIFY_HEADER]:
- for c in self.contact_list.get_list_of_contacts():
- self.assertEqual(c.__getattribute__(attr), (s==ENABLE))
- for g in self.group_list.groups:
- self.assertEqual(g.__getattribute__(attr), (s == ENABLE))
-
- if header == CHANGE_FILE_R_HEADER:
- for c in self.contact_list.get_list_of_contacts():
- self.assertEqual(c.__getattribute__(attr), (s==ENABLE))
-
-
-class TestRemoveContact(TFCTestCase):
-
- def setUp(self):
- self.ts = datetime.now()
- self.window_list = WindowList()
- self.cmd_data = b'bob@jabber.org'
-
- def test_no_contact_raises_fr(self):
- # Setup
- contact_list = ContactList(nicks=['Alice'])
- group_list = GroupList(groups=[])
- key_list = KeyList(nicks=['Alice'])
-
- # Test
- self.assertFR("RxM has no account 'bob@jabber.org' to remove.",
- remove_contact, self.cmd_data, self.ts, self.window_list, contact_list, group_list, key_list)
-
- def test_successful_removal(self):
- # Setup
- contact_list = ContactList(nicks=['Alice', 'Bob'])
- contact = contact_list.get_contact('bob@jabber.org')
- group_list = GroupList(groups=['testgroup', 'testgroup2'])
- key_list = KeyList(nicks=['Alice', 'Bob'])
- self.window_list.windows = [RxWindow(type=WIN_TYPE_GROUP)]
-
- # Test
- self.assertIsNone(remove_contact(self.cmd_data, self.ts, self.window_list, contact_list, group_list, key_list))
- self.assertFalse(contact_list.has_contact('bob@jabber.org'))
- self.assertFalse(key_list.has_keyset('bob@jabber.org'))
- for g in group_list:
- self.assertFalse(contact in g.members)
-
-
-class TestWipe(unittest.TestCase):
-
- def setUp(self):
- self.exit_queue = Queue()
-
- def test_wipe_command(self):
- self.assertIsNone(wipe(self.exit_queue))
- self.assertEqual(self.exit_queue.get(), WIPE)
-
-
-if __name__ == '__main__':
- unittest.main(exit=False)
diff --git a/tests/rx/test_commands_g.py b/tests/rx/test_commands_g.py
deleted file mode 100644
index c9aebe6..0000000
--- a/tests/rx/test_commands_g.py
+++ /dev/null
@@ -1,154 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
-"""
-
-import datetime
-import unittest
-
-from src.common.statics import *
-from src.rx.commands_g import group_create, group_add_member, group_rm_member, remove_group
-
-from tests.mock_classes import Contact, ContactList, GroupList, Settings, WindowList
-from tests.utils import TFCTestCase
-
-
-class TestGroupCreate(TFCTestCase):
-
- def setUp(self):
- self.ts = datetime.datetime.now()
- self.settings = Settings()
- self.window_list = WindowList()
-
- def test_too_many_purp_accounts_raises_fr(self):
- # Setup
- cl = ["contact_{}@jabber.org".format(n).encode() for n in range(21)]
- cmd_data = US_BYTE.join([b'test_group2'] + cl)
- group_list = GroupList(groups=['test_group'])
- contact_list = ContactList(nicks=["contact_{}".format(n) for n in range(21)])
- group = group_list.get_group('test_group')
- group.members = contact_list.contacts
-
- # Test
- self.assertFR("Error: TFC settings only allow 20 members per group.",
- group_create, cmd_data, self.ts, self.window_list, contact_list, group_list, self.settings)
-
- def test_full_group_list_raises_fr(self):
- # Setup
- cmd_data = US_BYTE.join([b'test_group_21', b'contact_21@jabber.org'])
- group_list = GroupList(groups=["test_group_{}".format(n) for n in range(20)])
- contact_list = ContactList(nicks=['Alice'])
-
- # Test
- self.assertFR("Error: TFC settings only allow 20 groups.",
- group_create, cmd_data, self.ts, self.window_list, contact_list, group_list, self.settings)
-
- def test_successful_group_creation(self):
- # Setup
- group_list = GroupList(groups=['test_group'])
- cmd_data = US_BYTE.join([b'test_group_2', b'bob@jabber.org'])
- contact_list = ContactList(nicks=['Alice', 'Bob'])
- window_list = WindowList(nicks =['Alice', 'Bob'],
- contact_list=contact_list,
- group_lis =group_list,
- packet_list =None,
- settings =Settings)
- # Test
- self.assertIsNone(group_create(cmd_data, self.ts, window_list, contact_list, group_list, self.settings))
- self.assertEqual(len(group_list.get_group('test_group')), 2)
-
-
-class TestGroupAddMember(TFCTestCase):
-
- def setUp(self):
- self.ts = datetime.datetime.now()
- self.settings = Settings()
- self.window_list = WindowList()
-
- def test_too_large_final_member_list_raises_fr(self):
- # Setup
- group_list = GroupList(groups=['test_group'])
- contact_list = ContactList(nicks=["contact_{}".format(n) for n in range(21)])
- group = group_list.get_group('test_group')
- group.members = contact_list.contacts[:20]
- cmd_data = US_BYTE.join([b'test_group', b'contact_20@jabber.org'])
-
- # Test
- self.assertFR("Error: TFC settings only allow 20 members per group.",
- group_add_member, cmd_data, self.ts, self.window_list, contact_list, group_list, self.settings)
-
- def test_successful_group_add(self):
- # Setup
- contact_list = ContactList(nicks=["contact_{}".format(n) for n in range(21)])
- group_list = GroupList(groups=['test_group'])
- group = group_list.get_group('test_group')
- group.members = contact_list.contacts[:19]
- cmd_data = US_BYTE.join([b'test_group', b'contact_20@jabber.org'])
-
- # Test
- self.assertIsNone(group_add_member(cmd_data, self.ts, self.window_list, contact_list, group_list, self.settings))
-
- group2 = group_list.get_group('test_group')
- self.assertEqual(len(group2), 20)
-
- for c in group2:
- self.assertIsInstance(c, Contact)
-
-
-class TestGroupRMMember(unittest.TestCase):
-
- def setUp(self):
- self.ts = datetime.datetime.now()
- self.window_list = WindowList()
- self.cmd_data = US_BYTE.join([b'test_group', b'contact_18@jabber.org', b'contact_20@jabber.org'])
- self.contact_list = ContactList(nicks=["contact_{}".format(n) for n in range(21)])
- self.group_list = GroupList(groups=['test_group'])
- self.group = self.group_list.get_group('test_group')
- self.group.members = self.contact_list.contacts[:19]
-
- def test_function(self):
- self.assertIsNone(group_rm_member(self.cmd_data, self.ts, self.window_list, self.contact_list, self.group_list))
- self.assertFalse(b'contact@jabber.org' in self.group.get_list_of_member_accounts())
-
-
-class TestRemoveGroup(TFCTestCase):
-
- def setUp(self):
- self.ts = datetime.datetime.now()
- self.window_list = WindowList()
- self.group_list = GroupList(groups=['test_group'])
-
- def test_missing_group_raises_fr(self):
- # Setup
- cmd_data = b'test_group_2'
-
- # Test
- self.assertFR("RxM has no group 'test_group_2' to remove.",
- remove_group, cmd_data, self.ts, self.window_list, self.group_list)
-
- def test_successful_remove(self):
- # Setup
- cmd_data = b'test_group'
-
- # Test
- self.assertIsNone(remove_group(cmd_data, self.ts, self.window_list, self.group_list))
- self.assertEqual(len(self.group_list.groups), 0)
-
-
-if __name__ == '__main__':
- unittest.main(exit=False)
diff --git a/tests/rx/test_files.py b/tests/rx/test_files.py
deleted file mode 100644
index 4d48ee8..0000000
--- a/tests/rx/test_files.py
+++ /dev/null
@@ -1,195 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
-"""
-
-import base64
-import binascii
-import builtins
-import datetime
-import os
-import shutil
-import unittest
-import zlib
-
-from src.common.crypto import encrypt_and_sign
-from src.common.encoding import b58encode, str_to_bytes
-from src.common.statics import *
-
-from src.rx.files import process_imported_file, process_received_file, store_unique
-
-from tests.mock_classes import Settings, WindowList
-from tests.utils import ignored, TFCTestCase
-
-
-class TestStoreUnique(unittest.TestCase):
-
- def setUp(self):
- self.f_data = os.urandom(100)
- self.f_dir = 'test_dir/'
- self.f_name = 'test_file'
-
- def tearDown(self):
- with ignored(FileNotFoundError):
- shutil.rmtree('test_dir/')
-
- def test_each_file_is_store_with_unique_name(self):
- self.assertEqual(store_unique(self.f_data, self.f_dir, self.f_name), 'test_file')
- self.assertEqual(store_unique(self.f_data, self.f_dir, self.f_name), 'test_file.1')
- self.assertEqual(store_unique(self.f_data, self.f_dir, self.f_name), 'test_file.2')
-
-
-class TestProcessReceivedFile(TFCTestCase):
-
- def setUp(self):
- self.nick = 'Alice'
- self.key = os.urandom(KEY_LENGTH)
-
- def tearDown(self):
- with ignored(FileNotFoundError):
- shutil.rmtree(DIR_RX_FILES)
-
- def test_invalid_structure_raises_fr(self):
- self.assertFR("Error: Received file had invalid structure.", process_received_file, b'testfile.txt', self.nick)
-
- def test_invalid_encoding_raises_fr(self):
- # Setup
- payload = binascii.unhexlify('3f264d4189d7a091') + US_BYTE + base64.b85encode(b'filedata')
-
- # Test
- self.assertFR("Error: Received file name had invalid encoding.", process_received_file, payload, self.nick)
-
- def test_invalid_name_raises_fr(self):
- # Setup
- payload = b'\x01filename' + US_BYTE + base64.b85encode(b'filedata')
-
- # Test
- self.assertFR("Error: Received file had an invalid name.", process_received_file, payload, self.nick)
-
- def test_invalid_data_raises_fr(self):
- # Setup
- payload = b'testfile.txt' + US_BYTE + base64.b85encode(b'filedata') + b'\x01'
-
- # Test
- self.assertFR("Error: Received file had invalid encoding.", process_received_file, payload, self.nick)
-
- def test_invalid_key_raises_fr(self):
- # Setup
- payload = b'testfile.txt' + US_BYTE + base64.b85encode(b'filedata')
-
- # Test
- self.assertFR("Error: Received file had an invalid key.", process_received_file, payload, self.nick)
-
- def test_decryption_fail_raises_fr(self):
- # Setup
- f_data = encrypt_and_sign(b'filedata', self.key)[::-1]
- payload = b'testfile.txt' + US_BYTE + base64.b85encode(f_data)
-
- # Test
- self.assertFR("Error: Decryption of file data failed.", process_received_file, payload, self.nick)
-
- def test_invalid_compression_raises_fr(self):
- # Setup
- compressed = zlib.compress(b'filedata', level=COMPRESSION_LEVEL)[::-1]
- f_data = encrypt_and_sign(compressed, self.key) + self.key
- payload = b'testfile.txt' + US_BYTE + base64.b85encode(f_data)
-
- # Test
- self.assertFR("Error: Decompression of file data failed.", process_received_file, payload, self.nick)
-
- def test_successful_reception(self):
- # Setup
- compressed = zlib.compress(b'filedata', level=COMPRESSION_LEVEL)
- f_data = encrypt_and_sign(compressed, self.key) + self.key
- payload = b'testfile.txt' + US_BYTE + base64.b85encode(f_data)
-
- # Test
- self.assertIsNone(process_received_file(payload, self.nick))
- self.assertTrue(os.path.isfile(f'{DIR_RX_FILES}Alice/testfile.txt'))
-
-
-class TestProcessImportedFile(TFCTestCase):
-
- def setUp(self):
- self.o_input = builtins.input
- self.settings = Settings()
- self.ts = datetime.datetime.now()
- self.window_list = WindowList(nicks=[LOCAL_ID])
- self.key = os.urandom(KEY_LENGTH)
- self.key_b58 = b58encode(self.key, file_key=True)
-
- input_list = ['91avARGdfge8E4tZfYLoxeJ5sGBdNJQH4kvjJoQFacbgwi1C2GD', self.key_b58]
- gen = iter(input_list)
- builtins.input = lambda _: str(next(gen))
-
- def tearDown(self):
- builtins.input = self.o_input
-
- with ignored(FileNotFoundError):
- shutil.rmtree(DIR_IMPORTED)
-
- def test_invalid_compression_raises_fr(self):
- # Setup
- data = os.urandom(1000)
- compressed = zlib.compress(data, level=COMPRESSION_LEVEL)
- compressed = compressed[:-2] + b'aa'
- packet = IMPORTED_FILE_HEADER + encrypt_and_sign(compressed, self.key)
- input_list = ['bad', self.key_b58]
- gen = iter(input_list)
- builtins.input = lambda _: str(next(gen))
-
- # Test
- self.assertFR("Error: Decompression of file data failed.",
- process_imported_file, self.ts, packet, self.window_list, self.settings)
-
- def test_invalid_name_encoding_raises_fr(self):
- # Setup
- file_name = binascii.unhexlify('8095b2f59d650ab7')
- data = file_name + os.urandom(1000)
- compressed = zlib.compress(data, level=COMPRESSION_LEVEL)
- packet = IMPORTED_FILE_HEADER + encrypt_and_sign(compressed, self.key)
-
- # Test
- self.assertFR("Error: Received file name had invalid encoding.",
- process_imported_file, self.ts, packet, self.window_list, self.settings)
-
- def test_invalid_name_raises_fr(self):
- # Setup
- file_name = str_to_bytes('\x01testfile.txt')
- data = file_name + os.urandom(1000)
- compressed = zlib.compress(data, level=COMPRESSION_LEVEL)
- packet = IMPORTED_FILE_HEADER + encrypt_and_sign(compressed, self.key)
-
- # Test
- self.assertFR("Error: Received file had an invalid name.",
- process_imported_file, self.ts, packet, self.window_list, self.settings)
-
- def test_valid_import(self):
- # Setup
- file_name = str_to_bytes('testfile.txt')
- data = file_name + os.urandom(1000)
- compressed = zlib.compress(data, level=COMPRESSION_LEVEL)
- packet = IMPORTED_FILE_HEADER + encrypt_and_sign(compressed, self.key)
-
- # Test
- self.assertIsNone(process_imported_file(self.ts, packet, self.window_list, self.settings))
- self.assertTrue(os.path.isfile(f"{DIR_IMPORTED}testfile.txt"))
-
-
-if __name__ == '__main__':
- unittest.main(exit=False)
diff --git a/tests/rx/test_key_exchanges.py b/tests/rx/test_key_exchanges.py
deleted file mode 100644
index 64dc449..0000000
--- a/tests/rx/test_key_exchanges.py
+++ /dev/null
@@ -1,354 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
-"""
-
-import binascii
-import builtins
-import datetime
-import getpass
-import os
-import subprocess
-import unittest
-
-from src.common.crypto import argon2_kdf, encrypt_and_sign
-from src.common.encoding import b58encode
-from src.common.statics import *
-
-from src.rx.key_exchanges import add_psk_tx_keys, add_x25519_keys, import_psk_rx_keys
-from src.rx.key_exchanges import local_key_installed, process_local_key, process_public_key
-
-from tests.mock_classes import Contact, ContactList, KeyList, KeySet, Settings, WindowList
-from tests.utils import ignored, TFCTestCase
-
-
-class TestProcessLocalKey(TFCTestCase):
-
- def setUp(self):
- self.o_input = builtins.input
- self.contact_list = ContactList(nicks=[LOCAL_ID])
- self.key_list = KeyList( nicks=[LOCAL_ID])
- self.window_list = WindowList( nicks=[LOCAL_ID])
- self.settings = Settings()
- self.ts = datetime.datetime.now()
-
- def tearDown(self):
- builtins.input = self.o_input
-
- def test_invalid_decryption_key_raises_fr(self):
- # Setup
- packet = b''
- builtins.input = lambda _: '5JJwZE46Eic9B8sKJ8Qocyxa8ytUJSfcqRo7Hr5ES7YgFGeJjCJ'
- self.key_list.keysets = []
-
- # Test
- self.assertFR("Error: Incorrect key decryption key.",
- process_local_key, self.ts, packet, self.window_list, self.contact_list, self.key_list, self.settings)
-
- def test_successful_local_key_processing_existing_local_key(self):
- # Setup
- conf_code = os.urandom(1)
- key = os.urandom(KEY_LENGTH)
- hek = os.urandom(KEY_LENGTH)
- kek = os.urandom(KEY_LENGTH)
- packet = LOCAL_KEY_PACKET_HEADER + encrypt_and_sign(key + hek + conf_code, key=kek)
-
- input_list = ['5JJwZE46Eic9B8sKJ8Qocyxa8ytUJSfcqRo7Hr5ES7YgFGeJjCJ', b58encode(kek)]
- gen = iter(input_list)
- builtins.input = lambda _: str(next(gen))
-
- # Test
- self.assertIsNone(process_local_key(self.ts, packet, self.window_list, self.contact_list, self.key_list, self.settings))
-
- def test_successful_local_key_processing_existing_bootstrap(self):
- # Setup
- conf_code = os.urandom(1)
- key = os.urandom(KEY_LENGTH)
- hek = os.urandom(KEY_LENGTH)
- kek = os.urandom(KEY_LENGTH)
- packet = LOCAL_KEY_PACKET_HEADER + encrypt_and_sign(key + hek + conf_code, key=kek)
-
- input_list = [b58encode(kek)]
- gen = iter(input_list)
- builtins.input = lambda _: str(next(gen))
-
- self.key_list.keysets = []
-
- # Test
- self.assertIsNone(process_local_key(self.ts, packet, self.window_list, self.contact_list, self.key_list, self.settings))
- self.assertEqual(self.window_list.active_win.uid, LOCAL_ID)
-
-
-class TestLocalKeyInstalled(TFCTestCase):
-
- def setUp(self):
- self.ts = datetime.datetime.now()
- self.window_list = WindowList(nicks=[LOCAL_ID])
- self.contact_list = ContactList(nicks=[LOCAL_ID])
-
- def test_local_key_installed(self):
- self.assertPrints(f"""\
- ┌────────────────────────────────────────────┐
- │ Successfully completed local key exchange. │
- └────────────────────────────────────────────┘
-{CLEAR_ENTIRE_SCREEN+CURSOR_LEFT_UP_CORNER}
- Waiting for new contacts
-
-""", local_key_installed, self.ts, self.window_list, self.contact_list)
-
-
-class TestProcessPublicKey(TFCTestCase):
-
- def setUp(self):
- self.ts = datetime.datetime.now()
- self.window_list = WindowList()
- self.settings = Settings()
- self.pubkey_buf = dict()
-
- def test_invalid_account_encoding_raises_fr(self):
- packet = PUBLIC_KEY_PACKET_HEADER + os.urandom(KEY_LENGTH) + ORIGIN_CONTACT_HEADER + binascii.unhexlify('a466c02c221cb135')
-
- self.assertFR("Error! Account for received public key had invalid encoding.",
- process_public_key, self.ts, packet, self.window_list, self.settings, self.pubkey_buf)
-
- def test_invalid_origin_raises_fr(self):
- packet = PUBLIC_KEY_PACKET_HEADER + os.urandom(KEY_LENGTH) + b'x' + b'alice@jabber.org'
-
- self.assertFR("Error! Received public key had an invalid origin header.",
- process_public_key, self.ts, packet, self.window_list, self.settings, self.pubkey_buf)
-
- def test_receive_public_key_from_contact(self):
- packet = PUBLIC_KEY_PACKET_HEADER + KEY_LENGTH*b'a' + ORIGIN_CONTACT_HEADER + b'alice@jabber.org'
-
- self.assertPrints("""\
- ┌─────────────────────────────────────────────────────────────────────┐
- │ Received public key from alice@jabber.org: │
- │ A B C D E F G H I J K L M N O P Q │
- │ 5JZ B2s 2RC tRU unK iqM bb6 rAj 3Z7 TkJ wa8 zkn L1c fTF pWo QAr d6n │
- └─────────────────────────────────────────────────────────────────────┘
-""", process_public_key, self.ts, packet, self.window_list, self.settings, self.pubkey_buf)
-
- def test_outgoing_public_key_loads_most_recent_pub_key_from_contact(self):
- self.pubkey_buf['alice@jabber.org'] = KEY_LENGTH * b'a'
- packet = PUBLIC_KEY_PACKET_HEADER + KEY_LENGTH * b'a' + ORIGIN_USER_HEADER + b'alice@jabber.org'
-
- self.assertPrints(CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER + """\
- ┌─────────────────────────────────────────────────────────────────────┐
- │ Public key for alice@jabber.org: │
- │ A B C D E F G H I J K L M N O P Q │
- │ 5JZ B2s 2RC tRU unK iqM bb6 rAj 3Z7 TkJ wa8 zkn L1c fTF pWo QAr d6n │
- └─────────────────────────────────────────────────────────────────────┘
-""", process_public_key, self.ts, packet, self.window_list, self.settings, self.pubkey_buf)
-
-
-class TestX25519Command(unittest.TestCase):
-
- def setUp(self):
- self.ts = datetime.datetime.now()
- self.window_list = WindowList(nicks=[LOCAL_ID])
- self.contact_list = ContactList()
- self.key_list = KeyList()
- self.settings = Settings()
- self.pubkey_buf = {'alice@jabber.org': KEY_LENGTH*b'a'}
-
- self.packet = KEY_LENGTH * b'\x01' + KEY_LENGTH * b'\x02' \
- + KEY_LENGTH * b'\x03' + KEY_LENGTH * b'\x04' \
- + b'alice@jabber.org' + US_BYTE + b'Alice'
-
- def test_add_x25519keys(self):
- self.assertIsNone(add_x25519_keys(self.packet, self.ts, self.window_list, self.contact_list,
- self.key_list, self.settings, self.pubkey_buf))
-
- keyset = self.key_list.get_keyset('alice@jabber.org')
- self.assertIsInstance(keyset, KeySet)
-
- self.assertEqual(keyset.rx_account, 'alice@jabber.org')
- self.assertEqual(keyset.tx_key, KEY_LENGTH * b'\x01')
- self.assertEqual(keyset.tx_hek, KEY_LENGTH * b'\x02')
- self.assertEqual(keyset.rx_key, KEY_LENGTH * b'\x03')
- self.assertEqual(keyset.rx_hek, KEY_LENGTH * b'\x04')
-
- contact = self.contact_list.get_contact('alice@jabber.org')
- self.assertIsInstance(contact, Contact)
-
- self.assertEqual(contact.rx_account, 'alice@jabber.org')
- self.assertEqual(contact.nick, 'Alice')
- self.assertEqual(contact.rx_fingerprint, bytes(FINGERPRINT_LEN))
- self.assertEqual(contact.tx_fingerprint, bytes(FINGERPRINT_LEN))
-
- self.assertFalse('alice@jabber.org' in self.pubkey_buf)
-
-
-class TestAddPSKTxKeys(unittest.TestCase):
-
- def setUp(self):
- self.ts = datetime.datetime.now()
- self.window_list = WindowList(nicks=[LOCAL_ID])
- self.contact_list = ContactList()
- self.key_list = KeyList()
- self.settings = Settings()
- self.pubkey_buf = {'alice@jabber.org' : KEY_LENGTH*b'a'}
- self.packet = KEY_LENGTH * b'\x01' + KEY_LENGTH * b'\x02' + b'alice@jabber.org' + US_BYTE + b'Alice'
-
- def test_add_psk_tx_keys(self):
- self.assertIsNone(add_psk_tx_keys(self.packet, self.ts, self.window_list, self.contact_list,
- self.key_list, self.settings, self.pubkey_buf))
-
- keyset = self.key_list.get_keyset('alice@jabber.org')
- self.assertIsInstance(keyset, KeySet)
- self.assertEqual(keyset.rx_account, 'alice@jabber.org')
- self.assertEqual(keyset.tx_key, KEY_LENGTH * b'\x01')
- self.assertEqual(keyset.tx_hek, KEY_LENGTH * b'\x02')
- self.assertEqual(keyset.rx_key, bytes(KEY_LENGTH))
- self.assertEqual(keyset.rx_hek, bytes(KEY_LENGTH))
-
- contact = self.contact_list.get_contact('alice@jabber.org')
- self.assertIsInstance(contact, Contact)
-
- self.assertEqual(contact.rx_account, 'alice@jabber.org')
- self.assertEqual(contact.nick, 'Alice')
- self.assertEqual(contact.rx_fingerprint, bytes(FINGERPRINT_LEN))
- self.assertEqual(contact.tx_fingerprint, bytes(FINGERPRINT_LEN))
-
- self.assertFalse('alice@jabber.org' in self.pubkey_buf)
-
-
-class TestImportPSKRxKeys(TFCTestCase):
-
- class MockPopen(object):
- def __init__(self, cmd, shell):
- self.cmd = cmd
- self.shell = shell
-
- def wait(self):
- pass
-
- def setUp(self):
- self.o_input = builtins.input
- self.o_getpass = getpass.getpass
- self.o_sp = subprocess.Popen
-
- self.packet = b'alice@jabber.org'
- self.ts = datetime.datetime.now()
- self.window_list = WindowList( nicks=['Alice', LOCAL_ID])
- self.contact_list = ContactList(nicks=['Alice', LOCAL_ID])
- self.key_list = KeyList( nicks=['Alice', LOCAL_ID])
- self.settings = Settings(disable_gui_dialog=True)
-
- builtins.input = lambda _: 'ut_psk'
-
- def tearDown(self):
- builtins.input = self.o_input
- getpass.getpass = self.o_getpass
- subprocess.Popen = self.o_sp
-
- with ignored(OSError):
- os.remove('ut_psk')
-
- def test_unknown_account_raises_fr(self):
- self.assertFR("Error: Unknown account 'bob@jabber.org'",
- import_psk_rx_keys, b'bob@jabber.org', self.ts, self.window_list, self.contact_list, self.key_list, self.settings)
-
- def test_invalid_psk_data_raises_fr(self):
- # Setup
- with open('ut_psk', 'wb+') as f:
- f.write(os.urandom(135))
-
- # Test
- self.assertFR("Error: Invalid PSK data in file.",
- import_psk_rx_keys, self.packet, self.ts, self.window_list, self.contact_list, self.key_list, self.settings)
-
- def test_invalid_keys_raise_fr(self):
- # Setup
- keyset = self.key_list.get_keyset('alice@jabber.org')
- keyset.rx_key = bytes(KEY_LENGTH)
- keyset.rx_hek = bytes(KEY_LENGTH)
- password = 'password'
- input_list = ['bad', password]
- gen = iter(input_list)
- getpass.getpass = lambda _: str(next(gen))
-
- salt = os.urandom(ARGON2_SALT_LEN)
- rx_key = bytes(KEY_LENGTH)
- rx_hek = os.urandom(KEY_LENGTH)
- kek, _ = argon2_kdf(password, salt, parallelism=1)
- ct_tag = encrypt_and_sign(rx_key + rx_hek, key=kek)
-
- with open('ut_psk', 'wb+') as f:
- f.write(salt + ct_tag)
-
- # Test
- self.assertFR("Error: Received invalid keys from contact.",
- import_psk_rx_keys, self.packet, self.ts, self.window_list, self.contact_list, self.key_list, self.settings)
-
- def test_valid_psk(self):
- # Setup
- keyset = self.key_list.get_keyset('alice@jabber.org')
- keyset.rx_key = bytes(KEY_LENGTH)
- keyset.rx_hek = bytes(KEY_LENGTH)
-
- getpass.getpass = lambda _: 'testpassword'
- password = 'testpassword'
- salt = os.urandom(ARGON2_SALT_LEN)
- rx_key = os.urandom(KEY_LENGTH)
- rx_hek = os.urandom(KEY_LENGTH)
- kek, _ = argon2_kdf(password, salt, parallelism=1)
- ct_tag = encrypt_and_sign(rx_key + rx_hek, key=kek)
-
- with open('ut_psk', 'wb+') as f:
- f.write(salt + ct_tag)
-
- # Test
- self.assertTrue(os.path.isfile('ut_psk'))
- self.assertIsNone(import_psk_rx_keys(self.packet, self.ts, self.window_list, self.contact_list, self.key_list, self.settings))
- self.assertFalse(os.path.isfile('ut_psk'))
- self.assertEqual(keyset.rx_key, rx_key)
- self.assertEqual(keyset.rx_hek, rx_hek)
-
- def test_valid_psk_overwrite_failure(self):
- # Setup
- keyset = self.key_list.get_keyset('alice@jabber.org')
- keyset.rx_key = bytes(KEY_LENGTH)
- keyset.rx_hek = bytes(KEY_LENGTH)
-
- input_list = ['ut_psk', '']
- gen = iter(input_list)
- builtins.input = lambda _: next(gen)
- subprocess.Popen = TestImportPSKRxKeys.MockPopen
-
- getpass.getpass = lambda _: 'testpassword'
- password = 'testpassword'
- salt = os.urandom(ARGON2_SALT_LEN)
- rx_key = os.urandom(KEY_LENGTH)
- rx_hek = os.urandom(KEY_LENGTH)
- kek, _ = argon2_kdf(password, salt, parallelism=1)
- ct_tag = encrypt_and_sign(rx_key + rx_hek, key=kek)
-
- with open('ut_psk', 'wb+') as f:
- f.write(salt + ct_tag)
-
- # Test
- self.assertTrue(os.path.isfile('ut_psk'))
- self.assertIsNone(import_psk_rx_keys(self.packet, self.ts, self.window_list, self.contact_list, self.key_list, self.settings))
- self.assertTrue(os.path.isfile('ut_psk'))
- self.assertEqual(keyset.rx_key, rx_key)
- self.assertEqual(keyset.rx_hek, rx_hek)
-
-
-if __name__ == '__main__':
- unittest.main(exit=False)
diff --git a/tests/rx/test_messages.py b/tests/rx/test_messages.py
deleted file mode 100644
index 09163ef..0000000
--- a/tests/rx/test_messages.py
+++ /dev/null
@@ -1,330 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
-"""
-
-import binascii
-import os
-import shutil
-import unittest
-
-from datetime import datetime
-
-from src.common.encoding import int_to_bytes
-from src.common.statics import *
-
-from src.rx.messages import process_message
-from src.rx.windows import WindowList
-from src.rx.packet import PacketList
-
-from tests.mock_classes import ContactList, KeyList, GroupList, Settings, MasterKey
-from tests.utils import assembly_packet_creator, cleanup, ignored, TFCTestCase
-
-
-class TestProcessMessage(TFCTestCase):
-
- def setUp(self):
- self.msg = ("Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum consectetur purus quis"
- " dapibus. Fusce venenatis lacus ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed bibendu"
- "m velit maximus in. Aliquam ac metus risus. Sed cursus ornare luctus. Integer aliquet lectus id ma"
- "ssa blandit imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget luctus nisl. Sed ut el"
- "it iaculis, faucibus lacus eget, sodales magna. Nunc sed commodo arcu. In hac habitasse platea dic"
- "tumst. Integer luctus aliquam justo, at vestibulum dolor iaculis ac. Etiam laoreet est eget odio r"
- "utrum, vel malesuada lorem rhoncus. Cras finibus in neque eu euismod. Nulla facilisi. Nunc nec ali"
- "quam quam, quis ullamcorper leo. Nunc egestas lectus eget est porttitor, in iaculis felis sceleris"
- "que. In sem elit, fringilla id viverra commodo, sagittis varius purus. Pellentesque rutrum loborti"
- "s neque a facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu.").encode()
-
- self.ts = datetime.now()
- self.master_key = MasterKey()
- self.settings = Settings(logfile_masking=True)
-
- self.contact_list = ContactList(nicks=['Alice', 'Bob', 'Charlie', LOCAL_ID])
- self.key_list = KeyList( nicks=['Alice', 'Bob', 'Charlie', LOCAL_ID])
- self.group_list = GroupList( groups=['testgroup'])
- self.packet_list = PacketList(contact_list=self.contact_list, settings=self.settings)
- self.window_list = WindowList(contact_list=self.contact_list, settings=self.settings, group_list=self.group_list, packet_list=self.packet_list)
- self.group_list.get_group('testgroup').log_messages = True
- for account in self.contact_list.get_list_of_accounts():
- keyset = self.key_list.get_keyset(account)
- keyset.tx_harac = 1
- keyset.rx_harac = 1
- keyset.tx_hek = KEY_LENGTH * b'\x01'
- keyset.rx_hek = KEY_LENGTH * b'\x01'
- keyset.tx_key = KEY_LENGTH * b'\x01'
- keyset.rx_key = KEY_LENGTH * b'\x01'
-
- self.message = b'testgroup' + US_BYTE + b'bob@jabber.org' + US_BYTE + b'charlie@jabber.org'
-
- def tearDown(self):
- cleanup()
- with ignored(FileNotFoundError):
- shutil.rmtree(DIR_RX_FILES)
-
- # Private messages
- def test_private_msg_from_contact(self):
- # Setup
- assembly_ct_list = assembly_packet_creator(MESSAGE, self.msg, ORIGIN_CONTACT_HEADER, encrypt=True)
-
- # Test
- for p in assembly_ct_list:
- self.assertIsNone(process_message(self.ts, p, self.window_list, self.packet_list, self.contact_list,
- self.key_list, self.group_list, self.settings, self.master_key))
- self.assertEqual(os.path.getsize(f'{DIR_USER_DATA}ut_logs'), len(assembly_ct_list)*LOG_ENTRY_LENGTH)
-
- def test_private_msg_from_user(self):
- # Setup
- assembly_ct_list = assembly_packet_creator(MESSAGE, self.msg, ORIGIN_USER_HEADER, encrypt=True)
-
- # Test
- for p in assembly_ct_list:
- self.assertIsNone(process_message(self.ts, p, self.window_list, self.packet_list, self.contact_list,
- self.key_list, self.group_list, self.settings, self.master_key))
- self.assertEqual(os.path.getsize(f'{DIR_USER_DATA}ut_logs'), len(assembly_ct_list) * LOG_ENTRY_LENGTH)
-
- # Whispered messages
- def test_whisper_msg_from_contact(self):
- # Setup
- assembly_ct_list = assembly_packet_creator(MESSAGE, self.msg, ORIGIN_CONTACT_HEADER, encrypt=True, header=WHISPER_MESSAGE_HEADER)
-
- # Test
- for p in assembly_ct_list[:-1]:
- self.assertIsNone(process_message(self.ts, p, self.window_list, self.packet_list, self.contact_list,
- self.key_list, self.group_list, self.settings, self.master_key))
-
- for p in assembly_ct_list[-1:]:
- self.assertFR("Key message message complete.",
- process_message, self.ts, p, self.window_list, self.packet_list,
- self.contact_list, self.key_list, self.group_list, self.settings, self.master_key)
- self.assertEqual(os.path.getsize(f'{DIR_USER_DATA}ut_logs'), len(assembly_ct_list)*LOG_ENTRY_LENGTH)
-
- def test_whisper_msg_from_user(self):
- # Setup
- assembly_ct_list = assembly_packet_creator(MESSAGE, self.msg, ORIGIN_USER_HEADER, encrypt=True, header=WHISPER_MESSAGE_HEADER)
-
- # Test
- for p in assembly_ct_list[:-1]:
- self.assertIsNone(process_message(self.ts, p, self.window_list, self.packet_list, self.contact_list,
- self.key_list, self.group_list, self.settings, self.master_key))
-
- for p in assembly_ct_list[-1:]:
- self.assertFR("Key message message complete.",
- process_message, self.ts, p, self.window_list, self.packet_list,
- self.contact_list, self.key_list, self.group_list, self.settings, self.master_key)
- self.assertEqual(os.path.getsize(f'{DIR_USER_DATA}ut_logs'), len(assembly_ct_list)*LOG_ENTRY_LENGTH)
-
- def test_empty_whisper_msg_from_user(self):
- # Setup
- assembly_ct_list = assembly_packet_creator(MESSAGE, b' ', ORIGIN_USER_HEADER, encrypt=True, header=WHISPER_MESSAGE_HEADER)
-
- # Test
- for p in assembly_ct_list[:-1]:
- self.assertIsNone(process_message(self.ts, p, self.window_list, self.packet_list, self.contact_list,
- self.key_list, self.group_list, self.settings, self.master_key))
-
- for p in assembly_ct_list[-1:]:
- self.assertFR("Key message message complete.",
- process_message, self.ts, p, self.window_list, self.packet_list,
- self.contact_list, self.key_list, self.group_list, self.settings, self.master_key)
- self.assertEqual(os.path.getsize(f'{DIR_USER_DATA}ut_logs'), len(assembly_ct_list)*LOG_ENTRY_LENGTH)
-
- # Group messages
- def test_invalid_encoding_raises_fr(self):
- encrypted_packet = assembly_packet_creator(MESSAGE, b'test', ORIGIN_CONTACT_HEADER, group_name='testgroup', encrypt=True, break_g_name=True)[0]
-
- # Test
- self.assertFR("Error: Received an invalid group message.",
- process_message, self.ts, encrypted_packet, self.window_list, self.packet_list,
- self.contact_list, self.key_list, self.group_list, self.settings, self.master_key)
- self.assertEqual(os.path.getsize(f'{DIR_USER_DATA}ut_logs'), LOG_ENTRY_LENGTH)
-
- def test_invalid_message_header_raises_fr(self):
- # Setup
- encrypted_packet = assembly_packet_creator(MESSAGE, b'testgroup', ORIGIN_CONTACT_HEADER, header=b'Z', encrypt=True)[0]
-
- # Test
- self.assertFR("Error: Message from contact had an invalid header.",
- process_message, self.ts, encrypted_packet, self.window_list, self.packet_list,
- self.contact_list, self.key_list, self.group_list, self.settings, self.master_key)
- self.assertEqual(os.path.getsize(f'{DIR_USER_DATA}ut_logs'), LOG_ENTRY_LENGTH)
-
- def test_invalid_window_raises_fr(self):
- # Setup
- encrypted_packet = assembly_packet_creator(MESSAGE, b'test', ORIGIN_CONTACT_HEADER, group_name='test_group', encrypt=True)[0]
-
- # Test
- self.assertFR("Error: Received message to unknown group.",
- process_message, self.ts, encrypted_packet, self.window_list, self.packet_list,
- self.contact_list, self.key_list, self.group_list, self.settings, self.master_key)
- self.assertEqual(os.path.getsize(f'{DIR_USER_DATA}ut_logs'), LOG_ENTRY_LENGTH)
-
- def test_contact_not_in_group_raises_fr(self):
- # Setup
- encrypted_packet = assembly_packet_creator(MESSAGE, b'test', ORIGIN_CONTACT_HEADER, group_name='testgroup', encrypt=True, origin_acco=b'charlie@jabber.org')[0]
-
- # Test
- self.assertFR("Error: Account is not member of group.",
- process_message, self.ts, encrypted_packet, self.window_list, self.packet_list,
- self.contact_list, self.key_list, self.group_list, self.settings, self.master_key)
- self.assertEqual(os.path.getsize(f'{DIR_USER_DATA}ut_logs'), LOG_ENTRY_LENGTH)
-
- def test_normal_group_msg_from_contact(self):
- # Setup
- assembly_ct_list = assembly_packet_creator(MESSAGE, self.msg, ORIGIN_CONTACT_HEADER, group_name='testgroup', encrypt=True)
-
- for p in assembly_ct_list:
- self.assertIsNone(process_message(self.ts, p, self.window_list, self.packet_list, self.contact_list,
- self.key_list, self.group_list, self.settings, self.master_key))
- self.assertEqual(os.path.getsize(f'{DIR_USER_DATA}ut_logs'), len(assembly_ct_list)*LOG_ENTRY_LENGTH)
-
- def test_normal_group_msg_from_user(self):
- # Setup
- assembly_ct_list = assembly_packet_creator(MESSAGE, self.msg, ORIGIN_USER_HEADER, group_name='testgroup', encrypt=True)
-
- for p in assembly_ct_list:
- self.assertIsNone(process_message(self.ts, p, self.window_list, self.packet_list, self.contact_list,
- self.key_list, self.group_list, self.settings, self.master_key))
- self.assertEqual(os.path.getsize(f'{DIR_USER_DATA}ut_logs'), len(assembly_ct_list)*LOG_ENTRY_LENGTH)
-
- # Group management messages
- def test_group_invitation_msg_from_contact(self):
- # Setup
- assembly_ct_list = assembly_packet_creator(MESSAGE, self.message, ORIGIN_CONTACT_HEADER,
- header=GROUP_MSG_INVITEJOIN_HEADER, encrypt=True)
-
- # Test
- for p in assembly_ct_list[:-1]:
- self.assertIsNone(process_message(self.ts, p, self.window_list, self.packet_list, self.contact_list,
- self.key_list, self.group_list, self.settings, self.master_key))
-
- for p in assembly_ct_list[-1:]:
- self.assertFR("Group management message complete.",
- process_message, self.ts, p, self.window_list, self.packet_list,
- self.contact_list, self.key_list, self.group_list, self.settings, self.master_key)
- self.assertEqual(os.path.getsize(f'{DIR_USER_DATA}ut_logs'), len(assembly_ct_list)*LOG_ENTRY_LENGTH)
-
- def test_group_invitation_msg_from_user(self):
- # Setup
- assembly_ct_list = assembly_packet_creator(MESSAGE, self.message, ORIGIN_USER_HEADER,
- header=GROUP_MSG_INVITEJOIN_HEADER, encrypt=True)
-
- # Test
- for p in assembly_ct_list[:-1]:
- self.assertIsNone(process_message(self.ts, p, self.window_list, self.packet_list, self.contact_list,
- self.key_list, self.group_list, self.settings, self.master_key))
-
- for p in assembly_ct_list[-1:]:
- self.assertFR("Ignored group management message from user.",
- process_message, self.ts, p, self.window_list, self.packet_list,
- self.contact_list, self.key_list, self.group_list, self.settings, self.master_key)
- self.assertEqual(os.path.getsize(f'{DIR_USER_DATA}ut_logs'), len(assembly_ct_list)*LOG_ENTRY_LENGTH)
-
- def test_group_add_member_msg_from_contact(self):
- # Setup
- assembly_ct_list = assembly_packet_creator(MESSAGE, self.message, ORIGIN_CONTACT_HEADER,
- header=GROUP_MSG_MEMBER_ADD_HEADER, encrypt=True)
-
- # Test
- for p in assembly_ct_list[:-1]:
- self.assertIsNone(process_message(self.ts, p, self.window_list, self.packet_list, self.contact_list,
- self.key_list, self.group_list, self.settings, self.master_key))
-
- for p in assembly_ct_list[-1:]:
- self.assertFR("Group management message complete.",
- process_message, self.ts, p, self.window_list, self.packet_list,
- self.contact_list, self.key_list, self.group_list, self.settings, self.master_key)
- self.assertEqual(os.path.getsize(f'{DIR_USER_DATA}ut_logs'), len(assembly_ct_list)*LOG_ENTRY_LENGTH)
-
- def test_group_remove_member_msg_from_contact(self):
- # Setup
- assembly_ct_list = assembly_packet_creator(MESSAGE, self.message, ORIGIN_CONTACT_HEADER,
- header=GROUP_MSG_MEMBER_REM_HEADER, encrypt=True)
-
- # Test
- for p in assembly_ct_list[:-1]:
- self.assertIsNone(process_message(self.ts, p, self.window_list, self.packet_list, self.contact_list,
- self.key_list, self.group_list, self.settings, self.master_key))
-
- for p in assembly_ct_list[-1:]:
- self.assertFR("Group management message complete.",
- process_message, self.ts, p, self.window_list, self.packet_list,
- self.contact_list, self.key_list, self.group_list, self.settings, self.master_key)
- self.assertEqual(os.path.getsize(f'{DIR_USER_DATA}ut_logs'), len(assembly_ct_list)*LOG_ENTRY_LENGTH)
-
- def test_group_exit_msg_from_contact(self):
- # Setup
- assembly_ct_list = assembly_packet_creator(MESSAGE, b'testgroup', ORIGIN_CONTACT_HEADER,
- header=GROUP_MSG_EXIT_GROUP_HEADER, encrypt=True)
-
- # Test
- for p in assembly_ct_list[:-1]:
- self.assertIsNone(process_message(self.ts, p, self.window_list, self.packet_list, self.contact_list,
- self.key_list, self.group_list, self.settings, self.master_key))
-
- for p in assembly_ct_list[-1:]:
- self.assertFR("Group management message complete.",
- process_message, self.ts, p, self.window_list, self.packet_list,
- self.contact_list, self.key_list, self.group_list, self.settings, self.master_key)
- self.assertEqual(os.path.getsize(f'{DIR_USER_DATA}ut_logs'), len(assembly_ct_list)*LOG_ENTRY_LENGTH)
-
- def test_invalid_encoding_in_group_management_message_raises_fr_but_is_logged(self):
- # Setup
- message = b'testgroup' + US_BYTE + b'bob@jabber.org' + US_BYTE + binascii.unhexlify('a466c02c221cb135')
- encrypted_packet = assembly_packet_creator(MESSAGE, message, ORIGIN_CONTACT_HEADER, header=GROUP_MSG_INVITEJOIN_HEADER, encrypt=True)[0]
-
- self.settings.logfile_masking = True
- self.contact_list.get_contact('bob@jabber.org').log_messages = True
-
- # Test
- self.assertFR("Error: Received group management message had invalid encoding.",
- process_message, self.ts, encrypted_packet, self.window_list, self.packet_list,
- self.contact_list, self.key_list, self.group_list, self.settings, self.master_key)
- self.assertEqual(os.path.getsize(f'{DIR_USER_DATA}ut_logs'), LOG_ENTRY_LENGTH)
-
- # Files
- def test_file(self):
- # Setup
- assembly_ct_list = assembly_packet_creator(FILE, origin=ORIGIN_CONTACT_HEADER, encrypt=True)
-
- # Test
- for p in assembly_ct_list[:-1]:
- self.assertIsNone(process_message(self.ts, p, self.window_list, self.packet_list, self.contact_list,
- self.key_list, self.group_list, self.settings, self.master_key))
-
- for p in assembly_ct_list[-1:]:
- self.assertFR("File storage complete.",
- process_message, self.ts, p, self.window_list, self.packet_list,
- self.contact_list, self.key_list, self.group_list, self.settings, self.master_key)
- self.assertEqual(os.path.getsize(f'{DIR_USER_DATA}ut_logs'), len(assembly_ct_list)*LOG_ENTRY_LENGTH)
-
- def test_file_file_reception_is_disabled(self):
- # Setup
- payload = int_to_bytes(1) + int_to_bytes(2) + b'testfile.txt' + US_BYTE + os.urandom(50)
- encrypted_packet = assembly_packet_creator(FILE, payload=payload, origin=ORIGIN_CONTACT_HEADER, encrypt=True)[0]
-
- self.contact_list.get_contact('alice@jabber.org').file_reception = False
-
- # Test
- self.assertFR("Alert! File transmission from Alice but reception is disabled.",
- process_message, self.ts, encrypted_packet, self.window_list, self.packet_list,
- self.contact_list, self.key_list, self.group_list, self.settings, self.master_key)
- self.assertEqual(os.path.getsize(f'{DIR_USER_DATA}ut_logs'), LOG_ENTRY_LENGTH)
-
-
-if __name__ == '__main__':
- unittest.main(exit=False)
diff --git a/tests/rx/test_output_loop.py b/tests/rx/test_output_loop.py
deleted file mode 100644
index 831c17c..0000000
--- a/tests/rx/test_output_loop.py
+++ /dev/null
@@ -1,159 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
-"""
-
-import builtins
-import datetime
-import shutil
-import threading
-import time
-import unittest
-import zlib
-
-from multiprocessing import Queue
-
-from src.common.crypto import encrypt_and_sign, hash_chain
-from src.common.encoding import b58encode, int_to_bytes, str_to_bytes
-from src.common.statics import *
-
-from src.tx.packet import split_to_assembly_packets
-
-from src.rx.output_loop import output_loop
-
-from tests.mock_classes import ContactList, GroupList, KeyList, MasterKey, Settings
-from tests.utils import ignored
-
-
-class TestOutputLoop(unittest.TestCase):
-
- def setUp(self):
- self.o_input = builtins.input
-
- def tearDown(self):
- builtins.input = self.o_input
-
- with ignored(FileNotFoundError):
- shutil.rmtree(DIR_IMPORTED)
-
- def test_loop(self):
- # Setup
- queues = {LOCAL_KEY_PACKET_HEADER: Queue(),
- PUBLIC_KEY_PACKET_HEADER: Queue(),
- MESSAGE_PACKET_HEADER: Queue(),
- COMMAND_PACKET_HEADER: Queue(),
- EXIT_QUEUE: Queue(),
- IMPORTED_FILE_HEADER: Queue(),
- GATEWAY_QUEUE: Queue(),
- UNITTEST_QUEUE: Queue()}
-
- local_key = KEY_LENGTH * b'a'
- local_hek = KEY_LENGTH * b'a'
- kek = KEY_LENGTH * b'a'
- fdk = KEY_LENGTH * b'a'
- tx_key = KEY_LENGTH * b'a'
- tx_hek = KEY_LENGTH * b'a'
- conf_code = bytes(1)
-
- input_list = [b58encode(kek),
- conf_code.hex(),
- b58encode(fdk, file_key=True)]
- gen = iter(input_list)
- builtins.input = lambda _: next(gen)
-
- def queue_packet(key, hek, tx_harac, packet, rx_account=None):
- if rx_account is None:
- header = COMMAND_PACKET_HEADER
- trailer = b''
- queue = queues[COMMAND_PACKET_HEADER]
- packet = split_to_assembly_packets(packet, COMMAND)[0]
-
- else:
- header = MESSAGE_PACKET_HEADER
- trailer = ORIGIN_CONTACT_HEADER + rx_account
- queue = queues[MESSAGE_PACKET_HEADER]
- packet = split_to_assembly_packets(packet, MESSAGE)[0]
-
- encrypted_harac = encrypt_and_sign(int_to_bytes(tx_harac), hek)
- encrypted_message = encrypt_and_sign(packet, key)
- encrypted_packet = header + encrypted_harac + encrypted_message + trailer
- queue.put((datetime.datetime.now(), encrypted_packet))
- time.sleep(0.1)
-
- def queue_delayer():
- time.sleep(0.1)
-
- # Queue local key packet
- local_key_packet = LOCAL_KEY_PACKET_HEADER + encrypt_and_sign(local_key + local_hek + conf_code, key=kek)
- queues[LOCAL_KEY_PACKET_HEADER].put((datetime.datetime.now(), local_key_packet))
- time.sleep(0.1)
-
- # Queue screen clearing command
- queue_packet(tx_key, tx_hek, INITIAL_HARAC, CLEAR_SCREEN_HEADER)
-
- # Queue message that goes to buffer
- queue_packet(tx_key, tx_hek, INITIAL_HARAC, PRIVATE_MESSAGE_HEADER + b'Hi Bob', b'bob@jabber.org')
-
- # Queue public key for Bob
- public_key_packet = PUBLIC_KEY_PACKET_HEADER + KEY_LENGTH * b'a' + ORIGIN_CONTACT_HEADER + b'bob@jabber.org'
- queues[PUBLIC_KEY_PACKET_HEADER].put((datetime.datetime.now(), public_key_packet))
- time.sleep(0.1)
-
- # Queue X25519 keyset for Bob
- command = KEY_EX_X25519_HEADER + 4 * (KEY_LENGTH * b'a') + b'bob@jabber.org' + US_BYTE + b'Bob'
- queue_packet(hash_chain(tx_key), tx_hek, INITIAL_HARAC+1, command)
-
- # Queue window selection packet
- command = WINDOW_SELECT_HEADER + b'bob@jabber.org'
- queue_packet(hash_chain(hash_chain(tx_key)), tx_hek, INITIAL_HARAC+2, command)
-
- # Queue message that is displayed directly
- packet = b'Hi again, Bob'
- queue_packet(tx_key, tx_hek, INITIAL_HARAC, packet, b'bob@jabber.org')
-
- # Queue file window selection command
- command = WINDOW_SELECT_HEADER + WIN_TYPE_FILE.encode()
- queue_packet(hash_chain(hash_chain(hash_chain(tx_key))), tx_hek, INITIAL_HARAC+3, command)
-
- # Queue imported file packet
- file_data = str_to_bytes('testfile') + 500*b'a'
- compressed = zlib.compress(file_data, level=COMPRESSION_LEVEL)
- packet = IMPORTED_FILE_HEADER + encrypt_and_sign(compressed, key=fdk)
- queues[IMPORTED_FILE_HEADER].put((datetime.datetime.now(), packet))
- time.sleep(0.1)
-
- # Queue exit message to break loop
- queues[UNITTEST_QUEUE].put(EXIT)
- time.sleep(0.1)
-
- threading.Thread(target=queue_delayer).start()
-
- # Test
- self.assertIsNone(output_loop(queues, Settings(), ContactList(), KeyList(),
- GroupList(), MasterKey(), stdin_fd=1, unittest=True))
-
- # Teardown
- for key_ in queues:
- while not queues[key_].empty():
- queues[key_].get()
- time.sleep(0.1)
- queues[key_].close()
-
-
-if __name__ == '__main__':
- unittest.main(exit=False)
diff --git a/tests/rx/test_packet.py b/tests/rx/test_packet.py
deleted file mode 100644
index f33ae54..0000000
--- a/tests/rx/test_packet.py
+++ /dev/null
@@ -1,576 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
-"""
-
-import base64
-import binascii
-import builtins
-import os
-import shutil
-import unittest
-import zlib
-
-from src.common.crypto import byte_padding, encrypt_and_sign, hash_chain, rm_padding_bytes
-from src.common.encoding import int_to_bytes
-from src.common.misc import split_byte_string
-from src.common.statics import *
-
-from src.tx.packet import split_to_assembly_packets
-
-from src.rx.packet import decrypt_assembly_packet, Packet, PacketList
-
-from tests.mock_classes import create_contact, ContactList, KeyList, Settings, WindowList
-from tests.utils import ignored, TFCTestCase
-
-
-class TestDecryptAssemblyPacket(TFCTestCase):
-
- def setUp(self):
- self.o_input = builtins.input
- self.window_list = WindowList( nicks=['Alice', LOCAL_ID])
- self.contact_list = ContactList(nicks=['Alice', LOCAL_ID])
- self.key_list = KeyList( nicks=['Alice', LOCAL_ID])
- self.keyset = self.key_list.get_keyset('alice@jabber.org')
- self.packet = None
-
- def tearDown(self):
- builtins.input = self.o_input
-
- def create_encrypted_packet(self, tx_harac, rx_harac, hek=KEY_LENGTH*b'\x01', key=KEY_LENGTH*b'\x01'):
- encrypted_message = encrypt_and_sign(PRIVATE_MESSAGE_HEADER + byte_padding(b'test'), key)
- encrypted_harac = encrypt_and_sign(int_to_bytes(tx_harac), hek)
- self.packet = MESSAGE_PACKET_HEADER + encrypted_harac + encrypted_message + ORIGIN_CONTACT_HEADER + b'alice@jabber.org'
- self.keyset.rx_harac = rx_harac
-
- def test_invalid_origin_header_raises_fr(self):
- # Setup
- packet = MESSAGE_PACKET_HEADER + MESSAGE_LENGTH*b'm' + b'e'
-
- # Test
- self.assertFR("Error: Received packet had an invalid origin-header.",
- decrypt_assembly_packet, packet, self.window_list, self.contact_list, self.key_list)
-
- def test_masqueraded_command_raises_fr(self):
- for o in [ORIGIN_USER_HEADER, ORIGIN_CONTACT_HEADER]:
- # Setup
- packet = MESSAGE_PACKET_HEADER + MESSAGE_LENGTH*b'm' + o + LOCAL_ID.encode()
-
- # Test
- self.assertFR("Warning! Received packet masqueraded as command.",
- decrypt_assembly_packet, packet, self.window_list, self.contact_list, self.key_list)
-
- def test_invalid_harac_ct_raises_fr(self):
- # Setup
- packet = MESSAGE_PACKET_HEADER + MESSAGE_LENGTH*b'm' + ORIGIN_CONTACT_HEADER + b'alice@jabber.org'
-
- # Test
- self.assertFR("Warning! Received packet from Alice had an invalid hash ratchet MAC.",
- decrypt_assembly_packet, packet, self.window_list, self.contact_list, self.key_list)
-
- def test_decryption_with_zero_rx_key_raises_fr(self):
- # Setup
- self.create_encrypted_packet(tx_harac=2, rx_harac=1, key=(hash_chain(KEY_LENGTH*b'\x01')))
- keyset = self.key_list.get_keyset('alice@jabber.org')
- keyset.rx_key = bytes(KEY_LENGTH)
-
- # Test
- self.assertFR("Warning! Loaded zero-key for packet decryption.",
- decrypt_assembly_packet, self.packet, self.window_list, self.contact_list, self.key_list)
-
- def test_decryption_with_zero_rx_hek_raises_fr(self):
- # Setup
- self.create_encrypted_packet(tx_harac=2, rx_harac=1, key=(hash_chain(KEY_LENGTH*b'\x01')))
- keyset = self.key_list.get_keyset('alice@jabber.org')
- keyset.rx_hek = bytes(KEY_LENGTH)
-
- # Test
- self.assertFR("Warning! Loaded zero-key for packet decryption.",
- decrypt_assembly_packet, self.packet, self.window_list, self.contact_list, self.key_list)
-
- def test_invalid_harac_raises_fr(self):
- # Setup
- self.create_encrypted_packet(tx_harac=3, rx_harac=3, hek=KEY_LENGTH*b'\x02')
-
- # Test
- self.assertFR("Warning! Received packet from Alice had an invalid hash ratchet MAC.",
- decrypt_assembly_packet, self.packet, self.window_list, self.contact_list, self.key_list)
-
- def test_expired_harac_raises_fr(self):
- # Setup
- self.create_encrypted_packet(tx_harac=1, rx_harac=3)
-
- # Test
- self.assertFR("Warning! Received packet from Alice had an expired hash ratchet counter.",
- decrypt_assembly_packet, self.packet, self.window_list, self.contact_list, self.key_list)
-
- def test_harac_dos_can_be_interrupted(self):
- # Setup
- self.create_encrypted_packet(tx_harac=10000, rx_harac=3)
- builtins.input = lambda _: 'No'
-
- # Test
- self.assertFR("Dropped packet from Alice.",
- decrypt_assembly_packet, self.packet, self.window_list, self.contact_list, self.key_list)
-
- def test_invalid_packet_ct_raises_fr(self):
- # Setup
- self.create_encrypted_packet(tx_harac=5, rx_harac=3)
-
- # Test
- self.assertFR("Warning! Received packet from Alice had an invalid MAC.",
- decrypt_assembly_packet, self.packet, self.window_list, self.contact_list, self.key_list)
-
- def test_successful_packet_decryption(self):
- # Setup
- self.create_encrypted_packet(tx_harac=1, rx_harac=1)
-
- # Test
- assembly_pt, account, origin = decrypt_assembly_packet(self.packet, self.window_list, self.contact_list, self.key_list)
- self.assertEqual(rm_padding_bytes(assembly_pt), PRIVATE_MESSAGE_HEADER + b'test')
- self.assertEqual(account, 'alice@jabber.org')
- self.assertEqual(origin, ORIGIN_CONTACT_HEADER)
-
- def test_successful_packet_decryption_with_offset(self):
- # Setup
- self.create_encrypted_packet(tx_harac=2, rx_harac=1, key=(hash_chain(KEY_LENGTH*b'\x01')))
-
- # Test
- assembly_pt, account, origin = decrypt_assembly_packet(self.packet, self.window_list, self.contact_list, self.key_list)
- self.assertEqual(rm_padding_bytes(assembly_pt), PRIVATE_MESSAGE_HEADER + b'test')
- self.assertEqual(account, 'alice@jabber.org')
- self.assertEqual(origin, ORIGIN_CONTACT_HEADER)
-
- def test_successful_command_decryption(self):
- # Setup
- command = byte_padding(b'test')
- encrypted_message = encrypt_and_sign(command, KEY_LENGTH*b'\x01')
- encrypted_harac = encrypt_and_sign(int_to_bytes(1), KEY_LENGTH*b'\x01')
- packet = COMMAND_PACKET_HEADER + encrypted_harac + encrypted_message
- keyset = self.key_list.get_keyset(LOCAL_ID)
- keyset.tx_harac = 1
-
- # Test
- assembly_pt, account, origin = decrypt_assembly_packet(packet, self.window_list, self.contact_list, self.key_list)
- self.assertEqual(assembly_pt, command)
- self.assertEqual(account, LOCAL_ID)
- self.assertEqual(origin, ORIGIN_USER_HEADER)
-
-
-class TestPacket(TFCTestCase):
-
- def setUp(self):
- self.msg = ("Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean condimentum consectetur purus quis"
- " dapibus. Fusce venenatis lacus ut rhoncus faucibus. Cras sollicitudin commodo sapien, sed bibendu"
- "m velit maximus in. Aliquam ac metus risus. Sed cursus ornare luctus. Integer aliquet lectus id ma"
- "ssa blandit imperdiet. Ut sed massa eget quam facilisis rutrum. Mauris eget luctus nisl. Sed ut el"
- "it iaculis, faucibus lacus eget, sodales magna. Nunc sed commodo arcu. In hac habitasse platea dic"
- "tumst. Integer luctus aliquam justo, at vestibulum dolor iaculis ac. Etiam laoreet est eget odio r"
- "utrum, vel malesuada lorem rhoncus. Cras finibus in neque eu euismod. Nulla facilisi. Nunc nec ali"
- "quam quam, quis ullamcorper leo. Nunc egestas lectus eget est porttitor, in iaculis felis sceleris"
- "que. In sem elit, fringilla id viverra commodo, sagittis varius purus. Pellentesque rutrum loborti"
- "s neque a facilisis. Mauris id tortor placerat, aliquam dolor ac, venenatis arcu.")
-
- self.contact = create_contact()
- self.settings = Settings(logfile_masking=True)
-
- compressed = zlib.compress(b'abcdefghijk', level=COMPRESSION_LEVEL)
- file_key = os.urandom(KEY_LENGTH)
- encrypted = encrypt_and_sign(compressed, key=file_key)
- encrypted += file_key
- encoded = base64.b85encode(encrypted)
-
- self.short_f_data = (int_to_bytes(1) + int_to_bytes(2) + b'testfile.txt' + US_BYTE + encoded)
-
- def tearDown(self):
- with ignored(FileNotFoundError):
- shutil.rmtree(DIR_RX_FILES)
-
- def test_invalid_assembly_packet_header_raises_fr(self):
- # Setup
- packet = Packet('alice@jabber.org', self.contact, ORIGIN_USER_HEADER, MESSAGE, self.settings)
- plaintext = "Lorem ipsum dolor sit amet, consectetur adipiscing elit".encode()
- packets = split_to_assembly_packets(plaintext, MESSAGE)
-
- # Test
- self.assertFR("Error: Received packet had an invalid assembly packet header.",
- packet.add_packet, b'i' + packets[0][1:])
- self.assertEqual(packet.log_masking_ctr, 1)
-
- def test_missing_start_packet_raises_fr(self):
- packet = Packet('alice@jabber.org', self.contact, ORIGIN_USER_HEADER, MESSAGE, self.settings)
-
- for header in [M_A_HEADER, M_E_HEADER]:
- self.assertFR("Missing start packet.", packet.add_packet, header + bytes(PADDING_LEN))
- self.assertEqual(packet.log_masking_ctr, 2)
-
- def test_short_message(self):
- # Setup
- packet = Packet('alice@jabber.org', self.contact, ORIGIN_USER_HEADER, MESSAGE, self.settings)
- plaintext = "Lorem ipsum dolor sit amet, consectetur adipiscing elit".encode()
- packets = split_to_assembly_packets(plaintext, MESSAGE)
-
- for p in packets:
- packet.add_packet(p)
-
- # Test
- self.assertEqual(packet.assemble_message_packet(), plaintext)
-
- def test_compression_error_raises_fr(self):
- # Setup
- packet = Packet('alice@jabber.org', self.contact, ORIGIN_USER_HEADER, MESSAGE, self.settings)
- payload = zlib.compress(b"Lorem ipsum", level=COMPRESSION_LEVEL)[::-1]
- packet_list = [M_S_HEADER + byte_padding(payload)]
-
- for p in packet_list:
- packet.add_packet(p)
-
- # Test
- self.assertFR("Error: Decompression of message failed.", packet.assemble_message_packet)
-
- def test_long_message(self):
- # Setup
- packet = Packet('alice@jabber.org', self.contact, ORIGIN_USER_HEADER, MESSAGE, self.settings)
- packets = split_to_assembly_packets(self.msg.encode(), MESSAGE)
-
- for p in packets:
- packet.add_packet(p)
-
- # Test
- message = packet.assemble_message_packet()
- self.assertEqual(message.decode(), self.msg)
-
- def test_decryption_error_raises_fr(self):
- # Setup
- packet = Packet('alice@jabber.org', self.contact, ORIGIN_USER_HEADER, MESSAGE, self.settings)
- payload = zlib.compress(self.msg.encode(), level=COMPRESSION_LEVEL)
- msg_key = bytes(KEY_LENGTH)
- payload = encrypt_and_sign(payload, msg_key)[::-1]
- payload += msg_key
- padded = byte_padding(payload)
- p_list = split_byte_string(padded, item_len=PADDING_LEN)
- packet_list = ([M_L_HEADER + p_list[0]] +
- [M_A_HEADER + p for p in p_list[1:-1]] +
- [M_E_HEADER + p_list[-1]])
-
- for p in packet_list:
- packet.add_packet(p)
-
- # Test
- self.assertFR("Error: Decryption of message failed.", packet.assemble_message_packet)
-
- def test_short_file(self):
- # Setup
- packets = split_to_assembly_packets(self.short_f_data, FILE)
-
- # Test
- self.assertFalse(os.path.isfile(f'{DIR_RX_FILES}Alice/testfile.txt'))
- self.assertFalse(os.path.isfile(f'{DIR_RX_FILES}Alice/testfile.txt.1'))
-
- packet = Packet('alice@jabber.org', self.contact, ORIGIN_CONTACT_HEADER, FILE, self.settings)
- packet.long_active = True
-
- for p in packets:
- packet.add_packet(p)
- self.assertIsNone(packet.assemble_and_store_file())
- self.assertTrue(os.path.isfile(f'{DIR_RX_FILES}Alice/testfile.txt'))
-
- for p in packets:
- packet.add_packet(p)
- self.assertIsNone(packet.assemble_and_store_file())
- self.assertTrue(os.path.isfile(f'{DIR_RX_FILES}Alice/testfile.txt.1'))
-
- # Teardown
- shutil.rmtree(DIR_RX_FILES)
-
- def test_short_file_from_user_raises_fr(self):
- # Setup
- packet = Packet('alice@jabber.org', self.contact, ORIGIN_USER_HEADER, FILE, self.settings)
- packets = split_to_assembly_packets(self.short_f_data, FILE)
-
- # Test
- for p in packets:
- self.assertFR("Ignored file from user.", packet.add_packet, p)
- self.assertEqual(packet.log_masking_ctr, 1)
-
- def test_unauthorized_file_from_contact_raises_fr(self):
- # Setup
- self.contact.file_reception = False
-
- packet = Packet('alice@jabber.org', self.contact, ORIGIN_CONTACT_HEADER, FILE, self.settings)
- packets = split_to_assembly_packets(self.short_f_data, FILE)
-
- # Test
- for p in packets:
- self.assertFR("Alert! File transmission from Alice but reception is disabled.", packet.add_packet, p)
- self.assertEqual(packet.log_masking_ctr, 1)
-
- def test_long_file(self):
- # Setup
- packet = Packet('alice@jabber.org', self.contact, ORIGIN_CONTACT_HEADER, FILE, self.settings)
- packet.long_active = True
-
- compressed = zlib.compress(os.urandom(10000), level=COMPRESSION_LEVEL)
- file_key = os.urandom(KEY_LENGTH)
- encrypted = encrypt_and_sign(compressed, key=file_key)
- encrypted += file_key
- encoded = base64.b85encode(encrypted)
- file_data = int_to_bytes(1000) + int_to_bytes(10000)+ b'testfile.txt' + US_BYTE + encoded
- packets = split_to_assembly_packets(file_data, FILE)
-
- for p in packets:
- packet.add_packet(p)
-
- # Test
- self.assertIsNone(packet.assemble_and_store_file())
- self.assertTrue(os.path.isfile(f'{DIR_RX_FILES}Alice/testfile.txt'))
- self.assertEqual(os.path.getsize(f'{DIR_RX_FILES}Alice/testfile.txt'), 10000)
-
-
- def test_disabled_file_reception_raises_fr_with_append_packet(self):
- # Setup
- packet = Packet('alice@jabber.org', self.contact, ORIGIN_CONTACT_HEADER, FILE, self.settings)
- packet.long_active = True
-
- compressed = zlib.compress(os.urandom(10000), level=COMPRESSION_LEVEL)
- file_key = os.urandom(KEY_LENGTH)
- encrypted = encrypt_and_sign(compressed, key=file_key)
- encrypted += file_key
- encoded = base64.b85encode(encrypted)
- file_data = int_to_bytes(1000) + int_to_bytes(10000)+ b'testfile.txt' + US_BYTE + encoded
- packets = split_to_assembly_packets(file_data, FILE)
-
- for p in packets[:2]:
- self.assertIsNone(packet.add_packet(p))
-
- packet.contact.file_reception = False
-
- # Test
- self.assertFR("Alert! File reception disabled mid-transfer.",
- packet.add_packet, packets[2])
-
- for p in packets[3:]:
- self.assertFR("Missing start packet.", packet.add_packet, p)
-
- self.assertEqual(packet.log_masking_ctr, len(packets))
-
-
- def test_disabled_file_reception_raises_fr_with_end_packet(self):
- # Setup
- packet = Packet('alice@jabber.org', self.contact, ORIGIN_CONTACT_HEADER, FILE, self.settings)
- packet.long_active = True
-
- compressed = zlib.compress(os.urandom(10000), level=COMPRESSION_LEVEL)
- file_key = os.urandom(KEY_LENGTH)
- encrypted = encrypt_and_sign(compressed, key=file_key)
- encrypted += file_key
- encoded = base64.b85encode(encrypted)
- file_data = int_to_bytes(1000) + int_to_bytes(10000)+ b'testfile.txt' + US_BYTE + encoded
- packets = split_to_assembly_packets(file_data, FILE)
-
- for p in packets[:-1]:
- self.assertIsNone(packet.add_packet(p))
-
- packet.contact.file_reception = False
-
- # Test
- for p in packets[-1:]:
- self.assertFR("Alert! File reception disabled mid-transfer.", packet.add_packet, p)
- self.assertEqual(packet.log_masking_ctr, len(packets))
-
- def test_long_file_from_user_raises_fr(self):
- # Setup
- packet = Packet('alice@jabber.org', self.contact, ORIGIN_USER_HEADER, FILE, self.settings)
- compressed = zlib.compress(os.urandom(10000), level=COMPRESSION_LEVEL)
- file_key = os.urandom(KEY_LENGTH)
- encrypted = encrypt_and_sign(compressed, key=file_key)
- encrypted += file_key
- encoded = base64.b85encode(encrypted)
- file_data = int_to_bytes(1000) + int_to_bytes(10000) + b'testfile.txt' + US_BYTE + encoded
- packets = split_to_assembly_packets(file_data, FILE)
-
- # Test
- self.assertFR("Ignored file from user.", packet.add_packet, packets[0])
- self.assertEqual(packet.log_masking_ctr, 1)
-
- def test_unauthorized_long_file_raises_fr(self):
- # Setup
- self.contact.file_reception = False
-
- packet = Packet('alice@jabber.org', self.contact, ORIGIN_CONTACT_HEADER, FILE, self.settings)
- compressed = zlib.compress(os.urandom(10000), level=COMPRESSION_LEVEL)
- file_key = os.urandom(KEY_LENGTH)
- encrypted = encrypt_and_sign(compressed, key=file_key)
- encrypted += file_key
- encoded = base64.b85encode(encrypted)
- file_data = int_to_bytes(1000) + int_to_bytes(10000) + b'testfile.txt' + US_BYTE + encoded
- packets = split_to_assembly_packets(file_data, FILE)
-
- # Test
- self.assertFR("Alert! File transmission from Alice but reception is disabled.", packet.add_packet, packets[0])
- self.assertEqual(packet.log_masking_ctr, 1)
-
- def test_invalid_long_file_header_raises_fr(self):
- # Setup
- packet = Packet('alice@jabber.org', self.contact, ORIGIN_CONTACT_HEADER, FILE, self.settings)
- compressed = zlib.compress(os.urandom(10000), level=COMPRESSION_LEVEL)
- file_key = os.urandom(KEY_LENGTH)
- encrypted = encrypt_and_sign(compressed, key=file_key)
- encrypted += file_key
- encoded = base64.b85encode(encrypted)
- file_data = int_to_bytes(1000) + int_to_bytes(10000) + binascii.unhexlify('3f264d4189d7a091') + US_BYTE + encoded
- packets = split_to_assembly_packets(file_data, FILE)
-
- # Test
- self.assertFR("Error: Received file packet had an invalid header.", packet.add_packet, packets[0])
- self.assertEqual(packet.log_masking_ctr, 1)
-
- def test_contact_canceled_file(self):
- # Setup
- packet = Packet('alice@jabber.org', self.contact, ORIGIN_CONTACT_HEADER, FILE, self.settings)
- compressed = zlib.compress(os.urandom(10000), level=COMPRESSION_LEVEL)
- file_key = os.urandom(KEY_LENGTH)
- encrypted = encrypt_and_sign(compressed, key=file_key)
- encrypted += file_key
- encoded = base64.b85encode(encrypted)
- file_data = int_to_bytes(1000) + int_to_bytes(10000) + b'testfile.txt' + US_BYTE + encoded
- packets = split_to_assembly_packets(file_data, FILE)
- packets = packets[:20]
- packets.append(byte_padding(F_C_HEADER)) # Add cancel packet
-
- for p in packets:
- packet.add_packet(p)
-
- # Test
- self.assertEqual(len(packet.assembly_pt_list), 0) # Cancel packet empties packet list
- self.assertFalse(packet.long_active)
- self.assertFalse(packet.is_complete)
- self.assertEqual(packet.log_masking_ctr, len(packets))
-
- def test_noise_packet_interrupts_file(self):
- # Setup
- packet = Packet('alice@jabber.org', self.contact, ORIGIN_CONTACT_HEADER, FILE, self.settings)
- compressed = zlib.compress(os.urandom(10000), level=COMPRESSION_LEVEL)
- file_key = os.urandom(KEY_LENGTH)
- encrypted = encrypt_and_sign(compressed, key=file_key)
- encrypted += file_key
- encoded = base64.b85encode(encrypted)
- file_data = int_to_bytes(1000) + int_to_bytes(10000) + b'testfile.txt' + US_BYTE + encoded
- packets = split_to_assembly_packets(file_data, FILE)
- packets = packets[:20]
- packets.append(byte_padding(P_N_HEADER)) # Add cancel packet
-
- for p in packets:
- packet.add_packet(p)
-
- # Test
- self.assertEqual(len(packet.assembly_pt_list), 0) # Cancel packet empties packet list
- self.assertFalse(packet.long_active)
- self.assertFalse(packet.is_complete)
- self.assertEqual(packet.log_masking_ctr, len(packets))
-
- def test_short_command(self):
- # Setup
- packet = Packet(LOCAL_ID, self.contact, ORIGIN_CONTACT_HEADER, COMMAND, self.settings)
- packets = split_to_assembly_packets(b'testcommand', COMMAND)
-
- for p in packets:
- packet.add_packet(p)
-
- # Test
- self.assertEqual(packet.assemble_command_packet(), b'testcommand')
- self.assertEqual(packet.log_masking_ctr, 0)
-
- def test_long_command(self):
- # Setup
- packet = Packet(LOCAL_ID, self.contact, ORIGIN_CONTACT_HEADER, COMMAND, self.settings)
- command = os.urandom(500)
- packets = split_to_assembly_packets(command, COMMAND)
-
- for p in packets:
- packet.add_packet(p)
-
- # Test
- self.assertEqual(packet.assemble_command_packet(), command)
- self.assertEqual(packet.log_masking_ctr, 0)
-
- def test_long_command_hash_mismatch_raises_fr(self):
- # Setup
- packet = Packet(LOCAL_ID, self.contact, ORIGIN_CONTACT_HEADER, COMMAND, self.settings)
- command = os.urandom(500) + b'a'
- packets = split_to_assembly_packets(command, COMMAND)
- packets = [p.replace(b'a', b'c') for p in packets]
-
- for p in packets:
- packet.add_packet(p)
-
- # Test
- self.assertFR("Error: Received an invalid command.", packet.assemble_command_packet)
- self.assertEqual(packet.log_masking_ctr, 0)
-
- def test_long_command_compression_error_raises_fr(self):
- # Setup
- packet = Packet(LOCAL_ID, self.contact, ORIGIN_CONTACT_HEADER, COMMAND, self.settings)
- command = os.urandom(500) + b'a'
- payload = zlib.compress(command, level=COMPRESSION_LEVEL)[::-1]
- payload += hash_chain(payload)
- padded = byte_padding(payload)
- p_list = split_byte_string(padded, item_len=PADDING_LEN)
- packet_list = ([C_L_HEADER + p_list[0]] +
- [C_A_HEADER + p for p in p_list[1:-1]] +
- [C_E_HEADER + p_list[-1]])
-
- for p in packet_list:
- packet.add_packet(p)
-
- # Test
- self.assertFR("Error: Decompression of command failed.", packet.assemble_command_packet)
- self.assertEqual(packet.log_masking_ctr, 0)
-
-
-class TestPacketList(unittest.TestCase):
-
- def setUp(self):
- self.contact_list = ContactList(nicks=['Alice', 'Bob'])
- self.settings = Settings()
- packet = Packet('alice@jabber.org', self.contact_list.get_contact('Alice'),
- ORIGIN_CONTACT_HEADER, MESSAGE, self.settings)
-
- self.packet_list = PacketList(self.settings, self.contact_list)
- self.packet_list.packets = [packet]
-
- def test_packet_list_iterates_over_contact_objects(self):
- for p in self.packet_list:
- self.assertIsInstance(p, Packet)
-
- def test_len_returns_number_of_contacts(self):
- self.assertEqual(len(self.packet_list), 1)
-
- def test_has_packet(self):
- self.assertTrue(self.packet_list.has_packet('alice@jabber.org', ORIGIN_CONTACT_HEADER, MESSAGE))
- self.assertFalse(self.packet_list.has_packet('alice@jabber.org', ORIGIN_USER_HEADER, MESSAGE))
-
- def test_get_packet(self):
- packet = self.packet_list.get_packet('alice@jabber.org', ORIGIN_CONTACT_HEADER, MESSAGE)
- self.assertEqual(packet.account, 'alice@jabber.org')
- self.assertEqual(packet.origin, ORIGIN_CONTACT_HEADER)
- self.assertEqual(packet.type, MESSAGE)
-
-
-if __name__ == '__main__':
- unittest.main(exit=False)
diff --git a/tests/rx/test_receiver_loop.py b/tests/rx/test_receiver_loop.py
deleted file mode 100644
index 6f792a3..0000000
--- a/tests/rx/test_receiver_loop.py
+++ /dev/null
@@ -1,74 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see <https://www.gnu.org/licenses/>.
-"""
-
-import threading
-import time
-import unittest
-
-from multiprocessing import Queue
-
-from src.common.reed_solomon import RSCodec
-from src.common.statics import *
-
-from src.rx.receiver_loop import receiver_loop
-
-from tests.mock_classes import Settings
-
-
-class TestReceiverLoop(unittest.TestCase):
-
- def test_receiver_loop(self):
- # Setup
- settings = Settings()
- rs = RSCodec(2 * settings.serial_error_correction)
- queues = {LOCAL_KEY_PACKET_HEADER: Queue(),
- PUBLIC_KEY_PACKET_HEADER: Queue(),
- MESSAGE_PACKET_HEADER: Queue(),
- COMMAND_PACKET_HEADER: Queue(),
- IMPORTED_FILE_HEADER: Queue()}
-
- all_q = dict(queues)
- all_q.update({GATEWAY_QUEUE: Queue()})
-
- for key in queues:
- packet = key + bytes(KEY_LENGTH)
- encoded = rs.encode(packet)
-
- def queue_delayer():
- time.sleep(0.1)
- all_q[GATEWAY_QUEUE].put(b'undecodable')
- all_q[GATEWAY_QUEUE].put(encoded)
-
- threading.Thread(target=queue_delayer).start()
-
- # Test
- self.assertIsNone(receiver_loop(all_q, settings, unittest=True))
- time.sleep(0.1)
- self.assertEqual(queues[key].qsize(), 1)
-
- # Teardown
- while not queues[key].empty():
- queues[key].get()
- time.sleep(0.1)
- queues[key].close()
-
-
-if __name__ == '__main__':
- unittest.main(exit=False)
diff --git a/tests/rx/test_windows.py b/tests/rx/test_windows.py
deleted file mode 100644
index 0702fa1..0000000
--- a/tests/rx/test_windows.py
+++ /dev/null
@@ -1,479 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see <https://www.gnu.org/licenses/>.
-"""
-
-import unittest
-
-from datetime import datetime
-
-from src.common.statics import *
-
-from src.rx.windows import RxWindow, WindowList
-
-from tests.mock_classes import create_contact, ContactList, GroupList, Packet, PacketList, Settings
-from tests.utils import TFCTestCase
-
-
-class TestRxWindow(TFCTestCase):
-
- def setUp(self):
- self.contact_list = ContactList(nicks=['Alice', 'Bob', 'Charlie', LOCAL_ID])
- self.group_list = GroupList(groups=['test_group', 'test_group2'])
- self.settings = Settings()
- self.packet_list = PacketList()
- self.ts = datetime.fromtimestamp(1502750000)
- self.time = self.ts.strftime('%H:%M')
-
- group = self.group_list.get_group('test_group')
- group.members = list(map(self.contact_list.get_contact, ['Alice', 'Bob', 'Charlie']))
-
- def create_window(self, uid):
- return RxWindow(uid, self.contact_list, self.group_list, self.settings, self.packet_list)
-
- def test_command_window_creation(self):
- window = self.create_window(LOCAL_ID)
- self.assertEqual(window.type, WIN_TYPE_COMMAND)
- self.assertEqual(window.window_contacts[0].rx_account, LOCAL_ID)
- self.assertEqual(window.type_print, 'system messages')
- self.assertEqual(window.name, 'system messages')
-
- def test_file_window_creation(self):
- window = self.create_window(WIN_TYPE_FILE)
- self.assertEqual(window.type, WIN_TYPE_FILE)
-
- def test_contact_window_creation(self):
- window = self.create_window('alice@jabber.org')
- self.assertEqual(window.type, WIN_TYPE_CONTACT)
- self.assertEqual(window.window_contacts[0].rx_account, 'alice@jabber.org')
- self.assertEqual(window.type_print, 'contact')
- self.assertEqual(window.name, 'Alice')
-
- def test_group_window_creation(self):
- window = self.create_window('test_group')
- self.assertEqual(window.type, WIN_TYPE_GROUP)
- self.assertEqual(window.window_contacts[0].rx_account, 'alice@jabber.org')
- self.assertEqual(window.type_print, 'group')
- self.assertEqual(window.name, 'test_group')
-
- def test_invalid_uid_raises_fr(self):
- self.assertFR("Invalid window 'bad_uid'", self.create_window, 'bad_uid')
-
- def test_len_returns_number_of_messages_in_window(self):
- # Setup
- window = self.create_window('alice@jabber.org')
- window.message_log = 5*[(datetime.now(), "Lorem ipsum", 'alice@jabber.org', ORIGIN_CONTACT_HEADER, False)]
-
- # Test
- self.assertEqual(len(window), 5)
-
- def test_window_iterates_over_message_tuples(self):
- # Setup
- window = self.create_window('alice@jabber.org')
- window.message_log = 5*[(datetime.now(), 'Lorem ipsum', 'alice@jabber.org', ORIGIN_CONTACT_HEADER, False)]
-
- # Test
- for mt in window:
- self.assertEqual(mt[1:], ("Lorem ipsum", 'alice@jabber.org', ORIGIN_CONTACT_HEADER, False))
-
- def test_remove_contacts(self):
- # Setup
- window = self.create_window('test_group')
-
- # Test
- self.assertEqual(len(window.window_contacts), 3)
- self.assertIsNone(window.remove_contacts(['alice@jabber.org', 'bob@jabber.org', 'doesnotexist@jabber.org']))
- self.assertEqual(len(window.window_contacts), 1)
-
- def test_add_contacts(self):
- # Setup
- window = self.create_window('test_group')
- window.window_contacts = [self.contact_list.get_contact('Alice')]
-
- # Test
- self.assertIsNone(window.add_contacts(['alice@jabber.org', 'bob@jabber.org', 'doesnotexist@jabber.org']))
- self.assertEqual(len(window.window_contacts), 2)
-
- def test_reset_window(self):
- # Setup
- window = self.create_window('test_group')
- window.message_log = [(datetime.now(), "Hi everybody", 'alice@jabber.org', ORIGIN_USER_HEADER, False),
- (datetime.now(), "Hi David", 'alice@jabber.org', ORIGIN_CONTACT_HEADER, False),
- (datetime.now(), "Hi David", 'bob@jabber.org', ORIGIN_CONTACT_HEADER, False)]
-
- # Test
- self.assertIsNone(window.reset_window())
- self.assertEqual(len(window), 0)
-
- def test_has_contact(self):
- window = self.create_window('test_group')
- self.assertTrue(window.has_contact('alice@jabber.org'))
- self.assertFalse(window.has_contact('doesnotexist@jabber.org'))
-
- def test_create_handle_dict(self):
- # Setup
- window = self.create_window('test_group')
- message_log = [(datetime.now(), "Lorem ipsum", 'alice@jabber.org', ORIGIN_CONTACT_HEADER, False),
- (datetime.now(), "Lorem ipsum", 'bob@jabber.org', ORIGIN_USER_HEADER , False),
- (datetime.now(), "Lorem ipsum", 'charlie@jabber.org', ORIGIN_CONTACT_HEADER, False),
- (datetime.now(), "Lorem ipsum", 'charlie@jabber.org', ORIGIN_CONTACT_HEADER, True),
- (datetime.now(), "Lorem ipsum", 'charlie@jabber.org', ORIGIN_CONTACT_HEADER, False),
- (datetime.now(), "Lorem ipsum", 'david@jabber.org', ORIGIN_CONTACT_HEADER, False),
- (datetime.now(), "Lorem ipsum", 'eric@jabber.org', ORIGIN_CONTACT_HEADER, False)]
-
- # Test
- self.assertIsNone(window.create_handle_dict(message_log))
- self.assertEqual(window.handle_dict, {'alice@jabber.org': 'Alice',
- 'bob@jabber.org': 'Bob',
- 'charlie@jabber.org': 'Charlie',
- 'david@jabber.org': 'david@jabber.org',
- 'eric@jabber.org': 'eric@jabber.org'})
-
- def test_get_command_handle(self):
- # Setup
- window = self.create_window(LOCAL_ID)
- window.is_active = True
- window.handle_dict = {LOCAL_ID: LOCAL_ID}
-
- # Test
- self.assertEqual(window.get_handle(self.ts, LOCAL_ID, ORIGIN_USER_HEADER, False), f"{self.time} -!- ")
-
- def test_get_contact_handle(self):
- # Setup
- window = self.create_window('alice@jabber.org')
- window.is_active = True
- window.handle_dict = {'alice@jabber.org': 'Alice'}
-
- # Test
- self.assertEqual(window.get_handle(self.ts, 'alice@jabber.org', ORIGIN_USER_HEADER, False), f"{self.time} Me: ")
- self.assertEqual(window.get_handle(self.ts, 'alice@jabber.org', ORIGIN_CONTACT_HEADER, False), f"{self.time} Alice: ")
-
- window.is_active = False
- self.assertEqual(window.get_handle(self.ts, 'alice@jabber.org', ORIGIN_USER_HEADER, False), f"{self.time} Me (private message): ")
- self.assertEqual(window.get_handle(self.ts, 'alice@jabber.org', ORIGIN_CONTACT_HEADER, False), f"{self.time} Alice (private message): ")
-
- def test_get_group_contact_handle(self):
- # Setup
- window = self.create_window('test_group')
- window.is_active = True
- window.handle_dict = {'alice@jabber.org': 'Alice',
- 'charlie@jabber.org': 'Charlie',
- 'david@jabber.org': 'david@jabber.org',
- 'eric@jabber.org': 'eric@jabber.org'}
-
- # Test
- self.assertEqual(window.get_handle(self.ts, 'alice@jabber.org', ORIGIN_USER_HEADER, False), f"{self.time} Me: ")
- self.assertEqual(window.get_handle(self.ts, 'charlie@jabber.org', ORIGIN_CONTACT_HEADER, False), f"{self.time} Charlie: ")
-
- window.is_active = False
- self.assertEqual(window.get_handle(self.ts, 'alice@jabber.org', ORIGIN_USER_HEADER, False), f"{self.time} Me (group test_group): ")
- self.assertEqual(window.get_handle(self.ts, 'charlie@jabber.org', ORIGIN_CONTACT_HEADER, False), f"{self.time} Charlie (group test_group): ")
-
- def test_print_to_inactive_window_preview_on_short_message(self):
- # Setup
- window = self.create_window('alice@jabber.org')
- window.handle_dict = {'alice@jabber.org': 'Alice'}
- window.is_active = False
- window.settings = Settings(new_message_notify_preview=True)
- msg_tuple = (self.ts, "Hi Bob", 'bob@jabber.org', ORIGIN_USER_HEADER, False)
-
- # Test
- self.assertPrints(f"{BOLD_ON}{self.time} Me (private message): {NORMAL_TEXT}Hi Bob\n{CURSOR_UP_ONE_LINE}{CLEAR_ENTIRE_LINE}",
- window.print, msg_tuple)
-
- def test_print_to_inactive_window_preview_on_long_message(self):
- # Setup
- window = self.create_window('alice@jabber.org')
- window.is_active = False
- window.handle_dict = {'alice@jabber.org': 'Alice'}
- window.settings = Settings(new_message_notify_preview=True)
- long_message = ("Lorem ipsum dolor sit amet, consectetur adipiscing elit. Quisque consequat libero et lao"
- "reet egestas. Aliquam a arcu malesuada, elementum metus eget, elementum mi. Vestibulum i"
- "d arcu sem. Ut sodales odio sed viverra mollis. Praesent gravida ante tellus, pellentesq"
- "ue venenatis massa placerat quis. Nullam in magna porta, hendrerit sem vel, dictum ipsum"
- ". Ut sagittis, ipsum ut bibendum ornare, ex lorem congue metus, vel posuere metus nulla "
- "at augue.")
- msg_tuple = (self.ts, long_message, 'bob@jabber.org', ORIGIN_USER_HEADER, False)
-
- # Test
- self.assertPrints(f"{BOLD_ON}{self.time} Me (private message): {NORMAL_TEXT}Lorem ipsum dolor sit "
- f"amet, consectetur adipisc...\n{CURSOR_UP_ONE_LINE}{CLEAR_ENTIRE_LINE}",
- window.print, msg_tuple)
-
- def test_print_to_inactive_window_preview_off(self):
- # Setup
- window = self.create_window('alice@jabber.org')
- window.is_active = False
- window.handle_dict = {'alice@jabber.org': 'Alice'}
- window.settings = Settings(new_message_notify_preview=False)
- msg_tuple = (self.ts, "Hi Bob", 'bob@jabber.org', ORIGIN_USER_HEADER, False)
-
- # Test
- self.assertPrints(f"{BOLD_ON}{self.time} Me (private message): {NORMAL_TEXT}{BOLD_ON}1 unread message{NORMAL_TEXT}\n"
- f"{CURSOR_UP_ONE_LINE}{CLEAR_ENTIRE_LINE}", window.print, msg_tuple)
-
- def test_print_to_active_window_no_date_change(self):
- # Setup
- window = self.create_window('alice@jabber.org')
- window.previous_msg_ts = datetime.fromtimestamp(1502750000)
- window.is_active = True
- window.handle_dict = {'bob@jabber.org': 'Bob'}
- window.settings = Settings(new_message_notify_preview=False)
- msg_tuple = (self.ts, "Hi Alice", 'bob@jabber.org', ORIGIN_CONTACT_HEADER, False)
-
- # Test
- self.assertPrints(f"{BOLD_ON}{self.time} Bob: {NORMAL_TEXT}Hi Alice\n",
- window.print, msg_tuple)
-
- def test_print_to_active_window_with_date_change_and_whisper(self):
- # Setup
- window = self.create_window('alice@jabber.org')
- window.previous_msg_ts = datetime.fromtimestamp(1501750000)
- window.is_active = True
- window.handle_dict = {'bob@jabber.org': 'Bob'}
- window.settings = Settings(new_message_notify_preview=False)
- msg_tuple = (self.ts, "Hi Alice", 'bob@jabber.org', ORIGIN_CONTACT_HEADER, True)
- self.time = self.ts.strftime('%H:%M')
-
- # Test
- self.assertPrints(f"""\
-{BOLD_ON}00:00 -!- Day changed to 2017-08-15{NORMAL_TEXT}
-{BOLD_ON}{self.time} Bob (whisper): {NORMAL_TEXT}Hi Alice
-""", window.print, msg_tuple)
-
- def test_print_to_active_window_with_date_change_and_whisper_empty_message(self):
- # Setup
- window = self.create_window('alice@jabber.org')
- window.previous_msg_ts = datetime.fromtimestamp(1501750000)
- window.is_active = True
- window.handle_dict = {'bob@jabber.org': 'Bob'}
- window.settings = Settings(new_message_notify_preview=False)
- msg_tuple = (self.ts, " ", 'bob@jabber.org', ORIGIN_CONTACT_HEADER, True)
-
- # Test
- self.assertPrints(f"""\
-{BOLD_ON}00:00 -!- Day changed to 2017-08-15{NORMAL_TEXT}
-{BOLD_ON}{self.time} Bob (whisper): {NORMAL_TEXT}
-""", window.print, msg_tuple)
-
- def test_print_new(self):
- # Setup
- window = self.create_window('alice@jabber.org')
-
- # Test
- self.assertIsNone(window.add_new(self.ts, "Hi Alice", 'bob@jabber.org', ORIGIN_CONTACT_HEADER, output=True))
- self.assertEqual(len(window.message_log), 1)
- self.assertEqual(window.handle_dict['bob@jabber.org'], 'Bob')
-
- def test_redraw_message_window(self):
- # Setup
- window = self.create_window('alice@jabber.org')
- window.is_active = True
- window.message_log = [(self.ts, "Hi Alice", 'bob@jabber.org', ORIGIN_CONTACT_HEADER, False)]
- window.unread_messages = 1
-
- # Test
- self.assertPrints(f"""\
-{CLEAR_ENTIRE_SCREEN}{CURSOR_LEFT_UP_CORNER}{BOLD_ON}{self.time} Bob: {NORMAL_TEXT}Hi Alice
-""", window.redraw)
- self.assertEqual(window.unread_messages, 0)
-
- def test_redraw_empty_window(self):
- # Setup
- window = self.create_window('alice@jabber.org')
- window.is_active = True
- window.message_log = []
-
- # Test
- self.assertPrints(f"""\
-{CLEAR_ENTIRE_SCREEN}{CURSOR_LEFT_UP_CORNER}
- This window for Alice is currently empty. \n
-""", window.redraw)
-
- def test_redraw_file_win(self):
- # Setup
- self.packet_list.packets = [Packet(type=FILE,
- name='testfile.txt',
- assembly_pt_list=5*[b'a'],
- packets=10,
- size="100.0KB",
- contact=create_contact('Bob')),
- Packet(type=FILE,
- name='testfile2.txt',
- assembly_pt_list=7 * [b'a'],
- packets=100,
- size="15.0KB",
- contact=create_contact('Charlie'))]
-
- # Test
- window = self.create_window(WIN_TYPE_FILE)
- self.assertPrints(f"""\
-
-File name Size Sender Complete
-────────────────────────────────────────────────────────────────────────────────
-testfile.txt 100.0KB Bob 50.00%
-testfile2.txt 15.0KB Charlie 7.00%
-
-{6*(CURSOR_UP_ONE_LINE+CLEAR_ENTIRE_LINE)}""",
- window.redraw_file_win)
-
- def test_redraw_empty_file_win(self):
- # Setup
- self.packet_list.packet_l = []
-
- # Test
- window = self.create_window(WIN_TYPE_FILE)
- self.assertPrints(f"""\
-
- No file transmissions currently in progress.
-
-{3*(CURSOR_UP_ONE_LINE+CLEAR_ENTIRE_LINE)}""", window.redraw_file_win)
-
-
-class TestWindowList(TFCTestCase):
-
- def setUp(self):
- self.settings = Settings()
- self.contact_list = ContactList(nicks=['Alice', 'Bob', 'Charlie', LOCAL_ID])
- self.group_list = GroupList(groups=['test_group', 'test_group2'])
- self.packet_list = PacketList()
-
- group = self.group_list.get_group('test_group')
- group.members = list(map(self.contact_list.get_contact, ['Alice', 'Bob', 'Charlie']))
-
- self.window_list = WindowList(self.settings, self.contact_list, self.group_list, self.packet_list)
-
- def create_window(self, uid):
- return RxWindow(uid, self.contact_list, self.group_list, self.settings, self.packet_list)
-
- def test_active_win_is_none_if_local_key_is_not_present(self):
- # Setup
- self.contact_list.contacts = []
-
- # Test
- window_list = WindowList(self.settings, self.contact_list, self.group_list, self.packet_list)
- self.assertEqual(window_list.active_win, None)
-
- def test_active_win_is_local_win_if_local_key_is_present(self):
- # Setup
- self.contact_list.contacts = [create_contact(LOCAL_ID)]
-
- # Test
- self.assertEqual(self.window_list.active_win.uid, LOCAL_ID)
-
- def test_len_returns_number_of_windows(self):
- self.assertEqual(len(self.window_list), 7)
-
- def test_window_list_iterates_over_windows(self):
- for w in self.window_list:
- self.assertIsInstance(w, RxWindow)
-
- def test_group_windows(self):
- # Setup
- self.window_list.windows = [self.create_window(g) for g in ['test_group', 'test_group2']]
-
- # Test
- for g in self.window_list.get_group_windows():
- self.assertEqual(g.type, WIN_TYPE_GROUP)
-
- def test_has_window(self):
- # Setup
- self.window_list.windows = [self.create_window(g) for g in ['test_group', 'test_group2']]
-
- # Test
- self.assertTrue(self.window_list.has_window('test_group'))
- self.assertTrue(self.window_list.has_window('test_group2'))
- self.assertFalse(self.window_list.has_window('test_group3'))
-
- def test_remove_window(self):
- # Setup
- self.window_list.windows = [self.create_window(g) for g in ['test_group', 'test_group2']]
-
- # Test
- self.assertEqual(len(self.window_list), 2)
- self.assertIsNone(self.window_list.remove_window('test_group3'))
- self.assertEqual(len(self.window_list), 2)
- self.assertIsNone(self.window_list.remove_window('test_group2'))
- self.assertEqual(len(self.window_list), 1)
-
- def test_select_rx_window(self):
- # Setup
- self.window_list.windows = [self.create_window(g) for g in ['test_group', 'test_group2']]
- tg_win = self.window_list.windows[0]
- tg2_win = self.window_list.windows[1]
- tg_win.is_active = True
- self.window_list.active_win = tg_win
-
- # Test
- self.assertPrints(f"""{CLEAR_ENTIRE_SCREEN}{CURSOR_LEFT_UP_CORNER}
- This window for test_group2 is currently empty.
-
-""", self.window_list.select_rx_window, 'test_group2')
- self.assertFalse(tg_win.is_active)
- self.assertTrue(tg2_win.is_active)
-
- def test_select_rx_file_window(self):
- # Setup
- self.window_list.windows = [self.create_window(g) for g in ['test_group', 'test_group2', WIN_TYPE_FILE]]
- tg_win = self.window_list.windows[0]
- tg_win.is_active = True
- self.window_list.active_win = tg_win
- self.packet_list.packets = [Packet(type=FILE,
- name='testfile.txt',
- assembly_pt_list=5 * [b'a'],
- packets=10,
- size="100.0KB",
- contact=create_contact('Bob'))]
-
- # Test
- self.assertPrints(f"""\
-
-File name Size Sender Complete
-────────────────────────────────────────────────────────────────────────────────
-testfile.txt 100.0KB Bob 50.00%
-
-{5*(CURSOR_UP_ONE_LINE+CLEAR_ENTIRE_LINE)}""", self.window_list.select_rx_window, WIN_TYPE_FILE)
-
- self.assertFalse(tg_win.is_active)
- self.assertTrue(self.window_list.get_window(WIN_TYPE_FILE).is_active)
-
- def test_get_local_window(self):
- # Setup
- self.window_list.windows = [self.create_window(g) for g in ['test_group', 'test_group2', WIN_TYPE_FILE, LOCAL_ID]]
-
- # Test
- self.assertEqual(self.window_list.get_local_window().uid, LOCAL_ID)
-
- def test_get_non_existing_window(self):
- # Setup
- self.window_list.windows = [self.create_window(g) for g in ['test_group', WIN_TYPE_FILE, LOCAL_ID]]
-
- # Test existing window
- self.assertTrue(self.window_list.has_window('test_group'))
- window = self.window_list.get_window('test_group')
- self.assertEqual(window.uid, 'test_group')
-
- # Test non-existing window
- self.assertFalse(self.window_list.has_window('test_group2'))
- window2 = self.window_list.get_window('test_group2')
- self.assertEqual(window2.uid, 'test_group2')
- self.assertTrue(self.window_list.has_window('test_group2'))
-
-
-if __name__ == '__main__':
- unittest.main(exit=False)
diff --git a/tests/tx/__init__.py b/tests/transmitter/__init__.py
similarity index 100%
rename from tests/tx/__init__.py
rename to tests/transmitter/__init__.py
diff --git a/tests/transmitter/test_commands.py b/tests/transmitter/test_commands.py
new file mode 100644
index 0000000..2101a4d
--- /dev/null
+++ b/tests/transmitter/test_commands.py
@@ -0,0 +1,950 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import os
+import unittest
+
+from unittest import mock
+
+from src.common.db_logs import write_log_entry
+from src.common.statics import *
+
+from src.transmitter.commands import change_master_key, change_setting, clear_screens, exit_tfc, log_command
+from src.transmitter.commands import print_about, print_help, print_recipients, print_settings, process_command
+from src.transmitter.commands import remove_log, rxp_display_unread, rxp_show_sys_win, send_onion_service_key, verify
+from src.transmitter.commands import whisper, whois, wipe
+from src.transmitter.packet import split_to_assembly_packets
+
+from tests.mock_classes import ContactList, create_contact, Gateway, GroupList, MasterKey, OnionService, Settings
+from tests.mock_classes import TxWindow, UserInput
+from tests.utils import assembly_packet_creator, cd_unittest, cleanup, group_name_to_group_id
+from tests.utils import gen_queue_dict, nick_to_onion_address, nick_to_pub_key, tear_queues, TFCTestCase
+
+
+class TestProcessCommand(TFCTestCase):
+
+ def setUp(self):
+ self.window = TxWindow()
+ self.contact_list = ContactList()
+ self.group_list = GroupList()
+ self.settings = Settings()
+ self.queues = gen_queue_dict()
+ self.master_key = MasterKey()
+ self.onion_service = OnionService()
+ self.gateway = Gateway()
+ self.args = (self.window, self.contact_list, self.group_list, self.settings,
+ self.queues, self.master_key, self.onion_service, self.gateway)
+
+ def tearDown(self):
+ tear_queues(self.queues)
+
+ def test_valid_command(self):
+ self.assertIsNone(process_command(UserInput('about'), *self.args))
+
+ def test_invalid_command(self):
+ self.assert_fr("Error: Invalid command 'abou'.", process_command, UserInput('abou'), *self.args)
+
+ def test_empty_command(self):
+ self.assert_fr("Error: Invalid command.", process_command, UserInput(' '), *self.args)
+
+
+class TestPrintAbout(TFCTestCase):
+
+ def test_print_about(self):
+ self.assert_prints(CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER + f"""\
+
+ Tinfoil Chat {VERSION}
+
+ Website: https://github.com/maqp/tfc/
+ Wikipage: https://github.com/maqp/tfc/wiki
+
+""", print_about)
+
+
+class TestClearScreens(unittest.TestCase):
+
+ def setUp(self):
+ self.window = TxWindow(uid=nick_to_pub_key('Alice'))
+ self.settings = Settings()
+ self.queues = gen_queue_dict()
+ self.args = self.window, self.settings, self.queues
+
+ def tearDown(self):
+ tear_queues(self.queues)
+
+ @mock.patch('os.system', return_value=None)
+ def test_clear_screens(self, _):
+ self.assertIsNone(clear_screens(UserInput('clear'), *self.args))
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
+ self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1)
+
+ @mock.patch('os.system', return_value=None)
+ def test_no_relay_clear_cmd_when_traffic_masking_is_enabled(self, _):
+ # Setup
+ self.settings.traffic_masking = True
+
+ # Test
+ self.assertIsNone(clear_screens(UserInput('clear'), *self.args))
+ self.assertEqual(self.queues[TM_COMMAND_PACKET_QUEUE].qsize(), 1)
+ self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 0)
+
+ @mock.patch('os.system', return_value=None)
+ def test_reset_screens(self, mock_os_system):
+ self.assertIsNone(clear_screens(UserInput('reset'), *self.args))
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
+ self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1)
+ mock_os_system.assert_called_with(RESET)
+
+ @mock.patch('os.system', return_value=None)
+ def test_no_relay_reset_cmd_when_traffic_masking_is_enabled(self, mock_os_system):
+ # Setup
+ self.settings.traffic_masking = True
+
+ # Test
+ self.assertIsNone(clear_screens(UserInput('reset'), *self.args))
+ self.assertEqual(self.queues[TM_COMMAND_PACKET_QUEUE].qsize(), 1)
+ self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 0)
+ mock_os_system.assert_called_with(RESET)
+
+
+class TestRXPShowSysWin(unittest.TestCase):
+
+ def setUp(self):
+ self.window = TxWindow(name='Alice',
+ uid=nick_to_pub_key('Alice'))
+ self.settings = Settings()
+ self.queues = gen_queue_dict()
+ self.args = self.window, self.settings, self.queues
+
+ def tearDown(self):
+ tear_queues(self.queues)
+
+ @mock.patch('builtins.input', side_effect=['', EOFError, KeyboardInterrupt])
+ def test_cmd_window(self, _):
+ self.assertIsNone(rxp_show_sys_win(UserInput(plaintext='cmd'), *self.args))
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 2)
+ self.assertIsNone(rxp_show_sys_win(UserInput(plaintext='cmd'), *self.args))
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 4)
+ self.assertIsNone(rxp_show_sys_win(UserInput(plaintext='cmd'), *self.args))
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 6)
+
+ @mock.patch('builtins.input', side_effect=['', EOFError, KeyboardInterrupt])
+ def test_file_window(self, _):
+ self.assertIsNone(rxp_show_sys_win(UserInput(plaintext='fw'), *self.args))
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 2)
+ self.assertIsNone(rxp_show_sys_win(UserInput(plaintext='fw'), *self.args))
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 4)
+ self.assertIsNone(rxp_show_sys_win(UserInput(plaintext='fw'), *self.args))
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 6)
+
+
+class TestExitTFC(unittest.TestCase):
+
+ def setUp(self):
+ self.settings = Settings(local_testing_mode=True)
+ self.queues = gen_queue_dict()
+ self.gateway = Gateway(data_diode_sockets=True)
+ self.args = self.settings, self.queues, self.gateway
+
+ def tearDown(self):
+ tear_queues(self.queues)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_exit_tfc_local_test(self, _):
+ # Setup
+ for _ in range(2):
+ self.queues[COMMAND_PACKET_QUEUE].put("dummy command")
+
+ # Test
+ self.assertIsNone(exit_tfc(*self.args))
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
+ self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_exit_tfc(self, _):
+ # Setup
+ self.settings.local_testing_mode = False
+ for _ in range(2):
+ self.queues[COMMAND_PACKET_QUEUE].put("dummy command")
+
+ # Test
+ self.assertIsNone(exit_tfc(*self.args))
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
+ self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1)
+
+
+class TestLogCommand(TFCTestCase):
+
+ def setUp(self):
+ self.unittest_dir = cd_unittest()
+ self.window = TxWindow(name='Alice',
+ uid=nick_to_pub_key('Alice'))
+ self.contact_list = ContactList()
+ self.group_list = GroupList()
+ self.settings = Settings()
+ self.queues = gen_queue_dict()
+ self.master_key = MasterKey()
+ self.args = self.window, self.contact_list, self.group_list, self.settings, self.queues, self.master_key
+
+ def tearDown(self):
+ cleanup(self.unittest_dir)
+ tear_queues(self.queues)
+
+ def test_invalid_export(self):
+ self.assert_fr("Error: Invalid number of messages.",
+ log_command, UserInput("history a"), *self.args)
+
+ def test_log_printing(self):
+ self.assert_fr(f"No log database available.",
+ log_command, UserInput("history 4"), *self.args)
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
+
+ def test_log_printing_all(self):
+ self.assert_fr(f"No log database available.",
+ log_command, UserInput("history"), *self.args)
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
+
+ def test_invalid_number_raises_fr(self):
+ self.assert_fr("Error: Invalid number of messages.",
+ log_command, UserInput('history a'), *self.args)
+
+ def test_too_high_number_raises_fr(self):
+ self.assert_fr("Error: Invalid number of messages.",
+ log_command, UserInput('history 94857634985763454345'), *self.args)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', return_value='No')
+ def test_user_abort_raises_fr(self, *_):
+ self.assert_fr("Log file export aborted.",
+ log_command, UserInput('export'), *self.args)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', return_value='Yes')
+ def test_successful_export_command(self, *_):
+ # Setup
+ self.window.type = 'contact'
+ self.window.uid = nick_to_pub_key('Alice')
+ whisper_header = b'\x00'
+ packet = split_to_assembly_packets(whisper_header + PRIVATE_MESSAGE_HEADER + b'test', MESSAGE)[0]
+ write_log_entry(packet, nick_to_pub_key('Alice'), self.settings, self.master_key)
+
+ # Test
+ for command in ['export', 'export 1']:
+ self.assert_fr(f"Exported log file of contact 'Alice'.",
+ log_command, UserInput(command), self.window, ContactList(nicks=['Alice']),
+ self.group_list, self.settings, self.queues, self.master_key)
+
+
+class TestSendOnionServiceKey(TFCTestCase):
+
+ confirmation_code = b'a'
+
+ def setUp(self):
+ self.contact_list = ContactList()
+ self.settings = Settings()
+ self.onion_service = OnionService()
+ self.gateway = Gateway()
+ self.args = self.contact_list, self.settings, self.onion_service, self.gateway
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('os.urandom', return_value=confirmation_code)
+ @mock.patch('builtins.input', side_effect=['Yes', confirmation_code.hex()])
+ def test_onion_service_key_delivery_traffic_masking(self, *_):
+ self.assertIsNone(send_onion_service_key(*self.args))
+ self.assertEqual(len(self.gateway.packets), 1)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('os.urandom', return_value=confirmation_code)
+ @mock.patch('builtins.input', side_effect=[KeyboardInterrupt, 'No'])
+ def test_onion_service_key_delivery_traffic_masking_abort(self, *_):
+ # Setup
+ self.settings.traffic_masking = True
+
+ # Test
+ for _ in range(2):
+ self.assert_fr("Onion Service data export canceled.", send_onion_service_key, *self.args)
+
+ @mock.patch('os.urandom', return_value=confirmation_code)
+ @mock.patch('builtins.input', return_value=confirmation_code.hex())
+ def test_onion_service_key_delivery(self, *_):
+ self.assertIsNone(send_onion_service_key(*self.args))
+ self.assertEqual(len(self.gateway.packets), 1)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('os.urandom', return_value=confirmation_code)
+ @mock.patch('builtins.input', side_effect=[EOFError, KeyboardInterrupt])
+ def test_onion_service_key_delivery_cancel(self, *_):
+ for _ in range(2):
+ self.assert_fr("Onion Service data export canceled.", send_onion_service_key, *self.args)
+
+
+class TestPrintHelp(TFCTestCase):
+
+ def setUp(self):
+ self.settings = Settings()
+ self.settings.traffic_masking = False
+
+ @mock.patch('shutil.get_terminal_size', return_value=[60, 60])
+ def test_print_normal(self, _):
+ self.assert_prints(CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER + """\
+List of commands:
+
+/about Show links to project resources
+/add Add new contact
+/cm Cancel message transmission to
+ active contact/group
+
+/clear, ' ' Clear TFC screens
+/cmd, '//' Display command window on Receiver
+/connect Resend Onion Service data to Relay
+/exit Exit TFC on all three computers
+/export (n) Export (n) messages from
+ recipient's log file
+
+/file Send file to active contact/group
+/help Display this list of commands
+/history (n) Print (n) messages from
+ recipient's log file
+
+/localkey Generate new local key pair
+/logging {on,off}(' all') Change message log setting (for
+ all contacts)
+
+/msg {A,N,G} Change recipient to Account, Nick,
+ or Group
+
+/names List contacts and groups
+/nick N Change nickname of active
+ recipient/group to N
+
+/notify {on,off} (' all') Change notification settings (for
+ all contacts)
+
+/passwd {tx,rx} Change master password on target
+ system
+
+/psk Open PSK import dialog on Receiver
+/reset Reset ephemeral session log for
+ active window
+
+/rm {A,N} Remove contact specified by
+ account A or nick N
+
+/rmlogs {A,N} Remove log entries for account A
+ or nick N
+
+/set S V Change setting S to value V
+/settings List setting names, values and
+ descriptions
+
+/store {on,off} (' all') Change file reception (for all
+ contacts)
+
+/unread, ' ' List windows with unread messages
+ on Receiver
+
+/verify Verify fingerprints with active
+ contact
+
+/whisper M Send message M, asking it not to
+ be logged
+
+/whois {A,N} Check which A corresponds to N or
+ vice versa
+
+/wipe Wipe all TFC user data and power
+ off systems
+
+Shift + PgUp/PgDn Scroll terminal up/down
+────────────────────────────────────────────────────────────
+Group management:
+
+/group create G A₁..Aₙ Create group G and add accounts
+ A₁..Aₙ
+
+/group join ID G A₁..Aₙ Join group ID, call it G and add
+ accounts A₁..Aₙ
+
+/group add G A₁..Aₙ Add accounts A₁..Aₙ to group G
+/group rm G A₁..Aₙ Remove accounts A₁..Aₙ from group G
+/group rm G Remove group G
+────────────────────────────────────────────────────────────
+
+""", print_help, self.settings)
+
+ @mock.patch('shutil.get_terminal_size', return_value=[80, 80])
+ def test_print_during_traffic_masking(self, _):
+ self.settings.traffic_masking = True
+ self.assert_prints(CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER + """\
+List of commands:
+
+/about Show links to project resources
+/cf Cancel file transmission to active contact/group
+/cm Cancel message transmission to active contact/group
+/clear, ' ' Clear TFC screens
+/cmd, '//' Display command window on Receiver
+/connect Resend Onion Service data to Relay
+/exit Exit TFC on all three computers
+/export (n) Export (n) messages from recipient's log file
+/file Send file to active contact/group
+/fw Display file reception window on Receiver
+/help Display this list of commands
+/history (n) Print (n) messages from recipient's log file
+/logging {on,off}(' all') Change message log setting (for all contacts)
+/names List contacts and groups
+/nick N Change nickname of active recipient/group to N
+/notify {on,off} (' all') Change notification settings (for all contacts)
+/reset Reset ephemeral session log for active window
+/rmlogs {A,N} Remove log entries for account A or nick N
+/set S V Change setting S to value V
+/settings List setting names, values and descriptions
+/store {on,off} (' all') Change file reception (for all contacts)
+/unread, ' ' List windows with unread messages on Receiver
+/verify Verify fingerprints with active contact
+/whisper M Send message M, asking it not to be logged
+/whois {A,N} Check which A corresponds to N or vice versa
+/wipe Wipe all TFC user data and power off systems
+Shift + PgUp/PgDn Scroll terminal up/down
+────────────────────────────────────────────────────────────────────────────────
+
+""", print_help, self.settings)
+
+
+class TestPrintRecipients(TFCTestCase):
+
+ def setUp(self):
+ self.contact_list = ContactList(nicks=['Alice', 'Bob'])
+ self.group_list = GroupList(groups=['test_group', 'test_group_2'])
+ self.args = self.contact_list, self.group_list
+
+ def test_printing(self):
+ self.assertIsNone(print_recipients(*self.args))
+
+
+class TestChangeMasterKey(TFCTestCase):
+
+ def setUp(self):
+ self.unittest_dir = cd_unittest()
+ self.contact_list = ContactList()
+ self.group_list = GroupList()
+ self.settings = Settings()
+ self.queues = gen_queue_dict()
+ self.master_key = MasterKey()
+ self.onion_service = OnionService(master_key=self.master_key,
+ file_name=f'{DIR_USER_DATA}/unittest')
+ self.args = (self.contact_list, self.group_list, self.settings,
+ self.queues, self.master_key, self.onion_service)
+
+ def tearDown(self):
+ cleanup(self.unittest_dir)
+ tear_queues(self.queues)
+
+ def test_raises_fr_during_traffic_masking(self):
+ # Setup
+ self.settings.traffic_masking = True
+
+ # Test
+ self.assert_fr("Error: Command is disabled during traffic masking.",
+ change_master_key, UserInput(), *self.args)
+
+ def test_missing_target_sys_raises_fr(self):
+ self.assert_fr("Error: No target-system ('tx' or 'rx') specified.",
+ change_master_key, UserInput("passwd "), *self.args)
+
+ def test_invalid_target_sys_raises_fr(self):
+ self.assert_fr("Error: Invalid target system 't'.",
+ change_master_key, UserInput("passwd t"), *self.args)
+
+ @mock.patch('getpass.getpass', return_value='a')
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('src.common.db_masterkey.ARGON2_MIN_MEMORY', 1000)
+ @mock.patch('src.common.db_masterkey.MIN_KEY_DERIVATION_TIME', 0.01)
+ def test_transmitter_command(self, *_):
+ # Setup
+ write_log_entry(M_S_HEADER + PADDING_LENGTH * b'a', nick_to_pub_key('Alice'), self.settings, self.master_key)
+
+ # Test
+ self.assertIsNone(change_master_key(UserInput("passwd tx"), *self.args))
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 0)
+ self.assertEqual(self.queues[KEY_MANAGEMENT_QUEUE].qsize(), 1)
+
+ def test_receiver_command(self):
+ self.assertIsNone(change_master_key(UserInput("passwd rx"), *self.args))
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
+ self.assertEqual(self.queues[KEY_MANAGEMENT_QUEUE].qsize(), 0)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('getpass.getpass', side_effect=KeyboardInterrupt)
+ def test_keyboard_interrupt_raises_fr(self, *_):
+ self.assert_fr("Password change aborted.",
+ change_master_key, UserInput("passwd tx"), *self.args)
+
+
+class TestRemoveLog(TFCTestCase):
+
+ def setUp(self):
+ self.unittest_dir = cd_unittest()
+ self.contact_list = ContactList(nicks=['Alice'])
+ self.group_list = GroupList(groups=['test_group'])
+ self.settings = Settings()
+ self.queues = gen_queue_dict()
+ self.master_key = MasterKey()
+ self.file_name = f'{DIR_USER_DATA}{self.settings.software_operation}_logs'
+ self.args = self.contact_list, self.group_list, self.settings, self.queues, self.master_key
+
+ def tearDown(self):
+ tear_queues(self.queues)
+ cleanup(self.unittest_dir)
+
+ def test_missing_contact_raises_fr(self):
+ self.assert_fr("Error: No contact/group specified.",
+ remove_log, UserInput(''), *self.args)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', return_value='No')
+ def test_no_aborts_removal(self, *_):
+ # Setup
+ write_log_entry(M_S_HEADER + PADDING_LENGTH * b'a', nick_to_pub_key('Alice'), self.settings, self.master_key)
+ self.assertEqual(os.path.getsize(self.file_name), LOG_ENTRY_LENGTH)
+
+ # Test
+ self.assert_fr("Log file removal aborted.",
+ remove_log, UserInput('/rmlogs Alice'), *self.args)
+
+ @mock.patch('shutil.get_terminal_size', return_value=[150, 150])
+ @mock.patch('builtins.input', return_value='Yes')
+ def test_removal_with_invalid_account_raises_fr(self, *_):
+ self.assert_fr("Error: Invalid account.",
+ remove_log, UserInput(f'/rmlogs {nick_to_onion_address("Alice")[:-1] + "a"}'), *self.args)
+
+ @mock.patch('builtins.input', return_value='Yes')
+ def test_invalid_group_id_raises_fr(self, _):
+ self.assert_fr("Error: Invalid group ID.",
+ remove_log, UserInput(f'/rmlogs {group_name_to_group_id("test_group")[:-1] + b"a"}'), *self.args)
+
+ @mock.patch('builtins.input', return_value='Yes')
+ def test_log_remove_with_nick(self, _):
+ # Setup
+ write_log_entry(M_S_HEADER + PADDING_LENGTH * b'a', nick_to_pub_key("Alice"), self.settings, self.master_key)
+ self.assertEqual(os.path.getsize(self.file_name), LOG_ENTRY_LENGTH)
+
+ # Test
+ self.assert_fr("Removed log entries for contact 'Alice'.",
+ remove_log, UserInput('/rmlogs Alice'), *self.args)
+ self.assertEqual(os.path.getsize(self.file_name), 0)
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
+
+ @mock.patch('shutil.get_terminal_size', return_value=[150, 150])
+ @mock.patch('builtins.input', return_value='Yes')
+ def test_log_remove_with_onion_address(self, *_):
+ # Setup
+ write_log_entry(M_S_HEADER + PADDING_LENGTH * b'a', nick_to_pub_key("Alice"), self.settings, self.master_key)
+ self.assertEqual(os.path.getsize(self.file_name), LOG_ENTRY_LENGTH)
+
+ # Test
+ self.assert_fr("Removed log entries for contact 'Alice'.",
+ remove_log, UserInput(f'/rmlogs {nick_to_onion_address("Alice")}'), *self.args)
+ self.assertEqual(os.path.getsize(self.file_name), 0)
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
+
+ @mock.patch('shutil.get_terminal_size', return_value=[150, 150])
+ @mock.patch('builtins.input', return_value='Yes')
+ def test_log_remove_with_unknown_onion_address(self, *_):
+ # Setup
+ write_log_entry(M_S_HEADER + PADDING_LENGTH * b'a', nick_to_pub_key("Alice"), self.settings, self.master_key)
+ self.assertEqual(os.path.getsize(self.file_name), LOG_ENTRY_LENGTH)
+
+ # Test
+ self.assert_fr("Found no log entries for contact 'w5sm3'.",
+ remove_log, UserInput(f'/rmlogs {nick_to_onion_address("Unknown")}'), *self.args)
+ self.assertEqual(os.path.getsize(self.file_name), LOG_ENTRY_LENGTH)
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
+
+ @mock.patch('builtins.input', return_value='Yes')
+ def test_log_remove_with_group_name(self, _):
+ # Setup
+ for p in assembly_packet_creator(MESSAGE, 'This is a short group message',
+ group_id=group_name_to_group_id('test_group')):
+ write_log_entry(p, nick_to_pub_key('Alice'), self.settings, self.master_key)
+ self.assertEqual(os.path.getsize(self.file_name), LOG_ENTRY_LENGTH)
+
+ # Test
+ self.assert_fr("Removed log entries for group 'test_group'.",
+ remove_log, UserInput(f'/rmlogs test_group'), *self.args)
+ self.assertEqual(os.path.getsize(self.file_name), 0)
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
+
+ @mock.patch('builtins.input', return_value='Yes')
+ def test_unknown_selector_raises_fr(self, _):
+ # Setup
+ write_log_entry(M_S_HEADER + PADDING_LENGTH * b'a', nick_to_pub_key("Alice"), self.settings, self.master_key)
+ self.assertEqual(os.path.getsize(self.file_name), LOG_ENTRY_LENGTH)
+
+ # Test
+ self.assert_fr("Error: Unknown selector.",
+ remove_log, UserInput(f'/rmlogs unknown'), *self.args)
+
+
+class TestChangeSetting(TFCTestCase):
+
+ def setUp(self):
+ self.window = TxWindow()
+ self.contact_list = ContactList()
+ self.group_list = GroupList()
+ self.settings = Settings()
+ self.queues = gen_queue_dict()
+ self.gateway = Gateway()
+ self.args = self.window, self.contact_list, self.group_list, self.settings, self.queues, self.gateway
+
+ def tearDown(self):
+ tear_queues(self.queues)
+
+ def test_missing_setting_raises_fr(self):
+ self.assert_fr("Error: No setting specified.",
+ change_setting, UserInput('set'), *self.args)
+
+ def test_invalid_setting_raises_fr(self):
+ self.assert_fr("Error: Invalid setting 'e_correction_ratia'.",
+ change_setting, UserInput("set e_correction_ratia true"), *self.args)
+
+ def test_missing_value_raises_fr(self):
+ self.assert_fr("Error: No value for setting specified.",
+ change_setting, UserInput("set serial_error_correction"), *self.args)
+
+ def test_serial_settings_raise_fr(self):
+ self.assert_fr("Error: Serial interface setting can only be changed manually.",
+ change_setting, UserInput("set use_serial_usb_adapter True"), *self.args)
+
+ self.assert_fr("Error: Serial interface setting can only be changed manually.",
+ change_setting, UserInput("set built_in_serial_interface Truej"), *self.args)
+
+ def test_relay_commands_raise_fr_when_traffic_masking_is_enabled(self):
+ # Setup
+ self.settings.traffic_masking = True
+
+ # Test
+ key_list = ['serial_error_correction', 'serial_baudrate', 'allow_contact_requests']
+ for key, value in zip(key_list, ['5', '5', 'True']):
+ self.assert_fr("Error: Can't change this setting during traffic masking.",
+ change_setting, UserInput(f"set {key} {value}"), *self.args)
+
+ def test_individual_settings(self):
+
+ self.assertIsNone(change_setting(UserInput("set serial_error_correction 5"), *self.args))
+ self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1)
+
+ self.assertIsNone(change_setting(UserInput("set serial_baudrate 9600"), *self.args))
+ self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 2)
+
+ self.assertIsNone(change_setting(UserInput("set allow_contact_requests True"), *self.args))
+ self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 3)
+
+ self.assertIsNone(change_setting(UserInput("set traffic_masking True"), *self.args))
+ self.assertIsInstance(self.queues[SENDER_MODE_QUEUE].get(), Settings)
+ self.assertTrue(self.queues[TRAFFIC_MASKING_QUEUE].get())
+
+ self.settings.traffic_masking = False
+ self.assertIsNone(change_setting(UserInput("set max_number_of_group_members 100"), *self.args))
+ self.assertTrue(self.group_list.store_groups_called)
+ self.group_list.store_groups_called = False
+
+ self.assertIsNone(change_setting(UserInput("set max_number_of_groups 100"), *self.args))
+ self.assertTrue(self.group_list.store_groups_called)
+ self.group_list.store_groups_called = False
+
+ self.assertIsNone(change_setting(UserInput("set max_number_of_contacts 100"), *self.args))
+ self.assertEqual(self.queues[KEY_MANAGEMENT_QUEUE].qsize(), 1)
+
+ self.assertIsNone(change_setting(UserInput("set log_file_masking True"), *self.args))
+ self.assertTrue(self.queues[LOGFILE_MASKING_QUEUE].get())
+
+
+class TestPrintSettings(TFCTestCase):
+
+ def test_print_settings(self):
+ self.assert_prints(f"""\
+{CLEAR_ENTIRE_SCREEN}{CURSOR_LEFT_UP_CORNER}
+Setting name Current value Default value Description
+────────────────────────────────────────────────────────────────────────────────
+disable_gui_dialog False False True replaces
+ GUI dialogs with
+ CLI prompts
+
+max_number_of_group_members 50 50 Maximum number
+ of members in a
+ group
+
+max_number_of_groups 50 50 Maximum number
+ of groups
+
+max_number_of_contacts 50 50 Maximum number
+ of contacts
+
+log_messages_by_default False False Default logging
+ setting for new
+ contacts/groups
+
+accept_files_by_default False False Default file
+ reception
+ setting for new
+ contacts
+
+show_notifications_by_default True True Default message
+ notification
+ setting for new
+ contacts/groups
+
+log_file_masking False False True hides real
+ size of log file
+ during traffic
+ masking
+
+nc_bypass_messages False False False removes
+ Networked
+ Computer bypass
+ interrupt
+ messages
+
+confirm_sent_files True True False sends
+ files without
+ asking for
+ confirmation
+
+double_space_exits False False True exits,
+ False clears
+ screen with
+ double space
+ command
+
+traffic_masking False False True enables
+ traffic masking
+ to hide metadata
+
+tm_static_delay 2.0 2.0 The static delay
+ between traffic
+ masking packets
+
+tm_random_delay 2.0 2.0 Max random delay
+ for traffic
+ masking timing
+ obfuscation
+
+allow_contact_requests True True When False, does
+ not show TFC
+ contact requests
+
+new_message_notify_preview False False When True, shows
+ a preview of the
+ received message
+
+new_message_notify_duration 1.0 1.0 Number of
+ seconds new
+ message
+ notification
+ appears
+
+max_decompress_size 100000000 100000000 Max size
+ Receiver accepts
+ when
+ decompressing
+ file
+
+
+Serial interface setting Current value Default value Description
+────────────────────────────────────────────────────────────────────────────────
+serial_baudrate 19200 19200 The speed of
+ serial interface
+ in bauds per
+ second
+
+serial_error_correction 5 5 Number of byte
+ errors serial
+ datagrams can
+ recover from
+
+
+""", print_settings, Settings(), Gateway())
+
+
+class TestRxPDisplayUnread(unittest.TestCase):
+
+ def setUp(self):
+ self.queues = gen_queue_dict()
+
+ def tearDown(self):
+ tear_queues(self.queues)
+
+ def test_command(self):
+ self.assertIsNone(rxp_display_unread(Settings(), self.queues))
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
+
+
+class TestVerify(TFCTestCase):
+
+ def setUp(self):
+ self.window = TxWindow(uid=nick_to_pub_key("Alice"),
+ name='Alice',
+ window_contacts=[create_contact('test_group')],
+ log_messages=True,
+ type=WIN_TYPE_CONTACT)
+ self.contact_list = ContactList(nicks=['Alice'])
+ self.contact = self.contact_list.get_contact_by_address_or_nick('Alice')
+ self.window.contact = self.contact
+ self.args = self.window, self.contact_list
+
+ def test_active_group_raises_fr(self):
+ self.window.type = WIN_TYPE_GROUP
+ self.assert_fr("Error: A group is selected.", verify, *self.args)
+
+ def test_psk_raises_fr(self):
+ self.contact.kex_status = KEX_STATUS_NO_RX_PSK
+ self.assert_fr("Pre-shared keys have no fingerprints.", verify, *self.args)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', side_effect=['No', 'Yes'])
+ def test_fingerprint_check(self, *_):
+ self.contact.kex_status = KEX_STATUS_VERIFIED
+
+ self.assertIsNone(verify(*self.args))
+ self.assertEqual(self.contact.kex_status, KEX_STATUS_UNVERIFIED)
+
+ self.assertIsNone(verify(*self.args))
+ self.assertEqual(self.contact.kex_status, KEX_STATUS_VERIFIED)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', side_effect=KeyboardInterrupt)
+ def test_keyboard_interrupt_raises_fr(self, *_):
+ self.contact.kex_status = KEX_STATUS_VERIFIED
+ self.assert_fr("Fingerprint verification aborted.", verify, *self.args)
+ self.assertEqual(self.contact.kex_status, KEX_STATUS_VERIFIED)
+
+
+class TestWhisper(TFCTestCase):
+
+ def setUp(self):
+ self.window = TxWindow(uid=nick_to_pub_key("Alice"),
+ name='Alice',
+ window_contacts=[create_contact('Alice')],
+ log_messages=True)
+ self.settings = Settings()
+ self.queues = gen_queue_dict()
+ self.args = self.window, self.settings, self.queues
+
+ def test_empty_input_raises_fr(self):
+ self.assert_fr("Error: No whisper message specified.",
+ whisper, UserInput("whisper"), *self.args)
+
+ def test_whisper(self):
+ self.assertIsNone(whisper(UserInput("whisper This message ought not to be logged."), *self.args))
+
+ message, pub_key, logging, log_as_ph, win_uid = self.queues[MESSAGE_PACKET_QUEUE].get()
+ self.assertEqual(pub_key, nick_to_pub_key("Alice"))
+ self.assertTrue(logging)
+ self.assertTrue(log_as_ph)
+
+
+class TestWhois(TFCTestCase):
+
+ def setUp(self):
+ self.contact_list = ContactList(nicks=['Alice'])
+ self.group_list = GroupList(groups=['test_group'])
+ self.args = self.contact_list, self.group_list
+
+ def test_missing_selector_raises_fr(self):
+ self.assert_fr("Error: No account or nick specified.", whois, UserInput("whois"), *self.args)
+
+ def test_unknown_account_raises_fr(self):
+ self.assert_fr("Error: Unknown selector.", whois, UserInput("whois alice"), *self.args)
+
+ def test_nick_from_account(self):
+ self.assert_prints(
+ f"""\
+{BOLD_ON} Nick of 'hpcrayuxhrcy2wtpfwgwjibderrvjll6azfr4tqat3eka2m2gbb55bid' is {NORMAL_TEXT}
+{BOLD_ON} Alice {NORMAL_TEXT}\n""",
+ whois, UserInput("whois hpcrayuxhrcy2wtpfwgwjibderrvjll6azfr4tqat3eka2m2gbb55bid"), *self.args)
+
+ def test_account_from_nick(self):
+ self.assert_prints(
+ f"""\
+{BOLD_ON} Account of 'Alice' is {NORMAL_TEXT}
+{BOLD_ON} hpcrayuxhrcy2wtpfwgwjibderrvjll6azfr4tqat3eka2m2gbb55bid {NORMAL_TEXT}\n""",
+ whois, UserInput("whois Alice"), *self.args)
+
+ def test_group_id_from_group_name(self):
+ self.assert_prints(
+ f"""\
+{BOLD_ON} Group ID of group 'test_group' is {NORMAL_TEXT}
+{BOLD_ON} 2dbCCptB9UGo9 {NORMAL_TEXT}\n""",
+ whois, UserInput(f"whois test_group"), *self.args)
+
+ def test_group_name_from_group_id(self):
+ self.assert_prints(
+ f"""\
+{BOLD_ON} Name of group with ID '2dbCCptB9UGo9' is {NORMAL_TEXT}
+{BOLD_ON} test_group {NORMAL_TEXT}\n""",
+ whois, UserInput("whois 2dbCCptB9UGo9"), *self.args)
+
+
+class TestWipe(TFCTestCase):
+
+ def setUp(self):
+ self.settings = Settings()
+ self.queues = gen_queue_dict()
+ self.gateway = Gateway()
+ self.args = self.settings, self.queues, self.gateway
+
+ @mock.patch('builtins.input', return_value='No')
+ def test_no_raises_fr(self, _):
+ self.assert_fr("Wipe command aborted.", wipe, *self.args)
+
+ @mock.patch('os.system', return_value=None)
+ @mock.patch('builtins.input', return_value='Yes')
+ @mock.patch('time.sleep', return_value=None)
+ def test_wipe_local_testing(self, *_):
+ # Setup
+ self.settings.local_testing_mode = True
+ self.gateway.settings.data_diode_sockets = True
+ for _ in range(2):
+ self.queues[COMMAND_PACKET_QUEUE].put("dummy command")
+ self.queues[RELAY_PACKET_QUEUE].put("dummy packet")
+
+ # Test
+ self.assertIsNone(wipe(*self.args))
+ wipe_packet = UNENCRYPTED_DATAGRAM_HEADER + UNENCRYPTED_WIPE_COMMAND
+ self.assertTrue(self.queues[RELAY_PACKET_QUEUE].get().startswith(wipe_packet))
+
+ @mock.patch('os.system', return_value=None)
+ @mock.patch('builtins.input', return_value='Yes')
+ @mock.patch('time.sleep', return_value=None)
+ def test_wipe(self, *_):
+ # Setup
+ for _ in range(2):
+ self.queues[COMMAND_PACKET_QUEUE].put("dummy command")
+ self.queues[RELAY_PACKET_QUEUE].put("dummy packet")
+
+ # Test
+ self.assertIsNone(wipe(*self.args))
+ wipe_packet = UNENCRYPTED_DATAGRAM_HEADER + UNENCRYPTED_WIPE_COMMAND
+ self.assertTrue(self.queues[RELAY_PACKET_QUEUE].get().startswith(wipe_packet))
+
+
+if __name__ == '__main__':
+ unittest.main(exit=False)
diff --git a/tests/transmitter/test_commands_g.py b/tests/transmitter/test_commands_g.py
new file mode 100644
index 0000000..ebb064b
--- /dev/null
+++ b/tests/transmitter/test_commands_g.py
@@ -0,0 +1,322 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import unittest
+
+from unittest import mock
+
+from src.common.encoding import b58encode
+from src.common.statics import *
+
+from src.transmitter.commands_g import group_add_member, group_create, group_rm_group, group_rm_member
+from src.transmitter.commands_g import process_group_command, rename_group
+
+from tests.mock_classes import create_group, Contact, ContactList, GroupList, MasterKey, Settings, UserInput, TxWindow
+from tests.utils import cd_unittest, cleanup, gen_queue_dict, nick_to_pub_key, tear_queues, TFCTestCase
+
+
+class TestProcessGroupCommand(TFCTestCase):
+
+ def setUp(self):
+ self.contact_list = ContactList(nicks=['Alice'])
+ self.group_list = GroupList()
+ self.settings = Settings()
+ self.queues = gen_queue_dict()
+ self.master_key = MasterKey()
+ self.args = self.contact_list, self.group_list, self.settings, self.queues, self.settings
+
+ def tearDown(self):
+ tear_queues(self.queues)
+
+ def test_raises_fr_when_traffic_masking_is_enabled(self):
+ # Setup
+ self.settings.traffic_masking = True
+
+ # Test
+ self.assert_fr("Error: Command is disabled during traffic masking.",
+ process_group_command, UserInput(), *self.args)
+
+ def test_invalid_command_raises_fr(self):
+ self.assert_fr("Error: Invalid group command.", process_group_command, UserInput('group '), *self.args)
+
+ def test_invalid_command_parameters_raises_fr(self):
+ self.assert_fr("Error: Invalid group command.", process_group_command, UserInput('group bad'), *self.args)
+
+ def test_missing_group_id_raises_fr(self):
+ self.assert_fr("Error: No group ID specified.", process_group_command, UserInput('group join '), *self.args)
+
+ def test_invalid_group_id_raises_fr(self):
+ self.assert_fr("Error: Invalid group ID.", process_group_command, UserInput('group join invalid'), *self.args)
+
+ def test_missing_name_raises_fr(self):
+ self.assert_fr("Error: No group name specified.", process_group_command, UserInput('group create '), *self.args)
+
+ @mock.patch('builtins.input', return_value='Yes')
+ @mock.patch('os.urandom', return_value=GROUP_ID_LENGTH*b'a')
+ def test_successful_command(self, *_):
+ self.assertIsNone(process_group_command(UserInput('group create team Alice'), *self.args))
+ user_input = UserInput(f"group join {b58encode(GROUP_ID_LENGTH*b'a')} team2")
+ self.assert_fr("Error: Group with matching ID already exists.", process_group_command, user_input, *self.args)
+
+
+class TestGroupCreate(TFCTestCase):
+
+ def setUp(self):
+ self.contact_list = ContactList(nicks=['Alice', 'Bob'])
+ self.group_list = GroupList()
+ self.settings = Settings()
+ self.queues = gen_queue_dict()
+ self.master_key = MasterKey()
+ self.account_list = None
+ self.args = self.contact_list, self.group_list, self.settings, self.queues, self.settings
+
+ def tearDown(self):
+ tear_queues(self.queues)
+
+ def configure_groups(self, no_contacts: int) -> None:
+ """Configure group list."""
+ self.contact_list = ContactList(nicks=[str(n) for n in range(no_contacts)])
+ self.group_list = GroupList(groups=['test_group'])
+ self.group = self.group_list.get_group('test_group')
+ self.group.members = self.contact_list.contacts
+ self.account_list = [nick_to_pub_key(str(n)) for n in range(no_contacts)]
+
+ def test_invalid_group_name_raises_fr(self):
+ # Setup
+ self.configure_groups(no_contacts=21)
+
+ # Test
+ self.assert_fr("Error: Group name must be printable.",
+ group_create, 'test_group\x1f', self.account_list, *self.args)
+
+ def test_too_many_purp_accounts_raises_fr(self):
+ # Setup
+ self.configure_groups(no_contacts=60)
+
+ # Test
+ cl_str = [nick_to_pub_key(str(n)) for n in range(51)]
+ self.assert_fr("Error: TFC settings only allow 50 members per group.",
+ group_create, 'test_group_50', cl_str,
+ self.contact_list, self.group_list, self.settings, self.queues, self.master_key)
+
+ def test_full_group_list_raises_fr(self):
+ # Setup
+ self.group_list = GroupList(groups=[f"testgroup_{n}" for n in range(50)])
+
+ # Test
+ self.assert_fr("Error: TFC settings only allow 50 groups.",
+ group_create, 'testgroup_50', [nick_to_pub_key("Alice")],
+ self.contact_list, self.group_list, self.settings, self.queues, self.master_key)
+
+ @mock.patch('builtins.input', return_value='Yes')
+ def test_successful_group_creation(self, _):
+ # Test
+ self.assertIsNone(group_create('test_group_2', [nick_to_pub_key("Alice")], *self.args))
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
+ self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1)
+
+ def test_successful_empty_group_creation(self):
+ self.assertIsNone(group_create('test_group_2', [], *self.args))
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
+ self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 0)
+
+
+class TestGroupAddMember(TFCTestCase):
+
+ def setUp(self):
+ self.user_input = UserInput()
+ self.contact_list = ContactList(nicks=['Alice', 'Bob'])
+ self.group_list = GroupList()
+ self.settings = Settings()
+ self.queues = gen_queue_dict()
+ self.master_key = MasterKey()
+ self.args = self.contact_list, self.group_list, self.settings, self.queues, self.settings
+
+ def tearDown(self):
+ tear_queues(self.queues)
+
+ def configure_groups(self, no_contacts: int) -> None:
+ """Configure group database."""
+ self.contact_list = ContactList(nicks=[str(n) for n in range(no_contacts)])
+ self.group_list = GroupList(groups=['test_group'])
+ self.group = self.group_list.get_group('test_group')
+ self.group.members = self.contact_list.contacts
+ self.account_list = [nick_to_pub_key(str(n)) for n in range(no_contacts)]
+
+ @mock.patch('builtins.input', return_value='Yes')
+ def test_new_group_is_created_if_specified_group_does_not_exist_and_user_chooses_yes(self, _):
+ self.assertIsNone(group_add_member('test_group', [], *self.args))
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
+ self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 0)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', return_value='No')
+ def test_raises_fr_if_specified_group_does_not_exist_and_user_chooses_no(self, *_):
+ self.assert_fr("Group creation aborted.", group_add_member, 'test_group', [], *self.args)
+
+ def test_too_large_final_member_list_raises_fr(self):
+ # Setup
+ contact_list = ContactList(nicks=[str(n) for n in range(51)])
+ group_list = GroupList(groups=['testgroup'])
+ group = group_list.get_group('testgroup')
+ group.members = contact_list.contacts[:49]
+
+ # Test
+ m_to_add = [nick_to_pub_key("49"), nick_to_pub_key("50")]
+ self.assert_fr("Error: TFC settings only allow 50 members per group.", group_add_member,
+ 'testgroup', m_to_add, contact_list, group_list, self.settings, self.queues, self.master_key)
+
+ @mock.patch('builtins.input', return_value='Yes')
+ def test_successful_group_add(self, _):
+ # Setup
+ self.configure_groups(no_contacts=51)
+ self.group.members = self.contact_list.contacts[:49]
+
+ # Test
+ self.assertIsNone(group_add_member('test_group', [nick_to_pub_key("49")], self.contact_list,
+ self.group_list, self.settings, self.queues, self.master_key))
+ group2 = self.group_list.get_group('test_group')
+ self.assertEqual(len(group2), 50)
+
+ for c in group2:
+ self.assertIsInstance(c, Contact)
+
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
+ self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1)
+
+
+class TestGroupRmMember(TFCTestCase):
+
+ def setUp(self):
+ self.unittest_dir = cd_unittest()
+ self.user_input = UserInput()
+ self.contact_list = ContactList(nicks=['Alice', 'Bob'])
+ self.group_list = GroupList(groups=["test_group"])
+ self.settings = Settings()
+ self.queues = gen_queue_dict()
+ self.master_key = MasterKey()
+ self.args = self.contact_list, self.group_list, self.settings, self.queues, self.settings
+
+ def tearDown(self):
+ cleanup(self.unittest_dir)
+ tear_queues(self.queues)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', return_value='Yes')
+ def test_no_accounts_removes_group(self, *_):
+ self.assert_fr("Removed group 'test_group'.", group_rm_member, 'test_group', [], *self.args)
+
+ @mock.patch('builtins.input', return_value='Yes')
+ def test_remove_members_from_unknown_group(self, _):
+ self.assert_fr("Group 'test_group_2' does not exist.",
+ group_rm_member, 'test_group_2', [nick_to_pub_key("Alice")], *self.args)
+
+ @mock.patch('builtins.input', return_value='Yes')
+ def test_successful_group_remove(self, _):
+ self.assertIsNone(group_rm_member('test_group', [nick_to_pub_key("Alice")], *self.args))
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
+ self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1)
+
+
+class TestGroupRemoveGroup(TFCTestCase):
+
+ def setUp(self):
+ self.unittest_dir = cd_unittest()
+ self.user_input = UserInput()
+ self.contact_list = ContactList(nicks=['Alice', 'Bob'])
+ self.group_list = GroupList(groups=['test_group'])
+ self.settings = Settings()
+ self.queues = gen_queue_dict()
+ self.master_key = MasterKey()
+ self.args = self.contact_list, self.group_list, self.settings, self.queues, self.settings
+
+ def tearDown(self):
+ cleanup(self.unittest_dir)
+ tear_queues(self.queues)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', return_value='No')
+ def test_cancel_of_remove_raises_fr(self, *_):
+ self.assert_fr("Group removal aborted.", group_rm_group, 'test_group', *self.args)
+
+ @mock.patch('builtins.input', return_value='Yes')
+ def test_remove_group_not_on_transmitter_raises_fr(self, _):
+ unknown_group_id = b58encode(bytes(GROUP_ID_LENGTH))
+ self.assert_fr("Transmitter has no group '2dVseX46KS9Sp' to remove.",
+ group_rm_group, unknown_group_id, *self.args)
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 2)
+
+ @mock.patch('builtins.input', return_value='Yes')
+ def test_invalid_group_id_raises_fr(self, _):
+ invalid_group_id = b58encode(bytes(GROUP_ID_LENGTH))[:-1]
+ self.assert_fr("Error: Invalid group name/ID.", group_rm_group, invalid_group_id, *self.args)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', return_value='Yes')
+ def test_remove_group_and_notify(self, *_):
+ self.assert_fr("Removed group 'test_group'.", group_rm_group, 'test_group', *self.args)
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 2)
+ self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1)
+
+
+class TestRenameGroup(TFCTestCase):
+
+ def setUp(self):
+ self.queues = gen_queue_dict()
+ self.settings = Settings()
+ self.contact_list = ContactList()
+ self.group_list = GroupList(groups=['test_group'])
+ self.window = TxWindow()
+ self.args = self.window, self.contact_list, self.group_list, self.settings, self.queues
+
+ def tearDown(self):
+ tear_queues(self.queues)
+
+ def test_contact_window_raises_fr(self):
+ # Setup
+ self.window.type = WIN_TYPE_CONTACT
+
+ # Test
+ self.assert_fr("Error: Selected window is not a group window.", rename_group, "window", *self.args)
+
+ def test_invalid_group_name_raises_fr(self):
+ # Setup
+ self.window.type = WIN_TYPE_GROUP
+ self.window.group = self.group_list.get_group('test_group')
+
+ # Test
+ self.assert_fr("Error: Group name must be printable.", rename_group, "window\x1f", *self.args)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_successful_group_change(self, _):
+ # Setup
+ group = create_group('test_group')
+ self.window.type = WIN_TYPE_GROUP
+ self.window.uid = group.group_id
+ self.window.group = group
+
+ # Test
+ self.assert_fr("Renamed group 'test_group' to 'window'.", rename_group, "window", *self.args)
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
+
+
+if __name__ == '__main__':
+ unittest.main(exit=False)
diff --git a/tests/transmitter/test_contact.py b/tests/transmitter/test_contact.py
new file mode 100644
index 0000000..49b7fa6
--- /dev/null
+++ b/tests/transmitter/test_contact.py
@@ -0,0 +1,628 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import os
+import unittest
+
+from unittest import mock
+
+from src.common.crypto import blake2b
+from src.common.statics import *
+
+from src.transmitter.contact import add_new_contact, change_nick, contact_setting, remove_contact
+
+from tests.mock_classes import ContactList, create_contact, create_group, Group, GroupList, MasterKey, OnionService
+from tests.mock_classes import Settings, TxWindow, UserInput
+from tests.utils import cd_unittest, cleanup, gen_queue_dict, group_name_to_group_id, ignored
+from tests.utils import nick_to_onion_address, nick_to_pub_key, tear_queues, TFCTestCase, VALID_ECDHE_PUB_KEY
+
+
+class TestAddNewContact(TFCTestCase):
+
+ def setUp(self):
+ self.contact_list = ContactList()
+ self.group_list = GroupList()
+ self.settings = Settings(disable_gui_dialog=True)
+ self.queues = gen_queue_dict()
+ self.onion_service = OnionService()
+ self.args = self.contact_list, self.group_list, self.settings, self.queues, self.onion_service
+
+ def tearDown(self):
+ with ignored(OSError):
+ os.remove(f'v4dkh.psk - Give to hpcra')
+ tear_queues(self.queues)
+
+ def test_adding_new_contact_during_traffic_masking_raises_fr(self):
+ # Setup
+ self.settings.traffic_masking = True
+
+ # Test
+ self.assert_fr("Error: Command is disabled during traffic masking.", add_new_contact, *self.args)
+
+ def test_contact_list_full_raises_fr(self):
+ # Setup
+ contact_list = ContactList(nicks=[str(n) for n in range(50)])
+ self.contact_list.contacts = contact_list.contacts
+
+ # Test
+ self.assert_fr("Error: TFC settings only allow 50 accounts.", add_new_contact, *self.args)
+
+ @mock.patch('builtins.input', side_effect=[nick_to_onion_address("Bob"), 'Bob', '', VALID_ECDHE_PUB_KEY, 'Yes',
+ blake2b(nick_to_pub_key('Bob'), digest_size=CONFIRM_CODE_LENGTH).hex()])
+ @mock.patch('shutil.get_terminal_size', return_value=[200, 200])
+ @mock.patch('time.sleep', return_value=None)
+ def test_default_nick_ecdhe(self, *_):
+ self.assertIsNone(add_new_contact(*self.args))
+ contact = self.contact_list.get_contact_by_address_or_nick("Bob")
+ self.assertEqual(contact.nick, 'Bob')
+ self.assertNotEqual(contact.tx_fingerprint, bytes(FINGERPRINT_LENGTH))
+
+ @mock.patch('builtins.input', side_effect=[nick_to_onion_address("Alice"), 'Alice_', 'psk', '.'])
+ @mock.patch('getpass.getpass', return_value='test_password')
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('src.transmitter.key_exchanges.ARGON2_MIN_MEMORY', 1000)
+ @mock.patch('src.transmitter.key_exchanges.MIN_KEY_DERIVATION_TIME', 0.01)
+ def test_standard_nick_psk_kex(self, *_):
+ self.onion_service.account = nick_to_onion_address('Bob').encode()
+ self.assertIsNone(add_new_contact(*self.args))
+ contact = self.contact_list.get_contact_by_pub_key(nick_to_pub_key("Alice"))
+ self.assertEqual(contact.nick, 'Alice_')
+ self.assertEqual(contact.tx_fingerprint, bytes(FINGERPRINT_LENGTH))
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', side_effect=KeyboardInterrupt)
+ def test_keyboard_interrupt_raises_fr(self, *_):
+ self.assert_fr('Contact creation aborted.', add_new_contact, *self.args)
+
+
+class TestRemoveContact(TFCTestCase):
+
+ def setUp(self):
+ self.unittest_dir = cd_unittest()
+ self.contact_list = ContactList(nicks=['Alice'])
+ self.group_list = GroupList(groups=['test_group'])
+ self.settings = Settings()
+ self.queues = gen_queue_dict()
+ self.master_key = MasterKey()
+ self.pub_key = nick_to_pub_key('Alice')
+ self.args = self.contact_list, self.group_list, self.settings, self.queues, self.master_key
+
+ def tearDown(self):
+ cleanup(self.unittest_dir)
+ tear_queues(self.queues)
+
+ def test_contact_removal_during_traffic_masking_raises_fr(self):
+ # Setup
+ self.settings.traffic_masking = True
+
+ # Test
+ self.assert_fr("Error: Command is disabled during traffic masking.",
+ remove_contact, UserInput(), None, *self.args)
+
+ def test_missing_account_raises_fr(self):
+ self.assert_fr("Error: No account specified.", remove_contact, UserInput('rm '), None, *self.args)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('shutil.get_terminal_size', return_value=[150, 150])
+ @mock.patch('builtins.input', return_value='Yes')
+ def test_invalid_account_raises_fr(self, *_):
+ # Setup
+ user_input = UserInput(f'rm {nick_to_onion_address("Alice")[:-1]}')
+ window = TxWindow(window_contacts=[self.contact_list.get_contact_by_address_or_nick('Alice')],
+ type=WIN_TYPE_CONTACT,
+ uid=self.pub_key)
+
+ # Test
+ self.assert_fr("Error: Invalid selection.", remove_contact, user_input, window, *self.args)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('shutil.get_terminal_size', return_value=[150, 150])
+ @mock.patch('builtins.input', return_value='No')
+ def test_user_abort_raises_fr(self, *_):
+ # Setup
+ user_input = UserInput(f'rm {nick_to_onion_address("Alice")}')
+
+ # Test
+ self.assert_fr("Removal of contact aborted.", remove_contact, user_input, None, *self.args)
+
+ @mock.patch('builtins.input', return_value='Yes')
+ def test_successful_removal_of_contact(self, _):
+ # Setup
+ window = TxWindow(window_contacts=[self.contact_list.get_contact_by_address_or_nick('Alice')],
+ type=WIN_TYPE_CONTACT,
+ uid=self.pub_key)
+
+ # Test
+ for g in self.group_list:
+ self.assertIsInstance(g, Group)
+ self.assertTrue(g.has_member(self.pub_key))
+
+ self.assertIsNone(remove_contact(UserInput('rm Alice'), window, *self.args))
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
+
+ km_data = self.queues[KEY_MANAGEMENT_QUEUE].get()
+ self.assertEqual(km_data, (KDB_REMOVE_ENTRY_HEADER, self.pub_key))
+ self.assertFalse(self.contact_list.has_pub_key(self.pub_key))
+
+ for g in self.group_list:
+ self.assertIsInstance(g, Group)
+ self.assertFalse(g.has_member(self.pub_key))
+
+ @mock.patch('builtins.input', return_value='Yes')
+ def test_successful_removal_of_last_member_of_active_group(self, _):
+ # Setup
+ user_input = UserInput('rm Alice')
+ window = TxWindow(window_contacts=[self.contact_list.get_contact_by_address_or_nick("Alice")],
+ type=WIN_TYPE_GROUP,
+ name='test_group')
+ group = self.group_list.get_group('test_group')
+ group.members = [self.contact_list.get_contact_by_address_or_nick("Alice")]
+ pub_key = nick_to_pub_key('Alice')
+
+ # Test
+ for g in self.group_list:
+ self.assertIsInstance(g, Group)
+ self.assertTrue(g.has_member(pub_key))
+ self.assertEqual(len(group), 1)
+
+ self.assertIsNone(remove_contact(user_input, window, *self.args))
+
+ for g in self.group_list:
+ self.assertIsInstance(g, Group)
+ self.assertFalse(g.has_member(pub_key))
+
+ self.assertFalse(self.contact_list.has_pub_key(pub_key))
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
+
+ km_data = self.queues[KEY_MANAGEMENT_QUEUE].get()
+ self.assertEqual(km_data, (KDB_REMOVE_ENTRY_HEADER, pub_key))
+
+ @mock.patch('shutil.get_terminal_size', return_value=[150, 150])
+ @mock.patch('builtins.input', return_value='Yes')
+ def test_no_contact_found_on_transmitter(self, *_):
+ # Setup
+ user_input = UserInput(f'rm {nick_to_onion_address("Charlie")}')
+ contact_list = ContactList(nicks=['Bob'])
+ window = TxWindow(window_contact=[contact_list.get_contact_by_address_or_nick('Bob')],
+ type=WIN_TYPE_GROUP)
+
+ # Test
+ self.assertIsNone(remove_contact(user_input, window, *self.args))
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
+ self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1)
+ command_packet = self.queues[COMMAND_PACKET_QUEUE].get()
+ self.assertIsInstance(command_packet, bytes)
+
+
+class TestChangeNick(TFCTestCase):
+
+ def setUp(self):
+ self.contact_list = ContactList(nicks=['Alice'])
+ self.group_list = GroupList()
+ self.settings = Settings()
+ self.queues = gen_queue_dict()
+ self.args = self.contact_list, self.group_list, self.settings, self.queues
+
+ def tearDown(self):
+ tear_queues(self.queues)
+
+ def test_missing_nick_raises_fr(self):
+ self.assert_fr("Error: No nick specified.",
+ change_nick, UserInput("nick "), TxWindow(type=WIN_TYPE_CONTACT), *self.args)
+
+ def test_invalid_nick_raises_fr(self):
+ # Setup
+ window = TxWindow(type=WIN_TYPE_CONTACT,
+ contact=create_contact('Bob'))
+
+ # Test
+ self.assert_fr("Error: Nick must be printable.", change_nick, UserInput("nick Alice\x01"), window, *self.args)
+
+ def test_successful_nick_change(self):
+ # Setup
+ window = TxWindow(name='Alice',
+ type=WIN_TYPE_CONTACT,
+ contact=self.contact_list.get_contact_by_address_or_nick('Alice'))
+
+ # Test
+ self.assertIsNone(change_nick(UserInput("nick Alice_"), window, *self.args))
+ self.assertEqual(self.contact_list.get_contact_by_pub_key(nick_to_pub_key('Alice')).nick, 'Alice_')
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_successful_group_nick_change(self, _):
+ # Setup
+ group = create_group('test_group')
+ user_input = UserInput("nick group2")
+ window = TxWindow(name ='test_group',
+ type =WIN_TYPE_GROUP,
+ group=group,
+ uid =group.group_id)
+
+ # Test
+ self.assert_fr("Renamed group 'test_group' to 'group2'.", change_nick, user_input, window, *self.args)
+ self.assertEqual(window.group.name, 'group2')
+
+
+class TestContactSetting(TFCTestCase):
+
+ def setUp(self):
+ self.contact_list = ContactList(nicks=['Alice', 'Bob'])
+ self.group_list = GroupList(groups=['test_group'])
+ self.settings = Settings()
+ self.queues = gen_queue_dict()
+ self.pub_key = nick_to_pub_key("Alice")
+ self.args = self.contact_list, self.group_list, self.settings, self.queues
+
+ def tearDown(self):
+ tear_queues(self.queues)
+
+ def test_invalid_command_raises_fr(self):
+ self.assert_fr("Error: Invalid command.", contact_setting, UserInput('loging on'), None, *self.args)
+
+ def test_missing_parameter_raises_fr(self):
+ self.assert_fr("Error: Invalid command.", contact_setting, UserInput(''), None, *self.args)
+
+ def test_invalid_extra_parameter_raises_fr(self):
+ self.assert_fr("Error: Invalid command.", contact_setting, UserInput('logging on al'), None, *self.args)
+
+ def test_enable_logging_for_user(self):
+ # Setup
+ contact = self.contact_list.get_contact_by_address_or_nick('Alice')
+ contact.log_messages = False
+ window = TxWindow(uid=self.pub_key,
+ type=WIN_TYPE_CONTACT,
+ contact=contact)
+
+ # Test
+ self.assertFalse(contact.log_messages)
+ self.assertIsNone(contact_setting(UserInput('logging on'), window, *self.args))
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
+ self.assertEqual(self.queues[LOG_SETTING_QUEUE].qsize(), 0)
+ self.assertTrue(contact.log_messages)
+
+ def test_enable_logging_for_user_during_traffic_masking(self):
+ # Setup
+ contact = self.contact_list.get_contact_by_address_or_nick('Alice')
+ contact.log_messages = False
+ window = TxWindow(uid=self.pub_key,
+ type=WIN_TYPE_CONTACT,
+ contact=contact,
+ log_messages=False)
+ self.settings.traffic_masking = True
+
+ # Test
+ self.assertFalse(contact.log_messages)
+ self.assertFalse(window.log_messages)
+
+ self.assertIsNone(contact_setting(UserInput('logging on'), window, *self.args))
+
+ self.assertEqual(self.queues[TM_COMMAND_PACKET_QUEUE].qsize(), 1)
+ self.assertTrue(self.queues[LOG_SETTING_QUEUE].get())
+ self.assertTrue(window.log_messages)
+ self.assertTrue(contact.log_messages)
+
+ def test_enable_logging_for_group(self):
+ # Setup
+ group = self.group_list.get_group('test_group')
+ group.log_messages = False
+ window = TxWindow(uid=group_name_to_group_id('test_group'),
+ type=WIN_TYPE_GROUP,
+ group=group,
+ window_contacts=group.members)
+
+ # Test
+ self.assertFalse(group.log_messages)
+ self.assertIsNone(contact_setting(UserInput('logging on'), window, *self.args))
+ self.assertTrue(group.log_messages)
+
+ def test_enable_logging_for_all_users(self):
+ # Setup
+ contact = self.contact_list.get_contact_by_address_or_nick("Alice")
+ window = TxWindow(uid=self.pub_key,
+ type=WIN_TYPE_CONTACT,
+ contact=contact,
+ window_contacts=[contact])
+
+ for c in self.contact_list:
+ c.log_messages = False
+ for g in self.group_list:
+ g.log_messages = False
+
+ # Test
+ for c in self.contact_list:
+ self.assertFalse(c.log_messages)
+ for g in self.group_list:
+ self.assertFalse(g.log_messages)
+
+ self.assertIsNone(contact_setting(UserInput('logging on all'), window, *self.args))
+
+ for c in self.contact_list:
+ self.assertTrue(c.log_messages)
+ for g in self.group_list:
+ self.assertTrue(g.log_messages)
+
+ def test_disable_logging_for_user(self):
+ # Setup
+ contact = self.contact_list.get_contact_by_address_or_nick('Alice')
+ contact.log_messages = True
+ window = TxWindow(uid=self.pub_key,
+ type=WIN_TYPE_CONTACT,
+ contact=contact,
+ window_contacts=[contact])
+
+ # Test
+ self.assertTrue(contact.log_messages)
+ self.assertIsNone(contact_setting(UserInput('logging off'), window, *self.args))
+ self.assertFalse(contact.log_messages)
+
+ def test_disable_logging_for_group(self):
+ # Setup
+ group = self.group_list.get_group('test_group')
+ group.log_messages = True
+ window = TxWindow(uid=group_name_to_group_id('test_group'),
+ type=WIN_TYPE_GROUP,
+ group=group,
+ window_contacts=group.members)
+
+ # Test
+ self.assertTrue(group.log_messages)
+ self.assertIsNone(contact_setting(UserInput('logging off'), window, *self.args))
+ self.assertFalse(group.log_messages)
+
+ def test_disable_logging_for_all_users(self):
+ # Setup
+ contact = self.contact_list.get_contact_by_address_or_nick("Alice")
+ window = TxWindow(uid=self.pub_key,
+ type=WIN_TYPE_CONTACT,
+ contact=contact,
+ window_contacts=[contact])
+
+ for c in self.contact_list:
+ c.log_messages = True
+ for g in self.group_list:
+ g.log_messages = True
+
+ # Test
+ for c in self.contact_list:
+ self.assertTrue(c.log_messages)
+ for g in self.group_list:
+ self.assertTrue(g.log_messages)
+
+ self.assertIsNone(contact_setting(UserInput('logging off all'), window, *self.args))
+
+ for c in self.contact_list:
+ self.assertFalse(c.log_messages)
+ for g in self.group_list:
+ self.assertFalse(g.log_messages)
+
+ def test_enable_file_reception_for_user(self):
+ # Setup
+ contact = self.contact_list.get_contact_by_address_or_nick('Alice')
+ contact.file_reception = False
+ window = TxWindow(uid=self.pub_key,
+ type=WIN_TYPE_CONTACT,
+ contact=contact,
+ window_contacts=[contact])
+
+ # Test
+ self.assertFalse(contact.file_reception)
+ self.assertIsNone(contact_setting(UserInput('store on'), window, *self.args))
+ self.assertTrue(contact.file_reception)
+
+ def test_enable_file_reception_for_group(self):
+ # Setup
+ group = self.group_list.get_group('test_group')
+ window = TxWindow(uid=group_name_to_group_id('test_group'),
+ type=WIN_TYPE_GROUP,
+ group=group,
+ window_contacts=group.members)
+
+ for m in group:
+ m.file_reception = False
+
+ # Test
+ for m in group:
+ self.assertFalse(m.file_reception)
+ self.assertIsNone(contact_setting(UserInput('store on'), window, *self.args))
+ for m in group:
+ self.assertTrue(m.file_reception)
+
+ def test_enable_file_reception_for_all_users(self):
+ # Setup
+ contact = self.contact_list.get_contact_by_address_or_nick("Alice")
+ window = TxWindow(uid=self.pub_key,
+ type=WIN_TYPE_CONTACT,
+ contact=contact,
+ window_contacts=[contact])
+
+ for c in self.contact_list:
+ c.file_reception = False
+
+ # Test
+ for c in self.contact_list:
+ self.assertFalse(c.file_reception)
+
+ self.assertIsNone(contact_setting(UserInput('store on all'), window, *self.args))
+ for c in self.contact_list:
+ self.assertTrue(c.file_reception)
+
+ def test_disable_file_reception_for_user(self):
+ # Setup
+ contact = self.contact_list.get_contact_by_address_or_nick('Alice')
+ contact.file_reception = True
+ window = TxWindow(uid=self.pub_key,
+ type=WIN_TYPE_CONTACT,
+ contact=contact,
+ window_contacts=[contact])
+
+ # Test
+ self.assertTrue(contact.file_reception)
+ self.assertIsNone(contact_setting(UserInput('store off'), window, *self.args))
+ self.assertFalse(contact.file_reception)
+
+ def test_disable_file_reception_for_group(self):
+ # Setup
+ group = self.group_list.get_group('test_group')
+ window = TxWindow(uid=group_name_to_group_id('test_group'),
+ type=WIN_TYPE_GROUP,
+ group=group,
+ window_contacts=group.members)
+
+ for m in group:
+ m.file_reception = True
+
+ # Test
+ for m in group:
+ self.assertTrue(m.file_reception)
+
+ self.assertIsNone(contact_setting(UserInput('store off'), window, *self.args))
+ for m in group:
+ self.assertFalse(m.file_reception)
+
+ def test_disable_file_reception_for_all_users(self):
+ # Setup
+ contact = self.contact_list.get_contact_by_address_or_nick("Alice")
+ window = TxWindow(uid=self.pub_key,
+ type=WIN_TYPE_CONTACT,
+ contact=contact,
+ window_contacts=[contact])
+
+ for c in self.contact_list:
+ c.file_reception = True
+
+ # Test
+ for c in self.contact_list:
+ self.assertTrue(c.file_reception)
+ self.assertIsNone(contact_setting(UserInput('store off all'), window, *self.args))
+ for c in self.contact_list:
+ self.assertFalse(c.file_reception)
+
+ def test_enable_notifications_for_user(self):
+ # Setup
+ contact = self.contact_list.get_contact_by_address_or_nick('Alice')
+ contact.notifications = False
+ window = TxWindow(uid=self.pub_key,
+ type=WIN_TYPE_CONTACT,
+ contact=contact)
+
+ # Test
+ self.assertFalse(contact.notifications)
+ self.assertIsNone(contact_setting(UserInput('notify on'), window, *self.args))
+ self.assertTrue(contact.notifications)
+
+ def test_enable_notifications_for_group(self):
+ # Setup
+ user_input = UserInput('notify on')
+ group = self.group_list.get_group('test_group')
+ group.notifications = False
+ window = TxWindow(uid=group_name_to_group_id('test_group'),
+ type=WIN_TYPE_GROUP,
+ group=group,
+ window_contacts=group.members)
+
+ # Test
+ self.assertFalse(group.notifications)
+ self.assertIsNone(contact_setting(user_input, window, *self.args))
+ self.assertTrue(group.notifications)
+
+ def test_enable_notifications_for_all_users(self):
+ # Setup
+ contact = self.contact_list.get_contact_by_address_or_nick("Alice")
+ window = TxWindow(uid=self.pub_key,
+ type=WIN_TYPE_CONTACT,
+ contact=contact,
+ window_contacts=[contact])
+
+ for c in self.contact_list:
+ c.notifications = False
+ for g in self.group_list:
+ g.notifications = False
+
+ # Test
+ for c in self.contact_list:
+ self.assertFalse(c.notifications)
+ for g in self.group_list:
+ self.assertFalse(g.notifications)
+
+ self.assertIsNone(contact_setting(UserInput('notify on all'), window, *self.args))
+
+ for c in self.contact_list:
+ self.assertTrue(c.notifications)
+ for g in self.group_list:
+ self.assertTrue(g.notifications)
+
+ def test_disable_notifications_for_user(self):
+ # Setup
+ contact = self.contact_list.get_contact_by_address_or_nick('Alice')
+ contact.notifications = True
+ window = TxWindow(uid=self.pub_key,
+ type=WIN_TYPE_CONTACT,
+ contact=contact,
+ window_contacts=[contact])
+
+ # Test
+ self.assertTrue(contact.notifications)
+ self.assertIsNone(contact_setting(UserInput('notify off'), window, *self.args))
+ self.assertFalse(contact.notifications)
+
+ def test_disable_notifications_for_group(self):
+ # Setup
+ group = self.group_list.get_group('test_group')
+ group.notifications = True
+ window = TxWindow(uid=group_name_to_group_id('test_group'),
+ type=WIN_TYPE_GROUP,
+ group=group,
+ window_contacts=group.members)
+
+ # Test
+ self.assertTrue(group.notifications)
+ self.assertIsNone(contact_setting(UserInput('notify off'), window, *self.args))
+ self.assertFalse(group.notifications)
+
+ def test_disable_notifications_for_all_users(self):
+ # Setup
+ contact = self.contact_list.get_contact_by_address_or_nick("Alice")
+ window = TxWindow(uid=self.pub_key,
+ type=WIN_TYPE_CONTACT,
+ contact=contact,
+ window_contacts=[contact])
+
+ for c in self.contact_list:
+ c.notifications = True
+ for g in self.group_list:
+ g.notifications = True
+
+ # Test
+ for c in self.contact_list:
+ self.assertTrue(c.notifications)
+ for g in self.group_list:
+ self.assertTrue(g.notifications)
+
+ self.assertIsNone(contact_setting(UserInput('notify off all'), window, *self.args))
+
+ for c in self.contact_list:
+ self.assertFalse(c.notifications)
+ for g in self.group_list:
+ self.assertFalse(g.notifications)
+
+
+if __name__ == '__main__':
+ unittest.main(exit=False)
diff --git a/tests/tx/test_files.py b/tests/transmitter/test_files.py
similarity index 54%
rename from tests/tx/test_files.py
rename to tests/transmitter/test_files.py
index 14862fa..0d8e5c7 100644
--- a/tests/tx/test_files.py
+++ b/tests/transmitter/test_files.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,34 +16,31 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
import os
import unittest
-from src.tx.files import File
+from src.transmitter.files import File
-from tests.mock_classes import create_contact, Gateway, Settings, TxWindow
-from tests.utils import ignored, TFCTestCase
+from tests.mock_classes import create_contact, Settings, TxWindow
+from tests.utils import cd_unittest, cleanup, TFCTestCase
class TestFile(TFCTestCase):
def setUp(self):
- self.f_name = 250 * 'a' + '.txt'
- self.settings = Settings()
- self.window = TxWindow()
- self.gateway = Gateway(txm_inter_packet_delay=0.02)
+ self.unittest_dir = cd_unittest()
+ self.window = TxWindow()
+ self.settings = Settings()
+ self.args = self.window, self.settings
def tearDown(self):
- for f in [self.f_name, 'testfile.txt']:
- with ignored(OSError):
- os.remove(f)
+ cleanup(self.unittest_dir)
def test_missing_file_raises_fr(self):
- self.assertFR("Error: File not found.",
- File, './testfile.txt', self.window, self.settings, self.gateway)
+ self.assert_fr("Error: File not found.", File, './testfile.txt', *self.args)
def test_empty_file_raises_fr(self):
# Setup
@@ -50,17 +48,16 @@ class TestFile(TFCTestCase):
f.write(b'')
# Test
- self.assertFR("Error: Target file is empty.",
- File, './testfile.txt', self.window, self.settings, self.gateway)
+ self.assert_fr("Error: Target file is empty.", File, './testfile.txt', *self.args)
def test_oversize_filename_raises_fr(self):
# Setup
- with open(self.f_name, 'wb+') as f:
+ f_name = 250 * 'a' + '.txt'
+ with open(f_name, 'wb+') as f:
f.write(b'a')
# Test
- self.assertFR("Error: File name is too long.",
- File, f'./{self.f_name}', self.window, self.settings, self.gateway)
+ self.assert_fr("Error: File name is too long.", File, f'./{f_name}', *self.args)
def test_small_file(self):
# Setup
@@ -68,16 +65,15 @@ class TestFile(TFCTestCase):
with open('testfile.txt', 'wb+') as f:
f.write(input_data)
- self.settings.session_traffic_masking = True
+ self.settings.traffic_masking = True
self.settings.multi_packet_random_delay = True
# Test
- file = File('./testfile.txt', self.window, self.settings, self.gateway)
+ file = File('./testfile.txt', *self.args)
self.assertEqual(file.name, b'testfile.txt')
- self.assertEqual(file.size, b'\x00\x00\x00\x00\x00\x00\x00\x05')
- self.assertEqual(file.size_print, '5.0B')
- self.assertEqual(len(file.plaintext), 136)
+ self.assertEqual(file.size_hr, '5.0B')
+ self.assertEqual(len(file.plaintext), 114)
self.assertIsInstance(file.plaintext, bytes)
def test_large_file_and_local_testing(self):
@@ -91,14 +87,13 @@ class TestFile(TFCTestCase):
self.window.window_contacts = [create_contact(c) for c in ['Alice', 'Bob']]
# Test
- file = File('./testfile.txt', self.window, self.settings, self.gateway)
+ file = File('./testfile.txt', *self.args)
self.assertEqual(file.name, b'testfile.txt')
- self.assertEqual(file.size, b'\x00\x00\x00\x00\x00\x00\x07\xd0')
- self.assertEqual(len(file.plaintext), 2633)
- self.assertEqual(file.size_print, '2.0KB')
+ self.assertEqual(len(file.plaintext), 2112)
+ self.assertEqual(file.size_hr, '2.0KB')
self.assertIsInstance(file.plaintext, bytes)
- self.assertEqual(file.time_print, '0:00:56')
+ self.assertEqual(file.time_hr, '0:01:48')
if __name__ == '__main__':
diff --git a/tests/transmitter/test_input_loop.py b/tests/transmitter/test_input_loop.py
new file mode 100644
index 0000000..d371b22
--- /dev/null
+++ b/tests/transmitter/test_input_loop.py
@@ -0,0 +1,81 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import unittest
+
+from unittest import mock
+from unittest.mock import MagicMock
+
+from src.common.crypto import blake2b
+from src.common.statics import *
+
+from src.transmitter.input_loop import input_loop
+
+from tests.mock_classes import ContactList, Gateway, GroupList, MasterKey, OnionService, Settings
+from tests.utils import gen_queue_dict, nick_to_onion_address, nick_to_pub_key, tear_queues, VALID_ECDHE_PUB_KEY
+
+
+class TestInputLoop(unittest.TestCase):
+
+ conf_code = blake2b(nick_to_pub_key('Alice'), digest_size=CONFIRM_CODE_LENGTH).hex()
+ input_list = ['61', # Enter Relay confirmation code
+ '61', # Enter Receiver confirmation code
+ nick_to_onion_address("Alice"), # Enter rx-account for new contact
+ 'Alice', # Enter nick for contact
+ '', # Enter to default for ECDHE
+ VALID_ECDHE_PUB_KEY, # Enter public key for contact
+ 'Yes', # Accept key fingerprints for Alice
+ conf_code, # Confirmation code
+ 'Alice', # Select Alice as the recipient
+ 'Alice', # Select Alice as the recipient
+ 'Test', # Send test message
+ '/file', # Open file selection prompt
+ '', # Give empty string to abort
+ '/exit'] # Enter exit command
+
+ def setUp(self):
+ self.settings = Settings(disable_gui_dialog=True)
+ self.gateway = Gateway()
+ self.contact_list = ContactList()
+ self.group_list = GroupList()
+ self.master_key = MasterKey()
+ self.onion_service = OnionService()
+ self.queues = gen_queue_dict()
+
+ def tearDown(self):
+ tear_queues(self.queues)
+
+ @mock.patch('builtins.input', side_effect=input_list)
+ @mock.patch('os.fdopen', MagicMock())
+ @mock.patch('os.getrandom', lambda n, flags: n*b'a')
+ @mock.patch('os.urandom', lambda n: n*b'a')
+ @mock.patch('shutil.get_terminal_size', return_value=[200, 200])
+ @mock.patch('src.transmitter.commands.exit_tfc', side_effect=SystemExit)
+ @mock.patch('sys.stdin', MagicMock())
+ @mock.patch('time.sleep', return_value=None)
+ def test_input_loop_functions(self, *_):
+ with self.assertRaises(SystemExit):
+ self.assertIsNone(input_loop(self.queues, self.settings, self.gateway, self.contact_list,
+ self.group_list, self.master_key, self.onion_service, stdin_fd=1))
+
+
+if __name__ == '__main__':
+ unittest.main(exit=False)
diff --git a/tests/transmitter/test_key_exchanges.py b/tests/transmitter/test_key_exchanges.py
new file mode 100644
index 0000000..8791776
--- /dev/null
+++ b/tests/transmitter/test_key_exchanges.py
@@ -0,0 +1,361 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import os
+import unittest
+
+from unittest import mock
+
+from src.common.crypto import blake2b
+from src.common.encoding import b58encode
+from src.common.statics import *
+
+from src.transmitter.key_exchanges import create_pre_shared_key, export_onion_service_data, new_local_key
+from src.transmitter.key_exchanges import rxp_load_psk, start_key_exchange, verify_fingerprints
+
+from tests.mock_classes import ContactList, create_contact, Gateway, OnionService, Settings, TxWindow
+from tests.utils import cd_unittest, cleanup, gen_queue_dict, ignored, nick_to_pub_key
+from tests.utils import nick_to_short_address, tear_queues, TFCTestCase, VALID_ECDHE_PUB_KEY
+
+
+class TestOnionService(TFCTestCase):
+
+ def setUp(self):
+ self.contact_list = ContactList()
+ self.settings = Settings()
+ self.onion_service = OnionService()
+ self.queues = gen_queue_dict()
+ self.gateway = Gateway()
+
+ @mock.patch('os.urandom', side_effect=[b'a'])
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', side_effect=['invalid_cc', '', '61'])
+ def test_onion_service_delivery(self, *_):
+ self.assertIsNone(export_onion_service_data(self.contact_list, self.settings, self.onion_service, self.gateway))
+ self.assertEqual(len(self.gateway.packets), 2)
+
+
+class TestLocalKey(TFCTestCase):
+
+ def setUp(self):
+ self.contact_list = ContactList()
+ self.settings = Settings()
+ self.queues = gen_queue_dict()
+ self.args = self.contact_list, self.settings, self.queues
+
+ def tearDown(self):
+ tear_queues(self.queues)
+
+ def test_new_local_key_when_traffic_masking_is_enabled_raises_fr(self):
+ self.settings.traffic_masking = True
+ self.contact_list.contacts = [create_contact(LOCAL_ID)]
+ self.assert_fr("Error: Command is disabled during traffic masking.", new_local_key, *self.args)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', side_effect=['bad', '', '61'])
+ @mock.patch('os.getrandom', side_effect=[SYMMETRIC_KEY_LENGTH*b'a',
+ SYMMETRIC_KEY_LENGTH*b'a',
+ SYMMETRIC_KEY_LENGTH*b'a',
+ XCHACHA20_NONCE_LENGTH*b'a',
+ SYMMETRIC_KEY_LENGTH*b'a',
+ SYMMETRIC_KEY_LENGTH*b'a'])
+ @mock.patch('os.urandom', return_value=CONFIRM_CODE_LENGTH*b'a')
+ def test_new_local_key(self, *_):
+ # Setup
+ self.settings.nc_bypass_messages = False
+ self.settings.traffic_masking = False
+
+ # Test
+ self.assertIsNone(new_local_key(*self.args))
+ local_contact = self.contact_list.get_contact_by_pub_key(LOCAL_PUBKEY)
+
+ self.assertEqual(local_contact.onion_pub_key, LOCAL_PUBKEY)
+ self.assertEqual(local_contact.nick, LOCAL_NICK)
+ self.assertEqual(local_contact.tx_fingerprint, bytes(FINGERPRINT_LENGTH))
+ self.assertEqual(local_contact.rx_fingerprint, bytes(FINGERPRINT_LENGTH))
+ self.assertFalse(local_contact.log_messages)
+ self.assertFalse(local_contact.file_reception)
+ self.assertFalse(local_contact.notifications)
+
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
+
+ cmd, account, tx_key, rx_key, tx_hek, rx_hek = self.queues[KEY_MANAGEMENT_QUEUE].get()
+
+ self.assertEqual(cmd, KDB_ADD_ENTRY_HEADER)
+ self.assertEqual(account, LOCAL_PUBKEY)
+ for key in [tx_key, rx_key, tx_hek, rx_hek]:
+ self.assertIsInstance(key, bytes)
+ self.assertEqual(len(key), SYMMETRIC_KEY_LENGTH)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', side_effect=KeyboardInterrupt)
+ @mock.patch('os.getrandom', lambda x, flags: x * b'a')
+ def test_keyboard_interrupt_raises_fr(self, *_):
+ self.assert_fr("Local key setup aborted.", new_local_key, *self.args)
+
+
+class TestVerifyFingerprints(unittest.TestCase):
+
+ @mock.patch('builtins.input', return_value='Yes')
+ def test_correct_fingerprint(self, _):
+ self.assertTrue(verify_fingerprints(bytes(FINGERPRINT_LENGTH), bytes(FINGERPRINT_LENGTH)))
+
+ @mock.patch('builtins.input', return_value='No')
+ def test_incorrect_fingerprint(self, _):
+ self.assertFalse(verify_fingerprints(bytes(FINGERPRINT_LENGTH), bytes(FINGERPRINT_LENGTH)))
+
+
+class TestKeyExchange(TFCTestCase):
+
+ def setUp(self):
+ self.contact_list = ContactList()
+ self.settings = Settings()
+ self.queues = gen_queue_dict()
+ self.args = self.contact_list, self.settings, self.queues
+
+ def tearDown(self):
+ tear_queues(self.queues)
+
+ @mock.patch('shutil.get_terminal_size', return_value=[200, 200])
+ @mock.patch('builtins.input', return_value=b58encode(bytes(TFC_PUBLIC_KEY_LENGTH), public_key=True))
+ def test_zero_public_key_raises_fr(self, *_):
+ self.assert_fr("Error: Zero public key", start_key_exchange, nick_to_pub_key("Alice"), 'Alice', *self.args)
+
+ @mock.patch('shutil.get_terminal_size', return_value=[200, 200])
+ @mock.patch('builtins.input', return_value=b58encode((TFC_PUBLIC_KEY_LENGTH-1)*b'a', public_key=True))
+ def test_invalid_public_key_length_raises_fr(self, *_):
+ self.assert_fr("Error: Invalid public key length",
+ start_key_exchange, nick_to_pub_key("Alice"), 'Alice', *self.args)
+
+ @mock.patch('builtins.input', side_effect=['', # Empty message should resend key
+ VALID_ECDHE_PUB_KEY[:-1], # Short key should fail
+ VALID_ECDHE_PUB_KEY + 'a', # Long key should fail
+ VALID_ECDHE_PUB_KEY[:-1] + 'a', # Invalid key should fail
+ VALID_ECDHE_PUB_KEY, # Correct key
+ 'No']) # Fingerprint mismatch
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('shutil.get_terminal_size', return_value=[200, 200])
+ def test_fingerprint_mismatch_raises_fr(self, *_):
+ self.assert_fr("Error: Fingerprint mismatch", start_key_exchange, nick_to_pub_key("Alice"), 'Alice', *self.args)
+
+ @mock.patch('builtins.input', side_effect=['', # Resend public key
+ VALID_ECDHE_PUB_KEY, # Correct key
+ 'Yes', # Fingerprint match
+ '', # Resend contact data
+ 'ff', # Invalid confirmation code
+ blake2b(nick_to_pub_key('Alice'), digest_size=CONFIRM_CODE_LENGTH).hex()
+ ])
+ @mock.patch('shutil.get_terminal_size', return_value=[200, 200])
+ @mock.patch('time.sleep', return_value=None)
+ def test_successful_exchange(self, *_):
+ self.assertIsNone(start_key_exchange(nick_to_pub_key("Alice"), 'Alice', *self.args))
+
+ contact = self.contact_list.get_contact_by_pub_key(nick_to_pub_key("Alice"))
+ self.assertEqual(contact.onion_pub_key, nick_to_pub_key("Alice"))
+ self.assertEqual(contact.nick, 'Alice')
+ self.assertEqual(contact.kex_status, KEX_STATUS_VERIFIED)
+ self.assertIsInstance(contact.tx_fingerprint, bytes)
+ self.assertIsInstance(contact.rx_fingerprint, bytes)
+ self.assertEqual(len(contact.tx_fingerprint), FINGERPRINT_LENGTH)
+ self.assertEqual(len(contact.rx_fingerprint), FINGERPRINT_LENGTH)
+ self.assertFalse(contact.log_messages)
+ self.assertFalse(contact.file_reception)
+ self.assertTrue(contact.notifications)
+
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 2)
+ self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 2)
+
+ cmd, account, tx_key, rx_key, tx_hek, rx_hek = self.queues[KEY_MANAGEMENT_QUEUE].get()
+
+ self.assertEqual(cmd, KDB_ADD_ENTRY_HEADER)
+ self.assertEqual(account, nick_to_pub_key("Alice"))
+ self.assertEqual(len(tx_key), SYMMETRIC_KEY_LENGTH)
+
+ for key in [tx_key, rx_key, tx_hek, rx_hek]:
+ self.assertIsInstance(key, bytes)
+ self.assertEqual(len(key), SYMMETRIC_KEY_LENGTH)
+
+ @mock.patch('builtins.input', side_effect=['', # Resend public key
+ VALID_ECDHE_PUB_KEY, # Correct key
+ KeyboardInterrupt, # Skip fingerprint verification
+ '', # Manual proceed for warning message
+ blake2b(nick_to_pub_key('Alice'),
+ digest_size=CONFIRM_CODE_LENGTH).hex()])
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('shutil.get_terminal_size', return_value=[200, 200])
+ def test_successful_exchange_skip_fingerprint_verification(self, *_):
+ self.assertIsNone(start_key_exchange(nick_to_pub_key("Alice"), 'Alice', *self.args))
+
+ contact = self.contact_list.get_contact_by_pub_key(nick_to_pub_key("Alice"))
+ self.assertEqual(contact.onion_pub_key, nick_to_pub_key("Alice"))
+ self.assertEqual(contact.nick, 'Alice')
+ self.assertEqual(contact.kex_status, KEX_STATUS_UNVERIFIED)
+
+ @mock.patch('os.getrandom', side_effect=[SYMMETRIC_KEY_LENGTH * b'a',
+ SYMMETRIC_KEY_LENGTH * b'a'])
+ @mock.patch('builtins.input', side_effect=[KeyboardInterrupt,
+ VALID_ECDHE_PUB_KEY,
+ 'Yes',
+ blake2b(nick_to_pub_key('Alice'),
+ digest_size=CONFIRM_CODE_LENGTH).hex()])
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('shutil.get_terminal_size', return_value=[200, 200])
+ def test_successful_exchange_with_previous_key(self, *_):
+ # Test caching of private key
+ self.assert_fr("Key exchange interrupted.", start_key_exchange, nick_to_pub_key('Alice'), 'Alice', *self.args)
+
+ alice = self.contact_list.get_contact_by_address_or_nick('Alice')
+ self.assertEqual(alice.kex_status, KEX_STATUS_PENDING)
+
+ # Test re-using private key
+ self.assertIsNone(start_key_exchange(nick_to_pub_key('Alice'), 'Alice', *self.args))
+ self.assertIsNone(alice.tfc_private_key)
+ self.assertEqual(alice.kex_status, KEX_STATUS_VERIFIED)
+
+
+class TestPSK(TFCTestCase):
+
+ def setUp(self):
+ self.unittest_dir = cd_unittest()
+ self.contact_list = ContactList()
+ self.settings = Settings(disable_gui_dialog=True)
+ self.queues = gen_queue_dict()
+ self.onion_service = OnionService()
+ self.args = self.contact_list, self.settings, self.onion_service, self.queues
+
+ def tearDown(self):
+ cleanup(self.unittest_dir)
+
+ with ignored(OSError):
+ os.remove(f"{self.onion_service.user_short_address}.psk - Give to {nick_to_short_address('Alice')}")
+
+ tear_queues(self.queues)
+
+ @mock.patch('builtins.input', side_effect=['/root/', '.'])
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('getpass.getpass', return_value='test_password')
+ @mock.patch('src.transmitter.key_exchanges.ARGON2_MIN_MEMORY', 1000)
+ @mock.patch('src.transmitter.key_exchanges.MIN_KEY_DERIVATION_TIME', 0.01)
+ def test_psk_creation(self, *_):
+ self.assertIsNone(create_pre_shared_key(nick_to_pub_key("Alice"), 'Alice', *self.args))
+
+ contact = self.contact_list.get_contact_by_pub_key(nick_to_pub_key("Alice"))
+
+ self.assertEqual(contact.onion_pub_key, nick_to_pub_key("Alice"))
+ self.assertEqual(contact.nick, 'Alice')
+ self.assertEqual(contact.tx_fingerprint, bytes(FINGERPRINT_LENGTH))
+ self.assertEqual(contact.rx_fingerprint, bytes(FINGERPRINT_LENGTH))
+ self.assertEqual(contact.kex_status, KEX_STATUS_NO_RX_PSK)
+
+ self.assertFalse(contact.log_messages)
+ self.assertFalse(contact.file_reception)
+ self.assertTrue(contact.notifications)
+
+ cmd, account, tx_key, rx_key, tx_hek, rx_hek = self.queues[KEY_MANAGEMENT_QUEUE].get()
+
+ self.assertEqual(cmd, KDB_ADD_ENTRY_HEADER)
+ self.assertEqual(account, nick_to_pub_key("Alice"))
+
+ for key in [tx_key, rx_key, tx_hek, rx_hek]:
+ self.assertIsInstance(key, bytes)
+ self.assertEqual(len(key), SYMMETRIC_KEY_LENGTH)
+
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
+ self.assertTrue(os.path.isfile(
+ f"{self.onion_service.user_short_address}.psk - Give to {nick_to_short_address('Alice')}"))
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('getpass.getpass', side_effect=KeyboardInterrupt)
+ def test_keyboard_interrupt_raises_fr(self, *_):
+ self.assert_fr("PSK generation aborted.", create_pre_shared_key, nick_to_pub_key("Alice"), 'Alice', *self.args)
+
+
+class TestReceiverLoadPSK(TFCTestCase):
+
+ def setUp(self):
+ self.settings = Settings()
+ self.queues = gen_queue_dict()
+ self.args = self.settings, self.queues
+
+ def tearDown(self):
+ tear_queues(self.queues)
+
+ def test_raises_fr_when_traffic_masking_is_enabled(self):
+ # Setup
+ self.settings.traffic_masking = True
+
+ # Test
+ self.assert_fr("Error: Command is disabled during traffic masking.", rxp_load_psk, None, None, *self.args)
+
+ def test_active_group_raises_fr(self):
+ # Setup
+ window = TxWindow(type=WIN_TYPE_GROUP)
+
+ # Test
+ self.assert_fr("Error: Group is selected.", rxp_load_psk, window, None, *self.args)
+
+ def test_ecdhe_key_raises_fr(self):
+ # Setup
+ contact = create_contact('Alice')
+ contact_list = ContactList(contacts=[contact])
+ window = TxWindow(type=WIN_TYPE_CONTACT,
+ uid=nick_to_pub_key("Alice"),
+ contact=contact)
+
+ # Test
+ self.assert_fr(f"Error: The current key was exchanged with {ECDHE}.",
+ rxp_load_psk, window, contact_list, *self.args)
+
+ @mock.patch('src.transmitter.key_exchanges.ARGON2_MIN_MEMORY', 1000)
+ @mock.patch('src.transmitter.key_exchanges.MIN_KEY_DERIVATION_TIME', 0.01)
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', side_effect=[b'0'.hex(), blake2b(nick_to_pub_key('Alice'),
+ digest_size=CONFIRM_CODE_LENGTH).hex()])
+ def test_successful_command(self, *_):
+ # Setup
+ contact = create_contact('Alice', kex_status=KEX_STATUS_NO_RX_PSK)
+ contact_list = ContactList(contacts=[contact])
+ window = TxWindow(type=WIN_TYPE_CONTACT,
+ name='Alice',
+ uid=nick_to_pub_key("Alice"),
+ contact=contact)
+
+ # Test
+ self.assert_fr("Removed PSK reminder for Alice.", rxp_load_psk, window, contact_list, *self.args)
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
+ self.assertEqual(contact.kex_status, KEX_STATUS_HAS_RX_PSK)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', side_effect=KeyboardInterrupt)
+ def test_keyboard_interrupt_raises_fr(self, *_):
+ # Setup
+ contact = create_contact('Alice', kex_status=KEX_STATUS_NO_RX_PSK)
+ contact_list = ContactList(contacts=[contact])
+ window = TxWindow(type=WIN_TYPE_CONTACT,
+ uid=nick_to_pub_key("Alice"),
+ contact=contact)
+
+ # Test
+ self.assert_fr("PSK verification aborted.", rxp_load_psk, window, contact_list, *self.args)
+
+
+if __name__ == '__main__':
+ unittest.main(exit=False)
diff --git a/tests/transmitter/test_packet.py b/tests/transmitter/test_packet.py
new file mode 100644
index 0000000..ea7b7a3
--- /dev/null
+++ b/tests/transmitter/test_packet.py
@@ -0,0 +1,618 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import os
+import struct
+import time
+import unittest
+
+from multiprocessing import Queue
+from unittest import mock
+
+from src.common.statics import *
+
+from src.transmitter.packet import cancel_packet, queue_command, queue_file, queue_message, queue_assembly_packets
+from src.transmitter.packet import send_file, send_packet, split_to_assembly_packets
+
+from tests.mock_classes import create_contact, create_group, create_keyset, Gateway, ContactList, KeyList
+from tests.mock_classes import nick_to_pub_key, OnionService, Settings, TxWindow, UserInput
+from tests.utils import cd_unittest, cleanup, gen_queue_dict, tear_queue, tear_queues, TFCTestCase
+
+
+class TestQueueMessage(unittest.TestCase):
+
+ def setUp(self):
+ self.queues = gen_queue_dict()
+ self.settings = Settings()
+ self.args = self.settings, self.queues
+
+ def tearDown(self):
+ tear_queues(self.queues)
+
+ def test_private_message_header(self):
+ # Setup
+ user_input = UserInput(plaintext='Test message', type=MESSAGE)
+ window = TxWindow(log_messages=True)
+ window.window_contacts = [create_contact('Alice')]
+
+ # Test
+ self.assertIsNone(queue_message(user_input, window, *self.args))
+ self.assertEqual(self.queues[MESSAGE_PACKET_QUEUE].qsize(), 1)
+
+ def test_group_message_header(self):
+ # Setup
+ user_input = UserInput(plaintext='Test message', type=MESSAGE)
+ window = TxWindow(name='test_group',
+ type=WIN_TYPE_GROUP,
+ group=create_group('test_group'),
+ log_messages=True)
+ window.window_contacts = [create_contact('Alice')]
+
+ # Test
+ self.assertIsNone(queue_message(user_input, window, *self.args))
+ self.assertEqual(self.queues[MESSAGE_PACKET_QUEUE].qsize(), 1)
+
+ def test_group_management_message_header(self):
+ # Setup
+ user_input = UserInput(plaintext='Test message', type=MESSAGE)
+ window = TxWindow(log_messages=True)
+ window.window_contacts = [create_contact('Alice')]
+
+ # Test
+ self.assertIsNone(queue_message(user_input, window, *self.args, header=GROUP_MSG_INVITE_HEADER))
+ self.assertEqual(self.queues[MESSAGE_PACKET_QUEUE].qsize(), 1)
+
+
+class TestSendFile(TFCTestCase):
+
+ def setUp(self):
+ self.unittest_dir = cd_unittest()
+ self.settings = Settings()
+ self.queues = gen_queue_dict()
+ self.window = TxWindow()
+ self.onion_service = OnionService()
+ self.contact_list = ContactList(nicks=['Alice', 'Bob', 'Charlie'])
+ self.args = self.settings, self.queues, self.window
+
+ def tearDown(self):
+ cleanup(self.unittest_dir)
+ tear_queues(self.queues)
+
+ def test_traffic_masking_raises_fr(self):
+ self.settings.traffic_masking = True
+ self.assert_fr("Error: Command is disabled during traffic masking.", send_file, "testfile.txt", *self.args)
+
+ def test_missing_file_raises_fr(self):
+ self.assert_fr("Error: File not found.", send_file, "testfile.txt", *self.args)
+
+ def test_empty_file_raises_fr(self):
+ # Setup
+ open('testfile.txt', 'wb+').close()
+
+ # Test
+ self.assert_fr("Error: Target file is empty.", send_file, "testfile.txt", *self.args)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_file_transmission_to_contact(self, _):
+ # Setup
+ self.window.window_contacts = [self.contact_list.get_contact_by_address_or_nick('Alice')]
+ self.window.type_print = 'contact'
+
+ input_data = os.urandom(5)
+ with open('testfile.txt', 'wb+') as f:
+ f.write(input_data)
+
+ # Test
+ self.assertIsNone(send_file("testfile.txt", *self.args))
+ self.assertEqual(self.queues[MESSAGE_PACKET_QUEUE].qsize(), 1)
+ self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1)
+
+ @mock.patch('time.sleep', return_value=None)
+ def test_file_transmission_to_group(self, _):
+ # Setup
+ self.window.window_contacts = [self.contact_list.get_contact_by_address_or_nick('Alice'),
+ self.contact_list.get_contact_by_address_or_nick('Bob')]
+ self.window.type_print = 'group'
+
+ input_data = os.urandom(5)
+ with open('testfile.txt', 'wb+') as f:
+ f.write(input_data)
+
+ self.assertIsNone(send_file("testfile.txt", *self.args))
+ self.assertEqual(self.queues[MESSAGE_PACKET_QUEUE].qsize(), 2)
+ self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1)
+
+
+class TestQueueFile(TFCTestCase):
+
+ file_list = ('tx_contacts', 'tx_groups', 'tx_keys', 'tx_login_data', 'tx_settings',
+ 'rx_contacts', 'rx_groups', 'rx_keys', 'rx_login_data', 'rx_settings',
+ 'tx_serial_settings.json', 'nc_serial_settings.json',
+ 'rx_serial_settings.json', 'tx_onion_db')
+
+ def setUp(self):
+ self.unittest_dir = cd_unittest()
+ self.queues = gen_queue_dict()
+
+ def tearDown(self):
+ cleanup(self.unittest_dir)
+ tear_queues(self.queues)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', side_effect=file_list)
+ def test_tfc_database_raises_fr(self, *_):
+ window = TxWindow(name='Alice',
+ type=WIN_TYPE_CONTACT,
+ type_print='contact',
+ uid=nick_to_pub_key("Alice"))
+ settings = Settings(traffic_masking=True,
+ disable_gui_dialog=True)
+
+ for file in self.file_list:
+ with open(file, 'wb+') as f:
+ f.write(b'a')
+
+ self.assert_fr("Error: Can't send TFC database.", queue_file, window, settings, self.queues)
+
+ @mock.patch('shutil.get_terminal_size', return_value=[150, 150])
+ @mock.patch('builtins.input', side_effect=['./testfile.txt', 'No'])
+ def test_aborted_file(self, *_):
+ # Setup
+ input_data = os.urandom(5)
+ with open('testfile.txt', 'wb+') as f:
+ f.write(input_data)
+
+ window = TxWindow(name='Alice',
+ type=WIN_TYPE_CONTACT,
+ type_print='contact',
+ uid=nick_to_pub_key("Alice"))
+ settings = Settings(traffic_masking=True,
+ disable_gui_dialog=True)
+
+ # Test
+ self.assert_fr("File selection aborted.", queue_file, window, settings, self.queues)
+
+ @mock.patch('shutil.get_terminal_size', return_value=[150, 150])
+ @mock.patch('builtins.input', side_effect=['./testfile.txt', 'Yes'])
+ def test_file_queue_short_traffic_masking(self, *_):
+ # Setup
+ input_data = os.urandom(5)
+ with open('testfile.txt', 'wb+') as f:
+ f.write(input_data)
+
+ window = TxWindow(name='Alice',
+ type=WIN_TYPE_CONTACT,
+ type_print='contact',
+ uid=nick_to_pub_key("Alice"),
+ log_messages=True)
+ settings = Settings(traffic_masking=True,
+ disable_gui_dialog=True)
+
+ # Test
+ self.assertIsNone(queue_file(window, settings, self.queues))
+ self.assertEqual(self.queues[TM_FILE_PACKET_QUEUE].qsize(), 1)
+
+ q_data, log_messages, log_as_ph = self.queues[TM_FILE_PACKET_QUEUE].get()
+ self.assertIsInstance(q_data, bytes)
+ self.assertTrue(log_messages)
+ self.assertTrue(log_as_ph)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', side_effect=['./testfile.txt', 'Yes'])
+ def test_file_queue_long_normal(self, *_):
+ # Setup
+ input_data = os.urandom(2000)
+ with open('testfile.txt', 'wb+') as f:
+ f.write(input_data)
+
+ window = TxWindow(name='Alice',
+ type=WIN_TYPE_CONTACT,
+ type_print='contact',
+ uid=nick_to_pub_key("Alice"),
+ window_contacts=[create_contact('Alice')],
+ log_messages=True)
+ settings = Settings(traffic_masking=False,
+ disable_gui_dialog=True,
+ confirm_sent_files=True,
+ multi_packet_random_delay=True)
+
+ # Test
+ self.assertIsNone(queue_file(window, settings, self.queues))
+ self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 1)
+
+ @mock.patch('shutil.get_terminal_size', return_value=[150, 150])
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', side_effect=['./testfile.txt', KeyboardInterrupt])
+ def test_keyboard_interrupt_raises_fr(self, *_):
+ # Setup
+ input_data = os.urandom(2000)
+ with open('testfile.txt', 'wb+') as f:
+ f.write(input_data)
+
+ window = TxWindow(name='Alice',
+ type=WIN_TYPE_CONTACT,
+ type_print='contact',
+ uid=nick_to_pub_key("Alice"),
+ window_contacts=[create_contact('Alice')],
+ log_messages=True)
+ settings = Settings(traffic_masking=True,
+ disable_gui_dialog=True,
+ confirm_sent_files=True,
+ multi_packet_random_delay=True)
+
+ # Test
+ self.assert_fr("File selection aborted.", queue_file, window, settings, self.queues)
+ self.assertEqual(self.queues[RELAY_PACKET_QUEUE].qsize(), 0)
+
+
+class TestQueueCommand(unittest.TestCase):
+
+ def setUp(self):
+ self.settings = Settings()
+ self.queues = gen_queue_dict()
+
+ def tearDown(self):
+ tear_queues(self.queues)
+
+ def test_queue_command(self):
+ self.assertIsNone(queue_command(os.urandom(200), self.settings, self.queues))
+ c_pt = self.queues[COMMAND_PACKET_QUEUE].get()
+ self.assertEqual(len(c_pt), ASSEMBLY_PACKET_LENGTH)
+
+
+class TestSplitToAssemblyPackets(unittest.TestCase):
+
+ def test_short_message(self):
+ packet_list = split_to_assembly_packets(b'Short message', MESSAGE)
+ self.assertEqual(len(packet_list), 1)
+ self.assertTrue(packet_list[0].startswith(M_S_HEADER))
+
+ def test_long_message(self):
+ packet_list = split_to_assembly_packets(os.urandom(800), MESSAGE)
+ self.assertEqual(len(packet_list), 4)
+ self.assertTrue(packet_list[0].startswith(M_L_HEADER))
+ self.assertTrue(packet_list[1].startswith(M_A_HEADER))
+ self.assertTrue(packet_list[2].startswith(M_A_HEADER))
+ self.assertTrue(packet_list[3].startswith(M_E_HEADER))
+
+ def test_short_file(self):
+ packet_list = split_to_assembly_packets(os.urandom(50), FILE)
+ self.assertEqual(len(packet_list), 1)
+ self.assertTrue(packet_list[0].startswith(F_S_HEADER))
+
+ def test_long_file(self):
+ packet_list = split_to_assembly_packets(os.urandom(800), FILE)
+ self.assertEqual(len(packet_list), 4)
+ self.assertTrue(packet_list[0].startswith(F_L_HEADER + b'\x00\x00\x00\x00\x00\x00\x00\x04'))
+ self.assertTrue(packet_list[1].startswith(F_A_HEADER))
+ self.assertTrue(packet_list[2].startswith(F_A_HEADER))
+ self.assertTrue(packet_list[3].startswith(F_E_HEADER))
+
+ def test_short_command(self):
+ packet_list = split_to_assembly_packets(os.urandom(50), COMMAND)
+ self.assertEqual(len(packet_list), 1)
+ self.assertTrue(packet_list[0].startswith(C_S_HEADER))
+
+ def test_long_command(self):
+ packet_list = split_to_assembly_packets(os.urandom(800), COMMAND)
+ self.assertEqual(len(packet_list), 4)
+ self.assertTrue(packet_list[0].startswith(C_L_HEADER))
+ self.assertTrue(packet_list[1].startswith(C_A_HEADER))
+ self.assertTrue(packet_list[2].startswith(C_A_HEADER))
+ self.assertTrue(packet_list[3].startswith(C_E_HEADER))
+
+
+class TestQueueAssemblyPackets(unittest.TestCase):
+
+ def setUp(self):
+ self.settings = Settings()
+ self.queues = gen_queue_dict()
+ self.window = TxWindow(uid=nick_to_pub_key("Alice"),
+ log_messages=True)
+ self.window.window_contacts = [create_contact('Alice')]
+ self.args = self.settings, self.queues, self.window
+
+ def tearDown(self):
+ tear_queues(self.queues)
+
+ def test_queue_message_traffic_masking(self):
+ # Setup
+ packet_list = split_to_assembly_packets(os.urandom(200), MESSAGE)
+ self.settings.traffic_masking = True
+
+ # Test
+ self.assertIsNone(queue_assembly_packets(packet_list, MESSAGE, *self.args))
+ self.assertEqual(self.queues[TM_MESSAGE_PACKET_QUEUE].qsize(), 1)
+ packet, log_messages, log_as_ph = self.queues[TM_MESSAGE_PACKET_QUEUE].get()
+ self.assertIsInstance(packet, bytes)
+ self.assertTrue(log_messages)
+ self.assertFalse(log_as_ph)
+
+ def test_queue_message_normal(self):
+ # Setup
+ packet_list = split_to_assembly_packets(os.urandom(200), MESSAGE)
+
+ # Test
+ self.assertIsNone(queue_assembly_packets(packet_list, MESSAGE, *self.args))
+ self.assertEqual(self.queues[MESSAGE_PACKET_QUEUE].qsize(), 1)
+
+ packet, pub_key, log_setting, log_as_ph, win_uid = self.queues[MESSAGE_PACKET_QUEUE].get()
+ self.assertIsInstance(packet, bytes)
+ self.assertEqual(pub_key, nick_to_pub_key("Alice"))
+ self.assertEqual(win_uid, nick_to_pub_key("Alice"))
+ self.assertTrue(log_setting)
+ self.assertFalse(log_as_ph)
+
+ def test_queue_file_traffic_masking(self):
+ # Setup
+ packet_list = split_to_assembly_packets(os.urandom(200), FILE)
+ self.settings.traffic_masking = True
+
+ # Test
+ self.assertIsNone(queue_assembly_packets(packet_list, FILE, *self.args))
+ self.assertEqual(self.queues[TM_FILE_PACKET_QUEUE].qsize(), 1)
+ packet, log_messages, log_as_ph = self.queues[TM_FILE_PACKET_QUEUE].get()
+ self.assertIsInstance(packet, bytes)
+ self.assertTrue(log_messages)
+ self.assertFalse(log_as_ph)
+
+ def test_queue_command_traffic_masking(self):
+ # Setup
+ packet_list = split_to_assembly_packets(os.urandom(200), COMMAND)
+ self.settings.traffic_masking = True
+
+ # Test
+ self.assertIsNone(queue_assembly_packets(packet_list, COMMAND, *self.args))
+ self.assertEqual(self.queues[TM_COMMAND_PACKET_QUEUE].qsize(), 1)
+ data = self.queues[TM_COMMAND_PACKET_QUEUE].get()
+ self.assertIsInstance(data, bytes)
+
+ def test_queue_command_traffic_masking_no_window(self):
+ # Setup
+ self.window = None
+ packet_list = split_to_assembly_packets(os.urandom(200), COMMAND)
+ self.settings.traffic_masking = True
+
+ # Test
+ self.assertIsNone(queue_assembly_packets(packet_list, COMMAND, *self.args))
+ self.assertEqual(self.queues[TM_COMMAND_PACKET_QUEUE].qsize(), 1)
+ data = self.queues[TM_COMMAND_PACKET_QUEUE].get()
+ self.assertIsInstance(data, bytes)
+
+ def test_queue_command_normal(self):
+ # Setup
+ packet_list = split_to_assembly_packets(os.urandom(200), COMMAND)
+
+ # Test
+ self.assertIsNone(queue_assembly_packets(packet_list, COMMAND, *self.args))
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
+ packet = self.queues[COMMAND_PACKET_QUEUE].get()
+ self.assertIsInstance(packet, bytes)
+
+
+class TestSendPacket(unittest.TestCase):
+ """\
+ This function is by far the most critical to security in Transmitter
+ Program, as it must detect the output of key material.
+
+ Plaintext length must always be evaluated to ensure constant
+ ciphertext length and hiding of output data type.
+
+ The most likely place for error is going to be the tx_harac
+ attribute of keyset, as it's the only data loaded from the sensitive
+ key database, and that is sent to the contact. An alternative place
+ could be a bug in the implementation where account strings would
+ incorrectly contain a byte string that contained key material, which
+ would cause Transmitter Program to leak keys to Networked Computer.
+ """
+
+ def setUp(self):
+ self.l_queue = Queue()
+ self.key_list = KeyList(nicks=['Alice'])
+ self.settings = Settings()
+ self.gateway = Gateway()
+ self.onion_service = OnionService()
+
+ def tearDown(self):
+ tear_queue(self.l_queue)
+
+ def test_message_length(self):
+ # Check that only 256-byte plaintext messages are ever allowed
+ pub_key = nick_to_pub_key("Alice")
+ for l in range(1, 256):
+ with self.assertRaises(SystemExit):
+ send_packet(self.key_list, self.gateway, self.l_queue, bytes(l), pub_key, True)
+
+ for l in range(257, 300):
+ with self.assertRaises(SystemExit):
+ send_packet(self.key_list, self.gateway, self.l_queue, bytes(l), pub_key, True)
+
+    def test_invalid_harac_raises_struct_error(self):
+ # Check that in the case where an internal error caused bytestring (possible key material) to end up in hash
+        # ratchet value, the system raises some error that prevents the output of packet. In this case, the error comes
+ # from the unsuccessful encoding of hash ratchet counter.
+ for l in range(1, 33):
+ key_list = KeyList()
+ key_list.keysets = [create_keyset('Alice',
+ tx_key=SYMMETRIC_KEY_LENGTH * b'\x02',
+ tx_harac=l * b'k')]
+
+ with self.assertRaises(struct.error):
+ send_packet(key_list, self.gateway, self.l_queue,
+ bytes(ASSEMBLY_PACKET_LENGTH), nick_to_pub_key("Alice"), True)
+
+ def test_valid_message_packet(self):
+ # Setup
+ gateway = Gateway(serial_error_correction=5)
+ key_list = KeyList(master_key=bytes(SYMMETRIC_KEY_LENGTH))
+ key_list.keysets = [create_keyset('Alice',
+ tx_key=SYMMETRIC_KEY_LENGTH * b'\x02',
+ tx_harac=8)]
+
+ # Test
+ self.assertIsNone(send_packet(key_list, gateway, self.l_queue,
+ bytes(ASSEMBLY_PACKET_LENGTH), nick_to_pub_key("Alice"), True))
+ self.assertEqual(len(gateway.packets), 1)
+ time.sleep(0.01)
+ self.assertFalse(self.l_queue.empty())
+
+ def test_valid_command_packet(self):
+ """Test that commands are output as they should.
+
+ Since command packets have no trailer, and since only user's
+ Receiver Program has local decryption key, encryption with any
+        key the recipient is not already in possession of does not
+ compromise plaintext.
+ """
+ # Setup
+ key_list = KeyList(master_key=bytes(SYMMETRIC_KEY_LENGTH))
+ key_list.keysets = [create_keyset(LOCAL_ID)]
+
+ # Test
+ self.assertIsNone(send_packet(key_list, self.gateway, self.l_queue,
+ bytes(ASSEMBLY_PACKET_LENGTH)))
+ self.assertEqual(len(self.gateway.packets), 1)
+ self.assertEqual(len(self.gateway.packets[0]), 345)
+ self.assertEqual(self.l_queue.qsize(), 1)
+
+
+class TestCancelPacket(TFCTestCase):
+
+ def setUp(self):
+ self.queues = gen_queue_dict()
+
+ def tearDown(self):
+ tear_queues(self.queues)
+
+ def test_cancel_message_during_normal(self):
+ # Setup
+ user_input = UserInput('cm')
+ settings = Settings()
+ window = TxWindow(name='Alice',
+ type=WIN_TYPE_CONTACT,
+ type_print='contact',
+ uid=nick_to_pub_key("Alice"))
+ window.window_contacts = [create_contact('Alice')]
+
+ self.queues[MESSAGE_PACKET_QUEUE].put(
+ ('test_message1', nick_to_pub_key("Alice"), False, False, nick_to_pub_key("Alice")))
+ self.queues[MESSAGE_PACKET_QUEUE].put(
+ ('test_message2', nick_to_pub_key("Charlie"), False, False, nick_to_pub_key("Charlie")))
+ self.queues[MESSAGE_PACKET_QUEUE].put(
+ ('test_message3', nick_to_pub_key("Alice"), False, False, nick_to_pub_key("Alice")))
+
+ # Test
+ self.assert_fr("Cancelled queued messages to contact Alice.",
+ cancel_packet, user_input, window, settings, self.queues)
+ self.assertEqual(self.queues[MESSAGE_PACKET_QUEUE].qsize(), 2)
+
+ def test_cancel_group_message_during_normal(self):
+ # Setup
+ user_input = UserInput('cm')
+ settings = Settings()
+ window = TxWindow(name='test_group',
+ type=WIN_TYPE_GROUP,
+ type_print='group',
+ uid='test_group')
+ window.window_contacts = [create_contact('Alice')]
+
+ self.queues[MESSAGE_PACKET_QUEUE].put(('test_message1', nick_to_pub_key("Alice"), False, False, 'test_group'))
+ self.queues[MESSAGE_PACKET_QUEUE].put(('test_message2', nick_to_pub_key("Alice"), False, False, 'test_group'))
+
+ # Test
+ self.assert_fr("Cancelled queued messages to group test_group.",
+ cancel_packet, user_input, window, settings, self.queues)
+ self.assertEqual(self.queues[MESSAGE_PACKET_QUEUE].qsize(), 1) # Cancel packet
+
+ def test_cancel_message_during_traffic_masking(self):
+ # Setup
+ user_input = UserInput('cm')
+ settings = Settings(traffic_masking=True)
+ window = TxWindow()
+ window.window_contacts = [create_contact('Alice')]
+
+ self.queues[TM_MESSAGE_PACKET_QUEUE].put(('test_message1', {nick_to_pub_key("Alice"): False}))
+ self.queues[TM_MESSAGE_PACKET_QUEUE].put(('test_message2', {nick_to_pub_key("Alice"): False}))
+
+ # Test
+ self.assertIsNone(cancel_packet(user_input, window, settings, self.queues))
+ self.assertEqual(self.queues[TM_MESSAGE_PACKET_QUEUE].qsize(), 1) # Cancel packet in queue
+
+ def test_cancel_file_during_traffic_masking(self):
+ # Setup
+ user_input = UserInput('cf')
+ settings = Settings(traffic_masking=True)
+ window = TxWindow()
+ window.window_contacts = [create_contact('Alice')]
+
+ self.queues[TM_FILE_PACKET_QUEUE].put(('testfile1', {nick_to_pub_key("Alice"): False}))
+ self.queues[TM_FILE_PACKET_QUEUE].put(('testfile2', {nick_to_pub_key("Alice"): False}))
+
+ # Test
+ self.assertIsNone(cancel_packet(user_input, window, settings, self.queues))
+ self.assertEqual(self.queues[TM_FILE_PACKET_QUEUE].qsize(), 1)
+
+ def test_cancel_file_during_normal(self):
+ # Setup
+ user_input = UserInput('cf')
+ settings = Settings()
+ window = TxWindow(name='Alice',
+ type=WIN_TYPE_CONTACT,
+ type_print='contact',
+ uid=nick_to_pub_key("Alice"))
+ window.window_contacts = [create_contact('Alice')]
+
+ # Test
+ self.assert_fr('Files are only queued during traffic masking.',
+ cancel_packet, user_input, window, settings, self.queues)
+
+ def test_cancel_file_when_nothing_to_cancel(self):
+ # Setup
+ user_input = UserInput('cf')
+ settings = Settings(traffic_masking=True)
+ window = TxWindow(name='Alice',
+ type=WIN_TYPE_CONTACT,
+ type_print='contact',
+ uid=nick_to_pub_key("Alice"))
+ window.window_contacts = [create_contact('Alice')]
+
+ # Test
+ self.assertIsNone(cancel_packet(user_input, window, settings, self.queues))
+ self.assertEqual(self.queues[TM_FILE_PACKET_QUEUE].qsize(), 0)
+
+ def test_cancel_message_when_nothing_to_cancel(self):
+ # Setup
+ user_input = UserInput('cm')
+ settings = Settings()
+ window = TxWindow(name='Alice',
+ type=WIN_TYPE_CONTACT,
+ type_print='contact',
+ uid=nick_to_pub_key("Alice"))
+ window.window_contacts = [create_contact('Alice')]
+
+ # Test
+ self.assert_fr("No messages queued for contact Alice.",
+ cancel_packet, user_input, window, settings, self.queues)
+ self.assertEqual(self.queues[TM_FILE_PACKET_QUEUE].qsize(), 0)
+
+
+if __name__ == '__main__':
+ unittest.main(exit=False)
diff --git a/tests/transmitter/test_sender_loop.py b/tests/transmitter/test_sender_loop.py
new file mode 100644
index 0000000..83de086
--- /dev/null
+++ b/tests/transmitter/test_sender_loop.py
@@ -0,0 +1,170 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import threading
+import time
+import unittest
+
+from src.common.statics import *
+
+from src.transmitter.commands import queue_command
+from src.transmitter.packet import queue_message, queue_to_nc
+from src.transmitter.sender_loop import sender_loop, standard_sender_loop, traffic_masking_loop
+
+from tests.mock_classes import ContactList, Gateway, KeyList, nick_to_pub_key, Settings, TxWindow, UserInput
+from tests.utils import gen_queue_dict, tear_queues
+
+
+class TestSenderLoop(unittest.TestCase):
+
+ def test_loops(self):
+ queues = gen_queue_dict()
+ window = TxWindow(log_messages=True)
+ settings = Settings(traffic_masking=True,
+ tm_static_delay=0.001,
+ tm_random_delay=0.001)
+ gateway = Gateway()
+ key_list = KeyList(nicks=['Bob', LOCAL_ID]) # Output Bob as existing contact
+
+ queues[TM_NOISE_COMMAND_QUEUE].put((C_N_HEADER + bytes(PADDING_LENGTH)))
+ queues[TM_NOISE_PACKET_QUEUE].put((P_N_HEADER + bytes(PADDING_LENGTH), True, True))
+ queues[WINDOW_SELECT_QUEUE].put(window.window_contacts)
+ queues[SENDER_MODE_QUEUE].put(settings)
+ queue_command(b'test', settings, queues) # Output command
+ self.assertIsNone(sender_loop(queues, settings, gateway, key_list, unittest=True))
+ self.assertEqual(len(gateway.packets), 1)
+
+ settings.traffic_masking = False
+ queues[SENDER_MODE_QUEUE].put(settings)
+ self.assertIsNone(sender_loop(queues, settings, gateway, key_list, unittest=True)) # Output Alice and Bob again
+ self.assertEqual(len(gateway.packets), 1)
+
+
+class TestTrafficMaskingLoop(unittest.TestCase):
+
+ def test_loop(self):
+ # Setup
+ queues = gen_queue_dict()
+ settings = Settings(traffic_masking=True,
+ tm_static_delay=0.001,
+ tm_random_delay=0.001)
+ gateway = Gateway()
+ key_list = KeyList(nicks=['Alice', LOCAL_ID])
+ window = TxWindow(log_messages=True)
+ contact_list = ContactList(nicks=['Alice', LOCAL_ID])
+ window.contact_list = contact_list
+ window.window_contacts = [contact_list.get_contact_by_address_or_nick('Alice')]
+ user_input = UserInput(plaintext='test')
+
+ def queue_delayer():
+ """Place packets to queue after delay."""
+ time.sleep(0.01)
+ queues[WINDOW_SELECT_QUEUE].put(window.window_contacts)
+ time.sleep(0.01)
+ queue_command(b'test', settings, queues) # 1
+ queue_message(user_input, window, settings, queues) # 2
+ queue_message(user_input, window, settings, queues) # 3
+ queue_command(b'test', settings, queues) # 4
+ queues[TM_NOISE_COMMAND_QUEUE].put((C_N_HEADER + bytes(PADDING_LENGTH))) # 5
+ queue_to_nc(UNENCRYPTED_DATAGRAM_HEADER + UNENCRYPTED_EXIT_COMMAND, queues[RELAY_PACKET_QUEUE]) # 6
+ queue_to_nc(UNENCRYPTED_DATAGRAM_HEADER + UNENCRYPTED_WIPE_COMMAND, queues[RELAY_PACKET_QUEUE]) # 7
+ queues[SENDER_MODE_QUEUE].put(settings)
+
+ # Test
+ threading.Thread(target=queue_delayer).start()
+ self.assertIsInstance(traffic_masking_loop(queues, settings, gateway, key_list), Settings)
+ self.assertEqual(len(gateway.packets), 7)
+
+ # Teardown
+ tear_queues(queues)
+
+
+class TestStandardSenderLoop(unittest.TestCase):
+
+ def test_loop(self):
+ # Setup
+ queues = gen_queue_dict()
+ settings = Settings(traffic_masking=False)
+ gateway = Gateway()
+ key_list = KeyList()
+ window = TxWindow(log_messages=True)
+ contact_list = ContactList(nicks=['Alice', LOCAL_ID])
+ window.contact_list = contact_list
+ window.window_contacts = [contact_list.get_contact_by_address_or_nick('Alice')]
+ user_input = UserInput(plaintext='test')
+
+ delay = 0.01
+
+ def queue_delayer():
+ """Place datagrams into queue after delay."""
+ time.sleep(delay)
+ queue_command(b'test', settings, queues)
+
+ time.sleep(delay)
+ queue_to_nc(PUBLIC_KEY_DATAGRAM_HEADER + TFC_PUBLIC_KEY_LENGTH * b'a' + nick_to_pub_key('Alice'), # 1
+ queues[RELAY_PACKET_QUEUE])
+
+ time.sleep(delay)
+ queue_to_nc(UNENCRYPTED_DATAGRAM_HEADER + UNENCRYPTED_WIPE_COMMAND, queues[RELAY_PACKET_QUEUE]) # 2
+
+ time.sleep(delay)
+ queue_to_nc(UNENCRYPTED_DATAGRAM_HEADER + UNENCRYPTED_EXIT_COMMAND, queues[RELAY_PACKET_QUEUE]) # 3
+
+ time.sleep(delay)
+ queues[KEY_MANAGEMENT_QUEUE].put((KDB_ADD_ENTRY_HEADER, LOCAL_PUBKEY, # 4
+ SYMMETRIC_KEY_LENGTH * b'a', SYMMETRIC_KEY_LENGTH * b'a',
+ SYMMETRIC_KEY_LENGTH * b'a', SYMMETRIC_KEY_LENGTH * b'a'))
+
+ time.sleep(delay)
+ queue_message(user_input, window, settings, queues) # 5
+
+ time.sleep(delay)
+ queue_message(user_input, window, settings, queues) # 6
+
+ time.sleep(delay)
+ queues[KEY_MANAGEMENT_QUEUE].put((KDB_ADD_ENTRY_HEADER, nick_to_pub_key('Alice'),
+ SYMMETRIC_KEY_LENGTH * b'a', SYMMETRIC_KEY_LENGTH * b'a',
+ SYMMETRIC_KEY_LENGTH * b'a', SYMMETRIC_KEY_LENGTH * b'a'))
+
+ time.sleep(delay)
+ queue_message(user_input, window, settings, queues) # 7
+
+ time.sleep(delay)
+ queue_message(user_input, window, settings, queues) # 8
+
+ time.sleep(delay)
+ queues[SENDER_MODE_QUEUE].put(settings)
+
+ threading.Thread(target=queue_delayer).start()
+
+ # Test
+ settings, m_buffer = standard_sender_loop(queues, gateway, key_list)
+ self.assertIsInstance(settings, Settings)
+ self.assertEqual(m_buffer, {nick_to_pub_key('Alice'): []})
+ self.assertEqual(len(gateway.packets), 8)
+ self.assertEqual(queues[EXIT_QUEUE].qsize(), 2)
+
+ # Teardown
+ tear_queues(queues)
+
+
+if __name__ == '__main__':
+ unittest.main(exit=False)
diff --git a/tests/tx/test_traffic_masking.py b/tests/transmitter/test_traffic_masking.py
similarity index 55%
rename from tests/tx/test_traffic_masking.py
rename to tests/transmitter/test_traffic_masking.py
index eff5a14..fcb3df1 100644
--- a/tests/tx/test_traffic_masking.py
+++ b/tests/transmitter/test_traffic_masking.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,36 +16,37 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see <http://www.gnu.org/licenses/>.
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
-import unittest
import time
-
-from multiprocessing import Queue
+import unittest
from src.common.statics import *
-from src.tx.traffic_masking import ConstantTime, noise_loop
+from src.transmitter.traffic_masking import HideRunTime, noise_loop
from tests.mock_classes import ContactList, Settings
+from tests.utils import gen_queue_dict, tear_queues
-class TestConstantTime(unittest.TestCase):
+class TestHideRunTime(unittest.TestCase):
def setUp(self):
- self.settings = Settings(multi_packet_random_delay=True)
+ self.settings = Settings()
+ self.settings.tm_random_delay = 1
+ self.settings.tm_static_delay = 1
def test_traffic_masking_delay(self):
start = time.monotonic()
- with ConstantTime(self.settings, d_type=TRAFFIC_MASKING):
+ with HideRunTime(self.settings, delay_type=TRAFFIC_MASKING):
pass
duration = time.monotonic() - start
- self.assertTrue(duration > 2.0)
+ self.assertTrue(duration > self.settings.tm_static_delay)
- def test_constant_time(self):
+ def test_static_time(self):
start = time.monotonic()
- with ConstantTime(self.settings, length=1.0):
+ with HideRunTime(self.settings, duration=1):
pass
duration = time.monotonic() - start
self.assertTrue(0.9 < duration < 1.1)
@@ -53,27 +55,22 @@ class TestConstantTime(unittest.TestCase):
class TestNoiseLoop(unittest.TestCase):
def setUp(self):
- self.np_queue = Queue()
+ self.queues = gen_queue_dict()
self.contact_list = ContactList(nicks=['Alice'])
def tearDown(self):
- while not self.np_queue.empty():
- self.np_queue.get()
- time.sleep(0.1)
- self.np_queue.close()
- time.sleep(0.1)
+ tear_queues(self.queues)
def test_noise_commands(self):
- self.assertIsNone(noise_loop(C_N_HEADER, self.np_queue, unittest=True))
- packet, log_messages = self.np_queue.get()
- self.assertEqual(packet, C_N_HEADER + bytes(PADDING_LEN))
- self.assertIsNone(log_messages)
+ self.assertIsNone(noise_loop(self.queues, unittest=True))
+ packet = self.queues[TM_NOISE_COMMAND_QUEUE].get()
+ self.assertEqual(packet, C_N_HEADER + bytes(PADDING_LENGTH))
def test_noise_packets(self):
- self.assertIsNone(noise_loop(P_N_HEADER, self.np_queue, self.contact_list, unittest=True))
- packet, log_messages, log_as_ph = self.np_queue.get()
+ self.assertIsNone(noise_loop(self.queues, self.contact_list, unittest=True))
+ packet, log_messages, log_as_ph = self.queues[TM_NOISE_PACKET_QUEUE].get()
self.assertEqual(packet, PLACEHOLDER_DATA)
- self.assertIsNone(log_messages)
+ self.assertTrue(log_messages)
self.assertTrue(log_as_ph)
diff --git a/tests/tx/test_user_input.py b/tests/transmitter/test_user_input.py
similarity index 75%
rename from tests/tx/test_user_input.py
rename to tests/transmitter/test_user_input.py
index 84b2e2f..9545e3a 100644
--- a/tests/tx/test_user_input.py
+++ b/tests/transmitter/test_user_input.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,15 +16,16 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see <http://www.gnu.org/licenses/>.
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
-import builtins
import unittest
+from unittest import mock
+
from src.common.statics import *
-from src.tx.user_input import get_input, process_aliases, UserInput
+from src.transmitter.user_input import get_input, process_aliases, UserInput
from tests.mock_classes import create_contact, create_group, Settings, TxWindow
@@ -35,7 +37,7 @@ class TestProcessAliases(unittest.TestCase):
self.window = TxWindow(name='Alice',
type=WIN_TYPE_CONTACT,
type_print='contact',
- window_contacts=[create_contact()])
+ window_contacts=[create_contact('Alice')])
def test_unread_shortcut(self):
self.assertEqual(process_aliases(' ', self.settings, self.window), '/unread')
@@ -61,49 +63,36 @@ class TestGetInput(unittest.TestCase):
self.window = TxWindow(name='Alice',
type=WIN_TYPE_CONTACT,
type_print='contact',
- window_contacts=[create_contact()])
+ window_contacts=[create_contact('Alice')])
self.window.group = create_group('test_group')
- def test_message(self):
- # Setup
- input_list = ['/', '', 'testmessage']
- gen = iter(input_list)
- builtins.input = lambda _: str(next(gen))
-
- # Test
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', side_effect=['/', '', 'test_message'])
+ def test_message(self, *_):
user_input = get_input(self.window, self.settings)
- self.assertEqual(user_input.plaintext, 'testmessage')
+ self.assertEqual(user_input.plaintext, 'test_message')
self.assertEqual(user_input.type, MESSAGE)
- def test_message_and_command_to_empty_group(self):
- # Setup
- input_list = ['/', '', 'testmessage', '/clear']
- gen = iter(input_list)
- builtins.input = lambda _: str(next(gen))
-
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', side_effect=['/', '', 'test_message', '/clear'])
+ def test_message_and_command_to_empty_group(self, *_):
self.window.type = WIN_TYPE_GROUP
self.window.window_contacts = []
self.window.group.members = []
-
- # Test
user_input = get_input(self.window, self.settings)
self.assertEqual(user_input.plaintext, 'clear')
self.assertEqual(user_input.type, COMMAND)
- def test_file(self):
- # Setup
- builtins.input = lambda _: '/file'
-
- # Test
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', return_value='/file')
+ def test_file(self, *_):
user_input = get_input(self.window, self.settings)
self.assertEqual(user_input.plaintext, '/file')
self.assertEqual(user_input.type, FILE)
- def test_command(self):
- # Setup
- builtins.input = lambda _: '/clear'
-
- # Test
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', return_value='/clear')
+ def test_command(self, *_):
user_input = get_input(self.window, self.settings)
self.assertEqual(user_input.plaintext, 'clear')
self.assertEqual(user_input.type, COMMAND)
@@ -112,10 +101,7 @@ class TestGetInput(unittest.TestCase):
class TestUserInput(unittest.TestCase):
def test_user_input(self):
- # Setup
user_input = UserInput('test_plaintext', FILE)
-
- # Test
self.assertEqual(user_input.plaintext, 'test_plaintext')
self.assertEqual(user_input.type, FILE)
diff --git a/tests/transmitter/test_windows.py b/tests/transmitter/test_windows.py
new file mode 100644
index 0000000..f22cc86
--- /dev/null
+++ b/tests/transmitter/test_windows.py
@@ -0,0 +1,348 @@
+#!/usr/bin/env python3.6
+# -*- coding: utf-8 -*-
+
+"""
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
+
+This file is part of TFC.
+
+TFC is free software: you can redistribute it and/or modify it under the terms
+of the GNU General Public License as published by the Free Software Foundation,
+either version 3 of the License, or (at your option) any later version.
+
+TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import unittest
+
+from unittest import mock
+
+from src.common.crypto import blake2b
+from src.common.db_contacts import Contact
+from src.common.statics import *
+
+from src.transmitter.windows import MockWindow, select_window, TxWindow
+
+from tests.mock_classes import ContactList, create_contact, Gateway, GroupList, OnionService, Settings, UserInput
+from tests.utils import gen_queue_dict, group_name_to_group_id, nick_to_onion_address, nick_to_pub_key
+from tests.utils import tear_queues, TFCTestCase, VALID_ECDHE_PUB_KEY
+
+
+class TestMockWindow(unittest.TestCase):
+
+ def setUp(self):
+ self.window = MockWindow(nick_to_pub_key("Alice"), contacts=[create_contact(n) for n in ['Alice', 'Bob']])
+
+ def test_window_iterates_over_contacts(self):
+ for c in self.window:
+ self.assertIsInstance(c, Contact)
+
+
+class TestTxWindow(TFCTestCase):
+
+ def setUp(self):
+ self.contact_list = ContactList(['Alice', 'Bob'])
+ self.group_list = GroupList(groups=['test_group', 'test_group_2'])
+ self.window = TxWindow(self.contact_list, self.group_list)
+ self.window.group = self.group_list.get_group('test_group')
+ self.window.type = WIN_TYPE_GROUP
+ self.settings = Settings()
+ self.queues = gen_queue_dict()
+ self.onion_service = OnionService()
+ self.gateway = Gateway()
+ self.args = self.settings, self.queues, self.onion_service, self.gateway
+
+ def tearDown(self):
+ tear_queues(self.queues)
+
+ def test_window_iterates_over_contacts(self):
+ # Setup
+ self.window.window_contacts = self.contact_list.contacts
+
+ # Test
+ for c in self.window:
+ self.assertIsInstance(c, Contact)
+
+ def test_len_returns_number_of_contacts_in_window(self):
+ # Setup
+ self.window.window_contacts = self.contact_list.contacts
+
+ # Test
+ self.assertEqual(len(self.window), 2)
+
+ def test_group_window_change_during_traffic_masking_raises_fr(self):
+ # Setup
+ self.settings.traffic_masking = True
+ self.window.uid = 'test_group'
+
+ # Test
+ self.assert_fr("Error: Can't change window during traffic masking.",
+ self.window.select_tx_window, *self.args, selection='test_group_2', cmd=True)
+
+ def test_contact_window_change_during_traffic_masking_raises_fr(self):
+ # Setup
+ self.settings.traffic_masking = True
+ self.window.uid = nick_to_pub_key("Alice")
+
+ # Test
+ self.assert_fr("Error: Can't change window during traffic masking.",
+ self.window.select_tx_window, *self.args, selection=nick_to_onion_address("Bob"), cmd=True)
+
+ def test_contact_window_reload_during_traffic_masking(self):
+ # Setup
+ self.settings.traffic_masking = True
+ self.window.uid = nick_to_pub_key("Alice")
+
+ # Test
+ self.assertIsNone(self.window.select_tx_window(*self.args, selection=nick_to_onion_address("Alice"), cmd=True))
+ self.assertEqual(self.window.uid, nick_to_pub_key("Alice"))
+
+ def test_group_window_reload_during_traffic_masking(self):
+ # Setup
+ self.settings.traffic_masking = True
+ self.window.name = 'test_group'
+ self.window.uid = group_name_to_group_id('test_group')
+
+ # Test
+ self.assertIsNone(self.window.select_tx_window(*self.args, selection='test_group', cmd=True))
+ self.assertEqual(self.window.uid, group_name_to_group_id('test_group'))
+
+ def test_invalid_selection_raises_fr(self):
+ # Setup
+ self.window.uid = nick_to_pub_key("Alice")
+
+ # Test
+ self.assert_fr("Error: No contact/group was found.",
+ self.window.select_tx_window, *self.args, selection=nick_to_onion_address("Charlie"), cmd=True)
+
+ @mock.patch('builtins.input', return_value=nick_to_onion_address("Bob"))
+ def test_window_selection_during_traffic_masking(self, *_):
+ # Setup
+ self.settings.traffic_masking = True
+ self.window.uid = None
+
+ # Test
+ self.assertIsNone(self.window.select_tx_window(*self.args))
+ self.assertEqual(self.queues[WINDOW_SELECT_QUEUE].qsize(), 1)
+
+ @mock.patch('builtins.input', return_value=nick_to_onion_address("Bob"))
+ def test_contact_window_selection_from_input(self, *_):
+ # Setup
+ self.window.uid = None
+
+ # Test
+ self.assertIsNone(self.window.select_tx_window(*self.args))
+ self.assertEqual(self.window.uid, nick_to_pub_key("Bob"))
+
+ def test_group_window_selection_from_command(self):
+ # Setup
+ self.window.uid = None
+
+ self.assertIsNone(self.window.select_tx_window(*self.args, selection='test_group', cmd=True))
+ self.assertEqual(self.window.uid, group_name_to_group_id('test_group'))
+
+ def test_deselect_window(self):
+ # Setup
+ self.window.window_contacts = self.contact_list.contacts
+ self.window.contact = self.contact_list.get_contact_by_address_or_nick("Bob")
+ self.window.name = 'Bob'
+ self.window.type = WIN_TYPE_CONTACT
+ self.window.uid = nick_to_pub_key("Bob")
+
+ # Test
+ self.assertIsNone(self.window.deselect())
+ self.assertIsNone(self.window.contact)
+ self.assertEqual(self.window.name, '')
+ self.assertEqual(self.window.type, '')
+ self.assertEqual(self.window.uid, b'')
+
+ def test_is_selected(self):
+ self.window.name = ''
+ self.assertFalse(self.window.is_selected())
+
+ self.window.name = nick_to_pub_key("Bob")
+ self.assertTrue(self.window.is_selected())
+
+ def test_update_log_messages_for_contact(self):
+ # Setup
+ self.window.type = WIN_TYPE_CONTACT
+ self.window.log_messages = None
+ self.window.contact = self.contact_list.get_contact_by_address_or_nick('Alice')
+ self.window.contact.log_messages = False
+
+ # Test
+ self.assertIsNone(self.window.update_log_messages())
+ self.assertFalse(self.window.log_messages)
+
+ def test_update_log_messages_for_group(self):
+ # Setup
+ self.window.type = WIN_TYPE_GROUP
+ self.window.log_messages = None
+ self.window.group = self.group_list.get_group('test_group')
+ self.window.group.log_messages = False
+
+ # Test
+ self.assertIsNone(self.window.update_log_messages())
+ self.assertFalse(self.window.log_messages)
+
+ def test_update_group_win_members_if_group_is_available(self):
+ # Setup
+ self.window.window_contacts = []
+ self.window.group = None
+ self.window.group_id = group_name_to_group_id('test_group')
+ self.window.name = 'test_group'
+ self.window.type = WIN_TYPE_GROUP
+
+ # Test
+ self.assertIsNone(self.window.update_window(self.group_list))
+ self.assertEqual(self.window.group, self.group_list.get_group('test_group'))
+ self.assertEqual(self.window.window_contacts, self.window.group.members)
+
+ def test_window_contact_is_reloaded_when_contact_is_active(self):
+ # Setup
+ self.window.type = WIN_TYPE_CONTACT
+ self.window.contact = create_contact('Alice')
+ self.window.window_contacts = [self.window.contact]
+ self.assertIsNot(self.window.contact,
+ self.window.contact_list.get_contact_by_pub_key(nick_to_pub_key('Alice')))
+ self.assertIsNot(self.window.window_contacts[0],
+ self.window.contact_list.get_contact_by_pub_key(nick_to_pub_key('Alice')))
+
+ # Test
+ self.assertIsNone(self.window.update_window(self.group_list))
+ self.assertIs(self.window.contact,
+ self.window.contact_list.get_contact_by_pub_key(nick_to_pub_key('Alice')))
+ self.assertIs(self.window.window_contacts[0],
+ self.window.contact_list.get_contact_by_pub_key(nick_to_pub_key('Alice')))
+
+ def test_deactivate_window_if_group_is_not_available(self):
+ # Setup
+ self.window.window_contacts = []
+ self.window.group = None
+ self.window.name = 'test_group_3'
+ self.window.type = WIN_TYPE_GROUP
+
+ # Test
+ self.assertIsNone(self.window.update_window(self.group_list))
+ self.assertIsNone(self.window.contact)
+ self.assertEqual(self.window.name, '')
+ self.assertEqual(self.window.type, '')
+ self.assertEqual(self.window.uid, b'')
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', side_effect=['Alice',
+ VALID_ECDHE_PUB_KEY,
+ 'yes',
+ blake2b(nick_to_pub_key('Alice'),
+ digest_size=CONFIRM_CODE_LENGTH).hex()])
+ @mock.patch('shutil.get_terminal_size', return_value=[200, 200])
+ def test_selecting_pending_contact_starts_key_exchange(self, *_):
+ # Setup
+ alice = self.contact_list.get_contact_by_address_or_nick('Alice')
+ bob = self.contact_list.get_contact_by_address_or_nick('Bob')
+ alice.kex_status = KEX_STATUS_PENDING
+ bob.kex_status = KEX_STATUS_PENDING
+
+ # Test
+ self.assertIsNone(self.window.select_tx_window(*self.args))
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 2)
+ self.assertEqual(self.queues[WINDOW_SELECT_QUEUE].qsize(), 0)
+ self.assertEqual(alice.kex_status, KEX_STATUS_VERIFIED)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', side_effect=['/add',
+ nick_to_onion_address('Alice'),
+ 'Alice',
+ '',
+ VALID_ECDHE_PUB_KEY,
+ 'yes',
+ blake2b(nick_to_pub_key('Alice'),
+ digest_size=CONFIRM_CODE_LENGTH).hex()])
+ @mock.patch('shutil.get_terminal_size', return_value=[200, 200])
+ def test_adding_new_contact_from_contact_selection(self, *_):
+ # Setup
+ alice = self.contact_list.get_contact_by_address_or_nick('Alice')
+ alice.kex_status = KEX_STATUS_PENDING
+
+ # Test
+ self.assert_fr('New contact added.',
+ self.window.select_tx_window, *self.args)
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
+ self.assertEqual(self.queues[WINDOW_SELECT_QUEUE].qsize(), 0)
+ self.assertEqual(alice.kex_status, KEX_STATUS_VERIFIED)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', side_effect=['/rm '])
+ def test_missing_account_when_removing_raises_fr(self, *_):
+ self.assert_fr("Error: No account specified.", self.window.select_tx_window, *self.args)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', side_effect=['/rm Charlie', 'yes'])
+ def test_unknown_account_when_removing_raises_fr(self, *_):
+ self.assert_fr("Error: Unknown contact 'Charlie'.", self.window.select_tx_window, *self.args)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', side_effect=['/rm Alice', 'no'])
+    def test_abort_removal_of_contact_from_contact_selection(self, *_):
+ self.assert_fr("Removal of contact aborted.", self.window.select_tx_window, *self.args)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', side_effect=['/rm Alice', 'yes'])
+ def test_removing_pending_contact_from_contact_selection(self, *_):
+ self.assert_fr("Removed contact 'Alice'.", self.window.select_tx_window, *self.args)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', side_effect=['/connect', b'a'.hex()])
+ def test_sending_onion_service_data_from_contact_selection(self, *_):
+ self.assertIsNone(self.window.select_tx_window(*self.args))
+ self.assertEqual(len(self.gateway.packets), 1)
+
+ @mock.patch('time.sleep', return_value=None)
+ @mock.patch('builtins.input', side_effect=['/help'])
+ def test_invalid_command_raises_fr(self, *_):
+ self.assert_fr("Error: Invalid command.", self.window.select_tx_window, *self.args)
+
+
+class TestSelectWindow(TFCTestCase):
+
+ def setUp(self):
+ self.contact_list = ContactList(nicks=['Alice'])
+ self.group_list = GroupList()
+ self.user_input = UserInput()
+ self.window = TxWindow(self.contact_list, self.group_list)
+ self.settings = Settings()
+ self.queues = gen_queue_dict()
+ self.onion_service = OnionService()
+ self.gateway = Gateway()
+ self.args = self.user_input, self.window, self.settings, self.queues, self.onion_service, self.gateway
+
+ def tearDown(self):
+ tear_queues(self.queues)
+
+ def test_invalid_selection_raises_fr(self):
+ # Setup
+ self.user_input.plaintext = 'msg'
+ self.assert_fr("Error: Invalid recipient.", select_window, *self.args)
+
+ # Test
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 0)
+ self.assertEqual(self.queues[WINDOW_SELECT_QUEUE].qsize(), 0)
+
+ def test_window_selection(self):
+ # Setup
+ self.user_input.plaintext = f"msg {nick_to_onion_address('Alice')}"
+
+ # Test
+ self.assertIsNone(select_window(*self.args))
+ self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
+ self.assertEqual(self.queues[WINDOW_SELECT_QUEUE].qsize(), 0)
+
+
+if __name__ == '__main__':
+ unittest.main(exit=False)
diff --git a/tests/tx/test_commands.py b/tests/tx/test_commands.py
deleted file mode 100644
index 6e6b589..0000000
--- a/tests/tx/test_commands.py
+++ /dev/null
@@ -1,785 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
-"""
-
-import builtins
-import getpass
-import os
-import shutil
-import time
-import tkinter.filedialog
-import unittest
-
-from multiprocessing import Queue
-
-from src.common.db_logs import write_log_entry
-from src.common.statics import *
-
-from src.tx.commands import change_master_key, change_setting, clear_screens, exit_tfc, export_file
-from src.tx.commands import import_file, log_command, print_about, print_help, print_recipients
-from src.tx.commands import process_command, remove_log, rxm_display_unread, rxm_show_sys_win, whisper, wipe
-
-from tests.mock_classes import ContactList, create_contact, GroupList, MasterKey, UserInput, Settings, TxWindow
-from tests.utils import cleanup, ignored, TFCTestCase
-
-
-class TestProcessCommand(TFCTestCase):
-
- def setUp(self):
- self.queues = {COMMAND_PACKET_QUEUE: Queue(),
- MESSAGE_PACKET_QUEUE: Queue(),
- NH_PACKET_QUEUE: Queue()}
- self.window = TxWindow()
- self.settings = Settings()
- self.contact_list = ContactList()
- self.group_list = GroupList()
- self.master_key = MasterKey()
-
- def tearDown(self):
- for key in self.queues:
- while not self.queues[key].empty():
- self.queues[key].get()
- time.sleep(0.1)
- self.queues[key].close()
-
- def test_process_command(self):
- self.assertIsNone(process_command(UserInput('about'), self.window, self.settings, self.queues,
- self.contact_list, self.group_list, self.master_key))
-
- def test_invalid_command(self):
- self.assertFR("Error: Invalid command 'abou'", process_command, UserInput('abou'), self.window, self.settings,
- self.queues, self.contact_list, self.group_list, self.master_key)
-
- def test_empty_command(self):
- self.assertFR("Error: Invalid command.", process_command, UserInput(' '), self.window, self.settings,
- self.queues, self.contact_list, self.group_list, self.master_key)
-
-
-class TestPrintAbout(TFCTestCase):
-
- def test_print_about(self):
- self.assertPrints(CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER + """\
-
- Tinfoil Chat {}
-
- Website: https://github.com/maqp/tfc/
- Wikipage: https://github.com/maqp/tfc/wiki
- White paper: https://cs.helsinki.fi/u/oottela/tfc.pdf
-
-""".format(VERSION), print_about)
-
-
-class TestClearScreens(unittest.TestCase):
-
- def setUp(self):
- self.queues = {COMMAND_PACKET_QUEUE: Queue(),
- NH_PACKET_QUEUE: Queue()}
- self.settings = Settings()
- self.window = TxWindow(imc_name='alice@jabber.org',
- uid='alice@jabber.org')
-
- def tearDown(self):
- for key in self.queues:
- while not self.queues[key].empty():
- self.queues[key].get()
- time.sleep(0.1)
- self.queues[key].close()
-
- def test_clear_screens(self):
- self.assertIsNone(clear_screens(UserInput(plaintext='clear'), self.window,
- self.settings, self.queues))
- time.sleep(0.1)
- self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
- self.assertEqual(self.queues[NH_PACKET_QUEUE].qsize(), 1)
-
- def test_no_nh_clear_cmd_when_traffic_masking_is_enabled(self):
- # Setup
- self.settings.session_traffic_masking = True
-
- # Test
- self.assertIsNone(clear_screens(UserInput(plaintext='clear'), self.window,
- self.settings, self.queues))
- time.sleep(0.1)
- self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
- self.assertEqual(self.queues[NH_PACKET_QUEUE].qsize(), 0)
-
- def test_reset_screens(self):
- self.assertIsNone(clear_screens(UserInput(plaintext='reset'), self.window,
- self.settings, self.queues))
- time.sleep(0.1)
- self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
- self.assertEqual(self.queues[NH_PACKET_QUEUE].qsize(), 1)
-
- def test_no_nh_reset_cmd_when_traffic_masking_is_enabled(self):
- # Setup
- self.settings.session_traffic_masking = True
-
- # Test
- self.assertIsNone(clear_screens(UserInput(plaintext='reset'), self.window,
- self.settings, self.queues))
- time.sleep(0.1)
- self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
- self.assertEqual(self.queues[NH_PACKET_QUEUE].qsize(), 0)
-
-
-class TestRxMShowSysWin(unittest.TestCase):
-
- def setUp(self):
- self.o_input = builtins.input
- builtins.input = lambda _: ''
- self.c_queue = Queue()
- self.settings = Settings()
- self.window = TxWindow(name='alice@jabber.org',
- uid='alice@jabber.org')
-
- def tearDown(self):
- builtins.input = self.o_input
-
- while not self.c_queue.empty():
- self.c_queue.get()
- time.sleep(0.1)
- self.c_queue.close()
-
- def test_cmd_window(self):
- self.assertIsNone(rxm_show_sys_win(UserInput(plaintext='cmd'), self.window, self.settings, self.c_queue))
- time.sleep(0.1)
- self.assertEqual(self.c_queue.qsize(), 2)
-
- def test_file_window(self):
- self.assertIsNone(rxm_show_sys_win(UserInput(plaintext='fw'), self.window, self.settings, self.c_queue))
- time.sleep(0.1)
- self.assertEqual(self.c_queue.qsize(), 2)
-
-
-class TestExitTFC(unittest.TestCase):
-
- def setUp(self):
- self.queues = {COMMAND_PACKET_QUEUE: Queue(),
- NH_PACKET_QUEUE: Queue(),
- EXIT_QUEUE: Queue()}
- self.settings = Settings(session_traffic_masking=False,
- local_testing_mode=True,
- data_diode_sockets=True,
- race_condition_delay=0.0)
-
- def tearDown(self):
- for key in self.queues:
- while not self.queues[key].empty():
- self.queues[key].get()
- time.sleep(0.1)
- self.queues[key].close()
-
- def test_exit_tfc_local_test(self):
- # Setup
- for _ in range(2):
- self.queues[COMMAND_PACKET_QUEUE].put("dummy command")
- time.sleep(0.1)
-
- # Test
- self.assertIsNone(exit_tfc(self.settings, self.queues))
- time.sleep(0.5)
- self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
- self.assertEqual(self.queues[NH_PACKET_QUEUE].qsize(), 1)
-
- def test_exit_tfc(self):
- # Setup
- self.settings.local_testing_mode = False
- for _ in range(2):
- self.queues[COMMAND_PACKET_QUEUE].put("dummy command")
- time.sleep(0.1)
-
- # Test
- self.assertIsNone(exit_tfc(self.settings, self.queues))
- time.sleep(0.5)
- self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
- self.assertEqual(self.queues[NH_PACKET_QUEUE].qsize(), 1)
-
-
-class TestAccessLogs(TFCTestCase):
-
- def setUp(self):
- self.o_input = builtins.input
- self.c_queue = Queue()
- self.window = TxWindow(uid='alice@jabber.org',
- name='Alice')
- self.contact_list = ContactList()
- self.group_list = GroupList()
- self.settings = Settings()
- self.master_key = MasterKey()
-
- def tearDown(self):
- cleanup()
-
- while not self.c_queue.empty():
- self.c_queue.get()
- time.sleep(0.1)
- self.c_queue.close()
-
- def test_invalid_export(self):
- self.assertFR("Error: Invalid number of messages.",
- log_command, UserInput("history a"), self.window, self.contact_list,
- self.group_list, self.settings, self.c_queue, self.master_key)
-
- def test_log_printing(self):
- self.assertFR(f"Error: Could not find log database.",
- log_command, UserInput("history 4"), self.window, self.contact_list,
- self.group_list, self.settings, self.c_queue, self.master_key)
- time.sleep(0.1)
-
- self.assertEqual(self.c_queue.qsize(), 1)
-
- def test_log_printing_all(self):
- self.assertFR(f"Error: Could not find log database.",
- log_command, UserInput("history"), self.window, self.contact_list,
- self.group_list, self.settings, self.c_queue, self.master_key)
- time.sleep(0.1)
-
- self.assertEqual(self.c_queue.qsize(), 1)
-
- def test_invalid_number_raises_fr(self):
- self.assertFR("Error: Invalid number of messages.",
- log_command, UserInput('history a'), self.window, self.contact_list,
- self.group_list, self.settings, self.c_queue, self.master_key)
-
- def test_too_high_number_raises_fr(self):
- self.assertFR("Error: Invalid number of messages.",
- log_command, UserInput('history 94857634985763454345'), self.window, self.contact_list,
- self.group_list, self.settings, self.c_queue, self.master_key)
-
- def test_user_abort_raises_fr(self):
- # Setup
- builtins.input = lambda _: 'No'
-
- # Test
- self.assertFR("Logfile export aborted.",
- log_command, UserInput('export'), self.window, self.contact_list,
- self.group_list, self.settings, self.c_queue, self.master_key)
-
- def test_successful_export_command(self):
- # Setup
- builtins.input = lambda _: 'Yes'
-
- # Test
- # Indicates that access_history was called.
- self.assertFR(f"Error: Could not find log database.",
- log_command, UserInput('export'), self.window, ContactList(nicks=['Alice']),
- self.group_list, self.settings, self.c_queue, self.master_key)
-
- def test_successful_export_command_with_number(self):
- # Setup
- builtins.input = lambda _: 'Yes'
-
- # Test
- # Indicates that access_history was called.
- self.assertFR(f"Error: Could not find log database.",
- log_command, UserInput('export 4'), self.window, ContactList(nicks=['Alice']),
- self.group_list, self.settings, self.c_queue, self.master_key)
-
-
-class TestExportFile(TFCTestCase):
-
- def setUp(self):
- self.o_tk_aof = tkinter.filedialog.askopenfilename
- self.o_input = builtins.input
- self.settings = Settings()
- self.nh_queue = Queue()
-
- def tearDown(self):
- tkinter.filedialog.askopenfilename = self.o_tk_aof
- builtins.input = self.o_input
-
- with ignored(OSError):
- os.remove('testfile')
-
- def test_raises_fr_during_traffic_masking(self):
- self.assertFR("Error: Command is disabled during traffic masking.",
- export_file, Settings(session_traffic_masking=True), None)
-
- @unittest.skipIf("TRAVIS" in os.environ and os.environ["TRAVIS"] == "true", "Skip as Travis has no $DISPLAY.")
- def test_unknown_file_raises_fr(self):
- # Setup
- tkinter.filedialog.askopenfilename = lambda title: 'unknown_file'
-
- # Test
- self.assertFR("Error: File not found.", export_file, self.settings, None)
-
- def test_empty_file_raises_fr(self):
- # Setup
- builtins.input = lambda _: './testfile'
-
- with open('testfile', 'wb+') as f:
- f.write(b'')
-
- # Test
- self.assertFR("Error: Target file is empty.",
- export_file, Settings(disable_gui_dialog=True), None)
-
- def test_file_export(self):
- # Setup
- builtins.input = lambda _: './testfile'
-
- with open('testfile', 'wb+') as f:
- f.write(os.urandom(300))
-
- # Test
- self.assertIsNone(export_file(Settings(disable_gui_dialog=True), self.nh_queue))
- self.assertEqual(self.nh_queue.qsize(), 1)
-
-
-class TestImportFile(TFCTestCase):
-
- def setUp(self):
- self.settings = Settings()
- self.nh_queue = Queue()
-
- def test_raises_fr_when_traffic_masking_is_enabled(self):
- self.assertFR("Error: Command is disabled during traffic masking.",
- import_file, Settings(session_traffic_masking=True), None)
-
- def test_import_file(self):
- self.assertIsNone(import_file(self.settings, self.nh_queue))
- self.assertEqual(self.nh_queue.qsize(), 1)
-
-
-class TestPrintHelp(TFCTestCase):
-
- def setUp(self):
- self.settings = Settings()
- self.settings.session_traffic_masking = False
- self.o_shutil_ttyw = shutil.get_terminal_size
-
- def tearDown(self):
- shutil.get_terminal_size = self.o_shutil_ttyw
-
- def test_print_normal(self):
- # Setup
- shutil.get_terminal_size = lambda: [60, 60]
-
- # Test
- self.assertPrints(CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER + """\
-List of commands:
-
-/about Show links to project resources
-/add Add new contact
-/cf Cancel file transmission to active
- contact/group
-
-/cm Cancel message transmission to
- active contact/group
-
-/clear, ' ' Clear screens from TxM, RxM and IM
- client
-
-/cmd, '//' Display command window on RxM
-/exit Exit TFC on TxM, NH and RxM
-/export (n) Export (n) messages from
- recipient's logfile
-
-/file Send file to active contact/group
-/fingerprints Print public key fingerprints of
- user and contact
-
-/fe Encrypt and export file to NH
-/fi Import file from NH to RxM
-/fw Display file reception window on
- RxM
-
-/help Display this list of commands
-/history (n) Print (n) messages from
- recipient's logfile
-
-/localkey Generate new local key pair
-/logging {on,off}(' all') Change message log setting (for
- all contacts)
-
-/msg {A,N} Change active recipient to account
- A or nick N
-
-/names List contacts and groups
-/nick N Change nickname of active
- recipient to N
-
-/notify {on,off} (' all') Change notification settings (for
- all contacts)
-
-/passwd {tx,rx} Change master password on TxM/RxM
-/psk Open PSK import dialog on RxM
-/reset Reset ephemeral session log on
- TxM/RxM/IM client
-
-/rm {A,N} Remove account A or nick N from
- TxM and RxM
-
-/rmlogs {A,N} Remove log entries for A/N on TxM
- and RxM
-
-/set S V Change setting S to value V on
- TxM/RxM(/NH)
-
-/settings List setting names, values and
- descriptions
-
-/store {on,off} (' all') Change file reception (for all
- contacts)
-
-/unread, ' ' List windows with unread messages
- on RxM
-
-/whisper M Send message M, asking it not to
- be logged
-
-/wipe Wipe all TFC/IM user data and
- power off systems
-
-Shift + PgUp/PgDn Scroll terminal up/down
-────────────────────────────────────────────────────────────
-Group management:
-
-/group create G A₁ .. Aₙ Create group G and add accounts A₁
- .. Aₙ
-
-/group add G A₁ .. Aₙ Add accounts A₁ .. Aₙ to group G
-/group rm G A₁ .. Aₙ Remove accounts A₁ .. Aₙ from
- group G
-
-/group rm G Remove group G
-────────────────────────────────────────────────────────────
-
-""", print_help, self.settings)
-
- def test_print_during_traffic_masking(self):
- # Setup
- self.settings.session_traffic_masking = True
- shutil.get_terminal_size = lambda: [80, 80]
-
- # Test
- self.assertPrints(CLEAR_ENTIRE_SCREEN + CURSOR_LEFT_UP_CORNER + """\
-List of commands:
-
-/about Show links to project resources
-/cf Cancel file transmission to active contact/group
-/cm Cancel message transmission to active contact/group
-/clear, ' ' Clear screens from TxM, RxM and IM client
-/cmd, '//' Display command window on RxM
-/exit Exit TFC on TxM, NH and RxM
-/export (n) Export (n) messages from recipient's logfile
-/file Send file to active contact/group
-/fingerprints Print public key fingerprints of user and contact
-/fw Display file reception window on RxM
-/help Display this list of commands
-/history (n) Print (n) messages from recipient's logfile
-/logging {on,off}(' all') Change message log setting (for all contacts)
-/names List contacts and groups
-/nick N Change nickname of active recipient to N
-/notify {on,off} (' all') Change notification settings (for all contacts)
-/reset Reset ephemeral session log on TxM/RxM/IM client
-/rmlogs {A,N} Remove log entries for A/N on TxM and RxM
-/set S V Change setting S to value V on TxM/RxM(/NH)
-/settings List setting names, values and descriptions
-/store {on,off} (' all') Change file reception (for all contacts)
-/unread, ' ' List windows with unread messages on RxM
-/whisper M Send message M, asking it not to be logged
-/wipe Wipe all TFC/IM user data and power off systems
-Shift + PgUp/PgDn Scroll terminal up/down
-────────────────────────────────────────────────────────────────────────────────
-
-""", print_help, self.settings)
-
-
-class TestPrintRecipients(TFCTestCase):
-
- def setUp(self):
- self.contact_list = ContactList(nicks=['Alice', 'Bob'])
- self.group_list = GroupList(groups=['testgroup', 'testgroup2'])
-
- def test_printing(self):
- self.assertIsNone(print_recipients(self.contact_list, self.group_list))
-
-
-class TestChangeMasterKey(TFCTestCase):
-
- def setUp(self):
- self.o_getpass = getpass.getpass
- self.user_input = UserInput()
- self.contact_list = ContactList()
- self.group_list = GroupList()
- self.settings = Settings()
- self.queues = {COMMAND_PACKET_QUEUE: Queue(),
- KEY_MANAGEMENT_QUEUE: Queue()}
- self.master_key = MasterKey()
-
- def tearDown(self):
- getpass.getpass = self.o_getpass
- cleanup()
-
- for key in self.queues:
- while not self.queues[key].empty():
- self.queues[key].get()
- time.sleep(0.1)
- self.queues[key].close()
-
- def test_raises_fr_during_traffic_masking(self):
- self.assertFR("Error: Command is disabled during traffic masking.",
- change_master_key, self.user_input, self.contact_list, self.group_list,
- Settings(session_traffic_masking=True), self.queues, self.master_key)
-
- def test_missing_target_sys_raises_fr(self):
- self.assertFR("Error: No target system specified.",
- change_master_key, UserInput("passwd "), self.contact_list,
- self.group_list, self.settings, self.queues, self.master_key)
-
- def test_invalid_target_sys_raises_fr(self):
- self.assertFR("Error: Invalid target system.",
- change_master_key, UserInput("passwd t"), self.contact_list,
- self.group_list, self.settings, self.queues, self.master_key)
-
- def test_txm_command(self):
- # Setup
- settings = Settings(software_operation='ut')
- getpass.getpass = lambda _: 'a'
- write_log_entry(M_S_HEADER + PADDING_LEN * b'a', 'alice@jabber.org', settings, self.master_key)
-
- # Test
- self.assertIsNone(change_master_key(UserInput("passwd tx"), self.contact_list,
- self.group_list, settings, self.queues, self.master_key))
- time.sleep(0.1)
- self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 0)
- self.assertEqual(self.queues[KEY_MANAGEMENT_QUEUE].qsize(), 1)
-
- def test_rxm_command(self):
- self.assertIsNone(change_master_key(UserInput("passwd rx"), self.contact_list,
- self.group_list, self.settings, self.queues, self.master_key))
- time.sleep(0.1)
- self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
-
-
-class TestRemoveLog(TFCTestCase):
-
- def setUp(self):
- self.c_queue = Queue()
- self.contact_list = ContactList(nicks=['Alice'])
- self.settings = Settings()
- self.master_key = MasterKey()
- self.o_input = builtins.input
- builtins.input = lambda _: 'Yes'
-
- def tearDown(self):
- builtins.input = self.o_input
-
- while not self.c_queue.empty():
- self.c_queue.get()
- time.sleep(0.1)
- self.c_queue.close()
-
- cleanup()
-
- def test_missing_contact_raises_fr(self):
- self.assertFR("Error: No contact/group specified.",
- remove_log, UserInput(''), self.contact_list,
- self.settings, self.c_queue, self.master_key)
-
- def test_no_aborts_removal(self):
- # Setup
- write_log_entry(M_S_HEADER + PADDING_LEN * b'a', 'alice@jabber.org', self.settings, self.master_key)
- self.assertEqual(os.path.getsize(f'{DIR_USER_DATA}ut_logs'), LOG_ENTRY_LENGTH)
-
- builtins.input = lambda _: 'No'
- self.assertFR("Logfile removal aborted.",
- remove_log, UserInput('/rmlogs Alice'), self.contact_list,
- self.settings, self.c_queue, self.master_key)
-
- def test_log_remove(self):
- # Setup
- write_log_entry(M_S_HEADER + PADDING_LEN * b'a', 'alice@jabber.org', self.settings, self.master_key)
- self.assertEqual(os.path.getsize(f'{DIR_USER_DATA}ut_logs'), LOG_ENTRY_LENGTH)
-
- # Test
- self.assertIsNone(remove_log(UserInput('/rmlogs Alice'), self.contact_list,
- self.settings, self.c_queue, self.master_key))
- time.sleep(0.1)
- self.assertEqual(os.path.getsize(f'{DIR_USER_DATA}ut_logs'), 0)
- self.assertEqual(self.c_queue.qsize(), 1)
-
-
-class TestChangeSetting(TFCTestCase):
-
- def setUp(self):
- self.contact_list = ContactList()
- self.group_list = GroupList()
- self.settings = Settings()
- self.queues = {COMMAND_PACKET_QUEUE: Queue(),
- NH_PACKET_QUEUE: Queue()}
- self.c_queue = Queue()
-
- def tearDown(self):
- while not self.c_queue.empty():
- self.c_queue.get()
- time.sleep(0.1)
- self.c_queue.close()
-
- def test_missing_setting_raises_fr(self):
- self.assertFR("Error: No setting specified.",
- change_setting, UserInput('set'), self.contact_list, self.group_list,
- self.settings, self.queues)
-
- def test_invalid_setting_raises_fr(self):
- # Setup
- user_input = UserInput("set e_correction_ratia true")
- settings = Settings(key_list=['serial_error_correction'])
-
- # Test
- self.assertFR("Error: Invalid setting 'e_correction_ratia'",
- change_setting, user_input, self.contact_list, self.group_list,
- settings, self.queues)
-
- def test_missing_value_raises_fr(self):
- # Setup
- user_input = UserInput("set serial_error_correction")
- settings = Settings(key_list=['serial_error_correction'])
-
- # Test
- self.assertFR("Error: No value for setting specified.",
- change_setting, user_input, self.contact_list, self.group_list,
- settings, self.queues)
-
- def test_nh_commands_raise_fr_when_traffic_masking_is_enabled(self):
- # Setup
- key_list = ['serial_error_correction', 'serial_baudrate', 'disable_gui_dialog']
- settings = Settings(session_traffic_masking=True,
- key_list=key_list)
-
- for key in key_list:
- user_input = UserInput(f"set {key} 5")
- self.assertFR("Error: Can't change this setting during traffic masking.",
- change_setting, user_input, self.contact_list, self.group_list,
- settings, self.queues)
-
- def test_nh_management(self):
- # Setup
- settings = Settings(key_list=['serial_error_correction', 'serial_baudrate', 'disable_gui_dialog'])
-
- # Test
- user_input = UserInput("set serial_error_correction 5")
- self.assertIsNone(change_setting(user_input, self.contact_list, self.group_list,
- settings, self.queues))
- self.assertEqual(self.queues[NH_PACKET_QUEUE].qsize(), 1)
-
- user_input = UserInput("set serial_baudrate 9600")
- self.assertIsNone(change_setting(user_input, self.contact_list, self.group_list,
- settings, self.queues))
- self.assertEqual(self.queues[NH_PACKET_QUEUE].qsize(), 2)
-
- user_input = UserInput("set disable_gui_dialog True")
- self.assertIsNone(change_setting(user_input, self.contact_list, self.group_list,
- settings, self.queues))
- self.assertEqual(self.queues[NH_PACKET_QUEUE].qsize(), 3)
-
-
-class TestRxMDisplayUnread(unittest.TestCase):
-
- def setUp(self):
- self.settings = Settings()
- self.c_queue = Queue()
-
- def tearDown(self):
- while not self.c_queue.empty():
- self.c_queue.get()
- time.sleep(0.1)
- self.c_queue.close()
-
- def test_command(self):
- self.assertIsNone(rxm_display_unread(self.settings, self.c_queue))
- time.sleep(0.1)
- self.assertEqual(self.c_queue.qsize(), 1)
-
-
-class TestWhisper(unittest.TestCase):
-
- def setUp(self):
- self.user_input = UserInput("whisper Decryption key for file 'test_file.txt' is "
- "92Kocbqxo7Vcsqq1ThVVySighDUAuUUmUwcjQdyAnzZZaQjKoKm")
- self.window = TxWindow(uid='alice@jabber.org', name='Alice',
- window_contacts=[create_contact()],
- log_messages=True)
- self.settings = Settings()
- self.m_queue = Queue()
-
- def test_whisper(self):
- self.assertIsNone(whisper(self.user_input, self.window, self.settings, self.m_queue))
-
- message, settings, rx_account, tx_account, logging, log_as_ph, win_uid = self.m_queue.get()
-
- self.assertEqual(rx_account, 'alice@jabber.org')
- self.assertEqual(tx_account, 'user@jabber.org')
- self.assertTrue(logging)
- self.assertTrue(log_as_ph)
-
-
-class TestWipe(TFCTestCase):
-
- def setUp(self):
- self.queues = {COMMAND_PACKET_QUEUE: Queue(),
- NH_PACKET_QUEUE: Queue()}
- self.settings = Settings(session_traffic_masking=False,
- race_condition_delay=0.0)
- self.o_input = builtins.input
-
- def tearDown(self):
- builtins.input = self.o_input
-
- def test_no_raises_fr(self):
- # Setup
- builtins.input = lambda _: 'No'
-
- # Test
- self.assertFR("Wipe command aborted.",
- wipe, self.settings, self.queues)
-
- def test_wipe_local_Testing(self):
- # Setup
- builtins.input = lambda _: 'Yes'
- self.settings.local_testing_mode = True
- self.settings.data_diode_sockets = True
- for _ in range(2):
- self.queues[COMMAND_PACKET_QUEUE].put("dummy command")
- self.queues[NH_PACKET_QUEUE].put("dummy packet")
- time.sleep(0.1)
-
- # Test
- self.assertIsNone(wipe(self.settings, self.queues))
- wipe_packet = UNENCRYPTED_PACKET_HEADER + UNENCRYPTED_WIPE_COMMAND
- self.assertTrue(self.queues[NH_PACKET_QUEUE].get()[0].startswith(wipe_packet))
-
- def test_wipe(self):
- # Setup
- builtins.input = lambda _: 'Yes'
-
- for _ in range(2):
- self.queues[COMMAND_PACKET_QUEUE].put("dummy command")
- self.queues[NH_PACKET_QUEUE].put("dummy packet")
- time.sleep(0.1)
-
- # Test
- self.assertIsNone(wipe(self.settings, self.queues))
- wipe_packet = UNENCRYPTED_PACKET_HEADER + UNENCRYPTED_WIPE_COMMAND
- self.assertTrue(self.queues[NH_PACKET_QUEUE].get()[0].startswith(wipe_packet))
-
-
-if __name__ == '__main__':
- unittest.main(exit=False)
diff --git a/tests/tx/test_commands_g.py b/tests/tx/test_commands_g.py
deleted file mode 100644
index 8093334..0000000
--- a/tests/tx/test_commands_g.py
+++ /dev/null
@@ -1,355 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
-"""
-
-import builtins
-import time
-import unittest
-
-from multiprocessing import Queue
-
-from src.common.statics import *
-
-from src.tx.commands_g import group_add_member, group_create, group_rm_group, group_rm_member, process_group_command, validate_group_name
-
-from tests.mock_classes import Contact, ContactList, GroupList, MasterKey, Settings, UserInput
-from tests.utils import TFCTestCase
-
-
-class TestProcessGroupCommand(TFCTestCase):
-
- def setUp(self):
- self.o_input = builtins.input
- builtins.input = lambda _: 'Yes'
-
- self.user_input = UserInput()
- self.contact_list = ContactList(nicks=['Alice'])
- self.group_list = GroupList()
- self.settings = Settings()
- self.queues = {COMMAND_PACKET_QUEUE: Queue(),
- MESSAGE_PACKET_QUEUE: Queue()}
- self.master_key = MasterKey()
-
- def tearDown(self):
- builtins.input = self.o_input
-
- for key in self.queues:
- while not self.queues[key].empty():
- self.queues[key].get()
- time.sleep(0.1)
- self.queues[key].close()
-
- def test_raises_fr_when_traffic_masking_is_enabled(self):
- self.assertFR("Error: Command is disabled during traffic masking.",
- process_group_command, self.user_input, self.contact_list,
- self.group_list, Settings(session_traffic_masking=True), self.queues, self.master_key)
-
- def test_invalid_command_raises_fr(self):
- self.assertFR("Error: Invalid group command.",
- process_group_command, UserInput('group '), self.contact_list,
- self.group_list, self.settings, self.queues, self.master_key)
-
- def test_invalid_command_parameters_raises_fr(self):
- self.assertFR("Error: Invalid group command.",
- process_group_command, UserInput('group bad'), self.contact_list,
- self.group_list, self.settings, self.queues, self.master_key)
-
- def test_missing_name_raises_fr(self):
- self.assertFR("Error: No group name specified.",
- process_group_command, UserInput('group create '), self.contact_list,
- self.group_list, self.settings, self.queues, self.master_key)
-
- def test_successful_command(self):
- self.assertIsNone(process_group_command(UserInput('group create team Alice'), self.contact_list,
- self.group_list, self.settings, self.queues, self.master_key))
-
-
-class TestValidateGroupName(TFCTestCase):
-
- def setUp(self):
- self.contact_list = ContactList(nicks=['Alice'])
- self.group_list = GroupList(groups=['testgroup'])
- builtins.input = lambda _: 'No'
-
- def test_non_printable_group_name_raises_fr(self):
- self.assertFR("Error: Group name must be printable.",
- validate_group_name, 'testgroup\x1f', self.contact_list, self.group_list)
-
- def test_too_long_group_name_raises_fr(self):
- self.assertFR("Error: Group name must be less than 255 chars long.",
- validate_group_name, PADDING_LEN * 'a', self.contact_list, self.group_list)
-
- def test_use_of_dummy_group_name_raises_fr(self):
- self.assertFR("Error: Group name can't use name reserved for database padding.",
- validate_group_name, DUMMY_GROUP, self.contact_list, self.group_list)
-
- def test_group_name_with_account_format_raises_fr(self):
- self.assertFR("Error: Group name can't have format of an account.",
- validate_group_name, 'alice@jabber.org', self.contact_list, self.group_list)
-
- def test_use_of_contact_nick_raises_fr(self):
- self.assertFR("Error: Group name can't be nick of contact.",
- validate_group_name, 'Alice', self.contact_list, self.group_list)
-
- def test_user_abort_on_existing_group_raises_fr(self):
- self.assertFR("Group creation aborted.",
- validate_group_name, 'testgroup', self.contact_list, self.group_list)
-
- def test_valid_group_name(self):
- self.assertIsNone(validate_group_name('testgroup2', self.contact_list, self.group_list))
-
-
-class TestGroupCreate(TFCTestCase):
-
- def setUp(self):
- self.o_input = builtins.input
- builtins.input = lambda _: 'Yes'
-
- self.user_input = UserInput()
- self.contact_list = ContactList(nicks=['Alice', 'Bob'])
- self.group_list = GroupList()
- self.settings = Settings()
- self.queues = {COMMAND_PACKET_QUEUE: Queue(),
- MESSAGE_PACKET_QUEUE: Queue()}
- self.master_key = MasterKey()
-
- def tearDown(self):
- builtins.input = self.o_input
-
- for key in self.queues:
- while not self.queues[key].empty():
- self.queues[key].get()
- time.sleep(0.1)
- self.queues[key].close()
-
- def test_too_many_purp_accounts_raises_fr(self):
- # Setup
- group_list = GroupList(groups=['testgroup'])
- contact_list = ContactList(nicks=["contact_{}".format(n) for n in range(21)])
- group = group_list.get_group('testgroup')
- group.members = contact_list.contacts
-
- # Test
- cl_str = ["contact_{}@jabber.org".format(n) for n in range(21)]
- self.assertFR("Error: TFC settings only allow 20 members per group.",
- group_create, 'testgroup_21', cl_str, group_list, contact_list, self.settings, self.queues, self.master_key)
-
- def test_full_group_list_raises_fr(self):
- # Setup
- group_list = GroupList(groups=["testgroup_{}".format(n) for n in range(20)])
- contact_list = ContactList(nicks=['Alice'])
-
- # Test
- self.assertFR("Error: TFC settings only allow 20 groups.",
- group_create, 'testgroup_20', ['alice@jabber.org'], group_list, contact_list, self.settings, self.queues, self.master_key)
-
- def test_successful_group_creation(self):
- # Setup
- group_list = GroupList(groups=['testgroup'])
-
- # Test
- self.assertIsNone(group_create('testgroup_2', ['alice@jabber.org'], group_list, self.contact_list, self.settings, self.queues, self.master_key))
- time.sleep(0.1)
- self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
- self.assertEqual(self.queues[MESSAGE_PACKET_QUEUE].qsize(), 1)
-
- def test_successful_empty_group_creation(self):
- self.assertIsNone(group_create('testgroup_2', [], self.group_list, self.contact_list, self.settings, self.queues, self.master_key))
- time.sleep(0.1)
- self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
- self.assertEqual(self.queues[MESSAGE_PACKET_QUEUE].qsize(), 0)
-
-
-class TestGroupAddMember(TFCTestCase):
-
- def setUp(self):
- self.o_input = builtins.input
- builtins.input = lambda _: 'Yes'
-
- self.user_input = UserInput()
- self.contact_list = ContactList(nicks=['Alice', 'Bob'])
- self.group_list = GroupList()
- self.settings = Settings()
- self.queues = {COMMAND_PACKET_QUEUE: Queue(),
- MESSAGE_PACKET_QUEUE: Queue()}
- self.master_key = MasterKey()
-
- def tearDown(self):
- builtins.input = self.o_input
-
- for key in self.queues:
- while not self.queues[key].empty():
- self.queues[key].get()
- time.sleep(0.1)
- self.queues[key].close()
-
- def test_new_group_is_created_if_specified_group_does_not_exist_and_user_chooses_yes(self):
- self.assertIsNone(group_add_member('test_group', [], self.group_list, self.contact_list,
- self.settings, self.queues, self.master_key))
- time.sleep(0.1)
- self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
- self.assertEqual(self.queues[MESSAGE_PACKET_QUEUE].qsize(), 0)
-
- def test_raises_fr_if_specified_group_does_not_exist_and_user_chooses_no(self):
- # Setup
- builtins.input = lambda _: 'No'
-
- # Test
- self.assertFR("Group creation aborted.",
- group_add_member, 'test_group', [], self.group_list, self.contact_list,
- self.settings, self.queues, self.master_key)
-
- def test_too_large_final_member_list_raises_fr(self):
- # Setup
- group_list = GroupList(groups=['testgroup'])
- contact_list = ContactList(nicks=["contact_{}".format(n) for n in range(21)])
- group = group_list.get_group('testgroup')
- group.members = contact_list.contacts[:19]
-
- # Test
- m_to_add = ["contact_19@jabber.org", "contact_20@jabber.org"]
- self.assertFR("Error: TFC settings only allow 20 members per group.",
- group_add_member, 'testgroup', m_to_add, group_list, contact_list, self.settings, self.queues, self.master_key)
-
- def test_successful_group_add(self):
- # Setup
- group_list = GroupList(groups=['testgroup'])
- contact_list = ContactList(nicks=["contact_{}".format(n) for n in range(21)])
- group = group_list.get_group('testgroup')
- group.members = contact_list.contacts[:19]
-
- # Test
- m_to_add = ["contact_19@jabber.org"]
- self.assertIsNone(group_add_member('testgroup', m_to_add, group_list, contact_list, self.settings, self.queues, self.master_key))
- time.sleep(0.1)
-
- group2 = group_list.get_group('testgroup')
- self.assertEqual(len(group2), 20)
-
- for c in group2:
- self.assertIsInstance(c, Contact)
-
- self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
- self.assertEqual(self.queues[MESSAGE_PACKET_QUEUE].qsize(), 20)
-
-
-class TestGroupRmMember(TFCTestCase):
-
- def setUp(self):
- self.o_input = builtins.input
- builtins.input = lambda _: 'Yes'
-
- self.user_input = UserInput()
- self.contact_list = ContactList(nicks=['Alice', 'Bob'])
- self.group_list = GroupList()
- self.settings = Settings()
- self.queues = {COMMAND_PACKET_QUEUE: Queue(),
- MESSAGE_PACKET_QUEUE: Queue()}
- self.master_key = MasterKey()
-
- def tearDown(self):
- builtins.input = self.o_input
-
- for key in self.queues:
- while not self.queues[key].empty():
- self.queues[key].get()
- time.sleep(0.1)
- self.queues[key].close()
-
- def test_no_accounts_removes_group(self):
- # Setup
- group_list = GroupList(groups=['testgroup'])
-
- # Test
- self.assertFR("Removed group 'testgroup'",
- group_rm_member, 'testgroup', [], group_list, self.contact_list,
- self.settings, self.queues, self.master_key)
-
- def test_remove_members_from_unknown_group(self):
- # Setup
- group_list = GroupList(groups=['testgroup2'])
-
- # Test
- self.assertFR("Group 'testgroup' does not exist.",
- group_rm_member, 'testgroup', ['alice@jabber.org'], group_list,
- self.contact_list, self.settings, self.queues, self.master_key)
-
- def test_successful_group_remove(self):
- # Setup
- group_list = GroupList(groups=['testgroup'])
-
- # Test
- self.assertIsNone(group_rm_member('testgroup', ['alice@jabber.org'], group_list,
- self.contact_list, self.settings, self.queues, self.master_key))
- time.sleep(0.1)
- self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
- self.assertEqual(self.queues[MESSAGE_PACKET_QUEUE].qsize(), 1)
-
-
-class TestGroupRemoveGroup(TFCTestCase):
-
- def setUp(self):
- self.o_input = builtins.input
- builtins.input = lambda _: 'Yes'
-
- self.user_input = UserInput()
- self.contact_list = ContactList(nicks=['Alice', 'Bob'])
- self.group_list = GroupList()
- self.settings = Settings()
- self.queues = {COMMAND_PACKET_QUEUE: Queue(),
- MESSAGE_PACKET_QUEUE: Queue()}
- self.master_key = MasterKey()
-
- def tearDown(self):
- builtins.input = self.o_input
-
- for key in self.queues:
- while not self.queues[key].empty():
- self.queues[key].get()
- time.sleep(0.1)
- self.queues[key].close()
-
- def test_cancel_of_remove_raises_fr(self):
- # Setup
- builtins.input = lambda _: 'No'
-
- # Test
- self.assertFR("Group removal aborted.",
- group_rm_group, 'testgroup', self.group_list, self.settings, self.queues, self.master_key)
-
- def test_remove_group_not_on_txm(self):
- self.assertFR("TxM has no group 'testgroup' to remove.",
- group_rm_group, 'testgroup', self.group_list, self.settings, self.queues, self.master_key)
- time.sleep(0.1)
- self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 2)
-
- def test_remove_group_and_notify(self):
- # Setup
- group_list = GroupList(groups=['testgroup'])
-
- # Test
- self.assertFR("Removed group 'testgroup'",
- group_rm_group, 'testgroup', group_list, self.settings, self.queues, self.master_key)
- time.sleep(0.1)
- self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 2)
- self.assertEqual(self.queues[MESSAGE_PACKET_QUEUE].qsize(), 2)
-
-
-if __name__ == '__main__':
- unittest.main(exit=False)
diff --git a/tests/tx/test_contact.py b/tests/tx/test_contact.py
deleted file mode 100644
index 1d1a377..0000000
--- a/tests/tx/test_contact.py
+++ /dev/null
@@ -1,737 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
-"""
-
-import builtins
-import getpass
-import os
-import unittest
-import time
-
-from multiprocessing import Queue
-
-from src.common.statics import *
-
-from src.tx.contact import add_new_contact, change_nick, contact_setting, show_fingerprints, remove_contact
-
-from tests.mock_classes import create_contact, ContactList, Group, GroupList, MasterKey, Settings, TxWindow, UserInput
-from tests.utils import ignored, TFCTestCase
-
-
-class TestAddNewContact(TFCTestCase):
-
- def setUp(self):
- self.o_getpass = getpass.getpass
- self.contact_list = ContactList()
- self.group_list = GroupList()
- self.settings = Settings(disable_gui_dialog=True)
- self.queues = {COMMAND_PACKET_QUEUE: Queue(),
- NH_PACKET_QUEUE: Queue(),
- KEY_MANAGEMENT_QUEUE: Queue()}
-
- def tearDown(self):
- getpass.getpass = self.o_getpass
-
- with ignored(OSError):
- os.remove('bob@jabber.org.psk - Give to alice@jabber.org')
-
- for key in self.queues:
- while not self.queues[key].empty():
- self.queues[key].get()
- time.sleep(0.1)
- self.queues[key].close()
-
- def test_adding_new_contact_during_traffic_masking_raises_fr(self):
- # Setup
- self.settings.session_traffic_masking = True
-
- # Test
- self.assertFR("Error: Command is disabled during traffic masking.",
- add_new_contact, self.contact_list, self.group_list, self.settings, self.queues)
-
- def test_contact_list_full_raises_fr(self):
- # Setup
- self.contact_list = ContactList(nicks=['contact_{}'.format(n) for n in range(20)])
-
- # Test
- self.assertFR("Error: TFC settings only allow 20 accounts.",
- add_new_contact, self.contact_list, self.group_list, self.settings, self.queues)
-
- def test_default_nick_x25519_kex(self):
- # Setup
- input_list = ['alice@jabber.org', 'bob@jabber.org', '', '',
- '5JJwZE46Eic9B8sKJ8Qocyxa8ytUJSfcqRo7Hr5ES7YgFGeJjCJ', 'Yes']
- gen = iter(input_list)
- builtins.input = lambda _: str(next(gen))
-
- # Test
- self.assertIsNone(add_new_contact(self.contact_list, self.group_list, self.settings, self.queues))
-
- contact = self.contact_list.get_contact('alice@jabber.org')
- self.assertEqual(contact.nick, 'Alice')
- self.assertNotEqual(contact.tx_fingerprint, bytes(FINGERPRINT_LEN)) # Indicates that PSK function was not called
-
- def test_standard_nick_psk_kex(self):
- # Setup
- getpass.getpass = lambda _: 'test_password'
- input_list = ['alice@jabber.org', 'bob@jabber.org', 'Alice_', 'psk', '.']
- gen = iter(input_list)
- builtins.input = lambda _: str(next(gen))
-
- # Test
- self.assertIsNone(add_new_contact(self.contact_list, self.group_list, self.settings, self.queues))
- contact = self.contact_list.get_contact('alice@jabber.org')
- self.assertEqual(contact.nick, 'Alice_')
- self.assertEqual(contact.tx_fingerprint, bytes(FINGERPRINT_LEN)) # Indicates that PSK function was called
-
-
-class TestRemoveContact(TFCTestCase):
-
- def setUp(self):
- self.o_input = builtins.input
- self.settings = Settings()
- self.master_key = MasterKey()
- self.queues = {KEY_MANAGEMENT_QUEUE: Queue(),
- COMMAND_PACKET_QUEUE: Queue()}
- self.contact_list = ContactList(nicks=['Alice'])
- self.group_list = GroupList(groups=['testgroup'])
-
- def tearDown(self):
- builtins.input = self.o_input
-
- for key in self.queues:
- while not self.queues[key].empty():
- self.queues[key].get()
- time.sleep(0.1)
- self.queues[key].close()
-
- def test_contact_removal_during_traffic_masking_raises_fr(self):
- # Setup
- self.settings.session_traffic_masking = True
-
- # Test
- self.assertFR("Error: Command is disabled during traffic masking.",
- remove_contact, None, None, None, None, self.settings, None, self.master_key)
-
- def test_missing_account_raises_fr(self):
- # Setup
- user_input = UserInput('rm ')
-
- # Test
- self.assertFR("Error: No account specified.",
- remove_contact, user_input, None, None, None, self.settings, None, self.master_key)
-
- def test_user_abort_raises_fr(self):
- # Setup
- builtins.input = lambda _: 'No'
- user_input = UserInput('rm alice@jabber.org')
-
- # Test
- self.assertFR("Removal of contact aborted.",
- remove_contact, user_input, None, None, None, self.settings, None, self.master_key)
-
- def test_successful_removal_of_contact(self):
- # Setup
- builtins.input = lambda _: 'Yes'
- user_input = UserInput('rm Alice')
- window = TxWindow(window_contacts=[self.contact_list.get_contact('Alice')],
- type=WIN_TYPE_CONTACT,
- uid='alice@jabber.org')
-
- # Test
- for g in self.group_list:
- self.assertIsInstance(g, Group)
- self.assertTrue(g.has_member('alice@jabber.org'))
-
- self.assertIsNone(remove_contact(user_input, window, self.contact_list, self.group_list, self.settings, self.queues, self.master_key))
- time.sleep(0.1)
- self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 2)
-
- km_data = self.queues[KEY_MANAGEMENT_QUEUE].get()
- self.assertEqual(km_data, (KDB_REMOVE_ENTRY_HEADER, 'alice@jabber.org'))
- self.assertFalse(self.contact_list.has_contact('alice@jabber.org'))
-
- for g in self.group_list:
- self.assertIsInstance(g, Group)
- self.assertFalse(g.has_member('alice@jabber.org'))
-
- def test_successful_removal_of_last_member_of_active_group(self):
- # Setup
- builtins.input = lambda _: 'Yes'
- user_input = UserInput('rm Alice')
- window = TxWindow(window_contacts=[self.contact_list.get_contact('Alice')],
- type=WIN_TYPE_GROUP,
- name='testgroup')
- group = self.group_list.get_group('testgroup')
- group.members = [self.contact_list.get_contact('alice@jabber.org')]
-
- # Test
- for g in self.group_list:
- self.assertIsInstance(g, Group)
- self.assertTrue(g.has_member('alice@jabber.org'))
- self.assertEqual(len(group), 1)
-
- self.assertIsNone(remove_contact(user_input, window, self.contact_list, self.group_list, self.settings, self.queues, self.master_key))
- time.sleep(0.1)
-
- for g in self.group_list:
- self.assertIsInstance(g, Group)
- self.assertFalse(g.has_member('alice@jabber.org'))
-
- self.assertFalse(self.contact_list.has_contact('alice@jabber.org'))
- self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 2)
-
- km_data = self.queues[KEY_MANAGEMENT_QUEUE].get()
- self.assertEqual(km_data, (KDB_REMOVE_ENTRY_HEADER, 'alice@jabber.org'))
-
- def test_no_contact_found_on_txm(self):
- # Setup
- builtins.input = lambda _: 'Yes'
- user_input = UserInput('rm charlie@jabber.org')
- contact_list = ContactList(nicks=['Bob'])
- window = TxWindow(window_contact=[contact_list.get_contact('Bob')],
- type=WIN_TYPE_GROUP)
-
- # Test
- self.assertIsNone(remove_contact(user_input, window, self.contact_list, self.group_list, self.settings, self.queues, self.master_key))
- time.sleep(0.1)
-
- self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 2)
- command_packet, settings_ = self.queues[COMMAND_PACKET_QUEUE].get()
- self.assertIsInstance(command_packet, bytes)
- self.assertIsInstance(settings_, Settings)
-
-
-class TestChangeNick(TFCTestCase):
-
- def setUp(self):
- self.c_queue = Queue()
- self.group_list = GroupList()
- self.settings = Settings()
- self.contact_list = ContactList(nicks=['Alice'])
-
- def tearDown(self):
- while not self.c_queue.empty():
- self.c_queue.get()
- time.sleep(0.1)
- self.c_queue.close()
-
- def test_active_group_raises_fr(self):
- # Setup
- window = TxWindow(type=WIN_TYPE_GROUP)
-
- # Test
- self.assertFR("Error: Group is selected.", change_nick, None, window, None, None, None, None)
-
- def test_missing_nick_raises_fr(self):
- # Setup
- user_input = UserInput("nick ")
- window = TxWindow(type=WIN_TYPE_CONTACT)
-
- # Test
- self.assertFR("Error: No nick specified.", change_nick, user_input, window, None, None, None, None)
-
- def test_invalid_nick_raises_fr(self):
- # Setup
- user_input = UserInput("nick Alice\x01")
- window = TxWindow(type=WIN_TYPE_CONTACT,
- contact=create_contact('Bob'))
-
- # Test
- self.assertFR("Nick must be printable.",
- change_nick, user_input, window, self.contact_list, self.group_list, None, None)
-
- def test_successful_nick_change(self):
- # Setup
- user_input = UserInput("nick Alice_")
- window = TxWindow(name='Alice',
- type=WIN_TYPE_CONTACT,
- contact=self.contact_list.get_contact('Alice'))
-
- # Test
- self.assertIsNone(change_nick(user_input, window, self.contact_list, self.group_list, self.settings, self.c_queue))
- self.assertEqual(self.contact_list.get_contact('alice@jabber.org').nick, 'Alice_')
-
-
-class TestContactSetting(TFCTestCase):
-
- def setUp(self):
- self.c_queue = Queue()
- self.contact_list = ContactList(nicks=['Alice', 'Bob'])
- self.settings = Settings()
- self.group_list = GroupList(groups=['testgroup'])
-
- def tearDown(self):
- while not self.c_queue.empty():
- self.c_queue.get()
- time.sleep(0.1)
- self.c_queue.close()
-
- def test_invalid_command_raises_fr(self):
- # Setup
- user_input = UserInput('loging on')
-
- # Test
- self.assertFR("Error: Invalid command.", contact_setting, user_input, None, None, None, None, None)
-
- def test_missing_parameter_raises_fr(self):
- # Setup
- user_input = UserInput('')
-
- # Test
- self.assertFR("Error: Invalid command.", contact_setting, user_input, None, None, None, None, None)
-
- def test_invalid_extra_parameter_raises_fr(self):
- # Setup
- user_input = UserInput('logging on al')
-
- # Test
- self.assertFR("Error: Invalid command.", contact_setting, user_input, None, None, None, None, None)
-
- def test_enable_logging_for_user(self):
- # Setup
- user_input = UserInput('logging on')
- contact = self.contact_list.get_contact('Alice')
- contact.log_messages = False
- window = TxWindow(uid='alice@jabber.org',
- type=WIN_TYPE_CONTACT,
- contact=contact)
-
- # Test
- self.assertFalse(contact.log_messages)
-
- self.assertIsNone(contact_setting(user_input, window, self.contact_list, self.group_list, self.settings, self.c_queue))
- time.sleep(0.1)
-
- self.assertTrue(contact.log_messages)
-
- def test_enable_logging_for_user_during_traffic_masking(self):
- # Setup
- user_input = UserInput('logging on')
- contact = self.contact_list.get_contact('Alice')
- contact.log_messages = False
- window = TxWindow(uid='alice@jabber.org',
- type=WIN_TYPE_CONTACT,
- contact=contact,
- log_messages=False)
- self.settings.session_traffic_masking = True
-
- # Test
- self.assertFalse(contact.log_messages)
- self.assertFalse(window.log_messages)
-
- self.assertIsNone(contact_setting(user_input, window, self.contact_list, self.group_list, self.settings, self.c_queue))
- time.sleep(0.1)
-
- self.assertEqual(self.c_queue.qsize(), 1)
- self.assertTrue(window.log_messages)
- self.assertTrue(contact.log_messages)
-
- def test_enable_logging_for_group(self):
- # Setup
- user_input = UserInput('logging on')
- group = self.group_list.get_group('testgroup')
- group.log_messages = False
- window = TxWindow(uid='testgroup',
- type=WIN_TYPE_GROUP,
- group=group,
- window_contacts=group.members)
-
- # Test
- self.assertFalse(group.log_messages)
-
- self.assertIsNone(contact_setting(user_input, window, self.contact_list, self.group_list, self.settings, self.c_queue))
- time.sleep(0.1)
-
- self.assertTrue(group.log_messages)
-
- def test_enable_logging_for_all_users(self):
- # Setup
- user_input = UserInput('logging on all')
- contact = self.contact_list.get_contact('alice@jabber.org')
- window = TxWindow(uid='alice@jabber.org',
- type=WIN_TYPE_CONTACT,
- contact=contact,
- window_contacts=[contact])
-
- for c in self.contact_list:
- c.log_messages = False
- for g in self.group_list:
- g.log_messages = False
-
- # Test
- for c in self.contact_list:
- self.assertFalse(c.log_messages)
- for g in self.group_list:
- self.assertFalse(g.log_messages)
-
- self.assertIsNone(contact_setting(user_input, window, self.contact_list, self.group_list, self.settings, self.c_queue))
- time.sleep(0.1)
-
- for c in self.contact_list:
- self.assertTrue(c.log_messages)
- for g in self.group_list:
- self.assertTrue(g.log_messages)
-
- def test_disable_logging_for_user(self):
- # Setup
- user_input = UserInput('logging off')
- contact = self.contact_list.get_contact('Alice')
- contact.log_messages = True
- window = TxWindow(uid='alice@jabber.org',
- type=WIN_TYPE_CONTACT,
- contact=contact,
- window_contacts=[contact])
-
- # Test
- self.assertTrue(contact.log_messages)
-
- self.assertIsNone(contact_setting(user_input, window, self.contact_list, self.group_list, self.settings, self.c_queue))
- time.sleep(0.1)
-
- self.assertFalse(contact.log_messages)
-
- def test_disable_logging_for_group(self):
- # Setup
- user_input = UserInput('logging off')
- group = self.group_list.get_group('testgroup')
- group.log_messages = True
- window = TxWindow(uid='testgroup',
- type=WIN_TYPE_GROUP,
- group=group,
- window_contacts=group.members)
-
- # Test
- self.assertTrue(group.log_messages)
-
- self.assertIsNone(contact_setting(user_input, window, self.contact_list, self.group_list, self.settings, self.c_queue))
- time.sleep(0.1)
-
- self.assertFalse(group.log_messages)
-
- def test_disable_logging_for_all_users(self):
- # Setup
- user_input = UserInput('logging off all')
- contact = self.contact_list.get_contact('alice@jabber.org')
- window = TxWindow(uid='alice@jabber.org',
- type=WIN_TYPE_CONTACT,
- contact=contact,
- window_contacts=[contact])
-
- for c in self.contact_list:
- c.log_messages = True
- for g in self.group_list:
- g.log_messages = True
-
- # Test
- for c in self.contact_list:
- self.assertTrue(c.log_messages)
- for g in self.group_list:
- self.assertTrue(g.log_messages)
-
- self.assertIsNone(contact_setting(user_input, window, self.contact_list, self.group_list, self.settings, self.c_queue))
- time.sleep(0.1)
-
- for c in self.contact_list:
- self.assertFalse(c.log_messages)
- for g in self.group_list:
- self.assertFalse(g.log_messages)
-
- def test_enable_file_reception_for_user(self):
- # Setup
- user_input = UserInput('store on')
- contact = self.contact_list.get_contact('Alice')
- contact.file_reception = False
- window = TxWindow(uid='alice@jabber.org',
- type=WIN_TYPE_CONTACT,
- contact=contact,
- window_contacts=[contact])
-
- # Test
- self.assertFalse(contact.file_reception)
-
- self.assertIsNone(contact_setting(user_input, window, self.contact_list, self.group_list, self.settings, self.c_queue))
- time.sleep(0.1)
-
- self.assertTrue(contact.file_reception)
-
- def test_enable_file_reception_for_group(self):
- # Setup
- user_input = UserInput('store on')
- group = self.group_list.get_group('testgroup')
- window = TxWindow(uid='testgroup',
- type=WIN_TYPE_GROUP,
- group=group,
- window_contacts=group.members)
-
- for m in group:
- m.file_reception = False
-
- # Test
- for m in group:
- self.assertFalse(m.file_reception)
-
- self.assertIsNone(contact_setting(user_input, window, self.contact_list, self.group_list, self.settings, self.c_queue))
- time.sleep(0.1)
-
- for m in group:
- self.assertTrue(m.file_reception)
-
- def test_enable_file_reception_for_all_users(self):
- # Setup
- user_input = UserInput('store on all')
- contact = self.contact_list.get_contact('alice@jabber.org')
- window = TxWindow(uid='alice@jabber.org',
- type=WIN_TYPE_CONTACT,
- contact=contact,
- window_contacts=[contact])
-
- for c in self.contact_list:
- c.file_reception = False
-
- # Test
- for c in self.contact_list:
- self.assertFalse(c.file_reception)
-
- self.assertIsNone(contact_setting(user_input, window, self.contact_list, self.group_list, self.settings, self.c_queue))
- time.sleep(0.1)
-
- for c in self.contact_list:
- self.assertTrue(c.file_reception)
-
- def test_disable_file_reception_for_user(self):
- # Setup
- user_input = UserInput('store off')
- contact = self.contact_list.get_contact('Alice')
- contact.file_reception = True
- window = TxWindow(uid='alice@jabber.org',
- type=WIN_TYPE_CONTACT,
- contact=contact,
- window_contacts=[contact])
-
- # Test
- self.assertTrue(contact.file_reception)
-
- self.assertIsNone(contact_setting(user_input, window, self.contact_list, self.group_list, self.settings, self.c_queue))
- time.sleep(0.1)
-
- self.assertFalse(contact.file_reception)
-
- def test_disable_file_reception_for_group(self):
- # Setup
- user_input = UserInput('store off')
- group = self.group_list.get_group('testgroup')
- window = TxWindow(uid='testgroup',
- type=WIN_TYPE_GROUP,
- group=group,
- window_contacts=group.members)
-
- for m in group:
- m.file_reception = True
-
- # Test
- for m in group:
- self.assertTrue(m.file_reception)
-
- self.assertIsNone(contact_setting(user_input, window, self.contact_list, self.group_list, self.settings, self.c_queue))
- time.sleep(0.1)
-
- for m in group:
- self.assertFalse(m.file_reception)
-
- def test_disable_file_reception_for_all_users(self):
- # Setup
- user_input = UserInput('store off all')
- contact = self.contact_list.get_contact('alice@jabber.org')
- window = TxWindow(uid='alice@jabber.org',
- type=WIN_TYPE_CONTACT,
- contact=contact,
- window_contacts=[contact])
-
- for c in self.contact_list:
- c.file_reception = True
-
- # Test
- for c in self.contact_list:
- self.assertTrue(c.file_reception)
-
- self.assertIsNone(contact_setting(user_input, window, self.contact_list, self.group_list, self.settings, self.c_queue))
- time.sleep(0.1)
-
- for c in self.contact_list:
- self.assertFalse(c.file_reception)
-
- def test_enable_notifications_for_user(self):
- # Setup
- user_input = UserInput('notify on')
- contact = self.contact_list.get_contact('Alice')
- contact.notifications = False
- window = TxWindow(uid='alice@jabber.org',
- type=WIN_TYPE_CONTACT,
- contact=contact)
-
- # Test
- self.assertFalse(contact.notifications)
-
- self.assertIsNone(contact_setting(user_input, window, self.contact_list, self.group_list, self.settings, self.c_queue))
- time.sleep(0.1)
-
- self.assertTrue(contact.notifications)
-
- def test_enable_notifications_for_group(self):
- # Setup
- user_input = UserInput('notify on')
- group = self.group_list.get_group('testgroup')
- group.notifications = False
- window = TxWindow(uid='testgroup',
- type=WIN_TYPE_GROUP,
- group=group,
- window_contacts=group.members)
-
- # Test
- self.assertFalse(group.notifications)
-
- self.assertIsNone(contact_setting(user_input, window, self.contact_list, self.group_list, self.settings, self.c_queue))
- time.sleep(0.1)
-
- self.assertTrue(group.notifications)
-
- def test_enable_notifications_for_all_users(self):
- # Setup
- user_input = UserInput('notify on all')
- contact = self.contact_list.get_contact('alice@jabber.org')
- window = TxWindow(uid='alice@jabber.org',
- type=WIN_TYPE_CONTACT,
- contact=contact,
- window_contacts=[contact])
-
- for c in self.contact_list:
- c.notifications = False
- for g in self.group_list:
- g.notifications = False
-
- # Test
- for c in self.contact_list:
- self.assertFalse(c.notifications)
- for g in self.group_list:
- self.assertFalse(g.notifications)
-
- self.assertIsNone(contact_setting(user_input, window, self.contact_list, self.group_list, self.settings, self.c_queue))
- time.sleep(0.1)
-
- for c in self.contact_list:
- self.assertTrue(c.notifications)
- for g in self.group_list:
- self.assertTrue(g.notifications)
-
- def test_disable_notifications_for_user(self):
- # Setup
- user_input = UserInput('notify off')
- contact = self.contact_list.get_contact('Alice')
- contact.notifications = True
- window = TxWindow(uid='alice@jabber.org',
- type=WIN_TYPE_CONTACT,
- contact=contact,
- window_contacts=[contact])
-
- # Test
- self.assertTrue(contact.notifications)
-
- self.assertIsNone(contact_setting(user_input, window, self.contact_list, self.group_list, self.settings, self.c_queue))
- time.sleep(0.1)
-
- self.assertFalse(contact.notifications)
-
- def test_disable_notifications_for_group(self):
- # Setup
- user_input = UserInput('notify off')
- group = self.group_list.get_group('testgroup')
- group.notifications = True
- window = TxWindow(uid='testgroup',
- type=WIN_TYPE_GROUP,
- group=group,
- window_contacts=group.members)
-
- # Test
- self.assertTrue(group.notifications)
-
- self.assertIsNone(contact_setting(user_input, window, self.contact_list, self.group_list, self.settings, self.c_queue))
- time.sleep(0.1)
-
- self.assertFalse(group.notifications)
-
- def test_disable_notifications_for_all_users(self):
- # Setup
- user_input = UserInput('notify off all')
- contact = self.contact_list.get_contact('alice@jabber.org')
- window = TxWindow(uid='alice@jabber.org',
- type=WIN_TYPE_CONTACT,
- contact=contact,
- window_contacts=[contact])
-
- for c in self.contact_list:
- c.notifications = True
- for g in self.group_list:
- g.notifications = True
-
- # Test
- for c in self.contact_list:
- self.assertTrue(c.notifications)
- for g in self.group_list:
- self.assertTrue(g.notifications)
-
- self.assertIsNone(contact_setting(user_input, window, self.contact_list, self.group_list, self.settings, self.c_queue))
- time.sleep(0.1)
-
- for c in self.contact_list:
- self.assertFalse(c.notifications)
- for g in self.group_list:
- self.assertFalse(g.notifications)
-
-
-class TestFingerprints(TFCTestCase):
-
- def test_active_group_raises_fr(self):
- # Setup
- window = TxWindow(type=WIN_TYPE_GROUP)
-
- # Test
- self.assertFR("Group is selected.", show_fingerprints, window)
-
- def test_psk_raises_fr(self):
- # Setup
- contact = create_contact()
- contact.tx_fingerprint = bytes(FINGERPRINT_LEN)
- window = TxWindow(name='Alice',
- type=WIN_TYPE_CONTACT,
- contact=contact)
- # Test
- self.assertFR("Pre-shared keys have no fingerprints.", show_fingerprints, window)
-
- def test_fingerprint_print_command(self):
- # Setup
- window = TxWindow(name='Alice',
- type=WIN_TYPE_CONTACT,
- contact=create_contact())
- # Test
- self.assertIsNone(show_fingerprints(window))
-
-
-if __name__ == '__main__':
- unittest.main(exit=False)
diff --git a/tests/tx/test_input_loop.py b/tests/tx/test_input_loop.py
deleted file mode 100644
index 38d5443..0000000
--- a/tests/tx/test_input_loop.py
+++ /dev/null
@@ -1,104 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
-"""
-
-import builtins
-import os
-import time
-import unittest
-
-from multiprocessing import Queue
-
-from src.common.statics import *
-
-import src.tx.commands
-from src.tx.input_loop import input_loop
-
-from tests.mock_classes import ContactList, Gateway, GroupList, MasterKey, Settings
-
-
-class TestInputLoop(unittest.TestCase):
-
- def setUp(self):
- if 'TRAVIS' not in os.environ or not os.environ['TRAVIS'] == 'true':
- self.o_getrandom = os.getrandom
- self.o_input = builtins.input
- self.o_urandom = os.urandom
- self.gateway = Gateway()
- self.settings = Settings(disable_gui_dialog=True)
- self.contact_list = ContactList()
- self.group_list = GroupList()
- self.master_key = MasterKey()
- self.queues = {MESSAGE_PACKET_QUEUE: Queue(),
- FILE_PACKET_QUEUE: Queue(),
- COMMAND_PACKET_QUEUE: Queue(),
- NH_PACKET_QUEUE: Queue(),
- LOG_PACKET_QUEUE: Queue(),
- EXIT_QUEUE: Queue(),
- NOISE_PACKET_QUEUE: Queue(),
- NOISE_COMMAND_QUEUE: Queue(),
- KEY_MANAGEMENT_QUEUE: Queue(),
- WINDOW_SELECT_QUEUE: Queue()}
-
- input_list = ['', # NH Bypass start
- '61', # Enter confirmation code
- '', # NH Bypass end
- 'alice@jabber.org', # Enter rx-account for new contact
- 'bob@jabber.org', # Enter tx-account for new contact
- '', # Enter for auto-nick
- '', # Enter to default for X25519
- '5JZB2s2RCtRUunKiqMbb6rAj3Z' # Enter public key for contact
- '7TkJwa8zknL1cfTFpWoQArd6n',
- 'Yes', # Accept key fingerprints for Alice
- 'Alice', # Select Alice as recipient
- 'Test', # Send test message
- '/file', # Open file selection prompt
- '', # Give empty string to abort
- '/exit'] # Enter exit command
- gen = iter(input_list)
- builtins.input = lambda _: str(next(gen))
- if 'TRAVIS' not in os.environ or not os.environ['TRAVIS'] == 'true':
- os.getrandom = lambda n, flags: n * b'a'
- os.urandom = lambda n: n * b'a'
-
- self.o_exit_tfc = src.tx.commands.exit_tfc
- src.tx.commands.exit_tfc = lambda *_: (_ for _ in ()).throw(SystemExit)
-
- def tearDown(self):
- if 'TRAVIS' not in os.environ or not os.environ['TRAVIS'] == 'true':
- os.getrandom = self.o_getrandom
-
- builtins.input = self.o_input
- os.urandom = self.o_urandom
- src.tx.commands.exit_tfc = self.o_exit_tfc
-
- for key in self.queues:
- while not self.queues[key].empty():
- self.queues[key].get()
- time.sleep(0.1)
- self.queues[key].close()
-
- def test_input_loop_functions(self):
- with self.assertRaises(SystemExit):
- self.assertIsNone(input_loop(self.queues, self.settings, self.gateway, self.contact_list,
- self.group_list, self.master_key, stdin_fd=1))
-
-
-if __name__ == '__main__':
- unittest.main(exit=False)
diff --git a/tests/tx/test_key_exchanges.py b/tests/tx/test_key_exchanges.py
deleted file mode 100644
index 8190048..0000000
--- a/tests/tx/test_key_exchanges.py
+++ /dev/null
@@ -1,327 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
-"""
-
-import builtins
-import getpass
-import os
-import unittest
-import time
-
-from multiprocessing import Queue
-
-from src.common.encoding import b58encode
-from src.common.statics import *
-
-from src.tx.key_exchanges import new_local_key, create_pre_shared_key, rxm_load_psk, start_key_exchange, verify_fingerprints
-
-from tests.mock_classes import create_contact, ContactList, Settings, TxWindow
-from tests.utils import ignored, TFCTestCase
-
-
-class TestLocalKey(TFCTestCase):
-
- def setUp(self):
- self.o_input = builtins.input
- self.o_urandom = os.urandom
-
- if 'TRAVIS' not in os.environ or not os.environ['TRAVIS'] == 'true':
- self.o_getrandom = os.getrandom
-
- self.contact_list = ContactList()
- self.settings = Settings()
- self.queues = {COMMAND_PACKET_QUEUE: Queue(),
- NH_PACKET_QUEUE: Queue(),
- KEY_MANAGEMENT_QUEUE: Queue()}
-
- def tearDown(self):
- builtins.input = self.o_input
- os.urandom = self.o_urandom
-
- if 'TRAVIS' not in os.environ or not os.environ['TRAVIS'] == 'true':
- os.getrandom = self.o_getrandom
-
- for key in self.queues.keys():
- while not self.queues[key].empty():
- self.queues[key].get()
- time.sleep(0.1)
- self.queues[key].close()
-
- def test_new_local_key_when_traffic_masking_is_enabled_raises_fr(self):
- # Setup
- self.settings.session_traffic_masking = True
-
- # Test
- self.assertFR("Error: Command is disabled during traffic masking.",
- new_local_key, self.contact_list, self.settings, self.queues)
-
- def test_new_local_key(self):
- # Setup
- self.settings.nh_bypass_messages = False
- self.settings.session_traffic_masking = False
-
- if 'TRAVIS' not in os.environ or not os.environ['TRAVIS'] == 'true':
- os.getrandom = lambda n, flags: n * b'\xff'
-
- os.urandom = lambda n: n * b'\xff'
- input_list = ['bad', 'resend', 'ff']
- gen = iter(input_list)
- builtins.input = lambda _: str(next(gen))
-
- # Test
- self.assertIsNone(new_local_key(self.contact_list, self.settings, self.queues))
- time.sleep(0.1)
-
- local_contact = self.contact_list.get_contact(LOCAL_ID)
-
- self.assertEqual(local_contact.rx_account, LOCAL_ID)
- self.assertEqual(local_contact.tx_account, LOCAL_ID)
- self.assertEqual(local_contact.nick, LOCAL_ID)
- self.assertEqual(local_contact.tx_fingerprint, bytes(FINGERPRINT_LEN))
- self.assertEqual(local_contact.rx_fingerprint, bytes(FINGERPRINT_LEN))
- self.assertFalse(local_contact.log_messages)
- self.assertFalse(local_contact.file_reception)
- self.assertFalse(local_contact.notifications)
-
- self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
-
- cmd, account, tx_key, rx_key, tx_hek, rx_hek = self.queues[KEY_MANAGEMENT_QUEUE].get()
-
- self.assertEqual(cmd, KDB_ADD_ENTRY_HEADER)
- self.assertEqual(account, LOCAL_ID)
- for key in [tx_key, rx_key, tx_hek, rx_hek]:
- self.assertIsInstance(key, bytes)
- self.assertEqual(len(key), KEY_LENGTH)
-
-
-class TestVerifyFingerprints(unittest.TestCase):
-
- def setUp(self):
- self.o_input = builtins.input
-
- def tearDown(self):
- builtins.input = self.o_input
-
- def test_correct_fingerprint(self):
- builtins.input = lambda _: 'Yes'
- self.assertTrue(verify_fingerprints(bytes(32), bytes(32)))
-
- def test_incorrect_fingerprint(self):
- builtins.input = lambda _: 'No'
- self.assertFalse(verify_fingerprints(bytes(32), bytes(32)))
-
-
-class TestKeyExchange(TFCTestCase):
-
- def setUp(self):
- self.o_input = builtins.input
-
- self.contact_list = ContactList()
- self.settings = Settings()
- self.queues = {COMMAND_PACKET_QUEUE: Queue(),
- NH_PACKET_QUEUE: Queue(),
- KEY_MANAGEMENT_QUEUE: Queue()}
-
- def tearDown(self):
- builtins.input = self.o_input
-
- for key in self.queues.keys():
- while not self.queues[key].empty():
- self.queues[key].get()
- time.sleep(0.1)
- self.queues[key].close()
-
- def test_zero_public_key_raises_fr(self):
- # Setup
- builtins.input = lambda _: b58encode(bytes(32))
-
- # Test
- self.assertFR("Error: Zero public key", start_key_exchange, 'alice@jabber.org', 'user@jabber.org', 'Alice',
- self.contact_list, self.settings, self.queues)
-
- def test_raises_fr_during_fingerprint_mismatch(self):
- # Setup
- input_list = ['resend', # Resend should resend key
- '5JCVapni8CR2PEXr5v92cCY2QgSd4cztR2v3L3vK2eair7dGHi', # Short key should fail
- '5JCVapni8CR2PEXr5v92cCY2QgSd4cztR2v3L3vK2eair7dGHiHa', # Long key should fail
- '5JCVapni8CR2PEXr5v92cCY2QgSd4cztR2v3L3vK2eair7dGHia', # Invalid key should fail
- '5JCVapni8CR2PEXr5v92cCY2QgSd4cztR2v3L3vK2eair7dGHiH', # Correct key
- 'No'] # Fingerprint mismatch
-
- gen = iter(input_list)
- builtins.input = lambda _: str(next(gen))
-
- # Test
- self.assertFR("Error: Fingerprint mismatch", start_key_exchange, 'alice@jabber.org', 'user@jabber.org', 'Alice',
- self.contact_list, self.settings, self.queues)
-
- def test_successful_exchange(self):
- # Setup
- input_list = ['5JCVapni8CR2PEXr5v92cCY2QgSd4cztR2v3L3vK2eair7dGHiH', # Correct key
- 'Yes'] # Fingerprint match
- gen = iter(input_list)
- builtins.input = lambda _: str(next(gen))
-
- # Test
- self.assertIsNone(start_key_exchange('alice@jabber.org', 'user@jabber.org', 'Alice',
- self.contact_list, self.settings, self.queues))
- time.sleep(0.1)
-
- contact = self.contact_list.get_contact('alice@jabber.org')
-
- self.assertEqual(contact.rx_account, 'alice@jabber.org')
- self.assertEqual(contact.tx_account, 'user@jabber.org')
- self.assertEqual(contact.nick, 'Alice')
- self.assertIsInstance(contact.tx_fingerprint, bytes)
- self.assertIsInstance(contact.rx_fingerprint, bytes)
- self.assertEqual(len(contact.tx_fingerprint), FINGERPRINT_LEN)
- self.assertEqual(len(contact.rx_fingerprint), FINGERPRINT_LEN)
- self.assertFalse(contact.log_messages)
- self.assertFalse(contact.file_reception)
- self.assertTrue(contact.notifications)
-
- self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
-
- cmd, account, tx_key, rx_key, tx_hek, rx_hek = self.queues[KEY_MANAGEMENT_QUEUE].get()
-
- self.assertEqual(cmd, KDB_ADD_ENTRY_HEADER)
- self.assertEqual(account, 'alice@jabber.org')
- self.assertEqual(len(tx_key), KEY_LENGTH)
- for key in [tx_key, rx_key, tx_hek, rx_hek]:
- self.assertIsInstance(key, bytes)
- self.assertEqual(len(key), KEY_LENGTH)
-
-class TestPSK(TFCTestCase):
-
- def setUp(self):
- if 'TRAVIS' not in os.environ or not os.environ['TRAVIS'] == 'true':
- self.o_getrandom = os.getrandom
-
- self.o_input = builtins.input
- self.o_getpass = getpass.getpass
- self.contact_list = ContactList()
- self.settings = Settings(disable_gui_dialog=True)
- self.queues = {COMMAND_PACKET_QUEUE: Queue(),
- KEY_MANAGEMENT_QUEUE: Queue()}
-
- if 'TRAVIS' not in os.environ or not os.environ['TRAVIS'] == 'true':
- os.getrandom = lambda n, flags: n * b'\x00'
-
- getpass.getpass = lambda _: 'test_password'
- input_list = ['/root/', # Invalid directory
- '.'] # Valid directory
- gen = iter(input_list)
- builtins.input = lambda _: str(next(gen))
-
- def tearDown(self):
- builtins.input = self.o_input
- getpass.getpass = self.o_getpass
-
- if 'TRAVIS' not in os.environ or not os.environ['TRAVIS'] == 'true':
- os.getrandom = self.o_getrandom
-
- with ignored(OSError):
- os.remove('user@jabber.org.psk - Give to alice@jabber.org')
-
- for key in self.queues.keys():
- while not self.queues[key].empty():
- self.queues[key].get()
- time.sleep(0.1)
- self.queues[key].close()
-
- def test_psk_creation(self):
- self.assertIsNone(create_pre_shared_key('alice@jabber.org', 'user@jabber.org', 'Alice',
- self.contact_list, self.settings, self.queues))
-
- contact = self.contact_list.get_contact('alice@jabber.org')
-
- self.assertEqual(contact.rx_account, 'alice@jabber.org')
- self.assertEqual(contact.tx_account, 'user@jabber.org')
- self.assertEqual(contact.nick, 'Alice')
- self.assertEqual(contact.tx_fingerprint, bytes(FINGERPRINT_LEN))
- self.assertEqual(contact.rx_fingerprint, bytes(FINGERPRINT_LEN))
- self.assertFalse(contact.log_messages)
- self.assertFalse(contact.file_reception)
- self.assertTrue(contact.notifications)
-
- cmd, account, tx_key, rx_key, tx_hek, rx_hek = self.queues[KEY_MANAGEMENT_QUEUE].get()
-
- self.assertEqual(cmd, KDB_ADD_ENTRY_HEADER)
- self.assertEqual(account, 'alice@jabber.org')
- for key in [tx_key, rx_key, tx_hek, rx_hek]:
- self.assertIsInstance(key, bytes)
- self.assertEqual(len(key), KEY_LENGTH)
-
- self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
- self.assertTrue(os.path.isfile('user@jabber.org.psk - Give to alice@jabber.org'))
-
-
-class TestRxMLoadPSK(TFCTestCase):
-
- def setUp(self):
- self.c_queue = Queue()
- self.settings = Settings()
-
- def tearDown(self):
- while not self.c_queue.empty():
- self.c_queue.get()
- time.sleep(0.1)
- self.c_queue.close()
-
- def test_raises_fr_when_traffic_masking_is_enabled(self):
- # Setup
- self.settings.session_traffic_masking = True
-
- # Test
- self.assertFR("Error: Command is disabled during traffic masking.",
- rxm_load_psk, None, None, self.settings, None)
-
- def test_active_group_raises_fr(self):
- # Setup
- window = TxWindow(type=WIN_TYPE_GROUP)
-
- # Test
- self.assertFR("Error: Group is selected.", rxm_load_psk, window, None, self.settings, None)
-
- def test_x25519_key_raises_fr(self):
- # Setup
- window = TxWindow(type=WIN_TYPE_CONTACT,
- uid ='alice@jabber.org')
- contact_list = ContactList(nicks=['Alice'])
-
- # Test
- self.assertFR("Error: Current key was exchanged with X25519.",
- rxm_load_psk, window, contact_list, self.settings, None)
-
- def test_successful_command(self):
- # Setup
- window = TxWindow(type=WIN_TYPE_CONTACT,
- uid ='alice@jabber.org')
- contact = create_contact(txfp=bytes(FINGERPRINT_LEN))
- contact_list = ContactList(contacts=[contact])
-
- # Test
- self.assertIsNone(rxm_load_psk(window, contact_list, self.settings, self.c_queue))
- time.sleep(0.1)
-
- self.assertEqual(self.c_queue.qsize(), 1)
-
-
-if __name__ == '__main__':
- unittest.main(exit=False)
diff --git a/tests/tx/test_packet.py b/tests/tx/test_packet.py
deleted file mode 100644
index 3113475..0000000
--- a/tests/tx/test_packet.py
+++ /dev/null
@@ -1,636 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see .
-"""
-
-import builtins
-import os
-import struct
-import time
-import unittest
-
-from multiprocessing import Queue
-
-from src.common.statics import *
-
-from src.tx.packet import cancel_packet, queue_command, queue_file, queue_message
-from src.tx.packet import queue_packets, split_to_assembly_packets, send_packet, transmit
-
-from tests.mock_classes import create_contact, create_group, create_keyset, Gateway, KeyList, Settings, TxWindow, UserInput
-from tests.utils import ignored, TFCTestCase
-
-
-class TestQueueMessage(unittest.TestCase):
-
- def setUp(self):
- self.m_queue = Queue()
- self.settings = Settings()
-
- def tearDown(self):
- while not self.m_queue.empty():
- self.m_queue.get()
- time.sleep(0.1)
- self.m_queue.close()
-
- def test_private_message_header(self):
- # Setup
- user_input = UserInput(plaintext='Test message', type=MESSAGE)
- window = TxWindow(log_messages=True)
-
- window.window_contacts = [create_contact()]
-
- # Test
- self.assertIsNone(queue_message(user_input, window, self.settings, self.m_queue))
- time.sleep(0.1)
-
- self.assertEqual(self.m_queue.qsize(), 1)
-
- def test_group_message_header(self):
- # Setup
- user_input = UserInput(plaintext='Test message', type=MESSAGE)
- window = TxWindow(name='testgroup',
- type=WIN_TYPE_GROUP,
- group=create_group(),
- log_messages=True)
-
- window.window_contacts = [create_contact()]
-
- # Test
- self.assertIsNone(queue_message(user_input, window, self.settings, self.m_queue))
- time.sleep(0.1)
-
- self.assertEqual(self.m_queue.qsize(), 1)
-
- def test_group_management_message_header(self):
- # Setup
- user_input = UserInput(plaintext='Test message', type=MESSAGE)
- window = TxWindow(log_messages=True)
-
- window.window_contacts = [create_contact()]
-
- # Test
- self.assertIsNone(queue_message(user_input, window, self.settings,
- self.m_queue, header=GROUP_MSG_INVITEJOIN_HEADER))
- time.sleep(0.1)
-
- self.assertEqual(self.m_queue.qsize(), 1)
-
-
-class TestQueueFile(TFCTestCase):
-
- def setUp(self):
- self.f_queue = Queue()
-
- def tearDown(self):
- while not self.f_queue.empty():
- self.f_queue.get()
- time.sleep(0.1)
- self.f_queue.close()
-
- with ignored(OSError):
- os.remove('testfile.txt')
-
- def test_aborted_file(self):
- # Setup
- input_data = os.urandom(5)
- with open('testfile.txt', 'wb+') as f:
- f.write(input_data)
-
- window = TxWindow(name='Alice',
- type=WIN_TYPE_CONTACT,
- type_print='contact',
- uid='alice@jabber.org')
- settings = Settings(session_traffic_masking=True,
- disable_gui_dialog=True)
- gateway = Gateway(txm_inter_packet_delay=0.02)
-
- input_list = ['./testfile.txt', 'No']
- gen = iter(input_list)
- builtins.input = lambda _: str(next(gen))
-
- # Test
- self.assertFR("File selection aborted.", queue_file, window, settings, self.f_queue, gateway)
-
- def test_file_queue_short_traffic_masking(self):
- # Setup
- input_data = os.urandom(5)
- with open('testfile.txt', 'wb+') as f:
- f.write(input_data)
-
- window = TxWindow(name='Alice',
- type=WIN_TYPE_CONTACT,
- type_print='contact',
- uid='alice@jabber.org',
- log_messages=True)
- settings = Settings(session_traffic_masking=True,
- disable_gui_dialog=True)
- gateway = Gateway(txm_inter_packet_delay=0.02)
-
- input_list = ['./testfile.txt', 'Yes']
- gen = iter(input_list)
- builtins.input = lambda _: str(next(gen))
-
- # Test
- self.assertIsNone(queue_file(window, settings, self.f_queue, gateway))
- time.sleep(0.1)
-
- self.assertEqual(self.f_queue.qsize(), 1)
-
- q_data, log_messages, log_as_ph = self.f_queue.get()
- self.assertIsInstance(q_data, bytes)
- self.assertTrue(log_messages)
- self.assertTrue(log_as_ph)
-
- def test_file_queue_long_normal(self):
- # Setup
- input_data = os.urandom(2000)
- with open('testfile.txt', 'wb+') as f:
- f.write(input_data)
-
- window = TxWindow(name='Alice',
- type=WIN_TYPE_CONTACT,
- type_print='contact',
- uid='alice@jabber.org',
- window_contacts=[create_contact()],
- log_messages=True)
- settings = Settings(session_traffic_masking=False,
- disable_gui_dialog=True,
- confirm_sent_files=True,
- multi_packet_random_delay=True)
- gateway = Gateway(txm_inter_packet_delay=0.02)
-
- input_list = ['./testfile.txt', 'Yes']
- gen = iter(input_list)
- builtins.input = lambda _: str(next(gen))
-
- # Test
- self.assertIsNone(queue_file(window, settings, self.f_queue, gateway))
- time.sleep(0.1)
-
- self.assertEqual(self.f_queue.qsize(), 11)
-
- packet, settings, rx_account, tx_account, log_messages, log_as_ph, win_uid = self.f_queue.get()
- self.assertIsInstance(packet, bytes)
- self.assertIsInstance(settings, Settings)
- self.assertEqual(rx_account, 'alice@jabber.org')
- self.assertEqual(tx_account, 'user@jabber.org')
- self.assertEqual(win_uid, 'alice@jabber.org')
- self.assertTrue(log_messages)
- self.assertTrue(log_as_ph)
-
-
-class TestQueueCommand(unittest.TestCase):
-
- def setUp(self):
- self.c_queue = Queue()
- self.settings = Settings()
-
- def tearDown(self):
- while not self.c_queue.empty():
- self.c_queue.get()
- time.sleep(0.1)
- self.c_queue.close()
-
- def test_queue_command(self):
- self.assertIsNone(queue_command(os.urandom(200), self.settings, self.c_queue))
- time.sleep(0.1)
-
- c_pt, settings_ = self.c_queue.get()
- self.assertEqual(len(c_pt), ASSEMBLY_PACKET_LEN)
- self.assertIsInstance(settings_, Settings)
-
-
-class TestSplitToAssemblyPackets(unittest.TestCase):
-
- def test_short_message(self):
- packet_list = split_to_assembly_packets(b'Short message', MESSAGE)
- self.assertEqual(len(packet_list), 1)
- self.assertTrue(packet_list[0].startswith(M_S_HEADER))
-
- def test_long_message(self):
- packet_list = split_to_assembly_packets(os.urandom(800), MESSAGE)
- self.assertEqual(len(packet_list), 4)
- self.assertTrue(packet_list[0].startswith(M_L_HEADER))
- self.assertTrue(packet_list[1].startswith(M_A_HEADER))
- self.assertTrue(packet_list[2].startswith(M_A_HEADER))
- self.assertTrue(packet_list[3].startswith(M_E_HEADER))
-
- def test_short_file(self):
- packet_list = split_to_assembly_packets(os.urandom(50), FILE)
- self.assertEqual(len(packet_list), 1)
- self.assertTrue(packet_list[0].startswith(F_S_HEADER))
-
- def test_long_file(self):
- packet_list = split_to_assembly_packets(os.urandom(800), FILE)
- self.assertEqual(len(packet_list), 4)
- self.assertTrue(packet_list[0].startswith(F_L_HEADER + b'\x00\x00\x00\x00\x00\x00\x00\x04'))
- self.assertTrue(packet_list[1].startswith(F_A_HEADER))
- self.assertTrue(packet_list[2].startswith(F_A_HEADER))
- self.assertTrue(packet_list[3].startswith(F_E_HEADER))
-
- def test_short_command(self):
- packet_list = split_to_assembly_packets(os.urandom(50), COMMAND)
- self.assertEqual(len(packet_list), 1)
- self.assertTrue(packet_list[0].startswith(C_S_HEADER))
-
- def test_long_command(self):
- packet_list = split_to_assembly_packets(os.urandom(800), COMMAND)
- self.assertEqual(len(packet_list), 4)
- self.assertTrue(packet_list[0].startswith(C_L_HEADER))
- self.assertTrue(packet_list[1].startswith(C_A_HEADER))
- self.assertTrue(packet_list[2].startswith(C_A_HEADER))
- self.assertTrue(packet_list[3].startswith(C_E_HEADER))
-
-
-class TestQueuePackets(unittest.TestCase):
-
- def setUp(self):
- self.settings = Settings()
- self.queue = Queue()
- self.window = TxWindow(uid='alice@jabber.org',
- log_messages=True)
-
- self.window.window_contacts = [create_contact()]
-
- def tearDown(self):
- while not self.queue.empty():
- self.queue.get()
- time.sleep(0.1)
- self.queue.close()
-
- def test_queue_message_traffic_masking(self):
- # Setup
- packet_list = split_to_assembly_packets(os.urandom(200), MESSAGE)
- self.settings.session_traffic_masking = True
-
- # Test
- self.assertIsNone(queue_packets(packet_list, MESSAGE, self.settings, self.queue, self.window))
- time.sleep(0.1)
-
- self.assertEqual(self.queue.qsize(), 1)
- packet, log_messages, log_as_ph = self.queue.get()
- self.assertIsInstance(packet, bytes)
- self.assertTrue(log_messages)
- self.assertFalse(log_as_ph)
-
- def test_queue_message_normal(self):
- # Setup
- packet_list = split_to_assembly_packets(os.urandom(200), MESSAGE)
-
- # Test
- self.assertIsNone(queue_packets(packet_list, MESSAGE, self.settings, self.queue, self.window))
- time.sleep(0.1)
-
- self.assertEqual(self.queue.qsize(), 1)
-
- packet, settings, rx_account, tx_account, log_setting, log_as_ph, win_uid = self.queue.get()
- self.assertIsInstance(packet, bytes)
- self.assertIsInstance(settings, Settings)
- self.assertEqual(rx_account, 'alice@jabber.org')
- self.assertEqual(tx_account, 'user@jabber.org')
- self.assertEqual(win_uid, 'alice@jabber.org')
- self.assertTrue(log_setting)
- self.assertFalse(log_as_ph)
-
- def test_queue_file_traffic_masking(self):
- # Setup
- packet_list = split_to_assembly_packets(os.urandom(200), FILE)
- self.settings.session_traffic_masking = True
-
- # Test
- self.assertIsNone(queue_packets(packet_list, FILE, self.settings, self.queue, self.window))
- time.sleep(0.1)
-
- self.assertEqual(self.queue.qsize(), 1)
- packet, log_messages, log_as_ph = self.queue.get()
- self.assertIsInstance(packet, bytes)
- self.assertTrue(log_messages)
- self.assertFalse(log_as_ph)
-
- def test_queue_file_normal(self):
- # Setup
- packet_list = split_to_assembly_packets(os.urandom(200), FILE)
-
- # Test
- self.assertIsNone(queue_packets(packet_list, FILE, self.settings, self.queue, self.window, log_as_ph=True))
- time.sleep(0.1)
-
- self.assertEqual(self.queue.qsize(), 1)
-
- packet, settings, rx_account, tx_account, log_setting, log_as_ph, window_uid = self.queue.get()
- self.assertIsInstance(packet, bytes)
- self.assertIsInstance(settings, Settings)
- self.assertEqual(rx_account, 'alice@jabber.org')
- self.assertEqual(tx_account, 'user@jabber.org')
- self.assertEqual(window_uid, 'alice@jabber.org')
- self.assertTrue(log_setting)
- self.assertTrue(log_as_ph)
-
- def test_queue_command_traffic_masking(self):
- # Setup
- packet_list = split_to_assembly_packets(os.urandom(200), COMMAND)
- self.settings.session_traffic_masking = True
-
- # Test
- self.assertIsNone(queue_packets(packet_list, COMMAND, self.settings, self.queue, self.window))
- time.sleep(0.1)
-
- self.assertEqual(self.queue.qsize(), 1)
- data, log_messages = self.queue.get()
- self.assertIsInstance(data, bytes)
- self.assertTrue(log_messages)
-
- def test_queue_command_normal(self):
- # Setup
- packet_list = split_to_assembly_packets(os.urandom(200), COMMAND)
-
- # Test
- self.assertIsNone(queue_packets(packet_list, COMMAND, self.settings, self.queue, self.window))
- time.sleep(0.1)
-
- self.assertEqual(self.queue.qsize(), 1)
-
- packet, settings = self.queue.get()
- self.assertIsInstance(packet, bytes)
- self.assertIsInstance(settings, Settings)
-
-
-class TestSendPacket(unittest.TestCase):
- """\
- This function is by far the most critical to security in TxM,
- as it must detect output of key material.
-
- Plaintext length must always be evaluated to ensure constant
- ciphertext length and hiding of output data type.
-
- The most likely place for error is going to be the tx_harac
- attribute of keyset, as it's the only data loaded from the
- sensitive key database that is sent to contact. Alternative
- place could be a bug in implementation where account strings
- would incorrectly contain a byte string that contained key
- material.
- """
-
- def setUp(self):
- self.l_queue = Queue()
- self.key_list = KeyList(nicks=['Alice'])
- self.settings = Settings()
- self.gateway = Gateway()
-
- def tearDown(self):
- while not self.l_queue.empty():
- self.l_queue.get()
- time.sleep(0.1)
- self.l_queue.close()
-
- def test_message_length(self):
- # Check that only 256-byte plaintext messages are ever allowed
- for l in range(1, 256):
- with self.assertRaises(SystemExit):
- send_packet(self.key_list, self.gateway, self.l_queue, bytes(l),
- self.settings, 'alice@jabber.org', 'bob@jabber.org', True)
-
- for l in range(257, 300):
- with self.assertRaises(SystemExit):
- send_packet(self.key_list, self.gateway, self.l_queue, bytes(l),
- self.settings, 'alice@jabber.org', 'bob@jabber.org', True)
-
- def test_invalid_harac_raises_raises_struct_error(self):
- # Check that in case where internal error caused bytestring (possible key material)
- # to end up in hash ratchet value, system raises some error that prevents output of packet.
- # In this case the error comes from unsuccessful encoding of hash ratchet counter.
- for l in range(1, 33):
- key_list = KeyList()
- key_list.keysets = [create_keyset(tx_key=KEY_LENGTH * b'\x02',
- tx_harac=l * b'k')]
-
- with self.assertRaises(struct.error):
- send_packet(key_list, self.gateway, self.l_queue, bytes(ASSEMBLY_PACKET_LEN),
- self.settings, 'alice@jabber.org', 'bob@jabber.org', True)
-
- def test_invalid_account_raises_stop_iteration(self):
- # Check that in case where internal error caused bytestring (possible key material)
- # to end up in account strings, System raises some error that prevents output of packet.
- # In this case the error comes from unsuccessful encoding of string (AttributeError)
- # or KeyList lookup error when bytes are used (StopIteration). These errors are not catched.
- with self.assertRaises(StopIteration):
- send_packet(self.key_list, self.gateway, self.l_queue, bytes(ASSEMBLY_PACKET_LEN),
- self.settings, b'alice@jabber.org', 'bob@jabber.org', True)
-
- with self.assertRaises(AttributeError):
- send_packet(self.key_list, self.gateway, self.l_queue, bytes(ASSEMBLY_PACKET_LEN),
- self.settings, 'alice@jabber.org', b'bob@jabber.org', True)
-
- def test_valid_message_packet(self):
- # Setup
- settings = Settings(multi_packet_random_delay=True)
- gateway = Gateway()
- key_list = KeyList(master_key=bytes(KEY_LENGTH))
- key_list.keysets = [create_keyset(tx_key=KEY_LENGTH * b'\x02',
- tx_harac=8)]
-
- # Test
- self.assertIsNone(send_packet(key_list, gateway, self.l_queue, bytes(ASSEMBLY_PACKET_LEN),
- settings, 'alice@jabber.org', 'bob@jabber.org', True))
-
- self.assertEqual(len(gateway.packets), 1)
- self.assertEqual(len(gateway.packets[0]), 396)
-
- time.sleep(0.1)
- self.assertFalse(self.l_queue.empty())
-
- def test_valid_command_packet(self):
- """Test that commands are output as they should.
-
- Since command packets have no trailer, and since only user's
- RxM has local decryption key, encryption with any key recipient
- is not already in possession of does not compromise plaintext.
- """
- # Setup
- key_list = KeyList(master_key=bytes(KEY_LENGTH))
- key_list.keysets = [create_keyset(LOCAL_ID)]
-
- # Test
- self.assertIsNone(send_packet(key_list, self.gateway, self.l_queue,
- bytes(ASSEMBLY_PACKET_LEN), self.settings))
- time.sleep(0.1)
-
- self.assertEqual(len(self.gateway.packets), 1)
- self.assertEqual(len(self.gateway.packets[0]), 365)
- self.assertEqual(self.l_queue.qsize(), 1)
-
-
-class TestTransmit(unittest.TestCase):
-
- def setUp(self):
- self.settings = Settings(local_testing_mode=True)
- self.gateway = Gateway()
-
- def test_transmit(self):
- self.assertIsNone(transmit(200*b'a', self.settings, self.gateway))
- self.assertEqual(len(self.gateway.packets), 1)
-
- def test_transmit_with_multi_packet_random_delay(self):
- self.settings.multi_packet_random_delay = True
- self.assertIsNone(transmit(200*b'a', self.settings, self.gateway))
- self.assertEqual(len(self.gateway.packets), 1)
-
-
-class TestCancelPacket(unittest.TestCase):
-
- def setUp(self):
- self.queues = {FILE_PACKET_QUEUE: Queue(),
- MESSAGE_PACKET_QUEUE: Queue()}
-
- def tearDown(self):
- for key in self.queues:
- while not self.queues[key].empty():
- self.queues[key].get()
- time.sleep(0.1)
- self.queues[key].close()
-
- def test_cancel_message_during_traffic_masking(self):
- # Setup
- user_input = UserInput('cm')
- settings = Settings(session_traffic_masking=True)
- window = TxWindow()
- window.window_contacts = [create_contact()]
-
- self.queues[MESSAGE_PACKET_QUEUE].put(('testmessage1', {'alice@jabber.org': False}))
- self.queues[MESSAGE_PACKET_QUEUE].put(('testmessage2', {'alice@jabber.org': False}))
- time.sleep(0.1)
-
- # Test
- self.assertIsNone(cancel_packet(user_input, window, settings, self.queues))
- time.sleep(0.1)
-
- self.assertEqual(self.queues[MESSAGE_PACKET_QUEUE].qsize(), 1)
-
- def test_cancel_file_during_traffic_masking(self):
- # Setup
- user_input = UserInput('cf')
- settings = Settings(session_traffic_masking=True)
- window = TxWindow()
- window.window_contacts = [create_contact()]
-
- self.queues[FILE_PACKET_QUEUE].put(('testfile1', {'alice@jabber.org': False}))
- self.queues[FILE_PACKET_QUEUE].put(('testfile2', {'alice@jabber.org': False}))
- time.sleep(0.1)
-
- # Test
- self.assertIsNone(cancel_packet(user_input, window, settings, self.queues))
- time.sleep(0.1)
-
- self.assertEqual(self.queues[FILE_PACKET_QUEUE].qsize(), 1)
-
- def test_cancel_message_during_normal(self):
- # Setup
- user_input = UserInput('cm')
- settings = Settings()
- window = TxWindow(name='Alice',
- type=WIN_TYPE_CONTACT,
- type_print='contact',
- uid ='alice@jabber.org')
- window.window_contacts = [create_contact()]
-
- self.queues[MESSAGE_PACKET_QUEUE].put(('testmessage1', settings, 'alice@jabber.org', 'bob@jabber.org', False, False, 'alice@jabber.org'))
- self.queues[MESSAGE_PACKET_QUEUE].put(('testmessage2', settings, 'charlie@jabber.org', 'bob@jabber.org', False, False, 'charlie@jabber.org'))
- self.queues[MESSAGE_PACKET_QUEUE].put(('testmessage3', settings, 'alice@jabber.org', 'bob@jabber.org', False, False, 'alice@jabber.org'))
- time.sleep(0.1)
-
- # Test
- self.assertIsNone(cancel_packet(user_input, window, settings, self.queues))
- time.sleep(0.1)
-
- self.assertEqual(self.queues[MESSAGE_PACKET_QUEUE].qsize(), 2)
-
- def test_cancel_group_message_during_normal(self):
- # Setup
- user_input = UserInput('cm')
- settings = Settings()
- window = TxWindow(name='testgroup',
- type=WIN_TYPE_GROUP,
- type_print='group',
- uid='testgroup')
- window.window_contacts = [create_contact()]
-
- self.queues[MESSAGE_PACKET_QUEUE].put(('testmessage1', settings, 'alice@jabber.org', 'bob@jabber.org', False, False, 'testgroup'))
- self.queues[MESSAGE_PACKET_QUEUE].put(('testmessage2', settings, 'alice@jabber.org', 'bob@jabber.org', False, False, 'testgroup'))
- time.sleep(0.1)
-
- # Test
- self.assertIsNone(cancel_packet(user_input, window, settings, self.queues))
- time.sleep(0.1)
-
- self.assertEqual(self.queues[MESSAGE_PACKET_QUEUE].qsize(), 1)
-
- def test_cancel_file_during_normal(self):
- # Setup
- user_input = UserInput('cf')
- settings = Settings()
- window = TxWindow(name='Alice',
- type=WIN_TYPE_CONTACT,
- type_print='contact',
- uid='alice@jabber.org')
- window.window_contacts = [create_contact()]
-
- self.queues[FILE_PACKET_QUEUE].put(('testmessage1', settings, 'alice@jabber.org', 'bob@jabber.org', False, False, 'alice@jabber.org'))
- self.queues[FILE_PACKET_QUEUE].put(('testmessage2', settings, 'alice@jabber.org', 'bob@jabber.org', False, False, 'alice@jabber.org'))
- time.sleep(0.1)
-
- # Test
- self.assertIsNone(cancel_packet(user_input, window, settings, self.queues))
- time.sleep(0.1)
-
- self.assertEqual(self.queues[FILE_PACKET_QUEUE].qsize(), 1)
-
- def test_cancel_file_when_nothing_to_cancel(self):
- # Setup
- user_input = UserInput('cf')
- settings = Settings()
- window = TxWindow(name='Alice',
- type=WIN_TYPE_CONTACT,
- type_print='contact',
- uid='alice@jabber.org')
- window.window_contacts = [create_contact()]
-
- # Test
- self.assertIsNone(cancel_packet(user_input, window, settings, self.queues))
- time.sleep(0.1)
-
- self.assertEqual(self.queues[FILE_PACKET_QUEUE].qsize(), 0)
-
- def test_cancel_message_when_nothing_to_cancel(self):
- # Setup
- user_input = UserInput('cm')
- settings = Settings()
- window = TxWindow(name='Alice',
- type=WIN_TYPE_CONTACT,
- type_print='contact',
- uid='alice@jabber.org')
- window.window_contacts = [create_contact()]
-
- # Test
- self.assertIsNone(cancel_packet(user_input, window, settings, self.queues))
- time.sleep(0.1)
-
- self.assertEqual(self.queues[FILE_PACKET_QUEUE].qsize(), 0)
-
-
-if __name__ == '__main__':
- unittest.main(exit=False)
diff --git a/tests/tx/test_sender_loop.py b/tests/tx/test_sender_loop.py
deleted file mode 100644
index bbc7327..0000000
--- a/tests/tx/test_sender_loop.py
+++ /dev/null
@@ -1,176 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see <http://www.gnu.org/licenses/>.
-"""
-
-import time
-import threading
-import unittest
-
-from multiprocessing import Queue
-
-from src.common.statics import *
-
-from src.tx.commands import queue_command
-from src.tx.packet import queue_message, queue_to_nh
-from src.tx.sender_loop import sender_loop
-
-from tests.mock_classes import ContactList, Gateway, KeyList, Settings, UserInput, TxWindow
-
-
-class TestTrafficMaskingLoop(unittest.TestCase):
-
- def test_loop(self):
- # Setup
- queues = {MESSAGE_PACKET_QUEUE: Queue(),
- FILE_PACKET_QUEUE: Queue(),
- COMMAND_PACKET_QUEUE: Queue(),
- NH_PACKET_QUEUE: Queue(),
- LOG_PACKET_QUEUE: Queue(),
- NOISE_PACKET_QUEUE: Queue(),
- NOISE_COMMAND_QUEUE: Queue(),
- KEY_MANAGEMENT_QUEUE: Queue(),
- WINDOW_SELECT_QUEUE: Queue(),
- EXIT_QUEUE: Queue()}
-
- settings = Settings(session_traffic_masking=True)
- gateway = Gateway()
- key_list = KeyList(nicks=['Alice', LOCAL_ID])
- window = TxWindow(log_messages=True)
- contact_list = ContactList(nicks=['Alice', LOCAL_ID])
- window.contact_list = contact_list
- window.window_contacts = [contact_list.get_contact('Alice')]
- user_input = UserInput(plaintext='test')
-
- queue_message(user_input, window, settings, queues[MESSAGE_PACKET_QUEUE])
- queue_message(user_input, window, settings, queues[MESSAGE_PACKET_QUEUE])
- queue_message(user_input, window, settings, queues[MESSAGE_PACKET_QUEUE])
- queue_command(b'test', settings, queues[COMMAND_PACKET_QUEUE])
- queue_command(b'test', settings, queues[COMMAND_PACKET_QUEUE])
- queue_command(b'test', settings, queues[COMMAND_PACKET_QUEUE], window)
- queue_to_nh(UNENCRYPTED_PACKET_HEADER + UNENCRYPTED_EXIT_COMMAND, settings, queues[NH_PACKET_QUEUE])
- queue_to_nh(UNENCRYPTED_PACKET_HEADER + UNENCRYPTED_WIPE_COMMAND, settings, queues[NH_PACKET_QUEUE])
-
- def queue_delayer():
- time.sleep(0.1)
- queues[WINDOW_SELECT_QUEUE].put((window, True))
-
- # Test
- threading.Thread(target=queue_delayer).start()
- self.assertIsNone(sender_loop(queues, settings, gateway, key_list, unittest=True))
-
- threading.Thread(target=queue_delayer).start()
-
- self.assertIsNone(sender_loop(queues, settings, gateway, key_list, unittest=True))
-
- threading.Thread(target=queue_delayer).start()
-
- self.assertIsNone(sender_loop(queues, settings, gateway, key_list, unittest=True))
-
- self.assertEqual(len(gateway.packets), 8)
- self.assertEqual(queues[EXIT_QUEUE].qsize(), 2)
-
- # Teardown
- for key in queues:
- while not queues[key].empty():
- queues[key].get()
- time.sleep(0.1)
- queues[key].close()
-
-
-class TestNormalLoop(unittest.TestCase):
-
- def test_loop(self):
- # Setup
- queues = {MESSAGE_PACKET_QUEUE: Queue(),
- FILE_PACKET_QUEUE: Queue(),
- COMMAND_PACKET_QUEUE: Queue(),
- NH_PACKET_QUEUE: Queue(),
- LOG_PACKET_QUEUE: Queue(),
- NOISE_PACKET_QUEUE: Queue(),
- NOISE_COMMAND_QUEUE: Queue(),
- KEY_MANAGEMENT_QUEUE: Queue(),
- WINDOW_SELECT_QUEUE: Queue(),
- UNITTEST_QUEUE: Queue(),
- EXIT_QUEUE: Queue()}
-
- settings = Settings(session_traffic_masking=False)
- gateway = Gateway()
- key_list = KeyList()
- window = TxWindow(log_messages=True)
- contact_list = ContactList(nicks=['Alice', LOCAL_ID])
- window.contact_list = contact_list
- window.window_contacts = [contact_list.get_contact('Alice')]
- user_input = UserInput(plaintext='test')
-
- def queue_delayer():
- time.sleep(0.1)
- queue_command(b'test', settings, queues[COMMAND_PACKET_QUEUE])
-
- time.sleep(0.1)
- queue_to_nh(PUBLIC_KEY_PACKET_HEADER + KEY_LENGTH * b'a'
- +b'alice@jabber.org' + US_BYTE + b'bob@jabber.org', settings, queues[NH_PACKET_QUEUE])
-
- time.sleep(0.1)
- queue_to_nh(UNENCRYPTED_PACKET_HEADER + UNENCRYPTED_WIPE_COMMAND, settings, queues[NH_PACKET_QUEUE])
-
- time.sleep(0.1)
- queue_to_nh(UNENCRYPTED_PACKET_HEADER + UNENCRYPTED_EXIT_COMMAND, settings, queues[NH_PACKET_QUEUE])
-
- time.sleep(0.1)
- queues[KEY_MANAGEMENT_QUEUE].put((KDB_ADD_ENTRY_HEADER, LOCAL_ID,
- KEY_LENGTH * b'a', KEY_LENGTH * b'a',
- KEY_LENGTH * b'a', KEY_LENGTH * b'a'))
-
- time.sleep(0.1)
- queue_message(user_input, window, settings, queues[MESSAGE_PACKET_QUEUE])
-
- time.sleep(0.1)
- queue_message(user_input, window, settings, queues[FILE_PACKET_QUEUE])
-
- time.sleep(0.1)
- queues[KEY_MANAGEMENT_QUEUE].put((KDB_ADD_ENTRY_HEADER, 'alice@jabber.org',
- KEY_LENGTH*b'a', KEY_LENGTH*b'a',
- KEY_LENGTH*b'a', KEY_LENGTH*b'a'))
-
- time.sleep(0.1)
- queue_message(user_input, window, settings, queues[MESSAGE_PACKET_QUEUE])
-
- time.sleep(0.1)
- queue_message(user_input, window, settings, queues[FILE_PACKET_QUEUE])
-
- time.sleep(0.1)
- queues[UNITTEST_QUEUE].put(EXIT)
-
- threading.Thread(target=queue_delayer).start()
-
- # Test
- self.assertIsNone(sender_loop(queues, settings, gateway, key_list, unittest=True))
- self.assertEqual(len(gateway.packets), 8)
- self.assertEqual(queues[EXIT_QUEUE].qsize(), 2)
-
- # Teardown
- for key in queues:
- while not queues[key].empty():
- queues[key].get()
- time.sleep(0.1)
- queues[key].close()
-
-
-if __name__ == '__main__':
- unittest.main(exit=False)
diff --git a/tests/tx/test_windows.py b/tests/tx/test_windows.py
deleted file mode 100644
index a483fcf..0000000
--- a/tests/tx/test_windows.py
+++ /dev/null
@@ -1,273 +0,0 @@
-#!/usr/bin/env python3.6
-# -*- coding: utf-8 -*-
-
-"""
-Copyright (C) 2013-2017 Markus Ottela
-
-This file is part of TFC.
-
-TFC is free software: you can redistribute it and/or modify it under the terms
-of the GNU General Public License as published by the Free Software Foundation,
-either version 3 of the License, or (at your option) any later version.
-
-TFC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with TFC. If not, see <http://www.gnu.org/licenses/>.
-"""
-
-import builtins
-import time
-import unittest
-
-from multiprocessing import Queue
-
-from src.common.db_contacts import Contact
-from src.common.statics import *
-
-from src.tx.windows import MockWindow, select_window, TxWindow
-
-from tests.mock_classes import create_contact, ContactList, GroupList, Settings, UserInput
-from tests.utils import TFCTestCase
-
-
-class TestMockWindow(unittest.TestCase):
-
- def setUp(self):
- self.window = MockWindow('alice@jabber.org', contacts=[create_contact(n) for n in ['Alice', 'Bob']])
-
- def test_window_iterates_over_contacts(self):
- for c in self.window:
- self.assertIsInstance(c, Contact)
-
-
-class TestTxWindow(TFCTestCase):
-
- def setUp(self):
- self.o_input = builtins.input
- self.contact_list = ContactList(['Alice', 'Bob'])
- self.group_list = GroupList(groups=['testgroup', 'testgroup_2'])
- self.settings = Settings()
- self.window = TxWindow(self.contact_list, self.group_list)
- self.window.group = self.group_list.get_group('testgroup')
- self.window.type = WIN_TYPE_GROUP
- self.queues = {COMMAND_PACKET_QUEUE: Queue(),
- WINDOW_SELECT_QUEUE: Queue()}
-
- def tearDown(self):
- builtins.input = self.o_input
-
- for key in self.queues:
- while not self.queues[key].empty():
- self.queues[key].get()
- time.sleep(0.1)
- self.queues[key].close()
-
- def test_window_iterates_over_contacts(self):
- # Setup
- self.window.window_contacts = self.contact_list.contacts
-
- # Test
- for c in self.window:
- self.assertIsInstance(c, Contact)
-
- def test_len_returns_number_of_contacts_in_window(self):
- # Setup
- self.window.window_contacts = self.contact_list.contacts
-
- # Test
- self.assertEqual(len(self.window), 2)
-
- def test_group_window_change_during_traffic_masking_raises_fr(self):
- # Setup
- self.settings.session_traffic_masking = True
- self.window.uid = 'testgroup'
-
- # Test
- self.assertFR("Error: Can't change window during traffic masking.",
- self.window.select_tx_window, self.settings, self.queues, selection='testgroup_2', cmd=True)
-
- def test_contact_window_change_during_traffic_masking_raises_fr(self):
- # Setup
- self.settings.session_traffic_masking = True
- self.window.uid = 'alice@jabber.org'
-
- # Test
- self.assertFR("Error: Can't change window during traffic masking.",
- self.window.select_tx_window, self.settings, self.queues, selection='bob@jabber.org', cmd=True)
-
- def test_contact_window_reload_during_traffic_masking(self):
- # Setup
- self.settings.session_traffic_masking = True
- self.window.uid = 'alice@jabber.org'
-
- # Test
- self.assertIsNone(self.window.select_tx_window(self.settings, self.queues, selection='alice@jabber.org', cmd=True))
- self.assertEqual(self.window.uid, 'alice@jabber.org')
-
- def test_group_window_reload_during_traffic_masking(self):
- # Setup
- self.settings.session_traffic_masking = True
- self.window.uid = 'testgroup'
-
- # Test
- self.assertIsNone(self.window.select_tx_window(self.settings, self.queues, selection='testgroup', cmd=True))
- self.assertEqual(self.window.uid, 'testgroup')
-
- def test_invalid_selection_raises_fr(self):
- # Setup
- self.window.uid = 'alice@jabber.org'
-
- # Test
- self.assertFR("Error: No contact/group was found.",
- self.window.select_tx_window, self.settings, self.queues, selection='charlie@jabber.org', cmd=True)
-
- def test_window_selection_during_traffic_masking(self):
- # Setup
- self.settings.session_traffic_masking = True
- self.window.uid = None
- builtins.input = lambda _: 'bob@jabber.org'
-
- # Test
- self.assertIsNone(self.window.select_tx_window(self.settings, self.queues))
- self.assertEqual(self.queues[WINDOW_SELECT_QUEUE].qsize(), 1)
-
- def test_contact_window_selection_from_input(self):
- # Setup
- self.window.uid = None
- builtins.input = lambda _: 'bob@jabber.org'
-
- # Test
- self.assertIsNone(self.window.select_tx_window(self.settings, self.queues))
- self.assertEqual(self.window.uid, 'bob@jabber.org')
-
- def test_group_window_selection_from_command(self):
- # Setup
- self.window.uid = None
-
- # Test
- self.assertIsNone(self.window.select_tx_window(self.settings, self.queues, selection='testgroup', cmd=True))
- self.assertEqual(self.window.uid, 'testgroup')
-
- def test_deselect_window(self):
- # Setup
- self.window.window_contacts = self.contact_list.contacts
- self.window.contact = self.contact_list.get_contact('bob@jabber.org')
- self.window.name = 'Bob'
- self.window.type = WIN_TYPE_CONTACT
- self.window.uid = 'bob@jabber.org'
- self.window.imc_name = 'bob@jabber.org'
-
- # Test
- self.assertIsNone(self.window.deselect_window())
- self.assertIsNone(self.window.contact)
- self.assertIsNone(self.window.name)
- self.assertIsNone(self.window.type)
- self.assertIsNone(self.window.uid)
- self.assertIsNone(self.window.imc_name)
-
- def test_is_selected(self):
- self.window.name = None
- self.assertFalse(self.window.is_selected())
-
- self.window.name = 'bob@jabber.org'
- self.assertTrue(self.window.is_selected())
-
- def test_update_log_messages_for_contact(self):
- # Setup
- self.window.type = WIN_TYPE_CONTACT
- self.window.log_messages = None
- self.window.contact = self.contact_list.get_contact('Alice')
- self.window.contact.log_messages = False
-
- # Test
- self.assertIsNone(self.window.update_log_messages())
- self.assertFalse(self.window.log_messages)
-
- def test_update_log_messages_for_group(self):
- # Setup
- self.window.type = WIN_TYPE_GROUP
- self.window.log_messages = None
- self.window.group = self.group_list.get_group('testgroup')
- self.window.group.log_messages = False
-
- # Test
- self.assertIsNone(self.window.update_log_messages())
- self.assertFalse(self.window.log_messages)
-
- def test_update_group_win_members_if_group_is_available(self):
- # Setup
- self.window.window_contacts = []
- self.window.group = None
- self.window.name = 'testgroup'
- self.window.type = WIN_TYPE_GROUP
- self.window.imc_name = None
-
- # Test
- self.assertIsNone(self.window.update_group_win_members(self.group_list))
-
- self.assertEqual(self.window.group, self.group_list.get_group('testgroup'))
- self.assertEqual(self.window.window_contacts, self.window.group.members)
- self.assertEqual(self.window.imc_name, 'alice@jabber.org')
-
- def test_deactivate_window_if_group_is_not_available(self):
- # Setup
- self.window.window_contacts = []
- self.window.group = None
- self.window.name = 'testgroup_3'
- self.window.type = WIN_TYPE_GROUP
- self.window.imc_name = None
-
- # Test
- self.assertIsNone(self.window.update_group_win_members(self.group_list))
- self.assertIsNone(self.window.contact)
- self.assertIsNone(self.window.name)
- self.assertIsNone(self.window.type)
- self.assertIsNone(self.window.uid)
- self.assertIsNone(self.window.imc_name)
-
-
-class TestSelectWindow(TFCTestCase):
-
- def setUp(self):
- self.user_input = UserInput()
- self.contact_list = ContactList(nicks=['Alice'])
- self.group_list = GroupList()
- self.window = TxWindow(self.contact_list, self.group_list)
- self.settings = Settings()
- self.queues = {COMMAND_PACKET_QUEUE: Queue(),
- WINDOW_SELECT_QUEUE: Queue()}
-
- def tearDown(self):
- for key in self.queues:
- while not self.queues[key].empty():
- self.queues[key].get()
- time.sleep(0.1)
- self.queues[key].close()
-
- def test_invalid_selection_raises_fr(self):
- # Setup
- self.user_input.plaintext = 'msg'
- self.assertFR("Error: Invalid recipient.", select_window, self.user_input, self.window, self.settings, self.queues)
- time.sleep(0.1)
-
- # Test
- self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 0)
- self.assertEqual(self.queues[WINDOW_SELECT_QUEUE].qsize(), 0)
-
- def test_window_selection(self):
- # Setup
- self.user_input.plaintext = 'msg alice@jabber.org'
-
- # Test
- self.assertIsNone(select_window(self.user_input, self.window, self.settings, self.queues))
- time.sleep(0.1)
-
- self.assertEqual(self.queues[COMMAND_PACKET_QUEUE].qsize(), 1)
- self.assertEqual(self.queues[WINDOW_SELECT_QUEUE].qsize(), 0)
-
-
-if __name__ == '__main__':
- unittest.main(exit=False)
diff --git a/tests/utils.py b/tests/utils.py
index 5644a18..e1fbeaa 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,40 +16,69 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see <http://www.gnu.org/licenses/>.
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
-import base64
-import binascii
+import hashlib
import io
import os
+import shutil
import unittest
import zlib
-from contextlib import contextmanager, redirect_stdout
+from contextlib import contextmanager, redirect_stdout
+from multiprocessing import Queue
+from typing import Any, Callable, Dict, List, Union
-from src.common.crypto import encrypt_and_sign, hash_chain
-from src.common.encoding import int_to_bytes
+from src.common.crypto import blake2b, byte_padding, csprng, encrypt_and_sign
+from src.common.encoding import int_to_bytes, pub_key_to_onion_address
+from src.common.misc import split_byte_string
from src.common.exceptions import FunctionReturn
from src.common.statics import *
-from src.tx.packet import split_to_assembly_packets
+
+UNDECODABLE_UNICODE = bytes.fromhex('3f264d4189d7a091')
+VALID_ECDHE_PUB_KEY = '4EcuqaDddsdsucgBX2PY2qR8hReAaeSN2ohJB9w5Cvq6BQjDaPPgzSvW932aHiosT42SKJGu2PpS1Za3Xrao'
+VALID_LOCAL_KEY_KDK = '5HueCGU8rMjxEXxiPuD5BDku4MkFqeZyd4dZ1jvhTVqvbTLvyTJ'
+
+
+def nick_to_pub_key(nick: str) -> bytes:
+ """Produce deterministic public key from nick."""
+ return hashlib.sha256(nick.encode()).digest()
+
+
+def nick_to_onion_address(nick: str) -> str:
+ """Produce deterministic v3 Onion Service address from nick."""
+ return pub_key_to_onion_address(nick_to_pub_key(nick))
+
+
+def nick_to_short_address(nick: str) -> str:
+ """Produce deterministic short address from nick."""
+ return nick_to_onion_address(nick)[:TRUNC_ADDRESS_LENGTH]
+
+
+def group_name_to_group_id(name: str) -> bytes:
+ """Produce deterministic group ID from group name."""
+ return hashlib.sha256(name.encode()).digest()[:GROUP_ID_LENGTH]
class TFCTestCase(unittest.TestCase):
- def assertFR(self, msg, func, *args, **kwargs):
- """Check that FunctionReturn error is raised and specific message is displayed."""
+ def assert_fr(self, msg, func, *args, **kwargs):
+ """\
+ Check that FunctionReturn error is raised
+ and that a specific message is displayed.
+ """
e_raised = False
try:
func(*args, **kwargs)
except FunctionReturn as inst:
e_raised = True
- self.assertEqual(inst.message, msg)
+ self.assertEqual(msg, inst.message)
self.assertTrue(e_raised)
- def assertPrints(self, msg, func, *args, **kwargs):
+ def assert_prints(self, msg, func, *args, **kwargs):
"""Check that specific message is printed by function."""
f = io.StringIO()
with redirect_stdout(f):
@@ -58,65 +88,298 @@ class TFCTestCase(unittest.TestCase):
@contextmanager
def ignored(*exceptions):
- """Ignore exception."""
+ """Ignore an exception."""
try:
yield
except exceptions:
pass
-def cleanup():
- """Remove unittest related files."""
- for f in os.listdir(DIR_USER_DATA):
- if f.startswith('ut'):
- with ignored(FileNotFoundError):
- os.remove(f'{DIR_USER_DATA}{f}')
+def cd_unittest():
+ """Change working directory to one for unittests.
+
+ Separate working directory for unittests protects existing user data.
+ """
+ name = f"unittest_{(os.urandom(16)).hex()}/"
+ try:
+ os.mkdir(name)
+ except FileExistsError:
+ pass
+ os.chdir(name)
+ return name
-def assembly_packet_creator(p_type: str,
- payload: bytes = b'',
- origin: bytes = b'',
- header: bytes = b'',
- group_name: str = None,
- encrypt: bool = False,
- break_g_name: bool = False,
- origin_acco: bytes = b'alice@jabber.org'):
- """Create assembly packet list and optionally encrypt it."""
- if p_type == MESSAGE:
- if not header:
- if group_name is not None:
- group_msg_id = GROUP_MSG_ID_LEN * b'a'
- group_name = binascii.unhexlify('a466c02c221cb135') if break_g_name else group_name.encode()
- header = GROUP_MESSAGE_HEADER + group_msg_id + group_name + US_BYTE
+def cleanup(name):
+ """Remove unittest related directory."""
+ os.chdir("..")
+ shutil.rmtree(f"{name}/")
+
+
+def func_that_raises(exception: Any) -> Callable:
+ """Return function that when called, raises the specified exception."""
+ return lambda *args, **kwargs: (_ for _ in ()).throw(exception)
+
+
+def tamper_file(file_name: str, tamper_size: int) -> None:
+ """Change `tamper_size` bytes in file `file_name`."""
+
+ with open(file_name, 'rb') as f:
+ data = f.read()
+
+ while True:
+ tampered_bytes = os.urandom(tamper_size)
+ if tampered_bytes != data[:tamper_size]:
+ break
+ new_data = tampered_bytes + data[tamper_size:]
+
+ with open(file_name, 'wb') as f:
+ f.write(new_data)
+
+
+def tear_queue(queue: 'Queue'):
+ """Empty and close multiprocessing queue."""
+ while queue.qsize() != 0:
+ queue.get()
+ queue.close()
+
+
+def tear_queues(queues: Dict[bytes, 'Queue']):
+ """Empty and close multiprocessing queues."""
+ for q in queues:
+ tear_queue(queues[q])
+
+
+def tamper_last_byte(byte_string: bytes) -> bytes:
+ """Increase the ord value of last byte by 1 mod 255."""
+ return byte_string[:-1] + chr((ord(byte_string[-1:]) + 1) % 256).encode()
+
+
+def assembly_packet_creator(
+ # --- Payload creation ---
+
+ # Common settings
+ packet_type: str, # Packet type (MESSAGE, FILE, or COMMAND, do not use tampered values)
+ payload: Union[bytes, str] = None, # Payload message content (Plaintext message (str), file data (bytes), or command (bytes))
+ inner_key: bytes = None, # Key for inner encryption layer
+ tamper_ciphertext: bool = False, # When True, tampers with the inner layer of encryption to make it undecryptable
+
+ # Message packet parameters
+ message_header: bytes = None, # Message header (PRIVATE_MESSAGE_HEADER, GROUP_MESSAGE_HEADER, FILE_KEY_HEADER, or tamper byte)
+    tamper_plaintext:  bool  = False,  # When True, replaces plaintext with undecodable bytestring.
+ group_id: bytes = None, # When specified, creates message for group (4 byte random string)
+ group_msg_id: bytes = None, # The group message id (16 byte random string)
+ whisper_header: bytes = b'\x00', # Define whisper-header (b'\x00' for False, b'\x01' for True, others for tampering)
+
+ # File packet parameters
+ create_zip_bomb: bool = False, # When True, creates large enough ciphertext to trigger zip bomb protection
+ tamper_compression: bool = False, # When True, tampers with compression to make decompression impossible
+ packet_time: bytes = None, # Allows overriding the 8-byte packet time header
+ packet_size: bytes = None, # Allows overriding the 8-byte packet size header
+ file_name: bytes = None, # Name of the file (allows e.g. injection of invalid file names)
+ omit_header_delim: bool = False, # When True, omits the file_name<>file_data delimiter.
+
+ # --- Assembly packet splitting ---
+ s_header_override: bytes = None, # Allows overriding the `short packet` assembly packet header
+ l_header_override: bytes = None, # Allows overriding the `start of long packet` assembly packet header
+ a_header_override: bytes = None, # Allows overriding the `appended long packet` assembly packet header
+ e_header_override: bytes = None, # Allows overriding the `last packet of long packet` assembly packet header
+ tamper_cmd_hash: bool = False, # When True, tampers with the command hash to make it undecryptable
+ no_padding: bool = False, # When True, does not add padding to assembly packet.
+ split_length: int = PADDING_LENGTH, # Allows configuring the length to which assembly packets are split
+
+ # --- Packet encryption ---
+ encrypt_packet: bool = False, # When True, encrypts packet into set of datagrams starting with default key (32*b'\x01')
+ message_number: int = 0, # Determines the message key and harac for message
+ harac: int = INITIAL_HARAC, # Allows choosing the hash ratchet counter for packet encryption
+ message_key: bytes = None, # Allows choosing the message key to encrypt message with
+ header_key: bytes = None, # Allows choosing the header key for hash ratchet encryption
+ tamper_harac: bool = False, # When True, tampers with the MAC of encrypted harac
+    tamper_message:  bool  = False,  # When True, tampers with the MAC of encrypted message
+ onion_pub_key: bytes = b'', # Defines the contact public key to use with datagram creation
+ origin_header: bytes = b'', # Allows editing the origin header
+ ) -> List[bytes]:
+ """Create assembly packet list and optionally encrypt it to create datagram list."""
+
+ # ------------------------------------------------------------------------------------------------------------------
+ # | Create payload |
+ # ------------------------------------------------------------------------------------------------------------------
+
+ if packet_type == MESSAGE:
+
+ assert isinstance(payload, str)
+
+ if message_header is None:
+ if group_id is not None:
+ group_msg_id_bytes = bytes(GROUP_MSG_ID_LENGTH) if group_msg_id is None else group_msg_id
+ header = GROUP_MESSAGE_HEADER + group_id + group_msg_id_bytes
else:
header = PRIVATE_MESSAGE_HEADER
- payload = header + payload
+ else:
+ header = message_header
- if p_type == FILE:
- if not payload:
- compressed = zlib.compress(os.urandom(10000), level=COMPRESSION_LEVEL)
- file_key = os.urandom(KEY_LENGTH)
- encrypted = encrypt_and_sign(compressed, key=file_key) + file_key
- encoded = base64.b85encode(encrypted)
- payload = int_to_bytes(1) + int_to_bytes(2) + b'testfile.txt' + US_BYTE + encoded
+ payload_bytes = UNDECODABLE_UNICODE if tamper_plaintext else payload.encode()
- packet_list = split_to_assembly_packets(payload, p_type)
+ payload = whisper_header + header + payload_bytes
- if not encrypt:
+ # ---
+
+ elif packet_type == FILE: # Create packets for traffic masking file transmission
+
+ file_data_size = 100_000_001 if create_zip_bomb else 10_000
+ payload_bytes = os.urandom(file_data_size) if payload is None else payload
+
+ compressed = zlib.compress(payload_bytes, level=COMPRESSION_LEVEL)
+ compressed = compressed if not tamper_compression else compressed[::-1]
+ file_key_bytes = os.urandom(SYMMETRIC_KEY_LENGTH) if inner_key is None else inner_key
+
+ ciphertext = encrypt_and_sign(compressed, key=file_key_bytes)
+ ciphertext = ciphertext if not tamper_ciphertext else ciphertext[::-1]
+ ct_with_key = ciphertext + file_key_bytes
+
+ time_bytes = int_to_bytes(2) if packet_time is None else packet_time
+ size_bytes = int_to_bytes(file_data_size) if packet_size is None else packet_size
+ file_name_bytes = b'test_file.txt' if file_name is None else file_name
+ delimiter = US_BYTE if not omit_header_delim else b''
+
+ payload = time_bytes + size_bytes + file_name_bytes + delimiter + ct_with_key
+
+ elif packet_type == COMMAND:
+ payload = payload
+
+ else:
+ raise ValueError(f"Invalid packet type '{packet_type}'.")
+
+ # ------------------------------------------------------------------------------------------------------------------
+ # | Split payload to assembly packets |
+ # ------------------------------------------------------------------------------------------------------------------
+
+ s_header = {MESSAGE: M_S_HEADER, FILE: F_S_HEADER, COMMAND: C_S_HEADER}[packet_type]
+ l_header = {MESSAGE: M_L_HEADER, FILE: F_L_HEADER, COMMAND: C_L_HEADER}[packet_type]
+ a_header = {MESSAGE: M_A_HEADER, FILE: F_A_HEADER, COMMAND: C_A_HEADER}[packet_type]
+ e_header = {MESSAGE: M_E_HEADER, FILE: F_E_HEADER, COMMAND: C_E_HEADER}[packet_type]
+
+ s_header = s_header if s_header_override is None else s_header_override
+ l_header = l_header if l_header_override is None else l_header_override
+ a_header = a_header if a_header_override is None else a_header_override
+ e_header = e_header if e_header_override is None else e_header_override
+
+ if packet_type in [MESSAGE, COMMAND]:
+ compressed = zlib.compress(payload, level=COMPRESSION_LEVEL)
+ payload = compressed if not tamper_compression else compressed[::-1]
+
+ if len(payload) < PADDING_LENGTH:
+ padded = byte_padding(payload)
+ packet_list = [s_header + padded]
+
+ else:
+ if packet_type == MESSAGE:
+ msg_key = csprng() if inner_key is None else inner_key
+ payload = encrypt_and_sign(payload, msg_key)
+ payload = payload if not tamper_ciphertext else payload[::-1]
+ payload += msg_key
+
+ elif packet_type == FILE:
+ payload = bytes(FILE_PACKET_CTR_LENGTH) + payload
+
+ elif packet_type == COMMAND:
+ command_hash = blake2b(payload)
+ command_hash = command_hash if not tamper_cmd_hash else command_hash[::-1]
+ payload += command_hash
+
+ padded = payload if no_padding else byte_padding(payload)
+ p_list = split_byte_string(padded, item_len=split_length)
+
+ if packet_type == FILE:
+ p_list[0] = int_to_bytes(len(p_list)) + p_list[0][FILE_PACKET_CTR_LENGTH:]
+
+ packet_list = ([l_header + p_list[0]] +
+ [a_header + p for p in p_list[1:-1]] +
+ [e_header + p_list[-1]])
+
+ if not encrypt_packet:
return packet_list
- if encrypt:
- harac = 1
- m_key = KEY_LENGTH * b'\x01'
- m_hek = KEY_LENGTH * b'\x01'
- assembly_ct_list = []
- for p in packet_list:
- harac_in_bytes = int_to_bytes(harac)
- encrypted_harac = encrypt_and_sign(harac_in_bytes, m_hek)
- encrypted_message = encrypt_and_sign(p, m_key)
- encrypted_packet = MESSAGE_PACKET_HEADER + encrypted_harac + encrypted_message + origin + origin_acco
- assembly_ct_list.append(encrypted_packet)
- m_key = hash_chain(m_key)
- harac += 1
+ # ------------------------------------------------------------------------------------------------------------------
+ # | Encrypt assembly packets to create datagrams |
+ # ------------------------------------------------------------------------------------------------------------------
- return assembly_ct_list
+ message_key = SYMMETRIC_KEY_LENGTH * b'\x01' if message_key is None else message_key
+ header_key = SYMMETRIC_KEY_LENGTH * b'\x01' if header_key is None else header_key
+
+ for _ in range(message_number):
+ message_key = blake2b(message_key + int_to_bytes(harac), digest_size=SYMMETRIC_KEY_LENGTH)
+ harac += 1
+
+ assembly_ct_list = []
+
+ for packet in packet_list:
+ harac_in_bytes = int_to_bytes(harac)
+ encrypted_harac = encrypt_and_sign(harac_in_bytes, header_key)
+ encrypted_message = encrypt_and_sign(packet, message_key)
+
+ encrypted_harac = encrypted_harac if not tamper_harac else tamper_last_byte(encrypted_harac)
+ encrypted_message = encrypted_message if not tamper_message else tamper_last_byte(encrypted_message)
+
+ encrypted_packet = onion_pub_key + origin_header + encrypted_harac + encrypted_message
+
+ assembly_ct_list.append(encrypted_packet)
+
+ message_key = blake2b(message_key + int_to_bytes(harac), digest_size=SYMMETRIC_KEY_LENGTH)
+ harac += 1
+
+ return assembly_ct_list
+
+
+def gen_queue_dict() -> Dict[bytes, Queue]:
+ """Create dictionary that has all the queues used by TFC processes."""
+ transmitter_queues = [MESSAGE_PACKET_QUEUE,
+ COMMAND_PACKET_QUEUE,
+ TM_MESSAGE_PACKET_QUEUE,
+ TM_FILE_PACKET_QUEUE,
+ TM_COMMAND_PACKET_QUEUE,
+ TM_NOISE_PACKET_QUEUE,
+ TM_NOISE_COMMAND_QUEUE,
+ RELAY_PACKET_QUEUE,
+ LOG_PACKET_QUEUE,
+ LOG_SETTING_QUEUE,
+ TRAFFIC_MASKING_QUEUE,
+ LOGFILE_MASKING_QUEUE,
+ KEY_MANAGEMENT_QUEUE,
+ SENDER_MODE_QUEUE,
+ WINDOW_SELECT_QUEUE,
+ EXIT_QUEUE]
+
+ receiver_queues = [GATEWAY_QUEUE,
+ LOCAL_KEY_DATAGRAM_HEADER,
+ MESSAGE_DATAGRAM_HEADER,
+ FILE_DATAGRAM_HEADER,
+ COMMAND_DATAGRAM_HEADER,
+ EXIT_QUEUE]
+
+ relay_queues = [GATEWAY_QUEUE,
+ DST_MESSAGE_QUEUE,
+ M_TO_FLASK_QUEUE,
+ F_TO_FLASK_QUEUE,
+ SRC_TO_RELAY_QUEUE,
+ DST_COMMAND_QUEUE,
+ CONTACT_KEY_QUEUE,
+ C_REQ_MGR_QUEUE,
+ URL_TOKEN_QUEUE,
+ GROUP_MSG_QUEUE,
+ CONTACT_REQ_QUEUE,
+ F_REQ_MGMT_QUEUE,
+ GROUP_MGMT_QUEUE,
+ ONION_CLOSE_QUEUE,
+ ONION_KEY_QUEUE,
+ TOR_DATA_QUEUE,
+ EXIT_QUEUE]
+
+ unittest_queue = [UNITTEST_QUEUE]
+
+ queue_list = set(transmitter_queues + receiver_queues + relay_queues + unittest_queue)
+ queue_dict = dict()
+
+ for q in queue_list:
+ queue_dict[q] = Queue()
+
+ return queue_dict
diff --git a/tfc.py b/tfc.py
index 7b69df5..bad7d1d 100755
--- a/tfc.py
+++ b/tfc.py
@@ -2,7 +2,8 @@
# -*- coding: utf-8 -*-
"""
-Copyright (C) 2013-2017 Markus Ottela
+TFC - Onion-routed, endpoint secure messaging system
+Copyright (C) 2013-2019 Markus Ottela
This file is part of TFC.
@@ -15,15 +16,14 @@ without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with TFC. If not, see <http://www.gnu.org/licenses/>.
+along with TFC. If not, see <https://www.gnu.org/licenses/>.
"""
import os
-import subprocess
import sys
-import time
from multiprocessing import Process, Queue
+from typing import Dict
from src.common.crypto import check_kernel_entropy, check_kernel_version
from src.common.db_contacts import ContactList
@@ -31,92 +31,123 @@ from src.common.db_groups import GroupList
from src.common.db_keys import KeyList
from src.common.db_logs import log_writer_loop
from src.common.db_masterkey import MasterKey
+from src.common.db_onion import OnionService
from src.common.db_settings import Settings
from src.common.gateway import Gateway, gateway_loop
-from src.common.misc import ignored, process_arguments
-from src.common.output import c_print, clear_screen
+from src.common.misc import ensure_dir, monitor_processes, process_arguments
+from src.common.output import print_title
from src.common.statics import *
-from src.tx.input_loop import input_loop
-from src.tx.sender_loop import sender_loop
-from src.tx.traffic_masking import noise_loop
+from src.transmitter.input_loop import input_loop
+from src.transmitter.sender_loop import sender_loop
+from src.transmitter.traffic_masking import noise_loop
-from src.rx.output_loop import output_loop
-from src.rx.receiver_loop import receiver_loop
+from src.receiver.output_loop import output_loop
+from src.receiver.receiver_loop import receiver_loop
def main() -> None:
- """Derive master key, decrypt databases and initialize processes."""
- os.chdir(sys.path[0])
+ """Load persistent data and launch the Transmitter/Receiver Program.
+
+ This function decrypts user data from databases and launches
+ processes for Transmitter or Receiver Program. It then monitors the
+ EXIT_QUEUE for EXIT/WIPE signals and each process in case one of
+ them dies.
+
+ If you're reading this code to get the big picture on how TFC works,
+ start by looking at the loop functions below, defined as the target
+ for each process, from top to bottom:
+ From `input_loop` process, you can see how the Transmitter
+ Program processes a message or command from the user, creates
+ assembly packets for a message/file/command, and how those are
+ eventually pushed into a multiprocessing queue, from where they are
+ loaded by the `sender_loop`.
+ The `sender_loop` process encrypts outgoing assembly packets,
+ and outputs the encrypted datagrams to the Networked Computer. The
+ process also sends assembly packets to the `log_writer_loop`.
+ The `log_writer_loop` process filters out non-message assembly
+ packets and if logging for contact is enabled, stores the message
+ assembly packet into an encrypted log database.
+ The `noise_loop` processes are used to provide the `sender_loop`
+ an interface identical to that of the `input_loop`. The
+ `sender_loop` uses the interface to load noise packets/commands when
+ traffic masking is enabled.
+
+ Refer to the file `relay.py` to see how the Relay Program on
+ Networked Computer manages datagrams between the network and
+ Source/Destination Computer.
+
+ In Receiver Program (also launched by this file), the `gateway_loop`
+ process acts as a buffer for incoming datagrams. This buffer is
+ consumed by the `receiver_loop` process that organizes datagrams
+ loaded from the buffer into a set of queues depending on datagram
+ type. Finally, the `output_loop` process loads and processes
+ datagrams from the queues in the order of priority.
+ """
+ working_dir = f'{os.getenv("HOME")}/{DIR_TFC}'
+ ensure_dir(working_dir)
+ os.chdir(working_dir)
+
+ operation, local_test, data_diode_sockets = process_arguments()
check_kernel_version()
check_kernel_entropy()
- operation, local_test, dd_sockets = process_arguments()
-
- clear_screen()
- c_print(TFC, head=1, tail=1)
+ print_title(operation)
master_key = MasterKey( operation, local_test)
- settings = Settings( master_key, operation, local_test, dd_sockets)
+ gateway = Gateway( operation, local_test, data_diode_sockets)
+ settings = Settings( master_key, operation, local_test)
contact_list = ContactList(master_key, settings)
key_list = KeyList( master_key, settings)
group_list = GroupList( master_key, settings, contact_list)
- gateway = Gateway( settings)
if settings.software_operation == TX:
- queues = {MESSAGE_PACKET_QUEUE: Queue(),
- FILE_PACKET_QUEUE: Queue(),
- COMMAND_PACKET_QUEUE: Queue(),
- NH_PACKET_QUEUE: Queue(),
- LOG_PACKET_QUEUE: Queue(),
- EXIT_QUEUE: Queue(),
- NOISE_PACKET_QUEUE: Queue(),
- NOISE_COMMAND_QUEUE: Queue(),
- KEY_MANAGEMENT_QUEUE: Queue(),
- WINDOW_SELECT_QUEUE: Queue()}
+ onion_service = OnionService(master_key)
- process_list = [Process(target=input_loop, args=(queues, settings, gateway, contact_list, group_list, master_key, sys.stdin.fileno())),
+ queues = {MESSAGE_PACKET_QUEUE: Queue(), # Standard messages
+ COMMAND_PACKET_QUEUE: Queue(), # Standard commands
+ TM_MESSAGE_PACKET_QUEUE: Queue(), # Traffic masking messages
+ TM_FILE_PACKET_QUEUE: Queue(), # Traffic masking files
+ TM_COMMAND_PACKET_QUEUE: Queue(), # Traffic masking commands
+ TM_NOISE_PACKET_QUEUE: Queue(), # Traffic masking noise packets
+ TM_NOISE_COMMAND_QUEUE: Queue(), # Traffic masking noise commands
+ RELAY_PACKET_QUEUE: Queue(), # Unencrypted datagrams to Networked Computer
+ LOG_PACKET_QUEUE: Queue(), # `log_writer_loop` assembly packets to be logged
+ LOG_SETTING_QUEUE: Queue(), # `log_writer_loop` logging state management between noise packets
+ TRAFFIC_MASKING_QUEUE: Queue(), # `log_writer_loop` traffic masking setting management commands
+ LOGFILE_MASKING_QUEUE: Queue(), # `log_writer_loop` logfile masking setting management commands
+ KEY_MANAGEMENT_QUEUE: Queue(), # `sender_loop` key database management commands
+ SENDER_MODE_QUEUE: Queue(), # `sender_loop` default/traffic masking mode switch commands
+ WINDOW_SELECT_QUEUE: Queue(), # `sender_loop` window selection commands during traffic masking
+ EXIT_QUEUE: Queue() # EXIT/WIPE signal from `input_loop` to `main`
+ } # type: Dict[bytes, Queue]
+
+ process_list = [Process(target=input_loop, args=(queues, settings, gateway, contact_list, group_list,
+ master_key, onion_service, sys.stdin.fileno())),
Process(target=sender_loop, args=(queues, settings, gateway, key_list)),
- Process(target=log_writer_loop, args=(queues,))]
-
- if settings.session_traffic_masking:
- process_list.extend([Process(target=noise_loop, args=(P_N_HEADER, queues[NOISE_PACKET_QUEUE], contact_list)),
- Process(target=noise_loop, args=(C_N_HEADER, queues[NOISE_COMMAND_QUEUE]))])
+ Process(target=log_writer_loop, args=(queues, settings)),
+ Process(target=noise_loop, args=(queues, contact_list)),
+ Process(target=noise_loop, args=(queues,))]
else:
- queues = {LOCAL_KEY_PACKET_HEADER: Queue(),
- PUBLIC_KEY_PACKET_HEADER: Queue(),
- MESSAGE_PACKET_HEADER: Queue(),
- COMMAND_PACKET_HEADER: Queue(),
- IMPORTED_FILE_HEADER: Queue(),
- EXIT_QUEUE: Queue(),
- GATEWAY_QUEUE: Queue()}
+ queues = {GATEWAY_QUEUE: Queue(), # Buffer for incoming datagrams
+ LOCAL_KEY_DATAGRAM_HEADER: Queue(), # Local key datagrams
+ MESSAGE_DATAGRAM_HEADER: Queue(), # Message datagrams
+ FILE_DATAGRAM_HEADER: Queue(), # File datagrams
+ COMMAND_DATAGRAM_HEADER: Queue(), # Command datagrams
+ EXIT_QUEUE: Queue() # EXIT/WIPE signal from `output_loop` to `main`
+ }
process_list = [Process(target=gateway_loop, args=(queues, gateway)),
- Process(target=receiver_loop, args=(queues, settings)),
- Process(target=output_loop, args=(queues, settings, contact_list, key_list, group_list, master_key, sys.stdin.fileno()))]
+ Process(target=receiver_loop, args=(queues, gateway)),
+ Process(target=output_loop, args=(queues, gateway, settings, contact_list, key_list,
+ group_list, master_key, sys.stdin.fileno()))]
for p in process_list:
p.start()
- while True:
- with ignored(EOFError, KeyboardInterrupt):
- time.sleep(0.1)
- if not all([p.is_alive() for p in process_list]):
- for p in process_list:
- p.terminate()
- sys.exit(1)
-
- if not queues[EXIT_QUEUE].empty():
- command = queues[EXIT_QUEUE].get()
- for p in process_list:
- p.terminate()
- if command == WIPE:
- subprocess.Popen(f"find {DIR_USER_DATA} -name '{operation}*' -type f -exec shred -n 3 -z -u {{}} \;", shell=True).wait()
- os.system('poweroff')
- else:
- sys.exit(0)
+ monitor_processes(process_list, settings.software_operation, queues)
if __name__ == '__main__':