commit d52836995443c260d3dda38a7e52c3d024ea6074 Author: Thomas Sileo Date: Wed Jun 22 20:11:22 2022 +0200 Initial commit for new v2 diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000..c1b2260 --- /dev/null +++ b/.flake8 @@ -0,0 +1,4 @@ +[flake8] +max-line-length = 88 +extend-ignore = E203 +exclude = alembic/versions diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..f65b589 --- /dev/null +++ b/.gitignore @@ -0,0 +1,4 @@ +*.db +__pycache__/ +.mypy_cache/ +.pytest_cache/ diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..be3f7b2 --- /dev/null +++ b/LICENSE @@ -0,0 +1,661 @@ + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. 
+ + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. 
For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. 
+ + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. 
You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. 
+ + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. 
+ + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. 
+ + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. + + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. 
If the Program does not specify a version number of the
+GNU Affero General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+  If the Program specifies that a proxy can decide which future
+versions of the GNU Affero General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+  Later license versions may give you additional or different
+permissions.  However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+  15. Disclaimer of Warranty.
+
+  THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW.  EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU.  SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+  16. Limitation of Liability.
+
+  IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+  17. Interpretation of Sections 15 and 16.
+
+  If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU Affero General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU Affero General Public License for more details.
+
+    You should have received a copy of the GNU Affero General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If your software can interact with users remotely through a computer
+network, you should also make sure that it provides a way for users to
+get its source.  For example, if your program is a web application, its
+interface could display a "Source" link that leads users to an archive
+of the code.  There are many ways you could offer source, and different
+solutions will be better for different programs; see section 13 for the
+specific requirements.
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU AGPL, see
+<https://www.gnu.org/licenses/>.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..0af0ae8
--- /dev/null
+++ b/README.md
@@ -0,0 +1,9 @@
+# microblog.pub
+
+This branch is a complete rewrite of the original microblog.pub server.
+
+The original server became hard to debug and maintain, and it is not easy to deploy (due to dependencies like MongoDB).
+
+This rewrite is built using "modern" Python 3.10 and SQLite, and does not need any external task queue service.
+
+It is still in early development; this README will be updated when I get to deploy a personal instance in the wild.
diff --git a/alembic.ini b/alembic.ini
new file mode 100644
index 0000000..86d1800
--- /dev/null
+++ b/alembic.ini
@@ -0,0 +1,105 @@
+# A generic, single database configuration.
+
+[alembic]
+# path to migration scripts
+script_location = alembic
+
+# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
+# Uncomment the line below if you want the files to be prepended with date and time
+# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
+# for all available tokens
+# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
+
+# sys.path path, will be prepended to sys.path if present.
+# defaults to the current working directory.
+prepend_sys_path = .
+
+# timezone to use when rendering the date within the migration file
+# as well as the filename.
+# If specified, requires the python-dateutil library that can be
+# installed by adding `alembic[tz]` to the pip requirements
+# string value is passed to dateutil.tz.gettz()
+# leave blank for localtime
+# timezone =
+
+# max length of characters to apply to the
+# "slug" field
+# truncate_slug_length = 40
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+# set to 'true' to allow .pyc and .pyo files without
+# a source .py file to be detected as revisions in the
+# versions/ directory
+# sourceless = false
+
+# version location specification; This defaults
+# to alembic/versions.  When using multiple version
+# directories, initial revisions must be specified with --version-path.
+# The path separator used here should be the separator specified by "version_path_separator" below.
+# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
+
+# version path separator; As mentioned above, this is the character used to split
+# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
+# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
+# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/alembic/README b/alembic/README new file mode 100644 index 0000000..98e4f9c --- /dev/null +++ b/alembic/README @@ -0,0 +1 @@ +Generic single-database configuration. \ No newline at end of file diff --git a/alembic/env.py b/alembic/env.py new file mode 100644 index 0000000..f0bf9cd --- /dev/null +++ b/alembic/env.py @@ -0,0 +1,81 @@ +from logging.config import fileConfig + +from sqlalchemy import engine_from_config +from sqlalchemy import pool + +import app.models # noqa: F401 # Register models +from alembic import context +from app.database import SQLALCHEMY_DATABASE_URL +from app.database import Base + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +config.set_main_option("sqlalchemy.url", SQLALCHEMY_DATABASE_URL) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = Base.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure(connection=connection, target_metadata=target_metadata) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/alembic/script.py.mako b/alembic/script.py.mako new file mode 100644 index 0000000..55df286 --- /dev/null +++ b/alembic/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/alembic/versions/b122c3a69fc9_initial_migration.py b/alembic/versions/b122c3a69fc9_initial_migration.py new file mode 100644 index 0000000..1a590aa --- /dev/null +++ b/alembic/versions/b122c3a69fc9_initial_migration.py @@ -0,0 +1,192 @@ +"""Initial migration + +Revision ID: b122c3a69fc9 +Revises: +Create Date: 2022-06-22 19:54:19.153320 + +""" +import sqlalchemy as sa + +from alembic import op + +# revision identifiers, used by Alembic. +revision = 'b122c3a69fc9' +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('actors', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('ap_id', sa.String(), nullable=False), + sa.Column('ap_actor', sa.JSON(), nullable=False), + sa.Column('ap_type', sa.String(), nullable=False), + sa.Column('handle', sa.String(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_actors_ap_id'), 'actors', ['ap_id'], unique=True) + op.create_index(op.f('ix_actors_handle'), 'actors', ['handle'], unique=False) + op.create_index(op.f('ix_actors_id'), 'actors', ['id'], unique=False) + op.create_table('inbox', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('actor_id', sa.Integer(), nullable=False), + sa.Column('server', sa.String(), nullable=False), + sa.Column('is_hidden_from_stream', sa.Boolean(), nullable=False), + sa.Column('ap_actor_id', sa.String(), nullable=False), + sa.Column('ap_type', sa.String(), nullable=False), + sa.Column('ap_id', sa.String(), nullable=False), + sa.Column('ap_context', sa.String(), nullable=True), + sa.Column('ap_published_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('ap_object', sa.JSON(), nullable=False), + sa.Column('activity_object_ap_id', sa.String(), nullable=True), + sa.Column('visibility', sa.Enum('PUBLIC', 'UNLISTED', 'DIRECT', name='visibilityenum'), nullable=False), + sa.Column('relates_to_inbox_object_id', sa.Integer(), nullable=True), + sa.Column('relates_to_outbox_object_id', sa.Integer(), nullable=True), + sa.Column('undone_by_inbox_object_id', 
sa.Integer(), nullable=True), + sa.Column('liked_via_outbox_object_ap_id', sa.String(), nullable=True), + sa.Column('announced_via_outbox_object_ap_id', sa.String(), nullable=True), + sa.Column('is_bookmarked', sa.Boolean(), nullable=False), + sa.Column('has_replies', sa.Boolean(), nullable=False), + sa.Column('og_meta', sa.JSON(), nullable=True), + sa.ForeignKeyConstraint(['actor_id'], ['actors.id'], ), + sa.ForeignKeyConstraint(['relates_to_inbox_object_id'], ['inbox.id'], ), + sa.ForeignKeyConstraint(['relates_to_outbox_object_id'], ['outbox.id'], ), + sa.ForeignKeyConstraint(['undone_by_inbox_object_id'], ['inbox.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_inbox_ap_id'), 'inbox', ['ap_id'], unique=True) + op.create_index(op.f('ix_inbox_id'), 'inbox', ['id'], unique=False) + op.create_table('outbox', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('is_hidden_from_homepage', sa.Boolean(), nullable=False), + sa.Column('public_id', sa.String(), nullable=False), + sa.Column('ap_type', sa.String(), nullable=False), + sa.Column('ap_id', sa.String(), nullable=False), + sa.Column('ap_context', sa.String(), nullable=True), + sa.Column('ap_object', sa.JSON(), nullable=False), + sa.Column('activity_object_ap_id', sa.String(), nullable=True), + sa.Column('source', sa.String(), nullable=True), + sa.Column('ap_published_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('visibility', sa.Enum('PUBLIC', 'UNLISTED', 'DIRECT', name='visibilityenum'), nullable=False), + sa.Column('likes_count', sa.Integer(), nullable=False), + sa.Column('announces_count', sa.Integer(), nullable=False), + sa.Column('replies_count', sa.Integer(), nullable=False), + sa.Column('webmentions', sa.JSON(), nullable=True), + sa.Column('og_meta', sa.JSON(), nullable=True), + sa.Column('is_deleted', sa.Boolean(), nullable=False), + sa.Column('relates_to_inbox_object_id', sa.Integer(), nullable=True), + sa.Column('relates_to_outbox_object_id', sa.Integer(), nullable=True), + sa.Column('undone_by_outbox_object_id', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['relates_to_inbox_object_id'], ['inbox.id'], ), + sa.ForeignKeyConstraint(['relates_to_outbox_object_id'], ['outbox.id'], ), + sa.ForeignKeyConstraint(['undone_by_outbox_object_id'], ['outbox.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_outbox_ap_id'), 'outbox', ['ap_id'], unique=True) + op.create_index(op.f('ix_outbox_id'), 'outbox', ['id'], unique=False) + op.create_index(op.f('ix_outbox_public_id'), 'outbox', ['public_id'], unique=False) + op.create_table('followers', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('actor_id', sa.Integer(), nullable=False), + sa.Column('inbox_object_id', sa.Integer(), nullable=False), + sa.Column('ap_actor_id', sa.String(), nullable=False), + sa.ForeignKeyConstraint(['actor_id'], ['actors.id'], ), + sa.ForeignKeyConstraint(['inbox_object_id'], ['inbox.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('actor_id'), + sa.UniqueConstraint('ap_actor_id') + ) + op.create_index(op.f('ix_followers_id'), 'followers', ['id'], unique=False) + op.create_table('following', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('created_at', 
sa.DateTime(timezone=True), nullable=False), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('actor_id', sa.Integer(), nullable=False), + sa.Column('outbox_object_id', sa.Integer(), nullable=False), + sa.Column('ap_actor_id', sa.String(), nullable=False), + sa.ForeignKeyConstraint(['actor_id'], ['actors.id'], ), + sa.ForeignKeyConstraint(['outbox_object_id'], ['outbox.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('actor_id'), + sa.UniqueConstraint('ap_actor_id') + ) + op.create_index(op.f('ix_following_id'), 'following', ['id'], unique=False) + op.create_table('notifications', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('notification_type', sa.Enum('NEW_FOLLOWER', 'UNFOLLOW', 'LIKE', 'UNDO_LIKE', 'ANNOUNCE', 'UNDO_ANNOUNCE', 'MENTION', name='notificationtype'), nullable=True), + sa.Column('is_new', sa.Boolean(), nullable=False), + sa.Column('actor_id', sa.Integer(), nullable=True), + sa.Column('outbox_object_id', sa.Integer(), nullable=True), + sa.Column('inbox_object_id', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['actor_id'], ['actors.id'], ), + sa.ForeignKeyConstraint(['inbox_object_id'], ['inbox.id'], ), + sa.ForeignKeyConstraint(['outbox_object_id'], ['outbox.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_notifications_id'), 'notifications', ['id'], unique=False) + op.create_table('outgoing_activities', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('recipient', sa.String(), nullable=False), + sa.Column('outbox_object_id', sa.Integer(), nullable=False), + sa.Column('tries', sa.Integer(), nullable=False), + sa.Column('next_try', sa.DateTime(timezone=True), nullable=True), + sa.Column('last_try', sa.DateTime(timezone=True), nullable=True), + sa.Column('last_status_code', sa.Integer(), nullable=True), + sa.Column('last_response', sa.String(), nullable=True), + sa.Column('is_sent', sa.Boolean(), nullable=False), + sa.Column('is_errored', sa.Boolean(), nullable=False), + sa.Column('error', sa.String(), nullable=True), + sa.ForeignKeyConstraint(['outbox_object_id'], ['outbox.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_outgoing_activities_id'), 'outgoing_activities', ['id'], unique=False) + op.create_table('tagged_outbox_objects', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('outbox_object_id', sa.Integer(), nullable=False), + sa.Column('tag', sa.String(), nullable=False), + sa.ForeignKeyConstraint(['outbox_object_id'], ['outbox.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('outbox_object_id', 'tag', name='uix_tagged_object') + ) + op.create_index(op.f('ix_tagged_outbox_objects_id'), 'tagged_outbox_objects', ['id'], unique=False) + op.create_index(op.f('ix_tagged_outbox_objects_tag'), 'tagged_outbox_objects', ['tag'], unique=False) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_index(op.f('ix_tagged_outbox_objects_tag'), table_name='tagged_outbox_objects') + op.drop_index(op.f('ix_tagged_outbox_objects_id'), table_name='tagged_outbox_objects') + op.drop_table('tagged_outbox_objects') + op.drop_index(op.f('ix_outgoing_activities_id'), table_name='outgoing_activities') + op.drop_table('outgoing_activities') + op.drop_index(op.f('ix_notifications_id'), table_name='notifications') + op.drop_table('notifications') + op.drop_index(op.f('ix_following_id'), table_name='following') + op.drop_table('following') + op.drop_index(op.f('ix_followers_id'), table_name='followers') + op.drop_table('followers') + op.drop_index(op.f('ix_outbox_public_id'), table_name='outbox') + op.drop_index(op.f('ix_outbox_id'), table_name='outbox') + op.drop_index(op.f('ix_outbox_ap_id'), table_name='outbox') + op.drop_table('outbox') + op.drop_index(op.f('ix_inbox_id'), table_name='inbox') + op.drop_index(op.f('ix_inbox_ap_id'), table_name='inbox') + op.drop_table('inbox') + op.drop_index(op.f('ix_actors_id'), table_name='actors') + op.drop_index(op.f('ix_actors_handle'), table_name='actors') + op.drop_index(op.f('ix_actors_ap_id'), table_name='actors') + op.drop_table('actors') + # ### end Alembic commands ### diff --git a/app/__init__.py b/app/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/activitypub.py b/app/activitypub.py new file mode 100644 index 0000000..4226056 --- /dev/null +++ b/app/activitypub.py @@ -0,0 +1,276 @@ +import enum +import json +import mimetypes +from typing import Any + +import httpx + +from app import config +from app.httpsig import auth +from app.key import get_pubkey_as_pem + +RawObject = dict[str, Any] +AS_CTX = "https://www.w3.org/ns/activitystreams" +AS_PUBLIC = "https://www.w3.org/ns/activitystreams#Public" + +ACTOR_TYPES = ["Application", "Group", "Organization", "Person", "Service"] + + +class VisibilityEnum(str, enum.Enum): + PUBLIC = "public" + UNLISTED = "unlisted" + DIRECT = "direct" + + +MICROBLOGPUB = { + "@context": [ + "https://www.w3.org/ns/activitystreams", + "https://w3id.org/security/v1", + { + "Hashtag": "as:Hashtag", + "PropertyValue": "schema:PropertyValue", + "manuallyApprovesFollowers": "as:manuallyApprovesFollowers", + "ostatus": "http://ostatus.org#", + "schema": "http://schema.org", + "sensitive": "as:sensitive", + "toot": "http://joinmastodon.org/ns#", + "totalItems": "as:totalItems", + "value": "schema:value", + "Emoji": "toot:Emoji", + }, + ] +} + +DEFAULT_CTX = COLLECTION_CTX = [ + "https://www.w3.org/ns/activitystreams", + "https://w3id.org/security/v1", + { + # AS ext + "Hashtag": "as:Hashtag", + "sensitive": "as:sensitive", + "manuallyApprovesFollowers": "as:manuallyApprovesFollowers", + # toot + "toot": "http://joinmastodon.org/ns#", + # "featured": "toot:featured", + # schema + "schema": "http://schema.org#", + "PropertyValue": "schema:PropertyValue", + "value": "schema:value", + }, +] + +ME = { + "@context": DEFAULT_CTX, + "type": "Person", + "id": config.ID, + "following": config.BASE_URL + "/following", + "followers": config.BASE_URL + "/followers", + # "featured": ID + "/featured", + "inbox": config.BASE_URL + "/inbox", + "outbox": config.BASE_URL + "/outbox", + "preferredUsername": config.USERNAME, + "name": config.CONFIG.name, + "summary": config.CONFIG.summary, + "endpoints": {}, + "url": config.ID, + "manuallyApprovesFollowers": False, + "attachment": [], + "icon": { + "mediaType": mimetypes.guess_type(config.CONFIG.icon_url)[0], + "type": "Image", + "url": config.CONFIG.icon_url, + }, 
+    "publicKey": {
+        "id": f"{config.ID}#main-key",
+        "owner": config.ID,
+        "publicKeyPem": get_pubkey_as_pem(),
+    },
+    "alsoKnownAs": [],
+}
+
+
+class NotAnObjectError(Exception):
+    def __init__(self, url: str, resp: httpx.Response | None = None) -> None:
+        message = f"{url} is not an AP activity"
+        super().__init__(message)
+        self.url = url
+        self.resp = resp
+
+
+def fetch(url: str, params: dict[str, Any] | None = None) -> dict[str, Any]:
+    resp = httpx.get(
+        url,
+        headers={
+            "User-Agent": config.USER_AGENT,
+            "Accept": config.AP_CONTENT_TYPE,
+        },
+        params=params,
+        follow_redirects=True,
+    )
+    resp.raise_for_status()
+    try:
+        return resp.json()
+    except json.JSONDecodeError:
+        raise NotAnObjectError(url, resp)
+
+
+def parse_collection(  # noqa: C901
+    url: str | None = None,
+    payload: RawObject | None = None,
+    level: int = 0,
+) -> list[RawObject]:
+    """Resolve/fetch a `Collection`/`OrderedCollection`."""
+    if level > 3:
+        raise ValueError("recursion limit exceeded")
+
+    # Go through all the pages
+    out: list[RawObject] = []
+    if url:
+        payload = fetch(url)
+    if not payload:
+        raise ValueError("must provide at least a payload or a URL")
+
+    ap_type = payload.get("type")
+    if not ap_type:
+        raise ValueError(f"Missing type: {payload=}")
+
+    if level == 0 and ap_type not in ["Collection", "OrderedCollection"]:
+        raise ValueError(f"Unexpected type {ap_type}")
+
+    if payload["type"] in ["Collection", "OrderedCollection"]:
+        if "orderedItems" in payload:
+            return payload["orderedItems"]
+        if "items" in payload:
+            return payload["items"]
+        if "first" in payload:
+            if isinstance(payload["first"], str):
+                out.extend(parse_collection(url=payload["first"], level=level + 1))
+            else:
+                if "orderedItems" in payload["first"]:
+                    out.extend(payload["first"]["orderedItems"])
+                if "items" in payload["first"]:
+                    out.extend(payload["first"]["items"])
+                n = payload["first"].get("next")
+                if n:
+                    out.extend(parse_collection(url=n, level=level + 1))
+        return out
+
+    while payload:
+        if ap_type in ["CollectionPage", "OrderedCollectionPage"]:
+            if "orderedItems" in payload:
+                out.extend(payload["orderedItems"])
+            if "items" in payload:
+                out.extend(payload["items"])
+            n = payload.get("next")
+            if n is None:
+                break
+            payload = fetch(n)
+        else:
+            raise ValueError("unexpected activity type {}".format(payload["type"]))
+
+    return out
+
+
+def as_list(val: Any | list[Any]) -> list[Any]:
+    if isinstance(val, list):
+        return val
+
+    return [val]
+
+
+def get_id(val: str | dict[str, Any]) -> str:
+    if isinstance(val, dict):
+        val = val["id"]
+
+    if not isinstance(val, str):
+        raise ValueError(f"Invalid ID type: {val}")
+
+    return val
+
+
+def object_visibility(ap_activity: RawObject) -> VisibilityEnum:
+    to = as_list(ap_activity.get("to", []))
+    cc = as_list(ap_activity.get("cc", []))
+    if AS_PUBLIC in to:
+        return VisibilityEnum.PUBLIC
+    elif AS_PUBLIC in cc:
+        return VisibilityEnum.UNLISTED
+    else:
+        return VisibilityEnum.DIRECT
+
+
+def get_actor_id(activity: RawObject) -> str:
+    if activity["type"] in ["Note", "Article", "Video"]:
+        attributed_to = as_list(activity["attributedTo"])
+        return get_id(attributed_to[0])
+    else:
+        return get_id(activity["actor"])
+
+
+def wrap_object(activity: RawObject) -> RawObject:
+    return {
+        "@context": AS_CTX,
+        "actor": config.ID,
+        "to": activity.get("to", []),
+        "cc": activity.get("cc", []),
+        "id": activity["id"] + "/activity",
+        "object": remove_context(activity),
+        "published": activity["published"],
+        "type": "Create",
+    }
+
+
+def wrap_object_if_needed(raw_object:
RawObject) -> RawObject: + if raw_object["type"] in ["Note"]: + return wrap_object(raw_object) + + return raw_object + + +def unwrap_activity(activity: RawObject) -> RawObject: + # FIXME(ts): other types to unwrap? + if activity["type"] == "Create": + unwrapped_object = activity["object"] + + # Sanity check, ensure the wrapped object actor matches the activity + if get_actor_id(unwrapped_object) != get_actor_id(activity): + raise ValueError( + f"Unwrapped object actor does not match activity: {activity}" + ) + return unwrapped_object + + return activity + + +def remove_context(raw_object: RawObject) -> RawObject: + if "@context" not in raw_object: + return raw_object + a = dict(raw_object) + del a["@context"] + return a + + +def get(url: str, params: dict[str, Any] | None = None) -> dict[str, Any]: + resp = httpx.get( + url, + headers={"User-Agent": config.USER_AGENT, "Accept": config.AP_CONTENT_TYPE}, + params=params, + follow_redirects=True, + auth=auth, + ) + resp.raise_for_status() + return resp.json() + + +def post(url: str, payload: dict[str, Any]) -> httpx.Response: + resp = httpx.post( + url, + headers={ + "User-Agent": config.USER_AGENT, + "Content-Type": config.AP_CONTENT_TYPE, + }, + json=payload, + auth=auth, + ) + resp.raise_for_status() + return resp diff --git a/app/actor.py b/app/actor.py new file mode 100644 index 0000000..08d188e --- /dev/null +++ b/app/actor.py @@ -0,0 +1,190 @@ +import typing +from dataclasses import dataclass +from urllib.parse import urlparse + +from sqlalchemy.orm import Session +from sqlalchemy.orm import joinedload + +from app import activitypub as ap + +if typing.TYPE_CHECKING: + from app.models import Actor as ActorModel + + +def _handle(raw_actor: ap.RawObject) -> str: + ap_id = ap.get_id(raw_actor["id"]) + domain = urlparse(ap_id) + if not domain.hostname: + raise ValueError(f"Invalid actor ID {ap_id}") + + return f'@{raw_actor["preferredUsername"]}@{domain.hostname}' # type: ignore + + +class Actor: + @property + def ap_actor(self) -> ap.RawObject: + raise NotImplementedError() + + @property + def ap_id(self) -> str: + return ap.get_id(self.ap_actor["id"]) + + @property + def name(self) -> str | None: + return self.ap_actor.get("name") + + @property + def summary(self) -> str | None: + return self.ap_actor.get("summary") + + @property + def url(self) -> str | None: + return self.ap_actor.get("url") or self.ap_actor["id"] + + @property + def preferred_username(self) -> str: + return self.ap_actor["preferredUsername"] + + @property + def handle(self) -> str: + return _handle(self.ap_actor) + + @property + def ap_type(self) -> str: + raise NotImplementedError() + + @property + def inbox_url(self) -> str: + return self.ap_actor["inbox"] + + @property + def shared_inbox_url(self) -> str | None: + return self.ap_actor.get("endpoints", {}).get("sharedInbox") + + @property + def icon_url(self) -> str | None: + return self.ap_actor.get("icon", {}).get("url") + + @property + def icon_media_type(self) -> str | None: + return self.ap_actor.get("icon", {}).get("mediaType") + + @property + def public_key_as_pem(self) -> str: + return self.ap_actor["publicKey"]["publicKeyPem"] + + @property + def public_key_id(self) -> str: + return self.ap_actor["publicKey"]["id"] + + +class RemoteActor(Actor): + def __init__(self, ap_actor: ap.RawObject) -> None: + if (ap_type := ap_actor.get("type")) not in ap.ACTOR_TYPES: + raise ValueError(f"Unexpected actor type: {ap_type}") + + self._ap_actor = ap_actor + self._ap_type = ap_type + + @property + def ap_actor(self) -> 
ap.RawObject: + return self._ap_actor + + @property + def ap_type(self) -> str: + return self._ap_type + + @property + def is_from_db(self) -> bool: + return False + + +LOCAL_ACTOR = RemoteActor(ap_actor=ap.ME) + + +def save_actor(db: Session, ap_actor: ap.RawObject) -> "ActorModel": + from app import models + + if ap_type := ap_actor.get("type") not in ap.ACTOR_TYPES: + raise ValueError(f"Invalid type {ap_type} for actor {ap_actor}") + + actor = models.Actor( + ap_id=ap_actor["id"], + ap_actor=ap_actor, + ap_type=ap_actor["type"], + handle=_handle(ap_actor), + ) + db.add(actor) + db.commit() + db.refresh(actor) + return actor + + +def fetch_actor(db: Session, actor_id: str) -> "ActorModel": + from app import models + + existing_actor = ( + db.query(models.Actor).filter(models.Actor.ap_id == actor_id).one_or_none() + ) + if existing_actor: + return existing_actor + + ap_actor = ap.get(actor_id) + return save_actor(db, ap_actor) + + +@dataclass +class ActorMetadata: + ap_actor_id: str + is_following: bool + is_follower: bool + is_follow_request_sent: bool + outbox_follow_ap_id: str | None + inbox_follow_ap_id: str | None + + +ActorsMetadata = dict[str, ActorMetadata] + + +def get_actors_metadata( + db: Session, + actors: list["ActorModel"], +) -> ActorsMetadata: + from app import models + + ap_actor_ids = [actor.ap_id for actor in actors] + followers = { + follower.ap_actor_id: follower.inbox_object.ap_id + for follower in db.query(models.Follower) + .filter(models.Follower.ap_actor_id.in_(ap_actor_ids)) + .options(joinedload(models.Follower.inbox_object)) + .all() + } + following = { + following.ap_actor_id + for following in db.query(models.Following.ap_actor_id) + .filter(models.Following.ap_actor_id.in_(ap_actor_ids)) + .all() + } + sent_follow_requests = { + follow_req.ap_object["object"]: follow_req.ap_id + for follow_req in db.query( + models.OutboxObject.ap_object, models.OutboxObject.ap_id + ) + .filter( + models.OutboxObject.ap_type == "Follow", + models.OutboxObject.undone_by_outbox_object_id.is_(None), + ) + .all() + } + idx: ActorsMetadata = {} + for actor in actors: + idx[actor.ap_id] = ActorMetadata( + ap_actor_id=actor.ap_id, + is_following=actor.ap_id in following, + is_follower=actor.ap_id in followers, + is_follow_request_sent=actor.ap_id in sent_follow_requests, + outbox_follow_ap_id=sent_follow_requests.get(actor.ap_id), + inbox_follow_ap_id=followers.get(actor.ap_id), + ) + return idx diff --git a/app/admin.py b/app/admin.py new file mode 100644 index 0000000..b58f87d --- /dev/null +++ b/app/admin.py @@ -0,0 +1,286 @@ +from fastapi import APIRouter +from fastapi import Cookie +from fastapi import Depends +from fastapi import Form +from fastapi import Request +from fastapi import UploadFile +from fastapi.exceptions import HTTPException +from fastapi.responses import RedirectResponse +from sqlalchemy.orm import Session +from sqlalchemy.orm import joinedload + +from app import activitypub as ap +from app import boxes +from app import models +from app import templates +from app.actor import get_actors_metadata +from app.boxes import get_inbox_object_by_ap_id +from app.boxes import send_follow +from app.config import generate_csrf_token +from app.config import session_serializer +from app.config import verify_csrf_token +from app.config import verify_password +from app.database import get_db +from app.lookup import lookup + + +def user_session_or_redirect( + request: Request, + session: str | None = Cookie(default=None), +) -> None: + _RedirectToLoginPage = HTTPException( + 
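+        # Raising this exception from a dependency makes FastAPI short-circuit
+        # the route and reply with the 302 status + Location header below, so
+        # every route that depends on user_session_or_redirect bounces
+        # anonymous visitors to the login page.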
status_code=302, + headers={"Location": request.url_for("login")}, + ) + + if not session: + raise _RedirectToLoginPage + + try: + loaded_session = session_serializer.loads(session, max_age=3600 * 12) + except Exception: + raise _RedirectToLoginPage + + if not loaded_session.get("is_logged_in"): + raise _RedirectToLoginPage + + return None + + +router = APIRouter( + dependencies=[Depends(user_session_or_redirect)], +) +unauthenticated_router = APIRouter() + + +@router.get("/") +def admin_index( + request: Request, + db: Session = Depends(get_db), +) -> templates.TemplateResponse: + return templates.render_template(db, request, "index.html", {"request": request}) + + +@router.get("/lookup") +def get_lookup( + request: Request, + query: str | None = None, + db: Session = Depends(get_db), +) -> templates.TemplateResponse: + ap_object = None + actors_metadata = {} + if query: + ap_object = lookup(db, query) + if ap_object.ap_type in ap.ACTOR_TYPES: + actors_metadata = get_actors_metadata(db, [ap_object]) + else: + actors_metadata = get_actors_metadata(db, [ap_object.actor]) + print(ap_object) + return templates.render_template( + db, + request, + "lookup.html", + { + "query": query, + "ap_object": ap_object, + "actors_metadata": actors_metadata, + }, + ) + + +@router.get("/new") +def admin_new( + request: Request, + query: str | None = None, + db: Session = Depends(get_db), +) -> templates.TemplateResponse: + return templates.render_template( + db, + request, + "admin_new.html", + {}, + ) + + +@router.get("/stream") +def stream( + request: Request, + db: Session = Depends(get_db), +) -> templates.TemplateResponse: + stream = ( + db.query(models.InboxObject) + .filter( + models.InboxObject.ap_type.in_(["Note", "Article", "Video", "Announce"]), + models.InboxObject.is_hidden_from_stream.is_(False), + models.InboxObject.undone_by_inbox_object_id.is_(None), + ) + .options( + # joinedload(models.InboxObject.relates_to_inbox_object), + joinedload(models.InboxObject.relates_to_outbox_object), + ) + .order_by(models.InboxObject.ap_published_at.desc()) + .limit(20) + .all() + ) + return templates.render_template( + db, + request, + "admin_stream.html", + { + "stream": stream, + }, + ) + + +@router.get("/notifications") +def get_notifications( + request: Request, db: Session = Depends(get_db) +) -> templates.TemplateResponse: + notifications = ( + db.query(models.Notification) + .options( + joinedload(models.Notification.actor), + joinedload(models.Notification.inbox_object), + joinedload(models.Notification.outbox_object), + ) + .order_by(models.Notification.created_at.desc()) + .all() + ) + actors_metadata = get_actors_metadata( + db, [notif.actor for notif in notifications if notif.actor] + ) + + for notif in notifications: + notif.is_new = False + db.commit() + + return templates.render_template( + db, + request, + "notifications.html", + { + "notifications": notifications, + "actors_metadata": actors_metadata, + }, + ) + + +@router.post("/actions/follow") +def admin_actions_follow( + request: Request, + ap_actor_id: str = Form(), + redirect_url: str = Form(), + csrf_check: None = Depends(verify_csrf_token), + db: Session = Depends(get_db), +) -> RedirectResponse: + print(f"Following {ap_actor_id}") + send_follow(db, ap_actor_id) + return RedirectResponse(redirect_url, status_code=302) + + +@router.post("/actions/like") +def admin_actions_like( + request: Request, + ap_object_id: str = Form(), + redirect_url: str = Form(), + csrf_check: None = Depends(verify_csrf_token), + db: Session = 
Depends(get_db), +) -> RedirectResponse: + boxes.send_like(db, ap_object_id) + return RedirectResponse(redirect_url, status_code=302) + + +@router.post("/actions/undo") +def admin_actions_undo( + request: Request, + ap_object_id: str = Form(), + redirect_url: str = Form(), + csrf_check: None = Depends(verify_csrf_token), + db: Session = Depends(get_db), +) -> RedirectResponse: + boxes.send_undo(db, ap_object_id) + return RedirectResponse(redirect_url, status_code=302) + + +@router.post("/actions/announce") +def admin_actions_announce( + request: Request, + ap_object_id: str = Form(), + redirect_url: str = Form(), + csrf_check: None = Depends(verify_csrf_token), + db: Session = Depends(get_db), +) -> RedirectResponse: + boxes.send_announce(db, ap_object_id) + return RedirectResponse(redirect_url, status_code=302) + + +@router.post("/actions/bookmark") +def admin_actions_bookmark( + request: Request, + ap_object_id: str = Form(), + redirect_url: str = Form(), + csrf_check: None = Depends(verify_csrf_token), + db: Session = Depends(get_db), +) -> RedirectResponse: + inbox_object = get_inbox_object_by_ap_id(db, ap_object_id) + if not inbox_object: + raise ValueError("Should never happen") + inbox_object.is_bookmarked = True + db.commit() + return RedirectResponse(redirect_url, status_code=302) + + +@router.post("/actions/new") +async def admin_actions_new( + request: Request, + files: list[UploadFile], + content: str = Form(), + redirect_url: str = Form(), + csrf_check: None = Depends(verify_csrf_token), + db: Session = Depends(get_db), +) -> RedirectResponse: + # XXX: for some reason, no files restuls in an empty single file + if len(files) >= 1 and files[0].filename: + print("Got files") + public_id = boxes.send_create(db, content) + return RedirectResponse( + request.url_for("outbox_by_public_id", public_id=public_id), + status_code=302, + ) + + +@unauthenticated_router.get("/login") +def login( + request: Request, + db: Session = Depends(get_db), +) -> templates.TemplateResponse: + return templates.render_template( + db, + request, + "login.html", + {"csrf_token": generate_csrf_token()}, + ) + + +@unauthenticated_router.post("/login") +def login_validation( + request: Request, + password: str = Form(), + csrf_check: None = Depends(verify_csrf_token), +) -> RedirectResponse: + if not verify_password(password): + raise HTTPException(status_code=401) + + resp = RedirectResponse("/admin", status_code=302) + resp.set_cookie("session", session_serializer.dumps({"is_logged_in": True})) # type: ignore # noqa: E501 + + return resp + + +@router.get("/logout") +def logout( + request: Request, +) -> RedirectResponse: + resp = RedirectResponse(request.url_for("index"), status_code=302) + resp.set_cookie("session", session_serializer.dumps({"is_logged_in": False})) # type: ignore # noqa: E501 + return resp diff --git a/app/ap_object.py b/app/ap_object.py new file mode 100644 index 0000000..dc32dd8 --- /dev/null +++ b/app/ap_object.py @@ -0,0 +1,183 @@ +import hashlib +from datetime import datetime +from typing import Any + +import pydantic +from dateutil.parser import isoparse +from markdown import markdown + +from app import activitypub as ap +from app import opengraph +from app.actor import LOCAL_ACTOR +from app.actor import Actor +from app.actor import RemoteActor + + +class Object: + @property + def is_from_db(self) -> bool: + return False + + @property + def ap_type(self) -> str: + return self.ap_object["type"] + + @property + def ap_object(self) -> ap.RawObject: + raise NotImplementedError + + 
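+    # Object is an abstract wrapper over a raw ActivityPub payload: concrete
+    # classes only need to provide `ap_object` (and `actor`); everything else
+    # is derived from the raw dict. A minimal sketch of a concrete subclass
+    # (illustrative only, not part of the app):
+    #
+    #   class InMemoryObject(Object):
+    #       def __init__(self, raw: ap.RawObject) -> None:
+    #           self._raw = raw
+    #
+    #       @property
+    #       def ap_object(self) -> ap.RawObject:
+    #           return self._raw
+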
@property + def ap_id(self) -> str: + return ap.get_id(self.ap_object["id"]) + + @property + def ap_actor_id(self) -> str: + return ap.get_actor_id(self.ap_object) + + @property + def ap_published_at(self) -> datetime | None: + # TODO: default to None? or now()? + if "published" in self.ap_object: + return isoparse(self.ap_object["published"]) + elif "created" in self.ap_object: + return isoparse(self.ap_object["created"]) + return None + + @property + def actor(self) -> Actor: + raise NotImplementedError() + + @property + def visibility(self) -> ap.VisibilityEnum: + return ap.object_visibility(self.ap_object) + + @property + def context(self) -> str | None: + return self.ap_object.get("context") + + @property + def sensitive(self) -> bool: + return self.ap_object.get("sensitive", False) + + @property + def attachments(self) -> list["Attachment"]: + attachments = [ + Attachment.parse_obj(obj) for obj in self.ap_object.get("attachment", []) + ] + + # Also add any video Link (for PeerTube compat) + if self.ap_type == "Video": + for link in ap.as_list(self.ap_object.get("url", [])): + if (isinstance(link, dict)) and link.get("type") == "Link": + if link.get("mediaType", "").startswith("video"): + attachments.append( + Attachment( + type="Video", + mediaType=link["mediaType"], + url=link["href"], + ) + ) + break + + return attachments + + @property + def url(self) -> str | None: + obj_url = self.ap_object.get("url") + if isinstance(obj_url, str): + return obj_url + elif obj_url: + for u in ap.as_list(obj_url): + if u["mediaType"] == "text/html": + return u["href"] + + return None + + @property + def content(self) -> str | None: + content = self.ap_object.get("content") + if not content: + return None + + # PeerTube returns the content as markdown + if self.ap_object.get("mediaType") == "text/markdown": + return markdown(content, extensions=["mdx_linkify"]) + + return content + + @property + def permalink_id(self) -> str: + return ( + "permalink-" + + hashlib.md5( + self.ap_id.encode(), + usedforsecurity=False, + ).hexdigest() + ) + + @property + def activity_object_ap_id(self) -> str | None: + if "object" in self.ap_object: + return ap.get_id(self.ap_object["object"]) + + return None + + @property + def in_reply_to(self) -> str | None: + return self.ap_object.get("inReplyTo") + + +def _to_camel(string: str) -> str: + cased = "".join(word.capitalize() for word in string.split("_")) + return cased[0:1].lower() + cased[1:] + + +class BaseModel(pydantic.BaseModel): + class Config: + alias_generator = _to_camel + + +class Attachment(BaseModel): + type: str + media_type: str + name: str | None + url: str + + +class RemoteObject(Object): + def __init__(self, raw_object: ap.RawObject, actor: Actor | None = None): + self._raw_object = raw_object + self._actor: Actor + + # Pre-fetch the actor + actor_id = ap.get_actor_id(raw_object) + if actor_id == LOCAL_ACTOR.ap_id: + self._actor = LOCAL_ACTOR + elif actor: + if actor.ap_id != actor_id: + raise ValueError( + f"Invalid actor, got {actor.ap_id}, " f"expected {actor_id}" + ) + self._actor = actor + else: + self._actor = RemoteActor( + ap_actor=ap.fetch(ap.get_actor_id(raw_object)), + ) + + self._og_meta = None + if self.ap_type == "Note": + self._og_meta = opengraph.og_meta_from_note(self._raw_object) + + @property + def og_meta(self) -> list[dict[str, Any]] | None: + if self._og_meta: + return [og_meta.dict() for og_meta in self._og_meta] + return None + + @property + def ap_object(self) -> ap.RawObject: + return self._raw_object + + @property + def 
actor(self) -> Actor: + return self._actor diff --git a/app/boxes.py b/app/boxes.py new file mode 100644 index 0000000..ecd48bb --- /dev/null +++ b/app/boxes.py @@ -0,0 +1,684 @@ +"""Actions related to the AP inbox/outbox.""" +import uuid +from urllib.parse import urlparse + +import httpx +from dateutil.parser import isoparse +from loguru import logger +from sqlalchemy.exc import IntegrityError +from sqlalchemy.orm import Session +from sqlalchemy.orm import joinedload + +from app import activitypub as ap +from app import config +from app import models +from app.actor import LOCAL_ACTOR +from app.actor import RemoteActor +from app.actor import fetch_actor +from app.actor import save_actor +from app.ap_object import RemoteObject +from app.config import BASE_URL +from app.config import ID +from app.database import now +from app.process_outgoing_activities import new_outgoing_activity +from app.source import markdownify + + +def allocate_outbox_id() -> str: + return uuid.uuid4().hex + + +def outbox_object_id(outbox_id) -> str: + return f"{BASE_URL}/o/{outbox_id}" + + +def save_outbox_object( + db: Session, + public_id: str, + raw_object: ap.RawObject, + relates_to_inbox_object_id: int | None = None, + relates_to_outbox_object_id: int | None = None, + source: str | None = None, +) -> models.OutboxObject: + ra = RemoteObject(raw_object) + + outbox_object = models.OutboxObject( + public_id=public_id, + ap_type=ra.ap_type, + ap_id=ra.ap_id, + ap_context=ra.context, + ap_object=ra.ap_object, + visibility=ra.visibility, + og_meta=ra.og_meta, + relates_to_inbox_object_id=relates_to_inbox_object_id, + relates_to_outbox_object_id=relates_to_outbox_object_id, + activity_object_ap_id=ra.activity_object_ap_id, + is_hidden_from_homepage=True if ra.in_reply_to else False, + ) + db.add(outbox_object) + db.commit() + db.refresh(outbox_object) + + return outbox_object + + +def send_like(db: Session, ap_object_id: str) -> None: + inbox_object = get_inbox_object_by_ap_id(db, ap_object_id) + if not inbox_object: + raise ValueError(f"{ap_object_id} not found in the inbox") + + like_id = allocate_outbox_id() + like = { + "@context": ap.AS_CTX, + "id": outbox_object_id(like_id), + "type": "Like", + "actor": ID, + "object": ap_object_id, + } + outbox_object = save_outbox_object( + db, like_id, like, relates_to_inbox_object_id=inbox_object.id + ) + if not outbox_object.id: + raise ValueError("Should never happen") + + inbox_object.liked_via_outbox_object_ap_id = outbox_object.ap_id + db.commit() + + new_outgoing_activity(db, inbox_object.actor.inbox_url, outbox_object.id) + + +def send_announce(db: Session, ap_object_id: str) -> None: + inbox_object = get_inbox_object_by_ap_id(db, ap_object_id) + if not inbox_object: + raise ValueError(f"{ap_object_id} not found in the inbox") + + announce_id = allocate_outbox_id() + announce = { + "@context": ap.AS_CTX, + "id": outbox_object_id(announce_id), + "type": "Announce", + "actor": ID, + "object": ap_object_id, + "to": [ap.AS_PUBLIC], + "cc": [ + f"{BASE_URL}/followers", + inbox_object.ap_actor_id, + ], + } + outbox_object = save_outbox_object( + db, announce_id, announce, relates_to_inbox_object_id=inbox_object.id + ) + if not outbox_object.id: + raise ValueError("Should never happen") + + inbox_object.announced_via_outbox_object_ap_id = outbox_object.ap_id + db.commit() + + recipients = _compute_recipients(db, announce) + for rcp in recipients: + new_outgoing_activity(db, rcp, outbox_object.id) + + +def send_follow(db: Session, ap_actor_id: str) -> None: + actor = 
fetch_actor(db, ap_actor_id) + + follow_id = allocate_outbox_id() + follow = { + "@context": ap.AS_CTX, + "id": outbox_object_id(follow_id), + "type": "Follow", + "actor": ID, + "object": ap_actor_id, + } + + outbox_object = save_outbox_object(db, follow_id, follow) + if not outbox_object.id: + raise ValueError("Should never happen") + + new_outgoing_activity(db, actor.inbox_url, outbox_object.id) + + +def send_undo(db: Session, ap_object_id: str) -> None: + outbox_object_to_undo = get_outbox_object_by_ap_id(db, ap_object_id) + if not outbox_object_to_undo: + raise ValueError(f"{ap_object_id} not found in the outbox") + + if outbox_object_to_undo.ap_type not in ["Follow", "Like", "Announce"]: + raise ValueError( + f"Cannot build Undo for {outbox_object_to_undo.ap_type} activity" + ) + + undo_id = allocate_outbox_id() + undo = { + "@context": ap.AS_CTX, + "id": outbox_object_id(undo_id), + "type": "Undo", + "actor": ID, + "object": ap.remove_context(outbox_object_to_undo.ap_object), + } + + outbox_object = save_outbox_object( + db, + undo_id, + undo, + relates_to_outbox_object_id=outbox_object_to_undo.id, + ) + if not outbox_object.id: + raise ValueError("Should never happen") + + outbox_object_to_undo.undone_by_outbox_object_id = outbox_object.id + + if outbox_object_to_undo.ap_type == "Follow": + if not outbox_object_to_undo.activity_object_ap_id: + raise ValueError("Should never happen") + followed_actor = fetch_actor(db, outbox_object_to_undo.activity_object_ap_id) + new_outgoing_activity( + db, + followed_actor.inbox_url, + outbox_object.id, + ) + # Also remove the follow from the following collection + db.query(models.Following).filter( + models.Following.ap_actor_id == followed_actor.ap_id + ).delete() + db.commit() + elif outbox_object_to_undo.ap_type == "Like": + liked_object_ap_id = outbox_object_to_undo.activity_object_ap_id + if not liked_object_ap_id: + raise ValueError("Should never happen") + liked_object = get_inbox_object_by_ap_id(db, liked_object_ap_id) + if not liked_object: + raise ValueError(f"Cannot find liked object {liked_object_ap_id}") + liked_object.liked_via_outbox_object_ap_id = None + + # Send the Undo to the liked object's actor + new_outgoing_activity( + db, + liked_object.actor.inbox_url, # type: ignore + outbox_object.id, + ) + elif outbox_object_to_undo.ap_type == "Announce": + announced_object_ap_id = outbox_object_to_undo.activity_object_ap_id + if not announced_object_ap_id: + raise ValueError("Should never happen") + announced_object = get_inbox_object_by_ap_id(db, announced_object_ap_id) + if not announced_object: + raise ValueError(f"Cannot find announced object {announced_object_ap_id}") + announced_object.announced_via_outbox_object_ap_id = None + + # Send the Undo to the original recipients + recipients = _compute_recipients(db, outbox_object.ap_object) + for rcp in recipients: + new_outgoing_activity(db, rcp, outbox_object.id) + else: + raise ValueError("Should never happen") + + +def send_create(db: Session, source: str) -> str: + note_id = allocate_outbox_id() + published = now().replace(microsecond=0).isoformat().replace("+00:00", "Z") + context = f"{ID}/contexts/" + uuid.uuid4().hex + content, tags = markdownify(db, source) + note = { + "@context": ap.AS_CTX, + "type": "Note", + "id": outbox_object_id(note_id), + "attributedTo": ID, + "content": content, + "to": [ap.AS_PUBLIC], + "cc": [f"{BASE_URL}/followers"], + "published": published, + "context": context, + "conversation": context, + "url": outbox_object_id(note_id), + "tag": tags, + 
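+        # `context`/`conversation` (set above) carry a random per-thread URI
+        # (f"{ID}/contexts/" + a uuid4 hex); `conversation` appears to be the
+        # OStatus-era alias for the same value, and replies are expected to
+        # reuse it so clients can thread them.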
"summary": None, + "inReplyTo": None, + "sensitive": False, + } + outbox_object = save_outbox_object(db, note_id, note, source=source) + if not outbox_object.id: + raise ValueError("Should never happen") + + for tag in tags: + if tag["type"] == "Hashtag": + tagged_object = models.TaggedOutboxObject( + tag=tag["name"][1:], + outbox_object_id=outbox_object.id, + ) + db.add(tagged_object) + db.commit() + + recipients = _compute_recipients(db, note) + for rcp in recipients: + new_outgoing_activity(db, rcp, outbox_object.id) + + return note_id + + +def _compute_recipients(db: Session, ap_object: ap.RawObject) -> set[str]: + _recipients = [] + for field in ["to", "cc", "bto", "bcc"]: + if field in ap_object: + _recipients.extend(ap.as_list(ap_object[field])) + + recipients = set() + for r in _recipients: + if r in [ap.AS_PUBLIC, ID]: + continue + + # If we got a local collection, assume it's a collection of actors + if r.startswith(BASE_URL): + for raw_actor in fetch_collection(db, r): + actor = RemoteActor(raw_actor) + recipients.add(actor.shared_inbox_url or actor.inbox_url) + + continue + + # Is it a known actor? + known_actor = ( + db.query(models.Actor).filter(models.Actor.ap_id == r).one_or_none() + ) + if known_actor: + recipients.add(known_actor.shared_inbox_url or actor.inbox_url) + continue + + # Fetch the object + raw_object = ap.fetch(r) + if raw_object.get("type") in ap.ACTOR_TYPES: + saved_actor = save_actor(db, raw_object) + recipients.add(saved_actor.shared_inbox_url or saved_actor.inbox_url) + else: + # Assume it's a collection of actors + for raw_actor in ap.parse_collection(payload=raw_object): + actor = RemoteActor(raw_actor) + recipients.add(actor.shared_inbox_url or actor.inbox_url) + + return recipients + + +def get_inbox_object_by_ap_id(db: Session, ap_id: str) -> models.InboxObject | None: + return ( + db.query(models.InboxObject) + .filter(models.InboxObject.ap_id == ap_id) + .one_or_none() + ) + + +def get_outbox_object_by_ap_id(db: Session, ap_id: str) -> models.OutboxObject | None: + return ( + db.query(models.OutboxObject) + .filter(models.OutboxObject.ap_id == ap_id) + .one_or_none() + ) + + +def _handle_delete_activity( + db: Session, + from_actor: models.Actor, + ap_object_to_delete: models.InboxObject, +) -> None: + if from_actor.ap_id != ap_object_to_delete.actor.ap_id: + logger.warning( + "Actor mismatch between the activity and the object: " + f"{from_actor.ap_id}/{ap_object_to_delete.actor.ap_id}" + ) + return + + # TODO(ts): do we need to delete related activities? should we keep + # bookmarked objects with a deleted flag? 
+ logger.info(f"Deleting {ap_object_to_delete.ap_type}/{ap_object_to_delete.ap_id}") + db.delete(ap_object_to_delete) + db.flush() + + +def _handle_follow_follow_activity( + db: Session, + from_actor: models.Actor, + inbox_object: models.InboxObject, +) -> None: + follower = models.Follower( + actor_id=from_actor.id, + inbox_object_id=inbox_object.id, + ap_actor_id=from_actor.ap_id, + ) + try: + db.add(follower) + db.flush() + except IntegrityError: + pass # TODO update the existing followe + + # Reply with an Accept + reply_id = allocate_outbox_id() + reply = { + "@context": ap.AS_CTX, + "id": outbox_object_id(reply_id), + "type": "Accept", + "actor": ID, + "object": inbox_object.ap_id, + } + outbox_activity = save_outbox_object(db, reply_id, reply) + if not outbox_activity.id: + raise ValueError("Should never happen") + new_outgoing_activity(db, from_actor.inbox_url, outbox_activity.id) + + notif = models.Notification( + notification_type=models.NotificationType.NEW_FOLLOWER, + actor_id=from_actor.id, + ) + db.add(notif) + + +def _handle_undo_activity( + db: Session, + from_actor: models.Actor, + undo_activity: models.InboxObject, + ap_activity_to_undo: models.InboxObject, +) -> None: + if from_actor.ap_id != ap_activity_to_undo.actor.ap_id: + logger.warning( + "Actor mismatch between the activity and the object: " + f"{from_actor.ap_id}/{ap_activity_to_undo.actor.ap_id}" + ) + return + + ap_activity_to_undo.undone_by_inbox_object_id = undo_activity.id + + if ap_activity_to_undo.ap_type == "Follow": + logger.info(f"Undo follow from {from_actor.ap_id}") + db.query(models.Follower).filter( + models.Follower.inbox_object_id == ap_activity_to_undo.id + ).delete() + notif = models.Notification( + notification_type=models.NotificationType.UNFOLLOW, + actor_id=from_actor.id, + ) + db.add(notif) + + elif ap_activity_to_undo.ap_type == "Like": + if not ap_activity_to_undo.activity_object_ap_id: + raise ValueError("Like without object") + liked_obj = get_outbox_object_by_ap_id( + db, + ap_activity_to_undo.activity_object_ap_id, + ) + if not liked_obj: + logger.warning( + "Cannot find liked object: " + f"{ap_activity_to_undo.activity_object_ap_id}" + ) + return + + liked_obj.likes_count = models.OutboxObject.likes_count - 1 + notif = models.Notification( + notification_type=models.NotificationType.UNDO_LIKE, + actor_id=from_actor.id, + outbox_object_id=liked_obj.id, + inbox_object_id=ap_activity_to_undo.id, + ) + db.add(notif) + + elif ap_activity_to_undo.ap_type == "Announce": + if not ap_activity_to_undo.activity_object_ap_id: + raise ValueError("Announce witout object") + announced_obj_ap_id = ap_activity_to_undo.activity_object_ap_id + logger.info( + f"Undo for announce {ap_activity_to_undo.ap_id}/{announced_obj_ap_id}" + ) + if announced_obj_ap_id.startswith(BASE_URL): + announced_obj_from_outbox = get_outbox_object_by_ap_id( + db, announced_obj_ap_id + ) + if announced_obj_from_outbox: + logger.info("Found in the oubox") + announced_obj_from_outbox.announces_count = ( + models.OutboxObject.announces_count - 1 + ) + notif = models.Notification( + notification_type=models.NotificationType.UNDO_ANNOUNCE, + actor_id=from_actor.id, + outbox_object_id=announced_obj_from_outbox.id, + inbox_object_id=ap_activity_to_undo.id, + ) + db.add(notif) + + # FIXME(ts): what to do with ap_activity_to_undo? flag? delete? 
+ else: + logger.warning(f"Don't know how to undo {ap_activity_to_undo.ap_type} activity") + + # commit will be perfomed in save_to_inbox + + +def _handle_create_activity( + db: Session, + from_actor: models.Actor, + created_object: models.InboxObject, +) -> None: + logger.info("Processing Create activity") + tags = created_object.ap_object.get("tag") + + if not tags: + logger.info("No tags to process") + return None + + if not isinstance(tags, list): + logger.info(f"Invalid tags: {tags}") + return None + + for tag in tags: + if tag.get("name") == LOCAL_ACTOR.handle or tag.get("href") == LOCAL_ACTOR.url: + notif = models.Notification( + notification_type=models.NotificationType.MENTION, + actor_id=from_actor.id, + inbox_object_id=created_object.id, + ) + db.add(notif) + + +def save_to_inbox(db: Session, raw_object: ap.RawObject) -> None: + try: + actor = fetch_actor(db, raw_object["actor"]) + except httpx.HTTPStatusError: + logger.exception("Failed to fetch actor") + # XXX: Delete 410 when we never seen the actor + return + + ap_published_at = now() + if "published" in raw_object: + ap_published_at = isoparse(raw_object["published"]) + + ra = RemoteObject(ap.unwrap_activity(raw_object), actor=actor) + relates_to_inbox_object: models.InboxObject | None = None + relates_to_outbox_object: models.OutboxObject | None = None + if ra.activity_object_ap_id: + if ra.activity_object_ap_id.startswith(BASE_URL): + relates_to_outbox_object = get_outbox_object_by_ap_id( + db, + ra.activity_object_ap_id, + ) + else: + relates_to_inbox_object = get_inbox_object_by_ap_id( + db, + ra.activity_object_ap_id, + ) + + inbox_object = models.InboxObject( + server=urlparse(ra.ap_id).netloc, + actor_id=actor.id, + ap_actor_id=actor.ap_id, + ap_type=ra.ap_type, + ap_id=ra.ap_id, + ap_context=ra.context, + ap_published_at=ap_published_at, + ap_object=ra.ap_object, + visibility=ra.visibility, + relates_to_inbox_object_id=relates_to_inbox_object.id + if relates_to_inbox_object + else None, + relates_to_outbox_object_id=relates_to_outbox_object.id + if relates_to_outbox_object + else None, + activity_object_ap_id=ra.activity_object_ap_id, + # Hide replies from the stream + is_hidden_from_stream=True if ra.in_reply_to else False, + ) + + db.add(inbox_object) + db.flush() + db.refresh(inbox_object) + + if ra.ap_type == "Create": + _handle_create_activity(db, actor, inbox_object) + elif ra.ap_type == "Update": + pass + elif ra.ap_type == "Delete": + if relates_to_inbox_object: + _handle_delete_activity(db, actor, relates_to_inbox_object) + else: + # TODO(ts): handle delete actor + logger.info( + f"Received a Delete for an unknown object: {ra.activity_object_ap_id}" + ) + elif ra.ap_type == "Follow": + _handle_follow_follow_activity(db, actor, inbox_object) + elif ra.ap_type == "Undo": + if relates_to_inbox_object: + _handle_undo_activity(db, actor, inbox_object, relates_to_inbox_object) + else: + logger.info("Received Undo for an unknown activity") + elif ra.ap_type in ["Accept", "Reject"]: + if not relates_to_outbox_object: + logger.info( + f"Received {raw_object['type']} for an unknown activity: " + f"{ra.activity_object_ap_id}" + ) + else: + if relates_to_outbox_object.ap_type == "Follow": + following = models.Following( + actor_id=actor.id, + outbox_object_id=relates_to_outbox_object.id, + ap_actor_id=actor.ap_id, + ) + db.add(following) + else: + logger.info( + "Received an Accept for an unsupported activity: " + f"{relates_to_outbox_object.ap_type}" + ) + elif ra.ap_type == "Like": + if not relates_to_outbox_object: 
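+            # Likes are only tracked when they target one of our own outbox
+            # objects; likes of third-party objects are just logged and dropped.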
+ logger.info( + f"Received a like for an unknown activity: {ra.activity_object_ap_id}" + ) + else: + relates_to_outbox_object.likes_count = models.OutboxObject.likes_count + 1 + + notif = models.Notification( + notification_type=models.NotificationType.LIKE, + actor_id=actor.id, + outbox_object_id=relates_to_outbox_object.id, + inbox_object_id=inbox_object.id, + ) + db.add(notif) + elif raw_object["type"] == "Announce": + if relates_to_outbox_object: + # This is an announce for a local object + relates_to_outbox_object.announces_count = ( + models.OutboxObject.announces_count + 1 + ) + + notif = models.Notification( + notification_type=models.NotificationType.ANNOUNCE, + actor_id=actor.id, + outbox_object_id=relates_to_outbox_object.id, + inbox_object_id=inbox_object.id, + ) + db.add(notif) + else: + # This is announce for a maybe unknown object + if relates_to_inbox_object: + logger.info("Nothing to do, we already know about this object") + else: + # Save it as an inbox object + if not ra.activity_object_ap_id: + raise ValueError("Should never happen") + announced_raw_object = ap.fetch(ra.activity_object_ap_id) + announced_actor = fetch_actor(db, ap.get_actor_id(announced_raw_object)) + announced_object = RemoteObject(announced_raw_object, announced_actor) + announced_inbox_object = models.InboxObject( + server=urlparse(announced_object.ap_id).netloc, + actor_id=announced_actor.id, + ap_actor_id=announced_actor.ap_id, + ap_type=announced_object.ap_type, + ap_id=announced_object.ap_id, + ap_context=announced_object.context, + ap_published_at=announced_object.ap_published_at, + ap_object=announced_object.ap_object, + visibility=announced_object.visibility, + is_hidden_from_stream=True, + ) + db.add(announced_inbox_object) + db.flush() + inbox_object.relates_to_inbox_object_id = announced_inbox_object.id + elif ra.ap_type in ["Like", "Announce"]: + if not relates_to_outbox_object: + logger.info( + f"Received {ra.ap_type} for an unknown activity: " + f"{ra.activity_object_ap_id}" + ) + else: + if ra.ap_type == "Like": + # TODO(ts): notification + relates_to_outbox_object.likes_count = ( + models.OutboxObject.likes_count + 1 + ) + + notif = models.Notification( + notification_type=models.NotificationType.LIKE, + actor_id=actor.id, + outbox_object_id=relates_to_outbox_object.id, + inbox_object_id=inbox_object.id, + ) + db.add(notif) + elif raw_object["type"] == "Announce": + # TODO(ts): notification + relates_to_outbox_object.announces_count = ( + models.OutboxObject.announces_count + 1 + ) + + notif = models.Notification( + notification_type=models.NotificationType.ANNOUNCE, + actor_id=actor.id, + outbox_object_id=relates_to_outbox_object.id, + inbox_object_id=inbox_object.id, + ) + db.add(notif) + else: + raise ValueError("Should never happpen") + + else: + logger.warning(f"Received an unknown {inbox_object.ap_type} object") + + db.commit() + + +def public_outbox_objects_count(db: Session) -> int: + return ( + db.query(models.OutboxObject) + .filter( + models.OutboxObject.visibility == ap.VisibilityEnum.PUBLIC, + models.OutboxObject.is_deleted.is_(False), + ) + .count() + ) + + +def fetch_collection(db: Session, url: str) -> list[ap.RawObject]: + if url.startswith(config.BASE_URL): + if url == config.BASE_URL + "/followers": + q = db.query(models.Follower).options(joinedload(models.Follower.actor)) + return [follower.actor.ap_actor for follower in q.all()] + else: + raise ValueError(f"internal collection for {url}) not supported") + + return ap.parse_collection(url) diff --git 
a/app/config.py b/app/config.py new file mode 100644 index 0000000..934009c --- /dev/null +++ b/app/config.py @@ -0,0 +1,93 @@ +import os +from pathlib import Path + +import bcrypt +import pydantic +import tomli +from fastapi import Form +from fastapi import HTTPException +from fastapi import Request +from itsdangerous import TimedSerializer +from itsdangerous import TimestampSigner + +ROOT_DIR = Path().parent.resolve() + +_CONFIG_FILE = os.getenv("MICROBLOGPUB_CONFIG_FILE", "me.toml") + +VERSION = "2.0" +USER_AGENT = f"microblogpub/{VERSION}" +AP_CONTENT_TYPE = "application/activity+json" + + +class Config(pydantic.BaseModel): + domain: str + username: str + admin_password: bytes + name: str + summary: str + https: bool + icon_url: str + secret: str + debug: bool = False + + # Config items to make tests easier + sqlalchemy_database_url: str | None = None + key_path: str | None = None + + +def load_config() -> Config: + try: + return Config.parse_obj( + tomli.loads((ROOT_DIR / "data" / _CONFIG_FILE).read_text()) + ) + except FileNotFoundError: + raise ValueError("Please run the configuration wizard") + + +def is_activitypub_requested(req: Request) -> bool: + accept_value = req.headers.get("accept") + if not accept_value: + return False + for val in { + "application/ld+json", + "application/activity+json", + }: + if accept_value.startswith(val): + return True + + return False + + +def verify_password(pwd: str) -> bool: + return bcrypt.checkpw(pwd.encode(), CONFIG.admin_password) + + +CONFIG = load_config() +DOMAIN = CONFIG.domain +_SCHEME = "https" if CONFIG.https else "http" +ID = f"{_SCHEME}://{DOMAIN}" +USERNAME = CONFIG.username +BASE_URL = ID +DEBUG = CONFIG.debug +DB_PATH = ROOT_DIR / "data" / "microblogpub.db" +SQLALCHEMY_DATABASE_URL = CONFIG.sqlalchemy_database_url or f"sqlite:///{DB_PATH}" +KEY_PATH = ( + (ROOT_DIR / CONFIG.key_path) if CONFIG.key_path else ROOT_DIR / "data" / "key.pem" +) + + +session_serializer = TimedSerializer(CONFIG.secret, salt="microblogpub.login") +csrf_signer = TimestampSigner( + os.urandom(16).hex(), + salt=os.urandom(16).hex(), +) + + +def generate_csrf_token() -> str: + return csrf_signer.sign(os.urandom(16).hex()).decode() + + +def verify_csrf_token(csrf_token: str = Form()) -> None: + if not csrf_signer.validate(csrf_token, max_age=600): + raise HTTPException(status_code=403, detail="CSRF error") + return None diff --git a/app/database.py b/app/database.py new file mode 100644 index 0000000..3aca462 --- /dev/null +++ b/app/database.py @@ -0,0 +1,29 @@ +import datetime +from typing import Any +from typing import Generator + +from sqlalchemy import create_engine +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import Session +from sqlalchemy.orm import sessionmaker + +from app.config import SQLALCHEMY_DATABASE_URL + +engine = create_engine( + SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False} +) +SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) + +Base: Any = declarative_base() + + +def now() -> datetime.datetime: + return datetime.datetime.now(datetime.timezone.utc) + + +def get_db() -> Generator[Session, None, None]: + db = SessionLocal() + try: + yield db + finally: + db.close() diff --git a/app/highlight.py b/app/highlight.py new file mode 100644 index 0000000..1413e7d --- /dev/null +++ b/app/highlight.py @@ -0,0 +1,27 @@ +from functools import lru_cache + +from bs4 import BeautifulSoup # type: ignore +from pygments import highlight as phighlight # type: ignore +from 
pygments.formatters import HtmlFormatter # type: ignore +from pygments.lexers import guess_lexer # type: ignore + +_FORMATTER = HtmlFormatter(style="vim") + +HIGHLIGHT_CSS = _FORMATTER.get_style_defs() + + +@lru_cache(256) +def highlight(html: str) -> str: + soup = BeautifulSoup(html, "html5lib") + for code in soup.find_all("code"): + if not code.parent.name == "pre": + continue + lexer = guess_lexer(code.text) + tag = BeautifulSoup( + phighlight(code.text, lexer, _FORMATTER), "html5lib" + ).body.next + pre = code.parent + pre.replaceWith(tag) + out = soup.body + out.name = "div" + return str(out) diff --git a/app/httpsig.py b/app/httpsig.py new file mode 100644 index 0000000..8bc9303 --- /dev/null +++ b/app/httpsig.py @@ -0,0 +1,182 @@ +"""Implements HTTP signature for Flask requests. + +Mastodon instances won't accept requests that are not signed using this scheme. + +""" +import base64 +import hashlib +import typing +from dataclasses import dataclass +from datetime import datetime +from functools import lru_cache +from typing import Any +from typing import Dict +from typing import Optional + +import fastapi +import httpx +from Crypto.Hash import SHA256 +from Crypto.Signature import PKCS1_v1_5 +from loguru import logger + +from app import config +from app.key import Key +from app.key import get_key + + +def _build_signed_string( + signed_headers: str, method: str, path: str, headers: Any, body_digest: str | None +) -> str: + out = [] + for signed_header in signed_headers.split(" "): + if signed_header == "(request-target)": + out.append("(request-target): " + method.lower() + " " + path) + elif signed_header == "digest" and body_digest: + out.append("digest: " + body_digest) + else: + out.append(signed_header + ": " + headers[signed_header]) + return "\n".join(out) + + +def _parse_sig_header(val: Optional[str]) -> Optional[Dict[str, str]]: + if not val: + return None + out = {} + for data in val.split(","): + k, v = data.split("=", 1) + out[k] = v[1 : len(v) - 1] # noqa: black conflict + return out + + +def _verify_h(signed_string, signature, pubkey): + signer = PKCS1_v1_5.new(pubkey) + digest = SHA256.new() + digest.update(signed_string.encode("utf-8")) + return signer.verify(digest, signature) + + +def _body_digest(body: bytes) -> str: + h = hashlib.new("sha256") + h.update(body) # type: ignore + return "SHA-256=" + base64.b64encode(h.digest()).decode("utf-8") + + +@lru_cache(32) +def _get_public_key(key_id: str) -> Key: + from app import activitypub as ap + + actor = ap.fetch(key_id) + if actor["type"] == "Key": + # The Key is not embedded in the Person + k = Key(actor["owner"], actor["id"]) + k.load_pub(actor["publicKeyPem"]) + else: + k = Key(actor["id"], actor["publicKey"]["id"]) + k.load_pub(actor["publicKey"]["publicKeyPem"]) + + # Ensure the right key was fetch + if key_id != k.key_id(): + raise ValueError( + f"failed to fetch requested key {key_id}: got {actor['publicKey']['id']}" + ) + + return k + + +@dataclass(frozen=True) +class HTTPSigInfo: + has_valid_signature: bool + signed_by_ap_actor_id: str | None = None + + +async def httpsig_checker( + request: fastapi.Request, +) -> HTTPSigInfo: + body = await request.body() + + hsig = _parse_sig_header(request.headers.get("Signature")) + if not hsig: + logger.info("No HTTP signature found") + return HTTPSigInfo(has_valid_signature=False) + + logger.debug(f"hsig={hsig}") + signed_string = _build_signed_string( + hsig["headers"], + request.method, + request.url.path, + request.headers, + _body_digest(body) if body else None, + ) + 
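+    # NB: _get_public_key is wrapped in lru_cache(32), so bursts of deliveries
+    # from the same actor reuse the already-fetched key instead of hitting the
+    # remote server on every request (at the cost of trusting a slightly stale
+    # key after a rotation, until the cache entry is evicted).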
+ try: + k = _get_public_key(hsig["keyId"]) + except Exception: + logger.exception(f'Failed to fetch HTTP sig key {hsig["keyId"]}') + return HTTPSigInfo(has_valid_signature=False) + + httpsig_info = HTTPSigInfo( + has_valid_signature=_verify_h( + signed_string, base64.b64decode(hsig["signature"]), k.pubkey + ), + signed_by_ap_actor_id=k.owner, + ) + logger.info(f"Valid HTTP signature for {httpsig_info.signed_by_ap_actor_id}") + return httpsig_info + + +async def enforce_httpsig( + request: fastapi.Request, + httpsig_info: HTTPSigInfo = fastapi.Depends(httpsig_checker), +) -> HTTPSigInfo: + if not httpsig_info.has_valid_signature: + logger.warning(f"Invalid HTTP sig {httpsig_info=}") + body = await request.body() + logger.info(f"{body=}") + raise fastapi.HTTPException(status_code=401, detail="Invalid HTTP sig") + + return httpsig_info + + +class HTTPXSigAuth(httpx.Auth): + def __init__(self, key: Key) -> None: + self.key = key + + def auth_flow( + self, r: httpx.Request + ) -> typing.Generator[httpx.Request, httpx.Response, None]: + logger.info(f"keyid={self.key.key_id()}") + + bodydigest = None + if r.content: + bh = hashlib.new("sha256") + bh.update(r.content) + bodydigest = "SHA-256=" + base64.b64encode(bh.digest()).decode("utf-8") + + date = datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S GMT") + r.headers["Date"] = date + if bodydigest: + r.headers["Digest"] = bodydigest + sigheaders = "(request-target) user-agent host date digest content-type" + else: + sigheaders = "(request-target) user-agent host date accept" + + to_be_signed = _build_signed_string( + sigheaders, r.method, r.url.path, r.headers, bodydigest + ) + if not self.key.privkey: + raise ValueError("Should never happen") + signer = PKCS1_v1_5.new(self.key.privkey) + digest = SHA256.new() + digest.update(to_be_signed.encode("utf-8")) + sig = base64.b64encode(signer.sign(digest)).decode() + + key_id = self.key.key_id() + sig_value = f'keyId="{key_id}",algorithm="rsa-sha256",headers="{sigheaders}",signature="{sig}"' # noqa: E501 + logger.debug(f"signed request {sig_value=}") + r.headers["Signature"] = sig_value + yield r + + +k = Key(config.ID, f"{config.ID}#main-key") +k.load(get_key()) +auth = HTTPXSigAuth(k) diff --git a/app/key.py b/app/key.py new file mode 100644 index 0000000..c0bb077 --- /dev/null +++ b/app/key.py @@ -0,0 +1,84 @@ +import base64 +from typing import Any + +from Crypto.PublicKey import RSA +from Crypto.Util import number + +from app.config import KEY_PATH + + +def key_exists() -> bool: + return KEY_PATH.exists() + + +def generate_key() -> None: + if key_exists(): + raise ValueError(f"Key at {KEY_PATH} already exists") + k = RSA.generate(2048) + privkey_pem = k.exportKey("PEM").decode("utf-8") + KEY_PATH.write_text(privkey_pem) + + +def get_pubkey_as_pem() -> str: + text = KEY_PATH.read_text() + return RSA.import_key(text).public_key().export_key("PEM").decode("utf-8") + + +def get_key() -> str: + return KEY_PATH.read_text() + + +class Key(object): + DEFAULT_KEY_SIZE = 2048 + + def __init__(self, owner: str, id_: str | None = None) -> None: + self.owner = owner + self.privkey_pem: str | None = None + self.pubkey_pem: str | None = None + self.privkey: RSA.RsaKey | None = None + self.pubkey: RSA.RsaKey | None = None + self.id_ = id_ + + def load_pub(self, pubkey_pem: str) -> None: + self.pubkey_pem = pubkey_pem + self.pubkey = RSA.importKey(pubkey_pem) + + def load(self, privkey_pem: str) -> None: + self.privkey_pem = privkey_pem + self.privkey = RSA.importKey(self.privkey_pem) + self.pubkey_pem = 
self.privkey.publickey().exportKey("PEM").decode("utf-8") + + def new(self) -> None: + k = RSA.generate(self.DEFAULT_KEY_SIZE) + self.privkey_pem = k.exportKey("PEM").decode("utf-8") + self.pubkey_pem = k.publickey().exportKey("PEM").decode("utf-8") + self.privkey = k + + def key_id(self) -> str: + return self.id_ or f"{self.owner}#main-key" + + def to_dict(self) -> dict[str, Any]: + return { + "id": self.key_id(), + "owner": self.owner, + "publicKeyPem": self.pubkey_pem, + "type": "Key", + } + + @classmethod + def from_dict(cls, data): + try: + k = cls(data["owner"], data["id"]) + k.load_pub(data["publicKeyPem"]) + except KeyError: + raise ValueError(f"bad key data {data!r}") + return k + + def to_magic_key(self) -> str: + mod = base64.urlsafe_b64encode( + number.long_to_bytes(self.privkey.n) # type: ignore + ).decode("utf-8") + pubexp = base64.urlsafe_b64encode( + number.long_to_bytes(self.privkey.e) # type: ignore + ).decode("utf-8") + return f"data:application/magic-public-key,RSA.{mod}.{pubexp}" diff --git a/app/lookup.py b/app/lookup.py new file mode 100644 index 0000000..63c6d1a --- /dev/null +++ b/app/lookup.py @@ -0,0 +1,40 @@ +import mf2py # type: ignore +from sqlalchemy.orm import Session + +from app import activitypub as ap +from app import webfinger +from app.actor import Actor +from app.actor import fetch_actor +from app.ap_object import RemoteObject + + +def lookup(db: Session, query: str) -> Actor | RemoteObject: + if query.startswith("@"): + query = webfinger.get_actor_url(query) # type: ignore # None check below + + if not query: + raise ap.NotAnObjectError(query) + + try: + ap_obj = ap.fetch(query) + except ap.NotAnObjectError as not_an_object_error: + resp = not_an_object_error.resp + if not resp: + raise ap.NotAnObjectError(query) + + alternate_obj = None + if resp.headers.get("content-type", "").startswith("text/html"): + for alternate in mf2py.parse(doc=resp.text).get("alternates", []): + if alternate.get("type") == "application/activity+json": + alternate_obj = ap.fetch(alternate["url"]) + + if alternate_obj: + ap_obj = alternate_obj + else: + raise + + if ap_obj["type"] in ap.ACTOR_TYPES: + actor = fetch_actor(db, ap_obj["id"]) + return actor + else: + return RemoteObject(ap_obj) diff --git a/app/main.py b/app/main.py new file mode 100644 index 0000000..41525b3 --- /dev/null +++ b/app/main.py @@ -0,0 +1,558 @@ +import base64 +import os +import sys +import time +from datetime import datetime +from typing import Any +from typing import Type + +import httpx +from dateutil.parser import isoparse +from fastapi import Depends +from fastapi import FastAPI +from fastapi import Request +from fastapi import Response +from fastapi.exceptions import HTTPException +from fastapi.responses import PlainTextResponse +from fastapi.responses import StreamingResponse +from fastapi.staticfiles import StaticFiles +from loguru import logger +from sqlalchemy.orm import Session +from sqlalchemy.orm import joinedload +from starlette.background import BackgroundTask +from starlette.responses import JSONResponse + +from app import activitypub as ap +from app import admin +from app import config +from app import httpsig +from app import models +from app import templates +from app.actor import LOCAL_ACTOR +from app.actor import get_actors_metadata +from app.boxes import public_outbox_objects_count +from app.boxes import save_to_inbox +from app.config import BASE_URL +from app.config import DEBUG +from app.config import DOMAIN +from app.config import ID +from app.config import USER_AGENT +from 
app.config import USERNAME +from app.config import is_activitypub_requested +from app.database import get_db +from app.templates import is_current_user_admin + +# TODO(ts): +# +# Next: +# - show likes/announces counter for outbox activities +# - update actor support +# - replies support +# - file upload + place/exif extraction (or not) support +# - custom emoji support +# - hash config/profile to detect when to send Update actor +# +# - [ ] block support +# - [ ] make the media proxy authenticated +# - [ ] prevent SSRF (urlutils from little-boxes) +# - [ ] Dockerization +# - [ ] Webmentions +# - [ ] custom emoji +# - [ ] poll/questions support +# - [ ] cleanup tasks +# - notifs: +# - MENTIONED +# - LIKED +# - ANNOUNCED +# - FOLLOWED +# - UNFOLLOWED +# - POLL_ENDED + +app = FastAPI(docs_url=None, redoc_url=None) +app.mount("/static", StaticFiles(directory="app/static"), name="static") +app.include_router(admin.router, prefix="/admin") +app.include_router(admin.unauthenticated_router, prefix="/admin") + +logger.configure(extra={"request_id": "no_req_id"}) +logger.remove() +logger_format = ( + "{time:YYYY-MM-DD HH:mm:ss.SSS} | " + "{level: <8} | " + "{name}:{function}:{line} | " + "{extra[request_id]} - {message}" +) +logger.add(sys.stdout, format=logger_format) + + +@app.middleware("http") +async def request_middleware(request, call_next): + start_time = time.perf_counter() + request_id = os.urandom(8).hex() + with logger.contextualize(request_id=request_id): + logger.info( + f"{request.client.host}:{request.client.port} - " + f"{request.method} {request.url}" + ) + try: + response = await call_next(request) + response.headers["X-Request-ID"] = request_id + response.headers["Server"] = "microblogpub" + elapsed_time = time.perf_counter() - start_time + logger.info(f"status_code={response.status_code} {elapsed_time=:.2f}s") + return response + except Exception: + logger.exception("Request failed") + raise + + +@app.middleware("http") +async def add_security_headers(request: Request, call_next): + response = await call_next(request) + response.headers["referrer-policy"] = "no-referrer, strict-origin-when-cross-origin" + response.headers["x-content-type-options"] = "nosniff" + response.headers["x-xss-protection"] = "1; mode=block" + response.headers["x-frame-options"] = "SAMEORIGIN" + # TODO(ts): disallow inline CSS? 
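+    # NB: the two adjacent string literals below concatenate without a ";"
+    # separator, so the policy is sent as one long default-src directive; a
+    # "; " between them is probably what's intended.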
+ response.headers["content-security-policy"] = ( + "default-src 'self'" + " style-src 'self' 'unsafe-inline';" + ) + if not DEBUG: + response.headers[ + "strict-transport-security" + ] = "max-age=63072000; includeSubdomains" + return response + + +DEFAULT_CTX = COLLECTION_CTX = [ + "https://www.w3.org/ns/activitystreams", + "https://w3id.org/security/v1", + { + # AS ext + "Hashtag": "as:Hashtag", + "sensitive": "as:sensitive", + "manuallyApprovesFollowers": "as:manuallyApprovesFollowers", + # toot + "toot": "http://joinmastodon.org/ns#", + # "featured": "toot:featured", + # schema + "schema": "http://schema.org#", + "PropertyValue": "schema:PropertyValue", + "value": "schema:value", + }, +] + + +class ActivityPubResponse(JSONResponse): + media_type = "application/activity+json" + + +@app.get("/") +def index( + request: Request, + db: Session = Depends(get_db), + _: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker), +) -> templates.TemplateResponse | ActivityPubResponse: + if is_activitypub_requested(request): + return ActivityPubResponse(LOCAL_ACTOR.ap_actor) + + outbox_objects = ( + db.query(models.OutboxObject) + .filter( + models.OutboxObject.visibility == ap.VisibilityEnum.PUBLIC, + models.OutboxObject.is_deleted.is_(False), + models.OutboxObject.is_hidden_from_homepage.is_(False), + ) + .order_by(models.OutboxObject.ap_published_at.desc()) + .limit(20) + .all() + ) + + return templates.render_template( + db, + request, + "index.html", + {"request": request, "objects": outbox_objects}, + ) + + +def _build_followx_collection( + db: Session, + model_cls: Type[models.Following | models.Follower], + path: str, + page: bool | None, + next_cursor: str | None, +) -> ap.RawObject: + total_items = db.query(model_cls).count() + + if not page and not next_cursor: + return { + "@context": ap.AS_CTX, + "id": ID + path, + "first": ID + path + "?page=true", + "type": "OrderedCollection", + "totalItems": total_items, + } + + q = db.query(model_cls).order_by(model_cls.created_at.desc()) # type: ignore + if next_cursor: + q = q.filter(model_cls.created_at < _decode_cursor(next_cursor)) # type: ignore + q = q.limit(20) + + items = [followx for followx in q.all()] + next_cursor = None + if ( + items + and db.query(model_cls) + .filter(model_cls.created_at < items[-1].created_at) + .count() + > 0 + ): + next_cursor = _encode_cursor(items[-1].created_at) + + collection_page = { + "@context": ap.AS_CTX, + "id": ( + ID + path + "?page=true" + if not next_cursor + else ID + path + f"?next_cursor={next_cursor}" + ), + "partOf": ID + path, + "type": "OrderedCollectionPage", + "orderedItems": [item.ap_actor_id for item in items], + } + if next_cursor: + collection_page["next"] = ID + path + f"?next_cursor={next_cursor}" + + return collection_page + + +def _encode_cursor(val: datetime) -> str: + return base64.urlsafe_b64encode(val.isoformat().encode()).decode() + + +def _decode_cursor(cursor: str) -> datetime: + return isoparse(base64.urlsafe_b64decode(cursor).decode()) + + +@app.get("/followers") +def followers( + request: Request, + page: bool | None = None, + next_cursor: str | None = None, + prev_cursor: str | None = None, + db: Session = Depends(get_db), + _: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker), +) -> ActivityPubResponse | templates.TemplateResponse: + if is_activitypub_requested(request): + return ActivityPubResponse( + _build_followx_collection( + db=db, + model_cls=models.Follower, + path="/followers", + page=page, + next_cursor=next_cursor, + ) + ) + + followers = ( + 
db.query(models.Follower) + .options(joinedload(models.Follower.actor)) + .order_by(models.Follower.created_at.desc()) + .limit(20) + .all() + ) + + # TODO: support next_cursor/prev_cursor + actors_metadata = {} + if is_current_user_admin(request): + actors_metadata = get_actors_metadata( + db, + [f.actor for f in followers], + ) + + return templates.render_template( + db, + request, + "followers.html", + { + "followers": followers, + "actors_metadata": actors_metadata, + }, + ) + + +@app.get("/following") +def following( + request: Request, + page: bool | None = None, + next_cursor: str | None = None, + prev_cursor: str | None = None, + db: Session = Depends(get_db), + _: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker), +) -> ActivityPubResponse | templates.TemplateResponse: + if is_activitypub_requested(request): + return ActivityPubResponse( + _build_followx_collection( + db=db, + model_cls=models.Following, + path="/following", + page=page, + next_cursor=next_cursor, + ) + ) + + q = ( + db.query(models.Following) + .options(joinedload(models.Following.actor)) + .order_by(models.Following.created_at.desc()) + .limit(20) + ) + following = q.all() + + # TODO: support next_cursor/prev_cursor + actors_metadata = {} + if is_current_user_admin(request): + actors_metadata = get_actors_metadata( + db, + [f.actor for f in following], + ) + + return templates.render_template( + db, + request, + "following.html", + { + "following": following, + "actors_metadata": actors_metadata, + }, + ) + + +@app.get("/outbox") +def outbox( + db: Session = Depends(get_db), + _: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker), +) -> ActivityPubResponse: + outbox_objects = ( + db.query(models.OutboxObject) + .filter( + models.OutboxObject.visibility == ap.VisibilityEnum.PUBLIC, + models.OutboxObject.is_deleted.is_(False), + ) + .order_by(models.OutboxObject.ap_published_at.desc()) + .limit(20) + .all() + ) + return ActivityPubResponse( + { + "@context": DEFAULT_CTX, + "id": f"{ID}/outbox", + "type": "OrderedCollection", + "totalItems": len(outbox_objects), + "orderedItems": [ + ap.remove_context(ap.wrap_object_if_needed(a.ap_object)) + for a in outbox_objects + ], + } + ) + + +@app.get("/o/{public_id}") +def outbox_by_public_id( + public_id: str, + request: Request, + db: Session = Depends(get_db), + _: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker), +) -> ActivityPubResponse | templates.TemplateResponse: + # TODO: ACL? + maybe_object = ( + db.query(models.OutboxObject) + .filter( + models.OutboxObject.public_id == public_id, + # models.OutboxObject.is_deleted.is_(False), + ) + .one_or_none() + ) + if not maybe_object: + raise HTTPException(status_code=404) + # + if is_activitypub_requested(request): + return ActivityPubResponse(maybe_object.ap_object) + + return templates.render_template( + db, + request, + "object.html", + { + "outbox_object": maybe_object, + }, + ) + + +@app.get("/o/{public_id}/activity") +def outbox_activity_by_public_id( + public_id: str, + db: Session = Depends(get_db), + _: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker), +) -> ActivityPubResponse: + # TODO: ACL? 
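+    # Unlike /o/{public_id} above, this endpoint serves the object wrapped
+    # in its activity (via ap.wrap_object below).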
+    maybe_object = (
+        db.query(models.OutboxObject)
+        .filter(models.OutboxObject.public_id == public_id)
+        .one_or_none()
+    )
+    if not maybe_object:
+        raise HTTPException(status_code=404)
+
+    return ActivityPubResponse(ap.wrap_object(maybe_object.ap_object))
+
+
+@app.get("/t/{tag}")
+def tag_by_name(
+    tag: str,
+    request: Request,
+    db: Session = Depends(get_db),
+    _: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker),
+) -> ActivityPubResponse | templates.TemplateResponse:
+    # TODO(ts): implement HTML version
+    # if is_activitypub_requested(request):
+    return ActivityPubResponse(
+        {
+            "@context": ap.AS_CTX,
+            "id": BASE_URL + f"/t/{tag}",
+            "type": "OrderedCollection",
+            "totalItems": 0,
+            "orderedItems": [],
+        }
+    )
+
+
+@app.post("/inbox")
+async def inbox(
+    request: Request,
+    db: Session = Depends(get_db),
+    httpsig_info: httpsig.HTTPSigInfo = Depends(httpsig.enforce_httpsig),
+) -> Response:
+    logger.info(f"headers={request.headers}")
+    payload = await request.json()
+    logger.info(f"{payload=}")
+    save_to_inbox(db, payload)
+    return Response(status_code=204)
+
+
+@app.get("/.well-known/webfinger")
+def wellknown_webfinger(resource: str) -> JSONResponse:
+    """Exposes/serves WebFinger data."""
+    if resource not in [f"acct:{USERNAME}@{DOMAIN}", ID]:
+        raise HTTPException(status_code=404)
+
+    out = {
+        "subject": f"acct:{USERNAME}@{DOMAIN}",
+        "aliases": [ID],
+        "links": [
+            {
+                "rel": "http://webfinger.net/rel/profile-page",
+                "type": "text/html",
+                "href": ID,
+            },
+            {"rel": "self", "type": "application/activity+json", "href": ID},
+            {
+                "rel": "http://ostatus.org/schema/1.0/subscribe",
+                "template": BASE_URL + "/authorize_interaction?uri={uri}",
+            },
+        ],
+    }
+
+    return JSONResponse(out, media_type="application/jrd+json; charset=utf-8")
+
+
+@app.get("/.well-known/nodeinfo")
+async def well_known_nodeinfo() -> dict[str, Any]:
+    return {
+        "links": [
+            {
+                "rel": "http://nodeinfo.diaspora.software/ns/schema/2.1",
+                "href": f"{BASE_URL}/nodeinfo",
+            }
+        ]
+    }
+
+
+@app.get("/nodeinfo")
+def nodeinfo(
+    db: Session = Depends(get_db),
+):
+    local_posts = public_outbox_objects_count(db)
+    return JSONResponse(
+        {
+            "version": "2.1",
+            "software": {
+                "name": "microblogpub",
+                "version": config.VERSION,
+                "repository": "https://github.com/tsileo/microblog.pub",
+            },
+            "protocols": ["activitypub"],
+            "services": {"inbound": [], "outbound": []},
+            "openRegistrations": False,
+            "usage": {"users": {"total": 1}, "localPosts": local_posts},
+            "metadata": {
+                "nodeName": LOCAL_ACTOR.handle,
+            },
+        },
+        media_type=(
+            "application/json; "
+            "profile=http://nodeinfo.diaspora.software/ns/schema/2.1#"
+        ),
+    )
+
+
+proxy_client = httpx.AsyncClient()
+
+
+@app.get("/proxy/media/{encoded_url}")
+async def serve_proxy_media(request: Request, encoded_url: str) -> StreamingResponse:
+    # Decode the base64-encoded URL
+    url = base64.urlsafe_b64decode(encoded_url).decode()
+    # Request the URL (and filter request headers)
+    proxy_req = proxy_client.build_request(
+        request.method,
+        url,
+        headers=[
+            (k, v)
+            for (k, v) in request.headers.raw
+            if k.lower()
+            not in [b"host", b"cookie", b"x-forwarded-for", b"x-real-ip", b"user-agent"]
+        ]
+        + [(b"user-agent", USER_AGENT.encode())],
+    )
+    proxy_resp = await proxy_client.send(proxy_req, stream=True)
+    # Filter the headers
+    proxy_resp_headers = [
+        (k, v)
+        for (k, v) in proxy_resp.headers.items()
+        if k.lower()
+        in [
+            "content-length",
+            "content-type",
+            "content-range",
+            "accept-ranges",
+            "etag",
+            "cache-control",
+            "expires",
+            "date",
"last-modified", + ] + ] + return StreamingResponse( + proxy_resp.aiter_raw(), + status_code=proxy_resp.status_code, + headers=dict(proxy_resp_headers), + background=BackgroundTask(proxy_resp.aclose), + ) + + +@app.get("/robots.txt", response_class=PlainTextResponse) +async def robots_file(): + return """User-agent: * +Disallow: /followers +Disallow: /following +Disallow: /admin""" diff --git a/app/models.py b/app/models.py new file mode 100644 index 0000000..c1a4ae4 --- /dev/null +++ b/app/models.py @@ -0,0 +1,288 @@ +import enum +from typing import Any +from typing import Optional + +from sqlalchemy import JSON +from sqlalchemy import Boolean +from sqlalchemy import Column +from sqlalchemy import DateTime +from sqlalchemy import Enum +from sqlalchemy import ForeignKey +from sqlalchemy import Integer +from sqlalchemy import String +from sqlalchemy import UniqueConstraint +from sqlalchemy.orm import Mapped +from sqlalchemy.orm import relationship + +from app import activitypub as ap +from app.actor import LOCAL_ACTOR +from app.actor import Actor as BaseActor +from app.ap_object import Object as BaseObject +from app.database import Base +from app.database import now + + +class Actor(Base, BaseActor): + __tablename__ = "actors" + + id = Column(Integer, primary_key=True, index=True) + created_at = Column(DateTime(timezone=True), nullable=False, default=now) + updated_at = Column(DateTime(timezone=True), nullable=False, default=now) + + ap_id = Column(String, unique=True, nullable=False, index=True) + ap_actor: Mapped[ap.RawObject] = Column(JSON, nullable=False) + ap_type = Column(String, nullable=False) + + handle = Column(String, nullable=True, index=True) + + @property + def is_from_db(self) -> bool: + return True + + +class InboxObject(Base, BaseObject): + __tablename__ = "inbox" + + id = Column(Integer, primary_key=True, index=True) + created_at = Column(DateTime(timezone=True), nullable=False, default=now) + updated_at = Column(DateTime(timezone=True), nullable=False, default=now) + + actor_id = Column(Integer, ForeignKey("actors.id"), nullable=False) + actor: Mapped[Actor] = relationship(Actor, uselist=False) + + server = Column(String, nullable=False) + + is_hidden_from_stream = Column(Boolean, nullable=False, default=False) + + ap_actor_id = Column(String, nullable=False) + ap_type = Column(String, nullable=False) + ap_id = Column(String, nullable=False, unique=True, index=True) + ap_context = Column(String, nullable=True) + ap_published_at = Column(DateTime(timezone=True), nullable=False) + ap_object: Mapped[ap.RawObject] = Column(JSON, nullable=False) + + activity_object_ap_id = Column(String, nullable=True) + + visibility = Column(Enum(ap.VisibilityEnum), nullable=False) + + # Used for Like, Announce and Undo activities + relates_to_inbox_object_id = Column( + Integer, + ForeignKey("inbox.id"), + nullable=True, + ) + relates_to_inbox_object: Mapped[Optional["InboxObject"]] = relationship( + "InboxObject", + foreign_keys=relates_to_inbox_object_id, + remote_side=id, + uselist=False, + ) + relates_to_outbox_object_id = Column( + Integer, + ForeignKey("outbox.id"), + nullable=True, + ) + relates_to_outbox_object: Mapped[Optional["OutboxObject"]] = relationship( + "OutboxObject", + foreign_keys=[relates_to_outbox_object_id], + uselist=False, + ) + + undone_by_inbox_object_id = Column(Integer, ForeignKey("inbox.id"), nullable=True) + + # Link the oubox AP ID to allow undo without any extra query + liked_via_outbox_object_ap_id = Column(String, nullable=True) + 
announced_via_outbox_object_ap_id = Column(String, nullable=True) + + is_bookmarked = Column(Boolean, nullable=False, default=False) + + # FIXME(ts): do we need this? + has_replies = Column(Boolean, nullable=False, default=False) + + og_meta: Mapped[list[dict[str, Any]] | None] = Column(JSON, nullable=True) + + +class OutboxObject(Base, BaseObject): + __tablename__ = "outbox" + + id = Column(Integer, primary_key=True, index=True) + created_at = Column(DateTime(timezone=True), nullable=False, default=now) + updated_at = Column(DateTime(timezone=True), nullable=False, default=now) + + is_hidden_from_homepage = Column(Boolean, nullable=False, default=False) + + public_id = Column(String, nullable=False, index=True) + + ap_type = Column(String, nullable=False) + ap_id = Column(String, nullable=False, unique=True, index=True) + ap_context = Column(String, nullable=True) + ap_object: Mapped[ap.RawObject] = Column(JSON, nullable=False) + + activity_object_ap_id = Column(String, nullable=True) + + # Source content for activities (like Notes) + source = Column(String, nullable=True) + + ap_published_at = Column(DateTime(timezone=True), nullable=False, default=now) + visibility = Column(Enum(ap.VisibilityEnum), nullable=False) + + likes_count = Column(Integer, nullable=False, default=0) + announces_count = Column(Integer, nullable=False, default=0) + replies_count = Column(Integer, nullable=False, default=0) + + webmentions = Column(JSON, nullable=True) + + og_meta: Mapped[list[dict[str, Any]] | None] = Column(JSON, nullable=True) + + # Never actually delete from the outbox + is_deleted = Column(Boolean, nullable=False, default=False) + + # Used for Like, Announce and Undo activities + relates_to_inbox_object_id = Column( + Integer, + ForeignKey("inbox.id"), + nullable=True, + ) + relates_to_inbox_object: Mapped[Optional["InboxObject"]] = relationship( + "InboxObject", + foreign_keys=[relates_to_inbox_object_id], + uselist=False, + ) + relates_to_outbox_object_id = Column( + Integer, + ForeignKey("outbox.id"), + nullable=True, + ) + relates_to_outbox_object: Mapped[Optional["OutboxObject"]] = relationship( + "OutboxObject", + foreign_keys=[relates_to_outbox_object_id], + remote_side=id, + uselist=False, + ) + + undone_by_outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=True) + + @property + def actor(self) -> BaseActor: + return LOCAL_ACTOR + + +class Follower(Base): + __tablename__ = "followers" + + id = Column(Integer, primary_key=True, index=True) + created_at = Column(DateTime(timezone=True), nullable=False, default=now) + updated_at = Column(DateTime(timezone=True), nullable=False, default=now) + + actor_id = Column(Integer, ForeignKey("actors.id"), nullable=False, unique=True) + actor = relationship(Actor, uselist=False) + + inbox_object_id = Column(Integer, ForeignKey("inbox.id"), nullable=False) + inbox_object = relationship(InboxObject, uselist=False) + + ap_actor_id = Column(String, nullable=False, unique=True) + + +class Following(Base): + __tablename__ = "following" + + id = Column(Integer, primary_key=True, index=True) + created_at = Column(DateTime(timezone=True), nullable=False, default=now) + updated_at = Column(DateTime(timezone=True), nullable=False, default=now) + + actor_id = Column(Integer, ForeignKey("actors.id"), nullable=False, unique=True) + actor = relationship(Actor, uselist=False) + + outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=False) + outbox_object = relationship(OutboxObject, uselist=False) + + ap_actor_id = Column(String, 
nullable=False, unique=True) + + +@enum.unique +class NotificationType(str, enum.Enum): + NEW_FOLLOWER = "new_follower" + UNFOLLOW = "unfollow" + LIKE = "like" + UNDO_LIKE = "undo_like" + ANNOUNCE = "announce" + UNDO_ANNOUNCE = "undo_announce" + + # TODO: + MENTION = "mention" + + +class Notification(Base): + __tablename__ = "notifications" + + id = Column(Integer, primary_key=True, index=True) + created_at = Column(DateTime(timezone=True), nullable=False, default=now) + notification_type = Column(Enum(NotificationType), nullable=True) + is_new = Column(Boolean, nullable=False, default=True) + + actor_id = Column(Integer, ForeignKey("actors.id"), nullable=True) + actor = relationship(Actor, uselist=False) + + outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=True) + outbox_object = relationship(OutboxObject, uselist=False) + + inbox_object_id = Column(Integer, ForeignKey("inbox.id"), nullable=True) + inbox_object = relationship(InboxObject, uselist=False) + + +class OutgoingActivity(Base): + __tablename__ = "outgoing_activities" + + id = Column(Integer, primary_key=True, index=True) + created_at = Column(DateTime(timezone=True), nullable=False, default=now) + + recipient = Column(String, nullable=False) + outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=False) + outbox_object = relationship(OutboxObject, uselist=False) + + tries = Column(Integer, nullable=False, default=0) + next_try = Column(DateTime(timezone=True), nullable=True, default=now) + + last_try = Column(DateTime(timezone=True), nullable=True) + last_status_code = Column(Integer, nullable=True) + last_response = Column(String, nullable=True) + + is_sent = Column(Boolean, nullable=False, default=False) + is_errored = Column(Boolean, nullable=False, default=False) + error = Column(String, nullable=True) + + +class TaggedOutboxObject(Base): + __tablename__ = "tagged_outbox_objects" + __table_args__ = ( + UniqueConstraint("outbox_object_id", "tag", name="uix_tagged_object"), + ) + + id = Column(Integer, primary_key=True, index=True) + + outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=False) + outbox_object = relationship(OutboxObject, uselist=False) + + tag = Column(String, nullable=False, index=True) + + +""" +class Upload(Base): + __tablename__ = "upload" + + filename = Column(String, nullable=False) + filehash = Column(String, nullable=False) + filesize = Column(Integer, nullable=False) + + +class OutboxObjectAttachment(Base): + __tablename__ = "outbox_object_attachment" + + id = Column(Integer, primary_key=True, index=True) + + outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=False) + outbox_object = relationship(OutboxObject, uselist=False) + + upload_id = Column(Integer, ForeignKey("upload.id")) + upload = relationship(Upload, uselist=False) +""" diff --git a/app/opengraph.py b/app/opengraph.py new file mode 100644 index 0000000..e28c79b --- /dev/null +++ b/app/opengraph.py @@ -0,0 +1,90 @@ +import mimetypes +import re +from urllib.parse import urlparse + +import httpx +from bs4 import BeautifulSoup # type: ignore +from pydantic import BaseModel + +from app import activitypub as ap +from app import config +from app.urlutils import is_url_valid + + +class OpenGraphMeta(BaseModel): + url: str + title: str + image: str + description: str + site_name: str + + +def _scrap_og_meta(html: str) -> OpenGraphMeta | None: + soup = BeautifulSoup(html, "html5lib") + ogs = { + og.attrs["property"]: og.attrs.get("content") + for og in 
soup.html.head.findAll(property=re.compile(r"^og"))
+    }
+    raw = {}
+    for field in OpenGraphMeta.__fields__.keys():
+        og_field = f"og:{field}"
+        if not ogs.get(og_field):
+            return None
+
+        raw[field] = ogs[og_field]
+
+    return OpenGraphMeta.parse_obj(raw)
+
+
+def _urls_from_note(note: ap.RawObject) -> set[str]:
+    note_host = urlparse(ap.get_id(note["id"]) or "").netloc
+
+    urls = set()
+    if "content" in note:
+        soup = BeautifulSoup(note["content"], "html5lib")
+        for link in soup.find_all("a"):
+            h = link.get("href")
+            ph = urlparse(h)
+            mimetype, _ = mimetypes.guess_type(h)
+            if (
+                ph.scheme in {"http", "https"}
+                and ph.netloc != note_host
+                and is_url_valid(h)
+                and (
+                    not mimetype
+                    or mimetype.split("/")[0] in ["image", "video", "audio"]
+                )
+            ):
+                urls.add(h)
+
+    return urls
+
+
+def _og_meta_from_url(url: str) -> OpenGraphMeta | None:
+    resp = httpx.get(
+        url,
+        headers={
+            "User-Agent": config.USER_AGENT,
+        },
+        follow_redirects=True,
+    )
+    resp.raise_for_status()
+
+    if not (ct := resp.headers.get("content-type")) or not ct.startswith("text/html"):
+        return None
+
+    return _scrap_og_meta(resp.text)
+
+
+def og_meta_from_note(note: ap.RawObject) -> list[OpenGraphMeta]:
+    og_meta = []
+    urls = _urls_from_note(note)
+    for url in urls:
+        try:
+            maybe_og_meta = _og_meta_from_url(url)
+            if maybe_og_meta:
+                og_meta.append(maybe_og_meta)
+        except httpx.HTTPError:
+            pass
+
+    return og_meta
diff --git a/app/process_outgoing_activities.py b/app/process_outgoing_activities.py
new file mode 100644
index 0000000..826d2aa
--- /dev/null
+++ b/app/process_outgoing_activities.py
@@ -0,0 +1,138 @@
+import email.utils
+import time
+import traceback
+from datetime import datetime
+from datetime import timedelta
+
+import httpx
+from loguru import logger
+from sqlalchemy.orm import Session
+
+from app import activitypub as ap
+from app import models
+from app.database import SessionLocal
+from app.database import now
+
+_MAX_RETRIES = 16
+
+
+def new_outgoing_activity(
+    db: Session,
+    recipient: str,
+    outbox_object_id: int,
+) -> models.OutgoingActivity:
+    outgoing_activity = models.OutgoingActivity(
+        recipient=recipient,
+        outbox_object_id=outbox_object_id,
+    )
+
+    db.add(outgoing_activity)
+    db.commit()
+    db.refresh(outgoing_activity)
+    return outgoing_activity
+
+
+def _parse_retry_after(retry_after: str) -> datetime | None:
+    try:
+        # Retry-After: 120
+        seconds = int(retry_after)
+    except ValueError:
+        # Retry-After: Wed, 21 Oct 2015 07:28:00 GMT
+        dt_tuple = email.utils.parsedate_tz(retry_after)
+        if dt_tuple is None:
+            return None
+
+        seconds = int(email.utils.mktime_tz(dt_tuple) - time.time())
+
+    return now() + timedelta(seconds=seconds)
+
+
+def _exp_backoff(tries: int) -> datetime:
+    seconds = 2 * (2 ** (tries - 1))
+    return now() + timedelta(seconds=seconds)
+
+
+def _set_next_try(
+    outgoing_activity: models.OutgoingActivity,
+    next_try: datetime | None = None,
+) -> None:
+    if not outgoing_activity.tries:
+        raise ValueError("Should never happen")
+
+    if outgoing_activity.tries == _MAX_RETRIES:
+        outgoing_activity.is_errored = True
+        outgoing_activity.next_try = None
+    else:
+        outgoing_activity.next_try = next_try or _exp_backoff(outgoing_activity.tries)
+
+
+def process_next_outgoing_activity(db: Session) -> bool:
+    q = (
+        db.query(models.OutgoingActivity)
+        .filter(
+            models.OutgoingActivity.next_try <= now(),
+            models.OutgoingActivity.is_errored.is_(False),
+            models.OutgoingActivity.is_sent.is_(False),
+        )
+        .order_by(models.OutgoingActivity.next_try)
+    )
+    q_count = q.count()
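+    # Retry schedule sketch: _exp_backoff doubles the delay between tries
+    # (2s, 4s, 8s, ... up to 2**15s ~= 9h after the 15th failure); the
+    # 16th failure (_MAX_RETRIES) marks the activity as errored for good.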
+    logger.info(f"{q_count} outgoing activities ready to process")
+    if not q_count:
+        logger.info("No activities to process")
+        return False
+
+    next_activity = q.limit(1).one()
+
+    next_activity.tries = next_activity.tries + 1
+    next_activity.last_try = now()
+
+    payload = ap.wrap_object_if_needed(next_activity.outbox_object.ap_object)
+    logger.info(f"{payload=}")
+    try:
+        resp = ap.post(next_activity.recipient, payload)
+    except httpx.HTTPStatusError as http_error:
+        logger.exception("Failed")
+        next_activity.last_status_code = http_error.response.status_code
+        next_activity.last_response = http_error.response.text
+        next_activity.error = traceback.format_exc()
+
+        if http_error.response.status_code in [429, 503]:
+            retry_after: datetime | None = None
+            if retry_after_value := http_error.response.headers.get("Retry-After"):
+                retry_after = _parse_retry_after(retry_after_value)
+            _set_next_try(next_activity, retry_after)
+        elif 400 <= http_error.response.status_code < 500:
+            logger.info(f"status_code={http_error.response.status_code} not retrying")
+            next_activity.is_errored = True
+            next_activity.next_try = None
+        else:
+            _set_next_try(next_activity)
+    except Exception:
+        logger.exception("Failed")
+        next_activity.error = traceback.format_exc()
+        _set_next_try(next_activity)
+    else:
+        logger.info("Success")
+        next_activity.is_sent = True
+        next_activity.last_status_code = resp.status_code
+        next_activity.last_response = resp.text
+
+    db.commit()
+    return True
+
+
+def loop() -> None:
+    db = SessionLocal()
+    while True:
+        try:
+            process_next_outgoing_activity(db)
+        except Exception:
+            logger.exception("Failed to process next outgoing activity")
+            raise
+
+        time.sleep(1)
+
+
+if __name__ == "__main__":
+    loop()
diff --git a/app/source.py b/app/source.py
new file mode 100644
index 0000000..d71ff41
--- /dev/null
+++ b/app/source.py
@@ -0,0 +1,81 @@
+import re
+
+from markdown import markdown
+from sqlalchemy.orm import Session
+
+from app import models
+from app import webfinger
+from app.actor import fetch_actor
+from app.config import BASE_URL
+
+
+def _set_a_attrs(attrs, new=False):
+    attrs[(None, "target")] = "_blank"
+    attrs[(None, "class")] = "external"
+    attrs[(None, "rel")] = "noopener"
+    attrs[(None, "title")] = attrs[(None, "href")]
+    return attrs
+
+
+_HASHTAG_REGEX = re.compile(r"(#[\d\w]+)")
+_MENTION_REGEX = re.compile(r"@[\d\w_.+-]+@[\d\w-]+\.[\d\w\-.]+")
+
+
+def _hashtagify(db: Session, content: str) -> tuple[str, list[dict[str, str]]]:
+    tags = []
+    hashtags = re.findall(_HASHTAG_REGEX, content)
+    hashtags = sorted(set(hashtags), reverse=True)  # unique tags, longest first
+    for hashtag in hashtags:
+        tag = hashtag[1:]
+        link = f'<a href="{BASE_URL}/t/{tag}" class="mention hashtag" rel="tag">#<span>{tag}</span></a>'  # noqa: E501
+        tags.append(dict(href=f"{BASE_URL}/t/{tag}", name=hashtag, type="Hashtag"))
+        content = content.replace(hashtag, link)
+    return content, tags
+
+
+def _mentionify(
+    db: Session, content: str, hide_domain: bool = False
+) -> tuple[str, list[dict[str, str]]]:
+    tags = []
+    for mention in re.findall(_MENTION_REGEX, content):
+        _, username, domain = mention.split("@")
+        actor = (
+            db.query(models.Actor).filter(models.Actor.handle == mention).one_or_none()
+        )
+        if not actor:
+            actor_url = webfinger.get_actor_url(mention)
+            if not actor_url:
+                # FIXME(ts): raise an error?
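+                # An unresolvable mention is left as plain text in the
+                # content and no Mention tag is emitted for it.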
+                continue
+            actor = fetch_actor(db, actor_url)
+
+        tags.append(dict(type="Mention", href=actor.url, name=mention))
+
+        d = f"@{domain}"
+        if hide_domain:
+            d = ""
+
+        link = f'<span class="h-card"><a href="{actor.url}" class="u-url mention">@{username}{d}</a></span>'  # noqa: E501
+        content = content.replace(mention, link)
+    return content, tags
+
+
+def markdownify(
+    db: Session,
+    content: str,
+    mentionify: bool = True,
+    hashtagify: bool = True,
+) -> tuple[str, list[dict[str, str]]]:
+    """
+    >>> content, tags = markdownify(db, "Hello")
+
+    """
+    tags = []
+    if hashtagify:
+        content, hashtag_tags = _hashtagify(db, content)
+        tags.extend(hashtag_tags)
+    if mentionify:
+        content, mention_tags = _mentionify(db, content)
+        tags.extend(mention_tags)
+    content = markdown(content, extensions=["mdx_linkify"])
+    return content, tags
diff --git a/app/static/css/.gitignore b/app/static/css/.gitignore
new file mode 100644
index 0000000..b3a5267
--- /dev/null
+++ b/app/static/css/.gitignore
@@ -0,0 +1 @@
+*.css
diff --git a/app/static/nopic.png b/app/static/nopic.png
new file mode 100644
index 0000000..988d806
Binary files /dev/null and b/app/static/nopic.png differ
diff --git a/app/templates.py b/app/templates.py
new file mode 100644
index 0000000..2ca7c14
--- /dev/null
+++ b/app/templates.py
@@ -0,0 +1,190 @@
+import base64
+from datetime import datetime
+from datetime import timezone
+from functools import lru_cache
+from typing import Any
+from urllib.parse import urlparse
+
+import bleach
+import timeago  # type: ignore
+from bs4 import BeautifulSoup  # type: ignore
+from fastapi import Request
+from fastapi.templating import Jinja2Templates
+from sqlalchemy.orm import Session
+from starlette.templating import _TemplateResponse as TemplateResponse
+
+from app import models
+from app.actor import LOCAL_ACTOR
+from app.ap_object import Attachment
+from app.boxes import public_outbox_objects_count
+from app.config import DEBUG
+from app.config import DOMAIN
+from app.config import VERSION
+from app.config import generate_csrf_token
+from app.config import session_serializer
+from app.database import now
+from app.highlight import HIGHLIGHT_CSS
+from app.highlight import highlight
+
+_templates = Jinja2Templates(directory="app/templates")
+
+
+def _filter_domain(text: str) -> str:
+    hostname = urlparse(text).hostname
+    if not hostname:
+        raise ValueError(f"No hostname for {text}")
+    return hostname
+
+
+def _media_proxy_url(url: str | None) -> str:
+    if not url:
+        return "/static/nopic.png"
+
+    if url.startswith(DOMAIN):
+        return url
+
+    encoded_url = base64.urlsafe_b64encode(url.encode()).decode()
+    return f"/proxy/media/{encoded_url}"
+
+
+def is_current_user_admin(request: Request) -> bool:
+    is_admin = False
+    session_cookie = request.cookies.get("session")
+    if session_cookie:
+        try:
+            loaded_session = session_serializer.loads(
+                session_cookie,
+                max_age=3600 * 12,
+            )
+        except Exception:
+            pass
+        else:
+            is_admin = loaded_session.get("is_logged_in")
+
+    return is_admin
+
+
+def render_template(
+    db: Session,
+    request: Request,
+    template: str,
+    template_args: dict[str, Any] = {},
+) -> TemplateResponse:
+    is_admin = is_current_user_admin(request)
+
+    return _templates.TemplateResponse(
+        template,
+        {
+            "request": request,
+            "debug": DEBUG,
+            "microblogpub_version": VERSION,
+            "is_admin": is_admin,
+            "csrf_token": generate_csrf_token() if is_admin else None,
+            "highlight_css": HIGHLIGHT_CSS,
+            "notifications_count": db.query(models.Notification)
+            .filter(models.Notification.is_new.is_(True))
+            .count()
+            if is_admin
+            else 0,
+            "local_actor": LOCAL_ACTOR,
+            "followers_count": db.query(models.Follower).count(),
+            "following_count": db.query(models.Following).count(),
+            "objects_count": public_outbox_objects_count(db),
+            **template_args,
+        },
+    )
+
+
+# HTML/templates helper
+ALLOWED_TAGS = [
+    "a",
+    "abbr",
+    "acronym",
+    "b",
+    "br",
+    "blockquote",
+    "code",
+    "pre",
+    "em",
+    "i",
+    "li",
+    "ol",
+    "strong",
+    "sup",
+    "sub",
+    "del",
+    "ul",
+    "span",
+    "div",
+    "p",
+    "h1",
+    "h2",
+    "h3",
+    "h4",
+    "h5",
+    "h6",
+    "table",
+    "th",
+    "tr",
+    "td",
+    "thead",
+    "tbody",
+    "tfoot",
+    "colgroup",
+    "caption",
+    "img",
+]
+
+ALLOWED_ATTRIBUTES = {
+    "a": ["href", "title"],
+    "abbr": ["title"],
+    "acronym": ["title"],
+    "img": ["src", "alt", "title"],
+}
+
+
+@lru_cache(maxsize=256)
+def _update_inline_imgs(content):
+    soup = BeautifulSoup(content, "html5lib")
+    imgs = soup.find_all("img")
+    if not imgs:
+        return content
+
+    for img in imgs:
+        if not img.attrs.get("src"):
+            continue
+
+        img.attrs["src"] = _media_proxy_url(img.attrs["src"])
+
+    return soup.find("body").decode_contents()
+
+
+def _clean_html(html: str) -> str:
+    return bleach.clean(
+        _update_inline_imgs(highlight(html)),
+        tags=ALLOWED_TAGS,
+        attributes=ALLOWED_ATTRIBUTES,
+        strip=True,
+    )
+
+
+def _timeago(original_dt: datetime) -> str:
+    dt = original_dt
+    if dt.tzinfo:
+        dt = dt.astimezone(timezone.utc).replace(tzinfo=None)
+    return timeago.format(dt, now().replace(tzinfo=None))
+
+
+def _has_media_type(attachment: Attachment, media_type_prefix: str) -> bool:
+    return attachment.media_type.startswith(media_type_prefix)
+
+
+_templates.env.filters["domain"] = _filter_domain
+_templates.env.filters["media_proxy_url"] = _media_proxy_url
+_templates.env.filters["clean_html"] = _clean_html
+_templates.env.filters["timeago"] = _timeago
+_templates.env.filters["has_media_type"] = _has_media_type
diff --git a/app/templates/admin_new.html b/app/templates/admin_new.html
new file mode 100644
index 0000000..d3d4e87
--- /dev/null
+++ b/app/templates/admin_new.html
@@ -0,0 +1,13 @@
+{%- import "utils.html" as utils with context -%}
+{% extends "layout.html" %}
+{% block content %}
+
+ {{ utils.embed_csrf_token() }} + {{ utils.embed_redirect_url() }} + + + +
+ +{% endblock %} diff --git a/app/templates/admin_stream.html b/app/templates/admin_stream.html new file mode 100644 index 0000000..45dce48 --- /dev/null +++ b/app/templates/admin_stream.html @@ -0,0 +1,25 @@ +{%- import "utils.html" as utils with context -%} +{% extends "layout.html" %} +{% block content %} + +{% for inbox_object in stream %} +{% if inbox_object.ap_type == "Announce" %} + {% if inbox_object.relates_to_inbox_object_id %} + {{ utils.display_object(inbox_object.relates_to_inbox_object) }} + {% else %} + + {% endif %} + +{% else %} +{{ utils.display_object(inbox_object) }} +{% if inbox_object.liked_via_outbox_object_ap_id %} +{{ utils.admin_undo_button(inbox_object.liked_via_outbox_object_ap_id, "Unlike") }} +{% else %} +{{ utils.admin_like_button(inbox_object.ap_id) }} +{% endif %} + +{{ utils.admin_announce_button(inbox_object.ap_id) }} +{% endif %} +{% endfor %} + +{% endblock %} diff --git a/app/templates/followers.html b/app/templates/followers.html new file mode 100644 index 0000000..264e279 --- /dev/null +++ b/app/templates/followers.html @@ -0,0 +1,12 @@ +{%- import "utils.html" as utils -%} +{% extends "layout.html" %} +{% block content %} +{% include "header.html" %} +
+
    +{% for follower in followers %} +
  • {{ utils.display_actor(follower.actor, actors_metadata) }}
  • +{% endfor %} +
+
+{% endblock %} diff --git a/app/templates/following.html b/app/templates/following.html new file mode 100644 index 0000000..be55eb5 --- /dev/null +++ b/app/templates/following.html @@ -0,0 +1,12 @@ +{%- import "utils.html" as utils with context -%} +{% extends "layout.html" %} +{% block content %} +{% include "header.html" %} +
+
    +{% for follow in following %} +
  • {{ utils.display_actor(follow.actor, actors_metadata) }}
  • +{% endfor %} +
+
+{% endblock %} diff --git a/app/templates/header.html b/app/templates/header.html new file mode 100644 index 0000000..4a86662 --- /dev/null +++ b/app/templates/header.html @@ -0,0 +1,31 @@ + diff --git a/app/templates/index.html b/app/templates/index.html new file mode 100644 index 0000000..c325678 --- /dev/null +++ b/app/templates/index.html @@ -0,0 +1,14 @@ +{%- import "utils.html" as utils with context -%} +{% extends "layout.html" %} +{% block content %} +{% include "header.html" %} + +{% for outbox_object in objects %} +{{ outbox_object.likes_count }} +{{ outbox_object.announces_count }} +{{ utils.display_object(outbox_object) }} +{% endfor %} + + + +{% endblock %} diff --git a/app/templates/layout.html b/app/templates/layout.html new file mode 100644 index 0000000..4d8ef25 --- /dev/null +++ b/app/templates/layout.html @@ -0,0 +1,46 @@ + + + + + + + + + + +
+
+{% if is_admin %} +
+{% macro admin_link(url, text) %} +{% set url_for = request.url_for(url) %} +{{ text }} +{% endmacro %} +
+ +
+ +
+{% endif %} +{% block content %}{% endblock %} +
+
+ + + + diff --git a/app/templates/login.html b/app/templates/login.html new file mode 100644 index 0000000..9c221ef --- /dev/null +++ b/app/templates/login.html @@ -0,0 +1,13 @@ +{%- import "utils.html" as utils with context -%} +{% extends "layout.html" %} +{% block content %} +
+
+
+ + + +
+
+
+{% endblock %} diff --git a/app/templates/lookup.html b/app/templates/lookup.html new file mode 100644 index 0000000..d3ff8e6 --- /dev/null +++ b/app/templates/lookup.html @@ -0,0 +1,14 @@ +{%- import "utils.html" as utils with context -%} +{% extends "layout.html" %} +{% block content %} +
+ + +
+ {{ actors_metadata }} + {% if ap_object and ap_object.ap_type == "Person" %} + {{ utils.display_actor(ap_object, actors_metadata) }} + {% elif ap_object %} + {{ utils.display_object(ap_object) }} + {% endif %} +{% endblock %} diff --git a/app/templates/notifications.html b/app/templates/notifications.html new file mode 100644 index 0000000..7aa29a8 --- /dev/null +++ b/app/templates/notifications.html @@ -0,0 +1,45 @@ +{%- import "utils.html" as utils with context -%} +{% extends "layout.html" %} +{% block content %} +

Notifications

+
+ {%- for notif in notifications %} +
+ {%- if notif.notification_type.value == "new_follower" %} + + {{ utils.display_actor(notif.actor, actors_metadata) }} + {% elif notif.notification_type.value == "unfollow" %} + + {{ utils.display_actor(notif.actor, actors_metadata) }} + {% elif notif.notification_type.value == "like" %} + + {{ utils.display_object(notif.outbox_object) }} + {% elif notif.notification_type.value == "undo_like" %} + + {{ utils.display_object(notif.outbox_object) }} + {% elif notif.notification_type.value == "announce" %} + + {{ utils.display_object(notif.outbox_object) }} + {% elif notif.notification_type.value == "undo_announce" %} + + {{ utils.display_object(notif.outbox_object) }} + + {% else %} + {{ notif }} + {%- endif %} +
+ {%- endfor %} +
+{% endblock %} diff --git a/app/templates/object.html b/app/templates/object.html new file mode 100644 index 0000000..4cabb02 --- /dev/null +++ b/app/templates/object.html @@ -0,0 +1,8 @@ +{%- import "utils.html" as utils with context -%} +{% extends "layout.html" %} +{% block content %} +{% include "header.html" %} + +{{ utils.display_object(outbox_object) }} + +{% endblock %} diff --git a/app/templates/utils.html b/app/templates/utils.html new file mode 100644 index 0000000..dfb5f4a --- /dev/null +++ b/app/templates/utils.html @@ -0,0 +1,143 @@ +{% macro embed_csrf_token() %} + +{% endmacro %} + +{% macro embed_redirect_url() %} + +{% endmacro %} + +{% macro admin_follow_button(actor) %} +
+ {{ embed_csrf_token() }} + {{ embed_redirect_url() }} + + +
+{% endmacro %} + +{% macro admin_like_button(ap_object_id) %} +
+ {{ embed_csrf_token() }} + {{ embed_redirect_url() }} + + +
+{% endmacro %} + +{% macro admin_announce_button(ap_object_id) %} +
+ {{ embed_csrf_token() }} + {{ embed_redirect_url() }} + + +
+{% endmacro %} + +{% macro admin_undo_button(ap_object_id, action="Undo") %} +
+ {{ embed_csrf_token() }} + {{ embed_redirect_url() }} + + +
+{% endmacro %} + +{% macro sensitive_button(permalink_id) %} +
+ +{% for k, v in request.query_params.items() %} + +{% endfor %} + +
+{% endmacro %} + +{% macro display_actor(actor, actors_metadata) %} +{{ actors_metadata }} +{% set metadata = actors_metadata.get(actor.ap_id) %} + +{% if metadata %} +
+ +
+{% endif %} + +{% endmacro %} + +{% macro display_object(object) %} +{% if object.ap_type in ["Note", "Article", "Video"] %} + +{% endif %} +{% endmacro %} diff --git a/app/urlutils.py b/app/urlutils.py new file mode 100644 index 0000000..8afb3fc --- /dev/null +++ b/app/urlutils.py @@ -0,0 +1,61 @@ +import functools +import ipaddress +import socket +from urllib.parse import urlparse + +from loguru import logger + +from app.config import DEBUG + + +class InvalidURLError(Exception): + pass + + +@functools.lru_cache +def _getaddrinfo(hostname: str, port: int) -> str: + try: + ip_address = str(ipaddress.ip_address(hostname)) + except ValueError: + try: + ip_address = socket.getaddrinfo(hostname, port)[0][4][0] + logger.debug(f"DNS lookup: {hostname} -> {ip_address}") + except socket.gaierror: + logger.exception(f"failed to lookup addr info for {hostname}") + raise + + return ip_address + + +def is_url_valid(url: str) -> bool: + """Implements basic SSRF protection.""" + parsed = urlparse(url) + if parsed.scheme not in ["http", "https"]: + return False + + # XXX in debug mode, we want to allow requests to localhost to test the + # federation with local instances + if DEBUG: # pragma: no cover + return True + + if not parsed.hostname or parsed.hostname.lower() in ["localhost"]: + return False + + ip_address = _getaddrinfo( + parsed.hostname, parsed.port or (80 if parsed.scheme == "http" else 443) + ) + logger.debug(f"{ip_address=}") + + if ipaddress.ip_address(ip_address).is_private: + logger.info(f"rejecting private URL {url} -> {ip_address}") + return False + + return True + + +def check_url(url: str, debug: bool = False) -> None: + logger.debug(f"check_url {url=}") + if not is_url_valid(url): + raise InvalidURLError(f'"{url}" is invalid') + + return None diff --git a/app/webfinger.py b/app/webfinger.py new file mode 100644 index 0000000..287b646 --- /dev/null +++ b/app/webfinger.py @@ -0,0 +1,79 @@ +from typing import Any +from urllib.parse import urlparse + +import httpx +from loguru import logger + +from app import activitypub as ap + + +def webfinger( + resource: str, +) -> dict[str, Any] | None: # noqa: C901 + """Mastodon-like WebFinger resolution to retrieve the activity stream Actor URL.""" + logger.info(f"performing webfinger resolution for {resource}") + protos = ["https", "http"] + if resource.startswith("http://"): + protos.reverse() + host = urlparse(resource).netloc + elif resource.startswith("https://"): + host = urlparse(resource).netloc + else: + if resource.startswith("acct:"): + resource = resource[5:] + if resource.startswith("@"): + resource = resource[1:] + _, host = resource.split("@", 1) + resource = "acct:" + resource + + is_404 = False + + for i, proto in enumerate(protos): + try: + url = f"{proto}://{host}/.well-known/webfinger" + resp = ap.get(url, params={"resource": resource}) + break + except httpx.HTTPStatusError as http_error: + logger.exception("HTTP error") + if http_error.response.status_code in [403, 404, 410]: + is_404 = True + continue + raise + except httpx.HTTPError: + logger.exception("req failed") + # If we tried https first and the domain is "http only" + if i == 0: + continue + break + if is_404: + return None + + return resp + + +def get_remote_follow_template(resource: str) -> str | None: + data = webfinger(resource) + if data is None: + return None + for link in data["links"]: + if link.get("rel") == "http://ostatus.org/schema/1.0/subscribe": + return link.get("template") + return None + + +def get_actor_url(resource: str) -> str | None: + 
"""Mastodon-like WebFinger resolution to retrieve the activity stream Actor URL. + + Returns: + the Actor URL or None if the resolution failed. + """ + data = webfinger(resource) + if data is None: + return None + for link in data["links"]: + if ( + link.get("rel") == "self" + and link.get("type") == "application/activity+json" + ): + return link.get("href") + return None diff --git a/boussole.json b/boussole.json new file mode 100644 index 0000000..9f060ea --- /dev/null +++ b/boussole.json @@ -0,0 +1,8 @@ +{ + "SOURCES_PATH": "scss", + "TARGET_PATH": "app/static/css", + "LIBRARY_PATHS": [], + "OUTPUT_STYLES": "nested", + "SOURCE_COMMENTS": false, + "EXCLUDES": [] +} diff --git a/data/.gitignore b/data/.gitignore new file mode 100644 index 0000000..7db6e4c --- /dev/null +++ b/data/.gitignore @@ -0,0 +1,3 @@ +* +!uploads/ +!.gitignore diff --git a/data/uploads/.gitignore b/data/uploads/.gitignore new file mode 100644 index 0000000..d6b7ef3 --- /dev/null +++ b/data/uploads/.gitignore @@ -0,0 +1,2 @@ +* +!.gitignore diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..cf8245b --- /dev/null +++ b/poetry.lock @@ -0,0 +1,1697 @@ +[[package]] +name = "alembic" +version = "1.8.0" +description = "A database migration tool for SQLAlchemy." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +Mako = "*" +SQLAlchemy = ">=1.3.0" + +[package.extras] +tz = ["python-dateutil"] + +[[package]] +name = "anyio" +version = "3.6.1" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "main" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["packaging", "sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"] +test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "contextlib2", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"] +trio = ["trio (>=0.16)"] + +[[package]] +name = "asgiref" +version = "3.5.2" +description = "ASGI specs, helper code, and adapters" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +tests = ["pytest", "pytest-asyncio", "mypy (>=0.800)"] + +[[package]] +name = "atomicwrites" +version = "1.4.0" +description = "Atomic file writes." 
+category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "attrs" +version = "21.4.0" +description = "Classes Without Boilerplate" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.extras] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] +docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] + +[[package]] +name = "bcrypt" +version = "3.2.2" +description = "Modern password hashing for your software and your servers" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +cffi = ">=1.1" + +[package.extras] +tests = ["pytest (>=3.2.1,!=3.3.0)"] +typecheck = ["mypy"] + +[[package]] +name = "beautifulsoup4" +version = "4.11.1" +description = "Screen-scraping library" +category = "main" +optional = false +python-versions = ">=3.6.0" + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "black" +version = "22.3.0" +description = "The uncompromising code formatter." +category = "dev" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "bleach" +version = "5.0.0" +description = "An easy safelist-based HTML-sanitizing tool." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +six = ">=1.9.0" +webencodings = "*" + +[package.extras] +css = ["tinycss2 (>=1.1.0)"] +dev = ["pip-tools (==6.5.1)", "pytest (==7.1.1)", "flake8 (==4.0.1)", "tox (==3.24.5)", "sphinx (==4.3.2)", "twine (==4.0.0)", "wheel (==0.37.1)", "hashin (==0.17.0)", "black (==22.3.0)", "mypy (==0.942)"] + +[[package]] +name = "boussole" +version = "2.0.0" +description = "Commandline interface to build Sass projects using libsass-python" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +click = ">=5.1" +colorama = "*" +colorlog = "*" +libsass = ">=0.19.4" +pyaml = "*" +watchdog = ">=0.8.3" + +[package.extras] +dev = ["flake8", "pytest", "sphinx", "sphinx-rtd-theme", "livereload", "twine", "packaging"] + +[[package]] +name = "bs4" +version = "0.0.1" +description = "Dummy package for Beautiful Soup" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +beautifulsoup4 = "*" + +[[package]] +name = "certifi" +version = "2022.5.18.1" +description = "Python package for providing Mozilla's CA Bundle." +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "cffi" +version = "1.15.0" +description = "Foreign Function Interface for Python calling C code." 
+category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "charset-normalizer" +version = "2.0.12" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" +optional = false +python-versions = ">=3.5.0" + +[package.extras] +unicode_backport = ["unicodedata2"] + +[[package]] +name = "click" +version = "8.1.3" +description = "Composable command line interface toolkit" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.4" +description = "Cross-platform colored terminal text." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "colorlog" +version = "6.6.0" +description = "Add colours to the output of Python's logging module." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} + +[package.extras] +development = ["black", "flake8", "mypy", "pytest", "types-colorama"] + +[[package]] +name = "factory-boy" +version = "3.2.1" +description = "A versatile test fixtures replacement based on thoughtbot's factory_bot for Ruby." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +Faker = ">=0.7.0" + +[package.extras] +dev = ["coverage", "django", "flake8", "isort", "pillow", "sqlalchemy", "mongoengine", "wheel (>=0.32.0)", "tox", "zest.releaser"] +doc = ["sphinx", "sphinx-rtd-theme", "sphinxcontrib-spelling"] + +[[package]] +name = "faker" +version = "13.13.0" +description = "Faker is a Python package that generates fake data for you." 
+category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +python-dateutil = ">=2.4" + +[[package]] +name = "fastapi" +version = "0.78.0" +description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +category = "main" +optional = false +python-versions = ">=3.6.1" + +[package.dependencies] +pydantic = ">=1.6.2,<1.7 || >1.7,<1.7.1 || >1.7.1,<1.7.2 || >1.7.2,<1.7.3 || >1.7.3,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0" +starlette = "0.19.1" + +[package.extras] +all = ["requests (>=2.24.0,<3.0.0)", "jinja2 (>=2.11.2,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "itsdangerous (>=1.1.0,<3.0.0)", "pyyaml (>=5.3.1,<7.0.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)", "orjson (>=3.2.1,<4.0.0)", "email_validator (>=1.1.1,<2.0.0)", "uvicorn[standard] (>=0.12.0,<0.18.0)"] +dev = ["python-jose[cryptography] (>=3.3.0,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "autoflake (>=1.4.0,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "uvicorn[standard] (>=0.12.0,<0.18.0)", "pre-commit (>=2.17.0,<3.0.0)"] +doc = ["mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.3.0)", "typer (>=0.4.1,<0.5.0)", "pyyaml (>=5.3.1,<7.0.0)"] +test = ["pytest (>=6.2.4,<7.0.0)", "pytest-cov (>=2.12.0,<4.0.0)", "mypy (==0.910)", "flake8 (>=3.8.3,<4.0.0)", "black (==22.3.0)", "isort (>=5.0.6,<6.0.0)", "requests (>=2.24.0,<3.0.0)", "httpx (>=0.14.0,<0.19.0)", "email_validator (>=1.1.1,<2.0.0)", "sqlalchemy (>=1.3.18,<1.5.0)", "peewee (>=3.13.3,<4.0.0)", "databases[sqlite] (>=0.3.2,<0.6.0)", "orjson (>=3.2.1,<4.0.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "flask (>=1.1.2,<3.0.0)", "anyio[trio] (>=3.2.1,<4.0.0)", "types-ujson (==4.2.1)", "types-orjson (==3.6.2)", "types-dataclasses (==0.6.5)"] + +[[package]] +name = "flake8" +version = "4.0.1" +description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +mccabe = ">=0.6.0,<0.7.0" +pycodestyle = ">=2.8.0,<2.9.0" +pyflakes = ">=2.4.0,<2.5.0" + +[[package]] +name = "greenlet" +version = "1.1.2" +description = "Lightweight in-process concurrent programming" +category = "main" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" + +[package.extras] +docs = ["sphinx"] + +[[package]] +name = "h11" +version = "0.12.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "html5lib" +version = "1.1" +description = "HTML parser based on the WHATWG HTML specification" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.dependencies] +six = ">=1.9" +webencodings = "*" + +[package.extras] +all = ["genshi", "chardet (>=2.2)", "lxml"] +chardet = ["chardet (>=2.2)"] +genshi = ["genshi"] +lxml = ["lxml"] + +[[package]] +name = "httpcore" +version = "0.15.0" +description = "A minimal low-level HTTP client." 
+category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +anyio = ">=3.0.0,<4.0.0" +certifi = "*" +h11 = ">=0.11,<0.13" +sniffio = ">=1.0.0,<2.0.0" + +[package.extras] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] + +[[package]] +name = "httpx" +version = "0.23.0" +description = "The next generation HTTP client." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +certifi = "*" +httpcore = ">=0.15.0,<0.16.0" +rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} +sniffio = "*" + +[package.extras] +brotli = ["brotlicffi", "brotli"] +cli = ["click (>=8.0.0,<9.0.0)", "rich (>=10,<13)", "pygments (>=2.0.0,<3.0.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] + +[[package]] +name = "idna" +version = "3.3" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "iniconfig" +version = "1.1.1" +description = "iniconfig: brain-dead simple config-ini parsing" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "invoke" +version = "1.7.1" +description = "Pythonic task execution" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "isort" +version = "5.10.1" +description = "A Python utility / library to sort Python imports." +category = "dev" +optional = false +python-versions = ">=3.6.1,<4.0" + +[package.extras] +pipfile_deprecated_finder = ["pipreqs", "requirementslib"] +requirements_deprecated_finder = ["pipreqs", "pip-api"] +colors = ["colorama (>=0.4.3,<0.5.0)"] +plugins = ["setuptools"] + +[[package]] +name = "itsdangerous" +version = "2.1.2" +description = "Safely pass data to untrusted environments and back." +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "libsass" +version = "0.21.0" +description = "Sass for Python: A straightforward binding of libsass for Python." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +six = "*" + +[[package]] +name = "loguru" +version = "0.6.0" +description = "Python logging made (stupidly) simple" +category = "main" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} +win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} + +[package.extras] +dev = ["colorama (>=0.3.4)", "docutils (==0.16)", "flake8 (>=3.7.7)", "tox (>=3.9.0)", "pytest (>=4.6.2)", "pytest-cov (>=2.7.1)", "black (>=19.10b0)", "isort (>=5.1.1)", "Sphinx (>=4.1.1)", "sphinx-autobuild (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)"] + +[[package]] +name = "mako" +version = "1.2.0" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["babel"] +lingua = ["lingua"] +testing = ["pytest"] + +[[package]] +name = "markdown" +version = "3.3.7" +description = "Python implementation of Markdown." 
+category = "main" +optional = false +python-versions = ">=3.6" + +[package.extras] +testing = ["coverage", "pyyaml"] + +[[package]] +name = "markupsafe" +version = "2.1.1" +description = "Safely add untrusted strings to HTML/XML markup." +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "mccabe" +version = "0.6.1" +description = "McCabe checker, plugin for flake8" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "mdx-linkify" +version = "2.1" +description = "Link recognition for Python Markdown" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +bleach = ">=3.1.0" +Markdown = ">=3.0" + +[[package]] +name = "mf2py" +version = "1.1.2" +description = "Python Microformats2 parser" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +BeautifulSoup4 = ">=4.6.0" +html5lib = ">=1.0.1" +requests = ">=2.18.4" + +[[package]] +name = "mypy" +version = "0.960" +description = "Optional static typing for Python" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +mypy-extensions = ">=0.4.3" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=3.10" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +python2 = ["typed-ast (>=1.4.0,<2)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "0.4.3" +description = "Experimental type system extensions for programs checked with the mypy typechecker." +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "packaging" +version = "21.3" +description = "Core utilities for Python packages" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" + +[[package]] +name = "pathspec" +version = "0.9.0" +description = "Utility library for gitignore style pattern matching of file paths." +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" + +[[package]] +name = "platformdirs" +version = "2.5.2" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"] +test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"] + +[[package]] +name = "pluggy" +version = "1.0.0" +description = "plugin and hook calling mechanisms for python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "prompt-toolkit" +version = "3.0.29" +description = "Library for building powerful interactive command lines in Python" +category = "main" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "pyaml" +version = "21.10.1" +description = "PyYAML-based module to produce pretty and readable YAML-serialized data" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +PyYAML = "*" + +[[package]] +name = "pycodestyle" +version = "2.8.0" +description = "Python style guide checker" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "pycryptodome" +version = "3.14.1" +description = "Cryptographic library for Python" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "pydantic" +version = "1.9.1" +description = "Data validation and settings management using python type hints" +category = "main" +optional = false +python-versions = ">=3.6.1" + +[package.dependencies] +typing-extensions = ">=3.7.4.3" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyflakes" +version = "2.4.0" +description = "passive checker of Python programs" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "pygments" +version = "2.12.0" +description = "Pygments is a syntax highlighting package written in Python." 
+category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "pyparsing" +version = "3.0.9" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +category = "dev" +optional = false +python-versions = ">=3.6.8" + +[package.extras] +diagrams = ["railroad-diagrams", "jinja2"] + +[[package]] +name = "pytest" +version = "7.1.2" +description = "pytest: simple powerful testing with Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +tomli = ">=1.0.0" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "0.18.3" +description = "Pytest support for asyncio" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +pytest = ">=6.1.0" + +[package.extras] +testing = ["coverage (==6.2)", "hypothesis (>=5.7.1)", "flaky (>=3.5.0)", "mypy (==0.931)", "pytest-trio (>=0.7.0)"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-multipart" +version = "0.0.5" +description = "A streaming multipart parser for Python" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +six = ">=1.4.0" + +[[package]] +name = "pyyaml" +version = "6.0" +description = "YAML parser and emitter for Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "requests" +version = "2.27.1" +description = "Python HTTP for Humans." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} +idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} +urllib3 = ">=1.21.1,<1.27" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] + +[[package]] +name = "respx" +version = "0.19.2" +description = "A utility for mocking out the Python HTTPX and HTTP Core libraries." 
+category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +httpx = ">=0.21.0" + +[[package]] +name = "rfc3986" +version = "1.5.0" +description = "Validating URI References per RFC 3986" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +idna = {version = "*", optional = true, markers = "extra == \"idna2008\""} + +[package.extras] +idna2008 = ["idna"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "sniffio" +version = "1.2.0" +description = "Sniff out which async library your code is running under" +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "soupsieve" +version = "2.3.2.post1" +description = "A modern CSS selector implementation for Beautiful Soup." +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "sqlalchemy" +version = "1.4.37" +description = "Database Abstraction Library" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} +mypy = {version = ">=0.910", optional = true, markers = "python_version >= \"3\" and extra == \"mypy\""} +sqlalchemy2-stubs = {version = "*", optional = true, markers = "extra == \"mypy\""} + +[package.extras] +aiomysql = ["greenlet (!=0.4.17)", "aiomysql"] +aiosqlite = ["typing_extensions (!=3.10.0.1)", "greenlet (!=0.4.17)", "aiosqlite"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["greenlet (!=0.4.17)", "asyncmy (>=0.2.3,!=0.2.4)"] +mariadb_connector = ["mariadb (>=1.0.1)"] +mssql = ["pyodbc"] +mssql_pymssql = ["pymssql"] +mssql_pyodbc = ["pyodbc"] +mypy = ["sqlalchemy2-stubs", "mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0,<2)", "mysqlclient (>=1.4.0)"] +mysql_connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=7,<8)", "cx_oracle (>=7)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql_asyncpg = ["greenlet (!=0.4.17)", "asyncpg"] +postgresql_pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] +postgresql_psycopg2binary = ["psycopg2-binary"] +postgresql_psycopg2cffi = ["psycopg2cffi"] +pymysql = ["pymysql (<1)", "pymysql"] +sqlcipher = ["sqlcipher3-binary"] + +[[package]] +name = "sqlalchemy2-stubs" +version = "0.0.2a23" +description = "Typing Stubs for SQLAlchemy 1.4" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = ">=3.7.4" + +[[package]] +name = "starlette" +version = "0.19.1" +description = "The little ASGI library that shines." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +anyio = ">=3.4.0,<5" + +[package.extras] +full = ["itsdangerous", "jinja2", "python-multipart", "pyyaml", "requests"] + +[[package]] +name = "timeago" +version = "1.0.15" +description = "A very simple python library, used to format datetime with `*** time ago` statement. eg: \"3 hours ago\"." 
+category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "tomli-w" +version = "1.0.0" +description = "A lil' TOML writer" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "types-bleach" +version = "5.0.2" +description = "Typing stubs for bleach" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "types-markdown" +version = "3.3.28" +description = "Typing stubs for Markdown" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "types-python-dateutil" +version = "2.8.17" +description = "Typing stubs for python-dateutil" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "types-requests" +version = "2.27.29" +description = "Typing stubs for requests" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +types-urllib3 = "<1.27" + +[[package]] +name = "types-urllib3" +version = "1.26.15" +description = "Typing stubs for urllib3" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "typing-extensions" +version = "4.2.0" +description = "Backported and Experimental Type Hints for Python 3.7+" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "urllib3" +version = "1.26.9" +description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" + +[package.extras] +brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "uvicorn" +version = "0.17.6" +description = "The lightning-fast ASGI server." 
+category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +asgiref = ">=3.4.0" +click = ">=7.0" +h11 = ">=0.8" + +[package.extras] +standard = ["websockets (>=10.0)", "httptools (>=0.4.0)", "watchgod (>=0.6)", "python-dotenv (>=0.13)", "PyYAML (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "colorama (>=0.4)"] + +[[package]] +name = "watchdog" +version = "2.1.8" +description = "Filesystem events monitoring" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +watchmedo = ["PyYAML (>=3.10)"] + +[[package]] +name = "wcwidth" +version = "0.2.5" +description = "Measures the displayed width of unicode strings in a terminal" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "webencodings" +version = "0.5.1" +description = "Character encoding aliases for legacy web content" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "win32-setctime" +version = "1.1.0" +description = "A small Python utility to set file creation time on Windows" +category = "main" +optional = false +python-versions = ">=3.5" + +[package.extras] +dev = ["pytest (>=4.6.2)", "black (>=19.3b0)"] + +[metadata] +lock-version = "1.1" +python-versions = "^3.10" +content-hash = "2559d473ab650fbad970dda28a4f83d8ebd62eef9c780ab39490632ab6a9fa48" + +[metadata.files] +alembic = [ + {file = "alembic-1.8.0-py3-none-any.whl", hash = "sha256:b5ae4bbfc7d1302ed413989d39474d102e7cfa158f6d5969d2497955ffe85a30"}, + {file = "alembic-1.8.0.tar.gz", hash = "sha256:a2d4d90da70b30e70352cd9455e35873a255a31402a438fe24815758d7a0e5e1"}, +] +anyio = [ + {file = "anyio-3.6.1-py3-none-any.whl", hash = "sha256:cb29b9c70620506a9a8f87a309591713446953302d7d995344d0d7c6c0c9a7be"}, + {file = "anyio-3.6.1.tar.gz", hash = "sha256:413adf95f93886e442aea925f3ee43baa5a765a64a0f52c6081894f9992fdd0b"}, +] +asgiref = [ + {file = "asgiref-3.5.2-py3-none-any.whl", hash = "sha256:1d2880b792ae8757289136f1db2b7b99100ce959b2aa57fd69dab783d05afac4"}, + {file = "asgiref-3.5.2.tar.gz", hash = "sha256:4a29362a6acebe09bf1d6640db38c1dc3d9217c68e6f9f6204d72667fc19a424"}, +] +atomicwrites = [ + {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, + {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, +] +attrs = [ + {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, + {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, +] +bcrypt = [ + {file = "bcrypt-3.2.2-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:7180d98a96f00b1050e93f5b0f556e658605dd9f524d0b0e68ae7944673f525e"}, + {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:61bae49580dce88095d669226d5076d0b9d927754cedbdf76c6c9f5099ad6f26"}, + {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88273d806ab3a50d06bc6a2fc7c87d737dd669b76ad955f449c43095389bc8fb"}, + {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6d2cb9d969bfca5bc08e45864137276e4c3d3d7de2b162171def3d188bf9d34a"}, + {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b02d6bfc6336d1094276f3f588aa1225a598e27f8e3388f4db9948cb707b521"}, 
+ {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a2c46100e315c3a5b90fdc53e429c006c5f962529bc27e1dfd656292c20ccc40"}, + {file = "bcrypt-3.2.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7d9ba2e41e330d2af4af6b1b6ec9e6128e91343d0b4afb9282e54e5508f31baa"}, + {file = "bcrypt-3.2.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cd43303d6b8a165c29ec6756afd169faba9396a9472cdff753fe9f19b96ce2fa"}, + {file = "bcrypt-3.2.2-cp36-abi3-win32.whl", hash = "sha256:4e029cef560967fb0cf4a802bcf4d562d3d6b4b1bf81de5ec1abbe0f1adb027e"}, + {file = "bcrypt-3.2.2-cp36-abi3-win_amd64.whl", hash = "sha256:7ff2069240c6bbe49109fe84ca80508773a904f5a8cb960e02a977f7f519b129"}, + {file = "bcrypt-3.2.2.tar.gz", hash = "sha256:433c410c2177057705da2a9f2cd01dd157493b2a7ac14c8593a16b3dab6b6bfb"}, +] +beautifulsoup4 = [ + {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"}, + {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"}, +] +black = [ + {file = "black-22.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2497f9c2386572e28921fa8bec7be3e51de6801f7459dffd6e62492531c47e09"}, + {file = "black-22.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5795a0375eb87bfe902e80e0c8cfaedf8af4d49694d69161e5bd3206c18618bb"}, + {file = "black-22.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3556168e2e5c49629f7b0f377070240bd5511e45e25a4497bb0073d9dda776a"}, + {file = "black-22.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67c8301ec94e3bcc8906740fe071391bce40a862b7be0b86fb5382beefecd968"}, + {file = "black-22.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:fd57160949179ec517d32ac2ac898b5f20d68ed1a9c977346efbac9c2f1e779d"}, + {file = "black-22.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cc1e1de68c8e5444e8f94c3670bb48a2beef0e91dddfd4fcc29595ebd90bb9ce"}, + {file = "black-22.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2fc92002d44746d3e7db7cf9313cf4452f43e9ea77a2c939defce3b10b5c82"}, + {file = "black-22.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a6342964b43a99dbc72f72812bf88cad8f0217ae9acb47c0d4f141a6416d2d7b"}, + {file = "black-22.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:328efc0cc70ccb23429d6be184a15ce613f676bdfc85e5fe8ea2a9354b4e9015"}, + {file = "black-22.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06f9d8846f2340dfac80ceb20200ea5d1b3f181dd0556b47af4e8e0b24fa0a6b"}, + {file = "black-22.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4efa5fad66b903b4a5f96d91461d90b9507a812b3c5de657d544215bb7877a"}, + {file = "black-22.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8477ec6bbfe0312c128e74644ac8a02ca06bcdb8982d4ee06f209be28cdf163"}, + {file = "black-22.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:637a4014c63fbf42a692d22b55d8ad6968a946b4a6ebc385c5505d9625b6a464"}, + {file = "black-22.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:863714200ada56cbc366dc9ae5291ceb936573155f8bf8e9de92aef51f3ad0f0"}, + {file = "black-22.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10dbe6e6d2988049b4655b2b739f98785a884d4d6b85bc35133a8fb9a2233176"}, + {file = "black-22.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:cee3e11161dde1b2a33a904b850b0899e0424cc331b7295f2a9698e79f9a69a0"}, + {file = 
"black-22.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5891ef8abc06576985de8fa88e95ab70641de6c1fca97e2a15820a9b69e51b20"}, + {file = "black-22.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:30d78ba6bf080eeaf0b7b875d924b15cd46fec5fd044ddfbad38c8ea9171043a"}, + {file = "black-22.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ee8f1f7228cce7dffc2b464f07ce769f478968bfb3dd1254a4c2eeed84928aad"}, + {file = "black-22.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ee227b696ca60dd1c507be80a6bc849a5a6ab57ac7352aad1ffec9e8b805f21"}, + {file = "black-22.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:9b542ced1ec0ceeff5b37d69838106a6348e60db7b8fdd245294dc1d26136265"}, + {file = "black-22.3.0-py3-none-any.whl", hash = "sha256:bc58025940a896d7e5356952228b68f793cf5fcb342be703c3a2669a1488cb72"}, + {file = "black-22.3.0.tar.gz", hash = "sha256:35020b8886c022ced9282b51b5a875b6d1ab0c387b31a065b84db7c33085ca79"}, +] +bleach = [ + {file = "bleach-5.0.0-py3-none-any.whl", hash = "sha256:08a1fe86d253b5c88c92cc3d810fd8048a16d15762e1e5b74d502256e5926aa1"}, + {file = "bleach-5.0.0.tar.gz", hash = "sha256:c6d6cc054bdc9c83b48b8083e236e5f00f238428666d2ce2e083eaa5fd568565"}, +] +boussole = [ + {file = "boussole-2.0.0.tar.gz", hash = "sha256:e4907180698339c778669d71b16a77b8d54c97d54e79d7813de1630a9d091a2f"}, +] +bs4 = [ + {file = "bs4-0.0.1.tar.gz", hash = "sha256:36ecea1fd7cc5c0c6e4a1ff075df26d50da647b75376626cc186e2212886dd3a"}, +] +certifi = [ + {file = "certifi-2022.5.18.1-py3-none-any.whl", hash = "sha256:f1d53542ee8cbedbe2118b5686372fb33c297fcd6379b050cca0ef13a597382a"}, + {file = "certifi-2022.5.18.1.tar.gz", hash = "sha256:9c5705e395cd70084351dd8ad5c41e65655e08ce46f2ec9cf6c2c08390f71eb7"}, +] +cffi = [ + {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"}, + {file = "cffi-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0"}, + {file = "cffi-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14"}, + {file = "cffi-1.15.0-cp27-cp27m-win32.whl", hash = "sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474"}, + {file = "cffi-1.15.0-cp27-cp27m-win_amd64.whl", hash = "sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6"}, + {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27"}, + {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023"}, + {file = "cffi-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2"}, + {file = "cffi-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c"}, + {file = 
"cffi-1.15.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382"}, + {file = "cffi-1.15.0-cp310-cp310-win32.whl", hash = "sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55"}, + {file = "cffi-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0"}, + {file = "cffi-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605"}, + {file = "cffi-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e"}, + {file = "cffi-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc"}, + {file = "cffi-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7"}, + {file = "cffi-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66"}, + {file = "cffi-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029"}, + {file = "cffi-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024"}, + {file = 
"cffi-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6"}, + {file = "cffi-1.15.0-cp38-cp38-win32.whl", hash = "sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c"}, + {file = "cffi-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443"}, + {file = "cffi-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a"}, + {file = "cffi-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8"}, + {file = "cffi-1.15.0-cp39-cp39-win32.whl", hash = "sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a"}, + {file = "cffi-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139"}, + {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"}, +] +charset-normalizer = [ + {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, + {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, +] +click = [ + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, +] +colorama = [ + {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, + {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, +] +colorlog = [ + {file = "colorlog-6.6.0-py2.py3-none-any.whl", hash = "sha256:351c51e866c86c3217f08e4b067a7974a678be78f07f85fc2d55b8babde6d94e"}, + {file = "colorlog-6.6.0.tar.gz", hash = "sha256:344f73204009e4c83c5b6beb00b3c45dc70fcdae3c80db919e0a4171d006fde8"}, +] +factory-boy = [ + {file = "factory_boy-3.2.1-py2.py3-none-any.whl", hash = "sha256:eb02a7dd1b577ef606b75a253b9818e6f9eaf996d94449c9d5ebb124f90dc795"}, + {file = "factory_boy-3.2.1.tar.gz", hash = 
"sha256:a98d277b0c047c75eb6e4ab8508a7f81fb03d2cb21986f627913546ef7a2a55e"}, +] +faker = [ + {file = "Faker-13.13.0-py3-none-any.whl", hash = "sha256:638b9c362e77bcd8212f0d1434c1940f1e8d6c336fe949add563ba0a154b6310"}, + {file = "Faker-13.13.0.tar.gz", hash = "sha256:f192d238b3b6acb98ee85bd596258a15a171385613b30a7849e5845f8980d722"}, +] +fastapi = [ + {file = "fastapi-0.78.0-py3-none-any.whl", hash = "sha256:15fcabd5c78c266fa7ae7d8de9b384bfc2375ee0503463a6febbe3bab69d6f65"}, + {file = "fastapi-0.78.0.tar.gz", hash = "sha256:3233d4a789ba018578658e2af1a4bb5e38bdd122ff722b313666a9b2c6786a83"}, +] +flake8 = [ + {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, + {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, +] +greenlet = [ + {file = "greenlet-1.1.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:58df5c2a0e293bf665a51f8a100d3e9956febfbf1d9aaf8c0677cf70218910c6"}, + {file = "greenlet-1.1.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:aec52725173bd3a7b56fe91bc56eccb26fbdff1386ef123abb63c84c5b43b63a"}, + {file = "greenlet-1.1.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:833e1551925ed51e6b44c800e71e77dacd7e49181fdc9ac9a0bf3714d515785d"}, + {file = "greenlet-1.1.2-cp27-cp27m-win32.whl", hash = "sha256:aa5b467f15e78b82257319aebc78dd2915e4c1436c3c0d1ad6f53e47ba6e2713"}, + {file = "greenlet-1.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:40b951f601af999a8bf2ce8c71e8aaa4e8c6f78ff8afae7b808aae2dc50d4c40"}, + {file = "greenlet-1.1.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:95e69877983ea39b7303570fa6760f81a3eec23d0e3ab2021b7144b94d06202d"}, + {file = "greenlet-1.1.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:356b3576ad078c89a6107caa9c50cc14e98e3a6c4874a37c3e0273e4baf33de8"}, + {file = "greenlet-1.1.2-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8639cadfda96737427330a094476d4c7a56ac03de7265622fcf4cfe57c8ae18d"}, + {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97e5306482182170ade15c4b0d8386ded995a07d7cc2ca8f27958d34d6736497"}, + {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6a36bb9474218c7a5b27ae476035497a6990e21d04c279884eb10d9b290f1b1"}, + {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abb7a75ed8b968f3061327c433a0fbd17b729947b400747c334a9c29a9af6c58"}, + {file = "greenlet-1.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b336501a05e13b616ef81ce329c0e09ac5ed8c732d9ba7e3e983fcc1a9e86965"}, + {file = "greenlet-1.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:14d4f3cd4e8b524ae9b8aa567858beed70c392fdec26dbdb0a8a418392e71708"}, + {file = "greenlet-1.1.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:17ff94e7a83aa8671a25bf5b59326ec26da379ace2ebc4411d690d80a7fbcf23"}, + {file = "greenlet-1.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9f3cba480d3deb69f6ee2c1825060177a22c7826431458c697df88e6aeb3caee"}, + {file = "greenlet-1.1.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:fa877ca7f6b48054f847b61d6fa7bed5cebb663ebc55e018fda12db09dcc664c"}, + {file = "greenlet-1.1.2-cp35-cp35m-win32.whl", hash = "sha256:7cbd7574ce8e138bda9df4efc6bf2ab8572c9aff640d8ecfece1b006b68da963"}, + {file = "greenlet-1.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:903bbd302a2378f984aef528f76d4c9b1748f318fe1294961c072bdc7f2ffa3e"}, + {file = 
"greenlet-1.1.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:049fe7579230e44daef03a259faa24511d10ebfa44f69411d99e6a184fe68073"}, + {file = "greenlet-1.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:dd0b1e9e891f69e7675ba5c92e28b90eaa045f6ab134ffe70b52e948aa175b3c"}, + {file = "greenlet-1.1.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7418b6bfc7fe3331541b84bb2141c9baf1ec7132a7ecd9f375912eca810e714e"}, + {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9d29ca8a77117315101425ec7ec2a47a22ccf59f5593378fc4077ac5b754fce"}, + {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21915eb821a6b3d9d8eefdaf57d6c345b970ad722f856cd71739493ce003ad08"}, + {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eff9d20417ff9dcb0d25e2defc2574d10b491bf2e693b4e491914738b7908168"}, + {file = "greenlet-1.1.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b8c008de9d0daba7b6666aa5bbfdc23dcd78cafc33997c9b7741ff6353bafb7f"}, + {file = "greenlet-1.1.2-cp36-cp36m-win32.whl", hash = "sha256:32ca72bbc673adbcfecb935bb3fb1b74e663d10a4b241aaa2f5a75fe1d1f90aa"}, + {file = "greenlet-1.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f0214eb2a23b85528310dad848ad2ac58e735612929c8072f6093f3585fd342d"}, + {file = "greenlet-1.1.2-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:b92e29e58bef6d9cfd340c72b04d74c4b4e9f70c9fa7c78b674d1fec18896dc4"}, + {file = "greenlet-1.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:fdcec0b8399108577ec290f55551d926d9a1fa6cad45882093a7a07ac5ec147b"}, + {file = "greenlet-1.1.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:93f81b134a165cc17123626ab8da2e30c0455441d4ab5576eed73a64c025b25c"}, + {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e12bdc622676ce47ae9abbf455c189e442afdde8818d9da983085df6312e7a1"}, + {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c790abda465726cfb8bb08bd4ca9a5d0a7bd77c7ac1ca1b839ad823b948ea28"}, + {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f276df9830dba7a333544bd41070e8175762a7ac20350786b322b714b0e654f5"}, + {file = "greenlet-1.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c5d5b35f789a030ebb95bff352f1d27a93d81069f2adb3182d99882e095cefe"}, + {file = "greenlet-1.1.2-cp37-cp37m-win32.whl", hash = "sha256:64e6175c2e53195278d7388c454e0b30997573f3f4bd63697f88d855f7a6a1fc"}, + {file = "greenlet-1.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b11548073a2213d950c3f671aa88e6f83cda6e2fb97a8b6317b1b5b33d850e06"}, + {file = "greenlet-1.1.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:9633b3034d3d901f0a46b7939f8c4d64427dfba6bbc5a36b1a67364cf148a1b0"}, + {file = "greenlet-1.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:eb6ea6da4c787111adf40f697b4e58732ee0942b5d3bd8f435277643329ba627"}, + {file = "greenlet-1.1.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:f3acda1924472472ddd60c29e5b9db0cec629fbe3c5c5accb74d6d6d14773478"}, + {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e859fcb4cbe93504ea18008d1df98dee4f7766db66c435e4882ab35cf70cac43"}, + {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00e44c8afdbe5467e4f7b5851be223be68adb4272f44696ee71fe46b7036a711"}, + {file = 
"greenlet-1.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec8c433b3ab0419100bd45b47c9c8551248a5aee30ca5e9d399a0b57ac04651b"}, + {file = "greenlet-1.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2bde6792f313f4e918caabc46532aa64aa27a0db05d75b20edfc5c6f46479de2"}, + {file = "greenlet-1.1.2-cp38-cp38-win32.whl", hash = "sha256:288c6a76705dc54fba69fbcb59904ae4ad768b4c768839b8ca5fdadec6dd8cfd"}, + {file = "greenlet-1.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:8d2f1fb53a421b410751887eb4ff21386d119ef9cde3797bf5e7ed49fb51a3b3"}, + {file = "greenlet-1.1.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:166eac03e48784a6a6e0e5f041cfebb1ab400b394db188c48b3a84737f505b67"}, + {file = "greenlet-1.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:572e1787d1460da79590bf44304abbc0a2da944ea64ec549188fa84d89bba7ab"}, + {file = "greenlet-1.1.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:be5f425ff1f5f4b3c1e33ad64ab994eed12fc284a6ea71c5243fd564502ecbe5"}, + {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1692f7d6bc45e3200844be0dba153612103db241691088626a33ff1f24a0d88"}, + {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7227b47e73dedaa513cdebb98469705ef0d66eb5a1250144468e9c3097d6b59b"}, + {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ff61ff178250f9bb3cd89752df0f1dd0e27316a8bd1465351652b1b4a4cdfd3"}, + {file = "greenlet-1.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0051c6f1f27cb756ffc0ffbac7d2cd48cb0362ac1736871399a739b2885134d3"}, + {file = "greenlet-1.1.2-cp39-cp39-win32.whl", hash = "sha256:f70a9e237bb792c7cc7e44c531fd48f5897961701cdaa06cf22fc14965c496cf"}, + {file = "greenlet-1.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:013d61294b6cd8fe3242932c1c5e36e5d1db2c8afb58606c5a67efce62c1f5fd"}, + {file = "greenlet-1.1.2.tar.gz", hash = "sha256:e30f5ea4ae2346e62cedde8794a56858a67b878dd79f7df76a0767e356b1744a"}, +] +h11 = [ + {file = "h11-0.12.0-py3-none-any.whl", hash = "sha256:36a3cb8c0a032f56e2da7084577878a035d3b61d104230d4bd49c0c6b555a9c6"}, + {file = "h11-0.12.0.tar.gz", hash = "sha256:47222cb6067e4a307d535814917cd98fd0a57b6788ce715755fa2b6c28b56042"}, +] +html5lib = [ + {file = "html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d"}, + {file = "html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f"}, +] +httpcore = [ + {file = "httpcore-0.15.0-py3-none-any.whl", hash = "sha256:1105b8b73c025f23ff7c36468e4432226cbb959176eab66864b8e31c4ee27fa6"}, + {file = "httpcore-0.15.0.tar.gz", hash = "sha256:18b68ab86a3ccf3e7dc0f43598eaddcf472b602aba29f9aa6ab85fe2ada3980b"}, +] +httpx = [ + {file = "httpx-0.23.0-py3-none-any.whl", hash = "sha256:42974f577483e1e932c3cdc3cd2303e883cbfba17fe228b0f63589764d7b9c4b"}, + {file = "httpx-0.23.0.tar.gz", hash = "sha256:f28eac771ec9eb4866d3fb4ab65abd42d38c424739e80c08d8d20570de60b0ef"}, +] +idna = [ + {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, + {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, +] +iniconfig = [ + {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, + {file = "iniconfig-1.1.1.tar.gz", hash = 
"sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, +] +invoke = [ + {file = "invoke-1.7.1-py3-none-any.whl", hash = "sha256:2dc975b4f92be0c0a174ad2d063010c8a1fdb5e9389d69871001118b4fcac4fb"}, + {file = "invoke-1.7.1.tar.gz", hash = "sha256:7b6deaf585eee0a848205d0b8c0014b9bf6f287a8eb798818a642dff1df14b19"}, +] +isort = [ + {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, + {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, +] +itsdangerous = [ + {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, + {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, +] +jinja2 = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] +libsass = [ + {file = "libsass-0.21.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:06c8776417fe930714bdc930a3d7e795ae3d72be6ac883ff72a1b8f7c49e5ffb"}, + {file = "libsass-0.21.0-cp27-cp27m-win32.whl", hash = "sha256:a005f298f64624f313a3ac618ab03f844c71d84ae4f4a4aec4b68d2a4ffe75eb"}, + {file = "libsass-0.21.0-cp27-cp27m-win_amd64.whl", hash = "sha256:6b984510ed94993708c0d697b4fef2d118929bbfffc3b90037be0f5ccadf55e7"}, + {file = "libsass-0.21.0-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e25dd9047a9392d3c59a0b869e0404f2b325a03871ee45285ee33b3664f5613"}, + {file = "libsass-0.21.0-cp36-abi3-macosx_10_14_x86_64.whl", hash = "sha256:12f39712de38689a8b785b7db41d3ba2ea1d46f9379d81ea4595802d91fa6529"}, + {file = "libsass-0.21.0-cp36-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e2b1a7d093f2e76dc694c17c0c285e846d0b0deb0e8b21dc852ba1a3a4e2f1d6"}, + {file = "libsass-0.21.0-cp36-abi3-win32.whl", hash = "sha256:abc29357ee540849faf1383e1746d40d69ed5cb6d4c346df276b258f5aa8977a"}, + {file = "libsass-0.21.0-cp36-abi3-win_amd64.whl", hash = "sha256:659ae41af8708681fa3ec73f47b9735a6725e71c3b66ff570bfce78952f2314e"}, + {file = "libsass-0.21.0-cp38-abi3-macosx_12_0_arm64.whl", hash = "sha256:c9ec490609752c1d81ff6290da33485aa7cb6d7365ac665b74464c1b7d97f7da"}, + {file = "libsass-0.21.0.tar.gz", hash = "sha256:d5ba529d9ce668be9380563279f3ffe988f27bc5b299c5a28453df2e0b0fbaf2"}, +] +loguru = [ + {file = "loguru-0.6.0-py3-none-any.whl", hash = "sha256:4e2414d534a2ab57573365b3e6d0234dfb1d84b68b7f3b948e6fb743860a77c3"}, + {file = "loguru-0.6.0.tar.gz", hash = "sha256:066bd06758d0a513e9836fd9c6b5a75bfb3fd36841f4b996bc60b547a309d41c"}, +] +mako = [ + {file = "Mako-1.2.0-py3-none-any.whl", hash = "sha256:23aab11fdbbb0f1051b93793a58323ff937e98e34aece1c4219675122e57e4ba"}, + {file = "Mako-1.2.0.tar.gz", hash = "sha256:9a7c7e922b87db3686210cf49d5d767033a41d4010b284e747682c92bddd8b39"}, +] +markdown = [ + {file = "Markdown-3.3.7-py3-none-any.whl", hash = "sha256:f5da449a6e1c989a4cea2631aa8ee67caa5a2ef855d551c88f9e309f4634c621"}, + {file = "Markdown-3.3.7.tar.gz", hash = "sha256:cbb516f16218e643d8e0a95b309f77eb118cb138d39a4f27851e6a63581db874"}, +] +markupsafe = [ + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, + {file = 
"MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, + {file = 
"MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, + {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, +] +mccabe = [ + {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, + {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, +] +mdx-linkify = [ + {file = "mdx_linkify-2.1.tar.gz", hash = "sha256:e09278e43e5076b63398238b069a361913779683183481e9206235667cd89f54"}, +] +mf2py = [ + {file = "mf2py-1.1.2.tar.gz", hash = "sha256:84f1f8f2ff3f1deb1c30be497e7ccd805452996a662fd4a77f09e0105bede2c9"}, +] +mypy = [ + {file = "mypy-0.960-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3a3e525cd76c2c4f90f1449fd034ba21fcca68050ff7c8397bb7dd25dd8b8248"}, + {file = "mypy-0.960-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7a76dc4f91e92db119b1be293892df8379b08fd31795bb44e0ff84256d34c251"}, + {file = "mypy-0.960-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ffdad80a92c100d1b0fe3d3cf1a4724136029a29afe8566404c0146747114382"}, + {file 
= "mypy-0.960-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7d390248ec07fa344b9f365e6ed9d205bd0205e485c555bed37c4235c868e9d5"}, + {file = "mypy-0.960-cp310-cp310-win_amd64.whl", hash = "sha256:925aa84369a07846b7f3b8556ccade1f371aa554f2bd4fb31cb97a24b73b036e"}, + {file = "mypy-0.960-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:239d6b2242d6c7f5822163ee082ef7a28ee02e7ac86c35593ef923796826a385"}, + {file = "mypy-0.960-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f1ba54d440d4feee49d8768ea952137316d454b15301c44403db3f2cb51af024"}, + {file = "mypy-0.960-cp36-cp36m-win_amd64.whl", hash = "sha256:cb7752b24528c118a7403ee955b6a578bfcf5879d5ee91790667c8ea511d2085"}, + {file = "mypy-0.960-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:826a2917c275e2ee05b7c7b736c1e6549a35b7ea5a198ca457f8c2ebea2cbecf"}, + {file = "mypy-0.960-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3eabcbd2525f295da322dff8175258f3fc4c3eb53f6d1929644ef4d99b92e72d"}, + {file = "mypy-0.960-cp37-cp37m-win_amd64.whl", hash = "sha256:f47322796c412271f5aea48381a528a613f33e0a115452d03ae35d673e6064f8"}, + {file = "mypy-0.960-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2c7f8bb9619290836a4e167e2ef1f2cf14d70e0bc36c04441e41487456561409"}, + {file = "mypy-0.960-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fbfb873cf2b8d8c3c513367febde932e061a5f73f762896826ba06391d932b2a"}, + {file = "mypy-0.960-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc537885891382e08129d9862553b3d00d4be3eb15b8cae9e2466452f52b0117"}, + {file = "mypy-0.960-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:481f98c6b24383188c928f33dd2f0776690807e12e9989dd0419edd5c74aa53b"}, + {file = "mypy-0.960-cp38-cp38-win_amd64.whl", hash = "sha256:29dc94d9215c3eb80ac3c2ad29d0c22628accfb060348fd23d73abe3ace6c10d"}, + {file = "mypy-0.960-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:33d53a232bb79057f33332dbbb6393e68acbcb776d2f571ba4b1d50a2c8ba873"}, + {file = "mypy-0.960-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8d645e9e7f7a5da3ec3bbcc314ebb9bb22c7ce39e70367830eb3c08d0140b9ce"}, + {file = "mypy-0.960-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:85cf2b14d32b61db24ade8ac9ae7691bdfc572a403e3cb8537da936e74713275"}, + {file = "mypy-0.960-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a85a20b43fa69efc0b955eba1db435e2ffecb1ca695fe359768e0503b91ea89f"}, + {file = "mypy-0.960-cp39-cp39-win_amd64.whl", hash = "sha256:0ebfb3f414204b98c06791af37a3a96772203da60636e2897408517fcfeee7a8"}, + {file = "mypy-0.960-py3-none-any.whl", hash = "sha256:bfd4f6536bd384c27c392a8b8f790fd0ed5c0cf2f63fc2fed7bce56751d53026"}, + {file = "mypy-0.960.tar.gz", hash = "sha256:d4fccf04c1acf750babd74252e0f2db6bd2ac3aa8fe960797d9f3ef41cf2bfd4"}, +] +mypy-extensions = [ + {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, + {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, +] +packaging = [ + {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, + {file = "packaging-21.3.tar.gz", hash = 
"sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, +] +pathspec = [ + {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, + {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, +] +platformdirs = [ + {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, + {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, +] +pluggy = [ + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, +] +prompt-toolkit = [ + {file = "prompt_toolkit-3.0.29-py3-none-any.whl", hash = "sha256:62291dad495e665fca0bda814e342c69952086afb0f4094d0893d357e5c78752"}, + {file = "prompt_toolkit-3.0.29.tar.gz", hash = "sha256:bd640f60e8cecd74f0dc249713d433ace2ddc62b65ee07f96d358e0b152b6ea7"}, +] +py = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] +pyaml = [ + {file = "pyaml-21.10.1-py2.py3-none-any.whl", hash = "sha256:19985ed303c3a985de4cf8fd329b6d0a5a5b5c9035ea240eccc709ebacbaf4a0"}, + {file = "pyaml-21.10.1.tar.gz", hash = "sha256:c6519fee13bf06e3bb3f20cacdea8eba9140385a7c2546df5dbae4887f768383"}, +] +pycodestyle = [ + {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, + {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, +] +pycparser = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] +pycryptodome = [ + {file = "pycryptodome-3.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:75a3a364fee153e77ed889c957f6f94ec6d234b82e7195b117180dcc9fc16f96"}, + {file = "pycryptodome-3.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:aae395f79fa549fb1f6e3dc85cf277f0351e15a22e6547250056c7f0c990d6a5"}, + {file = "pycryptodome-3.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:f403a3e297a59d94121cb3ee4b1cf41f844332940a62d71f9e4a009cc3533493"}, + {file = "pycryptodome-3.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ce7a875694cd6ccd8682017a7c06c6483600f151d8916f2b25cf7a439e600263"}, + {file = "pycryptodome-3.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:a36ab51674b014ba03da7f98b675fcb8eabd709a2d8e18219f784aba2db73b72"}, + {file = "pycryptodome-3.14.1-cp27-cp27m-manylinux2014_aarch64.whl", hash = "sha256:50a5346af703330944bea503106cd50c9c2212174cfcb9939db4deb5305a8367"}, + {file = "pycryptodome-3.14.1-cp27-cp27m-win32.whl", hash = "sha256:36e3242c4792e54ed906c53f5d840712793dc68b726ec6baefd8d978c5282d30"}, + {file = "pycryptodome-3.14.1-cp27-cp27m-win_amd64.whl", hash = "sha256:c880a98376939165b7dc504559f60abe234b99e294523a273847f9e7756f4132"}, + {file = "pycryptodome-3.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = 
"sha256:dcd65355acba9a1d0fc9b923875da35ed50506e339b35436277703d7ace3e222"}, + {file = "pycryptodome-3.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:766a8e9832128c70012e0c2b263049506cbf334fb21ff7224e2704102b6ef59e"}, + {file = "pycryptodome-3.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:2562de213960693b6d657098505fd4493c45f3429304da67efcbeb61f0edfe89"}, + {file = "pycryptodome-3.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d1b7739b68a032ad14c5e51f7e4e1a5f92f3628bba024a2bda1f30c481fc85d8"}, + {file = "pycryptodome-3.14.1-cp27-cp27mu-manylinux2014_aarch64.whl", hash = "sha256:27e92c1293afcb8d2639baf7eb43f4baada86e4de0f1fb22312bfc989b95dae2"}, + {file = "pycryptodome-3.14.1-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:f2772af1c3ef8025c85335f8b828d0193fa1e43256621f613280e2c81bfad423"}, + {file = "pycryptodome-3.14.1-cp35-abi3-manylinux1_i686.whl", hash = "sha256:9ec761a35dbac4a99dcbc5cd557e6e57432ddf3e17af8c3c86b44af9da0189c0"}, + {file = "pycryptodome-3.14.1-cp35-abi3-manylinux1_x86_64.whl", hash = "sha256:e64738207a02a83590df35f59d708bf1e7ea0d6adce712a777be2967e5f7043c"}, + {file = "pycryptodome-3.14.1-cp35-abi3-manylinux2010_i686.whl", hash = "sha256:e24d4ec4b029611359566c52f31af45c5aecde7ef90bf8f31620fd44c438efe7"}, + {file = "pycryptodome-3.14.1-cp35-abi3-manylinux2010_x86_64.whl", hash = "sha256:8b5c28058102e2974b9868d72ae5144128485d466ba8739abd674b77971454cc"}, + {file = "pycryptodome-3.14.1-cp35-abi3-manylinux2014_aarch64.whl", hash = "sha256:924b6aad5386fb54f2645f22658cb0398b1f25bc1e714a6d1522c75d527deaa5"}, + {file = "pycryptodome-3.14.1-cp35-abi3-win32.whl", hash = "sha256:53dedbd2a6a0b02924718b520a723e88bcf22e37076191eb9b91b79934fb2192"}, + {file = "pycryptodome-3.14.1-cp35-abi3-win_amd64.whl", hash = "sha256:ea56a35fd0d13121417d39a83f291017551fa2c62d6daa6b04af6ece7ed30d84"}, + {file = "pycryptodome-3.14.1-pp27-pypy_73-macosx_10_9_x86_64.whl", hash = "sha256:028dcbf62d128b4335b61c9fbb7dd8c376594db607ef36d5721ee659719935d5"}, + {file = "pycryptodome-3.14.1-pp27-pypy_73-manylinux1_x86_64.whl", hash = "sha256:69f05aaa90c99ac2f2af72d8d7f185f729721ad7c4be89e9e3d0ab101b0ee875"}, + {file = "pycryptodome-3.14.1-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:12ef157eb1e01a157ca43eda275fa68f8db0dd2792bc4fe00479ab8f0e6ae075"}, + {file = "pycryptodome-3.14.1-pp27-pypy_73-win32.whl", hash = "sha256:f572a3ff7b6029dd9b904d6be4e0ce9e309dcb847b03e3ac8698d9d23bb36525"}, + {file = "pycryptodome-3.14.1-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9924248d6920b59c260adcae3ee231cd5af404ac706ad30aa4cd87051bf09c50"}, + {file = "pycryptodome-3.14.1-pp36-pypy36_pp73-manylinux1_x86_64.whl", hash = "sha256:e0c04c41e9ade19fbc0eff6aacea40b831bfcb2c91c266137bcdfd0d7b2f33ba"}, + {file = "pycryptodome-3.14.1-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:893f32210de74b9f8ac869ed66c97d04e7d351182d6d39ebd3b36d3db8bda65d"}, + {file = "pycryptodome-3.14.1-pp36-pypy36_pp73-win32.whl", hash = "sha256:7fb90a5000cc9c9ff34b4d99f7f039e9c3477700e309ff234eafca7b7471afc0"}, + {file = "pycryptodome-3.14.1.tar.gz", hash = "sha256:e04e40a7f8c1669195536a37979dd87da2c32dbdc73d6fe35f0077b0c17c803b"}, +] +pydantic = [ + {file = "pydantic-1.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c8098a724c2784bf03e8070993f6d46aa2eeca031f8d8a048dff277703e6e193"}, + {file = "pydantic-1.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c320c64dd876e45254bdd350f0179da737463eea41c43bacbee9d8c9d1021f11"}, + {file = 
"pydantic-1.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18f3e912f9ad1bdec27fb06b8198a2ccc32f201e24174cec1b3424dda605a310"}, + {file = "pydantic-1.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11951b404e08b01b151222a1cb1a9f0a860a8153ce8334149ab9199cd198131"}, + {file = "pydantic-1.9.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8bc541a405423ce0e51c19f637050acdbdf8feca34150e0d17f675e72d119580"}, + {file = "pydantic-1.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e565a785233c2d03724c4dc55464559639b1ba9ecf091288dd47ad9c629433bd"}, + {file = "pydantic-1.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:a4a88dcd6ff8fd47c18b3a3709a89adb39a6373f4482e04c1b765045c7e282fd"}, + {file = "pydantic-1.9.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:447d5521575f18e18240906beadc58551e97ec98142266e521c34968c76c8761"}, + {file = "pydantic-1.9.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:985ceb5d0a86fcaa61e45781e567a59baa0da292d5ed2e490d612d0de5796918"}, + {file = "pydantic-1.9.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:059b6c1795170809103a1538255883e1983e5b831faea6558ef873d4955b4a74"}, + {file = "pydantic-1.9.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d12f96b5b64bec3f43c8e82b4aab7599d0157f11c798c9f9c528a72b9e0b339a"}, + {file = "pydantic-1.9.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ae72f8098acb368d877b210ebe02ba12585e77bd0db78ac04a1ee9b9f5dd2166"}, + {file = "pydantic-1.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:79b485767c13788ee314669008d01f9ef3bc05db9ea3298f6a50d3ef596a154b"}, + {file = "pydantic-1.9.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:494f7c8537f0c02b740c229af4cb47c0d39840b829ecdcfc93d91dcbb0779892"}, + {file = "pydantic-1.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0f047e11febe5c3198ed346b507e1d010330d56ad615a7e0a89fae604065a0e"}, + {file = "pydantic-1.9.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:969dd06110cb780da01336b281f53e2e7eb3a482831df441fb65dd30403f4608"}, + {file = "pydantic-1.9.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:177071dfc0df6248fd22b43036f936cfe2508077a72af0933d0c1fa269b18537"}, + {file = "pydantic-1.9.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9bcf8b6e011be08fb729d110f3e22e654a50f8a826b0575c7196616780683380"}, + {file = "pydantic-1.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a955260d47f03df08acf45689bd163ed9df82c0e0124beb4251b1290fa7ae728"}, + {file = "pydantic-1.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9ce157d979f742a915b75f792dbd6aa63b8eccaf46a1005ba03aa8a986bde34a"}, + {file = "pydantic-1.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0bf07cab5b279859c253d26a9194a8906e6f4a210063b84b433cf90a569de0c1"}, + {file = "pydantic-1.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d93d4e95eacd313d2c765ebe40d49ca9dd2ed90e5b37d0d421c597af830c195"}, + {file = "pydantic-1.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1542636a39c4892c4f4fa6270696902acb186a9aaeac6f6cf92ce6ae2e88564b"}, + {file = "pydantic-1.9.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a9af62e9b5b9bc67b2a195ebc2c2662fdf498a822d62f902bf27cccb52dbbf49"}, + {file = "pydantic-1.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:fe4670cb32ea98ffbf5a1262f14c3e102cccd92b1869df3bb09538158ba90fe6"}, + {file = "pydantic-1.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:9f659a5ee95c8baa2436d392267988fd0f43eb774e5eb8739252e5a7e9cf07e0"}, + {file = "pydantic-1.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b83ba3825bc91dfa989d4eed76865e71aea3a6ca1388b59fc801ee04c4d8d0d6"}, + {file = "pydantic-1.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1dd8fecbad028cd89d04a46688d2fcc14423e8a196d5b0a5c65105664901f810"}, + {file = "pydantic-1.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02eefd7087268b711a3ff4db528e9916ac9aa18616da7bca69c1871d0b7a091f"}, + {file = "pydantic-1.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7eb57ba90929bac0b6cc2af2373893d80ac559adda6933e562dcfb375029acee"}, + {file = "pydantic-1.9.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4ce9ae9e91f46c344bec3b03d6ee9612802682c1551aaf627ad24045ce090761"}, + {file = "pydantic-1.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:72ccb318bf0c9ab97fc04c10c37683d9eea952ed526707fabf9ac5ae59b701fd"}, + {file = "pydantic-1.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:61b6760b08b7c395975d893e0b814a11cf011ebb24f7d869e7118f5a339a82e1"}, + {file = "pydantic-1.9.1-py3-none-any.whl", hash = "sha256:4988c0f13c42bfa9ddd2fe2f569c9d54646ce84adc5de84228cfe83396f3bd58"}, + {file = "pydantic-1.9.1.tar.gz", hash = "sha256:1ed987c3ff29fff7fd8c3ea3a3ea877ad310aae2ef9889a119e22d3f2db0691a"}, +] +pyflakes = [ + {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, + {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, +] +pygments = [ + {file = "Pygments-2.12.0-py3-none-any.whl", hash = "sha256:dc9c10fb40944260f6ed4c688ece0cd2048414940f1cea51b8b226318411c519"}, + {file = "Pygments-2.12.0.tar.gz", hash = "sha256:5eb116118f9612ff1ee89ac96437bb6b49e8f04d8a13b514ba26f620208e26eb"}, +] +pyparsing = [ + {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, + {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, +] +pytest = [ + {file = "pytest-7.1.2-py3-none-any.whl", hash = "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c"}, + {file = "pytest-7.1.2.tar.gz", hash = "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45"}, +] +pytest-asyncio = [ + {file = "pytest-asyncio-0.18.3.tar.gz", hash = "sha256:7659bdb0a9eb9c6e3ef992eef11a2b3e69697800ad02fb06374a210d85b29f91"}, + {file = "pytest_asyncio-0.18.3-1-py3-none-any.whl", hash = "sha256:16cf40bdf2b4fb7fc8e4b82bd05ce3fbcd454cbf7b92afc445fe299dabb88213"}, + {file = "pytest_asyncio-0.18.3-py3-none-any.whl", hash = "sha256:8fafa6c52161addfd41ee7ab35f11836c5a16ec208f93ee388f752bea3493a84"}, +] +python-dateutil = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] +python-multipart = [ + {file = "python-multipart-0.0.5.tar.gz", hash = "sha256:f7bb5f611fc600d15fa47b3974c8aa16e93724513b49b5f95c81e6624c83fa43"}, +] +pyyaml = [ + {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, + {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, + {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, + {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, + {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, + {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, + {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, + {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, + {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, + {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, + {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, + {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, + {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, + {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, + {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, +] +requests = [ + {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, + {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, +] +respx = [ + {file = "respx-0.19.2-py2.py3-none-any.whl", hash = "sha256:417f986fec599b9cc6531e93e494b7a75d1cb7bccff9dde5b53edc51f7954494"}, + {file = "respx-0.19.2.tar.gz", hash = "sha256:f3d210bb4de0ccc4c5afabeb87c3c1b03b3765a9c1a73eb042a07bb18ac33705"}, +] +rfc3986 = [ + {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, + {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, +] +six = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] +sniffio = [ + {file = "sniffio-1.2.0-py3-none-any.whl", hash = "sha256:471b71698eac1c2112a40ce2752bb2f4a4814c22a54a3eed3676bc0f5ca9f663"}, + {file = "sniffio-1.2.0.tar.gz", hash = "sha256:c4666eecec1d3f50960c6bdf61ab7bc350648da6c126e3cf6898d8cd4ddcd3de"}, +] +soupsieve = [ + {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"}, + {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, +] +sqlalchemy = [ + {file = "SQLAlchemy-1.4.37-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:d9050b0c4a7f5538650c74aaba5c80cd64450e41c206f43ea6d194ae6d060ff9"}, + {file = "SQLAlchemy-1.4.37-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b4c92823889cf9846b972ee6db30c0e3a92c0ddfc76c6060a6cda467aa5fb694"}, + {file = "SQLAlchemy-1.4.37-cp27-cp27m-win32.whl", hash = 
"sha256:b55932fd0e81b43f4aff397c8ad0b3c038f540af37930423ab8f47a20b117e4c"}, + {file = "SQLAlchemy-1.4.37-cp27-cp27m-win_amd64.whl", hash = "sha256:4a17c1a1152ca4c29d992714aa9df3054da3af1598e02134f2e7314a32ef69d8"}, + {file = "SQLAlchemy-1.4.37-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ffe487570f47536b96eff5ef2b84034a8ba4e19aab5ab7647e677d94a119ea55"}, + {file = "SQLAlchemy-1.4.37-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:78363f400fbda80f866e8e91d37d36fe6313ff847ded08674e272873c1377ea5"}, + {file = "SQLAlchemy-1.4.37-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ee34c85cbda7779d66abac392c306ec78c13f5c73a1f01b8b767916d4895d23"}, + {file = "SQLAlchemy-1.4.37-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8b38e088659b30c2ca0af63e5d139fad1779a7925d75075a08717a21c406c0f6"}, + {file = "SQLAlchemy-1.4.37-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6629c79967a6c92e33fad811599adf9bc5cee6e504a1027bbf9cc1b6fb2d276d"}, + {file = "SQLAlchemy-1.4.37-cp310-cp310-win32.whl", hash = "sha256:2aac2a685feb9882d09f457f4e5586c885d578af4e97a2b759e91e8c457cbce5"}, + {file = "SQLAlchemy-1.4.37-cp310-cp310-win_amd64.whl", hash = "sha256:7a44683cf97744a405103ef8fdd31199e9d7fc41b4a67e9044523b29541662b0"}, + {file = "SQLAlchemy-1.4.37-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:cffc67cdd07f0e109a1fc83e333972ae423ea5ad414585b63275b66b870ea62b"}, + {file = "SQLAlchemy-1.4.37-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17417327b87a0f703c9a20180f75e953315207d048159aff51822052f3e33e69"}, + {file = "SQLAlchemy-1.4.37-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:aaa0e90e527066409c2ea5676282cf4afb4a40bb9dce0f56c8ec2768bff22a6e"}, + {file = "SQLAlchemy-1.4.37-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c1d9fb3931e27d59166bb5c4dcc911400fee51082cfba66ceb19ac954ade068"}, + {file = "SQLAlchemy-1.4.37-cp36-cp36m-win32.whl", hash = "sha256:0e7fd52e48e933771f177c2a1a484b06ea03774fc7741651ebdf19985a34037c"}, + {file = "SQLAlchemy-1.4.37-cp36-cp36m-win_amd64.whl", hash = "sha256:eec39a17bab3f69c44c9df4e0ed87c7306f2d2bf1eca3070af644927ec4199fa"}, + {file = "SQLAlchemy-1.4.37-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:caca6acf3f90893d7712ae2c6616ecfeac3581b4cc677c928a330ce6fbad4319"}, + {file = "SQLAlchemy-1.4.37-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50c8eaf44c3fed5ba6758d375de25f163e46137c39fda3a72b9ee1d1bb327dfc"}, + {file = "SQLAlchemy-1.4.37-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:139c50b9384e6d32a74fc4dcd0e9717f343ed38f95dbacf832c782c68e3862f3"}, + {file = "SQLAlchemy-1.4.37-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4c3b009c9220ae6e33f17b45f43fb46b9a1d281d76118405af13e26376f2e11"}, + {file = "SQLAlchemy-1.4.37-cp37-cp37m-win32.whl", hash = "sha256:9785d6f962d2c925aeb06a7539ac9d16608877da6aeaaf341984b3693ae80a02"}, + {file = "SQLAlchemy-1.4.37-cp37-cp37m-win_amd64.whl", hash = "sha256:3197441772dc3b1c6419f13304402f2418a18d7fe78000aa5a026e7100836739"}, + {file = "SQLAlchemy-1.4.37-cp38-cp38-macosx_10_15_x86_64.whl", hash = 
"sha256:3862a069a24f354145e01a76c7c720c263d62405fe5bed038c46a7ce900f5dd6"}, + {file = "SQLAlchemy-1.4.37-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e8706919829d455a9fa687c6bbd1b048e36fec3919a59f2d366247c2bfdbd9c"}, + {file = "SQLAlchemy-1.4.37-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:06ec11a5e6a4b6428167d3ce33b5bd455c020c867dabe3e6951fa98836e0741d"}, + {file = "SQLAlchemy-1.4.37-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d58f2d9d1a4b1459e8956a0153a4119da80f54ee5a9ea623cd568e99459a3ef1"}, + {file = "SQLAlchemy-1.4.37-cp38-cp38-win32.whl", hash = "sha256:d6927c9e3965b194acf75c8e0fb270b4d54512db171f65faae15ef418721996e"}, + {file = "SQLAlchemy-1.4.37-cp38-cp38-win_amd64.whl", hash = "sha256:a91d0668cada27352432f15b92ac3d43e34d8f30973fa8b86f5e9fddee928f3b"}, + {file = "SQLAlchemy-1.4.37-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:f9940528bf9c4df9e3c3872d23078b6b2da6431c19565637c09f1b88a427a684"}, + {file = "SQLAlchemy-1.4.37-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29a742c29fea12259f1d2a9ee2eb7fe4694a85d904a4ac66d15e01177b17ad7f"}, + {file = "SQLAlchemy-1.4.37-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7e579d6e281cc937bdb59917017ab98e618502067e04efb1d24ac168925e1d2a"}, + {file = "SQLAlchemy-1.4.37-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a940c551cfbd2e1e646ceea2777944425f5c3edff914bc808fe734d9e66f8d71"}, + {file = "SQLAlchemy-1.4.37-cp39-cp39-win32.whl", hash = "sha256:5e4e517ce72fad35cce364a01aff165f524449e9c959f1837dc71088afa2824c"}, + {file = "SQLAlchemy-1.4.37-cp39-cp39-win_amd64.whl", hash = "sha256:c37885f83b59e248bebe2b35beabfbea398cb40960cdc6d3a76eac863d4e1938"}, + {file = "SQLAlchemy-1.4.37.tar.gz", hash = "sha256:3688f92c62db6c5df268e2264891078f17ecb91e3141b400f2e28d0f75796dea"}, +] +sqlalchemy2-stubs = [ + {file = "sqlalchemy2-stubs-0.0.2a23.tar.gz", hash = "sha256:a13d94e23b5b0da8ee21986ef8890788a1f2eb26c2a9f39424cc933e4e7e87ff"}, + {file = "sqlalchemy2_stubs-0.0.2a23-py3-none-any.whl", hash = "sha256:6011d2219365d4e51f3e9d83ffeb5b904964ef1d143dc1298d8a70ce8641014d"}, +] +starlette = [ + {file = "starlette-0.19.1-py3-none-any.whl", hash = "sha256:5a60c5c2d051f3a8eb546136aa0c9399773a689595e099e0877704d5888279bf"}, + {file = "starlette-0.19.1.tar.gz", hash = "sha256:c6d21096774ecb9639acad41b86b7706e52ba3bf1dc13ea4ed9ad593d47e24c7"}, +] +timeago = [ + {file = "timeago-1.0.15.tar.gz", hash = "sha256:cfce420d82892af6b2439d0f69eeb3e876bbeddab6670c3c88ebf7676407bf4c"}, +] +tomli = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] +tomli-w = [ + {file = "tomli_w-1.0.0-py3-none-any.whl", hash = "sha256:9f2a07e8be30a0729e533ec968016807069991ae2fd921a78d42f429ae5f4463"}, + {file = "tomli_w-1.0.0.tar.gz", hash = "sha256:f463434305e0336248cac9c2dc8076b707d8a12d019dd349f5c1e382dd1ae1b9"}, +] +types-bleach = [ + {file = "types-bleach-5.0.2.tar.gz", hash = "sha256:e1498c512a62117496cf82be3d129972bb89fd1d6482b001cdeb2759ab3c82f5"}, + {file = "types_bleach-5.0.2-py3-none-any.whl", hash = 
"sha256:6fcb75ee4b69190fe60340147b66442cecddaefe3c0629433a4240da1ec2dcf6"}, +] +types-markdown = [ + {file = "types-Markdown-3.3.28.tar.gz", hash = "sha256:733ba19dad58d5dca1206390f55fa285573535b7c369b94dd367bbc34bf7e4de"}, + {file = "types_Markdown-3.3.28-py3-none-any.whl", hash = "sha256:7868cfa3f8a2304d9ecea2ca9b02c14fcb2e34bd26fdbaf01d8c4d362a85d345"}, +] +types-python-dateutil = [ + {file = "types-python-dateutil-2.8.17.tar.gz", hash = "sha256:6c54265a221681dd87f61df6743bd5eab060cf1b4086ff65c1a8fd763ed6370e"}, + {file = "types_python_dateutil-2.8.17-py3-none-any.whl", hash = "sha256:0be7435b4d382d1cd00b8c55a8a90f4e515aaad8a96f8f0bc20c22df046792e5"}, +] +types-requests = [ + {file = "types-requests-2.27.29.tar.gz", hash = "sha256:fb453b3a76a48eca66381cea8004feaaea12835e838196f5c7ac87c75c5c19ef"}, + {file = "types_requests-2.27.29-py3-none-any.whl", hash = "sha256:014f4f82db7b96c41feea9adaea30e68cd64c230eeab34b70c29bebb26ec74ac"}, +] +types-urllib3 = [ + {file = "types-urllib3-1.26.15.tar.gz", hash = "sha256:c89283541ef92e344b7f59f83ea9b5a295b16366ceee3f25ecfc5593c79f794e"}, + {file = "types_urllib3-1.26.15-py3-none-any.whl", hash = "sha256:6011befa13f901fc934f59bb1fd6973be6f3acf4ebfce427593a27e7f492918f"}, +] +typing-extensions = [ + {file = "typing_extensions-4.2.0-py3-none-any.whl", hash = "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708"}, + {file = "typing_extensions-4.2.0.tar.gz", hash = "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"}, +] +urllib3 = [ + {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"}, + {file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"}, +] +uvicorn = [ + {file = "uvicorn-0.17.6-py3-none-any.whl", hash = "sha256:19e2a0e96c9ac5581c01eb1a79a7d2f72bb479691acd2b8921fce48ed5b961a6"}, + {file = "uvicorn-0.17.6.tar.gz", hash = "sha256:5180f9d059611747d841a4a4c4ab675edf54c8489e97f96d0583ee90ac3bfc23"}, +] +watchdog = [ + {file = "watchdog-2.1.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:676263bee67b165f16b05abc52acc7a94feac5b5ab2449b491f1a97638a79277"}, + {file = "watchdog-2.1.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:aa68d2d9a89d686fae99d28a6edf3b18595e78f5adf4f5c18fbfda549ac0f20c"}, + {file = "watchdog-2.1.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5e2e51c53666850c3ecffe9d265fc5d7351db644de17b15e9c685dd3cdcd6f97"}, + {file = "watchdog-2.1.8-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:7721ac736170b191c50806f43357407138c6748e4eb3e69b071397f7f7aaeedd"}, + {file = "watchdog-2.1.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ce7376aed3da5fd777483fe5ebc8475a440c6d18f23998024f832134b2938e7b"}, + {file = "watchdog-2.1.8-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f9ee4c6bf3a1b2ed6be90a2d78f3f4bbd8105b6390c04a86eb48ed67bbfa0b0b"}, + {file = "watchdog-2.1.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:68dbe75e0fa1ba4d73ab3f8e67b21770fbed0651d32ce515cd38919a26873266"}, + {file = "watchdog-2.1.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0c520009b8cce79099237d810aaa19bc920941c268578436b62013b2f0102320"}, + {file = "watchdog-2.1.8-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:efcc8cbc1b43902571b3dce7ef53003f5b97fe4f275fe0489565fc6e2ebe3314"}, + {file = "watchdog-2.1.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:746e4c197ec1083581bb1f64d07d1136accf03437badb5ff8fcb862565c193b2"}, + {file = 
"watchdog-2.1.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1ae17b6be788fb8e4d8753d8d599de948f0275a232416e16436363c682c6f850"}, + {file = "watchdog-2.1.8-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ddde157dc1447d8130cb5b8df102fad845916fe4335e3d3c3f44c16565becbb7"}, + {file = "watchdog-2.1.8-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4978db33fc0934c92013ee163a9db158ec216099b69fce5aec790aba704da412"}, + {file = "watchdog-2.1.8-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b962de4d7d92ff78fb2dbc6a0cb292a679dea879a0eb5568911484d56545b153"}, + {file = "watchdog-2.1.8-py3-none-manylinux2014_aarch64.whl", hash = "sha256:1e5d0fdfaa265c29dc12621913a76ae99656cf7587d03950dfeb3595e5a26102"}, + {file = "watchdog-2.1.8-py3-none-manylinux2014_armv7l.whl", hash = "sha256:036ed15f7cd656351bf4e17244447be0a09a61aaa92014332d50719fc5973bc0"}, + {file = "watchdog-2.1.8-py3-none-manylinux2014_i686.whl", hash = "sha256:2962628a8777650703e8f6f2593065884c602df7bae95759b2df267bd89b2ef5"}, + {file = "watchdog-2.1.8-py3-none-manylinux2014_ppc64.whl", hash = "sha256:156ec3a94695ea68cfb83454b98754af6e276031ba1ae7ae724dc6bf8973b92a"}, + {file = "watchdog-2.1.8-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:47598fe6713fc1fee86b1ca85c9cbe77e9b72d002d6adeab9c3b608f8a5ead10"}, + {file = "watchdog-2.1.8-py3-none-manylinux2014_s390x.whl", hash = "sha256:fed4de6e45a4f16e4046ea00917b4fe1700b97244e5d114f594b4a1b9de6bed8"}, + {file = "watchdog-2.1.8-py3-none-manylinux2014_x86_64.whl", hash = "sha256:24dedcc3ce75e150f2a1d704661f6879764461a481ba15a57dc80543de46021c"}, + {file = "watchdog-2.1.8-py3-none-win32.whl", hash = "sha256:6ddf67bc9f413791072e3afb466e46cc72c6799ba73dea18439b412e8f2e3257"}, + {file = "watchdog-2.1.8-py3-none-win_amd64.whl", hash = "sha256:88ef3e8640ef0a64b7ad7394b0f23384f58ac19dd759da7eaa9bc04b2898943f"}, + {file = "watchdog-2.1.8-py3-none-win_ia64.whl", hash = "sha256:0fb60c7d31474b21acba54079ce9ff0136411183e9a591369417cddb1d7d00d7"}, + {file = "watchdog-2.1.8.tar.gz", hash = "sha256:6d03149126864abd32715d4e9267d2754cede25a69052901399356ad3bc5ecff"}, +] +wcwidth = [ + {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, + {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, +] +webencodings = [ + {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, + {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, +] +win32-setctime = [ + {file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"}, + {file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"}, +] diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..55bacb4 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,68 @@ +[tool.poetry] +name = "microblogpub" +version = "2.0.0" +description = "" +authors = ["Thomas Sileo "] +license = "AGPL-3.0" + +[tool.poetry.dependencies] +python = "^3.10" +Jinja2 = "^3.1.2" +fastapi = "^0.78.0" +uvicorn = "^0.17.6" +pycryptodome = "^3.14.1" +bcrypt = "^3.2.2" +itsdangerous = "^2.1.2" +python-multipart = "^0.0.5" +tomli = "^2.0.1" +httpx = "^0.23.0" +timeago = "^1.0.15" +SQLAlchemy = {extras = ["mypy"], version = "^1.4.37"} +alembic = "^1.8.0" 
+bleach = "^5.0.0" +requests = "^2.27.1" +Markdown = "^3.3.7" +prompt-toolkit = "^3.0.29" +tomli-w = "^1.0.0" +python-dateutil = "^2.8.2" +bs4 = "^0.0.1" +html5lib = "^1.1" +mf2py = "^1.1.2" +Pygments = "^2.12.0" +types-python-dateutil = "^2.8.17" +loguru = "^0.6.0" +mdx-linkify = "^2.1" + +[tool.poetry.dev-dependencies] +black = "^22.3.0" +flake8 = "^4.0.1" +mypy = "^0.960" +isort = "^5.10.1" +types-requests = "^2.27.29" +invoke = "^1.7.1" +libsass = "^0.21.0" +pytest = "^7.1.2" +respx = "^0.19.2" +boussole = "^2.0.0" +types-bleach = "^5.0.2" +types-Markdown = "^3.3.28" +factory-boy = "^3.2.1" +pytest-asyncio = "^0.18.3" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.isort] +profile = "black" + +[tool.mypy] +exclude = ["alembic/versions/"] +plugins = ["sqlalchemy.ext.mypy.plugin", "pydantic.mypy"] + +[tool.black] +extend-exclude = ''' +/( + | alembic/versions +)/ +''' diff --git a/scripts/config_wizard.py b/scripts/config_wizard.py new file mode 100644 index 0000000..4a2e1d1 --- /dev/null +++ b/scripts/config_wizard.py @@ -0,0 +1,85 @@ +"""Basic wizard for setting up microblog.pub configuration files.""" +import os +import sys +from pathlib import Path +from typing import Any + +import bcrypt +import tomli_w +from markdown import markdown # type: ignore +from prompt_toolkit import prompt + +from app.key import generate_key +from app.key import key_exists + + +def main() -> None: + print("Welcome to microblog.pub setup wizard\n") + print("Generating key...") + if key_exists(): + yn = "" + while yn not in ["y", "n"]: + yn = prompt( + "WARNING, a key already exists, overwrite it? (y/n): ", default="n" + ).lower() + if yn == "y": + generate_key() + else: + generate_key() + + config_file = Path("data/me.toml") + + if config_file.exists(): + # Spit out the relative path for the "config artifacts" + rconfig_file = "data/me.toml" + print( + f"Existing setup detected, please delete {rconfig_file} " + "before restarting the wizard" + ) + sys.exit(2) + + dat: dict[str, Any] = {} + print("Your identity will be @{username}@{domain}") + dat["domain"] = prompt("domain: ") + dat["username"] = prompt("username: ") + dat["admin_password"] = bcrypt.hashpw( + prompt("admin password: ", is_password=True).encode(), bcrypt.gensalt() + ).decode() + dat["name"] = prompt("name (e.g. John Doe): ", default=dat["username"]) + dat["summary"] = markdown( + prompt( + ( + "summary (short description, in markdown, " + "press [ESC] then [ENTER] to submit):\n" + ), + multiline=True, + ) + ) + dat["https"] = True + proto = "https" + yn = "" + while yn not in ["y", "n"]: + yn = prompt("will the site be served via https? 
(y/n): ", default="y").lower() + if yn == "n": + dat["https"] = False + proto = "http" + + print("Note that you can put your icon/avatar in the static/ directory") + dat["icon_url"] = prompt( + "icon URL: ", default=f'{proto}://{dat["domain"]}/static/nopic.png' + ) + dat["secret"] = os.urandom(16).hex() + + with config_file.open("w") as f: + f.write(tomli_w.dumps(dat)) + + print("Done") + sys.exit(0) + + +if __name__ == "__main__": + try: + main() + except KeyboardInterrupt: + print("Aborted") + sys.exit(1) diff --git a/scss/main.scss b/scss/main.scss new file mode 100644 index 0000000..8144b66 --- /dev/null +++ b/scss/main.scss @@ -0,0 +1,135 @@ +body { + margin: 0; + padding: 0; + display: flex; + min-height: 100vh; + flex-direction: column; +} +#main { + flex: 1; +} +main { + max-width: 800px; + margin: 20px auto; +} +footer { + max-width: 800px; + margin: 20px auto; +} +#notifications, #followers, #following { + ul { + list-style-type: none; + margin: 0; + padding: 0; + } + li { + display: inline-block; + } +} +.actor-box { + a { + text-decoration: none; + } +} + +#admin { +.navbar { + display: grid; + grid-template-rows: auto; + grid-template-columns: 1fr; + grid-auto-flow: dense; + justify-items: stretch; + align-items: stretch; + column-gap: 20px; +} + +.logo { + grid-column:-3; + padding: 5px; +} + +.menus { + display: flex; + flex-direction: row; + justify-content: start; + grid-column: 1; +} + +.menus * { + padding: 5px; +} +} + +nav.flexbox { + display: flex; + justify-content: space-between; + align-items: center; + + ul { + display: flex; + align-items: center; + list-style-type: none; + margin: 0; + padding: 0; + + } + + ul li { + margin-right: 20px; + + &:last-child { + margin-right: 0px; + } + } +} +#admin { + a.active { + font-weight: bold; + text-decoration: none; + } +} + + +.activity-wrap { + margin: 0 auto; + padding: 30px 0; + .actor-icon { + width:48px; + margin-right: 15px; + img { + max-width: 48px; + } + } + .activity-content { + display: flex; + align-items:flex-start; + .activity-header { + width: 100%; + strong { + font-weight:bold; + } + span { + font-weight:normal; + margin-left: 5px; + } + .activity-date { float:right; } + } + } + .activity-attachment { + padding-left: 60px; + img, audio, video { + width: 100%; + max-width: 740px; + margin: 30px 0; + } + } + .activity-bar { + display: flex; + margin-left: 60px; + margin-top: 10px; + .bar-item { + display: flex; + margin-right: 20px; + } + } +} diff --git a/tasks.py b/tasks.py new file mode 100644 index 0000000..5c7fda2 --- /dev/null +++ b/tasks.py @@ -0,0 +1,67 @@ +from typing import Optional + +from invoke import Context # type: ignore +from invoke import run # type: ignore +from invoke import task # type: ignore + + +@task +def generate_db_migration(ctx, message): + # type: (Context, str) -> None + run(f'poetry run alembic revision --autogenerate -m "{message}"', echo=True) + + +@task +def migrate_db(ctx): + # type: (Context) -> None + run("poetry run alembic upgrade head", echo=True) + + +@task +def autoformat(ctx): + # type: (Context) -> None + run("black .", echo=True) + run("isort -sl .", echo=True) + + +@task +def lint(ctx): + # type: (Context) -> None + run("black --check .", echo=True) + run("isort -sl --check-only .", echo=True) + run("flake8 .", echo=True) + run("mypy .", echo=True) + + +@task +def compile_scss(ctx, watch=False): + # type: (Context, bool) -> None + if watch: + run("poetry run boussole watch", echo=True) + else: + run("poetry run boussole compile", echo=True) + + +@task +def 
uvicorn(ctx): + # type: (Context) -> None + run("poetry run uvicorn app.main:app --no-server-header", pty=True, echo=True) + + +@task +def process_outgoing_activities(ctx): + # type: (Context) -> None + run("poetry run python app/process_outgoing_activities.py", pty=True, echo=True) + + +@task +def tests(ctx, k=None): + # type: (Context, Optional[str]) -> None + pytest_args = " -vvv" + if k: + pytest_args += f" -k {k}" + run( + f"MICROBLOGPUB_CONFIG_FILE=tests.toml pytest tests{pytest_args}", + pty=True, + echo=True, + ) diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..9774702 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,49 @@ +from typing import Generator + +import pytest +from fastapi.testclient import TestClient +from sqlalchemy import orm + +from app.database import Base +from app.database import engine +from app.database import get_db +from app.main import app + +_Session = orm.sessionmaker(bind=engine, autocommit=False, autoflush=False) + + +def _get_db_for_testing() -> Generator[orm.Session, None, None]: + session = _Session() + try: + yield session + finally: + session.close() + + +@pytest.fixture +def db() -> Generator: + Base.metadata.create_all(bind=engine) + yield orm.scoped_session(orm.sessionmaker(bind=engine)) + try: + Base.metadata.drop_all(bind=engine) + except Exception: + # XXX: for some reason, the teardown occasionally fails because of this + pass + + +@pytest.fixture +def exclude_fastapi_middleware(): + """Workaround for https://github.com/encode/starlette/issues/472""" + user_middleware = app.user_middleware.copy() + app.user_middleware = [] + app.middleware_stack = app.build_middleware_stack() + yield + app.user_middleware = user_middleware + app.middleware_stack = app.build_middleware_stack() + + +@pytest.fixture +def client(db, exclude_fastapi_middleware) -> Generator: + app.dependency_overrides[get_db] = _get_db_for_testing + with TestClient(app) as c: + yield c diff --git a/tests/factories.py b/tests/factories.py new file mode 100644 index 0000000..f0aff49 --- /dev/null +++ b/tests/factories.py @@ -0,0 +1,140 @@ +from uuid import uuid4 + +import factory # type: ignore +from Crypto.PublicKey import RSA +from sqlalchemy import orm + +from app import activitypub as ap +from app import actor +from app import models +from app.actor import RemoteActor +from app.ap_object import RemoteObject +from app.database import engine + +_Session = orm.scoped_session(orm.sessionmaker(bind=engine)) + + +def generate_key() -> tuple[str, str]: + k = RSA.generate(1024) + return k.exportKey("PEM").decode(), k.publickey().exportKey("PEM").decode() + + +def build_follow_activity( + from_remote_actor: actor.RemoteActor, + for_remote_actor: actor.RemoteActor, + outbox_public_id: str | None = None, +) -> ap.RawObject: + return { + "@context": ap.AS_CTX, + "type": "Follow", + "id": from_remote_actor.ap_id + "/follow/" + (outbox_public_id or uuid4().hex), + "actor": from_remote_actor.ap_id, + "object": for_remote_actor.ap_id, + } + + +def build_accept_activity( + from_remote_actor: actor.RemoteActor, + for_remote_object: RemoteObject, + outbox_public_id: str | None = None, +) -> ap.RawObject: + return { + "@context": ap.AS_CTX, + "type": "Accept", + "id": from_remote_actor.ap_id + "/accept/" + (outbox_public_id or uuid4().hex), + "actor": from_remote_actor.ap_id, + "object": for_remote_object.ap_id, + } + + +class BaseModelMeta: + sqlalchemy_session = 
_Session + sqlalchemy_session_persistence = "commit" + + +class RemoteActorFactory(factory.Factory): + class Meta: + model = RemoteActor + exclude = ( + "base_url", + "username", + "public_key", + ) + + class Params: + icon_url = None + summary = "I like unit tests" + + ap_actor = factory.LazyAttribute( + lambda o: { + "@context": ap.AS_CTX, + "type": "Person", + "id": o.base_url, + "following": o.base_url + "/following", + "followers": o.base_url + "/followers", + # "featured": ID + "/featured", + "inbox": o.base_url + "/inbox", + "outbox": o.base_url + "/outbox", + "preferredUsername": o.username, + "name": o.username, + "summary": o.summary, + "endpoints": {}, + "url": o.base_url, + "manuallyApprovesFollowers": False, + "attachment": [], + "icon": {}, + "publicKey": { + "id": f"{o.base_url}#main-key", + "owner": o.base_url, + "publicKeyPem": o.public_key, + }, + } + ) + + +class ActorFactory(factory.alchemy.SQLAlchemyModelFactory): + class Meta(BaseModelMeta): + model = models.Actor + + # ap_actor + # ap_id + ap_type = "Person" + + @classmethod + def from_remote_actor(cls, ra): + return cls( + ap_type=ra.ap_type, + ap_actor=ra.ap_actor, + ap_id=ra.ap_id, + ) + + +class OutboxObjectFactory(factory.alchemy.SQLAlchemyModelFactory): + class Meta(BaseModelMeta): + model = models.OutboxObject + + # public_id + # relates_to_inbox_object_id + # relates_to_outbox_object_id + + @classmethod + def from_remote_object(cls, public_id, ro): + return cls( + public_id=public_id, + ap_type=ro.ap_type, + ap_id=ro.ap_id, + ap_context=ro.context, + ap_object=ro.ap_object, + visibility=ro.visibility, + og_meta=ro.og_meta, + activity_object_ap_id=ro.activity_object_ap_id, + is_hidden_from_homepage=True if ro.in_reply_to else False, + ) + + +class OutgoingActivityFactory(factory.alchemy.SQLAlchemyModelFactory): + class Meta(BaseModelMeta): + model = models.OutgoingActivity + + # recipient + # outbox_object_id diff --git a/tests/test.key b/tests/test.key new file mode 100644 index 0000000..375428a --- /dev/null +++ b/tests/test.key @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEAvYhynEC0l2WVpXoPutfhhZHEeQyyoHiMszOfl1EHM50V0xOC +XCoXd/i5Hsa6dWswyjftOtSmdknY5Whr6LatwNu+i/tlsjmHSGgdhUxLhbj4Xc5T +LQWxDbS1cg49IwSZFYSIrBw2yfPI3dpMNzYvBt8CKAk0zodypHzdfSKPbSRIyBAy +SuG+mJsxsg9tx9CgWNrizauj/zVSWa/cRvNTvIwlxs1J516QJ0px3NygKqPMP2I4 +zNkhKFzaNDLzuv4zMsW8UNoM+Mlpf6+NbHQycUC9gIqywrP21E7YFmdljyr5cAfr +qn+KgDsQTpDSINFE1oUanY0iadKvFXjD9uQLfwIDAQABAoIBAAtqK1TjxLyVfqS/ +rDDZjZiIxedwb1WgzQCB7GulkqR2Inla5G/+jPlJvoRu/Y3SzdZv9dakNf5LxkdS +uaUDU4WY9mnh0ycftdkThCuiA65jDHpB0dqVTCuCJadf2ijAvyN/nueWr2oMR52s +5wgwODbWuX+Fxmtl1u63InPF4BN3kEQcGP4pgXMiQ2QEwjxMubG7fZTuHFChsZMZ +0QyHy0atmauK8+1FeseoZv7LefgjE+UhAKnIz5z/Ij4erGRaWJUKe5YS7i8nTT6M +W+SJ/gs/l6vOUmrqHZaXsp29pvseY23akgGnZciHJfuj/vxMJjGfZVM2ls+MUkh4 +tdEZ0NECgYEAxRGcRxhQyOdiohcsH4efG03mB7u+JBuvt33oFXWOCpW7lenAr9qg +3hm30lZq95ST3XilqGldgIW2zpHCkSLXk/lsJteNC9EEk8HuTDJ7Gd4SBiXisELd +IY147SJu5KXN/kaGoDMgMCGcR7Qkr6hzsRT3308A6nMNZG0viyUMzicCgYEA9jXx +WaLe0PC8pT/yAyPJnYerSOofv+vz+3KNlopBTSRsREsCpdbyOnGCXa4bechj29Lv +0QCbQMkga2pXUPNszdUz7L0LnAi8DZhKumPxyz82kcZSxSCGsvwp9kZju/LPCIHo +j1wKW92/w47QXdzCVjgkKbDAGsSwzphEJOuMhukCgYBUKl9KZfIqu9f+TlND7BJi +APUbnG1q0oBLp/R1Jc3Sa3zAXCM1d/R4pxdBODNbJhO45QwrT0Tl3TXkJ5Cnl+/m +fQJZ3Hma8Fw6FvuFg5HbzGJ6Sbf1e7kh2WAqNyiRctb1oH1i8jLvG4u5fBCnDRTM +Lp5mu0Ey4Ix5tcA2d05uxQKBgQDDBiePIPvt9UL4gpZo9kgViAmdUBamJ3izjCGr +RQhE2r0Hu4L1ajWlJZRmMCuDY7/1uDhODXTs9GPBshJIBQoCYQcoVvaDOkf7XM6U +peY5YHERN08I5qLL1AJJGaiWj9Z+nqhgJj/uVNA5Tz6tmtg1A3Nhsqf4jCShAOu5 
+cvt1QQKBgH2Lg/o9KpFLeZLVXQzW3GFB7RzDetSDbpdhBBE3o/HAtrX0foEqYfKx +JuPrlGR2L6Q8jSw7AvFErkx5g5kCgdN8mOYjCe/EsL3ctIatqaoGDrjfvgWAeanW +XxMcVRlcMFzp5XB0VQhG0nP9uvHm/eIw/izN2JN7gz3ZZp84lq3S +-----END RSA PRIVATE KEY----- \ No newline at end of file diff --git a/tests/test_actor.py b/tests/test_actor.py new file mode 100644 index 0000000..801e406 --- /dev/null +++ b/tests/test_actor.py @@ -0,0 +1,46 @@ +import httpx +import respx + +from app import models +from app.actor import fetch_actor +from app.database import Session +from tests import factories + + +def test_fetch_actor(db: Session, respx_mock) -> None: + # Given a remote actor + ra = factories.RemoteActorFactory( + base_url="https://example.com", + username="toto", + public_key="pk", + ) + respx_mock.get(ra.ap_id).mock(return_value=httpx.Response(200, json=ra.ap_actor)) + + # When fetching this actor for the first time + saved_actor = fetch_actor(db, ra.ap_id) + + # Then it has been fetched and saved in DB + assert respx.calls.call_count == 1 + assert db.query(models.Actor).one().ap_id == saved_actor.ap_id + + # When fetching it a second time + actor_from_db = fetch_actor(db, ra.ap_id) + + # Then it's read from the DB + assert actor_from_db.ap_id == ra.ap_id + assert db.query(models.Actor).count() == 1 + assert respx.calls.call_count == 1 + + +def test_sqlalchemy_factory(db: Session) -> None: + ra = factories.RemoteActorFactory( + base_url="https://example.com", + username="toto", + public_key="pk", + ) + actor_in_db = factories.ActorFactory( + ap_type=ra.ap_type, + ap_actor=ra.ap_actor, + ap_id=ra.ap_id, + ) + assert actor_in_db.id == db.query(models.Actor).one().id diff --git a/tests/test_admin.py b/tests/test_admin.py new file mode 100644 index 0000000..3eb795f --- /dev/null +++ b/tests/test_admin.py @@ -0,0 +1,21 @@ +from fastapi.testclient import TestClient + +from app.main import app + + +def test_admin_endpoints_are_authenticated(client: TestClient): + routes_tested = [] + + for route in app.routes: + if not route.path.startswith("/admin") or route.path == "/admin/login": + continue + + for method in route.methods: + resp = client.request(method, route.path) + + # Admin routes should redirect to the login page + assert resp.status_code == 302, f"{method} {route.path} is unauthenticated" + assert resp.headers.get("Location") == "http://testserver/admin/login" + routes_tested.append((method, route.path)) + + assert len(routes_tested) > 0 diff --git a/tests/test_httpsig.py b/tests/test_httpsig.py new file mode 100644 index 0000000..97a8d24 --- /dev/null +++ b/tests/test_httpsig.py @@ -0,0 +1,177 @@ +from typing import Any + +import fastapi +import httpx +import pytest +import respx +from fastapi.testclient import TestClient + +from app import activitypub as ap +from app import httpsig +from app.httpsig import HTTPSigInfo +from app.key import Key +from tests import factories + +_test_app = fastapi.FastAPI() + + +def _httpsig_info_to_dict(httpsig_info: HTTPSigInfo) -> dict[str, Any]: + return { + "has_valid_signature": httpsig_info.has_valid_signature, + "signed_by_ap_actor_id": httpsig_info.signed_by_ap_actor_id, + } + + +@_test_app.get("/httpsig_checker") +def get_httpsig_checker( + httpsig_info: httpsig.HTTPSigInfo = fastapi.Depends(httpsig.httpsig_checker), +): + return _httpsig_info_to_dict(httpsig_info) + + +@_test_app.post("/enforce_httpsig") +async def post_enforce_httpsig( + request: fastapi.Request, + httpsig_info: httpsig.HTTPSigInfo = fastapi.Depends(httpsig.enforce_httpsig), +): + await request.json() + return 
_httpsig_info_to_dict(httpsig_info)
+
+
+def test_enforce_httpsig__no_signature(
+    client: TestClient,
+    respx_mock: respx.MockRouter,
+) -> None:
+    with TestClient(_test_app) as client:
+        response = client.post(
+            "/enforce_httpsig",
+            headers={"Content-Type": ap.AS_CTX},
+            json={"enforce_httpsig": True},
+        )
+
+    assert response.status_code == 401
+    assert response.json()["detail"] == "Invalid HTTP sig"
+
+
+@pytest.mark.asyncio
+async def test_enforce_httpsig__with_valid_signature(
+    client: TestClient,
+    respx_mock: respx.MockRouter,
+) -> None:
+    # Given a remote actor
+    privkey, pubkey = factories.generate_key()
+    ra = factories.RemoteActorFactory(
+        base_url="https://example.com",
+        username="toto",
+        public_key=pubkey,
+    )
+    k = Key(ra.ap_id, f"{ra.ap_id}#main-key")
+    k.load(privkey)
+    auth = httpsig.HTTPXSigAuth(k)
+    respx_mock.get(ra.ap_id).mock(return_value=httpx.Response(200, json=ra.ap_actor))
+
+    httpsig._get_public_key.cache_clear()
+
+    async with httpx.AsyncClient(app=_test_app, base_url="http://test") as client:
+        response = await client.post(
+            "/enforce_httpsig",
+            headers={"Content-Type": ap.AS_CTX},
+            json={"enforce_httpsig": True},
+            auth=auth,  # type: ignore
+        )
+    assert response.status_code == 200
+
+    json_response = response.json()
+
+    assert json_response["has_valid_signature"] is True
+    assert json_response["signed_by_ap_actor_id"] == ra.ap_id
+
+
+def test_httpsig_checker__no_signature(
+    client: TestClient,
+    respx_mock: respx.MockRouter,
+) -> None:
+    with TestClient(_test_app) as client:
+        response = client.get(
+            "/httpsig_checker",
+            headers={"Accept": ap.AS_CTX},
+        )
+
+    assert response.status_code == 200
+    json_response = response.json()
+    assert json_response["has_valid_signature"] is False
+    assert json_response["signed_by_ap_actor_id"] is None
+
+
+@pytest.mark.asyncio
+async def test_httpsig_checker__with_valid_signature(
+    client: TestClient,
+    respx_mock: respx.MockRouter,
+) -> None:
+    # Given a remote actor
+    privkey, pubkey = factories.generate_key()
+    ra = factories.RemoteActorFactory(
+        base_url="https://example.com",
+        username="toto",
+        public_key=pubkey,
+    )
+    respx_mock.get(ra.ap_id).mock(return_value=httpx.Response(200, json=ra.ap_actor))
+    k = Key(ra.ap_id, f"{ra.ap_id}#main-key")
+    k.load(privkey)
+    auth = httpsig.HTTPXSigAuth(k)
+
+    httpsig._get_public_key.cache_clear()
+
+    async with httpx.AsyncClient(app=_test_app, base_url="http://test") as client:
+        response = await client.get(
+            "/httpsig_checker",
+            headers={"Accept": ap.AS_CTX},
+            auth=auth,  # type: ignore
+        )
+
+    assert response.status_code == 200
+    json_response = response.json()
+
+    assert json_response["has_valid_signature"] is True
+    assert json_response["signed_by_ap_actor_id"] == ra.ap_id
+
+
+@pytest.mark.asyncio
+async def test_httpsig_checker__with_invalid_signature(
+    client: TestClient,
+    respx_mock: respx.MockRouter,
+) -> None:
+    # Given a remote actor
+    privkey, pubkey = factories.generate_key()
+    ra = factories.RemoteActorFactory(
+        base_url="https://example.com",
+        username="toto",
+        public_key=pubkey,
+    )
+    k = Key(ra.ap_id, f"{ra.ap_id}#main-key")
+    k.load(privkey)
+    auth = httpsig.HTTPXSigAuth(k)
+
+    ra2_privkey, ra2_pubkey = factories.generate_key()
+    ra2 = factories.RemoteActorFactory(
+        base_url="https://example.com",
+        username="toto",
+        public_key=ra2_pubkey,
+    )
+    assert ra.ap_id == ra2.ap_id
+    respx_mock.get(ra.ap_id).mock(return_value=httpx.Response(200, json=ra2.ap_actor))
+
+    httpsig._get_public_key.cache_clear()
+
+    async with 
httpx.AsyncClient(app=_test_app, base_url="http://test") as client: + response = await client.get( + "/httpsig_checker", + headers={"Accept": ap.AS_CTX}, + auth=auth, # type: ignore + ) + + assert response.status_code == 200 + json_response = response.json() + + assert json_response["has_valid_signature"] is False + assert json_response["signed_by_ap_actor_id"] == ra.ap_id diff --git a/tests/test_inbox.py b/tests/test_inbox.py new file mode 100644 index 0000000..736f6f3 --- /dev/null +++ b/tests/test_inbox.py @@ -0,0 +1,134 @@ +from uuid import uuid4 + +import httpx +import respx +from fastapi.testclient import TestClient + +from app import activitypub as ap +from app import models +from app.actor import LOCAL_ACTOR +from app.ap_object import RemoteObject +from app.database import Session +from tests import factories +from tests.utils import mock_httpsig_checker + + +def test_inbox_requires_httpsig( + client: TestClient, +): + response = client.post( + "/inbox", + headers={"Content-Type": ap.AS_CTX}, + json={}, + ) + assert response.status_code == 401 + assert response.json()["detail"] == "Invalid HTTP sig" + + +def test_inbox_follow_request( + db: Session, + client: TestClient, + respx_mock: respx.MockRouter, +) -> None: + # Given a remote actor + ra = factories.RemoteActorFactory( + base_url="https://example.com", + username="toto", + public_key="pk", + ) + respx_mock.get(ra.ap_id).mock(return_value=httpx.Response(200, json=ra.ap_actor)) + + # When sending a Follow activity + follow_activity = RemoteObject( + factories.build_follow_activity( + from_remote_actor=ra, + for_remote_actor=LOCAL_ACTOR, + ) + ) + with mock_httpsig_checker(ra): + response = client.post( + "/inbox", + headers={"Content-Type": ap.AS_CTX}, + json=follow_activity.ap_object, + ) + + # Then the server returns a 204 + assert response.status_code == 204 + + # And the actor was saved in DB + saved_actor = db.query(models.Actor).one() + assert saved_actor.ap_id == ra.ap_id + + # And the Follow activity was saved in the inbox + inbox_object = db.query(models.InboxObject).one() + assert inbox_object.ap_object == follow_activity.ap_object + + # And a follower was internally created + follower = db.query(models.Follower).one() + assert follower.ap_actor_id == ra.ap_id + assert follower.actor_id == saved_actor.id + assert follower.inbox_object_id == inbox_object.id + + # And an Accept activity was created in the outbox + outbox_object = db.query(models.OutboxObject).one() + assert outbox_object.ap_type == "Accept" + assert outbox_object.activity_object_ap_id == follow_activity.ap_id + + # And an outgoing activity was created to track the Accept activity delivery + outgoing_activity = db.query(models.OutgoingActivity).one() + assert outgoing_activity.outbox_object_id == outbox_object.id + + +def test_inbox_accept_follow_request( + db: Session, + client: TestClient, + respx_mock: respx.MockRouter, +) -> None: + # Given a remote actor + ra = factories.RemoteActorFactory( + base_url="https://example.com", + username="toto", + public_key="pk", + ) + respx_mock.get(ra.ap_id).mock(return_value=httpx.Response(200, json=ra.ap_actor)) + actor_in_db = factories.ActorFactory.from_remote_actor(ra) + + # And a Follow activity in the outbox + follow_id = uuid4().hex + follow_from_outbox = RemoteObject( + factories.build_follow_activity( + from_remote_actor=LOCAL_ACTOR, + for_remote_actor=ra, + outbox_public_id=follow_id, + ) + ) + outbox_object = factories.OutboxObjectFactory.from_remote_object( + follow_id, follow_from_outbox + ) + + # When 
sending an Accept activity
+    accept_activity = RemoteObject(
+        factories.build_accept_activity(
+            from_remote_actor=ra,
+            for_remote_object=follow_from_outbox,
+        )
+    )
+    with mock_httpsig_checker(ra):
+        response = client.post(
+            "/inbox",
+            headers={"Content-Type": ap.AS_CTX},
+            json=accept_activity.ap_object,
+        )
+
+    # Then the server returns a 204
+    assert response.status_code == 204
+
+    # And the Accept activity was saved in the inbox
+    inbox_activity = db.query(models.InboxObject).one()
+    assert inbox_activity.ap_type == "Accept"
+    assert inbox_activity.relates_to_outbox_object_id == outbox_object.id
+    assert inbox_activity.actor_id == actor_in_db.id
+
+    # And a following entry was created internally
+    following = db.query(models.Following).one()
+    assert following.ap_actor_id == actor_in_db.ap_id
diff --git a/tests/test_outbox.py b/tests/test_outbox.py
new file mode 100644
index 0000000..b5c006e
--- /dev/null
+++ b/tests/test_outbox.py
@@ -0,0 +1,46 @@
+import httpx
+import respx
+from fastapi.testclient import TestClient
+
+from app import models
+from app.config import generate_csrf_token
+from app.database import Session
+from tests import factories
+from tests.utils import generate_admin_session_cookies
+
+
+def test_send_follow_request(
+    db: Session,
+    client: TestClient,
+    respx_mock: respx.MockRouter,
+) -> None:
+    # Given a remote actor
+    ra = factories.RemoteActorFactory(
+        base_url="https://example.com",
+        username="toto",
+        public_key="pk",
+    )
+    respx_mock.get(ra.ap_id).mock(return_value=httpx.Response(200, json=ra.ap_actor))
+
+    response = client.post(
+        "/admin/actions/follow",
+        data={
+            "redirect_url": "http://testserver/",
+            "ap_actor_id": ra.ap_id,
+            "csrf_token": generate_csrf_token(),
+        },
+        cookies=generate_admin_session_cookies(),
+    )
+
+    # Then the server returns a 302
+    assert response.status_code == 302
+    assert response.headers.get("Location") == "http://testserver/"
+
+    # And the Follow activity was created in the outbox
+    outbox_object = db.query(models.OutboxObject).one()
+    assert outbox_object.ap_type == "Follow"
+    assert outbox_object.activity_object_ap_id == ra.ap_id
+
+    # And an outgoing activity was queued
+    outgoing_activity = db.query(models.OutgoingActivity).one()
+    assert outgoing_activity.outbox_object_id == outbox_object.id
diff --git a/tests/test_process_outgoing_activities.py b/tests/test_process_outgoing_activities.py
new file mode 100644
index 0000000..e2bd43f
--- /dev/null
+++ b/tests/test_process_outgoing_activities.py
@@ -0,0 +1,180 @@
+from uuid import uuid4
+
+import httpx
+import respx
+from fastapi.testclient import TestClient
+
+from app import models
+from app.actor import LOCAL_ACTOR
+from app.ap_object import RemoteObject
+from app.database import Session
+from app.process_outgoing_activities import _MAX_RETRIES
+from app.process_outgoing_activities import new_outgoing_activity
+from app.process_outgoing_activities import process_next_outgoing_activity
+from tests import factories
+
+
+def _setup_outbox_object() -> models.OutboxObject:
+    ra = factories.RemoteActorFactory(
+        base_url="https://example.com",
+        username="toto",
+        public_key="pk",
+    )
+
+    # And a Follow activity in the outbox
+    follow_id = uuid4().hex
+    follow_from_outbox = RemoteObject(
+        factories.build_follow_activity(
+            from_remote_actor=LOCAL_ACTOR,
+            for_remote_actor=ra,
+            outbox_public_id=follow_id,
+        )
+    )
+    outbox_object = factories.OutboxObjectFactory.from_remote_object(
+        follow_id, follow_from_outbox
+    )
+    return outbox_object
+
+
+def 
test_new_outgoing_activity( + db: Session, + client: TestClient, + respx_mock: respx.MockRouter, +) -> None: + outbox_object = _setup_outbox_object() + inbox_url = "https://example.com/inbox" + + # When queuing the activity + outgoing_activity = new_outgoing_activity(db, inbox_url, outbox_object.id) + + assert db.query(models.OutgoingActivity).one() == outgoing_activity + assert outgoing_activity.outbox_object_id == outbox_object.id + assert outgoing_activity.recipient == inbox_url + + +def test_process_next_outgoing_activity__no_next_activity( + db: Session, + respx_mock: respx.MockRouter, +) -> None: + assert process_next_outgoing_activity(db) is False + + +def test_process_next_outgoing_activity__server_200( + db: Session, + respx_mock: respx.MockRouter, +) -> None: + # And an outgoing activity + outbox_object = _setup_outbox_object() + + recipient_inbox_url = "https://example.com/users/toto/inbox" + respx_mock.post(recipient_inbox_url).mock(return_value=httpx.Response(204)) + + outgoing_activity = factories.OutgoingActivityFactory( + recipient=recipient_inbox_url, + outbox_object_id=outbox_object.id, + ) + + # When processing the next outgoing activity + # Then it is processed + assert process_next_outgoing_activity(db) is True + + assert respx_mock.calls.call_count == 1 + + outgoing_activity = db.query(models.OutgoingActivity).one() + assert outgoing_activity.is_sent is True + assert outgoing_activity.last_status_code == 204 + assert outgoing_activity.error is None + assert outgoing_activity.is_errored is False + + +def test_process_next_outgoing_activity__error_500( + db: Session, + respx_mock: respx.MockRouter, +) -> None: + outbox_object = _setup_outbox_object() + recipient_inbox_url = "https://example.com/inbox" + respx_mock.post(recipient_inbox_url).mock( + return_value=httpx.Response(500, text="oops") + ) + + # And an outgoing activity + outgoing_activity = factories.OutgoingActivityFactory( + recipient=recipient_inbox_url, + outbox_object_id=outbox_object.id, + ) + + # When processing the next outgoing activity + # Then it is processed + assert process_next_outgoing_activity(db) is True + + assert respx_mock.calls.call_count == 1 + + outgoing_activity = db.query(models.OutgoingActivity).one() + assert outgoing_activity.is_sent is False + assert outgoing_activity.last_status_code == 500 + assert outgoing_activity.last_response == "oops" + assert outgoing_activity.is_errored is False + assert outgoing_activity.tries == 1 + + +def test_process_next_outgoing_activity__connect_error( + db: Session, + respx_mock: respx.MockRouter, +) -> None: + outbox_object = _setup_outbox_object() + recipient_inbox_url = "https://example.com/inbox" + respx_mock.post(recipient_inbox_url).mock(side_effect=httpx.ConnectError) + + # And an outgoing activity + outgoing_activity = factories.OutgoingActivityFactory( + recipient=recipient_inbox_url, + outbox_object_id=outbox_object.id, + ) + + # When processing the next outgoing activity + # Then it is processed + assert process_next_outgoing_activity(db) is True + + assert respx_mock.calls.call_count == 1 + + outgoing_activity = db.query(models.OutgoingActivity).one() + assert outgoing_activity.is_sent is False + assert outgoing_activity.error is not None + assert outgoing_activity.tries == 1 + + +def test_process_next_outgoing_activity__errored( + db: Session, + respx_mock: respx.MockRouter, +) -> None: + outbox_object = _setup_outbox_object() + recipient_inbox_url = "https://example.com/inbox" + respx_mock.post(recipient_inbox_url).mock( + 
return_value=httpx.Response(500, text="oops")
+    )
+
+    # And an outgoing activity
+    outgoing_activity = factories.OutgoingActivityFactory(
+        recipient=recipient_inbox_url,
+        outbox_object_id=outbox_object.id,
+        tries=_MAX_RETRIES - 1,
+    )
+
+    # When processing the next outgoing activity
+    # Then it is processed
+    assert process_next_outgoing_activity(db) is True
+
+    assert respx_mock.calls.call_count == 1
+
+    outgoing_activity = db.query(models.OutgoingActivity).one()
+    assert outgoing_activity.is_sent is False
+    assert outgoing_activity.last_status_code == 500
+    assert outgoing_activity.last_response == "oops"
+    assert outgoing_activity.is_errored is True
+
+    # And it is skipped from processing
+    assert process_next_outgoing_activity(db) is False
+
+
+# TODO(ts):
+# - parse retry after
diff --git a/tests/test_public.py b/tests/test_public.py
new file mode 100644
index 0000000..4f6a150
--- /dev/null
+++ b/tests/test_public.py
@@ -0,0 +1,30 @@
+import pytest
+from fastapi.testclient import TestClient
+
+from app.database import Session
+
+_ACCEPTED_AP_HEADERS = [
+    "application/activity+json",
+    "application/activity+json; charset=utf-8",
+    "application/ld+json",
+    'application/ld+json; profile="https://www.w3.org/ns/activitystreams"',
+]
+
+
+def test_index(db: Session, client: TestClient):
+    response = client.get("/")
+    assert response.status_code == 200
+
+
+@pytest.mark.parametrize("accept", _ACCEPTED_AP_HEADERS)
+def test__ap_version(client, db, accept: str) -> None:
+    response = client.get("/followers", headers={"Accept": accept})
+    assert response.status_code == 200
+    assert response.headers["content-type"] == "application/activity+json"
+    assert response.json()["id"].endswith("/followers")
+
+
+def test__html(client, db) -> None:
+    response = client.get("/followers", headers={"Accept": "text/html"})
+    assert response.status_code == 200
+    assert response.headers["content-type"].startswith("text/html")
diff --git a/tests/utils.py b/tests/utils.py
new file mode 100644
index 0000000..cff33b2
--- /dev/null
+++ b/tests/utils.py
@@ -0,0 +1,47 @@
+from contextlib import contextmanager
+
+import fastapi
+
+from app import actor
+from app import httpsig
+from app.config import session_serializer
+from app.main import app
+
+
+@contextmanager
+def mock_httpsig_checker(ra: actor.RemoteActor):
+    async def httpsig_checker(
+        request: fastapi.Request,
+    ) -> httpsig.HTTPSigInfo:
+        return httpsig.HTTPSigInfo(
+            has_valid_signature=True,
+            signed_by_ap_actor_id=ra.ap_id,
+        )
+
+    app.dependency_overrides[httpsig.httpsig_checker] = httpsig_checker
+    try:
+        yield
+    finally:
+        del app.dependency_overrides[httpsig.httpsig_checker]
+
+
+def generate_admin_session_cookies() -> dict[str, str]:
+    return {"session": session_serializer.dumps({"is_logged_in": True})}
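+
+
+# Illustrative usage of the two helpers above, mirroring how they are
+# exercised in tests/test_inbox.py and tests/test_outbox.py (a sketch for
+# reference, not an extra fixture; `ra`, `client`, `follow_activity` and
+# `ap` are assumed to come from the calling test):
+#
+#     with mock_httpsig_checker(ra):
+#         response = client.post(
+#             "/inbox",
+#             headers={"Content-Type": ap.AS_CTX},
+#             json=follow_activity.ap_object,
+#         )
+#
+#     response = client.post(
+#         "/admin/actions/follow",
+#         data={"ap_actor_id": ra.ap_id, "csrf_token": generate_csrf_token(), ...},
+#         cookies=generate_admin_session_cookies(),
+#     )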