diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..bae94e1
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,661 @@
+ GNU AFFERO GENERAL PUBLIC LICENSE
+ Version 3, 19 November 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU Affero General Public License is a free, copyleft license for
+software and other kinds of works, specifically designed to ensure
+cooperation with the community in the case of network server software.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+our General Public Licenses are intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ Developers that use our General Public Licenses protect your rights
+with two steps: (1) assert copyright on the software, and (2) offer
+you this License which gives you legal permission to copy, distribute
+and/or modify the software.
+
+ A secondary benefit of defending all users' freedom is that
+improvements made in alternate versions of the program, if they
+receive widespread use, become available for other developers to
+incorporate. Many developers of free software are heartened and
+encouraged by the resulting cooperation. However, in the case of
+software used on network servers, this result may fail to come about.
+The GNU General Public License permits making a modified version and
+letting the public access it on a server without ever releasing its
+source code to the public.
+
+ The GNU Affero General Public License is designed specifically to
+ensure that, in such cases, the modified source code becomes available
+to the community. It requires the operator of a network server to
+provide the source code of the modified version running there to the
+users of that server. Therefore, public use of a modified version, on
+a publicly accessible server, gives the public access to the source
+code of the modified version.
+
+ An older license, called the Affero General Public License and
+published by Affero, was designed to accomplish similar goals. This is
+a different license, not a version of the Affero GPL, but Affero has
+released a new version of the Affero GPL which permits relicensing under
+this license.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU Affero General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Remote Network Interaction; Use with the GNU General Public License.
+
+ Notwithstanding any other provision of this License, if you modify the
+Program, your modified version must prominently offer all users
+interacting with it remotely through a computer network (if your version
+supports such interaction) an opportunity to receive the Corresponding
+Source of your version by providing access to the Corresponding Source
+from a network server at no charge, through some standard or customary
+means of facilitating copying of software. This Corresponding Source
+shall include the Corresponding Source for any work covered by version 3
+of the GNU General Public License that is incorporated pursuant to the
+following paragraph.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the work with which it is combined will remain governed by version
+3 of the GNU General Public License.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU Affero General Public License from time to time. Such new versions
+will be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU Affero General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU Affero General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU Affero General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+
+ Copyright (C) <year>  <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU Affero General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU Affero General Public License for more details.
+
+ You should have received a copy of the GNU Affero General Public License
+ along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If your software can interact with users remotely through a computer
+network, you should also make sure that it provides a way for users to
+get its source. For example, if your program is a web application, its
+interface could display a "Source" link that leads users to an archive
+of the code. There are many ways you could offer source, and different
+solutions will be better for different programs; see section 13 for the
+specific requirements.
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU AGPL, see
+<https://www.gnu.org/licenses/why-not-lgpl.html>.
\ No newline at end of file
diff --git a/app/__init__.py b/app/__init__.py
index 2029d46..87d71d2 100644
--- a/app/__init__.py
+++ b/app/__init__.py
@@ -16,6 +16,7 @@ from flask_wtf.csrf import CSRFError
from werkzeug.middleware.proxy_fix import ProxyFix
from .access_logging import AccessLoggingService
+from .compression import GzipMiddleware
from .acl import AclService
from .bucket_policies import BucketPolicyStore
from .config import AppConfig
@@ -89,13 +90,24 @@ def create_app(
# Trust X-Forwarded-* headers from proxies
app.wsgi_app = ProxyFix(app.wsgi_app, x_for=1, x_proto=1, x_host=1, x_prefix=1)
+ # Enable gzip compression for responses (10-20x smaller JSON payloads)
+ if app.config.get("ENABLE_GZIP", True):
+ app.wsgi_app = GzipMiddleware(app.wsgi_app, compression_level=6)
+
_configure_cors(app)
_configure_logging(app)
limiter.init_app(app)
csrf.init_app(app)
- storage = ObjectStorage(Path(app.config["STORAGE_ROOT"]))
+ storage = ObjectStorage(
+ Path(app.config["STORAGE_ROOT"]),
+ cache_ttl=app.config.get("OBJECT_CACHE_TTL", 5),
+ )
+
+ if app.config.get("WARM_CACHE_ON_STARTUP", True) and not app.config.get("TESTING"):
+ storage.warm_cache_async()
+
iam = IamService(
Path(app.config["IAM_CONFIG"]),
auth_max_attempts=app.config.get("AUTH_MAX_ATTEMPTS", 5),
diff --git a/app/compression.py b/app/compression.py
new file mode 100644
index 0000000..a0bed7c
--- /dev/null
+++ b/app/compression.py
@@ -0,0 +1,94 @@
+from __future__ import annotations
+
+import gzip
+import io
+from typing import Callable, Iterable, List, Tuple
+
+COMPRESSIBLE_MIMES = frozenset([
+ 'application/json',
+ 'application/javascript',
+ 'application/xml',
+ 'text/html',
+ 'text/css',
+ 'text/plain',
+ 'text/xml',
+ 'text/javascript',
+ 'application/x-ndjson',
+])
+
+MIN_SIZE_FOR_COMPRESSION = 500
+
+
+class GzipMiddleware:
+ def __init__(self, app: Callable, compression_level: int = 6, min_size: int = MIN_SIZE_FOR_COMPRESSION):
+ self.app = app
+ self.compression_level = compression_level
+ self.min_size = min_size
+
+ def __call__(self, environ: dict, start_response: Callable) -> Iterable[bytes]:
+ accept_encoding = environ.get('HTTP_ACCEPT_ENCODING', '')
+ if 'gzip' not in accept_encoding.lower():
+ return self.app(environ, start_response)
+
+ response_started = False
+ status_code = None
+ response_headers: List[Tuple[str, str]] = []
+ content_type = None
+ content_length = None
+ should_compress = False
+ exc_info_holder = [None]
+
+ def custom_start_response(status: str, headers: List[Tuple[str, str]], exc_info=None):
+ nonlocal response_started, status_code, response_headers, content_type, content_length, should_compress
+ response_started = True
+ status_code = int(status.split(' ', 1)[0])
+ response_headers = list(headers)
+ exc_info_holder[0] = exc_info
+
+ for name, value in headers:
+ name_lower = name.lower()
+ if name_lower == 'content-type':
+ content_type = value.split(';')[0].strip().lower()
+ elif name_lower == 'content-length':
+ content_length = int(value)
+ elif name_lower == 'content-encoding':
+ should_compress = False
+ return start_response(status, headers, exc_info)
+
+ if content_type and content_type in COMPRESSIBLE_MIMES:
+ if content_length is None or content_length >= self.min_size:
+ should_compress = True
+
+ return None
+
+ response_body = b''.join(self.app(environ, custom_start_response))
+
+ if not response_started:
+ return [response_body]
+
+ if should_compress and len(response_body) >= self.min_size:
+ buf = io.BytesIO()
+ with gzip.GzipFile(fileobj=buf, mode='wb', compresslevel=self.compression_level) as gz:
+ gz.write(response_body)
+ compressed = buf.getvalue()
+
+ if len(compressed) < len(response_body):
+ response_body = compressed
+ new_headers = []
+ for name, value in response_headers:
+ if name.lower() not in ('content-length', 'content-encoding'):
+ new_headers.append((name, value))
+ new_headers.append(('Content-Encoding', 'gzip'))
+ new_headers.append(('Content-Length', str(len(response_body))))
+ new_headers.append(('Vary', 'Accept-Encoding'))
+ response_headers = new_headers
+
+ status_str = f"{status_code} " + {
+ 200: "OK", 201: "Created", 204: "No Content", 206: "Partial Content",
+ 301: "Moved Permanently", 302: "Found", 304: "Not Modified",
+ 400: "Bad Request", 401: "Unauthorized", 403: "Forbidden", 404: "Not Found",
+ 405: "Method Not Allowed", 409: "Conflict", 500: "Internal Server Error",
+ }.get(status_code, "Unknown")
+
+ start_response(status_str, response_headers, exc_info_holder[0])
+ return [response_body]
diff --git a/app/config.py b/app/config.py
index 02f72db..47e172f 100644
--- a/app/config.py
+++ b/app/config.py
@@ -67,6 +67,7 @@ class AppConfig:
stream_chunk_size: int
multipart_min_part_size: int
bucket_stats_cache_ttl: int
+ object_cache_ttl: int
encryption_enabled: bool
encryption_master_key_path: Path
kms_enabled: bool
@@ -161,8 +162,9 @@ class AppConfig:
cors_allow_headers = _csv(str(_get("CORS_ALLOW_HEADERS", "*")), ["*"])
cors_expose_headers = _csv(str(_get("CORS_EXPOSE_HEADERS", "*")), ["*"])
session_lifetime_days = int(_get("SESSION_LIFETIME_DAYS", 30))
- bucket_stats_cache_ttl = int(_get("BUCKET_STATS_CACHE_TTL", 60))
-
+ bucket_stats_cache_ttl = int(_get("BUCKET_STATS_CACHE_TTL", 60))
+ object_cache_ttl = int(_get("OBJECT_CACHE_TTL", 5))
+
encryption_enabled = str(_get("ENCRYPTION_ENABLED", "0")).lower() in {"1", "true", "yes", "on"}
encryption_keys_dir = storage_root / ".myfsio.sys" / "keys"
encryption_master_key_path = Path(_get("ENCRYPTION_MASTER_KEY_PATH", encryption_keys_dir / "master.key")).resolve()
@@ -200,6 +202,7 @@ class AppConfig:
stream_chunk_size=stream_chunk_size,
multipart_min_part_size=multipart_min_part_size,
bucket_stats_cache_ttl=bucket_stats_cache_ttl,
+ object_cache_ttl=object_cache_ttl,
encryption_enabled=encryption_enabled,
encryption_master_key_path=encryption_master_key_path,
kms_enabled=kms_enabled,
@@ -315,6 +318,7 @@ class AppConfig:
"STREAM_CHUNK_SIZE": self.stream_chunk_size,
"MULTIPART_MIN_PART_SIZE": self.multipart_min_part_size,
"BUCKET_STATS_CACHE_TTL": self.bucket_stats_cache_ttl,
+ "OBJECT_CACHE_TTL": self.object_cache_ttl,
"LOG_LEVEL": self.log_level,
"LOG_TO_FILE": self.log_to_file,
"LOG_FILE": str(self.log_path),
diff --git a/app/s3_api.py b/app/s3_api.py
index f12d5f6..f825aaf 100644
--- a/app/s3_api.py
+++ b/app/s3_api.py
@@ -921,6 +921,7 @@ def _maybe_handle_bucket_subresource(bucket_name: str) -> Response | None:
"object-lock": _bucket_object_lock_handler,
"notification": _bucket_notification_handler,
"logging": _bucket_logging_handler,
+ "uploads": _bucket_uploads_handler,
}
requested = [key for key in handlers if key in request.args]
if not requested:
@@ -1813,6 +1814,72 @@ def _bucket_logging_handler(bucket_name: str) -> Response:
return Response(status=200)
+def _bucket_uploads_handler(bucket_name: str) -> Response:
+ if request.method != "GET":
+ return _method_not_allowed(["GET"])
+
+ principal, error = _require_principal()
+ if error:
+ return error
+ try:
+ _authorize_action(principal, bucket_name, "list")
+ except IamError as exc:
+ return _error_response("AccessDenied", str(exc), 403)
+
+ storage = _storage()
+ if not storage.bucket_exists(bucket_name):
+ return _error_response("NoSuchBucket", "Bucket does not exist", 404)
+
+ key_marker = request.args.get("key-marker", "")
+ upload_id_marker = request.args.get("upload-id-marker", "")
+ prefix = request.args.get("prefix", "")
+ delimiter = request.args.get("delimiter", "")
+ try:
+ max_uploads = max(1, min(int(request.args.get("max-uploads", 1000)), 1000))
+ except ValueError:
+ return _error_response("InvalidArgument", "max-uploads must be an integer", 400)
+
+ uploads = storage.list_multipart_uploads(bucket_name, include_orphaned=True)
+
+ if prefix:
+ uploads = [u for u in uploads if u["object_key"].startswith(prefix)]
+ if key_marker:
+ uploads = [u for u in uploads if u["object_key"] > key_marker or
+ (u["object_key"] == key_marker and upload_id_marker and u["upload_id"] > upload_id_marker)]
+
+ uploads.sort(key=lambda u: (u["object_key"], u["upload_id"]))
+
+ is_truncated = len(uploads) > max_uploads
+ if is_truncated:
+ uploads = uploads[:max_uploads]
+
+ root = Element("ListMultipartUploadsResult", xmlns="http://s3.amazonaws.com/doc/2006-03-01/")
+ SubElement(root, "Bucket").text = bucket_name
+ SubElement(root, "KeyMarker").text = key_marker
+ SubElement(root, "UploadIdMarker").text = upload_id_marker
+ if prefix:
+ SubElement(root, "Prefix").text = prefix
+ if delimiter:
+ SubElement(root, "Delimiter").text = delimiter
+ SubElement(root, "MaxUploads").text = str(max_uploads)
+ SubElement(root, "IsTruncated").text = "true" if is_truncated else "false"
+
+ if is_truncated and uploads:
+ SubElement(root, "NextKeyMarker").text = uploads[-1]["object_key"]
+ SubElement(root, "NextUploadIdMarker").text = uploads[-1]["upload_id"]
+
+ for upload in uploads:
+ upload_el = SubElement(root, "Upload")
+ SubElement(upload_el, "Key").text = upload["object_key"]
+ SubElement(upload_el, "UploadId").text = upload["upload_id"]
+ if upload.get("created_at"):
+ SubElement(upload_el, "Initiated").text = upload["created_at"]
+ if upload.get("orphaned"):
+ SubElement(upload_el, "StorageClass").text = "ORPHANED"
+
+ return _xml_response(root)
+
+
def _object_retention_handler(bucket_name: str, object_key: str) -> Response:
if request.method not in {"GET", "PUT"}:
return _method_not_allowed(["GET", "PUT"])
diff --git a/app/storage.py b/app/storage.py
index 32403ec..d37279b 100644
--- a/app/storage.py
+++ b/app/storage.py
@@ -137,10 +137,10 @@ class ObjectStorage:
BUCKET_VERSIONS_DIR = "versions"
MULTIPART_MANIFEST = "manifest.json"
BUCKET_CONFIG_FILE = ".bucket.json"
- KEY_INDEX_CACHE_TTL = 30
+ DEFAULT_CACHE_TTL = 5
OBJECT_CACHE_MAX_SIZE = 100
- def __init__(self, root: Path) -> None:
+ def __init__(self, root: Path, cache_ttl: int = DEFAULT_CACHE_TTL) -> None:
self.root = Path(root)
self.root.mkdir(parents=True, exist_ok=True)
self._ensure_system_roots()
@@ -150,6 +150,7 @@ class ObjectStorage:
self._cache_version: Dict[str, int] = {}
self._bucket_config_cache: Dict[str, tuple[dict[str, Any], float]] = {}
self._bucket_config_cache_ttl = 30.0
+ self._cache_ttl = cache_ttl
def _get_bucket_lock(self, bucket_id: str) -> threading.Lock:
"""Get or create a lock for a specific bucket. Reduces global lock contention."""
@@ -1147,47 +1148,57 @@ class ObjectStorage:
parts.sort(key=lambda x: x["PartNumber"])
return parts
- def list_multipart_uploads(self, bucket_name: str) -> List[Dict[str, Any]]:
- """List all active multipart uploads for a bucket."""
+ def list_multipart_uploads(self, bucket_name: str, include_orphaned: bool = False) -> List[Dict[str, Any]]:
+ """List all active multipart uploads for a bucket.
+
+ Args:
+ bucket_name: The bucket to list uploads for.
+ include_orphaned: If True, also include upload directories that have
+ files but no valid manifest.json (orphaned/interrupted uploads).
+ """
bucket_path = self._bucket_path(bucket_name)
if not bucket_path.exists():
raise BucketNotFoundError("Bucket does not exist")
bucket_id = bucket_path.name
uploads = []
- multipart_root = self._multipart_bucket_root(bucket_id)
- if multipart_root.exists():
+
+ for multipart_root in (
+ self._multipart_bucket_root(bucket_id),
+ self._legacy_multipart_bucket_root(bucket_id),
+ ):
+ if not multipart_root.exists():
+ continue
for upload_dir in multipart_root.iterdir():
if not upload_dir.is_dir():
continue
manifest_path = upload_dir / "manifest.json"
- if not manifest_path.exists():
- continue
- try:
- manifest = json.loads(manifest_path.read_text(encoding="utf-8"))
- uploads.append({
- "upload_id": manifest.get("upload_id", upload_dir.name),
- "object_key": manifest.get("object_key", ""),
- "created_at": manifest.get("created_at", ""),
- })
- except (OSError, json.JSONDecodeError):
- continue
- legacy_root = self._legacy_multipart_bucket_root(bucket_id)
- if legacy_root.exists():
- for upload_dir in legacy_root.iterdir():
- if not upload_dir.is_dir():
- continue
- manifest_path = upload_dir / "manifest.json"
- if not manifest_path.exists():
- continue
- try:
- manifest = json.loads(manifest_path.read_text(encoding="utf-8"))
- uploads.append({
- "upload_id": manifest.get("upload_id", upload_dir.name),
- "object_key": manifest.get("object_key", ""),
- "created_at": manifest.get("created_at", ""),
- })
- except (OSError, json.JSONDecodeError):
- continue
+ if manifest_path.exists():
+ try:
+ manifest = json.loads(manifest_path.read_text(encoding="utf-8"))
+ uploads.append({
+ "upload_id": manifest.get("upload_id", upload_dir.name),
+ "object_key": manifest.get("object_key", ""),
+ "created_at": manifest.get("created_at", ""),
+ })
+ except (OSError, json.JSONDecodeError):
+ if include_orphaned:
+ has_files = any(upload_dir.rglob("*"))
+ if has_files:
+ uploads.append({
+ "upload_id": upload_dir.name,
+ "object_key": "(unknown)",
+ "created_at": "",
+ "orphaned": True,
+ })
+ elif include_orphaned:
+ has_files = any(f.is_file() for f in upload_dir.rglob("*"))
+ if has_files:
+ uploads.append({
+ "upload_id": upload_dir.name,
+ "object_key": "(unknown)",
+ "created_at": "",
+ "orphaned": True,
+ })
return uploads
def _bucket_path(self, bucket_name: str) -> Path:
@@ -1398,7 +1409,7 @@ class ObjectStorage:
cached = self._object_cache.get(bucket_id)
if cached:
objects, timestamp = cached
- if now - timestamp < self.KEY_INDEX_CACHE_TTL:
+ if now - timestamp < self._cache_ttl:
self._object_cache.move_to_end(bucket_id)
return objects
cache_version = self._cache_version.get(bucket_id, 0)
@@ -1409,7 +1420,7 @@ class ObjectStorage:
cached = self._object_cache.get(bucket_id)
if cached:
objects, timestamp = cached
- if now - timestamp < self.KEY_INDEX_CACHE_TTL:
+ if now - timestamp < self._cache_ttl:
self._object_cache.move_to_end(bucket_id)
return objects
objects = self._build_object_cache(bucket_path)
@@ -1455,6 +1466,36 @@ class ObjectStorage:
else:
objects[key] = meta
+ def warm_cache(self, bucket_names: Optional[List[str]] = None) -> None:
+ """Pre-warm the object cache for specified buckets or all buckets.
+
+ This is called on startup to ensure the first request is fast.
+ """
+ if bucket_names is None:
+ bucket_names = [b.name for b in self.list_buckets()]
+
+ for bucket_name in bucket_names:
+ try:
+ bucket_path = self._bucket_path(bucket_name)
+ if bucket_path.exists():
+ self._get_object_cache(bucket_path.name, bucket_path)
+ except Exception:
+ pass
+
+ def warm_cache_async(self, bucket_names: Optional[List[str]] = None) -> threading.Thread:
+ """Start cache warming in a background thread.
+
+ Returns the thread object so caller can optionally wait for it.
+ """
+ thread = threading.Thread(
+ target=self.warm_cache,
+ args=(bucket_names,),
+ daemon=True,
+ name="cache-warmer",
+ )
+ thread.start()
+ return thread
+
def _ensure_system_roots(self) -> None:
for path in (
self._system_root_path(),
diff --git a/app/ui.py b/app/ui.py
index 0dd8b90..204ba48 100644
--- a/app/ui.py
+++ b/app/ui.py
@@ -102,6 +102,12 @@ def _friendly_error_message(exc: Exception) -> str:
return message
+def _wants_json() -> bool:
+ return request.accept_mimetypes.best_match(
+ ["application/json", "text/html"]
+ ) == "application/json"
+
+
def _policy_allows_public_read(policy: dict[str, Any]) -> bool:
statements = policy.get("Statement", [])
if isinstance(statements, dict):
@@ -285,13 +291,19 @@ def create_bucket():
principal = _current_principal()
bucket_name = request.form.get("bucket_name", "").strip()
if not bucket_name:
+ if _wants_json():
+ return jsonify({"error": "Bucket name is required"}), 400
flash("Bucket name is required", "danger")
return redirect(url_for("ui.buckets_overview"))
try:
_authorize_ui(principal, bucket_name, "write")
_storage().create_bucket(bucket_name)
+ if _wants_json():
+ return jsonify({"success": True, "message": f"Bucket '{bucket_name}' created", "bucket_name": bucket_name})
flash(f"Bucket '{bucket_name}' created", "success")
except (StorageError, FileExistsError, IamError) as exc:
+ if _wants_json():
+ return jsonify({"error": _friendly_error_message(exc)}), 400
flash(_friendly_error_message(exc), "danger")
return redirect(url_for("ui.buckets_overview"))
@@ -374,6 +386,7 @@ def bucket_detail(bucket_name: str):
kms_keys = kms_manager.list_keys() if kms_manager else []
kms_enabled = current_app.config.get("KMS_ENABLED", False)
encryption_enabled = current_app.config.get("ENCRYPTION_ENABLED", False)
+ lifecycle_enabled = current_app.config.get("LIFECYCLE_ENABLED", False)
can_manage_encryption = can_manage_versioning
bucket_quota = storage.get_bucket_quota(bucket_name)
@@ -386,6 +399,7 @@ def bucket_detail(bucket_name: str):
pass
objects_api_url = url_for("ui.list_bucket_objects", bucket_name=bucket_name)
+ objects_stream_url = url_for("ui.stream_bucket_objects", bucket_name=bucket_name)
lifecycle_url = url_for("ui.bucket_lifecycle", bucket_name=bucket_name)
cors_url = url_for("ui.bucket_cors", bucket_name=bucket_name)
@@ -397,6 +411,7 @@ def bucket_detail(bucket_name: str):
"bucket_detail.html",
bucket_name=bucket_name,
objects_api_url=objects_api_url,
+ objects_stream_url=objects_stream_url,
lifecycle_url=lifecycle_url,
cors_url=cors_url,
acl_url=acl_url,
@@ -418,6 +433,7 @@ def bucket_detail(bucket_name: str):
kms_keys=kms_keys,
kms_enabled=kms_enabled,
encryption_enabled=encryption_enabled,
+ lifecycle_enabled=lifecycle_enabled,
bucket_quota=bucket_quota,
bucket_stats=bucket_stats,
can_manage_quota=can_manage_quota,
@@ -492,6 +508,100 @@ def list_bucket_objects(bucket_name: str):
})
+@ui_bp.get("/buckets/<bucket_name>/objects/stream")
+def stream_bucket_objects(bucket_name: str):
+ """Streaming NDJSON endpoint for progressive object listing.
+
+ Streams objects as newline-delimited JSON for fast progressive rendering.
+ First line is metadata, subsequent lines are objects.
+ """
+ principal = _current_principal()
+ storage = _storage()
+ try:
+ _authorize_ui(principal, bucket_name, "list")
+ except IamError as exc:
+ return jsonify({"error": str(exc)}), 403
+
+ prefix = request.args.get("prefix") or None
+
+ try:
+ versioning_enabled = storage.is_versioning_enabled(bucket_name)
+ except StorageError:
+ versioning_enabled = False
+
+ preview_template = url_for("ui.object_preview", bucket_name=bucket_name, object_key="KEY_PLACEHOLDER")
+ delete_template = url_for("ui.delete_object", bucket_name=bucket_name, object_key="KEY_PLACEHOLDER")
+ presign_template = url_for("ui.object_presign", bucket_name=bucket_name, object_key="KEY_PLACEHOLDER")
+ versions_template = url_for("ui.object_versions", bucket_name=bucket_name, object_key="KEY_PLACEHOLDER")
+ restore_template = url_for("ui.restore_object_version", bucket_name=bucket_name, object_key="KEY_PLACEHOLDER", version_id="VERSION_ID_PLACEHOLDER")
+ tags_template = url_for("ui.object_tags", bucket_name=bucket_name, object_key="KEY_PLACEHOLDER")
+ copy_template = url_for("ui.copy_object", bucket_name=bucket_name, object_key="KEY_PLACEHOLDER")
+ move_template = url_for("ui.move_object", bucket_name=bucket_name, object_key="KEY_PLACEHOLDER")
+
+ def generate():
+ meta_line = json.dumps({
+ "type": "meta",
+ "versioning_enabled": versioning_enabled,
+ "url_templates": {
+ "preview": preview_template,
+ "download": preview_template + "?download=1",
+ "presign": presign_template,
+ "delete": delete_template,
+ "versions": versions_template,
+ "restore": restore_template,
+ "tags": tags_template,
+ "copy": copy_template,
+ "move": move_template,
+ },
+ }) + "\n"
+ yield meta_line
+
+ continuation_token = None
+ total_count = None
+ batch_size = 5000
+
+ while True:
+ try:
+ result = storage.list_objects(
+ bucket_name,
+ max_keys=batch_size,
+ continuation_token=continuation_token,
+ prefix=prefix,
+ )
+ except StorageError as exc:
+ yield json.dumps({"type": "error", "error": str(exc)}) + "\n"
+ return
+
+ if total_count is None:
+ total_count = result.total_count
+ yield json.dumps({"type": "count", "total_count": total_count}) + "\n"
+
+ for obj in result.objects:
+ yield json.dumps({
+ "type": "object",
+ "key": obj.key,
+ "size": obj.size,
+ "last_modified": obj.last_modified.isoformat(),
+ "last_modified_display": obj.last_modified.strftime("%b %d, %Y %H:%M"),
+ "etag": obj.etag,
+ }) + "\n"
+
+ if not result.is_truncated:
+ break
+ continuation_token = result.next_continuation_token
+
+ yield json.dumps({"type": "done"}) + "\n"
+
+ return Response(
+ generate(),
+ mimetype='application/x-ndjson',
+ headers={
+ 'Cache-Control': 'no-cache',
+ 'X-Accel-Buffering': 'no',
+ }
+ )
+
+
 @ui_bp.post("/buckets/<bucket_name>/upload")
@limiter.limit("30 per minute")
def upload_object(bucket_name: str):
@@ -647,8 +757,12 @@ def delete_bucket(bucket_name: str):
_storage().delete_bucket(bucket_name)
_bucket_policies().delete_policy(bucket_name)
_replication_manager().delete_rule(bucket_name)
+ if _wants_json():
+ return jsonify({"success": True, "message": f"Bucket '{bucket_name}' removed"})
flash(f"Bucket '{bucket_name}' removed", "success")
except (StorageError, IamError) as exc:
+ if _wants_json():
+ return jsonify({"error": _friendly_error_message(exc)}), 400
flash(_friendly_error_message(exc), "danger")
return redirect(url_for("ui.buckets_overview"))
@@ -662,12 +776,17 @@ def delete_object(bucket_name: str, object_key: str):
_authorize_ui(principal, bucket_name, "delete", object_key=object_key)
if purge_versions:
_storage().purge_object(bucket_name, object_key)
- flash(f"Permanently deleted '{object_key}' and all versions", "success")
+ message = f"Permanently deleted '{object_key}' and all versions"
else:
_storage().delete_object(bucket_name, object_key)
_replication_manager().trigger_replication(bucket_name, object_key, action="delete")
- flash(f"Deleted '{object_key}'", "success")
+ message = f"Deleted '{object_key}'"
+ if _wants_json():
+ return jsonify({"success": True, "message": message})
+ flash(message, "success")
except (IamError, StorageError) as exc:
+ if _wants_json():
+ return jsonify({"error": _friendly_error_message(exc)}), 400
flash(_friendly_error_message(exc), "danger")
return redirect(url_for("ui.bucket_detail", bucket_name=bucket_name))
@@ -977,22 +1096,32 @@ def update_bucket_policy(bucket_name: str):
try:
_authorize_ui(principal, bucket_name, "policy")
except IamError as exc:
+ if _wants_json():
+ return jsonify({"error": str(exc)}), 403
flash(str(exc), "danger")
return redirect(url_for("ui.bucket_detail", bucket_name=bucket_name))
store = _bucket_policies()
if action == "delete":
store.delete_policy(bucket_name)
+ if _wants_json():
+ return jsonify({"success": True, "message": "Bucket policy removed"})
flash("Bucket policy removed", "info")
return redirect(url_for("ui.bucket_detail", bucket_name=bucket_name, tab="permissions"))
document = request.form.get("policy_document", "").strip()
if not document:
+ if _wants_json():
+ return jsonify({"error": "Provide a JSON policy document"}), 400
flash("Provide a JSON policy document", "danger")
return redirect(url_for("ui.bucket_detail", bucket_name=bucket_name, tab="permissions"))
try:
payload = json.loads(document)
store.set_policy(bucket_name, payload)
+ if _wants_json():
+ return jsonify({"success": True, "message": "Bucket policy saved"})
flash("Bucket policy saved", "success")
except (json.JSONDecodeError, ValueError) as exc:
+ if _wants_json():
+ return jsonify({"error": f"Policy error: {exc}"}), 400
flash(f"Policy error: {exc}", "danger")
return redirect(url_for("ui.bucket_detail", bucket_name=bucket_name, tab="permissions"))
@@ -1003,6 +1132,8 @@ def update_bucket_versioning(bucket_name: str):
try:
_authorize_ui(principal, bucket_name, "write")
except IamError as exc:
+ if _wants_json():
+ return jsonify({"error": _friendly_error_message(exc)}), 403
flash(_friendly_error_message(exc), "danger")
return redirect(url_for("ui.bucket_detail", bucket_name=bucket_name, tab="properties"))
state = request.form.get("state", "enable")
@@ -1010,9 +1141,14 @@ def update_bucket_versioning(bucket_name: str):
try:
_storage().set_bucket_versioning(bucket_name, enable)
except StorageError as exc:
+ if _wants_json():
+ return jsonify({"error": _friendly_error_message(exc)}), 400
flash(_friendly_error_message(exc), "danger")
return redirect(url_for("ui.bucket_detail", bucket_name=bucket_name, tab="properties"))
- flash("Versioning enabled" if enable else "Versioning suspended", "success")
+ message = "Versioning enabled" if enable else "Versioning suspended"
+ if _wants_json():
+ return jsonify({"success": True, "message": message, "enabled": enable})
+ flash(message, "success")
return redirect(url_for("ui.bucket_detail", bucket_name=bucket_name, tab="properties"))
@@ -1020,62 +1156,83 @@ def update_bucket_versioning(bucket_name: str):
def update_bucket_quota(bucket_name: str):
"""Update bucket quota configuration (admin only)."""
principal = _current_principal()
-
+
is_admin = False
try:
_iam().authorize(principal, None, "iam:list_users")
is_admin = True
except IamError:
pass
-
+
if not is_admin:
+ if _wants_json():
+ return jsonify({"error": "Only administrators can manage bucket quotas"}), 403
flash("Only administrators can manage bucket quotas", "danger")
return redirect(url_for("ui.bucket_detail", bucket_name=bucket_name, tab="properties"))
-
+
action = request.form.get("action", "set")
-
+
if action == "remove":
try:
_storage().set_bucket_quota(bucket_name, max_bytes=None, max_objects=None)
+ if _wants_json():
+ return jsonify({"success": True, "message": "Bucket quota removed"})
flash("Bucket quota removed", "info")
except StorageError as exc:
+ if _wants_json():
+ return jsonify({"error": _friendly_error_message(exc)}), 400
flash(_friendly_error_message(exc), "danger")
return redirect(url_for("ui.bucket_detail", bucket_name=bucket_name, tab="properties"))
-
+
max_mb_str = request.form.get("max_mb", "").strip()
max_objects_str = request.form.get("max_objects", "").strip()
-
+
max_bytes = None
max_objects = None
-
+
if max_mb_str:
try:
max_mb = int(max_mb_str)
if max_mb < 1:
raise ValueError("Size must be at least 1 MB")
- max_bytes = max_mb * 1024 * 1024
+ max_bytes = max_mb * 1024 * 1024
except ValueError as exc:
+ if _wants_json():
+ return jsonify({"error": f"Invalid size value: {exc}"}), 400
flash(f"Invalid size value: {exc}", "danger")
return redirect(url_for("ui.bucket_detail", bucket_name=bucket_name, tab="properties"))
-
+
if max_objects_str:
try:
max_objects = int(max_objects_str)
if max_objects < 0:
raise ValueError("Object count must be non-negative")
except ValueError as exc:
+ if _wants_json():
+ return jsonify({"error": f"Invalid object count: {exc}"}), 400
flash(f"Invalid object count: {exc}", "danger")
return redirect(url_for("ui.bucket_detail", bucket_name=bucket_name, tab="properties"))
-
+
try:
_storage().set_bucket_quota(bucket_name, max_bytes=max_bytes, max_objects=max_objects)
if max_bytes is None and max_objects is None:
- flash("Bucket quota removed", "info")
+ message = "Bucket quota removed"
else:
- flash("Bucket quota updated", "success")
+ message = "Bucket quota updated"
+ if _wants_json():
+ return jsonify({
+ "success": True,
+ "message": message,
+ "max_bytes": max_bytes,
+ "max_objects": max_objects,
+ "has_quota": max_bytes is not None or max_objects is not None
+ })
+ flash(message, "success" if max_bytes or max_objects else "info")
except StorageError as exc:
+ if _wants_json():
+ return jsonify({"error": _friendly_error_message(exc)}), 400
flash(_friendly_error_message(exc), "danger")
-
+
return redirect(url_for("ui.bucket_detail", bucket_name=bucket_name, tab="properties"))
@@ -1086,26 +1243,34 @@ def update_bucket_encryption(bucket_name: str):
try:
_authorize_ui(principal, bucket_name, "write")
except IamError as exc:
+ if _wants_json():
+ return jsonify({"error": _friendly_error_message(exc)}), 403
flash(_friendly_error_message(exc), "danger")
return redirect(url_for("ui.bucket_detail", bucket_name=bucket_name, tab="properties"))
-
+
action = request.form.get("action", "enable")
-
+
if action == "disable":
try:
_storage().set_bucket_encryption(bucket_name, None)
+ if _wants_json():
+ return jsonify({"success": True, "message": "Default encryption disabled", "enabled": False})
flash("Default encryption disabled", "info")
except StorageError as exc:
+ if _wants_json():
+ return jsonify({"error": _friendly_error_message(exc)}), 400
flash(_friendly_error_message(exc), "danger")
return redirect(url_for("ui.bucket_detail", bucket_name=bucket_name, tab="properties"))
-
+
algorithm = request.form.get("algorithm", "AES256")
kms_key_id = request.form.get("kms_key_id", "").strip() or None
-
+
if algorithm not in ("AES256", "aws:kms"):
+ if _wants_json():
+ return jsonify({"error": "Invalid encryption algorithm"}), 400
flash("Invalid encryption algorithm", "danger")
return redirect(url_for("ui.bucket_detail", bucket_name=bucket_name, tab="properties"))
-
+
encryption_config: dict[str, Any] = {
"Rules": [
{
@@ -1115,19 +1280,24 @@ def update_bucket_encryption(bucket_name: str):
}
]
}
-
+
if algorithm == "aws:kms" and kms_key_id:
encryption_config["Rules"][0]["ApplyServerSideEncryptionByDefault"]["KMSMasterKeyID"] = kms_key_id
-
+
try:
_storage().set_bucket_encryption(bucket_name, encryption_config)
if algorithm == "aws:kms":
- flash("Default KMS encryption enabled", "success")
+ message = "Default KMS encryption enabled"
else:
- flash("Default AES-256 encryption enabled", "success")
+ message = "Default AES-256 encryption enabled"
+ if _wants_json():
+ return jsonify({"success": True, "message": message, "enabled": True, "algorithm": algorithm})
+ flash(message, "success")
except StorageError as exc:
+ if _wants_json():
+ return jsonify({"error": _friendly_error_message(exc)}), 400
flash(_friendly_error_message(exc), "danger")
-
+
return redirect(url_for("ui.bucket_detail", bucket_name=bucket_name, tab="properties"))
@@ -1176,10 +1346,14 @@ def create_iam_user():
try:
_iam().authorize(principal, None, "iam:create_user")
except IamError as exc:
+ if _wants_json():
+ return jsonify({"error": str(exc)}), 403
flash(str(exc), "danger")
return redirect(url_for("ui.iam_dashboard"))
display_name = request.form.get("display_name", "").strip() or "Unnamed"
if len(display_name) > 64:
+ if _wants_json():
+ return jsonify({"error": "Display name must be 64 characters or fewer"}), 400
flash("Display name must be 64 characters or fewer", "danger")
return redirect(url_for("ui.iam_dashboard"))
policies_text = request.form.get("policies", "").strip()
@@ -1188,11 +1362,15 @@ def create_iam_user():
try:
policies = json.loads(policies_text)
except json.JSONDecodeError as exc:
+ if _wants_json():
+ return jsonify({"error": f"Invalid JSON: {exc}"}), 400
flash(f"Invalid JSON: {exc}", "danger")
return redirect(url_for("ui.iam_dashboard"))
try:
created = _iam().create_user(display_name=display_name, policies=policies)
except IamError as exc:
+ if _wants_json():
+ return jsonify({"error": str(exc)}), 400
flash(str(exc), "danger")
return redirect(url_for("ui.iam_dashboard"))
@@ -1203,6 +1381,15 @@ def create_iam_user():
"operation": "create",
}
)
+ if _wants_json():
+ return jsonify({
+ "success": True,
+ "message": f"Created user {created['access_key']}",
+ "access_key": created["access_key"],
+ "secret_key": created["secret_key"],
+ "display_name": display_name,
+ "policies": policies or []
+ })
flash(f"Created user {created['access_key']}. Copy the secret below.", "success")
return redirect(url_for("ui.iam_dashboard", secret_token=token))
@@ -1254,18 +1441,26 @@ def update_iam_user(access_key: str):
try:
_iam().authorize(principal, None, "iam:create_user")
except IamError as exc:
+ if _wants_json():
+ return jsonify({"error": str(exc)}), 403
flash(str(exc), "danger")
return redirect(url_for("ui.iam_dashboard"))
display_name = request.form.get("display_name", "").strip()
if display_name:
if len(display_name) > 64:
+ if _wants_json():
+ return jsonify({"error": "Display name must be 64 characters or fewer"}), 400
flash("Display name must be 64 characters or fewer", "danger")
else:
try:
_iam().update_user(access_key, display_name)
+ if _wants_json():
+ return jsonify({"success": True, "message": f"Updated user {access_key}", "display_name": display_name})
flash(f"Updated user {access_key}", "success")
except IamError as exc:
+ if _wants_json():
+ return jsonify({"error": str(exc)}), 400
flash(str(exc), "danger")
return redirect(url_for("ui.iam_dashboard"))
@@ -1277,6 +1472,8 @@ def delete_iam_user(access_key: str):
try:
_iam().authorize(principal, None, "iam:delete_user")
except IamError as exc:
+ if _wants_json():
+ return jsonify({"error": str(exc)}), 403
flash(str(exc), "danger")
return redirect(url_for("ui.iam_dashboard"))
@@ -1284,16 +1481,24 @@ def delete_iam_user(access_key: str):
try:
_iam().delete_user(access_key)
session.pop("credentials", None)
+ if _wants_json():
+ return jsonify({"success": True, "message": "Your account has been deleted", "redirect": url_for("ui.login")})
flash("Your account has been deleted.", "info")
return redirect(url_for("ui.login"))
except IamError as exc:
+ if _wants_json():
+ return jsonify({"error": str(exc)}), 400
flash(str(exc), "danger")
return redirect(url_for("ui.iam_dashboard"))
try:
_iam().delete_user(access_key)
+ if _wants_json():
+ return jsonify({"success": True, "message": f"Deleted user {access_key}"})
flash(f"Deleted user {access_key}", "success")
except IamError as exc:
+ if _wants_json():
+ return jsonify({"error": str(exc)}), 400
flash(str(exc), "danger")
return redirect(url_for("ui.iam_dashboard"))
@@ -1304,6 +1509,8 @@ def update_iam_policies(access_key: str):
try:
_iam().authorize(principal, None, "iam:update_policy")
except IamError as exc:
+ if _wants_json():
+ return jsonify({"error": str(exc)}), 403
flash(str(exc), "danger")
return redirect(url_for("ui.iam_dashboard"))
@@ -1316,13 +1523,19 @@ def update_iam_policies(access_key: str):
if not isinstance(policies, list):
raise ValueError("Policies must be a list")
except (ValueError, json.JSONDecodeError):
+ if _wants_json():
+ return jsonify({"error": "Invalid JSON format for policies"}), 400
flash("Invalid JSON format for policies", "danger")
return redirect(url_for("ui.iam_dashboard"))
try:
_iam().update_user_policies(access_key, policies)
+ if _wants_json():
+ return jsonify({"success": True, "message": f"Updated policies for {access_key}", "policies": policies})
flash(f"Updated policies for {access_key}", "success")
except IamError as exc:
+ if _wants_json():
+ return jsonify({"error": str(exc)}), 400
flash(str(exc), "danger")
return redirect(url_for("ui.iam_dashboard"))
@@ -1334,19 +1547,23 @@ def create_connection():
try:
_iam().authorize(principal, None, "iam:list_users")
except IamError:
+ if _wants_json():
+ return jsonify({"error": "Access denied"}), 403
flash("Access denied", "danger")
return redirect(url_for("ui.buckets_overview"))
-
+
name = request.form.get("name", "").strip()
endpoint = request.form.get("endpoint_url", "").strip()
access_key = request.form.get("access_key", "").strip()
secret_key = request.form.get("secret_key", "").strip()
region = request.form.get("region", "us-east-1").strip()
-
+
if not all([name, endpoint, access_key, secret_key]):
+ if _wants_json():
+ return jsonify({"error": "All fields are required"}), 400
flash("All fields are required", "danger")
return redirect(url_for("ui.connections_dashboard"))
-
+
conn = RemoteConnection(
id=str(uuid.uuid4()),
name=name,
@@ -1356,6 +1573,8 @@ def create_connection():
region=region
)
_connections().add(conn)
+ if _wants_json():
+ return jsonify({"success": True, "message": f"Connection '{name}' created", "connection_id": conn.id})
flash(f"Connection '{name}' created", "success")
return redirect(url_for("ui.connections_dashboard"))
@@ -1415,11 +1634,15 @@ def update_connection(connection_id: str):
try:
_iam().authorize(principal, None, "iam:list_users")
except IamError:
+ if _wants_json():
+ return jsonify({"error": "Access denied"}), 403
flash("Access denied", "danger")
return redirect(url_for("ui.buckets_overview"))
conn = _connections().get(connection_id)
if not conn:
+ if _wants_json():
+ return jsonify({"error": "Connection not found"}), 404
flash("Connection not found", "danger")
return redirect(url_for("ui.connections_dashboard"))
@@ -1430,6 +1653,8 @@ def update_connection(connection_id: str):
region = request.form.get("region", "us-east-1").strip()
if not all([name, endpoint, access_key, secret_key]):
+ if _wants_json():
+ return jsonify({"error": "All fields are required"}), 400
flash("All fields are required", "danger")
return redirect(url_for("ui.connections_dashboard"))
@@ -1438,8 +1663,20 @@ def update_connection(connection_id: str):
conn.access_key = access_key
conn.secret_key = secret_key
conn.region = region
-
+
_connections().save()
+ if _wants_json():
+ return jsonify({
+ "success": True,
+ "message": f"Connection '{name}' updated",
+ "connection": {
+ "id": connection_id,
+ "name": name,
+ "endpoint_url": endpoint,
+ "access_key": access_key,
+ "region": region
+ }
+ })
flash(f"Connection '{name}' updated", "success")
return redirect(url_for("ui.connections_dashboard"))
@@ -1450,10 +1687,14 @@ def delete_connection(connection_id: str):
try:
_iam().authorize(principal, None, "iam:list_users")
except IamError:
+ if _wants_json():
+ return jsonify({"error": "Access denied"}), 403
flash("Access denied", "danger")
return redirect(url_for("ui.buckets_overview"))
-
+
_connections().delete(connection_id)
+ if _wants_json():
+ return jsonify({"success": True, "message": "Connection deleted"})
flash("Connection deleted", "success")
return redirect(url_for("ui.connections_dashboard"))
@@ -1464,31 +1705,41 @@ def update_bucket_replication(bucket_name: str):
try:
_authorize_ui(principal, bucket_name, "replication")
except IamError as exc:
+ if _wants_json():
+ return jsonify({"error": str(exc)}), 403
flash(str(exc), "danger")
return redirect(url_for("ui.bucket_detail", bucket_name=bucket_name, tab="replication"))
-
+
is_admin = False
try:
_iam().authorize(principal, None, "iam:list_users")
is_admin = True
except IamError:
is_admin = False
-
+
action = request.form.get("action")
-
+
if action == "delete":
if not is_admin:
+ if _wants_json():
+ return jsonify({"error": "Only administrators can remove replication configuration"}), 403
flash("Only administrators can remove replication configuration", "danger")
return redirect(url_for("ui.bucket_detail", bucket_name=bucket_name, tab="replication"))
_replication().delete_rule(bucket_name)
+ if _wants_json():
+ return jsonify({"success": True, "message": "Replication configuration removed", "action": "delete"})
flash("Replication configuration removed", "info")
elif action == "pause":
rule = _replication().get_rule(bucket_name)
if rule:
rule.enabled = False
_replication().set_rule(rule)
+ if _wants_json():
+ return jsonify({"success": True, "message": "Replication paused", "action": "pause", "enabled": False})
flash("Replication paused", "info")
else:
+ if _wants_json():
+ return jsonify({"error": "No replication configuration to pause"}), 404
flash("No replication configuration to pause", "warning")
elif action == "resume":
from .replication import REPLICATION_MODE_ALL
@@ -1498,24 +1749,33 @@ def update_bucket_replication(bucket_name: str):
_replication().set_rule(rule)
if rule.mode == REPLICATION_MODE_ALL:
_replication().replicate_existing_objects(bucket_name)
- flash("Replication resumed. Syncing pending objects in background.", "success")
+ message = "Replication resumed. Syncing pending objects in background."
else:
- flash("Replication resumed", "success")
+ message = "Replication resumed"
+ if _wants_json():
+ return jsonify({"success": True, "message": message, "action": "resume", "enabled": True})
+ flash(message, "success")
else:
+ if _wants_json():
+ return jsonify({"error": "No replication configuration to resume"}), 404
flash("No replication configuration to resume", "warning")
elif action == "create":
if not is_admin:
+ if _wants_json():
+ return jsonify({"error": "Only administrators can configure replication settings"}), 403
flash("Only administrators can configure replication settings", "danger")
return redirect(url_for("ui.bucket_detail", bucket_name=bucket_name, tab="replication"))
-
+
from .replication import REPLICATION_MODE_NEW_ONLY, REPLICATION_MODE_ALL
import time
-
+
target_conn_id = request.form.get("target_connection_id")
target_bucket = request.form.get("target_bucket", "").strip()
replication_mode = request.form.get("replication_mode", REPLICATION_MODE_NEW_ONLY)
-
+
if not target_conn_id or not target_bucket:
+ if _wants_json():
+ return jsonify({"error": "Target connection and bucket are required"}), 400
flash("Target connection and bucket are required", "danger")
else:
rule = ReplicationRule(
@@ -1527,15 +1787,20 @@ def update_bucket_replication(bucket_name: str):
created_at=time.time(),
)
_replication().set_rule(rule)
-
+
if replication_mode == REPLICATION_MODE_ALL:
_replication().replicate_existing_objects(bucket_name)
- flash("Replication configured. Existing objects are being replicated in the background.", "success")
+ message = "Replication configured. Existing objects are being replicated in the background."
else:
- flash("Replication configured. Only new uploads will be replicated.", "success")
+ message = "Replication configured. Only new uploads will be replicated."
+ if _wants_json():
+ return jsonify({"success": True, "message": message, "action": "create", "enabled": True})
+ flash(message, "success")
else:
+ if _wants_json():
+ return jsonify({"error": "Invalid action"}), 400
flash("Invalid action", "danger")
-
+
return redirect(url_for("ui.bucket_detail", bucket_name=bucket_name, tab="replication"))
@@ -1767,6 +2032,67 @@ def metrics_dashboard():
)
+@ui_bp.route("/metrics/api")
+def metrics_api():
+ principal = _current_principal()
+
+ try:
+ _iam().authorize(principal, None, "iam:list_users")
+ except IamError:
+ return jsonify({"error": "Access denied"}), 403
+
+ import time
+
+ cpu_percent = psutil.cpu_percent(interval=0.1)
+ memory = psutil.virtual_memory()
+
+ storage_root = current_app.config["STORAGE_ROOT"]
+ disk = psutil.disk_usage(storage_root)
+
+ storage = _storage()
+ buckets = storage.list_buckets()
+ total_buckets = len(buckets)
+
+ total_objects = 0
+ total_bytes_used = 0
+ total_versions = 0
+
+ cache_ttl = current_app.config.get("BUCKET_STATS_CACHE_TTL", 60)
+ for bucket in buckets:
+ stats = storage.bucket_stats(bucket.name, cache_ttl=cache_ttl)
+ total_objects += stats.get("total_objects", stats.get("objects", 0))
+ total_bytes_used += stats.get("total_bytes", stats.get("bytes", 0))
+ total_versions += stats.get("version_count", 0)
+
+ boot_time = psutil.boot_time()
+ uptime_seconds = time.time() - boot_time
+ uptime_days = int(uptime_seconds / 86400)
+
+ return jsonify({
+ "cpu_percent": cpu_percent,
+ "memory": {
+ "total": _format_bytes(memory.total),
+ "available": _format_bytes(memory.available),
+ "used": _format_bytes(memory.used),
+ "percent": memory.percent,
+ },
+ "disk": {
+ "total": _format_bytes(disk.total),
+ "free": _format_bytes(disk.free),
+ "used": _format_bytes(disk.used),
+ "percent": disk.percent,
+ },
+ "app": {
+ "buckets": total_buckets,
+ "objects": total_objects,
+ "versions": total_versions,
+ "storage_used": _format_bytes(total_bytes_used),
+ "storage_raw": total_bytes_used,
+ "uptime_days": uptime_days,
+ }
+ })
+
+
@ui_bp.route("/buckets/<bucket_name>/lifecycle", methods=["GET", "POST", "DELETE"])
def bucket_lifecycle(bucket_name: str):
principal = _current_principal()
diff --git a/app/version.py b/app/version.py
index 6f91c30..3e11219 100644
--- a/app/version.py
+++ b/app/version.py
@@ -1,6 +1,6 @@
from __future__ import annotations
-APP_VERSION = "0.2.0"
+APP_VERSION = "0.2.1"
def get_version() -> str:
diff --git a/docs.md b/docs.md
index 6515b6c..3cbbf98 100644
--- a/docs.md
+++ b/docs.md
@@ -189,6 +189,52 @@ All configuration is done via environment variables. The table below lists every
| `KMS_ENABLED` | `false` | Enable KMS key management for encryption. |
| `KMS_KEYS_PATH` | `data/.myfsio.sys/keys/kms_keys.json` | Path to store KMS key metadata. |
+
+## Lifecycle Rules
+
+Lifecycle rules automate object management by scheduling deletions based on object age.
+
+### Enabling Lifecycle Enforcement
+
+By default, lifecycle enforcement is disabled. Enable it by setting the environment variable:
+
+```bash
+LIFECYCLE_ENABLED=true python run.py
+```
+
+Or in your `myfsio.env` file:
+```
+LIFECYCLE_ENABLED=true
+LIFECYCLE_INTERVAL_SECONDS=3600 # Check interval (default: 1 hour)
+```
+
+### Configuring Rules
+
+Once enabled, configure lifecycle rules via:
+- **Web UI:** Bucket Details → Lifecycle tab → Add Rule
+- **S3 API:** `PUT /<bucket>?lifecycle` with XML configuration
+
+### Available Actions
+
+| Action | Description |
+|--------|-------------|
+| **Expiration** | Delete current version objects after N days |
+| **NoncurrentVersionExpiration** | Delete old versions N days after becoming noncurrent (requires versioning) |
+| **AbortIncompleteMultipartUpload** | Clean up incomplete multipart uploads after N days |
+
+### Example Configuration (XML)
+
+```xml
+<LifecycleConfiguration>
+  <Rule>
+    <ID>DeleteOldLogs</ID>
+    <Status>Enabled</Status>
+    <Prefix>logs/</Prefix>
+    <Expiration><Days>30</Days></Expiration>
+  </Rule>
+</LifecycleConfiguration>
+```
+
### Performance Tuning
| Variable | Default | Notes |
diff --git a/requirements.txt b/requirements.txt
index e69aa35..8fe9bb3 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,6 +2,7 @@ Flask>=3.1.2
Flask-Limiter>=4.1.1
Flask-Cors>=6.0.2
Flask-WTF>=1.2.2
+python-dotenv>=1.2.1
pytest>=9.0.2
requests>=2.32.5
boto3>=1.42.14
diff --git a/run.py b/run.py
index da78033..3de61c4 100644
--- a/run.py
+++ b/run.py
@@ -6,6 +6,17 @@ import os
import sys
import warnings
from multiprocessing import Process
+from pathlib import Path
+
+from dotenv import load_dotenv
+
+for _env_file in [
+ Path("/opt/myfsio/myfsio.env"),
+ Path.cwd() / ".env",
+ Path.cwd() / "myfsio.env",
+]:
+ if _env_file.exists():
+ load_dotenv(_env_file, override=True)
from app import create_api_app, create_ui_app
from app.config import AppConfig
diff --git a/static/js/bucket-detail-main.js b/static/js/bucket-detail-main.js
new file mode 100644
index 0000000..cbd657b
--- /dev/null
+++ b/static/js/bucket-detail-main.js
@@ -0,0 +1,4169 @@
+(function() {
+ 'use strict';
+
+ const { formatBytes, escapeHtml, fallbackCopy, setupJsonAutoIndent } = window.BucketDetailUtils || {
+ formatBytes: (bytes) => {
+ if (!Number.isFinite(bytes)) return `${bytes} bytes`;
+ const units = ['bytes', 'KB', 'MB', 'GB', 'TB'];
+ let i = 0;
+ let size = bytes;
+ while (size >= 1024 && i < units.length - 1) {
+ size /= 1024;
+ i++;
+ }
+ return `${size.toFixed(i === 0 ? 0 : 1)} ${units[i]}`;
+ },
+ escapeHtml: (value) => {
+ if (value === null || value === undefined) return '';
+ return String(value)
+ .replace(/&/g, '&amp;')
+ .replace(/</g, '&lt;')
+ .replace(/>/g, '&gt;')
+ .replace(/"/g, '&quot;')
+ .replace(/'/g, '&#39;');
+ },
+ fallbackCopy: () => false,
+ setupJsonAutoIndent: () => {}
+ };
+
+ setupJsonAutoIndent(document.getElementById('policyDocument'));
+
+ const selectAllCheckbox = document.querySelector('[data-select-all]');
+ const bulkDeleteButton = document.querySelector('[data-bulk-delete-trigger]');
+ const bulkDeleteLabel = bulkDeleteButton?.querySelector('[data-bulk-delete-label]');
+ const bulkDeleteModalEl = document.getElementById('bulkDeleteModal');
+ const bulkDeleteModal = bulkDeleteModalEl ? new bootstrap.Modal(bulkDeleteModalEl) : null;
+ const bulkDeleteList = document.getElementById('bulkDeleteList');
+ const bulkDeleteCount = document.getElementById('bulkDeleteCount');
+ const bulkDeleteStatus = document.getElementById('bulkDeleteStatus');
+ const bulkDeleteConfirm = document.getElementById('bulkDeleteConfirm');
+ const bulkDeletePurge = document.getElementById('bulkDeletePurge');
+ const previewPanel = document.getElementById('preview-panel');
+ const previewEmpty = document.getElementById('preview-empty');
+ const previewKey = document.getElementById('preview-key');
+ const previewSize = document.getElementById('preview-size');
+ const previewModified = document.getElementById('preview-modified');
+ const previewEtag = document.getElementById('preview-etag');
+ const previewMetadata = document.getElementById('preview-metadata');
+ const previewMetadataList = document.getElementById('preview-metadata-list');
+ const previewPlaceholder = document.getElementById('preview-placeholder');
+ const previewImage = document.getElementById('preview-image');
+ const previewVideo = document.getElementById('preview-video');
+ const previewIframe = document.getElementById('preview-iframe');
+ const downloadButton = document.getElementById('downloadButton');
+ const presignButton = document.getElementById('presignButton');
+ const presignModalEl = document.getElementById('presignModal');
+ const presignModal = presignModalEl ? new bootstrap.Modal(presignModalEl) : null;
+ const presignMethod = document.getElementById('presignMethod');
+ const presignTtl = document.getElementById('presignTtl');
+ const presignLink = document.getElementById('presignLink');
+ const copyPresignLink = document.getElementById('copyPresignLink');
+ const copyPresignDefaultLabel = copyPresignLink?.textContent?.trim() || 'Copy';
+ const generatePresignButton = document.getElementById('generatePresignButton');
+ const policyForm = document.getElementById('bucketPolicyForm');
+ const policyTextarea = document.getElementById('policyDocument');
+ const policyPreset = document.getElementById('policyPreset');
+ const policyMode = document.getElementById('policyMode');
+ const uploadForm = document.querySelector('[data-upload-form]');
+ const uploadModalEl = document.getElementById('uploadModal');
+ const uploadModal = uploadModalEl ? bootstrap.Modal.getOrCreateInstance(uploadModalEl) : null;
+ const uploadFileInput = uploadForm?.querySelector('input[name="object"]');
+ const uploadDropZone = uploadForm?.querySelector('[data-dropzone]');
+ const uploadDropZoneLabel = uploadDropZone?.querySelector('[data-dropzone-label]');
+ const messageModalEl = document.getElementById('messageModal');
+ const messageModal = messageModalEl ? new bootstrap.Modal(messageModalEl) : null;
+ const messageModalTitle = document.getElementById('messageModalTitle');
+ const messageModalBody = document.getElementById('messageModalBody');
+ const messageModalAction = document.getElementById('messageModalAction');
+ let messageModalActionHandler = null;
+ let isGeneratingPresign = false;
+ const objectsContainer = document.querySelector('.objects-table-container[data-bucket]');
+ const bulkDeleteEndpoint = objectsContainer?.dataset.bulkDeleteEndpoint || '';
+ const objectsApiUrl = objectsContainer?.dataset.objectsApi || '';
+ const objectsStreamUrl = objectsContainer?.dataset.objectsStream || '';
+ const versionPanel = document.getElementById('version-panel');
+ const versionList = document.getElementById('version-list');
+ const refreshVersionsButton = document.getElementById('refreshVersionsButton');
+ const archivedCard = document.getElementById('archived-objects-card');
+ const archivedBody = archivedCard?.querySelector('[data-archived-body]');
+ const archivedCountBadge = archivedCard?.querySelector('[data-archived-count]');
+ const archivedRefreshButton = archivedCard?.querySelector('[data-archived-refresh]');
+ const archivedEndpoint = archivedCard?.dataset.archivedEndpoint;
+ let versioningEnabled = objectsContainer?.dataset.versioning === 'true';
+ const versionsCache = new Map();
+ let activeRow = null;
+ const selectedRows = new Map();
+ let bulkDeleting = false;
+ if (presignButton) presignButton.disabled = true;
+ if (generatePresignButton) generatePresignButton.disabled = true;
+ if (downloadButton) downloadButton.classList.add('disabled');
+
+ const objectCountBadge = document.getElementById('object-count-badge');
+ const loadMoreContainer = document.getElementById('load-more-container');
+ const loadMoreSpinner = document.getElementById('load-more-spinner');
+ const loadMoreStatus = document.getElementById('load-more-status');
+ const objectsLoadingRow = document.getElementById('objects-loading-row');
+ let nextContinuationToken = null;
+ let totalObjectCount = 0;
+ let loadedObjectCount = 0;
+ let isLoadingObjects = false;
+ let hasMoreObjects = false;
+ let currentFilterTerm = '';
+ let pageSize = 5000;
+ let currentPrefix = '';
+ let allObjects = [];
+ let urlTemplates = null;
+ let streamAbortController = null;
+ let useStreaming = !!objectsStreamUrl;
+ let streamingComplete = false;
+ const STREAM_RENDER_BATCH = 500;
+ let pendingStreamObjects = [];
+ let streamRenderScheduled = false;
+
+ const buildUrlFromTemplate = (template, key) => {
+ if (!template) return '';
+ return template.replace('KEY_PLACEHOLDER', encodeURIComponent(key).replace(/%2F/g, '/'));
+ };
+
+ const ROW_HEIGHT = 53;
+ const BUFFER_ROWS = 10;
+ let visibleItems = [];
+ let renderedRange = { start: 0, end: 0 };
+
+ const createObjectRow = (obj, displayKey = null) => {
+ const tr = document.createElement('tr');
+ tr.dataset.objectRow = '';
+ tr.dataset.key = obj.key;
+ tr.dataset.size = obj.size;
+ tr.dataset.lastModified = obj.lastModified || obj.last_modified;
+ tr.dataset.etag = obj.etag;
+ tr.dataset.previewUrl = obj.previewUrl || obj.preview_url;
+ tr.dataset.downloadUrl = obj.downloadUrl || obj.download_url;
+ tr.dataset.presignEndpoint = obj.presignEndpoint || obj.presign_endpoint;
+ tr.dataset.deleteEndpoint = obj.deleteEndpoint || obj.delete_endpoint;
+ tr.dataset.metadata = typeof obj.metadata === 'string' ? obj.metadata : JSON.stringify(obj.metadata || {});
+ tr.dataset.versionsEndpoint = obj.versionsEndpoint || obj.versions_endpoint;
+ tr.dataset.restoreTemplate = obj.restoreTemplate || obj.restore_template;
+ tr.dataset.tagsUrl = obj.tagsUrl || obj.tags_url;
+ tr.dataset.copyUrl = obj.copyUrl || obj.copy_url;
+ tr.dataset.moveUrl = obj.moveUrl || obj.move_url;
+
+ const keyToShow = displayKey || obj.key;
+ const lastModDisplay = obj.lastModifiedDisplay || obj.last_modified_display || new Date(obj.lastModified || obj.last_modified).toLocaleDateString();
+
+ tr.innerHTML = `
+