Add 'server/' from commit '006c5fc2425f5aa060d2911c175dae6b6b1a19f3'
git-subtree-dir: server git-subtree-mainline:masterb93fe69019
git-subtree-split:006c5fc242
commit
7ebd80d792
@ -0,0 +1,16 @@
|
|||||||
|
/db.sqlite3*
|
||||||
|
Session.vim
|
||||||
|
/local_settings.py
|
||||||
|
.venv
|
||||||
|
/assets
|
||||||
|
/logs
|
||||||
|
/.coverage
|
||||||
|
/tmp
|
||||||
|
/media
|
||||||
|
|
||||||
|
__pycache__
|
||||||
|
.*.swp
|
||||||
|
|
||||||
|
/.*
|
||||||
|
|
||||||
|
/sandbox
|
@ -0,0 +1,2 @@
|
|||||||
|
github: etesync
|
||||||
|
custom: https://www.etesync.com/contribute/#donate
|
@ -0,0 +1,16 @@
|
|||||||
|
/journal
|
||||||
|
/db.sqlite3*
|
||||||
|
Session.vim
|
||||||
|
/.venv
|
||||||
|
/assets
|
||||||
|
/logs
|
||||||
|
/.coverage
|
||||||
|
/tmp
|
||||||
|
/media
|
||||||
|
/.idea
|
||||||
|
|
||||||
|
__pycache__
|
||||||
|
.*.swp
|
||||||
|
|
||||||
|
/etebase_server_settings.py
|
||||||
|
/secret.txt
|
@ -0,0 +1,51 @@
|
|||||||
|
# Changelog
|
||||||
|
|
||||||
|
## Version 0.8.3
|
||||||
|
- Fix compatibility with latest fastapi
|
||||||
|
|
||||||
|
## Version 0.8.2
|
||||||
|
- Update dependencies again
|
||||||
|
|
||||||
|
## Version 0.8.1
|
||||||
|
* Fix Error `404 Not Found` for Static Files
|
||||||
|
* Fix Django 3.2 warnings
|
||||||
|
* Update dependencies while (keep Django 3.2 LTS)
|
||||||
|
|
||||||
|
## Version 0.8.0
|
||||||
|
* Update django dep.
|
||||||
|
* Fix issue with comparing ports in hostname verification with self-hosted servers.
|
||||||
|
* Fix sendfile settings to be more correct.
|
||||||
|
* Improve easy config (make it clear media_root needs to be set)
|
||||||
|
* Handle stoken being the empty string
|
||||||
|
* Fix mysql/mariadb support
|
||||||
|
* Switch to FastAPI for the server component
|
||||||
|
|
||||||
|
## Version 0.7.0
|
||||||
|
* Chunks: improve the chunk download endpoint to use sendfile extensions
|
||||||
|
* Chunks: support not passing chunk content if exists
|
||||||
|
* Chunks: fix chunk uploading media type to accept everything
|
||||||
|
* Gracefull handle uploading the same revision
|
||||||
|
* Pass generic context to callbacks instead of the whole view
|
||||||
|
* Fix handling of some validation errors
|
||||||
|
|
||||||
|
## Version 0.6.1
|
||||||
|
* Collection: save the UID on the model to use the db for enforcing uniqueness
|
||||||
|
|
||||||
|
## Version 0.6.0
|
||||||
|
* Fix stoken calculation performance - was VERY slow in some rare cases
|
||||||
|
* Fix issues with host verification failing with a custom port - part 2
|
||||||
|
|
||||||
|
## Version 0.5.3
|
||||||
|
* Add missing migration
|
||||||
|
|
||||||
|
## Version 0.5.2
|
||||||
|
* Fix issues with host verification failing with a custom port
|
||||||
|
* Add env variable to change configuration file path.
|
||||||
|
* Change user creation to not ask for a password (and clarify the readme).
|
||||||
|
|
||||||
|
## Version 0.5.1
|
||||||
|
* Enforce collections to always have a collection type set
|
||||||
|
* Collection saving: add another verification for collection UID uniqueness.
|
||||||
|
|
||||||
|
## Version 0.5.0
|
||||||
|
* First Etebase-server release (was EteSync-server before)
|
@ -0,0 +1,661 @@
|
|||||||
|
GNU AFFERO GENERAL PUBLIC LICENSE
|
||||||
|
Version 3, 19 November 2007
|
||||||
|
|
||||||
|
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
||||||
|
Everyone is permitted to copy and distribute verbatim copies
|
||||||
|
of this license document, but changing it is not allowed.
|
||||||
|
|
||||||
|
Preamble
|
||||||
|
|
||||||
|
The GNU Affero General Public License is a free, copyleft license for
|
||||||
|
software and other kinds of works, specifically designed to ensure
|
||||||
|
cooperation with the community in the case of network server software.
|
||||||
|
|
||||||
|
The licenses for most software and other practical works are designed
|
||||||
|
to take away your freedom to share and change the works. By contrast,
|
||||||
|
our General Public Licenses are intended to guarantee your freedom to
|
||||||
|
share and change all versions of a program--to make sure it remains free
|
||||||
|
software for all its users.
|
||||||
|
|
||||||
|
When we speak of free software, we are referring to freedom, not
|
||||||
|
price. Our General Public Licenses are designed to make sure that you
|
||||||
|
have the freedom to distribute copies of free software (and charge for
|
||||||
|
them if you wish), that you receive source code or can get it if you
|
||||||
|
want it, that you can change the software or use pieces of it in new
|
||||||
|
free programs, and that you know you can do these things.
|
||||||
|
|
||||||
|
Developers that use our General Public Licenses protect your rights
|
||||||
|
with two steps: (1) assert copyright on the software, and (2) offer
|
||||||
|
you this License which gives you legal permission to copy, distribute
|
||||||
|
and/or modify the software.
|
||||||
|
|
||||||
|
A secondary benefit of defending all users' freedom is that
|
||||||
|
improvements made in alternate versions of the program, if they
|
||||||
|
receive widespread use, become available for other developers to
|
||||||
|
incorporate. Many developers of free software are heartened and
|
||||||
|
encouraged by the resulting cooperation. However, in the case of
|
||||||
|
software used on network servers, this result may fail to come about.
|
||||||
|
The GNU General Public License permits making a modified version and
|
||||||
|
letting the public access it on a server without ever releasing its
|
||||||
|
source code to the public.
|
||||||
|
|
||||||
|
The GNU Affero General Public License is designed specifically to
|
||||||
|
ensure that, in such cases, the modified source code becomes available
|
||||||
|
to the community. It requires the operator of a network server to
|
||||||
|
provide the source code of the modified version running there to the
|
||||||
|
users of that server. Therefore, public use of a modified version, on
|
||||||
|
a publicly accessible server, gives the public access to the source
|
||||||
|
code of the modified version.
|
||||||
|
|
||||||
|
An older license, called the Affero General Public License and
|
||||||
|
published by Affero, was designed to accomplish similar goals. This is
|
||||||
|
a different license, not a version of the Affero GPL, but Affero has
|
||||||
|
released a new version of the Affero GPL which permits relicensing under
|
||||||
|
this license.
|
||||||
|
|
||||||
|
The precise terms and conditions for copying, distribution and
|
||||||
|
modification follow.
|
||||||
|
|
||||||
|
TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
0. Definitions.
|
||||||
|
|
||||||
|
"This License" refers to version 3 of the GNU Affero General Public License.
|
||||||
|
|
||||||
|
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||||
|
works, such as semiconductor masks.
|
||||||
|
|
||||||
|
"The Program" refers to any copyrightable work licensed under this
|
||||||
|
License. Each licensee is addressed as "you". "Licensees" and
|
||||||
|
"recipients" may be individuals or organizations.
|
||||||
|
|
||||||
|
To "modify" a work means to copy from or adapt all or part of the work
|
||||||
|
in a fashion requiring copyright permission, other than the making of an
|
||||||
|
exact copy. The resulting work is called a "modified version" of the
|
||||||
|
earlier work or a work "based on" the earlier work.
|
||||||
|
|
||||||
|
A "covered work" means either the unmodified Program or a work based
|
||||||
|
on the Program.
|
||||||
|
|
||||||
|
To "propagate" a work means to do anything with it that, without
|
||||||
|
permission, would make you directly or secondarily liable for
|
||||||
|
infringement under applicable copyright law, except executing it on a
|
||||||
|
computer or modifying a private copy. Propagation includes copying,
|
||||||
|
distribution (with or without modification), making available to the
|
||||||
|
public, and in some countries other activities as well.
|
||||||
|
|
||||||
|
To "convey" a work means any kind of propagation that enables other
|
||||||
|
parties to make or receive copies. Mere interaction with a user through
|
||||||
|
a computer network, with no transfer of a copy, is not conveying.
|
||||||
|
|
||||||
|
An interactive user interface displays "Appropriate Legal Notices"
|
||||||
|
to the extent that it includes a convenient and prominently visible
|
||||||
|
feature that (1) displays an appropriate copyright notice, and (2)
|
||||||
|
tells the user that there is no warranty for the work (except to the
|
||||||
|
extent that warranties are provided), that licensees may convey the
|
||||||
|
work under this License, and how to view a copy of this License. If
|
||||||
|
the interface presents a list of user commands or options, such as a
|
||||||
|
menu, a prominent item in the list meets this criterion.
|
||||||
|
|
||||||
|
1. Source Code.
|
||||||
|
|
||||||
|
The "source code" for a work means the preferred form of the work
|
||||||
|
for making modifications to it. "Object code" means any non-source
|
||||||
|
form of a work.
|
||||||
|
|
||||||
|
A "Standard Interface" means an interface that either is an official
|
||||||
|
standard defined by a recognized standards body, or, in the case of
|
||||||
|
interfaces specified for a particular programming language, one that
|
||||||
|
is widely used among developers working in that language.
|
||||||
|
|
||||||
|
The "System Libraries" of an executable work include anything, other
|
||||||
|
than the work as a whole, that (a) is included in the normal form of
|
||||||
|
packaging a Major Component, but which is not part of that Major
|
||||||
|
Component, and (b) serves only to enable use of the work with that
|
||||||
|
Major Component, or to implement a Standard Interface for which an
|
||||||
|
implementation is available to the public in source code form. A
|
||||||
|
"Major Component", in this context, means a major essential component
|
||||||
|
(kernel, window system, and so on) of the specific operating system
|
||||||
|
(if any) on which the executable work runs, or a compiler used to
|
||||||
|
produce the work, or an object code interpreter used to run it.
|
||||||
|
|
||||||
|
The "Corresponding Source" for a work in object code form means all
|
||||||
|
the source code needed to generate, install, and (for an executable
|
||||||
|
work) run the object code and to modify the work, including scripts to
|
||||||
|
control those activities. However, it does not include the work's
|
||||||
|
System Libraries, or general-purpose tools or generally available free
|
||||||
|
programs which are used unmodified in performing those activities but
|
||||||
|
which are not part of the work. For example, Corresponding Source
|
||||||
|
includes interface definition files associated with source files for
|
||||||
|
the work, and the source code for shared libraries and dynamically
|
||||||
|
linked subprograms that the work is specifically designed to require,
|
||||||
|
such as by intimate data communication or control flow between those
|
||||||
|
subprograms and other parts of the work.
|
||||||
|
|
||||||
|
The Corresponding Source need not include anything that users
|
||||||
|
can regenerate automatically from other parts of the Corresponding
|
||||||
|
Source.
|
||||||
|
|
||||||
|
The Corresponding Source for a work in source code form is that
|
||||||
|
same work.
|
||||||
|
|
||||||
|
2. Basic Permissions.
|
||||||
|
|
||||||
|
All rights granted under this License are granted for the term of
|
||||||
|
copyright on the Program, and are irrevocable provided the stated
|
||||||
|
conditions are met. This License explicitly affirms your unlimited
|
||||||
|
permission to run the unmodified Program. The output from running a
|
||||||
|
covered work is covered by this License only if the output, given its
|
||||||
|
content, constitutes a covered work. This License acknowledges your
|
||||||
|
rights of fair use or other equivalent, as provided by copyright law.
|
||||||
|
|
||||||
|
You may make, run and propagate covered works that you do not
|
||||||
|
convey, without conditions so long as your license otherwise remains
|
||||||
|
in force. You may convey covered works to others for the sole purpose
|
||||||
|
of having them make modifications exclusively for you, or provide you
|
||||||
|
with facilities for running those works, provided that you comply with
|
||||||
|
the terms of this License in conveying all material for which you do
|
||||||
|
not control copyright. Those thus making or running the covered works
|
||||||
|
for you must do so exclusively on your behalf, under your direction
|
||||||
|
and control, on terms that prohibit them from making any copies of
|
||||||
|
your copyrighted material outside their relationship with you.
|
||||||
|
|
||||||
|
Conveying under any other circumstances is permitted solely under
|
||||||
|
the conditions stated below. Sublicensing is not allowed; section 10
|
||||||
|
makes it unnecessary.
|
||||||
|
|
||||||
|
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||||
|
|
||||||
|
No covered work shall be deemed part of an effective technological
|
||||||
|
measure under any applicable law fulfilling obligations under article
|
||||||
|
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||||
|
similar laws prohibiting or restricting circumvention of such
|
||||||
|
measures.
|
||||||
|
|
||||||
|
When you convey a covered work, you waive any legal power to forbid
|
||||||
|
circumvention of technological measures to the extent such circumvention
|
||||||
|
is effected by exercising rights under this License with respect to
|
||||||
|
the covered work, and you disclaim any intention to limit operation or
|
||||||
|
modification of the work as a means of enforcing, against the work's
|
||||||
|
users, your or third parties' legal rights to forbid circumvention of
|
||||||
|
technological measures.
|
||||||
|
|
||||||
|
4. Conveying Verbatim Copies.
|
||||||
|
|
||||||
|
You may convey verbatim copies of the Program's source code as you
|
||||||
|
receive it, in any medium, provided that you conspicuously and
|
||||||
|
appropriately publish on each copy an appropriate copyright notice;
|
||||||
|
keep intact all notices stating that this License and any
|
||||||
|
non-permissive terms added in accord with section 7 apply to the code;
|
||||||
|
keep intact all notices of the absence of any warranty; and give all
|
||||||
|
recipients a copy of this License along with the Program.
|
||||||
|
|
||||||
|
You may charge any price or no price for each copy that you convey,
|
||||||
|
and you may offer support or warranty protection for a fee.
|
||||||
|
|
||||||
|
5. Conveying Modified Source Versions.
|
||||||
|
|
||||||
|
You may convey a work based on the Program, or the modifications to
|
||||||
|
produce it from the Program, in the form of source code under the
|
||||||
|
terms of section 4, provided that you also meet all of these conditions:
|
||||||
|
|
||||||
|
a) The work must carry prominent notices stating that you modified
|
||||||
|
it, and giving a relevant date.
|
||||||
|
|
||||||
|
b) The work must carry prominent notices stating that it is
|
||||||
|
released under this License and any conditions added under section
|
||||||
|
7. This requirement modifies the requirement in section 4 to
|
||||||
|
"keep intact all notices".
|
||||||
|
|
||||||
|
c) You must license the entire work, as a whole, under this
|
||||||
|
License to anyone who comes into possession of a copy. This
|
||||||
|
License will therefore apply, along with any applicable section 7
|
||||||
|
additional terms, to the whole of the work, and all its parts,
|
||||||
|
regardless of how they are packaged. This License gives no
|
||||||
|
permission to license the work in any other way, but it does not
|
||||||
|
invalidate such permission if you have separately received it.
|
||||||
|
|
||||||
|
d) If the work has interactive user interfaces, each must display
|
||||||
|
Appropriate Legal Notices; however, if the Program has interactive
|
||||||
|
interfaces that do not display Appropriate Legal Notices, your
|
||||||
|
work need not make them do so.
|
||||||
|
|
||||||
|
A compilation of a covered work with other separate and independent
|
||||||
|
works, which are not by their nature extensions of the covered work,
|
||||||
|
and which are not combined with it such as to form a larger program,
|
||||||
|
in or on a volume of a storage or distribution medium, is called an
|
||||||
|
"aggregate" if the compilation and its resulting copyright are not
|
||||||
|
used to limit the access or legal rights of the compilation's users
|
||||||
|
beyond what the individual works permit. Inclusion of a covered work
|
||||||
|
in an aggregate does not cause this License to apply to the other
|
||||||
|
parts of the aggregate.
|
||||||
|
|
||||||
|
6. Conveying Non-Source Forms.
|
||||||
|
|
||||||
|
You may convey a covered work in object code form under the terms
|
||||||
|
of sections 4 and 5, provided that you also convey the
|
||||||
|
machine-readable Corresponding Source under the terms of this License,
|
||||||
|
in one of these ways:
|
||||||
|
|
||||||
|
a) Convey the object code in, or embodied in, a physical product
|
||||||
|
(including a physical distribution medium), accompanied by the
|
||||||
|
Corresponding Source fixed on a durable physical medium
|
||||||
|
customarily used for software interchange.
|
||||||
|
|
||||||
|
b) Convey the object code in, or embodied in, a physical product
|
||||||
|
(including a physical distribution medium), accompanied by a
|
||||||
|
written offer, valid for at least three years and valid for as
|
||||||
|
long as you offer spare parts or customer support for that product
|
||||||
|
model, to give anyone who possesses the object code either (1) a
|
||||||
|
copy of the Corresponding Source for all the software in the
|
||||||
|
product that is covered by this License, on a durable physical
|
||||||
|
medium customarily used for software interchange, for a price no
|
||||||
|
more than your reasonable cost of physically performing this
|
||||||
|
conveying of source, or (2) access to copy the
|
||||||
|
Corresponding Source from a network server at no charge.
|
||||||
|
|
||||||
|
c) Convey individual copies of the object code with a copy of the
|
||||||
|
written offer to provide the Corresponding Source. This
|
||||||
|
alternative is allowed only occasionally and noncommercially, and
|
||||||
|
only if you received the object code with such an offer, in accord
|
||||||
|
with subsection 6b.
|
||||||
|
|
||||||
|
d) Convey the object code by offering access from a designated
|
||||||
|
place (gratis or for a charge), and offer equivalent access to the
|
||||||
|
Corresponding Source in the same way through the same place at no
|
||||||
|
further charge. You need not require recipients to copy the
|
||||||
|
Corresponding Source along with the object code. If the place to
|
||||||
|
copy the object code is a network server, the Corresponding Source
|
||||||
|
may be on a different server (operated by you or a third party)
|
||||||
|
that supports equivalent copying facilities, provided you maintain
|
||||||
|
clear directions next to the object code saying where to find the
|
||||||
|
Corresponding Source. Regardless of what server hosts the
|
||||||
|
Corresponding Source, you remain obligated to ensure that it is
|
||||||
|
available for as long as needed to satisfy these requirements.
|
||||||
|
|
||||||
|
e) Convey the object code using peer-to-peer transmission, provided
|
||||||
|
you inform other peers where the object code and Corresponding
|
||||||
|
Source of the work are being offered to the general public at no
|
||||||
|
charge under subsection 6d.
|
||||||
|
|
||||||
|
A separable portion of the object code, whose source code is excluded
|
||||||
|
from the Corresponding Source as a System Library, need not be
|
||||||
|
included in conveying the object code work.
|
||||||
|
|
||||||
|
A "User Product" is either (1) a "consumer product", which means any
|
||||||
|
tangible personal property which is normally used for personal, family,
|
||||||
|
or household purposes, or (2) anything designed or sold for incorporation
|
||||||
|
into a dwelling. In determining whether a product is a consumer product,
|
||||||
|
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||||
|
product received by a particular user, "normally used" refers to a
|
||||||
|
typical or common use of that class of product, regardless of the status
|
||||||
|
of the particular user or of the way in which the particular user
|
||||||
|
actually uses, or expects or is expected to use, the product. A product
|
||||||
|
is a consumer product regardless of whether the product has substantial
|
||||||
|
commercial, industrial or non-consumer uses, unless such uses represent
|
||||||
|
the only significant mode of use of the product.
|
||||||
|
|
||||||
|
"Installation Information" for a User Product means any methods,
|
||||||
|
procedures, authorization keys, or other information required to install
|
||||||
|
and execute modified versions of a covered work in that User Product from
|
||||||
|
a modified version of its Corresponding Source. The information must
|
||||||
|
suffice to ensure that the continued functioning of the modified object
|
||||||
|
code is in no case prevented or interfered with solely because
|
||||||
|
modification has been made.
|
||||||
|
|
||||||
|
If you convey an object code work under this section in, or with, or
|
||||||
|
specifically for use in, a User Product, and the conveying occurs as
|
||||||
|
part of a transaction in which the right of possession and use of the
|
||||||
|
User Product is transferred to the recipient in perpetuity or for a
|
||||||
|
fixed term (regardless of how the transaction is characterized), the
|
||||||
|
Corresponding Source conveyed under this section must be accompanied
|
||||||
|
by the Installation Information. But this requirement does not apply
|
||||||
|
if neither you nor any third party retains the ability to install
|
||||||
|
modified object code on the User Product (for example, the work has
|
||||||
|
been installed in ROM).
|
||||||
|
|
||||||
|
The requirement to provide Installation Information does not include a
|
||||||
|
requirement to continue to provide support service, warranty, or updates
|
||||||
|
for a work that has been modified or installed by the recipient, or for
|
||||||
|
the User Product in which it has been modified or installed. Access to a
|
||||||
|
network may be denied when the modification itself materially and
|
||||||
|
adversely affects the operation of the network or violates the rules and
|
||||||
|
protocols for communication across the network.
|
||||||
|
|
||||||
|
Corresponding Source conveyed, and Installation Information provided,
|
||||||
|
in accord with this section must be in a format that is publicly
|
||||||
|
documented (and with an implementation available to the public in
|
||||||
|
source code form), and must require no special password or key for
|
||||||
|
unpacking, reading or copying.
|
||||||
|
|
||||||
|
7. Additional Terms.
|
||||||
|
|
||||||
|
"Additional permissions" are terms that supplement the terms of this
|
||||||
|
License by making exceptions from one or more of its conditions.
|
||||||
|
Additional permissions that are applicable to the entire Program shall
|
||||||
|
be treated as though they were included in this License, to the extent
|
||||||
|
that they are valid under applicable law. If additional permissions
|
||||||
|
apply only to part of the Program, that part may be used separately
|
||||||
|
under those permissions, but the entire Program remains governed by
|
||||||
|
this License without regard to the additional permissions.
|
||||||
|
|
||||||
|
When you convey a copy of a covered work, you may at your option
|
||||||
|
remove any additional permissions from that copy, or from any part of
|
||||||
|
it. (Additional permissions may be written to require their own
|
||||||
|
removal in certain cases when you modify the work.) You may place
|
||||||
|
additional permissions on material, added by you to a covered work,
|
||||||
|
for which you have or can give appropriate copyright permission.
|
||||||
|
|
||||||
|
Notwithstanding any other provision of this License, for material you
|
||||||
|
add to a covered work, you may (if authorized by the copyright holders of
|
||||||
|
that material) supplement the terms of this License with terms:
|
||||||
|
|
||||||
|
a) Disclaiming warranty or limiting liability differently from the
|
||||||
|
terms of sections 15 and 16 of this License; or
|
||||||
|
|
||||||
|
b) Requiring preservation of specified reasonable legal notices or
|
||||||
|
author attributions in that material or in the Appropriate Legal
|
||||||
|
Notices displayed by works containing it; or
|
||||||
|
|
||||||
|
c) Prohibiting misrepresentation of the origin of that material, or
|
||||||
|
requiring that modified versions of such material be marked in
|
||||||
|
reasonable ways as different from the original version; or
|
||||||
|
|
||||||
|
d) Limiting the use for publicity purposes of names of licensors or
|
||||||
|
authors of the material; or
|
||||||
|
|
||||||
|
e) Declining to grant rights under trademark law for use of some
|
||||||
|
trade names, trademarks, or service marks; or
|
||||||
|
|
||||||
|
f) Requiring indemnification of licensors and authors of that
|
||||||
|
material by anyone who conveys the material (or modified versions of
|
||||||
|
it) with contractual assumptions of liability to the recipient, for
|
||||||
|
any liability that these contractual assumptions directly impose on
|
||||||
|
those licensors and authors.
|
||||||
|
|
||||||
|
All other non-permissive additional terms are considered "further
|
||||||
|
restrictions" within the meaning of section 10. If the Program as you
|
||||||
|
received it, or any part of it, contains a notice stating that it is
|
||||||
|
governed by this License along with a term that is a further
|
||||||
|
restriction, you may remove that term. If a license document contains
|
||||||
|
a further restriction but permits relicensing or conveying under this
|
||||||
|
License, you may add to a covered work material governed by the terms
|
||||||
|
of that license document, provided that the further restriction does
|
||||||
|
not survive such relicensing or conveying.
|
||||||
|
|
||||||
|
If you add terms to a covered work in accord with this section, you
|
||||||
|
must place, in the relevant source files, a statement of the
|
||||||
|
additional terms that apply to those files, or a notice indicating
|
||||||
|
where to find the applicable terms.
|
||||||
|
|
||||||
|
Additional terms, permissive or non-permissive, may be stated in the
|
||||||
|
form of a separately written license, or stated as exceptions;
|
||||||
|
the above requirements apply either way.
|
||||||
|
|
||||||
|
8. Termination.
|
||||||
|
|
||||||
|
You may not propagate or modify a covered work except as expressly
|
||||||
|
provided under this License. Any attempt otherwise to propagate or
|
||||||
|
modify it is void, and will automatically terminate your rights under
|
||||||
|
this License (including any patent licenses granted under the third
|
||||||
|
paragraph of section 11).
|
||||||
|
|
||||||
|
However, if you cease all violation of this License, then your
|
||||||
|
license from a particular copyright holder is reinstated (a)
|
||||||
|
provisionally, unless and until the copyright holder explicitly and
|
||||||
|
finally terminates your license, and (b) permanently, if the copyright
|
||||||
|
holder fails to notify you of the violation by some reasonable means
|
||||||
|
prior to 60 days after the cessation.
|
||||||
|
|
||||||
|
Moreover, your license from a particular copyright holder is
|
||||||
|
reinstated permanently if the copyright holder notifies you of the
|
||||||
|
violation by some reasonable means, this is the first time you have
|
||||||
|
received notice of violation of this License (for any work) from that
|
||||||
|
copyright holder, and you cure the violation prior to 30 days after
|
||||||
|
your receipt of the notice.
|
||||||
|
|
||||||
|
Termination of your rights under this section does not terminate the
|
||||||
|
licenses of parties who have received copies or rights from you under
|
||||||
|
this License. If your rights have been terminated and not permanently
|
||||||
|
reinstated, you do not qualify to receive new licenses for the same
|
||||||
|
material under section 10.
|
||||||
|
|
||||||
|
9. Acceptance Not Required for Having Copies.
|
||||||
|
|
||||||
|
You are not required to accept this License in order to receive or
|
||||||
|
run a copy of the Program. Ancillary propagation of a covered work
|
||||||
|
occurring solely as a consequence of using peer-to-peer transmission
|
||||||
|
to receive a copy likewise does not require acceptance. However,
|
||||||
|
nothing other than this License grants you permission to propagate or
|
||||||
|
modify any covered work. These actions infringe copyright if you do
|
||||||
|
not accept this License. Therefore, by modifying or propagating a
|
||||||
|
covered work, you indicate your acceptance of this License to do so.
|
||||||
|
|
||||||
|
10. Automatic Licensing of Downstream Recipients.
|
||||||
|
|
||||||
|
Each time you convey a covered work, the recipient automatically
|
||||||
|
receives a license from the original licensors, to run, modify and
|
||||||
|
propagate that work, subject to this License. You are not responsible
|
||||||
|
for enforcing compliance by third parties with this License.
|
||||||
|
|
||||||
|
An "entity transaction" is a transaction transferring control of an
|
||||||
|
organization, or substantially all assets of one, or subdividing an
|
||||||
|
organization, or merging organizations. If propagation of a covered
|
||||||
|
work results from an entity transaction, each party to that
|
||||||
|
transaction who receives a copy of the work also receives whatever
|
||||||
|
licenses to the work the party's predecessor in interest had or could
|
||||||
|
give under the previous paragraph, plus a right to possession of the
|
||||||
|
Corresponding Source of the work from the predecessor in interest, if
|
||||||
|
the predecessor has it or can get it with reasonable efforts.
|
||||||
|
|
||||||
|
You may not impose any further restrictions on the exercise of the
|
||||||
|
rights granted or affirmed under this License. For example, you may
|
||||||
|
not impose a license fee, royalty, or other charge for exercise of
|
||||||
|
rights granted under this License, and you may not initiate litigation
|
||||||
|
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||||
|
any patent claim is infringed by making, using, selling, offering for
|
||||||
|
sale, or importing the Program or any portion of it.
|
||||||
|
|
||||||
|
11. Patents.
|
||||||
|
|
||||||
|
A "contributor" is a copyright holder who authorizes use under this
|
||||||
|
License of the Program or a work on which the Program is based. The
|
||||||
|
work thus licensed is called the contributor's "contributor version".
|
||||||
|
|
||||||
|
A contributor's "essential patent claims" are all patent claims
|
||||||
|
owned or controlled by the contributor, whether already acquired or
|
||||||
|
hereafter acquired, that would be infringed by some manner, permitted
|
||||||
|
by this License, of making, using, or selling its contributor version,
|
||||||
|
but do not include claims that would be infringed only as a
|
||||||
|
consequence of further modification of the contributor version. For
|
||||||
|
purposes of this definition, "control" includes the right to grant
|
||||||
|
patent sublicenses in a manner consistent with the requirements of
|
||||||
|
this License.
|
||||||
|
|
||||||
|
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||||
|
patent license under the contributor's essential patent claims, to
|
||||||
|
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||||
|
propagate the contents of its contributor version.
|
||||||
|
|
||||||
|
In the following three paragraphs, a "patent license" is any express
|
||||||
|
agreement or commitment, however denominated, not to enforce a patent
|
||||||
|
(such as an express permission to practice a patent or covenant not to
|
||||||
|
sue for patent infringement). To "grant" such a patent license to a
|
||||||
|
party means to make such an agreement or commitment not to enforce a
|
||||||
|
patent against the party.
|
||||||
|
|
||||||
|
If you convey a covered work, knowingly relying on a patent license,
|
||||||
|
and the Corresponding Source of the work is not available for anyone
|
||||||
|
to copy, free of charge and under the terms of this License, through a
|
||||||
|
publicly available network server or other readily accessible means,
|
||||||
|
then you must either (1) cause the Corresponding Source to be so
|
||||||
|
available, or (2) arrange to deprive yourself of the benefit of the
|
||||||
|
patent license for this particular work, or (3) arrange, in a manner
|
||||||
|
consistent with the requirements of this License, to extend the patent
|
||||||
|
license to downstream recipients. "Knowingly relying" means you have
|
||||||
|
actual knowledge that, but for the patent license, your conveying the
|
||||||
|
covered work in a country, or your recipient's use of the covered work
|
||||||
|
in a country, would infringe one or more identifiable patents in that
|
||||||
|
country that you have reason to believe are valid.
|
||||||
|
|
||||||
|
If, pursuant to or in connection with a single transaction or
|
||||||
|
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||||
|
covered work, and grant a patent license to some of the parties
|
||||||
|
receiving the covered work authorizing them to use, propagate, modify
|
||||||
|
or convey a specific copy of the covered work, then the patent license
|
||||||
|
you grant is automatically extended to all recipients of the covered
|
||||||
|
work and works based on it.
|
||||||
|
|
||||||
|
A patent license is "discriminatory" if it does not include within
|
||||||
|
the scope of its coverage, prohibits the exercise of, or is
|
||||||
|
conditioned on the non-exercise of one or more of the rights that are
|
||||||
|
specifically granted under this License. You may not convey a covered
|
||||||
|
work if you are a party to an arrangement with a third party that is
|
||||||
|
in the business of distributing software, under which you make payment
|
||||||
|
to the third party based on the extent of your activity of conveying
|
||||||
|
the work, and under which the third party grants, to any of the
|
||||||
|
parties who would receive the covered work from you, a discriminatory
|
||||||
|
patent license (a) in connection with copies of the covered work
|
||||||
|
conveyed by you (or copies made from those copies), or (b) primarily
|
||||||
|
for and in connection with specific products or compilations that
|
||||||
|
contain the covered work, unless you entered into that arrangement,
|
||||||
|
or that patent license was granted, prior to 28 March 2007.
|
||||||
|
|
||||||
|
Nothing in this License shall be construed as excluding or limiting
|
||||||
|
any implied license or other defenses to infringement that may
|
||||||
|
otherwise be available to you under applicable patent law.
|
||||||
|
|
||||||
|
12. No Surrender of Others' Freedom.
|
||||||
|
|
||||||
|
If conditions are imposed on you (whether by court order, agreement or
|
||||||
|
otherwise) that contradict the conditions of this License, they do not
|
||||||
|
excuse you from the conditions of this License. If you cannot convey a
|
||||||
|
covered work so as to satisfy simultaneously your obligations under this
|
||||||
|
License and any other pertinent obligations, then as a consequence you may
|
||||||
|
not convey it at all. For example, if you agree to terms that obligate you
|
||||||
|
to collect a royalty for further conveying from those to whom you convey
|
||||||
|
the Program, the only way you could satisfy both those terms and this
|
||||||
|
License would be to refrain entirely from conveying the Program.
|
||||||
|
|
||||||
|
13. Remote Network Interaction; Use with the GNU General Public License.
|
||||||
|
|
||||||
|
Notwithstanding any other provision of this License, if you modify the
|
||||||
|
Program, your modified version must prominently offer all users
|
||||||
|
interacting with it remotely through a computer network (if your version
|
||||||
|
supports such interaction) an opportunity to receive the Corresponding
|
||||||
|
Source of your version by providing access to the Corresponding Source
|
||||||
|
from a network server at no charge, through some standard or customary
|
||||||
|
means of facilitating copying of software. This Corresponding Source
|
||||||
|
shall include the Corresponding Source for any work covered by version 3
|
||||||
|
of the GNU General Public License that is incorporated pursuant to the
|
||||||
|
following paragraph.
|
||||||
|
|
||||||
|
Notwithstanding any other provision of this License, you have
|
||||||
|
permission to link or combine any covered work with a work licensed
|
||||||
|
under version 3 of the GNU General Public License into a single
|
||||||
|
combined work, and to convey the resulting work. The terms of this
|
||||||
|
License will continue to apply to the part which is the covered work,
|
||||||
|
but the work with which it is combined will remain governed by version
|
||||||
|
3 of the GNU General Public License.
|
||||||
|
|
||||||
|
14. Revised Versions of this License.
|
||||||
|
|
||||||
|
The Free Software Foundation may publish revised and/or new versions of
|
||||||
|
the GNU Affero General Public License from time to time. Such new versions
|
||||||
|
will be similar in spirit to the present version, but may differ in detail to
|
||||||
|
address new problems or concerns.
|
||||||
|
|
||||||
|
Each version is given a distinguishing version number. If the
|
||||||
|
Program specifies that a certain numbered version of the GNU Affero General
|
||||||
|
Public License "or any later version" applies to it, you have the
|
||||||
|
option of following the terms and conditions either of that numbered
|
||||||
|
version or of any later version published by the Free Software
|
||||||
|
Foundation. If the Program does not specify a version number of the
|
||||||
|
GNU Affero General Public License, you may choose any version ever published
|
||||||
|
by the Free Software Foundation.
|
||||||
|
|
||||||
|
If the Program specifies that a proxy can decide which future
|
||||||
|
versions of the GNU Affero General Public License can be used, that proxy's
|
||||||
|
public statement of acceptance of a version permanently authorizes you
|
||||||
|
to choose that version for the Program.
|
||||||
|
|
||||||
|
Later license versions may give you additional or different
|
||||||
|
permissions. However, no additional obligations are imposed on any
|
||||||
|
author or copyright holder as a result of your choosing to follow a
|
||||||
|
later version.
|
||||||
|
|
||||||
|
15. Disclaimer of Warranty.
|
||||||
|
|
||||||
|
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||||
|
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||||
|
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||||
|
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||||
|
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||||
|
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||||
|
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||||
|
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||||
|
|
||||||
|
16. Limitation of Liability.
|
||||||
|
|
||||||
|
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||||
|
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||||
|
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||||
|
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||||
|
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||||
|
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||||
|
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||||
|
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||||
|
SUCH DAMAGES.
|
||||||
|
|
||||||
|
17. Interpretation of Sections 15 and 16.
|
||||||
|
|
||||||
|
If the disclaimer of warranty and limitation of liability provided
|
||||||
|
above cannot be given local legal effect according to their terms,
|
||||||
|
reviewing courts shall apply local law that most closely approximates
|
||||||
|
an absolute waiver of all civil liability in connection with the
|
||||||
|
Program, unless a warranty or assumption of liability accompanies a
|
||||||
|
copy of the Program in return for a fee.
|
||||||
|
|
||||||
|
END OF TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
How to Apply These Terms to Your New Programs
|
||||||
|
|
||||||
|
If you develop a new program, and you want it to be of the greatest
|
||||||
|
possible use to the public, the best way to achieve this is to make it
|
||||||
|
free software which everyone can redistribute and change under these terms.
|
||||||
|
|
||||||
|
To do so, attach the following notices to the program. It is safest
|
||||||
|
to attach them to the start of each source file to most effectively
|
||||||
|
state the exclusion of warranty; and each file should have at least
|
||||||
|
the "copyright" line and a pointer to where the full notice is found.
|
||||||
|
|
||||||
|
<one line to give the program's name and a brief idea of what it does.>
|
||||||
|
Copyright (C) <year> <name of author>
|
||||||
|
|
||||||
|
This program is free software: you can redistribute it and/or modify
|
||||||
|
it under the terms of the GNU Affero General Public License as published by
|
||||||
|
the Free Software Foundation, either version 3 of the License, or
|
||||||
|
(at your option) any later version.
|
||||||
|
|
||||||
|
This program is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
GNU Affero General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU Affero General Public License
|
||||||
|
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
Also add information on how to contact you by electronic and paper mail.
|
||||||
|
|
||||||
|
If your software can interact with users remotely through a computer
|
||||||
|
network, you should also make sure that it provides a way for users to
|
||||||
|
get its source. For example, if your program is a web application, its
|
||||||
|
interface could display a "Source" link that leads users to an archive
|
||||||
|
of the code. There are many ways you could offer source, and different
|
||||||
|
solutions will be better for different programs; see section 13 for the
|
||||||
|
specific requirements.
|
||||||
|
|
||||||
|
You should also get your employer (if you work as a programmer) or school,
|
||||||
|
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||||
|
For more information on this, and how to apply and follow the GNU AGPL, see
|
||||||
|
<https://www.gnu.org/licenses/>.
|
@ -0,0 +1 @@
|
|||||||
|
from .app_settings_inner import app_settings
|
@ -0,0 +1,89 @@
|
|||||||
|
# Copyright © 2017 Tom Hacohen
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as
|
||||||
|
# published by the Free Software Foundation, version 3.
|
||||||
|
#
|
||||||
|
# This library is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
from django.utils.functional import cached_property
|
||||||
|
|
||||||
|
|
||||||
|
class AppSettings:
|
||||||
|
def __init__(self, prefix):
|
||||||
|
self.prefix = prefix
|
||||||
|
|
||||||
|
def import_from_str(self, name):
|
||||||
|
from importlib import import_module
|
||||||
|
|
||||||
|
path, prop = name.rsplit(".", 1)
|
||||||
|
|
||||||
|
mod = import_module(path)
|
||||||
|
return getattr(mod, prop)
|
||||||
|
|
||||||
|
def _setting(self, name, dflt):
|
||||||
|
from django.conf import settings
|
||||||
|
|
||||||
|
return getattr(settings, self.prefix + name, dflt)
|
||||||
|
|
||||||
|
@cached_property
|
||||||
|
def REDIS_URI(self) -> t.Optional[str]: # pylint: disable=invalid-name
|
||||||
|
return self._setting("REDIS_URI", None)
|
||||||
|
|
||||||
|
@cached_property
|
||||||
|
def API_PERMISSIONS_READ(self): # pylint: disable=invalid-name
|
||||||
|
perms = self._setting("API_PERMISSIONS_READ", tuple())
|
||||||
|
ret = []
|
||||||
|
for perm in perms:
|
||||||
|
ret.append(self.import_from_str(perm))
|
||||||
|
return ret
|
||||||
|
|
||||||
|
@cached_property
|
||||||
|
def API_PERMISSIONS_WRITE(self): # pylint: disable=invalid-name
|
||||||
|
perms = self._setting("API_PERMISSIONS_WRITE", tuple())
|
||||||
|
ret = []
|
||||||
|
for perm in perms:
|
||||||
|
ret.append(self.import_from_str(perm))
|
||||||
|
return ret
|
||||||
|
|
||||||
|
@cached_property
|
||||||
|
def GET_USER_QUERYSET_FUNC(self): # pylint: disable=invalid-name
|
||||||
|
get_user_queryset = self._setting("GET_USER_QUERYSET_FUNC", None)
|
||||||
|
if get_user_queryset is not None:
|
||||||
|
return self.import_from_str(get_user_queryset)
|
||||||
|
return None
|
||||||
|
|
||||||
|
@cached_property
|
||||||
|
def CREATE_USER_FUNC(self): # pylint: disable=invalid-name
|
||||||
|
func = self._setting("CREATE_USER_FUNC", None)
|
||||||
|
if func is not None:
|
||||||
|
return self.import_from_str(func)
|
||||||
|
return None
|
||||||
|
|
||||||
|
@cached_property
|
||||||
|
def DASHBOARD_URL_FUNC(self): # pylint: disable=invalid-name
|
||||||
|
func = self._setting("DASHBOARD_URL_FUNC", None)
|
||||||
|
if func is not None:
|
||||||
|
return self.import_from_str(func)
|
||||||
|
return None
|
||||||
|
|
||||||
|
@cached_property
|
||||||
|
def CHUNK_PATH_FUNC(self): # pylint: disable=invalid-name
|
||||||
|
func = self._setting("CHUNK_PATH_FUNC", None)
|
||||||
|
if func is not None:
|
||||||
|
return self.import_from_str(func)
|
||||||
|
return None
|
||||||
|
|
||||||
|
@cached_property
|
||||||
|
def CHALLENGE_VALID_SECONDS(self): # pylint: disable=invalid-name
|
||||||
|
return self._setting("CHALLENGE_VALID_SECONDS", 60)
|
||||||
|
|
||||||
|
|
||||||
|
app_settings = AppSettings("ETEBASE_")
|
@ -0,0 +1,5 @@
|
|||||||
|
from django.apps import AppConfig
|
||||||
|
|
||||||
|
|
||||||
|
class DjangoEtebaseConfig(AppConfig):
|
||||||
|
name = "django_etebase"
|
@ -0,0 +1,185 @@
|
|||||||
|
# Generated by Django 3.0.3 on 2020-05-13 13:01
|
||||||
|
|
||||||
|
from django.conf import settings
|
||||||
|
import django.core.validators
|
||||||
|
from django.db import migrations, models
|
||||||
|
import django.db.models.deletion
|
||||||
|
import django_etebase.models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
initial = True
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="Collection",
|
||||||
|
fields=[
|
||||||
|
("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
|
||||||
|
(
|
||||||
|
"uid",
|
||||||
|
models.CharField(
|
||||||
|
db_index=True,
|
||||||
|
max_length=44,
|
||||||
|
validators=[
|
||||||
|
django.core.validators.RegexValidator(message="Not a valid UID", regex="[a-zA-Z0-9]")
|
||||||
|
],
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("version", models.PositiveSmallIntegerField()),
|
||||||
|
("owner", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"unique_together": {("uid", "owner")},
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="CollectionItem",
|
||||||
|
fields=[
|
||||||
|
("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
|
||||||
|
(
|
||||||
|
"uid",
|
||||||
|
models.CharField(
|
||||||
|
db_index=True,
|
||||||
|
max_length=44,
|
||||||
|
null=True,
|
||||||
|
validators=[
|
||||||
|
django.core.validators.RegexValidator(message="Not a valid UID", regex="[a-zA-Z0-9]")
|
||||||
|
],
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("version", models.PositiveSmallIntegerField()),
|
||||||
|
("encryptionKey", models.BinaryField(editable=True, null=True)),
|
||||||
|
(
|
||||||
|
"collection",
|
||||||
|
models.ForeignKey(
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name="items",
|
||||||
|
to="django_etebase.Collection",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"unique_together": {("uid", "collection")},
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="CollectionItemChunk",
|
||||||
|
fields=[
|
||||||
|
("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
|
||||||
|
(
|
||||||
|
"uid",
|
||||||
|
models.CharField(
|
||||||
|
db_index=True,
|
||||||
|
max_length=44,
|
||||||
|
validators=[
|
||||||
|
django.core.validators.RegexValidator(
|
||||||
|
message="Expected a 256bit base64url.", regex="^[a-zA-Z0-9\\-_]{43}$"
|
||||||
|
)
|
||||||
|
],
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"chunkFile",
|
||||||
|
models.FileField(max_length=150, unique=True, upload_to=django_etebase.models.chunk_directory_path),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"item",
|
||||||
|
models.ForeignKey(
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name="chunks",
|
||||||
|
to="django_etebase.CollectionItem",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="CollectionItemRevision",
|
||||||
|
fields=[
|
||||||
|
("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
|
||||||
|
(
|
||||||
|
"uid",
|
||||||
|
models.CharField(
|
||||||
|
db_index=True,
|
||||||
|
max_length=44,
|
||||||
|
unique=True,
|
||||||
|
validators=[
|
||||||
|
django.core.validators.RegexValidator(
|
||||||
|
message="Expected a 256bit base64url.", regex="^[a-zA-Z0-9\\-_]{43}$"
|
||||||
|
)
|
||||||
|
],
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("meta", models.BinaryField(editable=True)),
|
||||||
|
("current", models.BooleanField(db_index=True, default=True, null=True)),
|
||||||
|
("deleted", models.BooleanField(default=False)),
|
||||||
|
(
|
||||||
|
"item",
|
||||||
|
models.ForeignKey(
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name="revisions",
|
||||||
|
to="django_etebase.CollectionItem",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"unique_together": {("item", "current")},
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="RevisionChunkRelation",
|
||||||
|
fields=[
|
||||||
|
("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
|
||||||
|
(
|
||||||
|
"chunk",
|
||||||
|
models.ForeignKey(
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name="revisions_relation",
|
||||||
|
to="django_etebase.CollectionItemChunk",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"revision",
|
||||||
|
models.ForeignKey(
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name="chunks_relation",
|
||||||
|
to="django_etebase.CollectionItemRevision",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"ordering": ("id",),
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="CollectionMember",
|
||||||
|
fields=[
|
||||||
|
("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
|
||||||
|
("encryptionKey", models.BinaryField(editable=True)),
|
||||||
|
(
|
||||||
|
"accessLevel",
|
||||||
|
models.CharField(
|
||||||
|
choices=[("adm", "Admin"), ("rw", "Read Write"), ("ro", "Read Only")],
|
||||||
|
default="ro",
|
||||||
|
max_length=3,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"collection",
|
||||||
|
models.ForeignKey(
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name="members",
|
||||||
|
to="django_etebase.Collection",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"unique_together": {("user", "collection")},
|
||||||
|
},
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,33 @@
|
|||||||
|
# Generated by Django 3.0.3 on 2020-05-14 09:51
|
||||||
|
|
||||||
|
from django.conf import settings
|
||||||
|
from django.db import migrations, models
|
||||||
|
import django.db.models.deletion
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("myauth", "0001_initial"),
|
||||||
|
("django_etebase", "0001_initial"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="UserInfo",
|
||||||
|
fields=[
|
||||||
|
(
|
||||||
|
"owner",
|
||||||
|
models.OneToOneField(
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
primary_key=True,
|
||||||
|
serialize=False,
|
||||||
|
to=settings.AUTH_USER_MODEL,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("version", models.PositiveSmallIntegerField(default=1)),
|
||||||
|
("pubkey", models.BinaryField(editable=True)),
|
||||||
|
("salt", models.BinaryField(editable=True)),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,61 @@
|
|||||||
|
# Generated by Django 3.0.3 on 2020-05-20 11:03
|
||||||
|
|
||||||
|
from django.conf import settings
|
||||||
|
import django.core.validators
|
||||||
|
from django.db import migrations, models
|
||||||
|
import django.db.models.deletion
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||||
|
("django_etebase", "0002_userinfo"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="CollectionInvitation",
|
||||||
|
fields=[
|
||||||
|
("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
|
||||||
|
(
|
||||||
|
"uid",
|
||||||
|
models.CharField(
|
||||||
|
db_index=True,
|
||||||
|
max_length=44,
|
||||||
|
validators=[
|
||||||
|
django.core.validators.RegexValidator(
|
||||||
|
message="Expected a 256bit base64url.", regex="^[a-zA-Z0-9\\-_]{43}$"
|
||||||
|
)
|
||||||
|
],
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("signedEncryptionKey", models.BinaryField()),
|
||||||
|
(
|
||||||
|
"accessLevel",
|
||||||
|
models.CharField(
|
||||||
|
choices=[("adm", "Admin"), ("rw", "Read Write"), ("ro", "Read Only")],
|
||||||
|
default="ro",
|
||||||
|
max_length=3,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"fromMember",
|
||||||
|
models.ForeignKey(
|
||||||
|
on_delete=django.db.models.deletion.CASCADE, to="django_etebase.CollectionMember"
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"user",
|
||||||
|
models.ForeignKey(
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name="incoming_invitations",
|
||||||
|
to=settings.AUTH_USER_MODEL,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"unique_together": {("user", "fromMember")},
|
||||||
|
},
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,18 @@
|
|||||||
|
# Generated by Django 3.0.3 on 2020-05-21 14:45
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("django_etebase", "0003_collectioninvitation"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="collectioninvitation",
|
||||||
|
name="version",
|
||||||
|
field=models.PositiveSmallIntegerField(default=1),
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,18 @@
|
|||||||
|
# Generated by Django 3.0.3 on 2020-05-26 10:21
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("django_etebase", "0004_collectioninvitation_version"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.RenameField(
|
||||||
|
model_name="userinfo",
|
||||||
|
old_name="pubkey",
|
||||||
|
new_name="loginPubkey",
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,25 @@
|
|||||||
|
# Generated by Django 3.0.3 on 2020-05-26 10:40
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("django_etebase", "0005_auto_20200526_1021"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="userinfo",
|
||||||
|
name="encryptedSeckey",
|
||||||
|
field=models.BinaryField(default=b"", editable=True),
|
||||||
|
preserve_default=False,
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="userinfo",
|
||||||
|
name="pubkey",
|
||||||
|
field=models.BinaryField(default=b"", editable=True),
|
||||||
|
preserve_default=False,
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,73 @@
|
|||||||
|
# Generated by Django 3.0.3 on 2020-05-26 13:36
|
||||||
|
|
||||||
|
import django.core.validators
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("django_etebase", "0006_auto_20200526_1040"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="collection",
|
||||||
|
name="uid",
|
||||||
|
field=models.CharField(
|
||||||
|
db_index=True,
|
||||||
|
max_length=43,
|
||||||
|
validators=[django.core.validators.RegexValidator(message="Not a valid UID", regex="^[a-zA-Z0-9]*$")],
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="collectioninvitation",
|
||||||
|
name="uid",
|
||||||
|
field=models.CharField(
|
||||||
|
db_index=True,
|
||||||
|
max_length=43,
|
||||||
|
validators=[
|
||||||
|
django.core.validators.RegexValidator(
|
||||||
|
message="Expected a base64url.", regex="^[a-zA-Z0-9\\-_]{42,43}$"
|
||||||
|
)
|
||||||
|
],
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="collectionitem",
|
||||||
|
name="uid",
|
||||||
|
field=models.CharField(
|
||||||
|
db_index=True,
|
||||||
|
max_length=43,
|
||||||
|
null=True,
|
||||||
|
validators=[django.core.validators.RegexValidator(message="Not a valid UID", regex="^[a-zA-Z0-9]*$")],
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="collectionitemchunk",
|
||||||
|
name="uid",
|
||||||
|
field=models.CharField(
|
||||||
|
db_index=True,
|
||||||
|
max_length=43,
|
||||||
|
validators=[
|
||||||
|
django.core.validators.RegexValidator(
|
||||||
|
message="Expected a base64url.", regex="^[a-zA-Z0-9\\-_]{42,43}$"
|
||||||
|
)
|
||||||
|
],
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="collectionitemrevision",
|
||||||
|
name="uid",
|
||||||
|
field=models.CharField(
|
||||||
|
db_index=True,
|
||||||
|
max_length=43,
|
||||||
|
unique=True,
|
||||||
|
validators=[
|
||||||
|
django.core.validators.RegexValidator(
|
||||||
|
message="Expected a base64url.", regex="^[a-zA-Z0-9\\-_]{42,43}$"
|
||||||
|
)
|
||||||
|
],
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,43 @@
|
|||||||
|
# Generated by Django 3.0.3 on 2020-05-26 15:35
|
||||||
|
|
||||||
|
import django.core.validators
|
||||||
|
from django.db import migrations, models
|
||||||
|
import django.db.models.deletion
|
||||||
|
import django_etebase.models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("django_etebase", "0007_auto_20200526_1336"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="Stoken",
|
||||||
|
fields=[
|
||||||
|
("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
|
||||||
|
(
|
||||||
|
"uid",
|
||||||
|
models.CharField(
|
||||||
|
db_index=True,
|
||||||
|
default=django_etebase.models.generate_stoken_uid,
|
||||||
|
max_length=43,
|
||||||
|
unique=True,
|
||||||
|
validators=[
|
||||||
|
django.core.validators.RegexValidator(
|
||||||
|
message="Expected a base64url.", regex="^[a-zA-Z0-9\\-_]{42,43}$"
|
||||||
|
)
|
||||||
|
],
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="collectionitemrevision",
|
||||||
|
name="stoken",
|
||||||
|
field=models.OneToOneField(
|
||||||
|
null=True, on_delete=django.db.models.deletion.PROTECT, to="django_etebase.Stoken"
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,23 @@
|
|||||||
|
# Generated by Django 3.0.3 on 2020-05-26 15:35
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
def create_stokens(apps, schema_editor):
|
||||||
|
Stoken = apps.get_model("django_etebase", "Stoken")
|
||||||
|
CollectionItemRevision = apps.get_model("django_etebase", "CollectionItemRevision")
|
||||||
|
|
||||||
|
for rev in CollectionItemRevision.objects.all():
|
||||||
|
rev.stoken = Stoken.objects.create()
|
||||||
|
rev.save()
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("django_etebase", "0008_auto_20200526_1535"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.RunPython(create_stokens),
|
||||||
|
]
|
@ -0,0 +1,19 @@
|
|||||||
|
# Generated by Django 3.0.3 on 2020-05-26 15:39
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
import django.db.models.deletion
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("django_etebase", "0009_auto_20200526_1535"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="collectionitemrevision",
|
||||||
|
name="stoken",
|
||||||
|
field=models.OneToOneField(on_delete=django.db.models.deletion.PROTECT, to="django_etebase.Stoken"),
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,21 @@
|
|||||||
|
# Generated by Django 3.0.3 on 2020-05-27 07:43
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
import django.db.models.deletion
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("django_etebase", "0010_auto_20200526_1539"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="collectionmember",
|
||||||
|
name="stoken",
|
||||||
|
field=models.OneToOneField(
|
||||||
|
null=True, on_delete=django.db.models.deletion.PROTECT, to="django_etebase.Stoken"
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,23 @@
|
|||||||
|
# Generated by Django 3.0.3 on 2020-05-27 07:43
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
def create_stokens(apps, schema_editor):
|
||||||
|
Stoken = apps.get_model("django_etebase", "Stoken")
|
||||||
|
CollectionMember = apps.get_model("django_etebase", "CollectionMember")
|
||||||
|
|
||||||
|
for member in CollectionMember.objects.all():
|
||||||
|
member.stoken = Stoken.objects.create()
|
||||||
|
member.save()
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("django_etebase", "0011_collectionmember_stoken"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.RunPython(create_stokens),
|
||||||
|
]
|
@ -0,0 +1,40 @@
|
|||||||
|
# Generated by Django 3.0.3 on 2020-05-27 11:29
|
||||||
|
|
||||||
|
from django.conf import settings
|
||||||
|
from django.db import migrations, models
|
||||||
|
import django.db.models.deletion
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||||
|
("django_etebase", "0012_auto_20200527_0743"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="CollectionMemberRemoved",
|
||||||
|
fields=[
|
||||||
|
("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
|
||||||
|
(
|
||||||
|
"collection",
|
||||||
|
models.ForeignKey(
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name="removed_members",
|
||||||
|
to="django_etebase.Collection",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"stoken",
|
||||||
|
models.OneToOneField(
|
||||||
|
null=True, on_delete=django.db.models.deletion.PROTECT, to="django_etebase.Stoken"
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"unique_together": {("user", "collection")},
|
||||||
|
},
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,18 @@
|
|||||||
|
# Generated by Django 3.0.3 on 2020-06-02 15:58
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("django_etebase", "0013_collectionmemberremoved"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.RenameField(
|
||||||
|
model_name="userinfo",
|
||||||
|
old_name="encryptedSeckey",
|
||||||
|
new_name="encryptedContent",
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,18 @@
|
|||||||
|
# Generated by Django 3.0.3 on 2020-06-04 12:18
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("django_etebase", "0014_auto_20200602_1558"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="collectionitemrevision",
|
||||||
|
name="salt",
|
||||||
|
field=models.BinaryField(default=b"", editable=True),
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,36 @@
|
|||||||
|
# Generated by Django 3.0.3 on 2020-06-23 08:20
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
import django.db.models.deletion
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("django_etebase", "0015_collectionitemrevision_salt"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="collection",
|
||||||
|
name="main_item",
|
||||||
|
field=models.ForeignKey(
|
||||||
|
null=True,
|
||||||
|
on_delete=django.db.models.deletion.PROTECT,
|
||||||
|
related_name="parent",
|
||||||
|
to="django_etebase.CollectionItem",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.AlterUniqueTogether(
|
||||||
|
name="collection",
|
||||||
|
unique_together=set(),
|
||||||
|
),
|
||||||
|
migrations.RemoveField(
|
||||||
|
model_name="collection",
|
||||||
|
name="uid",
|
||||||
|
),
|
||||||
|
migrations.RemoveField(
|
||||||
|
model_name="collection",
|
||||||
|
name="version",
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,34 @@
|
|||||||
|
# Generated by Django 3.0.3 on 2020-06-23 09:58
|
||||||
|
|
||||||
|
import django.core.validators
|
||||||
|
from django.db import migrations, models
|
||||||
|
import django.db.models.deletion
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("django_etebase", "0016_auto_20200623_0820"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="collection",
|
||||||
|
name="main_item",
|
||||||
|
field=models.ForeignKey(
|
||||||
|
null=True,
|
||||||
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
|
related_name="parent",
|
||||||
|
to="django_etebase.CollectionItem",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="collectionitem",
|
||||||
|
name="uid",
|
||||||
|
field=models.CharField(
|
||||||
|
db_index=True,
|
||||||
|
max_length=43,
|
||||||
|
validators=[django.core.validators.RegexValidator(message="Not a valid UID", regex="^[a-zA-Z0-9]*$")],
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,25 @@
|
|||||||
|
# Generated by Django 3.0.3 on 2020-06-24 07:48
|
||||||
|
|
||||||
|
import django.core.validators
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("django_etebase", "0017_auto_20200623_0958"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="collectionitem",
|
||||||
|
name="uid",
|
||||||
|
field=models.CharField(
|
||||||
|
db_index=True,
|
||||||
|
max_length=43,
|
||||||
|
validators=[
|
||||||
|
django.core.validators.RegexValidator(message="Not a valid UID", regex="^[a-zA-Z0-9\\-_]*$")
|
||||||
|
],
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,25 @@
|
|||||||
|
# Generated by Django 3.0.3 on 2020-06-26 07:48
|
||||||
|
|
||||||
|
import django.core.validators
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("django_etebase", "0018_auto_20200624_0748"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="collectionitemchunk",
|
||||||
|
name="uid",
|
||||||
|
field=models.CharField(
|
||||||
|
db_index=True,
|
||||||
|
max_length=60,
|
||||||
|
validators=[
|
||||||
|
django.core.validators.RegexValidator(message="Not a valid UID", regex="^[a-zA-Z0-9\\-_]*$")
|
||||||
|
],
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,17 @@
|
|||||||
|
# Generated by Django 3.0.3 on 2020-06-26 08:19
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("django_etebase", "0019_auto_20200626_0748"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.RemoveField(
|
||||||
|
model_name="collectionitemrevision",
|
||||||
|
name="salt",
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,73 @@
|
|||||||
|
# Generated by Django 3.0.3 on 2020-06-26 09:13
|
||||||
|
|
||||||
|
import django.core.validators
|
||||||
|
from django.db import migrations, models
|
||||||
|
import django_etebase.models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("django_etebase", "0020_remove_collectionitemrevision_salt"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="collectioninvitation",
|
||||||
|
name="uid",
|
||||||
|
field=models.CharField(
|
||||||
|
db_index=True,
|
||||||
|
max_length=43,
|
||||||
|
validators=[
|
||||||
|
django.core.validators.RegexValidator(message="Not a valid UID", regex="^[a-zA-Z0-9\\-_]{20,}$")
|
||||||
|
],
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="collectionitem",
|
||||||
|
name="uid",
|
||||||
|
field=models.CharField(
|
||||||
|
db_index=True,
|
||||||
|
max_length=43,
|
||||||
|
validators=[
|
||||||
|
django.core.validators.RegexValidator(message="Not a valid UID", regex="^[a-zA-Z0-9\\-_]{20,}$")
|
||||||
|
],
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="collectionitemchunk",
|
||||||
|
name="uid",
|
||||||
|
field=models.CharField(
|
||||||
|
db_index=True,
|
||||||
|
max_length=60,
|
||||||
|
validators=[
|
||||||
|
django.core.validators.RegexValidator(message="Not a valid UID", regex="^[a-zA-Z0-9\\-_]{20,}$")
|
||||||
|
],
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="collectionitemrevision",
|
||||||
|
name="uid",
|
||||||
|
field=models.CharField(
|
||||||
|
db_index=True,
|
||||||
|
max_length=43,
|
||||||
|
unique=True,
|
||||||
|
validators=[
|
||||||
|
django.core.validators.RegexValidator(message="Not a valid UID", regex="^[a-zA-Z0-9\\-_]{20,}$")
|
||||||
|
],
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="stoken",
|
||||||
|
name="uid",
|
||||||
|
field=models.CharField(
|
||||||
|
db_index=True,
|
||||||
|
default=django_etebase.models.generate_stoken_uid,
|
||||||
|
max_length=43,
|
||||||
|
unique=True,
|
||||||
|
validators=[
|
||||||
|
django.core.validators.RegexValidator(message="Not a valid UID", regex="^[a-zA-Z0-9\\-_]{20,}$")
|
||||||
|
],
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,17 @@
|
|||||||
|
# Generated by Django 3.0.3 on 2020-08-04 10:59
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("django_etebase", "0021_auto_20200626_0913"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterUniqueTogether(
|
||||||
|
name="collectionitemchunk",
|
||||||
|
unique_together={("item", "uid")},
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,24 @@
|
|||||||
|
# Generated by Django 3.0.3 on 2020-08-04 12:08
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
import django.db.models.deletion
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("django_etebase", "0022_auto_20200804_1059"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="collectionitemchunk",
|
||||||
|
name="collection",
|
||||||
|
field=models.ForeignKey(
|
||||||
|
null=True,
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name="chunks",
|
||||||
|
to="django_etebase.Collection",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,22 @@
|
|||||||
|
# Generated by Django 3.0.3 on 2020-08-04 12:09
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
def change_chunk_to_collections(apps, schema_editor):
|
||||||
|
CollectionItemChunk = apps.get_model("django_etebase", "CollectionItemChunk")
|
||||||
|
|
||||||
|
for chunk in CollectionItemChunk.objects.all():
|
||||||
|
chunk.collection = chunk.item.collection
|
||||||
|
chunk.save()
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("django_etebase", "0023_collectionitemchunk_collection"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.RunPython(change_chunk_to_collections),
|
||||||
|
]
|
@ -0,0 +1,29 @@
|
|||||||
|
# Generated by Django 3.0.3 on 2020-08-04 12:16
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
import django.db.models.deletion
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("django_etebase", "0024_auto_20200804_1209"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="collectionitemchunk",
|
||||||
|
name="collection",
|
||||||
|
field=models.ForeignKey(
|
||||||
|
on_delete=django.db.models.deletion.CASCADE, related_name="chunks", to="django_etebase.Collection"
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.AlterUniqueTogether(
|
||||||
|
name="collectionitemchunk",
|
||||||
|
unique_together={("collection", "uid")},
|
||||||
|
),
|
||||||
|
migrations.RemoveField(
|
||||||
|
model_name="collectionitemchunk",
|
||||||
|
name="item",
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,23 @@
|
|||||||
|
# Generated by Django 3.1 on 2020-09-07 07:52
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("django_etebase", "0025_auto_20200804_1216"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.RenameField(
|
||||||
|
model_name="collectioninvitation",
|
||||||
|
old_name="accessLevel",
|
||||||
|
new_name="accessLevelOld",
|
||||||
|
),
|
||||||
|
migrations.RenameField(
|
||||||
|
model_name="collectionmember",
|
||||||
|
old_name="accessLevel",
|
||||||
|
new_name="accessLevelOld",
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,23 @@
|
|||||||
|
# Generated by Django 3.1 on 2020-09-07 07:52
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("django_etebase", "0026_auto_20200907_0752"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="collectioninvitation",
|
||||||
|
name="accessLevel",
|
||||||
|
field=models.IntegerField(choices=[(0, "Read Only"), (1, "Admin"), (2, "Read Write")], default=0),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="collectionmember",
|
||||||
|
name="accessLevel",
|
||||||
|
field=models.IntegerField(choices=[(0, "Read Only"), (1, "Admin"), (2, "Read Write")], default=0),
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,39 @@
|
|||||||
|
# Generated by Django 3.1 on 2020-09-07 07:54
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
from django_etebase.models import AccessLevels
|
||||||
|
|
||||||
|
|
||||||
|
def change_access_level_to_int(apps, schema_editor):
|
||||||
|
CollectionMember = apps.get_model("django_etebase", "CollectionMember")
|
||||||
|
CollectionInvitation = apps.get_model("django_etebase", "CollectionInvitation")
|
||||||
|
|
||||||
|
for member in CollectionMember.objects.all():
|
||||||
|
if member.accessLevelOld == "adm":
|
||||||
|
member.accessLevel = AccessLevels.ADMIN
|
||||||
|
elif member.accessLevelOld == "rw":
|
||||||
|
member.accessLevel = AccessLevels.READ_WRITE
|
||||||
|
elif member.accessLevelOld == "ro":
|
||||||
|
member.accessLevel = AccessLevels.READ_ONLY
|
||||||
|
member.save()
|
||||||
|
|
||||||
|
for invitation in CollectionInvitation.objects.all():
|
||||||
|
if invitation.accessLevelOld == "adm":
|
||||||
|
invitation.accessLevel = AccessLevels.ADMIN
|
||||||
|
elif invitation.accessLevelOld == "rw":
|
||||||
|
invitation.accessLevel = AccessLevels.READ_WRITE
|
||||||
|
elif invitation.accessLevelOld == "ro":
|
||||||
|
invitation.accessLevel = AccessLevels.READ_ONLY
|
||||||
|
invitation.save()
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("django_etebase", "0027_auto_20200907_0752"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.RunPython(change_access_level_to_int),
|
||||||
|
]
|
@ -0,0 +1,21 @@
|
|||||||
|
# Generated by Django 3.1 on 2020-09-07 08:01
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("django_etebase", "0028_auto_20200907_0754"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.RemoveField(
|
||||||
|
model_name="collectioninvitation",
|
||||||
|
name="accessLevelOld",
|
||||||
|
),
|
||||||
|
migrations.RemoveField(
|
||||||
|
model_name="collectionmember",
|
||||||
|
name="accessLevelOld",
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,24 @@
|
|||||||
|
# Generated by Django 3.1.1 on 2020-09-22 08:32
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
import django.db.models.deletion
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("django_etebase", "0029_auto_20200907_0801"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="collection",
|
||||||
|
name="main_item",
|
||||||
|
field=models.OneToOneField(
|
||||||
|
null=True,
|
||||||
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
|
related_name="parent",
|
||||||
|
to="django_etebase.collectionitem",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,31 @@
|
|||||||
|
# Generated by Django 3.1.1 on 2020-10-13 13:36
|
||||||
|
|
||||||
|
from django.conf import settings
|
||||||
|
from django.db import migrations, models
|
||||||
|
import django.db.models.deletion
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||||
|
("django_etebase", "0030_auto_20200922_0832"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="CollectionType",
|
||||||
|
fields=[
|
||||||
|
("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
|
||||||
|
("uid", models.BinaryField(db_index=True, editable=True)),
|
||||||
|
("owner", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="collectionmember",
|
||||||
|
name="collectionType",
|
||||||
|
field=models.ForeignKey(
|
||||||
|
null=True, on_delete=django.db.models.deletion.PROTECT, to="django_etebase.collectiontype"
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,18 @@
|
|||||||
|
# Generated by Django 3.1.1 on 2020-10-13 14:09
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("django_etebase", "0031_auto_20201013_1336"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="collectiontype",
|
||||||
|
name="uid",
|
||||||
|
field=models.BinaryField(db_index=True, editable=True, max_length=1024, unique=True),
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,19 @@
|
|||||||
|
# Generated by Django 3.1.1 on 2020-12-14 11:21
|
||||||
|
|
||||||
|
import django.core.validators
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('django_etebase', '0032_auto_20201013_1409'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='collection',
|
||||||
|
name='uid',
|
||||||
|
field=models.CharField(db_index=True, max_length=43, null=True, validators=[django.core.validators.RegexValidator(message='Not a valid UID', regex='^[a-zA-Z0-9\\-_]{20,}$')]),
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,22 @@
|
|||||||
|
# Generated by Django 3.1.1 on 2020-12-14 11:24
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
def update_collection_uid(apps, schema_editor):
|
||||||
|
Collection = apps.get_model("django_etebase", "Collection")
|
||||||
|
|
||||||
|
for collection in Collection.objects.all():
|
||||||
|
collection.uid = collection.main_item.uid
|
||||||
|
collection.save()
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("django_etebase", "0033_collection_uid"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.RunPython(update_collection_uid),
|
||||||
|
]
|
@ -0,0 +1,19 @@
|
|||||||
|
# Generated by Django 3.1.1 on 2020-12-14 11:26
|
||||||
|
|
||||||
|
import django.core.validators
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('django_etebase', '0034_auto_20201214_1124'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='collection',
|
||||||
|
name='uid',
|
||||||
|
field=models.CharField(db_index=True, max_length=43, validators=[django.core.validators.RegexValidator(message='Not a valid UID', regex='^[a-zA-Z0-9\\-_]{20,}$')]),
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,19 @@
|
|||||||
|
# Generated by Django 3.1.1 on 2020-12-14 11:28
|
||||||
|
|
||||||
|
import django.core.validators
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('django_etebase', '0035_auto_20201214_1126'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='collection',
|
||||||
|
name='uid',
|
||||||
|
field=models.CharField(db_index=True, max_length=43, unique=True, validators=[django.core.validators.RegexValidator(message='Not a valid UID', regex='^[a-zA-Z0-9\\-_]{20,}$')]),
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,18 @@
|
|||||||
|
# Generated by Django 3.1.1 on 2021-01-27 12:37
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("django_etebase", "0036_auto_20201214_1128"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="collectiontype",
|
||||||
|
name="uid",
|
||||||
|
field=models.BinaryField(db_index=True, editable=True, max_length=1024, unique=True),
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,272 @@
|
|||||||
|
# Copyright © 2017 Tom Hacohen
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as
|
||||||
|
# published by the Free Software Foundation, version 3.
|
||||||
|
#
|
||||||
|
# This library is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
import typing as t
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from django.db import models, transaction
|
||||||
|
from django.conf import settings
|
||||||
|
from django.core.validators import RegexValidator
|
||||||
|
from django.db.models import Max, Value as V
|
||||||
|
from django.db.models.functions import Coalesce, Greatest
|
||||||
|
from django.utils.functional import cached_property
|
||||||
|
from django.utils.crypto import get_random_string
|
||||||
|
|
||||||
|
from . import app_settings
|
||||||
|
|
||||||
|
|
||||||
|
UidValidator = RegexValidator(regex=r"^[a-zA-Z0-9\-_]{20,}$", message="Not a valid UID")
|
||||||
|
|
||||||
|
|
||||||
|
def stoken_annotation_builder(stoken_id_fields: t.List[str]):
|
||||||
|
aggr_fields = [Coalesce(Max(field), V(0)) for field in stoken_id_fields]
|
||||||
|
return Greatest(*aggr_fields) if len(aggr_fields) > 1 else aggr_fields[0]
|
||||||
|
|
||||||
|
|
||||||
|
class CollectionType(models.Model):
|
||||||
|
owner = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
|
||||||
|
uid = models.BinaryField(editable=True, blank=False, null=False, db_index=True, unique=True, max_length=1024)
|
||||||
|
|
||||||
|
objects: models.manager.BaseManager["CollectionType"]
|
||||||
|
|
||||||
|
|
||||||
|
class Collection(models.Model):
|
||||||
|
main_item = models.OneToOneField("CollectionItem", related_name="parent", null=True, on_delete=models.SET_NULL)
|
||||||
|
# The same as main_item.uid, we just also save it here so we have DB constraints for uniqueness (and efficiency)
|
||||||
|
uid = models.CharField(db_index=True, unique=True, blank=False, max_length=43, validators=[UidValidator])
|
||||||
|
owner = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
|
||||||
|
|
||||||
|
stoken_annotation = stoken_annotation_builder(["items__revisions__stoken", "members__stoken"])
|
||||||
|
|
||||||
|
objects: models.manager.BaseManager["Collection"]
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return self.uid
|
||||||
|
|
||||||
|
@property
|
||||||
|
def content(self) -> "CollectionItemRevision":
|
||||||
|
assert self.main_item is not None
|
||||||
|
return self.main_item.content
|
||||||
|
|
||||||
|
@property
|
||||||
|
def etag(self) -> str:
|
||||||
|
return self.content.uid
|
||||||
|
|
||||||
|
@cached_property
|
||||||
|
def stoken(self) -> str:
|
||||||
|
stoken_id = (
|
||||||
|
self.__class__.objects.filter(main_item=self.main_item)
|
||||||
|
.annotate(max_stoken=self.stoken_annotation)
|
||||||
|
.values("max_stoken")
|
||||||
|
.first()["max_stoken"]
|
||||||
|
)
|
||||||
|
|
||||||
|
if stoken_id == 0:
|
||||||
|
raise Exception("stoken is None. Should never happen")
|
||||||
|
|
||||||
|
return Stoken.objects.get(id=stoken_id).uid
|
||||||
|
|
||||||
|
|
||||||
|
class CollectionItem(models.Model):
|
||||||
|
uid = models.CharField(db_index=True, blank=False, max_length=43, validators=[UidValidator])
|
||||||
|
collection = models.ForeignKey(Collection, related_name="items", on_delete=models.CASCADE)
|
||||||
|
version = models.PositiveSmallIntegerField()
|
||||||
|
encryptionKey = models.BinaryField(editable=True, blank=False, null=True)
|
||||||
|
|
||||||
|
stoken_annotation = stoken_annotation_builder(["revisions__stoken"])
|
||||||
|
|
||||||
|
objects: models.manager.BaseManager["CollectionItem"]
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
unique_together = ("uid", "collection")
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return "{} {}".format(self.uid, self.collection.uid)
|
||||||
|
|
||||||
|
@cached_property
|
||||||
|
def content(self) -> "CollectionItemRevision":
|
||||||
|
return self.revisions.get(current=True)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def etag(self) -> str:
|
||||||
|
return self.content.uid
|
||||||
|
|
||||||
|
|
||||||
|
def chunk_directory_path(instance: "CollectionItemChunk", filename: str) -> Path:
|
||||||
|
custom_func = app_settings.CHUNK_PATH_FUNC
|
||||||
|
if custom_func is not None:
|
||||||
|
return custom_func(instance, filename)
|
||||||
|
|
||||||
|
col: Collection = instance.collection
|
||||||
|
user_id: int = col.owner.id
|
||||||
|
uid_prefix: str = instance.uid[:2]
|
||||||
|
uid_rest: str = instance.uid[2:]
|
||||||
|
return Path("user_{}".format(user_id), col.uid, uid_prefix, uid_rest)
|
||||||
|
|
||||||
|
|
||||||
|
class CollectionItemChunk(models.Model):
|
||||||
|
uid = models.CharField(db_index=True, blank=False, null=False, max_length=60, validators=[UidValidator])
|
||||||
|
collection = models.ForeignKey(Collection, related_name="chunks", on_delete=models.CASCADE)
|
||||||
|
chunkFile = models.FileField(upload_to=chunk_directory_path, max_length=150, unique=True)
|
||||||
|
|
||||||
|
objects: models.manager.BaseManager["CollectionItemChunk"]
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return self.uid
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
unique_together = ("collection", "uid")
|
||||||
|
|
||||||
|
|
||||||
|
def generate_stoken_uid():
|
||||||
|
return get_random_string(32, allowed_chars="abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_")
|
||||||
|
|
||||||
|
|
||||||
|
class Stoken(models.Model):
|
||||||
|
id: int
|
||||||
|
uid = models.CharField(
|
||||||
|
db_index=True,
|
||||||
|
unique=True,
|
||||||
|
blank=False,
|
||||||
|
null=False,
|
||||||
|
default=generate_stoken_uid,
|
||||||
|
max_length=43,
|
||||||
|
validators=[UidValidator],
|
||||||
|
)
|
||||||
|
|
||||||
|
objects: models.manager.BaseManager["Stoken"]
|
||||||
|
|
||||||
|
|
||||||
|
class CollectionItemRevision(models.Model):
|
||||||
|
stoken = models.OneToOneField(Stoken, on_delete=models.PROTECT)
|
||||||
|
uid = models.CharField(
|
||||||
|
db_index=True, unique=True, blank=False, null=False, max_length=43, validators=[UidValidator]
|
||||||
|
)
|
||||||
|
item = models.ForeignKey(CollectionItem, related_name="revisions", on_delete=models.CASCADE)
|
||||||
|
meta = models.BinaryField(editable=True, blank=False, null=False)
|
||||||
|
current = models.BooleanField(db_index=True, default=True, null=True)
|
||||||
|
deleted = models.BooleanField(default=False)
|
||||||
|
|
||||||
|
objects: models.manager.BaseManager["CollectionItemRevision"]
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
unique_together = ("item", "current")
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return "{} {} current={}".format(self.uid, self.item.uid, self.current)
|
||||||
|
|
||||||
|
|
||||||
|
class RevisionChunkRelation(models.Model):
|
||||||
|
chunk = models.ForeignKey(CollectionItemChunk, related_name="revisions_relation", on_delete=models.CASCADE)
|
||||||
|
revision = models.ForeignKey(CollectionItemRevision, related_name="chunks_relation", on_delete=models.CASCADE)
|
||||||
|
|
||||||
|
objects: models.manager.BaseManager["RevisionChunkRelation"]
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
ordering = ("id",)
|
||||||
|
|
||||||
|
|
||||||
|
class AccessLevels(models.IntegerChoices):
|
||||||
|
READ_ONLY = 0
|
||||||
|
ADMIN = 1
|
||||||
|
READ_WRITE = 2
|
||||||
|
|
||||||
|
|
||||||
|
class CollectionMember(models.Model):
|
||||||
|
stoken = models.OneToOneField(Stoken, on_delete=models.PROTECT, null=True)
|
||||||
|
collection = models.ForeignKey(Collection, related_name="members", on_delete=models.CASCADE)
|
||||||
|
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
|
||||||
|
encryptionKey = models.BinaryField(editable=True, blank=False, null=False)
|
||||||
|
collectionType = models.ForeignKey(CollectionType, on_delete=models.PROTECT, null=True)
|
||||||
|
accessLevel = models.IntegerField(
|
||||||
|
choices=AccessLevels.choices,
|
||||||
|
default=AccessLevels.READ_ONLY,
|
||||||
|
)
|
||||||
|
|
||||||
|
stoken_annotation = stoken_annotation_builder(["stoken"])
|
||||||
|
|
||||||
|
objects: models.manager.BaseManager["CollectionMember"]
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
unique_together = ("user", "collection")
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return "{} {}".format(self.collection.uid, self.user)
|
||||||
|
|
||||||
|
def revoke(self):
|
||||||
|
with transaction.atomic():
|
||||||
|
CollectionMemberRemoved.objects.update_or_create(
|
||||||
|
collection=self.collection,
|
||||||
|
user=self.user,
|
||||||
|
defaults={
|
||||||
|
"stoken": Stoken.objects.create(),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
self.delete()
|
||||||
|
|
||||||
|
|
||||||
|
class CollectionMemberRemoved(models.Model):
|
||||||
|
stoken = models.OneToOneField(Stoken, on_delete=models.PROTECT, null=True)
|
||||||
|
collection = models.ForeignKey(Collection, related_name="removed_members", on_delete=models.CASCADE)
|
||||||
|
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
|
||||||
|
|
||||||
|
objects: models.manager.BaseManager["CollectionMemberRemoved"]
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
unique_together = ("user", "collection")
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return "{} {}".format(self.collection.uid, self.user)
|
||||||
|
|
||||||
|
|
||||||
|
class CollectionInvitation(models.Model):
|
||||||
|
uid = models.CharField(db_index=True, blank=False, null=False, max_length=43, validators=[UidValidator])
|
||||||
|
version = models.PositiveSmallIntegerField(default=1)
|
||||||
|
fromMember = models.ForeignKey(CollectionMember, on_delete=models.CASCADE)
|
||||||
|
# FIXME: make sure to delete all invitations for the same collection once one is accepted
|
||||||
|
# Make sure to not allow invitations if already a member
|
||||||
|
|
||||||
|
user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="incoming_invitations", on_delete=models.CASCADE)
|
||||||
|
signedEncryptionKey = models.BinaryField(editable=False, blank=False, null=False)
|
||||||
|
accessLevel = models.IntegerField(
|
||||||
|
choices=AccessLevels.choices,
|
||||||
|
default=AccessLevels.READ_ONLY,
|
||||||
|
)
|
||||||
|
|
||||||
|
objects: models.manager.BaseManager["CollectionInvitation"]
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
unique_together = ("user", "fromMember")
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return "{} {}".format(self.fromMember.collection.uid, self.user)
|
||||||
|
|
||||||
|
@cached_property
|
||||||
|
def collection(self) -> Collection:
|
||||||
|
return self.fromMember.collection
|
||||||
|
|
||||||
|
|
||||||
|
class UserInfo(models.Model):
|
||||||
|
owner = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, primary_key=True)
|
||||||
|
version = models.PositiveSmallIntegerField(default=1)
|
||||||
|
loginPubkey = models.BinaryField(editable=True, blank=False, null=False)
|
||||||
|
pubkey = models.BinaryField(editable=True, blank=False, null=False)
|
||||||
|
encryptedContent = models.BinaryField(editable=True, blank=False, null=False)
|
||||||
|
salt = models.BinaryField(editable=True, blank=False, null=False)
|
||||||
|
|
||||||
|
objects: models.manager.BaseManager["UserInfo"]
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return "UserInfo<{}>".format(self.owner)
|
@ -0,0 +1,3 @@
|
|||||||
|
from django.dispatch import Signal
|
||||||
|
|
||||||
|
user_signed_up = Signal(providing_args=["request", "user"])
|
@ -0,0 +1,5 @@
|
|||||||
|
from django.apps import AppConfig
|
||||||
|
|
||||||
|
|
||||||
|
class TokenAuthConfig(AppConfig):
|
||||||
|
name = "django_etebase.token_auth"
|
@ -0,0 +1,38 @@
|
|||||||
|
# Generated by Django 3.0.3 on 2020-06-03 12:49
|
||||||
|
|
||||||
|
from django.conf import settings
|
||||||
|
from django.db import migrations, models
|
||||||
|
import django.db.models.deletion
|
||||||
|
from django_etebase.token_auth import models as token_auth_models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
initial = True
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="AuthToken",
|
||||||
|
fields=[
|
||||||
|
("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
|
||||||
|
(
|
||||||
|
"key",
|
||||||
|
models.CharField(db_index=True, default=token_auth_models.generate_key, max_length=40, unique=True),
|
||||||
|
),
|
||||||
|
("created", models.DateTimeField(auto_now_add=True)),
|
||||||
|
("expiry", models.DateTimeField(blank=True, default=token_auth_models.get_default_expiry, null=True)),
|
||||||
|
(
|
||||||
|
"user",
|
||||||
|
models.ForeignKey(
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name="auth_token_set",
|
||||||
|
to=settings.AUTH_USER_MODEL,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,25 @@
|
|||||||
|
from django.db import models
|
||||||
|
from django.utils import timezone
|
||||||
|
from django.utils.crypto import get_random_string
|
||||||
|
from myauth.models import get_typed_user_model
|
||||||
|
|
||||||
|
User = get_typed_user_model()
|
||||||
|
|
||||||
|
|
||||||
|
def generate_key():
|
||||||
|
return get_random_string(40)
|
||||||
|
|
||||||
|
|
||||||
|
def get_default_expiry():
|
||||||
|
return timezone.now() + timezone.timedelta(days=30)
|
||||||
|
|
||||||
|
|
||||||
|
class AuthToken(models.Model):
|
||||||
|
|
||||||
|
key = models.CharField(max_length=40, unique=True, db_index=True, default=generate_key)
|
||||||
|
user = models.ForeignKey(User, null=False, blank=False, related_name="auth_token_set", on_delete=models.CASCADE)
|
||||||
|
created = models.DateTimeField(auto_now_add=True)
|
||||||
|
expiry = models.DateTimeField(null=True, blank=True, default=get_default_expiry)
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return "{}: {}".format(self.key, self.user)
|
@ -0,0 +1,37 @@
|
|||||||
|
import typing as t
|
||||||
|
from dataclasses import dataclass
|
||||||
|
|
||||||
|
from django.db.models import QuerySet
|
||||||
|
from django.core.exceptions import PermissionDenied
|
||||||
|
from myauth.models import UserType, get_typed_user_model
|
||||||
|
|
||||||
|
from . import app_settings
|
||||||
|
|
||||||
|
|
||||||
|
User = get_typed_user_model()
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class CallbackContext:
|
||||||
|
"""Class for passing extra context to callbacks"""
|
||||||
|
|
||||||
|
url_kwargs: t.Dict[str, t.Any]
|
||||||
|
user: t.Optional[UserType] = None
|
||||||
|
|
||||||
|
|
||||||
|
def get_user_queryset(queryset: QuerySet[UserType], context: CallbackContext) -> QuerySet[UserType]:
|
||||||
|
custom_func = app_settings.GET_USER_QUERYSET_FUNC
|
||||||
|
if custom_func is not None:
|
||||||
|
return custom_func(queryset, context)
|
||||||
|
return queryset
|
||||||
|
|
||||||
|
|
||||||
|
def create_user(context: CallbackContext, *args, **kwargs) -> UserType:
|
||||||
|
custom_func = app_settings.CREATE_USER_FUNC
|
||||||
|
if custom_func is not None:
|
||||||
|
return custom_func(context, *args, **kwargs)
|
||||||
|
return User.objects.create_user(*args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def create_user_blocked(*args, **kwargs):
|
||||||
|
raise PermissionDenied("Signup is disabled for this server. Please refer to the README for more information.")
|
@ -0,0 +1,16 @@
|
|||||||
|
#! /bin/bash
|
||||||
|
|
||||||
|
# Build the `test-server` image, which runs the server in a simple configuration
|
||||||
|
# designed to be used in tests, based on the current git revision.
|
||||||
|
|
||||||
|
TAG="${1:-latest}"
|
||||||
|
|
||||||
|
echo "Building working copy to etesync/test-server:${TAG}"
|
||||||
|
|
||||||
|
ETESYNC_VERSION=$(git describe --tags)
|
||||||
|
|
||||||
|
docker build \
|
||||||
|
--build-arg ETESYNC_VERSION=${ETESYNC_VERSION} \
|
||||||
|
-t etesync/test-server:${TAG} \
|
||||||
|
-f docker/test-server/Dockerfile \
|
||||||
|
.
|
@ -0,0 +1,38 @@
|
|||||||
|
FROM python:3.9.0-alpine
|
||||||
|
|
||||||
|
ARG ETESYNC_VERSION
|
||||||
|
|
||||||
|
ENV PIP_DISABLE_PIP_VERSION_CHECK=1
|
||||||
|
ENV PIP_NO_CACHE_DIR=1
|
||||||
|
|
||||||
|
# install packages and pip requirements first, in a single step,
|
||||||
|
COPY /requirements.txt /requirements.txt
|
||||||
|
RUN set -ex ;\
|
||||||
|
apk add libpq postgresql-dev --virtual .build-deps coreutils gcc libc-dev libffi-dev make ;\
|
||||||
|
pip install -U pip ;\
|
||||||
|
pip install --no-cache-dir --progress-bar off -r /requirements.txt ;\
|
||||||
|
apk del .build-deps make gcc coreutils ;\
|
||||||
|
rm -rf /root/.cache
|
||||||
|
|
||||||
|
COPY . /app
|
||||||
|
|
||||||
|
RUN set -ex ;\
|
||||||
|
mkdir -p /data/static /data/media ;\
|
||||||
|
cd /app ;\
|
||||||
|
mkdir -p /etc/etebase-server ;\
|
||||||
|
cp docker/test-server/etebase-server.ini /etc/etebase-server ;\
|
||||||
|
sed -e '/ETEBASE_CREATE_USER_FUNC/ s/^#*/#/' -i /app/etebase_server/settings.py ;\
|
||||||
|
chmod +x docker/test-server/entrypoint.sh
|
||||||
|
|
||||||
|
# this is a test image and should start up quickly, so it starts with the DB
|
||||||
|
# and static data already fully set up.
|
||||||
|
RUN set -ex ;\
|
||||||
|
cd /app ;\
|
||||||
|
python manage.py migrate ;\
|
||||||
|
python manage.py collectstatic --noinput
|
||||||
|
|
||||||
|
ENV ETESYNC_VERSION=${ETESYNC_VERSION}
|
||||||
|
VOLUME /data
|
||||||
|
EXPOSE 3735
|
||||||
|
|
||||||
|
ENTRYPOINT ["/app/docker/test-server/entrypoint.sh"]
|
@ -0,0 +1,6 @@
|
|||||||
|
#! /bin/sh
|
||||||
|
|
||||||
|
echo "Running etesync test server ${ETESYNC_VERSION}"
|
||||||
|
|
||||||
|
cd /app
|
||||||
|
uvicorn etebase_server.asgi:application --host 0.0.0.0 --port 3735
|
@ -0,0 +1,12 @@
|
|||||||
|
[global]
|
||||||
|
secret_file = secret.txt
|
||||||
|
debug = true
|
||||||
|
static_root = /data/static
|
||||||
|
media_root = /data/media
|
||||||
|
|
||||||
|
[allowed_hosts]
|
||||||
|
allowed_host1 = *
|
||||||
|
|
||||||
|
[database]
|
||||||
|
engine = django.db.backends.sqlite3
|
||||||
|
name = /db.sqlite3
|
@ -0,0 +1,20 @@
|
|||||||
|
[global]
|
||||||
|
secret_file = secret.txt
|
||||||
|
debug = false
|
||||||
|
;Set the paths where data will be stored at
|
||||||
|
static_root = /path/to/static
|
||||||
|
media_root = /path/to/media
|
||||||
|
|
||||||
|
;Advanced options, only uncomment if you know what you're doing:
|
||||||
|
;static_url = /static/
|
||||||
|
;media_url = /user-media/
|
||||||
|
;language_code = en-us
|
||||||
|
;time_zone = UTC
|
||||||
|
;redis_uri = redis://localhost:6379
|
||||||
|
|
||||||
|
[allowed_hosts]
|
||||||
|
allowed_host1 = example.com
|
||||||
|
|
||||||
|
[database]
|
||||||
|
engine = django.db.backends.sqlite3
|
||||||
|
name = db.sqlite3
|
@ -0,0 +1,27 @@
|
|||||||
|
"""
|
||||||
|
FIXME: this whole function is a hack around the django db limitations due to how db connections are cached and cleaned.
|
||||||
|
Essentially django assumes there's the django request dispatcher to automatically clean up after the ORM.
|
||||||
|
"""
|
||||||
|
import typing as t
|
||||||
|
from functools import wraps
|
||||||
|
|
||||||
|
from django.db import close_old_connections, reset_queries
|
||||||
|
|
||||||
|
|
||||||
|
def django_db_cleanup():
|
||||||
|
reset_queries()
|
||||||
|
close_old_connections()
|
||||||
|
|
||||||
|
|
||||||
|
def django_db_cleanup_decorator(func: t.Callable[..., t.Any]):
|
||||||
|
from inspect import iscoroutinefunction
|
||||||
|
|
||||||
|
if iscoroutinefunction(func):
|
||||||
|
return func
|
||||||
|
|
||||||
|
@wraps(func)
|
||||||
|
def wrapper(*args, **kwargs):
|
||||||
|
django_db_cleanup()
|
||||||
|
return func(*args, **kwargs)
|
||||||
|
|
||||||
|
return wrapper
|
@ -0,0 +1,88 @@
|
|||||||
|
import dataclasses
|
||||||
|
|
||||||
|
from fastapi import Depends
|
||||||
|
from fastapi.security import APIKeyHeader
|
||||||
|
|
||||||
|
from django.utils import timezone
|
||||||
|
from django.db.models import QuerySet
|
||||||
|
|
||||||
|
from django_etebase import models
|
||||||
|
from django_etebase.token_auth.models import AuthToken, get_default_expiry
|
||||||
|
from myauth.models import UserType, get_typed_user_model
|
||||||
|
from .exceptions import AuthenticationFailed
|
||||||
|
from .utils import get_object_or_404
|
||||||
|
from .db_hack import django_db_cleanup_decorator
|
||||||
|
|
||||||
|
|
||||||
|
User = get_typed_user_model()
|
||||||
|
token_scheme = APIKeyHeader(name="Authorization")
|
||||||
|
AUTO_REFRESH = True
|
||||||
|
MIN_REFRESH_INTERVAL = 60
|
||||||
|
|
||||||
|
|
||||||
|
@dataclasses.dataclass(frozen=True)
|
||||||
|
class AuthData:
|
||||||
|
user: UserType
|
||||||
|
token: AuthToken
|
||||||
|
|
||||||
|
|
||||||
|
def __renew_token(auth_token: AuthToken):
|
||||||
|
current_expiry = auth_token.expiry
|
||||||
|
new_expiry = get_default_expiry()
|
||||||
|
# Throttle refreshing of token to avoid db writes
|
||||||
|
delta = (new_expiry - current_expiry).total_seconds()
|
||||||
|
if delta > MIN_REFRESH_INTERVAL:
|
||||||
|
auth_token.expiry = new_expiry
|
||||||
|
auth_token.save(update_fields=("expiry",))
|
||||||
|
|
||||||
|
|
||||||
|
def __get_authenticated_user(api_token: str):
|
||||||
|
api_token = api_token.split()[1]
|
||||||
|
try:
|
||||||
|
token: AuthToken = AuthToken.objects.select_related("user").get(key=api_token)
|
||||||
|
except AuthToken.DoesNotExist:
|
||||||
|
raise AuthenticationFailed(detail="Invalid token.")
|
||||||
|
if not token.user.is_active:
|
||||||
|
raise AuthenticationFailed(detail="User inactive or deleted.")
|
||||||
|
|
||||||
|
if token.expiry is not None:
|
||||||
|
if token.expiry < timezone.now():
|
||||||
|
token.delete()
|
||||||
|
raise AuthenticationFailed(detail="Invalid token.")
|
||||||
|
|
||||||
|
if AUTO_REFRESH:
|
||||||
|
__renew_token(token)
|
||||||
|
|
||||||
|
return token.user, token
|
||||||
|
|
||||||
|
|
||||||
|
@django_db_cleanup_decorator
|
||||||
|
def get_auth_data(api_token: str = Depends(token_scheme)) -> AuthData:
|
||||||
|
user, token = __get_authenticated_user(api_token)
|
||||||
|
return AuthData(user, token)
|
||||||
|
|
||||||
|
|
||||||
|
@django_db_cleanup_decorator
|
||||||
|
def get_authenticated_user(api_token: str = Depends(token_scheme)) -> UserType:
|
||||||
|
user, _ = __get_authenticated_user(api_token)
|
||||||
|
return user
|
||||||
|
|
||||||
|
|
||||||
|
@django_db_cleanup_decorator
|
||||||
|
def get_collection_queryset(user: UserType = Depends(get_authenticated_user)) -> QuerySet:
|
||||||
|
default_queryset: QuerySet = models.Collection.objects.all()
|
||||||
|
return default_queryset.filter(members__user=user)
|
||||||
|
|
||||||
|
|
||||||
|
@django_db_cleanup_decorator
|
||||||
|
def get_collection(collection_uid: str, queryset: QuerySet = Depends(get_collection_queryset)) -> models.Collection:
|
||||||
|
return get_object_or_404(queryset, uid=collection_uid)
|
||||||
|
|
||||||
|
|
||||||
|
@django_db_cleanup_decorator
|
||||||
|
def get_item_queryset(collection: models.Collection = Depends(get_collection)) -> QuerySet:
|
||||||
|
default_item_queryset: QuerySet = models.CollectionItem.objects.all()
|
||||||
|
# XXX Potentially add this for performance: .prefetch_related('revisions__chunks')
|
||||||
|
queryset = default_item_queryset.filter(collection__pk=collection.pk, revisions__current=True)
|
||||||
|
|
||||||
|
return queryset
|
@ -0,0 +1,128 @@
|
|||||||
|
from fastapi import status, HTTPException
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
from pydantic import BaseModel
|
||||||
|
from django.core.exceptions import ValidationError as DjangoValidationError
|
||||||
|
|
||||||
|
|
||||||
|
class HttpErrorField(BaseModel):
|
||||||
|
field: str
|
||||||
|
code: str
|
||||||
|
detail: str
|
||||||
|
|
||||||
|
class Config:
|
||||||
|
orm_mode = True
|
||||||
|
|
||||||
|
|
||||||
|
class HttpErrorOut(BaseModel):
|
||||||
|
code: str
|
||||||
|
detail: str
|
||||||
|
errors: t.Optional[t.List[HttpErrorField]]
|
||||||
|
|
||||||
|
class Config:
|
||||||
|
orm_mode = True
|
||||||
|
|
||||||
|
|
||||||
|
class CustomHttpException(HTTPException):
|
||||||
|
def __init__(self, code: str, detail: str, status_code: int = status.HTTP_400_BAD_REQUEST):
|
||||||
|
self.code = code
|
||||||
|
super().__init__(status_code, detail)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def as_dict(self) -> dict:
|
||||||
|
return {"code": self.code, "detail": self.detail}
|
||||||
|
|
||||||
|
|
||||||
|
class AuthenticationFailed(CustomHttpException):
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
code="authentication_failed",
|
||||||
|
detail: str = "Incorrect authentication credentials.",
|
||||||
|
status_code: int = status.HTTP_401_UNAUTHORIZED,
|
||||||
|
):
|
||||||
|
super().__init__(code=code, detail=detail, status_code=status_code)
|
||||||
|
|
||||||
|
|
||||||
|
class NotAuthenticated(CustomHttpException):
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
code="not_authenticated",
|
||||||
|
detail: str = "Authentication credentials were not provided.",
|
||||||
|
status_code: int = status.HTTP_401_UNAUTHORIZED,
|
||||||
|
):
|
||||||
|
super().__init__(code=code, detail=detail, status_code=status_code)
|
||||||
|
|
||||||
|
|
||||||
|
class PermissionDenied(CustomHttpException):
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
code="permission_denied",
|
||||||
|
detail: str = "You do not have permission to perform this action.",
|
||||||
|
status_code: int = status.HTTP_403_FORBIDDEN,
|
||||||
|
):
|
||||||
|
super().__init__(code=code, detail=detail, status_code=status_code)
|
||||||
|
|
||||||
|
|
||||||
|
class NotSupported(CustomHttpException):
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
code="not_implemented",
|
||||||
|
detail: str = "This server's configuration does not support this request.",
|
||||||
|
status_code: int = status.HTTP_501_NOT_IMPLEMENTED,
|
||||||
|
):
|
||||||
|
super().__init__(code=code, detail=detail, status_code=status_code)
|
||||||
|
|
||||||
|
|
||||||
|
class HttpError(CustomHttpException):
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
code: str,
|
||||||
|
detail: str,
|
||||||
|
status_code: int = status.HTTP_400_BAD_REQUEST,
|
||||||
|
errors: t.Optional[t.List["HttpError"]] = None,
|
||||||
|
):
|
||||||
|
self.errors = errors
|
||||||
|
super().__init__(code=code or "generic_error", detail=detail, status_code=status_code)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def as_dict(self) -> dict:
|
||||||
|
return HttpErrorOut(code=self.code, errors=self.errors, detail=self.detail).dict()
|
||||||
|
|
||||||
|
|
||||||
|
class ValidationError(HttpError):
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
code: str,
|
||||||
|
detail: str,
|
||||||
|
status_code: int = status.HTTP_400_BAD_REQUEST,
|
||||||
|
errors: t.Optional[t.List["HttpError"]] = None,
|
||||||
|
field: t.Optional[str] = None,
|
||||||
|
):
|
||||||
|
self.field = field
|
||||||
|
super().__init__(code=code, detail=detail, errors=errors, status_code=status_code)
|
||||||
|
|
||||||
|
|
||||||
|
def flatten_errors(field_name: str, errors) -> t.List[HttpError]:
|
||||||
|
ret: t.List[HttpError] = []
|
||||||
|
if isinstance(errors, dict):
|
||||||
|
for error_key in errors:
|
||||||
|
error = errors[error_key]
|
||||||
|
ret.extend(flatten_errors("{}.{}".format(field_name, error_key), error))
|
||||||
|
else:
|
||||||
|
for error in errors:
|
||||||
|
if error.messages:
|
||||||
|
message = error.messages[0]
|
||||||
|
else:
|
||||||
|
message = str(error)
|
||||||
|
ret.append(ValidationError(code=error.code or "validation_error", detail=message, field=field_name))
|
||||||
|
return ret
|
||||||
|
|
||||||
|
|
||||||
|
def transform_validation_error(prefix: str, err: DjangoValidationError):
|
||||||
|
if hasattr(err, "error_dict"):
|
||||||
|
errors = flatten_errors(prefix, err.error_dict)
|
||||||
|
elif not hasattr(err, "message"):
|
||||||
|
errors = flatten_errors(prefix, err.error_list)
|
||||||
|
else:
|
||||||
|
raise HttpError(err.code or "validation_error", err.message)
|
||||||
|
raise HttpError(code="field_errors", detail="Field validations failed.", errors=errors)
|
@ -0,0 +1,80 @@
|
|||||||
|
from django.conf import settings
|
||||||
|
|
||||||
|
# Not at the top of the file because we first need to setup django
|
||||||
|
from fastapi import FastAPI, Request
|
||||||
|
from fastapi.middleware.cors import CORSMiddleware
|
||||||
|
from fastapi.middleware.trustedhost import TrustedHostMiddleware
|
||||||
|
from fastapi.staticfiles import StaticFiles
|
||||||
|
|
||||||
|
from django_etebase import app_settings
|
||||||
|
|
||||||
|
from .exceptions import CustomHttpException
|
||||||
|
from .msgpack import MsgpackResponse
|
||||||
|
from .routers.authentication import authentication_router
|
||||||
|
from .routers.collection import collection_router, item_router
|
||||||
|
from .routers.member import member_router
|
||||||
|
from .routers.invitation import invitation_incoming_router, invitation_outgoing_router
|
||||||
|
from .routers.websocket import websocket_router
|
||||||
|
|
||||||
|
|
||||||
|
def create_application(prefix="", middlewares=[]):
|
||||||
|
app = FastAPI(
|
||||||
|
title="Etebase",
|
||||||
|
description="The Etebase server API documentation",
|
||||||
|
externalDocs={
|
||||||
|
"url": "https://docs.etebase.com",
|
||||||
|
"description": "Docs about the API specifications and clients.",
|
||||||
|
}
|
||||||
|
# FIXME: version="2.5.0",
|
||||||
|
)
|
||||||
|
VERSION = "v1"
|
||||||
|
BASE_PATH = f"{prefix}/api/{VERSION}"
|
||||||
|
COLLECTION_UID_MARKER = "{collection_uid}"
|
||||||
|
app.include_router(authentication_router, prefix=f"{BASE_PATH}/authentication", tags=["authentication"])
|
||||||
|
app.include_router(collection_router, prefix=f"{BASE_PATH}/collection", tags=["collection"])
|
||||||
|
app.include_router(item_router, prefix=f"{BASE_PATH}/collection/{COLLECTION_UID_MARKER}", tags=["item"])
|
||||||
|
app.include_router(member_router, prefix=f"{BASE_PATH}/collection/{COLLECTION_UID_MARKER}", tags=["member"])
|
||||||
|
app.include_router(
|
||||||
|
invitation_incoming_router, prefix=f"{BASE_PATH}/invitation/incoming", tags=["incoming invitation"]
|
||||||
|
)
|
||||||
|
app.include_router(
|
||||||
|
invitation_outgoing_router, prefix=f"{BASE_PATH}/invitation/outgoing", tags=["outgoing invitation"]
|
||||||
|
)
|
||||||
|
app.include_router(websocket_router, prefix=f"{BASE_PATH}/ws", tags=["websocket"])
|
||||||
|
|
||||||
|
if settings.DEBUG:
|
||||||
|
from etebase_fastapi.routers.test_reset_view import test_reset_view_router
|
||||||
|
|
||||||
|
app.include_router(test_reset_view_router, prefix=f"{BASE_PATH}/test/authentication")
|
||||||
|
|
||||||
|
app.add_middleware(
|
||||||
|
CORSMiddleware,
|
||||||
|
allow_origin_regex="https?://.*",
|
||||||
|
allow_credentials=True,
|
||||||
|
allow_methods=["*"],
|
||||||
|
allow_headers=["*"],
|
||||||
|
)
|
||||||
|
app.add_middleware(TrustedHostMiddleware, allowed_hosts=settings.ALLOWED_HOSTS)
|
||||||
|
|
||||||
|
for middleware in middlewares:
|
||||||
|
app.add_middleware(middleware)
|
||||||
|
|
||||||
|
@app.on_event("startup")
|
||||||
|
async def on_startup() -> None:
|
||||||
|
from .redis import redisw
|
||||||
|
|
||||||
|
await redisw.setup()
|
||||||
|
|
||||||
|
@app.on_event("shutdown")
|
||||||
|
async def on_shutdown():
|
||||||
|
from .redis import redisw
|
||||||
|
|
||||||
|
await redisw.close()
|
||||||
|
|
||||||
|
@app.exception_handler(CustomHttpException)
|
||||||
|
async def custom_exception_handler(request: Request, exc: CustomHttpException):
|
||||||
|
return MsgpackResponse(status_code=exc.status_code, content=exc.as_dict)
|
||||||
|
|
||||||
|
app.mount(settings.STATIC_URL, StaticFiles(directory=settings.STATIC_ROOT), name="static")
|
||||||
|
|
||||||
|
return app
|
@ -0,0 +1,76 @@
|
|||||||
|
import typing as t
|
||||||
|
|
||||||
|
from fastapi.routing import APIRoute, get_request_handler
|
||||||
|
from pydantic import BaseModel
|
||||||
|
from starlette.requests import Request
|
||||||
|
from starlette.responses import Response
|
||||||
|
|
||||||
|
from .utils import msgpack_encode, msgpack_decode
|
||||||
|
from .db_hack import django_db_cleanup_decorator
|
||||||
|
|
||||||
|
|
||||||
|
class MsgpackRequest(Request):
|
||||||
|
media_type = "application/msgpack"
|
||||||
|
|
||||||
|
async def body(self) -> bytes:
|
||||||
|
if not hasattr(self, "_json"):
|
||||||
|
body = await super().body()
|
||||||
|
self._json = msgpack_decode(body)
|
||||||
|
return self._json
|
||||||
|
|
||||||
|
|
||||||
|
class MsgpackResponse(Response):
|
||||||
|
media_type = "application/msgpack"
|
||||||
|
|
||||||
|
def render(self, content: t.Optional[t.Any]) -> bytes:
|
||||||
|
if content is None:
|
||||||
|
return b""
|
||||||
|
|
||||||
|
if isinstance(content, BaseModel):
|
||||||
|
content = content.dict()
|
||||||
|
return msgpack_encode(content)
|
||||||
|
|
||||||
|
|
||||||
|
class MsgpackRoute(APIRoute):
|
||||||
|
# keep track of content-type -> request classes
|
||||||
|
REQUESTS_CLASSES = {MsgpackRequest.media_type: MsgpackRequest}
|
||||||
|
# keep track of content-type -> response classes
|
||||||
|
ROUTES_HANDLERS_CLASSES = {MsgpackResponse.media_type: MsgpackResponse}
|
||||||
|
|
||||||
|
def __init__(self, path: str, endpoint: t.Callable[..., t.Any], *args, **kwargs):
|
||||||
|
endpoint = django_db_cleanup_decorator(endpoint)
|
||||||
|
super().__init__(path, endpoint, *args, **kwargs)
|
||||||
|
|
||||||
|
def _get_media_type_route_handler(self, media_type):
|
||||||
|
return get_request_handler(
|
||||||
|
dependant=self.dependant,
|
||||||
|
body_field=self.body_field,
|
||||||
|
status_code=self.status_code,
|
||||||
|
# use custom response class or fallback on default self.response_class
|
||||||
|
response_class=self.ROUTES_HANDLERS_CLASSES.get(media_type, self.response_class),
|
||||||
|
response_field=self.secure_cloned_response_field,
|
||||||
|
response_model_include=self.response_model_include,
|
||||||
|
response_model_exclude=self.response_model_exclude,
|
||||||
|
response_model_by_alias=self.response_model_by_alias,
|
||||||
|
response_model_exclude_unset=self.response_model_exclude_unset,
|
||||||
|
response_model_exclude_defaults=self.response_model_exclude_defaults,
|
||||||
|
response_model_exclude_none=self.response_model_exclude_none,
|
||||||
|
dependency_overrides_provider=self.dependency_overrides_provider,
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_route_handler(self) -> t.Callable:
|
||||||
|
async def custom_route_handler(request: Request) -> Response:
|
||||||
|
|
||||||
|
content_type = request.headers.get("Content-Type")
|
||||||
|
try:
|
||||||
|
request_cls = self.REQUESTS_CLASSES[content_type]
|
||||||
|
request = request_cls(request.scope, request.receive)
|
||||||
|
except KeyError:
|
||||||
|
# nothing registered to handle content_type, process given requests as-is
|
||||||
|
pass
|
||||||
|
|
||||||
|
accept = request.headers.get("Accept")
|
||||||
|
route_handler = self._get_media_type_route_handler(accept)
|
||||||
|
return await route_handler(request)
|
||||||
|
|
||||||
|
return custom_route_handler
|
@ -0,0 +1,27 @@
|
|||||||
|
import typing as t
|
||||||
|
import aioredis
|
||||||
|
|
||||||
|
from django_etebase import app_settings
|
||||||
|
|
||||||
|
|
||||||
|
class RedisWrapper:
|
||||||
|
redis: aioredis.Redis
|
||||||
|
|
||||||
|
def __init__(self, redis_uri: t.Optional[str]):
|
||||||
|
self.redis_uri = redis_uri
|
||||||
|
|
||||||
|
async def setup(self):
|
||||||
|
if self.redis_uri is not None:
|
||||||
|
self.redis = await aioredis.create_redis_pool(self.redis_uri)
|
||||||
|
|
||||||
|
async def close(self):
|
||||||
|
if self.redis is not None:
|
||||||
|
self.redis.close()
|
||||||
|
await self.redis.wait_closed()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_active(self):
|
||||||
|
return self.redis_uri is not None
|
||||||
|
|
||||||
|
|
||||||
|
redisw = RedisWrapper(app_settings.REDIS_URI)
|
@ -0,0 +1,263 @@
|
|||||||
|
import typing as t
|
||||||
|
from typing_extensions import Literal
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
import nacl
|
||||||
|
import nacl.encoding
|
||||||
|
import nacl.hash
|
||||||
|
import nacl.secret
|
||||||
|
import nacl.signing
|
||||||
|
from django.conf import settings
|
||||||
|
from django.contrib.auth import user_logged_out, user_logged_in
|
||||||
|
from django.core import exceptions as django_exceptions
|
||||||
|
from django.db import transaction
|
||||||
|
from django.utils.functional import cached_property
|
||||||
|
from fastapi import APIRouter, Depends, status, Request
|
||||||
|
|
||||||
|
from django_etebase import app_settings, models
|
||||||
|
from django_etebase.token_auth.models import AuthToken
|
||||||
|
from django_etebase.models import UserInfo
|
||||||
|
from django_etebase.signals import user_signed_up
|
||||||
|
from django_etebase.utils import create_user, get_user_queryset, CallbackContext
|
||||||
|
from myauth.models import UserType, get_typed_user_model
|
||||||
|
from ..exceptions import AuthenticationFailed, transform_validation_error, HttpError
|
||||||
|
from ..msgpack import MsgpackRoute
|
||||||
|
from ..utils import BaseModel, permission_responses, msgpack_encode, msgpack_decode, get_user_username_email_kwargs
|
||||||
|
from ..dependencies import AuthData, get_auth_data, get_authenticated_user
|
||||||
|
|
||||||
|
User = get_typed_user_model()
|
||||||
|
authentication_router = APIRouter(route_class=MsgpackRoute)
|
||||||
|
|
||||||
|
|
||||||
|
class LoginChallengeIn(BaseModel):
|
||||||
|
username: str
|
||||||
|
|
||||||
|
|
||||||
|
class LoginChallengeOut(BaseModel):
|
||||||
|
salt: bytes
|
||||||
|
challenge: bytes
|
||||||
|
version: int
|
||||||
|
|
||||||
|
|
||||||
|
class LoginResponse(BaseModel):
|
||||||
|
username: str
|
||||||
|
challenge: bytes
|
||||||
|
host: str
|
||||||
|
action: Literal["login", "changePassword"]
|
||||||
|
|
||||||
|
|
||||||
|
class UserOut(BaseModel):
|
||||||
|
username: str
|
||||||
|
email: str
|
||||||
|
pubkey: bytes
|
||||||
|
encryptedContent: bytes
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_orm(cls: t.Type["UserOut"], obj: UserType) -> "UserOut":
|
||||||
|
return cls(
|
||||||
|
username=obj.username,
|
||||||
|
email=obj.email,
|
||||||
|
pubkey=bytes(obj.userinfo.pubkey),
|
||||||
|
encryptedContent=bytes(obj.userinfo.encryptedContent),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class LoginOut(BaseModel):
|
||||||
|
token: str
|
||||||
|
user: UserOut
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_orm(cls: t.Type["LoginOut"], obj: UserType) -> "LoginOut":
|
||||||
|
token = AuthToken.objects.create(user=obj).key
|
||||||
|
user = UserOut.from_orm(obj)
|
||||||
|
return cls(token=token, user=user)
|
||||||
|
|
||||||
|
|
||||||
|
class Authentication(BaseModel):
|
||||||
|
class Config:
|
||||||
|
keep_untouched = (cached_property,)
|
||||||
|
|
||||||
|
response: bytes
|
||||||
|
signature: bytes
|
||||||
|
|
||||||
|
|
||||||
|
class Login(Authentication):
|
||||||
|
@cached_property
|
||||||
|
def response_data(self) -> LoginResponse:
|
||||||
|
return LoginResponse(**msgpack_decode(self.response))
|
||||||
|
|
||||||
|
|
||||||
|
class ChangePasswordResponse(LoginResponse):
|
||||||
|
loginPubkey: bytes
|
||||||
|
encryptedContent: bytes
|
||||||
|
|
||||||
|
|
||||||
|
class ChangePassword(Authentication):
|
||||||
|
@cached_property
|
||||||
|
def response_data(self) -> ChangePasswordResponse:
|
||||||
|
return ChangePasswordResponse(**msgpack_decode(self.response))
|
||||||
|
|
||||||
|
|
||||||
|
class UserSignup(BaseModel):
|
||||||
|
username: str
|
||||||
|
email: str
|
||||||
|
|
||||||
|
|
||||||
|
class SignupIn(BaseModel):
|
||||||
|
user: UserSignup
|
||||||
|
salt: bytes
|
||||||
|
loginPubkey: bytes
|
||||||
|
pubkey: bytes
|
||||||
|
encryptedContent: bytes
|
||||||
|
|
||||||
|
|
||||||
|
def get_login_user(request: Request, challenge: LoginChallengeIn) -> UserType:
    """Resolve the user referenced by a login challenge.

    Raises AuthenticationFailed when the user is unknown, or exists but
    never completed signup (no userinfo attached).
    """
    lookup = get_user_username_email_kwargs(challenge.username)
    try:
        queryset = get_user_queryset(User.objects.all(), CallbackContext(request.path_params))
        user = queryset.get(**lookup)
    except User.DoesNotExist:
        raise AuthenticationFailed(code="user_not_found", detail="User not found")
    if not hasattr(user, "userinfo"):
        raise AuthenticationFailed(code="user_not_init", detail="User not properly init")
    return user
|
||||||
|
|
||||||
|
|
||||||
|
def get_encryption_key(salt: bytes):
    """Derive the server-side key used to encrypt login challenges.

    Keyed BLAKE2b of the empty message: keyed with a hash of Django's
    SECRET_KEY, salted per user, personalised for etebase auth.
    """
    secret_hash = nacl.hash.blake2b(settings.SECRET_KEY.encode(), encoder=nacl.encoding.RawEncoder)
    params = {
        "key": secret_hash,
        "salt": salt[: nacl.hash.BLAKE2B_SALTBYTES],
        "person": b"etebase-auth",
        "encoder": nacl.encoding.RawEncoder,
    }
    return nacl.hash.blake2b(b"", **params)
|
||||||
|
|
||||||
|
|
||||||
|
def save_changed_password(data: ChangePassword, user: UserType):
    """Persist the new login pubkey and re-encrypted content for *user*."""
    payload = data.response_data
    info: UserInfo = user.userinfo
    info.loginPubkey = payload.loginPubkey
    info.encryptedContent = payload.encryptedContent
    info.save()
|
||||||
|
|
||||||
|
|
||||||
|
def validate_login_request(
    validated_data: LoginResponse,
    challenge_sent_to_user: Authentication,
    user: UserType,
    expected_action: str,
    host_from_request: str,
):
    """Verify a signed login/change-password response against its challenge.

    Checks, in order: declared action, challenge freshness, that the
    challenge was issued for this user, that the client-reported host
    matches the request host (skipped under DEBUG), and finally the
    signature over the raw response bytes.  Raises HttpError on failure.
    """
    # The challenge was encrypted by login_challenge with a key derived from
    # SECRET_KEY plus the user's salt; decrypt it back to recover its fields.
    enc_key = get_encryption_key(bytes(user.userinfo.salt))
    box = nacl.secret.SecretBox(enc_key)
    challenge_data = msgpack_decode(box.decrypt(validated_data.challenge))
    now = int(datetime.now().timestamp())
    if validated_data.action != expected_action:
        raise HttpError("wrong_action", f'Expected "{expected_action}" but got something else')
    elif now - challenge_data["timestamp"] > app_settings.CHALLENGE_VALID_SECONDS:
        raise HttpError("challenge_expired", "Login challenge has expired")
    elif challenge_data["userId"] != user.id:
        raise HttpError("wrong_user", "This challenge is for the wrong user")
    elif not settings.DEBUG and validated_data.host.split(":", 1)[0] != host_from_request.split(":", 1)[0]:
        # Compare host names with ports stripped, so self-hosted servers on
        # non-standard ports still validate.
        raise HttpError(
            "wrong_host", f'Found wrong host name. Got: "{validated_data.host}" expected: "{host_from_request}"'
        )
    verify_key = nacl.signing.VerifyKey(bytes(user.userinfo.loginPubkey), encoder=nacl.encoding.RawEncoder)
    try:
        verify_key.verify(challenge_sent_to_user.response, challenge_sent_to_user.signature)
    except nacl.exceptions.BadSignatureError:
        raise HttpError("login_bad_signature", "Wrong password for user.", status.HTTP_401_UNAUTHORIZED)
|
||||||
|
|
||||||
|
|
||||||
|
@authentication_router.get("/is_etebase/")
async def is_etebase():
    """Probe endpoint: a successful (empty) response identifies this server
    as an Etebase server."""
    pass
|
||||||
|
|
||||||
|
|
||||||
|
@authentication_router.post("/login_challenge/", response_model=LoginChallengeOut)
def login_challenge(user: UserType = Depends(get_login_user)):
    """Issue an encrypted, time-stamped login challenge for *user*."""
    salt = bytes(user.userinfo.salt)
    box = nacl.secret.SecretBox(get_encryption_key(salt))
    payload = msgpack_encode(
        {
            "timestamp": int(datetime.now().timestamp()),
            "userId": user.id,
        }
    )
    challenge = bytes(box.encrypt(payload, encoder=nacl.encoding.RawEncoder))
    return LoginChallengeOut(salt=salt, challenge=challenge, version=user.userinfo.version)
|
||||||
|
|
||||||
|
|
||||||
|
@authentication_router.post("/login/", response_model=LoginOut)
def login(data: Login, request: Request):
    """Verify a signed login response and return a fresh auth token."""
    response_data = data.response_data
    user = get_login_user(request, LoginChallengeIn(username=response_data.username))
    request_host = request.headers.get("Host")
    validate_login_request(response_data, data, user, "login", request_host)
    result = LoginOut.from_orm(user)
    user_logged_in.send(sender=user.__class__, request=None, user=user)
    return result
|
||||||
|
|
||||||
|
|
||||||
|
@authentication_router.post("/logout/", status_code=status.HTTP_204_NO_CONTENT, responses=permission_responses)
def logout(auth_data: AuthData = Depends(get_auth_data)):
    """Invalidate the current auth token and emit the logged-out signal."""
    auth_data.token.delete()
    current_user = auth_data.user
    user_logged_out.send(sender=current_user.__class__, request=None, user=current_user)
|
||||||
|
|
||||||
|
|
||||||
|
@authentication_router.post("/change_password/", status_code=status.HTTP_204_NO_CONTENT, responses=permission_responses)
def change_password(data: ChangePassword, request: Request, user: UserType = Depends(get_authenticated_user)):
    """Validate a signed change-password request and store the new credentials."""
    request_host = request.headers.get("Host")
    validate_login_request(data.response_data, data, user, "changePassword", request_host)
    save_changed_password(data, user)
|
||||||
|
|
||||||
|
|
||||||
|
@authentication_router.post("/dashboard_url/", responses=permission_responses)
def dashboard_url(request: Request, user: UserType = Depends(get_authenticated_user)):
    """Return the URL of the user dashboard, if this deployment provides one."""
    url_func = app_settings.DASHBOARD_URL_FUNC
    if url_func is None:
        raise HttpError("not_supported", "This server doesn't have a user dashboard.")

    context = CallbackContext(request.path_params, user=user)
    return {"url": url_func(context)}
|
||||||
|
|
||||||
|
|
||||||
|
def signup_save(data: SignupIn, request: Request) -> UserType:
    """Create (or reuse) the user for a signup request and attach its UserInfo.

    Runs inside a transaction so a partially-created account is rolled back.
    Raises HttpError("user_exists") when the account already completed signup.
    """
    user_data = data.user
    with transaction.atomic():
        try:
            # Usernames are matched case-insensitively (stored lowercased).
            user_queryset = get_user_queryset(User.objects.all(), CallbackContext(request.path_params))
            instance = user_queryset.get(**{User.USERNAME_FIELD: user_data.username.lower()})
        except User.DoesNotExist:
            # Create the user and save the casing the user chose as the first name
            try:
                instance = create_user(
                    CallbackContext(request.path_params),
                    **user_data.dict(),
                    password=None,
                    first_name=user_data.username,
                )
                instance.full_clean()
            except HttpError as e:
                raise e
            except django_exceptions.ValidationError as e:
                transform_validation_error("user", e)
            except Exception as e:
                raise HttpError("generic", str(e))

        # An existing userinfo record means signup already completed.
        if hasattr(instance, "userinfo"):
            raise HttpError("user_exists", "User already exists", status_code=status.HTTP_409_CONFLICT)

        models.UserInfo.objects.create(**data.dict(exclude={"user"}), owner=instance)
    return instance
|
||||||
|
|
||||||
|
|
||||||
|
@authentication_router.post("/signup/", response_model=LoginOut, status_code=status.HTTP_201_CREATED)
def signup(data: SignupIn, request: Request):
    """Create a new account and immediately log it in."""
    new_user = signup_save(data, request)
    result = LoginOut.from_orm(new_user)
    user_signed_up.send(sender=new_user.__class__, request=None, user=new_user)
    return result
|
@ -0,0 +1,631 @@
|
|||||||
|
import typing as t
|
||||||
|
|
||||||
|
from asgiref.sync import sync_to_async
|
||||||
|
from django.core import exceptions as django_exceptions
|
||||||
|
from django.core.files.base import ContentFile
|
||||||
|
from django.db import transaction, IntegrityError
|
||||||
|
from django.db.models import Q, QuerySet
|
||||||
|
from fastapi import APIRouter, Depends, status, Request, BackgroundTasks
|
||||||
|
|
||||||
|
from django_etebase import models
|
||||||
|
from myauth.models import UserType
|
||||||
|
from .authentication import get_authenticated_user
|
||||||
|
from .websocket import get_ticket, TicketRequest, TicketOut
|
||||||
|
from ..exceptions import HttpError, transform_validation_error, PermissionDenied, ValidationError
|
||||||
|
from ..msgpack import MsgpackRoute
|
||||||
|
from ..stoken_handler import filter_by_stoken_and_limit, filter_by_stoken, get_stoken_obj, get_queryset_stoken
|
||||||
|
from ..utils import (
|
||||||
|
get_object_or_404,
|
||||||
|
Context,
|
||||||
|
Prefetch,
|
||||||
|
PrefetchQuery,
|
||||||
|
is_collection_admin,
|
||||||
|
msgpack_encode,
|
||||||
|
BaseModel,
|
||||||
|
permission_responses,
|
||||||
|
PERMISSIONS_READ,
|
||||||
|
PERMISSIONS_READWRITE,
|
||||||
|
)
|
||||||
|
from ..dependencies import get_collection_queryset, get_item_queryset, get_collection
|
||||||
|
from ..sendfile import sendfile
|
||||||
|
from ..redis import redisw
|
||||||
|
from ..db_hack import django_db_cleanup_decorator
|
||||||
|
|
||||||
|
collection_router = APIRouter(route_class=MsgpackRoute, responses=permission_responses)
|
||||||
|
item_router = APIRouter(route_class=MsgpackRoute, responses=permission_responses)
|
||||||
|
CollectionQuerySet = QuerySet[models.Collection]
|
||||||
|
CollectionItemQuerySet = QuerySet[models.CollectionItem]
|
||||||
|
|
||||||
|
|
||||||
|
class ListMulti(BaseModel):
    """Request body for listing collections of several collection types."""

    collectionTypes: t.List[bytes]
|
||||||
|
|
||||||
|
|
||||||
|
ChunkType = t.Tuple[str, t.Optional[bytes]]
|
||||||
|
|
||||||
|
|
||||||
|
class CollectionItemRevisionInOut(BaseModel):
    """Wire representation of a single item revision and its chunks."""

    uid: str
    meta: bytes
    deleted: bool
    chunks: t.List[ChunkType]

    class Config:
        orm_mode = True

    @classmethod
    def from_orm_context(
        cls: t.Type["CollectionItemRevisionInOut"], obj: models.CollectionItemRevision, context: Context
    ) -> "CollectionItemRevisionInOut":
        """Serialize *obj*; chunk contents are inlined when prefetch is "auto",
        otherwise only the uids are sent."""
        inline_content = context.prefetch == "auto"
        chunks: t.List[ChunkType] = []
        for relation in obj.chunks_relation.all():
            chunk = relation.chunk
            if inline_content:
                with open(chunk.chunkFile.path, "rb") as f:
                    chunks.append((chunk.uid, f.read()))
            else:
                chunks.append((chunk.uid, None))
        return cls(uid=obj.uid, meta=bytes(obj.meta), deleted=obj.deleted, chunks=chunks)
|
||||||
|
|
||||||
|
|
||||||
|
class CollectionItemCommon(BaseModel):
    """Fields shared by item upload and download representations."""

    uid: str
    version: int
    encryptionKey: t.Optional[bytes]
    # The item's current revision.
    content: CollectionItemRevisionInOut
|
||||||
|
|
||||||
|
|
||||||
|
class CollectionItemOut(CollectionItemCommon):
    """Item representation returned to clients."""

    class Config:
        orm_mode = True

    @classmethod
    def from_orm_context(
        cls: t.Type["CollectionItemOut"], obj: models.CollectionItem, context: Context
    ) -> "CollectionItemOut":
        """Serialize an item together with its current revision."""
        current_content = CollectionItemRevisionInOut.from_orm_context(obj.content, context)
        return cls(
            uid=obj.uid,
            version=obj.version,
            encryptionKey=obj.encryptionKey,
            content=current_content,
        )
|
||||||
|
|
||||||
|
|
||||||
|
class CollectionItemIn(CollectionItemCommon):
    """Item payload uploaded by clients."""

    # Expected etag of the stored item (None when creating a new one);
    # used to detect concurrent modification.
    etag: t.Optional[str]
|
||||||
|
|
||||||
|
|
||||||
|
class CollectionCommon(BaseModel):
    """Fields shared by collection upload and download representations."""

    # FIXME: remove optional once we finish collection-type-migration
    collectionType: t.Optional[bytes]
    collectionKey: bytes
|
||||||
|
|
||||||
|
|
||||||
|
class CollectionOut(CollectionCommon):
    """Collection representation returned to clients."""

    accessLevel: models.AccessLevels
    stoken: str
    item: CollectionItemOut

    @classmethod
    def from_orm_context(cls: t.Type["CollectionOut"], obj: models.Collection, context: Context) -> "CollectionOut":
        """Serialize *obj* from the perspective of the requesting member."""
        member: models.CollectionMember = obj.members.get(user=context.user)
        membership_type = member.collectionType
        assert obj.main_item is not None
        return cls(
            collectionType=membership_type and bytes(membership_type.uid),
            collectionKey=bytes(member.encryptionKey),
            accessLevel=member.accessLevel,
            stoken=obj.stoken,
            item=CollectionItemOut.from_orm_context(obj.main_item, context),
        )
|
||||||
|
|
||||||
|
|
||||||
|
class CollectionIn(CollectionCommon):
    """Collection payload uploaded at creation time."""

    # The collection's main item, created together with the collection.
    item: CollectionItemIn
|
||||||
|
|
||||||
|
|
||||||
|
class RemovedMembershipOut(BaseModel):
    """Reference to a collection the user was removed from."""

    uid: str
|
||||||
|
|
||||||
|
|
||||||
|
class CollectionListResponse(BaseModel):
    """Paginated collection listing."""

    data: t.List[CollectionOut]
    # Sync token to resume from on the next request.
    stoken: t.Optional[str]
    # True when no further pages remain.
    done: bool

    # Memberships revoked since the client's stoken, if any.
    removedMemberships: t.Optional[t.List[RemovedMembershipOut]]
|
||||||
|
|
||||||
|
|
||||||
|
class CollectionItemListResponse(BaseModel):
    """Paginated item listing."""

    data: t.List[CollectionItemOut]
    # Sync token to resume from on the next request.
    stoken: t.Optional[str]
    # True when no further pages remain.
    done: bool
|
||||||
|
|
||||||
|
|
||||||
|
class CollectionItemRevisionListResponse(BaseModel):
    """Paginated revision listing for a single item."""

    data: t.List[CollectionItemRevisionInOut]
    # Opaque cursor (a revision uid) to resume from.
    iterator: t.Optional[str]
    # True when no further pages remain.
    done: bool
|
||||||
|
|
||||||
|
|
||||||
|
class CollectionItemBulkGetIn(BaseModel):
    """One entry of a bulk item fetch: item uid plus the client's etag."""

    uid: str
    etag: t.Optional[str]
|
||||||
|
|
||||||
|
|
||||||
|
class ItemDepIn(BaseModel):
    """A dependency constraint: the named item must still have this etag."""

    uid: str
    etag: str

    def validate_db(self):
        """Raise a 409 ValidationError when the stored etag differs."""
        item = models.CollectionItem.objects.get(uid=self.uid)
        expected = self.etag
        if item.etag == expected:
            return
        raise ValidationError(
            "wrong_etag",
            "Wrong etag. Expected {} got {}".format(item.etag, expected),
            status_code=status.HTTP_409_CONFLICT,
            field=self.uid,
        )
|
||||||
|
|
||||||
|
|
||||||
|
class ItemBatchIn(BaseModel):
    """A batch of item uploads plus optional dependency constraints."""

    items: t.List[CollectionItemIn]
    deps: t.Optional[t.List[ItemDepIn]]

    def validate_db(self):
        """Validate every dependency, reporting all failures as one 409."""
        if self.deps is None:
            return
        failures: t.List[HttpError] = []
        for dep in self.deps:
            try:
                dep.validate_db()
            except ValidationError as err:
                failures.append(err)
        if len(failures) > 0:
            raise ValidationError(
                code="dep_failed",
                detail="Dependencies failed to validate",
                errors=failures,
                status_code=status.HTTP_409_CONFLICT,
            )
|
||||||
|
|
||||||
|
|
||||||
|
async def report_items_changed(col_uid: str, stoken: str, items: t.List[CollectionItemIn]):
    """Publish changed items on the collection's redis channel, when redis is configured."""
    if not redisw.is_active:
        return

    payload = CollectionItemListResponse(data=items, stoken=stoken, done=True)
    await redisw.redis.publish(f"col.{col_uid}", msgpack_encode(payload.dict()))
|
||||||
|
|
||||||
|
|
||||||
|
def collection_list_common(
    queryset: CollectionQuerySet,
    user: UserType,
    stoken: t.Optional[str],
    limit: int,
    prefetch: Prefetch,
) -> CollectionListResponse:
    """Shared implementation of the collection listing endpoints.

    Returns up to `limit` collections changed since `stoken`, plus the
    memberships the user lost in the same window so clients can drop them.
    """
    result, new_stoken_obj, done = filter_by_stoken_and_limit(
        stoken, limit, queryset, models.Collection.stoken_annotation
    )
    new_stoken = new_stoken_obj and new_stoken_obj.uid
    context = Context(user, prefetch)
    data: t.List[CollectionOut] = [CollectionOut.from_orm_context(item, context) for item in result]

    ret = CollectionListResponse(data=data, stoken=new_stoken, done=done)

    # Removed memberships are only reported on incremental syncs (a valid
    # stoken was supplied), not on a full listing.
    stoken_obj = get_stoken_obj(stoken)
    if stoken_obj is not None:
        # FIXME: honour limit? (the limit should be combined for data and this because of stoken)
        remed_qs = models.CollectionMemberRemoved.objects.filter(user=user, stoken__id__gt=stoken_obj.id)
        if not done and new_stoken_obj is not None:
            # We only filter by the new_stoken if we are not done. This is because if we are done, the new stoken
            # can point to the most recent collection change rather than most recent removed membership.
            remed_qs = remed_qs.filter(stoken__id__lte=new_stoken_obj.id)

        remed = remed_qs.values_list("collection__uid", flat=True)
        if len(remed) > 0:
            ret.removedMemberships = [RemovedMembershipOut(uid=x) for x in remed]

    return ret
|
||||||
|
|
||||||
|
|
||||||
|
# permissions
|
||||||
|
|
||||||
|
|
||||||
|
@django_db_cleanup_decorator
def verify_collection_admin(
    collection: models.Collection = Depends(get_collection), user: UserType = Depends(get_authenticated_user)
):
    """Dependency that rejects non-admin members of the collection."""
    if is_collection_admin(collection, user):
        return
    raise PermissionDenied("admin_access_required", "Only collection admins can perform this operation.")
|
||||||
|
|
||||||
|
|
||||||
|
@django_db_cleanup_decorator
def has_write_access(
    collection: models.Collection = Depends(get_collection), user: UserType = Depends(get_authenticated_user)
):
    """Dependency that rejects read-only members of the collection."""
    membership = collection.members.get(user=user)
    if membership.accessLevel == models.AccessLevels.READ_ONLY:
        raise PermissionDenied("no_write_access", "You need write access to write to this collection")
|
||||||
|
|
||||||
|
|
||||||
|
# paths
|
||||||
|
|
||||||
|
|
||||||
|
@collection_router.post(
    "/list_multi/",
    response_model=CollectionListResponse,
    response_model_exclude_unset=True,
    dependencies=PERMISSIONS_READ,
)
def list_multi(
    data: ListMulti,
    stoken: t.Optional[str] = None,
    limit: int = 50,
    queryset: CollectionQuerySet = Depends(get_collection_queryset),
    user: UserType = Depends(get_authenticated_user),
    prefetch: Prefetch = PrefetchQuery,
):
    """List the user's collections restricted to the requested types."""
    # FIXME: Remove the isnull part once we attach collection types to all objects ("collection-type-migration")
    wanted = Q(members__collectionType__uid__in=data.collectionTypes)
    untyped = Q(members__collectionType__isnull=True)
    filtered = queryset.filter(wanted | untyped)

    return collection_list_common(filtered, user, stoken, limit, prefetch)
|
||||||
|
|
||||||
|
|
||||||
|
@collection_router.get("/", response_model=CollectionListResponse, dependencies=PERMISSIONS_READ)
def collection_list(
    stoken: t.Optional[str] = None,
    limit: int = 50,
    prefetch: Prefetch = PrefetchQuery,
    user: UserType = Depends(get_authenticated_user),
    queryset: CollectionQuerySet = Depends(get_collection_queryset),
):
    """List every collection the authenticated user is a member of."""
    return collection_list_common(queryset, user, stoken, limit, prefetch)
|
||||||
|
|
||||||
|
|
||||||
|
def process_revisions_for_item(item: models.CollectionItem, revision_data: CollectionItemRevisionInOut):
    """Create a new revision for *item* and link its chunks.

    New chunks must arrive with content; chunks that already exist may be
    referenced by uid alone.  A fresh Stoken is minted for the revision.
    Returns the created CollectionItemRevision.
    """
    chunks_objs = []

    revision = models.CollectionItemRevision(**revision_data.dict(exclude={"chunks"}), item=item)
    revision.validate_unique()  # Verify there aren't any validation issues

    for chunk in revision_data.chunks:
        uid = chunk[0]
        # NOTE(review): the lookup is by uid alone, not scoped to the
        # collection — presumably chunk uids are globally unique; confirm.
        chunk_obj = models.CollectionItemChunk.objects.filter(uid=uid).first()
        content = chunk[1] if len(chunk) > 1 else None
        # If the chunk already exists we assume it's fine. Otherwise, we upload it.
        if chunk_obj is None:
            if content is not None:
                chunk_obj = models.CollectionItemChunk(uid=uid, collection=item.collection)
                chunk_obj.chunkFile.save("IGNORED", ContentFile(content))
                chunk_obj.save()
            else:
                raise ValidationError("chunk_no_content", "Tried to create a new chunk without content")

        chunks_objs.append(chunk_obj)

    # Mint a new sync token and attach it before saving the revision.
    stoken = models.Stoken.objects.create()
    revision.stoken = stoken
    revision.save()

    for chunk2 in chunks_objs:
        models.RevisionChunkRelation.objects.create(chunk=chunk2, revision=revision)
    return revision
|
||||||
|
|
||||||
|
|
||||||
|
def _create(data: CollectionIn, user: UserType):
    """Create a collection, its main item, and the owner's admin membership.

    Everything runs in one transaction so a failure leaves no partial
    collection behind.  Raises ValidationError on a uid clash or when the
    new item carries an etag.
    """
    with transaction.atomic():
        if data.item.etag is not None:
            # A freshly created item cannot carry an etag.
            raise ValidationError("bad_etag", "etag is not null")
        instance = models.Collection(uid=data.item.uid, owner=user)
        try:
            instance.validate_unique()
        except django_exceptions.ValidationError:
            raise ValidationError(
                "unique_uid", "Collection with this uid already exists", status_code=status.HTTP_409_CONFLICT
            )
        instance.save()

        main_item = models.CollectionItem.objects.create(
            uid=data.item.uid, version=data.item.version, collection=instance
        )

        # The collection must exist before its main item can point back at it.
        instance.main_item = main_item
        instance.save()

        # TODO
        process_revisions_for_item(main_item, data.item.content)

        collection_type_obj, _ = models.CollectionType.objects.get_or_create(uid=data.collectionType, owner=user)

        models.CollectionMember(
            collection=instance,
            stoken=models.Stoken.objects.create(),
            user=user,
            accessLevel=models.AccessLevels.ADMIN,
            encryptionKey=data.collectionKey,
            collectionType=collection_type_obj,
        ).save()
|
||||||
|
|
||||||
|
|
||||||
|
@collection_router.post("/", status_code=status.HTTP_201_CREATED, dependencies=PERMISSIONS_READWRITE)
def create(data: CollectionIn, user: UserType = Depends(get_authenticated_user)):
    """Create a new collection owned by the authenticated user."""
    _create(data, user)
|
||||||
|
|
||||||
|
|
||||||
|
@collection_router.get("/{collection_uid}/", response_model=CollectionOut, dependencies=PERMISSIONS_READ)
def collection_get(
    obj: models.Collection = Depends(get_collection),
    user: UserType = Depends(get_authenticated_user),
    prefetch: Prefetch = PrefetchQuery,
):
    """Fetch a single collection the user has access to."""
    context = Context(user, prefetch)
    return CollectionOut.from_orm_context(obj, context)
|
||||||
|
|
||||||
|
|
||||||
|
def item_create(item_model: CollectionItemIn, collection: models.Collection, validate_etag: bool):
    """Function that's called when this serializer creates an item.

    Creates the item if needed, then appends a new revision.  When
    `validate_etag` is set, the client's etag must match the stored one.
    Uploading a revision the item already has is treated as a no-op success.
    Returns the CollectionItem instance.
    """
    etag = item_model.etag
    revision_data = item_model.content
    uid = item_model.uid

    Model = models.CollectionItem

    with transaction.atomic():
        instance, created = Model.objects.get_or_create(
            uid=uid, collection=collection, defaults=item_model.dict(exclude={"uid", "etag", "content"})
        )
        # A brand-new item has no current revision, hence no etag.
        cur_etag = instance.etag if not created else None

        # If we are trying to update an up to date item, abort early and consider it a success
        if cur_etag == revision_data.uid:
            return instance

        if validate_etag and cur_etag != etag:
            raise ValidationError(
                "wrong_etag",
                "Wrong etag. Expected {} got {}".format(cur_etag, etag),
                status_code=status.HTTP_409_CONFLICT,
                field=uid,
            )

        if not created:
            # We don't have to use select_for_update here because the unique constraint on current guards against
            # the race condition. But it's a good idea because it'll lock and wait rather than fail.
            current_revision = instance.revisions.filter(current=True).select_for_update().first()
            assert current_revision is not None
            current_revision.current = None
            current_revision.save()

        try:
            process_revisions_for_item(instance, revision_data)
        except django_exceptions.ValidationError as e:
            transform_validation_error("content", e)

    return instance
|
||||||
|
|
||||||
|
|
||||||
|
@item_router.get("/item/{item_uid}/", response_model=CollectionItemOut, dependencies=PERMISSIONS_READ)
def item_get(
    item_uid: str,
    queryset: CollectionItemQuerySet = Depends(get_item_queryset),
    user: UserType = Depends(get_authenticated_user),
    prefetch: Prefetch = PrefetchQuery,
):
    """Fetch a single item by uid."""
    item = queryset.get(uid=item_uid)
    context = Context(user, prefetch)
    return CollectionItemOut.from_orm_context(item, context)
|
||||||
|
|
||||||
|
|
||||||
|
def item_list_common(
    queryset: CollectionItemQuerySet,
    user: UserType,
    stoken: t.Optional[str],
    limit: int,
    prefetch: Prefetch,
) -> CollectionItemListResponse:
    """Shared implementation of paginated item listings."""
    result, new_stoken_obj, done = filter_by_stoken_and_limit(
        stoken, limit, queryset, models.CollectionItem.stoken_annotation
    )
    context = Context(user, prefetch)
    serialized: t.List[CollectionItemOut] = [
        CollectionItemOut.from_orm_context(entry, context) for entry in result
    ]
    next_stoken = new_stoken_obj and new_stoken_obj.uid
    return CollectionItemListResponse(data=serialized, stoken=next_stoken, done=done)
|
||||||
|
|
||||||
|
|
||||||
|
@item_router.get("/item/", response_model=CollectionItemListResponse, dependencies=PERMISSIONS_READ)
def item_list(
    queryset: CollectionItemQuerySet = Depends(get_item_queryset),
    stoken: t.Optional[str] = None,
    limit: int = 50,
    prefetch: Prefetch = PrefetchQuery,
    withCollection: bool = False,
    user: UserType = Depends(get_authenticated_user),
):
    """List a collection's items.

    Unless withCollection is set, only items without a parent are returned.
    """
    effective_qs = queryset if withCollection else queryset.filter(parent__isnull=True)
    return item_list_common(effective_qs, user, stoken, limit, prefetch)
|
||||||
|
|
||||||
|
|
||||||
|
@item_router.post("/item/subscription-ticket/", response_model=TicketOut, dependencies=PERMISSIONS_READ)
async def item_list_subscription_ticket(
    collection: models.Collection = Depends(get_collection),
    user: UserType = Depends(get_authenticated_user),
):
    """Get an authentication ticket that can be used with the websocket endpoint"""
    ticket_request = TicketRequest(collection=collection.uid)
    return await get_ticket(ticket_request, user)
|
||||||
|
|
||||||
|
|
||||||
|
def item_bulk_common(
    data: ItemBatchIn,
    user: UserType,
    stoken: t.Optional[str],
    uid: str,
    validate_etag: bool,
    background_tasks: BackgroundTasks,
):
    """Apply a batch of item writes to the collection `uid` atomically.

    Locks the collection row, rejects the batch when the client's stoken is
    stale, validates dependencies, then creates/updates every item.  All
    per-item failures are collected and reported together as one 409.
    On success, schedules a background notification of the change.
    """
    queryset = get_collection_queryset(user)
    with transaction.atomic():  # We need this for locking the collection object
        collection_object = queryset.select_for_update().get(uid=uid)

        if stoken and stoken != collection_object.stoken:
            raise HttpError("stale_stoken", "Stoken is too old", status_code=status.HTTP_409_CONFLICT)

        data.validate_db()

        errors: t.List[HttpError] = []
        for item in data.items:
            try:
                item_create(item, collection_object, validate_etag)
            except ValidationError as e:
                errors.append(e)

        if len(errors) > 0:
            raise ValidationError(
                code="item_failed",
                detail="Items failed to validate",
                errors=errors,
                status_code=status.HTTP_409_CONFLICT,
            )

        background_tasks.add_task(report_items_changed, collection_object.uid, collection_object.stoken, data.items)
|
||||||
|
|
||||||
|
|
||||||
|
@item_router.get(
    "/item/{item_uid}/revision/", response_model=CollectionItemRevisionListResponse, dependencies=PERMISSIONS_READ
)
def item_revisions(
    item_uid: str,
    limit: int = 50,
    iterator: t.Optional[str] = None,
    prefetch: Prefetch = PrefetchQuery,
    user: UserType = Depends(get_authenticated_user),
    items: CollectionItemQuerySet = Depends(get_item_queryset),
):
    """Page through an item's revisions, newest first."""
    item = get_object_or_404(items, uid=item_uid)

    revisions = item.revisions.order_by("-id")
    if iterator is not None:
        # Resume strictly after the revision named by the iterator.
        anchor = get_object_or_404(revisions, uid=iterator)
        revisions = revisions.filter(id__lt=anchor.id)

    # Fetch one extra row to learn whether another page exists.
    page = list(revisions[: limit + 1])
    done = len(page) < limit + 1
    if not done:
        page = page[:-1]

    context = Context(user, prefetch)
    serialized = [CollectionItemRevisionInOut.from_orm_context(rev, context) for rev in page]
    next_iterator = serialized[-1].uid if len(page) > 0 else None

    return CollectionItemRevisionListResponse(
        data=serialized,
        iterator=next_iterator,
        done=done,
    )
|
||||||
|
|
||||||
|
|
||||||
|
@item_router.post("/item/fetch_updates/", response_model=CollectionItemListResponse, dependencies=PERMISSIONS_READ)
def fetch_updates(
    data: t.List[CollectionItemBulkGetIn],
    stoken: t.Optional[str] = None,
    prefetch: Prefetch = PrefetchQuery,
    user: UserType = Depends(get_authenticated_user),
    queryset: CollectionItemQuerySet = Depends(get_item_queryset),
):
    """Return the requested items that changed relative to the client's etags.

    Only items whose current revision differs from the etag supplied by the
    client (and, when `stoken` is given, that changed after it) are returned.
    Raises a 400 HttpError when more than `item_limit` items are requested.
    """
    # FIXME: make configurable?
    item_limit = 200

    if len(data) > item_limit:
        raise HttpError("too_many_items", "Request has too many items.", status_code=status.HTTP_400_BAD_REQUEST)

    queryset, stoken_rev = filter_by_stoken(stoken, queryset, models.CollectionItem.stoken_annotation)

    if data:
        uids, etags = zip(*[(item.uid, item.etag) for item in data])
    else:
        # zip(*[]) raises ValueError on an empty request; treat it as "no items".
        uids, etags = (), ()
    # Exclude items whose current revision already matches the client's etag.
    revs = models.CollectionItemRevision.objects.filter(uid__in=etags, current=True)
    queryset = queryset.filter(uid__in=uids).exclude(revisions__in=revs)

    new_stoken_obj = get_queryset_stoken(queryset)
    new_stoken = new_stoken_obj and new_stoken_obj.uid
    # Fall back to the stoken the client passed when nothing changed.
    stoken_rev_uid = stoken_rev and getattr(stoken_rev, "uid", None)
    new_stoken = new_stoken or stoken_rev_uid

    context = Context(user, prefetch)
    return CollectionItemListResponse(
        data=[CollectionItemOut.from_orm_context(item, context) for item in queryset],
        stoken=new_stoken,
        done=True,  # we always return all the items, so it's always done
    )
|
||||||
|
|
||||||
|
|
||||||
|
@item_router.post("/item/transaction/", dependencies=[Depends(has_write_access), *PERMISSIONS_READWRITE])
def item_transaction(
    collection_uid: str,
    data: ItemBatchIn,
    background_tasks: BackgroundTasks,
    stoken: t.Optional[str] = None,
    user: UserType = Depends(get_authenticated_user),
):
    """Apply a batch with per-item etag validation (strict transaction)."""
    return item_bulk_common(
        data,
        user,
        stoken,
        collection_uid,
        validate_etag=True,
        background_tasks=background_tasks,
    )
|
||||||
|
|
||||||
|
|
||||||
|
@item_router.post("/item/batch/", dependencies=[Depends(has_write_access), *PERMISSIONS_READWRITE])
def item_batch(
    collection_uid: str,
    data: ItemBatchIn,
    background_tasks: BackgroundTasks,
    stoken: t.Optional[str] = None,
    user: UserType = Depends(get_authenticated_user),
):
    """Apply a batch without etag validation (last write wins)."""
    return item_bulk_common(
        data,
        user,
        stoken,
        collection_uid,
        validate_etag=False,
        background_tasks=background_tasks,
    )
|
||||||
|
|
||||||
|
|
||||||
|
# Chunks
|
||||||
|
|
||||||
|
|
||||||
|
@sync_to_async
def chunk_save(chunk_uid: str, collection: models.Collection, content_file: ContentFile):
    """Persist a new chunk's content under the given collection."""
    chunk = models.CollectionItemChunk(uid=chunk_uid, collection=collection)
    chunk.chunkFile.save("IGNORED", content_file)
    chunk.save()
    return chunk
|
||||||
|
|
||||||
|
|
||||||
|
@item_router.put(
    "/item/{item_uid}/chunk/{chunk_uid}/",
    dependencies=[Depends(has_write_access), *PERMISSIONS_READWRITE],
    status_code=status.HTTP_201_CREATED,
)
async def chunk_update(
    request: Request,
    chunk_uid: str,
    collection: models.Collection = Depends(get_collection),
):
    """Upload the raw body of the request as the content of one chunk."""
    # IGNORED FOR NOW: col_it = get_object_or_404(col.items, uid=collection_item_uid)
    # Whole request body is the chunk content (no multipart parsing).
    content_file = ContentFile(await request.body())
    try:
        await chunk_save(chunk_uid, collection, content_file)
    except IntegrityError:
        # A chunk with this uid already exists in the collection.
        raise HttpError("chunk_exists", "Chunk already exists.", status_code=status.HTTP_409_CONFLICT)
|
||||||
|
|
||||||
|
|
||||||
|
@item_router.get(
    "/item/{item_uid}/chunk/{chunk_uid}/download/",
    dependencies=PERMISSIONS_READ,
)
def chunk_download(
    chunk_uid: str,
    collection: models.Collection = Depends(get_collection),
):
    """Serve a chunk's file content via the configured sendfile backend."""
    chunk = get_object_or_404(collection.chunks, uid=chunk_uid)

    filename = chunk.chunkFile.path
    # Delegates to SENDFILE_BACKEND (nginx/xsendfile/mod_wsgi/simple).
    return sendfile(filename)
|
@ -0,0 +1,244 @@
|
|||||||
|
import typing as t
|
||||||
|
|
||||||
|
from django.db import transaction, IntegrityError
|
||||||
|
from django.db.models import QuerySet
|
||||||
|
from fastapi import APIRouter, Depends, status, Request
|
||||||
|
|
||||||
|
from django_etebase import models
|
||||||
|
from django_etebase.utils import get_user_queryset, CallbackContext
|
||||||
|
from myauth.models import UserType, get_typed_user_model
|
||||||
|
from .authentication import get_authenticated_user
|
||||||
|
from ..exceptions import HttpError, PermissionDenied
|
||||||
|
from ..msgpack import MsgpackRoute
|
||||||
|
from ..utils import (
|
||||||
|
get_object_or_404,
|
||||||
|
get_user_username_email_kwargs,
|
||||||
|
Context,
|
||||||
|
is_collection_admin,
|
||||||
|
BaseModel,
|
||||||
|
permission_responses,
|
||||||
|
PERMISSIONS_READ,
|
||||||
|
PERMISSIONS_READWRITE,
|
||||||
|
)
|
||||||
|
from ..db_hack import django_db_cleanup_decorator
|
||||||
|
|
||||||
|
User = get_typed_user_model()
|
||||||
|
invitation_incoming_router = APIRouter(route_class=MsgpackRoute, responses=permission_responses)
|
||||||
|
invitation_outgoing_router = APIRouter(route_class=MsgpackRoute, responses=permission_responses)
|
||||||
|
InvitationQuerySet = QuerySet[models.CollectionInvitation]
|
||||||
|
default_queryset: InvitationQuerySet = models.CollectionInvitation.objects.all()
|
||||||
|
|
||||||
|
|
||||||
|
class UserInfoOut(BaseModel):
    """Public user info returned when fetching a profile (just the public key)."""

    pubkey: bytes

    class Config:
        orm_mode = True

    @classmethod
    def from_orm(cls: t.Type["UserInfoOut"], obj: models.UserInfo) -> "UserInfoOut":
        # Coerce to plain bytes -- the ORM field may expose a buffer type.
        return cls(pubkey=bytes(obj.pubkey))
|
||||||
|
|
||||||
|
|
||||||
|
class CollectionInvitationAcceptIn(BaseModel):
    """Payload for accepting an incoming invitation."""

    collectionType: bytes  # collection type uid; get_or_create'd for the accepting user
    encryptionKey: bytes  # stored as the new member's encryptionKey
|
||||||
|
|
||||||
|
|
||||||
|
class CollectionInvitationCommon(BaseModel):
    """Fields shared by invitation input and output models."""

    uid: str
    version: int
    accessLevel: models.AccessLevels
    username: str  # invitee's username (used to look up the target user)
    collection: str  # collection uid
    signedEncryptionKey: bytes
|
||||||
|
|
||||||
|
|
||||||
|
class CollectionInvitationIn(CollectionInvitationCommon):
    """Invitation creation payload, with DB-aware validation."""

    def validate_db(self, context: Context):
        """Reject invitations a user sends to themselves."""
        inviter = context.user
        if inviter is None:
            return
        if inviter.username == self.username.lower():
            raise HttpError("no_self_invite", "Inviting yourself is not allowed")
|
||||||
|
|
||||||
|
|
||||||
|
class CollectionInvitationOut(CollectionInvitationCommon):
    """Invitation as returned to clients, including the inviting user's identity."""

    fromUsername: str
    fromPubkey: bytes

    class Config:
        orm_mode = True

    @classmethod
    def from_orm(cls: t.Type["CollectionInvitationOut"], obj: models.CollectionInvitation) -> "CollectionInvitationOut":
        # Manual mapping: related fields (user, collection, fromMember) need
        # traversal that pydantic's automatic orm_mode cannot do here.
        return cls(
            uid=obj.uid,
            version=obj.version,
            accessLevel=obj.accessLevel,
            username=obj.user.username,
            collection=obj.collection.uid,
            fromUsername=obj.fromMember.user.username,
            fromPubkey=bytes(obj.fromMember.user.userinfo.pubkey),
            signedEncryptionKey=bytes(obj.signedEncryptionKey),
        )
|
||||||
|
|
||||||
|
|
||||||
|
class InvitationListResponse(BaseModel):
    """One page of invitations."""

    data: t.List[CollectionInvitationOut]
    iterator: t.Optional[str]  # uid of the last returned item; pass back to continue
    done: bool  # True when there are no further pages
|
||||||
|
|
||||||
|
|
||||||
|
@django_db_cleanup_decorator
def get_incoming_queryset(user: UserType = Depends(get_authenticated_user)):
    """Dependency: invitations addressed to the authenticated user."""
    return default_queryset.filter(user=user)
|
||||||
|
|
||||||
|
|
||||||
|
@django_db_cleanup_decorator
def get_outgoing_queryset(user: UserType = Depends(get_authenticated_user)):
    """Dependency: invitations sent by the authenticated user."""
    return default_queryset.filter(fromMember__user=user)
|
||||||
|
|
||||||
|
|
||||||
|
def list_common(
    queryset: InvitationQuerySet,
    iterator: t.Optional[str],
    limit: int,
) -> InvitationListResponse:
    """Paginate *queryset* by id, resuming after the invitation uid *iterator*."""
    queryset = queryset.order_by("id")

    if iterator is not None:
        last_seen = get_object_or_404(queryset, uid=iterator)
        queryset = queryset.filter(id__gt=last_seen.id)

    # Fetch one extra row to detect whether another page exists.
    rows = list(queryset[: limit + 1])
    done = len(rows) <= limit
    if not done:
        rows = rows[:-1]

    next_iterator = rows[-1].uid if rows else None

    return InvitationListResponse(
        data=rows,
        iterator=next_iterator,
        done=done,
    )
|
||||||
|
|
||||||
|
|
||||||
|
@invitation_incoming_router.get("/", response_model=InvitationListResponse, dependencies=PERMISSIONS_READ)
def incoming_list(
    iterator: t.Optional[str] = None,
    limit: int = 50,
    queryset: InvitationQuerySet = Depends(get_incoming_queryset),
):
    """List invitations received by the authenticated user (paginated)."""
    return list_common(queryset, iterator, limit)
|
||||||
|
|
||||||
|
|
||||||
|
@invitation_incoming_router.get(
    "/{invitation_uid}/", response_model=CollectionInvitationOut, dependencies=PERMISSIONS_READ
)
def incoming_get(
    invitation_uid: str,
    queryset: InvitationQuerySet = Depends(get_incoming_queryset),
):
    """Fetch a single incoming invitation by uid (404 if not addressed to the user)."""
    obj = get_object_or_404(queryset, uid=invitation_uid)
    return CollectionInvitationOut.from_orm(obj)
|
||||||
|
|
||||||
|
|
||||||
|
@invitation_incoming_router.delete(
    "/{invitation_uid}/", status_code=status.HTTP_204_NO_CONTENT, dependencies=PERMISSIONS_READWRITE
)
def incoming_delete(
    invitation_uid: str,
    queryset: InvitationQuerySet = Depends(get_incoming_queryset),
):
    """Reject (delete) an incoming invitation."""
    obj = get_object_or_404(queryset, uid=invitation_uid)
    obj.delete()
|
||||||
|
|
||||||
|
|
||||||
|
@invitation_incoming_router.post(
    "/{invitation_uid}/accept/", status_code=status.HTTP_201_CREATED, dependencies=PERMISSIONS_READWRITE
)
def incoming_accept(
    invitation_uid: str,
    data: CollectionInvitationAcceptIn,
    queryset: InvitationQuerySet = Depends(get_incoming_queryset),
):
    """Accept an invitation: create the membership and consume the invitation.

    Runs in one transaction so a failure leaves neither a dangling member nor
    a consumed invitation.
    """
    invitation = get_object_or_404(queryset, uid=invitation_uid)

    with transaction.atomic():
        user = invitation.user
        collection_type_obj, _ = models.CollectionType.objects.get_or_create(uid=data.collectionType, owner=user)

        models.CollectionMember.objects.create(
            collection=invitation.collection,
            # Fresh sync token so other members' clients pick up the change.
            stoken=models.Stoken.objects.create(),
            user=user,
            accessLevel=invitation.accessLevel,
            encryptionKey=data.encryptionKey,
            collectionType=collection_type_obj,
        )

        # If the user had previously been removed from this collection, clear
        # the removal marker now that they are a member again.
        models.CollectionMemberRemoved.objects.filter(user=invitation.user, collection=invitation.collection).delete()

        invitation.delete()
|
||||||
|
|
||||||
|
|
||||||
|
@invitation_outgoing_router.post("/", status_code=status.HTTP_201_CREATED, dependencies=PERMISSIONS_READWRITE)
def outgoing_create(
    data: CollectionInvitationIn,
    request: Request,
    user: UserType = Depends(get_authenticated_user),
):
    """Invite another user to a collection. Requires admin access to it."""
    collection = get_object_or_404(models.Collection.objects, uid=data.collection)
    # The target may be given by username or email.
    kwargs = get_user_username_email_kwargs(data.username)
    to_user = get_object_or_404(get_user_queryset(User.objects.all(), CallbackContext(request.path_params)), **kwargs)

    context = Context(user, None)
    # Rejects self-invites (see CollectionInvitationIn.validate_db).
    data.validate_db(context)

    if not is_collection_admin(collection, user):
        raise PermissionDenied("admin_access_required", "User is not an admin of this collection")

    member = collection.members.get(user=user)

    with transaction.atomic():
        try:
            models.CollectionInvitation.objects.create(
                **data.dict(exclude={"collection", "username"}), user=to_user, fromMember=member
            )
        except IntegrityError:
            # Unique constraint: only one pending invitation per (user, collection).
            raise HttpError("invitation_exists", "Invitation already exists")
|
||||||
|
|
||||||
|
|
||||||
|
@invitation_outgoing_router.get("/", response_model=InvitationListResponse, dependencies=PERMISSIONS_READ)
def outgoing_list(
    iterator: t.Optional[str] = None,
    limit: int = 50,
    queryset: InvitationQuerySet = Depends(get_outgoing_queryset),
):
    """List invitations sent by the authenticated user (paginated)."""
    return list_common(queryset, iterator, limit)
|
||||||
|
|
||||||
|
|
||||||
|
@invitation_outgoing_router.delete(
    "/{invitation_uid}/", status_code=status.HTTP_204_NO_CONTENT, dependencies=PERMISSIONS_READWRITE
)
def outgoing_delete(
    invitation_uid: str,
    queryset: InvitationQuerySet = Depends(get_outgoing_queryset),
):
    """Cancel (delete) an invitation the authenticated user previously sent."""
    obj = get_object_or_404(queryset, uid=invitation_uid)
    obj.delete()
|
||||||
|
|
||||||
|
|
||||||
|
@invitation_outgoing_router.get("/fetch_user_profile/", response_model=UserInfoOut, dependencies=PERMISSIONS_READ)
def outgoing_fetch_user_profile(
    username: str,
    request: Request,
    user: UserType = Depends(get_authenticated_user),
):
    """Fetch another user's public info (pubkey) so an invitation can be signed."""
    kwargs = get_user_username_email_kwargs(username)
    # NOTE(review): this rebinds `user`, shadowing the authenticated user above;
    # intentional here since the authenticated user is only needed for auth.
    user = get_object_or_404(get_user_queryset(User.objects.all(), CallbackContext(request.path_params)), **kwargs)
    user_info = get_object_or_404(models.UserInfo.objects.all(), owner=user)
    return UserInfoOut.from_orm(user_info)
|
@ -0,0 +1,109 @@
|
|||||||
|
import typing as t
|
||||||
|
|
||||||
|
from django.db import transaction
|
||||||
|
from django.db.models import QuerySet
|
||||||
|
from fastapi import APIRouter, Depends, status
|
||||||
|
|
||||||
|
from django_etebase import models
|
||||||
|
from myauth.models import UserType, get_typed_user_model
|
||||||
|
from .authentication import get_authenticated_user
|
||||||
|
from ..msgpack import MsgpackRoute
|
||||||
|
from ..utils import get_object_or_404, BaseModel, permission_responses, PERMISSIONS_READ, PERMISSIONS_READWRITE
|
||||||
|
from ..stoken_handler import filter_by_stoken_and_limit
|
||||||
|
from ..db_hack import django_db_cleanup_decorator
|
||||||
|
|
||||||
|
from .collection import get_collection, verify_collection_admin
|
||||||
|
|
||||||
|
User = get_typed_user_model()
|
||||||
|
member_router = APIRouter(route_class=MsgpackRoute, responses=permission_responses)
|
||||||
|
MemberQuerySet = QuerySet[models.CollectionMember]
|
||||||
|
default_queryset: MemberQuerySet = models.CollectionMember.objects.all()
|
||||||
|
|
||||||
|
|
||||||
|
@django_db_cleanup_decorator
def get_queryset(collection: models.Collection = Depends(get_collection)) -> MemberQuerySet:
    """Dependency: members of the collection addressed by the request."""
    return default_queryset.filter(collection=collection)
|
||||||
|
|
||||||
|
|
||||||
|
@django_db_cleanup_decorator
def get_member(username: str, queryset: MemberQuerySet = Depends(get_queryset)) -> models.CollectionMember:
    """Dependency: resolve a member by (case-insensitive) username, 404 otherwise."""
    return get_object_or_404(queryset, user__username__iexact=username)
|
||||||
|
|
||||||
|
|
||||||
|
class CollectionMemberModifyAccessLevelIn(BaseModel):
    """Payload for changing a member's access level (the only mutable field)."""

    accessLevel: models.AccessLevels
|
||||||
|
|
||||||
|
|
||||||
|
class CollectionMemberOut(BaseModel):
    """Member as exposed to clients: username plus access level."""

    username: str
    accessLevel: models.AccessLevels

    class Config:
        orm_mode = True

    @classmethod
    def from_orm(cls: t.Type["CollectionMemberOut"], obj: models.CollectionMember) -> "CollectionMemberOut":
        # Manual mapping to pull username through the related user object.
        return cls(username=obj.user.username, accessLevel=obj.accessLevel)
|
||||||
|
|
||||||
|
|
||||||
|
class MemberListResponse(BaseModel):
    """One page of collection members."""

    data: t.List[CollectionMemberOut]
    iterator: t.Optional[str]  # stoken uid to resume from; None when no rows returned
    done: bool  # True when there are no further pages
|
||||||
|
|
||||||
|
|
||||||
|
@member_router.get(
    "/member/", response_model=MemberListResponse, dependencies=[Depends(verify_collection_admin), *PERMISSIONS_READ]
)
def member_list(
    iterator: t.Optional[str] = None,
    limit: int = 50,
    queryset: MemberQuerySet = Depends(get_queryset),
):
    """List a collection's members (admin only), paginated by stoken."""
    queryset = queryset.order_by("id")
    result, new_stoken_obj, done = filter_by_stoken_and_limit(
        iterator, limit, queryset, models.CollectionMember.stoken_annotation
    )
    # `and` short-circuits: stays None/falsy when no new stoken was produced.
    new_stoken = new_stoken_obj and new_stoken_obj.uid

    return MemberListResponse(
        data=[CollectionMemberOut.from_orm(item) for item in result],
        iterator=new_stoken,
        done=done,
    )
|
||||||
|
|
||||||
|
|
||||||
|
@member_router.delete(
    "/member/{username}/",
    status_code=status.HTTP_204_NO_CONTENT,
    dependencies=[Depends(verify_collection_admin), *PERMISSIONS_READWRITE],
)
def member_delete(
    obj: models.CollectionMember = Depends(get_member),
):
    """Remove a member from a collection (admin only).

    Uses revoke() rather than delete() so removal bookkeeping is performed.
    """
    obj.revoke()
|
||||||
|
|
||||||
|
|
||||||
|
@member_router.patch(
    "/member/{username}/",
    status_code=status.HTTP_204_NO_CONTENT,
    dependencies=[Depends(verify_collection_admin), *PERMISSIONS_READWRITE],
)
def member_patch(
    data: CollectionMemberModifyAccessLevelIn,
    instance: models.CollectionMember = Depends(get_member),
):
    """Change a member's access level (admin only). No-op when unchanged."""
    with transaction.atomic():
        # We only allow updating accessLevel
        if instance.accessLevel != data.accessLevel:
            # Fresh sync token so other clients notice the permission change.
            instance.stoken = models.Stoken.objects.create()
            instance.accessLevel = data.accessLevel
            instance.save()
|
||||||
|
|
||||||
|
|
||||||
|
@member_router.post("/member/leave/", status_code=status.HTTP_204_NO_CONTENT, dependencies=PERMISSIONS_READ)
def member_leave(
    user: UserType = Depends(get_authenticated_user), collection: models.Collection = Depends(get_collection)
):
    """Let the authenticated user leave a collection they are a member of."""
    obj = get_object_or_404(collection.members, user=user)
    obj.revoke()
|
@ -0,0 +1,38 @@
|
|||||||
|
from django.conf import settings
|
||||||
|
from django.db import transaction
|
||||||
|
from django.shortcuts import get_object_or_404
|
||||||
|
from fastapi import APIRouter, Request, status
|
||||||
|
|
||||||
|
from django_etebase.utils import get_user_queryset, CallbackContext
|
||||||
|
from .authentication import SignupIn, signup_save
|
||||||
|
from ..msgpack import MsgpackRoute
|
||||||
|
from ..exceptions import HttpError
|
||||||
|
from myauth.models import get_typed_user_model
|
||||||
|
|
||||||
|
test_reset_view_router = APIRouter(route_class=MsgpackRoute, tags=["test helpers"])
|
||||||
|
User = get_typed_user_model()
|
||||||
|
|
||||||
|
|
||||||
|
@test_reset_view_router.post("/reset/", status_code=status.HTTP_204_NO_CONTENT)
def reset(data: SignupIn, request: Request):
    """Test-only helper: wipe a test user's data and re-run signup for them."""
    # Only run when in DEBUG mode! It's only used for tests
    if not settings.DEBUG:
        raise HttpError(code="generic", detail="Only allowed in debug mode.")

    with transaction.atomic():
        user_queryset = get_user_queryset(User.objects.all(), CallbackContext(request.path_params))
        user = get_object_or_404(user_queryset, username=data.user.username)
        # Only allow test users for extra safety
        if not getattr(user, User.USERNAME_FIELD).startswith("test_user"):
            raise HttpError(code="generic", detail="Endpoint not allowed for user.")

        # Drop existing crypto info so signup_save can recreate it.
        if hasattr(user, "userinfo"):
            user.userinfo.delete()

        signup_save(data, request)
        # Delete all of the journal data for this user for a clear test env
        user.collection_set.all().delete()
        user.collectionmember_set.all().delete()
        user.incoming_invitations.all().delete()

        # FIXME: also delete chunk files!!!
|
@ -0,0 +1,145 @@
|
|||||||
|
import asyncio
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
import aioredis
|
||||||
|
from asgiref.sync import sync_to_async
|
||||||
|
from django.db.models import QuerySet
|
||||||
|
from fastapi import APIRouter, Depends, WebSocket, WebSocketDisconnect, status
|
||||||
|
import nacl.encoding
|
||||||
|
import nacl.utils
|
||||||
|
|
||||||
|
from django_etebase import models
|
||||||
|
from django_etebase.utils import CallbackContext, get_user_queryset
|
||||||
|
from myauth.models import UserType, get_typed_user_model
|
||||||
|
|
||||||
|
from ..dependencies import get_collection_queryset, get_item_queryset
|
||||||
|
from ..exceptions import NotSupported
|
||||||
|
from ..msgpack import MsgpackRoute, msgpack_decode, msgpack_encode
|
||||||
|
from ..redis import redisw
|
||||||
|
from ..utils import BaseModel, permission_responses
|
||||||
|
|
||||||
|
|
||||||
|
User = get_typed_user_model()
|
||||||
|
websocket_router = APIRouter(route_class=MsgpackRoute, responses=permission_responses)
|
||||||
|
CollectionQuerySet = QuerySet[models.Collection]
|
||||||
|
|
||||||
|
|
||||||
|
TICKET_VALIDITY_SECONDS = 10
|
||||||
|
|
||||||
|
|
||||||
|
class TicketRequest(BaseModel):
    """Client request for a websocket ticket."""

    collection: str  # uid of the collection to subscribe to
|
||||||
|
|
||||||
|
|
||||||
|
class TicketOut(BaseModel):
    """Short-lived ticket the client passes in the websocket URL."""

    ticket: str
|
||||||
|
|
||||||
|
|
||||||
|
class TicketInner(BaseModel):
    """Server-side ticket payload stored in Redis."""

    user: int  # user id the ticket was issued to
    req: TicketRequest
|
||||||
|
|
||||||
|
|
||||||
|
async def get_ticket(
    ticket_request: TicketRequest,
    user: UserType,
):
    """Get an authentication ticket that can be used with the websocket endpoint for authentication.

    The ticket is a random, URL-safe token stored in Redis for a short validity
    window; the websocket endpoint consumes (deletes) it on first use.
    Raises NotSupported when Redis is not configured.
    """
    if not redisw.is_active:
        raise NotSupported(detail="This end-point requires Redis to be configured")

    uid = nacl.encoding.URLSafeBase64Encoder.encode(nacl.utils.random(32))
    ticket_model = TicketInner(user=user.id, req=ticket_request)
    ticket_raw = msgpack_encode(ticket_model.dict())
    # BUGFIX: aioredis' ``expire`` kwarg is in *seconds*; the previous
    # ``expire=TICKET_VALIDITY_SECONDS * 1000`` made tickets valid for ~2.7
    # hours instead of 10 seconds. ``pexpire`` takes milliseconds, matching
    # the intent of the ``* 1000``.
    await redisw.redis.set(uid, ticket_raw, pexpire=TICKET_VALIDITY_SECONDS * 1000)
    return TicketOut(ticket=uid)
|
||||||
|
|
||||||
|
|
||||||
|
async def load_websocket_ticket(websocket: WebSocket, ticket: str) -> t.Optional[TicketInner]:
    """Fetch and consume a websocket ticket; close the socket if invalid.

    The ticket is deleted after a successful read so it is single-use.
    """
    content = await redisw.redis.get(ticket)
    if content is None:
        # Unknown or expired ticket: reject the connection.
        await websocket.close(code=status.WS_1008_POLICY_VIOLATION)
        return None
    await redisw.redis.delete(ticket)
    return TicketInner(**msgpack_decode(content))
|
||||||
|
|
||||||
|
|
||||||
|
def get_websocket_user(websocket: WebSocket, ticket_model: t.Optional[TicketInner] = Depends(load_websocket_ticket)):
    """Resolve the user a valid ticket was issued to; None when the ticket failed."""
    if ticket_model is None:
        return None
    user_queryset = get_user_queryset(User.objects.all(), CallbackContext(websocket.path_params))
    return user_queryset.get(id=ticket_model.user)
|
||||||
|
|
||||||
|
|
||||||
|
@websocket_router.websocket("/{ticket}/")
async def websocket_endpoint(
    websocket: WebSocket,
    stoken: t.Optional[str] = None,
    user: t.Optional[UserType] = Depends(get_websocket_user),
    ticket_model: TicketInner = Depends(load_websocket_ticket),
):
    """Ticket-authenticated websocket that streams collection updates."""
    if user is None:
        # Ticket failed; the socket was already closed by the dependency.
        return
    await websocket.accept()
    await redis_connector(websocket, ticket_model, user, stoken)
|
||||||
|
|
||||||
|
|
||||||
|
async def send_item_updates(
    websocket: WebSocket,
    collection: models.Collection,
    user: UserType,
    stoken: t.Optional[str],
):
    """Push all item changes since *stoken* to the websocket, page by page."""
    # Imported here to avoid a circular import with the collection router.
    from .collection import item_list_common

    done = False
    while not done:
        queryset = await sync_to_async(get_item_queryset)(collection)
        response = await sync_to_async(item_list_common)(queryset, user, stoken, limit=50, prefetch="auto")
        done = response.done
        if len(response.data) > 0:
            await websocket.send_bytes(msgpack_encode(response.dict()))
|
||||||
|
|
||||||
|
|
||||||
|
async def redis_connector(websocket: WebSocket, ticket_model: TicketInner, user: UserType, stoken: t.Optional[str]):
    """Bridge the Redis pub/sub channel for the ticket's collection onto the websocket."""

    async def producer_handler(r: aioredis.Redis, ws: WebSocket):
        channel_name = f"col.{ticket_model.req.collection}"
        (channel,) = await r.psubscribe(channel_name)
        assert isinstance(channel, aioredis.Channel)

        # Send missing items if we are not up to date
        queryset: QuerySet[models.Collection] = get_collection_queryset(user)
        collection: t.Optional[models.Collection] = await sync_to_async(
            queryset.filter(uid=ticket_model.req.collection).first
        )()
        if collection is None:
            # The ticket references a collection the user can no longer access.
            await websocket.close(code=status.WS_1008_POLICY_VIOLATION)
            return
        await send_item_updates(websocket, collection, user, stoken)

        try:
            while True:
                # We wait on the websocket so we fail if web sockets fail or get data
                receive = asyncio.create_task(websocket.receive())
                done, pending = await asyncio.wait(
                    {receive, channel.wait_message()}, return_when=asyncio.FIRST_COMPLETED
                )
                # Cancel whichever side did not complete to avoid leaking tasks.
                for task in pending:
                    task.cancel()
                if receive in done:
                    # Web socket should never receive any data
                    await websocket.close(code=status.WS_1008_POLICY_VIOLATION)
                    return

                message_raw = t.cast(t.Optional[t.Tuple[str, bytes]], await channel.get())
                if message_raw:
                    _, message = message_raw
                    # Forward the pub/sub payload verbatim to the client.
                    await ws.send_bytes(message)

        except aioredis.errors.ConnectionClosedError:
            await websocket.close(code=status.WS_1012_SERVICE_RESTART)
        except WebSocketDisconnect:
            # Client went away; nothing to clean up here.
            pass

    redis = redisw.redis
    await producer_handler(redis, websocket)
|
@ -0,0 +1,28 @@
|
|||||||
|
Copyright (c) 2011, Sensible Development.
|
||||||
|
Copyright (c) 2019, Matt Molyneaux
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
1. Redistributions of source code must retain the above copyright notice,
|
||||||
|
this list of conditions and the following disclaimer.
|
||||||
|
|
||||||
|
2. Redistributions in binary form must reproduce the above copyright
|
||||||
|
notice, this list of conditions and the following disclaimer in the
|
||||||
|
documentation and/or other materials provided with the distribution.
|
||||||
|
|
||||||
|
3. Neither the name of Django Send File nor the names of its
|
||||||
|
contributors may be used to endorse or promote products derived from
|
||||||
|
this software without specific prior written permission.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
@ -0,0 +1,3 @@
|
|||||||
|
Heavily inspired + code borrowed from: https://github.com/moggers87/django-sendfile2/
|
||||||
|
|
||||||
|
We just simplified and inlined it because we don't want another external dependency for distribution packagers to package, as well as need a much simpler version.
|
@ -0,0 +1 @@
|
|||||||
|
from .utils import sendfile # noqa
|
@ -0,0 +1,9 @@
|
|||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
from fastapi import Response
|
||||||
|
|
||||||
|
from ..utils import _convert_file_to_url
|
||||||
|
|
||||||
|
|
||||||
|
def sendfile(filename, **kwargs):
    """mod_wsgi backend: delegate serving to the web server via a Location header."""
    return Response(headers={"Location": _convert_file_to_url(filename)})
|
@ -0,0 +1,9 @@
|
|||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
from fastapi import Response
|
||||||
|
|
||||||
|
from ..utils import _convert_file_to_url
|
||||||
|
|
||||||
|
|
||||||
|
def sendfile(filename, **kwargs):
    """nginx backend: delegate serving to nginx via X-Accel-Redirect."""
    return Response(headers={"X-Accel-Redirect": _convert_file_to_url(filename)})
|
@ -0,0 +1,12 @@
|
|||||||
|
from fastapi.responses import FileResponse
|
||||||
|
|
||||||
|
|
||||||
|
def sendfile(filename, mimetype, **kwargs):
    """Use the SENDFILE_ROOT value composed with the path arrived as argument
    to build an absolute path with which resolve and return the file contents.

    If the path points to a file out of the root directory (should cover both
    situations with '..' and symlinks) then a 404 is raised.
    """
    # "Simple" backend: the app process streams the file itself.
    return FileResponse(filename, media_type=mimetype)
|
@ -0,0 +1,6 @@
|
|||||||
|
from fastapi import Response
|
||||||
|
|
||||||
|
|
||||||
|
def sendfile(filename, **kwargs):
    """xsendfile backend: delegate serving to the web server via X-Sendfile."""
    # Header values must be str; filename may arrive as a Path object.
    filename = str(filename)
    return Response(headers={"X-Sendfile": filename})
|
@ -0,0 +1,88 @@
|
|||||||
|
from functools import lru_cache
|
||||||
|
from importlib import import_module
|
||||||
|
from pathlib import Path, PurePath
|
||||||
|
from urllib.parse import quote
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from fastapi import status
|
||||||
|
from ..exceptions import HttpError
|
||||||
|
|
||||||
|
from django.conf import settings
|
||||||
|
from django.core.exceptions import ImproperlyConfigured
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
@lru_cache(maxsize=None)
def _get_sendfile():
    """Resolve and memoize the sendfile callable named by SENDFILE_BACKEND."""
    backend = getattr(settings, "SENDFILE_BACKEND", None)
    if not backend:
        raise ImproperlyConfigured("You must specify a value for SENDFILE_BACKEND")
    return import_module(backend).sendfile
|
||||||
|
|
||||||
|
|
||||||
|
def _convert_file_to_url(path):
    """Map an absolute file *path* under SENDFILE_ROOT to its SENDFILE_URL equivalent.

    Returns *path* unchanged when SENDFILE_URL is not configured (PurePath(None)
    raises TypeError). Raises ValueError if *path* is not under SENDFILE_ROOT.
    """
    try:
        url_root = PurePath(getattr(settings, "SENDFILE_URL", None))
    except TypeError:
        return path

    path_root = PurePath(settings.SENDFILE_ROOT)
    path_obj = PurePath(path)

    relpath = path_obj.relative_to(path_root)
    # Python 3.5: Path.resolve() has no `strict` kwarg, so use pathmod from an
    # already instantiated Path object
    # NOTE(review): `_flavour` is a private pathlib API -- may break on newer
    # Python versions; confirm before upgrading.
    url = relpath._flavour.pathmod.normpath(str(url_root / relpath))

    # Percent-encode so the value is safe to put in an HTTP header.
    return quote(str(url))
|
||||||
|
|
||||||
|
|
||||||
|
def _sanitize_path(filepath):
    """Resolve *filepath* against SENDFILE_ROOT and reject path traversal.

    Returns the normalized absolute Path. Raises ImproperlyConfigured when
    SENDFILE_ROOT is unset, and a 404 HttpError when the normalized path
    escapes SENDFILE_ROOT (e.g. via '..').
    """
    try:
        path_root = Path(getattr(settings, "SENDFILE_ROOT", None))
    except TypeError:
        # Path(None) raises TypeError -> setting is missing.
        raise ImproperlyConfigured("You must specify a value for SENDFILE_ROOT")

    filepath_obj = Path(filepath)

    # get absolute path
    # Python 3.5: Path.resolve() has no `strict` kwarg, so use pathmod from an
    # already instantiated Path object
    filepath_abs = Path(filepath_obj._flavour.pathmod.normpath(str(path_root / filepath_obj)))

    # if filepath_abs is not relative to path_root, relative_to throws an error
    try:
        filepath_abs.relative_to(path_root)
    except ValueError:
        raise HttpError(
            "generic", "{} wrt {} is impossible".format(filepath_abs, path_root), status_code=status.HTTP_404_NOT_FOUND
        )

    return filepath_abs
|
||||||
|
|
||||||
|
|
||||||
|
def sendfile(filename, mimetype="application/octet-stream", encoding=None):
    """
    Create a response to send file using backend configured in ``SENDFILE_BACKEND``

    ``filename`` is the absolute path to the file to send.

    Raises a 404 HttpError when the (sanitized) path does not exist.
    """
    # Normalize and validate against SENDFILE_ROOT before anything else.
    filepath_obj = _sanitize_path(filename)
    logger.debug(
        "filename '%s' requested \"\
        \"-> filepath '%s' obtained",
        filename,
        filepath_obj,
    )
    _sendfile = _get_sendfile()

    if not filepath_obj.exists():
        raise HttpError("does_not_exist", '"%s" does not exist' % filepath_obj, status_code=status.HTTP_404_NOT_FOUND)

    response = _sendfile(filepath_obj, mimetype=mimetype)

    # Backends may not set a content type (header-only redirects); ensure it.
    response.headers["Content-Type"] = mimetype

    return response
|
@ -0,0 +1,62 @@
|
|||||||
|
import typing as t
|
||||||
|
|
||||||
|
from django.db.models import QuerySet
|
||||||
|
from fastapi import status
|
||||||
|
|
||||||
|
from django_etebase.models import Stoken
|
||||||
|
|
||||||
|
from .exceptions import HttpError
|
||||||
|
|
||||||
|
# TODO missing stoken_annotation type
|
||||||
|
StokenAnnotation = t.Any
|
||||||
|
|
||||||
|
|
||||||
|
def get_stoken_obj(stoken: t.Optional[str]) -> t.Optional[Stoken]:
    """Look up the Stoken row for a client-supplied token.

    Returns None for a falsy token (including the empty string); raises a 400
    HttpError for a token that does not exist.
    """
    if stoken:
        try:
            return Stoken.objects.get(uid=stoken)
        except Stoken.DoesNotExist:
            raise HttpError("bad_stoken", "Invalid stoken.", status_code=status.HTTP_400_BAD_REQUEST)

    return None
|
||||||
|
|
||||||
|
|
||||||
|
def filter_by_stoken(
    stoken: t.Optional[str], queryset: QuerySet, stoken_annotation: StokenAnnotation
) -> t.Tuple[QuerySet, t.Optional[Stoken]]:
    """Annotate *queryset* with ``max_stoken`` and drop rows at or before *stoken*.

    Returns the annotated (and possibly filtered) queryset together with the
    resolved ``Stoken`` row, which is ``None`` when no stoken was supplied.
    """
    since = get_stoken_obj(stoken)

    annotated = queryset.annotate(max_stoken=stoken_annotation).order_by("max_stoken")

    if since is None:
        return annotated, None
    # Only rows changed strictly after the client's stoken are of interest.
    return annotated.filter(max_stoken__gt=since.id), since
|
||||||
|
|
||||||
|
|
||||||
|
def get_queryset_stoken(queryset: t.Iterable[t.Any]) -> t.Optional[Stoken]:
    """Return the ``Stoken`` row with the largest ``max_stoken`` annotation.

    Rows whose annotation is NULL are treated as having no stoken; when no row
    carries a stoken at all, ``None`` is returned.
    """
    maxid = max((getattr(row, "max_stoken") or -1 for row in queryset), default=-1)
    if maxid < 0:
        return None
    return Stoken.objects.get(id=maxid)
|
||||||
|
|
||||||
|
|
||||||
|
def filter_by_stoken_and_limit(
    stoken: t.Optional[str], limit: int, queryset: QuerySet, stoken_annotation: StokenAnnotation
) -> t.Tuple[list, t.Optional[Stoken], bool]:
    """Return one page of *queryset* starting after *stoken*.

    Returns ``(rows, new_stoken, done)`` where ``done`` is True when no further
    page exists, and ``new_stoken`` is the stoken of the last returned row,
    falling back to the incoming stoken when the page is empty.
    """
    filtered, since = filter_by_stoken(stoken=stoken, queryset=queryset, stoken_annotation=stoken_annotation)

    # Fetch one extra row to detect whether another page follows this one.
    rows = list(filtered[: limit + 1])
    done = len(rows) <= limit
    if not done:
        rows = rows[:limit]

    next_stoken = get_queryset_stoken(rows) or since
    return rows, next_stoken, done
|
@ -0,0 +1,85 @@
|
|||||||
|
import dataclasses
|
||||||
|
import typing as t
|
||||||
|
from typing_extensions import Literal
|
||||||
|
import msgpack
|
||||||
|
import base64
|
||||||
|
|
||||||
|
from fastapi import status, Query, Depends
|
||||||
|
from pydantic import BaseModel as PyBaseModel
|
||||||
|
|
||||||
|
from django.db.models import Model, QuerySet
|
||||||
|
from django.core.exceptions import ObjectDoesNotExist
|
||||||
|
|
||||||
|
from django_etebase import app_settings
|
||||||
|
from django_etebase.models import AccessLevels
|
||||||
|
from myauth.models import UserType, get_typed_user_model
|
||||||
|
|
||||||
|
from .exceptions import HttpError, HttpErrorOut
|
||||||
|
|
||||||
|
User = get_typed_user_model()
|
||||||
|
|
||||||
|
Prefetch = Literal["auto", "medium"]
|
||||||
|
PrefetchQuery = Query(default="auto")
|
||||||
|
|
||||||
|
|
||||||
|
T = t.TypeVar("T", bound=Model, covariant=True)
|
||||||
|
|
||||||
|
|
||||||
|
class BaseModel(PyBaseModel):
    """Project-wide pydantic base class for API schemas."""

    class Config:
        # Pass bytes through unchanged when JSON-encoding; presumably the
        # responses are msgpack-encoded elsewhere so raw bytes must survive
        # serialization as-is — TODO confirm against the response path.
        json_encoders = {
            bytes: lambda x: x,
        }
|
||||||
|
|
||||||
|
|
||||||
|
@dataclasses.dataclass
class Context:
    """Per-request context handed to serialization/permission helpers."""

    # Authenticated user, or None when the request is anonymous.
    user: t.Optional[UserType]
    # Client-requested prefetch level ("auto"/"medium"), if any.
    prefetch: t.Optional[Prefetch]
|
||||||
|
|
||||||
|
|
||||||
|
def get_object_or_404(queryset: QuerySet[T], **kwargs) -> T:
    """Fetch exactly one object from *queryset*, translating a miss into HTTP 404.

    Any ``ObjectDoesNotExist`` raised by the lookup becomes an ``HttpError``
    with status 404 so FastAPI handlers can surface it directly.
    """
    try:
        obj = queryset.get(**kwargs)
    except ObjectDoesNotExist as e:
        raise HttpError("does_not_exist", str(e), status_code=status.HTTP_404_NOT_FOUND)
    else:
        return obj
|
||||||
|
|
||||||
|
|
||||||
|
def is_collection_admin(collection, user):
    """Return True when *user* is a member of *collection* with ADMIN access."""
    membership = collection.members.filter(user=user).first()
    if membership is None:
        return False
    return membership.accessLevel == AccessLevels.ADMIN
|
||||||
|
|
||||||
|
|
||||||
|
def msgpack_encode(content) -> bytes:
    """Serialize *content* to msgpack bytes (binary-safe via ``use_bin_type``)."""
    packed = msgpack.packb(content, use_bin_type=True)
    # packb only returns None when streaming to a file-like object, which we
    # never do here.
    assert packed is not None
    return packed
|
||||||
|
|
||||||
|
|
||||||
|
def msgpack_decode(content: bytes):
    """Deserialize msgpack *content*, decoding UTF-8 strings to ``str``."""
    decoded = msgpack.unpackb(content, raw=False)
    return decoded
|
||||||
|
|
||||||
|
|
||||||
|
def b64encode(value: bytes):
    """Encode *value* as unpadded URL-safe base64 text."""
    encoded = base64.urlsafe_b64encode(value)
    # Drop the '=' padding; b64decode below restores it on the way back in.
    return encoded.decode("ascii").rstrip("=")
|
||||||
|
|
||||||
|
|
||||||
|
def b64decode(data: str):
    """Decode unpadded URL-safe base64 text back to bytes."""
    # Restore the padding stripped by b64encode: pad up to a multiple of 4.
    padding = "=" * (-len(data) % 4)
    return base64.urlsafe_b64decode(data + padding)
|
||||||
|
|
||||||
|
|
||||||
|
def get_user_username_email_kwargs(username: str):
    """Build case-insensitive ORM lookup kwargs for a login identifier.

    An identifier containing ``@`` is matched against the email field,
    otherwise against the username field.
    """
    if "@" in username:
        field_name = User.EMAIL_FIELD
    else:
        field_name = User.USERNAME_FIELD
    return {field_name + "__iexact": username.lower()}
|
||||||
|
|
||||||
|
|
||||||
|
# FastAPI dependency lists enforcing the configurable API permission classes.
PERMISSIONS_READ = [Depends(x) for x in app_settings.API_PERMISSIONS_READ]
# Write access additionally requires every read permission.
PERMISSIONS_READWRITE = PERMISSIONS_READ + [Depends(x) for x in app_settings.API_PERMISSIONS_WRITE]


# OpenAPI response declarations for endpoints guarded by the permissions above.
response_model_dict = {"model": HttpErrorOut}
permission_responses: t.Dict[t.Union[int, str], t.Dict[str, t.Any]] = {
    401: response_model_dict,
    403: response_model_dict,
}
|
@ -0,0 +1,19 @@
|
|||||||
|
import os
|
||||||
|
|
||||||
|
from django.core.asgi import get_asgi_application
|
||||||
|
|
||||||
|
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "etebase_server.settings")
|
||||||
|
django_application = get_asgi_application()
|
||||||
|
|
||||||
|
|
||||||
|
def create_application():
    """Build the combined ASGI app: FastAPI in front, Django mounted at ``/``.

    The FastAPI factory is imported lazily so Django is fully configured
    (``get_asgi_application`` above) before etebase_fastapi pulls in models.
    """
    from etebase_fastapi.main import create_application as create_fastapi_app

    app = create_fastapi_app()
    # Anything FastAPI does not route (admin, static, ...) falls through to Django.
    app.mount("/", django_application)
    return app
|
||||||
|
|
||||||
|
|
||||||
|
application = create_application()
|
@ -0,0 +1,180 @@
|
|||||||
|
"""
|
||||||
|
Django settings for etebase_server project.
|
||||||
|
|
||||||
|
Generated by 'django-admin startproject' using Django 3.0.3.
|
||||||
|
|
||||||
|
For more information on this file, see
|
||||||
|
https://docs.djangoproject.com/en/3.0/topics/settings/
|
||||||
|
|
||||||
|
For the full list of settings and their values, see
|
||||||
|
https://docs.djangoproject.com/en/3.0/ref/settings/
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import configparser
|
||||||
|
from .utils import get_secret_from_file
|
||||||
|
|
||||||
|
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
|
||||||
|
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
||||||
|
|
||||||
|
AUTH_USER_MODEL = "myauth.User"
|
||||||
|
|
||||||
|
|
||||||
|
# Quick-start development settings - unsuitable for production
|
||||||
|
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
|
||||||
|
|
||||||
|
# SECURITY WARNING: keep the secret key used in production secret!
|
||||||
|
# See secret.py for how this is generated; uses a file 'secret.txt' in the root
|
||||||
|
# directory
|
||||||
|
SECRET_FILE = os.path.join(BASE_DIR, "secret.txt")
|
||||||
|
|
||||||
|
# SECURITY WARNING: don't run with debug turned on in production!
|
||||||
|
DEBUG = True
|
||||||
|
|
||||||
|
ALLOWED_HOSTS = []
|
||||||
|
|
||||||
|
# Database
|
||||||
|
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
|
||||||
|
|
||||||
|
DATABASES = {
|
||||||
|
"default": {
|
||||||
|
"ENGINE": "django.db.backends.sqlite3",
|
||||||
|
"NAME": os.environ.get("ETEBASE_DB_PATH", os.path.join(BASE_DIR, "db.sqlite3")),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
DEFAULT_AUTO_FIELD = 'django.db.models.AutoField'
|
||||||
|
|
||||||
|
# Application definition
|
||||||
|
|
||||||
|
INSTALLED_APPS = [
|
||||||
|
"django.contrib.admin",
|
||||||
|
"django.contrib.auth",
|
||||||
|
"django.contrib.contenttypes",
|
||||||
|
"django.contrib.sessions",
|
||||||
|
"django.contrib.messages",
|
||||||
|
"django.contrib.staticfiles",
|
||||||
|
"myauth.apps.MyauthConfig",
|
||||||
|
"django_etebase.apps.DjangoEtebaseConfig",
|
||||||
|
"django_etebase.token_auth.apps.TokenAuthConfig",
|
||||||
|
]
|
||||||
|
|
||||||
|
MIDDLEWARE = [
|
||||||
|
"django.middleware.security.SecurityMiddleware",
|
||||||
|
"django.contrib.sessions.middleware.SessionMiddleware",
|
||||||
|
"django.middleware.common.CommonMiddleware",
|
||||||
|
"django.middleware.csrf.CsrfViewMiddleware",
|
||||||
|
"django.contrib.auth.middleware.AuthenticationMiddleware",
|
||||||
|
"django.contrib.messages.middleware.MessageMiddleware",
|
||||||
|
"django.middleware.clickjacking.XFrameOptionsMiddleware",
|
||||||
|
]
|
||||||
|
|
||||||
|
ROOT_URLCONF = "etebase_server.urls"
|
||||||
|
|
||||||
|
TEMPLATES = [
|
||||||
|
{
|
||||||
|
"BACKEND": "django.template.backends.django.DjangoTemplates",
|
||||||
|
"DIRS": [os.path.join(BASE_DIR, "templates")],
|
||||||
|
"APP_DIRS": True,
|
||||||
|
"OPTIONS": {
|
||||||
|
"context_processors": [
|
||||||
|
"django.template.context_processors.debug",
|
||||||
|
"django.template.context_processors.request",
|
||||||
|
"django.contrib.auth.context_processors.auth",
|
||||||
|
"django.contrib.messages.context_processors.messages",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
WSGI_APPLICATION = "etebase_server.wsgi.application"
|
||||||
|
|
||||||
|
|
||||||
|
# Password validation
|
||||||
|
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
|
||||||
|
|
||||||
|
AUTH_PASSWORD_VALIDATORS = [
|
||||||
|
{
|
||||||
|
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
# Internationalization
|
||||||
|
# https://docs.djangoproject.com/en/3.0/topics/i18n/
|
||||||
|
|
||||||
|
LANGUAGE_CODE = "en-us"
|
||||||
|
|
||||||
|
TIME_ZONE = "UTC"
|
||||||
|
|
||||||
|
USE_I18N = True
|
||||||
|
|
||||||
|
USE_L10N = True
|
||||||
|
|
||||||
|
USE_TZ = True
|
||||||
|
|
||||||
|
# Static files (CSS, JavaScript, Images)
|
||||||
|
# https://docs.djangoproject.com/en/3.0/howto/static-files/
|
||||||
|
|
||||||
|
STATIC_URL = "/static/"
|
||||||
|
STATIC_ROOT = os.environ.get("DJANGO_STATIC_ROOT", os.path.join(BASE_DIR, "static"))
|
||||||
|
|
||||||
|
MEDIA_ROOT = os.environ.get("DJANGO_MEDIA_ROOT", os.path.join(BASE_DIR, "media"))
|
||||||
|
MEDIA_URL = "/user-media/"
|
||||||
|
|
||||||
|
|
||||||
|
# Define where to find configuration files.  Entries are tried in order; the
# environment variable takes precedence, then a file in the working directory,
# then the system-wide location.
config_locations = [
    os.environ.get("ETEBASE_EASY_CONFIG_PATH", ""),
    "etebase-server.ini",
    "/etc/etebase-server/etebase-server.ini",
]

# Use config file if present.  configparser.read() silently skips paths that
# do not exist, so passing the whole list is safe.
if any(os.path.isfile(x) for x in config_locations):
    config = configparser.ConfigParser()
    config.read(config_locations)

    section = config["global"]

    # Each setting falls back to the default computed above when the key is
    # absent from the [global] section.
    SECRET_FILE = section.get("secret_file", SECRET_FILE)
    STATIC_ROOT = section.get("static_root", STATIC_ROOT)
    STATIC_URL = section.get("static_url", STATIC_URL)
    MEDIA_ROOT = section.get("media_root", MEDIA_ROOT)
    MEDIA_URL = section.get("media_url", MEDIA_URL)
    LANGUAGE_CODE = section.get("language_code", LANGUAGE_CODE)
    TIME_ZONE = section.get("time_zone", TIME_ZONE)
    DEBUG = section.getboolean("debug", DEBUG)

    if "redis_uri" in section:
        ETEBASE_REDIS_URI = section.get("redis_uri")

    # [allowed_hosts] lists hosts as key = value pairs; only the values are used.
    if "allowed_hosts" in config:
        ALLOWED_HOSTS = [y for x, y in config.items("allowed_hosts")]

    # [database] keys map directly onto Django's DATABASES options (upper-cased),
    # replacing the sqlite default entirely.
    if "database" in config:
        DATABASES = {"default": {x.upper(): y for x, y in config.items("database")}}

ETEBASE_CREATE_USER_FUNC = "django_etebase.utils.create_user_blocked"

# Efficient file streaming (for large files)
SENDFILE_BACKEND = "etebase_fastapi.sendfile.backends.simple"
SENDFILE_ROOT = MEDIA_ROOT

# Make an `etebase_server_settings` module available to override settings.
# A missing module is fine; any name it defines shadows the values above.
try:
    from etebase_server_settings import *
except ImportError:
    pass

# At module level locals() is the module namespace, so this only generates a
# key when neither this file nor the override module defined SECRET_KEY.
if "SECRET_KEY" not in locals():
    SECRET_KEY = get_secret_from_file(SECRET_FILE)
|
@ -0,0 +1,25 @@
|
|||||||
|
import os
|
||||||
|
|
||||||
|
from django.conf import settings
|
||||||
|
from django.conf.urls import url
|
||||||
|
from django.contrib import admin
|
||||||
|
from django.urls import path, re_path
|
||||||
|
from django.views.generic import TemplateView
|
||||||
|
from django.views.static import serve
|
||||||
|
from django.contrib.staticfiles import finders
|
||||||
|
|
||||||
|
# Only the Django admin and a landing page are served by Django itself; the
# API is handled by the FastAPI application mounted in asgi.py.
# NOTE(review): django.conf.urls.url is deprecated (removed in Django 4) —
# re_path is already imported above and could replace it.
urlpatterns = [
    url(r"^admin/", admin.site.urls),
    path("", TemplateView.as_view(template_name="success.html")),
]

if settings.DEBUG:

    def serve_static(request, path):
        """Serve a static asset via the staticfiles finders (DEBUG only)."""
        # NOTE(review): finders.find() returns None when the asset is missing,
        # which makes os.path.dirname() raise TypeError — confirm whether a
        # 404 should be raised here instead.
        filename = finders.find(path)
        dirname = os.path.dirname(filename)
        basename = os.path.basename(filename)

        return serve(request, basename, dirname)

    urlpatterns += [re_path(r"^static/(?P<path>.*)$", serve_static)]
|
@ -0,0 +1,26 @@
|
|||||||
|
# Copyright © 2017 Tom Hacohen
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as
|
||||||
|
# published by the Free Software Foundation, version 3.
|
||||||
|
#
|
||||||
|
# This library is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from django.core.management import utils
|
||||||
|
|
||||||
|
|
||||||
|
def get_secret_from_file(path):
    """Return the Django secret key stored at *path*, generating it on first use.

    When the file cannot be read (typically because it does not exist yet),
    a fresh random key is generated, persisted to *path* and returned, so
    subsequent starts reuse the same key.

    :param path: filesystem path of the secret file.
    :returns: the secret key string, stripped of surrounding whitespace.
    """
    try:
        with open(path, "r") as f:
            return f.read().strip()
    except EnvironmentError:
        import os  # local import: the read path does not need os

        secret_key = utils.get_random_secret_key()
        # Security fix: create the file with owner-only permissions (0600) so
        # the secret key is not readable by other users on the host.  The
        # previous plain open() used the default umask-derived mode.
        fd = os.open(path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o600)
        with os.fdopen(fd, "w") as f:
            f.write(secret_key)
        return secret_key
|
@ -0,0 +1,22 @@
|
|||||||
|
# Running `etebase` under `nginx` and `uwsgi`
|
||||||
|
|
||||||
|
This configuration assumes that etebase server has been installed in the home folder of a non privileged user
|
||||||
|
called `EtebaseUser` following the instructions in <https://github.com/etesync/server>. Also that static
|
||||||
|
files have been collected at `/srv/http/etebase_server` by running the following commands:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo mkdir -p /srv/http/etebase_server/static
|
||||||
|
sudo chown -R EtebaseUser /srv/http/etebase_server
|
||||||
|
sudo su EtebaseUser
|
||||||
|
cd /path/to/etebase
|
||||||
|
ln -s /srv/http/etebase_server/static static
|
||||||
|
./manage.py collectstatic
|
||||||
|
```
|
||||||
|
|
||||||
|
It is also assumed that `nginx` and `uwsgi` have been installed system wide by `root`, and that `nginx` is running as user/group `www-data`.
|
||||||
|
|
||||||
|
In this setup, `uwsgi` running as a `systemd` service as `root` creates a unix socket with read-write access
|
||||||
|
to both `EtebaseUser` and `nginx`. It then drops its `root` privilege and runs `etebase` as `EtebaseUser`.
|
||||||
|
|
||||||
|
`nginx` listens on the `https` port (or a non standard port `https` port if desired), delivers static pages directly
|
||||||
|
and for everything else, communicates with `etebase` over the unix socket.
|
@ -0,0 +1,15 @@
|
|||||||
|
# uwsgi configuration file
|
||||||
|
# typical location of this file would be /etc/uwsgi/sites/etebase.ini
|
||||||
|
|
||||||
|
[uwsgi]
|
||||||
|
socket = /path/to/etebase_server.sock
|
||||||
|
chown-socket = EtebaseUser:www-data
|
||||||
|
chmod-socket = 660
|
||||||
|
vacuum = true
|
||||||
|
|
||||||
|
|
||||||
|
uid = EtebaseUser
|
||||||
|
chdir = /path/to/etebase
|
||||||
|
home = %(chdir)/.venv
|
||||||
|
module = etebase_server.wsgi
|
||||||
|
master = true
|
@ -0,0 +1,15 @@
|
|||||||
|
# uwsgi configuration file
|
||||||
|
# typical location of this file would be /etc/uwsgi/sites/etesync.ini
|
||||||
|
|
||||||
|
[uwsgi]
|
||||||
|
socket = /path/to/etesync_server.sock
|
||||||
|
chown-socket = EtesyncUser:www-data
|
||||||
|
chmod-socket = 660
|
||||||
|
vacuum = true
|
||||||
|
|
||||||
|
|
||||||
|
uid = EtesyncUser
|
||||||
|
chdir = /path/to/etesync
|
||||||
|
home = %(chdir)/.venv
|
||||||
|
module = etesync_server.wsgi
|
||||||
|
master = true
|
@ -0,0 +1,36 @@
|
|||||||
|
# nginx configuration for etebase server running on https://my.server.name
|
||||||
|
# typical location of this file would be /etc/nginx/sites-available/my.server.name.conf
|
||||||
|
|
||||||
|
server {
|
||||||
|
server_name my.server.name;
|
||||||
|
|
||||||
|
root /srv/http/etebase_server;
|
||||||
|
|
||||||
|
client_max_body_size 20M;
|
||||||
|
|
||||||
|
location /static {
|
||||||
|
expires 1y;
|
||||||
|
try_files $uri $uri/ =404;
|
||||||
|
}
|
||||||
|
|
||||||
|
location /media {
|
||||||
|
expires 1y;
|
||||||
|
try_files $uri $uri/ =404;
|
||||||
|
}
|
||||||
|
|
||||||
|
location / {
|
||||||
|
uwsgi_pass unix:/path/to/etebase_server.sock;
|
||||||
|
include uwsgi_params;
|
||||||
|
}
|
||||||
|
|
||||||
|
# change 443 to say 9443 to run on a non standard port
|
||||||
|
listen 443 ssl;
|
||||||
|
listen [::]:443 ssl;
|
||||||
|
# Enable these two instead of the two above if your nginx supports http2
|
||||||
|
# listen 443 ssl http2;
|
||||||
|
# listen [::]:443 ssl http2;
|
||||||
|
|
||||||
|
ssl_certificate /path/to/certificate-file;
ssl_certificate_key /path/to/certificate-key-file;
|
||||||
|
# other ssl directives as needed
|
||||||
|
}
|
@ -0,0 +1,15 @@
|
|||||||
|
# systemd unit for running uwsgi in emperor mode
|
||||||
|
# typical location of this file would be /etc/systemd/system/uwsgi.service
|
||||||
|
|
||||||
|
[Unit]
|
||||||
|
Description=uWSGI Emperor service
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
ExecStart=/usr/local/bin/uwsgi --emperor /etc/uwsgi/sites
|
||||||
|
Restart=always
|
||||||
|
KillSignal=SIGQUIT
|
||||||
|
Type=notify
|
||||||
|
NotifyAccess=all
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=multi-user.target
|
@ -0,0 +1,241 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||||
|
<svg
|
||||||
|
xmlns:dc="http://purl.org/dc/elements/1.1/"
|
||||||
|
xmlns:cc="http://creativecommons.org/ns#"
|
||||||
|
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||||
|
xmlns:svg="http://www.w3.org/2000/svg"
|
||||||
|
xmlns="http://www.w3.org/2000/svg"
|
||||||
|
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||||
|
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||||
|
height="192"
|
||||||
|
viewBox="0 0 192 192"
|
||||||
|
width="192"
|
||||||
|
version="1.1"
|
||||||
|
id="svg3688"
|
||||||
|
sodipodi:docname="logo.svg"
|
||||||
|
inkscape:version="0.92.2 5c3e80d, 2017-08-06"
|
||||||
|
inkscape:export-filename="/home/tom/projects/securesync/graphics/logo.png"
|
||||||
|
inkscape:export-xdpi="1000"
|
||||||
|
inkscape:export-ydpi="1000"
|
||||||
|
style="fill:#000000">
|
||||||
|
<metadata
|
||||||
|
id="metadata3694">
|
||||||
|
<rdf:RDF>
|
||||||
|
<cc:Work
|
||||||
|
rdf:about="">
|
||||||
|
<dc:format>image/svg+xml</dc:format>
|
||||||
|
<dc:type
|
||||||
|
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
|
||||||
|
<dc:title />
|
||||||
|
</cc:Work>
|
||||||
|
</rdf:RDF>
|
||||||
|
</metadata>
|
||||||
|
<defs
|
||||||
|
id="defs3692">
|
||||||
|
<filter
|
||||||
|
style="color-interpolation-filters:sRGB;"
|
||||||
|
inkscape:label="Drop Shadow"
|
||||||
|
id="filter4498">
|
||||||
|
<feFlood
|
||||||
|
flood-opacity="0.498039"
|
||||||
|
flood-color="rgb(0,0,0)"
|
||||||
|
result="flood"
|
||||||
|
id="feFlood4488" />
|
||||||
|
<feComposite
|
||||||
|
in="flood"
|
||||||
|
in2="SourceGraphic"
|
||||||
|
operator="in"
|
||||||
|
result="composite1"
|
||||||
|
id="feComposite4490" />
|
||||||
|
<feGaussianBlur
|
||||||
|
in="composite1"
|
||||||
|
stdDeviation="4"
|
||||||
|
result="blur"
|
||||||
|
id="feGaussianBlur4492" />
|
||||||
|
<feOffset
|
||||||
|
dx="0"
|
||||||
|
dy="4"
|
||||||
|
result="offset"
|
||||||
|
id="feOffset4494" />
|
||||||
|
<feComposite
|
||||||
|
in="SourceGraphic"
|
||||||
|
in2="offset"
|
||||||
|
operator="over"
|
||||||
|
result="composite2"
|
||||||
|
id="feComposite4496" />
|
||||||
|
</filter>
|
||||||
|
<filter
|
||||||
|
style="color-interpolation-filters:sRGB;"
|
||||||
|
inkscape:label="Drop Shadow"
|
||||||
|
id="filter4510">
|
||||||
|
<feFlood
|
||||||
|
flood-opacity="0.498039"
|
||||||
|
flood-color="rgb(0,0,0)"
|
||||||
|
result="flood"
|
||||||
|
id="feFlood4500" />
|
||||||
|
<feComposite
|
||||||
|
in="flood"
|
||||||
|
in2="SourceGraphic"
|
||||||
|
operator="in"
|
||||||
|
result="composite1"
|
||||||
|
id="feComposite4502" />
|
||||||
|
<feGaussianBlur
|
||||||
|
in="composite1"
|
||||||
|
stdDeviation="4"
|
||||||
|
result="blur"
|
||||||
|
id="feGaussianBlur4504" />
|
||||||
|
<feOffset
|
||||||
|
dx="0"
|
||||||
|
dy="4"
|
||||||
|
result="offset"
|
||||||
|
id="feOffset4506" />
|
||||||
|
<feComposite
|
||||||
|
in="SourceGraphic"
|
||||||
|
in2="offset"
|
||||||
|
operator="over"
|
||||||
|
result="composite2"
|
||||||
|
id="feComposite4508" />
|
||||||
|
</filter>
|
||||||
|
<filter
|
||||||
|
style="color-interpolation-filters:sRGB"
|
||||||
|
inkscape:label="Drop Shadow"
|
||||||
|
id="filter4498-9">
|
||||||
|
<feFlood
|
||||||
|
flood-opacity="0.498039"
|
||||||
|
flood-color="rgb(0,0,0)"
|
||||||
|
result="flood"
|
||||||
|
id="feFlood4488-1" />
|
||||||
|
<feComposite
|
||||||
|
in="flood"
|
||||||
|
in2="SourceGraphic"
|
||||||
|
operator="in"
|
||||||
|
result="composite1"
|
||||||
|
id="feComposite4490-7" />
|
||||||
|
<feGaussianBlur
|
||||||
|
in="composite1"
|
||||||
|
stdDeviation="4"
|
||||||
|
result="blur"
|
||||||
|
id="feGaussianBlur4492-7" />
|
||||||
|
<feOffset
|
||||||
|
dx="0"
|
||||||
|
dy="4"
|
||||||
|
result="offset"
|
||||||
|
id="feOffset4494-1" />
|
||||||
|
<feComposite
|
||||||
|
in="SourceGraphic"
|
||||||
|
in2="offset"
|
||||||
|
operator="over"
|
||||||
|
result="composite2"
|
||||||
|
id="feComposite4496-1" />
|
||||||
|
</filter>
|
||||||
|
<filter
|
||||||
|
style="color-interpolation-filters:sRGB"
|
||||||
|
inkscape:label="Drop Shadow"
|
||||||
|
id="filter4510-5">
|
||||||
|
<feFlood
|
||||||
|
flood-opacity="0.498039"
|
||||||
|
flood-color="rgb(0,0,0)"
|
||||||
|
result="flood"
|
||||||
|
id="feFlood4500-9" />
|
||||||
|
<feComposite
|
||||||
|
in="flood"
|
||||||
|
in2="SourceGraphic"
|
||||||
|
operator="in"
|
||||||
|
result="composite1"
|
||||||
|
id="feComposite4502-7" />
|
||||||
|
<feGaussianBlur
|
||||||
|
in="composite1"
|
||||||
|
stdDeviation="4"
|
||||||
|
result="blur"
|
||||||
|
id="feGaussianBlur4504-7" />
|
||||||
|
<feOffset
|
||||||
|
dx="0"
|
||||||
|
dy="4"
|
||||||
|
result="offset"
|
||||||
|
id="feOffset4506-6" />
|
||||||
|
<feComposite
|
||||||
|
in="SourceGraphic"
|
||||||
|
in2="offset"
|
||||||
|
operator="over"
|
||||||
|
result="composite2"
|
||||||
|
id="feComposite4508-7" />
|
||||||
|
</filter>
|
||||||
|
</defs>
|
||||||
|
<sodipodi:namedview
|
||||||
|
pagecolor="#ffffff"
|
||||||
|
bordercolor="#666666"
|
||||||
|
borderopacity="1"
|
||||||
|
objecttolerance="10"
|
||||||
|
gridtolerance="10"
|
||||||
|
guidetolerance="10"
|
||||||
|
inkscape:pageopacity="0"
|
||||||
|
inkscape:pageshadow="2"
|
||||||
|
inkscape:window-width="3832"
|
||||||
|
inkscape:window-height="2088"
|
||||||
|
id="namedview3690"
|
||||||
|
showgrid="false"
|
||||||
|
inkscape:zoom="8"
|
||||||
|
inkscape:cx="169.753"
|
||||||
|
inkscape:cy="122.38481"
|
||||||
|
inkscape:window-x="0"
|
||||||
|
inkscape:window-y="68"
|
||||||
|
inkscape:window-maximized="0"
|
||||||
|
inkscape:current-layer="svg3688" />
|
||||||
|
<path
|
||||||
|
d="M 0,-2 H 192 V 190 H 0 Z"
|
||||||
|
id="path3684"
|
||||||
|
inkscape:connector-curvature="0"
|
||||||
|
style="fill:none;stroke-width:8" />
|
||||||
|
<g
|
||||||
|
inkscape:groupmode="layer"
|
||||||
|
id="layer2"
|
||||||
|
inkscape:label="circle"
|
||||||
|
style="display:inline"
|
||||||
|
transform="translate(0,168)" />
|
||||||
|
<g
|
||||||
|
inkscape:groupmode="layer"
|
||||||
|
id="layer1"
|
||||||
|
inkscape:label="arrow"
|
||||||
|
style="display:inline"
|
||||||
|
transform="translate(0,168)">
|
||||||
|
<path
|
||||||
|
inkscape:connector-curvature="0"
|
||||||
|
style="fill:none;stroke-width:8"
|
||||||
|
d="M 0,-170 H 192 V 22 H 0 Z"
|
||||||
|
id="path4" />
|
||||||
|
</g>
|
||||||
|
<path
|
||||||
|
transform="matrix(1.0041182,0,0,0.99662549,-51.069563,-98.263563)"
|
||||||
|
style="fill:#ffc107;fill-opacity:1;stroke-width:8;filter:url(#filter4498-9)"
|
||||||
|
sodipodi:nodetypes="ccccccccccccccccc"
|
||||||
|
inkscape:connector-curvature="0"
|
||||||
|
id="path3686-3"
|
||||||
|
d="m 210.91169,166.80218 v -37.52 h -37.52 l -26.48,-26.48 -26.48,26.48 H 82.911688 v 37.52 l -26.48,26.48 26.48,26.48 v 37.52 h 37.520002 l 26.48,26.48 26.48,-26.48 h 37.52 v -37.52 l 26.48,-26.48 z" />
|
||||||
|
<g
|
||||||
|
id="g6594"
|
||||||
|
transform="matrix(1.430883,0,0,1.3318324,-104.95215,-30.494868)">
|
||||||
|
<path
|
||||||
|
d="M 73.384012,21.91357 H 208.11948 V 165.58939 H 73.384012 Z"
|
||||||
|
id="path3684-6"
|
||||||
|
inkscape:connector-curvature="0"
|
||||||
|
style="fill:none;stroke-width:5.79724312" />
|
||||||
|
<ellipse
|
||||||
|
style="display:inline;fill:#ffd740;fill-opacity:1;fill-rule:nonzero;stroke:#ffd740;stroke-width:44.18160629;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||||
|
id="path5101-5"
|
||||||
|
cx="140.78024"
|
||||||
|
cy="93.718559"
|
||||||
|
rx="11.299538"
|
||||||
|
ry="12.050389" />
|
||||||
|
<path
|
||||||
|
inkscape:connector-curvature="0"
|
||||||
|
d="m 146.91169,-329.24817 v -17.44985 l -23.29487,23.26646 23.29487,23.26647 v -17.44985 c 19.2765,0 34.94231,15.6467 34.94231,34.8997 0,5.8748 -1.45593,11.45873 -4.0766,16.28652 l 8.50263,8.49226 c 4.5425,-7.15443 7.2214,-15.64669 7.2214,-24.77878 0,-25.70945 -20.8489,-46.53293 -46.58974,-46.53293 z m 0,81.43263 c -19.27651,0 -34.94231,-15.6467 -34.94231,-34.8997 0,-5.87478 1.45593,-11.45874 4.0766,-16.28653 l -8.50262,-8.49226 c -4.54251,7.15444 -7.22141,15.6467 -7.22141,24.77879 0,25.70945 20.84891,46.53293 46.58974,46.53293 v 17.44986 l 23.29487,-23.26647 -23.29487,-23.26647 z"
|
||||||
|
id="path2-6"
|
||||||
|
style="display:inline;fill:#448aff;fill-opacity:1;stroke-width:5.82016563;filter:url(#filter4510-5)"
|
||||||
|
transform="matrix(0.71930922,0,0,0.73751549,35.063296,302.33958)" />
|
||||||
|
<path
|
||||||
|
inkscape:connector-curvature="0"
|
||||||
|
style="display:inline;fill:none;stroke-width:5.79724312"
|
||||||
|
d="M 73.384012,21.91357 H 208.11948 V 165.58939 H 73.384012 Z"
|
||||||
|
id="path4-3" />
|
||||||
|
</g>
|
||||||
|
</svg>
|
After Width: | Height: | Size: 7.5 KiB |
@ -0,0 +1,21 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
"""Django's command-line utility for administrative tasks."""
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Entry point for Django's command-line utility (``manage.py``)."""
    # Point Django at this project's settings unless the caller already did.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "etebase_server.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError as import_error:
        message = (
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        )
        raise ImportError(message) from import_error
    execute_from_command_line(sys.argv)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
@ -0,0 +1,12 @@
|
|||||||
|
from django.contrib import admin
|
||||||
|
from django.contrib.auth.admin import UserAdmin as DjangoUserAdmin
|
||||||
|
from .models import User
|
||||||
|
from .forms import AdminUserCreationForm
|
||||||
|
|
||||||
|
|
||||||
|
class UserAdmin(DjangoUserAdmin):
    """Admin configuration for the custom User model."""

    # Creation form that asks only for a username — no password fields,
    # matching the etebase model where authentication is key-based.
    add_form = AdminUserCreationForm
    add_fieldsets = ((None, {"classes": ("wide",), "fields": ("username",),}),)


admin.site.register(User, UserAdmin)
|
@ -0,0 +1,5 @@
|
|||||||
|
from django.apps import AppConfig
|
||||||
|
|
||||||
|
|
||||||
|
class MyauthConfig(AppConfig):
    """App config for the custom auth app that provides the project User model."""

    name = "myauth"
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue