commit ce2a7ed6e4
Merge branch 'develop' into auth
CHANGES.rst (74 lines changed)

@@ -1,3 +1,77 @@
+Changes in synapse v0.10.0-rc1 (2015-08-20)
+===========================================
+
+Also see the v0.9.4-rc1 changelog, which has been amalgamated into this release.
+
+General:
+
+* Upgrade to Twisted 15 (PR #173)
+* Add support for serving and fetching encryption keys over federation.
+  (PR #208)
+* Add support for logging in with an email address (PR #234)
+* Add support for the new ``m.room.canonical_alias`` event. (PR #233)
+* Error if a user tries to register with an email already in use. (PR #211)
+* Add extra caches and improve existing ones (PR #212, #219, #226, #228)
+* Batch various storage requests (PR #226, #228)
+* Fix bug where we didn't correctly log the entity that triggered the request
+  if the request came in via an application service (PR #230)
+* Fix bug where we needlessly regenerated the full list of rooms an AS is
+  interested in. (PR #232)
+* Add support for ASes to use the v2_alpha registration API (PR #210)
+
+Configuration:
+
+* Add ``--generate-keys`` that will generate any missing cert and key files in
+  the configuration files. This is equivalent to running ``--generate-config``
+  on an existing configuration file. (PR #220)
+* ``--generate-config`` no longer requires a ``--server-name`` parameter when
+  used on existing configuration files. (PR #220)
+* Add ``--print-pidfile`` flag that controls the printing of the pid to stdout
+  of the daemonised process. (PR #213)
+
+Media Repository:
+
+* Fix bug where we picked a lower resolution image than requested. (PR #205)
+* Add support for specifying whether the media repository should dynamically
+  thumbnail images or not. (PR #206)
+
+Metrics:
+
+* Add statistics from the reactor to the metrics API. (PR #224, #225)
+
+Demo Homeservers:
+
+* Fix starting the demo homeservers without rate-limiting enabled. (PR #182)
+* Fix enabling registration on demo homeservers (PR #223)
+
+
+Changes in synapse v0.9.4-rc1 (2015-07-21)
+==========================================
+
+General:
+
+* Add basic implementation of receipts. (SPEC-99)
+* Add support for configuration presets in the room creation API. (PR #203)
+* Add auth event that limits the visibility of history for new users.
+  (SPEC-134)
+* Add SAML2 login/registration support. (PR #201. Thanks Muthu Subramanian!)
+* Add client-side key management APIs for end-to-end encryption. (PR #198)
+* Change power level semantics so that you cannot kick, ban or change the
+  power levels of users that have an equal or greater power level than you.
+  (SYN-192)
+* Improve performance by bulk-inserting events where possible. (PR #193)
+* Improve performance by bulk-verifying signatures where possible. (PR #194)
+
+Configuration:
+
+* Add support for including TLS certificate chains.
+
+Media Repository:
+
+* Add Content-Disposition headers to content repository responses. (SYN-150)
+
+
 Changes in synapse v0.9.3 (2015-07-01)
 ======================================
README.rst (17 lines changed)

@@ -190,8 +190,8 @@ For information on how to install and use PostgreSQL, please see
 Running Synapse
 ===============
 
-To actually run your new homeserver, pick a working directory for Synapse to run
-(e.g. ``~/.synapse``), and::
+To actually run your new homeserver, pick a working directory for Synapse to
+run (e.g. ``~/.synapse``), and::
 
     cd ~/.synapse
     source ./bin/activate
@@ -252,9 +252,11 @@ Synapse can be installed on Cygwin. It requires the following Cygwin packages:
 
 The content repository requires additional packages and will be unable to process
 uploads without them:
 
 - libjpeg8
 - libjpeg8-devel
 - zlib
 
 If you choose to install Synapse without these packages, you will need to reinstall
 ``pillow`` for changes to be applied, e.g. ``pip uninstall pillow`` ``pip install
 pillow --user``
@@ -362,14 +364,11 @@ This should end with a 'PASSED' result::
 Upgrading an existing Synapse
 =============================
 
-IMPORTANT: Before upgrading an existing synapse to a new version, please
-refer to UPGRADE.rst for any additional instructions.
-
-Otherwise, simply re-install the new codebase over the current one - e.g.
-by ``pip install --process-dependency-links
-https://github.com/matrix-org/synapse/tarball/master``
-if using pip, or by ``git pull`` if running off a git working copy.
+The instructions for upgrading synapse are in `UPGRADE.rst`_.
+Please check these instructions as upgrading may require extra steps for some
+versions of synapse.
+
+.. _UPGRADE.rst: UPGRADE.rst
 
 Setting up Federation
 =====================
UPGRADE.rst (33 lines changed)

@@ -1,3 +1,36 @@
+Upgrading Synapse
+=================
+
+Before upgrading, check whether any special steps are required to upgrade from
+what you currently have installed to the current version of synapse. The extra
+instructions that may be required are listed later in this document.
+
+If synapse was installed in a virtualenv then activate that virtualenv before
+upgrading. If synapse is installed in a virtualenv in ``~/.synapse/`` then run:
+
+.. code:: bash
+
+    source ~/.synapse/bin/activate
+
+If synapse was installed using pip then upgrade to the latest version by
+running:
+
+.. code:: bash
+
+    pip install --upgrade --process-dependency-links https://github.com/matrix-org/synapse/tarball/master
+
+If synapse was installed using git then upgrade to the latest version by
+running:
+
+.. code:: bash
+
+    # Pull the latest version of the master branch.
+    git pull
+    # Update the versions of synapse's python dependencies.
+    python synapse/python_dependencies.py | xargs -n1 pip install
+
+
 Upgrading to v0.9.0
 ===================
@@ -16,4 +16,4 @@
 """ This is a reference implementation of a Matrix home server.
 """
 
-__version__ = "0.9.3"
+__version__ = "0.10.0-rc1"
@@ -76,6 +76,8 @@ class EventTypes(object):
     Feedback = "m.room.message.feedback"
 
     RoomHistoryVisibility = "m.room.history_visibility"
+    CanonicalAlias = "m.room.canonical_alias"
+    RoomAvatar = "m.room.avatar"
 
     # These are used for validation
     Message = "m.room.message"
@@ -40,6 +40,7 @@ class Codes(object):
    TOO_LARGE = "M_TOO_LARGE"
    EXCLUSIVE = "M_EXCLUSIVE"
    THREEPID_AUTH_FAILED = "M_THREEPID_AUTH_FAILED"
+    THREEPID_IN_USE = "THREEPID_IN_USE"


 class CodeMessageException(RuntimeError):
@@ -14,6 +14,39 @@
 # limitations under the License.
 
 from ._base import Config
+from collections import namedtuple
+
+
+ThumbnailRequirement = namedtuple(
+    "ThumbnailRequirement", ["width", "height", "method", "media_type"]
+)
+
+
+def parse_thumbnail_requirements(thumbnail_sizes):
+    """ Takes a list of dictionaries with "width", "height", and "method" keys
+    and creates a map from image media types to the thumbnail size, thumbnailing
+    method, and thumbnail media type to precalculate
+
+    Args:
+        thumbnail_sizes(list): List of dicts with "width", "height", and
+            "method" keys
+    Returns:
+        Dictionary mapping from media type string to list of
+        ThumbnailRequirement tuples.
+    """
+    requirements = {}
+    for size in thumbnail_sizes:
+        width = size["width"]
+        height = size["height"]
+        method = size["method"]
+        jpeg_thumbnail = ThumbnailRequirement(width, height, method, "image/jpeg")
+        png_thumbnail = ThumbnailRequirement(width, height, method, "image/png")
+        requirements.setdefault("image/jpeg", []).append(jpeg_thumbnail)
+        requirements.setdefault("image/gif", []).append(png_thumbnail)
+        requirements.setdefault("image/png", []).append(png_thumbnail)
+    return {
+        media_type: tuple(thumbnails)
+        for media_type, thumbnails in requirements.items()
+    }
+
+
 class ContentRepositoryConfig(Config):
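Note: as a quick illustration of what the new helper returns for a single configured size. This is a sketch only, assuming the patched module is importable as synapse.config.repository (the module that defines ContentRepositoryConfig):

    # Sketch: exercises parse_thumbnail_requirements from the hunk above.
    from synapse.config.repository import parse_thumbnail_requirements

    reqs = parse_thumbnail_requirements([{"width": 32, "height": 32, "method": "crop"}])
    print(sorted(reqs))
    # ['image/gif', 'image/jpeg', 'image/png']
    print(reqs["image/jpeg"])
    # (ThumbnailRequirement(width=32, height=32, method='crop', media_type='image/jpeg'),)
    # GIF uploads get PNG thumbnails:
    print(reqs["image/gif"])
    # (ThumbnailRequirement(width=32, height=32, method='crop', media_type='image/png'),)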
@@ -22,6 +55,10 @@ class ContentRepositoryConfig(Config):
         self.max_image_pixels = self.parse_size(config["max_image_pixels"])
         self.media_store_path = self.ensure_directory(config["media_store_path"])
         self.uploads_path = self.ensure_directory(config["uploads_path"])
+        self.dynamic_thumbnails = config["dynamic_thumbnails"]
+        self.thumbnail_requirements = parse_thumbnail_requirements(
+            config["thumbnail_sizes"]
+        )
 
     def default_config(self, config_dir_path, server_name):
         media_store = self.default_path("media_store")
@@ -38,4 +75,26 @@ class ContentRepositoryConfig(Config):
 
         # Maximum number of pixels that will be thumbnailed
         max_image_pixels: "32M"
+
+        # Whether to generate new thumbnails on the fly to precisely match
+        # the resolution requested by the client. If true then whenever
+        # a new resolution is requested by the client the server will
+        # generate a new thumbnail. If false the server will pick a thumbnail
+        # from a precalculated list.
+        dynamic_thumbnails: false
+
+        # List of thumbnails to precalculate when an image is uploaded.
+        thumbnail_sizes:
+        - width: 32
+          height: 32
+          method: crop
+        - width: 96
+          height: 96
+          method: crop
+        - width: 320
+          height: 240
+          method: scale
+        - width: 640
+          height: 480
+          method: scale
         """ % locals()
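Note: the new keys round-trip through a YAML loader in exactly the shape read_config expects. A minimal sketch using PyYAML (assumed available, since Synapse loads its config as YAML):

    import textwrap
    import yaml  # PyYAML, an assumption here

    fragment = textwrap.dedent("""\
        dynamic_thumbnails: false
        thumbnail_sizes:
        - width: 32
          height: 32
          method: crop
        - width: 320
          height: 240
          method: scale
        """)
    config = yaml.safe_load(fragment)
    print(config["dynamic_thumbnails"])            # False
    print(config["thumbnail_sizes"][1]["method"])  # scale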
@@ -18,7 +18,7 @@ from twisted.internet import defer
 from synapse.api.errors import LimitExceededError, SynapseError
 from synapse.crypto.event_signing import add_hashes_and_signatures
 from synapse.api.constants import Membership, EventTypes
-from synapse.types import UserID
+from synapse.types import UserID, RoomAlias
 
 from synapse.util.logcontext import PreserveLoggingContext
@@ -130,6 +130,22 @@ class BaseHandler(object):
                     returned_invite.signatures
                 )
 
+        if event.type == EventTypes.CanonicalAlias:
+            # Check the alias is actually valid (at this time at least)
+            room_alias_str = event.content.get("alias", None)
+            if room_alias_str:
+                room_alias = RoomAlias.from_string(room_alias_str)
+                directory_handler = self.hs.get_handlers().directory_handler
+                mapping = yield directory_handler.get_association(room_alias)
+
+                if mapping["room_id"] != event.room_id:
+                    raise SynapseError(
+                        400,
+                        "Room alias %s does not point to the room" % (
+                            room_alias_str,
+                        )
+                    )
+
         destinations = set(extra_destinations)
         for k, s in context.current_state.items():
             try:
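Note: the rule enforced here, restated as a plain standalone function for clarity (the helper and lookup below are hypothetical, not Synapse APIs):

    def check_canonical_alias(event_content, room_id, lookup_alias):
        """Reject an m.room.canonical_alias event whose alias does not already
        map to this room in the directory. lookup_alias stands in for the
        directory handler's get_association."""
        alias = event_content.get("alias", None)
        if not alias:
            return  # clearing the canonical alias is always fine
        mapping = lookup_alias(alias)
        if mapping["room_id"] != room_id:
            raise ValueError("Room alias %s does not point to the room" % (alias,))

    directory = {"#news:example.com": {"room_id": "!abc:example.com"}}
    check_canonical_alias({"alias": "#news:example.com"}, "!abc:example.com",
                          directory.__getitem__)          # passes
    # check_canonical_alias({"alias": "#news:example.com"}, "!other:example.com",
    #                       directory.__getitem__)        # raises ValueError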
@@ -162,7 +162,7 @@ class AuthHandler(BaseHandler):
         if not user_id.startswith('@'):
             user_id = UserID.create(user_id, self.hs.hostname).to_string()
 
-        self._check_password(user_id, password)
+        yield self._check_password(user_id, password)
         defer.returnValue(user_id)
 
     @defer.inlineCallbacks
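Note: this one-word change matters because of how Twisted's ``defer.inlineCallbacks`` works: if the Deferred returned by ``_check_password`` is not yielded, a wrong password no longer fails the login. A minimal, standalone illustration (not Synapse code):

    from twisted.internet import defer

    @defer.inlineCallbacks
    def check_password(user_id, password):
        yield defer.succeed(None)          # stand-in for a database hit
        if password != "s3cret":
            raise ValueError("bad password")

    @defer.inlineCallbacks
    def login_without_yield(user_id, password):
        check_password(user_id, password)  # Deferred dropped: the error is lost
        defer.returnValue(user_id)

    @defer.inlineCallbacks
    def login_with_yield(user_id, password):
        yield check_password(user_id, password)  # failure propagates to caller
        defer.returnValue(user_id)

    login_without_yield("@alice:example.com", "wrong").addBoth(print)
    # prints "@alice:example.com" -- the bad password goes unnoticed
    login_with_yield("@alice:example.com", "wrong").addBoth(print)
    # prints a Failure wrapping ValueError("bad password")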
@@ -70,6 +70,14 @@ class EventStreamHandler(BaseHandler):
             self._streams_per_user[auth_user] += 1
 
             rm_handler = self.hs.get_handlers().room_member_handler
+
+            app_service = yield self.store.get_app_service_by_user_id(
+                auth_user.to_string()
+            )
+            if app_service:
+                rooms = yield self.store.get_app_service_rooms(app_service)
+                room_ids = set(r.room_id for r in rooms)
+            else:
                 room_ids = yield rm_handler.get_joined_rooms_for_user(auth_user)
 
             if timeout:
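Note: the same room-selection pattern recurs in the sync handler hunks below. Condensed into one hypothetical helper (names are illustrative, not Synapse APIs): application-service users see every room their AS is interested in, everyone else sees the rooms they have joined.

    def rooms_for_user(user_id, get_app_service, get_as_rooms, get_joined_rooms):
        app_service = get_app_service(user_id)
        if app_service:
            return set(r.room_id for r in get_as_rooms(app_service))
        return set(get_joined_rooms(user_id))

    class Room(object):
        def __init__(self, room_id):
            self.room_id = room_id

    print(rooms_for_user(
        "@_irc_bot:example.com",
        get_app_service=lambda uid: "irc-bridge" if uid.startswith("@_irc_") else None,
        get_as_rooms=lambda asv: [Room("!irc1:example.com"), Room("!irc2:example.com")],
        get_joined_rooms=lambda uid: ["!joined:example.com"],
    ))  # {'!irc1:example.com', '!irc2:example.com'}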
@@ -117,3 +117,28 @@ class IdentityHandler(BaseHandler):
         except CodeMessageException as e:
             data = json.loads(e.msg)
         defer.returnValue(data)
+
+    @defer.inlineCallbacks
+    def requestEmailToken(self, id_server, email, client_secret, send_attempt, **kwargs):
+        yield run_on_reactor()
+        http_client = SimpleHttpClient(self.hs)
+
+        params = {
+            'email': email,
+            'client_secret': client_secret,
+            'send_attempt': send_attempt,
+        }
+        params.update(kwargs)
+
+        try:
+            data = yield http_client.post_urlencoded_get_json(
+                "https://%s%s" % (
+                    id_server,
+                    "/_matrix/identity/api/v1/validate/email/requestToken"
+                ),
+                params
+            )
+            defer.returnValue(data)
+        except CodeMessageException as e:
+            logger.info("Proxied requestToken failed: %r", e)
+            raise e
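Note: a hedged sketch of the identity-server call this method proxies: a form-encoded POST to the v1 requestToken endpoint (the path is taken from the hunk above); "matrix.org" is only an example id_server, and this is not Synapse's HTTP client.

    import json
    import urllib.parse
    import urllib.request

    def request_email_token(id_server, email, client_secret, send_attempt):
        url = "https://%s/_matrix/identity/api/v1/validate/email/requestToken" % id_server
        form = urllib.parse.urlencode({
            "email": email,
            "client_secret": client_secret,
            "send_attempt": send_attempt,
        }).encode("ascii")
        with urllib.request.urlopen(urllib.request.Request(url, data=form)) as resp:
            return json.loads(resp.read().decode("utf-8"))

    # Requires network access and a reachable identity server:
    # request_email_token("matrix.org", "alice@example.com", "s3cret", 1)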
@@ -247,9 +247,11 @@ class RoomCreationHandler(BaseHandler):
             },
             "users_default": 0,
             "events": {
-                EventTypes.Name: 100,
+                EventTypes.Name: 50,
                 EventTypes.PowerLevels: 100,
                 EventTypes.RoomHistoryVisibility: 100,
+                EventTypes.CanonicalAlias: 50,
+                EventTypes.RoomAvatar: 50,
             },
             "events_default": 0,
             "state_default": 50,
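Note: a sketch of the m.room.power_levels lookup order these defaults feed into, per the Matrix spec (this is not Synapse's auth code): the per-event override wins, otherwise state_default or events_default applies.

    def required_power_level(event_type, is_state_event, power_levels):
        events = power_levels.get("events", {})
        if event_type in events:
            return events[event_type]
        if is_state_event:
            return power_levels.get("state_default", 50)
        return power_levels.get("events_default", 0)

    power_levels = {
        "events": {
            "m.room.name": 50,
            "m.room.power_levels": 100,
            "m.room.history_visibility": 100,
            "m.room.canonical_alias": 50,
            "m.room.avatar": 50,
        },
        "events_default": 0,
        "state_default": 50,
    }
    print(required_power_level("m.room.canonical_alias", True, power_levels))  # 50
    print(required_power_level("m.room.topic", True, power_levels))            # 50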
@@ -557,12 +559,6 @@ class RoomMemberHandler(BaseHandler):
         """Returns a list of roomids that the user has any of the given
         membership states in."""
 
-        app_service = yield self.store.get_app_service_by_user_id(
-            user.to_string()
-        )
-        if app_service:
-            rooms = yield self.store.get_app_service_rooms(app_service)
-        else:
         rooms = yield self.store.get_rooms_for_user(
             user.to_string(),
         )
@@ -96,9 +96,18 @@ class SyncHandler(BaseHandler):
             return self.current_sync_for_user(sync_config, since_token)
 
         rm_handler = self.hs.get_handlers().room_member_handler
+
+        app_service = yield self.store.get_app_service_by_user_id(
+            sync_config.user.to_string()
+        )
+        if app_service:
+            rooms = yield self.store.get_app_service_rooms(app_service)
+            room_ids = set(r.room_id for r in rooms)
+        else:
             room_ids = yield rm_handler.get_joined_rooms_for_user(
                 sync_config.user
             )
 
         result = yield self.notifier.wait_for_events(
             sync_config.user, room_ids,
             sync_config.filter, timeout, current_sync_callback
@@ -229,7 +238,16 @@ class SyncHandler(BaseHandler):
         logger.debug("Typing %r", typing_by_room)
 
         rm_handler = self.hs.get_handlers().room_member_handler
-        room_ids = yield rm_handler.get_joined_rooms_for_user(sync_config.user)
+        app_service = yield self.store.get_app_service_by_user_id(
+            sync_config.user.to_string()
+        )
+        if app_service:
+            rooms = yield self.store.get_app_service_rooms(app_service)
+            room_ids = set(r.room_id for r in rooms)
+        else:
+            room_ids = yield rm_handler.get_joined_rooms_for_user(
+                sync_config.user
+            )
 
         # TODO (mjark): Does public mean "published"?
         published_rooms = yield self.store.get_rooms(is_public=True)
@@ -294,6 +294,12 @@ class Pusher(object):
                 if not single_event:
                     self.last_token = chunk['end']
                     logger.debug("Event stream timeout for pushkey %s", self.pushkey)
+                    yield self.store.update_pusher_last_token(
+                        self.app_id,
+                        self.pushkey,
+                        self.user_name,
+                        self.last_token
+                    )
                     return
 
                 if not self.alive:

@@ -345,7 +351,7 @@ class Pusher(object):
             if processed:
                 self.backoff_delay = Pusher.INITIAL_BACKOFF
                 self.last_token = chunk['end']
-                self.store.update_pusher_last_token_and_success(
+                yield self.store.update_pusher_last_token_and_success(
                     self.app_id,
                     self.pushkey,
                     self.user_name,

@@ -354,7 +360,7 @@ class Pusher(object):
                 )
                 if self.failing_since:
                     self.failing_since = None
-                    self.store.update_pusher_failing_since(
+                    yield self.store.update_pusher_failing_since(
                         self.app_id,
                         self.pushkey,
                         self.user_name,

@@ -362,7 +368,7 @@ class Pusher(object):
             else:
                 if not self.failing_since:
                     self.failing_since = self.clock.time_msec()
-                    self.store.update_pusher_failing_since(
+                    yield self.store.update_pusher_failing_since(
                         self.app_id,
                         self.pushkey,
                         self.user_name,

@@ -380,7 +386,7 @@ class Pusher(object):
                                 self.user_name, self.pushkey)
                     self.backoff_delay = Pusher.INITIAL_BACKOFF
                     self.last_token = chunk['end']
-                    self.store.update_pusher_last_token(
+                    yield self.store.update_pusher_last_token(
                         self.app_id,
                         self.pushkey,
                         self.user_name,

@@ -388,7 +394,7 @@ class Pusher(object):
                     )
 
                     self.failing_since = None
-                    self.store.update_pusher_failing_since(
+                    yield self.store.update_pusher_failing_since(
                         self.app_id,
                         self.pushkey,
                         self.user_name,
@@ -74,16 +74,23 @@ class LoginRestServlet(ClientV1RestServlet):
 
     @defer.inlineCallbacks
     def do_password_login(self, login_submission):
-        if not login_submission["user"].startswith('@'):
-            login_submission["user"] = UserID.create(
-                login_submission["user"], self.hs.hostname).to_string()
+        if 'medium' in login_submission and 'address' in login_submission:
+            user_id = yield self.hs.get_datastore().get_user_id_by_threepid(
+                login_submission['medium'], login_submission['address']
+            )
+        else:
+            user_id = login_submission['user']
+
+        if not user_id.startswith('@'):
+            user_id = UserID.create(
+                user_id, self.hs.hostname).to_string()
 
         token = yield self.handlers.auth_handler.login_with_password(
-            user_id=login_submission["user"],
+            user_id=user_id,
             password=login_submission["password"])
 
         result = {
-            "user_id": login_submission["user"],  # may have changed
+            "user_id": user_id,  # may have changed
             "access_token": token,
             "home_server": self.hs.hostname,
         }
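Note: illustrative only. These are the two login_submission shapes the servlet now accepts, with a stand-in for the user-id resolution (threepid_lookup plays the role of get_user_id_by_threepid; it is not Synapse's API).

    by_user = {"type": "m.login.password", "user": "alice", "password": "s3cret"}
    by_threepid = {"type": "m.login.password", "medium": "email",
                   "address": "alice@example.com", "password": "s3cret"}

    def resolve_user(submission, hostname, threepid_lookup):
        if 'medium' in submission and 'address' in submission:
            user_id = threepid_lookup(submission['medium'], submission['address'])
        else:
            user_id = submission['user']
        if not user_id.startswith('@'):
            user_id = "@%s:%s" % (user_id, hostname)
        return user_id

    lookup = lambda medium, address: "@alice:example.com"
    print(resolve_user(by_user, "example.com", lookup))      # @alice:example.com
    print(resolve_user(by_threepid, "example.com", lookup))  # @alice:example.com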
@@ -79,7 +79,7 @@ class PasswordRestServlet(RestServlet):
         new_password = params['new_password']
 
         yield self.auth_handler.set_password(
-            user_id, new_password, None
+            user_id, new_password
         )
 
         defer.returnValue((200, {}))
@@ -54,6 +54,11 @@ class RegisterRestServlet(RestServlet):
     @defer.inlineCallbacks
     def on_POST(self, request):
         yield run_on_reactor()
 
+        if '/register/email/requestToken' in request.path:
+            ret = yield self.onEmailTokenRequest(request)
+            defer.returnValue(ret)
+
         body = parse_json_dict_from_request(request)
 
         # we do basic sanity checks here because the auth layer will store these
@@ -208,6 +213,29 @@ class RegisterRestServlet(RestServlet):
             "home_server": self.hs.hostname,
         }
 
+    @defer.inlineCallbacks
+    def onEmailTokenRequest(self, request):
+        body = parse_json_dict_from_request(request)
+
+        required = ['id_server', 'client_secret', 'email', 'send_attempt']
+        absent = []
+        for k in required:
+            if k not in body:
+                absent.append(k)
+
+        if len(absent) > 0:
+            raise SynapseError(400, "Missing params: %r" % absent, Codes.MISSING_PARAM)
+
+        existingUid = yield self.hs.get_datastore().get_user_id_by_threepid(
+            'email', body['email']
+        )
+
+        if existingUid is not None:
+            raise SynapseError(400, "Email is already in use", Codes.THREEPID_IN_USE)
+
+        ret = yield self.identity_handler.requestEmailToken(**body)
+        defer.returnValue((200, ret))
+
+
 def register_servlets(hs, http_server):
     RegisterRestServlet(hs).register(http_server)
@@ -69,6 +69,8 @@ class BaseMediaResource(Resource):
         self.filepaths = filepaths
         self.version_string = hs.version_string
         self.downloads = {}
+        self.dynamic_thumbnails = hs.config.dynamic_thumbnails
+        self.thumbnail_requirements = hs.config.thumbnail_requirements
 
     def _respond_404(self, request):
         respond_with_json(
@@ -208,22 +210,74 @@ class BaseMediaResource(Resource):
             self._respond_404(request)
 
     def _get_thumbnail_requirements(self, media_type):
-        if media_type == "image/jpeg":
-            return (
-                (32, 32, "crop", "image/jpeg"),
-                (96, 96, "crop", "image/jpeg"),
-                (320, 240, "scale", "image/jpeg"),
-                (640, 480, "scale", "image/jpeg"),
-            )
-        elif (media_type == "image/png") or (media_type == "image/gif"):
-            return (
-                (32, 32, "crop", "image/png"),
-                (96, 96, "crop", "image/png"),
-                (320, 240, "scale", "image/png"),
-                (640, 480, "scale", "image/png"),
-            )
-        else:
-            return ()
+        return self.thumbnail_requirements.get(media_type, ())
+
+    def _generate_thumbnail(self, input_path, t_path, t_width, t_height,
+                            t_method, t_type):
+        thumbnailer = Thumbnailer(input_path)
+        m_width = thumbnailer.width
+        m_height = thumbnailer.height
+
+        if m_width * m_height >= self.max_image_pixels:
+            logger.info(
+                "Image too large to thumbnail %r x %r > %r",
+                m_width, m_height, self.max_image_pixels
+            )
+            return
+
+        if t_method == "crop":
+            t_len = thumbnailer.crop(t_path, t_width, t_height, t_type)
+        elif t_method == "scale":
+            t_len = thumbnailer.scale(t_path, t_width, t_height, t_type)
+        else:
+            t_len = None
+
+        return t_len
+
+    @defer.inlineCallbacks
+    def _generate_local_exact_thumbnail(self, media_id, t_width, t_height,
+                                        t_method, t_type):
+        input_path = self.filepaths.local_media_filepath(media_id)
+
+        t_path = self.filepaths.local_media_thumbnail(
+            media_id, t_width, t_height, t_type, t_method
+        )
+        self._makedirs(t_path)
+
+        t_len = yield threads.deferToThread(
+            self._generate_thumbnail,
+            input_path, t_path, t_width, t_height, t_method, t_type
+        )
+
+        if t_len:
+            yield self.store.store_local_thumbnail(
+                media_id, t_width, t_height, t_type, t_method, t_len
+            )
+
+        defer.returnValue(t_path)
+
+    @defer.inlineCallbacks
+    def _generate_remote_exact_thumbnail(self, server_name, file_id, media_id,
+                                         t_width, t_height, t_method, t_type):
+        input_path = self.filepaths.remote_media_filepath(server_name, file_id)
+
+        t_path = self.filepaths.remote_media_thumbnail(
+            server_name, file_id, t_width, t_height, t_type, t_method
+        )
+        self._makedirs(t_path)
+
+        t_len = yield threads.deferToThread(
+            self._generate_thumbnail,
+            input_path, t_path, t_width, t_height, t_method, t_type
+        )
+
+        if t_len:
+            yield self.store.store_remote_media_thumbnail(
+                server_name, media_id, file_id,
+                t_width, t_height, t_type, t_method, t_len
+            )
+
+        defer.returnValue(t_path)
+
     @defer.inlineCallbacks
     def _generate_local_thumbnails(self, media_id, media_info):
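Note: a standalone sketch (not the Thumbnailer class used above) of what the two thumbnail methods mean: "scale" shrinks while preserving aspect ratio, "crop" fills the exact box and trims the overflow. It assumes Pillow is installed, which the README already lists as a media-repository dependency.

    from PIL import Image, ImageOps

    def make_thumbnail(input_path, output_path, width, height, method):
        with Image.open(input_path) as image:
            if method == "crop":
                # Resize so the whole box is filled, cropping the excess.
                thumb = ImageOps.fit(image, (width, height))
            elif method == "scale":
                # Shrink in place, keeping the aspect ratio within the box.
                image.thumbnail((width, height))
                thumb = image.copy()
            else:
                raise ValueError("unknown method %r" % (method,))
            thumb.save(output_path)
        return output_path

    # make_thumbnail("cat.png", "cat-32x32-crop.png", 32, 32, "crop")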
@@ -43,9 +43,20 @@ class ThumbnailResource(BaseMediaResource):
         m_type = parse_string(request, "type", "image/png")
 
         if server_name == self.server_name:
+            if self.dynamic_thumbnails:
+                yield self._select_or_generate_local_thumbnail(
+                    request, media_id, width, height, method, m_type
+                )
+            else:
                 yield self._respond_local_thumbnail(
                     request, media_id, width, height, method, m_type
                 )
+        else:
+            if self.dynamic_thumbnails:
+                yield self._select_or_generate_remote_thumbnail(
+                    request, server_name, media_id,
+                    width, height, method, m_type
+                )
             else:
                 yield self._respond_remote_thumbnail(
                     request, server_name, media_id,
@@ -82,6 +93,87 @@ class ThumbnailResource(BaseMediaResource):
             request, media_info, width, height, method, m_type,
         )
 
+    @defer.inlineCallbacks
+    def _select_or_generate_local_thumbnail(self, request, media_id, desired_width,
+                                            desired_height, desired_method,
+                                            desired_type):
+        media_info = yield self.store.get_local_media(media_id)
+
+        if not media_info:
+            self._respond_404(request)
+            return
+
+        thumbnail_infos = yield self.store.get_local_media_thumbnails(media_id)
+        for info in thumbnail_infos:
+            t_w = info["thumbnail_width"] == desired_width
+            t_h = info["thumbnail_height"] == desired_height
+            t_method = info["thumbnail_method"] == desired_method
+            t_type = info["thumbnail_type"] == desired_type
+
+            if t_w and t_h and t_method and t_type:
+                file_path = self.filepaths.local_media_thumbnail(
+                    media_id, desired_width, desired_height, desired_type, desired_method,
+                )
+                yield self._respond_with_file(request, desired_type, file_path)
+                return
+
+        logger.debug("We don't have a local thumbnail of that size. Generating")
+
+        # Okay, so we generate one.
+        file_path = yield self._generate_local_exact_thumbnail(
+            media_id, desired_width, desired_height, desired_method, desired_type
+        )
+
+        if file_path:
+            yield self._respond_with_file(request, desired_type, file_path)
+        else:
+            yield self._respond_default_thumbnail(
+                request, media_info, desired_width, desired_height,
+                desired_method, desired_type,
+            )
+
+    @defer.inlineCallbacks
+    def _select_or_generate_remote_thumbnail(self, request, server_name, media_id,
+                                             desired_width, desired_height,
+                                             desired_method, desired_type):
+        media_info = yield self._get_remote_media(server_name, media_id)
+
+        thumbnail_infos = yield self.store.get_remote_media_thumbnails(
+            server_name, media_id,
+        )
+
+        file_id = media_info["filesystem_id"]
+
+        for info in thumbnail_infos:
+            t_w = info["thumbnail_width"] == desired_width
+            t_h = info["thumbnail_height"] == desired_height
+            t_method = info["thumbnail_method"] == desired_method
+            t_type = info["thumbnail_type"] == desired_type
+
+            if t_w and t_h and t_method and t_type:
+                file_path = self.filepaths.remote_media_thumbnail(
+                    server_name, file_id, desired_width, desired_height,
+                    desired_type, desired_method,
+                )
+                yield self._respond_with_file(request, desired_type, file_path)
+                return
+
+        logger.debug("We don't have a local thumbnail of that size. Generating")
+
+        # Okay, so we generate one.
+        file_path = yield self._generate_remote_exact_thumbnail(
+            server_name, file_id, media_id, desired_width,
+            desired_height, desired_method, desired_type
+        )
+
+        if file_path:
+            yield self._respond_with_file(request, desired_type, file_path)
+        else:
+            yield self._respond_default_thumbnail(
+                request, media_info, desired_width, desired_height,
+                desired_method, desired_type,
+            )
+
     @defer.inlineCallbacks
     def _respond_remote_thumbnail(self, request, server_name, media_id, width,
                                   height, method, m_type):
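Note: a hypothetical helper (not part of the diff) condensing the exact-match loop both _select_or_generate_* methods use before falling back to generation.

    def find_exact_thumbnail(thumbnail_infos, width, height, method, media_type):
        """Return the first stored thumbnail matching all four attributes, or None."""
        for info in thumbnail_infos:
            if (info["thumbnail_width"] == width
                    and info["thumbnail_height"] == height
                    and info["thumbnail_method"] == method
                    and info["thumbnail_type"] == media_type):
                return info
        return None

    infos = [
        {"thumbnail_width": 32, "thumbnail_height": 32,
         "thumbnail_method": "crop", "thumbnail_type": "image/png"},
        {"thumbnail_width": 640, "thumbnail_height": 480,
         "thumbnail_method": "scale", "thumbnail_type": "image/png"},
    ]
    print(find_exact_thumbnail(infos, 640, 480, "scale", "image/png") is not None)  # True
    print(find_exact_thumbnail(infos, 100, 100, "crop", "image/png") is not None)   # False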
|
19
synapse/storage/schema/delta/22/user_threepids_unique.sql
Normal file
19
synapse/storage/schema/delta/22/user_threepids_unique.sql
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
CREATE TABLE IF NOT EXISTS user_threepids2 (
|
||||||
|
user_id TEXT NOT NULL,
|
||||||
|
medium TEXT NOT NULL,
|
||||||
|
address TEXT NOT NULL,
|
||||||
|
validated_at BIGINT NOT NULL,
|
||||||
|
added_at BIGINT NOT NULL,
|
||||||
|
CONSTRAINT medium_address UNIQUE (medium, address)
|
||||||
|
);
|
||||||
|
|
||||||
|
INSERT INTO user_threepids2
|
||||||
|
SELECT * FROM user_threepids WHERE added_at IN (
|
||||||
|
SELECT max(added_at) FROM user_threepids GROUP BY medium, address
|
||||||
|
)
|
||||||
|
;
|
||||||
|
|
||||||
|
DROP TABLE user_threepids;
|
||||||
|
ALTER TABLE user_threepids2 RENAME TO user_threepids;
|
||||||
|
|
||||||
|
CREATE INDEX user_threepids_user_id ON user_threepids(user_id);
|
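Note: a self-contained sketch (using sqlite3, one of the databases Synapse supports) of what this migration does: rebuild user_threepids with a UNIQUE (medium, address) constraint, keeping only the most recently added row per address. The source-table schema below is simplified for the demo.

    import sqlite3

    MIGRATION = """
    CREATE TABLE IF NOT EXISTS user_threepids2 (
        user_id TEXT NOT NULL,
        medium TEXT NOT NULL,
        address TEXT NOT NULL,
        validated_at BIGINT NOT NULL,
        added_at BIGINT NOT NULL,
        CONSTRAINT medium_address UNIQUE (medium, address)
    );
    INSERT INTO user_threepids2
        SELECT * FROM user_threepids WHERE added_at IN (
            SELECT max(added_at) FROM user_threepids GROUP BY medium, address
        );
    DROP TABLE user_threepids;
    ALTER TABLE user_threepids2 RENAME TO user_threepids;
    CREATE INDEX user_threepids_user_id ON user_threepids(user_id);
    """

    db = sqlite3.connect(":memory:")
    db.execute("CREATE TABLE user_threepids (user_id TEXT, medium TEXT, address TEXT, "
               "validated_at BIGINT, added_at BIGINT)")
    # Two rows claim the same email; only the later one should survive.
    db.execute("INSERT INTO user_threepids VALUES ('@a:hs', 'email', 'x@example.com', 1, 1)")
    db.execute("INSERT INTO user_threepids VALUES ('@b:hs', 'email', 'x@example.com', 2, 2)")
    db.executescript(MIGRATION)
    print(db.execute("SELECT user_id FROM user_threepids").fetchall())  # [('@b:hs',)]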
@@ -13,6 +13,7 @@ class RegisterRestServletTestCase(unittest.TestCase):
         self.request_data = ""
         self.request = Mock(
             content=Mock(read=Mock(side_effect=lambda: self.request_data)),
+            path='/_matrix/api/v2_alpha/register'
         )
         self.request.args = {}