commit faee41c303
Merge remote-tracking branch 'origin/develop' into webclient_data_centralisation
CHANGES.rst (11 lines changed)
@@ -1,3 +1,14 @@
+Latest
+======
+
+Registration API:
+ * The registration API has been overhauled to function like the login API. In
+   practice, this means registration requests must now include the following:
+   'type':'m.login.password'. See UPGRADE for more information on this.
+ * The 'user_id' key has been renamed to 'user' to better match the login API.
+ * There is an additional login type: 'm.login.email.identity'.
+ * The command client and web client have been updated to reflect these changes.
+
 Changes in synapse 0.2.3 (2014-09-12)
 =====================================
 
UPGRADE.rst (23 lines changed)
@@ -1,3 +1,26 @@
+Upgrading to Latest
+===================
+
+This registration API now closely matches the login API. This introduces a bit
+more backwards and forwards between the HS and the client, but this improves
+the overall flexibility of the API. You can now GET on /register to retrieve a list
+of valid registration flows. Upon choosing one, they are submitted in the same
+way as login, e.g::
+
+  {
+    type: m.login.password,
+    user: foo,
+    password: bar
+  }
+
+The default HS supports 2 flows, with and without Identity Server email
+authentication. Enabling captcha on the HS will add in an extra step to all
+flows: ``m.login.recaptcha`` which must be completed before you can transition
+to the next stage. There is a new login type: ``m.login.email.identity`` which
+contains the ``threepidCreds`` key which were previously sent in the original
+register request. For more information on this, see the specification.
+
+
 Upgrading to v0.2.0
 ===================
 
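As a rough sketch of the flow the UPGRADE notes describe (illustrative only, not part of this commit; the base URL, API prefix and credentials below are assumptions, using the Python requests library):

    # Sketch: register against the new /register flow, assuming a local HS.
    import requests

    BASE = "http://localhost:8008/_matrix/client/api/v1"  # prefix is illustrative

    # 1. Ask the home server which registration flows it supports.
    flows = requests.get(BASE + "/register").json()["flows"]

    # 2. Pick the plain password flow if it is offered as a single stage.
    if any(f["type"] == "m.login.password" and "stages" not in f for f in flows):
        # 3. Submit it exactly as a login request would be submitted.
        result = requests.post(BASE + "/register", json={
            "type": "m.login.password",
            "user": "foo",
            "password": "bar",
        }).json()
        print(result["user_id"], result["access_token"])
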
@@ -145,35 +145,50 @@ class SynapseCmd(cmd.Cmd):
         <noupdate> : Do not automatically clobber config values.
         """
         args = self._parse(line, ["userid", "noupdate"])
-        path = "/register"
 
         password = None
         pwd = None
         pwd2 = "_"
         while pwd != pwd2:
-            pwd = getpass.getpass("(Optional) Type a password for this user: ")
-            if len(pwd) == 0:
-                print "Not using a password for this user."
-                break
+            pwd = getpass.getpass("Type a password for this user: ")
             pwd2 = getpass.getpass("Retype the password: ")
-            if pwd != pwd2:
+            if pwd != pwd2 or len(pwd) == 0:
                 print "Password mismatch."
+                pwd = None
             else:
                 password = pwd
 
-        body = {}
+        body = {
+            "type": "m.login.password"
+        }
         if "userid" in args:
-            body["user_id"] = args["userid"]
+            body["user"] = args["userid"]
         if password:
            body["password"] = password
 
-        reactor.callFromThread(self._do_register, "POST", path, body,
+        reactor.callFromThread(self._do_register, body,
                                "noupdate" not in args)
 
     @defer.inlineCallbacks
-    def _do_register(self, method, path, data, update_config):
-        url = self._url() + path
-        json_res = yield self.http_client.do_request(method, url, data=data)
+    def _do_register(self, data, update_config):
+        # check the registration flows
+        url = self._url() + "/register"
+        json_res = yield self.http_client.do_request("GET", url)
+        print json.dumps(json_res, indent=4)
+
+        passwordFlow = None
+        for flow in json_res["flows"]:
+            if flow["type"] == "m.login.recaptcha" or ("stages" in flow and "m.login.recaptcha" in flow["stages"]):
+                print "Unable to register: Home server requires captcha."
+                return
+            if flow["type"] == "m.login.password" and "stages" not in flow:
+                passwordFlow = flow
+                break
+
+        if not passwordFlow:
+            return
+
+        json_res = yield self.http_client.do_request("POST", url, data=data)
         print json.dumps(json_res, indent=4)
         if update_config and "user_id" in json_res:
             self.config["user"] = json_res["user_id"]
@@ -3,35 +3,38 @@
     "apis": [
         {
             "operations": [
+                {
+                    "method": "GET",
+                    "nickname": "get_registration_info",
+                    "notes": "All login stages MUST be mentioned if there is >1 login type.",
+                    "summary": "Get the login mechanism to use when registering.",
+                    "type": "RegistrationFlows"
+                },
                 {
                     "method": "POST",
-                    "nickname": "register",
-                    "notes": "Volatile: This API is likely to change.",
+                    "nickname": "submit_registration",
+                    "notes": "If this is part of a multi-stage registration, there MUST be a 'session' key.",
                     "parameters": [
                         {
-                            "description": "A registration request",
+                            "description": "A registration submission",
                            "name": "body",
                            "paramType": "body",
                            "required": true,
-                            "type": "RegistrationRequest"
+                            "type": "RegistrationSubmission"
                        }
                    ],
                    "responseMessages": [
                        {
                            "code": 400,
-                            "message": "No JSON object."
+                            "message": "Bad login type"
                        },
                        {
                            "code": 400,
-                            "message": "User ID must only contain characters which do not require url encoding."
-                        },
-                        {
-                            "code": 400,
-                            "message": "User ID already taken."
+                            "message": "Missing JSON keys"
                        }
                    ],
-                    "summary": "Register with the home server.",
-                    "type": "RegistrationResponse"
+                    "summary": "Submit a registration action.",
+                    "type": "RegistrationResult"
                }
            ],
            "path": "/register"
@@ -42,30 +45,68 @@
        "application/json"
    ],
    "models": {
-        "RegistrationResponse": {
-            "id": "RegistrationResponse",
+        "RegistrationFlows": {
+            "id": "RegistrationFlows",
+            "properties": {
+                "flows": {
+                    "description": "A list of valid registration flows.",
+                    "type": "array",
+                    "items": {
+                        "$ref": "RegistrationInfo"
+                    }
+                }
+            }
+        },
+        "RegistrationInfo": {
+            "id": "RegistrationInfo",
+            "properties": {
+                "stages": {
+                    "description": "Multi-stage registration only: An array of all the login types required to registration.",
+                    "items": {
+                        "$ref": "string"
+                    },
+                    "type": "array"
+                },
+                "type": {
+                    "description": "The first login type that must be used when logging in.",
+                    "type": "string"
+                }
+            }
+        },
+        "RegistrationResult": {
+            "id": "RegistrationResult",
            "properties": {
                "access_token": {
-                    "description": "The access token for this user.",
+                    "description": "The access token for this user's registration if this is the final stage of the registration process.",
                    "type": "string"
                },
                "user_id": {
-                    "description": "The fully-qualified user ID.",
+                    "description": "The user's fully-qualified user ID.",
                    "type": "string"
                },
-                "home_server": {
-                    "description": "The name of the home server.",
+                "next": {
+                    "description": "Multi-stage registration only: The next registration type to submit.",
+                    "type": "string"
+                },
+                "session": {
+                    "description": "Multi-stage registration only: The session token to send when submitting the next registration type.",
                    "type": "string"
                }
            }
        },
-        "RegistrationRequest": {
-            "id": "RegistrationRequest",
+        "RegistrationSubmission": {
+            "id": "RegistrationSubmission",
            "properties": {
-                "user_id": {
-                    "description": "The desired user ID. If not specified, a random user ID will be allocated.",
-                    "type": "string",
-                    "required": false
+                "type": {
+                    "description": "The type of registration being submitted.",
+                    "type": "string"
+                },
+                "session": {
+                    "description": "Multi-stage registration only: The session token from an earlier registration stage.",
+                    "type": "string"
+                },
+                "_registration_type_defined_keys_": {
+                    "description": "Keys as defined by the specified registration type, e.g. \"user\", \"password\""
                }
            }
        }
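Example payloads matching the models above (hand-written illustrations, not taken from the commit), expressed as Python dicts:

    # GET /register -> RegistrationFlows
    example_flows = {
        "flows": [
            {"type": "m.login.recaptcha",
             "stages": ["m.login.recaptcha", "m.login.password"]},
            {"type": "m.login.password"},
        ]
    }

    # POST /register, intermediate stage of a multi-stage flow -> RegistrationResult
    example_intermediate_result = {
        "next": "m.login.password",     # the next stage to submit
        "session": "abcdefghijklmnop",  # echo this back on the next POST
    }

    # Final stage -> RegistrationResult carrying the credentials
    example_final_result = {
        "user_id": "@foo:example.com",
        "access_token": "some_opaque_token",
    }
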
@@ -1305,12 +1305,6 @@ display name other than it being a valid unicode string.
 
 Registration and login
 ======================
-.. WARNING::
-  The registration API is likely to change.
-
-.. TODO
-  - TODO Kegan : Make registration like login (just omit the "user" key on the
-    initial request?)
 
 Clients must register with a home server in order to use Matrix. After
 registering, the client will be given an access token which must be used in ALL
@@ -1323,9 +1317,11 @@ a token sent to their email address, etc. This specification does not define how
 home servers should authorise their users who want to login to their existing
 accounts, but instead defines the standard interface which implementations
 should follow so that ANY client can login to ANY home server. Clients login
-using the |login|_ API.
+using the |login|_ API. Clients register using the |register|_ API. Registration
+follows the same procedure as login, but the path requests are sent to are
+different.
 
-The login process breaks down into the following:
+The registration/login process breaks down into the following:
   1. Determine the requirements for logging in.
   2. Submit the login stage credentials.
   3. Get credentials or be told the next stage in the login process and repeat
@@ -1383,7 +1379,7 @@ This specification defines the following login types:
 - ``m.login.oauth2``
 - ``m.login.email.code``
 - ``m.login.email.url``
+- ``m.login.email.identity``
 
 Password-based
 --------------
@@ -1531,6 +1527,31 @@ If the link has not been visited yet, a standard error response with an errcode
 ``M_LOGIN_EMAIL_URL_NOT_YET`` should be returned.
 
 
+Email-based (identity server)
+-----------------------------
+:Type:
+  ``m.login.email.identity``
+:Description:
+  Login is supported by authorising an email address with an identity server.
+
+Prior to submitting this, the client should authenticate with an identity server.
+After authenticating, the session information should be submitted to the home server.
+
+To respond to this type, reply with::
+
+  {
+    "type": "m.login.email.identity",
+    "threepidCreds": [
+      {
+        "sid": "<identity server session id>",
+        "clientSecret": "<identity server client secret>",
+        "idServer": "<url of identity server authed with, e.g. 'matrix.org:8090'>"
+      }
+    ]
+  }
+
+
 N-Factor Authentication
 -----------------------
 Multiple login stages can be combined to create N-factor authentication during login.
@@ -2242,6 +2263,9 @@ Transaction:
 .. |login| replace:: ``/login``
 .. _login: /docs/api/client-server/#!/-login
 
+.. |register| replace:: ``/register``
+.. _register: /docs/api/client-server/#!/-registration
+
 .. |/rooms/<room_id>/messages| replace:: ``/rooms/<room_id>/messages``
 .. _/rooms/<room_id>/messages: /docs/api/client-server/#!/-rooms/get_messages
 
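The three-step loop the spec describes (determine requirements, submit a stage, repeat until credentials arrive) could look roughly like this from a client's point of view. This is a sketch only, not part of the commit; `ask_user_for_stage` is a made-up stand-in for whatever UI collects each stage's data, and the `requests` library is assumed:

    import requests

    def register(base_url, ask_user_for_stage):
        # 1. Determine the requirements: fetch the supported flows.
        flows = requests.get(base_url + "/register").json()["flows"]
        stages = flows[0].get("stages", [flows[0]["type"]])

        session = None
        for stage in stages:
            # 2. Submit this stage's credentials.
            body = ask_user_for_stage(stage)   # e.g. {"user": ..., "password": ...}
            body["type"] = stage
            if session:
                body["session"] = session
            res = requests.post(base_url + "/register", json=body).json()

            # 3. Either we get credentials back, or a session token for the next stage.
            if "access_token" in res:
                return res
            session = res.get("session", session)
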
@@ -50,3 +50,12 @@ class JoinRules(object):
     KNOCK = u"knock"
     INVITE = u"invite"
     PRIVATE = u"private"
+
+
+class LoginType(object):
+    PASSWORD = u"m.login.password"
+    OAUTH = u"m.login.oauth2"
+    EMAIL_CODE = u"m.login.email.code"
+    EMAIL_URL = u"m.login.email.url"
+    EMAIL_IDENTITY = u"m.login.email.identity"
+    RECAPTCHA = u"m.login.recaptcha"
@@ -17,6 +17,19 @@ from synapse.api.errors import SynapseError, Codes
 from synapse.util.jsonobject import JsonEncodedObject
 
 
+def serialize_event(hs, e):
+    # FIXME(erikj): To handle the case of presence events and the like
+    if not isinstance(e, SynapseEvent):
+        return e
+
+    d = e.get_dict()
+    if "age_ts" in d:
+        d["age"] = int(hs.get_clock().time_msec()) - d["age_ts"]
+        del d["age_ts"]
+
+    return d
+
+
 class SynapseEvent(JsonEncodedObject):
 
     """Base class for Synapse events. These are JSON objects which must abide
@@ -43,6 +56,8 @@ class SynapseEvent(JsonEncodedObject):
         "content",  # HTTP body, JSON
         "state_key",
         "required_power_level",
+        "age_ts",
+        "prev_content",
     ]
 
     internal_keys = [
@@ -158,10 +173,6 @@ class SynapseEvent(JsonEncodedObject):
 
 
 class SynapseStateEvent(SynapseEvent):
-    valid_keys = SynapseEvent.valid_keys + [
-        "prev_content",
-    ]
-
     def __init__(self, **kwargs):
         if "state_key" not in kwargs:
             kwargs["state_key"] = ""
@@ -59,6 +59,14 @@ class EventFactory(object):
         if "ts" not in kwargs:
             kwargs["ts"] = int(self.clock.time_msec())
 
+        # The "age" key is a delta timestamp that should be converted into an
+        # absolute timestamp the minute we see it.
+        if "age" in kwargs:
+            kwargs["age_ts"] = int(self.clock.time_msec()) - int(kwargs["age"])
+            del kwargs["age"]
+        elif "age_ts" not in kwargs:
+            kwargs["age_ts"] = int(self.clock.time_msec())
+
         if etype in self._event_list:
             handler = self._event_list[etype]
         else:
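The age/age_ts bookkeeping added across these hunks reduces to the following sketch (not the actual synapse helpers): "age" is a delta relative to the sender's clock, so it is turned into an absolute local timestamp on receipt and back into a delta just before an event leaves this server again.

    def on_receive(event, now_ms):
        # incoming: delta -> absolute, using our own clock
        if "age" in event:
            event["age_ts"] = now_ms - int(event["age"])
            del event["age"]
        return event

    def on_send(event, now_ms):
        # outgoing: absolute -> delta, so the remote side can redo the same trick
        if "age_ts" in event:
            event["age"] = now_ms - int(event["age_ts"])
        return event
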
@@ -291,6 +291,13 @@ class ReplicationLayer(object):
     def on_incoming_transaction(self, transaction_data):
         transaction = Transaction(**transaction_data)
 
+        for p in transaction.pdus:
+            if "age" in p:
+                p["age_ts"] = int(self._clock.time_msec()) - int(p["age"])
+                del p["age"]
+
+        pdu_list = [Pdu(**p) for p in transaction.pdus]
+
         logger.debug("[%s] Got transaction", transaction.transaction_id)
 
         response = yield self.transaction_actions.have_responded(transaction)
@@ -303,8 +310,6 @@ class ReplicationLayer(object):
 
         logger.debug("[%s] Transacition is new", transaction.transaction_id)
 
-        pdu_list = [Pdu(**p) for p in transaction.pdus]
-
         dl = []
         for pdu in pdu_list:
             dl.append(self._handle_new_pdu(pdu))
@@ -405,9 +410,14 @@ class ReplicationLayer(object):
         """Returns a new Transaction containing the given PDUs suitable for
         transmission.
         """
+        pdus = [p.get_dict() for p in pdu_list]
+        for p in pdus:
+            if "age_ts" in pdus:
+                p["age"] = int(self.clock.time_msec()) - p["age_ts"]
+
         return Transaction(
-            pdus=[p.get_dict() for p in pdu_list],
             origin=self.server_name,
+            pdus=pdus,
             ts=int(self._clock.time_msec()),
             destination=None,
         )
@@ -593,8 +603,21 @@ class _TransactionQueue(object):
             logger.debug("TX [%s] Sending transaction...", destination)
 
             # Actually send the transaction
+
+            # FIXME (erikj): This is a bit of a hack to make the Pdu age
+            # keys work
+            def cb(transaction):
+                now = int(self._clock.time_msec())
+                if "pdus" in transaction:
+                    for p in transaction["pdus"]:
+                        if "age_ts" in p:
+                            p["age"] = now - int(p["age_ts"])
+
+                return transaction
+
             code, response = yield self.transport_layer.send_transaction(
-                transaction
+                transaction,
+                on_send_callback=cb,
             )
 
             logger.debug("TX [%s] Sent transaction", destination)
@@ -144,7 +144,7 @@ class TransportLayer(object):
 
     @defer.inlineCallbacks
     @log_function
-    def send_transaction(self, transaction):
+    def send_transaction(self, transaction, on_send_callback=None):
         """ Sends the given Transaction to it's destination
 
         Args:
@@ -165,10 +165,23 @@ class TransportLayer(object):
 
         data = transaction.get_dict()
 
+        # FIXME (erikj): This is a bit of a hack to make the Pdu age
+        # keys work
+        def cb(destination, method, path_bytes, producer):
+            if not on_send_callback:
+                return
+
+            transaction = json.loads(producer.body)
+
+            new_transaction = on_send_callback(transaction)
+
+            producer.reset(new_transaction)
+
         code, response = yield self.client.put_json(
             transaction.destination,
             path=PREFIX + "/send/%s/" % transaction.transaction_id,
-            data=data
+            data=data,
+            on_send_callback=cb,
         )
 
         logger.debug(
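The on_send_callback plumbing above boils down to one pattern: the producer owns the serialized body, and a callback may rewrite it at the last moment before the bytes go on the wire (here, to recompute per-PDU "age"). A stripped-down sketch of that pattern, with JsonProducer standing in for synapse's _JsonProducer:

    import json

    class JsonProducer(object):
        def __init__(self, obj):
            self.reset(obj)

        def reset(self, obj):
            # re-serialize whenever the payload is (re)written
            self.body = json.dumps(obj)
            self.length = len(self.body)

    def send(producer, on_send_callback=None):
        if on_send_callback:
            # hand the callback the decoded payload, let it rewrite it,
            # then swap the producer's body for the rewritten version
            new_payload = on_send_callback(json.loads(producer.body))
            producer.reset(new_payload)
        return producer.body  # what would actually be written to the request
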
@@ -15,7 +15,6 @@
 
 from twisted.internet import defer
 
-from synapse.api.events import SynapseEvent
 from synapse.util.logutils import log_function
 
 from ._base import BaseHandler
@@ -71,10 +70,7 @@ class EventStreamHandler(BaseHandler):
                 auth_user, room_ids, pagin_config, timeout
             )
 
-            chunks = [
-                e.get_dict() if isinstance(e, SynapseEvent) else e
-                for e in events
-            ]
+            chunks = [self.hs.serialize_event(e) for e in events]
 
             chunk = {
                 "chunk": chunks,
@@ -92,7 +88,9 @@ class EventStreamHandler(BaseHandler):
             # 10 seconds of grace to allow the client to reconnect again
             # before we think they're gone
             def _later():
-                logger.debug("_later stopped_user_eventstream %s", auth_user)
+                logger.debug(
+                    "_later stopped_user_eventstream %s", auth_user
+                )
                 self.distributor.fire(
                     "stopped_user_eventstream", auth_user
                 )
@@ -93,22 +93,18 @@ class FederationHandler(BaseHandler):
         """
         event = self.pdu_codec.event_from_pdu(pdu)
 
+        logger.debug("Got event: %s", event.event_id)
+
         with (yield self.lock_manager.lock(pdu.context)):
             if event.is_state and not backfilled:
                 is_new_state = yield self.state_handler.handle_new_state(
                     pdu
                 )
-                if not is_new_state:
-                    return
             else:
                 is_new_state = False
         # TODO: Implement something in federation that allows us to
         # respond to PDU.
 
-        if hasattr(event, "state_key") and not is_new_state:
-            logger.debug("Ignoring old state.")
-            return
-
         target_is_mine = False
         if hasattr(event, "target_host"):
             target_is_mine = event.target_host == self.hs.hostname
@@ -139,7 +135,11 @@ class FederationHandler(BaseHandler):
 
         else:
             with (yield self.room_lock.lock(event.room_id)):
-                yield self.store.persist_event(event, backfilled)
+                yield self.store.persist_event(
+                    event,
+                    backfilled,
+                    is_new_state=is_new_state
+                )
 
             room = yield self.store.get_room(event.room_id)
 
@@ -124,7 +124,7 @@ class MessageHandler(BaseHandler):
         )
 
         chunk = {
-            "chunk": [e.get_dict() for e in events],
+            "chunk": [self.hs.serialize_event(e) for e in events],
             "start": pagin_config.from_token.to_string(),
             "end": next_token.to_string(),
         }
@@ -296,7 +296,7 @@ class MessageHandler(BaseHandler):
                 end_token = now_token.copy_and_replace("room_key", token[1])
 
                 d["messages"] = {
-                    "chunk": [m.get_dict() for m in messages],
+                    "chunk": [self.hs.serialize_event(m) for m in messages],
                     "start": start_token.to_string(),
                     "end": end_token.to_string(),
                 }
@@ -304,7 +304,7 @@ class MessageHandler(BaseHandler):
                 current_state = yield self.store.get_current_state(
                     event.room_id
                 )
-                d["state"] = [c.get_dict() for c in current_state]
+                d["state"] = [self.hs.serialize_event(c) for c in current_state]
             except:
                 logger.exception("Failed to get snapshot")
 
@@ -40,8 +40,7 @@ class RegistrationHandler(BaseHandler):
         self.distributor.declare("registered_user")
 
     @defer.inlineCallbacks
-    def register(self, localpart=None, password=None, threepidCreds=None,
-                 captcha_info={}):
+    def register(self, localpart=None, password=None):
         """Registers a new client on the server.
 
         Args:
@@ -54,37 +53,6 @@ class RegistrationHandler(BaseHandler):
         Raises:
             RegistrationError if there was a problem registering.
         """
-        if captcha_info:
-            captcha_response = yield self._validate_captcha(
-                captcha_info["ip"],
-                captcha_info["private_key"],
-                captcha_info["challenge"],
-                captcha_info["response"]
-            )
-            if not captcha_response["valid"]:
-                logger.info("Invalid captcha entered from %s. Error: %s",
-                            captcha_info["ip"], captcha_response["error_url"])
-                raise InvalidCaptchaError(
-                    error_url=captcha_response["error_url"]
-                )
-            else:
-                logger.info("Valid captcha entered from %s", captcha_info["ip"])
-
-        if threepidCreds:
-            for c in threepidCreds:
-                logger.info("validating theeepidcred sid %s on id server %s",
-                            c['sid'], c['idServer'])
-                try:
-                    threepid = yield self._threepid_from_creds(c)
-                except:
-                    logger.err()
-                    raise RegistrationError(400, "Couldn't validate 3pid")
-
-                if not threepid:
-                    raise RegistrationError(400, "Couldn't validate 3pid")
-                logger.info("got threepid medium %s address %s",
-                            threepid['medium'], threepid['address'])
-
         password_hash = None
         if password:
             password_hash = bcrypt.hashpw(password, bcrypt.gensalt())
@@ -126,15 +94,54 @@ class RegistrationHandler(BaseHandler):
             raise RegistrationError(
                 500, "Cannot generate user ID.")
 
+        defer.returnValue((user_id, token))
+
+    @defer.inlineCallbacks
+    def check_recaptcha(self, ip, private_key, challenge, response):
+        """Checks a recaptcha is correct."""
+
+        captcha_response = yield self._validate_captcha(
+            ip,
+            private_key,
+            challenge,
+            response
+        )
+        if not captcha_response["valid"]:
+            logger.info("Invalid captcha entered from %s. Error: %s",
+                        ip, captcha_response["error_url"])
+            raise InvalidCaptchaError(
+                error_url=captcha_response["error_url"]
+            )
+        else:
+            logger.info("Valid captcha entered from %s", ip)
+
+    @defer.inlineCallbacks
+    def register_email(self, threepidCreds):
+        """Registers emails with an identity server."""
+
+        for c in threepidCreds:
+            logger.info("validating theeepidcred sid %s on id server %s",
+                        c['sid'], c['idServer'])
+            try:
+                threepid = yield self._threepid_from_creds(c)
+            except:
+                logger.err()
+                raise RegistrationError(400, "Couldn't validate 3pid")
+
+            if not threepid:
+                raise RegistrationError(400, "Couldn't validate 3pid")
+            logger.info("got threepid medium %s address %s",
+                        threepid['medium'], threepid['address'])
+
+    @defer.inlineCallbacks
+    def bind_emails(self, user_id, threepidCreds):
+        """Links emails with a user ID and informs an identity server."""
+
         # Now we have a matrix ID, bind it to the threepids we were given
-        if threepidCreds:
         for c in threepidCreds:
             # XXX: This should be a deferred list, shouldn't it?
             yield self._bind_threepid(c, user_id)
 
-        defer.returnValue((user_id, token))
-
     def _generate_token(self, user_id):
         # urlsafe variant uses _ and - so use . as the separator and replace
         # all =s with .s so http clients don't quote =s when it is used as
@@ -200,7 +207,8 @@ class RegistrationHandler(BaseHandler):
         data = yield client.post_urlencoded_get_raw(
             "www.google.com:80",
             "/recaptcha/api/verify",
-            accept_partial=True,  # twisted dislikes google's response, no content length.
+            # twisted dislikes google's response, no content length.
+            accept_partial=True,
             args={
                 'privatekey': private_key,
                 'remoteip': ip_addr,
@@ -335,7 +335,7 @@ class RoomMemberHandler(BaseHandler):
 
         member_list = yield self.store.get_room_members(room_id=room_id)
         event_list = [
-            entry.get_dict()
+            self.hs.serialize_event(entry)
             for entry in member_list
         ]
         chunk_data = {
@@ -122,7 +122,7 @@ class TwistedHttpClient(HttpClient):
         self.hs = hs
 
     @defer.inlineCallbacks
-    def put_json(self, destination, path, data):
+    def put_json(self, destination, path, data, on_send_callback=None):
         if destination in _destination_mappings:
             destination = _destination_mappings[destination]
 
@@ -131,7 +131,8 @@ class TwistedHttpClient(HttpClient):
             "PUT",
             path.encode("ascii"),
             producer=_JsonProducer(data),
-            headers_dict={"Content-Type": ["application/json"]}
+            headers_dict={"Content-Type": ["application/json"]},
+            on_send_callback=on_send_callback,
         )
 
         logger.debug("Getting resp body")
@@ -218,7 +219,7 @@ class TwistedHttpClient(HttpClient):
     @defer.inlineCallbacks
     def _create_request(self, destination, method, path_bytes, param_bytes=b"",
                         query_bytes=b"", producer=None, headers_dict={},
-                        retry_on_dns_fail=True):
+                        retry_on_dns_fail=True, on_send_callback=None):
         """ Creates and sends a request to the given url
         """
         headers_dict[b"User-Agent"] = [b"Synapse"]
@@ -242,6 +243,9 @@ class TwistedHttpClient(HttpClient):
         endpoint = self._getEndpoint(reactor, destination);
 
         while True:
+            if on_send_callback:
+                on_send_callback(destination, method, path_bytes, producer)
+
             try:
                 response = yield self.agent.request(
                     destination,
@@ -310,6 +314,9 @@ class _JsonProducer(object):
     """ Used by the twisted http client to create the HTTP body from json
     """
     def __init__(self, jsn):
+        self.reset(jsn)
+
+    def reset(self, jsn):
         self.body = encode_canonical_json(jsn)
         self.length = len(self.body)
 
@@ -59,7 +59,7 @@ class EventRestServlet(RestServlet):
         event = yield handler.get_event(auth_user, event_id)
 
         if event:
-            defer.returnValue((200, event.get_dict()))
+            defer.returnValue((200, self.hs.serialize_event(event)))
         else:
             defer.returnValue((404, "Event not found."))
 
@@ -17,56 +17,141 @@
 from twisted.internet import defer
 
 from synapse.api.errors import SynapseError, Codes
+from synapse.api.constants import LoginType
 from base import RestServlet, client_path_pattern
+import synapse.util.stringutils as stringutils
 
 import json
+import logging
 import urllib
 
+logger = logging.getLogger(__name__)
+
 
 class RegisterRestServlet(RestServlet):
+    """Handles registration with the home server.
+
+    This servlet is in control of the registration flow; the registration
+    handler doesn't have a concept of multi-stages or sessions.
+    """
+
     PATTERN = client_path_pattern("/register$")
 
+    def __init__(self, hs):
+        super(RegisterRestServlet, self).__init__(hs)
+        # sessions are stored as:
+        # self.sessions = {
+        #   "session_id" : { __session_dict__ }
+        # }
+        # TODO: persistent storage
+        self.sessions = {}
+
+    def on_GET(self, request):
+        if self.hs.config.enable_registration_captcha:
+            return (200, {
+                "flows": [
+                    {
+                        "type": LoginType.RECAPTCHA,
+                        "stages": ([LoginType.RECAPTCHA,
+                                    LoginType.EMAIL_IDENTITY,
+                                    LoginType.PASSWORD])
+                    },
+                    {
+                        "type": LoginType.RECAPTCHA,
+                        "stages": [LoginType.RECAPTCHA, LoginType.PASSWORD]
+                    }
+                ]
+            })
+        else:
+            return (200, {
+                "flows": [
+                    {
+                        "type": LoginType.EMAIL_IDENTITY,
+                        "stages": ([LoginType.EMAIL_IDENTITY,
+                                    LoginType.PASSWORD])
+                    },
+                    {
+                        "type": LoginType.PASSWORD
+                    }
+                ]
+            })
+
     @defer.inlineCallbacks
     def on_POST(self, request):
-        desired_user_id = None
-        password = None
+        register_json = _parse_json(request)
+
+        session = (register_json["session"] if "session" in register_json
+                   else None)
+        login_type = None
+        if "type" not in register_json:
+            raise SynapseError(400, "Missing 'type' key.")
 
         try:
-            register_json = json.loads(request.content.read())
-            if "password" in register_json:
-                password = register_json["password"].encode("utf-8")
-
-            if type(register_json["user_id"]) == unicode:
-                desired_user_id = register_json["user_id"].encode("utf-8")
-                if urllib.quote(desired_user_id) != desired_user_id:
-                    raise SynapseError(
-                        400,
-                        "User ID must only contain characters which do not " +
-                        "require URL encoding.")
-        except ValueError:
-            defer.returnValue((400, "No JSON object."))
-        except KeyError:
-            pass  # user_id is optional
-
-        threepidCreds = None
-        if 'threepidCreds' in register_json:
-            threepidCreds = register_json['threepidCreds']
-
-        captcha = {}
-        if self.hs.config.enable_registration_captcha:
+            login_type = register_json["type"]
+            stages = {
+                LoginType.RECAPTCHA: self._do_recaptcha,
+                LoginType.PASSWORD: self._do_password,
+                LoginType.EMAIL_IDENTITY: self._do_email_identity
+            }
+
+            session_info = self._get_session_info(request, session)
+            logger.debug("%s : session info %s   request info %s",
+                         login_type, session_info, register_json)
+            response = yield stages[login_type](
+                request,
+                register_json,
+                session_info
+            )
+
+            if "access_token" not in response:
+                # isn't a final response
+                response["session"] = session_info["id"]
+
+            defer.returnValue((200, response))
+        except KeyError as e:
+            logger.exception(e)
+            raise SynapseError(400, "Missing JSON keys for login type %s." % login_type)
+
+    def on_OPTIONS(self, request):
+        return (200, {})
+
+    def _get_session_info(self, request, session_id):
+        if not session_id:
+            # create a new session
+            while session_id is None or session_id in self.sessions:
+                session_id = stringutils.random_string(24)
+            self.sessions[session_id] = {
+                "id": session_id,
+                LoginType.EMAIL_IDENTITY: False,
+                LoginType.RECAPTCHA: False
+            }
+
+        return self.sessions[session_id]
+
+    def _save_session(self, session):
+        # TODO: Persistent storage
+        logger.debug("Saving session %s", session)
+        self.sessions[session["id"]] = session
+
+    def _remove_session(self, session):
+        logger.debug("Removing session %s", session)
+        self.sessions.pop(session["id"])
+
+    @defer.inlineCallbacks
+    def _do_recaptcha(self, request, register_json, session):
+        if not self.hs.config.enable_registration_captcha:
+            raise SynapseError(400, "Captcha not required.")
+
         challenge = None
         user_response = None
         try:
-            captcha_type = register_json["captcha"]["type"]
-            if captcha_type != "m.login.recaptcha":
-                raise SynapseError(400, "Sorry, only m.login.recaptcha " +
-                                   "requests are supported.")
-            challenge = register_json["captcha"]["challenge"]
-            user_response = register_json["captcha"]["response"]
+            challenge = register_json["challenge"]
+            user_response = register_json["response"]
         except KeyError:
             raise SynapseError(400, "Captcha response is required",
                                errcode=Codes.CAPTCHA_NEEDED)
 
-        # TODO determine the source IP : May be an X-Forwarding-For header depending on config
+        # May be an X-Forwarding-For header depending on config
         ip_addr = request.getClientIP()
         if self.hs.config.captcha_ip_origin_is_x_forwarded:
             # use the header
@@ -74,32 +159,76 @@ class RegisterRestServlet(RestServlet):
             ip_addr = request.requestHeaders.getRawHeaders(
                 "X-Forwarded-For")[0]
 
-            captcha = {
-                "ip": ip_addr,
-                "private_key": self.hs.config.recaptcha_private_key,
-                "challenge": challenge,
-                "response": user_response
-            }
+        handler = self.handlers.registration_handler
+        yield handler.check_recaptcha(
+            ip_addr,
+            self.hs.config.recaptcha_private_key,
+            challenge,
+            user_response
+        )
+        session[LoginType.RECAPTCHA] = True  # mark captcha as done
+        self._save_session(session)
+        defer.returnValue({
+            "next": [LoginType.PASSWORD, LoginType.EMAIL_IDENTITY]
+        })
+
+    @defer.inlineCallbacks
+    def _do_email_identity(self, request, register_json, session):
+        if (self.hs.config.enable_registration_captcha and
+                not session[LoginType.RECAPTCHA]):
+            raise SynapseError(400, "Captcha is required.")
+
+        threepidCreds = register_json['threepidCreds']
+        handler = self.handlers.registration_handler
+        yield handler.register_email(threepidCreds)
+        session["threepidCreds"] = threepidCreds  # store creds for next stage
+        session[LoginType.EMAIL_IDENTITY] = True  # mark email as done
+        self._save_session(session)
+        defer.returnValue({
+            "next": LoginType.PASSWORD
+        })
+
+    @defer.inlineCallbacks
+    def _do_password(self, request, register_json, session):
+        if (self.hs.config.enable_registration_captcha and
+                not session[LoginType.RECAPTCHA]):
+            # captcha should've been done by this stage!
+            raise SynapseError(400, "Captcha is required.")
+
+        password = register_json["password"].encode("utf-8")
+        desired_user_id = (register_json["user"].encode("utf-8") if "user"
+                           in register_json else None)
+        if desired_user_id and urllib.quote(desired_user_id) != desired_user_id:
+            raise SynapseError(
+                400,
+                "User ID must only contain characters which do not " +
+                "require URL encoding.")
         handler = self.handlers.registration_handler
         (user_id, token) = yield handler.register(
             localpart=desired_user_id,
-            password=password,
-            threepidCreds=threepidCreds,
-            captcha_info=captcha)
+            password=password
+        )
+
+        if session[LoginType.EMAIL_IDENTITY]:
+            yield handler.bind_emails(user_id, session["threepidCreds"])
 
         result = {
             "user_id": user_id,
             "access_token": token,
             "home_server": self.hs.hostname,
         }
-        defer.returnValue(
-            (200, result)
-        )
-
-    def on_OPTIONS(self, request):
-        return (200, {})
+        self._remove_session(session)
+        defer.returnValue(result)
+
+
+def _parse_json(request):
+    try:
+        content = json.loads(request.content.read())
+        if type(content) != dict:
+            raise SynapseError(400, "Content must be a JSON object.")
+        return content
+    except ValueError:
+        raise SynapseError(400, "Content not JSON.")
 
 
 def register_servlets(hs, http_server):
@@ -378,7 +378,7 @@ class RoomTriggerBackfill(RestServlet):
         handler = self.handlers.federation_handler
         events = yield handler.backfill(remote_server, room_id, limit)
 
-        res = [event.get_dict() for event in events]
+        res = [self.hs.serialize_event(event) for event in events]
         defer.returnValue((200, res))
 
 
@@ -20,6 +20,7 @@
 
 # Imports required for the default HomeServer() implementation
 from synapse.federation import initialize_http_replication
+from synapse.api.events import serialize_event
 from synapse.api.events.factory import EventFactory
 from synapse.notifier import Notifier
 from synapse.api.auth import Auth
@@ -138,6 +139,9 @@ class BaseHomeServer(object):
         object."""
         return RoomID.from_string(s, hs=self)
 
+    def serialize_event(self, e):
+        return serialize_event(self, e)
+
 # Build magic accessors for every dependency
 for depname in BaseHomeServer.DEPENDENCIES:
     BaseHomeServer._make_dependency_method(depname)
@@ -68,7 +68,8 @@ class DataStore(RoomMemberStore, RoomStore,
 
     @defer.inlineCallbacks
     @log_function
-    def persist_event(self, event=None, backfilled=False, pdu=None):
+    def persist_event(self, event=None, backfilled=False, pdu=None,
+                      is_new_state=True):
         stream_ordering = None
         if backfilled:
             if not self.min_token_deferred.called:
@@ -83,6 +84,7 @@ class DataStore(RoomMemberStore, RoomStore,
                 event=event,
                 backfilled=backfilled,
                 stream_ordering=stream_ordering,
+                is_new_state=is_new_state,
             )
         except _RollbackButIsFineException as e:
             pass
@@ -109,12 +111,14 @@ class DataStore(RoomMemberStore, RoomStore,
         defer.returnValue(event)
 
     def _persist_pdu_event_txn(self, txn, pdu=None, event=None,
-                               backfilled=False, stream_ordering=None):
+                               backfilled=False, stream_ordering=None,
+                               is_new_state=True):
         if pdu is not None:
             self._persist_event_pdu_txn(txn, pdu)
         if event is not None:
             return self._persist_event_txn(
-                txn, event, backfilled, stream_ordering
+                txn, event, backfilled, stream_ordering,
+                is_new_state=is_new_state,
             )
 
     def _persist_event_pdu_txn(self, txn, pdu):
@@ -141,7 +145,8 @@ class DataStore(RoomMemberStore, RoomStore,
         self._update_min_depth_for_context_txn(txn, pdu.context, pdu.depth)
 
     @log_function
-    def _persist_event_txn(self, txn, event, backfilled, stream_ordering=None):
+    def _persist_event_txn(self, txn, event, backfilled, stream_ordering=None,
+                           is_new_state=True):
         if event.type == RoomMemberEvent.TYPE:
             self._store_room_member_txn(txn, event)
         elif event.type == FeedbackEvent.TYPE:
@@ -195,7 +200,7 @@ class DataStore(RoomMemberStore, RoomStore,
             )
             raise _RollbackButIsFineException("_persist_event")
 
-        if not backfilled and hasattr(event, "state_key"):
+        if is_new_state and hasattr(event, "state_key"):
             vals = {
                 "event_id": event.event_id,
                 "room_id": event.room_id,
@@ -315,6 +315,10 @@ class SQLBaseStore(object):
         d["content"] = json.loads(d["content"])
         del d["unrecognized_keys"]
 
+        if "age_ts" not in d:
+            # For compatibility
+            d["age_ts"] = d["ts"] if "ts" in d else 0
+
         return self.event_factory.create_event(
             etype=d["type"],
             **d
@@ -17,7 +17,7 @@ from twisted.internet import defer
 from tests import unittest
 
 # python imports
-from mock import Mock
+from mock import Mock, ANY
 
 from ..utils import MockHttpResource, MockClock
 
@@ -181,7 +181,8 @@ class FederationTestCase(unittest.TestCase):
                         "depth": 1,
                     },
                 ]
-            }
+            },
+            on_send_callback=ANY,
         )
 
     @defer.inlineCallbacks
@@ -212,7 +213,9 @@ class FederationTestCase(unittest.TestCase):
                         "content": {"testing": "content here"},
                     }
                 ],
-            })
+            },
+            on_send_callback=ANY,
+        )
 
     @defer.inlineCallbacks
     def test_recv_edu(self):
@@ -74,7 +74,9 @@ class FederationTestCase(unittest.TestCase):
 
         yield self.handlers.federation_handler.on_receive_pdu(pdu, False)
 
-        self.datastore.persist_event.assert_called_once_with(ANY, False)
+        self.datastore.persist_event.assert_called_once_with(
+            ANY, False, is_new_state=False
+        )
         self.notifier.on_new_room_event.assert_called_once_with(ANY)
 
     @defer.inlineCallbacks
@@ -319,7 +319,8 @@ class PresenceInvitesTestCase(unittest.TestCase):
                         "observer_user": "@apple:test",
                         "observed_user": "@cabbage:elsewhere",
                     }
-                )
+                ),
+                on_send_callback=ANY,
             ),
             defer.succeed((200, "OK"))
         )
@@ -345,7 +346,8 @@ class PresenceInvitesTestCase(unittest.TestCase):
                        "observer_user": "@cabbage:elsewhere",
                        "observed_user": "@apple:test",
                    }
-                )
+                ),
+                on_send_callback=ANY,
            ),
            defer.succeed((200, "OK"))
        )
@@ -376,7 +378,8 @@ class PresenceInvitesTestCase(unittest.TestCase):
                        "observer_user": "@cabbage:elsewhere",
                        "observed_user": "@durian:test",
                    }
-                )
+                ),
+                on_send_callback=ANY,
            ),
            defer.succeed((200, "OK"))
        )
@@ -765,7 +768,8 @@ class PresencePushTestCase(unittest.TestCase):
                         "last_active_ago": 0},
                    ],
                }
-            )
+            ),
+            on_send_callback=ANY,
        ),
            defer.succeed((200, "OK"))
        )
@@ -780,7 +784,8 @@ class PresencePushTestCase(unittest.TestCase):
                         "last_active_ago": 0},
                    ],
                }
-            )
+            ),
+            on_send_callback=ANY,
        ),
            defer.succeed((200, "OK"))
        )
@@ -906,6 +911,7 @@ class PresencePushTestCase(unittest.TestCase):
                    ],
                }
            ),
+            on_send_callback=ANY,
        ),
            defer.succeed((200, "OK"))
        )
@@ -920,6 +926,7 @@ class PresencePushTestCase(unittest.TestCase):
                    ],
                }
            ),
+            on_send_callback=ANY,
        ),
            defer.succeed((200, "OK"))
        )
@@ -949,6 +956,7 @@ class PresencePushTestCase(unittest.TestCase):
                    ],
                }
            ),
+            on_send_callback=ANY,
        ),
            defer.succeed((200, "OK"))
        )
@@ -1145,6 +1153,7 @@ class PresencePollingTestCase(unittest.TestCase):
                    "poll": [ "@potato:remote" ],
                },
            ),
+            on_send_callback=ANY,
        ),
            defer.succeed((200, "OK"))
        )
@@ -1157,6 +1166,7 @@ class PresencePollingTestCase(unittest.TestCase):
                    "push": [ {"user_id": "@clementine:test" }],
                },
            ),
+            on_send_callback=ANY,
        ),
            defer.succeed((200, "OK"))
        )
@@ -1185,6 +1195,7 @@ class PresencePollingTestCase(unittest.TestCase):
                    "push": [ {"user_id": "@fig:test" }],
                },
            ),
+            on_send_callback=ANY,
        ),
            defer.succeed((200, "OK"))
        )
@@ -1217,6 +1228,7 @@ class PresencePollingTestCase(unittest.TestCase):
                    "unpoll": [ "@potato:remote" ],
                },
            ),
+            on_send_callback=ANY,
        ),
            defer.succeed((200, "OK"))
        )
@@ -1248,6 +1260,7 @@ class PresencePollingTestCase(unittest.TestCase):
                    ],
                },
            ),
+            on_send_callback=ANY,
        ),
            defer.succeed((200, "OK"))
        )
@ -169,7 +169,8 @@ class TypingNotificationsTestCase(unittest.TestCase):
                        "user_id": self.u_apple.to_string(),
                        "typing": True,
                    }
                )
                ),
                on_send_callback=ANY,
            ),
            defer.succeed((200, "OK"))
        )
@ -219,7 +220,8 @@ class TypingNotificationsTestCase(unittest.TestCase):
                        "user_id": self.u_apple.to_string(),
                        "typing": False,
                    }
                )
                ),
                on_send_callback=ANY,
            ),
            defer.succeed((200, "OK"))
        )
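
Every presence and typing hunk above makes the same change: the expected outbound federation call gains an extra ``on_send_callback=ANY,`` argument. ``ANY`` is ``mock``'s wildcard matcher, so the expectation accepts whatever callback object the transaction layer actually supplies. A minimal sketch of that matching behaviour, using ``unittest.mock`` and a made-up ``send_edu`` stand-in rather than Synapse's own test helpers::

    from unittest.mock import Mock, ANY

    # Hypothetical stand-in for the HTTP client the handlers call into.
    send_edu = Mock()

    # The code under test supplies a callback it builds internally; the test
    # only cares that the argument is present, not which object it is.
    send_edu("remote", edu_type="m.presence_invite", on_send_callback=lambda: None)

    # ANY compares equal to any value, so this passes even though the test
    # never sees the real callback.
    send_edu.assert_called_once_with(
        "remote", edu_type="m.presence_invite", on_send_callback=ANY,
    )
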
@ -95,8 +95,14 @@ class RestTestCase(unittest.TestCase):

    @defer.inlineCallbacks
    def register(self, user_id):
        (code, response) = yield self.mock_resource.trigger("POST", "/register",
                '{"user_id":"%s"}' % user_id)
        (code, response) = yield self.mock_resource.trigger(
            "POST",
            "/register",
            json.dumps({
                "user": user_id,
                "password": "test",
                "type": "m.login.password"
            }))
        self.assertEquals(200, code)
        defer.returnValue(response)

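
The reworked ``register`` helper above still leans on Twisted's ``inlineCallbacks`` idiom: ``yield`` suspends the generator until the Deferred returned by ``mock_resource.trigger`` fires, and ``defer.returnValue`` hands the result back to the caller (a bare ``return value`` was not legal inside Python 2 generators). A self-contained sketch of that idiom, unrelated to Synapse's own test utilities::

    from twisted.internet import defer

    @defer.inlineCallbacks
    def fetch_and_double(get_value):
        # get_value() is assumed to return a Deferred; yield waits for it.
        value = yield get_value()
        # Hand the final result to whoever is waiting on our own Deferred.
        defer.returnValue(value * 2)

    results = []
    d = fetch_and_double(lambda: defer.succeed(21))
    d.addCallback(results.append)   # defer.succeed() has already fired
    assert results == [42]
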
@ -82,37 +82,154 @@ angular.module('matrixService', [])
        return $http(request);
    };

    var doRegisterLogin = function(path, loginType, sessionId, userName, password, threepidCreds) {
        var data = {};
        if (loginType === "m.login.recaptcha") {
            var challengeToken = Recaptcha.get_challenge();
            var captchaEntry = Recaptcha.get_response();
            data = {
                type: "m.login.recaptcha",
                challenge: challengeToken,
                response: captchaEntry
            };
        }
        else if (loginType === "m.login.email.identity") {
            data = {
                threepidCreds: threepidCreds
            };
        }
        else if (loginType === "m.login.password") {
            data = {
                user: userName,
                password: password
            };
        }

        if (sessionId) {
            data.session = sessionId;
        }
        data.type = loginType;
        console.log("doRegisterLogin >>> " + loginType);
        return doRequest("POST", path, undefined, data);
    };

    return {
        /****** Home server API ******/
        prefix: prefixPath,

        // Register an user
        register: function(user_name, password, threepidCreds, useCaptcha) {
            // The REST path spec
            // registration is composed of multiple requests, to check you can
            // register, then to actually register. This deferred will fire when
            // all the requests are done, along with the final response.
            var deferred = $q.defer();
            var path = "/register";

            var data = {
                user_id: user_name,
                password: password,
                threepidCreds: threepidCreds
            };

            if (useCaptcha) {
                // Not all home servers will require captcha on signup, but if this flag is checked,
                // send captcha information.
                // TODO: Might be nice to make this a bit more flexible..
                var challengeToken = Recaptcha.get_challenge();
                var captchaEntry = Recaptcha.get_response();
                var captchaType = "m.login.recaptcha";

                data.captcha = {
                    type: captchaType,
                    challenge: challengeToken,
                    response: captchaEntry
                };
            }

            return doRequest("POST", path, undefined, data);
            // check we can actually register with this HS.
            doRequest("GET", path, undefined, undefined).then(
                function(response) {
                    console.log("/register [1] : "+JSON.stringify(response));
                    var flows = response.data.flows;
                    var knownTypes = [
                        "m.login.password",
                        "m.login.recaptcha",
                        "m.login.email.identity"
                    ];
                    // if they entered 3pid creds, we want to use a flow which uses it.
                    var useThreePidFlow = threepidCreds != undefined;
                    var flowIndex = 0;
                    var firstRegType = undefined;

                    for (var i=0; i<flows.length; i++) {
                        var isThreePidFlow = false;
                        if (flows[i].stages) {
                            for (var j=0; j<flows[i].stages.length; j++) {
                                var regType = flows[i].stages[j];
                                if (knownTypes.indexOf(regType) === -1) {
                                    deferred.reject("Unknown type: "+regType);
                                    return;
                                }
                                if (regType == "m.login.email.identity") {
                                    isThreePidFlow = true;
                                }
                                if (!useCaptcha && regType == "m.login.recaptcha") {
                                    console.error("Web client setup to not use captcha, but HS demands a captcha.");
                                    deferred.reject({
                                        data: {
                                            errcode: "M_CAPTCHA_NEEDED",
                                            error: "Home server requires a captcha."
                                        }
                                    });
                                    return;
                                }
                            }
                        }

                        if ( (isThreePidFlow && useThreePidFlow) || (!isThreePidFlow && !useThreePidFlow) ) {
                            flowIndex = i;
                        }

                        if (knownTypes.indexOf(flows[i].type) == -1) {
                            deferred.reject("Unknown type: "+flows[i].type);
                            return;
                        }
                    }

                    // looks like we can register fine, go ahead and do it.
                    console.log("Using flow " + JSON.stringify(flows[flowIndex]));
                    firstRegType = flows[flowIndex].type;
                    var sessionId = undefined;

                    // generic response processor so it can loop as many times as required
                    var loginResponseFunc = function(response) {
                        if (response.data.session) {
                            sessionId = response.data.session;
                        }
                        console.log("login response: " + JSON.stringify(response.data));
                        if (response.data.access_token) {
                            deferred.resolve(response);
                        }
                        else if (response.data.next) {
                            var nextType = response.data.next;
                            if (response.data.next instanceof Array) {
                                for (var i=0; i<response.data.next.length; i++) {
                                    if (useThreePidFlow && response.data.next[i] == "m.login.email.identity") {
                                        nextType = response.data.next[i];
                                        break;
                                    }
                                    else if (!useThreePidFlow && response.data.next[i] != "m.login.email.identity") {
                                        nextType = response.data.next[i];
                                        break;
                                    }
                                }
                            }
                            return doRegisterLogin(path, nextType, sessionId, user_name, password, threepidCreds).then(
                                loginResponseFunc,
                                function(err) {
                                    deferred.reject(err);
                                }
                            );
                        }
                        else {
                            deferred.reject("Unknown continuation: "+JSON.stringify(response));
                        }
                    };

                    // set the ball rolling
                    doRegisterLogin(path, firstRegType, undefined, user_name, password, threepidCreds).then(
                        loginResponseFunc,
                        function(err) {
                            deferred.reject(err);
                        }
                    );

                },
                function(err) {
                    deferred.reject(err);
                }
            );

            return deferred.promise;
        },

        // Create a room
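
The rewritten web client ``register`` above no longer sends a single request: it GETs ``/register`` to learn the advertised flows, picks one (preferring the email flow only when 3pid credentials were supplied), then POSTs stage after stage, echoing back the ``session`` value, until a response carries an ``access_token``. A compressed sketch of that loop outside the web client, assuming the v1 client prefix and the third-party ``requests`` library (neither is dictated by this diff), with the captcha and email stages and all error handling left out::

    import requests

    BASE = "http://localhost:8008/_matrix/client/api/v1"   # assumed prefix

    def register(user, password):
        flows = requests.get(BASE + "/register").json()["flows"]
        # Mirror the "no 3pid creds" branch: take the plain password flow.
        flow = next(f for f in flows if f["type"] == "m.login.password")

        session = None
        stage = flow["type"]
        while True:
            body = {"type": stage, "user": user, "password": password}
            if session:
                body["session"] = session
            data = requests.post(BASE + "/register", json=body).json()
            if "access_token" in data:
                return data                      # registration finished
            session = data.get("session", session)
            # "next" may be a single stage or a list; the web client picks a
            # matching entry, this sketch just takes the first.
            nxt = data["next"]
            stage = nxt[0] if isinstance(nxt, list) else nxt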