misc: save received nonces in token table (#71441) #28
@@ -3,7 +3,6 @@ import datetime
 import hashlib
 import hmac
 import os
-import shutil
 import urllib.parse
 
 import pytest
@@ -290,12 +289,7 @@ def test_is_url_signed_check_nonce(pub, local_user, freezer):
     pub.site_options.set('api-secrets', ORIG, KEY)
     with open(os.path.join(pub.app_dir, 'site-options.cfg'), 'w') as fd:
         pub.site_options.write(fd)
-    # test clean_nonces do not bark when nonces directory is empty
-    if os.path.exists(os.path.join(pub.app_dir, 'nonces')):
-        shutil.rmtree(os.path.join(pub.app_dir, 'nonces'))
-    pub.clean_nonces(now=0)
-    nonce_dir = os.path.join(pub.app_dir, 'nonces')
-    assert not os.path.exists(nonce_dir) or not os.listdir(nonce_dir)
+    pub.token_class.wipe()
     signed_url = sign_url('?format=json&orig=%s&email=%s' % (ORIG, urllib.parse.quote(local_user.email)), KEY)
     req = HTTPRequest(
         None, {'SCRIPT_NAME': '/', 'SERVER_NAME': 'example.net', 'QUERY_STRING': signed_url[1:]}
@@ -310,18 +304,18 @@ def test_is_url_signed_check_nonce(pub, local_user, freezer):
         is_url_signed()
     assert exc_info.value.public_msg == 'nonce already used'
     # test that clean nonces works
-    pub.clean_nonces()
-    assert os.listdir(nonce_dir)
+    pub.token_class.clean()
+    assert pub.token_class.exists()
 
-    # 80 seconds in the future, nothing should be cleaned
-    freezer.move_to(datetime.timedelta(seconds=80))
-    pub.clean_nonces()
-    assert os.listdir(nonce_dir)
+    # 20 seconds in the future, nothing should be cleaned
+    freezer.move_to(datetime.timedelta(seconds=20))
+    pub.token_class.clean()
+    assert pub.token_class.exists()
 
-    # 90 seconds in the future, nonces should be removed
-    freezer.move_to(datetime.timedelta(seconds=10))
-    pub.clean_nonces()
-    assert not os.listdir(nonce_dir)
+    # 40 seconds in the future, nonces should be removed
+    freezer.move_to(datetime.timedelta(seconds=20))
+    pub.token_class.clean()
+    assert not pub.token_class.exists()
 
 
 def test_get_user_compat_endpoint(pub, local_user):
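The freezer fixture above is presumably pytest-freezegun's; passing a timedelta to move_to() advances the frozen clock relative to its current position, which is why the comments read 20 then 40 seconds in the future. A minimal standalone sketch of the same expiry-testing pattern, assuming only freezegun and the standard library (the TokenStore below is a stand-in, not the wcs token_class):

    # Stand-in store exposing the clean()/exists() surface used by the test above.
    import datetime

    from freezegun import freeze_time


    class TokenStore:
        def __init__(self):
            self.tokens = {}  # id -> expiration datetime

        def add(self, token_id, expiration_delay):
            expiration = datetime.datetime.now() + datetime.timedelta(seconds=expiration_delay)
            self.tokens[token_id] = expiration

        def clean(self):
            # drop entries whose expiration is in the (frozen) past
            now = datetime.datetime.now()
            self.tokens = {k: v for k, v in self.tokens.items() if v > now}

        def exists(self):
            return bool(self.tokens)


    with freeze_time() as frozen:
        store = TokenStore()
        store.add('abcd', expiration_delay=30)

        frozen.tick(datetime.timedelta(seconds=20))  # 20s in: still valid
        store.clean()
        assert store.exists()

        frozen.tick(datetime.timedelta(seconds=20))  # 40s in: expired
        store.clean()
        assert not store.exists()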
@@ -168,8 +168,6 @@ def test_register_cronjobs():
     # noqa pylint: disable=not-an-iterable
     assert 'clean_sessions' in [x.function.__name__ for x in pub.cronjobs]
     # noqa pylint: disable=not-an-iterable
-    assert 'clean_nonces' in [x.function.__name__ for x in pub.cronjobs]
-    # noqa pylint: disable=not-an-iterable
     assert 'clean_afterjobs' in [x.function.__name__ for x in pub.cronjobs]
     # noqa pylint: disable=not-an-iterable
     assert 'clean_tempfiles' in [x.function.__name__ for x in pub.cronjobs]
@@ -2517,6 +2517,24 @@ def test_form_tokens_migration(pub):
     assert not os.path.exists(form_tokens_dir)
 
 
+def test_nonces_migration(pub):
+    conn, cur = sql.get_connection_and_cursor()
+    cur.execute('UPDATE wcs_meta SET value = 76 WHERE key = %s', ('sql_level',))
+    conn.commit()
+    cur.close()
+
+    nonces_dir = os.path.join(pub.app_dir, 'nonces')
+    if not os.path.exists(nonces_dir):
+        os.mkdir(nonces_dir)
+    with open(os.path.join(nonces_dir, '1234'), 'w'):
+        pass
+
+    assert os.path.exists(os.path.join(nonces_dir, '1234'))
+    sql.migrate()
+    assert not os.path.exists(os.path.join(nonces_dir, '1234'))
+    assert not os.path.exists(nonces_dir)
+
+
 def test_workflow_traces_initial_migration(pub):
     formdef = FormDef()
     formdef.name = 'tests'
@@ -15,12 +15,9 @@
 # along with this program; if not, see <http://www.gnu.org/licenses/>.
 
 import base64
-import calendar
 import datetime
-import errno
 import hashlib
 import hmac
-import os
 import random
 import urllib.parse
 
@@ -79,27 +76,12 @@ def is_url_signed(utcnow=None, duration=DEFAULT_DURATION):
     # check nonce
     nonce = get_request().form.get('nonce')
     if nonce:
-        # compute end date of the nonce as an unix timestamp
-        until = timestamp + datetime.timedelta(seconds=duration)
-        until = calendar.timegm(until.timetuple())
         # normalize nonce
-        nonce = nonce[:128]
-        nonce = simplify(nonce).replace('/', '-')
-        nonce_dir = os.path.join(get_publisher().app_dir, 'nonces')
-        nonce_path = os.path.join(nonce_dir, nonce)
-        try:
-            try:
-                if not os.path.exists(nonce_dir):
-                    os.makedirs(nonce_dir)
-            except OSError as e:
-                if e.errno != errno.EEXIST:
-                    raise
-            os.close(os.open(nonce_path, os.O_CREAT | os.O_EXCL | os.O_WRONLY))
-            # touch file in the future for cleaning_nonces
-            os.utime(nonce_path, (until, until))
-        except OSError as e:
-            if e.errno != errno.EEXIST:
-                raise
+        nonce = simplify(nonce[:128]).replace('/', '-')
+        dummy, created = get_publisher().token_class.get_or_create(
+            type='nonce', id=nonce, expiration_delay=duration
+        )
+        if not created:
             raise AccessForbiddenError('nonce already used')
     get_request().signed = True
     return True
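All the nonce bookkeeping above collapses into a single get_or_create call: the first request creates a 'nonce' token keyed by the normalized nonce, and a replayed request finds the existing row, gets created == False, and is rejected. A self-contained sketch of those semantics, with sqlite3 standing in for the token table and a primary-key constraint making the insert-if-absent explicit (illustrative names, not wcs code; the diff's get_or_create does a select-then-create instead):

    import sqlite3
    import time


    def check_nonce(conn, nonce, duration=30):
        """Return True the first time a nonce is seen, False on replay."""
        try:
            # the primary key makes this an atomic insert-if-absent
            conn.execute(
                'INSERT INTO tokens (id, type, expiration) VALUES (?, ?, ?)',
                (nonce, 'nonce', time.time() + duration),
            )
            conn.commit()
            return True
        except sqlite3.IntegrityError:
            return False  # row already there: 'nonce already used'


    conn = sqlite3.connect(':memory:')
    conn.execute('CREATE TABLE tokens (id TEXT PRIMARY KEY, type TEXT, expiration REAL)')
    assert check_nonce(conn, 'abc123') is True   # first use accepted
    assert check_nonce(conn, 'abc123') is False  # replay rejected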
@@ -851,11 +851,9 @@ class NamedDataSource(XmlStorableObject):
 
         if token_context:
             token_context['session_id'] = get_session().id
-            token, created = get_publisher().token_class.get_or_create(
+            token, dummy = get_publisher().token_class.get_or_create(
                 type='autocomplete', context=token_context
             )
-            if created:
-                token.store()
             return '/api/autocomplete/%s' % token.id
 
         return None
@@ -866,11 +864,9 @@ class NamedDataSource(XmlStorableObject):
         new_url = self.get_variadic_url()
         if new_url != url:
             token_context = {'session_id': get_session().id, 'url': new_url, 'slug': self.slug}
-            token, created = get_publisher().token_class.get_or_create(
+            token, dummy = get_publisher().token_class.get_or_create(
                 type='autocomplete', context=token_context
            )
-            if created:
-                token.store()
             return '/api/geojson/%s' % token.id
         return '/api/geojson/%s' % self.slug
 
@@ -598,22 +598,6 @@ class QommonPublisher(Publisher):
             return
         cls.cronjobs.append(cronjob)
 
-    def clean_nonces(self, delta=60, now=None, **kwargs):
-        nonce_dir = os.path.join(get_publisher().app_dir, 'nonces')
-        if not os.path.exists(nonce_dir):
-            return
-        cleaning_lock_file = os.path.join(self.app_dir, 'cleaning_nonces.lock')
-        try:
-            now = now or time.time()
-            with locket.lock_file(cleaning_lock_file, timeout=0):
-                for nonce in os.listdir(nonce_dir):
-                    nonce_path = os.path.join(nonce_dir, nonce)
-                    # we need delta so that os.utime in is_url_signed has time to update file timestamp
-                    if delta < now - os.stat(nonce_path)[8]:
-                        os.unlink(nonce_path)
-        except locket.LockError:
-            pass
-
     def clean_sessions(self, **kwargs):
         cleaning_lock_file = os.path.join(self.app_dir, 'cleaning_sessions.lock')
         try:
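With nonces stored as expiring token rows, the dedicated cron job, its locket lock file, and the os.utime mtime trick all become unnecessary; expired entries are swept by the existing clean_tokens job instead (see the review thread at the end of this page). Conceptually the sweep shrinks to one statement instead of a stat() per file; a sketch, again with sqlite3 as a stand-in, reusing the tokens table from the sketch above (not the actual clean_tokens implementation):

    import time


    def sweep_expired_tokens(conn):
        # expired rows are dropped in one statement instead of stat()ing files
        cur = conn.execute('DELETE FROM tokens WHERE expiration < ?', (time.time(),))
        conn.commit()
        return cur.rowcount  # number of expired tokens removed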
@@ -711,7 +695,6 @@ class QommonPublisher(Publisher):
     @classmethod
     def register_cronjobs(cls):
         cls.register_cronjob(CronJob(cls.clean_sessions, minutes=range(0, 60, 5), name='clean_sessions'))
-        cls.register_cronjob(CronJob(cls.clean_nonces, minutes=range(0, 60, 5), name='clean_nonces'))
         cls.register_cronjob(CronJob(cls.clean_afterjobs, minutes=[0], name='clean_afterjobs'))
         cls.register_cronjob(CronJob(cls.clean_tokens, minutes=[0], name='clean_tokens'))
         cls.register_cronjob(CronJob(cls.clean_tempfiles, minutes=[0], name='clean_tempfiles'))
@@ -31,10 +31,10 @@ class Token(StorableObject):
     expiration = None
     context = None
 
-    def __init__(self, expiration_delay=86400, size=16, chars=None):
-        super().__init__(self.get_new_id(size=size, chars=chars))
+    def __init__(self, expiration_delay=None, size=16, chars=None, id=None):
+        super().__init__(id or self.get_new_id(size=size, chars=chars))
         if expiration_delay:
-            self.set_expiration_delay(expiration_delay)
+            self.set_expiration_delay(expiration_delay or 86400)
 
     def set_expiration_delay(self, expiration_delay):
         expiration_delay = min(expiration_delay, self.MAX_DELAY * 86400)  # max 1 year.
@@ -50,13 +50,20 @@ class Token(StorableObject):
         return id
 
     @classmethod
-    def get_or_create(cls, type, context):
+    def get_or_create(cls, type, id=None, context=None, expiration_delay=None):
         try:
-            return (cls.select(clause=[Equal('type', type), Equal('context', context)])[0], False)
+            clauses = [Equal('type', type)]
+            if id is not None:
+                clauses.append(Equal('id', id))
+            elif context is not None:
+                clauses.append(Equal('context', context))
+            return (cls.select(clause=clauses)[0], False)
         except (IndexError, KeyError):
-            token = cls()
+            token = cls(id=id, expiration_delay=expiration_delay)
+            token.id = id
             token.type = type
             token.context = context
+            token.store()
             return (token, True)
 
     def migrate(self):
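To make the dual lookup concrete: nonces are fetched by their caller-supplied id, autocomplete/geojson tokens by their context, and in both cases the second element of the returned tuple tells the caller whether the token already existed, which is exactly what is_url_signed needs and why the explicit token.store() calls disappear from NamedDataSource above. A toy in-memory equivalent, assuming nothing about wcs internals beyond what this diff shows:

    import secrets


    class Token:
        _store = {}  # id -> Token; a dict stands in for the SQL table

        def __init__(self, expiration_delay=None, id=None):
            self.id = id or secrets.token_hex(8)
            self.expiration_delay = expiration_delay or 86400
            self.type = None
            self.context = None

        def store(self):
            self._store[self.id] = self

        @classmethod
        def get_or_create(cls, type, id=None, context=None, expiration_delay=None):
            for token in cls._store.values():
                if token.type != type:
                    continue
                if id is not None and token.id == id:
                    return (token, False)
                if id is None and context is not None and token.context == context:
                    return (token, False)
            token = cls(id=id, expiration_delay=expiration_delay)
            token.type = type
            token.context = context
            token.store()
            return (token, True)


    # nonce-style lookup by id: the second call reports the token already exists
    dummy, created = Token.get_or_create(type='nonce', id='abc123', expiration_delay=30)
    assert created
    dummy, created = Token.get_or_create(type='nonce', id='abc123', expiration_delay=30)
    assert not created

    # autocomplete-style lookup by context
    token, created = Token.get_or_create(type='autocomplete', context={'url': 'x'})
    assert created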
@@ -4949,7 +4949,7 @@ def get_period_total(
 # latest migration, number + description (description is not used
 # programmaticaly but will make sure git conflicts if two migrations are
 # separately added with the same number)
-SQL_LEVEL = (76, 'add index to workflow traces table')
+SQL_LEVEL = (77, 'use token table for nonces')
 
 
 def migrate_global_views(conn, cur):
@@ -5256,6 +5256,13 @@ def migrate():
         # 75 (part 2): migrate to dedicated workflow traces table
         set_reindex('workflow_traces_migration', 'needed', conn=conn, cur=cur)
 
+    if sql_level < 77:
+        # 77: use token table for nonces
+        # it uses the existing tokens table, this "migration" is just to remove old files.
+        nonces_dir = os.path.join(get_publisher().app_dir, 'nonces')
+        if os.path.exists(nonces_dir):
+            shutil.rmtree(nonces_dir, ignore_errors=True)
+
     if sql_level != SQL_LEVEL[0]:
         cur.execute(
             '''UPDATE wcs_meta SET value = %s, updated_at=NOW() WHERE key = %s''',
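The level-77 step touches no table, since the tokens table already exists; it only removes the now-dead nonces directory, and the sql_level gate guarantees it runs at most once. A minimal sketch of that run-once gating pattern, with sqlite3 standing in for wcs_meta (illustrative, not the wcs migrate() function):

    import os
    import shutil
    import sqlite3

    SQL_LEVEL = 77  # latest migration level, as in the hunk above


    def migrate(conn, app_dir):
        # assumes the sql_level row exists, as it would in wcs_meta
        row = conn.execute("SELECT value FROM meta WHERE key = 'sql_level'").fetchone()
        sql_level = int(row[0])
        if sql_level < 77:
            # 77: nonces moved to the token table; just remove old on-disk files
            shutil.rmtree(os.path.join(app_dir, 'nonces'), ignore_errors=True)
        if sql_level != SQL_LEVEL:
            conn.execute("UPDATE meta SET value = ? WHERE key = 'sql_level'", (SQL_LEVEL,))
            conn.commit()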
Review thread on the removed clean_nonces cron job (lguerin marked this conversation as resolved):

don't we want to clean up the expired entries anyway?
(sentence got cut off :/ )

They are cleaned by clean_tokens, which runs only once an hour instead of every five minutes, but that's fine.

ok