diff --git a/.travis.yml b/.travis.yml index f623652c5..ee62681e9 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,11 +2,15 @@ cache: directories: - /home/travis/virtualenv before_install: - - mysql -uroot -Dmysql -e 'UPDATE user SET password=PASSWORD("root") WHERE user="root"; FLUSH PRIVILEGES;' + - mysql -u root -e "CREATE USER 'inboxtest'@'%' IDENTIFIED BY 'inboxtest'" + - mysql -u root -e "CREATE USER 'inboxtest'@'localhost' IDENTIFIED BY 'inboxtest'" + - mysql -u root -e "GRANT ALL PRIVILEGES ON *.* TO 'inboxtest'@'%'" + - mysql -u root -e "GRANT ALL PRIVILEGES ON *.* TO 'inboxtest'@'localhost'" + install: - sudo -H pip install flake8 - sudo -H ./setup.sh | awk '{ print strftime("%Y-%m-%d %H:%M:%S"), $0; fflush(); }' script: - flake8 --select=F inbox - pylint -d all -e w0631 inbox - - NYLAS_ENV=test py.test tests -m "not networkrequired" + - NYLAS_ENV=test py.test inbox/test -m "not networkrequired" diff --git a/MANIFEST.in b/MANIFEST.in index 0e6b194e8..a1df91d43 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -9,18 +9,20 @@ include *.yml include LICENSE include Vagrantfile include alembic.ini -include runtests include tox.ini recursive-include arclib *.php recursive-include bin *.sh recursive-include debian *.triggers recursive-include migrations *.mako recursive-include migrations *.py -recursive-include tests *.agignore -recursive-include tests *.gitignore -recursive-include tests *.ics -recursive-include tests *.ini -recursive-include tests *.jpg -recursive-include tests *.py -recursive-include tests *.sql -recursive-include tests *.wav +recursive-include inbox *.agignore +recursive-include inbox *.ics +recursive-include inbox *.ini +recursive-include inbox *.jpg +recursive-include inbox *.py +recursive-include inbox *.sql +recursive-include inbox *.wav +recursive-include inbox *.txt +recursive-include inbox *.json +recursive-include inbox *.key +recursive-include inbox *.pem diff --git a/bin/balance-fleet b/bin/balance-fleet new file mode 100644 index 000000000..495a0da67 --- /dev/null +++ b/bin/balance-fleet @@ -0,0 +1,201 @@ +#!/usr/bin/env python +from gevent import monkey +monkey.patch_all() + +import click +import json +import operator +import random +import time + +from munkres import Munkres, make_cost_matrix +from nylas.logging import get_logger, configure_logging +configure_logging() +log = get_logger() + +from inbox.config import config +from inbox.scheduling.deferred_migration import (DeferredAccountMigration, + DeferredAccountMigrationExecutor) +from inbox.models.session import global_session_scope +from inbox.models.account import Account +from inbox.util import fleet +from inbox.models.session import session_scope + +# How long we should take to migrate all accounts (in seconds). +ACCOUNT_MIGRATION_TIMESPAN = 15 * 60 # 15 minutes + + +def actual_hostname(hostname): + # A little hack for running the rebalance script locally in a dev VM + if hostname == 'localhost': + return 'precise64' + return hostname + + +def jitter_for_deadline(timespan): + min_delay = 10 + max_delay = timespan + return (random.random() * (max_delay - min_delay)) + min_delay + + +def is_account_on_debug_host(account_id, debug_hosts): + with session_scope(account_id) as db_session: + sync_host = db_session.query(Account.sync_host).get(account_id) + return sync_host in debug_hosts + + +def partition_accounts(load_per_account, num_buckets): + # Partition equitably in n-buckets. 
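# (Editor's worked example, not part of the patch: with toy loads
#  {'A': 5.0, 'B': 4.0, 'C': 3.0, 'D': 2.0} and num_buckets=2, the greedy loop
#  below seeds one bucket per heaviest account -- [A] and [B] -- then always
#  drops the next-largest load into the currently lighter bucket, returning
#  [['A', 'D'], ['B', 'C']] with bucket totals 7.0 and 7.0.)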
+ # http://stackoverflow.com/a/6670011 + sorted_loads = sorted(load_per_account.items(), key=operator.itemgetter(1), reverse=True) + buckets = [[] for i in range(num_buckets)] + bucket_totals = [0.0 for i in range(num_buckets)] + + i = 0 + for account_id, load in sorted_loads[0:num_buckets]: + buckets[i].append(account_id) + bucket_totals[i] += load + i += 1 + + for account, load in sorted_loads[num_buckets:]: + # Find the less loaded bucket: + i = bucket_totals.index(min(bucket_totals)) + buckets[i].append(account) + bucket_totals[i] += load + return buckets + + +def get_account_hosts(): + with global_session_scope() as db_session: + return dict((str(id_), host) for id_, host in + db_session.query(Account.id, Account.sync_host). + filter(Account.sync_should_run)) + + +def do_minimize_migrations(hosts, buckets, should_optimize=True): + # Our task is to find a bipartite matching between buckets and hosts that + # maximizes the number of Accounts that are already assigned to the correct + # sync host. To do this we use the Hungarian algorithm which computes a + # bipartite matching between n workers and n tasks such that the overall + # cost is minimized (see https://en.wikipedia.org/wiki/Hungarian_algorithm). + # Luckily there's a python library (munkres) that implements this algorithm + # for us :-) Since this algorithm minimizes cost we must first build our + # profit matrix and then convert it into a cost matrix. + account_hosts = get_account_hosts() + profit_matrix = [] + max_num_present = 0 + sync_procs = [] + for host in hosts: + for i in range(host['num_procs']): + sync_procs.append('{}:{}'.format(actual_hostname(host['name']), i)) + + # Construct the profit matrix. Each row corresponds to a bucket and each + # column within that row corresponds to the number of items in that bucket + # that are currently assigned to the corresponding sync host. + for bucket in buckets: + row = [] + for proc_id in sync_procs: + num_present = 0 + for account_id in bucket: + if account_hosts.get(account_id) == proc_id: + num_present += 1 + # We add 1 because the munkres library can't really handle matrices + # with 0 values :-/ This won't change the ultimate answer, however. + num_present += 1 + row.append(num_present) + max_num_present = max(num_present, max_num_present) + profit_matrix.append(row) + + indexes = None + if should_optimize: + # We add 1 because the munkres library can't really handle matrices + # with 0 values :-/ This won't change the ultimate answer, however. + max_num_present += 1 + cost_matrix = make_cost_matrix(profit_matrix, lambda cost: max_num_present - cost) + + m = Munkres() + indexes = m.compute(cost_matrix) + else: + indexes = [(i, i) for i in range(len(sync_procs))] + + # Now that we have the optimal solution we need to reorder the original + # buckets to match to their corresponding hosts based on the results. 
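# Editor's sketch (illustrative only, not part of the patch): the profit->cost
# inversion and assignment used above, reduced to a standalone toy. Munkres and
# make_cost_matrix are the real munkres API; the numbers are made up.
#
#     from munkres import Munkres, make_cost_matrix
#     profit = [[3, 1],  # bucket 0: 3 accounts already on proc 0, 1 on proc 1
#               [2, 4]]  # bucket 1: 2 accounts already on proc 0, 4 on proc 1
#     ceiling = max(max(row) for row in profit) + 1
#     cost = make_cost_matrix(profit, lambda p: ceiling - p)
#     assignment = Munkres().compute(cost)               # [(0, 0), (1, 1)]
#     kept = sum(profit[r][c] for r, c in assignment)    # 7 accounts stay put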
+ result_buckets = [None for _ in indexes] + total_profit = 0 + total_accounts = 0 + for row, column in indexes: + total_profit += profit_matrix[row][column] - 1 + result_buckets[column] = buckets[row] + total_accounts += len(buckets[row]) + log.info("Accounts already on the correct hosts:", + correct_accounts=total_profit, + total_accounts=total_accounts, + correct_percent=float(total_profit) / float(total_accounts) * 100.0) + return result_buckets + + +def migrate_accounts(zone, hosts, buckets, timespan): + start_time = time.time() + executor = DeferredAccountMigrationExecutor() # Just for its Redis thingy + + bucket_idx = 0 + for host_idx, host in enumerate(hosts): + host['name'] = actual_hostname(host['name']) + + for process_idx in range(host['num_procs']): + instance = '{}:{}'.format(host['name'], process_idx) + bucket = buckets[bucket_idx] + bucket_idx += 1 + + for account_id in bucket: + delay = jitter_for_deadline(timespan) + deadline = start_time + delay + log.info("Sync load balancer migrating Account", + zone=zone, + account_id=account_id, + host=instance, + delay=delay) + dam = DeferredAccountMigration(deadline, account_id, instance) + dam.save(executor.redis) + + +def balance_zone(zone, normal_hosts, debug_hosts, account_loads, timespan, minimize_migrations, dry_run): + num_buckets = sum([host['num_procs'] for host in normal_hosts]) + account_loads = {account_id: load for account_id, load in account_loads.items() + if not is_account_on_debug_host(account_id, debug_hosts)} + buckets = partition_accounts(account_loads, num_buckets) + buckets = do_minimize_migrations(normal_hosts, buckets, minimize_migrations) + if dry_run: + print "Would reassign accounts in zone {} like this:".format(zone) + for bucket in buckets: + bucket_load = 0 + for account_id in bucket: + bucket_load += account_loads[account_id] + print "\t{}: {}".format(bucket_load, bucket) + return + migrate_accounts(zone, normal_hosts, buckets, timespan) + + +@click.command() +@click.option('--level', default='staging') +@click.option('--dry-run', is_flag=True, default=False) +@click.option('--timespan', default=ACCOUNT_MIGRATION_TIMESPAN) +@click.option('--minimize-migrations/--no-minimize-migrations', default=True) +@click.argument('account-loads') +def main(dry_run, level, timespan, minimize_migrations, account_loads): + zones = {h.get('ZONE') for h in config['DATABASE_HOSTS']} + load_per_account = {} + with open(account_loads) as f: + load_per_account = json.load(f) + for zone in zones: + loads = load_per_account.get(zone) + if loads is None: + loads = load_per_account['null'] + hosts = fleet.get_sync_hosts_in_zone(zone, level) + normal_hosts = [h for h in hosts if not h['debug']] + debug_hosts = set(h for h in hosts if h['debug']) + balance_zone(zone, normal_hosts, debug_hosts, loads, timespan, minimize_migrations, dry_run) + + +if __name__ == '__main__': + main() diff --git a/bin/check-attachments b/bin/check-attachments new file mode 100755 index 000000000..02184dc44 --- /dev/null +++ b/bin/check-attachments @@ -0,0 +1,92 @@ +#!/usr/bin/env python +# Check that we can fetch attachments for 99.9% of our syncing accounts. 
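# Editor's sketch (illustrative only, not part of the patch): the shape of this
# script -- fan the accounts out over a gevent Pool, then merge the per-account
# counters -- as a self-contained toy. _sketch_process and its data are
# placeholders, not names from this repository.
from collections import Counter
from gevent.pool import Pool as _SketchPool

def _sketch_process(item):
    # Each worker returns a small counter dict, like process_account below.
    return Counter(successes=1) if item % 2 == 0 else Counter(null_failures=1)

_totals = Counter()
for _counters in _SketchPool(size=100).map(_sketch_process, range(10)):
    _totals.update(_counters)
# _totals == Counter({'successes': 5, 'null_failures': 5})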
+from gevent import monkey +monkey.patch_all() + +import datetime +import click +import gevent +from gevent.pool import Pool +import traceback + +from collections import defaultdict +from inbox.models import Account, Block +from inbox.crispin import connection_pool +from inbox.s3.base import get_raw_from_provider +from inbox.s3.exc import EmailFetchException, TemporaryEmailFetchException +from sqlalchemy.sql.expression import func + +from nylas.logging import get_logger, configure_logging +from inbox.models.backends.generic import GenericAccount +from inbox.models.session import (session_scope, global_session_scope, + session_scope_by_shard_id) + +configure_logging() +log = get_logger(purpose='separator-backfix') + +NUM_MESSAGES = 10 + + +def process_account(account_id): + ret = defaultdict(int) + + try: + with session_scope(account_id) as db_session: + acc = db_session.query(Account).get(account_id) + db_session.expunge(acc) + + one_month_ago = datetime.datetime.utcnow() - datetime.timedelta(days=30) + + for i in range(NUM_MESSAGES): + with session_scope(account_id) as db_session: + block = db_session.query(Block).filter( + Block.namespace_id == acc.namespace.id, + Block.created_at < one_month_ago).order_by( + func.rand()).limit(1).first() + + if block is None: + continue + + if len(block.parts) == 0: + ret['null_failures'] += 1 + continue + + message = block.parts[0].message + raw_mime = get_raw_from_provider(message) + + if raw_mime != '': + ret['successes'] += 1 + else: + ret['null_failures'] += 1 + except Exception as e: + ret[type(e).__name__] += 1 + + return ret + + +@click.command() +@click.option('--num-accounts', type=int, default=1500) +def main(num_accounts): + with global_session_scope() as db_session: + accounts = db_session.query(Account).filter( + Account.sync_should_run == True).order_by(func.rand()).limit( + num_accounts).all() + + accounts = [acc.id for acc in accounts][:num_accounts] + db_session.expunge_all() + + pool = Pool(size=100) + results = pool.map(process_account, accounts) + + global_results = dict() + for ret in results: + for key in ret: + if key not in global_results: + global_results[key] = 0 + + global_results[key] += ret[key] + + print global_results + +if __name__ == '__main__': + main() diff --git a/bin/create-test-db b/bin/create-test-db index 5bfe8bec1..2ff75827e 100755 --- a/bin/create-test-db +++ b/bin/create-test-db @@ -1,6 +1,6 @@ #!/bin/bash set -e -mysql -uroot -proot -e "GRANT ALL PRIVILEGES ON test.* TO inboxtest@localhost IDENTIFIED BY 'inboxtest'" -mysql -uroot -proot -e "GRANT ALL PRIVILEGES ON test_1.* TO inboxtest@localhost IDENTIFIED BY 'inboxtest'" +mysql -uroot -proot -e "GRANT ALL PRIVILEGES ON synctest.* TO inboxtest@localhost IDENTIFIED BY 'inboxtest'" +mysql -uroot -proot -e "GRANT ALL PRIVILEGES ON synctest_1.* TO inboxtest@localhost IDENTIFIED BY 'inboxtest'" mysql -uroot -proot -e 'GRANT ALL PRIVILEGES ON `test%`.* TO inboxtest@localhost IDENTIFIED BY "inboxtest"' diff --git a/bin/deferred-migration-service b/bin/deferred-migration-service new file mode 100755 index 000000000..87dbd886c --- /dev/null +++ b/bin/deferred-migration-service @@ -0,0 +1,23 @@ +#!/usr/bin/env python +"""Watches a redis priority queue for deferred account migrations to execute.""" + +import gevent.monkey +gevent.monkey.patch_all() + +from setproctitle import setproctitle +from inbox.scheduling.deferred_migration import DeferredAccountMigrationExecutor +from nylas.logging import configure_logging +configure_logging() + + +def main(): + 
setproctitle('deferred-migration-service') + print "Starting DeferredAccountMigrationExecutor..." + dame = DeferredAccountMigrationExecutor() + dame.start() + print "Done" + dame.join() + + +if __name__ == '__main__': + main() diff --git a/bin/detect-missing-sync-host b/bin/detect-missing-sync-host index 4dd8cdbf8..39e280c85 100755 --- a/bin/detect-missing-sync-host +++ b/bin/detect-missing-sync-host @@ -4,50 +4,29 @@ import click from sqlalchemy.orm import load_only -from inbox.scheduling.queue import QueueClient - from inbox.models.account import Account from inbox.models.session import global_session_scope @click.command() -@click.option('--fix', is_flag=True, default=False, help='use with caution') -def main(fix): +def main(): """ Detects accounts with sync_state and sync_host inconsistent with sync_should_run bit. (At one point, this could happen if, say, an account was _started_ on a new host without being first stopped on its previous host.) - If --fix is specified, will make these bits consistent with what is in - the scheduler redis store. You MUST restart account syncs on all affected - processes after doing this in order to ensure that accounts have all - been started correctly! Otherwise it is possible that some accounts will - have a sync_host assigned but won't actually be running on the process, - and appear as dead accounts. - """ - qc_a = QueueClient('us-west-2a') - qc_b = QueueClient('us-west-2b') - - assigned_a = qc_a.assigned() - assigned_b = qc_b.assigned() - with global_session_scope() as db_session: for acc in db_session.query(Account).options( - load_only('sync_state', 'sync_should_run', 'sync_host'))\ + load_only('sync_state', 'sync_should_run', 'sync_host', 'desired_sync_host'))\ .filter(Account.sync_state == 'stopped'): - if acc.id in assigned_a or acc.id in assigned_b: - sync_host = assigned_a.get(acc.id) or assigned_b.get(acc.id) + if acc.desired_sync_host is not None: print "account {} assigned to {} but has sync_state 'stopped'"\ " ({}, {})"\ - .format(acc.id, sync_host, + .format(acc.id, acc.sync_host, acc.sync_should_run, acc.sync_host) - if fix: - acc.sync_host = sync_host - if fix: - db_session.commit() if __name__ == '__main__': diff --git a/bin/get-account-loads b/bin/get-account-loads new file mode 100644 index 000000000..1e56ce05a --- /dev/null +++ b/bin/get-account-loads @@ -0,0 +1,95 @@ +#!/usr/bin/env python +from gevent import monkey +monkey.patch_all() + +import click +import json +import requests + +from inbox.config import config +from inbox.ignition import engine_manager +from inbox.mailsync.service import MAX_ACCOUNTS_PER_PROCESS +from inbox.models.account import Account +from inbox.models.session import session_scope_by_shard_id +from inbox.util import fleet + +NUM_PROCESSES_PER_HOST = 16 + + +def runnable_accounts_in_zone(zone): + accounts = set() + for key in engine_manager.shards_for_zone(zone): + with session_scope_by_shard_id(key) as db_session: + accounts.update( + id_ for id_, in db_session.query(Account.id).filter( + Account.sync_should_run)) + return accounts + + +def get_account_loads(zone, hosts): + load_per_account = dict() + total_times_per_account = dict() + for host in hosts: + for i in range(NUM_PROCESSES_PER_HOST): + url = "http://{}:{}/load".format(host['ip_address'], 16384 + i) + try: + load_profile = requests.get(url).json() + except Exception: + continue + + total_time = load_profile['total_time'] + for run_time in load_profile['times']: + # run_time is a string of the form :id:. 
e.g: + # "easfoldersyncengine:1:22:2": 0.0002319812774658203 + + if run_time in ['hub', 'null']: + # This is the gevent hub, which is called when the process + # is idle. + continue + + splat = run_time.split(':') + account_id = int(splat[1]) + + account_time = load_profile['times'][run_time] + + if account_id not in load_per_account: + load_per_account[account_id] = 0.0 + if account_id not in total_times_per_account: + total_times_per_account[account_id] = 0.0 + load_per_account[account_id] += account_time + total_times_per_account[account_id] += total_time + + # Scale the load based on the total time. + for account_id in load_per_account: + load_per_account[account_id] /= total_times_per_account[account_id] + + # Sometimes we're not able to find the load for a particular account. + # E.g. we might have been unable to contact the sync host it was on to ask + # about the account's load because the sync host was overloaded. In this + # situation we look at all runnable accounts and diff it with the set + # of accounts that we were able to get numbers on. We then assign those + # accounts a default load. While this isn't ideal, it's better than ignoring + # those missing accounts entirely. + default_load = 1.0 / MAX_ACCOUNTS_PER_PROCESS + runnable_accounts = runnable_accounts_in_zone(zone) + found_accounts = {account_id for account_id in load_per_account} + missing_accounts = runnable_accounts - found_accounts + for account_id in missing_accounts: + load_per_account[account_id] = default_load + return load_per_account + + +@click.command() +@click.option('--level', default='staging') +def main(level): + zones = {h.get('ZONE') for h in config['DATABASE_HOSTS']} + zone_map = {} + for zone in zones: + hosts = fleet.get_sync_hosts_in_zone(zone, level) + load_per_account = get_account_loads(zone, hosts) + zone_map[zone] = load_per_account + print json.dumps(zone_map) + + +if __name__ == '__main__': + main() diff --git a/bin/get-accounts-for-host b/bin/get-accounts-for-host new file mode 100644 index 000000000..78a9880fb --- /dev/null +++ b/bin/get-accounts-for-host @@ -0,0 +1,22 @@ +#!/usr/bin/env python + +import click + +from inbox.models.session import global_session_scope +from inbox.models.account import Account + + +@click.command() +@click.argument('hostname') +def main(hostname): + with global_session_scope() as db_session: + account_ids = db_session.query(Account.id).filter(Account.sync_host == hostname) + + print "Accounts being synced by {}:".format(hostname) + for account_id in account_ids: + print account_id[0] + db_session.commit() + + +if __name__ == "__main__": + main() diff --git a/bin/get-object b/bin/get-object index 132149f36..629815fb8 100755 --- a/bin/get-object +++ b/bin/get-object @@ -55,7 +55,8 @@ def main(type, id, public_id, account_id, namespace_id, readwrite): qu = qu.filter(cls.public_id == public_id) elif id: qu = qu.filter(cls.id == id) - elif account_id: + + if account_id: qu = qu.filter(cls.account_id == account_id) elif namespace_id: qu = qu.filter(cls.namespace_id == namespace_id) diff --git a/bin/inbox-api b/bin/inbox-api index 6b56cf6d0..3e09c6606 100755 --- a/bin/inbox-api +++ b/bin/inbox-api @@ -24,6 +24,7 @@ from gevent.pywsgi import WSGIServer from nylas.api.wsgi import NylasWSGIHandler from nylas.logging import get_logger, configure_logging +from inbox.mailsync.frontend import SyncbackHTTPFrontend from inbox.util.startup import preflight, load_overrides syncback = None @@ -36,10 +37,14 @@ http_server = None 'non-production features.') 
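# (Editor's note, not part of the patch: the --enable-profiler/--enable-tracer
#  flags added here and in bin/inbox-start and bin/syncback-service all follow
#  the same wiring -- flip DEBUG_PROFILING_ON in the config, then start an HTTP
#  frontend (SyncbackHTTPFrontend / SyncHTTPFrontend) for the profiler and
#  stuck-greenlet tracer on a side port: port + 1 for the API, 16384 +
#  process_num for the sync and syncback services. A hypothetical invocation:
#  `bin/inbox-api --enable-profiler -p 5555` would serve it on 5556.)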
@click.option('--start-syncback/--no-start-syncback', default=True, help='Also start the syncback service') +@click.option('--enable-tracer/--disable-tracer', default=True, + help='Disables the stuck greenlet tracer') +@click.option('--enable-profiler/--disable-profiler', default=False, + help='Enables the CPU profiler web API') @click.option('-c', '--config', default=None, help='Path to JSON configuration file.') @click.option('-p', '--port', default=5555, help='Port to run flask app on.') -def main(prod, start_syncback, config, port): +def main(prod, start_syncback, enable_tracer, config, port, enable_profiler): """ Launch the Nylas API service. """ level = os.environ.get('LOGLEVEL', inbox_config.get('LOGLEVEL')) configure_logging(log_level=level) @@ -49,14 +54,15 @@ def main(prod, start_syncback, config, port): load_overrides(config_path) if prod: - start(port, start_syncback) + start(port, start_syncback, enable_tracer, enable_profiler) else: preflight() from werkzeug.serving import run_with_reloader - run_with_reloader(lambda: start(port, start_syncback)) + run_with_reloader(lambda: start(port, start_syncback, enable_tracer, + enable_profiler)) -def start(port, start_syncback): +def start(port, start_syncback, enable_tracer, enable_profiler): # We need to import this down here, because this in turn imports # ignition.engine, which has to happen *after* we read any config overrides # for the database parameters. Boo for imports with side-effects. @@ -65,7 +71,14 @@ def start(port, start_syncback): if start_syncback: # start actions service from inbox.transactions.actions import SyncbackService + + if enable_profiler: + inbox_config['DEBUG_PROFILING_ON'] = True + enable_profiler_api = inbox_config.get('DEBUG_PROFILING_ON') + syncback = SyncbackService(0, 0, 1) + profiling_frontend = SyncbackHTTPFrontend(int(port) + 1, enable_tracer, enable_profiler_api) + profiling_frontend.start() syncback.start() nylas_logger = get_logger() diff --git a/bin/inbox-start b/bin/inbox-start index 7ccacdbc7..49fd90ba8 100755 --- a/bin/inbox-start +++ b/bin/inbox-start @@ -1,5 +1,4 @@ #!/usr/bin/env python -import gevent from gevent import monkey monkey.patch_all() @@ -27,8 +26,7 @@ except ImportError: from inbox.util.startup import preflight from nylas.logging import get_logger, configure_logging from inbox.mailsync.service import SyncService -from inbox.mailsync.frontend import HTTPFrontend -from inbox.scheduling.queue import QueuePopulator +from inbox.mailsync.frontend import SyncHTTPFrontend # Set a default timeout for sockets. SOCKET_TIMEOUT = 2 * 60 @@ -41,12 +39,14 @@ socket.setdefaulttimeout(SOCKET_TIMEOUT) 'non-production features.') @click.option('--enable-tracer/--disable-tracer', default=True, help='Disables the stuck greenlet tracer') +@click.option('--enable-profiler/--disable-profiler', default=False, + help='Enables the CPU profiler web API') @click.option('-c', '--config', default=None, help='Path to JSON configuration file.') @click.option('--process_num', default=0, help="This process's number in the process group: a unique " "number satisfying 0 <= process_num < total_processes.") -def main(prod, enable_tracer, config, process_num): +def main(prod, enable_tracer, enable_profiler, config, process_num): """ Launch the Nylas sync service. """ level = os.environ.get('LOGLEVEL', inbox_config.get('LOGLEVEL')) configure_logging(log_level=level) @@ -59,9 +59,6 @@ def main(prod, enable_tracer, config, process_num): if not prod: preflight() - # Start the queue populator agent for development ease. 
- populator = QueuePopulator(None) - gevent.spawn(populator.run) total_processes = int(os.environ.get('MAILSYNC_PROCESSES', 1)) @@ -89,12 +86,17 @@ def main(prod, enable_tracer, config, process_num): Use CTRL-C to stop. """ + if enable_profiler: + inbox_config['DEBUG_PROFILING_ON'] = True + port = 16384 + process_num - enable_profiler = inbox_config.get('DEBUG_PROFILING_ON') + enable_profiler_api = inbox_config.get('DEBUG_PROFILING_ON') + process_identifier = '{}:{}'.format(platform.node(), process_num) sync_service = SyncService(process_identifier, process_num) - http_frontend = HTTPFrontend(sync_service, port, enable_tracer, - enable_profiler) + http_frontend = SyncHTTPFrontend(sync_service, port, enable_tracer, + enable_profiler_api) + sync_service.register_pending_avgs_provider(http_frontend) http_frontend.start() sync_service.run() diff --git a/bin/restart-forgotten-accounts b/bin/restart-forgotten-accounts new file mode 100644 index 000000000..cfbd8027c --- /dev/null +++ b/bin/restart-forgotten-accounts @@ -0,0 +1,65 @@ +#!/usr/bin/env python + +import gevent +import gevent.monkey +gevent.monkey.patch_all() + +from inbox.ignition import engine_manager +from inbox.mailsync.service import shared_sync_event_queue_for_zone +from inbox.models.account import Account +from inbox.models.session import global_session_scope +from inbox.util.concurrency import retry_with_logging +from nylas.logging import get_logger, configure_logging +configure_logging() +log = get_logger() + +accounts_without_sync_host = set() + + +def check_accounts(): + global accounts_without_sync_host + poll_interval = 30 + + with global_session_scope() as db_session: + not_syncing_accounts = set(db_session.query(Account.id). + filter(Account.sync_should_run, + Account.sync_host == None)) # noqa + still_not_syncing_accounts = accounts_without_sync_host & not_syncing_accounts + + for account_id in still_not_syncing_accounts: + account = db_session.query(Account).with_for_update().get(account_id) + + # The Account got claimed while we were checking. + if account.sync_host is not None: + not_syncing_accounts.remove(account.id) + db_session.commit() + continue + + # Notify the shared sync queue if desired_sync_host was already None. + # We have to do this because the post commit callback won't fire if + # the object isn't dirty. By clearing the desired_sync_host we allow + # any worker to claim the account. 
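# (Editor's note, not part of the patch, unpacking the comment above: when
#  desired_sync_host is already None the Account row stays clean, so the
#  post-commit hook would not fire and the 'migrate' event has to be pushed to
#  the shared queue by hand; otherwise clearing desired_sync_host dirties the
#  row and the normal post-commit notification is expected to do it.)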
+ if account.desired_sync_host is None: + queue = shared_sync_event_queue_for_zone(engine_manager.zone_for_id(account.id)) + queue.send_event({'event': 'migrate', 'id': account.id}) + else: + account.desired_sync_host = None + + log.warning("Account appears to be unclaimed, " + "clearing desired_sync_host, " + "notifying shared sync queue", + account_id=account.id) + db_session.commit() + + accounts_without_sync_host = not_syncing_accounts + + gevent.sleep(poll_interval) + + +def main(): + while True: + retry_with_logging(check_accounts, log) + + +if __name__ == "__main__": + main() diff --git a/bin/set-desired-host b/bin/set-desired-host new file mode 100644 index 000000000..f94172070 --- /dev/null +++ b/bin/set-desired-host @@ -0,0 +1,35 @@ +#!/usr/bin/env python + +import click + +from inbox.models.session import global_session_scope +from inbox.models.account import Account + + +@click.command() +@click.argument('account_id') +@click.option('--desired-host') +@click.option('--dry-run', is_flag=True) +@click.option('--toggle-sync', is_flag=True) +def main(account_id, desired_host, dry_run, toggle_sync): + with global_session_scope() as db_session: + account = db_session.query(Account).get(int(account_id)) + + print "Before sync host: {}".format(account.sync_host) + print "Before desired sync host: {}".format(account.desired_sync_host) + print "Before sync should run: {}".format(account.sync_should_run) + + if dry_run: + return + account.desired_sync_host = desired_host + if toggle_sync: + account.sync_should_run = not account.sync_should_run + + print "After sync host: {}".format(account.sync_host) + print "After desired sync host: {}".format(account.desired_sync_host) + print "After sync should run: {}".format(account.sync_should_run) + db_session.commit() + + +if __name__ == "__main__": + main() diff --git a/bin/syncback-service b/bin/syncback-service index 504bc0bfc..ada597539 100755 --- a/bin/syncback-service +++ b/bin/syncback-service @@ -19,6 +19,7 @@ gevent_openssl.monkey_patch() from nylas.logging import configure_logging from inbox.config import config as inbox_config +from inbox.mailsync.frontend import SyncbackHTTPFrontend from inbox.transactions.actions import SyncbackService from inbox.util.startup import preflight, load_overrides @@ -35,7 +36,12 @@ from inbox.util.startup import preflight, load_overrides @click.option('--syncback-id', help="This sync instance's id: a unique number assigned to " "each syncback instance.") -def main(prod, config, process_num, syncback_id): +@click.option('--enable-tracer/--disable-tracer', default=True, + help='Disables the stuck greenlet tracer') +@click.option('--enable-profiler/--disable-profiler', default=False, + help='Enables the CPU profiler web API') +def main(prod, config, process_num, syncback_id, enable_tracer, + enable_profiler): """ Launch the actions syncback service. 
""" if syncback_id is None: syncback_id = 0 @@ -54,6 +60,16 @@ def main(prod, config, process_num, syncback_id): def start(): # Start the syncback service, and just hang out forever syncback = SyncbackService(syncback_id, process_num, total_processes) + + if enable_profiler: + inbox_config['DEBUG_PROFILING_ON'] = True + + port = 16384 + process_num + enable_profiler_api = inbox_config.get('DEBUG_PROFILING_ON') + frontend = SyncbackHTTPFrontend(port, enable_tracer, + enable_profiler_api) + frontend.start() + syncback.start() syncback.join() diff --git a/bin/unschedule-account-syncs b/bin/unschedule-account-syncs index 46659c43d..23e4e6bc6 100755 --- a/bin/unschedule-account-syncs +++ b/bin/unschedule-account-syncs @@ -1,29 +1,8 @@ #!/usr/bin/env python import click -import redis -from boto import ec2 - -from inbox.scheduling.queue import QueueClient - - -def get_zone_for_instance(hostname): - conn = ec2.connect_to_region('us-west-2') - - instances = [] - for r in conn.get_all_instances(): - for i in r.instances: - instances.append(i) - - instances = [i for i in instances if i.tags.get('Name') == hostname and - i.tags.get('Role') == 'sync'] - - if not instances: - raise Exception("No sync host with hostname '{}'".format(hostname)) - - assert len(instances) == 1 - - return instances[0].placement +from inbox.models.account import Account +from inbox.models.session import global_session_scope, session_scope @click.command() @@ -45,29 +24,35 @@ def main(dry_run, number, hostname, process): print 'Will not proceed' return - zone = get_zone_for_instance(hostname) - qc = QueueClient(zone) - # hosts in redis are of the format hostname: - if process is not None: - hostname = ':'.join([hostname, process]) - to_unschedule = [(account_id, host) for account_id, host in - qc.assigned().items() if host.startswith(hostname)] - if number: - to_unschedule = to_unschedule[:number] + if not dry_run: + message = 'It is unsafe to unassign hosts while mailsync processes are running. '\ + 'Have you shut down the appropriate mailsync processes on {}? [Y/n]'.format(hostname) + if raw_input(message).strip().lower() == 'n': + print 'Bailing out' + return - if to_unschedule: - if dry_run: - for account_id, host in to_unschedule: + with global_session_scope() as db_session: + if process is not None: + hostname = ':'.join([hostname, process]) + to_unschedule = db_session.query(Account.id). 
\ + filter(Account.sync_host.like('{}%'.format(hostname))) + if number: + to_unschedule = to_unschedule.limit(number) + to_unschedule = [id_ for id_, in to_unschedule.all()] + if number: + to_unschedule = to_unschedule[:number] + + for account_id in to_unschedule: + with session_scope(account_id) as db_session: + if dry_run: print "Would unassign", account_id - else: - for account_id, host in to_unschedule: - try: - print "Unassigning", account_id - qc.unassign(account_id, host) - except redis.exceptions.TimeoutError: - print "Couldn't unassign", account_id, "due to redis timeout" - else: - print "No syncs to unschedule for", hostname + else: + account = db_session.query(Account).get(account_id) + print "Unassigning", account.id + account.desired_sync_host = None + account.sync_host = None + db_session.commit() + if __name__ == '__main__': main() diff --git a/etc/config-dev.json b/etc/config-dev.json index ca136e19c..034283738 100644 --- a/etc/config-dev.json +++ b/etc/config-dev.json @@ -54,6 +54,9 @@ "ACCOUNT_QUEUE_REDIS_HOSTNAME": "localhost", "ACCOUNT_QUEUE_REDIS_DB": 3, +"EVENT_QUEUE_REDIS_HOSTNAME": "localhost", +"EVENT_QUEUE_REDIS_DB": 3, + "BASE_ALIVE_THRESHOLD": 480, "CONTACTS_ALIVE_THRESHOLD": 480, "EVENTS_ALIVE_THRESHOLD": 480, diff --git a/etc/config-test.json b/etc/config-test.json index f7abffdad..12c52df36 100644 --- a/etc/config-test.json +++ b/etc/config-test.json @@ -1,67 +1,62 @@ { -"MYSQL_DATABASE": "test", -"MYSQL_HOSTNAME": "localhost", -"MYSQL_PORT": 3306, - "DATABASE_HOSTS": [ { - "HOSTNAME": "localhost", + "HOSTNAME": "127.0.0.1", "PORT": 3306, "ZONE": "testzone", "SHARDS": [ { "ID": 0, - "SCHEMA_NAME": "test", + "SCHEMA_NAME": "synctest", "OPEN": true }, { "ID": 1, - "SCHEMA_NAME": "test_1", + "SCHEMA_NAME": "synctest_1", "OPEN": false } ] } ], +"DATABASE_USERS": { + "127.0.0.1": { + "USER": "inboxtest", + "PASSWORD": "inboxtest" + } +}, "SYNCBACK_ASSIGNMENTS": { - 0:[0, 1, 2, 3, 4, 5] - + 0: [0,1,2,3,4,5] }, "SYNC_STEAL_ACCOUNTS": true, - +"CONCURRENT_QUERY_LIMIT": 5, "DB_POOL_SIZE": 25, "LOGDIR": "/tmp/log", "LOGLEVEL": 10, -"TEST_LOGFILE": "/tmp/log/test.log", - -"REDIS_SHARDS": ["localhost", "127.0.0.1"], -"REDIS_PORT": 6379, "ACCOUNT_QUEUE_REDIS_HOSTNAME": "mockredis", "ACCOUNT_QUEUE_REDIS_DB": 1, +"EVENT_QUEUE_REDIS_HOSTNAME": "mockredis", +"EVENT_QUEUE_REDIS_DB": 1, + +"REDIS_SHARDS": ["127.0.0.1"], +"REDIS_PORT": 6379, + "BASE_ALIVE_THRESHOLD": 480, "CONTACTS_ALIVE_THRESHOLD": 480, "EVENTS_ALIVE_THRESHOLD": 480, "EAS_THROTTLED_ALIVE_THRESHOLD": 600, "EAS_PING_ALIVE_THRESHOLD": 780, -"BASE_DUMP": "data/base_dump.sql", - "GOOGLE_OAUTH_REDIRECT_URI": "urn:ietf:wg:oauth:2.0:oob", -"MS_LIVE_OAUTH_REDIRECT_URI": "https://login.live.com/oauth20_desktop.srf", - +"GOOGLE_OAUTH_CLIENT_ID": "986659776516-fg79mqbkbktf5ku10c215vdij918ra0a.apps.googleusercontent.com", +"GOOGLE_OAUTH_CLIENT_SECRET": "zgY9wgwML0kmQ6mmYHYJE05d", "STORE_MESSAGES_ON_S3": false, - "MSG_PARTS_DIRECTORY": "tests/data/parts", -"EMAIL_EXCEPTIONS": false, -"ENCRYPT_SECRETS": false, - -"ELASTICSEARCH_HOSTS": [{"host": "localhost", "port": 9200}], - "FEATURE_FLAGS": "ical_autoimport", "THROTTLE_DELETION": false, @@ -71,5 +66,9 @@ "MAILGUN_API_KEY": null, "NOTIFICATIONS_MAILGUN_DOMAIN": null, -"NOTIFICATIONS_MAILGUN_API_KEY": null +"NOTIFICATIONS_MAILGUN_API_KEY": null, + +"EMAIL_EXCEPTIONS": false, +"SENTRY_EXCEPTIONS": false, +"ENCRYPT_SECRETS": false } diff --git a/etc/db-config-test-phab.json b/etc/db-config-test-phab.json new file mode 100644 index 000000000..c53411bdb --- /dev/null +++ 
b/etc/db-config-test-phab.json @@ -0,0 +1,21 @@ +{ + "DATABASE_HOSTS": [ + { + "HOSTNAME": "127.0.0.1", + "PORT": 3306, + "ZONE": "testzone", + "SHARDS": [ + { + "ID": 0, + "SCHEMA_NAME": "synctest_phab", + "OPEN": true + }, + { + "ID": 1, + "SCHEMA_NAME": "synctest_1_phab", + "OPEN": false + } + ] + } + ] +} diff --git a/etc/db-config-test-prod.json b/etc/db-config-test-prod.json new file mode 100644 index 000000000..d10cabab5 --- /dev/null +++ b/etc/db-config-test-prod.json @@ -0,0 +1,21 @@ +{ + "DATABASE_HOSTS": [ + { + "HOSTNAME": "127.0.0.1", + "PORT": 3306, + "ZONE": "testzone", + "SHARDS": [ + { + "ID": 0, + "SCHEMA_NAME": "synctest_prod", + "OPEN": true + }, + { + "ID": 1, + "SCHEMA_NAME": "synctest_1_prod", + "OPEN": false + } + ] + } + ] +} diff --git a/etc/db-config-test.json b/etc/db-config-test.json new file mode 100644 index 000000000..fbdaf6f4f --- /dev/null +++ b/etc/db-config-test.json @@ -0,0 +1,21 @@ +{ + "DATABASE_HOSTS": [ + { + "HOSTNAME": "127.0.0.1", + "PORT": 3306, + "ZONE": "testzone", + "SHARDS": [ + { + "ID": 0, + "SCHEMA_NAME": "synctest", + "OPEN": true + }, + { + "ID": 1, + "SCHEMA_NAME": "synctest_1", + "OPEN": false + } + ] + } + ] +} diff --git a/etc/secrets-test.yml b/etc/secrets-test.yml index 217ccf45d..8955c5d36 100644 --- a/etc/secrets-test.yml +++ b/etc/secrets-test.yml @@ -6,10 +6,3 @@ MS_LIVE_OAUTH_CLIENT_SECRET: tjMNyu7ACbE8DOt0LE30Ptk7muNdPosG # Hexl-encoded static keys used to encrypt blocks in S3, secrets in database: BLOCK_ENCRYPTION_KEY: 0ba4c7da83f474d2b33c8725416e444db632a1684705bc2fb7da5058e93668c9 SECRET_ENCRYPTION_KEY: 1f5be7969a7ea9abf8da443151269fe2c25f1d0e81c7ee239c67991a55a33553 -CONTACTS_SEARCH: - SENTRY_DSN: "" - -DATABASE_USERS: - "localhost": - USER: inboxtest - PASSWORD: inboxtest diff --git a/inbox/actions/backends/generic.py b/inbox/actions/backends/generic.py index 03b9d2398..b2fc6ad17 100644 --- a/inbox/actions/backends/generic.py +++ b/inbox/actions/backends/generic.py @@ -38,7 +38,7 @@ def uids_by_folder(message_id, db_session): def _create_email(account, message): blocks = [p.block for p in message.attachments] - attachments = generate_attachments(blocks) + attachments = generate_attachments(message, blocks) from_name, from_email = message.from_addr[0] msg = create_email(from_name=from_name, from_email=from_email, diff --git a/inbox/actions/backends/gmail.py b/inbox/actions/backends/gmail.py index bd7a679d5..275347877 100644 --- a/inbox/actions/backends/gmail.py +++ b/inbox/actions/backends/gmail.py @@ -16,10 +16,16 @@ def _encode_labels(labels): return map(imapclient.imap_utf7.encode, labels) -def remote_change_labels(crispin_client, account_id, message_id, +def remote_change_labels(crispin_client, account_id, message_ids, removed_labels, added_labels): + uids_for_message = {} with session_scope(account_id) as db_session: - uids_for_message = uids_by_folder(message_id, db_session) + for message_id in message_ids: + folder_uids_map = uids_by_folder(message_id, db_session) + for folder_name, uids in folder_uids_map.items(): + if folder_name not in uids_for_message: + uids_for_message[folder_name] = [] + uids_for_message[folder_name].extend(uids) for folder_name, uids in uids_for_message.items(): crispin_client.select_folder_if_necessary(folder_name, uidvalidity_cb) diff --git a/inbox/actions/base.py b/inbox/actions/base.py index 011081564..b49624bb2 100644 --- a/inbox/actions/base.py +++ b/inbox/actions/base.py @@ -45,6 +45,10 @@ log = get_logger() +def can_handle_multiple_records(action_name): + return action_name == 
'change_labels' + + def mark_unread(crispin_client, account_id, message_id, args): unread = args['unread'] set_remote_unread(crispin_client, account_id, message_id, unread) @@ -60,10 +64,10 @@ def move(crispin_client, account_id, message_id, args): remote_move(crispin_client, account_id, message_id, destination) -def change_labels(crispin_client, account_id, message_id, args): +def change_labels(crispin_client, account_id, message_ids, args): added_labels = args['added_labels'] removed_labels = args['removed_labels'] - remote_change_labels(crispin_client, account_id, message_id, + remote_change_labels(crispin_client, account_id, message_ids, removed_labels, added_labels) diff --git a/inbox/api/filtering.py b/inbox/api/filtering.py index b9675cfe2..bd929e48c 100644 --- a/inbox/api/filtering.py +++ b/inbox/api/filtering.py @@ -12,6 +12,17 @@ from inbox.models.session import session_scope_by_shard_id +def contact_subquery(db_session, namespace_id, email_address, field): + return db_session.query(Message.thread_id) \ + .join(MessageContactAssociation) \ + .join(Contact, MessageContactAssociation.contact_id == Contact.id)\ + .filter( + Contact.email_address == email_address, + Contact.namespace_id == namespace_id, + MessageContactAssociation.field == field) \ + .subquery() + + def threads(namespace_id, subject, from_addr, to_addr, cc_addr, bcc_addr, any_email, thread_public_id, started_before, started_after, last_message_before, last_message_after, filename, in_, unread, @@ -24,7 +35,7 @@ def threads(namespace_id, subject, from_addr, to_addr, cc_addr, bcc_addr, else: query = db_session.query(Thread) - filters = [Thread.namespace_id == namespace_id] + filters = [Thread.namespace_id == namespace_id, Thread.deleted_at == None] if thread_public_id is not None: filters.append(Thread.public_id == thread_public_id) @@ -46,42 +57,32 @@ def threads(namespace_id, subject, from_addr, to_addr, cc_addr, bcc_addr, query = query.filter(*filters) if from_addr is not None: - from_query = db_session.query(Message.thread_id). \ - join(MessageContactAssociation).join(Contact).filter( - Contact.email_address == from_addr, - Contact.namespace_id == namespace_id, - MessageContactAssociation.field == 'from_addr').subquery() + from_query = contact_subquery(db_session, namespace_id, + from_addr, 'from_addr') query = query.filter(Thread.id.in_(from_query)) if to_addr is not None: - to_query = db_session.query(Message.thread_id). \ - join(MessageContactAssociation).join(Contact).filter( - Contact.email_address == to_addr, - Contact.namespace_id == namespace_id, - MessageContactAssociation.field == 'to_addr').subquery() + to_query = contact_subquery(db_session, namespace_id, + to_addr, 'to_addr') query = query.filter(Thread.id.in_(to_query)) if cc_addr is not None: - cc_query = db_session.query(Message.thread_id). \ - join(MessageContactAssociation).join(Contact).filter( - Contact.email_address == cc_addr, - Contact.namespace_id == namespace_id, - MessageContactAssociation.field == 'cc_addr').subquery() + cc_query = contact_subquery(db_session, namespace_id, + cc_addr, 'cc_addr') query = query.filter(Thread.id.in_(cc_query)) if bcc_addr is not None: - bcc_query = db_session.query(Message.thread_id). 
\ - join(MessageContactAssociation).join(Contact).filter( - Contact.email_address == bcc_addr, - Contact.namespace_id == namespace_id, - MessageContactAssociation.field == 'bcc_addr').subquery() + bcc_query = contact_subquery(db_session, namespace_id, + bcc_addr, 'bcc_addr') query = query.filter(Thread.id.in_(bcc_query)) if any_email is not None: - any_contact_query = db_session.query(Message.thread_id). \ - join(MessageContactAssociation).join(Contact). \ - filter(Contact.email_address.in_(any_email), - Contact.namespace_id == namespace_id).subquery() + any_contact_query = db_session.query(Message.thread_id) \ + .join(MessageContactAssociation) \ + .join(Contact, MessageContactAssociation.contact_id == Contact.id)\ + .filter(Contact.email_address.in_(any_email), + Contact.namespace_id == namespace_id)\ + .subquery() query = query.filter(Thread.id.in_(any_contact_query)) if filename is not None: @@ -207,7 +208,8 @@ def messages_or_drafts(namespace_id, drafts, subject, from_addr, to_addr, query += lambda q: q.join(Thread, Message.thread_id == Thread.id) query += lambda q: q.filter( Message.namespace_id == bindparam('namespace_id'), - Message.is_draft == bindparam('drafts')) + Message.is_draft == bindparam('drafts'), + Thread.deleted_at == None) if subject is not None: query += lambda q: q.filter(Message.subject == bindparam('subject')) @@ -253,8 +255,9 @@ def messages_or_drafts(namespace_id, drafts, subject, from_addr, to_addr, if to_addr is not None: query.spoil() - to_query = db_session.query(MessageContactAssociation.message_id). \ - join(Contact).filter( + to_query = db_session.query(MessageContactAssociation.message_id) \ + .join(Contact, MessageContactAssociation.contact_id == Contact.id)\ + .filter( MessageContactAssociation.field == 'to_addr', Contact.email_address == to_addr, Contact.namespace_id == bindparam('namespace_id')).subquery() @@ -262,8 +265,9 @@ def messages_or_drafts(namespace_id, drafts, subject, from_addr, to_addr, if from_addr is not None: query.spoil() - from_query = db_session.query(MessageContactAssociation.message_id). \ - join(Contact).filter( + from_query = db_session.query(MessageContactAssociation.message_id) \ + .join(Contact, MessageContactAssociation.contact_id == Contact.id)\ + .filter( MessageContactAssociation.field == 'from_addr', Contact.email_address == from_addr, Contact.namespace_id == bindparam('namespace_id')).subquery() @@ -271,8 +275,9 @@ def messages_or_drafts(namespace_id, drafts, subject, from_addr, to_addr, if cc_addr is not None: query.spoil() - cc_query = db_session.query(MessageContactAssociation.message_id). \ - join(Contact).filter( + cc_query = db_session.query(MessageContactAssociation.message_id) \ + .join(Contact, MessageContactAssociation.contact_id == Contact.id)\ + .filter( MessageContactAssociation.field == 'cc_addr', Contact.email_address == cc_addr, Contact.namespace_id == bindparam('namespace_id')).subquery() @@ -280,8 +285,9 @@ def messages_or_drafts(namespace_id, drafts, subject, from_addr, to_addr, if bcc_addr is not None: query.spoil() - bcc_query = db_session.query(MessageContactAssociation.message_id). 
\ - join(Contact).filter( + bcc_query = db_session.query(MessageContactAssociation.message_id) \ + .join(Contact, MessageContactAssociation.contact_id == Contact.id)\ + .filter( MessageContactAssociation.field == 'bcc_addr', Contact.email_address == bcc_addr, Contact.namespace_id == bindparam('namespace_id')).subquery() @@ -290,10 +296,12 @@ def messages_or_drafts(namespace_id, drafts, subject, from_addr, to_addr, if any_email is not None: query.spoil() any_email_query = db_session.query( - MessageContactAssociation.message_id).join(Contact). \ - filter(Contact.email_address.in_(any_email), - Contact.namespace_id == bindparam('namespace_id')). \ - subquery() + MessageContactAssociation.message_id) \ + .join(Contact, MessageContactAssociation.contact_id == Contact.id)\ + .filter( + Contact.email_address.in_(any_email), + Contact.namespace_id == bindparam('namespace_id')) \ + .subquery() query += lambda q: q.filter(Message.id.in_(any_email_query)) if filename is not None: @@ -397,7 +405,8 @@ def files(namespace_id, message_public_id, filename, content_type, def filter_event_query(query, event_cls, namespace_id, event_public_id, calendar_public_id, title, description, location, busy): - query = query.filter(event_cls.namespace_id == namespace_id) + query = query.filter(event_cls.namespace_id == namespace_id).filter( + event_cls.deleted_at == None) # noqa if event_public_id: query = query.filter(event_cls.public_id == event_public_id) diff --git a/inbox/api/kellogs.py b/inbox/api/kellogs.py index 1e298bf6a..c7ce1347f 100644 --- a/inbox/api/kellogs.py +++ b/inbox/api/kellogs.py @@ -126,6 +126,10 @@ def _get_lowercase_class_name(obj): raise Exception("Should never be serializing accounts") elif isinstance(obj, Message): + thread_public_id = None + if obj.thread: + thread_public_id = obj.thread.public_id + resp = { 'id': obj.public_id, 'object': 'message', @@ -137,7 +141,7 @@ def _get_lowercase_class_name(obj): 'cc': format_address_list(obj.cc_addr), 'bcc': format_address_list(obj.bcc_addr), 'date': obj.received_date, - 'thread_id': obj.thread.public_id, + 'thread_id': thread_public_id, 'snippet': obj.snippet, 'body': obj.body, 'unread': not obj.is_read, diff --git a/inbox/api/ns_api.py b/inbox/api/ns_api.py index 9243d65cc..5bc2c58a5 100644 --- a/inbox/api/ns_api.py +++ b/inbox/api/ns_api.py @@ -6,6 +6,7 @@ import base64 import gevent import itertools +from hashlib import sha256 from datetime import datetime from collections import namedtuple @@ -14,6 +15,7 @@ from flask import jsonify as flask_jsonify from flask.ext.restful import reqparse from sqlalchemy import asc, func +from sqlalchemy.exc import OperationalError from sqlalchemy.orm.exc import NoResultFound from inbox.models import (Message, Block, Part, Thread, Namespace, @@ -49,16 +51,30 @@ from inbox.ignition import engine_manager from inbox.models.action_log import schedule_action from inbox.models.session import new_session, session_scope -from inbox.search.base import get_search_client, SearchBackendException +from inbox.search.base import get_search_client, SearchBackendException, SearchStoreException from inbox.transactions import delta_sync from inbox.api.err import (err, APIException, NotFoundError, InputError, AccountDoesNotExistError, log_exception) from inbox.events.ical import generate_rsvp, send_rsvp from inbox.events.util import removed_participants -from inbox.util.blockstore import get_from_blockstore +from inbox.util import blockstore from inbox.util.misc import imap_folder_path from inbox.actions.backends.generic import 
remote_delete_sent from inbox.crispin import writable_connection_pool +from inbox.s3.base import get_raw_from_provider +from inbox.s3.exc import (EmailFetchException, TemporaryEmailFetchException, + EmailDeletedException) +from inbox.util.stats import statsd_client + +try: + from inbox.util.eas.codes import STORE_STATUS_CODES +except ImportError: + # Only important for EAS search failures, so shouldn't trigge test fail + pass + + +from nylas.logging import get_logger +log = get_logger() DEFAULT_LIMIT = 100 LONG_POLL_REQUEST_TIMEOUT = 120 @@ -102,6 +118,7 @@ # API_VERSIONS list. API_VERSIONS = ['2016-03-07', '2016-08-09'] + @app.before_request def start(): g.api_version = request.headers.get('Api-Version', API_VERSIONS[0]) @@ -114,22 +131,24 @@ def start(): else: g.api_features = APIFeatures(optimistic_updates=False) + request.environ['log_context'] = { + 'endpoint': request.endpoint, + 'api_version': g.api_version, + 'namespace_id': g.namespace_id, + } + engine = engine_manager.get_for_id(g.namespace_id) g.db_session = new_session(engine) g.namespace = Namespace.get(g.namespace_id, g.db_session) - request.environ['log_context'] = { - 'endpoint': request.endpoint, - } if not g.namespace: # The only way this can occur is if there used to be an account that # was deleted, but the API access cache entry has not been expired yet. raise AccountDoesNotExistError() - request.environ['log_context'].update({ - 'account_id': g.namespace.account_id, - 'namespace_id': g.namespace.id, - }) + request.environ['log_context']['account_id'] = g.namespace.account_id + if hasattr(g, 'application_id'): + request.environ['log_context']['application_id'] = g.application_id is_n1 = request.environ.get('IS_N1', False) g.encoder = APIEncoder(g.namespace.public_id, is_n1=is_n1) @@ -158,6 +177,12 @@ def before_remote_request(): 'namespace_api.message_streaming_search_api', 'namespace_api.thread_streaming_search_api') or request.method in ('POST', 'PUT', 'PATCH', 'DELETE')): + + if g.namespace: + # Logging provider here to ensure that the provider is only logged for + # requests that modify data or are proxied to remote servers. + request.environ['log_context']['provider'] = g.namespace.account.provider + valid_account(g.namespace) @@ -170,6 +195,20 @@ def finish(response): return response +@app.errorhandler(OperationalError) +def handle_operational_error(error): + rule = request.url_rule + if 'send' in rule.rule and 'rsvp' not in rule.rule: + message = "A temporary database error prevented us from serving this request. Your message has NOT been sent. Please try again in a few minutes." + else: + message = "A temporary database error prevented us from serving this request. Please try again." + + log.error('MySQL OperationalError', exc_info=True) + response = flask_jsonify(message=message, type='database_error') + response.status_code = 503 + return response + + @app.errorhandler(NotImplementedError) def handle_not_implemented_error(error): request.environ['log_context']['error'] = 'NotImplementedError' @@ -193,8 +232,7 @@ def handle_input_error(error): @app.errorhandler(Exception) def handle_generic_error(error): log_exception(sys.exc_info()) - response = flask_jsonify(message=error.message, - type='api_error') + response = flask_jsonify(message="An internal error occured. 
If this issue persists, please contact support@nylas.com and include this request_uid: {}".format(request.headers.get('X-Unique-ID'), type='api_error')) response.status_code = 500 return response @@ -283,6 +321,12 @@ def thread_search_api(): if exc.server_error: kwargs['server_error'] = exc.server_error return err(exc.http_code, exc.message, **kwargs) + except SearchStoreException as exc: + store_status = STORE_STATUS_CODES.get(str(exc.err_code)) + kwargs = {} + if store_status.requires_user_action: + kwargs['server_error'] = store_status.resolution + return err(store_status.http_code, store_status.meaning, **kwargs) @app.route('/threads/search/streaming', methods=['GET']) @@ -304,6 +348,12 @@ def thread_streaming_search_api(): if exc.server_error: kwargs['server_error'] = exc.server_error return err(exc.http_code, exc.message, **kwargs) + except SearchStoreException as exc: + store_status = STORE_STATUS_CODES.get(str(exc.err_code)) + kwargs = {} + if store_status.requires_user_action: + kwargs['server_error'] = store_status.resolution + return err(store_status.http_code, store_status.meaning, **kwargs) @app.route('/threads/') @@ -316,6 +366,7 @@ def thread_api(public_id): valid_public_id(public_id) thread = g.db_session.query(Thread).filter( Thread.public_id == public_id, + Thread.deleted_at == None, Thread.namespace_id == g.namespace.id).one() return encoder.jsonify(thread) except NoResultFound: @@ -331,6 +382,7 @@ def thread_api_update(public_id): valid_public_id(public_id) thread = g.db_session.query(Thread).filter( Thread.public_id == public_id, + Thread.deleted_at == None, Thread.namespace_id == g.namespace.id).one() except NoResultFound: raise NotFoundError("Couldn't find thread `{0}` ".format(public_id)) @@ -433,6 +485,12 @@ def message_search_api(): if exc.server_error: kwargs['server_error'] = exc.server_error return err(exc.http_code, exc.message, **kwargs) + except SearchStoreException as exc: + store_status = STORE_STATUS_CODES.get(str(exc.err_code)) + kwargs = {} + if store_status.requires_user_action: + kwargs['server_error'] = store_status.resolution + return err(store_status.http_code, store_status.meaning, **kwargs) @app.route('/messages/search/streaming', methods=['GET']) @@ -454,6 +512,12 @@ def message_streaming_search_api(): if exc.server_error: kwargs['server_error'] = exc.server_error return err(exc.http_code, exc.message, **kwargs) + except SearchStoreException as exc: + store_status = STORE_STATUS_CODES.get(str(exc.err_code)) + kwargs = {} + if store_status.requires_user_action: + kwargs['server_error'] = store_status.resolution + return err(store_status.http_code, store_status.meaning, **kwargs) @app.route('/messages/', methods=['GET']) @@ -470,13 +534,53 @@ def message_read_api(public_id): raise NotFoundError("Couldn't find message {0}".format(public_id)) if request.headers.get('Accept', None) == 'message/rfc822': - raw_message = get_from_blockstore(message.data_sha256) + raw_message = blockstore.get_from_blockstore(message.data_sha256) if raw_message is not None: return Response(raw_message, mimetype='message/rfc822') else: + # Try getting the message from the email provider. 
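# Editor's sketch (illustrative only, not part of the patch): the fallback
# below is a read-through cache keyed by content hash -- miss the blockstore,
# fetch the raw message from the provider, then write it back under its sha256
# so the next request is served locally. Condensed, with the statsd timing and
# error handling elided:
#
#     raw = blockstore.get_from_blockstore(message.data_sha256)
#     if raw is None:
#         raw = get_raw_from_provider(message)   # provider round-trip
#         if raw:
#             blockstore.save_to_blockstore(sha256(raw).hexdigest(), raw)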
+ account = g.namespace.account + statsd_string = 'api.direct_fetching.{}.{}'\ + .format(account.provider, account.id) + + try: + with statsd_client.timer('{}.provider_latency'.format( + statsd_string)): + contents = get_raw_from_provider(message) + statsd_client.incr('{}.successes'.format(statsd_string)) + except TemporaryEmailFetchException: + statsd_client.incr('{}.temporary_failure'.format(statsd_string)) + log.warning('Exception when fetching email', + account_id=account.id, provider=account.provider, + logstash_tag='direct_fetching', exc_info=True) + + return err(503, "Email server returned a temporary error. " + "Please try again in a few minutes.") + except EmailDeletedException: + statsd_client.incr('{}.deleted'.format(statsd_string)) + log.warning('Exception when fetching email', + account_id=account.id, provider=account.provider, + logstash_tag='direct_fetching', exc_info=True) + + return err(404, "The data was deleted on the email server.") + except EmailFetchException: + statsd_client.incr('{}.failures'.format(statsd_string)) + log.warning('Exception when fetching email', + account_id=account.id, provider=account.provider, + logstash_tag='direct_fetching', exc_info=True) + + return err(404, "Couldn't find data on the email server.") + + if contents is not None: + # If we found it, save it too. + data_sha256 = sha256(contents).hexdigest() + blockstore.save_to_blockstore(data_sha256, contents) + return contents + request.environ['log_context']['message_id'] = message.id raise NotFoundError( - "Couldn't find raw contents for message `{0}`" + "Couldn't find raw contents for message `{0}`. " + "Please try again in a few minutes." .format(public_id)) return encoder.jsonify(message) @@ -682,7 +786,6 @@ def folder_label_delete_api(public_id): "Folder {} cannot be deleted because it contains messages.". 
format(public_id)) - if g.api_features.optimistic_updates: deleted_at = datetime.utcnow() category.deleted_at = deleted_at @@ -878,7 +981,8 @@ def event_read_api(public_id): try: event = g.db_session.query(Event).filter( Event.namespace_id == g.namespace.id, - Event.public_id == public_id).one() + Event.public_id == public_id, + Event.deleted_at == None).one() # noqa except NoResultFound: raise NotFoundError("Couldn't find event id {0}".format(public_id)) return g.encoder.jsonify(event) @@ -895,7 +999,8 @@ def event_update_api(public_id): try: event = g.db_session.query(Event).filter( Event.public_id == public_id, - Event.namespace_id == g.namespace.id).one() + Event.namespace_id == g.namespace.id, + Event.deleted_at == None).one() # noqa except NoResultFound: raise NotFoundError("Couldn't find event {0}".format(public_id)) @@ -958,7 +1063,7 @@ def event_update_api(public_id): cancelled_participants=cancelled_participants, notify_participants=notify_participants) - if len(json.dumps(kwargs)) > 2**16 - 12: + if len(json.dumps(kwargs)) > 2 ** 16 - 12: raise InputError('Event update too big --- please break it in parts.') if event.calendar != account.emailed_events_calendar: @@ -977,9 +1082,10 @@ def event_delete_api(public_id): valid_public_id(public_id) try: - event = g.db_session.query(Event).filter_by( - public_id=public_id, - namespace_id=g.namespace.id).one() + event = g.db_session.query(Event).filter( + Event.public_id == public_id, + Event.namespace_id == g.namespace.id, + Event.deleted_at == None).one() # noqa except NoResultFound: raise NotFoundError("Couldn't find event {0}".format(public_id)) @@ -990,7 +1096,6 @@ def event_delete_api(public_id): raise InputError('Cannot delete event {} from read_only calendar.'. format(public_id)) - if g.api_features.optimistic_updates: # Set the local event status to 'cancelled' rather than deleting it, # in order to be consistent with how we sync deleted events from the @@ -1211,7 +1316,35 @@ def file_download_api(public_id): # TODO the part.data object should really behave like a stream we can read # & write to - response = make_response(f.data) + try: + account = g.namespace.account + statsd_string = 'api.direct_fetching.{}.{}'.format(account.provider, + account.id) + + response = make_response(f.data) + statsd_client.incr('{}.successes'.format(statsd_string)) + + except TemporaryEmailFetchException: + statsd_client.incr('{}.temporary_failure'.format(statsd_string)) + log.warning('Exception when fetching email', + account_id=account.id, provider=account.provider, + logstash_tag='direct_fetching', exc_info=True) + + return err(503, "Email server returned a temporary error. " + "Please try again in a few minutes.") + except EmailDeletedException: + statsd_client.incr('{}.deleted'.format(statsd_string)) + log.warning('Exception when fetching email', + account_id=account.id, provider=account.provider, + logstash_tag='direct_fetching', exc_info=True) + + return err(404, "The data was deleted on the email server.") + except EmailFetchException: + statsd_client.incr('{}.failures'.format(statsd_string)) + log.warning('Exception when fetching email', + logstash_tag='direct_fetching', exc_info=True) + + return err(404, "Couldn't find data on email server.") response.headers['Content-Type'] = 'application/octet-stream' # ct # Werkzeug will try to encode non-ascii header values as latin-1. 
Try that @@ -1413,9 +1546,6 @@ def draft_send_api(): if draft_public_id is not None: draft = get_draft(draft_public_id, data.get('version'), g.namespace.id, g.db_session) - schedule_action('delete_draft', draft, draft.namespace.id, - g.db_session, nylas_uid=draft.nylas_uid, - message_id_header=draft.message_id_header) else: draft = create_message_from_json(data, g.namespace, g.db_session, is_draft=False) @@ -1430,6 +1560,12 @@ def draft_send_api(): return err(504, 'Request timed out.') resp = send_draft(account, draft, g.db_session) + + # Only delete the draft once we know it has been sent + if draft_public_id is not None and resp.status_code == 200: + schedule_action('delete_draft', draft, draft.namespace.id, + g.db_session, nylas_uid=draft.nylas_uid, + message_id_header=draft.message_id_header) return resp diff --git a/inbox/api/validation.py b/inbox/api/validation.py index c960fcbfb..74dca41d9 100644 --- a/inbox/api/validation.py +++ b/inbox/api/validation.py @@ -221,6 +221,7 @@ def get_thread(thread_public_id, namespace_id, db_session): try: return db_session.query(Thread). \ filter(Thread.public_id == thread_public_id, + Thread.deleted_at == None, Thread.namespace_id == namespace_id).one() except NoResultFound: raise InputError('Invalid thread public id {}'. diff --git a/inbox/auth/generic.py b/inbox/auth/generic.py index 09b87598a..409dbc1b3 100644 --- a/inbox/auth/generic.py +++ b/inbox/auth/generic.py @@ -10,7 +10,8 @@ from inbox.auth.base import AuthHandler, account_or_none from inbox.basicauth import (ValidationError, UserRecoverableConfigError, - SSLNotSupportedError, SettingUpdateError) + SSLNotSupportedError, SettingUpdateError, + AppPasswordError) from inbox.models import Namespace from inbox.models.backends.generic import GenericAccount from inbox.sendmail.smtp.postel import SMTPClient @@ -146,8 +147,10 @@ def connect_account(self, account, use_timeout=True): ssl_required=ssl_required, error=exc) raise ValidationError(exc) + elif _auth_requires_app_password(exc): + raise AppPasswordError(exc) else: - log.error('IMAP login failed for an unknown reason', + log.error('IMAP login failed for an unknown reason. Check _auth_is_invalid', account_id=account.id, host=host, port=port, @@ -309,6 +312,18 @@ def interactive_auth(self, email_address): return response +def _auth_requires_app_password(exc): + # Some servers require an application specific password, token, or + # authorization code to login + PREFIXES = ( + 'Please using authorized code to login.', # http://service.mail.qq.com/cgi-bin/help?subtype=1&&id=28&&no=1001256 + 'Authorized code is incorrect', # http://service.mail.qq.com/cgi-bin/help?subtype=1&&id=28&&no=1001256 + 'Login fail. Please using weixin token', # http://service.exmail.qq.com/cgi-bin/help?subtype=1&no=1001023&id=23. 
+ ) + return any(exc.message.lower().startswith(msg.lower()) for msg in + PREFIXES) + + def _auth_is_invalid(exc): # IMAP doesn't really have error semantics, so we have to match the error # message against a list of known response strings to determine whether we @@ -323,8 +338,18 @@ def _auth_is_invalid(exc): '[auth] authentication failed.', 'invalid login credentials', '[ALERT] Please log in via your web browser', + 'LOGIN Authentication failed', + 'authentication failed', + '[ALERT] Invalid credentials(Failure)', + 'Invalid email login', + 'failed: Re-Authentication Failure', + 'Invalid', + 'Login incorrect', + 'LOGIN GroupWise login failed', + 'authentication failed', + 'LOGIN bad', # LOGIN bad username or password ) - return any(exc.message.lower().startswith(msg) for msg in + return any(exc.message.lower().startswith(msg.lower()) for msg in AUTH_INVALID_PREFIXES) diff --git a/inbox/basicauth.py b/inbox/basicauth.py index 18f489250..13b1962be 100644 --- a/inbox/basicauth.py +++ b/inbox/basicauth.py @@ -26,7 +26,7 @@ class OAuthError(ValidationError): pass -class ConfigError(Exception): +class ConfigurationError(Exception): pass @@ -51,3 +51,7 @@ def __init__(self, reason=None): class AccessNotEnabledError(Exception): pass + + +class AppPasswordError(ValidationError): + pass diff --git a/inbox/config.py b/inbox/config.py index ed8ba2bb7..81cfc3b1d 100644 --- a/inbox/config.py +++ b/inbox/config.py @@ -38,8 +38,7 @@ class ConfigError(Exception): def __init__(self, error=None, help=None): self.error = error or '' self.help = help or \ - 'Run `sudo cp etc/config-dev.json /etc/inboxapp/config.json` and '\ - 'retry.' + 'Run `sudo cp etc/config-dev.json /etc/inboxapp/config.json` and retry.' def __str__(self): return '{0} {1}'.format(self.error, self.help) diff --git a/inbox/contacts/remote_sync.py b/inbox/contacts/remote_sync.py index 177257b50..2dd1255ae 100644 --- a/inbox/contacts/remote_sync.py +++ b/inbox/contacts/remote_sync.py @@ -63,7 +63,7 @@ def sync(self): database. This function runs every `self.poll_frequency`. 
""" - self.log.info('syncing contacts') + self.log.debug('syncing contacts') # Grab timestamp so next sync gets deltas from now sync_timestamp = datetime.utcnow() @@ -120,6 +120,6 @@ def sync(self): account = db_session.query(Account).get(self.account_id) account.last_synced_contacts = sync_timestamp - self.log.info('synced contacts', added=change_counter['added'], - updated=change_counter['updated'], - deleted=change_counter['deleted']) + self.log.debug('synced contacts', added=change_counter['added'], + updated=change_counter['updated'], + deleted=change_counter['deleted']) diff --git a/inbox/crispin.py b/inbox/crispin.py index 7c860bb03..52b1cc522 100644 --- a/inbox/crispin.py +++ b/inbox/crispin.py @@ -83,6 +83,10 @@ class FolderMissingError(Exception): pass +class DraftDeletionException(Exception): + pass + + def _get_connection_pool(account_id, pool_size, pool_map, readonly): with _lock_map[account_id]: if account_id not in pool_map: @@ -138,8 +142,8 @@ class CrispinConnectionPool(object): """ def __init__(self, account_id, num_connections, readonly): - log.info('Creating Crispin connection pool for account {} with {} ' - 'connections'.format(account_id, num_connections)) + log.info('Creating Crispin connection pool', + account_id=account_id, num_connections=num_connections) self.account_id = account_id self.readonly = readonly self._queue = Queue(num_connections, items=num_connections * [None]) @@ -949,6 +953,20 @@ def g_msgids(self, uids): return {uid: ret['X-GM-MSGID'] for uid, ret in data.items() if uid in uid_set} + def g_msgid_to_uids(self, g_msgid): + """ + Find all message UIDs in the selected folder with X-GM-MSGID equal to + g_msgid. + + Returns + ------- + list + """ + uids = [long(uid) for uid in + self.conn.search(['X-GM-MSGID', g_msgid])] + # UIDs ascend over time; return in order most-recent first + return sorted(uids, reverse=True) + def folder_names(self, force_resync=False): """ Return the folder names ( == label names for Gmail) for the account @@ -1093,21 +1111,54 @@ def delete_draft(self, message_id_header): message_id_header=message_id_header) drafts_folder_name = self.folder_names()['drafts'][0] trash_folder_name = self.folder_names()['trash'][0] + sent_folder_name = self.folder_names()['sent'][0] - # First find the draft in the drafts folder + # There's a race condition in how Gmail reconciles sent messages + # which sometimes causes us to delete both the sent and draft + # (because for a brief moment in time they're the same message). + # To work around this, we use x-gm-msgid and check that the + # sent message and the draft have been reconciled to different + # values. + + # First find the message in the sent folder + self.conn.select_folder(sent_folder_name) + matching_uids = self.find_by_header('Message-Id', message_id_header) + + if len(matching_uids) == 0: + raise DraftDeletionException( + "Couldn't find sent message in sent folder.") + + sent_gm_msgids = self.g_msgids(matching_uids) + if len(sent_gm_msgids) != 1: + raise DraftDeletionException( + "Only one message should have this msgid") + + # Then find the draft in the draft folder self.conn.select_folder(drafts_folder_name) matching_uids = self.find_by_header('Message-Id', message_id_header) if not matching_uids: return False - # To delete, first copy the message to trash (sufficient to move from - # gmail's All Mail folder to Trash folder) - self.conn.copy(matching_uids, trash_folder_name) + # Make sure to remove the \\Draft flags so that Gmail removes it from + # the draft folder. 
+ self.conn.remove_flags(matching_uids, ['\\Draft']) + self.conn.remove_gmail_labels(matching_uids, ['\\Draft']) - # Next, delete the message from trash (in the normal way) to permanently - # delete it. + gm_msgids = self.g_msgids(matching_uids) + for msgid in gm_msgids.values(): + if msgid == sent_gm_msgids.values()[0]: + raise DraftDeletionException( + "Send and draft should have been reconciled as " + "different messages.") + + self.conn.copy(matching_uids, trash_folder_name) self.conn.select_folder(trash_folder_name) - self._delete_message(message_id_header, False) + + for msgid in gm_msgids.values(): + uids = self.g_msgid_to_uids(msgid) + self.conn.delete_messages(uids, silent=True) + + self.conn.expunge() return True def delete_sent_message(self, message_id_header, delete_multiple=False): diff --git a/inbox/events/google.py b/inbox/events/google.py index 02735af1c..eacd90e90 100644 --- a/inbox/events/google.py +++ b/inbox/events/google.py @@ -563,7 +563,7 @@ def _dump_event(event): if event.all_day: dump["start"] = {"date": event.start.strftime('%Y-%m-%d')} - dump["end"] = {"date": event.start.strftime('%Y-%m-%d')} + dump["end"] = {"date": event.end.strftime('%Y-%m-%d')} else: dump["start"] = {"dateTime": event.start.isoformat('T'), "timeZone": "UTC"} diff --git a/inbox/events/ical.py b/inbox/events/ical.py index 2aa9c6d01..a52d1dc54 100644 --- a/inbox/events/ical.py +++ b/inbox/events/ical.py @@ -443,7 +443,7 @@ def generate_icalendar_invite(event, invite_type='request'): account = event.namespace.account organizer = icalendar.vCalAddress(u"MAILTO:{}".format( account.email_address)) - if account.name is not None: + if account.name is not None and account.name != '': organizer.params['CN'] = account.name icalendar_event['organizer'] = organizer @@ -521,7 +521,7 @@ def generate_invite_message(ical_txt, event, account, invite_type='request'): # From should match our mailsend provider (mailgun) so it doesn't confuse # spam filters - msg.headers['From'] = "notifications@mg.nylas.com" + msg.headers['From'] = "automated@notifications.nylas.com" msg.headers['Reply-To'] = account.email_address if invite_type == 'request': @@ -545,6 +545,12 @@ def send_invite(ical_txt, event, account, invite_type='request'): if email is None: continue + if email == account.email_address: + # If the organizer is among the participants, don't send + # a second email. They already have the event on their + # calendar. + continue + msg = generate_invite_message(ical_txt, event, account, invite_type) msg.headers['To'] = email final_message = msg.to_string() diff --git a/inbox/events/recurring.py b/inbox/events/recurring.py index 7f309f4f5..027bb507f 100644 --- a/inbox/events/recurring.py +++ b/inbox/events/recurring.py @@ -57,10 +57,12 @@ def parse_rrule(event): if event.rrule is not None: if event.all_day: start = event.start.to('utc').naive + ignoretz = True else: start = event.start.datetime + ignoretz = False try: - rule = rrulestr(event.rrule, dtstart=start, + rule = rrulestr(event.rrule, dtstart=start, ignoretz=ignoretz, compatible=True) return rule diff --git a/inbox/events/remote_sync.py b/inbox/events/remote_sync.py index 1c7876777..b7da9c5d4 100644 --- a/inbox/events/remote_sync.py +++ b/inbox/events/remote_sync.py @@ -49,7 +49,7 @@ def sync(self): """Query a remote provider for updates and persist them to the database. This function runs every `self.poll_frequency`. 
""" - self.log.info('syncing events') + self.log.debug('syncing events') try: deleted_uids, calendar_changes = self.provider.sync_calendars() @@ -210,7 +210,7 @@ def sync(self): currently subscribed to push notificaitons and haven't heard anything new from Google. """ - self.log.info('syncing events') + self.log.debug('syncing events') try: self._refresh_gpush_subscriptions() diff --git a/inbox/folder_edge_cases.py b/inbox/folder_edge_cases.py index 779e3a273..04264a6a5 100644 --- a/inbox/folder_edge_cases.py +++ b/inbox/folder_edge_cases.py @@ -16,7 +16,8 @@ '\xd0\xa1\xd0\xbc\xd1\x96\xd1\x82\xd1\x82\xd1\x8f', 'Papierkorb/Trash', 'Gel\xc3\xb6schte Elemente', 'Deleted Messages', '[Gmail]/Trash', 'INBOX/Trash', 'Trash', - 'mail/TRASH', 'INBOX.Trash'}, + 'mail/TRASH', 'INBOX.Trash', 'INBOX.\xc9l\xe9ments supprim\xe9s', + 'INBOX.INBOX.Trash'}, 'spam': {'Roskaposti', 'INBOX.spam', 'INBOX.Spam', 'Skr\xc3\xa4ppost', 'Spamverdacht', 'spam', 'Spam', '[Gmail]/Spam', '[Imap]/Spam', '\xe5\x9e\x83\xe5\x9c\xbe\xe9\x82\xae\xe4\xbb\xb6', 'Junk', @@ -25,6 +26,7 @@ 'sent': {'Postausgang', 'INBOX.Gesendet', '[Gmail]/Sent Mail', '\xeb\xb3\xb4\xeb\x82\xbc\xed\x8e\xb8\xec\xa7\x80\xed\x95\xa8' 'Elementos enviados', 'Sent', 'Sent Items', 'Sent Messages', - 'INBOX.Papierkorb', 'Odeslan\xc3\xa9', 'mail/sent-mail', - 'Ko\xc5\xa1', 'Outbox', 'OUTBOX', 'INBOX.SentMail', 'Gesendet', - 'Ko\xc5\xa1/Sent Items', 'Gesendete Elemente'}} + 'INBOX.Sent Messages', 'Odeslan\xc3\xa9', 'mail/sent-mail', + 'Ko\xc5\xa1', 'INBOX.SentMail', 'Gesendet', + 'Ko\xc5\xa1/Sent Items', 'Gesendete Elemente', + 'INBOX.\xc9l\xe9ments envoy\xe9s', 'INBOX.INBOX.Sent',}} diff --git a/inbox/ignition.py b/inbox/ignition.py index 2350df4a5..fa2d5bc8c 100644 --- a/inbox/ignition.py +++ b/inbox/ignition.py @@ -46,7 +46,7 @@ def engine(database_name, database_uri, pool_size=DB_POOL_SIZE, engine = create_engine(database_uri, listeners=[ForceStrictMode()], isolation_level='READ COMMITTED', - echo=False, + echo=echo, pool_size=pool_size, pool_timeout=pool_timeout, pool_recycle=3600, @@ -60,17 +60,18 @@ def receive_checkout(dbapi_connection, connection_record, connection_proxy): '''Log checkedout and overflow when a connection is checked out''' hostname = gethostname().replace(".", "-") - process_name = str(config.get("PROCESS_NAME", "unknown")) + process_name = str(config.get("PROCESS_NAME", "main_process")) - statsd_client.gauge(".".join( - ["dbconn", database_name, hostname, process_name, - "checkedout"]), - connection_proxy._pool.checkedout()) + if config.get('ENABLE_DB_TXN_METRICS', False): + statsd_client.gauge(".".join( + ["dbconn", database_name, hostname, process_name, + "checkedout"]), + connection_proxy._pool.checkedout()) - statsd_client.gauge(".".join( - ["dbconn", database_name, hostname, process_name, - "overflow"]), - connection_proxy._pool.overflow()) + statsd_client.gauge(".".join( + ["dbconn", database_name, hostname, process_name, + "overflow"]), + connection_proxy._pool.overflow()) # Keep track of where and why this connection was checked out. 
log = get_logger() @@ -98,6 +99,7 @@ class EngineManager(object): def __init__(self, databases, users, include_disabled=False): self.engines = {} + self._engine_zones = {} keys = set() schema_names = set() use_proxysql = config.get('USE_PROXYSQL', False) @@ -106,6 +108,7 @@ def __init__(self, databases, users, include_disabled=False): port = database['PORT'] username = users[hostname]['USER'] password = users[hostname]['PASSWORD'] + zone = database.get('ZONE') for shard in database['SHARDS']: schema_name = shard['SCHEMA_NAME'] key = shard['ID'] @@ -131,6 +134,7 @@ def __init__(self, databases, users, include_disabled=False): hostname=hostname, port=port) self.engines[key] = engine(schema_name, uri) + self._engine_zones[key] = zone def shard_key_for_id(self, id_): return id_ >> 48 @@ -138,6 +142,12 @@ def shard_key_for_id(self, id_): def get_for_id(self, id_): return self.engines[self.shard_key_for_id(id_)] + def zone_for_id(self, id_): + return self._engine_zones[self.shard_key_for_id(id_)] + + def shards_for_zone(self, zone): + return [k for k, z in self._engine_zones.items() if z == zone] + engine_manager = EngineManager(config.get_required('DATABASE_HOSTS'), config.get_required('DATABASE_USERS')) diff --git a/inbox/instrumentation.py b/inbox/instrumentation.py index e3d0089b9..e5214eb05 100644 --- a/inbox/instrumentation.py +++ b/inbox/instrumentation.py @@ -1,5 +1,6 @@ import collections import math +import thread import signal import socket import sys @@ -15,7 +16,8 @@ from nylas.logging import get_logger -MAX_BLOCKING_TIME = 5 +BLOCKING_SAMPLE_PERIOD = 5 +MAX_BLOCKING_TIME_BEFORE_INTERRUPT = 60 GREENLET_SAMPLING_INTERVAL = 1 LOGGING_INTERVAL = 60 @@ -76,16 +78,16 @@ class GreenletTracer(object): Parameters ---------- - max_blocking_time: float - Log a warning if a greenlet blocks for more than max_blocking_time + blocking_sample_period: float + Log a warning if a greenlet blocks for more than blocking_sample_period seconds. """ def __init__(self, - max_blocking_time=MAX_BLOCKING_TIME, + blocking_sample_period=BLOCKING_SAMPLE_PERIOD, sampling_interval=GREENLET_SAMPLING_INTERVAL, logging_interval=LOGGING_INTERVAL): - self.max_blocking_time = max_blocking_time + self.blocking_sample_period = blocking_sample_period self.sampling_interval = sampling_interval self.logging_interval = logging_interval @@ -108,6 +110,7 @@ def __init__(self, # We need a new client instance here because this runs in its own # thread. self.statsd_client = get_statsd_client() + self.start_time = time.time() def start(self): self.start_time = time.time() @@ -156,20 +159,23 @@ def _trace(self, event, xxx_todo_changeme): self._last_switch_time = current_time self._switch_flag = True - def _check_blocking(self): + def _check_blocking(self, current_time): if self._switch_flag is False: active_greenlet = self._active_greenlet if active_greenlet is not None and active_greenlet != self._hub: - # greenlet.gr_frame doesn't work on another thread -- we have - # to get the main thread's frame. - frame = sys._current_frames()[self._main_thread_id] - formatted_frame = '\t'.join(traceback.format_stack(frame)) - self.log.warning( - 'greenlet blocking', frame=formatted_frame, - context=getattr(active_greenlet, 'context', None), - blocking_greenlet_id=id(active_greenlet)) + self._notify_greenlet_blocked(active_greenlet, current_time) self._switch_flag = False + def _notify_greenlet_blocked(self, active_greenlet, current_time): + # greenlet.gr_frame doesn't work on another thread -- we have + # to get the main thread's frame. 
+ frame = sys._current_frames()[self._main_thread_id] + formatted_frame = '\t'.join(traceback.format_stack(frame)) + self.log.warning( + 'greenlet blocking', frame=formatted_frame, + context=getattr(active_greenlet, 'context', None), + blocking_greenlet_id=id(active_greenlet)) + def _calculate_pending_avgs(self): # Calculate a "load average" for greenlet scheduling in roughly the # same way as /proc/loadavg. I.e., a 1/5/15-minute @@ -191,12 +197,12 @@ def _calculate_cpu_avgs(self): def _publish_load_avgs(self): for k, v in self.pending_avgs.items(): - path = 'pending_avg.{}.{}.{:02d}'.format(self.hostname, - self.process_name, k) + path = 'greenlet_tracer.pending_avg.{}.{}.{:02d}'.format( + self.hostname, self.process_name, k) self.statsd_client.gauge(path, v) for k, v in self.cpu_avgs.items(): - path = 'cpu_avg.{}.{}.{:02d}'.format(self.hostname, - self.process_name, k) + path = 'greenlet_tracer.cpu_avg.{}.{}.{:02d}'.format( + self.hostname, self.process_name, k) self.statsd_client.gauge(path, v) def _monitoring_thread(self): @@ -210,8 +216,8 @@ def _run_impl(self): self._calculate_pending_avgs() self._calculate_cpu_avgs() now = time.time() - if now - self.last_checked_blocking > self.max_blocking_time: - self._check_blocking() + if now - self.last_checked_blocking > self.blocking_sample_period: + self._check_blocking(now) self.last_checked_blocking = now if now - self.last_logged_stats > self.logging_interval: self.log_stats() @@ -221,3 +227,34 @@ def _run_impl(self): except Exception: if sys is not None: raise + + +class KillerGreenletTracer(GreenletTracer): + def __init__(self, + blocking_sample_period=BLOCKING_SAMPLE_PERIOD, + sampling_interval=GREENLET_SAMPLING_INTERVAL, + logging_interval=LOGGING_INTERVAL, + max_blocking_time=MAX_BLOCKING_TIME_BEFORE_INTERRUPT): + self._max_blocking_time = max_blocking_time + super(KillerGreenletTracer, self).__init__(blocking_sample_period, + sampling_interval, + logging_interval) + + def _notify_greenlet_blocked(self, active_greenlet, current_time): + super(KillerGreenletTracer, self)._notify_greenlet_blocked(active_greenlet, current_time) + if self._last_switch_time is None: + return + + time_spent = current_time - self._last_switch_time + if time_spent <= self._max_blocking_time: + return + # This will cause the main thread (which is running the blocked greenlet) + # to raise a KeyboardInterrupt exception. + # We can't just call activet_greenlet.kill() here because gevent will + # throw an exception on this thread saying that we would block forever + # (which is true). 
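# (Editor's note: thread.interrupt_main() is the stdlib escape hatch used
# below. It schedules a KeyboardInterrupt in the main thread, which per the
# comment above is the thread running the blocked greenlet, so the stuck
# code path gets unwound without the tracer thread touching gevent's hub
# directly.)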
+ self.log.warning( + 'interrupting blocked greenlet', + context=getattr(active_greenlet, 'context', None), + blocking_greenlet_id=id(active_greenlet)) + thread.interrupt_main() diff --git a/inbox/mailsync/backends/gmail.py b/inbox/mailsync/backends/gmail.py index 91191c1af..9147b0b28 100644 --- a/inbox/mailsync/backends/gmail.py +++ b/inbox/mailsync/backends/gmail.py @@ -410,8 +410,8 @@ def download_and_commit_uids(self, crispin_client, uids): db_session.commit() new_uids.add(uid) - log.info('Committed new UIDs', - new_committed_message_count=len(new_uids)) + log.debug('Committed new UIDs', + new_committed_message_count=len(new_uids)) # If we downloaded uids, record message velocity (#uid / latency) if self.state == "initial" and len(new_uids): self._report_message_velocity(datetime.utcnow() - start, diff --git a/inbox/mailsync/backends/imap/common.py b/inbox/mailsync/backends/imap/common.py index b77b66497..294b2e72e 100644 --- a/inbox/mailsync/backends/imap/common.py +++ b/inbox/mailsync/backends/imap/common.py @@ -147,9 +147,10 @@ def remove_deleted_uids(account_id, folder_id, uids): if not message.imapuids and message.is_draft: # Synchronously delete drafts. thread = message.thread - thread.messages.remove(message) + if thread is not None: + thread.messages.remove(message) db_session.delete(message) - if not thread.messages: + if thread is not None and not thread.messages: db_session.delete(thread) else: account = Account.get(account_id, db_session) diff --git a/inbox/mailsync/backends/imap/generic.py b/inbox/mailsync/backends/imap/generic.py index 6d9641e69..17a316c5b 100644 --- a/inbox/mailsync/backends/imap/generic.py +++ b/inbox/mailsync/backends/imap/generic.py @@ -492,8 +492,8 @@ def create_message(self, db_session, acct, folder, msg): datetime.utcnow() - new_uid.message.received_date) \ .total_seconds() * 1000 metrics = [ - '.'.join(['accounts', 'overall', 'message_latency']), - '.'.join(['providers', self.provider_name, 'message_latency']), + '.'.join(['mailsync', 'providers', 'overall', 'message_latency']), + '.'.join(['mailsync', 'providers', self.provider_name, 'message_latency']), ] for metric in metrics: statsd_client.timing(metric, latency_millis) @@ -549,8 +549,7 @@ def download_and_commit_uids(self, crispin_client, uids): new_uids.add(uid) db_session.commit() - log.info('Committed new UIDs', - new_committed_message_count=len(new_uids)) + log.debug('Committed new UIDs', new_committed_message_count=len(new_uids)) # If we downloaded uids, record message velocity (#uid / latency) if self.state == 'initial' and len(new_uids): self._report_message_velocity(datetime.utcnow() - start, @@ -562,16 +561,22 @@ def download_and_commit_uids(self, crispin_client, uids): return len(new_uids) def _report_first_message(self): - now = datetime.utcnow() + # Only record the "time to first message" in the inbox. Because users + # can add more folders at any time, "initial sync"-style metrics for + # other folders don't mean much. 
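# (Editor's note, hedged: 'all' is presumably allowed through so that
# Gmail-style accounts, whose primary mailbox is the All Mail folder, still
# report a time-to-first-message metric.)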
+ if self.folder_role not in ['inbox', 'all']: + return + now = datetime.utcnow() with session_scope(self.namespace_id) as db_session: account = db_session.query(Account).get(self.account_id) account_created = account.created_at latency = (now - account_created).total_seconds() * 1000 + metrics = [ - '.'.join(['providers', self.provider_name, 'first_message']), - '.'.join(['providers', 'overall', 'first_message']) + '.'.join(['mailsync', 'providers', self.provider_name, 'first_message']), + '.'.join(['mailsync', 'providers', 'overall', 'first_message']) ] for metric in metrics: @@ -581,9 +586,9 @@ def _report_message_velocity(self, timedelta, num_uids): latency = (timedelta).total_seconds() * 1000 latency_per_uid = float(latency) / num_uids metrics = [ - '.'.join(['providers', self.provider_name, + '.'.join(['mailsync', 'providers', self.provider_name, 'message_velocity']), - '.'.join(['providers', 'overall', 'message_velocity']) + '.'.join(['mailsync', 'providers', 'overall', 'message_velocity']) ] for metric in metrics: statsd_client.timing(metric, latency_per_uid) @@ -617,8 +622,8 @@ def get_new_uids(self, crispin_client): raise e if remote_uidnext is not None and remote_uidnext == self.uidnext: return - log.info('UIDNEXT changed, checking for new UIDs', - remote_uidnext=remote_uidnext, saved_uidnext=self.uidnext) + log.debug('UIDNEXT changed, checking for new UIDs', + remote_uidnext=remote_uidnext, saved_uidnext=self.uidnext) crispin_client.select_folder(self.folder_name, self.uidvalidity_cb) with session_scope(self.namespace_id) as db_session: @@ -652,9 +657,9 @@ def condstore_refresh_flags(self, crispin_client): saved_highestmodseq=self.highestmodseq) return - log.info('HIGHESTMODSEQ has changed, getting changed UIDs', - new_highestmodseq=new_highestmodseq, - saved_highestmodseq=self.highestmodseq) + log.debug('HIGHESTMODSEQ has changed, getting changed UIDs', + new_highestmodseq=new_highestmodseq, + saved_highestmodseq=self.highestmodseq) crispin_client.select_folder(self.folder_name, self.uidvalidity_cb) changed_flags = crispin_client.condstore_changed_flags( self.highestmodseq) diff --git a/inbox/mailsync/frontend.py b/inbox/mailsync/frontend.py index febc0f5c0..872fa7b77 100644 --- a/inbox/mailsync/frontend.py +++ b/inbox/mailsync/frontend.py @@ -3,29 +3,18 @@ from pympler import muppy, summary from werkzeug.serving import run_simple, WSGIRequestHandler from flask import Flask, jsonify, request -from inbox.instrumentation import GreenletTracer, ProfileCollector +from inbox.instrumentation import (GreenletTracer, KillerGreenletTracer, + ProfileCollector) class HTTPFrontend(object): """This is a lightweight embedded HTTP server that runs inside a mailsync - process. It allows you can programmatically interact with the process: - to get profile/memory/load metrics, or to schedule new account syncs.""" - - def __init__(self, sync_service, port, trace_greenlets, profile): - self.sync_service = sync_service - self.port = port - self.profiler = ProfileCollector() if profile else None - self.tracer = GreenletTracer() if trace_greenlets else None + or syncback process. It allows you to programmatically interact with the + process: to get profile/memory/load metrics, or to schedule new account + syncs.""" def start(self): - if self.tracer is not None: - self.tracer.start() - - if self.profiler is not None: - self.profiler.start() - app = self._create_app() - # We need to spawn an OS-level thread because we don't want a stuck # greenlet to prevent us to access the web API. 
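# (Editor's note: run_simple is started on a raw OS thread precisely because
# the profiling and tracing endpoints are most valuable when the gevent hub
# is wedged; a greenlet-based server could not respond in exactly that
# situation.)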
gevent._threading.start_new_thread(run_simple, ('0.0.0.0', self.port, app), @@ -33,16 +22,32 @@ def start(self): def _create_app(self): app = Flask(__name__) + self._create_app_impl(app) + return app - @app.route('/unassign', methods=['POST']) - def unassign_account(): - account_id = request.json['account_id'] - ret = self.sync_service.stop_sync(account_id) - if ret: - return 'OK' - else: - return 'Account not assigned to this process', 409 +class ProfilingHTTPFrontend(HTTPFrontend): + def __init__(self, port, trace_greenlets, profile): + self.port = port + self.profiler = ProfileCollector() if profile else None + self.tracer = self.greenlet_tracer_cls()() if trace_greenlets else None + super(ProfilingHTTPFrontend, self).__init__() + + def greenlet_tracer_cls(self): + return GreenletTracer + + def get_pending_avgs(self): + assert self.tracer is not None + return self.tracer.pending_avgs + + def start(self): + if self.tracer is not None: + self.tracer.start() + if self.profiler is not None: + self.profiler.start() + super(ProfilingHTTPFrontend, self).start() + + def _create_app_impl(self, app): @app.route('/profile') def profile(): if self.profiler is None: @@ -67,7 +72,43 @@ def mem(): summ = summary.summarize(objs) return '\n'.join(summary.format_(summ)) + '\n' - return app + +class SyncbackHTTPFrontend(ProfilingHTTPFrontend): + def greenlet_tracer_cls(self): + return KillerGreenletTracer + + +class SyncHTTPFrontend(ProfilingHTTPFrontend): + def __init__(self, sync_service, port, trace_greenlets, profile): + self.sync_service = sync_service + super(SyncHTTPFrontend, self).__init__(port, trace_greenlets, profile) + + def greenlet_tracer_cls(self): + return KillerGreenletTracer + + def _create_app_impl(self, app): + super(SyncHTTPFrontend, self)._create_app_impl(app) + + @app.route('/unassign', methods=['POST']) + def unassign_account(): + account_id = request.json['account_id'] + ret = self.sync_service.stop_sync(account_id) + if ret: + return 'OK' + else: + return 'Account not assigned to this process', 409 + + @app.route('/build-metadata', methods=['GET']) + def build_metadata(): + filename = '/usr/share/python/cloud-core/metadata.txt' + with open(filename, 'r') as f: + _, build_id = f.readline().rstrip('\n').split() + build_id = build_id[1:-1] # Remove first and last single quotes. 
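# (Editor's sketch: the parsing here assumes a two-line metadata.txt along
# these lines, with the build id wrapped in single quotes and the commit
# hash bare; the actual file contents are deployment-specific and not shown
# in this patch:
#
#     build_id 'some-build-identifier'
#     git_commit abcdef0123456789
# )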
+ _, git_commit = f.readline().rstrip('\n').split() + return jsonify({ + 'build_id': build_id, + 'git_commit': git_commit, + }) class _QuietHandler(WSGIRequestHandler): diff --git a/inbox/mailsync/gc.py b/inbox/mailsync/gc.py index 1a4541a1f..c2c627cb4 100644 --- a/inbox/mailsync/gc.py +++ b/inbox/mailsync/gc.py @@ -5,7 +5,7 @@ from sqlalchemy.orm import load_only from nylas.logging import get_logger log = get_logger() -from inbox.models import Message +from inbox.models import Message, Thread from inbox.models.category import Category, EPOCH from inbox.models.message import MessageCategory from inbox.models.folder import Folder @@ -18,7 +18,8 @@ from inbox.crispin import connection_pool from imapclient.imap_utf7 import encode as utf7_encode -DEFAULT_MESSAGE_TTL = 120 +DEFAULT_MESSAGE_TTL = 2 * 60 # 2 minutes +DEFAULT_THREAD_TTL = 60 * 60 * 24 * 7 # 7 days MAX_FETCH = 1000 @@ -50,7 +51,7 @@ class DeleteHandler(gevent.Greenlet): """ def __init__(self, account_id, namespace_id, provider_name, uid_accessor, - message_ttl=DEFAULT_MESSAGE_TTL): + message_ttl=DEFAULT_MESSAGE_TTL, thread_ttl=DEFAULT_THREAD_TTL): bind_context(self, 'deletehandler', account_id) self.account_id = account_id self.namespace_id = namespace_id @@ -58,6 +59,7 @@ def __init__(self, account_id, namespace_id, provider_name, uid_accessor, self.uids_for_message = uid_accessor self.log = log.new(account_id=account_id) self.message_ttl = datetime.timedelta(seconds=message_ttl) + self.thread_ttl = datetime.timedelta(seconds=thread_ttl) gevent.Greenlet.__init__(self) def _run(self): @@ -69,6 +71,7 @@ def _run_impl(self): current_time = datetime.datetime.utcnow() self.check(current_time) self.gc_deleted_categories() + self.gc_deleted_threads(current_time) gevent.sleep(self.message_ttl.total_seconds()) def check(self, current_time): @@ -100,7 +103,10 @@ def check(self, current_time): # db_session.deleted. db_session.delete(message) if not thread.messages: - db_session.delete(thread) + # We don't eagerly delete empty Threads because there's a + # race condition between deleting a Thread and creating a + # new Message that refers to the old deleted Thread. + thread.mark_for_deletion() else: # TODO(emfree): This is messy. 
We need better # abstractions for recomputing a thread's attributes @@ -142,6 +148,20 @@ def gc_deleted_categories(self): db_session.delete(category) db_session.commit() + def gc_deleted_threads(self, current_time): + with session_scope(self.namespace_id) as db_session: + deleted_threads = db_session.query(Thread).filter( + Thread.namespace_id == self.namespace_id, + Thread.deleted_at <= current_time - self.thread_ttl + ).limit(MAX_FETCH) + for thread in deleted_threads: + if thread.messages: + thread.deleted_at = None + db_session.commit() + continue + db_session.delete(thread) + db_session.commit() + class LabelRenameHandler(gevent.Greenlet): """ diff --git a/inbox/mailsync/service.py b/inbox/mailsync/service.py index 6f41569b2..b61fde1ed 100644 --- a/inbox/mailsync/service.py +++ b/inbox/mailsync/service.py @@ -1,11 +1,10 @@ import time import platform -import collections +import random -import gevent from gevent.lock import BoundedSemaphore +from sqlalchemy import or_, and_ from sqlalchemy.exc import OperationalError -import psutil from inbox.providers import providers from inbox.config import config @@ -14,9 +13,9 @@ from inbox.heartbeat.status import clear_heartbeat_status from nylas.logging import get_logger from nylas.logging.sentry import log_uncaught_errors -from inbox.models.session import session_scope +from inbox.models.session import session_scope, global_session_scope from inbox.models import Account -from inbox.scheduling.queue import QueueClient +from inbox.scheduling.event_queue import EventQueue, EventQueueGroup from inbox.util.concurrency import retry_with_logging from inbox.util.stats import statsd_client @@ -27,13 +26,23 @@ # How much time (in minutes) should all CPUs be over 90% to consider them # overloaded. -OVERLOAD_MIN = 20 -SYNC_POLL_INTERVAL = 10 -NUM_CPU_SAMPLES = (OVERLOAD_MIN * 60) / SYNC_POLL_INTERVAL -NOMINAL_THRESHOLD = 90.0 +SYNC_POLL_INTERVAL = 20 +PENDING_AVGS_THRESHOLD = 10 MAX_ACCOUNTS_PER_PROCESS = config.get('MAX_ACCOUNTS_PER_PROCESS', 150) +SYNC_EVENT_QUEUE_NAME = 'sync:event_queue:{}' +SHARED_SYNC_EVENT_QUEUE_NAME = 'sync:shared_event_queue:{}' + +SHARED_SYNC_EVENT_QUEUE_ZONE_MAP = {} + + +def shared_sync_event_queue_for_zone(zone): + queue_name = SHARED_SYNC_EVENT_QUEUE_NAME.format(zone) + if queue_name not in SHARED_SYNC_EVENT_QUEUE_ZONE_MAP: + SHARED_SYNC_EVENT_QUEUE_ZONE_MAP[queue_name] = EventQueue(queue_name) + return SHARED_SYNC_EVENT_QUEUE_ZONE_MAP[queue_name] + class SyncService(object): """ @@ -46,7 +55,7 @@ class SyncService(object): If a system is launching 16 sync processes, value from 0-15. (Each sync service on the system should get a different value.) poll_interval : int - Seconds between polls for account changes. + Serves as the max timeout for the redis blocking pop. """ def __init__(self, process_identifier, process_number, @@ -71,77 +80,90 @@ def __init__(self, process_identifier, process_number, self.email_sync_monitors = {} self.contact_sync_monitors = {} self.event_sync_monitors = {} - self.poll_interval = poll_interval + # Randomize the poll_interval so we maintain at least a little fairness + # when using a timeout while blocking on the redis queues. + min_poll_interval = 5 + self.poll_interval = int((random.random() * (poll_interval - min_poll_interval)) + min_poll_interval) self.semaphore = BoundedSemaphore(1) + self.zone = config.get('ZONE') + + # Note that we don't partition by zone for the private queues. + # There's not really a reason to since there's one queue per machine + # anyways. 
Also, if you really want to send an Account to a mailsync + # machine in another zone you can do so. + self.private_queue = EventQueue(SYNC_EVENT_QUEUE_NAME.format(self.process_identifier)) + self.queue_group = EventQueueGroup([ + shared_sync_event_queue_for_zone(self.zone), + self.private_queue, + ]) self.stealing_enabled = config.get('SYNC_STEAL_ACCOUNTS', True) - self.zone = config.get('ZONE') - self.queue_client = QueueClient(self.zone) - self.rolling_cpu_counts = collections.deque(maxlen=NUM_CPU_SAMPLES) + self._pending_avgs_provider = None self.last_unloaded_account = time.time() - # Fill the queue with initial values. - null_cpu_values = [0.0 for cpu in psutil.cpu_percent(percpu=True)] - for i in range(NUM_CPU_SAMPLES): - self.rolling_cpu_counts.append(null_cpu_values) - def run(self): while True: retry_with_logging(self._run_impl, self.log) def _run_impl(self): """ - Polls for newly registered accounts and checks for start/stop commands. - - """ - self.poll() - gevent.sleep(self.poll_interval) + Waits for notifications about Account migrations and checks for start/stop commands. - def _compute_cpu_average(self): - """ - Use our CPU data to compute the average CPU usage for this machine. """ - - # We can just zip and sum the data because psutil always returns - # results in the same order. - return [sum(x) / float(NUM_CPU_SAMPLES) for x in zip(*self.rolling_cpu_counts)] - - def poll(self): - # We really don't want to take on more load than we can bear, so we - # need to check the CPU usage before accepting new accounts. - # Note that we can't check this for the current core because the kernel - # transparently moves programs across cores. - usage_per_cpu = psutil.cpu_percent(percpu=True) - self.rolling_cpu_counts.append(usage_per_cpu) - - cpu_averages = self._compute_cpu_average() - - cpus_over_nominal = all([cpu_usage > NOMINAL_THRESHOLD for cpu_usage in cpu_averages]) - - # Conservatively, stop accepting accounts if the CPU usage is over - # NOMINAL_THRESHOLD for every core, or if the total # of accounts - # being synced by a single process exceeds the threshold. Excessive + # When the service first starts we should check the state of the world. + self.poll({'queue_name': 'none'}) + event = None + while event is None: + event = self.queue_group.receive_event(timeout=self.poll_interval) + + if shared_sync_event_queue_for_zone(self.zone).queue_name == event['queue_name']: + self.poll_shared_queue(event) + return + + # We're going to re-evaluate the world so we don't need any of the + # other pending events in our private queue. + self._flush_private_queue() + self.poll(event) + + def _flush_private_queue(self): + while True: + event = self.private_queue.receive_event(timeout=None) + if event is None: + break + + def poll_shared_queue(self, event): + # Conservatively, stop accepting accounts if the process pending averages + # is over PENDING_AVGS_THRESHOLD or if the total of accounts being + # synced by a single process exceeds the threshold. Excessive # concurrency per process can result in lowered database throughput # or availability problems, since many transactions may be held open # at the same time. 
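# (Editor's note: the overload signal changes in this hunk from sampled CPU
# load to the greenlet tracer's 15-minute pending average. A rough sketch of
# how the pieces connect, based only on code visible in this patch -- the
# actual wiring lives in the sync entry point, which is not shown:
#
#     frontend = SyncHTTPFrontend(sync_service, port,
#                                 trace_greenlets=True, profile=False)
#     sync_service.register_pending_avgs_provider(frontend)
#     frontend.start()
#
# get_pending_avgs() exposes the tracer's 1/5/15-minute pending averages,
# and the 15-minute bucket is compared against PENDING_AVGS_THRESHOLD below.)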
- if self.stealing_enabled and not cpus_over_nominal and \ + pending_avgs_over_threshold = False + if self._pending_avgs_provider is not None: + pending_avgs = self._pending_avgs_provider.get_pending_avgs() + pending_avgs_over_threshold = pending_avgs[15] >= PENDING_AVGS_THRESHOLD + + if self.stealing_enabled and not pending_avgs_over_threshold and \ len(self.syncing_accounts) < MAX_ACCOUNTS_PER_PROCESS: - r = self.queue_client.claim_next(self.process_identifier) - if r: - self.log.info('Claimed new account sync', account_id=r) + account_id = event['id'] + if self.start_sync(account_id): + self.log.info('Claimed new unassigned account sync', account_id=account_id) + return + + if not self.stealing_enabled: + reason = 'stealing disabled' + elif pending_avgs_over_threshold: + reason = 'process pending avgs too high' else: - if not self.stealing_enabled: - reason = 'stealing disabled' - elif cpus_over_nominal: - reason = 'CPU too high' - else: - reason = 'reached max accounts for process' - self.log.info('Not claiming new account sync', reason=reason) + reason = 'reached max accounts for process' + self.log.info('Not claiming new account sync, sending event back to shared queue', reason=reason) + shared_sync_event_queue_for_zone(self.zone).send_event(event) + def poll(self, event): # Determine which accounts to sync - start_accounts = self.accounts_to_sync() + start_accounts = self.account_ids_to_sync() statsd_client.gauge( - 'accounts.{}.mailsync-{}.count'.format( + 'mailsync.account_counts.{}.mailsync-{}.count'.format( self.host, self.process_number), len(start_accounts)) # Perform the appropriate action on each account @@ -154,7 +176,7 @@ def poll(self): exc_info=True) log_uncaught_errors() - stop_accounts = self.syncing_accounts - set(start_accounts) + stop_accounts = self.account_ids_owned() - set(start_accounts) for account_id in stop_accounts: self.log.info('sync service stopping sync', account_id=account_id) @@ -165,9 +187,24 @@ def poll(self): exc_info=True) log_uncaught_errors() - def accounts_to_sync(self): - return {int(k) for k, v in self.queue_client.assigned().items() - if v == self.process_identifier} + def account_ids_to_sync(self): + with global_session_scope() as db_session: + return {r[0] for r in db_session.query(Account.id). + filter(Account.sync_should_run, + or_(and_(Account.desired_sync_host == self.process_identifier, + Account.sync_host == None), # noqa + and_(Account.desired_sync_host == None, # noqa + Account.sync_host == self.process_identifier), + and_(Account.desired_sync_host == self.process_identifier, + Account.sync_host == self.process_identifier))).all()} + + def account_ids_owned(self): + with global_session_scope() as db_session: + return {r[0] for r in db_session.query(Account.id). 
+ filter(Account.sync_host == self.process_identifier).all()} + + def register_pending_avgs_provider(self, pending_avgs_provider): + self._pending_avgs_provider = pending_avgs_provider def start_sync(self, account_id): """ @@ -176,55 +213,66 @@ def start_sync(self, account_id): """ with self.semaphore, session_scope(account_id) as db_session: - acc = db_session.query(Account).get(account_id) + acc = db_session.query(Account).with_for_update().get(account_id) if acc is None: self.log.error('no such account', account_id=account_id) - return + return False + if not acc.sync_should_run: + return False + if acc.desired_sync_host is not None and acc.desired_sync_host != self.process_identifier: + return False + if acc.sync_host is not None and acc.sync_host != self.process_identifier: + return False self.log.info('starting sync', account_id=acc.id, email_address=acc.email_address) - if acc.id not in self.syncing_accounts: - try: - acc.sync_host = self.process_identifier - if acc.sync_email: - monitor = self.monitor_cls_for[acc.provider](acc) - self.email_sync_monitors[acc.id] = monitor - monitor.start() - - info = acc.provider_info - if info.get('contacts', None) and acc.sync_contacts: - contact_sync = ContactSync(acc.email_address, - acc.verbose_provider, - acc.id, - acc.namespace.id) - self.contact_sync_monitors[acc.id] = contact_sync - contact_sync.start() - - if info.get('events', None) and acc.sync_events: - if (USE_GOOGLE_PUSH_NOTIFICATIONS and - acc.provider == 'gmail'): - event_sync = GoogleEventSync(acc.email_address, - acc.verbose_provider, - acc.id, - acc.namespace.id) - else: - event_sync = EventSync(acc.email_address, - acc.verbose_provider, - acc.id, - acc.namespace.id) - self.event_sync_monitors[acc.id] = event_sync - event_sync.start() - - acc.sync_started() - self.syncing_accounts.add(acc.id) - db_session.commit() - self.log.info('Sync started', account_id=account_id, - sync_host=acc.sync_host) - except Exception: - self.log.error('Error starting sync', exc_info=True, - account_id=account_id) - else: + if acc.id in self.syncing_accounts: self.log.info('sync already started', account_id=account_id) + return False + + try: + acc.sync_host = self.process_identifier + if acc.sync_email: + monitor = self.monitor_cls_for[acc.provider](acc) + self.email_sync_monitors[acc.id] = monitor + monitor.start() + + info = acc.provider_info + if info.get('contacts', None) and acc.sync_contacts: + contact_sync = ContactSync(acc.email_address, + acc.verbose_provider, + acc.id, + acc.namespace.id) + self.contact_sync_monitors[acc.id] = contact_sync + contact_sync.start() + + if info.get('events', None) and acc.sync_events: + if (USE_GOOGLE_PUSH_NOTIFICATIONS and + acc.provider == 'gmail'): + event_sync = GoogleEventSync(acc.email_address, + acc.verbose_provider, + acc.id, + acc.namespace.id) + else: + event_sync = EventSync(acc.email_address, + acc.verbose_provider, + acc.id, + acc.namespace.id) + self.event_sync_monitors[acc.id] = event_sync + event_sync.start() + + acc.sync_started() + self.syncing_accounts.add(acc.id) + # TODO (mark): Uncomment this after we've transitioned to from statsd to brubeck + # statsd_client.gauge('mailsync.sync_hosts_counts.{}'.format(acc.id), 1, delta=True) + db_session.commit() + self.log.info('Sync started', account_id=account_id, + sync_host=acc.sync_host) + except Exception: + self.log.error('Error starting sync', exc_info=True, + account_id=account_id) + return False + return True def stop_sync(self, account_id): """ @@ -249,15 +297,17 @@ def stop_sync(self, 
account_id): self.event_sync_monitors[account_id].kill() del self.event_sync_monitors[account_id] - self.syncing_accounts.discard(account_id) - # Update database/heartbeat state with session_scope(account_id) as db_session: acc = db_session.query(Account).get(account_id) if not acc.sync_should_run: clear_heartbeat_status(acc.id) - if acc.sync_stopped(self.process_identifier): - self.log.info('sync stopped', account_id=account_id) - - r = self.queue_client.unassign(account_id, self.process_identifier) - return r + if not acc.sync_stopped(self.process_identifier): + self.syncing_accounts.discard(account_id) + return False + self.log.info('sync stopped', account_id=account_id) + # TODO (mark): Uncomment this after we've transitioned to from statsd to brubeck + # statsd_client.gauge('mailsync.sync_hosts_counts.{}'.format(acc.id), -1, delta=True) + db_session.commit() + self.syncing_accounts.discard(account_id) + return True diff --git a/inbox/models/account.py b/inbox/models/account.py index 7b8eebf26..f4837453c 100644 --- a/inbox/models/account.py +++ b/inbox/models/account.py @@ -3,10 +3,12 @@ from datetime import datetime from sqlalchemy import (Column, BigInteger, String, DateTime, Boolean, - ForeignKey, Enum, inspect, bindparam, Index) + ForeignKey, Enum, inspect, bindparam, Index, event) from sqlalchemy.orm import relationship +from sqlalchemy.orm.session import Session from sqlalchemy.sql.expression import false +from inbox.config import config from inbox.sqlalchemy_ext.util import JSON, MutableDict, bakery from inbox.models.mixins import (HasPublicID, HasEmailAddress, HasRunState, @@ -14,7 +16,11 @@ DeletedAtMixin) from inbox.models.base import MailSyncBase from inbox.models.calendar import Calendar +from inbox.scheduling.event_queue import EventQueue from inbox.providers import provider_info +from nylas.logging.sentry import log_uncaught_errors +from nylas.logging import get_logger +log = get_logger() # Note, you should never directly create Account objects. Instead you @@ -116,6 +122,7 @@ def emailed_events_calendar(self, cal): self._emailed_events_calendar = cal sync_host = Column(String(255), nullable=True) + desired_sync_host = Column(String(255), nullable=True) # current state of this account state = Column(Enum('live', 'down', 'invalid'), nullable=True) @@ -146,7 +153,8 @@ def sync_status(self): provider=self.provider, is_enabled=self.sync_enabled, state=self.sync_state, - sync_host=self.sync_host) + sync_host=self.sync_host, + desired_sync_host=self.desired_sync_host) d.update(self._sync_status or {}) return d @@ -180,8 +188,9 @@ def update_sync_error(self, error=None): self._sync_status['sync_error'] = None else: error_obj = { - 'message': str(error.message), - 'traceback': traceback.format_exc(20)} + 'message': str(error.message)[:3000], + 'exception': "".join(traceback.format_exception_only(type(error), error))[:500], + 'traceback': traceback.format_exc(20)[:3000]} self._sync_status['sync_error'] = error_obj @@ -208,11 +217,9 @@ def sync_started(self): self.sync_state = 'running' - def enable_sync(self, sync_host=None): + def enable_sync(self): """ Tell the monitor that this account should be syncing. """ self.sync_should_run = True - if sync_host is not None: - self.sync_host = sync_host def disable_sync(self, reason): """ Tell the monitor that this account should stop syncing. """ @@ -293,10 +300,119 @@ def should_suppress_transaction_creation(self): def server_settings(self): return None + def get_raw_message_contents(self, message): + # Get the raw contents of a message. 
We do this differently + # for every backend (Gmail, IMAP, EAS), and the best way + # to do this across repos is to make it a method of the + # account class. + raise NotImplementedError + discriminator = Column('type', String(16)) __mapper_args__ = {'polymorphic_identity': 'account', 'polymorphic_on': discriminator} +def should_send_event(obj): + if not isinstance(obj, Account): + return False + inspected_obj = inspect(obj) + hist = inspected_obj.attrs.sync_host.history + if hist.has_changes(): + return True + hist = inspected_obj.attrs.desired_sync_host.history + if hist.has_changes(): + return True + hist = inspected_obj.attrs.sync_should_run.history + return hist.has_changes() + + +def already_registered_listener(obj): + return getattr(obj, '_listener_state', None) is not None + + +def update_listener_state(obj): + obj._listener_state['sync_should_run'] = obj.sync_should_run + obj._listener_state['sync_host'] = obj.sync_host + obj._listener_state['desired_sync_host'] = obj.desired_sync_host + obj._listener_state['sent_event'] = False + + +@event.listens_for(Session, "after_flush") +def after_flush(session, flush_context): + from inbox.mailsync.service import shared_sync_event_queue_for_zone, SYNC_EVENT_QUEUE_NAME + + def send_migration_events(obj_state): + def f(session): + if obj_state['sent_event']: + return + + id = obj_state['id'] + sync_should_run = obj_state['sync_should_run'] + sync_host = obj_state['sync_host'] + desired_sync_host = obj_state['desired_sync_host'] + + try: + if sync_host is not None: + # Somebody is actively syncing this Account, so notify them if + # they should give up the Account. + if not sync_should_run or (sync_host != desired_sync_host and desired_sync_host is not None): + queue_name = SYNC_EVENT_QUEUE_NAME.format(sync_host) + log.info("Sending 'migrate_from' event for Account", + account_id=id, queue_name=queue_name) + EventQueue(queue_name).send_event({'event': 'migrate_from', 'id': id}) + return + + if not sync_should_run: + # We don't need to notify anybody because the Account is not + # actively being synced (sync_host is None) and sync_should_run is False, + # so just return early. + return + + if desired_sync_host is not None: + # Nobody is actively syncing the Account, and we have somebody + # who wants to sync this Account, so notify them. + queue_name = SYNC_EVENT_QUEUE_NAME.format(desired_sync_host) + log.info("Sending 'migrate_to' event for Account", + account_id=id, queue_name=queue_name) + EventQueue(queue_name).send_event({'event': 'migrate_to', 'id': id}) + return + + # Nobody is actively syncing the Account, and nobody in particular + # wants to sync the Account so notify the shared queue. 
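# (Editor's summary of the dispatch above, no new behaviour: after commit,
# an Account whose sync_host, desired_sync_host or sync_should_run changed
# notifies exactly one queue:
#   * someone is syncing it and it should stop or move -> 'migrate_from' on
#     that host's private queue (sync:event_queue:<host>),
#   * nobody is syncing it but desired_sync_host is set -> 'migrate_to' on
#     the desired host's private queue,
#   * nobody is syncing it and there is no preference -> 'migrate' on the
#     zone's shared queue.)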
+ shared_queue = shared_sync_event_queue_for_zone(config.get('ZONE')) + log.info("Sending 'migrate' event for Account", + account_id=id, queue_name=shared_queue.queue_name) + shared_queue.send_event({'event': 'migrate', 'id': id}) + obj_state['sent_event'] = True + except: + log_uncaught_errors(log, account_id=id, sync_host=sync_host, + desired_sync_host=desired_sync_host) + return f + + for obj in session.new: + if isinstance(obj, Account): + if already_registered_listener(obj): + update_listener_state(obj) + else: + obj._listener_state = {'id': obj.id} + update_listener_state(obj) + event.listen(session, + 'after_commit', + send_migration_events(obj._listener_state)) + + for obj in session.dirty: + if not session.is_modified(obj): + continue + if should_send_event(obj): + if already_registered_listener(obj): + update_listener_state(obj) + else: + obj._listener_state = {'id': obj.id} + update_listener_state(obj) + event.listen(session, + 'after_commit', + send_migration_events(obj._listener_state)) + + Index('ix_account_sync_should_run_sync_host', Account.sync_should_run, Account.sync_host, mysql_length={'sync_host': 191}) diff --git a/inbox/models/action_log.py b/inbox/models/action_log.py index 5143443d8..3ee0aa999 100644 --- a/inbox/models/action_log.py +++ b/inbox/models/action_log.py @@ -46,5 +46,6 @@ def create(cls, action, table_name, record_id, namespace_id, extra_args): __mapper_args__ = {'polymorphic_identity': 'actionlog', 'polymorphic_on': discriminator} + Index('ix_actionlog_status_retries', ActionLog.status, ActionLog.retries) Index('idx_actionlog_status_type', ActionLog.status, ActionLog.discriminator) diff --git a/inbox/models/backends/gmail.py b/inbox/models/backends/gmail.py index 7b9f88c10..395d7bd33 100644 --- a/inbox/models/backends/gmail.py +++ b/inbox/models/backends/gmail.py @@ -316,6 +316,10 @@ def needs_new_calendar_list_watch(self): return (self.gpush_calendar_list_expiration is None or self.gpush_calendar_list_expiration < datetime.utcnow()) + def get_raw_message_contents(self, message): + from inbox.s3.backends.gmail import get_gmail_raw_contents + return get_gmail_raw_contents(message) + class GmailAuthCredentials(MailSyncBase, UpdatedAtMixin, DeletedAtMixin): """ diff --git a/inbox/models/backends/imap.py b/inbox/models/backends/imap.py index 2797f8490..0e2cf158a 100644 --- a/inbox/models/backends/imap.py +++ b/inbox/models/backends/imap.py @@ -67,6 +67,10 @@ def smtp_endpoint(self, endpoint): self._smtp_server_host = host self._smtp_server_port = int(port) + def get_raw_message_contents(self, message): + from inbox.s3.backends.imap import get_imap_raw_contents + return get_imap_raw_contents(message) + __mapper_args__ = {'polymorphic_identity': 'imapaccount'} @@ -238,7 +242,7 @@ class ImapFolderInfo(MailSyncBase, UpdatedAtMixin, DeletedAtMixin): # Note that some IMAP providers do not support the CONDSTORE extension, and # therefore will not use this field. 
highestmodseq = Column(BigInteger, nullable=True) - uidnext = Column(Integer, nullable=True) + uidnext = Column(BigInteger, nullable=True) last_slow_refresh = Column(DateTime) __table_args__ = (UniqueConstraint('account_id', 'folder_id'),) diff --git a/inbox/models/base.py b/inbox/models/base.py index bcee1533d..2fe62eace 100644 --- a/inbox/models/base.py +++ b/inbox/models/base.py @@ -1,5 +1,6 @@ from sqlalchemy import Column, BigInteger from sqlalchemy.ext.declarative import as_declarative, declared_attr +from sqlalchemy.orm.exc import DetachedInstanceError from inbox.models.mixins import CreatedAtMixin @@ -21,4 +22,10 @@ def __table_args__(cls): return {'extend_existing': True} def __repr__(self): - return "<{} (id: {})>".format(self.__module__ + "." + self.__class__.__name__, self.id) + try: + return "<{} (id: {})>".format(self.__module__ + "." + self.__class__.__name__, self.id) + except DetachedInstanceError: + # SQLAlchemy has expired all values for this object and is trying + # to refresh them from the database, but has no session for the + # refresh. + return "<{} (id: detached)>".format(self.__module__ + "." + self.__class__.__name__) diff --git a/inbox/models/calendar.py b/inbox/models/calendar.py index 2f648aaf1..1ba587a3c 100644 --- a/inbox/models/calendar.py +++ b/inbox/models/calendar.py @@ -6,6 +6,7 @@ from inbox.models.base import MailSyncBase from inbox.models.namespace import Namespace +from inbox.models.constants import MAX_INDEXABLE_LENGTH from inbox.models.mixins import (HasPublicID, HasRevisions, UpdatedAtMixin, DeletedAtMixin) @@ -22,7 +23,7 @@ class Calendar(MailSyncBase, HasPublicID, HasRevisions, UpdatedAtMixin, load_on_pending=True, backref=backref('calendars')) - name = Column(String(191), nullable=True) + name = Column(String(MAX_INDEXABLE_LENGTH), nullable=True) provider_name = Column(String(128), nullable=True, default='DEPRECATED') description = Column(Text, nullable=True) @@ -51,7 +52,7 @@ def should_suppress_transaction_creation(self): def update(self, calendar): self.uid = calendar.uid - self.name = calendar.name + self.name = calendar.name[:MAX_INDEXABLE_LENGTH] self.read_only = calendar.read_only self.description = calendar.description diff --git a/inbox/models/contact.py b/inbox/models/contact.py index 5243435c8..fa640f27a 100644 --- a/inbox/models/contact.py +++ b/inbox/models/contact.py @@ -1,8 +1,9 @@ -from sqlalchemy import Column, Integer, String, Enum, ForeignKey, Text, Index +from sqlalchemy import Column, Integer, String, Enum, Text, Index, BigInteger, \ + ForeignKey from sqlalchemy.orm import relationship, backref, validates from sqlalchemy.schema import UniqueConstraint -from inbox.sqlalchemy_ext.util import MAX_TEXT_LENGTH +from inbox.sqlalchemy_ext.util import MAX_TEXT_CHARS from inbox.models.mixins import (HasPublicID, HasEmailAddress, HasRevisions, UpdatedAtMixin, DeletedAtMixin) from inbox.models.base import MailSyncBase @@ -16,12 +17,15 @@ class Contact(MailSyncBase, HasRevisions, HasPublicID, HasEmailAddress, """Data for a user's contact.""" API_OBJECT_NAME = 'contact' - namespace_id = Column(ForeignKey(Namespace.id, ondelete='CASCADE'), - nullable=False) - namespace = relationship(Namespace, load_on_pending=True) + namespace_id = Column(BigInteger, nullable=False, index=True) + namespace = relationship( + Namespace, + primaryjoin='foreign(Contact.namespace_id) == remote(Namespace.id)', + load_on_pending=True) # A server-provided unique ID. 
- uid = Column(String(64), nullable=False) + # NB: We specify the collation here so that the test DB gets setup correctly. + uid = Column(String(64, collation='utf8mb4_bin'), nullable=False) # A constant, unique identifier for the remote backend this contact came # from. E.g., 'google', 'eas', 'inbox' provider_name = Column(String(64)) @@ -45,10 +49,10 @@ class Contact(MailSyncBase, HasRevisions, HasPublicID, HasEmailAddress, 'namespace_id', 'uid', 'provider_name')) @validates('raw_data') - def validate_length(self, key, value): + def validate_text_column_length(self, key, value): if value is None: return None - return unicode_safe_truncate(value, MAX_TEXT_LENGTH) + return unicode_safe_truncate(value, MAX_TEXT_CHARS) @property def versioned_relationships(self): @@ -65,10 +69,11 @@ def merge_from(self, new_contact): class PhoneNumber(MailSyncBase, UpdatedAtMixin, DeletedAtMixin): STRING_LENGTH = 64 - contact_id = Column(ForeignKey(Contact.id, ondelete='CASCADE'), index=True) - contact = relationship(Contact, - backref=backref('phone_numbers', - cascade='all, delete-orphan')) + contact_id = Column(BigInteger, index=True) + contact = relationship( + Contact, + primaryjoin='foreign(PhoneNumber.contact_id) == remote(Contact.id)', + backref=backref('phone_numbers', cascade='all, delete-orphan')) type = Column(String(STRING_LENGTH), nullable=True) number = Column(String(STRING_LENGTH), nullable=False) @@ -86,8 +91,7 @@ class MessageContactAssociation(MailSyncBase): [assoc.message for assoc in c.message_associations if assoc.field == ... 'to_addr'] """ - contact_id = Column(ForeignKey(Contact.id, ondelete='CASCADE'), - primary_key=True) + contact_id = Column(BigInteger, primary_key=True) message_id = Column(ForeignKey(Message.id, ondelete='CASCADE'), primary_key=True) field = Column(Enum('from_addr', 'to_addr', @@ -97,6 +101,8 @@ class MessageContactAssociation(MailSyncBase): # when you try to delete a message or a contact. 
contact = relationship( Contact, + primaryjoin='foreign(MessageContactAssociation.contact_id) == ' + 'remote(Contact.id)', backref=backref('message_associations', cascade='all, delete-orphan')) message = relationship( Message, diff --git a/inbox/models/event.py b/inbox/models/event.py index 084628646..699e19a50 100644 --- a/inbox/models/event.py +++ b/inbox/models/event.py @@ -9,7 +9,7 @@ from sqlalchemy.types import TypeDecorator from sqlalchemy.dialects.mysql import LONGTEXT -from inbox.sqlalchemy_ext.util import MAX_TEXT_LENGTH, BigJSON, MutableList +from inbox.sqlalchemy_ext.util import MAX_TEXT_CHARS, BigJSON, MutableList from inbox.models.base import MailSyncBase from inbox.models.mixins import (HasPublicID, HasRevisions, UpdatedAtMixin, DeletedAtMixin) @@ -33,10 +33,10 @@ MAX_LENS = { 'location': LOCATION_MAX_LEN, 'owner': OWNER_MAX_LEN, - 'recurrence': MAX_TEXT_LENGTH, + 'recurrence': MAX_TEXT_CHARS, 'reminders': REMINDER_MAX_LEN, 'title': TITLE_MAX_LEN, - 'raw_data': MAX_TEXT_LENGTH + 'raw_data': MAX_TEXT_CHARS } diff --git a/inbox/models/message.py b/inbox/models/message.py index 65415ac64..1e359af81 100644 --- a/inbox/models/message.py +++ b/inbox/models/message.py @@ -10,7 +10,7 @@ Boolean, Enum, Index, bindparam) from sqlalchemy.dialects.mysql import LONGBLOB from sqlalchemy.orm import (relationship, backref, validates, joinedload, - subqueryload, load_only) + subqueryload, load_only, synonym) from sqlalchemy.sql.expression import false from sqlalchemy.ext.associationproxy import association_proxy @@ -72,13 +72,25 @@ def API_OBJECT_NAME(self): # Do delete messages if their associated thread is deleted. thread_id = Column(BigInteger, nullable=False) - thread = relationship( + _thread = relationship( 'Thread', primaryjoin='foreign(Message.thread_id) == remote(Thread.id)', # noqa backref=backref('messages', order_by='Message.received_date', cascade="all, delete-orphan")) + @property + def thread(self): + return self._thread + + @thread.setter + def thread(self, value): + if value is not None and self._thread is not None: + self._thread.deleted_at = None + self._thread = value + + thread = synonym('_thread', descriptor=thread) + from_addr = Column(JSON, nullable=False, default=lambda: []) sender_addr = Column(JSON, nullable=True) reply_to = Column(JSON, nullable=True, default=lambda: []) diff --git a/inbox/models/roles.py b/inbox/models/roles.py index 341f2d3f1..da74d0190 100644 --- a/inbox/models/roles.py +++ b/inbox/models/roles.py @@ -5,7 +5,9 @@ from nylas.logging import get_logger log = get_logger() from inbox.config import config -from inbox.util.blockstore import save_to_blockstore, get_from_blockstore +from inbox.util import blockstore +from inbox.s3.base import get_raw_from_provider +from inbox.util.stats import statsd_client # TODO: store AWS credentials in a better way. STORE_MSG_ON_S3 = config.get('STORE_MESSAGES_ON_S3', None) @@ -25,22 +27,51 @@ def data(self): # On initial download we temporarily store data in memory value = self._data else: - value = get_from_blockstore(self.data_sha256) + value = blockstore.get_from_blockstore(self.data_sha256) if value is None: - log.warning("Couldn't find data on S3 for block with hash {}" - .format(self.data_sha256)) + log.warning("Couldn't find data on S3 for block", + sha_hash=self.data_sha256) from inbox.models.block import Block if isinstance(self, Block): if self.parts: # This block is an attachment of a message that was - # accidentially deleted. We will attempt to fetch the raw + # deleted. 
We will attempt to fetch the raw # message and parse out the needed attachment. message = self.parts[0].message # only grab one - raw_mime = get_from_blockstore(message.data_sha256) + account = message.namespace.account + statsd_string = 'api.direct_fetching.{}.{}'.format( + account.provider, account.id) + + # Try to fetch the message from S3 first. + with statsd_client.timer('{}.blockstore_latency'.format( + statsd_string)): + raw_mime = blockstore.get_from_blockstore(message.data_sha256) + + # If it's not there, get it from the provider. + if raw_mime is None: + statsd_client.incr('{}.cache_misses'.format(statsd_string)) + + with statsd_client.timer('{}.provider_latency'.format( + statsd_string)): + raw_mime = get_raw_from_provider(message) + + msg_sha256 = sha256(raw_mime).hexdigest() + + # Cache the raw message in the blockstore so that + # we don't have to fetch it over and over. + + with statsd_client.timer('{}.blockstore_save_latency'.format( + statsd_string)): + blockstore.save_to_blockstore(msg_sha256, raw_mime) + else: + # We found it in the blockstore --- report this. + statsd_client.incr('{}.cache_hits'.format(statsd_string)) + + # If we couldn't find it there, give up. if raw_mime is None: log.error("Don't have raw message for hash {}" .format(message.data_sha256)) @@ -58,12 +89,19 @@ def data(self): if isinstance(data, unicode): data = data.encode('utf-8', 'strict') + if data is None: + continue + # Found it! if sha256(data).hexdigest() == self.data_sha256: log.info('Found subpart with hash {}'.format( self.data_sha256)) - save_to_blockstore(self.data_sha256, data) - return data + + with statsd_client.timer('{}.blockstore_save_latency'.format( + statsd_string)): + blockstore.save_to_blockstore(self.data_sha256, data) + return data + log.error("Couldn't find the attachment in the raw message", message_id=message.id) log.error('No data returned!') return value @@ -88,4 +126,4 @@ def data(self, value): log.warning('Not saving 0-length data blob') return - save_to_blockstore(self.data_sha256, value) + blockstore.save_to_blockstore(self.data_sha256, value) diff --git a/inbox/models/session.py b/inbox/models/session.py index 7485d84ed..9c8e311cf 100644 --- a/inbox/models/session.py +++ b/inbox/models/session.py @@ -72,8 +72,9 @@ def end(session): t = time.time() latency = int((t - start_time) * 1000) - statsd_client.timing(metric_name, latency) - statsd_client.incr(metric_name) + if config.get('ENABLE_DB_TXN_METRICS', False): + statsd_client.timing(metric_name, latency) + statsd_client.incr(metric_name) if latency > MAX_SANE_TRX_TIME_MS: log.warning('Long transaction', latency=latency, modname=modname, funcname=funcname) @@ -117,11 +118,11 @@ def session_scope(id_, versioned=True): Parameters ---------- + id_ : int + Object primary key to grab a session for. + versioned : bool Do you want to enable the transaction log? - debug : bool - Do you want to turn on SQL echoing? Use with caution. Engine is not - cached in this case! 
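The roles.py hunk above introduces a two-tier lookup for message data: consult the blockstore first, fall back to refetching the raw message from the provider, then cache the result so later reads are served locally. A condensed sketch of that flow with the statsd timers and counters left out (the helper name fetch_raw_with_cache is illustrative; the real logic lives inside Block.data):

from hashlib import sha256

from inbox.util import blockstore
from inbox.s3.base import get_raw_from_provider

def fetch_raw_with_cache(message):
    """Condensed version of the lookup in Block.data above (no statsd)."""
    raw_mime = blockstore.get_from_blockstore(message.data_sha256)
    if raw_mime is None:
        # Cache miss: refetch the original message from Gmail / IMAP.
        raw_mime = get_raw_from_provider(message)
        if raw_mime is not None:
            # Store it under the hash of what we actually got back, so the
            # next read is served from the blockstore instead.
            blockstore.save_to_blockstore(sha256(raw_mime).hexdigest(),
                                          raw_mime)
    return raw_mime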
Yields ------ diff --git a/inbox/models/thread.py b/inbox/models/thread.py index 36a761c4b..a9dedd0f9 100644 --- a/inbox/models/thread.py +++ b/inbox/models/thread.py @@ -1,3 +1,4 @@ +import datetime import itertools from collections import defaultdict @@ -7,13 +8,15 @@ from nylas.logging import get_logger log = get_logger() -from inbox.models.mixins import HasPublicID, HasRevisions, UpdatedAtMixin +from inbox.models.mixins import (HasPublicID, HasRevisions, UpdatedAtMixin, + DeletedAtMixin) from inbox.models.base import MailSyncBase from inbox.models.namespace import Namespace from inbox.util.misc import cleanup_subject -class Thread(MailSyncBase, HasPublicID, HasRevisions, UpdatedAtMixin): +class Thread(MailSyncBase, HasPublicID, HasRevisions, UpdatedAtMixin, + DeletedAtMixin): """ Threads are a first-class object in Nylas. This thread aggregates the relevant thread metadata from elsewhere so that clients can only @@ -69,7 +72,7 @@ def update_from_message(self, k, message): def most_recent_received_date(self): received_recent_date = None for m in self.messages: - if all(category.name != "sent" for category in m.categories) and \ + if all(category.name != "sent" for category in m.categories if category is not None) and \ not m.is_draft and not m.is_sent: if not received_recent_date or \ m.received_date > received_recent_date: @@ -182,6 +185,14 @@ def api_loading_options(cls, expand=False): .joinedload('block') ) + def mark_for_deletion(self): + """ + Mark this message to be deleted by an asynchronous delete + handler. + + """ + self.deleted_at = datetime.datetime.utcnow() + discriminator = Column('type', String(16)) __mapper_args__ = {'polymorphic_on': discriminator} @@ -189,3 +200,7 @@ def api_loading_options(cls, expand=False): # subject column is too long to be fully indexed with utf8mb4 collation. Index('ix_thread_subject', Thread.subject, mysql_length=191) Index('ix_cleaned_subject', Thread._cleaned_subject, mysql_length=191) + +# For async deletion. +Index('ix_thread_namespace_id_deleted_at', Thread.namespace_id, + Thread.deleted_at) diff --git a/inbox/models/transaction.py b/inbox/models/transaction.py index 6e74509c8..77e28120d 100644 --- a/inbox/models/transaction.py +++ b/inbox/models/transaction.py @@ -1,4 +1,4 @@ -from sqlalchemy import (Column, BigInteger, String, ForeignKey, Index, Enum, +from sqlalchemy import (Column, BigInteger, String, Index, Enum, inspect) from sqlalchemy.orm import relationship @@ -10,9 +10,10 @@ class Transaction(MailSyncBase, HasPublicID): """ Transactional log to enable client syncing. """ # Do delete transactions if their associated namespace is deleted. 
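Thread now carries DeletedAtMixin, mark_for_deletion() only stamps deleted_at, and the new (namespace_id, deleted_at) index plus the Thread.deleted_at == None filters in the search backends keep soft-deleted threads out of results until an asynchronous handler removes them. A rough sketch of what such a reaper pass could look like; the function itself is assumed, not part of this diff:

import datetime

from inbox.models.thread import Thread

def purge_marked_threads(db_session, namespace_id, min_age_seconds=600):
    """Hard-delete threads whose deleted_at stamp is older than the cutoff."""
    cutoff = (datetime.datetime.utcnow() -
              datetime.timedelta(seconds=min_age_seconds))
    # This scan is what ix_thread_namespace_id_deleted_at is for.
    marked = (db_session.query(Thread)
              .filter(Thread.namespace_id == namespace_id,
                      Thread.deleted_at != None,  # noqa: E711
                      Thread.deleted_at < cutoff))
    for thread in marked:
        db_session.delete(thread)
    db_session.commit()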
- namespace_id = Column(ForeignKey(Namespace.id, ondelete='CASCADE'), - nullable=False) - namespace = relationship(Namespace) + namespace_id = Column(BigInteger, index=True, nullable=False) + namespace = relationship( + Namespace, + primaryjoin='foreign(Transaction.namespace_id) == remote(Namespace.id)') object_type = Column(String(20), nullable=False) record_id = Column(BigInteger, nullable=False, index=True) @@ -26,9 +27,10 @@ class Transaction(MailSyncBase, HasPublicID): class AccountTransaction(MailSyncBase, HasPublicID): - namespace_id = Column(ForeignKey(Namespace.id, ondelete='CASCADE'), - nullable=False) - namespace = relationship(Namespace) + namespace_id = Column(BigInteger, index=True, nullable=False) + namespace = relationship( + Namespace, + primaryjoin='foreign(AccountTransaction.namespace_id) == remote(Namespace.id)') object_type = Column(String(20), nullable=False) record_id = Column(BigInteger, nullable=False, index=True) @@ -106,7 +108,8 @@ def propagate_changes(session): obj_state = inspect(obj) for attr in obj.propagated_attributes: if getattr(obj_state.attrs, attr).history.has_changes(): - obj.thread.dirty = True + if obj.thread: + obj.thread.dirty = True def increment_versions(session): diff --git a/inbox/models/util.py b/inbox/models/util.py index 3f1981728..7f7be31b2 100644 --- a/inbox/models/util.py +++ b/inbox/models/util.py @@ -247,6 +247,7 @@ def _batch_delete(engine, table, xxx_todo_changeme, throttle=False, start = time.time() query = 'DELETE FROM {} WHERE {}={} LIMIT 2000;'.format(table, column, id_) + log.info('Deletion query', query=query) for i in range(0, batches): if throttle and check_throttle(): diff --git a/inbox/providers.py b/inbox/providers.py index b61af4953..5cd7d2ede 100644 --- a/inbox/providers.py +++ b/inbox/providers.py @@ -46,6 +46,8 @@ def provider_info(provider_name): "stsci.edu", "kms-technology.com", "cigital.com", + "iontrading.com", + "adaptiveinsights.com", ], "mx_servers": [ # Office365 @@ -94,14 +96,14 @@ def provider_info(provider_name): ("fastmail", { "type": "generic", "condstore": True, - "imap": ("mail.messagingengine.com", 993), - "smtp": ("mail.messagingengine.com", 587), + "imap": ("imap.fastmail.com", 993), + "smtp": ("smtp.fastmail.com", 465), "auth": "password", "folder_map": {"INBOX.Archive": "archive", "INBOX.Drafts": "drafts", "INBOX.Junk Mail": "spam", "INBOX.Sent": "sent", "INBOX.Sent Items": "sent", "INBOX.Trash": "trash"}, - "domains": ["fastmail.fm"], + "domains": ["fastmail.fm", "fastmail.com"], "mx_servers": ["in[12]-smtp.messagingengine.com"], # exact string matches "ns_servers": ["ns1.messagingengine.com.", diff --git a/inbox/s3/__init__.py b/inbox/s3/__init__.py new file mode 100644 index 000000000..b88765afd --- /dev/null +++ b/inbox/s3/__init__.py @@ -0,0 +1,3 @@ +# Allow out-of-tree backend submodules. +from pkgutil import extend_path +__path__ = extend_path(__path__, __name__) diff --git a/inbox/s3/backends/__init__.py b/inbox/s3/backends/__init__.py new file mode 100644 index 000000000..b88765afd --- /dev/null +++ b/inbox/s3/backends/__init__.py @@ -0,0 +1,3 @@ +# Allow out-of-tree backend submodules. 
+from pkgutil import extend_path +__path__ = extend_path(__path__, __name__) diff --git a/inbox/s3/backends/gmail.py b/inbox/s3/backends/gmail.py new file mode 100644 index 000000000..be934a7c1 --- /dev/null +++ b/inbox/s3/backends/gmail.py @@ -0,0 +1,42 @@ +import base64 +import requests +from inbox.s3.exc import TemporaryEmailFetchException, EmailDeletedException +from inbox.auth.oauth import OAuthRequestsWrapper +from inbox.models.backends.gmail import g_token_manager +from nylas.logging import get_logger +log = get_logger() + + +# We use the Google API so we don't have to worry about +# the Gmail max IMAP connection limit. +def get_gmail_raw_contents(message): + account = message.namespace.account + auth_token = g_token_manager.get_token_for_email(account) + + # The Gmail API exposes the X-GM-MSGID field but encodes it + # in hexadecimal. + g_msgid = message.g_msgid + + if g_msgid is None: + raise EmailDeletedException("Couldn't find message on backend server. This is a permanent error.") + + if isinstance(g_msgid, basestring): + g_msgid = int(g_msgid) + + hex_id = format(g_msgid, 'x') + url = 'https://www.googleapis.com/gmail/v1/users/me/messages/{}?format=raw'.format(hex_id, 'x') + r = requests.get(url, auth=OAuthRequestsWrapper(auth_token)) + + if r.status_code != 200: + log.error('Got an error when fetching raw email', r.status_code, r.text) + + if r.status_code in [403, 429]: + raise TemporaryEmailFetchException("Temporary usage limit hit. Please try again.") + if r.status_code == 404: + raise EmailDeletedException("Couldn't find message on backend server. This is a permanent error.") + elif r.status_code >= 500 and r.status_code <= 599: + raise TemporaryEmailFetchException("Backend server error. Please try again in a few minutes.") + + data = r.json() + raw = str(data['raw']) + return base64.urlsafe_b64decode(raw + '=' * (4 - len(raw) % 4)) diff --git a/inbox/s3/backends/imap.py b/inbox/s3/backends/imap.py new file mode 100644 index 000000000..677fa5c5b --- /dev/null +++ b/inbox/s3/backends/imap.py @@ -0,0 +1,32 @@ +import imapclient +from inbox.s3.exc import EmailFetchException, EmailDeletedException +from inbox.crispin import connection_pool +from inbox.mailsync.backends.imap.generic import uidvalidity_cb + +from nylas.logging import get_logger +log = get_logger() + + +def get_imap_raw_contents(message): + account = message.namespace.account + + if len(message.imapuids) == 0: + raise EmailDeletedException("Message was deleted on the backend server.") + + uid = message.imapuids[0] + folder = uid.folder + + with connection_pool(account.id).get() as crispin_client: + crispin_client.select_folder(folder.name, uidvalidity_cb) + + try: + uids = crispin_client.uids([uid.msg_uid]) + if len(uids) == 0: + raise EmailDeletedException("Message was deleted on the backend server.") + + return uids[0].body + except imapclient.IMAPClient.Error: + log.error("Error while fetching raw contents", exc_info=True, + logstash_tag='fetching_error') + raise EmailFetchException("Couldn't get message from server. 
" + "Please try again in a few minutes.") diff --git a/inbox/s3/base.py b/inbox/s3/base.py new file mode 100644 index 000000000..75a068eb5 --- /dev/null +++ b/inbox/s3/base.py @@ -0,0 +1,4 @@ +def get_raw_from_provider(message): + """Get the raw contents of a message from the provider.""" + account = message.account + return account.get_raw_message_contents(message) diff --git a/inbox/s3/exc.py b/inbox/s3/exc.py new file mode 100644 index 000000000..10f76fc5f --- /dev/null +++ b/inbox/s3/exc.py @@ -0,0 +1,17 @@ +class S3Exception(Exception): + pass + + +class EmailFetchException(S3Exception): + pass + + +class EmailDeletedException(EmailFetchException): + """Raises an error when the message is deleted on the remote.""" + pass + + +class TemporaryEmailFetchException(EmailFetchException): + """A class for temporary errors when trying to fetch emails. + Exchange notably seems to need warming up before fetching data.""" + pass diff --git a/inbox/scheduling/deferred_migration.py b/inbox/scheduling/deferred_migration.py new file mode 100644 index 000000000..83dc52513 --- /dev/null +++ b/inbox/scheduling/deferred_migration.py @@ -0,0 +1,99 @@ +import gevent +import json +import time + +from inbox.models.account import Account +from inbox.models.session import session_scope +from inbox.scheduling import event_queue +from inbox.util.concurrency import retry_with_logging +from inbox.util.stats import statsd_client + +from nylas.logging import get_logger +log = get_logger() + + +DEFERRED_ACCOUNT_MIGRATION_COUNTER = 'sync:deferred_account_migration_counter' +DEFERRED_ACCOUNT_MIGRATION_PQUEUE = 'sync:deferred_account_migration_pqueue' +DEFERRED_ACCOUNT_MIGRATION_EVENT_QUEUE = 'sync:deferred_account_migration_event_queue' +DEFERRED_ACCOUNT_MIGRATION_OBJ = 'sync:deferred_account_migration_objs:{}' +DEFERRED_ACCOUNT_MIGRATION_OBJ_TTL = 60 * 60 * 24 * 7 # 1 week + + +class DeferredAccountMigration(object): + _redis_fields = ['deadline', 'account_id', 'desired_host', 'id'] + + def __init__(self, deadline, account_id, desired_host, id=None): + self.deadline = float(deadline) + self.account_id = int(account_id) + self.desired_host = str(desired_host) + self.id = None if id is None else int(id) + + def execute(self, client): + with session_scope(self.account_id) as db_session: + account = db_session.query(Account).get(self.account_id) + if account is None: + log.warning('Account not found when trying to execute DeferredAccountMigration', account_id=self.account_id) + return + account.desired_sync_host = self.desired_host + db_session.commit() + self.save(client) + + def save(self, client): + if self.id is None: + self.id = client.incr(DEFERRED_ACCOUNT_MIGRATION_COUNTER) + p = client.pipeline() + key = DEFERRED_ACCOUNT_MIGRATION_OBJ.format(self.id) + p.hmset(key, dict((field, getattr(self, field)) for field in self.__class__._redis_fields)) + p.expire(key, DEFERRED_ACCOUNT_MIGRATION_OBJ_TTL) + p.zadd(DEFERRED_ACCOUNT_MIGRATION_PQUEUE, self.deadline, self.id) + p.rpush(DEFERRED_ACCOUNT_MIGRATION_EVENT_QUEUE, json.dumps({'id': self.id})) + p.execute() + + @classmethod + def try_load(cls, client, id): + values = client.hmget(DEFERRED_ACCOUNT_MIGRATION_OBJ.format(id), cls._redis_fields) + if values is None: + return None + return DeferredAccountMigration(*values) + + +class DeferredAccountMigrationExecutor(gevent.Greenlet): + def __init__(self): + self.event_queue = event_queue.EventQueue(DEFERRED_ACCOUNT_MIGRATION_EVENT_QUEUE) + self.redis = self.event_queue.redis + gevent.Greenlet.__init__(self) + + def 
_run(self): + while True: + retry_with_logging(self._run_impl) + + def _run_impl(self): + current_time = time.time() + timeout = event_queue.SOCKET_TIMEOUT - 2 # Minus 2 to give us some leeway. + next_deferral = self._try_get_next_deferral() + while next_deferral is not None: + if next_deferral.deadline >= current_time: + timeout = int(min(max(next_deferral.deadline - current_time, 1), timeout)) + log.info('Next deferral deadline is in the future, sleeping', + deferral_id=next_deferral.id, + deadline=next_deferral.deadline, + desired_host=next_deferral.desired_host, + account_id=next_deferral.account_id, + timeout=timeout) + break + log.info('Executing deferral', + deferral_id=next_deferral.id, + deadline=next_deferral.deadline, + desired_host=next_deferral.desired_host, + account_id=next_deferral.account_id) + next_deferral.execute(self.redis) + self.redis.zrem(DEFERRED_ACCOUNT_MIGRATION_PQUEUE, next_deferral.id) + next_deferral = self._try_get_next_deferral() + self.event_queue.receive_event(timeout=timeout) + statsd_client.incr("migrator.heartbeat") + + def _try_get_next_deferral(self): + deferral_id = self.redis.zrange(DEFERRED_ACCOUNT_MIGRATION_PQUEUE, 0, 1) + if not deferral_id: + return None + return DeferredAccountMigration.try_load(self.redis, deferral_id[0]) diff --git a/inbox/scheduling/event_queue.py b/inbox/scheduling/event_queue.py new file mode 100644 index 000000000..1262087ed --- /dev/null +++ b/inbox/scheduling/event_queue.py @@ -0,0 +1,74 @@ +import json +from redis import StrictRedis + +from inbox.config import config +from nylas.logging import get_logger +log = get_logger() + +SOCKET_CONNECT_TIMEOUT = 5 +SOCKET_TIMEOUT = 30 + + +def _get_redis_client(host=None, port=6379, db=1): + return StrictRedis(host=host, + port=port, + db=db, + socket_connect_timeout=SOCKET_CONNECT_TIMEOUT, + socket_timeout=SOCKET_TIMEOUT) + + +class EventQueue(object): + """Simple queue that clients can listen to and wait to be notified of some + event that they're interested in. 
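DeferredAccountMigration.save() spreads each deferral across four Redis structures: a counter for ids, a per-deferral hash with a one-week TTL, a sorted set keyed by deadline that acts as the priority queue, and the event-queue list that wakes the executor. A short usage sketch, assuming a locally reachable Redis (the host, port and db below are placeholders; the real executor reuses the event queue's client):

import time
from redis import StrictRedis

from inbox.scheduling.deferred_migration import DeferredAccountMigration

redis = StrictRedis(host='localhost', port=6379, db=1)

# Ask for account 42 to be moved to 'sync-host-2' roughly five minutes from
# now. save() allocates an id, writes the hash with its TTL, adds the
# deadline to the priority zset and pushes a wake-up event for the executor.
dam = DeferredAccountMigration(time.time() + 300, 42, 'sync-host-2')
dam.save(redis)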
+ """ + def __init__(self, queue_name, redis=None): + self.redis = redis + if self.redis is None: + redis_host = config['EVENT_QUEUE_REDIS_HOSTNAME'] + redis_db = config['EVENT_QUEUE_REDIS_DB'] + self.redis = _get_redis_client(host=redis_host, db=redis_db) + self.queue_name = queue_name + + def receive_event(self, timeout=0): + result = None + if timeout is None: + result = self.redis.lpop(self.queue_name) + else: + result = self.redis.blpop([self.queue_name], timeout=timeout) + + if result is None: + return None + queue_name, event_data = (self.queue_name, result) if timeout is None else result + try: + event = json.loads(event_data) + event['queue_name'] = queue_name + return event + except Exception as e: + log.error('Failed to load event data from queue', error=e, event_data=event_data) + return None + + def send_event(self, event_data): + event_data.pop('queue_name', None) + self.redis.rpush(self.queue_name, json.dumps(event_data)) + + +class EventQueueGroup(object): + """Group of queues that can all be simultaneously watched for new events.""" + def __init__(self, queues): + self.queues = queues + self.redis = None + if len(self.queues) > 0: + self.redis = self.queues[0].redis + + def receive_event(self, timeout=0): + result = self.redis.blpop([q.queue_name for q in self.queues], timeout=timeout) + if result is None: + return None + queue_name, event_data = result + try: + event = json.loads(event_data) + event['queue_name'] = queue_name + return event + except Exception as e: + log.error('Failed to load event data from queue', error=e, event_data=event_data) + return None diff --git a/inbox/search/backends/gmail.py b/inbox/search/backends/gmail.py index 479f69c52..b358ae4ef 100644 --- a/inbox/search/backends/gmail.py +++ b/inbox/search/backends/gmail.py @@ -69,6 +69,7 @@ def search_threads(self, db_session, search_query, offset=0, limit=40): query = db_session.query(Thread). \ join(Message, Message.thread_id == Thread.id). \ filter(Thread.namespace_id == self.account.namespace.id, + Thread.deleted_at == None, Message.namespace_id == self.account.namespace.id, Message.g_msgid.in_(g_msgids)). \ order_by(desc(Message.received_date)) diff --git a/inbox/search/backends/imap.py b/inbox/search/backends/imap.py index d1c2c2958..8fa01e9bf 100644 --- a/inbox/search/backends/imap.py +++ b/inbox/search/backends/imap.py @@ -102,6 +102,7 @@ def search_threads(self, db_session, search_query, offset=0, limit=40): .join(ImapUid) \ .filter(ImapUid.account_id == self.account_id, ImapUid.msg_uid.in_(imap_uids), + Thread.deleted_at == None, Thread.id == Message.thread_id)\ .order_by(desc(Message.received_date)) diff --git a/inbox/search/base.py b/inbox/search/base.py index e1af869f6..40661d29f 100644 --- a/inbox/search/base.py +++ b/inbox/search/base.py @@ -17,3 +17,12 @@ def __init__(self, message, http_code, server_error=None): self.server_error = server_error super(SearchBackendException, self).__init__( message, http_code, server_error) + + +class SearchStoreException(Exception): + """Raised if there's an error proxying the search request to the provider. 
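EventQueue is a thin JSON-over-Redis-list wrapper: send_event RPUSHes the serialized payload and receive_event BLPOPs with a timeout, tagging the decoded dict with the queue it came from. A small usage sketch (the queue name is illustrative, and the config keys EVENT_QUEUE_REDIS_HOSTNAME / EVENT_QUEUE_REDIS_DB must be set, as the new __init__ requires):

from inbox.scheduling.event_queue import EventQueue

queue = EventQueue('sync:example_event_queue')

# Producer side: the payload is JSON-serialized and RPUSHed onto the list.
queue.send_event({'event': 'migrate', 'id': 42})

# Consumer side: BLPOP with a timeout. Returns None on timeout, otherwise
# the decoded dict with 'queue_name' added by receive_event().
event = queue.receive_event(timeout=30)
assert event is None or event['queue_name'] == 'sync:example_event_queue'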
+ This is a special EAS case where the Status code for the Store element has + an error""" + def __init__(self, err_code): + self.err_code = err_code + super(SearchStoreException, self).__init__(err_code) diff --git a/inbox/sendmail/base.py b/inbox/sendmail/base.py index 25f61880f..a2c48b4ab 100644 --- a/inbox/sendmail/base.py +++ b/inbox/sendmail/base.py @@ -1,5 +1,6 @@ import pkg_resources from datetime import datetime +import re from inbox.api.validation import ( get_recipients, get_attachments, get_thread, get_message) @@ -89,6 +90,21 @@ def create_draft_from_mime(account, raw_mime, db_session): return msg +def block_to_part(block, message, namespace): + inline_image_uri = r'cid:{}'.format(block.public_id) + is_inline = re.search(inline_image_uri, message.body) is not None + # Create a new Part object to associate to the message object. + # (You can't just set block.message, because if block is an + # attachment on an existing message, that would dissociate it from + # the existing message.) + part = Part(block=block) + part.content_id = block.public_id if is_inline else None + part.namespace_id = namespace.id + part.content_disposition = 'inline' if is_inline else 'attachment' + part.is_inboxapp_attachment = True + return part + + def create_message_from_json(data, namespace, db_session, is_draft): """ Construct a Message instance from `data`, a dictionary representing the POST body of an API request. All new objects are added to the session, but @@ -173,15 +189,7 @@ def create_message_from_json(data, namespace, db_session, is_draft): # Associate attachments to the draft message for block in blocks: - # Create a new Part object to associate to the message object. - # (You can't just set block.message, because if block is an - # attachment on an existing message, that would dissociate it from - # the existing message.) - part = Part(block=block) - part.namespace_id = namespace.id - part.content_disposition = 'attachment' - part.is_inboxapp_attachment = True - message.parts.append(part) + message.parts.append(block_to_part(block, message, namespace)) update_contacts_from_message(db_session, message, namespace) @@ -261,11 +269,7 @@ def update(attr, value=None): # Don't re-add attachments that are already attached if block.id in [p.block_id for p in draft.parts]: continue - part = Part(block=block) - part.namespace_id = account.namespace.id - part.content_disposition = 'attachment' - part.is_inboxapp_attachment = True - draft.parts.append(part) + draft.parts.append(block_to_part(block, draft, account.namespace)) thread = draft.thread if len(thread.messages) == 1: @@ -320,13 +324,22 @@ def delete_draft(db_session, account, draft): db_session.commit() -def generate_attachments(blocks): +def generate_attachments(message, blocks): attachment_dicts = [] for block in blocks: + content_disposition = 'attachment' + for part in block.parts: + if part.message_id == message.id and part.content_disposition == 'inline': + content_disposition = 'inline' + break + attachment_dicts.append({ + 'block_id': block.public_id, 'filename': block.filename, 'data': block.data, - 'content_type': block.content_type}) + 'content_type': block.content_type, + 'content_disposition': content_disposition, + }) return attachment_dicts diff --git a/inbox/sendmail/message.py b/inbox/sendmail/message.py index 5aa312a7e..685d5fc02 100644 --- a/inbox/sendmail/message.py +++ b/inbox/sendmail/message.py @@ -88,7 +88,7 @@ def create_email(from_name, If this message is a reply, the Message-Ids of prior messages in the thread. 
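block_to_part() decides between inline and attachment disposition by looking for a cid: reference to the block's public id in the draft body. A standalone restatement of that rule, for illustration only (choose_disposition is not a helper in this diff):

import re

def choose_disposition(block_public_id, message_body):
    """A block referenced as cid:<public_id> in the draft HTML is sent
    inline; anything else is sent as a plain attachment."""
    is_inline = re.search(r'cid:{}'.format(block_public_id),
                          message_body) is not None
    return 'inline' if is_inline else 'attachment'

# e.g. an <img src="cid:abc123"> in the body makes block abc123 inline:
assert choose_disposition('abc123', '<img src="cid:abc123">') == 'inline'
assert choose_disposition('abc123', 'no reference here') == 'attachment'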
attachments: list of dicts, optional - a list of dicts(filename, data, content_type) + a list of dicts(filename, data, content_type, content_disposition) """ html = html if html else '' plaintext = html2text(html) @@ -110,11 +110,14 @@ def create_email(from_name, # The subsequent parts are the attachment parts for a in attachments: # Disposition should be inline if we add Content-ID - msg.append(mime.create.attachment( + attachment = mime.create.attachment( a['content_type'], a['data'], filename=a['filename'], - disposition='attachment')) + disposition=a['content_disposition']) + if a['content_disposition'] == 'inline': + attachment.headers['Content-Id'] = '<{}>'.format(a['block_id']) + msg.append(attachment) msg.headers['Subject'] = subject if subject else '' diff --git a/inbox/sendmail/smtp/postel.py b/inbox/sendmail/smtp/postel.py index 6b7411dc7..4e272effd 100644 --- a/inbox/sendmail/smtp/postel.py +++ b/inbox/sendmail/smtp/postel.py @@ -406,7 +406,7 @@ def send_custom(self, draft, body, recipients): recipient_emails: email addresses to send copies of this message to. """ blocks = [p.block for p in draft.attachments] - attachments = generate_attachments(blocks) + attachments = generate_attachments(draft, blocks) from_addr = draft.from_addr[0] msg = create_email(from_name=from_addr[0], from_email=from_addr[1], @@ -438,7 +438,7 @@ def send(self, draft): the draft message to send. """ blocks = [p.block for p in draft.attachments] - attachments = generate_attachments(blocks) + attachments = generate_attachments(draft, blocks) # @emfree - 3/19/2015 # # Note that we intentionally don't set the Bcc header in the message we diff --git a/inbox/sqlalchemy_ext/util.py b/inbox/sqlalchemy_ext/util.py index 91008eb3c..2e2e6a916 100644 --- a/inbox/sqlalchemy_ext/util.py +++ b/inbox/sqlalchemy_ext/util.py @@ -25,7 +25,9 @@ MAX_SANE_QUERIES_PER_SESSION = 100 -MAX_TEXT_LENGTH = 65535 +MAX_TEXT_BYTES = 65535 +MAX_BYTES_PER_CHAR = 4 # For collation of utf8mb4 +MAX_TEXT_CHARS = int(MAX_TEXT_BYTES / float(MAX_BYTES_PER_CHAR)) MAX_MYSQL_INTEGER = 2147483647 bakery = baked.bakery() @@ -140,7 +142,7 @@ def process_result_value(self, value, dialect): def json_field_too_long(value): - return len(json_util.dumps(value)) > MAX_TEXT_LENGTH + return len(json_util.dumps(value)) > MAX_TEXT_CHARS class LittleJSON(JSON): diff --git a/inbox/test/__init__.py b/inbox/test/__init__.py new file mode 100644 index 000000000..f5d7f9802 --- /dev/null +++ b/inbox/test/__init__.py @@ -0,0 +1,3 @@ +# Allow out-of-tree submodules. 
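The rename from MAX_TEXT_LENGTH to MAX_TEXT_CHARS makes the unit explicit: a MySQL TEXT column holds 65535 bytes, utf8mb4 needs up to 4 bytes per character, so the safe truncation budget is 65535 / 4, truncated to 16383 characters:

MAX_TEXT_BYTES = 65535          # capacity of a MySQL TEXT column, in bytes
MAX_BYTES_PER_CHAR = 4          # worst case under the utf8mb4 collation
MAX_TEXT_CHARS = int(MAX_TEXT_BYTES / float(MAX_BYTES_PER_CHAR))
assert MAX_TEXT_CHARS == 16383  # truncation now counts characters, not bytes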
+from pkgutil import extend_path +__path__ = extend_path(__path__, __name__) diff --git a/inbox/test/api/__init__.py b/inbox/test/api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/inbox/test/api/base.py b/inbox/test/api/base.py new file mode 100644 index 000000000..4ebf56b71 --- /dev/null +++ b/inbox/test/api/base.py @@ -0,0 +1,60 @@ +import json +from pytest import fixture, yield_fixture +from base64 import b64encode + + +def new_api_client(db, namespace): + from inbox.api.srv import app + app.config['TESTING'] = True + with app.test_client() as c: + return TestAPIClient(c, namespace.public_id) + + +@yield_fixture +def api_client(db, default_namespace): + from inbox.api.srv import app + app.config['TESTING'] = True + with app.test_client() as c: + yield TestAPIClient(c, default_namespace.public_id) + + +@fixture +def imap_api_client(db, generic_account): + return new_api_client(db, generic_account.namespace) + + +class TestAPIClient(object): + + """Provide more convenient access to the API for testing purposes.""" + + def __init__(self, test_client, default_namespace_public_id): + self.client = test_client + credential = '{}:'.format(default_namespace_public_id) + self.auth_header = {'Authorization': 'Basic {}' + .format(b64encode(credential))} + + def get_raw(self, path, headers={}): + headers.update(self.auth_header) + return self.client.get(path, headers=headers) + + def get_data(self, path, headers={}): + headers.update(self.auth_header) + return json.loads(self.client.get(path, headers=headers).data) + + def post_data(self, path, data, headers={}): + headers.update(self.auth_header) + return self.client.post(path, data=json.dumps(data), headers=headers) + + def post_raw(self, path, data, headers={}): + headers.update(self.auth_header) + return self.client.post(path, data=data, headers=headers) + + def put_data(self, path, data, headers={}): + headers.update(self.auth_header) + return self.client.put(path, headers=headers, + data=json.dumps(data)) + + def delete(self, path, data=None, headers={}): + headers.update(self.auth_header) + return self.client.delete(path, headers=headers, + data=json.dumps(data)) diff --git a/inbox/test/api/test_account.py b/inbox/test/api/test_account.py new file mode 100644 index 000000000..af6794253 --- /dev/null +++ b/inbox/test/api/test_account.py @@ -0,0 +1,90 @@ +# flake8: noqa: F811 +from inbox.test.util.base import (generic_account, gmail_account, db, + add_fake_yahoo_account) +from inbox.test.api.base import api_client, new_api_client + +__all__ = ['db', 'api_client', 'generic_account', 'gmail_account'] + + +def test_account(db, api_client, generic_account, gmail_account): + # Because we're using the generic_account namespace + api_client = new_api_client(db, generic_account.namespace) + + resp_data = api_client.get_data('/account') + + assert resp_data['id'] == generic_account.namespace.public_id + assert resp_data['object'] == 'account' + assert resp_data['account_id'] == generic_account.namespace.public_id + assert resp_data['email_address'] == generic_account.email_address + assert resp_data['name'] == generic_account.name + assert resp_data['organization_unit'] == 'folder' + assert 'sync_state' in resp_data + assert 'server_settings' not in resp_data + + # Because we're using the gmail account namespace + api_client = new_api_client(db, gmail_account.namespace) + + resp_data = api_client.get_data('/account') + + assert resp_data['id'] == gmail_account.namespace.public_id + assert resp_data['provider'] == 'gmail' + 
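TestAPIClient authenticates every request with HTTP Basic auth, using the namespace public id as the username and an empty password. The header it builds boils down to this (the public id below is made up; Python 2 string handling, matching the test helper above):

from base64 import b64encode

public_id = 'aatm2ttawpvmbh4sww8i75wf'   # illustrative namespace public id
credential = '{}:'.format(public_id)     # empty password after the colon
auth_header = {'Authorization': 'Basic {}'.format(b64encode(credential))}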
assert resp_data['organization_unit'] == 'label' + assert 'sync_state' in resp_data + assert 'server_settings' not in resp_data + + +def test_account_expanded(db, api_client, generic_account, gmail_account): + # Generic accounts expose a `server_settings` attribute + # Custom IMAP + api_client = new_api_client(db, generic_account.namespace) + resp_data = api_client.get_data('/account/?view=expanded') + assert resp_data['provider'] == 'custom' + assert 'server_settings' in resp_data + assert set(resp_data['server_settings']) == set({ + 'imap_host': 'imap.custom.com', + 'smtp_host': 'smtp.custom.com', + 'imap_port': 993, + 'smtp_port': 587, + 'ssl_required': True}) + + # Yahoo + yahoo_account = add_fake_yahoo_account(db.session) + api_client = new_api_client(db, yahoo_account.namespace) + resp_data = api_client.get_data('/account/?view=expanded') + assert resp_data['provider'] == 'yahoo' + assert 'server_settings' in resp_data + assert set(resp_data['server_settings']) == set({ + 'imap_host': 'imap.mail.yahoo.com', + 'smtp_host': 'smtp.mail.yahoo.com', + 'imap_port': 993, + 'smtp_port': 587, + 'ssl_required': True}) + + # Gmail accounts don't expose a `server_settings` attribute + api_client = new_api_client(db, gmail_account.namespace) + resp_data = api_client.get_data('/account/?view=expanded') + assert resp_data['provider'] == 'gmail' + assert 'server_settings' not in resp_data + + +def test_account_repr_for_new_account(db): + account = add_fake_yahoo_account(db.session) + + # Sync for the account has not started yet. + assert account.sync_state is None + + # However the API-returned account object has `sync_state=running` + # so API clients can do the right thing. + api_client = new_api_client(db, account.namespace) + resp_data = api_client.get_data('/account') + assert resp_data['id'] == account.namespace.public_id + assert resp_data['sync_state'] == 'running' + + # Verify other sync_states are not masked. 
+ account.sync_state = 'invalid' + db.session.commit() + + api_client = new_api_client(db, account.namespace) + resp_data = api_client.get_data('/account') + assert resp_data['id'] == account.namespace.public_id + assert resp_data['sync_state'] == 'invalid' diff --git a/inbox/test/api/test_auth.py b/inbox/test/api/test_auth.py new file mode 100644 index 000000000..fa7ecea4f --- /dev/null +++ b/inbox/test/api/test_auth.py @@ -0,0 +1,60 @@ +import json +from base64 import b64encode + + +from inbox.test.util.base import generic_account, db # noqa +from inbox.test.api.base import new_api_client # noqa + + +def test_no_auth(db, generic_account): # noqa + # Because we're using the generic_account namespace + + api_client = new_api_client(db, generic_account.namespace) + api_client.auth_header = {} + + response = api_client.get_raw('/account') + assert response.status_code == 401 + + +def test_basic_auth(db, generic_account): # noqa + api_client = new_api_client(db, generic_account.namespace) + + response = api_client.get_raw('/account') + assert response.status_code == 200 + + resp_data = json.loads(response.data) + assert resp_data['id'] == generic_account.namespace.public_id + + +def test_bearer_token_auth(db, generic_account): # noqa + api_client = new_api_client(db, generic_account.namespace) + api_client.auth_header = { + 'Authorization': 'Bearer {}' + .format(generic_account.namespace.public_id)} + + response = api_client.get_raw('/account') + assert response.status_code == 200 + + resp_data = json.loads(response.data) + assert resp_data['id'] == generic_account.namespace.public_id + + +BAD_TOKEN = '1234567890abcdefg' + + +def test_invalid_basic_auth(db, generic_account): # noqa + api_client = new_api_client(db, generic_account.namespace) + api_client.auth_header = {'Authorization': 'Basic {}' + .format(b64encode(BAD_TOKEN + ':'))} + + response = api_client.get_raw('/account') + assert response.status_code == 401 + + +def test_invalid_bearer_token_auth(db, generic_account): # noqa + api_client = new_api_client(db, generic_account.namespace) + api_client.auth_header = { + 'Authorization': 'Bearer {}'.format(BAD_TOKEN)} + + response = api_client.get_raw('/account') + assert response.status_code == 401 diff --git a/inbox/test/api/test_calendars.py b/inbox/test/api/test_calendars.py new file mode 100644 index 000000000..5ae33a1eb --- /dev/null +++ b/inbox/test/api/test_calendars.py @@ -0,0 +1,94 @@ +from inbox.test.util.base import add_fake_event +from inbox.models import Calendar +from inbox.test.api.base import api_client +from inbox.test.util.base import db, default_namespace + + +__all__ = ['api_client', 'db', 'default_namespace'] + + +def test_get_calendar(db, default_namespace, api_client): + cal = Calendar( + namespace_id=default_namespace.id, + uid='uid', + provider_name='WTF', + name='Holidays') + db.session.add(cal) + db.session.commit() + cal_id = cal.public_id + calendar_item = api_client.get_data('/calendars/{}'.format(cal_id)) + + assert calendar_item['account_id'] == default_namespace.public_id + assert calendar_item['name'] == 'Holidays' + assert calendar_item['description'] is None + assert calendar_item['read_only'] is False + assert calendar_item['object'] == 'calendar' + + +def test_handle_not_found_calendar(api_client): + resp_data = api_client.get_raw('/calendars/foo') + assert resp_data.status_code == 404 + + +def test_add_to_specific_calendar(db, default_namespace, api_client): + cal = Calendar( + namespace_id=default_namespace.id, + uid='uid', + provider_name='WTF', + 
name='Custom') + db.session.add(cal) + db.session.commit() + cal_id = cal.public_id + + e_data = {'calendar_id': cal_id, + 'title': 'subj', 'description': 'body1', + 'when': {'time': 1}, 'location': 'NylasHQ'} + r = api_client.post_data('/events', e_data) + assert r.status_code == 200 + + events = api_client.get_data('/events?calendar_id={}'.format(cal_id)) + assert len(events) == 1 + + +def test_add_to_read_only_calendar(db, api_client): + cal_list = api_client.get_data('/calendars') + ro_cal = None + for c in cal_list: + if c['read_only']: + ro_cal = c + + assert ro_cal + + e_data = {'calendar_id': ro_cal['id'], + 'title': 'subj', 'description': 'body1', + 'when': {'time': 1}, 'location': 'NylasHQ'} + resp = api_client.post_data('/events', e_data) + assert resp.status_code == 400 + + +def test_delete_from_readonly_calendar(db, default_namespace, api_client): + + add_fake_event(db.session, default_namespace.id, + calendar=db.session.query(Calendar).filter( + Calendar.namespace_id == default_namespace.id, + Calendar.read_only == True).first(), # noqa + read_only=True) + calendar_list = api_client.get_data('/calendars') + + read_only_calendar = None + for c in calendar_list: + if c['read_only']: + read_only_calendar = c + break + events = api_client.get_data('/events?calendar_id={}'.format( + read_only_calendar['id'])) + for event in events: + if event['read_only']: + read_only_event = event + break + + assert read_only_calendar + assert read_only_event + e_id = read_only_event['id'] + resp = api_client.delete('/events/{}'.format(e_id)) + assert resp.status_code == 400 diff --git a/inbox/test/api/test_contacts.py b/inbox/test/api/test_contacts.py new file mode 100644 index 000000000..dba8f75c9 --- /dev/null +++ b/inbox/test/api/test_contacts.py @@ -0,0 +1,58 @@ +from inbox.models import Contact +from inbox.test.util.base import contact_sync, contacts_provider +from inbox.test.api.base import api_client + +__all__ = ['contacts_provider', 'contact_sync', 'api_client'] + + +def test_api_list(contacts_provider, contact_sync, db, api_client, + default_namespace): + contacts_provider.supply_contact('Contact One', + 'contact.one@email.address') + contacts_provider.supply_contact('Contact Two', + 'contact.two@email.address') + + contact_sync.provider = contacts_provider + contact_sync.sync() + + contact_list = api_client.get_data('/contacts') + contact_names = [contact['name'] for contact in contact_list] + assert 'Contact One' in contact_names + assert 'Contact Two' in contact_names + + contact_emails = [contact['email'] for contact in contact_list] + assert 'contact.one@email.address' in contact_emails + assert 'contact.two@email.address' in contact_emails + + contact_count = api_client.get_data('/contacts?view=count') + assert contact_count['count'] == db.session.query(Contact). 
\ + filter(Contact.namespace_id == default_namespace.id).count() + + +def test_api_get(contacts_provider, contact_sync, db, api_client, + default_namespace): + contacts_provider.supply_contact('Contact One', + 'contact.one@email.address') + contacts_provider.supply_contact('Contact Two', + 'contact.two@email.address') + + contact_sync.provider = contacts_provider + contact_sync.sync() + + contact_list = api_client.get_data('/contacts') + + contact_ids = [contact['id'] for contact in contact_list] + + c1found = False + c2found = False + for c_id in contact_ids: + contact = api_client.get_data('/contacts/' + c_id) + + if contact['name'] == 'Contact One': + c1found = True + + if contact['name'] == 'Contact Two': + c2found = True + + assert c1found + assert c2found diff --git a/inbox/test/api/test_data_processing.py b/inbox/test/api/test_data_processing.py new file mode 100644 index 000000000..8a1259de6 --- /dev/null +++ b/inbox/test/api/test_data_processing.py @@ -0,0 +1,148 @@ +import json +from inbox.models import DataProcessingCache +from sqlalchemy.orm.exc import NoResultFound +from inbox.test.util.base import (add_fake_thread, + add_fake_message, default_namespace) +from inbox.test.api.base import api_client + + +__all__ = ['api_client', 'default_namespace'] + + +def test_contact_rankings(db, api_client, default_namespace): + # Clear cached data (if it exists) + namespace_id = default_namespace.id + try: + cached_data = db.session.query(DataProcessingCache) \ + .filter(DataProcessingCache.namespace_id == + namespace_id).one() + cached_data.contact_rankings_last_updated = None + db.session.add(cached_data) + db.session.commit() + except NoResultFound: + pass + + # Send some emails + namespace_email = default_namespace.email_address + + me = ('me', namespace_email) + recipients = ([[('first', 'number1@nylas.com')]] * 8 + + [[('second', 'number2@nylas.com')]] * 4 + + [[('third', 'number3@nylas.com')]] + + [[('third', 'number3@nylas.com'), + ('fourth', 'number4@nylas.com')]]) + + for recipients_list in recipients: + fake_thread = add_fake_thread(db.session, namespace_id) + add_fake_message(db.session, namespace_id, fake_thread, + subject='Froop', + from_addr=[me], + to_addr=recipients_list, + add_sent_category=True) + + # Check contact rankings + resp = api_client.get_raw( + '/contacts/rankings?force_recalculate=true') + assert resp.status_code == 200 + + emails_scores = {e: s for (e, s) in json.loads(resp.data)} + emails = ['number1@nylas.com', 'number2@nylas.com', + 'number3@nylas.com', 'number4@nylas.com'] + for email in emails: + assert email in emails_scores + + for e1, e2 in zip(emails, emails[1:]): + assert emails_scores[e1] > emails_scores[e2] + + # make sure it works if we call it again! 
+ resp = api_client.get_raw('/contacts/rankings') + assert resp.status_code == 200 + + emails_scores = {e: s for (e, s) in json.loads(resp.data)} + emails = ['number1@nylas.com', 'number2@nylas.com', + 'number3@nylas.com', 'number4@nylas.com'] + for email in emails: + assert email in emails_scores + + for e1, e2 in zip(emails, emails[1:]): + assert emails_scores[e1] > emails_scores[e2] + + try: + cached_data = db.session.query(DataProcessingCache) \ + .filter(DataProcessingCache.namespace_id == + namespace_id).one() + assert cached_data.contact_rankings_last_updated is not None + except (NoResultFound, AssertionError): + assert False, "Contact rankings not cached" + + +def test_contact_groups(db, api_client, default_namespace): + # Clear cached data (if it exists) + namespace_id = default_namespace.id + try: + cached_data = db.session.query(DataProcessingCache) \ + .filter(DataProcessingCache.namespace_id == + namespace_id).one() + cached_data.contact_groups_last_updated = None + db.session.add(cached_data) + db.session.commit() + except NoResultFound: + pass + + # Send some emails + namespace_email = default_namespace.email_address + me = ('me', namespace_email) + recipients = ([[('a', 'a@nylas.com'), + ('b', 'b@nylas.com'), + ('c', 'c@nylas.com')]] * 8 + + [[('b', 'b@nylas.com'), + ('c', 'c@nylas.com'), + ('d', 'd@nylas.com')]] * 8 + + [[('d', 'd@nylas.com'), + ('e', 'e@nylas.com'), + ('f', 'f@nylas.com')]] * 8 + + [[('g', 'g@nylas.com'), + ('h', 'h@nylas.com'), + ('i', 'i@nylas.com'), + ('j', 'j@nylas.com')]] * 5 + + [[('g', 'g@nylas.com'), + ('h', 'h@nylas.com'), + ('i', 'i@nylas.com')]] * 2 + + [[('k', 'k@nylas.com'), + ('l', 'l@nylas.com')]] * 3) + + for recipients_list in recipients: + fake_thread = add_fake_thread(db.session, namespace_id) + add_fake_message(db.session, namespace_id, fake_thread, + subject='Froop', + from_addr=[me], + to_addr=recipients_list, + add_sent_category=True) + + # Check contact groups + resp = api_client.get_raw('/groups/intrinsic?force_recalculate=true') + assert resp.status_code == 200 + + groups_scores = {g: s for (g, s) in json.loads(resp.data)} + groups = ['a@nylas.com, b@nylas.com, c@nylas.com, d@nylas.com', + 'd@nylas.com, e@nylas.com, f@nylas.com', + 'g@nylas.com, h@nylas.com, i@nylas.com, j@nylas.com', + 'k@nylas.com, l@nylas.com'] + for g in groups: + assert g in groups_scores + + # make sure it works when we do it again + resp = api_client.get_raw('/groups/intrinsic') + assert resp.status_code == 200 + + groups_scores = {g: s for (g, s) in json.loads(resp.data)} + for g in groups: + assert g in groups_scores + + try: + cached_data = db.session.query(DataProcessingCache) \ + .filter(DataProcessingCache.namespace_id == + namespace_id).one() + assert cached_data.contact_groups_last_updated is not None + except (NoResultFound, AssertionError): + assert False, "Contact groups not cached" diff --git a/inbox/test/api/test_drafts.py b/inbox/test/api/test_drafts.py new file mode 100644 index 000000000..82241889e --- /dev/null +++ b/inbox/test/api/test_drafts.py @@ -0,0 +1,460 @@ +# -*- coding: utf-8 -*- +"""Test local behavior for the drafts API. 
Doesn't test syncback or actual +sending.""" +import json +import os +from datetime import datetime +from freezegun import freeze_time + +import pytest + +from inbox.test.util.base import add_fake_message, add_fake_thread +from inbox.test.api.base import api_client + +__all__ = ['api_client'] + + +@pytest.fixture +def example_draft(db, default_account): + return { + 'subject': 'Draft test at {}'.format(datetime.utcnow()), + 'body': '
Sea, birds and sand.
', + 'to': [{'name': 'The red-haired mermaid', + 'email': default_account.email_address}] + } + + +@pytest.fixture +def example_bad_recipient_drafts(): + bad_email = { + 'subject': 'Draft test at {}'.format(datetime.utcnow()), + 'body': '
Sea, birds and sand.
', + 'to': [{'name': 'The red-haired mermaid', + 'email': 'froop'}] + } + + empty_email = { + 'subject': 'Draft test at {}'.format(datetime.utcnow()), + 'body': '
Sea, birds and sand.
', + 'to': [{'name': 'The red-haired mermaid', + 'email': ''}] + } + + return [empty_email, bad_email] + + +@pytest.fixture(scope='function') +def attachments(db): + filenames = ['muir.jpg', 'LetMeSendYouEmail.wav', 'piece-jointe.jpg'] + data = [] + for filename in filenames: + path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', + 'data', filename).encode('utf-8') + # Mac and linux fight over filesystem encodings if we store this + # filename on the fs. Work around by changing the filename we upload + # instead. + if filename == 'piece-jointe.jpg': + filename = u'pièce-jointe.jpg' + data.append((filename, path)) + return data + + +@pytest.fixture +def patch_remote_save_draft(monkeypatch): + + saved_drafts = [] + + def mock_save_draft(account_id, message_id, args): + saved_drafts.append(message_id) + + # Patch both, just in case + monkeypatch.setattr('inbox.actions.base.save_draft', mock_save_draft) + + return saved_drafts + + +def test_save_update_bad_recipient_draft(db, patch_remote_save_draft, + default_account, + example_bad_recipient_drafts): + # You should be able to save a draft, even if + # the recipient's email is invalid. + from inbox.sendmail.base import create_message_from_json + from inbox.actions.base import save_draft + + for example_draft in example_bad_recipient_drafts: + draft = create_message_from_json(example_draft, + default_account.namespace, db.session, + is_draft=True) + + save_draft(default_account.id, draft.id, {'version': draft.version}) + + assert len(patch_remote_save_draft) == 2 + + +def test_create_and_get_draft(api_client, example_draft): + r = api_client.post_data('/drafts', example_draft) + assert r.status_code == 200 + + public_id = json.loads(r.data)['id'] + version = json.loads(r.data)['version'] + assert version == 0 + + r = api_client.get_data('/drafts') + matching_saved_drafts = [draft for draft in r if draft['id'] == public_id] + assert len(matching_saved_drafts) == 1 + saved_draft = matching_saved_drafts[0] + + assert all(saved_draft[k] == v for k, v in example_draft.iteritems()) + + +def test_create_draft_replying_to_thread(api_client, thread, message): + thread = api_client.get_data('/threads')[0] + thread_id = thread['id'] + latest_message_id = thread['message_ids'][-1] + + reply_draft = { + 'subject': 'test reply', + 'body': 'test reply', + 'thread_id': thread_id + } + r = api_client.post_data('/drafts', reply_draft) + draft_id = json.loads(r.data)['id'] + + drafts = api_client.get_data('/drafts') + assert len(drafts) == 1 + + assert thread_id == drafts[0]['thread_id'] + assert drafts[0]['reply_to_message_id'] == latest_message_id + + thread_data = api_client.get_data('/threads/{}'.format(thread_id)) + assert draft_id in thread_data['draft_ids'] + + +def test_create_draft_replying_to_message(api_client, message): + message = api_client.get_data('/messages')[0] + reply_draft = { + 'subject': 'test reply', + 'body': 'test reply', + 'reply_to_message_id': message['id'] + } + r = api_client.post_data('/drafts', reply_draft) + data = json.loads(r.data) + assert data['reply_to_message_id'] == message['id'] + assert data['thread_id'] == message['thread_id'] + + +def test_reject_incompatible_reply_thread_and_message( + db, api_client, message, thread, default_namespace): + alt_thread = add_fake_thread(db.session, default_namespace.id) + add_fake_message(db.session, default_namespace.id, alt_thread) + + thread = api_client.get_data('/threads')[0] + alt_message_id = api_client.get_data('/threads')[1]['message_ids'][0] + alt_message = 
api_client.get_data('/messages/{}'.format(alt_message_id)) + assert thread['id'] != alt_message['thread_id'] + reply_draft = { + 'subject': 'test reply', + 'reply_to_message_id': alt_message['id'], + 'thread_id': thread['id'] + } + r = api_client.post_data('/drafts', reply_draft) + assert r.status_code == 400 + + +def test_drafts_filter(api_client, example_draft): + r = api_client.post_data('/drafts', example_draft) + thread_id = json.loads(r.data)['thread_id'] + + reply_draft = { + 'subject': 'test reply', + 'body': 'test reply', + 'thread_id': thread_id + } + r = api_client.post_data('/drafts', reply_draft) + + _filter = '?thread_id=0000000000000000000000000' + results = api_client.get_data('/drafts' + _filter) + assert len(results) == 0 + + results = api_client.get_data('/drafts?thread_id={}' + .format(thread_id)) + assert len(results) == 2 + + results = api_client.get_data('/drafts?offset={}&thread_id={}' + .format(1, thread_id)) + assert len(results) == 1 + + +def test_create_draft_with_attachments(api_client, attachments, example_draft): + attachment_ids = [] + upload_path = '/files' + for filename, path in attachments: + data = {'file': (open(path, 'rb'), filename)} + r = api_client.post_raw(upload_path, data=data) + assert r.status_code == 200 + attachment_id = json.loads(r.data)[0]['id'] + attachment_ids.append(attachment_id) + + first_attachment = attachment_ids.pop() + + example_draft['file_ids'] = [first_attachment] + r = api_client.post_data('/drafts', example_draft) + assert r.status_code == 200 + returned_draft = json.loads(r.data) + draft_public_id = returned_draft['id'] + assert returned_draft['version'] == 0 + example_draft['version'] = returned_draft['version'] + assert len(returned_draft['files']) == 1 + + attachment_ids.append(first_attachment) + example_draft['file_ids'] = attachment_ids + r = api_client.put_data('/drafts/{}'.format(draft_public_id), + example_draft) + assert r.status_code == 200 + returned_draft = json.loads(r.data) + assert len(returned_draft['files']) == 3 + assert returned_draft['version'] == 1 + example_draft['version'] = returned_draft['version'] + + # Make sure we can't delete the files now + for file_id in attachment_ids: + r = api_client.delete('/files/{}'.format(file_id)) + assert r.status_code == 400 + + # Now remove the attachment + example_draft['file_ids'] = [first_attachment] + r = api_client.put_data('/drafts/{}'.format(draft_public_id), + example_draft) + + draft_data = api_client.get_data('/drafts/{}'.format(draft_public_id)) + assert len(draft_data['files']) == 1 + assert draft_data['version'] == 2 + example_draft['version'] = draft_data['version'] + + example_draft['file_ids'] = [] + r = api_client.put_data('/drafts/{}'.format(draft_public_id), + example_draft) + draft_data = api_client.get_data('/drafts/{}'.format(draft_public_id)) + assert r.status_code == 200 + assert len(draft_data['files']) == 0 + assert draft_data['version'] == 3 + + # now that they're not attached, we should be able to delete them + for file_id in attachment_ids: + r = api_client.delete('/files/{}'.format(file_id)) + assert r.status_code == 200 + + +def test_get_all_drafts(api_client, example_draft): + r = api_client.post_data('/drafts', example_draft) + first_public_id = json.loads(r.data)['id'] + + r = api_client.post_data('/drafts', example_draft) + second_public_id = json.loads(r.data)['id'] + + drafts = api_client.get_data('/drafts') + assert len(drafts) == 2 + assert first_public_id != second_public_id + assert {first_public_id, second_public_id} == 
{draft['id'] for draft in + drafts} + assert all(item['object'] == 'draft' for item in drafts) + + +def test_update_draft(api_client): + with freeze_time(datetime.now()) as freezer: + original_draft = { + 'subject': 'original draft', + 'body': 'parent draft' + } + r = api_client.post_data('/drafts', original_draft) + draft_public_id = json.loads(r.data)['id'] + version = json.loads(r.data)['version'] + assert version == 0 + + freezer.tick() + + updated_draft = { + 'subject': 'updated draft', + 'body': 'updated draft', + 'version': version + } + + r = api_client.put_data('/drafts/{}'.format(draft_public_id), + updated_draft) + updated_public_id = json.loads(r.data)['id'] + updated_version = json.loads(r.data)['version'] + + assert updated_public_id == draft_public_id + assert updated_version > 0 + + drafts = api_client.get_data('/drafts') + assert len(drafts) == 1 + assert drafts[0]['id'] == updated_public_id + + # Check that the thread is updated too. + thread = api_client.get_data('/threads/{}'.format(drafts[0]['thread_id'])) + assert thread['subject'] == 'updated draft' + assert thread['first_message_timestamp'] == drafts[0]['date'] + assert thread['last_message_timestamp'] == drafts[0]['date'] + + +def test_delete_draft(api_client, thread, message): + original_draft = { + 'subject': 'parent draft', + 'body': 'parent draft' + } + r = api_client.post_data('/drafts', original_draft) + draft_public_id = json.loads(r.data)['id'] + version = json.loads(r.data)['version'] + + updated_draft = { + 'subject': 'updated draft', + 'body': 'updated draft', + 'version': version + } + r = api_client.put_data('/drafts/{}'.format(draft_public_id), + updated_draft) + updated_public_id = json.loads(r.data)['id'] + updated_version = json.loads(r.data)['version'] + + r = api_client.delete('/drafts/{}'.format(updated_public_id), + {'version': updated_version}) + + # Check that drafts were deleted + drafts = api_client.get_data('/drafts') + assert not drafts + + # Check that no orphaned threads are around + threads = api_client.get_data('/threads?subject=parent%20draft') + assert not threads + threads = api_client.get_data('/threads?subject=updated%20draft') + assert not threads + + # And check that threads aren't deleted if they still have messages. 
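+ # (The `thread` and `message` fixtures supply a thread that already has a
+ # real message on it, so deleting the reply draft below should remove only
+ # the draft, not the thread itself.)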
+ thread_public_id = api_client.get_data('/threads')[0]['id'] + + reply_draft = { + 'subject': 'test reply', + 'body': 'test reply', + 'thread_id': thread_public_id + } + r = api_client.post_data('/drafts', reply_draft) + public_id = json.loads(r.data)['id'] + version = json.loads(r.data)['version'] + thread = api_client.get_data('/threads/{}'.format(thread_public_id)) + assert len(thread['draft_ids']) > 0 + api_client.delete('/drafts/{}'.format(public_id), + {'version': version}) + thread = api_client.get_data('/threads/{}'.format(thread_public_id)) + assert thread + assert len(thread['draft_ids']) == 0 + + +def test_delete_remote_draft(db, api_client, message): + message.is_draft = True + db.session.commit() + + drafts = api_client.get_data('/drafts') + assert len(drafts) == 1 + + public_id = drafts[0]['id'] + version = drafts[0]['version'] + + assert public_id == message.public_id and version == message.version + + api_client.delete('/drafts/{}'.format(public_id), + {'version': version}) + + # Check that drafts were deleted + drafts = api_client.get_data('/drafts') + assert not drafts + + +def test_conflicting_updates(api_client): + original_draft = { + 'subject': 'parent draft', + 'body': 'parent draft' + } + r = api_client.post_data('/drafts', original_draft) + original_public_id = json.loads(r.data)['id'] + version = json.loads(r.data)['version'] + + updated_draft = { + 'subject': 'updated draft', + 'body': 'updated draft', + 'version': version + } + r = api_client.put_data('/drafts/{}'.format(original_public_id), + updated_draft) + assert r.status_code == 200 + updated_public_id = json.loads(r.data)['id'] + updated_version = json.loads(r.data)['version'] + assert updated_version != version + + conflicting_draft = { + 'subject': 'conflicting draft', + 'body': 'conflicting draft', + 'version': version + } + r = api_client.put_data('/drafts/{}'.format(original_public_id), + conflicting_draft) + assert r.status_code == 409 + + drafts = api_client.get_data('/drafts') + assert len(drafts) == 1 + assert drafts[0]['id'] == updated_public_id + + +def test_update_to_nonexistent_draft(api_client): + updated_draft = { + 'subject': 'updated draft', + 'body': 'updated draft', + 'version': 22 + } + + r = api_client.put_data('/drafts/{}'.format('notarealid'), updated_draft) + assert r.status_code == 404 + drafts = api_client.get_data('/drafts') + assert len(drafts) == 0 + + +def test_contacts_updated(api_client): + """Tests that draft-contact associations are properly created and + updated.""" + draft = { + 'to': [{'email': 'alice@example.com'}, {'email': 'bob@example.com'}] + } + + r = api_client.post_data('/drafts', draft) + assert r.status_code == 200 + draft_id = json.loads(r.data)['id'] + draft_version = json.loads(r.data)['version'] + + r = api_client.get_data('/threads?to=alice@example.com') + assert len(r) == 1 + + updated_draft = { + 'to': [{'email': 'alice@example.com'}, {'email': 'joe@example.com'}], + 'version': draft_version + } + + r = api_client.put_data('/drafts/{}'.format(draft_id), updated_draft) + assert r.status_code == 200 + + r = api_client.get_data('/threads?to=alice@example.com') + assert len(r) == 1 + + r = api_client.get_data('/threads?to=bob@example.com') + assert len(r) == 0 + + r = api_client.get_data('/threads?to=joe@example.com') + assert len(r) == 1 + + # Check that contacts aren't created for garbage recipients. 
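+ # ('nope' is not a parseable address, so even though the draft itself is
+ # accepted, no contact or thread participant should be created for it.)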
+ r = api_client.post_data('/drafts', + {'to': [{'name': 'who', 'email': 'nope'}]}) + assert r.status_code == 200 + r = api_client.get_data('/threads?to=nope') + assert len(r) == 0 + r = api_client.get_data('/contacts?filter=nope') + assert len(r) == 0 diff --git a/inbox/test/api/test_event_participants.py b/inbox/test/api/test_event_participants.py new file mode 100644 index 000000000..55cdf195e --- /dev/null +++ b/inbox/test/api/test_event_participants.py @@ -0,0 +1,437 @@ +import pytest +import json + +from inbox.test.util.base import calendar +from inbox.test.api.base import api_client + + +__all__ = ['calendar', 'api_client'] + + +# TODO(emfree) WTF is all this crap anyways? + +def test_api_create(db, api_client, calendar): + e_data = { + 'title': 'Friday Office Party', + 'when': {'time': 1407542195}, + 'calendar_id': calendar.public_id, + 'participants': [{ + 'name': 'alyssa p. hacker', + 'email': 'alyssa@example.com' + }] + } + + e_resp = api_client.post_data('/events', e_data) + e_resp_data = json.loads(e_resp.data) + + assert len(e_resp_data['participants']) == 1 + participant = e_resp_data['participants'][0] + assert participant['name'] == e_data['participants'][0]['name'] + assert participant['email'] == e_data['participants'][0]['email'] + assert participant['status'] == 'noreply' + + e_resp_data = api_client.get_data('/events/' + e_resp_data['id']) + + assert len(e_resp_data['participants']) == 1 + participant = e_resp_data['participants'][0] + assert participant['name'] == e_data['participants'][0]['name'] + assert participant['email'] == e_data['participants'][0]['email'] + assert participant['status'] == 'noreply' + + +def test_api_create_status_yes(db, api_client, calendar): + e_data = { + 'title': 'Friday Office Party', + 'when': {'time': 1407542195}, + 'calendar_id': calendar.public_id, + 'participants': [{ + 'email': 'alyssa@example.com', + 'status': 'yes' + }] + } + + e_resp = api_client.post_data('/events', e_data) + e_resp_data = json.loads(e_resp.data) + + assert len(e_resp_data['participants']) == 1 + participant = e_resp_data['participants'][0] + assert participant['name'] is None + assert participant['email'] == e_data['participants'][0]['email'] + assert participant['status'] == 'yes' + + +def test_api_create_multiple(db, api_client, calendar): + e_data = { + 'title': 'Friday Office Party', + 'when': {'time': 1407542195}, + 'calendar_id': calendar.public_id, + 'participants': [{ + 'email': 'alyssa@example.com', + }, { + 'email': 'ben.bitdiddle@example.com', + }] + } + + e_resp = api_client.post_data('/events', e_data) + e_resp_data = json.loads(e_resp.data) + + assert len(e_resp_data['participants']) == 2 + for participant in e_resp_data['participants']: + res = [e for e in e_data['participants'] + if e['email'] == participant['email']] + assert len(res) == 1 + + participant0 = e_resp_data['participants'][0] + participant1 = e_resp_data['participants'][1] + assert participant0['name'] is None + assert participant0['status'] == 'noreply' + assert participant1['name'] is None + assert participant1['status'] == 'noreply' + + +def test_api_create_status_no(db, api_client, calendar): + e_data = { + 'title': 'Friday Office Party', + 'when': {'time': 1407542195}, + 'calendar_id': calendar.public_id, + 'participants': [{ + 'email': 'alyssa@example.com', + 'status': 'no' + }] + } + + e_resp = api_client.post_data('/events', e_data) + e_resp_data = json.loads(e_resp.data) + + assert len(e_resp_data['participants']) == 1 + participant = e_resp_data['participants'][0] + 
assert participant['name'] is None + assert participant['email'] == e_data['participants'][0]['email'] + assert participant['status'] == e_data['participants'][0]['status'] + + +def test_api_create_status_maybe(db, api_client, calendar): + e_data = { + 'title': 'Friday Office Party', + 'when': {'time': 1407542195}, + 'calendar_id': calendar.public_id, + 'participants': [{ + 'email': 'alyssa@example.com', + 'status': 'maybe' + }] + } + + e_resp = api_client.post_data('/events', e_data) + e_resp_data = json.loads(e_resp.data) + + assert len(e_resp_data['participants']) == 1 + participant = e_resp_data['participants'][0] + assert participant['name'] is None + assert participant['email'] == e_data['participants'][0]['email'] + assert participant['status'] == e_data['participants'][0]['status'] + + +def test_api_create_status_noreply(db, api_client, calendar): + e_data = { + 'title': 'Friday Office Party', + 'when': {'time': 1407542195}, + 'calendar_id': calendar.public_id, + 'participants': [{ + 'email': 'alyssa@example.com', + 'status': 'noreply' + }] + } + + e_resp = api_client.post_data('/events', e_data) + e_resp_data = json.loads(e_resp.data) + + assert len(e_resp_data['participants']) == 1 + participant = e_resp_data['participants'][0] + assert participant['name'] is None + assert participant['email'] == e_data['participants'][0]['email'] + assert participant['status'] == e_data['participants'][0]['status'] + + +def test_api_create_no_name(db, api_client, calendar): + e_data = { + 'title': 'Friday Office Party', + 'when': {'time': 1407542195}, + 'calendar_id': calendar.public_id, + 'participants': [{ + 'email': 'alyssa@example.com' + }] + } + + e_resp = api_client.post_data('/events', e_data) + e_resp_data = json.loads(e_resp.data) + + assert len(e_resp_data['participants']) == 1 + participant = e_resp_data['participants'][0] + assert participant['name'] is None + assert participant['email'] == e_data['participants'][0]['email'] + assert participant['status'] == 'noreply' + + +def test_api_create_no_email(db, api_client, calendar): + e_data = { + 'title': 'Friday Office Party', + 'when': {'time': 1407542195}, + 'calendar_id': calendar.public_id, + 'participants': [{ + 'name': 'alyssa p. hacker', + }] + } + + e_resp = api_client.post_data('/events', e_data) + e_resp_data = json.loads(e_resp.data) + + assert e_resp_data["type"] == "invalid_request_error" + + +def test_api_create_bad_status(db, api_client, calendar): + e_data = { + 'title': 'Friday Office Party', + 'when': {'time': 1407542195}, + 'calendar_id': calendar.public_id, + 'participants': [{ + 'name': 'alyssa p. 
hacker', + 'email': 'alyssa@example.com', + 'status': 'bad' + }] + } + + e_resp = api_client.post_data('/events', e_data) + e_resp_data = json.loads(e_resp.data) + + assert e_resp_data["type"] == "invalid_request_error" + + +def test_api_add_participant(db, api_client, calendar): + e_data = { + 'title': 'Friday Office Party', + 'when': {'time': 1407542195}, + 'calendar_id': calendar.public_id, + 'participants': [{'email': 'alyssa@example.com'}, + {'email': 'ben.bitdiddle@example.com'}, + {'email': 'pei.mihn@example.com'}, + {'email': 'bill.ling@example.com'}, + {'email': 'john.q@example.com'}] + } + + e_resp = api_client.post_data('/events', e_data) + e_resp_data = json.loads(e_resp.data) + assert len(e_resp_data['participants']) == 5 + for i, p in enumerate(e_resp_data['participants']): + res = [e for e in e_resp_data['participants'] + if e['email'] == p['email']] + assert len(res) == 1 + assert res[0]['name'] is None + + event_id = e_resp_data['id'] + e_data['participants'].append({'email': 'filet.minyon@example.com'}) + e_resp = api_client.put_data('/events/' + event_id, e_data) + e_resp_data = json.loads(e_resp.data) + + assert len(e_resp_data['participants']) == 6 + for i, p in enumerate(e_resp_data['participants']): + res = [e for e in e_resp_data['participants'] + if e['email'] == p['email']] + assert len(res) == 1 + assert res[0]['name'] is None + + +def test_api_remove_participant(db, api_client, calendar): + e_data = { + 'title': 'Friday Office Party', + 'when': {'time': 1407542195}, + 'calendar_id': calendar.public_id, + 'participants': [{'email': 'alyssa@example.com'}, + {'email': 'ben.bitdiddle@example.com'}, + {'email': 'pei.mihn@example.com'}, + {'email': 'bill.ling@example.com'}, + {'email': 'john.q@example.com'}] + } + + e_resp = api_client.post_data('/events', e_data) + e_resp_data = json.loads(e_resp.data) + assert len(e_resp_data['participants']) == 5 + for i, p in enumerate(e_resp_data['participants']): + res = [e for e in e_resp_data['participants'] + if e['email'] == p['email']] + assert len(res) == 1 + assert res[0]['name'] is None + + event_id = e_resp_data['id'] + e_data['participants'].pop() + e_resp = api_client.put_data('/events/' + event_id, e_data) + e_resp_data = json.loads(e_resp.data) + assert len(e_resp_data['participants']) == 4 + for i, p in enumerate(e_resp_data['participants']): + res = [e for e in e_resp_data['participants'] + if e['email'] == p['email']] + assert len(res) == 1 + assert p['name'] is None + + +def test_api_update_participant_status(db, api_client, calendar): + e_data = { + 'title': 'Friday Office Party', + 'when': {'time': 1407542195}, + 'calendar_id': calendar.public_id, + 'participants': [{'email': 'alyssa@example.com'}, + {'email': 'ben.bitdiddle@example.com'}, + {'email': 'pei.mihn@example.com'}, + {'email': 'bill.ling@example.com'}, + {'email': 'john.q@example.com'}] + } + + e_resp = api_client.post_data('/events', e_data) + e_resp_data = json.loads(e_resp.data) + assert len(e_resp_data['participants']) == 5 + for i, p in enumerate(e_resp_data['participants']): + res = [e for e in e_data['participants'] if e['email'] == p['email']] + assert len(res) == 1 + assert p['name'] is None + + event_id = e_resp_data['id'] + + update_data = { + 'calendar_id': calendar.public_id, + 'participants': [{'email': 'alyssa@example.com', + 'status': 'yes'}, + {'email': 'ben.bitdiddle@example.com', + 'status': 'no'}, + {'email': 'pei.mihn@example.com', + 'status': 'maybe'}, + {'email': 'bill.ling@example.com'}, + {'email': 'john.q@example.com'}] + } 
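+ # The payload above resends the full participant list, giving an explicit
+ # RSVP status to three of the five attendees and leaving the rest as-is.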
+ + e_resp = api_client.put_data('/events/' + event_id, update_data) + e_resp_data = json.loads(e_resp.data) + + # Make sure that nothing changed that we didn't specify + assert e_resp_data['title'] == 'Friday Office Party' + assert e_resp_data['when']['time'] == 1407542195 + + assert len(e_resp_data['participants']) == 5 + for i, p in enumerate(e_resp_data['participants']): + res = [e for e in e_data['participants'] if e['email'] == p['email']] + assert len(res) == 1 + assert p['name'] is None + + +@pytest.mark.parametrize('rsvp', ['yes', 'no', 'maybe']) +def test_api_participant_reply(db, api_client, rsvp, calendar): + + e_data = { + 'title': 'Friday Office Party', + 'calendar_id': calendar.public_id, + 'when': {'time': 1407542195}, + 'participants': [{'email': 'alyssa@example.com'}, + {'email': 'ben.bitdiddle@example.com'}, + {'email': 'pei.mihn@example.com'}, + {'email': 'bill.ling@example.com'}, + {'email': 'john.q@example.com'}] + } + + e_resp = api_client.post_data('/events', e_data) + e_resp_data = json.loads(e_resp.data) + assert len(e_resp_data['participants']) == 5 + + assert e_resp_data['id'] + assert e_resp_data['participants'] + + +def test_api_participant_reply_invalid_rsvp(db, api_client, calendar): + e_data = { + 'title': 'Friday Office Party', + 'calendar_id': calendar.public_id, + 'when': {'time': 1407542195}, + 'participants': [{'email': 'alyssa@example.com'}, + {'email': 'ben.bitdiddle@example.com'}, + {'email': 'pei.mihn@example.com'}, + {'email': 'bill.ling@example.com'}, + {'email': 'john.q@example.com'}] + } + + e_resp = api_client.post_data('/events', e_data) + e_resp_data = json.loads(e_resp.data) + assert len(e_resp_data['participants']) == 5 + + assert e_resp_data['id'] + assert e_resp_data['participants'] + + +def test_api_participant_reply_invalid_participant(db, api_client, calendar): + + e_data = { + 'title': 'Friday Office Party', + 'when': {'time': 1407542195}, + 'calendar_id': calendar.public_id, + 'participants': [{'email': 'alyssa@example.com'}, + {'email': 'ben.bitdiddle@example.com'}, + {'email': 'pei.mihn@example.com'}, + {'email': 'bill.ling@example.com'}, + {'email': 'john.q@example.com'}] + } + + e_resp = api_client.post_data('/events', e_data) + e_resp_data = json.loads(e_resp.data) + assert len(e_resp_data['participants']) == 5 + + assert e_resp_data['id'] + + +def test_api_participant_reply_invalid_event(db, api_client, calendar): + e_data = { + 'title': 'Friday Office Party', + 'when': {'time': 1407542195}, + 'calendar_id': calendar.public_id, + 'participants': [{'email': 'alyssa@example.com'}, + {'email': 'ben.bitdiddle@example.com'}, + {'email': 'pei.mihn@example.com'}, + {'email': 'bill.ling@example.com'}, + {'email': 'john.q@example.com'}] + } + + e_resp = api_client.post_data('/events', e_data) + e_resp_data = json.loads(e_resp.data) + assert len(e_resp_data['participants']) == 5 + + assert e_resp_data['participants'] + + +def test_api_participant_reply_invalid_event2(db, api_client, calendar): + e_data = { + 'title': 'Friday Office Party', + 'when': {'time': 1407542195}, + 'calendar_id': calendar.public_id, + 'participants': [{'email': 'alyssa@example.com'}, + {'email': 'ben.bitdiddle@example.com'}, + {'email': 'pei.mihn@example.com'}, + {'email': 'bill.ling@example.com'}, + {'email': 'john.q@example.com'}] + } + + e_resp = api_client.post_data('/events', e_data) + e_resp_data = json.loads(e_resp.data) + assert len(e_resp_data['participants']) == 5 + + +def test_api_participant_reply_invalid_action(db, api_client, calendar): + e_data = 
{ + 'title': 'Friday Office Party', + 'when': {'time': 1407542195}, + 'calendar_id': calendar.public_id, + 'participants': [{'email': 'alyssa@example.com'}, + {'email': 'ben.bitdiddle@example.com'}, + {'email': 'pei.mihn@example.com'}, + {'email': 'bill.ling@example.com'}, + {'email': 'john.q@example.com'}] + } + + e_resp = api_client.post_data('/events', e_data) + e_resp_data = json.loads(e_resp.data) + assert len(e_resp_data['participants']) == 5 + assert e_resp_data['id'] diff --git a/inbox/test/api/test_event_when.py b/inbox/test/api/test_event_when.py new file mode 100644 index 000000000..aa82e758d --- /dev/null +++ b/inbox/test/api/test_event_when.py @@ -0,0 +1,267 @@ +import json +import pytest + +import arrow + +from inbox.test.api.base import api_client + +__all__ = ['api_client'] + + +class CreateError(Exception): + pass + + +def _verify_create(ns_id, api_client, e_data): + e_resp = api_client.post_data('/events', e_data) + if e_resp.status_code != 200: + raise CreateError("Expected status 200, got %d" % e_resp.status_code) + + e_resp_data = json.loads(e_resp.data) + assert e_resp_data['object'] == 'event' + assert e_resp_data['account_id'] == ns_id + assert e_resp_data['title'] == e_data['title'] + assert e_resp_data['location'] == e_data['location'] + for k, v in e_data['when'].iteritems(): + assert arrow.get(e_resp_data['when'][k]) == arrow.get(v) + assert 'id' in e_resp_data + e_id = e_resp_data['id'] + e_get_resp = api_client.get_data('/events/' + e_id) + + assert e_get_resp['object'] == 'event' + assert e_get_resp['account_id'] == ns_id + assert e_get_resp['id'] == e_id + assert e_get_resp['title'] == e_data['title'] + for k, v in e_data['when'].iteritems(): + assert arrow.get(e_get_resp['when'][k]) == arrow.get(v) + + return e_resp_data + + +def test_api_when_as_str(db, api_client, calendar, default_namespace): + e_data = { + 'title': 'Friday Office Party', + 'when': {'time': '1407542195'}, + 'calendar_id': calendar.public_id, + 'location': 'Nylas HQ', + } + + e_resp_data = _verify_create(default_namespace.public_id, api_client, + e_data) + assert e_resp_data['when']['object'] == 'time' + + +def test_api_time(db, api_client, calendar, default_namespace): + e_data = { + 'title': 'Friday Office Party', + 'when': {'time': 1407542195}, + 'calendar_id': calendar.public_id, + 'location': 'Nylas HQ', + } + + e_resp_data = _verify_create(default_namespace.public_id, api_client, + e_data) + assert e_resp_data['when']['object'] == 'time' + + +def test_api_timespan(db, api_client, calendar, default_namespace): + e_data = { + 'title': 'Friday Office Party', + 'calendar_id': calendar.public_id, + 'when': {'start_time': 1407542195, 'end_time': 1407548195}, + 'location': 'Nylas HQ', + } + + e_resp_data = _verify_create(default_namespace.public_id, api_client, + e_data) + assert e_resp_data['when']['object'] == 'timespan' + + +def test_api_date(db, api_client, calendar, default_namespace): + e_data = { + 'title': 'Friday Office Party', + 'calendar_id': calendar.public_id, + 'when': {'date': '2014-08-27'}, + 'location': 'Nylas HQ', + } + + e_resp_data = _verify_create(default_namespace.public_id, api_client, + e_data) + assert e_resp_data['when']['object'] == 'date' + + +def test_api_datespan(db, api_client, calendar, default_namespace): + e_data = { + 'title': 'Friday Office Party', + 'calendar_id': calendar.public_id, + 'when': {'start_date': '2014-08-27', 'end_date': '2014-08-28'}, + 'location': 'Nylas HQ', + } + + e_resp_data = _verify_create(default_namespace.public_id, api_client, + 
e_data) + assert e_resp_data['when']['object'] == 'datespan' + + +# Invalid + + +def test_api_invalid_event_no_when(db, api_client, calendar, + default_namespace): + e_data = { + 'title': 'Friday Office Party', + 'calendar_id': calendar.public_id + } + + with pytest.raises(CreateError): + _verify_create(default_namespace.public_id, api_client, e_data) + + +def test_api_invalid_event_when_no_params(db, api_client, calendar, + default_namespace): + e_data = { + 'title': 'Friday Office Party', + 'when': {}, + 'calendar_id': calendar.public_id, + } + + with pytest.raises(CreateError): + _verify_create(default_namespace.public_id, api_client, e_data) + + +def test_api_invalid_event_when_bad_params(db, api_client, calendar, + default_namespace): + e_data = { + 'title': 'Friday Office Party', + 'calendar_id': calendar.public_id, + 'when': {'start': 0}} + with pytest.raises(CreateError): + _verify_create(default_namespace.public_id, api_client, e_data) + + +def test_api_invalid_event_when_timespan_bad_params(db, api_client, calendar, + default_namespace): + e_data = {'title': 'Friday Office Party'} + + e_data['when'] = {'object': 'time', 'start': 0} + with pytest.raises(CreateError): + _verify_create(default_namespace.public_id, api_client, e_data) + + e_data['when'] = {'object': 'time', 'start_time': 0} + with pytest.raises(CreateError): + _verify_create(default_namespace.public_id, api_client, e_data) + + e_data['when'] = {'start_time': 0} + with pytest.raises(CreateError): + _verify_create(default_namespace.public_id, api_client, e_data) + + e_data['when'] = {'start_time': 'a', 'end_time': 0} + with pytest.raises(CreateError): + _verify_create(default_namespace.public_id, api_client, e_data) + + e_data['when'] = {'start_time': 0, 'end_time': 'a'} + with pytest.raises(CreateError): + _verify_create(default_namespace.public_id, api_client, e_data) + + e_data['when'] = {'start_time': 2, 'end_time': 1} + with pytest.raises(CreateError): + _verify_create(default_namespace.public_id, api_client, e_data) + + e_data['when'] = {'start_time': 0, 'end_time': 1, 'time': 2} + with pytest.raises(CreateError): + _verify_create(default_namespace.public_id, api_client, e_data) + + +def test_api_invalid_event_when_datespan_bad_params(db, api_client, calendar, + default_namespace): + e_data = { + 'title': 'Friday Office Party', + 'calendar_id': calendar.public_id, + } + + e_data['when'] = {'object': 'date', 'start': 0} + with pytest.raises(CreateError): + _verify_create(default_namespace.public_id, api_client, e_data) + + e_data['when'] = {'object': 'date', 'start_date': 0} + with pytest.raises(CreateError): + _verify_create(default_namespace.public_id, api_client, e_data) + + e_data['when'] = {'start_date': 0} + with pytest.raises(CreateError): + _verify_create(default_namespace.public_id, api_client, e_data) + + e_data['when'] = {'start_date': 'a', 'end_date': 0} + with pytest.raises(CreateError): + _verify_create(default_namespace.public_id, api_client, e_data) + + e_data['when'] = {'start_date': 0, 'end_date': 'a'} + with pytest.raises(CreateError): + _verify_create(default_namespace.public_id, api_client, e_data) + + e_data['when'] = {'start_date': '2014-08-27', + 'end_date': '2014-08-28', + 'date': '2014-08-27'} + with pytest.raises(CreateError): + _verify_create(default_namespace.public_id, api_client, e_data) + + e_data['when'] = {'start_date': '2014-08-29', + 'end_date': '2014-08-28', + 'date': '2014-08-27'} + with pytest.raises(CreateError): + _verify_create(default_namespace.public_id, api_client, 
e_data) + + +def test_api_invalid_event_when_time_bad_params(db, api_client, calendar, + default_namespace): + e_data = { + 'title': 'Friday Office Party', + 'calendar_id': calendar.public_id, + } + + e_data['when'] = {'object': 'date', 'time': 0} + with pytest.raises(CreateError): + _verify_create(default_namespace.public_id, api_client, e_data) + + e_data['when'] = {'time': 'a'} + with pytest.raises(CreateError): + _verify_create(default_namespace.public_id, api_client, e_data) + + e_data['when'] = {'time': 0, 'date': '2014-08-23'} + with pytest.raises(CreateError): + _verify_create(default_namespace.public_id, api_client, e_data) + + +def test_api_invalid_event_when_date_bad_params(db, api_client, calendar, + default_namespace): + e_data = { + 'title': 'Friday Office Party', + 'calendar_id': calendar.public_id, + } + + e_data['when'] = {'object': 'time', 'date': 0} + with pytest.raises(CreateError): + _verify_create(default_namespace.public_id, api_client, e_data) + + e_data['when'] = {'date': 'j'} + with pytest.raises(CreateError): + _verify_create(default_namespace.public_id, api_client, e_data) + + +def test_api_event_when_update(db, api_client, calendar, default_namespace): + e_data = { + 'title': 'Friday Office Party', + 'location': 'home', + 'calendar_id': calendar.public_id, + } + + e_data['when'] = {'time': 0} + e_resp_data = _verify_create(default_namespace.public_id, api_client, + e_data) + e_id = e_resp_data['id'] + + e_update_data = {'when': {'time': 1}} + e_put_resp = api_client.put_data('/events/' + e_id, e_update_data) + e_put_data = json.loads(e_put_resp.data) + assert e_put_data['when']['object'] == 'time' + assert e_put_data['when']['time'] == e_update_data['when']['time'] diff --git a/inbox/test/api/test_events.py b/inbox/test/api/test_events.py new file mode 100644 index 000000000..e9917ba4a --- /dev/null +++ b/inbox/test/api/test_events.py @@ -0,0 +1,383 @@ +import json + +from inbox.api.ns_api import API_VERSIONS +from inbox.sqlalchemy_ext.util import generate_public_id +from inbox.models import Event, Calendar +from inbox.test.util.base import db, calendar, add_fake_event +from inbox.test.api.base import api_client + +__all__ = ['api_client', 'calendar', 'db'] + + +def test_create_event(db, api_client, calendar): + e_data = {'title': 'subj', 'description': 'body1', + 'calendar_id': calendar.public_id, + 'when': {'time': 1}, 'location': 'NylasHQ'} + e_data2 = {'title': 'subj2', 'description': 'body2', + 'calendar_id': calendar.public_id, + 'when': {'time': 1}, 'location': 'NylasHQ'} + api_client.post_data('/events', e_data) + api_client.post_data('/events', e_data2) + db.session.commit() + + +def test_api_list(db, api_client, calendar): + e_data = {'title': 'subj', 'description': 'body1', + 'calendar_id': calendar.public_id, + 'when': {'time': 1}, 'location': 'NylasHQ'} + e_data2 = {'title': 'subj2', 'description': 'body2', + 'calendar_id': calendar.public_id, + 'when': {'time': 1}, 'location': 'NylasHQ'} + api_client.post_data('/events', e_data) + api_client.post_data('/events', e_data2) + + event_list = api_client.get_data('/events') + event_titles = [event['title'] for event in event_list] + assert 'subj' in event_titles + assert 'subj2' in event_titles + + event_descriptions = [event['description'] for event in event_list] + assert 'body1' in event_descriptions + assert 'body2' in event_descriptions + + event_ids = [event['id'] for event in event_list] + + for e_id in event_ids: + ev = db.session.query(Event).filter_by(public_id=e_id).one() + 
db.session.delete(ev) + db.session.commit() + + +def test_api_get(db, api_client, calendar): + e_data = {'title': 'subj', 'when': {'time': 1}, + 'calendar_id': calendar.public_id, 'location': 'NylasHQ'} + e_data2 = {'title': 'subj2', 'when': {'time': 1}, + 'calendar_id': calendar.public_id, 'location': 'NylasHQ'} + api_client.post_data('/events', e_data) + api_client.post_data('/events', e_data2) + + event_list = api_client.get_data('/events') + + event_ids = [event['id'] for event in event_list] + + c1found = False + c2found = False + for c_id in event_ids: + event = api_client.get_data('/events/' + c_id) + + if event['title'] == 'subj': + c1found = True + + if event['title'] == 'subj2': + c2found = True + + assert c1found + assert c2found + + +def test_api_create(db, api_client, calendar, default_account): + e_data = { + 'title': 'Friday Office Party', + 'calendar_id': calendar.public_id, + 'when': {'time': 1407542195}, + 'location': 'Nylas HQ', + } + + e_resp = api_client.post_data('/events', e_data) + e_resp_data = json.loads(e_resp.data) + assert e_resp_data['object'] == 'event' + assert e_resp_data['account_id'] == default_account.namespace.public_id + assert e_resp_data['title'] == e_data['title'] + assert e_resp_data['location'] == e_data['location'] + assert e_resp_data['when']['time'] == e_data['when']['time'] + assert 'id' in e_resp_data + e_id = e_resp_data['id'] + e_get_resp = api_client.get_data('/events/' + e_id) + + assert e_get_resp['object'] == 'event' + assert e_get_resp['account_id'] == default_account.namespace.public_id + assert e_get_resp['id'] == e_id + assert e_get_resp['title'] == e_data['title'] + assert e_get_resp['when']['time'] == e_data['when']['time'] + + +def test_api_create_no_title(db, api_client, calendar, default_account): + e_data = { + 'title': '', + 'calendar_id': calendar.public_id, + 'when': {'time': 1407542195}, + } + + e_resp = api_client.post_data('/events', e_data) + e_resp_data = json.loads(e_resp.data) + assert e_resp_data['object'] == 'event' + assert e_resp_data['account_id'] == default_account.namespace.public_id + assert e_resp_data['title'] == e_data['title'] + assert e_resp_data['when']['time'] == e_data['when']['time'] + assert 'id' in e_resp_data + e_id = e_resp_data['id'] + e_get_resp = api_client.get_data('/events/' + e_id) + + assert e_get_resp['object'] == 'event' + assert e_get_resp['account_id'] == default_account.namespace.public_id + assert e_get_resp['id'] == e_id + assert e_get_resp['title'] == e_data['title'] + assert e_get_resp['when']['time'] == e_data['when']['time'] + + +def test_api_update_title(db, api_client, calendar, default_account): + e_data = { + 'title': '', + 'calendar_id': calendar.public_id, + 'when': {'time': 1407542195}, + } + + e_resp = api_client.post_data('/events', e_data) + e_resp_data = json.loads(e_resp.data) + assert e_resp_data['object'] == 'event' + assert e_resp_data['account_id'] == default_account.namespace.public_id + assert e_resp_data['title'] == e_data['title'] + assert e_resp_data['when']['time'] == e_data['when']['time'] + assert 'id' in e_resp_data + e_id = e_resp_data['id'] + + e_update_data = {'title': 'new title'} + e_put_resp = api_client.put_data('/events/' + e_id, e_update_data) + e_put_data = json.loads(e_put_resp.data) + + assert e_put_data['object'] == 'event' + assert e_put_data['account_id'] == default_account.namespace.public_id + assert e_put_data['id'] == e_id + assert e_put_data['title'] == 'new title' + assert e_put_data['when']['object'] == 'time' + assert 
e_put_data['when']['time'] == e_data['when']['time'] + + +import pytest +@pytest.mark.only +def test_api_pessimistic_update(db, api_client, calendar, default_account): + e_data = { + 'title': '', + 'calendar_id': calendar.public_id, + 'when': {'time': 1407542195}, + } + + e_resp = api_client.post_data('/events', e_data, + headers={ "Api-Version": API_VERSIONS[1] }) + + e_resp_data = json.loads(e_resp.data) + assert e_resp_data['object'] == 'event' + assert e_resp_data['account_id'] == default_account.namespace.public_id + assert e_resp_data['title'] == e_data['title'] + assert e_resp_data['when']['time'] == e_data['when']['time'] + assert 'id' in e_resp_data + e_id = e_resp_data['id'] + + e_update_data = {'title': 'new title'} + e_put_resp = api_client.put_data('/events/' + e_id, e_update_data, + headers={ "Api-Version": API_VERSIONS[1] }) + + e_put_data = json.loads(e_put_resp.data) + + assert e_put_data['object'] == 'event' + assert e_put_data['account_id'] == default_account.namespace.public_id + assert e_put_data['id'] == e_id + assert e_put_data['title'] == '' + assert e_put_data['when']['object'] == 'time' + assert e_put_data['when']['time'] == e_data['when']['time'] + + +def test_api_update_invalid(db, api_client, calendar): + e_update_data = {'title': 'new title'} + e_id = generate_public_id() + e_put_resp = api_client.put_data('/events/' + e_id, e_update_data) + assert e_put_resp.status_code != 200 + + +def test_api_delete(db, api_client, calendar, default_account): + e_data = { + 'title': '', + 'calendar_id': calendar.public_id, + 'when': {'time': 1407542195}, + } + + e_resp = api_client.post_data('/events', e_data) + e_resp_data = json.loads(e_resp.data) + assert e_resp_data['object'] == 'event' + assert e_resp_data['title'] == e_data['title'] + assert e_resp_data['when']['time'] == e_data['when']['time'] + assert 'id' in e_resp_data + e_id = e_resp_data['id'] + + e_delete_resp = api_client.delete('/events/' + e_id) + assert e_delete_resp.status_code == 200 + + e_resp = api_client.get_data('/events/' + e_id) + assert e_resp['status'] == 'cancelled' + + +def test_api_pessimistic_delete(db, api_client, calendar, default_account): + e_data = { + 'title': '', + 'calendar_id': calendar.public_id, + 'when': {'time': 1407542195}, + } + + e_resp = api_client.post_data('/events', e_data, + headers={ "Api-Version": API_VERSIONS[1] }) + e_resp_data = json.loads(e_resp.data) + assert e_resp_data['object'] == 'event' + assert e_resp_data['title'] == e_data['title'] + assert e_resp_data['when']['time'] == e_data['when']['time'] + assert 'id' in e_resp_data + e_id = e_resp_data['id'] + + e_delete_resp = api_client.delete('/events/' + e_id, + headers={ "Api-Version": API_VERSIONS[1] }) + assert e_delete_resp.status_code == 200 + + e_resp = api_client.get_data('/events/' + e_id) + assert e_resp['status'] == 'confirmed' + + +def test_api_delete_invalid(db, api_client, calendar): + e_id = 'asdf' + resp = api_client.delete('/events/' + e_id) + assert resp.status_code != 200 + + e_id = generate_public_id() + resp = api_client.delete('/events/' + e_id) + assert resp.status_code != 200 + + +def test_api_update_read_only(db, api_client, calendar, default_namespace): + add_fake_event(db.session, default_namespace.id, + calendar=calendar, + read_only=True) + event_list = api_client.get_data('/events') + + read_only_event = None + for e in event_list: + if e['read_only']: + read_only_event = e + break + + assert read_only_event + + e_id = read_only_event['id'] + e_update_data = {'title': 'new title'} + 
e_put_resp = api_client.put_data('/events/' + e_id, e_update_data) + assert e_put_resp.status_code != 200 + + +def test_api_filter(db, api_client, calendar, default_namespace): + cal = Calendar(namespace_id=default_namespace.id, + uid='uid', + provider_name='Nylas', + name='Climbing Schedule') + db.session.add(cal) + db.session.commit() + cal_id = cal.public_id + + e1_data = {'calendar_id': cal_id, + 'title': 'Normal Party', + 'description': 'Everyone Eats Cake', + 'when': {'time': 1}, + 'location': 'Normal Town'} + post_1 = api_client.post_data('/events', e1_data) + assert post_1.status_code == 200 + + e2_data = {'calendar_id': cal_id, + 'title': 'Hipster Party', + 'description': 'Everyone Eats Kale', + 'when': {'time': 3}, + 'location': 'Hipster Town'} + post_2 = api_client.post_data('/events', e2_data) + assert post_2.status_code == 200 + + # This event exists to test for unicode handling. + e3_data = {'calendar_id': cal_id, + 'title': u'Unicode Party \U0001F389', + 'description': u'Everyone Eats Unicode Tests \u2713', + 'when': {'start_time': 2678401, + 'end_time': 5097601}, + 'location': u'Unicode Castle \U0001F3F0'} + event_3 = api_client.post_data('/events', e3_data) + assert event_3.status_code == 200 + e3_id = json.loads(event_3.data)['id'] + + events = api_client.get_data('/events?offset=%s' % '1') + assert len(events) == 2 + + events = api_client.get_data('/events?limit=%s' % '1') + assert len(events) == 1 + + # Test description queries: all, some, unicode, none + events = api_client.get_data('/events?description=%s' % 'Everyone Eats') + assert len(events) == 3 + + events = api_client.get_data('/events?description=%s' % 'Cake') + assert len(events) == 1 + + events = api_client.get_data('/events?description=%s' % u'\u2713') + assert len(events) == 1 + + events = api_client.get_data('/events?description=%s' % 'bad') + assert len(events) == 0 + + # Test title queries: all, some, unicode, none + events = api_client.get_data('/events?title=%s' % 'Party') + assert len(events) == 3 + + events = api_client.get_data('/events?title=%s' % 'Hipster') + assert len(events) == 1 + + events = api_client.get_data('/events?title=%s' % u'\U0001F389') + assert len(events) == 1 + + events = api_client.get_data('/events?title=%s' % 'bad') + assert len(events) == 0 + + # Test location queries: all, some, unicode, none + events = api_client.get_data('/events?location=%s' % 'o') + assert len(events) == 3 + + events = api_client.get_data('/events?location=%s' % 'Town') + assert len(events) == 2 + + events = api_client.get_data('/events?location=%s' % u'\U0001F3F0') + assert len(events) == 1 + + events = api_client.get_data('/events?location=%s' % 'bad') + assert len(events) == 0 + + # Test ID queries + _filter = 'event_id={}'.format(e3_id) + events = api_client.get_data('/events?' + _filter) + assert len(events) == 1 + + # Test time queries + _filter = 'starts_before=2' + events = api_client.get_data('/events?' + _filter) + assert len(events) == 1 + + _filter = 'starts_after=2' + events = api_client.get_data('/events?' + _filter) + assert len(events) == 2 + + _filter = 'ends_before=2700000' + events = api_client.get_data('/events?' + _filter) + assert len(events) == 2 + + _filter = 'ends_after=2700000' + events = api_client.get_data('/events?' + _filter) + assert len(events) == 1 + + # Test calendar queries + _filter = 'calendar_id={}'.format(cal_id) + events = api_client.get_data('/events?' 
+ _filter) + assert len(events) == 3 + + _filter = 'calendar_id=0000000000000000000000000' + events = api_client.get_data('/events?' + _filter) + assert len(events) == 0 diff --git a/inbox/test/api/test_events_recurring.py b/inbox/test/api/test_events_recurring.py new file mode 100644 index 000000000..a74f77b0a --- /dev/null +++ b/inbox/test/api/test_events_recurring.py @@ -0,0 +1,186 @@ +# flake8: noqa: F401 +import arrow +import urllib +import pytest +from inbox.models import Event, Calendar +from inbox.test.api.base import api_client +from inbox.test.util.base import message + +__all__ = ['api_client'] + + +@pytest.fixture(params=[{"all_day": True}, {"all_day": False}]) +def recurring_event(db, default_namespace, request): + params = request.param + all_day = params.get('all_day', False) + + rrule = ["RRULE:FREQ=WEEKLY", "EXDATE:20150324T013000,20150331T013000Z"] + cal = db.session.query(Calendar).filter_by( + namespace_id=default_namespace.id).order_by('id').first() + ev = Event(namespace_id=default_namespace.id, + calendar=cal, + title='recurring-weekly', + description='', + uid='recurapitest', + location='', + busy=False, + read_only=False, + reminders='', + recurrence=rrule, + start=arrow.get(2015, 3, 17, 1, 30, 00), + end=arrow.get(2015, 3, 17, 1, 45, 00), + all_day=all_day, + is_owner=True, + participants=[], + provider_name='inbox', + raw_data='', + original_start_tz='America/Los_Angeles', + original_start_time=None, + master_event_uid=None, + source='local') + db.session.add(ev) + db.session.commit() + return ev + + +def test_api_expand_recurring(db, api_client, recurring_event): + event = recurring_event + + events = api_client.get_data('/events?expand_recurring=false') + assert len(events) == 1 + # Make sure the recurrence info is on the recurring event + for e in events: + if e['title'] == 'recurring-weekly': + assert e.get('recurrence') is not None + + thirty_weeks = event.start.replace(weeks=+30).isoformat() + starts_after = event.start.replace(days=-1).isoformat() + recur = 'expand_recurring=true&starts_after={}&ends_before={}'.format( + urllib.quote_plus(starts_after), urllib.quote_plus(thirty_weeks)) + all_events = api_client.get_data('/events?' + recur) + + if not event.all_day: + assert len(all_events) == 28 + + # the ordering should be correct + prev = all_events[0]['when']['start_time'] + for e in all_events[1:]: + assert e['when']['start_time'] > prev + prev = e['when']['start_time'] + + # Check that the parent event recurring id is included + # too. + assert e['calendar_id'] == recurring_event.calendar.public_id + + events = api_client.get_data('/events?' + recur + '&view=count') + assert events.get('count') == 28 + else: + # Since an all-day event starts at 00:00 we're returning one + # more event. + assert len(all_events) == 29 + # the ordering should be correct + prev = all_events[0]['when']['date'] + for e in all_events[1:]: + assert e['when']['date'] > prev + prev = e['when']['date'] + + # Check that the parent event recurring id is included + # too. + assert e['calendar_id'] == recurring_event.calendar.public_id + + events = api_client.get_data('/events?' + recur + '&view=count') + assert events.get('count') == 29 + + events = api_client.get_data('/events?' + recur + '&limit=5') + assert len(events) == 5 + + events = api_client.get_data('/events?' 
+ recur + '&offset=5') + assert events[0]['id'] == all_events[5]['id'] + + +def urlsafe(dt): + return urllib.quote_plus(dt.isoformat()) + + +def test_api_expand_recurring_before_after(db, api_client, recurring_event): + event = recurring_event + starts_after = event.start.replace(weeks=+15) + ends_before = starts_after.replace(days=+1) + + recur = 'expand_recurring=true&starts_after={}&ends_before={}'.format( + urlsafe(starts_after), urlsafe(ends_before)) + all_events = api_client.get_data('/events?' + recur) + assert len(all_events) == 1 + + recur = 'expand_recurring=true&starts_after={}&starts_before={}'.format( + urlsafe(starts_after), urlsafe(ends_before)) + all_events = api_client.get_data('/events?' + recur) + assert len(all_events) == 1 + + recur = 'expand_recurring=true&ends_after={}&starts_before={}'.format( + urlsafe(starts_after), urlsafe(ends_before)) + all_events = api_client.get_data('/events?' + recur) + assert len(all_events) == 1 + + recur = 'expand_recurring=true&ends_after={}&ends_before={}'.format( + urlsafe(starts_after), urlsafe(ends_before)) + all_events = api_client.get_data('/events?' + recur) + assert len(all_events) == 1 + + +def test_api_override_serialization(db, api_client, default_namespace, + recurring_event): + event = recurring_event + + override = Event(original_start_time=event.start, + master_event_uid=event.uid, + namespace_id=default_namespace.id, + calendar_id=event.calendar_id) + override.update(event) + override.uid = event.uid + "_" + event.start.strftime("%Y%m%dT%H%M%SZ") + override.master = event + override.master_event_uid = event.uid + override.cancelled = True + db.session.add(override) + db.session.commit() + + filter = 'starts_after={}&ends_before={}'.format( + urlsafe(event.start.replace(hours=-1)), + urlsafe(event.start.replace(weeks=+1))) + events = api_client.get_data('/events?' + filter) + # We should have the base event and the override back, but no extras; + # this allows clients to do their own expansion, should they ever desire + # to experience the joy that is RFC 2445 section 4.8.5.4. + assert len(events) == 2 + assert events[0].get('object') == 'event' + assert events[0].get('recurrence') is not None + assert events[1].get('object') == 'event' + assert events[1].get('status') == 'cancelled' + + +def test_api_expand_recurring_message(db, api_client, message, + recurring_event): + # This is a regression test for https://phab.nylas.com/T3556 + # ("InflatedEvent should not be committed" exception in API"). 
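+ # (Expanding a recurring event that is linked to a message used to raise
+ # that exception; here we only check that the expanded listing succeeds and
+ # that the inflated copies carry a master_event_id but no message_id.)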
+ event = recurring_event + event.message = message + db.session.commit() + + events = api_client.get_data('/events?expand_recurring=false') + assert len(events) == 1 + + # Make sure the recurrence info is on the recurring event + for e in events: + if e['title'] == 'recurring-weekly': + assert e.get('recurrence') is not None + assert e.get('message_id') is not None + + r = api_client.get_raw('/events?expand_recurring=true') + assert r.status_code == 200 + + all_events = api_client.get_data('/events?expand_recurring=true') + assert len(all_events) != 0 + + for event in all_events: + assert event['master_event_id'] is not None + assert 'message_id' not in event diff --git a/inbox/test/api/test_files.py b/inbox/test/api/test_files.py new file mode 100644 index 000000000..99c670a93 --- /dev/null +++ b/inbox/test/api/test_files.py @@ -0,0 +1,200 @@ +# -*- coding: utf-8 -*- +import os +import md5 +import json +import mock + +from datetime import datetime + +import pytest +from inbox.models import Block, Part +from inbox.test.api.base import api_client +from inbox.util.testutils import FILENAMES + +__all__ = ['api_client'] + + +@pytest.fixture +def draft(db, default_account): + return { + 'subject': 'Draft test at {}'.format(datetime.utcnow()), + 'body': '
Sea, birds and sand.
', + 'to': [{'name': 'The red-haired mermaid', + 'email': default_account.email_address}] + } + + +def test_file_filtering(api_client, uploaded_file_ids, draft): + # Attach the files to a draft and search there + draft['file_ids'] = uploaded_file_ids + r = api_client.post_data('/drafts', draft) + assert r.status_code == 200 + + draft_resp = json.loads(r.data) + assert len(draft_resp['files']) == len(uploaded_file_ids) + d_id = draft_resp['id'] + + results = api_client.get_data('/files') + assert len(results) == len(uploaded_file_ids) + + results = api_client.get_data('/files?message_id={}'.format(d_id)) + + assert all([d_id in f['message_ids'] for f in results]) + assert len(results) == len(uploaded_file_ids) + + results = api_client.get_data('/files?message_id={}&limit=1' + .format(d_id)) + assert len(results) == 1 + + results = api_client.get_data('/files?message_id={}&offset=2' + .format(d_id)) + assert len(results) == 3 + + results = api_client.get_data('/files?filename=LetMeSendYouEmail.wav') + assert len(results) == 1 + + results = api_client.get_data('/files?content_type=audio%2Fx-wav') + assert len(results) == 1 + + results = api_client.get_data('/files?content_type=image%2Fjpeg') + assert len(results) == 2 + + results = api_client.get_data( + '/files?content_type=image%2Fjpeg&view=count') + assert results["count"] == 2 + + results = api_client.get_data('/files?content_type=image%2Fjpeg&view=ids') + assert len(results) == 2 + + +def test_attachment_has_same_id(api_client, uploaded_file_ids, draft): + attachment_id = uploaded_file_ids.pop() + draft['file_ids'] = [attachment_id] + r = api_client.post_data('/drafts', draft) + assert r.status_code == 200 + draft_resp = json.loads(r.data) + assert attachment_id in [x['id'] for x in draft_resp['files']] + + +def test_delete(api_client, uploaded_file_ids, draft): + non_attachment_id = uploaded_file_ids.pop() + attachment_id = uploaded_file_ids.pop() + draft['file_ids'] = [attachment_id] + r = api_client.post_data('/drafts', draft) + assert r.status_code == 200 + + # Test that we can delete a non-attachment + r = api_client.delete('/files/{}'.format(non_attachment_id)) + assert r.status_code == 200 + + data = api_client.get_data('/files/{}'.format(non_attachment_id)) + assert data['message'].startswith("Couldn't find file") + + # Make sure that we cannot delete attachments + r = api_client.delete('/files/{}'.format(attachment_id)) + assert r.status_code == 400 + + data = api_client.get_data('/files/{}'.format(attachment_id)) + assert data['id'] == attachment_id + + +@pytest.mark.parametrize("filename", FILENAMES) +def test_get_with_id(api_client, uploaded_file_ids, filename): + # See comment in uploaded_file_ids() + if filename == 'piece-jointe.jpg': + filename = u'pièce-jointe.jpg' + elif filename == 'andra-moi-ennepe.txt': + filename = u'ἄνδρα μοι ἔννεπε' + elif filename == 'long-non-ascii-filename.txt': + filename = 100 * u'μ' + in_file = api_client.get_data(u'/files?filename={}'.format(filename))[0] + data = api_client.get_data('/files/{}'.format(in_file['id'])) + assert data['filename'] == filename + + +def test_get_invalid(api_client, uploaded_file_ids): + data = api_client.get_data('/files/0000000000000000000000000') + assert data['message'].startswith("Couldn't find file") + data = api_client.get_data('/files/!') + assert data['message'].startswith("Invalid id") + + data = api_client.get_data('/files/0000000000000000000000000/download') + assert data['message'].startswith("Couldn't find file") + data = 
api_client.get_data('/files/!/download') + assert data['message'].startswith("Invalid id") + + r = api_client.delete('/files/0000000000000000000000000') + assert r.status_code == 404 + r = api_client.delete('/files/!') + assert r.status_code == 400 + + +@pytest.mark.parametrize("filename", FILENAMES) +def test_download(api_client, uploaded_file_ids, filename): + # See comment in uploaded_file_ids() + original_filename = filename + if filename == 'piece-jointe.jpg': + filename = u'pièce-jointe.jpg' + elif filename == 'andra-moi-ennepe.txt': + filename = u'ἄνδρα μοι ἔννεπε' + elif filename == 'long-non-ascii-filename.txt': + filename = 100 * u'μ' + + in_file = api_client.get_data(u'/files?filename={}'.format(filename))[0] + data = api_client.get_raw('/files/{}/download'.format(in_file['id'])).data + + path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', + 'data', original_filename.encode('utf-8')) + local_data = open(path, 'rb').read() + local_md5 = md5.new(local_data).digest() + dl_md5 = md5.new(data).digest() + assert local_md5 == dl_md5 + + +@pytest.fixture(scope='function') +def fake_attachment(db, default_account, message): + block = Block() + namespace_id = default_account.namespace.id + block.namespace_id = namespace_id + block.filename = 'zambla.txt' + block.content_type = 'text/plain' + block.size = 32 + # This is sha256 of an attachment in our test email. + block.data_sha256 = '27dc8e801f962098fd4a741ccbd6ca24d42805df0b8035cfb881ad6e5a1bf4b2' + p = Part(block=block, message=message) + db.session.add(p) + db.session.commit() + return p + + +def test_direct_fetching(api_client, db, message, + fake_attachment, monkeypatch): + # Mark a file as missing and check that we try to + # fetch it from the remote provider. + get_mock = mock.Mock(return_value=None) + monkeypatch.setattr('inbox.util.blockstore.get_from_blockstore', + get_mock) + + save_mock = mock.Mock() + monkeypatch.setattr('inbox.util.blockstore.save_to_blockstore', + save_mock) + + # Mock the request to return the contents of an actual email. + path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', + 'data', 'raw_message_with_filename_attachment.txt') + data = "" + with open(path) as fd: + data = fd.read() + + raw_mock = mock.Mock(return_value=data) + monkeypatch.setattr('inbox.s3.backends.gmail.get_gmail_raw_contents', + raw_mock) + + resp = api_client.get_raw('/files/{}/download'.format(fake_attachment.block.public_id)) + + for m in [get_mock, save_mock, raw_mock]: + assert m.called + + # Check that we got back the right data, with the right headers. 
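+ # (get_from_blockstore was mocked to return None, so the download endpoint
+ # presumably falls back to the gmail raw-message fetch and re-saves the
+ # blob, which is why all three mocks must have been called.)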
+ assert resp.headers['Content-Disposition'] == 'attachment; filename=zambla.txt' + assert resp.data.decode("utf8") == u'Chuis pas rassur\xe9' diff --git a/inbox/test/api/test_filtering.py b/inbox/test/api/test_filtering.py new file mode 100644 index 000000000..12f9e43cb --- /dev/null +++ b/inbox/test/api/test_filtering.py @@ -0,0 +1,353 @@ +import json +import datetime +import calendar +from sqlalchemy import desc +from inbox.models import Message, Thread, Namespace, Block, Category +from inbox.util.misc import dt_to_timestamp +from inbox.test.util.base import (test_client, add_fake_message, + add_fake_thread) +from inbox.test.api.base import api_client + +__all__ = ['api_client', 'test_client'] + + +def test_filtering(db, api_client, default_namespace): + thread = add_fake_thread(db.session, default_namespace.id) + message = add_fake_message(db.session, default_namespace.id, thread, + to_addr=[('Bob', 'bob@foocorp.com')], + from_addr=[('Alice', 'alice@foocorp.com')], + subject='some subject') + message.categories.add( + Category(namespace_id=message.namespace_id, + name='inbox', display_name='Inbox', type_='label')) + thread.subject = message.subject + db.session.commit() + + t_start = dt_to_timestamp(thread.subjectdate) + t_lastmsg = dt_to_timestamp(thread.recentdate) + subject = message.subject + to_addr = message.to_addr[0][1] + from_addr = message.from_addr[0][1] + received_date = message.received_date + unread = not message.is_read + starred = message.is_starred + + results = api_client.get_data('/threads?thread_id={}' + .format(thread.public_id)) + assert len(results) == 1 + + results = api_client.get_data('/messages?thread_id={}' + .format(thread.public_id)) + assert len(results) == 1 + + results = api_client.get_data('/threads?cc={}' + .format(message.cc_addr)) + assert len(results) == 0 + + results = api_client.get_data('/messages?cc={}' + .format(message.cc_addr)) + assert len(results) == 0 + + results = api_client.get_data('/threads?bcc={}' + .format(message.bcc_addr)) + assert len(results) == 0 + + results = api_client.get_data('/messages?bcc={}' + .format(message.bcc_addr)) + assert len(results) == 0 + + results = api_client.get_data('/threads?filename=test') + assert len(results) == 0 + + results = api_client.get_data('/messages?filename=test') + assert len(results) == 0 + + results = api_client.get_data('/threads?started_after={}' + .format(t_start - 1)) + assert len(results) == 1 + + results = api_client.get_data('/messages?started_after={}' + .format(t_start - 1)) + assert len(results) == 1 + + results = api_client.get_data('/messages?last_message_before={}&limit=1' + .format(t_lastmsg + 1)) + assert len(results) == 1 + + results = api_client.get_data('/threads?last_message_before={}&limit=1' + .format(t_lastmsg + 1)) + assert len(results) == 1 + + results = api_client.get_data('/threads?in=inbox&limit=1') + assert len(results) == 1 + + results = api_client.get_data('/messages?in=inbox&limit=1') + assert len(results) == 1 + + results = api_client.get_data('/messages?in=banana%20rama') + assert len(results) == 0 + + results = api_client.get_data('/threads?subject={}'.format(subject)) + assert len(results) == 1 + + results = api_client.get_data('/messages?subject={}'.format(subject)) + assert len(results) == 1 + + results = api_client.get_data('/threads?unread={}'.format(unread)) + assert len(results) == 1 + + results = api_client.get_data('/messages?unread={}'.format((not unread))) + assert len(results) == 0 + + results = 
api_client.get_data('/threads?starred={}'.format((not starred))) + assert len(results) == 0 + + results = api_client.get_data('/messages?starred={}'.format(starred)) + assert len(results) == 1 + + for _ in range(3): + add_fake_message(db.session, default_namespace.id, + to_addr=[('', 'inboxapptest@gmail.com')], + thread=add_fake_thread(db.session, + default_namespace.id)) + + results = api_client.get_data('/messages?any_email={}'. + format('inboxapptest@gmail.com')) + assert len(results) > 1 + + # Test multiple any_email params + multiple_results = api_client.get_data('/messages?any_email={},{},{}'. + format('inboxapptest@gmail.com', + 'bob@foocorp.com', + 'unused@gmail.com')) + assert len(multiple_results) > len(results) + + # Check that we canonicalize when searching. + alternate_results = api_client.get_data('/threads?any_email={}'. + format('inboxapp.test@gmail.com')) + assert len(alternate_results) == len(results) + + results = api_client.get_data('/messages?from={}'.format(from_addr)) + assert len(results) == 1 + results = api_client.get_data('/threads?from={}'.format(from_addr)) + assert len(results) == 1 + + early_time = received_date - datetime.timedelta(seconds=1) + late_time = received_date + datetime.timedelta(seconds=1) + early_ts = calendar.timegm(early_time.utctimetuple()) + late_ts = calendar.timegm(late_time.utctimetuple()) + + results = api_client.get_data('/messages?subject={}&started_before={}'. + format(subject, early_ts)) + assert len(results) == 0 + results = api_client.get_data('/threads?subject={}&started_before={}'. + format(subject, early_ts)) + assert len(results) == 0 + + results = api_client.get_data('/messages?subject={}&started_before={}'. + format(subject, late_ts)) + assert len(results) == 1 + results = api_client.get_data('/threads?subject={}&started_before={}'. + format(subject, late_ts)) + assert len(results) == 1 + + results = api_client.get_data('/messages?subject={}&last_message_after={}'. + format(subject, early_ts)) + assert len(results) == 1 + results = api_client.get_data('/threads?subject={}&last_message_after={}'. + format(subject, early_ts)) + assert len(results) == 1 + + results = api_client.get_data('/messages?subject={}&last_message_after={}'. + format(subject, late_ts)) + assert len(results) == 0 + results = api_client.get_data('/threads?subject={}&last_message_after={}'. + format(subject, late_ts)) + assert len(results) == 0 + + results = api_client.get_data('/messages?subject={}&started_before={}'. + format(subject, early_ts)) + assert len(results) == 0 + results = api_client.get_data('/threads?subject={}&started_before={}'. + format(subject, early_ts)) + assert len(results) == 0 + + results = api_client.get_data('/messages?subject={}&started_before={}'. + format(subject, late_ts)) + assert len(results) == 1 + results = api_client.get_data('/threads?subject={}&started_before={}'. + format(subject, late_ts)) + assert len(results) == 1 + + results = api_client.get_data('/messages?from={}&to={}'. + format(from_addr, to_addr)) + assert len(results) == 1 + + results = api_client.get_data('/threads?from={}&to={}'. + format(from_addr, to_addr)) + assert len(results) == 1 + + results = api_client.get_data('/messages?to={}&limit={}&offset={}'. + format('inboxapptest@gmail.com', 2, 1)) + assert len(results) == 2 + + results = api_client.get_data('/threads?to={}&limit={}'. 
+ format('inboxapptest@gmail.com', 3)) + assert len(results) == 3 + + results = api_client.get_data('/threads?view=count') + + assert results['count'] == 4 + + results = api_client.get_data('/threads?view=ids&to={}&limit=3'. + format('inboxapptest@gmail.com', 3)) + + assert len(results) == 3 + assert all(isinstance(r, basestring) + for r in results), "Returns a list of string" + + +def test_query_target(db, api_client, thread, default_namespace): + cat = Category(namespace_id=default_namespace.id, + name='inbox', display_name='Inbox', type_='label') + for _ in range(3): + message = add_fake_message(db.session, default_namespace.id, thread, + to_addr=[('Bob', 'bob@foocorp.com')], + from_addr=[('Alice', 'alice@foocorp.com')], + subject='some subject') + message.categories.add(cat) + db.session.commit() + + results = api_client.get_data('/messages?in=inbox') + assert len(results) == 3 + + count = api_client.get_data('/messages?in=inbox&view=count') + assert count['count'] == 3 + + +def test_ordering(api_client, db, default_namespace): + for i in range(3): + thr = add_fake_thread(db.session, default_namespace.id) + received_date = (datetime.datetime.utcnow() + + datetime.timedelta(seconds=22 * (i + 1))) + add_fake_message(db.session, default_namespace.id, + thr, received_date=received_date) + ordered_results = api_client.get_data('/messages') + ordered_dates = [result['date'] for result in ordered_results] + assert ordered_dates == sorted(ordered_dates, reverse=True) + + ordered_results = api_client.get_data('/messages?limit=3') + expected_public_ids = [ + public_id for public_id, in + db.session.query(Message.public_id). + filter(Message.namespace_id == default_namespace.id). + order_by(desc(Message.received_date)).limit(3)] + assert expected_public_ids == [r['id'] for r in ordered_results] + + +def test_strict_argument_parsing(api_client): + r = api_client.get_raw('/threads?foo=bar') + assert r.status_code == 400 + + +def test_distinct_results(api_client, db, default_namespace): + """Test that limit and offset parameters work correctly when joining on + multiple matching messages per thread.""" + # Create a thread with multiple messages on it. 
+ first_thread = add_fake_thread(db.session, default_namespace.id) + add_fake_message(db.session, default_namespace.id, first_thread, + from_addr=[('', 'hello@example.com')], + received_date=datetime.datetime.utcnow(), + add_sent_category=True) + add_fake_message(db.session, default_namespace.id, first_thread, + from_addr=[('', 'hello@example.com')], + received_date=datetime.datetime.utcnow(), + add_sent_category=True) + + # Now create another thread with the same participants + older_date = datetime.datetime.utcnow() - datetime.timedelta(hours=1) + second_thread = add_fake_thread(db.session, default_namespace.id) + add_fake_message(db.session, default_namespace.id, second_thread, + from_addr=[('', 'hello@example.com')], + received_date=older_date, + add_sent_category=True) + add_fake_message(db.session, default_namespace.id, second_thread, + from_addr=[('', 'hello@example.com')], + received_date=older_date, + add_sent_category=True) + + second_thread.recentdate = older_date + db.session.commit() + + filtered_results = api_client.get_data('/threads?from=hello@example.com' + '&limit=1&offset=0') + assert len(filtered_results) == 1 + assert filtered_results[0]['id'] == first_thread.public_id + + filtered_results = api_client.get_data('/threads?from=hello@example.com' + '&limit=1&offset=1') + assert len(filtered_results) == 1 + assert filtered_results[0]['id'] == second_thread.public_id + + filtered_results = api_client.get_data('/threads?from=hello@example.com' + '&limit=2&offset=0') + assert len(filtered_results) == 2 + + filtered_results = api_client.get_data('/threads?from=hello@example.com' + '&limit=2&offset=1') + assert len(filtered_results) == 1 + + # Ensure that it works when using the _in filter + filtered_results = api_client.get_data('/threads?in=sent' + '&limit=2&offset=0') + assert len(filtered_results) == 2 + + filtered_results = api_client.get_data('/threads?in=sent' + '&limit=1&offset=0') + assert len(filtered_results) == 1 + + +def test_filtering_accounts(db, test_client, default_namespace): + all_accounts = json.loads(test_client.get('/accounts/?limit=100').data) + email = all_accounts[0]['email_address'] + + some_accounts = json.loads(test_client.get('/accounts/?offset=1&limit=99').data) + assert len(some_accounts) == len(all_accounts) - 1 + + no_all_accounts = json.loads(test_client.get('/accounts/?limit=0').data) + assert no_all_accounts == [] + + all_accounts = json.loads(test_client.get('/accounts/?limit=1').data) + assert len(all_accounts) == 1 + + filter_ = '?email_address={}'.format(email) + all_accounts = json.loads(test_client.get('/accounts/' + filter_).data) + assert all_accounts[0]['email_address'] == email + + filter_ = '?email_address=unknown@email.com' + accounts = json.loads(test_client.get('/accounts/' + filter_).data) + assert len(accounts) == 0 + + +def test_namespace_limiting(db, api_client, default_namespaces): + dt = datetime.datetime.utcnow() + subject = dt.isoformat() + namespaces = db.session.query(Namespace).all() + assert len(namespaces) > 1 + for ns in namespaces: + thread = Thread(namespace=ns, subjectdate=dt, recentdate=dt, + subject=subject) + add_fake_message(db.session, ns.id, thread, received_date=dt, + subject=subject) + db.session.add(Block(namespace=ns, filename=subject)) + db.session.commit() + + for ns in namespaces: + r = api_client.get_data('/threads?subject={}'.format(subject)) + assert len(r) == 1 + + r = api_client.get_data('/messages?subject={}'.format(subject)) + assert len(r) == 1 + + r = 
api_client.get_data('/files?filename={}'.format(subject)) + assert len(r) == 1 diff --git a/inbox/test/api/test_folders.py b/inbox/test/api/test_folders.py new file mode 100644 index 000000000..61e3cab07 --- /dev/null +++ b/inbox/test/api/test_folders.py @@ -0,0 +1,74 @@ +# flake8: noqa: F401, F811 +import mock +import gevent + +from inbox.test.api.base import imap_api_client +from inbox.test.util.base import add_fake_folder, add_fake_category +from inbox.util.testutils import mock_imapclient # noqa + + +# Check that folders of the form INBOX.A.B get converted by the API +# to A/B. +def test_folder_stripping(db, generic_account, imap_api_client): + # Check that regular IMAP paths get converted to unix-style paths + generic_account.folder_separator = '.' + folder = add_fake_folder(db.session, generic_account) + add_fake_category(db.session, generic_account.namespace.id, + 'INBOX.Red.Carpet') + + r = imap_api_client.get_data('/folders') + for folder in r: + if 'Carpet' in folder['display_name']: + assert folder['display_name'] == 'INBOX/Red/Carpet' + + # Check that if we define an account-level prefix, it gets stripped + # from the API response. + generic_account.folder_prefix = 'INBOX.' + db.session.commit() + + r = imap_api_client.get_data('/folders') + for folder in r: + if 'Carpet' in folder['display_name']: + assert folder['display_name'] == 'Red/Carpet' + + # Test again with a prefix without integrated separator: + generic_account.folder_prefix = 'INBOX' + db.session.commit() + + r = imap_api_client.get_data('/folders') + for folder in r: + if 'Carpet' in folder['display_name']: + assert folder['display_name'] == 'Red/Carpet' + + +# This test is kind of complicated --- basically we mock +# the output of the IMAP NAMESPACE command to check that +# we are correctly translating Unix-style paths to IMAP +# paths. 
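For clarity: the test below posts a folder named 'Taxes/Accounting' and expects create_folder to be called with 'INBOX.Taxes.Accounting'. A minimal sketch of the translation being exercised, assuming it reduces to prepending the account's folder_prefix and replacing '/' with its folder_separator (to_imap_path is a hypothetical helper for illustration only, not part of this patch):

    def to_imap_path(display_name, prefix=u'INBOX.', separator=u'.'):
        # e.g. 'Taxes/Accounting' -> 'INBOX.Taxes.Accounting'
        return prefix + display_name.replace(u'/', separator)

    assert to_imap_path(u'Taxes/Accounting') == u'INBOX.Taxes.Accounting'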
+def test_folder_name_translation(empty_db, generic_account, imap_api_client, + mock_imapclient, monkeypatch): + from inbox.transactions.actions import SyncbackService + syncback = SyncbackService(syncback_id=0, process_number=0, + total_processes=1, num_workers=2) + + imap_namespaces = (((u'INBOX.', u'.'),),) + mock_imapclient.create_folder = mock.Mock() + mock_imapclient.namespace = mock.Mock(return_value=imap_namespaces) + + folder_list = [(('\\HasChildren',), '.', u'INBOX')] + mock_imapclient.list_folders = mock.Mock(return_value=folder_list) + mock_imapclient.has_capability = mock.Mock(return_value=True) + + folder_prefix, folder_separator = imap_namespaces[0][0] + generic_account.folder_prefix = folder_prefix + generic_account.folder_separator = folder_separator + empty_db.session.commit() + + folder_json = {'display_name': 'Taxes/Accounting'} + imap_api_client.post_data('/folders', folder_json) + + syncback._process_log() + syncback._restart_workers() + while not syncback.task_queue.empty(): + gevent.sleep(0.1) + mock_imapclient.create_folder.assert_called_with('INBOX.Taxes.Accounting') diff --git a/inbox/test/api/test_folders_labels.py b/inbox/test/api/test_folders_labels.py new file mode 100644 index 000000000..6c62c7a27 --- /dev/null +++ b/inbox/test/api/test_folders_labels.py @@ -0,0 +1,304 @@ +# flake8: noqa: F811 +import json +from datetime import datetime + +import pytest +from freezegun import freeze_time + +from inbox.api.ns_api import API_VERSIONS +from inbox.models.category import Category, EPOCH + +from inbox.test.util.base import (add_fake_message, thread, add_fake_thread, + generic_account, gmail_account) +from inbox.test.api.base import api_client, new_api_client + + +__all__ = ['api_client', 'thread', 'generic_account', 'gmail_account'] + + +@pytest.fixture +def folder_client(db, generic_account): + api_client = new_api_client(db, generic_account.namespace) + + api_client.post_data('/folders/', + {"display_name": "Test_Folder"}) + return api_client + + +@pytest.fixture +def label_client(db, gmail_account): + api_client = new_api_client(db, gmail_account.namespace) + + # Whereas calling generic_account always makes a new IMAP account, + # calling gmail_account checks first to see if there's an existing + # Gmail account and uses it if so. This can cause namespace + # conflicts if a label is "created" more than once. Since + # labels can't be deleted and then re-created, this fixture only + # makes a new label if there are no existing labels. 
+ g_data = api_client.get_raw('/labels/') + if not json.loads(g_data.data): + api_client.post_data('/labels/', + {"display_name": "Test_Label"}) + return api_client + + +def test_folder_post(db, generic_account): + api_client = new_api_client(db, generic_account.namespace) + po_data = api_client.post_data('/folders/', + {"display_name": "Test_Folder"}) + assert po_data.status_code == 200 + + category_id = json.loads(po_data.data)['id'] + category = db.session.query(Category).filter( + Category.public_id == category_id).one() + assert category.display_name == 'Test_Folder' + assert category.name == '' + assert category.type == 'folder' + assert category.deleted_at == EPOCH + assert category.is_deleted is False + + +def test_label_post(db, gmail_account): + api_client = new_api_client(db, gmail_account.namespace) + po_data = api_client.post_data('/labels/', + {"display_name": "Test_Label"}) + assert po_data.status_code == 200 + + category_id = json.loads(po_data.data)['id'] + category = db.session.query(Category).filter( + Category.public_id == category_id).one() + assert category.display_name == 'Test_Label' + assert category.name == '' + assert category.type == 'label' + assert category.deleted_at == EPOCH + assert category.is_deleted is False + + +def test_folder_get(folder_client): + g_data = folder_client.get_raw('/folders/') + assert g_data.status_code == 200 + + gen_folder = json.loads(g_data.data)[0] + gid_data = folder_client.get_raw('/folders/{}'.format(gen_folder['id'])) + assert gid_data.status_code == 200 + + +def test_label_get(label_client): + g_data = label_client.get_raw('/labels/') + assert g_data.status_code == 200 + + gmail_label = json.loads(g_data.data)[0] + gid_data = label_client.get_raw('/labels/{}'.format(gmail_label['id'])) + assert gid_data.status_code == 200 + + +@pytest.mark.parametrize("api_version", API_VERSIONS) +def test_folder_put(db, folder_client, api_version): + headers = dict() + headers['Api-Version'] = api_version + + # GET request for the folder ID + g_data = folder_client.get_raw('/folders/') + gen_folder = json.loads(g_data.data)[0] + + pu_data = folder_client.put_data('/folders/{}'.format(gen_folder['id']), + {"display_name": "Test_Folder_Renamed"}, + headers=headers) + assert pu_data.status_code == 200 + + if api_version == API_VERSIONS[0]: + assert json.loads(pu_data.data)['display_name'] == 'Test_Folder_Renamed' + + category_id = gen_folder['id'] + category = db.session.query(Category).filter( + Category.public_id == category_id).one() + assert category.display_name == 'Test_Folder_Renamed' + assert category.name == '' + else: + assert json.loads(pu_data.data)['display_name'] == gen_folder['display_name'] + + +@pytest.mark.parametrize("api_version", API_VERSIONS) +def test_label_put(db, label_client, api_version): + headers = dict() + headers['Api-Version'] = api_version + + # GET request for the label ID + g_data = label_client.get_raw('/labels/') + gmail_label = json.loads(g_data.data)[0] + + new_name = "Test_Label_Renamed {}".format(api_version) + pu_data = label_client.put_data('/labels/{}'.format(gmail_label['id']), + {"display_name": new_name}, headers=headers) + assert pu_data.status_code == 200 + + if api_version == API_VERSIONS[0]: + assert json.loads(pu_data.data)['display_name'] == new_name + + category_id = gmail_label['id'] + category = db.session.query(Category).filter( + Category.public_id == category_id).one() + assert category.display_name == new_name + assert category.name == '' + else: + # non-optimistic update + assert 
json.loads(pu_data.data)['display_name'] == gmail_label['display_name'] + + +@pytest.mark.parametrize("api_version", API_VERSIONS) +def test_folder_delete(db, generic_account, folder_client, api_version): + headers = dict() + headers['Api-Version'] = api_version + + # Make a new message + generic_thread = add_fake_thread(db.session, generic_account.namespace.id) + gen_message = add_fake_message(db.session, + generic_account.namespace.id, + generic_thread) + g_data = folder_client.get_raw('/folders/') + # Add message to folder + generic_folder = json.loads(g_data.data)[0] + data = {"folder_id": generic_folder['id']} + folder_client.put_data('/messages/{}'.format(gen_message.public_id), data) + + # Test that DELETE requests 403 on folders with items in them + d_data = folder_client.delete('/folders/{}'.format(generic_folder['id'])) + assert d_data.status_code == 400 + + # Make an empty folder + resp = folder_client.post_data('/folders/', + {"display_name": "Empty_Folder"}) + empty_folder = json.loads(resp.data) + # Test that DELETE requests delete empty folders + d_data = folder_client.delete('/folders/{}'.format(empty_folder['id'])) + assert d_data.status_code == 200 + + if api_version == API_VERSIONS[0]: + # Did we update things optimistically? + category_id = empty_folder['id'] + category = db.session.query(Category).filter( + Category.public_id == category_id).one() + assert category.deleted_at != EPOCH + assert category.is_deleted is True + + db.session.rollback() + + +@pytest.mark.parametrize("api_version", API_VERSIONS) +def test_label_delete(db, gmail_account, label_client, api_version): + headers = dict() + headers['Api-Version'] = api_version + + # Make a new message + gmail_thread = add_fake_thread(db.session, gmail_account.namespace.id) + gmail_message = add_fake_message(db.session, + gmail_account.namespace.id, gmail_thread) + g_data = label_client.get_raw('/labels/', headers=headers) + # Add label to message + gmail_label = json.loads(g_data.data)[0] + data = {"labels": [gmail_label['id']]} + label_client.put_data('/messages/{}'.format(gmail_message.public_id), data, + headers=headers) + + # DELETE requests should work on labels whether or not messages have them + d_data = label_client.delete('/labels/{}'.format(gmail_label['id']), + headers=headers) + assert d_data.status_code == 200 + + if api_version == API_VERSIONS[0]: + # Optimistic update. 
+ category_id = gmail_label['id'] + category = db.session.query(Category).filter( + Category.public_id == category_id).one() + assert category.deleted_at != EPOCH + assert category.is_deleted is True + + +def test_folder_exclusivity(folder_client): + g_data = folder_client.get_raw('/folders/') + generic_folder = json.loads(g_data.data)[0] + # These requests to /labels/ should all 404, since the account uses folders + po_data = folder_client.post_data('/labels/', + {"display_name": "Test_E_Label"}) + assert po_data.status_code == 404 + pu_data = folder_client.put_data('/labels/{}'.format(generic_folder['id']), + {"display_name": "Test_E_Folder_Renamed"}) + assert pu_data.status_code == 404 + g_data = folder_client.get_raw('/labels/') + assert g_data.status_code == 404 + gid_data = folder_client.get_raw('/labels/{}'.format(generic_folder['id'])) + assert gid_data.status_code == 404 + d_data = folder_client.delete('/labels/{}'.format(generic_folder['id'])) + assert d_data.status_code == 404 + + +def test_label_exclusivity(label_client): + g_data = label_client.get_raw('/labels/') + gmail_label = json.loads(g_data.data)[0] + # These requests to /folders/ should all 404, since the account uses labels + po_data = label_client.post_data('/folders/', + {"display_name": "Test_E_Folder"}) + assert po_data.status_code == 404 + pu_data = label_client.put_data('/folders/{}'.format(gmail_label['id']), + {"display_name": "Test_E _Label_Renamed"}) + assert pu_data.status_code == 404 + g_data = label_client.get_raw('/folders/') + assert g_data.status_code == 404 + gid_data = label_client.get_raw('/folders/{}'.format(gmail_label['id'])) + assert gid_data.status_code == 404 + d_data = label_client.delete('/folders/{}'.format(gmail_label['id'])) + assert d_data.status_code == 404 + + +def test_duplicate_folder_create(folder_client): + # Creating a folder with an existing, non-deleted folder's name + # returns an HTTP 400. + data = folder_client.get_raw('/folders/') + folder = json.loads(data.data)[0] + data = folder_client.post_data('/folders/', + {"display_name": folder['display_name']}) + assert data.status_code == 400 + + # Deleting the folder and re-creating (with the same name) succeeds. + # Doing so repeatedly succeeds IFF the delete/ re-create requests are + # spaced >= 1 second apart (MySQL rounds up microseconds). + initial_ts = datetime.utcnow() + with freeze_time(initial_ts) as frozen_ts: + data = folder_client.delete('/folders/{}'.format(folder['id'])) + assert data.status_code == 200 + + data = folder_client.post_data('/folders/', + {"display_name": folder['display_name']}) + assert data.status_code == 200 + new_folder = json.loads(data.data) + assert new_folder['display_name'] == folder['display_name'] + assert new_folder['id'] != folder['id'] + + folder = new_folder + frozen_ts.tick() + + +def test_duplicate_label_create(label_client): + data = label_client.get_raw('/labels/') + label = json.loads(data.data)[0] + data = label_client.post_data('/labels/', + {"display_name": label['display_name']}) + assert data.status_code == 400 + + # Deleting the label and re-creating (with the same name) succeeds. + # Doing so repeatedly succeeds IFF the delete/ re-create requests are + # spaced >= 1 second apart (MySQL rounds up microseconds). 
+ initial_ts = datetime.utcnow() + with freeze_time(initial_ts) as frozen_ts: + data = label_client.delete('/labels/{}'.format(label['id'])) + assert data.status_code == 200 + + data = label_client.post_data('/labels/', + {"display_name": label['display_name']}) + assert data.status_code == 200 + new_label = json.loads(data.data) + assert new_label['display_name'] == label['display_name'] + assert new_label['id'] != label['id'] + + label = new_label + frozen_ts.tick() diff --git a/inbox/test/api/test_invalid_account.py b/inbox/test/api/test_invalid_account.py new file mode 100644 index 000000000..832deeb77 --- /dev/null +++ b/inbox/test/api/test_invalid_account.py @@ -0,0 +1,115 @@ +import datetime +import json +import mock + +import pytest +import requests + +from inbox.test.util.base import db +from inbox.test.api.base import api_client + +__all__ = ['api_client', 'db'] + + +@pytest.fixture +def token_manager(monkeypatch): + monkeypatch.setattr( + 'inbox.models.backends.gmail.g_token_manager.get_token_for_email', + lambda *args, **kwargs: 'token') + + +@pytest.fixture +def search_response(): + resp = requests.Response() + resp.status_code = 200 + resp.elapsed = datetime.timedelta(seconds=22) + resp._content = json.dumps({ + 'messages': [{'id': '1'}, {'id': '2'}, {'id': '3'}] + }) + requests.get = mock.Mock(return_value=resp) + + +@pytest.fixture +def setup_account(message, thread, label, contact, event): + return { + 'message': message.public_id, + 'thread': thread.public_id, + 'label': label.category.public_id, + 'contact': contact.public_id, + 'event': event.public_id + } + + +def test_read_endpoints(db, setup_account, api_client, default_account): + # Read operations succeed. + for resource, public_id in setup_account.items(): + endpoint = '/{}s'.format(resource) + r = api_client.get_raw(endpoint) + assert r.status_code == 200 + + read_endpoint = '{}/{}'.format(endpoint, public_id) + r = api_client.get_raw(read_endpoint) + assert r.status_code == 200 + + default_account.sync_state = 'invalid' + db.session.commit() + + # Read operations on an invalid account also succeed. + for resource, public_id in setup_account.items(): + endpoint = '/{}s'.format(resource) + r = api_client.get_raw(endpoint) + assert r.status_code == 200 + + read_endpoint = '{}/{}'.format(endpoint, public_id) + r = api_client.get_raw(read_endpoint) + assert r.status_code == 200 + + +def test_search_endpoints(db, api_client, token_manager, search_response, + default_account): + # Message, thread search succeeds. + for endpoint in ('messages', 'threads'): + r = api_client.get_raw('/{}/search?q=queryme'.format(endpoint)) + assert r.status_code == 200 + + default_account.sync_state = 'invalid' + db.session.commit() + + # Message, thread search on an invalid account fails with an HTTP 403. + for endpoint in ('messages', 'threads'): + r = api_client.get_raw('/{}/search?q=queryme'.format(endpoint)) + assert r.status_code == 403 + + +def test_write_endpoints(db, setup_account, api_client, default_account): + # Write operations (create, update, delete) succeed. + r = api_client.post_data( + '/drafts', + data={ + 'body': '
Sea, birds and sand.
' + }) + assert r.status_code == 200 + draft_id = json.loads(r.data)['id'] + + endpoint = '/messages/{}'.format(setup_account['message']) + r = api_client.put_data(endpoint, data={"starred": True}) + assert r.status_code == 200 + + endpoint = '/events/{}'.format(setup_account['event']) + r = api_client.delete(endpoint) + assert r.status_code == 200 + + default_account.sync_state = 'invalid' + db.session.commit() + + # Write operations fail with an HTTP 403. + r = api_client.post_data('/labels', data={"display_name": "Neu!"}) + assert r.status_code == 403 + + endpoint = '/threads/{}'.format(setup_account['thread']) + r = api_client.put_data(endpoint, data={"starred": True}) + assert r.status_code == 403 + + endpoint = '/drafts/{}'.format(draft_id) + r = api_client.delete(endpoint) + assert r.status_code == 403 diff --git a/inbox/test/api/test_messages.py b/inbox/test/api/test_messages.py new file mode 100644 index 000000000..12035d0a6 --- /dev/null +++ b/inbox/test/api/test_messages.py @@ -0,0 +1,287 @@ +# flake8: noqa: F811 +import json +import mock +import pytest + +from inbox.api.ns_api import API_VERSIONS +from inbox.util.blockstore import get_from_blockstore + +from inbox.test.util.base import (add_fake_message, default_namespace, + new_message_from_synced, mime_message, thread, + add_fake_thread, generic_account, gmail_account) +from inbox.test.api.base import api_client, new_api_client + + +__all__ = ['api_client', 'default_namespace', 'new_message_from_synced', + 'mime_message', 'thread', 'generic_account', 'gmail_account'] + + +@pytest.fixture +def stub_message_from_raw(db, thread, new_message_from_synced): + new_msg = new_message_from_synced + new_msg.thread = thread + db.session.add(new_msg) + db.session.commit() + return new_msg + + +@pytest.fixture +def stub_message(db, new_message_from_synced, default_namespace, thread): + message = add_fake_message(db.session, default_namespace.id, thread, + subject="Golden Gate Park next Sat", + from_addr=[('alice', 'alice@example.com')], + to_addr=[('bob', 'bob@example.com')]) + message.snippet = 'Banh mi paleo pickled, sriracha' + message.body = """ +Banh mi paleo pickled, sriracha biodiesel chambray seitan +mumblecore mustache. Raw denim gastropub 8-bit, butcher +PBR sartorial photo booth Pinterest blog Portland roof party +cliche bitters aesthetic. Ugh. +""" + + message = add_fake_message(db.session, default_namespace.id, thread, + subject="Re:Golden Gate Park next Sat", + from_addr=[('bob', 'bob@example.com')], + to_addr=[('alice', 'alice@example.com')], + cc_addr=[('Cheryl', 'cheryl@gmail.com')]) + message.snippet = 'Bushwick meggings ethical keffiyeh' + message.body = """ +Bushwick meggings ethical keffiyeh. Chambray lumbersexual wayfarers, +irony Banksy cred bicycle rights scenester artisan tote bag YOLO gastropub. +""" + + draft = add_fake_message(db.session, default_namespace.id, thread, + subject="Re:Golden Gate Park next Sat", + from_addr=[('alice', 'alice@example.com')], + to_addr=[('bob', 'bob@example.com')], + cc_addr=[('Cheryl', 'cheryl@gmail.com')]) + draft.snippet = 'Hey there friend writing a draft' + draft.body = """ +DIY tousled Tumblr, VHS meditation 3 wolf moon listicle fingerstache viral +bicycle rights. Thundercats kale chips church-key American Apparel. 
+""" + draft.is_draft = True + draft.reply_to_message = message + + db.session.commit() + return message + + +# TODO(emfree) clean up fixture dependencies +def test_rfc822_format(stub_message_from_raw, api_client, mime_message): + """ Test the API response to retreive raw message contents """ + full_path = '/messages/{}'.format(stub_message_from_raw.public_id) + + resp = api_client.get_raw(full_path, + headers={'Accept': 'message/rfc822'}) + assert resp.data == get_from_blockstore(stub_message_from_raw.data_sha256) + + +def test_direct_fetching(stub_message_from_raw, api_client, mime_message, + monkeypatch): + # Mark a message as missing and check that we try to + # fetch it from the remote provider. + get_mock = mock.Mock(return_value=None) + monkeypatch.setattr('inbox.util.blockstore.get_from_blockstore', + get_mock) + + save_mock = mock.Mock() + monkeypatch.setattr('inbox.util.blockstore.save_to_blockstore', + save_mock) + + raw_mock = mock.Mock(return_value='Return contents') + monkeypatch.setattr('inbox.s3.backends.gmail.get_gmail_raw_contents', + raw_mock) + + full_path = '/messages/{}'.format(stub_message_from_raw.public_id) + + resp = api_client.get_raw(full_path, + headers={'Accept': 'message/rfc822'}) + + for m in [get_mock, save_mock, raw_mock]: + assert m.called + + assert resp.data == 'Return contents' + + +@pytest.mark.parametrize("api_version", API_VERSIONS) +def test_sender_and_participants(stub_message, api_client, api_version): + headers = dict() + headers['Api-Version'] = api_version + + resp = api_client.get_raw('/threads/{}' + .format(stub_message.thread.public_id), + headers=headers) + assert resp.status_code == 200 + resp_dict = json.loads(resp.data) + participants = resp_dict['participants'] + assert len(participants) == 3 + + # Not expanded, should only return IDs + assert 'message' not in resp_dict + assert 'drafts' not in resp_dict + + +@pytest.mark.parametrize("api_version", API_VERSIONS) +def test_expanded_threads(stub_message, api_client, api_version): + def _check_json_thread(resp_dict): + assert 'message_ids' not in resp_dict + assert 'messages' in resp_dict + assert 'drafts' in resp_dict + assert len(resp_dict['participants']) == 3 + assert len(resp_dict['messages']) == 2 + assert len(resp_dict['drafts']) == 1 + + for msg_dict in resp_dict['messages']: + assert 'body' not in msg_dict + assert msg_dict['object'] == 'message' + assert msg_dict['thread_id'] == stub_message.thread.public_id + valid_keys = ['account_id', 'to', 'from', 'files', 'unread', + 'unread', 'date', 'snippet'] + assert all(x in msg_dict for x in valid_keys) + + for draft in resp_dict['drafts']: + assert 'body' not in draft + assert draft['object'] == 'draft' + assert draft['thread_id'] == stub_message.thread.public_id + valid_keys = ['account_id', 'to', 'from', 'files', 'unread', + 'snippet', 'date', 'version', 'reply_to_message_id'] + assert all(x in draft for x in valid_keys) + + headers = dict() + headers['Api-Version'] = api_version + + # /threads/ + resp = api_client.get_raw( + '/threads/{}?view=expanded'.format(stub_message.thread.public_id), + headers=headers) + assert resp.status_code == 200 + resp_dict = json.loads(resp.data) + _check_json_thread(resp_dict) + + # /threads/ + resp = api_client.get_raw( + '/threads/?view=expanded'.format(stub_message.thread.public_id), + headers=headers) + assert resp.status_code == 200 + resp_dict = json.loads(resp.data) + + for thread_json in resp_dict: + if thread_json['id'] == stub_message.thread.public_id: + _check_json_thread(thread_json) + + 
+def test_expanded_message(stub_message, api_client): + def _check_json_message(msg_dict): + assert 'body' in msg_dict + assert msg_dict['object'] == 'message' + assert msg_dict['thread_id'] == stub_message.thread.public_id + + assert isinstance(msg_dict['headers'], dict) + assert 'In-Reply-To' in msg_dict['headers'] + assert 'References' in msg_dict['headers'] + assert 'Message-Id' in msg_dict['headers'] + + valid_keys = ['account_id', 'to', 'from', 'files', 'unread', + 'unread', 'date', 'snippet'] + assert all(x in msg_dict for x in valid_keys) + + # /message/ + resp = api_client.get_raw( + '/messages/{}?view=expanded'.format(stub_message.public_id)) + assert resp.status_code == 200 + resp_dict = json.loads(resp.data) + _check_json_message(resp_dict) + + # /messages/ + resp = api_client.get_raw('/messages/?view=expanded') + assert resp.status_code == 200 + resp_dict = json.loads(resp.data) + + for message_json in resp_dict: + if message_json['id'] == stub_message.public_id: + _check_json_message(message_json) + + +def test_message_folders(db, generic_account): + # Because we're using the generic_account namespace + api_client = new_api_client(db, generic_account.namespace) + + # Generic IMAP threads, messages have a 'folders' field + generic_thread = add_fake_thread(db.session, generic_account.namespace.id) + generic_message = add_fake_message(db.session, + generic_account.namespace.id, + generic_thread) + + resp_data = api_client.get_data( + '/threads/{}'.format(generic_thread.public_id)) + + assert resp_data['id'] == generic_thread.public_id + assert resp_data['object'] == 'thread' + assert 'folders' in resp_data and 'labels' not in resp_data + + resp_data = api_client.get_data( + '/messages/{}'.format(generic_message.public_id)) + + assert resp_data['id'] == generic_message.public_id + assert resp_data['object'] == 'message' + assert 'folder' in resp_data and 'labels' not in resp_data + + +def test_message_labels(db, gmail_account): + # Because we're using the gmail_account namespace + api_client = new_api_client(db, gmail_account.namespace) + + # Gmail threads, messages have a 'labels' field + gmail_thread = add_fake_thread(db.session, gmail_account.namespace.id) + gmail_message = add_fake_message(db.session, + gmail_account.namespace.id, gmail_thread) + + resp_data = api_client.get_data( + '/threads/{}'.format(gmail_thread.public_id)) + + assert resp_data['id'] == gmail_thread.public_id + assert resp_data['object'] == 'thread' + assert 'labels' in resp_data and 'folders' not in resp_data + + resp_data = api_client.get_data( + '/messages/{}'.format(gmail_message.public_id)) + + assert resp_data['id'] == gmail_message.public_id + assert resp_data['object'] == 'message' + assert 'labels' in resp_data and 'folders' not in resp_data + + +@pytest.mark.parametrize("api_version", API_VERSIONS) +def test_message_label_updates(db, api_client, default_account, api_version, + custom_label): + """Check that you can update a message (optimistically or not), + and that the update is queued in the ActionLog.""" + + headers = dict() + headers['Api-Version'] = api_version + + # Gmail threads, messages have a 'labels' field + gmail_thread = add_fake_thread(db.session, default_account.namespace.id) + gmail_message = add_fake_message(db.session, + default_account.namespace.id, gmail_thread) + + resp_data = api_client.get_data( + '/messages/{}'.format(gmail_message.public_id), headers=headers) + + assert resp_data['labels'] == [] + + category = custom_label.category + update = 
dict(labels=[category.public_id]) + + resp = api_client.put_data( + '/messages/{}'.format(gmail_message.public_id), update, + headers=headers) + + resp_data = json.loads(resp.data) + + if api_version == API_VERSIONS[0]: + assert len(resp_data['labels']) == 1 + assert resp_data['labels'][0]['id'] == category.public_id + else: + assert resp_data['labels'] == [] diff --git a/inbox/test/api/test_searching.py b/inbox/test/api/test_searching.py new file mode 100644 index 000000000..78a7a784b --- /dev/null +++ b/inbox/test/api/test_searching.py @@ -0,0 +1,469 @@ +# -*- coding: utf-8 -*- +# flake8: noqa: F401, F811 +import datetime +import json +import mock +import requests +import pytest +from pytest import fixture +from inbox.models import Folder +from inbox.search.base import get_search_client +from inbox.search.backends.gmail import GmailSearchClient +from inbox.search.backends.imap import IMAPSearchClient +from inbox.test.util.base import (add_fake_message, add_fake_thread, + add_fake_imapuid, add_fake_folder) +from inbox.test.api.base import api_client, imap_api_client + +__all__ = ['api_client'] + + +@fixture +def test_gmail_thread(db, default_account): + return add_fake_thread(db.session, default_account.namespace.id) + + +@fixture +def imap_folder(db, generic_account): + f = Folder.find_or_create(db.session, generic_account, + u'Boîte de réception', 'inbox') + db.session.add(f) + db.session.commit() + return f + + +@fixture +def different_imap_folder(db, generic_account): + f = Folder.find_or_create(db.session, generic_account, + 'Archive', 'archive') + db.session.add(f) + db.session.commit() + return f + + +@fixture +def sorted_gmail_threads(db, default_account): + thread1 = add_fake_thread(db.session, default_account.namespace.id) + thread2 = add_fake_thread(db.session, default_account.namespace.id) + thread3 = add_fake_thread(db.session, default_account.namespace.id) + + return [thread1, thread2, thread3] + + +@fixture +def sorted_gmail_messages(db, default_account, sorted_gmail_threads, folder): + thread1, thread2, thread3 = sorted_gmail_threads + message1 = add_fake_message(db.session, default_account.namespace.id, + thread=thread1, + g_msgid=1, + from_addr=[{'name': 'Ben Bitdiddle', + 'email': 'ben@bitdiddle.com'}], + to_addr=[{'name': 'Barrack Obama', + 'email': 'barrack@obama.com'}], + received_date=datetime. + datetime(2015, 7, 9, 23, 50, 7), + subject='YOO!') + + add_fake_imapuid(db.session, default_account.id, message1, + folder, 3000) + + message2 = add_fake_message(db.session, default_account.namespace.id, + thread=thread2, + g_msgid=2, + from_addr=[{'name': 'Ben Bitdiddle', + 'email': 'ben@bitdiddle.com'}], + to_addr=[{'name': 'Barrack Obama', + 'email': 'barrack@obama.com'}], + received_date=datetime. + datetime(2014, 7, 9, 23, 50, 7), + subject='Hey!') + + add_fake_imapuid(db.session, default_account.id, message2, + folder, 3001) + + message3 = add_fake_message(db.session, default_account.namespace.id, + thread=thread3, + g_msgid=3, + from_addr=[{'name': 'Ben Bitdiddle', + 'email': 'ben@bitdiddle.com'}], + to_addr=[{'name': 'Barrack Obama', + 'email': 'barrack@obama.com'}], + received_date=datetime. 
+ datetime(2013, 7, 9, 23, 50, 7), + subject='Sup?') + + add_fake_imapuid(db.session, default_account.id, message3, + folder, 3002) + + return [message1, message2, message3] + + +@fixture +def sorted_imap_threads(db, generic_account): + thread1 = add_fake_thread(db.session, generic_account.namespace.id) + thread2 = add_fake_thread(db.session, generic_account.namespace.id) + thread3 = add_fake_thread(db.session, generic_account.namespace.id) + + return [thread1, thread2, thread3] + + +@fixture +def sorted_imap_messages(db, generic_account, sorted_imap_threads, imap_folder): + thread1, thread2, thread3 = sorted_imap_threads + message1 = add_fake_message(db.session, generic_account.namespace.id, + thread=thread1, + from_addr=[{'name': '', + 'email': + 'inboxapptest@example.com'}], + to_addr=[{'name': 'Ben Bitdiddle', + 'email': 'ben@bitdiddle.com'}], + received_date=datetime. + datetime(2015, 7, 9, 23, 50, 7), + subject='YOO!') + + add_fake_imapuid(db.session, generic_account.id, message1, + imap_folder, 2000) + + message2 = add_fake_message(db.session, generic_account.namespace.id, + thread=thread2, + from_addr=[{'name': '', + 'email': + 'inboxapptest@example.com'}], + to_addr=[{'name': 'Ben Bitdiddle', + 'email': 'ben@bitdiddle.com'}], + received_date=datetime. + datetime(2014, 7, 9, 23, 50, 7), + subject='Hey!') + + add_fake_imapuid(db.session, generic_account.id, message2, + imap_folder, 2001) + + message3 = add_fake_message(db.session, generic_account.namespace.id, + thread=thread3, + from_addr=[{'name': '', + 'email': + 'inboxapptest@example.com'}], + to_addr=[{'name': 'Ben Bitdiddle', + 'email': 'ben@bitdiddle.com'}], + received_date=datetime. + datetime(2013, 7, 9, 23, 50, 7), + subject='Sup?') + + add_fake_imapuid(db.session, generic_account.id, message3, + imap_folder, 2002) + + return [message1, message2, message3] + + +@fixture +def different_sorted_imap_threads(db, generic_account): + thread1 = add_fake_thread(db.session, generic_account.namespace.id) + thread2 = add_fake_thread(db.session, generic_account.namespace.id) + thread3 = add_fake_thread(db.session, generic_account.namespace.id) + + return [thread1, thread2, thread3] + + +@fixture +def different_imap_messages(db, generic_account, different_sorted_imap_threads, + different_imap_folder): + thread1, thread2, thread3 = different_sorted_imap_threads + message1 = add_fake_message(db.session, generic_account.namespace.id, + thread=thread1, + from_addr=[{'name': '', + 'email': + 'inboxapptest@example.com'}], + to_addr=[{'name': 'Ben Bitdiddle', + 'email': 'ben@bitdiddle.com'}], + received_date=datetime. + datetime(2015, 7, 9, 23, 50, 7), + subject='LOL') + + add_fake_imapuid(db.session, generic_account.id, message1, + different_imap_folder, 5000) + + message2 = add_fake_message(db.session, generic_account.namespace.id, + thread=thread2, + from_addr=[{'name': '', + 'email': + 'inboxapptest@example.com'}], + to_addr=[{'name': 'Ben Bitdiddle', + 'email': 'ben@bitdiddle.com'}], + received_date=datetime. + datetime(2014, 7, 9, 23, 50, 7), + subject='ROTFLMO') + + add_fake_imapuid(db.session, generic_account.id, message2, + different_imap_folder, 5001) + + message3 = add_fake_message(db.session, generic_account.namespace.id, + thread=thread3, + from_addr=[{'name': '', + 'email': + 'inboxapptest@example.com'}], + to_addr=[{'name': 'Ben Bitdiddle', + 'email': 'ben@bitdiddle.com'}], + received_date=datetime. 
+ datetime(2013, 7, 9, 23, 50, 7), + subject='ROFLCOPTER') + + add_fake_imapuid(db.session, generic_account.id, message3, + different_imap_folder, 5002) + + return [message1, message2, message3] + + +class MockImapConnection(object): + + def __init__(self): + self.search_args = None + + def select_folder(self, name, **_): + return {'UIDVALIDITY': 123} + + def logout(self): + pass + + def search(self, criteria, charset=None): + self.search_args = (criteria, charset) + return [2000, 2001, 2002] + + def assert_search(self, criteria, charset=None): + assert self.search_args == (criteria, charset) + + +@fixture +def imap_connection(monkeypatch): + conn = MockImapConnection() + monkeypatch.setattr( + 'inbox.auth.generic.GenericAuthHandler.connect_account', + lambda *_, **__: conn) + return conn + + +@fixture +def invalid_imap_connection(monkeypatch): + from inbox.basicauth import ValidationError + + def raise_401(*args): + raise ValidationError() + conn = MockImapConnection() + monkeypatch.setattr( + 'inbox.auth.generic.GenericAuthHandler.connect_account', + raise_401) + return conn + + +@fixture +def patch_token_manager(monkeypatch): + monkeypatch.setattr( + 'inbox.models.backends.gmail.g_token_manager.get_token_for_email', + lambda *args, **kwargs: 'token') + + +@fixture +def patch_gmail_search_response(): + resp = requests.Response() + resp.status_code = 200 + resp.elapsed = datetime.timedelta(seconds=22) + resp._content = json.dumps({ + 'messages': [{'id': '1'}, {'id': '2'}, {'id': '3'}] + }) + requests.get = mock.Mock(return_value=resp) + + +@fixture +def invalid_gmail_token(monkeypatch): + from inbox.basicauth import OAuthError + + def raise_401(*args): + raise OAuthError() + monkeypatch.setattr( + 'inbox.models.backends.gmail.g_token_manager.get_token_for_email', + raise_401) + + +@pytest.mark.parametrize('is_streaming', [True, False]) +def test_gmail_message_search(api_client, default_account, + patch_token_manager, + patch_gmail_search_response, + sorted_gmail_messages, is_streaming): + search_client = get_search_client(default_account) + assert isinstance(search_client, GmailSearchClient) + + if is_streaming: + messages = api_client.get_data('/messages/search/streaming?q=blah%20blah%20blah') + else: + messages = api_client.get_data('/messages/search?q=blah%20blah%20blah') + + assert_search_result(sorted_gmail_messages, messages) + + +@pytest.mark.parametrize('is_streaming', [True, False]) +def test_gmail_thread_search(api_client, test_gmail_thread, + default_account, + patch_token_manager, + patch_gmail_search_response, + sorted_gmail_messages, + sorted_gmail_threads, is_streaming): + search_client = get_search_client(default_account) + assert isinstance(search_client, GmailSearchClient) + + if is_streaming: + threads = api_client.get_data('/threads/search/streaming?q=blah%20blah%20blah') + else: + threads = api_client.get_data('/threads/search?q=blah%20blah%20blah') + + assert_search_result(sorted_gmail_threads, threads) + + +@pytest.mark.parametrize('is_streaming', [True, False]) +def test_gmail_search_unicode(db, api_client, test_gmail_thread, + patch_token_manager, + patch_gmail_search_response, + default_account, + sorted_gmail_messages, + sorted_gmail_threads, is_streaming): + search_client = get_search_client(default_account) + assert isinstance(search_client, GmailSearchClient) + + if is_streaming: + threads = api_client.get_data('/threads/search/streaming?q=存档') + else: + threads = api_client.get_data('/threads/search?q=存档') + + assert_search_result(sorted_gmail_threads, 
threads) + + +@pytest.mark.parametrize('is_streaming', [True, False]) +def test_invalid_gmail_account_search(db, api_client, default_account, + invalid_gmail_token, + patch_gmail_search_response, + sorted_gmail_messages, is_streaming): + + if is_streaming: + response = api_client.get_raw('/messages/search/streaming?' + 'q=blah%20blah%20blah') + else: + response = api_client.get_raw('/messages/search?' + 'q=blah%20blah%20blah') + + assert response.status_code == 403 + assert "This search can\'t be performed because the account\'s "\ + "credentials are out of date." in json.loads(response.data)['message'] + + +@pytest.mark.parametrize('is_streaming', [True, False]) +def test_imap_message_search(imap_api_client, generic_account, + imap_folder, imap_connection, + sorted_imap_messages, is_streaming): + search_client = get_search_client(generic_account) + assert isinstance(search_client, IMAPSearchClient) + + if is_streaming: + messages = imap_api_client.get_data('/messages/search/streaming?' + 'q=blah%20blah%20blah') + else: + messages = imap_api_client.get_data('/messages/search?' + 'q=blah%20blah%20blah') + + imap_connection.assert_search(["TEXT", "blah blah blah"]) + assert_search_result(sorted_imap_messages, messages) + + +@pytest.mark.parametrize('is_streaming', [True, False]) +def test_imap_thread_search(imap_api_client, generic_account, + imap_folder, imap_connection, + sorted_imap_messages, + sorted_imap_threads, is_streaming): + search_client = get_search_client(generic_account) + assert isinstance(search_client, IMAPSearchClient) + + if is_streaming: + threads = imap_api_client.get_data('/threads/search/streaming?q=blah%20blah%20blah') + else: + threads = imap_api_client.get_data('/threads/search?q=blah%20blah%20blah') + + imap_connection.assert_search(["TEXT", "blah blah blah"]) + assert_search_result(sorted_imap_threads, threads) + + +@pytest.mark.parametrize('is_streaming', [True, False]) +def test_imap_thread_search_unicode(db, imap_api_client, generic_account, + imap_folder, imap_connection, + sorted_imap_messages, + sorted_imap_threads, is_streaming): + search_client = get_search_client(generic_account) + assert isinstance(search_client, IMAPSearchClient) + + if is_streaming: + threads = imap_api_client.get_data('/threads/search/streaming?q=存档') + else: + threads = imap_api_client.get_data('/threads/search?q=存档') + + imap_connection.assert_search([u"TEXT", u"\u5b58\u6863"], "UTF-8") + assert_search_result(sorted_imap_threads, threads) + + +@pytest.mark.parametrize('is_streaming', [True, False]) +def test_invalid_imap_account_search(db, imap_api_client, generic_account, + invalid_imap_connection, + imap_folder, + sorted_imap_messages, is_streaming): + + if is_streaming: + # Because of the way streaming search work, it will return a + # 200 response even though we can't access the account. + response = imap_api_client.get_raw('/messages/search/streaming?' + 'q=blah%20blah%20blah') + assert response.status_code == 200 + else: + response = imap_api_client.get_raw('/messages/search?' + 'q=blah%20blah%20blah') + + assert response.status_code == 403 + assert "This search can\'t be performed because the account\'s "\ + "credentials are out of date." 
in json.loads(response.data)['message'] + + +def assert_search_result(expected, actual): + assert len(expected) == len(actual) + for expected_item, actual_item in zip(expected, actual): + assert expected_item.public_id == actual_item['id'] + + +@pytest.mark.parametrize('endpoint', ['messages', 'threads']) +def test_streaming_search_results(db, imap_api_client, generic_account, + imap_folder, monkeypatch, sorted_imap_messages, + different_imap_messages, endpoint): + # Check that the streaming search returns results from different + # folders. + + class MultiFolderMockImapConnection(MockImapConnection): + + def __init__(self): + self._responses = list(reversed([ + [2000, 2001, 2002], + [5000, 5001], + ])) + + def search(self, criteria, charset=None): + self.search_args = (criteria, charset) + return self._responses.pop() + + conn = MultiFolderMockImapConnection() + monkeypatch.setattr( + 'inbox.auth.generic.GenericAuthHandler.connect_account', + lambda *_, **__: conn) + + search_client = get_search_client(generic_account) + assert isinstance(search_client, IMAPSearchClient) + + url = '/{}/search/streaming?q=fantastic'.format(endpoint) + raw_data = imap_api_client.get_raw(url).data + assert len(conn._responses) == 0, "Search should go through both folders" + + # The API returns JSON lists separated by '\n' + responses = raw_data.split('\n') + assert len(responses) == 3 and responses[2] == '' + assert len(json.loads(responses[0])) == 3 + assert len(json.loads(responses[1])) == 2 diff --git a/inbox/test/api/test_sending.py b/inbox/test/api/test_sending.py new file mode 100644 index 000000000..6737d9988 --- /dev/null +++ b/inbox/test/api/test_sending.py @@ -0,0 +1,985 @@ +# -*- coding: utf-8 -*- +# flake8: noqa: F811 +import smtplib +import json +import time +import pytest +import re +import traceback +import contextlib +from flanker import mime +import nylas.logging +import inbox.api.ns_api +from inbox.basicauth import OAuthError +from inbox.models import Message, Event +from inbox.test.util.base import thread, message, imported_event +from inbox.test.api.base import api_client +from inbox.sendmail.smtp.postel import _substitute_bcc + + +__all__ = ['thread', 'message', 'api_client', 'imported_event'] + + +class MockTokenManager(object): + + def __init__(self, allow_auth=True): + self.allow_auth = allow_auth + + def get_token(self, account, force_refresh=True): + if self.allow_auth: + # return a fake token. 
+ return 'foo' + raise OAuthError() + + +class MockGoogleTokenManager(object): + + def __init__(self, allow_auth=True): + self.allow_auth = allow_auth + + def get_token_for_email(self, account, force_refresh=False): + if self.allow_auth: + return 'foo' + raise OAuthError() + + +@pytest.fixture +def patch_token_manager(monkeypatch): + monkeypatch.setattr('inbox.sendmail.smtp.postel.default_token_manager', + MockTokenManager()) + monkeypatch.setattr('inbox.sendmail.smtp.postel.g_token_manager', + MockGoogleTokenManager()) + + +@pytest.fixture +def disallow_auth(monkeypatch): + monkeypatch.setattr('inbox.sendmail.smtp.postel.default_token_manager', + MockTokenManager(allow_auth=False)) + monkeypatch.setattr('inbox.sendmail.smtp.postel.g_token_manager', + MockGoogleTokenManager(allow_auth=False)) + + +@pytest.fixture +def patch_smtp(patch_token_manager, monkeypatch): + submitted_messages = [] + + class MockSMTPConnection(object): + + def __init__(self, *args, **kwargs): + pass + + def __enter__(self): + return self + + def __exit__(self, exc_type, value, traceback): + pass + + def sendmail(self, recipients, msg): + submitted_messages.append((recipients, msg)) + + monkeypatch.setattr('inbox.sendmail.smtp.postel.SMTPConnection', + MockSMTPConnection) + return submitted_messages + + +def erring_smtp_connection(exc_type, *args): + class ErringSMTPConnection(object): + + def __init__(self, *args, **kwargs): + pass + + def __enter__(self): + return self + + def __exit__(self, exc_type, value, traceback): + pass + + def sendmail(self, recipients, msg): + raise exc_type(*args) + + return ErringSMTPConnection + + +# Different providers use slightly different errors, so parametrize this test +# fixture to imitate them. +@pytest.fixture(params=[ + "5.4.5 Daily sending quota exceeded", + "5.7.1 You have exceeded your daily sending limits"]) +@pytest.fixture +def quota_exceeded(patch_token_manager, monkeypatch, request): + monkeypatch.setattr('inbox.sendmail.smtp.postel.SMTPConnection', + erring_smtp_connection( + smtplib.SMTPDataError, 550, + request.param)) + + +@pytest.fixture +def connection_closed(patch_token_manager, monkeypatch): + monkeypatch.setattr('inbox.sendmail.smtp.postel.SMTPConnection', + erring_smtp_connection(smtplib.SMTPServerDisconnected)) + + +@pytest.fixture(params=[ + "User unknown", + "5.1.1 : Recipient address rejected: " + "User unknown in virtual mailbox table" +]) +def recipients_refused(patch_token_manager, monkeypatch, request): + monkeypatch.setattr('inbox.sendmail.smtp.postel.SMTPConnection', + erring_smtp_connection(smtplib.SMTPRecipientsRefused, + {'foo@foocorp.com': + (550, request.param)})) + + +# Different providers use slightly different errors, so parametrize this test +# fixture to imitate them. 
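For clarity: a small, self-contained illustration of the parametrized-fixture pattern referred to above, using only stock pytest behaviour (the fixture name and error strings here are invented for illustration). Each entry in params produces a separate run of the dependent test, and the fixture reads the active value from request.param:

    import pytest

    @pytest.fixture(params=['550 provider A rejected the message',
                            '552 provider B rejected the message'])
    def smtp_error_text(request):
        # pytest runs dependent tests once per param value.
        return request.param

    def test_error_text_starts_with_code(smtp_error_text):
        assert smtp_error_text.split(' ', 1)[0].isdigit()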
+@pytest.fixture(params=[ + "5.2.3 Your message exceeded Google's message size limits", + "5.3.4 Message size exceeds fixed maximum message size"]) +def message_too_large(patch_token_manager, monkeypatch, request): + monkeypatch.setattr( + 'inbox.sendmail.smtp.postel.SMTPConnection', + erring_smtp_connection( + smtplib.SMTPSenderRefused, 552, + request.param, None)) + + +@pytest.fixture +def insecure_content(patch_token_manager, monkeypatch): + monkeypatch.setattr( + 'inbox.sendmail.smtp.postel.SMTPConnection', + erring_smtp_connection( + smtplib.SMTPDataError, 552, + '5.7.0 This message was blocked because its content presents a ' + 'potential\\n5.7.0 security issue.')) + + +@pytest.fixture +def example_draft(db, default_account): + return { + 'subject': 'Draft test', + 'body': '
Sea, birds and sand.
', + 'to': [{'name': 'The red-haired mermaid', + 'email': default_account.email_address}] + } + + +@pytest.fixture +def example_rsvp(imported_event): + return { + 'event_id': imported_event.public_id, + 'comment': 'I will come.', + 'status': 'yes', + } + + +@pytest.fixture +def example_draft_bad_subject(db, default_account): + return { + 'subject': ['draft', 'test'], + 'body': '
Sea, birds and sand.
', + 'to': [{'name': 'The red-haired mermaid', + 'email': default_account.email_address}] + } + + +@pytest.fixture +def example_draft_bad_body(db, default_account): + return { + 'subject': 'Draft test', + 'body': {'foo': 'bar'}, + 'to': [{'name': 'The red-haired mermaid', + 'email': default_account.email_address}] + } + + +@pytest.fixture +def example_event(db, api_client): + from inbox.models.calendar import Calendar + cal = db.session.query(Calendar).get(1) + + event = { + 'title': 'Invite test', + 'when': { + "end_time": 1436210662, + "start_time": 1436207062 + }, + 'participants': [ + {'email': 'helena@nylas.com'} + ], + 'calendar_id': cal.public_id, + } + + r = api_client.post_data('/events', event) + event_public_id = json.loads(r.data)['id'] + return event_public_id + + +def test_send_existing_draft(patch_smtp, api_client, example_draft): + r = api_client.post_data('/drafts', example_draft) + draft_public_id = json.loads(r.data)['id'] + version = json.loads(r.data)['version'] + + r = api_client.post_data('/send', + {'draft_id': draft_public_id, + 'version': version}) + assert r.status_code == 200 + + # Test that the sent draft can't be sent again. + r = api_client.post_data('/send', + {'draft_id': draft_public_id, + 'version': version}) + assert r.status_code == 400 + + drafts = api_client.get_data('/drafts') + assert not drafts + + message = api_client.get_data('/messages/{}'.format(draft_public_id)) + assert message['object'] == 'message' + + +def test_send_rejected_without_version(api_client, example_draft): + r = api_client.post_data('/drafts', example_draft) + draft_public_id = json.loads(r.data)['id'] + r = api_client.post_data('/send', {'draft_id': draft_public_id}) + assert r.status_code == 400 + + +def test_send_rejected_with_wrong_version(api_client, example_draft): + r = api_client.post_data('/drafts', example_draft) + draft_public_id = json.loads(r.data)['id'] + r = api_client.post_data('/send', {'draft_id': draft_public_id, + 'version': 222}) + assert r.status_code == 409 + + +def test_send_rejected_without_recipients(api_client): + r = api_client.post_data('/drafts', {'subject': 'Hello there'}) + draft_public_id = json.loads(r.data)['id'] + version = json.loads(r.data)['version'] + + r = api_client.post_data('/send', + {'draft_id': draft_public_id, + 'version': version}) + assert r.status_code == 400 + + +def test_send_new_draft(patch_smtp, api_client, default_account, + example_draft): + r = api_client.post_data('/send', example_draft) + assert r.status_code == 200 + + +def test_malformed_body_rejected(api_client, example_draft_bad_body): + r = api_client.post_data('/send', example_draft_bad_body) + + assert r.status_code == 400 + + decoded = json.loads(r.get_data()) + assert decoded['type'] == 'invalid_request_error' + assert decoded['message'] == '"body" should be a string' + + +def test_malformed_subject_rejected(api_client, example_draft_bad_subject): + r = api_client.post_data('/send', example_draft_bad_subject) + assert r.status_code == 400 + + decoded = json.loads(r.get_data()) + assert decoded['type'] == 'invalid_request_error' + assert decoded['message'] == '"subject" should be a string' + + +def test_malformed_request_rejected(api_client): + r = api_client.post_data('/send', {}) + assert r.status_code == 400 + + +def test_recipient_validation(patch_smtp, api_client): + r = api_client.post_data('/drafts', {'to': [{'email': 'foo@example.com'}]}) + assert r.status_code == 200 + r = api_client.post_data('/drafts', {'to': {'email': 'foo@example.com'}}) + assert 
r.status_code == 400 + r = api_client.post_data('/drafts', {'to': 'foo@example.com'}) + assert r.status_code == 400 + r = api_client.post_data('/drafts', {'to': [{'name': 'foo'}]}) + assert r.status_code == 400 + r = api_client.post_data('/send', {'to': [{'email': 'foo'}]}) + assert r.status_code == 400 + r = api_client.post_data('/send', {'to': [{'email': 'föö'}]}) + assert r.status_code == 400 + r = api_client.post_data('/drafts', {'to': [{'email': ['foo']}]}) + assert r.status_code == 400 + r = api_client.post_data('/drafts', {'to': [{'name': ['Mr. Foo'], + 'email': 'foo@example.com'}]}) + assert r.status_code == 400 + r = api_client.post_data('/drafts', + {'to': [{'name': 'Good Recipient', + 'email': 'goodrecipient@example.com'}, + 'badrecipient@example.com']}) + assert r.status_code == 400 + + # Test that sending a draft with invalid recipients fails. + for field in ('to', 'cc', 'bcc'): + r = api_client.post_data('/drafts', {field: [{'email': 'foo'}]}) + draft_id = json.loads(r.data)['id'] + draft_version = json.loads(r.data)['version'] + r = api_client.post_data('/send', {'draft_id': draft_id, + 'draft_version': draft_version}) + assert r.status_code == 400 + + +def test_handle_invalid_credentials(disallow_auth, api_client, example_draft): + r = api_client.post_data('/send', example_draft) + assert r.status_code == 403 + assert json.loads(r.data)['message'] == 'Could not authenticate with ' \ + 'the SMTP server.' + + +def test_handle_quota_exceeded(quota_exceeded, api_client, example_draft): + r = api_client.post_data('/send', example_draft) + assert r.status_code == 429 + assert json.loads(r.data)['message'] == 'Daily sending quota exceeded' + + +def test_handle_server_disconnected(connection_closed, api_client, + example_draft): + r = api_client.post_data('/send', example_draft) + assert r.status_code == 503 + assert json.loads(r.data)['message'] == 'The server unexpectedly closed ' \ + 'the connection' + + +def test_handle_recipients_rejected(recipients_refused, api_client, + example_draft): + r = api_client.post_data('/send', example_draft) + assert r.status_code == 402 + assert json.loads(r.data)['message'] == 'Sending to all recipients failed' + + +def test_handle_message_too_large(message_too_large, api_client, + example_draft): + r = api_client.post_data('/send', example_draft) + assert r.status_code == 402 + assert json.loads(r.data)['message'] == 'Message too large' + + +def test_message_rejected_for_security(insecure_content, api_client, + example_draft): + r = api_client.post_data('/send', example_draft) + assert r.status_code == 402 + assert json.loads(r.data)['message'] == \ + 'Message content rejected for security reasons' + + +def test_bcc_in_recipients_but_stripped_from_headers(patch_smtp, api_client): + r = api_client.post_data( + '/send', + { + 'to': [{'email': 'bob@foocorp.com'}], + 'cc': [{'email': 'jane@foocorp.com'}], + 'bcc': [{'email': 'spies@nsa.gov'}], + 'subject': 'Banalities' + }) + assert r.status_code == 200 + recipients, msg = patch_smtp[0] + assert set(recipients) == {'bob@foocorp.com', 'jane@foocorp.com', + 'spies@nsa.gov'} + parsed = mime.from_string(msg) + assert 'Bcc' not in parsed.headers + assert parsed.headers.get('To') == 'bob@foocorp.com' + assert parsed.headers.get('Cc') == 'jane@foocorp.com' + + +def test_reply_headers_set(db, patch_smtp, api_client, example_draft, thread, + message): + message.message_id_header = '' + db.session.commit() + thread_id = api_client.get_data('/threads')[0]['id'] + + api_client.post_data('/send', {'to': 
[{'email': 'bob@foocorp.com'}], + 'thread_id': thread_id}) + _, msg = patch_smtp[-1] + parsed = mime.from_string(msg) + assert 'In-Reply-To' in parsed.headers + assert 'References' in parsed.headers + + +def test_body_construction(patch_smtp, api_client): + api_client.post_data('/send', + {'to': [{'email': 'bob@foocorp.com'}], + 'subject': 'Banalities', + 'body': 'Hello there'}) + _, msg = patch_smtp[-1] + parsed = mime.from_string(msg) + assert len(parsed.parts) == 2 + plain_part_found = False + html_part_found = False + for part in parsed.parts: + if part.content_type.value == 'text/plain': + plain_part_found = True + assert part.body.strip() == 'Hello there' + elif part.content_type.value == 'text/html': + html_part_found = True + assert part.body.strip() == 'Hello there' + assert plain_part_found and html_part_found + + +def test_quoted_printable_encoding_avoided_for_compatibility( + patch_smtp, api_client): + # Test that messages with long lines don't get quoted-printable encoded, + # for maximum server compatibility. + api_client.post_data( + '/send', + {'to': [{'email': 'bob@foocorp.com'}], + 'subject': 'In Catilinam', + 'body': 'Etenim quid est, Catilina, quod iam amplius exspectes, si ' + 'neque nox tenebris obscurare coeptus nefarios neque privata domus ' + 'parietibus continere voces conjurationis tuae potest? Si ' + 'illustrantur, si erumpunt omnia? Muta iam istam mentem, mihi crede! ' + 'obliviscere caedis atque incendiorum. Teneris undique: luce sunt ' + 'clariora nobis tua consilia omnia; quae iam mecum licet recognoscas.' + ' Meministine me ante diem duodecimum Kalendas Novembres dicere in ' + 'senatu, fore in armis certo die, qui dies futurus esset ante diem ' + 'sextum Kalendas Novembres, C. Manlium, audaciae satellitem atque ' + 'administrum tuae? Num me fefellit, Catilina, non modo res tanta, tam' + ' atrox, tamque incredibilis, verum id quod multo magis admirandum, ' + 'dies? 
'}) + _, msg = patch_smtp[-1] + parsed = mime.from_string(msg) + assert len(parsed.parts) == 2 + for part in parsed.parts: + if part.content_type.value == 'text/html': + assert part.content_encoding[0] == 'base64' + elif part.content_type.value == 'text/plain': + assert part.content_encoding[0] in ('7bit', 'base64') + + +def test_draft_not_persisted_if_sending_fails(recipients_refused, api_client, + db): + api_client.post_data('/send', {'to': [{'email': 'bob@foocorp.com'}], + 'subject': 'some unique subject'}) + assert db.session.query(Message).filter_by( + subject='some unique subject').first() is None + + +def test_setting_reply_to_headers(patch_smtp, api_client): + api_client.post_data('/send', + {'to': [{'email': 'bob@foocorp.com'}], + 'reply_to': [{'name': 'admin', + 'email': 'prez@whitehouse.gov'}], + 'subject': 'Banalities', + 'body': 'Hello there'}) + _, msg = patch_smtp[-1] + parsed = mime.from_string(msg) + assert 'Reply-To' in parsed.headers + assert parsed.headers['Reply-To'] == 'admin ' + + +def test_sending_from_email_alias(patch_smtp, api_client): + api_client.post_data('/send', + {'to': [{'email': 'bob@foocorp.com'}], + 'from': [{'name': 'admin', + 'email': 'prez@whitehouse.gov'}], + 'subject': 'Banalities', + 'body': 'Hello there'}) + _, msg = patch_smtp[-1] + parsed = mime.from_string(msg) + assert 'From' in parsed.headers + assert parsed.headers['From'] == 'admin ' + + +def test_sending_raw_mime(patch_smtp, api_client): + api_client.post_raw('/send', ('From: bob@foocorp.com\r\n' + 'To: golang-nuts ' + '\r\n' + 'Cc: prez@whitehouse.gov\r\n' + 'Bcc: Some Guy \r\n' + 'Subject: ' + '[go-nuts] Runtime Panic On Method Call' + '\r\n' + 'Mime-Version: 1.0\r\n' + 'In-Reply-To: ' + '<78pgxboai332pi9p2smo4db73-0' + '@mailer.nylas.com>\r\n' + 'References: ' + '<78pgxboai332pi9p2smo4db73-0' + '@mailer.nylas.com>\r\n' + 'Content-Type: text/plain; charset=UTF-8' + '\r\n' + 'Content-Transfer-Encoding: 7bit\r\n' + 'X-My-Custom-Header: Random\r\n\r\n' + 'Yo.'), + headers={'Content-Type': 'message/rfc822'}) + + _, msg = patch_smtp[-1] + parsed = mime.from_string(msg) + assert parsed.body == 'Yo.' 
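+    # The headers from the raw payload should be relayed as-is below, except
+    # that Bcc is stripped and the sync engine adds its own X-INBOX-ID,
+    # Message-Id and User-Agent headers.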
+ assert parsed.headers['From'] == 'bob@foocorp.com' + assert parsed.headers['Subject'] == \ + '[go-nuts] Runtime Panic On Method Call' + assert parsed.headers['Cc'] == 'prez@whitehouse.gov' + assert parsed.headers['To'] == 'golang-nuts ' + assert parsed.headers['In-Reply-To'] == \ + '<78pgxboai332pi9p2smo4db73-0@mailer.nylas.com>' + assert parsed.headers['References'] == \ + '<78pgxboai332pi9p2smo4db73-0@mailer.nylas.com>' + assert parsed.headers['X-My-Custom-Header'] == 'Random' + assert 'Bcc' not in parsed.headers + assert 'X-INBOX-ID' in parsed.headers + assert 'Message-Id' in parsed.headers + assert 'User-Agent' in parsed.headers + + +def test_sending_bad_raw_mime(patch_smtp, api_client): + res = api_client.post_raw('/send', ('From: bob@foocorp.com\r\n' + 'To: \r\n' + 'Subject: ' + '[go-nuts] Runtime Panic On Method' + 'Call \r\n' + 'Mime-Version: 1.0\r\n' + 'Content-Type: ' + 'text/plain; charset=UTF-8\r\n' + 'Content-Transfer-Encoding: 7bit\r\n' + 'X-My-Custom-Header: Random' + '\r\n\r\n' + 'Yo.'), headers={'Content-Type': + 'message/rfc822'}) + + assert res.status_code == 400 + + +def test_sending_from_email_multiple_aliases(patch_smtp, patch_token_manager, + api_client): + res = api_client.post_data('/send', + {'to': [{'email': 'bob@foocorp.com'}], + 'from': [{'name': 'admin', + 'email': 'prez@whitehouse.gov'}, + {'name': 'the rock', + 'email': 'd.johnson@gmail.com'}], + 'subject': 'Banalities', + 'body': 'Hello there'}) + assert res.status_code == 400 + + res = api_client.post_data('/send', + {'to': [{'email': 'bob@foocorp.com'}], + 'reply_to': [{'name': 'admin', + 'email': 'prez@whitehouse.gov'}, + {'name': 'the rock', + 'email': 'd.johnson@gmail.com'}], + 'subject': 'Banalities', + 'body': 'Hello there'}) + assert res.status_code == 400 + + +def test_rsvp_invalid_credentials(disallow_auth, api_client, example_rsvp): + r = api_client.post_data('/send-rsvp', example_rsvp) + assert r.status_code == 403 + assert json.loads(r.data)['message'] == 'Could not authenticate with ' \ + 'the SMTP server.' 
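+
+# The remaining /send-rsvp error tests mirror the /send error-handling tests
+# above: 429 for an exceeded sending quota, 503 for a dropped connection, and
+# 402 for rejected recipients, oversized messages, and content rejected for
+# security reasons.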
+ + +def test_rsvp_quota_exceeded(quota_exceeded, api_client, example_rsvp): + r = api_client.post_data('/send-rsvp', example_rsvp) + assert r.status_code == 429 + assert json.loads(r.data)['message'] == 'Daily sending quota exceeded' + + +def test_rsvp_server_disconnected(connection_closed, api_client, example_rsvp): + r = api_client.post_data('/send-rsvp', example_rsvp) + assert r.status_code == 503 + assert json.loads(r.data)['message'] == 'The server unexpectedly closed ' \ + 'the connection' + + +def test_rsvp_recipients_rejected(recipients_refused, api_client, + example_rsvp): + r = api_client.post_data('/send-rsvp', example_rsvp) + assert r.status_code == 402 + assert json.loads(r.data)['message'] == 'Sending to all recipients failed' + + +def test_rsvp_message_too_large(message_too_large, api_client, example_rsvp): + r = api_client.post_data('/send-rsvp', example_rsvp) + assert r.status_code == 402 + assert json.loads(r.data)['message'] == 'Message too large' + + +def test_rsvp_message_rejected_for_security(insecure_content, api_client, + example_rsvp): + r = api_client.post_data('/send-rsvp', example_rsvp) + assert r.status_code == 402 + assert json.loads(r.data)['message'] == \ + 'Message content rejected for security reasons' + + +def test_rsvp_updates_status(patch_smtp, api_client, example_rsvp, + imported_event): + assert len(imported_event.participants) == 1 + assert imported_event.participants[0]['email'] == 'inboxapptest@gmail.com' + assert imported_event.participants[0]['status'] == 'noreply' + + r = api_client.post_data('/send-rsvp', example_rsvp) + assert r.status_code == 200 + dct = json.loads(r.data) + + # check that the event's status got updated + assert len(dct['participants']) == 1 + assert dct['participants'][0]['email'] == 'inboxapptest@gmail.com' + assert dct['participants'][0]['status'] == 'yes' + assert dct['participants'][0]['comment'] == 'I will come.' + + +@pytest.mark.parametrize('status,comment', [ + ('yes', ''), + ('no', ''), + ('yes', None), + ('maybe', None), + ('yes', 'I will come'), + ('no', "I won't come"), + ('yes', u"Нэ дуо рэгяонэ фабулаз аккоммодары."), +]) +def test_rsvp_idempotent(db, patch_smtp, api_client, example_rsvp, + imported_event, status, comment): + part = imported_event.participants[0] + part['status'] = status + part['comment'] = comment + + # MutableList shenanigans -- it won't update + # what's stored in the db otherwise. + imported_event.participants = [] + db.session.commit() + + imported_event.participants = [part] + db.session.commit() + + old_update_date = imported_event.updated_at + db.session.expunge(imported_event) + + rsvp = {'event_id': imported_event.public_id, + 'status': status, 'comment': comment} + r = api_client.post_data('/send-rsvp', rsvp) + assert r.status_code == 200 + dct = json.loads(r.data) + + # check that the event's status is the same. + assert len(dct['participants']) == 1 + assert dct['participants'][0]['email'] == 'inboxapptest@gmail.com' + assert dct['participants'][0]['status'] == status + + assert dct['participants'][0]['comment'] == comment + + # Check that the event hasn't been updated. 
+ refreshed_event = db.session.query(Event).get(imported_event.id) + assert refreshed_event.updated_at == old_update_date + + +def test_sent_messages_shown_in_delta(patch_smtp, api_client, example_draft): + ts = int(time.time()) + r = api_client.post_data('/delta/generate_cursor', {'start': ts}) + cursor = json.loads(r.data)['cursor'] + r = api_client.post_data('/send', example_draft) + message_id = json.loads(r.data)['id'] + deltas = api_client.get_data('/delta?cursor={}'.format(cursor))['deltas'] + message_delta = next((d for d in deltas if d['id'] == message_id), None) + assert message_delta is not None + assert message_delta['object'] == 'message' + assert message_delta['event'] == 'create' + + +# MULTI-SEND # + +def test_multisend_init_new_draft(patch_smtp, api_client, example_draft): + r = api_client.post_data('/send-multiple', + example_draft) + assert r.status_code == 200 + draft_public_id = json.loads(r.data)['id'] + + # Test that the sent draft can't be sent normally now + r = api_client.post_data('/send', + {'draft_id': draft_public_id, + 'version': 0}) + assert r.status_code == 400 + + # It's not a draft anymore + drafts = api_client.get_data('/drafts') + assert not drafts + + # We can retrieve it as a message, but it's not "sent" yet + message = api_client.get_data('/messages/{}'.format(draft_public_id)) + assert message['object'] == 'message' + + +def test_multisend_init_rejected_with_existing_draft(api_client, + example_draft): + r = api_client.post_data('/drafts', example_draft) + draft_public_id = json.loads(r.data)['id'] + version = json.loads(r.data)['version'] + + r = api_client.post_data('/send-multiple', + {'draft_id': draft_public_id, + 'version': version}) + assert r.status_code == 400 + + +def test_multisend_init_rejected_without_recipients(api_client): + r = api_client.post_data('/send-multiple', + {'subject': 'Hello there'}) + assert r.status_code == 400 + + +def test_multisend_init_malformed_body_rejected(api_client, + example_draft_bad_body): + r = api_client.post_data('/send-multiple', example_draft_bad_body) + + assert r.status_code == 400 + + decoded = json.loads(r.get_data()) + assert decoded['type'] == 'invalid_request_error' + assert decoded['message'] == '"body" should be a string' + + +def test_multisend_init_malformed_subject_rejected(api_client, + example_draft_bad_subject): + r = api_client.post_data('/send-multiple', example_draft_bad_subject) + assert r.status_code == 400 + + decoded = json.loads(r.get_data()) + assert decoded['type'] == 'invalid_request_error' + assert decoded['message'] == '"subject" should be a string' + + +def test_multisend_init_malformed_request_rejected(api_client): + r = api_client.post_data('/send-multiple', {}) + assert r.status_code == 400 + + +@pytest.fixture +def multisend_draft(api_client, example_draft): + example_draft['to'].append({'email': 'bob@foocorp.com'}) + r = api_client.post_data('/send-multiple', example_draft) + assert r.status_code == 200 + return json.loads(r.get_data()) + + +@pytest.fixture +def multisend(multisend_draft): + return { + 'id': multisend_draft['id'], + 'send_req': {'body': "email body", + 'send_to': multisend_draft['to'][0]}, + 'draft': multisend_draft + } + + +@pytest.fixture +def multisend2(multisend_draft): + return { + 'id': multisend_draft['id'], + 'send_req': {'body': "email body 2", + 'send_to': multisend_draft['to'][1]}, + 'draft': multisend_draft + } + + +@pytest.fixture +def patch_crispin_del_sent(monkeypatch): + # This lets us test the interface between remote_delete_sent / + # 
writable_connection_pool and the multi-send delete API endpoint, without
+    # running an actual crispin client.
+    #
+    # Multi-send uses these functions from syncback to synchronously delete
+    # sent messages. They usually don't appear in API code, so this makes sure
+    # their usage is correct.
+
+    def fake_remote_delete_sent(crispin_client, account_id, message_id_header,
+                                delete_multiple=False):
+        return True
+
+    class FakeConnWrapper(object):
+        def __init__(self):
+            pass
+
+        @contextlib.contextmanager
+        def get(self):
+            yield MockCrispinClient()
+
+    class MockCrispinClient(object):
+        def folder_names(self):
+            return ['sent']
+
+        def delete_sent_message(self, message_id_header,
+                                delete_multiple=False):
+            pass
+
+
+    def fake_conn_pool(acct_id):
+        return FakeConnWrapper()
+
+    monkeypatch.setattr('inbox.api.ns_api.remote_delete_sent',
+                        fake_remote_delete_sent)
+    monkeypatch.setattr('inbox.api.ns_api.writable_connection_pool',
+                        fake_conn_pool)
+
+
+@pytest.fixture
+def patch_sentry_to_raise(monkeypatch):
+    # This makes it so calls to sentry instead raise an exception in the test
+    # and fail it. This is used to test the multi-send delete endpoint, where
+    # we wrap sent message deletion in a catch-all but still report to sentry.
+    def make_sentry_raise():
+        traceback.print_exc()
+        raise
+    monkeypatch.setattr(nylas.logging.sentry, 'sentry_alert',
+                        make_sentry_raise)
+
+
+def test_multisend_session(api_client, multisend, multisend2, patch_smtp,
+                           patch_crispin_del_sent, patch_sentry_to_raise):
+
+    r = api_client.post_data('/send-multiple/' + multisend['id'],
+                             multisend['send_req'])
+    assert r.status_code == 200
+    assert json.loads(r.data)['body'] == multisend['send_req']['body']
+
+    r = api_client.post_data('/send-multiple/' + multisend2['id'],
+                             multisend2['send_req'])
+    assert r.status_code == 200
+    assert json.loads(r.data)['body'] == multisend2['send_req']['body']
+
+    # Make sure we can't send to people not in the message recipients
+    req_body = {'send_req': {'body': "you're not even a recipient!",
+                             'send_to': {'name': 'not in message',
+                                         'email': 'not@in.msg'}}}
+    r = api_client.post_data('/send-multiple/' + multisend['id'], req_body)
+    assert r.status_code == 400
+
+    r = api_client.delete('/send-multiple/' + multisend['id'])
+    assert r.status_code == 200
+    assert json.loads(r.data)['body'] == multisend['draft']['body']
+
+
+def test_multisend_handle_invalid_credentials(disallow_auth, api_client,
+                                              multisend,
+                                              patch_crispin_del_sent):
+    r = api_client.post_data('/send-multiple/' + multisend['id'],
+                             multisend['send_req'])
+    assert r.status_code == 403
+    assert json.loads(r.data)['message'] == 'Could not authenticate with ' \
+        'the SMTP server.'
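+
+# The remaining /send-multiple error tests expect the same status codes as
+# their /send counterparts (429, 503 and 402). Each takes the
+# patch_crispin_del_sent fixture so that any sent-message cleanup triggered by
+# the endpoint goes through the fakes above instead of a real crispin client.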
+ + +def test_multisend_handle_quota_exceeded(quota_exceeded, api_client, + multisend, patch_crispin_del_sent): + r = api_client.post_data('/send-multiple/' + multisend['id'], + multisend['send_req']) + assert r.status_code == 429 + assert json.loads(r.data)['message'] == 'Daily sending quota exceeded' + + +def test_multisend_handle_server_disconnected(connection_closed, api_client, + multisend, + patch_crispin_del_sent): + r = api_client.post_data('/send-multiple/' + multisend['id'], + multisend['send_req']) + assert r.status_code == 503 + assert json.loads(r.data)['message'] == 'The server unexpectedly closed ' \ + 'the connection' + + +def test_multisend_handle_recipients_rejected(recipients_refused, api_client, + multisend, + patch_crispin_del_sent): + r = api_client.post_data('/send-multiple/' + multisend['id'], + multisend['send_req']) + assert r.status_code == 402 + assert json.loads(r.data)['message'] == 'Sending to all recipients failed' + + +def test_multisend_handle_message_too_large(message_too_large, api_client, + multisend, patch_crispin_del_sent): + r = api_client.post_data('/send-multiple/' + multisend['id'], + multisend['send_req']) + assert r.status_code == 402 + assert json.loads(r.data)['message'] == 'Message too large' + + +def test_multisend_message_rejected_for_security(insecure_content, api_client, + multisend, + patch_crispin_del_sent): + r = api_client.post_data('/send-multiple/' + multisend['id'], + multisend['send_req']) + assert r.status_code == 402 + assert json.loads(r.data)['message'] == 'Message content rejected ' \ + 'for security reasons' + + +def test_raw_bcc_replacements(patch_smtp, api_client): + # Check that we're replacing "Bcc:" correctly from messages. + res = _substitute_bcc('From: bob@foocorp.com\r\n' + 'To: \r\n' + 'Bcc: karim@nylas.com\r\n' + 'Subject: ' + '[go-nuts] Runtime Panic On Method' + 'Call \r\n' + 'Mime-Version: 1.0\r\n' + 'Content-Type: ' + 'text/plain; charset=UTF-8\r\n' + 'Content-Transfer-Encoding: 7bit\r\n' + 'X-My-Custom-Header: Random' + '\r\n\r\n') + + assert 'karim@nylas.com' not in res + + res = _substitute_bcc('From: bob@foocorp.com\r\n' + 'To: \r\n' + 'BCC: karim@nylas.com\r\n' + 'Subject: ' + '[go-nuts] Runtime BCC: On Method' + 'Call \r\n' + 'Mime-Version: 1.0\r\n' + 'Content-Type: ' + 'text/plain; charset=UTF-8\r\n' + 'Content-Transfer-Encoding: 7bit\r\n' + 'X-My-Custom-Header: Random' + '\r\n\r\n') + + assert 'karim@nylas.com' not in res + assert 'Runtime BCC:' in res + + +def test_inline_image_send(patch_smtp, api_client, uploaded_file_ids): + file_id = uploaded_file_ids[0] + r = api_client.post_data('/send', { + 'subject': 'Inline image test', + 'body': 'Before image\r\n[cid:{}]\r\nAfter image'.format(file_id), + 'file_ids': [file_id], + 'to': [{'name': 'Foo Bar', + 'email': 'foobar@nylas.com'}] + }) + assert r.status_code == 200 + + _, msg = patch_smtp[-1] + parsed = mime.from_string(msg) + for mimepart in parsed.walk(): + if mimepart.headers['Content-Type'] == 'image/jpeg': + assert mimepart.headers['Content-Id'] == '<{}>'.format(file_id) + assert mimepart.headers['Content-Disposition'][0] == 'inline' + + +def test_inline_html_image_send(patch_smtp, api_client, uploaded_file_ids): + file_id = uploaded_file_ids[0] + r = api_client.post_data('/send', { + 'subject': 'Inline image test', + 'body': 'Before image\r\n[cid:{}]\r\nAfter image'.format(file_id), + 'body': '
'.format(file_id), + 'file_ids': [file_id], + 'to': [{'name': 'Foo Bar', + 'email': 'foobar@nylas.com'}] + }) + assert r.status_code == 200 + + _, msg = patch_smtp[-1] + parsed = mime.from_string(msg) + for mimepart in parsed.walk(): + if mimepart.headers['Content-Type'] == 'image/jpeg': + assert mimepart.headers['Content-Id'] == '<{}>'.format(file_id) + assert mimepart.headers['Content-Disposition'][0] == 'inline' diff --git a/inbox/test/api/test_streaming.py b/inbox/test/api/test_streaming.py new file mode 100644 index 000000000..c41d5875e --- /dev/null +++ b/inbox/test/api/test_streaming.py @@ -0,0 +1,168 @@ +import json +import time +from gevent import Greenlet + +import pytest +from inbox.test.util.base import add_fake_message +from inbox.util.url import url_concat +from inbox.test.api.base import api_client + +GEVENT_EPSILON = .5 # Greenlet switching time. VMs on Macs suck :() +LONGPOLL_EPSILON = 2 + GEVENT_EPSILON # API implementation polls every second + +__all__ = ['api_client'] + + +@pytest.yield_fixture +def streaming_test_client(db): + from inbox.api.srv import app + app.config['TESTING'] = True + with app.test_client() as c: + yield c + + +def get_cursor(api_client, timestamp, namespace): + cursor_response = api_client.post_data( + '/delta/generate_cursor', + data={'start': timestamp}) + return json.loads(cursor_response.data)['cursor'] + + +def validate_response_format(response_string): + response = json.loads(response_string) + assert 'cursor' in response + assert 'attributes' in response + assert 'object' in response + assert 'id' in response + assert 'event' in response + + +def test_response_when_old_cursor_given(db, api_client, + default_namespace): + url = url_concat('/delta/streaming', {'timeout': .1, + 'cursor': '0'}) + r = api_client.get_raw(url) + assert r.status_code == 200 + responses = r.data.split('\n') + for response_string in responses: + if response_string: + validate_response_format(response_string) + + +def test_empty_response_when_latest_cursor_given(db, + api_client, + default_namespace): + cursor = get_cursor(api_client, int(time.time() + 22), + default_namespace) + url = url_concat('/delta/streaming', {'timeout': .1, + 'cursor': cursor}) + r = api_client.get_raw(url) + assert r.status_code == 200 + assert r.data.strip() == '' + + +def test_exclude_and_include_object_types(db, + api_client, thread, + default_namespace): + + add_fake_message(db.session, default_namespace.id, thread, + from_addr=[('Bob', 'bob@foocorp.com')]) + # Check that we do get message and contact changes by default. + url = url_concat('/delta/streaming', {'timeout': .1, + 'cursor': '0'}) + r = api_client.get_raw(url) + assert r.status_code == 200 + responses = r.data.split('\n') + parsed_responses = [json.loads(resp) for resp in responses if resp != ''] + assert any(resp['object'] == 'message' for resp in parsed_responses) + assert any(resp['object'] == 'contact' for resp in parsed_responses) + + # And check that we don't get message/contact changes if we exclude them. 
+ url = url_concat('/delta/streaming', {'timeout': .1, + 'cursor': '0', + 'exclude_types': 'message,contact'}) + r = api_client.get_raw(url) + assert r.status_code == 200 + responses = r.data.split('\n') + parsed_responses = [json.loads(resp) for resp in responses if resp != ''] + assert not any(resp['object'] == 'message' for resp in parsed_responses) + assert not any(resp['object'] == 'contact' for resp in parsed_responses) + + # And check we only get message objects if we use include_types + url = url_concat('/delta/streaming', {'timeout': .1, + 'cursor': '0', + 'include_types': 'message'}) + r = api_client.get_raw(url) + assert r.status_code == 200 + responses = r.data.split('\n') + parsed_responses = [json.loads(resp) for resp in responses if resp != ''] + assert all(resp['object'] == 'message' for resp in parsed_responses) + + +def test_expanded_view(db, api_client, thread, message, default_namespace): + url = url_concat('/delta/streaming', {'timeout': .1, 'cursor': '0', + 'include_types': 'message,thread', + 'view': 'expanded'}) + r = api_client.get_raw(url) + assert r.status_code == 200 + responses = r.data.split('\n') + parsed_responses = [json.loads(resp) for resp in responses if resp != ''] + for delta in parsed_responses: + if delta['object'] == 'message': + assert 'headers' in delta['attributes'] + elif delta['object'] == 'thread': + assert 'messages' in delta['attributes'] + + +def test_invalid_timestamp(api_client, default_namespace): + # Valid UNIX timestamp + response = api_client.post_data( + '/delta/generate_cursor', + data={'start': int(time.time())}) + assert response.status_code == 200 + + # Invalid timestamp + response = api_client.post_data( + '/delta/generate_cursor', + data={'start': 1434591487647}) + assert response.status_code == 400 + + +def test_longpoll_delta_newitem(db, api_client, + default_namespace, thread): + cursor = get_cursor(api_client, int(time.time() + 22), + default_namespace) + url = url_concat('/delta/longpoll', {'cursor': cursor}) + start_time = time.time() + # Spawn the request in background greenlet + longpoll_greenlet = Greenlet.spawn(api_client.get_raw, url) + # This should make it return immediately + add_fake_message(db.session, default_namespace.id, thread, + from_addr=[('Bob', 'bob@foocorp.com')]) + longpoll_greenlet.join() # now block and wait + end_time = time.time() + assert end_time - start_time < LONGPOLL_EPSILON + parsed_responses = json.loads(longpoll_greenlet.value.data) + assert len(parsed_responses['deltas']) == 3 + assert set(k['object'] for k in parsed_responses['deltas']) == \ + set([u'message', u'contact', u'thread']) + + +def test_longpoll_delta_timeout(db, api_client, + default_namespace): + test_timeout = 2 + cursor = get_cursor(api_client, int(time.time() + 22), + default_namespace) + url = url_concat('/delta/longpoll', {'timeout': test_timeout, + 'cursor': cursor}) + start_time = time.time() + resp = api_client.get_raw(url) + end_time = time.time() + assert resp.status_code == 200 + + assert end_time - start_time - test_timeout < GEVENT_EPSILON + parsed_responses = json.loads(resp.data) + assert len(parsed_responses['deltas']) == 0 + assert type(parsed_responses['deltas']) == list + assert parsed_responses['cursor_start'] == cursor + assert parsed_responses['cursor_end'] == cursor diff --git a/inbox/test/api/test_threads.py b/inbox/test/api/test_threads.py new file mode 100644 index 000000000..d8bacfb6c --- /dev/null +++ b/inbox/test/api/test_threads.py @@ -0,0 +1,149 @@ +import json +import datetime +import pytest 
+from inbox.api.ns_api import API_VERSIONS +from inbox.test.util.base import (add_fake_message, default_account, + add_fake_thread, db) +from inbox.test.api.base import api_client + +__all__ = ['db', 'api_client', 'default_account'] + + +def test_thread_received_recent_date(db, api_client, default_account): + date1 = datetime.datetime(2015, 1, 1, 0, 0, 0) + date2 = datetime.datetime(2012, 1, 1, 0, 0, 0) + + thread1 = add_fake_thread(db.session, default_account.namespace.id) + + date_dict = dict() + + add_fake_message(db.session, default_account.namespace.id, thread1, + subject="Test Thread 1", received_date=date1, + add_sent_category=True) + add_fake_message(db.session, default_account.namespace.id, thread1, + subject="Test Thread 1", received_date=date2) + + date_dict["Test Thread 1"] = date2 + + thread2 = add_fake_thread(db.session, default_account.namespace.id) + add_fake_message(db.session, default_account.namespace.id, thread2, + subject="Test Thread 2", received_date=date1, + add_sent_category=True) + + date_dict["Test Thread 2"] = date1 + + resp = api_client.get_raw('/threads/') + assert resp.status_code == 200 + threads = json.loads(resp.data) + + for thread in threads: + assert date_dict[thread['subject']] == \ + datetime.datetime.fromtimestamp( + thread['last_message_received_timestamp']) + + +def test_thread_sent_recent_date(db, api_client, default_account): + date1 = datetime.datetime(2015, 1, 1, 0, 0, 0) + date2 = datetime.datetime(2012, 1, 1, 0, 0, 0) + date3 = datetime.datetime(2010, 1, 1, 0, 0, 0) + date4 = datetime.datetime(2009, 1, 1, 0, 0, 0) + date5 = datetime.datetime(2008, 1, 1, 0, 0, 0) + + thread1 = add_fake_thread(db.session, default_account.namespace.id) + + test_subject = "test_thread_sent_recent_date" + + add_fake_message(db.session, default_account.namespace.id, thread1, + subject=test_subject, received_date=date1) + add_fake_message(db.session, default_account.namespace.id, thread1, + subject=test_subject, received_date=date2, + add_sent_category=True) + add_fake_message(db.session, default_account.namespace.id, thread1, + subject=test_subject, received_date=date3) + add_fake_message(db.session, default_account.namespace.id, thread1, + subject=test_subject, received_date=date4, + add_sent_category=True) + add_fake_message(db.session, default_account.namespace.id, thread1, + subject=test_subject, received_date=date5) + + resp = api_client.get_raw('/threads/') + assert resp.status_code == 200 + threads = json.loads(resp.data) + + for thread in threads: # should only be one + assert datetime.datetime.fromtimestamp( + thread['last_message_sent_timestamp']) == date2 + + +def test_thread_count(db, api_client, default_account): + date1 = datetime.datetime(2015, 1, 1, 0, 0, 0) + date2 = datetime.datetime(2012, 1, 1, 0, 0, 0) + date3 = datetime.datetime(2010, 1, 1, 0, 0, 0) + date4 = datetime.datetime(2009, 1, 1, 0, 0, 0) + date5 = datetime.datetime(2008, 1, 1, 0, 0, 0) + + thread1 = add_fake_thread(db.session, default_account.namespace.id) + thread2 = add_fake_thread(db.session, default_account.namespace.id) + + test_subject = "test_thread_view_count_with_category" + + for thread in [thread1, thread2]: + add_fake_message(db.session, default_account.namespace.id, thread, + subject=test_subject, received_date=date1) + add_fake_message(db.session, default_account.namespace.id, thread, + subject=test_subject, received_date=date2, + add_sent_category=True) + add_fake_message(db.session, default_account.namespace.id, thread, + subject=test_subject, received_date=date3) + 
add_fake_message(db.session, default_account.namespace.id, thread, + subject=test_subject, received_date=date4, + add_sent_category=True) + add_fake_message(db.session, default_account.namespace.id, thread, + subject=test_subject, received_date=date5) + + resp = api_client.get_raw('/threads/?view=count&in=sent') + assert resp.status_code == 200 + threads = json.loads(resp.data) + assert threads['count'] == 2 + + +@pytest.mark.parametrize("api_version", API_VERSIONS) +def test_thread_label_updates(db, api_client, default_account, api_version, + custom_label): + """Check that you can update a message (optimistically or not), + and that the update is queued in the ActionLog.""" + + headers = dict() + headers['Api-Version'] = api_version + + # Gmail threads, messages have a 'labels' field + gmail_thread = add_fake_thread(db.session, default_account.namespace.id) + gmail_message = add_fake_message(db.session, + default_account.namespace.id, gmail_thread) + + resp_data = api_client.get_data( + '/threads/{}'.format(gmail_thread.public_id), headers=headers) + + assert resp_data['labels'] == [] + + category = custom_label.category + update = dict(labels=[category.public_id]) + + resp = api_client.put_data( + '/threads/{}'.format(gmail_thread.public_id), update, + headers=headers) + + resp_data = json.loads(resp.data) + + if api_version == API_VERSIONS[0]: + assert len(resp_data['labels']) == 1 + assert resp_data['labels'][0]['id'] == category.public_id + + # Also check that the label got added to the message. + resp_data = api_client.get_data( + '/messages/{}'.format(gmail_message.public_id), headers=headers) + + assert len(resp_data['labels']) == 1 + assert resp_data['labels'][0]['id'] == category.public_id + else: + assert resp_data['labels'] == [] diff --git a/inbox/test/api/test_validation.py b/inbox/test/api/test_validation.py new file mode 100644 index 000000000..d5445bb3f --- /dev/null +++ b/inbox/test/api/test_validation.py @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- +import json +from inbox.models import Namespace +from inbox.api.validation import noop_event_update, valid_email +from inbox.test.util.base import db, calendar, add_fake_event +from inbox.test.api.base import api_client + +__all__ = ['api_client', 'db', 'calendar'] + + +# TODO(emfree): Add more comprehensive parameter-validation tests. + + +def test_account_validation(api_client, db, default_namespace): + + draft = { + 'body': '

Sea, birds and sand.

' + } + + r = api_client.post_data('/drafts', draft) + assert r.status_code == 200 + + namespace_id = json.loads(r.data)['account_id'] + account = db.session.query(Namespace).filter( + Namespace.public_id == namespace_id).first().account + + account.sync_state = 'invalid' + db.session.commit() + + r = api_client.post_data('/drafts', draft) + assert r.status_code == 403 + + +def test_noop_event_update(db, default_namespace, calendar): + event = add_fake_event(db.session, default_namespace.id, + calendar=calendar, + read_only=True) + + event.title = 'Test event' + event.participants = [{'email': 'helena@nylas.com'}, + {'email': 'benb@nylas.com'}] + + assert noop_event_update(event, {}) is True + + update = {'title': 'Test event'} + assert noop_event_update(event, update) is True + + update = {'title': 'Different'} + assert noop_event_update(event, update) is False + + update = {'location': 'Different'} + assert noop_event_update(event, update) is False + + update = {'description': 'Different'} + assert noop_event_update(event, update) is False + + update = {'when': {'start_time': 123453453, 'end_time': 1231231}} + assert noop_event_update(event, update) is False + + event.when = {'start_time': 123453453, 'end_time': 1231231} + update = {'when': {'start_time': 123453453, 'end_time': 1231231}} + assert noop_event_update(event, update) is True + + update = {'participants': [{'email': 'benb@nylas.com'}, + {'email': 'helena@nylas.com'}]} + assert noop_event_update(event, update) is True + + update = {'participants': [{'email': 'benb@nylas.com', 'status': 'yes'}, + {'email': 'helena@nylas.com'}]} + assert noop_event_update(event, update) is False + + event.participants = [{'email': 'benb@nylas.com', 'status': 'yes'}, + {'email': 'helena@nylas.com'}] + update = {'participants': [{'email': 'benb@nylas.com', 'status': 'yes'}, + {'email': 'helena@nylas.com'}]} + assert noop_event_update(event, update) is True + + +def test_valid_email(): + assert valid_email('karim@nylas.com') is True + assert valid_email('karim nylas.com') is False + # We want email addresses, not full addresses + assert valid_email('Helena Handbasket ') is False + assert valid_email('le roi de la montagne') is False + assert valid_email('le roi de la montagne@example.com') is False + assert valid_email('le-roi-de-la-montagne@example.com') is True + assert valid_email('le_roi_de_la_montagne@example.com') is True + assert valid_email('spaces with@example.com') is False diff --git a/inbox/test/api/test_views.py b/inbox/test/api/test_views.py new file mode 100644 index 000000000..09e527f38 --- /dev/null +++ b/inbox/test/api/test_views.py @@ -0,0 +1,31 @@ +# flake8: noqa: F811 +import pytest +from inbox.test.api.base import api_client, new_api_client +from inbox.test.util.base import generic_account + +__all__ = ['api_client', 'generic_account'] + + +# Label views should only work for Gmail accounts. folders 404 +@pytest.mark.parametrize('resource_name', + ['messages', 'drafts', 'files', 'events', + 'folders', 'labels', 'calendars', 'contacts']) +def test_resource_views(resource_name, db, api_client, generic_account, + message, thread, event, label, contact, folder): + """Exercises various tests for views, mostly related to + filtering. 
Note: this only tests views, it assumes the + resources are working as expected.""" + # Folders don't work with GMail accounts, need generic IMAP + if resource_name == 'folders': + api_client = new_api_client(db, generic_account.namespace) + elements = api_client.get_data('/{}'.format(resource_name)) + count = api_client.get_data('/{}?view=count'.format(resource_name)) + + assert count["count"] == len(elements) + + ids = api_client.get_data('/{}?view=ids'.format(resource_name)) + + for i, elem in enumerate(elements): + assert isinstance(ids[i], basestring), \ + "&views=ids should return string" + assert elem["id"] == ids[i], "view=ids should preserve order" diff --git a/inbox/test/auth/__init__.py b/inbox/test/auth/__init__.py new file mode 100644 index 000000000..f5d7f9802 --- /dev/null +++ b/inbox/test/auth/__init__.py @@ -0,0 +1,3 @@ +# Allow out-of-tree submodules. +from pkgutil import extend_path +__path__ = extend_path(__path__, __name__) diff --git a/inbox/test/auth/providers/__init__.py b/inbox/test/auth/providers/__init__.py new file mode 100644 index 000000000..f5d7f9802 --- /dev/null +++ b/inbox/test/auth/providers/__init__.py @@ -0,0 +1,3 @@ +# Allow out-of-tree submodules. +from pkgutil import extend_path +__path__ = extend_path(__path__, __name__) diff --git a/inbox/test/auth/providers/mock_gmail.py b/inbox/test/auth/providers/mock_gmail.py new file mode 100644 index 000000000..80eeebecc --- /dev/null +++ b/inbox/test/auth/providers/mock_gmail.py @@ -0,0 +1,75 @@ +""" +Mock provider which triggers failure at a specific step of the +authentication process depending on email entered. +Note that this uses live Gmail authentication, so auth should be called +using real email addresses with + parameters, eg. +foobar+no_all_mail@gmail.com. + +* Gmail All Mail folder missing +* Gmail Trash folder missing +* OAuth error during scope acceptance + +""" +from inbox.models import Namespace +from inbox.models.backends.gmail import GmailAccount + +from inbox.auth.gmail import GmailAuthHandler +from inbox.basicauth import (OAuthError, UserRecoverableConfigError, + GmailSettingError, ImapSupportDisabledError) + +from nylas.logging import get_logger +log = get_logger() + +PROVIDER = 'gmail' # Uses the default gmail provider info from providers.py +AUTH_HANDLER_CLS = 'MockGmailAuthHandler' + + +def raise_setting_error(folder): + raise GmailSettingError(folder) + + +def raise_imap_disabled_error(*args): + raise ImapSupportDisabledError() + + +def raise_oauth_error(e): + raise OAuthError(e) + + +fake_responses = { + 'no_all_mail': raise_setting_error, + 'no_trash': raise_setting_error, + 'oauth_fail': raise_oauth_error, + 'imap_disabled': raise_imap_disabled_error +} + + +class MockGmailAuthHandler(GmailAuthHandler): + + def create_account(self, email_address, response): + # Override create_account to persist the 'login hint' email_address + # rather than the canonical email that is contained in response. + # This allows us to trigger errors by authing with addresses of the + # format: + # foobar+no_all_mail@gmail.com + + # Since verify_config throws an Exception if no specific case is + # triggered, this account is never committed. 
+ namespace = Namespace() + account = GmailAccount(namespace=namespace) + account.email_address = email_address + + try: + self.verify_config(account) + except GmailSettingError as e: + print e + raise UserRecoverableConfigError(e) + + return account + + def verify_config(self, account): + for key, response in fake_responses.iteritems(): + if key in account.email_address: + return response(key) + # Raise an exception to prevent committing test accounts + raise Exception("Auth succeeded") diff --git a/inbox/test/auth/test_generic_auth.py b/inbox/test/auth/test_generic_auth.py new file mode 100644 index 000000000..1b6ce8e6a --- /dev/null +++ b/inbox/test/auth/test_generic_auth.py @@ -0,0 +1,296 @@ +# -*- coding: utf-8 -*- +import copy +import socket + +import pytest + +from inbox.util.url import parent_domain +from inbox.models.account import Account +from inbox.auth.generic import GenericAuthHandler +from inbox.basicauth import SettingUpdateError, ValidationError + + +settings = { + 'provider': 'custom', + 'settings': { + 'name': 'MyAOL', + 'email': 'benbitdit@aol.com', + 'imap_server_host': 'imap.aol.com', + 'imap_server_port': 143, + 'imap_username': 'benbitdit@aol.com', + 'imap_password': 'IHate2Gmail', + 'smtp_server_host': 'smtp.aol.com', + 'smtp_server_port': 587, + 'smtp_username': 'benbitdit@aol.com', + 'smtp_password': 'IHate2Gmail' + } +} + + +def test_create_account(db): + email = settings['settings']['email'] + imap_host = settings['settings']['imap_server_host'] + imap_port = settings['settings']['imap_server_port'] + smtp_host = settings['settings']['smtp_server_host'] + smtp_port = settings['settings']['smtp_server_port'] + + handler = GenericAuthHandler(settings['provider']) + + # Create an authenticated account + account = handler.create_account(email, settings['settings']) + db.session.add(account) + db.session.commit() + # Verify its settings + id_ = account.id + account = db.session.query(Account).get(id_) + assert account.imap_endpoint == (imap_host, imap_port) + assert account.smtp_endpoint == (smtp_host, smtp_port) + # Ensure that the emailed events calendar was created + assert account._emailed_events_calendar is not None + assert account._emailed_events_calendar.name == 'Emailed events' + + +def test_update_account(db): + email = settings['settings']['email'] + imap_host = settings['settings']['imap_server_host'] + imap_port = settings['settings']['imap_server_port'] + smtp_host = settings['settings']['smtp_server_host'] + smtp_port = settings['settings']['smtp_server_port'] + + handler = GenericAuthHandler(settings['provider']) + + # Create an authenticated account + account = handler.create_account(email, settings['settings']) + db.session.add(account) + db.session.commit() + id_ = account.id + + # A valid update + updated_settings = copy.deepcopy(settings) + updated_settings['settings']['name'] = 'Neu!' + account = handler.update_account(account, updated_settings['settings']) + db.session.add(account) + db.session.commit() + account = db.session.query(Account).get(id_) + assert account.name == 'Neu!' 
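+
+    # generate_endpoint_updates (defined at the bottom of this module) yields
+    # settings dicts that try to change the IMAP/SMTP hostnames; these must be
+    # rejected with SettingUpdateError and leave the stored endpoints intact.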
+ + # Invalid updates + for (attr, value, updated_settings) in generate_endpoint_updates(settings): + assert value in updated_settings['settings'].values() + with pytest.raises(SettingUpdateError): + account = handler.update_account(account, updated_settings['settings']) + db.session.add(account) + db.session.commit() + + account = db.session.query(Account).get(id_) + assert getattr(account, attr) != value + assert account.imap_endpoint == (imap_host, imap_port) + assert account.smtp_endpoint == (smtp_host, smtp_port) + + +def test_update_account_with_different_subdomain(db, monkeypatch): + # Check that you can update the server endpoints for an account + # provided that + # 1/ they're on a subdomain of the same domain name. + # 2/ they have the same IP address. + # + # To test this we use Microsoft's Office365 setup, which + # has mail.office365.com and outlook.office365.com point to + # the same address. + email = settings['settings']['email'] + settings['settings']['imap_server_host'] = 'outlook.office365.com' + settings['settings']['smtp_server_host'] = 'outlook.office365.com' + + handler = GenericAuthHandler(settings['provider']) + + # Create an authenticated account + account = handler.create_account(email, settings['settings']) + db.session.add(account) + db.session.commit() + id_ = account.id + + def gethostbyname_patch(x): + return "127.0.0.1" + + monkeypatch.setattr(socket, 'gethostbyname', gethostbyname_patch) + + # A valid update + updated_settings = copy.deepcopy(settings) + updated_settings['settings']['imap_server_host'] = 'mail.office365.com' + updated_settings['settings']['smtp_server_host'] = 'mail.office365.com' + updated_settings['settings']['name'] = 'Neu!' + account = handler.update_account(account, updated_settings['settings']) + db.session.add(account) + db.session.commit() + account = db.session.query(Account).get(id_) + assert account.name == 'Neu!' + assert account._imap_server_host == 'mail.office365.com' + assert account._smtp_server_host == 'mail.office365.com' + + +def test_update_account_when_no_server_provided(db): + email = settings['settings']['email'] + imap_host = settings['settings']['imap_server_host'] + imap_port = settings['settings']['imap_server_port'] + smtp_host = settings['settings']['smtp_server_host'] + smtp_port = settings['settings']['smtp_server_port'] + + handler = GenericAuthHandler(settings['provider']) + + account = handler.create_account(email, settings['settings']) + # On successful auth, the account's imap_server is stored. + db.session.add(account) + db.session.commit() + id_ = account.id + db.session.commit() + + # Valid updates: + # A future authentication does not include the `imap_server_host` either. + db.session.expire(account) + account = db.session.query(Account).get(id_) + + updated_settings = copy.deepcopy(settings) + del updated_settings['settings']['imap_server_host'] + del updated_settings['settings']['smtp_server_host'] + + account = handler.update_account(account, updated_settings['settings']) + db.session.add(account) + db.session.commit() + account = db.session.query(Account).get(id_) + acc_imap_host, acc_imap_port = account.imap_endpoint + assert acc_imap_host == imap_host + assert acc_imap_port == imap_port + + acc_smtp_host, acc_smtp_port = account.smtp_endpoint + assert acc_smtp_host == smtp_host + assert acc_smtp_port == smtp_port + + # A future authentication has the `imap_server_host='' + # and smtp_server_host=''`. 
+ # This is what happens in the legacy auth flow, since + # Proposal.imap_server_host and smtp_server_host will be set to u'' + # if not provided. + db.session.expire(account) + account = db.session.query(Account).get(id_) + updated_settings['settings']['imap_server_host'] = u'' + updated_settings['settings']['smtp_server_host'] = u'' + account = handler.update_account(account, updated_settings['settings']) + db.session.add(account) + db.session.commit() + account = db.session.query(Account).get(id_) + acc_imap_host, acc_imap_port = account.imap_endpoint + assert acc_imap_host == imap_host + assert acc_imap_port == imap_port + + acc_smtp_host, acc_smtp_port = account.smtp_endpoint + assert acc_smtp_host == smtp_host + assert acc_smtp_port == smtp_port + + +@pytest.mark.usefixtures('mock_smtp_get_connection') +def test_double_auth(db, mock_imapclient): + settings = { + 'provider': 'yahoo', + 'settings': { + 'name': 'Y.Y!', + 'locale': 'fr', + 'email': 'benbitdiddle1861@yahoo.com', + 'password': 'EverybodyLovesIMAPv4'} + } + email = settings['settings']['email'] + password = settings['settings']['password'] + mock_imapclient._add_login(email, password) + + handler = GenericAuthHandler(settings['provider']) + + # First authentication, using a valid password, succeeds. + valid_settings = copy.deepcopy(settings) + + account = handler.create_account(email, valid_settings['settings']) + assert handler.verify_account(account) is True + + db.session.add(account) + db.session.commit() + id_ = account.id + account = db.session.query(Account).get(id_) + assert account.email_address == email + assert account.imap_username == email + assert account.smtp_username == email + assert account.password == password + assert account.imap_password == password + assert account.smtp_password == password + + # Second auth using an invalid password should fail. 
+ invalid_settings = copy.deepcopy(settings) + invalid_settings['settings']['password'] = 'invalid_password' + with pytest.raises(ValidationError): + account = handler.update_account(account, invalid_settings['settings']) + handler.verify_account(account) + + db.session.expire(account) + + # Ensure original account is unaffected + account = db.session.query(Account).get(id_) + assert account.email_address == email + assert account.imap_username == email + assert account.smtp_username == email + assert account.password == password + assert account.imap_password == password + assert account.smtp_password == password + + +def test_parent_domain(): + assert parent_domain('x.a.com') == 'a.com' + assert parent_domain('a.com') == 'a.com' + assert parent_domain('.com') == '' + assert parent_domain('test.google.com') == 'google.com' + + assert parent_domain('smtp.example.a.com') == parent_domain('imap.example.a.com') + assert parent_domain('smtp.example.a.com') == parent_domain('imap.a.com') + + assert parent_domain('company.co.uk') != parent_domain('evilcompany.co.uk') + + +@pytest.mark.usefixtures('mock_smtp_get_connection') +def test_successful_reauth_resets_sync_state(db, mock_imapclient): + settings = { + 'provider': 'yahoo', + 'settings': { + 'name': 'Y.Y!', + 'locale': 'fr', + 'email': 'benbitdiddle1861@yahoo.com', + 'password': 'EverybodyLovesIMAPv4'} + } + email = settings['settings']['email'] + password = settings['settings']['password'] + mock_imapclient._add_login(email, password) + handler = GenericAuthHandler(settings['provider']) + + account = handler.create_account(email, settings['settings']) + assert handler.verify_account(account) is True + # Brand new accounts have `sync_state`=None. + assert account.sync_state is None + db.session.add(account) + db.session.commit() + + # Pretend account sync starts, and subsequently the password changes, + # causing the account to be in `sync_state`='invalid'. + account.mark_invalid() + db.session.commit() + assert account.sync_state == 'invalid' + + # Verify the `sync_state` is reset to 'running' on a successful "re-auth". 
+ account = handler.update_account(account, settings['settings']) + assert handler.verify_account(account) is True + assert account.sync_state == 'running' + db.session.add(account) + db.session.commit() + + +def generate_endpoint_updates(settings): + for key in ('imap_server_host', 'smtp_server_host'): + attr = '_{}'.format(key) + value = 'I.am.Malicious.{}'.format(key) + updated_settings = copy.deepcopy(settings) + updated_settings['settings'][key] = value + yield (attr, value, updated_settings) diff --git a/inbox/test/auth/test_gmail_auth.py b/inbox/test/auth/test_gmail_auth.py new file mode 100644 index 000000000..8406d67e2 --- /dev/null +++ b/inbox/test/auth/test_gmail_auth.py @@ -0,0 +1,116 @@ +import copy +import mock + +import pytest + +from inbox.models.account import Account +from inbox.auth.gmail import GmailAuthHandler +from inbox.basicauth import ImapSupportDisabledError + +settings = {'email': 't.est@gmail.com', + 'name': 'T.Est', + 'refresh_token': 'MyRefreshToken', + 'scope': '', + 'id_token': '', + 'sync_email': True, + 'contacts': False, + 'events': True} + + +@pytest.fixture +def patched_gmail_client(monkeypatch): + def raise_exc(*args, **kwargs): + raise ImapSupportDisabledError() + + monkeypatch.setattr('inbox.crispin.GmailCrispinClient.__init__', + raise_exc) + + +def test_create_account(db): + handler = GmailAuthHandler('gmail') + + # Create an account + account = handler.create_account(settings['email'], settings) + db.session.add(account) + db.session.commit() + # Verify its settings + id_ = account.id + account = db.session.query(Account).get(id_) + assert account.email_address == settings['email'] + assert account.name == settings['name'] + assert account.sync_email == settings['sync_email'] + assert account.sync_contacts == settings['contacts'] + assert account.sync_events == settings['events'] + # Ensure that the emailed events calendar was created + assert account._emailed_events_calendar is not None + assert account._emailed_events_calendar.name == 'Emailed events' + + +def test_update_account(db): + handler = GmailAuthHandler('gmail') + + # Create an account + account = handler.create_account(settings['email'], settings) + db.session.add(account) + db.session.commit() + id_ = account.id + + # Verify it is updated correctly. + updated_settings = copy.deepcopy(settings) + updated_settings['name'] = 'Neu!' + account = handler.update_account(account, updated_settings) + db.session.add(account) + db.session.commit() + account = db.session.query(Account).get(id_) + assert account.name == 'Neu!' + + +def test_verify_account(db, patched_gmail_client): + handler = GmailAuthHandler('gmail') + handler.connect_account = lambda account: None + + # Create an account with sync_email=True + account = handler.create_account(settings['email'], settings) + db.session.add(account) + db.session.commit() + assert account.sync_email is True + # Verify an exception is raised if there is an email settings error. + with pytest.raises(ImapSupportDisabledError): + handler.verify_account(account) + + # Create an account with sync_email=True + updated_settings = copy.deepcopy(settings) + updated_settings['email'] = 'another@gmail.com' + updated_settings['sync_email'] = False + account = handler.create_account(updated_settings['email'], updated_settings) + db.session.add(account) + db.session.commit() + assert account.sync_email is False + # Verify an exception is NOT raised if there is an email settings error. 
+ account = handler.verify_account(account) + + +def test_successful_reauth_resets_sync_state(monkeypatch, db): + monkeypatch.setattr('inbox.auth.gmail.GmailCrispinClient', mock.Mock()) + handler = GmailAuthHandler('gmail') + handler.connect_account = lambda account: mock.Mock() + + account = handler.create_account(settings['email'], settings) + assert handler.verify_account(account) is True + # Brand new accounts have `sync_state`=None. + assert account.sync_state is None + db.session.add(account) + db.session.commit() + + # Pretend account sync starts, and subsequently the password changes, + # causing the account to be in `sync_state`='invalid'. + account.mark_invalid() + db.session.commit() + assert account.sync_state == 'invalid' + + # Verify the `sync_state` is reset to 'running' on a successful "re-auth". + account = handler.update_account(account, settings) + assert handler.verify_account(account) is True + assert account.sync_state == 'running' + db.session.add(account) + db.session.commit() diff --git a/inbox/test/auth/test_gmail_auth_credentials.py b/inbox/test/auth/test_gmail_auth_credentials.py new file mode 100644 index 000000000..5c5c81b1a --- /dev/null +++ b/inbox/test/auth/test_gmail_auth_credentials.py @@ -0,0 +1,473 @@ +# -*- coding: UTF-8 -*- +import pytest +from sqlalchemy.orm import joinedload, object_session + +from inbox.auth.gmail import GmailAuthHandler +from inbox.models.session import session_scope +from inbox.models.account import Account +from inbox.models.backends.gmail import (GOOGLE_CALENDAR_SCOPE, + GOOGLE_CONTACTS_SCOPE, + GOOGLE_EMAIL_SCOPE, + GmailAccount) +from inbox.auth.gmail import g_token_manager +from inbox.basicauth import OAuthError, ConnectionError + +SHARD_ID = 0 +ACCESS_TOKEN = 'this_is_an_access_token' + + +@pytest.fixture +def account_with_multiple_auth_creds(db): + email = 'test_account@localhost.com' + resp = {'access_token': '', + 'expires_in': 3600, + 'email': email, + 'family_name': '', + 'given_name': '', + 'name': '', + 'gender': '', + 'id': 0, + 'user_id': '', + 'id_token': '', + 'link': 'http://example.com', + 'locale': '', + 'picture': '', + 'hd': ''} + + all_scopes = ' '.join( + [GOOGLE_CALENDAR_SCOPE, GOOGLE_CONTACTS_SCOPE, GOOGLE_EMAIL_SCOPE]) + + first_auth_args = { + 'refresh_token': 'refresh_token_1', + 'client_id': 'client_id_1', + 'client_secret': 'client_secret_1', + 'scope': all_scopes, + 'sync_contacts': True, + 'sync_events': True + } + + second_auth_args = { + 'refresh_token': 'refresh_token_2', + 'client_id': 'client_id_2', + 'client_secret': 'client_secret_2', + 'scope': GOOGLE_EMAIL_SCOPE, + 'sync_contacts': False, + 'sync_events': False + } + + g = GmailAuthHandler('gmail') + g.verify_config = lambda x: True + + resp.update(first_auth_args) + account = g.get_account(SHARD_ID, email, resp) + db.session.add(account) + db.session.commit() + + resp.update(second_auth_args) + account = g.get_account(SHARD_ID, email, resp) + db.session.add(account) + db.session.commit() + + return account + + +@pytest.fixture +def account_with_single_auth_creds(db): + email = 'test_account2@localhost.com' + resp = {'access_token': '', + 'expires_in': 3600, + 'email': email, + 'family_name': '', + 'given_name': '', + 'name': '', + 'gender': '', + 'id': 0, + 'user_id': '', + 'id_token': '', + 'link': 'http://example.com', + 'locale': '', + 'picture': '', + 'hd': '', + 'refresh_token': 'refresh_token_3', + 'client_id': 'client_id_1', + 'client_secret': 'client_secret_1', + 'scope': ' '.join([GOOGLE_CALENDAR_SCOPE, GOOGLE_EMAIL_SCOPE]), + 
'sync_contacts': False, + 'sync_events': True + } + + g = GmailAuthHandler('gmail') + g.verify_config = lambda x: True + + account = g.get_account(SHARD_ID, email, resp) + db.session.add(account) + db.session.commit() + + return account + + +@pytest.fixture +def patch_access_token_getter(monkeypatch): + class TokenGenerator: + + def __init__(self): + self.revoked_refresh_tokens = [] + self.connection_error_tokens = [] + + def new_token(self, refresh_token, client_id=None, client_secret=None): + if refresh_token in self.connection_error_tokens: + raise ConnectionError("Invalid connection!") + if refresh_token in self.revoked_refresh_tokens: + raise OAuthError("Invalid token") + expires_in = 10000 + return ACCESS_TOKEN, expires_in + + def revoke_refresh_token(self, refresh_token): + self.revoked_refresh_tokens.append(refresh_token) + + def force_connection_errors(self, refresh_token): + self.connection_error_tokens.append(refresh_token) + + token_generator = TokenGenerator() + monkeypatch.setattr('inbox.auth.oauth.OAuthAuthHandler.new_token', + token_generator.new_token) + return token_generator + + +def test_auth_revoke( + db, account_with_multiple_auth_creds, patch_access_token_getter): + account = account_with_multiple_auth_creds + refresh_token1 = account.auth_credentials[0].refresh_token + refresh_token2 = account.auth_credentials[1].refresh_token + + assert len(account.auth_credentials) == 2 + assert len(account.valid_auth_credentials) == 2 + assert account.sync_contacts is True + assert account.sync_events is True + assert account.sync_state != 'invalid' + assert account.sync_should_run is True + + patch_access_token_getter.revoke_refresh_token(refresh_token1) + with pytest.raises(OAuthError): + account.new_token(GOOGLE_CONTACTS_SCOPE) + assert account.new_token(GOOGLE_EMAIL_SCOPE).value == ACCESS_TOKEN + with pytest.raises(OAuthError): + account.new_token(GOOGLE_CALENDAR_SCOPE) + + account.verify_all_credentials() + assert len(account.auth_credentials) == 2 + assert len(account.valid_auth_credentials) == 1 + assert account.sync_contacts is False + assert account.sync_events is False + assert account.sync_state != 'invalid' + assert account.sync_should_run is True + + patch_access_token_getter.revoke_refresh_token(refresh_token2) + with pytest.raises(OAuthError): + account.new_token(GOOGLE_CONTACTS_SCOPE) + with pytest.raises(OAuthError): + account.new_token(GOOGLE_EMAIL_SCOPE) + with pytest.raises(OAuthError): + account.new_token(GOOGLE_CALENDAR_SCOPE) + + account.verify_all_credentials() + assert len(account.auth_credentials) == 2 + assert len(account.valid_auth_credentials) == 0 + assert account.sync_state == 'invalid' + assert account.sync_should_run is False + + +def test_auth_revoke_different_order( + db, account_with_multiple_auth_creds, patch_access_token_getter): + account = account_with_multiple_auth_creds + refresh_token1 = account.auth_credentials[0].refresh_token + refresh_token2 = account.auth_credentials[1].refresh_token + + assert len(account.auth_credentials) == 2 + assert len(account.valid_auth_credentials) == 2 + assert account.sync_contacts is True + assert account.sync_events is True + assert account.sync_state != 'invalid' + assert account.sync_should_run is True + + patch_access_token_getter.revoke_refresh_token(refresh_token2) + assert account.new_token(GOOGLE_EMAIL_SCOPE).value == ACCESS_TOKEN + assert account.new_token(GOOGLE_CONTACTS_SCOPE).value == ACCESS_TOKEN + assert account.new_token(GOOGLE_CALENDAR_SCOPE).value == ACCESS_TOKEN + + 
account.verify_all_credentials() + assert len(account.auth_credentials) == 2 + assert account.sync_contacts is True + assert account.sync_events is True + assert account.sync_state != 'invalid' + assert account.sync_should_run is True + assert len(account.valid_auth_credentials) == 1 + + patch_access_token_getter.revoke_refresh_token(refresh_token1) + with pytest.raises(OAuthError): + account.new_token(GOOGLE_CONTACTS_SCOPE) + with pytest.raises(OAuthError): + account.new_token(GOOGLE_EMAIL_SCOPE) + with pytest.raises(OAuthError): + account.new_token(GOOGLE_CALENDAR_SCOPE) + + account.verify_all_credentials() + assert len(account.auth_credentials) == 2 + assert len(account.valid_auth_credentials) == 0 + assert account.sync_contacts is False + assert account.sync_events is False + assert account.sync_state == 'invalid' + assert account.sync_should_run is False + + +def test_create_account(db): + email = 'vault.test@localhost.com' + resp = {'access_token': '', + 'expires_in': 3600, + 'email': email, + 'family_name': '', + 'given_name': '', + 'name': '', + 'gender': '', + 'id': 0, + 'user_id': '', + 'id_token': '', + 'link': 'http://example.com', + 'locale': '', + 'picture': '', + 'hd': ''} + + g = GmailAuthHandler('gmail') + g.verify_config = lambda x: True + + # Auth me once... + token_1 = 'the_first_token' + client_id_1 = 'first client id' + client_secret_1 = 'first client secret' + scopes_1 = 'scope scop sco sc s' + scopes_1_list = scopes_1.split(' ') + first_auth_args = { + 'refresh_token': token_1, + 'scope': scopes_1, + 'client_id': client_id_1, + 'client_secret': client_secret_1 + } + resp.update(first_auth_args) + + account = g.create_account(email, resp) + db.session.add(account) + db.session.commit() + account_id = account.id + + with session_scope(account_id) as db_session: + account = db_session.query(Account).filter( + Account.email_address == email).one() + + assert account.id == account_id + assert isinstance(account, GmailAccount) + + assert len(account.auth_credentials) == 1 + auth_creds = account.auth_credentials[0] + assert auth_creds.client_id == client_id_1 + assert auth_creds.client_secret == client_secret_1 + assert auth_creds.scopes == scopes_1_list + assert auth_creds.refresh_token == token_1 + + +def test_get_account(db): + email = 'vault.test@localhost.com' + resp = {'access_token': '', + 'expires_in': 3600, + 'email': email, + 'family_name': '', + 'given_name': '', + 'name': '', + 'gender': '', + 'id': 0, + 'user_id': '', + 'id_token': '', + 'link': 'http://example.com', + 'locale': '', + 'picture': '', + 'hd': ''} + + g = GmailAuthHandler('gmail') + g.verify_config = lambda x: True + + # Auth me once... + token_1 = 'the_first_token' + client_id_1 = 'first client id' + client_secret_1 = 'first client secret' + scopes_1 = 'scope scop sco sc s' + scopes_1_list = scopes_1.split(' ') + first_auth_args = { + 'refresh_token': token_1, + 'scope': scopes_1, + 'client_id': client_id_1, + 'client_secret': client_secret_1 + } + resp.update(first_auth_args) + + account = g.get_account(SHARD_ID, email, resp) + db.session.add(account) + db.session.commit() + + db.session.refresh(account) + assert len(account.auth_credentials) == 1 + auth_creds = account.auth_credentials[0] + assert auth_creds.client_id == client_id_1 + assert auth_creds.client_secret == client_secret_1 + assert auth_creds.scopes == scopes_1_list + assert auth_creds.refresh_token == token_1 + + # Auth me twice... + token_2 = 'second_token_!' 
+ client_id_2 = 'second client id' + client_secret_2 = 'second client secret' + scopes_2 = 'scope scop sco sc s' + scopes_2_list = scopes_2.split(' ') + second_auth_args = { + 'refresh_token': token_2, + 'scope': scopes_2, + 'client_id': client_id_2, + 'client_secret': client_secret_2 + } + resp.update(second_auth_args) + + account = g.get_account(SHARD_ID, email, resp) + db.session.merge(account) + db.session.commit() + + assert len(account.auth_credentials) == 2 + auth_creds = next((creds for creds in account.auth_credentials + if creds.refresh_token == token_2), False) + assert auth_creds + assert auth_creds.client_id == client_id_2 + assert auth_creds.client_secret == client_secret_2 + assert auth_creds.scopes == scopes_2_list + + # Don't add duplicate row in GmailAuthCredentials for the same + # client_id/client_secret pair. + resp.update(first_auth_args) + resp['refresh_token'] = 'a new refresh token' + account = g.get_account(SHARD_ID, email, resp) + db.session.merge(account) + db.session.commit() + + assert len(account.auth_credentials) == 2 + + # Should still work okay if we don't get a refresh token back + del resp['refresh_token'] + account = g.get_account(SHARD_ID, email, resp) + db.session.merge(account) + db.session.commit() + + assert len(account.auth_credentials) == 2 + + +def test_g_token_manager( + db, patch_access_token_getter, + account_with_multiple_auth_creds, + account_with_single_auth_creds): + account = account_with_multiple_auth_creds + refresh_token1 = account.auth_credentials[0].refresh_token + refresh_token2 = account.auth_credentials[1].refresh_token + g_token_manager.clear_cache(account) + + # existing account w/ multiple credentials, all valid + assert (g_token_manager.get_token(account, GOOGLE_EMAIL_SCOPE) == + ACCESS_TOKEN) + assert (g_token_manager.get_token(account, GOOGLE_CONTACTS_SCOPE) == + ACCESS_TOKEN) + assert (g_token_manager.get_token(account, GOOGLE_CALENDAR_SCOPE) == + ACCESS_TOKEN) + for auth_creds in account.auth_credentials: + assert auth_creds.is_valid + + # existing account w/ multiple credentials: some valid + patch_access_token_getter.revoke_refresh_token(refresh_token1) + g_token_manager.clear_cache(account) + + with pytest.raises(OAuthError): + g_token_manager.get_token(account, GOOGLE_CONTACTS_SCOPE) + + assert (g_token_manager.get_token(account, GOOGLE_EMAIL_SCOPE) == + ACCESS_TOKEN) + + with pytest.raises(OAuthError): + g_token_manager.get_token(account, GOOGLE_CALENDAR_SCOPE) + + # existing account w/ multiple credentials: all invalid + patch_access_token_getter.revoke_refresh_token(refresh_token2) + g_token_manager.clear_cache(account) + + with pytest.raises(OAuthError): + g_token_manager.get_token(account, GOOGLE_EMAIL_SCOPE) + with pytest.raises(OAuthError): + g_token_manager.get_token(account, GOOGLE_CALENDAR_SCOPE) + with pytest.raises(OAuthError): + g_token_manager.get_token(account, GOOGLE_CONTACTS_SCOPE) + db.session.refresh(account) + for auth_creds in account.auth_credentials: + assert not auth_creds.is_valid + + # existing account w/ one credential + account = account_with_single_auth_creds + g_token_manager.clear_cache(account) + + assert (g_token_manager.get_token(account, GOOGLE_EMAIL_SCOPE) == + ACCESS_TOKEN) + assert (g_token_manager.get_token(account, GOOGLE_CALENDAR_SCOPE) == + ACCESS_TOKEN) + with pytest.raises(OAuthError): + g_token_manager.get_token(account, GOOGLE_CONTACTS_SCOPE) + + +def test_new_token_with_non_oauth_error( + db, patch_access_token_getter, account_with_multiple_auth_creds): + account = 
account_with_multiple_auth_creds + refresh_token1 = account.auth_credentials[0].refresh_token + refresh_token2 = account.auth_credentials[1].refresh_token + g_token_manager.clear_cache(account) + + assert account.new_token(GOOGLE_EMAIL_SCOPE).value == ACCESS_TOKEN + + patch_access_token_getter.revoke_refresh_token(refresh_token1) + patch_access_token_getter.force_connection_errors(refresh_token2) + + with pytest.raises(ConnectionError): + g_token_manager.get_token(account, GOOGLE_EMAIL_SCOPE) + db.session.refresh(account) + assert len(account.valid_auth_credentials) == 1 + + +def test_invalid_token_during_connect(db, patch_access_token_getter, + account_with_single_auth_creds): + account_id = account_with_single_auth_creds.id + + patch_access_token_getter.revoke_refresh_token( + account_with_single_auth_creds.auth_credentials[0].refresh_token) + account_with_single_auth_creds.verify_all_credentials() + assert len(account_with_single_auth_creds.valid_auth_credentials) == 0 + g_token_manager.clear_cache(account_with_single_auth_creds) + + # connect_account() takes an /expunged/ account object + # that has the necessary relationships eager-loaded + object_session(account_with_single_auth_creds).expunge( + account_with_single_auth_creds) + assert not object_session(account_with_single_auth_creds) + + account = db.session.query(GmailAccount).options( + joinedload(GmailAccount.auth_credentials)).get( + account_id) + db.session.expunge(account) + assert not object_session(account) + + g = GmailAuthHandler('gmail') + + with pytest.raises(OAuthError): + g.connect_account(account) + + invalid_account = db.session.query(GmailAccount).get(account_id) + for auth_creds in invalid_account.auth_credentials: + assert not auth_creds.is_valid diff --git a/inbox/test/auth/test_imap_smtp_auth.py b/inbox/test/auth/test_imap_smtp_auth.py new file mode 100644 index 000000000..ff1c83f10 --- /dev/null +++ b/inbox/test/auth/test_imap_smtp_auth.py @@ -0,0 +1,86 @@ +import pytest + +from inbox.auth.generic import GenericAuthHandler +from inbox.basicauth import ValidationError + +creds = [ + { + 'provider': 'yahoo', + 'settings': { + 'name': 'Y.Y!', + 'locale': 'fr', + 'email': 'cypresstest@yahoo.com', + 'password': 'IHate2Gmail'} + }, + { + 'provider': 'custom', + 'settings': { + 'name': 'MyAOL', + 'email': 'benbitdit@aol.com', + 'imap_server_host': 'imap.aol.com', + 'imap_server_port': 993, + 'imap_username': 'benbitdit@aol.com', + 'imap_password': 'IHate2Gmail', + 'smtp_server_host': 'smtp.aol.com', + 'smtp_server_port': 587, + 'smtp_username': 'benbitdit@aol.com', + 'smtp_password': 'IHate2Gmail' + } + }, + { + 'provider': 'custom', + 'settings': { + 'name': 'Nylas', + 'email': 'nylastest@runbox.com', + 'imap_server_host': 'mail.runbox.com', + 'imap_server_port': 993, + 'imap_username': 'nylastest', + 'imap_password': 'IHate2Gmail!', + 'smtp_server_host': 'mail.runbox.com', + 'smtp_server_port': 587, + 'smtp_username': 'nylastest', + 'smtp_password': 'IHate2Gmail!' 
+ } + } +] + + +@pytest.mark.parametrize('creds', creds) +@pytest.mark.usefixtures('mock_smtp_get_connection') +def test_auth(creds, mock_imapclient): + imap_username = creds['settings'].get('imap_username') + if imap_username is None: + imap_username = creds['settings']['email'] + imap_password = creds['settings'].get('imap_password') + if imap_password is None: + imap_password = creds['settings']['password'] + mock_imapclient._add_login(imap_username, imap_password) + + handler = GenericAuthHandler(creds['provider']) + email = creds['settings']['email'] + account = handler.create_account(email, creds['settings']) + + # Test that the account was successfully created by the handler. + assert account.imap_password == imap_password + if 'smtp_password' in creds['settings']: + assert account.smtp_password == creds['settings']['smtp_password'] + else: + assert account.imap_password == creds['settings']['password'] + assert account.smtp_password == creds['settings']['password'] + + # Test that the account is valid. + assert handler.verify_account(account) is True + + # Test that the password can be updated... + bad_creds = {'email': creds['settings']['email'], + 'imap_password': 'bad_password', + 'imap_server_host': creds['settings'].get('imap_server_host'), + 'imap_server_port': 993, + 'smtp_server_host': creds['settings'].get('smtp_server_host'), + 'smtp_server_port': 587 + } + handler.update_account(account, bad_creds) + assert account.imap_password == 'bad_password' + # ...but logging in again won't work. + with pytest.raises(ValidationError): + handler.verify_account(account) diff --git a/inbox/test/auth/test_ssl_auth.py b/inbox/test/auth/test_ssl_auth.py new file mode 100644 index 000000000..f81dc4a76 --- /dev/null +++ b/inbox/test/auth/test_ssl_auth.py @@ -0,0 +1,119 @@ +import copy + +import pytest +from imapclient import IMAPClient + +from inbox.auth.generic import GenericAuthHandler, create_imap_connection +from inbox.sendmail.base import SendMailException +from inbox.sendmail.smtp.postel import SMTPClient +from inbox.basicauth import SSLNotSupportedError + + +settings = [ + { + 'provider': 'custom', + 'settings': { + 'name': 'MyAOL', + 'email': 'benbitdit@aol.com', + 'imap_server_host': 'imap.aol.com', + 'imap_server_port': 143, + 'imap_username': 'benbitdit@aol.com', + 'imap_password': 'IHate2Gmail', + 'smtp_server_host': 'smtp.aol.com', + 'smtp_server_port': 587, + 'smtp_username': 'benbitdit@aol.com', + 'smtp_password': 'IHate2Gmail', + 'ssl_required': True + } + }, + { + 'provider': 'custom', + 'settings': { + 'name': 'Test', + 'email': 'test@tivertical.com', + 'imap_server_host': 'tivertical.com', + 'imap_server_port': 143, + 'imap_username': 'test@tivertical.com', + 'imap_password': 'testpwd', + 'smtp_server_host': 'tivertical.com', + 'smtp_server_port': 587, + 'smtp_username': 'test@tivertical.com', + 'smtp_password': 'testpwd', + 'ssl_required': False + } + } +] + + +def _create_account(settings, ssl): + email = settings['settings']['email'] + handler = GenericAuthHandler(settings['provider']) + credentials = copy.deepcopy(settings) + credentials['settings']['ssl_required'] = ssl + account = handler.create_account(email, credentials['settings']) + return account + + +def test_account_ssl_required(): + for ssl in (True, False): + account = _create_account(settings[0], ssl) + assert account.ssl_required == ssl + + +@pytest.mark.parametrize('settings', settings) +@pytest.mark.networkrequired +def test_imap_connection(settings): + host = settings['settings']['imap_server_host'] + 
port = settings['settings']['imap_server_port'] + + conn = IMAPClient(host, port=port, use_uid=True, ssl=False, timeout=120) + + if conn.has_capability('STARTTLS'): + conn = create_imap_connection(host, port, ssl_required=True) + conn.login(settings['settings']['imap_username'], + settings['settings']['imap_password']) + else: + with pytest.raises(SSLNotSupportedError): + create_imap_connection(host, port, ssl_required=True) + conn = create_imap_connection(host, port, ssl_required=False) + conn.login(settings['settings']['imap_username'], + settings['settings']['imap_password']) + + +@pytest.mark.parametrize('settings', settings) +@pytest.mark.networkrequired +def test_smtp_connection(settings): + has_starttls = ('aol' in settings['settings']['smtp_server_host']) + + if has_starttls: + account = _create_account(settings, ssl=True) + smtp_client = SMTPClient(account) + with smtp_client._get_connection(): + pass + else: + account = _create_account(settings, ssl=True) + smtp_client = SMTPClient(account) + with pytest.raises(SendMailException): + with smtp_client._get_connection(): + pass + account = _create_account(settings, ssl=False) + smtp_client = SMTPClient(account) + with smtp_client._get_connection(): + pass + + +@pytest.mark.parametrize('settings', settings) +@pytest.mark.networkrequired +def test_auth(settings): + handler = GenericAuthHandler(settings['provider']) + + has_starttls = ('aol' in settings['settings']['imap_server_host']) + if has_starttls: + account = _create_account(settings, ssl=True) + handler.verify_account(account) + else: + account = _create_account(settings, ssl=True) + with pytest.raises(Exception): + handler.verify_account(account) + account = _create_account(settings, ssl=False) + handler.verify_account(account) diff --git a/inbox/test/conftest.py b/inbox/test/conftest.py new file mode 100644 index 000000000..fc1c7e40f --- /dev/null +++ b/inbox/test/conftest.py @@ -0,0 +1,16 @@ +""" Fixtures don't go here; see util/base.py and friends. """ +# Monkeypatch first, to prevent "AttributeError: 'module' object has no +# attribute 'poll'" errors when tests import socket, then monkeypatch. +from gevent import monkey +monkey.patch_all(aggressive=False) + +import gevent_openssl +gevent_openssl.monkey_patch() + +from inbox.test.util.base import * # noqa +from inbox.util.testutils import (mock_imapclient, # noqa + mock_smtp_get_connection, # noqa + mock_dns_resolver, # noqa + dump_dns_queries, # noqa + files, # noqa + uploaded_file_ids) # noqa diff --git a/inbox/test/contacts/test_process_mail.py b/inbox/test/contacts/test_process_mail.py new file mode 100644 index 000000000..fecc76783 --- /dev/null +++ b/inbox/test/contacts/test_process_mail.py @@ -0,0 +1,71 @@ +"""Sanity-check our logic for updating contact data from message addressees +during a sync.""" +from inbox.models import Contact +from inbox.test.util.base import add_fake_message + + +def test_update_contacts_from_message(db, default_namespace, thread): + # Check that only one Contact is created for repeatedly-referenced + # addresses. + add_fake_message(db.session, default_namespace.id, thread, + from_addr=[('', 'alpha@example.com')], + cc_addr=[('', 'alpha@example.com')]) + + assert db.session.query(Contact).filter_by( + email_address='alpha@example.com').count() == 1 + + # Check that existing Contacts are used when we process a new message + # referencing them. 
+ add_fake_message(db.session, default_namespace.id, thread, + from_addr=[('', 'alpha@example.com')], + cc_addr=[('', 'alpha@example.com')], + to_addr=[('', 'beta@example.com'), + ('', 'gamma@example.com')]) + + assert db.session.query(Contact).filter( + Contact.email_address.like('%@example.com'), + Contact.namespace_id == default_namespace.id).count() == 3 + alpha = db.session.query(Contact).filter_by( + email_address='alpha@example.com', + namespace_id=default_namespace.id).one() + assert len(alpha.message_associations) == 4 + + +def test_addresses_canonicalized(db, default_namespace, thread): + msg = add_fake_message(db.session, default_namespace.id, thread, + from_addr=[('', 'alpha.beta@gmail.com')], + cc_addr=[('', 'alphabeta@gmail.com')], + bcc_addr=[('', 'ALPHABETA@GMAIL.COM')]) + + # Because Gmail addresses with and without periods are the same, check that + # there are three MessageContactAssociation instances attached to the + # message (one each from the from/to/cc fields), but that they reference + # the same contact. + assert len(msg.contacts) == 3 + assert len(set(association.contact for association in msg.contacts)) == 1 + + +def test_handle_noreply_addresses(db, default_namespace, thread): + add_fake_message( + db.session, default_namespace.id, thread, + from_addr=[('Alice', 'drive-shares-noreply@google.com')]) + add_fake_message( + db.session, default_namespace.id, thread, + from_addr=[('Bob', 'drive-shares-noreply@google.com')]) + + noreply_contact = db.session.query(Contact).filter( + Contact.namespace == default_namespace, + Contact.email_address == 'drive-shares-noreply@google.com').one() + assert noreply_contact.name is None + + add_fake_message( + db.session, default_namespace.id, thread, + from_addr=[('Alice', 'alice@example.com')]) + add_fake_message( + db.session, default_namespace.id, thread, + from_addr=[('Alice Lastname', 'alice@example.com')]) + + contact = db.session.query(Contact).filter( + Contact.namespace == default_namespace, + Contact.email_address == 'alice@example.com').first() + assert contact.name is not None diff --git a/inbox/test/contacts/test_remote_sync.py b/inbox/test/contacts/test_remote_sync.py new file mode 100644 index 000000000..510b8f133 --- /dev/null +++ b/inbox/test/contacts/test_remote_sync.py @@ -0,0 +1,100 @@ +import pytest + +from inbox.test.util.base import (contact_sync, contacts_provider, + ContactsProviderStub) + +from inbox.models import Contact + +__all__ = ['contact_sync', 'contacts_provider'] + + +@pytest.fixture(scope='function') +def alternate_contacts_provider(): + return ContactsProviderStub('alternate_provider') + + +def test_add_contacts_case_insensitive(contacts_provider, contact_sync, db, default_namespace): + """Tests that syncing two contacts with uids that differ only in case sensitivity doesn't cause an error.""" + num_original_contacts = db.session.query(Contact). \ + filter_by(namespace_id=default_namespace.id).count() + contacts_provider._next_uid = 'foo' + contacts_provider._get_next_uid = lambda current: 'FOO' + contacts_provider.supply_contact('Contact One', + 'contact.one@email.address') + contacts_provider.supply_contact('Contact Two', + 'contact.two@email.address') + contact_sync.provider = contacts_provider + contact_sync.sync() + num_current_contacts = db.session.query(Contact). 
\ + filter_by(namespace_id=default_namespace.id).count() + assert num_current_contacts - num_original_contacts == 2 + + +def test_add_contacts(contacts_provider, contact_sync, db, default_namespace): + """Test that added contacts get stored.""" + num_original_contacts = db.session.query(Contact). \ + filter_by(namespace_id=default_namespace.id).count() + contacts_provider.supply_contact('Contact One', + 'contact.one@email.address') + contacts_provider.supply_contact('Contact Two', + 'contact.two@email.address') + + contact_sync.provider = contacts_provider + contact_sync.sync() + num_current_contacts = db.session.query(Contact). \ + filter_by(namespace_id=default_namespace.id).count() + assert num_current_contacts - num_original_contacts == 2 + + +def test_update_contact(contacts_provider, contact_sync, db): + """Test that subsequent contact updates get stored.""" + contacts_provider.supply_contact('Old Name', 'old@email.address') + contact_sync.provider = contacts_provider + contact_sync.sync() + results = db.session.query(Contact).all() + email_addresses = [r.email_address for r in results] + assert 'old@email.address' in email_addresses + + contacts_provider.__init__() + contacts_provider.supply_contact('New Name', 'new@email.address') + contact_sync.sync() + db.session.commit() + + results = db.session.query(Contact).all() + names = [r.name for r in results] + assert 'New Name' in names + email_addresses = [r.email_address for r in results] + assert 'new@email.address' in email_addresses + + +def test_deletes(contacts_provider, contact_sync, db): + num_original_contacts = db.session.query(Contact).count() + contacts_provider.supply_contact('Name', 'name@email.address') + contact_sync.provider = contacts_provider + contact_sync.sync() + num_current_contacts = db.session.query(Contact).count() + assert num_current_contacts - num_original_contacts == 1 + + contacts_provider.__init__() + contacts_provider.supply_contact(None, None, deleted=True) + contact_sync.sync() + + num_current_contacts = db.session.query(Contact).count() + assert num_current_contacts == num_original_contacts + + +def test_auth_error_handling(contact_sync, default_account, db): + """Test that the contact sync greenlet stops if account credentials are + invalid.""" + # Give the default test account patently invalid OAuth credentials. + default_account.refresh_token = 'foo' + for auth_creds in default_account.auth_credentials: + auth_creds.refresh_token = 'foo' + db.session.commit() + + contact_sync.start() + contact_sync.join(timeout=10) + success = contact_sync.successful() + if not success: + contact_sync.kill() + assert success, "contact sync greenlet didn't terminate." 
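A minimal sketch of the kind of stub provider these contact-sync tests rely on. The interface is inferred from how ContactsProviderStub is used above; the class name and method bodies below are hypothetical, not the implementation in inbox/test/util/base.py. It shows how supply_contact() together with the overridable _next_uid/_get_next_uid hooks lets a test force uid collisions such as 'foo' vs. 'FOO':

class SimpleContactsProviderStub(object):
    # Hypothetical, simplified stand-in for inbox.test.util.base.ContactsProviderStub.
    PROVIDER_NAME = 'test_provider'

    def __init__(self, provider_name=None):
        if provider_name:
            self.PROVIDER_NAME = provider_name
        self._contacts = []
        # Tests may overwrite these two attributes directly, as
        # test_add_contacts_case_insensitive does above.
        self._next_uid = 1
        self._get_next_uid = lambda current: current + 1

    def supply_contact(self, name, email_address, deleted=False):
        # Queue a fake "remote" contact. The uid comes from _next_uid, and the
        # following uid is chosen by _get_next_uid, so a test can force a
        # case-insensitive collision like 'foo' followed by 'FOO'.
        self._contacts.append({'uid': str(self._next_uid),
                               'name': name,
                               'email_address': email_address,
                               'deleted': deleted})
        self._next_uid = self._get_next_uid(self._next_uid)

    def get_items(self, sync_from_dt=None, max_results=None):
        # What a contact sync run would iterate over (signature assumed).
        return list(self._contacts)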
diff --git a/inbox/test/data/.agignore b/inbox/test/data/.agignore new file mode 100644 index 000000000..72e8ffc0d --- /dev/null +++ b/inbox/test/data/.agignore @@ -0,0 +1 @@ +* diff --git a/inbox/test/data/LetMeSendYouEmail.wav b/inbox/test/data/LetMeSendYouEmail.wav new file mode 100644 index 000000000..241a03aec Binary files /dev/null and b/inbox/test/data/LetMeSendYouEmail.wav differ diff --git a/inbox/test/data/__init__.py b/inbox/test/data/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/inbox/test/data/andra-moi-ennepe.txt b/inbox/test/data/andra-moi-ennepe.txt new file mode 100644 index 000000000..a936408d8 --- /dev/null +++ b/inbox/test/data/andra-moi-ennepe.txt @@ -0,0 +1,10 @@ +ἄνδρα μοι ἔννεπε, μοῦσα, πολύτροπον, ὃς μάλα πολλὰ +πλάγχθη, ἐπεὶ Τροίης ἱερὸν πτολίεθρον ἔπερσεν: +πολλῶν δ' ἀνθρώπων ἴδεν ἄστεα καὶ νόον ἔγνω, +πολλὰ δ' ὅ γ' ἐν πόντῳ πάθεν ἄλγεα ὃν κατὰ θυμόν, +ἀρνύμενος ἥν τε ψυχὴν καὶ νόστον ἑταίρων. 5 +ἀλλ' οὐδ' ὣς ἑτάρους ἐρρύσατο, ἱέμενός περ: +αὐτῶν γὰρ σφετέρῃσιν ἀτασθαλίῃσιν ὄλοντο, +νήπιοι, οἳ κατὰ βοῦς Ὑπερίονος Ἠελίοιο +ἤσθιον: αὐτὰρ ὁ τοῖσιν ἀφείλετο νόστιμον ἦμαρ. +τῶν ἁμόθεν γε, θεά, θύγατερ Διός, εἰπὲ καὶ ἡμῖν. diff --git a/inbox/test/data/general_test_provider_resolution.json b/inbox/test/data/general_test_provider_resolution.json new file mode 100644 index 000000000..41e21899a --- /dev/null +++ b/inbox/test/data/general_test_provider_resolution.json @@ -0,0 +1,320 @@ +{ + "mx": { + "aol.com": [ + "mailin-02.mx.aol.com.", + "mailin-01.mx.aol.com.", + "mailin-03.mx.aol.com.", + "mailin-04.mx.aol.com." + ], + "autobizbrokers.com": [ + "alt1.aspmx.l.google.com.", + "alt2.aspmx.l.google.com.", + "mail.autobizbrokers.com.", + "alt4.aspmx.l.google.com.", + "aspmx.l.google.com.", + "alt3.aspmx.l.google.com." + ], + "debuggers.co": { + "error": "NXDOMAIN" + }, + "doesnotexist.nilas.com": { + "error": "NXDOMAIN" + }, + "espertech.onmicrosoft.com": [ + "espertech.mail.protection.outlook.com." + ], + "example.com": { + "error": "NoAnswer" + }, + "exchange.mit.edu": [ + "mailsec-scanner-4.mit.edu.", + "mailsec-scanner-5.mit.edu.", + "mailsec-scanner-8.mit.edu.", + "mailsec-scanner-1.mit.edu.", + "mailsec-scanner-2.mit.edu.", + "mailsec-scanner-3.mit.edu.", + "mailsec-scanner-6.mit.edu.", + "mailsec-scanner-7.mit.edu." + ], + "fastmail.com": [ + "in1-smtp.messagingengine.com.", + "in2-smtp.messagingengine.com." + ], + "fastmail.fm": [ + "in2-smtp.messagingengine.com.", + "in1-smtp.messagingengine.com." + ], + "fastmail.net": [ + "in1-smtp.messagingengine.com.", + "in2-smtp.messagingengine.com." + ], + "forumone.com": [ + "alt1.aspmx.l.google.com.", + "alt3.aspmx.l.google.com.", + "alt4.aspmx.l.google.com.", + "alt2.aspmx.l.google.com.", + "aspmx.l.google.com." + ], + "games.com": [ + "mailin-03.mx.aol.com.", + "mailin-04.mx.aol.com.", + "mailin-02.mx.aol.com.", + "mailin-01.mx.aol.com." + ], + "gandi.net": [ + "mail4.gandi.net.", + "mail8.gandi.net." + ], + "getbannerman.com": [ + "aspmx.l.google.com.", + "alt1.aspmx.l.google.com.", + "alt2.aspmx.l.google.com.", + "aspmx2.googlemail.com.", + "aspmx3.googlemail.com." + ], + "gmail.com": [ + "alt3.gmail-smtp-in.l.google.com.", + "alt4.gmail-smtp-in.l.google.com.", + "alt1.gmail-smtp-in.l.google.com.", + "gmail-smtp-in.l.google.com.", + "alt2.gmail-smtp-in.l.google.com." + ], + "gmx.com": [ + "mx01.gmx.net.", + "mx00.gmx.net." + ], + "hotmail.com": [ + "mx4.hotmail.com.", + "mx1.hotmail.com.", + "mx2.hotmail.com.", + "mx3.hotmail.com." + ], + "hover.com": [ + "mx.hover.com.cust.hostedemail.com." 
+ ], + "icloud.com": [ + "mx6.mail.icloud.com.", + "mx5.mail.icloud.com.", + "mx4.mail.icloud.com.", + "mx3.mail.icloud.com.", + "mx2.mail.icloud.com.", + "mx1.mail.icloud.com." + ], + "inboxapp.onmicrosoft.com": { + "error": "NoAnswer" + }, + "love.com": [ + "mailin-04.mx.aol.com.", + "mailin-03.mx.aol.com.", + "mailin-02.mx.aol.com.", + "mailin-01.mx.aol.com." + ], + "mac.com": [ + "mx4.mail.icloud.com.", + "mx3.mail.icloud.com.", + "mx2.mail.icloud.com.", + "mx1.mail.icloud.com.", + "mx6.mail.icloud.com.", + "mx5.mail.icloud.com." + ], + "mrmail.com": [ + "mx1.mrmail.com.", + "mx2.mrmail.com." + ], + "noresolve.com": { + "error": "NoAnswer" + }, + "outlook.com": [ + "mx3.hotmail.com.", + "mx4.hotmail.com.", + "mx1.hotmail.com.", + "mx2.hotmail.com." + ], + "postini.com": [ + "postini.com.s8a1.psmtp.com.", + "postini.com.s8b1.psmtp.com.", + "postini.com.s8b2.psmtp.com.", + "postini.com.s8a2.psmtp.com." + ], + "yahoo.com": [ + "mta5.am0.yahoodns.net.", + "mta7.am0.yahoodns.net.", + "mta6.am0.yahoodns.net." + ], + "yahoo.se": [ + "mx-eu.mail.am0.yahoodns.net." + ], + "yandex.com": [ + "mx.yandex.ru." + ] + }, + "ns": { + "aol.com": [ + "dns-06.ns.aol.com.", + "dns-07.ns.aol.com.", + "dns-01.ns.aol.com.", + "dns-02.ns.aol.com." + ], + "autobizbrokers.com": [ + "ns1.bluehost.com.", + "ns2.bluehost.com." + ], + "debuggers.co": { + "error": "NXDOMAIN" + }, + "doesnotexist.nilas.com": { + "error": "NXDOMAIN" + }, + "espertech.onmicrosoft.com": [ + "ns1.bdm.microsoftonline.com.", + "ns2.bdm.microsoftonline.com.", + "ns3.bdm.microsoftonline.com.", + "ns4.bdm.microsoftonline.com." + ], + "example.com": [ + "a.iana-servers.net.", + "b.iana-servers.net." + ], + "exchange.mit.edu": [ + "ns1-37.akam.net.", + "use5.akam.net.", + "usw2.akam.net.", + "asia2.akam.net.", + "ns1-173.akam.net.", + "use2.akam.net.", + "eur5.akam.net.", + "asia1.akam.net." + ], + "fastmail.com": [ + "ns1.messagingengine.com.", + "ns2.messagingengine.com." + ], + "fastmail.fm": [ + "ns2.messagingengine.com.", + "ns1.messagingengine.com." + ], + "fastmail.net": [ + "ns2.messagingengine.com.", + "ns1.messagingengine.com." + ], + "forumone.com": [ + "ns.forumone.com.", + "ns2.forumone.com." + ], + "games.com": [ + "dns-01.ns.aol.com.", + "dns-02.ns.aol.com.", + "dns-07.ns.aol.com.", + "dns-06.ns.aol.com." + ], + "gandi.net": [ + "dns1.gandi.net.", + "dns2.gandi.net.", + "dns0.gandi.net.", + "dns4.gandi.net.", + "dns3.gandi.net." + ], + "getbannerman.com": [ + "ns-131.awsdns-16.com.", + "ns-1436.awsdns-51.org.", + "ns-1604.awsdns-08.co.uk.", + "ns-833.awsdns-40.net." + ], + "gmail.com": [ + "ns1.google.com.", + "ns2.google.com.", + "ns3.google.com.", + "ns4.google.com." + ], + "gmx.com": [ + "ns-gmx.ui-dns.de.", + "ns-gmx.ui-dns.biz.", + "ns-gmx.ui-dns.org.", + "ns-gmx.ui-dns.com." + ], + "hotmail.com": [ + "ns4.msft.net.", + "ns1.msft.net.", + "ns2.msft.net.", + "ns3.msft.net." + ], + "hover.com": [ + "ns2.hover.com.", + "ns1.hover.com." + ], + "icloud.com": [ + "nserver4.apple.com.", + "adns2.apple.com.", + "nserver.apple.com.", + "adns1.apple.com.", + "nserver2.apple.com.", + "nserver5.apple.com.", + "nserver3.apple.com.", + "nserver6.apple.com." + ], + "inboxapp.onmicrosoft.com": [ + "ns1.bdm.microsoftonline.com.", + "ns2.bdm.microsoftonline.com.", + "ns3.bdm.microsoftonline.com.", + "ns4.bdm.microsoftonline.com." + ], + "love.com": [ + "dns-01.ns.aol.com.", + "dns-06.ns.aol.com.", + "dns-02.ns.aol.com.", + "dns-07.ns.aol.com." 
+ ], + "mac.com": [ + "nserver2.apple.com.", + "nserver3.apple.com.", + "nserver.apple.com.", + "nserver4.apple.com.", + "adns2.apple.com.", + "adns1.apple.com.", + "nserver5.apple.com.", + "nserver6.apple.com." + ], + "mrmail.com": [ + "ns2.mrmail.com.", + "ns1.mrmail.com." + ], + "noresolve.com": [ + "sell.internettraffic.com.", + "buy.internettraffic.com." + ], + "outlook.com": [ + "ns2.msft.net.", + "ns2a.o365filtering.com.", + "ns4a.o365filtering.com.", + "ns1a.o365filtering.com.", + "ns3.msft.net.", + "ns1.msft.net.", + "ns4.msft.net." + ], + "postini.com": [ + "ns4.google.com.", + "ns3.google.com.", + "ns1.google.com.", + "ns2.google.com." + ], + "yahoo.com": [ + "ns1.yahoo.com.", + "ns6.yahoo.com.", + "ns3.yahoo.com.", + "ns4.yahoo.com.", + "ns2.yahoo.com.", + "ns5.yahoo.com." + ], + "yahoo.se": [ + "ns1.yahoo.com.", + "ns3.yahoo.com.", + "ns2.yahoo.com.", + "ns5.yahoo.com.", + "ns4.yahoo.com." + ], + "yandex.com": [ + "ns2.yandex.net.", + "ns1.yandex.net." + ] + } +} diff --git a/inbox/test/data/invite.ics b/inbox/test/data/invite.ics new file mode 100644 index 000000000..5490ffcbe --- /dev/null +++ b/inbox/test/data/invite.ics @@ -0,0 +1,48 @@ +BEGIN:VCALENDAR +PRODID:-//Google Inc//Google Calendar 70.9054//EN +VERSION:2.0 +CALSCALE:GREGORIAN +METHOD:REQUEST +BEGIN:VTIMEZONE +TZID:America/Los_Angeles +X-LIC-LOCATION:America/Los_Angeles +BEGIN:DAYLIGHT +TZOFFSETFROM:-0800 +TZOFFSETTO:-0700 +TZNAME:PDT +DTSTART:19700308T020000 +RRULE:FREQ=YEARLY;BYMONTH=3;BYDAY=2SU +END:DAYLIGHT +BEGIN:STANDARD +TZOFFSETFROM:-0700 +TZOFFSETTO:-0800 +TZNAME:PST +DTSTART:19701101T020000 +RRULE:FREQ=YEARLY;BYMONTH=11;BYDAY=1SU +END:STANDARD +END:VTIMEZONE +BEGIN:VEVENT +DTSTART;VALUE=DATE:20140810 +DTEND;VALUE=DATE:20140811 +RRULE:FREQ=WEEKLY;UNTIL=20140928;BYDAY=SU,WE,SA +DTSTAMP:20140817T214506Z +ORGANIZER;CN=Charles Gruenwald:mailto:charles@inboxapp.com +UID:3f449c2mddkrkat2e803koqgt8@google.com +ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=NEEDS-ACTION;RSVP= + TRUE;CN=John Q. 
Public;X-NUM-GUESTS=0:mailto:johnqpublic@example.com +ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=ACCEPTED;RSVP=TRUE + ;CN=Alyssa P Hacker;X-NUM-GUESTS=1:mailto:alyssaphacker@example.com +ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=DECLINED;RSVP= + TRUE;CN=benbitdit@example.com;X-NUM-GUESTS=0:mailto:benbitdit@example.com +ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=TENTATIVE;RSVP= + TRUE;CN=Filet Minyon;X-NUM-GUESTS=0:mailto:filet.minyon@example.com +CREATED:20140811T220954Z +DESCRIPTION:Event Discription +LAST-MODIFIED:20140817T214506Z +LOCATION:just some location +SEQUENCE:0 +STATUS:CONFIRMED +SUMMARY:test recurring event +TRANSP:TRANSPARENT +END:VEVENT +END:VCALENDAR diff --git a/inbox/test/data/long-non-ascii-filename.txt b/inbox/test/data/long-non-ascii-filename.txt new file mode 100644 index 000000000..8d1c8b69c --- /dev/null +++ b/inbox/test/data/long-non-ascii-filename.txt @@ -0,0 +1 @@ + diff --git a/inbox/test/data/muir.jpg b/inbox/test/data/muir.jpg new file mode 100644 index 000000000..533368dc1 Binary files /dev/null and b/inbox/test/data/muir.jpg differ diff --git a/inbox/test/data/piece-jointe.jpg b/inbox/test/data/piece-jointe.jpg new file mode 100644 index 000000000..7a147b694 Binary files /dev/null and b/inbox/test/data/piece-jointe.jpg differ diff --git a/inbox/test/data/raw_message_with_bad_attachment.txt b/inbox/test/data/raw_message_with_bad_attachment.txt new file mode 100644 index 000000000..6dc08cf4e --- /dev/null +++ b/inbox/test/data/raw_message_with_bad_attachment.txt @@ -0,0 +1,44 @@ +From: Ben Bitdiddle +To: Helena Handbasket +Subject: The Monsters of MIME +Date: Thu, 30 Apr 2015 16:55:12 +0000 +Content-Type: multipart/related; boundary="_004_143041291414210227nylascom_"; + type="multipart/alternative" +MIME-Version: 1.0 + +--_004_143041291414210227nylascom_ +Content-Disposition: attachment + +EMPTY 😊 + +--_004_143041291414210227nylascom_ +Content-Type: multipart/alternative; + boundary="_000_143041291414210227nylascom_" + +--_000_143041291414210227nylascom_ +Content-Type: text/plain; charset="utf-8" +Content-Transfer-Encoding: base64 + +Cgo+IGxvb2sKCllvdSBhcmUgaW4gYSByb29tIHdpdGggdGFsbCBjZWlsaW5ncywgYSBkaW5neSBi +bHVlIGNhcnBldCB3aXRoIHN0YWlucywKYW5kIGFuIGltcHJlc3NpdmUgc2V0IG9mIGJyaWdodGx5 +IHBhaW50ZWQgcmVkIHNpZWdlIGRvb3JzLiBTdW5saWdodApzdHJlYW1zIHRocm91Z2ggdGhlIGNv +cGlvdXMgd2luZG93cyBvbnRvIHNvbWUgY291Y2hlcyBpbiB0aGUgY29ybmVyLgpTb21lIGVtcHR5 +IHdoaXRlIGRlc2tzIG9uIHRoZSBvdGhlciBzaWRlIG9mIHRoZSByb29tIGFyZSBjb3ZlcmVkIGlu +CnBlbnMsIHBhcGVycywgc3RpY2tlcnMsIG11Z3MsIGhlYWRwaG9uZXMsIGFuZCB2YXJpb3VzIG90 +aGVyIG9kZHMgYW5kCmVuZHMuCg== + +--_000_143041291414210227nylascom_ +Content-Type: text/html; charset="utf-8" +Content-Transfer-Encoding: base64 + +PHA+CiZndDsgbG9vawo8L3A+CjxwPllvdSBhcmUgaW4gYSByb29tIHdpdGggdGFsbCBjZWlsaW5n +cywgYSBkaW5neSBibHVlIGNhcnBldCB3aXRoIHN0YWlucywKYW5kIGFuIGltcHJlc3NpdmUgc2V0 +IG9mIGJyaWdodGx5IHBhaW50ZWQgcmVkIHNpZWdlIGRvb3JzLiBTdW5saWdodApzdHJlYW1zIHRo +cm91Z2ggdGhlIGNvcGlvdXMgd2luZG93cyBvbnRvIHNvbWUgY291Y2hlcyBpbiB0aGUgY29ybmVy +LgpTb21lIGVtcHR5IHdoaXRlIGRlc2tzIG9uIHRoZSBvdGhlciBzaWRlIG9mIHRoZSByb29tIGFy +ZSBjb3ZlcmVkIGluCnBlbnMsIHBhcGVycywgc3RpY2tlcnMsIG11Z3MsIGhlYWRwaG9uZXMsIGFu +ZCB2YXJpb3VzIG90aGVyIG9kZHMgYW5kCmVuZHMuCjwvcD4K + +--_000_143041291414210227nylascom_-- + +--_004_143041291414210227nylascom_-- diff --git a/inbox/test/data/raw_message_with_filename_attachment.txt b/inbox/test/data/raw_message_with_filename_attachment.txt new file mode 100644 index 000000000..a007316cb --- /dev/null +++ 
b/inbox/test/data/raw_message_with_filename_attachment.txt @@ -0,0 +1,43 @@ +MIME-Version: 1.0 +From: +To: +Message-ID: +Subject: Entretien d'embauche +X-Mailer: Airmail (284) +MIME-Version: 1.0 +Content-Type: multipart/mixed; boundary="54a04110_507ed7ab_6dd7" + +--54a04110_507ed7ab_6dd7 +Content-Type: multipart/alternative; boundary="54a04110_3d1b58ba_6dd7" + +--54a04110_3d1b58ba_6dd7 +Content-Type: text/plain; charset="utf-8" +Content-Transfer-Encoding: quoted-printable +Content-Disposition: inline + +Au plaisir de vous parler. + + +Georges Abitbol + +--54a04110_3d1b58ba_6dd7 +Content-Type: text/html; charset="utf-8" +Content-Transfer-Encoding: quoted-printable +Content-Disposition: inline + + + + =20 + =20 + + =20 +
+ [HTML table markup of this quoted-printable part was lost in extraction; the recoverable text is:]
+ Karim Hamidou vous a invit=C3=A9 =C3=A0 =C2=AB=C2=A0Nilas test drive=C2=A0=C2=BB
+ date et heure : lundi 16 mars 2015, 06:00 CET =E2=80=93 07:00 CET
+ invit=C3=A9s : Vous
+ iCloud est un service fourni par Apple.
+ Mon identifiant Apple | Assistance | Conditions g=C3=A9n=C3=A9rales | Engagement de confidentialit=C3=A9
+ Copyright =C2=A9 2015 iTunes S.=C3=A0 r.l. 31-33, rue Sainte Zithe, L-2763 Luxembourg. Tous droits r=C3=A9serv=C3=A9s.
+ + + +------=_Part_36228589_1171711376.1426478374032-- + +------=_Part_36228588_1499615624.1426478374032 +Content-Type: text/calendar; method=REQUEST +Content-Transfer-Encoding: Base64 +Content-Disposition: attachment; filename=iCal-20150315-205933.ics + +QkVHSU46VkNBTEVOREFSDQpWRVJTSU9OOjIuMA0KUFJPRElEOi0vL0NBTEVOREFSU0VSVkVSLk9S +Ry8vTk9OU0dNTCBWZXJzaW9uIDEvL0VODQpNRVRIT0Q6UkVRVUVTVA0KQkVHSU46VlRJTUVaT05F +DQpUWklEOkV1cm9wZS9QYXJpcw0KWC1MSUMtTE9DQVRJT046RXVyb3BlL1BhcmlzDQpCRUdJTjpT +VEFOREFSRA0KRFRTVEFSVDoxODkxMDMxNVQwMDAxMDANClJEQVRFO1ZBTFVFPURBVEUtVElNRTox +ODkxMDMxNVQwMDAxMDANClRaTkFNRTpQTVQNClRaT0ZGU0VURlJPTTorMDkyMQ0KVFpPRkZTRVRU +TzorMDkyMQ0KRU5EOlNUQU5EQVJEDQpCRUdJTjpTVEFOREFSRA0KRFRTVEFSVDoxOTExMDMxMVQw +MDAxMDANClJEQVRFO1ZBTFVFPURBVEUtVElNRToxOTExMDMxMVQwMDAxMDANClRaTkFNRTpXRVNU +DQpUWk9GRlNFVEZST006KzA5MjENClRaT0ZGU0VUVE86KzAwMDANCkVORDpTVEFOREFSRA0KQkVH +SU46REFZTElHSFQNCkRUU1RBUlQ6MTkxNjA2MTRUMjMwMDAwDQpSREFURTtWQUxVRT1EQVRFLVRJ +TUU6MTkxNjA2MTRUMjMwMDAwDQpSREFURTtWQUxVRT1EQVRFLVRJTUU6MTkxNzAzMjRUMjMwMDAw +DQpSREFURTtWQUxVRT1EQVRFLVRJTUU6MTkxODAzMDlUMjMwMDAwDQpSREFURTtWQUxVRT1EQVRF +LVRJTUU6MTkxOTAzMDFUMjMwMDAwDQpSREFURTtWQUxVRT1EQVRFLVRJTUU6MTkyMDAyMTRUMjMw +MDAwDQpSREFURTtWQUxVRT1EQVRFLVRJTUU6MTkyMTAzMTRUMjMwMDAwDQpSREFURTtWQUxVRT1E +QVRFLVRJTUU6MTkyMjAzMjVUMjMwMDAwDQpSREFURTtWQUxVRT1EQVRFLVRJTUU6MTkyMzA1MjZU +MjMwMDAwDQpSREFURTtWQUxVRT1EQVRFLVRJTUU6MTkyNDAzMjlUMjMwMDAwDQpSREFURTtWQUxV +RT1EQVRFLVRJTUU6MTkyNTA0MDRUMjMwMDAwDQpSREFURTtWQUxVRT1EQVRFLVRJTUU6MTkyNjA0 +MTdUMjMwMDAwDQpSREFURTtWQUxVRT1EQVRFLVRJTUU6MTkyNzA0MDlUMjMwMDAwDQpSREFURTtW +QUxVRT1EQVRFLVRJTUU6MTkyODA0MTRUMjMwMDAwDQpSREFURTtWQUxVRT1EQVRFLVRJTUU6MTky +OTA0MjBUMjMwMDAwDQpSREFURTtWQUxVRT1EQVRFLVRJTUU6MTkzMDA0MTJUMjMwMDAwDQpSREFU +RTtWQUxVRT1EQVRFLVRJTUU6MTkzMTA0MThUMjMwMDAwDQpSREFURTtWQUxVRT1EQVRFLVRJTUU6 +MTkzMjA0MDJUMjMwMDAwDQpSREFURTtWQUxVRT1EQVRFLVRJTUU6MTkzMzAzMjVUMjMwMDAwDQpS +REFURTtWQUxVRT1EQVRFLVRJTUU6MTkzNDA0MDdUMjMwMDAwDQpSREFURTtWQUxVRT1EQVRFLVRJ +TUU6MTkzNTAzMzBUMjMwMDAwDQpSREFURTtWQUxVRT1EQVRFLVRJTUU6MTkzNjA0MThUMjMwMDAw +DQpSREFURTtWQUxVRT1EQVRFLVRJTUU6MTkzNzA0MDNUMjMwMDAwDQpSREFURTtWQUxVRT1EQVRF +LVRJTUU6MTkzODAzMjZUMjMwMDAwDQpSREFURTtWQUxVRT1EQVRFLVRJTUU6MTkzOTA0MTVUMjMw +MDAwDQpSREFURTtWQUxVRT1EQVRFLVRJTUU6MTk0MDAyMjVUMDIwMDAwDQpUWk5BTUU6V0VTVA0K +VFpPRkZTRVRGUk9NOiswMDAwDQpUWk9GRlNFVFRPOiswMTAwDQpFTkQ6REFZTElHSFQNCkJFR0lO +OlNUQU5EQVJEDQpEVFNUQVJUOjE5MTYxMDAyVDAwMDAwMA0KUlJVTEU6RlJFUT1ZRUFSTFk7VU5U +SUw9MTkxOTEwMDVUMjMwMDAwWjtCWURBWT1NTzsNCiBCWU1PTlRIREFZPTIsMyw0LDUsNiw3LDg7 +QllNT05USD0xMA0KVFpOQU1FOldFVA0KVFpPRkZTRVRGUk9NOiswMTAwDQpUWk9GRlNFVFRPOisw +MDAwDQpFTkQ6U1RBTkRBUkQNCkJFR0lOOlNUQU5EQVJEDQpEVFNUQVJUOjE5MjAxMDI0VDAwMDAw +MA0KUkRBVEU7VkFMVUU9REFURS1USU1FOjE5MjAxMDI0VDAwMDAwMA0KUkRBVEU7VkFMVUU9REFU +RS1USU1FOjE5MjExMDI2VDAwMDAwMA0KUkRBVEU7VkFMVUU9REFURS1USU1FOjE5MzkxMTE5VDAw +MDAwMA0KVFpOQU1FOldFVA0KVFpPRkZTRVRGUk9NOiswMTAwDQpUWk9GRlNFVFRPOiswMDAwDQpF +TkQ6U1RBTkRBUkQNCkJFR0lOOlNUQU5EQVJEDQpEVFNUQVJUOjE5MjIxMDA4VDAwMDAwMA0KUlJV +TEU6RlJFUT1ZRUFSTFk7VU5USUw9MTkzODEwMDFUMjMwMDAwWjtCWURBWT1TVTsNCiBCWU1PTlRI +REFZPTIsMyw0LDUsNiw3LDg7QllNT05USD0xMA0KVFpOQU1FOldFVA0KVFpPRkZTRVRGUk9NOisw +MTAwDQpUWk9GRlNFVFRPOiswMDAwDQpFTkQ6U1RBTkRBUkQNCkJFR0lOOlNUQU5EQVJEDQpEVFNU +QVJUOjE5NDAwNjE0VDIzMDAwMA0KUkRBVEU7VkFMVUU9REFURS1USU1FOjE5NDAwNjE0VDIzMDAw +MA0KVFpOQU1FOkNFU1QNClRaT0ZGU0VURlJPTTorMDEwMA0KVFpPRkZTRVRUTzorMDIwMA0KRU5E +OlNUQU5EQVJEDQpCRUdJTjpTVEFOREFSRA0KRFRTVEFSVDoxOTQyMTEwMlQwMzAwMDANClJEQVRF +O1ZBTFVFPURBVEUtVElNRToxOTQyMTEwMlQwMzAwMDANClJEQVRFO1ZBTFVFPURBVEUtVElNRTox 
+OTQzMTAwNFQwMzAwMDANClJEQVRFO1ZBTFVFPURBVEUtVElNRToxOTc2MDkyNlQwMTAwMDANClJE +QVRFO1ZBTFVFPURBVEUtVElNRToxOTc3MDkyNVQwMzAwMDANClJEQVRFO1ZBTFVFPURBVEUtVElN +RToxOTc4MTAwMVQwMzAwMDANClRaTkFNRTpDRVQNClRaT0ZGU0VURlJPTTorMDIwMA0KVFpPRkZT +RVRUTzorMDEwMA0KRU5EOlNUQU5EQVJEDQpCRUdJTjpEQVlMSUdIVA0KRFRTVEFSVDoxOTQzMDMy +OVQwMjAwMDANClJEQVRFO1ZBTFVFPURBVEUtVElNRToxOTQzMDMyOVQwMjAwMDANClJEQVRFO1ZB +TFVFPURBVEUtVElNRToxOTQ0MDQwM1QwMjAwMDANClJEQVRFO1ZBTFVFPURBVEUtVElNRToxOTc2 +MDMyOFQwMTAwMDANClRaTkFNRTpDRVNUDQpUWk9GRlNFVEZST006KzAxMDANClRaT0ZGU0VUVE86 +KzAyMDANCkVORDpEQVlMSUdIVA0KQkVHSU46U1RBTkRBUkQNCkRUU1RBUlQ6MTk0NDA4MjVUMDAw +MDAwDQpSREFURTtWQUxVRT1EQVRFLVRJTUU6MTk0NDA4MjVUMDAwMDAwDQpUWk5BTUU6V0VTVA0K +VFpPRkZTRVRGUk9NOiswMjAwDQpUWk9GRlNFVFRPOiswMjAwDQpFTkQ6U1RBTkRBUkQNCkJFR0lO +OkRBWUxJR0hUDQpEVFNUQVJUOjE5NDQxMDA4VDAxMDAwMA0KUkRBVEU7VkFMVUU9REFURS1USU1F +OjE5NDQxMDA4VDAxMDAwMA0KVFpOQU1FOldFU1QNClRaT0ZGU0VURlJPTTorMDIwMA0KVFpPRkZT +RVRUTzorMDEwMA0KRU5EOkRBWUxJR0hUDQpCRUdJTjpEQVlMSUdIVA0KRFRTVEFSVDoxOTQ1MDQw +MlQwMjAwMDANClJEQVRFO1ZBTFVFPURBVEUtVElNRToxOTQ1MDQwMlQwMjAwMDANClRaTkFNRTpX +RU1UDQpUWk9GRlNFVEZST006KzAxMDANClRaT0ZGU0VUVE86KzAyMDANCkVORDpEQVlMSUdIVA0K +QkVHSU46U1RBTkRBUkQNCkRUU1RBUlQ6MTk0NTA5MTZUMDMwMDAwDQpSREFURTtWQUxVRT1EQVRF +LVRJTUU6MTk0NTA5MTZUMDMwMDAwDQpUWk5BTUU6Q0VTVA0KVFpPRkZTRVRGUk9NOiswMjAwDQpU +Wk9GRlNFVFRPOiswMTAwDQpFTkQ6U1RBTkRBUkQNCkJFR0lOOlNUQU5EQVJEDQpEVFNUQVJUOjE5 +NzcwMTAxVDAwMDAwMA0KUkRBVEU7VkFMVUU9REFURS1USU1FOjE5NzcwMTAxVDAwMDAwMA0KVFpO +QU1FOkNFU1QNClRaT0ZGU0VURlJPTTorMDEwMA0KVFpPRkZTRVRUTzorMDEwMA0KRU5EOlNUQU5E +QVJEDQpCRUdJTjpEQVlMSUdIVA0KRFRTVEFSVDoxOTc3MDQwM1QwMjAwMDANClJSVUxFOkZSRVE9 +WUVBUkxZO1VOVElMPTE5ODAwNDA2VDAxMDAwMFo7QllEQVk9MVNVO0JZTU9OVEg9NA0KVFpOQU1F +OkNFU1QNClRaT0ZGU0VURlJPTTorMDEwMA0KVFpPRkZTRVRUTzorMDIwMA0KRU5EOkRBWUxJR0hU +DQpCRUdJTjpTVEFOREFSRA0KRFRTVEFSVDoxOTc5MDkzMFQwMzAwMDANClJSVUxFOkZSRVE9WUVB +UkxZO1VOVElMPTE5OTUwOTI0VDAxMDAwMFo7QllEQVk9LTFTVTtCWU1PTlRIPTkNClRaTkFNRTpD +RVQNClRaT0ZGU0VURlJPTTorMDIwMA0KVFpPRkZTRVRUTzorMDEwMA0KRU5EOlNUQU5EQVJEDQpC +RUdJTjpEQVlMSUdIVA0KRFRTVEFSVDoxOTgxMDMyOVQwMjAwMDANClJSVUxFOkZSRVE9WUVBUkxZ +O0JZREFZPS0xU1U7QllNT05USD0zDQpUWk5BTUU6Q0VTVA0KVFpPRkZTRVRGUk9NOiswMTAwDQpU +Wk9GRlNFVFRPOiswMjAwDQpFTkQ6REFZTElHSFQNCkJFR0lOOlNUQU5EQVJEDQpEVFNUQVJUOjE5 +OTYxMDI3VDAzMDAwMA0KUlJVTEU6RlJFUT1ZRUFSTFk7QllEQVk9LTFTVTtCWU1PTlRIPTEwDQpU +Wk5BTUU6Q0VUDQpUWk9GRlNFVEZST006KzAyMDANClRaT0ZGU0VUVE86KzAxMDANCkVORDpTVEFO +REFSRA0KRU5EOlZUSU1FWk9ORQ0KQkVHSU46VkVWRU5UDQpVSUQ6NjVCRUI0NEMtOUI5Ri00REUz +LUJBNUYtODE1MzYyOUNBMDc3DQpTVU1NQVJZOk5pbGFzIHRlc3QgZHJpdmUNCkxPQ0FUSU9OOg0K +RFRTVEFSVDtUWklEPUV1cm9wZS9QYXJpczoyMDE1MDMxNlQwNjAwMDANCkRURU5EO1RaSUQ9RXVy +b3BlL1BhcmlzOjIwMTUwMzE2VDA3MDAwMA0KTEFTVC1NT0RJRklFRDoyMDE1MDMxNlQwMzU5MzNa +DQpBVFRFTkRFRTtST0xFPVJFUS1QQVJUSUNJUEFOVDtQQVJUU1RBVD1ORUVEUy1BQ1RJT047UlNW +UD1UUlVFOm1haWx0bzoNCiBrYXJpbUBuaWxhcy5jb20NCkFUVEVOREVFO1JPTEU9Q0hBSVI7Q049 +S2FyaW0gSGFtaWRvdTtQQVJUU1RBVD1BQ0NFUFRFRDsNCiBFTUFJTD1kb250c3BhbUBraGFtaWRv +dS5jb206bWFpbHRvOmRvbnRzcGFtQGtoYW1pZG91LmNvbQ0KU0VRVUVOQ0U6MQ0KRFRTVEFNUDoy +MDE1MDMxNlQwMzU5MzNaDQpPUkdBTklaRVI7Q049S2FyaW0gSGFtaWRvdTtFTUFJTD1kb250c3Bh +bUBraGFtaWRvdS5jb206bWFpbHRvOg0KIDJfSEFZVEdNSlhHSTNESU1CVEhBWVRHTUpYR0tRWEtU +T1dCTlREWFpBNUo3T0s0WDZINFdRU0tCQ0dNR0FJV1ZJMlhIS1ZYU1NaDQogTlU0RDRAaW1pcC5t +ZS5jb20NCkVORDpWRVZFTlQNCkVORDpWQ0FMRU5EQVINCg== +------=_Part_36228588_1499615624.1426478374032-- diff --git a/inbox/test/data/raw_message_with_inline_attachment.txt b/inbox/test/data/raw_message_with_inline_attachment.txt new file mode 100644 index 000000000..e803da1a5 --- 
/dev/null +++ b/inbox/test/data/raw_message_with_inline_attachment.txt @@ -0,0 +1,47 @@ +From: Karim Hamidou +Content-Type: multipart/alternative; boundary="Apple-Mail=_E8277AD7-1E39-4BB2-9864-E1817A7092D5" +Subject: Angie +Message-Id: +Date: Fri, 14 Aug 2015 16:43:50 +0200 +To: "InboxApp, Inc." +Mime-Version: 1.0 (Mac OS X Mail 8.2 \(2102\)) +X-Mailer: Apple Mail (2.2102) + + +--Apple-Mail=_E8277AD7-1E39-4BB2-9864-E1817A7092D5 +Content-Transfer-Encoding: 7bit +Content-Type: text/plain; + charset=us-ascii + + + +you can say we never tried +--Apple-Mail=_E8277AD7-1E39-4BB2-9864-E1817A7092D5 +Content-Type: multipart/related; + type="text/html"; + boundary="Apple-Mail=_E27D59DD-9040-4516-AA44-3F9A9B0AD2A0" + + +--Apple-Mail=_E27D59DD-9040-4516-AA44-3F9A9B0AD2A0 +Content-Transfer-Encoding: 7bit +Content-Type: text/html; + charset=us-ascii + +

you can say we never tried
+--Apple-Mail=_E27D59DD-9040-4516-AA44-3F9A9B0AD2A0 +Content-Transfer-Encoding: base64 +Content-Disposition: inline; + filename*=utf-8''Capture%20d%27e%CC%81cran%202015%2D08%2D13%2020.58.24.png +Content-Type: image/png; + x-mac-hide-extension=yes; + x-unix-mode=0644; + name="=?utf-8?Q?Capture_d=27e=CC=81cran_2015-08-13_20=2E58=2E24=2Epng?=" +Content-Id: <40FD3830-B451-4CC8-BEF2-777BA245215E> + +iVBORw0KGgoAAAANSUhEUgAABkQAAAI4CAYAAAA72YViAAAMFWlDQ1BJQ0MgUHJvZmlsZQAASImV +lwdUk8kWx+crKYSEFoh0Qm+C9Cq9d6SDjZAECCWGQFCxo4sKrl1EUFR0BUTBtQCy2LA3BHtfEFFR +1sUCFlTeJAH0+d6ed96cM9/3y5177/xnMvOdGQDk7VgCQRaqAEA2P08YFeDNTEhMYpL+BDjQAFTg +/wd5AuuhBhuyKQAAAABJRU5ErkJggg== +--Apple-Mail=_E27D59DD-9040-4516-AA44-3F9A9B0AD2A0-- + +--Apple-Mail=_E8277AD7-1E39-4BB2-9864-E1817A7092D5-- diff --git a/inbox/test/data/raw_message_with_long_content_id.txt b/inbox/test/data/raw_message_with_long_content_id.txt new file mode 100644 index 000000000..1352f8405 --- /dev/null +++ b/inbox/test/data/raw_message_with_long_content_id.txt @@ -0,0 +1,19 @@ +From: from@example.com +To: to@example.com +Subject: test +Content-Type: multipart/mixed; + boundary="Apple-Mail=_A5779F7D-F2DA-42C3-BE13-48D5C62A02BD" + + +--Apple-Mail=_A5779F7D-F2DA-42C3-BE13-48D5C62A02BD +Content-Disposition: attachment; + filename=attachment.txt +Content-Type: text/plain; + name="attachment.txt" +Content-Transfer-Encoding: 7bit +Content-ID: xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx + +hi + +--Apple-Mail=_A5779F7D-F2DA-42C3-BE13-48D5C62A02BD-- + diff --git a/inbox/test/data/raw_message_with_long_message_id.txt b/inbox/test/data/raw_message_with_long_message_id.txt new file mode 100644 index 000000000..30fa1a388 --- /dev/null +++ b/inbox/test/data/raw_message_with_long_message_id.txt @@ -0,0 +1,20 @@ +From: from@example.com +To: to@example.com +Subject: test +Message-ID: <2342SFSDT45T423FSDFSDF@pervenche-5523> ! ! ! +Content-Type: multipart/mixed; + boundary="Apple-Mail=_A5779F7D-F2DA-42C3-BE13-48D5C62A02BD" + + +--Apple-Mail=_A5779F7D-F2DA-42C3-BE13-48D5C62A02BD +Content-Disposition: attachment; + filename=attachment.txt +Content-Type: text/plain; + name="attachment.txt" +Content-Transfer-Encoding: 7bit +Content-ID: xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx + +hi + +--Apple-Mail=_A5779F7D-F2DA-42C3-BE13-48D5C62A02BD-- + diff --git a/inbox/test/data/raw_message_with_many_recipients.txt b/inbox/test/data/raw_message_with_many_recipients.txt new file mode 100644 index 000000000..3a572760b --- /dev/null +++ b/inbox/test/data/raw_message_with_many_recipients.txt @@ -0,0 +1,4428 @@ +Delivered-To: freemaneben@gmail.com +Received: by 10.66.230.197 with SMTP id ta5csp290458pac; + Mon, 22 Sep 2014 10:25:47 -0700 (PDT) +X-Received: by 10.140.40.169 with SMTP id x38mr24009947qgx.73.1411406746920; + Mon, 22 Sep 2014 10:25:46 -0700 (PDT) +Return-Path: +Received: from dmz-mailsec-scanner-6.mit.edu (dmz-mailsec-scanner-6.mit.edu. 
[18.7.68.35]) + by mx.google.com with ESMTPS id 4si9701263qav.89.2014.09.22.10.25.43 + for + (version=TLSv1 cipher=RC4-SHA bits=128/128); + Mon, 22 Sep 2014 10:25:46 -0700 (PDT) +Received-SPF: softfail (google.com: domain of transitioning csail-announce-bounces@lists.csail.mit.edu does not designate 18.7.68.35 as permitted sender) client-ip=18.7.68.35; +Authentication-Results: mx.google.com; + spf=softfail (google.com: domain of transitioning csail-announce-bounces@lists.csail.mit.edu does not designate 18.7.68.35 as permitted sender) smtp.mail=csail-announce-bounces@lists.csail.mit.edu +Received: from mailhub-dmz-1.mit.edu ( [18.9.21.41]) + (using TLS with cipher AES256-SHA (256/256 bits)) + (Client did not present a certificate) + by dmz-mailsec-scanner-6.mit.edu (Symantec Messaging Gateway) with SMTP id F1.A4.13180.79B50245; Mon, 22 Sep 2014 13:25:43 -0400 (EDT) +Received: from dmz-mailsec-scanner-3.mit.edu (dmz-mailsec-scanner-3.mit.edu [18.9.25.14]) + by mailhub-dmz-1.mit.edu (8.13.8/8.9.2) with ESMTP id s8MHPFvU022947; + Mon, 22 Sep 2014 13:25:21 -0400 +X-AuditID: 12074423-f799d6d00000337c-07-54205b9792a5 +Authentication-Results: symauth.service.identifier +Received: from incoming.csail.mit.edu (incoming.csail.mit.edu [128.30.2.16]) + (using TLS with cipher AES256-SHA (256/256 bits)) + (Client did not present a certificate) + by dmz-mailsec-scanner-3.mit.edu (Symantec Messaging Gateway) with SMTP id 9B.73.13891.08B50245; Mon, 22 Sep 2014 13:25:20 -0400 (EDT) +Received: from lists.csail.mit.edu ([128.30.2.182]) + by incoming.csail.mit.edu with esmtps (TLS1.0:RSA_AES_256_CBC_SHA1:32) + (Exim 4.72) + (envelope-from ) + id 1XW7MR-0003cu-7J; Mon, 22 Sep 2014 13:25:16 -0400 +Received: from localhost ([127.0.0.1] helo=lists.csail.mit.edu) + by lists.csail.mit.edu with esmtp (Exim 4.72) + (envelope-from ) + id 1XW7MM-0007rE-Ls; Mon, 22 Sep 2014 13:25:10 -0400 +Received: from incoming.csail.mit.edu ([128.30.2.16]) + by lists.csail.mit.edu with esmtp (Exim 4.72) + (envelope-from ) + id 1XW7MJ-0007r4-VB; Mon, 22 Sep 2014 13:25:08 -0400 +Received: from outgoing.csail.mit.edu ([128.30.2.149]) + by incoming.csail.mit.edu with esmtp (Exim 4.72) + (envelope-from ) + id 1XW7MF-0003Y8-Kb; Mon, 22 Sep 2014 13:25:07 -0400 +Received: from [18.111.17.249] (helo=Tomasos-MacBook-Air-2.local) + by outgoing.csail.mit.edu with esmtpsa + (TLS1.0:DHE_RSA_AES_128_CBC_SHA1:16) (Exim 4.72) + (envelope-from ) + id 1XW7MF-0003G2-9c; Mon, 22 Sep 2014 13:25:03 -0400 +Message-ID: <54205B6D.10308@ai.mit.edu> +Date: Mon, 22 Sep 2014 13:25:01 -0400 +From: Tomaso Poggio +User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; + rv:24.0) Gecko/20100101 Thunderbird/24.6.0 +MIME-Version: 1.0 +To: x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + 
x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + 
x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + 
x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + 
x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + 
x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + 
x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + 
x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + 
x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + 
x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + 
x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + 
x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + 
x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, + x@y.co, +Subject: Fwd: Fwd: Talk: Tuesday 09-23-2014 The Integrated Information Theory + of Consciousness +References: <54204C4C.1070008@csail.mit.edu> +In-Reply-To: <54204C4C.1070008@csail.mit.edu> +X-Forwarded-Message-Id: <54204C4C.1070008@csail.mit.edu> +Content-Type: multipart/mixed; boundary="------------030907040501070005080007" +X-BeenThere: csail-announce@lists.csail.mit.edu +X-Mailman-Version: 2.1.13 +Precedence: list +List-Id: "CSAIL-related announcements." +List-Unsubscribe: , + +List-Archive: +List-Post: +List-Help: +List-Subscribe: , + +Sender: csail-announce-bounces@lists.csail.mit.edu +Errors-To: csail-announce-bounces@lists.csail.mit.edu +X-Brightmail-Tracker: H4sIAAAAAAAAA2VTf0xTVxT2vvfaXpo+d6kiR9yElS3uh9X9CrlOR7bMRKPbsi3EzJlNnvRJ + u7UFX4sRZxYa0EUzBltARiVkBicGsop1nRAQQjfdqlKZyRguMAcIGb/qHKOyoGbv9bVqsv++ + c853vu+cm3Mxa4zo0rDN6RYlp2A3afWcMSnlSXPNtoycZxomgUYP/IxoR0OThl6vHdXQRu8c + S0Nls4g2zb9FKyr9HK3aP6qjnXeqWRqo92jpnbkuhn7RekRLI1NdWnrjYoSjJ/oHGXp27CZH + K0qzX07e2OYd1G28EjnDvsm8q19nEe223aK0OjtXb52cOYEKyzbsudx9lytB0axDKAkDeQHC + ZadYFS+B3t9Pag8hPTYSLwOXpy7p1OB7BFf7rqBEh2/mllbBiDwH7eEJjUryMxAcrkBqUMLA + rT5PXGsUwd+eQDzoRHCj72w8CCL450tf3CWK4KeGuxpFmScr4GB0klEwRx6HoeYenYK1JBPq + Lv4Vc08hIgTCPazKT4ZQ7XVOwYvJdjg6fzs27SKSC3N//hvjGMlKGOnpj+EkYgZvdSenbkTh + eLcvps+SN8Az0qFR86vh19BUfOul0Np7NeZLCIH6Y22x2QzkNWg5HY69gIF4EUS+60dqYROc + Gw7pVJwFn4y1alS8GX4bGNOqDTUITtddYitRpveBJbwPDKLiLVA+1K71IizjtRBtl9R0OpQG + jrAqfhqaynuYRP7MdF08vwG+bbyg+X/+VYiO/8J+hfRN6BGLY6/ZIdjsLjHP7MoTnE5RMmet + ctjcq0RLkR8ph6tbn9mKbnebgohgZDLwuyLpOUaNsNtV7AiipZgxpfD47Ywc48IdBZZiq+Cy + bpeK7KIriB6TvYZbmntRGucscIqmxTy/SebxFqF4rygVJGjLMGdK5f1zC3OMJF9wix+KYqEo + JaoPY2wCfn6r3Jgsifninp02u/t+mcFJ8iFhgyx+XuHwrkLB4bLlq/UL6HncMjtezmDfwWgF + Y4zNkZbKY4VKFKq1yHlPLfFNJ1CqvOgifkRhGeRPfE9vQrZiZKtrP6YrVm7hfimtBH363pYX + h9birVMvec81t9PZgQ9ODXRsu5ZqtfZVv7O+NCPonv/MWoVXLKtZ0Di4ZvmCH3zZDZ9XHQ7+ + cTwz1PbN1x8t8aBpIbCvsi1vBId3+Q221907qpbX12fve+VA1/DMox0fH11TcDN3+v0nGjf7 + a8P8QyvPR1PE8ZPh/cc6d67T5Zo4l1V49ilWcgn/AXaXjqyBBAAA +X-Brightmail-Tracker: H4sIAAAAAAAAA2VTfUwTZxzmvbter02PvBw4XroJpMMscaJON/cmWxYSkrk4MpeFmOC+vNEb + NLaFtcVQ98WHjDEHiBkTS52NGtkgdojyJZOkjWzC+JKEb2EpaqM4HApUGITsrlccy/65PL/n + 97y/5/m9eY8huTFGywi5NsFi5o06Wk3lxRIwMe/d+NTtN3q24K6qVRLPd+lx38gKiTtLJgg8 + 5CqjcNPoHIknz5UR+MJpnwKf+bpfiSs7Kij8wFlF466mJRo/dM6LJ65207jm+jiBR4palLh1 + 8DKFK4YrAB66OUHhCyf9BO7zl9F4uXVVgYdnjwJc31dE4ppfjyjxxe9NuLTxhBJP3RlR4p++ + nSDxzKCHxsXjVwh8fqWZxEvfjQLcf6OaSNK9UdDdQb4N9qtf1QtGwyHBsu21A+rM+3M/guwj + 
u3P7PKtUHgjs+gaoGARfRO65x7SEAdyB2nqnFTL/FOqf/Fnk1QwHGwjknSoHcpFHoMdD+aHO + HYAe5TeGinaA/hq6Giq8AM1XuZVyEQDo+tnV4GQWPodKAvcJCVNwE/LV9SglTMNnkfP32WCS + DVBAjb09pKyPQJ0nb1MSjoJpaGauOIgj4QG0eHcpqOHgFnSrZySIVTAROSrbKXkLjM573MH5 + JHwL5d/6JbTdNjTc+SeQcQxq6R8N+kII0Q/nWoPZNDAF1V/qVUgLaKADoAdNI0Bu7EEdU51K + Ge9Cxf4WhYzfRGM3/bR84ARAl5zdpJx0B/IU1gQdFPAlVL1YRhwDcY51yznWBZTxPlTqa6Md + gBHxKyjQZpHpOFTYWE3K+HlUW9pDrPHNM84QvxtdrulS/J9PRoF7g6QLqGvBRr3pcKKJNxit + QnqiNZ03mwVL4s6tJoNtq6DPaQDi0+dUMeEtYMGj8wLIAJ2GdRXEpXIK/pDVbvKCGIbQbWCZ + d+JTufCPsvT2TN6a+aElxyhYvSBB9Jqqr+sHWsqcZRZ0USy7R9Sxet5+WLBkrcmeZihdNNuw + GJ7KwQzeJhwUhGzBstYlGKUXPMMwOsQup4mnIyxChpD7scFoW69RSR+1+NQYjWhD7ZdsrNm8 + yWrIkEVdYCdTv3CvlGDcJYFyggsm0kazjDQTStLMHPOTkWu//ADYqI1kQVhYGKcRM4nX8t/+ + NIgWrySS3SQZagxm2xO/aTEKIUb547c4KYqN/7elzQO1whVvbMDVOhvBzR6Pdbd9bn/vA41l + cuXiXF3B3SKVNuqr9uR4e+w1T/ooolL+/gLgl/tcgUfjyqP5n56i0xOSC30Dla9/9v7t8YbJ + U1nTZU6sHUjBldyxos3+haTIsyljBxMafdeWj9uNzfu4T9LqzOV7vdttxLJvPDlp4suHOsqa + yb+wmbRY+X8AFwSpA+0EAAA= + +This is a multi-part message in MIME format. +--------------030907040501070005080007 +Content-Type: multipart/alternative; + boundary="------------040108070008060704040409" + + +--------------040108070008060704040409 +Content-Type: text/plain; charset=ISO-8859-1; format=flowed +Content-Transfer-Encoding: 8bit + + + + +-------- Original Message -------- +Subject: Fwd: Talk: Tuesday 09-23-2014 The Integrated Information +Theory of Consciousness +Date: Mon, 22 Sep 2014 12:20:28 -0400 +From: Kathleen Sullivan +To: bcs-all@mit.edu + + + +*Brains, Minds and Machines Seminar Series * + + + The Integrated Information Theory of Consciousness + +Speaker: Dr. Christof Koch, Chief Scientific Officer, Allen Institute +for Brain Science +Date: Tuesday, September 23, 2014 +Time: 4:00 PM +Location: Singleton Auditorium, MIT 46-3002, 43 Vassar St., Cambridge MA +Host: Prof. Tomaso Poggio, Director CBMM + +Abstract:Â The science of consciousness has made great strides by +focusing on the behavioral and neuronal correlates of experience. +However, such correlates are not enough if we are to understand even +basic facts, for example, why the cerebral cortex gives rise to +consciousness but the cerebellum does not, though it has even more +neurons and appears to be just as complicated. Moreover, correlates are +of little help in many instances where we would like to know if +consciousness is present: patients with a few remaining islands of +functioning cortex, pre-term infants, non-mammalian species, and +machines that are rapidly outperforming people at driving, recognizing +faces and objects, and answering difficult questions. To address these +issues, we need a theory of consciousness â?? one that says what +experience is and what type of physical systems can have it. Giulio +Tononiâ??s Integrated Information Theory (IIT) does so by starting from +conscious experience itself via five phenomenological axioms of +existence, composition, information, integration, and exclusion. From +these it derives five postulates about the properties required of +physical mechanisms to support consciousness. The theory provides a +principled account of both the quantity and the quality of an individual +experience, and a calculus to evaluate whether or not a particular +system of mechanisms is conscious and of what. Moreover, IIT can explain +a range of clinical and laboratory findings, makes a number of testable +predictions, and extrapolates to a number of unusual conditions. 
In +sharp contrast with widespread functionalist beliefs, IIT implies that +digital computers, even if their behavior were to be functionally +equivalent to ours, and even if they were to run faithful simulations of +the human brain, would experience next to nothing. + +Relevant URL: http://cbmm.mit.edu/events/ + +Refreshments to be served immediately after the talk. + +See other events that are part of the Brains, Minds and Machines Seminar +Series September 2015-June 2016. + + + +-- +Kathleen D. Sullivan +Center Manager +Center for Brains, Minds and Machines (CBMM) +McGovern Institute for Brain Research at MIT +Massachusetts Institute of Technology +Department of Brain and Cognitive Sciences +Office: MIT 46-5169A +Tel.: (617) 253-0551 + + + + + + +--------------040108070008060704040409 +Content-Type: text/html; charset=ISO-8859-1 +Content-Transfer-Encoding: 7bit + + + + + + + +
+
+
+ -------- Original Message -------- + + + + + + + + + + + + + + + + + + + +
Subject: + Fwd: Talk: Tuesday 09-23-2014 The Integrated Information + Theory of Consciousness
Date: Mon, 22 Sep 2014 12:20:28 -0400
From: Kathleen Sullivan <kdsulliv@csail.mit.edu>
To: bcs-all@mit.edu <bcs-all@mit.edu>
+
+
+ + Brains, Minds and Machines Seminar Series
+
+
The Integrated Information Theory of Consciousness
+ Speaker: Dr. Christof Koch, Chief Scientific Officer, Allen + Institute for Brain Science
+ Date: Tuesday, September 23, 2014
+ Time: 4:00 PM
+ Location: Singleton Auditorium, MIT 46-3002, 43 Vassar St., + Cambridge MA
+ Host: Prof. Tomaso Poggio, Director CBMM
+
+ Abstract:  The science of consciousness has made great strides + by focusing on the behavioral and neuronal correlates of + experience. However, such correlates are not enough if we are to + understand even basic facts, for example, why the cerebral + cortex gives rise to consciousness but the cerebellum does not, + though it has even more neurons and appears to be just as + complicated. Moreover, correlates are of little help in many + instances where we would like to know if consciousness is + present: patients with a few remaining islands of functioning + cortex, pre-term infants, non-mammalian species, and machines + that are rapidly outperforming people at driving, recognizing + faces and objects, and answering difficult questions. To address + these issues, we need a theory of consciousness – one that + says what experience is and what type of physical systems can + have it. Giulio Tononi’s Integrated Information Theory (IIT) + does so by starting from conscious experience itself via five + phenomenological axioms of existence, composition, information, + integration, and exclusion. From these it derives five + postulates about the properties required of physical mechanisms + to support consciousness. The theory provides a principled + account of both the quantity and the quality of an individual + experience, and a calculus to evaluate whether or not a + particular system of mechanisms is conscious and of what. + Moreover, IIT can explain a range of clinical and laboratory + findings, makes a number of testable predictions, and + extrapolates to a number of unusual conditions. In sharp + contrast with widespread functionalist beliefs, IIT implies that + digital computers, even if their behavior were to be + functionally equivalent to ours, and even if they were to run + faithful simulations of the human brain, would experience next + to nothing.
+
+ Relevant URL: http://cbmm.mit.edu/events/ +
+
+ Refreshments to be served immediately after the talk.
+
+ See + other events that are part of the Brains, Minds and Machines + Seminar Series September 2015-June 2016. +
+
+
--
+Kathleen D. Sullivan
+Center Manager
+Center for Brains, Minds and Machines (CBMM)
+McGovern Institute for Brain Research at MIT
+Massachusetts Institute of Technology
+Department of Brain and Cognitive Sciences
+Office: MIT 46-5169A
+Tel.: (617) 253-0551
+
+
+
+
+
+
+
+ + + +--------------040108070008060704040409-- + +--------------030907040501070005080007 +Content-Type: text/plain; charset=UTF-8; + name="Attached Message Part" +Content-Transfer-Encoding: 7bit +Content-Disposition: attachment; + filename="Attached Message Part" + +_______________________________________________ +Bcs-all mailing list +Bcs-all@mit.edu +http://mailman.mit.edu/mailman/listinfo/bcs-all + + +--------------030907040501070005080007-- + diff --git a/inbox/test/data/raw_message_with_name_attachment.txt b/inbox/test/data/raw_message_with_name_attachment.txt new file mode 100644 index 000000000..653d463f9 --- /dev/null +++ b/inbox/test/data/raw_message_with_name_attachment.txt @@ -0,0 +1,42 @@ +MIME-Version: 1.0 +From: +To: +Message-ID: +Subject: Entretien d'embauche +X-Mailer: Airmail (284) +MIME-Version: 1.0 +Content-Type: multipart/mixed; boundary="54a04110_507ed7ab_6dd7" + +--54a04110_507ed7ab_6dd7 +Content-Type: multipart/alternative; boundary="54a04110_3d1b58ba_6dd7" + +--54a04110_3d1b58ba_6dd7 +Content-Type: text/plain; charset="utf-8" +Content-Transfer-Encoding: quoted-printable +Content-Disposition: inline + +Au plaisir de vous parler. + + +Georges Abitbol + +--54a04110_3d1b58ba_6dd7 +Content-Type: text/html; charset="utf-8" +Content-Transfer-Encoding: quoted-printable +Content-Disposition: inline + + + + +
+
+
+
+ + + + +--B_3524048720_5820507-- + + +--B_3524048720_5836019 +Content-type: image/png; name=" =?UTF-8?B?T3V0bG9va0Vtb2ppLfCfmIo=?=.png"; + x-mac-type="504E4766" +Content-ID: <3f0ea351-779e-48b3-bfa9-7c2a9e373aeb> +Content-disposition: attachment; + filename*=UTF-8''OutlookEmoji-%F0%9F%98%8A.png +Content-transfer-encoding: base64 + + +iVBORw0KGgoAAAANSUhEUgAAABMAAAATCAYAAAByUDbMAAAAGXRFWHRTb2Z0d2FyZQBBZG9i +ZSBJbWFnZVJlYWR5ccllPAAAAYpJREFUeNpi/P//PwO1ACM2wf9njBWAVD4QBwCxApLUAyDe +AMQTGU3OPkDRA3QUIxaD+oFUAREOaQQa2IDTMKBB84FUAgk+WwA0MBFmGBOaixJIDKYEoD64 +65igBjkge+3CrW94TUCTr4eGMdxl8TCZxMYHDIZR1xk2HPiA1SCQOEgepA4J5CMbFgAPhM1v +wfTFW9+xGgYTh6lD1s8IdKIAkH4PEz1w9jOYVpBiZ1CQZMMw7MHzXwwPnv0Esx2MeRESxmcY +WYCUAbJiFAVYAMgCbJbAvPmAWjmACZqS4aHdOOs5wdgEBcWE5a8Y0HIGPAIOwETtjXkYJqIq +xAAgeQM1ThTzkQ2biB5mC7a8xZ7kgeICvMzoYTsRJaMDY3U9chIBpaMHz34xxPsKgwMcFIsL +gckB5KL+YllkgyYAg6oQJW9Ck8h+5NgFhd3GAx/humAGI2cGIHYEGvYBW0YHGTgf2YX4MjkQ +F4IMwlkEIeXVfByGwsqzAwTLMywGg7wNcvED9AIR3TCAAAMAqh+p+YMVeBQAAAAASUVORK5C +YII= +--B_3524048720_5836019-- + + diff --git a/inbox/test/data/raw_message_with_outlook_emoji_inline.txt b/inbox/test/data/raw_message_with_outlook_emoji_inline.txt new file mode 100644 index 000000000..462906ffd --- /dev/null +++ b/inbox/test/data/raw_message_with_outlook_emoji_inline.txt @@ -0,0 +1,68 @@ +From: John Example +To: "inboxapptest.french@gmail.com" +Subject: Smiley +Thread-Topic: Smiley +Thread-Index: AQHQ5LKeWxugXw4roE+fHlPO0MpKoQ== +Date: Tue, 1 Sep 2015 12:35:01 +0000 +Message-ID: +Accept-Language: en-US, fr-FR +Content-Language: en-US +spamdiagnosticmetadata: NSPM +Content-Type: multipart/related; + boundary="_004_CY1PR10MB0521064804BF73AC6B8A8B33A06A0CY1PR10MB0521namp_"; + type="multipart/alternative" +MIME-Version: 1.0 +X-OriginatorOrg: nylas.com +X-MS-Exchange-CrossTenant-originalarrivaltime: 01 Sep 2015 12:35:01.7780 + (UTC) +X-MS-Exchange-CrossTenant-fromentityheader: Hosted +X-MS-Exchange-CrossTenant-id: b6c65e69-9405-47aa-a7f8-15d1677da46c +X-MS-Exchange-Transport-CrossTenantHeadersStamped: CY1PR10MB0524 + +--_004_CY1PR10MB0521064804BF73AC6B8A8B33A06A0CY1PR10MB0521namp_ +Content-Type: multipart/alternative; + boundary="_000_CY1PR10MB0521064804BF73AC6B8A8B33A06A0CY1PR10MB0521namp_" + +--_000_CY1PR10MB0521064804BF73AC6B8A8B33A06A0CY1PR10MB0521namp_ +Content-Type: text/plain; charset="utf-8" +Content-Transfer-Encoding: base64 + +W/CfmIpdDQo= + +--_000_CY1PR10MB0521064804BF73AC6B8A8B33A06A0CY1PR10MB0521namp_ +Content-Type: text/html; charset="utf-8" +Content-Transfer-Encoding: base64 + +PGh0bWw+DQo8aGVhZD4NCjxtZXRhIGh0dHAtZXF1aXY9IkNvbnRlbnQtVHlwZSIgY29udGVudD0i +dGV4dC9odG1sOyBjaGFyc2V0PXV0Zi04Ij4NCjxzdHlsZSB0eXBlPSJ0ZXh0L2NzcyIgc3R5bGU9 +ImRpc3BsYXk6bm9uZTsiPjwhLS0gUCB7bWFyZ2luLXRvcDowO21hcmdpbi1ib3R0b206MDt9IC0t +Pjwvc3R5bGU+DQo8L2hlYWQ+DQo8Ym9keSBkaXI9Imx0ciI+DQo8ZGl2IGlkPSJkaXZ0YWdkZWZh +dWx0d3JhcHBlciIgc3R5bGU9ImZvbnQtc2l6ZToxMnB0O2NvbG9yOiMwMDAwMDA7YmFja2dyb3Vu +ZC1jb2xvcjojRkZGRkZGO2ZvbnQtZmFtaWx5OkNhbGlicmksQXJpYWwsSGVsdmV0aWNhLHNhbnMt +c2VyaWY7Ij4NCjxwPjxpbWcgY2xhc3M9IkVtb2ppSW5zZXJ0IiBzcmM9ImNpZDozZjBlYTM1MS03 +NzllLTQ4YjMtYmZhOS03YzJhOWUzNzNhZWIiIGFsdD0i8J+YiiIgc3R5bGU9InZlcnRpY2FsLWFs +aWduOmJvdHRvbSI+PGJyPg0KPC9wPg0KPC9kaXY+DQo8L2JvZHk+DQo8L2h0bWw+DQo= + +--_000_CY1PR10MB0521064804BF73AC6B8A8B33A06A0CY1PR10MB0521namp_-- + +--_004_CY1PR10MB0521064804BF73AC6B8A8B33A06A0CY1PR10MB0521namp_ +Content-Type: image/png; name="=?utf-8?B?T3V0bG9va0Vtb2ppLfCfmIoucG5n?=" +Content-Description: =?utf-8?B?T3V0bG9va0Vtb2ppLfCfmIoucG5n?= +Content-Disposition: inline; + 
filename="=?utf-8?B?T3V0bG9va0Vtb2ppLfCfmIoucG5n?="; size=488; + creation-date="Tue, 01 Sep 2015 12:35:01 GMT"; + modification-date="Tue, 01 Sep 2015 12:35:01 GMT" +Content-ID: <3f0ea351-779e-48b3-bfa9-7c2a9e373aeb> +Content-Transfer-Encoding: base64 + +iVBORw0KGgoAAAANSUhEUgAAABMAAAATCAYAAAByUDbMAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJ +bWFnZVJlYWR5ccllPAAAAYpJREFUeNpi/P//PwO1ACM2wf9njBWAVD4QBwCxApLUAyDeAMQTGU3O +PkDRA3QUIxaD+oFUAREOaQQa2IDTMKBB84FUAgk+WwA0MBFmGBOaixJIDKYEoD6465igBjkge+3C +rW94TUCTr4eGMdxl8TCZxMYHDIZR1xk2HPiA1SCQOEgepA4J5CMbFgAPhM1vwfTFW9+xGgYTh6lD +1s8IdKIAkH4PEz1w9jOYVpBiZ1CQZMMw7MHzXwwPnv0Esx2MeRESxmcYWYCUAbJiFAVYAMgCbJbA +vPmAWjmACZqS4aHdOOs5wdgEBcWE5a8Y0HIGPAIOwETtjXkYJqIqxAAgeQM1ThTzkQ2biB5mC7a8 +xZ7kgeICvMzoYTsRJaMDY3U9chIBpaMHz34xxPsKgwMcFIsLgckB5KL+YllkgyYAg6oQJW9Ck8h+ +5NgFhd3GAx/humAGI2cGIHYEGvYBW0YHGTgf2YX4MjkQF4IMwlkEIeXVfByGwsqzAwTLMywGg7wN +cvED9AIR3TCAAAMAqh+p+YMVeBQAAAAASUVORK5CYII= + +--_004_CY1PR10MB0521064804BF73AC6B8A8B33A06A0CY1PR10MB0521namp_-- diff --git a/inbox/test/data/self_signed_cert.key b/inbox/test/data/self_signed_cert.key new file mode 100644 index 000000000..bb6d212db --- /dev/null +++ b/inbox/test/data/self_signed_cert.key @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDLEOPZuuZytAMe +rhRaYLnx1NJKqXK7pmLZ5pRSw5rw1Y3J2LzKnd7uxWHfSX/0RH8cXvqN/PukJGZW +/fo3WpaGnuCWZ4aFulyKfLxTHAcHRZRlt5thrI9pd44B9NP22YNZ9TZa+ZldLS9p +kFba8zvk0BxBGlnFWdl7XQnHhhinHoV5Yas1tajq007N3n7xy5FCW9ErKdV3dtQr +ZstquAeQY7rc7Nb79gkGIT7Oa/Qcq0BKaC9aX6MAxm1i78t8l7UdvEYIGTnFsIMZ +GHQu5AU/RdwdXqSxfHntggYSYngJgUlt/knpysFgOdbEcaOrj1TVpQ8+YvH++LAn +dokpoOblAgMBAAECggEBAIvVw7HJvKu8PnHAGo/qD+HyViXH7hQ+jc/TsswTmvzB +cnRO5iL49+2gyrcCXDSPfqDA3vUsAvWTg03gc4/ut71YZvvrqn0oSlWrc9wfbs+Z +IImzGDL4q4FLLOU3/Ub9njAyZg3b17qMRRwCo2z/Fn2Iugwy4yK9rr/+358gPpXF +ehZ8eALU4JqIX6/9ifUT9jlM/Okd51CNQUfWa3+2LQ401tJe+3DWAe3yABXyG2eu +RxldqVAcIRl5DR8kbTNmRRaEqeh6WiPRVHjrr19EIXX/FaaoNPYbHPilZeOoc7ec +EOZGJ3X4A4kZLQHxZwNBbe3qvaI/GNWl9tQUx0St7bkCgYEA89mIWRf1RgEQrNPI +9qvva5MutsTw6CQEneuTXCEVXKyo6+WI4ScWa0X5wEBKgBJdIpwLih0FJCqTOiWC +MIhAyNk+AV5QiztXJ+/mmZjW0GMM/v0DaKyXFJVlFitlUsx67TGJF2FkpF5fZejy +okWLy3Rl/YM2iO3RUvVmmezu2ycCgYEA1S8iFbVCTRyDJRPjFNV/3vPOfA520qyJ +BC14ra81hjWBTmAPglcmu9OTyygu8jh4Qhm/16Kty4ItlLzhZtvzZttwniVosC86 +nu8+Wrj5bZK8yRTL8Bi6Cl62tzGo48EyA1LNLe0Q6kcFDVUrQFBr+NY+W2lNxRrT +lv+C2gw2JRMCgYEAw0N7ElTD/FWYA7znXNdbNdo6+vIDMvC8nbRpYMnBMF+2OIeq +Lxk8s5PqJQ+WWK3b1H3+I4+PcWGCWXp1oOVKc0q6gyACASv4Y6uQDYi840uduYXj +N+uUY1AqDTGcxkulV9BlxcogbBqB67o4qMZl5fqEPRfYEJfXGcYWbcWzktsCgYA/ +fIrdBh7Zkf0oRJTitnYfMmIypagTM1iWiugeaUg6UKLScV8NBOE/HFIuNShN8C04 +Giz8jbDOI1Vprxo9JJGG9yDBqAhWN6pwfHUIVX/KduGLzIqzVNnKjsxBICPYt8cq +IDWEUM4xxralctCvlYOiFkN5u5Fyenu6wxr3zp0tSwKBgAnrLLjW7Wp4fHGD8ksh +ifah/f6ZRBHj3Odmo4eJEA+Ayg72vtSOS+1dBuPHZ0m0J2erKg5090zm4z/x8Ymc +/94cDMDb9kt83EOPpmLRFYhTy5UDS9M7syjLJNOCKJgiIZ/cLvcMwBVl88SwqvHt +PSurgvTulkeDm4qDUZizAnAT +-----END PRIVATE KEY----- diff --git a/inbox/test/data/self_signed_cert.pem b/inbox/test/data/self_signed_cert.pem new file mode 100644 index 000000000..2b82f9174 --- /dev/null +++ b/inbox/test/data/self_signed_cert.pem @@ -0,0 +1,22 @@ +-----BEGIN CERTIFICATE----- +MIIDszCCApugAwIBAgIJAMp7rFcHjiqWMA0GCSqGSIb3DQEBCwUAMHAxCzAJBgNV +BAYTAlVTMRMwEQYDVQQIDApDYWxpZm9ybmlhMQ4wDAYDVQQKDAVOeWxhczEcMBoG +A1UEAwwTc210cC10ZXN0Lm55bGFzLmNvbTEeMBwGCSqGSIb3DQEJARYPYWRtaW5A +bnlsYXMuY29tMB4XDTE1MDUwOTE2NDg1OVoXDTQyMDkyMzE2NDg1OVowcDELMAkG +A1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExDjAMBgNVBAoMBU55bGFzMRww 
+GgYDVQQDDBNzbXRwLXRlc3QubnlsYXMuY29tMR4wHAYJKoZIhvcNAQkBFg9hZG1p +bkBueWxhcy5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDLEOPZ +uuZytAMerhRaYLnx1NJKqXK7pmLZ5pRSw5rw1Y3J2LzKnd7uxWHfSX/0RH8cXvqN +/PukJGZW/fo3WpaGnuCWZ4aFulyKfLxTHAcHRZRlt5thrI9pd44B9NP22YNZ9TZa ++ZldLS9pkFba8zvk0BxBGlnFWdl7XQnHhhinHoV5Yas1tajq007N3n7xy5FCW9Er +KdV3dtQrZstquAeQY7rc7Nb79gkGIT7Oa/Qcq0BKaC9aX6MAxm1i78t8l7UdvEYI +GTnFsIMZGHQu5AU/RdwdXqSxfHntggYSYngJgUlt/knpysFgOdbEcaOrj1TVpQ8+ +YvH++LAndokpoOblAgMBAAGjUDBOMB0GA1UdDgQWBBTuM0tI40j/zwG9NVuBwVDP +q9KVqjAfBgNVHSMEGDAWgBTuM0tI40j/zwG9NVuBwVDPq9KVqjAMBgNVHRMEBTAD +AQH/MA0GCSqGSIb3DQEBCwUAA4IBAQBA9Qs8ZIltCE43p3i+efK6csMrQcmeTtnb +4gb8QjJW3sLoYBuszHj78nBjrCcvqEOl+/4jBzEJ409A777SAwh6zzgtVgPaEOPX +rWNJFMUtA5WYgUlONadFoNW2qMZr2P2ohqctC8ws5kDCtmh/lzzvCb0OTivTGjoH +0NA7wCcA80p5QABm4QjVMoreUeJX0KRB2k1yjYlMPaRQh1pChfAxruL0/1aiIhSa +wzrF+R29tZ/1mUuhHOvVqVztoppqwavWWUMEvK8eqyHQSFxYIeBNAAWP3IlNaPpf +Zgiid15nyArsQVZGY4FfR75SJvO9OYOVSKnjKRyFUDbmaizqWI9H +-----END CERTIFICATE----- diff --git a/inbox/test/events/fixtures/bogus_sequence_number.ics b/inbox/test/events/fixtures/bogus_sequence_number.ics new file mode 100644 index 000000000..3a7ba845f --- /dev/null +++ b/inbox/test/events/fixtures/bogus_sequence_number.ics @@ -0,0 +1,31 @@ +BEGIN:VCALENDAR +PRODID:-//Google Inc//Google Calendar 70.9054//EN +VERSION:2.0 +CALSCALE:GREGORIAN +METHOD:REQUEST +BEGIN:VEVENT +DTSTART:20150401T140000Z +DTEND:20150401T143000Z +DTSTAMP:20150331T124344Z +ORGANIZER;CN=Inbox Apptest2:mailto:test2@example.com +UID:234252cccc@google.com +ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=DECLINED;CN=Karim + Hamidou;X-NUM-GUESTS=0:mailto:karim@example.com +CREATED:20150331T124302Z +DESCRIPTION: +LAST-MODIFIED:20150331T124344Z +LOCATION: +SEQUENCE:1407317367304 +STATUS:CONFIRMED +SUMMARY:Spot le chien +TRANSP:OPAQUE +X-MICROSOFT-CDO-ALLDAYEVENT:FALSE +X-MICROSOFT-CDO-APPT-SEQUENCE:0 +X-MICROSOFT-CDO-BUSYSTATUS:TENTATIVE +X-MICROSOFT-CDO-IMPORTANCE:1 +X-MICROSOFT-CDO-INSTTYPE:0 +X-MICROSOFT-CDO-INTENDEDSTATUS:BUSY +X-MICROSOFT-CDO-OWNERAPPTID:2113142777 +X-MICROSOFT-DISALLOW-COUNTER:FALSE +END:VEVENT +END:VCALENDAR diff --git a/inbox/test/events/fixtures/event_with_no_participants.ics b/inbox/test/events/fixtures/event_with_no_participants.ics new file mode 100644 index 000000000..ae9c80bd5 --- /dev/null +++ b/inbox/test/events/fixtures/event_with_no_participants.ics @@ -0,0 +1,19 @@ +BEGIN:VCALENDAR +VERSION:2.0 +PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN +METHOD:REQUEST +BEGIN:VEVENT +UID:8153F823-B9F1-4BE0-ADFB-5FEEB01C08A9 +SUMMARY:An all-day meeting about meetings +SEQUENCE:0 +LOCATION:1\, Infinite Loop +DTSTART;VALUE=DATE:20150316 +DTEND;VALUE=DATE:20150317 +TRANSP:TRANSPARENT +LAST-MODIFIED:20150312T181936Z +DTSTAMP:20150312T181936Z +ORGANIZER;CN=Ben Bitdiddle;EMAIL=benbitdiddle@icloud.com:mailto: + 2_HEZDSOJZGEZTSMJZGI4TSOJRGNOIFYHPYTDQMCIAF5U2J7KGUYDTWMZSMEX4QJ23ABSXJO6R + JCXDA@imip.me.com +END:VEVENT +END:VCALENDAR diff --git a/inbox/test/events/fixtures/event_without_sequence.ics b/inbox/test/events/fixtures/event_without_sequence.ics new file mode 100644 index 000000000..f8fbe000a --- /dev/null +++ b/inbox/test/events/fixtures/event_without_sequence.ics @@ -0,0 +1,18 @@ +BEGIN:VCALENDAR +VERSION:2.0 +PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN +METHOD:REQUEST +BEGIN:VEVENT +UID:8153F823-B9F1-4BE0-ADFB-5FEEB01C08A9 +SUMMARY:An all-day meeting about meetings +LOCATION:1\, Infinite Loop +DTSTART;VALUE=DATE:20150316 +DTEND;VALUE=DATE:20150317 +TRANSP:TRANSPARENT 
+LAST-MODIFIED:20150312T181936Z +DTSTAMP:20150312T181936Z +ORGANIZER;CN=Ben Bitdiddle;EMAIL=benbitdiddle@icloud.com:mailto: + 2_HEZDSOJZGEZTSMJZGI4TSOJRGNOIFYHPYTDQMCIAF5U2J7KGUYDTWMZSMEX4QJ23ABSXJO6R + JCXDA@imip.me.com +END:VEVENT +END:VCALENDAR diff --git a/inbox/test/events/fixtures/gcal_recur.ics b/inbox/test/events/fixtures/gcal_recur.ics new file mode 100644 index 000000000..866c59cb2 --- /dev/null +++ b/inbox/test/events/fixtures/gcal_recur.ics @@ -0,0 +1,50 @@ +BEGIN:VCALENDAR +PRODID:-//Google Inc//Google Calendar 70.9054//EN +VERSION:2.0 +CALSCALE:GREGORIAN +METHOD:REQUEST +BEGIN:VTIMEZONE +TZID:America/Los_Angeles +X-LIC-LOCATION:America/Los_Angeles +BEGIN:DAYLIGHT +TZOFFSETFROM:-0800 +TZOFFSETTO:-0700 +TZNAME:PDT +DTSTART:19700308T020000 +RRULE:FREQ=YEARLY;BYMONTH=3;BYDAY=2SU +END:DAYLIGHT +BEGIN:STANDARD +TZOFFSETFROM:-0700 +TZOFFSETTO:-0800 +TZNAME:PST +DTSTART:19701101T020000 +RRULE:FREQ=YEARLY;BYMONTH=11;BYDAY=1SU +END:STANDARD +END:VTIMEZONE +BEGIN:VEVENT +DTSTART;TZID=America/Los_Angeles:20150112T133000 +DTEND;TZID=America/Los_Angeles:20150112T143000 +RRULE:FREQ=WEEKLY;WKST=SU +DTSTAMP:20150112T000742Z +ORGANIZER;CN=Person 1:mailto:foobar@nilas.com +UID:flg2h6nam1cb1uqetgfkslrfrc@google.com +ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=NEEDS-ACTION;RSVP= + TRUE;CN=Person 2;X-NUM-GUESTS=0:mailto:foobar2@nilas.com +ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=ACCEPTED;RSVP=TRUE + ;CN=Person 1;X-NUM-GUESTS=0:mailto:foobar@nilas.com +ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=NEEDS-ACTION;RSVP= + TRUE;CN=Person 3;X-NUM-GUESTS=0:mailto:foobar3@nilas.com +ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=NEEDS-ACTION;RSVP= + TRUE;CN=Person 4;X-NUM-GUESTS=0:mailto:foobar4@nilas.com +ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=NEEDS-ACTION;RSVP= + TRUE;CN=Person 5;X-NUM-GUESTS=0:mailto:foobar5@nilas.com +CREATED:20150112T000742Z +DESCRIPTION:View your event online! +LAST-MODIFIED:20150112T000742Z +LOCATION: +SEQUENCE:0 +STATUS:CONFIRMED +SUMMARY:Weekly Planning Meeting +TRANSP:OPAQUE +END:VEVENT +END:VCALENDAR diff --git a/inbox/test/events/fixtures/gcal_v1.ics b/inbox/test/events/fixtures/gcal_v1.ics new file mode 100644 index 000000000..9294aa929 --- /dev/null +++ b/inbox/test/events/fixtures/gcal_v1.ics @@ -0,0 +1,29 @@ +BEGIN:VCALENDAR +PRODID:-//Google Inc//Google Calendar 70.9054//EN +VERSION:2.0 +CALSCALE:GREGORIAN +METHOD:REQUEST +BEGIN:VEVENT +DTSTART:20150319T233000Z +DTEND:20150320T003000Z +DTSTAMP:20150318T190309Z +ORGANIZER;CN=Karim Hamidou:mailto:karim@nilas.com +UID:jvbroggos139aumnj4p5og9rd0@google.com +ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=ACCEPTED;RSVP=TRUE + ;CN=Karim Hamidou;X-NUM-GUESTS=0:mailto:karim@nilas.com +ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=NEEDS-ACTION;RSVP= + TRUE;CN=inboxapptest.french@gmail.com;X-NUM-GUESTS=0:mailto:inboxapptest.fr + ench@gmail.com +CREATED:20150318T190309Z +DESCRIPTION:Affichez votre \xc3\xa9v\xc3\xa9nement sur la page https://www.google.com/cal + endar/event?action=VIEW&eid=anZicm9nZ29zMTM5YXVtbmo0cDVvZzlyZDAgaW5ib3hhcHB + 0ZXN0LmZyZW5jaEBt&tok=MTUja2FyaW1AbmlsYXMuY29tOTdlNTFmYzliN2Y5Y2RhMTQ1MzAwM + GYyMThjNGVlNGM3NTYwYzZjYg&ctz=America/Los_Angeles&hl=fr. 
+LAST-MODIFIED:20150318T190309Z +LOCATION:Olympia Hall\\, 28 Boulevard des Capucines\\, 75009 Paris\\, France +SEQUENCE:0 +STATUS:CONFIRMED +SUMMARY:the Strokes +TRANSP:OPAQUE +END:VEVENT +END:VCALENDAR diff --git a/inbox/test/events/fixtures/gcal_v2.ics b/inbox/test/events/fixtures/gcal_v2.ics new file mode 100644 index 000000000..3f5272119 --- /dev/null +++ b/inbox/test/events/fixtures/gcal_v2.ics @@ -0,0 +1,29 @@ +BEGIN:VCALENDAR +PRODID:-//Google Inc//Google Calendar 70.9054//EN +VERSION:2.0 +CALSCALE:GREGORIAN +METHOD:REQUEST +BEGIN:VEVENT +DTSTART:20150319T233000Z +DTEND:20150320T003000Z +DTSTAMP:20150318T190431Z +ORGANIZER;CN=Karim Hamidou:mailto:karim@nilas.com +UID:jvbroggos139aumnj4p5og9rd0@google.com +ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=ACCEPTED;RSVP=TRUE + ;CN=Karim Hamidou;X-NUM-GUESTS=0:mailto:karim@nilas.com +ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=NEEDS-ACTION;RSVP= + TRUE;CN=inboxapptest.french@gmail.com;X-NUM-GUESTS=0:mailto:inboxapptest.fr + ench@gmail.com +CREATED:20150318T190309Z +DESCRIPTION:Affichez votre \xc3\xa9v\xc3\xa9nement sur la page https://www.google.com/cal + endar/event?action=VIEW&eid=anZicm9nZ29zMTM5YXVtbmo0cDVvZzlyZDAgaW5ib3hhcHB + 0ZXN0LmZyZW5jaEBt&tok=MTUja2FyaW1AbmlsYXMuY29tOTdlNTFmYzliN2Y5Y2RhMTQ1MzAwM + GYyMThjNGVlNGM3NTYwYzZjYg&ctz=America/Los_Angeles&hl=fr. +LAST-MODIFIED:20150318T190431Z +LOCATION:Le Zenith\\, 211 Avenue Jean Jaures\\, 75019 Paris\\, France +SEQUENCE:0 +STATUS:CONFIRMED +SUMMARY:the Strokes +TRANSP:OPAQUE +END:VEVENT +END:VCALENDAR diff --git a/inbox/test/events/fixtures/google_cancelled1.ics b/inbox/test/events/fixtures/google_cancelled1.ics new file mode 100644 index 000000000..7754a0a48 --- /dev/null +++ b/inbox/test/events/fixtures/google_cancelled1.ics @@ -0,0 +1,28 @@ +BEGIN:VCALENDAR +PRODID:-//Google Inc//Google Calendar 70.9054//EN +VERSION:2.0 +CALSCALE:GREGORIAN +METHOD:REQUEST +BEGIN:VEVENT +DTSTART:20150401T140000Z +DTEND:20150401T150000Z +DTSTAMP:20150330T133948Z +ORGANIZER;CN=Inbox Apptest:mailto:inboxapptest.french@gmail.com +UID:c74p2nmutcd0kt69ku7rs8vu2g@google.com +ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=ACCEPTED;RSVP=TRUE + ;CN=Inbox Apptest;X-NUM-GUESTS=0:mailto:inboxapptest.french@gmail.com +ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=NEEDS-ACTION;RSVP= + TRUE;CN=karim@nilas.com;X-NUM-GUESTS=0:mailto:karim@nilas.com +CREATED:20150330T133947Z +DESCRIPTION:View your event at https://www.google.com/calendar/event?action + =VIEW&eid=Yzc0cDJubXV0Y2Qwa3Q2OWt1N3JzOHZ1Mmcga2FyaW1AbmlsYXMuY29t&tok=Mjkj + aW5ib3hhcHB0ZXN0LmZyZW5jaEBnbWFpbC5jb202YTJlN2NjMDc1NWFhMTdmYzU5YzNkYjJkNGY + wMDk4ZWUwNzU1ZThi&ctz=Europe/Paris&hl=en. 
+LAST-MODIFIED:20150330T133948Z +LOCATION: +SEQUENCE:0 +STATUS:CONFIRMED +SUMMARY:Buffalo Buffalo +TRANSP:OPAQUE +END:VEVENT +END:VCALENDAR diff --git a/inbox/test/events/fixtures/google_cancelled2.ics b/inbox/test/events/fixtures/google_cancelled2.ics new file mode 100644 index 000000000..4bfc40865 --- /dev/null +++ b/inbox/test/events/fixtures/google_cancelled2.ics @@ -0,0 +1,25 @@ +BEGIN:VCALENDAR +PRODID:-//Google Inc//Google Calendar 70.9054//EN +VERSION:2.0 +CALSCALE:GREGORIAN +METHOD:CANCEL +BEGIN:VEVENT +DTSTART:20150401T140000Z +DTEND:20150401T150000Z +DTSTAMP:20150330T134102Z +ORGANIZER;CN=Inbox Apptest:mailto:inboxapptest.french@gmail.com +UID:c74p2nmutcd0kt69ku7rs8vu2g@google.com +ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=ACCEPTED;CN=Inbox + Apptest;X-NUM-GUESTS=0:mailto:inboxapptest.french@gmail.com +ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=NEEDS-ACTION;CN=ka + rim@nilas.com;X-NUM-GUESTS=0:mailto:karim@nilas.com +CREATED:20150330T133947Z +DESCRIPTION: +LAST-MODIFIED:20150330T134102Z +LOCATION: +SEQUENCE:1 +STATUS:CANCELLED +SUMMARY:Buffalo Buffalo +TRANSP:OPAQUE +END:VEVENT +END:VCALENDAR diff --git a/inbox/test/events/fixtures/icloud_cancelled1.ics b/inbox/test/events/fixtures/icloud_cancelled1.ics new file mode 100644 index 000000000..1913e465c --- /dev/null +++ b/inbox/test/events/fixtures/icloud_cancelled1.ics @@ -0,0 +1,185 @@ +BEGIN:VCALENDAR +VERSION:2.0 +PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN +METHOD:REQUEST +BEGIN:VTIMEZONE +TZID:Europe/Paris +X-LIC-LOCATION:Europe/Paris +BEGIN:STANDARD +DTSTART:18910315T000100 +RDATE;VALUE=DATE-TIME:18910315T000100 +TZNAME:PMT +TZOFFSETFROM:+0921 +TZOFFSETTO:+0921 +END:STANDARD +BEGIN:STANDARD +DTSTART:19110311T000100 +RDATE;VALUE=DATE-TIME:19110311T000100 +TZNAME:WEST +TZOFFSETFROM:+0921 +TZOFFSETTO:+0000 +END:STANDARD +BEGIN:DAYLIGHT +DTSTART:19160614T230000 +RDATE;VALUE=DATE-TIME:19160614T230000 +RDATE;VALUE=DATE-TIME:19170324T230000 +RDATE;VALUE=DATE-TIME:19180309T230000 +RDATE;VALUE=DATE-TIME:19190301T230000 +RDATE;VALUE=DATE-TIME:19200214T230000 +RDATE;VALUE=DATE-TIME:19210314T230000 +RDATE;VALUE=DATE-TIME:19220325T230000 +RDATE;VALUE=DATE-TIME:19230526T230000 +RDATE;VALUE=DATE-TIME:19240329T230000 +RDATE;VALUE=DATE-TIME:19250404T230000 +RDATE;VALUE=DATE-TIME:19260417T230000 +RDATE;VALUE=DATE-TIME:19270409T230000 +RDATE;VALUE=DATE-TIME:19280414T230000 +RDATE;VALUE=DATE-TIME:19290420T230000 +RDATE;VALUE=DATE-TIME:19300412T230000 +RDATE;VALUE=DATE-TIME:19310418T230000 +RDATE;VALUE=DATE-TIME:19320402T230000 +RDATE;VALUE=DATE-TIME:19330325T230000 +RDATE;VALUE=DATE-TIME:19340407T230000 +RDATE;VALUE=DATE-TIME:19350330T230000 +RDATE;VALUE=DATE-TIME:19360418T230000 +RDATE;VALUE=DATE-TIME:19370403T230000 +RDATE;VALUE=DATE-TIME:19380326T230000 +RDATE;VALUE=DATE-TIME:19390415T230000 +RDATE;VALUE=DATE-TIME:19400225T020000 +TZNAME:WEST +TZOFFSETFROM:+0000 +TZOFFSETTO:+0100 +END:DAYLIGHT +BEGIN:STANDARD +DTSTART:19161002T000000 +RRULE:FREQ=YEARLY;UNTIL=19191005T230000Z;BYDAY=MO; + BYMONTHDAY=2,3,4,5,6,7,8;BYMONTH=10 +TZNAME:WET +TZOFFSETFROM:+0100 +TZOFFSETTO:+0000 +END:STANDARD +BEGIN:STANDARD +DTSTART:19201024T000000 +RDATE;VALUE=DATE-TIME:19201024T000000 +RDATE;VALUE=DATE-TIME:19211026T000000 +RDATE;VALUE=DATE-TIME:19391119T000000 +TZNAME:WET +TZOFFSETFROM:+0100 +TZOFFSETTO:+0000 +END:STANDARD +BEGIN:STANDARD +DTSTART:19221008T000000 +RRULE:FREQ=YEARLY;UNTIL=19381001T230000Z;BYDAY=SU; + BYMONTHDAY=2,3,4,5,6,7,8;BYMONTH=10 +TZNAME:WET +TZOFFSETFROM:+0100 +TZOFFSETTO:+0000 +END:STANDARD 
+BEGIN:STANDARD +DTSTART:19400614T230000 +RDATE;VALUE=DATE-TIME:19400614T230000 +TZNAME:CEST +TZOFFSETFROM:+0100 +TZOFFSETTO:+0200 +END:STANDARD +BEGIN:STANDARD +DTSTART:19421102T030000 +RDATE;VALUE=DATE-TIME:19421102T030000 +RDATE;VALUE=DATE-TIME:19431004T030000 +RDATE;VALUE=DATE-TIME:19760926T010000 +RDATE;VALUE=DATE-TIME:19770925T030000 +RDATE;VALUE=DATE-TIME:19781001T030000 +TZNAME:CET +TZOFFSETFROM:+0200 +TZOFFSETTO:+0100 +END:STANDARD +BEGIN:DAYLIGHT +DTSTART:19430329T020000 +RDATE;VALUE=DATE-TIME:19430329T020000 +RDATE;VALUE=DATE-TIME:19440403T020000 +RDATE;VALUE=DATE-TIME:19760328T010000 +TZNAME:CEST +TZOFFSETFROM:+0100 +TZOFFSETTO:+0200 +END:DAYLIGHT +BEGIN:STANDARD +DTSTART:19440825T000000 +RDATE;VALUE=DATE-TIME:19440825T000000 +TZNAME:WEST +TZOFFSETFROM:+0200 +TZOFFSETTO:+0200 +END:STANDARD +BEGIN:DAYLIGHT +DTSTART:19441008T010000 +RDATE;VALUE=DATE-TIME:19441008T010000 +TZNAME:WEST +TZOFFSETFROM:+0200 +TZOFFSETTO:+0100 +END:DAYLIGHT +BEGIN:DAYLIGHT +DTSTART:19450402T020000 +RDATE;VALUE=DATE-TIME:19450402T020000 +TZNAME:WEMT +TZOFFSETFROM:+0100 +TZOFFSETTO:+0200 +END:DAYLIGHT +BEGIN:STANDARD +DTSTART:19450916T030000 +RDATE;VALUE=DATE-TIME:19450916T030000 +TZNAME:CEST +TZOFFSETFROM:+0200 +TZOFFSETTO:+0100 +END:STANDARD +BEGIN:STANDARD +DTSTART:19770101T000000 +RDATE;VALUE=DATE-TIME:19770101T000000 +TZNAME:CEST +TZOFFSETFROM:+0100 +TZOFFSETTO:+0100 +END:STANDARD +BEGIN:DAYLIGHT +DTSTART:19770403T020000 +RRULE:FREQ=YEARLY;UNTIL=19800406T010000Z;BYDAY=1SU;BYMONTH=4 +TZNAME:CEST +TZOFFSETFROM:+0100 +TZOFFSETTO:+0200 +END:DAYLIGHT +BEGIN:STANDARD +DTSTART:19790930T030000 +RRULE:FREQ=YEARLY;UNTIL=19950924T010000Z;BYDAY=-1SU;BYMONTH=9 +TZNAME:CET +TZOFFSETFROM:+0200 +TZOFFSETTO:+0100 +END:STANDARD +BEGIN:DAYLIGHT +DTSTART:19810329T020000 +RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3 +TZNAME:CEST +TZOFFSETFROM:+0100 +TZOFFSETTO:+0200 +END:DAYLIGHT +BEGIN:STANDARD +DTSTART:19961027T030000 +RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10 +TZNAME:CET +TZOFFSETFROM:+0200 +TZOFFSETTO:+0100 +END:STANDARD +END:VTIMEZONE +BEGIN:VEVENT +UID:5919D444-7C99-4687-A526-FC5D10091318 +SUMMARY:New Era +SEQUENCE:0 +LOCATION: +DTSTART;TZID=Europe/Paris:20150331T043000 +DTEND;TZID=Europe/Paris:20150331T053000 +LAST-MODIFIED:20150330T125200Z +ATTENDEE;ROLE=REQ-PARTICIPANT;PARTSTAT=NEEDS-ACTION;RSVP=TRUE:mailto: + karim@nilas.com +ATTENDEE;ROLE=CHAIR;CN=Karim Hamidou;PARTSTAT=ACCEPTED; + EMAIL=invalid@example.com:mailto:invalid@example.com +DTSTAMP:20150330T125200Z +ORGANIZER;CN=Karim Hamidou;EMAIL=invalid@example.com:mailto: + 2_addr@imip.me.com +END:VEVENT +END:VCALENDAR diff --git a/inbox/test/events/fixtures/icloud_cancelled2.ics b/inbox/test/events/fixtures/icloud_cancelled2.ics new file mode 100644 index 000000000..f4afad275 --- /dev/null +++ b/inbox/test/events/fixtures/icloud_cancelled2.ics @@ -0,0 +1,181 @@ +BEGIN:VCALENDAR +VERSION:2.0 +PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN +METHOD:CANCEL +BEGIN:VTIMEZONE +TZID:Europe/Paris +X-LIC-LOCATION:Europe/Paris +BEGIN:STANDARD +DTSTART:18910315T000100 +RDATE;VALUE=DATE-TIME:18910315T000100 +TZNAME:PMT +TZOFFSETFROM:+0921 +TZOFFSETTO:+0921 +END:STANDARD +BEGIN:STANDARD +DTSTART:19110311T000100 +RDATE;VALUE=DATE-TIME:19110311T000100 +TZNAME:WEST +TZOFFSETFROM:+0921 +TZOFFSETTO:+0000 +END:STANDARD +BEGIN:DAYLIGHT +DTSTART:19160614T230000 +RDATE;VALUE=DATE-TIME:19160614T230000 +RDATE;VALUE=DATE-TIME:19170324T230000 +RDATE;VALUE=DATE-TIME:19180309T230000 +RDATE;VALUE=DATE-TIME:19190301T230000 +RDATE;VALUE=DATE-TIME:19200214T230000 
+RDATE;VALUE=DATE-TIME:19210314T230000 +RDATE;VALUE=DATE-TIME:19220325T230000 +RDATE;VALUE=DATE-TIME:19230526T230000 +RDATE;VALUE=DATE-TIME:19240329T230000 +RDATE;VALUE=DATE-TIME:19250404T230000 +RDATE;VALUE=DATE-TIME:19260417T230000 +RDATE;VALUE=DATE-TIME:19270409T230000 +RDATE;VALUE=DATE-TIME:19280414T230000 +RDATE;VALUE=DATE-TIME:19290420T230000 +RDATE;VALUE=DATE-TIME:19300412T230000 +RDATE;VALUE=DATE-TIME:19310418T230000 +RDATE;VALUE=DATE-TIME:19320402T230000 +RDATE;VALUE=DATE-TIME:19330325T230000 +RDATE;VALUE=DATE-TIME:19340407T230000 +RDATE;VALUE=DATE-TIME:19350330T230000 +RDATE;VALUE=DATE-TIME:19360418T230000 +RDATE;VALUE=DATE-TIME:19370403T230000 +RDATE;VALUE=DATE-TIME:19380326T230000 +RDATE;VALUE=DATE-TIME:19390415T230000 +RDATE;VALUE=DATE-TIME:19400225T020000 +TZNAME:WEST +TZOFFSETFROM:+0000 +TZOFFSETTO:+0100 +END:DAYLIGHT +BEGIN:STANDARD +DTSTART:19161002T000000 +RRULE:FREQ=YEARLY;UNTIL=19191005T230000Z;BYDAY=MO; + BYMONTHDAY=2,3,4,5,6,7,8;BYMONTH=10 +TZNAME:WET +TZOFFSETFROM:+0100 +TZOFFSETTO:+0000 +END:STANDARD +BEGIN:STANDARD +DTSTART:19201024T000000 +RDATE;VALUE=DATE-TIME:19201024T000000 +RDATE;VALUE=DATE-TIME:19211026T000000 +RDATE;VALUE=DATE-TIME:19391119T000000 +TZNAME:WET +TZOFFSETFROM:+0100 +TZOFFSETTO:+0000 +END:STANDARD +BEGIN:STANDARD +DTSTART:19221008T000000 +RRULE:FREQ=YEARLY;UNTIL=19381001T230000Z;BYDAY=SU; + BYMONTHDAY=2,3,4,5,6,7,8;BYMONTH=10 +TZNAME:WET +TZOFFSETFROM:+0100 +TZOFFSETTO:+0000 +END:STANDARD +BEGIN:STANDARD +DTSTART:19400614T230000 +RDATE;VALUE=DATE-TIME:19400614T230000 +TZNAME:CEST +TZOFFSETFROM:+0100 +TZOFFSETTO:+0200 +END:STANDARD +BEGIN:STANDARD +DTSTART:19421102T030000 +RDATE;VALUE=DATE-TIME:19421102T030000 +RDATE;VALUE=DATE-TIME:19431004T030000 +RDATE;VALUE=DATE-TIME:19760926T010000 +RDATE;VALUE=DATE-TIME:19770925T030000 +RDATE;VALUE=DATE-TIME:19781001T030000 +TZNAME:CET +TZOFFSETFROM:+0200 +TZOFFSETTO:+0100 +END:STANDARD +BEGIN:DAYLIGHT +DTSTART:19430329T020000 +RDATE;VALUE=DATE-TIME:19430329T020000 +RDATE;VALUE=DATE-TIME:19440403T020000 +RDATE;VALUE=DATE-TIME:19760328T010000 +TZNAME:CEST +TZOFFSETFROM:+0100 +TZOFFSETTO:+0200 +END:DAYLIGHT +BEGIN:STANDARD +DTSTART:19440825T000000 +RDATE;VALUE=DATE-TIME:19440825T000000 +TZNAME:WEST +TZOFFSETFROM:+0200 +TZOFFSETTO:+0200 +END:STANDARD +BEGIN:DAYLIGHT +DTSTART:19441008T010000 +RDATE;VALUE=DATE-TIME:19441008T010000 +TZNAME:WEST +TZOFFSETFROM:+0200 +TZOFFSETTO:+0100 +END:DAYLIGHT +BEGIN:DAYLIGHT +DTSTART:19450402T020000 +RDATE;VALUE=DATE-TIME:19450402T020000 +TZNAME:WEMT +TZOFFSETFROM:+0100 +TZOFFSETTO:+0200 +END:DAYLIGHT +BEGIN:STANDARD +DTSTART:19450916T030000 +RDATE;VALUE=DATE-TIME:19450916T030000 +TZNAME:CEST +TZOFFSETFROM:+0200 +TZOFFSETTO:+0100 +END:STANDARD +BEGIN:STANDARD +DTSTART:19770101T000000 +RDATE;VALUE=DATE-TIME:19770101T000000 +TZNAME:CEST +TZOFFSETFROM:+0100 +TZOFFSETTO:+0100 +END:STANDARD +BEGIN:DAYLIGHT +DTSTART:19770403T020000 +RRULE:FREQ=YEARLY;UNTIL=19800406T010000Z;BYDAY=1SU;BYMONTH=4 +TZNAME:CEST +TZOFFSETFROM:+0100 +TZOFFSETTO:+0200 +END:DAYLIGHT +BEGIN:STANDARD +DTSTART:19790930T030000 +RRULE:FREQ=YEARLY;UNTIL=19950924T010000Z;BYDAY=-1SU;BYMONTH=9 +TZNAME:CET +TZOFFSETFROM:+0200 +TZOFFSETTO:+0100 +END:STANDARD +BEGIN:DAYLIGHT +DTSTART:19810329T020000 +RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3 +TZNAME:CEST +TZOFFSETFROM:+0100 +TZOFFSETTO:+0200 +END:DAYLIGHT +BEGIN:STANDARD +DTSTART:19961027T030000 +RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10 +TZNAME:CET +TZOFFSETFROM:+0200 +TZOFFSETTO:+0100 +END:STANDARD +END:VTIMEZONE +BEGIN:VEVENT +DTSTAMP:20150330T125659Z 
+UID:5919D444-7C99-4687-A526-FC5D10091318 +SEQUENCE:1 +ORGANIZER;CN=Karim Hamidou;EMAIL=invalid@example.com:mailto: + invalid@example.com +SUMMARY:New Era +DTSTART;TZID=Europe/Paris:20150331T043000 +DTEND;TZID=Europe/Paris:20150331T053000 +ATTENDEE;ROLE=REQ-PARTICIPANT;PARTSTAT=NEEDS-ACTION;RSVP=TRUE:mailto: + karim@nilas.com +END:VEVENT +END:VCALENDAR diff --git a/inbox/test/events/fixtures/icloud_oneday_event.ics b/inbox/test/events/fixtures/icloud_oneday_event.ics new file mode 100644 index 000000000..f766f115e --- /dev/null +++ b/inbox/test/events/fixtures/icloud_oneday_event.ics @@ -0,0 +1,23 @@ +BEGIN:VCALENDAR +VERSION:2.0 +PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN +METHOD:REQUEST +BEGIN:VEVENT +UID:8153F823-B9F1-4BE0-ADFB-5FEEB01C08A9 +SUMMARY:An all-day meeting about meetings +SEQUENCE:0 +LOCATION:1\, Infinite Loop +DTSTART;VALUE=DATE:20150316 +DTEND;VALUE=DATE:20150317 +TRANSP:TRANSPARENT +LAST-MODIFIED:20150312T181936Z +ATTENDEE;ROLE=REQ-PARTICIPANT;PARTSTAT=NEEDS-ACTION;RSVP=TRUE:mailto: + karim@nilas.com +ATTENDEE;ROLE=CHAIR;CN=Ben Bitdiddle;PARTSTAT=ACCEPTED; + EMAIL=benbitdiddle@icloud.com:mailto:benbitdiddle@icloud.com +DTSTAMP:20150312T181936Z +ORGANIZER;CN=Ben Bitdiddle;EMAIL=benbitdiddle@icloud.com:mailto: + 2_HEZDSOJZGEZTSMJZGI4TSOJRGNOIFYHPYTDQMCIAF5U2J7KGUYDTWMZSMEX4QJ23ABSXJO6R + JCXDA@imip.me.com +END:VEVENT +END:VCALENDAR diff --git a/inbox/test/events/fixtures/invalid_rsvp.ics b/inbox/test/events/fixtures/invalid_rsvp.ics new file mode 100644 index 000000000..5c938a890 --- /dev/null +++ b/inbox/test/events/fixtures/invalid_rsvp.ics @@ -0,0 +1,31 @@ +BEGIN:VCALENDAR +PRODID:-//Google Inc//Google Calendar 70.9054//EN +VERSION:2.0 +CALSCALE:GREGORIAN +METHOD:REPLY +BEGIN:VEVENT +DTSTART:20150401T140000Z +DTEND:20150401T143000Z +DTSTAMP:20150331T124344Z +ORGANIZER;CN=Inbox Apptest2:mailto:test2@example.com +UID:234252$cccc@nylas.com +ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=DECLINED;CN=Karim + Hamidou;X-NUM-GUESTS=0:mailto:karim@example.com +CREATED:20150331T124302Z +DESCRIPTION: +LAST-MODIFIED:20150331T124344Z +LOCATION: +SEQUENCE:0 +STATUS:CONFIRMED +SUMMARY:Spot le chien +TRANSP:OPAQUE +X-MICROSOFT-CDO-ALLDAYEVENT:FALSE +X-MICROSOFT-CDO-APPT-SEQUENCE:0 +X-MICROSOFT-CDO-BUSYSTATUS:TENTATIVE +X-MICROSOFT-CDO-IMPORTANCE:1 +X-MICROSOFT-CDO-INSTTYPE:0 +X-MICROSOFT-CDO-INTENDEDSTATUS:BUSY +X-MICROSOFT-CDO-OWNERAPPTID:2113142777 +X-MICROSOFT-DISALLOW-COUNTER:FALSE +END:VEVENT +END:VCALENDAR diff --git a/inbox/test/events/fixtures/invalid_rsvp2.ics b/inbox/test/events/fixtures/invalid_rsvp2.ics new file mode 100644 index 000000000..20265cc4a --- /dev/null +++ b/inbox/test/events/fixtures/invalid_rsvp2.ics @@ -0,0 +1,31 @@ +BEGIN:VCALENDAR +PRODID:-//Google Inc//Google Calendar 70.9054//EN +VERSION:2.0 +CALSCALE:GREGORIAN +METHOD:REPLY +BEGIN:VEVENT +DTSTART:20150401T140000Z +DTEND:20150401T143000Z +DTSTAMP:20150331T124344Z +ORGANIZER;CN=Inbox Apptest2:mailto:test2@example.com +UID:234252cccc@google.com +ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=DECLINED;CN=Karim + Hamidou;X-NUM-GUESTS=0:mailto:karim@example.com +CREATED:20150331T124302Z +DESCRIPTION: +LAST-MODIFIED:20150331T124344Z +LOCATION: +SEQUENCE:0 +STATUS:CONFIRMED +SUMMARY:Spot le chien +TRANSP:OPAQUE +X-MICROSOFT-CDO-ALLDAYEVENT:FALSE +X-MICROSOFT-CDO-APPT-SEQUENCE:0 +X-MICROSOFT-CDO-BUSYSTATUS:TENTATIVE +X-MICROSOFT-CDO-IMPORTANCE:1 +X-MICROSOFT-CDO-INSTTYPE:0 +X-MICROSOFT-CDO-INTENDEDSTATUS:BUSY +X-MICROSOFT-CDO-OWNERAPPTID:2113142777 +X-MICROSOFT-DISALLOW-COUNTER:FALSE 
+END:VEVENT +END:VCALENDAR diff --git a/inbox/test/events/fixtures/invite_w_rsvps1.ics b/inbox/test/events/fixtures/invite_w_rsvps1.ics new file mode 100644 index 000000000..3b9b994d9 --- /dev/null +++ b/inbox/test/events/fixtures/invite_w_rsvps1.ics @@ -0,0 +1,51 @@ +BEGIN:VCALENDAR +METHOD:REQUEST +PRODID:Microsoft Exchange Server 2010 +VERSION:2.0 +BEGIN:VTIMEZONE +TZID:Romance Standard Time +BEGIN:STANDARD +DTSTART:16010101T030000 +TZOFFSETFROM:+0200 +TZOFFSETTO:+0100 +RRULE:FREQ=YEARLY;INTERVAL=1;BYDAY=-1SU;BYMONTH=10 +END:STANDARD +BEGIN:DAYLIGHT +DTSTART:16010101T020000 +TZOFFSETFROM:+0100 +TZOFFSETTO:+0200 +RRULE:FREQ=YEARLY;INTERVAL=1;BYDAY=-1SU;BYMONTH=3 +END:DAYLIGHT +END:VTIMEZONE +BEGIN:VEVENT +ORGANIZER;CN=Inbox Apptest2:MAILTO:test2@example.com +ATTENDEE;ROLE=REQ-PARTICIPANT;PARTSTAT=NEEDS-ACTION;RSVP=TRUE;CN=Karim Hami + dou:MAILTO:karim@example.com +ATTENDEE;ROLE=REQ-PARTICIPANT;PARTSTAT=NEEDS-ACTION;RSVP=TRUE;CN=test1@example.com:MAILTO:test1@example.com +UID:040000008200E00074C5B7101A82E00800000000F9125A30B06BD001000000000000000 + 0100000009D791C7548BFD144BFA54F14213CAD25 +SUMMARY;LANGUAGE=fr-FR:Spot le chien +DTSTART;TZID=Romance Standard Time:20150401T160000 +DTEND;TZID=Romance Standard Time:20150401T163000 +CLASS:PUBLIC +PRIORITY:5 +DTSTAMP:20150331T124302Z +TRANSP:OPAQUE +STATUS:CONFIRMED +SEQUENCE:0 +LOCATION;LANGUAGE=fr-FR: +X-MICROSOFT-CDO-APPT-SEQUENCE:0 +X-MICROSOFT-CDO-OWNERAPPTID:2113142777 +X-MICROSOFT-CDO-BUSYSTATUS:TENTATIVE +X-MICROSOFT-CDO-INTENDEDSTATUS:BUSY +X-MICROSOFT-CDO-ALLDAYEVENT:FALSE +X-MICROSOFT-CDO-IMPORTANCE:1 +X-MICROSOFT-CDO-INSTTYPE:0 +X-MICROSOFT-DISALLOW-COUNTER:FALSE +BEGIN:VALARM +DESCRIPTION:REMINDER +TRIGGER;RELATED=START:-PT15M +ACTION:DISPLAY +END:VALARM +END:VEVENT +END:VCALENDAR diff --git a/inbox/test/events/fixtures/invite_w_rsvps2.ics b/inbox/test/events/fixtures/invite_w_rsvps2.ics new file mode 100644 index 000000000..b2cdca169 --- /dev/null +++ b/inbox/test/events/fixtures/invite_w_rsvps2.ics @@ -0,0 +1,31 @@ +BEGIN:VCALENDAR +PRODID:-//Google Inc//Google Calendar 70.9054//EN +VERSION:2.0 +CALSCALE:GREGORIAN +METHOD:REPLY +BEGIN:VEVENT +DTSTART:20150401T140000Z +DTEND:20150401T143000Z +DTSTAMP:20150331T131419Z +ORGANIZER;CN=Test2:mailto:test2@example.com +UID:cccc@nylas.com +ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=TENTATIVE;CN=Inbox + Apptest;X-NUM-GUESTS=0:mailto:test1@example.com +CREATED:20150331T124302Z +DESCRIPTION: +LAST-MODIFIED:20150331T131419Z +LOCATION: +SEQUENCE:0 +STATUS:CONFIRMED +SUMMARY:Spot le chien +TRANSP:OPAQUE +X-MICROSOFT-CDO-ALLDAYEVENT:FALSE +X-MICROSOFT-CDO-APPT-SEQUENCE:0 +X-MICROSOFT-CDO-BUSYSTATUS:TENTATIVE +X-MICROSOFT-CDO-IMPORTANCE:1 +X-MICROSOFT-CDO-INSTTYPE:0 +X-MICROSOFT-CDO-INTENDEDSTATUS:BUSY +X-MICROSOFT-CDO-OWNERAPPTID:2113142777 +X-MICROSOFT-DISALLOW-COUNTER:FALSE +END:VEVENT +END:VCALENDAR diff --git a/inbox/test/events/fixtures/invite_w_rsvps3.ics b/inbox/test/events/fixtures/invite_w_rsvps3.ics new file mode 100644 index 000000000..25fc90820 --- /dev/null +++ b/inbox/test/events/fixtures/invite_w_rsvps3.ics @@ -0,0 +1,31 @@ +BEGIN:VCALENDAR +PRODID:-//Google Inc//Google Calendar 70.9054//EN +VERSION:2.0 +CALSCALE:GREGORIAN +METHOD:REPLY +BEGIN:VEVENT +DTSTART:20150401T140000Z +DTEND:20150401T143000Z +DTSTAMP:20150331T124344Z +ORGANIZER;CN=Inbox Apptest2:mailto:test2@example.com +UID:cccc@nylas.com +ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=ACCEPTED;CN=Karim + Hamidou;X-NUM-GUESTS=0:mailto:karim@example.com +CREATED:20150331T124302Z +DESCRIPTION: 
+LAST-MODIFIED:20150331T124344Z +LOCATION: +SEQUENCE:0 +STATUS:CONFIRMED +SUMMARY:Spot le chien +TRANSP:OPAQUE +X-MICROSOFT-CDO-ALLDAYEVENT:FALSE +X-MICROSOFT-CDO-APPT-SEQUENCE:0 +X-MICROSOFT-CDO-BUSYSTATUS:TENTATIVE +X-MICROSOFT-CDO-IMPORTANCE:1 +X-MICROSOFT-CDO-INSTTYPE:0 +X-MICROSOFT-CDO-INTENDEDSTATUS:BUSY +X-MICROSOFT-CDO-OWNERAPPTID:2113142777 +X-MICROSOFT-DISALLOW-COUNTER:FALSE +END:VEVENT +END:VCALENDAR diff --git a/inbox/test/events/fixtures/invite_w_rsvps_4.ics b/inbox/test/events/fixtures/invite_w_rsvps_4.ics new file mode 100644 index 000000000..9f6d69e2a --- /dev/null +++ b/inbox/test/events/fixtures/invite_w_rsvps_4.ics @@ -0,0 +1,31 @@ +BEGIN:VCALENDAR +PRODID:-//Google Inc//Google Calendar 70.9054//EN +VERSION:2.0 +CALSCALE:GREGORIAN +METHOD:REPLY +BEGIN:VEVENT +DTSTART:20150401T140000Z +DTEND:20150401T143000Z +DTSTAMP:20150331T124344Z +ORGANIZER;CN=Inbox Apptest2:mailto:test2@example.com +UID:cccc@nylas.com +ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=DECLINED;CN=Karim + Hamidou;X-NUM-GUESTS=0:mailto:karim@example.com +CREATED:20150331T124302Z +DESCRIPTION: +LAST-MODIFIED:20150331T124344Z +LOCATION: +SEQUENCE:0 +STATUS:CONFIRMED +SUMMARY:Spot le chien +TRANSP:OPAQUE +X-MICROSOFT-CDO-ALLDAYEVENT:FALSE +X-MICROSOFT-CDO-APPT-SEQUENCE:0 +X-MICROSOFT-CDO-BUSYSTATUS:TENTATIVE +X-MICROSOFT-CDO-IMPORTANCE:1 +X-MICROSOFT-CDO-INSTTYPE:0 +X-MICROSOFT-CDO-INTENDEDSTATUS:BUSY +X-MICROSOFT-CDO-OWNERAPPTID:2113142777 +X-MICROSOFT-DISALLOW-COUNTER:FALSE +END:VEVENT +END:VCALENDAR diff --git a/inbox/test/events/fixtures/iphone_through_exchange.ics b/inbox/test/events/fixtures/iphone_through_exchange.ics new file mode 100644 index 000000000..28598fc12 --- /dev/null +++ b/inbox/test/events/fixtures/iphone_through_exchange.ics @@ -0,0 +1,39 @@ +BEGIN:VCALENDAR +CALSCALE:GREGORIAN +METHOD:REQUEST +PRODID:-//Apple Inc.//iPhone 3.0//EN +VERSION:2.0 +BEGIN:VTIMEZONE +TZID:Europe/Paris +BEGIN:DAYLIGHT +DTSTART:20140330T020000 +RRULE:FREQ=YEARLY;BYMONTH=3;BYDAY=-1SU +TZNAME:UTC+02:00 +TZOFFSETFROM:+0100 +TZOFFSETTO:+0200 +END:DAYLIGHT +BEGIN:STANDARD +DTSTART:20141026T030000 +RRULE:FREQ=YEARLY;BYMONTH=10;BYDAY=-1SU +TZNAME:UTC+01:00 +TZOFFSETFROM:+0200 +TZOFFSETTO:+0100 +END:STANDARD +END:VTIMEZONE +BEGIN:VEVENT +ATTENDEE;CN="khamidou@inboxapp.com";CUTYPE=INDIVIDUAL;PARTSTAT=NEEDS-ACT + ION;RSVP=TRUE:mailto:khamidou@inboxapp.com +CREATED:20141222T143614Z +DTEND;TZID=Europe/Paris:20141227T170000 +DTSTAMP:20141222T143614Z +DTSTART;TZID=Europe/Paris:20141227T160000 +LAST-MODIFIED:20141222T143614Z +ORGANIZER;CN="Inbox Apptest2";EMAIL="inbox.apptest2@inboxapp.onmicrosoft + .com":mailto:inbox.apptest2@inboxapp.onmicrosoft.com +SEQUENCE:0 +SUMMARY:Nouvel ?v?nement +TRANSP:OPAQUE +UID:091BD1D08D29478193C4AD1C0E60965400000000000000000000000000000000 +X-MICROSOFT-CDO-INTENDEDSTATUS:BUSY +END:VEVENT +END:VCALENDAR diff --git a/inbox/test/events/fixtures/meetup_infinite.ics b/inbox/test/events/fixtures/meetup_infinite.ics new file mode 100644 index 000000000..eb30eff14 --- /dev/null +++ b/inbox/test/events/fixtures/meetup_infinite.ics @@ -0,0 +1,18 @@ +BEGIN:VCALENDAR +PRODID:-//Meetup//Meetup Events v1.0//EN +X-ORIGINAL-URL:http://www.meetup.com/sfgamedevelopers/events/16078654/ +VERSION:2.0 +CALSCALE:GREGORIAN +UID:nih2h78am1cb1uqetgfkslrfrc@meetup.com +METHOD:PUBLISH +BEGIN:VEVENT +DTSTAMP:20110418T211031Z +DTSTART;TZID=America/Los_Angeles:20110419T170000 +SUMMARY:Bay Area Video Game Development Meetup Group Monthly Meetup +URL:http://www.meetup.com/sfgamedevelopers/events/16078654/ 
+LOCATION:Jillian's (Metreon (101 4th St.)\, San Francisco\, CA 94101) +SUMMARY:Bay Area Video Game Development Meetup Group Monthly Meetup +DESCRIPTION:Bay Area Video Game Development Meetup Group\nTuesday\, April 19 at 8:00 PM\n\nHey Gamers\,\n\nWe will be meeting again this month at Jillian's. We will do our best to get the front lounge area near the entrance. If you do not see...\n\nDetails: http://www.meetup.com/sfgamedevelopers/events/16078654/ +ORGANIZER;CN=Meetup Reminder:MAILTO:info@meetup.com +END:VEVENT +END:VCALENDAR diff --git a/inbox/test/events/fixtures/multiple_events.ics b/inbox/test/events/fixtures/multiple_events.ics new file mode 100644 index 000000000..999da81af --- /dev/null +++ b/inbox/test/events/fixtures/multiple_events.ics @@ -0,0 +1,35 @@ +BEGIN:VCALENDAR +VERSION:2.0 +PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN +METHOD:REQUEST +BEGIN:VEVENT +UID:8153F823-B9F1-4BE0-ADFB-5FEEB01C08A9 +SUMMARY:An all-day meeting about meetings +SEQUENCE:0 +LOCATION:1\, Infinite Loop +DTSTART;VALUE=DATE:20150316 +DTEND;VALUE=DATE:20150317 +TRANSP:TRANSPARENT +LAST-MODIFIED:20150312T181936Z +DTSTAMP:20150312T181936Z +ORGANIZER;CN=Ben Bitdiddle;EMAIL=benbitdiddle@icloud.com:mailto: + 2_HEZDSOJZGEZTSMJZGI4TSOJRGNOIFYHPYTDQMCIAF5U2J7KGUYDTWMZSMEX4QJ23ABSXJO6R + JCXDA@imip.me.com +END:VEVENT + +BEGIN:VEVENT +UID:8153F823-B9F1-4BE0-ADFB-5FEEB01C08A9 +SUMMARY:More meetings +SEQUENCE:0 +LOCATION:1\, Infinite Loop +DTSTART;VALUE=DATE:20150317 +DTEND;VALUE=DATE:20150318 +TRANSP:TRANSPARENT +LAST-MODIFIED:20150312T181936Z +DTSTAMP:20150312T181936Z +ORGANIZER;CN=Ben Bitdiddle;EMAIL=benbitdiddle@icloud.com:mailto: + 2_HEZDSOJZGEZTSMJZGI4TSOJRGNOIFYHPYTDQMCIAF5U2J7KGUYDTWMZSMEX4QJ23ABSXJO6R + JCXDA@imip.me.com +END:VEVENT + +END:VCALENDAR diff --git a/inbox/test/events/fixtures/multiple_summaries.ics b/inbox/test/events/fixtures/multiple_summaries.ics new file mode 100644 index 000000000..9b048b1a9 --- /dev/null +++ b/inbox/test/events/fixtures/multiple_summaries.ics @@ -0,0 +1,29 @@ +BEGIN:VCALENDAR +PRODID:-//Google Inc//Google Calendar 70.9054//EN +VERSION:2.0 +CALSCALE:GREGORIAN +METHOD:REQUEST +BEGIN:VEVENT +DTSTART:20150319T233000Z +DTEND:20150320T003000Z +DTSTAMP:20150318T190309Z +ORGANIZER;CN=Karim Hamidou:mailto:test@nilas.com +UID:jvbogoss39aumnj4p5og9rd0@google.com +ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=ACCEPTED;RSVP=TRUE + ;CN=Karim Hamidou;X-NUM-GUESTS=0:mailto:test@nilas.com +ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=NEEDS-ACTION;RSVP= + TRUE;CN=testaccount@gmail.com;X-NUM-GUESTS=0:mailto:testaccount@gmail.com +CREATED:20150318T190309Z +DESCRIPTION:Affichez votre \xc3\xa9v\xc3\xa9nement sur la page https://www.google.com/cal + endar/event?action=VIEW&eid=anZicm9nZ29zMTM5YXVtbmo0cDVvZzlyZDAgaW5ib3hhcHB + 0ZXN0LmZyZW5jaEBt&tok=MTUja2FyaW1AbmlsYXMuY29tOTdlNTFmYzliN2Y5Y2RhMTQ1MzAwM + GYyMThjNGVlNGM3NTYwYzZjYg&ctz=America/Los_Angeles&hl=fr. +LAST-MODIFIED:20150318T190309Z +LOCATION:Olympia Hall\\, 28 Boulevard des Capucines\\, 75009 Paris\\, France +SEQUENCE:0 +STATUS:CONFIRMED +SUMMARY;LANGUAGE=en-us:The Strokes +SUMMARY:Is this it? 
+TRANSP:OPAQUE +END:VEVENT +END:VCALENDAR diff --git a/inbox/test/events/fixtures/self_sent_v1.ics b/inbox/test/events/fixtures/self_sent_v1.ics new file mode 100644 index 000000000..f2eb3c60a --- /dev/null +++ b/inbox/test/events/fixtures/self_sent_v1.ics @@ -0,0 +1,29 @@ +BEGIN:VCALENDAR +PRODID:-//Google Inc//Google Calendar 70.9054//EN +VERSION:2.0 +CALSCALE:GREGORIAN +METHOD:REQUEST +BEGIN:VEVENT +DTSTART:20150319T233000Z +DTEND:20150320T003000Z +DTSTAMP:20150318T190309Z +ORGANIZER;CN=Karim Hamidou:mailto:inboxapptest@gmail.com +UID:burgos@google.com +ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=ACCEPTED;RSVP=TRUE + ;CN=Karim Hamidou;X-NUM-GUESTS=0:mailto:karim@nilas.com +ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=NEEDS-ACTION;RSVP= + TRUE;CN=inboxapptest.french@gmail.com;X-NUM-GUESTS=0:mailto:inboxapptest.fr + ench@gmail.com +CREATED:20150318T190309Z +DESCRIPTION:Affichez votre \xc3\xa9v\xc3\xa9nement sur la page https://www.google.com/cal + endar/event?action=VIEW&eid=anZicm9nZ29zMTM5YXVtbmo0cDVvZzlyZDAgaW5ib3hhcHB + 0ZXN0LmZyZW5jaEBt&tok=MTUja2FyaW1AbmlsYXMuY29tOTdlNTFmYzliN2Y5Y2RhMTQ1MzAwM + GYyMThjNGVlNGM3NTYwYzZjYg&ctz=America/Los_Angeles&hl=fr. +LAST-MODIFIED:20150318T190309Z +LOCATION:Olympia Hall\\, 28 Boulevard des Capucines\\, 75009 Paris\\, France +SEQUENCE:0 +STATUS:CONFIRMED +SUMMARY:the Strokes +TRANSP:OPAQUE +END:VEVENT +END:VCALENDAR diff --git a/inbox/test/events/fixtures/self_sent_v2.ics b/inbox/test/events/fixtures/self_sent_v2.ics new file mode 100644 index 000000000..f8ee59064 --- /dev/null +++ b/inbox/test/events/fixtures/self_sent_v2.ics @@ -0,0 +1,29 @@ +BEGIN:VCALENDAR +PRODID:-//Google Inc//Google Calendar 70.9054//EN +VERSION:2.0 +CALSCALE:GREGORIAN +METHOD:REQUEST +BEGIN:VEVENT +DTSTART:20150319T233000Z +DTEND:20150320T003000Z +DTSTAMP:20150318T190431Z +ORGANIZER;CN=Karim Hamidou:mailto:inboxapptest@gmail.com +UID:burgos@google.com +ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=ACCEPTED;RSVP=TRUE + ;CN=Karim Hamidou;X-NUM-GUESTS=0:mailto:karim@nilas.com +ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=NEEDS-ACTION;RSVP= + TRUE;CN=inboxapptest.french@gmail.com;X-NUM-GUESTS=0:mailto:inboxapptest.fr + ench@gmail.com +CREATED:20150318T190309Z +DESCRIPTION:Affichez votre \xc3\xa9v\xc3\xa9nement sur la page https://www.google.com/cal + endar/event?action=VIEW&eid=anZicm9nZ29zMTM5YXVtbmo0cDVvZzlyZDAgaW5ib3hhcHB + 0ZXN0LmZyZW5jaEBt&tok=MTUja2FyaW1AbmlsYXMuY29tOTdlNTFmYzliN2Y5Y2RhMTQ1MzAwM + GYyMThjNGVlNGM3NTYwYzZjYg&ctz=America/Los_Angeles&hl=fr. 
+LAST-MODIFIED:20150318T190431Z +LOCATION:Le Zenith\\, 211 Avenue Jean Jaures\\, 75019 Paris\\, France +SEQUENCE:1 +STATUS:CONFIRMED +SUMMARY:the Strokes +TRANSP:OPAQUE +END:VEVENT +END:VCALENDAR diff --git a/inbox/test/events/fixtures/windows_event.ics b/inbox/test/events/fixtures/windows_event.ics new file mode 100644 index 000000000..ee0c24d86 --- /dev/null +++ b/inbox/test/events/fixtures/windows_event.ics @@ -0,0 +1,51 @@ +BEGIN:VCALENDAR +METHOD:REQUEST +PRODID:Microsoft Exchange Server 2010 +VERSION:2.0 +BEGIN:VTIMEZONE +TZID:Romance Standard Time +BEGIN:STANDARD +DTSTART:16010101T030000 +TZOFFSETFROM:+0200 +TZOFFSETTO:+0100 +RRULE:FREQ=YEARLY;INTERVAL=1;BYDAY=-1SU;BYMONTH=10 +END:STANDARD +BEGIN:DAYLIGHT +DTSTART:16010101T020000 +TZOFFSETFROM:+0100 +TZOFFSETTO:+0200 +RRULE:FREQ=YEARLY;INTERVAL=1;BYDAY=-1SU;BYMONTH=3 +END:DAYLIGHT +END:VTIMEZONE +BEGIN:VEVENT +ORGANIZER;CN=Inbox Apptest2:MAILTO:inbox.apptest2@inboxapp.onmicrosoft.com +ATTENDEE;ROLE=REQ-PARTICIPANT;PARTSTAT=NEEDS-ACTION;RSVP=TRUE;CN=Karim Hami + dou:MAILTO:karim@nilas.com +DESCRIPTION;LANGUAGE=fr-FR:De terre\n +UID:040000008200E00074C5B7101A82E008000000005853D6D90C4BD001000000000000000 + 0100000000827DA11FE47F5489049C8464AB0838A +SUMMARY;LANGUAGE=fr-FR:Pommes +DTSTART;TZID=Romance Standard Time:20150220T093000 +DTEND;TZID=Romance Standard Time:20150220T100000 +CLASS:PUBLIC +PRIORITY:5 +DTSTAMP:20150217T235839Z +TRANSP:OPAQUE +STATUS:CONFIRMED +SEQUENCE:4 +LOCATION;LANGUAGE=fr-FR: +X-MICROSOFT-CDO-APPT-SEQUENCE:4 +X-MICROSOFT-CDO-OWNERAPPTID:2113048664 +X-MICROSOFT-CDO-BUSYSTATUS:TENTATIVE +X-MICROSOFT-CDO-INTENDEDSTATUS:BUSY +X-MICROSOFT-CDO-ALLDAYEVENT:FALSE +X-MICROSOFT-CDO-IMPORTANCE:1 +X-MICROSOFT-CDO-INSTTYPE:0 +X-MICROSOFT-DISALLOW-COUNTER:FALSE +BEGIN:VALARM +DESCRIPTION:REMINDER +TRIGGER;RELATED=START:-PT15M +ACTION:DISPLAY +END:VALARM +END:VEVENT +END:VCALENDAR diff --git a/inbox/test/events/test_datetime.py b/inbox/test/events/test_datetime.py new file mode 100644 index 000000000..1b636b3a3 --- /dev/null +++ b/inbox/test/events/test_datetime.py @@ -0,0 +1,168 @@ +import arrow +from datetime import timedelta + +from inbox.models.when import (Time, TimeSpan, Date, DateSpan, parse_as_when, + parse_utc) +from inbox.models.event import time_parse +from inbox.events.util import (google_to_event_time, parse_google_time, + parse_datetime) + + +def test_when_time(): + start_time = arrow.get('2014-09-30T15:34:00.000-07:00') + time = {'time': start_time.timestamp} + ts = parse_as_when(time) + assert isinstance(ts, Time) + assert ts.start == start_time.to('utc') + assert ts.end == start_time.to('utc') + assert not ts.spanning + assert not ts.all_day + assert ts.is_time + assert not ts.is_date + assert ts.delta == timedelta(hours=0) + + +def test_when_timespan(): + start_time = arrow.get('2014-09-30T15:34:00.000-07:00') + end_time = arrow.get('2014-09-30T16:34:00.000-07:00') + timespan = {'start_time': start_time.timestamp, + 'end_time': end_time.timestamp} + ts = parse_as_when(timespan) + assert isinstance(ts, TimeSpan) + assert ts.start == start_time.to('utc') + assert ts.end == end_time.to('utc') + assert ts.spanning + assert not ts.all_day + assert ts.is_time + assert not ts.is_date + assert ts.delta == timedelta(hours=1) + + +def test_when_date(): + start_date = arrow.get('2014-09-30') + date = {'date': start_date.format('YYYY-MM-DD')} + ts = parse_as_when(date) + assert isinstance(ts, Date) + assert ts.start == start_date + assert ts.end == start_date + assert not ts.spanning + assert ts.all_day + assert not 
ts.is_time + assert ts.is_date + assert ts.delta == timedelta(days=0) + + +def test_when_datespan(): + start_date = arrow.get('2014-09-30') + end_date = arrow.get('2014-10-01') + datespan = {'start_date': start_date.format('YYYY-MM-DD'), + 'end_date': end_date.format('YYYY-MM-DD')} + ts = parse_as_when(datespan) + assert isinstance(ts, DateSpan) + assert ts.start == start_date + assert ts.end == end_date + assert ts.spanning + assert ts.all_day + assert not ts.is_time + assert ts.is_date + assert ts.delta == timedelta(days=1) + + +def test_when_spans_arent_spans(): + # If start and end are the same, don't create a Span object + start_date = arrow.get('2014-09-30') + end_date = arrow.get('2014-09-30') + datespan = {'start_date': start_date.format('YYYY-MM-DD'), + 'end_date': end_date.format('YYYY-MM-DD')} + ts = parse_as_when(datespan) + assert isinstance(ts, Date) + + start_time = arrow.get('2014-09-30T15:34:00.000-07:00') + end_time = arrow.get('2014-09-30T15:34:00.000-07:00') + timespan = {'start_time': start_time.timestamp, + 'end_time': end_time.timestamp} + ts = parse_as_when(timespan) + assert isinstance(ts, Time) + + +def test_parse_datetime(): + t = '20140104T102030Z' + dt = parse_datetime(t) + assert dt == arrow.get(2014, 01, 04, 10, 20, 30) + + t = '2014-01-15T17:00:00-05:00' + dt = parse_datetime(t) + assert dt == arrow.get(2014, 01, 15, 22, 00, 00) + + t = None + dt = parse_datetime(t) + assert dt is None + + t = 1426008600 + dt = parse_datetime(t) + assert dt == arrow.get(2015, 03, 10, 17, 30, 00) + + +def test_time_parse(): + t = 1426008600 + validated = parse_utc(t) + stored = time_parse(t) + + assert validated.naive == stored + + t = str(1426008600) + validated = parse_utc(t) + stored = time_parse(t) + + assert validated.naive == stored + + +def test_parse_google_time(): + t = {'dateTime': '2012-10-15T17:00:00-07:00', + 'timeZone': 'America/Los_Angeles'} + gt = parse_google_time(t) + assert gt.to('utc') == arrow.get(2012, 10, 16, 00, 00, 00) + + t = {'dateTime': '2012-10-15T13:00:00+01:00'} + gt = parse_google_time(t) + assert gt.to('utc') == arrow.get(2012, 10, 15, 12, 00, 00) + + t = {'date': '2012-10-15'} + gt = parse_google_time(t) + assert gt == arrow.get(2012, 10, 15) + + +def test_google_to_event_time(): + start = {'dateTime': '2012-10-15T17:00:00-07:00', + 'timeZone': 'America/Los_Angeles'} + end = {'dateTime': '2012-10-15T17:25:00-07:00', + 'timeZone': 'America/Los_Angeles'} + event_time = google_to_event_time(start, end) + assert event_time.start == arrow.get(2012, 10, 16, 00, 00, 00) + assert event_time.end == arrow.get(2012, 10, 16, 00, 25, 00) + assert event_time.all_day is False + + start = {'date': '2012-10-15'} + end = {'date': '2012-10-16'} + event_time = google_to_event_time(start, end) + assert event_time.start == arrow.get(2012, 10, 15) + assert event_time.end == arrow.get(2012, 10, 15) + assert event_time.all_day is True + + +def test_google_to_event_time_reverse(): + end = {'dateTime': '2012-10-15T17:00:00-07:00', + 'timeZone': 'America/Los_Angeles'} + start = {'dateTime': '2012-10-15T17:25:00-07:00', + 'timeZone': 'America/Los_Angeles'} + event_time = google_to_event_time(start, end) + assert event_time.start == arrow.get(2012, 10, 16, 00, 00, 00) + assert event_time.end == arrow.get(2012, 10, 16, 00, 25, 00) + assert event_time.all_day is False + + start = {'date': '2012-10-15'} + end = {'date': '2012-10-16'} + event_time = google_to_event_time(start, end) + assert event_time.start == arrow.get(2012, 10, 15) + assert event_time.end == arrow.get(2012, 
10, 15) + assert event_time.all_day is True diff --git a/inbox/test/events/test_events_util.py b/inbox/test/events/test_events_util.py new file mode 100644 index 000000000..66e0d1780 --- /dev/null +++ b/inbox/test/events/test_events_util.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +# flake8: noqa: F811 + +from datetime import datetime +from inbox.models.event import Event + + +def test_base36_validation(): + from inbox.events.util import valid_base36 + assert valid_base36("1234zerzerzedsfsd") is True + assert valid_base36("zerzerzedsfsd") is True + assert valid_base36("é(§è!è§('") is False + assert valid_base36("_°987643") is False + + +def test_event_organizer_parsing(): + from inbox.models.event import Event + e = Event() + e.owner = 'Jean Lecanuet ' + assert e.organizer_email == 'jean.lecanuet@orange.fr' + + e.owner = u'Pierre Mendès-France ' + assert e.organizer_email == 'pierre-mendes.france@orange.fr' + + e.owner = u'Pierre Messmer < pierre.messmer@orange.fr >' + assert e.organizer_email == 'pierre.messmer@orange.fr' + + +def test_removed_participants(): + from inbox.events.util import removed_participants + helena = {'email': 'helena@nylas.com', 'name': 'Helena Handbasket'} + ben = {'email': 'ben@nylas.com', 'name': 'Ben Handbasket'} + paul = {'email': 'paul@nylas.com', 'name': 'Paul Hochon'} + + assert removed_participants([], []) == [] + assert removed_participants([helena], [ben]) == [helena] + assert removed_participants([helena, ben], [helena]) == [ben] + assert removed_participants([helena, ben], [paul, helena]) == [ben] + assert len(removed_participants([helena, ben, paul], [helena])) == 2 + assert ben in removed_participants([helena, ben, paul], [helena]) + assert paul in removed_participants([helena, ben, paul], [helena]) + + +def test_unicode_event_truncation(db, default_account): + emoji_str = u"".join([u"😁" for i in range(300)]) + title = "".join(["a" for i in range(2000)]) + + e = Event(raw_data='', + busy=True, + all_day=False, + read_only=False, + uid='31418', + start=datetime(2015, 2, 22, 11, 11), + end=datetime(2015, 2, 22, 22, 22), + is_owner=True, + calendar=default_account.emailed_events_calendar, + title=title, + location=emoji_str, + participants=[]) + e.namespace = default_account.namespace + db.session.add(e) + db.session.commit() + + # Both location and title should be properly truncated to their max lengths. + # It's ok to have N unicode characters in a VARCHAR(N) field because + # the column is uft8-encoded. 
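+        # (The assertions below pin those limits: location is capped at 255
+        # characters and title at 1024.)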
+ assert len(e.location) == 255 + assert len(e.title) == 1024 diff --git a/inbox/test/events/test_google_events.py b/inbox/test/events/test_google_events.py new file mode 100644 index 000000000..6ae05142a --- /dev/null +++ b/inbox/test/events/test_google_events.py @@ -0,0 +1,601 @@ +import arrow +import json +import gevent +import mock +import pytest +import requests +from inbox.basicauth import AccessNotEnabledError +from inbox.events.google import GoogleEventsProvider, parse_event_response +from inbox.models import Calendar, Event +from inbox.models.event import RecurringEvent, RecurringEventOverride + + +def cmp_cal_attrs(calendar1, calendar2): + return all(getattr(calendar1, attr) == getattr(calendar2, attr) for attr in + ('name', 'uid', 'description', 'read_only')) + + +def cmp_event_attrs(event1, event2): + for attr in ('title', 'description', 'location', 'start', 'end', 'all_day', + 'owner', 'read_only', 'participants', 'recurrence'): + if getattr(event1, attr) != getattr(event2, attr): + print attr, getattr(event1, attr), getattr(event2, attr) + return all(getattr(event1, attr) == getattr(event2, attr) for attr in + ('title', 'description', 'location', 'start', 'end', 'all_day', + 'owner', 'read_only', 'participants', 'recurrence')) + + +def test_calendar_parsing(): + raw_response = [ + { + 'accessRole': 'owner', + 'backgroundColor': '#9a9cff', + 'colorId': '17', + 'defaultReminders': [{'method': 'popup', 'minutes': 30}], + 'etag': '"1425508164135000"', + 'foregroundColor': '#000000', + 'id': 'ben.bitdiddle2222@gmail.com', + 'kind': 'calendar#calendarListEntry', + 'notificationSettings': { + 'notifications': [ + {'method': 'email', 'type': 'eventCreation'}, + {'method': 'email', 'type': 'eventChange'}, + {'method': 'email', 'type': 'eventCancellation'}, + {'method': 'email', 'type': 'eventResponse'} + ] + }, + 'primary': True, + 'selected': True, + 'summary': 'ben.bitdiddle2222@gmail.com', + 'timeZone': 'America/Los_Angeles' + }, + { + 'accessRole': 'reader', + 'backgroundColor': '#f83a22', + 'colorId': '3', + 'defaultReminders': [], + 'description': 'Holidays and Observances in United States', + 'etag': '"1399416119263000"', + 'foregroundColor': '#000000', + 'id': 'en.usa#holiday@group.v.calendar.google.com', + 'kind': 'calendar#calendarListEntry', + 'selected': True, + 'summary': 'Holidays in United States', + 'timeZone': 'America/Los_Angeles' + }, + { + 'defaultReminders': [], + 'deleted': True, + 'etag': '"1425952878772000"', + 'id': 'fg0s7qel95q86log75ilhhf12g@group.calendar.google.com', + 'kind': 'calendar#calendarListEntry' + } + ] + expected_deletes = ['fg0s7qel95q86log75ilhhf12g@group.calendar.google.com'] + expected_updates = [ + Calendar(uid='ben.bitdiddle2222@gmail.com', + name='ben.bitdiddle2222@gmail.com', + description=None, + read_only=False), + Calendar(uid='en.usa#holiday@group.v.calendar.google.com', + name='Holidays in United States', + description='Holidays and Observances in United States', + read_only=True) + ] + + provider = GoogleEventsProvider(1, 1) + provider._get_raw_calendars = mock.MagicMock( + return_value=raw_response) + deletes, updates = provider.sync_calendars() + assert deletes == expected_deletes + for obtained, expected in zip(updates, expected_updates): + assert cmp_cal_attrs(obtained, expected) + + +def test_event_parsing(): + raw_response = [ + { + 'created': '2012-10-09T22:35:50.000Z', + 'creator': { + 'displayName': 'Eben Freeman', + 'email': 'freemaneben@gmail.com', + 'self': True + }, + 'end': {'dateTime': '2012-10-15T18:00:00-07:00'}, + 
'etag': '"2806773858144000"', + 'htmlLink': 'https://www.google.com/calendar/event?eid=FOO', + 'iCalUID': 'tn7krk4cekt8ag3pk6gapqqbro@google.com', + 'id': 'tn7krk4cekt8ag3pk6gapqqbro', + 'kind': 'calendar#event', + 'organizer': { + 'displayName': 'Eben Freeman', + 'email': 'freemaneben@gmail.com', + 'self': True + }, + 'attendees': [ + {'displayName': 'MITOC BOD', + 'email': 'mitoc-bod@mit.edu', + 'responseStatus': 'accepted'}, + {'displayName': 'Eben Freeman', + 'email': 'freemaneben@gmail.com', + 'responseStatus': 'accepted'} + ], + 'reminders': {'useDefault': True}, + 'recurrence': ['RRULE:FREQ=WEEKLY;UNTIL=20150209T075959Z;BYDAY=MO'], + 'sequence': 0, + 'start': {'dateTime': '2012-10-15T17:00:00-07:00'}, + 'status': 'confirmed', + 'summary': 'BOD Meeting', + 'updated': '2014-06-21T21:42:09.072Z' + }, + { + 'created': '2014-01-09T03:33:02.000Z', + 'creator': { + 'displayName': 'Holidays in United States', + 'email': 'en.usa#holiday@group.v.calendar.google.com', + 'self': True + }, + 'end': {u'date': '2014-06-16'}, + 'etag': '"2778476764000000"', + 'htmlLink': 'https://www.google.com/calendar/event?eid=BAR', + 'iCalUID': '20140615_60o30dr564o30c1g60o30dr4ck@google.com', + 'id': '20140615_60o30dr564o30c1g60o30dr4ck', + 'kind': 'calendar#event', + 'organizer': { + 'displayName': 'Holidays in United States', + 'email': 'en.usa#holiday@group.v.calendar.google.com', + 'self': True + }, + 'sequence': 0, + 'start': {'date': '2014-06-15'}, + 'status': 'confirmed', + 'summary': "Fathers' Day", + 'transparency': 'transparent', + 'updated': '2014-01-09T03:33:02.000Z', + 'visibility': 'public' + }, + { + 'created': '2015-03-10T01:19:59.000Z', + 'creator': { + 'displayName': 'Ben Bitdiddle', + 'email': 'ben.bitdiddle2222@gmail.com', + 'self': True + }, + 'end': {u'date': '2015-03-11'}, + 'etag': '"2851906839480000"', + 'htmlLink': 'https://www.google.com/calendar/event?eid=BAZ', + 'iCalUID': '3uisajkmdjqo43tfc3ig1l5hek@google.com', + 'id': '3uisajkmdjqo43tfc3ig1l5hek', + 'kind': 'calendar#event', + 'organizer': { + 'displayName': 'Ben Bitdiddle', + 'email': 'ben.bitdiddle2222@gmail.com', + 'self': True}, + 'reminders': {u'useDefault': False}, + 'sequence': 1, + 'start': {u'date': '2015-03-10'}, + 'status': 'cancelled', + 'summary': 'TUESDAY', + 'transparency': 'transparent', + 'updated': '2015-03-10T02:10:19.740Z' + } + ] + expected_deletes = ['3uisajkmdjqo43tfc3ig1l5hek'] + expected_updates = [ + RecurringEvent(uid='tn7krk4cekt8ag3pk6gapqqbro', + title='BOD Meeting', + description=None, + read_only=False, + start=arrow.get(2012, 10, 16, 0, 0, 0), + end=arrow.get(2012, 10, 16, 1, 0, 0), + all_day=False, + busy=True, + owner='Eben Freeman ', + recurrence=['RRULE:FREQ=WEEKLY;UNTIL=20150209T075959Z;BYDAY=MO'], + participants=[ + {'email': 'mitoc-bod@mit.edu', + 'name': 'MITOC BOD', + 'status': 'yes', + 'notes': None}, + {'email': 'freemaneben@gmail.com', + 'name': 'Eben Freeman', + 'status': 'yes', + 'notes': None} + ]), + Event(uid='20140615_60o30dr564o30c1g60o30dr4ck', + title="Fathers' Day", + description=None, + read_only=False, + busy=False, + start=arrow.get(2014, 06, 15), + end=arrow.get(2014, 06, 15), + all_day=True, + owner='Holidays in United States ', + participants=[]) + ] + + provider = GoogleEventsProvider(1, 1) + provider.calendars_table = {'uid': False} + provider._get_raw_events = mock.MagicMock( + return_value=raw_response) + updates = provider.sync_events('uid', 1) + + # deleted events are actually only marked as + # cancelled. Look for them in the updates stream. 
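+    # (Google returns them with status == 'cancelled' when queried with
+    # showDeleted=True; see test_cancelled_override_creation below.)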
+ found_cancelled_event = False + for event in updates: + if event.uid in expected_deletes and event.status == 'cancelled': + found_cancelled_event = True + break + + assert found_cancelled_event + + for obtained, expected in zip(updates, expected_updates): + print obtained, expected + assert cmp_event_attrs(obtained, expected) + + # Test read-only support + raw_response = [ + { + 'created': '2014-01-09T03:33:02.000Z', + 'creator': { + 'displayName': 'Holidays in United States', + 'email': 'en.usa#holiday@group.v.calendar.google.com', + 'self': True + }, + 'end': {u'date': '2014-06-16'}, + 'etag': '"2778476764000000"', + 'htmlLink': 'https://www.google.com/calendar/event?eid=BAR', + 'iCalUID': '20140615_60o30dr564o30c1g60o30dr4ck@google.com', + 'id': '20140615_60o30dr564o30c1g60o30dr4ck', + 'kind': 'calendar#event', + 'organizer': { + 'displayName': 'Holidays in United States', + 'email': 'en.usa#holiday@group.v.calendar.google.com', + 'self': True + }, + 'sequence': 0, + 'start': {'date': '2014-06-15'}, + 'status': 'confirmed', + 'summary': "Fathers' Day", + 'transparency': 'transparent', + 'updated': '2014-01-09T03:33:02.000Z', + 'visibility': 'public', + 'guestCanModify': True}] + + provider = GoogleEventsProvider(1, 1) + + # This is a read-only calendar + provider.calendars_table = {'uid': True} + provider._get_raw_events = mock.MagicMock( + return_value=raw_response) + updates = provider.sync_events('uid', 1) + assert len(updates) == 1 + assert updates[0].read_only is True + + +def test_handle_offset_all_day_events(): + raw_event = { + 'created': '2014-01-09T03:33:02.000Z', + 'creator': { + 'displayName': 'Ben Bitdiddle', + 'email': 'ben.bitdiddle2222@gmail.com', + 'self': True + }, + 'etag': '"2778476764000000"', + 'htmlLink': 'https://www.google.com/calendar/event?eid=BAR', + 'iCalUID': '20140615_60o30dr564o30c1g60o30dr4ck@google.com', + 'id': '20140615_60o30dr564o30c1g60o30dr4ck', + 'kind': 'calendar#event', + 'organizer': { + 'displayName': 'Ben Bitdiddle', + 'email': 'ben.bitdiddle2222@gmail.com', + 'self': True + }, + 'sequence': 0, + 'start': {'date': '2014-03-15'}, + 'end': {u'date': '2014-03-15'}, + 'status': 'confirmed', + 'summary': 'Ides of March', + 'transparency': 'transparent', + 'updated': '2014-01-09T03:33:02.000Z', + 'visibility': 'public' + } + expected = Event(uid='20140615_60o30dr564o30c1g60o30dr4ck', + title='Ides of March', + description=None, + read_only=False, + busy=False, + start=arrow.get(2014, 03, 15), + end=arrow.get(2014, 03, 15), + all_day=True, + owner='Ben Bitdiddle ', + participants=[]) + assert cmp_event_attrs(expected, parse_event_response(raw_event, False)) + + +def test_handle_unparseable_dates(): + raw_response = [{ + 'id': '20140615_60o30dr564o30c1g60o30dr4ck', + 'start': {'date': '0000-01-01'}, + 'end': {'date': '0000-01-02'}, + 'summary': 'test' + }] + provider = GoogleEventsProvider(1, 1) + provider._get_raw_events = mock.MagicMock( + return_value=raw_response) + updates = provider.sync_events('uid', 1) + assert len(updates) == 0 + + +def test_pagination(): + first_response = requests.Response() + first_response.status_code = 200 + first_response._content = json.dumps({ + 'items': ['A', 'B', 'C'], + 'nextPageToken': 'CjkKKzlhb2tkZjNpZTMwNjhtZThllU' + }) + second_response = requests.Response() + second_response.status_code = 200 + second_response._content = json.dumps({ + 'items': ['D', 'E'] + }) + + requests.get = mock.Mock(side_effect=[first_response, second_response]) + provider = GoogleEventsProvider(1, 1) + provider._get_access_token = 
mock.Mock(return_value='token') + items = provider._get_resource_list('https://googleapis.com/testurl') + assert items == ['A', 'B', 'C', 'D', 'E'] + + +def test_handle_http_401(): + first_response = requests.Response() + first_response.status_code = 401 + + second_response = requests.Response() + second_response.status_code = 200 + second_response._content = json.dumps({ + 'items': ['A', 'B', 'C'] + }) + + requests.get = mock.Mock(side_effect=[first_response, second_response]) + provider = GoogleEventsProvider(1, 1) + provider._get_access_token = mock.Mock(return_value='token') + items = provider._get_resource_list('https://googleapis.com/testurl') + assert items == ['A', 'B', 'C'] + # Check that we actually refreshed the access token + assert len(provider._get_access_token.mock_calls) == 2 + + +@pytest.mark.usefixtures('mock_gevent_sleep') +def test_handle_quota_exceeded(): + first_response = requests.Response() + first_response.status_code = 403 + first_response._content = json.dumps({ + 'error': { + 'errors': [ + {'domain': 'usageLimits', + 'reason': 'userRateLimitExceeded', + 'message': 'User Rate Limit Exceeded'} + ], + 'code': 403, + 'message': 'User Rate Limit Exceeded' + } + }) + + second_response = requests.Response() + second_response.status_code = 200 + second_response._content = json.dumps({ + 'items': ['A', 'B', 'C'] + }) + + requests.get = mock.Mock(side_effect=[first_response, second_response]) + provider = GoogleEventsProvider(1, 1) + provider._get_access_token = mock.Mock(return_value='token') + items = provider._get_resource_list('https://googleapis.com/testurl') + # Check that we slept, then retried. + assert gevent.sleep.called + assert items == ['A', 'B', 'C'] + + +@pytest.mark.usefixtures('mock_gevent_sleep') +def test_handle_internal_server_error(): + first_response = requests.Response() + first_response.status_code = 503 + + second_response = requests.Response() + second_response.status_code = 200 + second_response._content = json.dumps({ + 'items': ['A', 'B', 'C'] + }) + + requests.get = mock.Mock(side_effect=[first_response, second_response]) + provider = GoogleEventsProvider(1, 1) + provider._get_access_token = mock.Mock(return_value='token') + items = provider._get_resource_list('https://googleapis.com/testurl') + # Check that we slept, then retried. 
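+    # (gevent.sleep is replaced by the mock_gevent_sleep fixture, so the
+    # retry does not actually wait.)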
+ assert gevent.sleep.called + assert items == ['A', 'B', 'C'] + + +def test_handle_api_not_enabled(): + response = requests.Response() + response.status_code = 403 + response._content = json.dumps({ + 'error': { + 'code': 403, + 'message': 'Access Not Configured.', + 'errors': [ + {'domain': 'usageLimits', 'message': 'Access Not Configured', + 'reason': 'accessNotConfigured', + 'extendedHelp': 'https://console.developers.google.com'} + ] + } + }) + + requests.get = mock.Mock(return_value=response) + provider = GoogleEventsProvider(1, 1) + provider._get_access_token = mock.Mock(return_value='token') + with pytest.raises(AccessNotEnabledError): + provider._get_resource_list('https://googleapis.com/testurl') + + +def test_handle_other_errors(): + response = requests.Response() + response.status_code = 403 + response._content = "This is not the JSON you're looking for" + requests.get = mock.Mock(return_value=response) + provider = GoogleEventsProvider(1, 1) + provider._get_access_token = mock.Mock(return_value='token') + with pytest.raises(requests.exceptions.HTTPError): + provider._get_resource_list('https://googleapis.com/testurl') + + response = requests.Response() + response.status_code = 404 + requests.get = mock.Mock(return_value=response) + provider = GoogleEventsProvider(1, 1) + provider._get_access_token = mock.Mock(return_value='token') + with pytest.raises(requests.exceptions.HTTPError): + provider._get_resource_list('https://googleapis.com/testurl') + + +def test_recurrence_creation(): + event = { + 'created': '2012-10-09T22:35:50.000Z', + 'creator': { + 'displayName': 'Eben Freeman', + 'email': 'freemaneben@gmail.com', + 'self': True + }, + 'end': {'dateTime': '2012-10-15T18:00:00-07:00'}, + 'etag': '"2806773858144000"', + 'htmlLink': 'https://www.google.com/calendar/event?eid=FOO', + 'iCalUID': 'tn7krk4cekt8ag3pk6gapqqbro@google.com', + 'id': 'tn7krk4cekt8ag3pk6gapqqbro', + 'kind': 'calendar#event', + 'organizer': { + 'displayName': 'Eben Freeman', + 'email': 'freemaneben@gmail.com', + 'self': True + }, + 'attendees': [ + {'displayName': 'MITOC BOD', + 'email': 'mitoc-bod@mit.edu', + 'responseStatus': 'accepted'}, + {'displayName': 'Eben Freeman', + 'email': 'freemaneben@gmail.com', + 'responseStatus': 'accepted'} + ], + 'reminders': {'useDefault': True}, + 'recurrence': ['RRULE:FREQ=WEEKLY;UNTIL=20150209T075959Z;BYDAY=MO', + 'EXDATE;TZID=America/Los_Angeles:20150208T010000'], + 'sequence': 0, + 'start': {'dateTime': '2012-10-15T17:00:00-07:00', + 'timeZone': 'America/Los_Angeles'}, + 'status': 'confirmed', + 'summary': 'BOD Meeting', + 'updated': '2014-06-21T21:42:09.072Z' + } + event = parse_event_response(event, False) + assert isinstance(event, RecurringEvent) + assert event.rrule == 'RRULE:FREQ=WEEKLY;UNTIL=20150209T075959Z;BYDAY=MO' + assert event.exdate == 'EXDATE;TZID=America/Los_Angeles:20150208T010000' + assert event.until == arrow.get(2015, 02, 9, 7, 59, 59) + assert event.start_timezone == 'America/Los_Angeles' + + +def test_override_creation(): + event = { + 'created': '2012-10-09T22:35:50.000Z', + 'creator': { + 'displayName': 'Eben Freeman', + 'email': 'freemaneben@gmail.com', + 'self': True + }, + 'end': {'dateTime': '2012-10-22T19:00:00-07:00'}, + 'etag': '"2806773858144000"', + 'htmlLink': 'https://www.google.com/calendar/event?eid=FOO', + 'iCalUID': 'tn7krk4cekt8ag3pk6gapqqbro@google.com', + 'id': 'tn7krk4cekt8ag3pk6gapqqbro_20121022T170000Z', + 'kind': 'calendar#event', + 'organizer': { + 'displayName': 'Eben Freeman', + 'email': 'freemaneben@gmail.com', + 
'self': True + }, + 'attendees': [ + {'displayName': 'MITOC BOD', + 'email': 'mitoc-bod@mit.edu', + 'responseStatus': 'accepted'}, + {'displayName': 'Eben Freeman', + 'email': 'freemaneben@gmail.com', + 'responseStatus': 'accepted'} + ], + 'originalStartTime': { + 'dateTime': '2012-10-22T17:00:00-07:00', + 'timeZone': 'America/Los_Angeles' + }, + 'recurringEventId': 'tn7krk4cekt8ag3pk6gapqqbro', + 'reminders': {'useDefault': True}, + 'sequence': 0, + 'start': {'dateTime': '2012-10-22T18:00:00-07:00', + 'timeZone': 'America/Los_Angeles'}, + 'status': 'confirmed', + 'summary': 'BOD Meeting', + 'updated': '2014-06-21T21:42:09.072Z' + } + event = parse_event_response(event, False) + assert isinstance(event, RecurringEventOverride) + assert event.master_event_uid == 'tn7krk4cekt8ag3pk6gapqqbro' + assert event.original_start_time == arrow.get(2012, 10, 23, 00, 00, 00) + + +def test_cancelled_override_creation(): + # With showDeleted=True, we receive cancelled events (including instances + # of recurring events) as full event objects, with status = 'cancelled'. + # Test that we save this as a RecurringEventOverride rather than trying + # to delete the UID. + raw_response = [{ + 'created': '2012-10-09T22:35:50.000Z', + 'creator': { + 'displayName': 'Eben Freeman', + 'email': 'freemaneben@gmail.com', + 'self': True + }, + 'end': {'dateTime': '2012-10-22T19:00:00-07:00'}, + 'etag': '"2806773858144000"', + 'htmlLink': 'https://www.google.com/calendar/event?eid=FOO', + 'iCalUID': 'tn7krk4cekt8ag3pk6gapqqbro@google.com', + 'id': 'tn7krk4cekt8ag3pk6gapqqbro_20121022T170000Z', + 'kind': 'calendar#event', + 'organizer': { + 'displayName': 'Eben Freeman', + 'email': 'freemaneben@gmail.com', + 'self': True + }, + 'attendees': [ + {'displayName': 'MITOC BOD', + 'email': 'mitoc-bod@mit.edu', + 'responseStatus': 'accepted'}, + {'displayName': 'Eben Freeman', + 'email': 'freemaneben@gmail.com', + 'responseStatus': 'accepted'} + ], + 'originalStartTime': { + 'dateTime': '2012-10-22T17:00:00-07:00', + 'timeZone': 'America/Los_Angeles' + }, + 'recurringEventId': 'tn7krk4cekt8ag3pk6gapqqbro', + 'reminders': {'useDefault': True}, + 'sequence': 0, + 'start': {'dateTime': '2012-10-22T18:00:00-07:00', + 'timeZone': 'America/Los_Angeles'}, + 'status': 'cancelled', + 'summary': 'BOD Meeting', + }] + + provider = GoogleEventsProvider(1, 1) + provider._get_raw_events = mock.MagicMock( + return_value=raw_response) + updates = provider.sync_events('uid', 1) + assert updates[0].cancelled is True diff --git a/inbox/test/events/test_ics_parsing.py b/inbox/test/events/test_ics_parsing.py new file mode 100644 index 000000000..adc39de86 --- /dev/null +++ b/inbox/test/events/test_ics_parsing.py @@ -0,0 +1,494 @@ +# -*- coding: utf-8 -*- +# flake8: noqa: F401 +import pytest +import arrow +from inbox.models.event import Event, RecurringEvent +from inbox.events.util import MalformedEventError +from inbox.events.ical import events_from_ics, import_attached_events +from inbox.test.util.base import (absolute_path, add_fake_calendar, + generic_account, add_fake_msg_with_calendar_part) + +FIXTURES = './events/fixtures/' + + +def test_invalid_ical(db, default_account): + with pytest.raises(MalformedEventError): + events_from_ics(default_account.namespace, + default_account.emailed_events_calendar, "asdf") + + +def test_windows_tz_ical(db, default_account): + data = None + with open(absolute_path(FIXTURES + 'windows_event.ics')) as fd: + data = fd.read() + + events = events_from_ics(default_account.namespace, + 
default_account.emailed_events_calendar, data) + events = events['invites'] + assert len(events) == 1, "There should be only one event in the test file" + + ev = events[0] + assert ev.start == arrow.get(2015, 2, 20, 8, 30) + assert ev.end == arrow.get(2015, 2, 20, 9, 0) + assert ev.title == "Pommes" + assert len(ev.participants) == 1 + assert ev.participants[0]['email'] == 'karim@nilas.com' + + +def test_icloud_allday_event(db, default_account): + data = None + with open(absolute_path(FIXTURES + 'icloud_oneday_event.ics')) as fd: + data = fd.read() + + events = events_from_ics(default_account.namespace, + default_account.emailed_events_calendar, data) + events = events['invites'] + assert len(events) == 1, "There should be only one event in the test file" + + ev = events[0] + assert ev.all_day is True + assert ev.start == arrow.get(2015, 3, 16, 0, 0) + assert ev.end == arrow.get(2015, 3, 17, 0, 0) + + assert len(ev.participants) == 2 + assert ev.participants[0]['email'] == 'karim@nilas.com' + + +def test_iphone_through_exchange(db, default_account): + data = None + with open(absolute_path(FIXTURES + 'iphone_through_exchange.ics')) as fd: + data = fd.read() + + events = events_from_ics(default_account.namespace, + default_account.emailed_events_calendar, data) + events = events['invites'] + assert len(events) == 1, "There should be only one event in the test file" + + ev = events[0] + assert ev.start == arrow.get(2014, 12, 27, 15, 0) + assert ev.end == arrow.get(2014, 12, 27, 16, 0) + + +def test_event_update(db, default_account, message): + add_fake_calendar(db.session, default_account.namespace.id, + name="Emailed events", read_only=True) + + with open(absolute_path(FIXTURES + 'gcal_v1.ics')) as fd: + ics_data = fd.read() + + msg = add_fake_msg_with_calendar_part(db.session, default_account, + ics_data) + + import_attached_events(db.session, default_account, msg) + db.session.commit() + + ev = db.session.query(Event).filter( + Event.uid == "jvbroggos139aumnj4p5og9rd0@google.com").one() + + assert ev.location == ("Olympia Hall, 28 Boulevard des Capucines, " + "75009 Paris, France") + + with open(absolute_path(FIXTURES + 'gcal_v2.ics')) as fd: + ics_data = fd.read() + + msg = add_fake_msg_with_calendar_part( + db.session, default_account, ics_data) + + import_attached_events(db.session, default_account, msg) + db.session.commit() + + ev = db.session.query(Event).filter( + Event.uid == "jvbroggos139aumnj4p5og9rd0@google.com").one() + + assert ev.location == (u"Le Zenith, 211 Avenue Jean Jaures, " + "75019 Paris, France") + + +# This test checks that: +# 1. we're processing invites we've sent to ourselves. +# 2. update only update events in the "emailed events" calendar. +@pytest.mark.only +def test_self_sent_update(db, default_account, message): + + # Create the calendars + add_fake_calendar(db.session, default_account.namespace.id, + name="Emailed events", read_only=True) + + default_calendar = add_fake_calendar(db.session, + default_account.namespace.id, + name="Calendar", read_only=False) + + # Import the self-sent event. 
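+    # (from_addr is set to the account owner below so that
+    # import_attached_events treats this as an invite we sent to ourselves.)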
+ with open(absolute_path(FIXTURES + 'self_sent_v1.ics')) as fd: + ics_data = fd.read() + + msg = add_fake_msg_with_calendar_part(db.session, default_account, + ics_data) + msg.from_addr = [(default_account.name, default_account.email_address)] + import_attached_events(db.session, default_account, msg) + db.session.commit() + + evs = db.session.query(Event).filter( + Event.uid == "burgos@google.com").all() + + assert len(evs) == 1 + ev = evs[0] + assert ev.location == ("Olympia Hall, 28 Boulevard des Capucines, " + "75009 Paris, France") + + # Create a copy of the event, and store it in the default calendar. + event_copy = Event() + event_copy.update(ev) + event_copy.calendar = default_calendar + db.session.add(event_copy) + db.session.commit() + + with open(absolute_path(FIXTURES + 'self_sent_v2.ics')) as fd: + ics_data = fd.read() + + msg = add_fake_msg_with_calendar_part( + db.session, default_account, ics_data) + + import_attached_events(db.session, default_account, msg) + db.session.commit() + + evs = db.session.query(Event).filter( + Event.uid == "burgos@google.com").all() + + # Check that the event in the default calendar didn't get updated. + assert len(evs) == 2 + for ev in evs: + db.session.refresh(ev) + if ev.calendar_id == default_calendar.id: + assert ev.location == ("Olympia Hall, 28 Boulevard des Capucines, " + "75009 Paris, France") + else: + assert ev.location == (u"Le Zenith, 211 Avenue Jean Jaures, " + "75019 Paris, France") + + +def test_recurring_ical(db, default_account): + with open(absolute_path(FIXTURES + 'gcal_recur.ics')) as fd: + ics_data = fd.read() + + msg = add_fake_msg_with_calendar_part( + db.session, default_account, ics_data) + + import_attached_events(db.session, default_account, msg) + db.session.commit() + + ev = db.session.query(Event).filter( + Event.uid == "flg2h6nam1cb1uqetgfkslrfrc@google.com").one() + + assert isinstance(ev, RecurringEvent) + assert isinstance(ev.recurring, list) + assert ev.start_timezone == 'America/Los_Angeles' + + +def test_event_no_end_time(db, default_account): + # With no end time, import should fail + with open(absolute_path(FIXTURES + 'meetup_infinite.ics')) as fd: + ics_data = fd.read() + + add_fake_msg_with_calendar_part(db.session, default_account, ics_data) + + # doesn't raise an exception (to not derail message parsing, but also + # doesn't create an event) + ev = db.session.query(Event).filter( + Event.uid == "nih2h78am1cb1uqetgfkslrfrc@meetup.com").first() + assert not ev + + +def test_event_no_participants(db, default_account): + data = None + with open(absolute_path(FIXTURES + 'event_with_no_participants.ics')) as fd: + data = fd.read() + + events = events_from_ics(default_account.namespace, + default_account.emailed_events_calendar, data) + events = events['invites'] + assert len(events) == 1, "There should be only one event in the test file" + ev = events[0] + assert len(ev.participants) == 0 + + +def test_multiple_events(db, default_account): + data = None + with open(absolute_path(FIXTURES + 'multiple_events.ics')) as fd: + data = fd.read() + + events = events_from_ics(default_account.namespace, + default_account.emailed_events_calendar, data) + events = events['invites'] + assert len(events) == 2 + ev0 = events[0] + ev1 = events[1] + assert len(ev0.participants) == 0 + assert len(ev1.participants) == 0 + + assert ev1.start == arrow.get(2015, 03, 17, 0, 0) + + +def test_icalendar_import(db, generic_account, message): + add_fake_calendar(db.session, generic_account.namespace.id, + name="Emailed events", 
read_only=True) + + with open(absolute_path(FIXTURES + 'invite_w_rsvps1.ics')) as fd: + ics_data = fd.read() + + msg = add_fake_msg_with_calendar_part( + db.session, generic_account, ics_data) + + import_attached_events(db.session, generic_account, msg) + + ev = db.session.query(Event).filter( + Event.uid == ("040000008200E00074C5B7101A82E00800000000" + "F9125A30B06BD001000000000000000010000000" + "9D791C7548BFD144BFA54F14213CAD25")).one() + + assert len(ev.participants) == 2 + for participant in ev.participants: + assert participant['status'] == 'noreply' + + +def test_rsvp_merging(db, generic_account, message): + # This test checks that RSVPs to invites we sent get merged. + # It does some funky stuff around calendars because by default + # autoimported invites end up in the "emailed events" calendar. + # However, we're simulating invite sending, which supposes using + # an event from another calendar. + add_fake_calendar(db.session, generic_account.namespace.id, + name="Emailed events", read_only=True) + cal2 = add_fake_calendar(db.session, generic_account.namespace.id, + name="Random calendar", read_only=True) + + with open(absolute_path(FIXTURES + 'invite_w_rsvps1.ics')) as fd: + ics_data = fd.read() + + msg = add_fake_msg_with_calendar_part( + db.session, generic_account, ics_data) + + import_attached_events(db.session, generic_account, msg) + + ev = db.session.query(Event).filter( + Event.uid == ("040000008200E00074C5B7101A82E00800000000" + "F9125A30B06BD001000000000000000010000000" + "9D791C7548BFD144BFA54F14213CAD25")).one() + + assert len(ev.participants) == 2 + for participant in ev.participants: + assert participant['status'] == 'noreply' + + ev.public_id = "cccc" + ev.calendar = cal2 + + with open(absolute_path(FIXTURES + 'invite_w_rsvps2.ics')) as fd: + ics_data = fd.read() + + msg2 = add_fake_msg_with_calendar_part( + db.session, generic_account, ics_data) + + import_attached_events(db.session, generic_account, msg2) + + ev = db.session.query(Event).filter( + Event.uid == ("040000008200E00074C5B7101A82E00800000000" + "F9125A30B06BD001000000000000000010000000" + "9D791C7548BFD144BFA54F14213CAD25")).one() + + assert len(ev.participants) == 2 + for participant in ev.participants: + if participant['email'] == 'test1@example.com': + assert participant['status'] == 'maybe' + assert participant['name'] == 'Inbox Apptest' + elif participant['email'] == 'karim@example.com': + assert participant['status'] == 'noreply' + + with open(absolute_path(FIXTURES + 'invite_w_rsvps3.ics')) as fd: + ics_data = fd.read() + + msg3 = add_fake_msg_with_calendar_part( + db.session, generic_account, ics_data) + + import_attached_events(db.session, generic_account, msg3) + + ev = db.session.query(Event).filter( + Event.uid == ("040000008200E00074C5B7101A82E00800000000" + "F9125A30B06BD001000000000000000010000000" + "9D791C7548BFD144BFA54F14213CAD25")).one() + + assert len(ev.participants) == 2 + + for participant in ev.participants: + if participant['email'] == 'test1@example.com': + assert participant['status'] == 'maybe' + assert participant['name'] == 'Inbox Apptest' + elif participant['email'] == 'karim@example.com': + assert participant['name'] == 'Karim Hamidou' + assert participant['status'] == 'yes' + + # Check that we're handling sequence numbers correctly - i.e: an RSVP + # with a sequence number < to the event's sequence number should be + # discarded. 
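+    # The stored event's sequence number is bumped so the RSVP imported below
+    # counts as stale and should be discarded, leaving the participant
+    # statuses asserted afterwards unchanged.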
+ ev.sequence_number += 1 + + with open(absolute_path(FIXTURES + 'invite_w_rsvps_4.ics')) as fd: + ics_data = fd.read() + + msg4 = add_fake_msg_with_calendar_part( + db.session, generic_account, ics_data) + + import_attached_events(db.session, generic_account, msg3) + + ev = db.session.query(Event).filter( + Event.uid == ("040000008200E00074C5B7101A82E00800000000" + "F9125A30B06BD001000000000000000010000000" + "9D791C7548BFD144BFA54F14213CAD25")).one() + + assert len(ev.participants) == 2 + for participant in ev.participants: + if participant['email'] == 'test1@example.com': + assert participant['status'] == 'maybe' + assert participant['name'] == 'Inbox Apptest' + elif participant['email'] == 'karim@example.com': + assert participant['name'] == 'Karim Hamidou' + assert participant['status'] == 'yes' + + +def test_cancelled_event(db, default_account): + with open(absolute_path(FIXTURES + 'google_cancelled1.ics')) as fd: + ics_data = fd.read() + + msg = add_fake_msg_with_calendar_part( + db.session, default_account, ics_data) + + import_attached_events(db.session, default_account, msg) + db.session.commit() + + ev = db.session.query(Event).filter( + Event.uid == "c74p2nmutcd0kt69ku7rs8vu2g@google.com").one() + + assert ev.status == 'confirmed' + + with open(absolute_path(FIXTURES + 'google_cancelled2.ics')) as fd: + ics_data = fd.read() + + msg2 = add_fake_msg_with_calendar_part( + db.session, default_account, ics_data) + + import_attached_events(db.session, default_account, msg2) + db.session.commit() + + ev = db.session.query(Event).filter( + Event.uid == "c74p2nmutcd0kt69ku7rs8vu2g@google.com").one() + + assert ev.status == 'cancelled' + + +def test_icloud_cancelled_event(db, default_account): + with open(absolute_path(FIXTURES + 'icloud_cancelled1.ics')) as fd: + ics_data = fd.read() + + msg = add_fake_msg_with_calendar_part( + db.session, default_account, ics_data) + + import_attached_events(db.session, default_account, msg) + db.session.commit() + + ev = db.session.query(Event).filter( + Event.uid == "5919D444-7C99-4687-A526-FC5D10091318").one() + + assert ev.status == 'confirmed' + + with open(absolute_path(FIXTURES + 'icloud_cancelled2.ics')) as fd: + ics_data = fd.read() + + msg = add_fake_msg_with_calendar_part( + db.session, default_account, ics_data) + + import_attached_events(db.session, default_account, msg) + db.session.commit() + + ev = db.session.query(Event).filter( + Event.uid == "5919D444-7C99-4687-A526-FC5D10091318").one() + + assert ev.status == 'cancelled' + + +def test_multiple_summaries(db, default_account): + data = None + with open(absolute_path(FIXTURES + 'multiple_summaries.ics')) as fd: + data = fd.read() + + events = events_from_ics(default_account.namespace, + default_account.emailed_events_calendar, data) + events = events['invites'] + assert len(events) == 1 + assert events[0].title == 'The Strokes - Is this it?' + + +def test_invalid_rsvp(db, default_account): + # Test that we don't save an RSVP reply with an invalid id. + data = None + with open(absolute_path(FIXTURES + 'invalid_rsvp.ics')) as fd: + data = fd.read() + + msg = add_fake_msg_with_calendar_part( + db.session, default_account, data) + + import_attached_events(db.session, default_account, msg) + db.session.commit() + + ev = db.session.query(Event).filter( + Event.uid == "234252$cccc@nylas.com").all() + + assert len(ev) == 0 + + +def test_rsvp_for_other_provider(db, default_account): + # Test that we don't save RSVP replies which aren't replies to a Nylas + # invite. 
+ data = None + with open(absolute_path(FIXTURES + 'invalid_rsvp2.ics')) as fd: + data = fd.read() + + msg = add_fake_msg_with_calendar_part( + db.session, default_account, data) + + import_attached_events(db.session, default_account, msg) + db.session.commit() + + ev = db.session.query(Event).filter( + Event.uid == "234252cccc@google.com").all() + + assert len(ev) == 0 + + +def test_truncate_bogus_sequence_numbers(db, default_account): + data = None + with open(absolute_path(FIXTURES + 'bogus_sequence_number.ics')) as fd: + data = fd.read() + + msg = add_fake_msg_with_calendar_part( + db.session, default_account, data) + + import_attached_events(db.session, default_account, msg) + db.session.commit() + + ev = db.session.query(Event).filter( + Event.uid == "234252cccc@google.com").one() + + # Check that the sequence number got truncated to the biggest possible + # number. + assert ev.sequence_number == 2147483647L + + +def test_handle_missing_sequence_number(db, default_account): + with open(absolute_path(FIXTURES + 'event_without_sequence.ics')) as fd: + data = fd.read() + + events = events_from_ics(default_account.namespace, + default_account.emailed_events_calendar, data) + events = events['invites'] + assert len(events) == 1 + ev = events[0] + assert ev.sequence_number == 0 diff --git a/inbox/test/events/test_inviting.py b/inbox/test/events/test_inviting.py new file mode 100644 index 000000000..aa812c067 --- /dev/null +++ b/inbox/test/events/test_inviting.py @@ -0,0 +1,75 @@ +# -*- coding: utf-8 -*- +# flake8: noqa: F401,F811 +from inbox.test.util.base import event + + +def test_invite_generation(event, default_account): + from inbox.events.ical import generate_icalendar_invite + + event.sequence_number = 1 + event.participants = [{'email': 'helena@nylas.com'}, + {'email': 'myles@nylas.com'}] + cal = generate_icalendar_invite(event) + assert cal['method'] == 'REQUEST' + + for component in cal.walk(): + if component.name == "VEVENT": + assert component.get('summary') == event.title + assert int(component.get('sequence')) == event.sequence_number + assert component.get('location') == event.location + + attendees = component.get('attendee', []) + + # the iCalendar python module doesn't return a list when + # there's only one attendee. Go figure. + if not isinstance(attendees, list): + attendees = [attendees] + + for attendee in attendees: + email = unicode(attendee) + # strip mailto: if it exists + if email.lower().startswith('mailto:'): + email = email[7:] + + assert email in ['helena@nylas.com', 'myles@nylas.com'] + + +def test_message_generation(event, default_account): + from inbox.events.ical import generate_invite_message + event.title = 'A long walk on the beach' + event.participants = [{'email': 'helena@nylas.com'}] + msg = generate_invite_message('empty', event, default_account) + + # Make sure the From header is set correctly + assert msg.headers['From'] == "automated@notifications.nylas.com" + + # Check that we have an email with an HTML part, a plain text part, a + # text/calendar with METHOD=REQUEST and an attachment. 
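+    # (walk() only yields sub-parts by default, so with_self is passed for a
+    # single-part message to make sure its lone part is visited as well.)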
+ count = 0 + for mimepart in msg.walk(with_self=msg.content_type.is_singlepart()): + format_type = mimepart.content_type.format_type + subtype = mimepart.content_type.subtype + + if (format_type, subtype) in [('text', 'plain'), ('text', 'html'), + ('text', 'calendar; method=request'), + ('application', 'ics')]: + count += 1 + assert count == 3 + + +def test_unicode_message_generation(event, default_account): + from inbox.events.ical import generate_invite_message + event.title = u'Dîner chez François et Hélène' + event.description = u"""Cher Paul, +Je suis heureux de vous inviter à un diner le samedi 19 novembre 2011 à 19h30 +au chalet de l'île Daumesnil dans le bois de Vincennes. + +Je vous attend avec impatience. Bien à vous. +Hélène (Ἑλένη) +""" + + event.participants = [{'email': 'hélène@nylas.com'}] + generate_invite_message('empty', event, default_account) + + # That's it --- we just needed to make sure message + # generation shouldn't blow up. diff --git a/inbox/test/events/test_merge.py b/inbox/test/events/test_merge.py new file mode 100644 index 000000000..caf9b61f8 --- /dev/null +++ b/inbox/test/events/test_merge.py @@ -0,0 +1,87 @@ +# Basic tests for events merging. +from inbox.models.event import Event + + +def fake_event(): + return Event(title="The fifth element", + participants=[{"name": "Ronald Zubar", + "email": "ronald@example.com", + "status": "noreply", + "notes": "required"}]) + + +def fake_event2(): + return Event(title="The fifth element", + participants=[{"name": "Ronald Zubar", + "email": "ronald@example.com", + "status": "noreply", + "notes": "required"}, + {"name": "Ronald McDonald", + "email": "ronald@mcdonalds.com", + "status": "noreply", + "notes": "required"}]) + + +def test_overwrite(): + ev = fake_event() + ev2 = fake_event() + ev2.participants[0]["status"] = "yes" + + merged_participants = ev._partial_participants_merge(ev2) + assert merged_participants[0]["status"] == "yes" + + +def test_name_merging(): + # Test that we merge correctly emails and names. + ev = fake_event() + ev2 = fake_event() + + # Office365 sets the name to the email address when it's + # not available. + ev.participants[0]["name"] = "ronald@example.com" + ev2.participants[0]["status"] = "yes" + merged_participants = ev._partial_participants_merge(ev2) + + assert len(merged_participants) == 1 + assert merged_participants[0]["status"] == "yes" + assert merged_participants[0]["name"] == "Ronald Zubar" + + +def test_name_conflicts(): + # Test that we handle participants having the same name correctly. + ev = fake_event() + ev2 = fake_event() + + # Office365 sets the name to the email address when it's + # not available. + ev2.participants[0]["email"] = None + ev2.participants[0]["status"] = "yes" + merged_participants = ev._partial_participants_merge(ev2) + + assert len(merged_participants) == 2 + for participant in merged_participants: + if participant["email"] is None: + assert participant["status"] == "yes" + else: + assert participant["name"] == "Ronald Zubar" + + +def test_no_unrelated_overwrites(): + # Test that we're not overwriting participants who haven't been + # updated. 
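+    # fake_event2() starts with two participants; ev2's update only carries
+    # the None-email 'Ronald Zubar' entry, so the ronald@mcdonalds.com and
+    # ronald@example.com entries should survive the merge unchanged.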
+ ev = fake_event2() + ev2 = fake_event() + + ev2.participants[0]["email"] = None + ev2.participants[0]["status"] = "yes" + merged_participants = ev._partial_participants_merge(ev2) + + assert len(merged_participants) == 3 + + for participant in merged_participants: + if participant["email"] is None: + assert participant["status"] == "yes" + elif participant["email"] == "ronald@mcdonalds.com": + assert participant["name"] == "Ronald McDonald" + elif participant["email"] == "ronald@example.com": + assert participant["name"] == "Ronald Zubar" diff --git a/inbox/test/events/test_recurrence.py b/inbox/test/events/test_recurrence.py new file mode 100644 index 000000000..c46bc57e5 --- /dev/null +++ b/inbox/test/events/test_recurrence.py @@ -0,0 +1,593 @@ +import pytest +import arrow +import datetime +from dateutil import tz +from dateutil.rrule import rrulestr +from datetime import timedelta +from inbox.models.event import Event, RecurringEvent, RecurringEventOverride +from inbox.models.when import Date, Time, DateSpan, TimeSpan +from inbox.events.remote_sync import handle_event_updates +from inbox.events.recurring import (link_events, get_start_times, + parse_exdate, rrule_to_json) + +from nylas.logging import get_logger +log = get_logger() + +TEST_RRULE = ["RRULE:FREQ=WEEKLY;UNTIL=20140918T203000Z;BYDAY=TH"] +TEST_EXDATE = ["EXDATE;TZID=America/Los_Angeles:20140904T133000"] +ALL_DAY_RRULE = ["RRULE:FREQ=WEEKLY;UNTIL=20140911;BYDAY=TH"] +TEST_EXDATE_RULE = TEST_RRULE[:] +TEST_EXDATE_RULE.extend(TEST_EXDATE) + + +def recurring_event(db, account, calendar, rrule, + start=arrow.get(2014, 8, 7, 20, 30, 00), + end=arrow.get(2014, 8, 7, 21, 30, 00), + all_day=False, commit=True): + + # commit: are we returning a commited instance object? + if commit: + ev = db.session.query(Event).filter_by(uid='myuid').first() + if ev: + db.session.delete(ev) + ev = Event(namespace_id=account.namespace.id, + calendar=calendar, + title='recurring', + description='', + uid='myuid', + location='', + busy=False, + read_only=False, + reminders='', + recurrence=rrule, + start=start, + end=end, + all_day=all_day, + is_owner=False, + participants=[], + provider_name='inbox', + raw_data='', + original_start_tz='America/Los_Angeles', + original_start_time=None, + master_event_uid=None, + source='local') + + if commit: + db.session.add(ev) + db.session.commit() + + return ev + + +def recurring_override(db, master, original_start, start, end): + # Returns an Override that is explicitly linked to master + ev = recurring_override_instance(db, master, original_start, start, end) + ev.master = master + db.session.commit() + return ev + + +def recurring_override_instance(db, master, original_start, start, end): + # Returns an Override that has the master's UID, but is not linked yet + override_uid = '{}_{}'.format(master.uid, + original_start.strftime("%Y%m%dT%H%M%SZ")) + ev = db.session.query(Event).filter_by(uid=override_uid).first() + if ev: + db.session.delete(ev) + db.session.commit() + ev = Event(original_start_time=original_start, + master_event_uid=master.uid, + namespace_id=master.namespace_id, + calendar_id=master.calendar_id) + ev.update(master) + ev.uid = override_uid + ev.start = start + ev.end = end + ev.master_event_uid = master.uid + db.session.add(ev) + return ev + + +def test_create_recurrence(db, default_account, calendar): + event = recurring_event(db, default_account, calendar, TEST_EXDATE_RULE) + assert isinstance(event, RecurringEvent) + assert event.rrule is not None + assert event.exdate is not None + assert 
event.until is not None + + +def test_link_events_from_override(db, default_account, calendar, other_calendar): + # Test that by creating a recurring event and override separately, we + # can link them together based on UID and namespace_id when starting + # from the override. + master = recurring_event(db, default_account, calendar, TEST_EXDATE_RULE) + original_start = parse_exdate(master)[0] + override = Event(original_start_time=original_start, + master_event_uid=master.uid, + namespace_id=master.namespace_id, + calendar_id=calendar.id, + source='local') + assert isinstance(override, RecurringEventOverride) + link_events(db.session, override) + assert override.master == master + + +def test_linking_events_from_different_calendars(db, default_account, + calendar, other_calendar): + # Test that two events with the same UID but in different calendars don't + # get linked together. This is important because with the Google API, a + # recurring events can be in two calendars and have the same UID. + # In this case, we create two different recurring events. + master = recurring_event(db, default_account, calendar, TEST_EXDATE_RULE) + original_start = parse_exdate(master)[0] + override = Event(original_start_time=original_start, + master_event_uid=master.uid, + namespace_id=master.namespace_id, + calendar_id=other_calendar.id, + uid='blah', + source='local') + + assert isinstance(override, RecurringEventOverride) + link_events(db.session, override) + assert override.master is None + + +def test_link_events_from_master(db, default_account, calendar): + # Test that by creating a recurring event and override separately, we + # can link them together based on UID and namespace_id when starting + # from the master event. + master = recurring_event(db, default_account, calendar, TEST_EXDATE_RULE) + original_start = parse_exdate(master)[0] + override = recurring_override_instance(db, master, original_start, + master.start, master.end) + assert isinstance(master, RecurringEvent) + assert len(link_events(db.session, master)) == 1 + assert override in master.overrides + assert override.uid in [o.uid for o in master.overrides] + + +def test_link_events_from_master_diff_calendars(db, default_account, calendar, + other_calendar): + # Same as the previous test except that we check that it doesn't work across + # calendars (see test_link_events_from_master_diff_calendars for more + # details). + master = recurring_event(db, default_account, calendar, TEST_EXDATE_RULE) + original_start = parse_exdate(master)[0] + override = recurring_override_instance(db, master, original_start, + master.start, master.end) + override.calendar = other_calendar + assert isinstance(master, RecurringEvent) + o = link_events(db.session, master) + assert len(o) == 0 + + +def test_rrule_parsing(db, default_account, calendar): + # This test event starts on Aug 7 and recurs every Thursday at 20:30 + # until Sept 18. + # There should be 7 total occurrences including Aug 7 and Sept 18. 
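+    # (The Thursday occurrences at 20:30 UTC are Aug 7, 14, 21, 28 and
+    # Sep 4, 11, 18, 2014.)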
+ event = recurring_event(db, default_account, calendar, TEST_RRULE) + g = get_start_times(event) + assert len(g) == 7 + # Check we can supply an end date to cut off recurrence expansion + g = get_start_times(event, end=arrow.get(2014, 9, 12, 21, 30, 00)) + assert len(g) == 6 + + +def test_all_day_rrule_parsing(db, default_account, calendar): + event = recurring_event(db, default_account, calendar, ALL_DAY_RRULE, + start=arrow.get(2014, 8, 7), + end=arrow.get(2014, 8, 7), + all_day=True) + g = get_start_times(event) + assert len(g) == 6 + + +@pytest.mark.parametrize("rule", [ + "RRULE:FREQ=DAILY;UNTIL=20160913", + "RRULE:FREQ=DAILY;UNTIL=20160913T070000Z", + "RRULE:FREQ=DAILY;UNTIL=20160913T070000" +]) +def test_all_day_rrule_parsing_utc(db, default_account, calendar, rule): + # Use an RRULE with timezone away until date + all day event + event = recurring_event(db, default_account, calendar, rule, + start=arrow.get(2016, 9, 10), + end=arrow.get(2016, 9, 13), + all_day=True) + + start_boundary = datetime.datetime(2016, 9, 8, 1, 21, 55) + end_boundary = datetime.datetime(2016, 9, 16, 0, 31, 55) + + g = get_start_times(event, start=start_boundary, end=end_boundary) + assert len(g) == 4 + + +def test_rrule_exceptions(db, default_account, calendar): + # This test event starts on Aug 7 and recurs every Thursday at 20:30 + # until Sept 18, except on September 4. + event = recurring_event(db, default_account, calendar, TEST_EXDATE_RULE) + g = get_start_times(event) + assert len(g) == 6 + assert arrow.get(2014, 9, 4, 13, 30, 00) not in g + + +def test_inflation(db, default_account, calendar): + event = recurring_event(db, default_account, calendar, TEST_RRULE) + infl = event.inflate() + for i in infl: + assert i.title == event.title + assert (i.end - i.start) == (event.end - event.start) + assert i.public_id.startswith(event.public_id) + # make sure the original event instance appears too + assert event.start in [e.start for e in infl] + + +def test_inflation_exceptions(db, default_account, calendar): + event = recurring_event(db, default_account, calendar, TEST_RRULE) + infl = event.inflate() + for i in infl: + assert i.title == event.title + assert (i.end - i.start) == (event.end - event.start) + assert i.start != arrow.get(2014, 9, 4, 13, 30, 00) + + +def test_inflate_across_DST(db, default_account, calendar): + # If we inflate a RRULE that covers a change to/from Daylight Savings Time, + # adjust the base time accordingly to account for the new UTC offset. + # Daylight Savings for US/PST: March 8, 2015 - Nov 1, 2015 + dst_rrule = ["RRULE:FREQ=WEEKLY;BYDAY=TU"] + dst_event = recurring_event(db, default_account, calendar, dst_rrule, + start=arrow.get(2015, 03, 03, 03, 03, 03), + end=arrow.get(2015, 03, 03, 04, 03, 03)) + g = get_start_times(dst_event, end=arrow.get(2015, 03, 21)) + + # In order for this event to occur at the same local time, the recurrence + # rule should be expanded to 03:03:03 before March 8, and 02:03:03 after, + # keeping the local time of the event consistent at 19:03. + # This is consistent with how Google returns recurring event instances. 
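+    # (Before March 8 America/Los_Angeles is UTC-8, so 03:03 UTC is 19:03
+    # local the previous evening; after the switch to UTC-7 the same 19:03
+    # local corresponds to 02:03 UTC.)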
+ local_tz = tz.gettz(dst_event.start_timezone) + + for time in g: + if time < arrow.get(2015, 3, 8): + assert time.hour == 3 + else: + assert time.hour == 2 + # Test that localizing these times is consistent + assert time.astimezone(local_tz).hour == 19 + + # Test an event that starts during local daylight savings time + dst_event = recurring_event(db, default_account, calendar, dst_rrule, + start=arrow.get(2015, 10, 27, 02, 03, 03), + end=arrow.get(2015, 10, 27, 03, 03, 03)) + g = get_start_times(dst_event, end=arrow.get(2015, 11, 11)) + for time in g: + if time > arrow.get(2015, 11, 1): + assert time.hour == 3 + else: + assert time.hour == 2 + assert time.astimezone(local_tz).hour == 19 + + +def test_inflate_all_day_event(db, default_account, calendar): + event = recurring_event(db, default_account, calendar, ALL_DAY_RRULE, + start=arrow.get(2014, 9, 4), + end=arrow.get(2014, 9, 4), all_day=True) + infl = event.inflate() + for i in infl: + assert i.all_day + assert isinstance(i.when, Date) + assert i.start in [arrow.get(2014, 9, 4), arrow.get(2014, 9, 11)] + + +def test_inflate_multi_day_event(db, default_account, calendar): + event = recurring_event(db, default_account, calendar, ALL_DAY_RRULE, + start=arrow.get(2014, 9, 4), + end=arrow.get(2014, 9, 5), all_day=True) + infl = event.inflate() + for i in infl: + assert i.all_day + assert isinstance(i.when, DateSpan) + assert i.start in [arrow.get(2014, 9, 4), arrow.get(2014, 9, 11)] + assert i.end in [arrow.get(2014, 9, 5), arrow.get(2014, 9, 12)] + + +def test_invalid_rrule_entry(db, default_account, calendar): + # If we don't know how to expand the RRULE, we treat the event as if + # it were a single instance. + event = recurring_event(db, default_account, calendar, 'INVALID_RRULE_YAY') + infl = event.inflate() + assert len(infl) == 1 + assert infl[0].start == event.start + + +def test_invalid_parseable_rrule_entry(db, default_account, calendar): + event = recurring_event(db, default_account, calendar, + ["RRULE:FREQ=CHRISTMAS;UNTIL=1984;BYDAY=QQ"]) + infl = event.inflate() + assert len(infl) == 1 + assert infl[0].start == event.start + + +def test_non_recurring_events_behave(db, default_account, calendar): + event = Event(namespace_id=default_account.namespace.id, + calendar=calendar, + title='not recurring', + description='', + uid='non_recurring_uid', + location='', + busy=False, + read_only=False, + reminders='', + recurrence=None, + start=arrow.get(2014, 07, 07, 13, 30), + end=arrow.get(2014, 07, 07, 13, 55), + all_day=False, + is_owner=False, + participants=[], + provider_name='inbox', + raw_data='', + original_start_tz='America/Los_Angeles', + original_start_time=None, + master_event_uid=None, + source='local') + assert isinstance(event, Event) + with pytest.raises(AttributeError): + event.inflate() + + +def test_inflated_events_cant_persist(db, default_account, calendar): + event = recurring_event(db, default_account, calendar, TEST_RRULE) + infl = event.inflate() + for i in infl: + db.session.add(i) + with pytest.raises(Exception) as excinfo: + # FIXME "No handlers could be found for logger" - ensure this is only + # a test issue or fix. + db.session.commit() + assert 'should not be committed' in str(excinfo.value) + + +def test_override_instantiated(db, default_account, calendar): + # Test that when a recurring event has overrides, they show up as + # RecurringEventOverrides, have links back to the parent, and don't + # appear twice in the event list. 
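+    # (The RRULE expands to 6 instances here because Sep 4 is EXDATEd out;
+    # the override supplies that occurrence at its new time, giving the 7
+    # events checked below.)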
+ event = recurring_event(db, default_account, calendar, TEST_EXDATE_RULE) + override = recurring_override(db, event, + arrow.get(2014, 9, 4, 20, 30, 00), + arrow.get(2014, 9, 4, 21, 30, 00), + arrow.get(2014, 9, 4, 22, 30, 00)) + all_events = event.all_events() + assert len(all_events) == 7 + assert override in all_events + + +def test_override_same_start(db, default_account, calendar): + # Test that when a recurring event has an override without a modified + # start date (ie. the RRULE has no EXDATE for that event), it doesn't + # appear twice in the all_events list. + event = recurring_event(db, default_account, calendar, TEST_RRULE) + override = recurring_override(db, event, + arrow.get(2014, 9, 4, 20, 30, 00), + arrow.get(2014, 9, 4, 20, 30, 00), + arrow.get(2014, 9, 4, 21, 30, 00)) + all_events = event.all_events() + assert len(all_events) == 7 + unique_starts = list(set([e.start for e in all_events])) + assert len(unique_starts) == 7 + assert override in all_events + + +def test_override_updated(db, default_account, calendar): + # Test that when a recurring event override is created or updated + # remotely, we update our override links appropriately. + event = recurring_event(db, default_account, calendar, TEST_RRULE) + assert event is not None + # create a new Event, as if we just got it from Google + master_uid = event.uid + override_uid = master_uid + "_20140814T203000Z" + override = Event(title='new override from google', + description='', + uid=override_uid, + location='', + busy=False, + read_only=False, + reminders='', + recurrence=None, + start=arrow.get(2014, 8, 14, 22, 30, 00), + end=arrow.get(2014, 8, 14, 23, 30, 00), + all_day=False, + is_owner=False, + participants=[], + provider_name='inbox', + raw_data='', + original_start_tz='America/Los_Angeles', + original_start_time=arrow.get(2014, 8, 14, 21, 30, 00), + master_event_uid=master_uid, + source='local') + handle_event_updates(default_account.namespace.id, + calendar.id, + [override], + log, + db.session) + db.session.commit() + # Lets see if the event got saved with the right info + find_override = db.session.query(Event).filter_by(uid=override_uid).one() + assert find_override is not None + assert find_override.master_event_id == event.id + + # Update the same override, making sure we don't create two + override = Event(title='new override from google', + description='', + uid=override_uid, + location='walk and talk', + busy=False, + read_only=False, + reminders='', + recurrence=None, + start=arrow.get(2014, 8, 14, 22, 15, 00), + end=arrow.get(2014, 8, 14, 23, 15, 00), + all_day=False, + is_owner=False, + participants=[], + provider_name='inbox', + raw_data='', + original_start_tz='America/Los_Angeles', + original_start_time=arrow.get(2014, 8, 14, 21, 30, 00), + master_event_uid=master_uid, + source='local') + handle_event_updates(default_account.namespace.id, + calendar.id, + [override], log, db.session) + db.session.commit() + # Let's see if the event got saved with the right info + find_override = db.session.query(Event).filter_by(uid=override_uid).one() + assert find_override is not None + assert find_override.master_event_id == event.id + assert find_override.location == 'walk and talk' + + +def test_override_cancelled(db, default_account, calendar): + # Test that overrides with status 'cancelled' are appropriately missing + # from the expanded event. 
+ event = recurring_event(db, default_account, calendar, TEST_EXDATE_RULE) + override = recurring_override(db, event, + arrow.get(2014, 9, 4, 20, 30, 00), + arrow.get(2014, 9, 4, 21, 30, 00), + arrow.get(2014, 9, 4, 22, 30, 00)) + override.cancelled = True + all_events = event.all_events() + assert len(all_events) == 6 + assert override not in all_events + assert not any([e.start == arrow.get(2014, 9, 4, 20, 30, 00) + for e in all_events]) + + +def test_new_instance_cancelled(db, default_account, calendar): + # Test that if we receive a cancelled override from Google, we save it + # as an override with cancelled status rather than deleting it. + event = recurring_event(db, default_account, calendar, TEST_EXDATE_RULE) + override_uid = event.uid + "_20140814T203000Z" + override = Event(title='CANCELLED', + description='', + uid=override_uid, + location='', + busy=False, + read_only=False, + reminders='', + recurrence=None, + start=arrow.get(2014, 8, 14, 22, 15, 00), + end=arrow.get(2014, 8, 14, 23, 15, 00), + all_day=False, + is_owner=False, + participants=[], + provider_name='inbox', + raw_data='', + original_start_tz='America/Los_Angeles', + original_start_time=arrow.get(2014, 8, 14, 21, 30, 00), + master_event_uid=event.uid, + cancelled=True, + source='local') + handle_event_updates(default_account.namespace.id, + calendar.id, + [override], log, db.session) + db.session.commit() + # Check the event got saved with the cancelled flag + find_override = db.session.query(Event).filter_by( + uid=override_uid, namespace_id=default_account.namespace.id).one() + assert find_override.cancelled is True + + +def test_when_delta(): + # Test that the event length is calculated correctly + ev = Event(namespace_id=0) + # Time: minutes is 0 if start/end at same time + ev.start = arrow.get(2015, 01, 01, 10, 00, 00) + ev.end = arrow.get(2015, 01, 01, 10, 00, 00) + when = ev.when + assert isinstance(when, Time) + assert ev.length == timedelta(minutes=0) + + # TimeSpan + ev.start = arrow.get(2015, 01, 01, 10, 00, 00) + ev.end = arrow.get(2015, 01, 01, 10, 30, 00) + when = ev.when + assert isinstance(when, TimeSpan) + assert ev.length == timedelta(minutes=30) + + # Date: notice days is 0 if starts/ends on same day + ev.all_day = True + ev.start = arrow.get(2015, 01, 01, 00, 00, 00) + ev.end = arrow.get(2015, 01, 01, 00, 00, 00) + when = ev.when + assert isinstance(when, Date) + assert ev.length == timedelta(days=0) + + # DateSpan + ev.all_day = True + ev.start = arrow.get(2015, 01, 01, 10, 00, 00) + ev.end = arrow.get(2015, 01, 02, 10, 00, 00) + when = ev.when + assert isinstance(when, DateSpan) + assert ev.length == timedelta(days=1) + + +def test_rrule_to_json(): + # Generate more test cases! + # http://jakubroztocil.github.io/rrule/ + r = 'RRULE:FREQ=WEEKLY;UNTIL=20140918T203000Z;BYDAY=TH' + r = rrulestr(r, dtstart=None) + j = rrule_to_json(r) + assert j.get('freq') == 'WEEKLY' + assert j.get('byweekday') == 'TH' + + r = 'FREQ=HOURLY;COUNT=30;WKST=MO;BYMONTH=1;BYMINUTE=42;BYSECOND=24' + r = rrulestr(r, dtstart=None) + j = rrule_to_json(r) + assert j.get('until') is None + assert j.get('byminute') is 42 + + +def test_master_cancelled(db, default_account, calendar): + # Test that when the master recurring event is cancelled, we cancel every + # override too. 
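+    # (The cancellation arrives as an uncommitted update with the same uid;
+    # handle_event_updates should mark both the stored master and its
+    # override as cancelled.)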
+ event = recurring_event(db, default_account, calendar, TEST_EXDATE_RULE) + override = recurring_override(db, event, + arrow.get(2014, 9, 4, 20, 30, 00), + arrow.get(2014, 9, 4, 21, 30, 00), + arrow.get(2014, 9, 4, 22, 30, 00)) + + update = recurring_event(db, default_account, calendar, TEST_EXDATE_RULE, + commit=False) + update.status = 'cancelled' + updates = [update] + + handle_event_updates(default_account.namespace.id, + calendar.id, + updates, log, db.session) + db.session.commit() + find_master = db.session.query(Event).filter_by(uid=event.uid).first() + assert find_master.status == 'cancelled' + + find_override = db.session.query(Event).filter_by(uid=override.uid).first() + assert find_override.status == 'cancelled' + + +def test_made_recurring_then_cancelled(db, default_account, calendar): + # Test that when an event is updated with a recurrence and cancelled at + # the same time, we cancel it. + normal = recurring_event(db, default_account, calendar, None) + # Check this is specifically an Event, not a RecurringEvent + assert type(normal) == Event + + # Update with a recurrence rule *and* cancellation + update = recurring_event(db, default_account, calendar, TEST_EXDATE_RULE, + commit=False) + update.status = 'cancelled' + updates = [update] + + handle_event_updates(default_account.namespace.id, + calendar.id, + updates, log, db.session) + db.session.commit() + + find_master = db.session.query(Event).filter_by(uid=normal.uid).first() + assert find_master.status == 'cancelled' diff --git a/inbox/test/events/test_rsvp.py b/inbox/test/events/test_rsvp.py new file mode 100644 index 000000000..c0a40ad2f --- /dev/null +++ b/inbox/test/events/test_rsvp.py @@ -0,0 +1,39 @@ +from inbox.models.event import Event +from inbox.events.ical import rsvp_recipient + + +def test_rsvp_recipient(default_account, message): + assert rsvp_recipient(None) is None + + event = Event() + event.owner = 'Georges Perec ' + assert rsvp_recipient(event) == 'georges@gmail.com' + + event = Event() + event.owner = '' + assert rsvp_recipient(event) == 'perec@gmail.com' + + event = Event() + event.owner = 'perec@gmail.com' + assert rsvp_recipient(event) == 'perec@gmail.com' + + event.owner = 'None ' + assert rsvp_recipient(event) is None + + message.from_addr = [('Georges Perec', 'georges@gmail.com')] + event = Event() + event.owner = None + event.message = message + assert rsvp_recipient(event) == message.from_addr[0][1] + + message.from_addr = None + assert rsvp_recipient(event) is None + + message.from_addr = [] + assert rsvp_recipient(event) is None + + message.from_addr = [('', '')] + assert rsvp_recipient(event) is None + + message.from_addr = [('Georges Sans Addresse', '')] + assert rsvp_recipient(event) is None diff --git a/inbox/test/events/test_sync.py b/inbox/test/events/test_sync.py new file mode 100644 index 000000000..0d9fc275c --- /dev/null +++ b/inbox/test/events/test_sync.py @@ -0,0 +1,194 @@ +# flake8: noqa: F401 +from datetime import datetime +from inbox.events.remote_sync import EventSync +from inbox.events.util import CalendarSyncResponse +from inbox.models import Calendar, Event, Transaction +from inbox.models.constants import MAX_INDEXABLE_LENGTH + + +# Placeholder values for non-nullable attributes +default_params = dict(raw_data='', + busy=True, + all_day=False, + read_only=False, + start=datetime(2015, 2, 22, 11, 11), + end=datetime(2015, 2, 22, 22, 22), + is_owner=True, + participants=[]) + + +# Mock responses from the provider with adds/updates/deletes + + +def calendar_response(): + 
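+    # CalendarSyncResponse is a (deleted_uids, updated_calendars) pair; no
+    # deletions here, just two calendars to create.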
return CalendarSyncResponse([], [ + Calendar(name='Important Meetings', + uid='first_calendar_uid', + read_only=False), + Calendar(name='Nefarious Schemes', + uid='second_calendar_uid', + read_only=False), + ]) + + +# Returns a calendar with name that is longer that our allowed column length of +# 191 (MAX_INDEXABLE_LENGTH). This name is 192 characters +def calendar_long_name(): + return CalendarSyncResponse( + [], [Calendar(name='Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris_!', + uid='long_calendar_uid', read_only=True)]) + + +def calendar_response_with_update(): + return CalendarSyncResponse( + [], [Calendar(name='Super Important Meetings', + uid='first_calendar_uid', + read_only=False)]) + + +def calendar_response_with_delete(): + return (['first_calendar_uid'], []) + + +def event_response(calendar_uid, sync_from_time): + if calendar_uid == 'first_calendar_uid': + return [ + Event(uid='first_event_uid', + title='Plotting Meeting', + **default_params), + Event(uid='second_event_uid', + title='Scheming meeting', + **default_params), + Event(uid='third_event_uid', + title='Innocent Meeting', + **default_params) + ] + else: + return [ + Event(uid='second_event_uid', + title='Plotting Meeting', + **default_params), + Event(uid='third_event_uid', + title='Scheming meeting', + **default_params) + ] + + +def event_response_with_update(calendar_uid, sync_from_time): + if calendar_uid == 'first_calendar_uid': + return [Event(uid='first_event_uid', + title='Top Secret Plotting Meeting', + **default_params)] + + +def event_response_with_delete(calendar_uid, sync_from_time): + if calendar_uid == 'first_calendar_uid': + return [Event(uid='first_event_uid', status='cancelled', + **default_params)] + + +def test_handle_changes(db, generic_account): + namespace_id = generic_account.namespace.id + event_sync = EventSync(generic_account.email_address, 'google', + generic_account.id, namespace_id) + + # Sync calendars/events + event_sync.provider.sync_calendars = calendar_response + event_sync.provider.sync_events = event_response + event_sync.sync() + + assert db.session.query(Calendar).filter( + Calendar.namespace_id == namespace_id, + Calendar.name != 'Emailed events').count() == 2 + + assert db.session.query(Event).join(Calendar).filter( + Event.namespace_id == namespace_id, + Calendar.uid == 'first_calendar_uid').count() == 3 + + assert db.session.query(Event).join(Calendar).filter( + Event.namespace_id == namespace_id, + Calendar.uid == 'second_calendar_uid').count() == 2 + + # Sync a calendar update with long name + event_sync.provider.sync_calendars = calendar_long_name + event_sync.sync() + + long_calendar = db.session.query(Calendar).filter( + Calendar.namespace_id == namespace_id, + Calendar.uid == 'long_calendar_uid').one() + + assert len(long_calendar.name) == MAX_INDEXABLE_LENGTH + + # Sync a calendar update + event_sync.provider.sync_calendars = calendar_response_with_update + event_sync.provider.sync_events = event_response + event_sync.sync() + + # Check that we have the same number of calendars and events as before + assert db.session.query(Calendar).filter( + Calendar.namespace_id == namespace_id, + Calendar.name != 'Emailed events').count() == 3 + + assert db.session.query(Event).join(Calendar).filter( + Event.namespace_id == namespace_id, + Calendar.uid == 'first_calendar_uid').count() == 3 + + assert 
db.session.query(Event).join(Calendar).filter( + Event.namespace_id == namespace_id, + Calendar.uid == 'second_calendar_uid').count() == 2 + + assert db.session.query(Event).join(Calendar).filter( + Event.namespace_id == namespace_id, + Calendar.uid == 'long_calendar_uid').count() == 2 + + # Check that calendar attribute was updated. + first_calendar = db.session.query(Calendar).filter( + Calendar.namespace_id == namespace_id, + Calendar.uid == 'first_calendar_uid').one() + assert first_calendar.name == 'Super Important Meetings' + + # Sync an event update + event_sync.provider.sync_events = event_response_with_update + event_sync.sync() + # Make sure the update was persisted + first_event = db.session.query(Event).filter( + Event.namespace_id == namespace_id, + Event.calendar_id == first_calendar.id, + Event.uid == 'first_event_uid').one() + assert first_event.title == 'Top Secret Plotting Meeting' + + # Sync an event delete + event_sync.provider.sync_events = event_response_with_delete + event_sync.sync() + # Make sure the delete was persisted. + first_event = db.session.query(Event).filter( + Event.namespace_id == namespace_id, + Event.calendar_id == first_calendar.id, + Event.uid == 'first_event_uid').first() + + db.session.refresh(first_event) + assert first_event.status == 'cancelled' + + # Sync a calendar delete + event_public_ids = [id_ for id_, in db.session.query(Event.public_id). + filter(Event.namespace_id == namespace_id, + Event.calendar_id == first_calendar.id)] + event_sync.provider.sync_calendars = calendar_response_with_delete + event_sync.sync() + assert db.session.query(Calendar).filter( + Calendar.namespace_id == namespace_id, + Calendar.uid == 'first_calendar_uid').first() is None + + # Check that delete transactions are created for events on the deleted + # calendar. + deleted_event_transactions = db.session.query(Transaction).filter( + Transaction.object_type == 'event', + Transaction.command == 'delete', + Transaction.namespace_id == namespace_id, + Transaction.object_public_id.in_(event_public_ids)).all() + assert len(deleted_event_transactions) == 3 + + # Check that events with the same uid but associated to a different + # calendar still survive. 
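+    # (Two events remain on 'second_calendar_uid' and two on
+    # 'long_calendar_uid', even though their uids also appeared on the
+    # deleted calendar; hence the count of 4 below.)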
+ assert db.session.query(Event).filter( + Event.namespace_id == namespace_id).count() == 4 diff --git a/inbox/test/general/__init__.py b/inbox/test/general/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/inbox/test/general/test_account.py b/inbox/test/general/test_account.py new file mode 100644 index 000000000..8c641c7d2 --- /dev/null +++ b/inbox/test/general/test_account.py @@ -0,0 +1,34 @@ +import pytest + +from inbox.test.util.base import add_fake_gmail_account, add_generic_imap_account + + +def add_fake_imap_account(db_session, provider, email_address, password): + account = add_generic_imap_account(db_session) + account.provider = provider + account.email_address = email_address + account.imap_password = password + account.smtp_password = password + db_session.commit() + return account + + +@pytest.fixture +def fake_imap_accounts(db): + imap_account_data = { + 'yahoo': 'cypresstest@yahoo.com', + 'aol': 'benbitdit@aol.com', + 'icloud': 'inbox.watchdog@icloud.com', + 'imap': 'heyhey@mycustomimap.com', + } + accounts = {'gmail': add_fake_gmail_account(db.session)} + for provider, email in imap_account_data.items(): + accounts[provider] = add_fake_imap_account(db.session, provider, email, + 'sEcr3T') + return accounts + + +def test_provider_setting(db, fake_imap_accounts): + for provider, account in fake_imap_accounts.items(): + assert account.provider == provider + assert account.verbose_provider == provider diff --git a/inbox/test/general/test_address_canonicalization.py b/inbox/test/general/test_address_canonicalization.py new file mode 100644 index 000000000..b4b244507 --- /dev/null +++ b/inbox/test/general/test_address_canonicalization.py @@ -0,0 +1,23 @@ +def test_canonicalization(db): + from inbox.models import Namespace, Account + ns = Namespace() + account = Account(namespace=ns, + email_address='lambda.the.ultimate@gmail.com') + db.session.add(account) + db.session.add(ns) + db.session.commit() + assert account.email_address == 'lambda.the.ultimate@gmail.com' + + assert db.session.query(Account). \ + filter_by(email_address='lambdatheultimate@gmail.com').count() == 1 + + assert db.session.query(Account). \ + filter_by(email_address='lambda.theultimate@gmail.com').count() == 1 + + # Check that nothing bad happens if you pass something that can't actually + # be parsed as an email address. + assert db.session.query(Account). \ + filter_by(email_address='foo').count() == 0 + # Flanker will parse hostnames too, don't break on that. + assert db.session.query(Account). 
\ + filter_by(email_address='http://example.com').count() == 0 diff --git a/inbox/test/general/test_category.py b/inbox/test/general/test_category.py new file mode 100644 index 000000000..5147ff7cd --- /dev/null +++ b/inbox/test/general/test_category.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +from inbox.models import Folder, Label +from inbox.models.category import sanitize_name +from inbox.models.constants import MAX_INDEXABLE_LENGTH +from inbox.test.util.base import (add_fake_folder, add_fake_label, generic_account, + gmail_account, db) + +__all__ = ['db', 'generic_account', 'gmail_account'] + + +def test_category_sanitize_name(): + assert sanitize_name(42) == u'42' + assert sanitize_name('42') == u'42' + assert sanitize_name(u' Boîte de réception ') ==\ + u' Boîte de réception' + long_name = 'N' * (MAX_INDEXABLE_LENGTH + 10) + assert sanitize_name(long_name) == 'N' * MAX_INDEXABLE_LENGTH + + long_name = 'N' * (MAX_INDEXABLE_LENGTH - 2) + ' ' + assert sanitize_name(long_name) == 'N' * (MAX_INDEXABLE_LENGTH - 2) + + +def test_folder_sanitized(db, generic_account): + long_name = 'F' * (MAX_INDEXABLE_LENGTH + 10) + folder = add_fake_folder(db.session, generic_account, long_name) + assert len(folder.name) == MAX_INDEXABLE_LENGTH + + # Test that we get back the correct model even when querying with a long + # name + found = db.session.query(Folder).filter(Folder.name == long_name).one() + assert len(found.name) == MAX_INDEXABLE_LENGTH + assert folder.id == found.id + assert found.name == folder.name + + +def test_label_sanitized(db, gmail_account): + long_name = 'L' * (MAX_INDEXABLE_LENGTH + 10) + label = add_fake_label(db.session, gmail_account, long_name) + assert len(label.name) == MAX_INDEXABLE_LENGTH + + # Test that we get back the correct model even when querying with a long + # name + found = db.session.query(Label).filter(Label.name == long_name).one() + assert len(found.name) == MAX_INDEXABLE_LENGTH + assert label.id == found.id + assert found.name == label.name diff --git a/inbox/test/general/test_concurrency.py b/inbox/test/general/test_concurrency.py new file mode 100644 index 000000000..44615080f --- /dev/null +++ b/inbox/test/general/test_concurrency.py @@ -0,0 +1,132 @@ +import time + +import pytest +import _mysql_exceptions + +from gevent import GreenletExit +from gevent import socket +from sqlalchemy.exc import StatementError +from inbox.util.concurrency import retry_with_logging + + +class MockLogger(object): + + def __init__(self): + self.call_count = 0 + + def error(self, *args, **kwargs): + self.call_count += 1 + + +class FailingFunction(object): + __name__ = 'FailingFunction' + + def __init__(self, exc_type, max_executions=3, delay=0): + self.exc_type = exc_type + self.max_executions = max_executions + self.delay = delay + self.call_count = 0 + + def __call__(self): + self.call_count += 1 + time.sleep(self.delay) + if self.call_count < self.max_executions: + raise self.exc_type + return + + +@pytest.mark.usefixtures('mock_gevent_sleep') +def test_retry_with_logging(): + logger = MockLogger() + failing_function = FailingFunction(ValueError) + retry_with_logging(failing_function, logger=logger, backoff_delay=0) + assert logger.call_count == failing_function.max_executions - 1 + assert failing_function.call_count == failing_function.max_executions + + +def test_no_logging_on_greenlet_exit(): + logger = MockLogger() + failing_function = FailingFunction(GreenletExit) + with pytest.raises(GreenletExit): + retry_with_logging(failing_function, logger=logger) + assert 
logger.call_count == 0 + assert failing_function.call_count == 1 + + +def test_selective_retry(): + logger = MockLogger() + failing_function = FailingFunction(ValueError) + with pytest.raises(ValueError): + retry_with_logging(failing_function, logger=logger, + fail_classes=[ValueError]) + assert logger.call_count == 0 + assert failing_function.call_count == 1 + + +@pytest.mark.usefixtures('mock_gevent_sleep') +def test_no_logging_until_many_transient_error(): + transient = [ + socket.timeout, + socket.error, + _mysql_exceptions.OperationalError( + "(_mysql_exceptions.OperationalError) (1213, 'Deadlock " + "found when trying to get lock; try restarting transaction')"), + _mysql_exceptions.OperationalError( + "(_mysql_exceptions.OperationalError) Lost connection to MySQL " + "server during query"), + _mysql_exceptions.OperationalError( + "(_mysql_exceptions.OperationalError) MySQL server has gone away."), + _mysql_exceptions.OperationalError( + "(_mysql_exceptions.OperationalError) Can't connect to MySQL " + "server on 127.0.0.1"), + _mysql_exceptions.OperationalError( + "(_mysql_exceptions.OperationalError) Max connect timeout reached " + "while reaching hostgroup 71"), + StatementError( + message="?", statement="SELECT *", params={}, + orig=_mysql_exceptions.OperationalError( + "(_mysql_exceptions.OperationalError) MySQL server has gone away.")), + ] + + for transient_exc in transient: + logger = MockLogger() + failing_function = FailingFunction(transient_exc, max_executions=2) + retry_with_logging(failing_function, logger=logger) + + assert logger.call_count == 0, '{} should not be logged'.format(transient_exc) + assert failing_function.call_count == 2 + + failing_function = FailingFunction(socket.error, max_executions=21) + retry_with_logging(failing_function, logger=logger) + + assert logger.call_count == 1 + assert failing_function.call_count == 21 + + failing_function = FailingFunction(socket.error, max_executions=2) + + +@pytest.mark.usefixtures('mock_gevent_sleep') +def test_logging_on_critical_error(): + critical = [ + TypeError("Example TypeError"), + StatementError( + message="?", statement="SELECT *", params={}, orig=None), + StatementError( + message="?", statement="SELECT *", params={}, + orig=_mysql_exceptions.OperationalError( + "(_mysql_exceptions.OperationalError) Incorrect string value " + "'\\xE7\\x(a\\x84\\xE5'")), + _mysql_exceptions.OperationalError( + "(_mysql_exceptions.OperationalError) Incorrect string value " + "'\\xE7\\x(a\\x84\\xE5'"), + _mysql_exceptions.IntegrityError( + "(_mysql_exceptions.IntegrityError) Column not found"), + ] + + for critical_exc in critical: + logger = MockLogger() + failing_function = FailingFunction(critical_exc, max_executions=2) + retry_with_logging(failing_function, logger=logger) + + assert logger.call_count == 1, '{} should be logged'.format(critical_exc) + assert failing_function.call_count == 2 diff --git a/inbox/test/general/test_draft_creation.py b/inbox/test/general/test_draft_creation.py new file mode 100644 index 000000000..327dd62ac --- /dev/null +++ b/inbox/test/general/test_draft_creation.py @@ -0,0 +1,19 @@ +from inbox.sendmail.base import create_message_from_json, update_draft + + +def test_headers_presence(default_namespace, db): + data = {'subject': 'test draft', 'to': [{'email': 'karim@nylas.com'}]} + draft = create_message_from_json(data, default_namespace, db.session, + False) + + assert draft.nylas_uid is not None + assert draft.message_id_header is not None + + old_uid = draft.nylas_uid + + 
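# Updating the draft below should mint a fresh nylas_uid and Message-Id header; old_uid is kept so the test can assert the uid actually changed. +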
update_draft(db.session, default_namespace.account, draft, + body="updated body", blocks=[]) + + assert draft.nylas_uid is not None + assert draft.message_id_header is not None + assert draft.nylas_uid != old_uid diff --git a/inbox/test/general/test_filename_truncation.py b/inbox/test/general/test_filename_truncation.py new file mode 100644 index 000000000..efdbfbbea --- /dev/null +++ b/inbox/test/general/test_filename_truncation.py @@ -0,0 +1,24 @@ +from inbox.models.message import _trim_filename + + +def test_filename_truncation(): + # Note: test both 3-byte and 4-byte UTF8 chars to make sure truncation + # follows UTF8 boundaries. + uname = u'\U0001f1fa\U0001f1f8\u2678\U0001f602.txt' + assert _trim_filename(uname, 'a', max_len=8) == uname + assert _trim_filename(uname, 'a', max_len=7) == u'\U0001f1fa\U0001f1f8\u2678.txt' + assert _trim_filename(uname, 'a', max_len=6) == u'\U0001f1fa\U0001f1f8.txt' + assert _trim_filename(uname, 'a', max_len=5) == u'\U0001f1fa.txt' + + # Note: Test input that is not unicode, ensure it uses unicode length not byte length + cname = '\xf0\x9f\x87\xba\xf0\x9f\x87\xb8\xe2\x99\xb8\xf0\x9f\x98\x82.txt' + assert _trim_filename(cname, 'a', max_len=8) == uname + assert _trim_filename(cname, 'a', max_len=7) == u'\U0001f1fa\U0001f1f8\u2678.txt' + assert _trim_filename(cname, 'a', max_len=6) == u'\U0001f1fa\U0001f1f8.txt' + assert _trim_filename(cname, 'a', max_len=5) == u'\U0001f1fa.txt' + + uname = 'ABCDEF.txttxttxtxtxttxttxtx' + assert _trim_filename(uname, 'a', max_len=8) == 'A.txttxt' + + uname = '.txttxttxtxtxttxttxtx' + assert _trim_filename(uname, 'a', max_len=8) == '.txttxtt' diff --git a/inbox/test/general/test_html_parsing.py b/inbox/test/general/test_html_parsing.py new file mode 100644 index 000000000..21a6d288b --- /dev/null +++ b/inbox/test/general/test_html_parsing.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +"""Regression tests for HTML parsing.""" +from inbox.util.html import strip_tags + + +def test_strip_tags(): + text = ('
<html><body>' + 'check out this link yo!</body></html>
') + assert strip_tags(text).strip() == 'check out this link yo!' + + +def test_preserve_refs(): + """Test that HTML character/entity references are preserved when we strip + tags.""" + text = u'la philologie mène au pire' + assert strip_tags(text) == u'la philologie mène au pire' + + text = u'la philologie mène au pire' + assert strip_tags(text) == u'la philologie mène au pire' + + text = u'veer & wander' + assert strip_tags(text) == 'veer & wander' diff --git a/inbox/test/general/test_ignition.py b/inbox/test/general/test_ignition.py new file mode 100644 index 000000000..d03cce0a3 --- /dev/null +++ b/inbox/test/general/test_ignition.py @@ -0,0 +1,62 @@ +# flake8: noqa: F401 +import pytest + +from inbox.ignition import init_db, verify_db, reset_invalid_autoincrements +from inbox.util.sharding import get_shard_schemas +from inbox.models.session import session_scope_by_shard_id +from inbox.util.testutils import create_test_db, setup_test_db + + +@pytest.yield_fixture(scope='function') +def base_db(config): + from inbox.ignition import engine_manager + create_test_db() + yield engine_manager + setup_test_db() + + +def test_verify_db(base_db): + engines = base_db.engines + shard_schemas = get_shard_schemas() + + # A correctly set auto_increment. + key = 0 + init_db(engines[key], key) + verify_db(engines[key], shard_schemas[key], key) + + # An incorrectly set auto_increment. + key = 1 + init_db(engines[key], key + 1) + with pytest.raises(AssertionError): + verify_db(engines[key], shard_schemas[key], key) + + +def test_reset_autoincrements(base_db): + engines = base_db.engines + shard_schemas = get_shard_schemas() + + # A correctly set auto_increment. + key = 0 + init_db(engines[key], key) + reset_tables = reset_invalid_autoincrements(engines[key], + shard_schemas[key], key, + False) + assert len(reset_tables) == 0 + + # Ensure dry_run mode does not reset tables + key = 1 + init_db(engines[key], key + 1) + reset_tables = reset_invalid_autoincrements(engines[key], + shard_schemas[key], key, + True) + assert len(reset_tables) > 0 + + with pytest.raises(AssertionError): + verify_db(engines[key], shard_schemas[key], key) + + reset_tables = reset_invalid_autoincrements(engines[key], + shard_schemas[key], key, + False) + + assert len(reset_tables) > 0 + verify_db(engines[key], shard_schemas[key], key) diff --git a/inbox/test/general/test_lock.py b/inbox/test/general/test_lock.py new file mode 100644 index 000000000..66ff28678 --- /dev/null +++ b/inbox/test/general/test_lock.py @@ -0,0 +1,61 @@ +""" Tests for file lock implementation. """ + +import tempfile + +import pytest + +from gevent import spawn, sleep + +from inbox.util.file import Lock + + +def lock(block, filename=None): + if filename is None: + handle, filename = tempfile.mkstemp() + return Lock(filename, block=block) + + +@pytest.fixture +def b_lock(): + """ Blocking lock fixture. """ + return lock(block=True) + + +@pytest.fixture +def nb_lock(): + """ Non-blocking lock fixture. """ + return lock(block=False) + + +def grab_lock(l): + """ Stub fn to grab lock inside a Greenlet. """ + l.acquire() + print "Got the lock again", l.filename + l.release() + + +def test_nb_lock(nb_lock): + with nb_lock as l: + filename = l.filename + with pytest.raises(IOError): + with lock(block=False, filename=filename): + pass + # Should be able to acquire the lock again after the scope ends (also + # testing that the non-context-manager acquire works). + l.acquire() + # Should NOT be able to take the same lock from a Greenlet. 
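+ # (grab_lock calls acquire() on the already-held non-blocking lock, so the greenlet is expected to raise rather than finish successfully.)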
+ g = spawn(grab_lock, l) + g.join() + assert not g.successful(), "greenlet should throw error" + l.release() + + +def test_b_lock(b_lock): + with b_lock as l: + # A greenlet should hang forever if it tries to acquire this lock. + g = spawn(grab_lock, l) + # Wait long enough that the greenlet ought to be able to finish if + # it's not blocking, but not long enough to make the test suite hella + # slow. + sleep(0.2) + assert not g.ready(), "greenlet shouldn't be able to grab lock" diff --git a/inbox/test/general/test_message_parsing.py b/inbox/test/general/test_message_parsing.py new file mode 100644 index 000000000..b26820d04 --- /dev/null +++ b/inbox/test/general/test_message_parsing.py @@ -0,0 +1,480 @@ +# flake8: noqa: F401, F811 +# -*- coding: utf-8 -*- +"""Sanity-check our construction of a Message object from raw synced data.""" +import datetime +import pkgutil + +import pytest +from flanker import mime + +from inbox.models import Message, Block +from inbox.util.blockstore import get_from_blockstore + +from inbox.util.addr import parse_mimepart_address_header +from inbox.test.util.base import (default_account, default_namespace, thread, + new_message_from_synced, mime_message, + add_fake_thread) + +__all__ = ['default_namespace', 'thread', 'default_account'] + + +def create_from_synced(db, account, raw_message): + thread = add_fake_thread(db.session, account.namespace.id) + received_date = datetime.datetime.utcnow() + m = Message.create_from_synced(account, 22, '[Gmail]/All Mail', + received_date, raw_message) + m.thread = thread + db.session.add(m) + db.session.commit() + return m + + +@pytest.fixture +def raw_message_with_many_recipients(): + # Message carefully constructed s.t. the length of the serialized 'to' + # field is 65536. + return pkgutil.get_data('inbox', 'test/data/raw_message_with_many_recipients.txt') + + +@pytest.fixture +def mime_message_with_bad_date(mime_message): + mime_message.headers['Date'] = 'unparseable' + return mime_message + + +@pytest.fixture +def raw_message_with_long_content_id(): + return pkgutil.get_data('inbox', 'test/data/raw_message_with_long_content_id.txt') + + +@pytest.fixture +def raw_message_with_ical_invite(): + return pkgutil.get_data('inbox', 'test/data/raw_message_with_ical_invite.txt') + + +@pytest.fixture +def raw_message_with_bad_attachment(): + return pkgutil.get_data('inbox', 'test/data/raw_message_with_bad_attachment.txt') + + +@pytest.fixture +def raw_message_with_filename_attachment(): + return pkgutil.get_data('inbox', 'test/data/raw_message_with_filename_attachment.txt') + + +@pytest.fixture +def raw_message_with_name_attachment(): + return pkgutil.get_data('inbox', 'test/data/raw_message_with_name_attachment.txt') + + +@pytest.fixture +def raw_message_with_inline_name_attachment(): + return pkgutil.get_data('inbox', 'test/data/raw_message_with_inline_attachment.txt') + + +@pytest.fixture +def raw_message_with_outlook_emoji(): + return pkgutil.get_data('inbox', 'test/data/raw_message_with_outlook_emoji.txt') + + +@pytest.fixture +def raw_message_with_outlook_emoji_inline(): + return pkgutil.get_data('inbox', 'test/data/raw_message_with_outlook_emoji_inline.txt') + + +@pytest.fixture +def raw_message_with_long_message_id(): + return pkgutil.get_data('inbox', 'test/data/raw_message_with_long_message_id.txt') + + +def test_message_from_synced(db, new_message_from_synced, default_namespace): + thread = add_fake_thread(db.session, default_namespace.id) + m = new_message_from_synced + assert m.namespace_id == default_namespace.id + 
assert m.to_addr == [['Alice', 'alice@example.com']] + assert m.cc_addr == [['Bob', 'bob@example.com']] + assert m.subject == 'Hello' + assert m.body == 'Hello World!' + assert m.data_sha256 + m.thread = thread + db.session.add(m) + db.session.commit() + + assert (db.session.query(Block).filter( + Block.namespace_id == default_namespace.id).count() == 0) + assert len(m.parts) == 0 + + +def test_save_attachments(db, default_account): + mime_msg = mime.create.multipart('mixed') + mime_msg.append( + mime.create.text('plain', 'This is a message with attachments'), + mime.create.attachment('image/png', 'filler', 'attached_image.png', + 'attachment'), + mime.create.attachment('application/pdf', 'filler', + 'attached_file.pdf', 'attachment') + ) + msg = create_from_synced(db, default_account, mime_msg.to_string()) + assert len(msg.parts) == 2 + assert all(part.content_disposition == 'attachment' for part in msg.parts) + assert {part.block.filename for part in msg.parts} == \ + {'attached_image.png', 'attached_file.pdf'} + assert {part.block.content_type for part in msg.parts} == \ + {'image/png', 'application/pdf'} + assert (db.session.query(Block).filter( + Block.namespace_id == default_account.namespace.id).count() == 2) + + +def test_save_inline_attachments(db, default_account): + mime_msg = mime.create.multipart('mixed') + inline_attachment = mime.create.attachment('image/png', 'filler', + 'inline_image.png', 'inline') + inline_attachment.headers['Content-Id'] = '' + mime_msg.append(inline_attachment) + return mime_msg + msg = create_from_synced(db, default_account, mime_message.to_string()) + assert len(msg.parts) == 1 + part = msg.parts[0] + assert part.content_disposition == 'inline' + assert part.content_id == '' + assert part.block.content_type == 'image/png' + assert part.block.data == 'filler' + assert (db.session.query(Block).filter( + Block.namespace_id == default_account.namespace.id).count() == 1) + + +def test_concatenate_parts_for_body(db, default_account): + # Test that when a message has multiple inline attachments / text parts, we + # concatenate to form the text body (Apple Mail constructs such messages). + # Example MIME structure: + # multipart/mixed + # | + # +-text/html + # | + # +-image/jpeg + # | + # +-text/html + # | + # +-image/jpeg + # | + # +-text/html + mime_msg = mime.create.multipart('mixed') + mime_msg.append( + mime.create.text('html', 'First part'), + mime.create.attachment('image/png', 'filler', disposition='inline'), + mime.create.text('html', 'Second part'), + mime.create.attachment('image/png', 'more filler', + disposition='inline'), + mime.create.text('html', '3rd part'), + ) + m = create_from_synced(db, default_account, mime_msg.to_string()) + assert m.body == \ + 'First partSecond part3rd part' + assert len(m.parts) == 2 + assert (db.session.query(Block).filter( + Block.namespace_id == default_account.namespace.id).count() == 2) + + +def test_inline_parts_may_form_body_text(db, default_account): + # Some clients (Slack) will set Content-Disposition: inline on text/plain + # or text/html parts that are really just the body text. Check that we + # don't save them as inline atrachments, but just use them to form the body + # text. + mime_msg = mime.create.multipart('mixed') + mime_msg.append( + mime.create.attachment('text/html', 'Hello World!', + disposition='inline'), + mime.create.attachment('text/plain', 'Hello World!', + disposition='inline') + ) + m = create_from_synced(db, default_account, mime_msg.to_string()) + assert m.body == 'Hello World!' 
+ assert len(m.parts) == 0 + assert (db.session.query(Block).filter( + Block.namespace_id == default_account.namespace.id).count() == 0) + + +def test_convert_plaintext_body_to_html(db, default_account): + mime_msg = mime.create.text('plain', 'Hello World!') + m = create_from_synced(db, default_account, mime_msg.to_string()) + assert m.body == '

Hello World!

' + + +def test_save_parts_without_disposition_as_attachments(db, default_account): + mime_msg = mime.create.multipart('mixed') + mime_msg.append( + mime.create.attachment('image/png', 'filler', + disposition=None) + ) + m = create_from_synced(db, default_account, mime_msg.to_string()) + assert len(m.parts) == 1 + assert m.parts[0].content_disposition == 'attachment' + assert m.parts[0].block.content_type == 'image/png' + assert m.parts[0].block.data == 'filler' + assert (db.session.query(Block).filter( + Block.namespace_id == default_account.namespace.id).count() == 1) + + +def test_handle_long_filenames(db, default_account): + mime_msg = mime.create.multipart('mixed') + mime_msg.append( + mime.create.attachment('image/png', 'filler', + filename=990 * 'A' + '.png', + disposition='attachment') + ) + m = create_from_synced(db, default_account, mime_msg.to_string()) + assert len(m.parts) == 1 + saved_filename = m.parts[0].block.filename + assert len(saved_filename) < 256 + # Check that we kept the extension + assert saved_filename.endswith('.png') + + +def test_handle_long_subjects(db, default_account, mime_message): + mime_message.headers['Subject'] = 4096 * 'A' + m = create_from_synced(db, default_account, mime_message.to_string()) + assert len(m.subject) < 256 + + +def test_dont_use_attached_html_to_form_body(db, default_account): + mime_msg = mime.create.multipart('mixed') + mime_msg.append( + mime.create.text('plain', 'Please see attachment'), + mime.create.attachment('text/html', 'This is attached', + disposition='attachment', + filename='attachment.html') + ) + m = create_from_synced(db, default_account, mime_msg.to_string()) + assert len(m.parts) == 1 + assert m.parts[0].content_disposition == 'attachment' + assert m.parts[0].block.content_type == 'text/html' + assert m.body == '

Please see attachment

' + assert (db.session.query(Block).filter( + Block.namespace_id == default_account.namespace.id).count() == 1) + + +def test_truncate_recipients(db, default_account, thread, + raw_message_with_many_recipients): + m = create_from_synced(db, default_account, raw_message_with_many_recipients) + m.thread = thread + db.session.add(m) + # Check that no database error is raised. + db.session.commit() + + +def test_address_parsing(): + """Check that header parsing can handle a variety of tricky input.""" + # Extra quotes around display name + mimepart = mime.from_string('From: ""Bob"" ') + parsed = parse_mimepart_address_header(mimepart, 'From') + assert parsed == [[' Bob ', 'bob@foocorp.com']] + + # Comments after addr-spec + mimepart = mime.from_string( + 'From: "Bob" (through Yahoo! Store Order System)') + parsed = parse_mimepart_address_header(mimepart, 'From') + assert parsed == [['Bob', 'bob@foocorp.com']] + + mimepart = mime.from_string( + 'From: Indiegogo (no reply)') + parsed = parse_mimepart_address_header(mimepart, 'From') + assert parsed == [['Indiegogo', 'noreply@indiegogo.com']] + + mimepart = mime.from_string( + 'From: Anon (GitHub Staff)') + parsed = parse_mimepart_address_header(mimepart, 'From') + assert parsed == [['Anon', 'support@github.com']] + + # Display name in comment + mimepart = mime.from_string('From: root@gunks (Cron Daemon)') + parsed = parse_mimepart_address_header(mimepart, 'From') + assert parsed == [['Cron Daemon', 'root@gunks']] + + # Missing closing angle bracket + mimepart = mime.from_string('From: Bob ') + parsed = parse_mimepart_address_header(mimepart, 'From') + assert parsed == [['Foo, Corp.', 'info@foocorp.com']] + + mimepart = mime.from_string( + 'To: =?utf-8?Q?Foo=2C=20Corp.?= , ' + '=?utf-8?Q?Support?= ') + parsed = parse_mimepart_address_header(mimepart, 'To') + assert parsed == [['Foo, Corp.', 'info@foocorp.com'], + ['Support', 'support@foocorp.com']] + + # Multiple header lines + mimepart = mime.from_string( + 'To: alice@foocorp.com\nSubject: Hello\nTo: bob@foocorp.com') + parsed = parse_mimepart_address_header(mimepart, 'To') + assert parsed == [['', 'alice@foocorp.com'], ['', 'bob@foocorp.com']] + + +def test_handle_bad_content_disposition(db, default_account, default_namespace, + mime_message): + # Message with a MIME part that has an invalid content-disposition. + mime_message.append( + mime.create.attachment('image/png', 'filler', 'attached_image.png', + disposition='alternative') + ) + m = create_from_synced(db, default_account, mime_message.to_string()) + assert m.namespace_id == default_namespace.id + assert m.to_addr == [['Alice', 'alice@example.com']] + assert m.cc_addr == [['Bob', 'bob@example.com']] + assert m.body == 'Hello World!' + assert len(m.parts) == 0 + assert (db.session.query(Block).filter( + Block.namespace_id == default_namespace.id).count() == 0) + + +def test_store_full_body_on_parse_error( + default_account, mime_message_with_bad_date): + received_date = None + m = Message.create_from_synced(default_account, 139219, '[Gmail]/All Mail', + received_date, + mime_message_with_bad_date.to_string()) + assert get_from_blockstore(m.data_sha256) + + +def test_long_content_id(db, default_account, thread, + raw_message_with_long_content_id): + m = create_from_synced(db, default_account, raw_message_with_long_content_id) + m.thread = thread + db.session.add(m) + # Check that no database error is raised. 
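+ # (i.e. the over-long Content-Id header should be stored, truncated if necessary, without tripping column length limits.)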
+ db.session.commit() + + +def test_parse_body_on_bad_attachment( + default_account, raw_message_with_bad_attachment): + received_date = None + m = Message.create_from_synced(default_account, 139219, '[Gmail]/All Mail', + received_date, + raw_message_with_bad_attachment) + assert m.decode_error + assert 'dingy blue carpet' in m.body + assert len(m.parts) == 0 + + +def test_calculate_snippet(): + m = Message() + # Check that we strip contents of title, script, style tags + body = 'EMAIL' \ + 'Hello, world' + assert m.calculate_html_snippet(body) == 'Hello, world' + + # Check that we replace various incarnations of
<br> by spaces + body = 'Hello,<br>world' + assert m.calculate_html_snippet(body) == 'Hello, world' + + body = 'Hello,<br/>world' + assert m.calculate_html_snippet(body) == 'Hello, world' + + body = 'Hello,<br />world' + assert m.calculate_html_snippet(body) == 'Hello, world' + + body = 'Hello,<br> <br>world' + assert m.calculate_html_snippet(body) == 'Hello, world' + + body = '<br>line1<br>line2<br>line3<br> <br>
' + assert m.calculate_html_snippet(body) == 'line1 line2 line3' + + # Check that snippets are properly truncated to 191 characters. + body = '''Etenim quid est, Catilina, quod iam amplius + exspectes, si neque nox tenebris obscurare coetus nefarios nec + privata domus parietibus continere voces coniurationis tuae + potest, si illustrantur, si erumpunt omnia?''' + expected_snippet = 'Etenim quid est, Catilina, quod iam amplius ' \ + 'exspectes, si neque nox tenebris obscurare coetus ' \ + 'nefarios nec privata domus parietibus continere ' \ + 'voces coniurationis tuae potest, si illustrantur,' + assert len(expected_snippet) == 191 + assert m.calculate_html_snippet(body) == expected_snippet + + +def test_sanitize_subject(default_account, mime_message): + # Parse a raw message with encoded null bytes in subject header; + # check that we strip the null bytes. + mime_message.headers['Subject'] = \ + '=?UTF-8?B?WW91ciBVUFMgUGFja2FnZSB3YXMgZGVsaXZlcmVkAAAA?=' + m = Message.create_from_synced( + default_account, 22, '[Gmail]/All Mail', datetime.datetime.utcnow(), + mime_message.to_string()) + assert m.subject == u'Your UPS Package was delivered' + + +def test_attachments_filename_parsing(db, default_account, + raw_message_with_filename_attachment, + raw_message_with_name_attachment): + m = create_from_synced(db, default_account, + raw_message_with_filename_attachment) + assert len(m.attachments) == 1 + assert m.attachments[0].block.filename == 'bewerbung_anschreiben_positivbeispiel.txt' + + m = create_from_synced(db, default_account, + raw_message_with_name_attachment) + assert len(m.attachments) == 1 + assert m.attachments[0].block.filename == 'bewerbung_anschreiben_positivbeispiel.txt' + + +def test_inline_attachments_filename_parsing(db, default_account, + raw_message_with_inline_name_attachment): + m = create_from_synced(db, default_account, + raw_message_with_inline_name_attachment) + assert len(m.attachments) == 1 + assert m.attachments[0].block.filename == u"Capture d'e\u0301cran 2015-08-13 20.58.24.png" + + +def test_attachments_emoji_filename_parsing(db, default_account, + raw_message_with_outlook_emoji): + m = create_from_synced(db, default_account, + raw_message_with_outlook_emoji) + assert len(m.attachments) == 1 + assert m.attachments[0].block.filename == u'OutlookEmoji-\U0001f60a.png' + assert m.attachments[0].block.content_type == 'image/png' + assert m.attachments[0].content_id == '<3f0ea351-779e-48b3-bfa9-7c2a9e373aeb>' + assert m.attachments[0].content_disposition == 'attachment' + + +def test_attachments_emoji_filename_parsing(db, default_account, + raw_message_with_outlook_emoji_inline): + m = create_from_synced(db, default_account, + raw_message_with_outlook_emoji_inline) + assert len(m.attachments) == 1 + assert m.attachments[0].block.filename == u'OutlookEmoji-\U0001f60a.png' + assert m.attachments[0].block.content_type == 'image/png' + assert m.attachments[0].content_id == '<3f0ea351-779e-48b3-bfa9-7c2a9e373aeb>' + assert m.attachments[0].content_disposition == 'inline' + + +@pytest.mark.only +def test_long_message_id(db, default_account, thread, + raw_message_with_long_message_id): + m = create_from_synced(db, default_account, + raw_message_with_long_message_id) + m.thread = thread + db.session.add(m) + # Check that no database error is raised. 
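+ # (998 characters is the RFC 5322 line-length limit, so the stored Message-Id header is expected to be capped at that size, as asserted below.)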
+ db.session.commit() + assert len(m.message_id_header) <= 998 diff --git a/inbox/test/general/test_mutable_json_type.py b/inbox/test/general/test_mutable_json_type.py new file mode 100644 index 000000000..b2f90dbcf --- /dev/null +++ b/inbox/test/general/test_mutable_json_type.py @@ -0,0 +1,41 @@ +""" Tests for our mutable JSON column type. """ + +from datetime import datetime + + +def test_mutable_json_type(db, config, default_account, folder): + """ + Test that FolderSync._sync_status which is a mutable JSON column is + updated as expected. + + """ + from inbox.models.backends.imap import ImapFolderSyncStatus + + sync_status = ImapFolderSyncStatus( + account_id=default_account.id, + folder=folder) + db.session.add(sync_status) + db.session.commit() + + original_metrics = sync_status.metrics + + metrics = dict(download_uid_count=10, + queue_checked_at=datetime.utcnow()) + sync_status.update_metrics(metrics) + + updated_metrics = sync_status.metrics + + metrics.update(original_metrics) + assert updated_metrics != original_metrics and updated_metrics == metrics,\ + 'metrics not updated correctly' + + # Reupdate status + new_metrics = dict(delete_uid_count=50, + download_uid_count=100, + queue_checked_at=datetime.utcnow()) + sync_status.update_metrics(new_metrics) + + latest_metrics = sync_status.metrics + + metrics.update(new_metrics) + assert latest_metrics == metrics, 'metrics not re-updated correctly' diff --git a/inbox/test/general/test_namespace.py b/inbox/test/general/test_namespace.py new file mode 100644 index 000000000..dadd2c99b --- /dev/null +++ b/inbox/test/general/test_namespace.py @@ -0,0 +1,460 @@ +import random +import gevent +from requests import Response +from pytest import fixture +from freezegun import freeze_time + +from inbox.models.namespace import Namespace +from inbox.test.util.base import (add_generic_imap_account, add_fake_thread, add_fake_message, + add_fake_calendar, add_fake_event, add_fake_folder, + add_fake_imapuid, add_fake_gmail_account, + add_fake_contact, add_fake_msg_with_calendar_part) + + +@fixture +def patch_requests_throttle(monkeypatch): + def get(*args, **kwargs): + resp = Response() + resp.status_code = 500 + + monkeypatch.setattr( + 'requests.get', + lambda *args, **kwargs: get()) + + +@fixture +def patch_requests_no_throttle(monkeypatch): + def get(*args, **kwargs): + resp = Response() + resp.status_code = 500 + + monkeypatch.setattr( + 'requests.get', + lambda *args, **kwargs: get()) + + +def random_range(start, end): + return range(random.randrange(start, end)) + + +def add_completely_fake_account(db, email='test@nylas.com'): + from inbox.models.backends.gmail import GmailAuthCredentials + fake_account = add_fake_gmail_account(db.session, email_address=email) + calendar = add_fake_calendar(db.session, fake_account.namespace.id) + for i in random_range(1, 10): + add_fake_event(db.session, fake_account.namespace.id, + calendar=calendar, title='%s' % i) + + # Add fake Threads, Messages and ImapUids. 
+ folder = add_fake_folder(db.session, fake_account) + for i in random_range(1, 4): + th = add_fake_thread(db.session, fake_account.namespace.id) + + for j in random_range(1, 3): + msg = add_fake_msg_with_calendar_part(db.session, + fake_account, + 'fake part', thread=th) + db.session.add(msg) + db.session.flush() + + for k in random_range(1, 2): + add_fake_imapuid(db.session, fake_account.id, msg, folder, + int('%s%s' % (msg.id, k))) + # Add fake contacts + for i in random_range(1, 5): + add_fake_contact(db.session, fake_account.namespace.id, uid=str(i)) + + auth_creds = GmailAuthCredentials() + auth_creds.gmailaccount = fake_account + auth_creds.scopes = "email" + auth_creds.g_id_token = "test" + auth_creds.client_id = "test" + auth_creds.client_secret = "test" + auth_creds.refresh_token = "test" + auth_creds.is_valid = True + db.session.add(auth_creds) + db.session.commit() + + return fake_account + + +def test_get_accounts_to_delete(db): + from inbox.models import Account + from inbox.models.util import get_accounts_to_delete + + existing_account_count = db.session.query(Account.id).count() + + accounts = [] + email = 'test{}@nylas.com' + for i in range(1, 6): + account = add_completely_fake_account(db, email.format(i)) + accounts.append(account) + + # Ensure all of the accounts have been created successfully + assert db.session.query(Account.id).count() == (existing_account_count + 5) + + # get_accounts_to_delete() with no accounts marked as deleted + accounts_to_delete = get_accounts_to_delete(0) + assert len(accounts_to_delete) == 0 + + # get_accounts_to_delete() with one account marked as deleted + accounts[0].mark_deleted() + db.session.commit() + + accounts_to_delete = get_accounts_to_delete(0) + assert len(accounts_to_delete) == 1 + + # get_accounts_to_delete() with more than one account marked as deleted + for i in range(1, 4): + accounts[i].mark_deleted() + db.session.commit() + + accounts_to_delete = get_accounts_to_delete(0) + assert len(accounts_to_delete) == 4 + + +def test_bulk_namespace_deletion(db): + from inbox.models import Account + from inbox.models.util import get_accounts_to_delete, delete_marked_accounts + + db.session.query(Account).delete(synchronize_session=False) + db.session.commit() + assert db.session.query(Account.id).count() == 0 + + # Add 5 accounts + account_1 = add_completely_fake_account(db) + account_1_id = account_1.id + + account_2 = add_completely_fake_account(db, "test2@nylas.com") + account_2_id = account_2.id + + account_3 = add_completely_fake_account(db, "test3@nylas.com") + account_3_id = account_3.id + + account_4 = add_completely_fake_account(db, "test4@nylas.com") + account_4_id = account_4.id + + add_completely_fake_account(db, "test5@nylas.com") + + # Ensure all of the accounts have been created successfully + assert db.session.query(Account).count() == 5 + + # delete_marked_accounts() with no accounts marked as deleted + to_delete = get_accounts_to_delete(0) + delete_marked_accounts(0, to_delete) + assert len(db.session.query(Account.id).all()) == 5 + + # delete_marked_accounts() with one account marked as deleted + account_1.mark_deleted() + db.session.commit() + + to_delete = get_accounts_to_delete(0) + delete_marked_accounts(0, to_delete) + + alive_accounts = db.session.query(Account.id).all() + assert len(alive_accounts) == 4 + assert account_1_id not in alive_accounts + + # delete_marked_accounts() with more than one account marked as deleted + account_2.mark_deleted() + account_3.mark_deleted() + account_4.mark_deleted() + 
db.session.commit() + + to_delete = get_accounts_to_delete(0) + delete_marked_accounts(0, to_delete) + + alive_accounts = db.session.query(Account.id).all() + assert len(alive_accounts) == 1 + assert account_4_id not in alive_accounts + assert account_3_id not in alive_accounts + assert account_2_id not in alive_accounts + + +@freeze_time("2016-02-02 11:01:34") +def test_deletion_no_throttle(db, patch_requests_no_throttle): + from inbox.models import Account + from inbox.models.util import get_accounts_to_delete, delete_marked_accounts + + new_accounts = set() + account_1 = add_completely_fake_account(db) + new_accounts.add(account_1.id) + + account_2 = add_completely_fake_account(db, "test2@nylas.com") + new_accounts.add(account_2.id) + + account_1.mark_deleted() + account_2.mark_deleted() + db.session.commit() + + to_delete = get_accounts_to_delete(0) + greenlet = gevent.spawn(delete_marked_accounts, 0, to_delete, throttle=True) + greenlet.join() + + alive_accounts = db.session.query(Account.id).all() + + # Ensure the two accounts we added were deleted + assert new_accounts - set(alive_accounts) == new_accounts + + +@freeze_time("2016-02-02 11:01:34") +def test_deletion_metric_throttle(db, patch_requests_throttle): + from inbox.models import Account + from inbox.models.util import get_accounts_to_delete, delete_marked_accounts + + account_1 = add_completely_fake_account(db) + account_1_id = account_1.id + + account_2 = add_completely_fake_account(db, "test2@nylas.com") + account_2_id = account_2.id + + account_1.mark_deleted() + account_2.mark_deleted() + db.session.commit() + + to_delete = get_accounts_to_delete(0) + greenlet = gevent.spawn(delete_marked_accounts, 0, to_delete, throttle=True) + greenlet.join() + + alive_accounts = [acc.id for acc in db.session.query(Account).all()] + + # Ensure the two accounts we added are still present + assert account_1_id in alive_accounts + assert account_2_id in alive_accounts + + +@freeze_time("2016-02-02 01:01:34") +def test_deletion_time_throttle(db, patch_requests_no_throttle): + from inbox.models import Account + from inbox.models.util import get_accounts_to_delete, delete_marked_accounts + + account_1 = add_completely_fake_account(db, "test5@nylas.com") + account_1_id = account_1.id + + account_2 = add_completely_fake_account(db, "test6@nylas.com") + account_2_id = account_2.id + + account_1.mark_deleted() + account_2.mark_deleted() + db.session.commit() + + to_delete = get_accounts_to_delete(0) + greenlet = gevent.spawn(delete_marked_accounts, 0, to_delete, throttle=True) + greenlet.join() + + alive_accounts = [acc.id for acc in db.session.query(Account).all()] + + # Ensure the two accounts we added are still present + assert account_1_id in alive_accounts + assert account_2_id in alive_accounts + + +def test_namespace_deletion(db, default_account): + from inbox.models import Account, Thread, Message + from inbox.models.util import delete_namespace + + models = [Thread, Message] + + namespace = default_account.namespace + namespace_id = namespace.id + account_id = default_account.id + + account = db.session.query(Account).get(account_id) + assert account + + thread = add_fake_thread(db.session, namespace_id) + + message = add_fake_message(db.session, namespace_id, thread) + + for m in models: + c = db.session.query(m).filter( + m.namespace_id == namespace_id).count() + print "count for", m, ":", c + assert c != 0 + + fake_account = add_generic_imap_account(db.session) + fake_account_id = fake_account.id + + assert fake_account_id != 
account.id and \ + fake_account.namespace.id != namespace_id + + thread = add_fake_thread(db.session, fake_account.namespace.id) + thread_id = thread.id + + message = add_fake_message(db.session, fake_account.namespace.id, thread) + message_id = message.id + + assert len(db.session.query(Namespace).filter(Namespace.id == namespace_id).all()) > 0 + + # Delete namespace, verify data corresponding to this namespace /only/ + # is deleted + delete_namespace(account_id, namespace_id) + db.session.commit() + + assert len(db.session.query(Namespace).filter(Namespace.id == namespace_id).all()) == 0 + + account = db.session.query(Account).get(account_id) + assert not account + + for m in models: + assert db.session.query(m).filter( + m.namespace_id == namespace_id).count() == 0 + + fake_account = db.session.query(Account).get(fake_account_id) + assert fake_account + + thread = db.session.query(Thread).get(thread_id) + message = db.session.query(Message).get(message_id) + assert thread and message + + +def test_namespace_delete_cascade(db, default_account): + from inbox.models import Account, Thread, Message + + models = [Thread, Message] + + namespace = default_account.namespace + namespace_id = namespace.id + account_id = default_account.id + + account = db.session.query(Account).get(account_id) + assert account + + thread = add_fake_thread(db.session, namespace_id) + + add_fake_message(db.session, namespace_id, thread) + + for m in models: + c = db.session.query(m).filter( + m.namespace_id == namespace_id).count() + print "count for", m, ":", c + assert c != 0 + + fake_account = add_generic_imap_account(db.session) + fake_account_id = fake_account.id + + assert fake_account_id != account.id and \ + fake_account.namespace.id != namespace_id + + thread = add_fake_thread(db.session, fake_account.namespace.id) + + add_fake_message(db.session, fake_account.namespace.id, thread) + + assert len(db.session.query(Namespace).filter(Namespace.id == namespace_id).all()) > 0 + + # This test is separate from test_namespace_deletion because we want to + # do a raw SQLAlchemy delete rather than using delete_namespace, which does + # a bunch of extra work to ensure that objects associated with a Namespace + # are actually deleted. 
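+ # Deleting the Namespace row directly exercises the delete-cascade path on its own, without delete_namespace's explicit cleanup.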
+ db.session.query(Namespace).filter(Namespace.id == namespace_id).delete() + db.session.commit() + + assert len(db.session.query(Namespace).filter(Namespace.id == namespace_id).all()) == 0 + + +def test_fake_accounts(empty_db): + from inbox.models import (Account, Thread, Message, Block, + Secret, Contact, Event, Transaction) + from inbox.models.backends.imap import ImapUid + from inbox.models.backends.gmail import GmailAuthCredentials + from inbox.models.util import delete_namespace + + models = [Thread, Message, Event, Transaction, Contact, Block] + + db = empty_db + account = add_completely_fake_account(db) + + for m in models: + c = db.session.query(m).filter( + m.namespace_id == account.namespace.id).count() + assert c != 0 + + assert db.session.query(ImapUid).count() != 0 + assert db.session.query(Secret).count() != 0 + assert db.session.query(GmailAuthCredentials).count() != 0 + assert db.session.query(Account).filter( + Account.id == account.id).count() == 1 + + # Try the dry-run mode: + delete_namespace(account.id, account.namespace.id, dry_run=True) + + for m in models: + c = db.session.query(m).filter( + m.namespace_id == account.namespace.id).count() + assert c != 0 + + assert db.session.query(Account).filter( + Account.id == account.id).count() != 0 + + assert db.session.query(Secret).count() != 0 + assert db.session.query(GmailAuthCredentials).count() != 0 + assert db.session.query(ImapUid).count() != 0 + + # Now delete the account for reals. + delete_namespace(account.id, account.namespace.id) + + for m in models: + c = db.session.query(m).filter( + m.namespace_id == account.namespace.id).count() + assert c == 0 + + assert db.session.query(Account).filter( + Account.id == account.id).count() == 0 + + assert db.session.query(Secret).count() == 0 + assert db.session.query(GmailAuthCredentials).count() == 0 + assert db.session.query(ImapUid).count() == 0 + + +def test_multiple_fake_accounts(empty_db): + # Add three fake accounts, check that removing one doesn't affect + # the two others. + from inbox.models import (Thread, Message, Block, Secret, Contact, Event, + Transaction) + from inbox.models.backends.gmail import GmailAuthCredentials + from inbox.models.util import delete_namespace + + db = empty_db + accounts = [] + accounts.append(add_completely_fake_account(db, 'test1@nylas.com')) + accounts.append(add_completely_fake_account(db, 'test2@nylas.com')) + + # Count secrets and authcredentials now. We can't do it after adding + # the third account because our object model is a bit cumbersome. + secret_count = db.session.query(Secret).count() + authcredentials_count = db.session.query(GmailAuthCredentials).count() + assert secret_count != 0 + assert authcredentials_count != 0 + + accounts.append(add_completely_fake_account(db, 'test3@nylas.com')) + + stats = {} + models = [Thread, Message, Event, Transaction, Contact, Block] + + for account in accounts: + stats[account.email_address] = {} + for model in models: + clsname = model.__name__ + stats[account.email_address][clsname] = db.session.query(model).filter( + model.namespace_id == account.namespace.id).count() + + # now delete the third account. 
+ last_account_id = accounts[2].id + last_namespace_id = accounts[2].namespace.id + + delete_namespace(last_account_id, last_namespace_id) + for account in accounts[:2]: + for model in models: + clsname = model.__name__ + assert stats[account.email_address][clsname] == db.session.query(model).filter( + model.namespace_id == account.namespace.id).count() + + # check that no model from the last account is present. + for model in models: + clsname = model.__name__ + assert db.session.query(model).filter( + model.namespace_id == last_namespace_id).count() == 0 + + # check that we didn't delete a secret that wasn't ours. + assert db.session.query(Secret).count() == secret_count + assert db.session.query(GmailAuthCredentials).count() == authcredentials_count diff --git a/inbox/test/general/test_paths.py b/inbox/test/general/test_paths.py new file mode 100644 index 000000000..f45903e5e --- /dev/null +++ b/inbox/test/general/test_paths.py @@ -0,0 +1,30 @@ +# Test path conversion functions. +from inbox.util.misc import imap_folder_path, fs_folder_path + + +def test_imap_folder_path(): + assert imap_folder_path('a/b') == 'a.b' + assert imap_folder_path('a/b', separator='?') == 'a?b' + + assert imap_folder_path('/A/b') == 'A.b' + assert imap_folder_path('/INBOX/b') == 'INBOX.b' + assert imap_folder_path('INBOX/b') == 'INBOX.b' + + assert imap_folder_path('a/very/deep/nested/folder') == 'a.very.deep.nested.folder' + assert imap_folder_path('/a/very/deep/nested/folder') == 'a.very.deep.nested.folder' + + assert imap_folder_path('') is None + assert imap_folder_path('/') is None + + assert imap_folder_path('A/B', prefix='INBOX.', separator='.') == 'INBOX.A.B' + assert imap_folder_path('/A/B', prefix='INBOX.', separator='.') == 'INBOX.A.B' + assert imap_folder_path('/A/B', prefix='INBOX', separator='.') == 'INBOX.A.B' + assert imap_folder_path('INBOX/A/B', prefix='INBOX', separator='.') == 'INBOX.A.B' + + +def test_fs_folder_path(): + assert fs_folder_path('INBOX.A.B') == 'INBOX/A/B' + assert fs_folder_path('INBOX.A.B', prefix='INBOX.') == 'A/B' + assert fs_folder_path('INBOX?A?B', prefix='INBOX?', separator='?') == 'A/B' + assert fs_folder_path('INBOX.a.very.deep.nested.folder') == 'INBOX/a/very/deep/nested/folder' + assert fs_folder_path(imap_folder_path('a/b')) == 'a/b' diff --git a/inbox/test/general/test_provider_export.py b/inbox/test/general/test_provider_export.py new file mode 100644 index 000000000..d6d4e537c --- /dev/null +++ b/inbox/test/general/test_provider_export.py @@ -0,0 +1,7 @@ +from inbox.providers import providers +import json + + +def test_provider_export_as_json(): + """Provider dict should be exportable as json""" + assert json.dumps(dict(providers)) diff --git a/inbox/test/general/test_provider_resolution.py b/inbox/test/general/test_provider_resolution.py new file mode 100644 index 000000000..b6c663267 --- /dev/null +++ b/inbox/test/general/test_provider_resolution.py @@ -0,0 +1,63 @@ +import pytest +from inbox.util.url import provider_from_address +from inbox.util.url import InvalidEmailAddressError +from inbox.auth.base import handler_from_provider +from inbox.auth.generic import GenericAuthHandler +from inbox.auth.gmail import GmailAuthHandler +from inbox.basicauth import NotSupportedError + + +def test_provider_resolution(mock_dns_resolver): + mock_dns_resolver._load_records('inbox', 'test/data/general_test_provider_resolution.json') + test_cases = [ + ('foo@example.com', 'unknown'), + ('foo@noresolve.com', 'unknown'), + ('foo@gmail.com', 'gmail'), + ('foo@postini.com', 
'gmail'), + ('foo@yahoo.com', 'yahoo'), + ('foo@yahoo.se', 'yahoo'), + ('foo@hotmail.com', 'outlook'), + ('foo@outlook.com', 'outlook'), + ('foo@aol.com', 'aol'), + ('foo@love.com', 'aol'), + ('foo@games.com', 'aol'), + ('foo@exchange.mit.edu', 'eas'), + ('foo@fastmail.fm', 'fastmail'), + ('foo@fastmail.net', 'fastmail'), + ('foo@fastmail.com', 'fastmail'), + ('foo@hover.com', 'hover'), + ('foo@yahoo.com', 'yahoo'), + ('foo@yandex.com', 'yandex'), + ('foo@mrmail.com', 'zimbra'), + ('foo@icloud.com', 'icloud'), + ('foo@mac.com', 'icloud'), + ('foo@gmx.com', 'gmx'), + ('foo@gandi.net', 'gandi'), + ('foo@debuggers.co', 'gandi'), + ('foo@forumone.com', 'gmail'), + ('foo@getbannerman.com', 'gmail'), + ('foo@inboxapp.onmicrosoft.com', 'eas'), + ('foo@espertech.onmicrosoft.com', 'eas'), + ('foo@doesnotexist.nilas.com', 'unknown'), + ('foo@autobizbrokers.com', 'bluehost'), + ] + for email, expected_provider in test_cases: + assert provider_from_address(email, lambda: mock_dns_resolver) == expected_provider + + with pytest.raises(InvalidEmailAddressError): + provider_from_address('notanemail', lambda: mock_dns_resolver) + with pytest.raises(InvalidEmailAddressError): + provider_from_address('not@anemail', lambda: mock_dns_resolver) + with pytest.raises(InvalidEmailAddressError): + provider_from_address('notanemail.com', lambda: mock_dns_resolver) + + +def test_auth_handler_dispatch(): + assert isinstance(handler_from_provider('custom'), GenericAuthHandler) + assert isinstance(handler_from_provider('fastmail'), GenericAuthHandler) + assert isinstance(handler_from_provider('aol'), GenericAuthHandler) + assert isinstance(handler_from_provider('yahoo'), GenericAuthHandler) + assert isinstance(handler_from_provider('gmail'), GmailAuthHandler) + + with pytest.raises(NotSupportedError): + handler_from_provider('NOTAREALMAILPROVIDER') diff --git a/inbox/test/general/test_relationships.py b/inbox/test/general/test_relationships.py new file mode 100644 index 000000000..879e69682 --- /dev/null +++ b/inbox/test/general/test_relationships.py @@ -0,0 +1,99 @@ +import json + +from inbox.models import Category, Message, MessageCategory, Thread + +from inbox.test.util.base import add_fake_message, add_fake_thread +from inbox.test.api.base import new_api_client + + +def test_category_delete(db, gmail_account): + """ Ensure that all associated MessageCategories are deleted + when a Category is deleted """ + + api_client = new_api_client(db, gmail_account.namespace) + po_data = api_client.post_data('/labels/', + {"display_name": "Test_Label"}) + assert po_data.status_code == 200 + + category_public_id = json.loads(po_data.data)['id'] + category = db.session.query(Category).filter( + Category.public_id == category_public_id).one() + category_id = category.id + + for i in xrange(10): + generic_thread = add_fake_thread(db.session, + gmail_account.namespace.id) + gen_message = add_fake_message(db.session, + gmail_account.namespace.id, + generic_thread) + data = {"label_ids": [category_public_id]} + resp = api_client.put_data('/messages/{}'. + format(gen_message.public_id), data) + assert resp.status_code == 200 + + associated_mcs = db.session.query(MessageCategory). \ + filter(MessageCategory.category_id == category_id).all() + assert len(associated_mcs) == 10 + + db.session.delete(category) + db.session.commit() + + assert db.session.query(MessageCategory). 
\ + filter(MessageCategory.category_id == category_id).all() == [] + + +def test_message_delete(db, gmail_account): + """ Ensure that all associated MessageCategories are deleted + when a Message is deleted """ + + api_client = new_api_client(db, gmail_account.namespace) + + generic_thread = add_fake_thread(db.session, gmail_account.namespace.id) + gen_message = add_fake_message(db.session, + gmail_account.namespace.id, + generic_thread) + + category_ids = [] + for i in xrange(10): + po_data = api_client.post_data('/labels/', + {"display_name": str(i)}) + assert po_data.status_code == 200 + + category_ids.append(json.loads(po_data.data)['id']) + + data = {"label_ids": category_ids} + resp = api_client.put_data('/messages/{}'. + format(gen_message.public_id), data) + assert resp.status_code == 200 + + associated_mcs = db.session.query(MessageCategory). \ + filter(MessageCategory.message_id == gen_message.id).all() + assert len(associated_mcs) == 10 + + db.session.delete(gen_message) + db.session.commit() + + assert db.session.query(MessageCategory). \ + filter(MessageCategory.message_id == gen_message.id).all() == [] + + +def test_thread_delete(db, gmail_account): + """ Ensure that all associated Messages are deleted + when a Thread is deleted.""" + + generic_thread = add_fake_thread(db.session, gmail_account.namespace.id) + generic_message = add_fake_message(db.session, + gmail_account.namespace.id, + generic_thread) + assert db.session.query(Thread). \ + filter(Thread.id == generic_thread.id).all() == [generic_thread] + assert db.session.query(Message). \ + filter(Message.id == generic_message.id).all() == [generic_message] + + db.session.delete(generic_thread) + db.session.commit() + + assert db.session.query(Thread). \ + filter(Thread.id == generic_thread.id).all() == [] + assert db.session.query(Message). \ + filter(Message.id == generic_message.id).all() == [] diff --git a/inbox/test/general/test_required_folders.py b/inbox/test/general/test_required_folders.py new file mode 100644 index 000000000..f1f4d1a71 --- /dev/null +++ b/inbox/test/general/test_required_folders.py @@ -0,0 +1,59 @@ +""" Test that the All Mail folder is enabled for Gmail. """ +import pytest + +from inbox.auth.gmail import GmailAuthHandler +from inbox.basicauth import GmailSettingError +from inbox.crispin import GmailCrispinClient + + +class AccountStub(object): + id = 0 + email_address = 'bob@bob.com' + access_token = None + imap_endpoint = None + sync_state = 'running' + + def new_token(self): + return ('foo', 22) + + def validate_token(self, new_token): + return True + + +class ConnectionStub(object): + + def logout(self): + pass + + +def get_auth_handler(monkeypatch, folders): + g = GmailAuthHandler('gmail') + + def mock_connect(a): + return ConnectionStub() + + g.connect_account = mock_connect + monkeypatch.setattr(GmailCrispinClient, 'folder_names', + lambda x: folders) + return g + + +def test_all_mail_missing(monkeypatch): + """ + Test that validate_folders throws a GmailSettingError if All Mail + is not in the list of folders. + + """ + g = get_auth_handler(monkeypatch, {'inbox': 'INBOX'}) + with pytest.raises(GmailSettingError): + g.verify_account(AccountStub()) + + +def test_all_mail_present(monkeypatch): + """ + Test that the validate_folders passes if All Mail is present. 
+ + """ + g = get_auth_handler(monkeypatch, {'all': 'ALL', 'inbox': 'INBOX', + 'trash': 'TRASH'}) + assert g.verify_account(AccountStub()) diff --git a/inbox/test/general/test_sync_engine_exit.py b/inbox/test/general/test_sync_engine_exit.py new file mode 100644 index 000000000..2f464e38d --- /dev/null +++ b/inbox/test/general/test_sync_engine_exit.py @@ -0,0 +1,64 @@ +# flake8: noqa: F401 +# test that we correctly exit a sync engine instance if the folder we are +# trying to sync comes back as deleted while syncing + +import pytest +from sqlalchemy.exc import IntegrityError + +from inbox.mailsync.backends.imap.monitor import ImapSyncMonitor +from inbox.mailsync.backends.imap.generic import FolderSyncEngine +from inbox.mailsync.backends.base import MailsyncDone +from inbox.models import Folder +from inbox.auth.generic import GenericAuthHandler +from inbox.crispin import FolderMissingError + +TEST_YAHOO_EMAIL = "inboxapptest1@yahoo.com" + + +@pytest.fixture +def yahoo_account(db): + account = GenericAuthHandler('yahoo').create_account( + TEST_YAHOO_EMAIL, + {"email": TEST_YAHOO_EMAIL, "password": "BLAH"}) + db.session.add(account) + db.session.commit() + return account + + +def raise_folder_error(*args, **kwargs): + raise FolderMissingError() + + +@pytest.fixture +def sync_engine_stub(db, yahoo_account): + db.session.add(Folder(account=yahoo_account, name='Inbox')) + db.session.commit() + engine = FolderSyncEngine(yahoo_account.id, yahoo_account.namespace.id, + "Inbox", TEST_YAHOO_EMAIL, "yahoo", None) + + return engine + + +def test_folder_engine_exits_if_folder_missing(db, yahoo_account, + sync_engine_stub): + # if the folder does not exist in our database, _load_state will + # encounter an IntegrityError as it tries to insert a child + # ImapFolderSyncStatus against an invalid foreign key + folder = db.session.query(Folder).filter_by(account=yahoo_account, + name='Inbox').one() + db.session.delete(folder) + db.session.commit() + with pytest.raises(IntegrityError): + sync_engine_stub.update_folder_sync_status(lambda s: s) + + # and we should use this to signal that mailsync is done + with pytest.raises(MailsyncDone): + sync_engine_stub._run() + + # also check that we handle the crispin select_folder error appropriately + # within the core True loop of _run() + sync_engine_stub._load_state = lambda: True + sync_engine_stub.state = 'poll' + sync_engine_stub.poll_impl = raise_folder_error + with pytest.raises(MailsyncDone): + sync_engine_stub._run() diff --git a/inbox/test/general/test_thread_creation.py b/inbox/test/general/test_thread_creation.py new file mode 100644 index 000000000..c6e80fbc7 --- /dev/null +++ b/inbox/test/general/test_thread_creation.py @@ -0,0 +1,85 @@ +# flake8: noqa: F401 +# test that T441 doesn't reappear, ever. 
+import datetime +import pytest +from collections import namedtuple +from inbox.mailsync.backends.imap.generic import FolderSyncEngine +from inbox.models import Folder, Namespace +from inbox.models.backends.generic import GenericAccount +from inbox.models.backends.imap import ImapUid +from inbox.util.threading import fetch_corresponding_thread +from inbox.test.util.base import (add_fake_thread, add_fake_message, + add_generic_imap_account) + +MockRawMessage = namedtuple('RawMessage', ['flags']) + + +@pytest.fixture +def folder_sync_engine(db, generic_account): + db.session.add(Folder(account=generic_account, name='Inbox')) + db.session.commit() + engine = FolderSyncEngine(generic_account.id, + generic_account.namespace.id, + "Inbox", + generic_account.email_address, + generic_account.provider, + None) + return engine + + +def test_generic_grouping(db, default_account): + thread = add_fake_thread(db.session, default_account.namespace.id) + message = add_fake_message(db.session, default_account.namespace.id, + thread, subject="Golden Gate Park next Sat") + folder = Folder(account=default_account, name='Inbox', + canonical_name='inbox') + ImapUid(message=message, account_id=default_account.id, + msg_uid=2222, folder=folder) + + thread = add_fake_thread(db.session, default_account.namespace.id) + + account = add_generic_imap_account(db.session) + message = add_fake_message(db.session, account.namespace.id, + thread, subject="Golden Gate Park next Sat") + + thread = fetch_corresponding_thread(db.session, + default_account.namespace.id, message) + assert thread is None, ("fetch_similar_threads should " + "heed namespace boundaries") + + +def test_threading_limit(db, folder_sync_engine, monkeypatch): + """Test that custom threading doesn't produce arbitrarily long threads, + which eventually break things.""" + from inbox.models import Message, Thread + # Shorten bound to make test faster + MAX_THREAD_LENGTH = 10 + monkeypatch.setattr( + 'inbox.mailsync.backends.imap.generic.MAX_THREAD_LENGTH', + MAX_THREAD_LENGTH) + namespace_id = folder_sync_engine.namespace_id + + msg = MockRawMessage([]) + for i in range(3 * MAX_THREAD_LENGTH): + m = Message() + m.namespace_id = namespace_id + m.received_date = datetime.datetime.utcnow() + m.references = [] + m.size = 0 + m.body = '' + m.from_addr = [("Karim Hamidou", "karim@nilas.com")] + m.to_addr = [("Eben Freeman", "eben@nilas.com")] + m.snippet = '' + m.subject = 'unique subject' + db.session.add(m) + folder_sync_engine.add_message_to_thread(db.session, m, msg) + db.session.commit() + new_threads = db.session.query(Thread). 
\ + filter(Thread.subject == 'unique subject').all() + assert len(new_threads) == 3 + assert all(len(thread.messages) == MAX_THREAD_LENGTH for thread in + new_threads) + + +if __name__ == '__main__': + pytest.main([__file__]) diff --git a/inbox/test/general/test_threading.py b/inbox/test/general/test_threading.py new file mode 100644 index 000000000..1188cd7fc --- /dev/null +++ b/inbox/test/general/test_threading.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +# flake8: noqa: F401 +import pytest +from inbox.util.threading import fetch_corresponding_thread +from inbox.util.misc import cleanup_subject +from inbox.test.util.base import (add_fake_message, add_fake_thread, + add_fake_imapuid) + + +def test_message_cleanup(): + assert cleanup_subject("Re: Birthday") == "Birthday" + assert cleanup_subject("Re:Birthday") == "Birthday" + assert cleanup_subject("Re:FWD: Birthday") == "Birthday" + assert (cleanup_subject("Re: RE: Alors, comment ça s'est passé ?") == + "Alors, comment ça s'est passé ?") + assert cleanup_subject("Re: FWD:FWD: Re:La chaise") == "La chaise" + + assert cleanup_subject("Aw: über cool") == "über cool" + assert cleanup_subject("Aw:Re:wienerschnitzel") == "wienerschnitzel" + assert cleanup_subject("Aw: wienerschnitzel") == "wienerschnitzel" + assert cleanup_subject("aw: wg:wienerschnitzel") == "wienerschnitzel" + assert cleanup_subject( + "Undeliverable: Message returned to sender") == "Message returned to sender" + assert cleanup_subject( + "Undelivered: Message returned to sender") == "Message returned to sender" + + +def test_basic_message_grouping(db, default_namespace): + first_thread = add_fake_thread(db.session, default_namespace.id) + first_thread.subject = 'Some kind of test' + + add_fake_message(db.session, default_namespace.id, + thread=first_thread, + subject='Some kind of test', + from_addr=[('Karim Hamidou', 'karim@nilas.com')], + to_addr=[('Eben Freeman', 'emfree@nilas.com')], + bcc_addr=[('Some person', 'person@nilas.com')]) + + msg2 = add_fake_message(db.session, default_namespace.id, thread=None, + subject='Re: Some kind of test', + from_addr=[('Some random dude', + 'random@pobox.com')], + to_addr=[('Karim Hamidou', 'karim@nilas.com')]) + + matched_thread = fetch_corresponding_thread(db.session, + default_namespace.id, msg2) + assert matched_thread is None, "the algo shouldn't thread different convos" + + msg3 = add_fake_message(db.session, default_namespace.id, thread=None) + msg3.subject = 'Re: Some kind of test' + msg3.from_addr = [('Eben Freeman', 'emfree@nilas.com')] + msg3.to_addr = [('Karim Hamidou', 'karim@nilas.com')] + + matched_thread = fetch_corresponding_thread(db.session, default_namespace.id, msg3) + assert matched_thread is first_thread, "Should match on participants" + + +def test_self_send(db, default_namespace): + first_thread = add_fake_thread(db.session, default_namespace.id) + first_thread.subject = 'Some kind of test' + + add_fake_message(db.session, default_namespace.id, + thread=first_thread, + subject='Some kind of test', + from_addr=[('Karim Hamidou', 'karim@nilas.com')], + to_addr=[('Karim Hamidou', 'karim@nilas.com')]) + + msg2 = add_fake_message(db.session, default_namespace.id, + thread=None, + subject='Re: Some kind of test', + from_addr=[('Karim Hamidou', 'karim@nilas.com')], + to_addr=[('Karim Hamidou', 'karim@nilas.com')]) + + matched_thread = fetch_corresponding_thread(db.session, + default_namespace.id, msg2) + assert matched_thread is first_thread, "Should match on self-send" + + +if __name__ == '__main__': + 
pytest.main([__file__]) diff --git a/inbox/test/general/test_util.py b/inbox/test/general/test_util.py new file mode 100644 index 000000000..135e6c15c --- /dev/null +++ b/inbox/test/general/test_util.py @@ -0,0 +1,56 @@ +# test_util.py --- test various utility functions. +import socket +from inbox.util.url import naked_domain, matching_subdomains + + +def test_naked_domain(): + assert naked_domain( + 'python.linux.com') == 'python.linux.com' + assert naked_domain( + 'iplayer.forums.bbc.co.uk') == 'iplayer.forums.bbc.co.uk' + assert naked_domain( + 'parliament.org.au') == 'parliament.org.au' + assert naked_domain( + 'prime-minister.parliament.org.au') == 'prime-minister.parliament.org.au' + assert naked_domain( + 'https://python.linux.com/resume-guido.pdf') == 'python.linux.com' + assert naked_domain( + 'ftp://linux.com/vmlinuz') == 'linux.com' + assert naked_domain( + 'ftp://parliament.co.uk/vmlinuz') == 'parliament.co.uk' + assert naked_domain( + 'ftp://pm.parliament.co.uk/vmlinuz') == 'pm.parliament.co.uk' + assert naked_domain( + 'https://username:password@python.linux.com/vmlinuz') == 'python.linux.com' + + +def test_matching_subdomains(monkeypatch): + def gethostbyname_patch(x): + return "127.0.0.1" + + monkeypatch.setattr(socket, 'gethostbyname', gethostbyname_patch) + + assert matching_subdomains(None, 'mail.nylas.com') is False + + # Two domains with the same IP but different domains aren't matched. + assert matching_subdomains('mail.microsoft.com', 'mail.nylas.com') is False + assert matching_subdomains('test.nylas.co.uk', 'mail.nylas.co.uk') is True + assert matching_subdomains('test.servers.nylas.com.au', 'mail.nylas.com.au') is True + assert matching_subdomains('test.servers.nylas.com', 'mail.nylas.com.au') is False + assert matching_subdomains('test.servers.co.uk', 'evil.co.uk') is False + + addresses = ['127.0.0.1', '192.168.1.11'] + + def gethostbyname_patch(x): + return addresses.pop() + + monkeypatch.setattr(socket, 'gethostbyname', gethostbyname_patch) + + addresses = ['127.0.0.1', '192.168.1.11'] + + def gethostbyname_patch(x): + return addresses.pop() + + # Check that if the domains are the same, we're not doing an + # IP address resolution. + assert matching_subdomains('nylas.com', 'nylas.com') is True diff --git a/inbox/test/heartbeat/test_heartbeat.py b/inbox/test/heartbeat/test_heartbeat.py new file mode 100644 index 000000000..aec203667 --- /dev/null +++ b/inbox/test/heartbeat/test_heartbeat.py @@ -0,0 +1,143 @@ +# flake8: noqa: F401, F811 +import pytest +import json +import time +from datetime import datetime, timedelta + +from inbox.heartbeat.store import (HeartbeatStore, HeartbeatStatusProxy, + HeartbeatStatusKey) +from inbox.heartbeat.status import (clear_heartbeat_status, + get_ping_status) +import inbox.heartbeat.config as heartbeat_config +from inbox.heartbeat.config import ALIVE_EXPIRY +from inbox.config import config + +from nylas.logging import configure_logging +configure_logging(config.get('LOGLEVEL')) + +from mockredis import MockRedis +# Note that all Redis commands are mocked via mockredis in conftest.py. 
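As an aside (not part of this patch), the redis_client fixture used below could be wired up roughly like this in conftest.py; the patch target is an assumption inferred from the heartbeat_config.get_redis_client() calls in these tests, and the real fixture may differ:

    # Illustrative sketch only -- not the actual conftest.py fixture.
    import pytest
    from mockredis import MockRedis

    @pytest.fixture(autouse=True)
    def redis_client(monkeypatch):
        # Hypothetical wiring: hand every test a MockRedis instance instead
        # of a real Redis connection.
        client = MockRedis()
        monkeypatch.setattr('inbox.heartbeat.config.get_redis_client',
                            lambda: client)
        return client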
+
+
+def proxy_for(account_id, folder_id, email='test@test.com', provider='gmail',
+              device_id=0):
+    return HeartbeatStatusProxy(account_id=account_id, folder_id=folder_id,
+                                folder_name="Inbox",
+                                email_address=email,
+                                provider_name=provider,
+                                device_id=device_id)
+
+
+def fuzzy_equals(a, b):
+    if isinstance(a, datetime) or isinstance(b, datetime):
+        if not isinstance(a, datetime):
+            a = datetime.fromtimestamp(a)
+        if not isinstance(b, datetime):
+            b = datetime.fromtimestamp(b)
+        s = abs(a - b)
+        return s < timedelta(seconds=0.1)
+    return abs(a - b) < 0.1
+
+
+# Test storing and removing heartbeats
+
+
+def test_heartbeat_store_singleton():
+    # Test we don't unnecessarily create multiple instances of HeartbeatStore
+    store_one = HeartbeatStore.store()
+    store_two = HeartbeatStore.store()
+    assert id(store_one) == id(store_two)
+
+
+def test_heartbeat_status_key():
+    account_id = 1
+    folder_id = 2
+    key = HeartbeatStatusKey(account_id, folder_id)
+    assert str(key) == "1:2"
+    key = HeartbeatStatusKey.from_string("2:1")
+    assert key.account_id == 2
+    assert key.folder_id == 1
+
+
+def test_proxy_publish_doesnt_break_everything(monkeypatch):
+    def break_things(s, k, d, v):
+        raise Exception("Redis connection failure")
+    monkeypatch.setattr("mockredis.MockRedis.hset", break_things)
+    # Check heartbeat publish exception doesn't pass up through to caller.
+    # It will print out an error in the log, though.
+    proxy_for(1, 2)
+    assert True
+
+
+def test_folder_publish_in_index(redis_client):
+    proxy = proxy_for(1, 2)
+    proxy.publish()
+    client = heartbeat_config.get_redis_client()
+    assert '1' in client.keys()
+
+    # Check the per-account folder-list index was populated correctly: it
+    # should be a sorted set of all folder IDs for that account, with the
+    # folder's last heartbeat timestamp.
+    acct_folder_index = client.zrange('1', 0, -1, withscores=True)
+    assert len(acct_folder_index) == 1
+    key, timestamp = acct_folder_index[0]
+    assert key == '2'
+    assert fuzzy_equals(proxy.heartbeat_at, timestamp)
+
+
+def test_kill_device_multiple():
+    # If we kill a device and the folder has multiple devices, don't clear
+    # the heartbeat status
+    local_store = HeartbeatStore().store()
+
+    proxy_for(1, 2, device_id=2).publish()
+    proxy_for(1, 2, device_id=3).publish()
+    clear_heartbeat_status(1, device_id=2)
+    folders = local_store.get_account_folders(1)
+
+    assert len(folders) == 1
+    f, ts = folders[0]
+    assert f == '2'
+
+
+# Test querying heartbeats
+@pytest.fixture
+def random_heartbeats():
+    # generate some random heartbeats for accounts 0..9 and folders -2..2
+    proxies = {}
+    for i in range(10):
+        proxies[i] = {}
+        for f in range(-2, 3):
+            proxy = proxy_for(i, f)
+            proxy.publish()
+            proxies[i][f] = proxy
+    return proxies
+
+
+def make_dead_heartbeat(store, proxies, account_id, folder_id, time_dead):
+    dead_time = time.time() - ALIVE_EXPIRY - time_dead
+    dead_proxy = proxies[account_id][folder_id]
+    store.publish(dead_proxy.key, dead_proxy.device_id,
+                  json.dumps(dead_proxy.value), dead_time)
+
+
+def test_ping(random_heartbeats):
+    # Get the lightweight ping (only checks indices) and make sure it conforms
+    # to the expected format.
+ ping = get_ping_status(range(10)) + assert isinstance(ping, dict) + assert sorted(ping.keys()) == sorted(random_heartbeats.keys()) + single = ping[0] + attrs = ('id', 'folders') + for attr in attrs: + assert hasattr(single, attr) + for f in single.folders: + assert f.alive + + +def test_ping_single(random_heartbeats): + ping = get_ping_status([0]) + assert isinstance(ping, dict) + single = ping[0] + for f in single.folders: + assert f.alive diff --git a/inbox/test/imap/__init__.py b/inbox/test/imap/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/inbox/test/imap/data.py b/inbox/test/imap/data.py new file mode 100644 index 000000000..82ec8d34e --- /dev/null +++ b/inbox/test/imap/data.py @@ -0,0 +1,99 @@ +"""This module defines strategies for generating test data for IMAP sync, all +well as a mock IMAPClient isntance that can be used to deterministically test +aspects of IMAP sync. +See https://hypothesis.readthedocs.org/en/latest/data.html for more information +about how this works.""" +import string + +import os +import tempfile +# don't try writing to .hypothesis +os.environ['HYPOTHESIS_STORAGE_DIRECTORY'] = hyp_dir = tempfile.mkdtemp() +os.environ['HYPOTHESIS_DATABASE_FILE'] = os.path.join(hyp_dir, 'db') + +from hypothesis import strategies as s +from hypothesis.extra.datetime import datetimes +import flanker +from flanker import mime + + +def _build_address_header(addresslist): + return ', '.join( + flanker.addresslib.address.EmailAddress(phrase, spec).full_spec() + for phrase, spec in addresslist + ) + + +def build_mime_message(from_, to, cc, bcc, subject, body): + msg = mime.create.multipart('alternative') + msg.append( + mime.create.text('plain', body) + ) + msg.headers['Subject'] = subject + msg.headers['From'] = _build_address_header(from_) + msg.headers['To'] = _build_address_header(to) + msg.headers['Cc'] = _build_address_header(cc) + msg.headers['Bcc'] = _build_address_header(bcc) + return msg.to_string() + + +def build_uid_data(internaldate, flags, body, g_labels, g_msgid, modseq): + return { + 'INTERNALDATE': internaldate, + 'FLAGS': flags, + 'BODY[]': body, + 'RFC822.SIZE': len(body), + 'X-GM-LABELS': g_labels, + 'X-GM-MSGID': g_msgid, + 'X-GM-THRID': g_msgid, # For simplicity + 'MODSEQ': modseq + } + + +# We don't want to worry about whacky encodings or pathologically long data +# here, so just generate some basic, sane ASCII text. +basic_text = s.text(string.ascii_letters, min_size=1, max_size=64) + + +# An email address of the form 'foo@bar'. 
+address = s.builds( + lambda localpart, domain: '{}@{}'.format(localpart, domain), + basic_text, basic_text) + + +# A list of tuples ('displayname', 'addr@domain') +addresslist = s.lists( + s.tuples(basic_text, address), + min_size=1, + max_size=5 +) + + +# A basic MIME message with plaintext body plus From/To/Cc/Bcc/Subject headers +mime_message = s.builds( + build_mime_message, + addresslist, + addresslist, + addresslist, + addresslist, + basic_text, + basic_text +) + +randint = s.basic(generate=lambda random, _: random.getrandbits(63)) + +uid_data = s.builds( + build_uid_data, + datetimes(timezones=[]), + s.sampled_from([(), ('\\Seen',)]), + mime_message, + s.sampled_from([(), ('\\Inbox',)]), + randint, + randint) + + +uids = s.dictionaries( + s.integers(min_value=22), + uid_data, + min_size=5, + max_size=10) diff --git a/inbox/test/imap/network/__init__.py b/inbox/test/imap/network/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/inbox/test/imap/network/test_actions_syncback.py b/inbox/test/imap/network/test_actions_syncback.py new file mode 100644 index 000000000..ec965af5e --- /dev/null +++ b/inbox/test/imap/network/test_actions_syncback.py @@ -0,0 +1,107 @@ +from inbox.test.util.crispin import crispin_client + +ACCOUNT_ID = 1 +NAMESPACE_ID = 1 +THREAD_ID = 2 + +# Unit tests for the functions, not the queue runners. These tests use a real +# Gmail test account and idempotently put the account back to the state it +# started in when the test is done. They intentionally make the local Inbox +# datastore out of sync. That's okay---our goal is to minimally unit test the +# syncback methods, not to do a system-level test here. + + +def test_archive_move_syncback(db, config): + from inbox.actions.backends.gmail import (set_remote_archived, + remote_move, uidvalidity_cb) + from inbox.models.backends.imap import ImapAccount, ImapThread + g_thrid = db.session.query(ImapThread.g_thrid).filter_by( + id=THREAD_ID, namespace_id=NAMESPACE_ID).one()[0] + account = db.session.query(ImapAccount).get(ACCOUNT_ID) + + set_remote_archived(account, THREAD_ID, False, db.session) + set_remote_archived(account, THREAD_ID, True, db.session) + + assert account.inbox_folder_id and account.all_folder_id, \ + "`inbox_folder_id` and `all_folder_id` cannot be NULL" + with crispin_client(account.id, account.provider) as client: + client.select_folder(account.inbox_folder.name, uidvalidity_cb) + inbox_uids = client.find_messages(g_thrid) + assert not inbox_uids, "thread still present in inbox" + client.select_folder(account.all_folder.name, uidvalidity_cb) + archive_uids = client.find_messages(g_thrid) + assert archive_uids, "thread missing from archive" + + # and put things back the way they were :) + remote_move(account, THREAD_ID, account.all_folder.name, + account.inbox_folder.name, db.session) + client.select_folder(account.inbox_folder.name, uidvalidity_cb) + inbox_uids = client.find_messages(g_thrid) + assert inbox_uids, "thread missing from inbox" + client.select_folder(account.all_folder.name, uidvalidity_cb) + archive_uids = client.find_messages(g_thrid) + assert archive_uids, "thread missing from archive" + + +def test_copy_delete_syncback(db, config): + from inbox.actions.backends.gmail import (_remote_copy, _remote_delete, + uidvalidity_cb) + from inbox.models.backends.imap import ImapAccount, ImapThread + + g_thrid = db.session.query(ImapThread.g_thrid). 
\ + filter_by(id=THREAD_ID, namespace_id=NAMESPACE_ID).one()[0] + account = db.session.query(ImapAccount).get(ACCOUNT_ID) + + _remote_copy(account, THREAD_ID, account.inbox_folder.name, 'testlabel', + db.session) + + with crispin_client(account.id, account.provider) as client: + client.select_folder(account.inbox_folder.name, uidvalidity_cb) + inbox_uids = client.find_messages(g_thrid) + assert inbox_uids, "thread missing from inbox" + client.select_folder(account.all_folder.name, uidvalidity_cb) + archive_uids = client.find_messages(g_thrid) + assert archive_uids, "thread missing from archive" + client.select_folder('testlabel', uidvalidity_cb) + testlabel_uids = client.find_messages(g_thrid) + assert testlabel_uids, "thread missing from testlabel" + + # and put things back the way they were :) + _remote_delete(account, THREAD_ID, 'testlabel', db.session) + client.select_folder(account.inbox_folder.name, uidvalidity_cb) + inbox_uids = client.find_messages(g_thrid) + assert inbox_uids, "thread missing from inbox" + client.select_folder(account.all_folder.name, uidvalidity_cb) + archive_uids = client.find_messages(g_thrid) + assert archive_uids, "thread missing from archive" + client.select_folder('testlabel', uidvalidity_cb) + testlabel_uids = client.find_messages(g_thrid) + assert not testlabel_uids, "thread still present in testlabel" + + +def test_remote_unread_syncback(db, config): + from inbox.actions.backends.gmail import set_remote_unread, uidvalidity_cb + from inbox.models.backends.imap import ImapAccount, ImapThread + + account = db.session.query(ImapAccount).get(ACCOUNT_ID) + g_thrid, = db.session.query(ImapThread.g_thrid). \ + filter_by(id=THREAD_ID).one() + + set_remote_unread(account, THREAD_ID, True, db.session) + + with crispin_client(account.id, account.provider) as client: + client.select_folder(account.all_folder.name, uidvalidity_cb) + uids = client.find_messages(g_thrid) + assert not any('\\Seen' in flags for flags, _ in + client.flags(uids).values()) + + set_remote_unread(account, THREAD_ID, False, db.session) + assert all('\\Seen' in flags for flags, _ in + client.flags(uids).values()) + + set_remote_unread(account, THREAD_ID, True, db.session) + assert not any('\\Seen' in flags for flags, _ in + client.flags(uids).values()) + + +# TODO: Test more of the different cases here. diff --git a/inbox/test/imap/network/test_drafts_syncback.py b/inbox/test/imap/network/test_drafts_syncback.py new file mode 100644 index 000000000..c5380fc99 --- /dev/null +++ b/inbox/test/imap/network/test_drafts_syncback.py @@ -0,0 +1,104 @@ +import uuid +from datetime import datetime + +import pytest + +from inbox.test.util.crispin import crispin_client + +ACCOUNT_ID = 1 +NAMESPACE_ID = 1 +THREAD_ID = 2 + +# These tests use a real Gmail test account and idempotently put the account +# back to the state it started in when the test is done. + + +@pytest.fixture(scope='function') +def message(db, config): + from inbox.models.backends.imap import ImapAccount + + account = db.session.query(ImapAccount).get(ACCOUNT_ID) + to = [{'name': u'"\u2605The red-haired mermaid\u2605"', + 'email': account.email_address}] + subject = 'Draft test: ' + str(uuid.uuid4().hex) + body = '

Sea, birds, yoga and sand.
' + + return (to, subject, body) + + +def test_remote_save_draft(db, config, message): + """ Tests the save_draft function, which saves the draft to the remote. """ + from inbox.actions.backends.gmail import remote_save_draft + from inbox.sendmail.base import _parse_recipients + from inbox.sendmail.message import create_email, Recipients + from inbox.models import Account + + account = db.session.query(Account).get(ACCOUNT_ID) + to, subject, body = message + to_addr = _parse_recipients(to) + recipients = Recipients(to_addr, [], []) + email = create_email(account.sender_name, account.email_address, None, + recipients, subject, body, None) + date = datetime.utcnow() + + remote_save_draft(account, account.drafts_folder.name, email.to_string(), + db.session, date) + + with crispin_client(account.id, account.provider) as c: + criteria = ['NOT DELETED', 'SUBJECT "{0}"'.format(subject)] + + c.conn.select_folder(account.drafts_folder.name, readonly=False) + + draft_uids = c.conn.search(criteria) + assert draft_uids, 'Message missing from Drafts folder' + + flags = c.conn.get_flags(draft_uids) + for uid in draft_uids: + f = flags.get(uid) + assert f and '\\Draft' in f, "Message missing '\\Draft' flag" + + c.conn.delete_messages(draft_uids) + c.conn.expunge() + + +def test_remote_delete_draft(db, config, message): + """ + Tests the delete_draft function, which deletes the draft from the + remote. + + """ + from inbox.actions.backends.gmail import (remote_save_draft, + remote_delete_draft) + from inbox.sendmail.base import _parse_recipients + from inbox.sendmail.message import create_email, Recipients + from inbox.models import Account + + account = db.session.query(Account).get(ACCOUNT_ID) + to, subject, body = message + to_addr = _parse_recipients(to) + recipients = Recipients(to_addr, [], []) + email = create_email(account.sender_name, account.email_address, None, + recipients, subject, body, None) + date = datetime.utcnow() + + # Save on remote + remote_save_draft(account, account.drafts_folder.name, email.to_string(), + db.session, date) + + inbox_uid = email.headers['X-INBOX-ID'] + + with crispin_client(account.id, account.provider) as c: + criteria = ['DRAFT', 'NOT DELETED', + 'HEADER X-INBOX-ID {0}'.format(inbox_uid)] + + c.conn.select_folder(account.drafts_folder.name, readonly=False) + uids = c.conn.search(criteria) + assert uids, 'Message missing from Drafts folder' + + # Delete on remote + remote_delete_draft(account, account.drafts_folder.name, inbox_uid, + db.session) + + c.conn.select_folder(account.drafts_folder.name, readonly=False) + uids = c.conn.search(criteria) + assert not uids, 'Message still in Drafts folder' diff --git a/inbox/test/imap/network/test_send.py b/inbox/test/imap/network/test_send.py new file mode 100644 index 000000000..ecafef160 --- /dev/null +++ b/inbox/test/imap/network/test_send.py @@ -0,0 +1,55 @@ +import json +from datetime import datetime + +import pytest + +from inbox.test.util.base import default_account +from inbox.test.util.crispin import crispin_client +from inbox.test.api.base import api_client + +__all__ = ['default_account', 'api_client'] + + +@pytest.fixture +def example_draft(db, default_account): + return { + 'subject': 'Draft test at {}'.format(datetime.utcnow()), + 'body': '

Sea, birds and sand.
', + 'to': [{'name': 'The red-haired mermaid', + 'email': default_account.email_address}] + } + + +def test_send_draft(db, api_client, example_draft, default_account): + + r = api_client.post_data('/drafts', example_draft) + assert r.status_code == 200 + public_id = json.loads(r.data)['id'] + version = json.loads(r.data)['version'] + + r = api_client.post_data('/send', {'draft_id': public_id, + 'version': version}) + assert r.status_code == 200 + + draft = api_client.get_data('/drafts/{}'.format(public_id)) + assert draft is not None + + assert draft['object'] != 'draft' + + with crispin_client(default_account.id, default_account.provider) as c: + criteria = ['NOT DELETED', 'SUBJECT "{0}"'.format( + example_draft['subject'])] + + c.conn.select_folder(default_account.drafts_folder.name, + readonly=False) + + draft_uids = c.conn.search(criteria) + assert not draft_uids, 'Message still in Drafts folder' + + c.conn.select_folder(default_account.sent_folder.name, readonly=False) + + sent_uids = c.conn.search(criteria) + assert sent_uids, 'Message missing from Sent folder' + + c.conn.delete_messages(sent_uids) + c.conn.expunge() diff --git a/inbox/test/imap/test_actions.py b/inbox/test/imap/test_actions.py new file mode 100644 index 000000000..b53f98e2f --- /dev/null +++ b/inbox/test/imap/test_actions.py @@ -0,0 +1,203 @@ +# flake8: noqa: F401, F811 +# -*- coding: utf-8 -*- +import mock +import pytest +import gevent +from flanker import mime +from inbox.actions.base import (change_labels, save_draft, update_draft, + delete_draft, create_folder, update_folder, + delete_folder, create_label, update_label, + delete_label, mark_unread, mark_starred) +from inbox.util.testutils import mock_imapclient # noqa +from inbox.test.util.base import add_fake_imapuid, add_fake_category +from inbox.crispin import writable_connection_pool +from inbox.models import Category, ActionLog +from inbox.models.action_log import schedule_action +from inbox.sendmail.base import create_message_from_json +from inbox.sendmail.base import update_draft as sendmail_update_draft +from inbox.events.actions.backends.gmail import remote_create_event +from inbox.transactions.actions import SyncbackService +from inbox.models.session import new_session +from inbox.actions.backends.generic import _create_email + +import pytest +@pytest.mark.only +def test_draft_updates(db, default_account, mock_imapclient): + # Set up folder list + mock_imapclient._data['Drafts'] = {} + mock_imapclient._data['Trash'] = {} + mock_imapclient._data['Sent Mail'] = {} + mock_imapclient.list_folders = lambda: [ + (('\\HasNoChildren', '\\Drafts'), '/', 'Drafts'), + (('\\HasNoChildren', '\\Trash'), '/', 'Trash'), + (('\\HasNoChildren', '\\Sent'), '/', 'Sent Mail'), + ] + + pool = writable_connection_pool(default_account.id) + + draft = create_message_from_json({'subject': 'Test draft'}, + default_account.namespace, db.session, + True) + draft.is_draft = True + draft.version = 0 + db.session.commit() + with pool.get() as conn: + save_draft(conn, default_account.id, draft.id, {'version': 0}) + conn.select_folder('Drafts', lambda *args: True) + assert len(conn.all_uids()) == 1 + + # Check that draft is not resaved if already synced. 
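+        # (update_draft with an unchanged version should be a no-op on the
+        # remote, so the folder should still contain exactly one UID.)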
+ update_draft(conn, default_account.id, draft.id, {'version': 0}) + conn.select_folder('Drafts', lambda *args: True) + assert len(conn.all_uids()) == 1 + + # Check that an older version is deleted + draft.version = 4 + sendmail_update_draft(db.session, default_account, draft, + from_addr=draft.from_addr, subject='New subject', + blocks=[]) + db.session.commit() + + update_draft(conn, default_account.id, draft.id, {'version': 5}) + + conn.select_folder('Drafts', lambda *args: True) + all_uids = conn.all_uids() + assert len(all_uids) == 1 + data = conn.uids(all_uids)[0] + parsed = mime.from_string(data.body) + expected_message_id = '<{}-{}@mailer.nylas.com>'.format( + draft.public_id, draft.version) + assert parsed.headers.get('Message-Id') == expected_message_id + + # We're testing the draft deletion with Gmail here. However, + # because of a race condition in Gmail's reconciliation algorithm, + # we need to check if the sent mail has been created in the sent + # folder. Since we're mocking everything, we have to create it + # ourselves. + mock_imapclient.append('Sent Mail', data.body, None, None, + x_gm_msgid=4323) + + delete_draft(conn, default_account.id, draft.id, + {'message_id_header': draft.message_id_header, + 'nylas_uid': draft.nylas_uid, 'version': 5}) + + conn.select_folder('Drafts', lambda *args: True) + all_uids = conn.all_uids() + assert len(all_uids) == 0 + + +def test_change_flags(db, default_account, message, folder, mock_imapclient): + mock_imapclient.add_folder_data(folder.name, {}) + mock_imapclient.add_flags = mock.Mock() + mock_imapclient.remove_flags = mock.Mock() + add_fake_imapuid(db.session, default_account.id, message, folder, 22) + with writable_connection_pool(default_account.id).get() as crispin_client: + mark_unread(crispin_client, default_account.id, message.id, + {'unread': False}) + mock_imapclient.add_flags.assert_called_with([22], ['\\Seen'], silent=True) + + mark_unread(crispin_client, default_account.id, message.id, + {'unread': True}) + mock_imapclient.remove_flags.assert_called_with([22], ['\\Seen'], silent=True) + + mark_starred(crispin_client, default_account.id, message.id, + {'starred': True}) + mock_imapclient.add_flags.assert_called_with([22], ['\\Flagged'], silent=True) + + mark_starred(crispin_client, default_account.id, message.id, + {'starred': False}) + mock_imapclient.remove_flags.assert_called_with([22], ['\\Flagged'], silent=True) + + +def test_change_labels(db, default_account, message, folder, mock_imapclient): + mock_imapclient.add_folder_data(folder.name, {}) + mock_imapclient.add_gmail_labels = mock.Mock() + mock_imapclient.remove_gmail_labels = mock.Mock() + add_fake_imapuid(db.session, default_account.id, message, folder, 22) + + with writable_connection_pool(default_account.id).get() as crispin_client: + change_labels(crispin_client, default_account.id, [message.id], + {'removed_labels': ['\\Inbox'], + 'added_labels': [u'motörhead', u'μετάνοια']}) + mock_imapclient.add_gmail_labels.assert_called_with( + [22], ['mot&APY-rhead', '&A7wDtQPEA6wDvQO,A7kDsQ-'], silent=True) + mock_imapclient.remove_gmail_labels.assert_called_with([22], ['\\Inbox'], + silent=True) + + +@pytest.mark.parametrize('obj_type', ['folder', 'label']) +def test_folder_crud(db, default_account, mock_imapclient, obj_type): + mock_imapclient.create_folder = mock.Mock() + mock_imapclient.rename_folder = mock.Mock() + mock_imapclient.delete_folder = mock.Mock() + cat = add_fake_category(db.session, default_account.namespace.id, + 'MyFolder') + with 
writable_connection_pool(default_account.id).get() as crispin_client: + if obj_type == 'folder': + create_folder(crispin_client, default_account.id, cat.id) + else: + create_label(crispin_client, default_account.id, cat.id) + mock_imapclient.create_folder.assert_called_with('MyFolder') + + cat.display_name = 'MyRenamedFolder' + db.session.commit() + if obj_type == 'folder': + update_folder(crispin_client, default_account.id, cat.id, + {'old_name': 'MyFolder', + 'new_name': 'MyRenamedFolder'}) + else: + update_label(crispin_client, default_account.id, cat.id, + {'old_name': 'MyFolder', + 'new_name': 'MyRenamedFolder'}) + mock_imapclient.rename_folder.assert_called_with('MyFolder', + 'MyRenamedFolder') + + category_id = cat.id + if obj_type == 'folder': + delete_folder(crispin_client, default_account.id, cat.id) + else: + delete_label(crispin_client, default_account.id, cat.id) + mock_imapclient.delete_folder.assert_called_with('MyRenamedFolder') + db.session.commit() + assert db.session.query(Category).get(category_id) is None + +@pytest.yield_fixture +def patched_syncback_task(monkeypatch): + # Ensures 'create_event' actions fail and all others succeed + def function_for_action(name): + def func(*args): + if name == 'create_event': + raise Exception("Failed to create remote event") + return func + + monkeypatch.setattr("inbox.transactions.actions.function_for_action", function_for_action) + monkeypatch.setattr("inbox.transactions.actions.ACTION_MAX_NR_OF_RETRIES", 1) + yield + monkeypatch.undo() + +# Test that failing to create a remote copy of an event marks all pending actions +# for that event as failed. +def test_failed_event_creation(db, patched_syncback_task, default_account, event): + schedule_action('create_event', event, default_account.namespace.id, db.session) + schedule_action('update_event', event, default_account.namespace.id, db.session) + schedule_action('update_event', event, default_account.namespace.id, db.session) + schedule_action('delete_event', event, default_account.namespace.id, db.session) + db.session.commit() + + NUM_WORKERS = 2 + service = SyncbackService(syncback_id=0, process_number=0, + total_processes=NUM_WORKERS, num_workers=NUM_WORKERS) + service._restart_workers() + service._process_log() + + while not service.task_queue.empty(): + gevent.sleep(0.1) + + # This has to be a separate while-loop because there's a brief moment where + # the task queue is empty, but num_idle_workers hasn't been updated yet. + # On slower systems, we might need to sleep a bit between the while-loops. + while service.num_idle_workers != NUM_WORKERS: + gevent.sleep(0.1) + + q = db.session.query(ActionLog).filter_by(record_id=event.id).all() + assert all(a.status == 'failed' for a in q) diff --git a/inbox/test/imap/test_crispin_client.py b/inbox/test/imap/test_crispin_client.py new file mode 100644 index 000000000..28459bc27 --- /dev/null +++ b/inbox/test/imap/test_crispin_client.py @@ -0,0 +1,651 @@ +# -*- coding: utf-8 -*- +""" +Basic tests for GmailCrispinClient/CrispinClient methods. We replace +imapclient.IMAPClient._imap by a mock in order to test these. In particular, we +want to test that we handle unsolicited FETCH responses, which may be returned +by some providers (Gmail, Fastmail). 
+""" +from datetime import datetime +import mock +import imapclient +import pytest + +from inbox.crispin import (CrispinClient, GmailCrispinClient, GMetadata, + GmailFlags, RawMessage, Flags, + FolderMissingError, localized_folder_names) + + +class MockedIMAPClient(imapclient.IMAPClient): + + def _create_IMAP4(self): + return mock.Mock() + + +@pytest.fixture +def gmail_client(): + conn = MockedIMAPClient(host='somehost') + return GmailCrispinClient(account_id=1, provider_info=None, + email_address='inboxapptest@gmail.com', + conn=conn) + + +@pytest.fixture +def generic_client(): + conn = MockedIMAPClient(host='somehost') + return CrispinClient(account_id=1, provider_info=None, + email_address='inboxapptest@fastmail.fm', conn=conn) + + +@pytest.fixture +def constants(): + # Global constants. + g_msgid = 1494576757102068682 + g_thrid = 1494576757102068682 + seq = 1231 + uid = 1764 + modseq = 95020 + size = 16384 + flags = () + raw_g_labels = '(mot&APY-rhead &A7wDtQPEA6wDvQO,A7kDsQ- \\Inbox)' + unicode_g_labels = [u'motörhead', u'μετάνοια', '\\Inbox'] + + internaldate = '02-Mar-2015 23:36:20 +0000' + body = 'Delivered-To: ...' + body_size = len(body) + + # folder test constant + gmail_role_map = { + '[Gmail]/All Mail': 'all', + 'Inbox': 'inbox', + '[Gmail]/Trash': 'trash', + '[Gmail]/Spam': 'spam', + '[Gmail]/Drafts': 'drafts', + '[Gmail]/Sent Mail': 'sent', + '[Gmail]/Important': 'important', + '[Gmail]/Starred': 'starred', + 'reference': None + } + + gmail_folders = \ + [(('\\HasNoChildren',), '/', u'INBOX'), + (('\\Noselect', '\\HasChildren'), '/', u'[Gmail]'), + (('\\HasNoChildren', '\\All'), '/', u'[Gmail]/All Mail'), + (('\\HasNoChildren', '\\Drafts'), '/', u'[Gmail]/Drafts'), + (('\\HasNoChildren', '\\Important'), '/', u'[Gmail]/Important'), + (('\\HasNoChildren', '\\Sent'), '/', u'[Gmail]/Sent Mail'), + (('\\HasNoChildren', '\\Junk'), '/', u'[Gmail]/Spam'), + (('\\Flagged', '\\HasNoChildren'), '/', u'[Gmail]/Starred'), + (('\\HasNoChildren', '\\Trash'), '/', u'[Gmail]/Trash'), + (('\\HasNoChildren',), '/', u'reference')] + imap_folders = \ + [(('\\HasNoChildren',), '/', u'INBOX'), + (('\\Noselect', '\\HasChildren'), '/', u'SKIP'), + (('\\HasNoChildren', '\\Drafts'), '/', u'Drafts'), + (('\\HasNoChildren', '\\Sent'), '/', u'Sent'), + (('\\HasNoChildren', '\\Sent'), '/', u'Sent Items'), + (('\\HasNoChildren', '\\Junk'), '/', u'Spam'), + (('\\HasNoChildren', '\\Trash'), '/', u'Trash'), + (('\\HasNoChildren',), '/', u'reference')] + + imap_role_map = { + 'INBOX': 'inbox', + 'Trash': 'trash', + 'Drafts': 'drafts', + 'Sent': 'sent', + 'Sent Items': 'sent', + 'Spam': 'spam', + u'reference': None + } + return dict(g_msgid=g_msgid, g_thrid=g_thrid, seq=seq, uid=uid, + modseq=modseq, size=size, flags=flags, + raw_g_labels=raw_g_labels, unicode_g_labels=unicode_g_labels, + body=body, body_size=body_size, internaldate=internaldate, + gmail_role_map=gmail_role_map, gmail_folders=gmail_folders, + imap_role_map=imap_role_map, imap_folders=imap_folders) + + +def patch_gmail_client(monkeypatch, folders): + monkeypatch.setattr(GmailCrispinClient, '_fetch_folder_list', + lambda x: folders) + + conn = MockedIMAPClient(host='somehost') + return GmailCrispinClient(account_id=1, provider_info=None, + email_address='inboxapptest@gmail.com', + conn=conn) + + +def patch_generic_client(monkeypatch, folders): + monkeypatch.setattr(CrispinClient, '_fetch_folder_list', + lambda x: folders) + + conn = MockedIMAPClient(host='somehost') + return CrispinClient(account_id=1, provider_info={}, + 
email_address='inboxapptest@fastmail.fm', conn=conn) + + +def patch_imap4(crispin_client, resp): + crispin_client.conn._imap._command_complete.return_value = ( + 'OK', ['Success']) + crispin_client.conn._imap._untagged_response.return_value = ('OK', resp) + + +def test_g_metadata(gmail_client, constants): + expected_resp = '{seq} (X-GM-THRID {g_thrid} X-GM-MSGID {g_msgid} ' \ + 'RFC822.SIZE {size} UID {uid} MODSEQ ({modseq}))'. \ + format(**constants) + unsolicited_resp = '1198 (UID 1731 MODSEQ (95244) FLAGS (\\Seen))' + patch_imap4(gmail_client, [expected_resp, unsolicited_resp]) + uid = constants['uid'] + g_msgid = constants['g_msgid'] + g_thrid = constants['g_thrid'] + size = constants['size'] + assert gmail_client.g_metadata([uid]) == {uid: GMetadata(g_msgid, g_thrid, + size)} + + +def test_gmail_flags(gmail_client, constants): + expected_resp = '{seq} (FLAGS {flags} X-GM-LABELS {raw_g_labels} ' \ + 'UID {uid} MODSEQ ({modseq}))'.format(**constants) + unsolicited_resp = '1198 (UID 1731 MODSEQ (95244) FLAGS (\\Seen))' + patch_imap4(gmail_client, [expected_resp, unsolicited_resp]) + uid = constants['uid'] + flags = constants['flags'] + modseq = constants['modseq'] + g_labels = constants['unicode_g_labels'] + assert gmail_client.flags([uid]) == {uid: GmailFlags(flags, g_labels, + modseq)} + + +def test_g_msgids(gmail_client, constants): + expected_resp = '{seq} (X-GM-MSGID {g_msgid} ' \ + 'UID {uid} MODSEQ ({modseq}))'.format(**constants) + unsolicited_resp = '1198 (UID 1731 MODSEQ (95244) FLAGS (\\Seen))' + patch_imap4(gmail_client, [expected_resp, unsolicited_resp]) + uid = constants['uid'] + g_msgid = constants['g_msgid'] + assert gmail_client.g_msgids([uid]) == {uid: g_msgid} + + +def test_gmail_body(gmail_client, constants): + expected_resp = ('{seq} (X-GM-MSGID {g_msgid} X-GM-THRID {g_thrid} ' + 'X-GM-LABELS {raw_g_labels} UID {uid} MODSEQ ({modseq}) ' + 'INTERNALDATE "{internaldate}" FLAGS {flags} ' + 'BODY[] {{{body_size}}}'.format(**constants), + constants['body']) + unsolicited_resp = '1198 (UID 1731 MODSEQ (95244) FLAGS (\\Seen))' + patch_imap4(gmail_client, [expected_resp, ')', unsolicited_resp]) + + uid = constants['uid'] + flags = constants['flags'] + g_labels = constants['unicode_g_labels'] + g_thrid = constants['g_thrid'] + g_msgid = constants['g_msgid'] + body = constants['body'] + assert gmail_client.uids([uid]) == [ + RawMessage(uid=long(uid), + internaldate=datetime(2015, 3, 2, 23, 36, 20), + flags=flags, + body=body, + g_labels=g_labels, + g_thrid=g_thrid, + g_msgid=g_msgid) + ] + + +def test_flags(generic_client, constants): + expected_resp = '{seq} (FLAGS {flags} ' \ + 'UID {uid} MODSEQ ({modseq}))'.format(**constants) + unsolicited_resp = '1198 (UID 1731 MODSEQ (95244) FLAGS (\\Seen))' + patch_imap4(generic_client, [expected_resp, unsolicited_resp]) + uid = constants['uid'] + flags = constants['flags'] + assert generic_client.flags([uid]) == {uid: Flags(flags, None)} + + +def test_body(generic_client, constants): + expected_resp = ('{seq} (UID {uid} MODSEQ ({modseq}) ' + 'INTERNALDATE "{internaldate}" FLAGS {flags} ' + 'BODY[] {{{body_size}}}'.format(**constants), + constants['body']) + unsolicited_resp = '1198 (UID 1731 MODSEQ (95244) FLAGS (\\Seen))' + patch_imap4(generic_client, [expected_resp, ')', unsolicited_resp]) + + uid = constants['uid'] + flags = constants['flags'] + body = constants['body'] + + assert generic_client.uids([uid]) == [ + RawMessage(uid=long(uid), + internaldate=datetime(2015, 3, 2, 23, 36, 20), + flags=flags, + body=body, + g_labels=None, 
+ g_thrid=None, + g_msgid=None) + ] + + +def test_internaldate(generic_client, constants): + """ Test that our monkeypatched imaplib works through imapclient """ + dates_to_test = [ + ('6-Mar-2015 10:02:32 +0900', datetime(2015, 3, 6, 1, 2, 32)), + (' 6-Mar-2015 10:02:32 +0900', datetime(2015, 3, 6, 1, 2, 32)), + ('06-Mar-2015 10:02:32 +0900', datetime(2015, 3, 6, 1, 2, 32)), + ('6-Mar-2015 07:02:32 +0900', datetime(2015, 3, 5, 22, 2, 32)), + (' 3-Sep-1922 09:16:51 +0000', datetime(1922, 9, 3, 9, 16, 51)), + ('2-Jan-2015 03:05:37 +0800', datetime(2015, 1, 1, 19, 5, 37)) + ] + + for internaldate_string, native_date in dates_to_test: + constants['internaldate'] = internaldate_string + expected_resp = ('{seq} (UID {uid} MODSEQ ({modseq}) ' + 'INTERNALDATE "{internaldate}" FLAGS {flags} ' + 'BODY[] {{{body_size}}}'.format(**constants), + constants['body']) + patch_imap4(generic_client, [expected_resp, ')']) + + uid = constants['uid'] + assert generic_client.uids([uid]) == [ + RawMessage(uid=long(uid), + internaldate=native_date, + flags=constants['flags'], + body=constants['body'], + g_labels=None, + g_thrid=None, + g_msgid=None) + ] + + +def test_deleted_folder_on_select(monkeypatch, generic_client, constants): + """ Test that a 'select failed EXAMINE' error specifying that a folder + doesn't exist is converted into a FolderMissingError. (Yahoo style) + """ + def raise_invalid_folder_exc(*args, **kwargs): + raise imapclient.IMAPClient.Error("select failed: '[TRYCREATE] EXAMINE" + " error - Folder does not exist or" + " server encountered an error") + + monkeypatch.setattr('imapclient.IMAPClient.select_folder', + raise_invalid_folder_exc) + + with pytest.raises(FolderMissingError): + generic_client.select_folder('missing_folder', lambda: True) + + +def test_deleted_folder_on_fetch(monkeypatch, generic_client, constants): + """ Test that a 'select failed EXAMINE' error specifying that a folder + doesn't exist is converted into a FolderMissingError. (Yahoo style) + """ + def raise_invalid_uid_exc(*args, **kwargs): + raise imapclient.IMAPClient.Error( + '[UNAVAILABLE] UID FETCH Server error while fetching messages') + + monkeypatch.setattr('imapclient.IMAPClient.fetch', + raise_invalid_uid_exc) + + # Simply check that the Error exception is handled. 
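+    # (No assertion on the return value is needed; the call below just must
+    # not let the imapclient error propagate.)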
+ generic_client.uids(["125"]) + + +def test_gmail_folders(monkeypatch, constants): + folders = constants['gmail_folders'] + role_map = constants['gmail_role_map'] + + client = patch_gmail_client(monkeypatch, folders) + + raw_folders = client.folders() + generic_folder_checks(raw_folders, role_map, client, 'gmail') + + +def generic_folder_checks(raw_folders, role_map, client, provider): + + # Should not contain the `\\Noselect' folder + assert filter(lambda y: '\\Noselect' in y, + map(lambda x: x[0], raw_folders)) == [] + if provider == 'gmail': + assert {f.display_name: f.role for f in raw_folders} == role_map + elif provider == 'imap': + for f in raw_folders: + if f.display_name in role_map: + assert f.role == role_map[f.display_name] + else: + assert f.display_name in ['reference'] + assert f.role is None + + folder_names = client.folder_names() + if provider == 'gmail': + for role in ['inbox', 'all', 'trash', 'drafts', 'important', 'sent', + 'spam', 'starred']: + assert role in folder_names + + names = folder_names[role] + assert isinstance(names, list) and len(names) == 1 + elif provider == 'imap': + for role in ['inbox', 'trash', 'drafts', 'sent', 'spam']: + assert role in folder_names + + names = folder_names[role] + assert isinstance(names, list) + + if role == 'sent': + assert len(names) == 2 + else: + assert len(names) == 1 + # Inbox folder should be synced first. + assert client.sync_folders()[0] == 'INBOX' + + +def test_gmail_missing_trash(constants, monkeypatch): + ''' + Test that we can label their folder when they + don't have a folder labeled trash. This test will go through a list + of examples of trash aliases we have seen in the wild, + and check that we are able to properly label those folders. + ''' + # create list of folders that doesn't have a trash folder + folder_base = \ + [(('\\HasNoChildren',), '/', u'INBOX'), + (('\\Noselect', '\\HasChildren'), '/', u'[Gmail]'), + (('\\HasNoChildren', '\\All'), '/', u'[Gmail]/All Mail'), + (('\\HasNoChildren', '\\Drafts'), '/', u'[Gmail]/Drafts'), + (('\\HasNoChildren', '\\Important'), '/', u'[Gmail]/Important'), + (('\\HasNoChildren', '\\Sent'), '/', u'[Gmail]/Sent Mail'), + (('\\HasNoChildren', '\\Junk'), '/', u'[Gmail]/Spam'), + (('\\Flagged', '\\HasNoChildren'), '/', u'[Gmail]/Starred'), + (('\\HasNoChildren',), '/', u'reference')] + check_missing_generic('trash', + folder_base, localized_folder_names['trash'], + 'gmail', constants, monkeypatch) + + +def test_imap_missing_trash(constants, monkeypatch): + ''' + Same strategy as test_gmail_missing_trash, except with imap as a provider + ''' + folder_base = \ + [(('\\HasNoChildren',), '/', u'INBOX'), + (('\\Noselect', '\\HasChildren'), '/', u'SKIP'), + (('\\HasNoChildren', '\\Drafts'), '/', u'Drafts'), + (('\\HasNoChildren', '\\Sent'), '/', u'Sent'), + (('\\HasNoChildren', '\\Sent'), '/', u'Sent Items'), + (('\\HasNoChildren', '\\Junk'), '/', u'Spam'), + (('\\HasNoChildren',), '/', u'reference')] + check_missing_generic('trash', + folder_base, localized_folder_names['trash'], + 'imap', constants, monkeypatch) + + +def test_gmail_missing_spam(constants, monkeypatch): + ''' + Same strategy as test_gmail_missing_trash, except with spam folder aliases + ''' + # Create a list of folders thath doesn't have a spam folder + folder_base = \ + [(('\\HasNoChildren',), '/', u'INBOX'), + (('\\Noselect', '\\HasChildren'), '/', u'[Gmail]'), + (('\\HasNoChildren', '\\All'), '/', u'[Gmail]/All Mail'), + (('\\HasNoChildren', '\\Drafts'), '/', u'[Gmail]/Drafts'), + (('\\HasNoChildren', 
'\\Important'), '/', u'[Gmail]/Important'), + (('\\HasNoChildren', '\\Sent'), '/', u'[Gmail]/Sent Mail'), + (('\\Flagged', '\\HasNoChildren'), '/', u'[Gmail]/Starred'), + (('\\HasNoChildren', '\\Trash'), '/', u'[Gmail]/Trash'), + (('\\HasNoChildren',), '/', u'reference')] + check_missing_generic('spam', folder_base, localized_folder_names['spam'], + 'gmail', constants, monkeypatch) + + +def test_imap_missing_spam(constants, monkeypatch): + ''' + Same strategy as test_gmail_missing_spam, except with imap as a provider + ''' + folder_base = \ + [(('\\HasNoChildren',), '/', u'INBOX'), + (('\\Noselect', '\\HasChildren'), '/', u'SKIP'), + (('\\HasNoChildren', '\\Drafts'), '/', u'Drafts'), + (('\\HasNoChildren', '\\Sent'), '/', u'Sent'), + (('\\HasNoChildren', '\\Sent'), '/', u'Sent Items'), + (('\\HasNoChildren', '\\Trash'), '/', u'Trash'), + (('\\HasNoChildren',), '/', u'reference')] + check_missing_generic('spam', folder_base, localized_folder_names['spam'], + 'imap', constants, monkeypatch) + + +def test_gmail_missing_sent(constants, monkeypatch): + ''' + Same strategy as test_gmail_missing_trash, except with sent folder aliases + ''' + # Create a list of folders thath doesn't have a sent folder + folder_base = \ + [(('\\HasNoChildren',), '/', u'INBOX'), + (('\\Noselect', '\\HasChildren'), '/', u'[Gmail]'), + (('\\HasNoChildren', '\\All'), '/', u'[Gmail]/All Mail'), + (('\\HasNoChildren', '\\Drafts'), '/', u'[Gmail]/Drafts'), + (('\\HasNoChildren', '\\Important'), '/', u'[Gmail]/Important'), + (('\\HasNoChildren', '\\Junk'), '/', u'[Gmail]/Spam'), + (('\\Flagged', '\\HasNoChildren'), '/', u'[Gmail]/Starred'), + (('\\HasNoChildren', '\\Trash'), '/', u'[Gmail]/Trash'), + (('\\HasNoChildren',), '/', u'reference')] + check_missing_generic('sent', folder_base, localized_folder_names['sent'], + 'gmail', constants, monkeypatch) + + +def test_imap_missing_sent(constants, monkeypatch): + ''' + Almost same strategy as test_gmail_missing_sent, + except with imap as a provider + we can't really make call to checking_missing_geneirc, + because imap and sent are + special because there are allowed to be more than 1 sent folder. + ''' + folder_base = \ + [(('\\HasNoChildren',), '/', u'INBOX'), + (('\\Noselect', '\\HasChildren'), '/', u'SKIP'), + (('\\HasNoChildren', '\\Drafts'), '/', u'Drafts'), + (('\\HasNoChildren', '\\Junk'), '/', u'Spam'), + (('\\HasNoChildren', '\\Trash'), '/', u'Trash'), + (('\\HasNoChildren',), '/', u'reference')] + role_map = { + 'INBOX': 'inbox', + 'Trash': 'trash', + 'Drafts': 'drafts', + 'Spam': 'spam', + u'reference': None + } + for role_alias in localized_folder_names['sent']: + folders = folder_base + [(('\\HasNoChildren'), '/', role_alias)] + client = patch_generic_client(monkeypatch, folders) + raw_folders = client.folders() + folder_names = client.folder_names() + role_map[role_alias] = 'sent' + + # Explcit checks. 
Different than check_missing_generic + # and generic_folder_checks, because imap allows + # for 2 sent folders, and I couldn't quite make the check + # be happy with only being given one sent folder alias + for f in raw_folders: + if f.display_name in role_map: + assert f.role == role_map[f.display_name] + else: + assert f.display_name in ['reference'] + assert f.role is None + for role in ['inbox', 'trash', 'drafts', 'sent', 'spam']: + assert role in folder_names + + names = folder_names[role] + assert isinstance(names, list) + assert len(names) == 1 + + del role_map[role_alias] + + +def check_missing_generic(role, folder_base, generic_role_names, + provider, constants, monkeypatch): + ''' + check clients label every folder in generic_role_names as input role + + role: the role that the generic_role_names should be assigned + folder_base: generic list of folders, excluding one that is assigned role + generic_role_names: list of strings that represent common role liases for + ''' + assert folder_base is not None + role_map = constants['gmail_role_map'] \ + if provider == 'gmail' else constants['imap_role_map'] + # role_map is close, but not quite right, because it has a role key + keys_to_remove = [] + # done in two loops to avoid modifying map while iterating through it + for folder_name in role_map: + if role_map[folder_name] == role: + keys_to_remove.append(folder_name) + for key in keys_to_remove: + del role_map[key] + for role_alias in generic_role_names: + # add in a folder with name of role alias, without it's role flag + folders = folder_base + [(('\\HasNoChildren'), '/', role_alias)] + client = patch_gmail_client(monkeypatch, folders) \ + if provider == 'gmail' \ + else patch_generic_client(monkeypatch, folders) + + raw_folders = client.folders() + role_map[role_alias] = role + generic_folder_checks(raw_folders, role_map, client, provider) + del role_map[role_alias] + + +def test_gmail_folders_no_flags(monkeypatch): + """ + Tests that system folders (trash, inbox, sent) without flags can be labeled + """ + + folders = \ + [(('\\HasNoChildren',), '/', u'INBOX'), + (('\\Noselect', '\\HasChildren'), '/', u'[Gmail]'), + (('\\HasNoChildren', '\\All'), '/', u'[Gmail]/All Mail'), + (('\\HasNoChildren', '\\Drafts'), '/', u'[Gmail]/Drafts'), + (('\\HasNoChildren', '\\Important'), '/', u'[Gmail]/Important'), + (('\\HasNoChildren'), '/', u'[Gmail]/Sent Mail'), + (('\\HasNoChildren'), '/', u'[Gmail]/Spam'), + (('\\Flagged', '\\HasNoChildren'), '/', u'[Gmail]/Starred'), + (('\\HasNoChildren'), '/', u'[Gmail]/Trash'), + (('\\HasNoChildren',), '/', u'reference')] + + gmail_role_map = { + '[Gmail]/All Mail': 'all', + 'Inbox': 'inbox', + '[Gmail]/Trash': 'trash', + '[Gmail]/Spam': 'spam', + '[Gmail]/Drafts': 'drafts', + '[Gmail]/Sent Mail': 'sent', + '[Gmail]/Important': 'important', + '[Gmail]/Starred': 'starred', + 'reference': None + } + client = patch_gmail_client(monkeypatch, folders) + + raw_folders = client.folders() + generic_folder_checks(raw_folders, gmail_role_map, client, 'gmail') + + +def test_gmail_many_folders_one_role(monkeypatch, constants): + """ + Tests that accounts with many folders with + similar system folders have only one role. + + i.e accounts with [Imap]/Trash, Trash, and [Gmail]/Trash + should only have one folder with the role trash + """ + + # some duplitace folders where one has been flagged, + # and neither have been flagged + # in both cases, only one should come out flagged. 
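+    # ([Imap]/Trash and [Imap]/Sent below carry no \Trash or \Sent flags, so
+    # the flagged [Gmail] folders should keep those roles and each role should
+    # still map to exactly one folder.)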
+ folders = constants['gmail_folders'] + duplicates = [(('\HasNoChildren'), '/', u'[Imap]/Trash'), + (('\\HasNoChildren'), '/', u'[Imap]/Sent')] + folders += duplicates + # This test adds [Imap]/Trash and [Imap]/sent + # because we've seen them in the wild with gmail + client = patch_gmail_client(monkeypatch, folders) + + raw_folders = client.folders() + + folder_names = client.folder_names() + for role in ['inbox', 'all', 'trash', 'drafts', 'important', 'sent', + 'spam', 'starred']: + assert role in folder_names + test_set = filter(lambda x: x == role, + map(lambda y: y.role, raw_folders)) + assert len(test_set) == 1, "assigned wrong number of {}".format(role) + + names = folder_names[role] + assert isinstance(names, list) + assert len(names) == 1, "assign same role to %s folders" % len(names) + + +def test_imap_folders(monkeypatch, constants): + folders = constants['imap_folders'] + role_map = constants['imap_role_map'] + + client = patch_generic_client(monkeypatch, folders) + + raw_folders = client.folders() + generic_folder_checks(raw_folders, role_map, client, 'imap') + + +def test_imap_folders_no_flags(monkeypatch, constants): + """ + Tests that system folders (trash, inbox, sent) without flags can be labeled + """ + folders = \ + [(('\\HasNoChildren',), '/', u'INBOX'), + (('\\Noselect', '\\HasChildren'), '/', u'SKIP'), + (('\\HasNoChildren', '\\Drafts'), '/', u'Drafts'), + (('\\HasNoChildren'), '/', u'Sent'), + (('\\HasNoChildren'), '/', u'Sent Items'), + (('\\HasNoChildren', '\\Junk'), '/', u'Spam'), + (('\\HasNoChildren'), '/', u'Trash'), + (('\\HasNoChildren',), '/', u'reference')] + + role_map = { + 'INBOX': 'inbox', + 'Trash': 'trash', + 'Drafts': 'drafts', + 'Sent': 'sent', + 'Sent Items': 'sent', + 'Spam': 'spam', + u'[Gmail]/Sent Mail': None, + u'[Gmail]/Trash': 'trash', + u'reference': None + } + client = patch_generic_client(monkeypatch, folders) + + raw_folders = client.folders() + generic_folder_checks(raw_folders, role_map, client, 'imap') + + +def test_imap_many_folders_one_role(monkeypatch, constants): + """ + Tests that accounts with many folders with + similar system folders have only one role. + + i.e accounts with [Imap]/Trash, Trash, and [Gmail]/Trash + should only have one folder with the role trash + + This test should result in 2 sent folders, and 2 trash folders. 
There is an + extra folder with the name sent, but since it doesn't have flags + and there is already sent folder, than we don't coerce it to a sent folder + """ + folders = constants['imap_folders'] + duplicates = [(('\HasNoChildren', '\\Trash'), '/', u'[Gmail]/Trash'), + (('\\HasNoChildren'), '/', u'[Gmail]/Sent')] + folders += duplicates + + client = patch_generic_client(monkeypatch, folders) + + raw_folders = client.folders() + folder_names = client.folder_names() + for role in ['inbox', 'trash', 'drafts', 'sent', 'spam']: + assert role in folder_names + number_roles = 2 if (role in ['sent', 'trash']) else 1 + test_set = filter(lambda x: x == role, + map(lambda y: y.role, raw_folders)) + assert len(test_set) == number_roles,\ + "assigned wrong number of {}".format(role) diff --git a/inbox/test/imap/test_delete_handling.py b/inbox/test/imap/test_delete_handling.py new file mode 100644 index 000000000..e423b52c4 --- /dev/null +++ b/inbox/test/imap/test_delete_handling.py @@ -0,0 +1,299 @@ +# flake8: noqa: F401, F811 +from datetime import datetime, timedelta +import pytest +from sqlalchemy import desc +import gevent +from gevent.lock import Semaphore +from sqlalchemy.orm.exc import ObjectDeletedError +from inbox.crispin import GmailFlags +from inbox.mailsync.backends.imap.common import (remove_deleted_uids, + update_metadata) +from inbox.mailsync.gc import DeleteHandler, LabelRenameHandler +from inbox.models import Folder, Message, Transaction +from inbox.models.label import Label +from inbox.util.testutils import mock_imapclient, MockIMAPClient +from inbox.test.util.base import add_fake_imapuid, add_fake_message + + +@pytest.fixture() +def marked_deleted_message(db, message): + deleted_timestamp = datetime(2015, 2, 22, 22, 22, 22) + message.deleted_at = deleted_timestamp + db.session.commit() + return message + + +def test_messages_deleted_asynchronously(db, default_account, thread, message, + imapuid, folder): + msg_uid = imapuid.msg_uid + update_metadata(default_account.id, folder.id, folder.canonical_name, + {msg_uid: GmailFlags((), ('label',), None)}, db.session) + assert 'label' in [cat.display_name for cat in message.categories] + remove_deleted_uids(default_account.id, folder.id, [msg_uid]) + db.session.expire_all() + assert abs((message.deleted_at - datetime.utcnow()).total_seconds()) < 2 + # Check that message categories do get updated synchronously. 
+ assert 'label' not in [cat.display_name for cat in message.categories] + + +def test_drafts_deleted_synchronously(db, default_account, thread, message, + imapuid, folder): + message.is_draft = True + db.session.commit() + msg_uid = imapuid.msg_uid + remove_deleted_uids(default_account.id, folder.id, [msg_uid]) + db.session.expire_all() + with pytest.raises(ObjectDeletedError): + message.id + with pytest.raises(ObjectDeletedError): + thread.id + + +def test_deleting_from_a_message_with_multiple_uids(db, default_account, + message, thread): + """Check that deleting a imapuid from a message with + multiple uids doesn't mark the message for deletion.""" + inbox_folder = Folder.find_or_create(db.session, default_account, 'inbox', + 'inbox') + sent_folder = Folder.find_or_create(db.session, default_account, 'sent', + 'sent') + + add_fake_imapuid(db.session, default_account.id, message, sent_folder, + 1337) + add_fake_imapuid(db.session, default_account.id, message, inbox_folder, + 2222) + + assert len(message.imapuids) == 2 + + remove_deleted_uids(default_account.id, inbox_folder.id, [2222]) + db.session.expire_all() + + assert message.deleted_at is None, \ + "The associated message should not have been marked for deletion." + + assert len(message.imapuids) == 1, \ + "The message should have only one imapuid." + + +def test_deletion_with_short_ttl(db, default_account, default_namespace, + marked_deleted_message, thread, folder): + handler = DeleteHandler(account_id=default_account.id, + namespace_id=default_namespace.id, + provider_name=default_account.provider, + uid_accessor=lambda m: m.imapuids, + message_ttl=0, thread_ttl=0) + handler.check(marked_deleted_message.deleted_at + timedelta(seconds=1)) + handler.gc_deleted_threads(thread.deleted_at + timedelta(seconds=1)) + db.session.expire_all() + # Check that objects were actually deleted + with pytest.raises(ObjectDeletedError): + marked_deleted_message.id + with pytest.raises(ObjectDeletedError): + thread.id + + +def test_thread_deletion_with_short_ttl(db, default_account, default_namespace, + marked_deleted_message, thread, folder): + handler = DeleteHandler(account_id=default_account.id, + namespace_id=default_namespace.id, + provider_name=default_account.provider, + uid_accessor=lambda m: m.imapuids, + message_ttl=0, thread_ttl=120) + + delete_time = marked_deleted_message.deleted_at + handler.check(delete_time + timedelta(seconds=1)) + handler.gc_deleted_threads(delete_time + timedelta(seconds=1)) + db.session.expire_all() + + with pytest.raises(ObjectDeletedError): + marked_deleted_message.id + thread.id + assert thread.deleted_at is not None + + handler.check(thread.deleted_at + timedelta(seconds=121)) + handler.gc_deleted_threads(thread.deleted_at + timedelta(seconds=121)) + db.session.expire_all() + + with pytest.raises(ObjectDeletedError): + marked_deleted_message.id + with pytest.raises(ObjectDeletedError): + thread.id + + +def test_non_orphaned_messages_get_unmarked(db, default_account, + default_namespace, + marked_deleted_message, thread, + folder, imapuid): + handler = DeleteHandler(account_id=default_account.id, + namespace_id=default_namespace.id, + provider_name=default_account.provider, + uid_accessor=lambda m: m.imapuids, + message_ttl=0) + handler.check(marked_deleted_message.deleted_at + timedelta(seconds=1)) + db.session.expire_all() + # message actually has an imapuid associated, so check that the + # DeleteHandler unmarked it. 
+ assert marked_deleted_message.deleted_at is None + + +def test_threads_only_deleted_when_no_messages_left(db, default_account, + default_namespace, + marked_deleted_message, + thread, folder): + handler = DeleteHandler(account_id=default_account.id, + namespace_id=default_namespace.id, + provider_name=default_account.provider, + uid_accessor=lambda m: m.imapuids, + message_ttl=0) + # Add another message onto the thread + add_fake_message(db.session, default_namespace.id, thread) + + handler.check(marked_deleted_message.deleted_at + timedelta(seconds=1)) + db.session.expire_all() + # Check that the orphaned message was deleted. + with pytest.raises(ObjectDeletedError): + marked_deleted_message.id + # Would raise ObjectDeletedError if thread was deleted. + thread.id + + +def test_deletion_deferred_with_longer_ttl(db, default_account, + default_namespace, + marked_deleted_message, thread, + folder): + handler = DeleteHandler(account_id=default_account.id, + namespace_id=default_namespace.id, + provider_name=default_account.provider, + uid_accessor=lambda m: m.imapuids, + message_ttl=5) + db.session.commit() + + handler.check(marked_deleted_message.deleted_at + timedelta(seconds=1)) + # Would raise ObjectDeletedError if objects were deleted + marked_deleted_message.id + thread.id + + +def test_deletion_creates_revision(db, default_account, default_namespace, + marked_deleted_message, thread, folder): + message_id = marked_deleted_message.id + thread_id = thread.id + handler = DeleteHandler(account_id=default_account.id, + namespace_id=default_namespace.id, + provider_name=default_account.provider, + uid_accessor=lambda m: m.imapuids, + message_ttl=0) + handler.check(marked_deleted_message.deleted_at + timedelta(seconds=1)) + db.session.commit() + latest_message_transaction = db.session.query(Transaction). \ + filter(Transaction.record_id == message_id, + Transaction.object_type == 'message', + Transaction.namespace_id == default_namespace.id). \ + order_by(desc(Transaction.id)).first() + assert latest_message_transaction.command == 'delete' + + latest_thread_transaction = db.session.query(Transaction). \ + filter(Transaction.record_id == thread_id, + Transaction.object_type == 'thread', + Transaction.namespace_id == default_namespace.id). \ + order_by(desc(Transaction.id)).first() + assert latest_thread_transaction.command == 'update' + + +def test_deleted_labels_get_gced(empty_db, default_account, thread, message, + imapuid, folder): + # Check that only the labels without messages attached to them + # get deleted. + default_namespace = default_account.namespace + + # Create a label w/ no messages attached. + label = Label.find_or_create(empty_db.session, default_account, + 'dangling label') + label.deleted_at = datetime.utcnow() + label.category.deleted_at = datetime.utcnow() + label_id = label.id + empty_db.session.commit() + + # Create a label with attached messages. 
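+ # (The GmailFlags update below attaches the 'label' category to `message`,
+ # so unlike the dangling label above it should survive
+ # gc_deleted_categories.)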
+ msg_uid = imapuid.msg_uid + update_metadata(default_account.id, folder.id, folder.canonical_name, + {msg_uid: GmailFlags((), ('label',), None)}, empty_db.session) + + label_ids = [] + for cat in message.categories: + for l in cat.labels: + label_ids.append(l.id) + + handler = DeleteHandler(account_id=default_account.id, + namespace_id=default_namespace.id, + provider_name=default_account.provider, + uid_accessor=lambda m: m.imapuids, + message_ttl=0) + handler.gc_deleted_categories() + empty_db.session.commit() + + # Check that the first label got gc'ed + marked_deleted = empty_db.session.query(Label).get(label_id) + assert marked_deleted is None + + # Check that the other labels didn't. + for label_id in label_ids: + assert empty_db.session.query(Label).get(label_id) is not None + + +def test_renamed_label_refresh(db, default_account, thread, message, + imapuid, folder, mock_imapclient, monkeypatch): + # Check that imapuids see their labels refreshed after running + # the LabelRenameHandler. + msg_uid = imapuid.msg_uid + uid_dict = {msg_uid: GmailFlags((), ('stale label',), ('23',))} + + update_metadata(default_account.id, folder.id, folder.canonical_name, + uid_dict, db.session) + + new_flags = {msg_uid: {'FLAGS': ('\\Seen',), 'X-GM-LABELS': ('new label',), + 'MODSEQ': ('23',)}} + mock_imapclient._data['[Gmail]/All mail'] = new_flags + + mock_imapclient.add_folder_data(folder.name, new_flags) + + monkeypatch.setattr(MockIMAPClient, 'search', + lambda x, y: [msg_uid]) + + semaphore = Semaphore(value=1) + + rename_handler = LabelRenameHandler(default_account.id, + default_account.namespace.id, + 'new label', semaphore) + + # Acquire the semaphore to check that LabelRenameHandlers block if + # the semaphore is in-use. + semaphore.acquire() + rename_handler.start() + + gevent.sleep(0) # yield to the handler + + labels = list(imapuid.labels) + assert len(labels) == 1 + assert labels[0].name == 'stale label' + semaphore.release() + rename_handler.join() + + db.session.refresh(imapuid) + # Now check that the label got updated. + labels = list(imapuid.labels) + assert len(labels) == 1 + assert labels[0].name == 'new label' + + +def test_reply_to_message_cascade(db, default_namespace, thread, message): + reply = add_fake_message(db.session, default_namespace.id, thread) + reply.reply_to_message = message + db.session.commit() + + db.session.expire_all() + db.session.delete(message) + db.session.commit() + + assert db.session.query(Message).filter(Message.id == message.id).all() == [] + assert db.session.query(Message).filter(Message.id == reply.id).all() == [reply] diff --git a/inbox/test/imap/test_folder_state.py b/inbox/test/imap/test_folder_state.py new file mode 100644 index 000000000..5fb3328f9 --- /dev/null +++ b/inbox/test/imap/test_folder_state.py @@ -0,0 +1,77 @@ +from inbox.crispin import RawFolder +from inbox.models import Folder +from inbox.mailsync.backends.imap.monitor import ImapSyncMonitor +from inbox.models.backends.imap import ImapFolderInfo, ImapFolderSyncStatus + + +def add_imap_status_info_rows(folder_id, account_id, db_session): + """Add placeholder ImapFolderSyncStatus and ImapFolderInfo rows for this + folder_id if none exist. 
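+ (The values used below, state='initial', uidvalidity=1 and
+ highestmodseq=22, are arbitrary placeholders for these tests.)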
+ """ + if not db_session.query(ImapFolderSyncStatus).filter_by( + account_id=account_id, folder_id=folder_id).all(): + db_session.add(ImapFolderSyncStatus( + account_id=account_id, + folder_id=folder_id, + state='initial')) + + if not db_session.query(ImapFolderInfo).filter_by( + account_id=account_id, folder_id=folder_id).all(): + db_session.add(ImapFolderInfo( + account_id=account_id, + folder_id=folder_id, + uidvalidity=1, + highestmodseq=22)) + + +def create_foldersyncstatuses(db, default_account): + # Create a bunch of folder sync statuses. + monitor = ImapSyncMonitor(default_account) + + folder_names_and_roles = { + RawFolder('INBOX', 'inbox'), + RawFolder('Sent Mail', 'sent'), + RawFolder('Sent Messages', 'sent'), + RawFolder('Drafts', 'drafts'), + RawFolder('Miscellania', None), + RawFolder('miscellania', None), + RawFolder('Recipes', None), + } + monitor.save_folder_names(db.session, folder_names_and_roles) + folders = db.session.query(Folder).filter_by(account_id=default_account.id) + for folder in folders: + add_imap_status_info_rows(folder.id, default_account.id, db.session) + db.session.commit() + + +def test_imap_folder_run_state_always_true(db, default_account): + """Test that for an IMAP account, the sync_should_run flag on the + account's folder statuses is always true. (This is not the case for + all backends, and may not always be the case in future. Other backends + should have an appropriate test parallel to this one.) + + The sync_should_run flag for a folder reflects whether that folder's + sync should be running iff the account's sync should be running, so + overall state depends on the account.sync_should_run bit being correct. + """ + create_foldersyncstatuses(db, default_account) + + for folderstatus in default_account.foldersyncstatuses: + assert folderstatus.sync_should_run is True + + +def test_imap_folder_sync_enabled(db, default_account): + """Test that the IMAP folder's sync_enabled property mirrors the account + level sync_enabled property. (Again, this might not be the case for non- + IMAP backends.) + """ + create_foldersyncstatuses(db, default_account) + + assert all([fs.sync_enabled for fs in default_account.foldersyncstatuses]) + + # Disable sync. Folders should now not have sync_enabled. 
+ default_account.disable_sync('testing') + db.session.commit() + + assert all([not fs.sync_enabled + for fs in default_account.foldersyncstatuses]) diff --git a/inbox/test/imap/test_folder_sync.py b/inbox/test/imap/test_folder_sync.py new file mode 100644 index 000000000..473ea639f --- /dev/null +++ b/inbox/test/imap/test_folder_sync.py @@ -0,0 +1,338 @@ +# flake8: noqa: F401, F811 +import pytest +from hashlib import sha256 +from gevent.lock import BoundedSemaphore +from sqlalchemy.orm.exc import ObjectDeletedError +from inbox.models import Folder, Message +from inbox.models.backends.imap import (ImapFolderSyncStatus, ImapUid, + ImapFolderInfo) +from inbox.mailsync.backends.imap.generic import (FolderSyncEngine, UidInvalid, + MAX_UIDINVALID_RESYNCS) +from inbox.mailsync.backends.gmail import GmailFolderSyncEngine +from inbox.mailsync.backends.base import MailsyncDone +from inbox.test.imap.data import uids, uid_data # noqa +from inbox.util.testutils import mock_imapclient # noqa + + +def create_folder_with_syncstatus(account, name, canonical_name, + db_session): + folder = Folder.find_or_create(db_session, account, name, canonical_name) + folder.imapsyncstatus = ImapFolderSyncStatus(account=account) + db_session.commit() + return folder + + +@pytest.fixture +def inbox_folder(db, generic_account): + return create_folder_with_syncstatus(generic_account, 'Inbox', 'inbox', + db.session) + + +@pytest.fixture +def generic_trash_folder(db, generic_account): + return create_folder_with_syncstatus(generic_account, '/Trash', + 'trash', db.session) + + +@pytest.fixture +def all_mail_folder(db, default_account): + return create_folder_with_syncstatus(default_account, '[Gmail]/All Mail', + 'all', db.session) + + +@pytest.fixture +def trash_folder(db, default_account): + return create_folder_with_syncstatus(default_account, '[Gmail]/Trash', + 'trash', db.session) + + +def test_initial_sync(db, generic_account, inbox_folder, mock_imapclient): + # We should really be using hypothesis.given() to generate lots of + # different uid sets, but it's not trivial to ensure that no state is + # carried over between runs. This will have to suffice for now as a way to + # at least establish coverage. + uid_dict = uids.example() + mock_imapclient.add_folder_data(inbox_folder.name, uid_dict) + + folder_sync_engine = FolderSyncEngine(generic_account.id, + generic_account.namespace.id, + inbox_folder.name, + generic_account.email_address, + 'custom', + BoundedSemaphore(1)) + folder_sync_engine.initial_sync() + + saved_uids = db.session.query(ImapUid).filter( + ImapUid.folder_id == inbox_folder.id) + assert {u.msg_uid for u in saved_uids} == set(uid_dict) + + saved_message_hashes = {u.message.data_sha256 for u in saved_uids} + assert saved_message_hashes == {sha256(v['BODY[]']).hexdigest() for v in + uid_dict.values()} + + +def test_new_uids_synced_when_polling(db, generic_account, inbox_folder, + mock_imapclient): + uid_dict = uids.example() + mock_imapclient.add_folder_data(inbox_folder.name, uid_dict) + inbox_folder.imapfolderinfo = ImapFolderInfo(account=generic_account, + uidvalidity=1, + uidnext=1) + db.session.commit() + folder_sync_engine = FolderSyncEngine(generic_account.id, + generic_account.namespace.id, + inbox_folder.name, + generic_account.email_address, + 'custom', + BoundedSemaphore(1)) + # Don't sleep at the end of poll_impl before returning. 
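+ # (poll_frequency is the interval poll_impl sleeps for before returning;
+ # zeroing it keeps the test from blocking.)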
+ folder_sync_engine.poll_frequency = 0 + folder_sync_engine.poll_impl() + + saved_uids = db.session.query(ImapUid).filter( + ImapUid.folder_id == inbox_folder.id) + assert {u.msg_uid for u in saved_uids} == set(uid_dict) + + +def test_condstore_flags_refresh(db, default_account, all_mail_folder, + mock_imapclient, monkeypatch): + monkeypatch.setattr( + 'inbox.mailsync.backends.imap.generic.CONDSTORE_FLAGS_REFRESH_BATCH_SIZE', + 10) + uid_dict = uids.example() + mock_imapclient.add_folder_data(all_mail_folder.name, uid_dict) + mock_imapclient.capabilities = lambda: ['CONDSTORE'] + + folder_sync_engine = FolderSyncEngine(default_account.id, + default_account.namespace.id, + all_mail_folder.name, + default_account.email_address, + 'gmail', + BoundedSemaphore(1)) + folder_sync_engine.initial_sync() + + # Change the labels provided by the mock IMAP server + for k, v in mock_imapclient._data[all_mail_folder.name].items(): + v['X-GM-LABELS'] = ('newlabel',) + v['MODSEQ'] = (k,) + + folder_sync_engine.highestmodseq = 0 + # Don't sleep at the end of poll_impl before returning. + folder_sync_engine.poll_frequency = 0 + folder_sync_engine.poll_impl() + imapuids = db.session.query(ImapUid). \ + filter_by(folder_id=all_mail_folder.id).all() + for imapuid in imapuids: + assert 'newlabel' in [l.name for l in imapuid.labels] + + assert folder_sync_engine.highestmodseq == mock_imapclient.folder_status( + all_mail_folder.name, ['HIGHESTMODSEQ'])['HIGHESTMODSEQ'] + + +def test_generic_flags_refresh_expunges_transient_uids( + db, generic_account, inbox_folder, mock_imapclient, monkeypatch): + # Check that we delete UIDs which are synced but quickly deleted, so never + # show up in flags refresh. + uid_dict = uids.example() + mock_imapclient.add_folder_data(inbox_folder.name, uid_dict) + inbox_folder.imapfolderinfo = ImapFolderInfo(account=generic_account, + uidvalidity=1, + uidnext=1) + db.session.commit() + folder_sync_engine = FolderSyncEngine(generic_account.id, + generic_account.namespace.id, + inbox_folder.name, + generic_account.email_address, + 'custom', + BoundedSemaphore(1)) + folder_sync_engine.initial_sync() + # Don't sleep at the end of poll_impl before returning. 
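+ # (Further down, last_slow_refresh is set to None, presumably to force the
+ # next poll_impl to run a full flags refresh, which is what expunges the
+ # transient UID.)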
+ folder_sync_engine.poll_frequency = 0 + folder_sync_engine.poll_impl() + msg = db.session.query(Message).filter_by( + namespace_id=generic_account.namespace.id).first() + transient_uid = ImapUid(folder=inbox_folder, account=generic_account, + message=msg, msg_uid=max(uid_dict) + 1) + db.session.add(transient_uid) + db.session.commit() + folder_sync_engine.last_slow_refresh = None + folder_sync_engine.poll_impl() + with pytest.raises(ObjectDeletedError): + transient_uid.id + + +def test_handle_uidinvalid(db, generic_account, inbox_folder, mock_imapclient): + uid_dict = uids.example() + mock_imapclient.add_folder_data(inbox_folder.name, uid_dict) + inbox_folder.imapfolderinfo = ImapFolderInfo(account=generic_account, + uidvalidity=1, + uidnext=1) + db.session.commit() + folder_sync_engine = FolderSyncEngine(generic_account.id, + generic_account.namespace.id, + inbox_folder.name, + generic_account.email_address, + 'custom', + BoundedSemaphore(1)) + folder_sync_engine.initial_sync() + mock_imapclient.uidvalidity = 2 + with pytest.raises(UidInvalid): + folder_sync_engine.poll_impl() + + new_state = folder_sync_engine.resync_uids() + + assert new_state == 'initial' + assert db.session.query(ImapUid).filter( + ImapUid.folder_id == inbox_folder.id).all() == [] + + +def test_handle_uidinvalid_loops(db, generic_account, inbox_folder, + mock_imapclient, monkeypatch): + + import inbox.mailsync.backends.imap.generic as generic_import + + mock_imapclient.uidvalidity = 1 + + # We're using a list here because of weird monkeypatching shenanigans. + uidinvalid_count = [] + + def fake_poll_function(self): + uidinvalid_count.append(1) + raise UidInvalid + + monkeypatch.setattr("inbox.mailsync.backends.imap.generic.FolderSyncEngine.poll", + fake_poll_function) + + uid_dict = uids.example() + mock_imapclient.add_folder_data(inbox_folder.name, uid_dict) + inbox_folder.imapfolderinfo = ImapFolderInfo(account=generic_account, + uidvalidity=1, + uidnext=1) + db.session.commit() + folder_sync_engine = generic_import.FolderSyncEngine(generic_account.id, + generic_account.namespace.id, + inbox_folder.name, + generic_account.email_address, + 'custom', + BoundedSemaphore(1)) + + folder_sync_engine.state = 'poll' + + db.session.expunge(inbox_folder.imapsyncstatus) + + with pytest.raises(MailsyncDone): + folder_sync_engine._run() + + assert len(uidinvalid_count) == MAX_UIDINVALID_RESYNCS + 1 + + +def raise_imap_error(self): + from imaplib import IMAP4 + raise IMAP4.error('Unexpected IDLE response') + + +def test_gmail_initial_sync(db, default_account, all_mail_folder, + mock_imapclient): + uid_dict = uids.example() + mock_imapclient.add_folder_data(all_mail_folder.name, uid_dict) + mock_imapclient.list_folders = lambda: [(('\\All', '\\HasNoChildren',), + '/', u'[Gmail]/All Mail')] + mock_imapclient.idle = lambda: None + mock_imapclient.idle_check = raise_imap_error + + folder_sync_engine = GmailFolderSyncEngine(default_account.id, + default_account.namespace.id, + all_mail_folder.name, + default_account.email_address, + 'gmail', + BoundedSemaphore(1)) + folder_sync_engine.initial_sync() + + saved_uids = db.session.query(ImapUid).filter( + ImapUid.folder_id == all_mail_folder.id) + assert {u.msg_uid for u in saved_uids} == set(uid_dict) + + +def test_gmail_message_deduplication(db, default_account, all_mail_folder, + trash_folder, mock_imapclient): + uid = 22 + uid_values = uid_data.example() + + mock_imapclient.list_folders = lambda: [(('\\All', '\\HasNoChildren',), + '/', u'[Gmail]/All Mail'), + (('\\Trash', 
'\\HasNoChildren',), + '/', u'[Gmail]/Trash')] + mock_imapclient.idle = lambda: None + mock_imapclient.add_folder_data(all_mail_folder.name, {uid: uid_values}) + mock_imapclient.add_folder_data(trash_folder.name, {uid: uid_values}) + mock_imapclient.idle_check = raise_imap_error + + all_folder_sync_engine = GmailFolderSyncEngine( + default_account.id, default_account.namespace.id, all_mail_folder.name, + default_account.email_address, 'gmail', + BoundedSemaphore(1)) + all_folder_sync_engine.initial_sync() + + trash_folder_sync_engine = GmailFolderSyncEngine( + default_account.id, default_account.namespace.id, trash_folder.name, + default_account.email_address, 'gmail', + BoundedSemaphore(1)) + trash_folder_sync_engine.initial_sync() + + # Check that we have two uids, but just one message. + assert [(uid,)] == db.session.query(ImapUid.msg_uid).filter( + ImapUid.folder_id == all_mail_folder.id).all() + + assert [(uid,)] == db.session.query(ImapUid.msg_uid).filter( + ImapUid.folder_id == trash_folder.id).all() + + assert db.session.query(Message).filter( + Message.namespace_id == default_account.namespace.id, + Message.g_msgid == uid_values['X-GM-MSGID']).count() == 1 + + +def test_imap_message_deduplication(db, generic_account, inbox_folder, + generic_trash_folder, mock_imapclient): + uid = 22 + uid_values = uid_data.example() + + mock_imapclient.list_folders = lambda: [(('\\All', '\\HasNoChildren',), + '/', u'/Inbox'), + (('\\Trash', '\\HasNoChildren',), + '/', u'/Trash')] + mock_imapclient.idle = lambda: None + mock_imapclient.add_folder_data(inbox_folder.name, {uid: uid_values}) + mock_imapclient.add_folder_data(generic_trash_folder.name, + {uid: uid_values}) + + folder_sync_engine = FolderSyncEngine( + generic_account.id, + generic_account.namespace.id, + inbox_folder.name, + generic_account.email_address, + 'custom', + BoundedSemaphore(1)) + folder_sync_engine.initial_sync() + + trash_folder_sync_engine = FolderSyncEngine( + generic_account.id, + generic_account.namespace.id, + generic_trash_folder.name, + generic_account.email_address, + 'custom', + BoundedSemaphore(1)) + trash_folder_sync_engine.initial_sync() + + # Check that we have two uids, but just one message. 
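+ # (Generic IMAP has no X-GM-MSGID to key on, so deduplication uses the
+ # SHA-256 of the raw BODY[]; both folders were seeded with the same
+ # uid_values, so the two ImapUids should share a single Message row.)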
+ assert [(uid,)] == db.session.query(ImapUid.msg_uid).filter( + ImapUid.folder_id == inbox_folder.id).all() + + assert [(uid,)] == db.session.query(ImapUid.msg_uid).filter( + ImapUid.folder_id == generic_trash_folder.id).all() + + # used to uniquely ID messages + body_sha = sha256(uid_values['BODY[]']).hexdigest() + + assert db.session.query(Message).filter( + Message.namespace_id == generic_account.namespace.id, + Message.data_sha256 == body_sha).count() == 1 diff --git a/inbox/test/imap/test_full_imap_enabled.py b/inbox/test/imap/test_full_imap_enabled.py new file mode 100644 index 000000000..94fe707e7 --- /dev/null +++ b/inbox/test/imap/test_full_imap_enabled.py @@ -0,0 +1,50 @@ +import pytest +from imapclient import IMAPClient +from mock import Mock + +from inbox.auth.generic import GenericAuthHandler +from inbox.basicauth import UserRecoverableConfigError + + +class MockIMAPClient(IMAPClient): + + def __init__(self): + super(MockIMAPClient, self).__init__('randomhost') + + def _create_IMAP4(self): + return Mock() + + def logout(self): + pass + + +def test_imap_not_fully_enabled(monkeypatch): + + def folder_list_fail(conn): + raise Exception("LIST failed: '[ALERT] full IMAP support " + "is NOT enabled for this account'") + + monkeypatch.setattr('imapclient.IMAPClient.list_folders', + folder_list_fail) + + def fake_connect(account): + return MockIMAPClient() + + response = { + 'email': 'test@test.com', + 'password': 'test123', + 'imap_server_host': '0.0.0.0', + 'imap_server_port': 22, + 'smtp_server_host': '0.0.0.0', + 'smtp_server_port': 23 + } + + handler = GenericAuthHandler('custom') + acct = handler.create_account( + 'test@test.com', + response) + handler.connect_account = fake_connect + handler._supports_condstore = lambda x: True + with pytest.raises(UserRecoverableConfigError): + verified = handler.verify_account(acct) + assert verified is not True diff --git a/inbox/test/imap/test_labels.py b/inbox/test/imap/test_labels.py new file mode 100644 index 000000000..33e27d815 --- /dev/null +++ b/inbox/test/imap/test_labels.py @@ -0,0 +1,271 @@ +import json + +import pytest + +from inbox.mailsync.backends.imap.common import update_message_metadata + +from inbox.test.util.base import (default_account, add_fake_folder, add_fake_message, + add_fake_thread, add_fake_imapuid) +from inbox.test.api.base import api_client + +__all__ = ['default_account', 'api_client'] + + +def add_fake_label(db_session, default_account, display_name, name): + from inbox.models.label import Label + return Label.find_or_create(db_session, default_account, display_name, name) + + +@pytest.fixture +def folder_and_message_maps(db, default_account): + folder_map, message_map = {}, {} + for name in ('all', 'trash', 'spam'): + # Create a folder + display_name = name.capitalize() if name != 'all' else 'All Mail' + folder = add_fake_folder(db.session, default_account, display_name, name) + thread = add_fake_thread(db.session, default_account.namespace.id) + # Create a message in the folder + message = add_fake_message(db.session, default_account.namespace.id, + thread) + add_fake_imapuid(db.session, default_account.id, message, folder, 13) + update_message_metadata(db.session, default_account, message, False) + db.session.commit() + folder_map[name] = folder + message_map[name] = message + return folder_map, message_map + + +def add_inbox_label(db, default_account, message): + assert len(message.imapuids) == 1 + imapuid = message.imapuids[0] + assert set([c.name for c in imapuid.categories]) == set(['all']) + 
imapuid.update_labels(['\\Inbox']) + db.session.commit() + assert set([c.name for c in imapuid.categories]) == set(['all', 'inbox']) + update_message_metadata(db.session, default_account, message, False) + db.session.commit() + return message + + +def add_custom_label(db, default_account, message): + assert len(message.imapuids) == 1 + imapuid = message.imapuids[0] + existing = [c.name for c in imapuid.categories][0] + imapuid.update_labels(['<3']) + db.session.commit() + assert set([c.name for c in imapuid.categories]) == set([existing, '']) + update_message_metadata(db.session, default_account, message, False) + db.session.commit() + return message + + +@pytest.mark.parametrize('label', ['all', 'trash', 'spam']) +def test_validation(db, api_client, default_account, folder_and_message_maps, + label): + folder_map, message_map = folder_and_message_maps + + message = message_map[label] + resp_data = api_client.get_data('/messages/{}'.format(message.public_id)) + labels = resp_data['labels'] + assert len(labels) == 1 + assert labels[0]['name'] == label + existing_label = labels[0]['id'] + + # Adding more than one mutually exclusive label is not allowed. + # For example, adding 'trash' and 'spam'. + # (Adding one is okay because it's simply replaced). + labels_to_add = [] + for key in message_map: + if key == label: + continue + labels_to_add += [folder_map[key].category.public_id] + + response = api_client.put_data( + '/messages/{}'.format(message.public_id), + {'label_ids': labels_to_add}) + resp_data = json.loads(response.data) + assert response.status_code == 400 + assert resp_data.get('type') == 'invalid_request_error' + + response = api_client.put_data( + '/messages/{}'.format(message.public_id), + {'label_ids': labels_to_add + [existing_label]}) + resp_data = json.loads(response.data) + assert response.status_code == 400 + assert resp_data.get('type') == 'invalid_request_error' + + # Removing all labels is not allowed, because this will remove + # the required label (one of 'all'/ 'trash'/ 'spam') too. + response = api_client.put_data( + '/messages/{}'.format(message.public_id), + {'label_ids': []}) + resp_data = json.loads(response.data) + assert response.status_code == 400 + assert resp_data.get('type') == 'invalid_request_error' + + +@pytest.mark.parametrize('label', ['all', 'trash', 'spam']) +def test_adding_a_mutually_exclusive_label_replaces_the_other( + db, api_client, default_account, folder_and_message_maps, label): + # Verify a Gmail message can only have ONE of the 'all', 'trash', 'spam' + # labels at a time. We specifically test that adding 'all'/ 'trash'/ 'spam' + # to a message in one of the other two folders *replaces* + # the existing label with the label being added. + folder_map, message_map = folder_and_message_maps + label_to_add = folder_map[label] + + for key in message_map: + if key == label: + continue + + message = message_map[key] + resp_data = api_client.get_data('/messages/{}'.format(message.public_id)) + labels = resp_data['labels'] + assert len(labels) == 1 + assert labels[0]['name'] == key + existing_label = labels[0]['id'] + + # Adding 'all'/ 'trash'/ 'spam' removes the existing one, + # irrespective of whether it's provided in the request or not. 
+ response = api_client.put_data( + '/messages/{}'.format(message.public_id), + {'label_ids': [label_to_add.category.public_id, + existing_label]}) + labels = json.loads(response.data)['labels'] + assert len(labels) == 1 + assert labels[0]['name'] == label + + +@pytest.mark.parametrize('label', ['trash', 'spam']) +def test_adding_trash_or_spam_removes_inbox( + db, api_client, default_account, folder_and_message_maps, label): + # Verify a Gmail message in 'trash', 'spam' cannot have 'inbox'. + # We specifically test that adding 'trash'/ 'spam' to a message with 'inbox' + # removes it. + folder_map, message_map = folder_and_message_maps + + message = message_map['all'] + add_inbox_label(db, default_account, message) + resp_data = api_client.get_data('/messages/{}'.format(message.public_id)) + labels = resp_data['labels'] + assert len(labels) == 2 + assert set([l['name'] for l in labels]) == set(['all', 'inbox']) + + # Adding 'trash'/ 'spam' removes 'inbox' (and 'all'), + # irrespective of whether it's provided in the request or not. + label_to_add = folder_map[label] + response = api_client.put_data( + '/messages/{}'.format(message.public_id), + {'label_ids': [label_to_add.category.public_id] + + [l['id'] for l in labels]}) + labels = json.loads(response.data)['labels'] + assert len(labels) == 1 + assert labels[0]['name'] == label + + +@pytest.mark.parametrize('label', ['all', 'trash', 'spam']) +def test_adding_a_mutually_exclusive_label_does_not_affect_custom_labels( + db, api_client, default_account, folder_and_message_maps, label): + folder_map, message_map = folder_and_message_maps + label_to_add = folder_map[label] + + for key in message_map: + if key == label: + continue + + message = message_map[key] + add_custom_label(db, default_account, message) + resp_data = api_client.get_data('/messages/{}'.format(message.public_id)) + labels = resp_data['labels'] + assert len(labels) == 2 + assert key in [l['name'] for l in labels] + assert '<3' in [l['display_name'] for l in labels] + + # Adding only 'all'/ 'trash'/ 'spam' does not change custom labels. + response = api_client.put_data( + '/messages/{}'.format(message.public_id), + {'label_ids': [label_to_add.category.public_id] + + [l['id'] for l in labels]}) + labels = json.loads(response.data)['labels'] + assert len(labels) == 2 + assert label in [l['name'] for l in labels] + assert '<3' in [l['display_name'] for l in labels] + + +@pytest.mark.parametrize('label', ['all', 'trash', 'spam']) +def test_adding_inbox_adds_all_and_removes_trash_spam( + db, api_client, default_account, folder_and_message_maps, label): + # Verify a Gmail message in 'trash', 'spam' cannot have 'inbox'. + # This time we test that adding 'inbox' to a message in the 'trash'/ 'spam' + # moves it to 'all' in addition to adding 'inbox'. + folder_map, message_map = folder_and_message_maps + + message = message_map[label] + resp_data = api_client.get_data('/messages/{}'.format(message.public_id)) + labels = resp_data['labels'] + assert len(labels) == 1 + assert labels[0]['name'] == label + existing_label = labels[0]['id'] + + inbox_label = add_fake_label(db.session, default_account, 'Inbox', 'inbox') + db.session.commit() + + # Adding 'inbox' adds 'all', replacing 'trash'/ 'spam' if needed. 
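+ # (This mirrors Gmail itself: an inboxed message also lives in All Mail,
+ # so pulling it out of trash/spam implies giving it the 'all' label.)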
+ response = api_client.put_data( + '/messages/{}'.format(message.public_id), + {'label_ids': [inbox_label.category.public_id, existing_label]}) + db.session.commit() + labels = json.loads(response.data)['labels'] + assert len(labels) == 2 + assert set([l['name'] for l in labels]) == set(['all', 'inbox']) + + +@pytest.mark.parametrize('label', ['all', 'trash', 'spam']) +def test_adding_a_custom_label_preserves_other_labels( + db, api_client, default_account, folder_and_message_maps, label): + folder_map, message_map = folder_and_message_maps + + message = message_map[label] + resp_data = api_client.get_data('/messages/{}'.format(message.public_id)) + labels = resp_data['labels'] + assert len(labels) == 1 + assert labels[0]['name'] == label + existing_label = labels[0]['id'] + + custom_label = add_fake_label(db.session, default_account, '<3', None) + db.session.commit() + + # Adding only a custom label does not move a message to a different folder + # i.e. does not change its 'all'/ 'trash'/ 'spam' labels. + response = api_client.put_data( + '/messages/{}'.format(message.public_id), + {'label_ids': [custom_label.category.public_id, existing_label]}) + labels = json.loads(response.data)['labels'] + assert len(labels) == 2 + assert set([l['name'] for l in labels]) == set([label, None]) + assert '<3' in [l['display_name'] for l in labels] + + +@pytest.mark.parametrize('label', ['all', 'trash', 'spam']) +def test_removing_a_mutually_exclusive_label_does_not_orphan_a_message( + db, api_client, default_account, folder_and_message_maps, label): + folder_map, message_map = folder_and_message_maps + + message = message_map[label] + resp_data = api_client.get_data('/messages/{}'.format(message.public_id)) + labels = resp_data['labels'] + assert len(labels) == 1 + assert labels[0]['name'] == label + + custom_label = add_fake_label(db.session, default_account, '<3', None) + db.session.commit() + + # Removing a message's ONLY folder "label" does not remove it. + # Gmail messages MUST belong to one of 'all'/ 'trash'/ 'spam'. 
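+ # (The request below asks for only the custom label; rather than orphan
+ # the message, the API keeps the required folder label, which is why two
+ # labels come back.)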
+ response = api_client.put_data( + '/messages/{}'.format(message.public_id), + {'label_ids': [custom_label.category.public_id]}) + labels = json.loads(response.data)['labels'] + assert len(labels) == 2 + assert set([l['name'] for l in labels]) == set([label, None]) + assert '<3' in [l['display_name'] for l in labels] diff --git a/inbox/test/imap/test_pooling.py b/inbox/test/imap/test_pooling.py new file mode 100644 index 000000000..421e1462a --- /dev/null +++ b/inbox/test/imap/test_pooling.py @@ -0,0 +1,66 @@ +import imaplib +import socket + +import gevent +import pytest +import mock +from backports import ssl + +from inbox.crispin import CrispinConnectionPool + + +class TestableConnectionPool(CrispinConnectionPool): + + def _set_account_info(self): + pass + + def _new_connection(self): + return mock.Mock() + + +def test_pool(): + pool = TestableConnectionPool(1, num_connections=3, readonly=True) + with pool.get() as conn: + pass + assert pool._queue.full() + assert conn in pool._queue + + +@pytest.mark.skipif(True, reason="randomly blocks forever") +def test_block_on_depleted_pool(): + pool = TestableConnectionPool(1, num_connections=1, readonly=True) + # Test that getting a connection when the pool is empty blocks + with pytest.raises(gevent.hub.LoopExit): + with pool.get(): + with pool.get(): + pass + + +@pytest.mark.parametrize("error_class,expect_logout_called", [ + (imaplib.IMAP4.error, True), + (imaplib.IMAP4.abort, False), + (socket.error, False), + (socket.timeout, False), + (ssl.SSLError, False), + (ssl.CertificateError, False), +]) +def test_imap_and_network_errors(error_class, expect_logout_called): + pool = TestableConnectionPool(1, num_connections=3, readonly=True) + with pytest.raises(error_class): + with pool.get() as conn: + raise error_class + assert pool._queue.full() + # Check that the connection wasn't returned to the pool + while not pool._queue.empty(): + item = pool._queue.get() + assert item is None + assert conn.logout.called is expect_logout_called + + +def test_connection_retained_on_other_errors(): + pool = TestableConnectionPool(1, num_connections=3, readonly=True) + with pytest.raises(ValueError): + with pool.get() as conn: + raise ValueError + assert conn in pool._queue + assert not conn.logout.called diff --git a/inbox/test/imap/test_save_folder_names.py b/inbox/test/imap/test_save_folder_names.py new file mode 100644 index 000000000..da64eb428 --- /dev/null +++ b/inbox/test/imap/test_save_folder_names.py @@ -0,0 +1,250 @@ +from inbox.crispin import RawFolder +from inbox.mailsync.backends.imap.monitor import ImapSyncMonitor +from inbox.mailsync.backends.gmail import GmailSyncMonitor +from inbox.models import Folder, Label, Category +from inbox.models.category import EPOCH + + +def test_imap_save_generic_folder_names(db, default_account): + monitor = ImapSyncMonitor(default_account) + folder_names_and_roles = { + ('INBOX', 'inbox'), + ('Sent Mail', 'sent'), + ('Sent Messages', 'sent'), + ('Drafts', 'drafts'), + ('Miscellania', ''), + ('miscellania', ''), + ('Recipes', ''), + } + raw_folders = [RawFolder(*args) for args in folder_names_and_roles] + monitor.save_folder_names(db.session, raw_folders) + saved_folder_data = set( + db.session.query(Folder.name, Folder.canonical_name).filter( + Folder.account_id == default_account.id).all()) + assert saved_folder_data == folder_names_and_roles + + +def test_handle_folder_deletions(db, default_account): + monitor = ImapSyncMonitor(default_account) + folder_names_and_roles = { + ('INBOX', 'inbox'), + ('Miscellania', 
None), + } + raw_folders = [RawFolder(*args) for args in folder_names_and_roles] + monitor.save_folder_names(db.session, raw_folders) + assert len(db.session.query(Folder).filter( + Folder.account_id == default_account.id).all()) == 2 + + monitor.save_folder_names(db.session, [RawFolder('INBOX', 'inbox')]) + saved_folder_data = set( + db.session.query(Folder.name, Folder.canonical_name).filter( + Folder.account_id == default_account.id).all()) + assert saved_folder_data == {('INBOX', 'inbox')} + + +def test_imap_handle_folder_renames(db, default_account): + monitor = ImapSyncMonitor(default_account) + folder_names_and_roles = { + ('INBOX', 'inbox'), + ('[Gmail]/Todos', 'all'), + ('[Gmail]/Basura', 'trash') + } + + folders_renamed = { + ('INBOX', 'inbox'), + ('[Gmail]/All', 'all'), + ('[Gmail]/Trash', 'trash') + } + original_raw_folders = [RawFolder(*args) for args in + folder_names_and_roles] + renamed_raw_folders = [RawFolder(*args) for args in folders_renamed] + monitor.save_folder_names(db.session, original_raw_folders) + assert len(db.session.query(Folder).filter( + Folder.account_id == default_account.id).all()) == 3 + + monitor.save_folder_names(db.session, renamed_raw_folders) + saved_folder_data = set( + db.session.query(Folder.name, Folder.canonical_name).filter( + Folder.account_id == default_account.id).all()) + assert saved_folder_data == folders_renamed + + +def test_gmail_handle_folder_renames(db, default_account): + monitor = GmailSyncMonitor(default_account) + folder_names_and_roles = { + ('[Gmail]/Todos', 'all'), + ('[Gmail]/Basura', 'trash') + } + + folders_renamed = { + ('[Gmail]/All', 'all'), + ('[Gmail]/Trash', 'trash') + } + original_raw_folders = [RawFolder(*args) for args in + folder_names_and_roles] + renamed_raw_folders = [RawFolder(*args) for args in folders_renamed] + monitor.save_folder_names(db.session, original_raw_folders) + original_folders = db.session.query(Folder).filter( + Folder.account_id == default_account.id).all() + + assert len(original_folders) == 2 + for folder in original_folders: + assert folder.category is not None + + original_categories = {f.canonical_name: f.category.display_name for f in + original_folders} + + for folder in folder_names_and_roles: + display_name, role = folder + assert original_categories[role] == display_name + + monitor.save_folder_names(db.session, renamed_raw_folders) + saved_folder_data = set( + db.session.query(Folder.name, Folder.canonical_name).filter( + Folder.account_id == default_account.id).all()) + assert saved_folder_data == folders_renamed + + renamed_folders = db.session.query(Folder).filter( + Folder.account_id == default_account.id).all() + + for folder in renamed_folders: + assert folder.category is not None + + renamed_categories = {f.canonical_name: f.category.display_name for f in + renamed_folders} + + for folder in folders_renamed: + display_name, role = folder + assert renamed_categories[role] == display_name + + +def test_save_gmail_folder_names(db, default_account): + monitor = GmailSyncMonitor(default_account) + folder_names_and_roles = { + ('[Gmail]/All Mail', 'all'), + ('[Gmail]/Trash', 'trash'), + ('[Gmail]/Spam', 'spam'), + ('Miscellania', ''), + ('Recipes', ''), + } + raw_folders = [RawFolder(*args) for args in folder_names_and_roles] + monitor.save_folder_names(db.session, raw_folders) + + saved_folder_data = set( + db.session.query(Folder.name, Folder.canonical_name).filter( + Folder.account_id == default_account.id) + ) + assert saved_folder_data == { + ('[Gmail]/All Mail', 'all'), + 
('[Gmail]/Trash', 'trash'), + ('[Gmail]/Spam', 'spam') + } + + # Casing on "Inbox" is different to make what we get from folder listing + # consistent with what we get in X-GM-LABELS during sync. + expected_saved_names_and_roles = { + ('[Gmail]/All Mail', 'all'), + ('[Gmail]/Trash', 'trash'), + ('[Gmail]/Spam', 'spam'), + ('Miscellania', ''), + ('Recipes', ''), + } + saved_label_data = set( + db.session.query(Label.name, Label.canonical_name).filter( + Label.account_id == default_account.id) + ) + assert saved_label_data == expected_saved_names_and_roles + + saved_category_data = set( + db.session.query(Category.display_name, Category.name).filter( + Category.namespace_id == default_account.namespace.id) + ) + assert saved_category_data == expected_saved_names_and_roles + + +def test_handle_trailing_whitespace(db, default_account): + raw_folders = [ + RawFolder('Miscellania', ''), + RawFolder('Miscellania ', ''), + RawFolder('Inbox', 'inbox') + ] + monitor = ImapSyncMonitor(default_account) + monitor.save_folder_names(db.session, raw_folders) + saved_folder_data = set( + db.session.query(Folder.name, Folder.canonical_name).filter( + Folder.account_id == default_account.id) + ) + assert saved_folder_data == {('Miscellania', ''), ('Inbox', 'inbox')} + + +def test_imap_remote_delete(db, default_account): + monitor = ImapSyncMonitor(default_account) + folders = { + ('All', 'inbox'), + ('Trash', 'trash'), + ('Applications', ''), + } + + new_folders = { + ('All', 'inbox'), + ('Trash', 'trash') + } + original_raw_folders = [RawFolder(*args) for args in + folders] + new_raw_folders = [RawFolder(*args) for args in new_folders] + monitor.save_folder_names(db.session, original_raw_folders) + original_folders = db.session.query(Folder).filter( + Folder.account_id == default_account.id).all() + + assert len(original_folders) == 3 + for label in original_folders: + assert label.category is not None + + original_categories = {f.canonical_name: f.category.display_name for f in + original_folders} + + for folder in folders: + display_name, role = folder + assert original_categories[role] == display_name + + monitor.save_folder_names(db.session, new_raw_folders) + saved_folder_data = set( + db.session.query(Folder.name, Folder.canonical_name).filter( + Folder.account_id == default_account.id).all()) + assert saved_folder_data == new_folders + + renamed_folders = db.session.query(Folder).filter( + Folder.account_id == default_account.id).all() + + for folder in renamed_folders: + assert folder.category is not None + + renamed_categories = {f.canonical_name: f.category.display_name for f in + renamed_folders} + + for folder in new_folders: + display_name, role = folder + assert renamed_categories[role] == display_name + + +def test_not_deleting_canonical_folders(empty_db, default_account): + # Create a label w/ no messages attached. 
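+ # ('[Gmail]/Tous les messages' is the French-localized All Mail label; the
+ # point is that a label with a canonical role is never garbage-collected,
+ # even if it no longer appears in the folder listing.)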
+ label = Label.find_or_create(empty_db.session, default_account, + '[Gmail]/Tous les messages') + label.canonical_name = 'all' + empty_db.session.commit() + + monitor = GmailSyncMonitor(default_account) + + folder_names_and_roles = { + ('[Gmail]/Corbeille', 'trash'), + ('[Gmail]/Spam', 'spam'), + ('Recettes', None), + } + + raw_folders = [RawFolder(*args) for args in folder_names_and_roles] + monitor.save_folder_names(empty_db.session, raw_folders) + + label = empty_db.session.query(Label).get(label.id) + assert label.deleted_at is None + assert label.category.deleted_at == EPOCH diff --git a/inbox/test/imap/test_smtp.py b/inbox/test/imap/test_smtp.py new file mode 100644 index 000000000..f504b7500 --- /dev/null +++ b/inbox/test/imap/test_smtp.py @@ -0,0 +1,80 @@ +import smtplib +import pytest +import mock + +from inbox.sendmail.base import SendMailException +from inbox.sendmail.smtp.postel import SMTPConnection +from nylas.logging import get_logger + + +@pytest.mark.networkrequired +def test_use_smtp_over_ssl(): + # Auth won't actually work but we just want to test connection + # initialization here and below. + SMTPConnection.smtp_password = mock.Mock() + conn = SMTPConnection(account_id=1, + email_address='inboxapptest@gmail.com', + smtp_username='inboxapptest@gmail.com', + auth_type='password', + auth_token='secret_password', + smtp_endpoint=('smtp.gmail.com', 465), + ssl_required=True, + log=get_logger()) + assert isinstance(conn.connection, smtplib.SMTP_SSL) + + +@pytest.mark.networkrequired +def test_use_starttls(): + conn = SMTPConnection(account_id=1, + email_address='inboxapptest@gmail.com', + smtp_username='inboxapptest@gmail.com', + auth_type='password', + auth_token='secret_password', + smtp_endpoint=('smtp.gmail.com', 587), + ssl_required=True, + log=get_logger()) + assert isinstance(conn.connection, smtplib.SMTP) + + +@pytest.mark.networkrequired +def test_use_plain(): + ssl = True + with pytest.raises(SendMailException): + conn = SMTPConnection(account_id=1, + email_address='test@tivertical.com', + smtp_username='test@tivertical.com', + auth_type='password', + auth_token='testpwd', + smtp_endpoint=('tivertical.com', 587), + ssl_required=ssl, + log=get_logger()) + + ssl = False + conn = SMTPConnection(account_id=1, + email_address='test@tivertical.com', + smtp_username='test@tivertical.com', + auth_type='password', + auth_token='testpwd', + smtp_endpoint=('tivertical.com', 587), + ssl_required=ssl, + log=get_logger()) + assert isinstance(conn.connection, smtplib.SMTP) + + +@pytest.mark.parametrize('smtp_port', [465, 587]) +@pytest.mark.networkrequired +def test_handle_disconnect(monkeypatch, smtp_port): + def simulate_disconnect(self): + raise smtplib.SMTPServerDisconnected() + monkeypatch.setattr('smtplib.SMTP.rset', simulate_disconnect) + monkeypatch.setattr('smtplib.SMTP.mail', lambda *args: (550, 'NOPE')) + conn = SMTPConnection(account_id=1, + email_address='inboxapptest@gmail.com', + smtp_username='inboxapptest@gmail.com', + auth_type='password', + auth_token='secret_password', + smtp_endpoint=('smtp.gmail.com', smtp_port), + ssl_required=True, + log=get_logger()) + with pytest.raises(smtplib.SMTPSenderRefused): + conn.sendmail(['test@example.com'], 'hello there') diff --git a/inbox/test/imap/test_update_metadata.py b/inbox/test/imap/test_update_metadata.py new file mode 100644 index 000000000..42b0b7bb2 --- /dev/null +++ b/inbox/test/imap/test_update_metadata.py @@ -0,0 +1,78 @@ +import pytest +import json +from inbox.crispin import GmailFlags, Flags +from 
inbox.models.backends.imap import ImapUid +from inbox.mailsync.backends.imap.common import (update_metadata, + update_message_metadata) +from inbox.test.util.base import (add_fake_message, add_fake_imapuid, + add_fake_folder, add_fake_thread) + + +def test_gmail_label_sync(db, default_account, message, folder, + imapuid, default_namespace): + # Note that IMAPClient parses numeric labels into integer types. We have to + # correctly handle those too. + new_flags = { + imapuid.msg_uid: GmailFlags((), + (u'\\Important', u'\\Starred', u'foo', 42), + None) + } + update_metadata(default_namespace.account.id, + folder.id, folder.canonical_name, new_flags, db.session) + category_canonical_names = {c.name for c in message.categories} + category_display_names = {c.display_name for c in message.categories} + assert 'important' in category_canonical_names + assert {'foo', '42'}.issubset(category_display_names) + + +def test_gmail_drafts_flag_constrained_by_folder(db, default_account, message, + imapuid, folder): + new_flags = {imapuid.msg_uid: GmailFlags((), (u'\\Draft',), None)} + update_metadata(default_account.id, folder.id, 'all', new_flags, + db.session) + assert message.is_draft + update_metadata(default_account.id, folder.id, 'trash', new_flags, + db.session) + assert not message.is_draft + + +@pytest.mark.parametrize('folder_role', ['drafts', 'trash', 'archive']) +def test_generic_drafts_flag_constrained_by_folder(db, generic_account, + folder_role): + msg_uid = 22 + thread = add_fake_thread(db.session, generic_account.namespace.id) + message = add_fake_message(db.session, generic_account.namespace.id, + thread) + folder = add_fake_folder(db.session, generic_account) + add_fake_imapuid(db.session, generic_account.id, message, folder, msg_uid) + + new_flags = {msg_uid: Flags(('\\Draft',), None)} + update_metadata(generic_account.id, folder.id, folder_role, new_flags, + db.session) + assert message.is_draft == (folder_role == 'drafts') + + +def test_update_categories_when_actionlog_entry_missing( + db, default_account, message, imapuid): + message.categories_changes = True + db.session.commit() + update_message_metadata(db.session, imapuid.account, message, False) + assert message.categories == {imapuid.folder.category} + + +def test_truncate_imapuid_extra_flags(db, default_account, message, folder): + + imapuid = ImapUid(message=message, account_id=default_account.id, + msg_uid=2222, folder=folder) + imapuid.update_flags(['We', 'the', 'People', 'of', 'the', 'United', + 'States', 'in', 'Order', 'to', 'form', 'a', 'more', + 'perfect', 'Union', 'establish', 'Justice', + 'insure', 'domestic', 'Tranquility', 'provide', + 'for', 'the', 'common', 'defence', 'promote', 'the', + 'general', 'Welfare', 'and', 'secure', 'the', + 'Blessings', 'of', 'Liberty', 'to', 'ourselves', + 'and', 'our', 'Posterity', 'do', 'ordain', 'and', + 'establish', 'this', 'Constitution', 'for', 'the', + 'United', 'States', 'of', 'America']) + + assert len(json.dumps(imapuid.extra_flags)) < 255 diff --git a/inbox/test/providers/__init__.py b/inbox/test/providers/__init__.py new file mode 100644 index 000000000..737940769 --- /dev/null +++ b/inbox/test/providers/__init__.py @@ -0,0 +1,5 @@ +# Allow out-of-tree backend submodules. 
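+# extend_path makes this a namespace package, so out-of-tree distributions
+# can contribute provider modules under this package; register_backends then
+# imports whatever it finds into module_registry.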
+from pkgutil import extend_path +__path__ = extend_path(__path__, __name__) +from inbox.util.misc import register_backends +module_registry = register_backends(__name__, __path__) diff --git a/inbox/test/pytest.ini b/inbox/test/pytest.ini new file mode 100644 index 000000000..e4299b389 --- /dev/null +++ b/inbox/test/pytest.ini @@ -0,0 +1,2 @@ +[pytest] +norecursedirs = imap/network eas/network data system diff --git a/inbox/test/scheduling/conftest.py b/inbox/test/scheduling/conftest.py new file mode 100644 index 000000000..5130fd390 --- /dev/null +++ b/inbox/test/scheduling/conftest.py @@ -0,0 +1,2 @@ +# flake8: noqa: F401 +from inbox.test.util.base import dbloader, db, default_account diff --git a/inbox/test/scheduling/test_sync_start_logic.py b/inbox/test/scheduling/test_sync_start_logic.py new file mode 100644 index 000000000..d55b1fcb7 --- /dev/null +++ b/inbox/test/scheduling/test_sync_start_logic.py @@ -0,0 +1,212 @@ +import json +import mock +import pytest +import platform +from inbox.ignition import engine_manager +from inbox.mailsync.frontend import SyncHTTPFrontend +from inbox.mailsync.service import SyncService +from inbox.models import Account +from inbox.models.session import session_scope_by_shard_id +from inbox.test.util.base import add_generic_imap_account + + +host = platform.node() + + +def patched_sync_service(db, host=host, process_number=0): + s = SyncService(process_identifier='{}:{}'.format(host, process_number), + process_number=process_number) + + def start_sync(aid): + acc = db.session.query(Account).get(aid) + acc.sync_host = s.process_identifier + acc.sync_started() + s.syncing_accounts.add(aid) + db.session.commit() + return True + + s.start_sync = mock.Mock(side_effect=start_sync) + return s + + +def purge_other_accounts(default_account=None): + for key in engine_manager.engines: + with session_scope_by_shard_id(key) as db_session: + q = db_session.query(Account) + if default_account is not None: + q = q.filter(Account.id != default_account.id) + q.delete(synchronize_session='fetch') + db_session.commit() + + +def test_accounts_started_when_process_previously_assigned( + db, default_account, config): + config['SYNC_STEAL_ACCOUNTS'] = False + default_account.desired_sync_host = '{}:{}'.format(host, 0) + db.session.commit() + s = patched_sync_service(db, host=host, process_number=0) + assert s.account_ids_to_sync() == {default_account.id} + + +def test_start_new_accounts_when_stealing_enabled(monkeypatch, db, + default_account, config): + config['SYNC_STEAL_ACCOUNTS'] = True + + purge_other_accounts(default_account) + s = patched_sync_service(db) + default_account.sync_host = None + db.session.commit() + + s.poll_shared_queue({'queue_name': 'foo', 'id': default_account.id}) + assert s.start_sync.call_count == 1 + assert s.start_sync.call_args == mock.call(default_account.id) + + +def test_dont_start_accounts_if_over_ppa_limit(monkeypatch, db, + default_account, config): + config['SYNC_STEAL_ACCOUNTS'] = True + + purge_other_accounts(default_account) + default_account.sync_host = None + db.session.commit() + s = patched_sync_service(db) + s._pending_avgs_provider = mock.Mock() + s._pending_avgs_provider.get_pending_avgs = lambda *args: {15: 11} + + s.poll_shared_queue({'queue_name': 'foo', 'id': default_account.id}) + assert s.start_sync.call_count == 0 + + +def test_dont_start_new_accounts_when_stealing_disabled(db, config, + default_account): + config['SYNC_STEAL_ACCOUNTS'] = False + s = patched_sync_service(db) + default_account.sync_host = None + 
db.session.commit() + s.poll_shared_queue({'queue_name': 'foo', 'id': default_account.id}) + assert s.start_sync.call_count == 0 + + +def test_concurrent_syncs(monkeypatch, db, default_account, config): + config['SYNC_STEAL_ACCOUNTS'] = True + + purge_other_accounts(default_account) + s1 = patched_sync_service(db, process_number=0) + s2 = patched_sync_service(db, process_number=2) + default_account.desired_sync_host = s1.process_identifier + db.session.commit() + s1.poll({'queue_name': 'foo'}) + s2.poll({'queue_name': 'foo'}) + # Check that only one SyncService instance claims the account. + assert s1.start_sync.call_count == 1 + assert s1.start_sync.call_args == mock.call(default_account.id) + assert s2.start_sync.call_count == 0 + + +def test_twice_queued_accounts_started_once(monkeypatch, db, default_account): + purge_other_accounts(default_account) + s = patched_sync_service(db) + default_account.desired_sync_host = s.process_identifier + db.session.commit() + s.poll({'queue_name': 'foo'}) + s.poll({'queue_name': 'foo'}) + assert default_account.sync_host == s.process_identifier + assert s.start_sync.call_count == 1 + + +def test_external_sync_disabling(monkeypatch, db): + purge_other_accounts() + account = add_generic_imap_account(db.session, + email_address='test@example.com') + other_account = add_generic_imap_account( + db.session, email_address='test2@example.com') + account.sync_host = None + account.desired_sync_host = None + other_account.sync_host = None + other_account.desired_sync_host = None + db.session.commit() + s = patched_sync_service(db) + + s.poll_shared_queue({'queue_name': 'foo', 'id': account.id}) + s.poll_shared_queue({'queue_name': 'foo', 'id': other_account.id}) + assert len(s.syncing_accounts) == 2 + + account.mark_deleted() + db.session.commit() + assert account.sync_should_run is False + assert account._sync_status['sync_disabled_reason'] == 'account deleted' + + account.mark_invalid() + db.session.commit() + assert account.sync_should_run is False + assert account.sync_state == 'invalid' + assert account._sync_status['sync_disabled_reason'] == \ + 'invalid credentials' + + s.poll({'queue_name': 'foo'}) + assert s.syncing_accounts == {other_account.id} + + +def test_http_frontend(db, default_account, monkeypatch): + s = patched_sync_service(db) + s.poll({'queue_name': 'foo'}) + + monkeypatch.setattr('pympler.muppy.get_objects', lambda *args: []) + monkeypatch.setattr('pympler.summary.summarize', lambda *args: []) + + frontend = SyncHTTPFrontend(s, 16384, trace_greenlets=True, profile=True) + app = frontend._create_app() + app.config['TESTING'] = True + with app.test_client() as c: + resp = c.get('/profile') + assert resp.status_code == 200 + resp = c.get('/load') + assert resp.status_code == 200 + resp = c.get('/mem') + assert resp.status_code == 200 + monkeypatch.undo() + + +def test_http_unassignment(db, default_account): + purge_other_accounts(default_account) + s = patched_sync_service(db) + default_account.desired_sync_host = None + default_account.sync_host = None + db.session.commit() + s.poll_shared_queue({'queue_name': 'foo', 'id': default_account.id}) + + frontend = SyncHTTPFrontend(s, 16384, False, False) + app = frontend._create_app() + app.config['TESTING'] = True + with app.test_client() as c: + resp = c.post( + '/unassign', data=json.dumps({'account_id': default_account.id}), + content_type='application/json') + assert resp.status_code == 200 + db.session.expire_all() + assert default_account.sync_host is None + + # Check that 409 is returned 
if account is not actually assigned to + # process. + with app.test_client() as c: + resp = c.post( + '/unassign', data=json.dumps({'account_id': default_account.id}), + content_type='application/json') + assert resp.status_code == 409 + + +@pytest.mark.parametrize("sync_state", ["running", "stopped", "invalid", None]) +def test_start_accounts_w_sync_should_run_set(monkeypatch, db, default_account, + config, + sync_state): + purge_other_accounts(default_account) + config['SYNC_STEAL_ACCOUNTS'] = True + default_account.sync_should_run = True + default_account.sync_state = sync_state + default_account.sync_host = None + default_account.desired_sync_host = None + db.session.commit() + + s = patched_sync_service(db) + s.poll_shared_queue({'queue_name': 'foo', 'id': default_account.id}) + assert s.start_sync.call_count == 1 diff --git a/inbox/test/scheduling/test_syncback_logic.py b/inbox/test/scheduling/test_syncback_logic.py new file mode 100644 index 000000000..9d63d32c0 --- /dev/null +++ b/inbox/test/scheduling/test_syncback_logic.py @@ -0,0 +1,179 @@ +import random + +import pytest +import gevent + +from inbox.ignition import engine_manager +from inbox.models.session import session_scope, session_scope_by_shard_id +from inbox.models.action_log import ActionLog, schedule_action +from inbox.transactions.actions import SyncbackService + +from inbox.test.util.base import add_generic_imap_account + + +@pytest.fixture +def purge_accounts_and_actions(): + for key in engine_manager.engines: + with session_scope_by_shard_id(key) as db_session: + db_session.query(ActionLog).delete(synchronize_session=False) + db_session.commit() + + +@pytest.yield_fixture +def patched_enginemanager(monkeypatch): + engines = {k: None for k in range(0, 6)} + monkeypatch.setattr('inbox.ignition.engine_manager.engines', engines) + yield + monkeypatch.undo() + + +@pytest.yield_fixture +def patched_task(monkeypatch): + def uses_crispin_client(self): + return False + + def execute_with_lock(self): + with session_scope(self.account_id) as db_session: + action_log_entries = db_session.query(ActionLog).filter( + ActionLog.id.in_(self.action_log_ids)) + for action_log_entry in action_log_entries: + action_log_entry.status = 'successful' + db_session.commit() + monkeypatch.setattr('inbox.transactions.actions.SyncbackTask.uses_crispin_client', uses_crispin_client) + monkeypatch.setattr('inbox.transactions.actions.SyncbackTask.execute_with_lock', execute_with_lock) + yield + monkeypatch.undo() + + +def schedule_test_action(db_session, account): + from inbox.models.category import Category + + category_type = 'label' if account.provider == 'gmail' else 'folder' + category = Category.find_or_create( + db_session, account.namespace.id, name=None, + display_name='{}-{}'.format(account.id, random.randint(1, 356)), + type_=category_type) + db_session.flush() + + if category_type == 'folder': + schedule_action('create_folder', category, account.namespace.id, + db_session) + else: + schedule_action('create_label', category, account.namespace.id, + db_session) + db_session.commit() + + +def test_all_keys_are_assigned_exactly_once(patched_enginemanager): + assigned_keys = [] + + service = SyncbackService( + syncback_id=0, process_number=0, total_processes=2, num_workers=2) + assert service.keys == [0, 2, 4] + assigned_keys.extend(service.keys) + + service = SyncbackService( + syncback_id=0, process_number=1, total_processes=2, num_workers=2) + assert service.keys == [1, 3, 5] + assigned_keys.extend(service.keys) + + # All keys are assigned 
(therefore all accounts are assigned) + assert set(engine_manager.engines.keys()) == set(assigned_keys) + # No key is assigned more than once (and therefore, no account) + assert len(assigned_keys) == len(set(assigned_keys)) + + +def test_actions_are_claimed(purge_accounts_and_actions, patched_task): + with session_scope_by_shard_id(0) as db_session: + account = add_generic_imap_account( + db_session, email_address='{}@test.com'.format(0)) + schedule_test_action(db_session, account) + + with session_scope_by_shard_id(1) as db_session: + account = add_generic_imap_account( + db_session, email_address='{}@test.com'.format(1)) + schedule_test_action(db_session, account) + + service = SyncbackService( + syncback_id=0, process_number=1, total_processes=2, num_workers=2) + service._restart_workers() + service._process_log() + + while not service.task_queue.empty(): + gevent.sleep(0) + + with session_scope_by_shard_id(0) as db_session: + q = db_session.query(ActionLog) + assert q.count() == 1 + assert all(a.status == 'pending' for a in q) + + with session_scope_by_shard_id(1) as db_session: + q = db_session.query(ActionLog) + assert q.count() == 1 + assert all(a.status != 'pending' for a in q) + + +def test_actions_claimed_by_a_single_service(purge_accounts_and_actions, + patched_task): + actionlogs = [] + for key in (0, 1): + with session_scope_by_shard_id(key) as db_session: + account = add_generic_imap_account( + db_session, + email_address='{}@test.com'.format(key)) + schedule_test_action(db_session, account) + actionlogs += [db_session.query(ActionLog).one().id] + + services = [] + for process_number in (0, 1): + service = SyncbackService( + syncback_id=0, process_number=process_number, total_processes=2, + num_workers=2) + service._process_log() + services.append(service) + + for i, service in enumerate(services): + assert service.task_queue.qsize() == 1 + assert service.task_queue.peek().action_log_ids == [actionlogs[i]] + + +@pytest.mark.skipif(True, reason='Test if causing Jenkins build to fail') +def test_actions_for_invalid_accounts_are_skipped(purge_accounts_and_actions, + patched_task): + with session_scope_by_shard_id(0) as db_session: + account = add_generic_imap_account( + db_session, email_address='person@test.com') + schedule_test_action(db_session, account) + namespace_id = account.namespace.id + count = db_session.query(ActionLog).filter( + ActionLog.namespace_id == namespace_id).count() + assert account.sync_state != 'invalid' + + another_account = add_generic_imap_account( + db_session, email_address='another@test.com') + schedule_test_action(db_session, another_account) + another_namespace_id = another_account.namespace.id + another_count = db_session.query(ActionLog).filter( + ActionLog.namespace_id == another_namespace_id).count() + assert another_account.sync_state != 'invalid' + + account.mark_invalid() + db_session.commit() + + service = SyncbackService( + syncback_id=0, process_number=0, total_processes=2, num_workers=2) + service._process_log() + + while not service.task_queue.empty(): + gevent.sleep(0) + + with session_scope_by_shard_id(0) as db_session: + q = db_session.query(ActionLog).filter( + ActionLog.namespace_id == namespace_id, + ActionLog.status == 'pending') + assert q.count() == count + + q = db_session.query(ActionLog).filter( + ActionLog.namespace_id == another_namespace_id) + assert q.filter(ActionLog.status == 'pending').count() == 0 + assert q.filter(ActionLog.status == 'successful').count() == another_count diff --git 
a/inbox/test/search/__init__.py b/inbox/test/search/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/inbox/test/search/conftest.py b/inbox/test/search/conftest.py new file mode 100644 index 000000000..ccaeba2b0 --- /dev/null +++ b/inbox/test/search/conftest.py @@ -0,0 +1,6 @@ +from inbox.test.util.base import (config, db, absolute_path, + default_namespace) +from inbox.test.api.base import api_client + +__all__ = ['config', 'db', 'absolute_path', 'default_namespace', + 'api_client'] diff --git a/inbox/test/security/test_blobstorage.py b/inbox/test/security/test_blobstorage.py new file mode 100644 index 000000000..95a47abed --- /dev/null +++ b/inbox/test/security/test_blobstorage.py @@ -0,0 +1,34 @@ +import zlib +import hypothesis +from inbox.security.blobstorage import encode_blob, decode_blob + + +# This will run the test for a bunch of randomly-chosen values of sample_input. +@hypothesis.given(str, bool) +def test_blobstorage(config, sample_input, encrypt): + config['ENCRYPT_SECRETS'] = encrypt + assert decode_blob(encode_blob(sample_input)) == sample_input + + +@hypothesis.given(str, bool) +def test_encoded_format(config, sample_input, encrypt): + config['ENCRYPT_SECRETS'] = encrypt + encoded = encode_blob(sample_input) + assert encoded.startswith(chr(encrypt) + '\x00\x00\x00\x00') + data = encoded[5:] + if encrypt: + assert data != sample_input + assert data != zlib.compress(sample_input) + else: + assert data == zlib.compress(sample_input) + + +@hypothesis.given(unicode, bool) +def test_message_body_storage(config, message, sample_input, encrypt): + config['ENCRYPT_SECRETS'] = encrypt + message.body = None + assert message._compacted_body is None + message.body = sample_input + assert message._compacted_body.startswith( + chr(encrypt) + '\x00\x00\x00\x00') + assert message.body == sample_input diff --git a/inbox/test/security/test_secret.py b/inbox/test/security/test_secret.py new file mode 100644 index 000000000..a47cf647d --- /dev/null +++ b/inbox/test/security/test_secret.py @@ -0,0 +1,142 @@ +# -*- coding: UTF-8 -*- +import pytest + +from inbox.auth.gmail import GmailAuthHandler +from inbox.models.secret import Secret + +SHARD_ID = 0 +ACCOUNT_ID = 1 + + +@pytest.mark.parametrize('encrypt', [True, False]) +def test_secret(db, config, encrypt): + """ + If encryption is enabled, ensure that: + * secrets are encrypted. + * secrets are decrypted correctly on retrieval. + * secrets are bytes. + """ + config['ENCRYPT_SECRETS'] = encrypt + bytes_secret = b'\xff\x00\xf1' + unicode_secret = u'foo\u00a0' + + secret = Secret() + secret.type = 'password' + secret.secret = bytes_secret + + db.session.add(secret) + db.session.commit() + + secret = db.session.query(Secret).get(secret.id) + + if encrypt: + assert secret._secret != bytes_secret, 'secret is not encrypted' + else: + assert secret._secret == bytes_secret + assert secret.secret == bytes_secret, 'secret not decrypted correctly' + + with pytest.raises(TypeError) as e: + secret.secret = unicode_secret + + assert e.typename == 'TypeError', 'secret cannot be unicode' + + +@pytest.mark.parametrize('encrypt', [True, False]) +def test_token(db, config, encrypt): + """ + If encryption is enabled, ensure that: + * tokens are encrypted. + * tokens are decrypted correctly on retrieval. 
+ + Note: This tests refresh_tokens but passwords work in the same way + + """ + config['ENCRYPT_SECRETS'] = encrypt + token = 'tH*$&123abcº™™∞' + + email = 'vault.test@localhost.com' + resp = {'access_token': '', + 'expires_in': 3600, + 'refresh_token': token, + 'scope': '', + 'email': email, + 'family_name': '', + 'given_name': '', + 'name': '', + 'gender': '', + 'id': 0, + 'user_id': '', + 'id_token': '', + 'link': 'http://example.com', + 'locale': '', + 'picture': '', + 'hd': ''} + g = GmailAuthHandler('gmail') + g.verify_config = lambda x: True + account = g.get_account(SHARD_ID, email, resp) + + db.session.add(account) + db.session.commit() + + secret_id = account.refresh_token_id + secret = db.session.query(Secret).get(secret_id) + + assert secret == account.secret + + if encrypt: + assert secret._secret != token, 'token not encrypted' + else: + assert secret._secret == token, \ + 'token encrypted when encryption disabled' + + decrypted_secret = secret.secret + assert decrypted_secret == token and \ + account.refresh_token == decrypted_secret, \ + 'token not decrypted correctly' + + # Remove auth credentials row, else weird things + # happen when we try to read both encrypted and + # unencrypted data from the database. + for ac in account.auth_credentials: + db.session.delete(ac) + # db.session.delete(account.auth_credentials[0]) + db.session.commit() + + +@pytest.mark.parametrize('encrypt', [True, False]) +def test_token_inputs(db, config, encrypt, default_account): + """ + Ensure unicode tokens are converted to bytes. + Ensure invalid UTF-8 tokens are handled correctly. + + """ + config['ENCRYPT_SECRETS'] = encrypt + # Unicode + unicode_token = u'myunicodesecret' + + # Invalid UTF-8 byte sequence + invalid_token = b'\xff\x10' + + # NULL byte + null_token = b'\x1f\x00\xf1' + + default_account.refresh_token = unicode_token + db.session.commit() + + secret_id = default_account.refresh_token_id + secret = db.session.query(Secret).get(secret_id) + + assert not isinstance(secret.secret, unicode), 'secret cannot be unicode' + assert secret.secret == unicode_token, 'token not decrypted correctly' + + with pytest.raises(ValueError) as e: + default_account.refresh_token = invalid_token + + assert e.typename == 'ValueError', 'token cannot be invalid UTF-8' + + with pytest.raises(ValueError) as f: + default_account.refresh_token = null_token + + assert f.typename == 'ValueError', 'token cannot contain NULL byte' + + assert default_account.refresh_token == unicode_token diff --git a/inbox/test/security/test_smtp_ssl.py b/inbox/test/security/test_smtp_ssl.py new file mode 100644 index 000000000..713c1b4cd --- /dev/null +++ b/inbox/test/security/test_smtp_ssl.py @@ -0,0 +1,104 @@ +# flake8: noqa: F811 +import os +import sys +import ssl +import smtpd +import socket +import asyncore +import datetime + +import pytest +import gevent + +from inbox.test.util.base import default_account +from inbox.test.api.base import api_client, new_api_client + +smtpd.DEBUGSTREAM = sys.stderr + +__all__ = ['api_client', 'default_account'] + +current_dir = os.path.dirname(__file__) +SELF_SIGNED_CERTFILE = os.path.realpath(os.path.join(current_dir, '..', 'data/self_signed_cert.pem')) +SELF_SIGNED_KEYFILE = os.path.realpath(os.path.join(current_dir, '..', 'data/self_signed_cert.key')) + +from inbox.sendmail.smtp import postel + +SHARD_ID = 0 +SMTP_SERVER_HOST = 'localhost' + + +class BadCertSMTPServer(smtpd.DebuggingServer): + + def __init__(self, localaddr, remoteaddr): + smtpd.DebuggingServer.__init__(self, 
localaddr, remoteaddr) + self.set_socket(ssl.wrap_socket(self.socket, + certfile=SELF_SIGNED_CERTFILE, + keyfile=SELF_SIGNED_KEYFILE, + server_side=True)) + + +def run_bad_cert_smtp_server(): + serv = BadCertSMTPServer((SMTP_SERVER_HOST, 0), (None, None)) + + # override global so SMTP server knows we want an SSL connection + postel.SMTP_OVER_SSL_TEST_PORT = serv.socket.getsockname()[1] + + asyncore.loop() + + +@pytest.yield_fixture(scope='module') +def bad_cert_smtp_server(): + s = gevent.spawn(run_bad_cert_smtp_server) + yield s + s.kill() + + +@pytest.fixture +def patched_smtp(monkeypatch): + monkeypatch.setattr('inbox.sendmail.smtp.postel.SMTPConnection.smtp_password', + lambda x: None) + + +@pytest.fixture(scope='function') +def local_smtp_account(db): + from inbox.auth.generic import GenericAuthHandler + + handler = GenericAuthHandler(provider_name='custom') + acc = handler.get_account(SHARD_ID, + 'user@gmail.com', + {'email': 'user@gmail.com', + 'password': 'hunter2', + 'imap_server_host': 'imap-test.nylas.com', + 'imap_server_port': 143, + 'smtp_server_host': SMTP_SERVER_HOST, + 'smtp_server_port': postel.SMTP_OVER_SSL_TEST_PORT}) + db.session.add(acc) + db.session.commit() + return acc + + +@pytest.fixture +def example_draft(db, default_account): + return { + 'subject': 'Draft test at {}'.format(datetime.datetime.utcnow()), + 'body': '

Sea, birds and sand.
', + 'to': [{'name': 'The red-haired mermaid', + 'email': default_account.email_address}] + } + + +def test_smtp_ssl_verification_bad_cert(db, bad_cert_smtp_server, + example_draft, local_smtp_account, + api_client, patched_smtp): + + api_client = new_api_client(db, local_smtp_account.namespace) + while len(asyncore.socket_map) < 1: + gevent.sleep(0) # let SMTP daemon start up + r = api_client.post_data('/send', example_draft) + assert r.status_code == 200 + + +if __name__ == '__main__': + server = BadCertSMTPServer((SMTP_SERVER_HOST, SMTP_SERVER_PORT), + (None, None)) + asyncore.loop() diff --git a/inbox/test/system/.gitignore b/inbox/test/system/.gitignore new file mode 100644 index 000000000..90541f258 --- /dev/null +++ b/inbox/test/system/.gitignore @@ -0,0 +1 @@ +accounts.py diff --git a/inbox/test/system/__init__.py b/inbox/test/system/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/inbox/test/system/client.py b/inbox/test/system/client.py new file mode 100644 index 000000000..dc8632767 --- /dev/null +++ b/inbox/test/system/client.py @@ -0,0 +1,17 @@ +import os +from inbox import APIClient + + +class NylasTestClient(APIClient): + + def __init__(self, email_address=None, api_base=os.getenv("INBOX_API_PORT_5555_TCP_ADDR", "http://localhost:5555")): + self.email_address = email_address + APIClient.__init__(self, None, None, None, api_base) + + @property + def namespaces(self): + all_ns = super(NylasTestClient, self).namespaces + if self.email_address: + return all_ns.where(email_address=self.email_address) + else: + return all_ns diff --git a/inbox/test/system/conftest.py b/inbox/test/system/conftest.py new file mode 100644 index 000000000..0426549d3 --- /dev/null +++ b/inbox/test/system/conftest.py @@ -0,0 +1,92 @@ +# This file contains pytest fixtures as well as some config +import os +import platform + +API_BASE = "http://%s:%s" % (os.getenv("API_PORT_5555_TCP_ADDR", "localhost"), os.getenv("API_PORT_5555_TCP_PORT", "5555")) +TEST_MAX_DURATION_SECS = 360 +TEST_GRANULARITY_CHECK_SECS = 0.1 + +from time import time, sleep +from client import NylasTestClient +from inbox.util.url import provider_from_address +from google_auth_helper import google_auth +from outlook_auth_helper import outlook_auth +from inbox.auth.base import handler_from_provider + + +# we don't want to commit passwords to the repo. +# load them from an external json file. +try: + from accounts import credentials as raw_credentials + credentials = [(c['user'], c['password']) for c in raw_credentials] + all_accounts = [NylasTestClient(email, API_BASE) for email, _ in credentials] + gmail_accounts = [NylasTestClient(email, API_BASE) + for email, password in credentials + if "gmail.com" in email or + "inboxapp.com" in email] + + calendar_providers = ["gmail.com", "onmicrosoft.com"] + calendar_accounts = [NylasTestClient(email, API_BASE) + for email, password in credentials + if any(domain in email for domain in calendar_providers)] + +except ImportError: + print ("Error: test accounts file not found. 
" + "You need to create accounts.py\n" + "File format: credentials = [{'user': 'bill@example.com', " + "'password': 'VerySecret'}]") + raise + + +def timeout_loop(name): + def wrap(f): + def wrapped_f(*args, **kwargs): + client = args[0] + print "Waiting for: {}...".format(name) + success = False + start_time = time() + while time() - start_time < TEST_MAX_DURATION_SECS: + if f(*args, **kwargs): + success = True + break + sleep(TEST_GRANULARITY_CHECK_SECS) + + assert success, ("Failed to {} in less than {}s on {}" + .format(name, TEST_MAX_DURATION_SECS, + client.email_address)) + + format_test_result(name, client.provider, + client.email_address, start_time) + return True + return wrapped_f + return wrap + + +def format_test_result(function_name, provider, email, start_time): + print "%s\t%s\t%s\t%f" % (function_name, provider, + email, time() - start_time) + + +def create_account(db_session, email, password): + provider = provider_from_address(email) + auth_handler = handler_from_provider(provider) + # Special-case Gmail and Outlook, because we need to provide an oauth token + # and not merely a password. + response = {'email': email} + if provider == 'gmail': + code = google_auth(email, password) + response = auth_handler._get_authenticated_user(code) + elif provider == 'outlook': + code = outlook_auth(email, password) + response = auth_handler._get_authenticated_user(code) + else: + response = {"email": email, "password": password} + + account = auth_handler.create_account(email, response) + auth_handler.verify_account(account) + account.throttled = False + account.sync_host = platform.node() + account.desired_sync_host = platform.node() + db_session.add(account) + db_session.commit() + return account diff --git a/inbox/test/system/google_auth_helper.py b/inbox/test/system/google_auth_helper.py new file mode 100644 index 000000000..e62f152ff --- /dev/null +++ b/inbox/test/system/google_auth_helper.py @@ -0,0 +1,107 @@ +import requests +from HTMLParser import HTMLParser + +from inbox.auth.gmail import GmailAuthHandler +from inbox.util.url import url_concat + + +class GoogleAuthParser(HTMLParser): + _in_form = False + + def handle_starttag(self, tag, attrs): + if tag == 'form': + self._in_form = True + self.params = {} + + for k, v in attrs: + if k == 'action': + self.action = v + + if self._in_form: + attr_dict = {} + for k, v in attrs: + attr_dict[k] = v + if tag == 'input': + if 'value' in attr_dict: + self.params[attr_dict['name']] = attr_dict['value'] + + def handle_endtag(self, tag): + if tag == 'form': + self._in_form = False + + +class GoogleConnectParser(HTMLParser): + _in_form = False + params = {} + + def handle_starttag(self, tag, attrs): + if tag == 'form': + self._in_form = True + + for k, v in attrs: + if k == 'action': + self.action = v + + if self._in_form: + attr_dict = {} + for k, v in attrs: + attr_dict[k] = v + + if tag == 'input': + if 'value' in attr_dict: + self.params[attr_dict['name']] = attr_dict['value'] + + def handle_endtag(self, tag): + if tag == 'form': + self._in_form = False + + +class GoogleTokenParser(HTMLParser): + + def handle_starttag(self, tag, attrs): + if tag == 'input': + attr_dict = {} + for k, v in attrs: + attr_dict[k] = v + if attr_dict['id'] == 'code': + self.code = attr_dict['value'] + + +def google_auth(email, password): + session = requests.Session() + url_args = {'redirect_uri': GmailAuthHandler.OAUTH_REDIRECT_URI, + 'client_id': GmailAuthHandler.OAUTH_CLIENT_ID, + 'response_type': 'code', + 'scope': GmailAuthHandler.OAUTH_SCOPE, + 
'access_type': 'offline', + 'login_hint': email} + url = url_concat(GmailAuthHandler.OAUTH_AUTHENTICATE_URL, url_args) + req = session.get(url) + assert req.ok + auth_parser = GoogleAuthParser() + auth_parser.feed(req.text) + + params = auth_parser.params + action = auth_parser.action + + params['Email'] = email + params['Passwd'] = password + + req = session.post(action, data=params) + assert req.ok + + connect_parser = GoogleConnectParser() + connect_parser.feed(req.text) + + params = connect_parser.params + action = connect_parser.action + + params['submit_access'] = 'true' + + req = session.post(action, data=params) + assert req.ok + + token_parser = GoogleTokenParser() + token_parser.feed(req.text) + + return token_parser.code diff --git a/inbox/test/system/outlook_auth_helper.py b/inbox/test/system/outlook_auth_helper.py new file mode 100644 index 000000000..cd4ce0597 --- /dev/null +++ b/inbox/test/system/outlook_auth_helper.py @@ -0,0 +1,144 @@ +import requests +from HTMLParser import HTMLParser + +from inbox.auth.outlook import OutlookAuthHandler +from inbox.util.url import url_concat +import re + + +class OutlookAuthParser(HTMLParser): + _in_script = False + params = {} + action = None + + def handle_starttag(self, tag, attrs): + if tag == 'script': + self._in_script = True + + for k, v in attrs: + if k == 'action': + self.action = v + + def handle_endtag(self, tag): + if tag == 'script': + self._in_script = False + + def parse_params(self, data): + vals = {} + # Convert the server data into a dict + for i in filter(lambda x: ':' in x, data.split(',')): + m = re.match('(.*?):(.*)', i) + k = m.group(1) + v = m.group(2) + vals[k] = v + + # extract the PPFT + sfttag = vals['sFTTag'] + m = re.match('.*value="(.*)".*', sfttag) + self.action = vals['urlPost'][1:-1] + + # Static parameters that don't change between logins. Yes they look + # obscure, because they are. They were taken from the login process + # and although this may be a bit fragile, this is necessary for + # getting the refresh token without a heavy-weight headless browser + # that supports javascript just for this login flow. 
-cg3 + self.params = {'type': '11', 'PPSX': 'Passpo', 'NewUser': '1', + 'LoginOptions': '1', 'i3': '53255', 'm1': '2560', + 'm2': '1600', 'm3': '0', 'i12': '1', 'i17': '0', + 'i18': '__Login_Host|1'} + + # Generated value that we need to use to login + self.params['PPFT'] = m.group(1) + + def handle_data(self, data): + if self._in_script: + if data.startswith("var ServerData"): + # Extract the server data + m = re.match('var ServerData = {(.*)};', data).group(1) + self.parse_params(m) + + +class OutlookUpdateParser(HTMLParser): + _in_form = False + params = {} + + def handle_starttag(self, tag, attrs): + if tag == 'form': + self._in_form = True + + for k, v in attrs: + if k == 'action': + self.action = v + + if self._in_form: + attr_dict = {} + for k, v in attrs: + attr_dict[k] = v + + if tag == 'input': + if 'value' in attr_dict: + self.params[attr_dict['name']] = attr_dict['value'] + + def handle_endtag(self, tag): + if tag == 'form': + self._in_form = False + + +class OutlookConsentParser(HTMLParser): + _in_form = False + params = {} + + def handle_starttag(self, tag, attrs): + if tag == 'form': + self._in_form = True + + if self._in_form: + attr_dict = {} + for k, v in attrs: + attr_dict[k] = v + + if tag == 'input': + if 'value' in attr_dict: + self.params[attr_dict['name']] = attr_dict['value'] + + def handle_endtag(self, tag): + if tag == 'form': + self._in_form = False + + +def outlook_auth(email, password): + session = requests.Session() + url_args = {'redirect_uri': OutlookAuthHandler.OAUTH_REDIRECT_URI, + 'client_id': OutlookAuthHandler.OAUTH_CLIENT_ID, + 'response_type': 'code', + 'scope': OutlookAuthHandler.OAUTH_SCOPE, + 'access_type': 'offline', + 'login_hint': email} + url = url_concat(OutlookAuthHandler.OAUTH_AUTHENTICATE_URL, url_args) + req = session.get(url) + assert req.ok + + auth_parser = OutlookAuthParser() + auth_parser.feed(req.text) + + params = auth_parser.params + params['login'] = email + params['passwd'] = password + + req = session.post(auth_parser.action, data=params) + assert req.ok + + update_parser = OutlookUpdateParser() + update_parser.feed(req.text) + + req = session.post(update_parser.action, data=update_parser.params) + assert req.ok + + consent_parser = OutlookConsentParser() + consent_parser.feed(req.text) + + req = session.post(update_parser.action, data=consent_parser.params) + assert req.ok + + code = re.match('https.*code=(.*)&lc=1033', req.url).group(1) + return code diff --git a/inbox/test/system/random_words.py b/inbox/test/system/random_words.py new file mode 100755 index 000000000..4eb833abb --- /dev/null +++ b/inbox/test/system/random_words.py @@ -0,0 +1,75 @@ +#!/usr/bin/env python +import string +import random +import json + +DICT_FILE = '/etc/dictionaries-common/words' + + +def get_words(): + words = [] + try: + with open(DICT_FILE, 'r') as f: + words.extend(f.read().split('\n')) + except IOError: + try: + with open('LICENSE', 'r') as f: + words.extend(f.read().translate(string.maketrans("", ""), + string.punctuation).split()) + except IOError: + print json.dumps({'error': "couldn't open dictionary file", + 'filename': DICT_FILE}) + return words + + +def random_words(count=int(random.uniform(1, 500)), sig='me'): + words = get_words() + random_word_list = [] + + if sig: + word_index = int(random.uniform(1, len(words))) + random_word = words[word_index] + + salutation = ['Hey', 'Hi', 'Ahoy', 'Yo'][int(random.uniform(0, 3))] + random_word_list.append("{} {},\n\n".format(salutation, random_word)) + + just_entered = False + for i in 
range(count): + word_index = int(random.uniform(1, len(words))) + random_word = words[word_index] + + if i > 0 and not just_entered: + random_word = ' ' + random_word + + just_entered = False + + if int(random.uniform(1, 15)) == 1: + random_word += ('.') + + if int(random.uniform(1, 3)) == 1 and sig: + random_word += ('\n') + just_entered = True + + if int(random.uniform(1, 3)) == 1 and sig: + random_word += ('\n') + just_entered = True + + random_word_list.append(random_word) + + text = ''.join(random_word_list) + '.' + if sig: + if int(random.uniform(1, 2)) == 1: + salutation = ['Cheers', 'Adios', 'Ciao', 'Bye'][int(random.uniform(0, 3))] + punct = ['.', ',', '!', ''][int(random.uniform(0, 3))] + text += "\n\n{}{}\n".format(salutation, punct) + else: + text += '\n\n' + + punct = ['-', '- ', '--', '-- '][int(random.uniform(0, 3))] + text += '{}{}'.format(punct, sig) + + return text + + +if __name__ == '__main__': + print random_words() diff --git a/inbox/test/system/test_auth.py b/inbox/test/system/test_auth.py new file mode 100644 index 000000000..923c2c14d --- /dev/null +++ b/inbox/test/system/test_auth.py @@ -0,0 +1,66 @@ +import pytest + +from inbox.models.session import session_scope +from client import NylasTestClient +from conftest import (timeout_loop, credentials, create_account, API_BASE) + +try: + # If there's no broken accounts file, well, tough luck but don't crash. + # This should only be a problem locally; the jenkins jobs generates those + # credentials. + from accounts import broken_credentials +except ImportError: + print "test_auth.py: Warning -- No broken accounts credentials." + broken_credentials = [] + + +@timeout_loop('sync_start') +def wait_for_sync_start(client): + return True if client.messages.first() else False + + +@timeout_loop('auth') +def wait_for_auth(client): + namespaces = client.namespaces.all() + if len(namespaces): + client.email_address = namespaces[0]['email_address'] + client.provider = namespaces[0]['provider'] + return True + return False + + +@pytest.mark.parametrize("account_credentials", credentials) +def test_account_auth(account_credentials): + email, password = account_credentials + create_account(email, password) + client = NylasTestClient(email, API_BASE) + wait_for_auth(client) + + # wait for sync to start. tests rely on things setup at beginning + # of sync (e.g. folder hierarchy) + wait_for_sync_start(client) + + +errors = __import__('inbox.basicauth', fromlist=['basicauth']) + + +def test_account_create_should_fail(): + """Test that creation fails with appropriate errors, as defined in + the broken_credentials list. + Credentials have the format: + ({email, password}, error_type) + e.g. 
+ ({'user': 'foo@foo.com', 'password': 'pass'}, 'ConfigurationError') + """ + credentials = [((c['user'], c['password']), e) + for (c, e) in broken_credentials] + + for ((email, password), error) in credentials: + error_obj = getattr(errors, error) + with session_scope() as db_session: + with pytest.raises(error_obj): + create_account(db_session, email, password) + + +if __name__ == '__main__': + pytest.main([__file__]) diff --git a/inbox/test/system/test_drafts.py b/inbox/test/system/test_drafts.py new file mode 100644 index 000000000..954d606cd --- /dev/null +++ b/inbox/test/system/test_drafts.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +import pytest +import time +from conftest import timeout_loop, all_accounts +from inbox.client.errors import NotFoundError + + +@timeout_loop('file') +def wait_for_file(client, file_id): + try: + client.files.find(file_id) + return True + except NotFoundError: + return False + + +@timeout_loop('draft') +def wait_for_draft(client, draft_id): + try: + return client.drafts.find(draft_id) + except NotFoundError: + return False + + +@timeout_loop('draft_removed') +def check_draft_is_removed(client, draft_id): + try: + client.drafts.find(draft_id) + return False + except NotFoundError: + return True + + +@pytest.mark.parametrize("client", all_accounts) +def test_draft(client): + # Let's create a draft, attach a file to it and delete it + + # Create the file + myfile = client.files.create() + myfile.filename = 'file_%d.txt' % time.time() + myfile.data = 'This is a file' + myfile.save() + wait_for_file(client, myfile.id) + + # And the draft + mydraft = client.drafts.create() + mydraft.to = [{'email': client.email_address}] + mydraft.subject = "Test draft from Inbox - %s" % time.strftime("%H:%M:%S") + mydraft.body = "This is a test email, disregard this." + mydraft.attach(myfile) + mydraft.save() + wait_for_draft(client, mydraft.id) + mydraft.send() + + # Not sure about the correct behaviour for this one - + # are sent drafts kept? + # check_draft_is_removed(client, mydraft.id) + + +if __name__ == '__main__': + pytest.main([__file__]) diff --git a/inbox/test/system/test_events.py b/inbox/test/system/test_events.py new file mode 100644 index 000000000..638349060 --- /dev/null +++ b/inbox/test/system/test_events.py @@ -0,0 +1,111 @@ +import pytest +import random +import datetime +import time + +from inbox.client.errors import NotFoundError +from conftest import calendar_accounts, timeout_loop +from inbox.models.session import new_session +from inbox.models import ActionLog +from inbox.ignition import main_engine + +random.seed(None) + + +@pytest.yield_fixture(scope="module") +def real_db(): + """A fixture to get access to the real mysql db. 
We need this + to log in to providers like gmail to check that events changes + are synced back.""" + engine = main_engine() + session = new_session(engine) + yield session + session.rollback() + session.close() + + +@timeout_loop('event') +def wait_for_event(client, event_id, real_db): + try: + return client.events.find(event_id) + except NotFoundError: + return False + + +@timeout_loop('event') +def wait_for_event_rename(client, event_id, new_title, real_db): + try: + ev = client.events.find(event_id) + return ev.title == new_title + except NotFoundError: + return False + + +@timeout_loop('event') +def wait_for_event_deletion(client, event_id, real_db): + try: + client.events.find(event_id) + return False + except NotFoundError: + return True + + +@timeout_loop('event action') +def wait_for_syncback_success(client, real_db, action): + # Waits for the most recent action of the specified type to transition + # to 'successful'. Otherwise, we don't know the test has actually passed. + action_log = real_db.query(ActionLog).filter_by( + table_name='event', + action=action).order_by('created_at desc').first() + if not action_log: + return False + if action_log.status == 'successful': + return True + if action_log.status == 'pending' and action_log.retries > 2: + # Give up after two retries in the test environment. + return False + + +# We define this test function separately from test_event_crud +# because we want to be able to pass different types of accounts +# to it. For instance, test_event_crud here takes a list of +# generic accounts which support calendars but in test_google_events.py +# test_event_crud takes a list of gmail accounts. +# - karim +def real_test_event_crud(client, real_db): + # create an event + ns = client.namespaces[0] + ev = ns.events.create() + ev.calendar_id = ns.calendars[1].id + ev.title = "Rodomontades" + d1 = datetime.datetime.now() + datetime.timedelta(hours=2) + d2 = datetime.datetime.now() + datetime.timedelta(hours=9) + start = int(time.mktime(d1.timetuple())) + end = int(time.mktime(d2.timetuple())) + ev.when = {"start_time": start, "end_time": end} + ev.save() + + wait_for_event(client, ev.id, real_db) + wait_for_syncback_success(client, real_db, 'create_event') + + # now, update it + ev.title = "Renamed title" + ev.participants = [{'email': 'bland@example.com', 'name': 'John Bland'}] + ev.save() + + wait_for_event_rename(client, ev.id, ev.title, real_db) + wait_for_syncback_success(client, real_db, 'update_event') + + # finally, delete it + ns.events.delete(ev.id) + + wait_for_event_deletion(client, ev.id, real_db) + wait_for_syncback_success(client, real_db, 'delete_event') + + +@pytest.mark.parametrize("client", calendar_accounts) +def test_event_crud(client, real_db): + real_test_event_crud(client, real_db) + +if __name__ == '__main__': + pytest.main([__file__]) diff --git a/inbox/test/system/test_google_events.py b/inbox/test/system/test_google_events.py new file mode 100644 index 000000000..cc89ddf8d --- /dev/null +++ b/inbox/test/system/test_google_events.py @@ -0,0 +1,87 @@ +# flake8: noqa: F401, F811 + +# Google calendar-specific event creation tests. +# +# This is a little hackish -- test_events.py defines a bunch of helper +# functions to check that an event appears on the API server. We redefine +# these functions to check instead for changes to the event on +# the Google backend. 
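+# For example, instead of asserting that client.events.find(event_id)
+# returns the event, the wait_for_event helper below lists the events on
+# the event's Google calendar via the provider API and looks for a
+# matching summary.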
+ +import pytest +import random + +from inbox.client.errors import NotFoundError +from inbox.events.google import GoogleEventsProvider +from inbox.models import Account + +from conftest import gmail_accounts, timeout_loop +from test_events import real_db, real_test_event_crud + +random.seed(None) + + +def get_api_access(db_session, email_address): + account = db_session.query(Account).filter( + Account.email_address == email_address).one() + if account is None: + raise Exception(("No account found for email address %s. " + "Are you sure you've authed it?") % email_address) + + return GoogleEventsProvider(account.id, account.namespace.id).\ + _get_google_service() + + +@timeout_loop('event') +def wait_for_event(client, event_id, real_db): + try: + ev = client.events.find(event_id) + cal = client.calendars.find(ev.calendar_id) + api = get_api_access(real_db, client.email_address) + events = api.events().list(calendarId=cal.name).execute() + for event in events['items']: + if event['summary'] == ev.title: + return True + + return False + except NotFoundError: + return False + + +@timeout_loop('event') +def wait_for_event_rename(client, event_id, new_title, real_db): + try: + ev = client.events.find(event_id) + cal = client.calendars.find(ev.calendar_id) + api = get_api_access(real_db, client.email_address) + events = api.events().list(calendarId=cal.name).execute() + for event in events['items']: + if event['summary'] == new_title: + return True + + return False + except NotFoundError: + return False + + +@timeout_loop('event') +def wait_for_event_deletion(client, calendar_id, event_title, real_db): + try: + cal = client.calendars.find(calendar_id) + api = get_api_access(real_db, client.email_address) + events = api.events().list(calendarId=cal.name).execute() + for event in events['items']: + if event['summary'] == event_title: + return False + + return True + except NotFoundError: + return False + + +@pytest.mark.parametrize("client", gmail_accounts) +def test_event_crud(client, real_db): + real_test_event_crud(client, real_db) + + +if __name__ == '__main__': + pytest.main([__file__]) diff --git a/inbox/test/system/test_labels.py b/inbox/test/system/test_labels.py new file mode 100644 index 000000000..7923121b3 --- /dev/null +++ b/inbox/test/system/test_labels.py @@ -0,0 +1,66 @@ +# gmail-specific label handling tests. +import pytest +import random +from datetime import datetime + +from inbox.crispin import writable_connection_pool +from inbox.models.session import session_scope +from inbox.models import Account +from inbox.mailsync.backends.imap.generic import uidvalidity_cb + +from conftest import gmail_accounts, timeout_loop + + +@timeout_loop('tag_add') +def wait_for_tag(client, thread_id, tagname): + thread = client.threads.find(thread_id) + tags = [tag['name'] for tag in thread.tags] + return True if tagname in tags else False + + +@timeout_loop('tag_remove') +def wait_for_tag_removal(client, thread_id, tagname): + thread = client.threads.find(thread_id) + tags = [tag['name'] for tag in thread.tags] + return True if tagname not in tags else False + + +@pytest.mark.parametrize("client", gmail_accounts) +def test_gmail_labels(client): + # test case: create a label on the gmail account + # apply it to a thread. Check that it gets picked up. + # Remove it. Check that it gets picked up. 
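+    # (The label is applied and removed directly over IMAP through a crispin
+    # connection, and wait_for_tag / wait_for_tag_removal then poll the API
+    # client until the change shows up as a tag on the thread.)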
+ thread = random.choice(client.threads.all()) + + account = None + with session_scope() as db_session: + account = db_session.query(Account).filter_by( + email_address=client.email_address).one() + + connection_pool = writable_connection_pool(account.id, pool_size=1) + with connection_pool.get() as crispin_client: + labelname = "custom-label" + datetime.now().strftime("%s.%f") + print "Label: %s" % labelname + + folder_name = crispin_client.folder_names()['all'] + crispin_client.select_folder(folder_name, uidvalidity_cb) + + print "Subject : %s" % thread.subject + uids = crispin_client.search_uids(['SUBJECT', thread.subject]) + g_thrid = crispin_client.g_metadata(uids).items()[0][1].thrid + + crispin_client.add_label(g_thrid, labelname) + wait_for_tag(client, thread.id, labelname) + + draft = client.drafts.create( + to=[{'name': 'Nylas SelfSend', 'email': client.email_address}], + body="Blah, replying to message", + subject=thread.subject) + draft.send() + + crispin_client.remove_label(g_thrid, labelname) + wait_for_tag_removal(client, thread.id, labelname) + + +if __name__ == '__main__': + pytest.main([__file__]) diff --git a/inbox/test/system/test_sending.py b/inbox/test/system/test_sending.py new file mode 100644 index 000000000..6fe8c6166 --- /dev/null +++ b/inbox/test/system/test_sending.py @@ -0,0 +1,119 @@ +# -*- coding: utf-8 -*- +import pytest +from time import strftime +from conftest import timeout_loop, all_accounts +from random_words import random_words +from inbox.util.url import provider_from_address +import json + + +@timeout_loop('send') +def wait_for_send(client, subject): + thread_query = client.threads.where(subject=subject) + + threads = thread_query.all() + + if not threads: + return False + if provider_from_address(client.email_address) not in ['unknown', 'eas']: + # Reconciliation doesn't seem to quite work on EAS because the + # X-INBOX-ID header is stripped? + assert len(threads) == 1, \ + "Warning: Number of threads for unique subject is > 1!" + + tags = [t['name'] for thread in threads for t in thread.tags] + return True if ("sent" in tags and "inbox" in tags) else False + + +@timeout_loop('archive') +def wait_for_archive(client, thread_id): + thread = client.threads.find(thread_id) + tags = [tag["name"] for tag in thread.tags] + return True if ("archive" in tags and "inbox" not in tags) else False + + +@timeout_loop('trash') +def wait_for_trash(client, thread_id): + thread = client.threads.find(thread_id) + tags = [tag['name'] for tag in thread.tags] + return True if ("trash" in tags and "archive" not in tags) else False + + +@pytest.mark.parametrize("client", all_accounts) +def test_sending(client): + # Create a message and send it to ourselves + subject = "%s (Self Send Test)" % strftime("%Y-%m-%d %H:%M:%S") + draft = client.drafts.create(to=[{"email": client.email_address}], + subject=subject, + body=subject + "Test email.") + + body = random_words(sig=client.email_address.split('@')[0]) + + draft = client.drafts.create(to=[{"email": client.email_address}], + subject=subject, + body=body) + draft.send() + wait_for_send(client, subject) + + # Archive the message + thread = client.threads.where(subject=subject, tag='inbox').first() + thread.archive() + wait_for_archive(client, thread.id) + + # Trash the message + # Remove guard when working + if False: + client.threads.first().trash() + wait_for_trash(client, thread.id) + + +# TODO: do these tests even get run?? 
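+# test_multi_sending drives the /send-multiple endpoints: it starts a
+# multi-send session, sends the draft to ourselves with a per-recipient
+# body, then DELETEs the session and checks that the sent and received
+# copies have different bodies.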
+@pytest.mark.parametrize("client", all_accounts) +def test_multi_sending(client): + # Create a message and send it to ourselves, with a different body + subject = "%s (Self Multi Send Test)" % strftime("%Y-%m-%d %H:%M:%S") + sent_body = subject + "Test email." + draft = client.drafts.create(to=[{"email": client.email_address}], + subject=subject, + body=sent_body) + recv_body = subject + "Different body" + + resp = client.session.post('{}/send-multiple'.format(client.api_server)) + assert resp.status_code == 200 + + resp = client.session.post('{}/send-multiple/{}'.format(client.api_server, + draft.id), + data=json.dumps({"body": recv_body, + "send_to": [ + {"email": + client.email_address} + ]})) + assert resp.status_code == 200 + wait_for_send(client, subject) + + resp = client.session.delete('{}/send-multiple/{}' + .format(client.api_server, draft.id)) + assert resp.status_code == 200 + wait_for_send(client, subject) + + # Check that there are two messages, one sent and one recieved, with + # different bodies. + thread = client.threads.where(subject=subject, tag='inbox').first() + assert len(thread.messages) == 2 + assert thread.messages[0].body == recv_body + assert thread.messages[1].body == sent_body + + # Archive the thread + thread = client.threads.where(subject=subject, tag='inbox').first() + thread.archive() + wait_for_archive(client, thread.id) + + # Trash the message + # Remove guard when working + if False: + client.threads.first().trash() + wait_for_trash(client, thread.id) + + +if __name__ == '__main__': + pytest.main([__file__]) diff --git a/inbox/test/transactions/test_action_scheduling.py b/inbox/test/transactions/test_action_scheduling.py new file mode 100644 index 000000000..35c22ad10 --- /dev/null +++ b/inbox/test/transactions/test_action_scheduling.py @@ -0,0 +1,35 @@ +from inbox.models.action_log import schedule_action, ActionLog + +from inbox.test.util.base import add_fake_event + + +def test_action_scheduling(db, default_account): + event = add_fake_event(db.session, default_account.namespace.id) + + schedule_action('create_event', event, default_account.namespace.id, + db.session) + db.session.commit() + + entry = db.session.query(ActionLog).filter( + ActionLog.namespace_id == default_account.namespace.id, + ActionLog.action == 'create_event').one() + + assert entry.discriminator == 'actionlog' + assert entry.table_name == 'event' and entry.record_id == event.id + assert not entry.extra_args + + schedule_action('delete_event', event, default_account.namespace.id, + db.session, event_uid=event.uid, + calendar_name=event.calendar.name, + calendar_uid=event.calendar.uid) + db.session.commit() + + entry = db.session.query(ActionLog).filter( + ActionLog.namespace_id == default_account.namespace.id, + ActionLog.action == 'delete_event').one() + + assert entry.discriminator == 'actionlog' + assert entry.table_name == 'event' and entry.record_id == event.id + assert entry.extra_args == \ + dict(event_uid=event.uid, calendar_name=event.calendar.name, + calendar_uid=event.calendar.uid) diff --git a/inbox/test/transactions/test_delta_sync.py b/inbox/test/transactions/test_delta_sync.py new file mode 100644 index 000000000..6ab48c985 --- /dev/null +++ b/inbox/test/transactions/test_delta_sync.py @@ -0,0 +1,225 @@ +import datetime +import json +import time +from freezegun import freeze_time +from inbox.test.util.base import add_fake_message +from inbox.test.api.base import api_client + +__all__ = ['api_client'] + + +def add_account_with_different_namespace_id(db_session, + 
email_address='cypress@yahoo.com'): + import platform + from inbox.models.backends.generic import GenericAccount + from inbox.models import Namespace + account = GenericAccount(id=11, + email_address=email_address, + sync_host=platform.node(), + desired_sync_host=platform.node(), + provider='yahoo') + account.imap_password = 'bananagrams' + account.smtp_password = 'bananagrams' + account.namespace = Namespace() + db_session.add(account) + db_session.commit() + assert account.namespace.id != account.id + return account + + +def get_cursor(api_client, timestamp): + cursor_response = api_client.post_data('/delta/generate_cursor', + {'start': timestamp}) + return json.loads(cursor_response.data)['cursor'] + + +def test_latest_cursor(api_client): + with freeze_time(datetime.datetime.utcnow()) as freezer: + freezer.tick(datetime.timedelta(seconds=5)) + now = int(time.time()) + + latest_cursor_resp = api_client.post_raw('/delta/latest_cursor', None) + latest_cursor = json.loads(latest_cursor_resp.data)['cursor'] + + now_cursor = get_cursor(api_client, now) + assert latest_cursor == now_cursor + + +def test_invalid_input(api_client): + cursor_response = api_client.post_data('/delta/generate_cursor', + {'start': "I'm not a timestamp!"}) + assert cursor_response.status_code == 400 + + sync_response = api_client.client.get( + '/delta?cursor={}'.format('fake cursor'), + headers=api_client.auth_header) + assert sync_response.status_code == 400 + + +def test_events_are_condensed(api_client, message): + """ + Test that multiple revisions of the same object are rolled up in the + delta response. + + """ + ts = int(time.time() + 22) + cursor = get_cursor(api_client, ts) + + # Modify a message, then modify it again + message_id = api_client.get_data('/messages/')[0]['id'] + message_path = '/messages/{}'.format(message_id) + api_client.put_data(message_path, {'unread': True}) + api_client.put_data(message_path, {'unread': False}) + api_client.put_data(message_path, {'unread': True}) + + # Check that successive modifies are condensed. + sync_data = api_client.get_data('/delta?cursor={}'.format(cursor)) + deltas = sync_data['deltas'] + # A message modify propagates to its thread + message_deltas = [d for d in deltas if d['object'] == 'message'] + assert len(message_deltas) == 1 + + delta = message_deltas[0] + assert delta['object'] == 'message' and delta['event'] == 'modify' + assert delta['attributes']['unread'] is True + + +def test_message_events_are_propagated_to_thread(api_client, message): + """ + Test that a revision to a message's `propagated_attributes` returns a delta + for the message and for its thread. 
+ + """ + ts = int(time.time() + 22) + cursor = get_cursor(api_client, ts) + + message = api_client.get_data('/messages/')[0] + message_id = message['id'] + assert message['unread'] is True + + thread = api_client.get_data('/threads/{}'.format(message['thread_id'])) + assert thread['unread'] is True + + # Modify a `propagated_attribute` of the message + message_path = '/messages/{}'.format(message_id) + api_client.put_data(message_path, {'unread': False}) + + # Verify that a `message` and a `thread` modify delta is returned + sync_data = api_client.get_data('/delta?cursor={}'.format(cursor)) + deltas = sync_data['deltas'] + assert len(deltas) == 2 + + message_deltas = [d for d in deltas if d['object'] == 'message'] + assert len(message_deltas) == 1 + delta = message_deltas[0] + assert delta['object'] == 'message' and delta['event'] == 'modify' + assert delta['attributes']['unread'] is False + + thread_deltas = [d for d in deltas if d['object'] == 'thread'] + assert len(thread_deltas) == 1 + delta = thread_deltas[0] + assert delta['object'] == 'thread' and delta['event'] == 'modify' + assert delta['attributes']['unread'] is False + assert delta['attributes']['version'] == thread['version'] + 1 + + +def test_handle_missing_objects(api_client, db, thread, default_namespace): + ts = int(time.time() + 22) + cursor = get_cursor(api_client, ts) + + messages = [] + for _ in range(100): + messages.append(add_fake_message(db.session, default_namespace.id, + thread)) + for message in messages: + db.session.delete(message) + db.session.commit() + sync_data = api_client.get_data('/delta?cursor={}&exclude_types=thread'. + format(cursor)) + assert len(sync_data['deltas']) == 100 + assert all(delta['event'] == 'delete' for delta in sync_data['deltas']) + + +def test_exclude_account(api_client, db, default_namespace, thread): + ts = int(time.time() + 22) + cursor = get_cursor(api_client, ts) + + # Create `account`, `message`, `thread` deltas + default_namespace.account.sync_state = 'invalid' + db.session.commit() + add_fake_message(db.session, default_namespace.id, thread) + + # Verify the default value of `exclude_account`=True and + # the account delta is *not* included + sync_data = api_client.get_data('/delta?cursor={}'.format(cursor)) + assert len(sync_data['deltas']) == 2 + assert set([d['object'] for d in sync_data['deltas']]) == \ + set(['message', 'thread']) + + # Verify setting `exclude_account`=True returns the account delta as well. + sync_data = api_client.get_data('/delta?cursor={}&exclude_account=false'. + format(cursor)) + assert len(sync_data['deltas']) == 3 + assert set([d['object'] for d in sync_data['deltas']]) == \ + set(['message', 'thread', 'account']) + + +def test_account_delta(api_client, db, default_namespace): + ts = int(time.time() + 22) + cursor = get_cursor(api_client, ts) + + account = default_namespace.account + + # Create an `account` delta + default_namespace.account.sync_state = 'invalid' + db.session.commit() + + sync_data = api_client.get_data('/delta?cursor={}&exclude_account=false'. 
+ format(cursor)) + assert len(sync_data['deltas']) == 1 + delta = sync_data['deltas'][0] + assert delta['object'] == 'account' + assert delta['event'] == 'modify' + assert delta['attributes']['id'] == default_namespace.public_id + assert delta['attributes']['account_id'] == default_namespace.public_id + assert delta['attributes']['email_address'] == account.email_address + assert delta['attributes']['name'] == account.name + assert delta['attributes']['provider'] == account.provider + assert delta['attributes']['organization_unit'] == account.category_type + assert delta['attributes']['sync_state'] == 'invalid' + + cursor = sync_data['cursor_end'] + + # Create an new `account` delta + default_namespace.account.sync_state = 'running' + db.session.commit() + sync_data = api_client.get_data('/delta?cursor={}&exclude_account=false'. + format(cursor)) + + assert len(sync_data['deltas']) == 1 + delta = sync_data['deltas'][0] + assert delta['object'] == 'account' + assert delta['event'] == 'modify' + assert delta['attributes']['id'] == default_namespace.public_id + assert delta['attributes']['sync_state'] == 'running' + + +def test_account_delta_for_different_namespace_id(db): + from inbox.transactions.delta_sync import format_transactions_after_pointer + + account = add_account_with_different_namespace_id(db.session) + namespace = account.namespace + + # Create an `account` delta + account.sync_state = 'invalid' + db.session.commit() + + # Verify `account` delta is not returned when exclude_account=True + txns, _ = format_transactions_after_pointer(namespace, 0, db.session, 10, + exclude_account=True) + assert not txns + + # Verify `account` delta is returned when exclude_account=False + txns, _ = format_transactions_after_pointer(namespace, 0, db.session, 10, + exclude_account=False) + assert txns diff --git a/inbox/test/transactions/test_thread_versioning.py b/inbox/test/transactions/test_thread_versioning.py new file mode 100644 index 000000000..7981f1171 --- /dev/null +++ b/inbox/test/transactions/test_thread_versioning.py @@ -0,0 +1,50 @@ +from inbox.test.util.base import add_fake_message, add_fake_category + + +def test_adding_and_removing_message_on_thread_increments_version( + db, thread, default_namespace): + assert thread.version == 0 + message = add_fake_message(db.session, default_namespace.id, thread) + assert thread.version == 1 + thread.messages.remove(message) + db.session.commit() + assert thread.version == 2 + + +def test_updating_message_read_starred_increments_version( + db, thread, default_namespace): + assert thread.version == 0 + + message = add_fake_message(db.session, default_namespace.id, thread) + assert thread.version == 1 + + # Modifying a non-propagated attribute does /not/ increment thread.version + # (Non-propagated attributes on non-draft messages are technically + # never modified) + message.subject = 'Jen nova temo' + db.session.commit() + assert thread.version == 1 + + # Modifying message.is_read /is_starred increments the thread.version + message.is_read = not message.is_read + db.session.commit() + assert thread.version == 2 + + message.is_starred = not message.is_starred + db.session.commit() + assert thread.version == 3 + + +def test_updating_message_categories_increments_version( + db, thread, default_namespace): + assert thread.version == 0 + + message = add_fake_message(db.session, default_namespace.id, thread) + category = add_fake_category(db.session, default_namespace.id, + 'mia kategorio') + + # Modifying message's categories increments the 
thread.version + message.categories = [category] + db.session.commit() + + assert thread.version == 2 diff --git a/inbox/test/transactions/test_transaction_creation.py b/inbox/test/transactions/test_transaction_creation.py new file mode 100644 index 000000000..5b684751a --- /dev/null +++ b/inbox/test/transactions/test_transaction_creation.py @@ -0,0 +1,343 @@ +from datetime import datetime + +from sqlalchemy import desc +from flanker import mime + +from inbox.models import Transaction, AccountTransaction, Calendar +from inbox.models.mixins import HasRevisions +from inbox.models.util import transaction_objects + +from inbox.test.util.base import (add_fake_message, add_fake_thread, add_fake_event, + add_fake_category) + + +def get_latest_transaction(db_session, object_type, record_id, namespace_id): + return db_session.query(Transaction).filter( + Transaction.namespace_id == namespace_id, + Transaction.object_type == object_type, + Transaction.record_id == record_id). \ + order_by(desc(Transaction.id)).first() + + +def get_latest_transaction_any(db_session, namespace_id): + return db_session.query(Transaction).filter( + Transaction.namespace_id == namespace_id).\ + order_by(desc(Transaction.id)).first() + + +def test_thread_insert_creates_transaction(db, default_namespace): + thr = add_fake_thread(db.session, default_namespace.id) + transaction = get_latest_transaction(db.session, 'thread', thr.id, + default_namespace.id) + assert transaction.command == 'insert' + + +def test_message_insert_creates_transaction(db, default_namespace): + with db.session.no_autoflush: + thr = add_fake_thread(db.session, default_namespace.id) + msg = add_fake_message(db.session, default_namespace.id, thr) + transaction = get_latest_transaction(db.session, 'message', msg.id, + default_namespace.id) + assert transaction.command == 'insert' + + # Test that the thread gets revised too + transaction = get_latest_transaction(db.session, 'thread', thr.id, + default_namespace.id) + assert transaction.command == 'update' + + +def test_message_updates_create_transaction(db, default_namespace): + with db.session.no_autoflush: + thr = add_fake_thread(db.session, default_namespace.id) + msg = add_fake_message(db.session, default_namespace.id, thr) + + msg.is_read = True + db.session.commit() + transaction = get_latest_transaction(db.session, 'message', msg.id, + default_namespace.id) + assert transaction.record_id == msg.id + assert transaction.object_type == 'message' + assert transaction.command == 'update' + + +def test_message_updates_create_thread_transaction(db, default_namespace): + with db.session.no_autoflush: + thr = add_fake_thread(db.session, default_namespace.id) + msg = add_fake_message(db.session, default_namespace.id, thr) + + transaction = get_latest_transaction(db.session, 'thread', thr.id, + default_namespace.id) + assert (transaction.record_id == thr.id and + transaction.object_type == 'thread') + assert transaction.command == 'update' + + # An update to one of the message's propagated_attributes creates a + # revision for the thread + msg.is_read = True + db.session.commit() + + new_transaction = get_latest_transaction(db.session, 'thread', thr.id, + default_namespace.id) + assert new_transaction.id != transaction.id + assert (new_transaction.record_id == thr.id and + new_transaction.object_type == 'thread') + assert new_transaction.command == 'update' + + # An update to one of its other attributes does not + msg.subject = 'Ice cubes and dogs' + db.session.commit() + + same_transaction = 
get_latest_transaction(db.session, 'thread', thr.id, + default_namespace.id) + assert same_transaction.id == new_transaction.id + + +def test_message_category_updates_create_transaction(db, default_namespace): + with db.session.no_autoflush: + thr = add_fake_thread(db.session, default_namespace.id) + msg = add_fake_message(db.session, default_namespace.id, thr) + cat = add_fake_category(db.session, default_namespace.id, 'category') + thread_trx_before_category_change = get_latest_transaction( + db.session, 'thread', thr.id, default_namespace.id) + + msg.categories = [cat] + db.session.commit() + latest_message_trx = get_latest_transaction( + db.session, 'message', msg.id, default_namespace.id) + thread_trx_after_category_change = get_latest_transaction( + db.session, 'thread', thr.id, default_namespace.id) + + assert latest_message_trx.command == 'update' + assert thread_trx_before_category_change.id != \ + thread_trx_after_category_change.id + + +def test_object_type_distinguishes_messages_and_drafts(db, default_namespace): + with db.session.no_autoflush: + thr = add_fake_thread(db.session, default_namespace.id) + msg = add_fake_message(db.session, default_namespace.id, thr) + msg.is_draft = 1 + db.session.commit() + transaction = get_latest_transaction(db.session, 'draft', msg.id, + default_namespace.id) + assert transaction.command == 'update' + db.session.delete(msg) + db.session.commit() + transaction = get_latest_transaction(db.session, 'draft', msg.id, + default_namespace.id) + assert transaction.command == 'delete' + + +def test_event_insert_creates_transaction(db, default_namespace): + with db.session.no_autoflush: + event = add_fake_event(db.session, default_namespace.id) + transaction = get_latest_transaction(db.session, 'event', + event.id, default_namespace.id) + assert transaction.record_id == event.id + assert transaction.object_type == 'event' + assert transaction.command == 'insert' + + +def test_transactions_created_for_calendars(db, default_namespace): + calendar = Calendar( + namespace_id=default_namespace.id, + name='New Calendar', + uid='uid') + db.session.add(calendar) + db.session.commit() + transaction = get_latest_transaction(db.session, 'calendar', + calendar.id, default_namespace.id) + assert transaction.record_id == calendar.id + assert transaction.object_type == 'calendar' + assert transaction.command == 'insert' + + calendar.name = 'Updated Calendar' + db.session.commit() + transaction = get_latest_transaction(db.session, 'calendar', + calendar.id, default_namespace.id) + assert transaction.record_id == calendar.id + assert transaction.object_type == 'calendar' + assert transaction.command == 'update' + + db.session.delete(calendar) + db.session.commit() + transaction = get_latest_transaction(db.session, 'calendar', + calendar.id, default_namespace.id) + assert transaction.record_id == calendar.id + assert transaction.object_type == 'calendar' + assert transaction.command == 'delete' + + +def test_file_transactions(db, default_namespace): + from inbox.models.message import Message + + account = default_namespace.account + thread = add_fake_thread(db.session, default_namespace.id) + mime_msg = mime.create.multipart('mixed') + mime_msg.append( + mime.create.text('plain', 'This is a message with attachments'), + mime.create.attachment('image/png', 'filler', 'attached_image.png', + 'attachment'), + mime.create.attachment('application/pdf', 'filler', + 'attached_file.pdf', 'attachment') + ) + msg = Message.create_from_synced(account, 22, '[Gmail]/All Mail', + 
datetime.utcnow(), mime_msg.to_string()) + msg.thread = thread + db.session.add(msg) + db.session.commit() + + assert len(msg.parts) == 2 + assert all(part.content_disposition == 'attachment' for part in msg.parts) + + block_ids = [part.block.id for part in msg.parts] + + with db.session.no_autoflush: + transaction = get_latest_transaction(db.session, 'file', block_ids[0], + default_namespace.id) + assert transaction.command == 'insert' + + transaction = get_latest_transaction(db.session, 'file', block_ids[1], + default_namespace.id) + assert transaction.command == 'insert' + + +def test_account_transactions(db, default_namespace): + account = default_namespace.account + + transaction = get_latest_transaction(db.session, 'account', account.id, + default_namespace.id) + assert transaction.command == 'insert' + transaction_id = transaction.id + + with db.session.no_autoflush: + account.last_synced_events = datetime.utcnow() + db.session.commit() + transaction = get_latest_transaction(db.session, 'account', account.id, + default_namespace.id) + assert transaction.id == transaction_id + + account.sync_state = 'invalid' + db.session.commit() + transaction = get_latest_transaction(db.session, 'account', account.id, + default_namespace.id) + assert transaction.id != transaction_id + assert transaction.command == 'update' + + account.sync_host = 'anewhost' + db.session.commit() + same_transaction = get_latest_transaction(db.session, 'account', + account.id, + default_namespace.id) + assert same_transaction.id == transaction.id + + +def test_object_deletions_create_transaction(db, default_namespace): + with db.session.no_autoflush: + thr = add_fake_thread(db.session, default_namespace.id) + msg = add_fake_message(db.session, default_namespace.id, thr) + db.session.delete(msg) + db.session.commit() + transaction = get_latest_transaction(db.session, 'message', msg.id, + default_namespace.id) + assert transaction.record_id == msg.id + assert transaction.object_type == 'message' + assert transaction.command == 'delete' + + db.session.delete(thr) + db.session.commit() + transaction = get_latest_transaction(db.session, 'thread', thr.id, + default_namespace.id) + assert transaction.record_id == thr.id + assert transaction.object_type == 'thread' + assert transaction.command == 'delete' + + +def test_transaction_creation_for_self_referential_message_relationship( + db, default_namespace): + # Make sure that updating the self-refential relationship + # `Message.reply_to_message` does not create a spurious update delta for + # the parent message. + thr = add_fake_thread(db.session, default_namespace.id) + msg = add_fake_message(db.session, default_namespace.id, thr) + reply = add_fake_message(db.session, default_namespace.id, thr) + reply.reply_to_message = msg + db.session.commit() + assert reply.reply_to_message_id is not None + assert msg.reply_to_message_id is None + transaction = get_latest_transaction(db.session, 'message', msg.id, + default_namespace.id) + assert transaction.record_id == msg.id + assert transaction.object_type == 'message' + assert transaction.command == 'insert' + + +def test_transaction_objects_mapped_for_all_models(db, default_namespace): + """ + Test that all subclasses of HasRevisions are mapped by the + transaction_objects() function. 
+ + """ + assert set(HasRevisions.__subclasses__()).issubset( + transaction_objects().values()) + + +def test_accounttransactions(db, default_namespace): + account = default_namespace.account + + transaction = get_latest_transaction(db.session, 'account', + default_namespace.account.id, + default_namespace.id) + assert transaction.command == 'insert' + transaction_id = transaction.id + + # Verify an AccountTransaction is created + accounttransactions = db.session.query(AccountTransaction).filter( + AccountTransaction.namespace_id == default_namespace.id).all() + assert len(accounttransactions) == 1 + accounttransaction = accounttransactions[0] + assert accounttransaction.namespace_id == default_namespace.id + assert accounttransaction.command == 'insert' + assert accounttransaction.object_type == 'account' + assert accounttransaction.record_id == default_namespace.account.id + accounttransaction_id = accounttransaction.id + + with db.session.no_autoflush: + # No Transaction or AccountTransaction records created + + account.last_synced_events = datetime.utcnow() + db.session.commit() + transaction = get_latest_transaction(db.session, 'account', + default_namespace.account.id, + default_namespace.id) + assert transaction.id == transaction_id + accounttransactions = db.session.query(AccountTransaction).filter( + AccountTransaction.namespace_id == default_namespace.id).all() + assert len(accounttransactions) == 1 + assert accounttransactions[0].id == accounttransaction_id + + # Only Transaction record created + + thread = add_fake_thread(db.session, default_namespace.id) + transaction = get_latest_transaction(db.session, 'thread', thread.id, + default_namespace.id) + assert transaction.id > transaction_id + accounttransactions = db.session.query(AccountTransaction).filter( + AccountTransaction.namespace_id == default_namespace.id).all() + assert len(accounttransactions) == 1 + assert accounttransactions[0].id == accounttransaction_id + + # Both Transaction or AccountTransaction records created + + account.sync_state = 'invalid' + db.session.commit() + transaction = get_latest_transaction(db.session, 'account', + default_namespace.account.id, + default_namespace.id) + assert transaction.id > transaction_id + assert transaction.command == 'update' + accounttransactions = db.session.query(AccountTransaction).filter( + AccountTransaction.namespace_id == default_namespace.id).all() + assert len(accounttransactions) == 2 + assert accounttransactions[1].id != accounttransaction_id + assert accounttransactions[1].command == 'update' diff --git a/inbox/test/transactions/test_transaction_deletion.py b/inbox/test/transactions/test_transaction_deletion.py new file mode 100644 index 000000000..54fe37b79 --- /dev/null +++ b/inbox/test/transactions/test_transaction_deletion.py @@ -0,0 +1,78 @@ +import random +import uuid +from datetime import datetime, timedelta + +from sqlalchemy import desc + +from inbox.models import Transaction +from inbox.models.util import purge_transactions + + +def get_latest_transaction(db_session, namespace_id): + return db_session.query(Transaction).filter( + Transaction.namespace_id == namespace_id).\ + order_by(desc(Transaction.id)).first() + + +def create_transaction(db, created_at, namespace_id): + t = Transaction(created_at=created_at, + namespace_id=namespace_id, + object_type='message', + command='insert', + record_id=random.randint(1, 9999), + object_public_id=uuid.uuid4().hex) + db.session.add(t) + db.session.commit() + return t + + +def format_datetime(dt): + return 
"'{}'".format(dt.strftime('%Y-%m-%d %H:%M:%S')) + + +def test_transaction_deletion(db, default_namespace): + # Test that transaction deletion respects the days_ago + # parameter. Arbitrarily chose 30 days for `days_ago` + now = datetime.now() + # Transactions created less than 30 days ago should not be deleted + t0 = create_transaction(db, now, default_namespace.id) + create_transaction(db, now - timedelta(days=29), default_namespace.id) + create_transaction(db, now - timedelta(days=30), default_namespace.id) + + # Transactions older than 30 days should be deleted + for i in xrange(10): + create_transaction(db, now - timedelta(days=31 + i), + default_namespace.id) + + shard_id = (default_namespace.id >> 48) + query = "SELECT count(id) FROM transaction WHERE namespace_id={}".\ + format(default_namespace.id) + all_transactions = db.session.execute(query).scalar() + date_query = ("SELECT count(id) FROM transaction WHERE created_at < " + "DATE_SUB({}, INTERVAL 30 day)").format(format_datetime(now)) + older_than_thirty_days = db.session.execute(date_query).scalar() + + # Ensure no transactions are deleted during a dry run + purge_transactions(shard_id, days_ago=30, dry_run=True, now=now) + assert db.session.execute(query).scalar() == all_transactions + + # Delete all transactions older than 30 days + purge_transactions(shard_id, days_ago=30, dry_run=False, now=now) + assert all_transactions - older_than_thirty_days == \ + db.session.execute(query).scalar() + + query = "SELECT count(id) FROM transaction WHERE namespace_id={}".\ + format(default_namespace.id) + all_transactions = db.session.execute(query).scalar() + + date_query = ("SELECT count(id) FROM transaction WHERE created_at < " + "DATE_SUB({}, INTERVAL 1 day)").format(format_datetime(now)) + older_than_one_day = db.session.execute(date_query).scalar() + # Delete all transactions older than 1 day + purge_transactions(shard_id, days_ago=1, dry_run=False, now=now) + assert all_transactions - older_than_one_day == \ + db.session.execute(query).scalar() + + latest_transaction = get_latest_transaction(db.session, + default_namespace.id) + assert latest_transaction.id == t0.id diff --git a/inbox/test/util/__init__.py b/inbox/test/util/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/inbox/test/util/base.py b/inbox/test/util/base.py new file mode 100644 index 000000000..233d930aa --- /dev/null +++ b/inbox/test/util/base.py @@ -0,0 +1,669 @@ +import json +import mock +import os +import uuid +from datetime import datetime, timedelta + +from pytest import fixture, yield_fixture +from flanker import mime +from mockredis import mock_strict_redis_client + +from inbox.util.testutils import setup_test_db, MockIMAPClient # noqa + + +def absolute_path(path): + """ + Returns the absolute path for a path specified as relative to the + tests/ directory, needed for the dump file name in config.cfg + + """ + return os.path.abspath( + os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', path)) + + +def make_config(tmpdir_factory): + from inbox.config import config + assert 'NYLAS_ENV' in os.environ and \ + os.environ['NYLAS_ENV'] == 'test', \ + "NYLAS_ENV must be 'test' to run tests" + # don't try to write test data to the module tree + config['MSG_PARTS_DIRECTORY'] = str(tmpdir_factory.mktemp("parts")) + return config + + +@fixture(scope='session', autouse=True) +def config(tmpdir_factory): + return make_config(tmpdir_factory) + + +@fixture(scope='session') +def dbloader(config): + setup_test_db() + + 
+@yield_fixture(scope='function') +def db(dbloader): + from inbox.ignition import engine_manager + from inbox.models.session import new_session + engine = engine_manager.get_for_id(0) + # TODO(emfree): tests should really either instantiate their own sessions, + # or take a fixture that is itself a session. + engine.session = new_session(engine) + yield engine + engine.session.close() + + +@yield_fixture(scope='function') +def empty_db(config): + from inbox.ignition import engine_manager + from inbox.models.session import new_session + setup_test_db() + engine = engine_manager.get_for_id(0) + engine.session = new_session(engine) + yield engine + engine.session.close() + + +@yield_fixture +def test_client(db): + from inbox.api.srv import app + app.config['TESTING'] = True + with app.test_client() as c: + yield c + + +@yield_fixture +def webhooks_client(db): + from inbox.api.srv import app + app.config['TESTING'] = True + with app.test_client() as c: + yield TestWebhooksClient(c) + + +class TestWebhooksClient(object): + + def __init__(self, test_client): + self.client = test_client + + def post_data(self, path, data, headers={}): + path = '/w' + path + return self.client.post(path, data=json.dumps(data), headers=headers) + + +@fixture +def patch_network_functions(monkeypatch): + """ + Monkeypatch syncback functions that actually talk to Gmail so that the + tests can run faster. + + """ + import inbox.actions.backends + for backend in inbox.actions.backends.module_registry.values(): + for method_name in backend.__all__: + monkeypatch.setattr(backend.__name__ + '.' + method_name, + lambda *args, **kwargs: None) + + +def make_default_account(db, config): + import platform + from inbox.models.backends.gmail import GmailAccount + from inbox.models.backends.gmail import GmailAuthCredentials + from inbox.auth.gmail import OAUTH_SCOPE + from inbox.models import Namespace + + ns = Namespace() + account = GmailAccount( + sync_host='{}:{}'.format(platform.node(), 0), + email_address='inboxapptest@gmail.com') + account.namespace = ns + account.create_emailed_events_calendar() + account.refresh_token = 'faketoken' + + auth_creds = GmailAuthCredentials() + auth_creds.client_id = config.get_required('GOOGLE_OAUTH_CLIENT_ID') + auth_creds.client_secret = \ + config.get_required('GOOGLE_OAUTH_CLIENT_SECRET') + auth_creds.refresh_token = 'faketoken' + auth_creds.g_id_token = 'foo' + auth_creds.created_at = datetime.utcnow() + auth_creds.updated_at = datetime.utcnow() + auth_creds.gmailaccount = account + auth_creds.scopes = OAUTH_SCOPE + + db.session.add(account) + db.session.add(auth_creds) + db.session.commit() + return account + + +def delete_default_accounts(db): + from inbox.models.backends.gmail import GmailAccount + from inbox.models.backends.gmail import GmailAuthCredentials + from inbox.models import Namespace + delete_messages(db.session) + db.session.rollback() + db.session.query(GmailAccount).delete() + db.session.query(GmailAuthCredentials).delete() + db.session.query(Namespace).delete() + db.session.commit() + + +@yield_fixture(scope='function') +def default_account(db, config, redis_mock): + yield make_default_account(db, config) + delete_default_accounts(db) + + +@yield_fixture(scope='function') +def default_namespace(db, default_account): + yield default_account.namespace + + +@yield_fixture(scope='function') +def default_accounts(db, config, redis_mock): + yield [make_default_account(db, config) for _ in range(3)] + delete_default_accounts(db) + + +@yield_fixture(scope='function') +def 
default_namespaces(db, default_accounts): + yield [account.namespace for account in default_accounts] + + +@yield_fixture(scope='function') +def generic_account(db): + yield add_generic_imap_account(db.session) + + +@yield_fixture(scope='function') +def gmail_account(db): + yield add_fake_gmail_account(db.session, + email_address='almondsunshine', + refresh_token='tearsofgold', + password='COyPtHmj9E9bvGdN') + delete_gmail_accounts(db.session) + + +@fixture(scope='function') +def contact_sync(config, db, default_account): + from inbox.contacts.remote_sync import ContactSync + return ContactSync('inboxapptest@gmail.com', 'gmail', default_account.id, + default_account.namespace.id) + + +@fixture(scope='function') +def contacts_provider(config, db): + return ContactsProviderStub() + + +class ContactsProviderStub(object): + """ + Contacts provider stub to stand in for an actual provider. + When an instance's get_items() method is called, return an iterable of + Contact objects corresponding to the data it's been fed via + supply_contact(). + + """ + + def __init__(self, provider_name='test_provider'): + self._contacts = [] + self._next_uid = 1 + self.PROVIDER_NAME = provider_name + self._get_next_uid = lambda current: current + 1 + + def supply_contact(self, name, email_address, deleted=False): + from inbox.models import Contact + self._contacts.append(Contact(namespace_id=1, + uid=str(self._next_uid), + provider_name=self.PROVIDER_NAME, + name=name, + email_address=email_address, + deleted=deleted)) + self._next_uid = self._get_next_uid(self._next_uid) + + def get_items(self, *args, **kwargs): + return self._contacts + + +def add_fake_folder(db_session, default_account, display_name='All Mail', + name='all'): + from inbox.models.folder import Folder + return Folder.find_or_create(db_session, default_account, display_name, name) + + +def add_fake_label(db_session, default_account, display_name='My Label', + name=None): + from inbox.models.label import Label + return Label.find_or_create(db_session, default_account, display_name, name) + + +def add_generic_imap_account(db_session, email_address='test@nylas.com'): + import platform + from inbox.models.backends.generic import GenericAccount + from inbox.models import Namespace + account = GenericAccount(email_address=email_address, + sync_host=platform.node(), + provider='custom') + account.imap_endpoint = ('imap.custom.com', 993) + account.smtp_endpoint = ('smtp.custom.com', 587) + account.imap_password = 'bananagrams' + account.smtp_password = 'bananagrams' + account.namespace = Namespace() + db_session.add(account) + db_session.commit() + return account + + +def delete_generic_imap_accounts(db_session): + from inbox.models.backends.generic import GenericAccount + from inbox.models import Namespace + db_session.rollback() + db_session.query(GenericAccount).delete() + db_session.query(Namespace).delete() + db_session.commit() + + +def add_fake_yahoo_account(db_session, email_address='cypresstest@yahoo.com'): + import platform + from inbox.models.backends.generic import GenericAccount + from inbox.models import Namespace + account = GenericAccount(email_address=email_address, + sync_host=platform.node(), + provider='yahoo') + account.imap_password = 'bananagrams' + account.smtp_password = 'bananagrams' + account.namespace = Namespace() + db_session.add(account) + db_session.commit() + return account + + +def add_fake_gmail_account(db_session, email_address='test@nilas.com', + refresh_token='tearsofgold', + password='COyPtHmj9E9bvGdN'): + from 
inbox.models import Namespace + from inbox.models.backends.gmail import GmailAccount + import platform + + with db_session.no_autoflush: + namespace = Namespace() + + account = GmailAccount( + email_address=email_address, + refresh_token=refresh_token, + sync_host=platform.node(), + namespace=namespace) + account.password = password + + db_session.add(account) + db_session.commit() + return account + + +def delete_gmail_accounts(db_session): + from inbox.models import Namespace + from inbox.models.backends.gmail import GmailAccount + db_session.rollback() + db_session.query(GmailAccount).delete() + db_session.query(Namespace).delete() + db_session.commit() + + +def add_fake_message(db_session, namespace_id, thread=None, from_addr=None, + to_addr=None, cc_addr=None, bcc_addr=None, + received_date=None, subject='', + body='', snippet='', g_msgid=None, + add_sent_category=False): + from inbox.models import Message, Category + from inbox.contacts.process_mail import update_contacts_from_message + m = Message() + m.namespace_id = namespace_id + m.from_addr = from_addr or [] + m.to_addr = to_addr or [] + m.cc_addr = cc_addr or [] + m.bcc_addr = bcc_addr or [] + m.received_date = received_date or datetime.utcnow() + m.size = 0 + m.is_read = False + m.is_starred = False + m.body = body + m.snippet = snippet + m.subject = subject + m.g_msgid = g_msgid + + if thread: + thread.messages.append(m) + update_contacts_from_message(db_session, m, thread.namespace) + + db_session.add(m) + db_session.commit() + + if add_sent_category: + category = Category.find_or_create( + db_session, namespace_id, 'sent', 'sent', type_='folder') + if category not in m.categories: + m.categories.add(category) + db_session.commit() + + return m + + +def delete_messages(db_session): + from inbox.models import Message + db_session.rollback() + db_session.query(Message).update({'reply_to_message_id': None}) + db_session.query(Message).delete() + db_session.commit() + + +def delete_categories(db_session): + from inbox.models import Category + db_session.rollback() + db_session.query(Category).delete() + db_session.commit() + + +def add_fake_thread(db_session, namespace_id): + from inbox.models import Thread + dt = datetime.utcnow() + thr = Thread(subjectdate=dt, recentdate=dt, namespace_id=namespace_id) + db_session.add(thr) + db_session.commit() + return thr + + +def delete_threads(db_session): + from inbox.models import Thread + delete_messages(db_session) + db_session.rollback() + db_session.query(Thread).delete() + db_session.commit() + + +def add_fake_imapuid(db_session, account_id, message, folder, msg_uid): + from inbox.models.backends.imap import ImapUid + imapuid = ImapUid(account_id=account_id, + message=message, + folder=folder, + msg_uid=msg_uid) + db_session.add(imapuid) + db_session.commit() + return imapuid + + +def delete_imapuids(db_session): + from inbox.models.backends.imap import ImapUid + db_session.rollback() + db_session.query(ImapUid).delete() + db_session.commit() + + +def add_fake_calendar(db_session, namespace_id, name="Cal", + description="A Calendar", uid="UID", read_only=False): + from inbox.models import Calendar + calendar = Calendar(namespace_id=namespace_id, + name=name, + description=description, + uid=uid, + read_only=read_only) + db_session.add(calendar) + db_session.commit() + return calendar + + +def delete_calendars(db_session): + from inbox.models import Calendar + db_session.rollback() + db_session.query(Calendar).delete() + db_session.commit() + + +def add_fake_event(db_session, 
namespace_id, calendar=None, + title='title', description='', location='', + busy=False, read_only=False, reminders='', recurrence='', + start=None, end=None, all_day=False): + from inbox.models import Event + start = start or datetime.utcnow() + end = end or (datetime.utcnow() + timedelta(seconds=1)) + calendar = calendar or add_fake_calendar(db_session, namespace_id) + event = Event(namespace_id=namespace_id, + calendar=calendar, + title=title, + description=description, + location=location, + busy=busy, + read_only=read_only, + reminders=reminders, + recurrence=recurrence, + start=start, + end=end, + all_day=all_day, + raw_data='', + uid=str(uuid.uuid4())) + event.sequence_number = 0 + db_session.add(event) + db_session.commit() + return event + + +def delete_events(db_session): + from inbox.models import Event + db_session.rollback() + db_session.query(Event).delete() + db_session.commit() + + +def add_fake_contact(db_session, namespace_id, name='Ben Bitdiddle', + email_address='inboxapptest@gmail.com', uid='22'): + from inbox.models import Contact + contact = Contact(namespace_id=namespace_id, + name=name, + email_address=email_address, + uid=uid) + + db_session.add(contact) + db_session.commit() + return contact + + +def delete_contacts(db_session): + from inbox.models import Contact + db_session.rollback() + db_session.query(Contact).delete() + db_session.commit() + + +def add_fake_category(db_session, namespace_id, display_name, name=None): + from inbox.models import Category + category = Category(namespace_id=namespace_id, + display_name=display_name, + name=name) + db_session.add(category) + db_session.commit() + return category + + +@yield_fixture +def thread(db, default_namespace): + yield add_fake_thread(db.session, default_namespace.id) + delete_threads(db.session) + + +@yield_fixture +def message(db, default_namespace, thread): + yield add_fake_message(db.session, default_namespace.id, thread) + delete_messages(db.session) + + +@fixture +def folder(db, default_account): + from inbox.models.folder import Folder + return Folder.find_or_create(db.session, default_account, + '[Gmail]/All Mail', 'all') + + +@fixture +def label(db, default_account): + from inbox.models import Label + return Label.find_or_create(db.session, default_account, + 'Inbox', 'inbox') + + +@fixture +def custom_label(db, default_account): + from inbox.models import Label + return Label.find_or_create(db.session, default_account, + 'Kraftwerk', '') + + +@yield_fixture +def contact(db, default_account): + yield add_fake_contact(db.session, default_account.namespace.id) + delete_contacts(db.session) + + +@yield_fixture +def imapuid(db, default_account, message, folder): + yield add_fake_imapuid(db.session, default_account.id, message, + folder, 2222) + delete_imapuids(db.session) + + +@yield_fixture(scope='function') +def calendar(db, default_account): + yield add_fake_calendar(db.session, default_account.namespace.id) + delete_calendars(db.session) + + +@yield_fixture(scope='function') +def other_calendar(db, default_account): + yield add_fake_calendar(db.session, default_account.namespace.id, + uid='uid2', name='Calendar 2') + delete_calendars(db.session) + + +@yield_fixture(scope='function') +def event(db, default_account): + yield add_fake_event(db.session, default_account.namespace.id) + delete_events(db.session) + delete_calendars(db.session) + + +@yield_fixture(scope='function') +def imported_event(db, default_account, message): + ev = add_fake_event(db.session, default_account.namespace.id) + 
ev.message = message + message.from_addr = [['Mick Taylor', 'mick@example.com']] + ev.owner = 'Mick Taylor ' + ev.participants = [{"email": "inboxapptest@gmail.com", + "name": "Inbox Apptest", "status": "noreply"}] + db.session.commit() + yield ev + delete_events(db.session) + delete_calendars(db.session) + + +@fixture +def mime_message(): + msg = mime.create.multipart('alternative') + msg.append( + mime.create.text('plain', 'Hello World!'), + mime.create.text('html', 'Hello World!') + ) + msg.headers['To'] = 'Alice ' + msg.headers['Cc'] = 'Bob ' + msg.headers['Subject'] = 'Hello' + return msg + + +@fixture +def new_message_from_synced(db, default_account, mime_message): + from inbox.models import Message + received_date = datetime(2014, 9, 22, 17, 25, 46) + new_msg = Message.create_from_synced(default_account, + 139219, + '[Gmail]/All Mail', + received_date, + mime_message.to_string()) + assert new_msg.received_date == received_date + new_msg.is_read = True + new_msg.is_starred = False + return new_msg + + +def add_fake_msg_with_calendar_part(db_session, account, ics_str, thread=None): + from inbox.models import Message + parsed = mime.create.multipart('mixed') + parsed.append( + mime.create.attachment('text/calendar', + ics_str, + disposition=None) + ) + msg = Message.create_from_synced( + account, 22, '[Gmail]/All Mail', datetime.utcnow(), parsed.to_string()) + msg.from_addr = [('Ben Bitdiddle', 'ben@inboxapp.com')] + + if thread is None: + msg.thread = add_fake_thread(db_session, account.namespace.id) + else: + msg.thread = thread + + assert msg.has_attached_events + return msg + + +@yield_fixture +def mock_gevent_sleep(monkeypatch): + monkeypatch.setattr('gevent.sleep', mock.Mock()) + yield + monkeypatch.undo() + + +def mock_client(): + mock_client = mock_strict_redis_client() + + # Adding a couple of methods we use that mockredis doesn't support yet. + def scan_iter_patch(match=None, count=100): + match = str(match).replace('*', '') + return filter(lambda k: k.startswith(match), mock_client.keys()) + + mock_client.scan_iter = scan_iter_patch + mock_client.reset = lambda: True + + def zscan_iter_patch(key, match=None): + match = str(match).replace('*', '') + return filter(lambda k: k.startswith(match), + mock_client.zrange(key, 0, -1)) + mock_client.zscan_iter = zscan_iter_patch + return mock_client + + +@yield_fixture(scope='function') +def redis_client(monkeypatch): + client = mock_client() + yield client + # Flush on teardown + client.flushdb() + + +@yield_fixture(scope='function', autouse=True) +def redis_mock(redis_client, monkeypatch): + def set_self_client(self, *args, **kwargs): + # Ensure the same 'redis' client is returned across HeartbeatStore + # calls and direct checks. Mocking StrictRedis() directly causes + # different clients to be initialized, so we can't check contents. 
+ self.host = None + self.port = 6379 + + def fake_redis_client(host=None, port=6379, db=1): + return redis_client + + monkeypatch.setattr("inbox.heartbeat.config.get_redis_client", + fake_redis_client) + monkeypatch.setattr("inbox.heartbeat.store.HeartbeatStore.__init__", + set_self_client) + monkeypatch.setattr('inbox.scheduling.event_queue._get_redis_client', + fake_redis_client) + monkeypatch.setattr('inbox.mailsync.service.SHARED_SYNC_EVENT_QUEUE_ZONE_MAP', {}) + yield + monkeypatch.undo() diff --git a/inbox/test/util/crispin.py b/inbox/test/util/crispin.py new file mode 100644 index 000000000..14f12a267 --- /dev/null +++ b/inbox/test/util/crispin.py @@ -0,0 +1,4 @@ +# NOT a fixture because it needs args +def crispin_client(account_id, account_provider): + from inbox.crispin import connection_pool + return connection_pool(account_id, pool_size=1).get() diff --git a/inbox/test/webhooks/__init__.py b/inbox/test/webhooks/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/inbox/test/webhooks/test_gpush_calendar_notifications.py b/inbox/test/webhooks/test_gpush_calendar_notifications.py new file mode 100644 index 000000000..b8ed01e37 --- /dev/null +++ b/inbox/test/webhooks/test_gpush_calendar_notifications.py @@ -0,0 +1,189 @@ +import pytest +from datetime import datetime, timedelta + +from inbox.models.calendar import Calendar +from inbox.test.util.base import webhooks_client +__all__ = ['webhooks_client'] + +CALENDAR_LIST_PATH = '/calendar_list_update/{}' +CALENDAR_PATH = '/calendar_update/{}' + +ACCOUNT_WATCH_UUID = 'this_is_a_unique_identifier' +CALENDAR_WATCH_UUID = 'this_is_a_unique_identifier' # lol + +SYNC_HEADERS = { + 'X-Goog-Channel-Id': 'id', + 'X-Goog-Message-Number': 1, + 'X-Goog-Resource-Id': 'not relevant', + 'X-Goog-Resource-State': 'sync', + 'X-Goog-Resource-URI': 'resource/location' +} + +UPDATE_HEADERS = { + 'X-Goog-Channel-Id': 'id', + 'X-Goog-Message-Number': 2, + 'X-Goog-Resource-Id': 'not relevant', + 'X-Goog-Resource-State': 'update', + 'X-Goog-Resource-URI': 'resource/location' +} + +WATCH_EXPIRATION = 1426325213000 # 3/14/15 - utc TS in milliseconds + + +@pytest.fixture +def watched_account(db, default_account): + account = default_account + account.new_calendar_list_watch(WATCH_EXPIRATION) + db.session.add(account) + db.session.commit() + return account + + +@pytest.fixture +def watched_calendar(db, default_namespace): + calendar = Calendar(name='Colander', + uid='this_is_a_uid', + read_only=True, + namespace_id=default_namespace.id) + + calendar.new_event_watch(WATCH_EXPIRATION) + db.session.add(calendar) + db.session.commit() + return calendar + + +def test_should_update_logic(db, watched_account, watched_calendar): + # Watch should be not-expired + expiration = WATCH_EXPIRATION + 20 * 365 * 24 * 60 * 60 * 1000 + watched_account.new_calendar_list_watch(expiration) + watched_calendar.new_event_watch(expiration) + + ten_minutes = timedelta(minutes=10) + # Never synced - should update + assert watched_account.should_update_calendars(ten_minutes) + assert watched_calendar.should_update_events(ten_minutes) + + five_minutes_ago = datetime.utcnow() - timedelta(minutes=5) + watched_account.last_calendar_list_sync = five_minutes_ago + watched_calendar.last_synced = five_minutes_ago + assert not watched_account.should_update_calendars(ten_minutes) + assert not watched_calendar.should_update_events(ten_minutes) + + four_minutes = timedelta(minutes=4) + assert watched_account.should_update_calendars(four_minutes) + assert 
watched_calendar.should_update_events(four_minutes) + + watched_account.handle_gpush_notification() + watched_calendar.handle_gpush_notification() + assert watched_account.should_update_calendars(ten_minutes) + assert watched_calendar.should_update_events(ten_minutes) + + watched_account.last_calendar_list_sync = datetime.utcnow() + watched_calendar.last_synced = datetime.utcnow() + + assert not watched_account.should_update_calendars(ten_minutes) + assert not watched_calendar.should_update_events(ten_minutes) + + # If watch is expired, should always update + watched_account.new_calendar_list_watch(WATCH_EXPIRATION) + watched_calendar.new_event_watch(WATCH_EXPIRATION) + assert watched_account.should_update_calendars(ten_minutes) + assert watched_calendar.should_update_events(ten_minutes) + + +def test_needs_new_watch_logic(db, watched_account, watched_calendar): + assert watched_account.needs_new_calendar_list_watch() + assert watched_calendar.needs_new_watch() + + expiration = WATCH_EXPIRATION + 20 * 365 * 24 * 60 * 60 * 1000 + watched_account.new_calendar_list_watch(expiration) + watched_calendar.new_event_watch(expiration) + + assert not watched_account.needs_new_calendar_list_watch() + assert not watched_calendar.needs_new_watch() + + +def test_receive_sync_message(db, webhooks_client, + watched_account, watched_calendar): + # Sync messages can basically be ignored + # (see https://developers.google.com/google-apps/calendar/v3/push#sync) + + calendar_path = CALENDAR_LIST_PATH.format(watched_account.public_id) + event_path = CALENDAR_PATH.format(watched_calendar.public_id) + + r = webhooks_client.post_data(calendar_path, {}, SYNC_HEADERS) + assert r.status_code == 204 # No content + + r = webhooks_client.post_data(event_path, {}, SYNC_HEADERS) + assert r.status_code == 204 # No content + + +def test_calendar_update(db, webhooks_client, watched_account): + + calendar_path = CALENDAR_LIST_PATH.format(watched_account.public_id) + + before = datetime.utcnow() - timedelta(seconds=1) + assert watched_account.gpush_calendar_list_last_ping is None + + headers = UPDATE_HEADERS.copy() + headers['X-Goog-Channel-Id'] = ACCOUNT_WATCH_UUID + r = webhooks_client.post_data(calendar_path, {}, headers) + assert r.status_code == 200 + db.session.refresh(watched_account) + assert watched_account.gpush_calendar_list_last_ping > before + + unknown_id_path = CALENDAR_LIST_PATH.format(11111111111) + r = webhooks_client.post_data(unknown_id_path, {}, headers) + assert r.status_code == 404 # account not found + + invalid_id_path = CALENDAR_LIST_PATH.format('invalid_id') + r = webhooks_client.post_data(invalid_id_path, {}, headers) + assert r.status_code == 400 + + bad_headers = UPDATE_HEADERS.copy() + del bad_headers['X-Goog-Resource-State'] + r = webhooks_client.post_data(calendar_path, {}, bad_headers) + assert r.status_code == 400 + + +def test_event_update(db, webhooks_client, watched_calendar): + event_path = CALENDAR_PATH.format(watched_calendar.public_id) + + before = datetime.utcnow() - timedelta(seconds=1) + assert watched_calendar.gpush_last_ping is None + + headers = UPDATE_HEADERS.copy() + headers['X-Goog-Channel-Id'] = CALENDAR_WATCH_UUID + r = webhooks_client.post_data(event_path, {}, headers) + assert r.status_code == 200 + db.session.refresh(watched_calendar) + gpush_last_ping = watched_calendar.gpush_last_ping + assert gpush_last_ping > before + + # Test that gpush_last_ping is not updated if already recently updated + watched_calendar.gpush_last_ping = gpush_last_ping - timedelta(seconds=2) + 
gpush_last_ping = watched_calendar.gpush_last_ping + db.session.commit() + r = webhooks_client.post_data(event_path, {}, headers) + db.session.refresh(watched_calendar) + assert gpush_last_ping == watched_calendar.gpush_last_ping + + # Test that gpush_last_ping *is* updated if last updated too long ago + watched_calendar.gpush_last_ping = gpush_last_ping - timedelta(seconds=22) + db.session.commit() + r = webhooks_client.post_data(event_path, {}, headers) + db.session.refresh(watched_calendar) + assert watched_calendar.gpush_last_ping > gpush_last_ping + + bad_event_path = CALENDAR_PATH.format(1111111111111) + r = webhooks_client.post_data(bad_event_path, {}, headers) + assert r.status_code == 404 # calendar not found + + invalid_id_path = CALENDAR_PATH.format('invalid_id') + r = webhooks_client.post_data(invalid_id_path, {}, headers) + assert r.status_code == 400 + + bad_headers = UPDATE_HEADERS.copy() + del bad_headers['X-Goog-Resource-State'] + r = webhooks_client.post_data(event_path, {}, bad_headers) + assert r.status_code == 400 diff --git a/inbox/transactions/actions.py b/inbox/transactions/actions.py index d4e8c1c15..cf67ccced 100644 --- a/inbox/transactions/actions.py +++ b/inbox/transactions/actions.py @@ -11,7 +11,7 @@ from datetime import datetime import gevent -from gevent.event import Event +import gevent.event from gevent.queue import Queue import random from gevent.coros import BoundedSemaphore @@ -24,10 +24,11 @@ from inbox.ignition import engine_manager from inbox.util.concurrency import retry_with_logging from inbox.models.session import session_scope, session_scope_by_shard_id -from inbox.models import ActionLog +from inbox.models import ActionLog, Event from inbox.util.misc import DummyContextManager from inbox.util.stats import statsd_client -from inbox.actions.base import (mark_unread, +from inbox.actions.base import (can_handle_multiple_records, + mark_unread, mark_starred, move, change_labels, @@ -123,7 +124,7 @@ def __init__(self, syncback_id, process_number, total_processes, poll_interval=1 self.log = logger.new(component='syncback') self.num_workers = num_workers self.num_idle_workers = 0 - self.worker_did_finish = Event() + self.worker_did_finish = gevent.event.Event() self.worker_did_finish.clear() self.task_queue = Queue() self.running_action_ids = set() @@ -133,14 +134,15 @@ def _batch_log_entries(self, db_session, log_entries): tasks = [] semaphore = None account_id = None + last_task = None for log_entry in log_entries: if log_entry is None: self.log.error('Got no action, skipping') continue if log_entry.id in self.running_action_ids: - self.log.info('Skipping already running action', - action_id=log_entry.id) + self.log.debug('Skipping already running action', + action_log_id=log_entry.id) # We're already running an action for this account, so don't # queue up any additional actions for this account until the # previous batch has finished. 
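The guard above keeps a second batch of actions for an account from being queued while one is still in flight. A minimal standalone sketch of that rule, simplified from `_batch_log_entries` and using a hypothetical `should_defer_batch` helper that is not part of the patch:

    # running_action_ids mirrors the service-level set of action log ids
    # currently being executed; ids are removed when a batch finishes.
    running_action_ids = set()

    def should_defer_batch(log_entry_ids, running_action_ids):
        # Defer the whole batch if any of its entries is already executing,
        # so no further actions are queued for that account until the
        # in-flight batch completes.
        return any(entry_id in running_action_ids for entry_id in log_entry_ids)

    running_action_ids.update([101, 102])
    assert should_defer_batch([102, 103], running_action_ids)
    assert not should_defer_batch([104, 105], running_action_ids)
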
@@ -155,7 +157,7 @@ def _batch_log_entries(self, db_session, log_entries): if namespace.account.sync_state == 'invalid': self.log.warning('Skipping action for invalid account', account_id=account_id, - action_id=log_entry.id, + action_log_id=log_entry.id, action=log_entry.action) action_age = (datetime.utcnow() - @@ -168,7 +170,7 @@ def _batch_log_entries(self, db_session, log_entries): 'invalid account, older than ' 'grace period', account_id=account_id, - action_id=log_entry.id, + action_log_id=log_entry.id, action=log_entry.action) statsd_client.incr('syncback.invalid_failed.total') statsd_client.incr('syncback.invalid_failed.{}'. @@ -179,31 +181,43 @@ def _batch_log_entries(self, db_session, log_entries): semaphore = self.account_semaphores[account_id] else: assert semaphore is self.account_semaphores[account_id] - tasks.append( - SyncbackTask(action_name=log_entry.action, - semaphore=semaphore, - action_log_id=log_entry.id, - record_id=log_entry.record_id, - account_id=account_id, - provider=namespace.account. - verbose_provider, - service=self, - retry_interval=self.retry_interval, - extra_args=log_entry.extra_args)) + task = SyncbackTask(action_name=log_entry.action, + semaphore=semaphore, + action_log_ids=[log_entry.id], + record_ids=[log_entry.record_id], + account_id=account_id, + provider=namespace.account. + verbose_provider, + service=self, + retry_interval=self.retry_interval, + extra_args=log_entry.extra_args) + if last_task is None: + last_task = task + else: + merged_task = last_task.try_merge_with(task) + if merged_task is None: + tasks.append(last_task) + last_task = task + else: + last_task = merged_task + if last_task is not None: + assert len(tasks) == 0 or last_task != tasks[-1] + tasks.append(last_task) + if len(tasks) == 0: return None for task in tasks: - self.running_action_ids.add(task.action_log_id) - self.log.info('Syncback added task', - process=self.process_number, - action_id=task.action_log_id, - msg=task.action_name, - task_count=self.task_queue.qsize()) + self.running_action_ids.update(task.action_log_ids) + self.log.debug('Syncback added task', + process=self.process_number, + action_log_ids=task.action_log_ids, + num_actions=len(task.action_log_ids), + msg=task.action_name, + task_count=self.task_queue.qsize()) return SyncbackBatchTask(semaphore, tasks, account_id) def _process_log(self): - before = datetime.utcnow() for key in self.keys: with session_scope_by_shard_id(key) as db_session: @@ -222,10 +236,6 @@ def _process_log(self): else: namespaces_to_process = random.sample(namespace_ids, NUM_PARALLEL_ACCOUNTS) - self.log.debug('Syncback namespace_ids count', shard_id=key, - process=self.process_number, - num_namespace_ids=len(namespace_ids)) - for ns_id in namespaces_to_process: # The discriminator filter restricts actions to IMAP. EAS # uses a different system. 
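A minimal standalone sketch of the batching rule used in `_batch_log_entries` above: consecutive log entries are folded into a single task only when `try_merge_with` (defined on `SyncbackTask` further down) accepts the pair, i.e. the same account and action and, for `change_labels`, identical added/removed label sets. `MiniTask` is a hypothetical stand-in for `SyncbackTask`, used for illustration only:

    class MiniTask(object):
        # Hypothetical stand-in for SyncbackTask; not part of the patch.
        def __init__(self, action_name, account_id, action_log_ids, record_ids,
                     extra_args=None):
            self.action_name = action_name
            self.account_id = account_id
            self.action_log_ids = list(action_log_ids)
            self.record_ids = list(record_ids)
            self.extra_args = extra_args or {}

        def try_merge_with(self, other):
            # Merge only when the account and action match...
            if (self.action_name != other.action_name or
                    self.account_id != other.account_id):
                return None
            # ...and, for change_labels, when the label sets are identical.
            if self.action_name == 'change_labels':
                for key in ('added_labels', 'removed_labels'):
                    if set(self.extra_args.get(key, ())) != \
                            set(other.extra_args.get(key, ())):
                        return None
            return MiniTask(self.action_name, self.account_id,
                            self.action_log_ids + other.action_log_ids,
                            self.record_ids + other.record_ids,
                            self.extra_args)

    a = MiniTask('change_labels', 1, [10], [100],
                 {'added_labels': ['x'], 'removed_labels': []})
    b = MiniTask('change_labels', 1, [11], [101],
                 {'added_labels': ['x'], 'removed_labels': []})
    merged = a.try_merge_with(b)
    assert merged.action_log_ids == [10, 11]
    assert merged.record_ids == [100, 101]
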
@@ -238,12 +248,6 @@ def _process_log(self): if task is not None: self.task_queue.put(task) - after = datetime.utcnow() - self.log.debug('Syncback completed one iteration', - process=self.process_number, - duration=(after - before).total_seconds(), - idle_workers=self.num_idle_workers) - def _restart_workers(self): while len(self.workers) < self.num_workers: worker = SyncbackWorker(self) @@ -305,8 +309,8 @@ def execute(self): log = logger.new() with self.semaphore: with self._crispin_client_or_none() as crispin_client: - log.info("Syncback running batch of actions", - num_actions=len(self.tasks)) + log.debug("Syncback running batch of actions", + num_actions=len(self.tasks)) for task in self.tasks: task.crispin_client = crispin_client task.execute_with_lock() @@ -317,9 +321,10 @@ def uses_crispin_client(self): def timeout(self, per_task_timeout): return len(self.tasks) * per_task_timeout + @property def action_log_ids(self): return [entry for task in self.tasks - for entry in task.action_log_ids()] + for entry in task.action_log_ids] class SyncbackTask(object): @@ -338,21 +343,56 @@ class SyncbackTask(object): """ - def __init__(self, action_name, semaphore, action_log_id, record_id, + def __init__(self, action_name, semaphore, action_log_ids, record_ids, account_id, provider, service, retry_interval=30, extra_args=None): self.parent_service = weakref.ref(service) self.action_name = action_name self.semaphore = semaphore self.func = function_for_action(action_name) - self.action_log_id = action_log_id - self.record_id = record_id + self.action_log_ids = list(action_log_ids) + self.record_ids = record_ids self.account_id = account_id self.provider = provider self.extra_args = extra_args self.retry_interval = retry_interval self.crispin_client = None + def try_merge_with(self, other): + if self.func != other.func: + return None + + if self.action_name == 'change_labels': + my_removed_labels = set(self.extra_args['removed_labels']) + other_removed_labels = set(other.extra_args['removed_labels']) + if my_removed_labels != other_removed_labels: + return None + + my_added_labels = set(self.extra_args['added_labels']) + other_added_labels = set(other.extra_args['added_labels']) + if my_added_labels != other_added_labels: + return None + + # If anything seems fishy, conservatively return None. + if (self.provider != other.provider or + self.action_log_ids == other.action_log_ids or + self.record_ids == other.record_ids or + self.account_id != other.account_id or + self.action_name != other.action_name): + return None + return SyncbackTask( + self.action_name, + self.semaphore, + self.action_log_ids + other.action_log_ids, + self.record_ids + other.record_ids, + self.account_id, + self.provider, + self.parent_service(), + self.retry_interval, + self.extra_args + ) + return None + def _log_to_statsd(self, action_log_status, latency=None): metric_names = [ "syncback.overall.{}".format(action_log_status), @@ -365,60 +405,55 @@ def _log_to_statsd(self, action_log_status, latency=None): statsd_client.timing(metric, latency * 1000) def execute_with_lock(self): - log = logger.new( - record_id=self.record_id, action_log_id=self.action_log_id, + self.log = logger.new( + record_ids=self.record_ids, action_log_ids=self.action_log_ids, action=self.action_name, account_id=self.account_id, extra_args=self.extra_args) - for _ in range(ACTION_MAX_NR_OF_RETRIES): + # Double-check that the action is still pending. 
+ # Although the task queue is populated based on pending actions, it's + # possible that the processing of one action involved marking other + # actions as failed. + records_to_process, action_ids_to_process = self._get_records_and_actions_to_process() + if len(action_ids_to_process) == 0: + return + + for attempt in range(ACTION_MAX_NR_OF_RETRIES): + self.log.debug("executing action", attempt=attempt) try: - before_func = datetime.utcnow() - func_args = [self.account_id, self.record_id] - if self.extra_args: - func_args.append(self.extra_args) - if self.uses_crispin_client(): - assert self.crispin_client is not None - func_args.insert(0, self.crispin_client) - self.func(*func_args) - after_func = datetime.utcnow() + before, after = self._execute_timed_action(records_to_process) with session_scope(self.account_id) as db_session: - action_log_entry = db_session.query(ActionLog).get( - self.action_log_id) - action_log_entry.status = 'successful' - db_session.commit() - latency = round((datetime.utcnow() - - action_log_entry.created_at). - total_seconds(), 2) - func_latency = round((after_func - before_func). - total_seconds(), 2) - log.info('syncback action completed', - action_id=self.action_log_id, - latency=latency, - process=self.parent_service().process_number, - func_latency=func_latency) - self._log_to_statsd(action_log_entry.status, latency) + action_log_entries = db_session.query(ActionLog). \ + filter(ActionLog.id.in_(action_ids_to_process)) + + for action_log_entry in action_log_entries: + self._mark_action_as_successful(action_log_entry, before, after, db_session) return - except Exception: - log_uncaught_errors(log, account_id=self.account_id, + except: + log_uncaught_errors(self.log, account_id=self.account_id, provider=self.provider) with session_scope(self.account_id) as db_session: - action_log_entry = db_session.query(ActionLog).get( - self.action_log_id) - action_log_entry.retries += 1 - if (action_log_entry.retries == - ACTION_MAX_NR_OF_RETRIES): - log.critical('Max retries reached, giving up.', - exc_info=True) - action_log_entry.status = 'failed' - self._log_to_statsd(action_log_entry.status) + action_log_entries = db_session.query(ActionLog). \ + filter(ActionLog.id.in_(action_ids_to_process)) + + marked_as_failed = False + for action_log_entry in action_log_entries: + action_log_entry.retries += 1 + if action_log_entry.retries == ACTION_MAX_NR_OF_RETRIES: + marked_as_failed = True + self._mark_action_as_failed(action_log_entry, db_session) + # If we've merged SyncbackTasks then their corresponding + # actions should all fail at the same time. + assert (not marked_as_failed or + action_log_entry.retries == ACTION_MAX_NR_OF_RETRIES) db_session.commit() + if marked_as_failed: return - db_session.commit() # Wait before retrying - log.info("Syncback task retrying action after sleeping", - duration=self.retry_interval) + self.log.info("Syncback task retrying action after sleeping", + duration=self.retry_interval) # TODO(T6974): We might want to do some kind of exponential # backoff with jitter to avoid the thundering herd problem if a @@ -426,12 +461,75 @@ def execute_with_lock(self): # time. gevent.sleep(self.retry_interval) + def _get_records_and_actions_to_process(self): + records_to_process = [] + action_ids_to_process = [] + action_log_record_map = dict(zip(self.action_log_ids, self.record_ids)) + with session_scope(self.account_id) as db_session: + action_log_entries = db_session.query(ActionLog). 
\ + filter(ActionLog.id.in_(self.action_log_ids)) + for action_log_entry in action_log_entries: + if action_log_entry.status != 'pending': + self.log.info('Skipping SyncbackTask, action is no longer pending') + continue + action_ids_to_process.append(action_log_entry.id) + records_to_process.append(action_log_record_map[action_log_entry.id]) + return records_to_process, action_ids_to_process + + def _execute_timed_action(self, records_to_process): + before_func = datetime.utcnow() + func_args = [self.account_id] + if can_handle_multiple_records(self.action_name): + func_args.append(records_to_process) + else: + assert len(records_to_process) == 1 + func_args.append(records_to_process[0]) + + if self.extra_args: + func_args.append(self.extra_args) + if self.uses_crispin_client(): + assert self.crispin_client is not None + func_args.insert(0, self.crispin_client) + self.func(*func_args) + after_func = datetime.utcnow() + return before_func, after_func + + def _mark_action_as_successful(self, action_log_entry, before, after, db_session): + action_log_entry.status = 'successful' + db_session.commit() + latency = round((datetime.utcnow() - action_log_entry.created_at).total_seconds(), 2) + func_latency = round((after - before).total_seconds(), 2) + self.log.info('syncback action completed', + latency=latency, + process=self.parent_service().process_number, + func_latency=func_latency) + self._log_to_statsd(action_log_entry.status, latency) + + def _mark_action_as_failed(self, action_log_entry, db_session): + self.log.critical('Max retries reached, giving up.', exc_info=True) + action_log_entry.status = 'failed' + self._log_to_statsd(action_log_entry.status) + + if action_log_entry.action == 'create_event': + # Creating a remote copy of the event failed. + # Without it, none of the other pending actions + # for this event will succeed. To prevent their + # execution, preemptively mark them as failed. + actions = db_session.query(ActionLog).filter_by( + record_id=action_log_entry.record_id, + namespace_id=action_log_entry.namespace_id, + status='pending').all() + for pending_action in actions: + pending_action.status = 'failed' + + # Mark the local copy as deleted so future actions can't be made. 
+ event = db_session.query(Event).get(action_log_entry.record_id) + event.deleted_at = datetime.now() + db_session.commit() + def uses_crispin_client(self): return action_uses_crispin_client(self.action_name) - def action_log_ids(self): - return [self.action_log_id] - def timeout(self, per_task_timeout): return per_task_timeout @@ -445,6 +543,7 @@ class SyncbackWorker(gevent.Greenlet): def __init__(self, parent_service, task_timeout=60): self.parent_service = weakref.ref(parent_service) self.task_timeout = task_timeout + self.log = logger.new(component='syncback-worker') gevent.Greenlet.__init__(self) def _run(self): @@ -454,6 +553,9 @@ def _run(self): try: self.parent_service().notify_worker_active() gevent.with_timeout(task.timeout(self.task_timeout), task.execute) + except: + self.log.error('SyncbackWorker caught exception', exc_info=True, + account_id=task.account_id) finally: self.parent_service().notify_worker_finished( - task.action_log_ids()) + task.action_log_ids) diff --git a/inbox/transactions/delta_sync.py b/inbox/transactions/delta_sync.py index 801f9aa23..c372d6797 100644 --- a/inbox/transactions/delta_sync.py +++ b/inbox/transactions/delta_sync.py @@ -195,10 +195,12 @@ def format_transactions_after_pointer(namespace, pointer, db_session, if object_cls == Thread: query = query.options(*Thread.api_loading_options(expand)) - elif object_cls == Message: + if object_cls == Message: query = query.options(*Message.api_loading_options(expand)) - - objects = {obj.id: obj for obj in query} + # T7045: Workaround for some SQLAlchemy bugs. + objects = {obj.id: obj for obj in query if obj.thread is not None} + else: + objects = {obj.id: obj for obj in query} for trx in latest_trxs: delta = { diff --git a/inbox/transactions/search.py b/inbox/transactions/search.py index 2f4a14907..17ba2cec5 100644 --- a/inbox/transactions/search.py +++ b/inbox/transactions/search.py @@ -39,7 +39,7 @@ def __init__(self, poll_interval=30, chunk_size=DOC_UPLOAD_CHUNK_SIZE): def _report_batch_upload(self): metric_names = [ - "inbox-contacts-search.transactions.batch_upload", + "contacts_search_index.transactions.batch_upload", ] for metric in metric_names: @@ -47,7 +47,7 @@ def _report_batch_upload(self): def _report_transactions_latency(self, latency): metric_names = [ - "inbox-contacts-search.transactions.latency", + "contacts_search_index.transactions.latency", ] for metric in metric_names: @@ -55,7 +55,7 @@ def _report_transactions_latency(self, latency): def _publish_heartbeat(self): metric_names = [ - "inbox-contacts-search.heartbeat", + "contacts_search_index.heartbeat", ] for metric in metric_names: diff --git a/inbox/util/blockstore.py b/inbox/util/blockstore.py index 56c590ee4..e8bb794ec 100644 --- a/inbox/util/blockstore.py +++ b/inbox/util/blockstore.py @@ -42,42 +42,23 @@ def save_to_blockstore(data_sha256, data): f.write(data) -def is_in_blockstore(data_sha256): - if STORE_MSG_ON_S3: - return _is_in_s3(data_sha256) - else: - return os.path.exists(_data_file_path(data_sha256)) - - -def get_from_blockstore(data_sha256): - if STORE_MSG_ON_S3: - value = _get_from_s3(data_sha256) - else: - value = _get_from_disk(data_sha256) - - if value is None: - # We don't store None values so if such is returned, it's an error. - log.error('No data returned!') - return value +def _save_to_s3(data_sha256, data): + assert 'TEMP_MESSAGE_STORE_BUCKET_NAME' in config, \ + 'Need temp bucket name to store message data!' - assert data_sha256 == sha256(value).hexdigest(), \ - "Returned data doesn't match stored hash!" 
- return value + _save_to_s3_bucket(data_sha256, + config.get('TEMP_MESSAGE_STORE_BUCKET_NAME'), data) -def _save_to_s3(data_sha256, data): +def _save_to_s3_bucket(data_sha256, bucket_name, data): assert 'AWS_ACCESS_KEY_ID' in config, 'Need AWS key!' assert 'AWS_SECRET_ACCESS_KEY' in config, 'Need AWS secret!' - assert 'MESSAGE_STORE_BUCKET_NAME' in config, \ - 'Need bucket name to store message data!' - start = time.time() # Boto pools connections at the class level conn = S3Connection(config.get('AWS_ACCESS_KEY_ID'), config.get('AWS_SECRET_ACCESS_KEY')) - bucket = conn.get_bucket(config.get('MESSAGE_STORE_BUCKET_NAME'), - validate=False) + bucket = conn.get_bucket(bucket_name, validate=False) # See if it already exists; if so, don't recreate. key = bucket.get_key(data_sha256) @@ -90,32 +71,55 @@ def _save_to_s3(data_sha256, data): end = time.time() latency_millis = (end - start) * 1000 - statsd_client.timing('s3.save_latency', latency_millis) + statsd_client.timing('s3_blockstore.save_latency', latency_millis) + + +def get_from_blockstore(data_sha256): + if STORE_MSG_ON_S3: + value = _get_from_s3(data_sha256) + else: + value = _get_from_disk(data_sha256) + + if value is None: + # We don't store None values so if such is returned, it's an error. + log.error('No data returned!') + return value + + assert data_sha256 == sha256(value).hexdigest(), \ + "Returned data doesn't match stored hash!" + return value -def _is_in_s3(data_sha256): +def _get_from_s3(data_sha256): assert 'AWS_ACCESS_KEY_ID' in config, 'Need AWS key!' assert 'AWS_SECRET_ACCESS_KEY' in config, 'Need AWS secret!' - assert 'MESSAGE_STORE_BUCKET_NAME' in config, \ - 'Need bucket name to store message data!' - # Boto pools connections at the class level - conn = S3Connection(config.get('AWS_ACCESS_KEY_ID'), - config.get('AWS_SECRET_ACCESS_KEY')) - bucket = conn.get_bucket(config.get('MESSAGE_STORE_BUCKET_NAME'), - validate=False) + assert 'TEMP_MESSAGE_STORE_BUCKET_NAME' in config, \ + 'Need temp bucket name to store message data!' - return bool(bucket.get_key(data_sha256)) + # Try getting data from our temporary blockstore before + # trying getting it from the provider. 
+ data = _get_from_s3_bucket(data_sha256, + config.get('TEMP_MESSAGE_STORE_BUCKET_NAME')) + if data is not None: + log.info('Found hash in temporary blockstore!', + sha256=data_sha256, logstash_tag='s3_direct') + return data -def _get_from_s3(data_sha256): + log.info("Couldn't find data in blockstore", + sha256=data_sha256, logstash_tag='s3_direct') + + return None + + +def _get_from_s3_bucket(data_sha256, bucket_name): if not data_sha256: return None conn = S3Connection(config.get('AWS_ACCESS_KEY_ID'), config.get('AWS_SECRET_ACCESS_KEY')) - bucket = conn.get_bucket(config.get('MESSAGE_STORE_BUCKET_NAME'), - validate=False) + bucket = conn.get_bucket(bucket_name, validate=False) key = bucket.get_key(data_sha256) diff --git a/inbox/util/concurrency.py b/inbox/util/concurrency.py index 2dd1a1ed1..5b951a03b 100644 --- a/inbox/util/concurrency.py +++ b/inbox/util/concurrency.py @@ -6,6 +6,8 @@ from backports import ssl from gevent import socket from redis import TimeoutError +import _mysql_exceptions +from sqlalchemy.exc import StatementError from inbox.models import Account from inbox.models.session import session_scope @@ -15,7 +17,20 @@ log = get_logger() BACKOFF_DELAY = 30 # seconds to wait before retrying after a failure -TRANSIENT_NETWORK_ERRS = (socket.timeout, TimeoutError, socket.error, ssl.SSLError) + +TRANSIENT_NETWORK_ERRS = ( + socket.timeout, + TimeoutError, + socket.error, + ssl.SSLError) + +TRANSIENT_MYSQL_MESSAGES = ( + "try restarting transaction", + "Too many connections", + "Lost connection to MySQL server", + "MySQL server has gone away", + "Can't connect to MySQL server", + "Max connect timeout reached") def retry(func, retry_classes=None, fail_classes=None, exc_callback=None, @@ -82,7 +97,22 @@ def retry_with_logging(func, logger=None, retry_classes=None, occurrences = [0] def callback(e): - if isinstance(e, TRANSIENT_NETWORK_ERRS): + is_transient = isinstance(e, TRANSIENT_NETWORK_ERRS) + mysql_error = None + + log = logger or get_logger() + + if isinstance(e, _mysql_exceptions.OperationalError): + mysql_error = e + elif isinstance(e, StatementError) and isinstance(e.orig, _mysql_exceptions.OperationalError): + mysql_error = e.orig + + if mysql_error: + for msg in TRANSIENT_MYSQL_MESSAGES: + if msg in mysql_error.message: + is_transient = True + + if is_transient: occurrences[0] += 1 if occurrences[0] < 20: return @@ -98,9 +128,9 @@ def callback(e): account.update_sync_error(e) db_session.commit() except: - logger.error('Error saving sync_error to account object', - account_id=account_id, - **create_error_log_context(sys.exc_info())) + log.error('Error saving sync_error to account object', + account_id=account_id, + **create_error_log_context(sys.exc_info())) log_uncaught_errors(logger, account_id=account_id, provider=provider, occurrences=occurrences[0]) diff --git a/inbox/util/fleet.py b/inbox/util/fleet.py new file mode 100644 index 000000000..e07d77232 --- /dev/null +++ b/inbox/util/fleet.py @@ -0,0 +1,61 @@ +from boto import ec2 + + +def get_sync_hosts_in_zone(zone, level, include_debug=False): + # Hack to make local dev VM work. 
+ if zone is None: + return [{'name': 'localhost', 'ip_address': '127.0.0.1', 'num_procs': 4}] + + instances = [] + regions = ec2.regions() + for region in regions: + if not zone.startswith(region.name): + continue + try: + conn = ec2.connect_to_region(region.name) + if conn is None: + continue + + for r in conn.get_all_instances(): + for i in r.instances: + if i.placement != zone: + continue + if i.tags.get('Role') != 'sync': + continue + if i.tags.get('Level') != level: + continue + if not include_debug and i.tags.get('Debug') == 'true': + continue + instances.append(i) + except: + print "Unable to connect to region {}".format(region.name) + raise + return [{ + 'name': i.tags.get('Name'), + 'ip_address': i.private_ip_address, + 'num_procs': num_vcpus(i.instance_type) * 2, + 'debug': i.tags.get('Debug') == 'true', + } for i in instances] + + +# For whatever reason, the ec2 API doesn't provide us with an easy way to get +# the CPU count :-( +# These numbers were grabbed from https://aws.amazon.com/ec2/instance-types/ +def num_vcpus(instance_type): + return { + 't2.nano': 1, + 't2.micro': 1, + 't2.small': 1, + 't2.medium': 2, + 't2.large': 2, + 'm3.medium': 1, + 'm3.large': 2, + 'm3.xlarge': 4, + 'm3.2xlarge': 8, + 'm4.large': 2, + 'm4.xlarge': 4, + 'm4.2xlarge': 8, + 'm4.4xlarge': 16, + 'm4.10xlarge': 40, + 'm4.16xlarge': 64, + }[instance_type] diff --git a/inbox/util/stats.py b/inbox/util/stats.py index 345859a06..10e59fa3a 100644 --- a/inbox/util/stats.py +++ b/inbox/util/stats.py @@ -7,7 +7,6 @@ def get_statsd_client(): return statsd.StatsClient( str(config.get("STATSD_HOST", "localhost")), config.get("STATSD_PORT", 8125), - prefix=config.get("STATSD_PREFIX", "mailsync")) - + prefix=config.get("STATSD_PREFIX", "stats")) statsd_client = get_statsd_client() diff --git a/inbox/util/testutils.py b/inbox/util/testutils.py index 705ae9526..383e42e79 100644 --- a/inbox/util/testutils.py +++ b/inbox/util/testutils.py @@ -1,31 +1,38 @@ +# -*- coding: utf-8 -*- import contextlib import dns import json +import os import re import pytest import subprocess +import pkgutil from inbox.basicauth import ValidationError +FILENAMES = ['muir.jpg', 'LetMeSendYouEmail.wav', 'piece-jointe.jpg', + 'andra-moi-ennepe.txt', 'long-non-ascii-filename.txt'] + + def create_test_db(): """ Creates new, empty test databases. """ from inbox.config import config database_hosts = config.get_required('DATABASE_HOSTS') - schemas = [shard['SCHEMA_NAME'] for host in database_hosts for - shard in host['SHARDS']] + schemas = [(shard['SCHEMA_NAME'], host['HOSTNAME']) + for host in database_hosts for shard in host['SHARDS']] # The various test databases necessarily have "test" in their name. 
- assert all(['test' in s for s in schemas]) + assert all(['test' in s for s, h in schemas]) - for name in schemas: + for name, host in schemas: cmd = 'DROP DATABASE IF EXISTS {name}; ' \ 'CREATE DATABASE IF NOT EXISTS {name} ' \ 'DEFAULT CHARACTER SET utf8mb4 DEFAULT COLLATE ' \ 'utf8mb4_general_ci'.format(name=name) - subprocess.check_call('mysql -uinboxtest -pinboxtest ' - '-e "{}"'.format(cmd), shell=True) + subprocess.check_call('mysql -h {} -uinboxtest -pinboxtest ' + '-e "{}"'.format(host, cmd), shell=True) def setup_test_db(): @@ -60,9 +67,8 @@ class MockDNSResolver(object): def __init__(self): self._registry = {'mx': {}, 'ns': {}} - def _load_records(self, filename): - with open(filename, 'r') as registry_file: - self._registry = json.load(registry_file) + def _load_records(self, pkg, filename): + self._registry = json.loads(pkgutil.get_data(pkg, filename)) def query(self, domain, record_type): record_type = record_type.lower() @@ -164,10 +170,10 @@ def search(self, criteria): # Slow implementation, but whatever return [u for u, v in uid_dict.items() if headerstring in v['BODY[]'].lower()] - if criteria[0] == 'X-GM-THRID': + if criteria[0] in ['X-GM-THRID', 'X-GM-MSGID']: assert len(criteria) == 2 thrid = criteria[1] - return [u for u, v in uid_dict.items() if v['X-GM-THRID'] == thrid] + return [u for u, v in uid_dict.items() if v[criteria[0]] == thrid] raise ValueError('unsupported test criteria: {!r}'.format(criteria)) def select_folder(self, folder_name, readonly=False): @@ -198,7 +204,8 @@ def fetch(self, items, data, modifiers=None): k == 'MODSEQ'} return resp - def append(self, folder_name, mimemsg, flags, date): + def append(self, folder_name, mimemsg, flags, date, + x_gm_msgid=0, x_gm_thrid=0): uid_dict = self._data[folder_name] uidnext = max(uid_dict) if uid_dict else 1 uid_dict[uidnext] = { @@ -207,8 +214,8 @@ def append(self, folder_name, mimemsg, flags, date): 'INTERNALDATE': None, 'X-GM-LABELS': (), 'FLAGS': (), - 'X-GM-MSGID': 0, - 'X-GM-THRID': 0 + 'X-GM-MSGID': x_gm_msgid, + 'X-GM-THRID': x_gm_thrid, } def copy(self, matching_uids, folder_name): @@ -239,6 +246,12 @@ def delete_messages(self, uids, silent=False): for u in uids: del self._data[self.selected_folder][u] + def remove_flags(self, uids, flags): + pass + + def remove_gmail_labels(self, uids, labels): + pass + def expunge(self): pass @@ -283,3 +296,37 @@ def get_connection(account): ) yield client monkeypatch.undo() + + +@pytest.fixture(scope='function') +def files(db): + filenames = FILENAMES + data = [] + for filename in filenames: + path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', + 'test', 'data', filename).encode('utf-8') + data.append((filename, path)) + return data + + +@pytest.fixture(scope='function') +def uploaded_file_ids(api_client, files): + file_ids = [] + upload_path = '/files' + for filename, path in files: + # Mac and linux fight over filesystem encodings if we store this + # filename on the fs. Work around by changing the filename we upload + # instead. 
+        if filename == 'piece-jointe.jpg':
+            filename = u'pièce-jointe.jpg'
+        elif filename == 'andra-moi-ennepe.txt':
+            filename = u'ἄνδρα μοι ἔννεπε'
+        elif filename == 'long-non-ascii-filename.txt':
+            filename = 100 * u'μ'
+        data = {'file': (open(path, 'rb'), filename)}
+        r = api_client.post_raw(upload_path, data=data)
+        assert r.status_code == 200
+        file_id = json.loads(r.data)[0]['id']
+        file_ids.append(file_id)
+
+    return file_ids
diff --git a/migrations/versions/229_drop_transaction_foreign_keys.py b/migrations/versions/229_drop_transaction_foreign_keys.py
new file mode 100644
index 000000000..fe5e78664
--- /dev/null
+++ b/migrations/versions/229_drop_transaction_foreign_keys.py
@@ -0,0 +1,32 @@
+"""Remove Transaction ForeignKeys
+
+Revision ID: 23ff7f0b506d
+Revises: 3df39f4fbdec
+Create Date: 2016-09-07 19:31:02.396029
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '23ff7f0b506d'
+down_revision = '3df39f4fbdec'
+
+from alembic import op
+from sqlalchemy.sql import text
+
+
+def upgrade():
+    conn = op.get_bind()
+    conn.execute(text("ALTER TABLE transaction"
+                      " DROP FOREIGN KEY transaction_ibfk_1"))
+    conn.execute(text("ALTER TABLE accounttransaction"
+                      " DROP FOREIGN KEY accounttransaction_ibfk_1"))
+
+
+def downgrade():
+    conn = op.get_bind()
+    conn.execute(text("ALTER TABLE accounttransaction"
+                      " ADD CONSTRAINT accounttransaction_ibfk_1 FOREIGN KEY "
+                      "(namespace_id) REFERENCES namespace(id)"))
+    conn.execute(text("ALTER TABLE transaction"
+                      " ADD CONSTRAINT transaction_ibfk_1 FOREIGN KEY "
+                      "(namespace_id) REFERENCES namespace(id)"))
diff --git a/migrations/versions/230_drop_block_foreign_keys.py b/migrations/versions/230_drop_block_foreign_keys.py
new file mode 100644
index 000000000..c71df138f
--- /dev/null
+++ b/migrations/versions/230_drop_block_foreign_keys.py
@@ -0,0 +1,37 @@
+"""Drop Block and Part ForeignKeys
+
+Revision ID: 4265dc58eec6
+Revises: 23ff7f0b506d
+Create Date: 2016-09-20 20:39:09.078087
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '4265dc58eec6'
+down_revision = '23ff7f0b506d'
+
+from alembic import op
+from sqlalchemy.sql import text
+
+
+def upgrade():
+    conn = op.get_bind()
+    conn.execute(text("ALTER TABLE part"
+                      " DROP FOREIGN KEY part_ibfk_1"))
+    conn.execute(text("ALTER TABLE part"
+                      " DROP FOREIGN KEY part_ibfk_2"))
+    conn.execute(text("ALTER TABLE block"
+                      " DROP FOREIGN KEY block_ibfk_1"))
+
+
+def downgrade():
+    conn = op.get_bind()
+    conn.execute(text("ALTER TABLE block "
+                      "ADD CONSTRAINT block_ibfk_1 FOREIGN KEY "
+                      "(namespace_id) REFERENCES namespace(id)"))
+    conn.execute(text("ALTER TABLE part "
+                      "ADD CONSTRAINT part_ibfk_2 FOREIGN KEY "
+                      "(message_id) REFERENCES message(id)"))
+    conn.execute(text("ALTER TABLE part "
+                      "ADD CONSTRAINT part_ibfk_1 FOREIGN KEY "
+                      "(block_id) REFERENCES block(id)"))
diff --git a/migrations/versions/231_drop_contact_foreign_keys.py b/migrations/versions/231_drop_contact_foreign_keys.py
new file mode 100644
index 000000000..e54ee6237
--- /dev/null
+++ b/migrations/versions/231_drop_contact_foreign_keys.py
@@ -0,0 +1,41 @@
+"""Drop Contact foreign keys
+
+Revision ID: c48fc8dea1b
+Revises: 4265dc58eec6
+Create Date: 2016-09-13 17:29:27.783566
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = 'c48fc8dea1b'
+down_revision = '4265dc58eec6'
+
+from alembic import op
+from sqlalchemy.sql import text
+
+
+def upgrade():
+    conn = op.get_bind()
+    conn.execute(text("ALTER TABLE contact"
+                      " DROP FOREIGN KEY contact_ibfk_1"))
+
+    conn.execute(text("ALTER TABLE phonenumber"
+                      " DROP FOREIGN KEY phonenumber_ibfk_1"))
+
+    conn.execute(text("ALTER TABLE messagecontactassociation"
+                      " DROP FOREIGN KEY messagecontactassociation_ibfk_1"))
+
+
+def downgrade():
+    conn = op.get_bind()
+    conn.execute(text("ALTER TABLE contact"
+                      " ADD CONSTRAINT contact_ibfk_1 FOREIGN KEY"
+                      " (namespace_id) REFERENCES namespace(id)"))
+
+    conn.execute(text("ALTER TABLE phonenumber"
+                      " ADD CONSTRAINT phonenumber_ibfk_1 FOREIGN KEY"
+                      " (contact_id) REFERENCES contact(id)"))
+
+    conn.execute(text("ALTER TABLE messagecontactassociation"
+                      " ADD CONSTRAINT messagecontactassociation_ibfk_1"
+                      " FOREIGN KEY (contact_id) REFERENCES contact(id)"))
diff --git a/migrations/versions/232_add_thread_deleted_at.py b/migrations/versions/232_add_thread_deleted_at.py
new file mode 100644
index 000000000..ee14185d5
--- /dev/null
+++ b/migrations/versions/232_add_thread_deleted_at.py
@@ -0,0 +1,26 @@
+"""Add Thread.deleted_at
+
+Revision ID: 4a44b06cd53b
+Revises: c48fc8dea1b
+Create Date: 2016-09-30 21:37:00.824566
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '4a44b06cd53b'
+down_revision = 'c48fc8dea1b'
+
+from alembic import op
+import sqlalchemy as sa
+
+
+def upgrade():
+    op.add_column('thread', sa.Column('deleted_at', sa.DateTime(),
+                                      nullable=True))
+    op.create_index('ix_thread_namespace_id_deleted_at', 'thread',
+                    ['namespace_id', 'deleted_at'], unique=False)
+
+
+def downgrade():
+    op.drop_index('ix_thread_namespace_id_deleted_at', table_name='thread')
+    op.drop_column('thread', 'deleted_at')
diff --git a/migrations/versions/233_revert_drop_block_foreign_keys.py b/migrations/versions/233_revert_drop_block_foreign_keys.py
new file mode 100644
index 000000000..09b9145a6
--- /dev/null
+++ b/migrations/versions/233_revert_drop_block_foreign_keys.py
@@ -0,0 +1,37 @@
+"""Revert "Drop Block and Part ForeignKeys"
+
+Revision ID: 569ebe8e383d
+Revises: 4a44b06cd53b
+Create Date: 2016-10-10 18:26:43.036307
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '569ebe8e383d'
+down_revision = '4a44b06cd53b'
+
+from alembic import op
+from sqlalchemy.sql import text
+
+
+def upgrade():
+    conn = op.get_bind()
+    conn.execute(text("ALTER TABLE block "
+                      "ADD CONSTRAINT block_ibfk_1 FOREIGN KEY "
+                      "(namespace_id) REFERENCES namespace(id)"))
+    conn.execute(text("ALTER TABLE part "
+                      "ADD CONSTRAINT part_ibfk_2 FOREIGN KEY "
+                      "(message_id) REFERENCES message(id)"))
+    conn.execute(text("ALTER TABLE part "
+                      "ADD CONSTRAINT part_ibfk_1 FOREIGN KEY "
+                      "(block_id) REFERENCES block(id)"))
+
+
+def downgrade():
+    conn = op.get_bind()
+    conn.execute(text("ALTER TABLE part"
+                      " DROP FOREIGN KEY part_ibfk_1"))
+    conn.execute(text("ALTER TABLE part"
+                      " DROP FOREIGN KEY part_ibfk_2"))
+    conn.execute(text("ALTER TABLE block"
+                      " DROP FOREIGN KEY block_ibfk_1"))
diff --git a/migrations/versions/234_change_contact_uid_collation.py b/migrations/versions/234_change_contact_uid_collation.py
new file mode 100644
index 000000000..6122c98ef
--- /dev/null
+++ b/migrations/versions/234_change_contact_uid_collation.py
@@ -0,0 +1,24 @@
+"""Make Contact.uid collation case sensitive
+
+Revision ID: 53e6a7446c45
+Revises: 569ebe8e383d
+Create Date: 2016-10-07 22:51:31.495243
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '53e6a7446c45'
+down_revision = '569ebe8e383d'
+
+from alembic import op
+from sqlalchemy.sql import text
+
+
+def upgrade():
+    conn = op.get_bind()
+    conn.execute(text("ALTER TABLE contact MODIFY uid varchar(64) NOT NULL COLLATE utf8mb4_bin"))
+
+
+def downgrade():
+    conn = op.get_bind()
+    conn.execute(text("ALTER TABLE contact MODIFY uid varchar(64) NOT NULL COLLATE utf8mb4_general_ci"))
diff --git a/migrations/versions/235_change_imapfolderinfo_column.py b/migrations/versions/235_change_imapfolderinfo_column.py
new file mode 100644
index 000000000..bf62079bb
--- /dev/null
+++ b/migrations/versions/235_change_imapfolderinfo_column.py
@@ -0,0 +1,28 @@
+"""Change imapfolderinfo uidnext to bigint from int
+
+Revision ID: 34815f9e639c
+Revises: 53e6a7446c45
+Create Date: 2016-10-14 23:13:19.620120
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '34815f9e639c'
+down_revision = '53e6a7446c45'
+
+from alembic import op
+import sqlalchemy as sa
+
+
+def upgrade():
+    op.alter_column('imapfolderinfo', 'uidnext',
+                    type_=sa.BigInteger, existing_type=sa.Integer,
+                    existing_server_default=sa.sql.expression.null(),
+                    existing_nullable=True)
+
+
+def downgrade():
+    op.alter_column('imapfolderinfo', 'uidnext',
+                    type_=sa.Integer, existing_type=sa.BigInteger,
+                    existing_server_default=sa.sql.expression.null(),
+                    existing_nullable=True)
diff --git a/migrations/versions/236_add_desired_sync_host.py b/migrations/versions/236_add_desired_sync_host.py
new file mode 100644
index 000000000..26806003c
--- /dev/null
+++ b/migrations/versions/236_add_desired_sync_host.py
@@ -0,0 +1,24 @@
+"""Add Account.desired_sync_host column
+
+Revision ID: 3eb4f30c8ed3
+Revises: 34815f9e639c
+Create Date: 2016-10-19 23:35:58.866180
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '3eb4f30c8ed3' +down_revision = '34815f9e639c' + +from alembic import op +from sqlalchemy.sql import text + + +def upgrade(): + conn = op.get_bind() + conn.execute(text("ALTER TABLE account ADD COLUMN desired_sync_host varchar(255)")) + + +def downgrade(): + conn = op.get_bind() + conn.execute(text("ALTER TABLE account DROP COLUMN desired_sync_host")) diff --git a/requirements.txt b/requirements.txt index cf6147a41..4daa76e9d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ -nylas-production-python>=0.3.4 -coverage==3.7.1 +nylas-production-python>=0.3.9 +coverage==4.2 httplib2==0.8 pytest==2.8.3 pytest-xdist==1.13.1 @@ -14,6 +14,7 @@ freezegun==0.3.7 gevent==1.1.2 gevent-socketio==0.3.5-rc2 gunicorn==19.4.5 +munkres==1.0.8 mysqlclient==1.3.7 requests==2.11.0 urllib3[secure]>=1.16 @@ -26,6 +27,7 @@ alembic==0.6.5 iconv==1.0 pep8==1.4.6 flake8==2.1.0 +flake8-pep3101==0.6 pyflakes==0.7.3 pylint==1.5.1 html2text==2014.9.8 @@ -60,5 +62,7 @@ psutil==3.3.0 pyopenssl>=0.15.1 gevent_openssl==1.2 backports.ssl==0.0.9 -git+https://github.com/nylas/imapclient.git@3b0cffc2a4964aceb0b7f41d40855c6d285e88ef#egg=imapclient +git+https://github.com/mjs/imapclient.git@77047bafd9a82f3bb1faa5e909d776b7a2e1b432#egg=imapclient tldextract==1.7.5 +tox==2.3.1 +nylas==1.2.3 diff --git a/setup.py b/setup.py index 6bfa517df..836114daa 100644 --- a/setup.py +++ b/setup.py @@ -5,7 +5,7 @@ setup( name="inbox-sync", - version="0.4", + version="17.1.6", # Release Jan 6, 2017 packages=find_packages(), install_requires=[], @@ -18,7 +18,8 @@ # And include any *.msg files found in the 'hello' package, too: # 'hello': ['*.msg'], }, - data_files=[("alembic-inbox-sync", ["alembic.ini"]), + data_files=[("sync-engine-test-config", glob.glob("etc/*test*")), + ("alembic-inbox-sync", ["alembic.ini"]), ("alembic-inbox-sync/migrations", filter(os.path.isfile, glob.glob("migrations/*"))), ("alembic-inbox-sync/migrations/versions", @@ -42,7 +43,6 @@ 'bin/contact-search-service', 'bin/contact-search-backfill', 'bin/contact-search-delete-index', - 'bin/populate-sync-queue', 'bin/delete-marked-accounts', 'bin/backfix-generic-imap-separators.py', 'bin/backfix-duplicate-categories.py', @@ -52,7 +52,13 @@ 'bin/purge-transaction-log', 'bin/mysql-prompt', 'bin/unschedule-account-syncs', - 'bin/syncback-stats' + 'bin/syncback-stats', + 'bin/set-desired-host', + 'bin/get-accounts-for-host', + 'bin/deferred-migration-service', + 'bin/balance-fleet', + 'bin/get-account-loads', + 'bin/restart-forgotten-accounts', ], # See: @@ -60,7 +66,7 @@ # https://pythonhosted.org/setuptools/pkg_resources.html#entry-points zip_safe=False, author="Nylas Team", - author_email="team@nylas.com", + author_email="support@nylas.com", description="The Nylas Sync Engine", license="AGPLv3", keywords="nylas", diff --git a/tox.ini b/tox.ini index e4735db8d..2636755ed 100644 --- a/tox.ini +++ b/tox.ini @@ -14,8 +14,7 @@ deps = requests setenv = NYLAS_ENV=test -#commands = py.test --timeout=30 [] -commands = ./runtests [] +commands = py.test inbox/test/ sitepackages = True usedevelop = True
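
Note on the inbox/util/concurrency.py change above: retry_with_logging now treats a MySQL OperationalError as retryable when its message contains one of the strings in TRANSIENT_MYSQL_MESSAGES. A minimal, illustrative sketch of that check in isolation follows; looks_transient is a hypothetical helper written for this note and is not part of the diff.

    # Sketch only: mirrors the substring matching that the retry callback
    # performs on OperationalError messages; not code from this change.
    TRANSIENT_MYSQL_MESSAGES = (
        "try restarting transaction",
        "Too many connections",
        "Lost connection to MySQL server",
        "MySQL server has gone away",
        "Can't connect to MySQL server",
        "Max connect timeout reached")

    def looks_transient(error_message):
        # Transient if any known transient message is a substring.
        return any(msg in error_message for msg in TRANSIENT_MYSQL_MESSAGES)

    assert looks_transient("(2006, 'MySQL server has gone away')")
    assert not looks_transient("(1062, 'Duplicate entry')")

With that classification, transient database blips are counted against the existing occurrence counter and retried rather than immediately marking the account's sync as failed.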