diff --git a/.buildkite/docker-compose.yml b/.buildkite/docker-compose.yml index a669b468..b51c0245 100644 --- a/.buildkite/docker-compose.yml +++ b/.buildkite/docker-compose.yml @@ -1,10 +1,6 @@ -version: "2" services: mltshp: - image: mltshp/mltshp-web:latest - build: - context: .. - dockerfile: Dockerfile + build: .. volumes: - ./settings.py:/srv/mltshp.com/mltshp/settings.py - ../.git:/srv/mltshp.com/mltshp/.git diff --git a/.buildkite/settings.py b/.buildkite/settings.py index 6018da33..e758c0df 100644 --- a/.buildkite/settings.py +++ b/.buildkite/settings.py @@ -28,6 +28,7 @@ "use_workers": False, "debug_workers": True, "superuser_list": "admin", + "tornado_logging": False, # these must be set for testing test/unit/externalservice_tests.py # "twitter_consumer_key" : "twitter_consumer_key_here", # "twitter_consumer_secret" : "twitter_consumer_secret_key_here", diff --git a/.buildkite/steps/build-web.sh b/.buildkite/steps/build-web.sh index 6239a5df..90b77666 100755 --- a/.buildkite/steps/build-web.sh +++ b/.buildkite/steps/build-web.sh @@ -1,9 +1,15 @@ #!/bin/bash +# exit if any command fails (e); strict variable substitution (u); +# set exit code to non-zero for any failed piped commands (o pipefail) +# See also: http://redsymbol.net/articles/unofficial-bash-strict-mode/ set -euo pipefail -# pull prior image to populate layer cache -docker pull mltshp/mltshp-web:latest +echo "--- Pulling base Docker image" + docker pull mltshp/mltshp-web:latest -docker build -t mltshp/mltshp-web:build-${BUILDKITE_BUILD_NUMBER} . -docker push mltshp/mltshp-web:build-${BUILDKITE_BUILD_NUMBER} \ No newline at end of file +echo "+++ Building Docker image for web node" + docker build -t mltshp/mltshp-web:build-${BUILDKITE_BUILD_NUMBER} . + +echo "--- Pushing build Docker image to Docker Hub" + docker push mltshp/mltshp-web:build-${BUILDKITE_BUILD_NUMBER} diff --git a/.buildkite/steps/build-worker.sh b/.buildkite/steps/build-worker.sh index 8e477aca..6d987f47 100755 --- a/.buildkite/steps/build-worker.sh +++ b/.buildkite/steps/build-worker.sh @@ -1,9 +1,15 @@ #!/bin/bash +# exit if any command fails (e); strict variable substitution (u); +# set exit code to non-zero for any failed piped commands (o pipefail) +# See also: http://redsymbol.net/articles/unofficial-bash-strict-mode/ set -euo pipefail -# pull prior image to populate layer cache -docker pull mltshp/mltshp-worker:latest +echo "--- Pulling base Docker image" + docker pull mltshp/mltshp-worker:latest -docker build -t mltshp/mltshp-worker:build-${BUILDKITE_BUILD_NUMBER} -f Dockerfile.worker . -docker push mltshp/mltshp-worker:build-${BUILDKITE_BUILD_NUMBER} \ No newline at end of file +echo "+++ Building Docker image for worker node" + docker build -t mltshp/mltshp-worker:build-${BUILDKITE_BUILD_NUMBER} -f Dockerfile.worker . 
+ +echo "--- Pushing build Docker image to Docker Hub" + docker push mltshp/mltshp-worker:build-${BUILDKITE_BUILD_NUMBER} \ No newline at end of file diff --git a/.buildkite/steps/deploy.sh b/.buildkite/steps/deploy.sh index cb556640..9a573846 100755 --- a/.buildkite/steps/deploy.sh +++ b/.buildkite/steps/deploy.sh @@ -1,6 +1,9 @@ #!/bin/bash -set -eo pipefail +# exit if any command fails (e); strict variable substitution (u); +# set exit code to non-zero for any failed piped commands (o pipefail) +# See also: http://redsymbol.net/articles/unofficial-bash-strict-mode/ +set -euo pipefail function slackpost { # Usage: slackpost @@ -18,13 +21,19 @@ function slackpost { fi } -# Grab CI images -docker pull mltshp/mltshp-web:build-${BUILDKITE_BUILD_NUMBER} -docker tag mltshp/mltshp-web:build-${BUILDKITE_BUILD_NUMBER} mltshp/mltshp-web:latest -docker push mltshp/mltshp-web:latest +echo "--- Pulling Docker image for web node build ${BUILDKITE_BUILD_NUMBER}" + docker pull mltshp/mltshp-web:build-${BUILDKITE_BUILD_NUMBER} -docker pull mltshp/mltshp-worker:build-${BUILDKITE_BUILD_NUMBER} -docker tag mltshp/mltshp-worker:build-${BUILDKITE_BUILD_NUMBER} mltshp/mltshp-worker:latest -docker push mltshp/mltshp-worker:latest +echo "--- Tagging Docker image as latest and pushing to Docker Hub" + docker tag mltshp/mltshp-web:build-${BUILDKITE_BUILD_NUMBER} mltshp/mltshp-web:latest + docker push mltshp/mltshp-web:latest -slackpost "#operations" "Build ${BUILDKITE_BUILD_NUMBER} has been pushed to Docker cloud by ${BUILDKITE_UNBLOCKER}: ${BUILDKITE_BUILD_URL}" +echo "--- Pulling Docker image for worker node build ${BUILDKITE_BUILD_NUMBER}" + docker pull mltshp/mltshp-worker:build-${BUILDKITE_BUILD_NUMBER} + +echo "--- Tagging Docker image as latest and pushing to Docker Hub" + docker tag mltshp/mltshp-worker:build-${BUILDKITE_BUILD_NUMBER} mltshp/mltshp-worker:latest + docker push mltshp/mltshp-worker:latest + +echo "--- Posting Slack alert!" + slackpost "#operations" "Build ${BUILDKITE_BUILD_NUMBER} has been pushed to Docker cloud by ${BUILDKITE_UNBLOCKER}: ${BUILDKITE_BUILD_URL}" diff --git a/.buildkite/steps/test.sh b/.buildkite/steps/test.sh index b6df82ce..5750f4a9 100755 --- a/.buildkite/steps/test.sh +++ b/.buildkite/steps/test.sh @@ -1,5 +1,8 @@ #!/bin/bash +# exit if any command fails (e); strict variable substitution (u); +# set exit code to non-zero for any failed piped commands (o pipefail) +# See also: http://redsymbol.net/articles/unofficial-bash-strict-mode/ set -euo pipefail wait_for() { @@ -7,24 +10,24 @@ wait_for() { while ! 
nc -z $1 $2; do echo sleeping; sleep 2; done } -docker pull mltshp/mltshp-web:build-${BUILDKITE_BUILD_NUMBER} -docker tag mltshp/mltshp-web:build-${BUILDKITE_BUILD_NUMBER} mltshp/mltshp-web:latest +echo "--- Pulling base Docker image" + docker pull mltshp/mltshp-web:build-${BUILDKITE_BUILD_NUMBER} + docker tag mltshp/mltshp-web:build-${BUILDKITE_BUILD_NUMBER} mltshp/mltshp-web:latest -# launch fakes3/mysql/web app -docker-compose -f .buildkite/docker-compose.yml up -d +echo "--- Launching Docker containers" + docker compose -f .buildkite/docker-compose.yml up -d --build -# let's wait and allow mysql/fakes3 to spin up -#wait_for localhost 3306 -#wait_for localhost 8000 -sleep 10 +echo "~~~ Waiting for containers to start" + #wait_for localhost 3306 + #wait_for localhost 8000 + sleep 10 -# run our tests against it -docker exec -t buildkite_mltshp_1 ./run-tests.sh +echo "+++ Running unit tests" + docker exec -t buildkite_mltshp_1 ./run-tests.sh -# submit coverage data -docker exec -t -e BUILDKITE -e BUILDKITE_JOB_ID -e BUILDKITE_BRANCH -e COVERALLS_REPO_TOKEN buildkite_mltshp_1 ./coveralls-report.sh +echo "--- Submitting coverage data" + docker exec -t -e BUILDKITE -e BUILDKITE_JOB_ID -e BUILDKITE_BRANCH -e COVERALLS_REPO_TOKEN buildkite_mltshp_1 ./coveralls-report.sh -# tear down containers -docker-compose -f .buildkite/docker-compose.yml down - -docker container prune -f +echo "~~~ Stopping containers; cleanup" + docker compose -f .buildkite/docker-compose.yml down + docker container prune -f diff --git a/.gitignore b/.gitignore index 1934add2..5b8b75f1 100644 --- a/.gitignore +++ b/.gitignore @@ -9,6 +9,7 @@ pip-log.txt .DS_Store *.swp env +env3 mounts .env .deploy.env diff --git a/Dockerfile b/Dockerfile index 11c4924b..c6b1a5b6 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:16.04 +FROM ubuntu:22.04 LABEL maintainer "brad@bradchoate.com" ENV PYTHONUNBUFFERED 1 @@ -7,9 +7,9 @@ ENV PYTHONUNBUFFERED 1 # whole layer and steps to build it should be cached. 
RUN apt-get -y update && apt-get install -y \ supervisor \ + python3-dev \ libmysqlclient-dev \ mysql-client \ - python-dev \ libjpeg-dev \ libcurl4-openssl-dev \ curl \ @@ -20,20 +20,14 @@ RUN apt-get -y update && apt-get install -y \ libpcre3-dev \ libssl-dev \ libffi-dev \ - python-pip && \ + python3-pip && \ rm -rf /var/lib/apt/lists/* && \ - \ - pip install -U 'pip==20.3.4' 'setuptools==44.0.0' distribute && \ - # fixes a weird issue where distribute complains about setuptools "0.7" - # (incorrectly matching version "20.7.0" which ubuntu 16.04 has preinstalled) - rm -rf /usr/lib/python2.7/dist-packages/setuptools-20.7.0.egg-info && \ - \ # install nginx + upload module mkdir -p /tmp/install && \ cd /tmp/install && \ - wget http://nginx.org/download/nginx-0.8.55.tar.gz && tar zxf nginx-0.8.55.tar.gz && \ - wget https://github.com/fdintino/nginx-upload-module/archive/2.2.0.tar.gz && tar zxf 2.2.0.tar.gz && \ - cd /tmp/install/nginx-0.8.55 && \ + wget http://nginx.org/download/nginx-1.25.3.tar.gz && tar zxf nginx-1.25.3.tar.gz && \ + wget https://github.com/fdintino/nginx-upload-module/archive/2.3.0.tar.gz && tar zxf 2.3.0.tar.gz && \ + cd /tmp/install/nginx-1.25.3 && \ ./configure \ --with-http_ssl_module \ --with-http_stub_status_module \ @@ -43,7 +37,7 @@ RUN apt-get -y update && apt-get install -y \ --conf-path=/etc/nginx/nginx.conf \ --error-log-path=/srv/mltshp.com/nginx-error.log \ --http-log-path=/srv/mltshp.com/nginx-access.log \ - --add-module=/tmp/install/nginx-upload-module-2.2.0 && \ + --add-module=/tmp/install/nginx-upload-module-2.3.0 && \ make && make install && \ mkdir -p /etc/nginx && \ rm -rf /tmp/install && \ diff --git a/Dockerfile.worker b/Dockerfile.worker index 3b5d9678..6c084933 100644 --- a/Dockerfile.worker +++ b/Dockerfile.worker @@ -1,4 +1,4 @@ -FROM ubuntu:16.04 +FROM ubuntu:22.04 LABEL maintainer "brad@bradchoate.com" ENV PYTHONUNBUFFERED 1 @@ -10,7 +10,7 @@ RUN apt-get -y update && \ cron \ libmysqlclient-dev \ mysql-client \ - python-dev \ + python3-dev \ libjpeg-dev \ libcurl4-openssl-dev \ curl \ @@ -22,14 +22,8 @@ RUN apt-get -y update && \ libpcre3-dev \ libssl-dev \ libffi-dev \ - python-pip && \ + python3-pip && \ rm -rf /var/lib/apt/lists/* && \ - pip install -U 'pip==20.3.4' 'setuptools==44.0.0' distribute && \ - # Fix for a really weird issue when installing postmark library - # distribute fails to run since it sees a setuptools with "0.7" - # in the name, even though ubuntu:16.04 has pre-installed "20.7.0" - # https://github.com/pypa/setuptools/issues/543 - rm -rf /usr/lib/python2.7/dist-packages/setuptools-20.7.0.egg-info && \ groupadd ubuntu --gid=1010 && \ useradd ubuntu --create-home --home-dir=/home/ubuntu \ --uid=1010 --gid=1010 && \ diff --git a/Makefile b/Makefile index f6a5d613..ae8887e5 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,4 @@ -.PHONY: init-dev run shell test destroy migrate mysql +.PHONY: init-dev run stop custom-build build shell test destroy migrate mysql init-dev: cp settings.example.py settings.py @@ -6,26 +6,29 @@ init-dev: mkdir -p mounts/mysql mounts/logs mounts/fakes3 mounts/uploaded run: - docker-compose --env-file .env up -d + docker compose up -d stop: - docker-compose --env-file .env down + docker compose down + +custom-build: + @read -p "build tag (default is 'latest'): " build_tag; \ + docker build -t mltshp/mltshp-web:$${build_tag:-latest} build: docker build -t mltshp/mltshp-web:latest . 
shell: - docker-compose --env-file .env exec mltshp bash + docker compose exec mltshp bash test: - docker-compose --env-file .env exec mltshp su ubuntu -c "cd /srv/mltshp.com/mltshp; python test.py $(TEST)" + docker compose exec mltshp su ubuntu -c "cd /srv/mltshp.com/mltshp; python3 test.py $(TEST)" destroy: - docker-compose down - rm -rf mounts + docker compose down && rm -rf mounts migrate: - docker-compose --env-file .env exec mltshp su ubuntu -c "cd /srv/mltshp.com/mltshp; python migrate.py" + docker compose exec mltshp su ubuntu -c "cd /srv/mltshp.com/mltshp; python3 migrate.py" mysql: - docker-compose --env-file .env exec mltshp su ubuntu -c "cd /srv/mltshp.com/mltshp; mysql -u root --host mysql mltshp" + docker compose exec mltshp su ubuntu -c "cd /srv/mltshp.com/mltshp; mysql -u root --host mysql mltshp" diff --git a/README.md b/README.md index ab6e1118..83545014 100644 --- a/README.md +++ b/README.md @@ -4,6 +4,12 @@ [![Build status](https://badge.buildkite.com/a86854c6272f21c9b46b8b6aafd3a4fb99bcfabe6e611bc370.svg)](https://buildkite.com/mltshp-inc/mltshp-web-service) [![Coverage Status](https://coveralls.io/repos/github/MLTSHP/mltshp/badge.svg?branch=master)](https://coveralls.io/github/MLTSHP/mltshp?branch=master) +## Project Description + +This project is the codebase for running [mltshp.com](https://mltshp.com). +It's a Python 3 application, utilizing a MySQL database, Amazon S3 for +asset storage, and RabbitMQ for background jobs. + ## Development Environment MLTSHP is a Dockerized application. This greatly simplifies running the @@ -20,6 +26,11 @@ should be okay): $ make init-dev +Check the process limits for your computer using the `ulimit -a` command. Increase +the file descriptor limit if it's not at least 1000, using `ulimit -n 1000` (or +some suitably higher value). You can add this to your shell startup script to +make it permanent. + You should be able to start the app itself using: $ make run @@ -59,7 +70,7 @@ When you run the application, it launches it into a background process. But if you want to watch the realtime logs emitted by each service, just use this command: - $ docker-compose logs -f + $ docker compose logs -f In addition to that, the web app produces some log files that are captured under the "mounts/logs" folder of your git repository. @@ -114,8 +125,7 @@ if you have a MySQL database you use locally for development and testing and keep it versus using the `destroy` and `init-dev` commands to make a new one. To update your database, just do this: - $ make shell - docker-shell$ cd /srv/mltshp.com/mltshp; python migrate.py + $ make migrate That should do it. @@ -138,6 +148,11 @@ Then, just run: Which will invoke a Docker process to run the unit test suite. +You can also run a specific unit test by setting a TEST environment +variable (you can find the unit test names in `test.py`): + + $ TEST=test.unit.shake_tests make test + ## Connecting to the MLTSHP shell If you ever need to access the Docker image running the application, @@ -150,6 +165,14 @@ This should place you in the /srv/mltshp.com/mltshp directory as the root user. You can use `apt-get` commands to install utilities you may need. +## Connecting to the MLTSHP MySQL database + +You can also access the MySQL shell using: + + $ make mysql + +Useful for inspecting the database for your local directly. 
+ ## Cleanup If you ever want to _wipe your local data_ and rebuild your Docker @@ -160,7 +183,7 @@ containers, just use this command: If you just wish to rebuild the Docker container, use the Docker compose command: - $ docker-compose down + $ make stop Then, run another `make run`. @@ -174,6 +197,6 @@ update from the pattern library. ## About -MLTSHP is open-source software, ©2017 the MLTSHP team and released to the public under the terms of the Mozilla Public License. A copy of the MPL can be found in the LICENSE file. +MLTSHP is open-source software, ©2023 the MLTSHP team and released to the public under the terms of the Mozilla Public License. A copy of the MPL can be found in the LICENSE file. [![Fastly logo](/static/images/fastly-logo.png)](https://www.fastly.com) MLTSHP is proudly powered by Fastly. diff --git a/developerdocs/conf.py b/developerdocs/conf.py index cb5fd0f2..784218b1 100644 --- a/developerdocs/conf.py +++ b/developerdocs/conf.py @@ -40,8 +40,8 @@ master_doc = 'docindex' # General information about the project. -project = u'mltshp' -copyright = u'2017 MLTSHP, Inc.' +project = 'mltshp' +copyright = '2023 MLTSHP, Inc.' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -180,8 +180,8 @@ # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ - ('docindex', 'mltshp-api.tex', u'mltshp-api Documentation', - u'MLTSHP, LLC', 'manual'), + ('docindex', 'mltshp-api.tex', 'mltshp-api Documentation', + 'MLTSHP, LLC', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of @@ -213,6 +213,6 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ - ('docindex', 'mltshp-api', u'mltshp-api Documentation', - [u'MLTSHP, LLC'], 1) + ('docindex', 'mltshp-api', 'mltshp-api Documentation', + ['MLTSHP, LLC'], 1) ] diff --git a/docker-compose.yml b/docker-compose.yml index ef51fdce..bdf409bf 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,8 @@ -version: "2" services: mltshp: + build: . + env_file: + - .env image: mltshp/mltshp-web volumes: - .:/srv/mltshp.com/mltshp @@ -21,8 +23,12 @@ services: aliases: - mltshp.localhost fakes3: - image: ourtownrentals/fake-s3 - entrypoint: fakes3 -r /srv --license ${FAKES3_LICENSE_KEY} -p 8000 + build: ./setup/dev/fakes3 + env_file: + - .env + entrypoint: fakes3 --root /srv --license ${FAKES3_LICENSE_KEY} --port 4567 + ports: + - "4567:4567" volumes: - ./mounts/fakes3:/srv networks: diff --git a/handlers/__init__.py b/handlers/__init__.py index 0f5f0efc..43e743c5 100644 --- a/handlers/__init__.py +++ b/handlers/__init__.py @@ -1,20 +1,20 @@ #handlers -import base -import account -import admin -import image -import home -import misc -import tools -import upload -import incoming -import friends -import api -import developers -import conversations -import error -import popular -import shake -import tag -import stripe_hooks -import search +from . import base +from . import account +from . import admin +from . import image +from . import home +from . import misc +from . import tools +from . import upload +from . import incoming +from . import friends +from . import api +from . import developers +from . import conversations +from . import error +from . import popular +from . import shake +from . import tag +from . 
import stripe_hooks +from . import search diff --git a/handlers/account.py b/handlers/account.py index c8d0694c..aa99cfa4 100644 --- a/handlers/account.py +++ b/handlers/account.py @@ -1,7 +1,7 @@ import datetime import re import json -from urllib import urlencode +from urllib.parse import urlencode import logging import tornado.httpclient @@ -12,7 +12,7 @@ import postmark import requests -from base import BaseHandler, require_membership +from .base import BaseHandler, require_membership from models import User, Invitation, Shake, Notification, Conversation, Invitation,\ App, PaymentLog, Voucher, Promotion, MigrationState from lib.utilities import email_re, base36decode, is_valid_voucher_key,\ @@ -350,9 +350,9 @@ def post(self, app_id=None): user = self.get_current_user_object() app = App.get("id = %s", app_id) if not app: - return {'error' : 'Invalid request.'} + return self.write({'error' : 'Invalid request.'}) app.disconnect_for_user(user) - return {'result' : 'ok'} + return self.write({'result' : 'ok'}) class ForgotPasswordHandler(BaseHandler): @@ -845,7 +845,7 @@ def get(self, page=None): """ current_user_obj = self.get_current_user_object() - if self.get_arguments('agree', None): + if self.get_argument('agree', None): current_user_obj.tou_agreed = True current_user_obj.save() return self.redirect("/") @@ -931,7 +931,7 @@ def post(self): if plan_id == "mltshp-double": quantity = int(float(self.get_argument("quantity"))) if quantity < 24 or quantity > 500: - raise "Invalid request" + raise Exception("Invalid request") customer = None sub = None @@ -948,7 +948,7 @@ def post(self): if customer is None: if token_id is None: # FIXME: handle this more gracefully... - raise "Invalid request" + raise Exception("Invalid request") # create a new customer object for this subscription customer = stripe.Customer.create( diff --git a/handlers/admin.py b/handlers/admin.py index e45661a5..959e4d13 100644 --- a/handlers/admin.py +++ b/handlers/admin.py @@ -4,7 +4,7 @@ import tornado.web import postmark -from base import BaseHandler +from .base import BaseHandler from models import Sharedfile, User, Shake, Shakesharedfile, Invitation, Waitlist, ShakeCategory from lib.utilities import send_slack_notification diff --git a/handlers/api.py b/handlers/api.py index 6f5a97b8..32ff996d 100644 --- a/handlers/api.py +++ b/handlers/api.py @@ -1,16 +1,16 @@ from datetime import datetime import time -from urllib import urlencode -from urlparse import urlparse, urlunparse, urljoin +from urllib.parse import urlencode +from urllib.parse import urlparse, urlunparse from hashlib import sha1 import hmac import base64 import functools import tornado.web -from tornado.options import define, options +from tornado.options import options -from base import BaseHandler +from .base import BaseHandler from lib.utilities import normalize_string, base36decode from models import Accesstoken, Apihit, Apilog, App, Authorizationcode, \ Favorite, Magicfile, Sharedfile, User, Shake, Comment @@ -102,8 +102,8 @@ def wrapper(self, *args, **kwargs): parsed_url.path, query_array) - digest = hmac.new(access_token.consumer_secret.encode('ascii'), normalized_string, sha1).digest() - signature = base64.encodestring(digest).strip() + digest = hmac.new(access_token.consumer_secret.encode('ascii'), normalized_string.encode('ascii'), sha1).digest() + signature = base64.encodebytes(digest).strip().decode('ascii') if signature == auth_items['signature']: self.oauth2_user_id = access_token.user_id diff --git a/handlers/base.py b/handlers/base.py index 
c500e62e..b87cc74d 100644 --- a/handlers/base.py +++ b/handlers/base.py @@ -5,7 +5,6 @@ from tornado.options import define, options from lib.flyingcow.cache import RequestHandlerQueryCache -from lib.s3 import S3Connection import models SESSION_COOKIE = "sid" @@ -60,9 +59,6 @@ def finish(self, chunk=None): self.set_header('x-proc-time',"%s" % (proc_time)) super(BaseHandler, self).finish(chunk) - def get_s3_connection(self): - return S3Connection() - def get_current_user(self): sid = self.get_secure_cookie(SESSION_COOKIE) if sid: @@ -103,7 +99,7 @@ def add_error(self, key, message): self._errors[key] = message def add_errors(self, errors_dict): - for error_key in errors_dict.keys(): + for error_key in list(errors_dict.keys()): self.add_error(error_key, errors_dict[error_key]) def log_user_in(self, user): diff --git a/handlers/conversations.py b/handlers/conversations.py index f5f5abde..ed56d648 100644 --- a/handlers/conversations.py +++ b/handlers/conversations.py @@ -1,6 +1,6 @@ import tornado.web -from base import BaseHandler, require_membership +from .base import BaseHandler, require_membership from models import Conversation, Notification, Comment, Sharedfile class IndexHandler(BaseHandler): diff --git a/handlers/developers.py b/handlers/developers.py index b1e02944..88cc5875 100644 --- a/handlers/developers.py +++ b/handlers/developers.py @@ -1,6 +1,6 @@ import tornado.web -from base import BaseHandler +from .base import BaseHandler from models import App diff --git a/handlers/error.py b/handlers/error.py index a98f2eee..29d22a38 100644 --- a/handlers/error.py +++ b/handlers/error.py @@ -1,5 +1,5 @@ import tornado -import base +from . import base class NotFoundHandler(base.BaseHandler): def check_xsrf_cookie(self): diff --git a/handlers/friends.py b/handlers/friends.py index d7d6d8b7..dc128718 100644 --- a/handlers/friends.py +++ b/handlers/friends.py @@ -1,5 +1,5 @@ import tornado.web -from base import BaseHandler, require_membership +from .base import BaseHandler, require_membership class FriendHandler(BaseHandler): diff --git a/handlers/home.py b/handlers/home.py index 5006f56c..ae7907c4 100644 --- a/handlers/home.py +++ b/handlers/home.py @@ -1,6 +1,4 @@ -import hashlib - -from base import BaseHandler +from .base import BaseHandler import models import lib.utilities @@ -35,7 +33,7 @@ def get(self, before_or_after=None, base36_id=None, returning=None): # We're going to older, so ony use before_id. if before_id: notifications_count = models.Notification.for_user_count(current_user_obj) - sharedfiles = current_user_obj.sharedfiles_from_subscriptions(before_id=before_id,per_page=11) + sharedfiles = current_user_obj.sharedfiles_from_subscriptions(before_id=before_id, per_page=11) # we have nothing on this page, redirect to home page with out params. 
if len(sharedfiles) == 0: return self.redirect('/') diff --git a/handlers/image.py b/handlers/image.py index 2ccf9165..fe9d5d6c 100644 --- a/handlers/image.py +++ b/handlers/image.py @@ -1,6 +1,6 @@ import tempfile import re -from urlparse import urlparse +from urllib.parse import urlparse import time from datetime import datetime, timedelta import tornado.web @@ -8,7 +8,7 @@ from tornado.escape import json_encode from tornado.options import options -from base import BaseHandler, require_membership +from .base import BaseHandler, require_membership from models import Favorite, User, Sharedfile, Sourcefile, Comment, Shake, Externalservice import models from lib.utilities import s3_authenticated_url, uses_a_banned_phrase @@ -35,7 +35,7 @@ def post(self, share_key): if not current_user: raise tornado.web.HTTPError(403) - json = self.get_arguments('json', False) + json = self.get_argument('json', False) if not sharedfile.can_save(current_user): if json: return self.write({'error' : "Can't save that file."}) @@ -148,7 +148,7 @@ class ShowLikesHandler(BaseHandler): def get(self, share_key): sharedfile = Sharedfile.get_by_share_key(share_key) if not sharedfile: - return {'error': 'Invalid file key.'} + return self.write({'error': 'Invalid file key.'}) response_data = [] for sharedfile in sharedfile.favorites(): @@ -169,7 +169,7 @@ class ShowSavesHandler(BaseHandler): def get(self, share_key): sharedfile = Sharedfile.get_by_share_key(share_key) if not sharedfile: - return {'error': 'Invalid file key.'} + return self.write({'error': 'Invalid file key.'}) response_data = [] for sharedfile in sharedfile.saves(): @@ -353,7 +353,7 @@ def post(self, share_key): raise tornado.web.HTTPError(404) if current_user.id != sharedfile.user_id: raise tornado.web.HTTPError(403) - shakes = self.get_arguments('shakes', []) + shakes = self.get_arguments('shakes') for shake_id in shakes: shake = Shake.get("id = %s", shake_id) if shake.can_update(current_user.id): diff --git a/handlers/incoming.py b/handlers/incoming.py index 58545c1c..7269fafa 100644 --- a/handlers/incoming.py +++ b/handlers/incoming.py @@ -1,7 +1,7 @@ from lib.utilities import base36decode import tornado.web -from base import BaseHandler, require_membership +from .base import BaseHandler, require_membership from models import User, Sharedfile, notification diff --git a/handlers/misc.py b/handlers/misc.py index e483c274..dc617038 100644 --- a/handlers/misc.py +++ b/handlers/misc.py @@ -1,4 +1,4 @@ -from base import BaseHandler +from .base import BaseHandler import postmark diff --git a/handlers/popular.py b/handlers/popular.py index 2f844d8f..bfe9d86a 100644 --- a/handlers/popular.py +++ b/handlers/popular.py @@ -3,7 +3,7 @@ import tornado.web from tornado.options import options -from base import BaseHandler, require_membership +from .base import BaseHandler, require_membership from models import sharedfile, notification, user diff --git a/handlers/search.py b/handlers/search.py index 331540fe..a7906741 100644 --- a/handlers/search.py +++ b/handlers/search.py @@ -3,7 +3,7 @@ import tornado.web from tornado import escape from tornado.options import options -from base import BaseHandler, require_membership +from .base import BaseHandler, require_membership import lib.utilities from models import sharedfile, user diff --git a/handlers/shake.py b/handlers/shake.py index 19606d4e..47e59838 100644 --- a/handlers/shake.py +++ b/handlers/shake.py @@ -4,7 +4,7 @@ import torndb from tornado.options import options -from base import BaseHandler, 
require_membership +from .base import BaseHandler, require_membership from models import Shake, User, Notification, ShakeManager, MigrationState from lib.utilities import base36decode @@ -101,6 +101,10 @@ def get(self, shake_name=None, before_or_after=None, base36_id=None): older_link=older_link,newer_link=newer_link, shake_editor=shake.owner(), managers=managers, is_shake_manager=is_shake_manager, followers=followers[:10], + base36_id=base36_id, + sharedfile_id=sharedfile_id, + since_id=since_id, + max_id=max_id, follower_count=follower_count) @@ -137,7 +141,7 @@ def post(self): new_shake = Shake(name=name, title=title, description=description, user_id=user_object.id, type='group') try: if new_shake.save(): - return self.redirect('/%s' % (new_shake.name)) + return self.redirect('/%s' % new_shake.name) except torndb.IntegrityError: # This is a rare edge case, so we handle it lazily -- IK. pass @@ -187,7 +191,7 @@ def post(self, shake_name): shake_to_update.description = new_description shake_to_update.save() - return self.redirect('/shake/' + shake_to_update.name + '/quick-details') + return self.redirect('/shake/%s/quick-details' % shake_to_update.name) class UpdateShakeHandler(BaseHandler): @@ -235,7 +239,7 @@ def post(self, shake_name=None): if json: return self.write({'ok':'Saved it!'}) else: - return self.redirect("/%s" % (shake_name)) + return self.redirect("/%s" % shake_name) class SubscribeShakeHandler(BaseHandler): @@ -314,13 +318,13 @@ def post(self, shake_name): if is_json: return self.write({'error':'error'}) else: - return self.redirect('/%s' % (shake_name)) + return self.redirect('/%s' % shake_name) Notification.new_invitation(sender, receiver, shake.id) if is_json: return self.write({'invitation_status':True}) else: - return self.redirect('/%s' % (shake_name)) + return self.redirect('/%s' % shake_name) class AcceptInvitationHandler(BaseHandler): @@ -369,7 +373,7 @@ def post(self, shake_name): return self.write({'response' : 'ok', 'count' : remaining_notifications_count}) else: - return self.redirect("/%s", shake_object.name) + return self.redirect("/%s" % shake_object.name) class RequestInvitationHandler(BaseHandler): @@ -390,7 +394,7 @@ def post(self, shake_name=None): if self.get_argument('json', None): return self.write({'status':'ok'}) else: - return self.redirect('/%s' % (shake.name)) + return self.redirect('/%s' % shake.name) else: if self.get_argument('json', None): return self.write({'status':'error', 'message':'not allowed'}) @@ -416,7 +420,7 @@ def post(self, shake_name=None): if self.get_argument('json', None): return self.write({'status':'error'}) else: - return self.redirect('/%s' % (shake.name)) + return self.redirect('/%s' % shake.name) no = Notification.get('sender_id = %s and receiver_id = %s and action_id = %s and deleted = 0', requestor.id, current_user_object.id, shake.id) @@ -424,7 +428,7 @@ def post(self, shake_name=None): if self.get_argument('json', None): return self.write({'status':'error'}) else: - return self.redirect('/%s' % (shake.name)) + return self.redirect('/%s' % shake.name) if shake.add_manager(user_to_add=requestor): no.delete() @@ -433,12 +437,12 @@ def post(self, shake_name=None): if self.get_argument('json', None): return self.write({'status':'ok', 'count' : Notification.count_for_user_by_type(current_user_object.id, 'invitation_request')}) else: - return self.redirect('/%s' % (shake.name)) + return self.redirect('/%s' % shake.name) else: if self.get_argument('json', None): return self.write({'status':'error'}) else: - return 
self.redirect('/%s' % (shake.name)) + return self.redirect('/%s' % shake.name) class DeclineInvitationHandler(BaseHandler): @@ -459,7 +463,7 @@ def post(self, shake_name=None): if self.get_argument('json', None): return self.write({'status':'error'}) else: - return self.redirect('/%s', shake.name) + return self.redirect('/%s' % shake.name) no = Notification.get('sender_id = %s and receiver_id = %s and action_id = %s and deleted = 0', requestor.id, current_user_object.id, shake.id) @@ -467,14 +471,14 @@ def post(self, shake_name=None): if self.get_argument('json', None): return self.write({'status':'error'}) else: - return self.redirect('/%s' % (shake.name)) + return self.redirect('/%s' % shake.name) no.delete() if self.get_argument('json', None): return self.write({'status':'ok', 'count' : Notification.count_for_user_by_type(current_user_object.id, 'invitation_request')}) else: - return self.redirect('/%s' % (shake.name)) + return self.redirect('/%s' % shake.name) class RSSFeedHandler(BaseHandler): diff --git a/handlers/stripe_hooks.py b/handlers/stripe_hooks.py index 8bd0044b..116b06ea 100644 --- a/handlers/stripe_hooks.py +++ b/handlers/stripe_hooks.py @@ -2,12 +2,13 @@ from tornado.options import options -from base import BaseHandler +from .base import BaseHandler from models import User, PaymentLog import json import postmark import stripe +import stripe.util class StripeWebhook(BaseHandler): @@ -17,7 +18,6 @@ def check_xsrf_cookie(self): def post(self): # type of message is passed through "type" parameter json_response = json.loads(self.request.body) - body_str = json.dumps(json_response).replace("\n","\\n") stripe_customer_id = None period_start = None @@ -30,7 +30,7 @@ def post(self): amount = 0 operation = None - evt = stripe.convert_to_stripe_object(json_response, + evt = stripe.util.convert_to_stripe_object(json_response, options.stripe_secret_key, None) if evt.type == 'invoice.payment_failed': diff --git a/handlers/tag.py b/handlers/tag.py index 578e252d..da74b826 100644 --- a/handlers/tag.py +++ b/handlers/tag.py @@ -2,7 +2,7 @@ from tornado.escape import json_encode from tornado import escape -from base import BaseHandler, require_membership +from .base import BaseHandler, require_membership from models import Tag, TaggedFile from lib.utilities import base36decode diff --git a/handlers/tools.py b/handlers/tools.py index 5474be48..6e6bd286 100644 --- a/handlers/tools.py +++ b/handlers/tools.py @@ -1,4 +1,4 @@ -from urlparse import urlparse, parse_qs +from urllib.parse import urlparse, parse_qs import os import re import random @@ -8,15 +8,15 @@ from tornado.httpclient import HTTPRequest import tornado.auth import tornado.web -from tornado.escape import url_escape, json_decode -from tornado.options import define, options +from tornado.escape import json_decode +from tornado.options import options -from BeautifulSoup import BeautifulSoup - -from models import Externalservice, User, Sourcefile, Sharedfile, Shake, ExternalRelationship, ShakeCategory -from base import BaseHandler, require_membership +from models import Externalservice, User, Sourcefile, Sharedfile, Shake, ShakeCategory +from .base import BaseHandler, require_membership from lib.utilities import base36encode -import lib.feathers + +import logging +logger = logging.getLogger("mltshp") class PickerPopupHandler(BaseHandler): @@ -81,9 +81,8 @@ def get(self): can_upload_this_month=can_upload_this_month) @tornado.web.authenticated - @tornado.web.asynchronous @require_membership - def post(self): + async def post(self): """ 
TODO: better determination of correct file name, if it is indeed a file, plus type. """ @@ -98,17 +97,9 @@ def post(self): http = tornado.httpclient.AsyncHTTPClient() - #this sends a header value for cookie to d/l protected FP files - fp_cookie = None - b = re.compile(r"^http(s?)://(.*?)(.?)filepile\.org") - m = b.match(self.url) - if m: - for char in [' ', '[', ']']: - self.url = self.url.replace(char, url_escape(char)) - - fp_cookie = {'Cookie':'_filepile_session=4c2eff30dd27e679d38fbc030b204488'} + logger.debug("Fetching %s" % (self.url)) + request = HTTPRequest(self.url, header_callback=self.on_header) - request = HTTPRequest(self.url, headers=fp_cookie, header_callback=self.on_header) if self.get_argument('skip_s3', None): # This parameter is used specifically for unit testing, so mock the file being # served as well. @@ -119,9 +110,13 @@ def post(self): dummy_response = tornado.httpclient.HTTPResponse(request, 200, buffer=dummy_buffer) self.on_response(dummy_response) else: - http.fetch(request, self.on_response) + fut = http.fetch(request) + response = await fut + logger.debug("Got response for %s" % (self.url)) + self.on_response(response) def on_response(self, response): + logger.debug("Parsing response for %s" % (self.url)) url_parts = urlparse(response.request.url) file_name = os.path.basename(url_parts.path) title = self.get_argument("title", None) @@ -135,23 +130,26 @@ def on_response(self, response): title = None if self.content_type not in self.approved_content_types: - if response.body[0:50].find('JFIF') > -1: + if response.body[0:50].find(b'JFIF') > -1: self.content_type = 'image/jpeg' else: - return self.render("tools/picker-error.html") + return self.render("tools/picker-error.html", error="Invalid file type: %s" % self.content_type) if len(file_name) == 0: - return self.render("tools/picker-error.html") + return self.render("tools/picker-error.html", error="file_name is empty") sha1_file_key = Sourcefile.get_sha1_file_key(file_data=response.body) user = self.get_current_user() try: + logger.debug("Writing file %s/%s" % (options.uploaded_files, sha1_file_key)) fh = open("%s/%s" % (options.uploaded_files, sha1_file_key), 'wb') fh.write(response.body) fh.close() except Exception as e: + logger.error("Error saving file %s/%s" % (options.uploaded_files, sha1_file_key)) raise tornado.web.HTTPError(500) + logger.debug("Creating sharedfile") sf = Sharedfile.create_from_file( file_path = "%s/%s" % (options.uploaded_files, sha1_file_key), file_name = file_name, @@ -164,6 +162,7 @@ def on_response(self, response): sf.source_url = source_url sf.description = description sf.alt_text = alt_text + logger.debug("Saving to database") sf.save() if not options.debug: # file cleanup @@ -171,15 +170,16 @@ def on_response(self, response): os.remove("%s/%s" % (options.uploaded_files, sha1_file_key)) except: pass + logger.debug("Rendering picker-success.html") self.render("tools/picker-success.html", sf=sf) def on_header(self, header): - if header.startswith("Content-Length:"): - content_length = re.search("Content-Length: (.*)", header) + if header.lower().startswith("content-length:"): + content_length = re.search("content-length: (.*)", header, re.IGNORECASE) if int(content_length.group(1).rstrip()) > 10000000: #this is not the correct size to error on raise tornado.web.HTTPError(413) - elif header.startswith("Content-Type:"): - ct = re.search("Content-Type: (.*)", header) + elif header.lower().startswith("content-type:"): + ct = re.search("content-type: (.*)", header, re.IGNORECASE) 
self.content_type = ct.group(1).rstrip() @@ -205,10 +205,9 @@ def get(self): class ToolsTwitterConnectHandler(BaseHandler, tornado.auth.TwitterMixin): - @tornado.web.asynchronous @tornado.web.authenticated @require_membership - def get(self): + async def get(self): if self.get_argument("oauth_token", None): self.get_authenticated_user(self._on_auth) return @@ -267,10 +266,9 @@ def get(self): class SaveVideoHandler(BaseHandler): - @tornado.web.asynchronous @tornado.web.authenticated @require_membership - def get(self): + async def get(self): url = self.get_argument('url', None) shake_id = self.get_argument('shake_id', "") if not url: @@ -279,30 +277,32 @@ def get(self): url = Sourcefile.make_oembed_url(url.strip()) if url: - self.handle_oembed_url(url) + await self.handle_oembed_url(url) else: self.render("tools/save-video-error.html", message="Invalid URL. We didn't recognize that URL") - def on_oembed_response(self, response): + async def on_oembed_response(self, response): if response.code == 401: self.render("tools/save-video-error.html", message="Embedding disabled by request. The user who uploaded this file has requested it not be embedded on other web sites.") return - self.handle_oembed_data(response.body) + await self.handle_oembed_data(response.body) - def handle_oembed_url(self, url): + async def handle_oembed_url(self, url): """Takes a sanitized URL (as created by models.sourcefile.make_oembed_url) and issues a request for it. If the URL is actually a data URI, strip off the well-known header, and handle the oembed JSON encoded into it instead. """ if url.startswith('data:text/json;charset=utf-8,'): j_oembed = url.replace('data:text/json;charset=utf-8,', '', 1) - self.handle_oembed_data(j_oembed) + await self.handle_oembed_data(j_oembed) else: request = HTTPRequest(url, 'GET') http = tornado.httpclient.AsyncHTTPClient() - http.fetch(request,self.on_oembed_response) + fut = http.fetch(request) + response = await fut + await self.on_oembed_response(response) - def handle_oembed_data(self, oembed): + async def handle_oembed_data(self, oembed): try: j_oembed = json_decode(oembed) except Exception as e: @@ -313,7 +313,7 @@ def handle_oembed_data(self, oembed): self.render("tools/save-video-error.html", message="We could not load the embed code for this file. Please contact support.") return - if j_oembed.has_key('type') and j_oembed['provider_name'] == 'Flickr' and j_oembed['type'] != 'video': + if 'type' in j_oembed and j_oembed['provider_name'] == 'Flickr' and j_oembed['type'] != 'video': self.render("tools/save-video-error.html", message="We could not load the embed code for this file. 
Please contact support.") return @@ -338,7 +338,9 @@ def handle_oembed_data(self, oembed): self.oembed_doc = j_oembed request = HTTPRequest(self.oembed_doc['thumbnail_url'], 'GET') http = tornado.httpclient.AsyncHTTPClient() - http.fetch(request,self.on_thumbnail_response) + fut = http.fetch(request) + response = await fut + self.on_thumbnail_response(response) else: self.render("tools/save-video.html", url=url, html=j_oembed['html'], shake_id=shake_id) @@ -367,7 +369,7 @@ def on_thumbnail_response(self, response): pass title = '' - if self.oembed_doc.has_key('title'): + if 'title' in self.oembed_doc: title = self.oembed_doc['title'] shared_file = Sharedfile(user_id=current_user.id, name=url, content_type='text/html', source_id=source_file.id, title=title, source_url=url) @@ -380,16 +382,15 @@ def on_thumbnail_response(self, response): user_shake = Shake.get('user_id = %s and type=%s', current_user.id, 'user') shared_file.add_to_shake(self.destination_shake) - if self.oembed_doc.has_key('description'): + if 'description' in self.oembed_doc: shared_file.description = self.oembed_doc['description'] self.write({'path' : "/p/%s" % (share_key)}) self.finish() - @tornado.web.asynchronous @tornado.web.authenticated @require_membership - def post(self): + async def post(self): url = self.get_argument('url', None) if not url: self.render("tools/save-video.html", url = url, title = None, description=None) @@ -407,7 +408,7 @@ def post(self): return self.render("tools/save-video-error.html", message="We couldn't save the video to specified shake. Please contact support.") if current_user.email_confirmed != 1: return self.render("tools/save-video-error.html", message="You must confirm your email address before you can post.") - self.handle_oembed_url(url) + await self.handle_oembed_url(url) else: self.render("tools/save-video-error.html", message="We could not load the embed code. The video server may be down. 
Please contact support.") diff --git a/handlers/upload.py b/handlers/upload.py index 8e32146a..492a1766 100644 --- a/handlers/upload.py +++ b/handlers/upload.py @@ -1,10 +1,9 @@ -from base import BaseHandler, require_membership +from .base import BaseHandler, require_membership import tornado.httpclient from tornado.httpclient import HTTPRequest import tornado.web -import postmark -from tornado.options import define, options +from tornado.options import options import tornado.escape from models import Sharedfile, Externalservice @@ -15,14 +14,13 @@ class UploadHandler(BaseHandler): """ def check_xsrf_cookie(self): user = self.get_current_user() - if self.request.headers.has_key('X-Verify-Credentials-Authorization') and not user: + if 'X-Verify-Credentials-Authorization' in self.request.headers and not user: return else: super(UploadHandler, self).check_xsrf_cookie() - @tornado.web.asynchronous @require_membership - def post(self): + async def post(self): """ { "file_name": ["before.png"], @@ -64,7 +62,7 @@ def post(self): raise tornado.web.HTTPError(403) else: return self.redirect("/") - elif self.request.headers.has_key('X-Verify-Credentials-Authorization') and self.request.headers.has_key('X-Auth-Service-Provider'): + elif 'X-Verify-Credentials-Authorization' in self.request.headers and 'X-Auth-Service-Provider' in self.request.headers: #pm = postmark.PMMail(api_key=options.postmark_api_key, # sender="hello@mltshp.com", to="notifications@mltshp.com", # subject="TWITTER REQUEST", @@ -73,14 +71,16 @@ def post(self): if self.request.headers['X-Auth-Service-Provider'].startswith("https://api.twitter.com/1.1/account/verify_credentials.json") or self.request.headers['X-Auth-Service-Provider'].startswith("http://localhost:"): http = tornado.httpclient.AsyncHTTPClient() - http.fetch( + fut = http.fetch( HTTPRequest( url=self.request.headers['X-Auth-Service-Provider'], method='GET', headers={'Authorization':self.request.headers['X-Verify-Credentials-Authorization']}, body=None #"asdf=asdf" -- GET requests can't have a body ), - callback=self.on_response) + ) + response = await fut + self.on_response(response) else: raise tornado.web.HTTPError(403) else: diff --git a/lib/db/__init__.py b/lib/db/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/lib/feathers/__init__.py b/lib/feathers/__init__.py index be23e089..9f9f9dc7 100644 --- a/lib/feathers/__init__.py +++ b/lib/feathers/__init__.py @@ -1,6 +1,6 @@ import sys -import urllib -import urlparse +import urllib.request, urllib.parse, urllib.error +import urllib.parse import hmac import hashlib import binascii @@ -71,10 +71,10 @@ def _build_base_url(self, method): return self.endpoint + self.api_version + method + '.' + self.format def _build_url(self, base_url, params): - query = '' if not params else '?' + urllib.urlencode(params) + query = '' if not params else '?' + urllib.parse.urlencode(params) return base_url + query - def _fetch(self, url, headers={}, callback=None): + async def _fetch(self, url, headers={}, callback=None): """ Make the request. If an IOloop is available make request asynchronous and use the passed in callback if it's provided. 
@@ -82,7 +82,9 @@ def _fetch(self, url, headers={}, callback=None): request = tornado.httpclient.HTTPRequest(url=url, method="GET", headers=headers) if tornado.ioloop.IOLoop.initialized(): http = tornado.httpclient.AsyncHTTPClient() - http.fetch(request, callback) + fut = http.fetch(request) + response = await fut + callback(response) else: http = tornado.httpclient.HTTPClient() return http.fetch(request) @@ -96,7 +98,7 @@ def _oauth_signature(self, consumer_secret, method, url, parameters={}, token_se See http://oauth.net/core/1.0/#signing_process """ - parts = urlparse.urlparse(url) + parts = urllib.parse.urlparse(url) scheme, netloc, path = parts[:3] normalized_url = scheme.lower() + "://" + netloc.lower() + path @@ -111,7 +113,7 @@ def _oauth_signature(self, consumer_secret, method, url, parameters={}, token_se key_elems.append(token_secret if token_secret else "") key = "&".join(key_elems) - hash = hmac.new(key, base_string, hashlib.sha1) + hash = hmac.new(key.encode("ascii"), base_string.encode("ascii"), hashlib.sha1) return binascii.b2a_base64(hash.digest())[:-1] @classmethod @@ -119,6 +121,6 @@ def _oauth_escape(self, val): """ From tornado.auth """ - if isinstance(val, unicode): + if isinstance(val, str): val = val.encode("utf-8") - return urllib.quote(val, safe="~") + return urllib.parse.quote(val, safe="~") diff --git a/lib/flyingcow/__init__.py b/lib/flyingcow/__init__.py index 61c108c5..603a2136 100644 --- a/lib/flyingcow/__init__.py +++ b/lib/flyingcow/__init__.py @@ -1,3 +1,3 @@ -from db import register_connection, connection -from model import Model -from properties import Property \ No newline at end of file +from .db import register_connection, connection +from .model import Model +from .properties import Property \ No newline at end of file diff --git a/lib/flyingcow/db.py b/lib/flyingcow/db.py index d70f18f8..3767c30a 100644 --- a/lib/flyingcow/db.py +++ b/lib/flyingcow/db.py @@ -15,7 +15,7 @@ def __init__(self): self._connection = None def register(self, host='localhost', name=None, user=None, password=None, charset="utf8"): - self._connection = torndb.Connection(host, name, user, password, charset=charset) + self._connection = self._connection or torndb.Connection(host, name, user, password, charset=charset) return self._connection def connection(self): diff --git a/lib/flyingcow/model.py b/lib/flyingcow/model.py index 47c78c8d..67df2347 100644 --- a/lib/flyingcow/model.py +++ b/lib/flyingcow/model.py @@ -2,9 +2,9 @@ import logging import time -import error -import db -import properties +from . import error +from . import db +from . import properties class MultipleRows(Exception): def __str__(self): @@ -19,20 +19,19 @@ class BaseModel(type): def __new__(cls, name, bases, attrs): if name == 'Model': return super(BaseModel, cls).__new__(cls, name, bases, attrs) - + new_class = type.__new__(cls, name, bases, attrs) - for key, value in attrs.items(): + for key, value in list(attrs.items()): if isinstance(value, properties.Property): value.contribute_to_class(new_class, key) new_class.properties() return new_class -class Model(object): +class Model(object, metaclass=BaseModel): """ The Model class that gets inherited to create table-specific Models. """ - __metaclass__ = BaseModel - + def __init__(self, *args, **kwargs): self._id = None self.connection = db.connection() @@ -51,7 +50,7 @@ def id(self): that an instance is tied to a DB record. """ return self._id - + def saved(self): """ A way to check if object has been saved. 
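The lib/flyingcow/model.py hunks above also switch the ORM's metaclass wiring from the Python 2 `__metaclass__` class attribute to the Python 3 `metaclass=` keyword in the class header. A small self-contained illustration of the two spellings follows (a hypothetical Meta/Model pair, not the flyingcow classes themselves):

# Standalone sketch of the Python 2 -> 3 metaclass spelling change.
class Meta(type):
    def __new__(cls, name, bases, attrs):
        new_class = super().__new__(cls, name, bases, attrs)
        # A metaclass typically post-processes the class here, e.g. the way
        # BaseModel walks attrs to register Property instances.
        new_class.registered = True
        return new_class

# Python 2 spelling (what the old code used):
#
#     class Model(object):
#         __metaclass__ = Meta
#
# Python 3 spelling (what the diff switches to):
class Model(object, metaclass=Meta):
    pass

assert Model.registered is True

Under both spellings the metaclass hook runs at class-definition time, so the new syntax changes nothing about when BaseModel's property registration happens.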
@@ -65,19 +64,19 @@ def initialize(self, *args, **kwargs): """ A hook for subclasses to override model initialization. """ - + def add_error(self, property_name, error_message): """ Used to add any errors while validating an object. """ self.errors[property_name] = error_message - + def on_create(self): pass - + def on_update(self): pass - + def save(self): """ Builds the query to save all of the object's db attributes. @@ -115,7 +114,7 @@ def update_attribute(self, name, value): sql = "UPDATE %s SET %s = %s where id = %s" % (self._table_name(), name, "%s", "%s") self.execute(sql, value, self.id) return True - + def _populate_properties(self, include_id, **kwargs): """ Populates the properties with values, if include_id is passed in, @@ -181,7 +180,7 @@ def object_query(cls, query, *args): for result in cls.query(query, *args): results.append(cls._make_instance(result)) return results - + @classmethod def query(cls, query, *args): """ @@ -225,7 +224,7 @@ def properties(cls): return cls._properties_cache cls._properties_cache = [] - for key in cls.__dict__.keys(): + for key in list(cls.__dict__.keys()): if isinstance(cls.__dict__[key], properties.Property): cls._properties_cache.append(key) return cls._properties_cache diff --git a/lib/s3.py b/lib/s3.py index 3acc9f10..f66760a0 100644 --- a/lib/s3.py +++ b/lib/s3.py @@ -1,26 +1,81 @@ -from boto.s3.connection import S3Connection as Connection +import io +import boto3 from tornado.options import options +from hashlib import md5 -def S3Connection(): + +def S3Client(): kwargs = {} if options.aws_port and options.aws_host: - kwargs['host'] = options.aws_host - kwargs['port'] = options.aws_port - # if we're using a custom AWS host/port, disable - # SSL, since fakes3 doesn't support it - kwargs['is_secure'] = False - - return Connection( - options.aws_key, - options.aws_secret, + kwargs['endpoint_url'] = "http://%s:%s" % (options.aws_host, options.aws_port) + kwargs['use_ssl'] = False + + return boto3.client('s3', + aws_access_key_id=options.aws_key, + aws_secret_access_key=options.aws_secret, **kwargs) +class S3BucketWrapper(object): + def __init__(self, bucket_name, create=False): + self.bucket_name = bucket_name + self.client = S3Client() + if create: + self.client.create_bucket(Bucket=options.aws_bucket) + + def generate_url(self, key, **kwargs): + return self.client.generate_presigned_url( + ClientMethod="get_object", + Params={ + 'Bucket': self.bucket_name, + 'Key': key, + }, + **kwargs) + + def upload_file(self, file_name, key, **kwargs): + return self.client.upload_file( + Filename=file_name, + Key=key, + Bucket=self.bucket_name, + **kwargs + ) + + def put_object(self, data, key, **kwargs): + if 'ContentMD5' not in kwargs: + md5_hash = md5() + md5_hash.update(data) + kwargs['ContentMD5'] = md5_hash.hexdigest() + if 'ContentLength' not in kwargs: + if hasattr(data, 'read'): + kwargs['ContentLength'] = data.seek(0, 2) + data.seek(0) + else: + kwargs['ContentLength'] = len(data) + return self.client.put_object( + Body=data, + Key=key, + Bucket=self.bucket_name, + **kwargs, + ) + + def get_object(self, key, **kwargs): + data = io.BytesIO() + self.client.download_fileobj( + Key=key, Fileobj=data, Bucket=self.bucket_name, **kwargs + ) + return data.getvalue() + + def download_file(self, file_name, key, **kwargs): + with open(file_name, "wb") as f: + self.client.download_fileobj( + Key=key, Fileobj=f, Bucket=self.bucket_name, **kwargs + ) + + def S3Bucket(): - # if we're testing, then just auto-create a bucket if it doesn't exist already - if 
options.aws_bucket.endswith("-dev") or options.aws_bucket.endswith("-testing"): - return S3Connection().create_bucket(options.aws_bucket) - else: - return S3Connection().get_bucket(options.aws_bucket) \ No newline at end of file + return S3BucketWrapper( + options.aws_bucket, + # if we're testing, then just auto-create a bucket if it doesn't exist already + create=options.aws_bucket.endswith("-dev") or options.aws_bucket.endswith("-testing")) diff --git a/lib/uimodules.py b/lib/uimodules.py index b6d62588..dcc91e60 100644 --- a/lib/uimodules.py +++ b/lib/uimodules.py @@ -33,7 +33,7 @@ def render(self, object_count=1, current_page=1, url_format='', per_page=10, \ def chunk_pages(self, num_pages=1, current_page=1, adjacent=2): max_pages_display = (adjacent * 2) + 1 + 4 - mylist = range(1,num_pages+1) + mylist = list(range(1,num_pages+1)) if num_pages < max_pages_display: return mylist diff --git a/lib/utilities.py b/lib/utilities.py index 3c74a08c..b30fb703 100644 --- a/lib/utilities.py +++ b/lib/utilities.py @@ -4,9 +4,9 @@ import time import json import base64 -import urllib +import urllib.request, urllib.parse, urllib.error from datetime import datetime -import urllib +import urllib.request, urllib.parse, urllib.error from PIL import Image from tornado.options import options @@ -58,8 +58,8 @@ def s3_authenticated_url(s3_key, s3_secret, bucket_name=None, file_path=None, seconds = int(time.time()) + seconds to_sign = "GET\n\n\n%s\n/%s/%s" % (seconds, bucket_name, file_path) - digest = hmac.new(s3_secret, to_sign, hashlib.sha1).digest() - signature = urllib.quote(base64.encodestring(digest).strip()) + digest = hmac.new(s3_secret.encode("ascii"), to_sign.encode("ascii"), hashlib.sha1).digest() + signature = urllib.parse.quote(base64.b64encode(digest).strip()) signature = "?AWSAccessKeyId=%s&Expires=%s&Signature=%s" % (s3_key, seconds, signature) if options.aws_host == "s3.amazonaws.com": @@ -93,7 +93,7 @@ def s3_url(file_path): def base36encode(number, alphabet='0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ'): - if not isinstance(number, (int, long)): + if not isinstance(number, int): raise TypeError('number must be an integer') # Special case for zero @@ -120,7 +120,7 @@ def base36decode(number): def generate_digest_from_dictionary(values): h = hashlib.sha1() for value in values: - h.update("%s" % (value)) + h.update(("%s" % value).encode("ascii")) return h.hexdigest() @@ -237,12 +237,14 @@ def payment_notifications(user, action, amount=None): def transform_to_square_thumbnail(file_path, size_constraint, destination): """ Takes a local path with a file, and writes the thumbntail to the destination - which is a cStringIO.StringIO instance. Used by User.set_profile_image() + which is a io.BytesIO instance. Used by User.set_profile_image() """ img = Image.open(file_path) # convert to RGB, probably for proper resizing of gifs. 
if img.mode != "RGB": - img = img.convert("RGB") + img2 = img.convert("RGB") + img.close() + img = img2 width, height = img.size # if image is below size constraint, we just paste it on @@ -261,8 +263,9 @@ def transform_to_square_thumbnail(file_path, size_constraint, destination): max_dimension = width cropped = img.crop((0,0,max_dimension,max_dimension,)) cropped.load() - cropped.thumbnail((size_constraint,size_constraint), Image.ANTIALIAS) + cropped.thumbnail((size_constraint,size_constraint), Image.Resampling.LANCZOS) cropped.save(destination, format="JPEG", quality=95) + img.close() return True email_re = re.compile( @@ -321,4 +324,4 @@ def clean_search_term(s): if s is None: return None - return s.replace(u"\u201C", '"').replace(u"\u201D", '"') + return s.replace("\u201C", '"').replace("\u201D", '"') diff --git a/main.py b/main.py index e1761b2e..ebe6e5cf 100755 --- a/main.py +++ b/main.py @@ -3,12 +3,10 @@ import os.path import sys -import tornado.httpserver -import tornado.ioloop +import asyncio import tornado.web import tornado.options -import torndb -from tornado.options import define, options +from tornado.options import options from tornado.httpclient import AsyncHTTPClient from lib.flyingcow import register_connection @@ -19,6 +17,10 @@ from settings import settings import stripe +import logging + +logger = logging.getLogger('mltshp') + AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient") @@ -37,16 +39,17 @@ def app_settings(cls): # invariant settings "login_url": "/sign-in", "static_path": os.path.join(dirname, "static"), - "template_path": os.path.join(dirname, "templates"), + "template_path": os.path.join(dirname, "templates"), "ui_modules": lib.uimodules, } def __init__(self, *args, **settings): - self.db = register_connection(host=options.database_host, - name=options.database_name, - user=options.database_user, - password=options.database_password, - charset="utf8mb4") + self.db = settings.get("db") or register_connection( + host=options.database_host, + name=options.database_name, + user=options.database_user, + password=options.database_password, + charset="utf8mb4") if options.use_query_cache: lib.flyingcow.cache.use_query_cache = True if options.stripe_secret_key: @@ -54,21 +57,26 @@ def __init__(self, *args, **settings): super(MltshpApplication, self).__init__(*args, **settings) -if __name__ == "__main__": + +async def main(): mltshpoptions.parse_dictionary(settings) tornado.options.parse_command_line() + app_settings = MltshpApplication.app_settings() + if options.dump_settings: from pprint import pprint pprint({'options': dict((k, opt.value()) - for k, opt in options.iteritems()), + for k, opt in options.items()), 'app_settings': app_settings}) sys.exit(0) - app_settings = MltshpApplication.app_settings() application = MltshpApplication(routes, autoescape=None, **app_settings) - http_server = tornado.httpserver.HTTPServer(application) + if options.debug: + logger.setLevel(logging.DEBUG) + logger.info("starting on port %s" % (options.on_port)) + application.listen(int(options.on_port)) + await asyncio.Event().wait() - print "starting on port %s" % (options.on_port) - http_server.listen(int(options.on_port)) - tornado.ioloop.IOLoop.instance().start() +if __name__ == "__main__": + asyncio.run(main()) diff --git a/migrate.py b/migrate.py index b008cd44..8f5c656e 100644 --- a/migrate.py +++ b/migrate.py @@ -13,6 +13,6 @@ (options.database_user, options.database_password, options.database_host, options.database_name)) migrations = 
read_migrations("migrations") -print "Applying migrations..." +print("Applying migrations...") backend.apply_migrations(backend.to_apply(migrations)) -print "...complete!" +print("...complete!") diff --git a/mltshpoptions.py b/mltshpoptions.py index ee521322..86cd8a5b 100644 --- a/mltshpoptions.py +++ b/mltshpoptions.py @@ -2,13 +2,14 @@ def parse_dictionary(settings): - for key, value in settings.iteritems(): + for key, value in settings.items(): if key in options: setattr(options, key, value) define('debug', type=bool, default=True, help="Run in debug/development mode") define('dump_settings', type=bool, default=False, help="Dump evaluated settings and exit") +define('tornado_logging', type=bool, default=True, help="Controls Tornado logging") # app settings define('app_host', default='mltshp.com', metavar="HOST", help="Base hostname for web site") diff --git a/models/__init__.py b/models/__init__.py index 86d05732..fdd36fa8 100644 --- a/models/__init__.py +++ b/models/__init__.py @@ -1,34 +1,34 @@ -from favorite import Favorite -from subscription import Subscription -from user import User -from invitation import Invitation -from sharedfile import Sharedfile -from sourcefile import Sourcefile -from externalservice import Externalservice -from shake import Shake -from shakesharedfile import Shakesharedfile -from waitlist import Waitlist -from comment import Comment -from post import Post -from notification import Notification -from app import App -from authorizationcode import Authorizationcode -from accesstoken import Accesstoken -from apilog import Apilog -from conversation import Conversation -from external_relationship import ExternalRelationship -from shakemanager import ShakeManager -from payment_log import PaymentLog -from bookmark import Bookmark -from apihit import Apihit -from magicfile import Magicfile -from nsfw_log import NSFWLog -from script_log import ScriptLog -from fileview import Fileview -from shake_category import ShakeCategory -from comment_like import CommentLike -from tag import Tag -from tagged_file import TaggedFile -from voucher import Voucher -from promotion import Promotion -from migration_state import MigrationState \ No newline at end of file +from .favorite import Favorite +from .subscription import Subscription +from .user import User +from .invitation import Invitation +from .sharedfile import Sharedfile +from .sourcefile import Sourcefile +from .externalservice import Externalservice +from .shake import Shake +from .shakesharedfile import Shakesharedfile +from .waitlist import Waitlist +from .comment import Comment +from .post import Post +from .notification import Notification +from .app import App +from .authorizationcode import Authorizationcode +from .accesstoken import Accesstoken +from .apilog import Apilog +from .conversation import Conversation +from .external_relationship import ExternalRelationship +from .shakemanager import ShakeManager +from .payment_log import PaymentLog +from .bookmark import Bookmark +from .apihit import Apihit +from .magicfile import Magicfile +from .nsfw_log import NSFWLog +from .script_log import ScriptLog +from .fileview import Fileview +from .shake_category import ShakeCategory +from .comment_like import CommentLike +from .tag import Tag +from .tagged_file import TaggedFile +from .voucher import Voucher +from .promotion import Promotion +from .migration_state import MigrationState \ No newline at end of file diff --git a/models/accesstoken.py b/models/accesstoken.py index bc8a0d81..5e924a19 100644 --- 
a/models/accesstoken.py +++ b/models/accesstoken.py @@ -1,7 +1,7 @@ from lib.flyingcow import Model, Property from tornado.options import options from datetime import datetime, timedelta -import authorizationcode +from . import authorizationcode from hashlib import sha224 from lib.utilities import base36encode @@ -49,7 +49,7 @@ def generate(authorization_id): """ auth = authorizationcode.Authorizationcode.get('id=%s', authorization_id) consumer_key = uuid.uuid3(uuid.NAMESPACE_DNS, base36encode(auth.id) + '-' + base36encode(auth.app_id)) - consumer_secret = sha224("%s%s" % (str(uuid.uuid1()), time.time())).hexdigest() + consumer_secret = sha224(("%s%s" % (str(uuid.uuid1()), time.time())).encode("ascii")).hexdigest() if auth.expires_at > datetime.utcnow(): access_token = Accesstoken(user_id=auth.user_id, app_id=auth.app_id, consumer_key=str(consumer_key), consumer_secret=str(consumer_secret)) diff --git a/models/app.py b/models/app.py index 5b5ea1a9..15f39974 100644 --- a/models/app.py +++ b/models/app.py @@ -2,14 +2,14 @@ import time import uuid import hashlib -from urlparse import urlparse +from urllib.parse import urlparse from tornado.options import options from lib.flyingcow import Model, Property from lib.utilities import base36encode, base36decode -import user -import accesstoken +from . import user +from . import accesstoken class App(Model): @@ -72,7 +72,7 @@ def _verify_redirect_url(self): def on_create(self): """Set the secret""" - self.secret = hashlib.sha224("%s%s" % (str(uuid.uuid1()), time.time())).hexdigest() + self.secret = hashlib.sha224(("%s%s" % (str(uuid.uuid1()), time.time())).encode('ascii')).hexdigest() self.save() def key(self): diff --git a/models/bookmark.py b/models/bookmark.py index 74c489d8..eaff0830 100644 --- a/models/bookmark.py +++ b/models/bookmark.py @@ -25,7 +25,7 @@ def save(self, *args, **kwargs): def on_create(self): existing_previous_bookmark = Bookmark.where('user_id=%s and id < %s ORDER BY id desc LIMIT 1', self.user_id, self.id) - if existing_previous_bookmark and existing_previous_bookmark[0].sharedfile_id > 0: + if existing_previous_bookmark and existing_previous_bookmark[0] and (existing_previous_bookmark[0].sharedfile_id or 0) > 0: self.previous_sharedfile_id = existing_previous_bookmark[0].sharedfile_id self.save() @@ -100,18 +100,19 @@ def merge_with_sharedfiles(self, bookmarks, sharedfiles): Sorts a list of bookmarks with a list of sharedfiles based on created_at. Bookmark will come before sharedfile if date is the same.. 
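The `or 0` guard added to Bookmark.on_create above, and the `is not None` checks that appear later in shake.py, user.py, and settings.html, exist because Python 3 raises TypeError on ordering comparisons with None rather than quietly treating None as smaller than any number. A small illustration:

```python
# Python 2 evaluated None > 0 as False; Python 3 raises TypeError.
previous_sharedfile_id = None

# Pattern used in Bookmark.on_create: coerce None to 0 before comparing.
if (previous_sharedfile_id or 0) > 0:
    print("link to the previous sharedfile")

# Pattern used for pagination in shake.py and user.py: test for None explicitly.
page = None
if page is not None and page > 0:
    print("apply the LIMIT clause")
```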
""" - def compare_created_at(x, y): - if x.created_at > y.created_at: - return -1 - elif x.created_at < y.created_at: - return 1 - else: - x_name = x.__class__.__name__ - y_name = y.__class__.__name__ - if x_name == 'Bookmark' and y_name == 'Sharedfile': - return -1 - return 0 + def compare_created_at_key(x): + return str(x.created_at) + (x.__class__.__name__ == "Bookmark" and "1" or "0") + # if x.created_at > y.created_at: + # return -1 + # elif x.created_at < y.created_at: + # return 1 + # else: + # x_name = x.__class__.__name__ + # y_name = y.__class__.__name__ + # if x_name == 'Bookmark' and y_name == 'Sharedfile': + # return -1 + # return 0 composite_list = sharedfiles + bookmarks - composite_list.sort(compare_created_at) + composite_list.sort(key=compare_created_at_key, reverse=True) return composite_list diff --git a/models/comment.py b/models/comment.py index ec2b150a..982ae4c1 100644 --- a/models/comment.py +++ b/models/comment.py @@ -5,12 +5,12 @@ from tornado.options import options from lib.flyingcow import Model, Property from lib.utilities import pretty_date -from BeautifulSoup import BeautifulSoup +from bs4 import BeautifulSoup -import user -import notification -import sharedfile -import conversation +from . import user +from . import notification +from . import sharedfile +from . import conversation class Comment(Model): @@ -73,7 +73,7 @@ def chopped_body(self): """ Returns a comment that has its HTML removed, shortened to 15 words, and if it doesn't end in a period, add ... """ - new_body = ''.join(BeautifulSoup(self.body).findAll(text=True)) + new_body = ''.join(BeautifulSoup(self.body, features="html.parser").findAll(string=True)) new_body = new_body.replace('\n', '') body_parts = new_body.split(' ') new_body = body_parts[:12] diff --git a/models/comment_like.py b/models/comment_like.py index 107c673e..06dc2733 100644 --- a/models/comment_like.py +++ b/models/comment_like.py @@ -1,7 +1,7 @@ from lib.flyingcow import Model, Property from datetime import datetime from lib.utilities import pretty_date -import user, notification +from . import user, notification from tornado.options import options diff --git a/models/conversation.py b/models/conversation.py index 8b7aa5ef..15a94197 100644 --- a/models/conversation.py +++ b/models/conversation.py @@ -2,8 +2,8 @@ from datetime import datetime from tornado.options import options -import comment -import sharedfile +from . import comment +from . import sharedfile class Conversation(Model): diff --git a/models/externalservice.py b/models/externalservice.py index 5b35c9d8..8118af94 100644 --- a/models/externalservice.py +++ b/models/externalservice.py @@ -2,7 +2,7 @@ from datetime import datetime from tornado.options import options -import user +from . import user class Externalservice(Model): diff --git a/models/favorite.py b/models/favorite.py index 22ac49ab..cf85c680 100644 --- a/models/favorite.py +++ b/models/favorite.py @@ -1,7 +1,7 @@ from lib.flyingcow import Model, Property from datetime import datetime from lib.utilities import pretty_date -import user +from . import user from tornado.options import options diff --git a/models/invitation.py b/models/invitation.py index 92c788a8..bed206ac 100644 --- a/models/invitation.py +++ b/models/invitation.py @@ -5,7 +5,7 @@ import hashlib import time -import user +from . import user from datetime import datetime @@ -52,8 +52,8 @@ def create_for_email(self, email, user_id): Creates an invitation for an email address. 
""" h = hashlib.sha1() - h.update("%s" % (time.time())) - h.update("%s" % (email)) + h.update(("%s" % (time.time())).encode('ascii')) + h.update(("%s" % (email)).encode('ascii')) invitation_key = h.hexdigest() sending_user = user.User.get('id = %s', user_id) invitation = Invitation(user_id=user_id, invitation_key=invitation_key, email_address=email, claimed_by_user_id=0) diff --git a/models/notification.py b/models/notification.py index 2fcbff31..c1da7001 100644 --- a/models/notification.py +++ b/models/notification.py @@ -3,12 +3,12 @@ from datetime import datetime, timedelta import postmark -import sharedfile -import user -import comment -import shake -import invitation -import subscription +from . import sharedfile +from . import user +from . import comment +from . import shake +from . import invitation +from . import subscription class Notification(Model): @@ -127,7 +127,7 @@ def display_for_user(cls, user): _notification = {'sender' : sender, 'related_object' : related_object, 'id' : notification.id} if notification.type == 'favorite': - if not notifications['like']['items'].has_key(related_object.id): + if related_object.id not in notifications['like']['items']: notifications['like']['items'][related_object.id] = [] notifications['like']['items'][related_object.id].append(_notification) notifications['like']['count'] += 1 @@ -137,7 +137,7 @@ def display_for_user(cls, user): notifications['follow'].append(_notification) elif notification.type == 'save': - if not notifications['save']['items'].has_key(related_object.id): + if related_object.id not in notifications['save']['items']: notifications['save']['items'][related_object.id] = [] notifications['save']['items'][related_object.id].append(_notification) notifications['save']['count'] += 1 diff --git a/models/shake.py b/models/shake.py index 8f96b40f..4ee689b3 100644 --- a/models/shake.py +++ b/models/shake.py @@ -1,11 +1,10 @@ import re -import cStringIO +import io from datetime import datetime -from urlparse import urljoin +from urllib.parse import urljoin from tornado.options import options from lib.s3 import S3Bucket -from boto.s3.key import Key from PIL import Image from lib.flyingcow import Model, Property @@ -13,7 +12,7 @@ from lib.reservedshakenames import reserved_names from lib.utilities import transform_to_square_thumbnail, s3_url -import user, shake, shakesharedfile, sharedfile, subscription, shakemanager +from . 
import user, shake, shakesharedfile, sharedfile, subscription, shakemanager class Shake(ModelQueryCache, Model): user_id = Property(name='user_id') @@ -195,7 +194,7 @@ def subscribers(self, page=None): WHERE shake_id = %s AND subscription.deleted = 0 ORDER BY subscription.id """ - if page > 0: + if page is not None and page > 0: limit_start = (page-1) * 20 sql = "%s LIMIT %s, %s" % (sql, limit_start, 20) return user.User.object_query(sql, self.id) @@ -317,8 +316,8 @@ def can_edit(self, user): return self.is_owner(user) def set_page_image(self, file_path=None, sha1_value=None): - thumb_cstr = cStringIO.StringIO() - image_cstr = cStringIO.StringIO() + thumb_cstr = io.BytesIO() + image_cstr = io.BytesIO() if not file_path or not sha1_value: return False @@ -334,20 +333,22 @@ def set_page_image(self, file_path=None, sha1_value=None): try: #save thumbnail - k = Key(bucket) - k.key = "account/%s/shake_%s_small.jpg" % (self.user_id, self.name) - k.set_metadata('Content-Type', 'image/jpeg') - k.set_metadata('Cache-Control', 'max-age=86400') - k.set_contents_from_string(thumb_cstr.getvalue()) - k.set_acl('public-read') + bucket.put_object( + thumb_cstr.getvalue(), + "account/%s/shake_%s_small.jpg" % (self.user_id, self.name), + ContentType="image/jpeg", + CacheControl="max-age=86400", + ACL="public-read", + ) #save small - k = Key(bucket) - k.key = "account/%s/shake_%s.jpg" % (self.user_id, self.name) - k.set_metadata('Content-Type', 'image/jpeg') - k.set_metadata('Cache-Control', 'max-age=86400') - k.set_contents_from_string(image_cstr.getvalue()) - k.set_acl('public-read') + bucket.put_object( + image_cstr.getvalue(), + "account/%s/shake_%s.jpg" % (self.user_id, self.name), + ContentType="image/jpeg", + CacheControl="max-age=86400", + ACL="public-read", + ) self.image = 1 self.save() diff --git a/models/sharedfile.py b/models/sharedfile.py index db574879..f32d28cd 100644 --- a/models/sharedfile.py +++ b/models/sharedfile.py @@ -11,15 +11,15 @@ from lib.flyingcow.db import IntegrityError from lib.utilities import base36encode, base36decode, pretty_date, s3_authenticated_url -import user -import sourcefile -import fileview -import favorite -import shakesharedfile -import shake -import comment -import notification -import conversation +from . import user +from . import sourcefile +from . import fileview +from . import favorite +from . import shakesharedfile +from . import shake +from . import comment +from . import notification +from . import conversation import models.post import models.nsfw_log import models.tag @@ -780,7 +780,7 @@ def create_from_file(file_path, file_name, sha1_value, content_type, user_id, ti """ TODO: Must only accept acceptable content-types after consulting a list. 
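The boto `Key` juggling in Shake.set_page_image is replaced above by calls on the project's S3Bucket wrapper (put_object here, and upload_file / download_file / generate_url later in sourcefile.py, user.py, and the scripts). lib/s3.py itself is not part of this excerpt, so the following is only a plausible boto3-backed sketch of the methods as they are called in this diff, with a hypothetical bucket name:

```python
import boto3

class S3BucketSketch:
    """Illustrative stand-in for lib/s3.py's S3Bucket; mirrors only the calls used in this diff."""

    def __init__(self, name="mltshp-dev"):          # hypothetical bucket name
        self.name = name
        self.client = boto3.client("s3")

    def put_object(self, body, key, **extra):
        # extra carries the ContentType / CacheControl / ACL keywords passed in shake.py
        self.client.put_object(Bucket=self.name, Key=key, Body=body, **extra)

    def upload_file(self, file_path, key, **extra):
        # boto3's client.upload_file accepts ExtraArgs / Callback keyword arguments
        self.client.upload_file(file_path, self.name, key, **extra)

    def download_file(self, file_path, key):
        self.client.download_file(self.name, key, file_path)

    def generate_url(self, key, ExpiresIn=3600):
        return self.client.generate_presigned_url(
            "get_object", Params={"Bucket": self.name, "Key": key}, ExpiresIn=ExpiresIn)
```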
""" - if len(sha1_value) <> 40: + if len(sha1_value) != 40: return None if user_id == None: diff --git a/models/sourcefile.py b/models/sourcefile.py index 57920645..9861e1b2 100644 --- a/models/sourcefile.py +++ b/models/sourcefile.py @@ -1,19 +1,21 @@ import hashlib -import cStringIO +import io from os import path from datetime import datetime -from urlparse import urlparse +from urllib.parse import urlparse import re from tornado.escape import url_escape, json_decode, json_encode from tornado.options import options from PIL import Image from lib.s3 import S3Bucket -from boto.s3.key import Key from lib.flyingcow import Model, Property from lib.flyingcow.cache import ModelQueryCache +import logging +logger = logging.getLogger('mltshp') + class Sourcefile(ModelQueryCache, Model): width = Property() # original width dimension of source file @@ -101,17 +103,24 @@ def get_sha1_file_key(file_path=None, file_data=None): except Exception as e: return None else: - h.update(file_data) + # test if file_data is a string + if isinstance(file_data, str): + h.update(file_data.encode("UTF-8")) + elif isinstance(file_data, bytes): + h.update(file_data) + else: + raise Exception("file_data must be a string or bytes") return h.hexdigest() @staticmethod def get_from_file(file_path, sha1_value, type='image', skip_s3=None): existing_source_file = Sourcefile.get("file_key = %s", sha1_value) - thumb_cstr = cStringIO.StringIO() - small_cstr = cStringIO.StringIO() + thumb_cstr = io.BytesIO() + small_cstr = io.BytesIO() if existing_source_file: return existing_source_file try: + logger.debug("creating %s" % file_path) img = Image.open(file_path) original_width = img.size[0] original_height= img.size[1] @@ -120,44 +129,60 @@ def get_from_file(file_path, sha1_value, type='image', skip_s3=None): return None if img.mode != "RGB": - img = img.convert("RGB") + logger.debug("converting to RGB") + img2 = img.convert("RGB") + img.close() + img = img2 #generate smaller versions thumb = img.copy() small = img.copy() - thumb.thumbnail((100,100), Image.ANTIALIAS) - small.thumbnail((240,184), Image.ANTIALIAS) + thumb.thumbnail((100,100), Image.Resampling.LANCZOS) + small.thumbnail((240,184), Image.Resampling.LANCZOS) thumb.save(thumb_cstr, format="JPEG") small.save(small_cstr, format="JPEG") bucket = None if not skip_s3: + logger.debug("making S3 bucket") bucket = S3Bucket() #save original file if type != 'link': if not skip_s3: - k = Key(bucket) - k.key = "originals/%s" % (sha1_value) - k.set_contents_from_filename(file_path) + logger.debug("putting object originals/%s" % sha1_value) + bucket.upload_file( + file_path, + "originals/%s" % sha1_value, + ) + img.close() #save thumbnail thumbnail_file_key = Sourcefile.get_sha1_file_key(file_data=thumb_cstr.getvalue()) if not skip_s3: - k = Key(bucket) - k.key = "thumbnails/%s" % thumbnail_file_key - k.set_contents_from_string(thumb_cstr.getvalue()) + thumb_bytes = thumb_cstr.getvalue() + logger.debug("putting object thumbnails/%s (length %d)" % (thumbnail_file_key, len(thumb_bytes))) + bucket.put_object( + thumb_bytes, + "thumbnails/%s" % thumbnail_file_key, + ) + thumb.close() #save small small_file_key = Sourcefile.get_sha1_file_key(file_data=small_cstr.getvalue()) if not skip_s3: - k = Key(bucket) - k.key = "smalls/%s" % small_file_key - k.set_contents_from_string(small_cstr.getvalue()) + small_bytes = small_cstr.getvalue() + logger.debug("putting object smalls/%s (length %d)" % (small_file_key, len(small_bytes))) + bucket.put_object( + small_bytes, + "smalls/%s" % 
small_file_key, + ) + small.close() #save source file + logger.debug("saving sourcefile") sf = Sourcefile(width=original_width, height=original_height, file_key=sha1_value, thumb_key=thumbnail_file_key, small_key=small_file_key, type=type) sf.save() return sf diff --git a/models/subscription.py b/models/subscription.py index b5284186..e9c83257 100644 --- a/models/subscription.py +++ b/models/subscription.py @@ -3,9 +3,9 @@ from lib.flyingcow import Model, Property from tornado.options import options -import shake -import user -import post +from . import shake +from . import user +from . import post class Subscription(Model): diff --git a/models/tag.py b/models/tag.py index dc0f0ef5..0aaab8c8 100644 --- a/models/tag.py +++ b/models/tag.py @@ -1,7 +1,7 @@ from lib.flyingcow import Model, Property from lib.flyingcow.cache import ModelQueryCache from datetime import datetime -import sharedfile +from . import sharedfile from tornado.options import options diff --git a/models/user.py b/models/user.py index 5c2a84c3..92806472 100644 --- a/models/user.py +++ b/models/user.py @@ -1,14 +1,13 @@ -import cStringIO +import io import time import hashlib import calendar import random import re -import urlparse +import urllib.parse from datetime import datetime from lib.s3 import S3Bucket -from boto.s3.key import Key import postmark from tornado.options import define, options @@ -20,14 +19,14 @@ from tasks.migration import migrate_for_user from lib.badpasswords import bad_list -import notification -import subscription -import shake -import invitation -import sharedfile -import externalservice -import invitation_request -import shakemanager +from . import notification +from . import subscription +from . import shake +from . import invitation +from . import sharedfile +from . import externalservice +from . import invitation_request +from . import shakemanager # we use models.favorite due to some weird edge case where the reference # to the module gets lost. To recreate, rename to "import favorite" and # change references from models.favorite to just favorite. 
You can then @@ -163,8 +162,8 @@ def send_invitation(self, email_address): def invalidate_email(self): self.email_confirmed = 0 h = hashlib.sha1() - h.update("%s" % (time.time())) - h.update("%s" % (random.random())) + h.update(str(time.time()).encode("ascii")) + h.update(str(random.random()).encode("ascii")) self.verify_email_token = h.hexdigest() self.save() if not options.debug: @@ -183,8 +182,8 @@ def create_reset_password_token(self): """ h = hashlib.sha1() - h.update("%s" % (time.time())) - h.update("%s" % (random.random())) + h.update(str(time.time()).encode("ascii")) + h.update(str(random.random()).encode("ascii")) self.reset_password_token = h.hexdigest() self.save() body = """ @@ -233,18 +232,19 @@ def set_profile_image(self, file_path, file_name, content_type, skip_s3=False): if content_type not in valid_content_types: return False - destination = cStringIO.StringIO() + destination = io.BytesIO() if not transform_to_square_thumbnail(file_path, 100*2, destination): return False if not skip_s3: bucket = S3Bucket() - k = Key(bucket) - k.key = "account/%s/profile.jpg" % (self.id) - k.set_metadata('Content-Type', 'image/jpeg') - k.set_metadata('Cache-Control', 'max-age=86400') - k.set_contents_from_string(destination.getvalue()) - k.set_acl('public-read') + bucket.put_object( + destination.getvalue(), + "account/%s/profile.jpg" % self.id, + ContentType="image/jpeg", + CacheControl="max-age=86400", + ACL="public-read" + ) self.profile_image = 1 self.save() return True @@ -442,7 +442,7 @@ def total_file_stats(self): """ counts = sharedfile.Sharedfile.query("SELECT sum(like_count) as likes, sum(save_count) as saves, sum(view_count) as views from sharedfile where user_id = %s AND deleted=0", self.id) counts = counts[0] - for key, value in counts.items(): + for key, value in list(counts.items()): if not value: counts[key] = 0 return counts @@ -643,7 +643,7 @@ def following(self, page=None): AND subscription.deleted = 0 """ % self.id - if page > 0: + if page is not None and page > 0: limit_start = (page-1) * 20 select = "%s LIMIT %s, %s" % (select, limit_start, 20) @@ -926,7 +926,7 @@ def _validate_website(self): self.add_error('website', "The URL is too long.") return False if self.website != '': - parsed = urlparse.urlparse(self.website) + parsed = urllib.parse.urlparse(self.website) if parsed.scheme not in ('http', 'https',): self.add_error('website', "Doesn't look to be a valid URL.") return False @@ -1044,8 +1044,8 @@ def find_unmigrated_user(name, password): def generate_password_digest(password): secret = options.auth_secret h = hashlib.sha1() - h.update(password) - h.update(secret) + h.update(password.encode(encoding="UTF-8")) + h.update(secret.encode(encoding="UTF-8")) return h.hexdigest() -from sharedfile import Sharedfile +from .sharedfile import Sharedfile diff --git a/models/voucher.py b/models/voucher.py index baf7bff8..afb64393 100644 --- a/models/voucher.py +++ b/models/voucher.py @@ -5,10 +5,10 @@ import hashlib import time -import user +from . import user import datetime -import promotion +from . 
import promotion from lib.utilities import payment_notifications diff --git a/requirements-test.txt b/requirements-test.txt index 9d643313..a7cafe10 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -1,2 +1,2 @@ -coverage -coveralls +coverage==6.5.0 +coveralls==3.3.1 \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 97815b0b..43e521d8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,26 +1,18 @@ -BeautifulSoup==3.2.0 -MySQL-python==1.2.5 -Pillow==6.2.2 -amqplib==1.0.0 -anyjson==0.3.1 -boto==2.8.0 -celery==4.0.2 -distribute==0.7.3 -kombu==4.0.2 -pycurl==7.43.0 -pyparsing==2.2.0 -python-dateutil==1.5 -python-postmark==0.5.0 -python-slugify==5 -recaptcha-client==1.0.6 -tornado==4.1 -torndb==0.3 -tweepy==2.3 -wsgiref==0.1.2 -mock==1.0.1 -pyOpenSSL==17.5.0 -backports.ssl==0.0.9 -requests==2.20.0 -stripe==1.82.1 -yoyo-migrations==5.0.5 -ffmpy==0.2.2 +amqplib==1.0.2 +beautifulsoup4==4.12.2 +boto3==1.34.2 +celery==5.3.6 +ffmpy==0.3.1 +kombu==5.3.4 +mock==5.1.0 +mysqlclient==2.1.1 +Pillow==10.1.0 +pycurl==7.45.2 +pyOpenSSL==23.3.0 +pyparsing==3.1.1 +python-dateutil==2.8.2 +python-postmark==0.6.0 +requests==2.31.0 +stripe==7.9.0 +tornado==6.4.0 +yoyo-migrations==8.2.0 \ No newline at end of file diff --git a/run-tests.sh b/run-tests.sh index 90aebcfb..ef197abb 100755 --- a/run-tests.sh +++ b/run-tests.sh @@ -1,7 +1,11 @@ #!/bin/bash +# exit if any command fails (e); strict variable substitution (u); +# set exit code to non-zero for any failed piped commands (o pipefail) +# See also: http://redsymbol.net/articles/unofficial-bash-strict-mode/ set -euo pipefail +export PYTHONUNBUFFERED=1 pip install -r requirements-test.txt coverage run --source=handlers,models,tasks,lib test.py coverage xml diff --git a/runner.py b/runner.py index d9b73fee..8ead6c7f 100755 --- a/runner.py +++ b/runner.py @@ -39,7 +39,7 @@ def run(script_path): if result: script_log.result = result script_log.success = 1 - except Exception, e: + except Exception as e: script_log.success = 0 script_log.result = json.dumps({'error': str(e)}) sys.exit("Exception: %s" % e) diff --git a/scripts/make-zip-of-images.py b/scripts/make-zip-of-images.py index f19bd514..b929d739 100644 --- a/scripts/make-zip-of-images.py +++ b/scripts/make-zip-of-images.py @@ -7,7 +7,6 @@ import models import sys from lib.s3 import S3Bucket -from boto.s3.key import Key from tornado.options import options import json import os @@ -27,7 +26,7 @@ def main(): } return json.dumps(results) -def percent_cb(complete, total): +def percent_cb(bytes_amount): sys.stdout.write('.') sys.stdout.flush() @@ -51,7 +50,7 @@ def make_zip_file(for_user=None): sfs = models.Sharedfile.where("user_id = %s and deleted=0 order by id", user.id) if sfs: - print(len(sfs)) + print((len(sfs))) for sf in sfs: source = sf.sourcefile() if source.type == 'link': @@ -61,7 +60,6 @@ def make_zip_file(for_user=None): else: sys.stdout.write('.') sys.stdout.flush() - file_object = s3_bucket.get_key("originals/{0}".format(source.file_key)) extension = "" if sf.content_type == 'image/gif': extension = "gif" @@ -71,21 +69,22 @@ def make_zip_file(for_user=None): extension = "png" if extension == "": - print(sf.content_type) + print((sf.content_type)) print("extension blank") sys.exit() - file_object.get_contents_to_filename("/mnt/backups/users/{0}/{1}.{2}".format(user.name, sf.share_key, extension)) + file_name = "/mnt/backups/users/{0}/{1}.{2}".format(user.name, sf.share_key, extension) + s3_bucket.download_file(file_name, 
"originals/{0}".format(source.file_key)) #zip contents of directory and save to /users/id-name.zip - subprocess.call(["zip", "-r", "/mnt/backups/users/{0}.zip".format(user.name), "/mnt/backups/users/{0}/".format(user.name)]) + zip_file = "/mnt/backups/users/{0}.zip".format(user.name) + subprocess.call(["zip", "-r", zip_file, "/mnt/backups/users/{0}/".format(user.name)]) #upload to s3 as /bucket-name/account/id/images.zip - k = Key(s3_bucket) - k.key = "account/{0}/images.zip".format(user.id) - k.set_contents_from_filename("/mnt/backups/users/{0}.zip".format(user.name), cb=percent_cb, num_cb=10) + key = "account/{0}/images.zip".format(user.id) + s3_bucket.upload_file(zip_file, key, Callback=percent_cb, ExtraArgs={"ContentType": "application/zip"}) - happy_url = k.generate_url(expires_in=72000) + happy_url = s3_bucket.generate_url(key, ExpiresIn=72000) #email link to user email 8 hours pm = postmark.PMMail(api_key=options.postmark_api_key, sender="hello@mltshp.com", to=user.email, diff --git a/scripts/migrate-user.py b/scripts/migrate-user.py index 7f891161..8baf89f9 100644 --- a/scripts/migrate-user.py +++ b/scripts/migrate-user.py @@ -16,7 +16,7 @@ def main(): for name in names: user = User.get("name=%s and deleted=2", name) if user is not None: - print "Migrating %s..." % name + print("Migrating %s..." % name) migrate_for_user.delay_or_run(user.id) else: - print "Could not find user named: %s" % name + print("Could not find user named: %s" % name) diff --git a/scripts/populate-save-like.py b/scripts/populate-save-like.py index 1826ff77..426e4d50 100755 --- a/scripts/populate-save-like.py +++ b/scripts/populate-save-like.py @@ -28,5 +28,5 @@ def main(): saves = save_counts[0]['save_count'] if likes > 0 or saves > 0: - print "UPDATE sharedfile SET like_count = %s, save_count = %s WHERE id = %s" % (likes, saves, sf.id) - print db1.execute("UPDATE sharedfile SET like_count = %s, save_count = %s WHERE id = %s", likes, saves, sf.id) + print("UPDATE sharedfile SET like_count = %s, save_count = %s WHERE id = %s" % (likes, saves, sf.id)) + print(db1.execute("UPDATE sharedfile SET like_count = %s, save_count = %s WHERE id = %s", likes, saves, sf.id)) diff --git a/scripts/populate-timelines.py b/scripts/populate-timelines.py index 1bb0005a..245ec173 100755 --- a/scripts/populate-timelines.py +++ b/scripts/populate-timelines.py @@ -12,5 +12,5 @@ def main(): ssfs = db1.query("""SELECT shake_id, sharedfile_id from shakesharedfile order by created_at""") for shakesharedfile in ssfs: sf = db1.get("""SELECT id, source_id, name, deleted, created_at FROM sharedfile WHERE id = %s""", shakesharedfile['sharedfile_id']) - print "%s. Adding posts for sharedfile: %s created at %s." % (sf.id, sf.name, sf.created_at) + print("%s. Adding posts for sharedfile: %s created at %s." % (sf.id, sf.name, sf.created_at)) add_posts(shake_id=shakesharedfile['shake_id'], sharedfile_id=sf['id'], sourcefile_id=sf['source_id'], deleted=sf['deleted'], created_at=sf['created_at']) diff --git a/scripts/transcode-sharedfile.py b/scripts/transcode-sharedfile.py index dab2e91e..7cfbee9f 100644 --- a/scripts/transcode-sharedfile.py +++ b/scripts/transcode-sharedfile.py @@ -13,7 +13,7 @@ def main(): options.use_workers = False if len(keys) == 0: - print "Selecting untranscoded sharedfiles..." 
+ print("Selecting untranscoded sharedfiles...") select = """SELECT share_key FROM sharedfile JOIN sourcefile ON sourcefile.id = sharedfile.source_id @@ -25,12 +25,12 @@ def main(): results = Sharedfile.query(select) for result in results: keys.append(result["share_key"]) - print "Found %d sharedfiles to transcode" % len(keys) + print("Found %d sharedfiles to transcode" % len(keys)) for key in keys: sf = Sharedfile.get("share_key=%s AND content_type='image/gif' AND deleted=0", key) if sf is not None: - print "Transcoding %s..." % sf.share_key + print("Transcoding %s..." % sf.share_key) transcode_sharedfile.delay_or_run(sf.id) else: - print "Could not find sharedfile with key: %s" % key + print("Could not find sharedfile with key: %s" % key) diff --git a/settings.example.py b/settings.example.py index 244ff833..a63b2b5e 100644 --- a/settings.example.py +++ b/settings.example.py @@ -7,7 +7,7 @@ "auth_secret" : "dummy-secret", "aws_bucket": "mltshp-dev", "aws_host": "fakes3", - "aws_port": 8000, + "aws_port": 4567, "aws_key": "dummy-key", "aws_secret": "dummy-secret", "cookie_secret": "some secret string", @@ -41,7 +41,7 @@ "database_host": "mysql", "aws_bucket": "mltshp-testing", "aws_host": "fakes3", - "aws_port": 8000, + "aws_port": 4567, "aws_key": "dummy-key", "aws_secret": "dummy-secret", "max_mb_per_month" : 300, @@ -49,6 +49,7 @@ "use_workers": False, "debug_workers": True, "superuser_list": "admin", + "tornado_logging": False, # these must be set for testing test/unit/externalservice_tests.py # "twitter_consumer_key" : "twitter_consumer_key_here", # "twitter_consumer_secret" : "twitter_consumer_secret_key_here", diff --git a/setup/dev/fakes3/Dockerfile b/setup/dev/fakes3/Dockerfile new file mode 100644 index 00000000..de8f6717 --- /dev/null +++ b/setup/dev/fakes3/Dockerfile @@ -0,0 +1,23 @@ +FROM ruby:2.7 as builder + +ENV FAKES3_VERSION 2.0.0 + +RUN gem install fakes3 -v ${FAKES3_VERSION} \ + && rm -rf /usr/local/bundle/cache/* + +FROM ruby:2.7-slim + +COPY --from=builder /usr/local/bundle /usr/local/bundle/ + +VOLUME /srv +RUN mkdir -p /srv \ + && chown nobody:nogroup /srv \ + && chmod 750 /srv \ + && ln -s /usr/local/bundle/bin/fakes3 /usr/bin/fakes3 +WORKDIR /srv + +EXPOSE 4567 + +USER nobody +ENTRYPOINT ["fakes3", "--port", "4567"] +CMD ["--root", "/srv"] diff --git a/setup/dev/nginx.conf b/setup/dev/nginx.conf index edacdfc8..78aac568 100644 --- a/setup/dev/nginx.conf +++ b/setup/dev/nginx.conf @@ -1,5 +1,7 @@ # MLTSHP.com dev config +worker_processes auto; + worker_rlimit_nofile 20480; error_log /srv/mltshp.com/logs/error.log; @@ -30,7 +32,7 @@ http { # Only retry if there was a communication error, not a timeout # on the Tornado server (to avoid propagating "queries of death" # to all frontends) - proxy_next_upstream error; + proxy_next_upstream error timeout; # This will load a dynamically generated "resolver n.n.n.n;" line # that will have the correct resolver IP address for the dev @@ -86,6 +88,7 @@ http { upload_pass_form_field "_xsrf"; upload_pass_form_field "title"; + upload_pass_form_field "alt_text"; upload_pass_form_field "description"; upload_cleanup 400 404 499 500-505; @@ -124,6 +127,7 @@ http { upload_aggregate_form_field "${upload_field_name}_size" "$upload_file_size"; upload_pass_form_field "title"; + upload_pass_form_field "alt_text"; upload_pass_form_field "description"; upload_pass_form_field "shake_id"; @@ -131,7 +135,7 @@ http { } location ~* ^/s3/((?:account)/\d+/[a-zA-Z0-9_-]+\.jpg) { - set $download_url http://mltshp-dev.fakes3:8000/$1; + set 
$download_url http://mltshp-dev.fakes3:4567/$1; proxy_hide_header Content-Disposition; proxy_hide_header Content-Type; proxy_set_header X-Rewrite-URL $download_url; @@ -144,7 +148,7 @@ http { internal; set $download_uri $1; - set $download_url http://mltshp-dev.fakes3:8000/$download_uri$is_args$args; + set $download_url http://fakes3:4567/mltshp-dev/$download_uri$is_args$args; proxy_hide_header Content-Disposition; proxy_hide_header Content-Type; proxy_set_header X-Rewrite-URL $download_url; diff --git a/setup/dev/supervisord.conf b/setup/dev/supervisord.conf index ecb6aed4..6fbac4a0 100644 --- a/setup/dev/supervisord.conf +++ b/setup/dev/supervisord.conf @@ -9,7 +9,7 @@ autorestart=true programs=main-8000 [program:main-8000] -command=python main.py --on_port=8000 +command=python3 main.py --on_port=8000 directory=/srv/mltshp.com/mltshp autorestart=true redirect_stderr=true diff --git a/setup/production/mysql.conf b/setup/production/mysql.conf new file mode 100644 index 00000000..da44f0d8 --- /dev/null +++ b/setup/production/mysql.conf @@ -0,0 +1,18 @@ +# /etc/mysql/mysql.conf.d/mltshp.cnf +[mysqld] +skip-name-resolve = 1 +key_buffer_size = 2048M +sort_buffer_size = 16M +table_open_cache = 4000 +thread_cache_size = 8 +query_cache_limit = 1M +query_cache_size = 32M +query_cache_type = 1 +innodb_buffer_pool_size = 8G +innodb_ft_min_token_size = 2 + +# +# Here you can see queries with especially long duration +#log_slow_queries = /var/log/mysql/mysql-slow.log +#long_query_time = 2 +#log-queries-not-using-indexes \ No newline at end of file diff --git a/setup/production/nginx.conf b/setup/production/nginx.conf index 6b1f7f2f..531fd54b 100644 --- a/setup/production/nginx.conf +++ b/setup/production/nginx.conf @@ -1,7 +1,6 @@ # MLTSHP.com production config -# FIXME: Not supported on ancient nginx -#worker_processes auto; +worker_processes auto; worker_rlimit_nofile 20480; @@ -14,12 +13,6 @@ http { include mime.types; default_type application/octet-stream; - # Accepts the origin IP address as sent - # by our Linode NodeBalancer - # FIXME: Not supported on ancient nginx - #real_ip_header X-Forwarded-For; - #set_real_ip_from 192.168.255.0/24; - upstream frontends { server 127.0.0.1:8001; server 127.0.0.1:8002; @@ -39,14 +32,10 @@ http { application/x-javascript application/xml application/atom+xml text/javascript; - # FIXME: Not supported on ancient nginx - #ssl_protocols TLSv1 TLSv1.1 TLSv1.2; # Dropping SSLv3, ref: POODLE - #ssl_prefer_server_ciphers on; - # Only retry if there was a communication error, not a timeout # on the Tornado server (to avoid propagating "queries of death" # to all frontends) - proxy_next_upstream error; + proxy_next_upstream error timeout; resolver 8.8.8.8; @@ -85,7 +74,7 @@ http { location = /upload { # Pass altered request body to this location - upload_pass /internalupload; + upload_pass /internalupload; # Store files to this directory # The directory is hashed, subdirectories 0 1 2 3 4 5 6 7 8 9 should exist @@ -122,6 +111,7 @@ http { upload_pass_form_field "_xsrf"; upload_pass_form_field "title"; + upload_pass_form_field "alt_text"; upload_pass_form_field "description"; upload_cleanup 400 404 499 500-505; } @@ -155,6 +145,7 @@ http { upload_aggregate_form_field "${upload_field_name}_sha1" "$upload_file_sha1"; upload_aggregate_form_field "${upload_field_name}_size" "$upload_file_size"; upload_pass_form_field "title"; + upload_pass_form_field "alt_text"; upload_pass_form_field "description"; upload_pass_form_field "shake_id"; diff --git 
a/setup/production/supervisord-web.conf b/setup/production/supervisord-web.conf index 96f9d2a4..eebff2e0 100644 --- a/setup/production/supervisord-web.conf +++ b/setup/production/supervisord-web.conf @@ -7,7 +7,7 @@ autorestart=true [program:main] process_name=mltshp-%(process_num)d -command=python main.py --on_port=80%(process_num)02d +command=python3 main.py --on_port=80%(process_num)02d numprocs=6 numprocs_start=1 directory=/srv/mltshp.com/mltshp diff --git a/setup/production/supervisord-worker.conf b/setup/production/supervisord-worker.conf index dfdbef70..bdfc9bee 100644 --- a/setup/production/supervisord-worker.conf +++ b/setup/production/supervisord-worker.conf @@ -9,7 +9,7 @@ autorestart=true programs=celeryd-celery,celeryd-transcode [program:celeryd-celery] -command=python worker.py -Q celery --loglevel INFO +command=python3 worker.py -Q celery --loglevel INFO directory=/srv/mltshp.com/mltshp autorestart=true redirect_stderr=true @@ -23,7 +23,7 @@ user=ubuntu environment=HOME="/home/ubuntu",USER="ubuntu" [program:celeryd-transcode] -command=python worker.py -Q transcode --loglevel INFO +command=python3 worker.py -Q transcode --loglevel INFO directory=/srv/mltshp.com/mltshp autorestart=true redirect_stderr=true @@ -34,4 +34,4 @@ stdout_capture_maxbytes=1MB stdout_events_enabled=false loglevel=info user=ubuntu -environment=HOME="/home/ubuntu",USER="ubuntu" \ No newline at end of file +environment=HOME="/home/ubuntu",USER="ubuntu" diff --git a/static/developers/example.py.txt b/static/developers/example.py.txt index 06fcf498..c62d6739 100755 --- a/static/developers/example.py.txt +++ b/static/developers/example.py.txt @@ -68,7 +68,7 @@ print "SEE CODE FOR THE STEPS" # on the developer site soon. There is a specific method for encoding this bit.) timestamp = int(time.mktime(datetime.utcnow().timetuple())) -nonce = md5("%s" % random.random()).hexdigest() #you might want to generate better nonces +nonce = md5(("%s" % random.random()).encode('ascii')).hexdigest() #you might want to generate better nonces #start by normalizing the message. the order here is important normalized_string = "%s\n" % (j_response['access_token']) @@ -82,9 +82,9 @@ normalized_string += "/api/sharedfile/GA4\n" #note, we're not including the query string because there isn't one. 
#now we sign the request -digest = hmac.new(j_response['secret'].encode('ascii'), normalized_string, sha1).digest() -signature = base64.encodestring(digest).strip() #we strip the end off because it gives us a \n at the end -authorization_string = 'MAC token="%s", timestamp="%s", nonce="%s", signature="%s"' % (j_response['access_token'], str(timestamp), nonce, signature) +digest = hmac.new(j_response['secret'].encode('ascii'), normalized_string.encode('ascii'), sha1).digest() +signature = base64.encodebytes(digest).strip() #we strip the end off because it gives us a \n at the end +authorization_string = 'MAC token="%s", timestamp="%s", nonce="%s", signature="%s"' % (j_response['access_token'], str(timestamp), nonce, signature.decode('ascii')) req = urllib2.Request(RESOURCE_URL,headers={ 'Authorization' : authorization_string }) diff --git a/tasks/__init__.py b/tasks/__init__.py index 84332227..38d3a820 100644 --- a/tasks/__init__.py +++ b/tasks/__init__.py @@ -1,6 +1,5 @@ -from celery.task import task -from celery.task.base import Task -import postmark +from celery import shared_task +from celery.app.task import Task from tornado.options import define, options import mltshpoptions @@ -30,4 +29,4 @@ def delay_or_run(self, *args, **kwargs): def mltshp_task(*args, **options): # This is how celery's periodic_task decorator customizes the class, so try it here too. - return task(**dict({"base": MltshpTask}, **options)) + return shared_task(**dict({"base": MltshpTask}, **options)) diff --git a/tasks/counts.py b/tasks/counts.py index f7c01323..03e46ce4 100644 --- a/tasks/counts.py +++ b/tasks/counts.py @@ -1,8 +1,6 @@ from torndb import Connection from tornado.options import options from datetime import datetime -import os -import tweepy #temporary, need to use feathers from tasks import mltshp_task @@ -34,21 +32,23 @@ def tweet_or_magic(db, sharedfile_id, like_count): if like_count == likes_to_magic: created_at = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S") db.execute("INSERT IGNORE INTO magicfile (sharedfile_id, created_at) VALUES (%s, %s)", sharedfile_id, created_at) - if like_count == likes_to_tweet and not options.debug: - title = '' - if sf['title'] == '' or sf['title'] == None: - title = sf['name'] - else: - title = sf['title'] - auth = tweepy.OAuthHandler(options.twitter_consumer_key, options.twitter_consumer_secret) - auth.set_access_token(options.twitter_access_key, options.twitter_access_secret) - api = tweepy.API(auth) - via_twitter_account = "" - twitter_account = db.get("SELECT screen_name from externalservice where user_id = %s and deleted=0", sf['user_id']) - if twitter_account: - via_twitter_account = " via @{0}".format(twitter_account['screen_name']) - api.update_status('https://mltshp.com/p/%s "%s"%s' % (sf['share_key'], title[:90], via_twitter_account)) + # The Twitter API is dead. 
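tasks/__init__.py above now builds its decorator on celery's shared_task, since the old celery.task module was removed in Celery 5. A reduced sketch of decorating a task with a custom base class (the real MltshpTask, with its delay_or_run helper, is defined earlier in that file and not reproduced here):

```python
from celery import Task, shared_task

class MltshpTaskSketch(Task):
    # Stand-in for the real MltshpTask base class.
    pass

def mltshp_task(**options):
    # Merge in the custom base, the same dict-merge trick used above.
    return shared_task(**dict({"base": MltshpTaskSketch}, **options))

@mltshp_task()
def example_task(x, y):        # hypothetical task, for illustration only
    return x + y
```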
+ # if like_count == likes_to_tweet and not options.debug: + # title = '' + # if sf['title'] == '' or sf['title'] == None: + # title = sf['name'] + # else: + # title = sf['title'] + + # auth = tweepy.OAuthHandler(options.twitter_consumer_key, options.twitter_consumer_secret) + # auth.set_access_token(options.twitter_access_key, options.twitter_access_secret) + # api = tweepy.API(auth) + # via_twitter_account = "" + # twitter_account = db.get("SELECT screen_name from externalservice where user_id = %s and deleted=0", sf['user_id']) + # if twitter_account: + # via_twitter_account = " via @{0}".format(twitter_account['screen_name']) + # api.update_status('https://mltshp.com/p/%s "%s"%s' % (sf['share_key'], title[:90], via_twitter_account)) @mltshp_task() diff --git a/tasks/transcode.py b/tasks/transcode.py index 5405858f..2b4d457f 100644 --- a/tasks/transcode.py +++ b/tasks/transcode.py @@ -11,7 +11,6 @@ from ffmpy import FFmpeg from PIL import Image from lib.s3 import S3Bucket -from boto.s3.key import Key logger = get_task_logger(__name__) @@ -69,11 +68,11 @@ def gif_to_video(sourcefile_id, file_key, input_file, format): # upload transcoded file to S3, then flag the sourcefile bucket = S3Bucket() - key = Key(bucket) - key.key = "%s/%s" % (format, file_key) - logger.info("uploading transcoded video: %s" % file_key) - key.set_contents_from_filename(output_file) + bucket.upload_file( + output_file, + "%s/%s" % (format, file_key), + ) logger.info("-- upload complete") db = db_connect() db.execute( @@ -121,10 +120,9 @@ def transcode_sharedfile(sharedfile_id): input_file = input_temp.name bucket = S3Bucket() - key = Key(bucket) - key.key = "originals/%s" % sourcefile["file_key"] + key = "originals/%s" % sourcefile["file_key"] logger.info("Downloading original GIF from S3 for sourcefile %s..." % sharedfile["source_id"]) - key.get_contents_to_filename(input_file) + bucket.download_file(input_file, key) # Test to see if GIF is animated or not animated = False @@ -134,6 +132,7 @@ def transcode_sharedfile(sharedfile_id): animated = True except EOFError: pass + im.close() if not animated: os.unlink(input_file) diff --git a/templates/account/settings-profile.html b/templates/account/settings-profile.html index 37320b79..59f52286 100644 --- a/templates/account/settings-profile.html +++ b/templates/account/settings-profile.html @@ -36,8 +36,8 @@
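The Pillow changes gathered above (lib/utilities.py, models/sourcefile.py, tasks/transcode.py) follow the Pillow 10 API pinned in requirements.txt: Image.Resampling.LANCZOS replaces the removed ANTIALIAS constant, thumbnails are buffered in io.BytesIO rather than cStringIO, and images are closed explicitly. A simplified sketch, without the cropping logic of transform_to_square_thumbnail and with a hypothetical input file:

```python
import io
from PIL import Image

def make_thumbnail(file_path, size, destination):
    # destination is an io.BytesIO, matching how the callers above buffer JPEG output
    img = Image.open(file_path)
    if img.mode != "RGB":
        converted = img.convert("RGB")
        img.close()
        img = converted
    img.thumbnail((size, size), Image.Resampling.LANCZOS)   # ANTIALIAS is gone in Pillow 10
    img.save(destination, format="JPEG", quality=95)
    img.close()

buf = io.BytesIO()
make_thumbnail("example.jpg", 100, buf)    # hypothetical input path
```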

 Want to change it?
 {% if not site_is_readonly and user.email_confirmed == 1 %}
- Tips for your profile image:
- Shoot for 200x200 pixels square, it will also be displayed at 48×48 for file pages. (100k maximum size)
+ Tips for your profile image:
+ Shoot for 200×200 pixels square, it will also be displayed at 48×48 for file pages. (100k maximum size)
 {% end %}
diff --git a/templates/account/settings.html b/templates/account/settings.html
index bb1c83b0..f97756a1 100644
--- a/templates/account/settings.html
+++ b/templates/account/settings.html
@@ -161,7 +161,7 @@

 You have canceled your subscription, but it will remain active {% end %}
 Your Membership Plan: {{ plan_name }}
- {% if user.stripe_plan_id == "mltshp-double" and user.stripe_plan_rate > 24 %}
+ {% if user.stripe_plan_id == "mltshp-double" and user.stripe_plan_rate is not None and user.stripe_plan_rate > 24 %}
 You subscribe at a custom rate of ${{ user.stripe_plan_rate }}/year. Thank you for your added support.
 {% end %}
diff --git a/templates/home/index.html b/templates/home/index.html
index 4c429567..fdc8af8c 100644
--- a/templates/home/index.html
+++ b/templates/home/index.html
@@ -127,7 +127,7 @@

 Cool Tools! Cool Tools!
- {% if sharedfile.previous_sharedfile_id > 0 %}
+ {% if sharedfile.previous_sharedfile_id or 0 > 0 %}
 jump to previous
diff --git a/templates/tools/picker-error.html b/templates/tools/picker-error.html
index 41274710..6999af34 100644
--- a/templates/tools/picker-error.html
+++ b/templates/tools/picker-error.html
@@ -2,6 +2,6 @@
 {% block main %}
- We had a problem.
+ We had a problem ({{ error }}).
{% end %} diff --git a/test.py b/test.py index 74679f07..f891ec37 100755 --- a/test.py +++ b/test.py @@ -1,9 +1,12 @@ #!/usr/bin/env python import unittest +import logging + from torndb import Connection from tornado.options import options from tornado.httpclient import AsyncHTTPClient +from tornado.options import options import MySQLdb @@ -19,7 +22,7 @@ TEST_MODULES = [ 'test.AccountTests', 'test.CommentTests', - 'test.ExternalAccountTests', + # 'test.ExternalAccountTests', 'test.FileTests', 'test.SimpleTests', 'test.SiteFunctionTests', @@ -70,12 +73,17 @@ def all(): if __name__ == '__main__': mltshpoptions.parse_dictionary(test_settings) + if not options.tornado_logging: + options.logging = None + logging.getLogger("tornado.access").disabled = True + logging.getLogger("tornado.application").disabled = True + logging.getLogger("tornado.general").disabled = True import tornado.testing db = Connection(options.database_host, 'mysql', options.database_user, options.database_password) try: db.execute("CREATE database %s" % options.database_name) - except MySQLdb.ProgrammingError, exc: + except MySQLdb.ProgrammingError as exc: if exc.args[0] != 1007: # database already exists raise else: diff --git a/test/AccountTests.py b/test/AccountTests.py index f6206f56..a477e280 100644 --- a/test/AccountTests.py +++ b/test/AccountTests.py @@ -1,21 +1,14 @@ -from tornado.testing import AsyncHTTPTestCase -from torndb import Connection -from tornado.httpclient import HTTPRequest from tornado.escape import json_decode - -import Cookie -import base64 import time -import copy import hashlib import random import os +from .base import BaseAsyncTestCase -from base import BaseAsyncTestCase +from models import User, Sourcefile, Sharedfile, Subscription, Notification, Post -from models import User, Sourcefile, Sharedfile, Shake, Subscription, Notification, Post class AccountIndexTests(BaseAsyncTestCase): def setUp(self): @@ -24,9 +17,9 @@ def setUp(self): self.admin.set_password('asdfasdf') self.admin.save() self.sign_in('admin', 'asdfasdf') - self.xsrf = self.get_xsrf() + self.xsrf = self.get_xsrf().decode("ascii") self.flake=str(time.time()) - + def test_pagination_returns_correct_counts(self): """ This tests creating 111 shared files for a user and then tests that pagination @@ -36,14 +29,14 @@ def test_pagination_returns_correct_counts(self): user_shake = user.shake() source_file = Sourcefile(width=10, height=10, file_key='mumbles', thumb_key='bumbles') source_file.save() - + missing_ids = [] - + for i in range(111): sf = Sharedfile(source_id = source_file.id, user_id = user.id, name="shgaredfile.png", title='shared file', share_key='asdf', content_type='image/png', deleted=0) sf.save() sf.add_to_shake(user_shake) - + for i in range(12): response = self.fetch('/user/admin/%s' % (i + 1)) self.assertEqual(response.code, 200) @@ -52,10 +45,9 @@ def test_new_user_sees_welcome_page(self): """ This tests that a new user who just signed up will see the welcome page. 
""" - self.http_client.fetch(self.get_url('/'), self.stop) - response = self.wait() + response = self.fetch_url('/') self.assertEqual(response.code, 200) - self.assertTrue(response.body.find('Getting Started')) + self.assertIn('Getting Started', response.body) class SubscriptionTests(BaseAsyncTestCase): @@ -64,99 +56,83 @@ def setUp(self): self.admin = User(name='admin', email='admin@mltshp.com', email_confirmed=1, is_paid=1) self.admin.set_password('asdfasdf') self.admin.save() - + self.user2 = User(name='user2', email='user2@mltshp.com', email_confirmed=1, is_paid=1) self.user2.set_password('asdfasdf') self.user2.save() - + self.user3 = User(name='user3', email='user3@mltshp.com', email_confirmed=1, is_paid=1) self.user3.set_password('asdfasdf') self.user3.save() - + self.sid = self.sign_in('admin', 'asdfasdf') - self.xsrf = self.get_xsrf() + self.xsrf = self.get_xsrf().decode("ascii") self.flake=str(time.time()) - + def test_follow_signed_in(self): - request = HTTPRequest(self.get_url('/user/user3/subscribe?json=1'), 'POST', {'Cookie':'sid=%s;_xsrf=%s' % (self.sid, self.xsrf)}, "_xsrf=%s" % (self.xsrf)) - self.http_client.fetch(request, self.stop) - response = self.wait() - + response = self.fetch("/user/user3/subscribe?json=1", method="POST", headers={'Cookie':'sid=%s;_xsrf=%s' % (self.sid, self.xsrf)}, body="_xsrf=%s" % self.xsrf) + j = json_decode(response.body) self.assertEqual(j['subscription_status'], True) - + subscription = Subscription.get('user_id=1 and shake_id=3') self.assertTrue(subscription) self.assertFalse(subscription.deleted) - + def test_follow_creates_posts(self): self.sign_in('user3', 'asdfasdf') response = self.upload_test_file() self.sign_in('admin', 'asdfasdf') self.post_url('/user/user3/subscribe?json=1') - + post = Post.where('user_id=%s', self.admin.id) + self.assertTrue(len(post) > 0) self.assertEqual(3, post[0].shake_id) - + def test_cannot_follow_self(self): - request = HTTPRequest(self.get_url('/user/admin/subscribe?json=1'), 'POST', {'Cookie':'sid=%s;_xsrf=%s' % (self.sid, self.xsrf)}, "_xsrf=%s" % (self.xsrf)) - self.http_client.fetch(request, self.stop) - response = self.wait() - + response = self.fetch('/user/admin/subscribe?json=1', method='POST', headers={'Cookie':'sid=%s;_xsrf=%s' % (self.sid, self.xsrf)}, body="_xsrf=%s" % (self.xsrf)) + j = json_decode(response.body) self.assertTrue('error' in j) - + subscription = Subscription.all() self.assertTrue(len(subscription) == 0) def test_cannot_subscribe_if_not_signed_in(self): - request = HTTPRequest(self.get_url('/user/user3/subscribe?json=1'), 'POST', {'Cookie':'_xsrf=%s' % (self.xsrf)}, "_xsrf=%s" % (self.xsrf)) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = self.fetch('/user/user3/subscribe?json=1', method="POST", headers={'Cookie':'_xsrf=%s' % self.xsrf}, body="_xsrf=%s" % self.xsrf) self.assertEqual(response.code, 403) - + def test_unsubscribe_shake(self): - request = HTTPRequest(self.get_url('/user/user3/subscribe?json=1'), 'POST', {'Cookie':'sid=%s;_xsrf=%s' % (self.sid, self.xsrf)}, "_xsrf=%s" % (self.xsrf)) - self.http_client.fetch(request, self.stop) - response = self.wait() - - request = HTTPRequest(self.get_url('/user/user3/unsubscribe?json=1'), 'POST', {'Cookie':'sid=%s;_xsrf=%s' % (self.sid, self.xsrf)}, "_xsrf=%s" % (self.xsrf)) - self.http_client.fetch(request, self.stop) - response = self.wait() - + response = self.fetch('/user/user3/subscribe?json=1', method="POST", headers={'Cookie':'sid=%s;_xsrf=%s' % (self.sid, self.xsrf)}, body="_xsrf=%s" % 
self.xsrf) + + response = self.fetch('/user/user3/unsubscribe?json=1', method='POST', headers={'Cookie':'sid=%s;_xsrf=%s' % (self.sid, self.xsrf)}, body="_xsrf=%s" % self.xsrf) + j = json_decode(response.body) self.assertEqual(j['subscription_status'], False) - + subscription = Subscription.get('user_id=1 and shake_id=3') self.assertTrue(subscription) self.assertTrue(subscription.deleted) - - + def test_subscribe_unsubscribe_is_same_object(self): - request = HTTPRequest(self.get_url('/user/user3/subscribe?json=1'), 'POST', {'Cookie':'sid=%s;_xsrf=%s' % (self.sid, self.xsrf)}, "_xsrf=%s" % (self.xsrf)) - self.http_client.fetch(request, self.stop) - response = self.wait() - + response = self.fetch('/user/user3/subscribe?json=1', method='POST', headers={'Cookie':'sid=%s;_xsrf=%s' % (self.sid, self.xsrf)}, body="_xsrf=%s" % self.xsrf) + first_subscription = Subscription.get('user_id=1 and shake_id=3') - - request = HTTPRequest(self.get_url('/user/user3/unsubscribe?json=1'), 'POST', {'Cookie':'sid=%s;_xsrf=%s' % (self.sid, self.xsrf)}, "_xsrf=%s" % (self.xsrf)) - self.http_client.fetch(request, self.stop) - response = self.wait() - + + response = self.fetch('/user/user3/unsubscribe?json=1', method='POST', headers={'Cookie':'sid=%s;_xsrf=%s' % (self.sid, self.xsrf)}, body="_xsrf=%s" % self.xsrf) + j = json_decode(response.body) self.assertEqual(j['subscription_status'], False) - + second_subscription = Subscription.get('user_id=1 and shake_id=3') self.assertEqual(first_subscription.id, second_subscription.id) - + def test_notification_created_when_subscription_created(self): """ User is followed. Notification created. """ - request = HTTPRequest(self.get_url('/user/user3/subscribe?json=1'), 'POST', {'Cookie':'sid=%s;_xsrf=%s' % (self.sid, self.xsrf)}, "_xsrf=%s" % (self.xsrf)) - self.http_client.fetch(request, self.stop) - response = self.wait() - + response = self.fetch('/user/user3/subscribe?json=1', method='POST', headers={'Cookie':'sid=%s;_xsrf=%s' % (self.sid, self.xsrf)}, body="_xsrf=%s" % self.xsrf) + subscriptions = Subscription.all() self.assertEqual(len(subscriptions), 1) notifications = Notification.all() @@ -174,58 +150,52 @@ def setUp(self): self.user = User(name="admin", email="admin@mltshp.com", email_confirmed=1, is_paid=1) self.user.set_password('asdfasdf') self.user.save() - self.xsrf = self.get_xsrf() - + self.xsrf = self.get_xsrf().decode("ascii") + def test_email_verification(self): self.assertEqual(self.user.email_confirmed, 1) self.assertTrue(self.user.verify_email_token == None) - - + self.user.invalidate_email() reload_user = User.get("id=%s", self.user.id) - + self.assertEqual(reload_user.email_confirmed, 0) self.assertTrue(len(reload_user.verify_email_token) == 40) - + def test_lost_password(self): self.assertTrue(self.user.reset_password_token == None) - request=HTTPRequest( - url = self.get_url("/account/forgot-password"), + response = self.fetch( + "/account/forgot-password", method='POST', - headers = {'Cookie':'_xsrf=%s' % (self.xsrf)}, + headers = {'Cookie':'_xsrf=%s' % self.xsrf}, body = "_xsrf=%s&email=%s" % (self.xsrf, self.user.email)) - self.http_client.fetch(request, self.stop) - response = self.wait() self.assertEqual(response.code, 200) - self.assertTrue(response.body.find("We Sent You Instructions!") > 0) + self.assertIn("We Sent You Instructions!", response.body) user = User.get("id=%s", 1) self.assertTrue(len(user.reset_password_token) == 40) def test_lost_password_lookup_failure(self): self.assertTrue(self.user.reset_password_token == None) - 
request=HTTPRequest( - url = self.get_url("/account/forgot-password"), + response = self.fetch( + "/account/forgot-password", method='POST', headers = {'Cookie':'_xsrf=%s' % self.xsrf}, - body = "_xsrf=%s&email=25235235" % (self.xsrf)) - self.http_client.fetch(request, self.stop) - response = self.wait() - self.assertTrue(response.body.find("That email address doesn't have an account.") > 0) + body = "_xsrf=%s&email=25235235" % self.xsrf) + self.assertIn("That email address doesn't have an account.", response.body) def test_reset_password_lookup(self): """ Hitting the reset-password url with a valid key will correctly look it up. """ h = hashlib.sha1() - h.update("%s-%s" % (time.time(), random.random())) + h.update(("%s-%s" % (time.time(), random.random())).encode("ascii")) self.user.reset_password_token = h.hexdigest() self.user.save() - self.http_client.fetch(self.get_url("/account/reset-password/%s" % (self.user.reset_password_token)), self.stop) - response = self.wait() - self.assertTrue(response.body.find("Enter a new password for your account.") > -1) + response = self.fetch("/account/reset-password/%s" % self.user.reset_password_token) + self.assertIn("Enter a new password for your account.", response.body) def test_reset_password_finish(self): """ @@ -233,17 +203,14 @@ def test_reset_password_finish(self): """ self.user.create_reset_password_token() - self.http_client.fetch(self.get_url("/account/reset-password/%s" % (self.user.reset_password_token)), self.stop) - response = self.wait() + response = self.fetch("/account/reset-password/%s" % self.user.reset_password_token) - request = HTTPRequest( - url = self.get_url("/account/reset-password/%s" % (self.user.reset_password_token)), + response = self.fetch( + "/account/reset-password/%s" % self.user.reset_password_token, method='POST', - headers={"Cookie":"_xsrf=%s" % (self.xsrf)}, + headers={"Cookie":"_xsrf=%s" % self.xsrf}, body="password=%s&password_again=%s&_xsrf=%s" % ("qwertyqwerty", "qwertyqwerty", self.xsrf) ) - self.http_client.fetch(request, self.stop) - response = self.wait() self.user = User.get("id=%s", 1) self.assertEqual(self.user.reset_password_token, "") @@ -256,15 +223,13 @@ def test_reset_password_throws_error_if_passwords_dont_match(self): self.user.create_reset_password_token() reset_token = self.user.reset_password_token - request = HTTPRequest( - url = self.get_url("/account/reset-password/%s" % (reset_token)), + response = self.fetch( + "/account/reset-password/%s" % reset_token, method='POST', headers={"Cookie":"_xsrf=%s" % (self.xsrf)}, body="password=%s&password_again=%s&_xsrf=%s" % ("qwertyqwerty", "poiupoiu", self.xsrf) ) - self.http_client.fetch(request, self.stop) - response = self.wait() - self.assertTrue(response.body.find("Those passwords didn't match, or are invalid. Please try again.") > -1) + self.assertIn("Those passwords didn't match, or are invalid. 
Please try again.", response.body) self.user = User.get("id = %s", 1) self.assertEqual(self.user.hashed_password, User.generate_password_digest('asdfasdf')) self.assertEqual(reset_token, self.user.reset_password_token) @@ -279,8 +244,7 @@ def test_reset_password_throws_404s_on_invalid_tokens(self): "", "029%203208%2032093093%2020923"] for token in invalid_tokens: - self.http_client.fetch(self.get_url("/account/reset-password/%s" % (token)), self.stop) - response = self.wait() + response = self.fetch("/account/reset-password/%s" % token) self.assertEqual(response.code, 404) def test_password_reset_while_signed_in(self): @@ -290,14 +254,12 @@ def test_password_reset_while_signed_in(self): sid = self.sign_in("admin", "asdfasdf") self.user.create_reset_password_token() - request = HTTPRequest( - url = self.get_url("/account/reset-password/%s" % (self.user.reset_password_token)), + response = self.fetch( + "/account/reset-password/%s" % self.user.reset_password_token, method='GET', headers={'Cookie':"sid=%s" % (sid)}, follow_redirects=False ) - self.http_client.fetch(request, self.stop) - response = self.wait() class NotificationTests(BaseAsyncTestCase): def setUp(self): @@ -309,7 +271,7 @@ def setUp(self): self.receiver.set_password('asdfasdf') self.receiver.save() - self.xsrf = self.get_xsrf() + self.xsrf = self.get_xsrf().decode("ascii") self.sender_sid = self.sign_in('admin', 'asdfasdf') self.receiver_sid = self.sign_in('user2', 'asdfasdf') @@ -323,9 +285,7 @@ def test_clear_single_notification(self): sharedfile = Sharedfile.get('id=1') n = Notification.new_favorite(self.sender, sharedfile) - request = HTTPRequest(self.get_url('/account/clear-notification'), 'POST', {'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.receiver_sid)}, '_xsrf=%s&type=single&id=%s' % (self.xsrf, n.id)) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = self.fetch('/account/clear-notification', method='POST', headers={'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.receiver_sid)}, body='_xsrf=%s&type=single&id=%s' % (self.xsrf, n.id)) j = json_decode(response.body) self.assertTrue('response' in j) @@ -336,9 +296,7 @@ def test_only_allowed_to_clear_own_notifications(self): sharedfile = Sharedfile.get('id=1') n = Notification.new_favorite(self.sender, sharedfile) - request = HTTPRequest(self.get_url('/account/clear-notification'), 'POST', {'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sender_sid)}, '_xsrf=%s&type=single&id=%s' % (self.xsrf, n.id)) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = self.fetch('/account/clear-notification', method='POST', headers={'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sender_sid)}, body='_xsrf=%s&type=single&id=%s' % (self.xsrf, n.id)) j = json_decode(response.body) self.assertTrue('error' in j) @@ -354,9 +312,7 @@ def test_clears_all_saves(self): n = Notification.new_save(self.sender, sharedfile) - request = HTTPRequest(self.get_url('/account/clear-notification'), 'POST', {'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.receiver_sid)}, '_xsrf=%s&type=save' % (self.xsrf)) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = self.fetch('/account/clear-notification', method='POST', headers={'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.receiver_sid)}, body='_xsrf=%s&type=save' % (self.xsrf)) j = json_decode(response.body) self.assertEqual(j['response'], "0 new saves") @@ -371,9 +327,7 @@ def test_clears_all_favorites(self): n = Notification.new_favorite(self.sender, sharedfile) n = 
Notification.new_favorite(self.sender, sharedfile) - request = HTTPRequest(self.get_url('/account/clear-notification'), 'POST', {'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.receiver_sid)}, '_xsrf=%s&type=favorite' % (self.xsrf)) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = self.fetch('/account/clear-notification', method='POST', headers={'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.receiver_sid)}, body='_xsrf=%s&type=favorite' % self.xsrf) j = json_decode(response.body) self.assertEqual(j['response'], "0 new likes") @@ -394,9 +348,7 @@ def test_clears_all_subscriptions(self): Notification.new_subscriber(self.sender, self.receiver, 1) Notification.new_subscriber(user3, self.receiver, 2) - request = HTTPRequest(self.get_url('/account/clear-notification'), 'POST', {'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.receiver_sid)}, '_xsrf=%s&type=subscriber' % (self.xsrf)) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = self.fetch('/account/clear-notification', method='POST', headers={'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.receiver_sid)}, body='_xsrf=%s&type=subscriber' % self.xsrf) j = json_decode(response.body) self.assertEqual(j['response'], "You have 0 new followers") @@ -411,7 +363,7 @@ def setUp(self): self.admin.set_password('asdfasdf') self.admin.save() self.sid = self.sign_in('admin', 'asdfasdf') - self.xsrf = self.get_xsrf() + self.xsrf = self.get_xsrf().decode("ascii") self.flake=str(time.time()) def test_usernames_with_special_characters_can_be_seen(self): @@ -420,8 +372,5 @@ def test_usernames_with_special_characters_can_be_seen(self): new_user = User(name=name, email='%s@mltshp.com' % (name), email_confirmed=1) new_user.set_password('asdfasdf') new_user.save() - self.http_client.fetch(self.get_url('/user/%s' % name), self.stop) - response = self.wait() + response = self.fetch('/user/%s' % name) self.assertEqual(response.code, 200) - - diff --git a/test/CommentTests.py b/test/CommentTests.py index 1d0e2190..6a2c4cea 100644 --- a/test/CommentTests.py +++ b/test/CommentTests.py @@ -1,11 +1,10 @@ from tornado.testing import AsyncHTTPTestCase from tornado.web import Application -from tornado.httpclient import HTTPRequest from tornado.escape import json_decode, url_escape import time -from base import BaseAsyncTestCase +from .base import BaseAsyncTestCase from models import User, Sharedfile, Sourcefile, Conversation import models @@ -17,7 +16,7 @@ def setUp(self): self.admin.set_password('asdfasdf') self.admin.save() self.sid = self.sign_in('admin', 'asdfasdf') - self.xsrf = self.get_xsrf() + self.xsrf = self.get_xsrf().decode("ascii") self.flake=str(time.time()) self.src = Sourcefile(width=1, height=1, file_key='asdf', thumb_key='qwer') self.src.save() @@ -32,9 +31,7 @@ def test_saving_a_comment_is_stored(self): That is all.&_xsrf=asdf """ - request = HTTPRequest(self.get_url('/p/%s/comment' % self.shf.share_key), 'POST', {'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, "body=%s&_xsrf=%s" % (url_escape(body), self.xsrf)) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = self.fetch('/p/%s/comment' % self.shf.share_key, method='POST', headers={'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, body="body=%s&_xsrf=%s" % (url_escape(body), self.xsrf)) comments = self.shf.comments() self.assertEqual(len(comments), 1) @@ -42,9 +39,7 @@ def test_saving_a_comment_is_stored(self): def test_blank_comment_doesnt_save(self): body = "" - request = 
HTTPRequest(self.get_url('/p/%s/comment' % self.shf.share_key), 'POST', {'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, "body=%s&_xsrf=%s" % (url_escape(body), self.xsrf)) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = self.fetch('/p/%s/comment' % self.shf.share_key, method='POST', headers={'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, body="body=%s&_xsrf=%s" % (url_escape(body), self.xsrf)) comments = self.shf.comments() self.assertEqual(len(comments), 0) @@ -53,9 +48,7 @@ def test_saving_an_empty_comment_not_stored(self): #submit a comment to /share_key/save_comment body = """ """ - request = HTTPRequest(self.get_url('/p/%s/comment' % self.shf.share_key), 'POST', {'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, "body=%s&_xsrf=%s" % (url_escape(body), self.xsrf)) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = self.fetch('/p/%s/comment' % self.shf.share_key, method='POST', headers={'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, body="body=%s&_xsrf=%s" % (url_escape(body), self.xsrf)) comments = self.shf.comments() self.assertEqual(len(comments), 0) @@ -65,9 +58,7 @@ def test_saving_not_signed_in_not_stored(self): body = """ This is a comment. """ - request = HTTPRequest(self.get_url('/p/%s/comment' % self.shf.share_key), 'POST', {'Cookie':'_xsrf=%s' % (self.xsrf)}, "body=%s&_xsrf=%s" % (url_escape(body), self.xsrf)) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = self.fetch('/p/%s/comment' % self.shf.share_key, method='POST', headers={'Cookie':'_xsrf=%s' % (self.xsrf)}, body="body=%s&_xsrf=%s" % (url_escape(body), self.xsrf)) comments = self.shf.comments() self.assertEqual(len(comments), 0) @@ -77,13 +68,9 @@ def test_saving_not_signed_in_not_stored(self): # This is a comment. # """ - # request = HTTPRequest(self.get_url('/p/%s/comment' % self.shf.share_key), 'POST', {'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, "body=%s&_xsrf=%s" % (url_escape(body), self.xsrf)) - # self.http_client.fetch(request, self.stop) - # response = self.wait() + # response = self.fetch('/p/%s/comment' % self.shf.share_key, method='POST', headers={'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, body="body=%s&_xsrf=%s" % (url_escape(body), self.xsrf)) - # request = HTTPRequest(self.get_url('/p/%s/comment' % self.shf.share_key), 'POST', {'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, "body=%s&_xsrf=%s" % (url_escape(body), self.xsrf)) - # self.http_client.fetch(request, self.stop) - # response = self.wait() + # response = self.fetch('/p/%s/comment' % self.shf.share_key, method='POST', headers={'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, body="body=%s&_xsrf=%s" % (url_escape(body), self.xsrf)) # test_user = User.get('id = 1') # self.assertEqual(test_user.restricted, 1) @@ -95,13 +82,9 @@ def test_saving_not_signed_in_not_stored(self): # body = """ # This is a comment. 
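[Note on the pattern above] Every POST in these tests carries the XSRF token twice because Tornado's XSRF protection compares the _xsrf cookie against the _xsrf form field. A sketch of the pattern as used in the CommentTests hunks above (names come from this suite; get_xsrf() returns bytes under Python 3, which is why each setUp() now adds .decode("ascii")):

# Sketch: satisfying Tornado's XSRF check in these tests.
xsrf = self.get_xsrf().decode("ascii")          # bytes -> str under Python 3
response = self.fetch(
    '/p/%s/comment' % self.shf.share_key,
    method='POST',
    # the cookie token and the form-body token must carry the same value
    headers={'Cookie': '_xsrf=%s;sid=%s' % (xsrf, self.sid)},
    body='body=%s&_xsrf=%s' % (url_escape('This is a comment.'), xsrf),
)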
# """ - # request = HTTPRequest(self.get_url('/p/%s/comment' % self.shf.share_key), 'POST', {'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, "body=%s&_xsrf=%s" % (url_escape(body), self.xsrf)) - # self.http_client.fetch(request, self.stop) - # response = self.wait() + # response = self.fetch('/p/%s/comment' % self.shf.share_key, method='POST', headers={'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, body="body=%s&_xsrf=%s" % (url_escape(body), self.xsrf)) - # request = HTTPRequest(self.get_url('/p/%s/comment' % self.shf.share_key), 'POST', {'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, "body=%s&_xsrf=%s" % (url_escape(body), self.xsrf)) - # self.http_client.fetch(request, self.stop) - # response = self.wait() + # response = self.fetch('/p/%s/comment' % self.shf.share_key, method='POST', headers={'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, body="body=%s&_xsrf=%s" % (url_escape(body), self.xsrf)) # test_user = User.get('id = 1') # self.assertEqual(test_user.restricted, 0) diff --git a/test/ExternalAccountTests.py b/test/ExternalAccountTests.py index 8cc9e102..251e99ee 100644 --- a/test/ExternalAccountTests.py +++ b/test/ExternalAccountTests.py @@ -1,132 +1,125 @@ -from functools import wraps +### RIP Twitter -from tornado.testing import AsyncHTTPTestCase -from torndb import Connection -from tornado.httpclient import HTTPRequest -from tornado.options import options -import tornado.ioloop -import Cookie -import base64 -import time -import os -import hashlib +# from functools import wraps -from base import BaseAsyncTestCase -from models import User, Sharedfile, Sourcefile, Externalservice +# from tornado.testing import AsyncHTTPTestCase +# from torndb import Connection +# from tornado.options import options +# import tornado.ioloop +# import http.cookies +# import base64 +# import time +# import os +# import hashlib +# from .base import BaseAsyncTestCase +# from models import User, Sharedfile, Sourcefile, Externalservice -def twittertest(fn): - # This would be a "skip" if unittest v1 supported skipping. - @wraps(fn) - def test(self): - if options.twitter_consumer_key: - return fn(self) - return test +# def twittertest(fn): +# # This would be a "skip" if unittest v1 supported skipping. 
+# @wraps(fn) +# def test(self): +# if options.twitter_consumer_key: +# return fn(self) +# return test -class TwitterTests(BaseAsyncTestCase): - def setUp(self): - super(TwitterTests, self).setUp() - self.user = User(name="admin", email="admin@mltshp.com", email_confirmed=1, is_paid=1) - self.user.set_password('asdfasdf') - self.user.save() - self.sid = self.sign_in("admin", "asdfasdf") + +# class TwitterTests(BaseAsyncTestCase): +# def setUp(self): +# super(TwitterTests, self).setUp() +# self.user = User(name="admin", email="admin@mltshp.com", email_confirmed=1, is_paid=1) +# self.user.set_password('asdfasdf') +# self.user.save() +# self.sid = self.sign_in("admin", "asdfasdf") - self.externalservice = Externalservice(user_id=self.user.id, service_id=555, screen_name='mltshp', type=Externalservice.TWITTER, service_key="blah", service_secret="mah") - self.externalservice.save() +# self.externalservice = Externalservice(user_id=self.user.id, service_id=555, screen_name='mltshp', type=Externalservice.TWITTER, service_key="blah", service_secret="mah") +# self.externalservice.save() - def get_new_ioloop(self): - return tornado.ioloop.IOLoop.instance() +# def get_new_ioloop(self): +# return tornado.ioloop.IOLoop.instance() - @twittertest - def test_twitter_connect(self): - request = HTTPRequest(self.get_url("/tools/twitter/connect"), 'GET', {'Cookie':"sid=%s" % (self.sid)}, follow_redirects=False) - self.http_client.fetch(request, self.stop) - response = self.wait() - self.assertTrue(response.headers['location'].startswith("https://api.twitter.com/oauth/authorize?oauth")) +# @twittertest +# def test_twitter_connect(self): +# response = self.fetch("/tools/twitter/connect", method='GET', headers={'Cookie':"sid=%s" % (self.sid)}, follow_redirects=False) +# self.assertTrue(response.headers['location'].startswith("https://api.twitter.com/oauth/authorize?oauth")) - def test_post_from_twitter(self): - #provider = "https://api.twitter.com/1.1/account/verify_credentials.json" - provider = self.get_url('/heartbeat') +# def test_post_from_twitter(self): +# #provider = "https://api.twitter.com/1.1/account/verify_credentials.json" +# provider = self.get_url('/heartbeat') - """ - Copies a file to the file-system, then POSTs the location and details to the upload method - for processing - """ - file_path = os.path.abspath("test/files/1.png") - sha1 = Sourcefile.get_sha1_file_key(file_path) - content_type = "image/png" +# """ +# Copies a file to the file-system, then POSTs the location and details to the upload method +# for processing +# """ +# file_path = os.path.abspath("test/files/1.png") +# sha1 = Sourcefile.get_sha1_file_key(file_path) +# content_type = "image/png" - file_name = os.path.basename(file_path) - file_size = os.path.getsize(file_path) - body = "media_name=%s&media_content_type=%s&media_sha1=%s&media_size=%s&media_path=%s&skip_s3=1" % (file_name, content_type, sha1, file_size, file_path) +# file_name = os.path.basename(file_path) +# file_size = os.path.getsize(file_path) +# body = "media_name=%s&media_content_type=%s&media_sha1=%s&media_size=%s&media_path=%s&skip_s3=1" % (file_name, content_type, sha1, file_size, file_path) - request = HTTPRequest( - url=self.get_url('/upload'), - method='POST', - headers={'X-Auth-Service-Provider':provider, 'X-Verify-Credentials-Authorization': 'OAuth oauth_timestamp="1290404453", oauth_version="1.0", oauth_consumer_key="IQKbtAYlXLripLGPWd0HUA", oauth_token="37458155-JCG7c8oejM6N4TK4HJbXVC5VGq1gtaSUPt90wxFI", oauth_signature="9QxkJqBAJfZ83sbz6SCJKSaPn9U%3D", 
oauth_nonce="C7AB0CBC-9193-44EE-AFC1-6FE3BA51F048", oauth_signature_method="HMAC-SHA1"'}, - body=body, - ) - self.http_client.fetch(request, self.stop) - response = self.wait() - self.assertEqual(response.code, 200) - self.assertEqual(response.body, "https://s.mltshp.com/r/1.png") - sf = Sharedfile.get("id = %s", 1) - self.assertEqual(sf.id, 1) - self.assertEqual(sf.name, '1.png') - self.assertEqual(sf.user_id, self.user.id) +# response = self.fetch( +# '/upload', +# method='POST', +# headers={'X-Auth-Service-Provider':provider, 'X-Verify-Credentials-Authorization': 'OAuth oauth_timestamp="1290404453", oauth_version="1.0", oauth_consumer_key="IQKbtAYlXLripLGPWd0HUA", oauth_token="37458155-JCG7c8oejM6N4TK4HJbXVC5VGq1gtaSUPt90wxFI", oauth_signature="9QxkJqBAJfZ83sbz6SCJKSaPn9U%3D", oauth_nonce="C7AB0CBC-9193-44EE-AFC1-6FE3BA51F048", oauth_signature_method="HMAC-SHA1"'}, +# body=body, +# ) +# self.assertEqual(response.code, 200) +# self.assertEqual(response.body, "https://s.mltshp.com/r/1.png") +# sf = Sharedfile.get("id = %s", 1) +# self.assertEqual(sf.id, 1) +# self.assertEqual(sf.name, '1.png') +# self.assertEqual(sf.user_id, self.user.id) - def test_posting_fails_when_provider_is_not_localhost(self): - provider = "https://example.com" - """ - Copies a file to the file-system, then POSTs the location and details to the upload method - for processing - """ - file_path = os.path.abspath("test/files/1.png") - sha1 = Sourcefile.get_sha1_file_key(file_path) - content_type = "image/png" +# def test_posting_fails_when_provider_is_not_localhost(self): +# provider = "http://notes.torrez.org" +# """ +# Copies a file to the file-system, then POSTs the location and details to the upload method +# for processing +# """ +# file_path = os.path.abspath("test/files/1.png") +# sha1 = Sourcefile.get_sha1_file_key(file_path) +# content_type = "image/png" - file_name = os.path.basename(file_path) - file_size = os.path.getsize(file_path) - body = "media_name=%s&media_content_type=%s&media_sha1=%s&media_size=%s&media_path=%s&skip_s3=1" % (file_name, content_type, sha1, file_size, file_path) +# file_name = os.path.basename(file_path) +# file_size = os.path.getsize(file_path) +# body = "media_name=%s&media_content_type=%s&media_sha1=%s&media_size=%s&media_path=%s&skip_s3=1" % (file_name, content_type, sha1, file_size, file_path) - request = HTTPRequest( - url=self.get_url('/upload'), - method='POST', - headers={'X-Auth-Service-Provider':provider, 'X-Verify-Credentials-Authorization': 'OAuth oauth_timestamp="1290404453", oauth_version="1.0", oauth_consumer_key="IQKbtAYlXLripLGPWd0HUA", oauth_token="37458155-JCG7c8oejM6N4TK4HJbXVC5VGq1gtaSUPt90wxFI", oauth_signature="9QxkJqBAJfZ83sbz6SCJKSaPn9U%3D", oauth_nonce="C7AB0CBC-9193-44EE-AFC1-6FE3BA51F048", oauth_signature_method="HMAC-SHA1"'}, - body=body, - ) - self.http_client.fetch(request, self.stop) - response = self.wait() - self.assertEqual(response.code, 403) +# response = self.fetch( +# '/upload', +# method='POST', +# headers={'X-Auth-Service-Provider':provider, 'X-Verify-Credentials-Authorization': 'OAuth oauth_timestamp="1290404453", oauth_version="1.0", oauth_consumer_key="IQKbtAYlXLripLGPWd0HUA", oauth_token="37458155-JCG7c8oejM6N4TK4HJbXVC5VGq1gtaSUPt90wxFI", oauth_signature="9QxkJqBAJfZ83sbz6SCJKSaPn9U%3D", oauth_nonce="C7AB0CBC-9193-44EE-AFC1-6FE3BA51F048", oauth_signature_method="HMAC-SHA1"'}, +# body=body, +# ) +# self.assertEqual(response.code, 403) - def test_post_from_twitter_with_message(self): - provider = self.get_url('/heartbeat') +# def 
test_post_from_twitter_with_message(self): +# provider = self.get_url('/heartbeat') - """ - Copies a file to the file-system, then POSTs the location and details to the upload method - for processing - """ - file_path = os.path.abspath("test/files/1.png") - sha1 = Sourcefile.get_sha1_file_key(file_path) - content_type = "image/png" +# """ +# Copies a file to the file-system, then POSTs the location and details to the upload method +# for processing +# """ +# file_path = os.path.abspath("test/files/1.png") +# sha1 = Sourcefile.get_sha1_file_key(file_path) +# content_type = "image/png" - file_name = os.path.basename(file_path) - file_size = os.path.getsize(file_path) - message = "hey look\r\n at me!\r\n" - body = "message=%s&media_name=%s&media_content_type=%s&media_sha1=%s&media_size=%s&media_path=%s&skip_s3=1" % (message, file_name, content_type, sha1, file_size, file_path) +# file_name = os.path.basename(file_path) +# file_size = os.path.getsize(file_path) +# message = "hey look\r\n at me!\r\n" +# body = "message=%s&media_name=%s&media_content_type=%s&media_sha1=%s&media_size=%s&media_path=%s&skip_s3=1" % (message, file_name, content_type, sha1, file_size, file_path) - request = HTTPRequest( - url=self.get_url('/upload'), - method='POST', - headers={'X-Auth-Service-Provider':provider, 'X-Verify-Credentials-Authorization': 'OAuth oauth_timestamp="1290404453", oauth_version="1.0", oauth_consumer_key="IQKbtAYlXLripLGPWd0HUA", oauth_token="37458155-JCG7c8oejM6N4TK4HJbXVC5VGq1gtaSUPt90wxFI", oauth_signature="9QxkJqBAJfZ83sbz6SCJKSaPn9U%3D", oauth_nonce="C7AB0CBC-9193-44EE-AFC1-6FE3BA51F048", oauth_signature_method="HMAC-SHA1"'}, - body=body, - ) - self.http_client.fetch(request, self.stop) - response = self.wait() - self.assertEqual(response.code, 200) - self.assertEqual(response.body, "https://s.mltshp.com/r/1.png") - sf = Sharedfile.get("id = %s", 1) - self.assertEqual(sf.id, 1) - self.assertEqual(sf.get_title(), message.replace('\n', '').replace('\r', '')) - self.assertEqual(sf.user_id, self.user.id) +# response = self.fetch( +# '/upload', +# method='POST', +# headers={'X-Auth-Service-Provider':provider, 'X-Verify-Credentials-Authorization': 'OAuth oauth_timestamp="1290404453", oauth_version="1.0", oauth_consumer_key="IQKbtAYlXLripLGPWd0HUA", oauth_token="37458155-JCG7c8oejM6N4TK4HJbXVC5VGq1gtaSUPt90wxFI", oauth_signature="9QxkJqBAJfZ83sbz6SCJKSaPn9U%3D", oauth_nonce="C7AB0CBC-9193-44EE-AFC1-6FE3BA51F048", oauth_signature_method="HMAC-SHA1"'}, +# body=body, +# ) +# self.assertEqual(response.code, 200) +# self.assertEqual(response.body, "https://s.mltshp.com/r/1.png") +# sf = Sharedfile.get("id = %s", 1) +# self.assertEqual(sf.id, 1) +# self.assertEqual(sf.get_title(), message.replace('\n', '').replace('\r', '')) +# self.assertEqual(sf.user_id, self.user.id) diff --git a/test/FileTests.py b/test/FileTests.py index abbd43a7..6b452242 100644 --- a/test/FileTests.py +++ b/test/FileTests.py @@ -1,22 +1,16 @@ -from tornado.testing import AsyncHTTPTestCase -from torndb import Connection -from tornado.httpclient import HTTPRequest -from tornado.httpclient import AsyncHTTPClient -from tornado.escape import url_escape, url_unescape, json_decode, json_encode +from tornado.escape import url_escape, json_decode from tornado.options import options import tornado.ioloop -import handlers -import base64 import time import json import os -from urlparse import urlparse +from urllib.parse import urlparse from contextlib import contextmanager -from base import BaseAsyncTestCase +from .base import 
BaseAsyncTestCase -from models import Sharedfile, Sourcefile, Favorite, User, Shake, Shakesharedfile, Post, Notification +from models import Sharedfile, Sourcefile, User, Shakesharedfile, Post from lib.utilities import base36encode @@ -37,7 +31,7 @@ def setUp(self): self.user.save() self.sid = self.sign_in("admin", "asdfasdf") - self.xsrf = self.get_xsrf() + self.xsrf = self.get_xsrf().decode("ascii") self.test_file1_path = os.path.abspath("test/files/1.png") self.test_file1_sha1 = Sourcefile.get_sha1_file_key(self.test_file1_path) @@ -46,8 +40,7 @@ def setUp(self): self.upload_file(self.test_file1_path, self.test_file1_sha1, self.test_file1_content_type, 1, self.sid, self.xsrf) def test_deleting_file_sets_to_true(self): - self.http_client.fetch(HTTPRequest(self.get_url("/p/1/delete"), 'POST', {'Cookie': "_xsrf=%s;sid=%s" % (self.xsrf, self.sid)}, "_xsrf=%s" % (self.xsrf)), self.stop) - response = self.wait() + response = self.post_url("/p/1/delete") sf = Sharedfile.get("id=1") self.assertEqual(sf.deleted, 1) @@ -58,14 +51,12 @@ def test_delete_button_only_shows_for_owner(self): bill.save() self.sign_in("bill", "asdfasdf") - self.http_client.fetch(HTTPRequest(self.get_url("/p/1"), 'GET', {'Cookie':'sid=%s' % (self.sid)}), self.stop) - response = self.wait() - self.assertEqual(response.body.find('/p/1/delete'), -1) + response = self.fetch("/p/1", method='GET', headers={'Cookie':'sid=%s' % self.sid}) + self.assertEqual(response.body.find('/p/1/delete'.encode("ascii")), -1) self.sign_in("admin", "asdfasdf") - self.http_client.fetch(HTTPRequest(self.get_url("/p/1"), 'GET', {'Cookie':'sid=%s' % (self.sid)}), self.stop) - response = self.wait() - self.assertTrue(response.body.find('/p/1/delete') > 0) + response = self.fetch("/p/1", method='GET', headers={'Cookie':'sid=%s' % self.sid}) + self.assertIn('/p/1/delete', response.body) def test_delete_button_only_works_for_owner(self): bill = User(name='bill', email='bill@mltshp.com', email_confirmed=1, is_paid=1) @@ -73,8 +64,7 @@ def test_delete_button_only_works_for_owner(self): bill.save() sid = self.sign_in("bill", "asdfasdf") - self.http_client.fetch(HTTPRequest(self.get_url("/p/1/delete"), 'POST', {'Cookie': "_xsrf=%s;sid=%s" % (self.xsrf, sid)}, "_xsrf=%s" % (self.xsrf)), self.stop) - response = self.wait() + response = self.post_url("/p/1/delete") sf = Sharedfile.get("id=1") self.assertEqual(sf.deleted, 0) @@ -94,7 +84,7 @@ def setUp(self): self.sid2 = self.sign_in('user', 'asdfasdf') self.sid = self.sign_in('admin', 'asdfasdf') - self.xsrf = self.get_xsrf() + self.xsrf = self.get_xsrf().decode("ascii") self.test_file1_path = os.path.abspath("test/files/1.png") self.test_file1_sha1 = Sourcefile.get_sha1_file_key(self.test_file1_path) @@ -106,18 +96,14 @@ def setUp(self): def test_raw_image_view_counts(self): response = self.upload_file(self.test_file1_path, self.test_file1_sha1, self.test_file1_content_type, 1, self.sid, self.xsrf) - request = HTTPRequest(self.get_url('/user/admin'), 'GET', {"Cookie":"sid=%s" % (self.sid2)}) - self.http_client.fetch(request, self.stop) - response = self.wait() - self.assertTrue(response.body.find("1.png") > 0) + response = self.fetch('/user/admin', method='GET', headers={"Cookie":"sid=%s" % self.sid2}) + self.assertIn("1.png", response.body) for i in range(0,10): if i % 2 == 0: - request = HTTPRequest(self.get_url('/r/1'), 'GET', {"Cookie":"sid=%s" % (self.sid2)}) + response = self.fetch('/r/1', method='GET', headers={"Cookie":"sid=%s" % self.sid2}) else: - request = HTTPRequest(self.get_url('/r/1'), 'GET') - 
self.http_client.fetch(request, self.stop) - response = self.wait() + response = self.fetch('/r/1', method='GET') imageviews = self.db.query("SELECT id, user_id, sharedfile_id, created_at from fileview") self.assertEqual(len(imageviews), 10) @@ -127,18 +113,14 @@ def test_raw_image_view_counts(self): def test_raw_load_with_extension(self): response = self.upload_file(self.test_file1_path, self.test_file1_sha1, self.test_file1_content_type, 1, self.sid, self.xsrf) - request = HTTPRequest(self.get_url('/user/admin'), 'GET', {"Cookie":"sid=%s" % (self.sid2)}) - self.http_client.fetch(request, self.stop) - response = self.wait() - self.assertTrue(response.body.find("1.png") > 0) + response = self.fetch('/user/admin', method='GET', headers={"Cookie":"sid=%s" % self.sid2}) + self.assertIn("1.png", response.body) for i in range(0,10): if i % 2 == 0: - request = HTTPRequest(self.get_url('/r/1.jpg'), 'GET', {"Cookie":"sid=%s" % (self.sid2)}) + response = self.fetch('/r/1.jpg', method='GET', headers={"Cookie":"sid=%s" % self.sid2}) else: - request = HTTPRequest(self.get_url('/r/1.jpg'), 'GET') - self.http_client.fetch(request, self.stop) - response = self.wait() + response = self.fetch('/r/1.jpg', method='GET') imageviews = self.db.query("SELECT id, user_id, sharedfile_id, created_at from fileview") self.assertEqual(len(imageviews), 10) @@ -153,8 +135,7 @@ def test_delete_image_raw_404s(self): self.assertEqual("1.png", sf.name) sf.delete() - self.http_client.fetch(self.get_url('/r/%s' % (sf.share_key)), self.stop) - response = self.wait() + response = self.fetch('/r/%s' % sf.share_key) self.assertEqual(response.error.code, 404) def test_raw_head_handler(self): @@ -163,10 +144,9 @@ def test_raw_head_handler(self): sf = Sharedfile.get("id=1") self.assertEqual("1.png", sf.name) - self.http_client.fetch(HTTPRequest(self.get_url('/r/%s' % (sf.share_key)), 'HEAD'), self.stop) - response = self.wait() + response = self.fetch('/r/%s' % sf.share_key, method='HEAD') self.assertEqual(response.headers['Content-Type'], 'image/png') - self.assertEqual(response.body, '') + self.assertEqual(response.body, b'') def test_delete_image_permalink_404s(self): response = self.upload_file(self.test_file1_path, self.test_file1_sha1, self.test_file1_content_type, 1, self.sid, self.xsrf) @@ -174,8 +154,7 @@ def test_delete_image_permalink_404s(self): self.assertEqual("1.png", sf.name) sf.delete() - self.http_client.fetch(self.get_url('/p/%s' % (sf.share_key)), self.stop, follow_redirects=False) - response = self.wait() + response = self.fetch('/p/%s' % sf.share_key, follow_redirects=False) self.assertEqual(response.error.code, 404) @@ -186,8 +165,7 @@ def setUp(self): self.user.set_password('asdfasdf') self.user.save() self.sid = self.sign_in('admin', 'asdfasdf') - self.xsrf = self.get_xsrf() - + self.xsrf = self.get_xsrf().decode("ascii") self.test_file1_path = os.path.abspath("test/files/1.png") self.test_file1_sha1 = Sourcefile.get_sha1_file_key(self.test_file1_path) @@ -200,8 +178,7 @@ def setUp(self): def test_oembed_response_json(self): with test_option("cdn_host", "cdn-service.com"): response = self.upload_file(self.test_file1_path, self.test_file1_sha1, self.test_file1_content_type, 1, self.sid, self.xsrf) - self.http_client.fetch(self.get_url("/services/oembed?url=http%3A//mltshp.com/p/1"), self.stop) - response = self.wait() + response = self.fetch("/services/oembed?url=http%3A//mltshp.com/p/1") j = json.loads(response.body) self.assertEqual(j['width'], 1) self.assertEqual(j['height'], 1) @@ -214,12 +191,11 @@ def 
test_oembed_response_json(self): sharedfile = Sharedfile.get('id = %s', 1) file_time_stamp = int(time.mktime(sharedfile.created_at.timetuple())) callback = "jsonp" + str(file_time_stamp) - self.http_client.fetch(self.get_url("/services/oembed?url=http%3A//mltshp.com/p/1&jsoncallback=" + callback), self.stop) - response = self.wait() + response = self.fetch("/services/oembed?url=http%3A//mltshp.com/p/1&jsoncallback=" + callback) j = json.loads(response.body.strip()[len(callback)+1:-1]) self.assertEqual(j['callback'], callback) - self.assertTrue(response.body.startswith(callback)) + self.assertTrue(response.body.startswith(callback.encode("ascii"))) def test_oembed_response_json_for_link(self): url = 'https://vimeo.com/20379529' @@ -232,8 +208,7 @@ def test_oembed_response_json_for_link(self): sharedfile.save() sharedfile = Sharedfile.get('id = %s', 1) file_time_stamp = int(time.mktime(sharedfile.created_at.timetuple())) - self.http_client.fetch(self.get_url("/services/oembed?url=http%3A//mltshp.com/p/1"), self.stop) - response = self.wait() + response = self.fetch("/services/oembed?url=http%3A//mltshp.com/p/1") j_response = json_decode(response.body) self.assertEqual(j_response['type'], "link") self.assertEqual(j_response['url'], url) @@ -241,8 +216,7 @@ def test_oembed_response_json_for_link(self): def test_oembed_malformed_requests(self): malformed_requests = ['http%3A//mltshp.com/p', 'http%3A//mltshp.com/p/', 'http%3A//mltshp.com/', 'http%3A//mltshp.com/r/1', 'NaN', 'http%3A//cnn.com/p/1'] for request in malformed_requests: - self.http_client.fetch(self.get_url("/services/oembed?url=%s" % request), self.stop) - response = self.wait() + response = self.fetch("/services/oembed?url=%s" % request) self.assertEqual(response.code, 404) def test_title_pulls_from_name_if_blank_or_null(self): @@ -252,33 +226,25 @@ def test_title_pulls_from_name_if_blank_or_null(self): def test_quick_edit_title(self): self.upload_file(self.test_file1_path, self.test_file1_sha1, self.test_file1_content_type, 1, self.sid, self.xsrf) - request = HTTPRequest(self.get_url('/p/1/quick-edit-title'), 'POST', {'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, "_xsrf=%s&title=%s" % (self.xsrf, url_escape("Monkey Business"))) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = self.post_url('/p/1/quick-edit-title', arguments={"title": "Monkey Business"}) j = json_decode(response.body) self.assertEqual(j['title'], 'Monkey Business') def test_quick_edit_description(self): self.upload_file(self.test_file1_path, self.test_file1_sha1, self.test_file1_content_type, 1, self.sid, self.xsrf) - request = HTTPRequest(self.get_url('/p/1/quick-edit-description'), 'POST', {'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, "_xsrf=%s&description=%s" % (self.xsrf, url_escape('Bilbo\nbaggins'))) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = self.post_url('/p/1/quick-edit-description', arguments={"description": "Bilbo\nbaggins"}) j = json_decode(response.body) self.assertEqual(j['description_raw'], 'Bilbo\nbaggins') def test_quick_edit_alt_text(self): self.upload_file(self.test_file1_path, self.test_file1_sha1, self.test_file1_content_type, 1, self.sid, self.xsrf) - request = HTTPRequest(self.get_url('/p/1/quick-edit-alt-text'), 'POST', {'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, "_xsrf=%s&alt_text=%s" % (self.xsrf, url_escape('A small person carrying a ring'))) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = 
self.post_url('/p/1/quick-edit-alt-text', arguments={"alt_text": 'A small person carrying a ring'}) j = json_decode(response.body) self.assertEqual(j['alt_text_raw'], 'A small person carrying a ring') def test_quick_edit_source_url(self): self.upload_file(self.test_file1_path, self.test_file1_sha1, self.test_file1_content_type, 1, self.sid, self.xsrf) - request = HTTPRequest(self.get_url('/p/1/quick-edit-source-url'), 'POST', {'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, "_xsrf=%s&source_url=%s" % (self.xsrf, url_escape('http://www.example.com/'))) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = self.post_url('/p/1/quick-edit-source-url', arguments={"source_url": 'http://www.example.com/'}) j = json_decode(response.body) self.assertEqual(j['source_url'], 'http://www.example.com/') @@ -291,31 +257,24 @@ def setUp(self): self.user.save() self.user_shake = self.user.shake() self.sid = self.sign_in('admin', 'asdfasdf') - self.xsrf = self.get_xsrf() - - def get_new_ioloop(self): - return tornado.ioloop.IOLoop.instance() + self.xsrf = self.get_xsrf().decode("ascii") def test_save_video_allows_link_to_vimeo_youtube(self): video_sites = {'vimeo':'https://vimeo.com/20379529', 'youtube':'https://www.youtube.com/watch?v=EmcMG4uxiHk', 'flickr':'https://www.flickr.com/photos/dahliablack/5497635343/'} - for site in video_sites.keys(): - request = HTTPRequest(self.get_url('/tools/save-video?url=%s' % (url_escape(video_sites[site]))), 'GET', {"Cookie":"sid=%s" % (self.sid)}) - self.http_client.fetch(request, self.stop) - response = self.wait() + for site in list(video_sites.keys()): + response = self.fetch('/tools/save-video?url=%s' % (url_escape(video_sites[site])), method='GET', headers={"Cookie":"sid=%s" % self.sid}) if site == 'vimeo': - self.assertTrue(response.body.find('value="https://vimeo.com/20379529">') > -1) + self.assertIn('value="https://vimeo.com/20379529">', response.body) elif site == 'youtube': - self.assertTrue(response.body.find('value="https://www.youtube.com/watch?v=EmcMG4uxiHk">') > -1) + self.assertIn('value="https://www.youtube.com/watch?v=EmcMG4uxiHk">', response.body) elif site == 'flickr': - self.assertTrue(response.body.find('value="https://www.flickr.com/photos/dahliablack/5497635343/">') > -1) + self.assertIn('value="https://www.flickr.com/photos/dahliablack/5497635343/">', response.body) def test_save_video_correctly_processes_various_youtube_urls(self): urls = ['https://www.youtube.com/watch?v=EmcMG4uxiHk&recommended=0', 'https://youtu.be/EmcMG4uxiHk', 'https://www.youtube.com/watch?v=EmcMG4uxiHk&feature=rec-LGOUT-real_rev-rn-1r-11-HM'] for url in urls: - request = HTTPRequest(self.get_url('/tools/save-video?url=%s' % (url_escape(url))), 'GET', {"Cookie":"sid=%s" % (self.sid)}) - self.http_client.fetch(request, self.stop) - response = self.wait() - self.assertTrue(response.body.find('value="https://www.youtube.com/watch?v=EmcMG4uxiHk">') > -1) + response = self.fetch('/tools/save-video?url=%s' % url_escape(url), method='GET', headers={"Cookie":"sid=%s" % self.sid}) + self.assertIn('value="https://www.youtube.com/watch?v=EmcMG4uxiHk">', response.body) def test_adding_video_makes_it_show_up_in_friends_shake(self): user2 = User(name='user2', email='user2@mltshp.com', email_confirmed=1, is_paid=1) @@ -324,9 +283,7 @@ def test_adding_video_makes_it_show_up_in_friends_shake(self): user2.subscribe(self.user.shake()) url = 'https://vimeo.com/20379529' - request = HTTPRequest(self.get_url('/tools/save-video'), 'POST', 
{"Cookie":"sid=%s;_xsrf=%s" % (self.sid, self.xsrf)}, "url=%s&_xsrf=%s&skip_s3=1" % (url_escape(url), self.xsrf)) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = self.post_url('/tools/save-video', arguments={"url": url, "skip_s3": "1"}) sfs = Sharedfile.from_subscriptions(user2.id) self.assertTrue(len(sfs) > 0) self.assertEqual(sfs[0].name , url) @@ -340,32 +297,24 @@ def setUp(self): self.user.save() self.user_shake = self.user.shake() self.sid = self.sign_in('admin', 'asdfasdf') - self.xsrf = self.get_xsrf() + self.xsrf = self.get_xsrf().decode("ascii") self.url = 'https://example.com/images/television.png?x=1' - self.source_url = url_escape('https://example.com/') + self.source_url = 'https://example.com/' self.description = "This is a multi-\nline\ndescription" self.alt_text = "This is some alt text\nit spans two lines." - def get_new_ioloop(self): - return tornado.ioloop.IOLoop.instance() - def test_picker_not_authed_displays_sign_in(self): - self.http_client.fetch(self.get_url('/tools/p?url=%s' % (self.url)), self.stop) - response = self.wait() - self.assertTrue(response.body.find('action="/sign-in/"') > 0) + response = self.fetch('/tools/p?url=%s' % self.url) + self.assertIn('action="/sign-in/"', response.body) def test_picker_get_displays_image_passed_in(self): - request = HTTPRequest(self.get_url('/tools/p?url=%s' % (self.url)), 'GET', {"Cookie":"sid=%s" % (self.sid)}) - self.http_client.fetch(request, self.stop) - response = self.wait() - self.assertTrue(response.body.find('hidden" name="url" value="%s"' % (self.url)) > 0) + response = self.fetch('/tools/p?url=%s' % self.url, method='GET', headers={"Cookie":"sid=%s" % self.sid}) + self.assertIn('hidden" name="url" value="%s"' % self.url, response.body) def test_picker_authenticated_stores_image(self): - request = HTTPRequest(self.get_url('/tools/p'), 'POST', {"Cookie":"_xsrf=%s;sid=%s" % (self.xsrf,self.sid)}, "_xsrf=%s&url=%s&title=boatmoatgoat&skip_s3=1" % (self.xsrf, self.url)) - self.http_client.fetch(request, self.stop) - response = self.wait() - self.assertTrue(response.body.find("ERROR") == -1) + response = self.post_url('/tools/p', arguments={"url": self.url, "title": "boatmoatgoat", "skip_s3": "1"}, raise_error=True) + self.assertNotIn("ERROR", response.body) sf = Sharedfile.get("id=1") self.assertEqual(sf.name, "television.png") self.assertEqual(sf.title, "boatmoatgoat") @@ -384,61 +333,45 @@ def test_picker_errors(self): host = "http://localhost:%s/tools/p?url=" % (self.get_http_port()) bad_urls = ["http://", "hps://sdlfkj.com/asdlkfj", "something.com/file.jpg"] for url in bad_urls: - request = HTTPRequest(self.get_url('/tools/p?url=%s' % (url)), 'GET', {"Cookie":"sid=%s" % (self.sid)}) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = self.fetch('/tools/p?url=%s' % url, method='GET', headers={"Cookie":"sid=%s" % self.sid}) self.assertTrue(response.error) def test_picker_stores_image_and_shakesharedfile(self): - request = HTTPRequest(self.get_url('/tools/p'), 'POST', {"Cookie":"_xsrf=%s;sid=%s" % (self.xsrf,self.sid)}, "_xsrf=%s&url=%s&title=boatmoatgoat&skip_s3=1" % (self.xsrf, self.url)) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = self.post_url('/tools/p', arguments={"url": self.url, "title": "boatmoatgoat", "skip_s3": "1"}) ssf = Shakesharedfile.get("sharedfile_id=1 and shake_id=%s", self.user_shake.id) self.assertTrue(ssf) def test_picker_stores_source_url(self): - request = 
HTTPRequest(self.get_url('/tools/p'), 'POST', {"Cookie":"_xsrf=%s;sid=%s" % (self.xsrf,self.sid)}, "_xsrf=%s&url=%s&title=boatmoatgoat&source_url=%s&skip_s3=1" % (self.xsrf, self.url, self.source_url)) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = self.post_url('/tools/p', arguments={"url": self.url, "title": "boatmoatgoat", "source_url": self.source_url, "skip_s3": "1"}) sf = Sharedfile.get("id=1") self.assertEqual(sf.source_url, 'https://example.com/') def test_picker_stores_description(self): - request = HTTPRequest(self.get_url('/tools/p'), 'POST', {"Cookie":"_xsrf=%s;sid=%s" % (self.xsrf,self.sid)}, "_xsrf=%s&url=%s&title=boatmoatgoat&description=%s&skip_s3=1" % (self.xsrf, url_escape(self.url), url_escape(self.description))) - self.http_client.fetch(request, self.stop) - response = self.wait() - self.assertTrue(response.body.find("ERROR") == -1) + response = self.post_url('/tools/p', arguments={"url": self.url, "title": "boatmoatgoat", "description": self.description, "skip_s3": "1"}) + self.assertNotIn("ERROR", response.body) sf = Sharedfile.get("id=1") self.assertEqual(sf.description, self.description) def test_picker_stores_alt_text(self): - request = HTTPRequest(self.get_url('/tools/p'), 'POST', {"Cookie":"_xsrf=%s;sid=%s" % (self.xsrf,self.sid)}, "_xsrf=%s&url=%s&title=boatmoatgoat&description=%s&alt_text=%s&skip_s3=1" % (self.xsrf, url_escape(self.url), url_escape(self.description), url_escape(self.alt_text))) - self.http_client.fetch(request, self.stop) - response = self.wait() - self.assertTrue(response.body.find("ERROR") == -1) + response = self.post_url('/tools/p', arguments={"url": self.url, "title": "boatmoatgoat", "description": self.description, "alt_text": self.alt_text, "skip_s3": "1"}) + self.assertNotIn("ERROR", response.body) sf = Sharedfile.get("id=1") self.assertEqual(sf.alt_text, self.alt_text) def test_picker_doesnt_see_filepile(self): - request = HTTPRequest(self.get_url('/tools/p?url=%s' % (url_escape("http://www.filepile.org/something/something"))), 'GET', {"Cookie":"sid=%s" % (self.sid)}) - self.http_client.fetch(request, self.stop) - response = self.wait() - self.assertEqual(response.body.find('source: http://www.filepile.org'), -1) + response = self.fetch('/tools/p?url=%s' % url_escape("http://www.filepile.org/something/something"), method='GET', headers={"Cookie":"sid=%s" % self.sid}) + self.assertEqual(response.body.find('source: http://www.filepile.org'.encode("ascii")), -1) def test_picker_strips_google_reader_url(self): - request = HTTPRequest(self.get_url('/tools/p?url=%s&source_url=%s' % (self.url, url_escape("http://laughingsquid.com/skeleton-light-painting/?utm_source=feedburner&utm_medium=feed&utm_campaign=Feed%3A laughingsquid %28Laughing Squid%29"))), 'GET', {"Cookie":"sid=%s" % (self.sid)}) - self.http_client.fetch(request, self.stop) - response = self.wait() - self.assertTrue(response.body.find("source: http://laughingsquid.com/skeleton-light-painting/") > 0) + response = self.fetch('/tools/p?url=%s&source_url=%s' % (self.url, url_escape("http://laughingsquid.com/skeleton-light-painting/?utm_source=feedburner&utm_medium=feed&utm_campaign=Feed%3A laughingsquid %28Laughing Squid%29")), method='GET', headers={"Cookie":"sid=%s" % self.sid}) + self.assertIn("source: http://laughingsquid.com/skeleton-light-painting/", response.body) def test_picker_strips_google_img_url(self): """ 
https://www.google.com/imgres?imgurl=http://cragganmorefarm.com/user/gimage/Baby-Ground-hogs_480_320.jpg&imgrefurl=http://cragganmorefarm.com/&usg=__kpRJbm_WBlbEnqDvfi3A2JuJ9Wg=&h=320&w=480&sz=33&hl=en&start=24&sig2=SyR_NSDovcsOYu5tJYtlig&zoom=1&tbnid=TT5jIOrb76kqbM:&tbnh=130&tbnw=173&ei=f5lJTdjbHoL6lweT2cU3&prev=/images%3Fq%3Dbaby%2Bgroundhogs%26um%3D1%26hl%3Den%26client%3Dfirefox-a%26sa%3DX%26rls%3Dorg.mozilla:en-US:official%26biw%3D1152%26bih%3D709%26tbs%3Disch:10%2C540&um=1&itbs=1&iact=rc&dur=326&oei=YZlJTYuYJsH78AaQh6msDg&esq=2&page=2&ndsp=24&ved=1t:429,r:8,s:24&tx=103&ty=85&biw=1152&bih=709 """ - request = HTTPRequest(self.get_url('/tools/p?url=%s&source_url=%s' % (self.url, url_escape("https://www.google.com/imgres?imgurl=http://cragganmorefarm.com/user/gimage/Baby-Ground-hogs_480_320.jpg&imgrefurl=http://cragganmorefarm.com/&usg=__kpRJbm_WBlbEnqDvfi3A2JuJ9Wg=&h=320&w=480&sz=33&hl=en&start=24&sig2=SyR_NSDovcsOYu5tJYtlig&zoom=1&tbnid=TT5jIOrb76kqbM:&tbnh=130&tbnw=173&ei=f5lJTdjbHoL6lweT2cU3&prev=/images%3Fq%3Dbaby%2Bgroundhogs%26um%3D1%26hl%3Den%26client%3Dfirefox-a%26sa%3DX%26rls%3Dorg.mozilla:en-US:official%26biw%3D1152%26bih%3D709%26tbs%3Disch:10%2C540&um=1&itbs=1&iact=rc&dur=326&oei=YZlJTYuYJsH78AaQh6msDg&esq=2&page=2&ndsp=24&ved=1t:429,r:8,s:24&tx=103&ty=85&biw=1152&bih=709"))), 'GET', {"Cookie":"sid=%s" % (self.sid)}) - self.http_client.fetch(request, self.stop) - response = self.wait() - self.assertTrue(response.body.find("source: http://cragganmorefarm.com/") > 0) + response = self.fetch('/tools/p?url=%s&source_url=%s' % (self.url, url_escape("https://www.google.com/imgres?imgurl=http://cragganmorefarm.com/user/gimage/Baby-Ground-hogs_480_320.jpg&imgrefurl=http://cragganmorefarm.com/&usg=__kpRJbm_WBlbEnqDvfi3A2JuJ9Wg=&h=320&w=480&sz=33&hl=en&start=24&sig2=SyR_NSDovcsOYu5tJYtlig&zoom=1&tbnid=TT5jIOrb76kqbM:&tbnh=130&tbnw=173&ei=f5lJTdjbHoL6lweT2cU3&prev=/images%3Fq%3Dbaby%2Bgroundhogs%26um%3D1%26hl%3Den%26client%3Dfirefox-a%26sa%3DX%26rls%3Dorg.mozilla:en-US:official%26biw%3D1152%26bih%3D709%26tbs%3Disch:10%2C540&um=1&itbs=1&iact=rc&dur=326&oei=YZlJTYuYJsH78AaQh6msDg&esq=2&page=2&ndsp=24&ved=1t:429,r:8,s:24&tx=103&ty=85&biw=1152&bih=709")), method='GET', headers={"Cookie":"sid=%s" % self.sid}) + self.assertIn("source: http://cragganmorefarm.com/", response.body) class FileUploadTests(BaseAsyncTestCase): def setUp(self): @@ -448,7 +381,7 @@ def setUp(self): self.user.save() self.user_shake = self.user.shake() self.sid = self.sign_in('admin', 'asdfasdf') - self.xsrf = self.get_xsrf() + self.xsrf = self.get_xsrf().decode("ascii") self.test_file1_path = os.path.abspath("test/files/1.png") self.test_file1_sha1 = Sourcefile.get_sha1_file_key(self.test_file1_path) @@ -461,6 +394,7 @@ def setUp(self): def test_file_upload_size_check(self): response = self.upload_test_file() sharedfile = Sharedfile.get('id=1') + self.assertIsNotNone(sharedfile) sourcefile = sharedfile.sourcefile() self.assertEqual(sourcefile.width, 640) self.assertEqual(sourcefile.height, 643) @@ -468,6 +402,7 @@ def test_file_upload_size_check(self): def test_file_upload_with_user(self): response = self.upload_test_file() shared_file = Sharedfile.get('id=1') + self.assertIsNotNone(shared_file) self.assertEqual(shared_file.name, "love.gif") self.assertEqual(shared_file.source_id, 1) self.assertEqual(shared_file.user_id, 1) @@ -478,8 +413,7 @@ def test_file_upload_user_missing(self): def test_file_upload_contents(self): response = self.upload_test_file() - self.http_client.fetch(self.get_url('/r/1'), self.stop) - 
response = self.wait() + response = self.fetch('/r/1') self.assertTrue(response.headers['X-Accel-Redirect'].startswith("/s3/originals/ac7180f6b038d5ae4f2297989e39a900995bb8fc?")) def test_uploading_file_creates_shared_shake_file(self): @@ -490,6 +424,7 @@ def test_uploading_file_creates_shared_shake_file(self): def test_uploading_to_a_shake_saves_shake_id_in_post(self): response = self.upload_test_file() all_posts = Post.all() + self.assertTrue(len(all_posts) > 0) self.assertEqual(1, all_posts[0].shake_id) def test_uploading_when_over_limit(self): @@ -506,14 +441,14 @@ def test_uploading_when_over_limit(self): self.user.stripe_plan_id = "mltshp-single" self.user.save() response = self.upload_test_file() - self.assertEqual(True, response.body.find('Single Scoop Account Limit') > -1) + self.assertEqual(True, response.body.find('Single Scoop Account Limit'.encode("ascii")) > -1) # but if they paid, we're good. self.user.stripe_plan_id = "mltshp-double" self.user.save() response = self.upload_test_file() - self.assertEqual(True, response.body.find('Single Scoop Account Limit') == -1) + self.assertEqual(True, response.body.find('Single Scoop Account Limit'.encode("ascii")) == -1) def test_uploading_file_creates_post_record_for_user(self): response = self.upload_test_file() @@ -533,7 +468,7 @@ def test_uploading_file_with_unsupported_content_type(self): """ response = self.upload_file(self.test_file1_path, self.test_file1_sha1, "image/tiff", 1, self.sid, self.xsrf) self.assertEqual(200, response.code) - self.assertTrue(response.body.find("We don't support that file type.") > 0) + self.assertIn("We don't support that file type.", response.body) posts = Post.all() self.assertEqual(len(posts), 0) diff --git a/test/SimpleTests.py b/test/SimpleTests.py index d9b24e4c..871e5736 100644 --- a/test/SimpleTests.py +++ b/test/SimpleTests.py @@ -2,7 +2,7 @@ import handlers import time from models import Sharedfile, Sourcefile, User -from base import BaseAsyncTestCase +from .base import BaseAsyncTestCase from handlers import base @@ -11,18 +11,15 @@ class TwoHundredTests(BaseAsyncTestCase): def test_sign_in(self): - self.http_client.fetch(self.get_url('/sign-in/'), self.stop) - response = self.wait() + response = self.fetch('/sign-in/') self.assertEqual(response.code, 200) def test_nonexistant(self): - self.http_client.fetch(self.get_url('/asdf/asdf'), self.stop) - response = self.wait() + response = self.fetch('/asdf/asdf') self.assertEqual(response.code, 404) def test_no_access_to_create_users(self): - self.http_client.fetch(self.get_url('/admin/create-users'), self.stop) - response = self.wait() + response = self.fetch('/admin/create-users') self.assertEqual(response.code, 403) def test_non_signed_in_permalink_view(self): @@ -33,11 +30,9 @@ def test_non_signed_in_permalink_view(self): sf = Sharedfile(source_id=src.id, user_id=1, name="some.jpg", title="some", share_key="1", content_type="image/jpg") sf.save() - self.http_client.fetch(self.get_url('/p/1'), self.stop) - response = self.wait() + response = self.fetch('/p/1') self.assertEqual(response.code, 200) def test_twitter_page(self): - self.http_client.fetch(self.get_url('/tools/twitter'), self.stop) - response = self.wait() + response = self.fetch('/tools/twitter') self.assertEqual(response.code, 200) diff --git a/test/SiteFunctionTests.py b/test/SiteFunctionTests.py index 15865ab9..f5555d2b 100644 --- a/test/SiteFunctionTests.py +++ b/test/SiteFunctionTests.py @@ -1,13 +1,7 @@ -from tornado.testing import AsyncHTTPTestCase -from torndb import 
Connection from tornado.options import options -from tornado.httpclient import HTTPRequest -import handlers -import base64 -import time import os -from base import BaseAsyncTestCase +from .base import BaseAsyncTestCase from models import Sourcefile, User @@ -19,7 +13,7 @@ def setUp(self): self.user.set_password('asdfasdf') self.user.save() self.sid = self.sign_in('admin', 'asdfasdf') - self.xsrf = self.get_xsrf() + self.xsrf = self.get_xsrf().decode("ascii") self.test_file1_path = os.path.abspath("test/files/1.png") self.test_file1_sha1 = Sourcefile.get_sha1_file_key(self.test_file1_path) @@ -32,7 +26,7 @@ def setUp(self): def test_account_images_page_works(self): response = self.upload_test_file() response = self.fetch_url('/user/admin') - self.assertTrue(response.body.find("/p/1") > 0) + self.assertIn("/p/1", response.body) def test_no_friends(self): response = self.fetch_url('/friends') @@ -52,7 +46,7 @@ def setUp(self): self.sid2 = self.sign_in("user2", "asdfasdf") self.sid = self.sign_in("admin", "asdfasdf") - self.xsrf = self.get_xsrf() + self.xsrf = self.get_xsrf().decode("ascii") self.test_file1_path = os.path.abspath("test/files/1.png") self.test_file1_sha1 = Sourcefile.get_sha1_file_key(self.test_file1_path) @@ -67,62 +61,52 @@ def test_cdn_image_view(self): self.test_file1_content_type, 1, self.sid, self.xsrf) options.use_cdn = True - request = HTTPRequest(self.get_url('/r/1'), 'GET', - {"Cookie": "sid=%s" % (self.sid), "Host": "s.mltshp.com"}, + response = self.fetch('/r/1', method='GET', + headers={"Cookie": "sid=%s" % self.sid, "Host": "s.mltshp.com"}, follow_redirects=False) - self.http_client.fetch(request, self.stop) - response = self.wait() options.use_cdn = False - self.assertEquals(response.headers['location'], 'https://mltshp-cdn.com/r/1') + self.assertEqual(response.headers['location'], 'https://mltshp-cdn.com/r/1') def test_cdn_image_view_with_width(self): response = self.upload_file(self.test_file1_path, self.test_file1_sha1, self.test_file1_content_type, 1, self.sid, self.xsrf) options.use_cdn = True - request = HTTPRequest(self.get_url('/r/1?width=550'), 'GET', - {"Cookie": "sid=%s" % (self.sid), "Host": "s.mltshp.com"}, + response = self.fetch('/r/1?width=550', method='GET', + headers={"Cookie": "sid=%s" % self.sid, "Host": "s.mltshp.com"}, follow_redirects=False) - self.http_client.fetch(request, self.stop) - response = self.wait() options.use_cdn = False - self.assertEquals(response.headers['location'], 'https://mltshp-cdn.com/r/1?width=550&dpr=1') + self.assertEqual(response.headers['location'], 'https://mltshp-cdn.com/r/1?width=550&dpr=1') def test_cdn_image_view_with_width_and_dpr(self): response = self.upload_file(self.test_file1_path, self.test_file1_sha1, self.test_file1_content_type, 1, self.sid, self.xsrf) options.use_cdn = True - request = HTTPRequest(self.get_url('/r/1?width=550&dpr=2'), 'GET', - {"Cookie": "sid=%s" % (self.sid), "Host": "s.mltshp.com"}, + response = self.fetch('/r/1?width=550&dpr=2', method='GET', + headers={"Cookie": "sid=%s" % self.sid, "Host": "s.mltshp.com"}, follow_redirects=False) - self.http_client.fetch(request, self.stop) - response = self.wait() options.use_cdn = False - self.assertEquals(response.headers['location'], 'https://mltshp-cdn.com/r/1?width=550&dpr=2') + self.assertEqual(response.headers['location'], 'https://mltshp-cdn.com/r/1?width=550&dpr=2') def test_raw_image_view_counts(self): response = self.upload_file(self.test_file1_path, self.test_file1_sha1, self.test_file1_content_type, 1, self.sid, self.xsrf) - 
request = HTTPRequest(self.get_url('/user/admin'), 'GET', - {"Cookie":"sid=%s" % (self.sid)}) - self.http_client.fetch(request, self.stop) - response = self.wait() - self.assertTrue(response.body.find("1.png") > -1) + response = self.fetch('/user/admin', method='GET', + headers={"Cookie":"sid=%s" % self.sid}) + self.assertIn("1.png", response.body) for i in range(0,10): if i % 2 == 0: # views by owner aren't counted - request = HTTPRequest(self.get_url('/r/1'), 'GET', - {"Cookie":"sid=%s" % (self.sid2)}) + response = self.fetch('/r/1', method='GET', + headers={"Cookie":"sid=%s" % self.sid2}) else: # views by non-owner are counted - request = HTTPRequest(self.get_url('/r/1'), 'GET') - self.http_client.fetch(request, self.stop) - response = self.wait() + response = self.fetch('/r/1', method='GET') imageviews = self.db.query("SELECT id, user_id, sharedfile_id, created_at from fileview") self.assertEqual(len(imageviews), 10) diff --git a/test/base.py b/test/base.py index 24b31ce1..933119c5 100644 --- a/test/base.py +++ b/test/base.py @@ -1,14 +1,11 @@ -import tornado.ioloop -from tornado.testing import AsyncHTTPTestCase, LogTrapTestCase -from tornado.httpclient import HTTPRequest +from tornado.testing import AsyncHTTPTestCase, ExpectLog from tornado.options import options -import Cookie -from lib.flyingcow import db as _db +import http.cookies from main import MltshpApplication +from lib.flyingcow import register_connection from tornado.escape import json_encode from routes import routes -import urllib -import shutil +import urllib.request, urllib.parse, urllib.error import os import time import base64 @@ -16,32 +13,41 @@ import hashlib import binascii import uuid +import logging from models import User, Sourcefile -class BaseAsyncTestCase(AsyncHTTPTestCase, LogTrapTestCase): +logger = logging.getLogger('mltshp.test') +logger.setLevel(logging.INFO) + +class BaseAsyncTestCase(AsyncHTTPTestCase, ExpectLog): sid = '' + def __init__(self, *args, **kwargs): + self.db = register_connection( + host=options.database_host, + name=options.database_name, + user=options.database_user, + password=options.database_password, + charset="utf8mb4") + super(BaseAsyncTestCase, self).__init__(*args, **kwargs) + def get_app(self): app_settings = MltshpApplication.app_settings() - application = MltshpApplication(routes, autoescape=None, autoreload=False, **app_settings) - self.db = self.create_database() - return application + return MltshpApplication(routes, autoescape=None, autoreload=False, + db=self.db, **app_settings) def setUp(self): super(BaseAsyncTestCase, self).setUp() self.start_time = time.time() - self.io_loop.make_current() #### this line + if options.database_name != "mltshp_testing": + raise Exception("Invalid database name for unit tests") + self.create_database() def get_httpserver_options(self): return {'no_keep_alive':False} - def tearDown(self): - super(BaseAsyncTestCase, self).tearDown() - diff = time.time() - self.start_time - self.db.close() - def sign_in(self, name, password): """ Authenticates the user and sets an instance variable to the user's @@ -59,19 +65,19 @@ def sign_out(self): self.sid = None def get_sid(self, response): - cookie = Cookie.BaseCookie(response.headers['Set-Cookie']) + cookie = http.cookies.BaseCookie(response.headers['Set-Cookie']) return cookie['sid'].value def get_xsrf(self): return binascii.b2a_hex(uuid.uuid4().bytes) def create_database(self): - start_time = int(time.time()) - db = _db.connection() + # start_time = int(time.time()) - db.execute("DROP database IF 
EXISTS %s" % (options.database_name)) - db.execute("CREATE database %s" % (options.database_name)) - db.execute("USE %s" % (options.database_name)) + # logger.info("Creating database from BaseAsyncTestCase...") + self.db.execute("DROP database IF EXISTS %s" % (options.database_name)) + self.db.execute("CREATE database %s" % (options.database_name)) + self.db.execute("USE %s" % (options.database_name)) f = open("setup/db-install.sql") load_query = f.read() f.close() @@ -79,10 +85,9 @@ def create_database(self): statements = load_query.split(";") for statement in statements: if statement.strip() != "": - db.execute(statement.strip()) - end_time = int(time.time()) - #print "Database reset took: %s" % (end_time - start_time) - return db + self.db.execute(statement.strip()) + # end_time = int(time.time()) + # print "Database reset took: %s" % (end_time - start_time) def upload_file(self, file_path, sha1, content_type, user_id, sid, xsrf, shake_id=None): """ @@ -94,10 +99,8 @@ def upload_file(self, file_path, sha1, content_type, user_id, sid, xsrf, shake_i shake_string = '' if shake_id: shake_string="shake_id=%s" % (shake_id) - request = HTTPRequest(self.get_url('/upload'), 'POST', {'Cookie':"_xsrf=%s;sid=%s" % (xsrf, sid)}, - "_xsrf=%s&file_name=%s&file_content_type=%s&file_sha1=%s&file_size=%s&file_path=%s&skip_s3=1&%s" % (xsrf,file_name, content_type, sha1, file_size, file_path, shake_string)) - self.http_client.fetch(request, self.stop) - return self.wait() + return self.fetch('/upload', method='POST', headers={'Cookie':"_xsrf=%s;sid=%s" % (xsrf, sid)}, + body="_xsrf=%s&file_name=%s&file_content_type=%s&file_sha1=%s&file_size=%s&file_path=%s&skip_s3=1&%s" % (xsrf, file_name, content_type, sha1, file_size, file_path, shake_string)) def upload_test_file(self, shake_id=None): arguments = {} @@ -114,14 +117,15 @@ def upload_test_file(self, shake_id=None): def create_signed_value(self, name, value): ### HERE!@! timestamp = str(int(time.time())) - value = base64.b64encode(value) + value = base64.b64encode(value.encode(encoding="utf-8")).decode("ascii") signature = self.cookie_signature(name, value, timestamp) value = "|".join([value, timestamp, signature]) return value def cookie_signature(self, *parts): - hash = hmac.new(options.cookie_secret, digestmod=hashlib.sha1) - for part in parts: hash.update(part) + hash = hmac.new(options.cookie_secret.encode(encoding="utf-8"), digestmod=hashlib.sha1) + for part in parts: + hash.update(type(part) == str and part.encode(encoding="utf-8") or part) return hash.hexdigest() def post_url(self, path, arguments={}, **kwargs): @@ -129,7 +133,7 @@ def post_url(self, path, arguments={}, **kwargs): Posts the URL, if self.sign_in() is called and user is logged in, the user's authenticated cookie will be passed along. """ - xsrf = self.get_xsrf() + xsrf = self.get_xsrf().decode("ascii") headers = {'Cookie':'sid=%s;_xsrf=%s' % (self.sid, xsrf)} if 'headers' in kwargs: headers.update(kwargs['headers']) @@ -137,7 +141,7 @@ def post_url(self, path, arguments={}, **kwargs): else: kwargs['headers'] = headers arguments['_xsrf'] = xsrf - body = urllib.urlencode(arguments) + body = urllib.parse.urlencode(arguments) return self.fetch(path, method="POST", body=body, **kwargs) def fetch_url(self, path, **kwargs): @@ -145,7 +149,7 @@ def fetch_url(self, path, **kwargs): Gets the URL, if self.sign_in() is called and user is logged in, the user's authenticated cookie will be passed along. 
""" - headers = {'Cookie':'sid=%s' % (self.sid)} + headers = {'Cookie': 'sid=%s' % self.sid} if 'headers' in kwargs: headers.update(kwargs['headers']) kwargs['headers'] = headers @@ -153,8 +157,17 @@ def fetch_url(self, path, **kwargs): kwargs['headers'] = headers return self.fetch(path, method='GET', **kwargs) - def assert_has_string(self, response, string_to_match): - self.assertTrue(response.body.find(string_to_match) > 0) + def assertNotIn(self, needle, haystack): + if isinstance(needle, str): + return super(BaseAsyncTestCase, self).assertNotIn(needle.encode("utf-8"), haystack) + else: + return super(BaseAsyncTestCase, self).assertNotIn(needle, haystack) + + def assertIn(self, needle, haystack): + if isinstance(needle, str): + return super(BaseAsyncTestCase, self).assertIn(needle.encode("utf-8"), haystack) + else: + return super(BaseAsyncTestCase, self).assertIn(needle, haystack) def assert_redirect(self, response, url): self.assertEqual(302, response.code) diff --git a/test/factories.py b/test/factories.py index cf0c7a75..eb0f9f1e 100644 --- a/test/factories.py +++ b/test/factories.py @@ -5,6 +5,7 @@ import models import lib.utilities + def sharedfile(user, **kwargs): """ Returns a sharedfile with a unique source file for the user. diff --git a/test/functional/account_settings_tests.py b/test/functional/account_settings_tests.py index d8f2bf53..9bf3f4d4 100644 --- a/test/functional/account_settings_tests.py +++ b/test/functional/account_settings_tests.py @@ -225,7 +225,7 @@ def test_unverified_email_notification_on_settings_page(self): self.user.save() response = self.fetch_url("/account/settings") - self.assertTrue(response.body.find('Please check your inbox for verification') > -1) + self.assertIn('Please check your inbox for verification', response.body) def test_resend_verification_email_changes_key(self): self.user.verify_email_token = "asdf" diff --git a/test/functional/account_tests.py b/test/functional/account_tests.py index 96870b36..f9b351a5 100644 --- a/test/functional/account_tests.py +++ b/test/functional/account_tests.py @@ -52,13 +52,13 @@ def test_email_not_confirmed_puts_notice_at_top(self): self.user.save() response = self.fetch_url('/') - self.assertTrue(response.body.find('Please visit settings to confirm your email!') > -1) + self.assertIn('Please visit settings to confirm your email!', response.body) response = self.fetch_url('/incoming') - self.assertTrue(response.body.find('Please visit settings to confirm your email!') > -1) + self.assertIn('Please visit settings to confirm your email!', response.body) response = self.fetch_url('/friends') - self.assertTrue(response.body.find('Please visit settings to confirm your email!') > -1) + self.assertIn('Please visit settings to confirm your email!', response.body) def test_quick_notifications(self): """ diff --git a/test/functional/api_tests.py b/test/functional/api_tests.py index 580cdfae..c6a8743d 100644 --- a/test/functional/api_tests.py +++ b/test/functional/api_tests.py @@ -2,21 +2,16 @@ import time from datetime import datetime, timedelta import random -import string -from urlparse import urlparse +from urllib.parse import urlparse from hashlib import md5, sha1 -import urllib +import urllib.request, urllib.parse, urllib.error import hmac import base64 import os -from tornado.testing import AsyncHTTPTestCase -from torndb import Connection -from tornado.httpclient import HTTPRequest from tornado.escape import url_escape, json_decode from tornado.httputil import HTTPHeaders from tornado.options import options 
-import handlers import test.base from models import Accesstoken, Apihit, App, Authorizationcode, Favorite, \ @@ -44,7 +39,7 @@ def setUp(self): self.user_a.set_password('asdfasdf') self.user_a.save() self.sign_in('admin', 'asdfasdf') - self.xsrf = self.get_xsrf() + self.xsrf = self.get_xsrf().decode("ascii") self.user_b = User(name='user2', email='user2@mltshp.com', email_confirmed=1, is_paid=1) self.user_b.set_password('asdfasdf') @@ -79,7 +74,7 @@ def test_authorization_code_request_accepts_authtime_redirect(self): response = api_request(self, self.get_url(authorization_url), headers={'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, unsigned=True) self.assertEqual(response.effective_url, self.get_url(authorization_url)) self.assertEqual(response.code, 200) - self.assertTrue('http://client.example.com/return' in response.body) + self.assertIn('http://client.example.com/return', response.body) def test_authorization_code_request_accepts_matching_redirect(self): authorization_url = '/api/authorize?response_type=code&client_id=%s&redirect_uri=http://client.example.com/return' % (self.app.key()) @@ -87,7 +82,7 @@ def test_authorization_code_request_accepts_matching_redirect(self): response = api_request(self, self.get_url(authorization_url), headers={'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, unsigned=True) self.assertEqual(response.effective_url, self.get_url(authorization_url)) self.assertEqual(response.code, 200) - self.assertTrue('http://client.example.com/return' in response.body) + self.assertIn('http://client.example.com/return', response.body) def test_authorization_code_request_error_on_mismatched_redirect(self): authorization_url = '/api/authorize?response_type=code&client_id=%s&redirect_uri=http://othersite.example.com/path' % (self.app.key()) @@ -219,7 +214,7 @@ def setUp(self): self.user_a.set_password('asdfasdf') self.user_a.save() self.sid = self.sign_in('admin', 'asdfasdf') - self.xsrf = self.get_xsrf() + self.xsrf = self.get_xsrf().decode("ascii") self.user_b = User(name='user2', email='user2@mltshp.com', email_confirmed=1, is_paid=1) @@ -339,7 +334,7 @@ def setUp(self): self.user_a.set_password('asdfasdf') self.user_a.save() self.sid = self.sign_in('admin', 'asdfasdf') - self.xsrf = self.get_xsrf() + self.xsrf = self.get_xsrf().decode("ascii") self.user_b = User(name='user2', email='user2@mltshp.com', email_confirmed=1, is_paid=1) @@ -414,7 +409,7 @@ def setUp(self): self.user_a.set_password('asdfasdf') self.user_a.save() self.sid = self.sign_in('admin', 'asdfasdf') - self.xsrf = self.get_xsrf() + self.xsrf = self.get_xsrf().decode("ascii") self.test_file1_path = os.path.abspath("test/files/1.png") self.test_file1_sha1 = Sourcefile.get_sha1_file_key(self.test_file1_path) @@ -448,10 +443,7 @@ def setUp(self): self.user_b.subscribe(self.user_a.shake()) def test_bad_signature_denied(self): - request = signed_request(self.access_token, self.get_url('/api/sharedfile/1')) - request.headers['Authorization'] = request.headers['Authorization'].replace('signature="', 'signature="asdf') - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, self.get_url('/api/sharedfile/1'), signature=b"asdf") self.assertTrue(response.code, 401) def test_unsigned_resource_query_denied(self): @@ -459,30 +451,23 @@ def test_unsigned_resource_query_denied(self): self.assertEqual(response.code, 401) def test_duplicate_nonce(self): - request = signed_request(self.access_token, self.get_url('/api/sharedfile/1')) - 
self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, self.get_url('/api/sharedfile/1'), nonce="abcd") self.assertEqual(response.code, 200) self.assertTrue('Www-Authenticate' not in response.headers) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, self.get_url('/api/sharedfile/1'), nonce="abcd") self.assertEqual(response.code, 401) self.assertTrue(response.headers['Www-Authenticate'].find("Duplicate nonce.") > 0) def test_rate_limit(self): - request = signed_request(self.ratelimited_access_token, self.get_url('/api/sharedfile/1')) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.ratelimited_access_token, self.get_url('/api/sharedfile/1')) self.assertEqual(response.code, 200) self.assertEqual(response.headers['X-RateLimit-Remaining'], '1') - request = signed_request(self.ratelimited_access_token, self.get_url('/api/sharedfile/1')) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.ratelimited_access_token, self.get_url('/api/sharedfile/1')) self.assertEqual(response.code, 400) self.assertEqual(response.headers['X-RateLimit-Remaining'], '0') @@ -494,9 +479,7 @@ def test_query_favorites(self): f.sharedfile_id = 1 f.save() - request = signed_request(self.access_token, self.get_url('/api/favorites')) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, self.get_url('/api/favorites')) j_response = json_decode(response.body) self.assertTrue('favorites' in j_response) @@ -509,17 +492,13 @@ def test_query_favorites_before_after(self): sf = self._post_to_shake(self.user_a) self.user_b.add_favorite(sf) - request = signed_request(self.access_token, self.get_url('/api/favorites')) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, self.get_url('/api/favorites')) original_favorites = json_decode(response.body) pivot_id = original_favorites['favorites'][5]['pivot_id'] after_pivot_ids = [fav['sharekey'] for fav in original_favorites['favorites'][0:5]] before_pivot_ids = [fav['sharekey'] for fav in original_favorites['favorites'][6:]] - request = signed_request(self.access_token, self.get_url('/api/favorites/before/%s' % pivot_id)) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, self.get_url('/api/favorites/before/%s' % pivot_id)) j_response = json_decode(response.body) self.assertTrue('favorites' in j_response) favs = j_response['favorites'] @@ -527,9 +506,7 @@ def test_query_favorites_before_after(self): pivot_ids = [fav['sharekey'] for fav in favs] self.assertEqual(before_pivot_ids, pivot_ids) - request = signed_request(self.access_token, self.get_url('/api/favorites/after/%s' % pivot_id)) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, self.get_url('/api/favorites/after/%s' % pivot_id)) j_response = json_decode(response.body) self.assertTrue('favorites' in j_response) favs = j_response['favorites'] @@ -538,9 +515,7 @@ def test_query_favorites_before_after(self): self.assertEqual(after_pivot_ids, pivot_ids) def test_query_file_resource(self): - request = signed_request(self.access_token, self.get_url('/api/sharedfile/1')) - self.http_client.fetch(request, 
self.stop) - response = self.wait() + response = signed_request(self, self.access_token, self.get_url('/api/sharedfile/1')) j_response = json_decode(response.body) self.assertEqual(j_response['name'], '1.png') self.assertEqual(j_response['user']['name'], 'admin') @@ -549,9 +524,7 @@ def test_query_sharedfile_resource(self): sf = Sharedfile.get('id=%s', 1) posted = sf.created_at.replace(microsecond=0, tzinfo=None).isoformat() + 'Z' - request = signed_request(self.access_token, self.get_url('/api/sharedfile/1')) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, self.get_url('/api/sharedfile/1')) j_response = json_decode(response.body) self.assertEqual(j_response['user']['name'], 'admin') self.assertEqual(j_response['posted_at'], posted) @@ -560,9 +533,7 @@ def test_query_sharedfile_resource(self): def test_can_update_own_sharedfile(self): user_b_file = self._post_to_shake(self.user_b) message_body = "description=newdescription&title=newtitle&alt_text=newalttext" - request = signed_request(self.access_token, self.get_url('/api/sharedfile/%s' % user_b_file.share_key), 'POST', {}, message_body) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, self.get_url('/api/sharedfile/%s' % user_b_file.share_key), 'POST', {}, message_body) self.assertEqual(response.code, 200) user_b_file = Sharedfile.get("id = %s", user_b_file.id) self.assertEqual('newdescription', user_b_file.description) @@ -572,9 +543,7 @@ def test_can_update_own_sharedfile(self): def test_can_not_update_anothers_sharedfile(self): user_a_file = self._post_to_shake(self.user_a) message_body = "description=newdescription&title=newtitle&alt_text=newalt" - request = signed_request(self.access_token, self.get_url('/api/sharedfile/%s' % user_a_file.share_key), 'POST', {}, message_body) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, self.get_url('/api/sharedfile/%s' % user_a_file.share_key), 'POST', {}, message_body) self.assertEqual(response.code, 403) user_a_file = Sharedfile.get("id = %s", user_a_file.id) self.assertNotEqual('newdescription', user_a_file.description) @@ -582,9 +551,7 @@ def test_can_not_update_anothers_sharedfile(self): self.assertNotEqual('newalt', user_a_file.alt_text) def test_query_user_name_resource(self): - request = signed_request(self.access_token, self.get_url('/api/user_name/admin')) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, self.get_url('/api/user_name/admin')) j_response = json_decode(response.body) self.assertEqual(j_response['name'], 'admin') self.assertEqual(j_response['profile_image_url'], 'https://mltshp-cdn.com/static/images/default-icon-venti.svg') @@ -594,25 +561,19 @@ def test_query_user_name_resource(self): self.assertEqual(2, len(j_response['shakes'])) def test_query_user_id_resource(self): - request = signed_request(self.access_token, self.get_url('/api/user_id/1')) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, self.get_url('/api/user_id/1')) j_response = json_decode(response.body) self.assertEqual(j_response['name'], 'admin') self.assertEqual(j_response['id'], 1) def test_query_user_resource(self): - request = signed_request(self.access_token, self.get_url('/api/user')) - self.http_client.fetch(request, self.stop) - response = 
self.wait() + response = signed_request(self, self.access_token, self.get_url('/api/user')) j_response = json_decode(response.body) self.assertEqual(j_response['name'], 'user2') self.assertEqual(j_response['id'], 2) def test_query_usershakes_resource(self): - request = signed_request(self.access_token, self.get_url('/api/shakes')) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, self.get_url('/api/shakes')) j_response = json_decode(response.body) self.assertEqual(len(j_response['shakes']), 3) user_shake, group_shake, group_shake_2 = j_response['shakes'] @@ -641,9 +602,7 @@ def test_query_usershakes_resource(self): self.assertEqual(group_shake_2['owner'], {'name': 'admin', 'id': 1, 'profile_image_url': "https://mltshp-cdn.com/static/images/default-icon-venti.svg"}) def test_query_friend_shake(self): - request = signed_request(self.access_token, self.get_url('/api/friends')) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, self.get_url('/api/friends')) j_response = json_decode(response.body) self.assertEqual(j_response['friend_shake'][0]['name'], '1.png') self.assertEqual(j_response['friend_shake'][0]['width'], 1) @@ -654,9 +613,7 @@ def test_query_friend_shake_shows_nsfw(self): sf = Sharedfile.get('id=%s', 1) sf.set_nsfw(self.user_a) - request = signed_request(self.access_token, self.get_url('/api/friends')) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, self.get_url('/api/friends')) j_response = json_decode(response.body) self.assertEqual(j_response['friend_shake'][0]['nsfw'], True) @@ -682,24 +639,18 @@ def test_query_friend_shake_before_after(self): sf.add_to_shake(self.user_a.shake()) files.append(sf) - request = signed_request(self.access_token, self.get_url('/api/friends/before/%s' % files[3].share_key)) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, self.get_url('/api/friends/before/%s' % files[3].share_key)) j_response = json_decode(response.body) self.assertEqual(4, len(j_response['friend_shake'])) - request = signed_request(self.access_token, self.get_url('/api/friends/after/%s' % files[3].share_key)) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, self.get_url('/api/friends/after/%s' % files[3].share_key)) j_response = json_decode(response.body) self.assertEqual(6, len(j_response['friend_shake'])) def test_shake_stream(self): user_shake = self.user_a.shake() url = self.get_url("/api/shakes/%s" % user_shake.id) - request = signed_request(self.access_token, url, 'GET', {}) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, url, 'GET', {}) j_response = json_decode(response.body) self.assertEqual(1, len(j_response['sharedfiles'])) @@ -710,9 +661,7 @@ def test_shake_stream_before(self): sharedfiles = user_shake.sharedfiles() self.assertEqual(3, len(sharedfiles)) url = self.get_url("/api/shakes/%s/before/%s" % (user_shake.id, sharedfiles[1].share_key)) - request = signed_request(self.access_token, url, 'GET', {}) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, url, 'GET', {}) j_response = json_decode(response.body) self.assertEqual(1, len(j_response['sharedfiles'])) 
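Every hunk in this file swaps the HTTPRequest / http_client.fetch(request, self.stop) / self.wait() sequence for a single call that goes through the test case's own fetch(). The sketch below shows the underlying Tornado testing pattern with an illustrative handler and test (not MLTSHP code): AsyncHTTPTestCase.fetch() drives the IOLoop itself and returns the HTTPResponse synchronously, so no explicit stop/wait is needed.

    import tornado.web
    from tornado.testing import AsyncHTTPTestCase

    class HelloHandler(tornado.web.RequestHandler):
        def get(self):
            self.write("hello")

    class FetchPatternTest(AsyncHTTPTestCase):
        def get_app(self):
            return tornado.web.Application([(r"/hello", HelloHandler)])

        def test_fetch_returns_response_directly(self):
            # fetch() runs the IOLoop for us and hands back the HTTPResponse.
            response = self.fetch("/hello", method="GET",
                                  headers={"Cookie": "sid=example"},
                                  follow_redirects=False)
            self.assertEqual(response.code, 200)
            self.assertEqual(response.body, b"hello")  # body is bytes on Python 3

    if __name__ == "__main__":
        import unittest
        unittest.main()
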
self.assertEqual(sharedfiles[2].share_key, j_response['sharedfiles'][0]['sharekey']) @@ -724,9 +673,7 @@ def test_shake_stream_after(self): sharedfiles = user_shake.sharedfiles() self.assertEqual(3, len(sharedfiles)) url = self.get_url("/api/shakes/%s/after/%s" % (user_shake.id, sharedfiles[1].share_key)) - request = signed_request(self.access_token, url, 'GET', {}) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, url, 'GET', {}) j_response = json_decode(response.body) self.assertEqual(1, len(j_response['sharedfiles'])) self.assertEqual(sharedfiles[0].share_key, j_response['sharedfiles'][0]['sharekey']) @@ -734,9 +681,7 @@ def test_shake_stream_after(self): def test_upload_file(self): message = "file_name=%s&file_content_type=%s&file_sha1=%s&file_size=%s&file_path=%s&skip_s3=1" % \ ("2.png", self.test_file1_content_type, self.test_file1_sha1, 69, self.test_file1_path) - request = signed_request(self.access_token, self.get_url('/api/upload'), 'POST', {}, message) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, self.get_url('/api/upload'), 'POST', {}, message) j_response = json_decode(response.body) self.assertEqual(j_response['name'], '2.png') self.assertEqual(j_response['share_key'], '2') @@ -744,9 +689,7 @@ def test_upload_file(self): def test_upload_file_with_title_description_alt_text(self): message = "file_name=%s&title=%s&description=%s&alt_text=%s&file_content_type=%s&file_sha1=%s&file_size=%s&file_path=%s&skip_s3=1" % \ ("2.png", "two", "a thing i wrote", "the number two", self.test_file1_content_type, self.test_file1_sha1, 69, self.test_file1_path) - request = signed_request(self.access_token, self.get_url('/api/upload'), 'POST', {}, message) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, self.get_url('/api/upload'), 'POST', {}, message) j_response = json_decode(response.body) sf = Sharedfile.get('share_key = %s', j_response['share_key']) self.assertEqual(sf.title, 'two') @@ -767,7 +710,7 @@ def test_magicfiles_resource(self): testfile_in_another_group = testfile_2.save_to_shake(self.user_a, self.group_shake_2) sid_b = self.sign_in('user2', 'asdfasdf') - xsrf_b = self.get_xsrf() + xsrf_b = self.get_xsrf().decode("ascii") response = self.upload_file(file_path=self.test_file1_path, sha1=self.test_file1_sha1, content_type=self.test_file1_content_type, user_id=2, sid=sid_b, xsrf=xsrf_b, shake_id=self.group_shake.id) @@ -786,9 +729,7 @@ def test_magicfiles_resource(self): calculate_likes(sf.id) # What's best? - request = signed_request(self.access_token, self.get_url('/api/magicfiles'), 'GET') - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, self.get_url('/api/magicfiles'), 'GET') j_response = json_decode(response.body) magicfiles = j_response['magicfiles'] @@ -798,22 +739,16 @@ def test_magicfiles_resource(self): self.assertEqual(pivot_ids, ['2', '1']) # Pagination check. 
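The signed_request helper these tests lean on (revised near the bottom of this file's diff) builds an HMAC-SHA1 over a normalized request string and base64-encodes the digest for the MAC Authorization header. Under Python 3 both the HMAC key and message must be bytes, and base64 returns bytes. A self-contained sketch of just that signing step, using a made-up secret and normalized string rather than MLTSHP's real normalization format:

    import base64
    import hashlib
    import hmac

    def mac_signature(consumer_secret: str, normalized_string: str) -> str:
        # hmac.new() requires bytes for both the key and the message on Python 3.
        digest = hmac.new(consumer_secret.encode("ascii"),
                          normalized_string.encode("ascii"),
                          hashlib.sha1).digest()
        # encodebytes() replaces the removed encodestring(); it returns bytes with a
        # trailing newline, so strip and decode before placing it in a header string.
        return base64.encodebytes(digest).strip().decode("ascii")

    if __name__ == "__main__":
        # Illustrative values only; not MLTSHP's actual secret or string format.
        sig = mac_signature("example-secret", "GET\nlocalhost\n80\n/api/sharedfile/1\n")
        print('Authorization: MAC token="example-key", signature="%s"' % sig)
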
- request = signed_request(self.access_token, self.get_url('/api/magicfiles/before/2'), 'GET') - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, self.get_url('/api/magicfiles/before/2'), 'GET') j_response = json_decode(response.body) self.assertEqual('1', j_response['magicfiles'][0]['pivot_id']) - request = signed_request(self.access_token, self.get_url('/api/magicfiles/after/1'), 'GET') - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, self.get_url('/api/magicfiles/after/1'), 'GET') j_response = json_decode(response.body) self.assertEqual('2', j_response['magicfiles'][0]['pivot_id']) def test_like_resource(self): - request = signed_request(self.access_token, self.get_url('/api/sharedfile/1/like'), 'POST', {}, '') - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, self.get_url('/api/sharedfile/1/like'), 'POST', {}, '') self.assertEqual(response.code, 200) j_response = json_decode(response.body) @@ -823,14 +758,10 @@ def test_like_resource(self): self.assertEqual(testfile.like_count, 1) def test_like_resource_already_liked(self): - request = signed_request(self.access_token, self.get_url('/api/sharedfile/1/like'), 'POST', {}, '') - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, self.get_url('/api/sharedfile/1/like'), 'POST', {}, '') self.assertEqual(response.code, 200) - request = signed_request(self.access_token, self.get_url('/api/sharedfile/1/like'), 'POST', {}, '') - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, self.get_url('/api/sharedfile/1/like'), 'POST', {}, '') self.assertEqual(response.code, 400) j_response = json_decode(response.body) @@ -841,17 +772,13 @@ def test_like_resource_already_liked(self): self.assertEqual(testfile.like_count, 1) def test_like_resource_not_found(self): - request = signed_request(self.access_token, self.get_url('/api/sharedfile/444Z/like'), 'POST') - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, self.get_url('/api/sharedfile/444Z/like'), 'POST') self.assertEqual(response.code, 404) j_response = json_decode(response.body) self.assertTrue('error' in j_response) def test_save_sharedfile(self): - request = signed_request(self.access_token, self.get_url('/api/sharedfile/1/save'), 'POST') - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, self.get_url('/api/sharedfile/1/save'), 'POST') self.assertEqual(response.code, 200) testfile = Sharedfile.get("id = %s", 1) self.assertEqual(1, testfile.save_count) @@ -859,27 +786,21 @@ def test_save_sharedfile(self): self.assertTrue(1, j_response['saves']) def test_save_nonexistant_sharedfile(self): - request = signed_request(self.access_token, self.get_url('/api/sharedfile/50000/save'), 'POST') - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, self.get_url('/api/sharedfile/50000/save'), 'POST') self.assertEqual(response.code, 404) def test_save_own_sharedfile(self): self._post_to_shake(self.user_b) sharedfile = self.user_b.shake().sharedfiles()[0] url = self.get_url('/api/sharedfile/%s/save' % sharedfile.share_key) - request = 
signed_request(self.access_token, url, 'POST', {}, '') - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, url, 'POST', {}, '') self.assertEqual(response.code, 400) def test_save_to_shake_with_valid_permissions(self): url = self.get_url('/api/sharedfile/1/save') body = "shake_id=%s" % self.group_shake.id self.assertEqual(0, len(self.group_shake.sharedfiles())) - request = signed_request(self.access_token, url, 'POST', body=body) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, url, 'POST', body=body) self.assertEqual(response.code, 200) self.assertEqual(1, len(self.group_shake.sharedfiles())) j_response = json_decode(response.body) @@ -890,9 +811,7 @@ def test_save_to_shake_with_no_permissions(self): shake = self.user_a.shake() body = "shake_id=%s" % shake.id original_num_sharedfiles = len(shake.sharedfiles()) - request = signed_request(self.access_token, url, 'POST', body=body) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = signed_request(self, self.access_token, url, 'POST', body=body) self.assertEqual(original_num_sharedfiles, len(shake.sharedfiles())) self.assertEqual(response.code, 403) @@ -999,19 +918,16 @@ def api_request(obj, url, unsigned=False, arguments={}, headers={}, method='GET' if method == 'GET': body = None elif arguments: - body = urllib.urlencode(arguments) + body = urllib.parse.urlencode(arguments) if unsigned: - request = HTTPRequest(url, method, headers, body) + return obj.fetch(url, method=method, headers=headers, body=body) else: - request = signed_request(obj.access_token, url, headers=headers, method=method, body=body) - obj.http_client.fetch(request, obj.stop) - response = obj.wait() - return response + return signed_request(obj, obj.access_token, url, headers=headers, method=method, body=body) -def signed_request(access_token, url, method='GET', headers={}, body=''): +def signed_request(obj, access_token, url, method='GET', headers={}, body='', signature=None, nonce=None): timestamp = int(time.mktime(datetime.utcnow().timetuple())) - nonce = md5("%s%s" % (str(timestamp), random.random())).hexdigest() + nonce = nonce or md5(("%s%s" % (str(timestamp), random.random())).encode("ascii")).hexdigest() parsed_url = urlparse(url) query_array = [] if parsed_url.query: @@ -1032,12 +948,12 @@ def signed_request(access_token, url, method='GET', headers={}, body=''): parsed_url.port, parsed_url.path, query_array) - digest = hmac.new(access_token.consumer_secret, normalized_string, sha1).digest() - signature = base64.encodestring(digest).strip() - authorization_string = 'MAC token="%s", timestamp="%s", nonce="%s", signature="%s"' % (access_token.consumer_key, str(int(timestamp)), nonce, signature) + digest = hmac.new(access_token.consumer_secret.encode("ascii"), normalized_string.encode("ascii"), sha1).digest() + signature = signature or base64.encodebytes(digest).strip() + authorization_string = 'MAC token="%s", timestamp="%s", nonce="%s", signature="%s"' % (access_token.consumer_key, str(int(timestamp)), nonce, signature.decode("ascii")) if headers: headers.add("Authorization", authorization_string) else: headers = HTTPHeaders({"Authorization": authorization_string}) - return HTTPRequest(url, method, headers, body) + return obj.fetch(url, method=method, headers=headers, body=body) diff --git a/test/functional/comment_favor_tests.py b/test/functional/comment_favor_tests.py index aeec50d0..beefbe21 
100644 --- a/test/functional/comment_favor_tests.py +++ b/test/functional/comment_favor_tests.py @@ -1,6 +1,4 @@ import time -from tornado.httpclient import HTTPRequest -from tornado.escape import url_escape import test.base import models @@ -21,10 +19,10 @@ def setUp(self): self.shf = models.Sharedfile(source_id=self.src.id, user_id=self.admin.id, name='shared.jpg', title='shared', share_key='1', content_type='image/jpg') self.shf.save() - print "person who owns the comment" + print("person who owns the comment") self.comment = models.Comment(user_id=self.user2.id, sharedfile_id=self.shf.id, body="just a comment") self.comment.save() - print self.comment.user_id + print(self.comment.user_id) self.sign_in('admin','asdfasdf') response = self.post_url('/p/%s/comment/%s/like?json=1' % (self.shf.share_key, self.comment.id)) @@ -60,7 +58,7 @@ def test_notification_created_for_like(self): for n in notifications: if n.type == 'comment_like': - print n.__dict__ + print(n.__dict__) # #self.assertEqual(len(notifications), 2) #self.assertEqual(notifications[1].sender_id, self.admin.id) diff --git a/test/functional/conversations_tests.py b/test/functional/conversations_tests.py index fefebf8a..23aed1d3 100644 --- a/test/functional/conversations_tests.py +++ b/test/functional/conversations_tests.py @@ -1,5 +1,4 @@ import time -from tornado.httpclient import HTTPRequest from tornado.escape import url_escape import test.base @@ -11,108 +10,84 @@ def setUp(self): self.admin = User(name='admin', email='admin@mltshp.com', email_confirmed=1, is_paid=1) self.admin.set_password('asdfasdf') self.admin.save() - + self.user2 = User(name='user2', email='user2@example.com', email_confirmed=1, is_paid=1) self.user2.set_password('asdfasdf') self.user2.save() - + self.sid = self.sign_in('user2', 'asdfasdf') - self.xsrf = self.get_xsrf() + self.xsrf = self.get_xsrf().decode("ascii") self.src = Sourcefile(width=1, height=1, file_key='asdf', thumb_key='qwer') self.src.save() self.shf = Sharedfile(source_id=self.src.id, user_id=self.admin.id, name='shared.jpg', title='shared', share_key='1', content_type='image/jpg') self.shf.save() - + def test_creating_a_new_comment_creates_a_conversation(self): - request = HTTPRequest(self.get_url('/p/%s/comment' % self.shf.share_key), 'POST', {'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, "body=%s&_xsrf=%s" % (url_escape("a comment"), self.xsrf)) - self.http_client.fetch(request, self.stop) - response = self.wait() - + response = self.fetch('/p/%s/comment' % self.shf.share_key, method='POST', headers={'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, body="body=%s&_xsrf=%s" % (url_escape("a comment"), self.xsrf)) + conversations = Conversation.all() self.assertEqual(len(conversations), 2) def test_creating_a_new_comment_does_not_create_a_duplicate_conversation(self): - request = HTTPRequest(self.get_url('/p/%s/comment' % self.shf.share_key), 'POST', {'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, "body=%s&_xsrf=%s" % (url_escape("a comment"), self.xsrf)) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = self.fetch('/p/%s/comment' % self.shf.share_key, method='POST', headers={'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, body="body=%s&_xsrf=%s" % (url_escape("a comment"), self.xsrf)) - request = HTTPRequest(self.get_url('/p/%s/comment' % self.shf.share_key), 'POST', {'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, "body=%s&_xsrf=%s" % (url_escape("a second comment"), self.xsrf)) - self.http_client.fetch(request, self.stop) - 
response = self.wait() + response = self.fetch('/p/%s/comment' % self.shf.share_key, method='POST', headers={'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, body="body=%s&_xsrf=%s" % (url_escape("a second comment"), self.xsrf)) conversations = Conversation.all() self.assertEqual(len(conversations), 2) - def test_another_user_commenting_will_update_the_files_activity_at(self): - request = HTTPRequest(self.get_url('/p/%s/comment' % self.shf.share_key), 'POST', {'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, "body=%s&_xsrf=%s" % (url_escape("a comment"), self.xsrf)) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = self.fetch('/p/%s/comment' % self.shf.share_key, method='POST', headers={'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, body="body=%s&_xsrf=%s" % (url_escape("a comment"), self.xsrf)) time.sleep(1) - sf = Sharedfile.get('id=%s', self.shf.id) activity_one = sf.activity_at - request = HTTPRequest(self.get_url('/p/%s/comment' % self.shf.share_key), 'POST', {'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, "body=%s&_xsrf=%s" % (url_escape("a second comment"), self.xsrf)) - self.http_client.fetch(request, self.stop) - response = self.wait() - + response = self.fetch('/p/%s/comment' % self.shf.share_key, method='POST', headers={'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, body="body=%s&_xsrf=%s" % (url_escape("a second comment"), self.xsrf)) sf = Sharedfile.get('id=%s', self.shf.id) activity_two = sf.activity_at self.assertTrue(activity_two > activity_one) - def test_deleting_a_file_will_set_conversation_to_muted(self): - request = HTTPRequest(self.get_url('/p/%s/comment' % self.shf.share_key), 'POST', {'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, "body=%s&_xsrf=%s" % (url_escape("a comment"), self.xsrf)) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = self.fetch('/p/%s/comment' % self.shf.share_key, method='POST', headers={'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, body="body=%s&_xsrf=%s" % (url_escape("a comment"), self.xsrf)) - request = HTTPRequest(self.get_url('/p/%s/comment' % self.shf.share_key), 'POST', {'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, "body=%s&_xsrf=%s" % (url_escape("a second comment"), self.xsrf)) - self.http_client.fetch(request, self.stop) - response = self.wait() + response = self.fetch('/p/%s/comment' % self.shf.share_key, method='POST', headers={'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, body="body=%s&_xsrf=%s" % (url_escape("a second comment"), self.xsrf)) self.shf.delete() - + conversations = Conversation.all() self.assertEqual(conversations[0].muted, 1) self.assertEqual(conversations[1].muted, 1) - - + def test_muting_conversation(self): """ Add a comment, which will create a conversation for the commenter (user2) and sharedfile owner (admin). - + When user2 tries to mute admin's conversation, it should fail and admin's conversation state will remain unchanged. When muting own converastion, "muted" flag should change to true. - + Contingent on user2 being signed in. 
(see setUp) """ comment = Comment(sharedfile_id=self.shf.id, user_id=self.user2.id, body='test') comment.save() - + admin_conversation = Conversation.get('user_id = %s', self.admin.id) user2_conversation = Conversation.get('user_id = %s', self.user2.id) self.assertEqual(admin_conversation.muted, 0) self.assertEqual(user2_conversation.muted, 0) - - request = HTTPRequest(self.get_url('/conversations/%s/mute' % admin_conversation.id), 'POST', {'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, "_xsrf=%s" % (self.xsrf)) - self.http_client.fetch(request, self.stop) - response = self.wait() - request = HTTPRequest(self.get_url('/conversations/%s/mute' % user2_conversation.id), 'POST', {'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, "_xsrf=%s" % (self.xsrf)) - self.http_client.fetch(request, self.stop) - response = self.wait() - + + response = self.fetch('/conversations/%s/mute' % admin_conversation.id, method='POST', headers={'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, body="_xsrf=%s" % self.xsrf) + response = self.fetch('/conversations/%s/mute' % user2_conversation.id, method='POST', headers={'Cookie':'_xsrf=%s;sid=%s' % (self.xsrf, self.sid)}, body="_xsrf=%s" % self.xsrf) + # refetch from DB, and verify mute flags remain 0. admin_conversation = Conversation.get('user_id = %s', self.admin.id) user2_conversation = Conversation.get('user_id = %s', self.user2.id) self.assertEqual(admin_conversation.muted, 0) self.assertEqual(user2_conversation.muted, 1) - - + def test_order_of_conversations_changes_when_new_comment_is_created(self): pass diff --git a/test/functional/create_account_tests.py b/test/functional/create_account_tests.py index 43775f5a..d1e55947 100644 --- a/test/functional/create_account_tests.py +++ b/test/functional/create_account_tests.py @@ -10,9 +10,9 @@ def test_username_too_long(self): arguments = self._valid_arguments() arguments['name'] = 'asdfasdfasdfasdfasdfasdfasdfasd' response = self.post_url('/create-account', arguments) - self.assert_has_string( - response, - 'Username should be less than 30 characters.' + self.assertIn( + 'Username should be less than 30 characters.', + response.body ) def test_username_contains_invalid_chars(self): @@ -28,9 +28,9 @@ def test_username_contains_invalid_chars(self): arguments = self._valid_arguments() arguments['name'] = name response = self.post_url('/create-account', arguments) - self.assert_has_string( - response, - 'Username can only contain letters, numbers' + self.assertIn( + 'Username can only contain letters, numbers', + response.body ) def test_username_exists(self): @@ -45,27 +45,27 @@ def test_username_exists(self): arguments = self._valid_arguments() arguments['name'] = existant_user.name response = self.post_url('/create-account', arguments) - self.assert_has_string( - response, - 'Username has already been taken.' + self.assertIn( + 'Username has already been taken.', + response.body ) def test_username_is_blank(self): arguments = self._valid_arguments() arguments['name'] = "" response = self.post_url('/create-account', arguments) - self.assert_has_string( - response, - 'You definitely need a username' + self.assertIn( + 'You definitely need a username', + response.body ) def test_email_is_blank(self): arguments = self._valid_arguments() arguments['email'] = "" response = self.post_url('/create-account', arguments) - self.assert_has_string( - response, - 'You\'ll need an email to verify your account.' 
+ self.assertIn( + 'You\'ll need an email to verify your account.', + response.body ) def test_email_already_exists(self): @@ -80,18 +80,18 @@ def test_email_already_exists(self): arguments = self._valid_arguments() arguments['email'] = existant_user.email response = self.post_url('/create-account', arguments) - self.assert_has_string( - response, - 'This email already has an account.' + self.assertIn( + 'This email already has an account.', + response.body ) def test_email_aint_right(self): arguments = self._valid_arguments() arguments['email'] = "admin-torresdz.org" response = self.post_url('/create-account', arguments) - self.assert_has_string( - response, - 'Email doesn\'t look right.' + self.assertIn( + 'Email doesn\'t look right.', + response.body ) def test_bad_passwords(self): @@ -100,9 +100,9 @@ def test_bad_passwords(self): arguments['password'] = bad_password arguments['password_again'] = bad_password response = self.post_url('/create-account', arguments) - self.assert_has_string( - response, - 'That is not a good password.' + self.assertIn( + 'That is not a good password.', + response.body ) def test_successful_signup(self): diff --git a/test/functional/home_tests.py b/test/functional/home_tests.py index 57a97d05..ab4bfe9e 100644 --- a/test/functional/home_tests.py +++ b/test/functional/home_tests.py @@ -21,7 +21,7 @@ def test_not_logged_in(self): self.sign_out() response = self.fetch_url('/') self.assertEqual(200, response.code) - self.assertTrue(response.body.find('Save, Share & Discover') > -1) + self.assertIn('Save, Share & Discover', response.body) def test_home_page_no_sharedfiles(self): """ @@ -62,7 +62,6 @@ def test_home_page_with_friends(self): self.assertEqual(response.code, 200) self.assertEqual(1, len(Bookmark.all())) - def test_paginating_home_stream(self): """ Test going back and forward in the timeline using /before/{share_key} @@ -79,26 +78,31 @@ def test_paginating_home_stream(self): saved_files = [] for x in range(15): - sf = test.factories.sharedfile(user) + sf = test.factories.sharedfile(user, name=f"sharedfile_{x}.png") sf.add_to_shake(user.shake()) saved_files.append(sf) response = self.fetch_url('/before/%s' % saved_files[5].share_key) self.assertEqual(response.code, 200) - self.assertTrue(response.body.find('sharedfile_0.png')) - self.assertTrue(response.body.find('sharedfile_1.png')) - self.assertTrue(response.body.find('sharedfile_2.png')) - self.assertTrue(response.body.find('sharedfile_3.png')) - self.assertTrue(response.body.find('sharedfile_4.png')) - self.assertTrue(response.body.find('sharedfile_5.png')) - self.assertEqual(-1, response.body.find('sharedfile_6.png')) - response = self.fetch_url('/after/%s' % saved_files[10].share_key) + self.assertIn('sharedfile_0.png', response.body) + self.assertIn('sharedfile_1.png', response.body) + self.assertIn('sharedfile_2.png', response.body) + self.assertIn('sharedfile_3.png', response.body) + self.assertIn('sharedfile_4.png', response.body) + self.assertNotIn('sharedfile_5.png', response.body) + response = self.fetch_url('/after/%s' % saved_files[4].share_key) self.assertEqual(response.code, 200) - self.assertTrue(response.body.find('sharedfile_11.png')) - self.assertTrue(response.body.find('sharedfile_12.png')) - self.assertTrue(response.body.find('sharedfile_13.png')) - self.assertTrue(response.body.find('sharedfile_14.png')) - self.assertEqual(-1, response.body.find('sharedfile_10.png')) + self.assertIn('sharedfile_5.png', response.body) + self.assertIn('sharedfile_6.png', response.body) + 
self.assertIn('sharedfile_7.png', response.body) + self.assertIn('sharedfile_8.png', response.body) + self.assertIn('sharedfile_9.png', response.body) + self.assertIn('sharedfile_10.png', response.body) + self.assertIn('sharedfile_11.png', response.body) + self.assertIn('sharedfile_12.png', response.body) + self.assertIn('sharedfile_13.png', response.body) + self.assertIn('sharedfile_14.png', response.body) + self.assertNotIn('sharedfile_4.png', response.body) def test_home_page_non_user_request(self): """ @@ -132,6 +136,3 @@ def test_home_page_non_user_request(self): response = self.fetch_url('/friends', ) self.assertEqual(response.code, 200) self.assertEqual(1, len(Bookmark.all())) - - - diff --git a/test/functional/image_like_tests.py b/test/functional/image_like_tests.py index b34c0462..c16c0a57 100644 --- a/test/functional/image_like_tests.py +++ b/test/functional/image_like_tests.py @@ -132,7 +132,7 @@ def test_cannot_like_own(self): json_response = json.loads(response.body) self.assertEqual(response.code, 200) - self.assertTrue(json_response.has_key('error')) + self.assertTrue('error' in json_response) favorite = Favorite.get('user_id= %s and sharedfile_id = %s', self.admin.id, sharedfile.id) self.assertFalse(favorite) diff --git a/test/functional/image_save_tests.py b/test/functional/image_save_tests.py index b0f7706a..a8056021 100644 --- a/test/functional/image_save_tests.py +++ b/test/functional/image_save_tests.py @@ -1,5 +1,5 @@ import json -from urlparse import urlparse +from urllib.parse import urlparse import test.base import lib.utilities diff --git a/test/functional/payments_tests.py b/test/functional/payments_tests.py index 248ec7e8..e4827b85 100644 --- a/test/functional/payments_tests.py +++ b/test/functional/payments_tests.py @@ -23,13 +23,13 @@ def setUp(self): def test_no_subscription_sees_subscription_button(self): response = self.fetch_url("/account/settings") - self.assertTrue(response.body.find('You are currently using a free account.') > -1) + self.assertIn('You are currently using a free account.', response.body) def test_subscriber_sees_subscription(self): self.user.is_paid = 1 self.user.save() response = self.fetch_url('/account/settings') - self.assertTrue(response.body.find('Your recent payment history:') > -1) + self.assertIn('Your recent payment history:', response.body) def test_subcription_webhook_sets_paid_status(self): self.user.stripe_customer_id = "cus_AHgKQnggJErzEA" @@ -116,7 +116,7 @@ def test_subcription_webhook_sets_paid_status(self): } """ response = self.fetch("/webhooks/stripe", method="POST", body=body) - self.assertEquals(response.body, "OK") + self.assertEqual(response.body, "OK".encode("ascii")) user = User.get(self.user.id) self.assertEqual(user.is_paid, 1) diff --git a/test/functional/readonly_tests.py b/test/functional/readonly_tests.py index f0594bb0..5dfb1036 100644 --- a/test/functional/readonly_tests.py +++ b/test/functional/readonly_tests.py @@ -15,7 +15,7 @@ def setUp(self): options.readonly = False self.admin = test.factories.user() self.sid = self.sign_in("admin", "password") - self.xsrf = self.get_xsrf() + self.xsrf = self.get_xsrf().decode("ascii") self.test_file1_path = os.path.abspath("test/files/1.png") self.test_file1_sha1 = Sourcefile.get_sha1_file_key(self.test_file1_path) @@ -40,17 +40,17 @@ def test_user_model_wont_save(self): def test_uploads_return_403(self): options.readonly = True - response = self.upload_file(self.test_file1_path, self.test_file1_sha1, self.test_file1_content_type, 1, self.sid, self.get_xsrf()) + 
response = self.upload_file(self.test_file1_path, self.test_file1_sha1, self.test_file1_content_type, 1, self.sid, self.get_xsrf().decode("ascii")) self.assertEqual(response.code, 403) def test_no_post_button(self): # when site is writable, "New Post" button is present: response = self.fetch_url('/') self.assertEqual(200, response.code) - self.assertTrue(response.body.find('New Post') > -1) + self.assertIn('New Post', response.body) # when site is readonly, "New Post" button is suppressed: options.readonly = True response = self.fetch_url('/') self.assertEqual(200, response.code) - self.assertTrue(response.body.find('New Post') == -1) + self.assertNotIn('New Post', response.body) diff --git a/test/functional/request_invitation_tests.py b/test/functional/request_invitation_tests.py index 0f95f09f..bae166ad 100644 --- a/test/functional/request_invitation_tests.py +++ b/test/functional/request_invitation_tests.py @@ -54,7 +54,7 @@ def test_posting_request_doesnt_recreate_request(self): def test_no_button_shows_when_request_has_been_made(self): response = self.post_url('/shake/derp/request_invitation?json=1') response = self.fetch_url('/derp') - self.assertTrue(response.body.find('/request_invitation') == -1) + self.assertNotIn('/request_invitation', response.body) def test_shake_manager_gets_notification_created(self): response = self.post_url('/shake/derp/request_invitation?json=1') @@ -110,5 +110,5 @@ def test_already_a_member_do_not_see_request_button(self): self.shake.add_manager(self.user) response = self.fetch_url('/derp') - self.assertTrue(response.body.find('join this shake') == -1) + self.assertNotIn('join this shake', response.body) diff --git a/test/functional/shake_crud_tests.py b/test/functional/shake_crud_tests.py index 02205bd5..e3567089 100644 --- a/test/functional/shake_crud_tests.py +++ b/test/functional/shake_crud_tests.py @@ -43,7 +43,7 @@ def test_shake_create_error_on_name(self): } response = self.post_url('/shake/create', arguments=arguments) self.assertEqual(response.effective_url, self.get_url('/shake/create')) - self.assertTrue(response.body.find('That URL is not valid.') > -1) + self.assertIn('That URL is not valid.', response.body) def test_shake_create_error_on_title(self): """ @@ -58,7 +58,7 @@ def test_shake_create_error_on_title(self): } response = self.post_url('/shake/create', arguments=arguments) self.assertEqual(response.effective_url, self.get_url('/shake/create')) - self.assertTrue(response.body.find("Title can't be blank.") > -1) + self.assertIn("Title can't be blank.", response.body) def test_shake_update_description(self): @@ -99,7 +99,7 @@ def test_shake_duplicate_error(self): 'title' : 'got one' } response = self.post_url('/shake/create', arguments=arguments) - self.assertTrue(response.body.find('That URL is already taken.') > -1) + self.assertIn('That URL is already taken.', response.body) def test_subscribe_unsubscribe_works(self): user_a = User(name='user_a', email='user_a@example.com', email_confirmed=1, is_paid=1, stripe_plan_id="mltshp-double") @@ -142,7 +142,7 @@ def test_cannot_create_shake_if_not_a_plus_member(self): 'title' : 'Shake Test', } response = self.post_url('/shake/create', arguments=arguments) - self.assertTrue(response.body.find('Create up to 100 group shakes') > -1) + self.assertIn('Create up to 100 group shakes', response.body) def test_create_shake_page_works_for_plus_members(self): user_a = User(name='user_a', email='user_a@example.com', email_confirmed=1, diff --git a/test/functional/tag_tests.py 
b/test/functional/tag_tests.py index e00a25d8..a2710a36 100644 --- a/test/functional/tag_tests.py +++ b/test/functional/tag_tests.py @@ -1,5 +1,5 @@ import json -from urlparse import urlparse +from urllib.parse import urlparse import test.base import lib.utilities @@ -52,7 +52,7 @@ def test_not_signedin_user_cant_creat_tag(self): response = self.post_url('/p/%s/create_tag' % self.sharedfile.share_key, {'tag':'asdf'}) - print self.response.code + print(self.response.code) all_tags = Tag.all() all_tag_shared_files = TagSharedfile.all() diff --git a/test/functional/verify_email_tests.py b/test/functional/verify_email_tests.py index 8cde3282..d2fc7bf9 100644 --- a/test/functional/verify_email_tests.py +++ b/test/functional/verify_email_tests.py @@ -8,7 +8,7 @@ class VerifyEmailTests(test.base.BaseAsyncTestCase): def test_verify_key_success(self): h = hashlib.sha1() - h.update("%s" % time.time()) + h.update(("%s" % time.time()).encode('ascii')) verify_token = h.hexdigest() existant_user = User( diff --git a/test/functional/voucher_tests.py b/test/functional/voucher_tests.py index cee2b99a..1013d4c0 100644 --- a/test/functional/voucher_tests.py +++ b/test/functional/voucher_tests.py @@ -1,6 +1,6 @@ import random import mock -import cStringIO +import io from datetime import datetime, timedelta import tornado.httpclient @@ -49,7 +49,7 @@ def test_create_account_has_code_field(self): self.sign_out() response = self.fetch_url("/create-account") self.assertEqual(200, response.code) - self.assertTrue(response.body.find("Discount code:") > -1) + self.assertIn("Discount code:", response.body) def test_create_account_with_bad_voucher(self): """ @@ -60,7 +60,7 @@ def test_create_account_with_bad_voucher(self): arguments["key"] = "ABCDEFGHIJKL" response = self.post_url("/create-account", arguments=arguments) self.assertEqual(200, response.code) - self.assertTrue(response.body.find("Invalid discount code") > -1) + self.assertIn("Invalid discount code", response.body) def test_create_account_with_unrecognized_voucher(self): """ @@ -71,7 +71,7 @@ def test_create_account_with_unrecognized_voucher(self): arguments["key"] = "foobar" response = self.post_url("/create-account", arguments=arguments) self.assertEqual(200, response.code) - self.assertTrue(response.body.find("Invalid discount code") > -1) + self.assertIn("Invalid discount code", response.body) def test_create_account_with_good_voucher(self): """ @@ -87,10 +87,10 @@ def test_create_account_with_good_voucher(self): self.post_url("/create-account", arguments=arguments) self.sign_in(arguments["name"], arguments["password"]) response = self.fetch_url("/confirm-account") - self.assertTrue(response.body.find( - "Hello, %s!" % arguments["name"]) > -1) + self.assertIn( + ("Hello, %s!" % arguments["name"]), response.body) response = self.fetch_url("/account/settings") - self.assertTrue(response.body.find("5 Years") > -1) + self.assertIn("5 Years", response.body) def test_settings_page_with_credit(self): """ @@ -98,14 +98,14 @@ def test_settings_page_with_credit(self): credit on their account settings. """ response = self.fetch_url("/account/settings") - self.assertTrue(response.body.find("5 Years") > -1) + self.assertIn("5 Years", response.body) def test_redeem_page_with_pro_user(self): """ A pro member shouldn't have access to the redeem page. 
""" response = self.fetch_url("/account/redeem") - self.assertTrue(response.body.find("Redeem a Coupon") == -1) + self.assertNotIn("Redeem a Coupon", response.body) def test_redeem_voucher_with_bad_voucher(self): self.sign_out() @@ -113,13 +113,13 @@ def test_redeem_voucher_with_bad_voucher(self): self.sign_in(user.name, "password") response = self.fetch_url("/account/settings") # verify this account is currently free - self.assertTrue(response.body.find("You are currently using a free account.") > -1) + self.assertIn("You are currently using a free account.", response.body) arguments = { "key": "abc123" } response = self.post_url("/account/redeem", arguments) - self.assertTrue(response.body.find("Invalid") > -1) + self.assertIn("Invalid", response.body) def test_redeem_voucher_with_good_voucher(self): self.sign_out() @@ -129,7 +129,7 @@ def test_redeem_voucher_with_good_voucher(self): self.sign_in(user.name, "password") response = self.fetch_url("/account/settings") # verify this account is currently free - self.assertTrue(response.body.find("You are currently using a free account.") > -1) + self.assertIn("You are currently using a free account.", response.body) arguments = { "key": "unclaimed" @@ -137,22 +137,22 @@ def test_redeem_voucher_with_good_voucher(self): # this will post and redirect to the settings page which should # then reflect that we are a paid user with 5 years of credit response = self.post_url("/account/redeem", arguments) - self.assertTrue(response.body.find("5 Years") > -1) + self.assertIn("5 Years", response.body) payments = PaymentLog.where("user_id=%s", user.id) - self.assertEquals(len(payments), 1) - self.assertEquals(payments[0].operation, "redeem") - self.assertEquals(payments[0].status, "credit") - self.assertEquals(payments[0].reference_id, str(self.promotion.id)) - self.assertEquals(payments[0].transaction_id, arguments['key']) - self.assertEquals(payments[0].buyer_email, user.email) - self.assertEquals(payments[0].buyer_name, user.name) - # self.assertEquals(payments[0].next_transaction_date, ) + self.assertEqual(len(payments), 1) + self.assertEqual(payments[0].operation, "redeem") + self.assertEqual(payments[0].status, "credit") + self.assertEqual(payments[0].reference_id, str(self.promotion.id)) + self.assertEqual(payments[0].transaction_id, arguments['key']) + self.assertEqual(payments[0].buyer_email, user.email) + self.assertEqual(payments[0].buyer_name, user.name) + # self.assertEqual(payments[0].next_transaction_date, ) voucher = Voucher.get("claimed_by_user_id=%s", user.id) - self.assertEquals(voucher.promotion_id, self.promotion.id) - self.assertEquals(voucher.claimed_by_user_id, user.id) - self.assertEquals(voucher.offered_by_user_id, self.admin.id) + self.assertEqual(voucher.promotion_id, self.promotion.id) + self.assertEqual(voucher.claimed_by_user_id, user.id) + self.assertEqual(voucher.offered_by_user_id, self.admin.id) def test_active_promotion_list(self): promotions = Promotion.active() diff --git a/test/unit/apihit_tests.py b/test/unit/apihit_tests.py index 6b173d6c..77d97660 100644 --- a/test/unit/apihit_tests.py +++ b/test/unit/apihit_tests.py @@ -2,7 +2,7 @@ from functools import wraps from models import Apihit -from base import BaseTestCase +from .base import BaseTestCase class ApihitModelTests(BaseTestCase): diff --git a/test/unit/base.py b/test/unit/base.py index ea523a20..ed603c55 100644 --- a/test/unit/base.py +++ b/test/unit/base.py @@ -4,28 +4,40 @@ import string import random -class BaseTestCase(unittest.TestCase): +import logging 
diff --git a/test/unit/base.py b/test/unit/base.py
index ea523a20..ed603c55 100644
--- a/test/unit/base.py
+++ b/test/unit/base.py
@@ -4,28 +4,40 @@
 import string
 import random
-class BaseTestCase(unittest.TestCase):
+import logging
+
+logger = logging.getLogger('mltshp.test')
+logger.setLevel(logging.INFO)
+
+class BaseTestCase(unittest.TestCase):
+    def __init__(self, *args, **kwargs):
+        super(BaseTestCase, self).__init__(*args, **kwargs)
+        self.db = register_connection(
+            host=options.database_host,
+            name=options.database_name,
+            user=options.database_user,
+            password=options.database_password,
+            charset="utf8mb4")
+
     def setUp(self):
         super(BaseTestCase, self).setUp()
-        self.db = self.create_database('mltshp_testing')
-
-    def tearDown(self):
-        super(BaseTestCase, self).tearDown()
+        if options.database_name != "mltshp_testing":
+            raise Exception("Invalid database name for unit tests")
+        self.create_database()
-    def create_database(self, name):
-        db = register_connection(options.database_host, name, options.database_user, options.database_password)
-        db.execute("DROP database IF EXISTS %s" % (name))
-        db.execute("CREATE database %s" % (name))
-        db.execute("USE %s" % (name))
+    def create_database(self):
+        # logger.info("Creating database from BaseTestCase...")
+        self.db.execute("DROP database IF EXISTS %s" % (options.database_name))
+        self.db.execute("CREATE database %s" % (options.database_name))
+        self.db.execute("USE %s" % (options.database_name))
         f = open("setup/db-install.sql")
         load_query = f.read()
         f.close()
         statements = load_query.split(";")
         for statement in statements:
             if statement.strip() != "":
-                db.execute(statement.strip())
-        return db
+                self.db.execute(statement.strip())
     def generate_string_of_len(self, length):
-        return ''.join(random.choice(string.letters) for i in xrange(length))
+        return ''.join(random.choice(string.ascii_letters) for i in range(length))
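For context on the reworked test base above: the connection is now opened once per test case (in __init__, via register_connection), setUp() refuses to run unless options.database_name is "mltshp_testing", and create_database() rebuilds that schema from setup/db-install.sql before every test. A minimal sketch of a hypothetical test module built on it, using only helpers visible in this diff:

    from .base import BaseTestCase

    class ExampleBaseTestCaseUsage(BaseTestCase):
        """Hypothetical module; the real test files in this diff follow the same pattern."""

        def test_random_string_helper(self):
            # By the time this runs, BaseTestCase.setUp() has dropped and
            # recreated mltshp_testing and replayed setup/db-install.sql.
            # self.db is also available for raw SQL via execute(), as
            # create_database() shows.
            value = self.generate_string_of_len(12)
            self.assertEqual(len(value), 12)
            self.assertTrue(value.isalpha())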
diff --git a/test/unit/bookmark_tests.py b/test/unit/bookmark_tests.py
index 5b3961a1..24ee2b78 100644
--- a/test/unit/bookmark_tests.py
+++ b/test/unit/bookmark_tests.py
@@ -2,7 +2,7 @@
 from datetime import datetime, timedelta
 import models
-from base import BaseTestCase
+from .base import BaseTestCase
 class BookmarkTests(BaseTestCase):
     def setUp(self):
diff --git a/test/unit/comment_tests.py b/test/unit/comment_tests.py
index 5944b174..c648e12a 100644
--- a/test/unit/comment_tests.py
+++ b/test/unit/comment_tests.py
@@ -1,7 +1,7 @@
 from models import Sharedfile, Sourcefile, User, Comment, Conversation
 from datetime import datetime, timedelta
 import os, shutil
-from base import BaseTestCase
+from .base import BaseTestCase
 class CommentModelTests(BaseTestCase):
diff --git a/test/unit/conversation_tests.py b/test/unit/conversation_tests.py
index b25c0c92..87a83542 100644
--- a/test/unit/conversation_tests.py
+++ b/test/unit/conversation_tests.py
@@ -1,6 +1,6 @@
 from models import User, Sharedfile, Sourcefile, Shake, Favorite, Comment, Conversation
-from base import BaseTestCase
+from .base import BaseTestCase
 class ConversationModelTests(BaseTestCase):
diff --git a/test/unit/external_relationship_tests.py b/test/unit/external_relationship_tests.py
index dc88f48f..fbab44cf 100644
--- a/test/unit/external_relationship_tests.py
+++ b/test/unit/external_relationship_tests.py
@@ -1,5 +1,5 @@
 from models import User, ExternalRelationship
-from base import BaseTestCase
+from .base import BaseTestCase
 class ExternalRelationshipTests(BaseTestCase):
diff --git a/test/unit/externalservice_tests.py b/test/unit/externalservice_tests.py
index f2876a64..dc1c30bf 100644
--- a/test/unit/externalservice_tests.py
+++ b/test/unit/externalservice_tests.py
@@ -1,5 +1,5 @@
 from models import User, Externalservice, ExternalRelationship
-from base import BaseTestCase
+from .base import BaseTestCase
 class ExternalserviceModelTests(BaseTestCase):
     def setUp(self):
diff --git a/test/unit/fileview_tests.py b/test/unit/fileview_tests.py
index 273e5112..338b7e1b 100644
--- a/test/unit/fileview_tests.py
+++ b/test/unit/fileview_tests.py
@@ -1,7 +1,7 @@
 import datetime
 from models import Fileview
-from base import BaseTestCase
+from .base import BaseTestCase
 import test.factories
 class FileviewTests(BaseTestCase):
diff --git a/test/unit/notification_tests.py b/test/unit/notification_tests.py
index 8cf70196..a79eaa95 100644
--- a/test/unit/notification_tests.py
+++ b/test/unit/notification_tests.py
@@ -1,5 +1,5 @@
 from models import Notification, User, Sourcefile, Sharedfile, Comment, Subscription
-from base import BaseTestCase
+from .base import BaseTestCase
 from settings import test_settings as settings
 class NotificationModelTests(BaseTestCase):
diff --git a/test/unit/script_log_tests.py b/test/unit/script_log_tests.py
index 2e972ca1..af36a640 100644
--- a/test/unit/script_log_tests.py
+++ b/test/unit/script_log_tests.py
@@ -1,7 +1,7 @@
 import datetime
 from models import ScriptLog
-from base import BaseTestCase
+from .base import BaseTestCase
 import test.factories
 class ScriptLogTests(BaseTestCase):
diff --git a/test/unit/shake_tests.py b/test/unit/shake_tests.py
index d2ea1057..c4676971 100644
--- a/test/unit/shake_tests.py
+++ b/test/unit/shake_tests.py
@@ -1,7 +1,7 @@
 from tornado.options import options
 from models import User, Shake, Sourcefile, Sharedfile, ShakeManager
-from base import BaseTestCase
+from .base import BaseTestCase
 class ShakeModelTests(BaseTestCase):
diff --git a/test/unit/sharedfile_tests.py b/test/unit/sharedfile_tests.py
index cbc9e612..28439e9b 100644
--- a/test/unit/sharedfile_tests.py
+++ b/test/unit/sharedfile_tests.py
@@ -1,7 +1,7 @@
 from models import Sharedfile, Sourcefile, User, Comment, Conversation, Shake, Shakesharedfile, Favorite, NSFWLog, Tag, TaggedFile
 from datetime import datetime, timedelta
 import os, shutil, calendar
-from base import BaseTestCase
+from .base import BaseTestCase
 class SharedfileModelTests(BaseTestCase):
diff --git a/test/unit/sourcefile_tests.py b/test/unit/sourcefile_tests.py
index eb8faab1..64687b68 100644
--- a/test/unit/sourcefile_tests.py
+++ b/test/unit/sourcefile_tests.py
@@ -1,5 +1,5 @@
 from models import Sharedfile, Sourcefile, User
-from base import BaseTestCase
+from .base import BaseTestCase
 import os
 from tornado.escape import json_decode
 import re
diff --git a/test/unit/task_tests.py b/test/unit/task_tests.py
index cb016686..bf70a108 100644
--- a/test/unit/task_tests.py
+++ b/test/unit/task_tests.py
@@ -1,25 +1,11 @@
-from base import BaseTestCase
+from .base import BaseTestCase
 from models import User, Shake, Sourcefile, Sharedfile, Shakesharedfile, Post, Magicfile
 from tasks.timeline import add_posts, delete_posts
 from tornado.options import options
-import tweepy
 from mock import patch
-class MockTweepy(object):
-    count = 0
-
-    @classmethod
-    def API(cls, auth_obj):
-        cls.count = 0
-        return cls
-
-    @classmethod
-    def update_status(cls, *args, **kwargs):
-        cls.count = cls.count + 1
-
-
 class CeleryTaskTests(BaseTestCase):
     def setUp(self):
@@ -72,23 +58,3 @@ def test_task_timeline_delete_posts(self):
         posts = Post.all()
         for post in posts:
             self.assertTrue(post.deleted)
-
-    @patch('tweepy.API', MockTweepy.API)
-    def test_tweet_best_posts(self):
-        old_likes = options.likes_to_tweet
-        old_magic = options.likes_to_magic
-        old_debug = options.debug
-        try:
-            options.likes_to_tweet = 1
-            options.likes_to_magic = 1
-            options.debug = False
-            add_posts(shake_id=self.shake_a.id, sharedfile_id=self.shared_1.id, sourcefile_id=self.source.id)
-            self.user_b.add_favorite(self.shared_1)
-            # this like should trigger a tweet
-            self.assertEqual(MockTweepy.count, 1)
-            mf = Magicfile.get("sharedfile_id = %s", self.shared_1.id)
-            self.assertIsNotNone(mf)
-        finally:
-            options.likes_to_tweet = old_likes
-            options.likes_to_magic = old_magic
-            options.debug = old_debug
diff --git a/test/unit/user_tests.py b/test/unit/user_tests.py
index a00b77e9..39694ff3 100644
--- a/test/unit/user_tests.py
+++ b/test/unit/user_tests.py
@@ -1,5 +1,5 @@
 from models import User, Sharedfile, Sourcefile, Shake, Favorite, invitation, Shakesharedfile, Subscription, ShakeManager
-from base import BaseTestCase
+from .base import BaseTestCase
 import random, os, calendar
 from datetime import datetime
 from tornado.options import options
+ """ + def __init__(self, host, database, user=None, password=None, + max_idle_time=7 * 3600, connect_timeout=0, + time_zone="+0:00", charset = "utf8", sql_mode="TRADITIONAL"): + self.host = host + self.database = database + self.max_idle_time = float(max_idle_time) + + # removing `conv=CONVERSIONS` + args = dict(use_unicode=True, charset=charset, + db=database, init_command=('SET time_zone = "%s"' % time_zone), + connect_timeout=connect_timeout, sql_mode=sql_mode) + if user is not None: + args["user"] = user + if password is not None: + args["passwd"] = password + + # We accept a path to a MySQL socket file or a host(:port) string + if "/" in host: + args["unix_socket"] = host + else: + self.socket = None + pair = host.split(":") + if len(pair) == 2: + args["host"] = pair[0] + args["port"] = int(pair[1]) + else: + args["host"] = host + args["port"] = 3306 + + self._db = None + self._db_args = args + self._last_use_time = time.time() + try: + self.reconnect() + except Exception: + logging.error("Cannot connect to MySQL on %s", self.host, + exc_info=True) + + def __del__(self): + self.close() + + def close(self): + """Closes this database connection.""" + if getattr(self, "_db", None) is not None: + self._db.close() + self._db = None + + def reconnect(self): + """Closes the existing database connection and re-opens it.""" + self.close() + self._db = MySQLdb.connect(**self._db_args) + self._db.autocommit(True) + + def iter(self, query, *parameters, **kwparameters): + """Returns an iterator for the given query and parameters.""" + self._ensure_connected() + cursor = MySQLdb.cursors.SSCursor(self._db) + try: + self._execute(cursor, query, parameters, kwparameters) + column_names = [d[0] for d in cursor.description] + for row in cursor: + yield Row(list(zip(column_names, row))) + finally: + cursor.close() + + def query(self, query, *parameters, **kwparameters): + """Returns a row list for the given query and parameters.""" + cursor = self._cursor() + try: + self._execute(cursor, query, parameters, kwparameters) + column_names = [d[0] for d in cursor.description] + return [Row(list(zip(column_names, row))) for row in cursor] + finally: + cursor.close() + + def get(self, query, *parameters, **kwparameters): + """Returns the (singular) row returned by the given query. + + If the query has no results, returns None. If it has + more than one result, raises an exception. + """ + rows = self.query(query, *parameters, **kwparameters) + if not rows: + return None + elif len(rows) > 1: + raise Exception("Multiple rows returned for Database.get() query") + else: + return rows[0] + + # rowcount is a more reasonable default return value than lastrowid, + # but for historical compatibility execute() must return lastrowid. 
+
+    # rowcount is a more reasonable default return value than lastrowid,
+    # but for historical compatibility execute() must return lastrowid.
+    def execute(self, query, *parameters, **kwparameters):
+        """Executes the given query, returning the lastrowid from the query."""
+        return self.execute_lastrowid(query, *parameters, **kwparameters)
+
+    def execute_lastrowid(self, query, *parameters, **kwparameters):
+        """Executes the given query, returning the lastrowid from the query."""
+        cursor = self._cursor()
+        try:
+            self._execute(cursor, query, parameters, kwparameters)
+            return cursor.lastrowid
+        finally:
+            cursor.close()
+
+    def execute_rowcount(self, query, *parameters, **kwparameters):
+        """Executes the given query, returning the rowcount from the query."""
+        cursor = self._cursor()
+        try:
+            self._execute(cursor, query, parameters, kwparameters)
+            return cursor.rowcount
+        finally:
+            cursor.close()
+
+    def executemany(self, query, parameters):
+        """Executes the given query against all the given param sequences.
+
+        We return the lastrowid from the query.
+        """
+        return self.executemany_lastrowid(query, parameters)
+
+    def executemany_lastrowid(self, query, parameters):
+        """Executes the given query against all the given param sequences.
+
+        We return the lastrowid from the query.
+        """
+        cursor = self._cursor()
+        try:
+            cursor.executemany(query, parameters)
+            return cursor.lastrowid
+        finally:
+            cursor.close()
+
+    def executemany_rowcount(self, query, parameters):
+        """Executes the given query against all the given param sequences.
+
+        We return the rowcount from the query.
+        """
+        cursor = self._cursor()
+        try:
+            cursor.executemany(query, parameters)
+            return cursor.rowcount
+        finally:
+            cursor.close()
+
+    update = execute_rowcount
+    updatemany = executemany_rowcount
+
+    insert = execute_lastrowid
+    insertmany = executemany_lastrowid
+
+    def _ensure_connected(self):
+        # Mysql by default closes client connections that are idle for
+        # 8 hours, but the client library does not report this fact until
+        # you try to perform a query and it fails. Protect against this
+        # case by preemptively closing and reopening the connection
+        # if it has been idle for too long (7 hours by default).
+        if (self._db is None or
+            (time.time() - self._last_use_time > self.max_idle_time)):
+            self.reconnect()
+        self._last_use_time = time.time()
+
+    def _cursor(self):
+        self._ensure_connected()
+        return self._db.cursor()
+
+    def _execute(self, cursor, query, parameters, kwparameters):
+        try:
+            return cursor.execute(query, kwparameters or parameters)
+        except OperationalError:
+            logging.error("Error connecting to MySQL on %s", self.host)
+            self.close()
+            raise
+
+
+class Row(dict):
+    """A dict that allows for object-like property access syntax."""
+    def __getattr__(self, name):
+        try:
+            return self[name]
+        except KeyError:
+            raise AttributeError(name)
+
+# Fix the access conversions to properly recognize unicode/binary
+#FIELD_TYPE = MySQLdb.constants.FIELD_TYPE
+#FLAG = MySQLdb.constants.FLAG
+#CONVERSIONS = copy.copy(MySQLdb.converters.conversions)
+
+#field_types = [FIELD_TYPE.BLOB, FIELD_TYPE.STRING, FIELD_TYPE.VAR_STRING]
+#if 'VARCHAR' in vars(FIELD_TYPE):
+#    field_types.append(FIELD_TYPE.VARCHAR)
+
+#for field_type in field_types:
+#    CONVERSIONS[field_type] = [(FLAG.BINARY, str)] + [CONVERSIONS[field_type]]
+
+# Alias some common MySQL exceptions
+IntegrityError = MySQLdb.IntegrityError
+OperationalError = MySQLdb.OperationalError
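For reference, a short usage sketch of the vendored torndb wrapper added above; the host, credentials, and table name are placeholders rather than values from this repository:

    import torndb

    # The first argument accepts "host", "host:port", or a MySQL socket path.
    db = torndb.Connection("127.0.0.1:3306", "mydatabase",
                           user="example", password="example",
                           charset="utf8mb4")

    # execute() returns the lastrowid of the statement it ran.
    new_id = db.execute("INSERT INTO example_table (title) VALUES (%s)",
                        "hello")

    # query() returns a list of Row objects; columns read as attributes.
    for row in db.query("SELECT id, title FROM example_table"):
        print(row.id, row.title)

    # get() returns a single Row, None when nothing matches, and raises
    # if more than one row comes back.
    row = db.get("SELECT title FROM example_table WHERE id = %s", new_id)

    # update() aliases execute_rowcount() and returns the affected row count.
    changed = db.update("UPDATE example_table SET title = %s WHERE id = %s",
                        "hi", new_id)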