defines += options.defines cmdline_default_variables = NameValueListToDict(defines) if DEBUG_GENERAL in gyp.debug: DebugOutput(DEBUG_GENERAL, "cmdline_default_variables: %s", cmdline_default_variables) # Set up includes. includes = [] # If ~/.gyp/include.gypi exists, it'll be forcibly included into every # .gyp file that's loaded, before anything else is included. if home_dot_gyp != None: default_include = os.path.join(home_dot_gyp, 'include.gypi') if os.path.exists(default_include): print 'Using overrides found in ' + default_include includes.append(default_include) # Command-line --include files come after the default include. if options.includes: includes.extend(options.includes) # Generator flags should be prefixed with the target generator since they # are global across all generator runs. gen_flags = [] if options.use_environment: gen_flags += ShlexEnv('GYP_GENERATOR_FLAGS') if options.generator_flags: gen_flags += options.generator_flags generator_flags = NameValueListToDict(gen_flags) if DEBUG_GENERAL in gyp.debug.keys(): DebugOutput(DEBUG_GENERAL, "generator_flags: %s", generator_flags) # Generate all requested formats (use a set in case we got one format request # twice) for format in set(options.formats): params = {'options': options, 'build_files': build_files, 'generator_flags': generator_flags, 'cwd': os.getcwd(), 'build_files_arg': build_files_arg, 'gyp_binary': sys.argv[0], 'home_dot_gyp': home_dot_gyp, 'parallel': options.parallel, 'root_targets': options.root_targets, 'target_arch': cmdline_default_variables.get('target_arch', '')} # Start with the default variables from the command line. [generator, flat_list, targets, data] = Load( build_files, format, cmdline_default_variables, includes, options.depth, params, options.check, options.circular_check) # TODO(mark): Pass |data| for now because the generator needs a list of # build files that came in. In the future, maybe it should just accept # a list, and not the whole data dict. # NOTE: flat_list is the flattened dependency graph specifying the order # that targets may be built. Build systems that operate serially or that # need to have dependencies defined before dependents reference them should # generate targets in the order specified in flat_list. generator.GenerateOutput(flat_list, targets, data, params) if options.configs: valid_configs = targets[flat_list[0]]['configurations'].keys() for conf in options.configs: if conf not in valid_configs: raise GypError('Invalid config specified via --build: %s' % conf) generator.PerformBuild(data, options.configs, params) # Done return 0 def main(args): try: return gyp_main(args) except GypError, e: sys.stderr.write("gyp: %s\n" % e) return 1 # NOTE: setuptools generated console_scripts calls function with no arguments def script_main(): return main(sys.argv[1:]) if __name__ == '__main__': sys.exit(script_main()) # (c) 2014, Michael DeHaan # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see . 
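# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original sources): gyp_main() above
# leans on NameValueListToDict() and ShlexEnv() to turn "-D name=value"
# defines and GYP_GENERATOR_FLAGS into dicts. The bodies below are an
# assumption of how such helpers behave, inferred from their call sites,
# not a verbatim copy of gyp's own implementation.
import os
import shlex


def NameValueListToDict(name_value_list):
    # 'foo=1' -> {'foo': 1}; values that parse as ints become ints,
    # everything else stays a string; a bare 'foo' maps to True.
    result = {}
    for item in name_value_list:
        tokens = item.split('=', 1)
        if len(tokens) == 2:
            try:
                result[tokens[0]] = int(tokens[1])
            except ValueError:
                result[tokens[0]] = tokens[1]
        else:
            result[tokens[0]] = True
    return result


def ShlexEnv(env_name):
    # Split an environment variable (e.g. GYP_GENERATOR_FLAGS) into
    # shell-style tokens; an unset variable yields an empty list.
    flags = os.environ.get(env_name, '')
    return shlex.split(flags) if flags else []
# ---------------------------------------------------------------------------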
import textwrap from ansible import constants as C from ansible import errors from ansible.callbacks import display __all__ = ['deprecated', 'warning', 'system_warning'] # list of all deprecation messages to prevent duplicate display deprecations = {} warns = {} def deprecated(msg, version, removed=False): ''' used to print out a deprecation message.''' if not removed and not C.DEPRECATION_WARNINGS: return if not removed: if version: new_msg = "\n[DEPRECATION WARNING]: %s. This feature will be removed in version %s." % (msg, version) else: new_msg = "\n[DEPRECATION WARNING]: %s. This feature will be removed in a future release." % (msg) new_msg = new_msg + " Deprecation warnings can be disabled by setting deprecation_warnings=False in ansible.cfg.\n\n" else: raise errors.AnsibleError("[DEPRECATED]: %s. Please update your playbooks." % msg) wrapped = textwrap.wrap(new_msg, 79) new_msg = "\n".join(wrapped) + "\n" if new_msg not in deprecations: display(new_msg, color='purple', stderr=True) deprecations[new_msg] = 1 def warning(msg): new_msg = "\n[WARNING]: %s" % msg wrapped = textwrap.wrap(new_msg, 79) new_msg = "\n".join(wrapped) + "\n" if new_msg not in warns: display(new_msg, color='bright purple', stderr=True) warns[new_msg] = 1 def system_warning(msg): if C.SYSTEM_WARNINGS: warning(msg) # -*- coding: utf-8 -*- """QGIS Unit tests for QgsBlockingNetworkRequest .. note:: This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. """ from builtins import chr from builtins import str __author__ = 'Nyall Dawson' __date__ = '12/11/2018' __copyright__ = 'Copyright 2018, The QGIS Project' import qgis # NOQA import os from qgis.testing import unittest, start_app from qgis.core import QgsBlockingNetworkRequest from utilities import unitTestDataPath from qgis.PyQt.QtCore import QUrl from qgis.PyQt.QtTest import QSignalSpy from qgis.PyQt.QtNetwork import QNetworkReply, QNetworkRequest import socketserver import threading import http.server app = start_app() class TestQgsBlockingNetworkRequest(unittest.TestCase): @classmethod def setUpClass(cls): # Bring up a simple HTTP server os.chdir(unitTestDataPath() + '') handler = http.server.SimpleHTTPRequestHandler cls.httpd = socketserver.TCPServer(('localhost', 0), handler) cls.port = cls.httpd.server_address[1] cls.httpd_thread = threading.Thread(target=cls.httpd.serve_forever) cls.httpd_thread.setDaemon(True) cls.httpd_thread.start() def testFetchEmptyUrl(self): request = QgsBlockingNetworkRequest() spy = QSignalSpy(request.downloadFinished) err = request.get(QNetworkRequest(QUrl())) self.assertEqual(len(spy), 1) self.assertEqual(err, QgsBlockingNetworkRequest.ServerExceptionError) self.assertEqual(request.errorMessage(), 'Protocol "" is unknown') reply = request.reply() self.assertFalse(reply.content()) def testFetchBadUrl(self): request = QgsBlockingNetworkRequest() spy = QSignalSpy(request.downloadFinished) err = request.get(QNetworkRequest(QUrl('http://x'))) self.assertEqual(len(spy), 1) self.assertEqual(err, QgsBlockingNetworkRequest.ServerExceptionError) self.assertEqual(request.errorMessage(), 'Host x not found') reply = request.reply() self.assertFalse(reply.content()) def testFetchBadUrl2(self): request = QgsBlockingNetworkRequest() spy = QSignalSpy(request.downloadFinished) err = request.get(QNetworkRequest(QUrl('http://localhost:' + 
str(TestQgsBlockingNetworkRequest.port) + '/ffff'))) self.assertEqual(len(spy), 1) self.assertEqual(err, QgsBlockingNetworkRequest.ServerExceptionError) self.assertIn('File not found', request.errorMessage()) reply = request.reply() self.assertEqual(reply.error(), QNetworkReply.ContentNotFoundError) self.assertFalse(reply.content()) def testGet(self): request = QgsBlockingNetworkRequest() spy = QSignalSpy(request.downloadFinished) err = request.get(QNetworkRequest(QUrl('http://localhost:' + str(TestQgsBlockingNetworkRequest.port) + '/qgis_local_server/index.html'))) self.assertEqual(len(spy), 1) self.assertEqual(err, QgsBlockingNetworkRequest.NoError) self.assertEqual(request.errorMessage(), '') reply = request.reply() self.assertEqual(reply.error(), QNetworkReply.NoError) self.assertEqual(reply.content(), '\n\n\n\t\n\tLocal QGIS Server Default Index\n\n\n
Web Server Working
\n\n\n') self.assertEqual(reply.rawHeaderList(), [b'Server', b'Date', b'Content-type', b'Content-Length', b'Last-Modified']) self.assertEqual(reply.rawHeader(b'Content-type'), 'text/html') self.assertEqual(reply.rawHeader(b'xxxxxxxxx'), '') self.assertEqual(reply.attribute(QNetworkRequest.HttpStatusCodeAttribute), 200) self.assertEqual(reply.attribute(QNetworkRequest.HttpReasonPhraseAttribute), 'OK') self.assertEqual(reply.attribute(QNetworkRequest.HttpStatusCodeAttribute), 200) self.assertEqual(reply.attribute(QNetworkRequest.RedirectionTargetAttribute), None) if __name__ == "__main__": unittest.main() # -*- test-case-name: twisted.test.test_monkey -*- # Copyright (c) Twisted Matrix Laboratories. # See LICENSE for details. from __future__ import division, absolute_import class MonkeyPatcher(object): """ Cover up attributes with new objects. Neat for monkey-patching things for unit-testing purposes. """ def __init__(self, *patches): # List of patches to apply in (obj, name, value). self._patchesToApply = [] # List of the original values for things that have been patched. # (obj, name, value) format. self._originals = [] for patch in patches: self.addPatch(*patch) def addPatch(self, obj, name, value): """ Add a patch so that the attribute C{name} on C{obj} will be assigned to C{value} when C{patch} is called or during C{runWithPatches}. You can restore the original values with a call to restore(). """ self._patchesToApply.append((obj, name, value)) def _alreadyPatched(self, obj, name): """ Has the C{name} attribute of C{obj} already been patched by this patcher? """ for o, n, v in self._originals: if (o, n) == (obj, name): return True return False def patch(self): """ Apply all of the patches that have been specified with L{addPatch}. Reverse this operation using L{restore}. """ for obj, name, value in self._patchesToApply: if not self._alreadyPatched(obj, name): self._originals.append((obj, name, getattr(obj, name))) setattr(obj, name, value) def restore(self): """ Restore all original values to any patched objects. """ while self._originals: obj, name, value = self._originals.pop() setattr(obj, name, value) def runWithPatches(self, f, *args, **kw): """ Apply each patch already specified. Then run the function f with the given args and kwargs. Restore everything when done. """ self.patch() try: return f(*args, **kw) finally: self.restore() # Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Protobuf related tests.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.python.framework import constant_op from tensorflow.python.framework import ops from tensorflow.python.platform import test class ProtoTest(test.TestCase): # TODO(vrv): re-enable this test once we figure out how this can # pass the pip install test (where the user is expected to have # protobuf installed). 
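    # Illustrative sketch (not part of the original test file): the disabled
    # test below exercises SerializeToString/ParseFromString on a >64MB
    # GraphDef. The same round-trip on a tiny graph, using the modules
    # already imported in this file, looks like this:
    def _demoSmallProtoRoundTrip(self):
        a = constant_op.constant(np.zeros([2, 2]))
        gdef = a.op.graph.as_graph_def()
        roundtripped = ops.Graph().as_graph_def()
        # Serialize and deserialize; proto equality should survive the trip.
        roundtripped.ParseFromString(gdef.SerializeToString())
        self.assertProtoEquals(roundtripped, gdef)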
def _testLargeProto(self): # create a constant of size > 64MB. a = constant_op.constant(np.zeros([1024, 1024, 17])) # Serialize the resulting graph def. gdef = a.op.graph.as_graph_def() serialized = gdef.SerializeToString() unserialized = ops.Graph().as_graph_def() # Deserialize back. Protobuf python library should support # protos larger than 64MB. unserialized.ParseFromString(serialized) self.assertProtoEquals(unserialized, gdef) if __name__ == "__main__": test.main() """Parabolic geometrical entity. Contains * Parabola """ from __future__ import division, print_function from sympy.core import S from sympy.core.numbers import oo from sympy.geometry.entity import GeometryEntity, GeometrySet from sympy.geometry.point import Point from sympy.geometry.line import Line from sympy.geometry.util import _symbol class Parabola(GeometrySet): """A parabolic GeometryEntity. A parabola is declared with a point, that is called 'focus', and a line, that is called 'directrix'. Only vertical or horizontal parabolas are currently supported. Parameters ========== focus : Point Default value is Point(0, 0) directrix : Line Attributes ========== focus directrix axis of symmetry focal length p parameter vertex eccentricity Raises ====== ValueError When `focus` is not a two dimensional point. When `focus` is a point of directrix. NotImplementedError When `directrix` is neither horizontal nor vertical. Examples ======== >>> from sympy import Parabola, Point, Line >>> p1 = Parabola(Point(0, 0), Line(Point(5, 8), Point(7,8))) >>> p1.focus Point2D(0, 0) >>> p1.directrix Line2D(Point2D(5, 8), Point2D(7, 8)) """ def __new__(cls, focus=None, directrix=None, **kwargs): if focus: focus = Point(focus, dim=2) else: focus = Point(0, 0) directrix = Line(directrix) if (directrix.slope != 0 and directrix.slope != S.Infinity): raise NotImplementedError('The directrix must be a horizontal' ' or vertical line') if directrix.contains(focus): raise ValueError('The focus must not be a point of directrix') return GeometryEntity.__new__(cls, focus, directrix, **kwargs) @property def ambient_dimension(self): return S(2) @property def axis_of_symmetry(self): """The axis of symmetry of the parabola. Returns ======= axis_of_symmetry : Line See Also ======== sympy.geometry.line.Line Examples ======== >>> from sympy import Parabola, Point, Line >>> p1 = Parabola(Point(0, 0), Line(Point(5, 8), Point(7, 8))) >>> p1.axis_of_symmetry Line2D(Point2D(0, 0), Point2D(0, 1)) """ return self.directrix.perpendicular_line(self.focus) @property def directrix(self): """The directrix of the parabola. Returns ======= directrix : Line See Also ======== sympy.geometry.line.Line Examples ======== >>> from sympy import Parabola, Point, Line >>> l1 = Line(Point(5, 8), Point(7, 8)) >>> p1 = Parabola(Point(0, 0), l1) >>> p1.directrix Line2D(Point2D(5, 8), Point2D(7, 8)) """ return self.args[1] @property def eccentricity(self): """The eccentricity of the parabola. Returns ======= eccentricity : number A parabola may also be characterized as a conic section with an eccentricity of 1. As a consequence of this, all parabolas are similar, meaning that while they can be different sizes, they are all the same shape. See Also ======== https://en.wikipedia.org/wiki/Parabola Examples ======== >>> from sympy import Parabola, Point, Line >>> p1 = Parabola(Point(0, 0), Line(Point(5, 8), Point(7, 8))) >>> p1.eccentricity 1 Notes ----- The eccentricity for every Parabola is 1 by definition. """ return S(1) def equation(self, x='x', y='y'): """The equation of the parabola. 
Parameters ========== x : str, optional Label for the x-axis. Default value is 'x'. y : str, optional Label for the y-axis. Default value is 'y'. Returns ======= equation : sympy expression Examples ======== >>> from sympy import Parabola, Point, Line >>> p1 = Parabola(Point(0, 0), Line(Point(5, 8), Point(7, 8))) >>> p1.equation() -x**2 - 16*y + 64 >>> p1.equation('f') -f**2 - 16*y + 64 >>> p1.equation(y='z') -x**2 - 16*z + 64 """ x = _symbol(x) y = _symbol(y) if (self.axis_of_symmetry.slope == 0): t1 = 4 * (self.p_parameter) * (x - self.vertex.x) t2 = (y - self.vertex.y)**2 else: t1 = 4 * (self.p_parameter) * (y - self.vertex.y) t2 = (x - self.vertex.x)**2 return t1 - t2 @property def focal_length(self): """The focal length of the parabola. Returns ======= focal_lenght : number or symbolic expression Notes ===== The distance between the vertex and the focus (or the vertex and directrix), measured along the axis of symmetry, is the "focal length". See Also ======== https://en.wikipedia.org/wiki/Parabola Examples ======== >>> from sympy import Parabola, Point, Line >>> p1 = Parabola(Point(0, 0), Line(Point(5, 8), Point(7, 8))) >>> p1.focal_length 4 """ distance = self.directrix.distance(self.focus) focal_length = distance/2 return focal_length @property def focus(self): """The focus of the parabola. Returns ======= focus : Point See Also ======== sympy.geometry.point.Point Examples ======== >>> from sympy import Parabola, Point, Line >>> f1 = Point(0, 0) >>> p1 = Parabola(f1, Line(Point(5, 8), Point(7, 8))) >>> p1.focus Point2D(0, 0) """ return self.args[0] @property def p_parameter(self): """P is a parameter of parabola. Returns ======= p : number or symbolic expression Notes ===== The absolute value of p is the focal length. The sign on p tells which way the parabola faces. Vertical parabolas that open up and horizontal that open right, give a positive value for p. Vertical parabolas that open down and horizontal that open left, give a negative value for p. See Also ======== http://www.sparknotes.com/math/precalc/conicsections/section2.rhtml Examples ======== >>> from sympy import Parabola, Point, Line >>> p1 = Parabola(Point(0, 0), Line(Point(5, 8), Point(7, 8))) >>> p1.p_parameter -4 """ if (self.axis_of_symmetry.slope == 0): x = -(self.directrix.coefficients[2]) if (x < self.focus.args[0]): p = self.focal_length else: p = -self.focal_length else: y = -(self.directrix.coefficients[2]) if (y > self.focus.args[1]): p = -self.focal_length else: p = self.focal_length return p @property def vertex(self): """The vertex of the parabola. 
Returns ======= vertex : Point See Also ======== sympy.geometry.point.Point Examples ======== >>> from sympy import Parabola, Point, Line >>> p1 = Parabola(Point(0, 0), Line(Point(5, 8), Point(7, 8))) >>> p1.vertex Point2D(0, 4) """ focus = self.focus if (self.axis_of_symmetry.slope == 0): vertex = Point(focus.args[0] - self.p_parameter, focus.args[1]) else: vertex = Point(focus.args[0], focus.args[1] - self.p_parameter) return vertex # -*- coding: utf-8 -*- from __future__ import unicode_literals import gzip import random import re from io import BytesIO from unittest import skipIf from django.conf import settings from django.core import mail from django.core.exceptions import PermissionDenied from django.http import ( FileResponse, HttpRequest, HttpResponse, HttpResponseNotFound, HttpResponsePermanentRedirect, HttpResponseRedirect, StreamingHttpResponse, ) from django.middleware.clickjacking import XFrameOptionsMiddleware from django.middleware.common import ( BrokenLinkEmailsMiddleware, CommonMiddleware, ) from django.middleware.gzip import GZipMiddleware from django.middleware.http import ConditionalGetMiddleware from django.test import RequestFactory, SimpleTestCase, override_settings from django.utils import six from django.utils.encoding import force_str from django.utils.six.moves import range from django.utils.six.moves.urllib.parse import quote @override_settings(ROOT_URLCONF='middleware.urls') class CommonMiddlewareTest(SimpleTestCase): rf = RequestFactory() @override_settings(APPEND_SLASH=True) def test_append_slash_have_slash(self): """ URLs with slashes should go unmolested. """ request = self.rf.get('/slash/') self.assertEqual(CommonMiddleware().process_request(request), None) response = HttpResponseNotFound() self.assertEqual(CommonMiddleware().process_response(request, response), response) @override_settings(APPEND_SLASH=True) def test_append_slash_slashless_resource(self): """ Matches to explicit slashless URLs should go unmolested. """ request = self.rf.get('/noslash') self.assertEqual(CommonMiddleware().process_request(request), None) response = HttpResponse("Here's the text of the Web page.") self.assertEqual(CommonMiddleware().process_response(request, response), response) @override_settings(APPEND_SLASH=True) def test_append_slash_slashless_unknown(self): """ APPEND_SLASH should not redirect to unknown resources. """ request = self.rf.get('/unknown') response = HttpResponseNotFound() self.assertEqual(CommonMiddleware().process_response(request, response), response) @override_settings(APPEND_SLASH=True) def test_append_slash_redirect(self): """ APPEND_SLASH should redirect slashless URLs to a valid pattern. """ request = self.rf.get('/slash') response = HttpResponseNotFound() r = CommonMiddleware().process_response(request, response) self.assertEqual(r.status_code, 301) self.assertEqual(r.url, '/slash/') @override_settings(APPEND_SLASH=True) def test_append_slash_redirect_querystring(self): """ APPEND_SLASH should preserve querystrings when redirecting. """ request = self.rf.get('/slash?test=1') response = HttpResponseNotFound() r = CommonMiddleware().process_response(request, response) self.assertEqual(r.url, '/slash/?test=1') @override_settings(APPEND_SLASH=True, DEBUG=True) def test_append_slash_no_redirect_on_POST_in_DEBUG(self): """ Tests that while in debug mode, an exception is raised with a warning when a failed attempt is made to POST, PUT, or PATCH to an URL which would normally be redirected to a slashed version. """ msg = "maintaining %s data. 
Change your form to point to testserver/slash/" request = self.rf.get('/slash') request.method = 'POST' response = HttpResponseNotFound() with six.assertRaisesRegex(self, RuntimeError, msg % request.method): CommonMiddleware().process_response(request, response) request = self.rf.get('/slash') request.method = 'PUT' with six.assertRaisesRegex(self, RuntimeError, msg % request.method): CommonMiddleware().process_response(request, response) request = self.rf.get('/slash') request.method = 'PATCH' with six.assertRaisesRegex(self, RuntimeError, msg % request.method): CommonMiddleware().process_response(request, response) @override_settings(APPEND_SLASH=False) def test_append_slash_disabled(self): """ Disabling append slash functionality should leave slashless URLs alone. """ request = self.rf.get('/slash') response = HttpResponseNotFound() self.assertEqual(CommonMiddleware().process_response(request, response), response) @override_settings(APPEND_SLASH=True) def test_append_slash_quoted(self): """ URLs which require quoting should be redirected to their slash version ok. """ request = self.rf.get(quote('/needsquoting#')) response = HttpResponseNotFound() r = CommonMiddleware().process_response(request, response) self.assertEqual(r.status_code, 301) self.assertEqual( r.url, '/needsquoting%23/') @override_settings(APPEND_SLASH=False, PREPEND_WWW=True) def test_prepend_www(self): request = self.rf.get('/path/') r = CommonMiddleware().process_request(request) self.assertEqual(r.status_code, 301) self.assertEqual( r.url, 'http://www.testserver/path/') @override_settings(APPEND_SLASH=True, PREPEND_WWW=True) def test_prepend_www_append_slash_have_slash(self): request = self.rf.get('/slash/') r = CommonMiddleware().process_request(request) self.assertEqual(r.status_code, 301) self.assertEqual(r.url, 'http://www.testserver/slash/') @override_settings(APPEND_SLASH=True, PREPEND_WWW=True) def test_prepend_www_append_slash_slashless(self): request = self.rf.get('/slash') r = CommonMiddleware().process_request(request) self.assertEqual(r.status_code, 301) self.assertEqual(r.url, 'http://www.testserver/slash/') # The following tests examine expected behavior given a custom urlconf that # overrides the default one through the request object. @override_settings(APPEND_SLASH=True) def test_append_slash_have_slash_custom_urlconf(self): """ URLs with slashes should go unmolested. """ request = self.rf.get('/customurlconf/slash/') request.urlconf = 'middleware.extra_urls' self.assertEqual(CommonMiddleware().process_request(request), None) response = HttpResponseNotFound() self.assertEqual(CommonMiddleware().process_response(request, response), response) @override_settings(APPEND_SLASH=True) def test_append_slash_slashless_resource_custom_urlconf(self): """ Matches to explicit slashless URLs should go unmolested. """ request = self.rf.get('/customurlconf/noslash') request.urlconf = 'middleware.extra_urls' self.assertEqual(CommonMiddleware().process_request(request), None) response = HttpResponse("Here's the text of the Web page.") self.assertEqual(CommonMiddleware().process_response(request, response), response) @override_settings(APPEND_SLASH=True) def test_append_slash_slashless_unknown_custom_urlconf(self): """ APPEND_SLASH should not redirect to unknown resources. 
""" request = self.rf.get('/customurlconf/unknown') request.urlconf = 'middleware.extra_urls' self.assertEqual(CommonMiddleware().process_request(request), None) response = HttpResponseNotFound() self.assertEqual(CommonMiddleware().process_response(request, response), response) @override_settings(APPEND_SLASH=True) def test_append_slash_redirect_custom_urlconf(self): """ APPEND_SLASH should redirect slashless URLs to a valid pattern. """ request = self.rf.get('/customurlconf/slash') request.urlconf = 'middleware.extra_urls' response = HttpResponseNotFound() r = CommonMiddleware().process_response(request, response) self.assertIsNotNone(r, "CommonMiddlware failed to return APPEND_SLASH redirect using request.urlconf") self.assertEqual(r.status_code, 301) self.assertEqual(r.url, '/customurlconf/slash/') @override_settings(APPEND_SLASH=True, DEBUG=True) def test_append_slash_no_redirect_on_POST_in_DEBUG_custom_urlconf(self): """ Tests that while in debug mode, an exception is raised with a warning when a failed attempt is made to POST to an URL which would normally be redirected to a slashed version. """ request = self.rf.get('/customurlconf/slash') request.urlconf = 'middleware.extra_urls' request.method = 'POST' response = HttpResponseNotFound() with six.assertRaisesRegex(self, RuntimeError, 'end in a slash'): CommonMiddleware().process_response(request, response) @override_settings(APPEND_SLASH=False) def test_append_slash_disabled_custom_urlconf(self): """ Disabling append slash functionality should leave slashless URLs alone. """ request = self.rf.get('/customurlconf/slash') request.urlconf = 'middleware.extra_urls' self.assertEqual(CommonMiddleware().process_request(request), None) response = HttpResponseNotFound() self.assertEqual(CommonMiddleware().process_response(request, response), response) @override_settings(APPEND_SLASH=True) def test_append_slash_quoted_custom_urlconf(self): """ URLs which require quoting should be redirected to their slash version ok. 
""" request = self.rf.get(quote('/customurlconf/needsquoting#')) request.urlconf = 'middleware.extra_urls' response = HttpResponseNotFound() r = CommonMiddleware().process_response(request, response) self.assertIsNotNone(r, "CommonMiddlware failed to return APPEND_SLASH redirect using request.urlconf") self.assertEqual(r.status_code, 301) self.assertEqual( r.url, '/customurlconf/needsquoting%23/') @override_settings(APPEND_SLASH=False, PREPEND_WWW=True) def test_prepend_www_custom_urlconf(self): request = self.rf.get('/customurlconf/path/') request.urlconf = 'middleware.extra_urls' r = CommonMiddleware().process_request(request) self.assertEqual(r.status_code, 301) self.assertEqual( r.url, 'http://www.testserver/customurlconf/path/') @override_settings(APPEND_SLASH=True, PREPEND_WWW=True) def test_prepend_www_append_slash_have_slash_custom_urlconf(self): request = self.rf.get('/customurlconf/slash/') request.urlconf = 'middleware.extra_urls' r = CommonMiddleware().process_request(request) self.assertEqual(r.status_code, 301) self.assertEqual(r.url, 'http://www.testserver/customurlconf/slash/') @override_settings(APPEND_SLASH=True, PREPEND_WWW=True) def test_prepend_www_append_slash_slashless_custom_urlconf(self): request = self.rf.get('/customurlconf/slash') request.urlconf = 'middleware.extra_urls' r = CommonMiddleware().process_request(request) self.assertEqual(r.status_code, 301) self.assertEqual(r.url, 'http://www.testserver/customurlconf/slash/') # Other tests @override_settings(DISALLOWED_USER_AGENTS=[re.compile(r'foo')]) def test_disallowed_user_agents(self): request = self.rf.get('/slash') request.META['HTTP_USER_AGENT'] = 'foo' with self.assertRaisesMessage(PermissionDenied, 'Forbidden user agent'): CommonMiddleware().process_request(request) def test_non_ascii_query_string_does_not_crash(self): """Regression test for #15152""" request = self.rf.get('/slash') request.META['QUERY_STRING'] = force_str('drink=café') r = CommonMiddleware().process_request(request) self.assertIsNone(r) response = HttpResponseNotFound() r = CommonMiddleware().process_response(request, response) self.assertEqual(r.status_code, 301) def test_response_redirect_class(self): request = self.rf.get('/slash') response = HttpResponseNotFound() r = CommonMiddleware().process_response(request, response) self.assertEqual(r.status_code, 301) self.assertEqual(r.url, '/slash/') self.assertIsInstance(r, HttpResponsePermanentRedirect) def test_response_redirect_class_subclass(self): class MyCommonMiddleware(CommonMiddleware): response_redirect_class = HttpResponseRedirect request = self.rf.get('/slash') response = HttpResponseNotFound() r = MyCommonMiddleware().process_response(request, response) self.assertEqual(r.status_code, 302) self.assertEqual(r.url, '/slash/') self.assertIsInstance(r, HttpResponseRedirect) @override_settings( IGNORABLE_404_URLS=[re.compile(r'foo')], MANAGERS=['PHB@dilbert.com'], ) class BrokenLinkEmailsMiddlewareTest(SimpleTestCase): rf = RequestFactory() def setUp(self): self.req = self.rf.get('/regular_url/that/does/not/exist') self.resp = self.client.get(self.req.path) def test_404_error_reporting(self): self.req.META['HTTP_REFERER'] = '/another/url/' BrokenLinkEmailsMiddleware().process_response(self.req, self.resp) self.assertEqual(len(mail.outbox), 1) self.assertIn('Broken', mail.outbox[0].subject) def test_404_error_reporting_no_referer(self): BrokenLinkEmailsMiddleware().process_response(self.req, self.resp) self.assertEqual(len(mail.outbox), 0) def 
test_404_error_reporting_ignored_url(self): self.req.path = self.req.path_info = 'foo_url/that/does/not/exist' BrokenLinkEmailsMiddleware().process_response(self.req, self.resp) self.assertEqual(len(mail.outbox), 0) @skipIf(six.PY3, "HTTP_REFERER is str type on Python 3") def test_404_error_nonascii_referrer(self): # Such referer strings should not happen, but anyway, if it happens, # let's not crash self.req.META['HTTP_REFERER'] = b'http://testserver/c/\xd0\xbb\xd0\xb8/' BrokenLinkEmailsMiddleware().process_response(self.req, self.resp) self.assertEqual(len(mail.outbox), 1) @skipIf(six.PY3, "HTTP_USER_AGENT is str type on Python 3") def test_404_error_nonascii_user_agent(self): # Such user agent strings should not happen, but anyway, if it happens, # let's not crash self.req.META['HTTP_REFERER'] = '/another/url/' self.req.META['HTTP_USER_AGENT'] = b'\xd0\xbb\xd0\xb8\xff\xff' BrokenLinkEmailsMiddleware().process_response(self.req, self.resp) self.assertEqual(len(mail.outbox), 1) self.assertIn('User agent: \u043b\u0438\ufffd\ufffd\n', mail.outbox[0].body) def test_custom_request_checker(self): class SubclassedMiddleware(BrokenLinkEmailsMiddleware): ignored_user_agent_patterns = (re.compile(r'Spider.*'), re.compile(r'Robot.*')) def is_ignorable_request(self, request, uri, domain, referer): '''Check user-agent in addition to normal checks.''' if super(SubclassedMiddleware, self).is_ignorable_request(request, uri, domain, referer): return True user_agent = request.META['HTTP_USER_AGENT'] return any(pattern.search(user_agent) for pattern in self.ignored_user_agent_patterns) self.req.META['HTTP_REFERER'] = '/another/url/' self.req.META['HTTP_USER_AGENT'] = 'Spider machine 3.4' SubclassedMiddleware().process_response(self.req, self.resp) self.assertEqual(len(mail.outbox), 0) self.req.META['HTTP_USER_AGENT'] = 'My user agent' SubclassedMiddleware().process_response(self.req, self.resp) self.assertEqual(len(mail.outbox), 1) def test_referer_equal_to_requested_url(self): """ Some bots set the referer to the current URL to avoid being blocked by an referer check (#25302). 
""" self.req.META['HTTP_REFERER'] = self.req.path BrokenLinkEmailsMiddleware().process_response(self.req, self.resp) self.assertEqual(len(mail.outbox), 0) # URL with scheme and domain should also be ignored self.req.META['HTTP_REFERER'] = 'http://testserver%s' % self.req.path BrokenLinkEmailsMiddleware().process_response(self.req, self.resp) self.assertEqual(len(mail.outbox), 0) def test_referer_equal_to_requested_url_on_another_domain(self): self.req.META['HTTP_REFERER'] = 'http://anotherserver%s' % self.req.path BrokenLinkEmailsMiddleware().process_response(self.req, self.resp) self.assertEqual(len(mail.outbox), 1) @override_settings(ROOT_URLCONF='middleware.cond_get_urls') class ConditionalGetMiddlewareTest(SimpleTestCase): def setUp(self): self.req = RequestFactory().get('/') self.resp = self.client.get(self.req.path_info) # Tests for the Date header def test_date_header_added(self): self.assertNotIn('Date', self.resp) self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertIn('Date', self.resp) # Tests for the Content-Length header def test_content_length_header_added(self): content_length = len(self.resp.content) self.assertNotIn('Content-Length', self.resp) self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertIn('Content-Length', self.resp) self.assertEqual(int(self.resp['Content-Length']), content_length) def test_content_length_header_not_added(self): resp = StreamingHttpResponse('content') self.assertNotIn('Content-Length', resp) resp = ConditionalGetMiddleware().process_response(self.req, resp) self.assertNotIn('Content-Length', resp) def test_content_length_header_not_changed(self): bad_content_length = len(self.resp.content) + 10 self.resp['Content-Length'] = bad_content_length self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertEqual(int(self.resp['Content-Length']), bad_content_length) # Tests for the ETag header def test_if_none_match_and_no_etag(self): self.req.META['HTTP_IF_NONE_MATCH'] = 'spam' self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertEqual(self.resp.status_code, 200) def test_no_if_none_match_and_etag(self): self.resp['ETag'] = 'eggs' self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertEqual(self.resp.status_code, 200) def test_if_none_match_and_same_etag(self): self.req.META['HTTP_IF_NONE_MATCH'] = self.resp['ETag'] = 'spam' self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertEqual(self.resp.status_code, 304) def test_if_none_match_and_different_etag(self): self.req.META['HTTP_IF_NONE_MATCH'] = 'spam' self.resp['ETag'] = 'eggs' self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertEqual(self.resp.status_code, 200) def test_if_none_match_and_redirect(self): self.req.META['HTTP_IF_NONE_MATCH'] = self.resp['ETag'] = 'spam' self.resp['Location'] = '/' self.resp.status_code = 301 self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertEqual(self.resp.status_code, 301) def test_if_none_match_and_client_error(self): self.req.META['HTTP_IF_NONE_MATCH'] = self.resp['ETag'] = 'spam' self.resp.status_code = 400 self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertEqual(self.resp.status_code, 400) @override_settings(USE_ETAGS=True) def test_etag(self): req = HttpRequest() res = HttpResponse('content') self.assertTrue( CommonMiddleware().process_response(req, 
res).has_header('ETag')) @override_settings(USE_ETAGS=True) def test_etag_streaming_response(self): req = HttpRequest() res = StreamingHttpResponse(['content']) res['ETag'] = 'tomatoes' self.assertEqual( CommonMiddleware().process_response(req, res).get('ETag'), 'tomatoes') @override_settings(USE_ETAGS=True) def test_no_etag_streaming_response(self): req = HttpRequest() res = StreamingHttpResponse(['content']) self.assertFalse( CommonMiddleware().process_response(req, res).has_header('ETag')) # Tests for the Last-Modified header def test_if_modified_since_and_no_last_modified(self): self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT' self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertEqual(self.resp.status_code, 200) def test_no_if_modified_since_and_last_modified(self): self.resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:38:44 GMT' self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertEqual(self.resp.status_code, 200) def test_if_modified_since_and_same_last_modified(self): self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT' self.resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:38:44 GMT' self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertEqual(self.resp.status_code, 304) def test_if_modified_since_and_last_modified_in_the_past(self): self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT' self.resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:35:44 GMT' self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertEqual(self.resp.status_code, 304) def test_if_modified_since_and_last_modified_in_the_future(self): self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT' self.resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:41:44 GMT' self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertEqual(self.resp.status_code, 200) def test_if_modified_since_and_redirect(self): self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT' self.resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:35:44 GMT' self.resp['Location'] = '/' self.resp.status_code = 301 self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertEqual(self.resp.status_code, 301) def test_if_modified_since_and_client_error(self): self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT' self.resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:35:44 GMT' self.resp.status_code = 400 self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertEqual(self.resp.status_code, 400) class XFrameOptionsMiddlewareTest(SimpleTestCase): """ Tests for the X-Frame-Options clickjacking prevention middleware. """ def test_same_origin(self): """ Tests that the X_FRAME_OPTIONS setting can be set to SAMEORIGIN to have the middleware use that value for the HTTP header. """ with override_settings(X_FRAME_OPTIONS='SAMEORIGIN'): r = XFrameOptionsMiddleware().process_response(HttpRequest(), HttpResponse()) self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN') with override_settings(X_FRAME_OPTIONS='sameorigin'): r = XFrameOptionsMiddleware().process_response(HttpRequest(), HttpResponse()) self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN') def test_deny(self): """ Tests that the X_FRAME_OPTIONS setting can be set to DENY to have the middleware use that value for the HTTP header. 
""" with override_settings(X_FRAME_OPTIONS='DENY'): r = XFrameOptionsMiddleware().process_response(HttpRequest(), HttpResponse()) self.assertEqual(r['X-Frame-Options'], 'DENY') with override_settings(X_FRAME_OPTIONS='deny'): r = XFrameOptionsMiddleware().process_response(HttpRequest(), HttpResponse()) self.assertEqual(r['X-Frame-Options'], 'DENY') def test_defaults_sameorigin(self): """ Tests that if the X_FRAME_OPTIONS setting is not set then it defaults to SAMEORIGIN. """ with override_settings(X_FRAME_OPTIONS=None): del settings.X_FRAME_OPTIONS # restored by override_settings r = XFrameOptionsMiddleware().process_response(HttpRequest(), HttpResponse()) self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN') def test_dont_set_if_set(self): """ Tests that if the X-Frame-Options header is already set then the middleware does not attempt to override it. """ with override_settings(X_FRAME_OPTIONS='DENY'): response = HttpResponse() response['X-Frame-Options'] = 'SAMEORIGIN' r = XFrameOptionsMiddleware().process_response(HttpRequest(), response) self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN') with override_settings(X_FRAME_OPTIONS='SAMEORIGIN'): response = HttpResponse() response['X-Frame-Options'] = 'DENY' r = XFrameOptionsMiddleware().process_response(HttpRequest(), response) self.assertEqual(r['X-Frame-Options'], 'DENY') def test_response_exempt(self): """ Tests that if the response has a xframe_options_exempt attribute set to False then it still sets the header, but if it's set to True then it does not. """ with override_settings(X_FRAME_OPTIONS='SAMEORIGIN'): response = HttpResponse() response.xframe_options_exempt = False r = XFrameOptionsMiddleware().process_response(HttpRequest(), response) self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN') response = HttpResponse() response.xframe_options_exempt = True r = XFrameOptionsMiddleware().process_response(HttpRequest(), response) self.assertEqual(r.get('X-Frame-Options', None), None) def test_is_extendable(self): """ Tests that the XFrameOptionsMiddleware method that determines the X-Frame-Options header value can be overridden based on something in the request or response. """ class OtherXFrameOptionsMiddleware(XFrameOptionsMiddleware): # This is just an example for testing purposes... def get_xframe_options_value(self, request, response): if getattr(request, 'sameorigin', False): return 'SAMEORIGIN' if getattr(response, 'sameorigin', False): return 'SAMEORIGIN' return 'DENY' with override_settings(X_FRAME_OPTIONS='DENY'): response = HttpResponse() response.sameorigin = True r = OtherXFrameOptionsMiddleware().process_response(HttpRequest(), response) self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN') request = HttpRequest() request.sameorigin = True r = OtherXFrameOptionsMiddleware().process_response(request, HttpResponse()) self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN') with override_settings(X_FRAME_OPTIONS='SAMEORIGIN'): r = OtherXFrameOptionsMiddleware().process_response(HttpRequest(), HttpResponse()) self.assertEqual(r['X-Frame-Options'], 'DENY') class GZipMiddlewareTest(SimpleTestCase): """ Tests the GZip middleware. """ short_string = b"This string is too short to be worth compressing." 
compressible_string = b'a' * 500 uncompressible_string = b''.join(six.int2byte(random.randint(0, 255)) for _ in range(500)) sequence = [b'a' * 500, b'b' * 200, b'a' * 300] sequence_unicode = ['a' * 500, 'é' * 200, 'a' * 300] def setUp(self): self.req = RequestFactory().get('/') self.req.META['HTTP_ACCEPT_ENCODING'] = 'gzip, deflate' self.req.META['HTTP_USER_AGENT'] = 'Mozilla/5.0 (Windows NT 5.1; rv:9.0.1) Gecko/20100101 Firefox/9.0.1' self.resp = HttpResponse() self.resp.status_code = 200 self.resp.content = self.compressible_string self.resp['Content-Type'] = 'text/html; charset=UTF-8' self.stream_resp = StreamingHttpResponse(self.sequence) self.stream_resp['Content-Type'] = 'text/html; charset=UTF-8' self.stream_resp_unicode = StreamingHttpResponse(self.sequence_unicode) self.stream_resp_unicode['Content-Type'] = 'text/html; charset=UTF-8' @staticmethod def decompress(gzipped_string): with gzip.GzipFile(mode='rb', fileobj=BytesIO(gzipped_string)) as f: return f.read() def test_compress_response(self): """ Tests that compression is performed on responses with compressible content. """ r = GZipMiddleware().process_response(self.req, self.resp) self.assertEqual(self.decompress(r.content), self.compressible_string) self.assertEqual(r.get('Content-Encoding'), 'gzip') self.assertEqual(r.get('Content-Length'), str(len(r.content))) def test_compress_streaming_response(self): """ Tests that compression is performed on responses with streaming content. """ r = GZipMiddleware().process_response(self.req, self.stream_resp) self.assertEqual(self.decompress(b''.join(r)), b''.join(self.sequence)) self.assertEqual(r.get('Content-Encoding'), 'gzip') self.assertFalse(r.has_header('Content-Length')) def test_compress_streaming_response_unicode(self): """ Tests that compression is performed on responses with streaming Unicode content. """ r = GZipMiddleware().process_response(self.req, self.stream_resp_unicode) self.assertEqual(self.decompress(b''.join(r)), b''.join(x.encode('utf-8') for x in self.sequence_unicode)) self.assertEqual(r.get('Content-Encoding'), 'gzip') self.assertFalse(r.has_header('Content-Length')) def test_compress_file_response(self): """ Tests that compression is performed on FileResponse. """ open_file = lambda: open(__file__, 'rb') with open_file() as file1: file_resp = FileResponse(file1) file_resp['Content-Type'] = 'text/html; charset=UTF-8' r = GZipMiddleware().process_response(self.req, file_resp) with open_file() as file2: self.assertEqual(self.decompress(b''.join(r)), file2.read()) self.assertEqual(r.get('Content-Encoding'), 'gzip') self.assertIsNot(r.file_to_stream, file1) def test_compress_non_200_response(self): """ Tests that compression is performed on responses with a status other than 200. See #10762. """ self.resp.status_code = 404 r = GZipMiddleware().process_response(self.req, self.resp) self.assertEqual(self.decompress(r.content), self.compressible_string) self.assertEqual(r.get('Content-Encoding'), 'gzip') def test_no_compress_short_response(self): """ Tests that compression isn't performed on responses with short content. """ self.resp.content = self.short_string r = GZipMiddleware().process_response(self.req, self.resp) self.assertEqual(r.content, self.short_string) self.assertEqual(r.get('Content-Encoding'), None) def test_no_compress_compressed_response(self): """ Tests that compression isn't performed on responses that are already compressed. 
""" self.resp['Content-Encoding'] = 'deflate' r = GZipMiddleware().process_response(self.req, self.resp) self.assertEqual(r.content, self.compressible_string) self.assertEqual(r.get('Content-Encoding'), 'deflate') def test_no_compress_uncompressible_response(self): """ Tests that compression isn't performed on responses with uncompressible content. """ self.resp.content = self.uncompressible_string r = GZipMiddleware().process_response(self.req, self.resp) self.assertEqual(r.content, self.uncompressible_string) self.assertEqual(r.get('Content-Encoding'), None) @override_settings(USE_ETAGS=True) class ETagGZipMiddlewareTest(SimpleTestCase): """ Tests if the ETag middleware behaves correctly with GZip middleware. """ rf = RequestFactory() compressible_string = b'a' * 500 def test_compress_response(self): """ Tests that ETag is changed after gzip compression is performed. """ request = self.rf.get('/', HTTP_ACCEPT_ENCODING='gzip, deflate') response = GZipMiddleware().process_response(request, CommonMiddleware().process_response(request, HttpResponse(self.compressible_string))) gzip_etag = response.get('ETag') request = self.rf.get('/', HTTP_ACCEPT_ENCODING='') response = GZipMiddleware().process_response(request, CommonMiddleware().process_response(request, HttpResponse(self.compressible_string))) nogzip_etag = response.get('ETag') self.assertNotEqual(gzip_etag, nogzip_etag) # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import json import time from charmhelpers.core import hookenv from charms.layer.apache_bigtop_base import get_package_version from charms.layer.bigtop_zookeeper import Zookeeper from charms.leadership import leader_set, leader_get from charms.reactive import set_state, when, when_not, is_state from charms.reactive.helpers import data_changed @when('bigtop.available') @when_not('zookeeper.installed') def install_zookeeper(): ''' After Bigtop has done the initial setup, trigger a puppet install, via our Zooekeeper library. puppet will start the service, as a side effect. ''' hookenv.status_set('maintenance', 'installing zookeeper') zookeeper = Zookeeper() # Prime data changed data_changed('zkpeer.nodes', zookeeper.read_peers()) data_changed( 'zk.network_interface', hookenv.config().get('network_interface')) zookeeper.install() zookeeper.open_ports() set_state('zookeeper.installed') set_state('zookeeper.started') hookenv.status_set('active', 'ready {}'.format(zookeeper.quorum_check())) # set app version string for juju status output zoo_version = get_package_version('zookeeper') or 'unknown' hookenv.application_version_set(zoo_version) def _restart_zookeeper(msg): ''' Restart Zookeeper by re-running the puppet scripts. 
''' hookenv.status_set('maintenance', msg) zookeeper = Zookeeper() zookeeper.install() hookenv.status_set('active', 'ready {}'.format(zookeeper.quorum_check())) @when('zookeeper.started') def update_network_interface(): ''' Possibly restart zookeeper, due to the network interface that it should listen on changing. ''' network_interface = hookenv.config().get('network_interface') if data_changed('zk.network_interface', network_interface): _restart_zookeeper('updating network interface') @when('zookeeper.started', 'zookeeper.joined') def serve_client(client): config = Zookeeper().dist_config port = config.port('zookeeper') rest_port = config.port('zookeeper-rest') # TODO: add zookeeper REST client.send_port(port, rest_port) # # Rolling restart -- helpers and handlers # # When we add or remove a Zookeeper peer, Zookeeper needs to perform a # rolling restart of all of its peers, restarting the Zookeeper # "leader" last. # # The following functions accomplish this. Here's how they all fit together: # # (As you read, keep in mind that one node functions as the "leader" # in the context of Juju, and one node functions as the "leader" in # the context of Zookeeper; these nodes may or may not be the same.) # # 0. Whenever the Zookeeper server starts, it attempts to determine # whether it is the Zookeeper leader. If so, it sets a flag on the # Juju peer relation data. # # 1. When a node is added or remove from the cluster, the Juju leader # runs `check_cluster`, and generates a "restart queue" comprising # nodes in the cluster, with the Zookeeper lead node sorted last in # the queue. It also sets a nonce, to identify this restart queue # uniquely, and thus handle the situation where another node is # added or restarted while we're still reacting to the first node's # addition or removal. The leader drops the queue and nonce into # the leadership data as "restart_queue" and "restart_nonce", # respectively. # # 2. When any node detects a leadership.changed.restart_queue event, # it runs `restart_for_quorum`, which is a noop unless the node's # private address is the first element of the restart queue. In # that case, if the node is the Juju leader, it will restart, then # remove itself from the restart queue, triggering another # leadership.changed.restart_queue event. If the node isn't the # Juju leader, it will restart itself, then run `inform_restart`. # # 3. `inform_restart` will create a relation data changed event, which # triggers `update_restart_queue` to run on the leader. This method # will update the restart_queue, clearing any nodes that have # restarted for the current nonce, and looping us back to step 2. # # 4. Once all the nodes have restarted, we should be in the following state: # # * All nodes have an updated Zookeeper server running with the new # * peer data. # # * The Zookeeper leader has restarted last, which should help # prevent orphaned jobs, per the Zookeeper docs. # # * peers still have zkpeer.restarted. set on their relation # data. This is okay, as we will generate a new nonce next time, # and the data is small. # # Edge cases and potential bugs: # # 1. Juju leader changes in the middle of a restart: this gets a # little bit dicey, but it should work. The new leader should run # `check_cluster_departed`, and start a new restart_queue. # def _ip_list(nodes): ''' Given a list of nodes, in the format that our peer relation or zookeeper lib will typically return node lists in, make a list of just the ips (stripping ports, if they have been added). 
We expect the list we passed in to look something like this:

        [('zookeeper/0', '10.0.0.4'), ('zookeeper/1', '10.0.0.5')]

    or this:

        [('0', '10.0.0.4:2888:4888'), ('1', '10.0.0.5:2888:4888')]

    We will return a list in the form:

        ['10.0.0.4', '10.0.0.5']

    '''
    return [node[1].split(':')[0] for node in nodes]


@when('zookeeper.started', 'leadership.is_leader', 'zkpeer.joined')
@when_not('zkpeer.departed')
def check_cluster(zkpeer):
    '''
    Check up on the state of the cluster. Start a rolling restart if
    the peers have changed.

    '''
    zk = Zookeeper()
    if data_changed('zkpeer.nodes', zk.read_peers()):
        peers = _ip_list(zk.sort_peers(zkpeer))
        nonce = time.time()
        hookenv.log('Quorum changed. Restart queue: {}'.format(peers))
        leader_set(
            restart_queue=json.dumps(peers),
            restart_nonce=json.dumps(nonce)
        )


@when('zookeeper.started', 'leadership.is_leader',
      'zkpeer.joined', 'zkpeer.departed')
def check_cluster_departed(zkpeer, zkpeer_departed):
    '''
    Wrapper around check_cluster.

    Together with check_cluster, implements the following logic:
    "Run this when zkpeer.joined and zkpeer.departed, or zkpeer.joined
    and not zkpeer.departed"

    '''
    check_cluster(zkpeer)


@when('leadership.changed.restart_queue', 'zkpeer.joined')
def restart_for_quorum(zkpeer):
    '''
    If we're the next node in the restart queue, restart, and then
    inform the leader that we've restarted. (If we are the leader,
    remove ourselves from the queue, and update the leadership data.)

    '''
    private_address = hookenv.unit_get('private-address')
    queue = json.loads(leader_get('restart_queue') or '[]')
    if not queue:
        # Everything has restarted.
        return
    if private_address == queue[0]:
        # It's our turn to restart.
        _restart_zookeeper('rolling restart for quorum update')
        if is_state('leadership.is_leader'):
            queue = queue[1:]
            hookenv.log('Leader updating restart queue: {}'.format(queue))
            leader_set(restart_queue=json.dumps(queue))
        else:
            zkpeer.inform_restart()


@when('leadership.is_leader', 'zkpeer.joined')
def update_restart_queue(zkpeer):
    '''
    If a Zookeeper node has restarted as part of a rolling restart,
    pop it off of the queue.

    '''
    queue = json.loads(leader_get('restart_queue') or '[]')
    if not queue:
        return
    restarted_nodes = _ip_list(zkpeer.restarted_nodes())
    new_queue = [node for node in queue if node not in restarted_nodes]
    if new_queue != queue:
        # Log the updated queue (not the stale one) so the hook log
        # matches what is actually written back to leadership data.
        hookenv.log('Leader updating restart queue: {}'.format(new_queue))
        leader_set(restart_queue=json.dumps(new_queue))


#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
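# Illustrative sketch (not part of the original charm): stripped of Juju,
# the rolling-restart queue above reduces to draining a JSON-encoded list
# of peer IPs, head first. `leader_data` is a hypothetical stand-in for
# the leadership data bag used by leader_set()/leader_get().
import json

leader_data = {'restart_queue': json.dumps(['10.0.0.4', '10.0.0.5'])}


def pop_if_my_turn(private_address):
    # Mirrors restart_for_quorum(): only the head of the queue may
    # restart; it then removes itself, which hands the turn to the
    # next unit via the leadership.changed.restart_queue event.
    queue = json.loads(leader_data.get('restart_queue') or '[]')
    if queue and queue[0] == private_address:
        leader_data['restart_queue'] = json.dumps(queue[1:])
        return True    # this unit restarts now
    return False       # not our turn yet


assert pop_if_my_turn('10.0.0.5') is False   # not at the head of the queue
assert pop_if_my_turn('10.0.0.4') is True    # head restarts and pops itself
assert json.loads(leader_data['restart_queue']) == ['10.0.0.5']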
import cStringIO import logging import os import sys import unittest ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) sys.path.insert(0, ROOT_DIR) from proc_maps import ProcMaps class ProcMapsTest(unittest.TestCase): _TEST_PROCMAPS = '\n'.join([ '00000000-00001000 r--p 00000000 fc:00 0', '0080b000-0080c000 r-xp 0020b000 fc:00 2231329' ' /usr/bin/some', '0080c000-0080f000 ---p 0020c000 fc:00 2231329' ' /usr/bin/some', '0100a000-0100c000 r-xp 0120a000 fc:00 22381' ' /usr/bin/chrome', '0100c000-0100f000 ---p 0120c000 fc:00 22381' ' /usr/bin/chrome', '0237d000-02a9b000 rw-p 00000000 00:00 0' ' [heap]', '7fb920e6d000-7fb920e85000 r-xp 00000000 fc:00 263482' ' /lib/x86_64-linux-gnu/libpthread-2.15.so', '7fb920e85000-7fb921084000 ---p 00018000 fc:00 263482' ' /lib/x86_64-linux-gnu/libpthread-2.15.so', '7fb9225f4000-7fb922654000 rw-s 00000000 00:04 19660808' ' /SYSV00000000 (deleted)', 'ffffffffff600000-ffffffffff601000 r-xp 00000000 00:00 0' ' [vsyscall]', ]) _EXPECTED = [ (0x0, 0x1000, 'r', '-', '-', 'p', 0x0, 'fc', '00', 0, ''), (0x80b000, 0x80c000, 'r', '-', 'x', 'p', 0x20b000, 'fc', '00', 2231329, '/usr/bin/some'), (0x80c000, 0x80f000, '-', '-', '-', 'p', 0x20c000, 'fc', '00', 2231329, '/usr/bin/some'), (0x100a000, 0x100c000, 'r', '-', 'x', 'p', 0x120a000, 'fc', '00', 22381, '/usr/bin/chrome'), (0x100c000, 0x100f000, '-', '-', '-', 'p', 0x120c000, 'fc', '00', 22381, '/usr/bin/chrome'), (0x237d000, 0x2a9b000, 'r', 'w', '-', 'p', 0x0, '00', '00', 0, '[heap]'), (0x7fb920e6d000, 0x7fb920e85000, 'r', '-', 'x', 'p', 0x0, 'fc', '00', 263482, '/lib/x86_64-linux-gnu/libpthread-2.15.so'), (0x7fb920e85000, 0x7fb921084000, '-', '-', '-', 'p', 0x18000, 'fc', '00', 263482, '/lib/x86_64-linux-gnu/libpthread-2.15.so'), (0x7fb9225f4000, 0x7fb922654000, 'r', 'w', '-', 's', 0x0, '00', '04', 19660808, '/SYSV00000000 (deleted)'), (0xffffffffff600000, 0xffffffffff601000, 'r', '-', 'x', 'p', 0x0, '00', '00', 0, '[vsyscall]'), ] @staticmethod def _expected_as_dict(index): return { 'begin': ProcMapsTest._EXPECTED[index][0], 'end': ProcMapsTest._EXPECTED[index][1], 'readable': ProcMapsTest._EXPECTED[index][2], 'writable': ProcMapsTest._EXPECTED[index][3], 'executable': ProcMapsTest._EXPECTED[index][4], 'private': ProcMapsTest._EXPECTED[index][5], 'offset': ProcMapsTest._EXPECTED[index][6], 'major': ProcMapsTest._EXPECTED[index][7], 'minor': ProcMapsTest._EXPECTED[index][8], 'inode': ProcMapsTest._EXPECTED[index][9], 'name': ProcMapsTest._EXPECTED[index][10], } def test_load(self): maps = ProcMaps.load(cStringIO.StringIO(self._TEST_PROCMAPS)) for index, entry in enumerate(maps): self.assertEqual(entry.as_dict(), self._expected_as_dict(index)) def test_constants(self): maps = ProcMaps.load(cStringIO.StringIO(self._TEST_PROCMAPS)) selected = [4, 7] for index, entry in enumerate(maps.iter(ProcMaps.constants)): self.assertEqual(entry.as_dict(), self._expected_as_dict(selected[index])) def test_executable(self): maps = ProcMaps.load(cStringIO.StringIO(self._TEST_PROCMAPS)) selected = [3, 6] for index, entry in enumerate(maps.iter(ProcMaps.executable)): self.assertEqual(entry.as_dict(), self._expected_as_dict(selected[index])) def test_executable_and_constants(self): maps = ProcMaps.load(cStringIO.StringIO(self._TEST_PROCMAPS)) selected = [3, 4, 6, 7] for index, entry in enumerate(maps.iter(ProcMaps.executable_and_constants)): self.assertEqual(entry.as_dict(), self._expected_as_dict(selected[index])) if __name__ == '__main__': logging.basicConfig( level=logging.DEBUG if '-v' in sys.argv else 
logging.ERROR, format='%(levelname)5s %(filename)15s(%(lineno)3d): %(message)s') unittest.main() # -*- coding: utf-8 -*- # # Copyright (C) 2015 Matt Martz # Copyright (C) 2015 Rackspace US, Inc. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . import ast import sys from io import BytesIO, TextIOWrapper import yaml import yaml.reader def find_globals(g, tree): """Uses AST to find globals in an ast tree""" for child in tree: if hasattr(child, 'body') and isinstance(child.body, list): find_globals(g, child.body) elif isinstance(child, (ast.FunctionDef, ast.ClassDef)): g.add(child.name) continue elif isinstance(child, ast.Assign): try: g.add(child.targets[0].id) except (IndexError, AttributeError): pass elif isinstance(child, ast.Import): g.add(child.names[0].name) elif isinstance(child, ast.ImportFrom): for name in child.names: g_name = name.asname or name.name if g_name == '*': continue g.add(g_name) class CaptureStd(): """Context manager to handle capturing stderr and stdout""" def __enter__(self): self.sys_stdout = sys.stdout self.sys_stderr = sys.stderr sys.stdout = self.stdout = TextIOWrapper(BytesIO(), encoding=self.sys_stdout.encoding) sys.stderr = self.stderr = TextIOWrapper(BytesIO(), encoding=self.sys_stderr.encoding) return self def __exit__(self, exc_type, exc_value, traceback): sys.stdout = self.sys_stdout sys.stderr = self.sys_stderr def get(self): """Return ``(stdout, stderr)``""" return self.stdout.getvalue(), self.stderr.getvalue() def parse_yaml(value, lineno, module, name, load_all=False): traces = [] errors = [] data = None if load_all: loader = yaml.safe_load_all else: loader = yaml.safe_load try: data = loader(value) if load_all: data = list(data) except yaml.MarkedYAMLError as e: e.problem_mark.line += lineno - 1 e.problem_mark.name = '%s.%s' % (module, name) errors.append({ 'msg': '%s is not valid YAML' % name, 'line': e.problem_mark.line + 1, 'column': e.problem_mark.column + 1 }) traces.append(e) except yaml.reader.ReaderError as e: traces.append(e) # TODO: Better line/column detection errors.append({ 'msg': ('%s is not valid YAML. Character ' '0x%x at position %d.' 
% (name, e.character, e.position)), 'line': lineno }) except yaml.YAMLError as e: traces.append(e) errors.append({ 'msg': '%s is not valid YAML: %s: %s' % (name, type(e), e), 'line': lineno }) return data, errors, traces #!/usr/bin/env python2.7 # pylint: disable=C0301 from __future__ import absolute_import, unicode_literals, print_function, division from sys import argv from os import environ, stat, chdir, remove as _delete_file from os.path import dirname, basename, abspath, realpath, expandvars from hashlib import sha256 from subprocess import check_call as run from json import load, dump as save from contextlib import contextmanager from datetime import datetime from boto.s3.connection import S3Connection from boto.s3.key import Key from boto.exception import S3ResponseError CONFIG_FILE = './S3Cachefile.json' UPLOAD_TODO_FILE = './S3CacheTodo.json' BYTES_PER_MB = 1024 * 1024 @contextmanager def timer(): start = datetime.utcnow() yield end = datetime.utcnow() elapsed = end - start print("\tDone. Took", int(elapsed.total_seconds()), "second(s).") @contextmanager def todo_file(writeback=True): try: with open(UPLOAD_TODO_FILE, 'rt') as json_file: todo = load(json_file) except (IOError, OSError, ValueError): todo = {} yield todo if writeback: try: with open(UPLOAD_TODO_FILE, 'wt') as json_file: save(todo, json_file) except (OSError, IOError) as save_err: print("Error saving {}:".format(UPLOAD_TODO_FILE), save_err) def _sha256_of_file(filename): hasher = sha256() with open(filename, 'rb') as input_file: hasher.update(input_file.read()) file_hash = hasher.hexdigest() print('sha256({}) = {}'.format(filename, file_hash)) return file_hash def _delete_file_quietly(filename): try: _delete_file(filename) except (OSError, IOError): pass def mark_needs_uploading(cache_name): with todo_file() as todo: todo[cache_name] = True def mark_uploaded(cache_name): with todo_file() as todo: todo.pop(cache_name, None) def need_to_upload(cache_name): with todo_file(writeback=False) as todo: return todo.get(cache_name, False) def _tarball_size(directory): kib = stat(_tarball_filename_for(directory)).st_size // BYTES_PER_MB return "{} MiB".format(kib) def _tarball_filename_for(directory): return abspath('./{}.tar.gz'.format(basename(directory))) def _create_tarball(directory): print("Creating tarball of {}...".format(directory)) with timer(): run(['tar', '-czf', _tarball_filename_for(directory), '-C', dirname(directory), basename(directory)]) def _extract_tarball(directory): print("Extracting tarball of {}...".format(directory)) with timer(): run(['tar', '-xzf', _tarball_filename_for(directory), '-C', dirname(directory)]) def download(directory): mark_uploaded(cache_name) # reset try: print("Downloading {} tarball from S3...".format(cache_name)) with timer(): key.get_contents_to_filename(_tarball_filename_for(directory)) except S3ResponseError as err: mark_needs_uploading(cache_name) raise SystemExit("Cached {} download failed!".format(cache_name)) print("Downloaded {}.".format(_tarball_size(directory))) _extract_tarball(directory) print("{} successfully installed from cache.".format(cache_name)) def upload(directory): _create_tarball(directory) print("Uploading {} tarball to S3... 
({})".format(cache_name, _tarball_size(directory))) with timer(): key.set_contents_from_filename(_tarball_filename_for(directory)) print("{} cache successfully updated.".format(cache_name)) mark_uploaded(cache_name) if __name__ == '__main__': # Uses environment variables: # AWS_ACCESS_KEY_ID -- AWS Access Key ID # AWS_SECRET_ACCESS_KEY -- AWS Secret Access Key argv.pop(0) if len(argv) != 2: raise SystemExit("USAGE: s3_cache.py ") mode, cache_name = argv script_dir = dirname(realpath(__file__)) chdir(script_dir) try: with open(CONFIG_FILE, 'rt') as config_file: config = load(config_file) except (IOError, OSError, ValueError) as config_err: print(config_err) raise SystemExit("Error when trying to load config from JSON file!") try: cache_info = config[cache_name] key_file = expandvars(cache_info["key"]) fallback_cmd = cache_info["generate"] directory = expandvars(cache_info["cache"]) except (TypeError, KeyError) as load_err: print(load_err) raise SystemExit("Config for cache named {!r} is missing or malformed!".format(cache_name)) try: try: BUCKET_NAME = environ['TWBS_S3_BUCKET'] except KeyError: raise SystemExit("TWBS_S3_BUCKET environment variable not set!") conn = S3Connection() bucket = conn.lookup(BUCKET_NAME) if bucket is None: raise SystemExit("Could not access bucket!") key_file_hash = _sha256_of_file(key_file) key = Key(bucket, key_file_hash) key.storage_class = 'REDUCED_REDUNDANCY' if mode == 'download': download(directory) elif mode == 'upload': if need_to_upload(cache_name): upload(directory) else: print("No need to upload anything.") else: raise SystemExit("Unrecognized mode {!r}".format(mode)) except BaseException as exc: if mode != 'download': raise print("Error!:", exc) print("Unable to download from cache.") print("Running fallback command to generate cache directory {!r}: {}".format(directory, fallback_cmd)) with timer(): run(fallback_cmd, shell=True) #!/usr/bin/env python # # Copyright 2005,2007,2011 Free Software Foundation, Inc. # # This file is part of GNU Radio # # GNU Radio is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3, or (at your option) # any later version. # # GNU Radio is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with GNU Radio; see the file COPYING. If not, write to # the Free Software Foundation, Inc., 51 Franklin Street, # Boston, MA 02110-1301, USA. # from gnuradio import gr from gnuradio import audio from gnuradio import blocks from gnuradio import vocoder def build_graph(): tb = gr.top_block() src = audio.source(8000) src_scale = blocks.multiply_const_ff(32767) f2s = blocks.float_to_short() enc = vocoder.gsm_fr_encode_sp() dec = vocoder.gsm_fr_decode_ps() s2f = blocks.short_to_float() sink_scale = blocks.multiply_const_ff(1.0/32767.) sink = audio.sink(8000) tb.connect(src, src_scale, f2s, enc, dec, s2f, sink_scale, sink) return tb if __name__ == '__main__': tb = build_graph() tb.start() raw_input ('Press Enter to exit: ') tb.stop() tb.wait() # Copyright (c) 2010-2013 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. class ClientException(Exception): def __init__(self, msg, http_scheme='', http_host='', http_port='', http_path='', http_query='', http_status=0, http_reason='', http_device='', http_response_content=''): Exception.__init__(self, msg) self.msg = msg self.http_scheme = http_scheme self.http_host = http_host self.http_port = http_port self.http_path = http_path self.http_query = http_query self.http_status = http_status self.http_reason = http_reason self.http_device = http_device self.http_response_content = http_response_content def __str__(self): a = self.msg b = '' if self.http_scheme: b += '%s://' % self.http_scheme if self.http_host: b += self.http_host if self.http_port: b += ':%s' % self.http_port if self.http_path: b += self.http_path if self.http_query: b += '?%s' % self.http_query if self.http_status: if b: b = '%s %s' % (b, self.http_status) else: b = str(self.http_status) if self.http_reason: if b: b = '%s %s' % (b, self.http_reason) else: b = '- %s' % self.http_reason if self.http_device: if b: b = '%s: device %s' % (b, self.http_device) else: b = 'device %s' % self.http_device if self.http_response_content: if len(self.http_response_content) <= 60: b += ' %s' % self.http_response_content else: b += ' [first 60 chars of response] %s' \ % self.http_response_content[:60] return b and '%s: %s' % (a, b) or a from __future__ import unicode_literals from django.contrib.syndication.views import Feed as BaseFeed from django.utils.feedgenerator import Atom1Feed, Rss201rev2Feed class GeoFeedMixin(object): """ This mixin provides the necessary routines for SyndicationFeed subclasses to produce simple GeoRSS or W3C Geo elements. """ def georss_coords(self, coords): """ In GeoRSS coordinate pairs are ordered by lat/lon and separated by a single white space. Given a tuple of coordinates, this will return a unicode GeoRSS representation. """ return ' '.join('%f %f' % (coord[1], coord[0]) for coord in coords) def add_georss_point(self, handler, coords, w3c_geo=False): """ Adds a GeoRSS point with the given coords using the given handler. Handles the differences between simple GeoRSS and the more popular W3C Geo specification. """ if w3c_geo: lon, lat = coords[:2] handler.addQuickElement('geo:lat', '%f' % lat) handler.addQuickElement('geo:lon', '%f' % lon) else: handler.addQuickElement('georss:point', self.georss_coords((coords,))) def add_georss_element(self, handler, item, w3c_geo=False): """ This routine adds a GeoRSS XML element using the given item and handler. """ # Getting the Geometry object. geom = item.get('geometry', None) if geom is not None: if isinstance(geom, (list, tuple)): # Special case if a tuple/list was passed in. 
The tuple may be # a point or a box box_coords = None if isinstance(geom[0], (list, tuple)): # Box: ( (X0, Y0), (X1, Y1) ) if len(geom) == 2: box_coords = geom else: raise ValueError('Only should be two sets of coordinates.') else: if len(geom) == 2: # Point: (X, Y) self.add_georss_point(handler, geom, w3c_geo=w3c_geo) elif len(geom) == 4: # Box: (X0, Y0, X1, Y1) box_coords = (geom[:2], geom[2:]) else: raise ValueError('Only should be 2 or 4 numeric elements.') # If a GeoRSS box was given via tuple. if box_coords is not None: if w3c_geo: raise ValueError('Cannot use simple GeoRSS box in W3C Geo feeds.') handler.addQuickElement('georss:box', self.georss_coords(box_coords)) else: # Getting the lower-case geometry type. gtype = str(geom.geom_type).lower() if gtype == 'point': self.add_georss_point(handler, geom.coords, w3c_geo=w3c_geo) else: if w3c_geo: raise ValueError('W3C Geo only supports Point geometries.') # For formatting consistent w/the GeoRSS simple standard: # http://georss.org/1.0#simple if gtype in ('linestring', 'linearring'): handler.addQuickElement('georss:line', self.georss_coords(geom.coords)) elif gtype in ('polygon',): # Only support the exterior ring. handler.addQuickElement('georss:polygon', self.georss_coords(geom[0].coords)) else: raise ValueError('Geometry type "%s" not supported.' % geom.geom_type) # ### SyndicationFeed subclasses ### class GeoRSSFeed(Rss201rev2Feed, GeoFeedMixin): def rss_attributes(self): attrs = super(GeoRSSFeed, self).rss_attributes() attrs['xmlns:georss'] = 'http://www.georss.org/georss' return attrs def add_item_elements(self, handler, item): super(GeoRSSFeed, self).add_item_elements(handler, item) self.add_georss_element(handler, item) def add_root_elements(self, handler): super(GeoRSSFeed, self).add_root_elements(handler) self.add_georss_element(handler, self.feed) class GeoAtom1Feed(Atom1Feed, GeoFeedMixin): def root_attributes(self): attrs = super(GeoAtom1Feed, self).root_attributes() attrs['xmlns:georss'] = 'http://www.georss.org/georss' return attrs def add_item_elements(self, handler, item): super(GeoAtom1Feed, self).add_item_elements(handler, item) self.add_georss_element(handler, item) def add_root_elements(self, handler): super(GeoAtom1Feed, self).add_root_elements(handler) self.add_georss_element(handler, self.feed) class W3CGeoFeed(Rss201rev2Feed, GeoFeedMixin): def rss_attributes(self): attrs = super(W3CGeoFeed, self).rss_attributes() attrs['xmlns:geo'] = 'http://www.w3.org/2003/01/geo/wgs84_pos#' return attrs def add_item_elements(self, handler, item): super(W3CGeoFeed, self).add_item_elements(handler, item) self.add_georss_element(handler, item, w3c_geo=True) def add_root_elements(self, handler): super(W3CGeoFeed, self).add_root_elements(handler) self.add_georss_element(handler, self.feed, w3c_geo=True) # ### Feed subclass ### class Feed(BaseFeed): """ This is a subclass of the `Feed` from `django.contrib.syndication`. This allows users to define a `geometry(obj)` and/or `item_geometry(item)` methods on their own subclasses so that geo-referenced information may placed in the feed. 
""" feed_type = GeoRSSFeed def feed_extra_kwargs(self, obj): return {'geometry': self.__get_dynamic_attr('geometry', obj)} def item_extra_kwargs(self, item): return {'geometry': self.__get_dynamic_attr('item_geometry', item)} #!/usr/bin/python # # Copyright (c) 2016 Matt Davis, # Chris Houseknecht, # # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: azure_rm_resourcegroup_info version_added: "2.1" short_description: Get resource group facts description: - Get facts for a specific resource group or all resource groups. options: name: description: - Limit results to a specific resource group. tags: description: - Limit results by providing a list of tags. Format tags as 'key' or 'key:value'. list_resources: description: - List all resources under the resource group. - Note this will cost network overhead for each resource group. Suggest use this when I(name) set. version_added: "2.8" extends_documentation_fragment: - azure author: - Chris Houseknecht (@chouseknecht) - Matt Davis (@nitzmahone) ''' EXAMPLES = ''' - name: Get facts for one resource group azure_rm_resourcegroup_info: name: myResourceGroup - name: Get facts for all resource groups azure_rm_resourcegroup_info: - name: Get facts by tags azure_rm_resourcegroup_info: tags: - testing - foo:bar - name: Get facts for one resource group including resources it contains azure_rm_resourcegroup_info: name: myResourceGroup list_resources: yes ''' RETURN = ''' azure_resourcegroups: description: - List of resource group dicts. returned: always type: list contains: id: description: - Resource id. returned: always type: str sample: "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroup/myResourceGroup" name: description: - Resource group name. returned: always type: str sample: foo tags: description: - Tags assigned to resource group. returned: always type: dict sample: { "tag": "value" } resources: description: - List of resources under the resource group. returned: when I(list_resources=yes). type: list contains: id: description: - Resource id. returned: always type: str sample: "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.Compute/virtualMa chines/myVirtualMachine" name: description: - Resource name. returned: always type: str sample: myVirtualMachine location: description: - Resource region. returned: always type: str sample: eastus type: description: - Resource type. returned: always type: str sample: "Microsoft.Compute/virtualMachines" tags: description: - Tags to assign to the managed disk. 
returned: always type: dict sample: { "tag": "value" } ''' try: from msrestazure.azure_exceptions import CloudError except Exception: # This is handled in azure_rm_common pass from ansible.module_utils.azure_rm_common import AzureRMModuleBase AZURE_OBJECT_CLASS = 'ResourceGroup' class AzureRMResourceGroupInfo(AzureRMModuleBase): def __init__(self): self.module_arg_spec = dict( name=dict(type='str'), tags=dict(type='list'), list_resources=dict(type='bool') ) self.results = dict( changed=False, resourcegroups=[] ) self.name = None self.tags = None self.list_resources = None super(AzureRMResourceGroupInfo, self).__init__(self.module_arg_spec, supports_tags=False, facts_module=True) def exec_module(self, **kwargs): is_old_facts = self.module._name == 'azure_rm_resourcegroup_facts' if is_old_facts: self.module.deprecate("The 'azure_rm_resourcegroup_facts' module has been renamed to 'azure_rm_resourcegroup_info'", version='2.13') for key in self.module_arg_spec: setattr(self, key, kwargs[key]) if self.name: result = self.get_item() else: result = self.list_items() if self.list_resources: for item in result: item['resources'] = self.list_by_rg(item['name']) if is_old_facts: self.results['ansible_facts']['azure_resourcegroups'] = result self.results['resourcegroups'] = result return self.results def get_item(self): self.log('Get properties for {0}'.format(self.name)) item = None result = [] try: item = self.rm_client.resource_groups.get(self.name) except CloudError: pass if item and self.has_tags(item.tags, self.tags): result = [self.serialize_obj(item, AZURE_OBJECT_CLASS)] return result def list_items(self): self.log('List all items') try: response = self.rm_client.resource_groups.list() except CloudError as exc: self.fail("Failed to list all items - {0}".format(str(exc))) results = [] for item in response: if self.has_tags(item.tags, self.tags): results.append(self.serialize_obj(item, AZURE_OBJECT_CLASS)) return results def list_by_rg(self, name): self.log('List resources under resource group') results = [] try: response = self.rm_client.resources.list_by_resource_group(name) while True: results.append(response.next().as_dict()) except StopIteration: pass except CloudError as exc: self.fail('Error when listing resources under resource group {0}: {1}'.format(name, exc.message or str(exc))) return results def main(): AzureRMResourceGroupInfo() if __name__ == '__main__': main() # -*- coding: utf-8 -*- # Automatic provisioning of EC2 placement groups import boto.ec2.placementgroup import nixops.resources import nixops.util import nixops.ec2_utils class EC2PlacementGroupDefinition(nixops.resources.ResourceDefinition): """Definition of an EC2 placement group.""" @classmethod def get_type(cls): return "ec2-placement-group" @classmethod def get_resource_type(cls): return "ec2PlacementGroups" def __init__(self, xml): super(EC2PlacementGroupDefinition, self).__init__(xml) self.placement_group_name = xml.find("attrs/attr[@name='name']/string").get("value") self.placement_group_strategy = xml.find("attrs/attr[@name='strategy']/string").get("value") self.region = xml.find("attrs/attr[@name='region']/string").get("value") self.access_key_id = xml.find("attrs/attr[@name='accessKeyId']/string").get("value") def show_type(self): return "{0} [{1}]".format(self.get_type(), self.region) class EC2PlacementGroupState(nixops.resources.ResourceState): """State of an EC2 placement group.""" region = nixops.util.attr_property("ec2.region", None) placement_group_name = nixops.util.attr_property("ec2.placementGroupName", 
None) placement_group_strategy = nixops.util.attr_property("ec2.placementGroupStrategy", None) old_placement_groups = nixops.util.attr_property("ec2.oldPlacementGroups", [], 'json') access_key_id = nixops.util.attr_property("ec2.accessKeyId", None) @classmethod def get_type(cls): return "ec2-placement-group" def __init__(self, depl, name, id): super(EC2PlacementGroupState, self).__init__(depl, name, id) self._conn = None def show_type(self): s = super(EC2PlacementGroupState, self).show_type() if self.region: s = "{0} [{1}]".format(s, self.region) return s def prefix_definition(self, attr): return {('resources', 'ec2PlacementGroups'): attr} def get_physical_spec(self): return {} @property def resource_id(self): return self.placement_group_name def _connect(self): if self._conn: return self._conn = nixops.ec2_utils.connect(self.region, self.access_key_id) def create(self, defn, check, allow_reboot, allow_recreate): # Name or region change means a completely new security group if self.placement_group_name and (defn.placement_group_name != self.placement_group_name or defn.region != self.region): with self.depl._db: self.state = self.UNKNOWN self.old_placement_groups = self.old_placement_groups + [{'name': self.placement_group_name, 'region': self.region}] with self.depl._db: self.region = defn.region self.access_key_id = defn.access_key_id or nixops.ec2_utils.get_access_key_id() self.placement_group_name = defn.placement_group_name self.placement_group_strategy = defn.placement_group_strategy grp = None if check: with self.depl._db: self._connect() try: grp = self._conn.get_all_placement_groups([ defn.placement_group_name ])[0] self.state = self.UP self.placement_group_strategy = grp.strategy except boto.exception.EC2ResponseError as e: if e.error_code == u'InvalidGroup.NotFound': self.state = self.Missing else: raise if self.state == self.MISSING or self.state == self.UNKNOWN: self._connect() try: self.logger.log("creating EC2 placement group ‘{0}’...".format(self.placement_group_name)) created = self._conn.create_placement_group(self.placement_group_name, self.placement_group_strategy) except boto.exception.EC2ResponseError as e: if self.state != self.UNKNOWN or e.error_code != u'InvalidGroup.Duplicate': raise self.state = self.UP def after_activation(self, defn): region = self.region self._connect() conn = self._conn for group in self.old_placement_groups: if group['region'] != region: region = group['region'] conn = nixops.ec2_utils.connect(region, self.access_key_id) try: conn.delete_placement_group(group['name']) except boto.exception.EC2ResponseError as e: if e.error_code != u'InvalidGroup.NotFound': raise self.old_placement_groups = [] def destroy(self, wipe=False): if self.state == self.UP or self.state == self.STARTING: self.logger.log("deleting EC2 placement group `{0}'...".format(self.placement_group_name)) self._connect() self._conn.delete_placement_group(self.placement_group_name) self.state = self.MISSING return True # -*- coding: utf-8 -*- """ flask.templating ~~~~~~~~~~~~~~~~ Implements the bridge to Jinja2. :copyright: (c) 2015 by Armin Ronacher. :license: BSD, see LICENSE for more details. """ from jinja2 import BaseLoader, Environment as BaseEnvironment, \ TemplateNotFound from .globals import _request_ctx_stack, _app_ctx_stack from .signals import template_rendered def _default_template_ctx_processor(): """Default template context processor. Injects `request`, `session` and `g`. 
""" reqctx = _request_ctx_stack.top appctx = _app_ctx_stack.top rv = {} if appctx is not None: rv['g'] = appctx.g if reqctx is not None: rv['request'] = reqctx.request rv['session'] = reqctx.session return rv class Environment(BaseEnvironment): """Works like a regular Jinja2 environment but has some additional knowledge of how Flask's blueprint works so that it can prepend the name of the blueprint to referenced templates if necessary. """ def __init__(self, app, **options): if 'loader' not in options: options['loader'] = app.create_global_jinja_loader() BaseEnvironment.__init__(self, **options) self.app = app class DispatchingJinjaLoader(BaseLoader): """A loader that looks for templates in the application and all the blueprint folders. """ def __init__(self, app): self.app = app def get_source(self, environment, template): explain = self.app.config['EXPLAIN_TEMPLATE_LOADING'] attempts = [] tmplrv = None for srcobj, loader in self._iter_loaders(template): try: rv = loader.get_source(environment, template) if tmplrv is None: tmplrv = rv if not explain: break except TemplateNotFound: rv = None attempts.append((loader, srcobj, rv)) if explain: from .debughelpers import explain_template_loading_attempts explain_template_loading_attempts(self.app, template, attempts) if tmplrv is not None: return tmplrv raise TemplateNotFound(template) def _iter_loaders(self, template): loader = self.app.jinja_loader if loader is not None: yield self.app, loader for blueprint in self.app.iter_blueprints(): loader = blueprint.jinja_loader if loader is not None: yield blueprint, loader def list_templates(self): result = set() loader = self.app.jinja_loader if loader is not None: result.update(loader.list_templates()) for blueprint in self.app.iter_blueprints(): loader = blueprint.jinja_loader if loader is not None: for template in loader.list_templates(): result.add(template) return list(result) def _render(template, context, app): """Renders the template and fires the signal""" rv = template.render(context) template_rendered.send(app, template=template, context=context) return rv def render_template(template_name_or_list, **context): """Renders a template from the template folder with the given context. :param template_name_or_list: the name of the template to be rendered, or an iterable with template names the first one existing will be rendered :param context: the variables that should be available in the context of the template. """ ctx = _app_ctx_stack.top ctx.app.update_template_context(context) return _render(ctx.app.jinja_env.get_or_select_template(template_name_or_list), context, ctx.app) def render_template_string(source, **context): """Renders a template from the given template source string with the given context. :param source: the source code of the template to be rendered :param context: the variables that should be available in the context of the template. """ ctx = _app_ctx_stack.top ctx.app.update_template_context(context) return _render(ctx.app.jinja_env.from_string(source), context, ctx.app) #!/usr/bin/env python # -*- coding: utf-8 -*- "Fully test this module's functionality through the use of fixtures." 
from megacosm.generators import Motivation from megacosm.generators import NPC import unittest2 as unittest import fixtures import fakeredis from config import TestConfiguration class TestMotivation(unittest.TestCase): def setUp(self): self.redis = fakeredis.FakeRedis() fixtures.motivation.import_fixtures(self) fixtures.phobia.import_fixtures(self) fixtures.npc.import_fixtures(self) self.redis.lpush('npc_race','gnome') def tearDown(self): self.redis.flushall() def test_random_motivation(self): """ """ motivation = Motivation(self.redis) self.assertNotEqual(motivation.text, '') def test_motivation_w_npc(self): """ """ npc = NPC(self.redis) motivation = Motivation(self.redis, {'npc': npc}) self.assertNotEqual(motivation.text, '') self.assertEqual(motivation.npc, npc) self.assertNotEqual('%s' % motivation, '') def test_motivation_w_fear(self): """ """ npc = NPC(self.redis) motivation = Motivation(self.redis, {'npc': npc, 'kind': 'fear'}) self.assertNotEqual(motivation.text, '') self.assertEqual(motivation.npc, npc) self.assertNotEqual('%s' % motivation, '') # Util.py - Python extension for perf script, miscellaneous utility code # # Copyright (C) 2010 by Tom Zanussi # # This software may be distributed under the terms of the GNU General # Public License ("GPL") version 2 as published by the Free Software # Foundation. import errno, os FUTEX_WAIT = 0 FUTEX_WAKE = 1 FUTEX_PRIVATE_FLAG = 128 FUTEX_CLOCK_REALTIME = 256 FUTEX_CMD_MASK = ~(FUTEX_PRIVATE_FLAG | FUTEX_CLOCK_REALTIME) NSECS_PER_SEC = 1000000000 def avg(total, n): return total / n def nsecs(secs, nsecs): return secs * NSECS_PER_SEC + nsecs def nsecs_secs(nsecs): return nsecs / NSECS_PER_SEC def nsecs_nsecs(nsecs): return nsecs % NSECS_PER_SEC def nsecs_str(nsecs): str = "%5u.%09u" % (nsecs_secs(nsecs), nsecs_nsecs(nsecs)), return str def add_stats(dict, key, value): if not dict.has_key(key): dict[key] = (value, value, value, 1) else: min, max, avg, count = dict[key] if value < min: min = value if value > max: max = value avg = (avg + value) / 2 dict[key] = (min, max, avg, count + 1) def clear_term(): print("\x1b[H\x1b[2J") audit_package_warned = False try: import audit machine_to_id = { 'x86_64': audit.MACH_86_64, 'alpha' : audit.MACH_ALPHA, 'ia64' : audit.MACH_IA64, 'ppc' : audit.MACH_PPC, 'ppc64' : audit.MACH_PPC64, 's390' : audit.MACH_S390, 's390x' : audit.MACH_S390X, 'i386' : audit.MACH_X86, 'i586' : audit.MACH_X86, 'i686' : audit.MACH_X86, } try: machine_to_id['armeb'] = audit.MACH_ARMEB except: pass machine_id = machine_to_id[os.uname()[4]] except: if not audit_package_warned: audit_package_warned = True print "Install the audit-libs-python package to get syscall names" def syscall_name(id): try: return audit.audit_syscall_to_name(id, machine_id) except: return str(id) def strerror(nr): try: return errno.errorcode[abs(nr)] except: return "Unknown %d errno" % nr # -*- coding: utf-8 -*- from __future__ import absolute_import from collections import namedtuple import sip from PyQt5.QtWebKitWidgets import QWebPage, QWebView from PyQt5.QtCore import QByteArray from twisted.python import log import six from splash.har_builder import HarBuilder RenderErrorInfo = namedtuple('RenderErrorInfo', 'type code text url') class SplashQWebView(QWebView): """ QWebView subclass that handles 'close' requests. 
""" onBeforeClose = None def closeEvent(self, event): dont_close = False if self.onBeforeClose: dont_close = self.onBeforeClose() if dont_close: event.ignore() else: event.accept() class SplashQWebPage(QWebPage): """ QWebPage subclass that: * changes user agent; * logs JS console messages; * handles alert and confirm windows; * returns additional info about render errors; * logs HAR events; * stores options for various Splash components. """ error_info = None custom_user_agent = None custom_headers = None skip_custom_headers = False navigation_locked = False resource_timeout = 0 response_body_enabled = False def __init__(self, verbosity=0): super(QWebPage, self).__init__() self.verbosity = verbosity self.callbacks = { "on_request": [], "on_response_headers": [], "on_response": [], } self.mainFrame().urlChanged.connect(self.on_url_changed) self.mainFrame().titleChanged.connect(self.on_title_changed) self.mainFrame().loadFinished.connect(self.on_load_finished) self.mainFrame().initialLayoutCompleted.connect(self.on_layout_completed) self.har = HarBuilder() def reset_har(self): self.har.reset() def clear_callbacks(self, event=None): """ Unregister all callbacks for an event. If event is None then all callbacks are removed. """ if event is None: for ev in self.callbacks: assert ev is not None self.clear_callbacks(ev) return del self.callbacks[event][:] def on_title_changed(self, title): self.har.store_title(title) def on_url_changed(self, url): self.har.store_url(url) def on_load_finished(self, ok): self.har.store_timing("onLoad") def on_layout_completed(self): self.har.store_timing("onContentLoad") def acceptNavigationRequest(self, webFrame, networkRequest, navigationType): if self.navigation_locked: return False self.error_info = None return super(SplashQWebPage, self).acceptNavigationRequest(webFrame, networkRequest, navigationType) def javaScriptAlert(self, frame, msg): return def javaScriptConfirm(self, frame, msg): return False def javaScriptConsoleMessage(self, msg, line_number, source_id): if self.verbosity >= 2: log.msg("JsConsole(%s:%d): %s" % (source_id, line_number, msg), system='render') def userAgentForUrl(self, url): if self.custom_user_agent is None: return super(SplashQWebPage, self).userAgentForUrl(url) else: return self.custom_user_agent # loadFinished signal handler receives ok=False at least these cases: # 1. when there is an error with the page (e.g. the page is not available); # 2. when a redirect happened before all related resource are loaded; # 3. when page sends headers that are not parsed correctly # (e.g. a bad Content-Type). # By implementing ErrorPageExtension we can catch (1) and # distinguish it from (2) and (3). def extension(self, extension, info=None, errorPage=None): if extension == QWebPage.ErrorPageExtension: # catch the error, populate self.errorInfo and return an error page info = sip.cast(info, QWebPage.ErrorPageExtensionOption) domain = 'Unknown' if info.domain == QWebPage.QtNetwork: domain = 'Network' elif info.domain == QWebPage.Http: domain = 'HTTP' elif info.domain == QWebPage.WebKit: domain = 'WebKit' self.error_info = RenderErrorInfo( domain, int(info.error), six.text_type(info.errorString), six.text_type(info.url.toString()) ) # XXX: this page currently goes nowhere content = u""" Failed loading page

<html><head><title>Failed loading page</title></head>
<body>
<h1>Failed loading page ({0.text})</h1>
<h2>{0.url}</h2>
<p>{0.type} error #{0.code}</p>
</body></html>

""".format(self.error_info) errorPage = sip.cast(errorPage, QWebPage.ErrorPageExtensionReturn) errorPage.content = QByteArray(content.encode('utf-8')) return True # XXX: this method always returns True, even if we haven't # handled the extension. Is it correct? When can this method be # called with extension which is not ErrorPageExtension if we # are returning False in ``supportsExtension`` for such extensions? return True def supportsExtension(self, extension): if extension == QWebPage.ErrorPageExtension: return True return False def maybe_redirect(self, load_finished_ok): """ Return True if the current webpage state looks like a redirect. Use this function from loadFinished handler to ignore spurious signals. FIXME: This can return True if server returned incorrect Content-Type header, but there is no an additional loadFinished signal in this case. """ return not load_finished_ok and self.error_info is None def is_ok(self, load_finished_ok): return load_finished_ok and self.error_info is None def error_loading(self, load_finished_ok): return load_finished_ok and self.error_info is not None from PySide.QtGui import QDialog, QDialogButtonBox, QVBoxLayout, QLabel, QLineEdit from PySide import QtGui, QtCore import Lifeline class ClusterDialog(QDialog): editClusterName = None def __init__(self, lifeline, defaultName, parent = None): super(ClusterDialog, self).__init__(parent) self.lifeline = lifeline layout = QVBoxLayout(self) message = QLabel('Enter group name') layout.addWidget(message) self.editClusterName = QLineEdit(defaultName) self.editClusterName.setFixedHeight(30) self.editClusterName.setFixedWidth(400) self.editClusterName.textChanged.connect(self.validateCluster) layout.addWidget(self.editClusterName) self.validation_msg = QLabel(' ') layout.addWidget(self.validation_msg) buttons = QtGui.QDialogButtonBox(QtGui.QDialogButtonBox.Ok | QtGui.QDialogButtonBox.Cancel, QtCore.Qt.Horizontal, self) buttons.accepted.connect(self.accept) buttons.rejected.connect(self.reject) layout.addWidget(buttons) self.validateCluster() def validateCluster(self): cnt = 0 for l in self.lifeline: if self.editClusterName.text() in l.getClassName() and not l.getFlagCluster() and not l.getClusterLifeline(): cnt += 1 available_flag = True for l in self.lifeline: if self.editClusterName.text() in l.getClassName() and l.getFlagCluster(): available_flag = False break if available_flag: self.validation_msg.setText("group name includes %d life-lines" % (cnt)) else: self.validation_msg.setText("group name is not available") def getClusterText(self): return self.editClusterName.text() @staticmethod def getClusterName(lifelines, defaultName, parent = None): dialog = ClusterDialog(lifelines,defaultName,parent) result = dialog.exec_() return (result, dialog.getClusterText()) # Amara, universalsubtitles.org # # Copyright (C) 2013 Participatory Culture Foundation # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see # http://www.gnu.org/licenses/agpl-3.0.html. 
from __future__ import absolute_import from django.test import TestCase from caching.tests.utils import assert_invalidates_model_cache from teams.models import MembershipNarrowing from utils.factories import * class TeamCacheInvalidationTest(TestCase): def setUp(self): self.team = TeamFactory() def test_change_team(self): with assert_invalidates_model_cache(self.team): self.team.save() def test_change_team_member(self): with assert_invalidates_model_cache(self.team): member = TeamMemberFactory(team=self.team) with assert_invalidates_model_cache(self.team): member.save() with assert_invalidates_model_cache(self.team): member.delete() def test_change_membership_narrowing(self): admin = TeamMemberFactory(team=self.team) member = TeamMemberFactory(team=self.team) with assert_invalidates_model_cache(self.team): narrowing = MembershipNarrowing.objects.create( member=member, language='en', added_by=admin) with assert_invalidates_model_cache(self.team): narrowing.save() with assert_invalidates_model_cache(self.team): narrowing.delete() # Copyright 2013 Rackspace Hosting # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from nova.api.openstack import extensions class Extended_quotas(extensions.ExtensionDescriptor): """Adds ability for admins to delete quota and optionally force the update Quota command. 
""" name = "ExtendedQuotas" alias = "os-extended-quotas" namespace = ("http://docs.openstack.org/compute/ext/extended_quotas" "/api/v1.1") updated = "2013-06-09T00:00:00Z" from unittest import mock from django.conf.global_settings import PASSWORD_HASHERS from django.contrib.auth import get_user_model from django.contrib.auth.base_user import AbstractBaseUser from django.contrib.auth.hashers import get_hasher from django.contrib.auth.models import ( AbstractUser, Group, Permission, User, UserManager, ) from django.contrib.contenttypes.models import ContentType from django.core import mail from django.db.models.signals import post_save from django.test import TestCase, override_settings from .models.with_custom_email_field import CustomEmailField class NaturalKeysTestCase(TestCase): def test_user_natural_key(self): staff_user = User.objects.create_user(username='staff') self.assertEqual(User.objects.get_by_natural_key('staff'), staff_user) self.assertEqual(staff_user.natural_key(), ('staff',)) def test_group_natural_key(self): users_group = Group.objects.create(name='users') self.assertEqual(Group.objects.get_by_natural_key('users'), users_group) class LoadDataWithoutNaturalKeysTestCase(TestCase): fixtures = ['regular.json'] def test_user_is_created_and_added_to_group(self): user = User.objects.get(username='my_username') group = Group.objects.get(name='my_group') self.assertEqual(group, user.groups.get()) class LoadDataWithNaturalKeysTestCase(TestCase): fixtures = ['natural.json'] def test_user_is_created_and_added_to_group(self): user = User.objects.get(username='my_username') group = Group.objects.get(name='my_group') self.assertEqual(group, user.groups.get()) class LoadDataWithNaturalKeysAndMultipleDatabasesTestCase(TestCase): multi_db = True def test_load_data_with_user_permissions(self): # Create test contenttypes for both databases default_objects = [ ContentType.objects.db_manager('default').create( model='examplemodela', app_label='app_a', ), ContentType.objects.db_manager('default').create( model='examplemodelb', app_label='app_b', ), ] other_objects = [ ContentType.objects.db_manager('other').create( model='examplemodelb', app_label='app_b', ), ContentType.objects.db_manager('other').create( model='examplemodela', app_label='app_a', ), ] # Now we create the test UserPermission Permission.objects.db_manager("default").create( name="Can delete example model b", codename="delete_examplemodelb", content_type=default_objects[1], ) Permission.objects.db_manager("other").create( name="Can delete example model b", codename="delete_examplemodelb", content_type=other_objects[0], ) perm_default = Permission.objects.get_by_natural_key( 'delete_examplemodelb', 'app_b', 'examplemodelb', ) perm_other = Permission.objects.db_manager('other').get_by_natural_key( 'delete_examplemodelb', 'app_b', 'examplemodelb', ) self.assertEqual(perm_default.content_type_id, default_objects[1].id) self.assertEqual(perm_other.content_type_id, other_objects[0].id) class UserManagerTestCase(TestCase): def test_create_user(self): email_lowercase = 'normal@normal.com' user = User.objects.create_user('user', email_lowercase) self.assertEqual(user.email, email_lowercase) self.assertEqual(user.username, 'user') self.assertFalse(user.has_usable_password()) def test_create_user_email_domain_normalize_rfc3696(self): # According to http://tools.ietf.org/html/rfc3696#section-3 # the "@" symbol can be part of the local part of an email address returned = UserManager.normalize_email(r'Abc\@DEF@EXAMPLE.com') 
self.assertEqual(returned, r'Abc\@DEF@example.com') def test_create_user_email_domain_normalize(self): returned = UserManager.normalize_email('normal@DOMAIN.COM') self.assertEqual(returned, 'normal@domain.com') def test_create_user_email_domain_normalize_with_whitespace(self): returned = UserManager.normalize_email(r'email\ with_whitespace@D.COM') self.assertEqual(returned, r'email\ with_whitespace@d.com') def test_empty_username(self): with self.assertRaisesMessage(ValueError, 'The given username must be set'): User.objects.create_user(username='') def test_create_user_is_staff(self): email = 'normal@normal.com' user = User.objects.create_user('user', email, is_staff=True) self.assertEqual(user.email, email) self.assertEqual(user.username, 'user') self.assertTrue(user.is_staff) def test_create_super_user_raises_error_on_false_is_superuser(self): with self.assertRaisesMessage(ValueError, 'Superuser must have is_superuser=True.'): User.objects.create_superuser( username='test', email='test@test.com', password='test', is_superuser=False, ) def test_create_superuser_raises_error_on_false_is_staff(self): with self.assertRaisesMessage(ValueError, 'Superuser must have is_staff=True.'): User.objects.create_superuser( username='test', email='test@test.com', password='test', is_staff=False, ) def test_make_random_password(self): allowed_chars = 'abcdefg' password = UserManager().make_random_password(5, allowed_chars) self.assertEqual(len(password), 5) for char in password: self.assertIn(char, allowed_chars) class AbstractBaseUserTests(TestCase): def test_clean_normalize_username(self): # The normalization happens in AbstractBaseUser.clean() ohm_username = 'iamtheΩ' # U+2126 OHM SIGN for model in ('auth.User', 'auth_tests.CustomUser'): with self.subTest(model=model), self.settings(AUTH_USER_MODEL=model): User = get_user_model() user = User(**{User.USERNAME_FIELD: ohm_username, 'password': 'foo'}) user.clean() username = user.get_username() self.assertNotEqual(username, ohm_username) self.assertEqual(username, 'iamtheΩ') # U+03A9 GREEK CAPITAL LETTER OMEGA def test_default_email(self): user = AbstractBaseUser() self.assertEqual(user.get_email_field_name(), 'email') def test_custom_email(self): user = CustomEmailField() self.assertEqual(user.get_email_field_name(), 'email_address') class AbstractUserTestCase(TestCase): def test_email_user(self): # valid send_mail parameters kwargs = { "fail_silently": False, "auth_user": None, "auth_password": None, "connection": None, "html_message": None, } abstract_user = AbstractUser(email='foo@bar.com') abstract_user.email_user( subject="Subject here", message="This is a message", from_email="from@domain.com", **kwargs ) self.assertEqual(len(mail.outbox), 1) message = mail.outbox[0] self.assertEqual(message.subject, "Subject here") self.assertEqual(message.body, "This is a message") self.assertEqual(message.from_email, "from@domain.com") self.assertEqual(message.to, [abstract_user.email]) def test_last_login_default(self): user1 = User.objects.create(username='user1') self.assertIsNone(user1.last_login) user2 = User.objects.create_user(username='user2') self.assertIsNone(user2.last_login) def test_user_clean_normalize_email(self): user = User(username='user', password='foo', email='foo@BAR.com') user.clean() self.assertEqual(user.email, 'foo@bar.com') def test_user_double_save(self): """ Calling user.save() twice should trigger password_changed() once. 
""" user = User.objects.create_user(username='user', password='foo') user.set_password('bar') with mock.patch('django.contrib.auth.password_validation.password_changed') as pw_changed: user.save() self.assertEqual(pw_changed.call_count, 1) user.save() self.assertEqual(pw_changed.call_count, 1) @override_settings(PASSWORD_HASHERS=PASSWORD_HASHERS) def test_check_password_upgrade(self): """ password_changed() shouldn't be called if User.check_password() triggers a hash iteration upgrade. """ user = User.objects.create_user(username='user', password='foo') initial_password = user.password self.assertTrue(user.check_password('foo')) hasher = get_hasher('default') self.assertEqual('pbkdf2_sha256', hasher.algorithm) old_iterations = hasher.iterations try: # Upgrade the password iterations hasher.iterations = old_iterations + 1 with mock.patch('django.contrib.auth.password_validation.password_changed') as pw_changed: user.check_password('foo') self.assertEqual(pw_changed.call_count, 0) self.assertNotEqual(initial_password, user.password) finally: hasher.iterations = old_iterations class IsActiveTestCase(TestCase): """ Tests the behavior of the guaranteed is_active attribute """ def test_builtin_user_isactive(self): user = User.objects.create(username='foo', email='foo@bar.com') # is_active is true by default self.assertIs(user.is_active, True) user.is_active = False user.save() user_fetched = User.objects.get(pk=user.pk) # the is_active flag is saved self.assertFalse(user_fetched.is_active) @override_settings(AUTH_USER_MODEL='auth_tests.IsActiveTestUser1') def test_is_active_field_default(self): """ tests that the default value for is_active is provided """ UserModel = get_user_model() user = UserModel(username='foo') self.assertIs(user.is_active, True) # you can set the attribute - but it will not save user.is_active = False # there should be no problem saving - but the attribute is not saved user.save() user_fetched = UserModel._default_manager.get(pk=user.pk) # the attribute is always true for newly retrieved instance self.assertIs(user_fetched.is_active, True) class TestCreateSuperUserSignals(TestCase): """ Simple test case for ticket #20541 """ def post_save_listener(self, *args, **kwargs): self.signals_count += 1 def setUp(self): self.signals_count = 0 post_save.connect(self.post_save_listener, sender=User) def tearDown(self): post_save.disconnect(self.post_save_listener, sender=User) def test_create_user(self): User.objects.create_user("JohnDoe") self.assertEqual(self.signals_count, 1) def test_create_superuser(self): User.objects.create_superuser("JohnDoe", "mail@example.com", "1") self.assertEqual(self.signals_count, 1) from mpmath.libmp import * from mpmath import mpf, mp from random import randint, choice, seed all_modes = [round_floor, round_ceiling, round_down, round_up, round_nearest] fb = from_bstr fi = from_int ff = from_float def test_div_1_3(): a = fi(1) b = fi(3) c = fi(-1) # floor rounds down, ceiling rounds up assert mpf_div(a, b, 7, round_floor) == fb('0.01010101') assert mpf_div(a, b, 7, round_ceiling) == fb('0.01010110') assert mpf_div(a, b, 7, round_down) == fb('0.01010101') assert mpf_div(a, b, 7, round_up) == fb('0.01010110') assert mpf_div(a, b, 7, round_nearest) == fb('0.01010101') # floor rounds up, ceiling rounds down assert mpf_div(c, b, 7, round_floor) == fb('-0.01010110') assert mpf_div(c, b, 7, round_ceiling) == fb('-0.01010101') assert mpf_div(c, b, 7, round_down) == fb('-0.01010101') assert mpf_div(c, b, 7, round_up) == fb('-0.01010110') assert mpf_div(c, b, 
7, round_nearest) == fb('-0.01010101') def test_mpf_divi_1_3(): a = 1 b = fi(3) c = -1 assert mpf_rdiv_int(a, b, 7, round_floor) == fb('0.01010101') assert mpf_rdiv_int(a, b, 7, round_ceiling) == fb('0.01010110') assert mpf_rdiv_int(a, b, 7, round_down) == fb('0.01010101') assert mpf_rdiv_int(a, b, 7, round_up) == fb('0.01010110') assert mpf_rdiv_int(a, b, 7, round_nearest) == fb('0.01010101') assert mpf_rdiv_int(c, b, 7, round_floor) == fb('-0.01010110') assert mpf_rdiv_int(c, b, 7, round_ceiling) == fb('-0.01010101') assert mpf_rdiv_int(c, b, 7, round_down) == fb('-0.01010101') assert mpf_rdiv_int(c, b, 7, round_up) == fb('-0.01010110') assert mpf_rdiv_int(c, b, 7, round_nearest) == fb('-0.01010101') def test_div_300(): q = fi(1000000) a = fi(300499999) # a/q is a little less than a half-integer b = fi(300500000) # b/q exactly a half-integer c = fi(300500001) # c/q is a little more than a half-integer # Check nearest integer rounding (prec=9 as 2**8 < 300 < 2**9) assert mpf_div(a, q, 9, round_down) == fi(300) assert mpf_div(b, q, 9, round_down) == fi(300) assert mpf_div(c, q, 9, round_down) == fi(300) assert mpf_div(a, q, 9, round_up) == fi(301) assert mpf_div(b, q, 9, round_up) == fi(301) assert mpf_div(c, q, 9, round_up) == fi(301) # Nearest even integer is down assert mpf_div(a, q, 9, round_nearest) == fi(300) assert mpf_div(b, q, 9, round_nearest) == fi(300) assert mpf_div(c, q, 9, round_nearest) == fi(301) # Nearest even integer is up a = fi(301499999) b = fi(301500000) c = fi(301500001) assert mpf_div(a, q, 9, round_nearest) == fi(301) assert mpf_div(b, q, 9, round_nearest) == fi(302) assert mpf_div(c, q, 9, round_nearest) == fi(302) def test_tight_integer_division(): # Test that integer division at tightest possible precision is exact N = 100 seed(1) for i in range(N): a = choice([1, -1]) * randint(1, 1<') def js(filepath): return static_file(filepath, root='./js') @route('/css/') def css(filepath): return static_file(filepath, root='./css') @route('/fonts/') def fonts(filepath): return static_file(filepath, root='./fonts') @route('/') def index(): return static_file('index.html', root='./views') @post('/proxy') def proxy_post(): url = request.params.get('url') data = request.params.get('data') headers = request.params.get('headers') req = urllib2.Request(url,data) headers = headers.split(",") for header in headers: data = request.headers.get(header) if data is not None: req.add_header(header, data) try: res = urllib2.urlopen(req) response.status = res.getcode() return res.read() except HTTPError, e: response.status = e.getcode() return e.read() @get('/proxy') def proxy_get(): url = request.params.get('url') headers = request.params.get('headers') req = urllib2.Request(url) headers = headers.split(",") for header in headers: data = request.headers.get(header) if data is not None: req.add_header(header, data) try: res = urllib2.urlopen(req) response.status = res.getcode() return res.read() except HTTPError, e: response.status = e.getcode() return e.read() run(port=8000) #!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Richard Hull and contributors # See LICENSE.rst for details. """ Tests for the :py:class:`luma.lcd.device.ili9486` device. 
""" import pytest from luma.lcd.device import ili9486 from luma.core.render import canvas from luma.core.framebuffer import full_frame from baseline_data import get_reference_data, primitives from helpers import serial, setup_function, assert_invalid_dimensions # noqa: F401 from unittest.mock import Mock def test_init_320x480(): recordings = [] def data(data): recordings.append({'data': data}) def command(*cmd): recordings.append({'command': list(cmd)}) serial.command.side_effect = command serial.data.side_effect = data ili9486(serial, gpio=Mock(), framebuffer=full_frame()) assert serial.data.called assert serial.command.called # This set of expected results include the padding bytes that # appear necessary with Waveshare's ili9486 implementation. assert recordings == [ {'command': [0xb0]}, {'data': [0x00, 0x00]}, {'command': [0x11]}, {'command': [0x3a]}, {'data': [0x00, 0x66]}, {'command': [0x21]}, {'command': [0xc0]}, {'data': [0x00, 0x09, 0x00, 0x09]}, {'command': [0xc1]}, {'data': [0x00, 0x41, 0x00, 0x00]}, {'command': [0xc2]}, {'data': [0x00, 0x33]}, {'command': [0xc5]}, {'data': [0x00, 0x00, 0x00, 0x36]}, {'command': [0x36]}, {'data': [0x00, 0x08]}, {'command': [0xb6]}, {'data': [0x00, 0x00, 0x00, 0x42, 0x00, 0x3b]}, {'command': [0x13]}, {'command': [0x34]}, {'command': [0x38]}, {'command': [0x11]}, {'command': [0x2a]}, {'data': [0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x3f]}, {'command': [0x2b]}, {'data': [0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0xdf]}, {'command': [0x2c]}, {'data': bytearray([0x00] * (320 * 480 * 3))}, {'command': [0x29]}, ] def test_init_invalid_dimensions(): """ ILI9486 LCD with an invalid resolution raises a :py:class:`luma.core.error.DeviceDisplayModeError`. """ assert_invalid_dimensions(ili9486, serial, 128, 77) def test_offsets(): recordings = [] def data(data): recordings.append({'data': data}) def command(*cmd): recordings.append({'command': list(cmd)}) serial.command.side_effect = command serial.data.side_effect = data ili9486(serial, gpio=Mock(), width=320, height=480, h_offset=2, v_offset=1, framebuffer=full_frame()) assert serial.data.called assert serial.command.called assert recordings == [ {'command': [0xb0]}, {'data': [0x00, 0x00]}, {'command': [0x11]}, {'command': [0x3a]}, {'data': [0x00, 0x66]}, {'command': [0x21]}, {'command': [0xc0]}, {'data': [0x00, 0x09, 0x00, 0x09]}, {'command': [0xc1]}, {'data': [0x00, 0x41, 0x00, 0x00]}, {'command': [0xc2]}, {'data': [0x00, 0x33]}, {'command': [0xc5]}, {'data': [0x00, 0x00, 0x00, 0x36]}, {'command': [0x36]}, {'data': [0x00, 0x08]}, {'command': [0xb6]}, {'data': [0x00, 0x00, 0x00, 0x42, 0x00, 0x3b]}, {'command': [0x13]}, {'command': [0x34]}, {'command': [0x38]}, {'command': [0x11]}, {'command': [0x2A]}, {'data': [0x00, 0x00, 0x00, 0x02, 0x00, 0x01, 0x00, 0x3f + 0x02]}, {'command': [0x2B]}, {'data': [0x00, 0x00, 0x00, 0x01, 0x00, 0x01, 0x00, 0xdf + 0x01]}, {'command': [0x2C]}, {'data': bytearray([0x00] * (320 * 480 * 3))}, {'command': [0x29]}, ] def test_contrast(): device = ili9486(serial, gpio=Mock()) serial.reset_mock() with pytest.raises(AssertionError): device.contrast(300) def test_hide(): device = ili9486(serial, gpio=Mock()) serial.reset_mock() device.hide() serial.command.assert_called_once_with(40) def test_show(): device = ili9486(serial, gpio=Mock()) serial.reset_mock() device.show() serial.command.assert_called_once_with(41) def test_display_full_frame(): device = ili9486(serial, gpio=Mock(), framebuffer=full_frame()) serial.reset_mock() recordings = [] def data(data): 
recordings.append({'data': list(data)}) def command(*cmd): recordings.append({'command': list(cmd)}) serial.command.side_effect = command serial.data.side_effect = data # Use the same drawing primitives as the demo with canvas(device) as draw: primitives(device, draw) assert serial.data.called assert serial.command.called # To regenerate test data, uncomment the following (remember not to commit though) # ================================================================================ # from baseline_data import save_reference_data # save_reference_data("demo_ili9486", recordings) assert recordings == get_reference_data('demo_ili9486') # -*- coding: utf-8 -*- # # This file is part of INSPIRE. # Copyright (C) 2014, 2015 CERN. # # INSPIRE is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # INSPIRE is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with INSPIRE. If not, see . # # In applying this license, CERN does not waive the privileges and immunities # granted to it by virtue of its status as an Intergovernmental Organization # or submit itself to any jurisdiction. """Approval action for INSPIRE arXiv harvesting.""" from flask import render_template, request, url_for from flask.ext.login import current_user from invenio_base.i18n import _ class hep_approval(object): """Class representing the approval action.""" name = _("Approve record") url = url_for("holdingpen.resolve_action") def render_mini(self, obj): """Method to render the minified action.""" return render_template( 'workflows/actions/hep_approval_mini.html', message=obj.get_action_message(), object=obj, resolve_url=self.url, ) def render(self, obj): """Method to render the action.""" return { "side": render_template('workflows/actions/hep_approval_side.html', message=obj.get_action_message(), object=obj, resolve_url=self.url), "main": render_template('workflows/actions/hep_approval_main.html', message=obj.get_action_message(), object=obj, resolve_url=self.url) } @staticmethod def resolve(bwo): """Resolve the action taken in the approval action.""" from invenio_workflows.models import ObjectVersion from inspire.modules.audit.api import log_prediction_action value = request.form.get("value", "") # Audit logging results = bwo.get_tasks_results() prediction_results = results.get("arxiv_guessing", {}) log_prediction_action( action="resolve", prediction_results=prediction_results, object_id=bwo.id, user_id=current_user.get_id(), source="holdingpen", user_action=value, ) upload_pdf = request.form.get("pdf_submission", False) bwo.remove_action() extra_data = bwo.get_extra_data() extra_data["approved"] = value in ('accept', 'accept_core') extra_data["core"] = value == "accept_core" extra_data["reason"] = request.form.get("text", "") extra_data["pdf_upload"] = True if upload_pdf == "true" else False bwo.set_extra_data(extra_data) bwo.save(version=ObjectVersion.WAITING) bwo.continue_workflow(delayed=True) if extra_data["approved"]: return { "message": "Suggestion has been accepted!", "category": "success", } else: return { "message": "Suggestion has been rejected", "category": "warning", } # vim: tabstop=4 
shiftwidth=4 softtabstop=4 # Copyright 2011 OpenStack Foundation # Copyright 2011 Grid Dynamics # Copyright 2011 Eldar Nugaev, Kirill Shileev, Ilya Alekseyev # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import re import webob from nova.api.openstack import extensions from nova.api.openstack import wsgi from nova import compute from nova import exception from nova.openstack.common.gettextutils import _ authorize = extensions.extension_authorizer('compute', 'console_output') class ConsoleOutputController(wsgi.Controller): def __init__(self, *args, **kwargs): super(ConsoleOutputController, self).__init__(*args, **kwargs) self.compute_api = compute.API() @wsgi.action('os-getConsoleOutput') def get_console_output(self, req, id, body): """Get text console output.""" context = req.environ['nova.context'] authorize(context) try: instance = self.compute_api.get(context, id) except exception.NotFound: raise webob.exc.HTTPNotFound(_('Instance not found')) try: length = body['os-getConsoleOutput'].get('length') except (TypeError, KeyError): raise webob.exc.HTTPBadRequest(_('os-getConsoleOutput malformed ' 'or missing from request body')) if length is not None: try: # NOTE(maurosr): cast length into a string before cast into an # integer to avoid thing like: int(2.5) which is 2 instead of # raise ValueError like it would when we try int("2.5"). This # can be removed once we have api validation landed. int(str(length)) except ValueError: raise webob.exc.HTTPBadRequest(_('Length in request body must ' 'be an integer value')) try: output = self.compute_api.get_console_output(context, instance, length) except exception.NotFound: raise webob.exc.HTTPNotFound(_('Unable to get console')) except exception.InstanceNotReady as e: raise webob.exc.HTTPConflict(explanation=e.format_message()) # XML output is not correctly escaped, so remove invalid characters remove_re = re.compile('[\x00-\x08\x0B-\x1F]') output = remove_re.sub('', output) return {'output': output} class Console_output(extensions.ExtensionDescriptor): """Console log output support, with tailing ability.""" name = "ConsoleOutput" alias = "os-console-output" namespace = ("http://docs.openstack.org/compute/ext/" "os-console-output/api/v2") updated = "2011-12-08T00:00:00+00:00" def get_controller_extensions(self): controller = ConsoleOutputController() extension = extensions.ControllerExtension(self, 'servers', controller) return [extension] # # This macro generates # a begin_html Canvas end_html # with 2 views of the NA49 detector. 
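# The macro reads its detector geometry from the 'py-na49.root' file opened
# below, which must exist in the current working directory.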
# import ROOT c1 = ROOT.TCanvas( 'c1', 'The NA49 canvas', 200, 10, 700, 780 ) ROOT.gBenchmark.Start( 'na49view' ) all = ROOT.TPad( 'all', 'A Global view of NA49', 0.02, 0.02, 0.48, 0.82, 28 ) tof = ROOT.TPad( 'tof', 'One Time Of Flight element', 0.52, 0.02, 0.98, 0.82, 28 ) all.Draw(); tof.Draw(); na49title = ROOT.TPaveLabel( 0.04, 0.86, 0.96, 0.98, 'Two views of the NA49 detector' ) na49title.SetFillColor( 32 ) na49title.Draw() # nageom = ROOT.TFile( 'py-na49.root' ) n49 = ROOT.gROOT.FindObject( 'na49' ) n49.SetBomb( 1.2 ) n49.cd() # Set current geometry all.cd() # Set current pad n49.Draw() c1.Update() tof.cd() TOFR1 = n49.GetNode( 'TOFR1' ) TOFR1.Draw() c1.Update() ROOT.gBenchmark.Show( 'na49view' ) # To have a better and dynamic view of any of these pads, # you can click with the middle button of your mouse to select it. # Then select "View with x3d" in the VIEW menu of the Canvas. # Once in x3d, you are in wireframe mode by default. # You can switch to: # - Hidden Line mode by typing E # - Solid mode by typing R # - Wireframe mode by typing W # - Stereo mode by clicking S (and you need special glasses) # - To leave x3d type Q from __future__ import unicode_literals import re import tempfile from django.contrib.gis import gdal from django.contrib.gis.db.models import Extent, MakeLine, Union from django.contrib.gis.geos import ( GeometryCollection, GEOSGeometry, LinearRing, LineString, Point, Polygon, fromstr, ) from django.core.management import call_command from django.db import connection from django.test import TestCase, ignore_warnings, skipUnlessDBFeature from django.utils import six from django.utils.deprecation import ( RemovedInDjango20Warning, RemovedInDjango110Warning, ) from ..utils import no_oracle, oracle, postgis, spatialite from .models import ( City, Country, Feature, MinusOneSRID, NonConcreteModel, PennsylvaniaCity, State, Track, ) def postgis_bug_version(): spatial_version = getattr(connection.ops, "spatial_version", (0, 0, 0)) return spatial_version and (2, 0, 0) <= spatial_version <= (2, 0, 1) @skipUnlessDBFeature("gis_enabled") class GeoModelTest(TestCase): fixtures = ['initial'] def test_fixtures(self): "Testing geographic model initialization from fixtures." # Ensuring that data was loaded from initial data fixtures. self.assertEqual(2, Country.objects.count()) self.assertEqual(8, City.objects.count()) self.assertEqual(2, State.objects.count()) def test_proxy(self): "Testing Lazy-Geometry support (using the GeometryProxy)." # Testing on a Point pnt = Point(0, 0) nullcity = City(name='NullCity', point=pnt) nullcity.save() # Making sure TypeError is thrown when trying to set with an # incompatible type. for bad in [5, 2.0, LineString((0, 0), (1, 1))]: try: nullcity.point = bad except TypeError: pass else: self.fail('Should throw a TypeError') # Now setting with a compatible GEOS Geometry, saving, and ensuring # the save took, notice no SRID is explicitly set. new = Point(5, 23) nullcity.point = new # Ensuring that the SRID is automatically set to that of the # field after assignment, but before saving. self.assertEqual(4326, nullcity.point.srid) nullcity.save() # Ensuring the point was saved correctly after saving self.assertEqual(new, City.objects.get(name='NullCity').point) # Setting the X and Y of the Point nullcity.point.x = 23 nullcity.point.y = 5 # Checking assignments pre & post-save. 
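        # Until save() runs, the database still holds the previously saved
        # coordinates, so the mutated in-memory point must differ from it.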
self.assertNotEqual(Point(23, 5), City.objects.get(name='NullCity').point) nullcity.save() self.assertEqual(Point(23, 5), City.objects.get(name='NullCity').point) nullcity.delete() # Testing on a Polygon shell = LinearRing((0, 0), (0, 100), (100, 100), (100, 0), (0, 0)) inner = LinearRing((40, 40), (40, 60), (60, 60), (60, 40), (40, 40)) # Creating a State object using a built Polygon ply = Polygon(shell, inner) nullstate = State(name='NullState', poly=ply) self.assertEqual(4326, nullstate.poly.srid) # SRID auto-set from None nullstate.save() ns = State.objects.get(name='NullState') self.assertEqual(ply, ns.poly) # Testing the `ogr` and `srs` lazy-geometry properties. if gdal.HAS_GDAL: self.assertIsInstance(ns.poly.ogr, gdal.OGRGeometry) self.assertEqual(ns.poly.wkb, ns.poly.ogr.wkb) self.assertIsInstance(ns.poly.srs, gdal.SpatialReference) self.assertEqual('WGS 84', ns.poly.srs.name) # Changing the interior ring on the poly attribute. new_inner = LinearRing((30, 30), (30, 70), (70, 70), (70, 30), (30, 30)) ns.poly[1] = new_inner ply[1] = new_inner self.assertEqual(4326, ns.poly.srid) ns.save() self.assertEqual(ply, State.objects.get(name='NullState').poly) ns.delete() @skipUnlessDBFeature("supports_transform") def test_lookup_insert_transform(self): "Testing automatic transform for lookups and inserts." # San Antonio in 'WGS84' (SRID 4326) sa_4326 = 'POINT (-98.493183 29.424170)' wgs_pnt = fromstr(sa_4326, srid=4326) # Our reference point in WGS84 # Oracle doesn't have SRID 3084, using 41157. if oracle: # San Antonio in 'Texas 4205, Southern Zone (1983, meters)' (SRID 41157) # Used the following Oracle SQL to get this value: # SELECT SDO_UTIL.TO_WKTGEOMETRY( # SDO_CS.TRANSFORM(SDO_GEOMETRY('POINT (-98.493183 29.424170)', 4326), 41157)) # ) # FROM DUAL; nad_wkt = 'POINT (300662.034646583 5416427.45974934)' nad_srid = 41157 else: # San Antonio in 'NAD83(HARN) / Texas Centric Lambert Conformal' (SRID 3084) # Used ogr.py in gdal 1.4.1 for this transform nad_wkt = 'POINT (1645978.362408288754523 6276356.025927528738976)' nad_srid = 3084 # Constructing & querying with a point from a different SRID. Oracle # `SDO_OVERLAPBDYINTERSECT` operates differently from # `ST_Intersects`, so contains is used instead. nad_pnt = fromstr(nad_wkt, srid=nad_srid) if oracle: tx = Country.objects.get(mpoly__contains=nad_pnt) else: tx = Country.objects.get(mpoly__intersects=nad_pnt) self.assertEqual('Texas', tx.name) # Creating San Antonio. Remember the Alamo. sa = City.objects.create(name='San Antonio', point=nad_pnt) # Now verifying that San Antonio was transformed correctly sa = City.objects.get(name='San Antonio') self.assertAlmostEqual(wgs_pnt.x, sa.point.x, 6) self.assertAlmostEqual(wgs_pnt.y, sa.point.y, 6) # If the GeometryField SRID is -1, then we shouldn't perform any # transformation if the SRID of the input geometry is different. if spatialite and connection.ops.spatial_version < (3, 0, 0): # SpatiaLite < 3 does not support missing SRID values. return m1 = MinusOneSRID(geom=Point(17, 23, srid=4326)) m1.save() self.assertEqual(-1, m1.geom.srid) def test_createnull(self): "Testing creating a model instance and the geometry being None" c = City() self.assertEqual(c.point, None) def test_geometryfield(self): "Testing the general GeometryField." 
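        # Persist one Feature per geometry type, then verify that each
        # geometry round-trips through the database intact.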
Feature(name='Point', geom=Point(1, 1)).save() Feature(name='LineString', geom=LineString((0, 0), (1, 1), (5, 5))).save() Feature(name='Polygon', geom=Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0)))).save() Feature(name='GeometryCollection', geom=GeometryCollection(Point(2, 2), LineString((0, 0), (2, 2)), Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0))))).save() f_1 = Feature.objects.get(name='Point') self.assertIsInstance(f_1.geom, Point) self.assertEqual((1.0, 1.0), f_1.geom.tuple) f_2 = Feature.objects.get(name='LineString') self.assertIsInstance(f_2.geom, LineString) self.assertEqual(((0.0, 0.0), (1.0, 1.0), (5.0, 5.0)), f_2.geom.tuple) f_3 = Feature.objects.get(name='Polygon') self.assertIsInstance(f_3.geom, Polygon) f_4 = Feature.objects.get(name='GeometryCollection') self.assertIsInstance(f_4.geom, GeometryCollection) self.assertEqual(f_3.geom, f_4.geom[2]) @skipUnlessDBFeature("supports_transform") def test_inherited_geofields(self): "Test GeoQuerySet methods on inherited Geometry fields." # Creating a Pennsylvanian city. PennsylvaniaCity.objects.create(name='Mansfield', county='Tioga', point='POINT(-77.071445 41.823881)') # All transformation SQL will need to be performed on the # _parent_ table. qs = PennsylvaniaCity.objects.transform(32128) self.assertEqual(1, qs.count()) for pc in qs: self.assertEqual(32128, pc.point.srid) def test_raw_sql_query(self): "Testing raw SQL query." cities1 = City.objects.all() # Only PostGIS would support a 'select *' query because of its recognized # HEXEWKB format for geometry fields as_text = 'ST_AsText(%s)' if postgis else connection.ops.select cities2 = City.objects.raw( 'select id, name, %s from geoapp_city' % as_text % 'point' ) self.assertEqual(len(cities1), len(list(cities2))) self.assertIsInstance(cities2[0].point, Point) def test_dumpdata_loaddata_cycle(self): """ Test a dumpdata/loaddata cycle with geographic data. """ out = six.StringIO() original_data = list(City.objects.all().order_by('name')) call_command('dumpdata', 'geoapp.City', stdout=out) result = out.getvalue() houston = City.objects.get(name='Houston') self.assertIn('"point": "%s"' % houston.point.ewkt, result) # Reload now dumped data with tempfile.NamedTemporaryFile(mode='w', suffix='.json') as tmp: tmp.write(result) tmp.seek(0) call_command('loaddata', tmp.name, verbosity=0) self.assertListEqual(original_data, list(City.objects.all().order_by('name'))) @skipUnlessDBFeature("gis_enabled") class GeoLookupTest(TestCase): fixtures = ['initial'] def test_disjoint_lookup(self): "Testing the `disjoint` lookup type." ptown = City.objects.get(name='Pueblo') qs1 = City.objects.filter(point__disjoint=ptown.point) self.assertEqual(7, qs1.count()) if connection.features.supports_real_shape_operations: qs2 = State.objects.filter(poly__disjoint=ptown.point) self.assertEqual(1, qs2.count()) self.assertEqual('Kansas', qs2[0].name) def test_contains_contained_lookups(self): "Testing the 'contained', 'contains', and 'bbcontains' lookup types." # Getting Texas, yes we were a country -- once ;) texas = Country.objects.get(name='Texas') # Seeing what cities are in Texas, should get Houston and Dallas, # and Oklahoma City because 'contained' only checks on the # _bounding box_ of the Geometries. if connection.features.supports_contained_lookup: qs = City.objects.filter(point__contained=texas.mpoly) self.assertEqual(3, qs.count()) cities = ['Houston', 'Dallas', 'Oklahoma City'] for c in qs: self.assertIn(c.name, cities) # Pulling out some cities. 
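        # These fixture cities feed the contains lookups below.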
houston = City.objects.get(name='Houston') wellington = City.objects.get(name='Wellington') pueblo = City.objects.get(name='Pueblo') okcity = City.objects.get(name='Oklahoma City') lawrence = City.objects.get(name='Lawrence') # Now testing contains on the countries using the points for # Houston and Wellington. tx = Country.objects.get(mpoly__contains=houston.point) # Query w/GEOSGeometry nz = Country.objects.get(mpoly__contains=wellington.point.hex) # Query w/EWKBHEX self.assertEqual('Texas', tx.name) self.assertEqual('New Zealand', nz.name) # Spatialite 2.3 thinks that Lawrence is in Puerto Rico (a NULL geometry). if not (spatialite and connection.ops.spatial_version < (3, 0, 0)): ks = State.objects.get(poly__contains=lawrence.point) self.assertEqual('Kansas', ks.name) # Pueblo and Oklahoma City (even though OK City is within the bounding box of Texas) # are not contained in Texas or New Zealand. self.assertEqual(len(Country.objects.filter(mpoly__contains=pueblo.point)), 0) # Query w/GEOSGeometry object self.assertEqual(len(Country.objects.filter(mpoly__contains=okcity.point.wkt)), 0 if connection.features.supports_real_shape_operations else 1) # Query w/WKT # OK City is contained w/in bounding box of Texas. if connection.features.supports_bbcontains_lookup: qs = Country.objects.filter(mpoly__bbcontains=okcity.point) self.assertEqual(1, len(qs)) self.assertEqual('Texas', qs[0].name) @skipUnlessDBFeature("supports_crosses_lookup") def test_crosses_lookup(self): Track.objects.create( name='Line1', line=LineString([(-95, 29), (-60, 0)]) ) self.assertEqual( Track.objects.filter(line__crosses=LineString([(-95, 0), (-60, 29)])).count(), 1 ) self.assertEqual( Track.objects.filter(line__crosses=LineString([(-95, 30), (0, 30)])).count(), 0 ) @skipUnlessDBFeature("supports_left_right_lookups") def test_left_right_lookups(self): "Testing the 'left' and 'right' lookup types." # Left: A << B => true if xmax(A) < xmin(B) # Right: A >> B => true if xmin(A) > xmax(B) # See: BOX2D_left() and BOX2D_right() in lwgeom_box2dfloat4.c in PostGIS source. # The left/right lookup tests are known failures on PostGIS 2.0/2.0.1 # http://trac.osgeo.org/postgis/ticket/2035 if postgis_bug_version(): self.skipTest("PostGIS 2.0/2.0.1 left and right lookups are known to be buggy.") # Getting the borders for Colorado & Kansas co_border = State.objects.get(name='Colorado').poly ks_border = State.objects.get(name='Kansas').poly # Note: Wellington has an 'X' value of 174, so it will not be considered # to the left of CO. # These cities should be strictly to the right of the CO border. cities = ['Houston', 'Dallas', 'Oklahoma City', 'Lawrence', 'Chicago', 'Wellington'] qs = City.objects.filter(point__right=co_border) self.assertEqual(6, len(qs)) for c in qs: self.assertIn(c.name, cities) # These cities should be strictly to the right of the KS border. cities = ['Chicago', 'Wellington'] qs = City.objects.filter(point__right=ks_border) self.assertEqual(2, len(qs)) for c in qs: self.assertIn(c.name, cities) # Note: Wellington has an 'X' value of 174, so it will not be considered # to the left of CO. vic = City.objects.get(point__left=co_border) self.assertEqual('Victoria', vic.name) cities = ['Pueblo', 'Victoria'] qs = City.objects.filter(point__left=ks_border) self.assertEqual(2, len(qs)) for c in qs: self.assertIn(c.name, cities) def test_equals_lookups(self): "Testing the 'same_as' and 'equals' lookup types." 
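        # An exact-match lookup, `same_as`, and `equals` should all resolve
        # to the same Houston row.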
pnt = fromstr('POINT (-95.363151 29.763374)', srid=4326) c1 = City.objects.get(point=pnt) c2 = City.objects.get(point__same_as=pnt) c3 = City.objects.get(point__equals=pnt) for c in [c1, c2, c3]: self.assertEqual('Houston', c.name) @skipUnlessDBFeature("supports_null_geometries") def test_null_geometries(self): "Testing NULL geometry support, and the `isnull` lookup type." # Creating a state with a NULL boundary. State.objects.create(name='Puerto Rico') # Querying for both NULL and Non-NULL values. nullqs = State.objects.filter(poly__isnull=True) validqs = State.objects.filter(poly__isnull=False) # Puerto Rico should be NULL (it's a commonwealth unincorporated territory) self.assertEqual(1, len(nullqs)) self.assertEqual('Puerto Rico', nullqs[0].name) # The valid states should be Colorado & Kansas self.assertEqual(2, len(validqs)) state_names = [s.name for s in validqs] self.assertIn('Colorado', state_names) self.assertIn('Kansas', state_names) # Saving another commonwealth w/a NULL geometry. nmi = State.objects.create(name='Northern Mariana Islands', poly=None) self.assertEqual(nmi.poly, None) # Assigning a geometry and saving -- then UPDATE back to NULL. nmi.poly = 'POLYGON((0 0,1 0,1 1,1 0,0 0))' nmi.save() State.objects.filter(name='Northern Mariana Islands').update(poly=None) self.assertIsNone(State.objects.get(name='Northern Mariana Islands').poly) @skipUnlessDBFeature("supports_relate_lookup") def test_relate_lookup(self): "Testing the 'relate' lookup type." # To make things more interesting, we will have our Texas reference point in # different SRIDs. pnt1 = fromstr('POINT (649287.0363174 4177429.4494686)', srid=2847) pnt2 = fromstr('POINT(-98.4919715741052 29.4333344025053)', srid=4326) # Not passing in a geometry as first param should # raise a type error when initializing the GeoQuerySet self.assertRaises(ValueError, Country.objects.filter, mpoly__relate=(23, 'foo')) # Making sure the right exception is raised for the given # bad arguments. for bad_args, e in [((pnt1, 0), ValueError), ((pnt2, 'T*T***FF*', 0), ValueError)]: qs = Country.objects.filter(mpoly__relate=bad_args) self.assertRaises(e, qs.count) # Relate works differently for the different backends. if postgis or spatialite: contains_mask = 'T*T***FF*' within_mask = 'T*F**F***' intersects_mask = 'T********' elif oracle: contains_mask = 'contains' within_mask = 'inside' # TODO: This is not quite the same as the PostGIS mask above intersects_mask = 'overlapbdyintersect' # Testing contains relation mask. self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt1, contains_mask)).name) self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt2, contains_mask)).name) # Testing within relation mask. ks = State.objects.get(name='Kansas') self.assertEqual('Lawrence', City.objects.get(point__relate=(ks.poly, within_mask)).name) # Testing intersection relation mask. if not oracle: self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt1, intersects_mask)).name) self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt2, intersects_mask)).name) self.assertEqual('Lawrence', City.objects.get(point__relate=(ks.poly, intersects_mask)).name) @skipUnlessDBFeature("gis_enabled") @ignore_warnings(category=RemovedInDjango20Warning) class GeoQuerySetTest(TestCase): fixtures = ['initial'] # Please keep the tests in GeoQuerySet method's alphabetic order @skipUnlessDBFeature("has_centroid_method") def test_centroid(self): "Testing the `centroid` GeoQuerySet method." 
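        # centroid() annotates each State with a `centroid` attribute, which
        # should match the GEOS-computed centroid within a backend-specific
        # tolerance.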
qs = State.objects.exclude(poly__isnull=True).centroid() if oracle: tol = 0.1 elif spatialite: tol = 0.000001 else: tol = 0.000000001 for s in qs: self.assertTrue(s.poly.centroid.equals_exact(s.centroid, tol)) @skipUnlessDBFeature( "has_difference_method", "has_intersection_method", "has_sym_difference_method", "has_union_method") def test_diff_intersection_union(self): "Testing the `difference`, `intersection`, `sym_difference`, and `union` GeoQuerySet methods." geom = Point(5, 23) qs = Country.objects.all().difference(geom).sym_difference(geom).union(geom) # XXX For some reason SpatiaLite does something screwy with the Texas geometry here. Also, # XXX it doesn't like the null intersection. if spatialite: qs = qs.exclude(name='Texas') else: qs = qs.intersection(geom) for c in qs: if oracle: # Should be able to execute the queries; however, they won't be the same # as GEOS (because Oracle doesn't use GEOS internally like PostGIS or # SpatiaLite). pass else: self.assertEqual(c.mpoly.difference(geom), c.difference) if not spatialite: self.assertEqual(c.mpoly.intersection(geom), c.intersection) # Ordering might differ in collections self.assertSetEqual(set(g.wkt for g in c.mpoly.sym_difference(geom)), set(g.wkt for g in c.sym_difference)) self.assertSetEqual(set(g.wkt for g in c.mpoly.union(geom)), set(g.wkt for g in c.union)) @skipUnlessDBFeature("has_envelope_method") def test_envelope(self): "Testing the `envelope` GeoQuerySet method." countries = Country.objects.all().envelope() for country in countries: self.assertIsInstance(country.envelope, Polygon) @skipUnlessDBFeature("supports_extent_aggr") @ignore_warnings(category=RemovedInDjango110Warning) def test_extent(self): """ Testing the (deprecated) `extent` GeoQuerySet method and the Extent aggregate. """ # Reference query: # `SELECT ST_extent(point) FROM geoapp_city WHERE (name='Houston' or name='Dallas');` # => BOX(-96.8016128540039 29.7633724212646,-95.3631439208984 32.7820587158203) expected = (-96.8016128540039, 29.7633724212646, -95.3631439208984, 32.782058715820) qs = City.objects.filter(name__in=('Houston', 'Dallas')) extent1 = qs.extent() extent2 = qs.aggregate(Extent('point'))['point__extent'] for extent in (extent1, extent2): for val, exp in zip(extent, expected): self.assertAlmostEqual(exp, val, 4) self.assertIsNone(City.objects.filter(name=('Smalltown')).extent()) self.assertIsNone(City.objects.filter(name=('Smalltown')).aggregate(Extent('point'))['point__extent']) @skipUnlessDBFeature("supports_extent_aggr") def test_extent_with_limit(self): """ Testing if extent supports limit. """ extent1 = City.objects.all().aggregate(Extent('point'))['point__extent'] extent2 = City.objects.all()[:3].aggregate(Extent('point'))['point__extent'] self.assertNotEqual(extent1, extent2) @skipUnlessDBFeature("has_force_rhr_method") def test_force_rhr(self): "Testing GeoQuerySet.force_rhr()." rings = ( ((0, 0), (5, 0), (0, 5), (0, 0)), ((1, 1), (1, 3), (3, 1), (1, 1)), ) rhr_rings = ( ((0, 0), (0, 5), (5, 0), (0, 0)), ((1, 1), (3, 1), (1, 3), (1, 1)), ) State.objects.create(name='Foo', poly=Polygon(*rings)) s = State.objects.force_rhr().get(name='Foo') self.assertEqual(rhr_rings, s.force_rhr.coords) @skipUnlessDBFeature("has_geohash_method") def test_geohash(self): "Testing GeoQuerySet.geohash()." 
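        # A geohash computed at lower precision is a prefix of the
        # full-precision hash, which the second assertion below relies on.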
# Reference query: # SELECT ST_GeoHash(point) FROM geoapp_city WHERE name='Houston'; # SELECT ST_GeoHash(point, 5) FROM geoapp_city WHERE name='Houston'; ref_hash = '9vk1mfq8jx0c8e0386z6' h1 = City.objects.geohash().get(name='Houston') h2 = City.objects.geohash(precision=5).get(name='Houston') self.assertEqual(ref_hash, h1.geohash) self.assertEqual(ref_hash[:5], h2.geohash) def test_geojson(self): "Testing GeoJSON output from the database using GeoQuerySet.geojson()." # Only PostGIS and SpatiaLite 3.0+ support GeoJSON. if not connection.ops.geojson: self.assertRaises(NotImplementedError, Country.objects.all().geojson, field_name='mpoly') return pueblo_json = '{"type":"Point","coordinates":[-104.609252,38.255001]}' houston_json = ( '{"type":"Point","crs":{"type":"name","properties":' '{"name":"EPSG:4326"}},"coordinates":[-95.363151,29.763374]}' ) victoria_json = ( '{"type":"Point","bbox":[-123.30519600,48.46261100,-123.30519600,48.46261100],' '"coordinates":[-123.305196,48.462611]}' ) chicago_json = ( '{"type":"Point","crs":{"type":"name","properties":{"name":"EPSG:4326"}},' '"bbox":[-87.65018,41.85039,-87.65018,41.85039],"coordinates":[-87.65018,41.85039]}' ) if spatialite: victoria_json = ( '{"type":"Point","bbox":[-123.305196,48.462611,-123.305196,48.462611],' '"coordinates":[-123.305196,48.462611]}' ) # Precision argument should only be an integer self.assertRaises(TypeError, City.objects.geojson, precision='foo') # Reference queries and values. # SELECT ST_AsGeoJson("geoapp_city"."point", 8, 0) # FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Pueblo'; self.assertEqual(pueblo_json, City.objects.geojson().get(name='Pueblo').geojson) # SELECT ST_AsGeoJson("geoapp_city"."point", 8, 2) FROM "geoapp_city" # WHERE "geoapp_city"."name" = 'Houston'; # This time we want to include the CRS by using the `crs` keyword. self.assertEqual(houston_json, City.objects.geojson(crs=True, model_att='json').get(name='Houston').json) # SELECT ST_AsGeoJson("geoapp_city"."point", 8, 1) FROM "geoapp_city" # WHERE "geoapp_city"."name" = 'Houston'; # This time we include the bounding box by using the `bbox` keyword. self.assertEqual(victoria_json, City.objects.geojson(bbox=True).get(name='Victoria').geojson) # SELECT ST_AsGeoJson("geoapp_city"."point", 5, 3) FROM "geoapp_city" # WHERE "geoapp_city"."name" = 'Chicago'; # Finally, we set every available keyword. self.assertEqual( chicago_json, City.objects.geojson(bbox=True, crs=True, precision=5).get(name='Chicago').geojson ) @skipUnlessDBFeature("has_gml_method") def test_gml(self): "Testing GML output from the database using GeoQuerySet.gml()." # Should throw a TypeError when trying to obtain GML from a # non-geometry field. qs = City.objects.all() self.assertRaises(TypeError, qs.gml, field_name='name') ptown1 = City.objects.gml(field_name='point', precision=9).get(name='Pueblo') ptown2 = City.objects.gml(precision=9).get(name='Pueblo') if oracle: # No precision parameter for Oracle :-/ gml_regex = re.compile( r'^' r'-104.60925\d+,38.25500\d+ ' r'' ) elif spatialite and connection.ops.spatial_version < (3, 0, 0): # Spatialite before 3.0 has extra colon in SrsName gml_regex = re.compile( r'^-104.609251\d+,38.255001' ) else: gml_regex = re.compile( r'^' r'-104\.60925\d+,38\.255001' ) for ptown in [ptown1, ptown2]: self.assertTrue(gml_regex.match(ptown.gml)) if postgis: self.assertIn('', City.objects.gml(version=3).get(name='Pueblo').gml) @skipUnlessDBFeature("has_kml_method") def test_kml(self): "Testing KML output from the database using GeoQuerySet.kml()." 
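        # (KML output lists longitude,latitude at the requested precision;
        # precision=9 keeps all stored digits for Pueblo.)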
# Should throw a TypeError when trying to obtain KML from a # non-geometry field. qs = City.objects.all() self.assertRaises(TypeError, qs.kml, 'name') # Ensuring the KML is as expected. ptown1 = City.objects.kml(field_name='point', precision=9).get(name='Pueblo') ptown2 = City.objects.kml(precision=9).get(name='Pueblo') for ptown in [ptown1, ptown2]: self.assertEqual('-104.609252,38.255001', ptown.kml) @ignore_warnings(category=RemovedInDjango110Warning) def test_make_line(self): """ Testing the (deprecated) `make_line` GeoQuerySet method and the MakeLine aggregate. """ if not connection.features.supports_make_line_aggr: # Only PostGIS has support for the MakeLine aggregate. For other # backends, test that NotImplementedError is raised self.assertRaises( NotImplementedError, City.objects.all().aggregate, MakeLine('point') ) return # Ensuring that a `TypeError` is raised on models without PointFields. self.assertRaises(TypeError, State.objects.make_line) self.assertRaises(TypeError, Country.objects.make_line) # MakeLine on an inappropriate field returns simply None self.assertIsNone(State.objects.aggregate(MakeLine('poly'))['poly__makeline']) # Reference query: # SELECT AsText(ST_MakeLine(geoapp_city.point)) FROM geoapp_city; ref_line = GEOSGeometry( 'LINESTRING(-95.363151 29.763374,-96.801611 32.782057,' '-97.521157 34.464642,174.783117 -41.315268,-104.609252 38.255001,' '-95.23506 38.971823,-87.650175 41.850385,-123.305196 48.462611)', srid=4326 ) # We check for equality with a tolerance of 10e-5 which is a lower bound # of the precisions of ref_line coordinates line1 = City.objects.make_line() line2 = City.objects.aggregate(MakeLine('point'))['point__makeline'] for line in (line1, line2): self.assertTrue(ref_line.equals_exact(line, tolerance=10e-5), "%s != %s" % (ref_line, line)) @skipUnlessDBFeature("has_num_geom_method") def test_num_geom(self): "Testing the `num_geom` GeoQuerySet method." # Both 'countries' only have two geometries. for c in Country.objects.num_geom(): self.assertEqual(2, c.num_geom) for c in City.objects.filter(point__isnull=False).num_geom(): # Oracle and PostGIS 2.0+ will return 1 for the number of # geometries on non-collections. self.assertEqual(1, c.num_geom) @skipUnlessDBFeature("supports_num_points_poly") def test_num_points(self): "Testing the `num_points` GeoQuerySet method." for c in Country.objects.num_points(): self.assertEqual(c.mpoly.num_points, c.num_points) if not oracle: # Oracle cannot count vertices in Point geometries. for c in City.objects.num_points(): self.assertEqual(1, c.num_points) @skipUnlessDBFeature("has_point_on_surface_method") def test_point_on_surface(self): "Testing the `point_on_surface` GeoQuerySet method." # Reference values. if oracle: # SELECT SDO_UTIL.TO_WKTGEOMETRY(SDO_GEOM.SDO_POINTONSURFACE(GEOAPP_COUNTRY.MPOLY, 0.05)) # FROM GEOAPP_COUNTRY; ref = {'New Zealand': fromstr('POINT (174.616364 -36.100861)', srid=4326), 'Texas': fromstr('POINT (-103.002434 36.500397)', srid=4326), } else: # Using GEOSGeometry to compute the reference point on surface values # -- since PostGIS also uses GEOS these should be the same. ref = {'New Zealand': Country.objects.get(name='New Zealand').mpoly.point_on_surface, 'Texas': Country.objects.get(name='Texas').mpoly.point_on_surface } for c in Country.objects.point_on_surface(): if spatialite: # XXX This seems to be a WKT-translation-related precision issue? 
tol = 0.00001 else: tol = 0.000000001 self.assertTrue(ref[c.name].equals_exact(c.point_on_surface, tol)) @skipUnlessDBFeature("has_reverse_method") def test_reverse_geom(self): "Testing GeoQuerySet.reverse_geom()." coords = [(-95.363151, 29.763374), (-95.448601, 29.713803)] Track.objects.create(name='Foo', line=LineString(coords)) t = Track.objects.reverse_geom().get(name='Foo') coords.reverse() self.assertEqual(tuple(coords), t.reverse_geom.coords) if oracle: self.assertRaises(TypeError, State.objects.reverse_geom) @skipUnlessDBFeature("has_scale_method") def test_scale(self): "Testing the `scale` GeoQuerySet method." xfac, yfac = 2, 3 tol = 5 # XXX The low precision tolerance is for SpatiaLite qs = Country.objects.scale(xfac, yfac, model_att='scaled') for c in qs: for p1, p2 in zip(c.mpoly, c.scaled): for r1, r2 in zip(p1, p2): for c1, c2 in zip(r1.coords, r2.coords): self.assertAlmostEqual(c1[0] * xfac, c2[0], tol) self.assertAlmostEqual(c1[1] * yfac, c2[1], tol) @skipUnlessDBFeature("has_snap_to_grid_method") def test_snap_to_grid(self): "Testing GeoQuerySet.snap_to_grid()." # Let's try and break snap_to_grid() with bad combinations of arguments. for bad_args in ((), range(3), range(5)): self.assertRaises(ValueError, Country.objects.snap_to_grid, *bad_args) for bad_args in (('1.0',), (1.0, None), tuple(map(six.text_type, range(4)))): self.assertRaises(TypeError, Country.objects.snap_to_grid, *bad_args) # Boundary for San Marino, courtesy of Bjorn Sandvik of thematicmapping.org # from the world borders dataset he provides. wkt = ('MULTIPOLYGON(((12.41580 43.95795,12.45055 43.97972,12.45389 43.98167,' '12.46250 43.98472,12.47167 43.98694,12.49278 43.98917,' '12.50555 43.98861,12.51000 43.98694,12.51028 43.98277,' '12.51167 43.94333,12.51056 43.93916,12.49639 43.92333,' '12.49500 43.91472,12.48778 43.90583,12.47444 43.89722,' '12.46472 43.89555,12.45917 43.89611,12.41639 43.90472,' '12.41222 43.90610,12.40782 43.91366,12.40389 43.92667,' '12.40500 43.94833,12.40889 43.95499,12.41580 43.95795)))') Country.objects.create(name='San Marino', mpoly=fromstr(wkt)) # Because floating-point arithmetic isn't exact, we set a tolerance # to pass into GEOS `equals_exact`. tol = 0.000000001 # SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.1)) FROM "geoapp_country" # WHERE "geoapp_country"."name" = 'San Marino'; ref = fromstr('MULTIPOLYGON(((12.4 44,12.5 44,12.5 43.9,12.4 43.9,12.4 44)))') self.assertTrue(ref.equals_exact(Country.objects.snap_to_grid(0.1).get(name='San Marino').snap_to_grid, tol)) # SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.05, 0.23)) FROM "geoapp_country" # WHERE "geoapp_country"."name" = 'San Marino'; ref = fromstr('MULTIPOLYGON(((12.4 43.93,12.45 43.93,12.5 43.93,12.45 43.93,12.4 43.93)))') self.assertTrue( ref.equals_exact(Country.objects.snap_to_grid(0.05, 0.23).get(name='San Marino').snap_to_grid, tol) ) # SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.5, 0.17, 0.05, 0.23)) FROM "geoapp_country" # WHERE "geoapp_country"."name" = 'San Marino'; ref = fromstr( 'MULTIPOLYGON(((12.4 43.87,12.45 43.87,12.45 44.1,12.5 44.1,12.5 43.87,12.45 43.87,12.4 43.87)))' ) self.assertTrue( ref.equals_exact( Country.objects.snap_to_grid(0.05, 0.23, 0.5, 0.17).get(name='San Marino').snap_to_grid, tol ) ) @skipUnlessDBFeature("has_svg_method") def test_svg(self): "Testing SVG output using GeoQuerySet.svg()." 
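        # AsSVG negates the y coordinate (SVG's y axis points down), which
        # is why Pueblo's latitude appears as cy="-38.255001" below.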
self.assertRaises(TypeError, City.objects.svg, precision='foo') # SELECT AsSVG(geoapp_city.point, 0, 8) FROM geoapp_city WHERE name = 'Pueblo'; svg1 = 'cx="-104.609252" cy="-38.255001"' # Even though relative, only one point so it's practically the same except for # the 'c' letter prefix on the x,y values. svg2 = svg1.replace('c', '') self.assertEqual(svg1, City.objects.svg().get(name='Pueblo').svg) self.assertEqual(svg2, City.objects.svg(relative=5).get(name='Pueblo').svg) @skipUnlessDBFeature("has_transform_method") def test_transform(self): "Testing the transform() GeoQuerySet method." # Pre-transformed points for Houston and Pueblo. htown = fromstr('POINT(1947516.83115183 6322297.06040572)', srid=3084) ptown = fromstr('POINT(992363.390841912 481455.395105533)', srid=2774) prec = 3 # Precision is low due to version variations in PROJ and GDAL. # Asserting the result of the transform operation with the values in # the pre-transformed points. Oracle does not have the 3084 SRID. if not oracle: h = City.objects.transform(htown.srid).get(name='Houston') self.assertEqual(3084, h.point.srid) self.assertAlmostEqual(htown.x, h.point.x, prec) self.assertAlmostEqual(htown.y, h.point.y, prec) p1 = City.objects.transform(ptown.srid, field_name='point').get(name='Pueblo') p2 = City.objects.transform(srid=ptown.srid).get(name='Pueblo') for p in [p1, p2]: self.assertEqual(2774, p.point.srid) self.assertAlmostEqual(ptown.x, p.point.x, prec) self.assertAlmostEqual(ptown.y, p.point.y, prec) @skipUnlessDBFeature("has_translate_method") def test_translate(self): "Testing the `translate` GeoQuerySet method." xfac, yfac = 5, -23 qs = Country.objects.translate(xfac, yfac, model_att='translated') for c in qs: for p1, p2 in zip(c.mpoly, c.translated): for r1, r2 in zip(p1, p2): for c1, c2 in zip(r1.coords, r2.coords): # XXX The low precision is for SpatiaLite self.assertAlmostEqual(c1[0] + xfac, c2[0], 5) self.assertAlmostEqual(c1[1] + yfac, c2[1], 5) # TODO: Oracle can be made to pass if # union1 = union2 = fromstr('POINT (-97.5211570000000023 34.4646419999999978)') # but this seems unexpected and should be investigated to determine the cause. @skipUnlessDBFeature("has_unionagg_method") @no_oracle @ignore_warnings(category=RemovedInDjango110Warning) def test_unionagg(self): """ Testing the (deprecated) `unionagg` (aggregate union) GeoQuerySet method and the Union aggregate. """ tx = Country.objects.get(name='Texas').mpoly # Houston, Dallas -- Ordering may differ depending on backend or GEOS version. 
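        # so both orderings of the aggregated multipoint are accepted below.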
union1 = fromstr('MULTIPOINT(-96.801611 32.782057,-95.363151 29.763374)') union2 = fromstr('MULTIPOINT(-95.363151 29.763374,-96.801611 32.782057)') qs = City.objects.filter(point__within=tx) self.assertRaises(TypeError, qs.unionagg, 'name') self.assertRaises(ValueError, qs.aggregate, Union('name')) # Using `field_name` keyword argument in one query and specifying an # order in the other (which should not be used because this is # an aggregate method on a spatial column) u1 = qs.unionagg(field_name='point') u2 = qs.order_by('name').unionagg() u3 = qs.aggregate(Union('point'))['point__union'] u4 = qs.order_by('name').aggregate(Union('point'))['point__union'] tol = 0.00001 self.assertTrue(union1.equals_exact(u1, tol) or union2.equals_exact(u1, tol)) self.assertTrue(union1.equals_exact(u2, tol) or union2.equals_exact(u2, tol)) self.assertTrue(union1.equals_exact(u3, tol) or union2.equals_exact(u3, tol)) self.assertTrue(union1.equals_exact(u4, tol) or union2.equals_exact(u4, tol)) qs = City.objects.filter(name='NotACity') self.assertIsNone(qs.unionagg(field_name='point')) self.assertIsNone(qs.aggregate(Union('point'))['point__union']) def test_within_subquery(self): """ Test that using a queryset inside a geo lookup is working (using a subquery) (#14483). """ tex_cities = City.objects.filter( point__within=Country.objects.filter(name='Texas').values('mpoly')).order_by('name') expected = ['Dallas', 'Houston'] if not connection.features.supports_real_shape_operations: expected.append('Oklahoma City') self.assertEqual( list(tex_cities.values_list('name', flat=True)), expected ) def test_non_concrete_field(self): NonConcreteModel.objects.create(point=Point(0, 0), name='name') list(NonConcreteModel.objects.all()) # -*- coding: utf-8 -*- """ Cleanup related operations for resetting the Salt environment and removing a Ceph cluster """ from __future__ import absolute_import import logging import os import shutil import pwd import grp import yaml log = logging.getLogger(__name__) def configuration(): """ Purge all the necessary DeepSea related configurations Note: leave proposals out for now, some may want to minimally roll back without rerunning Stage 1 """ roles() default() def roles(): """ Remove the roles from the cluster/*.sls files """ # Keep yaml human readable/editable friendly_dumper = yaml.SafeDumper friendly_dumper.ignore_aliases = lambda self, data: True cluster_dir = '/srv/pillar/ceph/cluster' for filename in os.listdir(cluster_dir): pathname = "{}/{}".format(cluster_dir, filename) content = None with open(pathname, "r") as sls_file: content = yaml.safe_load(sls_file) log.info("content {}".format(content)) if 'roles' in content: content.pop('roles') with open(pathname, "w") as sls_file: sls_file.write(yaml.dump(content, Dumper=friendly_dumper, default_flow_style=False)) def proposals(): """ Remove all the generated subdirectories in .../proposals """ proposals_dir = '/srv/pillar/ceph/proposals' for path in os.listdir(proposals_dir): for partial in ['role-', 'cluster-', 'profile-', 'config']: if partial in path: log.info("removing {}/{}".format(proposals_dir, path)) shutil.rmtree("{}/{}".format(proposals_dir, path)) def default(): """ Remove the .../stack/defaults directory. 
Preserve available_roles """ # Keep yaml human readable/editable friendly_dumper = yaml.SafeDumper friendly_dumper.ignore_aliases = lambda self, data: True preserve = {} content = None pathname = "/srv/pillar/ceph/stack/default/{}/cluster.yml".format('ceph') with open(pathname, "r") as sls_file: content = yaml.safe_load(sls_file) preserve['available_roles'] = content['available_roles'] stack_default = "/srv/pillar/ceph/stack/default" shutil.rmtree(stack_default) os.makedirs("{}/{}".format(stack_default, 'ceph')) with open(pathname, "w") as sls_file: sls_file.write(yaml.dump(preserve, Dumper=friendly_dumper, default_flow_style=False)) uid = pwd.getpwnam("salt").pw_uid gid = grp.getgrnam("salt").gr_gid for path in [stack_default, "{}/{}".format(stack_default, 'ceph'), pathname]: os.chown(path, uid, gid) #!/usr/bin/env python3 # Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. from mcrouter.test.McrouterTestCase import McrouterTestCase class TestNoReplyBase(McrouterTestCase): config = './mcrouter/test/test_noreply.json' def setUp(self): # The order here must corresponds to the order of hosts in the .json self.mc = self.add_server(self.make_memcached()) def get_mcrouter(self): return self.add_mcrouter(self.config) class TestNoReply(TestNoReplyBase): def test_set_noreply(self): mcrouter = self.get_mcrouter() self.assertTrue(mcrouter.set("key", "value", noreply=True)) self.assertTrue(self.eventually_get(key="key", expVal="value")) def test_add_replace_noreply(self): mcrouter = self.get_mcrouter() self.assertTrue(mcrouter.add("key", "value", noreply=True)) self.assertTrue(self.eventually_get(key="key", expVal="value")) self.assertTrue(mcrouter.replace("key", "value1", noreply=True)) self.assertTrue(self.eventually_get(key="key", expVal="value1")) def test_delete_noreply(self): mcrouter = self.get_mcrouter() self.assertTrue(mcrouter.set("key", "value")) self.assertTrue(self.eventually_get(key="key", expVal="value")) self.assertTrue(mcrouter.delete("key", noreply=True)) self.assertFalse(self.mc.get("key")) def test_touch_noreply(self): mcrouter = self.get_mcrouter() self.assertTrue(mcrouter.set("key", "value")) self.assertTrue(self.eventually_get(key="key", expVal="value")) self.assertTrue(mcrouter.touch("key", 100, noreply=True)) self.assertTrue(self.eventually_get(key="key", expVal="value")) def test_arith_noreply(self): mcrouter = self.get_mcrouter() self.assertTrue(mcrouter.set("arith", "1")) self.assertTrue(self.eventually_get(key="arith", expVal="1")) self.assertTrue(mcrouter.incr("arith", noreply=True)) self.assertTrue(self.eventually_get(key="arith", expVal="2")) self.assertTrue(mcrouter.decr("arith", noreply=True)) self.assertTrue(self.eventually_get(key="arith", expVal="1")) class TestNoReplyAppendPrepend(TestNoReplyBase): def __init__(self, *args, **kwargs): super(TestNoReplyAppendPrepend, self).__init__(*args, **kwargs) self.use_mock_mc = True def test_affix_noreply(self): mcrouter = self.get_mcrouter() self.assertTrue(mcrouter.set("key", "value")) self.assertTrue(self.eventually_get(key="key", expVal="value")) self.assertTrue(mcrouter.append("key", "123", noreply=True)) self.assertTrue(self.eventually_get(key="key", expVal="value123")) self.assertTrue(mcrouter.prepend("key", "456", noreply=True)) self.assertTrue(self.eventually_get(key="key", expVal="456value123")) ''' Test cached responses and requests with bodies ''' # Licensed to the Apache Software 
Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. Test.Summary = ''' Test cached responses and requests with bodies ''' Test.ContinueOnFail = True # Define default ATS ts = Test.MakeATSProcess("ts") server = Test.MakeOriginServer("server") # **testname is required** testName = "" request_header1 = {"headers": "GET / HTTP/1.1\r\nHost: www.example.com\r\n\r\n", "timestamp": "1469733493.993", "body": ""} response_header1 = {"headers": "HTTP/1.1 200 OK\r\nConnection: close\r\nCache-Control: max-age=300\r\n\r\n", "timestamp": "1469733493.993", "body": "xxx"} request_header2 = {"headers": "GET /no_cache_control HTTP/1.1\r\nHost: www.example.com\r\n\r\n", "timestamp": "1469733493.993", "body": ""} response_header2 = {"headers": "HTTP/1.1 200 OK\r\nConnection: close\r\n\r\n", "timestamp": "1469733493.993", "body": "the flinstones"} request_header3 = {"headers": "GET /max_age_10sec HTTP/1.1\r\nHost: www.example.com\r\n\r\n", "timestamp": "1469733493.993", "body": ""} response_header3 = {"headers": "HTTP/1.1 200 OK\r\nConnection: close\r\nCache-Control: max-age=10,public\r\n\r\n", "timestamp": "1469733493.993", "body": "yabadabadoo"} server.addResponse("sessionlog.json", request_header1, response_header1) server.addResponse("sessionlog.json", request_header2, response_header2) server.addResponse("sessionlog.json", request_header3, response_header3) # ATS Configuration ts.Disk.plugin_config.AddLine('xdebug.so') ts.Disk.records_config.update({ 'proxy.config.diags.debug.enabled': 1, 'proxy.config.diags.debug.tags': 'http', 'proxy.config.http.response_via_str': 3, 'proxy.config.http.insert_age_in_response': 0, }) ts.Disk.remap_config.AddLine( 'map / http://127.0.0.1:{0}'.format(server.Variables.Port) ) # Test 1 - 200 response and cache fill tr = Test.AddTestRun() tr.Processes.Default.StartBefore(server) tr.Processes.Default.StartBefore(Test.Processes.ts) tr.Processes.Default.Command = 'curl -s -D - -v --ipv4 --http1.1 -H "x-debug: x-cache,via" -H "Host: www.example.com" http://localhost:{port}/max_age_10sec'.format( port=ts.Variables.port) tr.Processes.Default.ReturnCode = 0 tr.Processes.Default.Streams.stdout = "gold/cache_and_req_body-miss.gold" tr.StillRunningAfter = ts # Test 2 - 200 cached response and using netcat tr = Test.AddTestRun() tr.Processes.Default.Command = "printf 'GET /max_age_10sec HTTP/1.1\r\n''x-debug: x-cache,x-cache-key,via\r\n''Host: www.example.com\r\n''\r\n'|nc 127.0.0.1 -w 1 {port}".format( port=ts.Variables.port) tr.Processes.Default.ReturnCode = 0 tr.Processes.Default.Streams.stdout = "gold/cache_and_req_body-hit.gold" tr.StillRunningAfter = ts # Test 3 - response with no cache control, so cache-miss every time tr = Test.AddTestRun() tr.Processes.Default.Command = "printf 'GET /no_cache_control HTTP/1.1\r\n''x-debug: x-cache,x-cache-key,via\r\n''Host: www.example.com\r\n''\r\n'|nc 127.0.0.1 -w 1 
{port}".format( port=ts.Variables.port) tr.Processes.Default.ReturnCode = 0 tr.Processes.Default.Streams.stdout = "gold/cache_no_cc.gold" tr.StillRunningAfter = ts # Test 4 - Cache-Control: no-cache (from client), so cache miss every time tr = Test.AddTestRun() tr.Processes.Default.Command = "printf 'GET /no_cache_control HTTP/1.1\r\n''Cache-Control:no-cache\r\n''x-debug: x-cache,x-cache-key,via\r\n''Host: www.example.com\r\n''\r\n'|nc 127.0.0.1 -w 1 {port}".format( port=ts.Variables.port) tr.Processes.Default.ReturnCode = 0 tr.Processes.Default.Streams.stdout = "gold/cache_no_cc.gold" tr.StillRunningAfter = ts # Test 5 - hit stale cache. tr = Test.AddTestRun() tr.Processes.Default.Command = "sleep 15; printf 'GET /max_age_10sec HTTP/1.1\r\n''x-debug: x-cache,x-cache-key,via\r\n''Host: www.example.com\r\n''\r\n'|nc 127.0.0.1 -w 1 {port}".format( port=ts.Variables.port) tr.Processes.Default.ReturnCode = 0 tr.Processes.Default.Streams.stdout = "gold/cache_hit_stale.gold" tr.StillRunningAfter = ts # Test 6 - only-if-cached. 504 "Not Cached" should be returned if not in cache tr = Test.AddTestRun() tr.Processes.Default.Command = "printf 'GET /no_cache_control HTTP/1.1\r\n''Cache-Control: only-if-cached\r\n''x-debug: x-cache,x-cache-key,via\r\n''Host: www.example.com\r\n''Cache-control: max-age=300\r\n''\r\n'|nc 127.0.0.1 -w 1 {port}".format( port=ts.Variables.port) tr.Processes.Default.ReturnCode = 0 tr.Processes.Default.Streams.stdout = "gold/cache_no_cache.gold" tr.StillRunningAfter = ts # # Verify correct handling of various max-age directives in both clients and # responses. # ts = Test.MakeATSProcess("ts-for-proxy-verifier") replay_file = "replay/cache-control-max-age.replay.yaml" server = Test.MakeVerifierServerProcess("proxy-verifier-server", replay_file) ts.Disk.records_config.update({ 'proxy.config.diags.debug.enabled': 1, 'proxy.config.diags.debug.tags': 'http', 'proxy.config.http.insert_age_in_response': 0, # Disable ignoring max-age in the client request so we can test that # behavior too. 'proxy.config.http.cache.ignore_client_cc_max_age': 0, }) ts.Disk.remap_config.AddLine( 'map / http://127.0.0.1:{0}'.format(server.Variables.http_port) ) tr = Test.AddTestRun("Verify correct max-age cache-control behavior.") tr.Processes.Default.StartBefore(server) tr.Processes.Default.StartBefore(ts) tr.AddVerifierClientProcess("proxy-verifier-client", replay_file, http_ports=[ts.Variables.port]) # Copyright (c) Twisted Matrix Laboratories. # See LICENSE for details. """ Test cases for dirdbm module. 
""" import shutil from base64 import b64decode from twisted.trial import unittest from twisted.persisted import dirdbm from twisted.python.compat import _PY3 from twisted.python.filepath import FilePath class DirDbmTests(unittest.TestCase): def setUp(self): self.path = FilePath(self.mktemp()) self.dbm = dirdbm.open(self.path.path) self.items = ((b'abc', b'foo'), (b'/lalal', b'\000\001'), (b'\000\012', b'baz')) def testAll(self): k = b64decode("//==") self.dbm[k] = b"a" self.dbm[k] = b"a" self.assertEqual(self.dbm[k], b"a") def testRebuildInteraction(self): from twisted.persisted import dirdbm from twisted.python import rebuild s = dirdbm.Shelf('dirdbm.rebuild.test') s[b'key'] = b'value' rebuild.rebuild(dirdbm) # print s['key'] if _PY3: testRebuildInteraction.skip=( "Does not work on Python 3 (https://tm.tl/8887)") def testDbm(self): d = self.dbm # Insert keys keys = [] values = set() for k, v in self.items: d[k] = v keys.append(k) values.add(v) keys.sort() # Check they exist for k, v in self.items: self.assertIn(k, d) self.assertEqual(d[k], v) # Check non existent key try: d[b"XXX"] except KeyError: pass else: assert 0, "didn't raise KeyError on non-existent key" # Check keys(), values() and items() dbkeys = d.keys() dbvalues = set(d.values()) dbitems = set(d.items()) dbkeys.sort() items = set(self.items) self.assertEqual(keys, dbkeys, ".keys() output didn't match: %s != %s" % (repr(keys), repr(dbkeys))) self.assertEqual(values, dbvalues, ".values() output didn't match: %s != %s" % (repr(values), repr(dbvalues))) self.assertEqual(items, dbitems, "items() didn't match: %s != %s" % (repr(items), repr(dbitems))) copyPath = self.mktemp() d2 = d.copyTo(copyPath) copykeys = d.keys() copyvalues = set(d.values()) copyitems = set(d.items()) copykeys.sort() self.assertEqual(dbkeys, copykeys, ".copyTo().keys() didn't match: %s != %s" % (repr(dbkeys), repr(copykeys))) self.assertEqual(dbvalues, copyvalues, ".copyTo().values() didn't match: %s != %s" % (repr(dbvalues), repr(copyvalues))) self.assertEqual(dbitems, copyitems, ".copyTo().items() didn't match: %s != %s" % (repr(dbkeys), repr(copyitems))) d2.clear() self.assertTrue(len(d2.keys()) == len(d2.values()) == len(d2.items()) == len(d2) == 0, ".clear() failed") self.assertNotEqual(len(d), len(d2)) shutil.rmtree(copyPath) # Delete items for k, v in self.items: del d[k] self.assertNotIn(k, d, "key is still in database, even though we deleted it") self.assertEqual(len(d.keys()), 0, "database has keys") self.assertEqual(len(d.values()), 0, "database has values") self.assertEqual(len(d.items()), 0, "database has items") self.assertEqual(len(d), 0, "database has items") def testModificationTime(self): import time # The mtime value for files comes from a different place than the # gettimeofday() system call. On linux, gettimeofday() can be # slightly ahead (due to clock drift which gettimeofday() takes into # account but which open()/write()/close() do not), and if we are # close to the edge of the next second, time.time() can give a value # which is larger than the mtime which results from a subsequent # write(). I consider this a kernel bug, but it is beyond the scope # of this test. Thus we keep the range of acceptability to 3 seconds time. 
# -warner self.dbm[b"k"] = b"v" self.assertTrue(abs(time.time() - self.dbm.getModificationTime(b"k")) <= 3) self.assertRaises(KeyError, self.dbm.getModificationTime, b"nokey") def testRecovery(self): """ DirDBM: test recovery from directory after a faked crash """ k = self.dbm._encode(b"key1") with self.path.child(k + b".rpl").open(mode="wb") as f: f.write(b"value") k2 = self.dbm._encode(b"key2") with self.path.child(k2).open(mode="wb") as f: f.write(b"correct") with self.path.child(k2 + b".rpl").open(mode="wb") as f: f.write(b"wrong") with self.path.child("aa.new").open(mode="wb") as f: f.write(b"deleted") dbm = dirdbm.DirDBM(self.path.path) self.assertEqual(dbm[b"key1"], b"value") self.assertEqual(dbm[b"key2"], b"correct") self.assertFalse(self.path.globChildren("*.new")) self.assertFalse(self.path.globChildren("*.rpl")) def test_nonStringKeys(self): """ L{dirdbm.DirDBM} operations only support string keys: other types should raise a L{TypeError}. """ self.assertRaises(TypeError, self.dbm.__setitem__, 2, "3") try: self.assertRaises(TypeError, self.dbm.__setitem__, "2", 3) except unittest.FailTest: # dirdbm.Shelf.__setitem__ supports non-string values self.assertIsInstance(self.dbm, dirdbm.Shelf) self.assertRaises(TypeError, self.dbm.__getitem__, 2) self.assertRaises(TypeError, self.dbm.__delitem__, 2) self.assertRaises(TypeError, self.dbm.has_key, 2) self.assertRaises(TypeError, self.dbm.__contains__, 2) self.assertRaises(TypeError, self.dbm.getModificationTime, 2) def test_failSet(self): """ Failure path when setting an item. """ def _writeFail(path, data): path.setContent(data) raise IOError("fail to write") self.dbm[b"failkey"] = b"test" self.patch(self.dbm, "_writeFile", _writeFail) self.assertRaises(IOError, self.dbm.__setitem__, b"failkey", b"test2") class ShelfTests(DirDbmTests): def setUp(self): self.path = FilePath(self.mktemp()) self.dbm = dirdbm.Shelf(self.path.path) self.items = ((b'abc', b'foo'), (b'/lalal', b'\000\001'), (b'\000\012', b'baz'), (b'int', 12), (b'float', 12.0), (b'tuple', (None, 12))) testCases = [DirDbmTests, ShelfTests] # This file is part of beets. # Copyright 2013, Adrian Sampson. # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. """The core data store and collection logic for beets. """ import os import sys import logging import shlex import unicodedata import time import re from unidecode import unidecode from beets.mediafile import MediaFile, MutagenError, UnreadableFileError from beets import plugins from beets import util from beets.util import bytestring_path, syspath, normpath, samefile from beets.util.functemplate import Template from beets import dbcore from beets.dbcore import types import beets log = logging.getLogger('beets') # Library-specific query types. 
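# PathQuery (below) matches an item whose path equals the query path exactly,
# or any item stored under that path when it denotes a directory. A
# hypothetical usage sketch (`lib` and the path are illustrative, not from
# this module):
#
#     lib.items(PathQuery('path', '/music/beatles'))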
class PathQuery(dbcore.FieldQuery): """A query that matches all items under a given path.""" escape_re = re.compile(r'[\\_%]') escape_char = '\\' def __init__(self, field, pattern, fast=True): super(PathQuery, self).__init__(field, pattern, fast) # Match the path as a single file. self.file_path = util.bytestring_path(util.normpath(pattern)) # As a directory (prefix). self.dir_path = util.bytestring_path(os.path.join(self.file_path, '')) def match(self, item): return (item.path == self.file_path) or \ item.path.startswith(self.dir_path) def clause(self): escape = lambda m: self.escape_char + m.group(0) dir_pattern = self.escape_re.sub(escape, self.dir_path) dir_pattern = buffer(dir_pattern + '%') file_blob = buffer(self.file_path) return '({0} = ?) || ({0} LIKE ? ESCAPE ?)'.format(self.field), \ (file_blob, dir_pattern, self.escape_char) # Library-specific field types. class DateType(types.Float): # TODO representation should be `datetime` object # TODO distinguish between date and time types query = dbcore.query.DateQuery def format(self, value): return time.strftime(beets.config['time_format'].get(unicode), time.localtime(value or 0)) def parse(self, string): try: # Try a formatted date string. return time.mktime( time.strptime(string, beets.config['time_format'].get(unicode)) ) except ValueError: # Fall back to a plain timestamp number. try: return float(string) except ValueError: return self.null class PathType(types.Type): sql = u'BLOB' query = PathQuery model_type = bytes def format(self, value): return util.displayable_path(value) def parse(self, string): return normpath(bytestring_path(string)) def normalize(self, value): if isinstance(value, unicode): # Paths stored internally as encoded bytes. return bytestring_path(value) elif isinstance(value, buffer): # SQLite must store bytestrings as buffers to avoid decoding. # We unwrap buffers to bytes. return bytes(value) else: return value def from_sql(self, sql_value): return self.normalize(sql_value) def to_sql(self, value): if isinstance(value, str): value = buffer(value) return value class MusicalKey(types.String): """String representing the musical key of a song. The standard format is C, Cm, C#, C#m, etc. """ ENHARMONIC = { r'db': 'c#', r'eb': 'd#', r'gb': 'f#', r'ab': 'g#', r'bb': 'a#', } def parse(self, key): key = key.lower() for flat, sharp in self.ENHARMONIC.items(): key = re.sub(flat, sharp, key) key = re.sub(r'[\W\s]+minor', 'm', key) return key.capitalize() def normalize(self, key): if key is None: return None else: return self.parse(key) # Library-specific sort types. class SmartArtistSort(dbcore.query.Sort): """Sort by artist (either album artist or track artist), prioritizing the sort field over the raw field. """ def __init__(self, model_cls, ascending=True): self.album = model_cls is Album self.ascending = ascending def order_clause(self): order = "ASC" if self.ascending else "DESC" if self.album: field = 'albumartist' else: field = 'artist' return ('(CASE {0}_sort WHEN NULL THEN {0} ' 'WHEN "" THEN {0} ' 'ELSE {0}_sort END) {1}').format(field, order) def sort(self, objs): if self.album: key = lambda a: a.albumartist_sort or a.albumartist else: key = lambda i: i.artist_sort or i.artist return sorted(objs, key=key, reverse=not self.ascending) # Special path format key. PF_KEY_DEFAULT = 'default' # Exceptions. class FileOperationError(Exception): """Indicates an error when interacting with a file on disk. Possibilities include an unsupported media type, a permissions error, and an unhandled Mutagen exception.
""" def __init__(self, path, reason): """Create an exception describing an operation on the file at `path` with the underlying (chained) exception `reason`. """ super(FileOperationError, self).__init__(path, reason) self.path = path self.reason = reason def __unicode__(self): """Get a string representing the error. Describes both the underlying reason and the file path in question. """ return u'{0}: {1}'.format( util.displayable_path(self.path), unicode(self.reason) ) def __str__(self): return unicode(self).encode('utf8') class ReadError(FileOperationError): """An error while reading a file (i.e. in `Item.read`). """ def __unicode__(self): return u'error reading ' + super(ReadError, self).__unicode__() class WriteError(FileOperationError): """An error while writing a file (i.e. in `Item.write`). """ def __unicode__(self): return u'error writing ' + super(WriteError, self).__unicode__() # Item and Album model classes. class LibModel(dbcore.Model): """Shared concrete functionality for Items and Albums. """ def _template_funcs(self): funcs = DefaultTemplateFunctions(self, self._db).functions() funcs.update(plugins.template_funcs()) return funcs def store(self): super(LibModel, self).store() plugins.send('database_change', lib=self._db) def remove(self): super(LibModel, self).remove() plugins.send('database_change', lib=self._db) def add(self, lib=None): super(LibModel, self).add(lib) plugins.send('database_change', lib=self._db) class FormattedItemMapping(dbcore.db.FormattedMapping): """Add lookup for album-level fields. Album-level fields take precedence if `for_path` is true. """ def __init__(self, item, for_path=False): super(FormattedItemMapping, self).__init__(item, for_path) self.album = item.get_album() self.album_keys = [] if self.album: for key in self.album.keys(True): if key in Album.item_keys or key not in item._fields.keys(): self.album_keys.append(key) self.all_keys = set(self.model_keys).union(self.album_keys) def _get(self, key): """Get the value for a key, either from the album or the item. Raise a KeyError for invalid keys. """ if self.for_path and key in self.album_keys: return self._get_formatted(self.album, key) elif key in self.model_keys: return self._get_formatted(self.model, key) elif key in self.album_keys: return self._get_formatted(self.album, key) else: raise KeyError(key) def __getitem__(self, key): """Get the value for a key. Certain unset values are remapped. """ value = self._get(key) # `artist` and `albumartist` fields fall back to one another. # This is helpful in path formats when the album artist is unset # on as-is imports. 
if key == 'artist' and not value: return self._get('albumartist') elif key == 'albumartist' and not value: return self._get('artist') else: return value def __iter__(self): return iter(self.all_keys) def __len__(self): return len(self.all_keys) class Item(LibModel): _table = 'items' _flex_table = 'item_attributes' _fields = { 'id': types.PRIMARY_ID, 'path': PathType(), 'album_id': types.FOREIGN_ID, 'title': types.STRING, 'artist': types.STRING, 'artist_sort': types.STRING, 'artist_credit': types.STRING, 'album': types.STRING, 'albumartist': types.STRING, 'albumartist_sort': types.STRING, 'albumartist_credit': types.STRING, 'genre': types.STRING, 'composer': types.STRING, 'grouping': types.STRING, 'year': types.PaddedInt(4), 'month': types.PaddedInt(2), 'day': types.PaddedInt(2), 'track': types.PaddedInt(2), 'tracktotal': types.PaddedInt(2), 'disc': types.PaddedInt(2), 'disctotal': types.PaddedInt(2), 'lyrics': types.STRING, 'comments': types.STRING, 'bpm': types.INTEGER, 'comp': types.BOOLEAN, 'mb_trackid': types.STRING, 'mb_albumid': types.STRING, 'mb_artistid': types.STRING, 'mb_albumartistid': types.STRING, 'albumtype': types.STRING, 'label': types.STRING, 'acoustid_fingerprint': types.STRING, 'acoustid_id': types.STRING, 'mb_releasegroupid': types.STRING, 'asin': types.STRING, 'catalognum': types.STRING, 'script': types.STRING, 'language': types.STRING, 'country': types.STRING, 'albumstatus': types.STRING, 'media': types.STRING, 'albumdisambig': types.STRING, 'disctitle': types.STRING, 'encoder': types.STRING, 'rg_track_gain': types.NULL_FLOAT, 'rg_track_peak': types.NULL_FLOAT, 'rg_album_gain': types.NULL_FLOAT, 'rg_album_peak': types.NULL_FLOAT, 'original_year': types.PaddedInt(4), 'original_month': types.PaddedInt(2), 'original_day': types.PaddedInt(2), 'initial_key': MusicalKey(), 'length': types.FLOAT, 'bitrate': types.ScaledInt(1000, u'kbps'), 'format': types.STRING, 'samplerate': types.ScaledInt(1000, u'kHz'), 'bitdepth': types.INTEGER, 'channels': types.INTEGER, 'mtime': DateType(), 'added': DateType(), } _search_fields = ('artist', 'title', 'comments', 'album', 'albumartist', 'genre') _media_fields = set(MediaFile.readable_fields()) \ .intersection(_fields.keys()) """Set of item fields that are backed by `MediaFile` fields. Any kind of field (fixed, flexible, and computed) may be a media field. Only these fields are read from disk in `read` and written in `write`. """ _formatter = FormattedItemMapping _sorts = {'artist': SmartArtistSort} @classmethod def _getters(cls): getters = plugins.item_field_getters() getters['singleton'] = lambda i: i.album_id is None return getters @classmethod def from_path(cls, path): """Creates a new item from the media file at the specified path. """ # Initiate with values that aren't read from files. i = cls(album_id=None) i.read(path) i.mtime = i.current_mtime() # Initial mtime. return i def __setitem__(self, key, value): """Set the item's value for a standard field or a flexattr. """ # Encode unicode paths and read buffers. if key == 'path': if isinstance(value, unicode): value = bytestring_path(value) elif isinstance(value, buffer): value = str(value) if key in MediaFile.fields(): self.mtime = 0 # Reset mtime on dirty. super(Item, self).__setitem__(key, value) def update(self, values): """Set all key/value pairs in the mapping. If mtime is specified, it is not reset (as it might otherwise be). 
""" super(Item, self).update(values) if self.mtime == 0 and 'mtime' in values: self.mtime = values['mtime'] def get_album(self): """Get the Album object that this item belongs to, if any, or None if the item is a singleton or is not associated with a library. """ if not self._db: return None return self._db.get_album(self) # Interaction with file metadata. def read(self, read_path=None): """Read the metadata from the associated file. If `read_path` is specified, read metadata from that file instead. Updates all the properties in `_media_fields` from the media file. Raises a `ReadError` if the file could not be read. """ if read_path is None: read_path = self.path else: read_path = normpath(read_path) try: mediafile = MediaFile(syspath(read_path)) except (OSError, IOError, UnreadableFileError) as exc: raise ReadError(read_path, exc) for key in self._media_fields: value = getattr(mediafile, key) if isinstance(value, (int, long)): # Filter values wider than 64 bits (in signed representation). # SQLite cannot store them. py26: Post transition, we can use: # value.bit_length() > 63 if abs(value) >= 2 ** 63: value = 0 self[key] = value # Database's mtime should now reflect the on-disk value. if read_path == self.path: self.mtime = self.current_mtime() self.path = read_path def write(self, path=None): """Write the item's metadata to a media file. All fields in `_media_fields` are written to disk according to the values on this object. Can raise either a `ReadError` or a `WriteError`. """ if path is None: path = self.path else: path = normpath(path) tags = dict(self) plugins.send('write', item=self, path=path, tags=tags) try: mediafile = MediaFile(syspath(path), id3v23=beets.config['id3v23'].get(bool)) except (OSError, IOError, UnreadableFileError) as exc: raise ReadError(self.path, exc) mediafile.update(tags) try: mediafile.save() except (OSError, IOError, MutagenError) as exc: raise WriteError(self.path, exc) # The file has a new mtime. if path == self.path: self.mtime = self.current_mtime() plugins.send('after_write', item=self, path=path) def try_write(self, path=None): """Calls `write()` but catches and logs `FileOperationError` exceptions. Returns `False` an exception was caught and `True` otherwise. """ try: self.write(path) return True except FileOperationError as exc: log.error(exc) return False def try_sync(self, write=None): """Synchronize the item with the database and the media file tags, updating them with this object's current state. By default, the current `path` for the item is used to write tags. If `write` is `False`, no tags are written. If `write` is a path, tags are written to that file instead. Similar to calling :meth:`write` and :meth:`store`. """ if write is True: write = None if write is not False: self.try_write(path=write) self.store() # Files themselves. def move_file(self, dest, copy=False, link=False): """Moves or copies the item's file, updating the path value if the move succeeds. If a file exists at ``dest``, then it is slightly modified to be unique. 
""" if not util.samefile(self.path, dest): dest = util.unique_path(dest) if copy: util.copy(self.path, dest) plugins.send("item_copied", item=self, source=self.path, destination=dest) elif link: util.link(self.path, dest) plugins.send("item_linked", item=self, source=self.path, destination=dest) else: plugins.send("before_item_moved", item=self, source=self.path, destination=dest) util.move(self.path, dest) plugins.send("item_moved", item=self, source=self.path, destination=dest) # Either copying or moving succeeded, so update the stored path. self.path = dest def current_mtime(self): """Returns the current mtime of the file, rounded to the nearest integer. """ return int(os.path.getmtime(syspath(self.path))) # Model methods. def remove(self, delete=False, with_album=True): """Removes the item. If `delete`, then the associated file is removed from disk. If `with_album`, then the item's album (if any) is removed if it the item was the last in the album. """ super(Item, self).remove() # Remove the album if it is empty. if with_album: album = self.get_album() if album and not album.items(): album.remove(delete, False) # Send a 'item_removed' signal to plugins plugins.send('item_removed', item=self) # Delete the associated file. if delete: util.remove(self.path) util.prune_dirs(os.path.dirname(self.path), self._db.directory) self._db._memotable = {} def move(self, copy=False, link=False, basedir=None, with_album=True): """Move the item to its designated location within the library directory (provided by destination()). Subdirectories are created as needed. If the operation succeeds, the item's path field is updated to reflect the new location. If `copy` is true, moving the file is copied rather than moved. Similarly, `link` creates a symlink instead. basedir overrides the library base directory for the destination. If the item is in an album, the album is given an opportunity to move its art. (This can be disabled by passing with_album=False.) The item is stored to the database if it is in the database, so any dirty fields prior to the move() call will be written as a side effect. You probably want to call save() to commit the DB transaction. """ self._check_db() dest = self.destination(basedir=basedir) # Create necessary ancestry for the move. util.mkdirall(dest) # Perform the move and store the change. old_path = self.path self.move_file(dest, copy, link) self.store() # If this item is in an album, move its art. if with_album: album = self.get_album() if album: album.move_art(copy) album.store() # Prune vacated directory. if not copy: util.prune_dirs(os.path.dirname(old_path), self._db.directory) # Templating. def destination(self, fragment=False, basedir=None, platform=None, path_formats=None): """Returns the path in the library directory designated for the item (i.e., where the file ought to be). fragment makes this method return just the path fragment underneath the root library directory; the path is also returned as Unicode instead of encoded as a bytestring. basedir can override the library's base directory for the destination. """ self._check_db() platform = platform or sys.platform basedir = basedir or self._db.directory path_formats = path_formats or self._db.path_formats # Use a path format based on a query, falling back on the # default. for query, path_format in path_formats: if query == PF_KEY_DEFAULT: continue query, _ = parse_query_string(query, type(self)) if query.match(self): # The query matches the item! Use the corresponding path # format. 
break else: # No query matched; fall back to default. for query, path_format in path_formats: if query == PF_KEY_DEFAULT: break else: assert False, "no default path format" if isinstance(path_format, Template): subpath_tmpl = path_format else: subpath_tmpl = Template(path_format) # Evaluate the selected template. subpath = self.evaluate_template(subpath_tmpl, True) # Prepare path for output: normalize Unicode characters. if platform == 'darwin': subpath = unicodedata.normalize('NFD', subpath) else: subpath = unicodedata.normalize('NFC', subpath) if beets.config['asciify_paths']: subpath = unidecode(subpath) # Truncate components and remove forbidden characters. subpath = util.sanitize_path(subpath, self._db.replacements) # Encode for the filesystem. if not fragment: subpath = bytestring_path(subpath) # Preserve extension. _, extension = os.path.splitext(self.path) if fragment: # Outputting Unicode. extension = extension.decode('utf8', 'ignore') subpath += extension.lower() # Truncate too-long components. maxlen = beets.config['max_filename_length'].get(int) if not maxlen: # When zero, try to determine from filesystem. maxlen = util.max_filename_length(self._db.directory) subpath = util.truncate_path(subpath, maxlen) if fragment: return subpath else: return normpath(os.path.join(basedir, subpath)) class Album(LibModel): """Provides access to information about albums stored in a library. Reflects the library's "albums" table, including album art. """ _table = 'albums' _flex_table = 'album_attributes' _always_dirty = True _fields = { 'id': types.PRIMARY_ID, 'artpath': PathType(), 'added': DateType(), 'albumartist': types.STRING, 'albumartist_sort': types.STRING, 'albumartist_credit': types.STRING, 'album': types.STRING, 'genre': types.STRING, 'year': types.PaddedInt(4), 'month': types.PaddedInt(2), 'day': types.PaddedInt(2), 'tracktotal': types.PaddedInt(2), 'disctotal': types.PaddedInt(2), 'comp': types.BOOLEAN, 'mb_albumid': types.STRING, 'mb_albumartistid': types.STRING, 'albumtype': types.STRING, 'label': types.STRING, 'mb_releasegroupid': types.STRING, 'asin': types.STRING, 'catalognum': types.STRING, 'script': types.STRING, 'language': types.STRING, 'country': types.STRING, 'albumstatus': types.STRING, 'albumdisambig': types.STRING, 'rg_album_gain': types.NULL_FLOAT, 'rg_album_peak': types.NULL_FLOAT, 'original_year': types.PaddedInt(4), 'original_month': types.PaddedInt(2), 'original_day': types.PaddedInt(2), } _search_fields = ('album', 'albumartist', 'genre') _sorts = { 'albumartist': SmartArtistSort, 'artist': SmartArtistSort, } item_keys = [ 'added', 'albumartist', 'albumartist_sort', 'albumartist_credit', 'album', 'genre', 'year', 'month', 'day', 'tracktotal', 'disctotal', 'comp', 'mb_albumid', 'mb_albumartistid', 'albumtype', 'label', 'mb_releasegroupid', 'asin', 'catalognum', 'script', 'language', 'country', 'albumstatus', 'albumdisambig', 'rg_album_gain', 'rg_album_peak', 'original_year', 'original_month', 'original_day', ] """List of keys that are set on an album's items. """ @classmethod def _getters(cls): # In addition to plugin-provided computed fields, also expose # the album's directory as `path`. getters = plugins.album_field_getters() getters['path'] = Album.item_dir return getters def items(self): """Returns an iterable over the items associated with this album. """ return self._db.items(dbcore.MatchQuery('album_id', self.id)) def remove(self, delete=False, with_items=True): """Removes this album and all its associated items from the library. 
If delete, then the items' files are also deleted from disk, along with any album art. The directories containing the album are also removed (recursively) if empty. Set with_items to False to avoid removing the album's items. """ super(Album, self).remove() # Delete art file. if delete: artpath = self.artpath if artpath: util.remove(artpath) # Remove (and possibly delete) the constituent items. if with_items: for item in self.items(): item.remove(delete, False) def move_art(self, copy=False, link=False): """Move or copy any existing album art so that it remains in the same directory as the items. """ old_art = self.artpath if not old_art: return new_art = self.art_destination(old_art) if new_art == old_art: return new_art = util.unique_path(new_art) log.debug(u'moving album art {0} to {1}' .format(util.displayable_path(old_art), util.displayable_path(new_art))) if copy: util.copy(old_art, new_art) elif link: util.link(old_art, new_art) else: util.move(old_art, new_art) self.artpath = new_art # Prune old path when moving. if not copy: util.prune_dirs(os.path.dirname(old_art), self._db.directory) def move(self, copy=False, link=False, basedir=None): """Moves (or copies) all items to their destination. Any album art moves along with them. basedir overrides the library base directory for the destination. The album is stored to the database, persisting any modifications to its metadata. """ basedir = basedir or self._db.directory # Ensure new metadata is available to items for destination # computation. self.store() # Move items. items = list(self.items()) for item in items: item.move(copy, link, basedir=basedir, with_album=False) # Move art. self.move_art(copy, link) self.store() def item_dir(self): """Returns the directory containing the album's first item, provided that such an item exists. """ item = self.items().get() if not item: raise ValueError('empty album') return os.path.dirname(item.path) def art_destination(self, image, item_dir=None): """Returns a path to the destination for the album art image for the album. `image` is the path of the image that will be moved there (used for its extension). The path construction uses the existing path of the album's items, so the album must contain at least one item or item_dir must be provided. """ image = bytestring_path(image) item_dir = item_dir or self.item_dir() filename_tmpl = Template(beets.config['art_filename'].get(unicode)) subpath = self.evaluate_template(filename_tmpl, True) if beets.config['asciify_paths']: subpath = unidecode(subpath) subpath = util.sanitize_path(subpath, replacements=self._db.replacements) subpath = bytestring_path(subpath) _, ext = os.path.splitext(image) dest = os.path.join(item_dir, subpath + ext) return bytestring_path(dest) def set_art(self, path, copy=True): """Sets the album's cover art to the image at the given path. The image is copied (or moved) into place, replacing any existing art. """ path = bytestring_path(path) oldart = self.artpath artdest = self.art_destination(path) if oldart and samefile(path, oldart): # Art already set. return elif samefile(path, artdest): # Art already in place. self.artpath = path return # Normal operation. if oldart == artdest: util.remove(oldart) artdest = util.unique_path(artdest) if copy: util.copy(path, artdest) else: util.move(path, artdest) self.artpath = artdest def store(self): """Update the database with the album information. The album's tracks are also updated. """ # Get modified track fields. 
track_updates = {} for key in self.item_keys: if key in self._dirty: track_updates[key] = self[key] with self._db.transaction(): super(Album, self).store() if track_updates: for item in self.items(): for key, value in track_updates.items(): item[key] = value item.store() def try_sync(self, write=True): """Synchronize the album and its items with the database and their files by updating them with this object's current state. `write` indicates whether to write tags to the item files. """ self.store() for item in self.items(): item.try_sync(bool(write)) # Query construction helpers. def parse_query_parts(parts, model_cls): """Given a beets query string as a list of components, return the `Query` and `Sort` they represent. Like `dbcore.parse_sorted_query`, with beets query prefixes and special path query detection. """ # Get query types and their prefix characters. prefixes = {':': dbcore.query.RegexpQuery} prefixes.update(plugins.queries()) # Special-case path-like queries, which are non-field queries # containing path separators (/). if 'path' in model_cls._fields: path_parts = [] non_path_parts = [] for s in parts: if s.find(os.sep, 0, s.find(':')) != -1: # Separator precedes colon. path_parts.append(s) else: non_path_parts.append(s) else: path_parts = () non_path_parts = parts query, sort = dbcore.parse_sorted_query( model_cls, non_path_parts, prefixes ) # Add path queries to aggregate query. if path_parts: query.subqueries += [PathQuery('path', s) for s in path_parts] return query, sort def parse_query_string(s, model_cls): """Given a beets query string, return the `Query` and `Sort` they represent. The string is split into components using shell-like syntax. """ # A bug in Python < 2.7.3 prevents correct shlex splitting of # Unicode strings. # http://bugs.python.org/issue6988 if isinstance(s, unicode): s = s.encode('utf8') parts = [p.decode('utf8') for p in shlex.split(s)] return parse_query_parts(parts, model_cls) # The Library: interface to the database. class Library(dbcore.Database): """A database of music containing songs and albums. """ _models = (Item, Album) def __init__(self, path='library.blb', directory='~/Music', path_formats=((PF_KEY_DEFAULT, '$artist/$album/$track $title'),), replacements=None): if path != ':memory:': self.path = bytestring_path(normpath(path)) super(Library, self).__init__(path) self.directory = bytestring_path(normpath(directory)) self.path_formats = path_formats self.replacements = replacements self._memotable = {} # Used for template substitution performance. # Adding objects to the database. def add(self, obj): """Add the :class:`Item` or :class:`Album` object to the library database. Return the object's new id. """ obj.add(self) self._memotable = {} return obj.id def add_album(self, items): """Create a new album consisting of a list of items. The items are added to the database if they don't yet have an ID. Return a new :class:`Album` object. The list items must not be empty. """ if not items: raise ValueError(u'need at least one item') # Create the album structure using metadata from the first item. values = dict((key, items[0][key]) for key in Album.item_keys) album = Album(self, **values) # Add the album structure and set the items' album_id fields. # Store or add the items. with self.transaction(): album.add(self) for item in items: item.album_id = album.id if item.id is None: item.add(self) else: item.store() return album # Querying. def _fetch(self, model_cls, query, sort=None): """Parse a query and fetch. 
If an order specification is present in the query string, the `sort` argument is ignored. """ # Parse the query, if necessary. parsed_sort = None if isinstance(query, basestring): query, parsed_sort = parse_query_string(query, model_cls) elif isinstance(query, (list, tuple)): query, parsed_sort = parse_query_parts(query, model_cls) # Any non-null sort specified by the parsed query overrides the # provided sort. if parsed_sort and not isinstance(parsed_sort, dbcore.query.NullSort): sort = parsed_sort return super(Library, self)._fetch( model_cls, query, sort ) def albums(self, query=None, sort=None): """Get :class:`Album` objects matching the query. """ sort = sort or dbcore.sort_from_strings( Album, beets.config['sort_album'].as_str_seq() ) return self._fetch(Album, query, sort) def items(self, query=None, sort=None): """Get :class:`Item` objects matching the query. """ sort = sort or dbcore.sort_from_strings( Item, beets.config['sort_item'].as_str_seq() ) return self._fetch(Item, query, sort) # Convenience accessors. def get_item(self, id): """Fetch an :class:`Item` by its ID. Returns `None` if no match is found. """ return self._get(Item, id) def get_album(self, item_or_id): """Given an album ID or an item associated with an album, return an :class:`Album` object for the album. If no such album exists, returns `None`. """ if isinstance(item_or_id, int): album_id = item_or_id else: album_id = item_or_id.album_id if album_id is None: return None return self._get(Album, album_id) # Default path template resources. def _int_arg(s): """Convert a string argument to an integer for use in a template function. May raise a ValueError. """ return int(s.strip()) class DefaultTemplateFunctions(object): """A container class for the default functions provided to path templates. These functions are contained in an object to provide additional context to the functions -- specifically, the Item being evaluated. """ _prefix = 'tmpl_' def __init__(self, item=None, lib=None): """Parameterize the functions. If `item` or `lib` is None, then some functions (namely, ``aunique``) will always evaluate to the empty string. """ self.item = item self.lib = lib def functions(self): """Returns a dictionary containing the functions defined in this object. The keys are function names (as exposed in templates) and the values are Python functions. """ out = {} for key in self._func_names: out[key[len(self._prefix):]] = getattr(self, key) return out @staticmethod def tmpl_lower(s): """Convert a string to lower case.""" return s.lower() @staticmethod def tmpl_upper(s): """Convert a string to upper case.""" return s.upper() @staticmethod def tmpl_title(s): """Convert a string to title case.""" return s.title() @staticmethod def tmpl_left(s, chars): """Get the leftmost characters of a string.""" return s[0:_int_arg(chars)] @staticmethod def tmpl_right(s, chars): """Get the rightmost characters of a string.""" return s[-_int_arg(chars):] @staticmethod def tmpl_if(condition, trueval, falseval=u''): """If ``condition`` is nonempty and nonzero, emit ``trueval``; otherwise, emit ``falseval`` (if provided). """ try: int_condition = _int_arg(condition) except ValueError: if condition.lower() == "false": return falseval else: condition = int_condition if condition: return trueval else: return falseval @staticmethod def tmpl_asciify(s): """Translate non-ASCII characters to their ASCII equivalents. """ return unidecode(s) @staticmethod def tmpl_time(s, format): """Format a time value using `strftime`.
""" cur_fmt = beets.config['time_format'].get(unicode) return time.strftime(format, time.strptime(s, cur_fmt)) def tmpl_aunique(self, keys=None, disam=None): """Generate a string that is guaranteed to be unique among all albums in the library who share the same set of keys. A fields from "disam" is used in the string if one is sufficient to disambiguate the albums. Otherwise, a fallback opaque value is used. Both "keys" and "disam" should be given as whitespace-separated lists of field names. """ # Fast paths: no album, no item or library, or memoized value. if not self.item or not self.lib: return u'' if self.item.album_id is None: return u'' memokey = ('aunique', keys, disam, self.item.album_id) memoval = self.lib._memotable.get(memokey) if memoval is not None: return memoval keys = keys or 'albumartist album' disam = disam or 'albumtype year label catalognum albumdisambig' keys = keys.split() disam = disam.split() album = self.lib.get_album(self.item) if not album: # Do nothing for singletons. self.lib._memotable[memokey] = u'' return u'' # Find matching albums to disambiguate with. subqueries = [] for key in keys: value = getattr(album, key) subqueries.append(dbcore.MatchQuery(key, value)) albums = self.lib.albums(dbcore.AndQuery(subqueries)) # If there's only one album to matching these details, then do # nothing. if len(albums) == 1: self.lib._memotable[memokey] = u'' return u'' # Find the first disambiguator that distinguishes the albums. for disambiguator in disam: # Get the value for each album for the current field. disam_values = set([getattr(a, disambiguator) for a in albums]) # If the set of unique values is equal to the number of # albums in the disambiguation set, we're done -- this is # sufficient disambiguation. if len(disam_values) == len(albums): break else: # No disambiguator distinguished all fields. res = u' {0}'.format(album.id) self.lib._memotable[memokey] = res return res # Flatten disambiguation value into a string. disam_value = album.formatted(True).get(disambiguator) res = u' [{0}]'.format(disam_value) self.lib._memotable[memokey] = res return res # Get the name of tmpl_* functions in the above class. 
DefaultTemplateFunctions._func_names = \ [s for s in dir(DefaultTemplateFunctions) if s.startswith(DefaultTemplateFunctions._prefix)] import unittest from unittest.mock import ( call, _Call, create_autospec, MagicMock, Mock, ANY, _CallList, patch, PropertyMock ) from datetime import datetime class SomeClass(object): def one(self, a, b): pass def two(self): pass def three(self, a=None): pass class AnyTest(unittest.TestCase): def test_any(self): self.assertEqual(ANY, object()) mock = Mock() mock(ANY) mock.assert_called_with(ANY) mock = Mock() mock(foo=ANY) mock.assert_called_with(foo=ANY) def test_repr(self): self.assertEqual(repr(ANY), '<ANY>') self.assertEqual(str(ANY), '<ANY>') def test_any_and_datetime(self): mock = Mock() mock(datetime.now(), foo=datetime.now()) mock.assert_called_with(ANY, foo=ANY) def test_any_mock_calls_comparison_order(self): mock = Mock() d = datetime.now() class Foo(object): def __eq__(self, other): return False def __ne__(self, other): return True for d in datetime.now(), Foo(): mock.reset_mock() mock(d, foo=d, bar=d) mock.method(d, zinga=d, alpha=d) mock().method(a1=d, z99=d) expected = [ call(ANY, foo=ANY, bar=ANY), call.method(ANY, zinga=ANY, alpha=ANY), call(), call().method(a1=ANY, z99=ANY) ] self.assertEqual(expected, mock.mock_calls) self.assertEqual(mock.mock_calls, expected) class CallTest(unittest.TestCase): def test_call_with_call(self): kall = _Call() self.assertEqual(kall, _Call()) self.assertEqual(kall, _Call(('',))) self.assertEqual(kall, _Call(((),))) self.assertEqual(kall, _Call(({},))) self.assertEqual(kall, _Call(('', ()))) self.assertEqual(kall, _Call(('', {}))) self.assertEqual(kall, _Call(('', (), {}))) self.assertEqual(kall, _Call(('foo',))) self.assertEqual(kall, _Call(('bar', ()))) self.assertEqual(kall, _Call(('baz', {}))) self.assertEqual(kall, _Call(('spam', (), {}))) kall = _Call(((1, 2, 3),)) self.assertEqual(kall, _Call(((1, 2, 3),))) self.assertEqual(kall, _Call(('', (1, 2, 3)))) self.assertEqual(kall, _Call(((1, 2, 3), {}))) self.assertEqual(kall, _Call(('', (1, 2, 3), {}))) kall = _Call(((1, 2, 4),)) self.assertNotEqual(kall, _Call(('', (1, 2, 3)))) self.assertNotEqual(kall, _Call(('', (1, 2, 3), {}))) kall = _Call(('foo', (1, 2, 4),)) self.assertNotEqual(kall, _Call(('', (1, 2, 4)))) self.assertNotEqual(kall, _Call(('', (1, 2, 4), {}))) self.assertNotEqual(kall, _Call(('bar', (1, 2, 4)))) self.assertNotEqual(kall, _Call(('bar', (1, 2, 4), {}))) kall = _Call(({'a': 3},)) self.assertEqual(kall, _Call(('', (), {'a': 3}))) self.assertEqual(kall, _Call(('', {'a': 3}))) self.assertEqual(kall, _Call(((), {'a': 3}))) self.assertEqual(kall, _Call(({'a': 3},))) def test_empty__Call(self): args = _Call() self.assertEqual(args, ()) self.assertEqual(args, ('foo',)) self.assertEqual(args, ((),)) self.assertEqual(args, ('foo', ())) self.assertEqual(args, ('foo',(), {})) self.assertEqual(args, ('foo', {})) self.assertEqual(args, ({},)) def test_named_empty_call(self): args = _Call(('foo', (), {})) self.assertEqual(args, ('foo',)) self.assertEqual(args, ('foo', ())) self.assertEqual(args, ('foo',(), {})) self.assertEqual(args, ('foo', {})) self.assertNotEqual(args, ((),)) self.assertNotEqual(args, ()) self.assertNotEqual(args, ({},)) self.assertNotEqual(args, ('bar',)) self.assertNotEqual(args, ('bar', ())) self.assertNotEqual(args, ('bar', {})) def test_call_with_args(self): args = _Call(((1, 2, 3), {})) self.assertEqual(args, ((1, 2, 3),)) self.assertEqual(args, ('foo', (1, 2, 3))) self.assertEqual(args, ('foo', (1, 2, 3), {})) self.assertEqual(args,
((1, 2, 3), {})) def test_named_call_with_args(self): args = _Call(('foo', (1, 2, 3), {})) self.assertEqual(args, ('foo', (1, 2, 3))) self.assertEqual(args, ('foo', (1, 2, 3), {})) self.assertNotEqual(args, ((1, 2, 3),)) self.assertNotEqual(args, ((1, 2, 3), {})) def test_call_with_kwargs(self): args = _Call(((), dict(a=3, b=4))) self.assertEqual(args, (dict(a=3, b=4),)) self.assertEqual(args, ('foo', dict(a=3, b=4))) self.assertEqual(args, ('foo', (), dict(a=3, b=4))) self.assertEqual(args, ((), dict(a=3, b=4))) def test_named_call_with_kwargs(self): args = _Call(('foo', (), dict(a=3, b=4))) self.assertEqual(args, ('foo', dict(a=3, b=4))) self.assertEqual(args, ('foo', (), dict(a=3, b=4))) self.assertNotEqual(args, (dict(a=3, b=4),)) self.assertNotEqual(args, ((), dict(a=3, b=4))) def test_call_with_args_call_empty_name(self): args = _Call(((1, 2, 3), {})) self.assertEqual(args, call(1, 2, 3)) self.assertEqual(call(1, 2, 3), args) self.assertTrue(call(1, 2, 3) in [args]) def test_call_ne(self): self.assertNotEqual(_Call(((1, 2, 3),)), call(1, 2)) self.assertFalse(_Call(((1, 2, 3),)) != call(1, 2, 3)) self.assertTrue(_Call(((1, 2), {})) != call(1, 2, 3)) def test_call_non_tuples(self): kall = _Call(((1, 2, 3),)) for value in 1, None, self, int: self.assertNotEqual(kall, value) self.assertFalse(kall == value) def test_repr(self): self.assertEqual(repr(_Call()), 'call()') self.assertEqual(repr(_Call(('foo',))), 'call.foo()') self.assertEqual(repr(_Call(((1, 2, 3), {'a': 'b'}))), "call(1, 2, 3, a='b')") self.assertEqual(repr(_Call(('bar', (1, 2, 3), {'a': 'b'}))), "call.bar(1, 2, 3, a='b')") self.assertEqual(repr(call), 'call') self.assertEqual(str(call), 'call') self.assertEqual(repr(call()), 'call()') self.assertEqual(repr(call(1)), 'call(1)') self.assertEqual(repr(call(zz='thing')), "call(zz='thing')") self.assertEqual(repr(call().foo), 'call().foo') self.assertEqual(repr(call(1).foo.bar(a=3).bing), 'call().foo.bar().bing') self.assertEqual( repr(call().foo(1, 2, a=3)), "call().foo(1, 2, a=3)" ) self.assertEqual(repr(call()()), "call()()") self.assertEqual(repr(call(1)(2)), "call()(2)") self.assertEqual( repr(call()().bar().baz.beep(1)), "call()().bar().baz.beep(1)" ) def test_call(self): self.assertEqual(call(), ('', (), {})) self.assertEqual(call('foo', 'bar', one=3, two=4), ('', ('foo', 'bar'), {'one': 3, 'two': 4})) mock = Mock() mock(1, 2, 3) mock(a=3, b=6) self.assertEqual(mock.call_args_list, [call(1, 2, 3), call(a=3, b=6)]) def test_attribute_call(self): self.assertEqual(call.foo(1), ('foo', (1,), {})) self.assertEqual(call.bar.baz(fish='eggs'), ('bar.baz', (), {'fish': 'eggs'})) mock = Mock() mock.foo(1, 2 ,3) mock.bar.baz(a=3, b=6) self.assertEqual(mock.method_calls, [call.foo(1, 2, 3), call.bar.baz(a=3, b=6)]) def test_extended_call(self): result = call(1).foo(2).bar(3, a=4) self.assertEqual(result, ('().foo().bar', (3,), dict(a=4))) mock = MagicMock() mock(1, 2, a=3, b=4) self.assertEqual(mock.call_args, call(1, 2, a=3, b=4)) self.assertNotEqual(mock.call_args, call(1, 2, 3)) self.assertEqual(mock.call_args_list, [call(1, 2, a=3, b=4)]) self.assertEqual(mock.mock_calls, [call(1, 2, a=3, b=4)]) mock = MagicMock() mock.foo(1).bar()().baz.beep(a=6) last_call = call.foo(1).bar()().baz.beep(a=6) self.assertEqual(mock.mock_calls[-1], last_call) self.assertEqual(mock.mock_calls, last_call.call_list()) def test_call_list(self): mock = MagicMock() mock(1) self.assertEqual(call(1).call_list(), mock.mock_calls) mock = MagicMock() mock(1).method(2) 
self.assertEqual(call(1).method(2).call_list(), mock.mock_calls) mock = MagicMock() mock(1).method(2)(3) self.assertEqual(call(1).method(2)(3).call_list(), mock.mock_calls) mock = MagicMock() int(mock(1).method(2)(3).foo.bar.baz(4)(5)) kall = call(1).method(2)(3).foo.bar.baz(4)(5).__int__() self.assertEqual(kall.call_list(), mock.mock_calls) def test_call_any(self): self.assertEqual(call, ANY) m = MagicMock() int(m) self.assertEqual(m.mock_calls, [ANY]) self.assertEqual([ANY], m.mock_calls) def test_two_args_call(self): args = _Call(((1, 2), {'a': 3}), two=True) self.assertEqual(len(args), 2) self.assertEqual(args[0], (1, 2)) self.assertEqual(args[1], {'a': 3}) other_args = _Call(((1, 2), {'a': 3})) self.assertEqual(args, other_args) class SpecSignatureTest(unittest.TestCase): def _check_someclass_mock(self, mock): self.assertRaises(AttributeError, getattr, mock, 'foo') mock.one(1, 2) mock.one.assert_called_with(1, 2) self.assertRaises(AssertionError, mock.one.assert_called_with, 3, 4) self.assertRaises(TypeError, mock.one, 1) mock.two() mock.two.assert_called_with() self.assertRaises(AssertionError, mock.two.assert_called_with, 3) self.assertRaises(TypeError, mock.two, 1) mock.three() mock.three.assert_called_with() self.assertRaises(AssertionError, mock.three.assert_called_with, 3) self.assertRaises(TypeError, mock.three, 3, 2) mock.three(1) mock.three.assert_called_with(1) mock.three(a=1) mock.three.assert_called_with(a=1) def test_basic(self): for spec in (SomeClass, SomeClass()): mock = create_autospec(spec) self._check_someclass_mock(mock) def test_create_autospec_return_value(self): def f(): pass mock = create_autospec(f, return_value='foo') self.assertEqual(mock(), 'foo') class Foo(object): pass mock = create_autospec(Foo, return_value='foo') self.assertEqual(mock(), 'foo') def test_autospec_reset_mock(self): m = create_autospec(int) int(m) m.reset_mock() self.assertEqual(m.__int__.call_count, 0) def test_mocking_unbound_methods(self): class Foo(object): def foo(self, foo): pass p = patch.object(Foo, 'foo') mock_foo = p.start() Foo().foo(1) mock_foo.assert_called_with(1) def test_create_autospec_unbound_methods(self): # see mock issue 128 # this is expected to fail until the issue is fixed return class Foo(object): def foo(self): pass klass = create_autospec(Foo) instance = klass() self.assertRaises(TypeError, instance.foo, 1) # Note: no type checking on the "self" parameter klass.foo(1) klass.foo.assert_called_with(1) self.assertRaises(TypeError, klass.foo) def test_create_autospec_keyword_arguments(self): class Foo(object): a = 3 m = create_autospec(Foo, a='3') self.assertEqual(m.a, '3') def test_create_autospec_keyword_only_arguments(self): def foo(a, *, b=None): pass m = create_autospec(foo) m(1) m.assert_called_with(1) self.assertRaises(TypeError, m, 1, 2) m(2, b=3) m.assert_called_with(2, b=3) def test_function_as_instance_attribute(self): obj = SomeClass() def f(a): pass obj.f = f mock = create_autospec(obj) mock.f('bing') mock.f.assert_called_with('bing') def test_spec_as_list(self): # because spec as a list of strings in the mock constructor means # something very different we treat a list instance as the type. 
mock = create_autospec([]) mock.append('foo') mock.append.assert_called_with('foo') self.assertRaises(AttributeError, getattr, mock, 'foo') class Foo(object): foo = [] mock = create_autospec(Foo) mock.foo.append(3) mock.foo.append.assert_called_with(3) self.assertRaises(AttributeError, getattr, mock.foo, 'foo') def test_attributes(self): class Sub(SomeClass): attr = SomeClass() sub_mock = create_autospec(Sub) for mock in (sub_mock, sub_mock.attr): self._check_someclass_mock(mock) def test_builtin_functions_types(self): # we could replace builtin functions / methods with a function # with *args / **kwargs signature. Using the builtin method type # as a spec seems to work fairly well though. class BuiltinSubclass(list): def bar(self, arg): pass sorted = sorted attr = {} mock = create_autospec(BuiltinSubclass) mock.append(3) mock.append.assert_called_with(3) self.assertRaises(AttributeError, getattr, mock.append, 'foo') mock.bar('foo') mock.bar.assert_called_with('foo') self.assertRaises(TypeError, mock.bar, 'foo', 'bar') self.assertRaises(AttributeError, getattr, mock.bar, 'foo') mock.sorted([1, 2]) mock.sorted.assert_called_with([1, 2]) self.assertRaises(AttributeError, getattr, mock.sorted, 'foo') mock.attr.pop(3) mock.attr.pop.assert_called_with(3) self.assertRaises(AttributeError, getattr, mock.attr, 'foo') def test_method_calls(self): class Sub(SomeClass): attr = SomeClass() mock = create_autospec(Sub) mock.one(1, 2) mock.two() mock.three(3) expected = [call.one(1, 2), call.two(), call.three(3)] self.assertEqual(mock.method_calls, expected) mock.attr.one(1, 2) mock.attr.two() mock.attr.three(3) expected.extend( [call.attr.one(1, 2), call.attr.two(), call.attr.three(3)] ) self.assertEqual(mock.method_calls, expected) def test_magic_methods(self): class BuiltinSubclass(list): attr = {} mock = create_autospec(BuiltinSubclass) self.assertEqual(list(mock), []) self.assertRaises(TypeError, int, mock) self.assertRaises(TypeError, int, mock.attr) self.assertEqual(list(mock), []) self.assertIsInstance(mock['foo'], MagicMock) self.assertIsInstance(mock.attr['foo'], MagicMock) def test_spec_set(self): class Sub(SomeClass): attr = SomeClass() for spec in (Sub, Sub()): mock = create_autospec(spec, spec_set=True) self._check_someclass_mock(mock) self.assertRaises(AttributeError, setattr, mock, 'foo', 'bar') self.assertRaises(AttributeError, setattr, mock.attr, 'foo', 'bar') def test_descriptors(self): class Foo(object): @classmethod def f(cls, a, b): pass @staticmethod def g(a, b): pass class Bar(Foo): pass class Baz(SomeClass, Bar): pass for spec in (Foo, Foo(), Bar, Bar(), Baz, Baz()): mock = create_autospec(spec) mock.f(1, 2) mock.f.assert_called_once_with(1, 2) mock.g(3, 4) mock.g.assert_called_once_with(3, 4) def test_recursive(self): class A(object): def a(self): pass foo = 'foo bar baz' bar = foo A.B = A mock = create_autospec(A) mock() self.assertFalse(mock.B.called) mock.a() mock.B.a() self.assertEqual(mock.method_calls, [call.a(), call.B.a()]) self.assertIs(A.foo, A.bar) self.assertIsNot(mock.foo, mock.bar) mock.foo.lower() self.assertRaises(AssertionError, mock.bar.lower.assert_called_with) def test_spec_inheritance_for_classes(self): class Foo(object): def a(self): pass class Bar(object): def f(self): pass class_mock = create_autospec(Foo) self.assertIsNot(class_mock, class_mock()) for this_mock in class_mock, class_mock(): this_mock.a() this_mock.a.assert_called_with() self.assertRaises(TypeError, this_mock.a, 'foo') self.assertRaises(AttributeError, getattr, this_mock, 'b') 
instance_mock = create_autospec(Foo()) instance_mock.a() instance_mock.a.assert_called_with() self.assertRaises(TypeError, instance_mock.a, 'foo') self.assertRaises(AttributeError, getattr, instance_mock, 'b') # The return value isn't callable self.assertRaises(TypeError, instance_mock) instance_mock.Bar.f() instance_mock.Bar.f.assert_called_with() self.assertRaises(AttributeError, getattr, instance_mock.Bar, 'g') instance_mock.Bar().f() instance_mock.Bar().f.assert_called_with() self.assertRaises(AttributeError, getattr, instance_mock.Bar(), 'g') def test_inherit(self): class Foo(object): a = 3 Foo.Foo = Foo # class mock = create_autospec(Foo) instance = mock() self.assertRaises(AttributeError, getattr, instance, 'b') attr_instance = mock.Foo() self.assertRaises(AttributeError, getattr, attr_instance, 'b') # instance mock = create_autospec(Foo()) self.assertRaises(AttributeError, getattr, mock, 'b') self.assertRaises(TypeError, mock) # attribute instance call_result = mock.Foo() self.assertRaises(AttributeError, getattr, call_result, 'b') def test_builtins(self): # used to fail with infinite recursion create_autospec(1) create_autospec(int) create_autospec('foo') create_autospec(str) create_autospec({}) create_autospec(dict) create_autospec([]) create_autospec(list) create_autospec(set()) create_autospec(set) create_autospec(1.0) create_autospec(float) create_autospec(1j) create_autospec(complex) create_autospec(False) create_autospec(True) def test_function(self): def f(a, b): pass mock = create_autospec(f) self.assertRaises(TypeError, mock) mock(1, 2) mock.assert_called_with(1, 2) f.f = f mock = create_autospec(f) self.assertRaises(TypeError, mock.f) mock.f(3, 4) mock.f.assert_called_with(3, 4) def test_skip_attributeerrors(self): class Raiser(object): def __get__(self, obj, type=None): if obj is None: raise AttributeError('Can only be accessed via an instance') class RaiserClass(object): raiser = Raiser() @staticmethod def existing(a, b): return a + b s = create_autospec(RaiserClass) self.assertRaises(TypeError, lambda x: s.existing(1, 2, 3)) s.existing(1, 2) self.assertRaises(AttributeError, lambda: s.nonexisting) # check we can fetch the raiser attribute and it has no spec obj = s.raiser obj.foo, obj.bar def test_signature_class(self): class Foo(object): def __init__(self, a, b=3): pass mock = create_autospec(Foo) self.assertRaises(TypeError, mock) mock(1) mock.assert_called_once_with(1) mock(4, 5) mock.assert_called_with(4, 5) def test_class_with_no_init(self): # this used to raise an exception # due to trying to get a signature from object.__init__ class Foo(object): pass create_autospec(Foo) def test_signature_callable(self): class Callable(object): def __init__(self): pass def __call__(self, a): pass mock = create_autospec(Callable) mock() mock.assert_called_once_with() self.assertRaises(TypeError, mock, 'a') instance = mock() self.assertRaises(TypeError, instance) instance(a='a') instance.assert_called_once_with(a='a') instance('a') instance.assert_called_with('a') mock = create_autospec(Callable()) mock(a='a') mock.assert_called_once_with(a='a') self.assertRaises(TypeError, mock) mock('a') mock.assert_called_with('a') def test_signature_noncallable(self): class NonCallable(object): def __init__(self): pass mock = create_autospec(NonCallable) instance = mock() mock.assert_called_once_with() self.assertRaises(TypeError, mock, 'a') self.assertRaises(TypeError, instance) self.assertRaises(TypeError, instance, 'a') mock = create_autospec(NonCallable())
self.assertRaises(TypeError, mock) self.assertRaises(TypeError, mock, 'a') def test_create_autospec_none(self): class Foo(object): bar = None mock = create_autospec(Foo) none = mock.bar self.assertNotIsInstance(none, type(None)) none.foo() none.foo.assert_called_once_with() def test_autospec_functions_with_self_in_odd_place(self): class Foo(object): def f(a, self): pass a = create_autospec(Foo) a.f(self=10) a.f.assert_called_with(self=10) def test_autospec_property(self): class Foo(object): @property def foo(self): return 3 foo = create_autospec(Foo) mock_property = foo.foo # no spec on properties self.assertTrue(isinstance(mock_property, MagicMock)) mock_property(1, 2, 3) mock_property.abc(4, 5, 6) mock_property.assert_called_once_with(1, 2, 3) mock_property.abc.assert_called_once_with(4, 5, 6) def test_autospec_slots(self): class Foo(object): __slots__ = ['a'] foo = create_autospec(Foo) mock_slot = foo.a # no spec on slots mock_slot(1, 2, 3) mock_slot.abc(4, 5, 6) mock_slot.assert_called_once_with(1, 2, 3) mock_slot.abc.assert_called_once_with(4, 5, 6) class TestCallList(unittest.TestCase): def test_args_list_contains_call_list(self): mock = Mock() self.assertIsInstance(mock.call_args_list, _CallList) mock(1, 2) mock(a=3) mock(3, 4) mock(b=6) for kall in call(1, 2), call(a=3), call(3, 4), call(b=6): self.assertTrue(kall in mock.call_args_list) calls = [call(a=3), call(3, 4)] self.assertTrue(calls in mock.call_args_list) calls = [call(1, 2), call(a=3)] self.assertTrue(calls in mock.call_args_list) calls = [call(3, 4), call(b=6)] self.assertTrue(calls in mock.call_args_list) calls = [call(3, 4)] self.assertTrue(calls in mock.call_args_list) self.assertFalse(call('fish') in mock.call_args_list) self.assertFalse([call('fish')] in mock.call_args_list) def test_call_list_str(self): mock = Mock() mock(1, 2) mock.foo(a=3) mock.foo.bar().baz('fish', cat='dog') expected = ( "[call(1, 2),\n" " call.foo(a=3),\n" " call.foo.bar(),\n" " call.foo.bar().baz('fish', cat='dog')]" ) self.assertEqual(str(mock.mock_calls), expected) def test_propertymock(self): p = patch('%s.SomeClass.one' % __name__, new_callable=PropertyMock) mock = p.start() try: SomeClass.one mock.assert_called_once_with() s = SomeClass() s.one mock.assert_called_with() self.assertEqual(mock.mock_calls, [call(), call()]) s.one = 3 self.assertEqual(mock.mock_calls, [call(), call(), call(3)]) finally: p.stop() def test_propertymock_returnvalue(self): m = MagicMock() p = PropertyMock() type(m).foo = p returned = m.foo p.assert_called_once_with() self.assertIsInstance(returned, MagicMock) self.assertNotIsInstance(returned, PropertyMock) if __name__ == '__main__': unittest.main() import unittest from unittest import mock from scrapy.settings import (BaseSettings, Settings, SettingsAttribute, SETTINGS_PRIORITIES, get_settings_priority) from . 
import default_settings class SettingsGlobalFuncsTest(unittest.TestCase): def test_get_settings_priority(self): for prio_str, prio_num in SETTINGS_PRIORITIES.items(): self.assertEqual(get_settings_priority(prio_str), prio_num) self.assertEqual(get_settings_priority(99), 99) class SettingsAttributeTest(unittest.TestCase): def setUp(self): self.attribute = SettingsAttribute('value', 10) def test_set_greater_priority(self): self.attribute.set('value2', 20) self.assertEqual(self.attribute.value, 'value2') self.assertEqual(self.attribute.priority, 20) def test_set_equal_priority(self): self.attribute.set('value2', 10) self.assertEqual(self.attribute.value, 'value2') self.assertEqual(self.attribute.priority, 10) def test_set_less_priority(self): self.attribute.set('value2', 0) self.assertEqual(self.attribute.value, 'value') self.assertEqual(self.attribute.priority, 10) def test_overwrite_basesettings(self): original_dict = {'one': 10, 'two': 20} original_settings = BaseSettings(original_dict, 0) attribute = SettingsAttribute(original_settings, 0) new_dict = {'three': 11, 'four': 21} attribute.set(new_dict, 10) self.assertIsInstance(attribute.value, BaseSettings) self.assertCountEqual(attribute.value, new_dict) self.assertCountEqual(original_settings, original_dict) new_settings = BaseSettings({'five': 12}, 0) attribute.set(new_settings, 0) # Insufficient priority self.assertCountEqual(attribute.value, new_dict) attribute.set(new_settings, 10) self.assertCountEqual(attribute.value, new_settings) def test_repr(self): self.assertEqual(repr(self.attribute), "<SettingsAttribute value='value' priority=10>") class BaseSettingsTest(unittest.TestCase): def setUp(self): self.settings = BaseSettings() def test_set_new_attribute(self): self.settings.set('TEST_OPTION', 'value', 0) self.assertIn('TEST_OPTION', self.settings.attributes) attr = self.settings.attributes['TEST_OPTION'] self.assertIsInstance(attr, SettingsAttribute) self.assertEqual(attr.value, 'value') self.assertEqual(attr.priority, 0) def test_set_settingsattribute(self): myattr = SettingsAttribute(0, 30) # Note priority 30 self.settings.set('TEST_ATTR', myattr, 10) self.assertEqual(self.settings.get('TEST_ATTR'), 0) self.assertEqual(self.settings.getpriority('TEST_ATTR'), 30) def test_set_instance_identity_on_update(self): attr = SettingsAttribute('value', 0) self.settings.attributes = {'TEST_OPTION': attr} self.settings.set('TEST_OPTION', 'othervalue', 10) self.assertIn('TEST_OPTION', self.settings.attributes) self.assertIs(attr, self.settings.attributes['TEST_OPTION']) def test_set_calls_settings_attributes_methods_on_update(self): attr = SettingsAttribute('value', 10) with mock.patch.object(attr, '__setattr__') as mock_setattr, mock.patch.object(attr, 'set') as mock_set: self.settings.attributes = {'TEST_OPTION': attr} for priority in (0, 10, 20): self.settings.set('TEST_OPTION', 'othervalue', priority) mock_set.assert_called_once_with('othervalue', priority) self.assertFalse(mock_setattr.called) mock_set.reset_mock() mock_setattr.reset_mock() def test_setitem(self): settings = BaseSettings() settings.set('key', 'a', 'default') settings['key'] = 'b' self.assertEqual(settings['key'], 'b') self.assertEqual(settings.getpriority('key'), 20) settings['key'] = 'c' self.assertEqual(settings['key'], 'c') settings['key2'] = 'x' self.assertIn('key2', settings) self.assertEqual(settings['key2'], 'x') self.assertEqual(settings.getpriority('key2'), 20) def test_setdict_alias(self): with mock.patch.object(self.settings, 'set') as mock_set: self.settings.setdict({'TEST_1': 'value1', 'TEST_2':
'value2'}, 10) self.assertEqual(mock_set.call_count, 2) calls = [mock.call('TEST_1', 'value1', 10), mock.call('TEST_2', 'value2', 10)] mock_set.assert_has_calls(calls, any_order=True) def test_setmodule_only_load_uppercase_vars(self): class ModuleMock(): UPPERCASE_VAR = 'value' MIXEDcase_VAR = 'othervalue' lowercase_var = 'anothervalue' self.settings.attributes = {} self.settings.setmodule(ModuleMock(), 10) self.assertIn('UPPERCASE_VAR', self.settings.attributes) self.assertNotIn('MIXEDcase_VAR', self.settings.attributes) self.assertNotIn('lowercase_var', self.settings.attributes) self.assertEqual(len(self.settings.attributes), 1) def test_setmodule_alias(self): with mock.patch.object(self.settings, 'set') as mock_set: self.settings.setmodule(default_settings, 10) mock_set.assert_any_call('TEST_DEFAULT', 'defvalue', 10) mock_set.assert_any_call('TEST_DICT', {'key': 'val'}, 10) def test_setmodule_by_path(self): self.settings.attributes = {} self.settings.setmodule(default_settings, 10) ctrl_attributes = self.settings.attributes.copy() self.settings.attributes = {} self.settings.setmodule( 'tests.test_settings.default_settings', 10) self.assertCountEqual(self.settings.attributes.keys(), ctrl_attributes.keys()) for key in ctrl_attributes.keys(): attr = self.settings.attributes[key] ctrl_attr = ctrl_attributes[key] self.assertEqual(attr.value, ctrl_attr.value) self.assertEqual(attr.priority, ctrl_attr.priority) def test_update(self): settings = BaseSettings({'key_lowprio': 0}, priority=0) settings.set('key_highprio', 10, priority=50) custom_settings = BaseSettings({'key_lowprio': 1, 'key_highprio': 11}, priority=30) custom_settings.set('newkey_one', None, priority=50) custom_dict = {'key_lowprio': 2, 'key_highprio': 12, 'newkey_two': None} settings.update(custom_dict, priority=20) self.assertEqual(settings['key_lowprio'], 2) self.assertEqual(settings.getpriority('key_lowprio'), 20) self.assertEqual(settings['key_highprio'], 10) self.assertIn('newkey_two', settings) self.assertEqual(settings.getpriority('newkey_two'), 20) settings.update(custom_settings) self.assertEqual(settings['key_lowprio'], 1) self.assertEqual(settings.getpriority('key_lowprio'), 30) self.assertEqual(settings['key_highprio'], 10) self.assertIn('newkey_one', settings) self.assertEqual(settings.getpriority('newkey_one'), 50) settings.update({'key_lowprio': 3}, priority=20) self.assertEqual(settings['key_lowprio'], 1) def test_update_jsonstring(self): settings = BaseSettings({'number': 0, 'dict': BaseSettings({'key': 'val'})}) settings.update('{"number": 1, "newnumber": 2}') self.assertEqual(settings['number'], 1) self.assertEqual(settings['newnumber'], 2) settings.set("dict", '{"key": "newval", "newkey": "newval2"}') self.assertEqual(settings['dict']['key'], "newval") self.assertEqual(settings['dict']['newkey'], "newval2") def test_delete(self): settings = BaseSettings({'key': None}) settings.set('key_highprio', None, priority=50) settings.delete('key') settings.delete('key_highprio') self.assertNotIn('key', settings) self.assertIn('key_highprio', settings) del settings['key_highprio'] self.assertNotIn('key_highprio', settings) def test_get(self): test_configuration = { 'TEST_ENABLED1': '1', 'TEST_ENABLED2': True, 'TEST_ENABLED3': 1, 'TEST_ENABLED4': 'True', 'TEST_ENABLED5': 'true', 'TEST_ENABLED_WRONG': 'on', 'TEST_DISABLED1': '0', 'TEST_DISABLED2': False, 'TEST_DISABLED3': 0, 'TEST_DISABLED4': 'False', 'TEST_DISABLED5': 'false', 'TEST_DISABLED_WRONG': 'off', 'TEST_INT1': 123, 'TEST_INT2': '123', 'TEST_FLOAT1': 123.45, 
    def test_get(self):
        test_configuration = {
            'TEST_ENABLED1': '1',
            'TEST_ENABLED2': True,
            'TEST_ENABLED3': 1,
            'TEST_ENABLED4': 'True',
            'TEST_ENABLED5': 'true',
            'TEST_ENABLED_WRONG': 'on',
            'TEST_DISABLED1': '0',
            'TEST_DISABLED2': False,
            'TEST_DISABLED3': 0,
            'TEST_DISABLED4': 'False',
            'TEST_DISABLED5': 'false',
            'TEST_DISABLED_WRONG': 'off',
            'TEST_INT1': 123,
            'TEST_INT2': '123',
            'TEST_FLOAT1': 123.45,
            'TEST_FLOAT2': '123.45',
            'TEST_LIST1': ['one', 'two'],
            'TEST_LIST2': 'one,two',
            'TEST_STR': 'value',
            'TEST_DICT1': {'key1': 'val1', 'ke2': 3},
            'TEST_DICT2': '{"key1": "val1", "ke2": 3}',
        }
        settings = self.settings
        settings.attributes = {key: SettingsAttribute(value, 0)
                               for key, value in test_configuration.items()}

        self.assertTrue(settings.getbool('TEST_ENABLED1'))
        self.assertTrue(settings.getbool('TEST_ENABLED2'))
        self.assertTrue(settings.getbool('TEST_ENABLED3'))
        self.assertTrue(settings.getbool('TEST_ENABLED4'))
        self.assertTrue(settings.getbool('TEST_ENABLED5'))
        self.assertFalse(settings.getbool('TEST_ENABLEDx'))
        self.assertTrue(settings.getbool('TEST_ENABLEDx', True))
        self.assertFalse(settings.getbool('TEST_DISABLED1'))
        self.assertFalse(settings.getbool('TEST_DISABLED2'))
        self.assertFalse(settings.getbool('TEST_DISABLED3'))
        self.assertFalse(settings.getbool('TEST_DISABLED4'))
        self.assertFalse(settings.getbool('TEST_DISABLED5'))
        self.assertEqual(settings.getint('TEST_INT1'), 123)
        self.assertEqual(settings.getint('TEST_INT2'), 123)
        self.assertEqual(settings.getint('TEST_INTx'), 0)
        self.assertEqual(settings.getint('TEST_INTx', 45), 45)
        self.assertEqual(settings.getfloat('TEST_FLOAT1'), 123.45)
        self.assertEqual(settings.getfloat('TEST_FLOAT2'), 123.45)
        self.assertEqual(settings.getfloat('TEST_FLOATx'), 0.0)
        self.assertEqual(settings.getfloat('TEST_FLOATx', 55.0), 55.0)
        self.assertEqual(settings.getlist('TEST_LIST1'), ['one', 'two'])
        self.assertEqual(settings.getlist('TEST_LIST2'), ['one', 'two'])
        self.assertEqual(settings.getlist('TEST_LISTx'), [])
        self.assertEqual(settings.getlist('TEST_LISTx', ['default']), ['default'])
        self.assertEqual(settings['TEST_STR'], 'value')
        self.assertEqual(settings.get('TEST_STR'), 'value')
        self.assertEqual(settings['TEST_STRx'], None)
        self.assertEqual(settings.get('TEST_STRx'), None)
        self.assertEqual(settings.get('TEST_STRx', 'default'), 'default')
        self.assertEqual(settings.getdict('TEST_DICT1'), {'key1': 'val1', 'ke2': 3})
        self.assertEqual(settings.getdict('TEST_DICT2'), {'key1': 'val1', 'ke2': 3})
        self.assertEqual(settings.getdict('TEST_DICT3'), {})
        self.assertEqual(settings.getdict('TEST_DICT3', {'key1': 5}), {'key1': 5})
        self.assertRaises(ValueError, settings.getdict, 'TEST_LIST1')
        self.assertRaises(ValueError, settings.getbool, 'TEST_ENABLED_WRONG')
        self.assertRaises(ValueError, settings.getbool, 'TEST_DISABLED_WRONG')

    def test_getpriority(self):
        settings = BaseSettings({'key': 'value'}, priority=99)
        self.assertEqual(settings.getpriority('key'), 99)
        self.assertEqual(settings.getpriority('nonexistentkey'), None)

    def test_getwithbase(self):
        s = BaseSettings({'TEST_BASE': BaseSettings({1: 1, 2: 2}, 'project'),
                          'TEST': BaseSettings({1: 10, 3: 30}, 'default'),
                          'HASNOBASE': BaseSettings({3: 3000}, 'default')})
        s['TEST'].set(2, 200, 'cmdline')
        self.assertCountEqual(s.getwithbase('TEST'), {1: 1, 2: 200, 3: 30})
        self.assertCountEqual(s.getwithbase('HASNOBASE'), s['HASNOBASE'])
        self.assertEqual(s.getwithbase('NONEXISTENT'), {})

    def test_maxpriority(self):
        # Empty settings should return 'default'
        self.assertEqual(self.settings.maxpriority(), 0)
        self.settings.set('A', 0, 10)
        self.settings.set('B', 0, 30)
        self.assertEqual(self.settings.maxpriority(), 30)
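    # test_copy below depends on copy() being a deep copy: mutating a list
    # (even one nested inside another list) obtained from the original
    # settings must not leak into the copy, e.g.:
    #
    #   snapshot = settings.copy()
    #   settings.get('TEST_LIST').append('three')   # snapshot is unaffected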
    def test_copy(self):
        values = {
            'TEST_BOOL': True,
            'TEST_LIST': ['one', 'two'],
            'TEST_LIST_OF_LISTS': [['first_one', 'first_two'],
                                   ['second_one', 'second_two']]
        }
        self.settings.setdict(values)
        copy = self.settings.copy()
        self.settings.set('TEST_BOOL', False)
        self.assertTrue(copy.get('TEST_BOOL'))

        test_list = self.settings.get('TEST_LIST')
        test_list.append('three')
        self.assertListEqual(copy.get('TEST_LIST'), ['one', 'two'])

        test_list_of_lists = self.settings.get('TEST_LIST_OF_LISTS')
        test_list_of_lists[0].append('first_three')
        self.assertListEqual(copy.get('TEST_LIST_OF_LISTS')[0],
                             ['first_one', 'first_two'])

    def test_copy_to_dict(self):
        s = BaseSettings({'TEST_STRING': 'a string',
                          'TEST_LIST': [1, 2],
                          'TEST_BOOLEAN': False,
                          'TEST_BASE': BaseSettings({1: 1, 2: 2}, 'project'),
                          'TEST': BaseSettings({1: 10, 3: 30}, 'default'),
                          'HASNOBASE': BaseSettings({3: 3000}, 'default')})
        self.assertDictEqual(
            s.copy_to_dict(),
            {
                'HASNOBASE': {3: 3000},
                'TEST': {1: 10, 3: 30},
                'TEST_BASE': {1: 1, 2: 2},
                'TEST_LIST': [1, 2],
                'TEST_BOOLEAN': False,
                'TEST_STRING': 'a string',
            }
        )

    def test_freeze(self):
        self.settings.freeze()
        with self.assertRaises(TypeError) as cm:
            self.settings.set('TEST_BOOL', False)
            self.assertEqual(str(cm.exception),
                             "Trying to modify an immutable Settings object")

    def test_frozencopy(self):
        frozencopy = self.settings.frozencopy()
        self.assertTrue(frozencopy.frozen)
        self.assertIsNot(frozencopy, self.settings)


class SettingsTest(unittest.TestCase):

    def setUp(self):
        self.settings = Settings()

    @mock.patch.dict('scrapy.settings.SETTINGS_PRIORITIES', {'default': 10})
    @mock.patch('scrapy.settings.default_settings', default_settings)
    def test_initial_defaults(self):
        settings = Settings()
        self.assertEqual(len(settings.attributes), 2)
        self.assertIn('TEST_DEFAULT', settings.attributes)

        attr = settings.attributes['TEST_DEFAULT']
        self.assertIsInstance(attr, SettingsAttribute)
        self.assertEqual(attr.value, 'defvalue')
        self.assertEqual(attr.priority, 10)

    @mock.patch.dict('scrapy.settings.SETTINGS_PRIORITIES', {})
    @mock.patch('scrapy.settings.default_settings', {})
    def test_initial_values(self):
        settings = Settings({'TEST_OPTION': 'value'}, 10)
        self.assertEqual(len(settings.attributes), 1)
        self.assertIn('TEST_OPTION', settings.attributes)

        attr = settings.attributes['TEST_OPTION']
        self.assertIsInstance(attr, SettingsAttribute)
        self.assertEqual(attr.value, 'value')
        self.assertEqual(attr.priority, 10)

    @mock.patch('scrapy.settings.default_settings', default_settings)
    def test_autopromote_dicts(self):
        settings = Settings()
        mydict = settings.get('TEST_DICT')
        self.assertIsInstance(mydict, BaseSettings)
        self.assertIn('key', mydict)
        self.assertEqual(mydict['key'], 'val')
        self.assertEqual(mydict.getpriority('key'), 0)

    @mock.patch('scrapy.settings.default_settings', default_settings)
    def test_getdict_autodegrade_basesettings(self):
        settings = Settings()
        mydict = settings.getdict('TEST_DICT')
        self.assertIsInstance(mydict, dict)
        self.assertEqual(len(mydict), 1)
        self.assertIn('key', mydict)
        self.assertEqual(mydict['key'], 'val')
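    # test_passing_objects_as_values below exercises a convenience the tests
    # above don't: values in dicts like ITEM_PIPELINES may be actual class
    # objects rather than dotted-path strings, e.g. (illustrative only):
    #
    #   Settings({'ITEM_PIPELINES': {MyPipeline: 800}})
    #
    # where MyPipeline stands in for any class with a process_item() method.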
    def test_passing_objects_as_values(self):
        from scrapy.core.downloader.handlers.file import FileDownloadHandler
        from scrapy.utils.misc import create_instance
        from scrapy.utils.test import get_crawler

        class TestPipeline():
            def process_item(self, i, s):
                return i

        settings = Settings({
            'ITEM_PIPELINES': {
                TestPipeline: 800,
            },
            'DOWNLOAD_HANDLERS': {
                'ftp': FileDownloadHandler,
            },
        })

        self.assertIn('ITEM_PIPELINES', settings.attributes)

        mypipeline, priority = settings.getdict('ITEM_PIPELINES').popitem()
        self.assertEqual(priority, 800)
        self.assertEqual(mypipeline, TestPipeline)
        self.assertIsInstance(mypipeline(), TestPipeline)
        self.assertEqual(mypipeline().process_item('item', None), 'item')

        myhandler = settings.getdict('DOWNLOAD_HANDLERS').pop('ftp')
        self.assertEqual(myhandler, FileDownloadHandler)
        myhandler_instance = create_instance(myhandler, None, get_crawler())
        self.assertIsInstance(myhandler_instance, FileDownloadHandler)
        self.assertTrue(hasattr(myhandler_instance, 'download_request'))


if __name__ == "__main__":
    unittest.main()
#!/usr/bin/python
# -*- coding: utf-8 -*-

# (c) 2013, Serge van Ginderachter
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

ANSIBLE_METADATA = {'status': ['preview'],
                    'supported_by': 'community',
                    'version': '1.0'}

DOCUMENTATION = '''
---
module: open_iscsi
author: "Serge van Ginderachter (@srvg)"
version_added: "1.4"
short_description: Manage iscsi targets with open-iscsi
description:
    - Discover targets on given portal, (dis)connect targets, mark targets to
      manually or auto start, return device nodes of connected targets.
requirements:
    - open_iscsi library and tools (iscsiadm)
options:
  portal:
    required: false
    aliases: [ip]
    description:
      - the ip address of the iscsi target
  port:
    required: false
    default: 3260
    description:
      - the port on which the iscsi target process listens
  target:
    required: false
    aliases: [name, targetname]
    description:
      - the iscsi target name
  login:
    required: false
    choices: [true, false]
    description:
      - whether the target node should be connected
  node_auth:
    required: false
    default: CHAP
    description:
      - discovery.sendtargets.auth.authmethod
  node_user:
    required: false
    description:
      - discovery.sendtargets.auth.username
  node_pass:
    required: false
    description:
      - discovery.sendtargets.auth.password
  auto_node_startup:
    aliases: [automatic]
    required: false
    choices: [true, false]
    description:
      - whether the target node should be automatically connected at startup
  discover:
    required: false
    choices: [true, false]
    description:
      - whether the list of target nodes on the portal should be
        (re)discovered and added to the persistent iscsi database.
        Keep in mind that iscsiadm discovery resets configuration, like
        node.startup to manual, hence combined with auto_node_startup=yes
        will always return a changed state.
  show_nodes:
    required: false
    choices: [true, false]
    description:
      - whether the list of nodes in the persistent iscsi database should be
        returned by the module
'''

EXAMPLES = '''
# perform a discovery on 10.1.2.3 and show available target nodes
- open_iscsi:
    show_nodes: yes
    discover: yes
    portal: 10.1.2.3

# discover targets on portal and login to the one available
# (only works if exactly one target is exported to the initiator)
- open_iscsi:
    portal: '{{ iscsi_target }}'
    login: yes
    discover: yes

# description: connect to the named target, after updating the local
# persistent database (cache)
- open_iscsi:
    login: yes
    target: 'iqn.1986-03.com.sun:02:f8c1f9e0-c3ec-ec84-c9c9-8bfb0cd5de3d'

# description: disconnect from the cached named target
- open_iscsi:
    login: no
    target: 'iqn.1986-03.com.sun:02:f8c1f9e0-c3ec-ec84-c9c9-8bfb0cd5de3d'
'''

import glob
import os
import time

ISCSIADM = 'iscsiadm'
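# The helpers below shell out to iscsiadm and parse its plain-text output.
# As a rough sketch of the node-list format they rely on (the address and
# IQN here are made up for illustration), each line of `iscsiadm --mode node`
# looks like:
#
#   10.1.2.3:3260,1 iqn.2001-04.com.example:storage.disk1
#
# i.e. "ip:port,target_portal_group_tag targetname", which is why
# iscsi_get_cached_nodes() splits on whitespace first and then on ':'.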
def compare_nodelists(l1, l2):
    l1.sort()
    l2.sort()
    return l1 == l2


def iscsi_get_cached_nodes(module, portal=None):
    cmd = '%s --mode node' % iscsiadm_cmd
    (rc, out, err) = module.run_command(cmd)

    if rc == 0:
        lines = out.splitlines()
        nodes = []
        for line in lines:
            # line format is "ip:port,target_portal_group_tag targetname"
            parts = line.split()
            if len(parts) > 2:
                module.fail_json(msg='error parsing output', cmd=cmd)
            target = parts[1]
            parts = parts[0].split(':')
            target_portal = parts[0]

            if portal is None or portal == target_portal:
                nodes.append(target)

    # older versions of iscsiadm don't have nice return codes
    # for newer versions see iscsiadm(8); also usr/iscsiadm.c for details
    # err can contain [N|n]o records...
    elif rc == 21 or (rc == 255 and "o records found" in err):
        nodes = []
    else:
        module.fail_json(cmd=cmd, rc=rc, msg=err)

    return nodes


def iscsi_discover(module, portal, port):
    cmd = '%s --mode discovery --type sendtargets --portal %s:%s' % (iscsiadm_cmd, portal, port)
    (rc, out, err) = module.run_command(cmd)

    if rc > 0:
        module.fail_json(cmd=cmd, rc=rc, msg=err)


def target_loggedon(module, target):
    cmd = '%s --mode session' % iscsiadm_cmd
    (rc, out, err) = module.run_command(cmd)

    if rc == 0:
        return target in out
    elif rc == 21:
        return False
    else:
        module.fail_json(cmd=cmd, rc=rc, msg=err)


def target_login(module, target):
    node_auth = module.params['node_auth']
    node_user = module.params['node_user']
    node_pass = module.params['node_pass']

    if node_user:
        params = [('node.session.auth.authmethod', node_auth),
                  ('node.session.auth.username', node_user),
                  ('node.session.auth.password', node_pass)]
        for (name, value) in params:
            cmd = '%s --mode node --targetname %s --op=update --name %s --value %s' % (iscsiadm_cmd, target, name, value)
            (rc, out, err) = module.run_command(cmd)
            if rc > 0:
                module.fail_json(cmd=cmd, rc=rc, msg=err)

    cmd = '%s --mode node --targetname %s --login' % (iscsiadm_cmd, target)
    (rc, out, err) = module.run_command(cmd)

    if rc > 0:
        module.fail_json(cmd=cmd, rc=rc, msg=err)


def target_logout(module, target):
    cmd = '%s --mode node --targetname %s --logout' % (iscsiadm_cmd, target)
    (rc, out, err) = module.run_command(cmd)

    if rc > 0:
        module.fail_json(cmd=cmd, rc=rc, msg=err)


def target_device_node(module, target):
    # if anyone knows a better way to find out which devicenodes get created for
    # a given target...
    devices = glob.glob('/dev/disk/by-path/*%s*' % target)
    devdisks = []
    for dev in devices:
        # exclude partitions
        if "-part" not in dev:
            devdisk = os.path.realpath(dev)
            # only add once (multi-path?)
            if devdisk not in devdisks:
                devdisks.append(devdisk)
    return devdisks
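# target_device_node() above leans on the udev by-path naming convention.
# A typical symlink for an iSCSI LUN (illustrative values) looks like:
#
#   /dev/disk/by-path/ip-10.1.2.3:3260-iscsi-iqn.2001-04.com.example:storage.disk1-lun-0
#
# with partitions getting a "-part<N>" suffix, which is why those are
# filtered out before resolving the symlink with os.path.realpath().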
def target_isauto(module, target):
    cmd = '%s --mode node --targetname %s' % (iscsiadm_cmd, target)
    (rc, out, err) = module.run_command(cmd)

    if rc == 0:
        lines = out.splitlines()
        for line in lines:
            if 'node.startup' in line:
                return 'automatic' in line
        return False
    else:
        module.fail_json(cmd=cmd, rc=rc, msg=err)


def target_setauto(module, target):
    cmd = '%s --mode node --targetname %s --op=update --name node.startup --value automatic' % (iscsiadm_cmd, target)
    (rc, out, err) = module.run_command(cmd)

    if rc > 0:
        module.fail_json(cmd=cmd, rc=rc, msg=err)


def target_setmanual(module, target):
    cmd = '%s --mode node --targetname %s --op=update --name node.startup --value manual' % (iscsiadm_cmd, target)
    (rc, out, err) = module.run_command(cmd)

    if rc > 0:
        module.fail_json(cmd=cmd, rc=rc, msg=err)


def main():
    # load ansible module object
    module = AnsibleModule(
        argument_spec = dict(
            # target
            portal = dict(required=False, aliases=['ip']),
            port = dict(required=False, default=3260),
            target = dict(required=False, aliases=['name', 'targetname']),
            node_auth = dict(required=False, default='CHAP'),
            node_user = dict(required=False),
            node_pass = dict(required=False),
            # actions
            login = dict(type='bool', aliases=['state']),
            auto_node_startup = dict(type='bool', aliases=['automatic']),
            discover = dict(type='bool', default=False),
            show_nodes = dict(type='bool', default=False)
        ),
        required_together=[['discover_user', 'discover_pass'],
                           ['node_user', 'node_pass']],
        supports_check_mode=True
    )

    global iscsiadm_cmd
    iscsiadm_cmd = module.get_bin_path('iscsiadm', required=True)

    # parameters
    portal = module.params['portal']
    target = module.params['target']
    port = module.params['port']
    login = module.params['login']
    automatic = module.params['auto_node_startup']
    discover = module.params['discover']
    show_nodes = module.params['show_nodes']

    check = module.check_mode

    cached = iscsi_get_cached_nodes(module, portal)

    # return json dict
    result = {}
    result['changed'] = False

    if discover:
        if portal is None:
            module.fail_json(msg = "Need to specify at least the portal (ip) to discover")
        elif check:
            nodes = cached
        else:
            iscsi_discover(module, portal, port)
            nodes = iscsi_get_cached_nodes(module, portal)
            if not compare_nodelists(cached, nodes):
                result['changed'] |= True
                result['cache_updated'] = True
    else:
        nodes = cached
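    # What follows implements the module's target-selection rule: when no
    # target is given it is only unambiguous if the (possibly refreshed)
    # cache holds a single node; otherwise the requested target must already
    # be present in the cache.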
    if login is not None or automatic is not None:
        if target is None:
            if len(nodes) > 1:
                module.fail_json(msg = "Need to specify a target")
            else:
                target = nodes[0]
        else:
            # check given target is in cache
            check_target = False
            for node in nodes:
                if node == target:
                    check_target = True
                    break
            if not check_target:
                module.fail_json(msg = "Specified target not found")

    if show_nodes:
        result['nodes'] = nodes

    if login is not None:
        loggedon = target_loggedon(module, target)
        if (login and loggedon) or (not login and not loggedon):
            result['changed'] |= False
            if login:
                result['devicenodes'] = target_device_node(module, target)
        elif not check:
            if login:
                target_login(module, target)
                # give udev some time
                time.sleep(1)
                result['devicenodes'] = target_device_node(module, target)
            else:
                target_logout(module, target)
            result['changed'] |= True
            result['connection_changed'] = True
        else:
            result['changed'] |= True
            result['connection_changed'] = True

    if automatic is not None:
        isauto = target_isauto(module, target)
        if (automatic and isauto) or (not automatic and not isauto):
            result['changed'] |= False
            result['automatic_changed'] = False
        elif not check:
            if automatic:
                target_setauto(module, target)
            else:
                target_setmanual(module, target)
            result['changed'] |= True
            result['automatic_changed'] = True
        else:
            result['changed'] |= True
            result['automatic_changed'] = True

    module.exit_json(**result)

# import module snippets
from ansible.module_utils.basic import *

if __name__ == '__main__':
    main()
#!/usr/bin/env python
# Tai Sakuma

##__________________________________________________________________||
import os, sys
import timeit
import array

import ROOT

from alphatwirl.roottree import Events, BEvents

##__________________________________________________________________||
inputPath = 'tree.root'
treeName = 'tree'

##__________________________________________________________________||
def use_BEvents():
    inputFile = ROOT.TFile.Open(inputPath)
    tree = inputFile.Get(treeName)
    events = BEvents(tree)
    jet_pt = events.jet_pt
    trigger_path = events.trigger_path
    trigger_version = events.trigger_version
    for event in events:
        for i in range(len(jet_pt)):
            jet_pt[i]
        # print [v for v in trigger_path]
        # print [v for v in trigger_version]

##__________________________________________________________________||
def use_SetBranchAddress():
    inputFile = ROOT.TFile.Open(inputPath)
    tree = inputFile.Get(treeName)
    tree.SetBranchStatus("*", 0)
    tree.SetBranchStatus("njet", 1)
    tree.SetBranchStatus("jet_pt", 1)
    tree.SetBranchStatus("trigger_path", 1)
    tree.SetBranchStatus("trigger_version", 1)
    maxn = 65536
    njet = array.array('i', [0])
    jet_pt = array.array('d', maxn*[0])
    tree.SetBranchAddress("njet", njet)
    tree.SetBranchAddress("jet_pt", jet_pt)
    trigger_path = ROOT.vector('string')()
    tree.SetBranchAddress("trigger_path", trigger_path)
    trigger_version = ROOT.vector('int')()
    tree.SetBranchAddress("trigger_version", trigger_version)
    for i in xrange(tree.GetEntries()):
        if tree.GetEntry(i) <= 0:
            break
        for i in range(njet[0]):
            jet_pt[i]
        # print [v for v in trigger_path]
        # print [v for v in trigger_version]

##__________________________________________________________________||
ways = ['simplest_way', 'use_SetBranchStatus', 'use_GetLeaf', 'use_SetBranchAddress']
ways = ['use_BEvents', 'use_SetBranchAddress', 'use_BEvents', 'use_SetBranchAddress']
for w in ways:
    print w, ':',
    print timeit.timeit(w + '()', number = 1, setup = 'from __main__ import ' + w)

##__________________________________________________________________||
#Licensed to the Apache Software Foundation (ASF) under one
#or more contributor license agreements. See the NOTICE file
#distributed with this work for additional information
#regarding copyright ownership. The ASF licenses this file
#to you under the Apache License, Version 2.0 (the
#"License"); you may not use this file except in compliance
#with the License. You may obtain a copy of the License at
#     http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
import unittest, os, sys, re, threading, time

myDirectory = os.path.realpath(sys.argv[0])
rootDirectory = re.sub("/testing/.*", "", myDirectory)

sys.path.append(rootDirectory)

from testing.lib import BaseTestSuite
from hodlib.HodRing.hodRing import MRSystemDirectoryManager, createMRSystemDirectoryManager
from hodlib.Common.threads import simpleCommand

excludes = []

# duplicating temporarily until HADOOP-2848 is committed.
class MyMockLogger:
    def __init__(self):
        self.__logLines = {}

    def info(self, message):
        self.__logLines[message] = 'info'

    def critical(self, message):
        self.__logLines[message] = 'critical'

    def warn(self, message):
        self.__logLines[message] = 'warn'

    def debug(self, message):
        # don't track debug lines.
        pass

    # verify a certain message has been logged at the defined level of severity.
    def hasMessage(self, message, level):
        if not self.__logLines.has_key(message):
            return False
        return self.__logLines[message] == level

class test_MRSystemDirectoryManager(unittest.TestCase):
    def setUp(self):
        self.log = MyMockLogger()

    def testCleanupArgsString(self):
        sysDirMgr = MRSystemDirectoryManager(1234, '/user/hod/mapredsystem/hoduser.123.abc.com', \
                                             'def.com:5678', '/usr/bin/hadoop', self.log)
        cleanupArgs = sysDirMgr.toCleanupArgs()
        self.assertEqual(" --jt-pid 1234 --mr-sys-dir /user/hod/mapredsystem/hoduser.123.abc.com --fs-name def.com:5678 --hadoop-path /usr/bin/hadoop ", cleanupArgs)

    def testCreateMRSysDirInvalidParams(self):
        # test that no mr system directory manager is created if required keys are not present
        # this case will test scenarios of non jobtracker daemons.
        keys = ['jt-pid', 'mr-sys-dir', 'fs-name', 'hadoop-path']
        map = {'jt-pid': 1234,
               'mr-sys-dir': '/user/hod/mapredsystem/hoduser.def.com',
               'fs-name': 'ghi.com:1234',
               'hadoop-path': '/usr/bin/hadoop'
               }
        for key in keys:
            val = map[key]
            map[key] = None
            self.assertEquals(createMRSystemDirectoryManager(map, self.log), None)
            map[key] = val

    def testUnresponsiveJobTracker(self):
        # simulate an unresponsive job tracker, by giving a command that runs longer than the retries
        # verify that the program returns with the right error message.
        sc = simpleCommand("sleep", "sleep 300")
        sc.start()
        pid = sc.getPid()
        while pid is None:
            pid = sc.getPid()
        sysDirMgr = MRSystemDirectoryManager(pid, '/user/yhemanth/mapredsystem/hoduser.123.abc.com', \
                                             'def.com:5678', '/usr/bin/hadoop', self.log, retries=3)
        sysDirMgr.removeMRSystemDirectory()
        self.assertTrue(self.log.hasMessage("Job Tracker did not exit even after a minute. Not going to try and cleanup the system directory", 'warn'))
        sc.kill()
        sc.wait()
        sc.join()

class HodCleanupTestSuite(BaseTestSuite):
    def __init__(self):
        # suite setup
        BaseTestSuite.__init__(self, __name__, excludes)
        pass

    def cleanUp(self):
        # suite tearDown
        pass

def RunHodCleanupTests():
    # modulename_suite
    suite = HodCleanupTestSuite()
    testResult = suite.runTests()
    suite.cleanUp()
    return testResult

if __name__ == "__main__":
    RunHodCleanupTests()
# -*- coding: utf-8 -*-
#
# Boas Práticas em Desenvolvimento documentation build configuration file, created by
# sphinx-quickstart on Sat Oct 31 19:44:44 2015.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

import sys, os

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))

# -- General configuration -----------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'Boas Práticas em Desenvolvimento' copyright = u'2015, Silvio Rhatto' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '0.1' # The full version, including alpha/beta/rc tags. release = '0.1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. language = 'pt_BR' # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build', '_themes', 'README.rst'] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = "sphinx_rtd_theme" html_theme_path = ["_themes/sphinx_rtd_theme", ] # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. 
#html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'BoasPrticasemDesenvolvimentodoc' # -- Options for LaTeX output -------------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'BoasPrticasemDesenvolvimento.tex', u'Boas Práticas em Desenvolvimento Documentation', u'Silvio Rhatto', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'boasprticasemdesenvolvimento', u'Boas Práticas em Desenvolvimento Documentation', [u'Silvio Rhatto'], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------------ # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'BoasPrticasemDesenvolvimento', u'Boas Práticas em Desenvolvimento Documentation', u'Silvio Rhatto', 'BoasPrticasemDesenvolvimento', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. 
#texinfo_show_urls = 'footnote' from pyxp import client, fcall from pyxp.client import * from functools import wraps def send(iter, val, default=None): try: return iter.send(val) except StopIteration: return default def awithfile(fn): @wraps(fn) def wrapper(self, path, *args, **kwargs): gen = fn(self, *args, **kwargs) callback, fail, mode = next(gen) def cont(file): send(gen, file) self.aopen(path, cont, fail=fail or callback, mode=mode) return wrapper def requestchain(fn): @wraps(fn) def wrapper(self, *args, **kwargs): gen = fn(self, *args, **kwargs) callback, fail = next(gen) def cont(val): data = gen.send(val) if isinstance(data, fcall.Fcall): self._dorpc(data, cont, fail or callback) else: Client.respond(callback, data) cont(None) return wrapper class Client(client.Client): ROOT_FID = 0 def _awalk(fn): @wraps(fn) @requestchain def wrapper(self, *args, **kwargs): gen = fn(self, *args, **kwargs) path, callback, fail = next(gen) path = self._splitpath(path) fid = self._getfid() ofid = ROOT_FID def fail_(resp, exc, tb): if ofid != ROOT_FID: self._aclunk(fid) self.respond(fail or callback, resp, exc, tb) yield callback, fail_ while path: wname = path[:fcall.MAX_WELEM] path = path[fcall.MAX_WELEM:] resp = yield fcall.Twalk(fid=ofid, newfid=fid, wname=wname) ofid = fid resp = fid while resp is not None: resp = yield send(gen, resp) return wrapper _file = property(lambda self: File) @_awalk def _aopen(self, path, mode, fcall, callback, fail=None, origpath=None): path = self._splitpath(path) fcall.fid = yield path, callback, fail resp = yield fcall yield self._file(self, origpath or '/'.join(path), resp, fcall.fid, mode, cleanup=lambda: self._aclunk(fcall.fid)) def aopen(self, path, callback=True, fail=None, mode=OREAD): assert callable(callback) self._aopen(path, mode, fcall.Topen(mode=mode), callback, fail) def acreate(self, path, callback=True, fail=None, mode=OREAD, perm=0): path = self._splitpath(path) name = path.pop() self._aopen(path, mode, fcall.Tcreate(mode=mode, name=name, perm=perm), callback if callable(callback) else lambda resp: resp and resp.close(), fail, origpath='/'.join(path + [name])) @_awalk def aremove(self, path, callback=True, fail=None): yield fcall.Tremove(fid=(yield path, callback, fail)) @_awalk def astat(self, path, callback, fail=None): resp = yield fcall.Tstat(fid=(yield path, callback, fail)) yield resp.stat @awithfile def aread(self, callback, fail=None, count=None, offset=None, buf=''): file = yield callback, fail, OREAD file.aread(callback, fail, count, offset, buf) @awithfile def awrite(self, data, callback=True, fail=None, offset=None): file = yield callback, fail, OWRITE file.awrite(data, callback, fail, offset) @awithfile def areadlines(self, callback): file = yield callback, fail, OREAD file.areadlines(callback) class File(client.File): @requestchain def stat(self, callback, fail=None): yield callback, fail resp = yield fcall.Tstat() yield resp.stat @requestchain def aread(self, callback, fail=None, count=None, offset=None, buf=''): yield callback, fail setoffset = offset is None if count is None: count = self.iounit if offset is None: offset = self.offset res = [] while count > 0: n = min(count, self.iounit) count -= n resp = yield fcall.Tread(offset=offset, count=n) res.append(resp.data) offset += len(resp.data) if len(resp.data) == 0: break if setoffset: self.offset = offset yield ''.join(res) def areadlines(self, callback): class ctxt: last = None def cont(data, exc, tb): res = True if data: lines = data.split('\n') if ctxt.last: lines[0] = ctxt.last 
+ lines[0] for i in range(0, len(lines) - 1): res = callback(lines[i]) if res is False: return ctxt.last = lines[-1] self.aread(cont) else: if ctxt.last: callback(ctxt.last) callback(None) self.aread(cont) @requestchain def awrite(self, data, callback=True, fail=None, offset=None): yield callback, fail setoffset = offset is None if offset is None: offset = self.offset off = 0 while off < len(data): n = min(len(data), self.iounit) resp = yield fcall.Twrite(offset=offset, data=data[off:off+n]) off += resp.count offset += resp.count if setoffset: self.offset = offset yield off @requestchain def aremove(self, callback=True, fail=None): yield callback, fail yield fcall.Tremove() self.close() yield True # vim:se sts=4 sw=4 et: # -*- coding: utf-8 -*- from django.db import models from django.utils.encoding import python_2_unicode_compatible @python_2_unicode_compatible class Author(models.Model): name = models.CharField(max_length=100) age = models.IntegerField() friends = models.ManyToManyField('self', blank=True) def __str__(self): return self.name @python_2_unicode_compatible class Publisher(models.Model): name = models.CharField(max_length=255) num_awards = models.IntegerField() duration = models.DurationField(blank=True, null=True) def __str__(self): return self.name @python_2_unicode_compatible class Book(models.Model): isbn = models.CharField(max_length=9) name = models.CharField(max_length=255) pages = models.IntegerField() rating = models.FloatField() price = models.DecimalField(decimal_places=2, max_digits=6) authors = models.ManyToManyField(Author) contact = models.ForeignKey(Author, related_name='book_contact_set') publisher = models.ForeignKey(Publisher) pubdate = models.DateField() def __str__(self): return self.name @python_2_unicode_compatible class Store(models.Model): name = models.CharField(max_length=255) books = models.ManyToManyField(Book) original_opening = models.DateTimeField() friday_night_closing = models.TimeField() def __str__(self): return self.name # This code is part of Ansible, but is an independent component. # This particular file snippet, and this file snippet only, is BSD licensed. # Modules you write using this snippet, which is embedded dynamically by Ansible # still belong to the author of the module, and may assign their own license # to the complete work. # # Copyright (c), Franck Cuny , 2014 # All rights reserved. # # Redistribution and use in source and binary forms, with or without modification, # are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE # USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # import json import os import time import traceback from distutils.version import LooseVersion # libcloud try: import libcloud HAS_LIBCLOUD_BASE = True except ImportError: HAS_LIBCLOUD_BASE = False # google-auth try: import google.auth from google.oauth2 import service_account HAS_GOOGLE_AUTH = True except ImportError: HAS_GOOGLE_AUTH = False # google-python-api try: import google_auth_httplib2 from httplib2 import Http from googleapiclient.http import set_user_agent from googleapiclient.errors import HttpError from apiclient.discovery import build HAS_GOOGLE_API_LIB = True except ImportError: HAS_GOOGLE_API_LIB = False import ansible.module_utils.six.moves.urllib.parse as urlparse GCP_DEFAULT_SCOPES = ['https://www.googleapis.com/auth/cloud-platform'] def _get_gcp_ansible_credentials(module): """Helper to fetch creds from AnsibleModule object.""" service_account_email = module.params.get('service_account_email', None) # Note: pem_file is discouraged and will be deprecated credentials_file = module.params.get('pem_file', None) or module.params.get( 'credentials_file', None) project_id = module.params.get('project_id', None) return (service_account_email, credentials_file, project_id) def _get_gcp_environ_var(var_name, default_value): """Wrapper around os.environ.get call.""" return os.environ.get( var_name, default_value) def _get_gcp_environment_credentials(service_account_email, credentials_file, project_id): """Helper to look in environment variables for credentials.""" # If any of the values are not given as parameters, check the appropriate # environment variables. if not service_account_email: service_account_email = _get_gcp_environ_var('GCE_EMAIL', None) if not credentials_file: credentials_file = _get_gcp_environ_var( 'GCE_CREDENTIALS_FILE_PATH', None) or _get_gcp_environ_var( 'GOOGLE_APPLICATION_CREDENTIALS', None) or _get_gcp_environ_var( 'GCE_PEM_FILE_PATH', None) if not project_id: project_id = _get_gcp_environ_var('GCE_PROJECT', None) or _get_gcp_environ_var( 'GOOGLE_CLOUD_PROJECT', None) return (service_account_email, credentials_file, project_id) def _get_gcp_libcloud_credentials(module, service_account_email=None, credentials_file=None, project_id=None): """ Helper to look for libcloud secrets.py file. Note: This has an 'additive' effect right now, filling in vars not specified elsewhere, in order to keep legacy functionality. This method of specifying credentials will be deprecated, otherwise we'd look to make it more restrictive with an all-vars-or-nothing approach. :param service_account: GCP service account email used to make requests :type service_account: ``str`` or None :param credentials_file: Path on disk to credentials file :type credentials_file: ``str`` or None :param project_id: GCP project ID. 
    :type project_id: ``str`` or None
    :return: tuple of (service_account, credentials_file, project_id)
    :rtype: ``tuple`` of ``str``
    """
    if service_account_email is None or credentials_file is None:
        try:
            import secrets
            module.deprecate(msg=("secrets file found at '%s'. This method of specifying "
                                  "credentials is deprecated. Please use env vars or "
                                  "Ansible YAML files instead" % (secrets.__file__)), version=2.5)
        except ImportError:
            secrets = None
        if hasattr(secrets, 'GCE_PARAMS'):
            if not service_account_email:
                service_account_email = secrets.GCE_PARAMS[0]
            if not credentials_file:
                credentials_file = secrets.GCE_PARAMS[1]
        keyword_params = getattr(secrets, 'GCE_KEYWORD_PARAMS', {})
        if not project_id:
            project_id = keyword_params.get('project', None)
    return (service_account_email, credentials_file, project_id)


def _get_gcp_credentials(module, require_valid_json=True, check_libcloud=False):
    """
    Obtain GCP credentials by trying various methods.

    There are 3 ways to specify GCP credentials:
    1. Specify via Ansible module parameters (recommended).
    2. Specify via environment variables.  Two sets of env vars are available:
       a) GOOGLE_CLOUD_PROJECT, GOOGLE_APPLICATION_CREDENTIALS (preferred)
       b) GCE_PROJECT, GCE_CREDENTIALS_FILE_PATH, GCE_EMAIL (legacy, not
          recommended; req'd if using p12 key)
    3. Specify via libcloud secrets.py file (deprecated).

    There are 3 helper functions to assist in the above.

    Regardless of method, the user also has the option of specifying a JSON
    file or a p12 file as the credentials file.  JSON is strongly recommended and
    p12 will be removed in the future.

    Additionally, flags may be set to require valid json and check the libcloud
    version.

    AnsibleModule.fail_json is called only if the project_id cannot be found.

    :param module: initialized Ansible module object
    :type module: `class AnsibleModule`
    :param require_valid_json: If true, require credentials to be valid JSON.  Default is True.
    :type require_valid_json: ``bool``
    :param check_libcloud: If true, check the libcloud version available to see if
                           JSON creds are supported.
    :type check_libcloud: ``bool``
    :return: {'service_account_email': service_account_email,
              'credentials_file': credentials_file,
              'project_id': project_id}
    :rtype: ``dict``
    """
    (service_account_email,
     credentials_file,
     project_id) = _get_gcp_ansible_credentials(module)

    # If any of the values are not given as parameters, check the appropriate
    # environment variables.
    (service_account_email,
     credentials_file,
     project_id) = _get_gcp_environment_credentials(service_account_email,
                                                    credentials_file, project_id)

    # If we still don't have one or more of our credentials, attempt to
    # get the remaining values from the libcloud secrets file.
    (service_account_email,
     credentials_file,
     project_id) = _get_gcp_libcloud_credentials(module, service_account_email,
                                                 credentials_file, project_id)

    if credentials_file is None or project_id is None or service_account_email is None:
        if check_libcloud is True:
            if project_id is None:
                # TODO(supertom): this message is legacy and integration tests
                # depend on it.
                module.fail_json(msg='Missing GCE connection parameters in libcloud '
                                     'secrets file.')
        else:
            if project_id is None:
                module.fail_json(msg=('GCP connection error: unable to determine project (%s) or '
                                      'credentials file (%s)' % (project_id, credentials_file)))

    # Set these fields to empty strings if they are None
    # consumers of this will make the distinction between an empty string
    # and None.
    if credentials_file is None:
        credentials_file = ''
    if service_account_email is None:
        service_account_email = ''

    # ensure the credentials file is found and is in the proper format.
    if credentials_file:
        _validate_credentials_file(module, credentials_file,
                                   require_valid_json=require_valid_json,
                                   check_libcloud=check_libcloud)

    return {'service_account_email': service_account_email,
            'credentials_file': credentials_file,
            'project_id': project_id}
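# A rough sketch of how the resolution order implemented above plays out
# (the module object and all values here are hypothetical, for illustration
# only):
#
#   module.params = {'project_id': 'my-project'}         # 1. module params
#   os.environ['GCE_EMAIL'] = 'svc@example.com'          # 2. env vars
#   os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = '/tmp/creds.json'
#
#   _get_gcp_credentials(module)
#   # -> {'service_account_email': 'svc@example.com',
#   #     'credentials_file': '/tmp/creds.json',
#   #     'project_id': 'my-project'}
#
# Explicit module parameters win, environment variables fill any gaps, and
# the libcloud secrets.py file (deprecated) is consulted last.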
def _validate_credentials_file(module, credentials_file, require_valid_json=True, check_libcloud=False):
    """
    Check for valid credentials file.

    Optionally check for JSON format and if libcloud supports JSON.

    :param module: initialized Ansible module object
    :type module: `class AnsibleModule`
    :param credentials_file: path to file on disk
    :type credentials_file: ``str``.  Complete path to file on disk.
    :param require_valid_json: If true, require credentials to be valid JSON.  Default is True.
    :type require_valid_json: ``bool``
    :param check_libcloud: If true, check the libcloud version available to see if
                           JSON creds are supported.
    :type check_libcloud: ``bool``
    :returns: True
    :rtype: ``bool``
    """
    try:
        # Try to read credentials as JSON
        with open(credentials_file) as credentials:
            json.loads(credentials.read())
            # If the credentials are proper JSON and we do not have the minimum
            # required libcloud version, bail out and return a descriptive
            # error
            if check_libcloud and LooseVersion(libcloud.__version__) < '0.17.0':
                module.fail_json(msg='Using JSON credentials but libcloud minimum version not met. '
                                     'Upgrade to libcloud>=0.17.0.')
            return True
    except IOError as e:
        module.fail_json(msg='GCP Credentials File %s not found.' % credentials_file, changed=False)
        return False
    except ValueError as e:
        if require_valid_json:
            module.fail_json(
                msg='GCP Credentials File %s invalid. Must be valid JSON.' % credentials_file,
                changed=False)
        else:
            module.deprecate(msg=("Non-JSON credentials file provided. This format is deprecated. "
                                  " Please generate a new JSON key from the Google Cloud console"),
                             version=2.5)
            return True


def gcp_connect(module, provider, get_driver, user_agent_product, user_agent_version):
    """Return a Google libcloud driver connection."""
    if not HAS_LIBCLOUD_BASE:
        module.fail_json(msg='libcloud must be installed to use this module')

    creds = _get_gcp_credentials(module,
                                 require_valid_json=False,
                                 check_libcloud=True)
    try:
        gcp = get_driver(provider)(creds['service_account_email'], creds['credentials_file'],
                                   datacenter=module.params.get('zone', None),
                                   project=creds['project_id'])
        gcp.connection.user_agent_append("%s/%s" % (
            user_agent_product, user_agent_version))
    except (RuntimeError, ValueError) as e:
        module.fail_json(msg=str(e), changed=False)
    except Exception as e:
        module.fail_json(msg=unexpected_error_msg(e), changed=False)
    return gcp


def get_google_cloud_credentials(module, scopes=[]):
    """
    Get credentials object for use with Google Cloud client.

    Attempts to obtain credentials by calling _get_gcp_credentials. If those are
    not present will attempt to connect via Application Default Credentials.

    To connect via libcloud, don't use this function, use gcp_connect instead.  For
    Google Python API Client, see get_google_api_auth for how to connect.
For more information on Google's client library options for Python, see: U(https://cloud.google.com/apis/docs/client-libraries-explained#google_api_client_libraries) Google Cloud example: creds, params = get_google_cloud_credentials(module, scopes, user_agent_product, user_agent_version) pubsub_client = pubsub.Client(project=params['project_id'], credentials=creds) pubsub_client.user_agent = 'ansible-pubsub-0.1' ... :param module: initialized Ansible module object :type module: `class AnsibleModule` :param scopes: list of scopes :type module: ``list`` of URIs :returns: A tuple containing (google authorized) credentials object and params dict {'service_account_email': '...', 'credentials_file': '...', 'project_id': ...} :rtype: ``tuple`` """ if not HAS_GOOGLE_AUTH: module.fail_json(msg='Please install google-auth.') conn_params = _get_gcp_credentials(module, require_valid_json=True, check_libcloud=False) try: if conn_params['credentials_file']: credentials = service_account.Credentials.from_service_account_file( conn_params['credentials_file']) if scopes: credentials = credentials.with_scopes(scopes) else: (credentials, project_id) = google.auth.default( scopes=scopes) if project_id is not None: conn_params['project_id'] = project_id return (credentials, conn_params) except Exception as e: module.fail_json(msg=unexpected_error_msg(e), changed=False) return (None, None) def get_google_api_auth(module, scopes=[], user_agent_product='ansible-python-api', user_agent_version='NA'): """ Authentication for use with google-python-api-client. Function calls get_google_cloud_credentials, which attempts to assemble the credentials from various locations. Next it attempts to authenticate with Google. This function returns an httplib2 (compatible) object that can be provided to the Google Python API client. For libcloud, don't use this function, use gcp_connect instead. For Google Cloud, See get_google_cloud_credentials for how to connect. For more information on Google's client library options for Python, see: U(https://cloud.google.com/apis/docs/client-libraries-explained#google_api_client_libraries) Google API example: http_auth, conn_params = get_google_api_auth(module, scopes, user_agent_product, user_agent_version) service = build('myservice', 'v1', http=http_auth) ... :param module: initialized Ansible module object :type module: `class AnsibleModule` :param scopes: list of scopes :type scopes: ``list`` of URIs :param user_agent_product: User agent product. eg: 'ansible-python-api' :type user_agent_product: ``str`` :param user_agent_version: Version string to append to product. eg: 'NA' or '0.1' :type user_agent_version: ``str`` :returns: A tuple containing (google authorized) httplib2 request object and a params dict {'service_account_email': '...', 'credentials_file': '...', 'project_id': ...} :rtype: ``tuple`` """ if not HAS_GOOGLE_API_LIB: module.fail_json(msg="Please install google-api-python-client library") if not scopes: scopes = GCP_DEFAULT_SCOPES try: (credentials, conn_params) = get_google_cloud_credentials(module, scopes) http = set_user_agent(Http(), '%s-%s' % (user_agent_product, user_agent_version)) http_auth = google_auth_httplib2.AuthorizedHttp(credentials, http=http) return (http_auth, conn_params) except Exception as e: module.fail_json(msg=unexpected_error_msg(e), changed=False) return (None, None) def get_google_api_client(module, service, user_agent_product, user_agent_version, scopes=None, api_version='v1'): """ Get the discovery-based python client. 
    Use when a cloud client is not available.

    client = get_google_api_client(module, 'compute', user_agent_product=USER_AGENT_PRODUCT,
                                   user_agent_version=USER_AGENT_VERSION)

    :returns: A tuple containing the authorized client to the specified
              service and a params dict {'service_account_email': '...',
                                         'credentials_file': '...',
                                         'project_id': ...}
    :rtype: ``tuple``
    """
    if not scopes:
        scopes = GCP_DEFAULT_SCOPES

    http_auth, conn_params = get_google_api_auth(module, scopes=scopes,
                                                 user_agent_product=user_agent_product,
                                                 user_agent_version=user_agent_version)
    client = build(service, api_version, http=http_auth)

    return (client, conn_params)


def check_min_pkg_version(pkg_name, minimum_version):
    """Check that the installed version of pkg_name is at least minimum_version."""
    from pkg_resources import get_distribution
    try:
        installed_version = get_distribution(pkg_name).version
        return LooseVersion(installed_version) >= minimum_version
    except Exception as e:
        return False


def unexpected_error_msg(error):
    """Create an error string based on passed in error."""
    return 'Unexpected response: (%s). Detail: %s' % (str(error), traceback.format_exc())


def get_valid_location(module, driver, location, location_type='zone'):
    if location_type == 'zone':
        l = driver.ex_get_zone(location)
    else:
        l = driver.ex_get_region(location)
    if l is None:
        link = 'https://cloud.google.com/compute/docs/regions-zones/regions-zones#available'
        module.fail_json(msg=('%s %s is invalid. Please see the list of '
                              'available %s at %s' % (
                                  location_type, location, location_type, link)),
                         changed=False)
    return l


def check_params(params, field_list):
    """
    Helper to validate params.

    Use this in function definitions if they require specific fields
    to be present.

    :param params: structure that contains the fields
    :type params: ``dict``

    :param field_list: list of dict representing the fields
                       [{'name': str, 'required': True/False, 'type': cls}]
    :type field_list: ``list`` of ``dict``

    :return True or raises ValueError
    :rtype: ``bool`` or `class:ValueError`
    """
    for d in field_list:
        if not d['name'] in params:
            if 'required' in d and d['required'] is True:
                raise ValueError(("%s is required and must be of type: %s" %
                                  (d['name'], str(d['type']))))
        else:
            if not isinstance(params[d['name']], d['type']):
                raise ValueError(("%s must be of type: %s. %s (%s) provided." % (
                    d['name'], str(d['type']), params[d['name']],
                    type(params[d['name']]))))
            if 'values' in d:
                if params[d['name']] not in d['values']:
                    raise ValueError(("%s must be one of: %s" % (
                        d['name'], ','.join(d['values']))))
            if isinstance(params[d['name']], int):
                if 'min' in d:
                    if params[d['name']] < d['min']:
                        raise ValueError(("%s must be greater than or equal to: %s" % (
                            d['name'], d['min'])))
                if 'max' in d:
                    if params[d['name']] > d['max']:
                        raise ValueError("%s must be less than or equal to: %s" % (
                            d['name'], d['max']))
    return True


class GCPUtils(object):
    """
    Helper utilities for GCP.
    """

    @staticmethod
    def underscore_to_camel(txt):
        return txt.split('_')[0] + ''.join(x.capitalize() or '_' for x in txt.split('_')[1:])

    @staticmethod
    def remove_non_gcp_params(params):
        """
        Remove params if found.
        """
        params_to_remove = ['state']
        for p in params_to_remove:
            if p in params:
                del params[p]

        return params

    @staticmethod
    def params_to_gcp_dict(params, resource_name=None):
        """
        Recursively convert ansible params to GCP Params.
Keys are converted from snake to camelCase ex: default_service to defaultService Handles lists, dicts and strings special provision for the resource name """ if not isinstance(params, dict): return params gcp_dict = {} params = GCPUtils.remove_non_gcp_params(params) for k, v in params.items(): gcp_key = GCPUtils.underscore_to_camel(k) if isinstance(v, dict): retval = GCPUtils.params_to_gcp_dict(v) gcp_dict[gcp_key] = retval elif isinstance(v, list): gcp_dict[gcp_key] = [GCPUtils.params_to_gcp_dict(x) for x in v] else: if resource_name and k == resource_name: gcp_dict['name'] = v else: gcp_dict[gcp_key] = v return gcp_dict @staticmethod def execute_api_client_req(req, client=None, raw=True, operation_timeout=180, poll_interval=5, raise_404=True): """ General python api client interaction function. For use with google-api-python-client, or clients created with get_google_api_client function Not for use with Google Cloud client libraries For long-running operations, we make an immediate query and then sleep poll_interval before re-querying. After the request is done we rebuild the request with a get method and return the result. """ try: resp = req.execute() if not resp: return None if raw: return resp if resp['kind'] == 'compute#operation': resp = GCPUtils.execute_api_client_operation_req(req, resp, client, operation_timeout, poll_interval) if 'items' in resp: return resp['items'] return resp except HttpError as h: # Note: 404s can be generated (incorrectly) for dependent # resources not existing. We let the caller determine if # they want 404s raised for their invocation. if h.resp.status == 404 and not raise_404: return None else: raise except Exception: raise @staticmethod def execute_api_client_operation_req(orig_req, op_resp, client, operation_timeout=180, poll_interval=5): """ Poll an operation for a result. """ parsed_url = GCPUtils.parse_gcp_url(orig_req.uri) project_id = parsed_url['project'] resource_name = GCPUtils.get_gcp_resource_from_methodId( orig_req.methodId) resource = GCPUtils.build_resource_from_name(client, resource_name) start_time = time.time() complete = False attempts = 1 while not complete: if start_time + operation_timeout >= time.time(): op_req = client.globalOperations().get(